def executer(self, nomClasse, classeSNRC, requete, repTravail):
    #-------------------------------------------------------------------------------------
    """
    Create one layer (.lyr) file per UTM zone containing the SNRC tiles,
    filtered by a query that selects the tiles in which elements of the
    given class are present.

    Parameters:
    -----------
    nomClasse : Name of the class being processed; substituted into the
                query template.
    classeSNRC : Name of the FeatureClass holding the SNRC tile features.
    requete : Attribute query template used for each UTM-zone layer; the
              placeholders [NOM_CLASSE] and [ZONE_UTM] are replaced per zone.
    repTravail : Working directory under which the per-zone layer files are
                 written (repTravail\\nomClasse\\<layer>.lyr).
    """
    # Force overwriting of existing output files
    arcpy.env.overwriteOutput = True

    # Process UTM zones 7 through 22 inclusive
    for zoneUTM in range(7, 23):
        # Announce the zone being processed
        arcpy.AddMessage(" ")
        arcpy.AddMessage("-Traitement de la zone UTM :" + str(zoneUTM))

        # Build the per-zone query by substituting both placeholders
        requeteZoneUtm = requete.replace("[NOM_CLASSE]", nomClasse).replace(
            "[ZONE_UTM]", str(zoneUTM))

        # Name of the layer of SNRC tiles for this UTM zone
        lyrDecoupage = "BDG_DBA.ges_Decoupage_SNRC50K_2" + "_" + str(
            zoneUTM) + ".lyr"

        # Process: Make Feature Layer
        arcpy.AddMessage('MakeFeatureLayer_management "' + classeSNRC + '" ' +
                         requeteZoneUtm + '"')
        arcpy.MakeFeatureLayer_management(classeSNRC, lyrDecoupage,
                                          requeteZoneUtm)
        # Echo only the last message emitted by the tool run
        arcpy.AddMessage(arcpy.GetMessage(arcpy.GetMessageCount() - 1))

        # Process: Select Layer By Attribute
        arcpy.AddMessage("SelectLayerByAttribute_management " + lyrDecoupage +
                         " NEW_SELECTION")
        arcpy.SelectLayerByAttribute_management(lyrDecoupage, "NEW_SELECTION")
        arcpy.AddMessage(arcpy.GetMessage(arcpy.GetMessageCount() - 1))

        # Process: Save To Layer File
        arcpy.AddMessage("SaveToLayerFile_management " + lyrDecoupage + " " +
                         repTravail + "\\" + nomClasse + "\\" + lyrDecoupage)
        arcpy.SaveToLayerFile_management(
            lyrDecoupage, repTravail + "\\" + nomClasse + "\\" + lyrDecoupage)
        arcpy.AddMessage(arcpy.GetMessage(arcpy.GetMessageCount() - 1))

    # Done
    return
def createDBConnectionFile(instance, database_type, database,
                           account_authentication, dbms_admin, dbms_admin_pwd):
    """Create an .sde connection file for the DBMS administrator.

    The file is written to the system temp directory and its full path is
    returned.  If arcpy reports message 000565 (connection failure) the
    process exits with code 3.

    Parameters mirror arcpy.CreateDatabaseConnection_management:
    instance               -- database server/instance string
    database_type          -- DBMS platform keyword
    database               -- database name
    account_authentication -- 'DATABASE_AUTH' or 'OPERATING_SYSTEM_AUTH'
    dbms_admin             -- administrator user name
    dbms_admin_pwd         -- administrator password
    """
    # Build a file-system-safe name from the instance string.
    instance_temp = instance.replace("\\", "_").replace("/", "_").replace(":", "_")
    Conn_File_NameT = instance_temp + "_" + database + "_" + dbms_admin

    # Resolve the scratch directory.  The original code unconditionally
    # overwrote the TEMP-based choice with the TMP branch, so TEMP was never
    # honoured when both variables were set; prefer TEMP, then TMP, then a
    # platform default.
    temp = os.environ.get("TEMP") or os.environ.get("TMP")
    if temp is None:
        temp = "c:\\temp" if os.name == "nt" else "/usr/tmp"

    Connection_File_Name = Conn_File_NameT + ".sde"
    Connection_File_Name_full_path = temp + os.sep + Conn_File_NameT + ".sde"

    # Check for the .sde file and delete it if present
    arcpy.env.overwriteOutput = True
    if os.path.exists(Connection_File_Name_full_path):
        os.remove(Connection_File_Name_full_path)

    try:
        arcpy.AddMessage("Creating Database Connection File...")
        # Process: Create Database Connection File...
        # Usage: out_file_location, out_file_name, DBMS_TYPE, instance,
        # account_authentication, username, password, database,
        # save_username_password (must be true)
        arcpy.CreateDatabaseConnection_management(
            out_folder_path=temp,
            out_name=Connection_File_Name,
            database_platform=database_type,
            instance=instance,
            database=database,
            account_authentication=account_authentication,
            username=dbms_admin,
            password=dbms_admin_pwd,
            save_user_pass="******")
        for i in range(arcpy.GetMessageCount()):
            # Message 000565 means the database connection FAILED
            # (the original comment said "successful", which was wrong).
            if "000565" in arcpy.GetMessage(i):
                arcpy.AddReturnMessage(i)
                arcpy.AddMessage("++++++++++++++++++")
                arcpy.AddMessage("Exiting!!")
                arcpy.AddMessage("++++++++++++++++++")
                sys.exit(3)
            else:
                arcpy.AddReturnMessage(i)
        arcpy.AddMessage("++++++++++++++++++")
    except Exception:
        # Deliberately not a bare 'except:': the bare handler also caught the
        # SystemExit raised by sys.exit(3) above, silently cancelling the exit.
        for i in range(arcpy.GetMessageCount()):
            arcpy.AddReturnMessage(i)

    connection = Connection_File_Name_full_path
    return connection
def createUserConnectionFiles(instance, database_type, database,
                              account_authentication, role, dbuser_pwd,
                              out_folder_path):
    """Create an .sde connection file for a role-specific database user.

    The user name is 'SDE' for the geodatabase administrator role, otherwise
    <DATABASE>_<ROLE> (upper-cased).  Returns the full path of the .sde file.
    Exits the process with code 3 if arcpy reports message 000565
    (connection failure).
    """
    # Derive the user name for this role.
    if role == 'sde':
        dbuser_name = 'SDE'
    else:
        dbuser_name = database.upper() + '_' + role.upper()

    # Build a file-system-safe connection file name from the instance string.
    instance_temp = instance.replace("\\", "_").replace("/", "_").replace(":", "_")
    Conn_File_NameT = instance_temp + "." + database + "." + dbuser_name
    Connection_File_Name = Conn_File_NameT + ".sde"
    Connection_File_Name_full_path = out_folder_path + os.sep + Conn_File_NameT + ".sde"

    # Check for the .sde file and delete it if present
    arcpy.env.overwriteOutput = True
    if os.path.exists(Connection_File_Name_full_path):
        os.remove(Connection_File_Name_full_path)

    try:
        arcpy.AddMessage("Creating Database Connection File...")
        # Process: Create Database Connection File...
        # Usage: out_file_location, out_file_name, DBMS_TYPE, instance,
        # account_authentication, username, password, database,
        # save_username_password (must be true)
        arcpy.CreateDatabaseConnection_management(
            out_folder_path=out_folder_path,
            out_name=Connection_File_Name,
            database_platform=database_type,
            instance=instance,
            database=database,
            account_authentication=account_authentication,
            username=dbuser_name,
            password=dbuser_pwd,
            save_user_pass="******")
        for i in range(arcpy.GetMessageCount()):
            # Message 000565 means the database connection FAILED.
            if "000565" in arcpy.GetMessage(i):
                arcpy.AddReturnMessage(i)
                arcpy.AddMessage("++++++++++++++++++")
                arcpy.AddMessage("Exiting!!")
                arcpy.AddMessage("++++++++++++++++++")
                sys.exit(3)
            else:
                arcpy.AddReturnMessage(i)
        arcpy.AddMessage("++++++++++++++++++")
    except Exception:
        # Not a bare 'except:': the bare handler also swallowed the
        # SystemExit raised by sys.exit(3) above, cancelling the exit.
        for i in range(arcpy.GetMessageCount()):
            arcpy.AddReturnMessage(i)

    return Connection_File_Name_full_path
def createDBRole(connection, role):
    """Create a database role via arcpy, relaying all tool messages.

    On any failure the geoprocessing messages are echoed and the error is
    otherwise suppressed (best-effort semantics).
    """
    def _echo_tool_messages():
        # Relay every message from the last geoprocessing call to the caller.
        for index in range(arcpy.GetMessageCount()):
            arcpy.AddReturnMessage(index)

    try:
        arcpy.AddMessage('Creating database role ' + role)
        arcpy.CreateRole_management(input_database=connection,
                                    grant_revoke='GRANT',
                                    role=role,
                                    user_name='')
        _echo_tool_messages()
        arcpy.AddMessage("++++++++++++++++++")
    except:
        _echo_tool_messages()
def createVtpkIndexAndPackage(in_map, service_type, tileScheme, vertex_count,
                              indexPolygon, outVtpk):
    """Build a vector tile index for a map, then create an INDEXED vector
    tile package from it.

    in_map       -- input map
    service_type -- service type keyword for index/package creation
    tileScheme   -- tiling scheme file to use
    vertex_count -- vertex threshold for index polygon subdivision
    indexPolygon -- output index feature class
    outVtpk      -- output .vtpk path; returned on success, None on failure
    """
    try:
        arcpy.management.CreateVectorTileIndex(in_map=in_map,
                                               out_featureclass=indexPolygon,
                                               service_type=service_type,
                                               tiling_scheme=tileScheme,
                                               vertex_count=vertex_count)
        arcpy.AddMessage("tile index - ready.")
        arcpy.management.CreateVectorTilePackage(in_map=in_map,
                                                 output_file=outVtpk,
                                                 service_type=service_type,
                                                 tiling_scheme=tileScheme,
                                                 tile_structure="INDEXED",
                                                 min_cached_scale="",
                                                 max_cached_scale="",
                                                 index_polygons=indexPolygon,
                                                 summary=None,
                                                 tags=None)
        if os.path.exists(outVtpk):
            arcpy.AddMessage("Pro standard tile package - ready!")
        return outVtpk
    except Exception:
        # 'except Exception' (not bare 'except:') so KeyboardInterrupt and
        # SystemExit are not swallowed.
        arcpy.AddError("Failed to create vector tile package. Please see the following error for details.")
        for i in range(0, arcpy.GetMessageCount()):
            mesSev = arcpy.GetSeverity(i)
            if mesSev == 1:        # severity 1 = warning
                arcpy.AddWarning(arcpy.GetMessage(i))
            elif mesSev == 2:      # severity 2 = error
                arcpy.AddError(arcpy.GetMessage(i))
def logPreviousToolMessages():
    """Replay every message from the previous geoprocessing tool run into
    the current tool's output.

    The original used a manual while-counter and fetched GetMessage(i) into
    a local that was never used; a plain for-loop over the message count is
    the idiomatic equivalent.
    """
    for i in range(arcpy.GetMessageCount()):
        arcpy.AddReturnMessage(i)
def delete_cwd_dir(argv=None):
    """Delete the cost-weighted distance directory and its CWD rasters.

    argv -- optional argument list (defaults to sys.argv); argv[1] is the
            project directory chosen in the ArcGIS tool dialog.

    Tries arcpy.Delete_management first, then falls back to shutil.rmtree;
    if both fail, error messages are reported and the process exits.
    """
    if argv is None:
        argv = sys.argv
    # Get parameters from ArcGIS tool dialog
    projectDir = argv[1]
    cwdBaseDir = os.path.join(projectDir, "datapass\\cwd")
    try:
        if os.path.exists(cwdBaseDir):
            arcpy.Delete_management(cwdBaseDir)
    except Exception:
        try:
            # Fallback: plain file-system removal when arcpy cannot delete
            # (e.g. a raster is locked by an open ArcMap session).
            if os.path.exists(cwdBaseDir):
                shutil.rmtree(cwdBaseDir)
        except Exception:
            arcpy.AddError("Unable to delete cwd directory. One of the rasters "
                           "might have been open in ArcMap.\n You may "
                           'need to re-start ArcMap to release the file lock.')
            # range(count), not range(count - 1): the original dropped the
            # last message, which is usually the error itself.
            for msg in range(arcpy.GetMessageCount()):
                if arcpy.GetSeverity(msg) == 2:
                    arcpy.AddReturnMessage(msg)
                    # Print the message text; the original printed the None
                    # returned by a second AddReturnMessage call, which also
                    # duplicated the message in the tool output.
                    print(arcpy.GetMessage(msg))
            sys.exit(0)
    return
def createDBRoleSQL(instance, database, dbms_admin, dbms_admin_pwd, role):
    """Create a database role by executing the createRole SQL template
    directly against the instance (bypassing the arcpy CreateRole tool).

    Failures are reported via the geoprocessing message stream and otherwise
    suppressed.
    """
    try:
        arcpy.AddMessage('Creating database role ' + role)
        statement = gc_sql_utils.createRole.format(database=database, role=role)
        executeSQL(statement, instance, dbms_admin, dbms_admin_pwd)
        arcpy.AddMessage("++++++++++++++++++")
    except:
        for msg_index in range(arcpy.GetMessageCount()):
            arcpy.AddReturnMessage(msg_index)
def createDBUser(database, connection, dbuser_pwd, role):
    """Create the database user <DATABASE>_<ROLE> and add it to the
    R_SDE_<ROLE> role.

    database   -- database name (used to derive the user name)
    connection -- admin .sde connection file
    dbuser_pwd -- password for the new user
    role       -- role suffix; the user joins role 'R_SDE_' + role.upper()

    Failures are echoed through the geoprocessing message stream.
    """
    try:
        user_name = (database + '_' + role).upper()
        role = 'R_SDE_' + role.upper()
        arcpy.AddMessage('Creating database user ' + user_name)
        arcpy.CreateDatabaseUser_management(input_database=connection,
                                            user_authentication_type='DATABASE_USER',
                                            user_name=user_name,
                                            user_password=dbuser_pwd,
                                            role=role,
                                            tablespace_name='')
        for message_index in range(arcpy.GetMessageCount()):
            arcpy.AddReturnMessage(message_index)
        arcpy.AddMessage("++++++++++++++++++")
    except:
        for message_index in range(arcpy.GetMessageCount()):
            arcpy.AddReturnMessage(message_index)
def createSDE(database_type, instance, database, account_authentication,
              dbms_admin, dbms_admin_pwd, schema_type, gdb_admin,
              gdb_admin_pwd, tablespace, license):
    """Create an enterprise geodatabase after verifying the product license.

    ArcView/Engine licenses cannot create an enterprise geodatabase, so the
    process exits early in that case.  All geoprocessing messages are echoed
    whether the creation succeeds or fails.

    NOTE(review): the parameter 'license' shadows the Python builtin; left
    as-is to preserve the public signature.
    """
    # Get the current product license
    product_license = arcpy.ProductInfo()

    # Checks required license level
    if product_license.upper() == "ARCVIEW" or product_license.upper(
    ) == 'ENGINE':
        print(
            "\n" + product_license + " license found!" +
            " Creating an enterprise geodatabase requires an ArcGIS for Desktop Standard or Advanced, ArcGIS Engine with the Geodatabase Update extension, or ArcGIS for Server license."
        )
        sys.exit("Re-authorize ArcGIS before creating enterprise geodatabase.")
    else:
        print("\n" + product_license +
              " license available! Continuing to create...")
        arcpy.AddMessage("++++++++++++++++++")

    try:
        arcpy.AddMessage("Creating enterprise geodatabase...")
        arcpy.CreateEnterpriseGeodatabase_management(
            database_platform=database_type,
            instance_name=instance,
            database_name=database,
            account_authentication=account_authentication,
            database_admin=dbms_admin,
            database_admin_password=dbms_admin_pwd,
            sde_schema=schema_type,
            gdb_admin_name=gdb_admin,
            gdb_admin_password=gdb_admin_pwd,
            tablespace_name=tablespace,
            authorization_file=license)
        # Echo every message produced by the tool run
        for i in range(arcpy.GetMessageCount()):
            arcpy.AddReturnMessage(i)
        arcpy.AddMessage("++++++++++++++++++")
    except:
        # Best-effort: on failure, still relay the tool messages.
        # NOTE(review): this bare except also catches KeyboardInterrupt.
        for i in range(arcpy.GetMessageCount()):
            arcpy.AddReturnMessage(i)
def exit_with_geoproc_error(filename):
    """Handle geoprocessor errors and provide details to the user, then exit.

    filename -- name of the script reporting the failure; shown in the
                error banner together with the failing line number.

    Must be called from inside an 'except' block (it reads sys.exc_info()).
    """
    tb = sys.exc_info()[2]  # get the traceback object
    # tbinfo contains the error's line number and the code
    tbinfo = traceback.format_tb(tb)[0]
    line = tbinfo.split(", ")[1]
    arcpy.AddError("Geoprocessing error on **" + line + "** of " + filename +
                   " :")
    # range(count), not range(count - 1): the original skipped the last
    # message, which is usually the actual error.
    for msg in range(arcpy.GetMessageCount()):
        if arcpy.GetSeverity(msg) == 2:
            arcpy.AddReturnMessage(msg)
            # Print the message text; the original printed the None returned
            # by a second AddReturnMessage call, duplicating the message.
            print(arcpy.GetMessage(msg))
    sys.exit(0)
def GenerateVtpkTilingScheme(in_map, tileScheme):
    """Generate a 20-level, 512x512, 96-dpi map-server-cache tiling scheme
    suitable for vector tile packaging.

    in_map     -- input map
    tileScheme -- output tiling scheme file; returned on success, None on
                  failure
    """
    scales = "295829355.454565;147914677.727283;73957338.8636413;36978669.4318207;18489334.7159103;9244667.35795516;4622333.67897758;2311166.83948879;1155583.4197444;577791.709872198;288895.854936099;144447.927468049;72223.9637340247;36111.9818670124;18055.9909335062;9027.99546675309;4513.99773337654;2256.99886668827;1128.49943334414;564.249716672068"
    tile_origin = "-180 180"
    try:
        arcpy.server.GenerateMapServerCacheTilingScheme(in_map=in_map,
                                                        tile_origin=tile_origin,
                                                        output_tiling_scheme=tileScheme,
                                                        num_of_scales=20,
                                                        scales=scales,
                                                        dots_per_inch=96,
                                                        tile_size="512 x 512")
        arcpy.AddMessage("tile scheme - ready.")
        return tileScheme
    except Exception:
        # 'except Exception' (not bare) so SystemExit/KeyboardInterrupt pass.
        arcpy.AddError("Failed to create vector tile package. Please see the following error for details.")
        for i in range(0, arcpy.GetMessageCount()):
            mesSev = arcpy.GetSeverity(i)
            if mesSev == 1:
                # Severity 1 is a warning; the original reported it with
                # AddError, unlike the sibling createVtpkIndexAndPackage().
                arcpy.AddWarning(arcpy.GetMessage(i))
            elif mesSev == 2:
                arcpy.AddError(arcpy.GetMessage(i))
def executer(self, database, repertoire, fgdb, proprietaire, compression): #------------------------------------------------------------------------------------- """ Exécuter le traitement pour créer une copie d'une Base de Données spatiale dans une FGDB. Paramètres: ----------- database : Nom de la Base de Données dans lequel on veut copier les tables et les FeatureClass dans une FGDB. repertoire : Nom du répertoire dans lequel la FGDB doit être créée. fgdb : Nom de la FGDB à créer. proprietaire : Nom du propriétaire des tables ou featureClass à copier dans la FGDB. compression : Indique si on doit effectuer une compression (True) ou non (False). Variables: ---------- """ #Définir le Workspace par défaut selan celui de la Base de Données arcpy.env.workspace = database #Extraire la liste des Tables listeTables = arcpy.ListTables(proprietaire + "*") #Extraire la liste des FeatureClass listeFeatureClass = arcpy.ListFeatureClasses(proprietaire + "*") #Vérifier si la FGDB existe déj� if arcpy.Exists(repertoire + "\\" + fgdb + ".gdb"): #Envoyer un avertissement arcpy.AddWarning("La FGDB existe déjà !") fgdb = repertoire + "\\" + fgdb + ".gdb" #Si la FGDB n'existe pas else: #Créer la FGDB arcpy.AddMessage(" ") fgdb = str(arcpy.CreateFileGDB_management(repertoire, fgdb)) arcpy.AddMessage(arcpy.GetMessages()) #Copier toutes les tables de la DataBase vers la FGDB for table in listeTables: #Afficher un message pour Copier la table dans la FGDB arcpy.AddMessage(" ") arcpy.AddMessage("Executing: TableToTable " + table + " " + fgdb + " " + table.replace(proprietaire + ".", "")) #Vérifier si la table existe déj� if arcpy.Exists(fgdb + "\\" + table.replace(proprietaire + ".", "")): #Envoyer un avertissement arcpy.AddWarning("La table existe déjà !") #Si la table n'existe pas else: try: #Copier la table dans la FGDB arcpy.TableToTable_conversion( table, fgdb, table.replace(proprietaire + ".", "")) #Afficher tous les messages sauf le premier for i in range(1, 
arcpy.GetMessageCount()): arcpy.AddMessage(arcpy.GetMessage(i)) #Gestion des erreurs except Exception, err: #Afficher tous les messages sauf le premier for i in range(1, arcpy.GetMessageCount()): #Afficher un message d'erreur arcpy.AddError(arcpy.GetMessage(i))
GPMsg("Shrinking edges by %.1f ..." % shrink_dist) tmpShrink = SetNull( \ EucDistance(SetNull(IsNull(tmpShrink) == 0, 1)) \ <= shrink_dist, 1) env.cellSize = None arcpy.RasterToPolygon_conversion(tmpShrink, out_polys, simplify) else: env.cellSize = None arcpy.RasterToPolygon_conversion(tmpShrink, out_polys, simplify) except MsgError, xmsg: GPMsg("e",str(xmsg)) except arcpy.ExecuteError: tbinfo = traceback.format_tb(sys.exc_info()[2])[0] GPMsg("e",tbinfo.strip()) numMsg = arcpy.GetMessageCount() for i in range(0, numMsg): GPMsg("return",i) except Exception, xmsg: tbinfo = traceback.format_tb(sys.exc_info()[2])[0] GPMsg("e",tbinfo + str(xmsg)) finally: ##GPMsg("cleaning up") for f in [lyrPolys, tmpFC, tmpClip, tmpPoly]: try: if f: arcpy.Delete_management(f) except: pass if __name__ == "__main__": # ArcGIS Script tool interface
os.remove(Connection_File_Name_full_path) print "Creating egdb Database Connection File..." # Process: Create egdb Database Connection File... # Usage: out_file_location, out_file_name, DBMS_TYPE, instance, database, account_authentication, username, password, save_username_password(must be true) arcpy.CreateDatabaseConnection_management( out_folder_path=Connection_File_Out_Folder, out_name=Connection_File_Name, database_platform=database_type, instance=instance, database=database, account_authentication=account_authentication, username=username, password=password, save_user_pass="******") for i in range(arcpy.GetMessageCount()): if "000565" in arcpy.GetMessage( i): #Check if database connection was successful arcpy.AddReturnMessage(i) arcpy.AddMessage("Exiting!!") sys.exit(3) else: arcpy.AddReturnMessage(i) #Check if no value entered for option except SystemExit as e: if e.code == 2: parser.usage = "" print "\n" parser.print_help() parser.exit(2)
httpConn = httplib.HTTPSConnection(ags_server) httpConn.request("POST", folderURL, params, headers) response = httpConn.getresponse() httpConn.close() print 'AGS stop done' # --------------------------------- # Step for updating the data # --------------------------------- import arcpy mylayer = 'MylayerName' arcpy.MakeFeatureLayer_management(shp_path, mylayer, 'someParamsField > 100') for i in xrange(0, arcpy.GetMessageCount()): arcpy.AddReturnMessage(i) arcpy.Append_management(mylayer, target_layer, 'NO_TEST') for i in xrange(0, arcpy.GetMessageCount()): arcpy.AddReturnMessage(i) arcpy.RebuildIndexes_management(work_db, '#', [target_layer_name]) for i in xrange(0, arcpy.GetMessageCount()): arcpy.AddReturnMessage(i) # starting service folderURL = "/arcgis/admin/services/{}/start".format(service) params = urllib.urlencode({'token': token, 'f': 'json'}) headers = {"Content-type": "application/x-www-form-urlencoded", "Accept": "text/plain"}
def changePrivileges(dataset, role, view, write):
    """Apply view/write privilege changes on a dataset for a role.

    On failure, every geoprocessing message is forwarded through the
    module-level message() helper at error level (2).
    """
    try:
        arcpy.ChangePrivileges_management(dataset, role, view, write)
    except:
        message_total = arcpy.GetMessageCount()
        for message_index in range(message_total):
            message(message_index, 2)
#Name: John Doe #Date: Aug, 2011 #Purpose: Working with errors in Python import arcpy try: arcpy.env.workspace = "C:\\Users\\Me\\Desktop\\GIS Programming\\Training" arcpy.Buffer_analysis("test.shp","sch_buff") except: print arcpy.GetMessage(1) print arcpy.GetMessage (arcpy.GetMessageCount()-1)
#coding=gbk import arcpy count = arcpy.GetMessageCount() print count
arcpy.AddMessage("Creating Filtered 8bit GRID image...") neighborhood = NbrRectangle(3, 3, "CELL") outFocalStatistics = FocalStatistics(outNFRaster, neighborhood, "MEDIAN", "DATA") outFocalStatistics.save(fsMedRaster) #Process: Copy Raster to create an 8bit tif. # The input image has a Min-max range of 0-255 but NoData is -32868 and therefore defaulted to 16bit in a GRID. # Using copy Raster does not change the data range but converts to TIF and assigns NoData value to 256, therefore is an 8bit. arcpy.AddMessage("Creating Filtered 8bit TIF image ...") arcpy.CopyRaster_management(fsMedRaster, outFRaster, "", "", "", "NONE", "NONE", "8_BIT_UNSIGNED") except arcpy.ExecuteError: #Return Geoprocessing tool specific errors line, filename, err = trace() arcpy.AddError("Geoprocessing error on " + line + " of " + filename + " :") for msg in range(0, arcpy.GetMessageCount()): if arcpy.GetSeverity(msg) == 2: arcpy.AddReturnMessage(msg) except: #Returns Python and non-tool errors line, filename, err = trace() arcpy.AddError("Python error on " + line + " of " + filename) arcpy.AddError(err) finally: # Process: Delete intermediate files if arcpy.Exists(inRastC): arcpy.Delete_management(inRastC, "") if arcpy.Exists(scImage1): arcpy.Delete_management(scImage1, "") if arcpy.Exists(fsMedRaster):
def connect(database, server="<default server>", username="******", password="******", version="SDE.DEFAULT"):
    """Create (or reuse) an ArcSDE direct-connection .sde file for a SQL
    Server database and return its full path.

    database -- database name (lower-cased)
    server   -- SQL Server host used to build the direct-connect string
    username -- connection user name
    password -- connection password
    version  -- geodatabase version to connect to (upper-cased)

    Returns None if a SystemExit is raised internally (e.g. message 000565
    connection failure, which exits with code 3 but is caught below).
    """
    # Check if value entered for option
    try:
        # Usage parameters for spatial database connection to upgrade
        service = "sde:sqlserver:" + server
        account_authentication = 'DATABASE_AUTH'
        version = version.upper()
        database = database.lower()

        # Check if direct connection (service always contains ':' here, so
        # the else branch is effectively unreachable)
        if service.find(":") <> -1:  # This is direct connect
            # Strip characters that are not valid in a file name
            ServiceConnFileName = service.replace(":", "")
            ServiceConnFileName = ServiceConnFileName.replace(";", "")
            ServiceConnFileName = ServiceConnFileName.replace("=", "")
            ServiceConnFileName = ServiceConnFileName.replace("/", "")
            ServiceConnFileName = ServiceConnFileName.replace("\\", "")
        else:
            arcpy.AddMessage("\n+++++++++")
            arcpy.AddMessage("Exiting!!")
            arcpy.AddMessage("+++++++++")
            sys.exit(
                "\nSyntax for a direct connection in the Service parameter is required for geodatabase upgrade."
            )

        # Local variables
        Conn_File_NameT = server + "_" + ServiceConnFileName + "_" + database + "_" + username
        # NOTE(review): when both TEMP and TMP are set, the second if below
        # unconditionally overwrites the TEMP-based choice, so TMP always
        # wins — confirm this is intended.
        if os.environ.get("TEMP") == None:
            temp = "c:\\temp"
        else:
            temp = os.environ.get("TEMP")
        if os.environ.get("TMP") == None:
            temp = "/usr/tmp"
        else:
            temp = os.environ.get("TMP")

        Connection_File_Name = temp + os.sep + Conn_File_NameT + ".sde"
        # Reuse an existing connection file when present
        if os.path.isfile(Connection_File_Name):
            return Connection_File_Name

        # Check for the .sde file and delete it if present
        arcpy.env.overwriteOutput = True

        # Variables defined within the script; other variable options
        # commented out at the end of the line
        saveUserInfo = "SAVE_USERNAME"  #DO_NOT_SAVE_USERNAME
        saveVersionInfo = "SAVE_VERSION"  #DO_NOT_SAVE_VERSION

        print "\nCreating ArcSDE Connection File...\n"
        # Process: Create ArcSDE Connection File...
        # Usage: out_folder_path, out_name, server, service, database,
        # account_authentication, username, password,
        # save_username_password, version, save_version_info
        # NOTE(review): this prints the password in clear text to stdout —
        # consider removing these debug prints.
        print temp
        print Conn_File_NameT
        print server
        print service
        print database
        print account_authentication
        print username
        print password
        print saveUserInfo
        print version
        print saveVersionInfo
        arcpy.CreateArcSDEConnectionFile_management(temp, Conn_File_NameT,
                                                    server, service, database,
                                                    account_authentication,
                                                    username, password,
                                                    saveUserInfo, version,
                                                    saveVersionInfo)
        for i in range(arcpy.GetMessageCount()):
            # Message 000565 means the database connection FAILED
            if "000565" in arcpy.GetMessage(i):
                arcpy.AddReturnMessage(i)
                arcpy.AddMessage("\n+++++++++")
                arcpy.AddMessage("Exiting!!")
                arcpy.AddMessage("+++++++++\n")
                sys.exit(3)
            else:
                arcpy.AddReturnMessage(i)
        arcpy.AddMessage("+++++++++\n")
        return Connection_File_Name
    # Check if no value entered for option; this also catches the
    # sys.exit(3) above, turning it into a plain return of None.
    except SystemExit as e:
        print e.code
        return
class CreerCopieBaseDonneesFGDB(object):
    #*******************************************************************************************
    """
    Copy a spatial database (tables and feature classes) into a File
    Geodatabase (FGDB).
    """

    #-------------------------------------------------------------------------------------
    def __init__(self):
        #-------------------------------------------------------------------------------------
        """
        Initialize the processing that copies a spatial database into a FGDB.

        Parameters:
        -----------
        None

        Variables:
        ----------
        None
        """
        # Nothing to initialize
        return

    #-------------------------------------------------------------------------------------
    def validerParamObligatoire(self, database, repertoire, fgdb, proprietaire):
        #-------------------------------------------------------------------------------------
        """
        Validate that all mandatory parameters are present (non-empty).

        Parameters:
        -----------
        database : Source database whose tables and feature classes are
                   copied into the FGDB.
        repertoire : Directory in which the FGDB must be created.
        fgdb : Name of the FGDB to create.
        proprietaire : Owner of the tables/feature classes to copy.

        Returns:
        -------
        Raises Exception when a mandatory parameter is missing.
        """
        # Announce the validation step
        arcpy.AddMessage(
            "- Vérification de la présence des paramètres obligatoires")
        if (len(database) == 0):
            raise Exception("Paramètre obligatoire manquant: %s" % 'database')
        if (len(repertoire) == 0):
            raise Exception("Paramètre obligatoire manquant: %s" % 'repertoire')
        if (len(fgdb) == 0):
            raise Exception("Paramètre obligatoire manquant: %s" % 'fgdb')
        if (len(proprietaire) == 0):
            raise Exception("Paramètre obligatoire manquant: %s" % 'proprietaire')
        # Done
        return

    #-------------------------------------------------------------------------------------
    def executer(self, database, repertoire, fgdb, proprietaire, compression):
        #-------------------------------------------------------------------------------------
        """
        Run the processing that copies a spatial database into a FGDB.

        Parameters:
        -----------
        database : Source database whose tables and feature classes are
                   copied into the FGDB.
        repertoire : Directory in which the FGDB must be created.
        fgdb : Name of the FGDB to create.
        proprietaire : Owner of the tables/feature classes to copy.
        compression : Whether to compress the FGDB afterwards (True/False).
        """
        # Set the default workspace to the source database
        arcpy.env.workspace = database

        # List the owner's tables
        listeTables = arcpy.ListTables(proprietaire + "*")

        # List the owner's feature classes
        listeFeatureClass = arcpy.ListFeatureClasses(proprietaire + "*")

        # Check whether the FGDB already exists
        if arcpy.Exists(repertoire + "\\" + fgdb + ".gdb"):
            # Warn and reuse the existing FGDB
            arcpy.AddWarning("La FGDB existe déjà !")
            fgdb = repertoire + "\\" + fgdb + ".gdb"
        # The FGDB does not exist yet
        else:
            # Create the FGDB
            arcpy.AddMessage(" ")
            fgdb = str(arcpy.CreateFileGDB_management(repertoire, fgdb))
            arcpy.AddMessage(arcpy.GetMessages())

        # Copy every table from the database into the FGDB
        for table in listeTables:
            # Announce the table copy
            arcpy.AddMessage(" ")
            arcpy.AddMessage("Executing: TableToTable " + table + " " + fgdb +
                             " " + table.replace(proprietaire + ".", ""))
            # Skip tables already present in the FGDB
            if arcpy.Exists(fgdb + "\\" + table.replace(proprietaire + ".", "")):
                # Warn that the table already exists
                arcpy.AddWarning("La table existe déjà !")
            # The table does not exist yet
            else:
                try:
                    # Copy the table into the FGDB
                    arcpy.TableToTable_conversion(
                        table, fgdb, table.replace(proprietaire + ".", ""))
                    # Echo every message except the first
                    for i in range(1, arcpy.GetMessageCount()):
                        arcpy.AddMessage(arcpy.GetMessage(i))
                # Error handling: report messages as errors and continue
                except Exception, err:
                    # Echo every message except the first
                    for i in range(1, arcpy.GetMessageCount()):
                        # Report as an error
                        arcpy.AddError(arcpy.GetMessage(i))

        # Copy every feature class from the database into the FGDB
        for featureClass in listeFeatureClass:
            # Announce the feature class copy
            arcpy.AddMessage(" ")
            arcpy.AddMessage("Executing: FeatureClassToFeatureClass " +
                             featureClass + " " + fgdb + " " +
                             featureClass.replace(proprietaire + ".", ""))
            # Skip feature classes already present in the FGDB
            if arcpy.Exists(fgdb + "\\" +
                            featureClass.replace(proprietaire + ".", "")):
                # Warn that the feature class already exists
                arcpy.AddWarning("La featureClass existe déjà !")
            # The feature class does not exist yet
            else:
                # Copy the feature class into the FGDB
                # NOTE(review): unlike the table loop, this copy has no
                # try/except — one failing feature class aborts the run.
                arcpy.FeatureClassToFeatureClass_conversion(
                    featureClass, fgdb,
                    featureClass.replace(proprietaire + ".", ""))
                # Echo every message except the first
                for i in range(1, arcpy.GetMessageCount()):
                    arcpy.AddMessage(arcpy.GetMessage(i))

        # Compress the FGDB when requested
        if compression:
            # Run the FGDB compression
            arcpy.AddMessage(" ")
            arcpy.AddMessage(
                "Executing: CompressFileGeodatabaseData_management " + fgdb)
            arcpy.CompressFileGeodatabaseData_management(fgdb)
            arcpy.AddMessage(arcpy.GetMessages())

        # Done
        arcpy.AddMessage(" ")
        return
def Solve(Input_Layer_Location, ScenarioNames, Input_Dataset,
          Evacuation_Prefix, Safe_Zone_Prefix, Dynamics_Prefix, ThreadCount,
          msgLock, dbLock, ThreadNum):
    """Worker entry point: solve every Network Analyst layer in a layer file
    against every scenario, sharded across ThreadCount workers.

    Input_Layer_Location -- path to the .lyr file holding NA layers
    ScenarioNames        -- iterable of scenario rows; ExpName[0] is the name
    Input_Dataset        -- geodatabase holding scenario inputs and outputs
    Evacuation_Prefix,
    Safe_Zone_Prefix,
    Dynamics_Prefix      -- per-scenario feature class name prefixes
    ThreadCount/ThreadNum-- this worker handles combination k when
                            k % ThreadCount == ThreadNum
    msgLock              -- lock serializing message output across workers
    dbLock               -- lock serializing writes to the shared geodatabase
    """
    # Check out any necessary licenses
    if arcpy.CheckExtension("Network") == "Available":
        arcpy.CheckOutExtension("Network")
    else:
        arcpy.AddMessage("Network Analyst Extension Is Not Available")
        print "Network Analyst Is Not Available"
        sys.exit(0)

    # Load required toolboxes
    arcpy.env.overwriteOutput = True
    SolveCount = 0

    try:
        # load layer file
        lyrFile = arcpy.mapping.Layer(Input_Layer_Location)
        messages = []
        # loop over all scenarios, import them into each NA layer
        for ExpName in ScenarioNames:
            # arcpy.AddMessage("Importing scenario: " + ExpName[0])
            # Per-scenario input feature classes
            EVC = Input_Dataset + '\\' + Evacuation_Prefix + ExpName[0]
            SAFE = Input_Dataset + '\\' + Safe_Zone_Prefix + ExpName[0]
            DYN = Input_Dataset + '\\' + Dynamics_Prefix + ExpName[0]

            # now loop over all NA layers and solve them one by one
            for lyr in arcpy.mapping.ListLayers(lyrFile):
                try:
                    desc = arcpy.Describe(Input_Layer_Location + "\\" +
                                          lyr.longName)
                    # only solve if the layer is a network analysis layer
                    if desc.dataType == "NALayer":
                        # We check if this setup and scenario is intended for
                        # this particular sub-process
                        SolveCount += 1
                        if SolveCount % ThreadCount != ThreadNum:
                            continue

                        # load input locations; reset the message buffer
                        del messages[:]
                        try:
                            messages.append(
                                "Thread {}: loading input points to {} from scenario {}"
                                .format(ThreadNum, lyr.name, ExpName[0]))
                            arcpy.AddLocations_na(
                                lyr, "Evacuees", EVC,
                                "VehicleCount POPULATION #;Name UID #",
                                "5000 Meters", "",
                                "Streets NONE;SoCal_ND_Junctions SHAPE",
                                "MATCH_TO_CLOSEST", "CLEAR", "NO_SNAP",
                                "5 Meters", "EXCLUDE",
                                "Streets #;SoCal_ND_Junctions #")
                            # Buffer the tool messages for later locked output
                            for msg in range(0, arcpy.GetMessageCount()):
                                messages.append(arcpy.GetMessage(msg))
                            arcpy.AddLocations_na(
                                lyr, "Zones", SAFE,
                                "Name OBJECTID #;Capacity Capacity #",
                                "5000 Meters", "",
                                "Streets NONE;SoCal_ND_Junctions SHAPE",
                                "MATCH_TO_CLOSEST", "CLEAR", "NO_SNAP",
                                "5 Meters", "EXCLUDE",
                                "Streets #;SoCal_ND_Junctions #")
                            for msg in range(0, arcpy.GetMessageCount()):
                                messages.append(arcpy.GetMessage(msg))
                            arcpy.AddLocations_na(
                                lyr, "DynamicChanges", DYN,
                                "EdgeDirection Zones_EdgeDirection #;StartingCost Zones_StartingCost #;EndingCost Zones_EndingCost #;CostChangeRatio Zones_CostChangeRatio #;CapacityChangeRatio Zones_CapacityChangeRatio #",
                                "5000 Meters", "",
                                "Streets SHAPE;SoCal_ND_Junctions NONE",
                                "MATCH_TO_CLOSEST", "CLEAR", "NO_SNAP",
                                "5 Meters", "INCLUDE",
                                "Streets #;SoCal_ND_Junctions #")
                            for msg in range(0, arcpy.GetMessageCount()):
                                messages.append(arcpy.GetMessage(msg))

                            # solve the layer
                            messages.append("Solving NALayer " + lyr.name +
                                            " with scenario " + ExpName[0])
                            arcpy.Solve_na(lyr, "SKIP", "TERMINATE")
                            for msg in range(0, arcpy.GetMessageCount()):
                                messages.append(arcpy.GetMessage(msg))

                            # going to export route and edge sub_layers
                            solved_layers = arcpy.mapping.ListLayers(lyr)
                            # lock and then write outputs to gdb
                            try:
                                dbLock.acquire()
                                arcpy.CopyFeatures_management(
                                    solved_layers[4],
                                    Input_Dataset + "\\Routes_" + lyr.name +
                                    "_" + ExpName[0])  #Routes
                                for msg in range(0, arcpy.GetMessageCount()):
                                    messages.append(arcpy.GetMessage(msg))
                                arcpy.CopyFeatures_management(
                                    solved_layers[5],
                                    Input_Dataset + "\\EdgeStat_" + lyr.name +
                                    "_" + ExpName[0])  #EdgeStat
                                for msg in range(0, arcpy.GetMessageCount()):
                                    messages.append(arcpy.GetMessage(msg))
                            finally:
                                dbLock.release()
                            del solved_layers
                            messages.append(
                                "Combination {}: Solved {} with scenario {}{}".
                                format(SolveCount, lyr.name, ExpName[0],
                                       os.linesep))
                        except BaseException as e:
                            # Record the failure but keep going with the next
                            # combination
                            messages.append("Error: {}".format(e))
                            messages.append(
                                "Combination {}: Errored {} with scenario {}{}"
                                .format(SolveCount, lyr.name, ExpName[0],
                                        os.linesep))
                        # lock and then print messages
                        try:
                            msgLock.acquire()
                            for msg in messages:
                                arcpy.AddMessage(msg)
                        finally:
                            msgLock.release()
                except BaseException as e:
                    arcpy.AddError(e)
                finally:
                    # NOTE(review): if arcpy.Describe raised above, 'desc' is
                    # unbound here and this del raises NameError — confirm.
                    del desc
    except BaseException as e:
        arcpy.AddError(e)
    finally:
        del lyrFile
        del messages
        # Always return the Network Analyst license
        arcpy.CheckInExtension("Network")
1) #User selected Kansas-wide shapefile. For use with scriptTool ##geoIndex = "AFFGEOID" geoIndex = arcpy.GetParameterAsText( 2 ) #User selected index field within selected shapefile. Default is AFFGEOID. For use with scriptTool filename = os.path.splitext(os.path.basename(geography)) directory = os.path.split(os.path.dirname(geography)) geoPoints = filename[0] + "Points_TEST.shp" outworkspace = "C:\\Users\\ismae\\OneDrive - Kansas State University\\finalProjectRodriguez\\scratch\\" arcpy.FeatureToPoint_management( geography, geoPoints, "CENTROID" ) #Generates a point shapefile with points centroids of user selected shapefile messageA = arcpy.GetMessageCount() arcpy.AddMessage("FeatureToPoint Tool: " + arcpy.GetMessage(messageA - 1)) print "FeatureToPoint Tool: " + arcpy.GetMessage(messageA - 1) rasterFolder = arcpy.ListFiles( "*.tif") #Creates a folder of raster tif files # This loop builds a list of raster files and associated index labels. targetSet = ['indexRaster.tif','index'] targetSet = [] for rasterFile in rasterFolder: rasterName = os.path.splitext(os.path.basename(rasterFile)) voteName = rasterName[0] geographyRaster = arcpy.sa.ZonalStatistics( geography, geoIndex, rasterFile, "SUM", "DATA") #estimates total votes for specified geography geographyRaster.save(
def main(config_file, *args):
    """
    Import the incidents to a feature class,
    filtering out duplicates if necessary,
    assign geometry using addresses or XY values,
    and publish the results using AGOL or ArcGIS for Server.

    Output is an updated feature class, processing reports,
    and optionally a service.

    config_file : path to an INI file with GENERAL / ADDRESSES /
                  COORDINATES / PUBLISHING sections (see cfg.get calls below).
    *args       : unused; accepted so the script tool can pass extras.

    Relies on module-level message/error templates (e1, m1, w1, l1, ...),
    helper functions (field_test, field_vals, sort_records, remove_dups,
    messages, convert_to_utc, convert_from_utc) and locator field constants
    defined elsewhere in this file.
    """
    # Current date and time for file names
    fileNow = dt.strftime(dt.now(), prefix)

    # Read the configuration file; fail early if it does not exist
    if isfile(config_file):
        cfg = ConfigParser.ConfigParser()
        cfg.read(config_file)
    else:
        raise Exception(e1.format("Configuration file", config_file, ""))

    # Get general configuration values
    incidents = cfg.get('GENERAL', 'spreadsheet')
    inc_features = cfg.get('GENERAL', 'incident_features')
    id_field = cfg.get('GENERAL', 'incident_id')
    report_date_field = cfg.get('GENERAL', 'report_date_field')
    reports = cfg.get('GENERAL', 'reports')
    loc_type = cfg.get('GENERAL', 'loc_type')
    summary_field = cfg.get('GENERAL', 'summary_field')
    transform_method = cfg.get('GENERAL', 'transform_method')
    pub_status = cfg.get('GENERAL', 'pub_status')
    delete_duplicates = cfg.get('GENERAL', 'delete_duplicates')

    # Normalize the string flag from the config to a real boolean.
    # Duplicate removal needs a report date field to decide which copy wins.
    if delete_duplicates in ('true', 'True', True):
        delete_duplicates = True
        if report_date_field == "":
            raise Exception(e16)
    if delete_duplicates in ('false', 'False'):
        delete_duplicates = False

    # Log file lives in the configured reports directory
    if exists(reports):
        rptLog = join(reports, "{0}_{1}.log".format(fileNow, log_name))
    else:
        raise Exception(e1.format("Report location", reports, w5))

    # Scratch workspace for temporary feature classes
    tempgdb = arcpy.env.scratchGDB

    with open(rptLog, "w") as log:
        try:
            # Log file header
            log.write(l1.format(fileNow))
            log.write(l2.format(getpass.getuser()))
            log.write(l3.format(incidents))
            log.write(l4.format(inc_features))
            if loc_type == "ADDRESSES":
                log.write(l5.format(cfg.get('ADDRESSES', 'locator')))

            # Validate output feature class geometry type
            desc = arcpy.Describe(inc_features)
            if not desc.shapeType == "Point":
                raise Exception(e6.format(inc_features))

            # Identify field names in both fc and csv
            if arcpy.Exists(incidents):
                csvfieldnames = [f.name for f in arcpy.ListFields(incidents)]
            else:
                raise Exception(e1.format("Spreadsheet", incidents, ""))
            if arcpy.Exists(inc_features):
                incfieldnames = [f.name for f in arcpy.ListFields(inc_features)]
            else:
                raise Exception(e1.format("Feature Class", inc_features, ""))

            # Only fields present in BOTH datasets are copied across
            matchfieldnames = []
            for name in csvfieldnames:
                if name in incfieldnames:
                    matchfieldnames.append(name)

            # If data is to be geocoded
            if loc_type == "ADDRESSES":
                # Get geocoding parameters
                address_field = cfg.get('ADDRESSES', 'address_field')
                city_field = cfg.get('ADDRESSES', 'city_field')
                state_field = cfg.get('ADDRESSES', 'state_field')
                zip_field = cfg.get('ADDRESSES', 'zip_field')
                locator = cfg.get('ADDRESSES', 'locator')

                # Geocoding field names: required vs optional
                reqFields = [address_field, id_field]  #, report_date_field]
                opFields = [city_field, state_field, zip_field,
                            summary_field, report_date_field]

                if locator == "":
                    raise Exception(e13)

                # Test geolocator fields against the known locator schema
                loc_address_fields = [loc_address_field, loc_city_field,
                                      loc_zip_field, loc_state_field]
                for a in loc_address_fields:
                    if not a == "":
                        if not a in all_locator_fields:
                            raise Exception(e14)

            # If data has coordinate values
            else:
                # Get coordinate parameters
                lg_field = cfg.get('COORDINATES', 'long_field')
                lt_field = cfg.get('COORDINATES', 'lat_field')
                coord_system = cfg.get('COORDINATES', 'coord_system')
                remove_zeros = cfg.get('COORDINATES', 'ignore_zeros')
                # Normalize string flag to boolean
                if remove_zeros in ('true', 'True'):
                    remove_zeros = True
                if remove_zeros in ('false', 'False'):
                    remove_zeros = False

                # Coordinate field names: required vs optional
                reqFields = [id_field, lg_field, lt_field]  #, report_date_field]
                opFields = [summary_field, report_date_field]

            # Validate required field names (raise on missing)
            field_test(incidents, reqFields, csvfieldnames, True)
            field_test(inc_features, reqFields, incfieldnames, True)

            # Validate optional field names (warn only)
            field_test(incidents, opFields, csvfieldnames)
            field_test(inc_features, opFields, incfieldnames)

            # Validate basic publishing parameters
            if not pub_status == "":
                # Get general publishing parameters
                mxd = cfg.get('PUBLISHING', 'mxd')
                username = cfg.get('PUBLISHING', 'user_name')
                password = cfg.get('PUBLISHING', 'password')

                # Test for required inputs
                if not arcpy.Exists(mxd):
                    raise Exception(e1.format("Map document", mxd, ""))
                if splitext(mxd)[1] != ".mxd":
                    raise Exception(e3)

                # Credentials are mandatory only for ArcGIS Online
                if username == "" or password == "":
                    if pub_status == "ARCGIS_ONLINE":
                        raise Exception(e8)

            # Get address fields for geocoding: build the field-mapping
            # string expected by GeocodeAddresses ("<locfield> <csvfield>
            # VISIBLE NONE;" per locator field, "<None>" when unmapped)
            if loc_type == "ADDRESSES":
                addresses = ""
                loc_fields = []
                adr_string = "{0} {1} VISIBLE NONE;"
                for loc_field in all_locator_fields:
                    if loc_field == loc_address_field:
                        addresses += adr_string.format(loc_field, address_field)
                        loc_fields.append(address_field)
                    elif loc_field == loc_city_field and city_field != "":
                        addresses += adr_string.format(loc_field, city_field)
                        loc_fields.append(city_field)
                    elif loc_field == loc_state_field and state_field != "":
                        addresses += adr_string.format(loc_field, state_field)
                        loc_fields.append(state_field)
                    elif loc_field == loc_zip_field and zip_field != "":
                        addresses += adr_string.format(loc_field, zip_field)
                        loc_fields.append(zip_field)
                    else:
                        addresses += adr_string.format(loc_field, "<None>")

            # Get coordinate fields
            else:
                loc_fields = [lg_field, lt_field]

            total_records = len(field_vals(incidents, id_field))
            messages(m17.format(total_records, incidents), log)

            # Summarize counts of each distinct value in the summary field
            if not summary_field == "":
                SumVals = field_vals(incidents, summary_field)
                listSumVals = [val for val in SumVals if val != None]
                if not len(SumVals) == len(listSumVals):
                    print m19.format(len(SumVals) - len(listSumVals))
                    log.write(m19.format(len(SumVals) - len(listSumVals)))
                listSumVals.sort()
                log.write(l10.format(summary_field))
                dateCount = 1
                i = 0
                n = len(listSumVals)
                # Walk the sorted values counting runs of equal values;
                # the bare except catches the IndexError at i == n-1 and
                # flushes the final run
                while i < n:
                    try:
                        if listSumVals[i] == listSumVals[i + 1]:
                            dateCount += 1
                        else:
                            log.write(l11.format(listSumVals[i], dateCount))
                            dateCount = 1
                    except:
                        log.write(l11.format(listSumVals[i], dateCount))
                    i += 1
                log.write("\n")

            # Remove duplicate incidents
            if delete_duplicates:
                timeNow = dt.strftime(dt.now(), time_format)
                messages(m13.format(timeNow), log)
                incidents, req_nulls, countUpdate, countDelete = remove_dups(
                    tempgdb, incidents, inc_features, matchfieldnames,
                    id_field, report_date_field, loc_fields)
                if not req_nulls == "":
                    req_nulls = "{}\n".format(req_nulls)
                    messages(w3.format(req_nulls), log, 1)
                if not countUpdate == 0:
                    messages(m14.format(countUpdate, inc_features), log)
                if countDelete > 0:
                    messages(m15.format(countDelete, inc_features), log)

            # Create features
            tempFC = join(tempgdb, "tempDataLE")

            # Create point features from spreadsheet
            timeNow = dt.strftime(dt.now(), time_format)
            messages(m1.format(timeNow), log)

            if loc_type == "ADDRESSES":
                timeNow = dt.strftime(dt.now(), time_format)
                messages(m3.format(timeNow), log)

                # Geocode the incidents
                arcpy.GeocodeAddresses_geocoding(incidents, locator,
                                                 addresses, tempFC, "STATIC")

                # Initiate geocoding report counts
                countMatch = 0
                countTrueMatch = 0
                countUnmatch = 0

                # Create geocoding reports
                rptUnmatch = join(reports, "{0}_{1}.csv".format(fileNow,
                                                                unmatch_name))

                fieldnames = [f.name for f in arcpy.ListFields(tempFC)]

                # Sort incidents based on match status
                statusIndex = fieldnames.index(status)
                locIndex = fieldnames.index(addr_type)

                # Write incidents that were not well geocoded to file and
                #       delete from temp directory
                with open(rptUnmatch, "wb") as umatchFile:
                    unmatchwriter = csv.writer(umatchFile)
                    unmatchwriter.writerow(fieldnames)

                    # Delete incidents that were not Matched
                    countUnmatch = sort_records(tempFC, unmatchwriter,
                                                statusIndex, match_value,
                                                False, True)

                    if not countUnmatch == 0:
                        messages(w6.format(countUnmatch, rptUnmatch), log, 1)

                    # Incidents that were not matched to an acceptable accuracy
                    countMatch = sort_records(tempFC, unmatchwriter,
                                              locIndex, addrOK, False, True)

                    if not countMatch == 0:
                        messages(w7.format(countMatch, addrOK, rptUnmatch),
                                 log, 1)

                    countTrueMatch = len(field_vals(tempFC, "OBJECTID"))

                    messages(m16.format(countTrueMatch, inc_features), log)

            else:
                # Create temporary output storage from XY values
                tempFL = arcpy.MakeXYEventLayer_management(incidents,
                                                           lg_field,
                                                           lt_field,
                                                           "tempLayerLE",
                                                           coord_system)

                # Convert the feature layer to a feature class to prevent
                #   field name changes
                arcpy.CopyFeatures_management(tempFL, tempFC)
                arcpy.Delete_management(tempFL)

            timeNow = dt.strftime(dt.now(), time_format)
            messages(m4.format(timeNow, inc_features), log)

            # Fields that will be copied from geocode results to final fc
            copyfieldnames = []
            copyfieldnames.extend(matchfieldnames)
            copyfieldnames.append("SHAPE@XY")

            # Fields for error reporting
            errorfieldnames = []
            errorfieldnames.extend(matchfieldnames)
            errorfieldnames.insert(0, errorfield)
            errorfieldnames += [long_field, lat_field]

            # Reproject the features if input and output SRs differ
            sr_input = arcpy.Describe(tempFC).spatialReference
            sr_output = arcpy.Describe(inc_features).spatialReference

            if sr_input != sr_output:
                proj_out = "{}_proj".format(tempFC)
                arcpy.Project_management(tempFC, proj_out, sr_output,
                                         transform_method)
                tempFC = proj_out

            # Append geocode results to fc
            rptNoAppend = join(reports, "{0}_{1}.csv".format(fileNow,
                                                             noappend_name))

            with arcpy.da.SearchCursor(tempFC, copyfieldnames) as csvrows:
                with arcpy.da.InsertCursor(inc_features,
                                           copyfieldnames) as incrows:
                    # Open csv for un-appended records
                    with open(rptNoAppend, "wb") as appendFile:
                        appendwriter = csv.writer(appendFile)
                        appendwriter.writerow(errorfieldnames)

                        # Index of field with incident ID
                        record = errorfieldnames.index(id_field)

                        # Initiate count of successfully appended records
                        countAppend = 0

                        # List of ids of records not successfully appended
                        errorRecords = []

                        for csvrow in csvrows:
                            try:
                                if loc_type == "COORDINATES":
                                    if remove_zeros:
                                        lt_index = copyfieldnames.index(lt_field)
                                        lg_index = copyfieldnames.index(lg_field)
                                        ltVal = csvrow[lt_index]
                                        lgVal = csvrow[lg_index]
                                        # (0, 0) coordinates are treated as
                                        # "no location" and skipped
                                        if ltVal == 0 and lgVal == 0:
                                            raise Exception("invalid_coordinates")

                                # If the row can be appended
                                incrows.insertRow(csvrow)
                                countAppend += 1

                            except Exception as reason:
                                # e.g. 'The value type is incompatible with the
                                #       field type. [INCIDENTDAT]'
                                # Alternatively, the exception
                                #       'invalid_coordinates' raised by the
                                #       remove_zeros test above

                                # Get the name of the problem field
                                badfield = reason[0].split(" ")[-1]
                                badfield = badfield.strip(" []")

                                # Append field name to start of record
                                csvrow = list(csvrow)
                                csvrow.insert(0, badfield)

                                # Split the coordinate tuple into X and Y
                                lng, lat = list(csvrow[-1])
                                csvrow[-1] = lng
                                csvrow.append(lat)
                                csvrow = tuple(csvrow)

                                # Write the record out to csv
                                appendwriter.writerow(csvrow)

                                # Add id and field to issue list
                                errorRecords.append(
                                    w4.format(csvrow[record], badfield))

                        # If issues were reported, print them
                        if len(errorRecords) != 0:
                            messages(w1.format(len(errorRecords),
                                               inc_features, rptNoAppend),
                                     log, 1)

                        messages(m18.format(countAppend, inc_features), log)

                        del incrows, csvrows

            # Convert times to UTC if publishing to AGOL
            if pub_status == "ARCGIS_ONLINE":
                # Get date fields
                date_fields = [f.name for f in arcpy.ListFields(inc_features)
                               if f.type == "Date"
                               and f.name in matchfieldnames]

                # Convert from system timezone to UTC
                convert_to_utc(inc_features, date_fields)

            # Publish incidents
            if not pub_status == "":
                timeNow = dt.strftime(dt.now(), time_format)
                messages(m5.format(timeNow), log)
                errors = serviceutils.publish_service(cfg, pub_status, mxd,
                                                      username, password)

                # Print analysis errors
                if errors:
                    raise Exception(e4.format(errors))

            # Convert times from UTC to system timezone
            if pub_status == "ARCGIS_ONLINE":
                convert_from_utc(inc_features, date_fields)

            timeNow = dt.strftime(dt.now(), time_format)
            messages(m8.format(timeNow), log)

        except arcpy.ExecuteError:
            # Geoprocessing failure: echo the GP messages to console,
            # tool messages, and the log
            print("{}\n{}\n".format(gp_error, arcpy.GetMessages(2)))
            timeNow = dt.strftime(dt.now(), "{} {}".format(date_format,
                                                           time_format))
            arcpy.AddError("{} {}:\n".format(timeNow, gp_error))
            arcpy.AddError("{}\n".format(arcpy.GetMessages(2)))
            log.write("{} ({}):\n".format(gp_error, timeNow))
            log.write("{}\n".format(arcpy.GetMessages(2)))
            # Echo each error-severity message with its return code
            for msg in range(0, arcpy.GetMessageCount()):
                if arcpy.GetSeverity(msg) == 2:
                    code = arcpy.GetReturnCode(msg)
                    print("Code: {}".format(code))
                    print("Message: {}".format(arcpy.GetMessage(msg)))

        except Exception as ex:
            # Any other Python failure: report to console, tool, and log
            print("{}: {}\n".format(py_error, ex))
            timeNow = dt.strftime(dt.now(), "{}".format(time_format))
            arcpy.AddError("{} {}:\n".format(timeNow, py_error))
            arcpy.AddError("{}\n".format(ex))
            log.write("{} {}:\n".format(timeNow, py_error))
            log.write("{}\n".format(ex))

        finally:
            # Clean up the scratch workspace; best-effort only
            try:
                arcpy.Delete_management(tempgdb)
            except:
                pass
def executer(self, env, featureLayerDecoupage, attributDecoupage,
             featureClassValider, requeteClassValider, relationSpatiale,
             typeSelection, featureClassRelation, requeteClassRelation,
             repLayerErreur, featureClassErreur):
    #-------------------------------------------------------------------------------------
    """
    Run the validation between two feature classes using a spatial
    relationship, for every selected tile (decoupage) feature.

    For each selected tile: build a filtered layer of the class to
    validate, build a filtered layer of the related class, select the
    features of the first that satisfy the spatial relationship against
    the second, save the erroneous features to a .lyr file, and
    optionally copy their geometries into an error feature class.

    Parameters:
    -----------
    env : Working environment name.
    featureLayerDecoupage : FeatureLayer holding the selected tile
            features to process.
    attributDecoupage : Attribute of the tile layer holding the tile
            identifier.
    featureClassValider : FeatureClass to validate.
    requeteClassValider : Attribute query applied to the class to
            validate; the token <DECOUPAGE> is replaced by each tile id.
    relationSpatiale : Spatial relationship used for the validation
            (SelectLayerByLocation overlap type).
    typeSelection : Selection type applied to the result of the spatial
            relationship.
    featureClassRelation : Related FeatureClass.
    requeteClassRelation : Attribute query applied to the related class;
            <DECOUPAGE> is replaced by each tile id.
    repLayerErreur : Directory that receives the error FeatureLayer
            (.lyr) files.
    featureClassErreur : FeatureClass that receives the geometries of
            the erroneous features; empty string disables this output.

    Returns:
    --------
    None
    """
    # Running total of errors across all tiles
    nbErrTotal = 0

    # When an error FeatureClass is requested, make sure it exists and
    # is geometry-compatible with the class being validated
    if len(featureClassErreur) > 0:
        # Announce the error-FeatureClass check
        arcpy.AddMessage("- Vérifier la FeatureClass d'erreurs")

        # Describe the class to validate (gives shapeType / spatialReference)
        desc = arcpy.Describe(featureClassValider)

        # Reuse the FeatureClass if it is already present
        if arcpy.Exists(featureClassErreur):
            # Warn that the FeatureClass already exists
            arcpy.AddWarning("FeatureClass déjà présente : " +
                             featureClassErreur)
            # Describe the existing error FeatureClass
            descClsErr = arcpy.Describe(featureClassErreur)

            # The geometry types must match or errors cannot be copied
            # NOTE(review): the message expression below contains a
            # redacted/corrupted token (******) in the source — likely
            # originally "+ desc.shapeType +"; confirm against the
            # original file
            if desc.shapeType <> descClsErr.shapeType:
                # Abort with an explicit geometry-mismatch error
                raise Exception(
                    "Le type de géométrie entre la FeatureClass à valider et celle d'erreurs ne correspond pas : "******"<>" + descClsErr.shapeType)

        # Otherwise create the error FeatureClass from scratch
        else:
            # Split the path into workspace + class name
            baseName = os.path.basename(featureClassErreur)
            # Create the error FeatureClass with the same geometry type
            # and spatial reference as the class to validate
            arcpy.AddMessage("CreateFeatureclass_management " +
                             featureClassErreur.replace(baseName, "") + " " +
                             baseName + " " + desc.shapeType + " " +
                             desc.spatialReference.name)
            arcpy.CreateFeatureclass_management(
                featureClassErreur.replace(baseName, ""), baseName,
                desc.shapeType,
                spatial_reference=desc.spatialReference)
            arcpy.AddMessage(arcpy.GetMessage(arcpy.GetMessageCount() - 1))

        # Cursor used to append error geometries into the FeatureClass
        cursor = arcpy.da.InsertCursor(featureClassErreur, ["SHAPE@"])

    # Force overwriting of output files
    arcpy.env.overwriteOutput = True

    # Announce the processing of all selected tile features
    arcpy.AddMessage(
        "- Traiter tous les éléments sélectionnés du FeatureLayer de découpage : "
        + featureLayerDecoupage)

    # Cursor over the (selected) tile features
    cursorDecoupage = arcpy.SearchCursor(featureLayerDecoupage)

    # Fetch the first tile feature
    feature = cursorDecoupage.next()

    # Process every tile feature
    while feature:
        # Tile identifier currently being validated
        decoupage = str(feature.getValue(attributDecoupage))

        # Announce the validation of this tile
        arcpy.AddMessage(" ")
        arcpy.AddMessage("- Validation des données : " + attributDecoupage +
                         "=" + decoupage)

        # Process: Make Feature Layer — layer of features to validate,
        # restricted to this tile via the <DECOUPAGE> substitution
        lyrErrName = decoupage + "_Erreur"
        arcpy.AddMessage(
            "MakeFeatureLayer_management " + featureClassValider + " " +
            lyrErrName + " " +
            requeteClassValider.replace("<DECOUPAGE>", decoupage))
        arcpy.MakeFeatureLayer_management(
            featureClassValider, lyrErrName,
            requeteClassValider.replace("<DECOUPAGE>", decoupage))
        arcpy.AddMessage(arcpy.GetMessage(arcpy.GetMessageCount() - 1))

        # Process: Select Layer By Attribute — reset to a fresh selection
        arcpy.AddMessage("SelectLayerByAttribute_management " + lyrErrName +
                         " NEW_SELECTION")
        arcpy.SelectLayerByAttribute_management(lyrErrName, "NEW_SELECTION")
        arcpy.AddMessage(arcpy.GetMessage(arcpy.GetMessageCount() - 1))

        # Process: Make Feature Layer (2) — layer of the related class,
        # named after its base name without any schema prefix
        desc = arcpy.Describe(featureClassRelation)
        lyrName = desc.baseName.split(".")[-1]
        arcpy.AddMessage(
            "MakeFeatureLayer_management " + featureClassRelation + " " +
            lyrName + " " +
            requeteClassRelation.replace("<DECOUPAGE>", decoupage))
        arcpy.MakeFeatureLayer_management(
            featureClassRelation, lyrName,
            requeteClassRelation.replace("<DECOUPAGE>", decoupage))
        arcpy.AddMessage(arcpy.GetMessage(arcpy.GetMessageCount() - 1))

        # Process: Select Layer By Location — the actual spatial validation
        arcpy.AddMessage("SelectLayerByLocation_management " + lyrErrName +
                         " " + relationSpatiale + " " + lyrName + " " +
                         typeSelection)
        lyrErr = arcpy.SelectLayerByLocation_management(
            lyrErrName, relationSpatiale, lyrName, "", typeSelection)
        arcpy.AddMessage(arcpy.GetMessage(arcpy.GetMessageCount() - 1))

        # Error count for this tile
        nbErr = 0

        # Describe the error layer; fidSet lists the selected feature ids
        descLyrErr = arcpy.Describe(lyrErr)

        # Any selected feature is an error for this tile
        if len(descLyrErr.fidSet) > 0:
            # fidSet is a ";"-separated id list — its length is the count
            nbErr = len(descLyrErr.fidSet.split(";"))
            nbErrTotal = nbErrTotal + nbErr

            # Make the layer non-visible before saving it to disk
            lyrErr.visible = False

            # Path of the .lyr file to write for this tile
            featureLayerErreurSnrc = repLayerErreur + "\\" + lyrErrName

            # Remove a pre-existing .lyr file so the save cannot fail
            if os.path.exists(featureLayerErreurSnrc):
                os.remove(featureLayerErreurSnrc)

            # Process: Save To Layer File
            arcpy.AddMessage("SaveToLayerFile_management " + lyrErrName +
                             " " + featureLayerErreurSnrc)
            arcpy.SaveToLayerFile_management(lyrErrName,
                                             featureLayerErreurSnrc)
            arcpy.AddMessage(arcpy.GetMessage(arcpy.GetMessageCount() - 1))

            # Optionally copy the erroneous geometries into the error
            # FeatureClass created/opened above
            if len(featureClassErreur) > 0:
                # Announce the write of errors to the FeatureClass
                arcpy.AddMessage("Écriture des erreurs dans : " +
                                 featureClassErreur)
                # Walk every selected (erroneous) feature
                for row in arcpy.SearchCursor(lyrErr):
                    #arcpy.AddMessage(str(row.getValue("OBJECTID")))
                    # Extract the geometry
                    geometrie = row.getValue("SHAPE")
                    # Insert the geometry into the error FeatureClass
                    cursor.insertRow([geometrie])

        # Report the error count for this tile
        arcpy.AddMessage("Nombre d'erreurs : " + str(nbErr))

        # Fetch the next tile feature
        feature = cursorDecoupage.next()

    # Release the insert cursor to commit the inserted error geometries
    if len(featureClassErreur) > 0:
        del cursor

    # Report the total error count
    arcpy.AddMessage(" ")
    arcpy.AddMessage("Nombre total d'erreurs : " + str(nbErrTotal))
    arcpy.AddMessage(" ")

    # Done
    return
print ap.GetMessages() # gets a complex result from GetCount roadresult = ap.GetCount_management("roads.shp") print roadresult # uses getOutput to pull the actual count as a string # int function converts to an integer roadcount = (roadresult.getOutput(0)) print roadcount # prints messages from last tool execution print ap.GetMessages() # prints the last message only print ap.GetMessage(ap.GetMessageCount()-1) # prints the maximum serverity among messages print ap.GetMaxSeverity() # uses Exists function to test if file exists ap.Exists("utm83.prj") # creates a complex arcpy spatial reference object utmfile = "utm83.prj" sref = ap.SpatialReference(utmfile) # try to print object; can't access except for it's methods print sref # list an object's properties and methods dir(sref) # show a property's type type(sref.longitude)
def executer(self, env, geodatabase, tableContraintes, classeDecoupage,
             attributDecoupage, repTravail, dateFin, typeTravail, courriel):
    #-------------------------------------------------------------------------------------
    """
    Run the spatial-integrity validation of the data delivered to the
    BDG according to a table of spatial constraints.

    For each work type found in SER_RECONCILE_LOG with deliveries after
    `dateFin`: collect the delivered identifiers, build a tile-selection
    layer (.lyr), determine the affected feature classes from the
    catalogue, and launch the external ValiderIntegriteSpatiale.exe
    validation tool for that work type.

    Parameters:
    -----------
    env : Environment type (SIB connection key).
    geodatabase : Geodatabase holding the spatial tables.
    tableContraintes : Table holding the spatial constraints.
    classeDecoupage : Class holding the tile polygons and identifiers.
    attributDecoupage : Attribute of the tile class holding the tile
            identifiers.
    repTravail : Working directory (receives .lyr, report and log files).
    dateFin : End date of the BDG delivery processing
            (ex: 2015-12-15 16:21:54).
    typeTravail : ";"-separated list of "TYPE:count" work types present
            in SER_RECONCILE_LOG whose identifiers were delivered after
            the given end date; empty means all work types.
    courriel : E-mail address used to send the execution report.

    Variables:
    ----------
    self.CompteSib : Utility object managing SIB connections.
    oSib : Utility object for SIB services (SQL queries).
    """
    # Open the SIB database connection
    arcpy.AddMessage("- Connexion à la BD SIB")
    oSib = self.CompteSib.OuvrirConnexionSib(env, env)

    # Build the SQL query counting deliveries per work type after dateFin
    arcpy.AddMessage(" ")
    arcpy.AddMessage(
        "- Extraction des travaux effectués sur le nombre d'identifiants livrés"
    )
    sSql = ("SELECT TY_TRAV, COUNT(*)"
            " FROM SER_RECONCILE_LOG@BDG_DBA"
            " WHERE STATUT=9 AND DATE_FIN>TO_DATE('" + dateFin +
            "','yyyy-mm-dd HH24:MI:SS')")

    # Restrict the query to the requested work types, if any
    if len(typeTravail) > 0:
        # Comma-separated accumulator of work-type codes
        listeTrav = ""
        # Each entry looks like "TYPE:count"; keep only the code
        for trav in typeTravail.split(";"):
            # Append the work-type code to the list
            listeTrav = listeTrav + trav.split(":")[0] + ","
            # Echo the work type and its identifier count
            arcpy.AddMessage(str(trav))
        # Add the IN (...) clause (drop the trailing comma, quote items)
        sSql = sSql + " AND TY_TRAV IN ('" + listeTrav[:-1].replace(
            ",", "','") + "')"

    # Add grouping and ordering to the SQL query
    sSql = sSql + " GROUP BY TY_TRAV ORDER BY TY_TRAV"

    # Execute the SQL query
    arcpy.AddMessage(sSql)
    resultatTrav = oSib.requeteSib(sSql)

    # Process each (work type, count) row
    for trav in resultatTrav:
        arcpy.AddMessage(" ")
        arcpy.AddMessage(str(trav))

        #----------------------------------------------------------
        # Build the SQL query listing the delivered identifiers for
        # this work type
        sSql = ("SELECT IDENTIFIANT"
                " FROM SER_RECONCILE_LOG@BDG_DBA"
                " WHERE STATUT=9 AND DATE_FIN>TO_DATE('" + dateFin +
                "','yyyy-mm-dd HH24:MI:SS')"
                " AND TY_TRAV='" + trav[0] + "'"
                " ORDER BY IDENTIFIANT")
        # Execute the SQL query
        #arcpy.AddMessage(sSql)
        resultatId = oSib.requeteSib(sSql)

        # Fail if there is no identifier to process
        if len(resultatId) == 0:
            raise Exception("ERREUR : Aucun identifiant à traiter")

        # Fail above 999 identifiers (Oracle IN-list limit)
        if len(resultatId) > 999:
            raise Exception(
                "Nombre d'identifiant supérieur à la limite permise : (" +
                str(len(resultatId)) + ">999)")

        # Comma-separated accumulator of identifiers
        listeId = ""
        # Collect every identifier
        for id in resultatId:
            listeId = listeId + id[0] + ","

        # Attribute query selecting those identifiers in the tile class
        requeteId = attributDecoupage + " IN ('" + listeId[:-1].replace(
            ",", "','") + "')"

        # Create the tile-selection layer for this work type
        arcpy.AddMessage("- Création du Layer de découpage ...")
        # Timestamp used to make the layer/report names unique
        dateHeure = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
        clsDecoupage = geodatabase + "\\" + classeDecoupage
        lyrDecoupage = "Decoupage_" + trav[0] + "_" + dateHeure + ".lyr"

        #Process: Make Feature Layer
        arcpy.AddMessage('MakeFeatureLayer_management "' + clsDecoupage +
                         '" ' + geodatabase + ' "' + requeteId + '"')
        arcpy.MakeFeatureLayer_management(clsDecoupage, lyrDecoupage,
                                          requeteId)
        arcpy.AddMessage(arcpy.GetMessage(arcpy.GetMessageCount() - 1))

        #Process: Select Layer By Attribute
        arcpy.AddMessage("SelectLayerByAttribute_management " +
                         lyrDecoupage + " NEW_SELECTION")
        arcpy.SelectLayerByAttribute_management(lyrDecoupage,
                                                "NEW_SELECTION")
        arcpy.AddMessage(arcpy.GetMessage(arcpy.GetMessageCount() - 1))

        #Process: Save To Layer File
        arcpy.AddMessage("SaveToLayerFile_management " + lyrDecoupage +
                         " " + repTravail + "\\" + lyrDecoupage)
        arcpy.SaveToLayerFile_management(lyrDecoupage,
                                         repTravail + "\\" + lyrDecoupage)
        arcpy.AddMessage(arcpy.GetMessage(arcpy.GetMessageCount() - 1))

        #----------------------------------------------------------
        # Build the SQL query resolving the feature classes touched by
        # this work type from the feature catalogue
        sSql = (" SELECT DISTINCT B.FEAT_TYPE_NAME_DATABASE"
                " FROM FEAT_CATALOGUE@BDG_VIEW A, FEAT_TYPE@BDG_VIEW B"
                " WHERE A.FEAT_CATAL_TYPE=1"
                " AND A.FEAT_CATAL_ID=B.FEAT_CATAL_FK"
                " AND B.FEAT_TYPE_CODE_BD IN "
                " ("
                " SELECT DISTINCT LE.CD_ELEM_TOPO"
                " FROM F502_PS PS, F502_LE LE, F503_TR TR, F601_LO LO"
                " WHERE TR.TY_TRAV='" + trav[0] + "'"
                " AND PS.NO_MAP=TR.NO_MAP AND PS.NO_MAP=LE.NO_MAP AND TR.NO_LOT=LO.NO_LOT"
                " )"
                " ORDER BY B.FEAT_TYPE_NAME_DATABASE")
        # Execute the SQL query
        resultatClasse = oSib.requeteSib(sSql)

        # Fail if no class is affected (echo the query for debugging)
        if len(resultatClasse) == 0:
            arcpy.AddMessage(sSql)
            raise Exception("ERREUR : Aucune classe à traiter")

        # Comma-separated accumulator of class names
        listeClasse = ""
        # Collect every class name
        for cls in resultatClasse:
            listeClasse = listeClasse + cls[0] + ","
        # Drop the trailing comma
        listeClasse = listeClasse[:-1]

        #----------------------------------------------------------
        # Launch the external spatial-validation executable for this
        # work type
        arcpy.AddMessage(
            "- Exécution du traitement de validation spatiale ...")
        # Execution-report file for this work type
        nomRapport = repTravail + "\\Rapport_" + trav[
            0] + "_" + dateHeure + ".txt"
        # Validation command line (space-separated positional arguments)
        cmd = (
            "D:\\cits\\EnvCits\\applications\\gestion_bdg\\pro\\Geotraitement\\exe\\ValiderIntegriteSpatiale.exe"
            ' "' + geodatabase + '"'
            " " + tableContraintes + ""
            " #"
            " " + listeClasse + ""
            " " + nomRapport + ""
            " " + repTravail + "\\%" + attributDecoupage + "%_" + trav[0] +
            "_" + dateHeure + ".mdb"
            " " + courriel + ""
            " " + repTravail + "\\" + lyrDecoupage + ""
            " " + attributDecoupage + "")
        # Echo the command
        arcpy.AddMessage(cmd)
        # Run the command, redirecting its output to a .log file
        # (return code kept in err but not checked)
        err = os.system(cmd + " > " + nomRapport.replace(".txt", ".log"))

    # Close the SIB database connection
    arcpy.AddMessage(" ")
    arcpy.AddMessage("- Fermeture de la connexion SIB")
    oSib.fermerConnexionSib()

    # Done
    return
def RotateFeatureClass(inputFC, outputFC, angle=0, pivot_point=None):
    """Rotate a feature class about a pivot point and write the result.

    inputFC      Input features (point, polyline, or polygon)
    outputFC     Output feature class
    angle        Angle to rotate, in degrees (clockwise, like
                 Rotate_management)
    pivot_point  X,Y coordinates as a space-separated string.
                 Default is the lower-left corner of inputFC's extent.

    Returns the pivot point actually used, as a "X Y" string, or None
    if it could not be formatted.

    NOTE: because the rotated output no longer has "real" xy locations,
    no coordinate system is defined on it.
    """

    def RotateXY(x, y, xc=0, yc=0, angle=0, units="DEGREES"):
        """Rotate an xy coordinate about a specified origin.

        x, y    xy coordinates
        xc, yc  center of rotation
        angle   rotation angle
        units   "DEGREES" (default) or "RADIANS"
        """
        import math
        # translate so the pivot is the origin
        x = x - xc
        y = y - yc
        # make angle clockwise (like Rotate_management)
        angle = angle * -1
        if units == "DEGREES":
            angle = math.radians(angle)
        # standard 2D rotation, then translate back
        xr = (x * math.cos(angle)) - (y * math.sin(angle)) + xc
        yr = (x * math.sin(angle)) + (y * math.cos(angle)) + yc
        return xr, yr

    # temp names, pre-declared so the finally block can clean them up
    env_file = None
    lyrFC, lyrTmp, lyrOut = [None] * 3   # layers
    tmpFC = None                         # temp dataset
    Row, Rows, oRow, oRows = [None] * 4  # cursors

    try:
        # process parameters
        try:
            # pivot point given as "X Y" string
            xcen, ycen = [float(xy) for xy in pivot_point.split()]
            pivot_point = xcen, ycen
        except:
            # if pivot point was not specified (None or unparsable), get
            # it from the lower-left corner of the feature class extent
            ext = arcpy.Describe(inputFC).extent
            xcen, ycen = ext.XMin, ext.YMin
            pivot_point = xcen, ycen

        angle = float(angle)

        # save the current GP environment so it can be restored in finally
        env_file = arcpy.CreateScratchName("xxenv", ".xml", "file",
                                           os.environ["TEMP"])
        arcpy.SaveSettings(env_file)

        # Disable any GP environment clips or project on the fly
        arcpy.ClearEnvironment("extent")
        arcpy.ClearEnvironment("outputCoordinateSystem")
        WKS = env.workspace
        if not WKS:
            # derive a workspace from the output path, else from the input
            if os.path.dirname(outputFC):
                WKS = os.path.dirname(outputFC)
            else:
                WKS = os.path.dirname(arcpy.Describe(inputFC).catalogPath)
        env.workspace = env.scratchWorkspace = WKS

        # Disable GP environment clips or project on the fly
        arcpy.ClearEnvironment("extent")
        arcpy.ClearEnvironment("outputCoordinateSystem")

        # get feature class properties
        lyrFC = "lyrFC"
        arcpy.MakeFeatureLayer_management(inputFC, lyrFC)
        dFC = arcpy.Describe(lyrFC)
        shpField = dFC.shapeFieldName
        shpType = dFC.shapeType
        FID = dFC.OIDFieldName

        # create temp feature class (geometry only; attributes joined later)
        tmpFC = arcpy.CreateScratchName("xxfc", "", "featureclass")
        arcpy.CreateFeatureclass_management(os.path.dirname(tmpFC),
                                            os.path.basename(tmpFC),
                                            shpType)
        lyrTmp = "lyrTmp"
        arcpy.MakeFeatureLayer_management(tmpFC, lyrTmp)

        # set up id field (used to join attributes back later)
        TFID = "XXXX_FID"
        arcpy.AddField_management(lyrTmp, TFID, "LONG")
        arcpy.DeleteField_management(lyrTmp, "ID")

        # rotate the feature class coordinates
        # only points, polylines, and polygons are supported

        # open read and write cursors
        Rows = arcpy.SearchCursor(lyrFC, "", "",
                                  "%s;%s" % (shpField, FID))
        oRows = arcpy.InsertCursor(lyrTmp)
        if shpType == "Point":
            for Row in Rows:
                shp = Row.getValue(shpField)
                pnt = shp.getPart()
                pnt.X, pnt.Y = RotateXY(pnt.X, pnt.Y, xcen, ycen, angle)
                oRow = oRows.newRow()
                oRow.setValue(shpField, pnt)
                oRow.setValue(TFID, Row.getValue(FID))
                oRows.insertRow(oRow)
        elif shpType in ["Polyline", "Polygon"]:
            parts = arcpy.Array()
            rings = arcpy.Array()
            ring = arcpy.Array()
            for Row in Rows:
                shp = Row.getValue(shpField)
                p = 0
                for part in shp:
                    for pnt in part:
                        if pnt:
                            x, y = RotateXY(pnt.X, pnt.Y,
                                            xcen, ycen, angle)
                            ring.add(arcpy.Point(x, y, pnt.ID))
                        else:
                            # a null point separates rings in a part:
                            # if we have a ring, save it
                            if len(ring) > 0:
                                rings.add(ring)
                                ring.removeAll()
                    # we have our last ring, add it
                    rings.add(ring)
                    ring.removeAll()
                    # if only one, remove nesting
                    if len(rings) == 1:
                        rings = rings.getObject(0)
                    parts.add(rings)
                    rings.removeAll()
                    p += 1
                # if only one, remove nesting
                if len(parts) == 1:
                    parts = parts.getObject(0)
                if dFC.shapeType == "Polyline":
                    shp = arcpy.Polyline(parts)
                else:
                    shp = arcpy.Polygon(parts)
                parts.removeAll()
                oRow = oRows.newRow()
                oRow.setValue(shpField, shp)
                oRow.setValue(TFID, Row.getValue(FID))
                oRows.insertRow(oRow)
        else:
            #raise Exception, "Shape type {0} is not supported".format(shpType) #UPDATE (py2 syntax)
            raise Exception("Shape type {0} is not supported".format(shpType))

        # close write cursor (ensure buffer written)
        del oRow, oRows
        oRow, oRows = None, None  # restore variables for cleanup

        # join attributes back by TFID, and copy joined layer to output
        arcpy.AddJoin_management(lyrTmp, TFID, lyrFC, FID)
        env.qualifiedFieldNames = False
        # NOTE(review): Merge with a single input acts as a copy here;
        # other versions of this sample use CopyFeatures_management — confirm
        arcpy.Merge_management(lyrTmp, outputFC)
        lyrOut = "lyrOut"
        arcpy.MakeFeatureLayer_management(outputFC, lyrOut)

        # drop temp fields 2,3 (TFID, FID)
        fnames = [f.name for f in arcpy.ListFields(lyrOut)]
        dropList = ";".join(fnames[2:4])
        arcpy.DeleteField_management(lyrOut, dropList)

    #except MsgError, xmsg: #UPDATE (py2 syntax)
    except MsgError as xmsg:
        arcpy.AddError(str(xmsg))
    except arcpy.ExecuteError:
        # geoprocessing failure: report traceback plus all GP messages
        tbinfo = traceback.format_tb(sys.exc_info()[2])[0]
        arcpy.AddError(tbinfo.strip())
        arcpy.AddError(arcpy.GetMessages())
        numMsg = arcpy.GetMessageCount()
        for i in range(0, numMsg):
            arcpy.AddReturnMessage(i)
    #except Exception, xmsg: #UPDATE (py2 syntax)
    except Exception as xmsg:
        tbinfo = traceback.format_tb(sys.exc_info()[2])[0]
        arcpy.AddError(tbinfo + str(xmsg))

    finally:
        # reset environment to what it was before this function ran
        if env_file:
            arcpy.LoadSettings(env_file)
        # Clean up temp files (best-effort: ignore failures)
        for f in [lyrFC, lyrTmp, lyrOut, tmpFC, env_file]:
            try:
                if f:
                    arcpy.Delete_management(f)
            except:
                pass
        # delete cursors
        # NOTE(review): `del c` only unbinds the loop variable, not
        # Row/Rows/oRow/oRows themselves — effectively a no-op; confirm intent
        try:
            for c in [Row, Rows, oRow, oRows]:
                del c
        except:
            pass

        # return pivot point as "X Y" string (None if unformattable)
        try:
            pivot_point = "{0} {1}".format(*pivot_point)
        except:
            pivot_point = None

    return pivot_point
# --- Download OSM data for an area of interest into an enterprise GDB ---
# local import so this script section stands alone; harmless if the file
# already imports os at the top
import os

# define the enterprise geodatabase workspace
env.workspace = r'C:\Data\OSM\Mxds\NewOSMDEV.sde'

# get the feature set (first parameter) to extract the AOI envelope
aoi_featureset = arcpy.GetParameter(0)
inputName = arcpy.GetParameterAsText(1)

# sanitize the user-supplied name so it is legal in the target workspace
validatedTableName = arcpy.ValidateTableName(inputName, env.workspace)

# build output paths with os.path.join directly (the original reached it
# via `arcpy.os.path.join`, which only works because arcpy happens to
# import os internally — an undocumented implementation detail)
nameOfTargetDataset = os.path.join(env.workspace, validatedTableName)
nameOfPointFeatureClass = os.path.join(env.workspace, validatedTableName,
                                       validatedTableName + '_osm_pt')
nameOfLineFeatureClass = os.path.join(env.workspace, validatedTableName,
                                      validatedTableName + '_osm_ln')
nameOfPolygonFeatureClass = os.path.join(env.workspace, validatedTableName,
                                         validatedTableName + '_osm_ply')

# request the data from the OSM server and store it in the target feature dataset
arcpy.OSMGPDownload_osmtools(r'http://www.openstreetmap.org', aoi_featureset,
                             'DO_NOT_INCLUDE_REFERENCES', nameOfTargetDataset,
                             nameOfPointFeatureClass, nameOfLineFeatureClass,
                             nameOfPolygonFeatureClass)

# Return the resulting messages as script tool output messages
# (idiomatic for-loop instead of a manual while-counter)
for msgIndex in range(arcpy.GetMessageCount()):
    arcpy.AddReturnMessage(msgIndex)