Example #1
    def executer(self, nomClasse, classeSNRC, requete, repTravail):
        #-------------------------------------------------------------------------------------
        """
        Exécuter le traitement pour créer un Layer par Zone UTM contenant la classe de SNRC avec une requête correspondant à la sélection
        pour lesquels les éléments d'une classe sont présents dans le SNRC.
        
        Paramètres:
        -----------
        nomClasse           : Nom de la classe traité.
        classeSNRC          : Nom de la FeatureClass contenant les éléments du découpage SNRC.
        requete             : Requête attributive utilisé pour chaque Layer de zone UTM créée.
        repTravail          : Nom du répertoire de travail dans lequel les Layers par zone UTM seront créés.
               
        Variables:
        ----------
        """

        #Force overwriting of output files
        arcpy.env.overwriteOutput = True

        #Process every UTM zone
        for zoneUTM in range(7, 23):
            #Display the UTM zone being processed
            arcpy.AddMessage(" ")
            arcpy.AddMessage("-Traitement de la zone UTM :" + str(zoneUTM))

            #Build the query for this UTM zone
            requeteZoneUtm = requete.replace("[NOM_CLASSE]",
                                             nomClasse).replace(
                                                 "[ZONE_UTM]", str(zoneUTM))

            #Build the name of the SNRC layer to process for this UTM zone
            lyrDecoupage = "BDG_DBA.ges_Decoupage_SNRC50K_2" + "_" + str(
                zoneUTM) + ".lyr"

            #Process: Make Feature Layer
            arcpy.AddMessage('MakeFeatureLayer_management "' + classeSNRC +
                             '" "' + requeteZoneUtm + '"')
            arcpy.MakeFeatureLayer_management(classeSNRC, lyrDecoupage,
                                              requeteZoneUtm)
            arcpy.AddMessage(arcpy.GetMessage(arcpy.GetMessageCount() - 1))

            #Process: Select Layer By Attribute
            arcpy.AddMessage("SelectLayerByAttribute_management " +
                             lyrDecoupage + " NEW_SELECTION")
            arcpy.SelectLayerByAttribute_management(lyrDecoupage,
                                                    "NEW_SELECTION")
            arcpy.AddMessage(arcpy.GetMessage(arcpy.GetMessageCount() - 1))

            #Process: Save To Layer File
            arcpy.AddMessage("SaveToLayerFile_management " + lyrDecoupage +
                             " " + repTravail + "\\" + nomClasse + "\\" +
                             lyrDecoupage)
            arcpy.SaveToLayerFile_management(
                lyrDecoupage,
                repTravail + "\\" + nomClasse + "\\" + lyrDecoupage)
            arcpy.AddMessage(arcpy.GetMessage(arcpy.GetMessageCount() - 1))

        # End of processing
        return
Example #2
def createVtpkIndexAndPackage(in_map, service_type, tileScheme, vertex_count, indexPolygon, outVtpk):
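    # Build a vector tile index polygon feature class, then use it to create an INDEXED vector tile package (.vtpk).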
    try:
        arcpy.management.CreateVectorTileIndex(in_map=in_map,
                                               out_featureclass=indexPolygon,
                                               service_type=service_type,
                                               tiling_scheme=tileScheme,
                                               vertex_count=vertex_count)
        arcpy.AddMessage("tile index - ready.")
        arcpy.management.CreateVectorTilePackage(in_map=in_map,
                                                 output_file=outVtpk,
                                                 service_type=service_type,
                                                 tiling_scheme=tileScheme,
                                                 tile_structure="INDEXED",
                                                 min_cached_scale="",
                                                 max_cached_scale="",
                                                 index_polygons=indexPolygon,
                                                 summary=None,
                                                 tags=None)
        if os.path.exists(outVtpk):
            arcpy.AddMessage("Pro standard tile package - ready!")
        return outVtpk
    except:
        arcpy.AddError("Failed to create vector tile package. Please see the following error for details.")
        for i in range(0, arcpy.GetMessageCount()):
            mesSev = arcpy.GetSeverity(i)
            if mesSev == 1:
                arcpy.AddWarning(arcpy.GetMessage(i))
            elif mesSev == 2:
                arcpy.AddError(arcpy.GetMessage(i))
Example #3
def main():
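    # Create a minimum bounding geometry shapefile for every feature class in the workspace.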
    arcpy.env.workspace = r'C:\Users\owner\Downloads\Sample_scripts\ch06\shapefiles'
    arcpy.env.overwriteOutput = True
    features = arcpy.ListFeatureClasses()
    for feature in features:
        try:
            outfile = feature[:-4] + 'Bound.shp'
            arcpy.MinimumBoundingGeometry_management(feature, outfile)
            print('{} created'.format(outfile))
        except arcpy.ExecuteError:
            print(arcpy.GetMessage(2))
            print(arcpy.GetMessage(3))
Example #4
    def check_geometry(self):
        Printboth('Checking geometry...')
        result = IUCNresult('-----CHECK GEOMETRY-----')

        try:
            arcpy.CheckGeometry_management(self.inputFL, self.outputLoc + os.sep + 'geometry_checks.dbf')
        except:
            Printboth(arcpy.GetMessage(2))
            result.log('\t'+arcpy.GetMessage(2))
        else:
            result.log('\tPlease check \'geometry_checks.dbf\' in the same folder for detailed information')
        finally:
            self.resultList.append(result)
Example #5
    def merge_resources(self):
        """Iterates through the proxy database and merges shapes with the same PrimCo or PrimNo."""
        #  Make the following string formatted so that the layer appends from the
        #  FeatureLayer created by the select function.
        print "    Adding ICDB Data..."
        fields = [
            "sPCountyNum", "sPNumber", "sTriState", "sTriCounty", "sTriNumber",
            "sTriSuffix", "sResourceType", "sDistrictElement", "sCollection",
            "sPeriod", "sAgcyResourceID", "sResourceName", "dRecordedDate"
        ]
        self.connect_to_icdb()
        arcpy.env.workspace = self.target_db
        try:
            with arcpy.da.Editor(self.target_db) as edit:
                with arcpy.da.UpdateCursor(self.proxy, fields) as proxy_cursor:
                    for row in proxy_cursor:
                        # Main Data from tblResource
                        self.icdb.execute(
                            "SELECT TrinNo, TrinH, "
                            "ResTypeBuilding, ResTypeStructure, ResTypeObject, "
                            "ResTypeSite, ResTypeDistrict, ResTypeOther, ResTypeElement, "
                            "ResourceCollections, "
                            "AgePre, AgePro, AgeHist, AgeUnk "
                            "FROM tblResource "
                            "WHERE PrimCo = {} AND PrimNo = {}".format(
                                row[0], row[1].zfill(6)))
                        tblResource = self.icdb.fetchone()
                        # Identifiers and names from tblResourceIdent
                        self.icdb.execute(
                            "SELECT IdentifierType, Identifier "
                            "FROM tblResourceIdent "
                            "WHERE PrimCo = {} AND PrimNo = {} ".format(
                                row[0], row[1].zfill(6)))
                        tblResourceIdent = self.icdb.fetchall()
                        # Recording dates from tblResourceEvents
                        self.icdb.execute("SELECT FIRST(RecDate) "
                                          "FROM tblResourceEvents "
                                          "WHERE PrimCo = {} AND PrimNo = {} "
                                          "GROUP BY PrimCo, PrimNo".format(
                                              row[0], row[1].zfill(6)))
                        tblResourceEvent = self.icdb.fetchall()
                        try:
                            results = FormatResource(row[0], row[1].zfill(6),
                                                     tblResource,
                                                     tblResourceIdent,
                                                     tblResourceEvent)
                        except TypeError:
                            pass
                        position = 2
                        for field in fields[2:]:
                            row[position] = results.formatted_resource[field]
                            position += 1
                        try:
                            proxy_cursor.updateRow(row)
                        except RuntimeError, e:
                            print str(e)
                            print row

        except arcpy.ExecuteError:
            print arcpy.GetMessage(2)
Example #6
def logPreviousToolMessages():
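    # Replay every message from the previously run tool into this script's own message output via AddReturnMessage.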
    i = 0
    msgCount = arcpy.GetMessageCount()
    while i < msgCount:
        msg = arcpy.GetMessage(i)
        arcpy.AddReturnMessage(i)
        i += 1
Example #7
def createDBConnectionFile(instance, database_type, database,
                           account_authentication, dbms_admin, dbms_admin_pwd):
    # Local variables
    instance_temp = instance.replace("\\", "_")
    instance_temp = instance_temp.replace("/", "_")
    instance_temp = instance_temp.replace(":", "_")
    Conn_File_NameT = instance_temp + "_" + database + "_" + dbms_admin

    if os.environ.get("TEMP") == None:
        temp = "c:\\temp"
    else:
        temp = os.environ.get("TEMP")

    if os.environ.get("TMP") == None:
        temp = "/usr/tmp"
    else:
        temp = os.environ.get("TMP")

    Connection_File_Name = Conn_File_NameT + ".sde"
    Connection_File_Name_full_path = temp + os.sep + Conn_File_NameT + ".sde"

    # Check for the .sde file and delete it if present
    arcpy.env.overwriteOutput = True
    if os.path.exists(Connection_File_Name_full_path):
        os.remove(Connection_File_Name_full_path)

    try:
        arcpy.AddMessage("Creating Database Connection File...")
        # Process: Create Database Connection File...
    # Usage:  out_file_location, out_file_name, DBMS_TYPE, instance, account_authentication, username, password, database, save_username_password(must be true)
        arcpy.CreateDatabaseConnection_management(
            out_folder_path=temp,
            out_name=Connection_File_Name,
            database_platform=database_type,
            instance=instance,
            database=database,
            account_authentication=account_authentication,
            username=dbms_admin,
            password=dbms_admin_pwd,
            save_user_pass="******")
        for i in range(arcpy.GetMessageCount()):
            if "000565" in arcpy.GetMessage(
                    i):  # Check if database connection was successful
                arcpy.AddReturnMessage(i)
                arcpy.AddMessage("++++++++++++++++++")
                arcpy.AddMessage("Exiting!!")
                arcpy.AddMessage("++++++++++++++++++")
                sys.exit(3)
            else:
                arcpy.AddReturnMessage(i)
        arcpy.AddMessage("++++++++++++++++++")
    except:
        for i in range(arcpy.GetMessageCount()):
            arcpy.AddReturnMessage(i)
    connection = Connection_File_Name_full_path
    return connection
Example #8
def createUserConnectionFiles(instance, database_type, database,
                              account_authentication, role, dbuser_pwd,
                              out_folder_path):
    # Local variables
    if role == 'sde':
        dbuser_name = 'SDE'
    else:
        dbuser_name = database.upper() + '_' + role.upper()

    # if dbuser_pwd != '#':
    #    dbuser_pwd = database.lower() + '_' + role.upper() + '_' + str(datetime.datetime.now().year)
    #    arcpy.AddMessage(dbuser_pwd)

    instance_temp = instance.replace("\\", "_")
    instance_temp = instance_temp.replace("/", "_")
    instance_temp = instance_temp.replace(":", "_")
    Conn_File_NameT = instance_temp + "." + database + "." + dbuser_name
    Connection_File_Name = Conn_File_NameT + ".sde"
    Connection_File_Name_full_path = out_folder_path + os.sep + Conn_File_NameT + ".sde"

    # Check for the .sde file and delete it if present
    arcpy.env.overwriteOutput = True
    if os.path.exists(Connection_File_Name_full_path):
        os.remove(Connection_File_Name_full_path)

    try:
        arcpy.AddMessage("Creating Database Connection File...")
        # Process: Create Database Connection File...
    # Usage:  out_file_location, out_file_name, DBMS_TYPE, instance, account_authentication, username, password, database, save_username_password(must be true)
        arcpy.CreateDatabaseConnection_management(
            out_folder_path=out_folder_path,
            out_name=Connection_File_Name,
            database_platform=database_type,
            instance=instance,
            database=database,
            account_authentication=account_authentication,
            username=dbuser_name,
            password=dbuser_pwd,
            save_user_pass="******")
        for i in range(arcpy.GetMessageCount()):
            if "000565" in arcpy.GetMessage(
                    i):  # Check if database connection was successful
                arcpy.AddReturnMessage(i)
                arcpy.AddMessage("++++++++++++++++++")
                arcpy.AddMessage("Exiting!!")
                arcpy.AddMessage("++++++++++++++++++")
                sys.exit(3)
            else:
                arcpy.AddReturnMessage(i)
        arcpy.AddMessage("++++++++++++++++++")
    except:
        for i in range(arcpy.GetMessageCount()):
            arcpy.AddReturnMessage(i)
    return Connection_File_Name_full_path
Example #9
def GenerateVtpkTilingScheme(in_map,tileScheme):
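    # Hard-coded tiling parameters: 20 cache scales from 1:295,829,355 down to roughly 1:564, 512 x 512 tiles at 96 DPI, tile origin -180 180.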
    scales = "295829355.454565;147914677.727283;73957338.8636413;36978669.4318207;18489334.7159103;9244667.35795516;4622333.67897758;2311166.83948879;1155583.4197444;577791.709872198;288895.854936099;144447.927468049;72223.9637340247;36111.9818670124;18055.9909335062;9027.99546675309;4513.99773337654;2256.99886668827;1128.49943334414;564.249716672068"
    tile_origin = "-180 180"
    try:
        arcpy.server.GenerateMapServerCacheTilingScheme(in_map=in_map,
                                                        tile_origin=tile_origin,
                                                        output_tiling_scheme=tileScheme,
                                                        num_of_scales=20,
                                                        scales=scales,
                                                        dots_per_inch=96,
                                                        tile_size="512 x 512")
        arcpy.AddMessage("tile scheme - ready.")
        return tileScheme
    except:
        arcpy.AddError("Failed to create vector tile package. Please see the following error for details.")
        for i in range(0, arcpy.GetMessageCount()):
            mesSev = arcpy.GetSeverity(i)
            if mesSev == 1:
                arcpy.AddWarning(arcpy.GetMessage(i))
            elif mesSev == 2:
                arcpy.AddError(arcpy.GetMessage(i))
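
A usage note: Examples #2 and #9 appear to be two helpers from the same vector-tile publishing script (Example #9 builds the tiling scheme that Example #2 consumes). The sketch below is one plausible way to chain them; the project path, map lookup, output locations and the ONLINE service type are assumptions, not part of the original examples.

# Hypothetical wiring of GenerateVtpkTilingScheme (Example #9) with
# createVtpkIndexAndPackage (Example #2); every path below is an assumption.
import arcpy

aprx = arcpy.mp.ArcGISProject(r"C:\data\tiles\project.aprx")  # hypothetical ArcGIS Pro project
vt_map = aprx.listMaps()[0]                                   # first map in the project

scheme = GenerateVtpkTilingScheme(vt_map, r"C:\data\tiles\tiling_scheme.xml")
if scheme:
    createVtpkIndexAndPackage(in_map=vt_map,
                              service_type="ONLINE",
                              tileScheme=scheme,
                              vertex_count=10000,
                              indexPolygon=r"C:\data\tiles\work.gdb\tile_index",
                              outVtpk=r"C:\data\tiles\output.vtpk")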
Example #10
def doRGBI(RGBI_path, RGB_outpath, CIR_outpath):
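    # Split each 4-band RGBI TIFF into temporary single-band rasters, then composite RGB (bands 1-3) and CIR (IR, red, green) outputs.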
    outWorkspace = RGB_outpath

    try:
        makePath(RGB_outpath, "temp")
        arcpy.env.workspace = RGBI_path
        RGBIrasters = arcpy.ListRasters("", "TIF")
        rastCount = len(RGBIrasters)
        arcpy.AddMessage("Reconfiguring " + str(rastCount) + " rasters.")
        current = 1
        for RGBIraster in RGBIrasters:
            arcpy.env.workspace = RGBI_path
            # Get Bands for RGBI
            redin = RGBIraster + "\Band_1"
            red = os.path.join(RGB_outpath, "temp",
                               RGBIraster[:-4] + "_red.tif")
            arcpy.CopyRaster_management(redin, red)
            greenin = RGBIraster + "\Band_2"
            green = os.path.join(RGB_outpath, "temp",
                                 RGBIraster[:-4] + "_green.tif")
            arcpy.CopyRaster_management(greenin, green)
            bluein = RGBIraster + "\Band_3"
            blue = os.path.join(RGB_outpath, "temp",
                                RGBIraster[:-4] + "_blue.tif")
            arcpy.CopyRaster_management(bluein, blue)
            IRin = RGBIraster + "\Band_4"
            IR = os.path.join(RGB_outpath, "temp", RGBIraster[:-4] + "_ir.tif")
            arcpy.CopyRaster_management(IRin, IR)

            outRaster = RGBIraster[:-4] + ".tif"
            start = time.clock()
            arcpy.AddMessage("Reconfiguring file " + str(current) + " of " +
                             str(rastCount) + ": " + RGBIraster)

            arcpy.CompositeBands_management(
                '"' + red + ";" + green + ";" + blue + '"',
                os.path.join(RGB_outpath,
                             os.path.basename(RGBIraster[:-4] + ".tif")))
            arcpy.AddMessage("Successfully reconfigured: " + RGBIraster +
                             " to " + RGB_outpath)
            arcpy.CompositeBands_management(
                '"' + IR + ";" + red + ";" + green + '"',
                os.path.join(CIR_outpath,
                             os.path.basename(RGBIraster[:-4] + ".tif")))
            elapsed = (time.clock() - start)
            arcpy.AddMessage("Execution time: " + str(elapsed / 60)[:4] +
                             " minutes.")
            current = current + 1
            print arcpy.GetMessage(0)
    except Exception as e:
        arcpy.AddMessage("Reconfiguration attempt failed.")
        arcpy.AddError(e.message)
Example #11
 def merge_reports(self):
     fields = [
         "sSHPOTrackNum", 'sLeadAgencyNum', 'sSurveyOrg', 'sAuthor',
         'sReportTitle', 'dReportDte'
     ]
     self.connect_to_icdb()
     arcpy.env.workspace = self.target_db
     try:
         with arcpy.da.Editor(self.target_db) as edit:
             with arcpy.da.UpdateCursor(self.proxy, fields) as proxy_cursor:
                 for row in proxy_cursor:
                     # Main Data from tblInventory
                     self.icdb.execute(
                         "SELECT CitPublisher, CitTitle, CitMonth, CitYear "
                         "FROM tblInventory "
                         "WHERE DocNo = {}".format(row[0]))
                     tblInventory = self.icdb.fetchone()
                     # Author Names from tblInventory Author
                     self.icdb.execute("SELECT DocAuthorText "
                                       "FROM tblInventoryAuthor "
                                       "WHERE DocNo = {}".format(row[0]))
                     tblInventoryAuthor = self.icdb.fetchall()
                     # Identifiers from tblInventoryIdent
                     self.icdb.execute("SELECT IdentifierType, Identifier "
                                       "FROM tblInventoryIdent "
                                       "WHERE DocNo = {}".format(row[0]))
                     tblInventoryIdent = self.icdb.fetchall()
                     results = FormatReport(tblInventory,
                                            tblInventoryAuthor,
                                            tblInventoryIdent)
                     position = 1
                     for field in fields[1:]:
                         row[position] = results.formatted_report[field]
                         position += 1
                     try:
                         proxy_cursor.updateRow(row)
                     except RuntimeError, e:
                         print str(e)
                         print row
     except arcpy.ExecuteError:
         print arcpy.GetMessage(2)
Example #12
def makeCIR(inWS, outWS):
	try:
		makePath(outWS,"temp")
		arcpy.env.workspace = inWS
		rasters = arcpy.ListRasters("","")
		rastCount = len(rasters)
		arcpy.AddMessage("Converting " + str(rastCount) + " images to CIR.")
		i = 1
		for raster in rasters:
			arcpy.AddMessage("Reconfiguring file " + str(i) + " of " + str(rastCount) + ": " + raster)
			start = time.clock()
			redIn = raster + "\Band_1"
			red = os.path.join(outWS, "temp", raster[:-4] + "_red.tif")
			arcpy.CopyRaster_management(redIn,red)
			greenIn = raster + "\Band_2"
			green = os.path.join(outWS, "temp", raster[:-4] + "_green.tif")
			arcpy.CopyRaster_management(greenIn,green)
			irIn = raster + "\Band_3"
			ir = os.path.join(outWS, "temp", raster[:-4] + "_ir.tif")
			arcpy.CopyRaster_management(irIn,ir)
			
			outRaster = raster[:-4] + ".tif"
			arcpy.CompositeBands_management('"' + ir + ";" + red + ";" + green + '"', os.path.join(outWS, os.path.basename(raster[:-4]+".tif")))
			
			os.remove(os.path.join(outWS,raster[:-4] + ".tfw"))
			os.remove(os.path.join(outWS,raster[:-4] + ".tif.aux.xml"))
			os.remove(os.path.join(outWS,raster[:-4] + ".tif.xml"))
			
			arcpy.AddMessage("Successfully reconfigured: " + raster + " to " + outWS)
			elapsed = (time.clock() - start)
			arcpy.AddMessage("Execution time: " + str(elapsed / 60)[:4] + " minutes.")
			i += 1
			print arcpy.GetMessage(0)
	
	except Exception as e:
		arcpy.AddMessage("Reconfiguration attempt failed.")
		arcpy.AddError(e.message)
Example #13
def connect(database,
            server="<default server>",
            username="******",
            password="******",
            version="SDE.DEFAULT"):
    # Check if value entered for option
    try:
        #Usage parameters for spatial database connection to upgrade
        service = "sde:sqlserver:" + server
        account_authentication = 'DATABASE_AUTH'
        version = version.upper()
        database = database.lower()

        # Check if direct connection
        if service.find(":") <> -1:  #This is direct connect
            ServiceConnFileName = service.replace(":", "")
            ServiceConnFileName = ServiceConnFileName.replace(";", "")
            ServiceConnFileName = ServiceConnFileName.replace("=", "")
            ServiceConnFileName = ServiceConnFileName.replace("/", "")
            ServiceConnFileName = ServiceConnFileName.replace("\\", "")
        else:
            arcpy.AddMessage("\n+++++++++")
            arcpy.AddMessage("Exiting!!")
            arcpy.AddMessage("+++++++++")
            sys.exit(
                "\nSyntax for a direct connection in the Service parameter is required for geodatabase upgrade."
            )

        # Local variables
        Conn_File_NameT = server + "_" + ServiceConnFileName + "_" + database + "_" + username

        if os.environ.get("TEMP") == None:
            temp = "c:\\temp"
        else:
            temp = os.environ.get("TEMP")

        if os.environ.get("TMP") == None:
            temp = "/usr/tmp"
        else:
            temp = os.environ.get("TMP")

        Connection_File_Name = temp + os.sep + Conn_File_NameT + ".sde"
        if os.path.isfile(Connection_File_Name):
            return Connection_File_Name

        # Check for the .sde file and delete it if present
        arcpy.env.overwriteOutput = True

        # Variables defined within the script; other variable options commented out at the end of the line
        saveUserInfo = "SAVE_USERNAME"  #DO_NOT_SAVE_USERNAME
        saveVersionInfo = "SAVE_VERSION"  #DO_NOT_SAVE_VERSION

        print "\nCreating ArcSDE Connection File...\n"
        # Process: Create ArcSDE Connection File...
        # Usage: out_folder_path, out_name, server, service, database, account_authentication, username, password, save_username_password, version,   save_version_info
        print temp
        print Conn_File_NameT
        print server
        print service
        print database
        print account_authentication
        print username
        print password
        print saveUserInfo
        print version
        print saveVersionInfo
        arcpy.CreateArcSDEConnectionFile_management(temp, Conn_File_NameT,
                                                    server, service, database,
                                                    account_authentication,
                                                    username, password,
                                                    saveUserInfo, version,
                                                    saveVersionInfo)
        for i in range(arcpy.GetMessageCount()):
            if "000565" in arcpy.GetMessage(
                    i):  #Check if database connection was successful
                arcpy.AddReturnMessage(i)
                arcpy.AddMessage("\n+++++++++")
                arcpy.AddMessage("Exiting!!")
                arcpy.AddMessage("+++++++++\n")
                sys.exit(3)
            else:
                arcpy.AddReturnMessage(i)
                arcpy.AddMessage("+++++++++\n")
                return Connection_File_Name
    #Check if no value entered for option
    except SystemExit as e:
        print e.code
        return
Example #14
import arcpy, os

arcpy.CreateFeatureclass_management(os.getcwd(), "testMessage.shp")
fc = os.getcwd() + os.sep + "testMessage.shp"
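# The duplicate AddField call below is expected to fail (hence the except arcpy.ExecuteError),
# leaving error messages in the queue for the message functions that follow to inspect.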
try:
    arcpy.AddField_management(fc, "TEST", "TEXT")
    arcpy.AddField_management(fc, "TEST", "TEXT")
except arcpy.ExecuteError:
    pass
print "Test GetMessageCount:"
messageCount = arcpy.GetMessageCount()
print messageCount
print "Test GetMessages:"
print arcpy.GetMessages()
print "Test GetMessage:"
print "GetMessage(0):", arcpy.GetMessage(0)
print "GetMessage(1):", arcpy.GetMessage(1)
print "GetMessage(2):", arcpy.GetMessage(2)
print "Test GetIDMessage:"
print "GetIDMessage(84001):", arcpy.GetIDMessage(84001)
print "GetIDMessage(999999):", arcpy.GetIDMessage(999999)
print "Test GetReturnCode:"
print "Message[1]'s ReturnCode:", arcpy.GetReturnCode(1)
print "Message[2]'s ReturnCode:", arcpy.GetReturnCode(2)
print "Test GetSeverity:"
print "Message[1]'s Severity:", arcpy.GetSeverity(1)
print "Message[2]'s Severity:", arcpy.GetSeverity(2)
print "Test GetSeverityLevel:"
print arcpy.GetSeverityLevel()
arcpy.SetSeverityLevel(1)
print arcpy.GetSeverityLevel()
Example #15
         if not re.search(".000$", item) and re.search(
                 ".[0-9][0-9][0-9]$", item):
             update_cells.append(os.path.join(root, item))
     update_cells.sort()
     base_cell = os.path.join(root, file)
     base_cells.append(base_cell)
     # Run Import S-57 to Geodatabase GP tool on base cell and update cells
     arcpy.AddMessage("\t" + file)
     if update_cells:
         arcpy.AddMessage("\t\t" + "\n\t\t".join(
             [os.path.basename(cell) for cell in update_cells]))
     try:
         arcpy.ImportS57ToGeodatabase_nautical(
             base_cell, target_wrkspc, update_cells)
     except:
         msgs = arcpy.GetMessage(0)
         msgs += arcpy.GetMessages(2)
         arcpy.AddWarning(msgs)
 # Handle international exchange sets
 else:
     # Get base cell
     for item in os.listdir(root):
         if re.search(".000$", item):
             base_cell = os.path.join(root, item)
             base_cells.append(base_cell)
             update_cells = []
     # Get update cells
     pre_root = os.path.dirname(root)
     for root2, dirs2, files2 in os.walk(pre_root):
         if root2 != pre_root and os.path.basename(
                 root2) != "0":
Example #16
    "03": "\\Marzo",
    "04": "\\Abril",
    "05": "\\Mayo",
    "06": "\\Junio",
    "07": "\\Julio",
    "08": "\\Agosto",
    "09": "\\Setiembre",
    "10": "\\Octubre",
    "11": "\\Noviembre",
    "12": "\\Diciembre"
}
resultados = r"C:\Scripts\Atributos\2018"
env.workspace = resultados

if quincena == True:
    path_exporta = "_I.xls"

else:
    path_exporta = "_II.xls"

#path_exporta = "_I.xls"

try:
    path_mapa = resultados + mes + path_exporta
    arcpy.TableToExcel_conversion(insumo_base, path_mapa, "ALIAS", "CODE")
    arcpy.ExcelToTable_conversion(path_mapa, path_mapa)

except:
    print "Se ha detectado un error que impide ejecutar el script"
    print(arcpy.GetMessage(2))
Example #17
def main(config_file, *args):
    """
    Import the incidents to a feature class,
    filtering out duplicates if necessary,
    assign geometry using addresses or XY values,
    and publish the results usign AGOL or ArcGIS for Server.
    Output is an updated feature class, processign reports,
    and optionally a service
    """

    # Current date and time for file names
    fileNow = dt.strftime(dt.now(), prefix)

    if isfile(config_file):
        cfg = ConfigParser.ConfigParser()
        cfg.read(config_file)
    else:
        raise Exception(e1.format("Configuration file", config_file, ""))

    # Get general configuration values
    incidents = cfg.get('GENERAL', 'spreadsheet')
    inc_features = cfg.get('GENERAL', 'incident_features')
    id_field = cfg.get('GENERAL', 'incident_id')
    report_date_field = cfg.get('GENERAL', 'report_date_field')
    reports = cfg.get('GENERAL', 'reports')
    loc_type = cfg.get('GENERAL', 'loc_type')
    summary_field = cfg.get('GENERAL', 'summary_field')
    transform_method = cfg.get('GENERAL', 'transform_method')
    pub_status = cfg.get('GENERAL', 'pub_status')
    delete_duplicates = cfg.get('GENERAL', 'delete_duplicates')

    if delete_duplicates in ('true', 'True', True):
        delete_duplicates = True
        if report_date_field == "":
            raise Exception(e16)
    if delete_duplicates in ('false', 'False'):
        delete_duplicates = False

    # Log file
    if exists(reports):
        rptLog = join(reports, "{0}_{1}.log".format(fileNow, log_name))

    else:
        raise Exception(e1.format("Report location", reports, w5))

    # Scratch workspace
    tempgdb = arcpy.env.scratchGDB

    with open(rptLog, "w") as log:
        try:
            # Log file header
            log.write(l1.format(fileNow))
            log.write(l2.format(getpass.getuser()))
            log.write(l3.format(incidents))
            log.write(l4.format(inc_features))
            if loc_type == "ADDRESSES":
                log.write(l5.format(cfg.get('ADDRESSES', 'locator')))

            # Validate output feature class geometry type
            desc = arcpy.Describe(inc_features)
            if not desc.shapeType == "Point":
                raise Exception(e6.format(inc_features))

            # Identify field names in both fc and csv
            if arcpy.Exists(incidents):
                csvfieldnames = [f.name for f in arcpy.ListFields(incidents)]

            else:
                raise Exception(e1.format("Spreadsheet", incidents, ""))

            if arcpy.Exists(inc_features):
                incfieldnames = [
                    f.name for f in arcpy.ListFields(inc_features)
                ]
            else:
                raise Exception(e1.format("Feature Class", inc_features, ""))

            matchfieldnames = []
            for name in csvfieldnames:
                if name in incfieldnames:
                    matchfieldnames.append(name)

            # If data is to be geocoded
            if loc_type == "ADDRESSES":

                # Get geocoding parameters
                address_field = cfg.get('ADDRESSES', 'address_field')
                city_field = cfg.get('ADDRESSES', 'city_field')
                state_field = cfg.get('ADDRESSES', 'state_field')
                zip_field = cfg.get('ADDRESSES', 'zip_field')
                locator = cfg.get('ADDRESSES', 'locator')

                # Geocoding field names
                reqFields = [address_field, id_field]  #, report_date_field]
                opFields = [
                    city_field, state_field, zip_field, summary_field,
                    report_date_field
                ]

                if locator == "":
                    raise Exception(e13)

                # Test geolocator fields
                loc_address_fields = [
                    loc_address_field, loc_city_field, loc_zip_field,
                    loc_state_field
                ]
                for a in loc_address_fields:
                    if not a == "":
                        if not a in all_locator_fields:
                            raise Exception(e14)

            # If data has coordinate values
            else:

                # Get coordinate parameters
                lg_field = cfg.get('COORDINATES', 'long_field')
                lt_field = cfg.get('COORDINATES', 'lat_field')
                coord_system = cfg.get('COORDINATES', 'coord_system')
                remove_zeros = cfg.get('COORDINATES', 'ignore_zeros')
                if remove_zeros in ('true', 'True'):
                    remove_zeros = True
                if remove_zeros in ('false', 'False'):
                    remove_zeros = False

                # Coordinate field names
                reqFields = [id_field, lg_field,
                             lt_field]  #, report_date_field]
                opFields = [summary_field, report_date_field]

            # Validate required field names
            field_test(incidents, reqFields, csvfieldnames, True)
            field_test(inc_features, reqFields, incfieldnames, True)

            # Validate optional field names
            field_test(incidents, opFields, csvfieldnames)
            field_test(inc_features, opFields, incfieldnames)

            # Validate basic publishing parameters
            if not pub_status == "":

                # Get general publishing parameters
                mxd = cfg.get('PUBLISHING', 'mxd')
                username = cfg.get('PUBLISHING', 'user_name')
                password = cfg.get('PUBLISHING', 'password')

                # Test for required inputs
                if not arcpy.Exists(mxd):
                    raise Exception(e1.format("Map document", mxd, ""))

                if splitext(mxd)[1] != ".mxd":
                    raise Exception(e3)

                # Test for required inputs
                if username == "" or password == "":
                    if pub_status == "ARCGIS_ONLINE":
                        raise Exception(e8)

            # Get address fields for geocoding
            if loc_type == "ADDRESSES":

                addresses = ""
                loc_fields = []
                adr_string = "{0} {1} VISIBLE NONE;"

                for loc_field in all_locator_fields:
                    if loc_field == loc_address_field:
                        addresses += adr_string.format(loc_field,
                                                       address_field)
                        loc_fields.append(address_field)

                    elif loc_field == loc_city_field and city_field != "":
                        addresses += adr_string.format(loc_field, city_field)
                        loc_fields.append(city_field)

                    elif loc_field == loc_state_field and state_field != "":
                        addresses += adr_string.format(loc_field, state_field)
                        loc_fields.append(state_field)

                    elif loc_field == loc_zip_field and zip_field != "":
                        addresses += adr_string.format(loc_field, zip_field)
                        loc_fields.append(zip_field)

                    else:
                        addresses += adr_string.format(loc_field, "<None>")

            # Get coordinate fields
            else:
                loc_fields = [lg_field, lt_field]

            total_records = len(field_vals(incidents, id_field))

            messages(m17.format(total_records, incidents), log)

            if not summary_field == "":
                SumVals = field_vals(incidents, summary_field)
                listSumVals = [val for val in SumVals if val != None]

                if not len(SumVals) == len(listSumVals):
                    print m19.format(len(SumVals) - len(listSumVals))
                    log.write(m19.format(len(SumVals) - len(listSumVals)))
                listSumVals.sort()

                log.write(l10.format(summary_field))
                dateCount = 1
                i = 0
                n = len(listSumVals)

                while i < n:

                    try:
                        if listSumVals[i] == listSumVals[i + 1]:
                            dateCount += 1
                        else:
                            log.write(l11.format(listSumVals[i], dateCount))
                            dateCount = 1
                    except:
                        log.write(l11.format(listSumVals[i], dateCount))
                    i += 1

                log.write("\n")

            # Remove duplicate incidents
            if delete_duplicates:

                timeNow = dt.strftime(dt.now(), time_format)
                messages(m13.format(timeNow), log)

                incidents, req_nulls, countUpdate, countDelete = remove_dups(
                    tempgdb, incidents, inc_features, matchfieldnames,
                    id_field, report_date_field, loc_fields)

                if not req_nulls == "":
                    req_nulls = "{}\n".format(req_nulls)
                    messages(w3.format(req_nulls), log, 1)

                if not countUpdate == 0:
                    messages(m14.format(countUpdate, inc_features), log)

                if countDelete > 0:
                    messages(m15.format(countDelete, inc_features), log)

            # Create features
            tempFC = join(tempgdb, "tempDataLE")

            # Create point features from spreadsheet

            timeNow = dt.strftime(dt.now(), time_format)
            messages(m1.format(timeNow), log)

            if loc_type == "ADDRESSES":

                timeNow = dt.strftime(dt.now(), time_format)
                messages(m3.format(timeNow), log)

                # Geocode the incidents
                arcpy.GeocodeAddresses_geocoding(incidents, locator, addresses,
                                                 tempFC, "STATIC")

                # Initiate geocoding report counts
                countMatch = 0
                countTrueMatch = 0
                countUnmatch = 0

                # Create geocoding reports
                rptUnmatch = join(reports,
                                  "{0}_{1}.csv".format(fileNow, unmatch_name))

                fieldnames = [f.name for f in arcpy.ListFields(tempFC)]

                # Sort incidents based on match status
                statusIndex = fieldnames.index(status)
                locIndex = fieldnames.index(addr_type)

                # Write incidents that were not well geocoded to file and
                #       delete from temp directory
                with open(rptUnmatch, "wb") as umatchFile:
                    unmatchwriter = csv.writer(umatchFile)
                    unmatchwriter.writerow(fieldnames)

                    # Delete incidents that were not Matched
                    countUnmatch = sort_records(tempFC, unmatchwriter,
                                                statusIndex, match_value,
                                                False, True)

                    if not countUnmatch == 0:
                        messages(w6.format(countUnmatch, rptUnmatch), log, 1)

                    # Incidents that were not matched to an acceptable accuracy
                    countMatch = sort_records(tempFC, unmatchwriter, locIndex,
                                              addrOK, False, True)

                    if not countMatch == 0:
                        messages(w7.format(countMatch, addrOK, rptUnmatch),
                                 log, 1)

                    countTrueMatch = len(field_vals(tempFC, "OBJECTID"))

                    messages(m16.format(countTrueMatch, inc_features), log)

            else:
                # Create temporary output storage
                tempFL = arcpy.MakeXYEventLayer_management(
                    incidents, lg_field, lt_field, "tempLayerLE", coord_system)

                # Convert the feature layer to a feature class to prevent
                #   field name changes

                arcpy.CopyFeatures_management(tempFL, tempFC)
                arcpy.Delete_management(tempFL)

            timeNow = dt.strftime(dt.now(), time_format)
            messages(m4.format(timeNow, inc_features), log)

            # Fields that will be copied from geocode results to final fc
            copyfieldnames = []
            copyfieldnames.extend(matchfieldnames)
            copyfieldnames.append("SHAPE@XY")

            # Fields for error reporting
            errorfieldnames = []
            errorfieldnames.extend(matchfieldnames)
            errorfieldnames.insert(0, errorfield)
            errorfieldnames += [long_field, lat_field]

            # Reproject the features
            sr_input = arcpy.Describe(tempFC).spatialReference
            sr_output = arcpy.Describe(inc_features).spatialReference

            if sr_input != sr_output:
                proj_out = "{}_proj".format(tempFC)

                arcpy.Project_management(tempFC, proj_out, sr_output,
                                         transform_method)
                tempFC = proj_out

            # Append geocode results to fc
            rptNoAppend = join(reports,
                               "{0}_{1}.csv".format(fileNow, noappend_name))

            with arcpy.da.SearchCursor(tempFC, copyfieldnames) as csvrows:
                with arcpy.da.InsertCursor(inc_features,
                                           copyfieldnames) as incrows:
                    # Open csv for un-appended records
                    with open(rptNoAppend, "wb") as appendFile:

                        appendwriter = csv.writer(appendFile)
                        appendwriter.writerow(errorfieldnames)

                        # Index of field with incident ID
                        record = errorfieldnames.index(id_field)

                        # Initiate count of successfully appended records
                        countAppend = 0

                        # List of ids of records not successfully appended
                        errorRecords = []

                        for csvrow in csvrows:
                            try:
                                if loc_type == "COORDINATES":
                                    if remove_zeros:
                                        lt_index = copyfieldnames.index(
                                            lt_field)
                                        lg_index = copyfieldnames.index(
                                            lg_field)

                                        ltVal = csvrow[lt_index]
                                        lgVal = csvrow[lg_index]

                                        if ltVal == 0 and lgVal == 0:
                                            raise Exception(
                                                "invalid_coordinates")

                                # If the row can be appended
                                incrows.insertRow(csvrow)
                                countAppend += 1

                            except Exception as reason:
                                # e.g. 'The value type is incompatible with the
                                #       field type. [INCIDENTDAT]'
                                # Alternatively, the exception
                                #      'invalid_coordinates' raised by the
                                #       remove_zeros test above

                                # Get the name of the problem field
                                badfield = reason[0].split(" ")[-1]
                                badfield = badfield.strip(" []")

                                # Append field name to start of record
                                csvrow = list(csvrow)
                                csvrow.insert(0, badfield)

                                # Split the coordinate tuple into X and Y
                                lng, lat = list(csvrow[-1])
                                csvrow[-1] = lng
                                csvrow.append(lat)
                                csvrow = tuple(csvrow)

                                # Write the record out to csv
                                appendwriter.writerow(csvrow)

                                # Add id and field to issue list
                                errorRecords.append(
                                    w4.format(csvrow[record], badfield))

            # If issues were reported, print them
            if len(errorRecords) != 0:
                messages(
                    w1.format(len(errorRecords), inc_features, rptNoAppend),
                    log, 1)

            messages(m18.format(countAppend, inc_features), log)

            del incrows, csvrows

            # Convert times to UTC if publishing to AGOL
            if pub_status == "ARCGIS_ONLINE":

                # Get date fields
                date_fields = [
                    f.name for f in arcpy.ListFields(inc_features)
                    if f.type == "Date" and f.name in matchfieldnames
                ]

                # Convert from system timezone to UTC
                convert_to_utc(inc_features, date_fields)

            # Publish incidents
            if not pub_status == "":

                timeNow = dt.strftime(dt.now(), time_format)
                messages(m5.format(timeNow), log)

                errors = serviceutils.publish_service(cfg, pub_status, mxd,
                                                      username, password)

                # Print analysis errors
                if errors:
                    raise Exception(e4.format(errors))

            # Convert times from UTC to system timezone
            if pub_status == "ARCGIS_ONLINE":
                convert_from_utc(inc_features, date_fields)

            timeNow = dt.strftime(dt.now(), time_format)
            messages(m8.format(timeNow), log)

        except arcpy.ExecuteError:
            print("{}\n{}\n".format(gp_error, arcpy.GetMessages(2)))
            timeNow = dt.strftime(dt.now(),
                                  "{} {}".format(date_format, time_format))
            arcpy.AddError("{} {}:\n".format(timeNow, gp_error))
            arcpy.AddError("{}\n".format(arcpy.GetMessages(2)))

            log.write("{} ({}):\n".format(gp_error, timeNow))
            log.write("{}\n".format(arcpy.GetMessages(2)))

            for msg in range(0, arcpy.GetMessageCount()):
                if arcpy.GetSeverity(msg) == 2:
                    code = arcpy.GetReturnCode(msg)
                    print("Code: {}".format(code))
                    print("Message: {}".format(arcpy.GetMessage(msg)))

        except Exception as ex:
            print("{}: {}\n".format(py_error, ex))
            timeNow = dt.strftime(dt.now(), "{}".format(time_format))

            arcpy.AddError("{} {}:\n".format(timeNow, py_error))
            arcpy.AddError("{}\n".format(ex))

            log.write("{} {}:\n".format(timeNow, py_error))
            log.write("{}\n".format(ex))

        finally:
            # Clean up
            try:
                arcpy.Delete_management(tempgdb)
            except:
                pass
Example #18
    arcpy.MakeRasterLayer_management(Mask, "MaskFeature")

    # Set environmental variable mask to input feature
    arcpy.env.mask = "MaskFeature"

    # Set off road values to a mph default
    OffRoad = Con(IsNull("RasRoad"), float(ccSpd), "RasRoad")
    arcpy.AddMessage("Off road speed set to " + str(ccSpd) + " mph.")

    # Calculate Slope
    ElevSlope = Slope(Elevation, "DEGREE", 1)
    arcpy.AddMessage("Slope created from elevation.")

    # Adjust off road speed by slope
    ModOffRoad = Con(OffRoad == float(ccSpd), (float(ccSpd) / ElevSlope),
                     OffRoad)
    arcpy.AddMessage("Off road speed adjusted for slope.")

    # Create final cost surface. Convert speed to time
    TravelCost = (1.0 / (ModOffRoad * float(spdTime)))
    TravelCost.save("TravelCost")

    arcpy.AddMessage("Cost surface created successfully!")

except:
    #Report Error Message
    print "There was an error creating the cost surface."
    arcpy.AddMessage(arcpy.GetMessage(2))

Example #19
    print "Creating egdb Database Connection File..."
    # Process: Create egdb Database Connection File...
    # Usage:  out_file_location, out_file_name, DBMS_TYPE, instance, database, account_authentication, username, password, save_username_password(must be true)
    arcpy.CreateDatabaseConnection_management(
        out_folder_path=Connection_File_Out_Folder,
        out_name=Connection_File_Name,
        database_platform=database_type,
        instance=instance,
        database=database,
        account_authentication=account_authentication,
        username=username,
        password=password,
        save_user_pass="******")
    for i in range(arcpy.GetMessageCount()):
        if "000565" in arcpy.GetMessage(
                i):  #Check if database connection was successful
            arcpy.AddReturnMessage(i)
            arcpy.AddMessage("Exiting!!")
            sys.exit(3)
        else:
            arcpy.AddReturnMessage(i)

#Check if no value entered for option
except SystemExit as e:
    if e.code == 2:
        parser.usage = ""
        print "\n"
        parser.print_help()
        parser.exit(2)
Example #20
    ##geoIndex = "AFFGEOID"
    geoIndex = arcpy.GetParameterAsText(
        2
    )  #User selected index field within selected shapefile.  Default is AFFGEOID.  For use with scriptTool

    filename = os.path.splitext(os.path.basename(geography))
    directory = os.path.split(os.path.dirname(geography))
    geoPoints = filename[0] + "Points_TEST.shp"
    outworkspace = "C:\\Users\\ismae\\OneDrive - Kansas State University\\finalProjectRodriguez\\scratch\\"

    arcpy.FeatureToPoint_management(
        geography, geoPoints, "CENTROID"
    )  #Generates a point shapefile with points centroids of user selected shapefile
    messageA = arcpy.GetMessageCount()
    arcpy.AddMessage("FeatureToPoint Tool: " + arcpy.GetMessage(messageA - 1))
    print "FeatureToPoint Tool: " + arcpy.GetMessage(messageA - 1)

    rasterFolder = arcpy.ListFiles(
        "*.tif")  #Builds a list of the raster .tif files in the workspace

    # This loop builds a list of raster files and associated index labels.  targetSet = ['indexRaster.tif','index']
    targetSet = []
    for rasterFile in rasterFolder:
        rasterName = os.path.splitext(os.path.basename(rasterFile))
        voteName = rasterName[0]
        geographyRaster = arcpy.sa.ZonalStatistics(
            geography, geoIndex, rasterFile, "SUM",
            "DATA")  #estimates total votes for specified geography
        geographyRaster.save(
            os.path.join(outworkspace,
Example #21
#Name: John Doe
#Date: Aug, 2011
#Purpose: Working with errors in Python
import arcpy
try:
	arcpy.env.workspace = "C:\\Users\\Me\\Desktop\\GIS Programming\\Training"
	arcpy.Buffer_analysis("test.shp","sch_buff")
except:
	print arcpy.GetMessage(1)
	print arcpy.GetMessage (arcpy.GetMessageCount()-1)
Example #22
separator = sys.argv[6]

##input_table = r"C:\Student\ICTPythonGIS\Data\Texas\NWHouston.dbf"
##sql_expression = "PRICESQFT > 0"
##new_field = "PriceSize"
##field_one = "PRICESQFT"
##field_two = "SQFT"
##separator = " psf/Size: "

# Add a field to table
try:
    arcpy.AddField_management(input_table, new_field, "TEXT", "", "", 50,
                              "PriceSize")

    # Create an update cursor to add data to new field
    field_list = [new_field, field_one, field_two]
    with arcpy.da.UpdateCursor(input_table, field_list,
                               sql_expression) as u_cursor:
        for u_row in u_cursor:
            u_row[0] = "{}{}{}".format(u_row[1], separator, u_row[2])
            u_cursor.updateRow(u_row)
##            print(u_row[0])
except RuntimeError as e:
    print(arcpy.GetMessage(2))
    print(e)

# Print message confirming success
print("New field populated: {}".format(new_field))
# Delete variables
del input_table, sql_expression, new_field, field_one, field_two, separator, field_list, u_row, u_cursor
Example #23
    def executer(self, env, geodatabase, tableContraintes, classeDecoupage,
                 attributDecoupage, repTravail, dateFin, typeTravail,
                 courriel):
        #-------------------------------------------------------------------------------------
        """
        Exécuter le traitement pour valider l'intégrité des données spatiales livrées dans la BDG selon une table de contraintes spatiales.
        
        Paramètres:
        -----------
        env                 : Type d'environnement.
        geodatabase         : Nom de la géodatabase contenant les tables spatiales.
        tableContraintes    : Nom de la table contenant les contraintes spatiales.
        classeDecoupage     : Nom de la classe contenant les polygones et les identifiants de découpage.
        attributDecoupage   : Nom de l'attribut de la classe de découpage contenant les identifiants de découpage.
        repTravail          : Nom du répertoire de travail.
        dateFin             : Date de fin du traitement de livraison des données BDG (ex:2015-12-15 16:21:54).
        typeTravail         : Liste des types de travaux présents dans la table SER_RECONCILE_LOG et
                              dont les identifiants ont été livrés après la date de fin spécifiée.
        courriel            : Adresse courriel utilisée pour envoyer le rapport d'exécution.
               
        Variables:
        ----------
        self.CompteSib  : Objet utilitaire pour la gestion des connexion à SIB.       
        oSib            : Objet utilitaire pour traiter des services SIB.
        """

        #Instantiate the Sib class and connect to the SIB database
        arcpy.AddMessage("- Connexion à la BD SIB")
        oSib = self.CompteSib.OuvrirConnexionSib(env, env)

        #Build the SQL query.
        arcpy.AddMessage(" ")
        arcpy.AddMessage(
            "- Extraction des travaux effectués sur le nombre d'identifiants livrés"
        )
        sSql = ("SELECT TY_TRAV, COUNT(*)"
                " FROM SER_RECONCILE_LOG@BDG_DBA"
                " WHERE STATUT=9 AND DATE_FIN>TO_DATE('" + dateFin +
                "','yyyy-mm-dd HH24:MI:SS')")
        #Check whether work types were specified for the query
        if len(typeTravail) > 0:
            #Initialize the list of work types
            listeTrav = ""
            #Extract the work types
            for trav in typeTravail.split(";"):
                #Add the work type to the list
                listeTrav = listeTrav + trav.split(":")[0] + ","
                #Display the work type and the number of identifiers
                arcpy.AddMessage(str(trav))
            #Add the list of work types to the SQL query
            sSql = sSql + " AND TY_TRAV IN ('" + listeTrav[:-1].replace(
                ",", "','") + "')"
        #Add grouping and sorting to the SQL query
        sSql = sSql + " GROUP BY TY_TRAV ORDER BY TY_TRAV"

        #Run the SQL query
        arcpy.AddMessage(sSql)
        resultatTrav = oSib.requeteSib(sSql)

        #Process every work type
        for trav in resultatTrav:
            #Display the work type being processed
            arcpy.AddMessage(" ")
            arcpy.AddMessage(str(trav))

            #----------------------------------------------------------
            #Build the SQL query to extract the delivered identifiers
            sSql = ("SELECT IDENTIFIANT"
                    "  FROM SER_RECONCILE_LOG@BDG_DBA"
                    " WHERE STATUT=9 AND DATE_FIN>TO_DATE('" + dateFin +
                    "','yyyy-mm-dd HH24:MI:SS')"
                    "   AND TY_TRAV='" + trav[0] + "'"
                    " ORDER BY IDENTIFIANT")

            #Run the SQL query
            #arcpy.AddMessage(sSql)
            resultatId = oSib.requeteSib(sSql)

            #Check whether there are no identifiers
            if len(resultatId) == 0:
                #Abort: no identifiers to process
                raise Exception("ERREUR : Aucun identifiant à traiter")

            #Check whether the count exceeds 999
            if len(resultatId) > 999:
                #Abort: too many identifiers to process
                raise Exception(
                    "Nombre d'identifiant supérieur à la limite permise : ("
                    + str(len(resultatId)) + ">999)")

            #Initialize the identifier selection list
            listeId = ""
            #Process every identifier
            for id in resultatId:
                #Add the identifier to the list
                listeId = listeId + id[0] + ","
            #Build the identifier query
            requeteId = attributDecoupage + " IN ('" + listeId[:-1].replace(
                ",", "','") + "')"

            #Créer le Layer de découpage
            arcpy.AddMessage("- Création du Layer de découpage ...")
            dateHeure = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
            clsDecoupage = geodatabase + "\\" + classeDecoupage
            lyrDecoupage = "Decoupage_" + trav[0] + "_" + dateHeure + ".lyr"
            #Process: Make Feature Layer
            arcpy.AddMessage('MakeFeatureLayer_management "' + clsDecoupage +
                             '" ' + lyrDecoupage + ' "' + requeteId + '"')
            arcpy.MakeFeatureLayer_management(clsDecoupage, lyrDecoupage,
                                              requeteId)
            arcpy.AddMessage(arcpy.GetMessage(arcpy.GetMessageCount() - 1))
            #Process: Select Layer By Attribute
            arcpy.AddMessage("SelectLayerByAttribute_management " +
                             lyrDecoupage + " NEW_SELECTION")
            arcpy.SelectLayerByAttribute_management(lyrDecoupage,
                                                    "NEW_SELECTION")
            arcpy.AddMessage(arcpy.GetMessage(arcpy.GetMessageCount() - 1))
            #Process: Save To Layer File
            arcpy.AddMessage("SaveToLayerFile_management " + lyrDecoupage +
                             " " + repTravail + "\\" + lyrDecoupage)
            arcpy.SaveToLayerFile_management(lyrDecoupage,
                                             repTravail + "\\" + lyrDecoupage)
            arcpy.AddMessage(arcpy.GetMessage(arcpy.GetMessageCount() - 1))

            #----------------------------------------------------------
            #Créer la requête SQL pour extraire les identifiants livrés
            sSql = (
                " SELECT DISTINCT B.FEAT_TYPE_NAME_DATABASE"
                " FROM FEAT_CATALOGUE@BDG_VIEW A, FEAT_TYPE@BDG_VIEW B"
                " WHERE A.FEAT_CATAL_TYPE=1"
                "   AND A.FEAT_CATAL_ID=B.FEAT_CATAL_FK"
                "   AND B.FEAT_TYPE_CODE_BD IN "
                "   ("
                "       SELECT DISTINCT LE.CD_ELEM_TOPO"
                "      FROM F502_PS PS, F502_LE LE, F503_TR TR, F601_LO LO"
                "     WHERE TR.TY_TRAV='" + trav[0] + "'"
                "       AND PS.NO_MAP=TR.NO_MAP AND PS.NO_MAP=LE.NO_MAP AND TR.NO_LOT=LO.NO_LOT"
                "   )"
                " ORDER BY B.FEAT_TYPE_NAME_DATABASE")

            #Exécuter la requête SQL
            resultatClasse = oSib.requeteSib(sSql)

            #Vérifier si aucune classe
            if len(resultatClasse) == 0:
                #No classes to process: log the query and abort with an error
                arcpy.AddMessage(sSql)
                raise Exception("ERREUR : Aucune classe à traiter")

            #Initialisation de la liste des classes à traiter
            listeClasse = ""
            #Traiter toutes les classes
            for cls in resultatClasse:
                #Ajouter la classe à la liste
                listeClasse = listeClasse + cls[0] + ","
            #Remove the trailing comma from the list of classes
            listeClasse = listeClasse[:-1]

            #----------------------------------------------------------
            #Afficher le message d'exécution du traitement de validation pour le travail
            arcpy.AddMessage(
                "- Exécution du traitement de validation spatiale ...")
            #Définir le nom du rapport d'exécution
            nomRapport = repTravail + "\\Rapport_" + trav[
                0] + "_" + dateHeure + ".txt"
            #Définir la commande de validation
            cmd = (
                "D:\\cits\\EnvCits\\applications\\gestion_bdg\\pro\\Geotraitement\\exe\\ValiderIntegriteSpatiale.exe"
                ' "' + geodatabase + '"'
                " " + tableContraintes + ""
                " #"
                " " + listeClasse + ""
                " " + nomRapport + ""
                " " + repTravail + "\\%" + attributDecoupage + "%_" + trav[0] +
                "_" + dateHeure + ".mdb"
                " " + courriel + ""
                " " + repTravail + "\\" + lyrDecoupage + ""
                " " + attributDecoupage + "")
            #Afficher la commande
            arcpy.AddMessage(cmd)
            #Exécuter la commande
            err = os.system(cmd + " > " + nomRapport.replace(".txt", ".log"))
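            #Reviewer note (not original): "err" holds the os.system return code but is
            #never checked; a non-zero value would mean ValiderIntegriteSpatiale.exe
            #failed, so testing it (for example "if err != 0:") and logging or raising
            #here would surface such failures instead of leaving them silent.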

        # Fermeture de la connexion de la BD SIB
        arcpy.AddMessage(" ")
        arcpy.AddMessage("- Fermeture de la connexion SIB")
        oSib.fermerConnexionSib()

        # Sortir du traitement
        return
예제 #24
0
    except:
        arcpy.AddError(
            "The script could not read your ratio because it is not a number")
        quit()
print(ratio)
desc = arcpy.Describe(dem)  # Get information from the DEM
nombre = os.path.join(files,
                      desc.baseName + ".txt")  # Get the name from the DEM

# Create a .txt file and populate it with the data from the Surface volume
# calculation, given the thresholds and the interval
with open(nombre, "w") as f:
    for plane in range(minalt, maxalt, interval):
        try:
            result = arcpy.SurfaceVolume_3d(dem, "", "ABOVE", plane)
            print(arcpy.GetMessage(0), file=f)
            print(arcpy.GetMessage(1), file=f)
            print(arcpy.GetMessage(2), file=f)
            print(arcpy.GetMessage(3), file=f)
        except Exception as e:
            print(e)

# Create list of altitudes and populate primervalor
primervalor = []
start_altitude = minalt
while start_altitude >= minalt and start_altitude < maxalt:
    primervalor.append(start_altitude)
    start_altitude = start_altitude + interval
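# Note (not original code): for integer inputs this loop is equivalent to
# primervalor = list(range(minalt, maxalt, interval))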

# Read the .txt and populate lists with the relevant data
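# Hedged sketch (not the original code, which is cut off here): assuming the Surface
# Volume messages written above contain a result line such as
# "Plane_Height= ..., Area_2D= ..., Area_3D= ..., Volume= ...", the volumes can be
# pulled back out of the .txt with a regular expression. The list name "segundovalor"
# is a placeholder assumption.
import re

segundovalor = []
with open(nombre) as f:
    for line in f:
        match = re.search(r"Volume=\s*([-+0-9.eE]+)", line)
        if match:
            segundovalor.append(float(match.group(1)))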
예제 #25
0
    myFile.write(
        "*****                     Check below for any FAILED Statements           *****"
        + "\n")
    myFile.write(
        "*****                     Check below for any WARNING Statements          *****"
        + "\n")
    myFile.write(
        "*****                     Raw MBPS required                               *****"
        + "\n")
    myFile.write(
        "*******************************************************************************"
        + "\n")
    myFile.close()
    del theDay, theMonth, theYear
except:
    arcpy.AddMessage(arcpy.GetMessage(0))
    arcpy.AddMessage(arcpy.GetMessage(1))
    arcpy.AddMessage(arcpy.GetMessage(2))
    theMsg = "Something bad happened during the writing of the reciept;"
    theMsg = theMsg + "please re-run"
    arcpy.AddMessage(theMsg)
    del theMsg


#write out functions
#Function sbdd_qry creates a layer from a query and determines if the
#count is greater than 0; essentially this function looks for unexpected
#values in a source layer field
def sbdd_qry(theFL, myFL, myQry, mySeverity):
    if mySeverity == "Fail":
        myMsg = "    FAILED      " + myFL + " YOU MUST FIX THESE"
    def executer(self, env, featureLayerDecoupage, attributDecoupage,
                 featureClassValider, requeteClassValider, relationSpatiale,
                 typeSelection, featureClassRelation, requeteClassRelation,
                 repLayerErreur, featureClassErreur):
        #-------------------------------------------------------------------------------------
        """
        Permet d'exécuter le traitement pour valider les données entre deux classes selon une requête spatiale pour tous les éléments de découpage sélectionnés.
        
        Paramètres:
        -----------
        env                     : Environnement de travail
        featureLayerDecoupage   : Nom du FeatureLayer contenant les éléments de découpage à valider.
        attributDecoupage       : Nom de l'attribut du FeatureLayer contenant l'identifiant de découpage à valider.
        featureClassValider     : Nom de la FeatureClass à valider.
        requeteClassValider     : Requête attributive utilisée de la classe à valider.
        relationSpatiale        : Relation spatiale pour effectuer la validation.
        typeSelection           : Type de sélection appliqué sur le résultat obtenu de la relation spatiale.
        featureClassRelation    : Nom de la FeatureClass en relation.
        requeteClassRelation    : Requête attributive de la classe en relation.
        repLayerErreur          : Nom du répertoire contenant les FeatureLayer des éléments en erreurs.
        featureClassErreur      : Nom de la FeatureClass contenant les géométries des éléments en erreurs.
        
        Retour:
        -------
        Aucun
        
        """

        #Initialisation du nombre total d'erreurs
        nbErrTotal = 0

        #Vérifier si on doit écrire les erreurs dans une FeatureClass d'erreurs
        if len(featureClassErreur) > 0:
            #Message de vérification de la FeatureClass d'erreurs
            arcpy.AddMessage("- Vérifier la FeatureClass d'erreurs")

            #Get the description of the FeatureClass to validate
            desc = arcpy.Describe(featureClassValider)

            #Vérifier si la FeatureClass est présente
            if arcpy.Exists(featureClassErreur):
                #Message de vérification de la FeatureClass d'erreur
                arcpy.AddWarning("FeatureClass déjà présente : " +
                                 featureClassErreur)

                #Get the description of the error FeatureClass
                descClsErr = arcpy.Describe(featureClassErreur)

                #Vérifier si le type de géométrie correspond
                if desc.shapeType != descClsErr.shapeType:
                    #Retourner une exception
                    raise Exception(
                        "Le type de géométrie entre la FeatureClass à valider et celle d'erreurs ne correspond pas : "
                        + desc.shapeType + "<>" + descClsErr.shapeType)

            #Si elle est absente
            else:
                #Définir le nom de la classe
                baseName = os.path.basename(featureClassErreur)

                #Créer la FeatureClass d'erreurs
                arcpy.AddMessage("CreateFeatureclass_management " +
                                 featureClassErreur.replace(baseName, "") +
                                 " " + baseName + " " + desc.shapeType + " " +
                                 desc.spatialReference.name)
                arcpy.CreateFeatureclass_management(
                    featureClassErreur.replace(baseName, ""),
                    baseName,
                    desc.shapeType,
                    spatial_reference=desc.spatialReference)
                arcpy.AddMessage(arcpy.GetMessage(arcpy.GetMessageCount() - 1))

            #Créer le curseur pour ajouter les éléments dans la FeatureClass d'erreurs
            cursor = arcpy.da.InsertCursor(featureClassErreur, ["SHAPE@"])

        #Forcer la destruction des fichiers de sortie
        arcpy.env.overwriteOutput = True

        #Afficher le message pour traiter tous les éléments sélectionnés dans le FeatureLayer de découpage
        arcpy.AddMessage(
            "- Traiter tous les éléments sélectionnés du FeatureLayer de découpage : "
            + featureLayerDecoupage)

        #Créer le curseur des éléments de découpage
        cursorDecoupage = arcpy.SearchCursor(featureLayerDecoupage)

        #Extraire le premier élément
        feature = cursorDecoupage.next()

        #Traiter tant qu'il y aura des éléments de découpage
        while feature:
            #Définir le découpage traité
            decoupage = str(feature.getValue(attributDecoupage))

            #Message de validation du découpage
            arcpy.AddMessage(" ")
            arcpy.AddMessage("- Validation des données : " +
                             attributDecoupage + "=" + decoupage)

            # Process: Make Feature Layer
            lyrErrName = decoupage + "_Erreur"
            arcpy.AddMessage(
                "MakeFeatureLayer_management " + featureClassValider + " " +
                lyrErrName + " " +
                requeteClassValider.replace("<DECOUPAGE>", decoupage))
            arcpy.MakeFeatureLayer_management(
                featureClassValider, lyrErrName,
                requeteClassValider.replace("<DECOUPAGE>", decoupage))
            arcpy.AddMessage(arcpy.GetMessage(arcpy.GetMessageCount() - 1))

            # Process: Select Layer By Attribute
            arcpy.AddMessage("SelectLayerByAttribute_management " +
                             lyrErrName + " NEW_SELECTION")
            arcpy.SelectLayerByAttribute_management(lyrErrName,
                                                    "NEW_SELECTION")
            arcpy.AddMessage(arcpy.GetMessage(arcpy.GetMessageCount() - 1))

            # Process: Make Feature Layer (2)
            desc = arcpy.Describe(featureClassRelation)
            lyrName = desc.baseName.split(".")[-1]
            arcpy.AddMessage(
                "MakeFeatureLayer_management " + featureClassRelation + " " +
                lyrName + " " +
                requeteClassRelation.replace("<DECOUPAGE>", decoupage))
            arcpy.MakeFeatureLayer_management(
                featureClassRelation, lyrName,
                requeteClassRelation.replace("<DECOUPAGE>", decoupage))
            arcpy.AddMessage(arcpy.GetMessage(arcpy.GetMessageCount() - 1))

            # Process: Select Layer By Location
            arcpy.AddMessage("SelectLayerByLocation_management " + lyrErrName +
                             " " + relationSpatiale + " " + lyrName + " " +
                             typeSelection)
            lyrErr = arcpy.SelectLayerByLocation_management(
                lyrErrName, relationSpatiale, lyrName, "", typeSelection)
            arcpy.AddMessage(arcpy.GetMessage(arcpy.GetMessageCount() - 1))

            #Initialiser le nombre d'erreurs
            nbErr = 0

            #Get the description of the error FeatureLayer
            descLyrErr = arcpy.Describe(lyrErr)

            #Vérifier la présence d'erreurs
            if len(descLyrErr.fidSet) > 0:
                #Définir le nombre d'erreurs
                nbErr = len(descLyrErr.fidSet.split(";"))
                nbErrTotal = nbErrTotal + nbErr

                #Mettre le featureLayer non-visible
                lyrErr.visible = False
                #Définir le nom du featureLayer à écrire sur disque
                featureLayerErreurSnrc = repLayerErreur + "\\" + lyrErrName

                #Vérifier si le FeatureLayer est déjà présent
                if os.path.exists(featureLayerErreurSnrc):
                    #Détruire le FeatureLayer
                    os.remove(featureLayerErreurSnrc)

                # Process: Save To Layer File
                arcpy.AddMessage("SaveToLayerFile_management " + lyrErrName +
                                 " " + featureLayerErreurSnrc)
                arcpy.SaveToLayerFile_management(lyrErrName,
                                                 featureLayerErreurSnrc)
                arcpy.AddMessage(arcpy.GetMessage(arcpy.GetMessageCount() - 1))

                #Vérifier si on doit écrire les erreurs dans une FeatureClass d'erreurs
                if len(featureClassErreur) > 0:
                    #Write the errors to the error FeatureClass
                    arcpy.AddMessage("Écriture des erreurs dans : " +
                                     featureClassErreur)
                    #Traiter tous les éléments du FeatureLayer d'erreurs
                    for row in arcpy.SearchCursor(lyrErr):
                        #Extraire le OBJECTID
                        #arcpy.AddMessage(str(row.getValue("OBJECTID")))

                        #Extraire la géométrie
                        geometrie = row.getValue("SHAPE")

                        #Insérer l'élément dans la FeatureClass
                        cursor.insertRow([geometrie])

            #Afficher le nombre d'erreurs
            arcpy.AddMessage("Nombre d'erreurs : " + str(nbErr))

            #Extraire le prochain élément
            feature = cursorDecoupage.next()

        #Vérifier si on doit écrire les erreurs dans une FeatureClass d'erreurs
        if len(featureClassErreur) > 0:
            #Accepter les modifications
            del cursor

        #Afficher le nombre total d'erreurs
        arcpy.AddMessage(" ")
        arcpy.AddMessage("Nombre total d'erreurs : " + str(nbErrTotal))
        arcpy.AddMessage(" ")

        #Sortir
        return
예제 #27
0
inputFolder = arcpy.GetParameterAsText(0)
inputRoads = arcpy.GetParameterAsText(1)
inputPrivateLand = arcpy.GetParameterAsText(2)
bufferdist = arcpy.GetParameterAsText(3)
birdviews = arcpy.GetParameterAsText(4)
inputExtent = arcpy.GetParameterAsText(5)

#setup env params, overwrite and extent
arcpy.env.overwriteOutput = True
arcpy.env.extent = inputExtent

#buffer road and create new file named and define path for erase tool
buffName = inputRoads.replace(".shp", "_buffered.shp")
arcpy.Buffer_analysis(inputRoads, buffName, bufferdist)
buffPath = os.path.join(buffName)
#erase unwanted area from clipped feature and create new file and define path for clip tool
eraseName = buffPath.replace("_buffered.shp", "_erased.shp")
arcpy.Erase_analysis(buffPath, inputPrivateLand, eraseName)
clipPath = os.path.join(eraseName)

#for each habitat file the tool will clip the suitable area and create new file
arcpy.env.workspace = inputFolder
fcList = arcpy.ListFeatureClasses()
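# Note (not original code): when inputFolder holds shapefiles, ListFeatureClasses()
# returns names that include the ".shp" extension, so fc + "_view" below produces
# names like "roads.shp_view"; stripping the extension with os.path.splitext(fc)[0]
# may be needed depending on the birdviews workspace.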
for fc in fcList:
    outputName = os.path.join(birdviews, fc + "_view")
    arcpy.Clip_analysis(fc, clipPath, outputName)

#add finish message
arcpy.AddMessage("Tool is successful.")
print(arcpy.GetMessage(0))
예제 #28
0
def Solve(Input_Layer_Location, ScenarioNames, Input_Dataset,
          Evacuation_Prefix, Safe_Zone_Prefix, Dynamics_Prefix, ThreadCount,
          msgLock, dbLock, ThreadNum):
    # Check out any necessary licenses
    if arcpy.CheckExtension("Network") == "Available":
        arcpy.CheckOutExtension("Network")
    else:
        arcpy.AddMessage("Network Analyst Extension Is Not Available")
        print "Network Analyst Is Not Available"
        sys.exit(0)

    # Load required toolboxes
    arcpy.env.overwriteOutput = True
    SolveCount = 0

    try:
        # load layer file
        lyrFile = arcpy.mapping.Layer(Input_Layer_Location)
        messages = []

        # loop over all scenarios, import them into each NA layer
        for ExpName in ScenarioNames:
            # arcpy.AddMessage("Importing scenario: " + ExpName[0])
            EVC = Input_Dataset + '\\' + Evacuation_Prefix + ExpName[0]
            SAFE = Input_Dataset + '\\' + Safe_Zone_Prefix + ExpName[0]
            DYN = Input_Dataset + '\\' + Dynamics_Prefix + ExpName[0]

            # now loop over all NA layers and solve them one by one
            for lyr in arcpy.mapping.ListLayers(lyrFile):
                try:
                    desc = arcpy.Describe(Input_Layer_Location + "\\" +
                                          lyr.longName)
                    # only solve if the layer is a network analysis layer
                    if desc.dataType == "NALayer":

                        # We check if this setup and scenario is intended for this particular sub-process
                        SolveCount += 1
                        if SolveCount % ThreadCount != ThreadNum:
                            continue

                        # load input locations
                        del messages[:]
                        try:
                            messages.append(
                                "Thread {}: loading input points to {} from scenario {}"
                                .format(ThreadNum, lyr.name, ExpName[0]))

                            arcpy.AddLocations_na(
                                lyr, "Evacuees", EVC,
                                "VehicleCount POPULATION #;Name UID #",
                                "5000 Meters", "",
                                "Streets NONE;SoCal_ND_Junctions SHAPE",
                                "MATCH_TO_CLOSEST", "CLEAR", "NO_SNAP",
                                "5 Meters", "EXCLUDE",
                                "Streets #;SoCal_ND_Junctions #")
                            for msg in range(0, arcpy.GetMessageCount()):
                                messages.append(arcpy.GetMessage(msg))
                            arcpy.AddLocations_na(
                                lyr, "Zones", SAFE,
                                "Name OBJECTID #;Capacity Capacity #",
                                "5000 Meters", "",
                                "Streets NONE;SoCal_ND_Junctions SHAPE",
                                "MATCH_TO_CLOSEST", "CLEAR", "NO_SNAP",
                                "5 Meters", "EXCLUDE",
                                "Streets #;SoCal_ND_Junctions #")
                            for msg in range(0, arcpy.GetMessageCount()):
                                messages.append(arcpy.GetMessage(msg))
                            arcpy.AddLocations_na(
                                lyr, "DynamicChanges", DYN,
                                "EdgeDirection Zones_EdgeDirection #;StartingCost Zones_StartingCost #;EndingCost Zones_EndingCost #;CostChangeRatio Zones_CostChangeRatio #;CapacityChangeRatio Zones_CapacityChangeRatio #",
                                "5000 Meters", "",
                                "Streets SHAPE;SoCal_ND_Junctions NONE",
                                "MATCH_TO_CLOSEST", "CLEAR", "NO_SNAP",
                                "5 Meters", "INCLUDE",
                                "Streets #;SoCal_ND_Junctions #")
                            for msg in range(0, arcpy.GetMessageCount()):
                                messages.append(arcpy.GetMessage(msg))

                            # solve the layer
                            messages.append("Solving NALayer " + lyr.name +
                                            " with scenario " + ExpName[0])
                            arcpy.Solve_na(lyr, "SKIP", "TERMINATE")
                            for msg in range(0, arcpy.GetMessageCount()):
                                messages.append(arcpy.GetMessage(msg))

                            # going to export route and edge sub_layers
                            solved_layers = arcpy.mapping.ListLayers(lyr)

                            # lock and then write outputs to gdb
                            try:
                                dbLock.acquire()
                                arcpy.CopyFeatures_management(
                                    solved_layers[4],
                                    Input_Dataset + "\\Routes_" + lyr.name +
                                    "_" + ExpName[0])  #Routes
                                for msg in range(0, arcpy.GetMessageCount()):
                                    messages.append(arcpy.GetMessage(msg))
                                arcpy.CopyFeatures_management(
                                    solved_layers[5],
                                    Input_Dataset + "\\EdgeStat_" + lyr.name +
                                    "_" + ExpName[0])  #EdgeStat
                                for msg in range(0, arcpy.GetMessageCount()):
                                    messages.append(arcpy.GetMessage(msg))
                            finally:
                                dbLock.release()
                                del solved_layers

                            messages.append(
                                "Combination {}: Solved {} with scenario {}{}".
                                format(SolveCount, lyr.name, ExpName[0],
                                       os.linesep))

                        except BaseException as e:
                            messages.append("Error: {}".format(e))
                            messages.append(
                                "Combination {}: Errored {} with scenario {}{}"
                                .format(SolveCount, lyr.name, ExpName[0],
                                        os.linesep))

                        # lock and then print messages
                        try:
                            msgLock.acquire()
                            for msg in messages:
                                arcpy.AddMessage(msg)
                        finally:
                            msgLock.release()

                except BaseException as e:
                    arcpy.AddError(e)
                finally:
                    del desc

    except BaseException as e:
        arcpy.AddError(e)
    finally:
        del lyrFile
        del messages
        arcpy.CheckInExtension("Network")
예제 #29
0
#-------------------------------------------------------------------------------
# Name:
# Purpose:
#
# Author:      Marie Cline Delgado
#
#-------------------------------------------------------------------------------

import arcpy, os, time

timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
arcpy.GetMessage("Start time: " + timestamp)

# Variables
inGDB = arcpy.GetParameterAsText(0)  # GDB to extract CIP from
name = arcpy.GetParameterAsText(1)  # Installation name - string, no spaces
path = arcpy.GetParameterAsText(2)  # Output location - folder directory
xmlWorkspace = arcpy.GetParameterAsText(
    3)  # XML Shell Document - file variable
name = str(name + ".gdb").replace(" ", "_")

# Create CIP shell with empty GDB and XML Workspace Document
arcpy.GetMessage("Creating empty FGDB")
arcpy.CreateFileGDB_management(path, name)
arcpy.GetMessage("Importing XML schema into FGDB")
arcpy.ImportXMLWorkspaceDocument_management(name, xmlWorkspace, "SCHEMA_ONLY",
                                            "DEFAULTS")
cipLoc = os.path.join(path, name)

arcpy.env.workspace = inGDB  # Location of GDB to extract CIP from
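
# Hedged sketch (assumption; the original continuation is not shown): with the
# workspace set to inGDB, the extraction step would typically list the source
# feature classes and load them into the matching schema-only classes in cipLoc:
#
#   for fc in arcpy.ListFeatureClasses():
#       target = os.path.join(cipLoc, fc)
#       if arcpy.Exists(target):
#           arcpy.Append_management(fc, target, "NO_TEST")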
예제 #30
0
    def calculate_field_stats(self):
        Printboth('Calculating field stats...')
        result = IUCNresult('-----CALCULATE FIELD STATS-----')
        # get row iterator
        layer = "featurelayer"
        arcpy.MakeFeatureLayer_management(self.inputFL, layer)

        rows = None
        row = None
        try:
            rows = arcpy.SearchCursor(layer)
            row = rows.next()

            #pyDict stores a dictionary of value counts for each selected field
            pyDict = {}
            for eachfield in self.fields:
                pyDict[eachfield] = dict()

            while row:
                for eachfield in self.fields:
                    # need to test to see if the selected field is in the table
                    if eachfield in self._inputFL_field_list:
                        if row.getValue(eachfield) not in pyDict[eachfield].keys():
                            pyDict[eachfield][row.getValue(eachfield)] = 1
                        else:
                            pyDict[eachfield][row.getValue(eachfield)] += 1
                    else:
                        pass

                row = rows.next()

        except:
            result.log('\t'+ arcpy.GetMessage(2))

        else:
            # summary stats for all
            result.log('\t[SUMMARY]\n\tFieldname, Count of unique values')
            for key in pyDict.keys():
                try:
                    result.log('\t' + str(key) + ',' + str(len(pyDict[key])))
                except:
                    pass
            result.log('\n')

            # key for each field
            for key in pyDict.keys():
                result.log('\t[FIELD]: '+ str(key))
                result.log('\tvalue, count')
                # key for each distinct value in field
                for subkey in pyDict[key].keys():
                    try:
                        result.log('\t' + str(subkey) + ',' + str(pyDict[key][subkey]))
                    except:
                        pass
                result.log('\n')

        finally:
            if row:
                del row
            if rows:
                del rows

        del layer
        self.resultList.append(result)
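

# Hedged sketch (not part of the original class): the same per-field frequency
# counts can be produced with arcpy.da.SearchCursor and collections.Counter.
# "input_fc" and "fields" are placeholder assumptions; arcpy is assumed to be
# imported by the module.
def calculate_field_stats_sketch(input_fc, fields):
    import collections
    counts = {f: collections.Counter() for f in fields}
    # one pass over the table, tallying every value of each requested field
    with arcpy.da.SearchCursor(input_fc, fields) as cursor:
        for row in cursor:
            for field, value in zip(fields, row):
                counts[field][value] += 1
    return counts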