# Example 1
# Walk the search cursor: collect centroid coordinates for every ZCTA whose
# state is selected, and record every state name actually seen in the file.
while row:
    current_state = row.getValue("NAME10")
    if current_state in selected_states:
        # Store [POINT_X, POINT_Y] keyed by the ZCTA code.
        coords = [row.getValue("POINT_X"), row.getValue("POINT_Y")]
        zctas[row.getValue("ZCTA5CE10")] = coords
    # Mark this state as present (value unchanged if already recorded).
    states.setdefault(current_state, 1)
    row = sc.next()

# Warn about any selected state that never appeared in the ZCTA file.
missing = [s for s in selected_states if s not in states]
for each_state in missing:
    arcpy.AddWarning(
        "{0} state is not found in ZCTA file... Please contact [email protected] to update the tool!!!"
        .format(each_state))
notfound = len(missing)
if not missing:
    arcpy.AddMessage("All state(s) have been found!!")

# Write an origin-destination table: one CSV row per ordered pair of distinct
# ZCTAs, with the origin/destination coordinates pulled from the zctas dict.
# NOTE(review): the original source was truncated here (the inner `if` had no
# body, `i` was never incremented, and the file was never closed); this
# reconstruction writes one record per pair to match the CSV header — confirm
# the intended record layout against the original tool.
arcpy.AddMessage("Generating tables...")
with open(output_file, "w") as f:
    f.write("OZCTA,DZCTA,olat,olong,dlat,dlong\n")
    i = 0
    for each_ozcta in zctas:
        for each_dzcta in zctas:
            if i % 10000 == 0:
                arcpy.AddMessage("Processing {0} records...".format(i))
            if each_ozcta != each_dzcta:
                # zctas maps ZCTA -> [POINT_X, POINT_Y], i.e. [long, lat];
                # the header orders each pair as lat then long.
                f.write("{0},{1},{2},{3},{4},{5}\n".format(
                    each_ozcta, each_dzcta,
                    zctas[each_ozcta][1], zctas[each_ozcta][0],
                    zctas[each_dzcta][1], zctas[each_dzcta][0]))
            i += 1
print("Complete")

        
def calibrationPointRoutes(routesMZ, idField, outFeatureName, refPointLayer):
    """Calibrate route measures against a reference point layer.

    Thin wrapper around arcpy.CalibrateRoutes_lr: calibrates routesMZ
    (identified by idField) using refPointLayer's "LABEL" route-ID field
    and "REF_VALUE" measure field, writing the result to outFeatureName
    with the "MEASURES" calibration method.
    """
    calibrate_args = (
        routesMZ,
        idField,
        refPointLayer,
        "LABEL",       # route identifier field on the calibration points
        "REF_VALUE",   # measure value field on the calibration points
        outFeatureName,
        "MEASURES",    # calibration method
    )
    arcpy.CalibrateRoutes_lr(*calibrate_args)
         

# Route-creation pipeline: strip curves, build the M/Z-aware route, flip to
# match reference points, calibrate, then merge route parts into the output GDB.
curvesRemoved = removeCurves(inRoutesFullPath, newRtIdFieldName)
mz_RoutesReturn = createMZ_Route(curvesRemoved, outFc, zSurfacePath, newRtIdFieldName)
tempOutsList.append(outFc)
routeFlipTemp(mz_RoutesReturn[0], newRtIdFieldName, referencePoints)
add3dLengthToM(mz_RoutesReturn[0], mz_RoutesReturn[1])
calibrationPointRoutes(mz_RoutesReturn[0], newRtIdFieldName, outFcNoMerge, referencePoints)
routePartMerge(outFcNoMerge, os.path.join(outDirectoryWorkspace, newOutputGDB), outFcMerge, lrsSchemaTemplate)

arcpy.AddMessage("Route creation completed")
arcpy.AddMessage("Final output routes located at: " + finalOutPath)

# Best-effort cleanup of intermediate layers; failures are reported but
# deliberately non-fatal. Narrowed from a bare `except:` so SystemExit /
# KeyboardInterrupt are no longer swallowed.
for layer in tempOutsList:
    try:
        arcpy.Delete_management(layer)
    except Exception:
        arcpy.AddWarning("Some temp layers did not delete")
# Example 3
def mainFunction(updateFolder,fileName,updateMode,geodatabase): # Get parameters from ArcGIS Desktop tool by seperating by comma e.g. (var1 is 1st parameter,var2 is 2nd parameter,var3 is 3rd parameter)  
    try:
        # --------------------------------------- Start of code --------------------------------------- #
        
        # Get the arcgis version
        arcgisVersion = arcpy.GetInstallInfo()['Version']   

        # If a specific file is provided
        if (fileName):
            latestFile = os.path.join(updateFolder, fileName)
        # Otherwise get the latest file in a folder
        else:
            # Get the newest zip file from the update folder
            latestFile = max(glob.iglob(updateFolder + r"\*.zip"), key=os.path.getmtime)
      
        # Setup geodatabase to load data into in temporary workspace
        tempFolder = arcpy.CreateFolder_management(arcpy.env.scratchFolder, "WebData-" + str(uuid.uuid1()))
        arcpy.AddMessage("Copying datasets...")    
          
        # Extract the zip file to a temporary location
        zip = zipfile.ZipFile(latestFile, mode="r")
        zip.extractall(str(tempFolder))

        # Loop through the files in the extracted folder
        for file in os.listdir(str(tempFolder)):               
            # If it's a shapefile
            if file.endswith(".shp"):
               # Get count of the source dataset
               datasetCount = arcpy.GetCount_management(os.path.join(str(tempFolder), file))
               eachFeatureclass = file.replace(".shp","")
          
               # Check Dataset record count is more than 0
               if (long(str(datasetCount)) > 0):
                   # If update mode is then copy, otherwise delete and appending records                
                   if (updateMode == "New"):                                           
                       # Logging
                       arcpy.AddMessage("Copying over feature class - " + os.path.join(geodatabase, eachFeatureclass) + "...")
                       if (enableLogging == "true"):
                          logger.info("Copying over feature class - " + os.path.join(geodatabase, eachFeatureclass) + "...")
                                
                       # Copy feature class into geodatabase using the same dataset name
                       arcpy.CopyFeatures_management(os.path.join(str(tempFolder), file), os.path.join(geodatabase, eachFeatureclass), "", "0", "0", "0")

                       # Get dataset count
                       datasetCount = arcpy.GetCount_management(os.path.join(geodatabase, eachFeatureclass)) 
                       arcpy.AddMessage("Dataset record count - " + str(datasetCount))
                       if (enableLogging == "true"):
                           logger.info("Dataset record count - " + str(datasetCount))   
                   else:
                        # If dataset exists in geodatabase, delete features and load in new data
                        if arcpy.Exists(os.path.join(geodatabase, eachFeatureclass)):
                            # Logging
                            arcpy.AddMessage("Updating feature class - " + os.path.join(geodatabase, eachFeatureclass) + "...")
                            if (enableLogging == "true"):
                               logger.info("Updating feature class - " + os.path.join(geodatabase, eachFeatureclass) + "...")
             
                            arcpy.DeleteFeatures_management(os.path.join(geodatabase, eachFeatureclass))
                            arcpy.Append_management(os.path.join(str(tempFolder), file), os.path.join(geodatabase, eachFeatureclass), "NO_TEST", "", "")

                            # Get dataset count
                            datasetCount = arcpy.GetCount_management(os.path.join(geodatabase, eachFeatureclass)) 
                            arcpy.AddMessage("Dataset record count - " + str(datasetCount))
                            if (enableLogging == "true"):
                               logger.info("Dataset record count - " + str(datasetCount))   
                        else:
                            # Log warning
                            arcpy.AddWarning("Warning: " + os.path.join(geodatabase, eachFeatureclass) + " does not exist. Copying over...")
                            # Logging
                            if (enableLogging == "true"):
                                logger.warning(os.path.join(geodatabase, eachFeatureclass) + " does not exist. Copying over...")
                                
                            # Copy feature class into geodatabase using the same dataset name
                            arcpy.CopyFeatures_management(os.path.join(str(tempFolder), file), os.path.join(geodatabase, eachFeatureclass), "", "0", "0", "0")           
               else:
                   arcpy.AddWarning("Dataset " + eachFeatureclass + " is empty and won't be copied...")                        
                   # Logging
                   if (enableLogging == "true"):
                       logger.warning("Dataset " + eachFeatureclass + " is empty and won't be copied...")
                               
            # If it's a FGDB
            if file.endswith(".gdb"):
                # Assign the geodatabase workspace and load in the datasets to the lists
                arcpy.env.workspace = file
                featureclassList = arcpy.ListFeatureClasses()   
                tableList = arcpy.ListTables()       
      
                # Load the feature classes into the geodatabase if at least one is in the geodatabase provided
                if (len(featureclassList) > 0):        
                    # Loop through the feature classes
                    for eachFeatureclass in featureclassList:
                       # Get count of the source dataset
                       datasetCount = arcpy.GetCount_management(eachFeatureclass)                   
                       # Check Dataset record count is more than 0
                       if (long(str(datasetCount)) > 0):
                           # Create a Describe object from the dataset
                           describeDataset = arcpy.Describe(eachFeatureclass)
                           # If update mode is then copy, otherwise delete and appending records                
                           if (updateMode == "New"):                                           
                               # Logging
                               arcpy.AddMessage("Copying over feature class - " + os.path.join(geodatabase, eachFeatureclass) + "...")
                               if (enableLogging == "true"):
                                  logger.info("Copying over feature class - " + os.path.join(geodatabase, eachFeatureclass) + "...")
                                        
                               # Copy feature class into geodatabase using the same dataset name
                               arcpy.CopyFeatures_management(eachFeatureclass, os.path.join(geodatabase, describeDataset.name), "", "0", "0", "0")

                               # Get dataset count
                               datasetCount = arcpy.GetCount_management(os.path.join(geodatabase, describeDataset.name)) 
                               arcpy.AddMessage("Dataset record count - " + str(datasetCount))
                               if (enableLogging == "true"):
                                   logger.info("Dataset record count - " + str(datasetCount))   
                           else:
                                # If dataset exists in geodatabase, delete features and load in new data
                                if arcpy.Exists(os.path.join(geodatabase, eachFeatureclass)):
                                    # Logging
                                    arcpy.AddMessage("Updating feature class - " + os.path.join(geodatabase, eachFeatureclass) + "...")
                                    if (enableLogging == "true"):
                                       logger.info("Updating feature class - " + os.path.join(geodatabase, eachFeatureclass) + "...")
                     
                                    arcpy.DeleteFeatures_management(os.path.join(geodatabase, eachFeatureclass))
                                    arcpy.Append_management(os.path.join(arcpy.env.workspace, eachFeatureclass), os.path.join(geodatabase, eachFeatureclass), "NO_TEST", "", "")

                                    # Get dataset count
                                    datasetCount = arcpy.GetCount_management(os.path.join(geodatabase, eachFeatureclass)) 
                                    arcpy.AddMessage("Dataset record count - " + str(datasetCount))
                                    if (enableLogging == "true"):
                                       logger.info("Dataset record count - " + str(datasetCount))   
                                else:
                                    # Log warning
                                    arcpy.AddWarning("Warning: " + os.path.join(geodatabase, eachFeatureclass) + " does not exist. Copying over...")
                                    # Logging
                                    if (enableLogging == "true"):
                                        logger.warning(os.path.join(geodatabase, eachFeatureclass) + " does not exist. Copying over...")
                                        
                                    # Copy feature class into geodatabase using the same dataset name
                                    arcpy.CopyFeatures_management(eachFeatureclass, os.path.join(geodatabase, describeDataset.name), "", "0", "0", "0")           
                       else:
                           arcpy.AddWarning("Dataset " + eachFeatureclass + " is empty and won't be copied...")                        
                           # Logging
                           if (enableLogging == "true"):
                               logger.warning("Dataset " + eachFeatureclass + " is empty and won't be copied...")

                                                         
                if (len(tableList) > 0):    
                    # Loop through of the tables
                    for eachTable in tableList:
                       # Get count of the source dataset
                       datasetCount = arcpy.GetCount_management(eachTable)                   
                       # Check Dataset record count is more than 0
                       if (long(str(datasetCount)) > 0):
                           # Create a Describe object from the dataset
                           describeDataset = arcpy.Describe(eachTable)
                           # If update mode is then copy, otherwise delete and appending records                
                           if (updateMode == "New"):
                               # Logging
                               arcpy.AddMessage("Copying over table - " + os.path.join(geodatabase, eachTable) + "...")
                               if (enableLogging == "true"):
                                  logger.info("Copying over table - " + os.path.join(geodatabase, eachTable) + "...")
                                  
                               # Copy table into geodatabase using the same dataset name
                               arcpy.TableSelect_analysis(eachTable, os.path.join(geodatabase, describeDataset.name), "")

                               # Get dataset count
                               datasetCount = arcpy.GetCount_management(os.path.join(geodatabase, describeDataset.name)) 
                               arcpy.AddMessage("Dataset record count - " + str(datasetCount))
                               if (enableLogging == "true"):
                                   logger.info("Dataset record count - " + str(datasetCount))   
                           else:
                                # If dataset exists in geodatabase, delete features and load in new data
                                if arcpy.Exists(os.path.join(geodatabase, eachTable)):
                                    # Logging
                                    arcpy.AddMessage("Updating table - " + os.path.join(geodatabase, eachTable) + "...")
                                    if (enableLogging == "true"):
                                       logger.info("Updating table - " + os.path.join(geodatabase, eachTable) + "...")

                                    arcpy.DeleteFeatures_management(os.path.join(geodatabase, eachTable))
                                    arcpy.Append_management(os.path.join(arcpy.env.workspace, eachTable), os.path.join(geodatabase, eachTable), "NO_TEST", "", "")

                                    # Get dataset count
                                    datasetCount = arcpy.GetCount_management(os.path.join(geodatabase, eachTable)) 
                                    arcpy.AddMessage("Dataset record count - " + str(datasetCount))
                                    if (enableLogging == "true"):
                                       logger.info("Dataset record count - " + str(datasetCount))   
                                else:
                                    # Log warning
                                    arcpy.AddWarning("Warning: " + os.path.join(geodatabase, eachTable) + " does not exist. Copying over...")
                                    # Logging
                                    if (enableLogging == "true"):
                                        logger.warning(os.path.join(geodatabase, eachTable) + " does not exist. Copying over...")

                                    # Copy table into geodatabase using the same dataset name
                                    arcpy.TableSelect_analysis(eachTable, os.path.join(geodatabase, describeDataset.name), "")
                                    
        #################### Custom code for WCRC and BDC ####################
                           # For WCRC data updates
                           if "wcrc" in updateFolder.lower():
                               # For the property details view from WCRC
                               if "vw_propertydetails" in eachTable.lower():
                                   # Copy property details view into enterprise geodatabase
                                   arcpy.TableSelect_analysis(eachTable, os.path.join("D:\Data\Database Connections\GISData@WCCHCGIS1 (gisadmin).sde", "vw_WCRCPropertyDetails"), "")
                                   # Copy property spatial view into file geodatabase and dissolve on valuation ID
                                   arcpy.AddMessage("Copying over feature class - " + os.path.join("D:\Data\WCRC.gdb", "Property") + "...")
                                   if (enableLogging == "true"):
                                      logger.info("Copying over feature class - " + os.path.join("D:\Data\WCRC.gdb", "Property") + "...") 
                                   arcpy.CopyFeatures_management(os.path.join("D:\Data\Database Connections\GISData@WCCHCGIS1 (gisadmin).sde", "vwWCRCProperty"), os.path.join("D:\Data\WCRC.gdb", "PropertyParcel"), "", "0", "0", "0")
                                   arcpy.Dissolve_management(os.path.join("D:\Data\WCRC.gdb", "PropertyParcel"), os.path.join("D:\Data\WCRC.gdb", "Property"), "ValuationID", "", "MULTI_PART", "DISSOLVE_LINES")
                                   arcpy.JoinField_management(os.path.join("D:\Data\WCRC.gdb", "Property"), "ValuationID", os.path.join("D:\Data\WCRC.gdb", "PropertyParcel"), "ValuationID", "")

                                   # Get dataset count
                                   datasetCount = arcpy.GetCount_management(os.path.join("D:\Data\WCRC.gdb", "Property")) 
                                   arcpy.AddMessage("Dataset record count - " + str(datasetCount))
                                   if (enableLogging == "true"):
                                       logger.info("Dataset record count - " + str(datasetCount))   
                           # For BDC data updates
                           if "bdc" in updateFolder.lower():                             
                               # For the property match table from BDC and WCRC
                               if "matchtable" in eachTable.lower():
                                   # Update the West Coast match table
                                   # WCRC match table - Copy table and tidy up the fields
                                   arcpy.TableSelect_analysis("D:\Data\FTP\WCRC\WCRCPropertyToParcel.csv", os.path.join("D:\Data\WCRC.gdb", "CoreLogic_PropertyToParcel"), "")
                                   arcpy.AddField_management(os.path.join("D:\Data\WCRC.gdb", "CoreLogic_PropertyToParcel"), "ValuationID", "TEXT", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
                                   arcpy.AddField_management(os.path.join("D:\Data\WCRC.gdb", "CoreLogic_PropertyToParcel"), "ParcelID", "TEXT", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
                                   arcpy.CalculateField_management(os.path.join("D:\Data\WCRC.gdb", "CoreLogic_PropertyToParcel"), "ValuationID", "!ValRef_Formatted!", "PYTHON_9.3", "")
                                   arcpy.CalculateField_management(os.path.join("D:\Data\WCRC.gdb", "CoreLogic_PropertyToParcel"), "ParcelID", "!Parcel_ID!", "PYTHON_9.3", "")
                                   arcpy.DeleteField_management(os.path.join("D:\Data\WCRC.gdb", "CoreLogic_PropertyToParcel"), "QPID;Roll;Assessment;Suffix;ValRef_Formatted;Apportionment;Category;Building_Floor_Area;Building_Site_Cover;Parcel_ID;Physical_Address;Physical_Suburb;Physical_City;Legal_Description")
                                       
                                   # BDC match table - Tidy up the fields
                                   arcpy.AddField_management(eachTable, "ValuationID", "TEXT", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
                                   arcpy.AddField_management(eachTable, "ParcelID", "TEXT", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
                                   arcpy.CalculateField_management(eachTable, "ValuationID", "!val_id!", "PYTHON_9.3", "")
                                   arcpy.CalculateField_management(eachTable, "ParcelID", "!PAR_ID!", "PYTHON_9.3", "")
                                   arcpy.DeleteField_management(eachTable, "PERIMETER;LEGAL_ID;PAR_ID;LEGAL;HOW;ASSESS;FLAG;COMMENT;POLYGONID;Edited_By;Edit_Date;Descriptio;OBJECTID_12;LEGAL_1;OBJECTID_12_13;val_id;val1;root_val_id;ra_unique_id;POINT_X;POINT_Y")
                                   # Copy out the WCRC match table
                                   arcpy.TableSelect_analysis(os.path.join("D:\Data\WCRC.gdb", "CoreLogic_PropertyToParcel"), "in_memory\\PropertyToParcel", "")
                                   # Join the Buller match table
                                   arcpy.JoinField_management("in_memory\\PropertyToParcel", "ValuationID", eachTable, "ValuationID", "ValuationID")
                                   # Select out the non-Buller records
                                   arcpy.TableSelect_analysis("in_memory\\PropertyToParcel", "in_memory\\PropertyToParcel_NoBDC", "ValuationID_1 IS NULL")
                                   # Merge Buller match table with the WCRC match table 
                                   arcpy.Merge_management("in_memory\\PropertyToParcel_NoBDC;" + eachTable, os.path.join("D:\Data\Database Connections\GISData@WCCHCGIS1 (gisadmin).sde", "PropertyToParcel"), "")
                                   arcpy.DeleteField_management(os.path.join("D:\Data\Database Connections\GISData@WCCHCGIS1 (gisadmin).sde", "PropertyToParcel"), "ValuationID_1")

                               # For the property view from BDC
                               if "vwproperty" in eachTable.lower():
                                   # Copy property view into enterprise geodatabase
                                   arcpy.TableSelect_analysis(eachTable, os.path.join("D:\Data\Database Connections\GISData@WCCHCGIS1 (gisadmin).sde", "vw_BDCProperty"), "")
                                   # Copy property spatial view into file geodatabase and dissolve on valuation ID
                                   arcpy.AddMessage("Copying over feature class - " + os.path.join("D:\Data\BDC.gdb", "Property") + "...")
                                   if (enableLogging == "true"):
                                       logger.info("Copying over feature class - " + os.path.join("D:\Data\BDC.gdb", "Property") + "...")
                                   arcpy.CopyFeatures_management(os.path.join("D:\Data\Database Connections\GISData@WCCHCGIS1 (gisadmin).sde", "vwBDCProperty"), os.path.join("D:\Data\BDC.gdb", "PropertyParcel"), "", "0", "0", "0")
                                   arcpy.Dissolve_management(os.path.join("D:\Data\BDC.gdb", "PropertyParcel"), os.path.join("D:\Data\BDC.gdb", "Property"), "ValuationID", "", "MULTI_PART", "DISSOLVE_LINES")
                                   arcpy.JoinField_management(os.path.join("D:\Data\BDC.gdb", "Property"), "ValuationID", os.path.join("D:\Data\BDC.gdb", "PropertyParcel"), "ValuationID", "")

                                   # Get dataset count
                                   datasetCount = arcpy.GetCount_management(os.path.join("D:\Data\BDC.gdb", "Property")) 
                                   arcpy.AddMessage("Dataset record count - " + str(datasetCount))
                                   if (enableLogging == "true"):
                                       logger.info("Dataset record count - " + str(datasetCount))
                                       
                               # For the resource consent view from BDC
                               if "vwresourceconsent" in eachTable.lower():
                                   # Copy resource consent view into enterprise geodatabase
                                   arcpy.TableSelect_analysis(eachTable, os.path.join("D:\Data\Database Connections\GISData@WCCHCGIS1 (gisadmin).sde", "vw_BDCResourceConsent"), "")
                                   # Copy resource consent spatial view into file geodatabase
                                   arcpy.AddMessage("Copying over feature class - " + os.path.join("D:\Data\BDC.gdb", "ResourceConsent") + "...")
                                   if (enableLogging == "true"):
                                       logger.info("Copying over feature class - " + os.path.join("D:\Data\BDC.gdb", "ResourceConsent") + "...")
                                   arcpy.CopyFeatures_management(os.path.join("D:\Data\Database Connections\GISData@WCCHCGIS1 (gisadmin).sde", "vwBDCResourceConsent"), "in_memory\ResourceConsent", "", "0", "0", "0")
                                   arcpy.Dissolve_management("in_memory\ResourceConsent", os.path.join("D:\Data\BDC.gdb", "ResourceConsent"), "ConsentID", "", "MULTI_PART", "DISSOLVE_LINES")
                                   arcpy.JoinField_management(os.path.join("D:\Data\BDC.gdb", "ResourceConsent"), "ConsentID", "in_memory\ResourceConsent", "ConsentID", "")

                                    # Get dataset count
                                   datasetCount = arcpy.GetCount_management(os.path.join("D:\Data\BDC.gdb", "ResourceConsent")) 
                                   arcpy.AddMessage("Dataset record count - " + str(datasetCount))
                                   if (enableLogging == "true"):
                                       logger.info("Dataset record count - " + str(datasetCount))
                                       
                               # For the building consent view from BDC
                               if "vwbuildingconsent" in eachTable.lower():
                                   # Copy building consent view into enterprise geodatabase
                                   arcpy.TableSelect_analysis(eachTable, os.path.join("D:\Data\Database Connections\GISData@WCCHCGIS1 (gisadmin).sde", "vw_BDCBuildingConsent"), "")
                                   # Copy building consent spatial view into file geodatabase
                                   arcpy.AddMessage("Copying over feature class - " + os.path.join("D:\Data\BDC.gdb", "BuildingConsent") + "...")
                                   if (enableLogging == "true"):
                                       logger.info("Copying over feature class - " + os.path.join("D:\Data\BDC.gdb", "BuildingConsent") + "...")
                                   arcpy.CopyFeatures_management(os.path.join("D:\Data\Database Connections\GISData@WCCHCGIS1 (gisadmin).sde", "vwBDCBuildingConsent"), "in_memory\BuildingConsent", "", "0", "0", "0")
                                   arcpy.Dissolve_management("in_memory\BuildingConsent", os.path.join("D:\Data\BDC.gdb", "BuildingConsent"), "ConsentID", "", "MULTI_PART", "DISSOLVE_LINES")
                                   arcpy.JoinField_management(os.path.join("D:\Data\BDC.gdb", "BuildingConsent"), "ConsentID", "in_memory\BuildingConsent", "ConsentID", "")

                                    # Get dataset count
                                   datasetCount = arcpy.GetCount_management(os.path.join("D:\Data\BDC.gdb", "BuildingConsent")) 
                                   arcpy.AddMessage("Dataset record count - " + str(datasetCount))
                                   if (enableLogging == "true"):
                                       logger.info("Dataset record count - " + str(datasetCount))
                                       
                               # For the licence view from BDC
                               if "vwlicence" in eachTable.lower():
                                   # Copy licence view into enterprise geodatabase
                                   arcpy.TableSelect_analysis(eachTable, os.path.join("D:\Data\Database Connections\GISData@WCCHCGIS1 (gisadmin).sde", "vw_BDCLicence"), "Valuation_No <> ''")
                                   # Copy licence spatial view into file geodatabase
                                   arcpy.AddMessage("Copying over feature class - " + os.path.join("D:\Data\BDC.gdb", "Licence") + "...")
                                   if (enableLogging == "true"):
                                       logger.info("Copying over feature class - " + os.path.join("D:\Data\BDC.gdb", "Licence") + "...")
                                   arcpy.CopyFeatures_management(os.path.join("D:\Data\Database Connections\GISData@WCCHCGIS1 (gisadmin).sde", "vwBDCLicence"), "in_memory\Licence", "", "0", "0", "0")
                                   arcpy.Dissolve_management("in_memory\Licence", os.path.join("D:\Data\BDC.gdb", "Licence"), "LicenceNo", "", "MULTI_PART", "DISSOLVE_LINES")
                                   arcpy.JoinField_management(os.path.join("D:\Data\BDC.gdb", "Licence"), "LicenceNo", "in_memory\Licence", "LicenceNo", "")

                                    # Get dataset count
                                   datasetCount = arcpy.GetCount_management(os.path.join("D:\Data\BDC.gdb", "Licence")) 
                                   arcpy.AddMessage("Dataset record count - " + str(datasetCount))
                                   if (enableLogging == "true"):
                                       logger.info("Dataset record count - " + str(datasetCount))
                                       
                                # For the LIM view from BDC
                               if "vwlim" in eachTable.lower():
                                   # Copy lim view into enterprise geodatabase
                                   arcpy.TableSelect_analysis(eachTable, os.path.join("D:\Data\Database Connections\GISData@WCCHCGIS1 (gisadmin).sde", "vw_BDCLIM"), "")            
                                   # Copy lim spatial view into file geodatabase
                                   arcpy.AddMessage("Copying over feature class - " + os.path.join("D:\Data\BDC.gdb", "LIM") + "...")
                                   if (enableLogging == "true"):
                                       logger.info("Copying over feature class - " + os.path.join("D:\Data\BDC.gdb", "LIM") + "...")
                                   arcpy.CopyFeatures_management(os.path.join("D:\Data\Database Connections\GISData@WCCHCGIS1 (gisadmin).sde", "vwBDCLIM"), "in_memory\LIM", "", "0", "0", "0")
                                   arcpy.Dissolve_management("in_memory\LIM", os.path.join("D:\Data\BDC.gdb", "LIM"), "RecordID", "", "MULTI_PART", "DISSOLVE_LINES")
                                   arcpy.JoinField_management(os.path.join("D:\Data\BDC.gdb", "LIM"), "RecordID", "in_memory\LIM", "RecordID", "")

                                   # Get dataset count
                                   datasetCount = arcpy.GetCount_management(os.path.join("D:\Data\BDC.gdb", "LIM")) 
                                   arcpy.AddMessage("Dataset record count - " + str(datasetCount))
                                   if (enableLogging == "true"):
                                       logger.info("Dataset record count - " + str(datasetCount))  
                       else:
                           arcpy.AddWarning("Dataset " + eachTable + " is empty and won't be copied...")                        
                           # Logging
                           if (enableLogging == "true"):
                               logger.warning("Dataset " + eachTable + " is empty and won't be copied...")             
        
        # --------------------------------------- End of code --------------------------------------- #
        # If called from gp tool return the arcpy parameter   
        if __name__ == '__main__':
            # Return the output if there is any
            if output:
                # If ArcGIS desktop installed
                if (arcgisDesktop == "true"):
                    arcpy.SetParameter(1, output)
                # ArcGIS desktop not installed
                else:
                    return output 
        # Otherwise return the result          
        else:
            # Return the output if there is any
            if output:
                return output      
        # Logging
        if (enableLogging == "true"):
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file        
            logMessage.flush()
            logMessage.close()
            logger.handlers = []
    # If arcpy error
    except arcpy.ExecuteError:           
        # Build and show the error message
        errorMessage = arcpy.GetMessages(2)   
        printMessage(errorMessage,"error")           
        # Logging
        if (enableLogging == "true"):
            # Log error          
            logger.error(errorMessage)
            # Log end of process
            logger.info("Process ended.")            
            # Remove file handler and close log file        
            logMessage.flush()
            logMessage.close()
            logger.handlers = []   
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage)
    # If python error
    except Exception as e:
        errorMessage = ""         
        # Build and show the error message
        # If many arguments
        if (e.args):
            for i in range(len(e.args)):        
                if (i == 0):
                    # Python version check
                    if sys.version_info[0] >= 3:
                        # Python 3.x
                        errorMessage = str(e.args[i]).encode('utf-8').decode('utf-8')
                    else:
                        # Python 2.x
                        errorMessage = unicode(e.args[i]).encode('utf-8')
                else:
                    # Python version check
                    if sys.version_info[0] >= 3:
                        # Python 3.x
                        errorMessage = errorMessage + " " + str(e.args[i]).encode('utf-8').decode('utf-8')
                    else:
                        # Python 2.x
                        errorMessage = errorMessage + " " + unicode(e.args[i]).encode('utf-8')
        # Else just one argument
        else:
            errorMessage = e
        printMessage(errorMessage,"error")
        # Logging
        if (enableLogging == "true"):
            # Log error            
            logger.error(errorMessage)
            # Log end of process
            logger.info("Process ended.")            
            # Remove file handler and close log file        
            logMessage.flush()
            logMessage.close()
            logger.handlers = []   
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage)            
            RclsSomGLGras = ReclassifySomGxG(somGLG, RngGLG, VegKode, "somGLG")
            uitTms = RclsSomGHGras * RclsSomGLGras
            uitTms.save(VegKansResult)

        ap.Delete_management(rcl_f_nm)
        del somGLG, somGHG, RclsSomGHGras, RclsSomGLGras, uitTms

# Opruimen
# --------
    ap.AddMessage("\n")
    try:
        if ap.Exists(tmpdir):
            rmtree(tmpdir)
            ap.RefreshCatalog(os.path.dirname(tmpdir))
    except:
        ap.AddWarning("Mogelijk probleempje bij opruimen tijdelijke TEMPdir '"+tmpdir+"'; " \
                      "deze mag gewist worden.\n" +str(sys.exc_value))

except FOUT as f:
    ap.AddError("\nFOUT-> " + f.message + "\n")
    if ap.Exists(tmpdir):
        rmtree(tmpdir)
        ap.AddWarning("Mogelijk blijft een tijdelijke TEMPdir '"+tmpdir+ \
                      "' achter; deze mag gewist worden.\n")

except:
    ap.AddError("\nFOUT in blok " + prgblok + "\n" + str(sys.exc_type) + ": " +
                str(sys.exc_value))
    if ap.Exists(tmpdir):
        rmtree(tmpdir)
        ap.AddWarning("Mogelijk blijft een tijdelijke TEMPdir '"+tmpdir+ \
                      "' achter; deze mag gewist worden.\n")
예제 #5
0
    #oProcessor = None
    # Command-line driver: parse arguments, run ClassOp over the ComID H/Q
    # file, and always report the finish time (even on failure).
    try:
        iQHType = flooddsconfig.QHType.H  #default to check only the H
        debugLevel = 0
        if (len(sys.argv) < 2):
            # BUG FIX: the two literals below were implicitly concatenated
            # (missing comma), leaving "{} {} {}" with only two arguments,
            # which raised IndexError whenever the usage message was printed.
            arcpy.AddMessage("Usage: {} {} {}".format(
                sys.argv[0], "ComIDHQFile",
                "OutFile (Optional)"))
            sys.exit(0)
        pQHFile = sys.argv[1]
        # Output file defaults to "<input>_max<ext>" when not supplied.
        pOutFile = ""
        if (len(sys.argv) > 2):
            pOutFile = sys.argv[2]
        if (pOutFile == ""):
            (pOutFile, ext) = os.path.splitext(pQHFile)
            pOutFile = "{}_max{}".format(pOutFile, ext)

        #pParams = (pQHFile, pFilter, pOutFile, iQHType, 0)
        pProcessor = ClassOp()
        pProcessor.DebugLevel = debugLevel
        (sOK, pOutFile, sMsg) = pProcessor.execute(pQHFile, pOutFile, iQHType)
        del pProcessor
    except arcpy.ExecuteError:
        # Geoprocessing failure: surface arcpy's own error messages.
        arcpy.AddError("{} {}".format(arcpy.GetMessages(2), trace()))
    except:
        # Any other failure: warn but do not abort the wrapping script.
        arcpy.AddWarning("{} {}".format(arcpy.GetMessages(2), trace()))
    finally:
        dt = datetime.datetime.now()
        arcpy.AddMessage('Finished at {}'.format(
            dt.strftime("%Y-%m-%d %H:%M:%S")))
    def traiterIdentifiant(self, no_nc, no_tache, procdesc, ty_produit,
                           identifiant, ed_debut, ver_debut, ed_fin, ver_fin):
        #-------------------------------------------------------------------------------------
        """Run the closing process for one identifier of the non-conformity.

        Parameters:
        -----------
        no_nc           : Number of the non-conformity to close.
        no_tache        : Alloy task number corresponding to a user request.
        procdesc        : Label describing the process used to fix the non-conformity.
        ty_produit      : Type of product processed.
        identifiant     : Tiling identifier processed.
        ed_debut        : Start edition of the non-conformity.
        ver_debut       : Start version of the non-conformity.
        ed_fin          : End edition of the non-conformity.
        ver_fin         : End version of the non-conformity.

        Variables:
        ----------
        self.CompteSib  : Utility object managing SIB connections.
        self.Sib        : Utility object for SIB services.
        resultat        : Result of the SIB query.
        ed_cour         : Current edition of the data set.
        ver_cour        : Current version of the data set.
        """
        # NOTE(review): every SQL statement below is built by string
        # concatenation; if any argument can come from untrusted input this is
        # injectable -- consider bind variables.

        #-----------------------------------
        #Validate the Proc/Desc label
        arcpy.AddMessage("  -Valider le Proc/Desc : " + procdesc)
        sql = "SELECT * FROM F235_VP WHERE TY_PRODUIT='" + ty_produit + "' AND CD_CHAMP='procdesc' AND CD_VALEUR='" + procdesc + "'"
        arcpy.AddMessage("  " + sql)
        #Run the query
        resultat = self.Sib.requeteSib(sql)
        #Check that the label is registered for this product
        if (len(resultat) == 0):
            #Raise an error
            raise Exception(
                "La description de la procédure utilisée pour le produit " +
                ty_produit + " est invalide!")

        #-----------------------------------
        #Build the SQL query (extract the edition/version of the current data set and the metadata version)
        arcpy.AddMessage(
            "  -Extraire l'édition et la version du jeu courant ...")
        sql = "SELECT ED, VER, VER_META FROM F235_PR WHERE TY_PRODUIT='" + ty_produit + "' AND IDENTIFIANT='" + identifiant + "' AND JEU_COUR=1"
        arcpy.AddMessage("  " + sql)
        #Run the SQL query
        resultat2 = self.Sib.requeteSib(sql)
        #Check that a current data set exists.
        #BUG FIX: this previously tested len(resultat) (the Proc/Desc query
        #above), so an empty resultat2 was never detected and the next line
        #crashed with an IndexError instead of the intended exception.
        if (len(resultat2) == 0):
            raise Exception(
                "Aucun jeu courant n'est présent dans la table F235_pr")
        #Extract the current edition and version values
        ed_cour = resultat2[0][0]
        ver_cour = resultat2[0][1]
        ver_meta = resultat2[0][2]
        #Check whether the start edition.version exceeds the current edition.version
        if float(str(ed_debut) + "." +
                 str(ver_debut)) > float(str(ed_cour) + "." + str(ver_cour)):
            #Raise an error
            raise Exception("Ed.Ver_Début:" +
                            str(float(str(ed_debut) + "." + str(ver_debut))) +
                            " > Ed.Ver_Cour:" +
                            str(float(str(ed_cour) + "." + str(ver_cour))))
        #Display the values
        arcpy.AddMessage("  " + str(resultat2))

        #-----------------------------------
        #Extract the NO_SEQ of the procDesc
        arcpy.AddMessage("  -Extraire le NO_SEQ du Proc/Desc ...")
        sql = "SELECT MAX(NO_SEQ) + 1 FROM F235_PS WHERE TY_PRODUIT='" + ty_produit + "' AND IDENTIFIANT='" + identifiant + "' AND ED=" + str(
            ed_cour) + " AND VER=" + str(ver_cour)
        arcpy.AddMessage("  " + sql)
        #Run the SQL query
        resultat3 = self.Sib.requeteSib(sql)
        #Define the NO_SEQ.
        #NOTE(review): MAX(NO_SEQ)+1 is NULL when no row exists, which would
        #make no_seq the literal string 'None' -- confirm a row is guaranteed.
        no_seq = str(resultat3[0][0])

        #-----------------------------------
        #Create the Proc/Desc label
        arcpy.AddMessage(
            "  -Créer l'étiquette du Proc/Desc dans les métadonnées ...")
        sql = "INSERT INTO F235_PS Values (P0G03_UTL.PU_HORODATEUR, '" + no_tache + "', SYSDATE, SYSDATE, '" + ty_produit + "', '" + identifiant + "', " + str(
            ed_cour
        ) + ", " + str(
            ver_cour
        ) + ", " + no_seq + ", '{$PS$" + procdesc + "}', TO_CHAR(SYSDATE, 'YYYYMMDD'), 0)"
        arcpy.AddWarning("  " + sql)
        #Run the command
        self.Sib.execute(sql)

        #-----------------------------------
        #Update the metadata date
        arcpy.AddMessage("  -Modifier la date des métadonnées ...")
        sql = "UPDATE F235_MR SET ETAMPE='" + no_tache + "', DT_M=SYSDATE, DT_METADATA=TO_CHAR(SYSDATE, 'YYYYMMDD') WHERE TY_PRODUIT='" + ty_produit + "' AND IDENTIFIANT='" + identifiant + "' AND ED=" + str(
            ed_cour) + " AND VER=" + str(ver_cour)
        arcpy.AddWarning("  " + sql)
        #Run the command
        self.Sib.execute(sql)

        #-----------------------------------
        #Increment the metadata version
        ver_meta = ver_meta + 1
        #Build the SQL command that updates the metadata version
        arcpy.AddMessage("  -Modifier la version des métadonnées ...")
        sql = "UPDATE F235_PR SET ETAMPE='" + no_tache + "', DT_M=SYSDATE, VER_META=" + str(
            ver_meta
        ) + " WHERE TY_PRODUIT='" + ty_produit + "' AND IDENTIFIANT='" + identifiant + "' AND JEU_COUR=1"
        arcpy.AddWarning("  " + sql)
        #Run the command
        self.Sib.execute(sql)

        #-----------------------------------
        #Update the end edition and version of the non-conformity
        arcpy.AddMessage(
            "  -Modifier l'édition et version de fin de non-conformité ...")
        sql = "UPDATE F705_PR SET ETAMPE='" + no_tache + "', DT_M=SYSDATE, ED_FIN=" + str(
            ed_cour
        ) + ", VER_FIN=" + str(
            ver_cour
        ) + " WHERE NO_NC='" + no_nc + "' AND TY_PRODUIT='" + ty_produit + "' AND IDENTIFIANT='" + identifiant + "'"
        arcpy.AddWarning("  " + sql)
        #Run the command
        self.Sib.execute(sql)

        # End of processing
        return
예제 #7
0
    # Project the input points to Web Mercator and normalize the left/right
    # bearing limits before computing the fan geometry.
    srInputPoints = arcpy.Describe(copyInFeatures).spatialReference
    arcpy.AddMessage("Projecting input points to Web Mercator ...")
    arcpy.Project_management(copyInFeatures, prjInFeature, webMercator)
    deleteme.append(prjInFeature)  # track intermediate for later cleanup
    tempFans = os.path.join(scratch, "tempFans")

    # Sentinel values; both are overwritten by the branches below.
    initialBearing = -9999.0
    traversal = -9999.0
    # get mod of 360.0 (normalize bearings into [0, 360))
    leftBearing = math.fmod(leftBearing, 360.0)
    rightBearing = math.fmod(rightBearing, 360.0)

    # calc traversal and center (initial) bearing from limits
    if (leftBearing == rightBearing):
        # A zero-width fan is degenerate; widen it by one degree.
        arcpy.AddWarning(
            "Left and Right Bearings are equal! Applying 1 degree offset to Right Bearing."
        )
        rightBearing += 1

    if (leftBearing < rightBearing):
        traversal = rightBearing - leftBearing
        initialBearing = math.fmod(leftBearing + (traversal / 2.0), 360.0)

    else:
        # if left bearing > right bearing: ex L:180, R:90, eg. traversal is 270deg with initial bearing at 315 deg
        # (the fan wraps through north/0 degrees)
        traversal = (360.0 - leftBearing) + rightBearing
        initialBearing = math.fmod(leftBearing + (traversal / 2.0), 360.0)

    # change Angles from geographic to arithmetic for geometry calc
    initialAngle = Geo2Arithmetic(initialBearing)
    leftAngle = Geo2Arithmetic(leftBearing)
def Get_Data_Type():  #Determine what type of data set is being evaluated
    """Classify the input data set being evaluated.

    Returns:
        (myDataType, myFeatType) -- e.g. ("Vector", "Polygon"),
        ("Raster", "None"), ("XML File", "None").

    Exits the tool (sys.exit(1)) for feature datasets and for unrecognized
    dataset types. Relies on the module-level globals InputIsXML, InputIsCSV,
    InputIsExcel and desc (an arcpy Describe object).

    Refactor notes: the original elif ladders are collapsed into lookup
    tables and the unreachable trailing else branch (every reachable
    myDataType value was already handled) has been removed; observable
    behavior is unchanged.
    """
    ### File-based inputs need no Describe object.
    if InputIsXML:
        return "XML File", "None"
    if InputIsCSV or InputIsExcel:
        # NOTE(review): Excel input was labeled "CSV File" in the original
        # code; preserved as-is since downstream code may key on this value.
        return "CSV File", "None"

    ### Define type of data set from the Describe object.
    if desc.DatasetType == "FeatureDataset":
        arcpy.AddWarning("!!!!!!!")
        arcpy.AddWarning(
            "This is a feature dataset (e.g., a coverage or multiple feature classes).  This tool can only be run on feature classes."
        )
        arcpy.AddWarning("!!!!!!!")
        sys.exit(1)

    dataset_type_map = {
        "RasterDataset": "Raster",
        "FeatureClass": "Vector",
        # "ShapeFile" does not seem to occur any more, but keep for now.
        "ShapeFile": "Vector",
        "Table": "Table",
        "GeometricNetwork": "GeometricNetwork",
    }
    myDataType = dataset_type_map.get(desc.DatasetType)
    if myDataType is None:
        arcpy.AddWarning(
            "The provided data set does not appear to be a valid input. Please review the tool documentation."
        )
        sys.exit(1)

    ### Define type of shape for non raster datasets (vector data only).
    if myDataType != "Vector":
        return myDataType, "None"

    shape_type_map = {
        "Polygon": "Polygon",
        "Polyline": "Polyline",
        "Point": "Point",
        "MultiPoint": "Point",
        "Multipoint": "Point",
    }
    myFeatType = shape_type_map.get(desc.shapeType)
    if myFeatType is None:
        arcpy.AddWarning(
            "The feature type for the provided data set could not be determined. It will be set to 'None'."
        )
        myFeatType = "None"

    ### Return desired objects
    return myDataType, myFeatType
예제 #9
0
# Default: nlcd_2011_lc_sdm
outGDB = arcpy.GetParameterAsText(6)  # Geodatabase to hold final products
scratchGDB = arcpy.GetParameterAsText(
    7)  # Geodatabase to hold intermediate products

# Additional script parameters and environment settings
arcpy.env.overwriteOutput = True  # Existing data may be overwritten
arcpy.env.snapRaster = inSnap  # Make sure outputs align with snap raster
arcpy.env.extent = 'MAXOF'  # Make sure outputs are not truncated
# All outputs use the snap raster's coordinate system.
outCS = arcpy.Describe(inSnap).SpatialReference
arcpy.env.outputCoordinateSystem = outCS

# Validate that snap raster has NAD83 datum.
# Warn-and-continue only: a mismatched datum does not stop the run.
if outCS.GCS.Name != 'GCS_North_American_1983':
    arcpy.AddWarning(
        'NHD data use the NAD83 datum, but your snap raster has a different datum.'
    )
    arcpy.AddWarning('Proceeding, but the resulting raster may be suspect.')
def PopFCodeList(SelFld):
    """Return a '"FCode" in (...)' SQL clause for rows flagged in *SelFld*.

    Reads the module-level table inFCodes, collecting every FCode value
    whose *SelFld* column equals 1.
    """
    flag_clause = '"%s" = 1' % (SelFld)  # select only the flagged rows
    with arcpy.da.SearchCursor(inFCodes, 'FCode', flag_clause) as cursor:
        codes = [row[0] for row in cursor]
    # Render the Python list as a SQL tuple, e.g. [1, 2] -> (1, 2)
    codes = str(codes).replace('[', '(').replace(']', ')')
    return '"FCode" in %s' % codes
WorkingDir = arcpy.GetParameterAsText(1)
CreateStandAloneXML = arcpy.GetParameterAsText(
    2
)  #Toggle to delete/keep final modified stand-alone XML after it is re-imported into data set.
UseStartTemplate = arcpy.GetParameterAsText(
    3
)  #Toggle to run MD Wizard using the custom template saved by the user as the starting point.
CustomStarterTemplate = arcpy.GetParameterAsText(4)
# Fallback FGDC template shipped next to this script.
GenericTemplate = os.path.join(os.path.dirname(sys.argv[0]),
                               "GenericFGDCTemplate.xml")

#'Entity and Attribute Builder' tool and 'Metadata Editor' will be shipped with Toolbox.

# Locate the MetadataWizard application relative to this toolbox
# (assumes the toolbox lives two levels below the install root).
installDir = os.path.dirname(os.path.realpath(__file__))
root_dir = os.path.dirname(os.path.dirname(installDir))
arcpy.AddWarning("installDir :" + installDir)
arcpy.AddWarning("root_dir :" + root_dir)

pymdwiz_dir = os.path.join(root_dir, 'pymdwizard')
arcpy.AddWarning("pymdwiz_dir :" + pymdwiz_dir)

# Find the bundled Python: try 3.6 first, then fall back to 3.5.
python_dir = os.path.join(root_dir, 'Python36_64')
if not os.path.exists(python_dir):
    python_dir = os.path.join(root_dir, 'Python35_64')
    if not os.path.exists(python_dir):
        # The Python installation that ships with the application is missing
        msg = '\n\n' + '!' * 79
        msg += "\nCould not find the version of Python installed with the "
        msg += "MetadataWizard.\n{}\n\nPlease verify that the application"
        msg += " was installed correctly, and the toolbox has not been moved."
        msg += '\n' + '!' * 79 + "\n\n"
def ProcessRoutine(ArgVariables):
    """Main Function that operates the logic of the script.

    Drives the end-to-end metadata workflow for the input data set:
    export/translate any existing metadata to FGDC XML, update spatial and
    housekeeping elements, open the interactive Metadata Editor for the
    user, then re-import the edited record and clean up temporary files.

    NOTE(review): *ArgVariables* is never used; the function reads the
    module-level globals (InputData, WorkingDir, CreateStandAloneXML,
    UseStartTemplate, CustomStarterTemplate, GenericTemplate, InputIsXML,
    InputIsCSV, InputIsExcel, desc, python_exe, mdwiz_py_fname, ...) set
    by the surrounding script.
    """
    try:

        arcpy.AddMessage("\nInputData: " + InputData)
        arcpy.AddMessage("WorkingDir: " + WorkingDir)
        arcpy.AddMessage("CreateStandAloneXML: " + CreateStandAloneXML)
        arcpy.AddMessage("UseStartTemplate: " + UseStartTemplate)
        arcpy.AddMessage("StarterTemplate: " + CustomStarterTemplate)

        myDataType, myFeatType = Get_Data_Type(
        )  #Determine data type, and feature type if applicable
        arcpy.AddMessage("Data type being evaluated: " + myDataType)
        arcpy.AddMessage("Feature type being evaluated: " + myFeatType + "\n")

        SourceFile = os.path.split(os.path.splitext(InputData)[0])[
            1]  #The name of the input file. No extension. No full path.
        OriginalMDRecord = os.path.join(
            WorkingDir, SourceFile +
            "_Original.xml")  #File pointer to unmodified original.
        FGDCXML = os.path.join(
            WorkingDir, SourceFile +
            "_FGDC.xml")  #File pointer to the copy we will modify/update.

        #Create and keep 'Original' metadata copy in working directory.
        #Best-effort: failure to back up the original is not fatal.
        try:
            MDTools.CreateCopyMDRecord(InputData, OriginalMDRecord)
        except:
            pass

        #After we made a copy of the input's original MD, start process from custom template if it is toggled.
        #(Toggle arrives from the tool UI as the string "true"/"false".)
        if str(UseStartTemplate) == "true":
            try:
                arcpy.MetadataImporter_conversion(
                    CustomStarterTemplate, InputData
                )  # This imports only: does not convert and does not sync
                arcpy.AddMessage(
                    "The user's custom starter record is now being imported into the input data set...\n"
                )
            except:
                arcpy.AddWarning("!!!!!!!")
                arcpy.AddWarning(
                    "There was a problem importing from the Custom Starter Template. Please ensure that the file is here: ("
                    + CustomStarterTemplate + ")")
                arcpy.AddWarning("!!!!!!!\n")
                sys.exit(1)

        try:  #Extract any existing metadata, and translate to FGDC format if necessary.
            ExportFGDC_MD_Utility.GetMDContent(
                InputData, FGDCXML, WorkingDir
            )  #Export (translate if necessary) input metadata to FGDC format. Remove ESRI 'sync' & 'reminder' elements.
        except:
            #No existing metadata: fall back to the generic template.
            arcpy.AddMessage(
                "No metadata could be found for this record. A new file will be created.\n"
            )
            MDTools.CreateCopyMDRecord(GenericTemplate, FGDCXML)

        MDTools.RemoveNameSpace(
            FGDCXML
        )  #Eliminate namespace tags from root element in xml if present (appear when tool is run on spatial data sets).
        MDTools.CheckMasterNodes(
            FGDCXML
        )  #Ensure all the key FGDC-CSDGM nodes are present in the record.

        if not InputIsXML and not InputIsCSV and not InputIsExcel and desc.DatasetType != "Table":  #Only attempt to extract/update spatial properties from spatial data sets.

            try:
                GCS_ExtentList = Get_LatLon_BndBox()[1]
            except:
                arcpy.AddWarning("!!!!!!!")
                arcpy.AddWarning(
                    "A problem was encountered when attempting to retrieve the spatial extent of the input data set. Please review the tool documentation and ensure the data set is a valid input and ENSURE THAT A COORDINATE SYSTEM HAS BEEN DEFINED."
                )
                arcpy.AddWarning("!!!!!!!\n")
                sys.exit()

            #Get/Update Bounding Coordinates
            #NOTE(review): Get_LatLon_BndBox() is re-invoked several times
            #below; presumably idempotent, but the result could be cached.
            GCS_ExtentList = Get_LatLon_BndBox()[1]
            Local_ExtentList = Get_LatLon_BndBox()[0]
            if "nan" in str(Local_ExtentList):
                arcpy.AddWarning(
                    "No spatial extent could be found for the input spatial data set. Please review the 'Bounding Extent' in the final metadata record. (Values will be set to maximum global extent).\n"
                )
            arcpy.AddMessage("Bounding Coordinates (Local): " +
                             str(Local_ExtentList))
            arcpy.AddMessage("Bounding Coordinates (Geographic): " +
                             str(GCS_ExtentList) + "\n")

            WestBC = Get_LatLon_BndBox()[1][0]
            EastBC = Get_LatLon_BndBox()[1][2]
            NorthBC = Get_LatLon_BndBox()[1][3]
            SouthBC = Get_LatLon_BndBox()[1][1]
            MDTools.WriteBoundingInfo(FGDCXML, WestBC, EastBC, NorthBC,
                                      SouthBC)

            #Get/Update Spatial Data Organization
            SpatialDataOrgInfo = Get_Spatial_Data_OrgInfo(
                InputData, myDataType, myFeatType)
            MDTools.WriteSpatialDataOrgInfo(FGDCXML, SpatialDataOrgInfo)

            #Get/Update Spatial Reference Information
            SpatialReferenceInfo = SpatialRefTools.SpatialRefInfo(
                GCS_PrjFile, InputData, WorkingDir, GCS_ExtentList)
            MDTools.WriteSpatialRefInfo(FGDCXML, SpatialReferenceInfo)
            #Handle vertical coordinate system?

        #Get/Update Geospatial Presentation Form. Also updates Format Name (within Distribution Info).
        #(Skip this step and leave existing content if tool input is XML).
        if not InputIsXML:
            MDTools.WriteGeospatialForm(FGDCXML, myDataType, myFeatType)

        #Get/Update Native Environment Details
        #This will be used as a switch to determine which .exe for the EA builder needs to be run (for either 10.0, 10.1, or 10.2).
        #The version info is also written out to the XML record in the 'Native Environment' section.
        ESRIVersion = GetESRIVersion_WriteNativeEnv(FGDCXML)

        #Get/Update Metadata Date of Editing
        Now = datetime.datetime.now()
        MDDate = Now.strftime("%Y%m%d")
        MDTools.WriteMDDate(FGDCXML, MDDate)

        #Update Entity/Attribute Section
        #For spatial/tabular inputs, introspect the data and pickle the
        #contents so the external editor process can load them.
        if InputIsCSV or InputIsExcel:
            contents_fname = InputData
        elif not InputIsXML:
            data_contents = introspector.introspect_dataset(InputData)
            input_fname = os.path.split(InputData)[1]
            contents_fname = os.path.join(WorkingDir, input_fname + ".p")
            pickle.dump(data_contents, open(contents_fname, "wb"))
        else:
            contents_fname = ''

        #Rerun FGDC Translator tool to handle newly-added elements that are out of order in XML tree.
        MDTools.ReRunFGDCTranslator(FGDCXML)

        #Re-import new metadata to the data set to capture E/A tool changes. If input file is a stand alone .xml this step is skipped
        if not InputIsXML:
            try:
                arcpy.MetadataImporter_conversion(
                    FGDCXML, InputData
                )  # This imports only: does not convert and does not sync
            except:
                print "There was a problem during the metadata importation process."

        #Open up Metadata Editor and allow user to review/update
        outXML = os.path.splitext(FGDCXML)[0] + "temp.xml"
        #Arg = '"' + MetadataEditor + '"' + " " + '"' + FGDCXML + '"' + " " + '"' + outXML + '"' + " " + '"' + Browser + '"' #Start and end quotes are necessary to handle spaces in file names and IE Path when passing to Command Prompt.
        #Arg = '"' + MetadataEditor + '"' + " " + '"' + FGDCXML + '"' + " " + '"' + outXML + '"' + " "
        Arg = '"%s" "%s" "%s"' % (python_exe, mdwiz_py_fname, FGDCXML)
        if contents_fname:
            Arg += ' "{}"'.format(contents_fname)
        arcpy.AddWarning(Arg)
        arcpy.AddMessage("*************************")
        arcpy.AddMessage(
            "\nPLEASE UPDATE/REVIEW THE METADATA INFO IN THE POP-UP WINDOW.")
        arcpy.AddMessage("(Allow a moment for the window to open).\n")
        arcpy.AddMessage("*************************")
        #Audible cue that the editor window is opening (best-effort).
        try:
            winsound.PlaySound(
                r"C:\Windows\Media\Cityscape\Windows Exclamation.wav",
                winsound.SND_FILENAME)
        except:
            pass
        #os.popen(Arg)
        #Block until the user closes the external Metadata Editor.
        p = subprocess.Popen(Arg)
        p.wait()

        try:
            MDTools.RemoveStyleSheet(
                FGDCXML
            )  #MP actually removes the stylesheet in VB.NET app... this is a redundancy here.
            # MDTools.ReplaceXML(FGDCXML, outXML)
        except:
            arcpy.AddWarning(
                "No content was saved in the Metadata Editor window. The metadata record was not updated.\n"
            )

        #Re-import new metadata to the data set to capture user edits from the Metadata Editor window.
        try:
            arcpy.MetadataImporter_conversion(
                FGDCXML, InputData
            )  # This imports only: does not convert and does not sync
            arcpy.AddMessage(
                "The updated metadata record is now being re-imported into the input data set...\n"
            )
        except:
            arcpy.AddMessage(
                "There was a problem during the metadata importation process!")

        #Remove the Error Report file generated by MP from the Main Metadata Editor.
        MP_ErrorReport = os.path.splitext(
            FGDCXML)[0] + "temp_MP_ErrorReport.xml"
        try:
            os.remove(MP_ErrorReport)
        except:
            pass

        #Remove FGDC XML file if the toggle to preserve 'stand-alone' file is configured to FALSE. This appears to be passed as a string rather than boolean.
        if str(CreateStandAloneXML) == "false":
            try:
                arcpy.Delete_management(FGDCXML)
                arcpy.AddMessage(
                    "The Wizard will now remove the stand-alone FGDC XML, as requested in the tool interface...\n"
                )
            except:
                arcpy.AddMessage(
                    "There was a problem removing the stand-alone XML file. Try removing the file (%s) manually from the working directory.\n"
                    % FGDCXML)

        #Remove the 'ArcpyTranslate.xml' temp file that gets created when exporting from ESRI metadata to FGDC.
        try:
            os.remove(os.path.join(WorkingDir, 'ArcpyTranslate.xml'))
        except:
            pass

    except arcpy.ExecuteError:
        arcpyError()
    except:
        pythonError()
예제 #12
0

try:

    if debug == True:
        arcpy.AddMessage("initialVelocityMPS: " + str(initialVelocityMPS))
        arcpy.AddMessage("elevationAngleDegrees: " +
                         str(elevationAngleDegrees))
        arcpy.AddMessage("azimuthAngleDegrees: " + str(azimuthAngleDegrees))

    env.overwriteOutput = True
    scratch = env.scratchWorkspace

    if commonSpatialReferenceAsText == '':
        arcpy.AddWarning(
            "Spatial Reference is not defined. Using Spatial Reference of Weapon Location: "
            + str(commonSpatialReference.name))
        commonSpatialReference = arcpy.Describe(inputFeature).spatialReference

    env.outputCoordinateSystem = commonSpatialReference

    # angular inputs in radians
    azimuthAngleRadians = math.radians(
        float(Geo2Arithmetic(azimuthAngleDegrees)))
    elevationAngleRadians = math.radians(float(elevationAngleDegrees))

    # create output layer
    arcpy.CreateFeatureclass_management(os.path.dirname(outFeature),
                                        os.path.basename(outFeature),
                                        "POLYLINE", "", "ENABLED", "ENABLED",
                                        env.outputCoordinateSystem)
예제 #13
0
def polylines(fcLineFeatures, listfcPointFeatures):
    """Snap the Z values of polyline vertices to coincident point features.

    fcLineFeatures      -- polyline feature class, updated in place
    listfcPointFeatures -- list of point feature classes; they are merged and
                           only the points intersecting the lines are kept

    For each line, every vertex that exactly coincides with one of the merged
    points receives that point's Z; all vertices are then written back to the
    line geometry.
    """

    # Scratch dataset / layer names (in_memory merge plus two feature layers).
    TempPoints = "in_memory\\ZSnap_tempPoints"
    lyrTempPointsLineIntersect = "ZsnapTempPointsLineIntersect"
    lyrLines = "ZsnapLines"

    ## Preprocessing: merge the point inputs, then keep only the points that
    ## intersect the line features.
    if arcpy.Exists(TempPoints):
        arcpy.Delete_management(TempPoints)
    arcpy.Merge_management(listfcPointFeatures, TempPoints)
    if arcpy.Exists(lyrTempPointsLineIntersect):
        arcpy.Delete_management(lyrTempPointsLineIntersect)
    if arcpy.Exists(lyrLines):
        arcpy.Delete_management(lyrLines)
    arcpy.MakeFeatureLayer_management(TempPoints, lyrTempPointsLineIntersect)
    arcpy.MakeFeatureLayer_management(fcLineFeatures, lyrLines, "")
    arcpy.SelectLayerByLocation_management(lyrTempPointsLineIntersect,
                                           "INTERSECT", lyrLines)
    TempPointGeometry = arcpy.CopyFeatures_management(
        lyrTempPointsLineIntersect, arcpy.Geometry())

    ## Run
    # NOTE(review): the 5th positional argument of da.UpdateCursor is
    # explode_to_points; the string "True" is truthy — confirm that exploding
    # multipart features is actually intended here.
    with arcpy.da.UpdateCursor(fcLineFeatures, ["OID@", "SHAPE@"], '', '',
                               "True") as ucLines:

        for line in ucLines:
            if line[1] is not None:  # Do not process lines with "Null Geometry"
                arcpy.AddMessage("Line Feature: " + str(line[0]))
                arcpy.AddMessage("   Point Count: " + str(line[1].pointCount))
                vertexCount = 0
                vertexArray = []

                # Re-select just this line, then the points touching it.
                # NOTE(review): '"FID" = ...' assumes a shapefile-style FID
                # OID field — verify against the actual data source.
                arcpy.SelectLayerByAttribute_management(
                    lyrLines, "NEW_SELECTION", '"FID" = ' + str(line[0]))
                arcpy.SelectLayerByLocation_management(
                    lyrTempPointsLineIntersect, "INTERSECT", lyrLines, "",
                    "NEW_SELECTION")
                if arcpy.Exists(TempPointGeometry):
                    arcpy.Delete_management(TempPointGeometry)
                TempPointGeometry = arcpy.CopyFeatures_management(
                    lyrTempPointsLineIntersect, arcpy.Geometry())

                # vertexCount is 0 here, so only part 0 of the line is walked.
                for lineVertex in line[1].getPart(vertexCount):
                    arcpy.AddMessage("   Vertex: " + str(vertexCount) + " X:" +
                                     str(lineVertex.X) + " Y: " +
                                     str(lineVertex.Y))
                    vertexPoint = arcpy.Point(lineVertex.X, lineVertex.Y)
                    # NOTE(review): PointGeometry's 2nd argument is a spatial
                    # reference; a feature class is passed here — presumably
                    # arcpy derives the SR from it, but confirm.
                    vertexPointGeometry = arcpy.PointGeometry(
                        vertexPoint, fcLineFeatures)

                    # Take the Z from any snapped point coincident with this
                    # vertex (intersect with dimension 1 returns point output).
                    for pointGeom in TempPointGeometry:
                        intersect = pointGeom.intersect(vertexPointGeometry, 1)
                        if intersect.firstPoint:
                            arcpy.AddMessage("   Point Intersect: X: " +
                                             str(intersect.firstPoint.X) +
                                             " Y: " +
                                             str(intersect.firstPoint.Y))
                            arcpy.AddMessage("       Old Z: " +
                                             str(lineVertex.Z))
                            lineVertex.Z = intersect.firstPoint.Z
                            arcpy.AddMessage("       New Z: " +
                                             str(lineVertex.Z))
                        #if intersect.pointCount == 0:
                        #    arcpy.AddWarning("   No Intersecting Point Found.")
                    vertexArray.append(
                        [lineVertex.X, lineVertex.Y, lineVertex.Z])
                    vertexCount = vertexCount + 1
                # Write the [x, y, z] vertex list back as the line's geometry.
                # NOTE(review): this rebuilds the line as a single part.
                line[1] = vertexArray
                ucLines.updateRow(line)
            else:
                arcpy.AddWarning("Line Feature: " + str(line[0]) +
                                 " Has no Geometry.")

    return
예제 #14
0
def clipFeatures(params, job, convertFeaturesDuringClip=True):
	"""Select one layer's features by attribute and write them to the output.

	params -- tool parameter object (formats, projection, folder paths)
	job    -- dict with 'layer', 'where' and 'name' keys describing the job
	convertFeaturesDuringClip -- when False, the copied data is additionally
		exported to the requested format (CAD or Data Interoperability)

	The temporary selection layer is deleted before every exit path.
	"""
	global haveDataInterop
	cleanUpFeatureLayer = False
	# get the path and a validated name for the output
	layerName, outputpath = make_output_path(False, job['layer'], job['name'], convertFeaturesDuringClip, params.input_feature_format, params.zip_folder_path, params.scratch_folder_path, outputDataFolderName=params.output_folder_name)
	arcpy.AddMessage("Starting layer: %s where: %s" % (job['layer'], job['where']))
	feature_layer = layerName

	cleanUpFeatureLayer = True

	try:
		arcpy.MakeFeatureLayer_management(job['layer'], feature_layer)
		arcpy.SelectLayerByAttribute_management(feature_layer, "NEW_SELECTION", job['where'])
		count = int(arcpy.GetCount_management(feature_layer).getOutput(0))
	except:
		arcpy.AddWarning("Select Attributes Error ::  Layer=%s; Clause=%s" % (feature_layer, job['where']))
		arcpy.AddWarning(arcpy.GetMessages(2))
		# re-raise for the caller; the unreachable `return` that used to
		# follow this raise has been removed
		raise

	if count == 0:
		arcpy.AddWarning("Where clause yielded no records ::  Layer=%s; Clause=%s" % (feature_layer, job['where']))
		# clean up the selection layer before bailing out (previously leaked)
		if arcpy.Exists(feature_layer):
			arcpy.Delete_management(feature_layer)
		return

	try:
		# project when a known output projection alias was requested,
		# otherwise copy the selected features as-is
		if params.output_projection and params.output_projection in VALID_PROJECTION_ALIASES:
			arcpy.AddMessage('Ready to project: feature_layer=%s; outputpath=%s' % (feature_layer, outputpath))
			out_coordinate_system = os.path.join(PROJECTIONS_FOLDER, VALID_PROJECTION_ALIASES[params.output_projection])
			arcpy.Project_management(feature_layer, outputpath, out_coordinate_system)
		else:
			arcpy.AddMessage('Ready to copy: feature_layer=%s; outputpath=%s' % (feature_layer, outputpath))
			arcpy.CopyFeatures_management(feature_layer, outputpath)

		# if format needs data interop, convert with data interop
		if not convertFeaturesDuringClip:
			# NOTE(review): zipFolderPath and featureFormat are not defined in
			# this function — they look like module globals (params exposes
			# zip_folder_path); confirm they are set before this branch runs.
			outputinzip = os.path.join(zipFolderPath, layerName + featureFormat[2])
			if featureFormat[2].lower() in [".dxf", ".dwg", ".dgn"]:
				#Message "..using export to cad.."
				arcpy.AddWarning(get_ID_message(86139))
				arcpy.ExportCAD_conversion(outputpath, featureFormat[1], outputinzip)
			else:
				if not haveDataInterop:
					raise LicenseError

				diFormatString = "%s,%s" % (featureFormat[1], outputinzip)
				# run quick export
				arcpy.quickexport_interop(outputpath, diFormatString)

	except LicenseError:
		#Message "  failed to export to %s.  The requested formats require the Data Interoperability extension.  This extension is currently unavailable."
		arcpy.AddWarning(get_ID_message(86140) % featureFormat[1])

	finally:
		# always drop the temporary selection layer
		if cleanUpFeatureLayer and arcpy.Exists(feature_layer):
			arcpy.Delete_management(feature_layer)
def processJob(ProjectJob, project, ProjectUID):
    """Generate and merge contours for one project job.

    Creates the contour workspace, builds a referenced DTM mosaic prepared
    for contouring, derives per-tile processing extents, generates tiled
    contours (multiprocessing) and merges the results into the contour GDB.

    ProjectJob / project -- CMDR job and project DB rows
    ProjectUID           -- unused here; kept for the job-dispatch interface
    """
    start = time.time()
    a = start
    # From ContourConfig
    cont_int = CONTOUR_INTERVAL
    cont_unit = CONTOUR_UNIT
    smooth_unit = CONTOUR_SMOOTH_UNIT
    distance_to_clip_md = DISTANCE_TO_CLIP_MOSAIC_DATASET
    distance_to_clip_contours = DISTANCE_TO_CLIP_CONTOURS

    # Resolve the project folder layout (derived / published trees).
    ProjectFolder = ProjectFolders.getProjectFolderFromDBRow(
        ProjectJob, project)
    derived_folder = ProjectFolder.derived.path
    published_folder = ProjectFolder.published.path
    contour_folder = ProjectFolder.derived.contour_path

    filegdb_name, filegdb_ext = os.path.splitext(
        ProjectFolder.published.fgdb_name)  # @UnusedVariable
    publish_filegdb_name = "{}_{}.gdb".format(filegdb_name, DTM)

    # Published DTM mosaic dataset and the derived contour-prep inputs.
    published_filegdb_path = os.path.join(published_folder,
                                          publish_filegdb_name)
    md = os.path.join(published_filegdb_path, "{}{}".format(DTM, OCS))

    derived_filegdb_path = os.path.join(derived_folder,
                                        ProjectFolder.derived.fgdb_name)
    ref_md = os.path.join(derived_filegdb_path, "ContourPrep")
    ft_prints = A05_C_ConsolidateRasterInfo.getRasterFootprintPath(
        derived_filegdb_path, DTM)

    # Read the vertical unit from the first raster footprint row; keep the
    # 'MT' (meters) default when the footprint table has no rows.  (The old
    # `del row` after the loop raised NameError on an empty table.)
    raster_vertical_unit = 'MT'
    foot_fields = [FIELD_INFO[V_UNIT][0]]
    for foot_row in arcpy.da.SearchCursor(ft_prints,
                                          foot_fields):  # @UndefinedVariable
        raster_vertical_unit = foot_row[0]
        break
    arcpy.AddMessage(
        "Got input raster vertical unit: {}".format(raster_vertical_unit))

    try:
        a = datetime.now()
        # Generate Script Workspaces
        contour_gdb, scratch_path = generate_con_workspace(contour_folder)
        a = doTime(
            a, "Created Contour Workspace\n\t{}\n\t{}".format(
                contour_gdb, scratch_path))

        # Create referenced DTM mosaic with the pixel pre-setup for contour output
        createRefDTMMosaic(md, ref_md, raster_vertical_unit)

        # Collect Processing Extents
        run_dict = create_iterable(scratch_path, ft_prints,
                                   distance_to_clip_md,
                                   distance_to_clip_contours)

    except Exception as e:
        arcpy.AddWarning('Exception Raised During Script Initialization')
        arcpy.AddWarning('Exception: ' + str(e))
        # Abort: scratch_path / run_dict / contour_gdb were never assigned,
        # so continuing into the contouring step would only raise NameError.
        arcpy.AddMessage('Script Ran: ' + str(time.time() - start))
        return

    try:
        createTiledContours(ref_md, cont_int, cont_unit, raster_vertical_unit,
                            smooth_unit, scratch_path, run_dict)

        # Merge Contours
        handle_results(scratch_path, contour_gdb)

    except Exception as e:
        arcpy.AddMessage('Exception Raised During Multiprocessing')
        arcpy.AddError('Exception: ' + str(e))

    finally:
        run = time.time() - start
        arcpy.AddMessage('Script Ran: ' + str(run))
예제 #16
0
def createSaraMap(sara_site, risk_radii, sara_name, sara_address, patts,
                  chem_info, output_dir):
    """Build and export a SARA facility risk-radius map document.

    sara_site    -- SARA facility features (saved to a .lyr file on disk)
    risk_radii   -- risk radii features (fields BUFFDIST, UNITS are read)
    sara_name    -- facility name placed in the map title text element
    sara_address -- facility address placed in the address text element
    patts        -- PATTS value substituted into the PATTS text element
    chem_info    -- chemical description for the chemical text element
    output_dir   -- folder receiving the .lyr files, project .mxd and .pdf

    Copies a hard-coded template .mxd, adds the two layers using template
    symbology .lyr files, updates the layout text elements and date, saves
    the map and exports a 300 dpi PDF named
    '<name> Risk Radius Map <mm-dd-yyyy>.pdf'.  Errors are reported through
    arcpy.AddError and errorLogger; document/layer locks are released in
    the finally block.
    """
    try:
        # create a layer file to disk for SARA Facility
        sara_lyr = saveLayerFile(sara_site, 'SARA Site', output_dir)
        # create a layer file to disk for Risk Radii
        risk_radii_lyr = saveLayerFile(risk_radii, 'Risk Radii', output_dir)

        # create map document object from template map (hard-coded path)
        mxd_template = arcpy.mapping.MapDocument(
            r'C:\GIS\Scripts\SARA\Templates\SARA Radius Map Template.mxd')
        # create a copy of the template map document
        project_mxd_file = os.path.join(output_dir, 'SARA_Project_Map.mxd')
        # save a copy of template map
        mxd_template.saveACopy(project_mxd_file)
        # add message
        arcpy.AddMessage('\nCreated a project map document')
        # create a map document object for project map
        project_mxd = arcpy.mapping.MapDocument(project_mxd_file)
        # create data frame object (so you can add a layer to a map)
        data_frame = arcpy.mapping.ListDataFrames(project_mxd)[0]
        # gain access to legend element
        map_legend = arcpy.mapping.ListLayoutElements(project_mxd,
                                                      "LEGEND_ELEMENT",
                                                      "Legend")[0]
        # if a layer is added to map, add it to map legend
        map_legend.autoAdd = True

        # add SARA Facility to map document
        # sara layer file on disk - this represents a layer file, not the layer as it is added to the map document
        sara_temp = arcpy.mapping.Layer(sara_lyr)
        # add layer to map document
        arcpy.mapping.AddLayer(data_frame, sara_temp, 'TOP')
        # create object reference streams layer within map document
        sara_of_interest = arcpy.mapping.ListLayers(project_mxd, '*SARA*',
                                                    data_frame)[0]
        # add symbology layer
        sara_symbol_file = arcpy.mapping.Layer(
            r'C:\GIS\Scripts\SARA\Templates\SARA of Interest.lyr')
        # update symbology
        arcpy.mapping.UpdateLayer(data_frame, sara_of_interest,
                                  sara_symbol_file, True)

        # add risk radii to map
        # risk radii layer file on disk - this represents a layer file, not the layer as it is added to the map document
        risk_radii_temp = arcpy.mapping.Layer(risk_radii_lyr)
        # add layer to map document
        arcpy.mapping.AddLayer(data_frame, risk_radii_temp, 'TOP')
        # create object reference streams layer within map document
        risk_radii_of_interest = arcpy.mapping.ListLayers(
            project_mxd, '*Risk*', data_frame)[0]
        # add symbology layer
        risk_radii_symbol_file = arcpy.mapping.Layer(
            r'C:\GIS\Scripts\SARA\Templates\Risk Radii.lyr')
        # update symbology
        arcpy.mapping.UpdateLayer(data_frame, risk_radii_of_interest,
                                  risk_radii_symbol_file, True)

        # set map extent to the risk radii extent
        data_frame.extent = risk_radii_of_interest.getExtent(True)
        # set scale a little larger to add padding
        data_frame.scale = data_frame.scale * 1.1

        # update SARA Name for map
        sara_name_text = arcpy.mapping.ListLayoutElements(
            project_mxd, 'TEXT_ELEMENT', 'SARA_Title_Text')[0]
        sara_name_text.text = str(sara_name)

        # update SARA Address for map
        sara_address_text = arcpy.mapping.ListLayoutElements(
            project_mxd, 'TEXT_ELEMENT', 'SARA_Address_Text')[0]
        sara_address_text.text = str(sara_address)

        # update SARA PATTS for map (template text uses 'x' as a placeholder)
        sara_patts_text = arcpy.mapping.ListLayoutElements(
            project_mxd, 'TEXT_ELEMENT', 'SARA_PATTS_Text')[0]
        sara_patts_text.text = sara_patts_text.text.replace('x', str(patts))

        # update chemical information
        sara_chem_text = arcpy.mapping.ListLayoutElements(
            project_mxd, 'TEXT_ELEMENT', 'SARA_Chem_Text')[0]
        sara_chem_text.text = str('Chemical: {}'.format(chem_info))

        # update risk radii information
        # container for risk radii information
        risk_radii_info = ''
        # fields for cursor
        risk_radii_fields = ['BUFFDIST', 'UNITS']
        # perform search cursor on risk radii layer to get risk radii information
        with arcpy.da.SearchCursor(risk_radii, risk_radii_fields) as cursor:
            for row in cursor:
                risk_radii_info += '{}-{}; '.format(row[0], row[1])
            # end for in
        # end cursor
        # get map layout element
        risk_radii_text = arcpy.mapping.ListLayoutElements(
            project_mxd, 'TEXT_ELEMENT', 'SARA_Radii_Text')[0]
        risk_radii_text.text = str(
            'Risk Radii Distances: {}'.format(risk_radii_info))

        # update date text element with current date
        # get current date
        date_today = datetime.date.today()
        # reformat date
        date_formatted = date_today.strftime("%m-%d-%Y")
        # create object reference to date text element
        date_text = arcpy.mapping.ListLayoutElements(project_mxd,
                                                     'TEXT_ELEMENT',
                                                     'Date_Text')[0]
        # update text
        date_text.text = str(date_formatted)

        # save map
        project_mxd.save()
        # add message
        arcpy.AddMessage('\nSaved the project map document')

        # export map to pdf using current date in file name
        # file name
        pdf_name = r'{} Risk Radius Map {}.pdf'.format(sara_name,
                                                       date_formatted)
        # export map to pdf using default settings
        arcpy.mapping.ExportToPDF(project_mxd,
                                  os.path.join(output_dir, pdf_name),
                                  'PAGE_LAYOUT',
                                  resolution=300)
        # add message
        arcpy.AddMessage(
            '\nExported project map to .pdf format.  File is named {}'.format(
                pdf_name))
    # If an error occurs running geoprocessing tool(s) capture error and write message
    # handle error outside of Python system
    except EnvironmentError as e:
        arcpy.AddError(
            '\nAn error occured running this tool. Please provide the GIS Department the following error messages:'
        )
        # call error logger method
        errorLogger.PrintException(e)
    # handle exception error
    except Exception as e:
        arcpy.AddError(
            '\nAn error occured running this tool. Please provide the GIS Department the following error messages:'
        )
        # call error logger method
        errorLogger.PrintException(e)
    finally:
        try:
            # delete variables to release locks on map documents (.mxd) and layer files (.lyr)
            del mxd_template, project_mxd, sara_lyr, sara_temp, sara_symbol_file, risk_radii_lyr, risk_radii_temp, risk_radii_symbol_file
            arcpy.AddMessage(
                '\nReleased locks on map documents and layer files')
            arcpy.AddMessage('\nCompleted running tool')
        except:
            arcpy.AddWarning(
                '\nLocks may still exist on map documents and layer files')
            arcpy.AddMessage('\nCompleted running tool')
예제 #17
0
def run_CBA():
    """Cost-Benefit Analysis tool entry point.

    Reads the tool parameters (input feature layer, its join field, a CSV of
    economic costs/revenues, and the CSV's join field), joins the economics
    table to a copy of the features via an in-memory dictionary, computes the
    COSTS, REVENUES and RETURNS fields per feature, and returns the result as
    output parameter 4 (a feature layer named "Cost-Benefit Analysis").
    """
    # Get the value of the input parameters
    inputFC = arcpy.GetParameterAsText(0)  #Polygon or point feature class
    inputFC_join_field = arcpy.GetParameterAsText(
        1)  #field in the input feature class used for join
    economics_table = arcpy.GetParameterAsText(
        2)  # CSV file with data on economic costs and revenues
    economics_join_field = arcpy.GetParameterAsText(
        3
    )  #field in the economics table used for join with input feature class

    # Local variables: in_memory scratch datasets and the output layer name.
    outCBA_fc = r"in_memory\SystemsCopy"
    strata_tab = r"in_memory\JoinTable"
    out_Name = "Cost-Benefit Analysis"

    try:
        ##Check if Input Join Field from Input Feature Layer exists as attribute
        fieldnames_inputFC = [f.name for f in arcpy.ListFields(inputFC)]
        if inputFC_join_field not in fieldnames_inputFC:
            arcpy.AddError(
                "Input Join Field \"%s\" is not an attribute inside the Input Feature Layer!"
                % inputFC_join_field)
            raise arcpy.ExecuteError

        # Work on a copy so the user's input layer is never modified.
        arcpy.AddMessage('Creating Feature Class from Input Feature Layer ...')
        arcpy.SetProgressorLabel(
            'Creating Feature Class from Input Feature Layer ...')
        arcpy.CopyFeatures_management(inputFC, outCBA_fc)

        ### ADD FIELD: add the three output fields ###
        arcpy.AddMessage('Adding New Fields to Feature Class ...')
        arcpy.SetProgressorLabel('Adding New Fields to Feature Class ...')
        arcpy.AddField_management(outCBA_fc, "COSTS", "DOUBLE")
        arcpy.AddField_management(outCBA_fc, "REVENUES", "DOUBLE")
        arcpy.AddField_management(outCBA_fc, "RETURNS", "DOUBLE")

        # Process: Copy Table to a temporary GDB table (workaround for bug in MakeTableView --ArcGIS 10.3.1)
        arcpy.AddMessage('Creating Copy of Table from Input Economic Data ...')
        arcpy.SetProgressorLabel(
            'Creating Copy of Table from Input Economic Data ...')
        arcpy.CopyRows_management(economics_table, strata_tab)

        ##Check if Economic Table Join Field from Economic Data Table exists as attribute
        fieldnames_tbl = [f.name for f in arcpy.ListFields(strata_tab)]
        if economics_join_field not in fieldnames_tbl:
            arcpy.AddError(
                "Economic Table Join Field \"%s\" is not an attribute inside the Input Feature Layer!"
                % economics_join_field)
            raise arcpy.ExecuteError

        ### Value fields: everything except the OID and the key/join field ###
        flistObj = arcpy.ListFields(strata_tab)
        flist = [
            f.name for f in flistObj
            if f.type != "OID" and f.name != economics_join_field
        ]

        ### Build {join value: {field: value}} lookup from the economics table ###
        arcpy.AddMessage('Creating Join Dictionary for Economics Data ...')
        arcpy.SetProgressorLabel(
            'Creating Join Dictionary for Economics Data ...')
        strataDict = {}
        for r in arcpy.SearchCursor(strata_tab):
            fieldvaldict = {}
            for field in flist:
                fieldvaldict[field] = r.getValue(field)
            strataDict[r.getValue(economics_join_field)] = fieldvaldict

        del strata_tab, flistObj

        arcpy.AddMessage('Calculating Returns from Costs and Revenues ...')
        arcpy.SetProgressorLabel(
            'Calculating Returns from Costs and Revenues ...')

        # Replace missing / "n/a" values with 0.0 so float() below cannot
        # fail on them.
        for k, v in strataDict.iteritems():
            for k2, v2 in strataDict[k].iteritems():
                if v2 is None or v2 == "n/a" or v2 == r"n\a":
                    strataDict[k][k2] = 0.0

        with arcpy.da.UpdateCursor(
                outCBA_fc,
            [inputFC_join_field, 'COSTS', 'REVENUES', 'RETURNS'],
                where_clause="\"%s\" IS NOT NULL" %
                inputFC_join_field) as cursor:

            for row in cursor:
                strata = row[0]
                if strata not in strataDict:
                    arcpy.AddWarning(
                        "The attribute \"{}\" was not found in the economics table!"
                        .format(strata))
                    continue

                # NOTE: an earlier revision also summed every field whose
                # name contained 'cost'/'revenue' and then immediately
                # overwrote the result with the explicit sums below; that
                # dead computation has been removed.
                costs = float(strataDict[strata]['cost_per_animal_transfer']) + \
                        float(strataDict[strata]['cost_feeding_per_animal']) + \
                        float(strataDict[strata]['cost_transportation_per_travel']) + \
                        float(strataDict[strata]['cost_maintenance_per_animal'])

                revenues = float(strataDict[strata]['revenue_from_transfer_per_animal']) + \
                           float(strataDict[strata]['revenue_from_tourism']) + \
                           float(strataDict[strata]['revenue_from_food_prod'])

                returns = revenues - costs

                row[1] = costs
                row[2] = revenues
                row[3] = returns

                cursor.updateRow(row)

        arcpy.AddWarning("Warning: negative monetary values in 'RETURNS' " + \
                         "can be the consequence of missing values in the financial table!!")

        #convert temp feature class to feature layer for output
        out_fl = arcpy.MakeFeatureLayer_management(outCBA_fc, out_Name)

        #### Set Parameters ####
        arcpy.SetParameter(4, out_fl)

    except Exception as e:
        # format the exception itself: e.args[0] raised IndexError when
        # args was empty
        arcpy.AddError('An error occurred: {}'.format(e))
def writeFeaturesFromMessageFile() :

    foundEmptyRuleId = False  # used to detect if we can not set a RuleID for any rows

    # Get the input message file
    inputFileName = arcpy.GetParameterAsText(0)
    if (inputFileName == "") or (inputFileName is None):
        inputFileName = os.path.join(MilitaryUtilities.dataPath, r"/messages/Mil2525CMessages.xml")
                
    if not os.path.isfile(inputFileName) :
        arcpy.AddError("Bad Input File: " + inputFileName)
        return

    inputFile=open(inputFileName, "r")
    if (inputFile is None) : 
        arcpy.AddError("Input file can't be opened, exiting")
        return
        
    # Get the output feature class
    outputFC = arcpy.GetParameter(1)
    if (outputFC == "") or (outputFC is None):
        outputFC = os.path.join(MilitaryUtilities.dataPath, r"/test_outputs.gdb/FriendlyOperations/FriendlyUnits")
        
    desc = arcpy.Describe(outputFC)
    if desc is None :
        arcpy.AddError("Can't open Output Dataset: " + str(outputFC)) 
        return

    shapeType = desc.shapeType

    # Get standard
    standard = arcpy.GetParameterAsText(2)
        
    # Message Type Field
    messageTypeField = arcpy.GetParameterAsText(3)            

    arcpy.AddMessage("Running with Parameters:")
    arcpy.AddMessage("0 - input XML File: " + str(inputFileName))
    arcpy.AddMessage("1 - output FC: " + str(outputFC))
    arcpy.AddMessage("2 - symbology standard: " + str(standard))        
    arcpy.AddMessage("3 - MessageTypeField: " + messageTypeField)
        
    if not ((messageTypeField == "") or (messageTypeField is None)) :
        if desc.Fields.contains(messageTypeField) :
            MilitaryUtilities.MessageTypeField = messageTypeField
        else :
            arcpy.AddWarning("MessageTypeField does not exist in output: " + MessageTypeField + " , using default")

    print "Exporting message objects from: " + str(inputFileName)
    print "To Feature Class: " + str(outputFC)
    print "That match shape type: " + shapeType

    # initialize the standard
    MilitaryUtilities.getGeometryConverterStandard(standard)
        
    ruleFieldName = MilitaryUtilities.symbolDictionary.initializeRulesByMilitaryFeatures(outputFC) 

    if (ruleFieldName == "") or (ruleFieldName is None) :
        arcpy.AddError("RuleFieldName not found, exiting")
        return

    # Projected or geographic?
    xname = "lon"
    yname = "lat"
    isProjected = desc.spatialReference.type == "Projected"
    if (isProjected):
        xname = "x"
        yname = "y"
    outputWkid = desc.spatialReference.factoryCode

    ################Begin Export ##########################
    
    featureFields = desc.fields

    # Iterate through the messages and check the shape
    WRITE_OUTPUT = True # debug switch when output not needed
    newRow = None
    newRows = None

    try : 

        if WRITE_OUTPUT : 
            newRows = arcpy.InsertCursor(outputFC)
        messageCount = 0

        # for each message in the message file, get its attributes and copy to the output FeatureClass
        for sic, controlPoints, attributes in MessageIterator.MessageIterator(inputFileName) :
            print sic, controlPoints, attributes

            geoType = MilitaryUtilities.geoConverter.expectedGeometryType(sic)
            if not DictionaryConstants.isCorrectShapeTypeForFeature(geoType, shapeType) : 
                skipMsg = "Skipping SIC: " + sic + " - does not match feature type" + shapeType
                arcpy.AddMessage(skipMsg)
                continue

            # Used for those SICs that map to 2 lines (ex. Task Screen/Guard/Cover)
            repeatForPairFeatures = True
            repeatCount = 0

            while repeatForPairFeatures :

                outputPointList, conversionNotes = MilitaryUtilities.geoConverter.controlPointsToGeometry(sic, controlPoints, attributes)
                if outputPointList is None :
                    msg = "Failed to Convert Points from Military to MilFeature format for SIDC: " + sic
                    arcpy.AddError(msg)
                    arcpy.AddError("Conversion Notes: " + conversionNotes)
                    repeatForPairFeatures = False
                    continue

                inputWkid = 0
                if attributes.has_key(DictionaryConstants.Tag_Wkid) :
                    inputWkid = int(attributes[DictionaryConstants.Tag_Wkid])

                if outputWkid != inputWkid :
                    msg = "ERROR: Input Message and Output Feature WKIDs do not match (InsertFeature will fail)"
                    arcpy.AddError(msg)
                    msg = "Output WKID = " + str(outputWkid) + " , Input WKID = " + str(inputWkid)
                    arcpy.AddError(msg)

                ruleId, symbolName = MilitaryUtilities.symbolDictionary.symbolIdToRuleId(sic)

                if ruleId < 0 :
                    foundEmptyRuleId = True
                    # arcpy.AddWarning("WARNING: Could not map ruleId to SIDC: " + sic)

                # For those SIC that map to 2 lines (ex. Task Screen/Guard/Cover)
                # will need to clone/repeat the message here for Left/Right Upper/Lower pair
                repeatForPairFeatures = False 
                geoConversion = MilitaryUtilities.symbolDictionary.symbolIdToGeometryConversionType(sic)
                if (geoConversion == DictionaryConstants.GCT_TWOLINE) or \
                    (geoConversion == DictionaryConstants.GCT_TWOLINE3OR4PT) :
                    if repeatCount > 0 : 
                        repeatForPairFeatures = False # Only do once
                        ## TODO: find better way to set rule Id for 2nd line (Left/Right) version
                        # This is quite kludgy, and relies on the 2nd ruleid code being the 1st + 1
                        # and this may not always be the case
                        ruleId = ruleId + 1
                    else : 
                        repeatForPairFeatures = True 
                        attributes[DictionaryConstants.Tag_TwoLinesNeeded] = "True"
                        # don't let id get repeated, so append "_2"
                        if attributes.has_key(DictionaryConstants.Tag_Id) : 
                            attributes[DictionaryConstants.Tag_Id] = attributes[DictionaryConstants.Tag_Id] + "_2"
                repeatCount = repeatCount + 1

                arcpy.AddMessage("Adding feature #" + str(messageCount) + " with SIDC: " + sic)
                if WRITE_OUTPUT : 
                    try : 
                        shape = MilitaryUtilities.pointsToArcPyGeometry(outputPointList, shapeType)
                        newRow = newRows.newRow()
                        newRow.setValue(desc.shapeFieldName, shape)
                        newRow.setValue(ruleFieldName, ruleId)
                        
                        # both "sic" and "sidc" used
                        try : 
                            newRow.setValue("sic", sic)
                        except :
                            try :                             
                                newRow.setValue("sidc", sic)
                            except : 
                                arcpy.AddWarning("Failed to set SIDC field in output")
                            
                        # add any extra fields
                        for field in featureFields :  
                            if not (field.name in DictionaryConstants.MILFEATURES_FIELD_EXCLUDE_LIST) :
                                lowerFieldName = field.name.lower()
                                # we don't the case of the attribute so have to search
                                for key in attributes.keys() :                                     
                                    lowerKey = key.lower() 
                                    if (lowerKey == lowerFieldName) :
                                        try : 
                                            newRow.setValue(field.name, attributes[key])
                                        except : 
                                            print "Could not add: Field: " + field.name + ", Value: " + str(attributes[key])

                        newRows.insertRow(newRow) 
                        arcpy.AddMessage("Message successfully added: " + str(messageCount))
                    except : 
                        arcpy.AddError("ERROR: Exception while adding new feature (does Spatial Ref match?)")
                        tb = traceback.format_exc()
                        print tb
                else :
                    print "WRITING OUTPUT:"
                    print "SIC: " + sic + ", Name: " + symbolName                
                    print "Adding geometry to feature, with points: "
                    for point in outputPointList : 
                        x = point.split(',')[0]
                        y = point.split(',')[1]
                        print "(", x, ",", y, ")"                                     
                
            messageCount += 1
            
        if messageCount == 0 :
            arcpy.AddWarning("No Messages Found in Input")

        if foundEmptyRuleId :
            arcpy.AddWarning("IMPORTANT: Some rows do not have Symbol RuleId set - you may need to run CalcRepRuleField tool.")            
           
    except :
        tb = traceback.format_exc()
        arcpy.AddError("Exception:")
        arcpy.AddError(tb)        

    finally :
        # Delete cursor and row objects to remove locks on the data 
        if not newRow is None : 
            del newRow 
        if not newRows is None : 
            del newRows
    def executer(self, env, no_nc, no_tache, procdesc):
        """Close the in-situ correction of a 'DM' type non-conformity.

        Validates the non-conformity (must exist, still be open, be of type
        'DM' with an 'SP' or 'SM' treatment), processes every attached dataset
        identifier whose end interval is still open (ED_FIN == 99999), stamps
        the treatment date if not already set, commits, and e-mails the
        responsible users.

        Parameters:
        -----------
        env      : SIB environment type used to open the connection.
        no_nc    : Number of the non-conformity to close.
        no_tache : Alloy task number corresponding to the user's request.
        procdesc : Label describing the process used to correct the
                   non-conformity.

        Raises:
        -------
        Exception : invalid NC number, NC already closed, NC not of type
                    'DM', treatment not 'SP'/'SM', or no identifiers attached.
        """
        # Open the SIB database connection.
        arcpy.AddMessage("- Connexion à la BD SIB")
        self.Sib = self.CompteSib.OuvrirConnexionSib(env, env)

        # Resolve the SIB and database user names (used in the e-mail body).
        sUsagerSib = self.CompteSib.UsagerSib()
        sUsagerBd = self.CompteSib.UsagerBd()

        #-----------------------------------
        # Fetch the non-conformity record.
        arcpy.AddMessage(" ")
        arcpy.AddMessage(
            "- Extraction de l'information de la non-conformité : " +
            str(no_nc))
        # NOTE(review): SQL is built by string concatenation; if the Sib API
        # supports bind variables, parameterized queries would be safer.
        sql = "SELECT DATE_FERMETURE, DATE_TRAITEMENT, TY_NC, TY_TRAIT_NC, RESP_DESCR, RESP_CORR, RESP_SUIVI FROM F702_NC WHERE NO_NC='" + no_nc + "'"
        arcpy.AddMessage(sql)
        resultat = self.Sib.requeteSib(sql)
        # The NC number must exist.
        if len(resultat) == 0:
            raise Exception("Numéro de non-conformité invalide : %s" % no_nc)
        # The NC must not already be closed (DATE_FERMETURE still NULL).
        if resultat[0][0] is not None:
            raise Exception(
                "La non-conformité est déjà fermée : date_fermeture=" +
                str(resultat[0][0]))
        # Warn (but do not fail) if the NC was already treated.
        dateTraitement = resultat[0][1]
        if dateTraitement is not None:
            arcpy.AddWarning(
                "La non-conformité a déjà été traitée : date_traitement="
                + str(dateTraitement))
        # The NC must be of type 'DM'.
        if resultat[0][2] != "DM":
            raise Exception(
                "Le type de non-conformité n'est pas 'DM' : TY_NC=" +
                str(resultat[0][2]))
        # The treatment type must contain 'SP' or 'SM'.
        if "SP" not in resultat[0][3] and "SM" not in resultat[0][3]:
            raise Exception(
                "Le type de traitement de non-conformité n'est pas 'SP%' ou 'SM%' : TY_TRAIT_NC="
                + str(resultat[0][3]))
        # Keep the responsible user codes; presumably these were interpolated
        # into the e-mail query below before the literals were scrubbed --
        # TODO confirm against the original source.
        resp_descr = resultat[0][4]
        resp_corr = resultat[0][5]
        resp_suivi = resultat[0][6]
        # Echo the fetched values.
        arcpy.AddMessage(str(resultat))

        #-----------------------------------
        # Fetch the dataset identifiers attached to the non-conformity.
        arcpy.AddMessage(" ")
        arcpy.AddMessage("- Extraction des identifiants non-conformes : " +
                         str(no_nc))
        sql = "SELECT TY_PRODUIT, IDENTIFIANT, ED_DEBUT, VER_DEBUT, ED_FIN, VER_FIN FROM F705_PR WHERE NO_NC='" + no_nc + "' ORDER BY IDENTIFIANT"
        arcpy.AddMessage(sql)
        resultat = self.Sib.requeteSib(sql)
        # At least one identifier must be attached.
        if len(resultat) == 0:
            raise Exception(
                "Aucun identifiant n'est associé à la non-conformité")

        #-----------------------------------
        # Process every non-conformity identifier.
        cpt = 0
        total = len(resultat)
        for item in resultat:
            cpt = cpt + 1
            arcpy.AddMessage(" ")
            arcpy.AddMessage(str(cpt) + "/" + str(total) + " : " + str(item))
            # Unpack the F705_PR row.
            ty_produit = item[0]
            identifiant = item[1]
            ed_debut = item[2]
            ver_debut = item[3]
            ed_fin = item[4]
            ver_fin = item[5]

            # ED_FIN == 99999 marks an end interval that is not treated yet.
            if ed_fin == 99999:
                self.traiterIdentifiant(no_nc, no_tache, procdesc, ty_produit,
                                        identifiant, ed_debut, ver_debut,
                                        ed_fin, ver_fin)
            else:
                # End interval already treated: warn and skip.
                arcpy.AddWarning("  L'intervalle de fin est déjà traitée")

        #-----------------------------------
        # Stamp the treatment date unless it was already set.
        if dateTraitement is None:
            arcpy.AddMessage(" ")
            arcpy.AddMessage("- Mise à jour de la date de traitement")
            sql = "UPDATE F702_NC SET ETAMPE='" + no_tache + "', DT_M=SYSDATE, DATE_TRAITEMENT=SYSDATE WHERE NO_NC='" + no_nc + "'"
            arcpy.AddWarning(sql)
            self.Sib.execute(sql)

        #-----------------------------------
        # Commit the transaction.
        arcpy.AddMessage(" ")
        arcpy.AddMessage("- Accepter les modifications")
        sql = "COMMIT"
        arcpy.AddWarning(sql)
        self.Sib.execute(sql)

        #-----------------------------------
        # E-mail the responsible users.
        arcpy.AddMessage(" ")
        arcpy.AddMessage("- Envoit d'un courriel aux responsables")
        # NOTE(review): the CD_USER literals below look scrubbed ('******');
        # the original query presumably filtered on resp_descr / resp_corr /
        # resp_suivi -- confirm before reuse.
        sql = "SELECT DISTINCT ADR_EMAIL FROM F005_US WHERE CD_USER='******' OR CD_USER='******' OR CD_USER='******'"
        resultat = self.Sib.requeteSib(sql)
        for courriel in resultat:
            destinataire = str(courriel[0])
            sujet = unicode(
                "Fermeture de la correction INSITU de la non-conformité #" +
                no_nc, "utf-8")
            contenu = unicode(
                "Bonjour,\n\nTous les jeux de données associés à la non conformité #"
                + no_nc + " sont maitenant corrigés.\n\n" + sUsagerSib +
                "\n" + sUsagerBd, "utf-8")
            arcpy.AddMessage("EnvoyerCourriel('" + destinataire + "','" +
                             sujet + "')")
            EnvoyerCourriel.EnvoyerCourriel(destinataire, contenu, sujet)

        # Close the SIB database connection.
        arcpy.AddMessage(" ")
        self.Sib.fermerConnexionSib()

        # End of processing.
        return
예제 #20
0
            try:
                inMultiplier = int(inMultiplier)
            except:
                inMultiplier = 100

            oProcessor = ApFloodplainFromHAND()
            oProcessor.DebugLevel = 0
            inStep = inStep
            tReturn = oProcessor.execute(inRiv, inCat, inRasterHAND, inRasterMinLocal, inRasterStr, inStep, inDeltaH, inRWKS, inFWKS, bConnectedOnly) 
            if(tReturn[0]==apwrutils.C_OK):
                fcZoneRslt =tReturn[1]
                fpRaster = tReturn[2]
                fpDepthRName = "{}_{}".format(flooddsconfig.LN_FPZone,inStep) 
                flZone = arcpy.management.MakeFeatureLayer(fcZoneRslt, flooddsconfig.LN_FPZone)   #fpZoneName)
                rlZone = arcpy.management.MakeRasterLayer(fpRaster, fpDepthRName) 
                arcpy.SetParameterAsText(9, flZone)   #fpZone polygon
                arcpy.SetParameterAsText(10, rlZone)  #fpZone Raster

    except arcpy.ExecuteError:
        sMsg = str(arcpy.GetMessages(2))
        arcpy.AddError(sMsg)
    except:
        arcpy.AddWarning(arcpy.GetMessages(2))
        sMsg = trace()
        arcpy.AddMessage(sMsg)
    finally:
        dt = datetime.datetime.now()
        print  ('Finished at ' + dt.strftime("%Y-%m-%d %H:%M:%S"))  


예제 #21
0
def main(argv=None):
    """Iterate over LM, BM, and restoration tasks.

    Each iteration runs Linkage Mapper, then Barrier Mapper, then picks the
    single best restoration circle (by total ROI or by barrier improvement
    score), digitally "restores" it into the resistance raster, and feeds
    the restored raster into the next iteration.

    argv layout (positions 1-16) is documented inline under USER SETTINGS;
    argv[0] is the script name.  NOTE(review): the values are used without
    any type conversion (e.g. `iterations + 1`, `radius * 2`), so callers
    are assumed to pass already-typed values rather than raw strings --
    confirm against the ArcGIS tool wiring.
    """
    if argv is None:
        argv = sys.argv  # Get parameters from ArcGIS tool dialog

    # NOTE(review): time.clock() was removed in Python 3.8; fine under the
    # Python 2.7 runtime this script appears to target.
    start_time = time.clock()

    # USER SETTINGS ######################################################

    # Restoration Settings
    # ALL input data must be in the same projection

    # Set to True to restore highest ROI. Set to False to restore strongest
    # barrier
    # NOTE(review): used directly in truth tests below, so a non-empty string
    # such as 'false' would evaluate truthy -- assumed to be a real boolean.
    restore_max_roi = argv[1]

    # Resistance value of restored habitat.  Must be 1 or greater.
    restored_resistance_val = argv[2]

    # No spaces or special chars in paths or gdb names
    restoration_data_gdb = argv[3]

    # No spaces in path, avoid using dropbox or network drive
    # Project directories will be created in this (iter1, iter2...) as will an
    # output geodatabase
    output_dir = argv[4]

    # Resistance raster. Should be in input GDB
    resistance_ras = argv[5]
    # Core area feature class. Should be in input GDB 'URWA_HCAs_Doug_Grant'
    core_fc = argv[6]

    core_fn = argv[7]  # Core area field name

    radius = argv[8]  # Restoration radius in meters
    iterations = argv[9]  # Number of restorations to perform

    # If less than this proportion of ag in circle, don't consider restoring
    # circle
    min_ag_threshold = argv[10]

    # Don't consider barriers below this improvement score (average improvement
    # per meter diameter restored)
    min_improvement_val = argv[11]

    # Average per-m2 parcel cost per pixel. Snapped to resistance raster.
    parcel_cost_ras = argv[12]

    # Right now this is just a raster with all pixels set to 0.113174
    restoration_cost_ras = argv[13]

    ag_ras = argv[14]  # 1=Ag, 0=Not Ag

    # Some restorations benefit multiple corridors.
    # 'Maximum' takes the greatest improvement across core area pairs
    # 'Sum' adds improvement scores acreoss all pairs.
    barrier_combine_method = argv[15]

    # Use cwd_thresh = None for no threshold. Use cwd_thresh = X to not
    # consider restorations more than X map units away from each core area.
    cwd_thresh = argv[16]

    # END USER SETTINGS ######################################################

    try:
        # Setup path and create directories
        gprint('Hey! Make sure everything is in the same projection!\n')
        gprint('Setting up paths and creating directories')
        sys.path.append('..\\toolbox\\scripts')
        res_ras = os.path.join(restoration_data_gdb, resistance_ras)
        core_fc_path = os.path.join(restoration_data_gdb, core_fc)

        # Set up a NEW output gdb (leave previous ones on drive)
        # First unused restorationOutput<i>.gdb name wins; i is reused below
        # to name the settings log file.
        i = None
        for i in range(1, 200):
            output_gdb = 'restorationOutput' + str(i) + '.gdb'
            if not arcpy.Exists(os.path.join(output_dir, output_gdb)):
                break
            gprint('Previous output GDB ' + output_gdb + ' exists.  '
                   'Delete to save disk space.')
        arcpy.CreateFileGDB_management(output_dir, output_gdb)
        output_gdb = os.path.join(output_dir, output_gdb)
        log_file = os.path.join(output_gdb,
                                'Iterate Barriers' + str(i) + '.py')

        # Write a copy of this file to output dir as a record of settings
        shutil.copyfile(__file__, log_file)

        # Pin all geoprocessing env settings to the resistance raster.
        arcpy.env.cellSize = res_ras
        arcpy.env.extent = res_ras
        arcpy.env.snapRaster = res_ras
        arcpy.env.overwriteOutput = True
        arcpy.env.scratchWorkspace = output_gdb
        arcpy.env.workspace = output_gdb

        spatialref = arcpy.Describe(res_ras).spatialReference
        mapunits = spatialref.linearUnitName
        gprint('Cell size = ' + str(arcpy.env.cellSize) + ' ' + mapunits + 's')

        # Calculate fraction of ag within radius of each pixel
        gprint('Calculating purchase cost, fraction of ag, etc within radius '
               'of each pixel.')
        ag_ras = os.path.join(restoration_data_gdb, ag_ras)
        in_neighborhood = arcpy.sa.NbrCircle(radius, "MAP")
        arcpy.env.extent = ag_ras
        out_focal_stats = arcpy.sa.FocalStatistics(ag_ras, in_neighborhood,
                                                   "MEAN", "NODATA")
        proportion_ag_ras = os.path.join(output_gdb, 'proportionAgRas')
        out_focal_stats.save(proportion_ag_ras)
        arcpy.env.extent = res_ras

        # Calculate purchase cost of circles
        parcel_cost_ras = os.path.join(restoration_data_gdb, parcel_cost_ras)
        arcpy.env.extent = parcel_cost_ras
        out_focal_stats = arcpy.sa.FocalStatistics(parcel_cost_ras,
                                                   in_neighborhood, "MEAN",
                                                   "DATA")
        cost_focal_stats_ras = os.path.join(output_gdb, 'cost_focal_stats_ras')
        out_focal_stats.save(cost_focal_stats_ras)
        arcpy.env.extent = res_ras

        # Mean per-m2 cost within circle times circle area = purchase cost.
        circle_area = float(npy.pi * radius * radius)
        outras = arcpy.sa.Raster(cost_focal_stats_ras) * circle_area
        purch_cost_ras = os.path.join(output_gdb, 'purchaseCostRaster')
        outras.save(purch_cost_ras)
        lu.delete_data(cost_focal_stats_ras)

        # Total cost = purchase cost + restoration cost over the circle area.
        restoration_cost_ras = os.path.join(restoration_data_gdb,
                                            restoration_cost_ras)
        outras = (
            arcpy.sa.Raster(purch_cost_ras) +
            (arcpy.sa.Raster(restoration_cost_ras) * radius * radius * npy.pi))
        total_cost_ras = os.path.join(output_gdb, 'totalCostRaster')
        outras.save(total_cost_ras)

        # Create mask to remove areas without cost data
        arcpy.env.extent = total_cost_ras
        cost_mask_ras = os.path.join(output_gdb, 'costMaskRaster')
        cost_thresh = 0
        out_con = arcpy.sa.Con(
            (arcpy.sa.Raster(total_cost_ras) > float(cost_thresh)), 1)
        out_con.save(cost_mask_ras)
        arcpy.env.extent = res_ras

        # Create mask to remove areas below ag threshold
        out_con = arcpy.sa.Con(
            (arcpy.sa.Raster(proportion_ag_ras) > float(min_ag_threshold)), 1)
        ag_mask_ras = os.path.join(output_gdb, 'agMaskRaster')
        out_con.save(ag_mask_ras)

        # LM steps 1/2 run only on the first iteration; results are reused.
        do_step_1 = 'true'
        do_step_2 = 'true'
        do_step_5 = 'false'
        all_restored_areas_ras = ''

        for cur_iter in range(1, iterations + 1):
            start_time1 = time.clock()

            # Some env settings get changed by linkage mapper and must be
            # reset here
            arcpy.env.cellSize = res_ras
            arcpy.env.extent = res_ras
            arcpy.env.snapRaster = res_ras
            arcpy.env.scratchWorkspace = output_gdb
            arcpy.env.workspace = output_gdb

            lu.dashline(1)
            gprint('Running iteration number ' + str(cur_iter))
            proj_dir = os.path.join(output_dir,
                                    'iter' + str(cur_iter) + 'Proj')
            lu.create_dir(output_dir)
            lu.delete_dir(proj_dir)
            lu.create_dir(proj_dir)
            if cur_iter > 1:  # Copy previous s2 linktable to new project dir
                datapass_dir = os.path.join(proj_dir, 'datapass')
                lu.create_dir(datapass_dir)
                proj_dir1 = os.path.join(output_dir, 'iter1Proj')
                datapass_dir_iter1 = os.path.join(proj_dir1, 'datapass')
                s2_link_tbl_iter1 = os.path.join(datapass_dir_iter1,
                                                 'linkTable_s2.csv')
                s2_link_tbl = os.path.join(datapass_dir, 'linkTable_s2.csv')
                shutil.copyfile(s2_link_tbl_iter1, s2_link_tbl)

            # Run Linkage Mapper

            # Copy distances text file from earlier LM run to the output
            # directory- speeds things up!
            dist_file = os.path.join(output_dir, core_fc + '_dists.txt')

            if not os.path.exists(dist_file):
                if cur_iter == 1:
                    gprint('Will calculate distance file.')
                    dist_file = '#'
                else:
                    proj_dir1 = os.path.join(output_dir, 'iter1Proj')
                    dist_file1 = os.path.join(proj_dir1,
                                              core_fc + '_dists.txt')
                    # Put a copy here for future runs
                    shutil.copyfile(dist_file1, dist_file)

            arcpy.env.scratchWorkspace = output_gdb
            arcpy.env.workspace = output_gdb

            # Positional argv for lm_master -- mirrors the LM tool dialog
            # order; literal strings are fixed LM options for this workflow.
            argv = ('lm_master.py', proj_dir, core_fc_path, core_fn, res_ras,
                    do_step_1, do_step_2, 'Cost-Weighted & Euclidean',
                    dist_file, 'true', 'true', 'false', '4', 'Cost-Weighted',
                    'true', do_step_5, 'true', '200000', '10000', '#', '#',
                    '#', '#')
            gprint('Running ' + str(argv))
            lm_master.lm_master(argv)
            do_step_1 = 'false'  # Can skip for future iterations
            do_step_2 = 'false'  # Can skip for future iterations
            do_step_5 = 'false'  # Skipping for future iterations

            # Single-radius barrier run (start == end, step 0).
            start_radius = str(radius)
            end_radius = str(radius)
            radius_step = '0'
            save_radius_ras = 'false'
            write_pct_ras = 'false'

            argv = ('barrier_master.py', proj_dir, res_ras, start_radius,
                    end_radius, radius_step, barrier_combine_method,
                    save_radius_ras, write_pct_ras, cwd_thresh)
            gprint('Running ' + str(argv))
            barrier_master.bar_master(argv)

            # Some env settings get changed by linkage mapper and must be
            # reset here
            arcpy.env.cellSize = res_ras
            arcpy.env.extent = res_ras
            arcpy.env.snapRaster = res_ras
            arcpy.env.scratchWorkspace = output_gdb
            arcpy.env.workspace = output_gdb

            gprint('Finding restoration circles with max barrier score / ROI')
            # Find points with max ROI
            prefix = os.path.basename(proj_dir)
            if barrier_combine_method == 'Sum':
                sum_suffix = 'Sum'
            else:
                sum_suffix = ''
            # Barrier Mapper's output raster naming convention.
            barrier_fn = (prefix + "_BarrierCenters" + sum_suffix + "_Rad" +
                          str(radius))
            barrier_ras = os.path.join(proj_dir, 'output', 'barriers.gdb',
                                       barrier_fn)
            if not arcpy.Exists(barrier_ras):
                msg = ('Error: cannot find barrier output: ' + barrier_ras)
                lu.raise_error(msg)

            if cur_iter > 1:
                gprint('Creating mask for previously restored areas')
                in_neighborhood = arcpy.sa.NbrCircle(radius, "MAP")
                arcpy.env.extent = all_restored_areas_ras
                out_focal_stats = arcpy.sa.FocalStatistics(
                    all_restored_areas_ras, in_neighborhood, "MEAN", "DATA")
                all_restored_focal_ras = os.path.join(
                    output_gdb, 'allRestFocRas_iter' + str(cur_iter))

                # Anything > 0 would include a restored area
                out_focal_stats.save(all_restored_focal_ras)
                arcpy.env.extent = res_ras
                rest_mask_ras = os.path.join(
                    output_gdb, 'restMaskRaster_iter' + str(cur_iter))
                minval = 0
                out_con = arcpy.sa.Con(
                    (arcpy.sa.Raster(all_restored_focal_ras) == float(minval)),
                    1)
                out_con.save(rest_mask_ras)

            # Candidate areas have not been restored, have cost data, meet
            # minimum improvement score criteria, and have enough ag in them
            candidate_barrier_ras = os.path.join(
                output_gdb, 'candidateBarrierRaster' + '_iter' + str(cur_iter))
            if cur_iter > 1:
                gprint('Creating candidate restoration raster using barrier '
                       'results, previous restorations, and selection '
                       'criteria')

                # ROI scores will be in terms of total improvement
                # (= score * diameter)
                out_calc = (arcpy.sa.Raster(cost_mask_ras) *
                            arcpy.sa.Raster(ag_mask_ras) *
                            arcpy.sa.Raster(barrier_ras) *
                            arcpy.sa.Raster(rest_mask_ras) * (radius * 2))
            else:
                out_calc = (arcpy.sa.Raster(cost_mask_ras) *
                            arcpy.sa.Raster(ag_mask_ras) *
                            arcpy.sa.Raster(barrier_ras) * radius * 2)

            min_barrier_score = min_improvement_val * radius * 2
            if restored_resistance_val != 1:
                # Discount the score by the residual resistance left after
                # restoration (restored_resistance_val > 1).
                out_calc_2 = (out_calc - (2 * radius *
                                          (restored_resistance_val - 1)))
                out_con = arcpy.sa.Con(
                    (out_calc_2 >= float(min_barrier_score)), out_calc_2)
            else:
                out_con = arcpy.sa.Con((out_calc >= float(min_barrier_score)),
                                       out_calc)
            out_con.save(candidate_barrier_ras)
            lu.build_stats(candidate_barrier_ras)

            # ROI rasters: improvement score divided by cost.
            purchase_roi_ras = os.path.join(
                output_gdb, 'purchaseRoiRaster' + '_iter' + str(cur_iter))
            out_calc = (arcpy.sa.Raster(candidate_barrier_ras) /
                        arcpy.sa.Raster(purch_cost_ras))
            out_calc.save(purchase_roi_ras)
            lu.build_stats(purchase_roi_ras)

            total_roi_ras = os.path.join(
                output_gdb, 'purchaseRestRoiRaster' + '_iter' + str(cur_iter))
            out_calc = (arcpy.sa.Raster(candidate_barrier_ras) /
                        arcpy.sa.Raster(total_cost_ras))
            out_calc.save(total_roi_ras)
            lu.build_stats(total_roi_ras)

            max_barrier = float(
                arcpy.GetRasterProperties_management(candidate_barrier_ras,
                                                     "MAXIMUM").getOutput(0))
            gprint('Maximum barrier improvement score: ' + str(max_barrier))
            if max_barrier < 0:
                arcpy.AddWarning("\nNo barriers found that meet CWD or Ag "
                                 "threshold criteria.")

            max_purch_roi = arcpy.GetRasterProperties_management(
                purchase_roi_ras, "MAXIMUM")
            gprint('Maximum purchase ROI score: ' +
                   str(max_purch_roi.getOutput(0)))

            max_roi = arcpy.GetRasterProperties_management(
                total_roi_ras, "MAXIMUM")
            gprint('Maximum total ROI score: ' + str(max_roi.getOutput(0)))

            if restore_max_roi:
                out_point = os.path.join(
                    output_gdb, 'maxRoiPoint' + '_iter' + str(cur_iter))
                gprint('Choosing circle with maximum ROI to restore')
                # Keep only cells at the maximum ROI value.
                out_con = arcpy.sa.Con(
                    (arcpy.sa.Raster(total_roi_ras) >= float(
                        max_roi.getOutput(0))), total_roi_ras)
                max_roi_ras = os.path.join(output_gdb, 'max_roi_ras')
                out_con.save(max_roi_ras)
                # Save max ROI to point
                try:
                    arcpy.RasterToPoint_conversion(max_roi_ras, out_point)
                except Exception:
                    msg = ('Error: it looks like there are no viable '
                           'restoration candidates.')
                    lu.raise_error(msg)

            else:  # Restoring strongest barrier instead
                out_point = os.path.join(
                    output_gdb, 'maxBarrierPoint' + '_iter' + str(cur_iter))
                gprint('Choosing circle with maximum BARRIER IMPROVEMENT SCORE'
                       ' to restore')
                out_con = arcpy.sa.Con(
                    (arcpy.sa.Raster(candidate_barrier_ras) >= max_barrier),
                    candidate_barrier_ras)
                max_barrier_ras = os.path.join(output_gdb, 'maxBarrierRaster')
                out_con.save(max_barrier_ras)
                # Save max barrier to point
                try:
                    arcpy.RasterToPoint_conversion(max_barrier_ras, out_point)
                except Exception:
                    msg = ('Error: it looks like there are no viable '
                           'restoration candidates.')
                    lu.raise_error(msg)

            gprint('Done evaluating candidate restorations')
            result = int(arcpy.GetCount_management(out_point).getOutput(0))
            if result > 1:
                # Would be better to retain point with max barrier score when
                # we have multiple points with same ROI
                arcpy.AddWarning('Deleting points with identical '
                                 'ROI/improvement score values')

                arcpy.DeleteIdentical_management(out_point, "grid_code", 0.1,
                                                 0.1)

            # Attach score/cost attributes to the chosen point.
            arcpy.sa.ExtractMultiValuesToPoints(
                out_point,
                [[candidate_barrier_ras, "barrierScore"],
                 [purch_cost_ras, "purchCost"], [total_cost_ras, "totalCost"],
                 [purchase_roi_ras, "purchaseROI"],
                 [total_roi_ras, "totalROI"]], "NONE")

            arcpy.AddField_management(out_point, "restorationNumber", "SHORT")
            arcpy.CalculateField_management(out_point, "restorationNumber",
                                            cur_iter, "PYTHON_9.3")
            arcpy.AddField_management(out_point, "radius", "DOUBLE")
            arcpy.CalculateField_management(out_point, "radius", radius,
                                            "PYTHON_9.3")
            arcpy.AddField_management(out_point, "barrierScore_per_m",
                                      "DOUBLE")
            arcpy.CalculateField_management(
                out_point, "barrierScore_per_m",
                "(float(!barrierScore!) / (!radius! * 2))", "PYTHON_9.3")

            gprint('\nCreating restoration circles')
            if restore_max_roi:
                circle_fc = os.path.join(
                    output_gdb, 'maxRoiCircle' + '_iter' + str(cur_iter))
            else:
                circle_fc = os.path.join(
                    output_gdb, 'maxBarrierCircle' + '_iter' + str(cur_iter))
            arcpy.Buffer_analysis(out_point, circle_fc, radius)
            gprint('Rasterizing restoration circles')
            if restore_max_roi:
                circle_ras = os.path.join(
                    output_gdb, 'maxRoicircle_ras' + '_iter' + str(cur_iter))
            else:
                circle_ras = os.path.join(
                    output_gdb,
                    'maxBarrierCircleRas' + '_iter' + str(cur_iter))
            arcpy.FeatureToRaster_conversion(circle_fc, 'totalROI', circle_ras,
                                             arcpy.env.cellSize)

            # restore raster
            gprint('Digitally restoring resistance raster')
            res_ras_restored = os.path.join(
                output_gdb, 'resRastRestored' + '_iter' + str(cur_iter))
            # Inside the circle use the restored resistance value; elsewhere
            # keep the current resistance raster.
            out_con = arcpy.sa.Con(arcpy.sa.IsNull(circle_ras), res_ras,
                                   restored_resistance_val)
            out_con.save(res_ras_restored)

            all_restored_areas_ras = os.path.join(
                output_gdb, 'allRestoredAreas_iter' + str(cur_iter))
            prev_restored_areas_ras = os.path.join(
                output_gdb, 'allRestoredAreas_iter' + str(cur_iter - 1))
            if cur_iter == 1:
                out_con = arcpy.sa.Con(arcpy.sa.IsNull(circle_ras), 0, 1)
            else:
                # Add this restoration to areas restored
                out_con = arcpy.sa.Con(arcpy.sa.IsNull(circle_ras),
                                       prev_restored_areas_ras, 1)
            out_con.save(all_restored_areas_ras)

            lu.delete_data(circle_ras)

            # Use for next iteration resistance raster
            res_ras = res_ras_restored

            # Add circle into feature class with all circles
            if restore_max_roi:
                all_circles_fc = os.path.join(output_gdb, "allCirclesMaxROI")
            else:
                all_circles_fc = os.path.join(output_gdb,
                                              "allCirclesMaxBarriers")
            if cur_iter == 1:
                arcpy.CopyFeatures_management(circle_fc, all_circles_fc)
            else:
                arcpy.Append_management(circle_fc, all_circles_fc, "TEST")
            gprint('Finished iteration #' + str(cur_iter))
            start_time1 = lu.elapsed_time(start_time1)

        gprint('\nDone with iterations.')
        start_time = lu.elapsed_time(start_time)
        gprint('Outputs saved in: ' + output_gdb)
        gprint('Back up your project directories if you want to save '
               'corridor/barrier results.')

    # Return GEOPROCESSING specific errors
    except arcpy.ExecuteError:
        lu.dashline(1)
        gprint('****Iteration script failed. Details follow.****')
        lu.exit_with_geoproc_error(_SCRIPT_NAME)

    # Return any PYTHON or system specific errors
    except Exception:
        lu.dashline(1)
        gprint('****Iteration script failed. Details follow.****')
        lu.exit_with_python_error(_SCRIPT_NAME)
예제 #22
0
    def execute(self, inRiv, inCat, inRasterHAND, inRasterMinLocal, inRasterStr, inStep, inDeltaH, inRWKS, inFWKS, bConnectedOnly = True, pScratchWorkspace = None, nProcessors = 0): 
        '''Construct the floodplain for one water-level step using the HAND
        (Height Above Nearest Drainage) raster: per-catchment depth and WSE
        rasters, plus dissolved floodplain-zone polygons.

        Parameters (as used by this method's body):
        1. inRiv - river feature class; read for HYDROID/DRAINID (and DH when
           inDeltaH < 0)
        2. inCat - catchment polygons used to limit the floodplain; when None,
           an alternate raster-nibble branch runs (see NOTE there)
        3. inRasterHAND - HAND raster; also used as snap raster / extent / mask
        4. inRasterMinLocal - optional raster added to the WSE output if it exists
        5. inRasterStr - optional stream raster used as the Con() false value
        6. inStep - index of the water level in the step sequence; used to
           construct output names (e.g. "D_<step>_R_<rivid>")
        7. inDeltaH - flood depth above HAND; a negative value is a sentinel
           meaning "read per-river DH values from inRiv's DH field"
        8. inRWKS - output raster workspace (folder tree is created as needed)
        9. inFWKS - output feature workspace (floodzone feature classes)

        Returns (C_OK, fcZoneRslt, fpRaster) on success; on failure returns
        the trace string (see NOTE at the bottom about the missing comma).
        '''
        sOK = apwrutils.C_OK 
        dCatID2RivID = dict()  # Catchment HYDROID -> River HYDROID
        dRivID2DH = dict()     # River HYDROID -> per-river delta-H (flood depth)
       
        inDeltaH = float(inDeltaH)
        # Negative inDeltaH is a sentinel: load per-river DH values from inRiv.
        if(inDeltaH<0):
            #arcpy.AddMessage("{} {}  {}".format(inRiv, flooddsconfig.FN_HYDROID, flooddsconfig.FN_DH ))
            try:
                with arcpy.da.SearchCursor(inRiv, [flooddsconfig.FN_HYDROID, flooddsconfig.FN_DH]) as rows:
                    for row in rows:
                        try:
                            dRivID2DH.setdefault(row[0], row[1])
                            if((self.DebugLevel & 1)==1):  arcpy.AddMessage("HID->DH={}->{}".format(row[0], row[1]))
                        except:
                            pass
            
            except arcpy.ExecuteError:
                sMsg = "{} {}".format(str(arcpy.GetMessages(2)), trace())
                arcpy.AddMessage(sMsg) 

            except:   
                arcpy.AddMessage(trace())             
                pass     

        # Layer/feature-class names tracked so the finally blocks can clean up.
        pHandMasked = ""
        flZoneTempRiver = ""
        flZoneDslv = ""
        flRiv = ""
        arcpy.AddMessage("FloodplainFromHAND.execute ScratchWorkspace={} nProcessors={}".format(pScratchWorkspace,nProcessors) ) 
        if((flooddsconfig.debugLevel & 2)==2): arcpy.AddMessage("in floodplainfromhand: os.environ['TMP']={}, os.environ['TEMP']={}".format(os.environ['TMP'], os.environ['TEMP']))
        try:
            lRiverFlds = [flooddsconfig.FN_HYDROID, flooddsconfig.FN_DRAINID]
            # NOTE(review): existence check uses flooddsconfig.FN_DRAINID but the
            # warning message prints apwrutils.FN_DRAINID — confirm both constants
            # hold the same field name.
            if(len(arcpy.ListFields(inRiv,flooddsconfig.FN_DRAINID))==0):
                arcpy.AddMessage("Required field {} does not exist in {}".format(apwrutils.FN_DRAINID, inRiv))
            
            # Map each river's draining catchment to the river itself.
            with arcpy.da.SearchCursor(inRiv, lRiverFlds) as rows:
                for row in rows:
                    try:
                        dCatID2RivID.setdefault(row[1],row[0])    #Catchment.HYDROID->River.HYDROID
                    except:
                        pass

            if((flooddsconfig.debugLevel & 2)==2):
                for catid, rivid in iter(dCatID2RivID.items()):
                    arcpy.AddMessage("catID={} rivID={}".format(catid,rivid))

            arcpy.CheckOutExtension("Spatial")
            if((flooddsconfig.debugLevel & 1)==1):
                sMsg = "inFWKS={} \ninRiv={} \ninCat={} \ninRasterHAND={} \ninStep={} \ninDeltaH={} \ninRWKS={}".format(inFWKS, inRiv, inCat, inRasterHAND, inStep, inDeltaH, inRWKS)
                apwrutils.Utils.ShowMsg(sMsg)
            
            # Resolve the scratch workspace/folder: caller-supplied workspace
            # wins; otherwise fall back to the configured defaults.
            if(pScratchWorkspace==None):  
                scratch_wks = flooddsconfig.pScratchWorkspace      #   arcpy.env.scratchWorkspace  
                scratchFolder = flooddsconfig.pScratchFolder         #  arcpy.env.scratchFolder
                arcpy.env.scratchWorkspace = scratch_wks
            else:
                scratch_wks = pScratchWorkspace
                arcpy.env.scratchWorkspace = scratch_wks
                scratchFolder = arcpy.env.scratchFolder 
            arcpy.AddMessage("arcpy.env.scratchWorkspace={}".format(arcpy.env.scratchWorkspace))
            #..arcpy.AddMessage("arcpy.env.scratchFolder={} scratch_wks={}".format(scratchFolder, pScratchWorkspace))
            if((flooddsconfig.debugLevel & 2)==2): arcpy.AddMessage("arcpy.env.scratchFolder={} scratch_wks={}".format(scratchFolder, pScratchWorkspace))
            if((flooddsconfig.debugLevel & 1)==1):
                sMsg = "arcpy.env.scratchFolder={}, scratch_wks={}".format(scratchFolder, scratch_wks)
                arcpy.AddMessage(sMsg)
            #if(scratch_wks==None):
            #    scratch_wks = os.path.join(scratchFolder, "scratch.gdb") 
            #    if(arcpy.Exists(scratch_wks)==False):
            #        arcpy.CreateFileGDB_management(scratchFolder, "scratch.gdb") 
            #    arcpy.env.scratchWorkspace = scratch_wks

            # All raster outputs snap to the HAND raster's grid.
            rasterDescribe = arcpy.Describe(inRasterHAND)
            arcpy.env.snapRaster = rasterDescribe.catalogPath #SnapRaster
            arcpy.env.overwriteOutput = True

            bExists = apwrutils.Utils.makeSureDirExists(inRWKS)
            #filGrdInt = os.path.join(scratchFolder, arcpy.CreateUniqueName('filGrdInt', scratchFolder)) 
            cellSize = arcpy.GetRasterProperties_management(inRasterHAND, "CELLSIZEX") 
            sr = arcpy.Describe(inRasterHAND).spatialReference
        
            # Create the output folder tree for the result rasters.
            #Holdes final raster results (depth grid)
            sDepthRWKS = os.path.join(inRWKS,flooddsconfig.FDN_Depth)   #Depth folder
            bExists = apwrutils.Utils.makeSureDirExists(sDepthRWKS)     #Depth folder
            sWseRWKS = os.path.join(inRWKS, flooddsconfig.FDN_WSE)      #WSE folder
            bExists = apwrutils.Utils.makeSureDirExists(sWseRWKS)       #WSE folder
            sGDepth = os.path.join(inRWKS, flooddsconfig.FND_G_Depth)
            bExists = apwrutils.Utils.makeSureDirExists(sGDepth)
            sGPFZone = os.path.join(inRWKS, flooddsconfig.FND_G_PFZone)
            bExists = apwrutils.Utils.makeSureDirExists(sGPFZone)

            # Main branch: process the HAND raster catchment-by-catchment.
            if(inCat!=None):
                #..Create floodzone featureclass to hold fp polygons for each river
                fcZoneRiver = os.path.join(inFWKS, flooddsconfig.LN_FPZoneRiver)
                if((flooddsconfig.debugLevel & 1) == 1):  arcpy.AddMessage("fcZoneRiver: {}".format(fcZoneRiver))
                if(arcpy.Exists(fcZoneRiver)==False):
                    arcpy.CreateFeatureclass_management(inFWKS, flooddsconfig.LN_FPZoneRiver, "POLYGON", None, None, None, sr)

                fieldsRiver = {flooddsconfig.FN_StreamID:'LONG', flooddsconfig.FN_STEP:'TEXT', 
                      flooddsconfig.FN_GridCode:'LONG', flooddsconfig.FN_DateCreated :'TEXT', 
                      flooddsconfig.FN_FLDESC:'DOUBLE', apwrutils.FN_HYDROCODE:'TEXT'}

                try:
                    ii = apwrutils.Utils.addFields(fcZoneRiver, fieldsRiver)
                    if((flooddsconfig.debugLevel & 1)==1): arcpy.AddMessage("Processing inStep={}. (Fields added={}).".format(inStep,ii))
                   
                except arcpy.ExecuteError:
                    arcpy.AddError(str(arcpy.GetMessages(2)))
                lFieldsRiver = [apwrutils.FN_ShapeAt,flooddsconfig.FN_StreamID, flooddsconfig.FN_STEP, 
                    flooddsconfig.FN_GridCode, flooddsconfig.FN_DateCreated, flooddsconfig.FN_FLDESC, apwrutils.FN_HYDROCODE]
                
                if((flooddsconfig.debugLevel & 1)==1): 
                    sMsg = "Processing raster by each catchment contained in {}".format(inCat)     
                    arcpy.AddMessage(sMsg)
                #  makesure the temp Raster dir exist
                sCatRWKS = os.path.join(inRWKS, "RCat")
                bExists = apwrutils.Utils.makeSureDirExists(sCatRWKS)
                #    maskGrd = arcpy.sa.Polygon
                #apwrutils.Utils.ShowMsg("TobeImplemented....")
                oDesc = arcpy.Describe(inCat) 
                sOIDFld = oDesc.OIDFieldName
                lCatFlds = [apwrutils.FN_ShapeAt, sOIDFld, apwrutils.FN_HYDROID]
                rivID = 0
                sRasters = ""   # semicolon-separated list fed to MosaicToNewRaster
                sp = " " * 2
                fl = ""
                deltaH = 0.0
                #for k in dCatID2RivID:
                #    arcpy.AddMessage("{} -> {}".format(k, dCatID2RivID[k]))
                nCats = arcpy.GetCount_management(inCat)[0] 
                with arcpy.da.SearchCursor(inCat, lCatFlds) as rows:
                    for iRow, row in enumerate(rows):
                        # NOTE(review): time.clock() was removed in Python 3.8;
                        # fine only on the legacy (2.7/ArcMap) runtime.
                        ddt = time.clock()
                        rivID = 0
                        catID = 0
                        try:        #try in row
                            iOID = row[lCatFlds.index(sOIDFld)]
                            catID = row[lCatFlds.index(apwrutils.FN_HYDROID)]
                            if(catID in dCatID2RivID):
                                rivID = dCatID2RivID[catID]
                            else:
                                arcpy.AddMessage("catID {} is not found in dCatID2RiverID".format(catID))
                                rivID = -1

                            oPoly = row[lCatFlds.index(apwrutils.FN_ShapeAt)]
                            oExt = oPoly.extent
                            #sWhere = "{}={}".format(sOIDFld, iOID) 
                            sWhere = "{}={}".format(apwrutils.FN_HYDROID, catID) 
                            # Clip the HAND raster to this catchment (cached on
                            # disk so re-runs skip the ExtractByMask).
                            pHandMasked = os.path.join(sCatRWKS, "cat{}".format(catID))
                            arcpy.env.extent = oExt
                            if(os.path.exists(pHandMasked)==False):
                                fl = "DH{}_{}".format(inStep, catID)
                                if(arcpy.Exists(fl)):
                                   arcpy.Delete_management(fl) 
                                arcpy.MakeFeatureLayer_management(inCat, fl, sWhere)
                                #if((flooddsconfig.debugLevel & 1)==1):  arcpy.AddMessage("PolygonToRaster_conversion -> {},  {} where {}".format(pHandMasked, inCat, sWhere))
                                pHandMask = arcpy.sa.ExtractByMask(inRasterHAND, fl)
                                pHandMask.save(pHandMasked) 
                            else:
                                #flOutFile = arcpy.management.MakeRasterLayer(pHandMasked, "flCat{}".format(rivID))
                                if((flooddsconfig.debugLevel & 8) == 8):  arcpy.AddMessage("{} already existed for catchment {}".format(pHandMasked, sWhere))
                            
                            # Choose the flood depth for this river: per-river DH
                            # when the sentinel was passed, otherwise the constant.
                            if(inDeltaH<0):
                                try:
                                    deltaH = dRivID2DH[rivID]
                                    #arcpy.AddMessage("deltaH = {}".format(deltaH)) 
                                except:
                                    deltaH = float(inDeltaH)
                            else:
                                deltaH = float(inDeltaH)
              
                            #(zFactor,zUnit) = apwrutils.Utils.getZFactorUnit(inRasterHAND)
                            #deltaH = deltaH * zFactor 
                            # Flooded where HAND value <= deltaH.
                            expression = "value <= {}".format(deltaH) 
                            #..save the rivNibble to wse location.
                            #wseRaster = arcpy.sa.Con(inRasterHAND, inRasterHAND, "", expression) 
                            if(arcpy.Exists(inRasterStr)):
                                wseRaster = arcpy.sa.Con(pHandMasked, pHandMasked, inRasterStr, expression)
                            else:  
                                wseRaster = arcpy.sa.Con(pHandMasked, pHandMasked, "", expression)                                                     
                            #..Get the river depth and save the depth grid  '..ye, @1/28/2016 12:12:40 PM on ZYE1
                            sName = "{}_{}_{}_{}{}".format(flooddsconfig.HD_Depth,inStep,flooddsconfig.HD_River, rivID, flooddsconfig.Ext_R) 
                            # Depth = deltaH - HAND (positive inside the floodplain).
                            fpDepth = arcpy.sa.Minus(float(deltaH), wseRaster)
                            sDepthFile = os.path.join(sDepthRWKS,sName)
                            #..arcpy.AddMessage("fpDept={}".format(fpDepth))
                            fpDepth.save(sDepthFile)    # Depth grid.
                            if(arcpy.Exists(inRasterMinLocal)):
                                wseRaster = arcpy.sa.Plus(wseRaster, inRasterMinLocal)
                            sWseName = "{}_{}_{}_{}{}".format(flooddsconfig.HD_WSE, inStep, flooddsconfig.HD_River, rivID, flooddsconfig.Ext_R)
                            wseRaster.save(os.path.join(sWseRWKS, sWseName))                            
                            #..Save the fpDepth
                            # Binary flood-zone raster -> polygons for this river.
                            fpZone4PolyRiver = arcpy.sa.Con(fpDepth, 1, 0, 'value >= 0'  ) 
                            #..arcpy.AddMessage("inStep_{}, rz_{}".format(inStep, inStep))
                            fpZoneTempRiver = os.path.join(scratch_wks, "rz{}_{}".format(inStep ,rivID)) 
                            #..arcpy.AddMessage("fpZoneTempRiver={}".format(fpZoneTempRiver))
                            arcpy.RasterToPolygon_conversion(fpZone4PolyRiver, fpZoneTempRiver, "NO_SIMPLIFY" )
                            sDslvName = sName.split(".")[0]
                            flZoneDslv = "{}DSLV".format(sDslvName)                           
                            if(bConnectedOnly==True):
                                # Keep only floodplain polygons that touch this
                                # river, then clip the depth raster to them.
                                sRivWhere = "{}={}".format(apwrutils.FN_HYDROID, rivID)
                                flZoneTempRiver = "flrz{}_{}".format(inStep,rivID)
                                flRiv = "flrv{}".format(rivID) 
                                arcpy.MakeFeatureLayer_management(inRiv, flRiv, sRivWhere) 
                                arcpy.MakeFeatureLayer_management(fpZoneTempRiver, flZoneTempRiver) 
                                arcpy.SelectLayerByLocation_management(flZoneTempRiver, 'INTERSECT', flRiv)
                                fpZoneTempDslv = os.path.join(scratch_wks, "fpr{}_{}".format(inStep, rivID))
                                arcpy.Dissolve_management(flZoneTempRiver, fpZoneTempDslv, [flooddsconfig.FN_GridCode])
                                sWhereGridCode = "{}>0".format(flooddsconfig.FN_GridCode)
                                arcpy.MakeFeatureLayer_management(fpZoneTempDslv, flZoneDslv, sWhereGridCode)
                                try:
                                    fpDepth = arcpy.sa.ExtractByMask(fpDepth, flZoneDslv)    #pMaskFC)   #flZoneDslv)  # pMaskFC)   #flZoneDslv)
                                    #if save directly to .tif format as extractbymask is applied, the nodata would be presented as '-3.4028234663853E+38', which in other places would not be treated as NODATA (by other functions)
                                    pRaster = arcpy.sa.Plus(fpDepth, 0.0)    
                                    pRaster.save(sDepthFile) 
                                except:
                                    arcpy.AddMessage(trace())
                                    pass
                                    #arcpy.CopyRaster_management(sDepthFile, ssOutFileNew) 
                            else:
                                fpZoneTempDslv = os.path.join(scratch_wks, "fpr{}_{}".format(inStep, rivID))
                                arcpy.Dissolve_management(fpZoneTempRiver, fpZoneTempDslv, [flooddsconfig.FN_GridCode])
                                arcpy.MakeFeatureLayer_management(fpZoneTempDslv, flZoneDslv)
                           
                            if((flooddsconfig.debugLevel & 2)==2): arcpy.AddMessage("sName{}, fpZoneTempDslv={}".format(sName, fpZoneTempDslv) )
                            if(sRasters ==""):
                                sRasters = sName
                            else:
                                sRasters = sRasters + ";" + sName
                            # Append this river's dissolved flood polygons to the
                            # per-river floodzone feature class.
                            sDateCreated = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S") 
                            with arcpy.da.InsertCursor(fcZoneRiver, lFieldsRiver) as inRows:
                                #with arcpy.da.SearchCursor(fpZoneTempDslv, [apwrutils.FN_ShapeAt,flooddsconfig.FN_GridCode]) as prows:
                                with arcpy.da.SearchCursor(flZoneDslv, [apwrutils.FN_ShapeAt,flooddsconfig.FN_GridCode]) as prows:
                                    for prow in prows:
                                        try:
                                            #fieldsRiver = {Shape@, flooddsconfig.FN_StreamID:'LONG', flooddsconfig.FN_STEP:'TEXT', 
                                            #      flooddsconfig.FN_GridCode:'LONG', flooddsconfig.FN_DateCreated :'TEXT', 
                                            #      flooddsconfig.FN_FLDESC:'TEXT', apwrutils.FN_HYDROCODE:'TEXT'}
                                            inRow = []
                                            oShp = prow[0]
                                            inRow.append(oShp)
                                            inRow.append(rivID)    #StreamID
                                            inRow.append(inStep)   #FPStep
                                            inRow.append(prow[1])  #GRidCode
                                            inRow.append(sDateCreated)  #DateCreated
                                            inRow.append(deltaH)   #FPDESC
                                            inRow.append(rivID)    #HYDROCODE
                                            inRows.insertRow(inRow)       
                                        except:
                                            arcpy.AddMessage(trace()) 

                        except arcpy.ExecuteError:   #try in row for Cat
                            sMsg = str(arcpy.GetMessages(2))
                            arcpy.AddError(sMsg)
                        except:
                            arcpy.AddWarning(arcpy.GetMessages(2))
                            sMsg = trace()
                            arcpy.AddMessage(sMsg)
                        finally:   ##try in row - per catchment
                            # Always delete the temporary layers so the next
                            # iteration can reuse the names.
                            if(fl!=""):
                                arcpy.Delete_management(fl)

                            if(flZoneTempRiver!=""):
                                arcpy.Delete_management(flZoneTempRiver)
                            if(flZoneDslv!=""):
                                arcpy.Delete_management(flZoneDslv)

                            if(flRiv!=""):
                                arcpy.Delete_management(flRiv)
                            sMsg = "{} (inStep,dh)=({},{}) {} of {} catchments, {} (rivid={} catid={} dt={})".format(sp, inStep, ("%.2f" % deltaH), (iRow+1), nCats, sWhere, rivID, catID, apwrutils.Utils.GetDSMsg(ddt, "")) 
                            arcpy.AddMessage(sMsg)
                            #if((flooddsconfig.debugLevel & 1)==1): 
                            #    sMsg = "Done, processing raster on catchment {} (rivid={} catid={} dt={})".format(sWhere, rivID, catID, apwrutils.Utils.GetDSMsg(ddt)) 
                            #    arcpy.AddMessage(sMsg)

                # Single-process mode: mosaic all per-river depth grids into one
                # step-wide depth raster (MAXIMUM resolves catchment overlaps).
                if (nProcessors<=1) :            
                    try:
                        arcpy.env.extent = inRasterHAND                      
                        arcpy.env.workspace = sDepthRWKS
                        arcpy.env.mask = inRasterHAND
                        # sDepthName = "{}{}{}".format(flooddsconfig.HD_Depth,inStep,flooddsconfig.Ext_R)  # did not work when .tif is used, it would produce a mosaic ds with Nodata being filled with -128 or 0.
                        sDepthName = "{}_{}.tif".format(flooddsconfig.HD_Depth,inStep)
                        sCellSize = "" 
                        if(apwrutils.Utils.isNumeric(cellSize)==True):
                             sCellSize = cellSize 

                        arcpy.MosaicToNewRaster_management(sRasters, sGDepth, sDepthName, sr, pixel_type="16_BIT_SIGNED", cellsize=sCellSize, number_of_bands="1", mosaic_method="MAXIMUM", mosaic_colormap_mode="FIRST")
                        #arcpy.MosaicToNewRaster_management(sRasters, sGDepth, sDepthName, sr, pixel_type="32_BIT_FLOAT", cellsize=sCellSize, number_of_bands="1", mosaic_method="MAXIMUM", mosaic_colormap_mode="FIRST")
                        #arcpy.MosaicToNewRaster_management(sRasters, sGDepth, sDepthName, sr, "32_BIT_FLOAT", cellSize, "1", "LAST","FIRST")
                        flDepthName = os.path.join(sGDepth, sDepthName)
                           
                        if(flooddsconfig.Ext_R!=""):
                             fpDepthF = arcpy.sa.SetNull(flDepthName, flDepthName, '"value" <= 0')
                             fpDepthF.save(flDepthName)
                        else:
                             fpDepthF = flDepthName
                
                        if((flooddsconfig.debugLevel & 1)==1): 
                            sMsg = "mosaic raster depth grid: fpDepthF={}".format(fpDepthF)
                            arcpy.AddMessage(sMsg)

                    except arcpy.ExecuteError:   #try in row for Cat
                        sMsg = str(arcpy.GetMessages(2))
                        arcpy.AddWarning(sMsg)
                    except:
                        arcpy.AddWarning(arcpy.GetMessages(2))
                        sMsg = trace()
                        arcpy.AddMessage(sMsg)
            else:
                 # NOTE(review): this branch references river3DInt and filGrdInt,
                 # which are never defined in this method (filGrdInt is only in a
                 # commented-out line above) — it would raise NameError and be
                 # caught by the outer except. Likely dead or broken code; confirm
                 # whether the inCat==None path is ever exercised.
                 isNullGrd = arcpy.sa.IsNull(river3DInt)
                 nibSrc = arcpy.sa.Con(isNullGrd, river3DInt, "-99999", "Value = 0")
                 nibLevel = arcpy.sa.Nibble(nibSrc, river3DInt)  #, "ALL_VALUES") 
                 fpDepth = arcpy.sa.Minus(nibLevel, filGrdInt)
                 fpDepthF = arcpy.sa.Con(fpDepth, fpDepth, "#", '"value" >= 0')     #fpDepth>0, return fpDepth, else null.
            
            # Build the step-wide floodplain-zone polygons from the mosaicked
            # depth raster and record them in the FPZone feature class.
            fpZoneName = flooddsconfig.LN_FPZone
            fcZoneRslt = os.path.join(inFWKS, fpZoneName)
            fpDepthRName = "{}_{}".format(flooddsconfig.LN_FPZone,inStep) 
            fpRaster = os.path.join(sGPFZone, fpDepthRName)
            if(nProcessors<=1):
                if((flooddsconfig.debugLevel & 1)==1): arcpy.AddMessage(fpDepthF)
                fpZone4Poly = arcpy.sa.Con(fpDepthF, 1, 0, '"value" >= 0')            
                fpZoneTemp = os.path.join(scratch_wks, "r{}".format(inStep)) 
                arcpy.RasterToPolygon_conversion(fpZone4Poly, fpZoneTemp, "NO_SIMPLIFY")
                
                if(inRiv!=None):
                    #try to remove the floodplain polygons not connected with the inRiv
                    flZoneOnRiv = ""
                    try:
                        flZoneOnRiv = "flzr{}".format(inStep)    #Zone that overlay with river lines.
                        if(arcpy.Exists(flZoneOnRiv)==True): arcpy.Delete_management(flZoneOnRiv)
                        if(arcpy.Exists(fpZoneTemp)):
                            arcpy.MakeFeatureLayer_management(fpZoneTemp, flZoneOnRiv)    
                            arcpy.SelectLayerByLocation_management(flZoneOnRiv, "INTERSECT", inRiv)
                            #Connected Raster Area:
                            sRasterConn = os.path.join(scratchFolder, "C{}".format(inStep))
                            arcpy.PolygonToRaster_conversion(flZoneOnRiv, flooddsconfig.FN_GridCode, sRasterConn,"","",cellSize)
                            fpZone4Poly = arcpy.sa.Con(sRasterConn, fpZone4Poly)
                            fpZone4Poly = arcpy.sa.SetNull(fpZone4Poly, fpZone4Poly, '"value" = 0')
                            arcpy.RasterToPolygon_conversion(fpZone4Poly, fpZoneTemp, "NO_SIMPLIFY")
                            try:
                                del fpZone4Poly
                            except:
                                pass
                            try:
                                del flZoneOnRiv
                            except:
                                pass 

                    except arcpy.ExecuteError:
                        sMsg = "{}, {}".format(arcpy.GetMessages(2), trace())
                        arcpy.AddMessage(sMsg)
                    except:
                        arcpy.AddMessage("try to remove floodplain not intersecting with a river. {}".format(trace()))
                    finally:
                        pass

                fpZoneTempDslv = os.path.join(scratch_wks, "FPD{}".format(inStep))
                arcpy.Dissolve_management(fpZoneTemp, fpZoneTempDslv, [flooddsconfig.FN_GridCode])
                if(arcpy.Exists(fcZoneRslt)==False):
                    arcpy.CreateFeatureclass_management(inFWKS, fpZoneName, "POLYGON", fpZoneTempDslv, None, None, sr)   
        
                oDesc = arcpy.Describe(fcZoneRslt)
                fields = {flooddsconfig.FN_FLDESC:'DOUBLE', flooddsconfig.FN_STEP:'LONG', 
                          flooddsconfig.FN_GridCode:'LONG', flooddsconfig.FN_DateCreated :'TEXT'}

                apwrutils.Utils.addFields(fcZoneRslt, fields)
                # NOTE(review): falsy inDeltaH (0.0) is replaced with a datetime
                # object that is then inserted into a DOUBLE field — confirm this
                # is intentional; it looks like a copy/paste slip.
                if(not inDeltaH): inDeltaH = datetime.datetime.now()
                sDateCreated = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S") 
                shpFldName = apwrutils.Utils.GetShapeFieldName(fcZoneRslt)            
                lFieldsZR = [apwrutils.FN_ShapeAt,flooddsconfig.FN_STEP,flooddsconfig.FN_FLDESC,flooddsconfig.FN_GridCode, flooddsconfig.FN_DateCreated]
                lFieldsDslv = [apwrutils.FN_ShapeAt, flooddsconfig.FN_GridCode] 
                with arcpy.da.InsertCursor(fcZoneRslt, lFieldsZR) as inRows:
                    with arcpy.da.SearchCursor(fpZoneTempDslv,lFieldsDslv) as rows:
                        for row in rows:
                            try:
                                inRow = []
                                oShp = row[0]
                                inRow.append(oShp)
                                inRow.append(inStep)
                                inRow.append(inDeltaH)
                                inRow.append(row[lFieldsDslv.index(flooddsconfig.FN_GridCode)])
                                inRow.append(sDateCreated)
                                inRows.insertRow(inRow)       
                            except:
                                arcpy.AddMessage(trace())  
        
                # ExtractByMask - extract the fpZoneF (floodplain (depth) zone with in float)   
                fpDepthFExt = arcpy.sa.ExtractByMask(fpDepthF, fcZoneRslt) 
                fpDepthFExt.save(fpRaster)
                try:
                    del fpDepthFExt
                    del fpZone4Poly
                except:
                    pass 
        except:
            sOK = trace()
            arcpy.AddMessage(sOK)
            
        finally:
            if((flooddsconfig.debugLevel & 1)==1):  arcpy.AddMessage("floodplainfromhand Cleaning up...")
            arcpy.ResetEnvironments()

        if(sOK == apwrutils.C_OK):
            tReturn = (sOK, fcZoneRslt, fpRaster)
        else:
            # NOTE(review): (sOK) is NOT a tuple — parentheses without a comma
            # just group, so the error path returns the bare trace string while
            # the success path returns a 3-tuple; callers must handle both.
            tReturn = (sOK)

        return tReturn
예제 #23
0
def GetServiceIDListsAndNonOverlaps(day, start_sec, end_sec, DepOrArr, Specific=False):
    '''Gather the GTFS service_id lists for the analysis day and, when the
    time window requires it, for the adjacent days as well.  Also merge the
    non-overlapping date-range pairs found across all three days and warn the
    user about them.  Returns (today_ids, yesterday_ids, tomorrow_ids).'''

    # These helpers initialize the module-level ConsiderYesterday /
    # ConsiderTomorrow flags the first time they are needed: early windows may
    # still contain trips that started yesterday, and windows that span
    # midnight pull in tomorrow's trips.
    if not ConsiderYesterday:
        ShouldConsiderYesterday(start_sec, DepOrArr)
    if not ConsiderTomorrow:
        ShouldConsiderTomorrow(end_sec)

    # Identify the adjacent days: weekday names wrap around the week, while a
    # specific date shifts by one calendar day.
    if Specific == False:
        day_idx = days.index(day)
        Yesterday = days[(day_idx - 1) % 7]
        Tomorrow = days[(day_idx + 1) % 7]
    else:
        one_day = datetime.timedelta(days=1)
        Yesterday = day - one_day
        Tomorrow = day + one_day

    # Defaults cover the case where an adjacent day is not considered.
    serviceidlist_yest, nonoverlappingsids_yest = [], []
    serviceidlist_tom, nonoverlappingsids_tom = [], []
    try:
        # Service ids (and non-overlapping date-range pairs) for each day.
        serviceidlist, nonoverlappingsids = MakeServiceIDList(day, Specific)
        if ConsiderYesterday:
            serviceidlist_yest, nonoverlappingsids_yest = MakeServiceIDList(Yesterday, Specific)
        if ConsiderTomorrow:
            serviceidlist_tom, nonoverlappingsids_tom = MakeServiceIDList(Tomorrow, Specific)
    except:
        arcpy.AddError("Error getting list of service_ids for time window.")
        raise

    # Warn when no service_id covers the requested day at all.
    if not (serviceidlist or serviceidlist_yest or serviceidlist_tom):
        arcpy.AddWarning("There is no transit service during this time window. \
No service_ids cover the weekday or specific date you have selected.")

    # Merge the non-overlap pairs from all days, de-duplicate, and keep at
    # most 10 for the warning message.
    combined = nonoverlappingsids + nonoverlappingsids_yest + nonoverlappingsids_tom
    nonoverlappingsids = list(set(combined))[:10]

    if nonoverlappingsids:
        overlapwarning = u"Warning! The trips being counted in this analysis \
have service_ids with non-overlapping date ranges in your GTFS dataset's \
calendar.txt file(s). As a result, your analysis might double count the number \
of trips available if you are analyzing a generic weekday instead of a specific \
date.  This is especially likely if the non-overlapping pairs are in the same \
GTFS dataset.  Please check the date ranges in your calendar.txt file(s), and \
consider running this analysis for a specific date instead of a generic weekday. \
See the User's Guide for further assistance.  Date ranges do not overlap in the \
following pairs of service_ids: "
        if len(nonoverlappingsids) == 10:
            overlapwarning += "(Showing the first 10 non-overlaps) "
        overlapwarning += str(nonoverlappingsids)
        arcpy.AddWarning(overlapwarning)

    return serviceidlist, serviceidlist_yest, serviceidlist_tom
def mainFunction(
    datasets, inputGeodatabase, outputGeodatabase
):  # Get parameters from ArcGIS Desktop tool by seperating by comma e.g. (var1 is 1st parameter,var2 is 2nd parameter,var3 is 3rd parameter)
    """Merge matching feature classes from a geodatabase into per-dataset outputs.

    For each "datasetName:shapeType" entry in the semicolon-delimited *datasets*
    string, this finds every feature class in *inputGeodatabase* whose name
    contains the dataset name and whose geometry matches the shape type, stamps
    each with a "Forest" field (first underscore-delimited token of the feature
    class name) and a "Tenure" field (base name of the input geodatabase), and
    merges the matches into *outputGeodatabase* under the dataset name.

    datasets -- semicolon-delimited list of "datasetName:shapeType" pairs.
    inputGeodatabase -- workspace searched for candidate feature classes.
    outputGeodatabase -- workspace that receives the merged outputs.
    """
    try:
        # --------------------------------------- Start of code --------------------------------------- #

        # Get the list of datasets.
        # Changed from the deprecated string.split(...) module function to the
        # str method: identical behavior, and also valid on Python 3.
        datasetsList = datasets.split(";")

        # For each of the datasets in the list
        for dataset in datasetsList:
            # Get dataset name and type, e.g. "roads:Polyline" -> ["roads", "Polyline"].
            # NOTE(review): assumes every entry contains a ":"; an entry without
            # one raises IndexError at datasetType[1] below -- confirm inputs.
            datasetType = dataset.split(":")

            # Get a list of the feature classes in the database
            arcpy.env.workspace = inputGeodatabase
            featureClassList = arcpy.ListFeatureClasses()
            datasetMergeList = []
            # Go through each of the datasets in the database
            for featureClass in featureClassList:
                # If feature class is present matching the name
                if (datasetType[0].lower() in featureClass.lower()):
                    # Check dataset type
                    desc = arcpy.Describe(featureClass)
                    shapeType = desc.shapeType
                    if (shapeType.lower() == (datasetType[1].lower())):
                        # Add in the forest and tenure info.
                        # Forest name = first "_"-delimited token of the FC name.
                        forestName = desc.name.split("_")
                        descWorkspace = arcpy.Describe(inputGeodatabase)

                        addForest = 0
                        addTenure = 0
                        # Check if field is already present
                        fieldList = arcpy.ListFields(featureClass)
                        for field in fieldList:
                            if (field.name == "Forest"):
                                addForest += 1
                            if (field.name == "Tenure"):
                                addTenure += 1

                        if (addForest == 0):
                            arcpy.AddField_management(featureClass, "Forest",
                                                      "TEXT", "", "", "", "",
                                                      "NULLABLE",
                                                      "NON_REQUIRED", "")
                        if (addTenure == 0):
                            arcpy.AddField_management(featureClass, "Tenure",
                                                      "TEXT", "", "", "", "",
                                                      "NULLABLE",
                                                      "NON_REQUIRED", "")

                        # Stamp provenance into the two fields.
                        arcpy.CalculateField_management(
                            featureClass, "Forest",
                            "\"" + forestName[0] + "\"", "PYTHON_9.3", "")
                        arcpy.CalculateField_management(
                            featureClass, "Tenure",
                            "\"" + descWorkspace.baseName + "\"", "PYTHON_9.3",
                            "")

                        # Add to merge list
                        datasetMergeList.append(featureClass)

            # If dataset to be merged
            if (len(datasetMergeList) > 0):
                arcpy.AddMessage("Merging and creating " + datasetType[0] +
                                 " dataset...")
                arcpy.Merge_management(
                    datasetMergeList,
                    os.path.join(outputGeodatabase, datasetType[0]), "")
            else:
                arcpy.AddWarning("No data for " + datasetType[0] +
                                 " dataset...")

        # --------------------------------------- End of code --------------------------------------- #

        # If called from gp tool return the arcpy parameter
        # NOTE(review): 'output' is never assigned in this function; it must be
        # a module-level variable set elsewhere -- verify before relying on it.
        if __name__ == '__main__':
            # Return the output if there is any
            if output:
                arcpy.SetParameterAsText(1, output)
        # Otherwise return the result
        else:
            # Return the output if there is any
            if output:
                return output
        # Logging
        if (enableLogging == "true"):
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logging.FileHandler.close(logMessage)
            logger.removeHandler(logMessage)
        pass
    # If arcpy error
    except arcpy.ExecuteError:
        # Build and show the error message
        errorMessage = arcpy.GetMessages(2)
        arcpy.AddError(errorMessage)
        # Logging
        if (enableLogging == "true"):
            # Log error
            logger.error(errorMessage)
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logging.FileHandler.close(logMessage)
            logger.removeHandler(logMessage)
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage)
    # If python error
    except Exception as e:
        errorMessage = ""
        # Build and show the error message by joining all exception args.
        # NOTE: unicode() is the Python 2 builtin; this module targets Python 2.
        for i in range(len(e.args)):
            if (i == 0):
                errorMessage = unicode(e.args[i]).encode('utf-8')
            else:
                errorMessage = errorMessage + " " + unicode(
                    e.args[i]).encode('utf-8')
        arcpy.AddError(errorMessage)
        # Logging
        if (enableLogging == "true"):
            # Log error
            logger.error(errorMessage)
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logging.FileHandler.close(logMessage)
            logger.removeHandler(logMessage)
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage)
예제 #25
0
        file_size_dl += len(buffer)
        f.write(buffer)
        status = r"%10d  [%3.2f%%]" % (file_size_dl,
                                       file_size_dl * 100. / file_size)
        status = status + chr(8) * (len(status) + 1)
        print status,

    f.close()


#build types list

#download
# Map of tiles to download, keyed by site id: {sid: county}.
download = getDownloadDict(inputData)
# Count how many data types the user enabled in the tool dialog.
# (Replaces a manual counter loop with sum(); result is identical.)
numYes = sum(1 for flag in typeDict.values() if flag == "true")

arcpy.AddMessage("Downloading %d Tiles" % (len(download) * numYes))
arcpy.SetProgressor("step", "Downloading Tiles", 0, len(download) * numYes, 1)
# Loop variable renamed from 'type' to avoid shadowing the builtin.
for tile_type, yes in typeDict.iteritems():
    if (yes == "true"):
        for sid, county in download.iteritems():
            try:
                getZip("http://gis3.oit.ohio.gov/ZIPARCHIVES/" +
                       tileDict[tile_type][0] + "/" + tile_type + "/" + county.rstrip() +
                       "/" + sid + "_" + tileDict[tile_type][1] + ".zip")
            except urllib2.URLError:
                # Best-effort: warn and keep downloading the remaining tiles.
                arcpy.AddWarning("Error downloading %s-%s" % (sid, county))
            arcpy.SetProgressorPosition()
# Create the empty mosaic dataset all input rasters will be loaded into.
solarMosaic = arcpy.CreateMosaicDataset_management(scratchGDB, md, coordSys)

# Loop through the geodatabases and add rasters to the mosaic dataset
for gdb in inGDB.split(';'):
    try:
        # Process: Add Rasters To Mosaic Dataset
        arcpy.AddMessage('Adding rasters from %s to mosaic dataset...' % gdb)
        arcpy.AddRastersToMosaicDataset_management(
            solarMosaic, "Raster Dataset", gdb, "UPDATE_CELL_SIZES",
            "UPDATE_BOUNDARY", "NO_OVERVIEWS", "", "0", "1500", "", "",
            "SUBFOLDERS", "OVERWRITE_DUPLICATES", "NO_PYRAMIDS",
            "NO_STATISTICS", "NO_THUMBNAILS", "", "NO_FORCE_SPATIAL_REFERENCE")
        Log.write('Successfully added rasters from %s to mosaic dataset. \n' %
                  gdb)
    # Narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt still
    # propagate; failures remain best-effort (warn, log, continue).
    except Exception:
        arcpy.AddWarning('Failed to add rasters from %s.' % gdb)
        Log.write('Failed to add rasters from %s to mosaic dataset. \n' % gdb)

# Process: Import Mosaic Dataset Geometry
arcpy.AddMessage('Importing footprint geometry...')
arcpy.ImportMosaicDatasetGeometry_management(solarMosaic, "FOOTPRINT", "Name",
                                             inFprints, joinFld)

# Process: Copy final output raster dataset
try:
    arcpy.AddMessage(
        'Exporting final raster dataset.  This may take awhile...')
    rd = outGDB + os.sep + mosaicName
    arcpy.CopyRaster_management(solarMosaic, rd)
# Narrowed from a bare 'except:' (see note above on the loop handler).
except Exception:
    arcpy.AddWarning('Unable to export raster dataset.')
예제 #27
0
## for each orientation point
# extract xyz, azi, inc
# validate azi and inc values
with arcpy.da.SearchCursor(
        inPts,
    ['OID@', 'POINT_X', 'POINT_Y', 'Z', aziField, incField]) as cursor:
    for row in cursor:
        # Object ID and 3D location of this orientation measurement.
        oid = row[0]
        xyz = [row[1], row[2], row[3]]
        # Adjust the recorded azimuth by the grid declination.
        # NOTE(review): the original author was unsure of the sign -- confirm
        # whether gridDec should be added or subtracted for this dataset.
        azi = row[4] + gridDec  ############### or is it minus?
        inc = row[5]
        addMsgAndPrint('OID=' + str(oid) + ' strike=' + str(azi) + ' dip=' +
                       str(inc))
        if inc == 90:
            # Nudge an exactly-vertical dip off 90 degrees -- presumably the
            # plane/raster construction below cannot handle 90 exactly; verify.
            arcpy.AddWarning('  Substituting dip = 89.99')
            inc = 89.99
        #addMsgAndPrint(str(xyz))
        # Build the projected outcrop raster for this measurement, then drop
        # the intermediate control points.
        coordsList = makeCoordsList(xyz, azi, inc, radius)
        controlPoints = makeControlPoints(dem, coordsList, scratchgdb)
        outCon = makeOutcropRaster(dem, controlPoints, inc, scratchgdb)
        testAndDelete(controlPoints)
        # Output raster name encodes OID, azimuth, and inclination,
        # e.g. "OID12_135_30".
        planeName = 'OID' + str(oid) + '_' + str(int(azi)) + '_' + str(
            int(inc))
        saveRaster = os.path.join(scratchgdb, planeName)
        testAndDelete(saveRaster)
        outCon.save(saveRaster)
        ## import existing layer file, reset source, rename to source
        addMsgAndPrint('  inserting new layer')
        # Layer template is expected to sit next to the script on disk.
        lyrFile = os.path.join(os.path.dirname(sys.argv[0]),
                               'ProjectedBedding.lyr')
예제 #28
0
def runTool(FCs, SQLDbase, dayString, start_time, end_time, DepOrArrChoice):
    """Count transit service at stops and write statistics into feature classes.

    For each Step-1 feature class (each associated with a single route_id /
    direction_id pair), query the preprocessed GTFS SQLite database for trips
    serving that pair during the selected day and time window, then add and
    fill NumTrips, NumTripsPerHr, MaxWaitTime, and AvgHeadway fields in the
    feature class.

    Parameters:
        FCs: semicolon-delimited string of Step-1 feature class paths.
        SQLDbase: path to the SQLite database of preprocessed GTFS data.
        dayString: generic weekday name or a specific date to analyze.
        start_time, end_time: "HH:MM" window bounds; an empty string defaults
            to "00:00" / "23:59" respectively.
        DepOrArrChoice: whether to count departures or arrivals at stops.
    """

    def RetrieveStatsForStop(stop_id, rtdirtuple):
        '''For a given stop, query the stoptimedict {stop_id: [[trip_id, stop_time]]}
        and return the NumTrips, NumTripsPerHr, MaxWaitTime, and AvgHeadway given a
        specific route_id and direction'''

        # Closes over stoptimedict_rtdirpair, TimeWindowLength, start_sec,
        # and end_sec, which are assigned later in the enclosing function.
        try:
            stoptimedict = stoptimedict_rtdirpair[rtdirtuple]
        except KeyError:
            # We will get a KeyError if there were no trips found for the route/direction
            # pair, which usually happens if the wrong SQL database was selected.
            stoptimedict = {}

        # Make a list of stop_times
        StopTimesAtThisPoint = []
        try:
            for trip in stoptimedict[stop_id]:
                StopTimesAtThisPoint.append(trip[1])
        except KeyError:
            pass
        StopTimesAtThisPoint.sort()

        # Calculate the number of trips
        NumTrips = len(StopTimesAtThisPoint)
        NumTripsPerHr = float(NumTrips) / TimeWindowLength

        # Get the max wait time and the average headway
        MaxWaitTime = BBB_SharedFunctions.CalculateMaxWaitTime(StopTimesAtThisPoint, start_sec, end_sec)
        AvgHeadway = BBB_SharedFunctions.CalculateAvgHeadway(StopTimesAtThisPoint)

        return NumTrips, NumTripsPerHr, MaxWaitTime, AvgHeadway

    try:
        # ------ Get input parameters and set things up. -----
        try:
            OverwriteOutput = arcpy.env.overwriteOutput # Get the orignal value so we can reset it.
            arcpy.env.overwriteOutput = True

            BBB_SharedFunctions.CheckArcVersion(min_version_pro="1.2")

            # Stops and Polygons from Step 1 (any number and route combo)
            FCList = FCs.split(";")
            # Remove single quotes ArcGIS puts in if there are spaces in the filename.
            for d in FCList:
                if d[0] == "'" and d[-1] == "'":
                    loc = FCList.index(d)
                    FCList[loc] = d[1:-1]

            # Get list of field names from the input data and check that the required ones are there
            FieldNames = {}
            RequiredFields = ["stop_id", "route_id", "direction_id"]
            for FC in FCList:
                Fields = arcpy.ListFields(FC)
                FieldNames[FC] = [f.name for f in Fields]
                for field in RequiredFields:
                    if not field in FieldNames[FC]:
                        arcpy.AddError("Feature class %s does not have the required \
fields %s. Please choose a valid feature class." % (FC, str(RequiredFields)))
                        raise BBB_SharedFunctions.CustomError

            # SQL database of preprocessed GTFS from Step 1.
            # Also stashed on BBB_SharedFunctions so its helpers share them.
            conn = BBB_SharedFunctions.conn = sqlite3.connect(SQLDbase)
            c = BBB_SharedFunctions.c = conn.cursor()

            Specific, day = BBB_SharedFunctions.CheckSpecificDate(dayString)
            # For field names in the output file
            if Specific:
                dayshort = BBB_SharedFunctions.days[day.weekday()][0:3]
            else:
                dayshort = dayString[0:3]

            if start_time == "":
                start_time = "00:00"
            start_time_pretty = start_time.replace(":", "") # For field names in the output file
            if end_time == "":
                end_time = "23:59"
            end_time_pretty = end_time.replace(":", "") # For field names in the output file
            start_sec, end_sec = BBB_SharedFunctions.ConvertTimeWindowToSeconds(start_time, end_time)
            # Window length in hours, used as the denominator for trips/hour.
            TimeWindowLength = (end_sec - start_sec) / 3600

            # Does the user want to count arrivals or departures at the stops?
            DepOrArr = BBB_SharedFunctions.CleanUpDepOrArr(DepOrArrChoice)

        except:
            arcpy.AddError("Error getting inputs.")
            raise


        # ----- Get list of route_ids and direction_ids to analyze from input files -----
        try:
            # We just check the first line in each file for this information.
            FC_route_dir_dict = {} # {FC: [route_id, direction_id]}
            route_dir_list = [] # [[route_id, direction_id], ...]
            for FC in FCList:
                with arcpy.da.SearchCursor(FC, ["route_id", "direction_id"]) as cur:
                    rt_dir = cur.next()
                route_dir_pair = [rt_dir[0], rt_dir[1]]
                FC_route_dir_dict[FC] = route_dir_pair
                if not route_dir_pair in route_dir_list:
                    route_dir_list.append(route_dir_pair)

        except:
            arcpy.AddError("Error getting route_id and direction_id values from input feature classes.")
            raise


        # ----- Get trips associated with route and direction -----

        try:
            arcpy.AddMessage("Getting list of trips...")

            # Get the service_ids serving the correct days
            serviceidlist, serviceidlist_yest, serviceidlist_tom = \
                BBB_SharedFunctions.GetServiceIDListsAndNonOverlaps(day, start_sec, end_sec, DepOrArr, Specific)

            trip_route_dict = {} #{(route_id, direction_id): [trip_id, trip_id,..]}
            trip_route_dict_yest = {}
            trip_route_dict_tom = {}
            for rtpair in route_dir_list:
                key = tuple(rtpair)
                route_id = rtpair[0]
                direction_id = rtpair[1]

                # Get list of trips
                # Ignore direction if this route doesn't have a direction
                # NOTE(review): a direction_id of 0 is falsy and takes the
                # no-direction branch -- confirm that is intended, since 0 is a
                # valid GTFS direction_id value.
                if direction_id:
                    # NOTE(review): values are interpolated directly into the
                    # SQL text; tolerable only because they come from the
                    # tool's own preprocessed database -- parameterized
                    # queries ('?' placeholders) would be safer.
                    triproutefetch = '''
                        SELECT trip_id, service_id FROM trips
                        WHERE route_id='%s'
                        AND direction_id=%s
                        ;''' % (route_id, direction_id)
                else:
                    triproutefetch = '''
                        SELECT trip_id, service_id FROM trips
                        WHERE route_id='%s'
                        ;''' % route_id
                c.execute(triproutefetch)
                triproutelist = c.fetchall()

                if not triproutelist:
                    arcpy.AddWarning("Your GTFS dataset does not contain any trips \
corresponding to Route %s and Direction %s. Please ensure that \
you have selected the correct GTFS SQL file for this input file or that your \
GTFS data is good. Output fields will be generated, but \
the values will be 0 or <Null>." % (route_id, str(direction_id)))

                for triproute in triproutelist:
                    # Only keep trips running on the correct day
                    if triproute[1] in serviceidlist:
                        trip_route_dict.setdefault(key, []).append(triproute[0])
                    if triproute[1] in serviceidlist_tom:
                        trip_route_dict_tom.setdefault(key, []).append(triproute[0])
                    if triproute[1] in serviceidlist_yest:
                        trip_route_dict_yest.setdefault(key, []).append(triproute[0])

                if not trip_route_dict and not trip_route_dict_tom and not trip_route_dict_yest:
                    arcpy.AddWarning("There is no service for route %s in direction %s \
on %s during the time window you selected. Output fields will be generated, but \
the values will be 0 or <Null>." % (route_id, str(direction_id), str(day)))

        except:
            arcpy.AddError("Error getting trips associated with route.")
            raise


        #----- Query the GTFS data to count the trips at each stop -----
        try:
            arcpy.AddMessage("Calculating the number of transit trips available during the time window...")

            frequencies_dict = BBB_SharedFunctions.MakeFrequenciesDict()

            # {(route_id, direction_id): {stop_id: [[trip_id, stop_time]]}},
            # combining today's trips with spillover from yesterday/tomorrow.
            stoptimedict_rtdirpair = {}
            for rtdirpair in list(set([rt for rt in list(trip_route_dict.keys()) + list(trip_route_dict_yest.keys()) + list(trip_route_dict_tom.keys())])):

                # Get the stop_times that occur during this time window
                stoptimedict = {}
                stoptimedict_yest = {}
                stoptimedict_tom = {}
                try:
                    triplist = trip_route_dict[rtdirpair]
                    stoptimedict = BBB_SharedFunctions.GetStopTimesForStopsInTimeWindow(start_sec, end_sec, DepOrArr, triplist, "today", frequencies_dict)
                except KeyError: # No trips
                    pass
                try:
                    triplist_yest = trip_route_dict_yest[rtdirpair]
                    stoptimedict_yest = BBB_SharedFunctions.GetStopTimesForStopsInTimeWindow(start_sec, end_sec, DepOrArr, triplist_yest, "yesterday", frequencies_dict)
                except KeyError: # No trips
                    pass
                try:
                    triplist_tom = trip_route_dict_tom[rtdirpair]
                    stoptimedict_tom = BBB_SharedFunctions.GetStopTimesForStopsInTimeWindow(start_sec, end_sec, DepOrArr, triplist_tom, "tomorrow", frequencies_dict)
                except KeyError: # No trips
                    pass

                # Combine the three dictionaries into one master
                for stop in stoptimedict_yest:
                    stoptimedict[stop] = stoptimedict.setdefault(stop, []) + stoptimedict_yest[stop]
                for stop in stoptimedict_tom:
                    stoptimedict[stop] = stoptimedict.setdefault(stop, []) + stoptimedict_tom[stop]

                stoptimedict_rtdirpair[rtdirpair] = stoptimedict

                # Add a warning if there is no service.
                if not stoptimedict:
                    arcpy.AddWarning("There is no service for route %s in direction %s \
on %s during the time window you selected. Output fields will be generated, but \
the values will be 0 or <Null>." % (rtdirpair[0], str(rtdirpair[1]), dayString))

        except:
            arcpy.AddError("Error counting arrivals or departures at stop during time window.")
            raise


        #----- Write to output -----

        arcpy.AddMessage("Writing output...")

        try:
            # Prepare the fields we're going to add to the feature classes,
            # e.g. "NumTrips_Wed_0800_1700".
            ending = "_" + dayshort + "_" + start_time_pretty + "_" + end_time_pretty
            fields_to_fill = ["NumTrips" + ending, "NumTripsPerHr" + ending, "MaxWaitTime" + ending, "AvgHeadway" + ending]
            fields_to_read = ["stop_id", "route_id", "direction_id"] + fields_to_fill
            field_type_dict = {"NumTrips" + ending: "Short", "NumTripsPerHr" + ending: "Double", "MaxWaitTime" + ending: "Short", "AvgHeadway" + ending: "Short"}

            for FC in FCList:
                # We probably need to add new fields for our calculations, but if the field
                # is already there, don't add it because we'll overwrite it.
                for field in fields_to_fill:
                    if field not in FieldNames[FC]:
                        arcpy.management.AddField(FC, field, field_type_dict[field])
                with arcpy.da.UpdateCursor(FC, fields_to_read) as cur2:
                    for row in cur2:
                        rtpairtuple = (row[1], row[2]) # (route_id, direction_id)
                        stop = row[0]
                        NumTrips, NumTripsPerHr, MaxWaitTime, AvgHeadway = RetrieveStatsForStop(stop, rtpairtuple)
                        row[3] = NumTrips
                        row[4] = NumTripsPerHr
                        row[5] = MaxWaitTime
                        row[6] = AvgHeadway
                        cur2.updateRow(row)

        except:
            arcpy.AddError("Error writing output to feature class(es).")
            raise

        arcpy.AddMessage("Finished!")
        arcpy.AddMessage("Calculated trip counts, frequency, max wait time, and \
headway were written to the following fields in your input feature class(es):")
        for field in fields_to_fill:
            arcpy.AddMessage("- " + field)

        # Tell the tool that this is output. This will add the output to the map.
        arcpy.SetParameterAsText(6, FCs)

    except BBB_SharedFunctions.CustomError:
        # Error details were already reported where the CustomError was raised.
        arcpy.AddError("Failed to calculate transit statistics for this route and time window.")
        pass

    except:
        arcpy.AddError("Failed to calculate transit statistics for this route and time window.")
        raise

    finally:
        # NOTE(review): if the very first setup statement raised,
        # OverwriteOutput was never assigned and this line itself raises
        # NameError -- consider initializing it before the outer try.
        arcpy.env.overwriteOutput = OverwriteOutput
예제 #29
0
 # Roughness values (ruw1-ruw5) and the NORMAAL value read from the
 # section ("vak") record.
 ruw1 = vak[5]
 ruw2 = vak[6]
 ruw3 = vak[7]
 ruw4 = vak[8]
 ruw5 = vak[9]
 NORMAAL = vak[10]
 kolommen = ["SHAPE@XY", kol]
 were = kol +" LIKE '%_prfl'"
 #---------------------------------------------------------
 # Check whether the values read from the section (VAK) columns are usable:
 # ruw1-ruw5 must be present when there are multiple slope segments.
 # Earlier steps validated this, so the values are assumed correct here.
 # A valid roughness lies in [0.5, 1.0]; anything else falls back to the
 # default 'ruwheid'.
 # BUG FIX: the upper-bound checks for ruw2/ruw3/ruw4 previously compared
 # ruw1 (copy-paste error); each check now tests its own variable.
 if ruw1 >= 0.5 and ruw1 <= 1.0:
     arcpy.AddMessage("ruw1 = ok")
 else:
     arcpy.AddWarning("ruw1 = "+str(ruw1)+" = Niet ingevoerd! wordt op 1 gezet!")
     ruw1 = ruwheid
 if ruw2 >= 0.5 and ruw2 <= 1.0:
     arcpy.AddMessage("ruw2 = ok")
 else:
     arcpy.AddWarning("ruw2 = "+str(ruw2)+" = Niet ingevoerd! wordt op 1 gezet")
     ruw2 = ruwheid
 if ruw3 >= 0.5 and ruw3 <= 1.0:
     arcpy.AddMessage("ruw3 = ok")
 else:
     arcpy.AddWarning("ruw3 = "+str(ruw3)+" = Niet ingevoerd! wordt op 1 gezet")
     ruw3 = ruwheid
 if ruw4 >= 0.5 and ruw4 <= 1.0:
     arcpy.AddMessage("ruw4 = ok")
 else:
     arcpy.AddWarning("ruw4 = "+str(ruw4)+" = Niet ingevoerd! wordt op 1 gezet")
예제 #30
0
def createQARasterMosaics(isClassified, gdb_path, spatial_reference, target_folder, mxd, footprint_path=None, lasd_boundary_path=None):
    """Create the set of QA raster mosaic datasets for a delivery.

    Builds one mosaic dataset per statistics method / dataset-name combination
    (plus the CANOPY_DENSITY mosaic), reusing a pair of simplified footprint
    and LAS-dataset boundary polygons that are created once on first use and
    deleted before returning.

    isClassified -- if False, a single generic (empty) dataset name is used.
    gdb_path -- geodatabase that receives the mosaic datasets.
    spatial_reference -- spatial reference for the mosaics.
    target_folder -- root folder containing the per-method raster folders.
    mxd -- map document handed through to createQARasterMosaicDataset.
    footprint_path, lasd_boundary_path -- optional polygon feature classes to
        simplify and apply to each mosaic.

    Returns a list of [mosaic_path, mosaic_name] entries.
    """
    mosaics = []
    # Simplified polygons are built lazily on the first loop iteration and
    # shared by every subsequent mosaic dataset.
    simple_footprint_path = None
    simple_lasd_boundary_path = None

    stats_methods = STATS_METHODS
    for method in stats_methods:
        arcpy.AddMessage("Creating {} MDS".format(method))
        for dataset_name in DATASET_NAMES:
            name = dataset_name

            if not isClassified:
                # Using a generic name for non-classified data
                name = ""

            md_name = method
            if len(name) > 0:
                md_name = "{}{}".format(method, name)

            # NOTE(review): name[1:] assumes dataset names carry a leading
            # separator character -- confirm against DATASET_NAMES.
            input_folder = os.path.join(target_folder, method, name[1:])

            arcpy.AddMessage("Creating {} MD from {}".format(md_name, input_folder))
            try:
                if simple_footprint_path is None:
                    simple_footprint_path = "{}_Simple".format(footprint_path)
                    arcpy.SimplifyPolygon_cartography(in_features=footprint_path, out_feature_class=simple_footprint_path,
                                                    algorithm="POINT_REMOVE", tolerance=Raster.boundary_interval, minimum_area="0 SquareMeters",
                                                    error_option="RESOLVE_ERRORS", collapsed_point_option="NO_KEEP")
                    Utility.addToolMessages()
                    deleteFields(simple_footprint_path)

                if simple_lasd_boundary_path is None:
                    simple_lasd_boundary_path = "{}_Simple".format(lasd_boundary_path)
                    arcpy.SimplifyPolygon_cartography(in_features=lasd_boundary_path, out_feature_class=simple_lasd_boundary_path,
                                                    algorithm="POINT_REMOVE", tolerance=Raster.boundary_interval, minimum_area="0 SquareMeters",
                                                    error_option="RESOLVE_ERRORS", collapsed_point_option="NO_KEEP")
                    Utility.addToolMessages()
                    deleteFields(simple_lasd_boundary_path)
            # Narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt
            # still propagate; the mosaic is still created without the
            # simplified polygons.
            except Exception:
                arcpy.AddWarning("Failed to create simplified footprints and boundaries in '{}'".format(gdb_path))

            qa_md = createQARasterMosaicDataset(md_name, gdb_path, spatial_reference, input_folder, mxd, simple_footprint_path, simple_lasd_boundary_path)
            if qa_md is not None:
                mosaics.append(qa_md)

    # The canopy-density mosaic is recorded in the result list whether or not
    # it already exists or its creation below succeeds (matches original
    # behavior).
    md_name = CANOPY_DENSITY
    dhm_md_path = os.path.join(gdb_path, md_name)
    mosaics.append([dhm_md_path, md_name])

    if arcpy.Exists(dhm_md_path):
        arcpy.AddMessage("{} already exists.".format(md_name))
    else:
        try:
            vert_cs_name, vert_unit_name = Utility.getVertCSInfo(spatial_reference)  # @UnusedVariable
            # No need to update boundary and footprints since it will inherit from the original
            pc_all_md_path = os.path.join(gdb_path, "POINT_COUNT_ALL")
            createReferenceddMosaicDataset(pc_all_md_path, dhm_md_path, spatial_reference, vert_unit_name)
        # Narrowed from a bare 'except:'; also raised the severity from
        # AddMessage to AddWarning for consistency with the other failure
        # paths in this function.
        except Exception:
            arcpy.AddWarning("Failed to create {}".format(dhm_md_path))

    # Clean up the temporary simplified polygons.
    deleteFileIfExists(simple_footprint_path, True)
    deleteFileIfExists(simple_lasd_boundary_path, True)

    return mosaics