# ==============================================================================

#                      CREATE A SHAPEFILE OF XY VALUES

# First, an XY event layer is created and saved as a layer file, which is then
# converted into a point feature class

# ==============================================================================

# Define the filepaths for .lyr and .shp files
lyr = 'del_obst'
out_lyr = r"C:filepath_to_output_lyr_file\output\testi_ajo_kopio\VSS_pnt.lyr"
out_pnt_class = r"C:filepath_to_shp_file\output\VSS_pnt_to_class.shp"

# Use ArcPy to create a point feature class of the flight obstacles
# to be shown on a map
arcpy.MakeXYEventLayer_management(output_csv, 'Longitude', 'Latitude', lyr)

# Check that all rows are included
print(arcpy.GetCount_management(lyr))

# First, save as a .lyr file
arcpy.SaveToLayerFile_management(lyr, out_lyr)

# Second, convert the layer file to a point shapefile
arcpy.FeatureToPoint_management(out_lyr, out_pnt_class, "INSIDE")

# ==============================================================================

print('DATA PROCESSING IS READY!')
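
# Note on the count check above: GetCount_management returns a Result object,
# so printing it shows the count, but numeric comparisons need an explicit
# cast. A minimal hedged sketch of a stricter check (assuming arcpy is
# imported and the event layer exists):
row_count = int(arcpy.GetCount_management(lyr).getOutput(0))
if row_count == 0:
    raise ValueError('No rows were loaded into the XY event layer')
print('{0} obstacle rows loaded'.format(row_count))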
Example #2
    name_out = 'SP_FF_sr{0}'.format(sr_in)
    check_exists(os.path.join(output_dir_in, name_out))
    ff_routes, ff_routes_protection = route_fiber(nd_in, cluster, co_in,
                                                  name_out, output_dir_in,
                                                  True, pro_in)
    # Get the fiber and duct lengths
    # Working paths
    ff_dict = post_processing_fiber(sr_in, ff_routes_protection, True,
                                    ff_routes)[0]

    #arcpy.AddMessage(ff_dict)

    return lmf_dict, ff_dict


if __name__ == '__main__':
    nd = r'D:\GISworkspace\Test_for_Scripting.gdb\NewYork_nodes\New_York_JOCN_125_ND'
    co = r'D:\GISworkspace\Test_for_Scripting.gdb\NewYork_nodes\CO_to_streets'
    demands_in = r'D:\GISworkspace\Test_for_Scripting.gdb\NewYork_nodes\bs_to_street_125_to_streets'

    output_dir = r'D:\GISworkspace\Test_for_Scripting.gdb\NewYork_nodes'
    facilities = 'Intersections'
    sr = 32
    protection = True
    pro = False

    arcpy.MakeTableView_management(demands_in, "myTableView")
    n_nodes = int(arcpy.GetCount_management("myTableView").getOutput(0))
    n_clusters = int(math.ceil(float(n_nodes) / float(sr)))

    main(nd, n_clusters, co, output_dir, facilities, sr, protection, pro)
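
# The cluster count above is simple ceiling division; a minimal self-contained
# sketch of that arithmetic (the 125-node figure is an assumption taken from
# the dataset name and is purely illustrative):
import math

def cluster_count(n_nodes, splitting_ratio):
    # Every PS serves at most `splitting_ratio` nodes, so round up.
    return int(math.ceil(float(n_nodes) / float(splitting_ratio)))

assert cluster_count(125, 32) == 4  # e.g. 125 demand nodes, sr = 32 -> 4 PSs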
Example #3
def main(nd_in,
         n_clusters_in,
         co_in,
         output_dir_in,
         facilities_in,
         sr_in,
         protection_in=False,
         pro_in=False):
    """
    This function routes the fiber in the city grid in a single stage. If specified, it also provides
    link-disjoint protection for all the fiber. It returns a list with last-mile and feeder fiber
    lengths and duct lengths in meters. With protection, the additional protection duct and fiber
    are included.

    :param nd_in: network dataset on which the shortest path routing is done, network dataset
    :param n_clusters_in: the number of PSs that were placed at the clustering stage, int
    :param co_in: central office location (predefined), feature class
    :param output_dir_in: the directory to store the results, path
    :param facilities_in: the PS locations (cluster heads), feature class
    :param sr_in: user splitting ratio at the clustering stage, int
    :param protection_in: whether protection has to be included, binary
    :param pro_in: whether the script is executed in ArcGIS Pro, binary

    :return: planning_results = [lmf, lm_duct, ff, f_duct] or
    [lmf, lm_duct, ff, f_duct, lmf_protection, lm_duct_protection-lm_duct, ff_protection, f_duct_protection-f_duct]
    """
    routes_all_list = []
    routes_all_list_protection = []

    lmf_dict = {'LMF': {}}

    # LMF
    # Route fiber for all the clusters
    for i in range(int(n_clusters_in)):
        cluster = os.path.join(output_dir_in,
                               'Cluster_{0}_sr{1}'.format(i, sr_in))
        n_members = int(arcpy.GetCount_management(cluster).getOutput(0))
        cluster_head = os.path.join(output_dir_in,
                                    'Cluster_head_{0}_sr{1}'.format(i, sr_in))

        with arcpy.da.SearchCursor(cluster_head, 'Name') as cursor:
            for row in cursor:
                head_name = str(row[0]).split(' ')[1]

        name_out = 'SP_LMF_{0}_sr{1}'.format(i, sr_in)
        check_exists(os.path.join(output_dir_in, name_out))

        # Working paths, i.e., shortest paths
        route, route_protection = route_fiber(nd_in, cluster, cluster_head,
                                              name_out, output_dir_in,
                                              protection_in, pro_in)
        routes_all_list.append(route)

        # Protection paths, i.e., disjoint with working
        if protection_in:
            routes_all_list_protection.append(route_protection)

        lmf, lm_duct = post_processing_fiber(sr_in, route)

        lmf_dict['LMF'][head_name] = {
            'average fiber': float(lmf) / float(n_members),
            'average duct': float(lm_duct) / float(n_members)
        }

    #arcpy.AddMessage(lmf_dict)

    # FF
    # Route fiber from the CO to the cluster heads
    cluster = os.path.join(output_dir_in, 'Cluster_heads_sr{0}'.format(sr_in))
    name_out = 'SP_FF_sr{0}'.format(sr_in)
    check_exists(os.path.join(output_dir_in, name_out))
    ff_routes, ff_routes_protection = route_fiber(nd_in, cluster, co_in,
                                                  name_out, output_dir_in,
                                                  True, pro_in)
    # Get the fiber and duct lengths
    # Working paths
    ff_dict = post_processing_fiber(sr_in, ff_routes_protection, True,
                                    ff_routes)[0]

    #arcpy.AddMessage(ff_dict)

    return lmf_dict, ff_dict
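
# For reference, the lmf_dict returned above has the structure built in the
# LMF loop; a sketch with hypothetical head names and values (ff_dict's layout
# depends on post_processing_fiber and is not shown):
#
#   {'LMF': {'17': {'average fiber': 312.5, 'average duct': 250.0},
#            '42': {'average fiber': 298.1, 'average duct': 241.7}}}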
Example #4
    # ---------------------------------------------------------------------- Datasets
    tempDEM = watershedGDB_path + os.sep + "tempDEM"
    storageTable = watershedGDB_path + os.sep + arcpy.ValidateTableName(
        poolName) + "_storageTable"
    PoolMerge = watershedFD + os.sep + arcpy.ValidateTableName(
        poolName) + "_All_Pools"

    # Set log file path and start logging
    textFilePath = userWorkspace + os.sep + os.path.basename(
        userWorkspace).replace(" ", "_") + "_EngTools.txt"
    logBasicSettings()

    # ---------------------------------------------------------------------------------------------- Check Parameters
    AddMsgAndPrint("\nChecking inputs...", 0)
    # Exit if inPool has more than 1 polygon
    if int(arcpy.GetCount_management(inPool).getOutput(0)) > 1:
        AddMsgAndPrint(
            "\tOnly ONE Watershed or Pool Polygon can be submitted.", 2)
        AddMsgAndPrint(
            "\tEither export an individual polygon from your " +
            os.path.basename(inPool) + " layer, ", 2)
        AddMsgAndPrint(
            "\tmake a single selection, or provide a different input. Exiting...",
            2)
        sys.exit()

    # Exit if inPool is not a Polygon geometry
    if arcpy.Describe(inPool).ShapeType != "Polygon":
        AddMsgAndPrint(
            "\tYour watershed or pool area must be a polygon layer! Exiting...",
            2)
Example #5

        for polyrow in polycursor:
            row_counter = 0
            print "row counter " + str(row_counter)

            #name fields
            fid = polyrow[0]
            polyYrfield = polyrow[1]

            print fid

            #select current row
            arcpy.SelectLayerByAttribute_management (fc_layer, "NEW_SELECTION", "FID = {}".format(polyrow[0]))
            #count number selected in polygon
            polygon_count = arcpy.GetCount_management(fc_layer)
            print "Number of selected rows in the current polygon is " + str(polygon_count) + "\n"

            #fires points
            #for current year, select all points in that year in fires data
            #arcpy.SelectLayerByAttribute_management ("fireslyr", "NEW_SELECTION", " [Year] = '" + str(year) + "' ")
            arcpy.SelectLayerByAttribute_management ("fireslyr", "NEW_SELECTION", """ "Year" = '""" + str(year) + """'""")

            #count number selected in fire points
            all_fires_count_yr = arcpy.GetCount_management("fireslyr")
            print "Number of selected rows in fires is " + str(all_fires_count_yr) + " for year " + str(year) + "\n"

            #select by location number of selected fire points for that year that intersect with each feature in current dataset
            arcpy.SelectLayerByLocation_management ("fireslyr", "INTERSECT", fc_layer, "", "SUBSET_SELECTION")

            #count number of points selected within polygon intersection
Example #6
    arcpy.CalculateField_management(PeterTable, "ShortCode",
                                    ShortCodeExpression, "PYTHON_9.3",
                                    codeblock)

    arcpy.AddField_management(PeterTable, "DATE", "DATE")

    dateExpression = "Date"
    arcpy.CalculateField_management(PeterTable, "DATE", dateExpression)

    #Calculates Field with expression for Peter Text File
    arcpy.CalculateField_management(PeterTable, "PtrText", expression)

    #Search Cursor to extract Peter Text Field
    myOutputFile = open(r"C:\PeterScripts\EV\Peter.txt", 'w')
    rows = arcpy.da.SearchCursor(PeterTable, ["PtrText"])
    rowssent = arcpy.GetCount_management(PeterTable)

    for row in rows:
        myOutputFile.write(str(row[0]) + '\n')
    del row, rows
    myOutputFile.close()
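
    # A hedged alternative for the block above: both open() and
    # arcpy.da.SearchCursor support context managers, which release the file
    # and cursor even if the loop raises. (Minimal sketch; rewrites the same
    # output with the same rows.)
    with open(r"C:\PeterScripts\EV\Peter.txt", 'w') as out_file:
        with arcpy.da.SearchCursor(PeterTable, ["PtrText"]) as cursor:
            for text_row in cursor:
                out_file.write(str(text_row[0]) + '\n')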

    import time
    date = time.strftime("%m/%d/%Y")
    print date

    ATTACHMENTS = [r"C:\PeterScripts\EV\Peter.txt"]
    send_from = '*****@*****.**'
    send_to = [
        '*****@*****.**', '*****@*****.**',
        '*****@*****.**', '*****@*****.**',
Example #7
    time.sleep(2)

    print("Locating feature layer...")

    try:
        mem_point = arcpy.MakeFeatureLayer_management(POINT_DATASET_NAME,
                                                      "pointlayer")
    except Exception:
        print("Couldn't find " + POINT_DATASET_NAME + ".\n\nDoes it exist?")
        e = sys.exc_info()[1]
        print("\n\nArcpy says: \n" + e.args[0])
        raw_input("\nHit Enter to Continue...")
        sys.exit()

    total_points = arcpy.GetCount_management(mem_point).getOutput(0)

    arcpy.Delete_management(mem_point)

    print("Done.\n")

    print("Collecting IPC tools...")

    try:
        NUM_PROCS = determineNumProcs(total_points)

        Flags = [
            Value('i', PARENT_BUSY, lock=False)
            for i in repeat(None, NUM_PROCS)
        ]
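
        # determineNumProcs is not shown in this snippet; a plausible sketch
        # (purely an assumption, not the original) would scale workers to the
        # point total while capping at the CPU count:
        #
        #   def determineNumProcs(total_points, points_per_proc=50000):
        #       wanted = max(1, int(total_points) // points_per_proc + 1)
        #       return min(wanted, multiprocessing.cpu_count())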
Example #8
    def test_HLZ_Touchdown_Points_002(self):
        ''' This test is for some of the default values in the HLZ Touchdown tool. '''
        try:
            arcpy.AddMessage("test_HLZ_Touchdown_Points_002")
            # move TestSunPositionAndHillshade code in here
            print("Importing toolbox... ")
            arcpy.ImportToolbox(TestUtilities.toolbox, "tdpoints")
            arcpy.env.overwriteOutput = True

            # Inputs
            print("Setting up inputs... ")
            inputAirframeTable = "#"
            inputAirframeString = "#"
            inputSuitableAreas = self.inputSuitableAreas
            inputSlope = self.inputSlope
            outputGeodatabase = self.outputGeodatabase
            outputCenterpoints = "#"
            outputCircles = "#"

            # Testing
            print("Running tool (HLZ Touchdown Points) ...")
            arcpy.HLZTouchdownPoints_tdpoints(inputAirframeTable,
                                              inputAirframeString,
                                              inputSuitableAreas, inputSlope,
                                              outputGeodatabase,
                                              outputCenterpoints,
                                              outputCircles)

            print("Comparing expected results...")
            # count output center points
            countCenterPoints = arcpy.GetCount_management(
                os.path.join(self.outputGeodatabase,
                             self.outputCenterpoints)).getOutput(0)
            # count output circles
            countOutputCircles = arcpy.GetCount_management(
                os.path.join(self.outputGeodatabase,
                             self.outputCircles)).getOutput(0)

            self.assertEqual(int(countCenterPoints), 934)
            self.assertEqual(int(countOutputCircles), 934)

            #TODO: make sure center points fall within circles

        except arcpy.ExecuteError:
            # Get the arcpy error messages
            msgs = arcpy.GetMessages()
            #TODO: need to add 'msgs' to logger
            print(msgs)

        except:
            # Get the traceback object
            tb = sys.exc_info()[2]
            tbinfo = traceback.format_tb(tb)[0]

            # Concatenate information together concerning the error into a message string
            pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n"\
                + str(sys.exc_info()[1])
            msgs = "ArcPy ERRORS:\n" + arcpy.GetMessages() + "\n"

            #TODO: need to add 'msgs' and 'pymsg' to logger

            # Print Python error messages for use in Python / Python Window
            print(pymsg + "\n")
            print(msgs)
Example #9
def createViewshed(inputObserverPoints, elevationRaster, outerRadiusInput, \
    leftAzimuthInput, rightAzimuthInput, observerOffsetInput, \
    innerRadiusInput, viewshed, sectorWedge, fullWedge):

    # Error Checking:
    if arcpy.CheckExtension("3D") != "Available":
        arcpy.AddError("3D license is not available.")
        return

    if not arcpy.Exists(inputObserverPoints):
        arcpy.AddError('Dataset does not exist: ' + str(inputObserverPoints))
        return

    if not arcpy.Exists(elevationRaster):
        arcpy.AddError('Dataset does not exist: ' + str(elevationRaster))
        return

    inputPointsCount = int(
        arcpy.GetCount_management(inputObserverPoints).getOutput(0))
    if inputPointsCount == 0:
        arcpy.AddError('No features in input feature set: ' +
                       str(inputObserverPoints))
        return

    elevDesc = arcpy.Describe(elevationRaster)
    elevationSR = elevDesc.spatialReference

    if not elevationSR.type == "Projected":
        msgErrorNonProjectedSurface = \
            "Error: Input elevation raster must be in a projected coordinate system. Existing elevation raster is in {0}.".format(elevationSR.name)
        arcpy.AddError(msgErrorNonProjectedSurface)
        return

    # Done error checking, do processing:
    arcpy.env.outputCoordinateSystem = elevationSR

    donutWedges = []
    pieWedges = []

    tempObserverPoints = r"in_memory\tempPoints"
    copyFeaturesAndProject(inputObserverPoints, tempObserverPoints,
                           elevationSR)

    # Check if points falls within surface extent
    isWithin = surfaceContainsPoints(tempObserverPoints, elevationRaster)
    if not isWithin:
        msgErrorPointNotInSurface = \
            "Error: Input Observer(s) does not fall within the extent of the input surface: {0}!".format(os.path.basename(elevationRaster))
        arcpy.AddError(msgErrorPointNotInSurface)
        return

    addViewshedFields(tempObserverPoints, innerRadiusInput, outerRadiusInput, \
        leftAzimuthInput, rightAzimuthInput, observerOffsetInput, \
        0) # Set Target Height to 0

    arcpy.AddMessage("Buffering observers...")
    arcpy.Buffer_analysis(tempObserverPoints, \
        r"in_memory\OuterBuffer", "RADIUS2", "FULL", "ROUND", "NONE", "", "GEODESIC")

    desc = arcpy.Describe(r"in_memory\OuterBuffer")
    xMin = desc.Extent.XMin
    yMin = desc.Extent.YMin
    xMax = desc.Extent.XMax
    yMax = desc.Extent.YMax
    Extent = str(xMin) + " " + str(yMin) + " " + str(xMax) + " " + str(yMax)

    arcpy.env.extent = desc.Extent

    # Set Raster Output Mask (to improve performance)
    arcpy.env.mask = r"in_memory\OuterBuffer"

    arcpy.AddMessage("Clipping image to observer buffer...")
    arcpy.Clip_management(elevationRaster, Extent, r"in_memory\clip")

    arcpy.AddMessage("Calculating viewshed...")
    arcpy.Viewshed_3d("in_memory\clip", tempObserverPoints,
                      r"in_memory\intervis", "1", "FLAT_EARTH", "0.13")

    arcpy.AddMessage("Creating features from raster...")
    arcpy.RasterToPolygon_conversion(
        in_raster=r"in_memory\intervis",
        out_polygon_features=r"in_memory\unclipped",
        simplify="NO_SIMPLIFY")

    fields = ["SHAPE@XY", "RADIUS1", "RADIUS2", "AZIMUTH1", "AZIMUTH2"]
    ## get the attributes from the input point
    with arcpy.da.SearchCursor(tempObserverPoints, fields) as cursor:
        for row in cursor:
            centerX = row[0][0]
            centerY = row[0][1]
            radiusInner = row[1]
            radiusOuter = row[2]
            startBearing = row[3]
            endBearing = row[4]

            # TODO/IMPORTANT: radius must be in map units
            donutWedge = drawWedge(centerX, centerY, radiusInner, radiusOuter,
                                   startBearing, endBearing)
            donutWedges.append(donutWedge)

            pieWedge = drawWedge(centerX, centerY, 0, radiusOuter,
                                 startBearing, endBearing)
            pieWedges.append(pieWedge)

    arcpy.CopyFeatures_management(donutWedges, sectorWedge)
    arcpy.CopyFeatures_management(pieWedges, fullWedge)

    arcpy.AddMessage("Finishing output features...")
    arcpy.Clip_analysis(r"in_memory\unclipped", sectorWedge,
                        r"in_memory\dissolve")
    arcpy.Dissolve_management(r"in_memory\dissolve", viewshed, "gridcode", "",
                              "MULTI_PART", "DISSOLVE_LINES")

    # Output Symbol layer requires the field to be "VISIBILITY"
    arcpy.AddField_management(viewshed, "VISIBILITY", "LONG")
    arcpy.CalculateField_management(viewshed, "VISIBILITY", '!gridcode!',
                                    "PYTHON_9.3")
Example #10
    # Select to create the individual feature classes...
    SelectStatement = " \"MCL1\" = '" + MapClass + "' or \"MCL2\" = '" + MapClass + "' or \"MCL3\" = '" + MapClass + "'"
    TempFeatureClass = "%scratchWorkspace%\\Scratch.gdb\\" + MapClass
    ## If workspace is not a geodatabase, need extension to create shapefile...
    #Last4 = arcpy.env.scratchWorkspace[len(arcpy.env.scratchWorkspace) - 4:len(arcpy.env.scratchWorkspace)].lower()
    #if Last4 != ".mdb" and Last4 != ".gdb":
    #    TempFeatureClass = TempFeatureClass + ".shp"
    arcpy.Select_analysis(TEM_SEM, TempFeatureClass, SelectStatement)

    # Add field for percent cover
    arcpy.AddField_management(TempFeatureClass, "MCL_PERC", "FLOAT")

    # For each of the unique mapclasses calculate Decile value for each of the three possible ecosystem levels into the
    # newly created field...
    if int(arcpy.GetCount_management(TempFeatureClass).getOutput(0)) > 0:
        with arcpy.da.UpdateCursor(TempFeatureClass, ["MCL1", "MCL2", "MCL3", "DEC1", "DEC2","DEC3",
                                                      "MCL_PERC"]) as OutputCursor:
            for OutputRow in OutputCursor:
                # Accumulate the percentages across the 3 original values...
                perc = 0.0
                if OutputRow[0] == MapClass:
                    perc = perc + (OutputRow[3] / 100.0)
                if OutputRow[1] == MapClass:
                    perc = perc + (OutputRow[4] / 100.0)
                if OutputRow[2] == MapClass:
                    perc = perc + (OutputRow[5] / 100.0)
                OutputRow[6] = perc
                OutputCursor.updateRow(OutputRow)
        del OutputCursor, OutputRow
Example #11
if (arcpy.GetParameter(7)):
    fields_to_pass = [
        address, city, state, zip, 'Latitude', 'Longitude', 'MatchType',
        'Result', "Block", "Group", "Tract", "Cnty_Fips", "CBSA_Fips",
        "CBSA_Micro", "MCD_Fips", "MetDiv", "MSA_Fips", "Place_Fips",
        "State_Fips", "Group"
    ]

else:
    fields_to_pass = [
        address, city, state, zip, 'Latitude', 'Longitude', 'MatchType',
        'Result'
    ]

with arcpy.da.UpdateCursor(tablePath, fields_to_pass) as cursor:
    totalRows = int(arcpy.GetCount_management(tablePath).getOutput(0))
    arcpy.AddWarning("Processing " + str(totalRows) + " total records...")
    #this sets up the progress bar
    arcpy.SetProgressor("step", "Copying shapefiles to geodatabase...", 0,
                        totalRows, 1)
    start_time = time.time()
    success = 1
    for row in cursor:
        arcpy.SetProgressorLabel("Loading {0}...".format(row))
        url = "https://geoservices.tamu.edu/Services/Geocode/WebService/GeocoderWebServiceHttpNonParsed_V04_01.aspx?"

        #payload = "streetAddress="+str(row[0])+"&city="+str(row[1])+"&state="+str(row[2])+"&zip="+str(row[3])+"&apikey="+apikey+"&format=csv&census=false&censusYear=2010&notStore=false&version=4.01"
        payload = "streetAddress=" + str(row[0]) + "&city=" + str(
            row[1]
        ) + "&state=" + str(row[2]) + "&zip=" + str(
            row[3]
Example #12
def refresh():

    logFile = open("logs/refreshMaster.log", "a")

    # Get licensed
    if arcpy.CheckExtension("Spatial"):
        arcpy.CheckOutExtension("Spatial")
    else:
        print "No SA licence"
        sys.exit()

    # Load the environment
    env.workspace = "C:/Users/hengstam/Desktop/projects/proglacial"
    hashlength = 30
    disphashlength = 12

    # This is where intersection results will be temporarily held
    output = "/temp/intersection_output.shp"
    if arcpy.Exists(output):
        arcpy.Delete_management(output)

    # Make sure we can mess with stuff
    arcpy.env.overwriteOutput = True
    arcpy.env.XYTolerance = "10 Meters"

    # Get some names
    masterlakefile = "/master_lakes/master_lake_file.shp"

    ####################################################
    ## Generate union shapes and update the master files
    print "Beginning master lake file update..."

    # Make a new master lake file
    arcpy.Delete_management(masterlakefile)
    arcpy.CreateFeatureclass_management(env.workspace, masterlakefile,
                                        "POLYGON")
    arcpy.AddField_management(masterlakefile, "ref_id", "STRING")
    arcpy.AddField_management(masterlakefile, "n", "SHORT")
    arcpy.AddField_management(masterlakefile, "n_real", "SHORT")
    arcpy.AddField_management(masterlakefile, "n_ratio", "SHORT")

    print "Master lake file reset."

    # Open the shape folder directory
    os.chdir(env.workspace + "/master_lakes/lakes/")

    # Iterate through all shapefiles
    for file in glob.glob("*.shp"):

        # Error management
        try:
            ref_id = file[:-4]

            # Count how many things the thing has
            number = arcpy.GetCount_management(file)
            dates = set()

            # Iterate through all elements of that lake
            count_cursor = arcpy.da.SearchCursor(file, ['date'])
            for crow in count_cursor:
                dates.add(crow[0])

            print "Adding lake", ref_id, "to new master lake file. Has", number[
                0], "lake images over", len(dates), "dates."

            # Make a union of the thing
            arcpy.Dissolve_management(file, output)

            # Get ready to add reference stuff to the thing
            arcpy.AddField_management(output, "ref_id", "STRING")
            arcpy.AddField_management(output, "n", "SHORT")
            arcpy.AddField_management(output, "n_real", "SHORT")
            arcpy.AddField_management(output, "n_ratio", "SHORT")

            # This cursor will let up change up that reference id
            cursor = arcpy.da.UpdateCursor(
                output, ["ref_id", "n", "n_real", "n_ratio"])

            # Update that thang
            for row in cursor:
                row[0] = ref_id
                row[1] = int(number[0])
                row[2] = len(dates)
                row[3] = row[1] / row[2]
                print row
                cursor.updateRow(row)
            del cursor

            # Add it to the master lake file
            arcpy.Append_management(output, masterlakefile, 'NO_TEST')

            # Remove the temp file
            arcpy.Delete_management(output)

        # Return geoprocessing specific errors
        except arcpy.ExecuteError:
            # Display in terminal
            print("ERROR: arcpy.ExecuteError")
            arcpy.AddError(arcpy.GetMessages(2))
            # Report in logfile
            logFile.write(str(datetime.now()) + " ERROR: arcpy.ExecuteError")
            logFile.write(arcpy.GetMessages(2))
            logFile.flush()

        # Return any other type of error
        except:
            # Display in terminal
            e = sys.exc_info()[1]
            print("ERROR: default error")
            print(e.args[0])
            # Report in logfile
            logFile.write(str(datetime.now()) + " ERROR: default error")
            logFile.write(e.args[0])
            logFile.flush()

    print "Success!"

    logFile.close()
Example #13
def RunTest():
    try:
        arcpy.AddMessage("Starting Test: CoordinateConversion")
        
        # WORKAROUND
        print("Creating New Scratch Workspace (Workaround)")
        TestUtilities.createScratch()
            
        inputTable =  os.path.join(TestUtilities.csvPath, "SigActs.csv")
        outputDbf =  os.path.join(TestUtilities.scratchPath, "test_coordinate_cc.dbf")
        toolbox = TestUtilities.toolbox        
        
        # Set environment settings
        print("Running from: " + str(TestUtilities.currentPath))
        print("Geodatabase path: " + str(TestUtilities.geodatabasePath))
        
        arcpy.env.overwriteOutput = True
        arcpy.env.scratchWorkspace = TestUtilities.scratchGDB
        arcpy.ImportToolbox(toolbox, "InC")
    
        inputFeatureCount = int(arcpy.GetCount_management(inputTable).getOutput(0)) 
        print("Input FeatureClass: " + str(inputTable))
        print("Input Feature Count: " +  str(inputFeatureCount))
            
        if (inputFeatureCount < 1) :
            print("Invalid Input Feature Count: " +  str(inputFeatureCount))
                       
        coordinateConversionFrom = 'MGRS'
        coordinateFieldX = 'Location'
        coordinateFieldY = None
        
        ########################################################
        arcpy.ConvertCoordinates_InC(inputTable, coordinateConversionFrom, coordinateFieldX, coordinateFieldY, outputDbf)
        ########################################################
    
        # Verify the results    
        outputFeatureCount = int(arcpy.GetCount_management(outputDbf).getOutput(0)) 
        print("Output FeatureClass: " + str(outputDbf))
        print("Output Feature Count: " +  str(outputFeatureCount))
                    
        if (outputFeatureCount != inputFeatureCount) :
            print("Input / Output Feature Count don't match: " +  str(inputFeatureCount) + ":" + str(outputFeatureCount))
            raise Exception("Test Failed")            
            
        # WORKAROUND: delete scratch db
        print("Deleting Scratch Workspace (Workaround)")
        TestUtilities.deleteScratch()        
        
        print("Test Successful")
                
    except arcpy.ExecuteError: 
        # Get the tool error messages 
        msgs = arcpy.GetMessages() 
        arcpy.AddError(msgs) 
    
        # return a system error code
        sys.exit(-1)
        
    except Exception as e:
        # Get the traceback object
        tb = sys.exc_info()[2]
        tbinfo = traceback.format_tb(tb)[0]
    
        # Concatenate information together concerning the error into a message string
        pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n" + str(sys.exc_info()[1])
        msgs = "ArcPy ERRORS:\n" + arcpy.GetMessages() + "\n"
    
        # Return python error messages for use in script tool or Python Window
        arcpy.AddError(pymsg)
        arcpy.AddError(msgs)
    
        # return a system error code
        sys.exit(-1)
Example #14
        arcpy.AddError('\nUnknown field type: {0} for field: {1}'.format(
            type, name))

# Write values to join fields
arcpy.AddMessage('\nJoining data...')
# Create generator for values
fieldList = [outJoinField] + joinFields.split(';')
joinDataGen = joindataGen(joinTable, fieldList, outJoinField)
version = sys.version_info[0]
if version == 2:
    joinTuple = joinDataGen.next()
else:
    joinTuple = next(joinDataGen)
#
fieldList = [inJoinField] + joinFields.split(';')
count = int(arcpy.GetCount_management(inTable).getOutput(0))
breaks = [percentile(count, b) for b in range(10, 100, 10)]
j = 0
with arcpy.da.UpdateCursor(inTable,
                           fieldList,
                           sql_clause=(None,
                                       'ORDER BY ' + inJoinField)) as cursor:
    for row in cursor:
        j += 1
        if j in breaks:
            arcpy.AddMessage(
                str(int(round(j * 100.0 / count))) + ' percent complete...')
        row = list(row)
        key = row[0]
        try:
            while joinTuple[0] < key:
Example #15
# Clean polygon layer if exists
polygonLayer = r"M:\ArcGIS Development\Site Drawing Automation\POLYGON_DDP.shp"
if os.path.exists(polygonLayer):
    arcpy.Delete_management(polygonLayer)
    arcpy.AddMessage("\nPolygon layer existed and was deleted \n")

# GUI Interface to create indexed polygons
polylineDDP = arcpy.GetParameterAsText(0)  # Polyline DDP
lengthDDP = arcpy.GetParameterAsText(1)  # DDP length for polygon
widthDDP = arcpy.GetParameterAsText(2)  # Data width for polygon
scaleDDP = arcpy.GetParameterAsText(3)  # Data scale
outfileDDP = arcpy.GetParameterAsText(4)  # output file directory

# Check if any polyline exists inside the layer
polylineCount = int(arcpy.GetCount_management(polylineDDP).getOutput(0))
if polylineCount == 0:
    arcpy.AddMessage(
        "\nNo data in polyline, please draw using editor tool. \n")
    quit()

# Compute the segments
arcpy.StripMapIndexFeatures_cartography(polylineDDP, outfileDDP,
                                        "NO_USEPAGEUNIT", scaleDDP, lengthDDP,
                                        widthDDP, "HORIZONTAL", 0)

# Load shapefile result to geodatabase
out_location = r"M:\ArcGIS Development\Site Drawing Automation\Index Data\DDP.gdb\POLYGON_DDP"

arcpy.DeleteFeatures_management(out_location)
arcpy.Append_management(outfileDDP, out_location)
Example #16
    def getVectorDensity(self, Characteristic):
        '''
        A modification of getPointFeatureDensity, initially created to calculate the percent of dams per stream.
        This method is a mash-up of prior work done within this method and the getFeatureStatistic method
        found in SpatialOps.py. Mashed-up by JWX.
        '''

        map = []
        analysisFeatures = []
        ML = None
        result = {Characteristic.Name: 0}
        try:
            self._sm("Computing " + Characteristic.Name)

            ML = MapLayer(MapLayerDef(Characteristic.MapLayers[0]), "",
                          self.mask)

            if not ML.Activated:
                raise Exception("Map Layer could not be activated.")

            spOverlayWhole = self.spatialOverlay(
                ML.Dataset, self.mask,
                "INTERSECTS")  #Added back after sumMain was removed
            analysisFeatures.append(spOverlayWhole[0])

            #Create query
            queryField = "{}".format(
                Characteristic.Field
            )  #Generalized to pass whatever field needed
            operator = Characteristic.Operator  #Generalized to whatever operator e.g. =, LIKE, !=
            if operator == "LIKE":  #If operator = LIKE, flanking "%" are needed
                keyword = "'%{}%'".format(Characteristic.Keyword)
            else:
                keyword = Characteristic.Keyword
            query = "{} {} {}".format(queryField, operator,
                                      keyword)  #Build query

            #Create sub-set feature class using query
            arcpy.MakeFeatureLayer_management(
                spOverlayWhole, "Subsetlayer")  #Make feature layer
            arcpy.SelectLayerByAttribute_management(
                "Subsetlayer", "NEW_SELECTION", query)  #Carry out selection
            outName = os.path.join(
                self._TempLocation,
                "vdtmp.shp")  #SHP has to be included for proper function
            arcpy.CopyFeatures_management(
                "Subsetlayer",
                outName)  #Copy out features to avoid selection errors
            arcpy.SelectLayerByAttribute_management("Subsetlayer",
                                                    "CLEAR_SELECTION")
            if arcpy.GetCount_management(outName).getOutput(
                    0) == "0":  #Catch if the dataset is blank
                self._sm(
                    "Warning: Subset feature is blank. Zero will be substituted."
                )
                result[Characteristic.Name] = 0  #If blank, result is zero
            else:
                analysisFeatures.append(outName)

            #Get methods and field for analysis
            statisticRules = Characteristic.Method
            Fields = Characteristic.MethField  #Method operation field (newly added to config.json)
            #methods = [x.strip() for x in statisticRules.split(';')]                                #Could be used to scale the method section
            #Fields = [x.strip() for x in fieldStr.split(';')]                                       #Could be used to scale the fields section
            map.append([Fields, statisticRules])  #Build statistics statement
            resultCalculation = []  #AN ARRAY TO CAPTURE VALUES***

            for feature in analysisFeatures:  # Need to calculate statistics for every feature
                tblevalue = arcpy.Statistics_analysis(
                    feature, os.path.join(self._TempLocation, "aftmp"), map)
                mappedFields = [x[1] + "_" + x[0] for x in map]
                cursor = arcpy.da.SearchCursor(tblevalue, mappedFields)
                for row in cursor:
                    resultCalculation.append(row[0])

            #Generate values for results
            if len(analysisFeatures) == 1:  #Catch streams only instances
                result[Characteristic.Name] = 0
            else:
                if resultCalculation[0] == 0:  #Catch canal only instances
                    result[Characteristic.Name] = 100
                else:
                    result[Characteristic.Name] = (
                        resultCalculation[1] /
                        resultCalculation[0]) * 100  #Otherwise carry out math

        except:
            tb = traceback.format_exc()
            self._sm(arcpy.GetMessages(), 'GP')
            self._sm("getVectorDensity " + Characteristic.Name + " " + tb,
                     "ERROR", 71)
            result[Characteristic.Name] = float('nan')

        finally:
            #Cleans up workspace
            ML = None
            arcpy.SelectLayerByAttribute_management(
                "Subsetlayer", "CLEAR_SELECTION")  #This was added by JWX

        return result
Example #17
def ppt_ratio_parameters(config_path):
    """Calculate GSFLOW Precipitation Ratio Parameters

    Parameters
    ----------
    config_path : str
        Project configuration file (.ini) path.

    Returns
    -------
    None

    """
    # Hardcoded HRU field formats for now
    ppt_field_format = 'PPT_{:02d}'
    ratio_field_format = 'PPT_RT_{:02d}'

    # Initialize hru_parameters class
    hru = support.HRUParameters(config_path)

    # Open input parameter config file
    inputs_cfg = ConfigParser.ConfigParser()
    try:
        inputs_cfg.readfp(open(config_path))
    except Exception as e:
        logging.error('\nERROR: Config file could not be read, '
                      'is not an input file, or does not exist\n'
                      '  config_file = {}\n'
                      '  Exception: {}\n'.format(config_path, e))
        sys.exit()

    # Log DEBUG to file
    log_file_name = 'ppt_ratio_parameters_log.txt'
    log_console = logging.FileHandler(filename=os.path.join(
        hru.log_ws, log_file_name),
                                      mode='w')
    log_console.setLevel(logging.DEBUG)
    log_console.setFormatter(logging.Formatter('%(message)s'))
    logging.getLogger('').addHandler(log_console)
    logging.info('\nGSFLOW Precipitation Ratio Parameters')

    # Units
    ppt_obs_units = support.get_param('ppt_obs_units', 'mm',
                                      inputs_cfg).lower()
    ppt_units_list = ['mm', 'cm', 'm', 'in', 'ft']
    # Compare against the lower case of the values in the list
    #   but don't modify the acceptable units list
    if ppt_obs_units not in ppt_units_list:
        logging.error('\nERROR: Invalid observed precipitation units ({})\n  '
                      'Valid units are: {}'.format(ppt_obs_units,
                                                   ', '.join(ppt_units_list)))
        sys.exit()

    # Convert units while reading obs values
    if ppt_obs_units == 'mm':
        units_factor = 1
    elif ppt_obs_units == 'cm':
        units_factor = 10
    elif ppt_obs_units == 'm':
        units_factor = 1000
    elif ppt_obs_units == 'in':
        units_factor = 25.4
    elif ppt_obs_units == 'ft':
        units_factor = 304.8
    else:
        units_factor = 1

    # Check input paths
    if not arcpy.Exists(hru.polygon_path):
        logging.error('\nERROR: Fishnet ({}) does not exist'.format(
            hru.polygon_path))
        sys.exit()

    # PPT Zones
    set_ppt_zones_flag = inputs_cfg.getboolean('INPUTS', 'set_ppt_zones_flag')
    if set_ppt_zones_flag:
        ppt_zone_orig_path = inputs_cfg.get('INPUTS', 'ppt_zone_path')
        try:
            ppt_zone_field = inputs_cfg.get('INPUTS', 'ppt_zone_field')
        except:
            logging.error(
                '\nERROR: ppt_zone_field must be set in INI to apply '
                'zone specific ppt ratios\n')
            sys.exit()
        try:
            ppt_hru_id_field = inputs_cfg.get('INPUTS', 'ppt_hru_id_field')
        except:
            ppt_hru_id_field = None
            logging.warning(
                '  ppt_hru_id_field was not set in the INI file\n'
                '  PPT ratios will not be adjusted to match station '
                'values'.format(ppt_zone_field, hru.ppt_zone_id_field))

        # Field name for PSTA hard coded, but could be changed to be read from
        # config file like ppt_zone
        hru_psta_field = 'HRU_PSTA'

        try:
            ppt_obs_field_format = inputs_cfg.get('INPUTS',
                                                  'ppt_obs_field_format')
        except:
            ppt_obs_field_format = 'PPT_{:02d}'
            logging.info('  Defaulting ppt_obs_field_format = {}'.format(
                ppt_obs_field_format))

        if not arcpy.Exists(ppt_zone_orig_path):
            logging.error('\nERROR: PPT Zone ({}) does not exist'.format(
                ppt_zone_orig_path))
            sys.exit()
        # ppt_zone_path must be a polygon shapefile
        if arcpy.Describe(ppt_zone_orig_path).datasetType != 'FeatureClass':
            logging.error('\nERROR: ppt_zone_path must be a polygon shapefile')
            sys.exit()

        # Check ppt_zone_field
        if ppt_zone_field.upper() in ['FID', 'OID']:
            ppt_zone_field = arcpy.Describe(ppt_zone_orig_path).OIDFieldName
            logging.warning('\n  NOTE: Using {} to set {}\n'.format(
                ppt_zone_field, hru.ppt_zone_id_field))
        elif not arcpy.ListFields(ppt_zone_orig_path, ppt_zone_field):
            logging.error(
                '\nERROR: ppt_zone_field field {} does not exist\n'.format(
                    ppt_zone_field))
            sys.exit()
        # Need to check that field is an int type
        # Only check active cells (HRU_TYPE >0)?!
        elif not [
                f.type for f in arcpy.Describe(ppt_zone_orig_path).fields
                if (f.name == ppt_zone_field
                    and f.type in ['SmallInteger', 'Integer'])
        ]:
            logging.error(
                '\nERROR: ppt_zone_field field {} must be an integer type\n'.
                format(ppt_zone_field))
            sys.exit()
        # Need to check that field values are all positive
        # Only check active cells (HRU_TYPE >0)?!
        elif min([
                row[0] for row in arcpy.da.SearchCursor(
                    ppt_zone_orig_path, [ppt_zone_field])
        ]) <= 0:
            logging.error(
                '\nERROR: ppt_zone_field values must be positive\n'.format(
                    ppt_zone_field))
            sys.exit()

        # Check hru_psta_field
        if not arcpy.ListFields(ppt_zone_orig_path, hru_psta_field):
            logging.error(
                '\nERROR: hru_psta_field field {} does not exist\n'.format(
                    hru_psta_field))
            sys.exit()
        # Need to check that field is an int type
        # Should we only check active cells (HRU_TYPE > 0)?
        elif not [
                f.type for f in arcpy.Describe(ppt_zone_orig_path).fields
                if (f.name == hru_psta_field
                    and f.type in ['SmallInteger', 'Integer'])
        ]:
            logging.error(
                '\nERROR: hru_psta_field field {} must be an integer type\n'.
                format(hru_psta_field))
            sys.exit()
        # Need to check that field values are all positive
        # Should we only check active cells (HRU_TYPE > 0)?
        elif min([
                row[0] for row in arcpy.da.SearchCursor(
                    ppt_zone_orig_path, [hru_psta_field])
        ]) <= 0:
            logging.error(
                '\nERROR: hru_psta_field values must be positive\n'.format(
                    hru_psta_field))
            sys.exit()

        # Check ppt_hru_id_field
        # ppt_hru_id values are checked later
        if ppt_hru_id_field is not None:
            if not arcpy.ListFields(ppt_zone_orig_path, ppt_hru_id_field):
                logging.error(
                    '\nERROR: ppt_hru_id_field field {} does not exist\n'.
                    format(ppt_hru_id_field))
                sys.exit()
            # Need to check that field is an int type
            elif not [
                    f.type for f in arcpy.Describe(ppt_zone_orig_path).fields
                    if (f.name == ppt_hru_id_field
                        and f.type in ['SmallInteger', 'Integer'])
            ]:
                logging.error(
                    '\nERROR: ppt_hru_id_field field {} must be an integer type\n'
                    .format(ppt_hru_id_field))
                sys.exit()
            # Need to check that field values are not negative (0 is okay)
            elif min([
                    row[0] for row in arcpy.da.SearchCursor(
                        ppt_zone_orig_path, [ppt_hru_id_field])
            ]) < 0:
                logging.error(
                    '\nERROR: ppt_hru_id_field values cannot be negative\n'.
                    format(ppt_hru_id_field))
                sys.exit()
    else:
        # If a zone shapefile is not used, PPT must be set manually
        ppt_obs_list = inputs_cfg.get('INPUTS', 'ppt_obs_list')

        # Check that values are floats
        try:
            ppt_obs_list = map(float, ppt_obs_list.split(','))
        except ValueError:
            logging.error('\nERROR: ppt_obs_list (mean monthly precipitation) '
                          'values could not be parsed as floats\n')
            sys.exit()

        # Check that there are 12 values
        if len(ppt_obs_list) != 12:
        logging.error(
            '\nERROR: There must be exactly 12 mean monthly '
            'observed precipitation values passed to ppt_obs_list\n')
            sys.exit()
        logging.info(
            '  Observed Mean Monthly PPT ({}):\n    {}\n'
            '    (Script will assume these are listed in month order, '
            'i.e. Jan, Feb, ...)'.format(ppt_obs_units,
                                         ', '.join(map(str, ppt_obs_list))))

        # Check if all the values are 0
        if ppt_obs_list == ([0.0] * 12):
            logging.error(
                '\nERROR: The observed precipitation values are all 0.\n'
                '  To compute PPT ratios, please set the ppt_obs_list '
                'parameter in the INI with\n  observed mean monthly PPT '
                'values (i.e. from a weather station)')
            sys.exit()

        # Get the PPT HRU ID
        try:
            ppt_hru_id = inputs_cfg.getint('INPUTS', 'ppt_hru_id')
        except:
            ppt_hru_id = 0

        # Check that the ppt_hru_id is a valid cell hru_id
        # If ppt_hru_id is 0, PPT ratios will not be adjusted
        if ppt_hru_id > 0:
            # Check that HRU_ID is valid
            logging.info('    PPT HRU_ID: {}'.format(ppt_hru_id))
            arcpy.MakeTableView_management(
                hru.polygon_path, "layer",
                "{} = {}".format(hru.id_field, ppt_hru_id))
            if (ppt_hru_id != 0 and int(
                    arcpy.GetCount_management("layer").getOutput(0)) == 0):
                logging.error(
                    '\nERROR: ppt_hru_id {0} is not a valid cell hru_id'
                    '\nERROR: ppt_ratios will NOT be forced to 1'
                    ' at cell {0}\n'.format(ppt_hru_id))
                ppt_hru_id = 0
            arcpy.Delete_management("layer")
        else:
            logging.info(
                '  PPT ratios will not be adjusted to match station values\n'
                '    (ppt_hru_id = 0)')

        # Could add a second check that HRU_PSTA has values >0

    # Build output folders if necessary
    ppt_ratio_temp_ws = os.path.join(hru.param_ws, 'ppt_ratio')
    if not os.path.isdir(ppt_ratio_temp_ws):
        os.mkdir(ppt_ratio_temp_ws)
    ppt_zone_path = os.path.join(ppt_ratio_temp_ws, 'ppt_zone.shp')
    # ppt_zone_clip_path = os.path.join(ppt_ratio_temp_ws, 'ppt_zone_clip.shp')

    # Set ArcGIS environment variables
    arcpy.CheckOutExtension('Spatial')
    env.overwriteOutput = True
    # env.pyramid = 'PYRAMIDS -1'
    env.pyramid = 'PYRAMIDS 0'
    env.workspace = hru.param_ws
    env.scratchWorkspace = hru.scratch_ws

    # Set month list based on flags
    month_list = range(1, 13)
    ppt_field_list = [ppt_field_format.format(m) for m in month_list]
    ratio_field_list = [ratio_field_format.format(m) for m in month_list]

    # Check fields
    logging.info('\nAdding PPT ratio fields if necessary')
    # PPT zone fields
    support.add_field_func(hru.polygon_path, hru.ppt_zone_id_field, 'LONG')
    # PPT ratio fields
    for ratio_field in ratio_field_list:
        support.add_field_func(hru.polygon_path, ratio_field, 'DOUBLE')

    # Calculate PPT zone ID
    if set_ppt_zones_flag:
        logging.info('\nCalculating cell HRU Precipitation Zone ID')
        ppt_zone_desc = arcpy.Describe(ppt_zone_orig_path)
        ppt_zone_sr = ppt_zone_desc.spatialReference
        logging.debug('  Zones:      {}'.format(ppt_zone_orig_path))
        logging.debug('  Projection: {}'.format(ppt_zone_sr.name))
        logging.debug('  GCS:        {}'.format(ppt_zone_sr.GCS.name))

        # Reset PPT_ZONE_ID
        # if set_ppt_zones_flag:
        logging.info('  Resetting {} to 0'.format(hru.ppt_zone_id_field))
        arcpy.CalculateField_management(hru.polygon_path,
                                        hru.ppt_zone_id_field, 0, 'PYTHON')

        # If ppt_zone spat_ref doesn't match hru_param spat_ref
        # Project ppt_zone to hru_param spat ref
        # Otherwise, read ppt_zone directly
        if hru.sr.name != ppt_zone_sr.name:
            logging.info('  Projecting precipitation zones...')
            # Set preferred transforms
            transform_str = support.transform_func(hru.sr, ppt_zone_sr)
            logging.debug('    Transform: {}'.format(transform_str))
            # Project ppt_zone shapefile
            arcpy.Project_management(ppt_zone_orig_path, ppt_zone_path, hru.sr,
                                     transform_str, ppt_zone_sr)
            del transform_str
        else:
            arcpy.Copy_management(ppt_zone_orig_path, ppt_zone_path)

        # # Remove all unnecessary fields
        # for field in arcpy.ListFields(ppt_zone_path):
        #     skip_field_list = ppt_obs_field_list + [ppt_zone_field, 'Shape']
        #     if field.name not in skip_field_list:
        #         try:
        #             arcpy.DeleteField_management(ppt_zone_path, field.name)
        #         except:
        #             pass

        # Set ppt zone ID
        logging.info('  Setting {}'.format(hru.ppt_zone_id_field))
        support.zone_by_centroid_func(ppt_zone_path, hru.ppt_zone_id_field,
                                      ppt_zone_field, hru.polygon_path,
                                      hru.point_path, hru)
        # support.zone_by_area_func(
        #    ppt_zone_layer, hru.ppt_zone_id_field, ppt_zone_field,
        #    hru.polygon_path, hru, hru_area_field, None, 50)

        # Set HRU_PSTA
        logging.info('  Setting {}'.format(hru.hru_psta_field))
        support.zone_by_centroid_func(ppt_zone_path, hru.hru_psta_field,
                                      hru_psta_field, hru.polygon_path,
                                      hru.point_path, hru)

        del ppt_zone_desc, ppt_zone_sr
    else:
        # Set all cells to zone 1
        arcpy.CalculateField_management(hru.polygon_path,
                                        hru.ppt_zone_id_field, 1, 'PYTHON')

    # Calculate ratios
    logging.info('\nCalculating mean monthly PPT ratios')
    if set_ppt_zones_flag:
        # Read mean monthly values for each zone
        ppt_obs_dict = dict()
        ppt_obs_field_list = [
            ppt_obs_field_format.format(m) for m in month_list
        ]
        fields = [ppt_zone_field] + ppt_obs_field_list
        logging.debug('  Obs. Fields: {}'.format(', '.join(fields)))

        with arcpy.da.SearchCursor(ppt_zone_path, fields) as s_cursor:
            for row in s_cursor:
                ppt_obs_dict[int(row[0])] = map(float, row[1:13])

        # Convert values to mm if necessary to match PRISM
        if units_factor != 1:
            ppt_obs_dict = {z: [p * units_factor for p in ppt]
                            for z, ppt in ppt_obs_dict.items()}

        ppt_zone_list = sorted(ppt_obs_dict.keys())
        logging.debug('  PPT Zones: {}'.format(ppt_zone_list))

        # Print the observed PPT values
        logging.debug('  Observed PPT')
        for zone, ppt_obs in ppt_obs_dict.items():
            logging.debug('    {}: {}'.format(
                zone, ', '.join(['{:.2f}'.format(x) for x in ppt_obs])))

        # Default all zones to a ratio of 1
        ppt_ratio_dict = {z: [1] * 12 for z in ppt_zone_list}

        # Get list of HRU_IDs for each zone
        fields = [hru.ppt_zone_id_field, hru.id_field]
        zone_hru_id_dict = defaultdict(list)
        with arcpy.da.SearchCursor(hru.polygon_path, fields) as s_cursor:
            for row in s_cursor:
                zone_hru_id_dict[int(row[0])].append(int(row[1]))

        # Check that PPT_HRU_IDs are in the correct zone
        # Default all PPT Zone HRU IDs to 0
        ppt_hru_id_dict = {z: 0 for z in ppt_zone_list}
        if ppt_hru_id_field is not None:
            fields = [ppt_zone_field, ppt_hru_id_field]
            logging.debug('  PPT Zone ID field: {}'.format(ppt_zone_field))
            logging.debug('  PPT HRU ID field: {}'.format(ppt_hru_id_field))
            with arcpy.da.SearchCursor(ppt_zone_path, fields) as s_cursor:
                for row in s_cursor:
                    ppt_zone = int(row[0])
                    hru_id = int(row[1])
                    if hru_id == 0 or hru_id in zone_hru_id_dict[ppt_zone]:
                        ppt_hru_id_dict[ppt_zone] = hru_id
                        logging.debug('    {}: {}'.format(ppt_zone, hru_id))
                    else:
                        logging.error(
                            '\nERROR: HRU_ID {} is not in PPT ZONE {}'.format(
                                hru_id, ppt_hru_id_dict[ppt_zone]))
                        sys.exit()

        # Get gridded PPT values for each PPT_HRU_ID
        fields = [hru.ppt_zone_id_field, hru.id_field] + ppt_field_list
        # ppt_ratio_dict = dict()
        with arcpy.da.SearchCursor(hru.polygon_path, fields) as s_cursor:
            for row in s_cursor:
                ppt_zone = int(row[0])
                hru_id = int(row[1])
                if hru_id == 0:
                    pass
                elif hru_id in ppt_hru_id_dict.values():
                    ppt_gridded_list = map(float, row[2:14])
                    ppt_obs_list = ppt_obs_dict[ppt_zone]
                    ppt_ratio_list = [
                        float(o) / p if p > 0 else 0
                        for o, p in zip(ppt_obs_list, ppt_gridded_list)
                    ]
                    ppt_ratio_dict[ppt_zone] = ppt_ratio_list
        del ppt_hru_id_dict, zone_hru_id_dict, fields

        logging.debug('  PPT Ratio Adjustment Factors:')
        for k, v in ppt_ratio_dict.items():
            logging.debug('    {}: {}'.format(
                k, ', '.join(['{:.3f}'.format(x) for x in v])))

        # DEADBEEF - ZONE_VALUE is calculated in zone_by_centroid_func
        # There is probably a cleaner way of linking these two
        fields = [hru.ppt_zone_id_field] + ppt_field_list + ratio_field_list
        with arcpy.da.UpdateCursor(hru.polygon_path, fields) as u_cursor:
            for row in u_cursor:
                ppt_zone = int(row[0])
                for i, month in enumerate(month_list):
                    ppt_i = fields.index(ppt_field_format.format(month))
                    ratio_i = fields.index(ratio_field_format.format(month))

                    if ppt_zone in ppt_zone_list:
                        ppt_obs = ppt_obs_dict[ppt_zone][i]
                    else:
                        ppt_obs = 0

                    if ppt_obs > 0:
                        row[ratio_i] = (ppt_ratio_dict[ppt_zone][i] *
                                        row[ppt_i] / ppt_obs)
                    else:
                        row[ratio_i] = 0
                u_cursor.updateRow(row)
            del row
    else:
        # Get gridded precip at PPT_HRU_ID
        fields = [hru.id_field] + ppt_field_list
        logging.debug('  Fields: {}'.format(', '.join(ppt_field_list)))

        # Convert values to mm if necessary to match PRISM
        if units_factor != 1:
            ppt_obs_list = [p * units_factor for p in ppt_obs_list]
            logging.debug('\nConverted Mean Monthly PPT ({}):\n  {}'.format(
                ppt_obs_units, ', '.join(map(str, ppt_obs_list))))

        # Scale all ratios so gridded PPT will match observed PPT at target cell
        if ppt_hru_id != 0:
            ppt_gridded_list = map(
                float,
                arcpy.da.SearchCursor(
                    hru.polygon_path, fields,
                    '"{}" = {}'.format(hru.id_field, ppt_hru_id)).next()[1:])
            logging.info('  Gridded PPT: {}'.format(', '.join(
                ['{:.2f}'.format(p) for p in ppt_gridded_list])))

            # Ratio of MEASURED or OBSERVED PPT to GRIDDED PPT
            # This will be multiplied by GRIDDED/OBSERVED below
            ppt_ratio_list = [
                float(o) / p if p > 0 else 0
                for o, p in zip(ppt_obs_list, ppt_gridded_list)
            ]
            logging.info('  Obs./Gridded: {}'.format(', '.join(
                ['{:.3f}'.format(p) for p in ppt_ratio_list])))
        else:
            ppt_ratio_list = [1 for p in ppt_obs_list]

        # Use single mean monthly PPT for all cells
        # Assume ppt_obs_list is in month order
        fields = ppt_field_list + ratio_field_list
        with arcpy.da.UpdateCursor(hru.polygon_path, fields) as u_cursor:
            for row in u_cursor:
                for i, month in enumerate(month_list):
                    ppt_i = fields.index(ppt_field_format.format(month))
                    ratio_i = fields.index(ratio_field_format.format(month))

                    if ppt_obs_list[i] > 0:
                        row[ratio_i] = (ppt_ratio_list[i] * row[ppt_i] /
                                        ppt_obs_list[i])
                    else:
                        row[ratio_i] = 0
                u_cursor.updateRow(row)
            del row
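
# A minimal numeric sketch (pure Python, hypothetical values) of the ratio math
# above: ppt_ratio = obs / gridded_at_target, and each cell then gets
# ppt_ratio * gridded_cell / obs, which algebraically reduces to
# gridded_cell / gridded_at_target, so the target cell itself ends up at 1.0.
ppt_obs = 50.0             # observed mean monthly PPT at the station (mm)
ppt_gridded_target = 40.0  # gridded PPT at the target cell (PPT_HRU_ID)
ppt_gridded_cell = 60.0    # gridded PPT at any other cell

ppt_ratio = ppt_obs / ppt_gridded_target if ppt_gridded_target > 0 else 0
cell_ratio = ppt_ratio * ppt_gridded_cell / ppt_obs if ppt_obs > 0 else 0
assert abs(cell_ratio - ppt_gridded_cell / ppt_gridded_target) < 1e-9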
    if debug:
        arcpy.AddMessage(
            "fishnet: " +
            str(time.strftime("%m/%d/%Y  %H:%M:%S", time.localtime())))
    arcpy.CreateFishnet_management(prefishnet, originPoint, axisPoint, 10000,
                                   10000, numRows, numCols, "#", "#", "#",
                                   "POLYGON")
    deleteme.append(prefishnet)
    if debug:
        arcpy.AddMessage(
            "intersect fishnet & AOI: " +
            str(time.strftime("%m/%d/%Y  %H:%M:%S", time.localtime())))
    arcpy.Intersect_analysis([inputAOI, prefishnet], fishnet)
    deleteme.append(fishnet)

    numTiles = int(arcpy.GetCount_management(fishnet).getOutput(0))
    arcpy.AddMessage("AOI has " + str(numTiles) + " 10km square tiles.")

    fishnetBoundary = os.path.join("in_memory", "fishnetBoundary")
    if debug:
        arcpy.AddMessage(
            "fishnet boundary: " +
            str(time.strftime("%m/%d/%Y  %H:%M:%S", time.localtime())))
    arcpy.Dissolve_management(fishnet, fishnetBoundary)
    deleteme.append(fishnetBoundary)

    # Clip slope service layers over fishnet
    env.extent = fishnetBoundary
    env.mask = fishnetBoundary
    #arcpy.MakeImageServerLayer_management(inputSlope,"SlopeLayer")
    arcpy.MakeRasterLayer_management(inputSlope, "SlopeLayer")
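
# numRows, numCols, originPoint, and axisPoint are set earlier in this script
# (not shown here). A hedged sketch of a plausible derivation from the AOI
# extent and the 10000 m (10 km) cell size:
def fishnet_dimensions(aoi):
    import math
    ext = arcpy.Describe(aoi).extent
    num_cols = int(math.ceil((ext.XMax - ext.XMin) / 10000.0))
    num_rows = int(math.ceil((ext.YMax - ext.YMin) / 10000.0))
    origin_point = "{} {}".format(ext.XMin, ext.YMin)  # lower-left corner
    axis_point = "{} {}".format(ext.XMin, ext.YMax)    # point along the Y axis
    return num_rows, num_cols, origin_point, axis_point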
Example #19
arcpy.AddField_management(temp_shape, field, "SHORT", "9", "", "", "",
                          "NULLABLE", "NON_REQUIRED", "")
arcpy.MakeFeatureLayer_management(temp_shape, "fc_lyr")
arcpy.MakeFeatureLayer_management(Update_file, "up_lyr")
arcpy.AddJoin_management("fc_lyr", joinField, "up_lyr", joinField,
                         "KEEP_COMMON")
arcpy.CalculateField_management("fc_lyr", field, "1", "PYTHON", "")

print "rows updated"
arcpy.RemoveJoin_management("fc_lyr")

WhereClause = '"' + field + '" = 1'
arcpy.SelectLayerByAttribute_management("fc_lyr", "NEW_SELECTION", WhereClause)

if int(arcpy.GetCount_management("fc_lyr").getOutput(0)) > 0:
    arcpy.DeleteRows_management("fc_lyr")
    print "rows deleted"
arcpy.DeleteField_management("fc_lyr", [field])
arcpy.Merge_management(["fc_lyr", "up_lyr"], outfile)

try:
    fields = ["CommonName", "SciName", "Name", "Name_Sci"]
    uc = arcpy.da.UpdateCursor(outfile, fields)
    for row in uc:
        if row[0] is None:
            row[0] = row[2]
        if row[1] is None:
            row[1] = row[3]
        uc.updateRow(row)
except Exception:
    pass
Example #20
def crs6_add_data_preparation(args):

    # parameters
    wkgFolder = args[0]
    labelGDBname = args[1]
    log = args[2]

    # Set locations, etc
    labelGDBpath = os.path.join(wkgFolder, labelGDBname)
    fcEPrclPath = os.path.join(labelGDBpath, fcEsmntPrcl)
    fcELeasePath = os.path.join(labelGDBpath, fcEsmntLease)
    fcLPrclPath = os.path.join(labelGDBpath, fcLeasePrcl)
    fcRCLPath = os.path.join(labelGDBpath, fcRoadCL)
    fcRCdsslvPath = os.path.join(labelGDBpath, fcRCLdsslv)
    fcCadP = fcCadastre + "_P"
    fcCadPPath = os.path.join(labelGDBpath, fcCadP)
    tblPLnkPath = os.path.join(labelGDBpath, tblPropLink)
    fcPrclPLPath = os.path.join(labelGDBpath, fcPrclPLink)
    tblPropPath = os.path.join(labelGDBpath, tblProperty)
    fcCnnctPPath = os.path.join(labelGDBpath, fcConnectProp)
    fcDsslvIDPath = os.path.join(labelGDBpath, dsslvIDFC)

    # Set environment
    arcpy.env.workspace = labelGDBpath
    arcpy.env.overwriteOutput = True
    arcpy.env.configkeyword = "GEOMETRY"

    # log function
    etgLib.log_info(log, 'calling {}'.format(script_name), True)

    # variables
    err_message = None

    try:
        ### Easement lease
        # Copy easement parcel
        etgLib.log_info(log, 'Copying easement parcel data in labels gdb ...',
                        True)
        arcpy.Copy_management(fcEPrclPath, fcELeasePath)

        # Select records to append
        etgLib.log_info(log, 'Selecting lease parcels to append...')
        etgLib.delete_layer("leaseplyr")
        arcpy.MakeFeatureLayer_management(fcLPrclPath, "leaseplyr")
        parcelClause = "\"PARCEL_INTENT\" IN ('LCOV', 'EASM')"
        arcpy.SelectLayerByAttribute_management("leaseplyr", "NEW_SELECTION",
                                                parcelClause)

        etgLib.log_info(log, 'Appending lease parcels...')
        arcpy.Append_management("leaseplyr", fcELeasePath, "NO_TEST")

        ### Road CL
        etgLib.log_info(log, 'Working on road data...', True)
        if not etgLib.field_exist(fcRoadCL, rfield):
            # Add field
            arcpy.AddField_management(fcRoadCL, rfield, "TEXT", "", "",
                                      rfieldlen)

        # Calculate values
        etgLib.log_info(log, 'Calculate values: {} ...'.format(rfield))
        calcexpr = ('!{}!.upper() + ", " + !{}!.upper()').format(
            statsfields[0][0], statsfields[1][0])
        arcpy.CalculateField_management(fcRoadCL, rfield, calcexpr,
                                        "PYTHON_9.3")
        # Dissolve data, using statistics fields
        etgLib.log_info(log, 'Dissolving ...')
        arcpy.Dissolve_management(fcRCLPath, fcRCdsslvPath, rfield,
                                  statsfields)

        # Add fields
        arcpy.AddField_management(fcRCLdsslv, statsfields[0][0], "TEXT", "",
                                  "", sfieldlen)
        arcpy.AddField_management(fcRCLdsslv, statsfields[1][0], "TEXT", "",
                                  "", sfieldlen)
        # Calculate values (the fields were added above)
        sfields = []
        for i in range(len(statsfields)):
            sfields.append(statsfields[i][0])
            sfield = statsfields[i][1] + "_" + statsfields[i][0]
            calcexpr = ('!{}!').format(sfield)
            arcpy.CalculateField_management(fcRCLdsslv, statsfields[i][0],
                                            calcexpr, "PYTHON_9.3")

        ### Connect_Property
        etgLib.log_info(log, 'Working on Connect_Property', True)

        # Make query table
        mqtblList = [fcCadPPath, tblPLnkPath]
        whereClause = tblPropLink + "." + parIDfield + " = " + fcCadP + "." + parIDfield  # NOTE: no quotes - known bug
        arcpy.MakeQueryTable_management(mqtblList, "propQueryTbl",
                                        "ADD_VIRTUAL_KEY_FIELD", "", "",
                                        whereClause)
        # Get number of rows
        numMQrows = int(arcpy.GetCount_management("propQueryTbl").getOutput(0))
        # Export
        etgLib.log_info(log, 'Exporting...')
        arcpy.CopyFeatures_management("propQueryTbl", fcPrclPLPath)
        # Check number of rows
        numPPLrows = int(arcpy.GetCount_management(fcPrclPLPath).getOutput(0))
        if numPPLrows != numMQrows:
            etgLib.log_error(
                log,
                'ERROR: Wrong number of rows exported for link FC; {} versus {}'
                .format(numMQrows, numPPLrows))
        else:
            etgLib.log_info(log,
                            'Correct number of rows exported for link FC.')

        # Dissolve on ID
        etgLib.log_info(log, 'Dissolving on ID...', True)
        dfield = tblPropLink + "_" + propIDfield
        sfield = tblPropLink + "_" + parIDfield
        statsfield = [[sfield, "COUNT"]]
        arcpy.Dissolve_management(fcPrclPLink, fcDsslvIDPath, dfield,
                                  statsfield)

        # Join the TP_Property table
        etgLib.log_info(log, 'Preparing to join property table...')
        # Create temporary layer/view
        etgLib.delete_layer('dsslvlyr')
        arcpy.MakeFeatureLayer_management(fcDsslvIDPath, "dsslvlyr")
        etgLib.delete_layer('proptblview')
        arcpy.MakeTableView_management(tblPropPath, "proptblview")

        # Make join
        etgLib.log_info(log, 'Adding join ...')
        arcpy.AddJoin_management("dsslvlyr", dfield, "proptblview",
                                 propIDfield, "KEEP_ALL")
        etgLib.log_info(log, 'Property table joined')

        # Output
        etgLib.log_info(log, 'Copying features...')
        arcpy.CopyFeatures_management("dsslvlyr", fcCnnctPPath)

        etgLib.log_process_time(log, starttime)

    except Exception as e:
        err_message = "ERROR while running {0}: {1}".format(script_name, e)

    return err_message
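
# A hedged usage sketch for crs6_add_data_preparation(); the folder, gdb name,
# and logger below are hypothetical, and the module-level feature class names
# (fcEsmntPrcl, etc.) are assumed to be configured elsewhere in the module.
if __name__ == '__main__':
    import logging
    log = logging.getLogger('crs6')
    err = crs6_add_data_preparation([r'D:\GISworkspace', 'Labels.gdb', log])
    if err:
        print(err)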
Example #21
def getACdataset(Flag, MinID, MaxID, layerLock, ID):

    #Sets up the workspace the child will use.
    mem_points_child_workspace = r"in_memory\tempACdata" + str(ID)
    high_slope_memory = r"in_memory\tempACdataHighSlope" + str(ID)

    randomVal = 0
    randomVal2 = 0

    #While there's data to process...
    while True:
        #Child waits for parent to say go.
        while Flag.value != CHILD_GO and Flag.value != NO_MORE_DATA:
            pass

        if Flag.value == NO_MORE_DATA:
            break

        minID = MinID.value
        maxID = MaxID.value

        if randomVal % 26 == 0:
            randomVal2 += 1

        Points = []

        indexID = minID

        try:
            layerLock.acquire()
            arcpy.MakeFeatureLayer_management(
                POINT_DATASET_NAME, mem_points_child_workspace,
                "OBJECTID <= {} AND OBJECTID >= {}".format(maxID, minID))
            layerLock.release()
        except Exception:
            Flag.value = CHILD_ERROR
            return

        search_fields = [
            SLOPE_COLUMN_NAME, ELEVATION_COLUMN_NAME, "POINT_X", "POINT_Y",
            "pointid"
        ]

        arcpy.SelectLayerByAttribute_management(
            mem_points_child_workspace, "NEW_SELECTION",
            "{} >= {}".format(SLOPE_COLUMN_NAME, "30"))

        if int(
                arcpy.GetCount_management(
                    mem_points_child_workspace).getOutput(0)) == 0:
            arcpy.Delete_management(mem_points_child_workspace)
            Flag.value = PARENT_GO
            continue

        arcpy.CopyFeatures_management(mem_points_child_workspace,
                                      high_slope_memory)

        arcpy.SelectLayerByAttribute_management(mem_points_child_workspace,
                                                "CLEAR_SELECTION")

        with arcpy.da.SearchCursor(mem_points_child_workspace,
                                   search_fields) as searchAll:
            with arcpy.da.SearchCursor(high_slope_memory,
                                       search_fields) as searchHighSlope:
                try:
                    pointA = searchHighSlope.next()
                    pointB = searchHighSlope.next()
                    pointC = searchAll.next()
                    global tempIndex
                    while True:
                        if isclose(float(pointB[3]), float(pointA[3])):
                            if float(pointB[2]) - float(
                                    pointA[2]) < 2 * RESOLUTION:
                                pass
                            elif float(pointB[2]) - float(
                                    pointA[2]) < MAX_STREAM_WIDTH:
                                tempIndex = indexID
                                pointC = SyncPoints(pointC, searchAll, pointA)
                                indexID = tempIndex

                                pointC = searchAll.next()
                                indexID += 1

                                while not isclose(float(pointC[2]),
                                                  float(pointB[2])):
                                    if float(pointC[0]) <= 10:
                                        if float(pointC[1]) <= float(
                                                pointA[1]) and float(
                                                    pointC[1]) <= float(
                                                        pointB[1]):
                                            Points.append(
                                                Point(indexID,
                                                      float(pointC[2]),
                                                      float(pointC[3])))
                                        elif float(pointC[1]) <= float(
                                                pointA[1]) or float(
                                                    pointC[1]) <= float(
                                                        pointB[1]):
                                            if existsAdjacent(pointC, Points):
                                                Points.append(
                                                    Point(
                                                        indexID,
                                                        float(pointC[2]),
                                                        float(pointC[3])))

                                    pointC = searchAll.next()
                                    indexID += 1
                            else:
                                tempIndex = indexID
                                pointC = SyncPointsClose(
                                    pointC, searchAll, pointA)
                                indexID = tempIndex

                                while abs(float(pointC[2]) -
                                          float(pointA[2])) < MAX_STREAM_WIDTH:
                                    if float(pointC[1]) <= float(
                                            pointA[1]) and float(
                                                pointC[0]) <= 10:
                                        if existsAdjacent(pointC, Points):
                                            Points.append(
                                                Point(indexID,
                                                      float(pointC[2]),
                                                      float(pointC[3])))
                                    pointC = searchAll.next()
                                    indexID += 1

##                                tempIndex = indexID
##                                pointC = SyncPointsClose(pointC, searchAll, pointB)
##                                indexID = tempIndex
##
##                                while abs(float(pointC[2]) - float(pointB[2])) < MAX_STREAM_WIDTH:
##                                    if float(pointC[1]) <= float(pointB[1]) and float(pointC[0]) <= 10:
##                                        if existsAdjacent(pointC, Points):
##                                            Points.append(Point(indexID, float(pointC[2]), float(pointC[3])))
##                                    pointC = searchAll.next()
##                                    indexID += 1

                            pointA = pointB
                            pointB = searchHighSlope.next()
                            continue

                        else:
                            tempIndex = indexID
                            pointC = SyncPointsClose(pointC, searchAll, pointA)
                            indexID = tempIndex

                            while abs(float(pointC[2]) -
                                      float(pointA[2])) < MAX_STREAM_WIDTH:
                                if float(pointC[1]) <= float(
                                        pointA[1]) and float(pointC[0]) <= 10:
                                    if existsAdjacent(pointC, Points):
                                        Points.append(
                                            Point(indexID, float(pointC[2]),
                                                  float(pointC[3])))

                                pointC = searchAll.next()
                                indexID += 1

                            while abs(float(pointB[3]) -
                                      float(pointC[3])) > 2 * RESOLUTION:
                                tempIndex = indexID
                                pointC = SyncPointsCloseOnX(
                                    pointC, searchAll, pointA)
                                indexID = tempIndex

                                while abs(float(pointC[2]) -
                                          float(pointA[2])) < MAX_STREAM_WIDTH:
                                    if float(pointC[1]) <= float(
                                            pointA[1]) and float(
                                                pointC[0]) <= 10:
                                        if existsAdjacent(pointC, Points):
                                            Points.append(
                                                Point(indexID,
                                                      float(pointC[2]),
                                                      float(pointC[3])))

                                    pointC = searchAll.next()
                                    indexID += 1

                                tempIndex = indexID
                                pointC = getToNewYvalue(pointC, searchAll)
                                indexID = tempIndex


##                            tempIndex = indexID
##                            pointC = SyncPointsClose(pointC, searchAll, pointB)
##                            indexID = tempIndex
##
##                            while abs(float(pointC[2]) - float(pointB[2])) < MAX_STREAM_WIDTH:
##                                if float(pointC[1]) <= float(pointB[1]) and float(pointC[0]) <= 10:
##                                    Points.append(Point(indexID, float(pointC[2]), float(pointC[3])))
##
##                                pointC = searchAll.next()
##                                indexID += 1

                            pointA = pointB
                            pointB = searchHighSlope.next()
                            continue

                except StopIteration:
                    pass

        if len(Points) == 0:
            arcpy.Delete_management(mem_points_child_workspace)
            arcpy.Delete_management(high_slope_memory)
            Flag.value = PARENT_GO
            randomVal += 1
            continue

        ACdataset = list((item.ID for item in Points))

        #Massaging data into a format that arcpy functions will like.
        string = str(ACdataset)
        QS = convertStringToQueryString(string)

        #MAKE LAYER FROM AC LIST
        arcpy.SelectLayerByAttribute_management(mem_points_child_workspace,
                                                "CLEAR_SELECTION")
        arcpy.SelectLayerByAttribute_management(mem_points_child_workspace,
                                                "NEW_SELECTION",
                                                "OBJECTID IN " + QS)

        try:
            layerLock.acquire()
            arcpy.CopyFeatures_management(
                mem_points_child_workspace,
                os.path.join(
                    DATABASE, CURRENT_DATE + "_" +
                    numsToAlpha(ID, randomVal, randomVal2) + "_PTs"))
            layerLock.release()
        except Exception as e:
            print(e)
            Flag.value = CHILD_ERROR
            return

        randomVal += 1

        Flag.value = PARENT_GO

        arcpy.Delete_management(mem_points_child_workspace)
        arcpy.Delete_management(high_slope_memory)
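
# The parent side of the Flag handshake above is not shown. A hedged sketch of
# the protocol: the parent publishes an OBJECTID range, flips Flag to CHILD_GO,
# and busy-waits until the child flips it back to PARENT_GO; id_ranges is a
# hypothetical list of (min, max) pairs.
def parent_driver(id_ranges):
    from multiprocessing import Process, Value, Lock
    flag = Value('i', PARENT_GO)
    min_id = Value('i', 0)
    max_id = Value('i', 0)
    layer_lock = Lock()
    child = Process(target=getACdataset,
                    args=(flag, min_id, max_id, layer_lock, 0))
    child.start()
    for lo, hi in id_ranges:
        min_id.value, max_id.value = lo, hi
        flag.value = CHILD_GO              # tell the child to process the range
        while flag.value not in (PARENT_GO, CHILD_ERROR):
            pass                           # busy-wait, mirroring the child
        if flag.value == CHILD_ERROR:
            break
    flag.value = NO_MORE_DATA              # tell the child to exit
    child.join()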
Example #22
arcpy.env.overwriteOutput = True

polygon = arcpy.GetParameterAsText(0)
labels = arcpy.GetParameterAsText(1)
q_grid = arcpy.GetParameterAsText(2)
output = arcpy.GetParameterAsText(3)

#Assigning labels
one, two, three, four = labels.split(", ")

mem_point = arcpy.CreateFeatureclass_management("in_memory", "mem_point",
                                                "POINT", "", "DISABLED",
                                                "DISABLED", polygon)
arcpy.AddField_management(mem_point, "GridLabel", "TEXT")

result = arcpy.GetCount_management(polygon)
count = int(result.getOutput(0))

arcpy.SetProgressor("step", "Creating Q Section Labels...", 0, count, 1)

insert_cursor = arcpy.da.InsertCursor(mem_point, ["SHAPE@XY", "GridLabel"])
search_cursor = arcpy.da.SearchCursor(polygon, ["SHAPE@"])

for row in search_cursor:
    try:
        coordinateList = []
        lowerLeft_distances = {}
        lowerRight_distances = {}
        upperLeft_distances = {}
        upperRight_distances = {}
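        # The remainder of this loop is truncated in the source. A hedged
        # sketch of what it plausibly does: place the four quarter-section
        # labels halfway between the polygon centroid and each extent corner
        # (one..four come from the user's label string, e.g. 'NW, NE, SW, SE').
        polygon_geom = row[0]
        cx, cy = polygon_geom.centroid.X, polygon_geom.centroid.Y
        ext = polygon_geom.extent
        corners = {one: (ext.XMin, ext.YMax), two: (ext.XMax, ext.YMax),
                   three: (ext.XMin, ext.YMin), four: (ext.XMax, ext.YMin)}
        for label, (x, y) in corners.items():
            insert_cursor.insertRow((((x + cx) / 2.0, (y + cy) / 2.0), label))
        arcpy.SetProgressorPosition()
    except Exception:
        arcpy.AddWarning('Skipping a polygon that could not be labeled.')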
Example #23
def temp_adjust_parameters(config_path):
    """Calculate GSFLOW Temperature Adjustment Parameters

    Parameters
    ----------
    config_path : str
        Project configuration file (.ini) path.

    Returns
    -------
    None

    """
    # Hardcoded HRU field formats for now
    tmax_field_fmt = 'TMAX_{:02d}'
    tmin_field_fmt = 'TMIN_{:02d}'
    tmax_adj_field_fmt = 'TMX_ADJ_{:02d}'
    tmin_adj_field_fmt = 'TMN_ADJ_{:02d}'

    # Initialize hru_parameters class
    hru = support.HRUParameters(config_path)

    # Open input parameter config file
    inputs_cfg = ConfigParser.ConfigParser()
    try:
        inputs_cfg.readfp(open(config_path))
    except Exception as e:
        logging.error('\nERROR: Config file could not be read, '
                      'is not an input file, or does not exist\n'
                      '  config_file = {}\n'
                      '  Exception: {}\n'.format(config_path, e))
        sys.exit()

    # Log DEBUG to file
    log_file_name = 'temp_adjust_parameters_log.txt'
    log_console = logging.FileHandler(filename=os.path.join(
        hru.log_ws, log_file_name),
                                      mode='w')
    log_console.setLevel(logging.DEBUG)
    log_console.setFormatter(logging.Formatter('%(message)s'))
    logging.getLogger('').addHandler(log_console)
    logging.info('\nGSFLOW Temperature Adjustment Parameters')

    # Units
    temp_obs_units = support.get_param('temp_obs_units', 'C',
                                       inputs_cfg).upper()
    temp_units_list = ['C', 'F', 'K']
    # Compare against the upper case of the values in the list
    #   but don't modify the acceptable units list
    if temp_obs_units not in temp_units_list:
        logging.error('\nERROR: Invalid observed temperature units ({})\n  '
                      'Valid units are: {}'.format(temp_obs_units,
                                                   ', '.join(temp_units_list)))
        sys.exit()

    # Check input paths
    if not arcpy.Exists(hru.polygon_path):
        logging.error('\nERROR: Fishnet ({}) does not exist'.format(
            hru.polygon_path))
        sys.exit()

    # Temperature calculation method
    try:
        temp_calc_method = inputs_cfg.get('INPUTS',
                                          'temperature_calc_method').upper()
    except:
        temp_calc_method = '1STA'
        logging.info('  Defaulting temperature_calc_method = {}'.format(
            temp_calc_method))

    temp_calc_options = ['ZONES', '1STA', 'LAPSE']
    if temp_calc_method not in temp_calc_options:
        logging.error(
            '\nERROR: Invalid temperature calculation method ({})\n  '
            'Valid methods are: {}'.format(temp_calc_method,
                                           ', '.join(temp_calc_options)))
        sys.exit()
    if temp_calc_method == 'LAPSE':
        logging.warning(
            '  If the temperature calculation method is set to LAPSE,\n'
            '  it is not necessary to run temp_adjust_parameters.py\n'
            '  Exiting\n')
        return False

    if temp_calc_method == 'ZONES':
        temp_zone_orig_path = inputs_cfg.get('INPUTS', 'temp_zone_path')
        try:
            temp_zone_field = inputs_cfg.get('INPUTS', 'temp_zone_field')
        except:
            logging.error(
                '\nERROR: temp_zone_field must be set in INI to apply '
                'zone specific temperature adjustments\n')
            sys.exit()
        try:
            temp_hru_id_field = inputs_cfg.get('INPUTS', 'temp_hru_id_field')
        except:
            temp_hru_id_field = None
            logging.warning(
                '  temp_hru_id_field was not set in the INI file\n'
                '  Temperature adjustments will not be changed to match '
                'station values')

        # Field name for TSTA hard coded, but could be changed to be read from
        # config file like temp_zone
        hru_tsta_field = 'HRU_TSTA'

        try:
            tmax_obs_field_fmt = inputs_cfg.get('INPUTS',
                                                'tmax_obs_field_format')
        except:
            tmax_obs_field_fmt = 'TMAX_{:02d}'
            logging.info('  Defaulting tmax_obs_field_format = {}'.format(
                tmax_obs_field_fmt))

        try:
            tmin_obs_field_fmt = inputs_cfg.get('INPUTS',
                                                'tmin_obs_field_format')
        except:
            tmin_obs_field_fmt = 'TMIN_{:02d}'
            logging.info('  Defaulting tmin_obs_field_format = {}'.format(
                tmin_obs_field_fmt))

        if not arcpy.Exists(temp_zone_orig_path):
            logging.error(
                '\nERROR: Temperature Zone ({}) does not exist'.format(
                    temp_zone_orig_path))
            sys.exit()
        # temp_zone_path must be a polygon shapefile
        if arcpy.Describe(temp_zone_orig_path).datasetType != 'FeatureClass':
            logging.error(
                '\nERROR: temp_zone_path must be a polygon shapefile')
            sys.exit()

        # Check temp_zone_field
        if temp_zone_field.upper() in ['FID', 'OID']:
            temp_zone_field = arcpy.Describe(temp_zone_orig_path).OIDFieldName
            logging.warning('\n  NOTE: Using {} to set {}\n'.format(
                temp_zone_field, hru.temp_zone_id_field))
        elif not arcpy.ListFields(temp_zone_orig_path, temp_zone_field):
            logging.error(
                '\nERROR: temp_zone_field field {} does not exist\n'.format(
                    temp_zone_field))
            sys.exit()
        # Need to check that field is an int type
        # Should we only check active cells (HRU_TYPE > 0)?
        elif not [
                f.type for f in arcpy.Describe(temp_zone_orig_path).fields
                if (f.name == temp_zone_field
                    and f.type in ['SmallInteger', 'Integer'])
        ]:
            logging.error(
                '\nERROR: temp_zone_field field {} must be an integer type\n'.
                format(temp_zone_field))
            sys.exit()
        # Need to check that field values are all positive
        # Should we only check active cells (HRU_TYPE > 0)?
        elif min([
                row[0] for row in arcpy.da.SearchCursor(
                    temp_zone_orig_path, [temp_zone_field])
        ]) <= 0:
            logging.error(
                '\nERROR: temp_zone_field {} values must be positive\n'.format(
                    temp_zone_field))
            sys.exit()

        # Check hru_tsta_field
        if not arcpy.ListFields(temp_zone_orig_path, hru_tsta_field):
            logging.error(
                '\nERROR: hru_tsta_field field {} does not exist\n'.format(
                    hru_tsta_field))
            sys.exit()
        # Need to check that field is an int type
        # Only check active cells (HRU_TYPE >0)?!
        elif not [
                f.type for f in arcpy.Describe(temp_zone_orig_path).fields
                if (f.name == hru_tsta_field
                    and f.type in ['SmallInteger', 'Integer'])
        ]:
            logging.error(
                '\nERROR: hru_tsta_field field {} must be an integer type\n'.
                format(hru_tsta_field))
            sys.exit()
        # Need to check that field values are all positive
        # Only check active cells (HRU_TYPE >0)?!
        elif min([
                row[0] for row in arcpy.da.SearchCursor(
                    temp_zone_orig_path, [hru_tsta_field])
        ]) <= 0:
            logging.error(
                '\nERROR: hru_tsta_field {} values must be positive\n'.format(
                    hru_tsta_field))
            sys.exit()

        # Check temp_hru_id_field
        # temp_hru_id values are checked later
        if temp_hru_id_field is not None:
            if not arcpy.ListFields(temp_zone_orig_path, temp_hru_id_field):
                logging.error(
                    '\nERROR: temp_hru_id_field field {} does not exist\n'.
                    format(temp_hru_id_field))
                sys.exit()
            # Need to check that field is an int type
            elif not [
                    f.type for f in arcpy.Describe(temp_zone_orig_path).fields
                    if (f.name == temp_hru_id_field
                        and f.type in ['SmallInteger', 'Integer'])
            ]:
                logging.error(
                    '\nERROR: temp_hru_id_field field {} must be an integer type\n'
                    .format(temp_hru_id_field))
                sys.exit()
            # Need to check that field values are not negative (0 is okay)
            elif min([
                    row[0] for row in arcpy.da.SearchCursor(
                        temp_zone_orig_path, [temp_hru_id_field])
            ]) < 0:
                logging.error(
                    '\nERROR: temp_hru_id_field {} values cannot be negative\n'
                    .format(temp_hru_id_field))
                sys.exit()
    elif temp_calc_method == '1STA':
        # If a zone shapefile is not used, temperature must be set manually
        tmax_obs_list = inputs_cfg.get('INPUTS', 'tmax_obs_list')
        tmin_obs_list = inputs_cfg.get('INPUTS', 'tmin_obs_list')

        # Check that values are floats
        try:
            tmax_obs_list = map(float, tmax_obs_list.split(','))
        except ValueError:
            logging.error('\nERROR: tmax_obs_list (mean monthly tmax) '
                          'values could not be parsed as floats\n')
            sys.exit()
        try:
            tmin_obs_list = map(float, tmin_obs_list.split(','))
        except ValueError:
            logging.error('\nERROR: tmin_obs_list (mean monthly tmin) '
                          'values could not be parsed as floats\n')
            sys.exit()

        # Check that there are 12 values
        if len(tmax_obs_list) != 12:
            logging.error('\nERROR: There must be exactly 12 mean monthly '
                          'observed tmax values in tmax_obs_list\n')
            sys.exit()
        logging.info(
            '  Observed mean monthly tmax ({}):\n    {}\n'
            '    (Script will assume these are listed in month order, '
            'i.e. Jan, Feb, ...)'.format(temp_obs_units,
                                         ', '.join(map(str, tmax_obs_list))))

        if len(tmin_obs_list) != 12:
            logging.error('\nERROR: There must be exactly 12 mean monthly '
                          'observed tmin values in tmin_obs_list\n')
            sys.exit()
        logging.info(
            '  Observed mean monthly tmin ({}):\n    {}\n'
            '    (Script will assume these are listed in month order, '
            'i.e. Jan, Feb, ...)'.format(temp_obs_units,
                                         ', '.join(map(str, tmin_obs_list))))

        # Check if all the values are 0
        if tmax_obs_list == ([0.0] * 12):
            logging.error(
                '\nERROR: The observed tmax values are all 0.\n'
                '  To compute tmax adjustments, please set the tmax_obs_list '
                'parameter in the INI with\n  observed mean monthly tmax '
                'values (i.e. from a weather station)')
            sys.exit()
        if tmin_obs_list == ([0.0] * 12):
            logging.error(
                '\nERROR: The observed tmin values are all 0.\n'
                '  To compute tmin adjustments, please set the tmin_obs_list '
                'parameter in the INI with\n  observed mean monthly tmin '
                'values (i.e. from a weather station)')
            sys.exit()

        # Get the temperature HRU ID
        try:
            temp_hru_id = inputs_cfg.getint('INPUTS', 'temp_hru_id')
        except:
            temp_hru_id = 0

        # Check that the temp_hru_id is a valid cell hru_id
        # If temp_hru_id is 0, temperature adjustments will not be adjusted
        if temp_hru_id > 0:
            # Check that HRU_ID is valid
            logging.info('    Temperature HRU_ID: {}'.format(temp_hru_id))
            arcpy.MakeTableView_management(
                hru.polygon_path, "layer",
                "{} = {}".format(hru.id_field, temp_hru_id))
            if (temp_hru_id != 0 and int(
                    arcpy.GetCount_management("layer").getOutput(0)) == 0):
                logging.error(
                    '\nERROR: temp_hru_id {0} is not a valid cell hru_id'
                    '\nERROR: temp adjustments will NOT be adjusted to match'
                    ' station values at cell {0}\n'.format(temp_hru_id))
                temp_hru_id = 0
            arcpy.Delete_management("layer")
        else:
            logging.info(
                '  Temperature adjustments will not be adjusted to match '
                'station values\n    (temp_hru_id = 0)')

        # Could add a second check that HRU_TSTA has values >0

    # Build output folders if necessary
    temp_adj_temp_ws = os.path.join(hru.param_ws, 'temp_adjust')
    if not os.path.isdir(temp_adj_temp_ws):
        os.mkdir(temp_adj_temp_ws)
    temp_zone_path = os.path.join(temp_adj_temp_ws, 'temp_zone.shp')
    # temp_zone_clip_path = os.path.join(temp_adj_temp_ws, 'temp_zone_clip.shp')

    # Set ArcGIS environment variables
    arcpy.CheckOutExtension('Spatial')
    env.overwriteOutput = True
    # env.pyramid = 'PYRAMIDS -1'
    env.pyramid = 'PYRAMIDS 0'
    env.workspace = hru.param_ws
    env.scratchWorkspace = hru.scratch_ws

    # Set month list based on flags
    month_list = range(1, 13)
    tmax_field_list = [tmax_field_fmt.format(m) for m in month_list]
    tmin_field_list = [tmin_field_fmt.format(m) for m in month_list]
    tmax_adj_field_list = [tmax_adj_field_fmt.format(m) for m in month_list]
    tmin_adj_field_list = [tmin_adj_field_fmt.format(m) for m in month_list]

    # Check fields
    logging.info('\nAdding temperature adjust fields if necessary')
    # Temperature zone fields
    support.add_field_func(hru.polygon_path, hru.temp_zone_id_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.hru_tsta_field, 'SHORT')
    # Temperature adjustment fields
    for tmax_adj_field in tmax_adj_field_list:
        support.add_field_func(hru.polygon_path, tmax_adj_field, 'DOUBLE')
    for tmin_adj_field in tmin_adj_field_list:
        support.add_field_func(hru.polygon_path, tmin_adj_field, 'DOUBLE')

    # Calculate temperature zone ID
    if temp_calc_method == 'ZONES':
        logging.info('\nCalculating cell HRU Temperature Zone ID')
        temp_zone_desc = arcpy.Describe(temp_zone_orig_path)
        temp_zone_sr = temp_zone_desc.spatialReference
        logging.debug('  Zones:      {}'.format(temp_zone_orig_path))
        logging.debug('  Projection: {}'.format(temp_zone_sr.name))
        logging.debug('  GCS:        {}'.format(temp_zone_sr.GCS.name))

        # Reset temp_ZONE_ID
        logging.info('  Resetting {} to 0'.format(hru.temp_zone_id_field))
        arcpy.CalculateField_management(hru.polygon_path,
                                        hru.temp_zone_id_field, 0, 'PYTHON')

        # If temp_zone spat_ref doesn't match hru_param spat_ref
        # Project temp_zone to hru_param spat ref
        # Otherwise, read temp_zone directly
        if hru.sr.name != temp_zone_sr.name:
            logging.info('  Projecting temperature zones...')
            # Set preferred transforms
            transform_str = support.transform_func(hru.sr, temp_zone_sr)
            logging.debug('    Transform: {}'.format(transform_str))
            # Project temp_zone shapefile
            arcpy.Project_management(temp_zone_orig_path, temp_zone_path,
                                     hru.sr, transform_str, temp_zone_sr)
            del transform_str
        else:
            arcpy.Copy_management(temp_zone_orig_path, temp_zone_path)

        # # Remove all unnecessary fields
        # for field in arcpy.ListFields(temp_zone_path):
        #     skip_field_list = temp_obs_field_list + [temp_zone_field, 'Shape']
        #     if field.name not in skip_field_list:
        #         try:
        #             arcpy.DeleteField_management(temp_zone_path, field.name)
        #         except:
        #             pass

        # Set temperature zone ID
        logging.info('  Setting {}'.format(hru.temp_zone_id_field))
        support.zone_by_centroid_func(temp_zone_path, hru.temp_zone_id_field,
                                      temp_zone_field, hru.polygon_path,
                                      hru.point_path, hru)
        # support.zone_by_area_func(
        #    temp_zone_layer, hru.temp_zone_id_field, temp_zone_field,
        #    hru.polygon_path, hru, hru_area_field, None, 50)

        # Set HRU_TSTA
        logging.info('  Setting {}'.format(hru.hru_tsta_field))
        support.zone_by_centroid_func(temp_zone_path, hru.hru_tsta_field,
                                      hru_tsta_field, hru.polygon_path,
                                      hru.point_path, hru)

        del temp_zone_desc, temp_zone_sr
    elif temp_calc_method == '1STA':
        # Set all cells to zone 1
        arcpy.CalculateField_management(hru.polygon_path, hru.hru_tsta_field,
                                        1, 'PYTHON')

    # Calculate adjustments
    logging.info('\nCalculating mean monthly temperature adjustments')
    if temp_calc_method == 'ZONES':
        # Read mean monthly values for each zone
        tmax_obs_dict = dict()
        tmin_obs_dict = dict()
        tmax_obs_field_list = [
            tmax_obs_field_fmt.format(m) for m in month_list
        ]
        tmin_obs_field_list = [
            tmin_obs_field_fmt.format(m) for m in month_list
        ]
        tmax_fields = [temp_zone_field] + tmax_obs_field_list
        tmin_fields = [temp_zone_field] + tmin_obs_field_list
        logging.debug('  Tmax Obs. Fields: {}'.format(', '.join(tmax_fields)))
        logging.debug('  Tmin Obs. Fields: {}'.format(', '.join(tmin_fields)))

        with arcpy.da.SearchCursor(temp_zone_path, tmax_fields) as s_cursor:
            for row in s_cursor:
                tmax_obs_dict[int(row[0])] = map(float, row[1:13])
        with arcpy.da.SearchCursor(temp_zone_path, tmin_fields) as s_cursor:
            for row in s_cursor:
                tmin_obs_dict[int(row[0])] = map(float, row[1:13])

        # Convert values to Celsius if necessary to match PRISM
        if temp_obs_units == 'F':
            tmax_obs_dict = {
                z: [(t - 32) * (5.0 / 9) for t in t_list]
                for z, t_list in tmax_obs_dict.items()
            }
            tmin_obs_dict = {
                z: [(t - 32) * (5.0 / 9) for t in t_list]
                for z, t_list in tmin_obs_dict.items()
            }
        elif temp_obs_units == 'K':
            tmax_obs_dict = {
                z: [(t - 273.15) for t in t_list]
                for z, t_list in tmax_obs_dict.items()
            }
            tmin_obs_dict = {
                z: [(t - 273.15) for t in t_list]
                for z, t_list in tmin_obs_dict.items()
            }

        tmax_zone_list = sorted(tmax_obs_dict.keys())
        tmin_zone_list = sorted(tmin_obs_dict.keys())
        logging.debug('  Tmax Zones: {}'.format(tmax_zone_list))
        logging.debug('  Tmin Zones: {}'.format(tmin_zone_list))

        # Print the observed temperature values
        logging.debug('  Observed Tmax')
        for zone, tmax_obs in tmax_obs_dict.items():
            logging.debug('    {}: {}'.format(
                zone, ', '.join(['{:.2f}'.format(x) for x in tmax_obs])))
        logging.debug('  Observed Tmin')
        for zone, tmin_obs in tmin_obs_dict.items():
            logging.debug('    {}: {}'.format(
                zone, ', '.join(['{:.2f}'.format(x) for x in tmin_obs])))

        # Default all zones to an adjustment of 0
        tmax_adj_dict = {z: [0] * 12 for z in tmax_zone_list}
        tmin_adj_dict = {z: [0] * 12 for z in tmin_zone_list}

        # Get list of HRU_IDs for each zone
        fields = [hru.temp_zone_id_field, hru.id_field]
        zone_hru_id_dict = defaultdict(list)
        with arcpy.da.SearchCursor(hru.polygon_path, fields) as s_cursor:
            for row in s_cursor:
                zone_hru_id_dict[int(row[0])].append(int(row[1]))

        # Check that TEMP_HRU_IDs are in the correct zone
        # Default all temperature zone HRU IDs to 0
        temp_hru_id_dict = {z: 0 for z in tmax_zone_list}
        if temp_hru_id_field is not None:
            fields = [temp_zone_field, temp_hru_id_field]
            logging.debug('  Temp Zone ID field: {}'.format(temp_zone_field))
            logging.debug('  Temp HRU ID field: {}'.format(temp_hru_id_field))
            with arcpy.da.SearchCursor(temp_zone_path, fields) as s_cursor:
                for row in s_cursor:
                    temp_zone = int(row[0])
                    hru_id = int(row[1])
                    if hru_id == 0 or hru_id in zone_hru_id_dict[temp_zone]:
                        temp_hru_id_dict[temp_zone] = hru_id
                        logging.debug('    {}: {}'.format(temp_zone, hru_id))
                    else:
                        logging.error(
                            '\nERROR: HRU_ID {} is not in temperature ZONE {}'.
                            format(hru_id, temp_zone))
                        sys.exit()

        # Get gridded tmax values for each TEMP_HRU_ID
        fields = [hru.temp_zone_id_field, hru.id_field] + tmax_field_list
        with arcpy.da.SearchCursor(hru.polygon_path, fields) as s_cursor:
            for row in s_cursor:
                temp_zone = int(row[0])
                hru_id = int(row[1])
                if hru_id == 0:
                    pass
                elif hru_id in temp_hru_id_dict.values():
                    tmax_gridded_list = map(float, row[2:14])
                    tmax_obs_list = tmax_obs_dict[temp_zone]
                    tmax_adj_list = [
                        float(o) - t
                        for o, t in zip(tmax_obs_list, tmax_gridded_list)
                    ]
                    tmax_adj_dict[temp_zone] = tmax_adj_list

        # Get gridded tmin values for each TEMP_HRU_ID
        fields = [hru.temp_zone_id_field, hru.id_field] + tmin_field_list
        with arcpy.da.SearchCursor(hru.polygon_path, fields) as s_cursor:
            for row in s_cursor:
                temp_zone = int(row[0])
                hru_id = int(row[1])
                if hru_id == 0:
                    pass
                elif hru_id in temp_hru_id_dict.values():
                    tmin_gridded_list = map(float, row[2:14])
                    tmin_obs_list = tmin_obs_dict[temp_zone]
                    tmin_adj_list = [
                        float(o) - t
                        for o, t in zip(tmin_obs_list, tmin_gridded_list)
                    ]
                    tmin_adj_dict[temp_zone] = tmin_adj_list
        del temp_hru_id_dict, zone_hru_id_dict, fields

        logging.debug('  Tmax Adjustment Factors:')
        for k, v in tmax_adj_dict.items():
            logging.debug('    {}: {}'.format(
                k, ', '.join(['{:.3f}'.format(x) for x in v])))

        logging.debug('  Tmin Adjustment Factors:')
        for k, v in tmin_adj_dict.items():
            logging.debug('    {}: {}'.format(
                k, ', '.join(['{:.3f}'.format(x) for x in v])))

        logging.debug('\nWriting adjustment values to hru_params')
        fields = [hru.temp_zone_id_field]
        fields.extend(tmax_field_list + tmax_adj_field_list)
        fields.extend(tmin_field_list + tmin_adj_field_list)
        with arcpy.da.UpdateCursor(hru.polygon_path, fields) as u_cursor:
            for row in u_cursor:
                zone = int(row[0])
                for i, month in enumerate(month_list):
                    tmax_i = fields.index(tmax_field_fmt.format(month))
                    tmax_adj_i = fields.index(tmax_adj_field_fmt.format(month))
                    row[tmax_adj_i] = (row[tmax_i] - tmax_obs_dict[zone][i] +
                                       tmax_adj_dict[zone][i])

                    tmin_i = fields.index(tmin_field_fmt.format(month))
                    tmin_adj_i = fields.index(tmin_adj_field_fmt.format(month))
                    row[tmin_adj_i] = (row[tmin_i] - tmin_obs_dict[zone][i] +
                                       tmin_adj_dict[zone][i])
                u_cursor.updateRow(row)
            del row

    elif temp_calc_method == '1STA':
        # Get gridded temperature at temp_HRU_ID
        tmax_fields = [hru.id_field] + tmax_field_list
        tmin_fields = [hru.id_field] + tmin_field_list
        logging.debug('  Tmax Fields: {}'.format(', '.join(tmax_field_list)))
        logging.debug('  Tmin Fields: {}'.format(', '.join(tmin_field_list)))

        # Convert values to Celsius if necessary to match PRISM
        if temp_obs_units == 'F':
            tmax_obs_list = [(t - 32) * (5.0 / 9) for t in tmax_obs_list]
            tmin_obs_list = [(t - 32) * (5.0 / 9) for t in tmin_obs_list]
        elif temp_obs_units == 'K':
            tmax_obs_list = [t - 273.15 for t in tmax_obs_list]
            tmin_obs_list = [t - 273.15 for t in tmin_obs_list]
        if temp_obs_units != 'C':
            logging.info('\nConverted Mean Monthly Tmax ({}):\n  {}'.format(
                temp_obs_units, ', '.join(map(str, tmax_obs_list))))
            logging.info('Converted Mean Monthly Tmin ({}):\n  {}'.format(
                temp_obs_units, ', '.join(map(str, tmin_obs_list))))

        # Scale all adjustments so gridded temperature will match observed
        # temperature at target cell
        if temp_hru_id != 0:
            tmax_gridded_list = map(
                float,
                arcpy.da.SearchCursor(
                    hru.polygon_path, tmax_fields,
                    '"{}" = {}'.format(hru.id_field, temp_hru_id)).next()[1:])
            logging.debug('  Gridded Tmax: {}'.format(', '.join(
                ['{:.2f}'.format(p) for p in tmax_gridded_list])))

            tmin_gridded_list = map(
                float,
                arcpy.da.SearchCursor(
                    hru.polygon_path, tmin_fields,
                    '"{}" = {}'.format(hru.id_field, temp_hru_id)).next()[1:])
            logging.debug('  Gridded Tmin: {}'.format(', '.join(
                ['{:.2f}'.format(p) for p in tmin_gridded_list])))

            # Difference of MEASURED or OBSERVED TEMP to GRIDDED TEMP
            tmax_adj_list = [
                float(o) - t for o, t in zip(tmax_obs_list, tmax_gridded_list)
            ]
            logging.info('  Tmax Obs.-Gridded: {}'.format(', '.join(
                ['{:.3f}'.format(p) for p in tmax_adj_list])))

            tmin_adj_list = [
                float(o) - t for o, t in zip(tmin_obs_list, tmin_gridded_list)
            ]
            logging.info('  Tmin Obs.-Gridded: {}'.format(', '.join(
                ['{:.3f}'.format(p) for p in tmin_adj_list])))
        else:
            tmax_adj_list = [0 for p in tmax_obs_list]
            tmin_adj_list = [0 for p in tmin_obs_list]

        # Use single mean monthly tmax for all cells
        # Assume tmax_obs_list is in month order
        fields = tmax_field_list + tmax_adj_field_list
        with arcpy.da.UpdateCursor(hru.polygon_path, fields) as u_cursor:
            for row in u_cursor:
                for i, month in enumerate(month_list):
                    tmax_i = fields.index(tmax_field_fmt.format(month))
                    tmax_adj_i = fields.index(tmax_adj_field_fmt.format(month))
                    row[tmax_adj_i] = (row[tmax_i] - tmax_obs_list[i] +
                                       tmax_adj_list[i])
                u_cursor.updateRow(row)
            del row

        # Use single mean monthly tmin for all cells
        # Assume tmin_obs_list is in month order
        fields = tmin_field_list + tmin_adj_field_list
        with arcpy.da.UpdateCursor(hru.polygon_path, fields) as u_cursor:
            for row in u_cursor:
                for i, month in enumerate(month_list):
                    tmin_i = fields.index(tmin_field_fmt.format(month))
                    tmin_adj_i = fields.index(tmin_adj_field_fmt.format(month))
                    row[tmin_adj_i] = (row[tmin_i] - tmin_obs_list[i] +
                                       tmin_adj_list[i])
                u_cursor.updateRow(row)
            del row
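
# A hedged usage sketch; the .ini path below is hypothetical.
if __name__ == '__main__':
    temp_adjust_parameters(config_path=r'D:\GISworkspace\gsflow_project.ini')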
Example #24
            idx = -1
            with arcpy.da.SearchCursor(connectionRoot%(switchFC),SwitchFldsOrig,where_clause="eSupportStructure_GLOBALID = '"  + unit[0] + "' AND SUBTYPE IN (1,2,3,4)") as swtCur:
                for swt in swtCur:
                    #print 'Updating...',switchFC
                    idx += 1
                    updateRelatedUnits(swt,fldsDestSwitch[idx],swt[0])
                idx = -1
            idx = -1
            with arcpy.da.SearchCursor(connectionRoot%(servicepointFC),ServicePointFldsOrig,where_clause="eSupportStructure_GLOBALID = '"  + unit[0] + "'") as svcCur:
                for svc in svcCur:
                    #print 'Updating...',servicepointFC
                    idx += 1
                    updateRelatedUnits(svc,fldsDestServicePoint[idx],svc[0])
                idx = -1
##with arcpy.da.Editor(workspace) as edit:
##    print 'Inserting SDE MM_PoleEquipment...'
##    ic = arcpy.da.InsertCursor(unitTableSDE,"*")
##    with arcpy.da.SearchCursor(unitTable,"*") as sc:
##        for scrow in sc:
##            ic.insertRow(scrow)
##    del ic
##    del sc
result = arcpy.GetCount_management(r'C:\arcdata\MIMS_Electric_Extract.gdb\eSupportStructure')
print('fGDB Pole Count ' + str(int(result.getOutput(0))))
result = arcpy.GetCount_management(unitTable)
print('fGDB PoleEquipment Count ' + str(int(result.getOutput(0))))
#result = arcpy.GetCount_management(unitTableSDE)
#print('SDE PoleEquipment Count ' + str(int(result.getOutput(0))))

print('finished - elapsed time (hours): ' + str((time.time() - startTime) / 3600))
Example #25
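# The head of getname() is truncated in the source. From the recursive call and
# the usage under __main__ below, it plausibly builds candidate names by
# appending a counter to the base name (hedged reconstruction):
def getname(name, number, names):

    newname = name + str(number)
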
    if newname in names:

        return getname(name, number + 1, names)

    else:

        names.append(newname)

        return newname


if __name__ == "__main__":

    jzzp = arcpy.GetParameterAsText(0)
    count = int(arcpy.GetCount_management(jzzp).getOutput(0))
    arcpy.SetProgressor('step', 'Repairing photo points', 0, count, 1)

    names = []

    with arcpy.da.UpdateCursor(jzzp, ["TBYBH", "NAME", "FJLX"]) as cur:

        for row in cur:

            NAME = getname(row[0] + "_" + row[2] + "_", 1, names)

            row[1] = NAME

            cur.updateRow(row)

            arcpy.SetProgressorPosition()
Example #26
# Assessment of the angle between three successive nodes
ncurrentstep += 1
arcpy.AddMessage(
    "Iterative process to determine the angle between three successive nodes - Step "
    + str(ncurrentstep) + "/" + str(nstep))

rows0 = arcpy.UpdateCursor(ToPts)
rows1 = arcpy.UpdateCursor(ToPts)
rows2 = arcpy.UpdateCursor(ToPts)
rows3 = arcpy.UpdateCursor(ToPts)
line2 = rows2.next()
line3 = rows3.next()
line3 = rows3.next()
n = -1
k = 0
nrows = int(arcpy.GetCount_management(ToPts).getOutput(0))

# Each point of the smoothed in-line (i.e., after the lateral offset tolerance) has:
#     - an "Angle" field: the angle formed with the next two points
#     - an "Inflection" field: set to "1" if the point is an inflection point
# --> a point is an inflection point if the angle sign changes.
for line1 in rows1:
    if k >= (nrows - 2):
        # At the end of the table, set the last point as an inflection point and stop the process
        line3.Inflection = 1
        rows3.updateRow(line3)
        break
    k += 1

    line2 = rows2.next()
    line3 = rows3.next()
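
# The sign-change rule described above, as a standalone hedged sketch over a
# plain list of angle values (the endpoints are always flagged):
def inflection_flags(angles):
    flags = [1]                          # the first point opens the line
    for prev, curr in zip(angles, angles[1:]):
        flags.append(1 if prev * curr < 0 else 0)
    flags[-1] = 1                        # the last point closes the process
    return flags

# e.g. inflection_flags([12.0, 5.0, -3.0, -8.0, 4.0]) -> [1, 0, 1, 0, 1]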
Example #27
def route_fiber(nd_in,
                incidents_in,
                facilities_in,
                name_in,
                output_dir_in,
                protection_in=False,
                pro_in=False):
    """
    This function routes the working fiber and, if requested, the protection fiber. It returns the respective
    paths to the created feature classes (lines).
    
    :param nd_in: network dataset on which the shortest path routing is done, network dataset
    :param incidents_in: the demands, feature class
    :param facilities_in: the ps or co location, feature class
    :param name_in: the name that is used to store the output, string
    :param output_dir_in: the directory to store the results, path
    :param protection_in: if the protection has to be included, binary 
    :param pro_in: if the script is executed in arcgis pro, binary 
    
    :return: 
    """

    arcpy.CheckOutExtension('Network')
    # Set local variables
    layer_name = "ClosestFacility"
    impedance = "Length"

    # First route the working paths (shortest paths)
    # MakeClosestFacilityLayer_na (in_network_dataset, out_network_analysis_layer, impedance_attribute,
    # {travel_from_to}, {default_cutoff}, {default_number_facilities_to_find}, {accumulate_attribute_name},
    # {UTurn_policy}, {restriction_attribute_name}, {hierarchy}, {hierarchy_settings}, {output_path_shape},
    # {time_of_day}, {time_of_day_usage})
    #
    # http://desktop.arcgis.com/en/arcmap/10.3/tools/network-analyst-toolbox/make-closest-facility-layer.htm
    result_object = arcpy.na.MakeClosestFacilityLayer(
        nd_in,
        layer_name,
        impedance,
        'TRAVEL_TO',
        default_cutoff=None,
        default_number_facilities_to_find=1,
        output_path_shape='TRUE_LINES_WITH_MEASURES')

    # Get the layer object from the result object. The Closest facility layer can
    # now be referenced using the layer object.
    layer_object = result_object.getOutput(0)

    # Get the names of all the sublayers within the Closest facility layer.
    sublayer_names = arcpy.na.GetNAClassNames(layer_object)

    # Stores the layer names that we will use later
    incidents_layer_name = sublayer_names["Incidents"]  # as origins
    facilities_layer_name = sublayer_names["Facilities"]  # as destinations
    lines_layer_name = sublayer_names["CFRoutes"]  # as lines

    arcpy.na.AddLocations(layer_object, incidents_layer_name, incidents_in)
    arcpy.na.AddLocations(layer_object, facilities_layer_name, facilities_in)

    # Solve the Closest facility  layer
    arcpy.na.Solve(layer_object)

    # # Save the solved Closest facility layer as a layer file on disk
    # output_layer_file = os.path.join(output_dir_in, layer_name)
    # arcpy.MakeFeatureLayer_management(layer_object, output_layer_file)

    # Get the Lines Sublayer (all the distances)
    if pro_in:
        lines_sublayer = layer_object.listLayers(lines_layer_name)[0]
    else:
        lines_sublayer = arcpy.mapping.ListLayers(layer_object,
                                                  lines_layer_name)[0]

    # Save results
    layer_out_path = os.path.join(output_dir_in, name_in)
    arcpy.management.CopyFeatures(lines_sublayer, layer_out_path)

    protection_out_path = "#"

    # If requested route the protection paths
    if protection_in:
        # For all the routed paths the disjoint path has to be found
        n_paths = int(arcpy.GetCount_management(incidents_in).getOutput(0))

        field_objects = arcpy.ListFields(layer_out_path)
        fields = [
            field.name for field in field_objects if field.type != 'Geometry'
        ]

        if 'Total_Length' in fields:
            field_len = 'Total_Length'
        elif 'Shape_Length' in fields:
            field_len = 'Shape_Length'
        else:
            raise ValueError('No length field found on the routed lines')

        # Iterate through all the facility-demand pairs and their respective routes
        cursor_r = arcpy.da.SearchCursor(layer_out_path, ['SHAPE@', field_len])
        cursor_n = arcpy.da.SearchCursor(incidents_in, 'SHAPE@')

        protection_out_path = os.path.join(output_dir_in,
                                           '{0}_protection'.format(name_in))
        check_exists(protection_out_path)
        arcpy.CreateFeatureclass_management(output_dir_in,
                                            '{0}_protection'.format(name_in),
                                            template=layer_out_path)

        for i in range(n_paths):
            path = cursor_r.next()
            node = cursor_n.next()
            if path[1] != 0:
                tmp = protection_routing(nd_in, facilities_in, node[0],
                                         path[0], pro_in)
                # Add the protection route to the output feature class
                arcpy.Append_management(tmp,
                                        protection_out_path,
                                        schema_type="NO_TEST")
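
        # Release the read cursors so the locks they hold on the inputs are
        # freed (an added good-practice step, not in the original script)
        del cursor_r, cursor_n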

    return layer_out_path, protection_out_path
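
# A hypothetical usage sketch (assumption: the enclosing function is the
# closest-facility routing helper whose def statement appears earlier in this
# script; its real name and argument order may differ):
# routes, routes_protection = closest_facility_routing(
#     nd, incidents, facilities, 'SP_routes', output_dir,
#     protection_in=True, pro_in=False)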
Example #28
    "; Max-Costs: "+str(Max_Costs)+";tofind: "+str(to_find)+"; Origins: "+str(A_Shape.split("\\")[-1])+"; Places: "+str(P_Shape.split("\\")[-1])
    if "Potential" in Modus: text = text + "; Measures: "+str("/".join(Measures))
    if Filter_Group_P: text = text + "; Filter Group: "+str(Filter_Group_P)
    return text

#--preparation--#
costattr()
file5 = h5py.File(Database,'r+')
group5_Results = file5[Group_R]
Results_T = HDF5_Results()

#--measures--#
arcpy.AddMessage("> calculate measures\n")
ODLayer()

dataP = arcpy.da.FeatureClassToNumPyArray(P_Shape,["*"],null_value=0)
if Modus == "Distance":
    if Filter_Group_P: Groups = np.unique(dataP[Filter_Group_P])
    else: Groups = [1]
    for i in Groups: distance(i, Groups)

if "Potential" in Modus:
    Origins = int(arcpy.GetCount_management(A_Shape).getOutput(0))
    for origin_l in range(0,Origins,loops): potential(origin_l,loops)

arcpy.AddMessage("> "+Modus+" measures finished")

#end
arcpy.AddMessage("> finished after "+str(int(time.time()-start_time))+" seconds")
file5.flush()
file5.close()
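
# A design note (a sketch, not the original author's code): h5py file objects
# support the context-manager protocol, so the open/flush/close sequence used
# above could also be written as:
# with h5py.File(Database, 'r+') as file5:
#     group5_Results = file5[Group_R]
#     ...  # compute and store the measures; flush and close happen on exit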
Example #29
                ]
                data_fmt = '%s,%s,%s,%2.1f,%2.1f,%s'
            if key == 'chorizon':
                fields = [
                    'cokey', 'desgnmaster', 'om_r', 'hzdept_r', 'hzdepb_r',
                    'dbthirdbar_r', 'wthirdbar_r', 'wfifteenbar_r',
                    'ph1to1h2o_r', 'sandtotal_r', 'claytotal_r', 'kwfact'
                ]
                data_fmt = '%s,%s,%2.1f,%d,%d,%2.1f,%2.1f,%2.1f,%2.1f,%2.1f,%2.1f,%s'

##                  fields = ['cokey','om_r']
##                  data_fmt = '%s,%2.1f'

            params = ','.join(fields)

            result = arcpy.GetCount_management(intable)
            count = int(result.getOutput(0))

            if 100000 < count <= 200000:
                print(count)
                query1 = 'OBJECTID <= 50000'
                query2 = 'OBJECTID > 50000 AND OBJECTID <= 100000'
                query3 = 'OBJECTID > 100000'
                arr1 = arcpy.da.TableToNumPyArray(intable,
                                                  fields,
                                                  query1,
                                                  null_value=-9999)
                arr2 = arcpy.da.TableToNumPyArray(intable,
                                                  fields,
                                                  query2,
                                                  null_value=-9999)
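
                # A minimal generalization sketch (assumption: OBJECTIDs are
                # dense and 1-based), replacing the hard-coded query1/query2/
                # query3 above with a loop that reads the table in fixed-size
                # chunks:
                # chunk = 50000
                # arrays = []
                # for lo in range(0, count, chunk):
                #     where = 'OBJECTID > {0} AND OBJECTID <= {1}'.format(
                #         lo, lo + chunk)
                #     arrays.append(arcpy.da.TableToNumPyArray(
                #         intable, fields, where, null_value=-9999))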
Example #30
        arcpy.mapping.RemoveLayer(df, lyr)
    if lyr.name in visibleLocationLayers:
        lyr.visible = True

mxd.activeView = "PAGE_LAYOUT"  # switch to the page layout view
arcpy.RefreshActiveView()  # refresh the layout

lyr = arcpy.mapping.ListLayers(mxd, "LOCATION.address_point", df)[0]
srcLyr = arcpy.mapping.ListLayers(mxd, "source_symbology", df)[0]
parcels = arcpy.mapping.ListLayers(mxd, "Parcels", df)[0]
structureLayer = arcpy.mapping.ListLayers(mxd, "Structures", df)[0]
arcpy.SelectLayerByAttribute_management(
    lyr, "NEW_SELECTION",
    expression)  # search address points and select location
#-------------------------ERROR CHECKING------------------------
results = arcpy.GetCount_management(lyr)  #return number of features selected
featureCount = int(results.getOutput(0))  #extract result as integer
if featureCount > 1:
    arcpy.AddMessage(
        "!------Multiple addresses found! Please refine query------!")
    sys.exit()
if featureCount == 0:
    arcpy.AddMessage("!------No address found! Please refine query------!")
    sys.exit()
for row in arcpy.SearchCursor(lyr):  #retrieve address of selected feature
    selectedAddress = row.FULL_STREET_NAME  #this variable used later for legend
    arcpy.AddMessage("Selected address is ---> " + selectedAddress +
                     " <---")  #print address name
#----------------------------------------------------------------------
arcpy.AddMessage("Selecting corresponding parcel in location map...")
arcpy.SelectLayerByLocation_management(