def calculateOtherFields(layer):
    """Reconcile duplicated attribute fields on *layer* after a join/union.

    For each (field, duplicate) pair below the field is recalculated through
    the ``checkType()`` helper supplied by the module-level ``codeblock_type``
    code block, which chooses between the original value and the "_1"-style
    duplicate based on MAIN_TYPE / MAIN_TYP_1 / UTYPE.  MAIN_TYPE is then
    overwritten with UTYPE, and all duplicate / leftover fields are deleted.

    Relies on module globals: ``arcpy`` and ``codeblock_type``.
    """
    # (target field, duplicate field) pairs, in the original calculation order.
    field_pairs = [
        ("CLASSIFICA", "CLASSIFI_1"),
        ("RESOURCE_T", "RESOURCE_1"),
        ("ID_CLASS", "ID_CLASS_1"),
        ("MAIN_CLASS", "MAIN_CLA_1"),
        ("OTHER_CLAS", "OTHER_CL_2"),
        ("OTHER_CL_1", "OTHER_CL_3"),
        ("CLASS_DESC", "CLASS_DE_1"),
        ("ID_TYPE", "ID_TYPE_1"),
        ("OTHER_TYPE", "OTHER_TY_2"),
        ("OTHER_TY_1", "OTHER_TY_3"),
        ("TYPE_DESCR", "TYPE_DES_1"),
        ("DATA_SOURC", "DATA_SOU_1"),
        ("DATASET_AC", "DATASET__1"),
        ("FARMING_SY", "FARMING__1"),
        ("CROP_PLANT", "CROP_PLA_1"),
        ("JAN", "JAN_1"),
        ("FEB", "FEB_1"),
        ("MAR", "MAR_1"),
        ("APR", "APR_1"),
        ("MAY", "MAY_1"),
        ("JUN", "JUN_1"),
        ("JUL", "JUL_1"),
        ("AUG", "AUG_1"),
        ("SEP", "SEP_1"),
        ("OCT", "OCT_1"),
        ("NOV", "NOV_1"),
        ("DEC", "DEC_1"),
        ("REGION", "REGION_1"),
        ("PROVINCE", "PROVINCE_1"),
        ("CITYMUNI", "CITYMUNI_1"),
        ("BARANGAY", "BARANGAY_1"),
        ("REMARKS", "REMARKS_1"),
    ]
    for field, dup in field_pairs:
        expression = ("checkType(!{0}!, !{1}!, !MAIN_TYPE!, !MAIN_TYP_1!, "
                      "!UTYPE!)".format(field, dup))
        arcpy.CalculateField_management(layer, field, expression,
                                        "PYTHON_9.3", codeblock_type)
    # MAIN_TYPE takes the unified type value.
    arcpy.CalculateField_management(layer, "MAIN_TYPE", "!UTYPE!",
                                    "PYTHON_9.3")
    # Drop the duplicates plus leftover geometry/area fields.  NOTE: this
    # explicit list is intentionally kept verbatim — it is not exactly the
    # duplicate column of field_pairs (e.g. it also includes MAIN_TYP_1 and
    # the SHAPE_*/AREA_1 leftovers).
    arcpy.DeleteField_management(layer, [
        "CLASSIFI_1", "RESOURCE_1", "ID_CLASS_1", "MAIN_CLA_1", "OTHER_CL_2",
        "OTHER_CL_3", "CLASS_DE_1", "ID_TYPE_1", "MAIN_TYP_1", "OTHER_TY_2",
        "OTHER_TY_3", "TYPE_DES_1", "DATA_SOU_1", "DATASET__1", "FARMING__1",
        "CROP_PLA_1", "JAN_1", "FEB_1", "MAR_1", "APR_1", "MAY_1", "JUN_1",
        "JUL_1", "AUG_1", "SEP_1", "OCT_1", "NOV_1", "DEC_1", "REGION_1",
        "PROVINCE_1", "CITYMUNI_1", "BARANGAY_1", "REMARKS_1", "AREA_1",
        "SHAPE_Leng", "SHAPE_Area", "SHAPE_Le_1", "SHAPE_Ar_1"
    ])
Exemplo n.º 2
0
# Merge the simplified GAUL country polygons with the flattened WDPA (1 km2)
# protected-area layer into one land feature class, carrying over the listed
# attributes via an explicit field-mapping string.
# NOTE(review): hard-coded Z:/ paths — confirm the geodatabase location before
# running.  The field-mapping string below appears line-wrapped by the source
# it was copied from; verify it is a single string in the runnable script.
arcpy.Merge_management("Z:/globes/USERS/GIACOMO/protconn/data/ProtConn_Mar2020.gdb/gaul_singleparted_shape_simpl;Z:/globes/USERS/GIACOMO/protconn/data/ProtConn_Mar2020.gdb/wdpa_flat_1km2_final", Merge_WDPA_flat_land, "objectid \"objectid\" true true false 9 Long 0 9 ,First,#,Z:/globes/USERS/GIACOMO/protconn/data/ProtConn_Mar2020.gdb/gaul_singleparted_shape_simpl,objectid,-1,-1;id_object \"id_object\" true true false 9 Long 0 9 ,First,#,Z:/globes/USERS/GIACOMO/protconn/data/ProtConn_Mar2020.gdb/gaul_singleparted_shape_simpl,id_object,-1,-1;id_country \"id_country\" true true false 9 Long 0 9 ,First,#,Z:/globes/USERS/GIACOMO/protconn/data/ProtConn_Mar2020.gdb/gaul_singleparted_shape_simpl,id_country,-1,-1;name_iso31 \"name_iso31\" true true false 254 Text 0 0 ,First,#,Z:/globes/USERS/GIACOMO/protconn/data/ProtConn_Mar2020.gdb/gaul_singleparted_shape_simpl,name_iso31,-1,-1;iso3 \"iso3\" true true false 254 Text 0 0 ,First,#,Z:/globes/USERS/GIACOMO/protconn/data/ProtConn_Mar2020.gdb/gaul_singleparted_shape_simpl,iso3,-1,-1;orig_fid \"orig_fid\" true true false 9 Long 0 9 ,First,#,Z:/globes/USERS/GIACOMO/protconn/data/ProtConn_Mar2020.gdb/gaul_singleparted_shape_simpl,orig_fid,-1,-1,Z:/globes/USERS/GIACOMO/protconn/data/ProtConn_Mar2020.gdb/wdpa_flat_1km2_final,ORIG_FID,-1,-1;AREA_GEO \"AREA_GEO\" true true false 3407925 Double 57 3670065 ,First,#,Z:/globes/USERS/GIACOMO/protconn/data/ProtConn_Mar2020.gdb/gaul_singleparted_shape_simpl,AREA_GEO,-1,-1,Z:/globes/USERS/GIACOMO/protconn/data/ProtConn_Mar2020.gdb/wdpa_flat_1km2_final,AREA_GEO,-1,-1;nodeID \"nodeID\" true true false 0 Long 0 0 ,First,#,Z:/globes/USERS/GIACOMO/protconn/data/ProtConn_Mar2020.gdb/gaul_singleparted_shape_simpl,nodeID,-1,-1,Z:/globes/USERS/GIACOMO/protconn/data/ProtConn_Mar2020.gdb/wdpa_flat_1km2_final,nodeID,-1,-1;ISO3final \"ISO3final\" true true false 50 Text 0 0 
,First,#,Z:/globes/USERS/GIACOMO/protconn/data/ProtConn_Mar2020.gdb/gaul_singleparted_shape_simpl,ISO3final,-1,-1,Z:/globes/USERS/GIACOMO/protconn/data/ProtConn_Mar2020.gdb/wdpa_flat_1km2_final,ISO3final,-1,-1;Shape_Length \"Shape_Length\" false true true 8 Double 0 0 ,First,#,Z:/globes/USERS/GIACOMO/protconn/data/ProtConn_Mar2020.gdb/wdpa_flat_1km2_final,Shape_Length,-1,-1;Shape_Area \"Shape_Area\" false true true 8 Double 0 0 ,First,#,Z:/globes/USERS/GIACOMO/protconn/data/ProtConn_Mar2020.gdb/wdpa_flat_1km2_final,Shape_Area,-1,-1")
print("Gaul and wdpa_flat merged")

# Process: Make Feature Layer
# Select the features that came from GAUL (nodeID > 100000); WDPA features
# keep smaller node IDs.
arcpy.MakeFeatureLayer_management(Merge_WDPA_flat_land, Merge_WDPA_flat_land_lyr, "\"nodeID\" >100000", "", "OBJECTID_1 OBJECTID_1 VISIBLE NONE;Shape Shape VISIBLE NONE;objectid objectid VISIBLE NONE;id_object id_object VISIBLE NONE;id_country id_country VISIBLE NONE;name_iso31 name_iso31 VISIBLE NONE;sovereign_ sovereign_ VISIBLE NONE;sovereig_1 sovereig_1 VISIBLE NONE;sovereig_2 sovereig_2 VISIBLE NONE;iso3 iso3 VISIBLE NONE;iso2 iso2 VISIBLE NONE;un_m49 un_m49 VISIBLE NONE;source source VISIBLE NONE;status status VISIBLE NONE;original_d original_d VISIBLE NONE;original_n original_n VISIBLE NONE;source_cod source_cod VISIBLE NONE;sqkm sqkm VISIBLE NONE;orig_fid orig_fid VISIBLE NONE;AREA_GEO AREA_GEO VISIBLE NONE;cte0 cte0 VISIBLE NONE;nodeID nodeID VISIBLE NONE;ISO3final ISO3final VISIBLE NONE;Shape_Length Shape_Length VISIBLE NONE;Shape_Area Shape_Area VISIBLE NONE")

# Process: Calculate Field AREA_GEO=0
# Zero the protected-area size for land (GAUL) features — the selection made
# above is what limits this calculation to nodeID > 100000.
arcpy.CalculateField_management(Merge_WDPA_flat_land_lyr, "AREA_GEO", "0", "PYTHON_9.3", "")
print("Area_geo set to 0 for features coming from gaul (nodeid>100000)")

# Process: Repair Geometry (2)
# Repair runs on the full feature class (not the layer selection).
arcpy.RepairGeometry_management(Merge_WDPA_flat_land, "DELETE_NULL")
print("Geometries repaired, once again...")

# Process: Delete Field
arcpy.DeleteField_management(Merge_WDPA_flat_land, "id_object;id_country;name_iso31;sovereign_;sovereig_1;sovereig_2;iso3;iso2;un_m49;source;status;original_d;original_n;source_cod;sqkm;orig_fid;cte0")
print("Useless fields deleted")

# Process: Make Feature Layer 2
# Features whose ISO3final contains '|' belong to more than one country
# (disputed territories).
arcpy.MakeFeatureLayer_management(Merge_WDPA_flat_land, Merge_WDPA_flat_land_lyr2,"\"ISO3final\" LIKE '%|%'", field_info="OBJECTID_1 OBJECTID_1 VISIBLE NONE;Shape Shape VISIBLE NONE;objectid objectid VISIBLE NONE;AREA_GEO AREA_GEO VISIBLE NONE;nodeID nodeID VISIBLE NONE;ISO3final ISO3final VISIBLE NONE;Shape_Length Shape_Length VISIBLE NONE;Shape_Area Shape_Area VISIBLE NONE")

# Process: Calculate Field ISO3final (assign 'DSP' to disputed territories)
arcpy.CalculateField_management(Merge_WDPA_flat_land_lyr2, "ISO3final", "'DSP'", "PYTHON_9.3", "")
print("Iso3 = DSP for multi-iso3 (disputed) objects")

# Process: Copy Features
arcpy.CopyFeatures_management(Merge_WDPA_flat_land, WDPA_plus_LAND_flat_1km2_final, "", "0", "0", "0")
print("Features copied to final layer")

# Process: Calculate Field objectid
# Re-sequence objectid from the post-copy OBJECTID_1 values.
arcpy.CalculateField_management(WDPA_plus_LAND_flat_1km2_final, field="objectid", expression="!OBJECTID_1!", expression_type="PYTHON_9.3", code_block="")
# Populate a scratch "temp" field used while exporting the demolition-plan
# map series.  `inputLyr`, `expression`, `codeblock`, `layerFileDir`,
# `demolitionM`, `Pages`, `mapSeriesLyr`, `ms`, `outputDir` and `asofDate`
# are defined elsewhere in the original script.
# NOTE: this fragment uses "PYTHON3", i.e. it targets ArcGIS Pro.
arcpy.CalculateField_management(inputLyr, "temp", expression, "PYTHON3",
                                codeblock)

# Apply Symbology from layer for each demolition Plan month
## Get month list from the layerFileDir being defined
monthList = [re.sub('.lyrx', '', _) for _ in os.listdir(layerFileDir)]

#monthList= ["202101Mid", "202101End", "202102Mid", "202102End", "202103Mid", "202103End"]

for m in monthList:
    # Each month has a matching <month>.lyrx symbology file.
    symbolLyrx = os.path.join(layerFileDir, m + ".lyrx")
    arcpy.ApplySymbologyFromLayer_management(
        inputLyr,
        symbolLyrx, [["VALUE_FIELD", demolitionM, demolitionM]],
        update_symbology="MAINTAIN")[0]

    # Get the corresponding map series page numbers
    # NOTE(review): Pages does not change inside the loop, so this selection
    # is recomputed identically every iteration — presumably intentional to
    # restore the selection after each export; confirm.
    pages_list = Pages.split(";")  # Construction boundary names
    pages_str = ",".join("'" + p + "'" for p in pages_list)
    sql = 'Station IN ({})'.format(pages_str)
    arcpy.SelectLayerByAttribute_management(mapSeriesLyr, "NEW_SELECTION", sql)

    # Export only the selected map-series pages, one PDF file per page name.
    msPDF = os.path.join(outputDir, asofDate + "_" + "TimeSlice_" + m + ".pdf")
    #ms.exportToPDF(msPDF, 'SELECTED')
    ms.exportToPDF(msPDF,
                   'SELECTED',
                   multiple_files="PDF_MULTIPLE_FILES_PAGE_NAME")

# Clean up the scratch field created at the top of this fragment.
arcpy.DeleteField_management(inputLyr, "temp")
Exemplo n.º 4
0
def roadNetworkEnrich(objects):
    """Enrich the pre-processed road network with walkability scores.

    objects : passed straight through to networkPreProcessing(); the network
        shapefile path itself is hard-coded below.

    Adds five FLOAT fields — lightQual, roadType, roadQual, roadHindr and
    surfQual — each reclassing a Dutch text attribute into a 0..1 score,
    then computes surfQual as the mean of the three surface scores.
    Returns nothing; all effects are on the shapefile.
    """
    networkPreProcessing(objects)
    # NOTE(review): hard-coded user path — consider making this a parameter.
    network = "C:/Users/Simon/Documents/GitHub/Enrichment/networkPr_update.shp"
    # Delete any fields created by a previous run so AddField cannot fail.
    arcpy.DeleteField_management(network, ["lightQual", "roadType", "roadQual", "roadHindr", "surfQual"])

    def _reclass_field(field_name, expression, codeblock):
        # Add a FLOAT field and fill it by reclassing a text attribute into
        # a float score via the supplied Python code block.  Unmatched values
        # leave the field at its initial value (NULL), as before.
        arcpy.AddField_management(network, field_name, "FLOAT", 4, 4, "", "", "NULLABLE")
        arcpy.CalculateField_management(network, field_name, expression, "PYTHON_9.3", codeblock)

    # Quality of street lighting (verlichtin = lighting).
    _reclass_field(
        "lightQual",
        "Reclass(!verlichtin!, !lightQual!)",
        """def Reclass(x, y):
        if (x == 'ONBEKEND' or x == 'niet verlicht'):
            y = 0.00
        elif (x == 'beperkt verlicht (bijvoorbeeld alleen bij kruispunten)'):
            y = 0.50
        elif (x == ' ' or x == 'goed verlicht'):
            y = 1.00
        return y""")

    # Type of road surface (wegdeksrt = surface type).
    _reclass_field(
        "roadType",
        "Reclass(!wegdeksrt!, !roadType!)",
        """def Reclass(x, y):
        if (x == ' ' or x == 'asfalt/beton'):
            y = 0.00
        elif (x == 'klinkers' or x == 'ONBEKEND' or x == 'tegels'):
            y = 0.33
        elif (x == 'halfverhard' or x == 'overig(hout/kinderkopjes e.d.)'):
            y = 0.67
        elif (x == 'onverhard' or x == 'schelpenpad'):
            y = 1.00
        return y""")

    # Quality of the surface (wegkwal = road quality).
    _reclass_field(
        "roadQual",
        "Reclass(!wegkwal!, !roadQual!)",
        """def Reclass(x, y):
        if (x == 'ONBEKEND'):
            y = 0.50
        elif (x == 'slecht'):
            y = 0.00
        elif (x == 'redelijk'):
            y = 0.75
        elif (x == ' ' or x == 'goed'):
            y = 1.00
        return y""")

    # Amount of hindrances on the surface (hinder = hindrance).
    _reclass_field(
        "roadHindr",
        "Reclass(!hinder!, !roadHindr!)",
        """def Reclass(x, y):
        if (x == ' ' or x == 'ONBEKEND'):
            y = 0.50
        elif (x == 'zeer veel'):
            y = 0.00
        elif (x == 'veel'):
            y = 0.25
        elif (x == 'redelijk'):
            y = 0.50
        elif (x == 'weinig'):
            y = 0.75
        elif (x == 'zeer weinig'):
            y = 1.00
        return y""")

    # Total surface quality: mean of the three surface scores.
    # Bug fix: the original expression used VB-style "[field]" syntax with no
    # expression type, which only works where VB is the default (ArcMap) and
    # fails elsewhere; use PYTHON_9.3 field syntax consistently with the rest
    # of this function.
    arcpy.AddField_management(network, "surfQual", "FLOAT", 4, 4, "", "", "NULLABLE")
    arcpy.CalculateField_management(
        network, "surfQual",
        "(!roadType! + !roadQual! + !roadHindr!) / 3.0000",
        "PYTHON_9.3")
        arcpy.Near_analysis(projects, feature, "", "NO_LOCATION", "NO_ANGLE")

        ## Add the new field
        if not (fieldName in projectFieldNames):
            arcpy.AddField_management(projects, fieldName, "DOUBLE")

        if (fieldName == "d_tra" or fieldName == "d_sub"):
            arcpy.CalculateField_management(
                projects, fieldName,
                "!NEAR_DIST! * " + str(transmissionDistMultiplier),
                "PYTHON_9.3")
        else:
            arcpy.CalculateField_management(projects, fieldName, "!NEAR_DIST!",
                                            "PYTHON_9.3")

        arcpy.DeleteField_management(projects, "NEAR_DIST")
        arcpy.AddMessage("Distance calculations are complete for " + fieldName)
        arcpy.SetProgressorPosition()
# Reset the progressor after the distance-calculation loop above finishes.
arcpy.ResetProgressor()
'''
#######################################
## GET ATTRIBUTES FROM PROJECTS FILE ##
#######################################
'''
## SPATIAL JOIN ZONES' ZONEID FIELD TO PROJECTS FILE
# create the fieldmap and fieldmapings object
# `projects` is defined earlier in the original script.
fmJoin = arcpy.FieldMap()
fms = arcpy.FieldMappings()

# add all the fields in the projects file to the input and output field list in the fieldmappings object
fms.addTable(projects)
def RotateFeatureClass(inputFC, outputFC, angle=0, pivot_point=None):
    """Rotate Feature Class

    inputFC     Input features
    outputFC    Output feature class
    angle       Angle to rotate, in degrees (clockwise, to match
                Rotate_management)
    pivot_point X,Y coordinates (as space-separated string)
                Default is lower-left of inputFC

    As the output feature class no longer has "real" xy locations
    after rotation, it has no coordinate system defined.

    Returns the pivot point actually used, as an "X Y" string
    (or None if it could not be formatted).
    """
    def RotateXY(x, y, xc=0, yc=0, angle=0, units="DEGREES"):
        """Rotate an xy coordinate about a specified origin

        x,y      xy coordinates
        xc,yc   center of rotation
        angle   angle
        units    "DEGREES" (default) or "RADIANS"
        """
        x = x - xc
        y = y - yc
        # make angle clockwise (like Rotate_management)
        angle = angle * -1
        if units == "DEGREES":
            angle = math.radians(angle)
        xr = (x * math.cos(angle)) - (y * math.sin(angle)) + xc
        yr = (x * math.sin(angle)) + (y * math.cos(angle)) + yc
        return xr, yr

    # temp names for cleanup
    env_file = None
    lyrFC, lyrTmp, lyrOut = [None] * 3  # layers
    tmpFC = None  # temp dataset
    Row, Rows, oRow, oRows = [None] * 4  # cursors

    try:
        # process parameters
        try:
            # pivot_point arrives as an "X Y" string; turn it into a tuple
            xcen, ycen = [float(xy) for xy in pivot_point.split()]
            pivot_point = xcen, ycen
        except:
            # if pivot point was not specified, get it from
            # the lower-left corner of the feature class
            ext = arcpy.Describe(inputFC).extent
            xcen, ycen = ext.XMin, ext.YMin
            pivot_point = xcen, ycen

        angle = float(angle)

        # set up environment
        # Save the current GP settings so they can be restored in `finally`
        env_file = arcpy.CreateScratchName("xxenv", ".xml", "file",
                                           os.environ["TEMP"])
        arcpy.SaveSettings(env_file)

        # Disable any GP environment clips or project on the fly
        arcpy.ClearEnvironment("extent")
        arcpy.ClearEnvironment("outputCoordinateSystem")
        WKS = env.workspace
        if not WKS:
            # Fall back to the output's folder, then the input's
            if os.path.dirname(outputFC):
                WKS = os.path.dirname(outputFC)
            else:
                WKS = os.path.dirname(arcpy.Describe(inputFC).catalogPath)
        env.workspace = env.scratchWorkspace = WKS

        # Disable GP environment clips or project on the fly
        # NOTE(review): repeats the ClearEnvironment calls above — harmless
        # but probably a leftover.
        arcpy.ClearEnvironment("extent")
        arcpy.ClearEnvironment("outputCoordinateSystem")

        # get feature class properties
        lyrFC = 'lyrFC'  #g_ESRI_variable_1
        arcpy.MakeFeatureLayer_management(inputFC, lyrFC)
        dFC = arcpy.Describe(lyrFC)
        shpField = dFC.shapeFieldName
        shpType = dFC.shapeType
        FID = dFC.OIDFieldName

        # create temp feature class (same shape type, no spatial reference)
        tmpFC = arcpy.CreateScratchName("xxfc", "", "featureclass")
        arcpy.CreateFeatureclass_management(os.path.dirname(tmpFC),
                                            os.path.basename(tmpFC), shpType)
        lyrTmp = 'lyrTmp'  #g_ESRI_variable_2
        arcpy.MakeFeatureLayer_management(tmpFC, lyrTmp)

        # set up id field (used to join later)
        TFID = "XXXX_FID"
        arcpy.AddField_management(lyrTmp, TFID, "LONG")
        arcpy.DeleteField_management(lyrTmp, 'ID')  # g_ESRI_variable_3 = 'ID'

        # rotate the feature class coordinates
        # only points, polylines, and polygons are supported

        # open read and write cursors
        Rows = arcpy.SearchCursor(lyrFC, "", "", "%s;%s" % (shpField, FID))
        oRows = arcpy.InsertCursor(lyrTmp)
        arcpy.AddMessage("Opened search cursor")
        if shpType == "Point":
            for Row in Rows:
                shp = Row.getValue(shpField)
                pnt = shp.getPart()
                pnt.X, pnt.Y = RotateXY(pnt.X, pnt.Y, xcen, ycen, angle)
                oRow = oRows.newRow()
                oRow.setValue(shpField, pnt)
                # keep the source OID so attributes can be joined back later
                oRow.setValue(TFID, Row.getValue(FID))
                oRows.insertRow(oRow)
        elif shpType in ["Polyline", "Polygon"]:
            parts = arcpy.Array()
            rings = arcpy.Array()
            ring = arcpy.Array()
            for Row in Rows:
                shp = Row.getValue(shpField)
                p = 0
                for part in shp:
                    for pnt in part:
                        if pnt:
                            x, y = RotateXY(pnt.X, pnt.Y, xcen, ycen, angle)
                            ring.add(arcpy.Point(x, y, pnt.ID))
                        else:
                            # a null point separates inner rings within a part;
                            # if we have a ring, save it
                            if len(ring) > 0:
                                rings.add(ring)
                                ring.removeAll()
                    # we have our last ring, add it
                    rings.add(ring)
                    ring.removeAll()
                    # if only one, remove nesting
                    if len(rings) == 1: rings = rings.getObject(0)
                    parts.add(rings)
                    rings.removeAll()
                    p += 1

                # if only one, remove nesting
                if len(parts) == 1: parts = parts.getObject(0)
                if dFC.shapeType == "Polyline":
                    shp = arcpy.Polyline(parts)
                else:
                    shp = arcpy.Polygon(parts)
                parts.removeAll()
                oRow = oRows.newRow()
                oRow.setValue(shpField, shp)
                oRow.setValue(TFID, Row.getValue(FID))
                oRows.insertRow(oRow)
        else:
            raise Exception("Shape type {0} is not supported".format(shpType))

        del oRow, oRows  # close write cursor (ensure buffer written)
        oRow, oRows = None, None  # restore variables for cleanup

        # join attributes, and copy to output
        arcpy.AddJoin_management(lyrTmp, TFID, lyrFC, FID)
        env.qualifiedFieldNames = False
        # NOTE(review): Merge with a single input behaves as a copy here;
        # CopyFeatures would be the clearer tool — confirm before changing.
        arcpy.Merge_management(lyrTmp, outputFC)
        lyrOut = 'lyrOut'  #g_ESRI_variable_4
        arcpy.MakeFeatureLayer_management(outputFC, lyrOut)
        # drop temp fields 2,3 (TFID, FID)
        fnames = [f.name for f in arcpy.ListFields(lyrOut)]
        dropList = ';'.join(fnames[2:4])  #g_ESRI_variable_5 = ';'
        arcpy.DeleteField_management(lyrOut, dropList)

    except MsgError as xmsg:
        arcpy.AddError(str(xmsg))
    except arcpy.ExecuteError:
        # surface the geoprocessing error plus full traceback
        tbinfo = traceback.format_tb(sys.exc_info()[2])[0]
        arcpy.AddError(tbinfo.strip())
        arcpy.AddError(arcpy.GetMessages())
        numMsg = arcpy.GetMessageCount()
        for i in range(0, numMsg):
            arcpy.AddReturnMessage(i)
    except Exception as xmsg:
        tbinfo = traceback.format_tb(sys.exc_info()[2])[0]
        arcpy.AddError(tbinfo + str(xmsg))
    finally:
        # reset environment
        if env_file: arcpy.LoadSettings(env_file)
        # Clean up temp files
        for f in [lyrFC, lyrTmp, lyrOut, tmpFC, env_file]:
            try:
                if f: arcpy.Delete_management(f)
            except:
                pass
        # delete cursors
        try:
            for c in [Row, Rows, oRow, oRows]:
                del c
        except:
            pass

        # return pivot point
        # NOTE(review): returning from `finally` swallows any in-flight
        # exception; errors are only reported via AddError above.
        try:
            pivot_point = "{0} {1}".format(*pivot_point)
        except:
            pivot_point = None

        return pivot_point
# NOTE: Python 2 `print` statements — this fragment targets ArcMap / Python 2.
print "Step 2 completed at", datetime.datetime.now().strftime(
    "%A, %B %d %Y %I:%M:%S%p")

## ---------------------------------------------------------------------------
## 3. Edit and join attribute table
## Description: Rename and delete fields

print "\nStep 3 Edit attribute table starts at", datetime.datetime.now(
).strftime("%A, %B %d %Y %I:%M:%S%p")

# Leftover join/geometry fields to remove from every feature class.
fieldsDel = [
    "FID_1", "Shape__Are", "Shape__Len", "FY2016YTD", "FY2017YTD", "FY162FY17_"
]
# Delete fields
# `gdbList` is built earlier in the original script.
for fc in gdbList:
    arcpy.DeleteField_management(fc, fieldsDel)

print "Step 3 completed at", datetime.datetime.now().strftime(
    "%A, %B %d %Y %I:%M:%S%p")

## ---------------------------------------------------------------------------
## 4. Join table with feature
## Description: Join table of statistics of apprehension with the US Border Patrol Sectors feature

print "\nStep 4 Join starts at", datetime.datetime.now().strftime(
    "%A, %B %d %Y %I:%M:%S%p")

# Convert excel table to dbf
# Build the dbf output path from the workbook name (extension dropped).
in_excel = "StatisticsApprehension.xlsx"
dbf_file = os.path.splitext(in_excel)[0]
joinTable = os.path.join(interFolder, dbf_file)
             row[2] = cp
         cursor.updateRow(row)
         
 # Get a unique name
 listLayer = []
 with arcpy.da.SearchCursor(Polylines, addField) as cursor:
     for row in cursor:
         if row[0] is not None:
             listLayer.append(str(row[0])) 
     
 uniqueList = list(Counter(listLayer))
 
 # Drop Field
 fieldNames=[f.name for f in arcpy.ListFields(Polylines)]
 dropField = [e for e in fieldNames if e not in (addField, addField2, 'SymbolID', 'Shape','Shape_Length','Shape_Area','Shape.STArea()','Shape.STLength()','OBJECTID','GlobalID', 'OID')]
 arcpy.DeleteField_management(Polylines, dropField)
 
 for types in uniqueList: 
     # 3.1. Filter each type and copy to the file geodatabase
     if cp == 'N-01' and types == 'columnation':
         sqlExpression = "SymbolID = 0" + " AND " + "type = '{}'".format(types)
     elif cp == 'N-01' and types == 'station_platform': # symbolID = 9 and 10
         sqlExpression = "SymbolID = 0" + " AND " + "type = '{}'".format(types)             
     elif cp == 'N-01' and types == 'main_alignment': # symbolID = 9 and 10
         sqlExpression = "SymbolID IN (0, 11)" + " AND " + "type = '{}'".format(types) 
     elif cp == 'N-01' and types == 'mcrp_row': # symbolID = 9 and 10
         sqlExpression = "SymbolID IN (9, 10)" + " AND " + "type = '{}'".format(types)
     elif cp == 'N-01' and types == 'centerline':   
         sqlExpression = "SymbolID = 15" + " AND " + "type = '{}'".format(types)
         
     elif cp == 'N-03' and types == 'columnation':
Exemplo n.º 9
0
def deleteFields(shapePath, fields):
    """Remove *fields* from the dataset at *shapePath*.

    Thin wrapper around arcpy.DeleteField_management; returns its Result
    object so callers can inspect messages/status.
    """
    result = arcpy.DeleteField_management(shapePath, fields)
    return result
Exemplo n.º 10
0
def unplanarize(cafp, caf, connectFIDs):
    """Rebuild unplanarized arcs from a planarized ContactsAndFaults copy.

    cafp        planarized feature class (path contains 'planarized')
    caf         original feature class, used only for row-count reporting
    connectFIDs list of [fid1, fid2] pairs of arcs that should become one
                line; it is sorted in place before being written out

    Groups connected FIDs under a shared NewLineID, dissolves on the GeMS
    attributes plus NewLineID (UNSPLIT_LINES), writes a connectedFIDs.txt
    report into outWksp, and returns the path of the dissolved
    ('unplanarized') feature class.

    Relies on module globals: arcpy, os, addMsgAndPrint, testAndDelete,
    gemsFields, fieldNameList, numberOfRows, outWksp.
    """
    addMsgAndPrint('Unplanarizing ' + os.path.basename(cafp))
    # add NewLineID to cafp
    arcpy.AddField_management(cafp, 'NewLineID', 'LONG')
    # Union-find-style grouping: every FID in a connected chain ends up
    # mapped to the same representative FID.
    addMsgAndPrint('  building newLineIDs dictionary')
    newLineIDs = {}
    for f1, f2 in connectFIDs:
        # Py3 fix: dict.has_key() was removed — use the `in` operator.
        if f1 in newLineIDs:
            if f2 in newLineIDs:
                # both keys in dict, set all values of f2 = f1
                for i in list(newLineIDs):
                    if newLineIDs[i] == f2:
                        newLineIDs[i] = newLineIDs[f1]
            else:  # only f1 is seen so far
                newLineIDs[f2] = newLineIDs[f1]
        elif f2 in newLineIDs:  # only f2 seen so far
            newLineIDs[f1] = newLineIDs[f2]
        else:  # neither f1 nor f2 seen yet: start a new group rooted at f1
            newLineIDs[f1] = f1
            newLineIDs[f2] = f1
    addMsgAndPrint('  ' + str(len(newLineIDs)) + ' entries in newLineIDs')
    # Arcs that are not in any connected pair keep their own OBJECTID.
    addMsgAndPrint('  setting NewLineID values')
    with arcpy.da.UpdateCursor(cafp, ['OBJECTID', 'NewLineID']) as cursor:
        for row in cursor:
            row[1] = newLineIDs.get(row[0], row[0])
            cursor.updateRow(row)
    # dissolve cafp on GeMS attribs and NewLineID to get cafu
    cafu = cafp.replace('planarized', 'unplanarized')
    addMsgAndPrint('  dissolving to get ' + os.path.basename(cafu))
    testAndDelete(cafu)
    # Bug fix: the original aliased the module-level gemsFields list and then
    # appended to it, mutating the shared global on every call — copy it.
    dissolveFields = list(gemsFields)
    if 'Notes' in fieldNameList(cafp):
        dissolveFields.append('Notes')
    dissolveFields.append('NewLineID')
    arcpy.Dissolve_management(cafp, cafu, dissolveFields, '', '',
                              'UNSPLIT_LINES')
    # delete NewLineID from caf_unplanarized. maybe add _ID field??
    arcpy.DeleteField_management(cafu, 'NewLineID')
    addMsgAndPrint(
        str(numberOfRows(caf)) + ' arcs in ' + os.path.basename(caf))
    addMsgAndPrint(
        str(numberOfRows(cafp)) + ' arcs in ' + os.path.basename(cafp))
    addMsgAndPrint(
        str(numberOfRows(cafu)) + ' arcs in ' + os.path.basename(cafu))

    # Report each connected pair and the group each of its FIDs landed in.
    txtPath = os.path.join(outWksp, 'connectedFIDs.txt')
    connectFIDs.sort()
    # Robustness fix: `with` guarantees the report file is closed on error.
    with open(txtPath, 'w') as outTxt:
        for aline in connectFIDs:
            outTxt.write(
                str(aline) + '  ' + str(newLineIDs[aline[0]]) + ' ' +
                str(newLineIDs[aline[1]]) + '\n')

    return cafu
Exemplo n.º 11
0
def wetlands_in_zones(zones_fc,
                      zone_field,
                      wetlands_fc,
                      output_table,
                      dissolve_wetlands=True):
    """Summarize wetland polygons within zones.

    For the full wetland set and a series of attribute-defined subsets
    (WetOrder, VegType, Regime), runs polygons_in_zones and joins the
    per-subset area/count statistics into a single output table keyed
    on zone_field.

    zones_fc          -- polygon feature class of summary zones
    zone_field        -- field in zones_fc that identifies each zone
    wetlands_fc       -- NWI-style wetland polygon feature class
    output_table      -- path for the final joined statistics table
    dissolve_wetlands -- if True, also compute statistics for a dissolved
                         (single-part) version of the full wetland set
    """
    # make sure we're only using the right types of wetlands, our feature
    # class excludes everything else but this is a guarantee this will
    # get checked at some point
    arcpy.env.workspace = 'in_memory'
    need_selection = False
    with arcpy.da.SearchCursor(wetlands_fc,
                               ["ATTRIBUTE", "WETLAND_TYPE"]) as cursor:
        # Single pass over the rows. (The original `while need_selection
        # is False:` wrapper looped forever once the cursor was exhausted
        # with no violating row found.)
        for row in cursor:
            if row[0][0] != 'P':
                # Non-palustrine code: filtering pass required.
                need_selection = True
                break
            if row[1] == 'Freshwater Pond':
                # Ponds are excluded by the selection below, so a filtering
                # pass is required. (The original `<>` test was inverted:
                # it flagged every row that was NOT a pond.)
                need_selection = True
                break
    if need_selection:
        # NOTE(review): the cursor checks 'Freshwater Pond' (space) but this
        # expression uses 'Freshwater_Pond' (underscore) -- confirm which
        # spelling the data actually carries.
        main_expr = """"ATTRIBUTE" LIKE 'P%' AND "WETLAND_TYPE" <> 'Freshwater_Pond'"""
        arcpy.Select_analysis(wetlands_fc, "wetlands_fc_checked", main_expr)
        wetlands_fc = os.path.join(arcpy.env.workspace, "wetlands_fc_checked")

    # Empty first selection means "all wetlands"; the rest are subsets.
    selections = [
        '', """"WetOrder" = 'Isolated'""", """"WetOrder" = 'Single'""",
        """"WetOrder" = 'Connected'""", """"VegType" = 'PFO'""",
        """"VegType" = 'PSS'""", """"VegType" = 'PEMorPAB'""",
        """"Regime" = 'A'""", """"Regime" = 'C'""", """"Regime" = 'F'""",
        """"Regime" = 'G'""", """"Regime" = 'H'"""
    ]
    temp_tables = [
        'AllWetlandsUndissolved', 'IsolatedWetlandsUndissolved',
        'SingleWetlandsUndissolved', 'ConnectedWetlandsUndissolved',
        'ForestedWetlandsUndissolved', 'ScrubShrubWetlandsUndissolved',
        'OpenWaterWetlandsUndissolved', 'RegimeAWetlandsUndissolved',
        'RegimeCWetlandsUndissolved', 'RegimeFWetlandsUndissolved',
        'RegimeGWetlandsUndissolved', 'RegimeHWetlandsUndissolved'
    ]
    # Fields produced by polygons_in_zones; renamed per temp table below.
    new_fields = [
        'Poly_Overlapping_AREA_ha', 'Poly_Contributing_AREA_ha',
        'Poly_Overlapping_AREA_pct', 'Poly_Count'
    ]

    for sel, temp_table in zip(selections, temp_tables):
        if sel:
            print(
                "Creating temporary table for wetlands where {0}".format(sel))
            selected_wetlands = 'selected_wetlands'
            arcpy.Select_analysis(wetlands_fc, selected_wetlands, sel)
        else:
            print("Creating temporary table for all wetlands")
            selected_wetlands = wetlands_fc
            if dissolve_wetlands:
                # Dissolve once, alongside the all-wetlands pass only.
                arcpy.Dissolve_management(selected_wetlands,
                                          'dissolved_wetlands',
                                          multi_part='SINGLE_PART')
                dissolved_temp_table = temp_table.replace(
                    'Undissolved', 'Dissolved')
                # zip() materialized its pairs above, so appending here does
                # not extend the current loop; it only adds the dissolved
                # table to the later join/cleanup phases.
                temp_tables.append(dissolved_temp_table)
                polygons_in_zones.polygons_in_zones(zones_fc, zone_field,
                                                    'dissolved_wetlands',
                                                    dissolved_temp_table, '')
                avg_size_field = dissolved_temp_table + '_AvgSize_ha'
                arcpy.AddField_management(dissolved_temp_table, avg_size_field,
                                          'DOUBLE')
                arcpy.CalculateField_management(
                    dissolved_temp_table, avg_size_field,
                    '!Poly_Contributing_AREA_ha!/!Poly_Count!', 'PYTHON')
                for f in new_fields:
                    cu.rename_field(dissolved_temp_table, f,
                                    f.replace('Poly', dissolved_temp_table),
                                    True)

        polygons_in_zones.polygons_in_zones(zones_fc, zone_field,
                                            selected_wetlands, temp_table, '')
        # Average wetland size per zone = contributing area / polygon count.
        avg_size_field = temp_table + '_AvgSize_ha'
        arcpy.AddField_management(temp_table, avg_size_field, 'DOUBLE')
        arcpy.CalculateField_management(
            temp_table, avg_size_field,
            '!Poly_Contributing_AREA_ha!/!Poly_Count!', 'PYTHON')
        for f in new_fields:
            cu.rename_field(temp_table, f, f.replace('Poly', temp_table), True)

    # join em up and copy to final
    temp_tables.remove('AllWetlandsUndissolved')
    for t in temp_tables:
        try:
            arcpy.JoinField_management('AllWetlandsUndissolved', zone_field, t,
                                       zone_field)
        # sometimes there's no table if it was an empty selection; add
        # zero-filled columns so the output schema stays stable
        except Exception:
            empty_fields = [f.replace('Poly', t) for f in new_fields]
            for ef in empty_fields:
                arcpy.AddField_management('AllWetlandsUndissolved', ef,
                                          'Double')
                arcpy.CalculateField_management('AllWetlandsUndissolved', ef,
                                                '0', 'PYTHON')
            continue
    # remove all the extra zoneID fields, which have underscore in name
    drop_fields = [
        f.name
        for f in arcpy.ListFields('AllWetlandsUndissolved', zone_field + "_*")
    ]
    for f in drop_fields:
        arcpy.DeleteField_management('AllWetlandsUndissolved', f)
    arcpy.CopyRows_management('AllWetlandsUndissolved', output_table)

    # Best-effort cleanup of in_memory intermediates; some may not exist.
    for item in [
            'AllWetlandsUndissolved', 'wetlands_fc_checked',
            'dissolved_wetlands'
    ] + temp_tables:
        try:
            arcpy.Delete_management(item)
        except Exception:
            continue
    arcpy.ResetEnvironments()
Exemplo n.º 12
0
def planarizeAndGetArcEndPoints(fds, caf, mup, fdsToken):
    """Planarize the CAF line feature class and build its segment endpoints.

    Returns (cafp, arcEndPoints):
      cafp         -- planarized copy of caf, IDENTITY'd with mup so each
                      segment carries left/right map-unit context
      arcEndPoints -- point feature class in fds with two points per
                      planarized segment (ToFrom = 'From'/'To'), carrying
                      the segment azimuth (LineDir) and POINT_X/POINT_Y
    """
    addMsgAndPrint('Planarizing ' + os.path.basename(caf) +
                   ' and getting segment endpoints')
    #   add LineID (so we can recover lines after planarization)
    arcpy.AddField_management(caf, 'LineID', 'LONG')
    arcpy.CalculateField_management(caf, 'LineID', '!OBJECTID!', 'PYTHON_9.3')
    # planarize CAF by FeatureToLine
    addMsgAndPrint('  planarizing caf')
    planCaf = caf + '_xxx_plan'
    testAndDelete(planCaf)
    arcpy.FeatureToLine_management(caf, planCaf)
    #   planarize CAF (by IDENTITY with MUP)
    addMsgAndPrint('  IDENTITYing caf with mup')
    cafp = caf + '_planarized'
    testAndDelete(cafp)
    arcpy.Identity_analysis(planCaf, mup, cafp, 'ALL', '',
                            'KEEP_RELATIONSHIPS')
    # delete extra fields
    addMsgAndPrint('  deleting extra fields')
    fns = fieldNameList(cafp)
    deleteFields = []
    # keep MapUnit but drop every other LEFT_/RIGHT_ field inherited from mup
    for f in fieldNameList(mup):
        if f != 'MapUnit':  # was `<>`, which is invalid in Python 3
            for hf in ('RIGHT_' + f, 'LEFT_' + f):
                if hf in fns:
                    deleteFields.append(hf)
    arcpy.DeleteField_management(cafp, deleteFields)
    #   calculate azimuths startDir and endDir
    addMsgAndPrint('  adding StartAzimuth and EndAzimuth')
    for f in ('LineDir', 'StartAzimuth', 'EndAzimuth'):
        arcpy.AddField_management(cafp, f, 'FLOAT')
    arcpy.AddField_management(cafp, 'ToFrom', 'TEXT', '', '', 4)
    fields = ['SHAPE@', 'StartAzimuth', 'EndAzimuth']
    with arcpy.da.UpdateCursor(cafp, fields) as cursor:
        for row in cursor:
            # only the first part of each (single-part, planarized) line
            lineSeg = row[0].getPart(0)
            row[1], row[2] = startEndGeogDirections(lineSeg)
            cursor.updateRow(row)
    #   make endpoint feature class
    addMsgAndPrint('  converting line ends to points')
    arcEndPoints = fds + '/' + fdsToken + 'xxx_EndPoints'  # will be a feature class in fds
    arcEndPoints2 = arcEndPoints + '_end'
    testAndDelete(arcEndPoints)
    # start points first, tagged 'From', with the start azimuth as LineDir
    arcpy.FeatureVerticesToPoints_management(cafp, arcEndPoints, 'START')
    arcpy.CalculateField_management(arcEndPoints, 'LineDir', '!StartAzimuth!',
                                    'PYTHON')
    arcpy.CalculateField_management(arcEndPoints, 'ToFrom', '"From"', 'PYTHON')
    testAndDelete(arcEndPoints2)
    # end points, tagged 'To', appended to the same feature class
    arcpy.FeatureVerticesToPoints_management(cafp, arcEndPoints2, 'END')
    arcpy.CalculateField_management(arcEndPoints2, 'LineDir', '!EndAzimuth!',
                                    'PYTHON')
    arcpy.CalculateField_management(arcEndPoints2, 'ToFrom', '"To"', 'PYTHON')
    arcpy.Append_management(arcEndPoints2, arcEndPoints)
    testAndDelete(arcEndPoints2)
    #  delete some more fields
    deleteFields = [
        'EndAzimuth', 'StartAzimuth', 'LEFT_MapUnitPolys', 'RIGHT_MapUnitPolys'
    ]
    arcpy.DeleteField_management(arcEndPoints, deleteFields)
    addMsgAndPrint('  adding POINT_X and POINT_Y')
    arcpy.AddXY_management(arcEndPoints)
    testAndDelete(planCaf)
    return cafp, arcEndPoints
Exemplo n.º 13
0
        if row[0] == "In Production" or row[0] == "Not Started" or row[
                0] == "On Hold" or row[0] == "Ready For Submission":
            row[1] = "In Design"

        elif row[0] == "Permitted":
            row[1] = "Permits Received"

        elif row[0] == "Entity Review" or row[0] == "Respond To Entity Review":
            row[1] = "Permits Submitted"

        elif row[0] == "Comm Moves In Progress":
            row[1] = "Comm Moves In Progress"

        cursor.updateRow(row)

# The MIN_STATUS_CODE values were folded into the status field by the cursor
# above, so the source field is no longer needed.
arcpy.DeleteField_management(dissolvePath, 'MIN_STATUS_CODE')

##### Clementini Selection of Trans_PermitPoly_SpJoin & Export to GDB (PG. 9) #####

# A feature layer is required because the SelectLayerBy* tools operate on
# layer selections, not directly on feature classes.
arcpy.MakeFeatureLayer_management(outSpatJoin, "spatJoinLyr")

# Select spatial-join features that completely contain the dissolved polygons
# (Clementini semantics: shared boundaries alone do not count as containment).
arcpy.SelectLayerByLocation_management("spatJoinLyr", "CONTAINS_CLEMENTINI",
                                       dissolvePath, "", "NEW_SELECTION")
result4 = arcpy.GetCount_management("spatJoinLyr")
print "... " + str(result4) + " records selected"
print("\n")

# Invert the selection to get the records that do NOT contain the polygons.
arcpy.SelectLayerByAttribute_management("spatJoinLyr", "SWITCH_SELECTION")
result5 = arcpy.GetCount_management("spatJoinLyr")
print "... " + str(result5) + " records were reverse selected"
print("\n")
# import and merge git/manual_output/sp_rgn_manual.csv for editing
#arcpy.Dissolve_management('sp_m', 'sp_m_d', ['sp_type','sp_id','sp_name','sp_key']) # redo sp_id ...
# Pull the attribute table of sp_m_d into a pandas DataFrame for joining.
d = pandas.DataFrame(arcpy.da.TableToNumPyArray('sp_m_d', ['OBJECTID','sp_type','sp_id','sp_name','sp_key'])) # print(set(d['sp_type'])) # set([u'ccamlr', u'land', u'eez', u'land-noeez', u'fao', u'eez-inland'])
# convert from Unicode to ASCII for matching lookup
for u,a in {u'Curaçao':'Curacao', u'République du Congo':'Republique du Congo', u'Réunion':'Reunion'}.iteritems(): # u=u'Réunion'; a='Reunion'
    # .ix is a deprecated pandas indexer; era-appropriate for this script
    d.ix[d.sp_name==u,'sp_name'] = a
# Suffix the original columns so the CSV's columns take the canonical names.
d = d.rename(columns={'sp_type':'sp_type_orig','sp_name':'sp_name_orig', 'sp_id':'sp_id_orig','sp_key':'sp_key_orig'})
z = pandas.io.parsers.read_csv(sp_rgn_csv) # , encoding='utf-8') #z_cols = ['sp_type','sp_name_orig','sp_id','sp_name','sp_key','rgn_typ','rgn_id','rgn_name','rgn_key','country_id_2012','region_id_2012','region_name_2012']
# Left join: every feature keeps a row even if the CSV lacks a match.
m = pandas.merge(d, z, how='left', on=['sp_type_orig','sp_name_orig'])
# missing and duplicate checks should return 0 rows:
#  m[m.sp_name.isnull()][['sp_type_orig','sp_name_orig']]
#  m[m.duplicated('OBJECTID')].sort(['sp_type_orig','sp_name_orig'])[['sp_type_orig','sp_name_orig']]
# Copy OBJECTID into a stable OID field (copies do not preserve OBJECTID).
arcpy.AddField_management('sp_m_d', 'OID', 'LONG')
arcpy.CalculateField_management('sp_m_d', 'OID', '!OBJECTID!', 'PYTHON_9.3')
arcpy.CopyFeatures_management('sp_m_d','sp_m_d_i')
# Strip all attribute fields except geometry bookkeeping and the OID key.
arcpy.DeleteField_management('sp_m_d_i', [x.name for x in arcpy.ListFields('sp_m_d_i') if x.name not in ('OBJECTID','OID','Shape','Shape_Length','Shape_Area')])
r = m[['OBJECTID',
       'sp_type','sp_id','sp_name','sp_key',
       'rgn_type','rgn_id','rgn_name','rgn_key',
       'cntry_id12','rgn_id12','rgn_name12']].to_records(index=False) # m[[x for x in m.columns if x!='Shape_Area']]
# NOTE(review): the target dtype adds a 'notes' field not selected above --
# confirm this cast behaves as intended with the numpy version in use.
r = r.astype(
    [('OBJECTID', '<i4'),
     ('sp_type'     , '<U20'), ('sp_id'     , '<i4'), ('sp_name'     , '<U255'), ('sp_key'     , '<U10'),
     ('rgn_type'    , '<U20'), ('rgn_id'    , '<i4'), ('rgn_name'    , '<U255'), ('rgn_key'    , '<U10'),
     ('cntry_id12'  ,'<U255'), ('rgn_id12'  , '<i4'), ('rgn_name12'  , '<U255'), ('notes'      , '<U255')])
# Attach the merged attributes back onto the stripped copy via the OID key.
arcpy.da.ExtendTable('sp_m_d_i', 'OID', r, 'OBJECTID', append_only=False)
arcpy.Dissolve_management('sp_m_d_i', 'sp_gcs' , ['sp_type','sp_id','sp_name','sp_key','rgn_type','rgn_id','rgn_name','rgn_key','cntry_id12','rgn_id12','rgn_name12'])
arcpy.Dissolve_management('sp_m_d_i', 'rgn_gcs', ['rgn_type','rgn_id','rgn_name','rgn_key'])
arcpy.RepairGeometry_management('sp_gcs')
arcpy.RepairGeometry_management('rgn_gcs')
Exemplo n.º 15
0
print("Extracting elevation")
ExtractValuesToPoints(outProfPt, inDEM, "temp_elev")
print(" Attaching elevation to profile points")
arcpy.JoinField_management(outProfPt, "OBJECTID", "temp_elev", "OBJECTID",
                           "RASTERVALU")
print(" Renaming field")
arcpy.AlterField_management(outProfPt, "RASTERVALU", "Elevation", "#", "#",
                            "#", "#", "True")

# Attaching geology age
print("Attaching geology info")
# delete fields if exist
for fld in [geolField, ageMinField, ageMaxField]:
    if fld in [f.name for f in arcpy.ListFields(outProfPt)]:
        print(" Deleting " + fld + " field")
        arcpy.DeleteField_management(outProfPt, fld)
print(" Performing Identity")
arcpy.Identity_analysis(outProfPt, inGeol, "temp_" + fld)
print(" Joining geology info to summary points")
arcpy.JoinField_management(outProfPt, "OBJECTID", "temp_" + fld,
                           "FID_" + outProfPt,
                           [geolField, ageMinField, ageMaxField])

# Writing profile files
print("Writing files")
with arcpy.da.SearchCursor(inProfile, [profIDField]) as line_cursor:
    for line_row in line_cursor:
        profID = line_row[0]  # get profile id
        print(" Profile: " + str(profID))
        outFile = os.path.join(outCSVFolder, "profile_" + str(profID) +
                               ".csv")  # set output file
Exemplo n.º 16
0
arcpy.AddMessage(" ")
arcpy.AddMessage("Calculate length of GPS tracks.")
arcpy.AddMessage(" ")
# Geometry-token expression: writes track length in miles into fieldName0.
expression0 = "!shape.length@miles!"
arcpy.CalculateField_management(fc1, fieldName0, expression0, "PYTHON")

arcpy.AddMessage("Buffer GPS tracks to account for sweepwidth and team size.")
arcpy.AddMessage(" ")
# Buffer areas around GPS track
# NOTE(review): bufferUnit, distanceField and sideType are assigned but never
# used -- the Buffer call below uses Sweep and literal arguments instead.
bufferUnit = "meters"
distanceField = "TeamSweep_m"
sideType = "GPSLoc"
dissolveType = "NONE"
arcpy.Buffer_analysis(fc1, fc5, Sweep, "", "", dissolveType, "")
arcpy.DeleteField_management(fc1, "Sweep")

# Drop the buffer parameters now that buffering is done.
del bufferUnit
del distanceField
del sideType
del dissolveType
del Sweep

# Execute AddField
# Constant coverage flag (1) on every buffered track polygon.
arcpy.AddField_management(fc5, fieldName8, "SHORT")
arcpy.CalculateField_management(fc5, fieldName8, "1", "PYTHON")

arcpy.AddMessage(
    "Intersection analysis between GPS tracks and Search Segments")
arcpy.AddMessage(" ")
# Process: Intersect
Exemplo n.º 17
0
# Copy the parcel table into the notification table.
r = arcpy.CopyRows_management("Parcels2", PD_Notify, "")
print(r[0])

# Process: Add Field
# Single text field to hold the concatenated street address.
r = arcpy.AddField_management(PD_Notify, "address", "TEXT", "", "", "50", "",
                              "NULLABLE", "NON_REQUIRED", "")
print(r[0])

# Process: Calculate Field
# Build "number street type"; fails if any component is NULL -- presumably
# the source fields are populated.
r = arcpy.CalculateField_management(
    PD_Notify, "address",
    "!House_Num! + ' ' + !Street_Name! + ' ' + !Street_Type!", "PYTHON_9.3",
    "")
print(r[0])

# Process: Delete Field
# Drop the now-redundant component and bookkeeping fields.
r = arcpy.DeleteField_management(
    PD_Notify,
    "SubName;BLK;LotNo;Pre_Type;Pre_Dir;House_Num;Street_Name;Street_Type;Suffix_Dir;Use_Code;PlatStatus"
)
print(r[0])

# Export the table (r holds the DeleteField result, i.e. PD_Notify) to a
# temporary text file, echo the last column of each row, then clean up.
arcpy.CopyRows_management(r, "D:/GISProjects/accident/temp.txt")
filein = open("D:/GISProjects/accident/temp.txt", 'r')

for row in csv.reader(filein):
    outstr = row[-1]
    print(outstr)

filein.close()
os.remove("D:/GISProjects/accident/temp.txt")

#===============================================================================
# CODING
#===============================================================================
#/creation of the final stream network
    # The sequencing module will operate on this layer
arcpy.AddMessage("Creating final shp - Step 0")
InputFCMTS_TEMP = arcpy.MultipartToSinglepart_management(InputFC, "%ScratchWorkspace%\\InputFCMTS_TEMP")
InputFCMTS = arcpy.Intersect_analysis([InputFCMTS_TEMP, InputFCMTS_TEMP], "%ScratchWorkspace%\\InputFCMTS", "", "", "")

fieldnamesInFC = [f.name for f in arcpy.ListFields(InputFCMTS_TEMP)]
fieldnamesOutFC = [f.name for f in arcpy.ListFields(InputFCMTS)]
for fieldOut in fieldnamesOutFC :
    if  str(fieldOut) not in str(fieldnamesInFC) :
            arcpy.DeleteField_management(InputFCMTS, str(fieldOut))
            
arcpy.AddField_management(InputFCMTS, "Rank_UGO", "SHORT", "", "", "", "","NULLABLE", "NON_REQUIRED")
arcpy.AddField_management(InputFCMTS, "NextDownID", "SHORT", "", "", "", "","NULLABLE", "NON_REQUIRED")
arcpy.AddField_management(InputFCMTS, "NextUpID", "SHORT", "", "", "", "","NULLABLE", "NON_REQUIRED")
arcpy.AddField_management(InputFCMTS, "From_X", "DOUBLE", "", "", "", "","NULLABLE", "NON_REQUIRED")

#/creation of the up and down points 
if path[size-4:size] == ".gdb" :
    UDPts = arcpy.FeatureVerticesToPoints_management(InputFCMTS, OutFP, "BOTH_ENDS")
else :
    UDPts = arcpy.FeatureVerticesToPoints_management(InputFCMTS, "%ScratchWorkspace%\\UDPts", "BOTH_ENDS")

fieldnamesUDPts = [f.name for f in arcpy.ListFields(UDPts)]
for field in fieldnamesUDPts :
    try :
Exemplo n.º 19
0
                          "", "", "")
expression = "getClass(!VALUE!)"
codeblock = """def getClass(a):
	if a == 1:
		return u"极重要"
	if a == 3:
		return u"高度重要"
	else:
		return u'一般重要'"""
# Write the cell size (fbl) into the output raster's attribute table,
# classify each value into an importance grade (dengji) via the codeblock
# defined above, then compute area (mj) as cell size * cell count.
arcpy.CalculateField_management(OUTPUTPATH + OUTPUTNAME + ".tif", "fbl", FBLL,
                                "PYTHON_9.3")
arcpy.CalculateField_management(OUTPUTPATH + OUTPUTNAME + ".tif", "dengji",
                                expression, "PYTHON_9.3", codeblock)
arcpy.CalculateField_management(OUTPUTPATH + OUTPUTNAME + ".tif", "mj",
                                "(!fbl!)*(!COUNT!)", "PYTHON_9.3")
# The cell-size field was only needed to derive mj.
arcpy.DeleteField_management(OUTPUTPATH + OUTPUTNAME + ".tif", "fbl")
# Remove scratch shapefiles and rasters from earlier steps.
arcpy.Delete_management("stfwshp.shp")
arcpy.Delete_management("stmgshp.shp")
arcpy.Delete_management("stjcshp.shp")
arcpy.Delete_management("jc.tif")
if arcpy.Exists("tihuan1.tif"):
    arcpy.Delete_management("tihuan1.tif")
if arcpy.Exists("tihuan.tif"):
    arcpy.Delete_management("tihuan.tif")
######################################
# Export the raster attribute table to Excel; optionally also summarize by
# administrative region when one was supplied ("99999" = none).
RasterTableToTxtAndExcel.write2excel(OUTPUTPATH + OUTPUTNAME + ".tif",
                                     OUTPUTPATH, OUTPUTNAME)
if IN_XZQQ != "99999":
    StaticticsByXZQ.StatisticBYXZQ(OUTPUTPATH + OUTPUTNAME + ".tif", IN_XZQQ,
                                   OUTPUTPATH, OUTPUTNAME + "_XZQ")
arcpy.AddColormap_management(OUTPUTPATH + OUTPUTNAME + ".tif", "#",
Exemplo n.º 20
0
# Install the averaged bike-rating field map, then do the same for the
# pedestrian rating so joined values are averaged rather than taken first.
fieldmappings.replaceFieldMap(bikefieldindex, fieldmap)
pedfieldindex = fieldmappings.findFieldMapIndex("MeanPedRating")
fieldmap2 = fieldmappings.getFieldMap(pedfieldindex)
fieldmap2.mergeRule = "mean"
fieldmappings.replaceFieldMap(pedfieldindex, fieldmap2)

# One output row per zone; roads that intersect a zone contribute their
# (mean-merged) ratings.
arcpy.SpatialJoin_analysis("ZonesJoin13", "RoadsWithPathways", "DemandScore",
                           "JOIN_ONE_TO_ONE", "KEEP_ALL", fieldmappings,
                           "INTERSECT")

print("Spatial Joins Complete!")

# Strip unwanted carried-over fields from the joined output.
removefields = getFieldNames("DemandScore")
print(removefields)

arcpy.DeleteField_management("DemandScore", removefields)

# Clean up - delete intermediate feature classes
print("Deleting intermediate feature classes...")
removefcs = [
    "ZonesJoin1", "ZonesJoin2", "ZonesJoin3", "ZonesJoin4", "ZonesJoin5",
    "ZonesJoin6", "ZonesJoin7", "ZonesJoin8", "ZonesJoin9", "ZonesJoin10",
    "ZonesJoin11", "ZonesJoin12", "ZonesJoin13"
]
for f in removefcs:
    arcpy.Delete_management(f)

print("Intermediate feature classes deleted!")

# Deal with null values in scores fields
Exemplo n.º 21
0
                                                  "", "", "", "NULLABLE",
                                                  "NON_REQUIRED", "")
                    except arcpy.ExecuteError, error:
                        print ""
                    # pfnRemarks
                    arcpy.AddField_management(output, "pfnRemarks", "TEXT", "",
                                              "", "", "", "NULLABLE",
                                              "NON_REQUIRED", "")
                    try:
                        arcpy.CalculateField_management(
                            output, "pfnRemarks", remarks, "VB", "")
                    except arcpy.ExecuteError, error:
                        print ""
                    #Process : Delete Field
                    print "Removing Fields : " + fields
                    arcpy.DeleteField_management(output, fields)
#2
                elif jens == "2":
                    print "Pulau"
                    # objID
                    arcpy.AddField_management(output, "objID", "SHORT", "", "",
                                              "", "", "NULLABLE",
                                              "NON_REQUIRED", "")

                    # objType
                    arcpy.AddField_management(output, "objType", "TEXT", "",
                                              "", "", "", "NULLABLE",
                                              "NON_REQUIRED", "")
                    arcpy.CalculateField_management(output, "objType", types,
                                                    "VB", "")
                    # objYear
Exemplo n.º 22
0
            row.setValue(fieldCapMNo, "-99")
        value15 = row.getValue(fieldSurHa)
        if value15 < 0:
            row.setValue(fieldSurHa, "-99")
        value16 = row.getValue(fieldDrainSqk)
        if value16 < 0:
            row.setValue(fieldDrainSqk, "-99")
        cur.updateRow(row)

print "Step 6 completed at", datetime.datetime.now().strftime(
    "%A, %B %d %Y %I:%M:%S%p")

## ---------------------------------------------------------------------------
## 7. Export the feature class
## Description: Rename fields, Add new fields (units in U.S. system)

print "\nStep 7 Export starts at", datetime.datetime.now().strftime(
    "%A, %B %d %Y %I:%M:%S%p")

# Copy each clipped feature class to the final folder as a shapefile and
# drop the OID_ field that the copy carries over.
for fc in clipList:
    name = os.path.split(fc)[1]
    out_feature = os.path.join(finalFolder, name + ".shp")
    arcpy.CopyFeatures_management(fc, out_feature)
    arcpy.DeleteField_management(out_feature, "OID_")

print "Step 7 completed at", datetime.datetime.now().strftime(
    "%A, %B %d %Y %I:%M:%S%p")

print "Geoprocess Dams completed at", datetime.datetime.now().strftime(
    "%A, %B %d %Y %I:%M:%S%p")
Exemplo n.º 23
0
## ---------------------------------------------------------------------------
## 3. Homogeneize fields
## Description: Add and delete fields

print "\nStep 3 Add and delete fields starts at", datetime.datetime.now().strftime("%A, %B %d %Y %I:%M:%S%p")

fieldName = "STATE_AGEN"
fieldNameList = [fieldName]
fieldAlias = 'STATE_AGENCY'
fieldType = "TEXT"
fieldsDel = ["AREA", "PERIMETER"]

# Add field
# NOTE(review): the DeleteField call sits inside the add-field loop; harmless
# with this one-element list, but it re-runs per added field.
for x in fieldNameList:
    arcpy.AddField_management(out_featureclass, x, fieldType, "", "", 75, fieldAlias)
    arcpy.DeleteField_management(out_featureclass, fieldsDel)

#Update lines
# Stamp every row with the constant agency code.
cur = arcpy.UpdateCursor(out_featureclass)
for row in cur:
    row.setValue(fieldName, 'TCEQ')
    cur.updateRow(row)

print "Step 3 completed at", datetime.datetime.now().strftime("%A, %B %d %Y %I:%M:%S%p")
Exemplo n.º 24
0
# For each reclassified raster, keep only the VALUE and COUNT fields and
# delete every other non-required field.
for n in rec_list:
    d = []
    fields0 = [c.name for c in arcpy.ListFields(n) if not c.required]
    arcpy.AddMessage(fields0)
    for k in fields0:
        if k == "VALUE" or k == "COUNT":
            d.append(k)
            arcpy.AddMessage("k=" + k)
        else:
            # NOTE(review): message text looks mojibake ("deðil" is Turkish
            # "değil" = "is not") -- confirm the source file encoding.
            arcpy.AddMessage("k value yada count deðil")
            pass

    # Only delete when both VALUE and COUNT were found and extras exist.
    if len(fields0) > 2 and len(d) == 2:
        fields0.remove(d[0])
        fields0.remove(d[1])
        arcpy.DeleteField_management(n, fields0)
    arcpy.AddMessage(n + "Fields_Silindi.......")

##- Performing analysis calculations ---------------------
# Join the hey_rec* raster counts onto the matching rec rasters by Value.
hey_list = arcpy.ListDatasets("hey_rec*", "Raster")
for j, k in zip(rec_list, hey_list):
    arcpy.JoinField_management(j, "Value", k, "Value", "count")
# Accumulators and field-name constants for the statistics loop below.
lst = []
lst2 = []
lstmax = []
lstmin = []
top_pix_field = "count", "hp"
fieldTpx = ['sumtpx', 'sumlpx']
max_min = "max", "min"
for l in rec_list:
    outname = str("tablo" + (l) + ".dbf")
Exemplo n.º 25
0
def delete_dir2_fields(temp_arc):
    ''' Delete all <FIELDNAME>2 fields, leaving only the A-to-B directional
        values. '''
    # Drop the B-to-A ("2"-suffixed) copies of every directional field.
    doomed = ['%s2' % name for name in dir_fields]
    arcpy.DeleteField_management(temp_arc, doomed)
    # The combined DIRECTIONS field is likewise no longer needed.
    arcpy.DeleteField_management(temp_arc, ['DIRECTIONS'])
    return temp_arc
Exemplo n.º 26
0
def WVT(city, inDir, workFld):
    import traceback, time, arcpy, os
    from arcpy import env
    arcpy.CheckOutExtension('Spatial')

    #-------- DIRECTORY SETUP ------------------------------------------------
    """ Working Directory """
    try:
        arcpy.CreateFileGDB_management(str(workFld), str(city) + '_TreeWV.gdb')
    except:
        print 'TreeWV GDB already exists'
    workDir = str(workFld) + '/' + city + '_TreeWV.gdb'
    arcpy.env.workspace = workDir
    """ Report File Directory """
    reportfileDir = str(workFld) + '/Logs'
    """ Frequent Directory """
    freqDir = str(workFld) + '/' + city + '_Freq.gdb'
    """ Final Geodatabase """
    finalDir = str(workFld) + '/' + city + '_Final.gdb'
    """ Projection File Directory """
    prjDir = str(inDir) + '/Prj'
    prjfile = prjDir + '/USA Contiguous Albers Equal Area Conic USGS.prj'
    """ Split Raster Directory """
    if os.path.isdir(str(workFld) + '/' + city + '_Split') == True:
        pass
    else:
        os.makedirs(str(workFld) + '/' + city + '_Split')
    splitDir = str(workFld) + '/' + city + '_Split'
    """ Set Workspace Environments """
    arcpy.env.workspace = workDir
    arcpy.env.scratch = str(inDir) + '/Scratch.gdb'
    arcpy.env.overwriteOutput = True

    #-----------------------------------------------------------------------------
    # BEGIN ANALYSIS
    #-----------------------------------------------------------------------------
    try:
        #-------- LOGFILE CREATION ---------------------------------------------
        """ Create report file for each metric """
        tmpName = city + '_TreeWV_' + time.strftime('%Y%m%d_%H-%M')
        reportfileName = reportfileDir + '/' + tmpName + '.txt'
        reportFile = open(reportfileName, 'w')

        try:
            loglist = sorted(f for f in os.listdir(reportfileDir)
                             if f.startswith(str(city) + '_Reuse'))
            tmpName = loglist[-1]
        except:
            tmpName = city + '_Reuse_' + time.strftime('%Y%m%d_%H-%M') + '.txt'
        reportfileName = reportfileDir + '/' + tmpName

        try:
            ReuseRF = open(reportfileName, 'a')
        except:
            ReuseRF = open(reportfileName, 'w')
            print 'Creating Reuse Log'
        """ Write out first line of report file """
        print 'Window Views of Trees Start Time: ' + time.asctime()
        reportFile.write(
            "Begin with EnviroAtlas 1-meter Land Cover for the EnviroAtlas community--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        #-------- PROCESSING LAYERS ----------------------------------------------
        """ Set Environments """
        arcpy.env.snapRaster = freqDir + '/LC'
        arcpy.env.extent = freqDir + '/LC'
        arcpy.env.overwriteOutput = True
        """-------- Reclassify LC into Binary Forest ----------------------------- """
        if arcpy.Exists(str(freqDir) + '/MForestIO') == False:
            outReclass = arcpy.sa.Reclassify(
                str(freqDir) + '/LC', 'Value',
                arcpy.sa.RemapValue([[0, 0], [10, 0], [20, 0], [21,
                                                                0], [22, 0],
                                     [30, 0], [40, 1], [52, 0], [70, 0],
                                     [80, 0], [82, 1], [91, 1], [92, 0]]))
            outReclass.save(str(freqDir) + '/MForestIO')
            reportFile.write(
                "Reclassify the Land Cover into a Forest binary REPLACE-MFE" +
                time.strftime('%Y%m%d--%H%M%S') + '--\n')
            print("Forest area reclassified to binary raster..." +
                  time.asctime())
            ReuseRF.write("MForestIO--" + time.strftime('%Y%m%d--%H%M%S') +
                          '--\n')

        else:
            print("Forest binary raster already exists..." + time.asctime())
            reportFile.write(
                "Reclassify the Land Cover into a Forest binary REPLACE-MFE--MForestIO"
                + '--\n')
        """-------- Create 50m Moving Window ------------------------------------- """
        MW = arcpy.sa.FocalStatistics(freqDir + '/MForestIO',
                                      arcpy.sa.NbrCircle(50, 'CELL'), 'SUM',
                                      'NODATA')
        MW.save('MFor_50C')
        print("Moving window complete..." + time.asctime())
        reportFile.write(
            "Run Focal Statistics on the Forest Binary Raster with a circular window of 50 meters and statistics = SUM.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """-------- Reclassify Moving Window into Trees/NoTrees ------------------- """
        ReC = arcpy.sa.Reclassify(
            'MFor_50C', 'Value',
            arcpy.sa.RemapRange([[0, 0.99999, 1], [0.99999, 10300, 0]]))
        ReC.save('NoForView')
        print(
            "Moving window completed and reclassified to tree / no trees..." +
            time.asctime())
        reportFile.write(
            "Reclassify the Focal Statistics into Forest (>0 -> 0) or No Forest (0 -> 1).--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """-------- Split the Raster As Needs, Process Each Piece ----------------- """
        """ Check if the raster should be split """
        columns = arcpy.GetRasterProperties_management(
            'NoForView', 'COLUMNCOUNT').getOutput(0)
        xsplit = int(float(columns) / 20000) + 1
        rows = arcpy.GetRasterProperties_management('NoForView',
                                                    'ROWCOUNT').getOutput(0)
        ysplit = int(float(rows) / 20000) + 1
        """-------- If no split, run the analysis --------------------------------- """
        if xsplit * ysplit == 1:
            """ Convert Raster to Polygon """
            arcpy.RasterToPolygon_conversion('NoForView', 'NFVP_Whole',
                                             'NO_SIMPLIFY')
            """ Extract areas with no forest in 50m """
            arcpy.Select_analysis('NFVP_Whole', 'NFVS_Whole', 'gridcode=1')
            reportFile.write(
                "Convert the raster into a polygon and select the features where gridcode = 1.--"
                + time.strftime('%Y%m%d--%H%M%S') + '--\n')
            """ Project into Albers for use with Dasymetric """
            arcpy.Project_management('NFVS_Whole', 'NFVA_Whole', prjfile)
            print("Raster small enough, carry on..." + time.asctime())
            reportFile.write("Convert the polygons into Albers projection.--" +
                             time.strftime('%Y%m%d--%H%M%S') + '--\n')
            """-------- If split, run the analysis on each piece and recombine --------- """
        else:
            """ Delete the raster, if necessary """
            xy = (xsplit * ysplit)
            for rast in range(xy):
                try:
                    arcpy.Delete_management(splitDir + '/nfvsp_' + str(rast))
                except:
                    pass
            try:
                arcpy.Delete_management(splitDir + '/noforview')
            except:
                pass
            """ Split the Raster """
            arcpy.RasterToOtherFormat_conversion('NoForView', splitDir, 'GRID')
            print("Raster too big, splitting into " + str(xsplit) +
                  " rows and " + str(ysplit) + " columns..." + time.asctime())
            arcpy.SplitRaster_management(splitDir + '/NoForView', splitDir,
                                         'NFVSp_', 'NUMBER_OF_TILES', 'GRID',
                                         '',
                                         str(xsplit) + ' ' + str(ysplit))
            reportFile.write(
                "Split the raster into pieces for easier processing. The Python script determines the number of pieces based on the number of rows and columns in the raster where no piece can have a side larger than 20,000 cells--"
                + time.strftime('%Y%m%d--%H%M%S') + '--\n')
            """ For each raster: """
            prjfile = prjDir + '/USA Contiguous Albers Equal Area Conic USGS.prj'
            xy = (xsplit * ysplit)
            for Chunk in range(0, xy):
                try:
                    result = float(
                        arcpy.GetRasterProperties_management(
                            splitDir + '/NFVSp_' + str(Chunk),
                            'MEAN').getOutput(0))
                    """ If the raster piece has data: """
                    if (result > 0):
                        """ Set Environments """
                        arcpy.env.snapRaster = freqDir + '/MForestIO'
                        arcpy.env.extent = freqDir + '/MForestIO'
                        """ Copy the piece back to the Working Directory """
                        arcpy.RasterToOtherFormat_conversion(
                            splitDir + '/NFVSp_' + str(Chunk), workDir)
                        """ Convert Raster to Polygon """
                        arcpy.RasterToPolygon_conversion(
                            'NFVSp_' + str(Chunk), 'NFVP_' + str(Chunk),
                            'NO_SIMPLIFY')
                        """ Extract areas with no forest in 50m """
                        arcpy.Select_analysis('NFVP_' + str(Chunk),
                                              'NFVS_' + str(Chunk),
                                              'gridcode=1')
                        """ Project into Albers for use with Dasymetric """
                        arcpy.Project_management('NFVS_' + str(Chunk),
                                                 'NFVA_' + str(Chunk), prjfile)
                        print("Chunk " + str(Chunk) + " / " + str(xy) +
                              " processed..." + time.asctime())
                except:
                    pass
            reportFile.write(
                "For each piece, convert the raster into a polygon and select the features where gridcode = 1--"
                + time.strftime('%Y%m%d--%H%M%S') + '--\n')
            reportFile.write(
                "For each piece, convert the polygons into Albers projection.--"
                + time.strftime('%Y%m%d--%H%M%S') + '--\n')
            """ Combine the resulting polygons """
            NFVchunks = arcpy.ListFeatureClasses('NFVA_*')
            arcpy.Merge_management(NFVchunks, workDir + '/NFVA_Whole')
            print("All chunks remerged..." + time.asctime())
            reportFile.write(
                "Merge all of the projected polygons together.--" +
                time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Set Environments """
        arcpy.env.snapRaster = freqDir + '/Dasy'
        arcpy.env.extent = freqDir + '/Dasy'
        """-------- End of Split Processing ---------------------------------------- """
        """ Extract Dasymetric Pixels where there is no forest in 50m """
        EbM = arcpy.sa.ExtractByMask(freqDir + '/Dasy', 'NFVA_Whole')
        EbM.save('Pop_NoForView')
        reportFile.write(
            "Extract by Mask the EnviroAtlas Dasymetric (2011/October 2015) pixels within the polygons--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Calculate the Extracted Dasy Population with Each CBG """
        arcpy.sa.ZonalStatisticsAsTable(freqDir + '/BG_Alb', 'bgrp',
                                        'Pop_NoForView', 'BG_TWV', 'DATA',
                                        'SUM')
        reportFile.write(
            "Calculate Zonal Statistics as Table for the extracted dasymetrics with the zones being the 2010 block groups for the EnviroAtlas community.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """-------- Calculate Total Dasy Population, if necessary ------------------ """
        """ Use the existing data """
        fieldNames = [f.name for f in arcpy.ListFields(freqDir + '/BG_Alb')]
        if 'Dasy_Pop' in fieldNames:
            reportFile.write(
                "Calculate Zonal Statistics as a Table for the EnviroAtlas Dasymetrics (2011/October 2015) with the zones being the 2010 block groups within the EnviroAtlas community boundary. Add resulting population sums to the community block groups as attribute Dasy_Pop--Dasy_Pop"
                + '--\n')
            """ Create population data """
        else:
            arcpy.AddField_management(freqDir + '/BG_Alb', 'Dasy_Pop', 'LONG')
            ZonalStatisticsAsTable(freqDir + '/BG_Alb', 'bgrp',
                                   freqDir + '/Dasy', freqDir + '/Dasy_ZS', '',
                                   'SUM')
            arcpy.JoinField_management(freqDir + '/BG_Alb', 'bgrp',
                                       freqDir + '/Dasy_ZS', 'bgrp', ['SUM'])
            arcpy.CalculateField_management(freqDir + '/BG_Alb', 'Dasy_Pop',
                                            '!SUM!', 'PYTHON_9.3')
            arcpy.DeleteField_management(freqDir + '/BG_Alb', ['SUM'])
            arcpy.JoinField_management(freqDir + '/BG', 'bgrp',
                                       freqDir + '/BG_Alb', 'bgrp',
                                       ['Dasy_Pop'])
            reportFile.write(
                "Calculate Zonal Statistics as a Table for the EnviroAtlas Dasymetrics (2011/October 2015) with the zones being the 2010 block groups within the EnviroAtlas community boundary. Add resulting population sums to the community block groups as attribute Dasy_Pop--"
                + time.strftime('%Y%m%d--%H%M%S') + '--\n')
            ReuseRF.write("Dasy_Pop--" + time.strftime('%Y%m%d--%H%M%S') +
                          '--\n')
        """-------- Create Final Table --------------------------------------------- """
        arcpy.TableToTable_conversion(freqDir + '/BG_Alb', workDir, 'TreeWV',
                                      '', 'bgrp')
        arcpy.DeleteField_management('TreeWV', [
            'PLx2_Pop', 'PLx2_Pct', 'SUM_HOUSIN', 'NonWhite', 'LandA_M',
            'Density', 'under_1', 'under_1pct', 'under_13', 'under_13pc',
            'over_70', 'over_70pct', 'Shape_Length', 'Shape_Leng',
            'NonWhite_Pop', 'NonWt_Pct', 'Area_M', 'Shape_Le_1', 'Shape_Area',
            'ALAND', 'NonWhite_P', 'H_Income_M', 'State'
        ])
        TreeView = 'TreeWV'
        reportFile.write(
            "Create a new table based on the EnviroAtlas community block groups table retaining the BGRP and Dasy_Pop fields--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Add fields to new table """
        arcpy.AddField_management(TreeView, 'WVT_Pop', 'LONG')
        arcpy.AddField_management(TreeView, 'WVT_Pct', 'FLOAT', 5, 2)
        reportFile.write(
            "Add fields to the new table for WVT_Pop (long), WVT_Pct (float).--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Join Each Table to the final table and WVW_Pop """
        arcpy.JoinField_management(TreeView, 'bgrp', 'BG_TWV', 'bgrp', ['SUM'])
        arcpy.CalculateField_management(TreeView, 'WVT_Pop', '!SUM!',
                                        'PYTHON_9.3')
        arcpy.MakeTableView_management(TreeView, 'TreeView_Tbl')
        arcpy.SelectLayerByAttribute_management('TreeView_Tbl',
                                                'NEW_SELECTION', 'SUM IS NULL')
        arcpy.CalculateField_management('TreeView_Tbl', 'WVT_Pop', 0,
                                        'PYTHON_9.3')
        arcpy.SelectLayerByAttribute_management('TreeView_Tbl',
                                                'CLEAR_SELECTION')
        arcpy.DeleteField_management('TreeView_Tbl', 'SUM')
        reportFile.write(
            "Join the zonal statistics table with the new table to calculate the new fields: WVT_Pop = zonal statistics.SUM; remove join--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Calculate WVT_Pct """
        arcpy.SelectLayerByAttribute_management('TreeView_Tbl',
                                                'NEW_SELECTION', 'Dasy_Pop >0')
        arcpy.CalculateField_management(
            'TreeView_Tbl', 'WVT_Pct',
            '"%.2f" % (float(!WVT_Pop!)/float(!Dasy_Pop!) * 100)',
            'PYTHON_9.3')
        arcpy.SelectLayerByAttribute_management('TreeView_Tbl',
                                                'CLEAR_SELECTION')
        reportFile.write(
            "Calculate field WVT_Pct = WVT_Pop / Dasy_Pop * 100 (limited to 2 decimal places).--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Calculate NULL values, where applicable """
        arcpy.SelectLayerByAttribute_management('TreeView_Tbl',
                                                'NEW_SELECTION',
                                                'Dasy_Pop = 0')
        arcpy.CalculateField_management('TreeView_Tbl', 'WVT_Pct', '-99999',
                                        'PYTHON_9.3')
        arcpy.CalculateField_management('TreeView_Tbl', 'WVT_Pop', '-99999',
                                        'PYTHON_9.3')
        arcpy.SelectLayerByAttribute_management('TreeView_Tbl',
                                                'CLEAR_SELECTION')
        arcpy.DeleteField_management('TreeView_Tbl', [
            'SUM_POP10', 'EAID', 'NonWhite', 'LandA_M', 'Density', 'Dasy_Pop',
            'SUM'
        ])
        print("Dasy raster summarized to BGs and stats calculated..." +
              time.asctime())
        reportFile.write(
            "Calculate fields where Dasy_Pop = 0: All Fields = -99999.--" +
            time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """-------- Check that the Analysis Area is covered by the LC -------------- """
        """ Create a Polygon Version of the LC """
        if arcpy.Exists(freqDir + '/LC_Poly') == False:
            arcpy.env.extent = freqDir + '/LC'
            arcpy.env.snapRaster = freqDir + '/LC'
            ReC = arcpy.sa.Reclassify(
                str(freqDir) + '/LC', 'Value',
                arcpy.sa.RemapValue([[0, 0], [10, 1], [20, 1], [21,
                                                                1], [22, 1],
                                     [30, 1], [40, 1], [52, 1], [70, 1],
                                     [80, 1], [82, 1], [91, 1], [92, 1]]))
            ReC.save(str(freqDir) + '/AreaIO')
            arcpy.RasterToPolygon_conversion(
                str(freqDir) + '/AreaIO',
                str(freqDir) + '/LC_Poly', 'SIMPLIFY')
            arcpy.EliminatePolygonPart_management(
                str(freqDir) + '/LC_Poly',
                str(freqDir) + '/LC_Poly_EP', 'PERCENT', '', '5',
                'CONTAINED_ONLY')
            arcpy.Delete_management(str(freqDir) + '/LC_Poly')
            arcpy.Rename_management(
                str(freqDir) + '/LC_Poly_EP',
                str(freqDir) + '/LC_Poly')
        """ Buffer the LC Polygon by -500m """
        if arcpy.Exists(freqDir + '/Bnd_Cty_500m') == False:
            arcpy.Buffer_analysis(
                str(freqDir) + '/Bnd_Cty',
                str(freqDir) + '/Bnd_Cty_500m', '500 meters')
            arcpy.EliminatePolygonPart_management(
                str(freqDir) + '/Bnd_Cty_500m',
                str(freqDir) + '/Bnd_Cty_500m_EP', 'PERCENT', '', '30',
                'CONTAINED_ONLY')
            arcpy.Delete_management(str(freqDir) + '/Bnd_Cty_500m')
            arcpy.Rename_management(
                str(freqDir) + '/Bnd_Cty_500m_EP',
                str(freqDir) + '/Bnd_Cty_500m')
        """ Identify whether LC is large enough """
        arcpy.MakeFeatureLayer_management(str(freqDir) + '/LC_Poly', 'LClyr')
        arcpy.MakeFeatureLayer_management(
            str(freqDir) + '/Bnd_Cty_500m', 'BC_500lyr')

        arcpy.SelectLayerByLocation_management('BC_500lyr',
                                               'COMPLETELY_WITHIN', 'LClyr',
                                               '', 'NEW_SELECTION')
        bigEnough = float(arcpy.GetCount_management('BC_500lyr').getOutput(0))
        arcpy.SelectLayerByAttribute_management('BC_500lyr', 'CLEAR_SELECTION')
        """ If the LC isn't large enough, edit erroneous BGS """
        if bigEnough == 0:
            """ Identify BGs within 50m of the LC edge """
            arcpy.Buffer_analysis(
                str(freqDir) + '/LC_Poly', 'LC_Poly_Minus50', '-50 meters',
                'FULL', 'FLAT', 'ALL')
            arcpy.MakeFeatureLayer_management('LC_Poly_Minus50', 'Minus50')
            arcpy.MakeFeatureLayer_management(freqDir + '/BG', 'BG')

            arcpy.SelectLayerByLocation_management('BG', 'COMPLETELY_WITHIN',
                                                   'Minus50', '',
                                                   'NEW_SELECTION', 'INVERT')

            bgValue = float(arcpy.GetCount_management('BG').getOutput(0))
            print("LC extends beyond BG boundary, carry on..." +
                  time.asctime())
            """ For all BGs too close to the LC edge, assign both fields a value of -99998 """
            if bgValue > 0:
                bgrps = []
                cursor = arcpy.SearchCursor('BG')
                for row in cursor:
                    value = row.getValue('bgrp')
                    bgrps.append(value)
                bgrps = list(set(bgrps))
                expression = ''
                for bgrp in bgrps:
                    expression = expression + " OR bgrp = '" + str(bgrp) + "'"
                expression = expression[4:]
                arcpy.SelectLayerByAttribute_management(
                    'TreeView_Tbl', 'NEW_SELECTION', expression)
                arcpy.CalculateField_management('TreeView_Tbl', 'WVT_Pop',
                                                '-99998', 'PYTHON_9.3')
                arcpy.CalculateField_management('TreeView_Tbl', 'WVT_Pct',
                                                '-99998', 'PYTHON_9.3')
                arcpy.SelectLayerByAttribute_management(
                    'TreeView_Tbl', 'CLEAR_SELECTION')
            print(
                "LC doesn't extend beyond BGs, removing border BGs from analysis..."
                + time.asctime())
            reportFile.write(
                "Calculate Field for BGs within 50m of the edge of the land cover, WVT_Pop and WVW_Pct = -99998.--"
                + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        """ Create final table """
        arcpy.CopyRows_management('TreeView_Tbl', 'TreeWV_Fnl')
        try:
            arcpy.Delete_management(finalDir + '/' + str(city) + '_TreeWV')
        except:
            pass
        arcpy.TableToTable_conversion('TreeWV_Fnl', finalDir, city + '_TreeWV')
        allFields = [
            f.name for f in arcpy.ListFields(finalDir + '/' + city + '_TreeWV')
        ]
        for field in allFields:
            if field not in ['bgrp', 'OBJECTID', 'WVT_Pop', 'WVT_Pct']:
                arcpy.DeleteField_management(finalDir + '/' + city + '_TreeWV',
                                             [field])
            print 'Window Views of Trees End Time: ' + time.asctime() + '\n'
        reportFile.write(
            "Export the fields to be displayed in the EnviroAtlas to a final gdb table. WVT_Pop, WVT_Pct--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        #-------- COMPELETE LOGFILES ---------------------------------------------
        reportFile.close()
        ReuseRF.close()

#-----------------------------------------------------------------------------
# END ANALYSIS
#-----------------------------------------------------------------------------
    except:
        """ This part of the script executes if anything went wrong in the main script above """
        #-------- PRINT ERRORS ---------------------------------------------------
        print "\nSomething went wrong.\n\n"
        print "Python Traceback Message below:"
        print traceback.format_exc()
        print "\nArcMap Error Messages below:"
        print arcpy.GetMessages(2)
        print "\nArcMap Warning Messages below:"
        print arcpy.GetMessages(1)

        #-------- COMPLETE LOGFILE ------------------------------------------------
        reportFile.write("\nSomething went wrong.\n\n")
        reportFile.write("Pyton Traceback Message below:")
        reportFile.write(traceback.format_exc())
        reportFile.write("\nArcMap Error Messages below:")
        reportFile.write(arcpy.GetMessages(2))
        reportFile.write("\nArcMap Warning Messages below:")
        reportFile.write(arcpy.GetMessages(1))

        reportFile.write("\n\nEnded at " + time.asctime() + '\n')
        reportFile.write("\n---End of Log File---\n")

        if reportFile:
            reportFile.close()
Exemplo n.º 27
0
                # Point arcpy at the target feature dataset and allow outputs
                # to be overwritten on re-runs.
                arcpy.env.workspace = str(os.path.join(tools_folder, gdb, pmn))
                arcpy.env.overwriteOutput = True
                print arcpy.env.workspace
                # Process: Project (moving)
                print 'Moving Feature ' + output + ' to Database'

                # Project the shapefile into WGS 1984 (geographic) while
                # copying it into the database.
                arcpy.Project_management(
                    shp, output,
                    "GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]]",
                    "", "", "NO_PRESERVE_SHAPE", "", "NO_VERTICAL")
                #Process : Delete Field
                print 'Deleting Originals Fields for Feature ' + output
                # Drop the original attribute; skip this feature if the field
                # is absent or locked (Python-2 `except ExcType, var` syntax).
                try:
                    arcpy.DeleteField_management(output, "wadName")
                except arcpy.ExecuteError, error:
                    continue

                print "repeat"

            except arcpy.ExecuteError, error:
                # NOTE(review): the message concatenates two literals with no
                # feature name in between — presumably meant to include
                # `output` in the string — TODO confirm.
                print "Something went wrong handling " + ". Here's a traceback:"
                print output
                continue

        else:

            pass

        # Advance the loop counter regardless of branch taken.
        lompati = lompati + 1
Exemplo n.º 28
0
# Points with no near feature (NEAR_FID IS NULL) mark line ends that touch the
# polygon boundary; mark the lines touching them for dissolution.
SelectionPoints = arcpy.SelectLayerByAttribute_management(
    MakeFracTEMPToPoints, "NEW_SELECTION", "\"NEAR_FID\" IS NULL")
SelectLine = arcpy.SelectLayerByLocation_management(MakeFracTEMP,
                                                    "BOUNDARY_TOUCHES",
                                                    SelectionPoints, "",
                                                    "NEW_SELECTION")
# Tag selected lines with a common Fusion value (VB expression syntax) so the
# dissolve below merges them into one feature.
arcpy.CalculateField_management(SelectLine, "Fusion", "10000", "VB", "")

FracPoly_TEMP = arcpy.Dissolve_management(FracTEMP,
                                          "%ScratchWorkspace%\\FracPoly_TEMP",
                                          "Fusion", "", "MULTI_PART",
                                          "DISSOLVE_LINES")

# Explode the dissolved multipart result and drop the temporary grouping field.
FracPoly = arcpy.MultipartToSinglepart_management(
    FracPoly_TEMP, "%ScratchWorkspace%\\FracPoly")
arcpy.DeleteField_management(FracPoly, "Fusion")

ncurrentstep += 1
arcpy.AddMessage("Split the input polygon - Step " + str(ncurrentstep) + "/" +
                 str(nstep))
# dS.SLEM is a project helper (defined elsewhere) that disaggregates the
# polygon by DisaggregationStep; sort the pieces for downstream ordering.
PolySplitTEMP = dS.SLEM(FracPoly, DisaggregationStep,
                        "%ScratchWorkspace%\\PolySplitTEMP", ScratchW, "true")
PolySplit = arcpy.Sort_management(
    PolySplitTEMP, "%ScratchWorkspace%\\PolySplit",
    [["Rank_UGO", "ASCENDING"], ["Distance", "ASCENDING"]])

ncurrentstep += 1
arcpy.AddMessage("Converting Split polygon to points - Step " +
                 str(ncurrentstep) + "/" + str(nstep))
PolySplitToPoint = arcpy.FeatureToPoint_management(
    PolySplit, "%ScratchWorkspace%\\PolySplitToPoint", "INSIDE")
Exemplo n.º 29
0
    def execute(self, params, messages):
        arcpy.AddMessage(
            """Welcome to the Source Feature and EO Assigner! This tool is designed to prepare a feature class or shapefile for bulk load into Biotics by assigning an existing or new SFID and EOID grouping variable to observations based on eparation distance. This used to be done manually, so sit back and enjoy all the other work you can be doing instead of this!"""
        )

        in_points = params[0].valueAsText
        in_lines = params[1].valueAsText
        in_poly = params[2].valueAsText
        species_code = params[3].valueAsText
        lu_separation = params[4].valueAsText
        eo_reps = params[5].valueAsText
        eo_id_field = params[6].valueAsText
        eo_sourcept = params[7].valueAsText
        eo_sourceln = params[8].valueAsText
        eo_sourcepy = params[9].valueAsText
        sf_id_field = params[10].valueAsText
        species_code_field = params[11].valueAsText

        arcpy.env.workspace = "in_memory"

        arcpy.AddMessage("Preparing input data")
        #prepare single fc from biotics sf fcs
        sfs_in = [eo_sourcept, eo_sourceln, eo_sourcepy]
        sfs_out = ["eo_sourcept", "eo_sourceln", "eo_sourcepy"]
        for sf_in, sf_out in zip(sfs_in, sfs_out):
            arcpy.Buffer_analysis(sf_in, sf_out, 1)
        sf_merge = arcpy.Merge_management(sfs_out, "sf_merge")
        sf_lyr = arcpy.MakeFeatureLayer_management(sf_merge, "sf_lyr")

        data_in = []
        data_out = []
        if in_points:
            data_in.append(in_points)
            data_out.append("pts")
        if in_lines:
            data_in.append(in_lines)
            data_out.append("lines")
        if in_poly:
            data_in.append(in_poly)
            data_out.append("polys")

        join_id = 1
        for i, o in zip(data_in, data_out):
            arcpy.AddField_management(i, "temp_join_id", "TEXT")
            with arcpy.da.UpdateCursor(i, "temp_join_id") as cursor:
                for row in cursor:
                    row[0] = str(join_id)
                    cursor.updateRow(row)
                    join_id += 1
            arcpy.Buffer_analysis(i, o, 1)
        data_merge = arcpy.Merge_management(data_out, "data_merge")
        data_lyr = arcpy.MakeFeatureLayer_management(data_merge, "data_lyr")

        if arcpy.ListFields(data_lyr, species_code)[0].type == 'Integer':
            species_query = "{}={}"
        else:
            species_query = "{}='{}'"
        if arcpy.ListFields(data_lyr, species_code_field)[0].type == 'Integer':
            eo_species_query = "{}={}"
        else:
            eo_species_query = "{}='{}'"

        #get name of true OID field
        objectid_field = arcpy.Describe(data_lyr).OIDFieldName

        #create feature layers to allow for selections
        eo_reps = arcpy.MakeFeatureLayer_management(eo_reps, "eo_reps")

        #add EO/SF ID fields if they do not already exist
        add_fields_text = ["SF_ID", "SF_NEW", "EO_ID", "EO_NEW"]
        for field in add_fields_text:
            if len(arcpy.ListFields(data_lyr, field)) == 0:
                arcpy.AddField_management(data_lyr, field, "TEXT", "", "", 50)
        add_fields_int = ["UNIQUEID"]
        for field in add_fields_int:
            if len(arcpy.ListFields(data_lyr, field)) == 0:
                arcpy.AddField_management(data_lyr, field, "LONG")

        #set word index to assign words to new EO groups
        word_index = 1
        observation_num = 1

        arcpy.AddMessage("Beginning to assign EO IDs")
        #get total records in data_lyr for progress reporting messages
        total_obs = arcpy.GetCount_management(data_lyr)
        #start assigning loop
        search_fields = [
            objectid_field, "EO_ID", "EO_NEW", species_code, lu_separation
        ]
        with arcpy.da.SearchCursor(data_lyr, search_fields) as cursor:
            for row in cursor:
                objectid = row[0]
                #if one of the EOID fields already have a value, continue on to next feature
                if row[2] != None and (row[1] != None or row[1] != 0):
                    arcpy.AddMessage(
                        "ObjectID " + str(objectid) +
                        " EO Observation number " + str(observation_num) +
                        "/" + str(total_obs) +
                        " has already been assigned to a new or existing EO.")
                    pass
                else:
                    sname = row[3]
                    distance = str(row[4] * 1000) + " METERS"

                    #select feature and assign sname and separation distance variables
                    arcpy.SelectLayerByAttribute_management(
                        data_lyr, "NEW_SELECTION",
                        "{}={}".format(objectid_field, objectid))
                    #check for existing EO reps within separation distance of feature
                    arcpy.SelectLayerByAttribute_management(
                        eo_reps, 'NEW_SELECTION',
                        eo_species_query.format(species_code_field, sname))
                    arcpy.SelectLayerByLocation_management(
                        eo_reps, "WITHIN_A_DISTANCE", data_lyr, distance,
                        "SUBSET_SELECTION")
                    #check for selection on eo_reps layer - if there is a selection, get eoid, select all observations within the separation distance, and assign existing eoid to selected features
                    selection_num = arcpy.Describe(eo_reps).fidset
                    if selection_num is not u'':
                        with arcpy.da.SearchCursor(eo_reps,
                                                   eo_id_field) as cursor:
                            #eoid = sorted({row[0] for row in cursor}, reverse=True)[0] #use this if keeping newest EO
                            eoid = ",".join(
                                sorted({str(row[0])
                                        for row in cursor})
                            )  #use this if filling with EOIDs of all EOs within separation distance
                        #set arbitrary unequal counts to start while loop
                        countBefore = 0
                        countAfter = 1
                        while (countBefore != countAfter):
                            countBefore = int(
                                arcpy.GetCount_management(
                                    "data_lyr").getOutput(0))
                            arcpy.SelectLayerByLocation_management(
                                data_lyr, "WITHIN_A_DISTANCE", data_lyr,
                                distance, "ADD_TO_SELECTION")
                            arcpy.SelectLayerByAttribute_management(
                                data_lyr, "SUBSET_SELECTION",
                                species_query.format(species_code, sname))
                            countAfter = int(
                                arcpy.GetCount_management(
                                    "data_lyr").getOutput(0))
                        with arcpy.da.UpdateCursor(data_lyr,
                                                   "EO_ID") as cursor:
                            for row in cursor:
                                row[0] = str(eoid)
                                cursor.updateRow(row)
                        arcpy.AddMessage(
                            "ObjectID " + str(objectid) + ", along with " +
                            str(countAfter - 1) +
                            " observations were assigned an existing EO: " +
                            str(eoid) + ". " + str(observation_num) + "/" +
                            str(total_obs) + " completed.")
                    #if no existing EOs selected within separation distance, select all observations within the separation distance and assign new random word
                    else:
                        countBefore = 0
                        countAfter = 1
                        while (countBefore != countAfter):
                            countBefore = int(
                                arcpy.GetCount_management(
                                    "data_lyr").getOutput(0))
                            arcpy.SelectLayerByLocation_management(
                                data_lyr, "WITHIN_A_DISTANCE", data_lyr,
                                distance, "ADD_TO_SELECTION")
                            arcpy.SelectLayerByAttribute_management(
                                data_lyr, "SUBSET_SELECTION",
                                species_query.format(species_code, sname))
                            countAfter = int(
                                arcpy.GetCount_management(
                                    "data_lyr").getOutput(0))
                        with arcpy.da.UpdateCursor(data_lyr,
                                                   "EO_NEW") as cursor:
                            for row in cursor:
                                row[0] = str(
                                    word_index)  #word_list[word_index]
                                cursor.updateRow(row)
                        arcpy.AddMessage(
                            "ObjectID " + str(objectid) + ", along with " +
                            str(countAfter - 1) +
                            " observations were assigned a new EO: " +
                            str(word_index) + ". " + str(observation_num) +
                            "/" + str(total_obs) + " completed.")
                        word_index += 1
                observation_num += 1
                arcpy.SelectLayerByAttribute_management(
                    data_lyr, "CLEAR_SELECTION")

        arcpy.AddMessage("Beginning to assign SF IDs")
        observation_num = 1
        search_fields = [objectid_field, "SF_ID", "SF_NEW", species_code]
        with arcpy.da.SearchCursor(data_lyr, search_fields) as cursor:
            for row in cursor:
                objectid = row[0]
                if row[2] != None and (row[1] != None or row[1] != 0):
                    arcpy.AddMessage(
                        "ObjectID " + str(objectid) +
                        " SF Observation number " + str(observation_num) +
                        "/" + str(total_obs) +
                        " has already been assigned to a new or existing SF.")
                else:
                    sname = row[3]

                    #check for existing SFs within 9m of feature (8m because of 1m buffer on SF layers)
                    arcpy.SelectLayerByAttribute_management(
                        data_lyr, "NEW_SELECTION",
                        "{}={}".format(objectid_field, objectid))
                    arcpy.SelectLayerByAttribute_management(
                        sf_lyr, 'NEW_SELECTION',
                        eo_species_query.format(species_code_field, sname))
                    arcpy.SelectLayerByLocation_management(
                        sf_lyr, "WITHIN_A_DISTANCE", data_lyr, "7 METERS",
                        "SUBSET_SELECTION")
                    #check for selection on sf_merge layer - if there is a selection, get sfid, select all observations within the separation distance, and assign existing eoid to selected features
                    if arcpy.Describe('sf_lyr').fidset is not u'':
                        with arcpy.da.SearchCursor('sf_lyr',
                                                   sf_id_field) as cursor:
                            #sfid = sorted({row[0] for row in cursor}, reverse=True)[0] #use this line if you want to use the newest SF ID within separation distance
                            sfid = ",".join(
                                sorted({str(row[0])
                                        for row in cursor})
                            )  # use this line if you want to list all SF IDs within separation distance
                        countBefore = 0
                        countAfter = 1
                        while (countBefore != countAfter):
                            countBefore = int(
                                arcpy.GetCount_management(
                                    "data_lyr").getOutput(0))
                            arcpy.SelectLayerByLocation_management(
                                data_lyr, "WITHIN_A_DISTANCE", data_lyr,
                                "7 METERS", "ADD_TO_SELECTION")
                            arcpy.SelectLayerByAttribute_management(
                                data_lyr, "SUBSET_SELECTION",
                                species_query.format(species_code, sname))

                            countAfter = int(
                                arcpy.GetCount_management(
                                    "data_lyr").getOutput(0))
                        with arcpy.da.UpdateCursor(data_lyr,
                                                   "SF_ID") as cursor:
                            for row in cursor:
                                row[0] = sfid
                                cursor.updateRow(row)
                        arcpy.AddMessage(
                            "ObjectID " + str(objectid) + ", along with " +
                            str(countAfter - 1) +
                            " observations were assigned an existing SF: " +
                            str(sfid) + ". " + str(observation_num) + "/" +
                            str(total_obs) + " completed.")
                    #if no existing SFs selected within separation distance, select all observations within the separation distance and assign new random word
                    else:
                        countBefore = 0
                        countAfter = 1
                        while (countBefore != countAfter):
                            countBefore = int(
                                arcpy.GetCount_management(
                                    "data_lyr").getOutput(0))
                            arcpy.SelectLayerByLocation_management(
                                data_lyr, "WITHIN_A_DISTANCE", data_lyr,
                                "7 METERS", "ADD_TO_SELECTION")
                            arcpy.SelectLayerByAttribute_management(
                                data_lyr, "SUBSET_SELECTION",
                                species_query.format(species_code, sname))
                            countAfter = int(
                                arcpy.GetCount_management(
                                    "data_lyr").getOutput(0))
                        with arcpy.da.UpdateCursor(
                                data_lyr, ["SF_NEW", "EO_NEW"]) as cursor:
                            for row in cursor:
                                if row[1] != None:
                                    sf_id = row[1] + "_" + str(
                                        word_index)  #word_list[word_index]
                                    row[0] = sf_id
                                else:
                                    sf_id = str(
                                        word_index)  #word_list[word_index]
                                    row[0] = sf_id
                                cursor.updateRow(row)
                        arcpy.AddMessage(
                            "ObjectID " + str(objectid) + ", along with " +
                            str(countAfter - 1) +
                            " observations were assigned a new SF: " + sf_id +
                            ". " + str(observation_num) + "/" +
                            str(total_obs) + " completed.")
                        word_index += 1
                observation_num += 1
                arcpy.SelectLayerByAttribute_management(
                    data_lyr, "CLEAR_SELECTION")

        #create unique id value for each unique source feature
        i = 1
        with arcpy.da.SearchCursor(data_lyr,
                                   ["SF_ID", "SF_NEW", "UNIQUEID"]) as cursor:
            sfid1 = sorted({row[0] for row in cursor})
        with arcpy.da.SearchCursor(data_lyr,
                                   ["SF_ID", "SF_NEW", "UNIQUEID"]) as cursor:
            sfid2 = sorted({row[1] for row in cursor})
        sfid = sfid1 + sfid2
        sfid = [x for x in sfid if x is not None]
        sfid = [x.encode('UTF8') for x in sfid]
        for sf in sfid:
            with arcpy.da.UpdateCursor(
                    data_lyr, ["SF_ID", "SF_NEW", "UNIQUEID"]) as cursor:
                for row in cursor:
                    if row[0] == sf:
                        row[2] = i
                        cursor.updateRow(row)
                    elif row[1] == sf:
                        row[2] = i
                        cursor.updateRow(row)
                    else:
                        pass
            i += 1

        add_fields = add_fields_int + add_fields_text
        for data in data_in:
            arcpy.JoinField_management(data, "temp_join_id", data_lyr,
                                       "temp_join_id", add_fields)
            arcpy.DeleteField_management(data, "temp_join_id")

        arcpy.Delete_management("in_memory")
        return
def calculateJoinedFields(layer):
    """Fold join-suffixed field values back into their base fields.

    After a table join, each base field has a suffixed companion
    (e.g. ``MAIN_TYP_1`` for ``MAIN_TYPE``).  This overwrites every base
    field with its joined counterpart, then drops the join artifacts and
    the now-redundant suffixed fields from *layer*.
    """
    # (base field, field-calculator expression) pairs, applied in order.
    # The suffixed names are truncated to 10 characters by the join
    # (shapefile/dBASE field-name limit), hence MAIN_TYP_1, DATASET__1, etc.
    join_map = [
        ("MAIN_TYPE", "!MAIN_TYP_1!"),
        ("CLASSIFICA", "!CLASSIFI_1!"),
        ("RESOURCE_T", "!RESOURCE_1!"),
        ("ID_CLASS", "!ID_CLASS_1!"),
        ("MAIN_CLASS", "!MAIN_CLA_1!"),
        ("OTHER_CLAS", "!OTHER_CL_2!"),
        ("OTHER_CL_1", "!OTHER_CL_3!"),
        ("CLASS_DESC", "!CLASS_DE_1!"),
        ("ID_TYPE", "!ID_TYPE_1!"),
        ("OTHER_TYPE", "!OTHER_TY_2!"),
        ("OTHER_TY_1", "!OTHER_TY_3!"),
        ("TYPE_DESCR", "!TYPE_DES_1!"),
        ("DATA_SOURC", "!DATA_SOU_1!"),
        ("DATASET_AC", "!DATASET__1!"),
        ("FARMING_SY", "!FARMING__1!"),
        ("CROP_PLANT", "!CROP_PLA_1!"),
        ("JAN", "!JAN_1!"),
        ("FEB", "!FEB_1!"),
        ("MAR", "!MAR_1!"),
        ("APR", "!APR_1!"),
        ("MAY", "!MAY_1!"),
        ("JUN", "!JUN_1!"),
        ("JUL", "!JUL_1!"),
        ("AUG", "!AUG_1!"),
        ("SEP", "!SEP_1!"),
        ("OCT", "!OCT_1!"),
        ("NOV", "!NOV_1!"),
        ("DEC", "!DEC_1!"),
        ("REGION", "!REGION_1!"),
        ("PROVINCE", "!PROVINCE_1!"),
        ("CITYMUNI", "!CITYMUNI_1!"),
        ("BARANGAY", "!BARANGAY_1!"),
        ("REMARKS", "!REMARKS_1!"),
    ]
    for base_field, expression in join_map:
        arcpy.CalculateField_management(layer, base_field, expression,
                                        "PYTHON_9.3")

    # Drop the join bookkeeping columns and every suffixed source field.
    arcpy.DeleteField_management(layer, [
        "UTYPE", "NEAR_FID", "NEAR_DIST", "CLASSIFI_1", "RESOURCE_1",
        "ID_CLASS_1", "MAIN_CLA_1", "OTHER_CL_2", "OTHER_CL_3", "CLASS_DE_1",
        "ID_TYPE_1", "MAIN_TYP_1", "OTHER_TY_2", "OTHER_TY_3", "TYPE_DES_1",
        "DATA_SOU_1", "DATASET__1", "FARMING__1", "CROP_PLA_1", "JAN_1",
        "FEB_1", "MAR_1", "APR_1", "MAY_1", "JUN_1", "JUL_1", "AUG_1", "SEP_1",
        "OCT_1", "NOV_1", "DEC_1", "REGION_1", "PROVINCE_1", "CITYMUNI_1",
        "BARANGAY_1", "REMARKS_1"
    ])