Example #1
featureClass = ['Town2']

arcpy.env.workspace = current_workspace

arcpy.env.overwriteOutput = True

def createFeatureClass(in_fc_name):

    arcpy.CreateFeatureclass_management(current_workspace, in_fc_name, geometry_type, "", "DISABLED", "DISABLED", spatial_reference)

    print('Feature Class ' + in_fc_name + ' was successfully created.')

createFC = [createFeatureClass(fc) for fc in featureClass]

in_table = 'Town2'
field_name = 'Town_Name'
field_type = 'TEXT'
gdb = r'C:\Users\NeonJuiceman\Desktop\Exercise 3\Q5GDB.gdb'

arcpy.AddField_management(in_table, field_name, field_type)

arcpy.env.workspace = r'C:\Users\NeonJuiceman\Desktop\Exercise 3\Q5GDB.gdb'

arcpy.CreateDomain_management(gdb, 'Name1', 'Names of Towns', 'TEXT', 'CODED')

domDict = {'T':'Tigard', 'B':'Beaverton', 'C':'Corvallis', 'H':'Hillsboro', 'G':'Gresham'}

for code in domDict:
    arcpy.AddCodedValueToDomain_management(gdb, 'Name1', code, domDict[code])
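# Hedged follow-up sketch (not part of the original example): the code above
# builds the 'Name1' coded-value domain but never attaches it to Town_Name;
# assuming the same workspace and names, the binding step could look like this.
arcpy.AssignDomainToField_management(in_table, field_name, 'Name1')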
Example #2
        ODmatrix_bus__4_ = "ODmatrix_bus2"
        ODmatrix_Car__8_ = "ODmatrix_Car"
        GaussianPCP__7_ = "GaussianPCP"
        ODmatrix_bus__8_ = "ODmatrix_bus2"
        GaussianPCP__2_ = "GaussianPCP"
        ODmatrix_Car__12_ = "ODmatrix_Car"
        ODmatrix_bus__7_ = "ODmatrix_bus2"
        ODmatrix_Car__15_ = "ODmatrix_Car"
        ODmatrix_bus__5_ = "ODmatrix_bus2"
        GaussianPCP__9_ = "GaussianPCP"
        ODmatrix_Car__19_ = "ODmatrix_Car"
        ODmatrix_bus__2_ = "ODmatrix_bus2"
        GaussianPCP__11_ = "GaussianPCP"

        # Process: Add Field (6)
        arcpy.AddField_management(ODmatrix_Car__17_, "AkCar2", "DOUBLE", "",
                                  "", "", "", "NULLABLE", "NON_REQUIRED", "")

        # Process: Add Field
        arcpy.AddField_management(ODmatrix_Car__18_, "linearW", "DOUBLE", "",
                                  "", "", "", "NULLABLE", "NON_REQUIRED", "")

        # Process: Calculate Field
        ##230
        arcpy.CalculateField_management(
            ODmatrix_Car__22_, "linearW", "val", "VB",
            "Dim val\\n\\nIf [Total_Trav] < 10  Then\\nval = Exp (-5* 5/" +
            str(p) +
            ")\\nelseif [Total_Trav] > 20 Then\\nval = Exp (-25* 25/" +
            str(p) + " )\\nelse\\n     val = Exp (-15* 15/" + str(p) +
            ")\\nend if\\n")
        arcpy.AddField_management(ODmatrix_Car__16_, "PkWk", "DOUBLE", "", "",
Example #3
def lineDirections(inFds, outFds, outHtml):
    inCaf = os.path.basename(getCaf(inFds))
    nameToken = inCaf.replace('ContactsAndFaults', '')
    inCaf = inFds + '/' + inCaf
    fNodes = outFds + '/errors_' + nameToken + 'FaultDirNodes'
    fNodes2 = fNodes + '2'
    ### NEXT LINE IS A PROBLEM--NEED BETTER WAY TO SELECT FAULTS
    query = """ "TYPE" LIKE '%fault%' """
    testAndDelete('xxxFaults')
    arcpy.MakeFeatureLayer_management(inCaf, 'xxxFaults', query)
    testAndDelete(fNodes)
    addMsgAndPrint('  getting TO and FROM nodes')
    arcpy.FeatureVerticesToPoints_management('xxxFaults', fNodes, 'START')
    arcpy.AddField_management(fNodes, 'NodeType', 'TEXT', '#', '#', 5)
    arcpy.CalculateField_management(fNodes, 'NodeType', "'FROM'", 'PYTHON')
    testAndDelete(fNodes2)
    arcpy.FeatureVerticesToPoints_management('xxxFaults', fNodes2, 'END')
    arcpy.AddField_management(fNodes2, 'NodeType', 'TEXT', '#', '#', 5)
    arcpy.CalculateField_management(fNodes2, 'NodeType', "'TO'", 'PYTHON')
    addMsgAndPrint('  merging TO and FROM node classes')
    arcpy.Append_management(fNodes2, fNodes)
    testAndDelete(fNodes2)
    arcpy.AddXY_management(fNodes)
    arcpy.Sort_management(fNodes, fNodes2,
                          [['POINT_X', 'ASCENDING'], ['POINT_Y', 'ASCENDING']])
    testAndDelete(fNodes)
    fields = ['NodeType', 'POINT_X', 'POINT_Y']
    listOfSharedNodes = []
    with arcpy.da.SearchCursor(fNodes2, fields) as cursor:
        oldxy = None
        NodeTypes = []
        for row in cursor:
            nArcs = 0
            xy = (row[1], row[2])
            if xy == oldxy:
                NodeTypes.append(row[0])
                nArcs = nArcs + 1
            else:  # is new Node
                if len(NodeTypes) > 1:
                    listOfSharedNodes.append([oldxy, NodeTypes, nArcs])
                oldxy = xy
                NodeTypes = [row[0]]
                nArcs = 1
    addMsgAndPrint('  ' + str(len(listOfSharedNodes)) +
                   ' nodes in listOfSharedNodes')

    arcpy.CreateFeatureclass_management(outFds, os.path.basename(fNodes),
                                        'POINT')
    arcpy.AddField_management(fNodes, 'NodeTypes', 'TEXT', '#', '#', 40)
    arcpy.AddField_management(fNodes, 'NArcs', 'SHORT')

    fields = ["SHAPE@XY", "NodeTypes", "NArcs"]
    d = arcpy.da.InsertCursor(fNodes, fields)
    for aRow in listOfSharedNodes:
        if isFlippedNode(aRow[1]):
            nodeList = ''
            for nd in aRow[1]:
                nodeList = nodeList + nd + ','
            d.insertRow([aRow[0], nodeList[:-1], aRow[2]])
    testAndDelete(fNodes2)
    addMsgAndPrint('  ' + str(numberOfRows(fNodes)) +
                   ' nodes with arcs that may need flipping')
    outHtml.write('<h3>End-points of fault arcs that may need flipping</h3>\n')
    outHtml.write('&nbsp;&nbsp; ' + os.path.basename(fNodes) + '<br>\n')
    if numberOfRows(fNodes) > 0:
        outHtml.write('<tt>&nbsp;&nbsp;&nbsp; ' + str(numberOfRows(fNodes)) +
                      ' nodes</tt><br>\n')
    else:
        outHtml.write('<tt>&nbsp;&nbsp;&nbsp; no errors</tt><br>\n')
        testAndDelete(fNodes)
Example #4
def main(in_network_fc, in_network_table, outflow_id):
    arcpy.AddMessage("Searching for topology features and issues: ")

    # Get file geodatabase from input stream network feature class
    wspace_path = arcpy.Describe(in_network_fc).path
    wspace_type = arcpy.Describe(wspace_path).dataType

    # Check if input network feature class has required attribute fields
    req_fields = ["IsHeadwatr", "ReachID", "IsBraided"]
    input_fields = []
    field_objects = arcpy.ListFields(in_network_fc)
    for obj in field_objects:
        input_fields.append(obj.name)
    if set(req_fields) < set(input_fields):
        # Create temporary, in_memory version of stream network table
        if arcpy.Exists("in_network_fc"):
            arcpy.Delete_management("in_network_fc")
        if arcpy.Exists("in_network_table"):
            arcpy.Delete_management("in_network_table")
        if arcpy.Exists("tmp_memory_table"):
            arcpy.Delete_management("tmp_memory_table")
        arcpy.MakeTableView_management(in_network_table,
                                       "in_network_table_view")
        arcpy.CopyRows_management("in_network_table_view",
                                  r"in_memory\tmp_network_table")
        arcpy.MakeTableView_management(r"in_memory\tmp_network_table",
                                       "tmp_network_table_view")

        # Add required fields
        code_field = arcpy.ListFields("tmp_network_table_view", "FTR_CODE")
        if len(code_field) != 1:
            arcpy.AddField_management("tmp_network_table_view", "FTR_CODE",
                                      "LONG")
            arcpy.CalculateField_management("tmp_network_table_view",
                                            "FTR_CODE", "0", "PYTHON_9.3")

        # Find network features and issues
        flow_direction("tmp_network_table_view")
        braids(in_network_fc, "tmp_network_table_view")
        duplicates(in_network_fc, "tmp_network_table_view")
        reach_pairs(in_network_fc, "tmp_network_table_view", outflow_id)
        disconnected(in_network_fc, "tmp_network_table_view")
        other_errors("tmp_network_table_view")

        # Clean up and write final output table
        oid_field = arcpy.Describe("tmp_network_table_view").OIDFieldName
        keep_fields = [oid_field, "ReachID", "FTR_CODE"]
        list_obj = arcpy.ListFields("tmp_network_table_view")
        tmp_field_names = [f.name for f in list_obj]
        for field_name in tmp_field_names:
            if field_name not in keep_fields:
                arcpy.DeleteField_management("tmp_network_table_view",
                                             field_name)
        expr = """"{0}" > {1}""".format("FTR_CODE", "0")
        arcpy.SelectLayerByAttribute_management("tmp_network_table_view",
                                                "NEW_SELECTION", expr)
        if wspace_type == "Folder":
            arcpy.CopyRows_management("tmp_network_table_view",
                                      wspace_path + "\NetworkFeatures.dbf")
        elif wspace_type == "Workspace":
            arcpy.CopyRows_management("tmp_network_table_view",
                                      wspace_path + "\NetworkFeatures")
    else:
        arcpy.AddError(in_network_fc + " does not include required attribute fields. Please use the feature class " \
                                     "produced by the Build Network Topology Table tool.")
        sys.exit(0)
Example #5
## then delete features
if numberOfRows('cP2Layer') > 0:
    arcpy.DeleteFeatures_management('cP2Layer')

#adjust center point fields (delete extra, add any missing. Use NCGMP09_Definition as guide)
## get list of fields in centerPoints2
cp2Fields = fieldNameList(centerPoints2)
## add fields not in MUP as defined in Definitions
fieldDefs = tableDict['MapUnitPolys']
for fDef in fieldDefs:
    if fDef[0] not in cp2Fields:
        addMsgAndPrint('field ' + fDef[0] + ' is missing')
        try:
            if fDef[1] == 'String':
                arcpy.AddField_management(thisFC, fDef[0], transDict[fDef[1]],
                                          '#', '#', fDef[3], '#',
                                          transDict[fDef[2]])
            else:
                arcpy.AddField_management(thisFC, fDef[0], transDict[fDef[1]],
                                          '#', '#', '#', '#',
                                          transDict[fDef[2]])
            cp2Fields.append(fDef[0])
        except:
            addMsgAndPrint('Failed to add field ' + fDef[0] +
                           ' to feature class ' + featureClass)
            addMsgAndPrint(arcpy.GetMessages(2))

# if labelPoints specified
## add any missing fields to centerPoints2
if arcpy.Exists(labelPoints):
    lpFields = arcpy.ListFields(labelPoints)
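    # Hedged sketch of the truncated step (the original is cut off here): copy
    # onto centerPoints2 any labelPoints fields it is still missing. The use of
    # transDict to translate field types is an assumption, not from the original.
    for lpField in lpFields:
        if lpField.name not in cp2Fields and lpField.editable and not lpField.required:
            try:
                arcpy.AddField_management(centerPoints2, lpField.name,
                                          transDict[lpField.type], '#', '#',
                                          lpField.length)
                cp2Fields.append(lpField.name)
            except:
                addMsgAndPrint('Failed to add field ' + lpField.name)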
Example #6
import arcpy

arcpy.env.workspace = r'D:\GIS 610 _ Assignment 3\Exercise 3.gdb'
inFeatures = 'CallsforService'
fieldname = 'Crime_Explanation'
field_type = 'TEXT'

arcpy.AddField_management(inFeatures, fieldname, field_type)

featureClass = r'D:\GIS 610 _ Assignment 3\Exercise 3.gdb\CallsforService'
FieldNames = ['CFSType', 'Crime_Explanation']

with arcpy.da.UpdateCursor(featureClass, FieldNames) as cursor:
    for x in cursor:
        if x[0] == 'Burglary Call':
            x[1] = 'This is a burglary'
            cursor.updateRow(x)

print('Finished')
Example #7
# Create a new list of the points included in the validation
validoitavat = []
for track in GPS_lista:
    if "vr_" + track in reittilista:
        validoitavat.append(track)

# Calculate travel time by building a summary table from the DURATION_s field
GPS_tbl_fp = os.path.join(ws_dir, "Validointi_GPSajat.gdb")
for points in validoitavat:
    tablename = points
    outtbl = os.path.join(GPS_tbl_fp, tablename)
    statsFields = [["Trackname", "FIRST"], ["CycID", "FIRST"],
                   ["DURATION_s", "SUM"], ["DISTANCE_m", "SUM"],
                   ["SPEED_mps", "MEAN"]]
    arcpy.Statistics_analysis(points, outtbl, statsFields)
    arcpy.AddField_management(outtbl, "Trackname2", "TEXT")
    arcpy.CalculateField_management(outtbl, "Trackname2", "'" + points + "'",
                                    "PYTHON")
    # The summed duration is in seconds; add a field in which the time is calculated in minutes
    arcpy.AddField_management(outtbl, "DURATION_mins", "DOUBLE")
    arcpy.CalculateField_management(outtbl, "DURATION_mins",
                                    '!SUM_DURATION_s!/60', "PYTHON")

# List the summary tables of the GPS points
arcpy.env.workspace = GPS_tbl_fp
GPSsumtables = arcpy.ListTables()
# Combine them with Merge
GPStable = "Validointi_GPSsummary"
arcpy.Merge_management(GPSsumtables, GPStable)

# Select the travelled route from the road network with Select By Location
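# Hedged sketch of the truncated step above; "Tieverkko" (the road network) and
# "GPS_pisteet" are placeholder layer names, not taken from the original script.
arcpy.MakeFeatureLayer_management("Tieverkko", "tieverkko_lyr")
arcpy.SelectLayerByLocation_management("tieverkko_lyr", "INTERSECT",
                                       "GPS_pisteet", "", "NEW_SELECTION")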
Example #8
#11/20/2014 Trying Refresh Active View
#3/18/2015 Added Try/except to edit and update the acres and it worked!

import arcpy
arcpy.env.overwriteOutput = True

inFC = arcpy.GetParameterAsText(0)
workspace = arcpy.GetParameterAsText(1)

try:

    #Add Field

    arcpy.AddField_management(
        inFC,
        "ACRES",
        "DOUBLE",
    )

    #Calculate Field

    with arcpy.da.Editor(workspace) as edit:
        arcpy.CalculateField_management(
            inFC,
            "ACRES",
            '!shape.area@ACRES!',
            "PYTHON_9.3",
        )

except arcpy.ExecuteError:
    print(arcpy.GetMessages(2))
Example #9
def main(pathf, pathdab, infile, restart, ixstop, fname):
    
    '''
    pathf = "Y:/nilscp/GeologyMoon/FRESH_IMPACT_WILLIAMS_2018/"
    infile = pathf + 'XYdownloadSupplement.shp'
    pathdab = "Y:/nilscp/GeologyMoon/FRESH_IMPACT_WILLIAMS_2018/database.gdb/"
    fname = '0_200'
    '''
    
    bufferField = "BUFFER_TXT"
    sideType = "FULL"
    endType = "ROUND"
    dissolveType = "NONE"

    # path to folder
    os.chdir(pathf)
    
    # Set overwrite option
    arcpy.env.overwriteOutput = True
    
    arcpy.CheckOutExtension("3D")
    arcpy.CheckOutExtension("Spatial")
    
    # define paths and workspace (I need to create the gdb at some points)
    env.workspace = env.scratchWorkspace = pathdab
        
    # extract the centers of craters (OK regardless of the projection)
    arcpy.FeatureToPoint_management(infile, 'CENTER', "CENTROID")
    
    infile2 = pathdab + 'CENTER'								
    								
    # crater name and buffer extent
    fieldname1 = arcpy.ValidateFieldName("CRATER_ID")
    fieldname2 = arcpy.ValidateFieldName("BUFFER_TXT")
    
    # add fields
    arcpy.AddField_management(infile, fieldname1, "TEXT","","",30)
    
    # get the number of rows in infile
    n = int(arcpy.GetCount_management(infile2)[0])
    
    # prepare empty arrays
    diam = np.ones(n)
    crater_id = np.chararray(n, itemsize=30)
    buffer_txt = np.chararray(n, itemsize=30)
    
    ## some constant variables to define
    if restart:
        areac = np.loadtxt(pathf + "numbers" + fname + ".txt", delimiter=";")
        areaa_20_40 = areac[:,0]#np.concatenate((areac[:,0], np.zeros(1781))) # work around (only need to do that once)
        areaa_40_55 = areac[:,1] #np.concatenate((areac[:,1], np.zeros(1781)))
        areaa_55_80 = areac[:,2] #np.concatenate((areac[:,2], np.zeros(1781)))
        
        NAC_selected_tmp_20_40 = np.genfromtxt(pathf + "NAC_i20_40_" + fname + ".txt", dtype='str')
        NAC_selected_tmp_40_55 = np.genfromtxt(pathf + "NAC_i40_55_" + fname + ".txt", dtype='str')
        NAC_selected_tmp_55_80 = np.genfromtxt(pathf + "NAC_i55_80_" + fname + ".txt", dtype='str')
        edr_selected_tmp_20_40 = np.genfromtxt(pathf + "EDR_i20_40_" + fname + ".txt", dtype='str')
        edr_selected_tmp_40_55 = np.genfromtxt(pathf + "EDR_i40_55_" + fname + ".txt", dtype='str')
        edr_selected_tmp_55_80 = np.genfromtxt(pathf + "EDR_i55_80_" + fname + ".txt", dtype='str')
        
        NAC_selected_all_20_40 = []
        edr_selected_all_20_40 = []
        NAC_selected_all_40_55 = []
        edr_selected_all_40_55 = []
        NAC_selected_all_55_80 = []
        edr_selected_all_55_80 = []
        
        # needs to be list
        for ip, var in np.ndenumerate(NAC_selected_tmp_20_40):
            NAC_selected_all_20_40.append(NAC_selected_tmp_20_40[ip])
            edr_selected_all_20_40.append(edr_selected_tmp_20_40[ip])
            
        for ip, var in np.ndenumerate(NAC_selected_tmp_40_55):            
            NAC_selected_all_40_55.append(NAC_selected_tmp_40_55[ip])
            edr_selected_all_40_55.append(edr_selected_tmp_40_55[ip])
            
        for ip, var in np.ndenumerate(NAC_selected_tmp_55_80):            
            NAC_selected_all_55_80.append(NAC_selected_tmp_55_80[ip])
            edr_selected_all_55_80.append(edr_selected_tmp_55_80[ip])
    else:
        NAC_selected_all_20_40 = []
        edr_selected_all_20_40 = []
        NAC_selected_all_40_55 = []
        edr_selected_all_40_55 = []
        NAC_selected_all_55_80 = []
        edr_selected_all_55_80 = []
        areaa_20_40 = np.zeros(n)
        areaa_40_55 = np.zeros(n)
        areaa_55_80 = np.zeros(n)
    
    
    # we add info about the name of the craters here
    
    with arcpy.da.UpdateCursor(infile, ["Diameter", "CRATER_ID"]) as cursor:    	
        ix = 0
        for row in cursor:
            a = 'crater' + str(int(ix)).zfill(4)
            buffer_value = np.round((row[0]) * 10.0, decimals=4)
            b = str(buffer_value) + ' Meters'	
            row[1] = a
            cursor.updateRow(row)
            diam[ix] = row[0]
            crater_id[ix] = a
            buffer_txt[ix] = b
            ix = ix + 1
    
    # add two fields
    arcpy.AddField_management(infile2, fieldname1, "TEXT","","",30)
    arcpy.AddField_management(infile2, fieldname2, "TEXT","","",30)
    
    with arcpy.da.UpdateCursor(infile2, ["CRATER_ID", "BUFFER_TXT"]) as cursor:
        ix = 0
        for row in cursor:
            row[0] = crater_id[ix]
            row[1] = buffer_txt[ix]
            cursor.updateRow(row)
            ix = ix + 1
            
    #arcpy.AddField_management(infile2, fieldname3, "DOUBLE")
    #arcpy.AddField_management(infile2, fieldname4, "DOUBLE")
    #arcpy.AddField_management(infile2, fieldname5, "DOUBLE")
    
    # Make a layer from the feature class
    arcpy.MakeFeatureLayer_management("CENTER", "CENTER_lyr")
           
    with arcpy.da.UpdateCursor("CENTER_lyr", ["Shape@", "Lon", "Lat"]) as cursor:
        ix = 0
        
        for row in cursor:
            
            #print index
            #print (ix)
            
            # clear any previous selection here too (just to be sure)
            if ix > 0:
                arcpy.SelectLayerByAttribute_management("CENTER_lyr", "CLEAR_SELECTION")
            
            # run if not restarting, or if past the last index processed before the restart
            if (restart and ix > ixstop) or not restart:
                #query selection CENTER         
                query = "CRATER_ID = '" + crater_id[ix] + "'"
                print (query)
                arcpy.SelectLayerByAttribute_management("CENTER_lyr", "NEW_SELECTION", query)
                #print ("YESx2")
                
                # make a layer of the selection
                arcpy.CopyFeatures_management("CENTER_lyr", "CENTER_TMP")
                
                # old coordinate systems
                desc = arcpy.Describe("CENTER_TMP")
                spatialReference = desc.spatialReference
                
                # project to the right coordinate systems
                # central meridian should be replaced by the longitude
                # standard parallel_1 by the latitude
                cent_med = np.round(row[1],decimals=0)
                std_parall = np.round(row[2],decimals=0)
                
                str_bef = "PROJCS['Equirectangular_Moon',GEOGCS['GCS_Moon',DATUM['D_Moon',SPHEROID['Moon_localRadius',1737400.0,0.0]],PRIMEM['Reference_Meridian',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Equidistant_Cylindrical'],PARAMETER['false_easting',0.0],PARAMETER['false_northing',0.0],"
                str_cent_med = "PARAMETER['central_meridian'," + str(cent_med) + "],"
                str_parall = "PARAMETER['standard_parallel_1'," + str(std_parall) + "],"
                str_after = "UNIT['Meter',1.0]]"
                
                # the whole string is
                spatialReference_new = str_bef + str_cent_med + str_parall + str_after
                
                # projection
                arcpy.Project_management(in_dataset="CENTER_TMP", out_dataset="CENTER_PROJ", out_coor_system= spatialReference_new, transform_method="", in_coor_system=spatialReference, preserve_shape="NO_PRESERVE_SHAPE", max_deviation="", vertical="NO_VERTICAL")	
                
                # buffer creation
                arcpy.Buffer_analysis("CENTER_PROJ", "miniarea_TMP", bufferField, sideType, endType, dissolveType)
                
                # run feature to envelope tool
                arcpy.FeatureEnvelopeToPolygon_management(pathdab + "miniarea_TMP",
                                                          pathdab + "miniarea_square",
                                                          "SINGLEPART")
                
                
                # get area of the 10 diameters squared polygon
                area_bnd = "miniarea_square"
                with arcpy.da.SearchCursor(area_bnd, ["SHAPE@"]) as rows:
                
                    # get the area of the 10 diameters squared polygon
                    for row in rows:
                        feat = row[0]
                        fresh_crater_area = feat.area
                    
                # select nac images (left and right satellite images) between x and y degrees of incidence
                select_NAC("nac_all", area_bnd, "nac_selection_20_40", 20., 40.)
                select_NAC("nac_all", area_bnd, "nac_selection_40_55", 40., 55.)
                select_NAC("nac_all", area_bnd, "nac_selection_55_80", 55., 80.)
                
                # define covered area here!
                fieldname_area = arcpy.ValidateFieldName("COVERED_AREA")
                
                for shpfile in ["nac_selection_20_40", "nac_selection_40_55", "nac_selection_55_80"]:
                    if len(arcpy.ListFields(shpfile,"COVERED_AREA"))>0:
                        None
                    else:
                        arcpy.AddField_management(shpfile, fieldname_area, "DOUBLE") 
                
                #I could project the nac_selection here
                arcpy.Project_management(in_dataset="nac_selection_20_40", out_dataset="nac_selection_20_40_proj", out_coor_system= spatialReference_new, transform_method="", in_coor_system=spatialReference, preserve_shape="NO_PRESERVE_SHAPE", max_deviation="", vertical="NO_VERTICAL")	
                arcpy.Project_management(in_dataset="nac_selection_40_55", out_dataset="nac_selection_40_55_proj", out_coor_system= spatialReference_new, transform_method="", in_coor_system=spatialReference, preserve_shape="NO_PRESERVE_SHAPE", max_deviation="", vertical="NO_VERTICAL")	
                arcpy.Project_management(in_dataset="nac_selection_55_80", out_dataset="nac_selection_55_80_proj", out_coor_system= spatialReference_new, transform_method="", in_coor_system=spatialReference, preserve_shape="NO_PRESERVE_SHAPE", max_deviation="", vertical="NO_VERTICAL")	
                
                
                # selected NAC + EDR + Area covered
                #print ("last function")
                (NAC_selected_20_40, edr_source_selected_20_40, areac_20_40) = calculate_newly_added_area(pathdab, "nac_selection_20_40_proj", area_bnd, fresh_crater_area, spatialReference, spatialReference_new)
                #print ("2")
                (NAC_selected_40_55, edr_source_selected_40_55, areac_40_55) = calculate_newly_added_area(pathdab, "nac_selection_40_55_proj", area_bnd, fresh_crater_area, spatialReference, spatialReference_new)
                #print ("3")
                (NAC_selected_55_80, edr_source_selected_55_80, areac_55_80) = calculate_newly_added_area(pathdab, "nac_selection_55_80_proj", area_bnd, fresh_crater_area, spatialReference, spatialReference_new)
                
                
                #print (ix)
                arcpy.Delete_management("miniarea_square")
                arcpy.Delete_management("miniarea_TMP")
                arcpy.Delete_management("CENTER_PROJ")
                arcpy.Delete_management("CENTER_TMP")
                arcpy.Delete_management("nac_selection_20_40")
                arcpy.Delete_management("nac_selection_20_40_proj")
                arcpy.Delete_management("nac_selection_40_55")
                arcpy.Delete_management("nac_selection_40_55_proj")
                arcpy.Delete_management("nac_selection_55_80")
                arcpy.Delete_management("nac_selection_55_80_proj")            
                
                #NAC_selected_tmp = NAC_selected_20_40 + NAC_selected_40_55 + NAC_selected_55_80
                #edr_selected_tmp = edr_source_selected_20_40 + edr_source_selected_40_55 + edr_source_selected_55_80
                
                NAC_selected_all_20_40 = NAC_selected_all_20_40 + NAC_selected_20_40
                edr_selected_all_20_40 = edr_selected_all_20_40 + edr_source_selected_20_40
                areaa_20_40[ix] =  areac_20_40
                
                NAC_selected_all_40_55 = NAC_selected_all_40_55 + NAC_selected_40_55
                edr_selected_all_40_55 = edr_selected_all_40_55 + edr_source_selected_40_55
                areaa_40_55[ix] =  areac_40_55
                
                NAC_selected_all_55_80 = NAC_selected_all_55_80 + NAC_selected_55_80
                edr_selected_all_55_80 = edr_selected_all_55_80 + edr_source_selected_55_80
                areaa_55_80[ix] =  areac_55_80
                
                # save intermediate results every 25 iterations
                if (ix % 25 == 0):
                    
                    
                    output_nbr = np.column_stack((np.array(areaa_20_40), np.array(areaa_40_55), np.array(areaa_55_80)))
    
                    np.savetxt("numbers0_" + str(int(ix)) + ".txt", output_nbr, delimiter=";")
                    np.savetxt("NAC_i20_40_0_" + str(int(ix)) + ".txt", np.array((NAC_selected_all_20_40)), delimiter=";",fmt="%s")
                    np.savetxt("NAC_i40_55_0_" + str(int(ix)) + ".txt", np.array((NAC_selected_all_40_55)), delimiter=";",fmt="%s")
                    np.savetxt("NAC_i55_80_0_" + str(int(ix)) + ".txt", np.array((NAC_selected_all_55_80)), delimiter=";",fmt="%s")
                    np.savetxt("EDR_i20_40_0_" + str(int(ix)) + ".txt", np.array((edr_selected_all_20_40)), delimiter=";",fmt="%s")
                    np.savetxt("EDR_i40_55_0_" + str(int(ix)) + ".txt", np.array((edr_selected_all_40_55)), delimiter=";",fmt="%s")
                    np.savetxt("EDR_i55_80_0_" + str(int(ix)) + ".txt", np.array((edr_selected_all_55_80)), delimiter=";",fmt="%s")
                    
                elif (ix == 2281):
                    
                    output_nbr = np.column_stack((np.array(areaa_20_40), np.array(areaa_40_55), np.array(areaa_55_80)))
    
                    np.savetxt("numbers0_" + str(int(ix)) + ".txt", output_nbr, delimiter=";")
                    np.savetxt("NAC_i20_40_0_" + str(int(ix)) + ".txt", np.array((NAC_selected_all_20_40)), delimiter=";",fmt="%s")
                    np.savetxt("NAC_i40_55_0_" + str(int(ix)) + ".txt", np.array((NAC_selected_all_40_55)), delimiter=";",fmt="%s")
                    np.savetxt("NAC_i55_80_0_" + str(int(ix)) + ".txt", np.array((NAC_selected_all_55_80)), delimiter=";",fmt="%s")
                    np.savetxt("EDR_i20_40_0_" + str(int(ix)) + ".txt", np.array((edr_selected_all_20_40)), delimiter=";",fmt="%s")
                    np.savetxt("EDR_i40_55_0_" + str(int(ix)) + ".txt", np.array((edr_selected_all_40_55)), delimiter=";",fmt="%s")
                    np.savetxt("EDR_i55_80_0_" + str(int(ix)) + ".txt", np.array((edr_selected_all_55_80)), delimiter=";",fmt="%s")
                    
                    
                
            ix = ix + 1
            
    	
    return (NAC_selected_all_20_40, edr_selected_all_20_40, areaa_20_40, 
            NAC_selected_all_40_55, edr_selected_all_40_55, areaa_40_55, 
            NAC_selected_all_55_80, edr_selected_all_55_80, areaa_55_80) 
Example #10
# import feature Datasets path
env.workspace = arcpy.GetParameterAsText(0)
featureLists = arcpy.ListFeatureClasses()
print(featureLists)

# Add status field and fill Value
typeLists = ['pysys', 'sfsys', 'handalarmsys']
for type in typeLists:
    for feature in featureLists:
        # typeLenth = len(type)
        # if feature[0:typeLenth] == type:
        if type in feature:
            arcpy.AddField_management(feature,
                                      "status",
                                      "TEXT",
                                      field_length="50",
                                      field_alias="状态")
            arcpy.CalculateField_management(feature, "status", "'运行'",
                                            "PYTHON_9.3")
            # print(feature + "Finished")
            arcpy.AddMessage(feature + "_Finished")

typeLists = [
    'inhydrants', 'autoalarmsys', 'broadcastsys', 'spraysys', 'fire_curtains'
]
for type in typeLists:
    for feature in featureLists:
        typeLenth = len(type)
        if type in feature:
            arcpy.AddField_management(feature,
Example #11
    try:
        deletefield(fc, DissolveField)
        for field in fccol:
            deletefield(fc, field)
    except:
        addnmList = str(fc)
        FailedFileNm.append(addnmList)

for fc in fcs_in_workspace(InGDB):
    print "Start: " + str(fc)
    # add col based on import
    for field in fccol:
        name = str(field)
        print name
        if name is fccol_a:
            arcpy.AddField_management(fc, name, "TEXT", "", "", "10", "",
                                      "NULLABLE", "NON_REQUIRED", "")
        elif name == fccol_b:
            arcpy.AddField_management(fc, name, "TEXT", "", "", "5", "",
                                      "NULLABLE", "NON_REQUIRED", "")
        elif name == fccol_c:
            arcpy.AddField_management(fc, name, "TEXT", "", "", "5", "",
                                      "NULLABLE", "NON_REQUIRED", "")
        else:
            arcpy.AddField_management(fc, name, "TEXT", "", "", "75", "",
                                      "NULLABLE", "NON_REQUIRED", "")
    try:
        arcpy.Delete_management("fc_lyr")
        arcpy.MakeFeatureLayer_management(fc, "fc_lyr")
        # joins the fc to the JoinTable based on file name
        arcpy.AddJoin_management("fc_lyr", JoinFieldFC, JoinTable,
                                 JoinFieldTable, "KEEP_ALL")
Example #12
if not os.path.exists(os.path.join(outfolder, "scratch")):
    os.mkdir(os.path.join(outfolder, "scratch"))

scratch = os.path.join(outfolder, "scratch")
arcpy.env.workspace = scratch

# Watershed raster to polygons
arcpy.RasterToPolygon_conversion(wsraster, "wspoly1.shp", '', "Value")
wspoly1 = os.path.join(scratch, "wspoly1.shp")

# Clip watershed polygons to subregion polys.
arcpy.Clip_analysis(wspoly1, subregion, os.path.join(scratch, "wsclip1.shp"))
wsclip1 = os.path.join(scratch, "wsclip1.shp")

# Calculate hectares
arcpy.AddField_management(wsclip1, "HA", "DOUBLE")
arcpy.CalculateField_management(wsclip1, "HA", '''!shape.area@hectares!''', "PYTHON")

# Create fc of watershed polygons >= 1 ha that coincide with seed lines and polys.
arcpy.MakeFeatureLayer_management(wsclip1, os.path.join(scratch, "wsclip1.lyr"))
wsclip1_lyr = os.path.join(scratch, "wsclip1.lyr")

arcpy.SelectLayerByLocation_management(wsclip1_lyr, "INTERSECT", seedline, '', "NEW_SELECTION")
arcpy.SelectLayerByLocation_management(wsclip1_lyr, "INTERSECT", seedpoly, '', "ADD_TO_SELECTION")
arcpy.SelectLayerByAttribute_management(wsclip1_lyr, "SUBSET_SELECTION",'''"HA" >= 1''')


arcpy.CopyFeatures_management(wsclip1_lyr, os.path.join(scratch, "wslegit.shp"))
wslegit = os.path.join(scratch, "wslegit.shp")

# Polygon back to raster
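# Hedged sketch of the step named above (the original example is cut off here);
# the "GRIDCODE" value field and output name are placeholders, and the cell
# size is borrowed from the source watershed raster.
arcpy.PolygonToRaster_conversion(wslegit, "GRIDCODE",
                                 os.path.join(scratch, "wsraster2.tif"),
                                 "CELL_CENTER", "NONE", wsraster)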
            fld_name = "Cumu_dist"
    args = [fld_name, fld_expr, code]

fld_name, fld_expr, code = args

arcpy.MakeTableView_management(in_table=in_tbl,
                               out_view="tbl_view",
                               workspace=wrkspace)

if in_fld not in (None, "", " "):
    fld_name = in_fld
fld_name = arcpy.ValidateFieldName(fld_name)
arcpy.AddField_management("tbl_view",
                          field_name=fld_name,
                          field_type="DOUBLE",
                          field_is_nullable="NULLABLE")

arcpy.CalculateField_management(in_table="tbl_view",
                                field=fld_name,
                                expression=fld_expr,
                                code_block=code)

del in_fld, in_tbl, arcpy
# ----------------------------------------------------------------------
# __main__ .... code section

if __name__ == "__main__":
    """Optionally...
    : - print the script source name.
    : - run the _demo
Example #14
#this file adds a field to the epoi shapefile that gives a description of the type of building it happens to be

import arcpy
import NAICS_DICT
#Set Workspace

#Set input shapefile
fc =  arcpy.GetParameterAsText(0)


#Create fields in Attribute tab
arcpy.AddField_management(fc,"DESC_1","TEXT")
arcpy.AddField_management(fc,"DESC_2","TEXT")
arcpy.AddField_management(fc,"DESC_3","TEXT")
arcpy.AddField_management(fc,"DESC_4","TEXT")
arcpy.AddField_management(fc,"DESC_5","TEXT")

fieldarray = [["NAICS_1","DESC_1"], ["NAICS_2","DESC_2"], ["NAICS_3","DESC_3"], ["NAICS_4","DESC_4"], ["NAICS_5","DESC_5"]]

#Update Each Table
for index in range(0, 5):
    cursor = arcpy.da.UpdateCursor(fc, fieldarray[index])
    for row in cursor:
        if NAICS_DICT.naics_1.get(row[0]):
            row[1] = NAICS_DICT.naics_1[row[0]]
        else:
            row[1] = "ERROR - INCORRECT NAICS"
        cursor.updateRow(row)
    del cursor, row
Example #15
    ncurrentstep+=1 
    arcpy.AddMessage("Extracting Valley Bottom - Step " + str(ncurrentstep) + "/" + str(nstep))
    Expression = "\"Value\" <= " + str(ThresholdMAX) + " and \"Value\" >= " + str(ThresholdMIN)

    arcpy.AddMessage("All values contains in : " + Expression + " are selected")
    RasterVB = arcpy.gp.ExtractByAttributes_sa(RelativeDEM, Expression, "%scratchWorkspace%\\RasterVB")


    #/conversion into a polygon layer
    VBminusVB = arcpy.gp.Minus_sa(RasterVB, RasterVB, "%scratchWorkspace%\\VBminusVB")

    ncurrentstep+=1
    arcpy.AddMessage("Converting Raster Valley Bottom into a polygon feature - Step " + str(ncurrentstep) + "/" + str(nstep))
    RasterVBToPolygon = arcpy.RasterToPolygon_conversion(VBminusVB, "%scratchWorkspace%\\RasterVBToPolygon", "SIMPLIFY", "VALUE")
    arcpy.AddField_management(RasterVBToPolygon, "TEMP", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")

    ncurrentstep+=1
    arcpy.AddMessage("Dissolving the Polygon Valley Bottom - Step " + str(ncurrentstep) + "/" + str(nstep))
    UncleanedPolygonVB = arcpy.Dissolve_management(RasterVBToPolygon, "%scratchWorkspace%\\UncleanedPolygonVB", "TEMP", "", "MULTI_PART", "DISSOLVE_LINES")
    ncurrentstep+=1

  
  
    ################################
    #### Valley bottom cleaning ####
    ################################
    ## The box "Only Execute the CleanStep of the Polygonal Valley Bottom" is checked

else:
    # Number of steps
Example #16
def main(ini_path, zone_type='huc8', area_threshold=10,
         dairy_cuttings=5, beef_cuttings=4, crop_str='',
         remove_empty_flag=True, overwrite_flag=False):
    """Build a feature class for each crop and set default crop parameters

    Apply the values in the CropParams.txt as defaults to every cell

    Args:
        ini_path (str): file path of the project INI file
        zone_type (str): Zone type (huc8, huc10, county, gridmet)
        area_threshold (float): CDL area threshold [acres]
        dairy_cuttings (int): Initial number of dairy hay cuttings
        beef_cuttings (int): Initial number of beef hay cuttings
        crop_str (str): comma separated list or range of crops to compare
        remove_empty_flag (bool): If True, remove cells with crop acreage below the threshold
        overwrite_flag (bool): If True, overwrite existing output rasters

    Returns:
        None

    """
    logging.info('\nCalculating ET-Demands Spatial Crop Parameters')

    remove_empty_flag = True  # NOTE: overrides the remove_empty_flag argument above

    # Input paths
    # DEADBEEF - For now, get cropET folder from INI file
    # This function may eventually be moved into the main cropET code
    crop_et_sec = 'CROP_ET'
    config = util.read_ini(ini_path, section=crop_et_sec)

    try:
        project_ws = config.get(crop_et_sec, 'project_folder')
    except:
        logging.error('project_folder parameter must be set in the INI file, '
                      'exiting')
        return False
    try:
        gis_ws = config.get(crop_et_sec, 'gis_folder')
    except:
        logging.error('gis_folder parameter must be set in the INI file, '
                      'exiting')
        return False
    try:
        cells_path = config.get(crop_et_sec, 'cells_path')
    except:
        # cells_path = os.path.join(gis_ws, 'ETCells.shp')
        logging.error('et_cells_path parameter must be set in the INI file, '
                      'exiting')
        return False
    try:
        stations_path = config.get(crop_et_sec, 'stations_path')
    except:
        logging.error('stations_path parameter must be set in the INI file, '
                      'exiting')
        return False

    crop_et_ws = config.get(crop_et_sec, 'crop_et_folder')
    bin_ws = os.path.join(crop_et_ws, 'bin')

    try:
        calibration_ws = config.get(crop_et_sec, 'spatial_cal_folder')
    except:
        calibration_ws = os.path.join(project_ws, 'calibration')

    # Sub folder names
    static_ws = os.path.join(project_ws, 'static')
    # pmdata_ws = os.path.join(project_ws, 'pmdata')
    crop_params_path = os.path.join(static_ws, 'CropParams.txt')

    # ET cells field names
    cell_id_field = 'CELL_ID'
    cell_name_field = 'CELL_NAME'
    crop_acres_field = 'CROP_ACRES'

    # Only keep the following ET Cell fields
    keep_field_list = [cell_id_field, cell_name_field, 'AG_ACRES']
    # keep_field_list = ['CELL_ID', 'STATION_ID', 'HUC8', 'HUC10', 'GRIDMET_ID',
    #                    'COUNTYNAME', 'AG_ACRES']
    # keep_field_list = ['FIPS', 'COUNTYNAME']

    # Check input folders
    if not os.path.isdir(crop_et_ws):
        logging.error('\nERROR: The INI cropET folder does not exist'
                      '\n  {}'.format(crop_et_ws))
        sys.exit()
    elif not os.path.isdir(bin_ws):
        logging.error('\nERROR: The bin workspace does not exist'
                      '\n  {}'.format(bin_ws))
        sys.exit()
    elif not os.path.isdir(project_ws):
        logging.error('\nERROR: The project folder does not exist'
                      '\n  {}'.format(project_ws))
        sys.exit()
    elif not os.path.isdir(gis_ws):
        logging.error('\nERROR: The GIS folder does not exist'
                      '\n  {}'.format(gis_ws))
        sys.exit()
    if '.gdb' not in calibration_ws and not os.path.isdir(calibration_ws):
        os.makedirs(calibration_ws)
    logging.info('\nGIS Workspace:      {}'.format(gis_ws))
    logging.info('Project Workspace:  {}'.format(project_ws))
    logging.info('CropET Workspace:   {}'.format(crop_et_ws))
    logging.info('Bin Workspace:      {}'.format(bin_ws))
    logging.info('Calib. Workspace:   {}'.format(calibration_ws))

    # Check input files
    if not os.path.isfile(crop_params_path):
        logging.error('\nERROR: The crop parameters file does not exist'
                      '\n  {}'.format(crop_params_path))
        sys.exit()
    elif not os.path.isfile(cells_path):
        logging.error('\nERROR: The ET Cell shapefile does not exist'
                      '\n  {}'.format(cells_path))
        sys.exit()
    elif not os.path.isfile(stations_path) or not arcpy.Exists(stations_path):
        logging.error('\nERROR: The weather station shapefile does not exist'
                      '\n  {}'.format(stations_path))
        sys.exit()
    logging.debug('Crop Params Path:   {}'.format(crop_params_path))
    logging.debug('ET Cells Path:      {}'.format(cells_path))
    logging.debug('Stations Path:      {}'.format(stations_path))

    # For now, only allow calibration parameters in separate shapefiles
    ext = '.shp'
    # # Build output geodatabase if necessary
    # if calibration_ws.endswith('.gdb'):
    #     logging.debug('GDB Path:           {}'.format(calibration_ws))
    #     ext = ''
    #     if arcpy.Exists(calibration_ws) and overwrite_flag:
    #         try: arcpy.Delete_management(calibration_ws)
    #         except: pass
    #     if calibration_ws is not None and not arcpy.Exists(calibration_ws):
    #         arcpy.CreateFileGDB_management(
    #             os.path.dirname(calibration_ws),
    #             os.path.basename(calibration_ws))
    # else:
    #     ext = '.shp'

    # Field Name, Property, Field Type
    # Property is the string of the CropParameter class property value
    # It will be used to access the property using getattr
    dairy_cutting_field = 'Dairy_Cut'
    beef_cutting_field = 'Beef_Cut'
    param_list = [
        # ['Name', 'name', 'STRING'],
        # ['ClassNum', 'class_number', 'LONG'],
        # ['IsAnnual', 'is_annual', 'SHORT'],
        # ['IrrigFlag', 'irrigation_flag', 'SHORT'],
        # ['IrrigDays', 'days_after_planting_irrigation', 'LONG'],
        # ['Crop_FW', 'crop_fw', 'LONG'],
        # ['WinterCov', 'winter_surface_cover_class', 'SHORT'],
        # ['CropKcMax', 'kc_max', 'FLOAT'],
        ['MAD_Init', 'mad_initial', 'LONG'],
        ['MAD_Mid', 'mad_midseason', 'LONG'],
        # ['RootDepIni', 'rooting_depth_initial', 'FLOAT'],
        # ['RootDepMax', 'rooting_depth_max', 'FLOAT'],
        # ['EndRootGrw', 'end_of_root_growth_fraction_time', 'FLOAT'],
        # ['HeightInit', 'height_initial', 'FLOAT'],
        # ['HeightMax', 'height_max', 'FLOAT'],
        # ['CurveNum', 'curve_number', 'LONG'],
        # ['CurveName', 'curve_name', 'STRING'],
        # ['CurveType', 'curve_type', 'SHORT'],
        # ['PL_GU_Flag', 'flag_for_means_to_estimate_pl_or_gu', 'SHORT'],
        ['T30_CGDD', 't30_for_pl_or_gu_or_cgdd', 'FLOAT'],
        ['PL_GU_Date', 'date_of_pl_or_gu', 'FLOAT'],
        ['CGDD_Tbase', 'tbase', 'FLOAT'],
        ['CGDD_EFC', 'cgdd_for_efc', 'LONG'],
        ['CGDD_Term', 'cgdd_for_termination', 'LONG'],
        ['Time_EFC', 'time_for_efc', 'LONG'],
        ['Time_Harv', 'time_for_harvest', 'LONG'],
        ['KillFrostC', 'killing_frost_temperature', 'FLOAT'],
        # ['InvokeStrs', 'invoke_stress', 'SHORT'],
        # ['CN_Coarse', 'cn_coarse_soil', 'LONG'],
        # ['CN_Medium', 'cn_medium_soil', 'LONG'],
        # ['CN_Fine', 'cn_fine_soil', 'LONG']
    ]
    # if calibration_ws.endswith('.gdb'):
    #     dairy_cutting_field = 'Dairy_Cuttings'
    #     beef_cutting_field = 'Beef_Cuttings'
    #     param_list  = [
    #        # ['Name', 'name', 'STRING'],
    #        # ['Class_Number', 'class_number', 'LONG'],
    #        # ['Is_Annual', 'is_annual', 'SHORT'],
    #        # ['Irrigation_Flag', 'irrigation_flag', 'SHORT'],
    #        # ['Irrigation_Days', 'days_after_planting_irrigation', 'LONG'],
    #        # ['Crop_FW', 'crop_fw', 'LONG'],
    #        # ['Winter_Cover_Class', 'winter_surface_cover_class', 'SHORT'],
    #        # ['Crop_Kc_Max', 'kc_max', 'FLOAT'],
    #        # ['MAD_Initial', 'mad_initial', 'LONG'],
    #        # ['MAD_Midseason', 'mad_midseason', 'LONG'],
    #        # ['Root_Depth_Ini', 'rooting_depth_initial', 'FLOAT'],
    #        # ['Root_Depth_Max', 'rooting_depth_max', 'FLOAT'],
    #        # ['End_Root_Growth', 'end_of_root_growth_fraction_time', 'FLOAT'],
    #        # ['Height_Initial', 'height_initial', 'FLOAT'],
    #        # ['Height_Maximum', 'height_max', 'FLOAT'],
    #        # ['Curve_Number', 'curve_number', 'LONG'],
    #        # ['Curve_Name', 'curve_name', 'STRING'],
    #        # ['Curve_Type', 'curve_type', 'SHORT'],
    #        # ['PL_GU_Flag', 'flag_for_means_to_estimate_pl_or_gu', 'SHORT'],
    #        ['T30_CGDD', 't30_for_pl_or_gu_or_cgdd', 'FLOAT'],
    #        ['PL_GU_Date', 'date_of_pl_or_gu', 'FLOAT'],
    #        ['CGDD_Tbase', 'tbase', 'FLOAT'],
    #        ['CGDD_EFC', 'cgdd_for_efc', 'LONG'],
    #        ['CGDD_Termination', 'cgdd_for_termination', 'LONG'],
    #        ['Time_EFC', 'time_for_efc', 'LONG'],
    #        ['Time_Harvest', 'time_for_harvest', 'LONG'],
    #        ['Killing_Crost_C', 'killing_frost_temperature', 'FLOAT'],
    #        # ['Invoke_Stress', 'invoke_stress', 'SHORT'],
    #        # ['CN_Coarse_Soil', 'cn_coarse_soil', 'LONG'],
    #        # ['CN_Medium_Soil', 'cn_medium_soil', 'LONG'],
    #        # ['CN_Fine_Soil', 'cn_fine_soil', 'LONG']
    #    ]

    crop_add_list = []
    if crop_str:
        try:
            crop_add_list = sorted(list(util.parse_int_set(crop_str)))
        # try:
        #     crop_test_list = sorted(list(set(
        #         crop_test_list + list(util.parse_int_set(crop_str)))
        except:
            pass
    # Don't build crop parameter files for non-crops
    crop_skip_list = sorted(list(set([44, 45, 46, 55, 56, 57])))

    # crop_test_list = sorted(list(set(crop_test_list + [46])))
    logging.info('\ncrop_add_list = {}'.format(crop_add_list))

    # Read crop parameters using ET Demands functions/methods
    logging.info('\nReading default crop parameters')
    sys.path.append(bin_ws)
    import crop_parameters
    crop_param_dict = crop_parameters.read_crop_parameters(crop_params_path)

    # arcpy.CheckOutExtension('Spatial')
    # arcpy.env.pyramid = 'NONE 0'
    arcpy.env.overwriteOutput = overwrite_flag
    arcpy.env.parallelProcessingFactor = 8


    # Get list of crops specified in ET cells
    # Currently this may only be crops with CDL acreage
    crop_field_list = [
        field.name for field in arcpy.ListFields(cells_path)
        if re.match(r'CROP_\d{2}', field.name)]
    logging.debug('Cell crop fields: {}'.format(', '.join(crop_field_list)))
    crop_number_list = [
        int(f_name.split('_')[-1]) for f_name in crop_field_list]

    crop_number_list = [
        crop_num for crop_num in crop_number_list
        if not (crop_skip_list and crop_num in crop_skip_list)]
    logging.info('Cell crop numbers: {}'.format(
        ', '.join(list(util.ranges(crop_number_list)))))

    # Get crop acreages for each cell
    crop_acreage_dict = defaultdict(dict)

    field_list = [cell_id_field] + crop_field_list
    with arcpy.da.SearchCursor(cells_path, field_list) as cursor:
        for row in cursor:
            for i, crop_num in enumerate(crop_number_list):
                # logging.info('{} {}'.format(crop_num, i))
                if crop_num in crop_add_list:
                    crop_acreage_dict[crop_num][row[0]] = 0
                else:
                    crop_acreage_dict[crop_num][row[0]] = row[i + 1]

    crop_number_list = sorted(list(set(crop_number_list) | set(crop_add_list)))

    # Make an empty template crop feature class
    logging.info('')
    crop_template_path = os.path.join(
        calibration_ws, 'crop_00_template' + ext)
    if overwrite_flag and arcpy.Exists(crop_template_path):
        logging.debug('Overwriting template crop feature class')
        arcpy.Delete_management(crop_template_path)
    if arcpy.Exists(crop_template_path):
        logging.info('Template crop feature class already exists, skipping')
    else:
        logging.info('Building template crop feature class')
        arcpy.CopyFeatures_management(cells_path, crop_template_path)

        # Remove unneeded et cell fields
        for field in arcpy.ListFields(crop_template_path):
            if (field.name not in keep_field_list and
                field.editable and not field.required):
                logging.debug('  Delete field: {}'.format(field.name))
                arcpy.DeleteField_management(crop_template_path, field.name)
        field_list = [f.name for f in arcpy.ListFields(crop_template_path)]

        # Add crop acreage field
        if crop_acres_field not in field_list:
            logging.debug('  Add field: {}'.format(crop_acres_field))
            arcpy.AddField_management(
                crop_template_path, crop_acres_field, 'Float')
            arcpy.CalculateField_management(
                crop_template_path, crop_acres_field, '0', 'PYTHON_9.3')

        # Add crop parameter fields if necessary
        for param_field, param_method, param_type in param_list:
            logging.debug('  Add field: {}'.format(param_field))
            if param_field not in field_list:
                arcpy.AddField_management(
                    crop_template_path, param_field, param_type)
        # if dairy_cutting_field not in field_list:
        #     logging.debug('  Add field: {}'.format(dairy_cutting_field))
        #     arcpy.AddField_management(crop_template_path, dairy_cutting_field, 'Short')
        #     arcpy.CalculateField_management(
        #          crop_template_path, dairy_cutting_field, dairy_cuttings, 'PYTHON')
        # if beef_cutting_field not in field_list:
        #     logging.debug('  Add field: {}'.format(beef_cutting_field))
        #     arcpy.AddField_management(crop_template_path, beef_cutting_field, 'Short')
        #     arcpy.CalculateField_management(
        #        crop_template_path, beef_cutting_field, beef_cuttings, 'PYTHON')

    # Add an empty/zero crop field for the field mappings below
    # if len(arcpy.ListFields(cells_path, 'CROP_EMPTY')) == 0:
    #     arcpy.AddField_management(cells_path, 'CROP_EMPTY', 'Float')
    #     arcpy.CalculateField_management(
    #        cells_path, 'CROP_EMPTY', '0', 'PYTHON_9.3')


    # Process each crop
    logging.info('\nBuilding crop feature classes')
    for crop_num in crop_number_list:
        try:
            crop_param = crop_param_dict[crop_num]
        except:
            continue
        logging.info('{:>2d} {}'.format(crop_num, crop_param.name))
        logging.debug('{}'.format(crop_param))
        # Replace other characters with spaces, then remove multiple spaces
        crop_name = re.sub('[-"().,/~]', ' ', str(crop_param.name).lower())
        crop_name = ' '.join(crop_name.strip().split()).replace(' ', '_')
        crop_path = os.path.join(calibration_ws, 'crop_{0:02d}_{1}{2}'.format(
            crop_num, crop_name, ext))
        # crop_field = 'CROP_{:02d}'.format(crop_num)

        # Don't check crops in add list
        if crop_num in crop_add_list:
            pass
        # Skip if all zone crop areas are below threshold
        elif all([v < area_threshold for v in
                  crop_acreage_dict[crop_num].values()]):
            logging.info('  All crop acreages below threshold, skipping crop')
            continue

        # Remove existing shapefiles if necessary
        if overwrite_flag and arcpy.Exists(crop_path):
            logging.debug('  Overwriting: {}'.format(
                os.path.basename(crop_path)))
            arcpy.Delete_management(crop_path)

        # Don't check skip list until after existing files are removed
        # if ((crop_test_list and crop_num not in crop_test_list) or
        #     (crop_skip_list and crop_num in crop_skip_list)):
        #     logging.debug('  Skipping')

        # Copy ET cells for each crop if needed
        if arcpy.Exists(crop_path):
            logging.debug('  Shapefile already exists, skipping')
            continue
        else:
            # logging.debug('    {}'.format(crop_path))
            arcpy.Copy_management(crop_template_path, crop_path)
            # Remove extra fields
            # for field in arcpy.ListFields(crop_path):
            #     if field.name not in keep_field_list:
            #         # logging.debug('    {}'.format(field.name))
            #         arcpy.DeleteField_management(crop_path, field.name)

        # Add alfalfa cutting field
        if crop_num in [1, 2, 3, 4]:
            if len(arcpy.ListFields(crop_path, dairy_cutting_field)) == 0:
                logging.debug('  Add field: {}'.format(dairy_cutting_field))
                arcpy.AddField_management(
                    crop_path, dairy_cutting_field, 'Short')
                arcpy.CalculateField_management(
                    crop_path, dairy_cutting_field, dairy_cuttings, 'PYTHON')
            if len(arcpy.ListFields(crop_path, beef_cutting_field)) == 0:
                logging.debug('  Add field: {}'.format(beef_cutting_field))
                arcpy.AddField_management(
                    crop_path, beef_cutting_field, 'Short')
                arcpy.CalculateField_management(
                    crop_path, beef_cutting_field, beef_cuttings, 'PYTHON')

        # Write default crop parameters to file
        field_list = [p[0] for p in param_list] + [cell_id_field, crop_acres_field]
        with arcpy.da.UpdateCursor(crop_path, field_list) as cursor:
            for row in cursor:
                # Don't remove zero acreage crops if in add list
                if crop_num in crop_add_list:
                    pass
                # Skip and/or remove zones without crop acreage    
                elif crop_acreage_dict[crop_num][row[-2]] < area_threshold:
                    if remove_empty_flag:
                        cursor.deleteRow()
                    continue
                # Write parameter values
                for i, (param_field, param_method, param_type) in enumerate(param_list):
                    row[i] = getattr(crop_param, param_method)
                # Write crop acreage
                if crop_num not in crop_add_list:
                    row[-1] = crop_acreage_dict[crop_num][row[-2]]
                cursor.updateRow(row)
Example #17
    def execute(self, parameters, messages):
        """The source code of the tool."""

        # local variables and env
        arcpy.env.workspace = "E:/gina/poker/pip"
        adnr_lo_shp = "E:/gina/poker/shp/wip/adnr_gls_dls_merge_20170823_v1.shp"
        pfrr_popn_places = "E:/gina/poker/shp/wip/pokerflat_popn_places_gcs_wgs84_to_akalbers_2.shp"
        pipTable = "E:/gina/poker/dbf/predicted_impact_xy.dbf"
        pip_point_shp = "E:/gina/poker/pip/pip_point.shp"
        pip_point_3338 = "E:/gina/poker/pip/pip_point_3338.shp"
        pip_buffer_shp = "E:/gina/poker/pip/pip_buffer.shp"
        pip_lo_in_buffer_shp = "E:/gina/poker/pip/pip_lo_in_buffer.shp"
        pip_lo_in_buf_sum_dbf = "E:/gina/poker/pip/pip_lo_in_buf_sum.dbf"
        pip_lo_in_buf_sum_csv = "E:/gina/poker/pip/pip_lo_in_buf_sum.csv"
        pip_popn_places_in_buffer_shp = "E:/gina/poker/pip/pip_popn_places_in_buffer.shp"
        x = parameters[0].valueAsText
        y = parameters[1].valueAsText
        r = parameters[2].valueAsText + " NauticalMiles"
        pipLayer = "pipLayer"
        srs = arcpy.SpatialReference("Alaska Albers Equal Area Conic")
        intersect_fc1 = [adnr_lo_shp, pip_buffer_shp]
        intersect_fc2 = [pfrr_popn_places, pip_buffer_shp]
        mxd = arcpy.mapping.MapDocument("current")
        dataframe = arcpy.mapping.ListDataFrames(mxd)[0]
        sourceLoSymbologyLayer = arcpy.mapping.Layer(
            "E:/gina/poker/lyr/lo.lyr")
        sourcePipSymbologyLayer = arcpy.mapping.Layer(
            "E:/gina/poker/lyr/pip.lyr")

        # Process: Calculate Lon Field
        arcpy.CalculateField_management(pipTable, "Lon", x, "PYTHON", "")

        # Process: Calculate Lat Field
        arcpy.CalculateField_management(pipTable, "Lat", y, "PYTHON", "")

        # Process: Make XY Event Layer
        arcpy.MakeXYEventLayer_management(
            pipTable, "Lon", "Lat", pipLayer,
            "GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]];-400 -400 1000000000;-100000 10000;-100000 10000;8.98315284119522E-09;0.001;0.001;IsHighPrecision",
            "")

        # Process: Copy Features
        arcpy.CopyFeatures_management(pipLayer, pip_point_shp, "", "0", "0",
                                      "0")

        # Process: Project pip point
        arcpy.Project_management(pip_point_shp, pip_point_3338, srs)

        # Process: Buffer pip point
        arcpy.Buffer_analysis(pip_point_3338, pip_buffer_shp, r, "FULL",
                              "ROUND", "NONE", "", "PLANAR")

        # Process: Intersect pip buffer with land ownership
        arcpy.Intersect_analysis(intersect_fc1, pip_lo_in_buffer_shp, "ALL",
                                 "", "INPUT")

        # Process: Intersect pip buffer with popn places
        arcpy.Intersect_analysis(intersect_fc2, pip_popn_places_in_buffer_shp,
                                 "ALL", "", "INPUT")

        # Process: Make feature layers and add to the map
        arcpy.MakeFeatureLayer_management(pip_point_3338,
                                          "Predicted Impact Point")
        arcpy.MakeFeatureLayer_management(
            pip_lo_in_buffer_shp,
            "Land Onwership within 3sigma of Predicted Impact Point")
        arcpy.MakeFeatureLayer_management(
            pip_popn_places_in_buffer_shp,
            "Populated Places within 3sigma of Predicted Impact Point")
        addPipPointLayer = arcpy.mapping.Layer("Predicted Impact Point")
        arcpy.mapping.AddLayer(dataframe, addPipPointLayer)
        add3sigmaLoLayer = arcpy.mapping.Layer(
            "Land Onwership within 3sigma of Predicted Impact Point")
        arcpy.mapping.AddLayer(dataframe, add3sigmaLoLayer)
        addPipPopnPlacesLayer = arcpy.mapping.Layer(
            "Populated Places within 3sigma of Predicted Impact Point")
        arcpy.mapping.AddLayer(dataframe, addPipPopnPlacesLayer)

        # Add and calc Acres field for intersected Land Ownership
        arcpy.AddField_management(pip_lo_in_buffer_shp, "Acres", "DOUBLE")
        arcpy.CalculateField_management(pip_lo_in_buffer_shp, "Acres",
                                        "!shape.area@acres!", "PYTHON_9.3", "")

        # Summarize intersected Land Ownership by Owner and total Acres
        arcpy.Statistics_analysis(pip_lo_in_buffer_shp, pip_lo_in_buf_sum_dbf,
                                  "Acres SUM", "OWNER")
        # arcpy.MakeTableView_management(pip_lo_in_buf_sum_dbf)
        add3sigmaLoSumTbl = arcpy.mapping.TableView(pip_lo_in_buf_sum_dbf)
        arcpy.mapping.AddTableView(dataframe, add3sigmaLoSumTbl)

        # Symbolize and Refresh
        layers = arcpy.mapping.ListLayers(mxd)
        arcpy.mapping.UpdateLayer(dataframe, layers[2], sourceLoSymbologyLayer,
                                  True)
        layers[2].symbology.addAllValues()
        arcpy.mapping.UpdateLayer(dataframe, layers[1],
                                  sourcePipSymbologyLayer, True)

        arcpy.RefreshTOC()
        arcpy.RefreshActiveView()

        return
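
# Hedged sketch (not part of the tool above): pip_lo_in_buf_sum_csv is declared
# in execute() but never used; if the intent was to also export the ownership
# summary to CSV, TableToTable_conversion could do it, e.g.:
import arcpy

arcpy.TableToTable_conversion("E:/gina/poker/pip/pip_lo_in_buf_sum.dbf",
                              "E:/gina/poker/pip", "pip_lo_in_buf_sum.csv")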
Example #18
ap.CopyFeatures_management(
    MV_fc, "macrInvCo"
)  # produces a point feature class containing every monitoring site with macroinvertebrate community index data,

# process 2: convert from shapefiles to feature classes within a geodatabase. (I wanted this to happen automatically in the prior step, but a "TypeError: unsupported operand type(s) for +: 'Result' and 'str'" meant it never worked; see the hedged sketch below the loop.)
ap.env.workspace = str(wd)
ap.ListFeatureClasses()
for fcPre in ap.ListFeatureClasses():  # convert each new feature class into the geodatabase (could not get them saved there immediately)
    ap.FeatureClassToGeodatabase_conversion(fcPre, scrgdb_path)
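
# Hedged sketch (not part of the original script): the TypeError above happens
# because geoprocessing tools return a Result object rather than a path string,
# so Result + 'text' fails; converting the Result first avoids it, e.g.:
# result = ap.CopyFeatures_management(MV_fc, "macrInvCo")   # returns a Result
# out_path = result.getOutput(0)                            # or str(result)
# print(out_path + " was written")                          # concatenation now works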

# Process 3: [re] append median measurement values of each NIWA river quality metric to the corresponding geographical coordinate point geometry.
ap.env.workspace = str(wd)
field = "Median"
ap.AddField_management(
    scrgdb_path + "\\totalNitr_shp", field_name="Median", field_type="FLOAT"
)  # adds median measurement values to total nitrogen measurement coordinates
pointer = 0  # increment variable to append each subsequent median row value to the appropriate coordinates, beginning with the first row
print(NI_median[pointer])
rows = ap.UpdateCursor(scrgdb_path + "\\totalNitr_shp")
for row in rows:
    print(row)
    row.setValue(field, NI_median[pointer])
    pointer = pointer + 1
    rows.updateRow(row)
ap.AddField_management(
    scrgdb_path + "\\totalPhos_shp", field_name="Median", field_type="FLOAT"
)  # adds median measurement values to total phosphorus measurement coordinates
pointer = 0  # reset pointer for next cursor update
rows = ap.UpdateCursor(scrgdb_path + "\\totalPhos_shp")
for row in rows:
Example #19
join_table="ot47", join_field="LOT", fields="MAX")
arcpy.JoinField_management(in_data="ot17", in_field="LOT", 
join_table="ot52", join_field="LOT", fields="MAX")
arcpy.JoinField_management(in_data="ot17", in_field="LOT", 
join_table="ot57", join_field="LOT", fields="MAX")
arcpy.JoinField_management(in_data="ot17", in_field="LOT", 
join_table="ot62", join_field="LOT", fields="MAX")
arcpy.JoinField_management(in_data="ot17", in_field="LOT", 
join_table="ot67", join_field="LOT", fields="MAX")

#joining the zonalstats from q1
arcpy.JoinField_management(in_data="ot17", in_field="LOT", 
join_table=Q1z, join_field="LOT", fields="MAX")

#adding field for Max elev data
arcpy.AddField_management("ot17","DEPT2017","DOUBLE","10","5","5")
arcpy.AddField_management("ot17","DEPT2022","DOUBLE","10","5","5")
arcpy.AddField_management("ot17","DEPT2027","DOUBLE","10","5","5")
arcpy.AddField_management("ot17","DEPT2032","DOUBLE","10","5","5")
arcpy.AddField_management("ot17","DEPT2037","DOUBLE","10","5","5")
arcpy.AddField_management("ot17","DEPT2042","DOUBLE","10","5","5")
arcpy.AddField_management("ot17","DEPT2047","DOUBLE","10","5","5")
arcpy.AddField_management("ot17","DEPT2052","DOUBLE","10","5","5")
arcpy.AddField_management("ot17","DEPT2057","DOUBLE","10","5","5")
arcpy.AddField_management("ot17","DEPT2062","DOUBLE","10","5","5")
arcpy.AddField_management("ot17","DEPT2067","DOUBLE","10","5","5")
arcpy.AddField_management("ot17","DEPTZERO","DOUBLE","10","5","5")

# moving the data from obscurely named columns to columns by year (a loop-based sketch follows below)
arcpy.CalculateField_management(in_table="ot17", field="DEPT2017", expression="[MAX]", expression_type="VB", code_block="")
arcpy.CalculateField_management(in_table="ot17", field="DEPT2022", expression="[MAX_1]", expression_type="VB", code_block="")
# Assignment: 3 Question: 6
# Due 2/22/19 @ 11:59 PM

# Import system modules
import arcpy
from arcpy import env

# Set workspace

current_workspace = r"C:\Users\dhove\Desktop\Exercise3.gdb\Exercise3.gdb"
arcpy.env.workspace = current_workspace

# Add a field to feature class

arcpy.AddField_management(
    r"C:\Users\dhove\Desktop\Exercise3.gdb\Exercise3.gdb\CallsforService",
    "Crime_Explanation", "TEXT")

fields = ['CFSType', 'Crime_Explanation']

with arcpy.da.UpdateCursor(
        r"C:\Users\dhove\Desktop\Exercise3.gdb\Exercise3.gdb\CallsforService",
        fields) as cursor:
    # For each row, evaluate the CFSType value (index position 0)
    # and update Crime_Explanation (index position 1); a dictionary-based
    # sketch of the same update follows after this block.
    for row in cursor:
        if (row[0] == "Burglary Call"):
            row[1] = "This is a burglary"

        # Update the cursor with the updated list
        cursor.updateRow(row)
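
# Hedged sketch (not part of the assignment code above): the same update can be
# driven by a dictionary, which scales better than an if/elif chain; the path
# and field names are reused from the assignment, and any CFSType values other
# than "Burglary Call" would need to be added by hand.
import arcpy

explanations = {
    "Burglary Call": "This is a burglary",
    # further CFSType values and their explanations go here
}
fc = r"C:\Users\dhove\Desktop\Exercise3.gdb\Exercise3.gdb\CallsforService"
with arcpy.da.UpdateCursor(fc, ['CFSType', 'Crime_Explanation']) as cursor:
    for row in cursor:
        if row[0] in explanations:
            row[1] = explanations[row[0]]
            cursor.updateRow(row)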
        # create a path to this file
        graticule_file = os.path.join(path_to_graticule_folder, fc)

        # ADD FIELD

        # Announce the processes
        print "Add fields and field calculations"

        # Provide field name and data
        fieldName = "lat_code"
        fieldPrecision = 10
        fieldScale = 10

        # Add fields
        arcpy.AddField_management(graticule_file, fieldName, "SHORT",
                                  fieldPrecision, fieldScale)

        # FIELD CALCULATIONS

        # Provide local variables
        fieldName = "lat_code"
        exp = "autoIncrement()"
        codeblock = """interval=0\nrec=1\ndef autoIncrement():\n  global rec\n  global interval\n  pStart = 89\n  pInterval = 1\n  if (interval == 0):\n    rec = pStart\n    interval += 1\n  else:\n    rec -= pInterval\n  return rec"""

        # Execute CalculateField
        arcpy.CalculateField_management(in_table=graticule_file,
                                        field=fieldName,
                                        expression=exp,
                                        expression_type="PYTHON_9.3",
                                        code_block=codeblock)
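
        # Note (added): with pStart = 89 and pInterval = 1 above, the first row
        # gets lat_code 89 and each subsequent row decrements by one
        # (89, 88, 87, ...).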
Example #22
    # convert raster to points
    arcpy.AddMessage('Converting to points: ' + str(raster_loc))
    in_point_mem = 'in_memory\\raster_points'
    arcpy.RasterToPoint_conversion(raster_loc, in_point_mem, "VALUE")
    arcpy.AddMessage('Conversion to points complete')

    # Do outlier
    out_features = 'in_memory\\out_group'
    arcpy.AddMessage('Output directory: ' + out_features)
    result = arcpy.ClustersOutliers_stats(Input_Feature_Class=in_point_mem,
                                          Input_Field='grid_code',
                                          Output_Feature_Class=out_features)
    arcpy.AddMessage('Outlier analysis complete')

    # add field to use for classification
    arcpy.AddField_management(out_features, 'ISOUTLIER', 'SHORT')
    codeblock = '''
def getClass(cotype):
    if cotype == 'HL' or cotype == 'LH':
        return 1
    else:
        return 0
    '''
    arcpy.CalculateField_management(out_features, 'ISOUTLIER',
                                    "getClass(!COType!)", 'PYTHON_9.3',
                                    codeblock)
    arcpy.AddMessage('Classification field added')

    # convert clustered points to a raster
    arcpy.AddMessage('Converting to raster using field ' + result[2])
    out_rastername = arcpy.CreateUniqueName("output.tif",
Example #23
csvReader = csv.reader(fileHandle)
header = csvReader.next()
xIndex = header.index('X')
yIndex = header.index('Y')
rhinoIndex = header.index('Rhino')

# Create a dictionary to contain rhino arrays

rhinoDict = {}

# Attempt to create the polyline feature class and add a NAME field

try:
    spatialRef = arcpy.SpatialReference('WGS 1984')
    rhinoFC = arcpy.CreateFeatureclass_management(r'C:\TmpWrkDirGIS\GEOG485\Lesson4', 'rhinopaths.shp', 'POLYLINE', '', '', '', spatialRef)
    arcpy.AddField_management(rhinoFC, 'NAME', 'TEXT', '', '', 20)
except:
    print('Error in creating polyline feature class')

# Attempt to update the dictionary by iterating through the rows in the CSV

try:
    for row in csvReader:
        updateDict(row[rhinoIndex], rhinoDict, row[xIndex], row[yIndex])

    # Once the dictionary is updated, insert the arrays into the polyline feature class,
    # using the key for the NAME field and building a polyline object from each value
    # (a hedged sketch of the loop body follows below)

    try:
        with arcpy.da.InsertCursor(rhinoFC, ('SHAPE@', 'NAME')) as cursor:
            for rhino in rhinoDict:
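                # Hedged sketch (the example is cut off here): the loop body
                # typically builds a Polyline from each rhino's point Array
                # (as assembled by updateDict, which is not shown) and inserts
                # it with the rhino name, along the lines of:
                # polyline = arcpy.Polyline(rhinoDict[rhino], spatialRef)
                # cursor.insertRow((polyline, rhino))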
print "\nStep 7 Add Names starts at", datetime.datetime.now().strftime(
    "%A, %B %d %Y %I:%M:%S%p")

finalFolder = "C:\\GIS_RGB\\Geodatabase\\Biophysical\\7_landuse\\US_nass\\final_output\\"

projList = arcpy.ListRasters("*", "")

# Add new column
fieldName = "NAME"
fieldType = "TEXT"

for raster in projList:
    path, name = os.path.split(str(raster))
    out_raster = os.path.join(finalFolder, name)
    arcpy.CopyRaster_management(raster, out_raster)
    arcpy.AddField_management(out_raster, fieldName, fieldType)

    # Add names (see the dictionary-based sketch below)
    cur = arcpy.UpdateCursor(out_raster)
    for row in cur:
        value1 = row.getValue("VALUE")
        if value1 == 1:
            row.setValue(fieldName, "Corn")
        elif value1 == 2:
            row.setValue(fieldName, "Cotton")
        elif value1 == 3:
            row.setValue(fieldName, "Rice")
        elif value1 == 4:
            row.setValue(fieldName, "Sorghum")
        elif value1 == 5:
            row.setValue(fieldName, "Soybeans")
Example #25
    ######################################
    outReclass1 = arcpy.sa.Reclassify(IN_DEM, "Value",
                                      arcpy.sa.RemapRange([[0, 5000, 1]]),
                                      'NODATA')
    outReclass1.save("demfinal.tif")
    arcpy.Clip_management("demfinal.tif", "", "demclip.tif", IN_CLIP, "",
                          "ClippingGeometry")
    #####################################
    # integrate: multiply the constraint rasters together
    gg = arcpy.Raster("tdzyfinal.tif") * arcpy.Raster(
        "trhjfinal.tif") * arcpy.Raster("demclip.tif")
    gg.save(OUTPUTPATH + OUTPUTNAME + '.tif')
cursor = arcpy.da.SearchCursor(OUTPUTPATH + OUTPUTNAME + '.tif', ["Count"])
arcpy.BuildRasterAttributeTable_management(OUTPUTPATH + OUTPUTNAME + ".tif",
                                           "Overwrite")
arcpy.AddField_management(OUTPUTPATH + OUTPUTNAME + ".tif", "dengji", "STRING",
                          "", "", "")
arcpy.AddField_management(OUTPUTPATH + OUTPUTNAME + ".tif", "mj", "DOUBLE", "",
                          "", "")
arcpy.AddField_management(OUTPUTPATH + OUTPUTNAME + ".tif", "fbl", "DOUBLE",
                          "", "", "")
expression = "getClass(!VALUE!)"
codeblock = """def getClass(a):
    if a == 1:
        return u"土地资源约束下农业生产的最大规模"
  """
arcpy.CalculateField_management(OUTPUTPATH + OUTPUTNAME + ".tif", "fbl", FBLL,
                                "PYTHON_9.3")
arcpy.CalculateField_management(OUTPUTPATH + OUTPUTNAME + ".tif", "dengji",
                                expression, "PYTHON_9.3", codeblock)
arcpy.CalculateField_management(OUTPUTPATH + OUTPUTNAME + ".tif", "mj",
                                "(!fbl!)*(!COUNT!)", "PYTHON_9.3")
    if Metric[elem[i]:elem[i+1]][0] is None:
        breaks.append([[elem[i], elem[i+1]], [None]])
        HReach[breaks[i][0][0]:breaks[i][0][-1]] = [None] * len(Metric[elem[i]:elem[i+1]])
    else:
        breaks.append(dPH.Hubert_segmentation(Metric[elem[i]:elem[i+1]], alpha))
        breaks[i][0][:] = [x + elem[i] for x in breaks[i][0]]
        model = dPH.model_signal(Metric[breaks[i][0][0]:breaks[i][0][-1]], breaks[i][0])
        HReach[breaks[i][0][0]:breaks[i][0][-1]] = model[:]




# Back to ArcGIS: transfer the Rank_AGO and AGO_Val fields into the DGO-scale database
ncurrentstep += 1
arcpy.AddMessage("Transferring the 'Rank_AGO' and 'AGO_Val' - Step " + str(ncurrentstep) + "/" + str(nstep))
arcpy.AddField_management(SortedTable, "Rank_AGO", "SHORT")
arcpy.AddField_management(SortedTable, "AGO_Val", "FLOAT")

p=0
rows1 = arcpy.UpdateCursor(SortedTable)
rows2 = arcpy.SearchCursor(SortedTable)
line2 = next(rows2)
AGOval = HReach[p]
AGORankUGO = Rank_UGO[p]
HReachID = 1

for line1 in rows1 :
    line2 = next(rows2)
    line1.Rank_AGO = HReachID
    line1.AGO_Val = HReach[p]
    rows1.updateRow(line1)
Example #27
 r = arcpy.GetRasterProperties_management(NIR, 'ROWCOUNT')  # number of rows
 c = arcpy.GetRasterProperties_management(NIR, 'COLUMNCOUNT')  # number of columns
 #print o,y
 arcpy.CreateFishnet_management("fishnet.shp", o, y, w, h, r, c, '#',
                                'NO_LABELS', '#', 'POLYGON')
 ###########################################
 arcpy.Dissolve_management("landusecopy.shp", "solve.shp")  #融合
 arcpy.Intersect_analysis(["solve.shp", "fishnet.shp"],
                          'xiangjiao.shp')  #相交
 cursor = arcpy.da.SearchCursor("xiangjiao.shp", ["SHAPE@AREA"])  #面积
 ar = []
 for row in cursor:
     ar.append(row[0])
 try:
     arcpy.DeleteField_management("xiangjiao.shp", 'area')
     arcpy.AddField_management("xiangjiao.shp", 'area', "FLOAT")
 except:
     arcpy.AddField_management("xiangjiao.shp", 'area', "FLOAT")
 del cursor, row
 cursor1 = arcpy.UpdateCursor("xiangjiao.shp")
 i = 0
 for my_row in cursor1:
     my_row.setValue('area', ar[i])
     cursor1.updateRow(my_row)
     i += 1
 del cursor1
 del my_row
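 # Hedged sketch (alternative to the two-cursor area transfer above): an
 # arcpy.da.UpdateCursor can read the geometry area and write the field in
 # a single pass:
 # with arcpy.da.UpdateCursor("xiangjiao.shp", ["SHAPE@AREA", "area"]) as cur2:
 #     for rec in cur2:
 #         rec[1] = rec[0]
 #         cur2.updateRow(rec)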
 #arcpy.Select_analysis("xiangjiao.shp", "sele.shp",'"DLMC" =\'碳酸岩\'' )
 arcpy.Intersect_analysis(["xiangjiao.shp", IN_TSY], 'xiangjiao2.shp')  #相交
 ###################################################
 #碳酸岩出露面积百分比
Example #28
print("ArcPyDemo practice file")
import arcpy

arcpy.AddField_management("c:/data/Portland.gdb/streets", "LENGTH_MILES",
                          "TEXT")
arcpy.CalculateField_management("c:/data/Portland.gdb/streets", "LENGTH_MILES",
                                "!shape.length@miles!", "PYTHON_9.3")
arcpy.FeatureClassToFeatureClass_conversion(
    "c:/data/Portland.gdb/streets",
    "Database Connections/MySDE.sde/PortlandDataset", "streets")

# learned that I need to get Python 2.7 in order to run ArcPy
# going to try to DL the old Python when I'm in the desert

arcpy.AddField_management("c:/data/Portland.gdb/streets", "LENGTH_MILES",
                          "TEXT")
arcpy.CalculateField_management("c:/data/Portland.gdb/streets", "LENGTH_MILES",
                                "!shape.length@miles!", "PYTHON_9.3")
arcpy.FeatureClassToFeatureClass_conversion(
    "c:/data/Portland.gdb/streets",
    "Database Connections/MySDE.sde/PortlandDataset", "streets")
def smallFeaturesCheck(inFds, outFds, mapScaleString, outHtml, tooShortArcMM,
                       tooSmallAreaMM2, tooSkinnyWidthMM):
    # get inputs
    inCaf = os.path.basename(getCaf(inFds))
    inMup = inCaf.replace('ContactsAndFaults', 'MapUnitPolys')
    nameToken = inCaf.replace('ContactsAndFaults', '')
    # set mapscale and mapunits
    mapUnit1 = arcpy.Describe(inFds).spatialReference.linearUnitName
    mapUnit1 = mapUnit1.upper()
    if mapUnit1.find('FOOT') > -1:
        mapUnits = 'feet'
    else:
        mapUnits = 'meters'
    mapScale = 1.0 / float(mapScaleString)

    tooShortArcLength = tooShortArcMM / 1000.0 / mapScale
    tooSmallPolyArea = tooSmallAreaMM2 / 1e6 / mapScale / mapScale
    #addMsgAndPrint(str(tooSmallAreaMM2)+'  '+str(tooSmallPolyArea))
    tooSkinnyWidth = tooSkinnyWidthMM / 1000 / mapScale
    if mapUnits == 'feet':
        tooShortArcLength = tooShortArcLength * 3.28
        tooSmallPolyArea = tooSmallPolyArea * 3.28 * 3.28
        tooSkinnyWidth = tooSkinnyWidth * 3.28
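    # Worked example (added for clarity): at 1:24,000, mapScale = 1/24000, so a
    # 2 mm threshold gives tooShortArcLength = 2 / 1000.0 / (1/24000) = 48
    # ground meters (about 157 feet when the map units are feet).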

    tooShortArcs = outFds + '/errors_' + nameToken + 'ShortArcs'
    tooSmallPolys = outFds + '/errors_' + nameToken + 'SmallPolys'
    tooSmallPolyPoints = outFds + '/errors_' + nameToken + 'SmallPolyPoints'
    tooSkinnyPolys = outFds + '/errors_' + nameToken + 'SkinnyPolys'
    testAndDelete(tooShortArcs)
    testAndDelete(tooSmallPolys)
    testAndDelete(tooSmallPolyPoints)
    testAndDelete(tooSkinnyPolys)

    outHtml.write('<h3>Small feature inventory</h3>\n')
    outHtml.write('&nbsp;&nbsp; map scale = 1:' + mapScaleString + '<br>\n')

    # short arcs
    testAndDelete('cafLayer')
    arcpy.MakeFeatureLayer_management(
        inFds + '/' + inCaf, 'cafLayer',
        'Shape_Length < ' + str(tooShortArcLength))
    arcpy.CopyFeatures_management('cafLayer', tooShortArcs)
    outHtml.write('&nbsp;&nbsp; ' + str(numberOfRows(tooShortArcs)) +
                  ' arcs shorter than ' + str(tooShortArcMM) + ' mm<br>\n')
    if numberOfRows(tooShortArcs) == 0:
        testAndDelete(tooShortArcs)
    if arcpy.Exists(inMup):
        # small polys
        addMsgAndPrint('  tooSmallPolyArea = ' + str(tooSmallPolyArea))
        testAndDelete('mupLayer')
        arcpy.MakeFeatureLayer_management(
            inFds + '/' + inMup, 'mupLayer',
            'Shape_Area < ' + str(tooSmallPolyArea))
        arcpy.CopyFeatures_management('mupLayer', tooSmallPolys)
        addMsgAndPrint('  ' + str(numberOfRows(tooSmallPolys)) +
                       ' too-small polygons')
        arcpy.FeatureToPoint_management(tooSmallPolys, tooSmallPolyPoints,
                                        'INSIDE')
        outHtml.write('&nbsp;&nbsp; ' + str(numberOfRows(tooSmallPolys)) +
                      ' polys with area less than ' + str(tooSmallAreaMM2) +
                      ' mm<sup>2</sup><br>\n')
        # sliver polys
        arcpy.CopyFeatures_management(inFds + '/' + inMup, tooSkinnyPolys)
        testAndDelete('sliverLayer')
        arcpy.MakeFeatureLayer_management(tooSkinnyPolys, 'sliverLayer')
        arcpy.AddField_management('sliverLayer', 'AreaDivLength', 'FLOAT')
        arcpy.CalculateField_management('sliverLayer', 'AreaDivLength',
                                        "!Shape_Area! / !Shape_Length!",
                                        "PYTHON")
        arcpy.SelectLayerByAttribute_management(
            'sliverLayer', 'NEW_SELECTION',
            "AreaDivLength >= " + str(tooSkinnyWidth))
        arcpy.DeleteFeatures_management('sliverLayer')
        addMsgAndPrint('  tooSkinnyPolyWidth = ' + str(tooSkinnyWidth))
        addMsgAndPrint('  ' + str(numberOfRows(tooSkinnyPolys)) +
                       ' too-skinny polygons')

        outHtml.write('&nbsp;&nbsp; ' + str(numberOfRows(tooSkinnyPolys)) +
                      ' polys with area/length ratio less than ' +
                      str(tooSkinnyWidth) + ' ' + mapUnits + '<br>\n')
        for fc in (tooSkinnyPolys, tooSmallPolys):
            if numberOfRows(fc) == 0: testAndDelete(fc)
    else:
        outHtml.write('&nbsp;&nbsp; No MapUnitPolys feature class<br>\n')

    for xx in 'cafLayer', 'mupLayer', 'sliverLayer':
        testAndDelete(xx)

    return
except:
    arcpy.AddMessage("Failed to create temp 'tables.gdb'")
    pass
tablesgdb = os.path.join(outfolder, "tables.gdb")

# Creating output "ZonalStats" geodatabase.
try:
    arcpy.CreateFileGDB_management(outfolder, "ZonalStats")
except:
    arcpy.AddMessage("Failed to create 'ZonalStats.gdb' for output.")
    pass
outgdb = os.path.join(outfolder, "ZonalStats.gdb")

# Adding a temporary id field to zones
try:
    arcpy.AddField_management(zone, "tempid", "TEXT")
except:
    arcpy.AddMessage("Failed to add the field 'tempid' to the zone feature class. It might already exist. Continuing if it does...")
    pass
arcpy.CalculateField_management(zone, "tempid", '''"temp" + str(!OBJECTID!)''', "PYTHON")

# Splitting zones into single polygon feature classes
mem = "in_memory"
arcpy.env.workspace = mem
arcpy.Split_analysis(zone, zone, "tempid", mem, "10 meters")
arcpy.AddMessage("Done splitting zones.")

# Listing feature classes and performing zonal stats on each individually (a hedged sketch of the loop body follows)
fclist = arcpy.ListFeatureClasses("*")
fcs = []
for fc in fclist:
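    # Hedged sketch (the snippet is cut off here): each single-polygon feature
    # class is typically run through ZonalStatisticsAsTable against the value
    # raster the tool was given (called in_value_raster below; it is not shown
    # in this snippet), with arcpy.CheckOutExtension("Spatial") called once
    # before the loop:
    # out_table = os.path.join(tablesgdb, fc)
    # arcpy.sa.ZonalStatisticsAsTable(fc, "tempid", in_value_raster, out_table,
    #                                 "DATA", "ALL")
    # fcs.append(out_table)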