def remergeDistPolyInv(self):
    pp = self.ProgressPrinter.newProcess(inspect.stack()[0][3], 1,
                                         1).start()
    arcpy.Update_analysis(self.gridded_inventory_layer,
                          self.disturbedInventory_layer,
                          self.RolledBackInventory, "BORDERS",
                          "0.25 Meters")
    pp.finish()
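# A minimal stand-alone sketch of the same Update_analysis call, using
# hypothetical dataset paths; the keyword names match the calls shown in the
# examples further down.
import arcpy

inventory = r"C:\data\inventory.gdb\gridded_inventory"        # hypothetical input features
disturbances = r"C:\data\inventory.gdb\disturbed_inventory"   # hypothetical update features
rolled_back = r"C:\data\inventory.gdb\rolled_back_inventory"  # hypothetical output
arcpy.Update_analysis(in_features=inventory,
                      update_features=disturbances,
                      out_feature_class=rolled_back,
                      keep_borders="BORDERS",
                      cluster_tolerance="0.25 Meters")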
# Example #2
def createReachsheds():
    #create a feature class of all the modified permitted sewersheds
    arcpy.env.workspace = PerMS4_folder
    arcpy.Merge_management(PerMS4s, TempGDB + r'\Permitted_reachsheds')

    #reset workspace
    arcpy.env.workspace = TempGDB

    #update subbasins with permitted sewershed changes
    arcpy.Update_analysis(subbasins, "Permitted_reachsheds", "test2")
    arcpy.Dissolve_management("test2", "test2_diss", "Subbasin")

    #overlay Urban Area Boundaries with soils data
    arcpy.Intersect_analysis([UABs, soils], "soil_UABs_isect")

    #overlay that with subbasins
    arcpy.Intersect_analysis(["soil_UABs_isect", 'test2_diss'],
                             "Soil_UAB_DA_Isect")

    #overlay that with municipalities and finalize
    arcpy.Intersect_analysis([FinalMunis, 'Soil_UAB_DA_Isect'],
                             "AlmostFinalOverlay")
    arcpy.Dissolve_management("AlmostFinalOverlay", "FinalOverlayForReal",
                              ["Subbasin", "MCD_NAME", "LAST_SLA_2"])
check1 = arcpy.SearchCursor("areas_new")
for row in check1:
    dv = row.getValue("RORI")
del check1, row

# Dissolving process
while dv < Ka:
    arcpy.SelectLayerByLocation_management("areas_new",
                                           "SHARE_A_LINE_SEGMENT_WITH", "", "",
                                           "NEW_SELECTION")
    arcpy.Dissolve_management("areas_new", "new", "#", "RORI SUM")
    arcpy.AddField_management("new", "RORI", "LONG")
    arcpy.CalculateField_management('new', 'RORI', '[SUM_RORI]', 'VB', '#')
    arcpy.DeleteField_management("new", "SUM_RORI")
    arcpy.SelectLayerByAttribute_management("areas_new", "CLEAR_SELECTION")
    arcpy.Update_analysis("areas_new", "new", "new1")
    arcpy.CopyFeatures_management("new1", path)
    arcpy.Delete_management(path2)
    arcpy.Delete_management(path3)
    arcpy.Delete_management(path4)
    arcpy.CopyFeatures_management(path, "areas_new")
    arcpy.SelectLayerByAttribute_management(
        "areas_new", "NEW_SELECTION",
        '[RORI] in (SELECT min( [RORI] ) FROM areas_new)')
    cursor = arcpy.SearchCursor("areas_new")
    for row in cursor:
        dv = row.getValue("RORI")
    del cursor, row

else:
    arcpy.CopyFeatures_management("areas_new", path)
# Example #4
def main():
    """
    The main routine which processes stuff

    """
    arcpy.AddMessage("Setting up workspace and parameters.")
    arcpy.env.overwriteOutput = True
    workspace = r"in_memory"
    arcpy.env.workspace = workspace

    output_date = datetime.datetime.now().strftime("%Y%m%d")

    output = arcpy.GetParameterAsText(0)
    if output == "#" or not output:
        output = r"D:\Projects\TreeProject\TreeProject.gdb\treecrops_{}".format(
            output_date)

    # Set more variables
    output_fc = output.split("\\")[-1]
    output_workspace = output.split(output_fc)[0][:-1]
    print(output_fc)
    print(output_workspace)

    # Create output FC if it doesn't exist
    if arcpy.Exists(output):
        pass
    else:
        print("Creating output feature class")
        arcpy.CreateFeatureclass_management(output_workspace,
                                            output_fc,
                                            "POLYGON",
                                            spatial_reference=4283)

    # For feature service connection
    # noinspection SpellCheckingInspection
    gis = GIS("http://arcgis.com", "jmckechn_une", "Leoj270592")
    print("Credentials verified: {}".format(gis))
    rest_url = "https://services5.arcgis.com/3foZbDxfCo9kcPwP/arcgis/rest/services/" \
               "TreeCrops_Editing/FeatureServer/0"
    # Try copying editing service to local gdb
    trees = output_workspace + "\\fs_download_{}".format(output_date)
    if arcpy.Exists(trees):
        print("Removing existing {}".format(trees))
        arcpy.Delete_management(trees)
    print("Copying from service: {}".format(rest_url))
    arcpy.CopyFeatures_management(rest_url, trees)
    print("Copy successful: {}".format(trees))

    # Copy data to memory and set up feature layer
    trees_memory = r"in_memory/trees"
    trees_lyr = "trees_lyr"
    query = "(commodity IS NOT NULL AND commodity <> 'other') AND (stage IS NULL OR stage = '1' OR stage = '2')"
    print("Copying data to memory")
    arcpy.CopyFeatures_management(trees, trees_memory)
    arcpy.MakeFeatureLayer_management(trees_memory,
                                      trees_lyr,
                                      where_clause=query)

    # Remove ag_ features if they exist
    rem_list = arcpy.ListFeatureClasses("ag_*")
    for i in rem_list:
        print("Deleting {}".format(i))
        arcpy.Delete_management(workspace + r"/" + i)

    # Get unique values
    print("Getting unique attributes from fields")
    field_list = ["commodity", "source", "year"]
    com_list = []
    for i in field_list:
        if i == "commodity":
            u_list = unique_values(trees_lyr, i)
            for j in u_list:
                com_list.append(j)
        else:
            pass
    # # Remove banana for speed :) (testing)
    # print("Remove banana for speed :) (testing)")
    # com_list.remove("banana")
    print(com_list)
    update_list = []

    print("Looping through selecting unique features to aggregate")
    for c in com_list:
        print("    Working on {} features".format(c))
        print("        selecting")
        selection_query = "commodity = '{}'".format(c)
        arcpy.SelectLayerByAttribute_management(trees_lyr, "NEW_SELECTION",
                                                selection_query)
        ag_output = "ag_{}".format(c)
        print("        aggregating")
        arcpy.AggregatePolygons_cartography(trees_lyr, ag_output, "25 METERS",
                                            "1 HECTARES", "1 HECTARES",
                                            "ORTHOGONAL")
        print("        Adding and calculating field")
        arcpy.AddField_management(ag_output, "commodity", "TEXT")
        arcpy.CalculateField_management(ag_output, "commodity",
                                        "'{}'".format(c), "ARCADE")
        print("            created {}".format(ag_output))

        # Copy aggregated features to output location
        print("            copying to output location")
        arcpy.CopyFeatures_management(ag_output, output + "_{}".format(c))
        update_list.append(output + "_{}".format(c))

    # make a list of ag_... feature classes and loop update analysis tool
    print("Joining features back together with update tool")
    loop_no = len(com_list)
    update_no = 0
    update_output = output + "_update{}".format(update_no)
    print("update_list: {}".format(update_list))
    print("loop_no: {}".format(loop_no))
    print("update_no: {}".format(update_no))
    print("update_output: {}".format(update_output))
    arcpy.CopyFeatures_management(update_list[0], update_output)
    while update_no + 1 <= loop_no:
        loop_name = update_list[update_no].split("{}_".format(output_fc))[-1]
        print("    {} loop ({}/{})".format(loop_name, update_no + 1, loop_no))

        if update_no == 0:
            arcpy.Update_analysis(update_output, update_list[update_no],
                                  output + "_update{}".format(update_no + 1))
            print("        variables: {}, {}, {}".format(
                update_output, update_list[update_no],
                output + "_update{}".format(update_no + 1)))
        else:
            arcpy.Update_analysis(output + "_update{}".format(update_no),
                                  update_list[update_no],
                                  output + "_update{}".format(update_no + 1))
            print("        variables: {}, {}, {}".format(
                output + "_update{}".format(update_no), update_list[update_no],
                output + "_update{}".format(update_no + 1)))

        update_no += 1
    arcpy.CopyFeatures_management(output + "_update{}".format(loop_no), output)

    # join attributes back to output
    print("Trying spatial join")
    arcpy.SpatialJoin_analysis(output, trees_memory, output + "_join",
                               "JOIN_ONE_TO_ONE")

    # Add hectare field
    arcpy.AddField_management(output + "_join", "hectares", "DOUBLE")
    arcpy.CalculateGeometryAttributes_management(
        output + "_join", [["hectares", "AREA_GEODESIC"]],
        area_unit="HECTARES",
        coordinate_system=4283)

    # Overwrite output
    print("Explode, and overwriting output")
    arcpy.MultipartToSinglepart_management(output + "_join", output)

    # Clean up fields
    join_field_del_list = [
        "Join_Count", "TARGET_FID", "comment", "other", "stage", "edit",
        "Shape__Area", "Shape__Length", "commodity_1", "ORIG_FID", "field",
        "review", "imagery", "industry", "uncertain"
    ]
    print("Deleting the following fields:")
    print(join_field_del_list)
    for i in join_field_del_list:
        arcpy.DeleteField_management(output, i)

    # Assign domains
    print("Assigning domains")
    arcpy.AssignDomainToField_management(output, "source", "source_domain")
    arcpy.AssignDomainToField_management(output, "commodity",
                                         "commodity_domain")
    arcpy.AssignDomainToField_management(output, "year", "year_domain")

    arcpy.env.workspace = output_workspace

    # Delete all working features except actual output, topology and original tree data.
    print("Trying to delete unnecessary data")
    del_fc_list = arcpy.ListFeatureClasses("{}_*".format(output_fc))
    print(del_fc_list)
    for i in del_fc_list:
        print("Deleting {}".format(i))
        arcpy.Delete_management(output_workspace + "\\{}".format(i))

    # Derive points
    print("Creating points")
    arcpy.FeatureToPoint_management(output_fc, output + "_point", "INSIDE")
# -*- coding: utf-8 -*-
import arcpy, os, time

t_inicio = time.clock()  # capture the process start time
arcpy.env.outputCoordinateSystem = arcpy.SpatialReference(3116)
arcpy.env.overwriteOutput = True

infea = arcpy.GetParameterAsText(0)
feaUpdate = arcpy.GetParameterAsText(1)
capa_salida = arcpy.GetParameterAsText(3)
gdb_salida = arcpy.GetParameterAsText(2)
##output=""

if __name__ == '__main__':
    print "Ejecutando update a 64bits ...."
    print infea, feaUpdate, capa_salida, gdb_salida
    arcpy.Update_analysis(in_features=infea,
                          update_features=feaUpdate,
                          out_feature_class=gdb_salida + "\\" + capa_salida,
                          keep_borders="BORDERS",
                          cluster_tolerance="")
def buildNational(inputs, national1MReclass, gdb):
    '''
    From an input list and a base dataset, use the Update_analysis method to build
    a national dataset. The national dataset is incrementally updated based
    on previously added data.

    Arguments:
    inputs            -- Ordered list of datasets to be updated on top of
                         dataset in the second argument and progressively
                         the previously updated dataset
    national1MReclass -- Base dataset upon which the inputs list datasets
                         are progressively updated on top of
    gdb               -- ESRI Geodatabase to store the incrementally updated
                         datasets

    '''
    def updateSource(outputDataset, field, source):
        '''
        This function updates the table of features that have a NULL or empty
        value in the 'field' parameter field. The table is updated with the
        'source' input parameter where the 'field' attribute is empty or NULL.

        Arguments:
        outputDataset -- The dataset where the table is to be edited
        field         -- The field to be checked/edited if NULL or empty
        source        -- The name of the source feature dataset that has most
                          recently been added to the national dataset

        '''
        rowCount = 0
        print('\t\tUpdating \'Source\' attribute...')
        log.info('\t\tUpdating \'Source\' attribute...')
        # Create update cursor for feature class
        with arcpy.da.UpdateCursor(outputDataset, field) as cursor:
            # For each row, evaluate the 'field' value (index position
            # of 0), and update 'field' if blank to the 'source'
            for row in cursor:
                if row[0] is None or row[0] == '':
                    rowCount += 1
                    row[0] = source

                # Update the cursor
                cursor.updateRow(row)

        print('\t\tUpdated \'Source\' attribute: {} rows updated'.format(
            rowCount))
        log.info('\t\tUpdated \'Source\' attribute: {} rows updated'.format(
            rowCount))
        return

    print('\nStarting spatial join of input datasets to derived dataset...')
    log.info('Starting spatial join of input datasets to derived dataset...')

    # Set the overwrite to True so as to overwrite the layer files
    arcpy.env.overwriteOutput = True

    print(len(inputs), ' feature classes to be updated:')
    cumulativeName = ''
    # Add a new field to the base dataset that all other data will be 'updated'
    # on top of.
    newField = 'Source'
    arcpy.MakeFeatureLayer_management(national1MReclass, 'nationalLayer')
    arcpy.AddField_management("nationalLayer",
                              newField,
                              "TEXT",
                              field_length=30)
    # Calculate the Source field to match the name of the base dataset
    updateSource('nationalLayer', newField,
                 arcpy.ValidateTableName(os.path.split(national1MReclass)[1]))
    toBeUpdated = 'nationalLayer'
    for i in inputs:
        count = 0
        print('\n\t', i)
        log.info(i + ' update_Analysis onto ' + toBeUpdated)
        print('\t\tStarting Update...')
        name = cumulativeName + '_' + arcpy.ValidateTableName(
            os.path.split(i)[1])
        #print os.path.join(gdb, os.path.split(national1MReclass)[1] + os.path.split(i)[1])
        outputDataset = os.path.join(
            gdb,
            arcpy.ValidateTableName(
                os.path.split(national1MReclass)[1] + name))
        arcpy.Update_analysis(toBeUpdated, i, outputDataset, "BORDERS", "#")
        print('\t\tComplete Update of: ' + os.path.split(i)[1])
        log.info('\tComplete Update of: ' + os.path.split(i)[1])
        log.info('\t\tOutput dataset: ' + outputDataset)
        print('\t\t\tOutput file: ' + outputDataset)
        # Run and report on a spatial join to see that all the features were transferred
        spatialJoin(i, outputDataset)
        # Update the input file to be updated from that which has been created in the last Update_analysis process
        toBeUpdated = outputDataset

        if count == 0:
            count += 1
            updateSource(outputDataset, newField,
                         arcpy.ValidateTableName(os.path.split(i)[1]))
        else:
            updateSource(outputDataset, newField,
                         arcpy.ValidateTableName(os.path.split(i)[1]))
        cumulativeName = name

    print('\t\tCompleted update of input datasets to build national dataset.')

    return outputDataset
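# A minimal usage sketch for buildNational, assuming hypothetical paths and
# that this module's log and spatialJoin helpers are available:
gdb = r"C:\data\national.gdb"                     # hypothetical output geodatabase
base = r"C:\data\source.gdb\national_1m_reclass"  # hypothetical base dataset
inputs = [                                        # ordered; updated onto the base in sequence
    r"C:\data\source.gdb\region_a_landcover",
    r"C:\data\source.gdb\region_b_landcover",
]
final_dataset = buildNational(inputs, base, gdb)
print('Final national dataset: ' + final_dataset)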
def DebrisAreaSegmentation(debarea,fishnetRes,lookDistance,workspace):
    import os,arcpy
    from arcpy import env
   
    desc = arcpy.Describe(debarea)
    spatialRef = arcpy.Describe(debarea).spatialReference
    arcpy.CreateFishnet_management("Cliff_"+str(fishnetRes)+"fishnet.shp",str(desc.extent.lowerLeft),str(desc.extent.XMin) + " " + str(desc.extent.YMax + 10),fishnetRes,fishnetRes,"0","0",str(desc.extent.upperRight),"NO_LABELS","#","POLYGON")
    # create 'value' to dissolve further down
    arcpy.AddField_management("Cliff_"+str(fishnetRes)+"fishnet.shp", "value", "SHORT", 1, "", "", "", "", "")
    arcpy.MakeFeatureLayer_management("Cliff_"+str(fishnetRes)+"fishnet.shp", "tempLayer")
    arcpy.SelectLayerByLocation_management("tempLayer", 'WITHIN_A_DISTANCE', debarea,str(-1) + " meters")
    arcpy.SelectLayerByAttribute_management ("tempLayer", "SWITCH_SELECTION")
    arcpy.DeleteFeatures_management("tempLayer")
    arcpy.AddField_management("Cliff_"+str(fishnetRes)+"fishnet.shp",'FIDc','SHORT')
    arcpy.CalculateField_management ("Cliff_"+str(fishnetRes)+"fishnet.shp", "FIDc", "!FID!", "PYTHON_9.3")
    arcpy.DefineProjection_management("Cliff_"+str(fishnetRes)+"fishnet.shp", spatialRef)
    arcpy.Intersect_analysis (["Cliff_"+str(fishnetRes)+"fishnet.shp",debarea], "tiles.shp", "ALL", "", "")
    arcpy.AddField_management('tiles.shp','Perc_gl','FLOAT')
    
    rows = arcpy.UpdateCursor("tiles.shp")
    for row in rows:
         row.Perc_gl = (row.shape.area/fishnetRes**2)*100
         rows.updateRow(row) 
    del row, rows
    arcpy.JoinField_management("Cliff_"+str(fishnetRes)+"fishnet.shp", "FIDc", "tiles.shp", "FIDc", ["Perc_gl"])
    
    counter = 0
    while True:
        if arcpy.management.GetCount("Cliff_"+str(fishnetRes)+"fishnet.shp")[0] == "0":
            break
        else:
            n = []  
            rows = arcpy.SearchCursor("Cliff_"+str(fishnetRes)+"fishnet.shp")  
            for row in rows:  
                n.append(row.getValue("FIDc"))  
            del row, rows         
            n.sort() 
            arcpy.SelectLayerByAttribute_management("tempLayer", "CLEAR_SELECTION")
            noSelection = []
            noSelection = int(str(arcpy.GetCount_management("tempLayer")))
            arcpy.SelectLayerByAttribute_management("tempLayer", "NEW_SELECTION", "FIDc="+ str(n[0]))
            arcpy.SelectLayerByLocation_management("tempLayer", "SHARE_A_LINE_SEGMENT_WITH","tempLayer", "", "NEW_SELECTION")
            arcpy.SelectLayerByAttribute_management("tempLayer", "REMOVE_FROM_SELECTION", "FIDc="+ str(n[0]))
            result = []
            result = arcpy.GetCount_management("tempLayer")
            if int(result.getOutput(0)) == noSelection:
                #condition where no tiles share a line segment
                arcpy.SelectLayerByAttribute_management("tempLayer", "NEW_SELECTION", "FIDc="+ str(n[0]))
                arcpy.SelectLayerByLocation_management("tempLayer","WITHIN_A_DISTANCE","tempLayer", str(fishnetRes*lookDistance) + " meters", "NEW_SELECTION") 
                arcpy.SelectLayerByAttribute_management("tempLayer", "REMOVE_FROM_SELECTION", "FIDc="+ str(n[0]))
                #if still no shapes after look distance
                result = arcpy.GetCount_management("tempLayer")
                if int(result.getOutput(0)) == 0:
                    arcpy.CreateFeatureclass_management(workspace, "Cliff_"+str(fishnetRes)+"fishnet_iteration.shp", "POLYGON","tempLayer")
                else:
                    arcpy.CopyFeatures_management("tempLayer", "Cliff_"+str(fishnetRes)+"fishnet_iteration.shp")
            
            else:
                arcpy.CopyFeatures_management("tempLayer", "Cliff_"+str(fishnetRes)+"fishnet_iteration.shp")
    
            # populate listFIDc: unique ID of 'share a boundary' shapes in "Cliff_"+str(fishnetRes)+"fishnet_iteration.shp" 
            listFIDc = []
            flag = False
            tiles = arcpy.SearchCursor("Cliff_"+str(fishnetRes)+"fishnet_iteration.shp")
            for tile in tiles:
                flag = True
                b = tile.getValue("FIDc")
                listFIDc.append(b)
            if not flag:
                listFIDc = []
            # iterate through features in "Cliff_"+str(fishnetRes)+"fishnet_iteration.shp" and find one (if exists) with a summed area below fishnetRes^2
            tileNumber = len(listFIDc)
            tileCount = 0
            summation = 101
            breakTracker = []
            while summation > 100:
                print str(tileCount)+" of "+str(tileNumber)+"   (tileCount of tileNumber)"
                arcpy.SelectLayerByAttribute_management("tempLayer", "CLEAR_SELECTION")
                if tileCount == tileNumber:
                    if os.path.exists(workspace+"DebrisCutForCliffs"+str(counter)+".shp"):
                        arcpy.Delete_management(workspace+"DebrisCutForCliffs"+str(counter)+".shp")
                        arcpy.RefreshCatalog(workspace)
                        pathFinal = workspace+"DebrisCutForCliffs"+str(counter)+".shp"
                    else:
                        pathFinal = workspace+"DebrisCutForCliffs"+str(counter)+".shp"
                    # extract deb area
                    arcpy.SelectLayerByAttribute_management("tempLayer", "NEW_SELECTION", "FIDc="+ str(n[0]))
                    arcpy.Intersect_analysis (["tempLayer", debarea], pathFinal)
                    arcpy.DeleteFeatures_management("tempLayer")
                    arcpy.Delete_management("Cliff_"+str(fishnetRes)+"fishnet_iteration.shp")
                    counter = counter+1
                    print "Counter updated: "+str(counter)
                    breakTracker = 1
                    break                     
                else:
                    arcpy.SelectLayerByAttribute_management("tempLayer", "NEW_SELECTION", "FIDc="+ str(n[0]))
                    arcpy.SelectLayerByAttribute_management("tempLayer", "ADD_TO_SELECTION", "FIDc="+ str(listFIDc[tileCount]))
                    areaList = []
                    rows = arcpy.SearchCursor("tempLayer")  
                    for row in rows:  
                        s = row.getValue("Perc_gl")
                        areaList.append(s)
                    del row, rows
                    print "areaList:"
                    print(areaList)
                    summation = sum(areaList)
                    print "summation: "+str(summation)
                    #if summation <= 100:
                    #    break
                    #else:
                    tileCount = tileCount+1
                    print "tileCount "+str(tileCount-1) +" updated to "+str(tileCount)
                    continue
                    
            if breakTracker == 1:
                breakTracker = []
                continue
            else:
                if not os.path.exists(workspace+"DebrisCutForCliffs0.shp"):
                    pathDissolve = workspace+"DebrisDissolveForCliffs0.shp"
                    pathFinal = workspace+"DebrisCutForCliffs0.shp"
                else:
                    fcListFinal = arcpy.ListFeatureClasses("*DebrisCutForCliffs*")
                    fcListFinal.sort()
                    s = fcListFinal[::-1][0]
                    if counter - int(s.split("Cliffs",1)[1].split(".shp")[0]) == 0:
                        arcpy.Delete_management(workspace+"DebrisCutForCliffs"+str(counter)+".shp")
                        arcpy.Delete_management(workspace+"DebrisDissolveForCliffs"+str(counter)+".shp")
                        arcpy.RefreshCatalog(workspace)
                        pathDissolve = workspace+"DebrisDissolveForCliffs"+str(counter)+".shp"
                        pathFinal = workspace+"DebrisCutForCliffs"+str(counter)+".shp"
                    else:
                        pathDissolve = workspace+"DebrisDissolveForCliffs"+str(counter)+".shp"
                        pathFinal = workspace+"DebrisCutForCliffs"+str(counter)+".shp"
    
                # merge two tiles
                arcpy.Dissolve_management("tempLayer", pathDissolve,"value")
                # extract deb area              
                arcpy.Intersect_analysis ([pathDissolve, debarea], pathFinal) 
                # update Perc_gl             
                fields = ['Perc_gl']
                fieldList = arcpy.ListFields(pathDissolve)    
                fieldName = [f.name for f in fieldList]
                for field in fields:
                    if field in fieldName:
                        print "Field 'Perc_gl' already exists, not replaced"
                    else:
                        arcpy.AddField_management(pathDissolve, field, 'FLOAT')
                del field, fields
                del f, fieldList
                del fieldName
                # update FIDc
                rows = arcpy.UpdateCursor(pathDissolve)
                for row in rows:
                    row.Perc_gl = summation
                    rows.updateRow(row) 
                del row, rows
                fields = ['FIDc']
                fieldList = arcpy.ListFields(pathDissolve)    
                fieldName = [f.name for f in fieldList]
                for field in fields:
                    if field in fieldName:
                        print "Field 'FIDc' already exists, not replaced"
                    else:
                        arcpy.AddField_management(pathDissolve, field,'SHORT')
                del field, fields
                del f, fieldList
                del fieldName
                features = arcpy.UpdateCursor(pathDissolve)
                for feature in features:
                    feature.FIDc = counter
                    features.updateRow(feature)
                del feature,features
    
                arcpy.MakeFeatureLayer_management(pathDissolve, "tempLayer1")
                arcpy.SelectLayerByAttribute_management("tempLayer", "CLEAR_SELECTION")
                arcpy.Update_analysis("tempLayer","tempLayer1", "update.shp")
                arcpy.Delete_management("Cliff_"+str(fishnetRes)+"fishnet.shp")
                arcpy.RefreshCatalog(workspace)
                arcpy.Rename_management("update.shp","Cliff_"+str(fishnetRes)+"fishnet.shp")
                arcpy.RefreshCatalog(workspace)
                arcpy.MakeFeatureLayer_management("Cliff_"+str(fishnetRes)+"fishnet.shp", "tempLayer")
                #Delete last feature to exit while loop
                if arcpy.management.GetCount("Cliff_"+str(fishnetRes)+"fishnet.shp")[0] == "1":
                    arcpy.MakeFeatureLayer_management("Cliff_"+str(fishnetRes)+"fishnet.shp", "tempLayer2")
                    arcpy.SelectLayerByLocation_management("tempLayer2", 'WITHIN_A_DISTANCE', workspace+"\\DebrisCutForCliffs"+str(counter)+".shp",str(-1) + " meters")
                    arcpy.DeleteFeatures_management("tempLayer2")
                    arcpy.Delete_management(pathDissolve)
                    arcpy.Delete_management("tempLayer1")
                    arcpy.Delete_management("Cliff_"+str(fishnetRes)+"fishnet_iteration.shp")
                    print "tile "+str(counter)+" assigned"
                    continue
                else:
                    arcpy.Delete_management(pathDissolve)
                    arcpy.Delete_management("tempLayer1")
                    arcpy.Delete_management("Cliff_"+str(fishnetRes)+"fishnet_iteration.shp")
                    print "tile "+str(counter)+" assigned"
                    continue
                
    arcpy.Delete_management("tempLayer")
    arcpy.Delete_management("tiles.shp")
    arcpy.Delete_management("Cliff_"+str(fishnetRes)+"fishnet.shp")
# Example #8
    def __hydro_analysis(self, hydro_file_list, distance_map, project_area,
                         hydro_project_area, timestamp):

        if hydro_project_area:

            arcpy.AddMessage('Processing hydrography...')
            clipped_features_map = {
                i: (hydro_file_list[i],
                    'tmp_hyd_clp_{:0>4}_{}'.format(i, timestamp))
                for i in range(0, len(hydro_file_list))
            }

            for index, paths in clipped_features_map.iteritems():

                features, clipped_features = paths
                arcpy.Clip_analysis(in_features=features,
                                    clip_features=hydro_project_area,
                                    out_feature_class=clipped_features)

            hydro_file_list = [i[1] for i in clipped_features_map.values()]

        interval_classes = sorted(distance_map.keys())
        buffer_features_list = list()

        arcpy.AddMessage('Buffering...')

        for interval in interval_classes:

            buffer_subfeatures_list = list()
            distance = distance_map[interval]
            arcpy.AddMessage('({} mi. interval)'.format(distance))
            buffer_features = 'tmp_hyd_buf_{:0>4}_{}'.format(
                interval, timestamp)

            for features in hydro_file_list:

                buffer_subfeatures = 'tmp_hyd_buf_{:0>4}_{:0>4}_{}'.format(
                    hydro_file_list.index(features), interval, timestamp)

                arcpy.Buffer_analysis(
                    in_features=features,
                    out_feature_class=buffer_subfeatures,
                    buffer_distance_or_field='{} Miles'.format(distance),
                    dissolve_option='ALL')

                buffer_subfeatures_list.append(buffer_subfeatures)

            subfeatures_union = 'tmp_hyd_uni_{:0>4}_{}'.format(
                interval, timestamp)

            arcpy.Union_analysis(in_features=buffer_subfeatures_list,
                                 out_feature_class=subfeatures_union,
                                 join_attributes='NO_FID')

            arcpy.Dissolve_management(in_features=subfeatures_union,
                                      out_feature_class=buffer_features)

            arcpy.AddField_management(in_table=buffer_features,
                                      field_name='reduction',
                                      field_type='LONG')

            arcpy.CalculateField_management(in_table=buffer_features,
                                            field='reduction',
                                            expression='{}'.format(interval),
                                            expression_type='PYTHON_9.3')

            buffer_features_list.append(buffer_features)

        buffer_features_list.sort()
        buffer_features_list.reverse()

        update_input_features = buffer_features_list[0]
        update_list = buffer_features_list[1:]
        combined_buffers = 'tmp_com_buf_{}'.format(timestamp)

        arcpy.AddMessage('Combining buffers...')
        for features in update_list:

            update_index = update_list.index(features)
            update_output = 'tmp_hyd_upd_{:0>4}_{}'.format(
                update_index, timestamp)
            if update_index == len(update_list) - 1:
                update_output = combined_buffers

            arcpy.Update_analysis(in_features=update_input_features,
                                  update_features=features,
                                  out_feature_class=update_output)

            update_input_features = update_output

        if project_area:
            arcpy.AddMessage('Clipping to project area...')
            clipped_combined_buffers = 'tmp_com_buf_clp_{}'.format(timestamp)
            arcpy.Clip_analysis(in_features=combined_buffers,
                                clip_features=project_area,
                                out_feature_class=clipped_combined_buffers)

            return clipped_combined_buffers

        else:

            return combined_buffers

        return
# Example #9
def runScript(uploaderpk):
    print("Starting script")
    startTime = time.time()

    arcpy.env.overwriteOutput = True
    arcpy.env.cellSize = 1
    # Activate spatial analyst extension
    if arcpy.CheckExtension("Spatial") == "Available":
        arcpy.CheckOutExtension("Spatial")

    if arcpy.CheckExtension("3D") == "Available":
        arcpy.CheckOutExtension("3D")

    # basePath = .../apps/findbestroute/workfiles/
    global basePath
    sleep(2)
    basePath = os.path.join(settings.PROJECT_PATH, 'apps', 'findbestroute',
                            'workfiles')
    env.workspace = basePath
    sleep(2)

    mxd = arcpy.mapping.MapDocument(os.path.join(basePath, r'mapdocument.mxd'))

    onlyfiles = []
    kart_path = None
    for file in os.listdir(
            os.path.join(settings.PROJECT_PATH, r"files",
                         r"user_" + str(uploaderpk))):
        if file.endswith(".shp"):
            onlyfiles.append(
                os.path.join(settings.PROJECT_PATH, r"files",
                             r"user_" + str(uploaderpk), file))
        elif file.endswith(".jpg"):
            kart_path = os.path.join(settings.PROJECT_PATH, r"files",
                                     r"user_" + str(uploaderpk), file)

    for el in onlyfiles:
        print("File: " + el.__str__())
    print("Map file: " + kart_path.__str__())
    arealsymboler, linjesymboler, punktsymboler, breakBoolean = geometryType(
        onlyfiles)
    if (breakBoolean):
        print("Data files do not contain all required shapefile types "
              "(point, polyline or polygon)")
        return
    kart = kart_path  #os.path.join(settings.PROJECT_PATH, r"apps", r"findbestroute", r"workfiles", r"inData", r"kart.jpg") #geoProcess(kart_path, arealsymboler)

    start = getStart(punktsymboler)
    destination = getDestination(punktsymboler)
    mask = setMask(start, destination)

    arcpy.env.mask = os.path.join(basePath, r"Trash", r"mask.shp")

    utsnitt = getExtentOfMap(linjesymboler)

    hoydedata = arcpy.Clip_analysis(
        in_features=os.path.join(basePath, r"hoydeData", r"trondheiml.shp"),
        clip_features=utsnitt,
        out_feature_class=os.path.join(basePath, r"Trash", r"hoydedata.shp"),
        cluster_tolerance="")

    # Clip the symbols to the mask
    ar = arcpy.Clip_analysis(in_features=arealsymboler,
                             clip_features=mask,
                             out_feature_class=os.path.join(
                                 basePath, r"Trash", r"a5"),
                             cluster_tolerance="")
    ln = arcpy.Clip_analysis(in_features=linjesymboler,
                             clip_features=mask,
                             out_feature_class=os.path.join(
                                 basePath, r"Trash", r"a6"),
                             cluster_tolerance="")
    pt = arcpy.Clip_analysis(in_features=punktsymboler,
                             clip_features=mask,
                             out_feature_class=os.path.join(
                                 basePath, r"Trash", r"a7"),
                             cluster_tolerance="")

    # Round down all symbols
    floorSymbols(ar)
    floorSymbols(ln)
    floorSymbols(pt)

    # Buffer the lines to the width they are supposed to have
    fieldnames = [field.name for field in arcpy.ListFields(ln)]
    if not "WIDTH" in fieldnames:
        arcpy.AddField_management(in_table=ln,
                                  field_name="WIDTH",
                                  field_type="DOUBLE")
    symbols = [
        106, 107, 201, 203, 304, 305, 307, 502, 503, 504, 505, 506, 507, 508,
        509
    ]
    width = [2, 2, 4, 4, 2, 2, 1, 6, 4, 3, 2.5, 2, 2, 2, 2]
    features = arcpy.UpdateCursor(ln)
    for feature in features:
        if feature.SYMBOL in symbols:
            n = symbols.index(feature.SYMBOL)
            feature.WIDTH = width[n]
        features.updateRow(feature)
    del feature, features, n
    ln_buff = arcpy.Buffer_analysis(in_features=ln,
                                    out_feature_class=os.path.join(
                                        basePath, r"Trash", r"a8"),
                                    buffer_distance_or_field="WIDTH",
                                    line_side="FULL",
                                    line_end_type="FLAT",
                                    dissolve_option="LIST",
                                    dissolve_field="SYMBOL")

    # Extract all forbidden symbols
    forbiddenArea = arcpy.Select_analysis(
        in_features=ar,
        out_feature_class=os.path.join(basePath, r"Trash", r"a9"),
        where_clause=
        '"SYMBOL" = 202 OR "SYMBOL" = 211 OR "SYMBOL" = 301 OR "SYMBOL" = 302 OR "SYMBOL" = 307 OR "SYMBOL" = 415 OR "SYMBOL" = 526 OR "SYMBOL" = 527 OR "SYMBOL" = 528 OR "SYMBOL" = 709'
    )
    forbiddenLineBuff = arcpy.Select_analysis(
        in_features=ln_buff,
        out_feature_class=os.path.join(basePath, r"Trash", r"b1"),
        where_clause=
        '"SYMBOL" = 201 OR "SYMBOL" = 307 OR "SYMBOL" = 521 OR "SYMBOL" = 524 OR "SYMBOL" = 528 OR "SYMBOL" = 534 OR "SYMBOL" = 709'
    )

    # Extract all passable symbols
    passableArea = arcpy.Select_analysis(
        in_features=ar,
        out_feature_class=os.path.join(basePath, r"Trash", r"b2"),
        where_clause=
        '"SYMBOL" <> 202 AND "SYMBOL" <> 211 AND "SYMBOL" <> 301 AND "SYMBOL" <> 302 AND "SYMBOL" <> 307 AND "SYMBOL" <> 415 AND "SYMBOL" <> 526 AND "SYMBOL" <> 527 AND "SYMBOL" <> 528 AND "SYMBOL" <> 601 AND "SYMBOL" <> 709'
    )
    passableLineBuff = arcpy.Select_analysis(
        in_features=ln_buff,
        out_feature_class=os.path.join(basePath, r"Trash", r"b3"),
        where_clause=
        '"SYMBOL" <> 201 AND "SYMBOL" <> 307 AND "SYMBOL" <> 521 AND "SYMBOL" <> 524 AND "SYMBOL" <> 528 AND "SYMBOL" <> 534 AND "SYMBOL" <> 709'
    )

    # Create forest areas
    area = arcpy.Update_analysis(in_features=passableArea,
                                 update_features=forbiddenArea,
                                 out_feature_class=os.path.join(
                                     basePath, r"Trash", r"b4"))
    forest = arcpy.Erase_analysis(in_features=mask,
                                  erase_features=area,
                                  out_feature_class=os.path.join(
                                      basePath, r"Trash", r"b5"))
    arcpy.AddField_management(in_table=forest,
                              field_name="SYMBOL",
                              field_type="DOUBLE")
    features = arcpy.UpdateCursor(forest)
    for feature in features:
        feature.SYMBOL = 405
        features.updateRow(feature)
    del feature, features

    # Build the map in ArcMap
    area1 = arcpy.Erase_analysis(in_features=passableArea,
                                 erase_features=forbiddenArea,
                                 out_feature_class=os.path.join(
                                     basePath, r"Trash", r"b6"))
    area2 = arcpy.Erase_analysis(in_features=area1,
                                 erase_features=forbiddenLineBuff,
                                 out_feature_class=os.path.join(
                                     basePath, r"Trash", r"b7"))
    passable1 = arcpy.Update_analysis(in_features=area2,
                                      update_features=forest,
                                      out_feature_class=os.path.join(
                                          basePath, r"Trash", r"b8"))
    mapped = arcpy.Update_analysis(in_features=passable1,
                                   update_features=passableLineBuff,
                                   out_feature_class=os.path.join(
                                       basePath, r"Trash", r"b9"))

    # Set the cost on all areas
    setCost(mapped)
    print('hey')
    costRaster = arcpy.FeatureToRaster_conversion(
        mapped, "COST", os.path.join(basePath, r"Results", r"CostRaster.tif"))

    # Create the slope raster

    #create a TIN of the area
    tin = arcpy.CreateTin_3d(
        out_tin=os.path.join(basePath, r"Results", r"TIN"),
        spatial_reference="#",
        in_features=os.path.join(basePath, r"Trash", r"hoydedata.shp") +
        " HOEYDE masspoints")

    # Replace a layer/table view name with a path to a dataset (which can be a layer file) or create the layer/table view within the script
    # The following inputs are layers or table views: "hoydeTIN"
    tinRaster = arcpy.TinRaster_3d(in_tin=os.path.join(basePath, r"Results",
                                                       r"TIN"),
                                   out_raster=os.path.join(
                                       basePath, r"Results", "hRaster"),
                                   data_type="FLOAT",
                                   method="LINEAR",
                                   sample_distance="CELLSIZE 1",
                                   z_factor="1")

    # Replace a layer/table view name with a path to a dataset (which can be a layer file) or create the layer/table view within the script
    # The following inputs are layers or table views: "hraster"
    slope = arcpy.Slope_3d(in_raster=os.path.join(basePath, r"Results",
                                                  r"hRaster"),
                           out_raster=os.path.join(basePath, r"Results",
                                                   r"slope"),
                           output_measurement="DEGREE",
                           z_factor="1")

    # Reclassify the slope
    reMapRange = RemapRange([[0, 0.5, 100], [0.5, 1, 101], [1, 2, 102],
                             [2, 3, 103], [3, 4, 104], [4, 5,
                                                        105], [5, 6, 106],
                             [6, 7, 107], [7, 8, 108], [8, 9, 109],
                             [9, 10, 110], [10, 11, 111], [11, 12, 112],
                             [12, 13, 113], [13, 14, 114], [14, 15, 115],
                             [15, 16, 116], [16, 17, 117], [17, 18, 118],
                             [18, 19, 119], [19, 20, 120], [20, 90, 150]])
    slope_reclass = Reclassify(in_raster=os.path.join(basePath, r"Results",
                                                      r"slope"),
                               reclass_field="VALUE",
                               remap=reMapRange)
    slope_reclass.save(os.path.join(basePath, r"Results", r"slopeReclass"))

    # Raster calculator: build a raster that accounts for elevation in the cost raster
    finalCostRaster = Raster(
        os.path.join(basePath, r"Results", r"CostRaster.tif")) * (
            Raster(os.path.join(basePath, r"Results", r"slopeReclass")) / 100)

    # Compute the least-cost path
    cdr = arcpy.sa.CostDistance(start, finalCostRaster)
    cdr.save(os.path.join(basePath, r"Results", r"costDistance"))
    cbr = arcpy.sa.CostBackLink(start, finalCostRaster)
    cbr.save(os.path.join(basePath, r"Results", r"Costback"))
    cp = arcpy.sa.CostPath(destination, cdr, cbr, "EACH_CELL")
    cp.save(os.path.join(basePath, r"Results", r"costpath"))

    # Convert to a polygon with some width
    arcpy.RasterToPolygon_conversion(
        in_raster=os.path.join(basePath, r"Results", r"costpath"),
        out_polygon_features=os.path.join(basePath, r"Results", r"cpPoly.shp"),
        simplify="SIMPLIFY")
    arcpy.Buffer_analysis(in_features=os.path.join(basePath, r"Results",
                                                   r"cpPoly.shp"),
                          out_feature_class=os.path.join(
                              basePath, r"Results", r"LCP.shp"),
                          buffer_distance_or_field="2",
                          line_side="FULL",
                          line_end_type="FLAT",
                          dissolve_option="LIST")

    df = arcpy.mapping.ListDataFrames(mxd, "*")[0]
    for lyr in arcpy.mapping.ListLayers(mxd, "", df):
        arcpy.mapping.RemoveLayer(df, lyr)
    print("Deleted lyr's in mxd")
    # Add to ArcMap
    templateLayer = arcpy.mapping.Layer(
        os.path.join(basePath, r"Template", r"colorTemplate.lyr"))
    df = arcpy.mapping.ListDataFrames(mxd, "*")[0]
    newlayer = arcpy.mapping.Layer(
        os.path.join(basePath, r"Results", r"LCP.shp"))
    newlayer.transparency = 50
    """ PROBLEMBARN RETT UNDER """
    arcpy.ApplySymbologyFromLayer_management(in_layer=newlayer,
                                             in_symbology_layer=templateLayer)
    #                                            in_symbology_layer = os.path.join(basePath, r"Template", r"colorTemplate.lyr"))
    """ PROBLEMBARN RETT OVER """

    arcpy.mapping.AddLayer(df, newlayer, "BOTTOM")
    arcpy.MakeRasterLayer_management(in_raster=kart,
                                     out_rasterlayer=os.path.join(
                                         basePath, r"Results", r"rasterkart"))
    mapLayer = arcpy.mapping.Layer(
        os.path.join(basePath, r"Results", r"rasterkart"))
    arcpy.mapping.AddLayer(df, mapLayer, "BOTTOM")

    # Create control circles and a line and add them in ArcGIS
    points = arcpy.CreateFeatureclass_management(out_path=os.path.join(
        basePath, r"Trash"),
                                                 out_name="points",
                                                 geometry_type="POINT")

    del destination
    start = getStart(pt)
    destination = getDestination(pt)
    features = arcpy.UpdateCursor(start)
    for feature in features:
        startX = feature.POINT_X
        startY = feature.POINT_Y
    features = arcpy.UpdateCursor(destination)
    for feature in features:
        destX = feature.POINT_X
        destY = feature.POINT_Y
    cursor = arcpy.da.InsertCursor(points, ("fid", "SHAPE@XY"))
    cursor.insertRow((1, (startX, startY)))
    cursor.insertRow((2, (destX, destY)))
    del destination

    outerCircle = arcpy.CreateFeatureclass_management(out_path=os.path.join(
        basePath, r"Trash"),
                                                      out_name="circles1.shp",
                                                      geometry_type="POLYGON")
    innerCircle = arcpy.CreateFeatureclass_management(out_path=os.path.join(
        basePath, r"Trash"),
                                                      out_name="circles2.shp",
                                                      geometry_type="POLYGON")
    circle = arcpy.CreateFeatureclass_management(
        out_path=os.path.join(basePath, r"Trash"),
        out_name="circles.shp",
        geometry_type="POLYGON",
    )
    arcpy.Buffer_analysis(points, outerCircle, 40)
    arcpy.Buffer_analysis(points, innerCircle, 35)
    arcpy.Erase_analysis(outerCircle, innerCircle, circle)
    symLayer = arcpy.mapping.Layer(
        os.path.join(basePath, r"Template", r"color2.lyr"))
    circleLayer = arcpy.mapping.Layer(
        os.path.join(basePath, r"Trash", r"circles.shp"))
    arcpy.ApplySymbologyFromLayer_management(in_layer=circleLayer,
                                             in_symbology_layer=symLayer)
    arcpy.mapping.AddLayer(data_frame=df,
                           add_layer=circleLayer,
                           add_position="TOP")

    # Create the control line
    lines = arcpy.CreateFeatureclass_management(out_path=os.path.join(
        basePath, r"Trash"),
                                                out_name="line.shp",
                                                geometry_type="POLYGON")
    directionX = (destX - startX) / (
        math.sqrt(math.pow(destX - startX, 2) + math.pow(destY - startY, 2)))
    directionY = (destY - startY) / (
        math.sqrt(math.pow(destX - startX, 2) + math.pow(destY - startY, 2)))
    features = []
    features.append(
        arcpy.Polyline(
            arcpy.Array([
                arcpy.Point(startX + 45 * directionX,
                            startY + 45 * directionY),
                arcpy.Point(destX - 45 * directionX, destY - 45 * directionY)
            ])))
    lineFeat = arcpy.CopyFeatures_management(
        features, os.path.join(basePath, r"Trash", r"lines.shp"))
    arcpy.Buffer_analysis(in_features=lineFeat,
                          out_feature_class=lines,
                          buffer_distance_or_field=2.5,
                          line_end_type="FLAT")
    lineLayer = arcpy.mapping.Layer(
        os.path.join(basePath, r"Trash", r"line.shp"))
    arcpy.ApplySymbologyFromLayer_management(in_layer=lineLayer,
                                             in_symbology_layer=symLayer)
    arcpy.mapping.AddLayer(data_frame=df,
                           add_layer=lineLayer,
                           add_position="TOP")

    mxd.save()

    # Export an image of the route choice
    B = df.extent.XMax - df.extent.XMin
    H = df.extent.YMax - df.extent.YMin

    filename = str(uploaderpk) + "_" + time.strftime(
        "%d-%m-%Y") + "_" + time.strftime("%H-%M-%S") + ".png"
    relative_path_string = os.path.join(r"Dump", filename)
    print("hurr  " + settings.PROJECT_PATH)
    print("durr " + relative_path_string)
    out_path = os.path.join(settings.PROJECT_PATH, "files", r"Dump", filename)
    print(out_path)
    arcpy.mapping.ExportToPNG(map_document=mxd,
                              out_png=out_path,
                              data_frame=df,
                              df_export_width=int(3 * B),
                              df_export_height=int(3 * H),
                              resolution=225)
    print("Finished making image")

    #relative_path = os.path.join(r"Dump", "MapLCP.png")
    img = Image()
    img.uploader = PathUser.objects.get(pk=uploaderpk)
    img.bilde = relative_path_string
    img.save()

    folder = os.path.join(basePath, r"Trash")
    for file in os.listdir(folder):
        filepath = os.path.join(folder, file)
        try:
            if os.path.isfile(filepath):
                print "Removing " + filepath
                os.remove(filepath)
            elif os.path.isdir(filepath):
                print "Removing " + filepath
                shutil.rmtree(filepath)
        except Exception as e:
            print(e)

    folder = os.path.join(basePath, r"Results")
    for file in os.listdir(folder):
        filepath = os.path.join(folder, file)
        try:
            if os.path.isfile(filepath):
                print "Removing " + filepath
                os.remove(filepath)
            elif os.path.isdir(filepath):
                print "Removing " + filepath
                shutil.rmtree(filepath)
        except Exception as e:
            print(e)

    delete_user_uploads.delay(uploaderpk)

    end = time.time()
    print(end - startTime)
# Example #10
def main(lyrs, check, connection):
    """Main function of the scriptt

    This function analyzes a list of layers in order to update the
    Impervious surface feature in PROD3. If none of the component layers
    have changed since the last script run, the layer will not be
    updated.

    Parameters
    ----------
    lyrs : list
        List of dicts, where each dict is a {"feature class name": "query"}
        pair
    check : boolean
        A flag that triggers comparison to the previous script run
    connection : str
        A file path to the edit SDE connection

    Returns
    -------
    str
        The main email body message based on the results of the function
    """

    # Define the output layer
    original = os.path.join(edit_conn, "GISPROD3.PW.ImperviousSurface")

    # Instantiate each layer as an Impervious class
    impervious_features = [Impervious(layer) for layer in lyrs]
    equals_previous = [imp.equals_previous() for imp in impervious_features]

    # See if any changes have been made to the layers involved
    if check and all(equals_previous):
        log.info("None of the layers have changed since the previous run...")
        msg = ("Impervious surfaces do not need to be updated because "
               "none of its component layers have changed.")
    else:
        # Update features based on the assigned hierarchy
        log.info("Creating a new ImperviousSurface layer...")
        temp = impervious_features[0].memory_fc(original)
        for surf in impervious_features[1:]:
            log.info(f"Updating ImperviousSurface with {surf.name}...")
            temp = arcpy.Update_analysis(
                temp, surf.memory_fc(original),
                f"temp.gdb\\{surf.name.split('.')[-1]}Update")

        # Remove old records from the table, make three attempts in case of
        # unknown error
        for x in range(3):
            log.info(f"Attempt #{x+1} for removing old data...")
            try:
                editor = arcpy.da.Editor(connection)
                editor.startEditing(False, True)
                editor.startOperation()
                with arcpy.da.UpdateCursor(original, ['GLOBALID']) as cursor:
                    for row in cursor:
                        cursor.deleteRow()
                editor.stopOperation()
                editor.stopEditing(True)

                log.info("Loading new impervious surfaces...")
                arcpy.Append_management(temp, original, "NO_TEST")

                msg = ("The derived layer has been updated.")
                break
            except Exception:
                if x < 2:
                    log.info(f"Attempt #{x+1} failed, rertrying...")
                    sleep(20)  # sleep for 20 seconds before retrying
                else:
                    log.info("Final attempt failed...")
                    msg = "The script failed to make edits."
            finally:
                del editor

    # Return the email message notifying users of script run
    return msg
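# A minimal usage sketch for this main(), assuming hypothetical layer/query
# pairs and a hypothetical edit SDE connection file path; the Impervious
# class, log, and edit_conn come from the surrounding module.
layers = [
    {"GISPROD3.PW.Building": "STATUS = 'Active'"},   # hypothetical layer/query pair
    {"GISPROD3.PW.PavedArea": ""},                   # hypothetical layer/query pair
]
message = main(layers, check=True, connection=r"C:\connections\edit.sde")
print(message)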
# Example #11
arcpy.env.parallelProcessingFactor = general_params['threads']
arcpy.env.overwriteOutput = True
arcpy.env.workspace = sys.argv[1]

# remove missing iucn categories from params
features = arcpy.ListFeatureClasses()
iucn_names = []
for x in iucn_params['IUCN_CAT']:
    name = 'IUCN_CAT_' + x.replace(' ', '_')
    if name in features:
        iucn_names.append(name)
iucn_names.reverse()

### Main processing
# initial update data
arcpy.Update_analysis(sys.argv[1] + '/' + iucn_names[0],
                      sys.argv[1] + '/' + iucn_names[1],
                      sys.argv[2] + '/update_1')

# subsequent updates
counter = 1
for x in iucn_names[2:]:
    arcpy.Update_analysis(sys.argv[2] + '/update_' + str(counter),
                          sys.argv[1] + '/' + x,
                          sys.argv[2] + '/update_' + str(counter + 1))
    counter = counter + 1

# export final dataset
arcpy.Copy_management(sys.argv[2] + '/update_' + str(counter),
                      sys.argv[2] + '/' + sys.argv[3])
import arcpy

arcpy.env.overwriteOutput = True

# UPDATE feature classes: Feature1 with Feature2 after Time Index has been added to each  feature class

inFeatures = arcpy.GetParameterAsText(0)
updateFeatures = arcpy.GetParameterAsText(1)

inFeatureTime_2 = arcpy.GetParameterAsText(2)

arcpy.AddField_management(updateFeatures, "TIME", "DOUBLE", "", "", "25", "",
                          "NULLABLE", "REQUIRED", "")
cursor = arcpy.UpdateCursor(updateFeatures)
for row in cursor:
    row.setValue("TIME", inFeatureTime_2)
    cursor.updateRow(row)

outFeatures = arcpy.GetParameterAsText(3)
arcpy.Update_analysis(inFeatures, updateFeatures, outFeatures, "BORDERS", 0.00)

arcpy.AddMessage(
    "Script was run. To see the result, check the feature class named after "
    "the output folder.")
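# The script above reads its inputs as script-tool parameters; a minimal
# sketch of the same sequence with hard-coded, hypothetical paths for testing:
import arcpy

inFeatures = r"C:\data\time.gdb\feature1"           # hypothetical
updateFeatures = r"C:\data\time.gdb\feature2"       # hypothetical
outFeatures = r"C:\data\time.gdb\feature1_updated"  # hypothetical
timeIndex = 2.0                                     # hypothetical time index

arcpy.AddField_management(updateFeatures, "TIME", "DOUBLE")
with arcpy.da.UpdateCursor(updateFeatures, ["TIME"]) as cursor:
    for row in cursor:
        row[0] = timeIndex
        cursor.updateRow(row)
arcpy.Update_analysis(inFeatures, updateFeatures, outFeatures, "BORDERS", 0.00)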
# Example #13
            "corner_sum","JOIN_ONE_TO_ONE", "KEEP_ALL", "", "INTERSECT")

#Step 10
arcpy.MakeFeatureLayer_management('corner_sum', 'corner_sum_select',
                                  "Join_Count =2")

#Step 11
arcpy.SelectLayerByLocation_management('corner_sum_select',"CROSSED_BY_THE_OUTLINE_OF",\
                                       sapolygon,'#',"NEW_SELECTION",'INVERT')

#Step 12
arcpy.SelectLayerByLocation_management('too_narrow_select',"HAVE_THEIR_CENTER_IN"\
                                       ,'corner_sum_select','#',"NEW_SELECTION")

#Step 13
arcpy.Update_analysis("MU_gen", "too_narrow_select", "MU_cornered", "BORDERS",
                      "0.1 Meters")

#Step 14
arcpy.MakeFeatureLayer_management("MU_cornered", "MU_cornered_select")
arcpy.SelectLayerByLocation_management("MU_cornered_select","COMPLETELY_WITHIN",\
                                       'corner_sum_select')
arcpy.CopyFeatures_management("too_narrow_select", 'truncated')
#Step 15 Eliminate
arcpy.Eliminate_management("MU_cornered_select", "MU_decornered", "LENGTH")

arcpy.Delete_management("too_narrow")
arcpy.Delete_management("too_narrow_sing")
arcpy.Delete_management("corner_buff_4")
arcpy.Delete_management("corner_inter")
arcpy.Delete_management("corner_inter_sing")
arcpy.Delete_management("corner_sum")
# Example #14
        arcpy.MakeFeatureLayer_management(polyPath, tempLayer)
        arcpy.SelectLayerByAttribute_management(tempLayer, "NEW_SELECTION",
                                                "mapunit = ''")
        if int(arcpy.GetCount_management(tempLayer).getOutput(0)) > 0:
            arcpy.DeleteFeatures_management(tempLayer)

    arcpy.Dissolve_management(in_features=polyPath,
                              out_feature_class=dissPath,
                              dissolve_field="",
                              statistics_fields="",
                              multi_part="MULTI_PART",
                              unsplit_lines="DISSOLVE_LINES")

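    # overlay the dissolved polygons onto the original map areas, preserving borders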
    arcpy.Update_analysis(in_features=mapAreasFCOrig,
                          update_features=dissPath,
                          out_feature_class=mapAreasFC,
                          keep_borders="BORDERS",
                          cluster_tolerance="")

    fields = [
        'inputDBPath', 'inputFDSName', 'inputPrefixLength', 'listFCsToClip',
        'exportFDSPrefix', 'inputPolygons', 'ConversionTables',
        'LineConversionTables', 'DataSourceID'
    ]
    with arcpy.da.UpdateCursor(mapAreasFC, fields) as cursor:
        for row in cursor:
            print(row[0])
            if row[0] is None or row[0] == "":
                print("Updating feature with no attributes...")
                row[0] = overlayInputDBPath
                row[1] = overlayInputFDSName
Example #15
import os
import re

import arcpy
from arcpy import env
from arcpy.sa import Con, IsNull, Nibble, Watershed

# 'cu' is a project utility module (multi_msg, create_temp_GDB, rename_field)
# assumed to be imported elsewhere


def create_csi_watersheds(flowdir, pour_dir, nhd_gdb, out_gdb):

    # Starting environmental variables:
    env.extent = flowdir
    env.snapRaster = flowdir
    env.cellSize = 10
    env.outputCoordinateSystem = arcpy.SpatialReference(102039)
    arcpy.CheckOutExtension('Spatial')

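    # parse the 8-digit HUC code from the flow direction raster name and the
    # 4-digit code from the NHD geodatabase name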
    huc8_code = re.search(r'\d{8}', os.path.basename(flowdir)).group()
    huc4_code = re.search(r'\d{4}', os.path.basename(nhd_gdb)).group()

    # create temp directory because we need shape geometry
    temp_gdb = cu.create_temp_GDB('watersheds' + huc4_code)
    cu.multi_msg("Temp geodatabase is located at {}".format(temp_gdb))
    env.workspace = temp_gdb

    wbd_hu8 = os.path.join(nhd_gdb, "WBD_HU8")
    field_name = (arcpy.ListFields(wbd_hu8, "HU*8"))[0].name
    whereClause8 = """{0} = '{1}'""".format(
        arcpy.AddFieldDelimiters(nhd_gdb, field_name), huc8_code)
    arcpy.Select_analysis(wbd_hu8, "hu8", whereClause8)
    arcpy.Buffer_analysis("hu8", "hu8_buffered", "100 meters")

    # Create the basic watersheds
    pour_points = os.path.join(pour_dir, 'pour_points.tif')
    arcpy.Clip_management(pour_points, '', "pour_points_clipped",
                          "hu8_buffered", '0', 'ClippingGeometry')
    raw_watersheds = os.path.join(
        out_gdb, 'huc{}_watersheds_precursors'.format(huc8_code))
    cu.multi_msg("Calculating preliminary watersheds...")
    outWatershed = Watershed(flowdir, "pour_points_clipped")
    outWatershed.save(raw_watersheds)

    cu.multi_msg(
        "Clipping watersheds to subregion boundaries and filtering spurious watersheds..."
    )

    # Watershed raster to polygons
    arcpy.RasterToPolygon_conversion(raw_watersheds, "wspoly1", 'NO_SIMPLIFY',
                                     "Value")

    # Clip watershed polygons to subregion polys.
    arcpy.Clip_analysis("wspoly1", "hu8", "wsclip1")

    # Clip watershed

    ##    # Calculate hectares
    ##    arcpy.AddField_management("wsclip1", "HA", "DOUBLE")
    ##    arcpy.CalculateField_management("wsclip1", "HA", '''!shape.area@hectares!''', "PYTHON")

    # Create fc of watershed polygons >= 1 ha that coincide with seed lines and polys.
    seedline = os.path.join(pour_dir, 'pourpoints.gdb', 'eligible_flowlines')
    seedpoly = os.path.join(pour_dir, 'pourpoints.gdb', 'eligible_lakes')
    arcpy.MakeFeatureLayer_management("wsclip1", "wsclip1_lyr")

    arcpy.SelectLayerByLocation_management("wsclip1_lyr", "INTERSECT",
                                           seedline, '', "NEW_SELECTION")
    arcpy.SelectLayerByLocation_management("wsclip1_lyr", "INTERSECT",
                                           seedpoly, '', "ADD_TO_SELECTION")
    arcpy.SelectLayerByAttribute_management("wsclip1_lyr", "SUBSET_SELECTION",
                                            '''"Shape_Area" >= 10000''')

    cu.multi_msg("Reshaping watersheds...")
    # Polygon back to raster
    grid_code = arcpy.ListFields("wsclip1_lyr", "grid*")[0].name
    arcpy.PolygonToRaster_conversion("wsclip1_lyr", grid_code, "ws_legit_ras")
    arcpy.Clip_management("ws_legit_ras", '', "ws_legit_clipped_ras", "hu8",
                          "0", "ClippingGeometry")

    # Make a raster from the subregion (boundary) polygon with zero for cell values.
    arcpy.AddField_management("hu8", "Value", "SHORT")
    arcpy.CalculateField_management("hu8", "Value", "0", "PYTHON")
    arcpy.PolygonToRaster_conversion("hu8", "Value", "boundary_raster")
    arcpy.Clip_management("boundary_raster", '', "boundary_raster_clip", "hu8",
                          '', "ClippingGeometry")

    # Fill NoData in watersheds with the zero values from the subregion raster's cells.
    composite = Con(IsNull("ws_legit_clipped_ras"), "boundary_raster_clip",
                    "ws_legit_clipped_ras")
    composite.save("composite_raster")
    arcpy.Clip_management("composite_raster", '', "composite_raster_clip",
                          "hu8", '0', "ClippingGeometry")

    # Make a mask of zero cells. NoData cells are the actual mask for nibble.
    premask = Con(IsNull("composite_raster_clip"), "composite_raster_clip", 0)
    premask.save("premask")

    arcpy.Clip_management("premask", '', "mask", "hu8", '', "ClippingGeometry")

    # Set Null to 1.
    pre_watersheds = Con(IsNull("composite_raster_clip"), 1,
                         "composite_raster_clip")
    pre_watersheds.save("pre_watersheds")  # does this speed things up?
    ##    prews.save("prews.tif")

    # Nibble masked values (null values along boundary).
    cu.multi_msg('Nibbling watersheds as part of reshaping...')
    nibble = Nibble("pre_watersheds", "mask", "DATA_ONLY")
    nibble.save("nibble")
    # Use HU8 buffer so that watersheds will overrun HU8 boundaries and get
    # clipped without weird slivers later
    arcpy.Clip_management("nibble", "", "watersheds_ras", "hu8_buffered",
                          "NoData", "ClippingGeometry")

    # Convert watershed raster to polygon.
    # Setting simplify keyword to TRUE in RasterToPolygon_conversion
    # is not working reliably so need to do this in two steps, unfortunately
    cu.multi_msg(
        "Converting reshaped watersheds raster to polygons. If you experience problems with this step, please read Known and Outstanding Bugs.txt"
    )
    arcpy.RasterToPolygon_conversion("watersheds_ras", "nibble_sheds",
                                     'SIMPLIFY', "Value")  #simplify okay

    ##    # I'm using 15 as the tolerance
    ##    # here because the diagonal of a 10x10 pixel is 14.14 m and
    ##    # I'm okay with a vertex moving as far as it can on the edges of the pixel
    ##    # This also produces results very similar to using the simplify setting
    ##    # on RasterToPolygon_conversion, when it works.
    ##    arcpy.SimplifyPolygon_cartography("nibble_sheds_unsimple",
    ##        "nibble_sheds_simplify", "POINT_REMOVE", "15 Meters", "0 SquareMeters",
    ##        "RESOLVE_ERRORS", "NO_KEEP")
    arcpy.Clip_analysis("nibble_sheds", "hu8", "final_watersheds")

    # Join Permanent ID from Waterbody seed shapefile
    final_watersheds_out = os.path.join(
        out_gdb, 'huc{}_final_watersheds'.format(huc8_code))
    arcpy.JoinField_management("final_watersheds", grid_code, seedpoly,
                               'POUR_ID', ['Permanent_Identifier'])

    # this block bumps out sheds so that they fully contain their own lakes
    # sometimes a little bit of another shed is overlapping the lake simply
    # due to the raster/polygon differences
    # 1) delete fields so watersheds and seedpoly share schema
    # 2) update features, keeping borders
    # 3) instead of lots of nulls make unique dissolve_id for all so that nulls aren't dissolved into one
    # 4) dissolve features on dissolve_id keeping the Permanent_Identifier field
    arcpy.CopyFeatures_management(seedpoly, 'lakes_nofields')
    for fc in ['lakes_nofields', 'final_watersheds']:
        fields = arcpy.ListFields(fc)
        for f in fields:
            # compare field names (f is a Field object) so Permanent_Identifier is kept
            if f.name != 'Permanent_Identifier':
                try:
                    arcpy.DeleteField_management(fc, f.name)
                except:
                    continue
    arcpy.Update_analysis("final_watersheds", 'lakes_nofields', 'update_fc')
    arcpy.AddField_management('update_fc', 'dissolve_id', 'TEXT',
                              field_length=255)
    arcpy.MakeFeatureLayer_management('update_fc', 'update_lyr')
    arcpy.SelectLayerByAttribute_management(
        'update_lyr', 'NEW_SELECTION',
        """"Permanent_Identifier" is not null""")
    arcpy.CalculateField_management('update_lyr', 'dissolve_id',
                                    '!Permanent_Identifier!', 'PYTHON')
    arcpy.SelectLayerByAttribute_management('update_lyr', 'SWITCH_SELECTION')
    arcpy.CalculateField_management('update_lyr', 'dissolve_id', '!OBJECTID!',
                                    'PYTHON')
    arcpy.SelectLayerByAttribute_management('update_lyr', 'CLEAR_SELECTION')
    arcpy.Dissolve_management('update_lyr', "final_watersheds_bumped",
                              'dissolve_id', 'Permanent_Identifier FIRST')
    cu.rename_field("final_watersheds_bumped",
                    "FIRST_Permanent_Identifier",
                    "Permanent_Identifier",
                    deleteOld=True)
    arcpy.DeleteField_management('final_watersheds_bumped', 'dissolve_id')

    arcpy.Clip_analysis('final_watersheds_bumped', 'hu8',
                        'final_watersheds_clipped')

    arcpy.CopyFeatures_management("final_watersheds_clipped",
                                  final_watersheds_out)

    temp_items = arcpy.ListRasters() + arcpy.ListFeatureClasses() + [temp_gdb]
    for item in temp_items:
        try:
            arcpy.Delete_management(item)
        except:
            continue

    arcpy.ResetEnvironments()
    arcpy.CheckInExtension('Spatial')
    cu.multi_msg("Complete.")