def createDEMgridTopoVars(DEM, sitePoly, path, site):
    # same preamble from above
    import arcpy
    import numpy
    from arcpy import env
    env.workspace = "%s\\ChangeModeling\\%s" % (path, site)
    arcpy.env.overwriteOutput = True
    env.scratchWorkspace = "%s\\ChangeModeling\\Scratch.gdb" % path

    #---------create unique raster based on DEM raster using NumPy----------

    # get info from DEM raster
    left = float(
        arcpy.GetRasterProperties_management(DEM, "LEFT").getOutput(0))
    bottom = float(
        arcpy.GetRasterProperties_management(DEM, "BOTTOM").getOutput(0))
    cellx = float(
        arcpy.GetRasterProperties_management(DEM, "CELLSIZEX").getOutput(0))
    celly = float(
        arcpy.GetRasterProperties_management(DEM, "CELLSIZEY").getOutput(0))
    print "Raster info imported"
    # take raster into numpy to create sequential array
    tempRaster = arcpy.Raster(DEM)
    my_array = arcpy.RasterToNumPyArray(tempRaster)
    # .shape pulls the rows and cols of the raster
    rasrows, rascols = my_array.shape
    uniqueval = 1
    # straight up nested for loop
    # very manual way to assign unique value to each cell
    for rowNum in xrange(rasrows):
        for colNum in xrange(rascols):
            my_array[rowNum, colNum] = uniqueval
            uniqueval += 1
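    # NOTE: a vectorized alternative (a sketch; assumes the goal is simply
    # sequential IDs 1..rows*cols in row-major order, exactly as in the loop above):
    # my_array = numpy.arange(1, rasrows * rascols + 1).reshape(rasrows, rascols)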
    print "unique raster created"
    # bring back into arcGIS format, attach DEM's spatial reference information, and clip
    UniqueRaster = arcpy.NumPyArrayToRaster(my_array,
                                            arcpy.Point(left, bottom), cellx,
                                            celly, 0)
    UniqueRaster.save("UniqueRaster.tif")
    spatial_ref = arcpy.Describe(DEM).spatialReference
    UniqueRaster = arcpy.DefineProjection_management(UniqueRaster, spatial_ref)
    print "Unique raster exported to ArcGIS"

    # then RasterToPoly to create grid shapefile
    # has to be an integer
    UniqueRaster_int = arcpy.Int_3d(UniqueRaster, "UniqueRaster_int.tif")
    DEM_gr_larger = arcpy.RasterToPolygon_conversion(UniqueRaster_int,
                                                     "DEM_grid_larger.shp",
                                                     "NO_SIMPLIFY", "VALUE")
    # this is where we clip it to the analysis polygon, with wiggly edges and holes and all
    # need to do it here so area calculations will be sure to use the area we are
    # confident in classifying and analyzing.
    DEM_gr = arcpy.Clip_analysis(DEM_gr_larger, sitePoly, "DEM_grid.shp")
    print "unique raster converted to poly and clipped"

    # poly to points to get centroid of each cell
    # because we've already clipped to analysis_poly, these might not be the centers
    # of the original DEM cells, but they will fall into one of the original DEM cells
    # and therefore will have consistent values from the topographic variable rasters
    # note that "CENTROID" results in some points being outside the boundaries of the
    # clipped DEM grid poly cell, and this causes a discrepancy with the way ExportXY
    # does it - it uses "INSIDE".  Also, for some reason, those points don't pick up
    # topographic variable values.  So I will just go with "INSIDE".
    cellCenters = arcpy.FeatureToPoint_management(DEM_gr, "CellCenters.shp",
                                                  "INSIDE")
    print "cell centroids created"

    # extract values to points to get all the DEM & topo vars attached to centroids
    # pull all the values from the topographic variable rasters
    # note that at this point, the topo.var. information is associated with the CellCenters dataset (point dataset)
    inRasterList = [["mediumDEM.tif", "elev"], ["med_slope.tif", "slope"],
                    ["med_curvature.tif", "curv"],
                    ["med_curvature_prof.tif", "curv_prof"],
                    ["med_curvature_plan.tif", "curv_plan"],
                    ["med_Eastn.tif", "eastn"], ["med_Northn.tif", "northn"],
                    ["med_TMI.tif", "tmi"], ["med_Insol.tif", "insol"],
                    ["med_DistToRidge.tif", "distridge"],
                    ["med_HeatLoadIndex.tif", "heatind"]]
    cellCenters = arcpy.sa.ExtractMultiValuesToPoints("CellCenters.shp",
                                                      inRasterList, "NONE")
    print "Extracted topo vars to cell centroids"

    # calculate distance to prairie (not for WC)
    if site != "WC":
        prairie = "%s\\%s_Prairie.shp" % (path, site)
        distToPrairie = arcpy.Near_analysis(cellCenters, prairie)
        print "Distance to Prairie Calculated"
    else:
        print "WC, no prairie"
Example #2
def build_court_markers(output_gdb, output_feature_class):
    print('Creating basketball court markers.')
    fields = ('SHAPE@', 'NAME')
    fc = os.path.join(output_gdb, output_feature_class)
    if not arcpy.Exists(os.path.join(output_gdb, output_feature_class)):
        arcpy.CreateFeatureclass_management(output_gdb, output_feature_class,
                                            "POLYLINE", "#", "DISABLED",
                                            "DISABLED",
                                            arcpy.SpatialReference(3857))
        arcpy.AddField_management(fc, fields[1], "TEXT", field_length=20)

    cursor = arcpy.da.InsertCursor(fc, fields)

    wing_hash_1 = [(-250, 280 - 52.5), (-250 + 30, 280 - 52.5)]
    cursor.insertRow([wing_hash_1, "Hash Mark"])

    wing_hash_2 = [(250, 280 - 52.5), (250 - 30, 280 - 52.5)]
    cursor.insertRow([wing_hash_2, "Hash Mark"])

    wing_hash_3 = [(-250, 280 + 380 - 52.5), (-250 + 30, 280 + 380 - 52.5)]
    cursor.insertRow([wing_hash_3, "Hash Mark"])

    wing_hash_4 = [(250, 280 + 380 - 52.5), (250 - 30, 280 + 380 - 52.5)]
    cursor.insertRow([wing_hash_4, "Hash Mark"])

    circle_1 = [(40, -12.5), (40, 0)]
    cursor.insertRow([circle_1, "Circle"])

    circle_2 = [(-40, -12.5), (-40, 0)]
    cursor.insertRow([circle_2, "Circle"])

    circle_3 = [(40, 835 + 12.5), (40, 835)]
    cursor.insertRow([circle_3, "Circle"])

    circle_4 = [(-40, 835 + 12.5), (-40, 835)]
    cursor.insertRow([circle_4, "Circle"])

    baseline_hash_1 = [(110, -52.5), (110, -52.5 + 5)]
    cursor.insertRow([baseline_hash_1, "Baseline Hash Mark"])

    baseline_hash_2 = [(-110, -52.5), (-110, -52.5 + 5)]
    cursor.insertRow([baseline_hash_2, "Baseline Hash Mark"])

    baseline_hash_3 = [(-110, 940 - 52.5), (-110, 940 - 52.5 - 5)]
    cursor.insertRow([baseline_hash_3, "Baseline Hash Mark"])

    baseline_hash_4 = [(110, 940 - 52.5), (110, 940 - 52.5 - 5)]
    cursor.insertRow([baseline_hash_4, "Baseline Hash Mark"])

    back_board = [(-30, -12.5), (30, -12.5)]
    cursor.insertRow([back_board, "Backboard"])

    back_board = [(-30, 847.5), (30, 847.5)]
    cursor.insertRow([back_board, "Backboard"])

    half_court = [(-250, 470 - 52.5), (250, 470 - 52.5)]
    cursor.insertRow([half_court, "Half Court"])

    three = [(-219, -52.5), (-219, 140 - 52.5)]
    cursor.insertRow([three, "Three Point Line"])

    three = [(219, -52.5), (219, 140 - 52.5)]
    cursor.insertRow([three, "Three Point Line"])

    three = [(-219, 940 - 52.5), (-219, 940 - (140 + 52.5))]
    cursor.insertRow([three, "Three Point Line"])

    three = [(219, 940 - 52.5), (219, 940 - (140 + 52.5))]
    cursor.insertRow([three, "Three Point Line"])

    # 4-foot lane arcs by each basket (court coordinates are in tenths of feet)
    pt_geometry = arcpy.PointGeometry(arcpy.Point(0, 0))
    arcpy.Buffer_analysis(pt_geometry, "in_memory\\lane_arc1", 40)  #237.5)#

    pt_geometry = arcpy.PointGeometry(arcpy.Point(0, 835))
    arcpy.Buffer_analysis(pt_geometry, "in_memory\\lane_arc2", 40)  #237.5)#

    arcpy.CreateFeatureclass_management("in_memory", "lane_arc_clipper",
                                        "POLYGON", "#", "DISABLED", "DISABLED",
                                        arcpy.SpatialReference(3857))
    arcpy.AddField_management("in_memory\\lane_arc_clipper",
                              fields[1],
                              "TEXT",
                              field_length=20)
    lane_clip_cursor = arcpy.da.InsertCursor("in_memory\\lane_arc_clipper",
                                             fields)
    clip_poly = [(-250, 0), (250, 0), (250, 50), (-250, 50)]
    lane_clip_cursor.insertRow([clip_poly, "Lane"])
    clip_poly = [(-250, 835), (250, 835), (250, 740), (-250, 740)]
    lane_clip_cursor.insertRow([clip_poly, "Lane"])

    arcpy.PolygonToLine_management("in_memory\\lane_arc1",
                                   "in_memory\\lane_arc_line_1")
    arcpy.PolygonToLine_management("in_memory\\lane_arc2",
                                   "in_memory\\lane_arc_line_2")

    arcpy.Clip_analysis("in_memory\\lane_arc_line_1",
                        "in_memory\\lane_arc_clipper",
                        "in_memory\\clip_lane_arc1")
    arcpy.Clip_analysis("in_memory\\lane_arc_line_2",
                        "in_memory\\lane_arc_clipper",
                        "in_memory\\clip_lane_arc2")
    arcpy.Append_management("in_memory\\clip_lane_arc1", fc, "NO_TEST", "", "")
    arcpy.Append_management("in_memory\\clip_lane_arc2", fc, "NO_TEST", "", "")

    #Create 3Point Arc
    pt_geometry = arcpy.PointGeometry(arcpy.Point(0, 0))
    arcpy.Buffer_analysis(pt_geometry, "in_memory\\arc", 235.833)  #237.5)#

    pt_geometry = arcpy.PointGeometry(arcpy.Point(0, 835))
    arcpy.Buffer_analysis(pt_geometry, "in_memory\\arc2", 235.833)  #237.5)#

    arcpy.CreateFeatureclass_management("in_memory", "clipper", "POLYGON", "#",
                                        "DISABLED", "DISABLED",
                                        arcpy.SpatialReference(3857))
    arcpy.AddField_management("in_memory\\clipper",
                              fields[1],
                              "TEXT",
                              field_length=20)
    clip_cursor = arcpy.da.InsertCursor("in_memory\\clipper", fields)
    clip_poly = [(-250, 940 - (140 + 52.5)), (250, 940 - (140 + 52.5)),
                 (250, 140 - 52.5), (-250, 140 - 52.5)]
    clip_cursor.insertRow([clip_poly, "Three Point Line"])

    ##    clip_poly = [(-250, 940-52.5),
    ##             (250, 940-52.5),
    ##             (250, 140-52.5),
    ##             (-250, 140-52.5)]
    ##    clip_cursor.insertRow([clip_poly, "Three Point Line"])

    arcpy.PolygonToLine_management("in_memory\\arc", "in_memory\\line_arc")
    arcpy.PolygonToLine_management("in_memory\\arc2", "in_memory\\line_arc2")

    arcpy.Clip_analysis("in_memory\\line_arc", "in_memory\\clipper",
                        "in_memory\\clip_res")
    arcpy.Clip_analysis("in_memory\\line_arc2", "in_memory\\clipper",
                        "in_memory\\clip_res2")
    arcpy.Append_management("in_memory\\clip_res", fc, "NO_TEST", "", "")
    arcpy.Append_management("in_memory\\clip_res2", fc, "NO_TEST", "", "")

    # release the schema locks held by the insert cursors
    del cursor, lane_clip_cursor, clip_cursor
    print("Done.")
Example #3
arcpy.env.rasterStatistics = "STATISTICS 1 1"
arcpy.env.transferDomains = "false"
arcpy.env.maintainAttachments = "true"
arcpy.env.resamplingMethod = "NEAREST"
arcpy.env.cartographicCoordinateSystem = ""
arcpy.env.configKeyword = ""
arcpy.env.qualifiedFieldNames = "true"
arcpy.env.tileSize = "128 128"
arcpy.env.pyramid = "PYRAMIDS -1 NEAREST DEFAULT 75 NO_SKIP"
arcpy.env.referenceScale = ""
arcpy.env.nodata = "NONE"
arcpy.env.cellSize = "MAXOF"
arcpy.env.mask = ""

# Process: Project Raster
arcpy.ProjectRaster_management(
    DC_11001_WGS1984_img, DC_LC_Proj,
    "PROJCS['NAD_1983_StatePlane_Maryland_FIPS_1900',GEOGCS['GCS_North_American_1983',DATUM['D_North_American_1983',SPHEROID['GRS_1980',6378137.0,298.257222101]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Lambert_Conformal_Conic'],PARAMETER['False_Easting',400000.0],PARAMETER['False_Northing',0.0],PARAMETER['Central_Meridian',-77.0],PARAMETER['Standard_Parallel_1',38.3],PARAMETER['Standard_Parallel_2',39.45],PARAMETER['Latitude_Of_Origin',37.66666666666666],UNIT['Meter',1.0]]",
    "NEAREST", "1.18236705408999 1.18236705408997",
    "WGS_1984_(ITRF00)_To_NAD_1983", "",
    "GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]]"
)

# Process: Raster to Polygon
arcpy.RasterToPolygon_conversion(DC_LC_Proj, CBCLU_DC_Poly_shp, "SIMPLIFY", "",
                                 "SINGLE_OUTER_PART", "")

# Process: Clip
arcpy.Clip_analysis(CBCLU_DC_Poly_shp, RockCreek_PotomacRiver_Watershed,
                    CBCLU_DC_RockCreekPotomacWS_shp, "")
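
The long coordinate-system strings above can usually be replaced by a factory code; a sketch reusing the snippet's variables, assuming EPSG 26985 is the intended NAD 1983 StatePlane Maryland (meters) definition:

# a sketch; 26985 (NAD 1983 StatePlane Maryland, meters) is an assumed EPSG code
out_sr = arcpy.SpatialReference(26985)
arcpy.ProjectRaster_management(DC_11001_WGS1984_img, DC_LC_Proj, out_sr,
                               "NEAREST", "1.18236705408999 1.18236705408997",
                               "WGS_1984_(ITRF00)_To_NAD_1983")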
Example #4
    # download the GNIS data; infc = input feature class, the shapefile (summit)
    infc = GNIS_Download(Save_Dir_GNIS,'National Geographic Names Information System (GNIS)')
    temp = BBox_Value.split(',')
    for entries in temp:
        temp2.append(float(entries))
    # create an array of points to make the polygon (temp2 holds the bbox values as floats)
    array = arcpy.Array([arcpy.Point(temp2[0], temp2[3]), arcpy.Point(temp2[2], temp2[1]),
                         arcpy.Point(temp2[0], temp2[1]), arcpy.Point(temp2[2], temp2[3])])
    # specific format for the create_feature function
    bbox.append({'NAME': 'GNIS_BBOX', 'POINT': (temp2[0], temp2[3])})
    bbox.append({'NAME': 'GNIS_BBOX', 'POINT': (temp2[2], temp2[1])})

    # make a poly to clip gnis points
    polygon = arcpy.Polygon(array, arcpy.SpatialReference(6318))
    # clip the GNIS Shape file according to the bbox
    arcpy.Clip_analysis(infc,polygon,infc[:-4]+'_StudyArea.shp')
    infc = infc[:-4]+'_StudyArea.shp'
else:
    arcpy.AddMessage("Selecting only summit points")
    if not infc.endswith('.shp'):
        infc += '.shp'
    arcpy.Select_analysis(infc, infc[:-4]+'_Summit.shp',
                          '"FEATURE_CL" = \'Summit\'')
    infc = infc[:-4]+'_Summit.shp'

    arcpy.MinimumBoundingGeometry_management(infc, workspace+'\\poly.shp', geometry_type='RECTANGLE_BY_WIDTH')
    out_cor = arcpy.SpatialReference(6318)
    arcpy.AddGeometryAttributes_management(workspace+'\\poly.shp',Geometry_Properties='EXTENT',Coordinate_System=out_cor)
    with arcpy.da.SearchCursor(workspace+'\\poly.shp', ['EXT_MIN_X', 'EXT_MIN_Y','EXT_MAX_X','EXT_MAX_Y']) as cursor:
        for row in cursor:
            bbox.append({'NAME': 'GNIS_BBOX', 'POINT': (float(row[0]),float(row[3]))})
Example #5
        # note: the calls below omit required output paths, which relies on
        # newer arcpy (ArcGIS Pro) auto-generating outputs; on older versions
        # pass explicit output datasets
        vectordata = arcpy.Merge_management([rtp, input_agriculture])

        # union with vector layers
        Union_vector = arcpy.Union_analysis([vectordata, impervious_filter])

        print "union of " + tif + " completed"

        union = "union" + tif
        arcpy.MakeFeatureLayer_management(Union_vector, union)

        # select all the objects outside of the vector layers
        OutFilter = arcpy.SelectLayerByAttribute_management(
            union, "NEW_SELECTION", "Filter = ' '")

        # Clip with administrative outline
        vector_filter = arcpy.Clip_analysis(OutFilter, administrative_clip)

        # Clip with raster outline to clip agricultural areas
        outRas = Raster(tif) * 0  # Raster assumes: from arcpy.sa import *
        outline_vector = arcpy.RasterToPolygon_conversion(outRas)
        final_clip = arcpy.Clip_analysis(vector_filter, outline_vector)

        # add field and calculate area of polygons
        added_field = arcpy.AddField_management(final_clip, "area", "DOUBLE")
        final_clip_area = arcpy.CalculateField_management(
            added_field, "area", "!SHAPE.area!", "PYTHON")

        # select all polygons >= 1 squaremeter
        arcpy.MakeFeatureLayer_management(final_clip_area, "final" + tif)
        Out_Filter = arcpy.SelectLayerByAttribute_management(
            "final" + tif, "NEW_SELECTION", "area >= 1")
               
Example #6
                #make a feature layer out of each buffer
                current_buffer=arcpy.MakeFeatureLayer_management(end_point_buffers,"temp.lyr","OBJECTID="+str(row[0]))
                arcpy.CopyFeatures_management(current_buffer,os.path.join(env.workspace,naming+"current_buffer"))
                current_buffer=os.path.join(env.workspace,naming+"current_buffer")
                
                #create fishnet out of isolated buffer
                desc = arcpy.Describe(current_buffer)
                current_fishnet=os.path.join(env.workspace,naming+"current_fishnet")
                arcpy.CreateFishnet_management(current_fishnet,str(desc.extent.lowerLeft),str(desc.extent.XMin) + " " + str(desc.extent.YMax + 10),"","","2", "1","","NO_LABELS", current_buffer,'POLYGON')

                #clip current end point buffer by top half of fishnet
                #select top half of fishnet
                top_net=arcpy.MakeFeatureLayer_management(current_fishnet,"top_net.lyr","OID=2")
                clip_buffer=os.path.join(env.workspace,naming+"clipped_buffer")
                arcpy.Clip_analysis(current_buffer, top_net, clip_buffer)

                #if this is first buffer
                if row[0]==1:
                        #create the clipped buffer feature class
                        clipped_buffer_fc=arcpy.CreateFeatureclass_management(env.workspace, naming+"_clipped_buffer_fc", "POLYGON",clip_buffer,"DISABLED", "DISABLED", clip_buffer)
                #append clipped buffer to clipped buffer fc
                arcpy.Append_management(clip_buffer,clipped_buffer_fc)

del search

#mask the DEM by the clipped buffers
##arcpy.AddMessage("\t\tMasking DEM...")
masked_dem = ExtractByMask(dem, clipped_buffer_fc)
masked_dem.save(os.path.join(env.workspace,naming+"_maskedraster"))
Example #7
        if len(value) > 1:
            #for each sector (oil/gas, wind energy, fisheries, etc.) with more than one dataset,
            #make a union, which is dissolved and "cleaned" of rows with identical sector and dataset origin
            arcpy.Union_analysis(value, output)
            #in reality "cleaning_tables" could probably be removed, since its job is handled later
            cleaning_tables(output, origindslv)
            output2 = os.path.join(
                throwoutput, anvendelseskode + "_" + keyz + "dslv" + ".shp")
            arcpy.Dissolve_management(output, output2, [origindslv], "",
                                      "SINGLE_PART")
            addfield(output2, str(key), sektordslv)
            addfield(output2, str(temakodedict[key]), temakodedslv)
            sektorlist.append(
                os.path.join(throwoutput,
                             anvendelseskode + "_" + keyz + "dslv" + ".shp"))
            arcpy.Delete_management(output)
        else:  #if there is only one dataset, copy it over under a new name
            arcpy.CopyFeatures_management(value[0], output)
            sektorlist.append(output)
            addfield(output, str(key), sektordslv)
            addfield(output, str(temakodedict[key]), temakodedslv)
        for i in value:
            arcpy.Delete_management(i)
    print(sektorlist)
    for lyr in sektorlist:
        print(lyr)
        arcpy.Clip_analysis(
            lyr,
            r"C:\Users\B039723\Documents\Filkassen\havplan\BaseData\analysis_extent\final\merg_eez_geodk_sngprt_poly_removeland_dslv.shp",
            os.path.join(clipoutput, os.path.basename(lyr)))
Example #8
    for filename in filenames:
        if filename != "shp":
            fc_countries.append(os.path.join(dirpath, filename))
            country_names.append(filename)

print(vector_names, raster_names, country_names)


# Set workspace
arcpy.env.workspace = args["output"][0]
   
for i in range(len(fc_vectors)):
    for j in range(len(fc_countries)):
        print(vector_names[i],country_names[j])
        outfc = "{}_{}_Clipped".format(country_names[j],vector_names[i])
        arcpy.Clip_analysis(fc_vectors[i], fc_countries[j], outfc)
        print(arcpy.GetMessages())


for i in range(len(fc_rasters)):
    for j in range(len(fc_countries)):
        print(raster_names[i],country_names[j])
        outfc = "{}_{}_Clipped".format(country_names[j],raster_names[i])
        arcpy.Clip_management(in_raster=fc_rasters[i], rectangle="",
                            clipping_geometry="ClippingGeometry",
                            in_template_dataset=fc_countries[j],
                            out_raster=outfc)
        print(arcpy.GetMessages())


        
Example #9
    FA) + "temp3_down"  #.shp #single downstream buffer
flood_areaD = path + os.sep + clip_name
assets = FA + "_assets"  #addresses/population in flood zone

#3.2: NUMBER WHO BENEFIT
#3.2 Step 1: check that there are people in the flood zone
if addresses is not None:  #if using addresses
    addresses = checkSpatialReference(outTbl, addresses)  #check spatial ref
    flood_zone = checkSpatialReference(outTbl, flood_zone)  #check spatial ref
    #start cutting back the datasets
    #    if watershed is not None: #if user specified watershed
    #        watershed = checkSpatialReference(outTbl, watershed)
    #limit flood zone to what is within watershed
    #        flood_zone = arcpy.Clip_analysis(flood_zone, watershed, flood_area)
    #limit addresses to only those within the flood_zone
    arcpy.Clip_analysis(addresses, flood_zone, assets)
    total_cnt = arcpy.GetCount_management(assets)  #count addresses
    if int(total_cnt.getOutput(0)) <= 0:  #if there are no addresses in flood zones stop analysis
        arcpy.AddError("No addresses were found within the flooded area.")
        print("No addresses were found within the flooded area.")
        raise arcpy.ExecuteError
elif popRast is not None:  #not yet tested
    #check projection?
    #    if watershed is not None: #if user specified watershed
    #        watershed = checkSpatialReference(outTbl, watershed)
    #limit flood zone to what is within watershed
    #        flood_zone = arcpy.Clip_analysis(flood_zone, watershed, flood_area)
    arcpy.Clip_management(popRast, "", assets, flood_zone, "",
                          "ClippingGeometry", "NO_MAINTAIN_EXTENT")
    #add error handling to fail if flood area contains no population
Example #10
print("points buffered")
# add field to each fc, with name of feature class
addSourceField()

print("source fields added")

## add attributes from each values layer by using the union tool, then clip ##

arcpy.Union_analysis(
    ["dataset1", "dataset2", "dataset3", "dataset4", "dataset5", "dataset6"],
    "union")

print("union complete")

arcpy.Clip_analysis("union", "dataset1", "attributed_dataset")

##delete unnecessary fields from attributed dataset##

#list all fields
fields = arcpy.ListFields("attributed_dataset")

# manually enter field names to keep here
# include mandatory field names such as OBJECTID (or FID) and Shape in keepFields
keepFields = [
    'fieldname1', 'fieldname2', 'fieldname3', 'fieldname4', 'fieldname5'
]

# create dropFields list (all the fields not in keepFields)
dropFields = [f.name for f in fields if f.name not in keepFields]
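
The snippet ends before dropFields is used; the presumable next step (an assumption, not shown in the snippet) would be:

# presumable continuation: drop the unwanted fields
arcpy.DeleteField_management("attributed_dataset", dropFields)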
Example #11
US_Boundaries = arcpy.GetParameterAsText(2)
if US_Boundaries == '#' or not US_Boundaries:
    US_Boundaries = "us_boundaries" # provide a default value if unspecified

#### Paths to various files. \\ is the default but / would work as well (Gadi) 
	
# Local variables:
us_cities = "us_cities"
Buffered_Cities = "C:\\Users\\gadidreyfuss\\Documents\\ArcGIS\\Default.gdb\\us_cities_Buffer"
us_roads = "us_roads"
Buffered_Roads = "C:\\Users\\gadidreyfuss\\Documents\\ArcGIS\\Default.gdb\\us_roads_Buffer"
Buffered_Cities_and_Roads = "C:\\Users\\gadidreyfuss\\Documents\\ArcGIS\\Default.gdb\\us_cities_Buffer_Intersect"
Clipped = "C:\\Users\\gadidreyfuss\\Documents\\ArcGIS\\Default.gdb\\us_cities_Buffer_Intersect_C"


#### Like the back end development, this is for the "work" of the script (Gadi) 


# Process: Buffer the cities
arcpy.Buffer_analysis(us_cities, Buffered_Cities, Cities_buffer_distance, "FULL", "ROUND", "ALL", "", "PLANAR")

# Process: Buffer the roads
arcpy.Buffer_analysis(us_roads, Buffered_Roads, Road_buffer, "FULL", "ROUND", "ALL", "", "PLANAR")

# Process: Intersect
arcpy.Intersect_analysis("C:\\Users\\gadidreyfuss\\Documents\\ArcGIS\\Default.gdb\\us_cities_Buffer #;C:\\Users\\gadidreyfuss\\Documents\\ArcGIS\\Default.gdb\\us_roads_Buffer #", Buffered_Cities_and_Roads, "ALL", "", "INPUT")

# Process: Clip
arcpy.Clip_analysis(Buffered_Cities_and_Roads, US_Boundaries, Clipped, "")

Example #12
# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------------
# junk.py
# Created on: 2015-04-13 14:06:14.00000
#   (generated by ArcGIS/ModelBuilder)
# Description:
# ---------------------------------------------------------------------------

# Import arcpy module
import arcpy

# Local variables:
major_roads = "C:\\Users\\Todd\\Desktop\\morepython\\tabletesting.gdb\\major_roads"
tracts = "C:\\Users\\Todd\\Desktop\\morepython\\tabletesting.gdb\\tracts"
major_roads_Buffer = "C:\\Users\\Todd\\Desktop\\morepython\\tabletesting.gdb\\major_roads_Buffer"
tracts_Clip = "C:\\Users\\Todd\\Desktop\\morepython\\tabletesting.gdb\\tracts_Clip"

# Process: Buffer
arcpy.Buffer_analysis(major_roads, major_roads_Buffer, "10000 Meters", "FULL",
                      "ROUND", "ALL", "", "PLANAR")

# Process: Clip
arcpy.Clip_analysis(tracts, major_roads_Buffer, tracts_Clip, "")

rows = arcpy.UpdateCursor(tracts_Clip)
for row in rows:
    if row.pctbelowpo < 10:
        rows.deleteRow(row)
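
The same filter written with the newer arcpy.da cursor, as a sketch (assumes the pctbelowpo field from the snippet above):

# equivalent deletion with an arcpy.da cursor; "with" releases the lock automatically
with arcpy.da.UpdateCursor(tracts_Clip, ["pctbelowpo"]) as cursor:
    for row in cursor:
        if row[0] < 10:
            cursor.deleteRow()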
Example #13
def process(fldr, outFldr, outGdb, build):
    arcpy.env.overwriteOutput = True
    arcpy.CheckOutExtension("Spatial")

    start = datetime.datetime.now()
    print(start)
    #fldr = r"C:\00_school\appChallenge"
    #outFldr = r"C:\00_school\appChallenge\output"
    #outGdb = os.path.join(outFldr, "output.gdb")
    outFinal = os.path.join(outFldr, os.path.basename(fldr) + "_all.shp")
    #build = r"C:\00_school\appChallenge\AllOttawa_Buildings\Buildings_polygon_MTM9.shp"
    coord = arcpy.Describe(build).spatialReference.exportToString()
    days = [355, 172, 80]
    lat = "45.3748"
    sky = "200"
    outList = []
    extList = []

    for f in os.listdir(fldr):
        if f.endswith(".zip"):
            code = f[:-4]
            extFldr = os.path.join(fldr, code)

            print("Starting " + code)
            print("  Extracting files")
            zipPath = zipfile.ZipFile(os.path.join(fldr, f), 'r')
            zipPath.extractall(extFldr)
            zipPath.close()
            extList.append(extFldr)

            las = os.path.join(extFldr, os.listdir(extFldr)[0])
            ras = os.path.join(extFldr, "r" + code + ".tif")

            print("  Converting las to raster")
            arcpy.LasDatasetToRaster_conversion(
                in_las_dataset=las,
                out_raster=ras,
                value_field="ELEVATION",
                interpolation_type="BINNING AVERAGE NONE",
                data_type="FLOAT",
                sampling_type="CELLSIZE",
                sampling_value=1,
                z_factor=1)

            print("  Defining projection")
            arcpy.DefineProjection_management(in_dataset=ras,
                                              coor_system=coord)

            print("  Running solar analysis")
            solList = []
            for d in days:
                if d == days[0]:
                    sol = os.path.join(extFldr, "ws" + code + ".tif")
                elif d == days[1]:
                    sol = os.path.join(extFldr, "ss" + code + ".tif")
                else:
                    sol = os.path.join(extFldr, "eq" + code + ".tif")
                solList.append(sol)
                t = "WithinDay " + str(d) + " 0 24"
                arcpy.gp.AreaSolarRadiation(ras, sol, lat, sky, t)

            print("  Generating footprint")
            zeroExp = """\"""" + ras + """\" * 0 """
            zeroRas = os.path.join(extFldr, "z" + code + ".tif")
            intRas = os.path.join(extFldr, "i" + code + ".tif")
            rasFp = os.path.join(extFldr, "fp" + code + ".shp")

            arcpy.gp.RasterCalculator_sa(zeroExp, zeroRas)
            arcpy.gp.Int_sa(zeroRas, intRas)
            arcpy.RasterToPolygon_conversion(intRas, rasFp, "SIMPLIFY",
                                             "VALUE")

            print("  Clipping Buildings")
            buildClip = os.path.join(extFldr, "build" + code + ".shp")
            arcpy.Clip_analysis(in_features=build,
                                clip_features=rasFp,
                                out_feature_class=buildClip)

            print("  Generating stats")
            field = arcpy.ListFields(dataset=buildClip, field_type="OID")[0]
            tblList = []
            lyr = buildClip[:-4] + ".lyr"
            arcpy.MakeFeatureLayer_management(in_features=buildClip,
                                              out_layer=lyr)
            for s in solList:
                tbl = s[:-4] + "_tbl.dbf"
                tblList.append(tbl)
                arcpy.sa.ZonalStatisticsAsTable(in_zone_data=buildClip,
                                                zone_field=field.name,
                                                in_value_raster=s,
                                                out_table=tbl,
                                                statistics_type="MEAN")
                field2 = arcpy.ListFields(dataset=tbl, field_type="OID")[0]
                arcpy.AddJoin_management(in_layer_or_view=lyr,
                                         in_field=field.name,
                                         join_table=tbl,
                                         join_field=field2.name)

            print("  Exporting Data")
            output = os.path.join(outGdb, "out_" + code)
            outList.append(output)
            arcpy.CopyFeatures_management(in_features=lyr,
                                          out_feature_class=output)

            flds = arcpy.ListFields(dataset=output)
            for fld in flds:
                name = fld.name
                if name.endswith("MEAN"):
                    if name.startswith("ws"):
                        arcpy.AlterField_management(in_table=output,
                                                    field=name,
                                                    new_field_name="WS_MEAN",
                                                    new_field_alias="WS_MEAN")
                    elif name.startswith("ss"):
                        arcpy.AlterField_management(in_table=output,
                                                    field=name,
                                                    new_field_name="SS_MEAN",
                                                    new_field_alias="SS_MEAN")
                    else:
                        arcpy.AlterField_management(in_table=output,
                                                    field=name,
                                                    new_field_name="EQ_MEAN",
                                                    new_field_alias="EQ_MEAN")
            for fld in flds:
                if fld.type != "OID" and not fld.name.startswith(
                        "Shape") and not fld.name.endswith("MEAN"):
                    arcpy.DeleteField_management(in_table=output,
                                                 drop_field=fld.name)

            curr = datetime.datetime.now()
            elap = curr - start
            print(code + " complete. " + str(curr) + " Time elapsed: " +
                  str(elap))
    if len(os.listdir(fldr)) != 0:
        print("Merging output layers")
        arcpy.Merge_management(inputs=outList, output=outFinal)

    print("\nI AM INVINCIBLE")
    return (extList)
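
A usage sketch assembled from the paths commented out at the top of the function:

process(fldr=r"C:\00_school\appChallenge",
        outFldr=r"C:\00_school\appChallenge\output",
        outGdb=r"C:\00_school\appChallenge\output\output.gdb",
        build=r"C:\00_school\appChallenge\AllOttawa_Buildings\Buildings_polygon_MTM9.shp")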
Example #14
"""
Use this script to update the shipping lanes shapefile. Once new data is downloaded, run this code to:
1) clip to Gulf of Mexico region
2) re-project into WGS84 (or change projection if you want)

To run, fill in strings for the new shapefile location, location of file to clip with, and
the new output location.

Alec Dyer
[email protected]
"""

import arcpy

in_feature = r"P:\01_DataOriginals\USA\Infrastructure\ShippingFairwaysLanesandZones\shippinglanes"
out_feature = r"P:\03_DataFinal\GOM\Infrastructure\ShippingLanes\GOM_shippinglanes"
clip_feature = r"P:\03_DataFinal\GOM\Boundaries\Extent\GOM_Extent_All"

arcpy.Clip_analysis(in_feature + '.shp', clip_feature + '.shp',
                    out_feature + '.shp')

sr = arcpy.SpatialReference(4326)

spatial_ref = arcpy.Describe(out_feature + '.shp').spatialReference

if spatial_ref.name != sr.GCSName:

    # Project_management cannot write onto its own input; write to a new
    # shapefile instead (the "_WGS84" suffix is an assumed name)
    arcpy.Project_management(out_feature + '.shp', out_feature + '_WGS84.shp', sr)
Example #15
import arcpy
arcpy.env.workspace = ""  # set to the folder containing the shapefiles below
infc = "1.shp"
clipfc = "2.shp"
outfc = "3.shp"
arcpy.Clip_analysis(infc, clipfc, outfc)
Example #16
    def run(self):
        communities = self.folders.get_table('Zentren')
        ws_tmp = arcpy.env.scratchGDB
        sel_comm = os.path.join(ws_tmp, 'sel_comm')
        feat_buffered = os.path.join(ws_tmp, 'feat_buffered')
        markets_table = self.folders.get_table(self._markets_table,
                                               check=False)
        markets_tmp = self.folders.get_table('markets_tmp', check=False)
        markets_buffer = self.folders.get_table('markets_buffer', check=False)
        markets_buffer_output = self.folders.get_table(self._markets_buffer,
                                                       check=False)
        markets_com = os.path.join(ws_tmp, 'markets_com')

        def del_tmp():
            for f in [
                    sel_comm, feat_buffered, markets_buffer, markets_tmp,
                    markets_com
            ]:
                arcpy.Delete_management(f)

        del_tmp()

        arcpy.FeatureClassToFeatureClass_conversion(communities,
                                                    ws_tmp,
                                                    os.path.split(sel_comm)[1],
                                                    where_clause='Auswahl<>0')

        # ToDo: buffer -> multi_poly -> markets -> markets in selected communities -> remove those from markets in multi_poly -> to db

        arcpy.AddMessage('Analyzing buffer area...')
        # create buffer area
        arcpy.Buffer_analysis(sel_comm,
                              feat_buffered,
                              self.par.radius_markets.value,
                              dissolve_option='NONE')
        try:
            self.output.remove_layer(self._out_layer_name)
        except:
            pass

        arcpy.Delete_management(markets_buffer_output)
        arcpy.Dissolve_management(feat_buffered, markets_buffer_output, "", "",
                                  "SINGLE_PART", "DISSOLVE_LINES")

        multi_poly = minimal_bounding_poly(feat_buffered)
        epsg = self.parent_tbx.config.epsg
        multi_poly = [[Point(p.X, p.Y, epsg=epsg) for p in poly]
                      for poly in multi_poly]
        reader = OSMShopsReader(epsg=epsg)
        markets = []
        arcpy.AddMessage('Finding markets in the surrounding area...')
        count = 1000
        for poly in multi_poly:
            m = reader.get_shops(poly, count=count - len(markets))
            markets += m

        if len(markets) > 0:
            # pro license only
            #arcpy.SymDiff_analysis(feat_buffered, sel_comm, buffer_diff)

            #self.parent_tbx.delete_rows_in_table(self._markets_table,
            #where='is_buffer=1')
            arcpy.CreateFeatureclass_management(os.path.split(markets_tmp)[0],
                                                os.path.split(markets_tmp)[1],
                                                template=markets_table)
            ids = [
                id
                for id, in self.parent_tbx.query_table(markets_table, ['id'])
            ]
            start_id = max(ids) + 1 if ids else 0
            markets = self.parse_meta(markets)
            self.markets_to_db(markets,
                               tablename=os.path.split(markets_tmp)[1],
                               is_buffer=True,
                               start_id=start_id,
                               is_osm=True)
            arcpy.Clip_analysis(markets_tmp, feat_buffered, markets_buffer)
            arcpy.Clip_analysis(markets_buffer, sel_comm, markets_com)
            cursor = arcpy.da.SearchCursor(markets_com, ['id'])
            in_com_ids = [str(id) for id, in cursor]
            del cursor
            where = 'id IN ({})'.format(','.join(in_com_ids))
            self.parent_tbx.delete_rows_in_table(markets_buffer, where)
            arcpy.Append_management(markets_buffer, markets_table)

            arcpy.AddMessage('Removing duplicates...')
            n = remove_duplicates(markets_table,
                                  'id',
                                  match_field='id_kette',
                                  where='is_buffer=1',
                                  distance=50)
            arcpy.AddMessage('{} duplicates removed...'.format(n))

            self.set_ags()
        del_tmp()
Example #17
def intersectGrid(AggLevel, workingDir, variable):
    '''Intersects the NOAA precipitation data grid with the AggLevel = "Woreda" or "Kebele" shapefile'''

    #create grid shapefile
    Grid = workingDir + "\\All" + variable + "Grid.shp"
    if (os.path.exists(Grid) == False):
        if variable == "Temp":
            origin_coord = "-180 -90"
            nrows = "360"
            ncols = "720"
            polygon_width = "0.5 degrees"
        else:
            origin_coord = "-20.05 -40.05"
            nrows = "801"
            ncols = "751"
            polygon_width = "0.1 degrees"

        polygon_height = polygon_width
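        # note: this function assumes the enclosing module imports arcpy as "ap"
        # (import arcpy as ap)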
        ap.GridIndexFeatures_cartography(Grid, "", "", "", "", polygon_width,
                                         polygon_height, origin_coord, nrows,
                                         ncols)
        ap.DefineProjection_management(
            Grid,
            coor_system="GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',\
        SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]]"
        )

        #add 3 or 4 fields to grid shapefile: latitude (LAT), longitude (LONG) and
        #for precipitation, row (row) of text file corresponding to each grid in the shapefile;
        #for temperature, row (row) and column (col) of netCDF file corresponding to each grid in the shapefile
        ap.AddField_management(Grid, "LAT", "DOUBLE", 7, 2, "", "", "", "", "")
        ap.AddField_management(Grid, "LONG", "DOUBLE", 7, 2, "", "", "", "",
                               "")
        ap.AddField_management(Grid, "row", "SHORT", 6, "", "", "", "", "", "")
        if variable == "Temp":
            ap.AddField_management(Grid, "col", "SHORT", 5, "", "", "", "", "",
                                   "")

        #calculate lat and long fields
        expression1 = "float(!SHAPE.CENTROID!.split()[0])"
        expression2 = "float(!SHAPE.CENTROID!.split()[1])"
        ap.CalculateField_management(Grid, "LONG", expression1, "PYTHON")
        ap.CalculateField_management(Grid, "LAT", expression2, "PYTHON")

        #calculate row and col fields
        if variable == "Temp":
            Grid = calcTempFields(Grid)
        else:
            Grid = calcRainFields(Grid)

    #clip the grid to Ethiopia and convert its .dbf to a .csv for later use
    GridClip = workingDir + "\\" + variable + "GridClip" + AggLevel + ".shp"
    if AggLevel == 'Woreda':
        EthiopiaBorders = os.path.dirname(
            workingDir) + "\\Shapefiles\\WoredasAdindan.shp"
    elif AggLevel == 'Kebele':
        EthiopiaBorders = os.path.dirname(
            workingDir
        ) + "\\Shapefiles\\Ethiopia Kebeles without Somali region.shp"

    ap.Clip_analysis(Grid, EthiopiaBorders, GridClip)
    dbf = GridClip[0:-4] + ".dbf"
    GridCSV = convertDBFtoCSV(dbf)

    #intersect the clipped grid with the woreda or kebele shapefile and project to Adindan
    GridIntersect = workingDir + "\\" + variable + AggLevel + "Intersect.shp"
    ap.Intersect_analysis([GridClip, EthiopiaBorders], GridIntersect)
    GridIntersectProject = GridIntersect[0:-4] + "Project.shp"
    ap.Project_management(
        GridIntersect,
        GridIntersectProject,
        out_coor_system="PROJCS['Adindan_UTM_Zone_37N',GEOGCS['GCS_Adindan',\
    DATUM['D_Adindan',SPHEROID['Clarke_1880_RGS',6378249.145,293.465]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],\
    PROJECTION['Transverse_Mercator'],PARAMETER['False_Easting',500000.0],PARAMETER['False_Northing',0.0],\
    PARAMETER['Central_Meridian',39.0],PARAMETER['Scale_Factor',0.9996],PARAMETER['Latitude_Of_Origin',0.0],\
    UNIT['Meter',1.0]]",
        transform_method="Adindan_To_WGS_1984_1",
        in_coor_system="GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',\
    SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]]"
    )

    #calculate area of intersection between grid and woreda or kebele shapefile after adding a field to store it
    ap.AddField_management(GridIntersectProject, "PartArea", "DOUBLE", 12, 6,
                           "", "", "", "", "")
    expression = "float(!SHAPE.AREA@SQUAREKILOMETERS!)"
    ap.CalculateField_management(GridIntersectProject, "PartArea", expression,
                                 "PYTHON")

    #convert GridIntersect's .dbf to a .csv for later use
    dbf = GridIntersectProject[0:-4] + ".dbf"
    intersectCSV = convertDBFtoCSV(dbf)

    return intersectCSV, GridCSV
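
A usage sketch (the working directory is a hypothetical placeholder; AggLevel must be "Woreda" or "Kebele", and the non-temperature variable name "Rain" is assumed):

# hypothetical call; returns the two CSV paths used downstream
intersectCSV, GridCSV = intersectGrid("Woreda", r"C:\data\ethiopia", "Rain")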
Example #18
arcpy.CheckOutExtension("3D")
allMinY = 0
allMaxY = 0
for fc in projectFCS:
    #if fc.find('OrientationPoints') > -1:  # if uncommented, restricts to OrientationPoints, for debug purposes only
    shortName = os.path.basename(fc)
    addMsgAndPrint('  ' + fc)
    clippedName = xsFDS + '/clipped_' + shortName
    transName = xsFDS + '/trans_' + shortName
    ZName = xsFDS + '/CS' + token + shortName
    for nm in clippedName, ZName:
        testAndDelete(nm)
    # clip
    if debug:
        addMsgAndPrint('    clipping')
    arcpy.Clip_analysis(fc, clipPoly, clippedName)
    if numberOfRows(clippedName) == 0:
        if debug:
            addMsgAndPrint('    empty output')
        testAndDelete(clippedName)
    else:
        if debug:
            addMsgAndPrint('    clipped dataset has ' +
                           str(numberOfRows(clippedName)) + ' rows')
        # enable Z
        if debug:
            addMsgAndPrint('    adding Z to make ' + os.path.basename(ZName))
        arcpy.InterpolateShape_3d(dem, clippedName, ZName, sampleDistance,
                                  zFactor)

        if os.path.basename(fc) == xsLine:  ### this isn't very robust!
Example #19
def createVectorBoundaryC(f_path, f_name, raster_props, stat_out_folder, vector_bound_path, minZ, maxZ, bound_path, elev_type):
    a = datetime.now()
    arcpy.AddMessage("\tCreating {} bound for '{}' using min z '{}' and max z'{}'".format(elev_type, f_path, minZ, maxZ))

    vector_1_bound_path = os.path.join(stat_out_folder, "B1_{}.shp".format(f_name))
    vector_2_bound_path = os.path.join(stat_out_folder, "B2_{}.shp".format(f_name))
    vector_3_bound_path = os.path.join(stat_out_folder, "B3_{}.shp".format(f_name))
    vector_4_bound_path = os.path.join(stat_out_folder, "B4_{}.shp".format(f_name))
    vector_5_bound_path = os.path.join(stat_out_folder, "B5_{}.shp".format(f_name))
    deleteFileIfExists(vector_bound_path, useArcpy=True)
    deleteFileIfExists(vector_1_bound_path, useArcpy=True)
    deleteFileIfExists(vector_2_bound_path, useArcpy=True)
    deleteFileIfExists(vector_3_bound_path, useArcpy=True)
    deleteFileIfExists(vector_4_bound_path, useArcpy=True)
    deleteFileIfExists(vector_5_bound_path, useArcpy=True)

    arcpy.RasterDomain_3d(in_raster=f_path, out_feature_class=vector_5_bound_path, out_geometry_type="POLYGON")
    Utility.addToolMessages()

    arcpy.MultipartToSinglepart_management(in_features=vector_5_bound_path, out_feature_class=vector_4_bound_path)
    Utility.addToolMessages()
    checkRecordCount(vector_4_bound_path)

    arcpy.EliminatePolygonPart_management(in_features=vector_4_bound_path, out_feature_class=vector_3_bound_path, condition="AREA", part_area="10000 SquareMiles", part_area_percent="0", part_option="CONTAINED_ONLY")
    Utility.addToolMessages()
    checkRecordCount(vector_3_bound_path)

    arcpy.SimplifyPolygon_cartography(
        in_features=vector_3_bound_path,
        out_feature_class=vector_2_bound_path,
        algorithm="POINT_REMOVE",
        tolerance="{} Meters".format(C_SIMPLE_DIST),
        minimum_area="0 Unknown",
        error_option="RESOLVE_ERRORS",
        collapsed_point_option="NO_KEEP",
        in_barriers=""
        )
    Utility.addToolMessages()
    checkRecordCount(vector_2_bound_path)

    arcpy.AddMessage('ZFlag: ' + arcpy.env.outputZFlag)
    arcpy.AddMessage('MFlag: ' + arcpy.env.outputMFlag)

    arcpy.Dissolve_management(in_features=vector_2_bound_path, out_feature_class=vector_1_bound_path, dissolve_field="", statistics_fields="", multi_part="MULTI_PART", unsplit_lines="DISSOLVE_LINES")
    Utility.addToolMessages()
    checkRecordCount(vector_1_bound_path)

    deleteFields(vector_1_bound_path)

    record_count = checkRecordCount(vector_1_bound_path)
    footprint_area = 0
    for row in arcpy.da.SearchCursor(vector_1_bound_path, ["SHAPE@"]):  # @UndefinedVariable
        shape = row[0]
        footprint_area = shape.getArea("PRESERVE_SHAPE", "SQUAREMETERS")

    if footprint_area <= 0:
        arcpy.AddMessage("\tWARNGING: Area is 0 in {} '{}' bound '{}'".format(elev_type, f_path, vector_bound_path))

    addField(in_table=vector_1_bound_path, field_name=FIELD_INFO[PATH][0], field_alias=FIELD_INFO[PATH][1], field_type=FIELD_INFO[PATH][2], field_length=FIELD_INFO[PATH][3])
    addField(in_table=vector_1_bound_path, field_name=FIELD_INFO[NAME][0], field_alias=FIELD_INFO[NAME][1], field_type=FIELD_INFO[NAME][2], field_length=FIELD_INFO[NAME][3])
    addField(in_table=vector_1_bound_path, field_name=FIELD_INFO[AREA][0], field_alias=FIELD_INFO[AREA][1], field_type=FIELD_INFO[AREA][2], field_length=FIELD_INFO[AREA][3])
    addField(in_table=vector_1_bound_path, field_name=FIELD_INFO[ELEV_TYPE][0], field_alias=FIELD_INFO[ELEV_TYPE][1], field_type=FIELD_INFO[ELEV_TYPE][2], field_length=FIELD_INFO[ELEV_TYPE][3])
    addField(in_table=vector_1_bound_path, field_name=FIELD_INFO[RANGE][0], field_alias=FIELD_INFO[RANGE][1], field_type=FIELD_INFO[RANGE][2], field_length=FIELD_INFO[RANGE][3])

    deleteFields(vector_1_bound_path)

    arcpy.AddMessage(raster_props)
    for field_name in KEY_LIST:
        time.sleep(0.25)
        field_shpname = FIELD_INFO[field_name][0]
        field_alias = FIELD_INFO[field_name][1]
        field_type = FIELD_INFO[field_name][2]
        field_length = FIELD_INFO[field_name][3]
        field_value = raster_props[field_name]
        if field_type == "TEXT":
            if str(field_value).endswith('\\'):
                field_value = str(field_value)[0:-1]
            field_value = r'"{}"'.format(field_value)

        addField(in_table=vector_1_bound_path, field_name=field_shpname, field_alias=field_alias, field_type=field_type, field_length=field_length, expression=field_value)


    b_f_path, b_f_name = os.path.split(f_path)
    b_f_name = os.path.splitext(b_f_name)[0]
    arcpy.CalculateField_management(in_table=vector_1_bound_path, field=FIELD_INFO[PATH][0], expression='"{}"'.format(b_f_path), expression_type="PYTHON_9.3")
    arcpy.CalculateField_management(in_table=vector_1_bound_path, field=FIELD_INFO[NAME][0], expression='"{}"'.format(b_f_name), expression_type="PYTHON_9.3")
    arcpy.CalculateField_management(in_table=vector_1_bound_path, field=FIELD_INFO[AREA][0], expression=footprint_area, expression_type="PYTHON_9.3")
    arcpy.CalculateField_management(in_table=vector_1_bound_path, field=FIELD_INFO[ELEV_TYPE][0], expression='"{}"'.format(elev_type), expression_type="PYTHON_9.3")
    try:
        z_expr = "!{}! - !{}!".format(FIELD_INFO[MAX][0], FIELD_INFO[MIN][0])
        arcpy.CalculateField_management(in_table=vector_1_bound_path, field=FIELD_INFO[RANGE][0], expression=z_expr, expression_type="PYTHON_9.3")
    except:
        pass

    deleteFileIfExists(vector_bound_path, True)
    arcpy.Clip_analysis(in_features=vector_1_bound_path, clip_features=bound_path, out_feature_class=vector_bound_path, cluster_tolerance="")
    Utility.addToolMessages()
    checkRecordCount(vector_bound_path)

    deleteFields(vector_bound_path)

    #debug = False
    #try:
    #    debug = (str(f_path).find("alamazoo") >= 0)
    #except:
    #    debug = False
    #if not debug:
    deleteFileIfExists(vector_1_bound_path, useArcpy=True)
    deleteFileIfExists(vector_2_bound_path, useArcpy=True)
    deleteFileIfExists(vector_3_bound_path, useArcpy=True)
    deleteFileIfExists(vector_4_bound_path, useArcpy=True)
    deleteFileIfExists(vector_5_bound_path, useArcpy=True)
    #else:
    #    arcpy.AddMessage("\tleaving artifacts for {} '{}'".format(elev_type, vector_bound_path))

    doTime(a, "\tCreated BOUND {}".format(vector_bound_path))
Example #20
import arcpy, os
import pandas as pd
arcpy.env.overwriteOutput = True

#Script to get the origin schools for each state
us_states = r'D:\ArcPy\arcpy_scripts\projected_states\projected_States.shp' #The states polygons used to clip schools

#Creating variable to read statename and index
state_name = pd.read_csv('D:/ArcPy/arcpy_scripts/projected_states/state_name.csv')
state_name.drop('Unnamed: 0', axis=1, inplace=True)

years = ['98', '99', '00', '01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12', '13', '14']

for year in years:	#looping over the required years
    string = '/year_'+str(year)
    os.mkdir('D:/Arcpy/arcpy_scripts/origins/'+string) #Creating new directory for a new year
    outws = r'D:/Arcpy/arcpy_scripts/origins'+str(string) #The output directory of the clipped origin file
    school_points = r'D:/Arcpy/arcpy_scripts/schools/schools'+str(year)+'.shp'  # The school points to be clipped
    states = arcpy.SearchCursor(us_states)
    count = 0     # Start a counter to name output points
    for state in states: # Loop through individual features of "us_states"
        name = state_name.loc[count, 'STATE_NAME']	# reading the state file name
        ori_schools = os.path.join(outws, 'origin_'+str(year)+'_'+str(name))  # Assemble the output point name and path
        arcpy.Clip_analysis(school_points, state.Shape, ori_schools) #Clipping the data
        count = count + 1

# Clean up... (not necessary when using the "with" statement with arcpy.da cursors)

del state
del states
Example #21
# Smooth polygon using Bezier Interpolation and export without clipping
if Clip_Dataset == "":
    arcpy.AddMessage(
        "No clip dataset was specified, exporting concave hull polygon...")
    arcpy.SmoothPolygon_cartography(Dissolve_Polygon, Concave_Hull,
                                    "BEZIER_INTERPOLATION", "0 Meters",
                                    "FIXED_ENDPOINT", "NO_CHECK")
# Smooth polygon using Bezier Interpolation and clip the output
elif Clip_Dataset != "":
    arcpy.SmoothPolygon_cartography(Dissolve_Polygon, Smooth_Polygon,
                                    "BEZIER_INTERPOLATION", "0 Meters",
                                    "FIXED_ENDPOINT", "NO_CHECK")
    xy_tolerance = ""
    arcpy.AddMessage("Clipping concave hull polygon to the clip dataset...")
    arcpy.Clip_analysis(Smooth_Polygon, Clip_Dataset, Concave_Hull,
                        xy_tolerance)
else:
    arcpy.AddError("An invalid clip dataset was entered.")

# Delete intermediate data
arcpy.Delete_management(Search_Raster)
arcpy.Delete_management(Search_Points)
arcpy.Delete_management(Aggregate_Polygon)
arcpy.Delete_management(Buffer_Polygon)
arcpy.Delete_management(Dissolve_Polygon)
if os.path.exists(Smooth_Polygon) == True:
    arcpy.Delete_management(Smooth_Polygon)
if os.path.exists(Agg_Table) == True:
    os.remove(Agg_Table)
if os.path.exists(Agg_Table_CPG) == True:
    os.remove(Agg_Table_CPG)
Example #22
def main(in_dir, out_dir, dem_dir, hydrog_dir, info1, info2, info3, info4,
         info5, info6):
    arcpy.env.overwriteOutput = True

    # Create the output directory
    if not os.path.isdir(os.getcwd() + out_dir):
        os.makedirs(os.getcwd() + out_dir)

    # Step 1. Use "raster domain" tool from arcpy and create a domain shapefile from the DEM raster file.
    print("Start the pre-processing steps")
    arcpy.RasterDomain_3d(
        in_raster=os.path.normpath(os.getcwd() + os.sep + os.pardir + os.sep +
                                   os.pardir) + dem_dir + info1,
        out_feature_class=os.getcwd() + out_dir + info2,
        out_geometry_type="POLYGON")

    # Step 2. Select the domain shapefile and define an internal buffer of the domain shapefile using "buffer" tool.
    # Remember to use negative sign for the internal buffer.
    arcpy.Buffer_analysis(in_features=os.getcwd() + out_dir + info2,
                          out_feature_class=os.getcwd() + out_dir + info3,
                          buffer_distance_or_field=info4,
                          line_side="FULL",
                          line_end_type="ROUND",
                          dissolve_option="NONE",
                          dissolve_field="",
                          method="GEODESIC")

    # Step 3. Use "select by attribute" tool to exclude the canals.
    name = os.path.normpath(os.getcwd() + os.sep + os.pardir + os.sep +
                            os.pardir) + hydrog_dir + info5
    name_temp = name.replace('.shp', '_lyr')
    arcpy.MakeFeatureLayer_management(name, name_temp)
    medres_NHDPlusflowline_noCanal = arcpy.SelectLayerByAttribute_management(
        name_temp, 'NEW_SELECTION', "FTYPE <> " + info6)

    # Step 4. Clip the medium-resolution NHD (i.e., medres_NHDPlusflowline.shp) for the "internal250mbuffer.shp".
    arcpy.Clip_analysis(in_features=medres_NHDPlusflowline_noCanal,
                        clip_features=os.getcwd() + out_dir + info3,
                        out_feature_class=os.getcwd() + out_dir +
                        'medres_NHDPlusflowline_noCanal_to_bfr.shp',
                        cluster_tolerance="")

    # Step 5. Find the starting points (dangles) that are used as an input to the network delineation process.
    #         In this research study, we used the main stems for the dangles. This was done within the ArcGIS software
    #         and the results are here as input files for the next steps. The following comments explain the steps we
    #         took in the ArcGIS software to create these files; a scriptable sketch follows Step 6 below.

    #         1.Select the dangles (using "feature vertices to points" tool and set point_location="DANGLE").
    #         These are going to be used as the starting points to delineate the stream network. In our work, we only
    #         considered the main stems when generating the dangle points. This is due to the fact that the main stems
    #         had larger contributing areas in our case studies compared to the side tributaries.

    #         2.Open the attribute table of the dangle shapefile. Add a field using "add field" tool. Then, use
    #         "calculate field" tool to put a value of 1 for all the rows in this field.

    #         3.Use "feature to raster" tool to rasterize the dangle points. Make sure to set the extent and snap
    #         raster sizes to be the same as the DEM size (an extracted DEM that covers the domain) in the environment
    #         settings of the tool.

    #         4.Use the "reclassify" tool to reclassify the inlets.tif raster layer so that 1 stays one and
    #         everything else (i.e., 0 or NaN) becomes zero.

    # Step 6. Generate the evenly dispersed nodes along the main stem.
    #         The evenly dispersed nodes are used as an input to the network delineation process. This was done within
    #         the ArcGIS software and the results are here as input files for the next steps. The following comment
    #         explains the steps we took in the ArcGIS software to create these nodes.

    #         1.Select the main stem from the buffered medium resolution NHDPlus
    #         dataset and save it as a new feature. Then, use "dissolve" tool to merge the segments (reaches) of this
    #         new feature into one single segment. Next, use "editor -> split" tool to split this feature into smaller
    #         equal segments with 3 km long. Then, create a new point shapefile and use the editor tool to generate
    #         points on the upstream and downstream ends of these equal segments. The new shape file is the evenly
    #         dispersed nodes on the main stems. This is required when delineating the stream network and catchments.
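    #         A scriptable sketch of steps 5.1-5.3 above (a sketch only; the paths
    #         and names are hypothetical, and FeatureVerticesToPoints requires an
    #         Advanced license):
    #             arcpy.FeatureVerticesToPoints_management(mainstem_shp, dangles_shp, "DANGLE")
    #             arcpy.AddField_management(dangles_shp, "val", "SHORT")
    #             arcpy.CalculateField_management(dangles_shp, "val", "1", "PYTHON")
    #             arcpy.FeatureToRaster_conversion(dangles_shp, "val", "inlets.tif", cell_size)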

    # Step 7. Use a non-conditioned DEM and calculate the HAND using the following procedure.

    # 1.Fill pits in the original dem
    print('Running PitRemove Function')
    i_path = os.path.normpath(os.getcwd() + os.sep + os.pardir + os.sep +
                              os.pardir) + dem_dir + info1
    o_path = os.getcwd() + out_dir + "fel.tif"
    bashCommand = "mpiexec -n 10 PitRemove -z " + i_path + " -fel " + o_path
    os.system(bashCommand)
    time.sleep(120)

    # 2.Compute the D8 flow direction.
    print('Running D8 Flow Direction Function')
    i_path = os.getcwd() + out_dir + "fel.tif"
    o1_path = os.getcwd() + out_dir + "p.tif"
    o2_path = os.getcwd() + out_dir + "sd8.tif"
    bashCommand = "mpiexec -n 10 D8FlowDir -fel " + i_path + " -p " + o1_path + " -sd8 " + o2_path
    os.system(bashCommand)
    time.sleep(360)

    # 3.Compute the D8 contributing area.
    print('Running D8 Area Contributing Function')
    i1_path = os.getcwd() + in_dir + "inlets_on_mainstem.tif"
    i2_path = os.getcwd() + out_dir + "p.tif"
    o_path = os.getcwd() + out_dir + "ad8.tif"
    bashCommand = "mpiexec -n 10 Aread8 -wg " + i1_path + " -p " + i2_path + " -ad8 " + o_path + " -nc "
    os.system(bashCommand)
    time.sleep(120)

    # 4.Use a threshold to delineate the stream
    print('Running Network Delineation Function')
    i_path = os.getcwd() + out_dir + "ad8.tif"
    o_path = os.getcwd() + out_dir + "src.tif"
    bashCommand = "mpiexec -n 10 Threshold -ssa " + i_path + " -src " + o_path + " -thresh 1"
    os.system(bashCommand)
    time.sleep(120)

    # 5.Generate network and watershed
    i1_path = os.getcwd() + in_dir + "Evenly_dispersed_nodes.shp"
    i2_path = os.getcwd() + out_dir + "fel.tif"
    i3_path = os.getcwd() + out_dir + "p.tif"
    i4_path = os.getcwd() + out_dir + "ad8.tif"
    i5_path = os.getcwd() + out_dir + "src.tif"
    o1_path = os.getcwd() + out_dir + "ord.tif"
    o2_path = os.getcwd() + out_dir + "tree.dat"
    o3_path = os.getcwd() + out_dir + "coord.dat"
    o4_path = os.getcwd() + out_dir + "net.shp"
    o5_path = os.getcwd() + out_dir + "w.tif"
    bashCommand = "mpiexec -n 10 Streamnet -o " + i1_path + " -fel " + i2_path + " -p " + i3_path + \
                  " -ad8 " + i4_path + " -src " + i5_path + " -ord " + o1_path + " -tree " + o2_path + \
                  " -coord " + o3_path + " -net " + o4_path + " -w " + o5_path
    os.system(bashCommand)

    # 6.Compute the D-inf flow direction. This function may take several hours to run.
    print('Running D-infinity Flow Direction Function')
    i_path = os.getcwd() + out_dir + "fel.tif"
    o1_path = os.getcwd() + out_dir + "ang.tif"
    o2_path = os.getcwd() + out_dir + "slp.tif"
    bashCommand = "mpiexec -n 10 DinfFlowDir -fel " + i_path + " -ang " + o1_path + " -slp " + o2_path
    os.system(bashCommand)
    time.sleep(360)

    # 7.Compute the HAND using D-inf Distance Down.
    print('Running D-infinity Distance Down Function to calculate HAND')
    i1_path = os.getcwd() + out_dir + "fel.tif"
    i2_path = os.getcwd() + out_dir + "src.tif"
    i3_path = os.getcwd() + out_dir + "ang.tif"
    o_path = os.getcwd() + out_dir + "hand.tif"
    bashCommand = "mpiexec -n 10 DinfDistDown -fel " + i1_path + " -src " + i2_path + " -ang " + i3_path + " -m ave v " + " -dd " + o_path + " -nc "
    os.system(bashCommand)
    time.sleep(360)

    print('Done!')
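
# Note: each TauDEM call above pairs os.system() with a fixed time.sleep(), although
# os.system() already blocks until the command returns. A minimal alternative sketch
# using subprocess.run from the standard library, which blocks and can raise on a
# nonzero exit status; the helper name and process count are illustrative assumptions.
import subprocess

def run_taudem(tool_args, nproc=10):
    """Run one TauDEM tool under mpiexec and wait for it to finish."""
    # check=True raises subprocess.CalledProcessError if the tool exits nonzero
    subprocess.run(["mpiexec", "-n", str(nproc)] + tool_args, check=True)

# e.g. run_taudem(["PitRemove", "-z", "dem.tif", "-fel", "fel.tif"])
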
# Calculate flow accumulation (requires the Spatial Analyst extension; dir_ras
# and project_area are assumed to be defined earlier in this script)
import arcpy
from arcpy.sa import FlowAccumulation
arcpy.CheckOutExtension("Spatial")
flow_accum = 'flow_accum'
flowAccumulation = FlowAccumulation(dir_ras, "", "FLOAT")
flowAccumulation.save(flow_accum)

#create buffer around water bodies
distance = [5, 10, 20, 50]
rivers_all = "river_florida.shp"
river_buf = "river_buffer.shp"
arcpy.MultipleRingBuffer_analysis(rivers_all, river_buf, distance, "miles", "",
                                  "ALL")

# clip waterbodies to extent
rivers = "riversbuf_clip.shp"
arcpy.Clip_analysis(river_buf, project_area, rivers)

#convert buffer distance to raster data
river_raster = "river_raster.tif"
field = "distance"
arcpy.FeatureToRaster_conversion(rivers, field, river_raster)

#clip precipitation to study area
precip_fl = "precip1981_2010_a_fl.shp"
precip = "precip_clip.shp"
arcpy.Clip_analysis(precip_fl, project_area, precip)

#convert precipitation to raster data
precip_raster = "precip_raster.tif"
field = "PrecipInch"
arcpy.FeatureToRaster_conversion(precip, field, precip_raster)
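
# A minimal sketch (an assumption, not part of the original script): setting the raster
# environment before the two FeatureToRaster calls above keeps river_raster.tif and
# precip_raster.tif aligned to a common cell size, extent, and snap raster.
# arcpy.env.snapRaster = flow_accum
# arcpy.env.extent = flow_accum
# arcpy.env.cellSize = flow_accum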
Example #24
# Fragment: assumed to run inside a directory walk, e.g.
#   for root, dirs, files in os.walk(path):
#       for f in files:
# with new_file (an open output file) and Redeye (the historic fire perimeter) defined earlier.
if f.startswith('RE_'):
    p = os.path.join(os.path.join(path, root), f)
    new_file.write(f)
    new_file.write(", 7248915.47482, ")  # Redeye fire perimeter
    arcpy.AddField_management(p, "Uniform", "LONG")  # Add dissolve agent ("Integer" is not a valid field type)
    arcpy.RepairGeometry_management(p)  # Areas were erroneously negative
    dis = arcpy.Dissolve_management(p, str('dis_' + f), 'Uniform')  # Merge hourly timesteps
    # New shape area
    arcpy.AddField_management(dis, "Shape_area", "DOUBLE")
    exp = "!SHAPE.AREA@SQUAREMETERS!"
    arcpy.CalculateField_management(dis, "Shape_area", exp, "PYTHON_9.3")
    arcpy.RepairGeometry_management(dis)
    for row in arcpy.SearchCursor(dis):
        new_file.write(str(row.getValue("Shape_area")))
        new_file.write(", ")
    clip = arcpy.Clip_analysis(dis, Redeye, str('clip_' + f))
    arcpy.CalculateField_management(clip, "Shape_area", exp, "PYTHON_9.3")
    for row in arcpy.SearchCursor(clip):
        new_file.write(str(row.getValue("Shape_area")))
        new_file.write(", ")
    mrg = arcpy.Merge_management([dis, Redeye], str('mrg_' + f))  # Merge historic and simulated fires
    dis2 = arcpy.Dissolve_management(mrg, str('dis2_' + f), 'Uniform')
    arcpy.AddField_management(dis2, "Shape_area", "DOUBLE")
    exp2 = "!SHAPE.AREA@SQUAREMETERS!"
    arcpy.CalculateField_management(dis2, "Shape_area", exp2, "PYTHON_9.3")
    for row in arcpy.SearchCursor(dis2):
        new_file.write(str(row.getValue("Shape_area")))
        new_file.write(", ")
    new_file.write("\n")
elif f.startswith('FAM_'):
    p = os.path.join(os.path.join(path, root), f)
Example #25
            exit()

        # ----------------------------------------------------------------------------------------------- Create New Outlet
        # -------------------------------------------- Features reside on hard disk;
        #                                              No heads up digitizing was used.

        if (os.path.dirname(
                arcpy.da.Describe(outlet)['catalogPath'])).find("memory") < 0:

            # if paths between outlet and outletFC are NOT the same
            if not arcpy.da.Describe(outlet)['catalogPath'] == outletFC:

                # delete the outlet feature class; new one will be created
                if arcpy.Exists(outletFC):
                    arcpy.Delete_management(outletFC)
                    arcpy.Clip_analysis(outlet, projectAOI, outletFC)
                    AddMsgAndPrint("\nSuccessfully Recreated " +
                                   str(outletOut) +
                                   " feature class from existing layer")

                else:
                    arcpy.Clip_analysis(outlet, projectAOI, outletFC)
                    AddMsgAndPrint("\nSuccessfully Created " + str(outletOut) +
                                   " feature class from existing layer")

            # paths are the same therefore input IS pour point
            else:
                AddMsgAndPrint("\nUsing Existing " + str(outletOut) +
                               " feature class")

        # -------------------------------------------- Features reside in Memory;
Example #26
File: p3.py Project: Rina97/ArcPy
import arcpy
import os
arcpy.env.workspace = r'C:\Users\Ira\Dropbox\Study\ProgPy\P1_3'
arcpy.env.overwriteOutput = True
amenities = ['school','hospital','place_of_worship']
country = 'El Salvador'
arcpy.MakeFeatureLayer_management('CentralAmerica.shp', 'temply',
                                  '"NAME" = ' + "'" + country + "'")
arcpy.Clip_analysis('OSMpoints.shp', 'temply', 'clip.shp')
arcpy.CreateFileGDB_management(arcpy.env.workspace, 'yourGDB.gdb')
for i in amenities:
    name = 'point' + i
    arcpy.MakeFeatureLayer_management('clip.shp', name, '"amenity" = ' + "'" + i + "'")
    arcpy.CopyFeatures_management(name, arcpy.env.workspace + r'\yourGDB.gdb' + r'\point' + i)
    arcpy.AddField_management('yourGDB.gdb\\' + name, 'source', "TEXT")
    arcpy.AddField_management('yourGDB.gdb\\' + name, 'GID', "DOUBLE")
    #arcpy.AlterField_management('yourGDB.gdb\\' + name, 'id', 'GID')
    with arcpy.da.UpdateCursor('yourGDB.gdb\\' + name, ('source', 'GID', 'id')) as cursor:
        for row in cursor:
            row[0] = 'OpenStreetMap'
            row[1] = row[2]
            cursor.updateRow(row)
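
# A quick sanity check (an addition, not part of the original script): report how
# many points were copied into each amenity feature class.
for i in amenities:
    print(arcpy.GetCount_management('yourGDB.gdb\\' + 'point' + i).getOutput(0))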
Example #27
def lepTracts(census_key, year, tract_mergedgdb, tract_file, root_dir, gdb,
              final_gdb_loc, region):

    # initial census call
    c = Census(census_key, year=2000 + int(year))

    # call the acs 5 year tracts api for MO and IL convert to DF's and merge
    mo = c.acs5.state_county_tract(
        ("GEO_ID", "C16001_005E", "C16001_008E", "C16001_011E", "C16001_014E",
         "C16001_017E", "C16001_020E", "C16001_023E", "C16001_026E",
         "C16001_029E", "C16001_035E"), states.MO.fips, '189,510', Census.ALL)
    mo = pd.DataFrame(mo)

    il = c.acs5.state_county_tract(
        ("GEO_ID", "C16001_005E", "C16001_008E", "C16001_011E", "C16001_014E",
         "C16001_017E", "C16001_020E", "C16001_023E", "C16001_026E",
         "C16001_029E", "C16001_035E"), states.IL.fips, '163', Census.ALL)
    il = pd.DataFrame(il)

    mergeddf = pd.concat([mo, il], ignore_index=True)
    mergeddf["GEOID_DATA"] = mergeddf["GEO_ID"]
    mergeddf['GEOID_DATA'] = mergeddf['GEOID_DATA'].str.slice(start=9)

    # import geo to geopandas
    # pd.DataFrame.spatial.from_layer(os.path.join(tract_mergedgdb, tract_file))

    # # CLIP FEATURE CLASS
    # # CALC COVERAGE OF FIELDS

    tracts = pd.DataFrame.spatial.from_featureclass(
        os.path.join(tract_mergedgdb, tract_file))
    tracts = tracts.merge(mergeddf, left_on='GEOID', right_on='GEOID_DATA')
    print(tracts.columns)
    # ["TLEPFrench", "DOUBLE"], # C16001e8  LANGUAGE SPOKEN AT HOME FOR THE POPULATION 5 YEARS AND OVER: French, Haitian, or Cajun: Speak English less than "very well": Population 5 years and over -- (Estimate)
    # ["TLEPGerm", "DOUBLE"], # C16001e11  LANGUAGE SPOKEN AT HOME FOR THE POPULATION 5 YEARS AND OVER: German or other West Germanic languages: Speak English less than ""very well"": Population 5 years and over -- (Estimate)
    # ["TLEPRuss", "DOUBLE"],# C16001e14   LANGUAGE SPOKEN AT HOME FOR THE POPULATION 5 YEARS AND OVER: Russian, Polish, or other Slavic languages: Speak English less than "very well": Population 5 years and over -- (Estimate)
    # ["TLEPOIndoEuro", "DOUBLE"],# C16001e17   LANGUAGE SPOKEN AT HOME FOR THE POPULATION 5 YEARS AND OVER: Other Indo-European languages: Speak English less than ""very well"": Population 5 years and over -- (Estimate)
    # ["TLEPKor", "DOUBLE"],# C16001e20   LANGUAGE SPOKEN AT HOME FOR THE POPULATION 5 YEARS AND OVER: Korean: Speak English less than ""very well"": Population 5 years and over -- (Estimate)
    # ["TLEPChin", "DOUBLE"],# C16001e23   LANGUAGE SPOKEN AT HOME FOR THE POPULATION 5 YEARS AND OVER: Chinese (incl. Mandarin, Cantonese): Speak English less than "very well": Population 5 years and over -- (Estimate)
    # ["TLEPViet", "DOUBLE"],# C16001e26   LANGUAGE SPOKEN AT HOME FOR THE POPULATION 5 YEARS AND OVER: Vietnamese: Speak English less than ""very well"": Population 5 years and over -- (Estimate)
    # ["TLEPTaglog", "DOUBLE"],# C16001e29   LANGUAGE SPOKEN AT HOME FOR THE POPULATION 5 YEARS AND OVER: Tagalog (incl. Filipino): Speak English less than ""very well"": Population 5 years and over -- (Estimate)
    # ["TLEPArabic", "DOUBLE"],# C16001e35   LANGUAGE SPOKEN AT HOME FOR THE POPULATION 5 YEARS AND OVER: Arabic: Speak English less than ""very well"": Population 5 years and over -- (Estimate)

    # RENAME FIELDS

    fields = [["C16001_005E", "TLEPSpan"], ["C16001_008E", "TLEPFrench"],
              ["C16001_011E", "TLEPGerm"], ["C16001_014E", "TLEPRuss"],
              ["C16001_017E", "TLEPOIndoEuro"], ["C16001_020E", "TLEPKor"],
              ["C16001_023E", "TLEPChin"], ["C16001_026E", "TLEPViet"],
              ["C16001_029E", "TLEPTaglog"], ["C16001_035E", "TLEPArabic"],
              ["sqmiles", "org_sqmiles"]]

    for field in fields:
        tracts[field[1]] = tracts[field[0]]
        tracts = tracts.drop(field[0], axis=1)  # drop() returns a new DataFrame, so reassign

    tracts.to_csv(os.path.join(root_dir, rf"LEP_Lang_{year}.csv"), index=False)
    tracts.spatial.to_featureclass(
        location=os.path.join(gdb, rf"LEP_Lang_{year}"))

    ap.env.workspace = gdb
    ap.env.overwriteOutput = True
    ap.CalculateField_management(f"LEP_Lang_{year}", "org_sqmiles",
                                 '!shape.area@squaremiles!', 'PYTHON3')
    ap.Clip_analysis(f"LEP_Lang_{year}", region, f"LEP_Lang_{year}_Region")
    ap.AddField_management(f"LEP_Lang_{year}_Region",
                           "NewSqMiles",
                           field_type='float')
    ap.CalculateField_management(f"LEP_Lang_{year}_Region", "NewSqMiles",
                                 '!shape.area@squaremiles!', 'PYTHON3')

    tracts_clipped = pd.DataFrame.spatial.from_featureclass(
        os.path.join(ap.env.workspace, f"LEP_Lang_{year}_Region"))
    tracts_clipped['Coverage'] = tracts_clipped['NewSqMiles'] / tracts_clipped[
        'org_sqmiles']

    coverage_fields = [
        'c16001_005_e', 'c16001_008_e', 'c16001_011_e', 'c16001_014_e',
        'c16001_017_e', 'c16001_020_e', 'c16001_023_e', 'c16001_026_e',
        'c16001_029_e', 'c16001_035_e', 'tlep_french', 'tlep_germ',
        'tlep_russ', 'tlepo_indo_euro', 'tlep_kor', 'tlep_chin', 'tlep_viet',
        'tlep_taglog', 'tlep_arabic'
    ]

    for field in coverage_fields:
        tracts_clipped[
            field] = tracts_clipped[field] * tracts_clipped['Coverage']
        print(field)

    tracts_clipped.spatial.to_featureclass(
        location=os.path.join(final_gdb_loc, rf"LEP_Lang_{year}_Region"))
    tracts_clipped.to_csv(os.path.join(root_dir,
                                       rf"LEP_Lang_{year}_Region.csv"),
                          index=False)
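
# A minimal usage sketch (every value below is a hypothetical placeholder, not from the
# original; `year` is a two-digit string because the function computes 2000 + int(year)):
# lepTracts(census_key="YOUR_CENSUS_API_KEY", year="19",
#           tract_mergedgdb=r"C:\data\tracts.gdb", tract_file="tracts_merged",
#           root_dir=r"C:\data\out", gdb=r"C:\data\work.gdb",
#           final_gdb_loc=r"C:\data\final.gdb", region=r"C:\data\work.gdb\region_boundary")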
Example #28
import arcpy
from arcpy import env
env.workspace = "C:/GEO6533/Python/Data/Exercise05"
env.overwriteOutput = True
newclip = arcpy.Clip_analysis("bike_routes.shp", "parks.shp",
                              "Results/bike_Clip.shp")
fCount = arcpy.GetCount_management("Results/bike_Clip.shp")
print fCount.getOutput(0)  # number of clipped features
msgCount = newclip.messageCount
print newclip.getMessage(msgCount - 1)
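
# To inspect every geoprocessing message on the Result object rather than only the
# last one (a small extension of the example above, using the same Result API):
for i in range(newclip.messageCount):
    print newclip.getMessage(i)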
Example #29
input_file = "Sitios_Ramsar_Geo_ITRF92_2015.shp"
outputList = []
# (gdb, outCS, clip, and xy_tolerance are assumed to be defined earlier in this script)

# Project
path, name = os.path.split(input_file)
projName = os.path.splitext(name)[0] + "_pr"
projFile = os.path.join(gdb, projName)
arcpy.Project_management(input_file, projFile, outCS)
print "Project", projName, "ends at", datetime.datetime.now().strftime(
    "%A, %B %d %Y %I:%M:%S%p")

# Clip
clipName = projName.split('_pr')[0] + "_clip"
clipFile = os.path.join(gdb, clipName)
arcpy.Clip_analysis(projFile, clip, clipFile, xy_tolerance)
outputList.append(clipFile)

print "Clip", clipName, "ends at", datetime.datetime.now().strftime(
    "%A, %B %d %Y %I:%M:%S%p")

print "Step 1 Project and clip ends at", datetime.datetime.now().strftime(
    "%A, %B %d %Y %I:%M:%S%p")

# ---------------------------------------------------------------------------
# 2. Clean the outputs
## Description: Translate Spanish to English (fields and field headers). Remove unnecessary fields

print "\nStep 2 Clean the output starts at", datetime.datetime.now().strftime(
    "%A, %B %d %Y %I:%M:%S%p")
Example #30
def clipInputs(outputFolder, studyAreaMaskBuff, inputDEM, inputLC, inputSoil, inputStreamNetwork, outputDEM, outputLC, outputSoil, outputStream):

    try:
        log.info("Clipping input data")

        # Set temporary variables
        prefix = os.path.join(arcpy.env.scratchGDB, "clip_")

        DEMCopy = prefix + "DEMCopy"
        lcResample = prefix + "lcResample"
        soilResample = prefix + "soilResample"

        # Clip DEM
        # Check the DEM is not compressed; if it is, uncompress it before clipping.
        compression = arcpy.Describe(inputDEM).compressionType
        if compression.lower() != 'none':
            arcpy.env.compression = "None"
            arcpy.CopyRaster_management(inputDEM, DEMCopy)
            arcpy.Clip_management(DEMCopy, "#", outputDEM, studyAreaMaskBuff, clipping_geometry="ClippingGeometry")

            # Delete copy of DEM
            arcpy.Delete_management(DEMCopy)

        else:
            arcpy.Clip_management(inputDEM, "#", outputDEM, studyAreaMaskBuff, clipping_geometry="ClippingGeometry")

        DEMSpatRef = arcpy.Describe(outputDEM).SpatialReference

        # Set environment variables
        arcpy.env.snapRaster = outputDEM
        arcpy.env.extent = outputDEM
        arcpy.env.cellSize = outputDEM

        # Resample and clip land cover
        lcFormat = arcpy.Describe(inputLC).dataType

        if lcFormat in ['RasterDataset', 'RasterLayer']:
            lcResampleInt = arcpy.sa.ApplyEnvironment(inputLC)
            lcResampleInt.save(lcResample)
            del lcResampleInt

            arcpy.Clip_management(lcResample, "#", outputLC, studyAreaMaskBuff, clipping_geometry="ClippingGeometry")

            # Delete resampled LC
            arcpy.Delete_management(lcResample)

        elif lcFormat in ['ShapeFile', 'FeatureClass']:
            arcpy.Clip_analysis(inputLC, studyAreaMaskBuff, outputLC, configuration.clippingTolerance)

        # Resample and clip soil
        soilFormat = arcpy.Describe(inputSoil).dataType

        if soilFormat in ['RasterDataset', 'RasterLayer']:
            soilResampleInt = arcpy.sa.ApplyEnvironment(inputSoil)
            soilResampleInt.save(soilResample)
            del soilResampleInt

            arcpy.Clip_management(soilResample, "#", outputSoil, studyAreaMaskBuff, clipping_geometry="ClippingGeometry")

            # Delete resampled soil
            arcpy.Delete_management(soilResample)

        elif soilFormat in ['ShapeFile', 'FeatureClass']:
            arcpy.Clip_analysis(inputSoil, studyAreaMaskBuff, outputSoil, configuration.clippingTolerance)

        # Clip stream network
        if inputStreamNetwork is None:
            outputStream = None
        else:
            arcpy.Clip_analysis(inputStreamNetwork, studyAreaMaskBuff, outputStream, configuration.clippingTolerance)


        log.info("Input data clipped successfully")

    except Exception:
        log.error("Input data clipping did not complete successfully")
        raise
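
# A minimal usage sketch (paths are hypothetical placeholders; `log` and `configuration`
# are assumed to come from the surrounding project):
# clipInputs(outputFolder=r"C:\data\out",
#            studyAreaMaskBuff=r"C:\data\mask_buff.shp",
#            inputDEM=r"C:\data\dem.tif", inputLC=r"C:\data\landcover.tif",
#            inputSoil=r"C:\data\soils.shp", inputStreamNetwork=r"C:\data\streams.shp",
#            outputDEM=r"C:\data\out\dem_clip.tif", outputLC=r"C:\data\out\lc_clip.tif",
#            outputSoil=r"C:\data\out\soil_clip.shp", outputStream=r"C:\data\out\streams_clip.shp")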