Example #1
def intersectGrid(AggLevel, workingDir, variable):
    '''Intersects the GHCN temperature data grid with the AggLevel = "Woreda" or "Kebele" shapefile'''

    #create grid shapefile
    Grid = workingDir + "\\All" + variable + "Grid.shp"
    if not os.path.exists(Grid):
        if variable == "Temp":
            origin_coord = "-180 -90"
            nrows = "360"
            ncols = "720"
            polygon_width = "0.5 degrees"
        else:
            origin_coord = "-20.05 -40.05"
            nrows = "801"
            ncols = "751"
            polygon_width = "0.1 degrees"

        polygon_height = polygon_width
        ap.GridIndexFeatures_cartography(Grid, "", "", "", "", polygon_width,
                                         polygon_height, origin_coord, nrows,
                                         ncols)
        ap.DefineProjection_management(
            Grid,
            coor_system="GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',\
        SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]]"
        )

        #add 3 or 4 fields to grid shapefile: latitude (LAT), longitude (LONG) and
        #for precipitation, row (row) of text file corresponding to each grid in the shapefile;
        #for temperature, row (row) and column (col) of netCDF file corresponding to each grid in the shapefile
        ap.AddField_management(Grid, "LAT", "DOUBLE", 7, 2, "", "", "", "", "")
        ap.AddField_management(Grid, "LONG", "DOUBLE", 7, 2, "", "", "", "",
                               "")
        ap.AddField_management(Grid, "row", "SHORT", 6, "", "", "", "", "", "")
        if variable == "Temp":
            ap.AddField_management(Grid, "col", "SHORT", 5, "", "", "", "", "",
                                   "")

        #calculate lat and long fields
        expression1 = "float(!SHAPE.CENTROID!.split()[0])"
        expression2 = "float(!SHAPE.CENTROID!.split()[1])"
        ap.CalculateField_management(Grid, "LONG", expression1, "PYTHON")
        ap.CalculateField_management(Grid, "LAT", expression2, "PYTHON")

        #calculate row and col fields
        if variable == "Temp":
            Grid = calcTempFields(Grid)
        else:
            Grid = calcRainFields(Grid)

    #clip the grid to Ethiopia and convert its .dbf to a .csv for later use
    GridClip = workingDir + "\\" + variable + "GridClip" + AggLevel + ".shp"
    if AggLevel == 'Woreda':
        EthiopiaBorders = os.path.dirname(
            workingDir) + "\\Shapefiles\\WoredasAdindan.shp"
    elif AggLevel == 'Kebele':
        EthiopiaBorders = os.path.dirname(
            workingDir
        ) + "\\Shapefiles\\Ethiopia Kebeles without Somali region.shp"

    ap.Clip_analysis(Grid, EthiopiaBorders, GridClip)
    dbf = GridClip[0:-4] + ".dbf"
    GridCSV = convertDBFtoCSV(dbf)

    #intersect the clipped grid with the woreda or kebele shapefile and project to Adindan
    GridIntersect = workingDir + "\\" + variable + AggLevel + "Intersect.shp"
    ap.Intersect_analysis([GridClip, EthiopiaBorders], GridIntersect)
    GridIntersectProject = GridIntersect[0:-4] + "Project.shp"
    ap.Project_management(
        GridIntersect,
        GridIntersectProject,
        out_coor_system="PROJCS['Adindan_UTM_Zone_37N',GEOGCS['GCS_Adindan',\
    DATUM['D_Adindan',SPHEROID['Clarke_1880_RGS',6378249.145,293.465]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],\
    PROJECTION['Transverse_Mercator'],PARAMETER['False_Easting',500000.0],PARAMETER['False_Northing',0.0],\
    PARAMETER['Central_Meridian',39.0],PARAMETER['Scale_Factor',0.9996],PARAMETER['Latitude_Of_Origin',0.0],\
    UNIT['Meter',1.0]]",
        transform_method="Adindan_To_WGS_1984_1",
        in_coor_system="GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',\
    SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]]"
    )

    #calculate area of intersection between grid and woreda or kebele shapefile after adding a field to store it
    ap.AddField_management(GridIntersectProject, "PartArea", "DOUBLE", 12, 6,
                           "", "", "", "", "")
    expression = "float(!SHAPE.AREA@SQUAREKILOMETERS!)"
    ap.CalculateField_management(GridIntersectProject, "PartArea", expression,
                                 "PYTHON")

    #convert GridIntersect's .dbf to a .csv for later use
    dbf = GridIntersectProject[0:-4] + ".dbf"
    intersectCSV = convertDBFtoCSV(dbf)

    return intersectCSV, GridCSV
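# convertDBFtoCSV is called above but not shown in this example; a minimal
# sketch of what it is assumed to do (hypothetical implementation):
def convertDBFtoCSV(dbf):
    '''Writes the .dbf table to a .csv alongside it and returns the .csv path'''
    import csv
    csvFile = dbf[0:-4] + ".csv"
    fields = [f.name for f in ap.ListFields(dbf) if f.type != "Geometry"]
    with open(csvFile, "wb") as out:  # "wb" for the Python 2 csv module
        writer = csv.writer(out)
        writer.writerow(fields)
        with ap.da.SearchCursor(dbf, fields) as rows:
            for row in rows:
                writer.writerow(row)
    return csvFile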
arcpy.SelectLayerByAttribute_management(layer_name, 'CLEAR_SELECTION')

arcpy.env.workspace = r'C:\...\Data\Illinois.gdb'  # Change the path...

dissolve_fields = ['STATE_NAME', 'ST_ABBREV']

arcpy.Dissolve_management(layer_name, 'Illinois', dissolve_fields, '',
                          'SINGLE_PART', 'DISSOLVE_LINES')

arcpy.PolygonToLine_management('Illinois', 'Illinois_Boundary')

csv_file = r'C:\...\Data\CSV\JeopardyContestants_LatLon.csv'  # Change the path...

arcpy.CopyRows_management(csv_file, 'JeopardyContestants_Table')

arcpy.MakeXYEventLayer_management('JeopardyContestants_Table', 'lon', 'lat',
                                  'Jeopardy Contestants')

arcpy.Select_analysis('Jeopardy Contestants', 'JeopardyContestants',
                      '"lat" IS NOT NULL AND "lon" IS NOT NULL')  # AND, not OR: keep only rows with both coordinates

arcpy.Buffer_analysis('JeopardyContestants', 'JeopardyContestants_Buffer',
                      '5 Miles', 'FULL', 'ROUND', 'ALL', '', 'GEODESIC')

arcpy.Clip_analysis('JeopardyContestants_Buffer', 'Illinois',
                    'JeopardyContestants_Buffer_Clip')

arcpy.Intersect_analysis(
    ['Illinois Counties', 'JeopardyContestants_Buffer_Clip'],
    'Illinois_Counties_Intersect', 'ALL')
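# A possible follow-up (not in the original snippet): tally the intersected
# buffer area per county with a search cursor; 'NAME' is an assumed
# county-name field in 'Illinois Counties'.
county_area = {}
with arcpy.da.SearchCursor('Illinois_Counties_Intersect',
                           ['NAME', 'SHAPE@AREA']) as cursor:
    for name, area in cursor:
        county_area[name] = county_area.get(name, 0.0) + area
for name in sorted(county_area):
    print name, county_area[name]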
# PREPARE VARIABLES TO CREATE HEATMAPS (Puget Sound): FUNCTIONAL-CLASS BASED AADT AND SPEED LIMIT, SLOPE, AND TRANSIT ROUTES
# These outputs were used for initial moss sampling
########################################################################################################################
#-----------------------------------------------------------------------------------------------------------------------
# Prepare OSM data to create heatmap based on roads functional class for all Puget Sound OSM roads
#-----------------------------------------------------------------------------------------------------------------------
arcpy.env.workspace = gdb

#Select OSM roads; this map keeps service roads too, as they carry enough through traffic to potentially have some impact
arcpy.Project_management(OSMroads, OSMroads_proj, out_coor_system= cs_ref)
arcpy.MakeFeatureLayer_management(OSMroads_proj, 'OSMroads_lyr')
# Inspect the distinct road classes in the layer (assumes numpy imported as np)
np.unique([row[0] for row in arcpy.da.SearchCursor('OSMroads_lyr', ['fclass'])])
sel = "{} IN ('motorway','motorway_link','living_street','primary','primary_link','residential','secondary','secondary_link'," \
      "'tertiary','tertiary_link','trunk','trunk_link','unclassified','unknown', 'service')".format('"fclass"')
arcpy.SelectLayerByAttribute_management('OSMroads_lyr', 'NEW_SELECTION', sel)
arcpy.Intersect_analysis(['OSMroads_lyr', 'PSwtshd_dissolve.shp'],out_feature_class=PSOSM_all)
arcpy.Delete_management('OSMroads_lyr')

# Join OSM and Pierce County + WSDOT traffic counts data to improve interpolation of speed limits
# and traffic volume within road fclasses
#Subselect OSM roads for Pierce County
arcpy.MakeFeatureLayer_management(pscounties, 'counties_lyr')
arcpy.SelectLayerByAttribute_management('counties_lyr', 'NEW_SELECTION', "COUNTYNS='01529159'")
arcpy.Clip_analysis(PSOSM_all, 'counties_lyr', OSMPierce)

SpatialJoinLines_LargestOverlap(target_features= OSMPierce, join_features=Pierceroads, out_fc = OSMPierce_datajoin,
                                outgdb=gdb, bufsize='10 meters', keep_all=True,
                                fields_select=['RoadNumber', 'RoadName', 'FFC', 'FFCDesc', 'ADTSource', 'ADT',
                                               'ADTYear', 'SpeedLimit'])
#Join OSM with WSDOT traffic counts
arcpy.SpatialJoin_analysis(traffic_wsdot, PSOSM_all, os.path.join(gdb, 'OSM_WSDOT_join'), 'JOIN_ONE_TO_ONE', 'KEEP_COMMON')
# (the remaining arguments of this SpatialJoin_analysis call are cut off in the source)
Example #4
File: 5.py Project: CUGGISRS/spj
    VAR_R = 15  # neighborhood-statistics window size, in raster cells
elif 30 <= int(VAR_FBL) < 50:
    VAR_R = 12
elif 50 <= int(VAR_FBL) < 100:
    VAR_R = 9
else:
    VAR_R = 3
##########################
exp = Con(IN_STBH, IN_STBH, "", "VALUE = 3 or VALUE = 5")
exp.save('st.tif')
exp = Con(IN_NY, IN_NY, "", "VALUE = 3 or VALUE = 1")
exp.save('ny.tif')
ex = '"DLMC" = \'农业用地\' '
arcpy.Select_analysis(IN_TD, "td.shp", ex)
arcpy.RasterToPolygon_conversion('st.tif', "shengtai.shp")
arcpy.RasterToPolygon_conversion('ny.tif', "nongye.shp")
arcpy.Intersect_analysis(["shengtai.shp", "td.shp", "nongye.shp"], "jg.shp")
arcpy.Clip_analysis("jg.shp", IN_CLIP, OUTPUTPATH + OUTPUTNAME + ".shp")
arcpy.Delete_management('st.tif')
arcpy.Delete_management('ny.tif')
arcpy.Delete_management("td.shp")
arcpy.Delete_management("shengtai.shp")
arcpy.Delete_management("nongye.shp")
arcpy.Delete_management("jg.shp")
arcpy.TableToExcel_conversion(OUTPUTPATH + OUTPUTNAME + ".shp",
                              OUTPUTPATH + OUTPUTNAME + ".xls")
if IN_XZQQ != "99999":
    StaticticsByXZQ.StatisticBYXZQ(OUTPUTPATH + OUTPUTNAME + ".shp", IN_XZQQ,
                                   OUTPUTPATH, OUTPUTNAME + "_XZQ")
arcpy.CheckInExtension('Spatial')
arcpy.CheckInExtension('3D')
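# The truncated top of this script presumably checked these licenses out
# before the Spatial Analyst Con() calls above; a minimal sketch of that
# assumed setup:
import arcpy
from arcpy.sa import *
arcpy.CheckOutExtension('Spatial')
arcpy.CheckOutExtension('3D')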
Example #5
        day = int(ndvi_file_name[ndvi_file_name.find(".") -
                                 3:ndvi_file_name.find(".")])
        year = int(ndvi_file_name[2:6])
        print year
        input_features_list = []
        input_features_list.append(ndvi_file)
        for file_list in all_file_list:
            if file_list == climate_tavg_file_list:
                for year_file in file_list:
                    temp_year = int(os.path.split(year_file)[-1][0:4])
                    if year == temp_year:
                        shp_file = [
                            year_file,
                        ]
                        break
            else:
                shp_file = [
                    year_file for year_file in file_list
                    if int(year_file[year_file.find(".") -
                                     4:year_file.find(".")]) == year
                ]
            if len(shp_file) == 0:
                print("there has no shp file in that year %s" % year)
                continue
            shp_file = shp_file[0]
            input_features_list.append(shp_file)
        out_file = str(year) + '_' + str(day) + ".shp"
        out_file = os.path.join(result_path, out_file)
        arcpy.Intersect_analysis(input_features_list, out_file)
print "hell"
# Make a layer from the feature class
#arcpy.MakeFeatureLayer_management(fe,v_Name_Lyr)

## Select all fishing events that are less than upper bound distance *** I can't get this to work correctly, so I did this manually first before running code
#qry = '"LENGTH" < ' + dist1  # note: LENGTH is numeric, so the value should not be quoted
#arcpy.AddMessage("Selecting fishing events " + qry)
#arcpy.SelectLayerByAttribute_management(v_Name_Lyr, "NEW_SELECTION", qry)

# Write the selected features to a new featureclass
#arcpy.CopyFeatures_management(v_Name_Lyr, v_Name_Selected)

# Process: Intersect
arcpy.AddMessage("Intersecting fishing with polygons...")
inFeatures = [fe, area_Poly]
arcpy.Intersect_analysis(inFeatures, v_Name_Intersect, "ALL", "", "LINE")

# Process: Buffer
arcpy.AddMessage("Buffering...")
arcpy.Buffer_analysis(v_Name_Intersect, v_Name_Buffer, dist2, "FULL", "ROUND",
                      "LIST", "Reef")

# Process: Clip
arcpy.AddMessage("Clipping...")
arcpy.Clip_analysis(v_Name_Buffer, area_Poly, v_Name_Final, "")

# Process: Export Feature Attribute to ASCII...
arcpy.AddMessage("Exporting attribute table...")
arcpy.env.workspace = out_folder_path
input_features = outLocation + "\\" + v_Name_Final
export_ASCII = fename + "_Area.csv"
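# The snippet ends before the export itself; given the "Export Feature
# Attribute to ASCII" comment, it plausibly continued with ExportXYv_stats
# (the "Reef" value field is an assumption, taken from the buffer dissolve above):
arcpy.ExportXYv_stats(input_features, "Reef", "COMMA",
                      out_folder_path + "\\" + export_ASCII,
                      "ADD_FIELD_NAMES")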
    def execute(self, parameters, messages):
        """The source code of the tool."""

        # local variables and env
        arcpy.CreateFileGDB_management("E:/gina/poker/gdb",
                                       parameters[0].valueAsText)
        arcpy.env.workspace = "E:/gina/poker/gdb/" + parameters[
            0].valueAsText + ".gdb"
        arcpy.env.overwriteOutput = True
        adnr_lo_shp = "E:/gina/poker/shp/wip/land_ownership_data/adnr_gls_dls_merge_20170823_v1.shp"
        pfrr_popn_places = "E:/gina/poker/shp/wip/popn_places_data/pokerflat_popn_places_gcs_wgs84_to_akalbers_2.shp"
        afs_known_sites = "E:/gina/poker/shp/asf_data/asf_known_sites_20180629_3338.shp"
        pipTable = "E:/gina/poker/dbf/predicted_impact_xy.dbf"
        pip_point_shp = "E:/gina/poker/pip/pip_point.shp"
        pip_point_3338 = "E:/gina/poker/pip/pip_point_3338.shp"
        pip_buffer_shp = "E:/gina/poker/pip/pip_buffer.shp"
        pip_range_rings_shp = "E:/gina/poker/pip/pip_range_rings.shp"
        pip_lo_in_buffer_shp = "E:/gina/poker/pip/pip_lo_in_buffer.shp"
        pip_lo_in_buf_sum_dbf = "E:/gina/poker/pip/pip_lo_in_buf_sum.dbf"
        pip_lo_in_buf_sum_csv = "E:/gina/poker/pip/pip_lo_in_buf_sum.csv"
        pip_popn_places_in_buffer_shp = "E:/gina/poker/pip/pip_popn_places_in_buffer.shp"
        pip_known_sites_in_buffer_shp = "E:/gina/poker/pip/pip_known_sites_in_buffer.shp"
        x = parameters[1].valueAsText
        y = parameters[2].valueAsText
        r = parameters[3].valueAsText + " NauticalMiles"
        rr1 = (float(parameters[3].valueAsText)) / 3  # inner ring at 1/3 of the radius
        rr2 = (rr1 * 2)  # middle ring at 2/3 of the radius
        rrs = str(rr1) + ";" + str(rr2) + ";" + r.split(" ")[0]  # outer ring at the full radius
        pipLayer = "pipLayer1"
        srs = arcpy.SpatialReference("Alaska Albers Equal Area Conic")
        intersect_fc1 = [adnr_lo_shp, pip_buffer_shp]
        intersect_fc2 = [pfrr_popn_places, pip_buffer_shp]
        intersect_fc3 = [afs_known_sites, pip_buffer_shp]
        mxd = arcpy.mapping.MapDocument("current")
        dataframe = arcpy.mapping.ListDataFrames(mxd)[0]
        sourceLoSymbologyLayer = arcpy.mapping.Layer(
            "E:/gina/poker/lyr/lo2.lyr")
        sourcePipSymbologyLayer = arcpy.mapping.Layer(
            "E:/gina/poker/lyr/pip2.lyr")
        sourceRrsSymbologyLayer = arcpy.mapping.Layer(
            "E:/gina/poker/lyr/rrs.lyr")
        sourcePopSymbologyLayer = arcpy.mapping.Layer(
            "E:/gina/poker/lyr/pop.lyr")
        sourceAfsSymbologyLayer = arcpy.mapping.Layer(
            "E:/gina/poker/lyr/afs2.lyr")

        # Process: Calculate Lon Field
        arcpy.CalculateField_management(pipTable, "Lon", x, "PYTHON", "")

        # Process: Calculate Lat Field
        arcpy.CalculateField_management(pipTable, "Lat", y, "PYTHON", "")

        # Process: Make XY Event Layer
        arcpy.MakeXYEventLayer_management(
            pipTable, "Lon", "Lat", pipLayer,
            "GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]];-400 -400 1000000000;-100000 10000;-100000 10000;8.98315284119522E-09;0.001;0.001;IsHighPrecision",
            "")

        # Process: Copy Features
        arcpy.CopyFeatures_management(pipLayer, pip_point_shp, "", "0", "0",
                                      "0")

        # Process: Project pip point
        arcpy.Project_management(pip_point_shp, pip_point_3338, srs)

        # Process: Buffer pip point
        arcpy.Buffer_analysis(pip_point_3338, pip_buffer_shp, r, "FULL",
                              "ROUND", "NONE", "", "PLANAR")

        # Process: Multiple Ring Buffer
        arcpy.MultipleRingBuffer_analysis(pip_point_3338, pip_range_rings_shp,
                                          rrs, "NauticalMiles", "", "NONE",
                                          "FULL")

        # Process: Intersect pip buffer with land ownership
        arcpy.Intersect_analysis(intersect_fc1, pip_lo_in_buffer_shp, "ALL",
                                 "", "INPUT")

        # Process: Intersect pip buffer with popn places
        arcpy.Intersect_analysis(intersect_fc2, pip_popn_places_in_buffer_shp,
                                 "ALL", "", "INPUT")

        # Process: Intersect pip buffer with afs known sites
        arcpy.Intersect_analysis(intersect_fc3, pip_known_sites_in_buffer_shp,
                                 "ALL", "", "INPUT")

        # Process: Make feature layers and add to the map
        ## pip feature class list
        fclist = arcpy.ListFeatureClasses()

        ## pip layer
        arcpy.MakeFeatureLayer_management(pip_point_3338,
                                          "Predicted Impact Point")

        ## land ownership layer
        arcpy.MakeFeatureLayer_management(
            pip_lo_in_buffer_shp,
            "Land Ownership within 3sigma of Predicted Impact Point")

        ## Range Rings
        arcpy.MakeFeatureLayer_management(pip_range_rings_shp, "Range Rings")

        ## populated places layer
        popn_places_records = int(
            arcpy.GetCount_management(pip_popn_places_in_buffer_shp).getOutput(
                0))
        if popn_places_records > 0:
            arcpy.MakeFeatureLayer_management(
                pip_popn_places_in_buffer_shp,
                "Populated Places within 3sigma of Predicted Impact Point")
            addPipPopnPlacesLayer = arcpy.mapping.Layer(
                "Populated Places within 3sigma of Predicted Impact Point")
            arcpy.mapping.AddLayer(dataframe, addPipPopnPlacesLayer)

        ## known sites layer
        known_sites_records = int(
            arcpy.GetCount_management(pip_known_sites_in_buffer_shp).getOutput(
                0))
        if known_sites_records > 0:
            arcpy.MakeFeatureLayer_management(
                pip_known_sites_in_buffer_shp,
                "AFS Known Sites within 3sigma of Predicted Impact Point")
            addPipKnownSitesLayer = arcpy.mapping.Layer(
                "AFS Known Sites within 3sigma of Predicted Impact Point")
            arcpy.mapping.AddLayer(dataframe, addPipKnownSitesLayer)

        addPipPointLayer = arcpy.mapping.Layer("Predicted Impact Point")
        arcpy.mapping.AddLayer(dataframe, addPipPointLayer)

        add3sigmaLoLayer = arcpy.mapping.Layer(
            "Land Ownership within 3sigma of Predicted Impact Point")
        arcpy.mapping.AddLayer(dataframe, add3sigmaLoLayer)

        addRangeRings = arcpy.mapping.Layer("Range Rings")
        arcpy.mapping.AddLayer(dataframe, addRangeRings)

        # Add and calc Acres field for intersected Land Ownership
        arcpy.AddField_management(pip_lo_in_buffer_shp, "Acres", "DOUBLE")
        arcpy.CalculateField_management(pip_lo_in_buffer_shp, "Acres",
                                        "!shape.area@acres!", "PYTHON_9.3", "")

        # Summarize intersected Land Ownership by Owner and total Acres
        arcpy.Statistics_analysis(pip_lo_in_buffer_shp, pip_lo_in_buf_sum_dbf,
                                  "Acres SUM", "OWNER")
        arcpy.MakeTableView_management(pip_lo_in_buf_sum_dbf)
        add3sigmaLoSumTbl = arcpy.mapping.TableView(pip_lo_in_buf_sum_dbf)
        arcpy.mapping.AddTableView(dataframe, add3sigmaLoSumTbl)

        # Symbolize and Refresh
        lo_layer = arcpy.mapping.ListLayers(
            mxd, "*Land Ownership within 3sigma of Predicted Impact Point*",
            dataframe)[0]
        arcpy.mapping.UpdateLayer(dataframe, lo_layer, sourceLoSymbologyLayer,
                                  True)
        lo_layer.symbology.addAllValues()

        pip_layer = arcpy.mapping.ListLayers(mxd, "*Predicted Impact Point*",
                                             dataframe)[0]
        arcpy.mapping.UpdateLayer(dataframe, pip_layer,
                                  sourcePipSymbologyLayer, True)

        rr_layer = arcpy.mapping.ListLayers(mxd, "*Range Rings*", dataframe)[0]
        arcpy.mapping.UpdateLayer(dataframe, rr_layer, sourceRrsSymbologyLayer,
                                  True)

        pop_layer = arcpy.mapping.ListLayers(mxd, "*Populated Places*",
                                             dataframe)[0]
        arcpy.mapping.UpdateLayer(dataframe, pop_layer,
                                  sourcePopSymbologyLayer, True)

        afs_layer = arcpy.mapping.ListLayers(mxd, "*Known Sites*",
                                             dataframe)[0]
        arcpy.mapping.UpdateLayer(dataframe, afs_layer,
                                  sourceAfsSymbologyLayer, True)

        arcpy.RefreshTOC()
        arcpy.RefreshActiveView()

        # Populate Mission GDB
        mission_layers = [
            pip_point_3338, pip_lo_in_buffer_shp,
            pip_popn_places_in_buffer_shp, pip_range_rings_shp,
            pip_known_sites_in_buffer_shp
        ]
        arcpy.FeatureClassToGeodatabase_conversion(mission_layers,
                                                   arcpy.env.workspace)

        return
import arcpy
from arcpy import env

workspace = "D:/DATA/GEOSPATIAL/ADHOC ANALYSIS/Papua/Forest Area/"
inFeature = "idn_forests_06"
inFeatureList = " #;idn_forest_area_2017 #"
outFeature = "papua_forests_06_farea_17.shp"

arcpy.Intersect_analysis(in_features=inFeature + inFeatureList,
                         out_feature_class=workspace + outFeature,
                         join_attributes="ALL",
                         cluster_tolerance="",
                         output_type="INPUT")
""" Looping """

inFeatureLoop = [
    "idn_forests_09", "idn_forests_11", "idn_forests_13", "idn_forests_15",
    "idn_forests_17"
]
outFeatureLoop = [
    "papua_forests_09_farea_17.shp", "papua_forests_11_farea_17.shp",
    "papua_forests_13_farea_17.shp", "papua_forests_15_farea_17.shp",
    "papua_forests_17_farea_17.shp"
]

# Pair each input with its corresponding output; nested loops would produce
# every input/output combination instead of matched pairs
for f, g in zip(inFeatureLoop, outFeatureLoop):
    arcpy.Intersect_analysis(in_features=f + inFeatureList,
                             out_feature_class=workspace + g,
                             join_attributes="ALL",
                             cluster_tolerance="",
                             output_type="INPUT")
def main(fcInputCenterline,
         fcInputPolygon,
         fcSegmentedPolygons,
         workspaceTemp,
         dblPointDensity=10.0,
         dblJunctionBuffer=120.00):

    arcpy.AddMessage("GNAT Divide Polygon By Segment Tool")
    arcpy.AddMessage("GNAT DPS: Saving Polygon Results to: " +
                     fcSegmentedPolygons)
    arcpy.AddMessage("GNAT DPS: Saving Temporary Files to: " + workspaceTemp)

    arcpy.env.outputMFlag = "Disabled"
    arcpy.env.outputZFlag = "Disabled"

    arcpy.AddMessage("arcpy M Output Flag: " + str(arcpy.env.outputMFlag))

    ## Copy Centerline to Temp Workspace
    fcCenterline = gis_tools.newGISDataset(workspaceTemp,
                                           "GNAT_DPS_Centerline")
    arcpy.CopyFeatures_management(fcInputCenterline, fcCenterline)

    ## Build Thiessen Polygons
    arcpy.AddMessage("GNAT DPS: Building Thiessen Polygons")
    arcpy.env.extent = fcInputPolygon  ## Set full extent to build Thiessen polygons over the entire line network.
    arcpy.Densify_edit(fcCenterline, "DISTANCE",
                       str(dblPointDensity) + " METERS")

    fcTribJunctionPoints = gis_tools.newGISDataset(
        workspaceTemp,
        "GNAT_DPS_TribJunctionPoints")  # All Segment Junctions??
    #gis_tools.findSegmentJunctions(fcCenterline,fcTribJunctionPoints,"ALL")
    arcpy.Intersect_analysis(fcCenterline,
                             fcTribJunctionPoints,
                             output_type="POINT")

    fcThiessanPoints = gis_tools.newGISDataset(workspaceTemp,
                                               "GNAT_DPS_ThiessanPoints")
    arcpy.FeatureVerticesToPoints_management(fcCenterline, fcThiessanPoints,
                                             "ALL")

    lyrThiessanPoints = gis_tools.newGISDataset("Layer", "lyrThiessanPoints")
    arcpy.MakeFeatureLayer_management(fcThiessanPoints, lyrThiessanPoints)
    arcpy.SelectLayerByLocation_management(lyrThiessanPoints, "INTERSECT",
                                           fcTribJunctionPoints,
                                           str(dblJunctionBuffer) + " METERS",
                                           "NEW_SELECTION")

    fcThiessanPoly = gis_tools.newGISDataset(workspaceTemp,
                                             "GNAT_DPS_ThiessanPoly")
    arcpy.CreateThiessenPolygons_analysis(lyrThiessanPoints, fcThiessanPoly,
                                          "ONLY_FID")

    fcThiessanPolyClip = gis_tools.newGISDataset(workspaceTemp,
                                                 "GNAT_DPS_TheissanPolyClip")
    arcpy.Clip_analysis(fcThiessanPoly, fcInputPolygon, fcThiessanPolyClip)

    ### Code to Split the Junction Thiessen Polys ###
    arcpy.AddMessage("GNAT DPS: Split Junction Thiessen Polygons")
    lyrTribThiessanPolys = gis_tools.newGISDataset("Layer",
                                                   "lyrTribThiessanPolys")
    arcpy.MakeFeatureLayer_management(fcThiessanPolyClip, lyrTribThiessanPolys)
    arcpy.SelectLayerByLocation_management(lyrTribThiessanPolys,
                                           "INTERSECT",
                                           fcTribJunctionPoints,
                                           selection_type="NEW_SELECTION")

    fcSplitPoints = gis_tools.newGISDataset(workspaceTemp,
                                            "GNAT_DPS_SplitPoints")
    arcpy.Intersect_analysis([lyrTribThiessanPolys, fcCenterline],
                             fcSplitPoints,
                             output_type="POINT")

    arcpy.AddMessage("GNAT DPS: Moving Starting Vertices of Junction Polygons")
    geometry_functions.changeStartingVertex(fcTribJunctionPoints,
                                            lyrTribThiessanPolys)

    arcpy.AddMessage("GNAT DPS: Vertices Moved.")
    fcThiessanTribPolyEdges = gis_tools.newGISDataset(
        workspaceTemp, "GNAT_DPS_ThiessanTribPolyEdges")
    arcpy.FeatureToLine_management(lyrTribThiessanPolys,
                                   fcThiessanTribPolyEdges)

    fcSplitLines = gis_tools.newGISDataset(workspaceTemp,
                                           "GNAT_DPS_SplitLines")
    arcpy.SplitLineAtPoint_management(fcThiessanTribPolyEdges, fcSplitPoints,
                                      fcSplitLines, "0.1 METERS")

    fcMidPoints = gis_tools.newGISDataset(workspaceTemp, "GNAT_DPS_MidPoints")
    arcpy.FeatureVerticesToPoints_management(fcSplitLines, fcMidPoints, "MID")
    arcpy.Near_analysis(fcMidPoints, fcTribJunctionPoints, location="LOCATION")
    arcpy.AddXY_management(fcMidPoints)

    fcTribToMidLines = gis_tools.newGISDataset(workspaceTemp,
                                               "GNAT_DPS_TribToMidLines")
    arcpy.XYToLine_management(fcMidPoints, fcTribToMidLines, "POINT_X",
                              "POINT_Y", "NEAR_X", "NEAR_Y")

    ### Select Polys by Centerline ###
    arcpy.AddMessage("GNAT DPS: Select Polygons By Centerline")
    fcThiessanEdges = gis_tools.newGISDataset(workspaceTemp,
                                              "GNAT_DPS_ThiessanEdges")
    arcpy.FeatureToLine_management(fcThiessanPolyClip, fcThiessanEdges)

    fcAllEdges = gis_tools.newGISDataset(workspaceTemp, "GNAT_DPS_AllEdges")
    arcpy.Merge_management([fcTribToMidLines, fcThiessanEdges, fcCenterline],
                           fcAllEdges)  # include fcCenterline if needed

    fcAllEdgesPolygons = gis_tools.newGISDataset(workspaceTemp,
                                                 "GNAT_DPS_AllEdgesPolygons")
    arcpy.FeatureToPolygon_management(fcAllEdges, fcAllEdgesPolygons)

    fcAllEdgesPolygonsClip = gis_tools.newGISDataset(
        workspaceTemp, "GNAT_DPS_AllEdgesPolygonsClip")
    arcpy.Clip_analysis(fcAllEdgesPolygons, fcInputPolygon,
                        fcAllEdgesPolygonsClip)

    fcPolygonsJoinCenterline = gis_tools.newGISDataset(
        workspaceTemp, "GNAT_DPS_PolygonsJoinCenterline")
    arcpy.SpatialJoin_analysis(fcAllEdgesPolygonsClip,
                               fcCenterline,
                               fcPolygonsJoinCenterline,
                               "JOIN_ONE_TO_MANY",
                               "KEEP_ALL",
                               match_option="SHARE_A_LINE_SEGMENT_WITH")

    fcPolygonsDissolved = gis_tools.newGISDataset(
        workspaceTemp, "GNAT_DPS_PolygonsDissolved")
    arcpy.Dissolve_management(fcPolygonsJoinCenterline,
                              fcPolygonsDissolved,
                              "JOIN_FID",
                              multi_part="SINGLE_PART")

    #fcSegmentedPolygons = gis_tools.newGISDataset(workspaceOutput,"SegmentedPolygons")
    lyrPolygonsDissolved = gis_tools.newGISDataset("Layer",
                                                   "lyrPolygonsDissolved")
    arcpy.MakeFeatureLayer_management(fcPolygonsDissolved,
                                      lyrPolygonsDissolved)
    arcpy.SelectLayerByAttribute_management(lyrPolygonsDissolved,
                                            "NEW_SELECTION",
                                            """ "JOIN_FID" = -1 """)

    arcpy.Eliminate_management(lyrPolygonsDissolved, fcSegmentedPolygons,
                               "LENGTH")

    arcpy.AddMessage("GNAT DPS: Tool Complete.")
    return
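# gis_tools.newGISDataset is used throughout this example but not shown; a
# minimal sketch of its assumed behavior (build the dataset path, clear any
# existing copy, and return the path; "Layer" workspaces pass the name through):
def newGISDataset(workspace, name):
    import os
    path = name if workspace.upper() == "LAYER" else os.path.join(workspace, name)
    if arcpy.Exists(path):
        arcpy.Delete_management(path)
    return path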
Example #10
    "Selected_Holding", "",
    "Holding_Reference_Number \"Holding_Reference_Number\" true true false 4 Long 0 10 ,First,#,GIS101DELIVERY_RESTRICTED.DBO.BOUND_ADMIN_HOLDINGS,Holding_Reference_Number,-1,-1;Holding_Name \"Holding_Name\" true true false 255 Text 0 0 ,First,#,GIS101DELIVERY_RESTRICTED.DBO.BOUND_ADMIN_HOLDINGS,Holding_Name,-1,-1;Holding_Location_Address \"Holding_Location_Address\" true true false 457 Text 0 0 ,First,#,GIS101DELIVERY_RESTRICTED.DBO.BOUND_ADMIN_HOLDINGS,Holding_Location_Address,-1,-1;Local_Land_Services_Region_Id \"Local_Land_Services_Region_Id\" true true false 100 Text 0 0 ,First,#,GIS101DELIVERY_RESTRICTED.DBO.BOUND_ADMIN_HOLDINGS,Local_Land_Services_Region_Id,-1,-1;Local_Land_Services_Region_Name \"Local_Land_Services_Region_Name\" true true false 255 Text 0 0 ,First,#,GIS101DELIVERY_RESTRICTED.DBO.BOUND_ADMIN_HOLDINGS,Local_Land_Services_Region_Name,-1,-1;RLPB_Board_Name \"RLPB_Board_Name\" true true false 255 Text 0 0 ,First,#,GIS101DELIVERY_RESTRICTED.DBO.BOUND_ADMIN_HOLDINGS,RLPB_Board_Name,-1,-1;Property_Identification_Code \"Property_Identification_Code\" true true false 255 Text 0 0 ,First,#,GIS101DELIVERY_RESTRICTED.DBO.BOUND_ADMIN_HOLDINGS,Property_Identification_Code,-1,-1;Occupier_Id \"Occupier_Id\" true true false 100 Text 0 0 ,First,#,GIS101DELIVERY_RESTRICTED.DBO.BOUND_ADMIN_HOLDINGS,Occupier_Id,-1,-1;Occupier_Full_Name \"Occupier_Full_Name\" true true false 255 Text 0 0 ,First,#,GIS101DELIVERY_RESTRICTED.DBO.BOUND_ADMIN_HOLDINGS,Occupier_Full_Name,-1,-1;Occupier_Mailing_Address \"Occupier_Mailing_Address\" true true false 457 Text 0 0 ,First,#,GIS101DELIVERY_RESTRICTED.DBO.BOUND_ADMIN_HOLDINGS,Occupier_Mailing_Address,-1,-1;Occupier_Home_Phone \"Occupier_Home_Phone\" true true false 320 Text 0 0 ,First,#,GIS101DELIVERY_RESTRICTED.DBO.BOUND_ADMIN_HOLDINGS,Occupier_Home_Phone,-1,-1;Occupier_Mobile_Phone \"Occupier_Mobile_Phone\" true true false 320 Text 0 0 ,First,#,GIS101DELIVERY_RESTRICTED.DBO.BOUND_ADMIN_HOLDINGS,Occupier_Mobile_Phone,-1,-1;Occupier_Email_Address \"Occupier_Email_Address\" true true false 320 Text 0 0 ,First,#,GIS101DELIVERY_RESTRICTED.DBO.BOUND_ADMIN_HOLDINGS,Occupier_Email_Address,-1,-1;Total_Area \"Total_Area\" true true false 8 Double 0 0 ,First,#,GIS101DELIVERY_RESTRICTED.DBO.BOUND_ADMIN_HOLDINGS,Total_Area,-1,-1;Is_Rateable_Indicator \"Is_Rateable_Indicator\" true true false 1 Text 0 0 ,First,#,GIS101DELIVERY_RESTRICTED.DBO.BOUND_ADMIN_HOLDINGS,Is_Rateable_Indicator,-1,-1;Rateable_Area \"Rateable_Area\" true true false 8 Double 0 0 ,First,#,GIS101DELIVERY_RESTRICTED.DBO.BOUND_ADMIN_HOLDINGS,Rateable_Area,-1,-1;Nominal_Notional_Carrying_Cap \"Nominal_Notional_Carrying_Cap\" true true false 8 Double 0 0 ,First,#,GIS101DELIVERY_RESTRICTED.DBO.BOUND_ADMIN_HOLDINGS,Nominal_Notional_Carrying_Cap,-1,-1;SHAPE_STArea__ \"SHAPE_STArea__\" false true true 0 Double 0 0 ,First,#,GIS101DELIVERY_RESTRICTED.DBO.BOUND_ADMIN_HOLDINGS,SHAPE.STArea(),-1,-1;SHAPE_STLength__ \"SHAPE_STLength__\" false true true 0 Double 0 0 ,First,#,GIS101DELIVERY_RESTRICTED.DBO.BOUND_ADMIN_HOLDINGS,SHAPE.STLength(),-1,-1",
    "")

# Process: Buffer (2)
arcpy.Buffer_analysis(Selected_Holding, Buffer_1000, "1000 Meters", "FULL",
                      "ROUND", "NONE", "", "PLANAR")

# Process: Select Layer By Attribute (2)
arcpy.SelectLayerByAttribute_management(
    GIS101DELIVERY_RESTRICTED_DBO_BOUND_ADMIN_HOLDINGS__3_, "CLEAR_SELECTION",
    "")

# Process: Intersect
arcpy.Intersect_analysis(
    "C:\\Users\\hawkinle\\Desktop\\STDTAS\\Workflow\\Data\\Workflow.gdb\\Buffer_1000 #;GIS101DELIVERY_RESTRICTED.DBO.BOUND_ADMIN_HOLDINGS #",
    Interesection_1km, "ALL", "", "INPUT")

# Process: Erase
arcpy.Erase_analysis(Interesection_1km, Selected_Holding, Erased_Feature, "")

# Process: Buffer
arcpy.Buffer_analysis(Point_shp, Buffer_600, "600 Meters", "FULL", "ROUND",
                      "NONE", "", "PLANAR")

# Process: Clip
arcpy.Clip_analysis(Selected_Holding, Buffer_600, No_Bait_zone, "")

# Process: Clip (2)
arcpy.Clip_analysis(GIS101DELIVERY_RESTRICTED_DBO_LAND_OWN_PROPERTY,
                    Buffer_1000, LAND_OWN_PROPERTY_Clip, "")
Example #11
def main(fcLineNetwork, fcSplitPoints, fieldStreamName, fieldStreamOrder,
         fcOutputStreamNetwork, boolDissolve, tempWorkspace):

    reload(gis_tools)

    # Preprocessing
    gis_tools.resetData(fcOutputStreamNetwork)
    listfcMerge = []

    # Make a feature layer for attribute-based stream selection
    lyrStreamSelection = gis_tools.newGISDataset("LAYER",
                                                 "GNAT_BRANCHES_SelectByName")
    arcpy.MakeFeatureLayer_management(fcLineNetwork, lyrStreamSelection)

    # Dissolve by Stream (GNIS) Name
    where = arcpy.AddFieldDelimiters(fcLineNetwork,
                                     fieldStreamName) + " <> '' "
    arcpy.SelectLayerByAttribute_management(lyrStreamSelection,
                                            "NEW_SELECTION", where)
    fcDissolveByName = gis_tools.newGISDataset(tempWorkspace,
                                               "GNAT_BRANCHES_DissolveByName")
    #arcpy.Dissolve_management(lyrStreamSelection,fcDissolveByName,fieldStreamName)
    arcpy.Dissolve_management(lyrStreamSelection,
                              fcDissolveByName,
                              fieldStreamName,
                              multi_part="SINGLE_PART",
                              unsplit_lines="DISSOLVE_LINES")
    listfcMerge.append(fcDissolveByName)

    # Dissolve by Stream Order
    arcpy.SelectLayerByAttribute_management(lyrStreamSelection,
                                            "SWITCH_SELECTION")

    if fieldStreamOrder:
        if len(arcpy.ListFields(fcLineNetwork, fieldStreamOrder)) == 1:
            fcDissolveByStreamOrder = gis_tools.newGISDataset(
                tempWorkspace, "GNAT_BRANCHES_DissolveByStreamOrder")
            arcpy.Dissolve_management(lyrStreamSelection,
                                      fcDissolveByStreamOrder,
                                      fieldStreamOrder)

    # Split Stream Order Junctions
        if arcpy.Exists(fcSplitPoints):
            fcDissolveByStreamOrderSplit = gis_tools.newGISDataset(
                tempWorkspace, "GNAT_BRANCHES_DissolveByStreamOrderSplit")
            arcpy.SplitLineAtPoint_management(fcDissolveByStreamOrder,
                                              fcSplitPoints,
                                              fcDissolveByStreamOrderSplit,
                                              "1 METER")
            listfcMerge.append(fcDissolveByStreamOrderSplit)
        else:
            listfcMerge.append(fcDissolveByStreamOrder)
    else:
        fcNoStreamOrder = gis_tools.newGISDataset(
            tempWorkspace, "GNAT_BRANCHES_NoStreamOrderOrStreamName")
        arcpy.Dissolve_management(lyrStreamSelection,
                                  fcNoStreamOrder,
                                  multi_part="SINGLE_PART")
        listfcMerge.append(fcNoStreamOrder)

    # Merge Dissolved Networks
    fcMerged = gis_tools.newGISDataset(
        tempWorkspace, "GNAT_BRANCHES_MergedDissolvedNetworks")
    arcpy.Merge_management(listfcMerge, fcMerged)

    # Add Branch ID
    arcpy.AddField_management(fcMerged, "BranchID", "LONG")
    gis_tools.addUniqueIDField(fcMerged, "BranchID")

    # Final Output
    if boolDissolve == "true":
        arcpy.AddMessage("Dissolving " + str(boolDissolve))
        arcpy.CopyFeatures_management(fcMerged, fcOutputStreamNetwork)
    else:
        ## Delete all fields from fcMerged except BranchID and the required/system fields
        descFCMerged = arcpy.Describe(fcMerged)
        for field in descFCMerged.fields:
            if field.name not in [
                    "BranchID", descFCMerged.OIDFieldName,
                    descFCMerged.shapeFieldName, "Shape_Length"
            ]:
                arcpy.DeleteField_management(fcMerged, field.name)

        arcpy.AddMessage("NOT Dissolving " + str(boolDissolve))
        arcpy.Intersect_analysis([fcMerged, fcLineNetwork],
                                 fcOutputStreamNetwork, "ALL")

    return fcOutputStreamNetwork
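# gis_tools.addUniqueIDField is not shown either; a plausible minimal version
# that numbers the features sequentially:
def addUniqueIDField(fc, fieldName):
    i = 0
    with arcpy.da.UpdateCursor(fc, [fieldName]) as cursor:
        for row in cursor:
            i += 1
            cursor.updateRow([i])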
Example #12
###############################################
out = arcpy.sa.Raster("jykrigph.tif") * arcpy.sa.Raster("JLXS.tif")
out.save("JLYZ.tif")  #地表径流量=降水量*平均地表径流系数
out1 = Abs(
    arcpy.sa.Raster("jykrigph.tif") - arcpy.sa.Raster("JLYZ.tif") -
    arcpy.sa.Raster("zfkrigph.tif"))
out1.save("base.tif")  # p - r - et (likely precipitation - runoff - evapotranspiration)
arcpy.RasterToPoint_conversion("base.tif", "point.shp", "VALUE")
# convert the raster to point features
arcpy.gp.Int_sa("base.tif", "intbase.tif")  # cast the raster to integer
arcpy.RasterToPolygon_conversion("intbase.tif", "polygon.shp", "NO_SIMPLIFY",
                                 "VALUE")
# convert the raster dataset to polygon features
arcpy.SpatialJoin_analysis("polygon.shp", "point.shp", "whole.shp")
# spatially join the point and polygon features
arcpy.Intersect_analysis(["whole.shp", IN_STXT], 'st.shp')
# intersect
cursor2 = arcpy.da.SearchCursor("st.shp", ["SHAPE@AREA"])  # polygon areas
ar = []
for row in cursor2:
    ar.append(row[0])

cursor3 = arcpy.da.SearchCursor("st.shp", ["GRID_CODE"])  # the value computed above
yt = []
for row in cursor3:
    yt.append(row[0] * pow(10, -3))  # m^2 to km^2 is 10**-6, then multiply by 10**3
#
prod = [a * b for a, b in zip(yt, ar)]
# element-wise product of the two lists
try:
    arcpy.DeleteField_management("st.shp", 'SHY')
Example #13
def unify(directory):

    logFile = open("logs/unify.log", "a")

    print "Unifying lakes from " + directory + "..."

    # Check out the Spatial Analyst license (CheckExtension returns a status string)
    if arcpy.CheckExtension("Spatial") == "Available":
        arcpy.CheckOutExtension("Spatial")
    else:
        print "No SA licence"
        sys.exit()

    # Load the environment
    env.workspace = "C:/Users/hengstam/Desktop/projects/proglacial"
    hashlength = 30
    disphashlength = 12

    # This is where intersection results will be temporarily held
    output = "/temp/bubblebath/intersection_output.shp"
    if arcpy.Exists(output):
        arcpy.Delete_management(output)

    # This is where we trace lakes over for copying them
    tracingpaper = "/temp/bubblebath/tracing_output.shp"
    if arcpy.Exists(tracingpaper):
        arcpy.Delete_management(tracingpaper)

    # Get some names
    masterlakefile = "/master_lakes/master_lake_file.shp"

    # Make sure we can mess with stuff
    arcpy.env.overwriteOutput = True

    ###########################################
    ## Define some new types of data structures
    class bubblebath(object):

        def __init__(self):
            # keep the list per-instance; a class-level "data = []" would be
            # shared across every bubblebath instance
            self.data = []

        def add(self, i):

            # Add our incoming group to the dataset
            self.data.append(set(i))

            # Iterate through new things
            for item in i:
                membership = []
                index = -1

                # Work through each group
                for bubble in self.data:
                    index += 1

                    # Work through each group member
                    for thing in bubble:

                        # If one of our new items matches a group member, remember that group.
                        if item == thing:
                            membership.append(index)

                            # We only need one match per group
                            break

                # Now we have a list of things we belong to. We may need to merge those.
                if len(membership) > 1:

                    newbubble = set()

                    # Merge them all
                    for member in membership:
                        newbubble = newbubble | self.data[member]

                    # Flip and reverse it so we don't change our indices while deleting
                    membership.reverse()

                    # Delete the old ones
                    for member in membership:
                        del self.data[member]

                    # Add the new one
                    self.data.append(newbubble)

                # And now we repeat for the rest of the items

        def read(self):

            # This is what we will eventually spit out
            out = []

            for i in self.data:
                out.append(list(i))

            return out

        def clean(self, size):

            # Initalize the index and a list of things to get rid of
            index = -1
            remove = []

            # Iterate
            for bubble in self.data:
                index += 1

                # Check if it's too small
                if len(bubble) <= size or size == 0:
                    remove.append(index)

            # Now flip the remove list
            remove.reverse()

            # And delete them all
            for i in remove:
                del self.data[i]

    # Make a clean reader
    def reader(text):
        t = str(text)
        return t[20:22]

    def iterreader(arr):
        s = "["
        for a in arr:
            s += reader(a)
            s += ', '
        s = s[:-2]
        s += ']'
        return s

    def twoiterreader(arr):
        s = "["
        for ar in arr:
            s += '['
            for a in ar:
                s += reader(a)
                s += ', '
            s = s[:-2]
            s += '], '
        s = s[:-3]
        s += ']'
        return s

    ####################
    ## BEGIN BEGIN BEGIN

    # Loop through all folder names
    fns = glob.glob(env.workspace + '/' + directory + '/*.shp')
    for fn in fns:

        # Trim the filename to get the directory component
        newlakes = fn[len(env.workspace):]

        print "Loading from " + newlakes + "..."

        ################################################################################
        ## Build a database to help us get feature class filenames from the master lakes
        print "Generating dictionary..."

        # Make the dictionary
        refDict = {}

        # Build a cursor so we can get info on the master lakes
        tempcursor = arcpy.da.SearchCursor(masterlakefile, ['FID', 'ref_id'])

        # Iterate through the cursor results and fill the dictionary
        for fid, hashname in tempcursor:
            refDict[fid] = hashname[:hashlength]

        # Delete the cursor
        del tempcursor
        print "Dictionary generated."

        ######################################
        ## Collect all FIDs and hashes from the new lakes
        newlakeFIDs = {}
        newRefDict = {}

        # Build a cursor so we can get the stuff from the new lakes
        tempcursor = arcpy.da.SearchCursor(newlakes, ['FID', 'lake_id'])

        for temprow in tempcursor:
            # Mark them all good for now
            newlakeFIDs[temprow[0]] = True

            # Load this up
            newRefDict[temprow[0]] = temprow[1]

        del tempcursor

        #################################
        ## Prepare to resolve lake merges
        merges = {}

        ###############################################
        ## Make lists of lakes which are being modified
        lakes_to_add = set()
        lakes_to_remove = set()

        ##########################
        ## Check for intersections
        print "Checking for intersections..."

        # Make a list of assignments
        assignments = {}

        # Run the master intersection
        arcpy.Intersect_analysis((newlakes, masterlakefile), output,
                                 'ONLY_FID')

        # Get the names of the two FID fields for the output
        fields = arcpy.ListFields(output)
        FID1 = fields[2].baseName
        FID2 = fields[3].baseName

        # Build a cursor which will iterate over the output fields
        cursor = arcpy.da.SearchCursor(output, [FID1, FID2])

        # Build a feature layer on the new lake feature class to enable selection of objects
        arcpy.Delete_management("newlakes_lyr")
        arcpy.MakeFeatureLayer_management(newlakes, "newlakes_lyr")

        # Iterate through the new intersection shapes
        print "Matching new lakes..."
        for row in cursor:

            # Get the saved input FIDs of each intesection
            newlakeFID = row[0]
            masterlake = row[1]

            # Lookup the reference in our handy-dandy dictionary
            lakeRef = '/master_lakes/lakes/' + refDict[masterlake] + '.shp'

            # This gets either the previous assignments or an empty list and then adds the current assignment to it
            if str(newlakeFID) in assignments:
                assignments[str(newlakeFID)].append(lakeRef)
            else:
                assignments[str(newlakeFID)] = [lakeRef]

            # Prepare to check for duplicates
            eject = False
            tempcursor = arcpy.da.SearchCursor(lakeRef, ['lake_id'])

            # Look through the already-saved lakes
            newRef = newRefDict[newlakeFID]
            for temprow in tempcursor:

                existingHash = temprow[0]

                # Check that we're not adding a duplicate
                if existingHash == newRef:
                    eject = True
                    break

            del tempcursor

            # Is it a duplicate?
            if eject:
                print 'Trying to add a duplicate lake ' + newRef[:disphashlength] + '. Ignoring.'

            # Nope
            else:

                # Prepare a partial feature class to copy it over (it's just going to be the one lake)
                arcpy.FeatureClassToFeatureClass_conversion(
                    newlakes, env.workspace, tracingpaper,
                    'FID = ' + str(row[0]))

                # Add this lake to the new feature class
                arcpy.Append_management(tracingpaper, lakeRef, "NO_TEST")

                # Delete the temp shit
                arcpy.Delete_management(tracingpaper)

                # This lake needs to be refreshed
                lakes_to_remove.add(lakeRef)
                lakes_to_add.add(lakeRef)

                print 'Added lake ' + newRef[:disphashlength] + ' to ' + lakeRef + '.'

            # Indicate that this lake has found a home
            newlakeFIDs[newlakeFID] = False

        del cursor

        # Remove the temp file
        arcpy.Delete_management(output)

        print "Matching complete."

        #####################################
        ## Make new lakes for new lake shapes

        # Iterate through all the lakes...
        cursor = arcpy.da.SearchCursor(newlakes, ['FID', 'lake_id'])

        for row in cursor:

            # Check from the dictionary to make sure it's untouched
            if newlakeFIDs[row[0]]:

                # Yay!
                hashID = row[1]
                hashID = hashID[:hashlength]

                # Save it to a brand-new feature class
                myNewLakeFilename = '/master_lakes/lakes/' + hashID + '.shp'
                if arcpy.Exists(myNewLakeFilename):
                    print "Skipping making a new lake, file already present."
                else:

                    # Make said brand-new feature class
                    arcpy.CreateFeatureclass_management(
                        env.workspace, myNewLakeFilename, "POLYGON")
                    arcpy.AddField_management(myNewLakeFilename, "ID", "LONG")
                    arcpy.AddField_management(myNewLakeFilename, "GRIDCODE",
                                              "LONG")
                    arcpy.AddField_management(myNewLakeFilename, "area",
                                              "DOUBLE")
                    arcpy.AddField_management(myNewLakeFilename, "centr_x",
                                              "DOUBLE")
                    arcpy.AddField_management(myNewLakeFilename, "centr_y",
                                              "DOUBLE")
                    arcpy.AddField_management(myNewLakeFilename, "lake_id",
                                              "STRING")
                    arcpy.AddField_management(myNewLakeFilename, "date",
                                              "LONG")
                    arcpy.AddField_management(myNewLakeFilename, "loc1",
                                              "SHORT")
                    arcpy.AddField_management(myNewLakeFilename, "loc2",
                                              "SHORT")

                    # Prepare a partial feature class to copy it over (it's just going to be the one lake)
                    arcpy.FeatureClassToFeatureClass_conversion(
                        newlakes, env.workspace, tracingpaper,
                        'FID = ' + str(row[0]))

                    # Add this lake to the new feature class
                    arcpy.Append_management(tracingpaper, myNewLakeFilename,
                                            "NO_TEST")

                    # Delete the temp shit
                    arcpy.Delete_management(tracingpaper)

                    # This needs to be added to the master file
                    lakes_to_add.add(myNewLakeFilename)

                    print "New lake found! Created a whole new file just for it, we'll call it " + hashID[:
                                                                                                          disphashlength] + '.'

        # Clean up
        del cursor

        ################################################
        ## Go through all matched lakes and find mergers

        print "Merge checking..."

        # Make our data structure
        bath = bubblebath()

        # Load them all in
        for assignment in assignments:
            print iterreader(assignments[assignment]) + ' --> ' + twoiterreader(bath.read())
            bath.add(assignments[assignment])

        # Clean the small things (aka a non-merger)
        print "Behold the final bubblebath:"

        bath.clean(1)
        print twoiterreader(bath.read())

        # Merge this stuff
        for bubble in bath.read():

            # Make a new feature class name
            m = hashlib.sha224()

            # Mutate hash using lake names
            for item in bubble:
                m.update(str(item))

            m.update('holla holla')

            # Export it
            hashvalue = m.hexdigest()
            myNewLakeFilename = '/master_lakes/lakes/' + hashvalue[:hashlength] + '.shp'

            del m

            if arcpy.Exists(myNewLakeFilename):
                print myNewLakeFilename
                print "Collision while trying to merge bubbles!!!"
            else:

                print "Bubbles will be merged into " + myNewLakeFilename + "."

                # Make said brand-new feature class
                arcpy.CreateFeatureclass_management(env.workspace,
                                                    myNewLakeFilename,
                                                    "POLYGON")
                arcpy.AddField_management(myNewLakeFilename, "ID", "LONG")
                arcpy.AddField_management(myNewLakeFilename, "GRIDCODE",
                                          "LONG")
                arcpy.AddField_management(myNewLakeFilename, "area", "DOUBLE")
                arcpy.AddField_management(myNewLakeFilename, "centr_x",
                                          "DOUBLE")
                arcpy.AddField_management(myNewLakeFilename, "centr_y",
                                          "DOUBLE")
                arcpy.AddField_management(myNewLakeFilename, "lake_id",
                                          "STRING")
                arcpy.AddField_management(myNewLakeFilename, "date", "LONG")
                arcpy.AddField_management(myNewLakeFilename, "loc1", "SHORT")
                arcpy.AddField_management(myNewLakeFilename, "loc2", "SHORT")

            for item in bubble:

                print "Merging " + item + "..."

                # Append all the other ones
                arcpy.Append_management(item, myNewLakeFilename, "NO_TEST")

                # Delete the old feature classes
                arcpy.Delete_management(item)

                # This needs to be removed
                lakes_to_remove.add(item)

            # Remove duplicate lakes from the unified feature class
            tempcursor = arcpy.da.UpdateCursor(myNewLakeFilename, ['lake_id'])

            # Make a list of lake IDs. When we find a duplicate we'll delete the dupe one
            IDs = set()

            for row in tempcursor:
                ID = row[0]
                if ID in IDs:
                    tempcursor.deleteRow()
                    print "Deleted a duplicate in the merged bubble."
                else:
                    IDs.add(ID)

            # Take out the trash
            del tempcursor, IDs

            # Make sure to add the new lake
            lakes_to_add.add(myNewLakeFilename)

            print "Merge successful."

            # Now do it for the others

        ####################################################
        ## Generate union shapes and update the master files
        print "Beginning master lake file update..."

        if len(lakes_to_add) == 0 and len(lakes_to_remove) == 0:
            print "actually nevermind..."
        else:


            # Make a new master lake file
            arcpy.Delete_management(masterlakefile)
            arcpy.CreateFeatureclass_management(env.workspace, masterlakefile,
                                                "POLYGON")
            arcpy.AddField_management(masterlakefile, "ref_id", "STRING")
            arcpy.AddField_management(masterlakefile, "n", "SHORT")
            arcpy.AddField_management(masterlakefile, "n_real", "SHORT")
            arcpy.AddField_management(masterlakefile, "n_ratio", "SHORT")

            print "Master lake file reset."

            # Open the shape folder directory
            os.chdir(env.workspace + "/master_lakes/lakes/")

            # Iterate through all shapefiles
            for file in glob.glob("*.shp"):

                # Error management
                try:
                    ref_id = file[:-4]

                    # Count how many lake images the file has
                    number = arcpy.GetCount_management(file)

                    # Collect the distinct dates first, so the report below
                    # does not always print zero dates
                    dates = set()
                    count_cursor = arcpy.da.SearchCursor(file, ['date'])
                    for crow in count_cursor:
                        dates.add(crow[0])

                    print "Adding lake", ref_id, "to new master lake file. Has", number[0], "lake images over", len(dates), "dates."

                    # Make a union of the thing
                    arcpy.Dissolve_management(file, output)

                    # Get ready to add reference stuff to the thing
                    arcpy.AddField_management(output, "ref_id", "STRING")
                    arcpy.AddField_management(output, "n", "SHORT")
                    arcpy.AddField_management(output, "n_real", "SHORT")
                    arcpy.AddField_management(output, "n_ratio", "SHORT")

                    # This cursor will let up change up that reference id
                    cursor = arcpy.da.UpdateCursor(
                        output, ["ref_id", "n", "n_real", "n_ratio"])

                    # Update that thang
                    for row in cursor:
                        row[0] = ref_id
                        row[1] = int(number[0])
                        row[2] = len(dates)
                        row[3] = float(row[1]) / row[2]
                        cursor.updateRow(row)
                    del cursor

                    # Add it to the master lake file
                    arcpy.Append_management(output, masterlakefile, 'NO_TEST')

                    # Remove the temp file
                    arcpy.Delete_management(output)

                # Return geoprocessing specific errors
                except arcpy.ExecuteError:
                    # Display in terminal
                    print("ERROR: arcpy.ExecuteError")
                    arcpy.AddError(arcpy.GetMessages(2))
                    # Report in logfile
                    logFile.write(
                        str(datetime.now()) + " ERROR: arcpy.ExecuteError")
                    logFile.write(arcpy.GetMessages(2))
                    logFile.flush()

                # Return any other type of error
                except:
                    # Display in terminal
                    e = sys.exc_info()[1]
                    print("ERROR: default error")
                    print(e.args[0])
                    # Report in logfile
                    logFile.write(
                        str(datetime.now()) + " ERROR: default error")
                    logFile.write(e.args[0])
                    logFile.flush()

        print "Success!"

        # Reset this thing
        bath.clean(0)

    logFile.close()
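# The de-duplication pattern used above (track seen IDs in a set, call
# deleteRow() on repeats) is reusable. A minimal generic sketch of it,
# assuming only arcpy and a feature class with a unique-ID field:
import arcpy

def delete_duplicates(fc, id_field):
    """Delete every row whose id_field value has already been seen."""
    seen = set()
    with arcpy.da.UpdateCursor(fc, [id_field]) as cursor:
        for row in cursor:
            if row[0] in seen:
                cursor.deleteRow()
            else:
                seen.add(row[0])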
Example #14
        i += 1
    del my_row, cursor1
    cursor1 = arcpy.UpdateCursor(MIJY)
    i = 0
    for my_row in cursor1:
        my_row.setValue('js', js[i])
        cursor1.updateRow(my_row)
        i += 1
    del my_row, cursor1

    arcpy.Dissolve_management(MIQW, "disqiwen.shp", ["lon", "lat"],
                              [['qw', 'SUM']])
    arcpy.Dissolve_management(MIJY, "disjiangyu.shp", ["lon", "lat"],
                              [['js', 'SUM']])

    arcpy.Intersect_analysis(["disqiwen.shp", "disjiangyu.shp"], "ganzao.shp")
    cursor = arcpy.da.SearchCursor("ganzao.shp", ["SUM_qw", "SUM_js"])
    gz = []
    for row in cursor:
        gz.append(row[0] / row[1] * 0.16)
    #print gz
    del row, cursor
    try:
        arcpy.DeleteField_management("ganzao.shp", 'gz')
        arcpy.AddField_management("ganzao.shp", 'gz', "FLOAT")
    except:
        arcpy.AddField_management("ganzao.shp", 'gz', "FLOAT")
    cursor1 = arcpy.UpdateCursor("ganzao.shp")
    i = 0
    for my_row in cursor1:
        my_row.setValue('gz', gz[i])
        cursor1.updateRow(my_row)
        i += 1
    del my_row, cursor1
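# The two-cursor pattern above assumes both cursors visit rows in the same
# order, which arcpy does not guarantee. A minimal alternative sketch
# (assuming the same "ganzao.shp" layout as above) computes and writes the
# value in a single arcpy.da.UpdateCursor pass:
import arcpy

with arcpy.da.UpdateCursor("ganzao.shp", ["SUM_qw", "SUM_js", "gz"]) as cursor:
    for row in cursor:
        row[2] = row[0] / row[1] * 0.16  # same index formula as above
        cursor.updateRow(row)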
Example #15
    def execute(self, params, messages):

        # Writing params[x]... everywhere is annoying
        rept_id = params[0]
        nepa_id = params[1]
        allt_id = params[2]
        out_loc = params[3]

        try:
            now = datetime.datetime.now()
            date_split = str(datetime.datetime.now()).split('.')[0]
            date_time_stamp = re.sub('[^0-9]', '', date_split)

            output_id = re.sub('[^0-9a-zA-Z]', '', rept_id)  # keep alphanumerics only

            # Create the logger
            text_path = os.path.join(working_dir, 'Range_Logs')
            log_file = os.path.join(text_path, "log.txt")
            rep_file = os.path.join(text_path, "report.txt")
            lg = py_log(rep_file, log_file)
            lg.rep_active = False  # Disable the report file output

            # Start logging
            lg.logging(1, "\nExecuting: "+filename+' \nDate: '+date_split)
            lg.logging(2, header)
            lg.logging(2, "Running env: Python - {}".format(sys.version))
            lg.logging(1, "User: "******"ALLOT_NO" = '+str(allt_id)
            allot_poly = arcpy.MakeFeatureLayer_management(
                                    'Range_Allotment_Polygons',
                                    'in_memory\\selected',
                                    allot_where)

            arcpy.CopyFeatures_management(allot_poly,
                                          output_id+'_'+allt_id+'.shp')

            # Define some necessary source data
            GCDB = r'T:\ReferenceState\CO\CorporateData\cadastral'\
                   r'\Survey Grid.lyr'

            counties = r'T:\ReferenceState\CO\CorporateData\admin_boundaries'\
                       r'\County Boundaries.lyr'

            quads = r'T:\ReferenceState\CO\CorporateData\cadastral'\
                    r'\24k USGS Quad Index.lyr'

            land_ownership = r'T:\ReferenceState\CO\CorporateData\lands'\
                             r'\Land Ownership (No Outline).lyr'

            # Clip the BLM lands out of allotment
            blm_where = buildWhereClauseFromList(land_ownership,
                                                 'adm_manage',
                                                 ['BLM'])

            arcpy.SelectLayerByAttribute_management(land_ownership,
                                                    'NEW_SELECTION',
                                                     blm_where)

            arcpy.Clip_analysis(land_ownership, allot_poly, 'in_memory\\clip')

            # Get the raw allotment acres and BLM only acres
            allot_acres = collections.defaultdict(int)
            allot_acres['Original'] += get_acres(allot_poly)[1]
            allot_acres['BLM'] += get_acres('in_memory\\clip')[1]

            county_ids = []
            quad_ids = []

            #Intersect allotment, counties, and quads
            arcpy.Intersect_analysis([allot_poly, counties, quads],
                                     "in_memory\\intersect",
                                     "NO_FID")

            arcpy.Frequency_analysis("in_memory\\intersect",
                                     "in_memory\\County",
                                     "COUNTY")

            arcpy.Frequency_analysis("in_memory\\intersect",
                                     "in_memory\\Quad",
                                     "QUAD_NAME")

            with arcpy.da.SearchCursor("in_memory\\County",
                                       ["COUNTY"]) as cursor:
                for row in cursor:
                    county_ids.append(str(row[0]))

            with arcpy.da.SearchCursor("in_memory\\Quad",
                                       ["QUAD_NAME"]) as cursor:
                for row in cursor:
                    quad_ids.append(str(row[0]))

            county_str = ', '.join(county_ids[:-1]
                                   )+' and '+county_ids[-1]+' counties'

            quad_str = ', '.join(quad_ids[:-1]
                                 )+' and '+quad_ids[-1]+' 7.5'+"'"+' quads'

            #Intersect survey poly, GCDB Survey Grid, counties, and quad
            arcpy.Intersect_analysis([allot_poly, GCDB],
                                     "in_memory\\gcdb",
                                     "NO_FID")

            gcdb_fields = ["FRSTDIVID","QQSEC"]

            arcpy.Frequency_analysis("in_memory\\gcdb",
                                     "in_memory\\gcdb_freq",
                                     gcdb_fields)


            field_names = ["PM", "Twn", "Rng", "Section", "QQ1", "QQ2"]
            csv_rows = []

            with arcpy.da.SearchCursor("in_memory\\gcdb_freq",
                                       gcdb_fields) as cursor:
                for row in cursor:
                    #inRow[0] = PM
                    r0 = str(row[0])[2:4]
                    #inRow[1] = Twn
                    r1 = str(row[0])[5:7]+str(row[0])[8]
                    #inRow[2] = Rng
                    r2 = str(row[0])[10:12]+str(row[0])[13]
                    #inRow[3] = Sec
                    r3 = str(row[0])[-3:-1]
                    #inRow[4] = QQ1
                    r4 = str(row[1])[0:2]
                    #inRow[5] = QQ2
                    r5 = str(row[1])[2:4]

                    csv_rows.append([r0, r1, r2, r3, r4, r5])

#TODO  - find a way to sort and compress entries in table

            outCSV = output_id+'_'+allt_id+'.csv'
            with open(outCSV, 'wb') as csvfile:
                csvwriter = csv.writer(csvfile)
                csvwriter.writerow(field_names)
                for row in csv_rows:
                    csvwriter.writerow(row)

            legal_desc_tr = list(set(' '.join(['PM '+row[0],
                                               'Twn '+row[1],
                                               'Rng '+row[2]])
                                     for row in csv_rows))

            # Clip sites and surveys, generate lists of PKs
            # And calculate survey coverage
            sites = r'T:\CO\GIS\gistools\tools\Cultural'\
                    r'\BLM_Cultural_Resources\Sites.lyr'

            surveys = r'T:\CO\GIS\gistools\tools\Cultural'\
                      r'\BLM_Cultural_Resources\Surveys.lyr'

            out_path = os.path.join(working_dir, '_exchange')
            out_sites_name = output_id+'_'+allt_id+'sites.shp'
            out_surveys_name = output_id+'_'+allt_id+'surveys.shp'
            out_sites = os.path.join(out_path, out_sites_name)
            out_surveys = os.path.join(out_path, out_surveys_name)

            arcpy.Clip_analysis(sites, allot_poly, out_sites)
            arcpy.Clip_analysis(surveys, allot_poly, out_surveys)

            # Get list of surveys and sum coverage acres
            surveys_dict = {'Surveys' :  [],
                          'Coverage' : 0}

            for row in arcpy.da.SearchCursor(out_surveys, 'SHPO_ID'):
                surveys_dict['Surveys'].append(row[0])
            surveys_dict['Coverage'] = get_acres(out_surveys)[1]

            # Get list of sites total and eligible sites specifically
            sites_dict = {'Sites'    : [],
                          'Eligible' : []}

            for row in arcpy.da.SearchCursor(out_sites, 'SITE_ID'):
                sites_dict['Sites'].append(row[0])

            eligible = ['ELIGIBLE', 'CONTRIBUTING', 'SUPPORTING',
                        'WITHIN ELIGIBLE DISTRICT', 'LISTED NATIONAL',
                        'NATIONAL LANDMARK', 'LISTED STATE', 'LOCAL LANDMARK',
                        'NOMINATED STATE', 'DELISTED']

            sites_where = buildWhereClauseFromList(out_sites,
                                                   'ELIGIBLE',
                                                    eligible)

            arcpy.SelectLayerByAttribute_management(out_sites,
                                                    'NEW_SELECTION',
                                                     sites_where)

            for row in arcpy.da.SearchCursor(out_sites, 'SITE_ID'):
                sites_dict['Eligible'].append(row[0])

            # Make a map
            mxd_name = output_id+'_'+allt_id+'.mxd'
            mxd_loc = os.path.join(working_dir, '_exchange')
            mxd = os.path.join(mxd_loc, mxd_name)

            temp_mxd = os.path.join(working_dir,
                                    '_templates\Range_Renewal_Temp.mxd')

            template_mxd = arcpy.mapping.MapDocument(temp_mxd)
            df = arcpy.mapping.ListDataFrames(template_mxd)[0]

            # Update report elements
            data_dict = {'ProjectID': rept_id,
                         'Title'    : "Range Renewal Allotment ID: "+allt_id,
                         'Author'   : "Michael D. Troyer",
                         'Date'     : str(now.month)+"\\"+str(now.year),
                         'Location' : '\n'.join(legal_desc_tr),
                         'County'   : county_str,
                         'Quad'     : quad_str}

            for item in arcpy.mapping.ListLayoutElements(template_mxd):
                if item.name in data_dict:
                    ePX = item.elementPositionX  # get the item position
                    item.text = data_dict[item.name]
                    item.elementPositionX = ePX  # reset the item position

            # Add Layer to map
            allot_lyr = allot_poly.getOutput(0)  # MakeFeatureLayer returns a Result; unwrap the layer
            arcpy.mapping.AddLayer(df, allot_lyr, "TOP")

            # Set visible layers
            for item in arcpy.mapping.ListLayers(template_mxd, data_frame=df):
                if item.supports("VISIBLE"):
                    item.visible = False
            arcpy.RefreshActiveView()
            arcpy.RefreshTOC()

            # Set scale and pan to extent
            desc = arcpy.Describe(allot_poly)
            new_extent = desc.extent
            df.extent = new_extent
            df.scale = 24000

            # Save as new mxd
            template_mxd.saveACopy(mxd)

#TODO - update range renewal table with calculated percent inventoried

# EXCEPTIONS ------------------------------------------------------------------

        except:
            print_exception_full_stack(lg, print_locals=True)

            # Don't create exceptions in the except block!
            try:
                lg.logging(1, '\n\n{} did not complete'.format(filename))
                lg.console('See logfile for details')

            except:
                pass

# CLEAN-UP --------------------------------------------------------------------


        finally:
            end_time = datetime.datetime.now()
            elapsed_time = str(end_time - start_time)

            try:
                lg.logging(1, "End Time: "+str(end_time))
                lg.logging(1, "Time Elapsed: {}".format(elapsed_time))

            except:
                pass
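# The snippet above relies on helpers defined elsewhere (py_log, get_acres,
# buildWhereClauseFromList). A plausible sketch of buildWhereClauseFromList --
# an assumption, not the original implementation:
import arcpy

def buildWhereClauseFromList(table, field, valueList):
    """Return a SQL expression like "FIELD" IN ('a', 'b') for the given table."""
    fieldDelimited = arcpy.AddFieldDelimiters(table, field)
    # Quote text values; leave numeric values bare
    if arcpy.ListFields(table, field)[0].type == 'String':
        valueList = ["'%s'" % v for v in valueList]
    return "%s IN (%s)" % (fieldDelimited, ", ".join(map(str, valueList)))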
Example #16
	# Local variables:
	fc = "Data/Interp/"+evname+".gdb/"+evname+"_fc"
	Thiessen = "Data/Interp/"+evname+".gdb/"+evname+"_Thiessen"
	Thiessen_ocean = "Data/Interp/"+evname+".gdb/"+evname+"_Thiessen_ocean"
	Thiessen_single = "Data/Interp/"+evname+".gdb/"+evname+"_Thiessen_single"
	Thiessen_clipped = "Data/Interp/"+evname+".gdb/"+evname+"_Interp"

	# 1)
	# Create geodatabase file
	arcpy.CreateFileGDB_management ("Data/Interp", evname)

	# Add env. points to geodatabase
	arcpy.CopyFeatures_management (s, fc)

	# 2)
	# Create Thiessen Polygons
	arcpy.CreateThiessenPolygons_analysis (fc, Thiessen, "ALL")

	# 4)
	# Overlay with ocean layer
	arcpy.Intersect_analysis ([Thiessen, ocean], Thiessen_ocean)
	arcpy.MultipartToSinglepart_management(Thiessen_ocean, Thiessen_single)

	# 5)
	# Select Layer By Location
	# Remove polygons over 50 m from original points
	arcpy.MakeFeatureLayer_management (Thiessen_single, "tmp")
	arcpy.SelectLayerByLocation_management ("tmp", "WITHIN_A_DISTANCE", fc, "50 Meters", "NEW_SELECTION")
	arcpy.CopyFeatures_management ("tmp", Thiessen_clipped)
final_pts = "D:\\projects\\ak_fire\\data\\firePerimeters_1940_2016_dates_for_each_burn_bufferedIn300m.shp"

# In original polygon file:
# Create field on which to dissolve polys; populate; dissolve
# This makes a single polygon out of all fire areas
arcpy.AddField_management(input_file, "junk", "SHORT", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
arcpy.CalculateField_management(input_file, "junk", "1", "VB", "")
arcpy.Dissolve_management(input_file, dissolved_polys, "", "", "MULTI_PART", "DISSOLVE_LINES")
print 'Polygons dissolved...'

# Create non-overlapping file of polygons
# Here gaps between polygons show up as 'real' polygons
arcpy.FeatureToPolygon_management(input_file, polys_and_gaps, "", "ATTRIBUTES", fc_empty_pts)

# Intersect dissolved polys with polys and gaps to remove gaps
arcpy.Intersect_analysis([dissolved_polys, polys_and_gaps], individual_polys, "ALL", "", "INPUT")
print 'Polygons intersected...'

# Calculate acreage for non-overlapping polys
arcpy.AddField_management(individual_polys, "acres", "DOUBLE", "15", "3", "", "", "NULLABLE", "NON_REQUIRED", "")
arcpy.CalculateField_management(individual_polys, "acres", "!Shape.Area@acres!", "PYTHON_9.3", "")
print 'Acreage calculated...'

# Convert non-overlapping polys to points
arcpy.FeatureToPoint_management(individual_polys, individual_pts, "INSIDE")
print 'Polygons converted to points...'

# Intersect points with original polygon file
arcpy.Intersect_analysis([input_file, individual_pts], final_pts, "ALL", "", "INPUT")
print 'Points intersected with original polygon file...'
print 'Done! Polygon file written to ' + final_pts
Example #18
def main(fcLineNetwork,
         fieldStreamRouteID,
         seed_distance,
         window_sizes,
         stat_fields,
         fcOutputWindows,
         fcOutputSeedPoints,
         tempWorkspace=arcpy.env.scratchWorkspace):
    """Perform a Moving Window Analysis on a Line Network."""

    # Prepare Inputs
    arcpy.AddMessage("Preparing Moving Window Analysis")
    fc_line_dissolve = gis_tools.newGISDataset(tempWorkspace, "GNAT_MWA_LineNetworkDissolved")
    arcpy.Dissolve_management(fcLineNetwork, fc_line_dissolve, fieldStreamRouteID, multi_part=False, unsplit_lines=True)
    arcpy.FlipLine_edit(fc_line_dissolve)

    listSeeds = []
    listgWindows = []
    intSeedID = 0

    # Moving Window Generation
    arcpy.AddMessage("Starting Window Generation")
    iRoutes = int(arcpy.GetCount_management(fc_line_dissolve).getOutput(0))
    arcpy.SetProgressor("step", "Processing Each Route", 0, iRoutes, 1)
    iRoute = 0
    with arcpy.da.SearchCursor(fc_line_dissolve, ["SHAPE@", fieldStreamRouteID, "SHAPE@LENGTH"]) as scLines:
        for fLine in scLines:  # Loop Through Routes
            arcpy.SetProgressorLabel("Route: {} Seed Point: {}".format(iRoute, intSeedID))
            arcpy.SetProgressorPosition(iRoute)
            dblSeedPointPosition = float(max(window_sizes)) / 2  # Start Seeds at position of largest window
            while dblSeedPointPosition + float(max(window_sizes)) / 2 < fLine[2]:
                arcpy.SetProgressorLabel("Route: {} Seed Point: {}".format(iRoute, intSeedID))
                gSeedPointPosition = fLine[0].positionAlongLine(dblSeedPointPosition)
                listSeeds.append([scLines[1], intSeedID, gSeedPointPosition, dblSeedPointPosition])
                for window_size in window_sizes:
                    dblWindowSize = float(window_size)
                    dblLengthStart = dblSeedPointPosition - dblWindowSize / 2
                    dblLengthEnd = dblSeedPointPosition + dblWindowSize / 2
                    listgWindows.append([scLines[1], intSeedID, dblWindowSize, fLine[0].segmentAlongLine(dblLengthStart, dblLengthEnd)])
                dblSeedPointPosition = dblSeedPointPosition + float(seed_distance)
                intSeedID = intSeedID + 1
            iRoute = iRoute + 1

    arcpy.AddMessage("Compiling Moving Windows")
    fcSeedPoints = gis_tools.newGISDataset(tempWorkspace, "GNAT_MWA_SeedPoints")
    fcWindowLines = gis_tools.newGISDataset(tempWorkspace, "GNAT_MWA_WindowLines")
    arcpy.CreateFeatureclass_management(tempWorkspace, "GNAT_MWA_SeedPoints", "POINT", spatial_reference=fcLineNetwork)
    arcpy.CreateFeatureclass_management(tempWorkspace, "GNAT_MWA_WindowLines", "POLYLINE",
                                        spatial_reference=fcLineNetwork)

    gis_tools.resetField(fcSeedPoints, "RouteID", "TEXT")
    gis_tools.resetField(fcSeedPoints, "SeedID", "LONG")
    gis_tools.resetField(fcSeedPoints, "SeedDist", "DOUBLE")

    gis_tools.resetField(fcWindowLines, "RouteID", "TEXT")
    gis_tools.resetField(fcWindowLines, "SeedID", "LONG")
    gis_tools.resetField(fcWindowLines, "Seg", "DOUBLE")

    with arcpy.da.InsertCursor(fcSeedPoints, ["RouteID", "SeedID", "SHAPE@XY", "SeedDist"]) as icSeedPoints:
        for row in listSeeds:
            icSeedPoints.insertRow(row)

    with arcpy.da.InsertCursor(fcWindowLines, ["RouteID", "SeedID", "Seg", "SHAPE@"]) as icWindowLines:
        for row in listgWindows:
            icWindowLines.insertRow(row)

    # Intersecting Network Attributes with Moving Windows
    arcpy.AddMessage("Intersecting Network Attributes with Moving Windows")
    fcIntersected = gis_tools.newGISDataset(tempWorkspace, "GNAT_MWA_IntersectWindowAttributes")
    arcpy.Intersect_analysis([fcWindowLines, fcLineNetwork], fcIntersected, "ALL", output_type="LINE")

    # Use Python Dictionaries for Summary Stats
    # Reference: https://community.esri.com/blogs/richard_fairhurst/2014/11/08/turbo-charging-data-manipulation-with-python-cursors-and-dictionaries
    arcpy.AddMessage("Loading Moving Window Attributes")
    valueDict = {}
    with arcpy.da.SearchCursor(fcIntersected, ["SeedID", "Seg", "SHAPE@LENGTH"] + stat_fields) as searchRows:
        for searchRow in searchRows:
            keyValue = str(searchRow[0])
            segValue = str(searchRow[1])
            if not keyValue in valueDict:
                valueDict[keyValue] = {segValue: [(searchRow[2:])]}
            else:
                if segValue not in valueDict[keyValue]:
                    valueDict[keyValue][segValue] = [(searchRow[2:])]
                else:
                    valueDict[keyValue][segValue].append((searchRow[2:]))

    addfields = ["w{}{}{}".format(str(ws)[:4].rstrip("."), stat, field)[:10] for ws in window_sizes for field in stat_fields for stat in ["N", "Av", "Sm", "Rn", "Mn", "Mx", "Sd", "WA"]]
    for field in addfields:
        gis_tools.resetField(fcSeedPoints, field, "DOUBLE")

    arcpy.AddMessage("Calculating Attribute Statistics")
    with arcpy.da.UpdateCursor(fcSeedPoints, ["SeedID"] + addfields) as ucSeedPoints:
        for row in ucSeedPoints:
            new_row = [row[0]]
            for ws in window_sizes:
                seglen = [segment[0] for segment in valueDict[str(row[0])][str(ws)]]
                for i in range(1, len(stat_fields) + 1):
                    vals = [float(segment[i]) for segment in valueDict[str(row[0])][str(ws)]]
                    count_vals = float(len(vals))
                    sum_vals = sum(vals)
                    ave_vals = sum_vals / float(count_vals)
                    max_vals = max(vals)
                    min_vals = min(vals)
                    range_vals = max_vals - min_vals
                    sd_vals = sqrt(sum([abs(float(x) - float(ave_vals))**2 for x in vals]) / float(count_vals))
                    wave_vals = sum([val / slen for val, slen in zip(vals, seglen)])/ float(count_vals)
                    new_row.extend([count_vals, ave_vals, sum_vals, range_vals, min_vals, max_vals, sd_vals, wave_vals])
            ucSeedPoints.updateRow(new_row)

    # Manage Outputs
    arcpy.AddMessage("Saving Outputs")
    gis_tools.resetData(fcOutputSeedPoints)
    arcpy.CopyFeatures_management(fcSeedPoints, fcOutputSeedPoints)
    gis_tools.resetData(fcOutputWindows)
    arcpy.CopyFeatures_management(fcWindowLines, fcOutputWindows)

    return 0
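# A hypothetical invocation of the tool above; every path and field name below
# is a placeholder, not taken from the source:
if __name__ == "__main__":
    main(fcLineNetwork=r"C:\data\network.gdb\StreamNetwork",
         fieldStreamRouteID="RouteID",
         seed_distance=100,
         window_sizes=[200, 500, 1000],
         stat_fields=["GRADIENT"],
         fcOutputWindows=r"C:\data\network.gdb\MWA_Windows",
         fcOutputSeedPoints=r"C:\data\network.gdb\MWA_SeedPoints",
         tempWorkspace=r"C:\data\scratch.gdb")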
Example #19

import numpy
from pandas import *

sr = arcpy.SpatialReference(2881)

workdir = r"//ad.sfwmd.gov/dfsroot/data/wsd/GIS/GISP_2012/DistrictAreaProj/CFWI/Data/Soils/"
Myworkspace = workdir + "ProcessDir.gdb"

ModelMesh = r"\\ad.sfwmd.gov\dfsroot\data\wsd\GIS\GISP_2012\DistrictAreaProj\CFWI\Data\Soils\ECFTX_GRID_V3.shp"
SoilGroups = workdir + "ECFTXsoilMu.shp"
SoilModelMesh = Myworkspace + "/SoilModelMesh"
maxAreaSoilMesh = Myworkspace + "/maxAreaSoilMesh"
InterceptFeatures = ModelMesh + " #;" + SoilGroups + " #"
arcpy.Intersect_analysis(in_features=InterceptFeatures,
                         out_feature_class=SoilModelMesh,
                         join_attributes="ALL",
                         cluster_tolerance="#",
                         output_type="INPUT")

arcpy.AddGeometryAttributes_management(Input_Features=SoilModelMesh,
                                       Geometry_Properties="AREA",
                                       Length_Unit="FEET_US",
                                       Area_Unit="SQUARE_FEET_US",
                                       Coordinate_System=sr)

arcpy.Dissolve_management(in_features=SoilModelMesh,
                          out_feature_class=maxAreaSoilMesh,
                          dissolve_field="SEQNUM;MUKEY",
                          statistics_fields="POLY_AREA SUM",
                          multi_part="MULTI_PART",
                          unsplit_lines="DISSOLVE_LINES")
Example #20
def generate_dispersiveness(polygon, levels, workspace="in_memory", move_dir=0):
    arcpy.env.workspace = "in_memory"
    dispersiveness = ""
    closure_str = ""
    fragmentation = ""
    roundness = ""
    adv_elongation = ""
    displacements_e = ""
    displacements_n = ""
    extent_str = ""
    extent_mv_str = ""
    large_str = ""
    large_attr_list = []
    dispersive_search_radius = (100e3, 150e3, 200e3, 250e3, 500e3)

    for l in levels:

        # list for dispersiveness
        l_dispersive = []
        # list for fragmentation
        l_frag = []
        # list for roundness
        l_round = []
        # list to collect areas
        _areas_list = []
        # dict for closure
        # closure = dict(zip(range(360), [False]*360))
        # list for displacement
        l_displace = []

        eye_x = 0
        eye_y = 0

        # We need multiple level radar
        for lr in dispersive_search_radius:
            with arcpy.da.SearchCursor(polygon,
                                       ["AREA", "TO_EYE", "SHAPE@", "EYE_X", "EYE_Y", "AREA_CVX", "PERIM", "SUM_AREA", "ANGLE"],
                                       where_clause="dBZ=%d and TO_EYE<=%f" % (l, lr)) as cur:
                l_dispersive = []
                cur.reset()
                for row in cur:
                    # Dispersiveness
                    l_dispersive.append([row[0], row[1]])
                # Calculate dispersiveness
                areas = numpy.array(l_dispersive)
                if areas.shape != (0,):
                    total_areas = numpy.sum(areas[:, 0])
                    area_frac = old_div(areas[:, 0], total_areas)
                    dist_weight = old_div(areas[:, 1], lr)
                    dispersiveness += "%f|" % numpy.sum(area_frac * dist_weight)
                else:
                    dispersiveness += "|"
        if dispersiveness.endswith("|"):
            dispersiveness = dispersiveness[:-1]
        dispersiveness += ","

        with arcpy.da.SearchCursor(polygon,
                                   ["AREA", "TO_EYE", "SHAPE@", "EYE_X", "EYE_Y", "AREA_CVX", "PERIM", "SUM_AREA", "ANGLE"],
                                   where_clause="dBZ=%d" % (l,)) as cur:
            cur.reset()
            for row in cur:
                # # For closure, we need exclude polygon in 50km buffer closed to the eye
                # if row[1] >= 50000:
                #     geom, x0, y0 = row[2:5]
                #     cl = calc_closure(geom, x0, y0)
                #     closure.update(cl)
                # Fragment
                l_frag.append((row[0], row[5]))
                # Roundness
                l_round.append((row[0], row[6], row[7]))
                # Area list
                # _areas_list.append(row[0])
                # displacement
                l_displace.append((row[0], row[1], row[8]))
                # we need eye_x, eye_y for closure center
                eye_x = row[3]
                eye_y = row[4]

        # Calculate fragmentation.
        fareas = numpy.array(l_frag)
        if fareas.shape != (0,):
            total_areas = numpy.sum(fareas[:, 0])
            total_cvx_areas = numpy.sum(fareas[:, 1])
            solidity = old_div(total_areas, total_cvx_areas)
            # Connectivity
            sareas = fareas.shape[0]
            conn = 1 - (old_div((sareas - 1), ((old_div(total_areas, 9)) ** 0.5 + sareas)))
            fragmentation += "%f," % (1 - solidity * conn)
        else:
            fragmentation += ","

        # Asymmetry/Roundness
        # This could be computed per polygon, but recomputing it here does no harm.
        rareas = numpy.array(l_round)
        if rareas.shape != (0,):
            max_rareas = rareas[numpy.argmax(rareas, 0)]
            # R = base_roundness * size_factor
            R = numpy.mean(old_div(4 * max_rareas[:, 0] * math.pi, numpy.square(max_rareas[:, 1]) * (
                    old_div(numpy.log(max_rareas[:, 0]), numpy.log(max_rareas[:, 2])))))
            roundness += "%f," % (1 - R)
        else:
            roundness += ","

        # Calculate displacement
        areas = numpy.array(l_displace)
        if areas.shape != (0,):
            total_areas = numpy.sum(areas[:, 0])
            area_frac = old_div(areas[:, 0], total_areas)
            dist_weight_e = areas[:, 1] * numpy.sin(numpy.radians(areas[:, 2])) / 1000.0  # Let's scale it to km, otherwise it will be too large
            dist_weight_n = areas[:, 1] * numpy.cos(numpy.radians(areas[:, 2])) / 1000.0
            displacements_e += "%f," % numpy.sum(area_frac * dist_weight_e)
            displacements_n += "%f," % numpy.sum(area_frac * dist_weight_n)
        else:
            displacements_e += ","
            displacements_n += ","

        pid = os.getpid()

        # Now we can do closure the old way
        if "closure" not in utils.skip_list:
            closure_ring_km = [(25, 100), (100, 200), (200, 300), (300, 400), (400, 500), (25, 500)]
            select3 = arcpy.Select_analysis(polygon, "in_memory/select_temp_3_%d" % pid, where_clause="dBZ=%d" % l)
            eye_lon, eye_lat = utils.projFunc(eye_x, eye_y, inverse=True)
            for s, e in closure_ring_km:
                with RadiantLine(lon=eye_lon, lat=eye_lat, r_start=s, r_end=e) as radiant:
                    arcpy.Intersect_analysis(in_features=["in_memory/select_temp_3_%d" % pid, radiant.temp_name],
                                             out_feature_class="in_memory/closure_temp_%d" % pid, join_attributes="ALL", output_type="INPUT")
                    with arcpy.da.SearchCursor("in_memory/closure_temp_%d" % pid, ["SHAPE@", "DEG"]) as q:
                        count = set()
                        for k in q:
                            count.add(k[1])
                        closure_str += "%.2f|" % (len(count) / 360.0)
                # arcpy.Delete_management("in_memory/select_temp.shp")
                # arcpy.Delete_management("in_memory/closure_temp.shp")
            # Remove last "|"
            if closure_str.endswith("|"):
                closure_str = closure_str[:-1]
            closure_str += ","

        if "extent" not in utils.skip_list:
            select4 = arcpy.Select_analysis(polygon, "in_memory/select_temp_4_%d" % pid, where_clause="dBZ=%d" % l)
            eye_lon, eye_lat = utils.projFunc(eye_x, eye_y, inverse=True)
            extent_mv = {"1": [0] * 90, "2": [0] * 90, "3": [0] * 90, "4": [0] * 90}
            extent_nat = {"1": [0] * 90, "2": [0] * 90, "3": [0] * 90, "4": [0] * 90}
            with RadiantLine(lon=eye_lon, lat=eye_lat, r_start=0, r_end=600, direction=int(move_dir)) as radiant:
                arcpy.Intersect_analysis(in_features=["in_memory/select_temp_4_%d" % pid, radiant.temp_name],
                                         out_feature_class="in_memory/extent_temp_%d" % pid, join_attributes="ALL", output_type="INPUT")
                # sr = arcpy.Describe(polygon).spatialReference
                with arcpy.da.SearchCursor("in_memory/extent_temp_%d" % pid, ["SHAPE@", "QUAD", "MOVE", "DEG", "MV_DEG"]) as q:
                    for k in q:
                        quad = k[1]
                        move = k[2]
                        geom = k[0]
                        deg = int(k[3]) % 90
                        mv_deg = int(k[4]) % 90
                        for part in geom.getPart():
                            for pt in part:
                                dist = abs((pt.X + pt.Y * 1j) - (eye_x + eye_y * 1j)) / 1000.0
                                extent_mv[move][mv_deg] = max(extent_mv[move][mv_deg], dist)
                                extent_nat[quad][deg] = max(extent_nat[quad][deg], dist)
                extent_mv_str += "%.2f|%.2f|%.2f|%.2f" % (sum(extent_mv["1"]) / 90.0, sum(extent_mv["2"]) / 90.0, sum(extent_mv["3"]) / 90.0, sum(extent_mv["4"]) / 90.0)
                extent_str += "%.2f|%.2f|%.2f|%.2f" % (sum(extent_nat["1"]) / 90.0, sum(extent_nat["2"]) / 90.0, sum(extent_nat["3"]) / 90.0, sum(extent_nat["4"]) / 90.0)
            extent_mv_str += ","
            extent_str += ","

        # Get largest polygons and copy their attributes
        all_fields = [p.name for p in arcpy.ListFields(polygon)][2:]  # The first is always id, second is always shape
        area_index = all_fields.index("AREA")
        max_area = 0
        attr = None
        with arcpy.da.SearchCursor(polygon, all_fields, where_clause="dBZ=%d" % l) as cur:
            for row in cur:
                if row[area_index] > max_area:
                    max_area = row[area_index]
                    attr = row  # row is a tuple
        large_attr_list.append(attr)
        
    
    # Process large_attr_list again to convert it to CSV strings
    large_str = [",".join(map(str, t)) + "," for t in zip(*large_attr_list)]

    arcpy.Delete_management("in_memory")

    # let us return field name first, then field values
    return (["dispersiveness", "closure", "frag", "asymmetry", "dis_e", "dis_n", "extent_move", "extent_geom"] + all_fields, 
            [dispersiveness, closure_str, fragmentation, roundness, displacements_n, displacements_e, extent_mv_str, extent_str] + large_str)
Example #21
    Predicted_Impact_Point_Lat_Lon, "lon", "lat", predicted_impact_xy_Layer,
    "GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]];-400 -400 1000000000;-100000 10000;-100000 10000;8.98315284119522E-09;0.001;0.001;IsHighPrecision",
    "")

# Process: Copy Features
arcpy.CopyFeatures_management(predicted_impact_xy_Layer, pip_point_shp, "",
                              "0", "0", "0")

# Process: Buffer
arcpy.Buffer_analysis(pip_point_shp, v3sigma_Circle_Output,
                      Enter_the_3sigma_circle_Radius_and_Distance_Unit, "FULL",
                      "ROUND", "NONE", "", "PLANAR")

# Process: Intersect
arcpy.Intersect_analysis(
    "E:\\gina\\poker\\pip\\pip_buffer.shp #;E:\\gina\\poker\\shp\\wip\\adnr_gls_dls_merge_20170823_v1.shp #",
    Land_Ownership_within_3sigma_Circle, "ALL", "", "INPUT")

# Process: slurp_pip_dir
# arcpy.gp.toolbox = "E:/gina/poker/tbx/pfrr_ranger.tbx";
# Warning: the toolbox E:/gina/poker/tbx/pfrr_ranger.tbx DOES NOT have an alias.
# Please assign this toolbox an alias to avoid tool name collisions
# And replace arcpy.gp.slurp(...) with arcpy.slurp_ALIAS(...)
# arcpy.gp.slurp()

pip_dir = "E:/gina/poker/pip"
arcpy.env.workspace = pip_dir
shp_list = arcpy.ListFeatureClasses()
mxd = arcpy.mapping.MapDocument("CURRENT")
dataframe = arcpy.mapping.ListDataFrames(mxd)[0]
for shp in shp_list:
    # (loop body truncated in the source snippet)
    pass
Example #22

## allow outputs to be overwritten
arcpy.env.overwriteOutput = 1

##toolbox inputs
patches = sys.argv[1]
streams = sys.argv[2]
streamNetwork = sys.argv[3]
distThresh = sys.argv[4]  # in KM

#get directory from input file
outDir = os.path.splitext(os.path.dirname(patches))[0]

##create points at the intersections of streams and patch edges for use in distance calculation
patchPointTemp = os.path.join(outDir, "patchPointTemp.shp")
arcpy.Intersect_analysis([streams, patches],
                         patchPointTemp,
                         output_type="POINT")

##explode multipoint objects
patchPoints = os.path.join(outDir, "patchPoints.shp")
arcpy.MultipartToSinglepart_management(patchPointTemp, patchPoints)

arcpy.AddMessage("Starting Distance Measurements Between Patches...")

##make new service area analysis
distThreshMeters = float(distThresh) * 1000
arcpy.MakeServiceAreaLayer_na(streamNetwork,
                              "ServiceArea",
                              "Length",
                              default_break_values=distThreshMeters)
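# The snippet stops right after creating the service-area layer. A typical
# continuation (an assumption, not part of the source; the 500 m search
# tolerance is illustrative) loads the patch edge points as facilities and
# solves the analysis:
arcpy.AddLocations_na("ServiceArea", "Facilities", patchPoints,
                      "", "500 Meters")
arcpy.Solve_na("ServiceArea")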
Example #23

                    arcpy.RepairGeometry_management('c_states_t_d')

                    # add detailed interior back
                    arcpy.Erase_analysis('c_states_t_d', 'c_states',
                                         'c_states_t_d_e')
                    arcpy.Merge_management(['c_states', 'c_states_t_d_e'],
                                           'c_states_t_d_e_m')
                    arcpy.Dissolve_management('c_states_t_d_e_m', 'c_thiessen',
                                              'NAME_1')
                    arcpy.RepairGeometry_management('c_thiessen')

                if not arcpy.Exists(c_offshore_dest):
                    # rgn_offshore: rename NAME_1 to rgn_name
                    print '  rgn_offshore, rgn_inland (%s)' % time.strftime(
                        '%H:%M:%S')
                    arcpy.Intersect_analysis(['c_eez', 'c_thiessen'],
                                             'c_eez_t', 'NO_FID')
                    arcpy.RepairGeometry_management('c_eez_t')
                    arcpy.Dissolve_management('c_eez_t', 'c_rgn_offshore_mol',
                                              'NAME_1')
                    arcpy.RepairGeometry_management('c_rgn_offshore_mol')
                    arcpy.AddField_management('c_rgn_offshore_mol', 'rgn_name',
                                              'TEXT')
                    arcpy.CalculateField_management('c_rgn_offshore_mol',
                                                    'rgn_name', '!NAME_1!',
                                                    'PYTHON_9.3')
                    arcpy.DeleteField_management('c_rgn_offshore_mol',
                                                 'NAME_1')

                    # rgn_offshore: assign rgn_id by ascending y coordinate
                    arcpy.AddField_management('c_rgn_offshore_mol',
                                              'centroid_y', 'FLOAT')
Example #24
arcpy.MakeFeatureLayer_management(
    select_only_multi_iso3, wdpa_only_multi_iso3, "", "",
    "iso3 iso3 VISIBLE NONE;AREA_GEO AREA_GEO VISIBLE NONE;Shape_length Shape_length VISIBLE NONE;Shape_area Shape_area VISIBLE NONE"
)

# Process: Copy Features
arcpy.CopyFeatures_management(wdpa_only_multi_iso3, iso3_multi, "", "0", "0",
                              "0")
print("Feature class with only multi iso3 created")

# Process: Erase
arcpy.Erase_analysis(iso3_multi, iso3_no_multi, iso3_multi_erased, "")

# Process: Intersect
# arcpy.Intersect_analysis("Z:/globes/USERS/GIACOMO/protconn/data/ProtConn_jun2020.gdb/iso3_multi_erased #;Z:/globes/USERS/GIACOMO/protconn/data/ProtConn_jun2020.gdb/gaul #", intersected_gaul_erased, "ALL", "", "INPUT")
arcpy.Intersect_analysis([iso3_multi_erased, gaul], intersected_gaul_erased,
                         "ALL", "", "INPUT")
print("Multi iso3 erased and intersected with gaul")

# Process: Add Field myISO3 (2)
arcpy.AddField_management(intersected_gaul_erased, "myISO3", "TEXT", "", "",
                          "", "", "NULLABLE", "NON_REQUIRED", "")

# Process: Calculate Field myISO3 (2)
arcpy.CalculateField_management(intersected_gaul_erased, "myISO3", "!iso3_1!",
                                "PYTHON", "")

# Process: Merge
# arcpy.Merge_management("Z:/globes/USERS/GIACOMO/protconn/data/ProtConn_jun2020.gdb/iso3_no_multi;Z:/globes/USERS/GIACOMO/protconn/data/ProtConn_jun2020.gdb/intersected_gaul_erased", wdpa_multi_iso3_together, "iso3 \"iso3\" true true false 50 Text 0 0 ,First,#,Z:/globes/USERS/GIACOMO/protconn/data/ProtConn_jun2020.gdb/iso3_no_multi,iso3,-1,-1,Z:/globes/USERS/GIACOMO/protconn/data/ProtConn_jun2020.gdb/intersected_gaul_erased,iso3,-1,-1;AREA_GEO \"AREA_GEO\" true true false 3014713 Double 98 6553703 ,First,#,Z:/globes/USERS/GIACOMO/protconn/data/ProtConn_jun2020.gdb/iso3_no_multi,AREA_GEO,-1,-1,Z:/globes/USERS/GIACOMO/protconn/data/ProtConn_jun2020.gdb/intersected_gaul_erased,AREA_GEO,-1,-1;myISO3 \"myISO3\" true true false 50 Text 0 0 ,First,#,Z:/globes/USERS/GIACOMO/protconn/data/ProtConn_jun2020.gdb/iso3_no_multi,myISO3,-1,-1,Z:/globes/USERS/GIACOMO/protconn/data/ProtConn_jun2020.gdb/intersected_gaul_erased,myISO3,-1,-1;FID_gaul \"FID_gaul\" true true false 0 Long 0 0 ,First,#,Z:/globes/USERS/GIACOMO/protconn/data/ProtConn_jun2020.gdb/intersected_gaul_erased,FID_gaul,-1,-1;iso3_1 \"iso3_1\" true true false 254 Text 0 0 ,First,#,Z:/globes/USERS/GIACOMO/protconn/data/ProtConn_jun2020.gdb/intersected_gaul_erased,iso3_1,-1,-1")
arcpy.Merge_management([iso3_no_multi, intersected_gaul_erased],
                       wdpa_multi_iso3_together)
print("Features multi iso3 processed")
Example #25
arcpy.Dissolve_management('thie', 'thie_d')
arcpy.MakeFeatureLayer_management('thie_pts', 'lyr_pts')
arcpy.SelectLayerByLocation_management('lyr_pts', 'WITHIN_CLEMENTINI', 'thie_d')
arcpy.DeleteFeatures_management('lyr_pts')
 
# generate thiessen polygons
arcpy.CreateThiessenPolygons_analysis('thie_pts', 'thie_polys', 'ALL')
arcpy.env.outputCoordinateSystem = sr_gcs
arcpy.Dissolve_management('thie_polys', 'thie_polys_d', ['basin_name'])

arcpy.Erase_analysis('thie_polys_d', 'basins_m', 'thie_polys_d_e')
arcpy.Merge_management(['thie_polys_d_e','basins_m'], 'thie_polys_d_e_m')
arcpy.Dissolve_management('thie_polys_d_e_m', 'thie_polys_d_e_m_d', ['rgn_id','rgn_name'])

# intersect expanded basins with eez's
arcpy.Intersect_analysis(['sp_gcs','thie_polys_d_e_m_d'], 'sp_thie_m')
arcpy.AddField_management('eez_basins_m', 'rgn_name', 'TEXT')
arcpy.CalculateField_management('eez_basins_m', 'rgn_name', "'%s_%s' % (!eez_name!, !basin_name!)", 'PYTHON_9.3')
arcpy.Dissolve_management('eez_basins_m', 'eez_basins', ['eez_name','basin_name','rgn_name'])
arcpy.AddField_management('eez_basins', 'rgn_id', 'SHORT')
arcpy.CalculateField_management('eez_basins', 'rgn_id', "!OBJECTID!", 'PYTHON_9.3')

# add area
arcpy.AddField_management('eez_basins', 'area_km2', 'DOUBLE')
arcpy.CalculateField_management('eez_basins', 'area_km2', '!shape.area@SQUAREKILOMETERS!', 'PYTHON_9.3')



# ? rgn_type of land or ocean?

Example #26
    in_las_dataset=groundLyr,
    out_raster=ground_files['rawraster'],
    value_field='ELEVATION',
    interpolation_type=
    "TRIANGULATION NATURAL_NEIGHBOR NO_THINNING CLOSEST_TO_MEAN 0",
    # 'TRIANGULATION Linear {point_thinning_type} {point_selection_method} {resolution}',
    # previous selection method was MAXIMUM
    data_type='FLOAT',
    sampling_type='CELLSIZE',
    sampling_value=cell_edge_length,
    z_factor=z_factor)

if has_clipping:
    merged_intersect = os.path.join(extraction_folder,
                                    'footprint_intersection.shp')
    arcpy.Intersect_analysis(in_features=[footprint_name, clipping_file],
                             out_feature_class=merged_intersect)
    cutter = merged_intersect
else:
    cutter = footprint_name

arcpy.Clip_management(surface_files['rawraster'], "#", surface_files['raster'],
                      cutter, "0", "ClippingGeometry")
arcpy.Clip_management(ground_files['rawraster'], "#", ground_files['raster'],
                      cutter, "0", "ClippingGeometry")
arcpy.Delete_management(surface_files['rawraster'])
arcpy.Delete_management(ground_files['rawraster'])

# arcpy.management.Delete(surfaceLyr)
# arcpy.management.Delete(groundLyr)

arcpy.AddMessage("Generating DHM")
Example #27
    arcpy.CalculateField_management(product_db + "/smoothedFlowlines" + region,
                                    "LengthKM", "!SHAPE.LENGTH@KILOMETERS!",
                                    "PYTHON")

    # The next section is computationally intensive and may throw an error if the system requirements are not sufficient.

    # Detailed Lines
    # ==============
    # Creates one feature per reach
    dissolveLines = arcpy.Dissolve_management(
        highResLines, workspace_db + "/dissolve" + region, "#", "#",
        "MULTI_PART", "UNSPLIT_LINES")

    # Establish link between flowlines and catchments
    intersect = arcpy.Intersect_analysis([dissolveLines, catchments],
                                         workspace_db + "/intersect" + region,
                                         "ALL", "#", "INPUT")

    # Length of each flowline per catchment
    sumTable = arcpy.Statistics_analysis(
        intersect, workspace_db + "/sumLengths" + region,
        [["Shape_Length", "SUM"]], "FID_dissolve" + region + ";FEATUREID")

    # The maximum flowline piece
    maxTable = arcpy.Statistics_analysis(sumTable,
                                         workspace_db + "/maxLengths" + region,
                                         [["SUM_Shape_Length", "MAX"]],
                                         "FID_dissolve" + region)

    # Pair the FEATUREID with the Reach ID
    joinStats = arcpy.JoinField_management(maxTable, "MAX_SUM_Shape_Length",
Example #28
# - Export results to a feature class called link_components

point_out_new = r'\link_components'
if arcpy.Exists(point_out_new):
    arcpy.Delete_management(point_out_new)

arcpy.CopyFeatures_management(points, geodb + point_out_new)

# - Intersect the points with the links to get the edge IDs
inFeatures = ["link_components", "links"]
intersectOutput = "link_components_full"
if arcpy.Exists(intersectOutput):
    arcpy.Delete_management(intersectOutput)
clusterTolerance = 1.5
arcpy.Intersect_analysis(inFeatures, intersectOutput, "", clusterTolerance,
                         "point")

# - Intersect with a raster to get elevation
# - import elevation raster from W:/geodata/raster/dem30m
#     - this is the raster of elevations at 30 m fidelity
#     - note that elevation values are in METERS
# Note that spatial analyst must be active for this portion

# For some unknown reason, the program always crashes when executing ExtractValuesToPoints.
# The license is checked out successfully, but the tool cannot be executed, so the script had
# to be split at this point into two smaller scripts, which work.

print 'start to pull elevation'
in_point_features = geodb + r'\link_components_full'
out_point_features = geodb + r'\link_components_elevation'
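# For reference, the step the comments above describe -- a sketch assuming the
# Spatial Analyst extension is available; the raster path comes from the
# comment above:
arcpy.CheckOutExtension("Spatial")
arcpy.sa.ExtractValuesToPoints(in_point_features,
                               r"W:/geodata/raster/dem30m",
                               out_point_features)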
if arcpy.Exists(out_point_features):
    arcpy.Delete_management(out_point_features)
Example #29

#split soils by MLRA with Acres
#A. Stephens
#09/18/2014
#This tool splits soils by MLRA, calculates acres, and exports to excel spreadsheet.

import arcpy

arcpy.env.overwriteOutput = True

inFC = arcpy.GetParameterAsText(0)
output = arcpy.GetParameterAsText(1)
out_xls = arcpy.GetParameterAsText(2)

#INTERSECT

arcpy.Intersect_analysis(inFC, "outFC", "ALL", "", "")

dissolveFields = ["AREASYMBOL", "MUSYM", "MLRARSYM"]
#Dissolve Features
arcpy.Dissolve_management("outFC", "outFCDISSOLVE", dissolveFields)

#Add Field (ACRES must exist before the Calculate Field step below; DOUBLE avoids truncating acreage)
arcpy.AddField_management("outFCDISSOLVE", "ACRES", "DOUBLE")

#Calculate Field
arcpy.CalculateField_management(
    "outFCDISSOLVE",
    "ACRES",
    '!Shape.area@ACRES!',
    "PYTHON_9.3",
)
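#The header comment promises an Excel export, but out_xls is never used above.
#A plausible finishing step (an assumption, not part of the source):
arcpy.TableToExcel_conversion("outFCDISSOLVE", out_xls)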
Example #30
g = Graph(roads, id, avg_Speed, direction)
#Extract the points from the targets feature class
points = []
with arcpy.da.SearchCursor(targets, ["SHAPE@X", "SHAPE@Y"]) as sc:
    for row in sc:
        points.append([row[0], row[1]])
#Find the start and end nodes in the graph
begin = g.search(points[0])
end = g.search(points[1])
arcpy.AddMessage(str(begin) + " " + str(end))
#Compute the route
path = g.make_path(begin, end, [algorithm, ignore_direct, time_or_dist])
#Point the workspace at the target dataset
prev_work = arcpy.env.workspace
arcpy.env.workspace = dat
#Write the path out as a shapefile
wizualizacja(roads, path, file_path, id)
#If the target is not the end point itself, convert it to a line
if target != targets:
    temp = "toLine"
    arcpy.FeatureToLine_management([target], temp)
    target = temp
#Locate the destination on the polygon boundary
arcpy.Intersect_analysis([file_path, target], file_target, "ONLY_FID", None,
                         "POINT")

if target == "toLine":
    arcpy.Delete_management(target)
#Switch the workspace back
arcpy.env.workspace = prev_work