Example No. 1
 def onSelChange(self, selection):
     self.value = selection
     desc = arcpy.Describe(selection)
     path = desc.path
     extension = desc.extension
     self.layerpath = str(path) + "/" + selection + '.' + str(extension)
     self.refresh()
     point_tool.enabled = True
     if len(arcpy.ListFields(self.layerpath, 'START_X')) > 0:
         print('Data already prepared')
     else:
         print('Preparing data...')
         arcpy.AddGeometryAttributes_management(
             self.layerpath, ["LENGTH", "LINE_START_MID_END"])
         print('Data prepared')
def add_basic_geometry_attr(polygon, suffix=""):
    arcpy.AddGeometryAttributes_management(polygon,
                                           "AREA;PERIMETER_LENGTH;CENTROID",
                                           Length_Unit="KILOMETERS",
                                           Area_Unit="SQUARE_KILOMETERS")
    rename_field(polygon, "POLY_AREA", "AREA%s" % suffix)
    rename_field(polygon, "PERIMETER", "PERIM%s" % suffix)
    rename_field(polygon, "CENTROID_X", "CNTRX%s" % suffix)
    rename_field(polygon, "CENTROID_Y", "CNTRY%s" % suffix)
    return [
        "AREA%s" % suffix,
        "PERIM%s" % suffix,
        "CNTRX%s" % suffix,
        "CNTRY%s" % suffix
    ]
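The rename_field helper used above is not part of the snippet; a minimal sketch, assuming it is a thin wrapper around arcpy.AlterField_management:

def rename_field(table, old_name, new_name):
    # Hypothetical helper assumed by the snippet above: rename a field in place.
    # AlterField only applies when the names actually differ.
    if old_name != new_name:
        arcpy.AlterField_management(table, old_name, new_name, new_field_alias=new_name)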
def strand_conduit():
    '''
    Calculate footage for strand and conduit in requirement #2. Calculations are identical.
    :return:
    Write footage to template
    '''

    # Strand, calculated from fiber field where strand_built = 'N'
    exp = "subtypecode = 2 AND strand_built = 'N'"
    strandFiber = scratch + '\\' + 'strandFiber'
    arcpy.Select_analysis(scratch + '\\' + 'olt_fiber', strandFiber, exp)
    values = [row[0] for row in arcpy.da.SearchCursor(strandFiber, ['subtypecode'])]
    if not values:
        lenStrand = 0
    else:
        arcpy.Dissolve_management(strandFiber, scratch + '\\' + 'strandFiber_Dissolve', 'subtypecode')
        arcpy.AddGeometryAttributes_management(scratch + '\\' + 'strandFiber_Dissolve', 'LENGTH_GEODESIC', 'FEET_US')
        # sum the geodesic lengths of the dissolved strand segments
        lenStrand = sum(row[0] for row in arcpy.da.SearchCursor(scratch + '\\' + 'strandFiber_Dissolve', ['LENGTH_GEO']))

    # Conduit, calculated from ugFiber where strand_built = 'N'
    exp = "subtypecode = 1 AND strand_built = 'N'"
    strandConduit = scratch + '\\' + 'strandConduit'
    arcpy.Select_analysis(scratch + '\\' + 'olt_fiber', strandConduit, exp)
    values = [row[0] for row in arcpy.da.SearchCursor(strandConduit, ['subtypecode'])]
    if not values:
        lenConduit = 0
    else:
        arcpy.Dissolve_management(strandConduit, scratch + '\\' + 'strandConduit_Dissolve', 'subtypecode')
        arcpy.AddGeometryAttributes_management(scratch + '\\' + 'strandConduit_Dissolve', 'LENGTH_GEODESIC', 'FEET_US')
        # sum the geodesic lengths of the dissolved conduit segments
        lenConduit = sum(row[0] for row in arcpy.da.SearchCursor(scratch + '\\' + 'strandConduit_Dissolve', ['LENGTH_GEO']))

    ws['B4'] = lenStrand + lenConduit
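strand_conduit relies on module-level names that are not shown: scratch (a scratch workspace path containing olt_fiber) and ws (the worksheet the footage is written to). A minimal setup sketch under those assumptions, using openpyxl for the workbook:

import arcpy
from openpyxl import load_workbook

scratch = arcpy.env.scratchGDB                 # assumed scratch workspace holding olt_fiber
wb = load_workbook(r'C:\temp\template.xlsx')   # hypothetical report template
ws = wb.active                                 # worksheet that receives the footage in cell B4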
Example No. 4
    def points_from_line_fc(self, line_fc, interval_ft):
        """Make a list of X/Y coordinates that the ORS API can take in and build isochrones around.
            Example: If you want to build isochrones every 1,000 feet along a 1-mile project line,
            you would specify interval_ft = 1000

        Returns: lists of X/Y coordinate batches that can be fed into ORS API to generate isochrones around each X/Y point
        """

        # ORS needs WGS84 (WKID 4326) coordinate system to generate isochrones
        sref_wgs84 = arcpy.SpatialReference(4326)

        batch_size = 5  # As of 12/12/2021, each ORS call can generate isochrones for up to 5 points.

        # make temporary feature class of points at regular intervals along lines
        # FYI, time permitting, the shapely library has some options for doing this that *might* be faster than ESRI tool
        temp_pt_fc = os.path.join("memory", "TEMP_pts")  # arcpy.env.scratchGDB
        arcpy.management.GeneratePointsAlongLines(
            line_fc,
            temp_pt_fc,
            "DISTANCE",
            Distance=f"{interval_ft} feet",
            Include_End_Points="END_POINTS")

        # calc x/y coords in WGS84 (WKID 4326) for compatibility with ORS API
        pt_fl = "pt_fl"
        arcpy.MakeFeatureLayer_management(temp_pt_fc, pt_fl)
        arcpy.AddGeometryAttributes_management(
            Input_Features=pt_fl,
            Geometry_Properties=['POINT_X_Y_Z_M'],
            Coordinate_System=sref_wgs84)

        # make array of [lon, lat] pairs at regular intervals along the line
        line_pts = []
        with arcpy.da.SearchCursor(pt_fl, ["POINT_X", "POINT_Y"]) as cur:
            for row in cur:
                lon = row[0]
                lat = row[1]
                pt_coords = [lon, lat]
                line_pts.append(pt_coords)

        # batch the points into groups of batch_size, because the ORS API cannot process more than 5 points in a single call
        line_pts_batched = [
            line_pts[i:i + batch_size]
            for i in range(0, len(line_pts), batch_size)
        ]

        return line_pts_batched
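A hedged usage sketch (the instance name, feature class path, and interval below are hypothetical):

# batches of at most 5 [lon, lat] pairs, ready for the ORS isochrone endpoint
batches = isochrone_maker.points_from_line_fc(r"C:\data\project.gdb\project_line", interval_ft=1000)
for batch in batches:
    print(len(batch))  # never more than batch_size points per ORS call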
def polygon_processing(mainDir):
    walk = arcpy.da.Walk(mainDir, topdown=True)

    for folderPath, folderNames, fileNames in walk:

        if 'CN' in folderPath:
            workspace = env.workspace = folderPath

            # fileNames is a list of the rasters inside each folder. Every time we create a new raster
            # the list grows by one, so we use the index variable to always reference the newest raster.

            index = len(fileNames) - 1

            if fileNames[index].endswith('_stnull.tif'):

                stNull_Grid = arcpy.Raster(fileNames[index])

                # Define the output name and path
                polygon_Name = os.path.basename(workspace) + 'poly.shp'
                polygon_fullPath = os.path.join(workspace, polygon_Name)

                # Raster to polygon conversion
                polygon_Depth = arcpy.RasterToPolygon_conversion(
                    stNull_Grid, polygon_fullPath, "NO_SIMPLIFY",
                    "VALUE")  # convert raster to polygon

                # Set the spatial reference and calculate the polygon area in square meters.
                sp_Ref = arcpy.env.outputCoordinateSystem = arcpy.Describe(
                    polygon_Depth).spatialReference
                polygon_geometry = arcpy.AddGeometryAttributes_management(
                    polygon_Depth, "AREA", "METERS", "SQUARE_METERS", sp_Ref)

                # Spatial Join the Water depth polygon with streams layer --> Intersect with 2m distance
                targetFeature = polygon_Depth
                joinFeature = streams
                # define the output spatial join name and path
                outputName, file_extension = os.path.splitext(
                    os.path.basename(polygon_fullPath)
                )  # isolate only the filename without any prefix of the shape file
                outputName = outputName + '_SpJoin.shp'
                SpJoin_output_path = os.path.join(workspace, outputName)
                SpatialJoin_with_Streams = arcpy.SpatialJoin_analysis(
                    targetFeature, joinFeature, SpJoin_output_path,
                    "JOIN_ONE_TO_ONE", "KEEP_ALL", "#", "INTERSECT",
                    "2 Meters", "")

                print(SpatialJoin_with_Streams)
Example No. 6
def criminalityIndex(table, neighborhoods):
    inputTable = arcpy.ExcelToTable_conversion(table, "criminalityTable", "")
    crimeLayer = arcpy.MakeFeatureLayer_management(neighborhoods,
                                                   "crime_layer")

    #Joining the crime table with the CBS neighborhood dataset table
    arcpy.JoinField_management(crimeLayer, "statcode", inputTable, "STATCODE",
                               ["inwoners", "crimeTotal", "relatCrime"])
    criminalityFC = arcpy.CopyFeatures_management(neighborhoods,
                                                  "criminality2015")
    arcpy.DeleteField_management(
        criminalityFC, ["AREA", "POLY_AREA", "standStep1", "standCrime"])
    arcpy.AddGeometryAttributes_management(criminalityFC, "AREA", "",
                                           "SQUARE_KILOMETERS")

    #Calculating the population density on a neighborhood level as equal input for the standardized crime rate
    arcpy.AddField_management(criminalityFC, "standStep1", "FLOAT", 10, 4, "",
                              "", "NULLABLE")
    arcpy.CalculateField_management(criminalityFC, "standStep1",
                                    "[INWONERS] / [POLY_AREA]")

    #Calculating a standardized crime rate by dividing the total amount of crimes by the population density
    arcpy.AddField_management(criminalityFC, "standCrime", "FLOAT", 10, 4, "",
                              "", "NULLABLE")
    #To prevent overflow errors, it is avoided that there is a division by 0
    expression1 = "Calculate(!crimeTotal!, !standStep1!, !standCrime!)"
    codeblock1 = """def Calculate(x, y, z):
        if (x != 0 and y != 0):
            z = x / y
        elif (x == 0 or y == 0):
            z = 0
        return z """
    arcpy.CalculateField_management(criminalityFC, "standCrime", expression1,
                                    "PYTHON_9.3", codeblock1)
    """
    arcpy.AddField_management(criminalityFC, "crime", "FLOAT", 10, 4, "", "", "NULLABLE")    
    expression2 = "Calculate(!relatCrime!, !crime!)"
    codeblock2 = "def Calculate(x, y):
        if (x == '.'):
            y = 0
        elif (x != '.'):
            y = x
        return y"
    arcpy.CalculateField_management(criminalityFC, "crime", expression2, "PYTHON_9.3", codeblock2)
    arcpy.DeleteField_management(criminalityFC, ["INWONERS_1", "STATCODE_1", "CRIMETOT_1"])
    """
    return criminalityFC
Example No. 7
def split_line(workspacePath, tempGDB, inputFlsplit, splitDistance):
    import arcpy
    import os

    arcpy.env.workspace = workspacePath
    includeEndPoints = 'NO_END_POINTS'

    arcpy.env.overwriteOutput = True

    mxd = arcpy.mapping.MapDocument('CURRENT')
    df = arcpy.mapping.ListDataFrames(mxd)[0]

    #Generate points along line for split
    arcpy.SetProgressorLabel("Generating points to split by...")
    outputPointFC = os.path.join(tempGDB, inputFlsplit + "Points")
    arcpy.GeneratePointsAlongLines_management(
        inputFlsplit,
        outputPointFC,
        'DISTANCE',
        splitDistance,
        Include_End_Points=includeEndPoints)

    #Split line into separate segments based on the previous points, with a 1 meter search radius around each point
    arcpy.SetProgressorLabel("Splitting initial line into segments...")
    outputFCsplit = os.path.join(tempGDB, inputFlsplit + "Split")
    arcpy.SplitLineAtPoint_management(inputFlsplit,
                                      outputPointFC,
                                      outputFCsplit,
                                      search_radius='1 Meters')

    # Get filename part of outputFCsplit
    flName = os.path.basename(outputFCsplit)

    #Create a feature layer to be used for AddGeometryAttributes
    arcpy.MakeFeatureLayer_management(outputFCsplit, flName)

    #Create a layer object from the feature layer
    #tempLayer = arcpy.mapping.Layer(flName)

    #Add attributes for line start,mid, and end to determine order of segments for new feature classes; for some reason line segments are not in proper order
    arcpy.AddGeometryAttributes_management(flName, "LINE_START_MID_END")

    #Add layer to the map
    #arcpy.mapping.AddLayer(df,tempLayer)

    return outputFCsplit
 def polygonCalculation(self):
     #Creating new file with information about classification, delete field with class
     arcpy.PolygonNeighbors_analysis(
         self.dataSource, self.filename,
         [self.fieldName, "ClassNumber", "Shape_Area"], "NO_AREA_OVERLAP",
         "NO_BOTH_SIDES")
     Disolve = "ROBOCZY\\agregacja2.shp"
     arcpy.Dissolve_management(self.dataSource, Disolve, "ClassNumber",
                               "ClassNumber COUNT", "SINGLE_PART",
                               "UNSPLIT_LINES")
     arcpy.AddGeometryAttributes_management(Disolve, "AREA", "",
                                            "SQUARE_METERS", "")
     arcpy.DeleteField_management(self.dataSource, "ClassNumber")
     #Getting the value of the field in Disolve
     self.AreaHomogeneousClass = []
     with arcpy.da.SearchCursor(Disolve, ["POLY_AREA"]) as rows:
         for row in rows:
             self.AreaHomogeneousClass.append(row[0])
     return self.AreaHomogeneousClass
    def add_itm_cor_to_csv_file(self):
        fc_file = self.name_gis
        arcpy.management.XYTableToPoint(self.gps_file, fc_file,
                                        "lon", "lat", coordinate_system=arcpy.SpatialReference(4326))

        obj_name_pro = fc_file + '_pro'

        # Process: Project
        arcpy.Project_management(in_dataset=fc_file, out_dataset=obj_name_pro,
                                 out_coor_system=arcpy.SpatialReference(2039),
                                 transform_method="WGS_1984_To_Israel_CoordFrame",
                                 in_coor_system=arcpy.SpatialReference(4326),
                                 preserve_shape="NO_PRESERVE_SHAPE", max_deviation="", vertical="NO_VERTICAL")

        # Process: Add Geometry Attributes
        arcpy.AddGeometryAttributes_management(Input_Features=obj_name_pro,
                                               Geometry_Properties="POINT_X_Y_Z_M", Length_Unit="METERS", Area_Unit="",
                                               Coordinate_System="")
        # Process: Table To Table
        arcpy.TableToTable_conversion(obj_name_pro, self.workspace_csv_progress, fc_file + 'itm.csv')
Example No. 10
    def createPolygon(self, lat, lon, extent, out_polygons, scratchWorkspace):
        """Create a Thiessen polygon feature class from numpy.ndarray lat and lon
           Each polygon represents the area described by the center point
        """
        buffer = 2 * max(abs(lat[0] - lat[1]), abs(lon[0] - lon[1]))
        # Extract the lat and lon within buffered extent (buffer with 2* interval degree)
        lat0 = lat[(lat >= (extent.YMin - buffer))
                   & (lat <= (extent.YMax + buffer))]
        lon0 = lon[(lon >= (extent.XMin - buffer))
                   & (lon <= (extent.XMax + buffer))]
        # Spatial reference: GCS_WGS_1984
        sr = arcpy.SpatialReference(4326)

        # Create a list of geographic coordinate pairs
        count_lon = len(lon0)
        count_lat = len(lat0)
        pointList = []
        for i in range(0, count_lon):
            for j in range(0, count_lat):
                pointList.append([float(lon0[i]), float(lat0[j])])

        pointGeometryList = []
        for pt in pointList:
            point = arcpy.Point()
            point.X = pt[0]
            point.Y = pt[1]
            pointGeometry = arcpy.PointGeometry(point, sr)
            pointGeometryList.append(pointGeometry)

        # Create a point feature class with longitude in Point_X, latitude in Point_Y
        out_points = os.path.join(scratchWorkspace, 'points_subset')
        result2 = arcpy.CopyFeatures_management(pointGeometryList, out_points)
        out_points = result2.getOutput(0)
        arcpy.AddGeometryAttributes_management(out_points, 'POINT_X_Y_Z_M')

        # Create Thiessen polygon based on the point feature
        result3 = arcpy.CreateThiessenPolygons_analysis(
            out_points, out_polygons, 'ALL')
        out_polygons = result3.getOutput(0)

        return out_points, out_polygons
Example No. 11
    def get_gemeinde(self, tfl, id_column, max_dist):
        """Verschneide Teilflächen mit Gemeinde"""
        # to do (Stefaan)
        arcpy.SetProgressorLabel('Verschneide Teilflächen mit Gemeinde')
        arcpy.SetProgressorPosition(10)

        # calculate Gauß-Krüger-Coordinates and append them to tfl
        arcpy.AddGeometryAttributes_management(
            Input_Features=tfl, Geometry_Properties="CENTROID_INSIDE")



        # Check if the distances between the centroids is smaller than max_dist
        toolbox = self.parent_tbx
        XY_INSIDE = toolbox.query_table("Teilflaechen_Plangebiet",
                                        ['INSIDE_X', 'INSIDE_Y'])
        INSIDE_X = [row[0] for row in XY_INSIDE]
        INSIDE_Y = [row[1] for row in XY_INSIDE]
        self._project_centroid = (np.mean(INSIDE_X), np.mean(INSIDE_Y))
        distances = []
        if len(XY_INSIDE) > 1:
            for i in range(len(XY_INSIDE)):
                for j in range(i):
                    dist = np.linalg.norm(np.subtract(XY_INSIDE[i], XY_INSIDE[j]))
                    distances.append(dist)
            if distances and max(distances) > max_dist:
                raise Exception("Der Abstand zwischen den Schwerpunkten der "
                                "Teilflächen darf nicht größer "
                                "als {} m sein!".format(max_dist))

        # get AGS and Gemeindename and check if AGS is unique
        ags_gen = get_ags(tfl, id_column)
        ags_project = [ID[0] for ID in ags_gen.values()]
        gen_project =  [ID[1] for ID in ags_gen.values()]
        if len(np.unique(ags_project)) != 1:
            raise Exception("Die Teilflächen müssen in der selben Gemeinde"
                            "liegen")

        return ags_project[0], gen_project[0]
Example No. 12
def findUtmZone(inShp, ddir):

    # Get UTM zone of AOI and return an arcpy SR object

    bname = os.path.basename(inShp).replace('.shp', '')

    UTM = r'E:\MaggieData\General Reference Data\UTM_Zone_Boundaries\UTM_Zone_Boundaries.shp'

    # Clip input with UTM
    outUtmClip = os.path.join(ddir, '{}__clippedUTM.shp'.format(bname))
    arcpy.Clip_analysis(UTM, inShp, outUtmClip, "")

    # Add area column and calculate it
    arcpy.AddGeometryAttributes_management(outUtmClip, "AREA_GEODESIC", "", "",
                                           "")

    # Now find clipped UTM zone polygon with largest area
    maxArea = 0
    features = arcpy.SearchCursor(outUtmClip)
    for feat in features:
        area = feat.getValue("AREA_GEO")
        if area > maxArea:
            maxArea = area
            zone, hemi = feat.getValue('Zone_Hemi').split(',')

    # Now configure the EPSG code
    if hemi.upper() == 'N':  # 326 + zone for N
        epsgCode = '326{}'.format(zone.zfill(2))
    else:  # 327 + zone for S
        epsgCode = '327{}'.format(zone.zfill(2))
    """ If usine proj4 string:
        proj4 = '+proj=utm +zone={} +ellps=WGS84 +datum=WGS84 +units=m +no_defs'.format(zone)
        if hemi.upper() == 'S': proj4 += ' +south'
    """

    #print " UTM Zone: EPSG:{}".format(epsgCode)

    # And create SR object to return for project
    return arcpy.SpatialReference(int(epsgCode))
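A usage sketch (the shapefile and output directory below are hypothetical):

sr_utm = findUtmZone(r"C:\data\aoi.shp", r"C:\temp")
print(sr_utm.factoryCode)  # e.g. 32617 for UTM zone 17N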
Example No. 13
 def surface_in_between(self):
     arcpy.AddMessage("Started differential surface statistics... ")
     self.report.write("\n---Differential surface---\n")
     temp_merge = "in_memory\\merge"
     arcpy.Merge_management([self.original, self.simplified], temp_merge)
     temp_poly = "in_memory\\poly"
     arcpy.FeatureToPolygon_management(temp_merge, temp_poly)
     arcpy.Delete_management(temp_merge)
     arcpy.AddGeometryAttributes_management(temp_poly, "AREA", "#",
                                            "SQUARE_METERS")
     with arcpy.da.SearchCursor(temp_poly, ["POLY_AREA"]) as cursor:
         summarize = 0
         parts = 0
         for row in cursor:
             summarize += row[0]
             parts += 1
     mean = summarize / parts
     arcpy.Delete_management(temp_poly)
     self.report.write("Sum area: {:0.0f} sq m\n".format(summarize))
     self.report.write("Mean area: {:0.0f} sq m\n".format(mean))
     arcpy.AddMessage("Differential surface statistics - done.")
     return
def main(path):
    # Open a file folder
    dirs = os.listdir(path)
    county = join(path, "nhgis0041_shape", "nhgis0041_shape",
                  "US_county_2010.shp")
    env.workspace = path
    env.overwriteOutput = True
    for file in dirs:
        if file.endswith('.shp'):
            drought = file
            inFeatures = [county, drought]
            outFeatures = join("..", "data", "drought_monitor",
                               "processed_drought_monitor_data", "temp.shp")
            results = join("..", "data", "drought_monitor",
                           "processed_drought_monitor_data", "csv",
                           "csv" + file[:-4] + ".xls")
            # Process: Repair the geometry
            arcpy.RepairGeometry_management(drought)
            # Process: Add Field
            arcpy.AddField_management(drought, "DM_mod", "LONG", "", "", "",
                                      "", "NULLABLE", "NON_REQUIRED", "")
            # Process: Calculate Field, add 1 to each drought monitor class to differentiate
            # drought area with the rest (DM_mod = DM+1)
            arcpy.CalculateField_management(drought, "DM_mod", "!DM! +1",
                                            "PYTHON", "")
            # Process: Union the data
            arcpy.Union_analysis(inFeatures, outFeatures, "ALL", "", "GAPS")
            # Process: Add Geometry Attributes, calculate the area (square miles) of each drought monitor class in each county in field "POLY_AREA"
            arcpy.AddGeometryAttributes_management(
                outFeatures, "AREA", "", "SQUARE_MILES_US",
                "PROJCS['USA_Contiguous_Albers_Equal_Area_Conic',GEOGCS['GCS_North_American_1983',DATUM['D_North_American_1983',SPHEROID['GRS_1980',6378137.0,298.257222101]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Albers'],PARAMETER['False_Easting',0.0],PARAMETER['False_Northing',0.0],PARAMETER['Central_Meridian',-96.0],PARAMETER['Standard_Parallel_1',29.5],PARAMETER['Standard_Parallel_2',45.5],PARAMETER['Latitude_Of_Origin',37.5],UNIT['Meter',1.0]]"
            )
            # Process: Table To Excel
            arcpy.TableToExcel_conversion(outFeatures, results, "ALIAS",
                                          "CODE")
            print(file)
Example No. 15
try:
    # ---- Select Layer By Location and save it -----
    arcpy.MakeFeatureLayer_management(blockgroups, lyr_blockgroups)
    arcpy.SelectLayerByLocation_management(lyr_blockgroups, "HAVE_THEIR_CENTER_IN", boundrary, "", "NEW_SELECTION")
    matchcount = int(arcpy.GetCount_management(lyr_blockgroups)[0])
    if matchcount == 0:
        exit()
    else:
        arcpy.CopyFeatures_management(lyr_blockgroups, se_blockgroups)

    # ---- Intersect roads and selected blockgroups -----
    arcpy.Intersect_analysis(intersect_infc, roads_intersect, "ALL", "", "INPUT")

    # ---- Add Geometry Attributes to convert roads to miles -----
    arcpy.AddGeometryAttributes_management(roads_intersect, "LENGTH", "MILES_US")

    # ---- Summary Statistics calculate sum of roads in each selected blockgroup -----
    arcpy.Statistics_analysis(roads_intersect, roads_calculate, statistics_fd, "GEOID10")

    # ---- Buffer major roads -----
    arcpy.Buffer_analysis(roads, out_buffer, buffer_dist, "FULL", "ROUND", "ALL", "", "PLANAR")

    # ---- Clip the crashes points -----
    arcpy.Clip_analysis(crashes, out_buffer, out_clip)

    # ---- Spatial Join blockgroups and clip crashes -----
    arcpy.SpatialJoin_analysis(se_blockgroups, out_clip, out_spatialjoin, "JOIN_ONE_TO_ONE", "KEEP_ALL", "#", "INTERSECT")

    # ---- Join Field put each blockgroup's road length and crashes together -----
    arcpy.JoinField_management(roads_calculate, "GEOID10", out_spatialjoin, "GEOID10", "Join_Count")
#Process: Union (join the colony layer w/ the thermal colonies)
allCol = scratchWS + "\\col_buff_mask_temp.shp"
arcpy.Union_analysis([nircolonies, ThermCol], allCol, "ALL", "", "GAPS")

#Make the Buffered Mask
#Process: Buffer Penguin Colonies
BuffMask = scratchWS + "\\nircolony_buff.shp"
arcpy.Buffer_analysis(allCol, BuffMask, "0.5 Meters", "FULL", "ROUND", "NONE",
                      "", "PLANAR")
#Process: Calculate Field for DISS field (so can dissolve the thermal and NIR colonies)
arcpy.CalculateField_management(BuffMask, "DISS", "1", "VB", "")
#Process: Dissolve
ColBuffMask = scratchWS + "\\col_buff_mask.shp"
arcpy.Dissolve_management(BuffMask, ColBuffMask, "DISS", "", "SINGLE_PART", "")
#Process: Add Geometry Attributes (needed for density calculations at the end)
arcpy.AddGeometryAttributes_management(ColBuffMask, "AREA", "",
                                       "SQUARE_METERS", "")

#Make the Unbuffered Mask
#Process: Calculate Field for DISS field (so can dissolve the thermal and NIR colonies)
arcpy.CalculateField_management(allCol, "DISS", "1", "VB", "")
#Process: Dissolve
ColMask = scratchWS + "\\col_mask.shp"
arcpy.Dissolve_management(allCol, ColMask, "DISS", "", "SINGLE_PART", "")
#Process: Add Geometry Attributes (needed for density calculations at the end)
arcpy.AddGeometryAttributes_management(ColMask, "AREA", "", "SQUARE_METERS",
                                       "")

#Process: Extract by Mask (extract colonies from thermal raster)
therm_col = scratchWS + "\\therm_colonies.img"
arcpy.gp.ExtractByMask_sa(inputFile, ColBuffMask, therm_col)
    def execute(self, parameters, messages):
        """The source code of the tool."""
        arcpy.env.overwriteOutput = True

        scratchWorkspace = arcpy.env.scratchWorkspace
        if not scratchWorkspace:
            scratchWorkspace = arcpy.env.scratchGDB

        in_nc = parameters[0].valueAsText
        in_nc_lat_var = parameters[1].valueAsText
        in_nc_lon_var = parameters[2].valueAsText
        in_rapid_connect_file = parameters[3].valueAsText
        in_catchment = parameters[4].valueAsText
        streamID = parameters[5].valueAsText
        out_WeightTable = parameters[6].valueAsText

        # validate the netcdf dataset
        self.dataValidation(in_nc, messages)

        # Obtain catchment extent in lat and lon in GCS_WGS_1984
        sr_cat = arcpy.Describe(in_catchment).SpatialReference
        extent = arcpy.Describe(in_catchment).extent
        #if (sr_cat.name == 'World_Equidistant_Cylindrical'):
        if (sr_cat.name == 'GCS_WGS_1984'):
            extent = extent
        else:
            envelope = os.path.join(scratchWorkspace, 'envelope')
            result0 = arcpy.MinimumBoundingGeometry_management(
                in_catchment, envelope, 'ENVELOPE', 'ALL')
            envelope = result0.getOutput(0)
            sr_out = arcpy.SpatialReference(4326)  # GCS_WGS_1984
            envelope_proj = os.path.join(scratchWorkspace, 'envelope_proj')
            result1 = arcpy.Project_management(envelope, envelope_proj, sr_out)
            envelope_proj = result1.getOutput(0)
            extent = arcpy.Describe(envelope_proj).extent

        #Open nc file
        data_nc = NET.Dataset(in_nc)

        # Obtain geographic coordinates
        self.lsm_lon_array = data_nc.variables[
            in_nc_lon_var][:]  #assume [-180, 180]
        self.lsm_lat_array = data_nc.variables[
            in_nc_lat_var][:]  #assume [-90,90]
        data_nc.close()

        #convert 3d to 2d if time dimension
        if (len(self.lsm_lon_array.shape) == 3):
            self.lsm_lon_array = self.lsm_lon_array[0]
            self.lsm_lat_array = self.lsm_lat_array[0]

        # Create Thiessen polygons based on the points within the extent
        arcpy.AddMessage("Generating Thiessen polygons...")
        polygon_thiessen = os.path.join(scratchWorkspace, 'polygon_thiessen')

        result4 = self.createPolygon(extent, polygon_thiessen,
                                     scratchWorkspace)
        polygon_thiessen = result4[1]

        # Intersect the catchment polygons with the Thiessen polygons
        arcpy.AddMessage("Intersecting Thiessen polygons with catchment...")
        intersect = os.path.join(scratchWorkspace, 'intersect')
        result5 = arcpy.Intersect_analysis([in_catchment, polygon_thiessen],
                                           intersect, 'ALL', '#', 'INPUT')
        intersect = result5.getOutput(0)

        # Calculate the geodesic area in square meters for each intersected polygon (no need to project if it's not projected yet)
        arcpy.AddMessage("Calculating geodesic areas...")
        arcpy.AddGeometryAttributes_management(intersect, 'AREA_GEODESIC', '',
                                               'SQUARE_METERS', '')

        # Calculate the total geodesic area of each catchment based on the contributing areas of points
        fields = [streamID, 'POINT_X', 'POINT_Y', 'AREA_GEO']
        area_arr = arcpy.da.FeatureClassToNumPyArray(intersect, fields)

        arcpy.AddMessage("Writing the weight table...")
        #get list of COMIDs in rapid_connect file so only those are included in computations
        connectivity_table = self.csvToList(in_rapid_connect_file)
        streamID_unique_arr = [int(row[0]) for row in connectivity_table]

        #if point not in array append dummy data for one point of data
        lon_dummy = area_arr['POINT_X'][0]
        lat_dummy = area_arr['POINT_Y'][0]
        #find point index in 2d grid
        lsm_lat_indices_from_lat, lsm_lon_indices_from_lat = NUM.where(
            self.lsm_lat_array == lat_dummy)
        lsm_lat_indices_from_lon, lsm_lon_indices_from_lon = NUM.where(
            self.lsm_lon_array == lon_dummy)

        index_lat_dummy = NUM.intersect1d(lsm_lat_indices_from_lat,
                                          lsm_lat_indices_from_lon)[0]
        index_lon_dummy = NUM.intersect1d(lsm_lon_indices_from_lat,
                                          lsm_lon_indices_from_lon)[0]

        with open(out_WeightTable, 'wb') as csvfile:
            connectwriter = csv.writer(csvfile, dialect='excel')
            connectwriter.writerow(
                [streamID, 'area_sqm', 'lon_index', 'lat_index', 'npoints'])
            for streamID_unique in streamID_unique_arr:
                ind_points = NUM.where(
                    area_arr[streamID] == streamID_unique)[0]
                num_ind_points = len(ind_points)

                if num_ind_points <= 0:
                    #FEATUREID,area_sqm,lon_index,lat_index,npoints,Lon,Lat
                    connectwriter.writerow([
                        streamID_unique, 0, index_lon_dummy, index_lat_dummy, 1
                    ])
                else:
                    for ind_point in ind_points:
                        area_geo_each = float(area_arr['AREA_GEO'][ind_point])
                        lon_each = area_arr['POINT_X'][ind_point]
                        lat_each = area_arr['POINT_Y'][ind_point]

                        #find point index in 2d grid
                        lsm_lat_indices_from_lat, lsm_lon_indices_from_lat = NUM.where(
                            self.lsm_lat_array == lat_each)
                        lsm_lat_indices_from_lon, lsm_lon_indices_from_lon = NUM.where(
                            self.lsm_lon_array == lon_each)

                        index_lat_each = NUM.intersect1d(
                            lsm_lat_indices_from_lat,
                            lsm_lat_indices_from_lon)[0]
                        index_lon_each = NUM.intersect1d(
                            lsm_lon_indices_from_lat,
                            lsm_lon_indices_from_lon)[0]

                        #write to file
                        connectwriter.writerow([
                            streamID_unique, area_geo_each, index_lon_each,
                            index_lat_each, num_ind_points
                        ])

        return
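The tool also assumes csvToList and dataValidation helper methods that are not shown; a minimal sketch of csvToList, assuming the RAPID connectivity file is plain comma-delimited text:

import csv

def csvToList(self, csv_file, delimiter=','):
    # Hypothetical helper assumed above: read the connectivity table into a list of rows.
    with open(csv_file, 'r') as f:
        return [row for row in csv.reader(f, delimiter=delimiter)]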
Example No. 18
def generate_route_border_rule_table(workspace,route,route_id_field,boundary,boundary_id_field,buffer_size,route_border_rule_table,high_angle_threshold,offset):
    arcpy.AddMessage("Generating route border rule source table for {1}...".format(boundary))
    try:
        date = datetime.now()
        date_string = date.strftime("%m/%d/%Y")

        spatial_reference = arcpy.Describe(route).spatialReference
        xy_resolution = "{0} {1}".format(spatial_reference.XYResolution,spatial_reference.linearUnitName)

        ###############################################################################################################
        # get all candidate border routes
        arcpy.AddMessage("Identifying candidate border routes...")

        # generate boundary border
        boundary_border = os.path.join(workspace,"{0}_{1}_border".format(boundary,"boundary"))
        arcpy.FeatureToLine_management(boundary, boundary_border)

        # dissolve polygon boundary based on boundary id
        boundary_border_dissolved = os.path.join(workspace,"{0}_boundary_border_dissolved".format(boundary))
        arcpy.Dissolve_management(boundary_border,boundary_border_dissolved,[boundary_id_field])

        # generate buffer around boundary
        # arcpy.AddMessage("generate buffer around boundary")
        boundary_border_buffer = os.path.join(workspace,"{0}_{1}".format(boundary,"boundary_buffer"))
        arcpy.Buffer_analysis(boundary_border_dissolved, boundary_border_buffer, buffer_size, "FULL", "ROUND")

        # get candidate border route
        # arcpy.AddMessage("get candidate border route")
        candidate_border_route_multipart = "in_memory\\candidate_{0}_border_route_multipart".format(boundary)
        candidate_border_route = os.path.join(workspace,"candidate_{0}_border_route".format(boundary))
        arcpy.Clip_analysis(route, boundary_border_buffer, candidate_border_route_multipart)
        arcpy.MultipartToSinglepart_management(candidate_border_route_multipart, candidate_border_route)
        ################################################################################################################


        ################################################################################################################
        #  filter out candidate border routes that 'intersects' boundary at high angles
        arcpy.AddMessage("Filtering out candidate border routes that 'intersects' boundary at high angles...")

        route_buffer = os.path.join(workspace,"{0}_{1}".format(route,"buffer_flat"))
        if not arcpy.Exists(route_buffer):
            arcpy.Buffer_analysis(route, route_buffer, buffer_size, "FULL", "FLAT")

        # clip boundary segments within route buffer
        boundary_border_within_buffer_multipart = "in_memory\\{0}_boundary_within_{1}_buffer_multipart".format(boundary,route)
        boundary_border_within_buffer = os.path.join(workspace,"{0}_boundary_within_{1}_buffer".format(boundary,route))
        arcpy.Clip_analysis(boundary_border_dissolved, route_buffer, boundary_border_within_buffer_multipart)
        arcpy.MultipartToSinglepart_management(boundary_border_within_buffer_multipart, boundary_border_within_buffer)

        # Add 'SEGMENT_ID_ALL_CANDIDATES' field to candidate route and populate it with 'OBJECTID'
        arcpy.AddField_management(candidate_border_route,"SEGMENT_ID_ALL_CANDIDATES","LONG")
        arcpy.CalculateField_management(candidate_border_route, "SEGMENT_ID_ALL_CANDIDATES", "!OBJECTID!", "PYTHON")

        # Add 'ANGLE_ROUTE' field to candidate route and populate it with the angle to the true north(= 0 degree)
        arcpy.AddField_management(candidate_border_route,"ANGLE_ROUTE","DOUBLE")
        with arcpy.da.UpdateCursor(candidate_border_route,("SHAPE@","ANGLE_ROUTE")) as uCur:
            for row in uCur:
                shape = row[0]
                x_first = shape.firstPoint.X
                y_first = shape.firstPoint.Y
                x_last = shape.lastPoint.X
                y_last = shape.lastPoint.Y

                angle = calculate_angle(x_first,y_first,x_last,y_last)

                if angle >=0:
                    row[1]=angle
                    uCur.updateRow(row)

        # Add 'ANGLE_BOUNDARY' field to boundary segment within route buffer and populate it with the angle to the true north(= 0 degree)
        arcpy.AddField_management(boundary_border_within_buffer,"ANGLE_BOUNDARY","DOUBLE")
        with arcpy.da.UpdateCursor(boundary_border_within_buffer,("SHAPE@","ANGLE_BOUNDARY")) as uCur:
            for row in uCur:
                shape = row[0]
                x_first = shape.firstPoint.X
                y_first = shape.firstPoint.Y
                x_last = shape.lastPoint.X
                y_last = shape.lastPoint.Y

                angle = calculate_angle(x_first,y_first,x_last,y_last)

                if angle:
                    row[1]=angle
                    uCur.updateRow(row)

        del uCur

        # locate boundary segment within buffer along candidate border route.
        # assuming that if the boundary segment can't be located along its corresponding route, these two might have high angles.
        boundary_along_candidate_border_route = os.path.join(workspace,"{0}_boundary_along_candidate_{1}_border_route".format(boundary,boundary))
        arcpy.LocateFeaturesAlongRoutes_lr(boundary_border_within_buffer,candidate_border_route,"SEGMENT_ID_ALL_CANDIDATES",buffer_size,\
                                           boundary_along_candidate_border_route,"{0} {1} {2} {3}".format("RID","LINE","FMEAS","TMEAS"))

        arcpy.JoinField_management(boundary_along_candidate_border_route, "RID", candidate_border_route, "SEGMENT_ID_ALL_CANDIDATES", ["ANGLE_ROUTE"])


        positive_candidate_border_route = []
        with arcpy.da.SearchCursor(boundary_along_candidate_border_route,("RID","ANGLE_ROUTE","ANGLE_BOUNDARY")) as sCur:
            for row in sCur:
                sid = str(row[0])
                angle_route = row[1]
                angle_boundary = row[2]

                if angle_route and angle_boundary:
                    delta_angle = abs(angle_route-angle_boundary)

                    # get real intersecting angle
                    if delta_angle > 90 and delta_angle <= 270:
                        delta_angle = abs(180 - delta_angle)
                    elif delta_angle > 270:
                        delta_angle = 360 - delta_angle
                    else:
                        pass

                    # filter out negative candidate border route
                    if delta_angle <= high_angle_threshold:
                        if sid not in positive_candidate_border_route:
                            positive_candidate_border_route.append(sid)
        del sCur

        candidate_border_route_lyr = "in_memory\\candidate_border_route_lyr"
        arcpy.MakeFeatureLayer_management(candidate_border_route, candidate_border_route_lyr)
        candidate_border_route_positive = os.path.join(workspace,"candidate_{0}_border_route_positive".format(boundary))
        where_clause = "\"{0}\" IN ({1})".format("OBJECTID",",".join(positive_candidate_border_route))
        arcpy.SelectLayerByAttribute_management(candidate_border_route_lyr, "NEW_SELECTION", where_clause)
        arcpy.CopyFeatures_management(candidate_border_route_lyr,candidate_border_route_positive)

        candidate_border_route_negative = os.path.join(workspace,"candidate_{0}_border_route_negative".format(boundary))
        where_clause = "\"{0}\" NOT IN ({1})".format("OBJECTID",",".join(positive_candidate_border_route))
        arcpy.SelectLayerByAttribute_management(candidate_border_route_lyr, "NEW_SELECTION", where_clause)
        arcpy.CopyFeatures_management(candidate_border_route_lyr,candidate_border_route_negative)
        ################################################################################################################


        ################################################################################################################
        # get left, right boundary topology of positive candidate border route
        # handle candidate border route segment with different L/R boundary id by offset
        arcpy.AddMessage("Calculating L/R boundary topology of positive candidate border route...")

        # generate offset around boundary
        boundary_border_offset= os.path.join(workspace,"{0}_{1}".format(boundary,"boundary_offset"))
        arcpy.Buffer_analysis(boundary_border_dissolved, boundary_border_offset, offset, "FULL", "ROUND")

        # get intersections between positive candidate border route and boundary offset
        candidate_border_route_positive_boundary_offset_intersections = os.path.join(workspace,"candidate_{0}_border_route_positive_{1}_offset_intersections".format(boundary,boundary))
        arcpy.Intersect_analysis([candidate_border_route_positive,boundary_border_offset], candidate_border_route_positive_boundary_offset_intersections, "ALL", "", "point")

        # split positive candidate border route by intersections generated above
        candidate_border_route_positive_splitted_by_offset = os.path.join(workspace,"candidate_{0}_border_route_positive_splitted_by_offset".format(boundary))
        arcpy.SplitLineAtPoint_management(candidate_border_route_positive,candidate_border_route_positive_boundary_offset_intersections,\
                                          candidate_border_route_positive_splitted_by_offset,xy_resolution)

        # Add 'SEGMENT_ID_POSITIVE_CANDIDATES' field to splitted positive candidate route and populate it with 'OBJECTID'
        arcpy.AddField_management(candidate_border_route_positive_splitted_by_offset,"SEGMENT_ID_POSITIVE_CANDIDATES","LONG")
        arcpy.CalculateField_management(candidate_border_route_positive_splitted_by_offset, "SEGMENT_ID_POSITIVE_CANDIDATES", "!OBJECTID!", "PYTHON")

        # get positive candidate border route segments that within boundary offset
        candidate_border_route_positive_within_offset = os.path.join(workspace,"candidate_{0}_border_route_positive_within_offset".format(boundary))
        candidate_border_route_positive_splitted_by_offset_lyr = "in_memory\\candidate_{0}_border_route_positive_splitted_by_offset_lyr".format(boundary)
        arcpy.MakeFeatureLayer_management(candidate_border_route_positive_splitted_by_offset, candidate_border_route_positive_splitted_by_offset_lyr)
        arcpy.SelectLayerByLocation_management (candidate_border_route_positive_splitted_by_offset_lyr, "WITHIN", boundary_border_offset)
        arcpy.CopyFeatures_management(candidate_border_route_positive_splitted_by_offset_lyr,candidate_border_route_positive_within_offset)

        # get positive candidate border route segments that out of boundary offset
        candidate_border_route_positive_outof_offset = os.path.join(workspace,"candidate_{0}_border_route_positive_outof_offset".format(boundary))
        arcpy.SelectLayerByAttribute_management(candidate_border_route_positive_splitted_by_offset_lyr, "SWITCH_SELECTION")
        arcpy.CopyFeatures_management(candidate_border_route_positive_splitted_by_offset_lyr,candidate_border_route_positive_outof_offset)

        # generate offset around positive candidate border route within boundary offset
        # arcpy.AddMessage("generate offset around boundary")
        candidate_border_route_positive_within_offset_buffer= os.path.join(workspace,"candidate_{0}_border_route_positive_within_offset_buffer".format(boundary))
        arcpy.Buffer_analysis(candidate_border_route_positive_within_offset, candidate_border_route_positive_within_offset_buffer, offset, "FULL", "FLAT")

        # clip boundary segments within offset distance from positive candidate route that within boundary offset
        boundary_border_within_positive_candidate_border_route_buffer_multipart = "in_memory\\{0}_boundary_within_positive_candidate_border_route_buffer_multipart".format(boundary)
        boundary_border_within_positive_candidate_border_route_buffer = os.path.join(workspace,"{0}_boundary_within_positive_candidate_border_route_buffer".format(boundary))
        arcpy.Clip_analysis(boundary_border_dissolved, candidate_border_route_positive_within_offset_buffer, boundary_border_within_positive_candidate_border_route_buffer_multipart)
        arcpy.MultipartToSinglepart_management(boundary_border_within_positive_candidate_border_route_buffer_multipart, boundary_border_within_positive_candidate_border_route_buffer)

        # get endpoints of boundary border within offset buffer of splitted positive candidate border routes
        boundary_border_within_positive_candidate_border_route_buffer_endpoints = os.path.join(workspace,"{0}_boundary_within_positive_candidate_border_route_buffer_endpoints".format(boundary))
        arcpy.FeatureVerticesToPoints_management(boundary_border_within_positive_candidate_border_route_buffer,\
                                                 boundary_border_within_positive_candidate_border_route_buffer_endpoints,"BOTH_ENDS")
        arcpy.DeleteIdentical_management(boundary_border_within_positive_candidate_border_route_buffer_endpoints, ["Shape"])

        # split boundary border within offset buffer of splitted positive candidate border routes and endpoints location
        # then delete identical shape
        boundary_border_within_positive_candidate_border_route_buffer_splitted_by_own_endpoints = os.path.join(workspace,"{0}_boundary_within_positive_candidate_border_route_buffer_splitted_by_own_endpoints".format(boundary))
        arcpy.SplitLineAtPoint_management(boundary_border_within_positive_candidate_border_route_buffer,boundary_border_within_positive_candidate_border_route_buffer_endpoints,\
                                          boundary_border_within_positive_candidate_border_route_buffer_splitted_by_own_endpoints,xy_resolution)
        arcpy.DeleteIdentical_management(boundary_border_within_positive_candidate_border_route_buffer_splitted_by_own_endpoints, ["Shape"])

        # Add 'SEGMENT_ID_BOUNDARY' field to boundary segments within offset distance from positive candidate route that within boundary offset and populate it with 'OBJECTID'
        arcpy.AddField_management(boundary_border_within_positive_candidate_border_route_buffer_splitted_by_own_endpoints,"SEGMENT_ID_BOUNDARY","LONG")
        arcpy.CalculateField_management(boundary_border_within_positive_candidate_border_route_buffer_splitted_by_own_endpoints, "SEGMENT_ID_BOUNDARY", "!OBJECTID!", "PYTHON")

        # locate boundary segments within offset distance of positive candidate route that within boundary offset along positive candidate route that within boundary offset
        boundary_border_within_positive_candidate_border_route_buffer_along_candidate_border_route = os.path.join(workspace,"{0}_boundary_border_within_positive_candidate_border_route_buffer_along_candidate_border_route".format(boundary))
        arcpy.LocateFeaturesAlongRoutes_lr(boundary_border_within_positive_candidate_border_route_buffer_splitted_by_own_endpoints,candidate_border_route_positive_within_offset,"SEGMENT_ID_POSITIVE_CANDIDATES",offset,\
                                           boundary_border_within_positive_candidate_border_route_buffer_along_candidate_border_route,"{0} {1} {2} {3}".format("RID","LINE","FMEAS","TMEAS"))

        # get left, right boundary topology of boundary within offset distance of positive candidate route that within boundary offset along positive candidate route that within boundary offset
        boundary_border_within_positive_candidate_border_route_buffer_with_polygon_topology_allcases= os.path.join(workspace,"{0}_boundary_border_within_positive_candidate_border_route_buffer_with_{1}_topology_allcases".format(boundary,boundary))
        arcpy.Identity_analysis(boundary_border_within_positive_candidate_border_route_buffer_splitted_by_own_endpoints, boundary, boundary_border_within_positive_candidate_border_route_buffer_with_polygon_topology_allcases,"ALL","","KEEP_RELATIONSHIPS")

        boundary_border_within_positive_candidate_border_route_buffer_with_polygon_topology_allcases_lyr = "in_memory\\{0}_boundary_border_within_positive_candidate_border_route_buffer_with_{1}_topology_allcases_lyr".format(boundary,boundary)
        arcpy.MakeFeatureLayer_management(boundary_border_within_positive_candidate_border_route_buffer_with_polygon_topology_allcases, boundary_border_within_positive_candidate_border_route_buffer_with_polygon_topology_allcases_lyr)

        where_clause = "\"{0}\"<>0 AND \"{1}\"<>0".format("LEFT_{0}".format(boundary),"RIGHT_{0}".format(boundary))
        arcpy.SelectLayerByAttribute_management(boundary_border_within_positive_candidate_border_route_buffer_with_polygon_topology_allcases_lyr, "NEW_SELECTION", where_clause)
        boundary_border_within_positive_candidate_border_route_buffer_with_polygon_topology = os.path.join(workspace,"{0}_boundary_border_within_positive_candidate_border_route_buffer_with_{1}_topology".format(boundary,boundary))
        arcpy.CopyFeatures_management(boundary_border_within_positive_candidate_border_route_buffer_with_polygon_topology_allcases_lyr,boundary_border_within_positive_candidate_border_route_buffer_with_polygon_topology)

        arcpy.JoinField_management(boundary_border_within_positive_candidate_border_route_buffer_along_candidate_border_route,"SEGMENT_ID_BOUNDARY",\
                                   boundary_border_within_positive_candidate_border_route_buffer_with_polygon_topology,"SEGMENT_ID_BOUNDARY",["LEFT_{0}".format(boundary_id_field),"RIGHT_{0}".format(boundary_id_field)])

        arcpy.JoinField_management(candidate_border_route_positive_within_offset,"SEGMENT_ID_POSITIVE_CANDIDATES",\
                                   boundary_border_within_positive_candidate_border_route_buffer_along_candidate_border_route,"RID",["SEGMENT_ID_BOUNDARY","LEFT_{0}".format(boundary_id_field),"RIGHT_{0}".format(boundary_id_field)])

        candidate_border_route_positive_within_offset_lyr = "in_memory\\candidate_{0}_border_route_positive_within_offset_lyr".format(boundary)
        arcpy.MakeFeatureLayer_management(candidate_border_route_positive_within_offset, candidate_border_route_positive_within_offset_lyr)
        where_clause = "\"{0}\"IS NOT NULL AND \"{1}\"IS NOT NULL".format("LEFT_{0}".format(boundary_id_field),"RIGHT_{0}".format(boundary_id_field))
        arcpy.SelectLayerByAttribute_management(candidate_border_route_positive_within_offset_lyr, "NEW_SELECTION", where_clause)
        candidate_border_route_positive_within_offset_with_polygon_topology = os.path.join(workspace,"candidate_{0}_border_route_positive_within_offset_with_{1}_topology".format(boundary,boundary))
        arcpy.CopyFeatures_management(candidate_border_route_positive_within_offset_lyr,candidate_border_route_positive_within_offset_with_polygon_topology)

        # get left, right boundary topology of candidate border route out of boundary offset
        candidate_border_route_positive_outof_offset_with_polygon_topology_allcases= os.path.join(workspace,"candidate_{0}_border_route_positive_outof_offset_with_{1}_topology_allcases".format(boundary,boundary))
        arcpy.Identity_analysis(candidate_border_route_positive_outof_offset, boundary, candidate_border_route_positive_outof_offset_with_polygon_topology_allcases,"ALL","","KEEP_RELATIONSHIPS")

        candidate_border_route_positive_outof_offset_with_polygon_topology_allcases_lyr = "in_memory\\candidate_{0}_border_route_positive_outof_offset_with_polygon_topology_allcases_lyr".format(boundary)
        arcpy.MakeFeatureLayer_management(candidate_border_route_positive_outof_offset_with_polygon_topology_allcases, candidate_border_route_positive_outof_offset_with_polygon_topology_allcases_lyr)
        where_clause = "\"{0}\"<>0 AND \"{1}\"<>0".format("LEFT_{0}".format(boundary),"RIGHT_{0}".format(boundary))
        arcpy.SelectLayerByAttribute_management(candidate_border_route_positive_outof_offset_with_polygon_topology_allcases_lyr, "NEW_SELECTION", where_clause)
        candidate_border_route_positive_outof_offset_with_polygon_topology = os.path.join(workspace,"candidate_{0}_border_route_positive_outof_offset_with_{1}_topology".format(boundary,boundary))
        arcpy.CopyFeatures_management(candidate_border_route_positive_outof_offset_with_polygon_topology_allcases_lyr,candidate_border_route_positive_outof_offset_with_polygon_topology)

        # merge
        candidate_border_route_positive_with_polygon_topology = "candidate_{0}_border_route_positive_with_{1}_topology".format(boundary,boundary)
        arcpy.FeatureClassToFeatureClass_conversion(candidate_border_route_positive_outof_offset_with_polygon_topology,workspace,candidate_border_route_positive_with_polygon_topology)
        arcpy.Append_management([candidate_border_route_positive_within_offset_with_polygon_topology],candidate_border_route_positive_with_polygon_topology,"NO_TEST")

        ################################################################################################################


        ################################################################################################################
        arcpy.AddMessage("Populate route_border_rule_table...")

        # calculate from measure and to measure of candidate border route
        # arcpy.AddMessage("Calculating from measure and to measure of candidate border routes...")
        arcpy.AddGeometryAttributes_management(candidate_border_route_positive_with_polygon_topology, "LINE_START_MID_END")

        # get candidte border route segment geometry
        arcpy.AddField_management(candidate_border_route_positive_with_polygon_topology,"SEGMENT_GEOMETRY","TEXT","","",100)
        arcpy.CalculateField_management(candidate_border_route_positive_with_polygon_topology,"SEGMENT_GEOMETRY","!shape.type!","PYTHON")

        # sort candidate border route segments based on route id and from measure, orderly
        # arcpy.AddMessage("sort validated output got above based on route id and from measure, orderly")
        candidate_border_route_positive_with_polygon_topology_sorted = os.path.join(workspace,"candidate_{0}_border_route_positive_with_polygon_topology_sorted".format(boundary))
        arcpy.Sort_management(candidate_border_route_positive_with_polygon_topology,candidate_border_route_positive_with_polygon_topology_sorted,[[route_id_field,"ASCENDING"],["START_M","ASCENDING"]])

        # create route_border_rule_table
        if arcpy.Exists(route_border_rule_table):
            arcpy.Delete_management(route_border_rule_table)
            create_route_border_rule_table_schema(workspace,route_border_rule_table)
        else:
            create_route_border_rule_table_schema(workspace,route_border_rule_table)

        # populate route_border_rule_table
        iCur = arcpy.da.InsertCursor(route_border_rule_table,["ROUTE_ID","ROUTE_START_MEASURE","ROUTE_END_MEASURE","BOUNDARY_LEFT_ID",\
                                                              "BOUNDARY_RIGHT_ID","SEGMENT_GEOMETRY","EFFECTIVE_FROM_DT","EFFECTIVE_TO_DT"])
        with arcpy.da.SearchCursor(candidate_border_route_positive_with_polygon_topology_sorted,[route_id_field,"START_M","END_M","LEFT_{0}".format(boundary_id_field),\
                                                                              "RIGHT_{0}".format(boundary_id_field),"SEGMENT_GEOMETRY","START_DATE","END_DATE"]) as sCur:
            for row in sCur:
                iCur.insertRow(row)

        del sCur
        del iCur

        arcpy.CalculateField_management(route_border_rule_table, "BRP_PROCESS_DT", "'{0}'".format(date_string), "PYTHON")
        ################################################################################################################

        arcpy.AddMessage("done!")

        return route_border_rule_table
    except Exception:
        # arcpy.AddMessage(traceback.format_exc())
        sys.exit(traceback.format_exc())
        return False
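The function above depends on two helpers that are not included here, calculate_angle and create_route_border_rule_table_schema. A minimal sketch of calculate_angle, assuming it returns the segment's bearing from true north in degrees and uses a negative value to mark degenerate segments (the callers skip angles below zero):

import math

def calculate_angle(x_first, y_first, x_last, y_last):
    # Hypothetical helper: bearing of the segment, measured clockwise from true north (0-360).
    dx = x_last - x_first
    dy = y_last - y_first
    if dx == 0 and dy == 0:
        return -1  # degenerate segment; callers treat negative angles as invalid
    return math.degrees(math.atan2(dx, dy)) % 360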
Example No. 19
            STOP_SCRIPT = "Yes"

    if STOP_SCRIPT == "Yes":
        raise arcpy.ExecuteError

    # Check if feature class is of polygon type, in which case we calculate the
    # inside centroid X,Y coordinates and add them as INSIDE_X and INSIDE_Y
    # fields to the feature class. Other shape types are not supported, so raise
    # an error to stop the process.
    # SEE http://pro.arcgis.com/en/pro-app/tool-reference/data-management/add-geometry-attributes.htm
    FC_DESC = arcpy.Describe(HAZAREA_FC)
    if FC_DESC.shapeType == "Polygon":
        try:
            LOGGER.info("Adding the inside centroid X and Y coordinates.")
            arcpy.AddGeometryAttributes_management(Input_Features=HAZAREA_FC,
                                                   Geometry_Properties="CENTROID_INSIDE",
                                                   Length_Unit="", Area_Unit="",
                                                   Coordinate_System="")
        except Exception as inst:
            # Log the exception type  and all error messages returned
            LOGGER.error(type(inst))
            LOGGER.error(arcpy.GetMessages())
    else:
        LOGGER.error("Unsupported shape type detected.")
        raise arcpy.ExecuteError

    # Loop through the array and create the fields using the values stored
    # in the array.
    for row in ARRAY_FIELDS:
        arcpy.AddField_management(
            in_table=HAZAREA_FC, field_name=row[0],
            field_type=row[1], field_precision=row[2], field_scale=row[3],
Exemplo n.º 20
0
# Create point geometries from the Latitude and Longitude fields in the data file, exposed as an XY event layer
arcpy.MakeXYEventLayer_management(smFilePath, smlatField, smlonField,
                                  smPointLyName, ukCoordSystem)

# Define the produced SMARTscan Input Features
smInputPoint = os.path.join(gdbDatabasePath, smPointLyName)
smOutputLine = os.path.join(gdbDatabasePath, smLineLyName)

# Get line layers and start & end node coordinates
arcpy.FeatureClassToShapefile_conversion(smPointLyName, gdbDatabasePath)
arcpy.PointsToLine_management(smInputPoint, smOutputLine, smlineField)

print "\nInfo: SMARTscan geospatial points and lines are produced"

# Get start node and end node layers
arcpy.AddGeometryAttributes_management(smOutputLine, "LINE_START_MID_END")
startLayer = arcpy.MakeXYEventLayer_management(smOutputLine, "START_X",
                                               "START_Y", "startLayer",
                                               ukCoordSystem)
endLayer = arcpy.MakeXYEventLayer_management(smOutputLine, "END_X", "END_Y",
                                             "endLayer", ukCoordSystem)

# Convert layer into geodatabase datafile
arcpy.FeatureClassToFeatureClass_conversion(startLayer, gdbDatabasePath,
                                            "StartEN")
arcpy.FeatureClassToFeatureClass_conversion(endLayer, gdbDatabasePath, "EndEN")

# Process Pipe ref and then new Scan_Num values for start node and end nodes
with arcpy.da.UpdateCursor(startEN, ["Scan_Num"]) as cursor:
    for row in cursor:
        row[0] = row[0] + "_1"
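        # Presumed continuation (the snippet is truncated here): commit the edit
        cursor.updateRow(row)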
Exemplo n.º 21
0
def main(bhi_ras, s_zone, zones_out):

    scratchPath = os.path.abspath(os.path.join(__file__, os.pardir))

    scratchName = "scratch.gdb"
    scratch = os.path.join(scratchPath, scratchName)
    if arcpy.Exists(scratch):
        arcpy.AddMessage("scratch gdb exists - Deleting")
        arcpy.Delete_management(scratch)

    arcpy.CreateFileGDB_management(scratchPath, scratchName)

    if arcpy.Exists(zones_out):
        arcpy.Delete_management(zones_out)
    arcpy.AddMessage("Running BHI Interpretation Script")

    # classifying_zones
    sZone_fields = [f.name for f in arcpy.ListFields(s_zone)]
    if "Zone_no" in sZone_fields:
        arcpy.DeleteField_management(s_zone, "Zone_no")

    zone_info = os.path.join(scratch, "tmp_shp_copy")
    arcpy.CopyFeatures_management(s_zone, zone_info)

    # create sequential numbers for reaches
    arcpy.AddField_management(zone_info, "Zone_no", "LONG")

    with arcpy.da.UpdateCursor(zone_info, ["Zone_no", 'OBJECTID']) as cursor:
        for row in cursor:
            row[0] = row[1]

            cursor.updateRow(row)

    # Add all BHI summary statistic, percentage, and area fields in one pass
    for bhi_field in ["BHI_MEAN", "BHI_MIN", "BHI_MAX", "BHI_STD",
                      "BHI_PERC_0", "BHI_PERC_1", "BHI_PERC_2", "BHI_PERC_3",
                      "BHI_PERC_4", "BHI_PERC_5", "BHI_AREA_0", "BHI_AREA_1",
                      "BHI_AREA_2", "BHI_AREA_3", "BHI_AREA_4", "BHI_AREA_5"]:
        arcpy.AddField_management(zone_info,
                                  field_name=bhi_field,
                                  field_type="DOUBLE")

    arcpy.AddGeometryAttributes_management(zone_info,
                                           Geometry_Properties="AREA",
                                           Area_Unit="SQUARE_KILOMETERS")

    arcpy.AddMessage("begin looping features...")
    sz_fl = arcpy.MakeFeatureLayer_management(zone_info, "tempFL", "", scratch)

    n_feat = arcpy.GetCount_management(sz_fl)

    with arcpy.da.UpdateCursor(sz_fl, ["Zone_no"]) as cursor:
        for row in cursor:

            arcpy.AddMessage("working on feature {0}/{1}".format(
                row[0], n_feat))

            expr = """{0} = {1}""".format('Zone_no', row[0])
            arcpy.SelectLayerByAttribute_management(sz_fl, "NEW_SELECTION",
                                                    expr)
            out_shp = os.path.join(scratch, "op{0}".format(row[0]))

            arcpy.CopyFeatures_management(sz_fl, out_shp)

            xcell = arcpy.GetRasterProperties_management(
                bhi_ras, property_type="CELLSIZEX").getOutput(0)
            area_ras = r"in_memory/area_ras_temp"
            arcpy.env.snapRaster = os.path.join(bhi_ras)
            arcpy.FeatureToRaster_conversion(out_shp,
                                             field="Zone_no",
                                             out_raster=area_ras,
                                             cell_size=xcell)

            try:
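                # Mask the BHI raster to the current zone: cells outside the
                # zone raster are first flagged with -100 and then set to
                # NoData, leaving BHI values only inside the feature.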
                ebm_ras = Con(IsNull(Raster(area_ras)), -100, Raster(bhi_ras))

                ebm_ras = SetNull(ebm_ras == -100, ebm_ras)

                area_ras = None

                # arcpy alternative:
                stdVal = arcpy.GetRasterProperties_management(
                    ebm_ras, property_type="STD").getOutput(0)
                meanVal = arcpy.GetRasterProperties_management(
                    ebm_ras, property_type="MEAN").getOutput(0)
                minVal = arcpy.GetRasterProperties_management(
                    ebm_ras, property_type="MINIMUM").getOutput(0)
                maxVal = arcpy.GetRasterProperties_management(
                    ebm_ras, property_type="MAXIMUM").getOutput(0)

                histtab = os.path.join(scratch, "histtab")

                ZonalHistogram(out_shp, "Zone_no", ebm_ras, histtab)
                field_names = [f.name for f in arcpy.ListFields(histtab)]

                tab_arr = list(
                    arcpy.da.TableToNumPyArray(histtab,
                                               field_names=field_names))

                arr = np.asarray([x[1] for x in tab_arr])
                arrb = np.asarray([x[2] for x in tab_arr])
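                # arr holds the value-class labels from the histogram table's
                # second field; arrb holds the matching cell counts from its
                # third field (only one zone is processed per iteration).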

                namesList = list(arr)

                # Cell count for each BHI class (0 when a class is absent)
                class_counts = {}
                for cls in ('0', '1', '2', '3', '4', '5'):
                    if cls in namesList:
                        class_counts[cls] = int(arrb[arr == cls])
                    else:
                        class_counts[cls] = 0
                n0, n1, n2, n3, n4, n5 = (class_counts[c]
                                          for c in ('0', '1', '2', '3', '4', '5'))

                countN = sum([n0, n1, n2, n3, n4, n5])

                p0 = round(float(n0) / countN * 100, 2)
                p1 = round(float(n1) / countN * 100, 2)
                p2 = round(float(n2) / countN * 100, 2)
                p3 = round(float(n3) / countN * 100, 2)
                p4 = round(float(n4) / countN * 100, 2)
                p5 = round(float(n5) / countN * 100, 2)

                with arcpy.da.UpdateCursor(out_shp, [
                        "BHI_MEAN", "BHI_MIN", "BHI_MAX", "BHI_STD",
                        "BHI_PERC_0", "BHI_PERC_1", "BHI_PERC_2", "BHI_PERC_3",
                        "BHI_PERC_4", "BHI_PERC_5", "BHI_AREA_0", "BHI_AREA_1",
                        "BHI_AREA_2", "BHI_AREA_3", "BHI_AREA_4", "BHI_AREA_5",
                        "SHAPE_AREA"
                ]) as cursorc:  # NEED TO CHECK COL NAME OF AREA.
                    for rowc in cursorc:
                        rowc[0] = meanVal
                        rowc[1] = minVal
                        rowc[2] = maxVal
                        rowc[3] = stdVal
                        rowc[4] = p0
                        rowc[5] = p1
                        rowc[6] = p2
                        rowc[7] = p3
                        rowc[8] = p4
                        rowc[9] = p5
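                        # SHAPE_AREA (index 16) is in square metres here
                        # (assuming a metre-based projected CRS), so dividing
                        # by 1e6 converts each class area to square kilometres.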
                        rowc[10] = round((rowc[16] / 100 * p0) / 1000000, 2)
                        rowc[11] = round((rowc[16] / 100 * p1) / 1000000, 2)
                        rowc[12] = round((rowc[16] / 100 * p2) / 1000000, 2)
                        rowc[13] = round((rowc[16] / 100 * p3) / 1000000, 2)
                        rowc[14] = round((rowc[16] / 100 * p4) / 1000000, 2)
                        rowc[15] = round((rowc[16] / 100 * p5) / 1000000, 2)

                        cursorc.updateRow(rowc)

            except Exception:

                arcpy.AddMessage(
                    "\n WARNING: A FEATURE {0} FALLS OUTSIDE OF THE PROVIDED BHI AREA! \n"
                    .format(row[0]))

                with arcpy.da.UpdateCursor(out_shp, [
                        "BHI_MEAN", "BHI_MIN", "BHI_MAX", "BHI_STD",
                        "BHI_PERC_0", "BHI_PERC_1", "BHI_PERC_2", "BHI_PERC_3",
                        "BHI_PERC_4", "BHI_PERC_5", "BHI_AREA_0", "BHI_AREA_1",
                        "BHI_AREA_2", "BHI_AREA_3", "BHI_AREA_4", "BHI_AREA_5",
                        "SHAPE_AREA"
                ]) as cursorc:  # NEED TO CHECK COL NAME OF AREA.
                    for rowc in cursorc:
                        # Feature lies outside the BHI raster: zero out all
                        # statistic, percentage, and area fields
                        for idx in range(16):
                            rowc[idx] = 0

                        cursorc.updateRow(rowc)

            arcpy.Delete_management(r"in_memory")

        if arcpy.Exists(zone_info):
            arcpy.Delete_management(zone_info)
        if arcpy.Exists(sz_fl):
            arcpy.Delete_management(sz_fl)

        if arcpy.Exists(os.path.join(scratch, "histtab")):
            arcpy.Delete_management(os.path.join(scratch, "histtab"))

        if arcpy.Exists(os.path.join(scratch, "tempRas")):
            arcpy.Delete_management(os.path.join(scratch, "tempRas"))

        zones_list = []

        walk = arcpy.da.Walk(scratch, datatype="FeatureClass", type="Polygon")

        for dirpath, dirnames, filenames in walk:
            for filename in filenames:
                zones_list.append(os.path.join(dirpath, filename))

        if len(zones_list) > 1:
            arcpy.AddMessage("merging final features")
            arcpy.Merge_management(zones_list, zones_out)
        else:
            arcpy.AddMessage("copying final features")
            arcpy.CopyFeatures_management(zones_list[0], zones_out)

        for fc in zones_list:
            arcpy.Delete_management(fc)

        if arcpy.Exists(scratch):
            try:
                arcpy.Delete_management(scratch)
            except Exception:
                print(
                    "An issue occured when deleting the scratch folder - not a big deal"
                )

        arcpy.Delete_management(r"in_memory")

        arcpy.AddMessage("Tool completed")
Exemplo n.º 22
0
mxd = arcpy.mapping.MapDocument("CURRENT")
df = arcpy.mapping.ListDataFrames(mxd, "*")[0]
arcpy.CheckOutExtension("Spatial")
sr = arcpy.Describe(strDataFC).spatialReference

# Overwrite pre-existing files
arcpy.env.overwriteOutput = True

# Add coordinates X and Y to Structural Data Feature Layer
arcpy.AddXY_management(strDataFC)

outlines = []  # List to store the temporary line projections of the structural data

# Add bearing azimuth to plan profile line
arcpy.AddGeometryAttributes_management(profileFC, 'LINE_BEARING', 'METERS',
                                       '#', sr)

# Define cursor to loop the Structural Data Feature Layer
cursor1 = arcpy.SearchCursor(strDataFC)

# Loop through the cursor and append the temporary line projections to the outlines list
for row1 in cursor1:
    Bz = (row1.getValue(dipField))
    Az = (row1.getValue(azRumField))
    Az2 = Az + 180
    xi = (row1.getValue("POINT_X"))
    yi = (row1.getValue("POINT_Y"))
    start = arcpy.PointGeometry(arcpy.Point(xi, yi), sr)
    end = start.pointFromAngleAndDistance(Az, 5000, "PLANAR")
    end2 = start.pointFromAngleAndDistance(Az2, 5000, "PLANAR")
    outlines.append(
Exemplo n.º 23
0
def SpatialJoinLines_LargestOverlap(target_features, join_features, outgdb,
                                    out_fc, bufsize, keep_all, fields_select):
    arcpy.env.extent = target_features
    arcpy.env.workspace = outgdb

    #Split target and join lines at intersections
    print('Selecting lines...')
    joinhull = arcpy.MinimumBoundingGeometry_management(join_features,
                                                        'joinhull',
                                                        'CONVEX_HULL',
                                                        group_option='ALL')
    targethull = arcpy.MinimumBoundingGeometry_management(target_features,
                                                          'targethull',
                                                          'CONVEX_HULL',
                                                          group_option='ALL')

    print('Splitting lines...')
    lyr = arcpy.MakeFeatureLayer_management(target_features)
    arcpy.SelectLayerByLocation_management(lyr,
                                           'WITHIN',
                                           joinhull,
                                           selection_type='NEW_SELECTION')
    arcpy.FeatureToLine_management(
        lyr, 'target_split')  #Feature to line splits lines at intersections

    lyr = arcpy.MakeFeatureLayer_management(join_features)
    arcpy.SelectLayerByLocation_management(lyr,
                                           'WITHIN',
                                           targethull,
                                           selection_type='NEW_SELECTION')
    arcpy.FeatureToLine_management(lyr, 'joinfeat_split')  #Split the selected join features at intersections

    #Bufferize both datasets
    print('Buffering...')
    arcpy.Buffer_analysis('target_split',
                          'target_buf',
                          bufsize,
                          method='GEODESIC')
    arcpy.Buffer_analysis('joinfeat_split',
                          'joinfeat_buf',
                          bufsize,
                          method='GEODESIC')
    #Get buffer area for target feature
    arcpy.AddGeometryAttributes_management('target_buf',
                                           'AREA_GEODESIC',
                                           Area_Unit='SQUARE_METERS')

    #Spatial join with largest overlap
    # Calculate intersection between Target Feature and Join Features
    print('Intersecting...')
    arcpy.Intersect_analysis(['target_buf', 'joinfeat_buf'],
                             'lines_intersect',
                             join_attributes='ALL')
    arcpy.AlterField_management('lines_intersect', 'AREA_GEO',
                                'AREA_targetbuf', 'AREA_targetbuf')
    arcpy.AddGeometryAttributes_management('lines_intersect',
                                           'AREA_GEODESIC',
                                           Area_Unit='SQUARE_METERS')
    arcpy.AlterField_management('lines_intersect', 'AREA_GEO', 'AREA_inters',
                                'AREA_inters')

    #Dissolve to sum intersecting area over
    print('Computing statistics...')
    arcpy.Statistics_analysis(
        'lines_intersect',
        'lines_intersect_stats',
        statistics_fields=[['AREA_inters', 'SUM'], ['AREA_targetbuf',
                                                    'FIRST']],
        case_field=['FID_joinfeat_buf', 'FID_target_buf'])

    print('Joining by largest overlap...')
    #[f.name for f in arcpy.ListFields('lines_intersect_stats')]
    # Find which Join Feature has the largest overlap with each Target Feature
    fields = [
        'FID_target_buf', 'FID_joinfeat_buf', "SUM_AREA_inters",
        "FIRST_AREA_targetbuf"
    ]
    overlap_dict = {}
    with arcpy.da.SearchCursor('lines_intersect_stats', fields) as scur:
        for row in scur:
            try:
                if row[2] > overlap_dict[row[0]][1]:
                    overlap_dict[row[0]] = [row[1], row[2], row[3]]
            except KeyError:  # first record seen for this target feature
                overlap_dict[row[0]] = [row[1], row[2], row[3]]

    # Copy the target features and write the largest overlap join feature ID to each record
    # Set up all fields from the target features + ORIG_FID
    fieldmappings = arcpy.FieldMappings()
    fieldmappings.addTable(target_features)
    fieldmap = arcpy.FieldMap()
    fieldmap.addInputField(target_features,
                           arcpy.Describe(target_features).OIDFieldName)
    fld = fieldmap.outputField
    fld.type, fld.name, fld.aliasName = "LONG", "ORIG_FID", "ORIG_FID"
    fieldmap.outputField = fld
    fieldmappings.addFieldMap(fieldmap)
    # Perform the copy
    print('Copying...')
    arcpy.conversion.FeatureClassToFeatureClass('target_split',
                                                os.path.dirname(out_fc),
                                                os.path.basename(out_fc), "",
                                                fieldmappings)
    # Add a new field JOIN_FID to contain the fid of the join feature with the largest overlap
    arcpy.AddField_management(out_fc, "JOIN_FID", "LONG")
    arcpy.AddField_management(out_fc, "AREA_inters", "DOUBLE")
    arcpy.AddField_management(out_fc, "AREA_targetbuf", "DOUBLE")
    arcpy.AddField_management(out_fc, "intersper", "DOUBLE")
    # Calculate the JOIN_FID field
    with arcpy.da.UpdateCursor(
            out_fc,
        ["ORIG_FID", "JOIN_FID", "AREA_inters", "AREA_targetbuf", "intersper"
         ]) as ucur:
        for row in ucur:
            try:
                row[1] = overlap_dict[row[0]][0]
                row[2] = overlap_dict[row[0]][1]
                row[3] = overlap_dict[row[0]][2]
                row[4] = overlap_dict[row[0]][1] / overlap_dict[row[0]][
                    2]  #intersper = AREA_inters/AREA_targetbuf
                ucur.updateRow(row)
            except KeyError:  # no overlapping join feature for this target feature
                if not keep_all:
                    ucur.deleteRow()

    # Join all attributes from the join features to the output
    joinfields = [
        x.name for x in arcpy.ListFields('joinfeat_split')
        if not x.required and x.name in fields_select
    ]
    arcpy.JoinField_management(out_fc, "JOIN_FID", 'joinfeat_split',
                               arcpy.Describe('joinfeat_split').OIDFieldName,
                               joinfields)

    #Add length attribute to be able to remove outliers from crossings
    arcpy.AddGeometryAttributes_management(out_fc,
                                           'LENGTH_GEODESIC',
                                           Length_Unit='METERS')

    #Delete intermediate outputs
    for outlyr in [
            'joinfeat_split', 'target_split', 'target_buf', 'joinfeat_buf',
            'lines_intersect', 'lines_intersect_stats'
    ]:
        print('Deleting {}'.format(outlyr))
        arcpy.Delete_management(outlyr)
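# A hypothetical invocation of the function above; every path, buffer size, and
# field name here is a placeholder, not taken from the original repository.
if __name__ == '__main__':
    SpatialJoinLines_LargestOverlap(
        target_features=r'C:\data\lines.gdb\flowlines',
        join_features=r'C:\data\lines.gdb\reference_lines',
        outgdb=r'C:\data\scratch.gdb',
        out_fc=r'C:\data\lines.gdb\flowlines_joined',
        bufsize='30 Meters',
        keep_all=True,
        fields_select=['NAME', 'TYPE'])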
Exemplo n.º 24
0
        groundel_n = outputroot + "GEL_" + str(n) + "_" + EL + ".shp"
        polytemp_n = temp + "Poly_" + str(n) + ".shp"

        # Select
        arcpy.SelectLayerByAttribute_management("lyr", "New_SELECTION",
                                                '"fid" = ' + str(n))
        # Creat a temporate polygon for the selected house
        arcpy.FeatureClassToFeatureClass_conversion("lyr", temp,
                                                    "Poly_" + str(n) + ".shp")
        # Process: Intersect
        arcpy.Intersect_analysis(polytemp_n + " #;" + elpoly + " #",
                                 groundel_n, "ALL", "", "INPUT")

        arcpy.AddGeometryAttributes_management(
            groundel_n, "AREA_GEODESIC", "FEET_US", "SQUARE_FEET_US",
            "GEOGCS['GCS_North_American_1983',DATUM['D_North_American_1983',SPHEROID['GRS_1980',6378137.0,298.257222101]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]]"
        )

        # Process: Delete
        arcpy.Delete_management(polytemp_n, "")
        StopTime1 = time.clock()
        elapsedTime = (StopTime1 - StartTime)
        print 'Time for calculating feature ' + str(
            n) + ' for period ' + EL + ' is: ' + str(round(elapsedTime,
                                                           1)) + ' seconds'

StopTime2 = time.clock()
elapsedTime1 = (StopTime2 - StartTime0) / 60
print 'Time for all the analysis is: ' + str(round(elapsedTime1,
                                                   1)) + ' minutes'
Exemplo n.º 25
0
def StreamNetworkPoints(output_workspace, stream_network, flow_accum, dem):
    # Check out the extension license
    arcpy.CheckOutExtension("Spatial")

    # Set environment variables
    arcpy.env.overwriteOutput = True
    arcpy.env.workspace = output_workspace

    # Get spatial reference of FAC
    spatial_ref = arcpy.Describe(flow_accum).spatialReference

    # List parameter values
    arcpy.AddMessage("Workspace: {}".format(arcpy.env.workspace))
    arcpy.AddMessage("Stream Network: "
                     "{}".format(arcpy.Describe(stream_network).baseName))
    arcpy.AddMessage("Flow Accumulation Model: "
                     "{}".format(arcpy.Describe(flow_accum).baseName))
    arcpy.AddMessage("Digital Elevation Model: "
                     "{}".format(arcpy.Describe(dem).baseName))

    ## Convert stream_network fc to a route
    # Add a field to hold the linear referencing route `from_measure`
    # Check if the field already exists and if not add it
    field_names = [f.name for f in arcpy.ListFields(stream_network)]
    if "from_measure" not in field_names:
        arcpy.AddField_management(in_table=stream_network,
                                  field_name="from_measure",
                                  field_type="DOUBLE")

    # Set the stream network `from_measure` to the reach length in kilometers
    arcpy.CalculateField_management(in_table=stream_network,
                                    field="from_measure",
                                    expression="!shape.length@kilometers!",
                                    expression_type="PYTHON_9.3")

    # Add a field to hold the linear referencing route `to_measure`
    if "to_measure" not in field_names:
        arcpy.AddField_management(in_table=stream_network,
                                  field_name="to_measure",
                                  field_type="DOUBLE")

    # Set the stream network `to_measure` to zero
    arcpy.CalculateField_management(in_table=stream_network,
                                    field="to_measure",
                                    expression="0",
                                    expression_type="PYTHON_9.3")

    # Convert stream_network fc into a route
    stream_network_route = os.path.join(output_workspace,
                                        "stream_network_route")
    arcpy.CreateRoutes_lr(in_line_features=stream_network,
                          route_id_field="ReachName",
                          out_feature_class=stream_network_route,
                          measure_source="TWO_FIELDS",
                          from_measure_field="from_measure",
                          to_measure_field="to_measure")
    arcpy.AddMessage("Stream network route created")
    # Convert stream network feature vertices to points
    stream_network_points = os.path.join(output_workspace,
                                         "stream_network_points")
    arcpy.FeatureVerticesToPoints_management(
        in_features=stream_network_route,
        out_feature_class=stream_network_points)
    arcpy.AddMessage("Converted the stream network to points")

    # Add x, y, z, and m values to the `stream_network_points` feature class
    arcpy.AddGeometryAttributes_management(
        Input_Features=stream_network_points,
        Geometry_Properties="POINT_X_Y_Z_M",
        Length_Unit="KILOMETERS")

    # Set the first m-value for each stream network to zero. The `create route`
    # tool sets it to NULL.
    arcpy.CalculateField_management(in_table=stream_network_points,
                                    field="POINT_M",
                                    expression="setNull2Zero(!POINT_M!)",
                                    code_block="""def setNull2Zero(m):
                                                        if m is None: 
                                                            return 0
                                                        else:
                                                            return m""",
                                    expression_type="PYTHON_9.3")

    # Delete un-needed fields
    arcpy.DeleteField_management(in_table=stream_network_points,
                                 drop_field=["ORIG_FID", "POINT_Z"])
    arcpy.AddMessage("Added stream network route lengths")

    # Add flow accumulation values to the stream_network_points fc
    arcpy.sa.ExtractMultiValuesToPoints(
        in_point_features=stream_network_points,
        in_rasters=[flow_accum],
        bilinear_interpolate_values="NONE")
    # Add a field to the stream_network_points fc to hold watershed area
    # Check if the field already exists and if not add it
    field_names = [f.name for f in arcpy.ListFields(stream_network_points)]
    if "Watershed_Area_SqMile" not in field_names:
        arcpy.AddField_management(in_table=stream_network_points,
                                  field_name="Watershed_Area_SqMile",
                                  field_type="DOUBLE")
    # Convert flow accumulation cell counts to area in square miles
    cell_size = str(
        arcpy.GetRasterProperties_management(flow_accum, "CELLSIZEX"))
    # Expression to convert sq m to sq mi: 1 sq m = 0.0000003861 sq mile
    meters_sqmi = ("((float({0}) * float({0})) * 0.0000003861) * "
                   "!{1}!".format(cell_size,
                                  arcpy.Describe(flow_accum).baseName))

    if spatial_ref.linearUnitName == "Meter":
        arcpy.CalculateField_management(in_table=stream_network_points,
                                        field="Watershed_Area_SqMile",
                                        expression=meters_sqmi,
                                        expression_type="PYTHON_9.3")
    else:
        # Error
        arcpy.AddError("    Watershed linear unit not recognized."
                       " Area not calculated")
    # Delete un-needed fields
    arcpy.DeleteField_management(
        in_table=stream_network_points,
        drop_field=[arcpy.Describe(flow_accum).baseName])
    arcpy.AddMessage("Added stream network drainage area")

    # Add elevation values to the stream_network_points fc
    arcpy.sa.ExtractMultiValuesToPoints(
        in_point_features=stream_network_points,
        in_rasters=[dem],
        bilinear_interpolate_values="NONE")
    arcpy.AlterField_management(in_table=stream_network_points,
                                field=arcpy.Describe(dem).baseName,
                                new_field_name="Z",
                                new_field_alias="Z")
    arcpy.AddMessage("Added stream network elevations")

    # Return
    arcpy.SetParameter(4, stream_network_points)

    # Cleanup
    arcpy.Delete_management(in_data=stream_network_route)
def createSegments(contour_at_mean_high_water, contour_at_surge):
    # Start a timer  
    time1 = time.clock()
    arcpy.AddMessage("\nSegmentation of the coastline started at "+str(datetime.now()))

    # Specify a tolerance distance or minimum length of a seawall
    # Users are not yet given control of this
    th = 150

    # Create random points along the lines (mean high water and the surge of choice)
    # The numbers used are just my choice based on iterative observations
    random0 = arcpy.CreateRandomPoints_management(out_path= arcpy.env.workspace, \
                                                out_name= "random0", \
                                                constraining_feature_class= contour_at_mean_high_water, \
                                                number_of_points_or_field= long(1600), \
                                                  minimum_allowed_distance = "{0} Feet".format(th))

    random1 = arcpy.CreateRandomPoints_management(out_path= arcpy.env.workspace, \
                                                    out_name= "random1", \
                                                    constraining_feature_class= contour_at_surge, \
                                                    number_of_points_or_field= long(1600), \
                                                  minimum_allowed_distance = "{0} Feet".format(th))

    # Perform a proximity analysis with the NEAR tool 
    arcpy.Near_analysis(random0, random1)
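    # Near_analysis adds NEAR_FID and NEAR_DIST fields to random0: the ID of,
    # and the distance to, the closest point in random1; these drive the
    # filtering below.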
    # Give each point a fixed unique ID
    # Create the ID field
    arcpy.AddField_management (random0, "UniqueID", "SHORT")
    arcpy.AddField_management (random1, "UniqueID", "SHORT")
    # Add Unique IDs 
    arcpy.CalculateField_management(random0, "UniqueID", "[FID]")
    arcpy.CalculateField_management(random1, "UniqueID", "[FID]")

    # Categorize/separate each feature based on its near feature
    # Create a table view of random0
    table0 = arcpy.MakeTableView_management(random0, "random0_table")
    #table1 = arcpy.MakeTableView_management(random1, "random1_table")
    # Sort the points in random0 by their near feature ID
    random0_sorted = arcpy.Sort_management(table0, "random0_sorted.dbf", [["NEAR_FID", "ASCENDING"]])


    # Create "long enough" lists for each of the field of interests: ID, NEAR_ID, and NEAR_DIST
    # (distance to closest point). I added [99999] here to extend the list length and avoid IndexError
    list_fid = [r.getValue("UniqueID") for r in arcpy.SearchCursor(random0_sorted, ["UniqueID"])] +[99999]
    list_nearid = [r.getValue("NEAR_FID") for r in arcpy.SearchCursor(random0_sorted, ["NEAR_FID"])]\
                  +[99999]
    list_neardist = [r.getValue("NEAR_DIST") for r in arcpy.SearchCursor(random0_sorted, ["NEAR_DIST"])]\
                    +[99999]

    del r

    # Only take points whose near feature is within the specified threshold. If it's too far,
    # it's not a good candidate for a segment point
    list_fid_filtered = [i for i in list_neardist if i < th]
    # Then initiate lists to contain their Unique IDs and Near IDs
    first_unique_id = [] 
    first_near_id = []
    # Get NEAR_ID and Unique ID for each of these points
    for i in list_fid_filtered:
        first_unique_id.append(list_fid[list_neardist.index(i)])
        first_near_id.append(list_nearid[list_neardist.index(i)])

    # Only take the unique values in case there are duplicates. This shouldn't happen; it's just a safeguard.
    first_unique_id = [i for i in set(first_unique_id)]
    first_near_id = [i for i in set(first_near_id)]


    # Now create a new feature out of these points
    # First let's create a Feature Layer
    arcpy.MakeFeatureLayer_management("random0.shp", "random0_lyr")
    # Let's select all points and export them into a new feature
    random0_points = arcpy.SearchCursor(random0, ["UniqueID"])
    point0 = random0_points.next()

    for point0 in random0_points:
        for i in range(len(first_unique_id)):
            if point0.getValue("UniqueID") == first_unique_id[i]:
                selector0 = arcpy.SelectLayerByAttribute_management(\
                     "random0_lyr", "ADD_TO_SELECTION", '"UniqueID" = {0}'.format(first_unique_id[i]))

    del point0, random0_points
     
    new_random0 = arcpy.CopyFeatures_management(selector0, "new_random0")
    arcpy.Delete_management('random0_lyr')
    

    # Now, for the new point feature, remove clusters of nearby points and keep only the point
    # with the minimum NEAR_DIST in each cluster
    # First, get the geometry attributes of the new points
    arcpy.AddGeometryAttributes_management(new_random0, "POINT_X_Y_Z_M", "", "", "")

    # Create long enough lists of the fields of interest (same as the previous step)
    pointx = [r.getValue("POINT_X") for r in arcpy.SearchCursor(new_random0, ["POINT_X"])] +[99999]
    pointy = [r.getValue("POINT_Y") for r in arcpy.SearchCursor(new_random0, ["POINT_Y"])] +[99999]
    new_list_fid = [r.getValue("UniqueID") for r in arcpy.SearchCursor(new_random0, ["UniqueID"])]\
                   +[99999]
    new_list_nearid = [r.getValue("NEAR_FID") for r in arcpy.SearchCursor(new_random0, ["NEAR_FID"])]\
                      +[99999]
    new_list_neardist = [r.getValue("NEAR_DIST") for r in arcpy.SearchCursor(new_random0, ["NEAR_DIST"])]\
                        +[99999]

    del r


    # Initiate a list of every point that has already been compared to the near points
    garbage = []
    # Also initiate lists for the new Unique IDs and NEAR IDs
    new_unique_ID = []
    new_near_ID = []
    # Then check whether consecutive points are right next to each other. If so, add them to a
    # temporary list and keep the one closest to its near point (minimum NEAR_DIST)
    for i in range(len(pointx)):
        if i+1 < len(pointx):
             
            # If not within the th range 
            if not calculateDistance(pointx[i], pointy[i], pointx[i+1], pointy[i+1]) < float(th)*1.5:
                # Skip if it's in garbage 
                if new_list_nearid[i] in garbage:
                    continue
                else:
                    new_unique_ID.append(new_list_fid[i])
                    new_near_ID.append(new_list_nearid[i])

            # If within the range        
            else:
                # Skip if it's in garbage 
                if new_list_nearid[i] in garbage:
                    continue
                else:
                    temp_ID = []
                    temp_NEAR = []
                    temp_DIST = []
                    while True:
                        temp_ID.append(new_list_fid[i])
                        temp_NEAR.append(new_list_nearid[i])
                        temp_DIST.append(new_list_neardist[i])
                        garbage.append(new_list_nearid[i])
                        i = i+1
                        # Stop when within the range again. And add the last point within the range
                        if not calculateDistance(pointx[i], pointy[i], pointx[i+1], pointy[i+1]) < 200:
                            temp_ID.append(new_list_fid[i])
                            temp_NEAR.append(new_list_nearid[i])
                            temp_DIST.append(new_list_neardist[i])
                            garbage.append(new_list_nearid[i])

                            # Calculate the minimum and get the Unique ID and Near ID  
                            minD = min(temp_DIST)
                            new_unique_ID.append(new_list_fid[new_list_neardist.index(minD)])
                            new_near_ID.append(new_list_nearid[new_list_neardist.index(minD)])

                            del temp_ID, temp_NEAR, temp_DIST
                            break


    # Now select these final points and export them into new features.
    # These are the end points for the segments to be created
    # First, make a layer out of all the random points
    arcpy.MakeFeatureLayer_management("random0.shp", "random0_lyr") 
    arcpy.MakeFeatureLayer_management("random1.shp", "random1_lyr") 

    # Then select and export the end points into feature0 and feature1
    # Based on new_unique_ID for random0
    random0_points = arcpy.SearchCursor(random0, ["UniqueID"])
    point0 = random0_points.next()
    for point0 in random0_points:
        for i in range(len(new_unique_ID)):
            if point0.getValue("UniqueID") == new_unique_ID[i]:
                selected0 = arcpy.SelectLayerByAttribute_management(\
                     "random0_lyr", "ADD_TO_SELECTION", '"UniqueID" = {0}'.format(new_unique_ID[i]))

    feature0 = arcpy.CopyFeatures_management(selected0, "feature0")

    # Based on new_near_ID for random1
    random1_points = arcpy.SearchCursor(random1, ["UniqueID"])
    point1 = random1_points.next()
    for point1 in random1_points:
        for k in range(len(new_near_ID)):
            if point1.getValue("UniqueID") == new_near_ID[k]:
                selected1 = arcpy.SelectLayerByAttribute_management(\
                     "random1_lyr", "ADD_TO_SELECTION", '"UniqueID" = {0}'.format(new_near_ID[k]))

    feature1 = arcpy.CopyFeatures_management(selected1, "feature1")

    del point0, point1, random0_points, random1_points 
    arcpy.Delete_management('random0_lyr')
    arcpy.Delete_management('random1_lyr')


    # Now for the actual creation of the coastal segments,
    # which includes creating polygons and splitting the contours at the corresponding points
    # STEPS NECESSARY FOR POLYGON CREATION
    # Let's first add geometry attributes to these points
    arcpy.AddGeometryAttributes_management(feature0, "POINT_X_Y_Z_M", "", "", "")
    arcpy.AddGeometryAttributes_management(feature1, "POINT_X_Y_Z_M", "", "", "")

    # Let's create lines that connect points from feature0 to feature1
    # Initiate a POLYLINE feature class for these lines
    arcpy.CreateFeatureclass_management (arcpy.env.workspace, "connector_lines.shp", "POLYLINE")

    # Then, for each of the points in feature0, get the corresponding point in feature1
    # and create a line between the two points
    with arcpy.da.SearchCursor(feature0, ["NEAR_FID", "POINT_X", "POINT_Y"]) as features0:
        for feat0 in features0:
                    
            with arcpy.da.SearchCursor(feature1, ["UniqueID", "POINT_X", "POINT_Y"]) as features1:
                x=0
                for feat1 in features1:
                    x = x+1
                    theseTwoPoints = []

                    if feat0[0] == feat1[0]:
                        # Get coordinates 
                        X0, Y0 = feat0[1], feat0[2]
                        X1, Y1 = feat1[1], feat1[2]
                        # Append coordinates
                        theseTwoPoints.append(arcpy.PointGeometry(arcpy.Point(X0, Y0)))
                        theseTwoPoints.append(arcpy.PointGeometry(arcpy.Point(X1, Y1)))
                        # Create line from the coordinates
                        subline = arcpy.PointsToLine_management(theseTwoPoints, "subline"+str(x)+".shp")
                        # Append all lines into one feature
                        lines = arcpy.Append_management(["subline"+str(x)+".shp"], "connector_lines.shp")
                        # Then delete subline as it's now unnecessary
                        arcpy.Delete_management(subline)

                        continue

    
    del feat0, feat1, features0, features1

    # Now that the connectors are created, let's split the contours into segments
    # Before splitting, integrate the points and the contours,
    # just in case there are misalignments
    arcpy.Integrate_management([contour_at_mean_high_water, feature0])
    arcpy.Integrate_management([contour_at_surge, feature1])
    segments0 = arcpy.SplitLineAtPoint_management(contour_at_mean_high_water, feature0, "segments0.shp", "10 Feet")
    segments1 = arcpy.SplitLineAtPoint_management(contour_at_surge, feature1, "segments1.shp", "10 Feet")
    # And let's give fixed unique ID for each segment
    arcpy.CalculateField_management(segments0, "Id", "[FID]")
    arcpy.CalculateField_management(segments1, "Id", "[FID]")

    # Now with the split segments and connector lines, let's make segment polygon of the segments
    almost_segment_polygons = arcpy.FeatureToPolygon_management([segments0, segments1, lines],\
                                                                "almost_segment_polygons.shp")
    # Adding unique ID to the segment polygons
    arcpy.CalculateField_management(almost_segment_polygons, "Id", "[FID]")
    
    # The Feature To Polygon process also created polygons that are entirely surrounded by other polygons.
    # These represent areas enclosed by flooded areas at surge but sitting above the surge level,
    # so they are technically safe. Let's remove them.
    arcpy.MakeFeatureLayer_management(almost_segment_polygons, 'almost_segment_polygons_lyr')
    arcpy.MakeFeatureLayer_management(segments0, 'segments0_lyr')
    # Only the polygons within the mean_high_water segments are at risk
    arcpy.SelectLayerByLocation_management('almost_segment_polygons_lyr', 'INTERSECT', 'segments0_lyr')
    final_without_length = arcpy.CopyFeatures_management('almost_segment_polygons_lyr', 'final.shp')
    
    arcpy.Delete_management('segments0_lyr')
    arcpy.Delete_management('almost_segment_polygons_lyr')

    # For the new polygons, let's add the corresponding seawall length
    # Let's add Length field to both first
    arcpy.AddField_management(final_without_length, "Length", "SHORT")
    arcpy.AddField_management(segments0, "Length", "SHORT")
    # Calculation of the length
    with arcpy.da.UpdateCursor(segments0, ["SHAPE@LENGTH", "Length"]) as segments_0:  
         for segment_0 in segments_0:
              length = segment_0[0]
              segment_0[1] = length
              segments_0.updateRow(segment_0)
    del segment_0, segments_0

    # With spatial join, let's add these results to the segment polygons 
    final = spatialJoin(final_without_length, segments0, "Length", "Length", "max", "joined_segment.shp")

    # Delete the created but now unnecessary files 
    arcpy.Delete_management(random0)
    arcpy.Delete_management(random1)

    # Stop the timer 
    time2 = time.clock()

    arcpy.AddMessage("Seawall segments and regions successfully created. It took "\
                     +str(time2-time1)+" seconds")
    
    return final
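# Note: the spatialJoin() helper used above is not shown in this snippet. A
# minimal sketch of what it might look like, assuming it wraps
# arcpy.SpatialJoin_analysis and carries a single field across with the given
# merge rule (the signature is inferred from the call site; details are assumptions):
def spatialJoin(target_fc, join_fc, in_field, out_field, merge_rule, out_name):
    fieldmappings = arcpy.FieldMappings()
    fieldmappings.addTable(target_fc)

    # Map one field from the join features, renamed and aggregated by merge_rule
    fieldmap = arcpy.FieldMap()
    fieldmap.addInputField(join_fc, in_field)
    fieldmap.mergeRule = merge_rule
    out_fld = fieldmap.outputField
    out_fld.name = out_field
    out_fld.aliasName = out_field
    fieldmap.outputField = out_fld
    fieldmappings.addFieldMap(fieldmap)

    return arcpy.SpatialJoin_analysis(target_fc, join_fc, out_name,
                                      "JOIN_ONE_TO_ONE", "KEEP_ALL",
                                      fieldmappings, "INTERSECT")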
Exemplo n.º 27
0
        #out_units="KILOMETERS"
        #arcpy.TabulateIntersection_analysis(in_zone_features,zone_fields,in_class_features,out_table,class_fields,sum_fields,xy_tolerance,out_units)
        #print "tabulating intersection " + "%s" %in_class_features

#Manage geodesic area fields in fishnet grids
for i in PP_list_fishnet:
    in_table = i
    drop_field = ["AREA_GEO", "FREQUENCY"]
    arcpy.DeleteField_management(in_table, drop_field)
    Geometry_Properties = "AREA_GEODESIC"
    Length_Unit = "#"
    Area_Unit = "SQUARE_KILOMETERS"
    Coordinate_System = "#"
    try:
        arcpy.AddGeometryAttributes_management(in_table, Geometry_Properties,
                                               Length_Unit, Area_Unit,
                                               Coordinate_System)
        print "Deleting old and adding new geodesic area field to the " + "%s" % in_table
    except Exception as e:
        print arcpy.GetMessages(2)

        # If using this code within a script tool, AddError can be used to return messages
        #   back to a script tool.  If not, AddError will have no effect.
        arcpy.AddError(e.message)

## PermanentJoin.py: Join one field from a table to a feature class
# Set the local parameters
#tables = arcpy.ListTables()
#for table in tables:
#if
# Create a describe object of the input table
desc = arcpy.Describe(table)
# List all fields
table_field_list = [field.name for field in desc.fields]

# If the table is a feature class and geometry properties are included, add geometry attributes
if desc.datasetType == 'FeatureClass' and geom_properties:
    arcpy.SetProgressorLabel('Adding Geometry Attributes...')
    # If a coordinate system is not given, use the feature class coordinate system
    if not cs:
        cs = desc.spatialReference
    # Copy the table to keep the original as-is
    arcpy.CopyFeatures_management(table, 'copy_table')
    table = 'copy_table'
    # Add geometry attributes
    arcpy.AddGeometryAttributes_management(table, geom_properties, l_unit,
                                           a_unit, cs)
    # Get the geometry fields that were just added (re-list the fields of the
    # copied table, since `desc` was created before the new fields existed)
    geom_fields = [
        field.name for field in arcpy.ListFields(table)
        if field.name not in table_field_list
    ]
    # Combine selected field names and geometry field names
    field_names = field_names + geom_fields

# If aliases are included: Create a list of all of the field aliases in the table
if alias_incl != 'NONE':
    # Create a list of the aliases for the selected fields
    aliases = [
        field.aliasName for field in desc.fields if field.name in field_names
    ]
Exemplo n.º 29
0
#in_features="C:/Users/oc3512/Documents/ArcGIS/Projects/MMSP/MMSP_utm.gdb/MMSP_Stations_utm"

properties="POINT_X_Y_Z_M" # properties: string (parameter), choose "POINT_X_Y_Z_M"
length_unit = ""
area_unit = ""

# Choose the coordinate system of interest
## To generate UTM coordinates: choose "UTM"
## To generate lat, long: choose "GCS_WGS_1984"

#coordinate=arcpy.Describe(in_features).spatialReference.Name
coordinate1=arcpy.SpatialReference(32651)
coordinate2=arcpy.SpatialReference(4326)

# Generate UTM coordinates
output=arcpy.AddGeometryAttributes_management(in_features, properties, length_unit, area_unit, coordinate1)

# Rename the fields
## As the field names are overwritten every time coordinates in a different
## coordinate system are added, we need to rename them first:

fieldNames = [f.name for f in arcpy.ListFields(output)]

if "X" in fieldNames:
    print("")
else:
    output=arcpy.AlterField_management(output, 'POINT_X', 'X', '')
    output=arcpy.AlterField_management(output, 'POINT_Y', 'Y', '')

# Generate lat and long
output1=arcpy.AddGeometryAttributes_management(output, properties, length_unit, area_unit, coordinate2)
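
# Following the same pattern, the lat/long fields written by the second call could
# be renamed as well (the target field names below are assumptions, not from the
# original script):
fieldNames = [f.name for f in arcpy.ListFields(output1)]
if "LONGITUDE" not in fieldNames:
    output1 = arcpy.AlterField_management(output1, 'POINT_X', 'LONGITUDE', '')
    output1 = arcpy.AlterField_management(output1, 'POINT_Y', 'LATITUDE', '')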
Exemplo n.º 30
0
def AddXYToShapefile(inFC, geomProperty, coordinateSystem):
    """
    http://pro.arcgis.com/en/pro-app/tool-reference/data-management/add-geometry-attributes.htm
    AddGeometryAttributes_management (Input_Features, Geometry_Properties, {Length_Unit}, {Area_Unit}, {Coordinate_System})
    """
    arcpy.AddGeometryAttributes_management(inFC, geomProperty, None, None, coordinateSystem)
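
# A hypothetical call to the helper above; the shapefile path and spatial
# reference are placeholders, not from the original script.
if __name__ == "__main__":
    wgs84 = arcpy.SpatialReference(4326)
    AddXYToShapefile(r"C:\data\wells.shp", "POINT_X_Y_Z_M", wgs84)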