Example #1
def clean_slivers_by_vertex(PARCEL_ALL, SLIVERS_CLEAN, border, Dis_search,
                            PARCEL_ALL_lyr):

    print_arcpy_message("START Func: clean slivers by vertex")
    '''
    [INFO] - Deletes, based on linearity and distance, the vertices that lie
             next to the holes (slivers) of the Tazar plan.
    INPUT-
    1) PARCEL_ALL     - continuous parcel layer
    2) SLIVERS_CLEAN  - holes (slivers) of the continuous layer
    3) border         - Tazar plan boundary
    4) Dis_search     - vertex search distance
    5) PARCEL_ALL_lyr - output layer
    '''

    gdb = os.path.dirname(border)

    tazar_border = r'in_memory\TazarBorderDiss'
    arcpy.Dissolve_management(border, tazar_border)

    conn = sqlite3.connect(':memory:')
    c = conn.cursor()
    c.execute(
        '''CREATE TABLE old_vertices(pnt_num real, x real, y real, xy text, part real, oid real)'''
    )
    c.execute(
        '''CREATE TABLE new_vertices(pnt_num real, x real, y real, xy text, part real, oid real)'''
    )

    c.execute(
        '''CREATE TABLE vertices(pnt_num real, x real, y real, xy text, part real, oid real, junction real, linearity real)'''
    )

    c.execute(
        '''CREATE TABLE sliver_vertices(pnt_num real, x real, y real, xy text, part real, oid real, junction real, linearity real)'''
    )

    c.execute(
        '''CREATE TABLE border_vertices(pnt_num real, x real, y real, xy text, part real, oid real, junction real, linearity real)'''
    )

    arcpy.Select_analysis(PARCEL_ALL, PARCEL_ALL_lyr)

    arcpy.CopyFeatures_management(PARCEL_ALL_lyr,
                                  gdb + "\\PARCEL_ALL_lyr_COPY_DEL")

    VerticesToTable2(PARCEL_ALL_lyr, "vertices", c)
    VerticesToTable2(SLIVERS_CLEAN, "sliver_vertices", c)
    VerticesToTable2(tazar_border, "border_vertices", c)

    parcel_common_vertices = [
        row for row in c.execute('''SELECT * FROM vertices
                                    LEFT JOIN sliver_vertices
                                    ON vertices.xy = sliver_vertices.xy
                                    WHERE sliver_vertices.xy IS NOT NULL''')
    ]

    border_common_vertices = [
        row for row in c.execute('''SELECT * FROM border_vertices
                                    LEFT JOIN sliver_vertices
                                    ON border_vertices.xy = sliver_vertices.xy
                                    WHERE sliver_vertices.xy IS NOT NULL''')
    ]


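    # Pair every parcel vertex with every border vertex and keep the pairs
    # that are closer together than Dis_search (or coincident to two decimal
    # places), along with the Euclidean distance between them.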
    distance_vertices = [
        [p[:8] + b[:8],
         math.sqrt(((p[1] - b[1]) ** 2) + ((p[2] - b[2]) ** 2))]
        for p in parcel_common_vertices for b in border_common_vertices
        if math.sqrt(((p[1] - b[1]) ** 2) + ((p[2] - b[2]) ** 2)) < Dis_search
        or (float("{0:.2f}".format(p[1])) == float("{0:.2f}".format(b[1]))
            and float("{0:.2f}".format(p[2])) == float("{0:.2f}".format(b[2])))
    ]

    rows = arcpy.UpdateCursor(PARCEL_ALL_lyr)
    for row in rows:
        geometry = row.Shape
        oid = row.OBJECTID
        pts = []
        ring = []
        poly_vertices = [r for r in distance_vertices if r[0][5] == oid]
        num_point = 0
        for part in geometry:
            for pt in part:
                if pt is not None:
                    #print str(pt.X) + "--" + str(pt.Y)
                    this_x = float("{0:.2f}".format(pt.X))
                    this_y = float("{0:.2f}".format(pt.Y))
                    this_vertex = [
                        p for p in poly_vertices
                        if float("{0:.2f}".format(p[0][1])) == this_x
                        and float("{0:.2f}".format(p[0][2])) == this_y
                    ]
                    if this_vertex:
                        if this_vertex[0][0][8] is None:
                            if this_vertex[0][0][7] < 0.7 and this_vertex[0][
                                    0][6] == 1:
                                print("pseudo: delete vertex")
                            else:
                                #print "pseudo, but important: keep the vertex"
                                point = pt
                                ring.append([point.X, point.Y])
                        # tazar point in buffer
                        else:
                            # check minimum distance
                            the_minimum_vertex = [
                                v for v in this_vertex
                                if v[1] == min([i[1] for i in this_vertex])
                            ]
                            point = arcpy.Point(the_minimum_vertex[0][0][9],
                                                the_minimum_vertex[0][0][10])
                            ring.append([point.X, point.Y])
                    # point not on sliver: keep the vertex
                    else:
                        point = pt
                        ring.append([point.X, point.Y])
                    if num_point == 0:
                        first_point = point
                    num_point = num_point + 1
                else:
                    ring.append([first_point.X, first_point.Y])
                    ring.append(None)
                    num_point = 0

        # if pts[0] != pts[-1] and first_point:
        #         #print "ooops.... - polygon not closed"
        #         pts.append(first_point)

        pts.append(ring)
        polygon = PtsToPolygon1(pts)
        row.Shape = polygon
        rows.updateRow(row)

    arcpy.Delete_management(gdb + "\\PARCEL_ALL_lyr_COPY_DEL")
    return PARCEL_ALL_lyr
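
A minimal call sketch for the function above, assuming print_arcpy_message, VerticesToTable2 and PtsToPolygon1 are defined in the same module and that arcpy, os, math and sqlite3 are already imported; the geodatabase paths and the 1-meter search distance are hypothetical placeholders.

if __name__ == '__main__':
    tazar_gdb = r'C:\data\tazar.gdb'  # hypothetical workspace
    clean_slivers_by_vertex(PARCEL_ALL=tazar_gdb + '\\PARCEL_ALL',
                            SLIVERS_CLEAN=tazar_gdb + '\\SLIVERS_CLEAN',
                            border=tazar_gdb + '\\TazarBorder',
                            Dis_search=1.0,
                            PARCEL_ALL_lyr=tazar_gdb + '\\PARCEL_ALL_lyr')
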
Example #2
def riparianInt():
    # Find landcover breakdown for riparian buffer - sqft per subwatershed
    util.log(
        "Starting riparianInt module ---------------------------------------------------------------"
    )

    streams = config.streams
    waterbodies = config.waterbodies
    subwatersheds = config.subwatersheds
    canopy_new = config.canopy_2014

    util.log("Step 1 of 12, Preparing streams")
    streams_sub = arcpy.MakeFeatureLayer_management(
        config.streams, "streams_sub", "LINE_TYPE = 'Open Channel'")
    streamBuffer = arcpy.Buffer_analysis(streams_sub,
                                         config.temp_gdb + r"\streams_buff",
                                         "300 Feet", "FULL", "ROUND", "NONE")

    util.log("Step 2 of 12, Preparing Hydro")
    waterbodyBuffer = arcpy.Buffer_analysis(waterbodies,
                                            config.temp_gdb + r"\hydro_buff",
                                            "300 Feet", "OUTSIDE_ONLY")

    util.log("Step 3 of 12, Erasing")
    streams_without_waterbody = arcpy.Erase_analysis(
        streamBuffer, waterbodyBuffer,
        config.temp_gdb + r"\streams_without_water")

    util.log("Step 4 of 12, Deleting unneeded fields")
    arcpy.DeleteField_management(streams_without_waterbody, [
        "LLID", "NAME", "LOC_NAME", "WB_TYPE", "MAJOR_WB", "SOURCE",
        "SOURCE_REF", "NHD_FCODE", "WS_ID", "HUC12", "HUC12_NAME", "MODIFIER",
        "MOD_NAME", "MOD_DATE", "CREATED_BY", "CREATEDATE", "FIELD_DATE",
        "REVIEW", "NOTES", "SUBAREA"
    ])
    arcpy.DeleteField_management(waterbodyBuffer, [
        'LLID', 'HYDRO_ID', 'SEG_NUM', 'NAME', 'LOC_NAME', 'LINE_TYPE',
        'PERIOD', 'SOURCE', 'SOURCE_REF', 'NHD_FCODE', 'WS_ID', 'HUC12',
        'HUC12_Name', 'MODIFIER', 'MOD_NAME', 'MOD_DATE', 'CREATED_BY',
        'CREATEDATE', 'FIELD_DATE', 'REVIEW', 'NOTES', 'SUBAREA', 'STATUS'
    ])

    util.log("Step 5 of 12, Merging")
    merged_water = arcpy.Merge_management(
        [waterbodyBuffer, streams_without_waterbody],
        config.temp_gdb + r"\merged_water")

    util.log("Step 6 of 12, Repairing Geometry")
    arcpy.RepairGeometry_management(merged_water)

    util.log("Step 7 of 12, Intersecting")
    intersect = arcpy.Intersect_analysis([merged_water, subwatersheds],
                                         config.temp_gdb + r"\intersect")

    util.log("Step 8 of 12, Dissolving")
    final_water = arcpy.Dissolve_management(intersect,
                                            config.temp_gdb + r"\final_water",
                                            "WATERSHED")
    arcpy.CheckOutExtension("Spatial")

    util.log("Step 9 of 12, Zonal Statistics")
    zone_table = arcpy.gp.ZonalStatisticsAsTable_sa(
        final_water, "WATERSHED", canopy_new,
        config.temp_gdb + r"\canopy_stats", "DATA", "SUM")
    arcpy.CheckInExtension("Spatial")

    util.log("Step 10 of 12, Adding Field")
    arcpy.AddField_management(final_water, "Pcnt_Canopy", "DOUBLE")

    Landcov_final = arcpy.MakeFeatureLayer_management(final_water,
                                                      "Landcov_final")
    util.log("Step 11 of 12, Joining")
    arcpy.AddJoin_management(Landcov_final, "WATERSHED", zone_table,
                             "WATERSHED")

    util.log("Step 12 of 12, Calculating Percent Canopy")
    arcpy.CalculateField_management(
        Landcov_final, "Pcnt_Canopy",
        "([canopy_stats.AREA]/ [final_water.Shape_Area])*100", "VB")

    arcpy.RemoveJoin_management(Landcov_final)

    # Find count of stream/street intersections per subwatershed

    # Subset and intersect the streams and roads - generate points from this
    util.log("Subsetting and intersecting streams/roads")
    stream_subset = arcpy.MakeFeatureLayer_management(
        config.streams, "in_memory" + r"\stream_subset",
        "LINE_TYPE in ( 'Open Channel' , 'Stormwater Culvert' , 'Stormwater Pipe' , 'Water Body' )"
    )
    streets_erase = arcpy.Clip_analysis(config.streets, config.city_bound,
                                        "in_memory" + r"\streets_erase")
    crossing_sect = arcpy.Intersect_analysis([stream_subset, streets_erase],
                                             "in_memory" + r"\crossing_sect",
                                             "NO_FID", "", "POINT")

    # Add Count field and populate with value = 1
    util.log("Adding and Populating Count field")
    arcpy.AddField_management(crossing_sect, "Sect_Count", "SHORT")
    with arcpy.da.UpdateCursor(crossing_sect, "Sect_Count") as rows:
        for row in rows:
            row[0] = 1
            rows.updateRow(row)

    # Intersect crossings with subwatersheds, group by WATERSHED and get summed count of crossings
    groupby_list = ["WATERSHED"]
    sum_field = "Sect_Count SUM"
    crossing_sumBy = "in_memory" + r"\sect_sumBy"
    sumBy_select(crossing_sect, config.subwatersheds, groupby_list, sum_field,
                 crossing_sumBy)

    # Intersect streams with subwatersheds, group by WATERSHED and get summed area
    util.log(
        "Intersecting streams with subwatersheds and grouping length by subwatershed"
    )
    groupby_list = ["WATERSHED"]
    sum_field = "Shape_Length SUM"
    stream_sumBy = config.temp_gdb + r"\riparianInt_final"
    sumBy_intersect(streams_sub, config.subwatersheds, groupby_list, sum_field,
                    stream_sumBy)

    # Append information into one place
    util.log("Add crossing counts to stream length data")
    arcpy.JoinField_management(stream_sumBy, "WATERSHED", crossing_sumBy,
                               "WATERSHED", "SUM_Sect_Count")

    # Calculate # of crossings per kilometer of stream
    util.log("Calculating # of crossings per km of stream")
    rate_field2 = "Crossings_km"
    feet_perKm = 3280.1
    arcpy.AddField_management(stream_sumBy, rate_field2, "DOUBLE")
    cursor_fields = ["SUM_Sect_Count", "SUM_Shape_Length", rate_field2]
    with arcpy.da.UpdateCursor(stream_sumBy, cursor_fields) as rows:
        for row in rows:
            row[2] = row[0] / (row[1] / feet_perKm)
            rows.updateRow(row)

    # Combine info from % canopy and # of crossings per kilometer into one place
    util.log("Add % canopy data to crossings per stream km info")
    arcpy.JoinField_management(stream_sumBy, "WATERSHED", Landcov_final,
                               "WATERSHED", "Pcnt_Canopy")

    # WHI score
    util.log("Calc WHI score")
    rate_field1 = "Pcnt_Canopy"
    score_field = "riparianInt_score"
    arcpy.AddField_management(stream_sumBy, score_field, "DOUBLE")
    with arcpy.da.UpdateCursor(
            stream_sumBy, [rate_field1, rate_field2, score_field]) as rows:
        for row in rows:
            row[2] = calc.ripIntegrity_score(row[0], row[1])
            rows.updateRow(row)

    # convert output to table if needed
    util.tableTo_primaryOutput(stream_sumBy)

    util.log("Cleaning up")
    arcpy.Delete_management("in_memory")

    util.log(
        "Module complete ---------------------------------------------------------------"
    )
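
riparianInt calls two project helpers, sumBy_select and sumBy_intersect, that are not shown here. The sketch below is one plausible reading of sumBy_intersect, assuming it intersects the input features with the zone polygons and then sums the requested field per group; the signature, the in_memory scratch name and the field handling are assumptions rather than the project's actual code.

import arcpy

def sumBy_intersect(in_features, zones, groupby_list, sum_field, out_table):
    # Intersect the input features with the zone polygons (e.g. subwatersheds).
    sect = arcpy.Intersect_analysis([in_features, zones],
                                    r"in_memory\sumBy_sect")
    # sum_field arrives as "<field> SUM"; split it into the form that
    # Statistics_analysis expects and group by the requested case fields.
    field_name, stat_type = sum_field.split()
    arcpy.Statistics_analysis(sect, out_table, [[field_name, stat_type]],
                              groupby_list)
    return out_table
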
# ===================
# Create Range Raster
# ===================

# Create a list of the state polygons
statePolyList = []
for state in states:
    statePolyList.append(sourceFolder + "/" + "gssurgo_g_" + state +
                         ".gdb/SAPOLYGON")

# Merge state boundaries
arcpy.Merge_management(statePolyList, vectorDB + "/SoilsStates")

# Create regional outline
arcpy.Dissolve_management(vectorDB + "/SoilsStates", vectorDB + "/SoilsRange",
                          "#", "#", "SINGLE_PART", "DISSOLVE_LINES")

# Calculate the field that determines the raster value
arcpy.AddField_management("SoilsRange", "rasterVal", "SHORT")
arcpy.CalculateField_management("SoilsRange", "rasterVal", 0, "PYTHON_9.3")

# Create template for the final raster
arcpy.PolygonToRaster_conversion("SoilsRange", "rasterVal",
                                 rasterFolder + "/rangeRaster",
                                 "MAXIMUM_COMBINED_AREA", "NONE", 30)

# ========================
# Create the state rasters
# ========================
for i in range(len(states)):
Example #4
import arcpy
from arcpy import env

env.workspace = r'D:\LiDAR_Factor_Full_valley\diff_images_2014_2016\FinalOutput.gdb'

output = r'D:\LiDAR_Factor_Full_valley\diff_images_2014_2016\outMerged.gdb'

env.overwriteOutput = True
fcs = arcpy.ListFeatureClasses()

for fc in fcs:
    if fc != 'Merged_Final':
        print('grabbing turf from ' + fc)
        arcpy.MakeFeatureLayer_management(fc, 'fc_lyr')
        arcpy.SelectLayerByAttribute_management('fc_lyr', 'NEW_SELECTION', 'gridcode = 1')
        arcpy.CopyFeatures_management('fc_lyr', output + '\\' + fc)

env.workspace = output


fcs = arcpy.ListFeatureClasses()

FC_List = []
for fc in fcs:
    FC_List.append(fc)

print('Merging layers')
arcpy.Merge_management(FC_List, output + '\\' + 'Merged_fcs')

print('Dissolving layers')
arcpy.Dissolve_management(output + '\\' + 'Merged_fcs', output + '\\' + 'Merged_dis')
    if debug == True:
        arcpy.AddMessage(
            "intersect fishnet & AOI: " +
            str(time.strftime("%m/%d/%Y  %H:%M:%S", time.localtime())))
    arcpy.Intersect_analysis([inputAOI, prefishnet], fishnet)
    deleteme.append(fishnet)

    numTiles = int(arcpy.GetCount_management(fishnet).getOutput(0))
    arcpy.AddMessage("AOI has " + str(numTiles) + " 10km square tiles.")

    fishnetBoundary = os.path.join("in_memory", "fishnetBoundary")
    if debug == True:
        arcpy.AddMessage(
            "fishnet boundary: " +
            str(time.strftime("%m/%d/%Y  %H:%M:%S", time.localtime())))
    arcpy.Dissolve_management(fishnet, fishnetBoundary)
    deleteme.append(fishnetBoundary)

    # Clip slope service layers over fishnet
    env.extent = fishnetBoundary
    env.mask = fishnetBoundary
    #arcpy.MakeImageServerLayer_management(inputSlope,"SlopeLayer")
    arcpy.MakeRasterLayer_management(inputSlope, "SlopeLayer")

    if runVegetation == True:
        arcpy.AddMessage(
            "Clipping soils to fishnet and joining parameter table...")
        vegetation = os.path.join("in_memory", "vegetation")
        if debug == True:
            arcpy.AddMessage(
                str(
def main(fcInputSegments, fcInputFullNetwork, fcOutputNodes, tempWorkspace):

    arcpy.env.overwriteOutput = True

    # Turn off Z and M geometry
    arcpy.env.outputMFlag = "Disabled"
    arcpy.env.outputZFlag = "Disabled"

    # Prep temporary files and layers
    arcpy.MakeFeatureLayer_management(fcInputSegments, "lyrInputSegments")
    arcpy.MakeFeatureLayer_management(fcInputFullNetwork,
                                      "lyrInputFullNetwork")
    fcInputFullNetworkTemp = gis_tools.newGISDataset(tempWorkspace,
                                                     "fcInputFullNetworkTemp")
    arcpy.CopyFeatures_management("lyrInputFullNetwork",
                                  fcInputFullNetworkTemp)
    fcBraidDslv = gis_tools.newGISDataset(tempWorkspace, "fcBraidDslv")
    fcSegmentDslv = gis_tools.newGISDataset(tempWorkspace, "fcSegmentDslv")
    fcNodeBraidToBraid = gis_tools.newGISDataset(tempWorkspace,
                                                 "fcNodeBraidToBraid")
    fcNodeBraidToBraidSingle = gis_tools.newGISDataset(
        tempWorkspace, "fcNodeBraidToBraidSingle")
    fcNodeBraidToBraidDslv = gis_tools.newGISDataset(tempWorkspace,
                                                     "fcNodeBraidToBraidDslv")
    fcNodeBraidToMainstem = gis_tools.newGISDataset(tempWorkspace,
                                                    "fcNodeBraidToMainstem")
    fcNodeBraidToMainstemSingle = gis_tools.newGISDataset(
        tempWorkspace, "fcNodeBraidToMainstemSingle")
    fcNodeBraidToMainstemDslv = gis_tools.newGISDataset(
        tempWorkspace, "fcNodeBraidToMainstemDslv")
    fcNodeTribConfluence = gis_tools.newGISDataset(tempWorkspace,
                                                   "fcNodeTribConfluence")
    fcNodeTribConfluenceSingle = gis_tools.newGISDataset(
        tempWorkspace, "fcNodeTribConfluenceSingle")
    fcNodeTribConfluenceDslv = gis_tools.newGISDataset(
        tempWorkspace, "fcNodeTribConfluenceDslv")
    fcNodesAll = gis_tools.newGISDataset(tempWorkspace, "fcNodesAll")
    fcNodesToSegments = gis_tools.newGISDataset(tempWorkspace,
                                                "fcNodesToSegments")
    tblNodeBMSummary = gis_tools.newGISDataset(tempWorkspace,
                                               "tblNodeBMSummary")
    tblNodeTCSummary = gis_tools.newGISDataset(tempWorkspace,
                                               "tblNodeTCSummary")

    # Check if the segmented stream network has a field named LineOID
    if findField(fcInputSegments, "LineOID"):
        LineOID = "LineOID"
        pass
    else:
        arcpy.AddMessage(
            "LineOID attribute field not found in input stream feature class. Using ObjectID field..."
        )
        LineOID = arcpy.Describe(fcInputSegments).OIDFieldName
        #arcpy.AddError("LineOID attribute does not exist in segmented feature class input!")
        #sys.exit(0)

    # Check if the full stream network has been run through the Find Braided Network tool.
    if findField(fcInputFullNetworkTemp, "IsBraided"):
        pass
    else:
        FindBraidedNetwork.main(fcInputFullNetworkTemp)

    # Braid-to-braid nodes
    arcpy.AddMessage("GNAT CTT: Generating braid-to-braid nodes...")
    arcpy.MakeFeatureLayer_management(fcInputFullNetworkTemp,
                                      "lyrInputFullNetworkTemp")
    arcpy.SelectLayerByAttribute_management("lyrInputFullNetworkTemp",
                                            "NEW_SELECTION", '"IsBraided" = 1')
    arcpy.SelectLayerByLocation_management("lyrInputFullNetworkTemp",
                                           "HAVE_THEIR_CENTER_IN",
                                           "lyrInputSegments", "#",
                                           "REMOVE_FROM_SELECTION")
    arcpy.Dissolve_management("lyrInputFullNetworkTemp", fcBraidDslv, "#", "#",
                              "SINGLE_PART")
    arcpy.Intersect_analysis([fcBraidDslv], fcNodeBraidToBraid, "ONLY_FID",
                             "#", "POINT")
    arcpy.MakeFeatureLayer_management(fcNodeBraidToBraid,
                                      "lyrNodeBraidToBraid")
    arcpy.MultipartToSinglepart_management("lyrNodeBraidToBraid",
                                           fcNodeBraidToBraidSingle)
    arcpy.MakeFeatureLayer_management(fcNodeBraidToBraidSingle,
                                      "lyrNodeBraidToBraidSingle")
    arcpy.Dissolve_management("lyrNodeBraidToBraidSingle",
                              fcNodeBraidToBraidDslv, "#", "#", "SINGLE_PART")
    arcpy.MakeFeatureLayer_management(fcNodeBraidToBraidDslv,
                                      "lyrNodeBraidToBraidDslv")
    arcpy.AddField_management("lyrNodeBraidToBraidDslv", "NODE_TYPE", "TEXT")
    arcpy.CalculateField_management("lyrNodeBraidToBraidDslv", "NODE_TYPE",
                                    '"BB"', "PYTHON_9.3")
    # Braid-to-mainstem nodes
    arcpy.AddMessage("GNAT CTT: Generating braid-to-mainstem nodes...")
    arcpy.Intersect_analysis([fcBraidDslv, fcInputSegments],
                             fcNodeBraidToMainstem, "#", "#", "POINT")
    arcpy.MakeFeatureLayer_management(fcNodeBraidToMainstem,
                                      "lyrNodeBraidToMainstem")
    arcpy.MultipartToSinglepart_management("lyrNodeBraidToMainstem",
                                           fcNodeBraidToMainstemSingle)
    arcpy.MakeFeatureLayer_management(fcNodeBraidToMainstemSingle,
                                      "lyrNodeBraidToMainstemSingle")
    arcpy.Dissolve_management("lyrNodeBraidToMainstemSingle",
                              fcNodeBraidToMainstemDslv, "#", "#",
                              "SINGLE_PART")
    arcpy.MakeFeatureLayer_management(fcNodeBraidToMainstemDslv,
                                      "lyrNodeBraidToMainstemDslv")
    arcpy.AddField_management("lyrNodeBraidToMainstemDslv", "NODE_TYPE",
                              "TEXT")
    arcpy.CalculateField_management("lyrNodeBraidToMainstemDslv", "NODE_TYPE",
                                    '"BM"', "PYTHON_9.3")
    # Tributary confluence nodes
    arcpy.AddMessage("GNAT CTT: Generating tributary nodes...")
    arcpy.Dissolve_management("lyrInputSegments", fcSegmentDslv, "#", "#",
                              "SINGLE_PART")
    arcpy.Intersect_analysis([fcSegmentDslv], fcNodeTribConfluence, "ONLY_FID",
                             "#", "POINT")
    arcpy.MakeFeatureLayer_management(fcNodeTribConfluence,
                                      "lyrNodeTribConfluence")
    arcpy.MultipartToSinglepart_management("lyrNodeTribConfluence",
                                           fcNodeTribConfluenceSingle)
    arcpy.MakeFeatureLayer_management(fcNodeTribConfluenceSingle,
                                      "lyrNodeTribConfluenceSingle")
    arcpy.Dissolve_management("lyrNodeTribConfluenceSingle",
                              fcNodeTribConfluenceDslv, "#", "#",
                              "SINGLE_PART")
    arcpy.MakeFeatureLayer_management(fcNodeTribConfluenceDslv,
                                      "lyrNodeTribConfluenceDslv")
    arcpy.AddField_management("lyrNodeTribConfluenceDslv", "NODE_TYPE", "TEXT")
    arcpy.CalculateField_management("lyrNodeTribConfluenceDslv", "NODE_TYPE",
                                    '"TC"', "PYTHON_9.3")
    # Merge nodes feature classes together
    arcpy.AddMessage("GNAT CTT: Merge and save node feature class...")
    node_list = [
        "lyrNodeBraidToBraidDslv", "lyrNodeBraidToMainstemDslv",
        "lyrNodeTribConfluenceDslv"
    ]
    fieldMapping = nodeFieldMap(node_list)
    arcpy.Merge_management(node_list, fcNodesAll, fieldMapping)
    arcpy.MakeFeatureLayer_management(fcNodesAll, "lyrNodesAll")
    # Spatial join nodes to segmented stream network
    arcpy.SpatialJoin_analysis("lyrInputSegments", "lyrNodesAll",
                               fcNodesToSegments, "JOIN_ONE_TO_MANY",
                               "KEEP_COMMON", "#", "INTERSECT")
    # Save merged nodes to disk
    arcpy.MakeFeatureLayer_management(fcNodesAll, "lyrNodesAll")
    arcpy.CopyFeatures_management("lyrNodesAll", fcOutputNodes)
    # Summarize each node type by attribute field LineOID
    arcpy.AddMessage("GNAT CTT: Summarize nodes per stream segments...")
    arcpy.MakeFeatureLayer_management(fcNodesToSegments, "lyrNodesToSegments")
    arcpy.SelectLayerByAttribute_management("lyrNodesToSegments",
                                            "NEW_SELECTION",
                                            """"NODE_TYPE" = 'BM'""")
    arcpy.Statistics_analysis("lyrNodesToSegments", tblNodeBMSummary,
                              [["NODE_TYPE", "COUNT"]], LineOID)
    arcpy.SelectLayerByAttribute_management("lyrNodesToSegments",
                                            "CLEAR_SELECTION")
    arcpy.SelectLayerByAttribute_management("lyrNodesToSegments",
                                            "NEW_SELECTION",
                                            """"NODE_TYPE" = 'TC'""")
    arcpy.Statistics_analysis("lyrNodesToSegments", tblNodeTCSummary,
                              [["NODE_TYPE", "COUNT"]], LineOID)
    arcpy.SelectLayerByAttribute_management("lyrNodesToSegments",
                                            "CLEAR_SELECTION")
    # Spatial join each summary table as a new field to final segment network
    arcpy.AddField_management("lyrInputSegments", "NODES_BM", "TEXT")
    arcpy.AddField_management("lyrInputSegments", "NODES_TC", "TEXT")
    arcpy.MakeTableView_management(tblNodeBMSummary, "viewNodeBMSummary")
    arcpy.MakeTableView_management(tblNodeTCSummary, "viewNodeTCSummary")
    arcpy.AddJoin_management("lyrInputSegments", LineOID, "viewNodeBMSummary",
                             LineOID, "KEEP_COMMON")
    arcpy.CalculateField_management("lyrInputSegments", "NODES_BM",
                                    '"!COUNT_NODE_TYPE!"', "PYTHON_9.3")
    arcpy.RemoveJoin_management("lyrInputSegments")
    arcpy.AddJoin_management("lyrInputSegments", LineOID, "viewNodeTCSummary",
                             LineOID, "KEEP_COMMON")
    arcpy.CalculateField_management("lyrInputSegments", "NODES_TC",
                                    '"!COUNT_NODE_TYPE!"', "PYTHON_9.3")
    arcpy.RemoveJoin_management("lyrInputSegments")

    arcpy.AddMessage("GNAT CTT: Processing complete.")
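
The node tool above relies on a findField helper (alongside gis_tools, nodeFieldMap and FindBraidedNetwork, which are project imports not shown). A minimal sketch of what such a field check might look like, matching the calls above but not taken from the tool's source:

import arcpy

def findField(feature_class, field_name):
    # True if the feature class already contains a field with this name
    # (field names are not case sensitive in ArcGIS).
    return any(f.name.lower() == field_name.lower()
               for f in arcpy.ListFields(feature_class))
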
Example #7
def createBoundaryFeatureClass(raster_footprint, target_raster_boundary, statistics_fields="", alter_field_infos=None):
    a = datetime.datetime.now()
    aa = a
    deleteFields(raster_footprint)

    lasd_boundary_0 = "{}0".format(target_raster_boundary)
    lasd_boundary_1 = "{}1".format(target_raster_boundary)

    deleteFileIfExists(lasd_boundary_0, True)
    deleteFileIfExists(lasd_boundary_1, True)

    arcpy.AddMessage("\tMultipart to Singlepart")
    arcpy.MultipartToSinglepart_management(in_features=raster_footprint, out_feature_class=lasd_boundary_0)
    Utility.addToolMessages()
    arcpy.RepairGeometry_management(in_features=lasd_boundary_0, delete_null="DELETE_NULL")
    deleteFields(lasd_boundary_0)

    arcpy.AddMessage("\tBuffering")
    arcpy.Buffer_analysis(in_features=lasd_boundary_0, out_feature_class=lasd_boundary_1, buffer_distance_or_field="10 Meters", line_side="FULL", line_end_type="ROUND", dissolve_option="NONE", method="PLANAR")
    Utility.addToolMessages()
    arcpy.RepairGeometry_management(in_features=lasd_boundary_1, delete_null="DELETE_NULL")
    deleteFields(lasd_boundary_1)

    deleteFileIfExists(lasd_boundary_0, True)

    lasd_boundary_2 = "{}2".format(target_raster_boundary)
    deleteFileIfExists(lasd_boundary_2, True)
    arcpy.AddMessage("\tDissolving with statistics: {}".format(statistics_fields))
    arcpy.Dissolve_management(
        in_features=lasd_boundary_1,
        out_feature_class=lasd_boundary_2,
        statistics_fields=statistics_fields
        )
    Utility.addToolMessages()
    arcpy.RepairGeometry_management(in_features=lasd_boundary_2, delete_null="DELETE_NULL")
    deleteFields(lasd_boundary_2)
    a = doTime(a, "\tDissolved to {}".format(lasd_boundary_2))


    if alter_field_infos is not None:
        for alter_field_info in alter_field_infos:
            try:
                alterField(lasd_boundary_2, alter_field_info[0], alter_field_info[1], alter_field_info[2])
            except:
                pass

        a = doTime(a, "\tRenamed summary fields")

    lasd_boundary_3 = "{}3".format(target_raster_boundary)
    deleteFileIfExists(lasd_boundary_3, True)
    arcpy.EliminatePolygonPart_management(in_features=lasd_boundary_2, out_feature_class=lasd_boundary_3, condition="AREA", part_area="10000 SquareMiles", part_area_percent="0", part_option="CONTAINED_ONLY")
    arcpy.RepairGeometry_management(in_features=lasd_boundary_3, delete_null="DELETE_NULL")
    deleteFileIfExists(lasd_boundary_1, True)
    deleteFields(lasd_boundary_3)
    lasd_boundary_4 = "{}4".format(target_raster_boundary)
    deleteFileIfExists(lasd_boundary_4, True)
    arcpy.SimplifyPolygon_cartography(in_features=lasd_boundary_3, out_feature_class=lasd_boundary_4, algorithm="BEND_SIMPLIFY", tolerance="20 Meters", minimum_area="0 Unknown", error_option="RESOLVE_ERRORS", collapsed_point_option="NO_KEEP", in_barriers="")
    arcpy.RepairGeometry_management(in_features=lasd_boundary_4, delete_null="DELETE_NULL")
    deleteFields(lasd_boundary_4)
    #try:
    #    arcpy.DeleteField_management(in_table=lasd_boundary_4, drop_field="Id;ORIG_FID;InPoly_FID;SimPgnFlag;MaxSimpTol;MinSimpTol")
    #except:
    #    pass
    deleteFileIfExists(lasd_boundary_3, True)

    deleteFileIfExists(target_raster_boundary, True)
    arcpy.Buffer_analysis(in_features=lasd_boundary_4, out_feature_class=target_raster_boundary, buffer_distance_or_field="-10 Meters", line_side="FULL", line_end_type="ROUND", dissolve_option="ALL", method="PLANAR")
    arcpy.RepairGeometry_management(in_features=target_raster_boundary, delete_null="DELETE_NULL")
    deleteFields(target_raster_boundary)
    deleteFileIfExists(lasd_boundary_4, True)

    if alter_field_infos is not None and len(alter_field_infos) > 0:
        fields = ";".join([field[1] for field in alter_field_infos])
        arcpy.JoinField_management(in_data=target_raster_boundary, in_field="OBJECTID", join_table=lasd_boundary_2, join_field="OBJECTID", fields=fields)
        Utility.addToolMessages()

    deleteFileIfExists(lasd_boundary_2, True)

    a = doTime(aa, "Dissolved las footprints to dataset boundary {} ".format(target_raster_boundary))
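
createBoundaryFeatureClass leans on small utilities (deleteFileIfExists, deleteFields, doTime, alterField, Utility.addToolMessages) defined elsewhere in the project. Below is a minimal sketch of deleteFileIfExists, assuming its second argument selects between an arcpy delete and a plain filesystem delete; this is a guess at the intent, not the project's implementation.

import os
import arcpy

def deleteFileIfExists(dataset, useArcpy=True):
    # Remove a dataset or file if it is present; do nothing otherwise.
    if useArcpy:
        if arcpy.Exists(dataset):
            arcpy.Delete_management(dataset)
    elif os.path.exists(dataset):
        os.remove(dataset)
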
Example #8
                # --- Handle multi-part geometries ---
                # Explode into single part features
                arcpy.MultipartToSinglepart_management("RevLine",
                                                       LineShapeSingle)

                # Run repair geometry in case any bad geometry was created.
                arcpy.RepairGeometry_management(LineShapeSingle)

                # Create a point for each part
                arcpy.FeatureToPoint_management(LineShapeSingle, LineShape,
                                                "INSIDE")

                # Dissolve the points into a multi-part point using the LinkGUID
                # field
                arcpy.Dissolve_management(LineShape, LineShapeDissolve,
                                          "LINKGUID", "", "MULTI_PART",
                                          "DISSOLVE_LINES")

                TotalErrors = TotalErrors + count

                arcpy.Append_management(LineShapeDissolve, TempFC, "NO_TEST",
                                        "")

            else:
                arcpy.AddMessage("  .. No line errors exist in selected " \
                + "session.")

            # ----------------------------------
            # Add Polygon Errors to XY Shapefile
            # ----------------------------------
def cleanLineGeom(inLine,streamID,segID,lineClusterTolerance):
    lyrs = []
    inLineName = arcpy.Describe(inLine).name
    oidFieldName = arcpy.Describe(inLine).oidFieldName
    
    # Add new field to store field length values (to replace the "Shape_Length" or "Shape_Leng" fields)
    arcpy.AddField_management(inLine, "SegLen", "DOUBLE", "", "", "", "","NULLABLE", "NON_REQUIRED")
    arcpy.CalculateField_management(inLine, "SegLen", "!shape.length@meters!", "PYTHON_9.3")

    # Separate short and long lines into different layers, then select all longs that touch shorts
    shortLines = arcpy.MakeFeatureLayer_management(inLine,'shortLines',"SegLen" + ' <= '+ str(lineClusterTolerance))
    lyrs.append(shortLines)
    longLines = arcpy.MakeFeatureLayer_management(inLine,'longLines', "SegLen" + ' > '+str(lineClusterTolerance))
    lyrs.append(longLines)
    arcpy.SelectLayerByLocation_management(longLines,"BOUNDARY_TOUCHES",shortLines,'',"NEW_SELECTION")

    # Make a dictionary relating shortLine streamID/segID pairs to their origin- and endpoint coordinate pairs
    shortDict = {}
    rows = arcpy.SearchCursor(shortLines)
    for row in rows:
        shp = row.Shape
        shortDict[(row.getValue(streamID),row.getValue(segID))] = [(shp.firstPoint.X,shp.firstPoint.Y),(shp.lastPoint.X,shp.lastPoint.Y)]
    del rows

    # Make a dictionary relating longLine origin- and endpoint coordinate pairs to segIDs
    longDict = {}
    rows = arcpy.SearchCursor(longLines)
    for row in rows:
        shp = row.Shape
        firstCoords = (shp.firstPoint.X,shp.firstPoint.Y)
        lastCoords = (shp.lastPoint.X,shp.lastPoint.Y)
        longDict[firstCoords] = (row.getValue(streamID),row.getValue(segID))
        longDict[lastCoords] = (row.getValue(streamID),row.getValue(segID))
    del rows

    # Create new dictionary relating shortLine segIDs to longLine segIDs that share a point
    dissolveDict = {}
    # If a shortLine's coordinate pair matches an entry in longDict,
    # and the longLine's streamID matches, add their segIDs to dissolveDict
    for ids, coordPairs in shortDict.items():
        for coords in [coordPairs[0],coordPairs[1]]:
            if coords in longDict:
                if longDict[coords][0] == ids[0]:
                    dissolveDict[ids[1]] = longDict[coords][1]

    # Give all longLines a 'dissolve' value equal to their segID
    arcpy.AddField_management(inLine,'dissolve','LONG')
    arcpy.SelectLayerByAttribute_management(longLines,"CLEAR_SELECTION")
    arcpy.CalculateField_management(longLines,'dissolve','[{0}]'.format(segID),'VB')

    # If shortLine in dissolveDict, give it a 'dissolve' value equal to the dissolveDict value
    # Else give it its own segID
    urows = arcpy.UpdateCursor(shortLines,'','',segID+';dissolve')
    for urow in urows:
        if dissolveDict.get(urow.getValue(segID)):
            urow.dissolve = dissolveDict[urow.getValue(segID)]
        else:
            urow.dissolve = urow.getValue(segID)
        urows.updateRow(urow)
    del urows

    arcpy.Dissolve_management(inLine, r'in_memory\seg_dslv', 'dissolve', '', 'MULTI_PART')
    cleaned = arcpy.JoinField_management(r'in_memory\seg_dslv', 'dissolve', inLine, segID, [segID, streamID])
    arcpy.DeleteField_management(cleaned, 'dissolve')

    return cleaned
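
A minimal usage sketch for cleanLineGeom; the input feature class, field names and 5-meter cluster tolerance are hypothetical placeholders, and arcpy is assumed to be imported at module level.

cleaned_fc = cleanLineGeom(r'C:\data\streams.gdb\segments',
                           'StreamID', 'SegID', 5)
arcpy.CopyFeatures_management(cleaned_fc, r'C:\data\streams.gdb\segments_clean')
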
Example #10
# -*- coding:gb2312 -*-
import arcpy
from arcpy import env
# env.workspace = r'E:\\mapmatching\\bh.gdb'
print("import over")
# env.workspace = r'C:\Users\Zero Yi\Documents\L\data_process\bh.gdb'
print("workspace over")

arcpy.Dissolve_management(r"C:\Users\Zero Yi\Documents\L\data_preprocess\bh_dissolve\BeiHuanDaDao.shp",  r"C:\Users\Zero Yi\Documents\L\data_preprocess\bh_dissolve\BeiHuanDaDao2.shp",
                          "AB_ROADNO", "", "SINGLE_PART","DISSOLVE_LINES")
print("test over")
#arcpy.Dissolve_management("bh_142", "E:\\mapmatching\\bh_dissolve\\bh142_dissolved",
                          #"AB_ROADNO", "", "SINGLE_PART","DISSOLVE_LINES")
Example #11
def databaseSetup(output_workspace,
                  output_gdb_name,
                  hu_dataset,
                  hu8_field,
                  hu12_field,
                  hucbuffer,
                  nhd_path,
                  elevation_projection_template,
                  alt_buff,
                  version=None):
    """Set up the local folders and copy hydrography data into input geodatabases.

	This tool creates a folder corresponding to each local hydrologic unit, usually a HUC8, and fills those folders with the flowlines, inwalls, and outwalls that will be used later to hydro-enforce the digital elevation model for each hydrologic unit. This tool also creates a global geodatabase with a feature class for the whole domain.
	
	Parameters
	----------
	output_workspace : str
		Output directory where processing will occur.
	output_gdb_name : str
		Global file geodatabase to be created.
	hu_dataset : str
		Feature class that defines local folder geographic boundaries.
	hu8_field : str
		Field name in "hu_dataset" to dissolve boundaries to local folder extents.
	hu12_field : str
		Field name in "hu_dataset" from which inwalls are generated.
	hucbuffer : str
		Distance to buffer local folder bounds in map units.
	nhd_path : str
		Path to workspace containing NHD geodatabases.
	elevation_projection_template : str
		Path to DEM file to use as a projection template.
	alt_buff : str
		Alternative buffer to use on local folder boundaries.
	version : str
		Package version number.
	
	Returns
	-------
	None

	Notes
	-----
	As this tool moves through each local hydrologic unit it searches the *nhd_path* for a geodatabase with hydrography data with the same HUC-4 as the local hydrologic unit. If this cannot be found the tool will skip that local hydrologic unit. Non-NHD hydrography data can be used with this tool, but it must be named and organized exactly as the NHD hydrography.
	"""

    if version:
        arcpy.AddMessage('StreamStats Data Preparation Tools version: %s' %
                         (version))

    # set up geoprocessor, with spatial analyst license
    if arcpy.CheckExtension("Spatial") == "Available":
        arcpy.CheckOutExtension("Spatial")
    else:
        arcpy.AddMessage('License Error')

    # Set script to overwrite if files exist
    arcpy.env.overwriteOutput = True

    localName = "local"
    subName = "subWatershed"
    GDB_name = "input_data.gdb"

    #set scratch and arcpy workspaces
    arcpy.env.workspace = output_workspace
    arcpy.env.scratchWorkspace = output_workspace

    #disable Z & M values
    arcpy.env.outputZFlag = "Disabled"
    arcpy.AddMessage('Z: ' + arcpy.env.outputZFlag)
    arcpy.env.outputMFlag = "Disabled"
    arcpy.AddMessage('M: ' + arcpy.env.outputMFlag)

    try:
        #name output fileGDB
        output_gdb = os.path.join(output_workspace, output_gdb_name + ".gdb")
        #output_gdb = output_workspace + "\\" + output_gdb_name + ".gdb"

        #create container geodatabase
        if arcpy.Exists(output_gdb):
            arcpy.Delete_management(output_gdb)

        arcpy.CreateFileGDB_management(output_workspace,
                                       output_gdb_name + ".gdb")

        #dissolve at 8 dig level and put in output workspace
        hu8_dissolve = arcpy.Dissolve_management(
            hu_dataset, os.path.join(output_gdb, "huc8index"), hu8_field)

        elev_spatial_ref = arcpy.Describe(
            elevation_projection_template
        ).spatialReference  # read the elevation spatial ref.
        orig_spatial_ref = arcpy.Describe(
            hu_dataset
        ).spatialReference  # read the local division spatial ref.

        # Setup loop to iterate thru each HUC in WBD dataset
        #fields = hu8_field
        with arcpy.da.SearchCursor(hu8_dissolve, hu8_field) as cursor:
            for row in cursor:
                #Get current huc 8
                current_hu8 = str(row[0])
                current_db = os.path.join(output_workspace, current_hu8,
                                          GDB_name)
                #current_db = output_workspace + "\\" + row[0] + "\\input_data.gdb"
                arcpy.AddMessage("")
                #arcpy.AddMessage("%s = \"%s\"" % (hu8_field, current_hu8))

                #check to make sure NHD exists and set variable names, if no NHD for HUC, skip it
                arcpy.AddMessage("Starting processing local folder %s...." %
                                 (current_hu8))
                arcpy.AddMessage("	Checking to see if NHD exists for %s" %
                                 (current_hu8[:4]))
                NHDExists = False
                if arcpy.Exists(
                        os.path.join(
                            nhd_path,
                            "NHD_H_" + current_hu8[:4] + "_HU4_GDB" + ".gdb")):
                    orig_4dig_NHD = os.path.join(
                        nhd_path,
                        "NHD_H_" + current_hu8[:4] + "_HU4_GDB" + ".gdb")
                    NHDExists = True
                else:
                    arcpy.AddMessage(
                        "     4 DIGIT NHD DOES NOT EXIST FOR THE CURRENT HUC")
                    arcpy.AddMessage(
                        "     Please download NHD for this HUC and/or ensure NHD geodatabase is named correctly"
                    )
                    NHDExists = False

                #If NHD exists for current HUC 8, then do the work
                if NHDExists:
                    #Create folder for HU inside output folder
                    hydrog_projection_template = os.path.join(
                        orig_4dig_NHD, "Hydrography", "NHDFlowline"
                    )  # get a file to generate hydrography clip.
                    hydrog_spatial_ref = arcpy.Describe(
                        hydrog_projection_template
                    ).spatialReference  # make spatial reference object for reproject later
                    arcpy.CreateFolder_management(output_workspace,
                                                  current_hu8)
                    arcpy.CreateFolder_management(
                        os.path.join(output_workspace, current_hu8), "Layers")
                    arcpy.CreateFolder_management(
                        os.path.join(output_workspace, current_hu8),
                        "tmp")  # make scratch workspace later for hydroDEM.

                    #Create file geodatabase to house data
                    arcpy.CreateFileGDB_management(
                        os.path.join(output_workspace, current_hu8), GDB_name)

                    #start output file creation
                    #----------------------------------
                    #WBD Processing
                    #----------------------------------
                    arcpy.AddMessage("  Doing WBD processing")

                    #create variables for huc buffers
                    hucbuffer_custom = os.path.join(
                        current_db, "local_buffer" + str(hucbuffer))
                    hucbuffer_custom_elev_dd83 = os.path.join(
                        current_db,
                        "local_buffer_elev" + str(hucbuffer) + "_dd83")
                    hucbuffer_custom_hydrog_dd83 = os.path.join(
                        current_db,
                        "local_buffer_hydrog" + str(hucbuffer) + "_dd83")
                    hucbuffer_alt = os.path.join(current_db,
                                                 "local_buffer%s" % (alt_buff))

                    #start process
                    arcpy.AddMessage(
                        "    Selecting current local hydrologic unit.")
                    arcpy.Select_analysis(
                        hu_dataset, os.path.join(current_db, subName),
                        "\"%s\" = \'%s\'" % (hu8_field, current_hu8))

                    arcpy.AddMessage("    Dissolving sub-watershed polygons")
                    arcpy.Dissolve_management(
                        os.path.join(current_db, subName),
                        os.path.join(current_db, localName), hu8_field)

                    arcpy.AddMessage(
                        "    Creating inner and outer wall polyline feature classes"
                    )
                    arcpy.PolygonToLine_management(
                        os.path.join(current_db, subName),
                        os.path.join(current_db, "huc12_line"))
                    arcpy.PolygonToLine_management(
                        os.path.join(current_db, localName),
                        os.path.join(current_db, "outer_wall"))
                    arcpy.Erase_analysis(
                        os.path.join(current_db, "huc12_line"),
                        os.path.join(current_db, "outer_wall"),
                        os.path.join(current_db, "inwall_edit"))

                    arcpy.AddMessage(
                        "    Creating user-defined buffered outwall dataset")
                    arcpy.Buffer_analysis(os.path.join(current_db, localName),
                                          hucbuffer_custom, hucbuffer, "FULL",
                                          "ROUND")
                    arcpy.AddMessage(
                        "    Creating %s meter buffered outwall dataset" %
                        (alt_buff))
                    arcpy.Buffer_analysis(os.path.join(current_db, localName),
                                          hucbuffer_alt,
                                          "%s METERS" % (alt_buff), "FULL",
                                          "ROUND")

                    arcpy.AddMessage(
                        "    Creating unprojected buffered outwall dataset for elevation and hydrography clips"
                    )
                    arcpy.Project_management(hucbuffer_custom,
                                             hucbuffer_custom_elev_dd83,
                                             elev_spatial_ref,
                                             in_coor_system=orig_spatial_ref)
                    arcpy.Project_management(hucbuffer_custom,
                                             hucbuffer_custom_hydrog_dd83,
                                             hydrog_spatial_ref,
                                             in_coor_system=orig_spatial_ref)

                    arcpy.AddMessage("    Creating sink point feature class")
                    arcpy.CreateFeatureclass_management(
                        os.path.join(output_workspace, current_hu8,
                                     "input_data.gdb"),
                        "sinkpoint_edit", "POINT", "", "", "",
                        os.path.join(current_db, localName))

                    #erase huc 12 line dataset after inwall is created
                    if arcpy.Exists(os.path.join(current_db, "huc12_line")):
                        arcpy.Delete_management(
                            os.path.join(current_db, "huc12_line"))

                    #----------------------------------
                    #NHD Processing
                    #----------------------------------
                    arcpy.AddMessage("  Doing NHD processing")

                    #Create NHD feature dataset within current HU database
                    arcpy.AddMessage(
                        "    Creating NHD feature dataset in local hydrologic unit workspace"
                    )
                    arcpy.CreateFeatureDataset_management(
                        current_db, "Hydrography", orig_spatial_ref)
                    arcpy.CreateFeatureDataset_management(
                        current_db, "Reference", orig_spatial_ref)

                    #process each feature type in NHD
                    featuretypelist = [
                        "NHDArea", "NHDFlowline", "NHDWaterbody"
                    ]
                    for featuretype in featuretypelist:

                        #clip unprojected feature
                        arcpy.AddMessage("      Clipping   " + featuretype)
                        arcpy.Clip_analysis(
                            os.path.join(orig_4dig_NHD, "Hydrography",
                                         featuretype),
                            hucbuffer_custom_hydrog_dd83,
                            os.path.join(current_db, featuretype + "_dd83"))

                        #project clipped feature
                        arcpy.AddMessage("      Projecting " + featuretype)
                        arcpy.Project_management(
                            os.path.join(current_db, featuretype + "_dd83"),
                            os.path.join(current_db, featuretype + "_project"),
                            orig_spatial_ref)
                        arcpy.CopyFeatures_management(
                            os.path.join(current_db, featuretype + "_project"),
                            os.path.join(current_db, "Hydrography",
                                         featuretype))

                        #delete unprojected and temporary projected NHD feature classes
                        arcpy.Delete_management(
                            os.path.join(current_db, featuretype + "_dd83"))
                        arcpy.Delete_management(
                            os.path.join(current_db, featuretype + "_project"))

                    #create editable dendrite feature class from NHDFlowline
                    arcpy.AddMessage(
                        "    Creating copy of NHDFlowline to preserve as original"
                    )
                    arcpy.CopyFeatures_management(
                        os.path.join(current_db, "Hydrography", "NHDFlowline"),
                        os.path.join(current_db, "Hydrography",
                                     "NHDFlowline_orig"))

                    arcpy.AddMessage("    Adding fields to NHDFlowline")
                    arcpy.AddField_management(
                        os.path.join(current_db, "Hydrography", "NHDFlowline"),
                        "comments", "text", "250")
                    arcpy.AddField_management(
                        os.path.join(current_db, "Hydrography", "NHDFlowline"),
                        "to_steward", "text", "50")
                    arcpy.AddMessage("    Finished local %s" % current_hu8)

                #if no NHD, skip the HUC
                else:
                    arcpy.AddMessage(
                        "     Processing skipped for this HUC--NO NHD")

            #del cursor, row

    # handle errors and report using gp.addmessage function
    except:
        #If we have messages of severity error (2), we assume a GP tool raised it,
        #  so we'll output that.  Otherwise, we assume we raised the error and the
        #  information is in errMsg.
        #
        if arcpy.GetMessages(2):
            arcpy.AddError(arcpy.GetMessages(2))
        else:
            arcpy.AddError(str(errMsg))
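
A minimal call sketch for databaseSetup based on the parameters documented in its docstring; every path and value below is a hypothetical placeholder.

if __name__ == '__main__':
    databaseSetup(output_workspace=r'C:\ss_work',
                  output_gdb_name='global',
                  hu_dataset=r'C:\ss_work\wbd.gdb\WBDHU12',
                  hu8_field='HUC8',
                  hu12_field='HUC12',
                  hucbuffer='2000',
                  nhd_path=r'C:\ss_work\nhd',
                  elevation_projection_template=r'C:\ss_work\dem\ned10m.tif',
                  alt_buff='50',
                  version='1.0')
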
Example #12
    else:
        arcpy.TableToTable_conversion(PARCEL_TO_SUBZONE_FILE,
                                      arcpy.env.workspace, parcel_subzone)
        print("Created {}\\{}".format(arcpy.env.workspace, parcel_subzone))

    # join with parcel to subzone
    parcel_w_subzone = "parcel_with_subzone"
    if arcpy.Exists(parcel_w_subzone):
        print("Found {} -- skipping creation".format(parcel_w_subzone))
    else:
        joined_layer = arcpy.AddJoin_management(args.parcel_layer, "PARCEL_ID",
                                                parcel_subzone, "PARCEL_ID",
                                                "KEEP_ALL")
        print("Created joined layer {}".format(joined_layer))

        # make it real
        arcpy.CopyFeatures_management(joined_layer, parcel_w_subzone)
        print("Created {}\\{}".format(arcpy.env.workspace, parcel_w_subzone))

    # dissolve to taz and subzone
    parcel_subzone_dissolved = "parcel_subzone_dissolved"
    if arcpy.Exists(parcel_subzone_dissolved):
        print("Found {} -- skipping dissolve".format(parcel_subzone_dissolved))
    else:
        arcpy.Dissolve_management(parcel_w_subzone, parcel_subzone_dissolved, [
            "{}_taz_key".format(parcel_subzone),
            "{}_subzone".format(parcel_subzone)
        ])
        print("Created {}\\{}".format(arcpy.env.workspace,
                                     parcel_subzone_dissolved))
Example #13
def main(fcLineNetwork,
         fieldStreamRouteID,
         fieldConfinement,
         fieldConstriction,
         strSeedDistance,
         inputliststrWindowSize,
         outputWorkspace,
         tempWorkspace=arcpy.env.scratchWorkspace):
    """Perform a Moving Window Analysis on a Line Network."""

    liststrWindowSize = inputliststrWindowSize.split(";")

    fcLineNetworkDissolved = gis_tools.newGISDataset(
        tempWorkspace, "GNAT_MWA_LineNetworkDissolved")
    arcpy.Dissolve_management(fcLineNetwork,
                              fcLineNetworkDissolved,
                              fieldStreamRouteID,
                              multi_part=False,
                              unsplit_lines=True)

    listLineGeometries = arcpy.CopyFeatures_management(fcLineNetworkDissolved,
                                                       arcpy.Geometry())

    listWindows = []
    listSeeds = []
    listWindowEvents = []
    listgWindows = []
    intSeedID = 0

    iRoutes = int(
        arcpy.GetCount_management(fcLineNetworkDissolved).getOutput(0))
    arcpy.SetProgressor("step", "Processing Each Route", 0, iRoutes, 1)
    iRoute = 0
    with arcpy.da.SearchCursor(
            fcLineNetworkDissolved,
        ["SHAPE@", fieldStreamRouteID, "SHAPE@LENGTH"]) as scLines:
        for fLine in scLines:  #Loop Through Routes
            arcpy.SetProgressorLabel("Route: " + str(iRoute) +
                                     " Seed Point: " + str(intSeedID))
            arcpy.SetProgressorPosition(iRoute)
            gLine = fLine[0]
            dblSeedPointPosition = max(
                float(s) for s in liststrWindowSize
            ) / 2  # Start seeds at position of largest window
            while dblSeedPointPosition + max(
                    float(s) for s in liststrWindowSize) / 2 < fLine[2]:
                arcpy.SetProgressorLabel("Route: " + str(iRoute) +
                                         " Seed Point: " + str(intSeedID))
                gSeedPointPosition = gLine.positionAlongLine(
                    dblSeedPointPosition)
                listSeeds.append([
                    scLines[1], intSeedID, gSeedPointPosition
                ])  #gSeedPointPosition.X,gSeedPointPosition.Y])
                for strWindowSize in liststrWindowSize:
                    dblWindowSize = float(strWindowSize)
                    dblLengthStart = dblSeedPointPosition - dblWindowSize / 2
                    dblLengthEnd = dblSeedPointPosition + dblWindowSize / 2

                    gPointStartLocation = gLine.positionAlongLine(
                        dblLengthStart)
                    gPointEndLocation = gLine.positionAlongLine(dblLengthEnd)
                    gTemp = arcpy.Geometry()
                    listgWindowTemp = arcpy.SplitLineAtPoint_management(
                        gLine, [gPointStartLocation, gPointEndLocation], gTemp,
                        "1 METER")
                    #TODO: Need a better method to select the line here!!
                    for gWindowTemp in listgWindowTemp:
                        if abs(gWindowTemp.length - dblWindowSize) < 10:
                            listgWindows.append([
                                scLines[1], intSeedID, dblWindowSize,
                                gWindowTemp
                            ])
                    # End TODO
                    listWindows.append([
                        scLines[1], intSeedID, dblWindowSize,
                        gPointStartLocation
                    ])
                    listWindows.append([
                        scLines[1], intSeedID, dblWindowSize, gPointEndLocation
                    ])
                    listWindowEvents.append([
                        scLines[1], intSeedID, dblWindowSize, dblLengthStart,
                        dblLengthEnd
                    ])
                dblSeedPointPosition = dblSeedPointPosition + float(
                    strSeedDistance)
                intSeedID = intSeedID + 1
            iRoute = iRoute + 1

    fcSeedPoints = gis_tools.newGISDataset(tempWorkspace,
                                           "GNAT_MWA_SeedPoints")
    fcWindowEndPoints = gis_tools.newGISDataset(tempWorkspace,
                                                "GNAT_MWA_WindowEndPoints")
    fcWindowLines = gis_tools.newGISDataset(tempWorkspace,
                                            "GNAT_MWA_WindowLines")

    arcpy.CreateFeatureclass_management(tempWorkspace,
                                        "GNAT_MWA_SeedPoints",
                                        "POINT",
                                        spatial_reference=fcLineNetwork)
    arcpy.CreateFeatureclass_management(tempWorkspace,
                                        "GNAT_MWA_WindowEndPoints",
                                        "POINT",
                                        spatial_reference=fcLineNetwork)
    arcpy.CreateFeatureclass_management(tempWorkspace,
                                        "GNAT_MWA_WindowLines",
                                        "POLYLINE",
                                        spatial_reference=fcLineNetwork)

    gis_tools.resetField(fcSeedPoints, "RouteID", "LONG")
    gis_tools.resetField(fcSeedPoints, "SeedID", "LONG")

    gis_tools.resetField(fcWindowEndPoints, "RouteID", "LONG")
    gis_tools.resetField(fcWindowEndPoints, "SeedID", "LONG")
    gis_tools.resetField(fcWindowEndPoints, "Seg", "DOUBLE")

    gis_tools.resetField(fcWindowLines, "RouteID", "LONG")
    gis_tools.resetField(fcWindowLines, "SeedID", "LONG")
    gis_tools.resetField(fcWindowLines, "Seg", "DOUBLE")

    with arcpy.da.InsertCursor(
            fcSeedPoints, ["RouteID", "SeedID", "SHAPE@XY"]) as icSeedPoints:
        for row in listSeeds:
            icSeedPoints.insertRow(row)

    with arcpy.da.InsertCursor(
            fcWindowEndPoints,
        ["RouteID", "SeedID", "Seg", "SHAPE@XY"]) as icWindowEndPoints:
        for row in listWindows:
            icWindowEndPoints.insertRow(row)

    with arcpy.da.InsertCursor(
            fcWindowLines,
        ["RouteID", "SeedID", "Seg", "SHAPE@"]) as icWindowLines:
        for row in listgWindows:
            icWindowLines.insertRow(row)

    fcIntersected = gis_tools.newGISDataset(
        tempWorkspace, "GNAT_MWA_IntersectWindowAttributes")
    arcpy.Intersect_analysis([fcWindowLines, fcLineNetwork],
                             fcIntersected,
                             "ALL",
                             output_type="LINE")

    # Confinement
    tblSummaryStatisticsConfinement = gis_tools.newGISDataset(
        tempWorkspace, "GNAT_MWA_SummaryStatsTableConfinement")
    arcpy.Statistics_analysis(
        fcIntersected, tblSummaryStatisticsConfinement, "Shape_Length SUM",
        fieldStreamRouteID + ";SeedID;Seg;" + fieldConfinement)

    tblSummaryStatisticsPivot = gis_tools.newGISDataset(
        tempWorkspace, "GNAT_MWA_SummaryStatisticsPivotTable")
    arcpy.PivotTable_management(tblSummaryStatisticsConfinement,
                                "Route;SeedID;Seg", fieldConfinement,
                                "SUM_Shape_Length", tblSummaryStatisticsPivot)

    fieldConfinementValue = gis_tools.resetField(tblSummaryStatisticsPivot,
                                                 "CONF_Value", "DOUBLE")

    if len(arcpy.ListFields(tblSummaryStatisticsPivot,
                            fieldConfinement + "1")) == 0:
        arcpy.AddField_management(tblSummaryStatisticsPivot,
                                  fieldConfinement + "1", "DOUBLE")
    if len(arcpy.ListFields(tblSummaryStatisticsPivot,
                            fieldConfinement + "0")) == 0:
        arcpy.AddField_management(tblSummaryStatisticsPivot,
                                  fieldConfinement + "0", "DOUBLE")

    arcpy.CalculateField_management(
        tblSummaryStatisticsPivot, fieldConfinementValue,
        "!" + fieldConfinement + "1!/(!" + fieldConfinement + "0! + !" +
        fieldConfinement + "1!)", "PYTHON")

    #Pivot Confinement on Segment Size
    tblSummaryStatisticsWindowPivot = gis_tools.newGISDataset(
        tempWorkspace, "GNAT_MWA_SummaryStatisticsWindowPivotTable")
    arcpy.PivotTable_management(tblSummaryStatisticsPivot,
                                fieldStreamRouteID + ";SeedID", "Seg",
                                fieldConfinementValue,
                                tblSummaryStatisticsWindowPivot)

    # Constriction

    tblSummaryStatisticsConstriction = gis_tools.newGISDataset(
        tempWorkspace, "GNAT_MWA_SummaryStatsTableConstriction")
    arcpy.Statistics_analysis(
        fcIntersected, tblSummaryStatisticsConstriction, "Shape_Length SUM",
        fieldStreamRouteID + ";SeedID;Seg;" + fieldConstriction)

    tblSummaryStatisticsPivotConstriction = gis_tools.newGISDataset(
        tempWorkspace, "GNAT_MWA_SummaryStatisticsPivotTableConsriction")
    arcpy.PivotTable_management(tblSummaryStatisticsConstriction,
                                "Route;SeedID;Seg", fieldConstriction,
                                "SUM_Shape_Length",
                                tblSummaryStatisticsPivotConstriction)

    fieldConstrictionValue = gis_tools.resetField(
        tblSummaryStatisticsPivotConstriction, "CNST_Value", "DOUBLE")
    if len(
            arcpy.ListFields(tblSummaryStatisticsPivotConstriction,
                             fieldConstriction + "1")) == 0:
        arcpy.AddField_management(tblSummaryStatisticsPivotConstriction,
                                  fieldConstriction + "1", "DOUBLE")
    if len(
            arcpy.ListFields(tblSummaryStatisticsPivotConstriction,
                             fieldConstriction + "0")) == 0:
        arcpy.AddField_management(tblSummaryStatisticsPivotConstriction,
                                  fieldConstriction + "0", "DOUBLE")

    arcpy.CalculateField_management(
        tblSummaryStatisticsPivotConstriction, fieldConstrictionValue,
        "!" + fieldConstriction + "1!/(!" + fieldConstriction + "0! + !" +
        fieldConstriction + "1!)", "PYTHON")
    tblSummaryStatisticsWindowPivotConstriction = gis_tools.newGISDataset(
        tempWorkspace,
        "GNAT_MWA_SummaryStatisticsWindowPivotTableConstriction")
    arcpy.PivotTable_management(tblSummaryStatisticsPivotConstriction,
                                fieldStreamRouteID + ";SeedID", "Seg",
                                fieldConstrictionValue,
                                tblSummaryStatisticsWindowPivotConstriction)

    strWindowSizeFields = ""
    for WindowSize in liststrWindowSize:
        strWindowSizeFields = strWindowSizeFields + ";Seg" + WindowSize
    strWindowSizeFields = strWindowSizeFields.lstrip(";")

    #Join Above table to seed points
    arcpy.JoinField_management(fcSeedPoints, "SeedID",
                               tblSummaryStatisticsWindowPivot, "SeedID",
                               strWindowSizeFields)
    arcpy.JoinField_management(fcSeedPoints, "SeedID",
                               tblSummaryStatisticsWindowPivotConstriction,
                               "SeedID", strWindowSizeFields)

    # Manage Outputs
    fcOutputSeedPoints = gis_tools.newGISDataset(outputWorkspace,
                                                 "MovingWindowSeedPoints")
    arcpy.CopyFeatures_management(fcSeedPoints, fcOutputSeedPoints)

    fcOutputWindows = gis_tools.newGISDataset(outputWorkspace,
                                              "MovingWindowSegments")
    arcpy.CopyFeatures_management(fcWindowLines, fcOutputWindows)

    return
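
The CalculateField expressions above reduce each seed window to a ratio of confined (or constricted) length over total intersected length. A minimal standalone sketch of that ratio, assuming the two summed lengths are already available as plain numbers (the function name and sample values below are illustrative, not part of the tool):

def length_ratio(length_in_class, length_out_of_class):
    """Fraction of a window's length in the flagged class, e.g. confined / total."""
    total = length_in_class + length_out_of_class
    return length_in_class / float(total) if total else None

# e.g. 350 m confined out of a 500 m window -> 0.7
print(length_ratio(350.0, 150.0))
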
Example #14
0
        in_features_list, intersect_out_list, dissolved_out_list, fields_list):
    print "Intersect %s" % in_features
    intersect_in_features = ["DHS_Regions.shp", in_features]
    if arcpy.Exists(intersect_out):
        print "\tDelete %s" % intersect_out
        arcpy.Delete_management(intersect_out)
    print "\tNow intersecting %s" % intersect_in_features
    arcpy.Intersect_analysis(intersect_in_features, intersect_out)

    print "Dissolve %s" % intersect_out
    if arcpy.Exists(dissolved_out):
        print "\tDelete %s" % dissolved_out
        arcpy.Delete_management(dissolved_out)
    print "\tNow dissolving %s" % intersect_out
    # The NAME field is the name of the conservation zone
    arcpy.Dissolve_management(intersect_out, dissolved_out,
                              ["FID_DHS_Re", "NAME"], "", "MULTI_PART")
    if arcpy.Exists(intersect_out):
        # Now delete the unneeded intersect_out intermediary file
        print "\tDelete %s" % intersect_out
        arcpy.Delete_management(intersect_out)
    arcpy.AddField_management(dissolved_out, fields[0], "TEXT", "", "", "100")

    # Add InName field
    arcpy.CalculateField_management(dissolved_out, fields[0], "[NAME]")

    # Add Inkm field with area in square kilometers
    arcpy.AddField_management(dissolved_out, fields[1], "DOUBLE")
    expression = "float(!shape.area@squarekilometers!)"
    arcpy.CalculateField_management(dissolved_out, fields[1], expression,
                                    "PYTHON")
Example #15
0
arcpy.env.overwriteOutput = True

#Bnd Bounding Polygons
#arcpy.MakeRouteEventLayer_lr("SDE.CMLRS","LRS_KEY","SDE.Bnd_Lane","LRSKEY LINE BEGMILEPOST ENDMILEPOST","Bnd_LINES","#","ERROR_FIELD","NO_ANGLE_FIELD","NORMAL","ANGLE","LEFT","POINT")
arcpy.MinimumBoundingGeometry_management(BndLyr, BndEnv, "ENVELOPE", "LIST",
                                         BndIndx, "MBG_FIELDS")
#City Limit Bounding Polygons

#Build PLSS Sections for T&R
arcpy.env.workspace = ws
if arcpy.Exists(PLSS_1):
    print("Using " + PLSS_1 + " to merge township and range")
else:
    print("MAking the Township and Range dissolve from PLSS")
    arcpy.FeatureClassToFeatureClass_conversion(PLSS_IN, ws, PLSS_1, "#", "#")
    arcpy.Dissolve_management(PLSS_1, ws + "//" + PLSS_2, "TOWNSHIP;RANGE",
                              "#", "MULTI_PART", "DISSOLVE_LINES")
    arcpy.AddField_management(PLSS_2, "TWP_NO", "LONG", "#", "#", "#", "#",
                              "NULLABLE", "NON_REQUIRED", "#")
    arcpy.AddField_management(PLSS_2, "RNG_NO", "LONG", "#", "#", "#", "#",
                              "NULLABLE", "NON_REQUIRED", "#")
    arcpy.AddField_management(PLSS_2, "TWP_Dir", "LONG", "#", "#", "#", "#",
                              "NULLABLE", "NON_REQUIRED", "#")
    arcpy.AddField_management(PLSS_2, "RNG_Dir", "LONG", "#", "#", "#", "#",
                              "NULLABLE", "NON_REQUIRED", "#")
    arcpy.CalculateField_management(PLSS_2, "TWP_NO", "Left([TOWNSHIP],2)",
                                    "VB", "#")
    arcpy.CalculateField_management(PLSS_2, "RNG_NO", "Left([RANGE],2)", "VB",
                                    "#")
    arcpy.SelectLayerByAttribute_management(PLSS_2, "NEW_SELECTION",
                                            "[TOWNSHIP] LIKE '*S'")
    arcpy.CalculateField_management(PLSS_2, "TWP_Dir", "2", "VB", "#")
Example #16
0
def function(params):

    try:
        # Get inputs
        pText = common.paramsAsText(params)
        outputRaster = pText[1]
        studyAreaMask = pText[2]
        fdr = pText[3]

        common.runSystemChecks()

        # Snap rasters to flow direction raster grid
        arcpy.env.snapRaster = fdr

        # Set temporary filenames
        prefix = "terrflow_"
        baseTempName = os.path.join(arcpy.env.scratchGDB, prefix)

        fdrClip = baseTempName + "fdrClip"
        studyAreaDissolved = baseTempName + "studyAreaDissolved"
        studyAreaRaster = baseTempName + "studyAreaRaster"
        studyAreaBinary = baseTempName + "studyAreaBinary"
        studyAreaBoundary = baseTempName + "studyAreaBoundary"
        studyAreaBoundaryRaster = baseTempName + "studyAreaBoundaryRaster"
        studyAreaBoundaryBinary = baseTempName + "studyAreaBoundaryBinary"

        ######################################
        ### Flow direction raster to numpy ###
        ######################################

        # Clip flow direction raster to study area
        arcpy.sa.ExtractByMask(fdr, studyAreaMask).save(fdrClip)

        # Convert flow direction raster to numpy array
        fdrArray = arcpy.RasterToNumPyArray(fdrClip)
        fdrArray = fdrArray.astype(int)  # astype returns a new array; reassign to keep the int version
        rows, cols = fdrArray.shape # Returns the rows, columns

        log.info('Flow direction raster converted to numpy array')

        ###########################
        ### Study area to numpy ###
        ###########################

        # Dissolve the study area mask
        arcpy.Dissolve_management(studyAreaMask, studyAreaDissolved)

        log.info('Study area mask dissolved')

        # Convert the dissolved study area to a raster
        cellsize = float(arcpy.GetRasterProperties_management(fdr, "CELLSIZEX").getOutput(0))
        arcpy.FeatureToRaster_conversion(studyAreaDissolved, "Shape_Length", studyAreaRaster, cell_size=cellsize)

        log.info('Study area mask converted to raster')

        # Convert study area to raster with value of 1
        tempRas = arcpy.sa.Con(studyAreaRaster, 1)
        tempRas.save(studyAreaBinary)

        # Convert raster to numpy array
        studyAreaArray = arcpy.RasterToNumPyArray(studyAreaBinary)
        studyAreaArray = studyAreaArray.astype(int)  # reassign; astype does not modify in place

        log.info('Study area raster converted to numpy array')

        ##############################################
        ### Create study area boundary numpy array ###
        ##############################################

        # Pad the study area array with zeros around the edges (creating an array one cell larger in each direction)
        x_offset = 1
        y_offset = 1
        zeroPaddedStudyAreaArray = np.zeros(shape=(rows + 2, cols + 2), dtype=int)
        zeroPaddedStudyAreaArray[x_offset:studyAreaArray.shape[0]+x_offset,y_offset:studyAreaArray.shape[1]+y_offset] = studyAreaArray

        # Find the cells on the boundary of the study area
        k = np.ones((3,3),dtype=int) # 3x3 ones structuring element (8-connected dilation)
        zeroPaddedstudyAreaBoundaryArray = binary_dilation(zeroPaddedStudyAreaArray==0, k) & zeroPaddedStudyAreaArray

        # Remove the zero padding
        studyAreaBoundaryArray = zeroPaddedstudyAreaBoundaryArray[1:-1, 1:-1]

        #################################
        ### Set up output numpy array ###
        #################################

        outArray = np.zeros(shape=(rows, cols), dtype=int)

        # Loop through the rows
        for rowNum in xrange(rows):

            # Loop through the row's columns
            for colNum in xrange(cols):

                # Get the value from the study area boundary cell
                boundaryValue = studyAreaBoundaryArray.item(rowNum, colNum)

                if boundaryValue != 0:

                    # Get the value from the flow direction cell
                    fdrValue = fdrArray.item(rowNum, colNum)

                    '''
                    arcpy.AddMessage('=============')
                    arcpy.AddMessage('rowNum: ' + str(rowNum))
                    arcpy.AddMessage('colNum: ' + str(colNum))
                    arcpy.AddMessage('fdrValue: ' + str(fdrValue))
                    '''

                    # Direction east
                    if fdrValue == 1:
                        endX = 1
                        endY = 0

                    # Direction south east
                    elif fdrValue == 2:
                        endX = 1
                        endY = 1

                    # Direction south
                    elif fdrValue == 4:
                        endX = 0
                        endY = 1

                    # Direction south west
                    elif fdrValue == 8:
                        endX = -1
                        endY = 1

                    # Direction west
                    elif fdrValue == 16:
                        endX = -1
                        endY = 0

                    # Direction north west
                    elif fdrValue == 32:
                        endX = -1
                        endY = -1

                    # Direction north
                    elif fdrValue == 64:
                        endX = 0
                        endY = -1

                    # Direction north east
                    elif fdrValue == 128:
                        endX = 1
                        endY = -1

                    # Start X and Y are on the other side of the central cell
                    startX = endX * -1
                    startY = endY * -1

                    # Work out start and end rows and columns
                    startRow = rowNum + startY
                    startCol = colNum + startX
                    endRow = rowNum + endY
                    endCol = colNum + endX

                    '''
                    arcpy.AddMessage('startRow: ' + str(startRow))
                    arcpy.AddMessage('startCol: ' + str(startCol))
                    arcpy.AddMessage('endRow: ' + str(endRow))
                    arcpy.AddMessage('endCol: ' + str(endCol))
                    '''

                    # Set start value
                    if (startRow < 0 or startRow >= rows
                     or startCol < 0 or startCol >= cols):
                        startValue = 0
                    else:
                        startValue = studyAreaArray.item(startRow, startCol)

                    # Set end value
                    if (endRow < 0 or endRow >= rows
                     or endCol < 0 or endCol >= cols):
                        endValue = 0
                    else:
                        endValue = studyAreaArray.item(endRow, endCol)

                    '''
                    arcpy.AddMessage('startValue: ' + str(startValue))
                    arcpy.AddMessage('endValue: ' + str(endValue))
                    '''

                    # Water flows out of study area
                    if startValue == 1 and endValue == 0:
                        outValue = 1

                    # Water flows into study area
                    if startValue == 0 and endValue == 1:
                        outValue = 2

                    # Water flows along study area boundary (ridgeline)
                    if ((startValue == 0 and endValue == 0)
                     or (startValue == 1 and endValue == 1)):
                        outValue = 3

                    # Set the output array value
                    outArray.itemset((rowNum, colNum), outValue)

        # Convert numpy array back to a raster
        dsc = arcpy.Describe(fdrClip)
        sr = dsc.SpatialReference
        ext = dsc.Extent
        lowerLeftCorner = arcpy.Point(ext.XMin, ext.YMin)

        outRasterTemp = arcpy.NumPyArrayToRaster(outArray, lowerLeftCorner, dsc.meanCellWidth, dsc.meanCellHeight)
        arcpy.DefineProjection_management(outRasterTemp, sr)

        # Set zero values in raster to NODATA
        outRasterTemp2 = arcpy.sa.SetNull(outRasterTemp, outRasterTemp, "VALUE = 0")

        # Save raster
        outRasterTemp2.save(outputRaster)

        log.info('Terrestrial flow raster created')

        # Save flow direction raster in degrees (for display purposes)
        degreeValues = arcpy.sa.RemapValue([[1, 90], [2, 135], [4, 180], [8, 225], [16, 270], [32, 315], [64, 0], [128, 45]])
        fdrDegrees = os.path.join(os.path.dirname(outputRaster), "fdr_degrees")
        arcpy.sa.Reclassify(fdr, "Value", degreeValues, "NODATA").save(fdrDegrees)
        arcpy.SetParameter(4, fdrDegrees)

        # Set output success parameter - the parameter number is zero based (unlike the input parameters)
        arcpy.SetParameter(0, True)

    except Exception:
        arcpy.SetParameter(0, False)
        arcpy.AddMessage('Terrestrial flow direction operations did not complete successfully')
        raise
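
For reference, the eight-branch if/elif block above decodes ESRI D8 flow-direction codes into end-cell offsets. The same decoding can be sketched as a lookup table; this is a simplified illustration under the same code convention, not part of the original tool:

# ESRI D8 code -> (endX, endY) offset of the downstream neighbour
D8_OFFSETS = {
    1: (1, 0),    2: (1, 1),    4: (0, 1),    8: (-1, 1),    # E, SE, S, SW
    16: (-1, 0),  32: (-1, -1), 64: (0, -1),  128: (1, -1),  # W, NW, N, NE
}

def d8_offsets(fdr_value):
    """Return the (endX, endY) offset and the opposite (startX, startY) offset."""
    end_x, end_y = D8_OFFSETS[fdr_value]
    return (end_x, end_y), (-end_x, -end_y)
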
inMaskData = citrus
arcpy.CheckOutExtension("Spatial")
outExtractByMask = ExtractByMask(merged_raster, inMaskData)
out_raster = path + r"\Citrus only" + "\\" + merged_raster_name
outExtractByMask.save(out_raster)

#add citrus only raster to map
citrus_only = arcpy.MakeRasterLayer_management(out_raster,
                                               merged_raster_name + "_O")
citrus_only1 = citrus_only.getOutput(0)
arcpy.mapping.AddLayer(df, citrus_only1, "AUTO_ARRANGE")

# Create 40 random polygons on citrus polygon layer

# dissolve IR_3
arcpy.Dissolve_management(citrus, path + r"\citrus_dissolved")

# create random points
outName = "random_points"
conFC = path + r"\citrus_dissolved.shp"
numPoints = 40

arcpy.CreateRandomPoints_management(path, outName, conFC, "", numPoints,
                                    "100 Meters", "", "")

# draw a circle around points using buffer tool
arcpy.Buffer_analysis(path + r"\random_points.shp", path + r"\random_circles",
                      "20 Meters")

# draw polygons around the circles
arcpy.FeatureEnvelopeToPolygon_management(path + r"\random_circles.shp",
Example #18
0
def get_points(data):
    dss = arcpy.Dissolve_management(data, 'in_memory\\dissolve')
    coords = [[i[0].lastPoint.X, i[0].lastPoint.Y]
              for i in arcpy.da.SearchCursor(dss, ["SHAPE@"], None,
                                             arcpy.SpatialReference(32718))][0]
    return coords
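
A hypothetical call to the helper above (the input path is illustrative); it returns the X/Y of the last vertex of the dissolved feature, read with the EPSG:32718 (WGS 84 / UTM zone 18S) spatial reference hard-coded in the cursor:

# x, y = get_points(r'C:\data\streams.shp')
# print(x, y)
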
Example #19
0
                arcpy.AddMessage("There were no records found in " +
                                 forestGDBDict.get(forest) + " for " + tes)

            if count > 0:
                arcpy.AddMessage("Copying selected records to " + forest +
                                 "  Geodatabase ......")
                arcpy.CopyFeatures_management("lyr", final_wo_space)
                forestFCList.append(final_wo_space)

        mergeFeatureClass = tes_folder + forest + "\\" + "FireRetardantEIS_merge"
        arcpy.AddMessage("Merging Feature Classes")
        arcpy.AddMessage(
            "If there are no files to merge this will error until a workaround is produced!"
        )
        arcpy.Merge_management(forestFCList, mergeFeatureClass)

        arcpy.AddMessage("Dissolving Features")

        dissolveFeatureClass = tes_folder + forest + "\\" + "FireRetardantEIS_Dissolve"

        if sys.version_info[0] < 3:
            arcpy.Dissolve_management(mergeFeatureClass, dissolveFeatureClass,
                                      "UnitID")
        else:
            arcpy.PairwiseDissolve_analysis(mergeFeatureClass,
                                            dissolveFeatureClass, "UnitID")

except arcpy.ExecuteError:
    arcpy.AddError(arcpy.GetMessages(2))
except Exception as e:
    arcpy.AddMessage(e)
maxAreaLU95Mesh = Myworkspace + "/maxAreaLU95Mesh"
maxAreaLU99Mesh = Myworkspace + "/maxAreaLU99Mesh"
maxAreaLU04Mesh = Myworkspace + "/maxAreaLU04Mesh"
maxAreaLU09Mesh = Myworkspace + "/maxAreaLU09Mesh"


#			Process 1995 Land Use

InterceptFeatures = ModelMesh + " #;" + Lu95_merge + " #"
arcpy.Intersect_analysis(in_features=InterceptFeatures,out_feature_class=LU95ModelMesh,join_attributes="ALL",cluster_tolerance="#",output_type="INPUT")

# Add Geometry Time : (Elapsed Time: 26 minutes 44 seconds)
arcpy.AddGeometryAttributes_management(Input_Features=LU95ModelMesh,Geometry_Properties="AREA",Length_Unit="FEET_US",Area_Unit="SQUARE_FEET_US",Coordinate_System=sr)
# Dissolve Time: (Elapsed Time: 28 minutes 32 seconds)
arcpy.Dissolve_management(in_features=LU95ModelMesh,out_feature_class=maxAreaLU95Mesh,dissolve_field="SEQNUM;LU_CODE",statistics_fields="POLY_AREA SUM",multi_part="MULTI_PART",unsplit_lines="DISSOLVE_LINES")

LU_Mesh = maxAreaLU95Mesh
LU_Mesh_lyr = arcpy.MakeFeatureLayer_management(LU_Mesh, "LU_Mesh_lyr")
LU_Mesh_nparr = arcpy.da.FeatureClassToNumPyArray(LU_Mesh_lyr, ['OBJECTID','SEQNUM','LU_CODE','SUM_POLY_AREA'])
LU_Mesh_df = DataFrame(LU_Mesh_nparr, columns=['SEQNUM','LU_CODE','SUM_POLY_AREA'])
LU_Mesh_df.columns=['SEQNUM','LU95_CODE','LU95_SQFT']
maxAreabySeq = LU_Mesh_df.sort(['LU95_SQFT'],ascending=False).groupby(['SEQNUM'], as_index=False).nth(0)
LUcsv = workdir + "predominanteLu95.csv"
maxAreabySeq.to_csv(LUcsv,index=False)
pdomLU = Myworkspace + "/PredomLU_95"
arcpy.CopyRows_management(LUcsv, pdomLU)

arcpy.MakeFeatureLayer_management (ModelMesh, "memMM")
# Join Field Time: Elapsed Time: 6 hours 4 minutes 31 seconds
arcpy.JoinField_management ("memMM", "SEQNUM", pdomLU, "SEQNUM")
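
The sort/groupby/nth(0) chain above keeps, for every SEQNUM, the land-use row with the largest area. DataFrame.sort() was removed from later pandas releases; a small self-contained sketch of the same "largest row per group" selection with current pandas (the sample values are illustrative):

import pandas as pd

df = pd.DataFrame({
    'SEQNUM':    [1, 1, 2, 2],
    'LU95_CODE': ['A', 'B', 'A', 'C'],
    'LU95_SQFT': [100.0, 250.0, 400.0, 50.0],
})

# Sort by area descending, then keep the first (largest) row for each SEQNUM
predominant = (df.sort_values('LU95_SQFT', ascending=False)
                 .drop_duplicates('SEQNUM'))
print(predominant)
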
Example #21
0
Calculate_Area_Rashum   (AOI_final)
NewGushim               (parcel_modad_c, parcel_bankal,AOI_final)
Get_Point_AOI           (AOI_final,point_bankal_c,point_modad_c,AOI_Point)
Create_Line_AOI         (AOI_final,tazar_border,Curves,arc_bankal_c,AOI_Line)

#  #  #  #  #  #  # insert To Razaf #  #  #  #  #  #  #  # #

print_arcpy_message ("  #   #   #    # insert To Razaf  #   #   #   #  ")

# # # # # Polygons 

Update_Polygons           (parcel_bankal,AOI_final)

# # # # Lines

arcpy.Dissolve_management (AOI_final,diss_aoi)
Layer_Management          (arc_bankal).Select_By_Location ('COMPLETELY_WITHIN',diss_aoi)
arcpy.Append_management   (AOI_Line,arc_bankal,'NO_TEST')

Multi_to_single           (arc_bankal)

arcpy.Append_management  (arc_modad_c,arc_bankal,'NO_TEST')

del_Non_Boundery_Line    (arc_bankal,AOI_final,tazar_border)

Find_stubbern_lines      (arc_bankal,AOI_final,tazar_border)
Delete_Duplic_Line       (arc_bankal)

# # # # Points

bankal_pnts = Layer_Management (point_bankal).Select_By_Location('INTERSECT',AOI_final)
def UpdateStreets(arcpy):


    try:
        # Create the Geoprocessor object
        #gp = arcgisscripting.create()

        # Load required toolboxes...
        #gp.AddToolbox("E:/Program Files/ArcGIS/ArcToolbox/Toolboxes/Data Management Tools.tbx")
        #gp.AddToolbox("C:/Documents and Settings/djrenz/Application Data/ESRI/ArcToolbox/My Toolboxes/_David_general.tbx")
        arcpy.AddToolbox("C:/Program Files (x86)/ArcGIS/Desktop10.0/ArcToolbox/Toolboxes/Data Management Tools.tbx")

        
        # Local variables...
        Streets_Layer = "dispatchstreets_Copy_Layer"
        Input_Streets = "\\\\172.16.217.1\\ecsostreets\\shp\\Streets.shp"
        Workspace = "Database Connections\\xxx@xxx-2_prod.sde\\SDE.DBO.DBTools"
        Med_Streets_Unique_Temp = "Database Connections\\xxx@xxx-2_prod.sde\\SDE.DBO.Cartegraph\\SDE.DBO.xxx_Streets_Unique_temp"
        xxx_Streets_Unique_Delete_Results = "Database Connections\\xxx@xxx-2_prod.sde\\SDE.DBO.Cartegraph\\SDE.DBO.xxx_Streets_Unique"
        Append_Results = "Database Connections\\xxx@xxx-2_prod.sde\\SDE.DBO.Cartegraph\\SDE.DBO.xxx_Streets_Unique"
        Existing_xxx_Streets_Unique_ = "Database Connections\\xxx@xxx-2_prod.sde\\SDE.DBO.Cartegraph\\SDE.DBO.xxx_Streets_Unique"
        Append_Prod_2_xxx_Streets_Success = "Database Connections\\xxx@xxx-2_prod.sde\\SDE.DBO.xxxStreets"
        xxx_Streets_Delete_Results = "Database Connections\\xxx@xxx-2_prod.sde\\SDE.DBO.xxxStreets"
        xxx_Streets = "Database Connections\\xxx@xxx-2_prod.sde\\SDE.DBO.xxxStreets"
        Medstreets_Layer = "dispatchstreets_Copy_Layer"
        Delete_Streets_Result_ = "Database Connections\\xxx@xxx-2_prod.sde\\SDE.DBO.ReferenceData_County\\SDE.DBO.Streets"
        Streets_Append_Success = "Database Connections\\xxx@xxx-2_prod.sde\\SDE.DBO.ReferenceData_County\\SDE.DBO.Streets"
        Delete_xxx_streets_unique_temp = "Database Connections\\xxx@xxx-2_prod.sde\\SDE.DBO.Cartegraph\\SDE.DBO.xxx_Streets_Unique_temp"
        Prod_1_xxx_Streets_Unique_temp = "Database Connections\\xxx@xxx-2_prod.sde\\SDE.DBO.Cartegraph\\SDE.DBO.xxx_Streets_Unique_temp"
        Copied_Input_Streets = "F:\\Processes\\UpdateStreets\\Temp\\dispatchstreets_Copy.shp"
        Prod_1_Streets = "Database Connections\\xxx@xxx-2_prod.sde\\SDE.DBO.ReferenceData_County\\SDE.DBO.Streets"
        Delete_Success = "F:\\Processes\\UpdateStreets\\Temp\\dispatchstreets_Copy.shp"

        #try:
        #    gp.Delete_management(Copied_Input_Streets, "ShapeFile")
        #except:
        #    print "Nothing to delete here : " + Copied_Input_Streets

        time.sleep(10)

        print "Beginning"
        
        try:
            arcpy.Delete_management(Copied_Input_Streets, "")
        except:
            print "Nothing to delete here 2"

        # Process: Copy...
        try:
            arcpy.Copy_management(Input_Streets, Copied_Input_Streets, "ShapeFile")
        except:
            arcpy.Delete_management(Copied_Input_Streets, "")
            arcpy.Copy_management(Input_Streets, Copied_Input_Streets, "ShapeFile")
            
        time.sleep(5)

        # Process: Make Feature Layer...
        arcpy.MakeFeatureLayer_management(Copied_Input_Streets, Streets_Layer, "", Workspace, "IDNUM IDNUM VISIBLE NONE;PREFIX PREFIX VISIBLE NONE;STREETNAME STREETNAME VISIBLE NONE;STREETTYPE STREETTYPE VISIBLE NONE;SUFFIX SUFFIX VISIBLE NONE;CFCC CFCC VISIBLE NONE;CITY_L CITY_L VISIBLE NONE;CITY_R CITY_R VISIBLE NONE;FROMLEFT FROMLEFT VISIBLE NONE;TOLEFT TOLEFT VISIBLE NONE;FROMRIGHT FROMRIGHT VISIBLE NONE;TORIGHT TORIGHT VISIBLE NONE;ZIP_L ZIP_L VISIBLE NONE;ZIP_R ZIP_R VISIBLE NONE;POLICE_L POLICE_L VISIBLE NONE;POLICE_R POLICE_R VISIBLE NONE;FIRE_L FIRE_L VISIBLE NONE;FIRE_R FIRE_R VISIBLE NONE;EMS_L EMS_L VISIBLE NONE;EMS_R EMS_R VISIBLE NONE;P_BEAT_L P_BEAT_L VISIBLE NONE;P_BEAT_R P_BEAT_R VISIBLE NONE;MAP_PAGE1 MAP_PAGE1 VISIBLE NONE;MAP_PAGE2 MAP_PAGE2 VISIBLE NONE;MAP_PAGE3 MAP_PAGE3 VISIBLE NONE;X1 X1 VISIBLE NONE;Y1 Y1 VISIBLE NONE;X2 X2 VISIBLE NONE;Y2 Y2 VISIBLE NONE;TYPE TYPE VISIBLE NONE;LABELTYPE LABELTYPE VISIBLE NONE;LESN LESN VISIBLE NONE;RESN RESN VISIBLE NONE;NAME NAME VISIBLE NONE;STREET STREET VISIBLE NONE;LEGALNAME LEGALNAME VISIBLE NONE;SURF_TYPE SURF_TYPE VISIBLE NONE;SPEED SPEED VISIBLE NONE;LCITY LCITY VISIBLE NONE;RCITY RCITY VISIBLE NONE;ONEWAY ONEWAY VISIBLE NONE;NAMED NAMED VISIBLE NONE;ADDRESSED ADDRESSED VISIBLE NONE;NAMELOW NAMELOW VISIBLE NONE;LENGTH LENGTH VISIBLE NONE;COUNTY COUNTY VISIBLE NONE;REVERSED REVERSED VISIBLE NONE;BLM BLM VISIBLE NONE;USFS USFS VISIBLE NONE;MP_DIST MP_DIST VISIBLE NONE;MP_BEG MP_BEG VISIBLE NONE;MP_END MP_END VISIBLE NONE;ROADNUMB ROADNUMB VISIBLE NONE;FLIPME FLIPME VISIBLE NONE;SHAPE_LENG SHAPE_LENG VISIBLE NONE;TCFCC TCFCC VISIBLE NONE;NEW_TYPE NEW_TYPE VISIBLE NONE")

        print "Make feature layer :: " + Copied_Input_Streets
        
        time.sleep(5)

        # Process: Select Layer By Attribute...
        #gp.SelectLayerByAttribute_management(Streets_Layer, "NEW_SELECTION", "\"POLICE_L\" LIKE 'M%' OR \"POLICE_R\" LIKE 'M%'")
        arcpy.SelectLayerByAttribute_management(Streets_Layer, "NEW_SELECTION", "\"P_L\" LIKE 'M%' OR \"P_R\" LIKE 'M%'")

        print "Select layer by attribute : " + Streets_Layer

        time.sleep(5)

        # Process: Delete...
        arcpy.Delete_management(Prod_1_xxx_Streets_Unique_temp, "FeatureClass")

        time.sleep(5)

        # Process: Dissolve...
        arcpy.Dissolve_management(Medstreets_Layer, Med_Streets_Unique_Temp, "NAME", "", "MULTI_PART", "DISSOLVE_LINES")

        time.sleep(5)

        # Process: Delete Features...
        try:
            arcpy.DeleteFeatures_management(Existing_xxx_Streets_Unique_)
        except:
            print "Delete Existing xxx Streets Unique is a no go."

        time.sleep(5)

        # Process: Append...
        arcpy.Append_management("'Database Connections\\xxx@xxx-2_prod.sde\\SDE.DBO.Cartegraph\\SDE.DBO.xxx_Streets_Unique_temp'", Existing_xxx_Streets_Unique_, "NO_TEST", "NAME 'NAME' true true false 254 Text 0 0 ,First,#,Database Connections\\xxx@xxx-2_prod.sde\\SDE.DBO.Cartegraph\\SDE.DBO.xxx_Streets_Unique_temp,NAME,-1,-1;Shape.len 'Shape.len' false false true 0 Double 0 0 ,First,#", "")

        time.sleep(5)

        # Process: Delete Features (2)...
        arcpy.DeleteFeatures_management(xxx_Streets)

        time.sleep(5)

    
        arcpy.Append_management("dispatchstreets_Copy_Layer", xxx_Streets, "NO_TEST", "OBJECTID 'OBJECTID' true true false 8 Double 0 10 ,First,#;IDNUM 'IDNUM' true true false 8 Double 0 10 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,IDNUM,-1,-1;PREFIX 'PREFIX' true true false 2 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,PREFIX,-1,-1;STREETNAME 'STREETNAME' true true false 30 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,STREETNAME,-1,-1;STREETTYPE 'STREETTYPE' true true false 4 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,STREETTYPE,-1,-1;SUFFIX 'SUFFIX' true true false 2 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,SUFFIX,-1,-1;CFCC 'CFCC' true true false 3 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,CFCC,-1,-1;CITY_L 'CITY_L' true true false 3 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,CITY_L,-1,-1;CITY_R 'CITY_R' true true false 3 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,CITY_R,-1,-1;FROMLEFT 'FROMLEFT' true true false 8 Double 0 19 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,FROMLEFT,-1,-1;TOLEFT 'TOLEFT' true true false 8 Double 0 19 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,TOLEFT,-1,-1;FROMRIGHT 'FROMRIGHT' true true false 8 Double 0 19 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,FROMRIGHT,-1,-1;TORIGHT 'TORIGHT' true true false 8 Double 0 19 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,TORIGHT,-1,-1;ZIP_L 'ZIP_L' true true false 5 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,ZIP_L,-1,-1;ZIP_R 'ZIP_R' true true false 5 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,ZIP_R,-1,-1;POLICE_L 'POLICE_L' true true false 15 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,POLICE_L,-1,-1;POLICE_R 'POLICE_R' true true false 15 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,POLICE_R,-1,-1;FIRE_L 'FIRE_L' true true false 15 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,FIRE_L,-1,-1;FIRE_R 'FIRE_R' true true false 15 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,FIRE_R,-1,-1;EMS_L 'EMS_L' true true false 15 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,EMS_L,-1,-1;EMS_R 'EMS_R' true true false 15 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,EMS_R,-1,-1;P_BEAT_L 'P_BEAT_L' true true false 16 Text 0 0 ,First,#;P_BEAT_R 'P_BEAT_R' true true false 16 Text 0 0 ,First,#;MAP_PAGE1 'MAP_PAGE1' true true false 10 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,MAP_PAGE1,-1,-1;MAP_PAGE2 'MAP_PAGE2' true true false 10 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,MAP_PAGE2,-1,-1;MAP_PAGE3 'MAP_PAGE3' true true false 10 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,MAP_PAGE3,-1,-1;X1 'X1' true true false 8 Double 0 19 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,X1,-1,-1;Y1 'Y1' true true false 8 Double 0 19 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,Y1,-1,-1;X2 'X2' true true false 8 Double 0 19 
,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,X2,-1,-1;Y2 'Y2' true true false 8 Double 0 19 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,Y2,-1,-1;TYPE 'TYPE' true true false 8 Double 0 19 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,TYPE,-1,-1;LABELTYPE 'LABELTYPE' true true false 8 Double 0 10 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,LABELTYPE,-1,-1;LESN 'LESN' true true false 5 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,LESN,-1,-1;RESN 'RESN' true true false 5 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,RESN,-1,-1;NAME 'NAME' true true false 30 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,NAME,-1,-1;STREET 'STREET' true true false 72 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,STREET,-1,-1;LEGALNAME 'LEGALNAME' true true false 36 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,LEGALNAME,-1,-1;SURF_TYPE 'SURF_TYPE' true true false 20 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,SURF_TYPE,-1,-1;SPEED 'SPEED' true true false 8 Double 0 10 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,SPEED,-1,-1;LCITY 'LCITY' true true false 32 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,LCITY,-1,-1;RCITY 'RCITY' true true false 32 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,RCITY,-1,-1;ONEWAY 'ONEWAY' true true false 5 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,ONEWAY,-1,-1;NAMED 'NAMED' true true false 36 Date 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,NAMED,-1,-1;ADDRESSED 'ADDRESSED' true true false 36 Date 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,ADDRESSED,-1,-1;NAMELOW 'NAMELOW' true true false 36 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,NAMELOW,-1,-1;SOURCETHM 'SOURCETHM' true true false 16 Text 0 0 ,First,#;LENGTH 'LENGTH' true true false 8 Double 0 19 ,First,#;COUNTY 'COUNTY' true true false 16 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,COUNTY,-1,-1;REVERSED 'REVERSED' true true false 16 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,REVERSED,-1,-1;BLM 'BLM' true true false 12 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,BLM,-1,-1;USFS 'USFS' true true false 12 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,USFS,-1,-1;MP_DIST 'MP_DIST' true true false 8 Double 4 14 ,First,#;MP_BEG 'MP_BEG' true true false 8 Double 4 14 ,First,#;MP_END 'MP_END' true true false 8 Double 4 14 ,First,#;ROADNUMB 'ROADNUMB' true true false 4 Long 0 10 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,ROADNUMB,-1,-1;FLIPME 'FLIPME' true true false 4 Long 0 10 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,FLIPME,-1,-1;SHAPE_LENG 'SHAPE_LENG' true true false 8 Double 11 19 ,First,#;CROSS1 'CROSS1' true true false 254 Text 0 0 ,First,#;CROSS2 'CROSS2' true true false 254 Text 0 0 ,First,#;Shape.len 'Shape.len' false false true 0 Double 0 0 ,First,#", "")

        print "Append : " + xxx_Streets
        
        time.sleep(5)

        # Process: Delete Features (4)...
        arcpy.DeleteFeatures_management(Prod_1_Streets)

        time.sleep(5)

        # Process: Append (4)...
        arcpy.Append_management("F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp", Prod_1_Streets, "NO_TEST", "IDNUM 'IDNUM' true true false 4 Long 0 10 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,IDNUM,-1,-1;PREFIX 'PREFIX' true true false 2 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,PREFIX,-1,-1;STREETNAME 'STREETNAME' true true false 30 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,STREETNAME,-1,-1;STREETTYPE 'STREETTYPE' true true false 4 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,STREETTYPE,-1,-1;SUFFIX 'SUFFIX' true true false 2 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,SUFFIX,-1,-1;CFCC 'CFCC' true true false 3 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,CFCC,-1,-1;CITY_L 'CITY_L' true true false 3 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,CITY_L,-1,-1;CITY_R 'CITY_R' true true false 3 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,CITY_R,-1,-1;FROMLEFT 'FROMLEFT' true true false 8 Double 0 19 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,FROMLEFT,-1,-1;TOLEFT 'TOLEFT' true true false 8 Double 0 19 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,TOLEFT,-1,-1;FROMRIGHT 'FROMRIGHT' true true false 8 Double 0 19 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,FROMRIGHT,-1,-1;TORIGHT 'TORIGHT' true true false 8 Double 0 19 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,TORIGHT,-1,-1;ZIP_L 'ZIP_L' true true false 5 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,ZIP_L,-1,-1;ZIP_R 'ZIP_R' true true false 5 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,ZIP_R,-1,-1;POLICE_L 'POLICE_L' true true false 15 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,POLICE_L,-1,-1;POLICE_R 'POLICE_R' true true false 15 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,POLICE_R,-1,-1;FIRE_L 'FIRE_L' true true false 15 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,FIRE_L,-1,-1;FIRE_R 'FIRE_R' true true false 15 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,FIRE_R,-1,-1;EMS_L 'EMS_L' true true false 15 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,EMS_L,-1,-1;EMS_R 'EMS_R' true true false 15 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,EMS_R,-1,-1;P_BEAT_L 'P_BEAT_L' true true false 16 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,P_BEAT_L,-1,-1;P_BEAT_R 'P_BEAT_R' true true false 16 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,P_BEAT_R,-1,-1;MAP_PAGE1 'MAP_PAGE1' true true false 10 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,MAP_PAGE1,-1,-1;MAP_PAGE2 'MAP_PAGE2' true true false 10 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,MAP_PAGE2,-1,-1;MAP_PAGE3 'MAP_PAGE3' true true false 10 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,MAP_PAGE3,-1,-1;X1 'X1' true true false 8 Double 0 19 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,X1,-1,-1;Y1 'Y1' true true false 8 Double 0 19 
,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,Y1,-1,-1;X2 'X2' true true false 8 Double 0 19 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,X2,-1,-1;Y2 'Y2' true true false 8 Double 0 19 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,Y2,-1,-1;TYPE 'TYPE' true true false 8 Double 0 19 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,TYPE,-1,-1;LABELTYPE 'LABELTYPE' true true false 4 Long 0 10 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,LABELTYPE,-1,-1;LESN 'LESN' true true false 5 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,LESN,-1,-1;RESN 'RESN' true true false 5 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,RESN,-1,-1;NAME 'NAME' true true false 30 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,NAME,-1,-1;STREET 'STREET' true true false 72 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,STREET,-1,-1;LEGALNAME 'LEGALNAME' true true false 36 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,LEGALNAME,-1,-1;SURF_TYPE 'SURF_TYPE' true true false 20 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,SURF_TYPE,-1,-1;SPEED 'SPEED' true true false 4 Long 0 10 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,SPEED,-1,-1;LCITY 'LCITY' true true false 32 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,LCITY,-1,-1;RCITY 'RCITY' true true false 32 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,RCITY,-1,-1;ONEWAY 'ONEWAY' true true false 5 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,ONEWAY,-1,-1;NAMED 'NAMED' true true false 36 Date 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,NAMED,-1,-1;ADDRESSED 'ADDRESSED' true true false 36 Date 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,ADDRESSED,-1,-1;NAMELOW 'NAMELOW' true true false 36 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,NAMELOW,-1,-1;SOURCETHM 'SOURCETHM' true true false 16 Text 0 0 ,First,#;LENGTH 'LENGTH' true true false 8 Double 0 19 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,LENGTH,-1,-1;COUNTY 'COUNTY' true true false 16 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,COUNTY,-1,-1;REVERSED 'REVERSED' true true false 16 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,REVERSED,-1,-1;BLM 'BLM' true true false 12 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,BLM,-1,-1;USFS 'USFS' true true false 12 Text 0 0 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,USFS,-1,-1;MP_DIST 'MP_DIST' true true false 8 Double 4 12 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,MP_DIST,-1,-1;MP_BEG 'MP_BEG' true true false 8 Double 4 12 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,MP_BEG,-1,-1;MP_END 'MP_END' true true false 8 Double 4 12 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,MP_END,-1,-1;ROADNUMB 'ROADNUMB' true true false 2 Short 0 5 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,ROADNUMB,-1,-1;FLIPME 'FLIPME' true true false 2 Short 0 5 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,FLIPME,-1,-1;SHAPE_LENG 'SHAPE_LENG' 
true true false 8 Double 11 18 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,SHAPE_LENG,-1,-1;CROSS2 'CROSS2' true true false 254 Text 0 0 ,First,#;CROSS1 'CROSS1' true true false 254 Text 0 0 ,First,#;New_Type 'New_Type' true true false 8 Double 8 38 ,First,#,F:\\Processes\\UpdateStreets\Temp\\dispatchstreets_Copy.shp,NEW_TYPE,-1,-1;Shape.len 'Shape.len' false false true 0 Double 0 0 ,First,#", "")

        time.sleep(5)

        # Process: Delete (2)...
        arcpy.Delete_management(Copied_Input_Streets, "ShapeFile")

        del arcpy

    except:
        print 'Errors occurred in UpdateStreetsProd2_def'
        raise
Example #23
0
        pass
    try:
        arcpy.Merge_management([sheds, ownshed],
                               os.path.join(lakes, "sheds3" + name))
        sheds3 = os.path.join(lakes, "sheds3" + name)
    except:
        arcpy.CopyFeatures_management(ownshed,
                                      os.path.join(lakes, "sheds3" + name))
        sheds3 = os.path.join(lakes, "sheds3" + name)
        pass
    # Dissolve the aggregate watershed if it has more than one polygon
    polynumber = int(arcpy.GetCount_management(sheds3).getOutput(0))
    if polynumber > 1:
        arcpy.AddField_management(sheds3, "Dissolve", "TEXT")
        arcpy.CalculateField_management(sheds3, "Dissolve", "1", "PYTHON")
        arcpy.Dissolve_management(sheds3, os.path.join(lakes, "pre" + name))
    elif polynumber < 2:
        arcpy.CopyFeatures_management(sheds3,
                                      os.path.join(lakes, "pre" + name))

    pre = os.path.join(lakes, "pre" + name)

    # Get the permanent id from the feature and add it to output shed
    field = "Permanent_"
    cursor = arcpy.SearchCursor(fc)
    for row in cursor:
        id = row.getValue(field)
    arcpy.AddField_management(pre, "NHD_ID", "TEXT")
    arcpy.CalculateField_management(pre, "NHD_ID", '"{0}"'.format(id),
                                    "PYTHON")
    # Erase the lakes own geometry from its watershed
Example #24
0
FCDisGeo = FCDis
Output_Feature_Class = FCDisGeo
FC_Dis_Area = Output_Feature_Class
FC_Dis_ConvexHull = ""
ConvexHullArea = FC_Dis_ConvexHull
Output_Feature_Class__2_ = ConvexHullArea
ConvexHullArea2 = Output_Feature_Class__2_
ConvexHullArea3 = ConvexHullArea2
Ratio = ConvexHullArea3
Output_Feature_Class__3_ = Ratio
Area2Perimeter = Output_Feature_Class__3_
Ideal2Actual = Area2Perimeter
IdealActualCompute = Ideal2Actual

# Process: Dissolve
arcpy.Dissolve_management(CBD_CDA, FCDis, "", "", "MULTI_PART",
                          "DISSOLVE_LINES")

# Process: Add Geometry Attributes
arcpy.AddGeometryAttributes_management(FCDis, "AREA;PERIMETER_LENGTH",
                                       "KILOMETERS", "SQUARE_KILOMETERS", "")

# Process: Add Field
arcpy.AddField_management(FCDisGeo, "km1", "FLOAT", "", "", "", "", "NULLABLE",
                          "NON_REQUIRED", "")

# Process: Calculate Field
arcpy.CalculateField_management(Output_Feature_Class, "km1", "[POLY_AREA]",
                                "VB", "")

# Process: Minimum Bounding Geometry
arcpy.MinimumBoundingGeometry_management(FC_Dis_Area, FC_Dis_ConvexHull,
Example #25
0
def shallowWaterRef():
    util.log(
        "Starting shallowWaterRef module ---------------------------------------------------------------"
    )

    #dissolve EDT reaches into 1 polygon
    reach_diss = arcpy.Dissolve_management(config.EDT_reaches,
                                           r"in_memory" + r"\reach_diss", "#",
                                           "#", "MULTI_PART", "DISSOLVE_LINES")

    util.log("Clipping depth raster to EDT reach extent")
    arcpy.CheckOutExtension("Spatial")
    depth_clip = arcpy.Clip_management(config.river_depth, "#",
                                       config.temp_gdb + r"\depth_clip",
                                       reach_diss, "-3.402823e+038",
                                       "ClippingGeometry",
                                       "NO_MAINTAIN_EXTENT")

    util.log(
        "Converting depth raster to positive values and adjusting to ordinary low water mark"
    )
    depth_raster = arcpy.sa.Raster(depth_clip)
    lowWater_conversion = 15
    raster_adj = abs(depth_raster) - lowWater_conversion

    # get rid of negative values
    raster_noNeg = arcpy.sa.SetNull(raster_adj < 0, raster_adj)

    # reclassify to above and below 20'
    util.log("Reclassifying to above and below 20' depth")
    # 0-20' set to 0, > 20' set to 1
    reclass_mapping = "0 20 0;20 200 1"
    raster_reclass = arcpy.sa.Reclassify(raster_noNeg, "Value",
                                         reclass_mapping, "DATA")

    #convert to polygon
    util.log("Conveting raster to polygon")
    shallow_vect = arcpy.RasterToPolygon_conversion(
        raster_reclass, config.temp_gdb + r"\shallow_vect")

    #summarize data
    util.log("Creating summary table")
    summary = arcpy.Statistics_analysis(
        shallow_vect, config.temp_gdb + r"\shallow_summary_table",
        "Shape_Area SUM", "gridcode")

    #pivot info
    util.log("Creating pivot table")
    arcpy.AddField_management(summary, "input_field", "SHORT")
    with arcpy.da.UpdateCursor(summary, "input_field") as rows:
        for row in rows:
            row[0] = 100
            rows.updateRow(row)
    ShallowWater_final = arcpy.PivotTable_management(
        summary, "input_field", "gridcode", "SUM_Shape_Area",
        config.temp_gdb + r"\ShallowWater_final")

    # calculate # of total
    util.log("Calc % shallow water")
    rate_field = "Pcnt_Shallow"
    arcpy.AddField_management(ShallowWater_final, rate_field, "Double")
    cursor_fields = ["gridcode0", "gridcode1", rate_field]
    with arcpy.da.UpdateCursor(ShallowWater_final, cursor_fields) as rows:
        for row in rows:
            row[2] = (row[0] / (row[0] + row[1])) * 100
            rows.updateRow(row)

    # WHI score
    util.log("Calc WHI score")
    score_field = "shallow_water_score"
    arcpy.AddField_management(ShallowWater_final, score_field, "DOUBLE")
    with arcpy.da.UpdateCursor(ShallowWater_final,
                               [rate_field, score_field]) as rows:
        for row in rows:
            row[1] = calc.shallowWater_score(row[0])
            rows.updateRow(row)

    # convert output to table if needed
    util.tableTo_primaryOutput(ShallowWater_final)

    util.log("Cleaning up")
    arcpy.Delete_management("in_memory")

    util.log(
        "Module complete ---------------------------------------------------------------"
    )
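
The depth handling above flips the raster to positive depths, subtracts 15 to adjust to the ordinary low water mark, discards cells that become negative, and splits the remainder at 20 feet. A minimal numpy sketch of the same logic on an array of raw depth values (the 15 and 20 constants come from the code above; the function itself is illustrative):

import numpy as np

def classify_depth(depth, low_water_offset=15.0, threshold=20.0):
    """0 for adjusted depths of 0-20 ft (shallow), 1 for deeper, NaN where dropped."""
    adj = np.abs(np.asarray(depth, dtype=float)) - low_water_offset
    out = np.full(adj.shape, np.nan)
    valid = adj >= 0
    out[valid] = (adj[valid] > threshold).astype(float)
    return out

print(classify_depth([-5.0, -30.0, -50.0]))  # -> [nan  0.  1.]
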
arcpy.env.workspace = "J:/Apps/Python/LayerUpdates/parcels/source/Parcels_Combined.gdb/"

fc = "AC_SchoolDistrict"

# Replace a layer/table view name with a path to a dataset (which can be a layer file) or create the layer/table view within the script
# The following inputs are layers or table views: "AC_Parcels_Combined_2_SchoolDistrict"
if arcpy.Exists(fc):
    arcpy.AddMessage("Deleting old version of AC_SchoolDistrict...")
    arcpy.Delete_management(
        in_data=
        "J:/Apps/Python/LayerUpdates/parcels/source/Parcels_Combined.gdb/AC_SchoolDistrict",
        data_type="FeatureClass")
    arcpy.Dissolve_management(
        in_features="AC_Parcels_Combined_2_SchoolDistrict",
        out_feature_class=
        "J:/Apps/Python/LayerUpdates/parcels/source/Parcels_Combined.gdb/AC_SchoolDistrict",
        dissolve_field="schooldist",
        statistics_fields="",
        multi_part="MULTI_PART",
        unsplit_lines="DISSOLVE_LINES")
    arcpy.AddMessage("   ...and replaced with new AC_SchoolDistrict.")
else:
    arcpy.Dissolve_management(
        in_features="AC_Parcels_Combined_2_SchoolDistrict",
        out_feature_class=
        "J:/Apps/Python/LayerUpdates/parcels/source/Parcels_Combined.gdb/AC_SchoolDistrict",
        dissolve_field="schooldist",
        statistics_fields="",
        multi_part="MULTI_PART",
        unsplit_lines="DISSOLVE_LINES")
    arcpy.AddMessage("Created new AC_SchoolDistrict.")
### Initialization
# load libraries
import arcpy
import toml
import os
import sys

### Preliminary processing
# load parameters
with open("code/parameters/general.toml") as conffile:
    general_params = toml.loads(conffile.read())

# set environmental variables
arcpy.env.parallelProcessingFactor = general_params['threads']
arcpy.env.overwriteOutput = True
arcpy.env.workspace = sys.argv[1]

### Main processing
# get list of data in gdb
features = arcpy.ListFeatureClasses()

# dissolve data
for x in features:
    arcpy.Dissolve_management(sys.argv[1] + '/' + x,
                              sys.argv[3] + '/' + x,
                              sys.argv[2],
                              multi_part='SINGLE_PART')
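
For context, the script above takes everything it needs from the command line; a hypothetical invocation (the script name is illustrative, the argument order is taken from the sys.argv uses above):

#   python dissolve_all.py <input.gdb> <dissolve_field> <output.gdb>
#   sys.argv[1] - workspace holding the feature classes to dissolve
#   sys.argv[2] - field to dissolve on
#   sys.argv[3] - workspace that receives the dissolved outputs
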
        AddMsgAndPrint("\n\tSurface volume and area calculations completed", 0)

        arcpy.Delete_management(subMask)
        arcpy.Delete_management(subGrid)

    # -------------------------------------------------------------------------- Process Soils and Landuse Data

    AddMsgAndPrint("\nProcessing Soils and Landuse for " + str(wsName) + "...",
                   0)

    # -------------------------------------------------------------------------- Create Landuse Layer
    if splitLU:

        # Dissolve in case the watershed has multiple polygons
        arcpy.Dissolve_management(inWatershed, watershedDissolve, "", "",
                                  "MULTI_PART", "DISSOLVE_LINES")

        # Clip the CLU layer to the dissolved watershed layer
        arcpy.Clip_analysis(inCLU, watershedDissolve, cluClip, "")
        AddMsgAndPrint(
            "\n\tSuccessfully clipped the CLU to your Watershed Layer", 0)

        # Union the CLU and dissolve watershed layer simply to fill in gaps
        arcpy.Union_analysis(cluClip + ";" + watershedDissolve, landuse,
                             "ONLY_FID", "", "GAPS")
        AddMsgAndPrint(
            "\tSuccessfully filled in any CLU gaps and created Landuse Layer: "
            + os.path.basename(landuse), 0)

        # Delete FID field
        fields = arcpy.ListFields(landuse, "FID*")
#Make Feature Layer from Union soils
arcpy.MakeFeatureLayer_management("union_compare_data",
                                  "union_compare_data_lyr")

#Select Layer By Attribute NEW_SELECTION "MLRASYM" <> "MLRASYM_1"
arcpy.SelectLayerByAttribute_management("union_compare_data_lyr",
                                        "NEW_SELECTION",
                                        ' "MLRASYM" <> "MLRASYM_1" ')

#Copy Features
arcpy.CopyFeatures_management("union_compare_data_lyr", "outFC")

dissolveFields = ["MLRASYM", "MLRASYM_1"]
#Dissolve Features
arcpy.Dissolve_management("outFC", "COMPARE", dissolveFields)

#Delete Features
arcpy.Delete_management("union_compare_data")
arcpy.Delete_management("outFC")

#Add Field

arcpy.AddField_management(
    "COMPARE",
    "ACRES",
    "DOUBLE",
)

#Calculate Field
Example #30
0
def PrePare_Data(parcel_bankal, parcels_copy, points_copy, Point_bankal, GDB,
                 name_bankal, name_tazar):
    '''
    INPUTS
    1) parcel_bankal - the bankal parcels layer
    2) parcels_copy  - the surveyor's parcels layer
    3) points_copy   - the surveyor's points layer
    4) Point_bankal  - the bankal points layer
    5) GDB           - the geodatabase in which the output layers are saved
    6) name_bankal   - name of the point-name field in the bankal layer
    7) name_tazar    - name of the point-name field in the tazar layer

    OUTPUTS
    1) AOI               - the new work area
    2) tazar_border      - the tazar boundary
    3) Curves            - the curves (arcs) of the work area
    4) parcel_Bankal_cut - clip of the bankal parcels; every parcel within 10 meters of the tazar is included
    5) Point_bankal_Cut  - clip of the bankal points; every point within 10 meters of the tazar is included
    '''

    # #Prepare data

    parcel_Bankal_cut = GDB + '\\' + 'Bankal_Cut'
    tazar_border = GDB + '\\' + 'tazar_border'
    AOI = GDB + '\\' + 'AOI'
    Point_bankal_Cut = GDB + '\\' + 'Point_bankal_Cut'
    Holes_data = GDB + '\\' + 'Holes_Prepare_data'

    # Create Tazar Border, Curves
    arcpy.Dissolve_management(parcels_copy, tazar_border)

    # Create Parcel Bankal For AOI
    Layer_Management(parcel_bankal).Select_By_Location("INTERSECT",
                                                       tazar_border,
                                                       "10 Meters",
                                                       parcel_Bankal_cut)

    add_field(parcel_Bankal_cut, "AREA_Orig", "DOUBLE")
    arcpy.CalculateField_management(parcel_Bankal_cut, "AREA_Orig",
                                    "!shape.area!", "PYTHON_9.3")

    # Cut Points From Bankal
    Layer_Management(Point_bankal).Select_By_Location("INTERSECT",
                                                      tazar_border,
                                                      "10 Meters",
                                                      Point_bankal_Cut)

    # Move_Vertices_By_Name                      (parcel_Bankal_cut,Point_bankal_Cut,name_bankal,points_copy,name_tazar) # note the point-name fields

    Delete_polygons(parcel_Bankal_cut, parcels_copy, AOI)
    arcpy.Append_management(parcels_copy, AOI, 'NO_TEST')

    # Delete parts smaller than 20 percent of their original size
    Multi_to_single(AOI)
    arcpy.AddField_management(AOI, "OVERLAP_PRCT", "DOUBLE")
    arcpy.CalculateField_management(AOI, "OVERLAP_PRCT",
                                    "((!SHAPE_Area!  / !AREA_Orig!) * 100)",
                                    "PYTHON")
    arcpy.MakeFeatureLayer_management(AOI, 'parcel_Bankal_cut_Layer',
                                      "\"OVERLAP_PRCT\" < 20")
    arcpy.Select_analysis('parcel_Bankal_cut_Layer', Holes_data)
    arcpy.DeleteFeatures_management('parcel_Bankal_cut_Layer')

    # Update_Polygons                         (AOI,parcels_copy)
    Curves = generateCurves(AOI)
    Update_Polygons(AOI, parcels_copy)

    Multi = Multi_to_single(AOI)
    if Multi:
        print_arcpy_message("You have Multi layer after insert new tazar")

    return AOI, tazar_border, Curves, parcel_Bankal_cut, Point_bankal_Cut
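
The OVERLAP_PRCT step above moves parcel fragments that keep less than 20 percent of their original area into a separate holes dataset and deletes them from the AOI. A one-line sketch of that rule outside the function (the names are illustrative):

def keep_fragment(shape_area, area_orig, min_pct=20.0):
    """True if a cut fragment still covers at least min_pct of its original area."""
    return (shape_area / area_orig) * 100.0 >= min_pct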