areawayfc, "temparea", "", "",
    "Shape_Length Shape_Length VISIBLE;Way_ID Way_ID VISIBLE")
arcpy.AddJoin_management("temparea", "Way_ID", waytagtab, "Way_ID",
                         "KEEP_COMMON")
arcpy.CopyFeatures_management("temparea", finalareawayfc, "", "0.05", "0.5",
                              "5.0")
arcpy.Delete_management("temparea")
arcpy.Delete_management(areawayfc)
arcpy.Delete_management(waytagtab)

#Sort out some of the mess caused by loading all loops as areas.

#Copy highways that are loops from the area feature class back to the line feature class (needs an ArcInfo license)
if arcpy.CheckProduct("ArcInfo") == "Available":
    arcpy.AddMessage("Tidying areas that should be lines")
    arcpy.MakeFeatureLayer_management(finalareawayfc, "loopyroads",
                                      "osm_way_tags_highway <> ''")
    arcpy.FeatureToLine_management("loopyroads", temproadsfc)
    arcpy.Append_management(temproadsfc, finalwayfc, "NO_TEST")
    arcpy.Delete_management(temproadsfc)

arcpy.AddMessage("Step 7 --- %s seconds ---" % (time.time() - stepstarttime))

#Completed
arcpy.AddMessage("Conversion Completed")
arcpy.AddMessage(
    str(nodecount) + ' Nodes    ' + str(waycount) + ' Ways    ' + str(relcount) +
    ' relations     ')
arcpy.AddMessage("Total runtime  --- %s seconds ---" %
                 (time.time() - starttime))
        if state_dict2[cs_list[i]] in st:
            clist = clist + [city_list[i]]

    #if len(clist) == 1:
    #		clist = tuple(clist)+tuple("XXXX")

    print "- " + str(clist)

    name_select = " \"NAME10\" IN " + str(tuple(clist))
    arcpy.MakeFeatureLayer_management(state_place_clip, ua_places, name_select,
                                      "")

    place_line_hold = lines + "places_line_" + str(c) + ".shp"

    # Process: Feature To Line
    arcpy.FeatureToLine_management(ua_places, place_line_hold, "",
                                   "ATTRIBUTES")

    # Process: Near calculation

    print "- Near Calculations"

    arcpy.Near_analysis(ua_lehd_acs, place_line_hold, "", "NO_LOCATION",
                        "NO_ANGLE", "PLANAR")

    ##This can be vastly improved by just computing a distance for each tract from boundary and merging on that one file!

    #for d in range(200,3200, 200):

    #		boundary_tracts = boundary + "boundary_tracts_" + str(d) + "_" + str(c) + ".shp"

    #		dist_str = str(d) + " Meters"
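
    # A rough sketch of the improvement suggested above (assumes ua_lehd_acs and
    # place_line_hold are as defined earlier in this script; the layer names are
    # illustrative only): Near_analysis has already written a NEAR_DIST value to
    # every tract, so each distance band can be pulled with a simple attribute
    # selection instead of rebuilding a boundary buffer per band.
    #for d in range(200, 3200, 200):
    #		band_lyr = "tracts_within_" + str(d) + "_" + str(c)
    #		arcpy.MakeFeatureLayer_management(ua_lehd_acs, band_lyr,
    #		                                  '"NEAR_DIST" <= ' + str(d))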
Example #3
            except Exception,e:
                arcpy.AddError('%s'%(e))
                continue
            
def Width(infc,mask):
    area = {}

    for feature in arcpy.da.SearchCursor(mask,['SHAPE@','Id']):
        try:
            if feature[1] not in area:
                area[feature[1]] = feature[0].area
                
        except Exception,e:
            arcpy.AddError('%s'%(e))

    arcpy.FeatureToLine_management([mask],'in_memory\\templines',"0.001 Meters", "ATTRIBUTES")
    dname = os.path.dirname(infc)
    arcpy.CreateFeatureclass_management('in_memory','temppoints',"POINT",'','','',infc)
    arcpy.AddField_management('in_memory\\temppoints','FID',"LONG")
    
    cursor = arcpy.da.InsertCursor('in_memory\\temppoints',['SHAPE@','FID'])
    fields = ['Id','DCoordx','DCoordy','OID@']
    for row in arcpy.da.SearchCursor(infc,fields):
        data = [[row[1],row[2]],row[-1]]
        cursor.insertRow(data)

    arcpy.Near_analysis('in_memory\\temppoints', 'in_memory\\templines')

   
    curfields = [f.name for f in arcpy.ListFields(infc)]
    if 'Width' not in curfields:
Example #4
def main():
    try:
        ##################################################################################
        #READ PARAMETERS
        ##################################################################################
        inpoints = arcpy.GetParameterAsText(0)
        inlines = arcpy.GetParameterAsText(1)
        outWorkspace = arcpy.GetParameterAsText(2)
        outpoints = arcpy.GetParameterAsText(3)
        outsegments = arcpy.GetParameterAsText(4)
        outpolygons = arcpy.GetParameterAsText(5)
        inroads_identifier = arcpy.GetParameterAsText(6)
        arcpy.env.workspace = outWorkspace

        ##################################################################################
        #HARD CODED PARAMETERS
        ##################################################################################
        if arcpy.env.scratchWorkspace is None:
            arcpy.env.scratchWorkspace = r'C:\Users\fancelin\Documents\ArcGIS\Default.gdb'
        factor = 100
        inroads_split_name = "voronoying_lines_split"
        inroads_split_line_name = "voronoying_lines_split_lines"
        inroads_split = "{0}{1}{2}".format(arcpy.env.scratchWorkspace,
                                           os.path.sep, inroads_split_name)
        inroads_split_line = "{0}{1}{2}".format(arcpy.env.scratchWorkspace,
                                                os.path.sep,
                                                inroads_split_line_name)
        spatial_reference = arcpy.Describe(inlines).spatialReference

        ##################################################################################
        #VALIDATION
        ##################################################################################
        arcpy.AddMessage("Validation")
        #Validate license requirements
        validateLicense()

        #Validate lines are provided
        if len(inlines) == 0:
            raise Exception("Input lines were not provided.")

        #Validate that a line identifier was provided
        if len(inroads_identifier) == 0:
            raise Exception("Input lines identifier was not provided.")

        extents = []
        #Validate input line feature class.
        inlinesBBox = validateInputLineFeatureClass(inlines)
        extents.append(inlinesBBox)
        #Validate input point feature class if required.
        inPointsBBox = validateInputPointFeatureClass(inpoints) if len(
            arcpy.GetParameterAsText(0)) > 0 else None

        ##################################################################################
        #REMOVE FEATURE CLASSES
        ##################################################################################
        for fc in [
                inroads_split, inroads_split_line,
                "{0}{1}{2}".format(outWorkspace, os.path.sep, outpoints),
                "{0}{1}{2}".format(outWorkspace, os.path.sep, outsegments),
                "{0}{1}{2}".format(outWorkspace, os.path.sep, outpolygons)
        ]:
            delFCByPath(fc)

        ##################################################################################
        #COMPUTING THE BOUNDING BOX
        ##################################################################################
        # Instantiate pyvoronoi
        pv = pyvoronoi.Pyvoronoi(factor)
        arcpy.AddMessage("Add points to voronoi")
        pointOIDs = []
        if inPointsBBox != None:
            extents.append(inPointsBBox)
            for point in arcpy.da.SearchCursor(inpoints,
                                               ['SHAPE@X', 'SHAPE@Y', 'OID@']):
                pointOIDs.append(point[2])
                pv.AddPoint([point[0], point[1]])

        arcpy.AddMessage("Computing bounding box outlines")
        finalBBox = mergeExtent(extents)
        finalBBoxExpended = arcpy.Extent(finalBBox.XMin - 1,
                                         finalBBox.YMin - 1,
                                         finalBBox.XMax + 1,
                                         finalBBox.YMax + 1)
        bbox_line = [
            arcpy.Array([
                arcpy.Point(finalBBox.XMin, finalBBox.YMin),
                arcpy.Point(finalBBox.XMax, finalBBox.YMin)
            ]),
            arcpy.Array([
                arcpy.Point(finalBBox.XMin, finalBBox.YMin),
                arcpy.Point(finalBBox.XMin, finalBBox.YMax)
            ]),
            arcpy.Array([
                arcpy.Point(finalBBox.XMax, finalBBox.YMax),
                arcpy.Point(finalBBox.XMin, finalBBox.YMax)
            ]),
            arcpy.Array([
                arcpy.Point(finalBBox.XMax, finalBBox.YMax),
                arcpy.Point(finalBBox.XMax, finalBBox.YMin)
            ]),
            arcpy.Array([
                arcpy.Point(finalBBoxExpended.XMin, finalBBoxExpended.YMin),
                arcpy.Point(finalBBoxExpended.XMax, finalBBoxExpended.YMin)
            ]),
            arcpy.Array([
                arcpy.Point(finalBBoxExpended.XMin, finalBBoxExpended.YMin),
                arcpy.Point(finalBBoxExpended.XMin, finalBBoxExpended.YMax)
            ]),
            arcpy.Array([
                arcpy.Point(finalBBoxExpended.XMax, finalBBoxExpended.YMax),
                arcpy.Point(finalBBoxExpended.XMin, finalBBoxExpended.YMax)
            ]),
            arcpy.Array([
                arcpy.Point(finalBBoxExpended.XMax, finalBBoxExpended.YMax),
                arcpy.Point(finalBBoxExpended.XMax, finalBBoxExpended.YMin)
            ])
        ]
        arcpy.AddMessage("Bounding Box Info: {0},{1} | {2},{3}".format(
            finalBBox.XMin, finalBBox.YMin, finalBBox.XMax, finalBBox.YMax))

        ##################################################################################
        #FORMAT INPUT. NEED TO MAKE SURE LINES ARE SPLIT AT VERTICES AND THAT THERE ARE NO OVERLAPS
        ##################################################################################
        arcpy.AddMessage("Format lines")
        arcpy.AddMessage("Split lines at vertices")
        arcpy.SplitLine_management(in_features=inlines,
                                   out_feature_class=inroads_split)

        arcpy.AddMessage("Add bounding box")
        with arcpy.da.InsertCursor(inroads_split,
                                   ['SHAPE@', inroads_identifier]) as op:
            for pointArray in bbox_line:
                arcpy.AddMessage("{0},{1} - {2},{3}".format(
                    pointArray[0].X, pointArray[0].Y, pointArray[1].X,
                    pointArray[1].Y))
                op.insertRow([arcpy.Polyline(pointArray), None])
        del op

        arcpy.AddMessage("Split lines at intersections")
        arcpy.FeatureToLine_management(inroads_split, inroads_split_line, '#',
                                       'ATTRIBUTES')

        ##################################################################################
        #SEND LINE INPUT TO VORONOI AND CONSTRUCT THE GRAPH
        ##################################################################################
        arcpy.AddMessage("Add lines to voronoi")
        lineIds = []
        for road in arcpy.da.SearchCursor(
                inroads_split_line,
            ['SHAPE@', 'OID@', 'SHAPE@LENGTH', inroads_identifier]):
            if (road[2] > 0):
                lineIds.append(road[3])
                pv.AddSegment([[road[0].firstPoint.X, road[0].firstPoint.Y],
                               [road[0].lastPoint.X, road[0].lastPoint.Y]])

        arcpy.AddMessage("Construct voronoi")
        pv.Construct()
        cells = pv.GetCells()
        edges = pv.GetEdges()
        vertices = pv.GetVertices()

        ##################################################################################
        #CREATE THE OUTPUT FEATURE CLASSES
        ##################################################################################
        arcpy.AddMessage("Construct output segment feature class")
        if len(outsegments) > 0:
            arcpy.CreateFeatureclass_management(
                outWorkspace,
                outsegments,
                'POLYLINE',
                spatial_reference=spatial_reference)
            arcpy.AddField_management(outsegments, 'EdgeIndex', "LONG")
            arcpy.AddField_management(outsegments, 'Start', "LONG")
            arcpy.AddField_management(outsegments, 'End', "LONG")
            arcpy.AddField_management(outsegments, 'IsLinear', "SHORT")
            arcpy.AddField_management(outsegments, 'IsPrimary', "SHORT")
            arcpy.AddField_management(outsegments, 'Site1', "LONG")
            arcpy.AddField_management(outsegments, 'Site2', "LONG")
            arcpy.AddField_management(outsegments, 'Cell', "LONG")
            arcpy.AddField_management(outsegments, 'Twin', "LONG")

            fields = [
                'EdgeIndex', 'Start', 'End', 'IsLinear', 'IsPrimary', 'Site1',
                'Site2', 'Cell', 'Twin', 'SHAPE@'
            ]
            cursor = arcpy.da.InsertCursor(outsegments, fields)
            for cIndex in range(len(cells)):
                cell = cells[cIndex]
                if cell.is_open == False:
                    if (cIndex % 5000 == 0 and cIndex > 0):
                        arcpy.AddMessage("Cell Index: {0}".format(cIndex))

                    for i in range(len(cell.edges)):
                        e = edges[cell.edges[i]]
                        startVertex = vertices[e.start]
                        endVertex = vertices[e.end]

                        max_distance = distance(
                            [startVertex.X, startVertex.Y],
                            [endVertex.X, endVertex.Y]) / 10
                        array = arcpy.Array()
                        if startVertex != -1 and endVertex != -1:
                            if (e.is_linear == True):
                                array = arcpy.Array([
                                    arcpy.Point(startVertex.X, startVertex.Y),
                                    arcpy.Point(endVertex.X, endVertex.Y)
                                ])

                            else:
                                try:
                                    points = pv.DiscretizeCurvedEdge(
                                        cell.edges[i], max_distance)
                                    for p in points:
                                        #print "{0},{1}".format(p[0], p[1])
                                        array.append(arcpy.Point(p[0], p[1]))
                                except:
                                    arcpy.AddMessage(
                                        "Issue at: {5}. The drawing has been defaulted from a curved line to a straight line. Length {0} - From: {1}, {2} To: {3}, {4}"
                                        .format(max_distance, startVertex.X,
                                                startVertex.Y, endVertex.X,
                                                endVertex.Y, i))
                                    #array = arcpy.Array([arcpy.Point(startVertex.X, startVertex.Y), arcpy.Point(endVertex.X, endVertex.Y)])
                                    array = arcpy.Array([
                                        arcpy.Point(startVertex.X,
                                                    startVertex.Y),
                                        arcpy.Point(endVertex.X, endVertex.Y)
                                    ])

                            polyline = arcpy.Polyline(array)
                            cursor.insertRow(
                                (cell.edges[i], e.start, e.end, e.is_linear,
                                 e.is_primary, e.site1, e.site2, e.cell,
                                 e.twin, polyline))

        arcpy.AddMessage("Construct output cells feature class")
        if len(outpolygons) > 0:
            arcpy.CreateFeatureclass_management(
                outWorkspace,
                outpolygons,
                'POLYGON',
                spatial_reference=spatial_reference)
            arcpy.AddField_management(outpolygons, 'CELL_ID', "LONG")
            arcpy.AddField_management(outpolygons, 'CONTAINS_POINT', "SHORT")
            arcpy.AddField_management(outpolygons, 'CONTAINS_SEGMENT', "SHORT")
            arcpy.AddField_management(outpolygons, 'SITE', "LONG")
            arcpy.AddField_management(outpolygons, 'SOURCE_CATEGORY', "SHORT")
            arcpy.AddField_management(outpolygons, 'INPUT_TYPE', "TEXT")
            arcpy.AddField_management(outpolygons, 'INPUT_ID', "LONG")
            fields = [
                'CELL_ID', 'CONTAINS_POINT', 'CONTAINS_SEGMENT', 'SHAPE@',
                'SITE', 'SOURCE_CATEGORY', 'INPUT_TYPE', 'INPUT_ID'
            ]
            cursor = arcpy.da.InsertCursor(outpolygons, fields)
            for cIndex in range(len(cells)):
                cell = cells[cIndex]
                if cell.is_open == False:
                    if (cIndex % 5000 == 0 and cIndex > 0):
                        arcpy.AddMessage("Cell Index: {0}".format(cIndex))
                    pointArray = arcpy.Array()
                    for vIndex in cell.vertices:
                        pointArray.add(
                            arcpy.Point(vertices[vIndex].X,
                                        vertices[vIndex].Y))
                    input_type = None
                    input_id = None
                    if cell.site >= len(pointOIDs):
                        input_type = "LINE"
                        input_id = lineIds[cell.site - len(pointOIDs)]
                    else:
                        input_type = "POINT"
                        input_id = pointOIDs[cell.site]
                    polygon = arcpy.Polygon(pointArray)
                    cursor.insertRow(
                        (cell.cell_identifier, cell.contains_point,
                         cell.contains_segment, polygon, cell.site,
                         cell.source_category, input_type, input_id))
            del cursor

    except Exception:
        tb = sys.exc_info()[2]
        tbInfo = traceback.format_tb(tb)[-1]
        arcpy.AddError('PYTHON ERRORS:\n%s\n%s: %s\n' %
                       (tbInfo, sys.exc_type, sys.exc_value))
        # print('PYTHON ERRORS:\n%s\n%s: %s\n' %
        #                 (tbInfo, _sys.exc_type, _sys.exc_value))
        arcpy.AddMessage('PYTHON ERRORS:\n%s\n%s: %s\n' %
                         (tbInfo, sys.exc_type, sys.exc_value))
        gp_errors = arcpy.GetMessages(2)
        if gp_errors:
            arcpy.AddError('GP ERRORS:\n%s\n' % gp_errors)
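
# Note: validateLicense, validateInputLineFeatureClass, validateInputPointFeatureClass,
# mergeExtent, delFCByPath and distance are helper functions defined elsewhere in this
# module and are not shown in this excerpt. As an assumption only, distance() is taken
# to be a planar Euclidean distance between two [x, y] pairs; a minimal sketch:
import math

def distance(p1, p2):
    # Straight-line distance between two [x, y] coordinate pairs (assumed behaviour,
    # not necessarily the original author's implementation).
    return math.hypot(p2[0] - p1[0], p2[1] - p1[1])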
Example #5
# import system modules
import arcpy
from arcpy import env
import os
import shutil

arcpy.env.overwriteOutput = True

#Here we define the folder that will contain the "RUSTICO" and "URBANO" folders with the shapefiles
root = "d:\\carpeta"
env.workspace = root
auxiliar = root + "\\AUXILIAR"
os.mkdir(auxiliar)

#Clip the parcel boundaries and parcel frontages, correcting some graphic errors
arcpy.FeatureToLine_management("PARCELA.shp", "AUXILIAR\\PARCELA_LINE.shp")
arcpy.Erase_analysis("AUXILIAR\\PARCELA_LINE.shp", "DOMPUB.shp",
                     "AUXILIAR\\LINDEIROS_.shp")
arcpy.Dissolve_management("PARCELA.shp", "AUXILIAR\\MANZANAS_.shp")
arcpy.FeatureToLine_management("AUXILIAR\\MANZANAS_.shp",
                               "AUXILIAR\\MANZANAS_FRONTE.shp")
arcpy.Buffer_analysis("DOMPUB.shp", "AUXILIAR\\DOMPUB_.shp", "0,01", "FULL",
                      "ROUND", "ALL")
arcpy.Intersect_analysis(
    ["AUXILIAR\\DOMPUB_.shp", "AUXILIAR\\MANZANAS_FRONTE.shp"],
    "AUXILIAR\\MANZANAS_FRONTE2", "ALL", "", "")
arcpy.Erase_analysis("AUXILIAR\\LINDEIROS_.shp",
                     "AUXILIAR\\MANZANAS_FRONTE2.shp", "LINDEIROS.shp")

# Create the preliminary party walls and facades of the upper floors (between buildings, to capture party walls in overhangs)
arcpy.FeatureToLine_management("ALTAS.shp", "AUXILIAR\\ALTAS_LINE.shp")
Example #6
def ExtractIntFromSeg(Segments, Intersections, Buffer, Output):
    #Output should be in a GDB, not a shapefile (the Shape_Length field queried below only exists in a geodatabase)
    SegFields = [f.name for f in arcpy.ListFields(Segments)]

    SelInt = common.CreateOutPath(MainFile=Output,
                                  appendix='SelInt',
                                  Extension='')
    arcpy.SpatialJoin_analysis(target_features=Intersections,
                               join_features=Segments,
                               out_feature_class=SelInt,
                               join_operation='JOIN_ONE_TO_ONE',
                               join_type='KEEP_COMMON',
                               match_option='INTERSECT',
                               search_radius=Buffer)
    arcpy.DeleteField_management(
        SelInt, [f.name for f in arcpy.ListFields(SelInt) if not f.required])

    SelIntBuf = common.CreateOutPath(MainFile=Output,
                                     appendix='SelIntBuf',
                                     Extension='')
    arcpy.Buffer_analysis(in_features=SelInt,
                          out_feature_class=SelIntBuf,
                          buffer_distance_or_field=str(Buffer) + ' Feet',
                          line_side='FULL',
                          line_end_type='FLAT')

    F2L = common.CreateOutPath(MainFile=Output, appendix='F2L', Extension='')
    arcpy.FeatureToLine_management(in_features=[Segments, SelIntBuf],
                                   out_feature_class=F2L,
                                   attributes='ATTRIBUTES')

    F2LLayer = common.CreateOutLayer('F2LLayer')
    arcpy.MakeFeatureLayer_management(in_features=F2L, out_layer=F2LLayer)
    arcpy.SelectLayerByAttribute_management(in_layer_or_view=F2LLayer,
                                            selection_type='NEW_SELECTION',
                                            where_clause="BUFF_DIST = 0")

    Seg1F = common.CreateOutPath(MainFile=Output,
                                 appendix='Seg1F',
                                 Extension='')
    arcpy.CopyFeatures_management(in_features=F2LLayer,
                                  out_feature_class=Seg1F)
    arcpy.DeleteField_management(Seg1F, [
        f.name for f in arcpy.ListFields(Seg1F)
        if not f.required and not f.name in SegFields
    ])

    Selseg = common.CreateOutPath(MainFile=Output,
                                  appendix='SelSeg',
                                  Extension='')
    arcpy.SpatialJoin_analysis(target_features=Seg1F,
                               join_features=SelIntBuf,
                               out_feature_class=Selseg,
                               join_operation='JOIN_ONE_TO_ONE',
                               join_type='KEEP_ALL',
                               match_option='WITHIN')

    SPJLayer = common.CreateOutLayer('SPJLayer')
    arcpy.MakeFeatureLayer_management(in_features=Selseg, out_layer=SPJLayer)
    arcpy.SelectLayerByAttribute_management(
        in_layer_or_view=SPJLayer,
        selection_type='NEW_SELECTION',
        where_clause="Join_Count = 0 AND Shape_Length>528")

    arcpy.CopyFeatures_management(in_features=SPJLayer,
                                  out_feature_class=Output)
    arcpy.DeleteField_management(Output, [
        f.name for f in arcpy.ListFields(Output)
        if not f.required and not f.name in SegFields
    ])

    arcpy.Delete_management(SelInt)
    arcpy.Delete_management(SelIntBuf)
    arcpy.Delete_management(F2L)
    arcpy.Delete_management(F2LLayer)
    arcpy.Delete_management(Seg1F)
    arcpy.Delete_management(Selseg)
    arcpy.Delete_management(SPJLayer)
Example #7
def connected_wetlands(lakes_fc, lake_id_field, wetlands_fc, out_table):
    env.workspace = 'in_memory'
    env.outputCoordinateSystem = arcpy.SpatialReference(102039)

    arcpy.Buffer_analysis(lakes_fc, 'lakes_30m', '30 meters')

    arcpy.FeatureToLine_management('lakes_30m', 'shorelines')

    # 3 selections for the wetlands types we want to look at
    openwater_exp = """"VegType" = 'PEMorPAB'"""
    forested_exp = """"VegType" = 'PFO'"""
    scrubshrub_exp = """"VegType" = 'PSS'"""
    other_exp = """"VegType" = 'Other'"""
    all_exp = ''

    selections = [
        all_exp, forested_exp, scrubshrub_exp, openwater_exp, other_exp
    ]
    temp_tables = [
        'AllWetlands', 'ForestedWetlands', 'ScrubShrubWetlands',
        'OpenWaterWetlands', 'OtherWetlands'
    ]

    # for each wetland type, get the count of intersection wetlands, and the length of the lake
    # shoreline that is within a wetland polygon
    for sel, temp_table in zip(selections, temp_tables):
        print("Creating temporary table for wetlands where {0}".format(sel))
        # this function adds the count and the area using the lake as the zone
        polygons_in_zones('lakes_30m',
                          lake_id_field,
                          wetlands_fc,
                          temp_table,
                          sel,
                          contrib_area=True)

        # make good field names now rather than later
        for f in new_fields:
            cu.rename_field(temp_table, f, f.replace('Poly', temp_table), True)

        # shoreline calculation
        # using the Shape_Length field so can't do this part in memory
        shoreline_gdb = cu.create_temp_GDB('shoreline')
        selected_wetlands = os.path.join(shoreline_gdb, 'wetlands')
        arcpy.Select_analysis(wetlands_fc, selected_wetlands, sel)
        intersect_output = os.path.join(shoreline_gdb, "intersect")
        arcpy.Intersect_analysis(['shorelines', selected_wetlands],
                                 intersect_output)
        arcpy.Statistics_analysis(intersect_output, 'intersect_stats',
                                  [['Shape_Length', 'SUM']], lake_id_field)
        cu.one_in_one_out('intersect_stats', ['SUM_Shape_Length'], lakes_fc,
                          lake_id_field, 'temp_shoreline_table')
        cu.redefine_nulls('temp_shoreline_table', ['SUM_Shape_Length'], [0])
        shoreline_field = temp_table + "_Shoreline_Km"
        arcpy.AddField_management('temp_shoreline_table', shoreline_field,
                                  'DOUBLE')
        arcpy.CalculateField_management('temp_shoreline_table',
                                        shoreline_field,
                                        '!SUM_Shape_Length!/1000', 'PYTHON')

        # join the shoreline value to the temp_table
        arcpy.JoinField_management(temp_table, lake_id_field,
                                   'temp_shoreline_table', lake_id_field,
                                   shoreline_field)

        # clean up shoreline intermediates
        for item in [shoreline_gdb, 'intersect_stats', 'temp_shoreline_table']:
            arcpy.Delete_management(item)

    # join em up and copy to final
    temp_tables.remove('AllWetlands')
    for t in temp_tables:
        try:
            arcpy.JoinField_management('AllWetlands', lake_id_field, t,
                                       lake_id_field)
        # sometimes there's no table if it was an empty selection
        except:
            empty_fields = [f.replace('Poly', t) for f in new_fields]
            for ef in empty_fields:
                arcpy.AddField_management('AllWetlands', ef, 'Double')
                arcpy.CalculateField_management('AllWetlands', ef, '0',
                                                'PYTHON')
            continue
    # remove all the extra zone fields, which have underscore in name
    drop_fields = [
        f.name
        for f in arcpy.ListFields('AllWetlands', 'Permanent_Identifier_*')
    ]
    for f in drop_fields:
        arcpy.DeleteField_management('AllWetlands', f)

    # remove all the overlapping metrics, which do not apply by definition
    fields = [f.name for f in arcpy.ListFields('AllWetlands')]
    for f in fields:
        if 'Overlapping' in f:
            arcpy.DeleteField_management('AllWetlands', f)
    arcpy.CopyRows_management('AllWetlands', out_table)

    for item in ['AllWetlands'] + temp_tables:
        try:
            arcpy.Delete_management(item)
        except:
            continue
Example #8
    def execute(self, params, messages):
        deleteInMemory()
        rawPath = os.path.dirname(
            params[1].valueAsText) + "\\" + os.path.basename(
                params[1].valueAsText) + "_Raw_Data"
        finalPath = os.path.dirname(
            params[1].valueAsText) + "\\" + os.path.basename(
                params[1].valueAsText) + "_Final_Data"
        testPath = os.path.dirname(
            params[1].valueAsText) + "\\" + os.path.basename(
                params[1].valueAsText) + "_Test_Data"
        if not os.path.exists(rawPath):
            os.mkdir(rawPath)
        if not os.path.exists(finalPath):
            os.mkdir(finalPath)
        if not os.path.exists(testPath):
            os.mkdir(testPath)
        poly = arcpy.MakeFeatureLayer_management(params[0].valueAsText)
        outRaw = rawPath + "\\" + os.path.basename(params[1].valueAsText)
        outFinal = finalPath + "\\" + os.path.basename(params[1].valueAsText)
        outTest = testPath + "\\" + os.path.basename(params[1].valueAsText)
        arcpy.env.workspace = os.path.dirname(params[1].valueAsText)
        arcpy.env.scratchWorkspace = os.path.dirname(params[1].valueAsText)
        Sites = arcpy.MakeFeatureLayer_management(params[2].valueAsText)
        DEM = params[4].valueAsText
        zFactor = params[5].value
        Streams = arcpy.MakeFeatureLayer_management(params[6].valueAsText)
        #Process Input Polygon
        lyr = finalPath + "\\" + os.path.basename(
            params[1].valueAsText) + "_Poly.shp"
        polyParts = int(arcpy.GetCount_management(poly).getOutput(0))
        if polyParts > 1:
            arcpy.Dissolve_management(poly, lyr)
        else:
            arcpy.CopyFeatures_management(poly, lyr)
        lyrDesc = arcpy.Describe(lyr)
        lyrFields = lyrDesc.fields
        lyrExtent = lyrDesc.extent
        arcpy.env.extent = lyrExtent
        fieldx = 0
        for field in lyrFields:
            if field.name == "POLY_ACRES":
                fieldx = 1
        if fieldx == 0:
            arcpy.AddField_management(lyr, "POLY_ACRES", 'DOUBLE', 12, 8)
        arcpy.CalculateField_management(lyr, "POLY_ACRES",
                                        "!shape.area@ACRES!", "PYTHON_9.3", "")
        Desc = arcpy.Describe(lyr)
        polyAcres = ([
            row[0] for row in arcpy.da.SearchCursor(lyr, ["POLY_ACRES"])
        ][0])
        arcpy.AddMessage("Polygon acreage = %d" % polyAcres)
        #Clip Sites
        siteQuery = params[3].ValueAsText
        outPoints = outFinal + "_Data_Points.shp"
        outSites = outRaw + "_Sites"
        if siteQuery == "Use All Sites":
            arcpy.MakeFeatureLayer_management(Sites, outSites)
        else:
            arcpy.MakeFeatureLayer_management(Sites, outSites, siteQuery)
        arcpy.SelectLayerByLocation_management(outSites, "INTERSECT", lyr)
        siteResult = int(arcpy.GetCount_management(outSites).getOutput(0))
        arcpy.AddMessage(siteQuery)
        arcpy.AddMessage("Site Count = " + str(siteResult))
        if siteResult < 10:
            arcpy.AddMessage("There are insufficient site data for analysis")
            sys.exit(0)
        arcpy.FeatureToPoint_management(outSites, outPoints, "CENTROID")
        #Add Random field to extract build and test points
        arcpy.AddField_management(outPoints, "Test_Hold", "Double")
        with arcpy.da.UpdateCursor(outPoints, "Test_Hold") as cursor:
            for row in cursor:
                row[0] = random.random()
                cursor.updateRow(row)
        buildPoints = outTest + "_Build_Sites.shp"
        testPoints = outTest + "_Test_Sites.shp"
        arcpy.MakeFeatureLayer_management(outPoints, "in_memory\\test",
                                          """ "Test_Hold" <= 0.2 """)
        arcpy.CopyFeatures_management("in_memory\\test", testPoints)
        arcpy.MakeFeatureLayer_management(outPoints, "in_memory\\build",
                                          """ "Test_Hold" > 0.2 """)
        arcpy.CopyFeatures_management("in_memory\\build", buildPoints)
        #These are the raw layers of interest
        outSlope = outRaw + "_slp"
        outTopoProm = outRaw + "_pro"
        outHHODist = outRaw + "_dtw"
        outEleHHO = outRaw + "_eaw"
        outConfDist = outRaw + "_dtc"
        outEaConf = outRaw + "_eac"
        #DEM-based analysis
        outDEM = outRaw + "_dem"
        arcpy.Clip_management(DEM, "#", outDEM, lyr, "#", "ClippingGeometry")
        arcpy.Slope_3d(outDEM, outSlope, "DEGREE", zFactor)
        outBlk = BlockStatistics(outDEM, NbrCircle(3, "CELL"), "RANGE", "DATA")
        outBlk.save(outTopoProm)
        #Stream-based analysis - runs only if streams are within the input polygon
        outStreams = outFinal + "_str.shp"
        outVPts = outRaw + "_vpt.shp"
        vPtsEle = outRaw + "_vpe.shp"
        vPtsCor = outRaw + "_vpc.shp"
        outCPts = outRaw + "_cpt.shp"
        outCPsC = outRaw + "_cpc.shp"
        outBuff = outRaw + "_buff.shp"
        outDiss = outRaw + "_diss.shp"
        outConPts = outRaw + "_con.shp"
        cPtsEle = outRaw + "_cpe.shp"
        arcpy.Clip_analysis(Streams, lyr, outStreams)
        streamCount = int(arcpy.GetCount_management(outStreams).getOutput(0))
        if not streamCount == 0:
            arcpy.FeatureVerticesToPoints_management(outStreams, outVPts,
                                                     "ALL")
            arcpy.gp.ExtractValuesToPoints_sa(outVPts, outDEM, vPtsEle, "NONE",
                                              "VALUE_ONLY")
            arcpy.MakeFeatureLayer_management(vPtsEle, "in_memory\\vPtsCor",
                                              """"RASTERVALU" > 0""")
            arcpy.CopyFeatures_management("in_memory\\vPtsCor", vPtsCor)
            arcpy.AddField_management(vPtsCor, "WAT_ELEV", "SHORT")
            arcpy.CalculateField_management(vPtsCor, "WAT_ELEV",
                                            "[RASTERVALU]", "VB", "#")
            arcpy.gp.EucAllocation_sa(vPtsCor, "in_memory\\outAllo", "#", "#",
                                      "10", "WAT_ELEV", outHHODist, "#")
            arcpy.Minus_3d(outDEM, "in_memory\\outAllo", outEleHHO)
            deleteList = [outVPts, vPtsEle, vPtsCor]
            #Confluence-based analysis
            arcpy.FeatureVerticesToPoints_management(outStreams,
                                                     "in_memory\\outCPts",
                                                     "BOTH_ENDS")
            arcpy.MakeFeatureLayer_management("in_memory\\outCPts", outCPts)
            arcpy.FeatureToLine_management(lyr, "in_memory\\lyrLine", "#",
                                           "ATTRIBUTES")
            arcpy.SelectLayerByLocation_management(outCPts,
                                                   "WITHIN_A_DISTANCE",
                                                   "in_memory\\lyrLine",
                                                   "100 Meters",
                                                   "NEW_SELECTION")
            arcpy.SelectLayerByLocation_management(outCPts, "#", "#", "#",
                                                   "SWITCH_SELECTION")
            arcpy.CopyFeatures_management(outCPts, outCPsC)
            arcpy.Buffer_analysis(outCPsC, outBuff, "10 METERS", "#", "#",
                                  "NONE", "#")
            arcpy.Dissolve_management(outBuff, outDiss, "#", "#",
                                      "SINGLE_PART", "#")
            arcpy.SpatialJoin_analysis(outDiss, outCPsC, "in_memory\\outJoin")
            arcpy.MakeFeatureLayer_management("in_memory\\outJoin",
                                              "in_memory\\joinLayer",
                                              """"Join_Count" >= 3""")
            arcpy.FeatureToPoint_management("in_memory\\joinLayer", outConPts,
                                            "CENTROID")
            arcpy.gp.ExtractValuesToPoints_sa(outConPts, outDEM, cPtsEle,
                                              "NONE", "VALUE_ONLY")
            arcpy.AddField_management(cPtsEle, "CONF_ELEV", "SHORT")
            arcpy.CalculateField_management(cPtsEle, "CONF_ELEV",
                                            "[RASTERVALU]", "VB", "#")
            arcpy.gp.EucAllocation_sa(cPtsEle, "in_memory\\outConfAllo", "#",
                                      "#", "10", "CONF_ELEV", outConfDist, "#")
            arcpy.Minus_3d(outDEM, "in_memory\\outConfAllo", outEaConf)
        deleteList = [
            outCPts, outCPsC, outBuff, outDiss, outConPts, cPtsEle, outVPts,
            vPtsEle, vPtsCor
        ]
        for delete in deleteList:
            arcpy.Delete_management(delete)
        #Extract values to separate tables and rename fields
        def extractValues(pointLayer, raster, outPoints, renameField):
            arcpy.gp.ExtractValuesToPoints_sa(pointLayer, raster, outPoints,
                                              "NONE", "ALL")
            arcpy.AddField_management(outPoints, renameField, "SHORT")
            arcpy.CalculateField_management(outPoints, renameField,
                                            "[RASTERVALU]", "VB", "#")
            return

        slopeTable = outRaw + "_slopePts.shp"
        promTable = outRaw + "_promPts.shp"
        distTHOtable = outRaw + "_distTHOPts.shp"
        distAHOtable = outRaw + "_distAHOPts.shp"
        distTCOtable = outRaw + "_distTCOPts.shp"
        distACOtable = outRaw + "_distACOPts.shp"
        extractValues(buildPoints, outSlope, slopeTable, "Slope")
        extractValues(buildPoints, outTopoProm, promTable, "Relief")
        if not streamCount == 0:
            extractValues(buildPoints, outHHODist, distTHOtable, "DTo_Water")
            extractValues(buildPoints, outEleHHO, distAHOtable, "DAbo_Water")
            extractValues(buildPoints, outConfDist, distTCOtable, "DTo_Conf")
            extractValues(buildPoints, outEaConf, distACOtable, "DAbo_Conf")
        #Get range of values for each layer and populate lists - reject null values
        def getValues(layer, fieldName):
            vList = []
            with arcpy.da.SearchCursor(layer, [fieldName]) as cursor:
                for row in cursor:
                    if row[0] != -999 and row[0] != -9999:
                        vList.append(row[0])
            return vList

        slopeList = getValues(slopeTable, "Slope")
        promList = getValues(promTable, "Relief")
        if not streamCount == 0:
            dtwList = getValues(distTHOtable, "DTo_Water")
            dawList = getValues(distAHOtable, "DAbo_Water")
            dtcList = getValues(distTCOtable, "DTo_Conf")
            dacList = getValues(distACOtable, "DAbo_Conf")
        deleteList = [
            slopeTable, promTable, distTHOtable, distAHOtable, distTCOtable,
            distACOtable
        ]
        for item in deleteList:
            if arcpy.Exists(item):
                arcpy.Delete_management(item)
        #Get statistics for range of values
        def meanstdv(xlist):
            from math import sqrt
            n, total, std1 = len(xlist), 0, 0
            for x in xlist:
                total = total + x
                mean = total / float(n)
            for x in xlist:
                std1 = std1 + (x - mean)**2
                std = sqrt(std1 / float(n - 1))
            return mean, std

        slopeStats = meanstdv(slopeList)
        promStats = meanstdv(promList)
        if not streamCount == 0:
            dtwStats = meanstdv(dtwList)
            dawStats = meanstdv(dawList)
            dtcStats = meanstdv(dtcList)
            dacStats = meanstdv(dacList)
        #Remap rasters according to 1-sigma range
        def remapRaster(inRaster, outRaster, recField, statList):
            R1 = statList[0] - statList[1]
            R2 = statList[0] + statList[1]
            # Convert the geoprocessing Result objects to floats so the range
            # checks and the remap string below work on numeric values.
            rasterMin = float(
                arcpy.GetRasterProperties_management(
                    inRaster, "MINIMUM").getOutput(0))
            rasterMax = float(
                arcpy.GetRasterProperties_management(
                    inRaster, "MAXIMUM").getOutput(0))
            if R1 < rasterMin:
                R1 = rasterMin
            if R2 > rasterMax:
                R2 = rasterMax
            remap = str(rasterMin) + " " + str(R1) + " 0;" + str(
                R1) + " " + str(R2) + " 1;" + str(R2) + " " + str(
                    rasterMax) + " 0"
            arcpy.Reclassify_3d(inRaster, recField, remap, outRaster, "NODATA")
            return outRaster

        targetSlope = outTest + "_slp"
        targetTopoProm = outTest + "_pro"
        targetHHODist = outTest + "_dtw"
        targetConfDist = outTest + "_dtc"
        targetEleHHO = outTest + "_eaw"
        targetEaConf = outTest + "_eac"
        remapRaster(outSlope, targetSlope, "Value", slopeStats)
        remapRaster(outTopoProm, targetTopoProm, "Value", promStats)
        if not streamCount == 0:
            remapRaster(outHHODist, targetHHODist, "Value", dtwStats)
            remapRaster(outEleHHO, targetEleHHO, "Value", dawStats)
            remapRaster(outConfDist, targetConfDist, "Value", dtcStats)
            remapRaster(outEaConf, targetEaConf, "Value", dacStats)
        #Test against test points
        def AreaAndAccuracy(inRaster, inPoly):
            rasterPoly = outRaw + "_poly.shp"
            rasterPolyarea = 0
            lyrPolyarea = 0
            testCount = int(arcpy.GetCount_management(testPoints).getOutput(0))
            arcpy.RasterToPolygon_conversion(inRaster, rasterPoly, "SIMPLIFY",
                                             "Value")
            with arcpy.da.SearchCursor(rasterPoly,
                                       ("GRIDCODE", "SHAPE@AREA")) as cursor:
                for row in cursor:
                    if row[0] == 1:
                        rasterPolyarea += row[1]
            with arcpy.da.SearchCursor(inPoly, "SHAPE@AREA") as cursor:
                for row in cursor:
                    lyrPolyarea += row[0]
            targetAcres = rasterPolyarea / lyrPolyarea
            arcpy.MakeFeatureLayer_management(rasterPoly,
                                              "in_memory\\rasterPoly",
                                              """ "GRIDCODE" = 1 """)
            arcpy.MakeFeatureLayer_management(testPoints,
                                              "in_memory\\testPoints")
            arcpy.SelectLayerByLocation_management("in_memory\\testPoints",
                                                   "WITHIN",
                                                   "in_memory\\rasterPoly")
            selectCount = int(
                arcpy.GetCount_management("in_memory\\testPoints").getOutput(
                    0))
            Accuracy = float(selectCount) / float(testCount)
            indexValue = float(Accuracy) / float(targetAcres)
            arcpy.AddMessage(
                os.path.basename(inRaster) + ": Accuracy = " +
                (str(Accuracy)[:5]) + ", Target Area Proportion = " +
                (str(targetAcres)[:5]) + ", Index = " + (str(indexValue)[:5]))
            arcpy.Delete_management(rasterPoly)
            return targetAcres, Accuracy, indexValue

        #Evaluate accuracy and target area proportion - generate accuracy/area index - eliminate where index < 1
        assessList = [
            targetSlope, targetTopoProm, targetHHODist, targetEleHHO,
            targetConfDist, targetEaConf
        ]
        sumDict = {}
        for item in assessList:
            if arcpy.Exists(item):
                testX = AreaAndAccuracy(item, lyr)
                if testX[2] >= 1:
                    sumDict[item] = testX
        nameList = sumDict.keys()
        #Weighted overlay
        outWeight = outFinal + "wgt"
        weightList = []
        for item in sumDict:
            weightList.append(str(item) + " Value " + str(sumDict[item][2]))
            weightString = ";".join(weightList)
        arcpy.gp.WeightedSum_sa(weightString, outWeight)
        deleteInMemory()
        return
Example #9
            arcpy.RemoveJoin_management(PointsFL)
            
            #Set parcel elevation to 0; this will be replaced by the SPOT value calculated above
            outIsNull = IsNull(outExtractByMask) #Identify NoData Areas
            outIsNull.save(IntermediateFiles+"\\isnull_"+str(value))
            outCon = Con(outIsNull,Elevation,0,"") #Use Con tool to change building footprint to elevation of 0 while leaving all other building footprints as is
            outCon.save(IntermediateFiles+"\\con_"+str(value)) #Final raster to be used in viewshed analysis     
            
            #buffer selected viewpoint
            arcpy.SetProgressorLabel("Buffering point "+str(value))
            outBuffer = IntermediateFiles+"\\buffer_"+str(value)+".shp"
            arcpy.Buffer_analysis(PointsFL,outBuffer,"1 mile")

            #Convert buffer polygon to line
            BufferLine = IntermediateFiles+"\\BufferLine_"+str(value)+".shp"
            arcpy.FeatureToLine_management(outBuffer,BufferLine)

            #Clip buffer to Ocean
            arcpy.SetProgressorLabel("Clipping point "+str(value)+" buffer to ocean")
            BufferClip = IntermediateFiles+"\\buffer_clipped_"+str(value).split(".")[0]+".shp"
            arcpy.Clip_analysis(outBuffer, Ocean, BufferClip)
           
            if FlrCnt ==1: #parcel floor count = 1
                arcpy.AddMessage("\nParcel "+str(value)+" has 1 story to process. Calculating viewshed now...")
                print "\nParcel ",str(value)," has 1 story to process. Calculating viewshed now..."
                
                DegViewshed(1,10) #Calculate the viewshed with an observer height of 10 feet then move to point

                arcpy.AddMessage("First floor viewshed for parcel "+str(value)+" has been completed...")                  
                print "First floor viewshed for parcel ",str(value)," has been completed..."
                arcpy.AddMessage(str(count)+" of "+str(RangeCount)+" parcels have been completed.\n")
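Example #10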
        output3 = "AUXILIAR4\\" + str(edificio) + "_21.shp"
        if row2.FID == 0:
            arcpy.Copy_management(output, output2)
        else:
            arcpy.Merge_management([output2, output], output3)
            arcpy.Copy_management(output3, output2)
    shutil.rmtree(auxiliar4)


auxiliar2 = root + "\\AUXILIAR2"
os.mkdir(auxiliar2)
auxiliar3 = root + "\\AUXILIAR3"
os.mkdir(auxiliar3)

#THE ADJACENCY LAYERS ARE RECALCULATED FROM THE USER'S MODIFICATIONS
arcpy.FeatureToLine_management("PARCELA.shp", "AUXILIAR2\\PARCELA_LINE.shp")
arcpy.Intersect_analysis(
    ["MEDIANEIRAS_LINDEIROS.shp", "AUXILIAR2\\PARCELA_LINE.shp"],
    "LINDEIROS.shp")
arcpy.FeatureToLine_management("ALTAS.shp", "AUXILIAR2\\ALTAS_LINE.shp")
arcpy.Erase_analysis("AUXILIAR2\\ALTAS_LINE.shp", "MEDIANEIRAS_LINDEIROS.shp",
                     "FACHADAS.shp")
arcpy.Erase_analysis("AUXILIAR2\\ALTAS_LINE.shp", "FACHADAS.shp",
                     "MEDIANERAS.shp")

#FOR loop to export each building to a separate layer file
arcpy.AddField_management("ALTAS.dbf", "AREA", "FLOAT")
arcpy.CalculateField_management("ALTAS.dbf", "AREA",
                                "!SHAPE.area@SQUAREMETERS!", "PYTHON_9.3")
rows = arcpy.SearchCursor("ALTAS.dbf")
for row in rows:
Example #11
arcpy.SelectLayerByLocation_management("temp_lines_lyr", 'intersect', study_area)
lines_study_area = arcpy.FeatureClassToFeatureClass_conversion(lines_copy_lyr, temp_dir, 'temp_lines.shp')

# erase 
print("Adding canyon roads...")
canyon_roads = r'.\Inputs\Canyon_Roads.shp'
lines_erased = os.path.join(temp_dir, 'lines_erased.shp')
arcpy.Erase_analysis(lines_study_area, canyon_roads, lines_erased)

# merge
arcpy.Append_management([canyon_roads], lines_erased, schema_type='NO_TEST')


# split lines at vertices
#lines_copy = arcpy.FeatureToLine_management(lines_copy_lyr, os.path.join(temp_dir, 'temp_lines2.shp'))
lines_copy = arcpy.FeatureToLine_management(lines_erased, os.path.join(temp_dir, 'temp_lines2.shp'))

lines_copy_lyr = arcpy.MakeFeatureLayer_management(lines_copy,"temp_lines_lyr2")




# Add unique ID
unique_id_field = 'id'
arcpy.AddField_management(lines_copy, field_name=unique_id_field, field_type='LONG')
arcpy.CalculateField_management(lines_copy, unique_id_field, '!FID!')

#=====================================
# Create Nodes
#=====================================
Example #12
        arcpy.Merge_management(infeatures, merge)
        print  "merge done"

# Let us check that the merge above did not alter the quality of our lines and correct them
        arcpy.RepairGeometry_management(merge, "DELETE_NULL")
        print  "geometry repaired"

# Replace a layer/table view name with a path to a dataset (which can be a layer file) or create the layer/table view within the script
# The following inputs are layers or table views: "centr_rail_Merge1"
#arcpy.ExtendLine_edit(merge, "", "EXTENSION")
#print "extend line done" 


# Feature to line allows us to split the different line shapefiles into segments at each line crossing [improvement from DH]
        arcpy.FeatureToLine_management(merge, feat_to_line, "", "ATTRIBUTES")
        print  "feature to line done"

        database_creation = timeit.timeit()
        print  database_creation - start

# Let us check that the splitting above did not alter the quality of our lines and correct them
        arcpy.RepairGeometry_management(feat_to_line, "DELETE_NULL")
        print  "geometry repaired"

        arcpy.AddField_management(feat_to_line, "Length", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        print  "generate Length field centroids done"

        arcpy.CalculateField_management(feat_to_line, "Length", "!shape.geodesicLength@KILOMETERS!", "PYTHON_9.3", "")
        print  "calculate length in kilometers"
Example #13
def procesar_calidad(cant_zonas=0, data=[], campos=['UBIGEO', 'ZONA']):

    if len(data) == 0:
        data = conex.obtener_lista_zonas_calidad(cant_zonas)[:]

    importar_tablas_trabajo(data, campos)
    where = expresiones_consulta_arcpy.Expresion(data, campos)
    arcpy.AddField_management(tb_viviendas_ordenadas, 'IDMANZANA', 'TEXT')
    arcpy.CalculateField_management(tb_viviendas_ordenadas, 'IDMANZANA',
                                    '!UBIGEO!+!ZONA!+!MANZANA!', 'PYTHON_9.3')

    # print "Importar"

    list_zonas = [(x[0], x[1])
                  for x in arcpy.da.SearchCursor(tb_zonas, ["UBIGEO", "ZONA"])]
    ######################################################QUALITY CHECK: MULTI-FAMILY DOORS OUTSIDE THE BLOCK FRONTAGE############################################################

    arcpy.AddField_management(tb_viviendas_ordenadas, 'IDMANZANA', 'TEXT')

    arcpy.CalculateField_management(tb_viviendas_ordenadas, 'IDMANZANA',
                                    '!UBIGEO!+!ZONA!+!MANZANA!', 'PYTHON_9.3')
    manzanas_mfl = arcpy.MakeFeatureLayer_management(tb_manzanas,
                                                     "manzanas_mfl", where)
    viviendas_mfl = arcpy.MakeFeatureLayer_management(tb_viviendas_ordenadas,
                                                      "viviendas_mfl", where)
    frentes_mfl = arcpy.MakeFeatureLayer_management(tb_frentes, "frentes_mfl",
                                                    where)
    mzs_line = arcpy.FeatureToLine_management(manzanas_mfl,
                                              "in_memory/mzs_line")
    puertas_multifamiliar = arcpy.MakeFeatureLayer_management(
        tb_viviendas_ordenadas, "puertas_multifamiliar", "p29=6")
    puertas_multifamiliar_afuera = arcpy.SelectLayerByLocation_management(
        puertas_multifamiliar, "INTERSECT", mzs_line, '', "NEW_SELECTION",
        "INVERT")
    viviendas_selecc_frentes_mfl = arcpy.SelectLayerByLocation_management(
        viviendas_mfl, "INTERSECT", mzs_line)
    viviendas_selecc_frentes = arcpy.CopyFeatures_management(
        viviendas_selecc_frentes_mfl, "in_memory/viv_selecc_frentes")
    arcpy.CopyFeatures_management(puertas_multifamiliar_afuera, error_1)

    ########################################LIST OF ZONES WITH MULTI-FAMILY DOOR ERRORS###############################
    list_1 = list(
        set([(x[0], x[1])
             for x in arcpy.da.SearchCursor(error_1, ["UBIGEO", "ZONA"])]))
    zonas_error_puertas_multi = list(
        set([(x[0], x[1])
             for x in arcpy.da.SearchCursor(error_1, ["UBIGEO", "ZONA"])]))

    # print zonas_error_puertas_multi

    #####################################################QUALITY CHECK: EXISTENCE OF ROAD AXES PER ZONE#######################################################################
    # tb_ejes_viales

    ejes_viales_mfl = arcpy.MakeFeatureLayer_management(
        tb_ejes_viales, "ejes_viales_mfl")
    manzanas_sin_vias = arcpy.SelectLayerByLocation_management(
        manzanas_mfl, "INTERSECT", ejes_viales_mfl, "20 METERS",
        "NEW_SELECTION", "INVERT")
    arcpy.CopyFeatures_management(manzanas_sin_vias, error_2)

    ######################################LIST OF ZONES WITHOUT ROAD AXES#############################################
    #list_2 = []
    #for x in arcpy.da.SearchCursor(tb_zonas, ["UBIGEO", "ZONA"]):
    #    where = " UBIGEO='{}' AND ZONA='{}'".format(x[0], x[1])
    #    manzanas_mfl = arcpy.MakeFeatureLayer_management(tb_manzanas, "manzanas_mfl", where)
    #    manzanas_sin_vias_mfl = arcpy.MakeFeatureLayer_management(error_2, "manzanas_sin_vias_mfl", where)
    #    a = int(arcpy.GetCount_management(manzanas_mfl).getOutput(0))
    #    b = int(arcpy.GetCount_management(manzanas_sin_vias_mfl).getOutput(0))
    #    if a != 0:
    #        porcentaje = b / float(a) * 100
    #
    #    else:
    #        porcentaje = 100
    #
    #    if porcentaje > 10:
    #        list_2.append((x[0], x[1]))

    ##################################################QUALITY CHECK: BLOCKS INTERSECTED BY ROADS########################################

    line_mzs = arcpy.FeatureToLine_management(tb_manzanas_ordenadas,
                                              "in_memory/line_mzs")
    buffer_line = arcpy.Buffer_analysis(line_mzs, "in_memory/buffer_line",
                                        "0.50 meters")
    mzs_cortadas = arcpy.Erase_analysis(tb_manzanas_ordenadas, buffer_line,
                                        "in_memory/erase_mzs")

    #manzanas_ordenadas_mfl = arcpy.MakeFeatureLayer_management(tb_manzanas_ordenadas, "manzanas_ordenadas_mfl")
    manzanas_cortadas_mfl = arcpy.MakeFeatureLayer_management(
        mzs_cortadas, "mzs_cortadas_mfl")

    #vias_dentro_manzana = arcpy.SelectLayerByLocation_management(manzanas_ordenadas_mfl, "INTERSECT", tb_ejes_viales,'', "NEW_SELECTION")
    vias_dentro_manzana = arcpy.SelectLayerByLocation_management(
        manzanas_cortadas_mfl, "INTERSECT", tb_ejes_viales, '',
        "NEW_SELECTION")
    arcpy.CopyFeatures_management(vias_dentro_manzana, error_3)
    #########################################LIST OF ZONES WITH ROADS INSIDE BLOCKS###################################

    list_3 = []

    if (int(arcpy.GetCount_management(error_3).getOutput(0)) > 0):
        list_3 = list(
            set([(x[0], x[1])
                 for x in arcpy.da.SearchCursor(error_3, ["UBIGEO", "ZONA"])]))
    #################Quality check: dwellings outside the block#################################################

    viviendas_mfl = arcpy.MakeFeatureLayer_management(tb_viviendas_ordenadas,
                                                      "viviendas_mfl", where)

    viviendas_afuera_manzana = arcpy.SelectLayerByLocation_management(
        viviendas_mfl, "INTERSECT", tb_manzanas_ordenadas, '0.2 meters',
        "NEW_SELECTION", "INVERT")

    arcpy.CopyFeatures_management(viviendas_afuera_manzana, error_5)

    ##########################################LIST OF ZONES WITH DWELLINGS OUTSIDE THE BLOCK#################

    list_4 = []
    if (int(arcpy.GetCount_management(error_5).getOutput(0)) > 0):
        list_4 = list(
            set([(x[0], x[1])
                 for x in arcpy.da.SearchCursor(error_5, ["UBIGEO", "ZONA"])]))

    #################################################QUALITY CHECK: START POINTS#######################################################################
    lineas_viviendas = arcpy.PointsToLine_management(
        viviendas_selecc_frentes, 'in_memory/lineas_viviendas', "IDMANZANA",
        "ID_REG_OR")
    puntos_extremos = arcpy.FeatureVerticesToPoints_management(
        lineas_viviendas, 'in_memory/puntos_extremos', "BOTH_ENDS")
    puntos_extremos_buffer = arcpy.Buffer_analysis(
        puntos_extremos, 'in_memory/puntos_extremos_buffer', "0.2 meters")
    erase_lineas = arcpy.Erase_analysis(mzs_line, puntos_extremos_buffer,
                                        'in_memory/erase_lineas')
    split = arcpy.SplitLine_management(erase_lineas, "in_memory/split")
    dissolve = arcpy.Dissolve_management(split, "in_memory/dissolve",
                                         "UBIGEO;CODCCPP;ZONA;MANZANA", "",
                                         "MULTI_PART", "DISSOLVE_LINES")
    dissolve_multi = arcpy.MultipartToSinglepart_management(
        dissolve, "in_memory/dissolve_multi")
    dissolve_mfl = arcpy.MakeFeatureLayer_management(dissolve_multi,
                                                     'dissolve_mfl')
    puntos_inicio_mfl = arcpy.MakeFeatureLayer_management(
        tb_puntos_inicio, 'puntos_inicio_mfl')

    segmentos_selec = arcpy.SelectLayerByLocation_management(
        dissolve_mfl, "INTERSECT", tb_viviendas_ordenadas, '', "NEW_SELECTION",
        "INVERT")

    tb_segmentos_selec = arcpy.CopyFeatures_management(
        segmentos_selec, "{}/tb_segmentos_selec.shp".format(path_ini))

    puntos_inici_selec = arcpy.SelectLayerByLocation_management(
        puntos_inicio_mfl, "INTERSECT", tb_segmentos_selec, '',
        "NEW_SELECTION", "INVERT")
    arcpy.CopyFeatures_management(puntos_inici_selec, error_4)

    ################################################ LIST OF ZONES WITH START POINT PROBLEMS ##################################################

    list_5 = []

    if (int(arcpy.GetCount_management(error_4).getOutput(0)) > 0):
        list_5 = list(
            set([(x[0], x[1])
                 for x in arcpy.da.SearchCursor(error_4, ["UBIGEO", "ZONA"])]))

    ############################ Number of frontages ############################################################
    '''
    resumen_frentes_viv=arcpy.Statistics_analysis(tb_viviendas_ordenadas,'in_memory/resumen_frentes_viv',[["FRENTE_ORD","MAX"]],["UBIGEO","ZONA","MANZANA"])
    arcpy.AddField_management(resumen_frentes_viv,"ID_MANZANA","text")

    with arcpy.da.UpdateCursor(resumen_frentes_viv, ["UBIGEO","ZONA","MANZANA","ID_MANZANA"]) as cursor:
        for x in cursor:
            x[4]=u'{}{}{}'.format(x[0],x[1],x[2])
            cursor.updateRow(x)

    #arcpy.CalculateField_management(resumen_frentes_viv,"ID_MANZANA","!UBIGEO!+!ZONA!+!MANZANA!","PYTHON_9.3")



    resumen_frentes = arcpy.Statistics_analysis(tb_frentes_dissolve, 'in_memory/resumen_frentes',[["FRENTE_ORD", "MAX"],["FRENTE_ORD", "COUNT"]], ["UBIGEO", "ZONA", "MANZANA"])

    arcpy.AddField_management(resumen_frentes, "ID_MANZANA", "text")

    with arcpy.da.UpdateCursor(resumen_frentes, ["UBIGEO","ZONA","MANZANA","ID_MANZANA"]) as cursor:
        for x in cursor:
            x[4]=u'{}{}{}'.format(x[0],x[1],x[2])
            cursor.updateRow(x)

    arcpy.CalculateField_management(resumen_frentes, "ID_MANZANA", "!UBIGEO!+!ZONA!+!MANZANA!", "PYTHON_9.3")

    arcpy.JoinField_management(resumen_frentes,"ID_MANZANA",resumen_frentes_viv,"ID_MANZANA",["MAX_FRENTE_ORD"])
    mzs_dif_cant_frent=arcpy.TableSelect_analysis(resumen_frentes, error_6, " (MAX_FRENTE_ORD<>MAX_FRENTE_ORD_1)")

    arcpy.AddField_management(error_6, "CANT_FR_V", "SHORT")
    arcpy.CalculateField_management(error_6, "CANT_FR_V", "!MAX_FRENTE_ORD!")

    arcpy.AddField_management(error_6, "CANT_FR_F", "text")
    arcpy.CalculateField_management(error_6, "CANT_FR_F", "!MAX_FRENTE_ORD_1!")
    arcpy.DeleteField_management(error_6,["MAX_FRENTE_ORD","MAX_FRENTE_ORD_1"])

    list_6=[]


    if (int(arcpy.GetCount_management(error_6).getOutput(0)) > 0):
        list_6 = list(set([(x[0], x[1]) for x in arcpy.da.SearchCursor(error_6, ["UBIGEO", "ZONA"])]))

    #mzs_dif_cant_frent_1 = arcpy.TableSelect_analysis(resumen_frentes, error_7_cant_frentes_dif, " CapVivNFr<>COUNT_FRENTE_ORD")
    #list_7 = []
    #if (int(arcpy.GetCount_management(error_7_cant_frentes_dif).getOutput(0)) > 0):
    #    list_7 = list(set([(x[0], x[1]) for x in arcpy.da.SearchCursor(error_7_cant_frentes_dif, ["UBIGEO", "ZONA"])]))
    #arcpy.SelectLayerByLocation_management
    '''

    ##################################################### DWELLING FRONTAGE ERROR #########################################################

    resultado = arcpy.Intersect_analysis([tb_viviendas_ordenadas, tb_frentes],
                                         'in_memory/results')

    arcpy.Select_analysis(resultado, error_7, 'FRENTE_ORD<>FRENTE_ORD_1')
    fields = arcpy.ListFields(error_7)

    list_campos_validos = [
        'FID', 'Shape', 'UBIGEO', 'CODCCPP', 'ZONA', 'MANZANA', 'ID_REG_OR',
        'FRENTE_ORD'
    ]
    delete_fields = []
    for el in fields:
        if el.name not in list_campos_validos:
            delete_fields.append(el.name)

    arcpy.DeleteField_management(error_7, delete_fields)

    ##################################################### ERROR: BLOCK FRONTAGES DO NOT MATCH THE BLOCK SHAPE #################################
    temp_frentes = arcpy.SelectLayerByLocation_management(
        frentes_mfl, "WITHIN", mzs_line, '', "NEW_SELECTION", "INVERT")
    arcpy.CopyFeatures_management(temp_frentes, error_8)

    list_8 = []

    if (int(arcpy.GetCount_management(error_8).getOutput(0)) > 0):
        list_8 = list(
            set([(x[0], x[1])
                 for x in arcpy.da.SearchCursor(error_8, ["UBIGEO", "ZONA"])]))

    #################################################### DWELLING NUMBERING ERROR #############################################################

    lineas_viviendas = arcpy.PointsToLine_management(
        viviendas_selecc_frentes, 'in_memory/lineas_viviendas', "IDMANZANA",
        "ID_REG_OR")
    viviendas_selecc_frentes_buffer = arcpy.Buffer_analysis(
        viviendas_selecc_frentes, "in_memory/puntos_extremos_buffer",
        "0.2 meters")

    erase_lineas = arcpy.Erase_analysis(lineas_viviendas,
                                        viviendas_selecc_frentes_buffer,
                                        'in_memory/erase_lineas')
    split = arcpy.SplitLine_management(erase_lineas, path_ini + "/split.shp")

    mz_line_erase = arcpy.Erase_analysis(mzs_line,
                                         viviendas_selecc_frentes_buffer,
                                         "in_memory/mz_line_erase")
    mz_line_erase_multi = arcpy.MultipartToSinglepart_management(
        mz_line_erase, 'in_memory/m_l_e_m')
    result = arcpy.Statistics_analysis(mz_line_erase_multi, 'in_memory/result',
                                       [['FID', "MAX"]], ["Shape"])
    maxids = [[
        x[0]
    ] for x in arcpy.da.SearchCursor(result, ["MAX_FID"], 'FREQUENCY>1')]

    if len(maxids) == 0:
        where_ids = expresiones_consulta_arcpy.Expresion_2([["-1"]],
                                                           [["FID", "SHORT"]])

    else:
        where_ids = expresiones_consulta_arcpy.Expresion_2(
            maxids, [["FID", "SHORT"]])

    arcpy.Select_analysis(mz_line_erase_multi, error_9, where_ids)
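
    # NOTE (assumption): expresiones_consulta_arcpy.Expresion_2 is an external helper that
    # appears to build an OR-joined attribute filter from a list of value rows and a list of
    # [field, type] pairs. A minimal sketch of such a helper, for reference only:
    #
    #   def Expresion_2(rows, fields):
    #       # e.g. rows=[[3], [7]], fields=[["FID", "SHORT"]]  ->  "(FID = 3) OR (FID = 7)"
    #       clauses = []
    #       for row in rows:
    #           parts = []
    #           for value, (name, ftype) in zip(row, fields):
    #               if ftype in ("SHORT", "LONG", "DOUBLE", "FLOAT"):
    #                   parts.append("{} = {}".format(name, value))
    #               else:
    #                   parts.append("{} = '{}'".format(name, value))
    #           clauses.append("(" + " AND ".join(parts) + ")")
    #       return " OR ".join(clauses)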
    '''
    intersect=arcpy.Intersect_analysis([mz_line_erase_multi, split], path_ini+"/intersect.shp", "ALL", "", "")
    list_id_buffer_mzs_line_erase_multi=list(set( [x[0] for x in arcpy.da.SearchCursor(intersect,["FID_m_l_e_"])]))
    list_intersect= [x[0]  for x  in arcpy.da.SearchCursor(intersect,["FID_m_l_e_"])]


    errores_numeracion=[]
    print list_id_buffer_mzs_line_erase_multi
    print list_intersect

    for x in list_id_buffer_mzs_line_erase_multi:
        cont = 0
        for y in list_intersect:
            if (x==y):
                cont=cont+1


        #print cont
        if (cont>1):
           errores_numeracion.append([x])
    print errores_numeracion
    where_exp=UBIGEO.Expresion_2(errores_numeracion,[["FID","SHORT"]])
    b_m_l_e_m_selecc = arcpy.Select_analysis(mz_line_erase_multi, error_9, where_exp)

    list_9=[]
    if (int(arcpy.GetCount_management(error_9).getOutput(0)) > 0):
        list_9 = list(set([(x[0], x[1]) for x in arcpy.da.SearchCursor(error_9, ["UBIGEO", "ZONA"])]))


    #dissolve = arcpy.Dissolve_management(split, "in_memory/dissolve", "UBIGEO;CODCCPP;ZONA;MANZANA", "","MULTI_PART","DISSOLVE_LINES")
    #dissolve_multi=arcpy.MultipartToSinglepart_management(dissolve, "in_memory/dissolve_multi")
    #arcpy.SelectLayerByLocation_management (dissolve_multi, "INTERSECT",dissolve_multi)
    #arcpy.MultipartToSinglepart_management("intersect", "in_memory/intersect2")

    '''

    ################################################# DWELLINGS AND ROADS #####################################################

    # list_zonas_error=list(set(list_1+list_2+list_3+list_4+list_5+list_6+list_8+list_9))
    # print  list_zonas_error
    #nombre_ejes_viales()

    ################################ child doors of multi-family dwellings on the block frontage ##########################
    puertas_hijos_multifamilar = arcpy.MakeFeatureLayer_management(
        tb_viviendas_ordenadas, "puertas_multifamiliar",
        "(p29=1  or p29=3) and ID_REG_PAD<>0 ")
    error_11_mfl = arcpy.SelectLayerByLocation_management(
        puertas_hijos_multifamilar, "INTERSECT", mzs_line, '', "NEW_SELECTION")
    arcpy.CopyFeatures_management(error_11_mfl, error_11)

    # puertas_hijos_multifamilar=arcpy.MakeFeatureLayer_management(tb_viviendas_ordenadas, "puertas_multifamiliar", "(p29=1  or p29=3) and ID_REG_PAD<>0 ")
    # error_11_mfl=arcpy.SelectLayerByLocation_management(puertas_hijos_multifamilar, "INTERSECT",mzs_line ,'' , "NEW_SELECTION")
    # arcpy.CopyFeatures_management(error_11_mfl, error_11)

    ############################################### ERROR: CHILD RECORDS WITHOUT PARENTS #########################################################################
    '''
    puertas_hijos_multifamilar = arcpy.MakeFeatureLayer_management(tb_viviendas_ordenadas, "puertas_multifamiliar",
                                                                   "(p29=1  or p29=3) and (ID_REG_PAD<>0)" )

    list_puertas_hijos_multifamilar=[[x[0],x[1],x[2],x[3],x[4]] for x in  arcpy.da.SearchCursor(puertas_hijos_multifamilar,["UBIGEO","ZONA","MANZANA","ID_REG_OR","ID_REG_PAD"])]

    list_puertas_multifamiliar=[ '{}{}{}{}'.format(x[0],x[1],x[2],x[3]) for x in arcpy.da.SearchCursor(puertas_multifamiliar,["UBIGEO","ZONA","MANZANA","ID_REG"])]




    where_error_12=""

    i=0

    for el in list_puertas_hijos_multifamilar:
        i=i+1
        id_padre='{}{}{}{}'.format(el[0],el[1],el[2],el[4])
        if  id_padre not in list_puertas_multifamiliar:
            if i==1:
                where_error_12=" (UBIGEO='{}' AND ZONA='{}' AND MANZANA='{}' AND ID_REG_OR={})".format(el[0],el[1],el[2],el[3])
            else:
                where_error_12 = "{} OR (UBIGEO='{}' AND ZONA='{}' AND MANZANA='{}' AND ID_REG_OR={})".format(where_error_12,el[0], el[1],
                                                                                                         el[2], el[3])
    error_12_mfl=arcpy.MakeFeatureLayer_management(tb_viviendas_ordenadas, "error_12",where_error_12 )

    arcpy.CopyFeatures_management(error_12_mfl, error_12)






    #############################ERROR   PUERTAS MULTIFAMILIAR CON MAS DE 2 GEOMETRIAS#########################################
    set_puertas_multi=set(list_puertas_multifamiliar)

    where_error_13=""
    j=0

    for el in set_puertas_multi:
        i=0

        if el in list_puertas_multifamiliar:
           i=i+1

        if i>1:
            j=j+1
            if (j==1):
                where_error_13 = " (UBIGEO='{}' AND ZONA='{}' AND MANZANA='{}' AND ID_REG_OR={})".format(el[0], el[1],el[2], el[3])
            else:
                where_error_13 = "{} OR (UBIGEO='{}' AND ZONA='{}' AND MANZANA='{}' AND ID_REG_OR={})".format(where_error_13,el[0], el[1],
                                                                                                         el[2], el[3])

    error_13_mfl = arcpy.MakeFeatureLayer_management(tb_viviendas_ordenadas, "error_13", where_error_13)

    arcpy.CopyFeatures_management(error_13_mfl, error_13)

   '''

    ################################ Data insertion ###########################################

    arcpy.env.workspace = "Database Connections/PruebaSegmentacion.sde"
    arcpy.env.outputCoordinateSystem = arcpy.SpatialReference(4326)
    if arcpy.Exists("GEODATABASE.sde") == False:
        arcpy.CreateDatabaseConnection_management(
            "Database Connections", "GEODATABASE.sde", "SQL_SERVER", ip_server,
            "DATABASE_AUTH", "sde", "$deDEs4Rr0lLo", "#", "GEODB_CPV_SEGM",
            "#", "#", "#", "#")
    arcpy.env.workspace = "Database Connections/GEODATABASE.sde"
    path_conexion2 = "Database Connections/GEODATABASE.sde"
    path_calidad = path_conexion2 + "/GEODB_CPV_SEGM.SDE.CALIDAD_URBANO"
    calidad_error_1_input = path_calidad + '/GEODB_CPV_SEGM.SDE.ERROR_1_INPUT_PUERTA_MULTIFAMILIAR_DENTRO_MZ'
    calidad_error_2_input = path_calidad + '/GEODB_CPV_SEGM.SDE.ERROR_2_INPUT_MANZANAS_SIN_VIAS'
    calidad_error_3_input = path_calidad + '/GEODB_CPV_SEGM.SDE.ERROR_3_INPUT_MANZANAS_VIAS_DENTRO'
    calidad_error_4_input = path_calidad + '/GEODB_CPV_SEGM.SDE.ERROR_4_INPUT_PUNTOS_INICIO'
    calidad_error_5_input = path_calidad + '/GEODB_CPV_SEGM.SDE.ERROR_5_INPUT_VIVIENDAS_AFUERA_MZ'
    calidad_error_7_input = path_calidad + '/GEODB_CPV_SEGM.SDE.ERROR_7_INPUT_VIVIENDAS_ERROR_FRENTE'
    calidad_error_8_input = path_calidad + '/GEODB_CPV_SEGM.SDE.ERROR_8_INPUT_FRENTES_MANZANAS_FORMA'
    calidad_error_9_input = path_calidad + '/GEODB_CPV_SEGM.SDE.ERROR_9_INPUT_ENUMERACION_VIV_POR_FRENTE'
    calidad_error_10_input = path_calidad + '/GEODB_CPV_SEGM.SDE.ERROR_10_INPUT_VIV_ERROR_NOMBRE_VIA'
    calidad_error_11_input = path_calidad + '/GEODB_CPV_SEGM.SDE.ERROR_11_INPUT_PUERTAS_HIJOS_MULTI_EN_FRENTE_MZ'
    #error_7 = path_calidad + "/error_7_viviendas_error_frente.shp"
    list_errores = [
        [error_1, calidad_error_1_input, 1],
        [error_2, calidad_error_2_input, 1],
        [error_3, calidad_error_3_input, 1],
        [error_4, calidad_error_4_input, 1],
        [error_5, calidad_error_5_input, 1],
        [error_8, calidad_error_8_input, 1],
        [error_9, calidad_error_9_input, 1],
        # [error_10, calidad_error_10_input, 1],
        [error_11, calidad_error_11_input, 1],
        [error_7, calidad_error_7_input, 1],
    ]

    conn = conex.Conexion2()
    cursor = conn.cursor()
    for el in data:
        ubigeo = el[0]
        zona = el[1]
        sql_query = """
                DELETE GEODB_CPV_SEGM.SDE.ERROR_1_INPUT_PUERTA_MULTIFAMILIAR_DENTRO_MZ WHERE ubigeo='{ubigeo}' AND zona='{zona}'
                DELETE GEODB_CPV_SEGM.SDE.ERROR_2_INPUT_MANZANAS_SIN_VIAS WHERE ubigeo='{ubigeo}' AND zona='{zona}'
                DELETE GEODB_CPV_SEGM.SDE.ERROR_3_INPUT_MANZANAS_VIAS_DENTRO WHERE ubigeo='{ubigeo}' AND zona='{zona}'
                DELETE GEODB_CPV_SEGM.SDE.ERROR_4_INPUT_PUNTOS_INICIO WHERE ubigeo='{ubigeo}' AND zona='{zona}'
                DELETE GEODB_CPV_SEGM.SDE.ERROR_5_INPUT_VIVIENDAS_AFUERA_MZ WHERE ubigeo='{ubigeo}' AND zona='{zona}'
                DELETE GEODB_CPV_SEGM.SDE.ERROR_7_INPUT_VIVIENDAS_ERROR_FRENTE WHERE ubigeo='{ubigeo}' AND zona='{zona}'
                DELETE GEODB_CPV_SEGM.SDE.ERROR_8_INPUT_FRENTES_MANZANAS_FORMA WHERE ubigeo='{ubigeo}' AND zona='{zona}'
                DELETE GEODB_CPV_SEGM.SDE.ERROR_9_INPUT_ENUMERACION_VIV_POR_FRENTE WHERE ubigeo='{ubigeo}' AND zona='{zona}'
                DELETE GEODB_CPV_SEGM.SDE.ERROR_10_INPUT_VIV_ERROR_NOMBRE_VIA WHERE ubigeo='{ubigeo}' AND zona='{zona}'
                DELETE GEODB_CPV_SEGM.SDE.ERROR_11_INPUT_PUERTAS_HIJOS_MULTI_EN_FRENTE_MZ WHERE ubigeo='{ubigeo}' AND zona='{zona}'
                """.format(ubigeo=ubigeo, zona=zona)
        cursor.execute(sql_query)
        conn.commit()
    conn.close()

    i = 0
    for el in list_errores:
        i = i + 1
        print el[0]
        if (int(el[2]) > 1):
            a = arcpy.MakeTableView_management(
                el[0],
                "a{}".format(i),
            )

        else:
            a = arcpy.MakeFeatureLayer_management(el[0], "a{}".format(i))

        arcpy.Append_management(a, el[1], "NO_TEST")

    #for el in list_errores:
    #    i = i + 1


#
#    print where
#
#    if el[2] == 1:
#        a = arcpy.arcpy.MakeFeatureLayer_management(el[1], "a{}".format(i), where)
#    else:
#        a = arcpy.MakeTableView_management(el[1], "a{}".format(i), where)
#
#    if (int(arcpy.GetCount_management(a).getOutput(0)) > 0):
#        arcpy.DeleteRows_management(a)
#
#    print 'borro'
#    if el[2] == 1:
#        b = arcpy.arcpy.MakeFeatureLayer_management(el[0], "b{}".format(i), where)
#    else:
#        b = arcpy.MakeTableView_management(el[0], "b{}".format(i), where)
#
#    if (int(arcpy.GetCount_management(b).getOutput(0)) > 0):
#        arcpy.Append_management(b, el[1], "NO_TEST")
#    print 'inserto'

    for el in data:
        conex.actualizar_errores_input_adicionales(ubigeo=el[0], zona=el[1])
Beispiel #14
def nombre_ejes_viales():
    arcpy.AddField_management(tb_viviendas_ordenadas, 'ID_FRENTE', 'TEXT')
    arcpy.AddField_management(tb_viviendas_ordenadas, 'NOM_CAT_AL', 'TEXT')
    arcpy.AddField_management(tb_viviendas_ordenadas, 'NOM_VIA_AL', 'TEXT')
    arcpy.AddField_management(tb_viviendas_ordenadas, 'ERROR_VIA', 'SHORT')

    with arcpy.da.UpdateCursor(
            tb_viviendas_ordenadas,
        ["UBIGEO", "ZONA", "MANZANA", "FRENTE_ORD", "ID_FRENTE"]) as cursor:
        for x in cursor:
            x[4] = u'{}{}{}{}'.format(x[0], x[1], x[2], x[3])
            cursor.updateRow(x)

    #arcpy.CalculateField_management(tb_viviendas_ordenadas,'ID_FRENTE','!UBIGEO!+!ZONA!+!MANZANA!+str(!FRENTE_ORD!)','PYTHON_9.3')
    viviendas_mfl = arcpy.MakeFeatureLayer_management(tb_viviendas_ordenadas,
                                                      "viviendas_mfl")
    mzs_line = arcpy.FeatureToLine_management(tb_manzanas_ordenadas,
                                              "in_memory/mzs_line")
    viviendas_selecc_frentes_mfl = arcpy.SelectLayerByLocation_management(
        viviendas_mfl, "INTERSECT", mzs_line)
    viviendas_selecc_frentes = arcpy.CopyFeatures_management(
        viviendas_selecc_frentes_mfl, "in_memory/viv_selecc_frentes")
    ejes_viales_buffer = arcpy.Buffer_analysis(
        tb_ejes_viales, "in_memory/ejes_viales_buffer", "60 meters", "", "",
        "LIST", [
            "CAT_VIA", "NOMBRE_CAT", "NOMBRE_VIA", "NOMBRE_ALT", "CAT_NOM",
            "UBIGEO"
        ])
    intersect_viv_vias = arcpy.Intersect_analysis(
        [viviendas_selecc_frentes, ejes_viales_buffer],
        "in_memory/intersect_viv_vias")
    list_id_frentes_validos = list(
        set([
            u"{}{}{}{}".format(x[0], x[1], x[2], x[3])
            for x in arcpy.da.SearchCursor(
                intersect_viv_vias,
                ["UBIGEO", "ZONA", "MANZANA", "FRENTE_ORD"],
                "P20=NOMBRE_CAT AND NOMBRE_VIA=P21")
        ]))
    print list_id_frentes_validos

    where_expression_list = ""
    with arcpy.da.UpdateCursor(tb_viviendas_ordenadas,
                               ["ID_FRENTE", "ERROR_VIA"]) as cursor:
        for x in cursor:
            if x[0] not in list_id_frentes_validos:
                x[1] = 1

            cursor.updateRow(x)

    viviendas_no_enlazadas = arcpy.Select_analysis(tb_viviendas_ordenadas,
                                                   error_10, "ERROR_VIA=1")
    viviendas_no_enlazadas_mfl = arcpy.MakeFeatureLayer_management(
        viviendas_no_enlazadas, "viviendas_no_enlazadas_mfl")

    viviendas_no_enlazadas_select = arcpy.SelectLayerByLocation_management(
        viviendas_no_enlazadas_mfl, "INTERSECT", mzs_line, '5 METERS',
        "NEW_SELECTION")
    lineas_viv_no_en = arcpy.PointsToLine_management(
        viviendas_no_enlazadas_select, path_calidad + '/lineas_viv_no_en.shp',
        'p21', 'ID_FRENTE')

    ejes_viales_buffer = arcpy.Buffer_analysis(
        tb_ejes_viales, "in_memory/ejes_viales_buffer", "40 meters", "", "",
        "LIST", [
            "CAT_VIA", "NOMBRE_CAT", "NOMBRE_VIA", "NOMBRE_ALT", "CAT_NOM",
            "UBIGEO"
        ])
    where_eje = "NOMBRE_VIA<>'{}'".format("SN")

    ejes_viales_buffer_select = arcpy.Select_analysis(
        ejes_viales_buffer, 'in_memory/ejes_viales_buffer_select', where_eje)
    ejes_viales_buffer_select_mfl = arcpy.MakeFeatureLayer_management(
        ejes_viales_buffer_select, "ejes_viales_buffer_select_mfl")
    temp = arcpy.SpatialJoin_analysis(ejes_viales_buffer_select_mfl,
                                      lineas_viv_no_en,
                                      path_calidad + '/temp.shp',
                                      'JOIN_ONE_TO_MANY', '', '', 'CONTAINS')

    arcpy.AddField_management(temp, 'AREA', 'DOUBLE')
    exp = "!SHAPE.AREA@METERS!"
    arcpy.CalculateField_management(temp, 'AREA', exp, 'PYTHON_9.3')
    temp_sort = arcpy.Sort_management(
        temp, 'in_memory/temp_sort',
        [["JOIN_FID", "ASCENDING"], ["AREA", "ASCENDING"]])
    temp_sort_select = arcpy.Select_analysis(
        temp_sort, path_calidad + '/temp_sort_select.shp', "JOIN_FID<>-1")
    arcpy.DeleteIdentical_management(temp_sort_select, ["JOIN_FID"])

    list_correcion_vias = list(
        set([(x[0], x[1], x[2]) for x in arcpy.da.SearchCursor(
            temp_sort_select, ["p21", "NOMBRE_CAT", "NOMBRE_VIA"])]))
    #list_correcion_vias_p21=list(set([x[0] for x in  arcpy.da.SearchCursor(temp_sort_select,["p21"])]))

    arcpy.AddField_management(error_10, 'NOM_CAT_AL', 'TEXT')
    arcpy.AddField_management(error_10, 'NOM_VIA_AL', 'TEXT')
    ##########################################################################################################
    with arcpy.da.UpdateCursor(error_10, [
            "UBIGEO", "ZONA", "MANZANA", "ID_REG_OR", "p21", "NOM_CAT_AL",
            "NOM_VIA_AL"
    ]) as cursor:
        for x in cursor:
            for y in list_correcion_vias:
                if (y[0] == x[4]):
                    x[5] = y[1]
                    x[6] = y[2]
                    break
            cursor.updateRow(x)
Beispiel #15
arcpy.MakeXYEventLayer_management(crashCSVFile, XPoints, YPoints,
                                  "New York CrashData", "", "")
arcpy.CopyFeatures_management("New York CrashData", crashShapeFile)

#Clean traffic shape files
#Delete everything besides data from Manhattan
with arcpy.da.UpdateCursor(trafficShapeFile, "County_Cod") as cursor:
    for row in cursor:
        if row[0] != 61:
            cursor.deleteRow()

#Create the mapping traffic shapefile -> this file is used later to map the AADTs correctly
#Background: some segments are too long or too short, which leads to unwanted mini segments
#Split at intersections
arcpy.FeatureToLine_management(trafficShapeFile, mappingTrafficShapeFile,
                               "0.001 Meters")

#Create segment shape file
arcpy.Dissolve_management(mappingTrafficShapeFile, segmentShapeFile)
##Now we have one big connected polygon

#Intersect segment shape file
arcpy.FeatureToLine_management(segmentShapeFile, segmentShapeFile2,
                               "0.001 Meters")

#Add fields segment shape file, length and id
arcpy.AddField_management(in_table=segmentShapeFile2,
                          field_name="Length_m",
                          field_type="DOUBLE")
arcpy.AddField_management(in_table=segmentShapeFile2,
                          field_name="Segment_ID",
                          field_type="LONG")  # field type is an assumption; the source snippet was cut off here
def route_fiber(nd_in, incidents_in, facilities_in, name_in, output_fc_in, pro_in, protection_in=False,
                sp_protection_in=True, brownfield_duct='#'):
    arcpy.CheckOutExtension('Network')

    # Set local variables
    layer_name = "ClosestFacility"
    impedance = "Length"

    # MakeClosestFacilityLayer_na (in_network_dataset, out_network_analysis_layer, impedance_attribute,
    # {travel_from_to}, {default_cutoff}, {default_number_facilities_to_find}, {accumulate_attribute_name},
    # {UTurn_policy}, {restriction_attribute_name}, {hierarchy}, {hierarchy_settings}, {output_path_shape},
    # {time_of_day}, {time_of_day_usage})
    #
    # http://desktop.arcgis.com/en/arcmap/10.3/tools/network-analyst-toolbox/make-closest-facility-layer.htm
    result_object = arcpy.na.MakeClosestFacilityLayer(nd_in, layer_name, impedance, 'TRAVEL_TO', default_cutoff=None,
                                                      default_number_facilities_to_find=1,
                                                      output_path_shape='TRUE_LINES_WITH_MEASURES')

    # Get the layer object from the result object. The Closest facility layer can
    # now be referenced using the layer object.
    layer_object = result_object.getOutput(0)

    # Get the names of all the sublayers within the Closest facility layer.
    sublayer_names = arcpy.na.GetNAClassNames(layer_object)

    # Stores the layer names that we will use later
    incidents_layer_name = sublayer_names["Incidents"]  # as origins
    facilities_layer_name = sublayer_names["Facilities"]  # as destinations
    lines_layer_name = sublayer_names["CFRoutes"]  # as lines

    arcpy.na.AddLocations(layer_object, incidents_layer_name, incidents_in)
    arcpy.na.AddLocations(layer_object, facilities_layer_name, facilities_in)

    if brownfield_duct != '#':
        mapping = "Name Name #;Attr_Length # " + '0,001' + "; BarrierType # 1"
        arcpy.na.AddLocations(layer_object, "Line Barriers", brownfield_duct, mapping, search_tolerance="5 Meters")

    # Solve the Closest facility  layer
    arcpy.na.Solve(layer_object)

    # # Save the solved Closest facility layer as a layer file on disk
    # output_layer_file = os.path.join(output_dir_in, layer_name)
    # arcpy.MakeFeatureLayer_management(layer_object, output_layer_file)

    # Get the Lines Sublayer (all the distances)
    if pro_in:
        lines_sublayer = layer_object.listLayers(lines_layer_name)[0]
    elif not pro_in:
        lines_sublayer = arcpy.mapping.ListLayers(layer_object, lines_layer_name)[0]

    layer_out_path = os.path.join(output_fc_in, name_in)
    arcpy.management.CopyFeatures(lines_sublayer, layer_out_path)

    protection_out_path = "#"

    # If requested route the protection paths
    if protection_in:
        # For all the routed paths the disjoint path has to be found
        n_paths = int(arcpy.GetCount_management(incidents_in).getOutput(0))

        field_objects = arcpy.ListFields(layer_out_path)
        fields = [field.name for field in field_objects if field.type != 'Geometry']

        if 'Total_Length' in fields:
            field_len = 'Total_Length'
        elif 'Shape_Length' in fields:
            field_len = 'Shape_Length'
        else:
            # Fail with a clear message instead of a NameError further down
            raise RuntimeError('No length field (Total_Length or Shape_Length) found on the routed lines')

        # Iterate through all the facility-demand pairs and their respective routes
        cursor_r = arcpy.da.SearchCursor(layer_out_path, ['SHAPE@', field_len])
        cursor_n = arcpy.da.SearchCursor(incidents_in, 'SHAPE@')

        if sp_protection_in:
            name_protect = 'sp'
        else:
            name_protect = 'duct_sharing'

        protection_out_path = os.path.join(output_fc_in, '{0}_protection_{1}'.format(name_in, name_protect))
        check_exists(protection_out_path)
        arcpy.CreateFeatureclass_management(output_fc_in, '{0}_protection_{1}'.format(name_in, name_protect),
                                            template=layer_out_path)

        for i in range(n_paths):
            path = cursor_r.next()
            node = cursor_n.next()
            if not path[1] == 0:
                if sp_protection_in:
                    tmp = protection_routing(nd_in, facilities_in, node[0], path[0], pro_in)
                    # Add the protection route to the output feature class
                    arcpy.Append_management(tmp, protection_out_path, schema_type="NO_TEST")
                else:
                    all_paths = os.path.join('in_memory', 'all_paths_{0}'.format(i))
                    check_exists(all_paths)
                    arcpy.CopyFeatures_management(layer_out_path, all_paths)

                    other_paths_tmp = os.path.join('in_memory', 'other_paths_{0}_dissolved'.format(i))
                    check_exists(other_paths_tmp)
                    arcpy.Dissolve_management(all_paths, other_paths_tmp)

                    other_paths = os.path.join('in_memory', 'other_paths_{0}'.format(i))
                    check_exists(other_paths)
                    arcpy.FeatureToLine_management(other_paths_tmp, other_paths)

                    other_paths_layer = os.path.join('in_memory', 'other_paths_layer_{0}'.format(i))
                    check_exists(other_paths_layer)
                    arcpy.MakeFeatureLayer_management(other_paths, other_paths_layer)

                    arcpy.SelectLayerByLocation_management(other_paths_layer, 'SHARE_A_LINE_SEGMENT_WITH', path[0],
                                                           selection_type='NEW_SELECTION',
                                                           invert_spatial_relationship='INVERT')

                    scaled_cost = os.path.join('in_memory', 'scaled_cost_{0}'.format(i))
                    check_exists(scaled_cost)
                    arcpy.CopyFeatures_management(other_paths_layer, scaled_cost)

                    tmp = protection_routing(nd_in, facilities_in, node[0], path[0], pro_in, scaled_cost)
                    # Add the protection route to the output feature class
                    arcpy.Append_management(tmp, protection_out_path, schema_type="NO_TEST")

    return layer_out_path, protection_out_path
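
# A hypothetical invocation of route_fiber (all paths and dataset names below are
# placeholders, not taken from the original project):
#
#   routes_fc, protection_fc = route_fiber(
#       nd_in=r"C:\data\network.gdb\streets\streets_ND",      # network dataset
#       incidents_in=r"C:\data\network.gdb\demand_points",    # origins to route from
#       facilities_in=r"C:\data\network.gdb\central_offices", # destinations
#       name_in="fiber_routes",
#       output_fc_in=r"C:\data\results.gdb",
#       pro_in=True,            # running under ArcGIS Pro rather than ArcMap
#       protection_in=True,     # also route protection paths
#       sp_protection_in=True)  # use disjoint shortest-path protection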
arcpy.env.workspace = getGDBPath()
root.destroy()

ds = getfloor()
#df = pd.DataFrame()
fctypes = np.unique([
    str(fc[fc.rfind("_") + 1:])
    for fc in arcpy.ListFeatureClasses(feature_dataset=ds[0])
])
#print fctypes

new_build = getbuild()
print "------------------------------------------------------------------------"
#print "In %s there the:" %  ds[0]
for fctype in fctypes:
    FCinput = ds[0] + "/" + new_build + '_' + fctype
    Foutput = ds[0] + "/" + ds[1] + '_' + fctype
    if ar.Exists(FCinput):
        print new_build + '_' + fctype
        if ar.Exists(Foutput):
            ar.Append_management(FCinput, Foutput, "NO_TEST")
            print " Appended to:", ds[1] + '_' + fctype
        else:
            try:
                ar.FeatureToPolygon_management(FCinput, Foutput)
            except:
                ar.FeatureToLine_management(FCinput, Foutput)
            print ' Created: ', ds[1] + '_' + fctype

print "------------------------------------------------------------------------"
g = Graph(roads, id, avg_Speed, direction)
#Extract the points from the targets feature class
points = []
with arcpy.da.SearchCursor(targets, ["SHAPE@X", "SHAPE@Y"]) as sc:
    for row in sc:
        points.append([row[0], row[1]])
#Find the points in the graph
begin = g.search(points[0])
end = g.search(points[1])
arcpy.AddMessage(str(begin) + " " + str(end))
#Compute the route
path = g.make_path(begin, end, [algorithm, ignore_direct, time_or_dist])
#Set the workspace to the target dataset
prev_work = arcpy.env.workspace
arcpy.env.workspace = dat
#Convert to a shapefile
wizualizacja(roads, path, file_path, id)
#If the target is something other than the end point, convert it to a line
if target != targets:
    temp = "toLine"
    arcpy.FeatureToLine_management([target], temp)
    target = temp
#Place the target on the polygon boundary
arcpy.Intersect_analysis([file_path, target], file_target, "ONLY_FID", None,
                         "POINT")

if target == "toLine":
    arcpy.Delete_management(target)
#Switch the workspace back
arcpy.env.workspace = prev_work
Beispiel #19
def cartoLimits(aoi, prod_db, desktop_fldr):
    # Subtype field used in where clause to filter inputs to Model
    subtype_fld = arcpy.AddFieldDelimiters(prod_db, "FCSubtype")

    # Make feature layer of aoi
    arcpy.MakeFeatureLayer_management(aoi, "aoi")
    # Convert AOI to polyline
    aoi_line = os.path.join(arcpy.env.scratchGDB, "aoi_line")
    arcpy.FeatureToLine_management("aoi", aoi_line)
    arcpy.MakeFeatureLayer_management(aoi_line, "aoi_line")

    # Get list of input feature classes, subtypes, and cart limit feature classes
    inputs = [["DangersA", [], "DangersA_L"],
              ["DepthsA", ["5", "10", "15"], "DepthsA_L"],
              ["IceFeaturesA", [], "IceA_L"],
              ["MilitaryFeaturesA", [], "MilitaryA_L"],
              ["NaturalFeaturesA", ["1", "20", "35"], "NaturalA_L"],
              ["OffshoreInstallationsA", [], "OffshoreA_L"],
              [
                  "PortsAndServicesA",
                  [
                      "5", "10", "25", "30", "35", "40", "45", "50", "55",
                      "60", "65", "70", "80"
                  ], "PortsA_L"
              ],
              [
                  "RegulatedAreasAndLimitsA",
                  [
                      "1", "5", "10", "15", "20", "30", "40", "50", "60", "65",
                      "70", "75", "85", "95", "105", "110", "115"
                  ], "RegulatedA_L"
              ], ["SeabedA", ["15"], "SeabedA_L"],
              [
                  "TracksAndRoutesA",
                  ["1", "5", "10", "15", "20", "25", "40", "45", "70"],
                  "TracksA_L"
              ]]

    # Set workspace
    arcpy.env.workspace = prod_db

    # Get CoastlineA and CloastlineL layers
    coastlinea_fc = getFC(prod_db, "CoastlineA", NAUT_FDS)
    arcpy.MakeFeatureLayer_management(coastlinea_fc, "CoastlineA")
    coastlinel_fc = getFC(prod_db, "CoastlineL", NAUT_FDS)
    arcpy.MakeFeatureLayer_management(coastlinel_fc, "CoastlineL")

    # Loop through list of inputs
    for data in inputs:
        # Get full paths to data
        input_fc = getFC(prod_db, data[0], NAUT_FDS)
        output_fc = getFC(prod_db, data[2], CARTO_FDS)
        if input_fc != "" and output_fc != "":
            # Check if there are subtypes, if there are, write where clause
            where = ""
            if len(data[1]) > 0:
                where = subtype_fld + " = "
                where = where + (" OR " + subtype_fld + " = ").join(data[1])
                # Remove single quotes that get added to beginning and end of where clause
                where = where.replace("'", "")
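                # For example, the inputs entry ["DepthsA", ["5", "10", "15"], "DepthsA_L"]
                # yields a clause of the form:
                #   FCSubtype = 5 OR FCSubtype = 10 OR FCSubtype = 15
                # (the field name is delimited for the production workspace by AddFieldDelimiters)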
            # Select features in where clause
            arcpy.MakeFeatureLayer_management(input_fc, "in_lyr", where)
            # Only run Generate Cartographic Limits model if layer has features
            if int(arcpy.GetCount_management("in_lyr").getOutput(0)) > 0:
                arcpy.AddMessage("\t\t" + data[2])
                arcpy.GenerateCartographicLimits_nautical(
                    "in_lyr", "CoastlineL; CoastlineA; aoi_line", output_fc)

    return
Beispiel #20
    outputName = os.path.join(outputFolder, name)
    subset = arcpy.Select_analysis(rios, outputName, where)
walk = arcpy.da.Walk(outputFolder, datatype="FeatureClass")
listaRios = []
for dir_path, dir_names, file_names in walk:
    for filename in file_names:
        listaRios.append(os.path.join(dir_path, filename))
listaFeatures = []
for i in range(0, len(listaRios), 1):
    inputs = listaRios[i:i + 2]
    j = i + 1
    nombreUnion = 'rios_{0}{1}'.format(j, j + 1)
    nombreSalida_union = os.path.join(carpeta, nombreUnion)
    if len(inputs) < 2:
        break
    uniones = arcpy.FeatureToLine_management(inputs, nombreSalida_union)
    whereSelect = "GRID_CODE = {0}".format(j)
    nombreSelect = 'rios_{0}{1}select'.format(j, j + 1)
    nombreSalida_select = os.path.join(carpeta, nombreSelect)
    ordenes = arcpy.Select_analysis(uniones, nombreSalida_select, whereSelect)
    listaFeatures.append(ordenes)
riosProcesados = arcpy.Merge_management(
    listaFeatures, 'C:/Datos/Glaciares_vegas/rios_procesados.shp')
riosProcesados = arcpy.AddField_management(featureTotal, "tramo", "LONG")
riosProcesados = arcpy.CalculateField_management("rios_procesados", "tramo",
                                                 "!FID! + 1", "PYTHON")

### GROUPING OF THE VEGAS (WET MEADOWS) ACCORDING TO RIVER SEGMENTS


## FUNCTION TO DEFINE FIELD MAPPINGS for the spatial join. TWO attributes (GRID_CODE and tramo, requesting MAX)
def setupFiles(roadsFC, newRoadsFC, cityBoundFC):
    arcpy.env.overwriteOutput = True

    #Make sure these flags are set to Disabled, otherwise some of the later functions have issues with the road geometry
    env.outputMFlag = "Disabled"
    env.outputZFlag = "Disabled"

    intersectFC = "Intersect"
    featLyrIntersect = "FeatLayerPt"
    featLyrRoad = "FeatLayerLine"

    outFolder = "L:\\IntersectData"
    outWorkspace = "IntersectGDB.gdb"

    featLyrCity = "FeatLayerCity"
    featLyrTemp = "FeatLayerTemp"

    env.workspace = outFolder + "\\" + outWorkspace

    #If the intersection data already exists, refresh the source copies and hand off to the update script
    if (arcpy.Exists(env.workspace + "\\" + intersectFC)):
        print("Copying Features")
        arcpy.CopyFeatures_management(
            "Database Connections\\ARCSDE_10.sde\\City.DBO.CATSO\\City.DBO.CATSOMRP",
            roadsFC)
        arcpy.CopyFeatures_management(
            "Database Connections\\ARCSDE_10.sde\\City.DBO.PW_zoning\\City.DBO.columbia_corp_limit",
            cityBoundFC)
        print("Copied Features")
        UpdateCATSOIntersections.update(
            roadsFC,
            cityBoundFC)  #If it already exists, reroute to the update script
        return
    #Create Geodatabase
    arcpy.CreateFileGDB_management(outFolder, outWorkspace)
    env.workspace = outFolder + "\\" + outWorkspace

    print("CREATING INTERSECTIONS")

    #Copy features from SDE
    print("Copying Features")
    arcpy.CopyFeatures_management(
        "Database Connections\\ARCSDE_10.sde\\City.DBO.CATSO\\City.DBO.CATSOMRP",
        roadsFC)
    arcpy.CopyFeatures_management(
        "Database Connections\\ARCSDE_10.sde\\City.DBO.PW_zoning\\City.DBO.columbia_corp_limit",
        cityBoundFC)
    print("Copied Features")

    #Convert to feature layer to select by location
    arcpy.MakeFeatureLayer_management(roadsFC, featLyrTemp)
    arcpy.MakeFeatureLayer_management(cityBoundFC, featLyrCity)

    #select only features within the city limits
    arcpy.SelectLayerByLocation_management(featLyrTemp, "INTERSECT",
                                           featLyrCity, "", "NEW_SELECTION")

    #Perform feature to line tool, to split the lines at vertices for more precise intersection output
    arcpy.FeatureToLine_management(featLyrTemp, newRoadsFC, "", "ATTRIBUTES")

    print("Feature to Line Completed")

    #Find all of the intersections
    arcpy.Intersect_analysis(newRoadsFC, intersectFC, "ONLY_FID", "", "POINT")

    print("Found Intersects")

    #Add fields to the output intersect feature
    arcpy.AddField_management(intersectFC, "STREETS", "TEXT", "", "", 150,
                              "STREET_NAMES", "", "", "")
    arcpy.AddField_management(intersectFC, "INT_ID", "LONG", "", "", "",
                              "INTERSECT_ID", "", "", "")
    arcpy.AddField_management(intersectFC, "MAJOR_INT", "TEXT", "", "", 10,
                              "MAJOR_INTERSECTION", "", "", "")

    #Convert to feature layer to make later selections
    arcpy.MakeFeatureLayer_management(newRoadsFC, featLyrRoad)
    arcpy.MakeFeatureLayer_management(intersectFC, featLyrIntersect)

    IntersectCATSOAnalysis.runAnalysis(featLyrIntersect, featLyrRoad, False)
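
# A hypothetical call of setupFiles (the SDE source paths and output geodatabase are
# hard-coded inside the function; the three arguments below are placeholder feature
# class names):
#
#   setupFiles("CATSO_Roads", "CATSO_Roads_Split", "City_Corp_Limit")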
def main(fcInputCenterline,
         fcInputPolygon,
         fcSegmentedPolygons,
         workspaceTemp,
         dblPointDensity=10.0,
         dblJunctionBuffer=120.00):

    arcpy.AddMessage("GNAT Divide Polygon By Segment Tool")
    arcpy.AddMessage("GNAT DPS: Saving Polygon Results to: " +
                     fcSegmentedPolygons)
    arcpy.AddMessage("GNAT DPS: Saving Temporary Files to: " + workspaceTemp)

    arcpy.env.OutputMFlag = "Disabled"
    arcpy.env.OutputZFlag = "Disabled"

    arcpy.AddMessage("arcpy M Output Flag: " + str(arcpy.env.OutputMFlag))

    ## Copy Centerline to Temp Workspace
    fcCenterline = gis_tools.newGISDataset(workspaceTemp,
                                           "GNAT_DPS_Centerline")
    arcpy.CopyFeatures_management(fcInputCenterline, fcCenterline)

    ## Build Thiessan Polygons
    arcpy.AddMessage("GNAT DPS: Building Thiessan Polygons")
    arcpy.env.extent = fcInputPolygon  ## Set full extent to build Thiessan polygons over entire line network.
    arcpy.Densify_edit(fcCenterline, "DISTANCE",
                       str(dblPointDensity) + " METERS")

    fcTribJunctionPoints = gis_tools.newGISDataset(
        workspaceTemp,
        "GNAT_DPS_TribJunctionPoints")  # All Segment Junctions??
    #gis_tools.findSegmentJunctions(fcCenterline,fcTribJunctionPoints,"ALL")
    arcpy.Intersect_analysis(fcCenterline,
                             fcTribJunctionPoints,
                             output_type="POINT")

    fcThiessanPoints = gis_tools.newGISDataset(workspaceTemp,
                                               "GNAT_DPS_ThiessanPoints")
    arcpy.FeatureVerticesToPoints_management(fcCenterline, fcThiessanPoints,
                                             "ALL")

    lyrThiessanPoints = gis_tools.newGISDataset("Layer", "lyrThiessanPoints")
    arcpy.MakeFeatureLayer_management(fcThiessanPoints, lyrThiessanPoints)
    arcpy.SelectLayerByLocation_management(lyrThiessanPoints, "INTERSECT",
                                           fcTribJunctionPoints,
                                           str(dblJunctionBuffer) + " METERS",
                                           "NEW_SELECTION")

    fcThiessanPoly = gis_tools.newGISDataset(workspaceTemp,
                                             "GNAT_DPS_ThiessanPoly")
    arcpy.CreateThiessenPolygons_analysis(lyrThiessanPoints, fcThiessanPoly,
                                          "ONLY_FID")

    fcThiessanPolyClip = gis_tools.newGISDataset(workspaceTemp,
                                                 "GNAT_DPS_TheissanPolyClip")
    arcpy.Clip_analysis(fcThiessanPoly, fcInputPolygon, fcThiessanPolyClip)

    ### Code to Split the Junction Thiessan Polys ###
    arcpy.AddMessage("GNAT DPS: Split Junction Thiessan Polygons")
    lyrTribThiessanPolys = gis_tools.newGISDataset("Layer",
                                                   "lyrTribThiessanPolys")
    arcpy.MakeFeatureLayer_management(fcThiessanPolyClip, lyrTribThiessanPolys)
    arcpy.SelectLayerByLocation_management(lyrTribThiessanPolys,
                                           "INTERSECT",
                                           fcTribJunctionPoints,
                                           selection_type="NEW_SELECTION")

    fcSplitPoints = gis_tools.newGISDataset(workspaceTemp,
                                            "GNAT_DPS_SplitPoints")
    arcpy.Intersect_analysis([lyrTribThiessanPolys, fcCenterline],
                             fcSplitPoints,
                             output_type="POINT")

    arcpy.AddMessage("GNAT DPS: Moving Starting Vertices of Junction Polygons")
    geometry_functions.changeStartingVertex(fcTribJunctionPoints,
                                            lyrTribThiessanPolys)

    arcpy.AddMessage("GNAT DPS: Vertices Moved.")
    fcThiessanTribPolyEdges = gis_tools.newGISDataset(
        workspaceTemp, "GNAT_DPS_ThiessanTribPolyEdges")
    arcpy.FeatureToLine_management(lyrTribThiessanPolys,
                                   fcThiessanTribPolyEdges)

    fcSplitLines = gis_tools.newGISDataset(workspaceTemp,
                                           "GNAT_DPS_SplitLines")
    arcpy.SplitLineAtPoint_management(fcThiessanTribPolyEdges, fcSplitPoints,
                                      fcSplitLines, "0.1 METERS")

    fcMidPoints = gis_tools.newGISDataset(workspaceTemp, "GNAT_DPS_MidPoints")
    arcpy.FeatureVerticesToPoints_management(fcSplitLines, fcMidPoints, "MID")
    arcpy.Near_analysis(fcMidPoints, fcTribJunctionPoints, location="LOCATION")
    arcpy.AddXY_management(fcMidPoints)

    fcTribToMidLines = gis_tools.newGISDataset(workspaceTemp,
                                               "GNAT_DPS_TribToMidLines")
    arcpy.XYToLine_management(fcMidPoints, fcTribToMidLines, "POINT_X",
                              "POINT_Y", "NEAR_X", "NEAR_Y")

    ### Select Polys by Centerline ###
    arcpy.AddMessage("GNAT DPS: Select Polygons By Centerline")
    fcThiessanEdges = gis_tools.newGISDataset(workspaceTemp,
                                              "GNAT_DPS_ThiessanEdges")
    arcpy.FeatureToLine_management(fcThiessanPolyClip, fcThiessanEdges)

    fcAllEdges = gis_tools.newGISDataset(workspaceTemp, "GNAT_DPS_AllEdges")
    arcpy.Merge_management([fcTribToMidLines, fcThiessanEdges, fcCenterline],
                           fcAllEdges)  # include fcCenterline if needed

    fcAllEdgesPolygons = gis_tools.newGISDataset(workspaceTemp,
                                                 "GNAT_DPS_AllEdgesPolygons")
    arcpy.FeatureToPolygon_management(fcAllEdges, fcAllEdgesPolygons)

    fcAllEdgesPolygonsClip = gis_tools.newGISDataset(
        workspaceTemp, "GNAT_DPS_AllEdgesPolygonsClip")
    arcpy.Clip_analysis(fcAllEdgesPolygons, fcInputPolygon,
                        fcAllEdgesPolygonsClip)

    fcPolygonsJoinCenterline = gis_tools.newGISDataset(
        workspaceTemp, "GNAT_DPS_PolygonsJoinCenterline")
    arcpy.SpatialJoin_analysis(fcAllEdgesPolygonsClip,
                               fcCenterline,
                               fcPolygonsJoinCenterline,
                               "JOIN_ONE_TO_MANY",
                               "KEEP_ALL",
                               match_option="SHARE_A_LINE_SEGMENT_WITH")

    fcPolygonsDissolved = gis_tools.newGISDataset(
        workspaceTemp, "GNAT_DPS_PolygonsDissolved")
    arcpy.Dissolve_management(fcPolygonsJoinCenterline,
                              fcPolygonsDissolved,
                              "JOIN_FID",
                              multi_part="SINGLE_PART")

    #fcSegmentedPolygons = gis_tools.newGISDataset(workspaceOutput,"SegmentedPolygons")
    lyrPolygonsDissolved = gis_tools.newGISDataset("Layer",
                                                   "lyrPolygonsDissolved")
    arcpy.MakeFeatureLayer_management(fcPolygonsDissolved,
                                      lyrPolygonsDissolved)
    arcpy.SelectLayerByAttribute_management(lyrPolygonsDissolved,
                                            "NEW_SELECTION",
                                            """ "JOIN_FID" = -1 """)

    arcpy.Eliminate_management(lyrPolygonsDissolved, fcSegmentedPolygons,
                               "LENGTH")

    arcpy.AddMessage("GNAT DPS: Tool Complete.")
    return
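
# A hypothetical invocation of the tool (paths below are placeholders, not from the
# original toolbox):
#
#   main(fcInputCenterline=r"C:\data\gnat.gdb\stream_centerline",
#        fcInputPolygon=r"C:\data\gnat.gdb\valley_bottom",
#        fcSegmentedPolygons=r"C:\data\gnat.gdb\valley_bottom_segmented",
#        workspaceTemp=r"C:\data\gnat_scratch.gdb",
#        dblPointDensity=10.0,
#        dblJunctionBuffer=120.0)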
Beispiel #23
def main(workspace, areaOfInterest, albertaloticRiparian,
         albertaMergedWetlandInventory, quarterSectionBoundaries,
         parksProtectedAreasAlberta, humanFootprint):

    # Import necesarry modules
    import numpy as np
    import arcpy

    # Overwrite output and checkout neccesary extensions
    arcpy.env.overwriteOutput = True
    arcpy.CheckOutExtension("spatial")

    # assign workspace
    arcpy.env.workspace = workspace

    # First we project our parcel data into the correct projection, create a layer file, then select only parcels we are interested in with Select by Attribute
    # and Select by Location (intersecting the Area of Interest polygon), then export this selection to a new feature class called "ParcelsFinal"

    # Local Variables
    quarterSectionBoundaries_project = "quarterSectionBoundaries_project"
    quarterSectionBoundaries_project_layer = "quarterSectionBoundaries_project_layer"
    ParcelsFinal = "ParcelsFinal"

    # Process: Project
    arcpy.Project_management(
        quarterSectionBoundaries, quarterSectionBoundaries_project,
        "PROJCS['NAD_1983_10TM_AEP_Forest',GEOGCS['GCS_North_American_1983',DATUM['D_North_American_1983',SPHEROID['GRS_1980',6378137.0,298.257222101]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Transverse_Mercator'],PARAMETER['False_Easting',500000.0],PARAMETER['False_Northing',0.0],PARAMETER['Central_Meridian',-115.0],PARAMETER['Scale_Factor',0.9992],PARAMETER['Latitude_Of_Origin',0.0],UNIT['Meter',1.0]]",
        "",
        "GEOGCS['GCS_North_American_1983',DATUM['D_North_American_1983',SPHEROID['GRS_1980',6378137.0,298.257222101]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]]",
        "NO_PRESERVE_SHAPE", "", "NO_VERTICAL")

    # Process: Make Feature Layer
    arcpy.MakeFeatureLayer_management(
        quarterSectionBoundaries_project,
        quarterSectionBoundaries_project_layer, "", "",
        "OBJECTID OBJECTID VISIBLE NONE;Shape Shape VISIBLE NONE;MER MER VISIBLE NONE;RGE RGE VISIBLE NONE;TWP TWP VISIBLE NONE;SEC SEC VISIBLE NONE;QS QS VISIBLE NONE;RA RA VISIBLE NONE;PARCEL_ID PARCEL_ID VISIBLE NONE;Shape_length Shape_length VISIBLE NONE;Shape_area Shape_area VISIBLE NONE"
    )

    # selects all parcels intersecting the users area of interest
    # Process: Select Layer By Location
    arcpy.SelectLayerByLocation_management(
        quarterSectionBoundaries_project_layer, "INTERSECT", areaOfInterest,
        "", "NEW_SELECTION", "NOT_INVERT")

    # Removes roads from parcel data to ensure that only quarter sections are selected
    # Process: Select Layer By Attribute
    arcpy.SelectLayerByAttribute_management(
        quarterSectionBoundaries_project_layer, "SUBSET_SELECTION",
        "RA NOT LIKE 'R'")

    # Process: Copy Features
    arcpy.CopyFeatures_management(quarterSectionBoundaries_project_layer,
                                  ParcelsFinal, "", "0", "0", "0")

    # ############### ArcGis MODEL BUILDER SECTION: for initial Geoproccessing #################################################################################################################

    # The following was exported from ArcMap's Model Builder. It performs most of the necessary geoprocessing needed to determine the spatial relationships
    # between the parcels and the user provided data (Human footprint, Lotic(Riparian), Wetlands, Patch Size, and Proximity)

    # local Variables:
    footprint_EXTENT_CLIPPED = "Footprint_Extent_Clipped"
    Footprint_Inverse = "Footprint_Inverse"
    Intact_Area_Per_Parcel = "Intact_Area_Per_Parcel"
    Wetland_Extent_Clipped = "Wetland_Extent_Clipped"
    Wetland_Lines = "Wetland_Lines"
    Wetland_Edge_Per_Parcel = "Wetland_Edge_Per_Parcel"
    Lotic_Extent_Clipped = "Lotic_Extent_Clipped"
    Lotic_No_Wetlands = "Lotic_No_Wetlands"
    Lotic_Area_Per_Parcel = "Lotic_Area_Per_Parcel"
    Area_Of_Interest_Buffered = "Area_Of_Interest_Buffered"
    Footprint_Larger_Extent = "Footprint_Larger_Extent"
    Footprint_INVERSE_Large = "Footprint_INVERSE_Large"
    Footprint_INVERSE_Large_Explode = "Footprint_INVERSE_Large_Explode"

    # Process: Clip
    arcpy.Clip_analysis(humanFootprint, ParcelsFinal, footprint_EXTENT_CLIPPED,
                        "")

    # Process: Erase
    arcpy.Erase_analysis(ParcelsFinal, footprint_EXTENT_CLIPPED,
                         Footprint_Inverse, "")

    #
    # Process: Tabulate Intersection
    arcpy.TabulateIntersection_analysis(ParcelsFinal, "OBJECTID",
                                        Footprint_Inverse,
                                        Intact_Area_Per_Parcel, "", "", "",
                                        "UNKNOWN")

    # Process: Clip (3)
    arcpy.Clip_analysis(albertaMergedWetlandInventory, ParcelsFinal,
                        Wetland_Extent_Clipped, "")

    # Process: Feature To Line
    arcpy.FeatureToLine_management(Wetland_Extent_Clipped, Wetland_Lines, "",
                                   "ATTRIBUTES")
    ##arcpy.FeatureToLine_management("'D:\\evanamiesgalonskiMOBILE\\1 Courses\\329\\Final Project\\DATA\\test results.gdb\\Wetland_Extent_Clipped'", Wetland_Lines, "", "ATTRIBUTES")

    # Process: Tabulate Intersection (2)
    arcpy.TabulateIntersection_analysis(ParcelsFinal, "OBJECTID",
                                        Wetland_Lines, Wetland_Edge_Per_Parcel,
                                        "", "", "", "UNKNOWN")

    # Process: Clip (4)
    arcpy.Clip_analysis(albertaloticRiparian, ParcelsFinal,
                        Lotic_Extent_Clipped, "")

    # Process: Erase (2)
    arcpy.Erase_analysis(Lotic_Extent_Clipped, Wetland_Extent_Clipped,
                         Lotic_No_Wetlands, "")

    # Process: Tabulate Intersection (3)
    arcpy.TabulateIntersection_analysis(ParcelsFinal, "OBJECTID",
                                        Lotic_No_Wetlands,
                                        Lotic_Area_Per_Parcel, "", "", "",
                                        "UNKNOWN")

    # Process: Buffer
    arcpy.Buffer_analysis(areaOfInterest, Area_Of_Interest_Buffered,
                          "50 Kilometers", "FULL", "ROUND", "NONE", "",
                          "PLANAR")

    # Process: Clip (2)
    arcpy.Clip_analysis(humanFootprint, Area_Of_Interest_Buffered,
                        Footprint_Larger_Extent, "")

    # Process: Erase (3)
    arcpy.Erase_analysis(Area_Of_Interest_Buffered, Footprint_Larger_Extent,
                         Footprint_INVERSE_Large, "")

    # Process: Multipart To Singlepart
    arcpy.MultipartToSinglepart_management(Footprint_INVERSE_Large,
                                           Footprint_INVERSE_Large_Explode)

    # ###########################################################################################################################################################################

    # This part of the script edits the newly created tables that contain information about the intersection of Wetlands, Lotic, and Intactness data with the land parcels
    # The Area and Percent coverage fields are renamed to be more descriptive and to ensure there are no confusing duplicate field names in our ParcelsFinal feature class.

    # Alter Field names in intactness table
    arcpy.AlterField_management(Intact_Area_Per_Parcel,
                                "AREA",
                                new_field_name="Area_Intact",
                                field_is_nullable="NULLABLE")
    arcpy.AlterField_management(Intact_Area_Per_Parcel,
                                "PERCENTAGE",
                                new_field_name="Percent_Intact",
                                field_is_nullable="NULLABLE")

    # Alter field names in lotic_table
    arcpy.AlterField_management(Lotic_Area_Per_Parcel,
                                "AREA",
                                new_field_name="Area_Lotic",
                                field_is_nullable="NULLABLE")
    arcpy.AlterField_management(Lotic_Area_Per_Parcel,
                                "PERCENTAGE",
                                new_field_name="Percent_Lotic",
                                field_is_nullable="NULLABLE")

    # Alter Field name in wetlands_table
    arcpy.AlterField_management(Wetland_Edge_Per_Parcel,
                                "LENGTH",
                                new_field_name="Wetland_Edge",
                                field_is_nullable="NULLABLE")

    # Now we will join the desired fields from the 3 tables (intactness, lotic, ad wetlands) to the Land Parcel feature class

    # Process: Join Field
    arcpy.JoinField_management(ParcelsFinal, "OBJECTID",
                               Intact_Area_Per_Parcel, "OBJECTID_1",
                               ["Area_Intact", "Percent_Intact"])

    # Process: Join Field (2)
    arcpy.JoinField_management(ParcelsFinal, "OBJECTID", Lotic_Area_Per_Parcel,
                               "OBJECTID_1", ["Area_Lotic", "Percent_Lotic"])

    # Process: Join Field (3)
    arcpy.JoinField_management(ParcelsFinal, "OBJECTID",
                               Wetland_Edge_Per_Parcel, "OBJECTID_1",
                               "Wetland_Edge")

    # Now we get rid of null values in our new fields and replace them with zeros

    for nullable_field in ["Area_Intact", "Percent_Intact", "Area_Lotic",
                           "Percent_Lotic", "Wetland_Edge"]:
        with arcpy.da.UpdateCursor(ParcelsFinal, [nullable_field]) as cursor:
            for row in cursor:
                if row[0] is None:
                    row[0] = 0
                    cursor.updateRow(row)

    # This section of the script calculates the largest intact patch that intersects each parcel

    # Local Variables
    Footprint_INVERSE_Large_Explode = "Footprint_INVERSE_Large_Explode"
    Patch_Sizes_Per_Parcel = "Patch_Sizes_Per_Parcel"

    # Process: Tabulate Intersection
    arcpy.TabulateIntersection_analysis(ParcelsFinal, "OBJECTID",
                                        Footprint_INVERSE_Large_Explode,
                                        Patch_Sizes_Per_Parcel, "SHAPE_Area",
                                        "", "", "UNKNOWN")

    # A table was created with Tabulate Intersection that contains the areas of all intact patches that intersect
    # each parcel. We have several duplicates of each Parcel OBJECTID in this table, one for every patch that intersects a parcel.
    # we need to determine which duplicate OBJECTID corresponds to the largest patch area.
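
    # Optional sketch (illustrative only, not part of the original workflow): the
    # per-parcel maximum patch area could also be collected in a single pass over the
    # Tabulate Intersection table with a dictionary, avoiding a new SearchCursor per
    # parcel.  The helper name is hypothetical; it assumes the OBJECTID_1 and
    # SHAPE_Area fields created above.
    def max_area_by_parcel(table):
        largest = {}
        with arcpy.da.SearchCursor(table, ["OBJECTID_1", "SHAPE_Area"]) as area_cursor:
            for parcel_id, patch_area in area_cursor:
                if patch_area is not None and patch_area > largest.get(parcel_id, 0):
                    largest[parcel_id] = patch_area
        return largest
    # e.g. largest = max_area_by_parcel(Patch_Sizes_Per_Parcel), then
    # largest.get(ID, 0) reproduces the zero-for-no-intersection behaviour used below.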

    # First we get a full list of the object IDs in our clipped ParcelsFinal class
    # even though there is only one value in each cell of the attribute table, the data type is a tuple, so we need to extract our value out of it, as with a list
    parcel_IDs_extracted = []
    parcel_IDs = arcpy.da.SearchCursor(ParcelsFinal, "OBJECTID")
    for ID in parcel_IDs:
        if isinstance(ID, tuple):
            ID = ID[0]
            parcel_IDs_extracted.append(ID)
        else:
            parcel_IDs_extracted.append(ID)

    Patch_Sizes_Per_Parcel = "Patch_Sizes_Per_Parcel"

    ##    # remove null values
    ##    with arcpy.da.UpdateCursor(Patch_Sizes_Per_Parcel, ["SHAPE_Area"]) as cursor:
    ##        for row in cursor:
    ##            if row[0] == None:
    ##                row[0] = 0
    ##                cursor.updateRow(row)

    # Now we get a full list of all of the Parcel Object ID that had at least one intersection with the "Intact" feature class (human footprint inverse)
    # NOTE: not all of the parcels in our area of interest necessarily intersect with the "Intact" feature class
    patch_IDs = arcpy.da.SearchCursor(Patch_Sizes_Per_Parcel, "OBJECTID_1")
    patch_IDs_extracted = []
    for ID in patch_IDs:
        if isinstance(ID, tuple):
            ID = ID[0]
            patch_IDs_extracted.append(ID)
        elif isinstance(ID, str):
            patch_IDs_extracted.append(ID)

    # initialize 2 new lists
    orderedListofLists = []
    newlist = []
    # for each OBJECT ID we create a list of areas, which are the intersections for a parcel, then append that list as an element in our list of lists (orderedListofLists)
    # the newlist is re-initialized every iteration after it has dumped its values into orderedListofLists. The orderedListofLists is not re-initialized, and continues to be appended to.
    # Now the intersections for each parcel are nicely grouped together
    for ID in parcel_IDs_extracted:
        patch_IDs_and_Areas = arcpy.da.SearchCursor(
            Patch_Sizes_Per_Parcel, ["OBJECTID_1", "SHAPE_Area"])
        if ID not in patch_IDs_extracted:  # This step ensures that parcels that have no intersections receive a zero instead of being glossed over. This will maintain the order of our field values.
            orderedListofLists.append(0)
        else:
            newlist = []
            for rows in patch_IDs_and_Areas:
                if ID == rows[0]:
                    x = rows[1]
                    newlist.append(x)
            orderedListofLists.append(newlist)

    # initialize one more list
    # Since the intersections (areas) are grouped by parcel, we extract the highest number in each list element (which is a list), and this gives us the largest patch size for each parcel.
    max_patch_size_per_parcel = []

    for patchSizes in orderedListofLists:
        if patchSizes == 0:
            max_patch_size_per_parcel.append(0)
        else:
            max_patch_size_per_parcel.append(max(patchSizes))

    # convert square meters to acres (1 acre = 4,046.86 square meters) for scoring
    max_patch_size_per_parcel_acres = []
    acre = 0
    for patchsize in max_patch_size_per_parcel:
        acre = patchsize / 4046.86
        max_patch_size_per_parcel_acres.append(acre)

    # Now we have a list that contains the largest patch that intersects each parcel.
    # It is ordered the same as the OBJECTID and we can now create a new field in the parcels feature class and
    # iteratively populate the rows with each patch area value
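
    # Optional helper sketch (illustrative only): this "add a field, then walk an
    # UpdateCursor alongside an ordered Python list" pattern repeats for every score
    # below, so it could be factored out.  The helper name is hypothetical, and it
    # relies on the cursor returning rows in the same order the list was built in,
    # exactly as the inline code below does.
    def populate_field_from_list(fc, field_name, ordered_values):
        position = 0
        with arcpy.da.UpdateCursor(fc, field_name) as value_cursor:
            for value_row in value_cursor:
                value_row[0] = ordered_values[position]
                value_cursor.updateRow(value_row)
                position += 1
    # e.g. populate_field_from_list(ParcelsFinal, "Largest_Patch_Area",
    #                               max_patch_size_per_parcel_acres)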

    # create new field
    arcpy.AddField_management(ParcelsFinal,
                              "Largest_Patch_Area",
                              "DOUBLE",
                              field_length=50)

    # initialize x
    x = 0

    # use update cursor to populate rows and after each time the cursor moves down to the next row,
    # iterate to the next list element (x)
    with arcpy.da.UpdateCursor(ParcelsFinal, "Largest_Patch_Area") as cursor:
        for row in cursor:
            row[0] = max_patch_size_per_parcel_acres[x]
            cursor.updateRow(row)
            x += 1

    # the following code calculates the nearest protected area feature and automatically creates a new field that contains that distance for each parcel.
    # Process: Near
    arcpy.Near_analysis(ParcelsFinal, parksProtectedAreasAlberta, "",
                        "NO_LOCATION", "NO_ANGLE", "PLANAR")

    # #######################################################################################################################################################################################################

    # The next section of code calculates the scores for each parcel based on the values in our newly added fields.

    # ##################### INTACTNESS SCORE #######################

    # extract percent intact field
    intact_scores = []
    percent_intact = arcpy.da.SearchCursor(ParcelsFinal, "Percent_Intact")
    # Perform calculation for score and append to new list. Accommodate str and tuple field types
    for percent in percent_intact:
        if isinstance(percent, tuple):
            percent = percent[0] / 100
        elif isinstance(percent, str):
            percent = float(percent)
        intact_scores.append(percent)

    # create new field
    arcpy.AddField_management(ParcelsFinal,
                              "SCORE_Intactness",
                              "DOUBLE",
                              field_length=50)

    x = 0

    # use update cursor to populate rows with list element and after each time the cursor moves down to the next row,
    # iterate to the next list element (x)
    with arcpy.da.UpdateCursor(ParcelsFinal, "SCORE_Intactness") as cursor:
        for row in cursor:
            row[0] = intact_scores[x]
            cursor.updateRow(row)
            x += 1

    # ################### Lotic (Riparian) Score #########################

    # extract percent lotic field
    lotic_percent_list = []
    percent_lotic = arcpy.da.SearchCursor(ParcelsFinal, "Percent_Lotic")
    # Accommodate str and tuple field types
    for percent in percent_lotic:
        if isinstance(percent, tuple):
            percent = percent[0]
        elif isinstance(percent, str):
            percent = float(percent)
        lotic_percent_list.append(percent)

    # now we create a lotic percent list with no zeros before establishing ranges for deciles
    lotic_percent_list_noZero = []
    for percent in lotic_percent_list:
        if percent != 0:
            lotic_percent_list_noZero.append(percent)

    # use numpy to calculate the decile ranges
    ranges = np.percentile(lotic_percent_list_noZero, np.arange(0, 100, 10))

    # iterate through the original lotic percent list and use the decile ranges to bin the lotic percent values to the appropriate scores (a shorter numpy.digitize sketch follows this chain)
    final_lotic_scores = []
    for percent in lotic_percent_list:
        if percent == 0:
            final_lotic_scores.append(0)
        elif percent >= ranges[0] and percent <= ranges[1]:
            final_lotic_scores.append(0.1)
        elif percent >= ranges[1] and percent <= ranges[2]:
            final_lotic_scores.append(0.2)
        elif percent >= ranges[2] and percent <= ranges[3]:
            final_lotic_scores.append(0.3)
        elif percent >= ranges[3] and percent <= ranges[4]:
            final_lotic_scores.append(0.4)
        elif percent >= ranges[4] and percent <= ranges[5]:
            final_lotic_scores.append(0.5)
        elif percent >= ranges[5] and percent <= ranges[6]:
            final_lotic_scores.append(0.6)
        elif percent >= ranges[6] and percent <= ranges[7]:
            final_lotic_scores.append(0.7)
        elif percent >= ranges[7] and percent <= ranges[8]:
            final_lotic_scores.append(0.8)
        elif percent >= ranges[8] and percent <= ranges[9]:
            final_lotic_scores.append(0.9)
        elif percent >= ranges[9]:
            final_lotic_scores.append(1)
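
    # Optional sketch (illustrative only): numpy.digitize can express the decile
    # binning above without the long elif chain.  It reuses the existing numpy import
    # (np) and the "ranges" percentile breaks; the helper name is hypothetical.
    # right=True puts a value that sits exactly on a break into the lower bin,
    # matching the elif ordering above; the minimum non-zero value is clipped up to bin 1.
    def decile_score(value, breaks):
        if value == 0:
            return 0
        return 0.1 * max(int(np.digitize(value, breaks, right=True)), 1)
    # e.g. final_lotic_scores = [decile_score(p, ranges) for p in lotic_percent_list]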

    # the order of the resulting list is identical to the original, so it can be appended as a new field and the values will
    # correspond with the rows they are meant to score

    # create new field
    arcpy.AddField_management(ParcelsFinal,
                              "SCORE_Lotic_Deciles",
                              "DOUBLE",
                              field_length=50)

    x = 0

    # use update cursor to populate rows with list element and after each time the cursor moves down to the next row,
    # iterate to the next list element (x)
    with arcpy.da.UpdateCursor(ParcelsFinal, "SCORE_Lotic_Deciles") as cursor:
        for row in cursor:
            row[0] = final_lotic_scores[x]
            cursor.updateRow(row)
            x += 1

    # ######################### Wetland Score #####################

    # extract Wetland edge length field
    wetland_edge_list = []
    wetland_field = arcpy.da.SearchCursor(ParcelsFinal, "Wetland_Edge")

    # append values to new list. Accommodate str and tuple field types.
    for length in wetland_field:
        if isinstance(length, tuple):
            length = length[0]
        elif isinstance(length, str):
            length = float(length)
        wetland_edge_list.append(length)

    # now we create a wetland edge list with no zeros before establishing ranges for deciles
    wetland_edge_list_noZero = []
    for edge_length in wetland_edge_list:
        if edge_length != 0:
            wetland_edge_list_noZero.append(edge_length)

    # use numpy to calculate the decile ranges
    ranges = np.percentile(wetland_edge_list_noZero, np.arange(0, 100, 10))

    # iterate through original wetland edge list and use the decile ranges to bin the wetland edge values to the appropriate scores
    final_wetland_scores = []
    for edge_length in wetland_edge_list:
        if edge_length == 0:
            final_wetland_scores.append(0)
        elif edge_length >= ranges[0] and edge_length <= ranges[1]:
            final_wetland_scores.append(0.1)
        elif edge_length >= ranges[1] and edge_length <= ranges[2]:
            final_wetland_scores.append(0.2)
        elif edge_length >= ranges[2] and edge_length <= ranges[3]:
            final_wetland_scores.append(0.3)
        elif edge_length >= ranges[3] and edge_length <= ranges[4]:
            final_wetland_scores.append(0.4)
        elif edge_length >= ranges[4] and edge_length <= ranges[5]:
            final_wetland_scores.append(0.5)
        elif edge_length >= ranges[5] and edge_length <= ranges[6]:
            final_wetland_scores.append(0.6)
        elif edge_length >= ranges[6] and edge_length <= ranges[7]:
            final_wetland_scores.append(0.7)
        elif edge_length >= ranges[7] and edge_length <= ranges[8]:
            final_wetland_scores.append(0.8)
        elif edge_length >= ranges[8] and edge_length <= ranges[9]:
            final_wetland_scores.append(0.9)
        elif edge_length >= ranges[9]:
            final_wetland_scores.append(1)

    # the order of the resulting list is identical to the original, so it can be appended as a new field and the values will
    # correspond with the rows they are meant to score

    # create new field
    arcpy.AddField_management(ParcelsFinal,
                              "SCORE_Wetland_Deciles",
                              "DOUBLE",
                              field_length=50)

    x = 0

    # use update cursor to populate rows with list element and after each time the cursor moves down to the next row,
    # iterate to the next list element (x)
    with arcpy.da.UpdateCursor(ParcelsFinal,
                               "SCORE_Wetland_Deciles") as cursor:
        for row in cursor:
            row[0] = final_wetland_scores[x]
            cursor.updateRow(row)
            x += 1

    # ################ Patch size score ####################

    # extract patch size field
    largest_patch_sizes = []
    patch_sizes = arcpy.da.SearchCursor(ParcelsFinal, "Largest_Patch_Area")
    # Perform calculation for score and append to new list. Accommodate str and tuple field types
    for size in patch_sizes:
        if isinstance(size, tuple):
            size = size[0]
        elif isinstance(size, str):
            size = float(size)
        largest_patch_sizes.append(size)

    # now we populate a new list and assign scores based on acreage thresholds
    patch_size_scores = []
    for size in largest_patch_sizes:
        if size < 160:
            patch_size_scores.append(0)
        elif size < 2500:
            patch_size_scores.append(0.5)
        elif size < 10000:
            patch_size_scores.append(0.75)
        else:
            patch_size_scores.append(1)
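
    # Optional sketch (illustrative only): the same threshold binning can be written
    # with the standard-library bisect module, which makes the bin edges explicit and
    # leaves no gaps at the boundaries.  The helper name, thresholds, and scores below
    # simply mirror the chain above.
    import bisect

    def threshold_score(value, thresholds, scores):
        # scores must have one more entry than thresholds
        return scores[bisect.bisect_right(thresholds, value)]
    # e.g. patch_size_scores = [threshold_score(s, [160, 2500, 10000], [0, 0.5, 0.75, 1])
    #                           for s in largest_patch_sizes]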

    # create new field
    arcpy.AddField_management(ParcelsFinal,
                              "SCORE_Patch_Size",
                              "DOUBLE",
                              field_length=50)

    x = 0

    # use update cursor to populate rows with list element and after each time the cursor moves down to the next row,
    # iterate to the next list element (x)
    with arcpy.da.UpdateCursor(ParcelsFinal, "SCORE_Patch_Size") as cursor:
        for row in cursor:
            row[0] = patch_size_scores[x]
            cursor.updateRow(row)
            x += 1

    # ############### Proximity Score #####################

    # Rename the Distance field to be more descriptive
    # delete the NEAR_FID field (not needed)
    arcpy.AlterField_management(ParcelsFinal,
                                "NEAR_DIST",
                                new_field_name="Dist_to_Protected",
                                field_is_nullable="NULLABLE")
    arcpy.DeleteField_management(ParcelsFinal, "NEAR_FID")

    # extract proximity field
    all_proximities = []
    proximities = arcpy.da.SearchCursor(ParcelsFinal, "Dist_to_Protected")
    # Perform calculation for score and append to new list. Accommodate str and tuple field types
    for proximity in proximities:
        if isinstance(proximity, tuple):
            proximity = proximity[0]
        elif isinstance(proximity, str):
            proximity = float(proximity)
        all_proximities.append(proximity)

    # now we populate a new list and assign scores based on distance thresholds
    proximity_scores = []
    for proximity in all_proximities:
        if proximity == 0:
            proximity_scores.append(1)
        elif proximity < 2000:
            proximity_scores.append(0.75)
        elif proximity < 4000:
            proximity_scores.append(0.5)
        else:
            proximity_scores.append(0)

    # create new field
    arcpy.AddField_management(ParcelsFinal,
                              "SCORE_Proximity",
                              "DOUBLE",
                              field_length=50)

    x = 0

    # use update cursor to populate rows with list element and after each time the cursor moves down to the next row,
    # iterate to the next list element (x)
    with arcpy.da.UpdateCursor(ParcelsFinal, "SCORE_Proximity") as cursor:
        for row in cursor:
            row[0] = proximity_scores[x]
            cursor.updateRow(row)
            x += 1

    # ##################### FINAL PRIORITY SCORES ###########################

    sumOfScores = []
    scoreFields = arcpy.da.SearchCursor(ParcelsFinal, [
        "SCORE_Lotic_Deciles", "SCORE_Wetland_Deciles", "SCORE_Intactness",
        "SCORE_Patch_Size", "SCORE_Proximity"
    ])
    for score in scoreFields:
        sumScore = score[0] + score[1] + score[2] + score[3] + score[4]
        sumOfScores.append(sumScore)

    # create new field
    arcpy.AddField_management(ParcelsFinal,
                              "PRIORITY_SCORE",
                              "DOUBLE",
                              field_length=50)

    x = 0

    # use update cursor to populate rows with list element and after each time the cursor moves down to the next row,
    # iterate to the next list element (x)
    with arcpy.da.UpdateCursor(ParcelsFinal, "PRIORITY_SCORE") as cursor:
        for row in cursor:
            row[0] = sumOfScores[x]
            cursor.updateRow(row)
            x += 1
    # the order of the resulting list is identical to the original, so it can be appended as a new field and the values will
    # correspond with the rows they are meant to score

    # ################################## PRIORITY RANKING #######################################

    # now we calculate ranges for priority ranking with 4 breaks (Quartiles)
    ranges = np.percentile(sumOfScores, np.arange(0, 100, 25))

    final_priority_ranking = []
    for score in sumOfScores:
        if score >= ranges[0] and score <= ranges[1]:
            final_priority_ranking.append(None)
        elif score >= ranges[1] and score <= ranges[2]:
            final_priority_ranking.append(3)
        elif score >= ranges[2] and score <= ranges[3]:
            final_priority_ranking.append(2)
        elif score >= ranges[3]:
            final_priority_ranking.append(1)

    # create new field
    arcpy.AddField_management(ParcelsFinal,
                              "PRIORITY_RANKING",
                              "DOUBLE",
                              field_length=50)

    x = 0

    # use update cursor to populate rows with list element and after each time the cursor moves down to the next row,
    # iterate to the next list element (x)
    with arcpy.da.UpdateCursor(ParcelsFinal, "PRIORITY_RANKING") as cursor:
        for row in cursor:
            row[0] = final_priority_ranking[x]
            cursor.updateRow(row)
            x += 1

    arcpy.CheckInExtension("spatial")

    print("proccess complete")
    print("...........")
    print(
        "The resulting priority scored parcels feature class can be found in the user specified geodatabase by the name of 'ParcelsFinal'"
    )
    print(
        "To view the Conservation Priority ranking, symbolize the feature class by unique values, using the 'PRIORITY_RANKING' field."
    )
Example #24
def AnalyzePolylines(fc, route_id_fld, output_gdb):
    fc_name = os.path.basename(fc)
    if fc_name[-4:] == ".shp":
        fc_name = fc_name[:-4]
    fc_F2L_InMem = os.path.join("in_memory", fc_name + "_F2L")
    fc_F2L = os.path.join(output_gdb, fc_name + "_F2L")

    # fc_Diss name modification
    if prefixKeyString == '':
        fc_Diss = os.path.join(output_gdb, fc_name + "_SelfIntClassification")
    else:
        fc_Diss = os.path.join(
            output_gdb,
            fc_name + "_SelfIntClassification" + "_" + prefixKeyString)

    arcpy.AddMessage("Executing Feature To Line...")

    #arcpy.FeatureToLine_management(fc, fc_F2L)
    arcpy.FeatureToLine_management(fc, fc_F2L_InMem)
    arcpy.CopyFeatures_management(fc_F2L_InMem, fc_F2L)
    arcpy.AddMessage("Executing Dissolve...")
    arcpy.Dissolve_management(fc_F2L, fc_Diss, route_id_fld)

    arcpy.AddField_management(fc_Diss, "SelfIntersectionType", "TEXT", "", "",
                              50)

    arcpy.AddMessage("Classifying routes...")
    with arcpy.da.UpdateCursor(
            fc_Diss,
        ["SHAPE@", "SelfIntersectionType", route_id_fld]) as uCursor:

        for i, row in enumerate(uCursor):
            intersection_tuples_dict = {
            }  # Keeps track of the number of times a particular vertex has come up for a route
            vtx_list = []  # Running list of vertices
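            # For illustration (derived from the logic below): a single-part ring whose
            # first and last vertices coincide ends up with one entry such as
            # {(x0, y0): {0: 2}}, i.e. vertex (x0, y0) was met twice within part 0,
            # which the categorisation at the end of the loop labels "Loop".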
            if i % 1000 == 0 and i != 0:
                arcpy.AddMessage("      Number of routes classified: " +
                                 str(i))
            partnum = 0
            #if row[1].isMultipart:

            vtx_prev = None
            skip_feature = False

            for part in row[0]:
                vtx_list.append([])
                # Step through each vertex in the feature
                vtx_prev = None
                for vtx in part:
                    vtx_current = (vtx.X, vtx.Y)

                    # first check if the current vertex is already in intersection_tuples_dict.
                    # If it's just a matter of duplicate consecutive vertices in this part, ignore (2nd clause in if statement condition)
                    if vtx_current in intersection_tuples_dict and vtx_prev != vtx_current:
                        if partnum in intersection_tuples_dict[vtx_current]:
                            intersection_tuples_dict[vtx_current][partnum] += 1
                        else:
                            intersection_tuples_dict[vtx_current][partnum] = 1

                    # if the current vertex is not already in intersection_tuples_dict, we still need to check if it's among
                    # the vertices already reviewed (i.e., those in vtx_list)
                    else:
                        if vtx_prev != vtx_current:  # want to ignore duplicate consecutive vertices

                            vtx_in_list = False
                            vtx_part = None

                            for part_index, part_vertices in enumerate(vtx_list):
                                if vtx_current in part_vertices:
                                    vtx_part = part_index
                                    vtx_in_list = True

                            # if vtx_current is in the list, and its part is the same as the previous incidence's:
                            if vtx_in_list and partnum == vtx_part:
                                intersection_tuples_dict[vtx_current] = {
                                    partnum: 2
                                }

                            # if vtx_current is in the list, but it is not from the same part as the previous incidence, add its part to the dictionary with a count of 1
                            elif vtx_in_list:
                                intersection_tuples_dict[vtx_current] = {
                                    vtx_part: 1
                                }
                                intersection_tuples_dict[vtx_current][
                                    partnum] = 1

                    # Add the vertex to the current part's list
                    if vtx_prev != vtx_current:
                        vtx_list[partnum].append(vtx_current)

                    # save the current vertex as vtx_prev for the next iteration
                    vtx_prev = vtx_current

                    # if the feature has over 100,000 vertices in total it may cause
                    # the tool to crash.  Stop reviewing this feature and add a warning.
                    if sum(len(part_vtxs) for part_vtxs in vtx_list) > 100000:
                        arcpy.AddWarning(
                            "Route " + str(row[2]) +
                            " has more than 100,000 vertices.  Analyze manually."
                        )
                        skip_feature = True  # set so we can also break out of the outer part loop
                        break
                if skip_feature:
                    break

                partnum += 1

            # Categorize the route based on the intersection info collected
            category = "Not self-intersecting"

            # More than two intersection points found
            if len(intersection_tuples_dict) >= 3:
                leng = len(intersection_tuples_dict)
                category = "Complex: " + str(leng) + " intersections total"

            # Only one intersection point found
            elif len(intersection_tuples_dict) == 1:
                for point_key in intersection_tuples_dict:

                    if len(intersection_tuples_dict[point_key]) == 1:
                        category = "Loop"
                    elif len(intersection_tuples_dict[point_key]) == 2:
                        part_pt_counts = []
                        for key in intersection_tuples_dict[point_key]:
                            part_pt_counts.append(
                                intersection_tuples_dict[point_key][key])
                        part_pt_counts.sort()
                        if part_pt_counts == [2, 2]:
                            category = "Infinity"
                        else:
                            category = "Lollipop"
                    elif len(intersection_tuples_dict[point_key]) == 3:
                        part_pt_counts = []
                        for key in intersection_tuples_dict[point_key]:
                            part_pt_counts.append(
                                intersection_tuples_dict[point_key][key])
                        part_pt_counts.sort()
                        if part_pt_counts == [1, 1, 2]:
                            category = "Alpha"
                        else:
                            category = "Branch"
                    else:
                        length = len(intersection_tuples_dict[point_key])
                        category = "Single self-intersection with " + str(
                            length) + " approaches"
            # Two intersection points found
            elif len(intersection_tuples_dict) == 2:

                category_int1 = None
                category_int2 = None

                # Retrieve key for each intersection
                key_list = []
                for key in intersection_tuples_dict:
                    key_list.append(key)

                # Classify the first intersection
                for partnum_key in intersection_tuples_dict[key_list[0]]:
                    if intersection_tuples_dict[key_list[0]][partnum_key] == 2:
                        category_int1 = "Lollipop"
                if not category_int1:
                    category_int1 = "Branch"
                if len(intersection_tuples_dict[key_list[0]]) > 2:
                    category_int1 = "Branch or Alpha"

                # Classify the second intersection
                for partnum_key in intersection_tuples_dict[key_list[1]]:
                    if intersection_tuples_dict[key_list[1]][partnum_key] == 2:
                        category_int2 = "Lollipop"
                if not category_int2:
                    category_int2 = "Branch"
                if len(intersection_tuples_dict[key_list[1]]) > 2:
                    category_int1 = "Branch or Alpha"

                # Classify the route based on the two intersections
                if category_int1 == "Lollipop" and category_int2 == "Lollipop":
                    category = "Barbell"
                else:
                    int_ct = len(intersection_tuples_dict)
                    category = "Complex: " + str(int_ct) + " intersections total"

            row[1] = category

            uCursor.updateRow(row)

    arcpy.Delete_management(fc_F2L)

    stats_table = os.path.join(output_gdb,
                               "SelfIntersectingRoutes_CategoryCounts")
    arcpy.Statistics_analysis(fc_Diss, stats_table, [["Shape_Length", "SUM"]],
                              "SelfIntersectionType")
    costLayer = arcpy.sa.Int(costRaster)
else:
    costLayer = costRaster

costBndry = arcpy.RasterToPolygon_conversion(costLayer,
                                             "in_memory" + os.sep + "xxpoly",
                                             "SIMPLIFY", "VALUE")
costBndry = arcpy.Dissolve_management(costBndry,
                                      "in_memory" + os.sep + "cb_dissolved",
                                      "", "", "SINGLE_PART", "")
# Create feature envelope of costRaster
costEnv = arcpy.MinimumBoundingGeometry_management(
    costBndry, "in_memory" + os.sep + "cb_env", "ENVELOPE", "ALL")
# Convert costRaster boundary to line
costBndry = arcpy.FeatureToLine_management(costBndry,
                                           "in_memory" + os.sep + "xxline", "",
                                           "NO_ATTRIBUTES")
costBndry = arcpy.Buffer_analysis(costBndry, "in_memory" + os.sep + "buf",
                                  CellSize + " Meters", "FULL", "ROUND",
                                  "NONE", "")
costEnv = arcpy.FeatureToLine_management(costEnv,
                                         "in_memory" + os.sep + "xxenv", "",
                                         "NO_ATTRIBUTES")
EnvPnt = arcpy.FeatureVerticesToPoints_management(
    costEnv, "in_memory" + os.sep + "xxpnt")
fc = arcpy.MakeFeatureLayer_management(EnvPnt, "pLayer")
oid_fieldname = arcpy.Describe(fc).OIDFieldName
arcpy.SelectLayerByAttribute_management(
    fc, "NEW_SELECTION", oid_fieldname + " = 1 OR " + oid_fieldname + " = 3")
diag_1 = arcpy.PointsToLine_management(fc, "in_memory" + os.sep + "diag_1")
arcpy.SelectLayerByAttribute_management(fc, "SWITCH_SELECTION")
# Import arcpy module
import arcpy

# Local variables:
ua_lehd_acs_1 = "ua_lehd_acs_1"
ua_lehd_acs_1__2_ = ua_lehd_acs_1
tl_2010_06_place10 = "tl_2010_06_place10"
tl_2010_06_place10_Layer = "tl_2010_06_place10_Layer"
place_line_hold_shp = "L:\\Research\\Resurgence\\Working Files\\Place Dissolve\\place_line_hold.shp"
boundary_tracts_hold_shp = "L:\\Research\\Resurgence\\Working Files\\Boundary\\boundary_tracts_hold.shp"

# Process: Make Feature Layer
arcpy.MakeFeatureLayer_management(
    tl_2010_06_place10, tl_2010_06_place10_Layer, "\"NAME10\" = 'Los Angeles'",
    "",
    "FID FID VISIBLE NONE;Shape Shape VISIBLE NONE;STATEFP10 STATEFP10 VISIBLE NONE;PLACEFP10 PLACEFP10 VISIBLE NONE;PLACENS10 PLACENS10 VISIBLE NONE;GEOID10 GEOID10 VISIBLE NONE;NAME10 NAME10 VISIBLE NONE;NAMELSAD10 NAMELSAD10 VISIBLE NONE;LSAD10 LSAD10 VISIBLE NONE;CLASSFP10 CLASSFP10 VISIBLE NONE;PCICBSA10 PCICBSA10 VISIBLE NONE;PCINECTA10 PCINECTA10 VISIBLE NONE;MTFCC10 MTFCC10 VISIBLE NONE;FUNCSTAT10 FUNCSTAT10 VISIBLE NONE;ALAND10 ALAND10 VISIBLE NONE;AWATER10 AWATER10 VISIBLE NONE;INTPTLAT10 INTPTLAT10 VISIBLE NONE;INTPTLON10 INTPTLON10 VISIBLE NONE"
)

# Process: Feature To Line
arcpy.FeatureToLine_management("tl_2010_06_place10_Layer", place_line_hold_shp,
                               "", "ATTRIBUTES")

# Process: Select Layer By Location
arcpy.SelectLayerByLocation_management(ua_lehd_acs_1, "WITHIN_A_DISTANCE",
                                       place_line_hold_shp, "1 Miles",
                                       "NEW_SELECTION", "NOT_INVERT")

# Process: Copy Features
arcpy.CopyFeatures_management(ua_lehd_acs_1__2_, boundary_tracts_hold_shp, "",
                              "0", "0", "0")
Example #27
arcpy.AddField_management(ExtremePoints, "Del", "SHORT")
ExtPts.ExtremePoints(ExtremePoints)

Make = arcpy.MakeFeatureLayer_management(ExtremePoints,
                                         "%ScratchWorkspace%\\Make")
Selection = arcpy.SelectLayerByAttribute_management(Make, "NEW_SELECTION",
                                                    "\"Del\" = 1")

arcpy.DeleteFeatures_management(Selection)

# Splitting of the polygon with extreme points
ncurrentstep += 1
arcpy.AddMessage("Converting the input polygon to line - Step " +
                 str(ncurrentstep) + "/" + str(nstep))
PolyToLine = arcpy.FeatureToLine_management(Polygon,
                                            "%ScratchWorkspace%\\PolyToLine",
                                            "", "ATTRIBUTES")

ncurrentstep += 1
arcpy.AddMessage(
    "Looking for the longer distance between extreme points and the polygon - Step "
    + str(ncurrentstep) + "/" + str(nstep))
NearTable = arcpy.GenerateNearTable_analysis(ExtremePoints, PolyToLine,
                                             "NearTable", "", "LOCATION",
                                             "NO_ANGLE")
rows = arcpy.SearchCursor(NearTable)
Counter = 0
for row in rows:
    if row.NEAR_DIST > Counter:
        Counter = row.NEAR_DIST
Counter += 1
Example #28
def dissolve_optimal_route_segments_feature_class_for_commodity_mapping(
        layer_name, sql_where_clause, the_scenario, logger):

    # Make a dissolved version of fc for mapping aggregate flows
    logger.info(
        "start: dissolve_optimal_route_segments_feature_class_for_commodity_mapping"
    )

    scenario_gdb = the_scenario.main_gdb

    arcpy.env.workspace = scenario_gdb

    # Delete previous fcs if they exist
    for fc in [
            "optimized_route_segments_dissolved_tmp",
            "optimized_route_segments_split_tmp",
            "optimized_route_segments_dissolved_tmp2",
            "optimized_route_segments_dissolved_tmp2",
            "dissolved_segments_lyr",
            "optimized_route_segments_dissolved_commodity",
            "optimized_route_segments_dissolved_" + layer_name
    ]:
        if arcpy.Exists(fc):
            arcpy.Delete_management(fc)

    arcpy.MakeFeatureLayer_management("optimized_route_segments",
                                      "optimized_route_segments_lyr")
    arcpy.SelectLayerByAttribute_management(
        in_layer_or_view="optimized_route_segments_lyr",
        selection_type="NEW_SELECTION",
        where_clause=sql_where_clause)

    # Dissolve
    arcpy.Dissolve_management(
        "optimized_route_segments_lyr",
        "optimized_route_segments_dissolved_tmp",
        ["NET_SOURCE_NAME", "NET_SOURCE_OID", "ARTIFICIAL"],
        [['COMMODITY_FLOW', 'SUM']], "SINGLE_PART", "DISSOLVE_LINES")

    # Second dissolve needed to accurately show aggregate pipeline flows
    arcpy.FeatureToLine_management("optimized_route_segments_dissolved_tmp",
                                   "optimized_route_segments_split_tmp")

    arcpy.AddGeometryAttributes_management(
        "optimized_route_segments_split_tmp", "LINE_START_MID_END")

    arcpy.Dissolve_management(
        "optimized_route_segments_split_tmp",
        "optimized_route_segments_dissolved_tmp2",
        ["NET_SOURCE_NAME", "Shape_Length", "MID_X", "MID_Y", "ARTIFICIAL"],
        [["SUM_COMMODITY_FLOW", "SUM"]], "SINGLE_PART", "DISSOLVE_LINES")

    arcpy.AddField_management(
        in_table="optimized_route_segments_dissolved_tmp2",
        field_name="SUM_COMMODITY_FLOW",
        field_type="DOUBLE",
        field_precision="",
        field_scale="",
        field_length="",
        field_alias="",
        field_is_nullable="NULLABLE",
        field_is_required="NON_REQUIRED",
        field_domain="")
    arcpy.CalculateField_management(
        in_table="optimized_route_segments_dissolved_tmp2",
        field="SUM_COMMODITY_FLOW",
        expression="!SUM_SUM_COMMODITY_FLOW!",
        expression_type="PYTHON_9.3",
        code_block="")
    arcpy.DeleteField_management(
        in_table="optimized_route_segments_dissolved_tmp2",
        drop_field="SUM_SUM_COMMODITY_FLOW")
    arcpy.DeleteField_management(
        in_table="optimized_route_segments_dissolved_tmp2", drop_field="MID_X")
    arcpy.DeleteField_management(
        in_table="optimized_route_segments_dissolved_tmp2", drop_field="MID_Y")

    # Sort for mapping order
    arcpy.AddField_management(
        in_table="optimized_route_segments_dissolved_tmp2",
        field_name="SORT_FIELD",
        field_type="SHORT")
    arcpy.MakeFeatureLayer_management(
        "optimized_route_segments_dissolved_tmp2", "dissolved_segments_lyr")
    arcpy.SelectLayerByAttribute_management(
        in_layer_or_view="dissolved_segments_lyr",
        selection_type="NEW_SELECTION",
        where_clause="NET_SOURCE_NAME = 'road'")
    arcpy.CalculateField_management(in_table="dissolved_segments_lyr",
                                    field="SORT_FIELD",
                                    expression=1,
                                    expression_type="PYTHON_9.3")
    arcpy.SelectLayerByAttribute_management(
        in_layer_or_view="dissolved_segments_lyr",
        selection_type="NEW_SELECTION",
        where_clause="NET_SOURCE_NAME = 'rail'")
    arcpy.CalculateField_management(in_table="dissolved_segments_lyr",
                                    field="SORT_FIELD",
                                    expression=2,
                                    expression_type="PYTHON_9.3")
    arcpy.SelectLayerByAttribute_management(
        in_layer_or_view="dissolved_segments_lyr",
        selection_type="NEW_SELECTION",
        where_clause="NET_SOURCE_NAME = 'water'")
    arcpy.CalculateField_management(in_table="dissolved_segments_lyr",
                                    field="SORT_FIELD",
                                    expression=3,
                                    expression_type="PYTHON_9.3")
    arcpy.SelectLayerByAttribute_management(
        in_layer_or_view="dissolved_segments_lyr",
        selection_type="NEW_SELECTION",
        where_clause="NET_SOURCE_NAME LIKE 'pipeline%'")
    arcpy.CalculateField_management(in_table="dissolved_segments_lyr",
                                    field="SORT_FIELD",
                                    expression=4,
                                    expression_type="PYTHON_9.3")

    arcpy.Sort_management("optimized_route_segments_dissolved_tmp2",
                          "optimized_route_segments_dissolved_commodity",
                          [["SORT_FIELD", "ASCENDING"]])

    # Delete temp fc's
    arcpy.Delete_management("optimized_route_segments_dissolved_tmp")
    arcpy.Delete_management("optimized_route_segments_split_tmp")
    arcpy.Delete_management("optimized_route_segments_dissolved_tmp2")
    arcpy.Delete_management("optimized_route_segments_lyr")
    arcpy.Delete_management("dissolved_segments_lyr")

    # Copy to permanent fc (unique to commodity name)
    arcpy.CopyFeatures_management(
        "optimized_route_segments_dissolved_commodity",
        "optimized_route_segments_dissolved_" + layer_name)
Example #29
def appendedNetwork(networkBike, networkFoot, outData):
    if arcpy.Exists(outData):
        arcpy.Delete_management(outData)
    arcpy.FeatureToLine_management([networkBike, networkFoot], outData, "",
                                   "ATTRIBUTES")
Example #30
'Right Street ID' <None> VISIBLE NONE;'Display X' <None> VISIBLE NONE;'Display Y' <None> VISIBLE NONE;'Min X value for extent' <None> VISIBLE NONE;
'Max X value for extent' <None> VISIBLE NONE;'Min Y value for extent' <None> VISIBLE NONE;'Max Y value for extent' <None> VISIBLE NONE;'Left parity' <None> VISIBLE NONE;
'Right parity' <None> VISIBLE NONE;'Left Additional Field' <None> VISIBLE NONE;'Right Additional Field' <None> VISIBLE NONE;'Altname JoinID' <None> VISIBLE NONE;'''
add_locator = dir_path + name + "_addloc_2"
#'Add_30_2nd' originates from 'Create 1930 and 1940 Address Files_2nd.R' code
addresses = dir_path + "Add_30_2nd.csv"
address_fields = "Street address;City city;State state"
points30 = dir_path + name + "_Points30_2.shp"
pblk_points = dir_path + name + "_1930_Pblk_Points_2.shp"

print "Working On: " + name + " Creating Physical Blocks"
##### #Create Physical Blocks# #####
#First Dissolve St_Grid lines
arcpy.Dissolve_management(grid, dissolve_grid, "FULLNAME")
#Second Split Lines at Intersections
arcpy.FeatureToLine_management(dissolve_grid, split_grid)
#Third Create Physical Blocks using Feature to Polygon
arcpy.FeatureToPolygon_management(split_grid, pblocks)
#Finally Add a Physical Block ID
expression = "!FID! + 1"
arcpy.AddField_management(pblocks, "pblk_id", "LONG", 4, "", "", "", "", "")
arcpy.CalculateField_management(pblocks, "pblk_id", expression, "PYTHON_9.3")

print "Working On: " + name + " Geocode"
##### #Geocode Points# #####
#Create Address Locator
arcpy.CreateAddressLocator_geocoding("US Address - Dual Ranges",
                                     reference_data, in_field_map, add_locator,
                                     "")
#Geocode Points
arcpy.GeocodeAddresses_geocoding(addresses, add_locator, address_fields,