def integrating_network(lines, tolerance="0 Meters"):
    """Clean a power-line feature class and dissolve it back into whole lines.

    lines     -- input line feature class carrying a Line_Type attribute
    tolerance -- linear unit used to integrate the overhead lines and as the
                 XY tolerance when searching for duplicate segments

    Returns the dissolved ("unsplit") line feature class.
    """
    # Separate overhead ('ВЛ') and cable ('КЛ') lines so each group can be
    # integrated with its own tolerance.
    overhead = arcpy.FeatureClassToFeatureClass_conversion(
        lines, "Network", "Lines_over_p", where_clause="Line_Type = 'ВЛ'")
    cables = arcpy.FeatureClassToFeatureClass_conversion(
        lines, "Network", "Lines_cable_p", where_clause="Line_Type = 'КЛ'")
    arcpy.Integrate_management(overhead, tolerance)
    arcpy.Integrate_management(cables, "0.1 Meters")

    # Recombine both groups, split at every vertex, and flag segments that
    # duplicate another segment's geometry, Name and Voltage.
    merged = arcpy.Merge_management([overhead, cables], "Lines_merge")
    segments = arcpy.SplitLine_management(merged, "SplitLine")
    duplicates = arcpy.FindIdentical_management(
        segments, "in_memory/Find_Ident", ["Shape", "Name", "Voltage"],
        xy_tolerance=tolerance, output_record_option="ONLY_DUPLICATES")
    tagged = arcpy.JoinField_management(segments, "OBJECTID", duplicates, "IN_FID")
    arcpy.DeleteIdentical_management(tagged, ["Shape", "Name", "Voltage"],
                                     "0.1 Meters")

    # Dissolve the de-duplicated segments back into whole lines keyed by the
    # descriptive attributes.
    return arcpy.Dissolve_management(
        tagged, "Unsplited_Lines",
        ["Name", "Voltage", "Line_Type", "Start", "End", "Circuit",
         "Operate_Name", "Trace_Version", "Status"],
        multi_part="MULTI_PART")
def doDimensions(self, dFrame, lyr):
    """Rebuild the dimensions feature class for *lyr* and repoint the map's
    'Dimensions'/'iDimensions' layers at it.

    dFrame -- the data frame the user is dimensioning (main or detail/inset)
    lyr    -- the polygon layer to derive dimension lines from

    Returns lyr unchanged.  Side effects: (re)creates a per-user temp file
    geodatabase on T:, writes DimensionsFC into it, swaps the data source of
    both dimensions layers, and toggles their visibility.
    """
    # Set Up Stuff: per-user scratch FGDB so concurrent users don't collide.
    tmpDrive = "T:"
    tmpFGDBname = "\\tmpfgdb_" + getpass.getuser() + ".gdb"
    tmpFGDB = tmpDrive + tmpFGDBname
    dimFC = tmpFGDB + "\\DimensionsFC"
    tmpFC1 = tmpDrive + "\\tmpFC1.shp"
    #tmpFC2 = tmpFGDB + "\\tmpFC2"
    # Create a new temp FGDB if it doesn't already exist
    if not arcpy.Exists(tmpFGDB):
        arcpy.AddMessage("creating new " + tmpFGDB)
        arcpy.CreateFileGDB_management(tmpDrive, tmpFGDBname)
    else:
        # if FGDB exists, then remove all the old layers
        arcpy.AddMessage("Deleting old dimensions feature class if exists")
        delList = [dimFC, tmpFC1]
        for feat in delList:
            self.delLayer(feat)
        arcpy.AddMessage("finished deleting old temp feature classes")
    # Polygon outline -> one polyline per feature boundary.
    arcpy.AddMessage("converting polygon to polyline")
    arcpy.PolygonToLine_management(lyr, tmpFC1, "IGNORE_NEIGHBORS")
    #dimLayer = arcpy.mapping.Layer(dimFC) # create layer object from dimFC feat class
    # Split the outline into one two-point segment per edge so each edge can
    # carry its own dimension.
    arcpy.SplitLine_management(tmpFC1, dimFC)
    arcpy.AddMessage("Splitting up the lines of polyline layer")
    # Grab the dimensions layers from both data frames of the current MXD.
    mainLyr = arcpy.mapping.ListLayers(self.mxd, "Dimensions", self.mainDF)[0]
    detailLyr = arcpy.mapping.ListLayers(self.mxd, "iDimensions", self.detailDF)[0]
    arcpy.AddMessage("updating the dimensions layer")
    # changing the dimensions layer source to the new one
    mainLyr.replaceDataSource(tmpFGDB, "FILEGDB_WORKSPACE", "DimensionsFC")
    detailLyr.replaceDataSource(tmpFGDB, "FILEGDB_WORKSPACE", "DimensionsFC")
    arcpy.AddMessage(tmpFGDB + " is my temp fgdb name")
    arcpy.AddMessage("Dimensions data source changed: ")
    #arcpy.AddMessage(upLyr.dataSource)
    # !!! turning off the dimensions lyr on the main data frame if dimensions
    # was already used on the inset map
    # !!! otherwise dimensions is turned on for both... not ideal.
    if dFrame == self.detailDF:
        arcpy.AddMessage("inset measurements only")
        mainLyr.visible = False
        detailLyr.visible = True
        arcpy.AddMessage("main: " + str(mainLyr.visible))
    else:
        arcpy.AddMessage("main map measurements only")
        mainLyr.visible = True
        detailLyr.visible = False
        arcpy.AddMessage("main: " + str(mainLyr.visible))
    arcpy.RefreshActiveView()
    return lyr
def countlines(gdb, linefc, outfc, countfield='linecount', overwrite=True):
    """Count identical lines in linefc and return outfc with a field
    (countfield) holding the count of equal lines.  Outfc is split on
    vertices before comparison.

    gdb        -- string, path to .gdb where linefc exists
    linefc     -- string, name of line featureclass in gdb to count
    outfc      -- string, path or name of output featureclass with count.
                  If just a name (no directory part), result is placed in gdb.
    countfield -- string, name of the LONG field that receives the count
    overwrite  -- bool, sets arcpy.env.overwriteOutput
    """
    arcpy.env.workspace = gdb
    arcpy.env.overwriteOutput = overwrite

    # Split on vertices so every feature becomes a simple two-point segment
    # that can be compared geometry-to-geometry.
    lines_split = f'{linefc}_split'
    arcpy.SplitLine_management(linefc, lines_split)

    all_lines = [
        row[0] for row in arcpy.da.SearchCursor(lines_split, 'shape@')
    ]
    counter = Counter()
    unike = {}
    with arcpy.da.SearchCursor(lines_split, 'shape@') as cursor:
        for i, row in enumerate(cursor):
            # Collect the indices of every remaining segment equal to row's
            # geometry; counter[i] ends up as the duplicate count.
            matched = set()  # set: O(1) membership in the filter below
            for ii, line in enumerate(all_lines):
                if row[0].equals(line):
                    unike[i] = row[0]
                    counter.update([i])
                    matched.add(ii)
            # Drop consumed geometries so later rows don't re-count them.
            all_lines = [
                geom for idx, geom in enumerate(all_lines) if idx not in matched
            ]

    # BUG FIX: the original used outfc_p.is_dir(), which is False for any
    # output that does not exist yet, so the "outfc is a path" branch could
    # never trigger for a new feature class.  Detect a directory component
    # structurally instead.
    outfc_p = pathlib.Path(outfc)
    if len(outfc_p.parts) > 1:
        outpath = str(outfc_p.parent)
        outname = outfc_p.name
    else:
        outpath = gdb
        outname = outfc

    out_full = os.path.join(outpath, outname)
    arcpy.CreateFeatureclass_management(
        outpath, outname, 'POLYLINE',
        spatial_reference=arcpy.Describe(linefc).spatialReference)
    arcpy.AddField_management(out_full, countfield, 'LONG')

    # 'with' guarantees the insert cursor is released even on error
    # (the original relied on a bare 'del').
    with arcpy.da.InsertCursor(out_full, ['shape@', countfield]) as insertcursor:
        for k, v in unike.items():
            insertcursor.insertRow((v, counter[k]))
def delete_dangles(KVL_dissolve, input_points_p):
    """Delete line segments whose dangling ends do not terminate at a
    station point, then re-dissolve the network.

    KVL_dissolve   -- dissolved cable-line feature class to clean
    input_points_p -- point feature class; only 'ПС', 'ЭС' and 'РУ' point
                      types count as valid line endpoints

    Returns the final dissolved feature class.
    """
    # Valid endpoints: the station-type points only.
    station_points = arcpy.FeatureClassToFeatureClass_conversion(
        input_points_p, "in_memory", "Points_Subset",
        "Point_Type IN ('ПС', 'ЭС', 'РУ')")
    station_layer = arcpy.MakeFeatureLayer_management(station_points, "Points_Layer")

    # Snap coincident vertices, split at vertices and drop exact duplicates.
    arcpy.Integrate_management(KVL_dissolve)
    segments = arcpy.SplitLine_management(KVL_dissolve, "SplitLine2")
    arcpy.DeleteIdentical_management(segments, ["SHAPE", "Name"])
    rebuilt = arcpy.Dissolve_management(
        segments, "Unsplited_Lines2",
        ["Name", "Voltage", "Start", "End", "Circuit",
         "Operate_Name", "Trace_Version", "Status"],
        multi_part="MULTI_PART")

    # Break the rebuilt lines at the stations and locate dangling ends.
    split_at_stations = arcpy.SplitLineAtPoint_management(
        rebuilt, station_points, "SplitAtPoint", search_radius="1 Meters")
    dangle_points = arcpy.FeatureVerticesToPoints_management(
        split_at_stations, 'Dangles_KVL', 'DANGLE')
    dangle_layer = arcpy.MakeFeatureLayer_management(dangle_points, "Dangles_Layer")
    line_layer = arcpy.MakeFeatureLayer_management(split_at_stations, "Lines_Layer")

    # Invert the "touches a station" selection so only stray dangles remain,
    # then delete every segment attached to one of them.
    arcpy.SelectLayerByLocation_management(dangle_layer, "INTERSECT", station_layer)
    arcpy.SelectLayerByAttribute_management(dangle_layer, "SWITCH_SELECTION")
    arcpy.SelectLayerByLocation_management(line_layer, "INTERSECT", dangle_layer)
    arcpy.DeleteFeatures_management(line_layer)

    # Final dissolve of the surviving segments.
    return arcpy.Dissolve_management(
        line_layer, "KVL_Dissolve",
        ["Name", "Voltage", "Start", "End", "Circuit",
         "Operate_Name", "Status"],
        multi_part="MULTI_PART")
def CalcObservers(Simple_CQ, Observers, DataFactorsBoundaries, locationtemporal2, gv):
    """Create observation points around every building of the community.

    Simple_CQ             -- simplified building polygons of the community
    Observers             -- output feature class receiving the observer points
    DataFactorsBoundaries -- CSV from CalcBoundaries with building-overlap
                             factors (columns FactorShade, Name_x)
    locationtemporal2     -- scratch workspace for intermediate datasets
    gv                    -- globals holder providing .log()

    Returns the arcpy message string of the last executed tool.
    """
    # Intermediate dataset paths in the scratch workspace.
    # FIX: Buffer_CQ was assigned twice in the original ('BufferCQ' then
    # 'Buffer_CQ'); the first assignment was dead code and is removed.
    temporal_lines = locationtemporal2 + '\\' + 'lines'
    Points = locationtemporal2 + '\\' + 'Points'
    AggregatedBuffer = locationtemporal2 + '\\' + 'BufferAggregated'
    temporal_lines3 = locationtemporal2 + '\\' + 'lines3'
    Points3 = locationtemporal2 + '\\' + 'Points3'
    Points3Updated = locationtemporal2 + '\\' + 'Points3Updated'
    EraseObservers = locationtemporal2 + '\\' + 'eraseobservers'
    Observers0 = locationtemporal2 + '\\' + 'observers0'
    NonoverlappingBuildings = locationtemporal2 + '\\' + 'Non_overlap'
    templines = locationtemporal2 + '\\' + 'templines'
    templines2 = locationtemporal2 + '\\' + 'templines2'
    Buffer_CQ0 = locationtemporal2 + '\\' + 'Buffer_CQ0'
    Buffer_CQ = locationtemporal2 + '\\' + 'Buffer_CQ'
    Buffer_CQ1 = locationtemporal2 + '\\' + 'Buffer_CQ1'
    Simple_CQcopy = locationtemporal2 + '\\' + 'Simple_CQcopy'

    # First increase the boundaries (~0.75 m flat buffer per edge) of each
    # surface so observers never overlap the buildings, then simplify so only
    # one vertex per facade remains.
    arcpy.CopyFeatures_management(Simple_CQ, Simple_CQcopy)
    # Make Square-like buffers
    arcpy.PolygonToLine_management(Simple_CQcopy, templines, "IGNORE_NEIGHBORS")
    arcpy.SplitLine_management(templines, templines2)
    arcpy.Buffer_analysis(templines2, Buffer_CQ0, "0.75 Meters", "FULL", "FLAT",
                          "NONE", "#")
    arcpy.Append_management(Simple_CQcopy, Buffer_CQ0, "NO_TEST")
    arcpy.Dissolve_management(Buffer_CQ0, Buffer_CQ1, "Name", "#",
                              "SINGLE_PART", "DISSOLVE_LINES")
    arcpy.SimplifyBuilding_cartography(Buffer_CQ1, Buffer_CQ,
                                       simplification_tolerance=8,
                                       minimum_area=None)

    # Transform the simplified polygons to observation points: one observer
    # at the midpoint of every facade edge.
    arcpy.SplitLine_management(Buffer_CQ, temporal_lines)
    arcpy.FeatureVerticesToPoints_management(temporal_lines, Points, 'MID')

    # Aggregate adjacent buildings to pick up extra shared-wall vertices and
    # derive additional observers from them.
    arcpy.AggregatePolygons_cartography(Buffer_CQ, AggregatedBuffer,
                                        "0.5 Meters", "0 SquareMeters",
                                        "0 SquareMeters", "ORTHOGONAL")
    arcpy.SplitLine_management(AggregatedBuffer, temporal_lines3)
    arcpy.FeatureVerticesToPoints_management(temporal_lines3, Points3, 'MID')
    # Attach building attributes to the extra points.
    arcpy.SpatialJoin_analysis(Points3, Buffer_CQ, Points3Updated,
                               "JOIN_ONE_TO_ONE", "KEEP_ALL",
                               match_option="CLOSEST", search_radius="5 METERS")
    # Drop extra points within 2 m of an existing observer, then merge both
    # point sets.
    arcpy.Erase_analysis(Points3Updated, Points, EraseObservers, "2 Meters")
    arcpy.Merge_management([Points, EraseObservers], Observers0)

    # Import the overlap table from CalcBoundaries; keep only never-shaded
    # buildings (FactorShade == 1), de-duplicate by name and reindex.
    # FIX: operate on an explicit .copy() -- drop_duplicates(inplace=True) on
    # a boolean-indexed slice raises pandas' SettingWithCopyWarning and is not
    # guaranteed to modify the data.
    DataNear = pd.read_csv(DataFactorsBoundaries)
    CleanDataNear = DataNear[DataNear['FactorShade'] == 1].copy()
    CleanDataNear.drop_duplicates(subset='Name_x', inplace=True)
    CleanDataNear.reset_index(inplace=True)
    rows = CleanDataNear.Name_x.count()

    # Accumulate a selection of all non-overlapped buildings on one layer.
    for row in range(rows):
        Field = "Name"  # field in Simple_CQ holding the quarter name
        Value = CleanDataNear.loc[row, 'Name_x']
        # Produces e.g. "Name"='B101' (same string the original built).
        Where_clausule = '"' + Field + '"' + "=" + "'" + str(Value) + "'"
        if row == 0:
            arcpy.MakeFeatureLayer_management(Simple_CQ, 'Simple_lyr')
            arcpy.SelectLayerByAttribute_management('Simple_lyr',
                                                    "NEW_SELECTION",
                                                    Where_clausule)
        else:
            arcpy.SelectLayerByAttribute_management('Simple_lyr',
                                                    "ADD_TO_SELECTION",
                                                    Where_clausule)
    # FIX: the layer is created as 'Simple_lyr' but the original copied
    # 'simple_lyr'; use one spelling consistently.
    arcpy.CopyFeatures_management('Simple_lyr', NonoverlappingBuildings)

    # Eliminate observers that fall inside a non-overlapped building, then
    # copy to the final output so the OBJECTIDs are re-sequenced.
    arcpy.ErasePoint_edit(Observers0, NonoverlappingBuildings, "INSIDE")
    arcpy.CopyFeatures_management(Observers0, Observers)
    with arcpy.da.UpdateCursor(Observers, ["OBJECTID", "ORIG_FID"]) as cursor:
        for row in cursor:
            row[1] = row[0]
            cursor.updateRow(row)
    gv.log('complete calculating observers')
    return arcpy.GetMessages()
def split_linein2pairs(linha_shpfile, linha_splitshpfile):
    """Split every polyline into two-point segments, one per vertex pair.

    linha_shpfile      -- (str) path to the input line shapefile
    linha_splitshpfile -- (str) path to the output split-line shapefile
    """
    arcpy.SplitLine_management(linha_shpfile, linha_splitshpfile)
arcpy.Project_management(in_dataset=NTMsel, out_dataset=NTMproj, out_coor_system=cs_ref) #Create raster of weekly number of buses at the same resolution as bing data # Convert weekly number of buses to integer arcpy.AddField_management(NTMproj, 'adjustnum_int', 'SHORT') with arcpy.da.UpdateCursor(NTMproj, ['adjustnum_SUM', 'adjustnum_int']) as cursor: for row in cursor: if row[0]: row[1] = int(10 * row[0] + 0.5) cursor.updateRow(row) #Split lines at all intersections so that small identical overlapping segments can be dissolved arcpy.SplitLine_management(NTMproj, NTMproj + '_split') #Split at intersection arcpy.FindIdentical_management( NTMproj + '_split', "explFindID", "Shape" ) #Find overlapping segments and make them part of a group (FEAT_SEQ) arcpy.MakeFeatureLayer_management(NTMproj + '_split', "intlyr") arcpy.AddJoin_management("intlyr", arcpy.Describe("intlyr").OIDfieldName, "explFindID", "IN_FID", "KEEP_ALL") arcpy.Dissolve_management("intlyr", NTMsplitdiss, dissolve_field='explFindID.FEAT_SEQ', statistics_fields=[[ os.path.split(NTMproj)[1] + '_split.adjustnum_int', 'SUM' ]]) #Dissolve overlapping segments arcpy.RepairGeometry_management(
def generate_transects(workspacePath, lineFL, splitType, distanceBetweenTransects, transectWidth, widthUnit, outputFC):
    """Generate transects perpendicular to the input lines.

    workspacePath            -- workspace (folder) to work in; a scratch
                                'General.gdb' is created inside it and
                                deleted at the end
    lineFL                   -- input line feature layer/class
    splitType                -- "Split at approximate distance" to cut the
                                dissolved line every distanceBetweenTransects
                                units; anything else splits at vertices
    distanceBetweenTransects -- spacing between transects (cast to float)
    transectWidth            -- half-length of each transect arm
    widthUnit                -- linear unit for transectWidth
    outputFC                 -- output transect feature class
    """
    import arcpy
    import math
    #Set environments
    arcpy.env.overwriteOutput = True
    arcpy.env.XYResolution = "0.00001 Meters"
    arcpy.env.XYTolerance = "0.0001 Meters"
    # Set local variables
    arcpy.env.workspace = workspacePath
    Lines = lineFL
    SplitType = splitType
    DistanceSplit = float(distanceBetweenTransects)
    TransecLength = transectWidth
    TransecLength_Unit = widthUnit
    OutputTransect = outputFC

    # Def splitline module
    ###START SPLIT LINE CODE IN A SAME DISTANCE###
    # Source: http://nodedangles.wordpress.com/2011/05/01/quick-dirty-arcpy-batch-splitting-polylines-to-a-specific-length/
    def splitline(inFC, FCName, alongDist):
        # Copy inFC into the workspace as FCName and replace its rows with
        # pieces no longer than alongDist.
        OutDir = arcpy.env.workspace
        outFCName = FCName
        outFC = OutDir + "/" + outFCName

        def distPoint(p1, p2):
            # Euclidean distance between two arcpy Points.
            calc1 = p1.X - p2.X
            calc2 = p1.Y - p2.Y
            return math.sqrt((calc1**2) + (calc2**2))

        def midpoint(prevpoint, nextpoint, targetDist, totalDist):
            # Point targetDist along the segment prevpoint->nextpoint, where
            # totalDist is the full segment length.
            newX = prevpoint.X + ((nextpoint.X - prevpoint.X) * (targetDist / totalDist))
            newY = prevpoint.Y + ((nextpoint.Y - prevpoint.Y) * (targetDist / totalDist))
            return arcpy.Point(newX, newY)

        def splitShape(feat, splitDist):
            # Walk the vertices of feat accumulating distance; every time the
            # running length reaches splitDist, cut and start a new piece.
            # Count the number of points in the current multipart feature
            partcount = feat.partCount
            partnum = 0
            # Enter while loop for each part in the feature (if a singlepart feature
            # this will occur only once)
            lineArray = arcpy.Array()
            while partnum < partcount:
                # Print the part number
                #print "Part " + str(partnum) + ":"
                part = feat.getPart(partnum)
                #print part.count
                totalDist = 0
                pnt = part.next()
                pntcount = 0
                prevpoint = None
                # NOTE(review): shapelist is re-initialised for every part, so
                # for multipart features only the last part's pieces survive
                # -- confirm inputs are singlepart (the caller dissolves with
                # SINGLE_PART first).
                shapelist = []
                # Enter while loop for each vertex
                while pnt:
                    if not (prevpoint is None):
                        thisDist = distPoint(prevpoint, pnt)
                        maxAdditionalDist = splitDist - totalDist
                        #print thisDist, totalDist, maxAdditionalDist
                        if (totalDist + thisDist) > splitDist:
                            # Segment crosses one or more cut points: insert
                            # interpolated vertices and flush finished pieces.
                            while (totalDist + thisDist) > splitDist:
                                maxAdditionalDist = splitDist - totalDist
                                #print thisDist, totalDist, maxAdditionalDist
                                newpoint = midpoint(prevpoint, pnt, maxAdditionalDist, thisDist)
                                lineArray.add(newpoint)
                                shapelist.append(lineArray)
                                lineArray = arcpy.Array()
                                lineArray.add(newpoint)
                                prevpoint = newpoint
                                thisDist = distPoint(prevpoint, pnt)
                                totalDist = 0
                            lineArray.add(pnt)
                            totalDist += thisDist
                        else:
                            totalDist += thisDist
                            lineArray.add(pnt)
                            #shapelist.append(lineArray)
                    else:
                        # First vertex of the part.
                        lineArray.add(pnt)
                        totalDist = 0
                    prevpoint = pnt
                    pntcount += 1
                    pnt = part.next()
                    # If pnt is null, either the part is finished or there is an
                    # interior ring
                    #
                    if not pnt:
                        pnt = part.next()
                        #if pnt:
                        #print "Interior Ring:"
                partnum += 1
            # Keep the final (possibly shorter) piece.
            if (lineArray.count > 1):
                shapelist.append(lineArray)
            return shapelist

        # Start from a copy of the input so the schema is preserved, then
        # empty it and re-insert the split geometries.
        if arcpy.Exists(outFC):
            arcpy.Delete_management(outFC)
        arcpy.Copy_management(inFC, outFC)
        #origDesc = arcpy.Describe(inFC)
        #sR = origDesc.spatialReference
        #revDesc = arcpy.Describe(outFC)
        #revDesc.ShapeFieldName
        deleterows = arcpy.UpdateCursor(outFC)
        for iDRow in deleterows:
            deleterows.deleteRow(iDRow)
        try:
            del iDRow
            del deleterows
        except:
            pass
        inputRows = arcpy.SearchCursor(inFC)
        outputRows = arcpy.InsertCursor(outFC)
        fields = arcpy.ListFields(inFC)
        numRecords = int(arcpy.GetCount_management(inFC).getOutput(0))
        OnePercentThreshold = numRecords // 100
        #printit(numRecords)
        iCounter = 0
        iCounter2 = 0
        for iInRow in inputRows:
            inGeom = iInRow.shape
            iCounter += 1
            iCounter2 += 1
            if (iCounter2 > (OnePercentThreshold + 0)):
                #printit("Processing Record "+str(iCounter) + " of "+ str(numRecords))
                iCounter2 = 0
            if (inGeom.length > alongDist):
                # Feature longer than the target distance: split it and copy
                # the editable attributes onto every piece.
                shapeList = splitShape(iInRow.shape, alongDist)
                for itmp in shapeList:
                    newRow = outputRows.newRow()
                    for ifield in fields:
                        if (ifield.editable):
                            newRow.setValue(ifield.name, iInRow.getValue(ifield.name))
                    newRow.shape = itmp
                    outputRows.insertRow(newRow)
            else:
                # Short feature: keep as-is.
                outputRows.insertRow(iInRow)
        del inputRows
        del outputRows
        #printit("Done!")
    ###END SPLIT LINE CODE IN A SAME DISTANCE###

    # Create "General" file geodatabase for intermediates.
    WorkFolder = arcpy.env.workspace
    General_GDB = WorkFolder + "\General.gdb"
    arcpy.CreateFileGDB_management(WorkFolder, "General", "CURRENT")
    arcpy.env.workspace = General_GDB

    #Unsplit Line
    LineDissolve = "LineDissolve"
    arcpy.Dissolve_management(Lines, LineDissolve, "", "", "SINGLE_PART")
    LineSplit = "LineSplit"

    #Split Line
    if SplitType == "Split at approximate distance":
        splitline(LineDissolve, LineSplit, DistanceSplit)
    else:
        arcpy.SplitLine_management(LineDissolve, LineSplit)

    #Add fields to LineSplit
    FieldsNames = [
        "LineID", "Direction", "Azimuth", "X_mid", "Y_mid", "AziLine_1",
        "AziLine_2", "Distance"
    ]
    for fn in FieldsNames:
        arcpy.AddField_management(LineSplit, fn, "DOUBLE")

    #Calculate Fields
    # NOTE(review): GetAzimuthPolyline divides by (lastpoint.y - firstpoint.y)
    # and will raise ZeroDivisionError for exactly east-west segments --
    # confirm inputs never produce one.
    CodeBlock_Direction = """def GetAzimuthPolyline(shape):
    radian = math.atan((shape.lastpoint.x - shape.firstpoint.x)/(shape.lastpoint.y - shape.firstpoint.y))
    degrees = radian * 180 / math.pi
    return degrees"""
    CodeBlock_Azimuth = """def Azimuth(direction):
    if direction < 0:
        azimuth = direction + 360
        return azimuth
    else:
        return direction"""
    CodeBlock_NULLS = """def findNulls(fieldValue):
    if fieldValue is None:
        return 0
    elif fieldValue is not None:
        return fieldValue"""
    arcpy.CalculateField_management(LineSplit, "LineID", "!OBJECTID!",
                                    "PYTHON_9.3")
    arcpy.CalculateField_management(LineSplit, "Direction",
                                    "GetAzimuthPolyline(!Shape!)",
                                    "PYTHON_9.3", CodeBlock_Direction)
    arcpy.CalculateField_management(LineSplit, "Direction",
                                    "findNulls(!Direction!)", "PYTHON_9.3",
                                    CodeBlock_NULLS)
    arcpy.CalculateField_management(LineSplit, "Azimuth",
                                    "Azimuth(!Direction!)", "PYTHON_9.3",
                                    CodeBlock_Azimuth)
    # Midpoint coordinates of each split segment.
    arcpy.CalculateField_management(
        LineSplit, "X_mid", "!Shape!.positionAlongLine(0.5,True).firstPoint.X",
        "PYTHON_9.3")
    arcpy.CalculateField_management(
        LineSplit, "Y_mid", "!Shape!.positionAlongLine(0.5,True).firstPoint.Y",
        "PYTHON_9.3")
    # Perpendicular bearings: azimuth +/- 90 degrees, wrapped to [0, 360].
    CodeBlock_AziLine1 = """def Azline1(azimuth):
    az1 = azimuth + 90
    if az1 > 360:
        az1-=360
        return az1
    else:
        return az1"""
    CodeBlock_AziLine2 = """def Azline2(azimuth):
    az2 = azimuth - 90
    if az2 < 0:
        az2+=360
        return az2
    else:
        return az2"""
    arcpy.CalculateField_management(LineSplit, "AziLine_1",
                                    "Azline1(!Azimuth!)", "PYTHON_9.3",
                                    CodeBlock_AziLine1)
    arcpy.CalculateField_management(LineSplit, "AziLine_2",
                                    "Azline2(!Azimuth!)", "PYTHON_9.3",
                                    CodeBlock_AziLine2)
    arcpy.CalculateField_management(LineSplit, "Distance", TransecLength,
                                    "PYTHON_9.3")

    #Generate Azline1 and Azline2: one arm per side of the line.
    spatial_reference = arcpy.Describe(Lines).spatialReference
    Azline1 = "Azline1"
    Azline2 = "Azline2"
    arcpy.BearingDistanceToLine_management(LineSplit, Azline1, "X_mid", "Y_mid",
                                           "Distance", TransecLength_Unit,
                                           "AziLine_1", "DEGREES", "GEODESIC",
                                           "LineID", spatial_reference)
    arcpy.BearingDistanceToLine_management(LineSplit, Azline2, "X_mid", "Y_mid",
                                           "Distance", TransecLength_Unit,
                                           "AziLine_2", "DEGREES", "GEODESIC",
                                           "LineID", spatial_reference)

    #Create Azline and append Azline1 and Azline2
    Azline = "Azline"
    arcpy.CreateFeatureclass_management(arcpy.env.workspace, "Azline",
                                        "POLYLINE", "", "", "",
                                        spatial_reference)
    arcpy.AddField_management(Azline, "LineID", "DOUBLE")
    arcpy.Append_management([Azline1, Azline2], Azline, "NO_TEST")

    #Dissolve Azline: merge the two arms of each transect by LineID.
    Azline_Dissolve = "Azline_Dissolve"
    arcpy.Dissolve_management(Azline, Azline_Dissolve, "LineID", "",
                              "SINGLE_PART")

    #Add Fields to Azline_Dissolve
    FieldsNames2 = ["x_start", "y_start", "x_end", "y_end"]
    for fn2 in FieldsNames2:
        arcpy.AddField_management(Azline_Dissolve, fn2, "DOUBLE")

    #Calculate Azline_Dissolve fields: endpoints of each dissolved transect.
    arcpy.CalculateField_management(
        Azline_Dissolve, "x_start",
        "!Shape!.positionAlongLine(0,True).firstPoint.X", "PYTHON_9.3")
    arcpy.CalculateField_management(
        Azline_Dissolve, "y_start",
        "!Shape!.positionAlongLine(0,True).firstPoint.Y", "PYTHON_9.3")
    arcpy.CalculateField_management(
        Azline_Dissolve, "x_end",
        "!Shape!.positionAlongLine(1,True).firstPoint.X", "PYTHON_9.3")
    arcpy.CalculateField_management(
        Azline_Dissolve, "y_end",
        "!Shape!.positionAlongLine(1,True).firstPoint.Y", "PYTHON_9.3")

    #Generate output file: straight lines between the stored endpoints.
    arcpy.XYToLine_management(Azline_Dissolve, OutputTransect, "x_start",
                              "y_start", "x_end", "y_end", "", "",
                              spatial_reference)

    #Delete General.gdb
    arcpy.Delete_management(General_GDB)
def splitPolyLineIntoSegments(inFC, outFC):
    # Thin wrapper: split every polyline in inFC at its vertices, writing one
    # two-point segment per vertex pair to outFC.
    arcpy.SplitLine_management(inFC, outFC)
except: printMsg("Delete in_memory workspace: " + arcpy.GetMessages(2)) sys.exit(0) #Get input parameter inputFeatures = arcpy.GetParameter(0) try: arcpy.CopyFeatures_management(inputFeatures, ROUTE) printMsg("Copied features into ROUTE feature class") except: printMsg("Error copying features to ROUTE feature class: " + arcpy.GetMessages(2)) try: arcpy.SplitLine_management(ROUTE, ROUTES) printMsg("Split features into ROUTES feature class") except: printMsg("Error splitting ROUTE feature class: " + arcpy.GetMessages(2)) if arcpy.CheckExtension("3D") == "Available": if arcpy.CheckOutExtension("3D") == "CheckedOut": printMsg("Checked out 3D license") try: #Create line of sight arcpy.LineOfSight_3d(DEM, ROUTES, OUTPUT) printMsg("Created line of sight OUTPUT") arcpy.SetParameter(1, arcpy.FeatureSet(OUTPUT)) except: printMsg("Error creating line of sight OUTPUT: " + arcpy.GetMessages(2))
#Set mask to polygon arcpy.env.mask = Polygon #Create FC of polygon centroid Centroid = arcpy.GetParameterAsText(2) arcpy.FeatureToPoint_management(Polygon, Centroid, "CENTROID") arcpy.env.workspace = r"in_memory" #Convert polygon to line PolygonTolineOutput = "LineFC" Polyline = arcpy.PolygonToLine_management(Polygon, PolygonTolineOutput, "IDENTIFY_NEIGHBORS") #Convert line to multipart feature class(each edge is a feature) SplitLineOutput = "SplitLine" SplitLine = arcpy.SplitLine_management(PolygonTolineOutput, SplitLineOutput) #Create list of individual features in multipart feature class def UniqueValues(table, field): """Gets list of unique Values from feature class""" with arcpy.da.SearchCursor(table, [field]) as cursor: return sorted({row[0] for row in cursor}) Sides = UniqueValues(SplitLineOutput, "OBJECTID") #Create feature class for each feature in multipart feature class(feature class created for each edge) for OID in Sides: expression = "OBJECTID =" + str(OID) arcpy.FeatureClassToFeatureClass_conversion(SplitLineOutput, "in_memory",
def prep_data(fdhs):
    """Prepare the scratch GDB data needed to generate the BOMs.

    PARAMETERS
    ----------
    fdhs : list
        A list of FDH IDs for which you need the BOMs

    Relies on module-level names: ``scratch`` (scratch GDB path), ``gdb``
    (source GDB path), ``codeblock`` (code block defining getfdh()) and
    ``get_end_points`` (helper writing line endpoints/midpoints).
    """
    # Projected CRS used below for the geodesic LENGTH_GEODESIC attributes
    # (EPSG:2231).
    crs = arcpy.SpatialReference(2231)
    arcpy.env.overwriteOutput = True
    arcpy.env.workspace = scratch
    print("FILTERING DATA")
    """ If there are any Feature classes or Tables present in the scratch GDB,
    remove all of them """
    fcs = arcpy.ListFeatureClasses()
    for fc in fcs:
        arcpy.Delete_management(scratch + '/' + fc)
    tables = arcpy.ListTables()
    for table in tables:
        arcpy.Delete_management(scratch + '/' + table)

    # The keys present in the following dictionary are the feature classes.
    # Data from these feature classes are gathered to generate the BOM,
    # and the values are the attributes present in those feature classes.
    # These attributes are used below when filtering the data.
    name_dict = {
        'FiberLine': 'cablename',
        'FC_Structure': 'layer',
        'StructureLine': 'layer',
        'fdhpoint': 'fdhid',
        'SplicePoint': 'locationdescription',
        'FiberSlackLoop': 'designid'
    }

    # The following fdh expression generates a query of the form
    # ("fdhid" = 'DIX101d-F31' or "fdhid" = 'DIX101d-F32' or ...)
    # used to select only the required FD Boundaries.
    fdh_exp = "(" + " or ".join(["fdhid = '{0}'".format(x) for x in fdhs]) + ")"
    # NOTE(review): the result of the next line is discarded -- it has no
    # effect and can likely be removed.
    fdh_exp.encode('utf-8').strip()

    # Select only those FDH Boundaries for which the BOMs needs to be generated
    arcpy.Select_analysis(gdb + "\\fdhboundary", scratch + "\\fdhs", fdh_exp)

    """ Exlanations for Queries used inside select_analysis for the for loop part that comes next
    # Query for Structure and Conduit
    # Select only those structures and conduits for which the status is 'Preliminary'
    # and the ones which are present inside the FDH Boundaries we are working on (This part is
    # handled using Intersect_analysis)
    # Then for the next elif part, the queries are much similar to the above queries and so are self explanatory
    # Same goes for final else part
    """
    for fc in name_dict.keys():
        fieldnames = [
            field.name for field in arcpy.ListFields(gdb + "\\" + fc)
        ]
        if fc == "SplicePoint":
            # Splice points are copied wholesale.
            arcpy.Select_analysis(gdb + "\\" + fc, scratch + "\\" + fc)
        elif fc in ['FC_Structure', 'StructureLine']:
            # Preliminary structures/conduits, clipped to the FDH boundaries.
            arcpy.Select_analysis(gdb + "\\" + fc, scratch + "\\temp_" + fc,
                                  "inventory_status_code = 'Preliminary'")
            arcpy.Intersect_analysis(
                [scratch + "\\temp_" + fc, scratch + "\\fdhs"],
                scratch + "\\" + fc)
        elif "inventory_status_code" in fieldnames:
            # Both a 'Preliminary-only' subset (pre_<fc>) and the full subset.
            arcpy.Select_analysis(
                gdb + "\\" + fc, scratch + "\\pre_" + fc,
                "(" + " or ".join(
                    ["{0} like '{1}%'".format(name_dict[fc], x) for x in fdhs])
                + ") and inventory_status_code = 'Preliminary'")
            arcpy.Select_analysis(
                gdb + "\\" + fc, scratch + "\\" + fc,
                "(" + " or ".join(
                    ["{0} like '{1}%'".format(name_dict[fc], x) for x in fdhs])
                + ")")
            arcpy.AddField_management(scratch + "\\pre_" + fc, "fdhid", "TEXT")
            arcpy.CalculateField_management(
                scratch + "\\pre_" + fc, "fdhid",
                "getfdh(!{0}!)".format(name_dict[fc]), "PYTHON_9.3", codeblock)
        else:
            arcpy.Select_analysis(
                gdb + "\\" + fc, scratch + "\\" + fc,
                "(" + " or ".join(
                    ["{0} like '{1}%'".format(name_dict[fc], x) for x in fdhs])
                + ")")

        # Make sure there is an 'fdhid' column for all of the feature classes.
        # There is no special reason for this; it just makes some of the other
        # geo-processing operations faster.
        fieldnames = [
            field.name for field in arcpy.ListFields(scratch + "\\" + fc)
        ]
        if "fdhid" not in fieldnames:
            arcpy.AddField_management(scratch + "\\" + fc, "fdhid", "TEXT")
            arcpy.CalculateField_management(
                scratch + "\\" + fc, "fdhid",
                "getfdh(!{0}!)".format(name_dict[fc]), "PYTHON_9.3", codeblock)

    # Select only Access Fiber; changed 12/07 to grab all fiber intersecting
    # an FDH, and included the 'Lateral' infrastructure class query so that
    # 288 cts are counted.
    arcpy.Intersect_analysis([gdb + "\\FiberLine", scratch + "\\fdhs"],
                             scratch + "\\af_1", '', '', 'LINE')
    arcpy.Select_analysis(
        scratch + "\\af_1", scratch + "\\af",
        "infrastructureclass = 'Access' OR infrastructureclass = 'Lateral'")

    # Get the end points of the Access Fiber
    get_end_points(scratch + "\\af", scratch + "\\af_ends", "BOTH_ENDS")

    # Get those fiber ends which intersects with Splice Point
    arcpy.SpatialJoin_analysis(scratch + "\\SplicePoint",
                               scratch + "\\af_ends",
                               scratch + "\\af_sc_join", "JOIN_ONE_TO_MANY",
                               "KEEP_ALL", "", "INTERSECT", "")

    # Dissolve so there is only one entry even where multiple fibercables
    # intersect a splice point; only the fibercable with the maximum fiber
    # count is considered, hence ["fibercount", "MAX"].
    arcpy.Dissolve_management(
        scratch + "\\af_sc_join", scratch + "\\final_scs", [
            "locationdescription", "splice_type", "splice_count", "fdhid",
            "fiber_assignments", "spliceenclosuremodelnumber"
        ], [["fibercount", "MAX"]])  # "cable_size",
    arcpy.AlterField_management(scratch + "\\final_scs", "MAX_fibercount",
                                "fcount", "fcount")
    arcpy.AlterField_management(scratch + "\\final_scs",
                                "spliceenclosuremodelnumber", "sc_size",
                                "sc_size")

    # Build final_vaults with a 'pvault' flag ('Y'/'N') -- changed 12/07/2020
    # to only include preliminary structures.
    pvault = 'N'
    # Added prelim_vaults 12/07/2020
    arcpy.Select_analysis(gdb + "\\FC_Structure", scratch + "\\prelim_vaults",
                          "inventory_status_code = 'Preliminary'")
    arcpy.AddField_management(scratch + "\\FC_Structure", "pvault", "TEXT")
    arcpy.MakeFeatureLayer_management(scratch + "\\FC_Structure", "vaults")
    # arcpy.CalculateField_management("vaults", "pvault", "'N'", "PYTHON_9.3", "")
    arcpy.SelectLayerByLocation_management("vaults", "INTERSECT",
                                           scratch + "\\prelim_vaults", "",
                                           "NEW_SELECTION")
    # NOTE(review): the selected (preliminary-intersecting) vaults are flagged
    # 'N' here while the analogous fiber step below flags 'Y' -- confirm this
    # is intended.
    arcpy.CalculateField_management("vaults", "pvault", "'N'", "PYTHON_9.3",
                                    "")
    arcpy.SelectLayerByAttribute_management("vaults", "CLEAR_SELECTION")
    arcpy.CopyFeatures_management("vaults", scratch + "\\final_vaults")

    # Find out whether an access fibercable is an FDH cable: any access
    # fibercable that intersects an FDH point gets fdhcable = 'Y'.  The final
    # result is copied into the scratch GDB just like vaults.
    arcpy.AddField_management(scratch + "\\af", "fdhcable", "TEXT")
    arcpy.MakeFeatureLayer_management(scratch + "\\af", "fiber")
    arcpy.SelectLayerByLocation_management("fiber", "INTERSECT",
                                           scratch + "\\fdhpoint", "",
                                           "NEW_SELECTION")
    arcpy.CalculateField_management("fiber", "fdhcable", "'Y'", "PYTHON_9.3",
                                    "")
    arcpy.SelectLayerByAttribute_management("fiber", "CLEAR_SELECTION")
    arcpy.CopyFeatures_management("fiber", scratch + "\\final_fiber")
    arcpy.AddGeometryAttributes_management(scratch + "\\final_fiber",
                                           "LENGTH_GEODESIC", "FEET_US", "",
                                           crs)

    # Conduit: keep 2" and 1.25" ducts and determine which 1.25" runs share a
    # trench with a 2" run.
    arcpy.Select_analysis(scratch + "\\StructureLine", scratch + "\\all_con",
                          "diameter = '2inch' or diameter = '1.25inch'")
    arcpy.AddField_management(scratch + "\\all_con", "shared", "TEXT")
    arcpy.CalculateField_management(scratch + "\\all_con", "shared", "'N'",
                                    "PYTHON_9.3", "")
    # Split to short segments and count conduits per trench by buffering the
    # segment midpoints and summing the per-midpoint 'trench' markers.
    arcpy.SplitLine_management(scratch + "\\all_con", scratch + "\\con_split")
    get_end_points(scratch + "\\con_split", scratch + "\\con_mids", "MID")
    arcpy.AddField_management(scratch + "\\con_mids", "trench", "SHORT")
    arcpy.CalculateField_management(scratch + "\\con_mids", "trench", "1",
                                    "PYTHON_9.3", "")
    arcpy.Buffer_analysis(scratch + "\\con_mids", scratch + "\\con_mid_buff",
                          "1.5 FEET", "FULL", "ROUND")
    arcpy.Dissolve_management(scratch + "\\con_mid_buff",
                              scratch + "\\con_mid_diss", "", "",
                              "SINGLE_PART", "")
    arcpy.AddField_management(scratch + "\\con_mid_diss", "mid_id", "LONG")
    arcpy.CalculateField_management(scratch + "\\con_mid_diss", "mid_id",
                                    "!objectid!", "PYTHON_9.3", "")
    arcpy.SpatialJoin_analysis(scratch + "\\con_mid_buff",
                               scratch + "\\con_mid_diss",
                               scratch + "\\con_join_temp", "JOIN_ONE_TO_ONE",
                               "KEEP_ALL", "", "INTERSECT", "")
    arcpy.Dissolve_management(scratch + "\\con_join_temp",
                              scratch + "\\con_mid_diss_temp", ["mid_id"],
                              [["trench", "SUM"]], "SINGLE_PART", "")
    arcpy.AlterField_management(scratch + "\\con_mid_diss_temp", "SUM_trench",
                                "trench", "trench")
    arcpy.SpatialJoin_analysis(scratch + "\\con_split",
                               scratch + "\\con_mid_diss_temp",
                               scratch + "\\con_join", "JOIN_ONE_TO_ONE",
                               "KEEP_ALL", "", "INTERSECT", "")
    # Mark 1.25" conduit lying within 2 ft of a 2" conduit as 'shared'.
    arcpy.Select_analysis(scratch + "\\con_join", scratch + "\\con2",
                          "diameter = '2inch'")
    arcpy.Select_analysis(scratch + "\\con_join", scratch + "\\con125",
                          "diameter = '1.25inch'")
    arcpy.Buffer_analysis(scratch + "\\con2", scratch + "\\con2_buff",
                          "2 FEET", "FULL", "ROUND", "ALL")
    arcpy.MakeFeatureLayer_management(scratch + "\\con125", "con125")
    arcpy.SelectLayerByLocation_management("con125", "WITHIN",
                                           scratch + "\\con2_buff", "",
                                           "NEW_SELECTION")
    arcpy.CalculateField_management("con125", "shared", "'Y'", "PYTHON_9.3",
                                    "")
    arcpy.SelectLayerByAttribute_management("con125", "CLEAR_SELECTION")
    arcpy.Merge_management([scratch + "\\con2", "con125"],
                           scratch + "\\final_con")
    arcpy.AddGeometryAttributes_management(scratch + "\\final_con",
                                           "LENGTH_GEODESIC", "FEET_US", "",
                                           crs)
    arcpy.Dissolve_management(scratch + "\\final_con", scratch + "\\trench",
                              ["fdhid"])
    arcpy.AddGeometryAttributes_management(scratch + "\\trench",
                                           "LENGTH_GEODESIC", "FEET_US", "",
                                           crs)
    print("DATA FILTERATION DONE..")
Seattleroadsproj = os.path.join(PSgdb, 'Seattle_roadproj') ######################################################################################################################### #ANALYSIS #Compare to Seattle roads slope values. Apply same method to Seattle road dataset roadproj = arcpy.Project_management(Seattleroads, Seattleroadsproj, UTM10) #Densify roads arcpy.CopyFeatures_management(Seattleroadsproj, Seattleroadsproj + 'dens10m') arcpy.Densify_edit(Seattleroadsproj + 'dens10m', densification_method='DISTANCE', distance='10', max_deviation='1.5') #Split at vertices arcpy.SplitLine_management(Seattleroadsproj + 'dens10m', Seattleroadsproj + 'split10') #Compute statistics arcpy.PolylineToRaster_conversion(Seattleroadsproj + 'split10', 'OBJECTID_1', sroadsras, cell_assignment='MAXIMUM_COMBINED_LENGTH', priority_field='SURFACEWID', cellsize=NED19proj) ZonalStatisticsAsTable(sroadsras, 'Value', NED19proj, out_table=srangetab19, statistics_type='RANGE', ignore_nodata='NODATA') ZonalStatisticsAsTable(sroadsras,
devpath + "GIS_DEV.CCL_Resolution", "LRS_KEY LINE BEG_CNTY_LOGMILE END_CNTY_LOGMILE", devpath + "LNCL_EVENT", "LRS_KEY LINE BEG_CNTY_LOGMILE END_CNTY_LOGMILE", "INTERSECT", r"Database Connections/SDEDEV_GISDEV.sde/GIS_DEV.CCL_Lanes", "LRS_KEY LINE BEG_CNTY_LOGMILE END_CNTY_LOGMILE", "NO_ZERO", "FIELDS", "INDEX") arcpy.MakeRouteEventLayer_lr(clrs, "LRS_KEY", devpath + "CCL_Resolution", "LRS_KEY LINE BEG_CNTY_LOGMILE END_CNTY_LOGMILE", "CCL_Resolution_Events", "#", "ERROR_FIELD", "NO_ANGLE_FIELD", "NORMAL", "ANGLE", "LEFT", "POINT") arcpy.FeatureClassToGeodatabase_conversion("CCL_Resolution_Events", devpath + "KDOT_CCL_WORKSPACE") arcpy.SplitLine_management( "Database Connections\SDEDEV.sde\SDE.KDOT_CCL_WORKSPACE\SDE.CCL_Resolution_Events", "Database Connections\SDEDEV.sde\SDE.KDOT_CCL_WORKSPACE\CCL_LN_SPEC") #locate resolution mileage along state route to cross counties arcpy.LocateFeaturesAlongRoutes_lr( "Database Connections\SDEDEV.sde\SDE.KDOT_CCL_WORKSPACE\SDE.CCL_LN_SPEC", "Database Connections\SDEDEV.sde\SDE.KDOT_ROADWAY\SDE.SMLRS", "LRS_ROUTE", "0 Meters", "Database Connections\SDEDEV.sde\CCL_ROUTE_CALIBRATE_SM", "LRS_ROUTE LINE BEG_STATE_LOGMILE END_STATE_LOGMILE", "FIRST", "DISTANCE", "ZERO", "FIELDS", "M_DIRECTON") #Make the route event line layer matching hte state routes arcpy.MakeRouteEventLayer_lr( smlrs, "LRS_ROUTE", "SDE.CCL_ROUTE_CALIBRATE_SM", "LRS_ROUTE LINE BEG_STATE_LOGMILE END_STATE_LOGMILE", "CCL_ROUTE_SM_Events", "#", "ERROR_FIELD", "NO_ANGLE_FIELD", "NORMAL", "ANGLE", "LEFT", "POINT") #dissolve the state route events to find the begin logmile in the city
#Se estima la division de apartamentos en funcion de la geometria del edificio, # asimilando la zona interior (corredor) a un rectangulo: #1) Si la zona corredor es mas ancho que profundo se estima que # habra una vivienda a cada lado. #2) Si la zona corredor es mas profundo que ancho se estima que # habra una vivienda delante y otra detras. #Se empieza calculando el rectangulo equivalente output = "AUXILIAR2\\" + str(row.FID) + "_6.shp" output2 = "AUXILIAR3\\" + str(row.FID) + "_20b.shp" arcpy.MinimumBoundingGeometry_management( output, output2, "RECTANGLE_BY_WIDTH") output = "AUXILIAR3\\" + str(row.FID) + "_20c.shp" arcpy.FeatureToLine_management(output2, output) output2 = "AUXILIAR3\\" + str(row.FID) + "_20d.shp" arcpy.SplitLine_management(output, output2) outputb = "AUXILIAR3\\" + str(row.FID) + "_20d.dbf" arcpy.AddField_management(outputb, "LARGO", "FLOAT") arcpy.CalculateField_management(outputb, "LARGO", "!SHAPE.length@METERS!", "PYTHON_9.3") output = "AUXILIAR3\\" + str(row.FID) + "_20db.shp" output3 = "AUXILIAR3\\" + str(row.FID) + "_20dc.shp" #Aqui se comprueba si la zona corredor es mas ancha que profunda row3s = arcpy.SearchCursor(outputb) largomax = 0 for row3 in row3s: if row3.LARGO > largomax: largomax = row3.LARGO largomax = largomax - 0.1
def NrRd(city, inDir, workFld):
    # type: (str, str, str) -> None
    """Near-Road (NrRd) analysis for one EnviroAtlas community.

    Builds two sets of outputs from the community land cover ('LC' in the
    Freq geodatabase) and pre-processed NavTeq streets:
      * a population table (<city>_NrRd_Pop in the Final gdb) of residents
        buffered / not buffered from roads by >= 25% tree cover, and
      * a percent-forest analysis line (<city>_NrRd_PFor in the Final gdb).

    Side effects: creates <city>_NrRd.gdb under workFld, writes three report
    log files under workFld/Logs, and checks out the Spatial Analyst
    extension.  Python 2 / arcpy (ArcMap-era) code.

    Parameters:
        city    -- community name used as a prefix for gdb and output names
        inDir   -- input directory (Prj files, Input.gdb, NavTeq_D.gdb, Scratch.gdb)
        workFld -- working folder holding the Freq/Final gdbs and Logs folder
    """
    import traceback, time, arcpy, os
    from arcpy import env
    arcpy.CheckOutExtension('Spatial')

    #-------- DIRECTORY SETUP ------------------------------------------------
    """ Working Directory """
    # Bare except: CreateFileGDB raises if the gdb already exists; that case
    # is expected and ignored.  NOTE(review): this also hides real failures
    # (e.g. bad path) -- consider catching arcpy.ExecuteError only.
    try:
        arcpy.CreateFileGDB_management(str(workFld), str(city) + '_NrRd.gdb')
    except:
        print 'NrRd GDB already exists'
    workDir = str(workFld) + '/' + city + '_NrRd.gdb'
    arcpy.env.workspace = workDir

    """ Report File Directory """
    reportfileDir = str(workFld) + '/Logs'
    """ Frequent Directory """
    freqDir = str(workFld) + '/' + city + '_Freq.gdb'
    """ Final Geodatabase """
    finalDir = str(workFld) + '/' + city + '_Final.gdb'
    """ Projection File Directory """
    prjDir = str(inDir) + '/Prj'
    """ Input Roads Data """
    # NOTE(review): navDir is never referenced again in this function;
    # the roads are instead copied from NavTeq_D.gdb below.
    navDir = inDir + '/Input.gdb/Streets_1234_Alb'

    """ Set Workspace Environments """
    # NOTE(review): the documented arcpy property is env.scratchWorkspace;
    # env.scratch may be a typo and silently have no effect -- confirm.
    arcpy.env.scratch = str(inDir) + '/Scratch.gdb'
    arcpy.env.overwriteOutput = True

    #-----------------------------------------------------------------------------
    # BEGIN ANALYSIS
    #-----------------------------------------------------------------------------
    try:
        #-------- LOGFILE CREATION ---------------------------------------------
        """ Create report file for each metric """
        # popRF logs the population metric steps, pctRF the percent-forest steps.
        tmpName = city + '_NrRd_Pop_' + time.strftime('%Y%m%d_%H-%M')
        reportfileName = reportfileDir + '/' + tmpName + '.txt'
        popRF = open(reportfileName, 'w')
        tmpName = city + '_NrRd_PFor_' + time.strftime('%Y%m%d_%H-%M')
        reportfileName = reportfileDir + '/' + tmpName + '.txt'
        pctRF = open(reportfileName, 'w')
        # Reuse log: append to the newest existing <city>_Reuse* file, or
        # create a fresh one if none exists (IndexError on empty loglist).
        try:
            loglist = sorted(f for f in os.listdir(reportfileDir)
                             if f.startswith(str(city) + '_Reuse'))
            tmpName = loglist[-1]
        except:
            tmpName = city + '_Reuse_' + time.strftime('%Y%m%d_%H-%M') + '.txt'
        reportfileName = reportfileDir + '/' + tmpName
        try:
            ReuseRF = open(reportfileName, 'a')
        except:
            ReuseRF = open(reportfileName, 'w')
            print 'Creating Reuse Log'

        """ Write out first lines of report files """
        # The --ANALYST-TIME-- placeholders record pre-processing steps done
        # outside this script; timestamps are filled in manually later.
        print 'Near Road Start Time: ' + time.asctime()
        popRF.write(
            "Begin with 2011 NavTeq Streets Layer and 1-Meter Land Cover Classification for the EnviroAtlas community created by the US EPA EnviroAtlas Team.--ANALYST-TIME--\n"
        )
        pctRF.write(
            "Begin with 2011 NavTeq Streets Layer and 1-Meter Land Cover Classification for the EnviroAtlas community created by the US EPA EnviroAtlas Team.--ANALYST-TIME--\n"
        )
        popRF.write("Project NavTeq Streets layer into UTM.--ANALYST-TIME--\n")
        pctRF.write("Project NavTeq Streets layer into UTM.--ANALYST-TIME--\n")
        popRF.write(
            "Clip NavTeq Streets Layer to 1-km Buffer of the EnviroAtlas community boundary.--ANALYST-TIME--\n"
        )
        pctRF.write(
            "Clip NavTeq Streets Layer to 1-km Buffer of the EnviroAtlas community boundary.--ANALYST-TIME--\n"
        )
        popRF.write(
            "Extract roads from NavTeq Streets where Func_Class = 1-4 to a new layer.--ANALYST-TIME--\n"
        )
        pctRF.write(
            "Extract roads from NavTeq Streets where Func_Class = 1-4 to a new layer.--ANALYST-TIME--\n"
        )
        popRF.write(
            "Add Field to the new streets layer: LANES (double) and calculate where LANES = TO_LANES + FROM_LANES.--ANALYST-TIME--\n"
        )
        pctRF.write(
            "Add Field to the new streets layer: LANES (double) and calculate where LANES = TO_LANES + FROM_LANES.--ANALYST-TIME--\n"
        )
        popRF.write(
            "For any records where LANES = 0, use Esri Aerial basemap to fill in correct lane value.--ANALYST-TIME--\n"
        )
        pctRF.write(
            "For any records where LANES = 0, use Esri Aerial basemap to fill in correct lane value.--ANALYST-TIME--\n"
        )

        #-------- PROCESSING LAYERS ----------------------------------------------
        """ Set Environments """
        arcpy.env.extent = freqDir + '/LC'
        arcpy.env.snapRaster = freqDir + '/LC'
        # Cut-off used later to drop short interior buffer lines (ramp loops,
        # median gaps) from the analysis-line layer.
        Expression = 'Shape_Length <= 1050'

        """-------- Reclassify LC into Binary Forest ----------------------------- """
        # 1 = forest-like classes (40 trees, 82 woody wetland-ish, 91), 0 = all
        # other land cover codes.  Reused across metrics via the Freq gdb.
        if arcpy.Exists(str(freqDir) + '/MForestIO') == False:
            outReclass = arcpy.sa.Reclassify(
                str(freqDir) + '/LC', 'Value',
                arcpy.sa.RemapValue([[0, 0], [10, 0], [20, 0], [21, 0],
                                     [22, 0], [30, 0], [40, 1], [52, 0],
                                     [70, 0], [80, 0], [82, 1], [91, 1],
                                     [92, 0]]))
            outReclass.save(str(freqDir) + '/MForestIO')
            # NOTE(review): the second write below goes to popRF again; the
            # mirrored pattern everywhere else suggests it was meant for pctRF.
            popRF.write(
                "Reclassify the Land Cover into Binary Forest. REPLACE-MFE--" +
                time.strftime('%Y%m%d--%H%M%S') + '--\n')
            popRF.write(
                "Reclassify the Land Cover into Binary Forest. REPLACE-MFE--" +
                time.strftime('%Y%m%d--%H%M%S') + '--\n')
            ReuseRF.write("MForestIO--" + time.strftime('%Y%m%d--%H%M%S') +
                          '--\n')
        else:
            popRF.write(
                "Reclassify the Land Cover into Binary Forest. REPLACE-MFE--MForestIO"
                + '--\n')
            pctRF.write(
                "Reclassify the Land Cover into Binary Forest. REPLACE-MFE--MForestIO"
                + '--\n')

        """-------- Create 29m Moving Window ------------------------------------- """
        # Sum of forest cells within a 14.5 m-radius circle (29 m diameter
        # window) around each cell.
        outFocalStat = arcpy.sa.FocalStatistics(
            freqDir + '/MForestIO', arcpy.sa.NbrCircle(14.5, 'MAP'), 'SUM',
            'NODATA')
        outFocalStat.save('MFor_29C')
        popRF.write(
            "Run Focal Statistics on the Forest Binary Raster with a circular cell neighborhood with a radius of 14.5m in map units--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        pctRF.write(
            "Run Focal Statistics on the Forest Binary Raster with a circular cell neighborhood with a radius of 14.5m in map units--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        """ Figure out the correct UTM Zone """
        # Assumes the LC spatial reference name ends in the zone number
        # (e.g. '...UTM_Zone_10N') -- TODO confirm for all communities.
        prjNumb = arcpy.Describe(str(freqDir) + '/LC').spatialReference.name
        prjNumb = prjNumb[-3:]
        prjfile = prjDir + '/NAD 1983 UTM Zone ' + prjNumb + '.prj'

        """ -------- Create Road Buffer Lines ----------------------------------"""
        """ Create Road Polygons """
        arcpy.CopyFeatures_management(
            str(inDir) + '/NavTeq_D.gdb/' + str(city) + '_NavTeq_D', 'NavTeq_D')
        arcpy.AddField_management('NavTeq_D', 'HalfWidth', 'DOUBLE')
        # Half the road width, used as a variable buffer distance to turn
        # centerlines into road-surface polygons.
        arcpy.CalculateField_management('NavTeq_D', 'HalfWidth', '!Width! / 2',
                                        'PYTHON_9.3')
        popRF.write(
            "Add Field to streets layer: HALFWIDTH (double) and calculate where HALFWIDTH = LANES * 3.6576 / 2.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        pctRF.write(
            "Add Field to streets layer: HALFWIDTH (double) and calculate where HALFWIDTH = LANES * 3.6576 / 2.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        arcpy.Buffer_analysis('NavTeq_D', 'RoadEdge', 'HalfWidth', 'FULL',
                              'FLAT', 'ALL')
        # NOTE(review): this recalculation of HalfWidth is identical to the one
        # above and appears redundant -- confirm before removing.
        arcpy.CalculateField_management('NavTeq_D', 'HalfWidth', '!Width! / 2',
                                        'PYTHON_9.3')
        popRF.write(
            "Buffer streets using the value in HALFWIDTH with options FULL, FLAT, ALL.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        pctRF.write(
            "Buffer streets using the value in HALFWIDTH with options FULL, FLAT, ALL.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        """ Create road buffer """
        arcpy.Buffer_analysis('RoadEdge', 'RoadBuffer', '11.5 Meters', 'FULL',
                              'FLAT', 'ALL')
        popRF.write(
            "Rebuffer the buffered streets by 11.5 meters with options FULL, FLAT, ALL.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        pctRF.write(
            "Rebuffer the buffered streets by 11.5 meters with options FULL, FLAT, ALL.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        """ Convert the buffer into lines """
        arcpy.PolygonToLine_management('RoadBuffer', 'RdBuffLine')
        popRF.write(
            "Convert the resulting polygons into polylines - referred to as analysis lines.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        pctRF.write(
            "Convert the resulting polygons into polylines - referred to as analysis lines.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        """ Remove interior lines based on cut-off point """
        # Short closed rings (<= 1050 map units) are interior artifacts of the
        # buffer (medians, ramp loops); delete them, keep the rest.
        arcpy.MakeFeatureLayer_management('RdBuffLine', 'BuffLine_lyr')
        arcpy.SelectLayerByAttribute_management('BuffLine_lyr', 'NEW_SELECTION',
                                                Expression)
        arcpy.DeleteFeatures_management('BuffLine_lyr')
        arcpy.CopyFeatures_management('BuffLine_lyr', 'BuffLineUse')
        popRF.write(
            "Delete analysis lines that are unnecessary for analysis, for example, lines in between two lanes of a divided highway and lines on the interior of a freeway ramp.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        pctRF.write(
            "Delete analysis lines that are unnecessary for analysis, for example, lines in between two lanes of a divided highway and lines on the interior of a freeway ramp.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        """-------- Calculate Forest Area -----------------------------"""
        """ Extract the tree values """
        outExtractByMask = arcpy.sa.ExtractByMask(workDir + '/MFor_29C',
                                                  'BuffLineUse')
        outExtractByMask.save('ForBuff')
        popRF.write(
            "Extract the Focal Statistics Raster using the analysis lines.--" +
            time.strftime('%Y%m%d--%H%M%S') + '--\n')
        pctRF.write(
            "Extract the Focal Statistics Raster using the analysis lines.--" +
            time.strftime('%Y%m%d--%H%M%S') + '--\n')

        #-------- POPULATION ANALYSIS ---------------------------------------------
        """ Reclassify into sufficient and insufficent tree buffer. """
        # 1 = below 25% tree cover in the 29m window, 2 = at/above 25%.
        # NOTE(review): remap upper bound is 620 here but the log text says
        # 613 (the window max) -- harmless if values never exceed 613; confirm.
        outReclass2 = arcpy.sa.Reclassify(
            'ForBuff', 'Value',
            arcpy.sa.RemapRange([[0, 154, 1], [155, 620, 2]]))
        outReclass2.save('ForBinary')
        popRF.write(
            "Reclassify the extracted raster into above and below 25% tree cover: 0-154 = 1; 155-613 = 2.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        """ Create lines of sufficent and insufficient tree buffer """
        arcpy.RasterToPolygon_conversion('ForBinary', 'For_YN', 'NO_SIMPLIFY')
        popRF.write(
            "Convert the reclassified raster into a polygon WITHOUT simplifying.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        arcpy.Intersect_analysis(['BuffLineUse', 'For_YN', freqDir + '/BG'],
                                 'Line_YN')
        popRF.write(
            "Intersect the analysis line with the polygons and the community block groups, splitting the analysis line into pieces of greater than and less than 25% tree cover within each block group.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        arcpy.AddField_management('Line_YN', 'KMs', 'FLOAT')
        arcpy.CalculateField_management('Line_YN', 'KMs',
                                        '!shape.length@kilometers!',
                                        'PYTHON_9.3')
        popRF.write(
            "Add a new field to the analysis line: Length_KM (double) and calculate the geometry of the lines using length in kilometers.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        """ Calcualte statistics on road lenghts """
        # gridcode 2 = sufficient (>=25%) cover, gridcode 1 = insufficient.
        arcpy.Select_analysis('Line_YN', 'Line_Y', '"gridcode" = 2')
        arcpy.Statistics_analysis('Line_Y', 'KMpBG_Y', [['KMs', 'SUM']],
                                  [['bgrp']])
        arcpy.Select_analysis('Line_YN', 'Line_N', '"gridcode" = 1')
        arcpy.Statistics_analysis('Line_N', 'KMpBG_N', [['KMs', 'SUM']],
                                  [['bgrp']])
        popRF.write(
            "Summarize the analysis line layer by block group and greater than vs less than 25% tree cover where the summary statistics is the sum of Length_KM.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        """ Create full buffer of roads for popualtion counts """
        # LEFT of the analysis line faces away from the road; 288.5m out plus
        # 11.5m back toward the road gives the 300m population zone.
        arcpy.Buffer_analysis('Line_YN', 'YN_289L', '288.5 Meters', 'LEFT',
                              'FLAT', 'ALL')
        arcpy.Buffer_analysis('Line_YN', 'YN_11R', '11.5 Meters', 'RIGHT',
                              'FLAT', 'ALL')
        arcpy.Buffer_analysis('Line_YN', 'YN_14L', '14.5 Meters', 'LEFT',
                              'FLAT', 'ALL')
        arcpy.Merge_management(['YN_289L', 'YN_11R'], 'YN_300')
        popRF.write(
            "Buffer the analysis line twice: by 288.5m with LEFT, FLAT, ALL and by 11.5m with RIGHT, FLAT, ALL. Merge the two buffers together to create the population analysis zone.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        """ Create insufficient buffer area """
        arcpy.SplitLine_management('Line_N', 'Line_N_Split')
        arcpy.Buffer_analysis('Line_N_Split', 'N_289L_ND', '288.5 Meters',
                              'LEFT', 'FLAT', 'NONE')
        arcpy.Buffer_analysis('Line_N_Split', 'N_289L_D', '288.5 Meters',
                              'LEFT', 'FLAT', 'ALL')
        arcpy.Merge_management(['N_289L_D', 'N_289L_ND', 'YN_11R', 'YN_14L'],
                               'N_300_ND')
        arcpy.Dissolve_management('N_300_ND', 'N_300')
        popRF.write(
            "Buffer the analysis line twice again: by 14.5m with LEFT, FLAT, ALL and by 11.5m with RIGHT, FLAT, ALL. Select the analysis line pieces with grid_code = 1 and buffer by 288.5m with LEFT, FLAT, ALL. Merge the three buffers together to identify areas of less than 25% tree cover.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        """ Create sufficient buffer area """
        arcpy.Erase_analysis('YN_300', 'N_300', 'BuffSuff')
        popRF.write(
            "Erase the areas of less than 25% tree cover from the population analysis area to identify areas buffered by greater than 25% tree cover.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        """ Create real insufficient buffer area """
        arcpy.Erase_analysis('YN_300', 'BuffSuff', 'BuffInSuff')
        popRF.write(
            "Clip the area buffered by less than 25% tree cover to the population analysis zone for consistency's sake.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        """ Convert sufficient and insufficient areas into Albers and rasters """
        # prjfile is rebound here; the UTM prj computed earlier is not used
        # again in this function.
        prjfile = prjDir + '/USA Contiguous Albers Equal Area Conic USGS.prj'
        arcpy.Project_management('BuffInSuff', 'BuffInSuff_Alb', prjfile)
        arcpy.Project_management('BuffSuff', 'BuffSuff_Alb', prjfile)
        popRF.write(
            "Project both the less than and greater than areas into Albers.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        arcpy.AddField_management('BuffInSuff_Alb', 'InSuff', 'SHORT')
        arcpy.CalculateField_management('BuffInSuff_Alb', 'InSuff', '1',
                                        'PYTHON_9.3')
        arcpy.AddField_management('BuffSuff_Alb', 'Suff', 'SHORT')
        arcpy.CalculateField_management('BuffSuff_Alb', 'Suff', '1',
                                        'PYTHON_9.3')
        popRF.write(
            "Add a field to each polygon layer: Value (short) and calculate where Value=1.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        """ Set Environments """
        # Align to the dasymetric population raster for the zonal work below.
        arcpy.env.snapRaster = freqDir + '/Dasy'
        arcpy.env.extent = freqDir + '/Dasy'

        """ Convert Rasters to Polygons """
        arcpy.PolygonToRaster_conversion('BuffInSuff_Alb', 'InSuff',
                                         'InSuff_R', 'Maximum_Area', '', 30)
        arcpy.PolygonToRaster_conversion('BuffSuff_Alb', 'Suff', 'Suff_R',
                                         'Maximum_Area', '', 30)
        popRF.write("Convert each polygon layer into a raster. --" +
                    time.strftime('%Y%m%d--%H%M%S') + '--\n')

        """ Extract by Mask and Calculate Zonal Statistics for Insufficient and Sufficient Areas """
        for val in ('InSuff', 'Suff'):
            EbM = arcpy.sa.ExtractByMask(freqDir + '/Dasy', val + '_R')
            EbM.save(val + '_Pop')
            arcpy.sa.ZonalStatisticsAsTable(freqDir + '/BG_Alb', 'bgrp',
                                            val + '_Pop', 'Pop_' + str(val),
                                            'DATA', 'SUM')
        popRF.write(
            "Extract by Mask the EnviroAtlas Dasymetric (2011/October 2015) within each of the rasterized zones.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        popRF.write(
            "Calculate Zonal Statistics as a Table for the two extracted dasymetric rasters with the zones being the 2010 block groups within the EnviroAtlas community boundary.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        """-------- Calculate Total Dasy Population, if necessary ------------------ """
        """ Use the existing data """
        fieldNames = [f.name for f in arcpy.ListFields(freqDir + '/BG_Alb')]
        if 'Dasy_Pop' in fieldNames:
            popRF.write(
                "Calculate Zonal Statistics as a Table for the EnviroAtlas Dasymetrics (2011/October 2015) with the zones being the 2010 block groups within the EnviroAtlas community boundary. Add resulting population sums to the community block groups as attribute Dasy_Pop--Dasy_Pop"
                + '--\n')
        else:
            """ Create population data """
            arcpy.AddField_management(freqDir + '/BG_Alb', 'Dasy_Pop', 'LONG')
            arcpy.sa.ZonalStatisticsAsTable(freqDir + '/BG_Alb', 'bgrp',
                                            freqDir + '/Dasy',
                                            freqDir + '/Dasy_ZS', '', 'SUM')
            arcpy.JoinField_management(freqDir + '/BG_Alb', 'bgrp',
                                       freqDir + '/Dasy_ZS', 'bgrp', ['SUM'])
            arcpy.CalculateField_management(freqDir + '/BG_Alb', 'Dasy_Pop',
                                            '!SUM!', 'PYTHON_9.3')
            arcpy.DeleteField_management(freqDir + '/BG_Alb', ['SUM'])
            # Propagate Dasy_Pop to the (non-Albers) BG copy as well.
            arcpy.JoinField_management(freqDir + '/BG', 'bgrp',
                                       freqDir + '/BG_Alb', 'bgrp',
                                       ['Dasy_Pop'])
            popRF.write(
                "Calculate Zonal Statistics as a Table for the EnviroAtlas Dasymetrics (2011/October 2015) with the zones being the 2010 block groups within the EnviroAtlas community boundary. Add resulting population sums to the community block groups as attribute Dasy_Pop.--"
                + time.strftime('%Y%m%d--%H%M%S') + '--\n')
            ReuseRF.write('Dasy_Pop--' + time.strftime('%Y%m%d--%H%M%S') +
                          '--\n')

        """-------- Create Final Table --------------------------------------------- """
        arcpy.TableToTable_conversion(freqDir + '/BG', workDir, 'NrRd_Pop', '',
                                      'bgrp')
        # Strip every demographic field carried over from BG; only bgrp and
        # Dasy_Pop are wanted in the metric table.
        arcpy.DeleteField_management('NrRd_Pop', [
            'NonWhite', 'PLx2_Pop', 'PLx2_Pct', 'SUM_HOUSIN', 'under_1',
            'under_1pct', 'under_13', 'under_13pc', 'over_70', 'over_70pct',
            'Shape_Length', 'Shape_Leng', 'NonWhite_Pop', 'NonWt_Pct',
            'Density', 'Shape_Le_1', 'Shape_Area', 'Black', 'Blackpct',
            'PopWithin', 'PctWithin', 'Include', 'City', 'Area', 'LandA_M',
            'LandA_M_1', 'NonWhite_P', 'H_Income_M', 'State'
        ])
        nrrdtbl = 'NrRd_Pop'
        popRF.write(
            "Create a new table based on the EnviroAtlas community block groups table retaining the BGRP and Dasy_Pop fields--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        """ Add fields to new table """
        # I* = insufficient (<25%) buffer, S* = sufficient; KMN/KMY are lane-km
        # not-buffered / buffered.
        arcpy.AddField_management(nrrdtbl, 'IBuff_Pop', 'LONG')
        arcpy.AddField_management(nrrdtbl, 'SBuff_Pop', 'LONG')
        arcpy.AddField_management(nrrdtbl, 'Buff_Pop', 'LONG')
        arcpy.AddField_management(nrrdtbl, 'IBuff_Pct', 'FLOAT', 5, 2)
        arcpy.AddField_management(nrrdtbl, 'SBuff_Pct', 'FLOAT', 5, 2)
        arcpy.AddField_management(nrrdtbl, 'Buff_Pct', 'FLOAT', 5, 2)
        arcpy.AddField_management(nrrdtbl, 'Lane_KMN', 'DOUBLE', 7, 2)
        arcpy.AddField_management(nrrdtbl, 'Lane_KMY', 'DOUBLE', 7, 2)
        arcpy.AddField_management(nrrdtbl, 'Lane_KMAll', 'DOUBLE', 7, 2)
        arcpy.AddField_management(nrrdtbl, 'Lane_PctSB', 'FLOAT', 5, 2)
        arcpy.AddField_management(nrrdtbl, 'Lane_PctIB', 'FLOAT', 5, 2)
        popRF.write(
            "Add fields to the new table for IBuff_Pop (long), SBuff_Pop (long), Buff_Pop (long), IBuff_Pct (float), SBuff_Pct (float), Buff_Pct (float), Lane_KMN (double), Lane_KMY (double), Lane_KMAll (double), Lane_PctSB (float), Lane_PctIB (float).--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        """ Join Each Table to the final table and calculate necessary records """
        # Join -> copy value -> drop joined column, once per statistics table.
        arcpy.JoinField_management(nrrdtbl, 'bgrp', 'Pop_InSuff', 'bgrp',
                                   ['SUM'])
        arcpy.CalculateField_management(nrrdtbl, 'IBuff_Pop', '!SUM!',
                                        'PYTHON_9.3')
        arcpy.DeleteField_management(nrrdtbl, 'SUM')
        arcpy.JoinField_management(nrrdtbl, 'bgrp', 'Pop_Suff', 'bgrp',
                                   ['SUM'])
        arcpy.CalculateField_management(nrrdtbl, 'SBuff_Pop', '!SUM!',
                                        'PYTHON_9.3')
        arcpy.DeleteField_management(nrrdtbl, 'SUM')
        arcpy.JoinField_management(nrrdtbl, 'bgrp', 'KMpBG_N', 'bgrp',
                                   ['SUM_KMs'])
        arcpy.CalculateField_management(nrrdtbl, 'Lane_KMN', '!SUM_KMs!',
                                        'PYTHON_9.3')
        arcpy.DeleteField_management(nrrdtbl, 'SUM_KMs')
        arcpy.JoinField_management(nrrdtbl, 'bgrp', 'KMpBG_Y', 'bgrp',
                                   ['SUM_KMs'])
        arcpy.CalculateField_management(nrrdtbl, 'Lane_KMY', '!SUM_KMs!',
                                        'PYTHON_9.3')
        arcpy.DeleteField_management(nrrdtbl, 'SUM_KMs')
        popRF.write(
            "Join the zonal statistics and length statistics tables with the new table and calculate IBuff_Pop, SBuff_Pop, Lane_KMN, Lane_KMY. Remove Joins.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        """ Fill Null Values with Zeros """
        # Block groups absent from a joined table get NULL; normalize to 0.
        arcpy.MakeTableView_management(nrrdtbl, 'NrRdTbl')
        arcpy.SelectLayerByAttribute_management('NrRdTbl', 'NEW_SELECTION',
                                                'SBuff_Pop IS NULL')
        arcpy.CalculateField_management('NrRdTbl', 'SBuff_Pop', '0',
                                        'PYTHON_9.3')
        arcpy.SelectLayerByAttribute_management('NrRdTbl', 'NEW_SELECTION',
                                                'IBuff_Pop IS NULL')
        arcpy.CalculateField_management('NrRdTbl', 'IBuff_Pop', '0',
                                        'PYTHON_9.3')
        arcpy.SelectLayerByAttribute_management('NrRdTbl', 'NEW_SELECTION',
                                                'Lane_KMN IS NULL')
        arcpy.CalculateField_management('NrRdTbl', 'Lane_KMN', '0',
                                        'PYTHON_9.3')
        arcpy.SelectLayerByAttribute_management('NrRdTbl', 'NEW_SELECTION',
                                                'Lane_KMY IS NULL')
        arcpy.CalculateField_management('NrRdTbl', 'Lane_KMY', '0',
                                        'PYTHON_9.3')
        arcpy.SelectLayerByAttribute_management('NrRdTbl', 'CLEAR_SELECTION')

        """ Calculate additional fields. """
        arcpy.CalculateField_management('NrRdTbl', 'Buff_Pop',
                                        '!IBuff_Pop! + !SBuff_Pop!',
                                        'PYTHON_9.3')
        arcpy.CalculateField_management('NrRdTbl', 'Lane_KMAll',
                                        '!Lane_KMN! + !Lane_KMY!',
                                        'PYTHON_9.3')
        # Percentages computed only where the denominator is positive; the
        # zero-denominator rows are handled by the selections below.
        arcpy.SelectLayerByAttribute_management('NrRdTbl', 'NEW_SELECTION',
                                                'Dasy_Pop > 0')
        arcpy.CalculateField_management(
            'NrRdTbl', 'IBuff_Pct',
            '"%.2f" % (float(!IBuff_Pop!)/float(!Dasy_Pop!) * 100)',
            'PYTHON_9.3')
        arcpy.CalculateField_management(
            'NrRdTbl', 'SBuff_Pct',
            '"%.2f" % (float(!SBuff_Pop!)/float(!Dasy_Pop!) * 100)',
            'PYTHON_9.3')
        arcpy.CalculateField_management(
            'NrRdTbl', 'Buff_Pct',
            '"%.2f" % (float(!Buff_Pop!)/float(!Dasy_Pop!) * 100)',
            'PYTHON_9.3')
        arcpy.SelectLayerByAttribute_management('NrRdTbl', 'NEW_SELECTION',
                                                'Lane_KMAll > 0')
        arcpy.CalculateField_management(
            'NrRdTbl', 'Lane_PctIB',
            '"%.2f" % (!Lane_KMN!/!Lane_KMAll! * 100)', 'PYTHON_9.3')
        arcpy.CalculateField_management(
            'NrRdTbl', 'Lane_PctSB',
            '"%.2f" % (!Lane_KMY!/!Lane_KMAll! * 100)', 'PYTHON_9.3')
        arcpy.SelectLayerByAttribute_management('NrRdTbl', 'NEW_SELECTION',
                                                'Lane_KMAll = 0')
        arcpy.CalculateField_management('NrRdTbl', 'Lane_PctIB', '0',
                                        'PYTHON_9.3')
        arcpy.CalculateField_management('NrRdTbl', 'Lane_PctSB', '0',
                                        'PYTHON_9.3')
        arcpy.SelectLayerByAttribute_management('NrRdTbl', 'CLEAR_SELECTION')
        popRF.write(
            "Calculate remaining fields: Buff_Pop = IBuff_Pop + SBuff_Pop; IBuff_Pct = IBuff_Pop/Dasy_Pop*100; SBuff_Pct = SBuff_Pop/Dasy_Pop*100; Lane_KMAll = Lane_KMN + Lane_KMY; Lane_PctSB = Lane_KMY/Lane_KMAll*100; Lane_PctIB = Lane_KMN/Lane_KMAll*100. --"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        """ Copy into Working Directory """
        # -99999 = sentinel for "no population in block group".
        arcpy.SelectLayerByAttribute_management('NrRdTbl', 'NEW_SELECTION',
                                                'Dasy_Pop = 0')
        arcpy.CalculateField_management('NrRdTbl', 'IBuff_Pct', '-99999',
                                        'PYTHON_9.3')
        arcpy.CalculateField_management('NrRdTbl', 'SBuff_Pct', '-99999',
                                        'PYTHON_9.3')
        arcpy.CalculateField_management('NrRdTbl', 'Buff_Pct', '-99999',
                                        'PYTHON_9.3')
        arcpy.CalculateField_management('NrRdTbl', 'IBuff_Pop', '-99999',
                                        'PYTHON_9.3')
        arcpy.CalculateField_management('NrRdTbl', 'SBuff_Pop', '-99999',
                                        'PYTHON_9.3')
        arcpy.CalculateField_management('NrRdTbl', 'Buff_Pop', '-99999',
                                        'PYTHON_9.3')
        arcpy.SelectLayerByAttribute_management('NrRdTbl', 'CLEAR_SELECTION')
        popRF.write(
            "Calculate Fields where Dasy_Pop = 0: IBuff_Pop, SBuff_Pop, Buff_Pop, IBuff_Pct, SBuff_Pct, Buff_Pct = -99999--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        """-------- Check that the Analysis Area is covered by the LC -------------- """
        """ Create a Polygon Version of the LC """
        if arcpy.Exists(freqDir + '/LC_Poly') == False:
            # Any land-cover value -> 1; polygonize to get the LC footprint,
            # then drop small holes (<5% parts).
            ReC = arcpy.sa.Reclassify(
                str(freqDir) + '/LC', 'Value',
                arcpy.sa.RemapValue([[0, 0], [10, 1], [20, 1], [21, 1],
                                     [22, 1], [30, 1], [40, 1], [52, 1],
                                     [70, 1], [80, 1], [82, 1], [91, 1],
                                     [92, 1]]))
            ReC.save(str(freqDir) + '/AreaIO')
            arcpy.RasterToPolygon_conversion(
                str(freqDir) + '/AreaIO', str(freqDir) + '/LC_Poly', 'SIMPLIFY')
            arcpy.EliminatePolygonPart_management(
                str(freqDir) + '/LC_Poly', str(freqDir) + '/LC_Poly_EP',
                'PERCENT', '', '5', 'CONTAINED_ONLY')
            arcpy.Delete_management(str(freqDir) + '/LC_Poly')
            arcpy.Rename_management(
                str(freqDir) + '/LC_Poly_EP', str(freqDir) + '/LC_Poly')

        """ Buffer the LC Polygon by -500m """
        # Despite the label, the boundary is buffered OUT by 500m (used to test
        # whether the LC extends far enough beyond the community boundary).
        if arcpy.Exists(freqDir + '/Bnd_Cty_500m') == False:
            arcpy.env.extent = freqDir + '/LC'
            arcpy.env.snapRaster = freqDir + '/LC'
            arcpy.Buffer_analysis(
                str(freqDir) + '/Bnd_Cty', str(freqDir) + '/Bnd_Cty_500m',
                '500 meters')
            arcpy.EliminatePolygonPart_management(
                str(freqDir) + '/Bnd_Cty_500m',
                str(freqDir) + '/Bnd_Cty_500m_EP', 'PERCENT', '', '30',
                'CONTAINED_ONLY')
            arcpy.Delete_management(str(freqDir) + '/Bnd_Cty_500m')
            arcpy.Rename_management(
                str(freqDir) + '/Bnd_Cty_500m_EP',
                str(freqDir) + '/Bnd_Cty_500m')

        """ Identify whether LC is large enough """
        # bigEnough > 0 when the buffered boundary falls completely inside the
        # LC footprint; 0 triggers the edge-correction branches below.
        arcpy.MakeFeatureLayer_management(str(freqDir) + '/LC_Poly', 'LClyr')
        arcpy.MakeFeatureLayer_management(
            str(freqDir) + '/Bnd_Cty_500m', 'BC_500lyr')
        arcpy.SelectLayerByLocation_management('BC_500lyr',
                                               'COMPLETELY_WITHIN', 'LClyr',
                                               '', 'NEW_SELECTION')
        bigEnough = float(arcpy.GetCount_management('BC_500lyr').getOutput(0))
        arcpy.SelectLayerByAttribute_management('BC_500lyr', 'CLEAR_SELECTION')

        """ If the LC isn't large enough, edit erroneous BGS """
        if bigEnough == 0:
            """ Identify BGs within 50m of the LC edge """
            arcpy.Buffer_analysis(
                str(freqDir) + '/LC_Poly', 'LC_Poly_Minus15', '-15 meters')
            arcpy.MakeFeatureLayer_management('LC_Poly_Minus15', 'Minus15')
            arcpy.MakeFeatureLayer_management(freqDir + '/BG', 'BG')
            # INVERT selects BGs NOT completely within the shrunken footprint,
            # i.e. those touching the edge zone.
            arcpy.SelectLayerByLocation_management('BG', 'COMPLETELY_WITHIN',
                                                   'Minus15', '',
                                                   'NEW_SELECTION', 'INVERT')
            bgValue = float(arcpy.GetCount_management('BG').getOutput(0))
            """ For all BGs too close to the LC edge, assign both fields a value of -99998 """
            # -99998 = sentinel for "edge-affected block group".
            if bgValue > 0:
                bgrps = []
                cursor = arcpy.SearchCursor('BG')
                for row in cursor:
                    value = row.getValue('bgrp')
                    bgrps.append(value)
                bgrps = list(set(bgrps))
                # Build "bgrp = 'a' OR bgrp = 'b' ..." then strip leading ' OR '.
                expression = ''
                for bgrp in bgrps:
                    expression = expression + " OR bgrp = '" + str(bgrp) + "'"
                expression = expression[4:]
                arcpy.SelectLayerByAttribute_management(
                    'NrRdTbl', 'NEW_SELECTION', expression)
                for field in [
                        'IBuff_Pct', 'SBuff_Pct', 'Buff_Pct', 'IBuff_Pop',
                        'SBuff_Pop', 'Buff_Pop', 'Lane_PctIB', 'Lane_PctSB'
                ]:
                    arcpy.CalculateField_management('NrRdTbl', str(field),
                                                    '-99998', 'PYTHON_9.3')
                arcpy.SelectLayerByAttribute_management(
                    'NrRdTbl', 'CLEAR_SELECTION')
                popRF.write(
                    "Calculate Field for BGs within 50m of the edge of the land cover, all fields = -99998.--"
                    + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        """ Create final table """
        arcpy.CopyRows_management('NrRdTbl', 'NrRd_Pop_Final')
        try:
            arcpy.Delete_management(finalDir + '/' + city + '_NrRd_Pop')
        except:
            pass
        arcpy.TableToTable_conversion('NrRd_Pop_Final', finalDir,
                                      city + '_NrRd_Pop')
        # Keep only the published fields; everything else is dropped one at a
        # time.
        allFields = [
            f.name
            for f in arcpy.ListFields(finalDir + '/' + city + '_NrRd_Pop')
        ]
        for field in allFields:
            if field not in [
                    'bgrp', 'OBJECTID', 'IBuff_Pop', 'SBuff_Pop', 'Buff_Pop',
                    'Buff_Pct', 'Lane_PctSB', 'Lane_PctIB'
            ]:
                arcpy.DeleteField_management(
                    finalDir + '/' + city + '_NrRd_Pop', [field])
        popRF.write(
            "Export the fields to be displayed in the EnviroAtlas to a final gdb table: IBuff_Pop, SBuff_Pop, Buff_Pop, Buff_Pct, Lane_PctSB, Lane_PctIB.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        #-------- FOUNDATIONAL LAYER ANALYSIS -----------------------------------
        """ Set Environments """
        arcpy.env.extent = workDir + '/MFor_29C'
        arcpy.env.snapRaster = workDir + '/MFor_29C'

        """-------- Reclassify Moving Window into Percentage Breaks ------------------- """
        # Window sum 613 = 100% cover; class breaks approximate 12.5/25/50/75/100%.
        outReclass2 = arcpy.sa.Reclassify(
            'ForBuff', 'Value',
            arcpy.sa.RemapRange([[0, 77, 12], [78, 154, 25], [155, 307, 50],
                                 [308, 460, 75], [461, 613, 100]]))
        outReclass2.save('For_5Cls')
        pctRF.write(
            "Reclassify the extracted raster into percentage classes: 0-77 = 12.5; 78-154 = 25; 155-307 = 50; 308-460 = 75; 461-613 = 100.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        """ Convert to polygon """
        arcpy.RasterToPolygon_conversion('For_5Cls', 'For_5Poly',
                                         'NO_SIMPLIFY')
        pctRF.write(
            "Convert the reclassified raster into a polygon WITHOUT simplifying.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        """ Join the Polygon with the Road Buffer lines """
        arcpy.Intersect_analysis(['BuffLineUse', 'For_5Poly'], 'Class5', 'ALL',
                                 '', 'LINE')
        pctRF.write(
            "Intersect the analysis line with the polygons, splitting the analysis line into pieces representing each percentage class.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        arcpy.Clip_analysis('Class5', freqDir + '/Bnd_Cty', 'Class5_Bnd')
        pctRF.write(
            "Clip the analysis line to the EnviroAtlas community boundary and the county lines.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        arcpy.Dissolve_management('Class5_Bnd', 'Class5_D', 'gridcode')
        pctRF.write(
            "Dissolve the analysis line based on the percentage classes.--" +
            time.strftime('%Y%m%d--%H%M%S') + '--\n')
        arcpy.AddField_management('Class5_D', 'PctTree', 'FLOAT')
        # Field-calculator code block: raster class 12 really means 12.5%.
        # The "12.5" string is coerced to float by the FLOAT target field.
        codeblock = '''def CalPctTree(gc):
    if (gc == 12):
        return "12.5"
    else:
        return gc '''
        arcpy.CalculateField_management('Class5_D', 'PctTree',
                                        'CalPctTree(!gridcode!)', 'PYTHON_9.3',
                                        codeblock)
        arcpy.DeleteField_management('Class5_D', ['gridcode'])
        pctRF.write(
            "Add field to the analysis line: PctTree (float) and calculate where PctTree = gridcode--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        """ If the LC isn't large enough, delete erroneous line segments """
        if bigEnough == 0:
            # Union of boundary and inward-shrunk footprint isolates the
            # edge-affected strip; segments intersecting it are discarded.
            arcpy.Buffer_analysis(
                str(freqDir) + '/LC_Poly', 'LC_Poly_Minus_15', '15 meters')
            arcpy.Union_analysis(
                [str(freqDir) + '/Bnd_Cty', 'LC_Poly_Minus_15'],
                'LC_Minus_BndCty_Union_15', 'ONLY_FID')
            arcpy.Select_analysis(
                'LC_Minus_BndCty_Union_15', 'EdgeAffectedArea_15',
                'FID_Bnd_Cty > 0 AND FID_LC_Poly_Minus_15 = -1')
            arcpy.MakeFeatureLayer_management('Class5_D', 'Class5_lyr')
            arcpy.MakeFeatureLayer_management('EdgeAffectedArea_15', 'EEArea')
            arcpy.SelectLayerByLocation_management('Class5_lyr', 'INTERSECT',
                                                   'EEArea', '',
                                                   'NEW_SELECTION')
            arcpy.SelectLayerByAttribute_management('Class5_lyr',
                                                    'SWITCH_SELECTION')
            arcpy.CopyFeatures_management('Class5_lyr',
                                          'NrRd_PFor_EdgeCorrected')
            arcpy.SelectLayerByAttribute_management('Class5_lyr',
                                                    'CLEAR_SELECTION')
            # NOTE(review): log text appears copied from the population branch;
            # this step deletes edge-affected line segments, not table fields.
            pctRF.write(
                "Calculate Field for BGs within 50m of the edge of the land cover, all fields = -99998.--"
                + time.strftime('%Y%m%d--%H%M%S') + '--\n')

        """ Project into Albers """
        prjfile = prjDir + '/USA Contiguous Albers Equal Area Conic USGS.prj'
        # Prefer the edge-corrected line; fall back to Class5_D when the
        # corrected copy was never created (bigEnough > 0).
        try:
            arcpy.Project_management('NrRd_PFor_EdgeCorrected', 'NrRd_PFor',
                                     prjfile)
        except:
            arcpy.Project_management('Class5_D', 'NrRd_PFor', prjfile)
        pctRF.write("Project the analysis line into Albers--" +
                    time.strftime('%Y%m%d--%H%M%S') + '--\n')

        """ Create final feature class """
        try:
            arcpy.Delete_management(finalDir + '/' + city + '_NrRd_PFor')
        except:
            pass
        # NOTE(review): exports the UN-projected edge-corrected line when it
        # exists (NrRd_PFor, the Albers copy, is only the fallback) -- confirm
        # this is intentional.
        try:
            arcpy.FeatureClassToFeatureClass_conversion(
                'NrRd_PFor_EdgeCorrected', finalDir, city + '_NrRd_PFor')
        except:
            arcpy.FeatureClassToFeatureClass_conversion(
                'NrRd_PFor', finalDir, city + '_NrRd_PFor')
        pctRF.write(
            "Export the analysis line to a geodatabase for display in EnviroAtlas.--"
            + time.strftime('%Y%m%d--%H%M%S') + '--\n')
        print 'NrRd_PFor End Time: ' + time.asctime() + '\n'

        #-------- COMPELETE LOGFILES ---------------------------------------------
        pctRF.close()
        popRF.close()
        ReuseRF.close()

    #-----------------------------------------------------------------------------
    # END ANALYSIS
    #-----------------------------------------------------------------------------
    except:
        """ This part of the script executes if anything went wrong in the main script above """
        # NOTE(review): if the failure happened before pctRF was opened, the
        # writes below raise NameError and mask the original traceback.
        #-------- PRINT ERRORS ---------------------------------------------------
        print "\nSomething went wrong.\n\n"
        print "Python Traceback Message below:"
        print traceback.format_exc()
        print "\nArcMap Error Messages below:"
        print arcpy.GetMessages(2)
        print "\nArcMap Warning Messages below:"
        print arcpy.GetMessages(1)

        #-------- COMPLETE LOGFILE ------------------------------------------------
        pctRF.write("\nSomething went wrong.\n\n")
        pctRF.write("Pyton Traceback Message below:")
        pctRF.write(traceback.format_exc())
        pctRF.write("\nArcMap Error Messages below:")
        pctRF.write(arcpy.GetMessages(2))
        pctRF.write("\nArcMap Warning Messages below:")
        pctRF.write(arcpy.GetMessages(1))
        pctRF.write("\n\nEnded at " + time.asctime() + '\n')
        pctRF.write("\n---End of Log File---\n")
        if pctRF:
            pctRF.close()
Base_adj = os.path.basename(INPUT_FinalSpans_Sub_Group) Adjusted_conversion = os.path.join(outFolder, os.path.splitext(Base_adj)[0] + "_MP_Converted_Q2.shp") for row in rows: poly = row.getValue(shape_field) if poly.isMultipart == 1: if arcpy.CheckProduct("ArcInfo") == "AlreadyInitialized": arcpy.arcpy.AddWarning("ARC ADVANCED LICENSE DETECTED") else: arcpy.AddError("YOU NEED AN ARC ADVANCED LICENSE TO EXECUTE THIS STEP WITH AN ADJUSTED ALIGNMENT EXTRACTION") arcpy.SplitLine_management(INPUT_FinalSpans_Sub_Group, Adjusted_conversion) else: pass if os.path.exists(Adjusted_conversion): arcpy.AddMessage("ADJUSTED ALIGNMENT DETECTED....CONVERTING ADJUSTED ALIGNMENT TO MULTI PART FEATURE(SPANS)....") sub_prod_ref_ADJ(Adjusted_conversion) sub_ref_shps_list = [] for root, dirs, files in os.walk(sub_ref): for file in files: if file.endswith(".shp"): F = (os.path.join(root, file)) sub_ref_shps_list.append(F)
def XSLayout(output_workspace, flowline, split_type, transect_spacing,
             transect_width, transect_width_unit):
    """Create cross-section (XS) transects perpendicular to a flowline.

    The flowline is dissolved, split (either at an approximate spacing via the
    external ``splitline`` helper or at its vertices), and for each segment two
    opposite bearing lines are generated at the segment midpoint and merged
    into a single transect per segment.

    Parameters:
        output_workspace -- workspace (gdb) receiving the final transect FC.
        flowline -- input line feature class; must carry a ``ReachName`` field.
        split_type -- "Split at approximate distance" or anything else
                      (vertex split).
        transect_spacing -- spacing used by the approximate-distance split and
                            in the output name.
        transect_width -- half-width of each transect (per side).
        transect_width_unit -- linear unit for BearingDistanceToLine.

    Side effects: creates and finally deletes a scratch ``General.gdb`` next to
    the output workspace; sets arcpy.env values; sets tool output parameter 6.
    """
    # Set environment variables
    arcpy.env.overwriteOutput = True
    arcpy.env.XYResolution = "0.00001 Meters"
    arcpy.env.XYTolerance = "0.0001 Meters"

    # Create "General" scratch file geodatabase beside the output workspace
    WorkFolder = os.path.dirname(output_workspace)
    General_GDB = WorkFolder + "\General.gdb"
    arcpy.CreateFileGDB_management(WorkFolder, "General", "CURRENT")
    arcpy.env.workspace = General_GDB

    # Echo parameter values for the tool log
    arcpy.AddMessage("Output Workspace: {}".format(output_workspace))
    arcpy.AddMessage("Workfolder: {}".format(WorkFolder))
    arcpy.AddMessage("Flowline: "
                     "{}".format(arcpy.Describe(flowline).baseName))
    arcpy.AddMessage("Split Type: {}".format(split_type))
    arcpy.AddMessage("XS Spacing: {}".format(transect_spacing))
    arcpy.AddMessage("XS Width: {}".format(transect_width))
    arcpy.AddMessage("XS Width Units: {}".format(transect_width_unit))

    # Unsplit line: merge the flowline into single-part dissolved pieces
    LineDissolve = "LineDissolve"
    arcpy.Dissolve_management(flowline, LineDissolve, "", "", "SINGLE_PART")
    LineSplit = "LineSplit"

    # Split line at approximate distance or at vertices
    if split_type == "Split at approximate distance":
        splitline(LineDissolve, LineSplit, transect_spacing)
    else:
        arcpy.SplitLine_management(LineDissolve, LineSplit)

    # Add working fields to LineSplit
    FieldsNames = [
        "LineID", "Direction", "Azimuth", "X_mid", "Y_mid", "AziLine_1",
        "AziLine_2", "Distance"
    ]
    for fn in FieldsNames:
        arcpy.AddField_management(LineSplit, fn, "DOUBLE")

    # Calculate fields.
    # BUG FIX: the original used math.atan(dx/dy), which raises
    # ZeroDivisionError for exactly east-west segments (dy == 0) and returns
    # quadrant-ambiguous angles for southward segments. atan2(dx, dy) yields
    # the bearing in (-180, 180] with no division; the Azimuth code block
    # below normalizes negatives into [0, 360).
    CodeBlock_Direction = """def GetAzimuthPolyline(shape):
 radian = math.atan2((shape.lastpoint.x - shape.firstpoint.x), (shape.lastpoint.y - shape.firstpoint.y))
 degrees = radian * 180 / math.pi
 return degrees"""

    CodeBlock_Azimuth = """def Azimuth(direction):
 if direction < 0:
  azimuth = direction + 360
  return azimuth
 else:
  return direction"""
    CodeBlock_NULLS = """def findNulls(fieldValue):
 if fieldValue is None:
  return 0
 elif fieldValue is not None:
  return fieldValue"""
    arcpy.CalculateField_management(LineSplit, "LineID",
                                    "!OBJECTID!", "PYTHON_9.3")
    arcpy.CalculateField_management(LineSplit, "Direction",
                                    "GetAzimuthPolyline(!Shape!)",
                                    "PYTHON_9.3", CodeBlock_Direction)
    arcpy.CalculateField_management(LineSplit, "Direction",
                                    "findNulls(!Direction!)", "PYTHON_9.3",
                                    CodeBlock_NULLS)
    arcpy.CalculateField_management(LineSplit, "Azimuth",
                                    "Azimuth(!Direction!)", "PYTHON_9.3",
                                    CodeBlock_Azimuth)
    # Midpoint coordinates of each segment (positionAlongLine at 50%)
    arcpy.CalculateField_management(
        LineSplit, "X_mid", "!Shape!.positionAlongLine(0.5,True).firstPoint.X",
        "PYTHON_9.3")
    arcpy.CalculateField_management(
        LineSplit, "Y_mid", "!Shape!.positionAlongLine(0.5,True).firstPoint.Y",
        "PYTHON_9.3")
    # Perpendicular bearings: azimuth +/- 90, wrapped into [0, 360)
    CodeBlock_AziLine1 = """def Azline1(azimuth):
 az1 = azimuth + 90
 if az1 > 360:
  az1-=360
  return az1
 else:
  return az1"""
    CodeBlock_AziLine2 = """def Azline2(azimuth):
 az2 = azimuth - 90
 if az2 < 0:
  az2+=360
  return az2
 else:
  return az2"""
    arcpy.CalculateField_management(LineSplit, "AziLine_1",
                                    "Azline1(!Azimuth!)", "PYTHON_9.3",
                                    CodeBlock_AziLine1)
    arcpy.CalculateField_management(LineSplit, "AziLine_2",
                                    "Azline2(!Azimuth!)", "PYTHON_9.3",
                                    CodeBlock_AziLine2)
    arcpy.CalculateField_management(LineSplit, "Distance", transect_width,
                                    "PYTHON_9.3")

    # Generate one half-transect per perpendicular bearing
    spatial_reference = arcpy.Describe(flowline).spatialReference
    Azline1 = "Azline1"
    Azline2 = "Azline2"
    arcpy.BearingDistanceToLine_management(LineSplit, Azline1, "X_mid",
                                           "Y_mid", "Distance",
                                           transect_width_unit, "AziLine_1",
                                           "DEGREES", "GEODESIC", "LineID",
                                           spatial_reference)
    arcpy.BearingDistanceToLine_management(LineSplit, Azline2, "X_mid",
                                           "Y_mid", "Distance",
                                           transect_width_unit, "AziLine_2",
                                           "DEGREES", "GEODESIC", "LineID",
                                           spatial_reference)

    # Append the two half-transect sets into one feature class
    Azline = "Azline"
    arcpy.CreateFeatureclass_management(arcpy.env.workspace, "Azline",
                                        "POLYLINE", "", "", "",
                                        spatial_reference)
    arcpy.AddField_management(Azline, "LineID", "DOUBLE")
    arcpy.Append_management([Azline1, Azline2], Azline, "NO_TEST")

    # Dissolve the two halves into a single transect per LineID
    Azline_Dissolve = "Azline_Dissolve"
    arcpy.Dissolve_management(Azline, Azline_Dissolve, "LineID", "",
                              "SINGLE_PART")

    # Add endpoint-coordinate fields to Azline_Dissolve
    FieldsNames2 = ["x_start", "y_start", "x_end", "y_end"]
    for fn2 in FieldsNames2:
        arcpy.AddField_management(Azline_Dissolve, fn2, "DOUBLE")

    # Calculate transect endpoint coordinates (0% and 100% along the line)
    arcpy.CalculateField_management(
        Azline_Dissolve, "x_start",
        "!Shape!.positionAlongLine(0,True).firstPoint.X", "PYTHON_9.3")
    arcpy.CalculateField_management(
        Azline_Dissolve, "y_start",
        "!Shape!.positionAlongLine(0,True).firstPoint.Y", "PYTHON_9.3")
    arcpy.CalculateField_management(
        Azline_Dissolve, "x_end",
        "!Shape!.positionAlongLine(1,True).firstPoint.X", "PYTHON_9.3")
    arcpy.CalculateField_management(
        Azline_Dissolve, "y_end",
        "!Shape!.positionAlongLine(1,True).firstPoint.Y", "PYTHON_9.3")

    # Re-generate the transects as straight XY-to-line features in the output
    out_transect_name = "xs_{}_{}".format(int(round(transect_spacing)),
                                          int(round(transect_width)))
    output_transect = os.path.join(output_workspace, out_transect_name)
    arcpy.XYToLine_management(Azline_Dissolve, output_transect, "x_start",
                              "y_start", "x_end", "y_end", "", "",
                              spatial_reference)

    # Create `Seq` field (sequential transect number from the OID)
    arcpy.AddField_management(in_table=output_transect, field_name="Seq",
                              field_type="SHORT")
    arcpy.CalculateField_management(in_table=output_transect, field="Seq",
                                    expression="!OID!",
                                    expression_type="PYTHON_9.3")

    # Set the ReachName field.
    # NOTE(review): if the flowline holds several reach names, an arbitrary
    # one is chosen (set order is undefined) — original behavior preserved.
    unique_reaches = set(
        row[0] for row in arcpy.da.SearchCursor(flowline, "ReachName"))
    reach_name = list(unique_reaches)[0]
    arcpy.AddField_management(in_table=output_transect,
                              field_name="ReachName", field_type="TEXT")
    arcpy.CalculateField_management(in_table=output_transect,
                                    field="ReachName",
                                    expression="'" + reach_name + "'",
                                    expression_type="PYTHON_9.3")

    # Return the result through output parameter 6
    arcpy.SetParameter(6, output_transect)

    # Cleanup: remove the scratch geodatabase
    arcpy.Delete_management(General_GDB)
# Build a weekly-bus-count line dataset: merge per-route feature classes,
# filter to buses, dissolve overlapping segments, and prepare for rasterizing.
# Relies on names defined earlier in the script (PStransit, PStransitbus,
# cs_ref, PStransitduplitab, restemplate, soundtransit, ...).
arcpy.Merge_management(arcpy.ListFeatureClasses('*_routes'), output = PStransit)
#Only keep buses with trips and whose schedule lasts more than 1 day
arcpy.MakeFeatureLayer_management(PStransit, 'PStransit_lyr', where_clause= '(route_type = 3) AND (MIN_service_len > 1) AND (SUM_adjustnum > 0)')
arcpy.CopyFeatures_management('PStransit_lyr', PStransitbus)
arcpy.Project_management(PStransitbus, PStransitbus_proj, cs_ref)
#Create raster of weekly number of buses at the same resolution as bing data
# Convert weekly number of buses to integer (x10, rounded half-up)
arcpy.AddField_management(PStransitbus_proj, 'adjustnum_int', 'SHORT')
arcpy.CalculateField_management(PStransitbus_proj, 'adjustnum_int', expression='int(10*!SUM_adjustnum!+0.5)', expression_type='PYTHON')
#Split lines at all intersections so that small identical overlapping segments can be dissolved
arcpy.SplitLine_management(PStransitbus_proj, PStransitbus_proj + '_split') #Split at intersection
arcpy.FindIdentical_management(in_dataset=PStransitbus_proj + '_split', out_dataset=PStransitduplitab, fields="Shape") #Find overlapping segments and make them part of a group (FEAT_SEQ)
arcpy.MakeFeatureLayer_management(PStransitbus_proj + '_split', "intlyr")
# Join duplicate groups back onto the split layer via the OID field
arcpy.AddJoin_management("intlyr", arcpy.Describe("intlyr").OIDfieldName, PStransitduplitab, "IN_FID", "KEEP_ALL")
arcpy.Dissolve_management("intlyr", PStransitbus_splitdiss, dissolve_field='explFindID.FEAT_SEQ', statistics_fields=[[os.path.split(PStransitbus_proj)[1] + '_split.adjustnum_int', 'SUM']]) #Dissolve overlapping segments
arcpy.RepairGeometry_management(PStransitbus_splitdiss, delete_null = 'DELETE_NULL') #sometimes creates empty geom
#Get the length of a half pixel diagonal to create buffers for
#guaranteeing that segments potentially falling within the same pixel are rasterized separately
tolerance = (2.0**0.5)*float(restemplate.getOutput(0))/2
arcpy.env.workspace = os.path.dirname(soundtransit)
ExplodeOverlappingLines(PStransitbus_splitdiss, tolerance)
#For each set of non-overlapping lines, create its own raster
tilef = 'expl'
# Derive the four sides of a parcel's bounding rectangle and rank them by
# distance to the road. Uses names from earlier in the script (newlayer, df,
# testbldgs1, outtestparcels, outbox, outtestbldgs, roads).
newlayer1 = arcpy.mapping.Layer(testbldgs1)
arcpy.mapping.AddLayer(df, newlayer, "TOP")
arcpy.mapping.AddLayer(df, newlayer1, "TOP")
#Parcels are weird shapes and I need just 4 sides so using a bounding box.
arcpy.MinimumBoundingGeometry_management(outtestparcels, outbox, "RECTANGLE_BY_WIDTH", "NONE")
#Splitting the box into 4 sides that will be: front, back and sides.
#I also need the center of the sides to figure out relation to road.
tempPolyToLine = r"/temppolytoline.shp"
arcpy.PolygonToLine_management(outbox, tempPolyToLine, "IGNORE_NEIGHBORS")
tempSplitLines = r"/tempSplitLines.shp"
arcpy.SplitLine_management(tempPolyToLine, tempSplitLines)
splitLineCenter = r"/splitLineCenter.shp"
arcpy.FeatureToPoint_management(tempSplitLines, splitLineCenter, "INSIDE")
buildingsCenter = r"/buildingsCenter.shp"
arcpy.FeatureToPoint_management(outtestbldgs, buildingsCenter, "INSIDE")
splitLineCenter = r"/splitLineCenter.shp" #delete above line after testing
# NOTE(review): the reassignment above is a no-op (same path) left in by the
# author for testing — confirm before removing.
#Ok not which is closest/furtherst from the road?
arcpy.Near_analysis(splitLineCenter, roads)
arcpy.AddField_management(splitLineCenter, "max", "LONG")
tempsplitLineCenterDist = r"/tempsplitLineCenterDist.shp"
arcpy.Sort_management(splitLineCenter, tempsplitLineCenterDist, [["TARGET_FID", "ASCENDING"]])
# Create "General" file geodatabase WorkFolder=env.workspace General_GDB=WorkFolder+"\General.gdb" arcpy.CreateFileGDB_management(WorkFolder, "General", "CURRENT") env.workspace=General_GDB #Unsplit Line LineDissolve="LineDissolve" arcpy.Dissolve_management (Lines, LineDissolve,"", "", "SINGLE_PART") LineSplit="LineSplit" #Split Line if SplitType=="Split at approximate distance": splitline(LineDissolve, LineSplit, DistanceSplit) else: arcpy.SplitLine_management (LineDissolve, LineSplit) #Add fields to LineSplit FieldsNames=["LineID", "Direction", "Azimuth", "X_mid", "Y_mid", "AziLine_1", "AziLine_2", "Distance"] for fn in FieldsNames: arcpy.AddField_management (LineSplit, fn, "DOUBLE") #Calculate Fields CodeBlock_Direction="""def GetAzimuthPolyline(shape): radian = math.atan((shape.lastpoint.x - shape.firstpoint.x)/(shape.lastpoint.y - shape.firstpoint.y)) degrees = radian * 180 / math.pi return degrees""" CodeBlock_Azimuth="""def Azimuth(direction): if direction < 0: azimuth = direction + 360
# Add X Y Z Coordinates arcpy.AddXY_management(geologyPoints3D) # Recalculate Y coordinate to draw the profile arcpy.CalculateField_management(geologyPoints3D, "POINT_Y", "!POINT_Z! * "+str(exaggerationVal)+"", "PYTHON_9.3", "") # Make the Geology Profile Points through the profile Distance vs Heigh and export to Feature Class or Shapefile geologyProfilePointsLyr = arcpy.MakeXYEventLayer_management(geologyPoints3D, "Distance", "POINT_Y", 'in_memory\geologyProfilePointsLyrT', sr, "") geologyProfilePoints = arcpy.CopyFeatures_management(geologyProfilePointsLyr, 'in_memory\geologyProfilePointsT', "", "0", "0", "0") # Generate line whit the Geology Profile Points line3D = arcpy.PointsToLine_management(geologyProfilePoints, 'in_memory\line3DT', "", "", "NO_CLOSE") # Split the line with each point splitLine3D = arcpy.SplitLine_management(line3D, 'in_memory\splitLine3DT') # Add geology attributes to profile line and dissolve by dissolve field splitGeologyLine3D = arcpy.SpatialJoin_analysis(splitLine3D, geologyProfilePoints, 'in_memory\splitGeologyLine3DT', "JOIN_ONE_TO_ONE", "KEEP_ALL", "", "INTERSECT", "", "") geologyProfile3D = arcpy.Dissolve_management(splitGeologyLine3D, outGeologicProfile, dissolveGeologyField, "", "MULTI_PART", "DISSOLVE_LINES") addLayer = arcpy.mapping.Layer(outGeologicProfile) arcpy.mapping.AddLayer(df, addLayer, "TOP") # Add layer to data frame # Delete temporal data arcpy.DeleteFeatures_management('in_memory\startPointDataT') arcpy.DeleteFeatures_management('in_memory\pointsAlongLineT') arcpy.DeleteFeatures_management('in_memory\geologyPointsT') arcpy.DeleteFeatures_management('in_memory\geologyPoints3DT') arcpy.DeleteFeatures_management('in_memory\geologyProfilePointsT')
endPoints = 'in_memory\\endPoints' endPointsLyr = patchOutputFolder #define the environment variables description = arcpy.Describe(DTM) cellsize = description.children[0].meanCellHeight buffDist = cellsize * 2 pointDistance = str(cellsize) + ' Meters' arcpy.env.snapRaster = DTM arcpy.env.outputCoordinateSystem = arcpy.Describe(DTM).spatialReference #complete all the geoprocessing steps up to iterator arcpy.Buffer_analysis(flowObs, bufferedFlowObs, buffDist, 'FULL', 'FLAT', '', '', '') arcpy.PolygonToLine_management(bufferedFlowObs, lines, '') arcpy.SplitLine_management(lines, splitlines) arcpy.MakeFeatureLayer_management(splitlines, FLsplitlines, '', '', '') arcpy.SelectLayerByLocation_management(FLsplitlines, 'INTERSECT', flowObs, '', 'NEW_SELECTION', 'INVERT') arcpy.DeleteRows_management(FLsplitlines) arcpy.CopyFeatures_management(FLsplitlines, endLines, '', '', '', '') arcpy.GeneratePointsAlongLines_management(endLines, endPoints, 'DISTANCE', pointDistance, '', 'END_POINTS') arcpy.gp.ExtractMultiValuesToPoints_sa(endPoints, str(DTM) + ' Heights', 'None') arcpy.MakeFeatureLayer_management(endPoints, endPointsLyr) IDs = [] with arcpy.da.SearchCursor(endPointsLyr, ('RIGHT_FID', )) as cursor: for row in cursor: if row not in IDs:
# Compare a DEM-derived drainage network against an RS-mapped river polygon:
# vectorize the RS raster, drop slivers, then separate the DEM network into
# the part inside the RS polygon ("coincide") and the part outside ("connect").
# Python 2 script (print statements). Uses outer names: con_cal_RS_Raster_DEM,
# RS_Polygon, DEM, RS.
arcpy.RasterToPolygon_conversion(con_cal_RS_Raster_DEM, RS_Polygon, "SIMPLIFY")
layer3 = "delete_length" + RS_Polygon
arcpy.MakeFeatureLayer_management(RS_Polygon, layer3)
# Remove small sliver polygons below the area threshold
arcpy.SelectLayerByAttribute_management(
    layer3, "NEW_SELECTION", '"Shape_Area"<3000')  # you can modify this threshold
if int(arcpy.GetCount_management(layer3).getOutput(0)) > 0:
    arcpy.DeleteFeatures_management(layer3)
print "Get Raster to Polygon"
##Get Snapped DEM-modeled drainage network which coincided with RS-mapped river network
DEM_coincidewith_RS = "coincide_" + DEM
arcpy.Clip_analysis(DEM, RS_Polygon, DEM_coincidewith_RS)
# Split then unsplit to normalize segment topology
split_DEM_coincidewith_RS = "split_" + DEM_coincidewith_RS
arcpy.SplitLine_management(DEM_coincidewith_RS, split_DEM_coincidewith_RS)
unsplit_DEM_coincidewith_RS = "unsplit_" + split_DEM_coincidewith_RS
arcpy.UnsplitLine_management(split_DEM_coincidewith_RS, unsplit_DEM_coincidewith_RS)
##Get Snapped DEM-modeled drainage network which not coincided with RS-mapped river network
DEM_connect_RS = "connect_" + DEM
arcpy.Erase_analysis(DEM, RS_Polygon, DEM_connect_RS)
split_DEM_connect_RS = "split_" + DEM_connect_RS
arcpy.SplitLine_management(DEM_connect_RS, split_DEM_connect_RS)
unsplit_DEM_connect_RS = "unsplit_" + split_DEM_connect_RS
arcpy.UnsplitLine_management(split_DEM_connect_RS, unsplit_DEM_connect_RS)
print "Get coincided and connect line"
##Get dangle point of RS
RS_Point = "RS_Point_" + RS
def main():
    """Voronoi-diagram geoprocessing tool entry point.

    Reads seven tool parameters (input points, input lines, output workspace,
    and names for the output point / segment / polygon feature classes plus a
    line-identifier field), feeds the geometry to pyvoronoi, and writes the
    resulting vertices, edges, and cells to feature classes.

    Python 2 script (print statements, sys.exc_type). Depends on helpers
    defined elsewhere in this file: validateLicense,
    validateInputLineFeatureClass, validateInputPointFeatureClass, delFCByPath,
    mergeExtent, Distance.
    """
    try:
        ##################################################################################
        #READ PARAMETERS
        ##################################################################################
        inpoints = arcpy.GetParameterAsText(0)
        inlines = arcpy.GetParameterAsText(1)
        outWorkspace = arcpy.GetParameterAsText(2)
        outpoints = arcpy.GetParameterAsText(3)
        outsegments = arcpy.GetParameterAsText(4)
        outpolygons = arcpy.GetParameterAsText(5)
        inroads_identifier = arcpy.GetParameterAsText(6)
        arcpy.env.workspace = outWorkspace

        ##################################################################################
        #HARD CODED PARAMETERS
        ##################################################################################
        if arcpy.env.scratchWorkspace is None:
            arcpy.env.scratchWorkspace = r'C:\Users\fancelin\Documents\ArcGIS\Default.gdb'
        # pyvoronoi scaling factor: coordinates are multiplied by this before
        # integer snapping inside the library.
        factor = 100
        inroads_split_name = "voronoying_lines_split"
        inroads_split_line_name = "voronoying_lines_split_lines"
        inroads_split = "{0}{1}{2}".format(arcpy.env.scratchWorkspace,
                                           os.path.sep, inroads_split_name)
        inroads_split_line = "{0}{1}{2}".format(arcpy.env.scratchWorkspace,
                                                os.path.sep,
                                                inroads_split_line_name)
        spatial_reference = arcpy.Describe(inlines).spatialReference

        ##################################################################################
        #VALIDATION
        ##################################################################################
        arcpy.AddMessage("Validation")

        #Validate license requirements
        validateLicense()

        #Validate lines are provided
        # NOTE(review): this checks outsegments (an output name), not inlines —
        # possibly a bug in the original; behavior preserved here.
        if len(outsegments) == 0:
            raise Exception("Input lines were not provided.")

        #Validate that a line identifier was provided
        if len(inroads_identifier) == 0:
            raise Exception("Input lines identifer was not provided.")

        extents = []

        #Validate input line feature class.
        inlinesBBox = validateInputLineFeatureClass(inlines)
        extents.append(inlinesBBox)

        #Validate input point feature class if required.
        inPointsBBox = validateInputPointFeatureClass(inpoints) if len(
            arcpy.GetParameterAsText(0)) > 0 else None

        ##################################################################################
        #REMOVE FEATURE CLASSES
        ##################################################################################
        for fc in [
                inroads_split, inroads_split_line,
                "{0}{1}{2}".format(outWorkspace, os.path.sep, outpoints),
                "{0}{1}{2}".format(outWorkspace, os.path.sep, outsegments),
                "{0}{1}{2}".format(outWorkspace, os.path.sep, outpolygons)
        ]:
            delFCByPath(fc)

        ##################################################################################
        #COMPUTING THE BOUNDING BOX
        ##################################################################################
        # Instanciate pyvoronoi
        pv = pyvoronoi.Pyvoronoi(factor)
        arcpy.AddMessage("Add points to voronoi")
        pointOIDs = []
        if inPointsBBox != None:
            extents.append(inPointsBBox)
            for point in arcpy.da.SearchCursor(
                    inpoints, ['SHAPE@X', 'SHAPE@Y', 'OID@']):
                pointOIDs.append(point[2])
                pv.AddPoint([point[0], point[1]])

        arcpy.AddMessage("Computing bounding box outlines")
        finalBBox = mergeExtent(extents)
        # Second, slightly larger box (1 unit out) encloses the first so the
        # outermost voronoi cells are bounded.
        finalBBoxExpended = arcpy.Extent(finalBBox.XMin - 1,
                                         finalBBox.YMin - 1,
                                         finalBBox.XMax + 1,
                                         finalBBox.YMax + 1)
        bbox_line = [
            arcpy.Array([
                arcpy.Point(finalBBox.XMin, finalBBox.YMin),
                arcpy.Point(finalBBox.XMax, finalBBox.YMin)
            ]),
            arcpy.Array([
                arcpy.Point(finalBBox.XMin, finalBBox.YMin),
                arcpy.Point(finalBBox.XMin, finalBBox.YMax)
            ]),
            arcpy.Array([
                arcpy.Point(finalBBox.XMax, finalBBox.YMax),
                arcpy.Point(finalBBox.XMin, finalBBox.YMax)
            ]),
            arcpy.Array([
                arcpy.Point(finalBBox.XMax, finalBBox.YMax),
                arcpy.Point(finalBBox.XMax, finalBBox.YMin)
            ]),
            arcpy.Array([
                arcpy.Point(finalBBoxExpended.XMin, finalBBoxExpended.YMin),
                arcpy.Point(finalBBoxExpended.XMax, finalBBoxExpended.YMin)
            ]),
            arcpy.Array([
                arcpy.Point(finalBBoxExpended.XMin, finalBBoxExpended.YMin),
                arcpy.Point(finalBBoxExpended.XMin, finalBBoxExpended.YMax)
            ]),
            arcpy.Array([
                arcpy.Point(finalBBoxExpended.XMax, finalBBoxExpended.YMax),
                arcpy.Point(finalBBoxExpended.XMin, finalBBoxExpended.YMax)
            ]),
            arcpy.Array([
                arcpy.Point(finalBBoxExpended.XMax, finalBBoxExpended.YMax),
                arcpy.Point(finalBBoxExpended.XMax, finalBBoxExpended.YMin)
            ])
        ]

        arcpy.AddMessage("Bounding Box Info: {0},{1} | {2},{3}".format(
            finalBBox.XMin, finalBBox.YMin, finalBBox.XMax, finalBBox.YMax))

        ##################################################################################
        #FORMAT INPUT. NEED TO MAKE SURE LINE ARE SPLIT AT VERTICES AND THAT THERE ARE NO OVERLAPS
        ##################################################################################
        arcpy.AddMessage("Format lines")
        arcpy.AddMessage("Split lines at vertices")
        arcpy.SplitLine_management(in_features=inlines,
                                   out_feature_class=inroads_split)

        arcpy.AddMessage("Add bounding box")
        with arcpy.da.InsertCursor(inroads_split,
                                   ['SHAPE@', inroads_identifier]) as op:
            for pointArray in bbox_line:
                arcpy.AddMessage("{0},{1} - {2},{3}".format(
                    pointArray[0].X, pointArray[0].Y, pointArray[1].X,
                    pointArray[1].Y))
                op.insertRow([arcpy.Polyline(pointArray), None])
        del op

        arcpy.AddMessage("Split lines at intersections")
        arcpy.FeatureToLine_management(inroads_split, inroads_split_line, '#',
                                       'ATTRIBUTES')

        ##################################################################################
        #SEND LINE INPUT TO VORONOI AND CONSTRUCT THE GRAPH
        ##################################################################################
        arcpy.AddMessage("Add lines to voronoi")
        lineIds = []
        for road in arcpy.da.SearchCursor(
                inroads_split_line,
            ['SHAPE@', 'OID@', 'SHAPE@LENGTH', inroads_identifier]):
            # skip zero-length segments
            if (road[2] > 0):
                lineIds.append(road[3])
                pv.AddSegment([[road[0].firstPoint.X, road[0].firstPoint.Y],
                               [road[0].lastPoint.X, road[0].lastPoint.Y]])

        arcpy.AddMessage("Construct voronoi")
        pv.Construct()
        cells = pv.GetCells()
        edges = pv.GetEdges()
        vertices = pv.GetVertices()

        ##################################################################################
        #CREATE THE OUTPUT FEATURE CLASSES
        ##################################################################################
        arcpy.AddMessage("Construct output point feature class")
        if len(outpoints) > 0:
            arcpy.CreateFeatureclass_management(
                outWorkspace,
                outpoints,
                'POINT',
                spatial_reference=spatial_reference)
            arcpy.AddField_management(outpoints, 'IDENTIFIER', "LONG")
            fields = ['IDENTIFIER', 'SHAPE@X', 'SHAPE@Y']
            cursor = arcpy.da.InsertCursor(outpoints, fields)
            for vIndex in range(len(vertices)):
                v = vertices[vIndex]
                cursor.insertRow([vIndex, v.X, v.Y])

        arcpy.AddMessage("Construct output segment feature class")
        if len(outsegments) > 0:
            arcpy.CreateFeatureclass_management(
                outWorkspace,
                outsegments,
                'POLYLINE',
                spatial_reference=spatial_reference)
            arcpy.AddField_management(outsegments, 'EdgeIndex', "LONG")
            arcpy.AddField_management(outsegments, 'Start', "LONG")
            arcpy.AddField_management(outsegments, 'End', "LONG")
            arcpy.AddField_management(outsegments, 'IsLinear', "SHORT")
            arcpy.AddField_management(outsegments, 'IsPrimary', "SHORT")
            arcpy.AddField_management(outsegments, 'Site1', "LONG")
            arcpy.AddField_management(outsegments, 'Site2', "LONG")
            arcpy.AddField_management(outsegments, 'Cell', "LONG")
            arcpy.AddField_management(outsegments, 'Twin', "LONG")
            arcpy.AddField_management(outsegments, 'FROM_X', "DOUBLE")
            arcpy.AddField_management(outsegments, 'FROM_Y', "DOUBLE")
            arcpy.AddField_management(outsegments, 'TO_X', "DOUBLE")
            arcpy.AddField_management(outsegments, 'TO_Y', "DOUBLE")
            fields = [
                'EdgeIndex', 'Start', 'End', 'IsLinear', 'IsPrimary', 'Site1',
                'Site2', 'Cell', 'Twin', 'FROM_X', 'FROM_Y', 'TO_X', 'TO_Y',
                'SHAPE@'
            ]
            cursor = arcpy.da.InsertCursor(outsegments, fields)
            for cIndex in range(len(cells)):
                cell = cells[cIndex]
                if cell.is_open == False:
                    # progress message every 5000 cells
                    if (cIndex % 5000 == 0 and cIndex > 0):
                        arcpy.AddMessage("Cell Index: {0}".format(cIndex))
                    for i in range(len(cell.edges)):
                        e = edges[cell.edges[i]]
                        startVertex = vertices[e.start]
                        endVertex = vertices[e.end]

                        # discretization step for curved edges: 1/10 of the
                        # edge chord length
                        max_distance = Distance(
                            [startVertex.X, startVertex.Y],
                            [endVertex.X, endVertex.Y]) / 10

                        array = arcpy.Array()
                        # NOTE(review): comparing vertex objects (not indices)
                        # to -1 looks suspicious — confirm against pyvoronoi's
                        # sentinel for infinite edges.
                        if startVertex != -1 and endVertex != -1:
                            if (e.is_linear == True):
                                array = arcpy.Array([
                                    arcpy.Point(startVertex.X, startVertex.Y),
                                    arcpy.Point(endVertex.X, endVertex.Y)
                                ])
                            else:
                                try:
                                    # NOTE(review): under Python 2,
                                    # 1 / factor is integer division == 0 —
                                    # confirm the intended tolerance (1.0/factor?).
                                    points = pv.DiscretizeCurvedEdge(
                                        cell.edges[i], max_distance,
                                        1 / factor)
                                    for p in points:
                                        array.append(arcpy.Point(p[0], p[1]))
                                except pyvoronoi.FocusOnDirectixException:
                                    # fall back to a straight segment
                                    arcpy.AddMessage(
                                        "FocusOnDirectixException at: {5}. The drawing has been defaulted from a curved line to a straight line. Length {0} - From: {1}, {2} To: {3}, {4}"
                                        .format(max_distance, startVertex.X,
                                                startVertex.Y, endVertex.X,
                                                endVertex.Y, cell.edges[i]))
                                    array = arcpy.Array([
                                        arcpy.Point(startVertex.X,
                                                    startVertex.Y),
                                        arcpy.Point(endVertex.X, endVertex.Y)
                                    ])
                                except pyvoronoi.UnsolvableParabolaEquation:
                                    # dump diagnostic info, then fall back to
                                    # a straight segment
                                    edge = pv.outputEdges[cell.edges[i]]
                                    sites = pv.ReturnCurvedSiteInformation(
                                        edge)
                                    pointSite = sites[0]
                                    segmentSite = sites[1]
                                    edgeStartVertex = pv.outputVertices[
                                        edge.start]
                                    edgeEndVertex = pv.outputVertices[edge.end]
                                    print "Input Point: {0}".format(pointSite)
                                    print "Input Segment: {0}".format(
                                        segmentSite)
                                    print "Parabola Start: {0}".format(
                                        [edgeStartVertex.X, edgeStartVertex.Y])
                                    print "Parabola End: {0}".format(
                                        [edgeEndVertex.X, edgeEndVertex.Y])
                                    print "Distance: {0}".format(max_distance)
                                    arcpy.AddMessage(
                                        "UnsolvableParabolaEquation exception at: {5}. The drawing has been defaulted from a curved line to a straight line. Length {0} - From: {1}, {2} To: {3}, {4}"
                                        .format(max_distance, startVertex.X,
                                                startVertex.Y, endVertex.X,
                                                endVertex.Y, cell.edges[i]))
                                    array = arcpy.Array([
                                        arcpy.Point(startVertex.X,
                                                    startVertex.Y),
                                        arcpy.Point(endVertex.X, endVertex.Y)
                                    ])
                        polyline = arcpy.Polyline(array)
                        cursor.insertRow(
                            (cell.edges[i], e.start, e.end, e.is_linear,
                             e.is_primary, e.site1, e.site2, e.cell, e.twin,
                             startVertex.X, startVertex.Y, endVertex.X,
                             endVertex.Y, polyline))

        arcpy.AddMessage("Construct output cells feature class")
        if len(outpolygons) > 0:
            arcpy.CreateFeatureclass_management(
                outWorkspace,
                outpolygons,
                'POLYGON',
                spatial_reference=spatial_reference)
            arcpy.AddField_management(outpolygons, 'CELL_ID', "LONG")
            arcpy.AddField_management(outpolygons, 'CONTAINS_POINT', "SHORT")
            arcpy.AddField_management(outpolygons, 'CONTAINS_SEGMENT', "SHORT")
            arcpy.AddField_management(outpolygons, 'SITE', "LONG")
            arcpy.AddField_management(outpolygons, 'SOURCE_CATEGORY', "SHORT")
            arcpy.AddField_management(outpolygons, 'INPUT_TYPE', "TEXT")
            arcpy.AddField_management(outpolygons, 'INPUT_ID', "LONG")
            fields = [
                'CELL_ID', 'CONTAINS_POINT', 'CONTAINS_SEGMENT', 'SHAPE@',
                'SITE', 'SOURCE_CATEGORY', 'INPUT_TYPE', 'INPUT_ID'
            ]
            cursor = arcpy.da.InsertCursor(outpolygons, fields)
            for cIndex in range(len(cells)):
                cell = cells[cIndex]
                if cell.is_open == False:
                    if (cIndex % 5000 == 0 and cIndex > 0):
                        arcpy.AddMessage("Cell Index: {0}".format(cIndex))
                    pointArray = arcpy.Array()
                    for vIndex in cell.vertices:
                        pointArray.add(
                            arcpy.Point(vertices[vIndex].X,
                                        vertices[vIndex].Y))
                    # Sites are numbered points-first, then lines: map the
                    # cell's site index back to the originating feature OID.
                    input_type = None
                    input_id = None
                    if cell.site >= len(pointOIDs):
                        input_type = "LINE"
                        input_id = lineIds[cell.site - len(pointOIDs)]
                    else:
                        input_type = "POINT"
                        input_id = pointOIDs[cell.site]
                    polygon = arcpy.Polygon(pointArray)
                    cursor.insertRow(
                        (cell.cell_identifier, cell.contains_point,
                         cell.contains_segment, polygon, cell.site,
                         cell.source_category, input_type, input_id))
            del cursor

    except Exception:
        # Report the Python traceback and any geoprocessing errors to the tool
        tb = sys.exc_info()[2]
        tbInfo = traceback.format_tb(tb)[-1]
        arcpy.AddError('PYTHON ERRORS:\n%s\n%s: %s\n' %
                       (tbInfo, sys.exc_type, sys.exc_value))
        # print('PYTHON ERRORS:\n%s\n%s: %s\n' %
        #       (tbInfo, _sys.exc_type, _sys.exc_value))
        arcpy.AddMessage('PYTHON ERRORS:\n%s\n%s: %s\n' %
                         (tbInfo, sys.exc_type, sys.exc_value))
        gp_errors = arcpy.GetMessages(2)
        if gp_errors:
            arcpy.AddError('GP ERRORS:\n%s\n' % gp_errors)
def procesar_calidad(cant_zonas=0, data=None, campos=None):
    """Run the urban-cartography quality checks and upload the results.

    For each (UBIGEO, ZONA) pair in *data* this builds a series of error
    feature classes (the module-level outputs ``error_1`` ... ``error_11``)
    by intersecting dwellings, blocks, block fronts and road axes, then
    deletes any previous rows for those zones in the GEODATABASE.sde
    enterprise geodatabase and appends the fresh results.

    Parameters
    ----------
    cant_zonas : int
        Number of zones to fetch via ``conex.obtener_lista_zonas_calidad``
        when *data* is empty or not supplied.
    data : list of (ubigeo, zona) tuples, optional
        Zones to process.  The original signature used the mutable default
        ``data=[]``; ``None`` is used here to avoid the shared-mutable-default
        pitfall — behaviour for callers is unchanged.
    campos : list of str, optional
        Field names used to build WHERE expressions; defaults to
        ``['UBIGEO', 'ZONA']`` (previously a mutable default list).

    Side effects: creates many ``in_memory`` datasets, writes shapefiles
    under ``path_ini``, overwrites the module-level ``error_*`` outputs and
    appends to the SDE tables.  Returns ``None``.

    NOTE(review): large blocks of commented-out exploratory code from the
    original (zone-percentage filter, ``error_6``/``error_12``/``error_13``
    drafts) were removed; see version control for their text.
    """
    if campos is None:
        campos = ['UBIGEO', 'ZONA']
    if not data:
        data = conex.obtener_lista_zonas_calidad(cant_zonas)[:]
    importar_tablas_trabajo(data, campos)
    where = expresiones_consulta_arcpy.Expresion(data, campos)

    # Build the IDMANZANA key (UBIGEO+ZONA+MANZANA) on the ordered dwellings.
    # NOTE(review): the original issued this AddField/CalculateField pair
    # twice in a row; once is sufficient.
    arcpy.AddField_management(tb_viviendas_ordenadas, 'IDMANZANA', 'TEXT')
    arcpy.CalculateField_management(tb_viviendas_ordenadas, 'IDMANZANA',
                                    '!UBIGEO!+!ZONA!+!MANZANA!', 'PYTHON_9.3')
    # Currently unused downstream; kept for parity with the original.
    list_zonas = [(x[0], x[1])
                  for x in arcpy.da.SearchCursor(tb_zonas, ["UBIGEO", "ZONA"])]

    # ---- Check 1: multifamily doors (p29=6) off the block front ----
    manzanas_mfl = arcpy.MakeFeatureLayer_management(tb_manzanas, "manzanas_mfl", where)
    viviendas_mfl = arcpy.MakeFeatureLayer_management(tb_viviendas_ordenadas,
                                                      "viviendas_mfl", where)
    frentes_mfl = arcpy.MakeFeatureLayer_management(tb_frentes, "frentes_mfl", where)
    mzs_line = arcpy.FeatureToLine_management(manzanas_mfl, "in_memory/mzs_line")
    puertas_multifamiliar = arcpy.MakeFeatureLayer_management(
        tb_viviendas_ordenadas, "puertas_multifamiliar", "p29=6")
    puertas_multifamiliar_afuera = arcpy.SelectLayerByLocation_management(
        puertas_multifamiliar, "INTERSECT", mzs_line, '', "NEW_SELECTION", "INVERT")
    viviendas_selecc_frentes_mfl = arcpy.SelectLayerByLocation_management(
        viviendas_mfl, "INTERSECT", mzs_line)
    viviendas_selecc_frentes = arcpy.CopyFeatures_management(
        viviendas_selecc_frentes_mfl, "in_memory/viv_selecc_frentes")
    arcpy.CopyFeatures_management(puertas_multifamiliar_afuera, error_1)
    # Zones with multifamily-door errors (currently unused downstream).
    list_1 = list(set([(x[0], x[1])
                       for x in arcpy.da.SearchCursor(error_1, ["UBIGEO", "ZONA"])]))
    zonas_error_puertas_multi = list(set([
        (x[0], x[1]) for x in arcpy.da.SearchCursor(error_1, ["UBIGEO", "ZONA"])]))

    # ---- Check 2: blocks with no road axis within 20 m ----
    ejes_viales_mfl = arcpy.MakeFeatureLayer_management(tb_ejes_viales, "ejes_viales_mfl")
    manzanas_sin_vias = arcpy.SelectLayerByLocation_management(
        manzanas_mfl, "INTERSECT", ejes_viales_mfl, "20 METERS",
        "NEW_SELECTION", "INVERT")
    arcpy.CopyFeatures_management(manzanas_sin_vias, error_2)

    # ---- Check 3: road axes crossing the interior of a block ----
    # Shrink each block by a 0.5 m edge buffer so only axes truly inside remain.
    line_mzs = arcpy.FeatureToLine_management(tb_manzanas_ordenadas, "in_memory/line_mzs")
    buffer_line = arcpy.Buffer_analysis(line_mzs, "in_memory/buffer_line", "0.50 meters")
    mzs_cortadas = arcpy.Erase_analysis(tb_manzanas_ordenadas, buffer_line,
                                        "in_memory/erase_mzs")
    manzanas_cortadas_mfl = arcpy.MakeFeatureLayer_management(mzs_cortadas, "mzs_cortadas_mfl")
    vias_dentro_manzana = arcpy.SelectLayerByLocation_management(
        manzanas_cortadas_mfl, "INTERSECT", tb_ejes_viales, '', "NEW_SELECTION")
    arcpy.CopyFeatures_management(vias_dentro_manzana, error_3)
    list_3 = []
    if int(arcpy.GetCount_management(error_3).getOutput(0)) > 0:
        list_3 = list(set([(x[0], x[1])
                           for x in arcpy.da.SearchCursor(error_3, ["UBIGEO", "ZONA"])]))

    # ---- Check 5: dwellings more than 0.2 m outside every block ----
    viviendas_mfl = arcpy.MakeFeatureLayer_management(tb_viviendas_ordenadas,
                                                      "viviendas_mfl", where)
    viviendas_afuera_manzana = arcpy.SelectLayerByLocation_management(
        viviendas_mfl, "INTERSECT", tb_manzanas_ordenadas, '0.2 meters',
        "NEW_SELECTION", "INVERT")
    arcpy.CopyFeatures_management(viviendas_afuera_manzana, error_5)
    list_4 = []
    if int(arcpy.GetCount_management(error_5).getOutput(0)) > 0:
        list_4 = list(set([(x[0], x[1])
                           for x in arcpy.da.SearchCursor(error_5, ["UBIGEO", "ZONA"])]))

    # ---- Check 4: start points lying on front segments without dwellings ----
    lineas_viviendas = arcpy.PointsToLine_management(
        viviendas_selecc_frentes, 'in_memory/lineas_viviendas', "IDMANZANA", "ID_REG_OR")
    puntos_extremos = arcpy.FeatureVerticesToPoints_management(
        lineas_viviendas, 'in_memory/puntos_extremos', "BOTH_ENDS")
    puntos_extremos_buffer = arcpy.Buffer_analysis(
        puntos_extremos, 'in_memory/puntos_extremos_buffer', "0.2 meters")
    erase_lineas = arcpy.Erase_analysis(mzs_line, puntos_extremos_buffer,
                                        'in_memory/erase_lineas')
    split = arcpy.SplitLine_management(erase_lineas, "in_memory/split")
    dissolve = arcpy.Dissolve_management(split, "in_memory/dissolve",
                                         "UBIGEO;CODCCPP;ZONA;MANZANA", "",
                                         "MULTI_PART", "DISSOLVE_LINES")
    dissolve_multi = arcpy.MultipartToSinglepart_management(dissolve,
                                                            "in_memory/dissolve_multi")
    dissolve_mfl = arcpy.MakeFeatureLayer_management(dissolve_multi, 'dissolve_mfl')
    puntos_inicio_mfl = arcpy.MakeFeatureLayer_management(tb_puntos_inicio,
                                                          'puntos_inicio_mfl')
    segmentos_selec = arcpy.SelectLayerByLocation_management(
        dissolve_mfl, "INTERSECT", tb_viviendas_ordenadas, '',
        "NEW_SELECTION", "INVERT")
    tb_segmentos_selec = arcpy.CopyFeatures_management(
        segmentos_selec, "{}/tb_segmentos_selec.shp".format(path_ini))
    puntos_inici_selec = arcpy.SelectLayerByLocation_management(
        puntos_inicio_mfl, "INTERSECT", tb_segmentos_selec, '',
        "NEW_SELECTION", "INVERT")
    arcpy.CopyFeatures_management(puntos_inici_selec, error_4)
    list_5 = []
    if int(arcpy.GetCount_management(error_4).getOutput(0)) > 0:
        list_5 = list(set([(x[0], x[1])
                           for x in arcpy.da.SearchCursor(error_4, ["UBIGEO", "ZONA"])]))

    # ---- Check 7: dwelling front number disagrees with the front's number ----
    resultado = arcpy.Intersect_analysis([tb_viviendas_ordenadas, tb_frentes],
                                         'in_memory/results')
    arcpy.Select_analysis(resultado, error_7, 'FRENTE_ORD<>FRENTE_ORD_1')
    fields = arcpy.ListFields(error_7)
    list_campos_validos = ['FID', 'Shape', 'UBIGEO', 'CODCCPP', 'ZONA',
                           'MANZANA', 'ID_REG_OR', 'FRENTE_ORD']
    delete_fields = [el.name for el in fields if el.name not in list_campos_validos]
    arcpy.DeleteField_management(error_7, delete_fields)

    # ---- Check 8: block fronts whose shape does not match the block ----
    temp_frentes = arcpy.SelectLayerByLocation_management(
        frentes_mfl, "WITHIN", mzs_line, '', "NEW_SELECTION", "INVERT")
    arcpy.CopyFeatures_management(temp_frentes, error_8)
    list_8 = []
    if int(arcpy.GetCount_management(error_8).getOutput(0)) > 0:
        list_8 = list(set([(x[0], x[1])
                           for x in arcpy.da.SearchCursor(error_8, ["UBIGEO", "ZONA"])]))

    # ---- Check 9: dwelling numbering along each block front ----
    lineas_viviendas = arcpy.PointsToLine_management(
        viviendas_selecc_frentes, 'in_memory/lineas_viviendas', "IDMANZANA", "ID_REG_OR")
    viviendas_selecc_frentes_buffer = arcpy.Buffer_analysis(
        viviendas_selecc_frentes, "in_memory/puntos_extremos_buffer", "0.2 meters")
    erase_lineas = arcpy.Erase_analysis(lineas_viviendas,
                                        viviendas_selecc_frentes_buffer,
                                        'in_memory/erase_lineas')
    split = arcpy.SplitLine_management(erase_lineas, path_ini + "/split.shp")
    # NOTE(review): the backslash paths below are preserved verbatim from the
    # original ("\m" is not an escape sequence, so they work, but forward
    # slashes would be safer).
    mz_line_erase = arcpy.Erase_analysis(mzs_line, viviendas_selecc_frentes_buffer,
                                         "in_memory\mz_line_erase")
    mz_line_erase_multi = arcpy.MultipartToSinglepart_management(mz_line_erase,
                                                                 'in_memory\m_l_e_m')
    result = arcpy.Statistics_analysis(mz_line_erase_multi, 'in_memory/result',
                                       [['FID', "MAX"]], ["Shape"])
    maxids = [[x[0]] for x in arcpy.da.SearchCursor(result, ["MAX_FID"], 'FREQUENCY>1')]
    if len(maxids) == 0:
        # No duplicates: select on an impossible FID so the result is empty.
        where_ids = expresiones_consulta_arcpy.Expresion_2([["-1"]], [["FID", "SHORT"]])
    else:
        where_ids = expresiones_consulta_arcpy.Expresion_2(maxids, [["FID", "SHORT"]])
    arcpy.Select_analysis(mz_line_erase_multi, error_9, where_ids)

    # ---- Check 11: child multifamily doors sitting on the block front ----
    puertas_hijos_multifamilar = arcpy.MakeFeatureLayer_management(
        tb_viviendas_ordenadas, "puertas_multifamiliar",
        "(p29=1 or p29=3) and ID_REG_PAD<>0 ")
    error_11_mfl = arcpy.SelectLayerByLocation_management(
        puertas_hijos_multifamilar, "INTERSECT", mzs_line, '', "NEW_SELECTION")
    arcpy.CopyFeatures_management(error_11_mfl, error_11)

    # ---- Upload: append every error layer to the enterprise geodatabase ----
    arcpy.env.workspace = "Database Connections/PruebaSegmentacion.sde"
    arcpy.env.outputCoordinateSystem = arcpy.SpatialReference(4326)  # WGS84
    if not arcpy.Exists("GEODATABASE.sde"):
        # SECURITY(review): hard-coded credentials; move to configuration.
        arcpy.CreateDatabaseConnection_management(
            "Database Connections", "GEODATABASE.sde", "SQL_SERVER", ip_server,
            "DATABASE_AUTH", "sde", "$deDEs4Rr0lLo", "#", "GEODB_CPV_SEGM",
            "#", "#", "#", "#")
    arcpy.env.workspace = "Database Connections/GEODATABASE.sde"
    path_conexion2 = "Database Connections/GEODATABASE.sde"
    path_calidad = path_conexion2 + "/GEODB_CPV_SEGM.SDE.CALIDAD_URBANO"
    calidad_error_1_input = path_calidad + '/GEODB_CPV_SEGM.SDE.ERROR_1_INPUT_PUERTA_MULTIFAMILIAR_DENTRO_MZ'
    calidad_error_2_input = path_calidad + '/GEODB_CPV_SEGM.SDE.ERROR_2_INPUT_MANZANAS_SIN_VIAS'
    calidad_error_3_input = path_calidad + '/GEODB_CPV_SEGM.SDE.ERROR_3_INPUT_MANZANAS_VIAS_DENTRO'
    calidad_error_4_input = path_calidad + '/GEODB_CPV_SEGM.SDE.ERROR_4_INPUT_PUNTOS_INICIO'
    calidad_error_5_input = path_calidad + '/GEODB_CPV_SEGM.SDE.ERROR_5_INPUT_VIVIENDAS_AFUERA_MZ'
    calidad_error_7_input = path_calidad + '/GEODB_CPV_SEGM.SDE.ERROR_7_INPUT_VIVIENDAS_ERROR_FRENTE'
    calidad_error_8_input = path_calidad + '/GEODB_CPV_SEGM.SDE.ERROR_8_INPUT_FRENTES_MANZANAS_FORMA'
    calidad_error_9_input = path_calidad + '/GEODB_CPV_SEGM.SDE.ERROR_9_INPUT_ENUMERACION_VIV_POR_FRENTE'
    calidad_error_10_input = path_calidad + '/GEODB_CPV_SEGM.SDE.ERROR_10_INPUT_VIV_ERROR_NOMBRE_VIA'
    calidad_error_11_input = path_calidad + '/GEODB_CPV_SEGM.SDE.ERROR_11_INPUT_PUERTAS_HIJOS_MULTI_EN_FRENTE_MZ'
    # [source dataset, destination SDE table, 1 = feature class].
    list_errores = [
        [error_1, calidad_error_1_input, 1],
        [error_2, calidad_error_2_input, 1],
        [error_3, calidad_error_3_input, 1],
        [error_4, calidad_error_4_input, 1],
        [error_5, calidad_error_5_input, 1],
        [error_8, calidad_error_8_input, 1],
        [error_9, calidad_error_9_input, 1],
        # [error_10, calidad_error_10_input, 1],
        [error_11, calidad_error_11_input, 1],
        [error_7, calidad_error_7_input, 1],
    ]

    # Purge previous rows for each processed zone before appending new results.
    conn = conex.Conexion2()
    cursor = conn.cursor()
    for el in data:
        ubigeo = el[0]
        zona = el[1]
        sql_query = """
        DELETE GEODB_CPV_SEGM.SDE.ERROR_1_INPUT_PUERTA_MULTIFAMILIAR_DENTRO_MZ where ubigeo='{ubigeo}' and zona='{zona}'
        DELETE GEODB_CPV_SEGM.SDE.ERROR_2_INPUT_MANZANAS_SIN_VIAS where ubigeo='{ubigeo}' and zona='{zona}'
        delete GEODB_CPV_SEGM.SDE.ERROR_3_INPUT_MANZANAS_VIAS_DENTRO where ubigeo='{ubigeo}' and zona='{zona}'
        delete GEODB_CPV_SEGM.SDE.ERROR_4_INPUT_PUNTOS_INICIO where ubigeo='{ubigeo}' and zona='{zona}'
        delete GEODB_CPV_SEGM.SDE.ERROR_5_INPUT_VIVIENDAS_AFUERA_MZ where ubigeo='{ubigeo}' and zona='{zona}'
        delete GEODB_CPV_SEGM.SDE.ERROR_7_INPUT_VIVIENDAS_ERROR_FRENTE where ubigeo='{ubigeo}' and zona='{zona}'
        delete GEODB_CPV_SEGM.SDE.ERROR_8_INPUT_FRENTES_MANZANAS_FORMA where ubigeo='{ubigeo}' and zona='{zona}'
        delete GEODB_CPV_SEGM.SDE.ERROR_9_INPUT_ENUMERACION_VIV_POR_FRENTE where ubigeo='{ubigeo}' and zona='{zona}'
        delete GEODB_CPV_SEGM.SDE.ERROR_10_INPUT_VIV_ERROR_NOMBRE_VIA where ubigeo='{ubigeo}' and zona='{zona}'
        delete GEODB_CPV_SEGM.SDE.ERROR_11_INPUT_PUERTAS_HIJOS_MULTI_EN_FRENTE_MZ where ubigeo='{ubigeo}' and zona='{zona}'
        """.format(ubigeo=ubigeo, zona=zona)
        cursor.execute(sql_query)
        conn.commit()
    conn.close()

    i = 0
    for el in list_errores:
        i = i + 1
        print(el[0])
        if int(el[2]) > 1:
            # Flag values above 1 would denote attribute-only tables.
            a = arcpy.MakeTableView_management(el[0], "a{}".format(i))
        else:
            a = arcpy.MakeFeatureLayer_management(el[0], "a{}".format(i))
        arcpy.Append_management(a, el[1], "NO_TEST")

    # Post-processing bookkeeping per zone.
    for el in data:
        conex.actualizar_errores_input_adicionales(ubigeo=el[0], zona=el[1])
dmax = stream.length d = 0 # start generating points at start of line while d < dmax: point = stream.positionAlongLine(d) intersects.append(point) # add the point to the list d += linespacing # add the user-defined interval between points del scur #################### create transect lines ############################# transects = [] # list of transect lines across the stream # split the stream into individual straight line segments segments = arcpy.SplitLine_management(inputline, arcpy.Geometry()) # loop through the intersect points generated in previous step for points in intersects: pt = points.firstPoint # get XY coords of point xcoord = pt.X ycoord = pt.Y # find the line segment overlapping the intersect point # code snippets in this block derived from Linear Sampling Toolbox developed by Vini Indriasari for line in segments: if line.contains(pt) or line.touches(pt): # get start and end of line segment startpoint = line.firstPoint endpoint = line.lastPoint
###this script generates a line file from the grouse database and then ###marks points that are over 2km for further inspection on if they are ###accurate or not import arcpy point = output = date = #format Jan01_19 #create line line = arcpy.PointsToLine_management(point, output + "\\" + date, "NAME") print("line created") #split into polyline by vertex line_split = arcpy.SplitLine_management(line, output + "\\" + date + "_s") print("line split") #add length field, calculate length = arcpy.AddField_management(line_split, "length_m", "DOUBLE") print("length field added") arcpy.CalculateGeometryAttributes_management(line_split, [["length_m", "LENGTH_GEODESIC"]], "METERS") print("geometry calculated") ###mark as possible problem prob = arcpy.AddField_management(line_split, "mark", "TEXT") print("mark field added") cursor = arcpy.da.UpdateCursor(line_split, ["length_m", "mark"]) print("cursor created") for updateRow in cursor:
##maxval = arcpy.GetRasterProperties_management (xxplus, "MAXIMUM") ##maxval = float(str(maxval)) ##maxcost = maxval * 10 costRaster = arcpy.sa.Con(linkLayer, "50", xxplus, "Value = 0") costRaster.save(tWorkspace + os.sep + "costraster1") # Create source and destination polygons (NSEW) arcpy.AddMessage("Creating corridor destination polygons...") maxval = arcpy.GetRasterProperties_management (linkLayer, "MAXIMUM") xxblank = arcpy.sa.Reclassify(linkLayer, "VALUE", "0 " + str(maxval) + " 1", "DATA") arcpy.RasterToPolygon_conversion(xxblank, tWorkspace + os.sep + "xxpoly", "SIMPLIFY", "VALUE") # Added feature envelope to polygon to clean unwanted vertices from output arcpy.FeatureEnvelopeToPolygon_management(tWorkspace + os.sep + "xxpoly.shp", tWorkspace + os.sep + "xxpoly2.shp", "SINGLEPART") arcpy.FeatureToLine_management(tWorkspace + os.sep + "xxpoly2.shp", tWorkspace + os.sep + "xxline.shp", "", "ATTRIBUTES") arcpy.SplitLine_management(tWorkspace + os.sep + "xxline.shp", tWorkspace + os.sep + "xxsplit.shp") fc = arcpy.Buffer_analysis(tWorkspace + os.sep + "xxsplit.shp", tWorkspace + os.sep + "buf.shp", CellSize + " Meters", "RIGHT", "FLAT", "NONE", "") fc = arcpy.MakeFeatureLayer_management(fc, "pLayer") rows = arcpy.SearchCursor(fc) for row in rows: p = row.getValue("FID") arcpy.MakeFeatureLayer_management(fc, "fLayer") arcpy.SelectLayerByAttribute_management(fc, "NEW_SELECTION", "\"FID\" = " + str(p)) arcpy.MakeFeatureLayer_management(fc, "pLayer_" + str(p)) west = "pLayer_0" north = "pLayer_1"
pdf = PdfPages(arcpy.GetParameterAsText(8) + ".pdf") ############################################################################### # Straighten Lines arcpy.SimplifyLine_cartography(Line_features, Output_Line_features, "POINT_REMOVE", float(arcpy.GetParameterAsText(3)), "FLAG_ERRORS", "NO_KEEP", "NO_CHECK") arcpy.AddMessage("Simplifying Lines complete") # Create temporary input file for Split Line tool tempFC = arcpy.env.scratchGDB + os.path.sep + "tempFC" arcpy.CopyFeatures_management(Output_Line_features, tempFC) # Overwrite Output lines with line segments arcpy.SplitLine_management(tempFC, Output_Line_features) arcpy.Delete_management(tempFC) arcpy.AddMessage("Splitting Lines Complete") # Make a temporary feature class to hold line segment vertices tempVert = arcpy.env.scratchGDB + os.path.sep + "tempVert" # Process: Feature Vertices To Points arcpy.FeatureVerticesToPoints_management(Output_Line_features, tempVert, "BOTH_ENDS") # Process: Add XY Coordinates to points arcpy.AddXY_management(tempVert) # Process: Add Elevation Info to points arcpy.sa.ExtractMultiValuesToPoints(tempVert, Elevation_Raster, "BILINEAR")