def NewBNDpoly(old_boundary, modifying_feature, new_bndpoly='boundary_poly', vertexdist='25 METERS', snapdist='25 METERS', verbose=True):
    """Snap the boundary polygon to the shoreline points wherever they don't
    already match, as long as they are within the snap distance of each other.

    old_boundary -- input line or polygon boundary to be modified
    modifying_feature -- feature class the boundary is snapped to
    new_bndpoly -- name/path of the output polygon
    vertexdist -- densification interval applied before snapping
    snapdist -- maximum snapping distance
    verbose -- print a completion message when True
    """
    in_shape_type = arcpy.Describe(old_boundary).shapeType
    if in_shape_type in ("Line", "Polyline"):
        # Line input: construct the output polygon from the line features.
        arcpy.FeatureToPolygon_management(old_boundary, new_bndpoly, '1 METER')
    else:
        # Polygon input: copy it to the output location unchanged.
        out_dir = os.path.split(new_bndpoly)[0]
        if not len(out_dir):
            out_dir = arcpy.env.workspace
        arcpy.FeatureClassToFeatureClass_conversion(old_boundary, out_dir, os.path.basename(new_bndpoly))
    mod_shape_type = arcpy.Describe(modifying_feature).shapeType
    if mod_shape_type in ("Line", "Polyline"):
        # Densify the snapping target so its vertices are close together.
        arcpy.Densify_edit(modifying_feature, 'DISTANCE', vertexdist)
    # Densify the boundary so there are vertices near enough to snap.
    arcpy.Densify_edit(new_bndpoly, 'DISTANCE', vertexdist)
    arcpy.Snap_edit(new_bndpoly, [[modifying_feature, 'VERTEX', snapdist]])  # Takes a while
    if verbose:
        print("Created: {} ... Should be in your home geodatabase.".format(os.path.basename(new_bndpoly)))
    return new_bndpoly  # string name of new polygon
def setDamAttributes(bratOutput, outputPath, dams, reqFields, newFields):
    """
    Snaps observed dams to the BRAT network, spatially joins them to the
    segments, and populates the dam-attribute fields on the output.
    :param bratOutput: The polyline we're basing our stuff off of
    :param outputPath: The polyline shapefile with BRAT output
    :param dams: The points shapefile of observed dams
    :param reqFields: The fields read/written by the update cursor
    :param newFields: The fields we want to update for dam attributes
    :return:
    """
    # Pull dams within 30 m onto the network so the spatial join catches them.
    arcpy.Snap_edit(dams, [[bratOutput, 'EDGE', '30 Meters']])
    arcpy.SpatialJoin_analysis(bratOutput,
                               dams,
                               outputPath,
                               join_operation='JOIN_ONE_TO_ONE',
                               join_type='KEEP_ALL',
                               match_option='INTERSECT')
    addFields(outputPath, newFields)
    with arcpy.da.UpdateCursor(outputPath, reqFields) as cursor:
        for row in cursor:
            # The last four attributes, in order: dam count, segment length,
            # existing capacity (oCC_EX), historic capacity (oCC_PT).
            damNum, segLength, oCC_EX, oCC_PT = row[-4], row[-3], row[-2], row[-1]
            row[0] = damNum
            row[1] = damNum / segLength * 1000  # dams per km
            try:
                row[2] = damNum / oCC_PT  # observed vs. historic capacity
            except ZeroDivisionError:
                row[2] = 0
            cursor.updateRow(row)
    # Drop the bookkeeping fields the spatial join added.
    arcpy.DeleteField_management(outputPath, ["Join_Count", "TARGET_FID"])
def Snap(self):
    """Snap road/stream crossing points (DoanVuotSongSuoiP) onto the
    intersections of road centerlines and streams, then copy the result to
    the Final geodatabase. Paths are hard-coded to the 25K->50K workspace.
    """
    try:
        arcpy.env.overwriteOutput = 1
        arcpy.AddMessage("\n# Xu ly DoanVuotSongSuoiP")
        _path_Layer_DoanVuotSongSuoiP = "C:/Generalize_25_50/50K_Process.gdb/GiaoThong/DoanVuotSongSuoiP"
        _path_Layer_DoanVuotSongSuoiP_Final = "C:/Generalize_25_50/50K_Final.gdb/GiaoThong/DoanVuotSongSuoiP"
        _path_Layer_DoanVuotSongSuoiP_Snap = "C:/Generalize_25_50/50K_Process.gdb/GiaoThong/DoanVuotSongSuoiP_Snap"
        _path_Layer_DoanTimDuongBo = "C:/Generalize_25_50/50K_Process.gdb/GiaoThong/DoanTimDuongBo"
        _path_Layer_SongSuoiL = "C:/Generalize_25_50/50K_Process.gdb/ThuyHe/SongSuoiL"
        # Intersection points of roads and streams are the snap targets.
        arcpy.Intersect_analysis(
            [_path_Layer_DoanTimDuongBo, _path_Layer_SongSuoiL],
            _path_Layer_DoanVuotSongSuoiP_Snap, "", "", "point")
        arcpy.Snap_edit(
            _path_Layer_DoanVuotSongSuoiP,
            [[_path_Layer_DoanVuotSongSuoiP_Snap, "EDGE", "25 Meters"]])
        arcpy.CopyFeatures_management(_path_Layer_DoanVuotSongSuoiP,
                                      _path_Layer_DoanVuotSongSuoiP_Final)
    # BUG FIX: exceptions have no portable `.message` attribute (removed in
    # Python 3); str(error) works on both Python 2 and 3.
    except OSError as error:
        arcpy.AddMessage("Error" + str(error))
    except ValueError as error:
        arcpy.AddMessage("Error" + str(error))
    except arcpy.ExecuteError as error:
        arcpy.AddMessage("Error" + str(error))
    finally:
        # Clear the scratch workspace regardless of success.
        arcpy.Delete_management("in_memory")
def get_isolated_ids(nodes, link):
    """Return a dict keyed by the _ID_ of every node that still does not
    touch the link network after snapping (values are empty strings)."""
    # snap nodes to calculate routes.  The old nodes are copied first and the
    # copies snapped to the new links, so the sum distance can be calculated.
    arcpy.CopyFeatures_management(nodes, snapped_old_nodes)
    arcpy.MakeFeatureLayer_management(snapped_old_nodes, "snapped")
    arcpy.Snap_edit(snapped_old_nodes, [[link, "EDGE", near_snap_dist]])
    # Inverted selection keeps only the points that missed the network.
    arcpy.SelectLayerByLocation_management("snapped", "INTERSECT", link, "",
                                           "NEW_SELECTION", "INVERT")
    isolated = {}
    for feat in arcpy.da.SearchCursor("snapped", '_ID_'):
        isolated[feat[0]] = ""
    return isolated
def save_fixed_points(stream_network, fixed_folder, watershed_folders):
    """Snap manually fixed TOR/BOR points back onto the stream network, clip
    a copy into each watershed's Unsnapped_Fixed folder, then rename the
    project-wide shapefiles back to their *_Fixed names."""
    arcpy.AddMessage("\nSaving Fixed Points...\n")
    # Catches error on script rerun: restore the editable To_Fix_* names if a
    # previous run already renamed them.
    for fixed_name, edit_name in (("TOR_Points_Fixed.shp", "To_Fix_TOR.shp"),
                                  ("BOR_Points_Fixed.shp", "To_Fix_BOR.shp")):
        fixed_path = os.path.join(fixed_folder, fixed_name)
        if os.path.exists(fixed_path):
            arcpy.Rename_management(fixed_path, edit_name)
    # Shapefiles holding the points being fixed.
    tor_points = os.path.join(fixed_folder, "To_Fix_TOR.shp")
    bor_points = os.path.join(fixed_folder, "To_Fix_BOR.shp")
    # 10 m snap to the network (in case manual editing wasn't perfect).
    arcpy.Snap_edit(tor_points, [[stream_network, "EDGE", "10 Meters"]])
    arcpy.Snap_edit(bor_points, [[stream_network, "EDGE", "10 Meters"]])
    for watershed_folder in watershed_folders:
        # Clip both point sets to this watershed's boundary.
        boundary = os.path.join(watershed_folder, "Inputs", "Watershed_Boundary",
                                "Watershed_Boundary.shp")
        tor_save = os.path.join(watershed_folder, "Intermediates", "Points",
                                "Unsnapped_Fixed", "TOR_Points_Fixed.shp")
        bor_save = os.path.join(watershed_folder, "Intermediates", "Points",
                                "Unsnapped_Fixed", "BOR_Points_Fixed.shp")
        arcpy.Clip_analysis(tor_points, boundary, tor_save)
        arcpy.Clip_analysis(bor_points, boundary, bor_save)
    # Rename fixed project wide.
    arcpy.Rename_management(tor_points, "TOR_Points_Fixed.shp")
    arcpy.Rename_management(bor_points, "BOR_Points_Fixed.shp")
def get_coordinates_on_nearest_links(node_shp, link_shp, link_ids):
    """For each node i, snap it onto the link whose _ID_ is link_ids[i] and
    collect the snapped XY coordinate.

    :param node_shp: point shapefile; feature i pairs with link_ids[i]
    :param link_shp: line shapefile carrying the "_ID_" field
    :param link_ids: list of link IDs, same length as the node features
    :return: list of (x, y) tuples, one per node
    """
    # they have same number of features
    link_shp_f = "link_shpf"
    arcpy.MakeFeatureLayer_management(node_shp, "mf")
    arcpy.MakeFeatureLayer_management(link_shp, link_shp_f)
    nearxy = []
    count = len(link_ids)
    add_row(node_shp, count - 1)
    for i in range(count):
        # Select the target link by ID and the single node by FID.
        where_clause = """ "_ID_" = %d""" % link_ids[i]
        arcpy.SelectLayerByAttribute_management(link_shp_f, "NEW_SELECTION",
                                                where_clause)
        arcpy.SelectLayerByAttribute_management("mf", "NEW_SELECTION",
                                                "FID IN ({})".format(i))
        # the max snap distance is taken from there
        arcpy.Snap_edit("mf", [[link_shp_f, "EDGE", buffer_dist_list[-1]]])
        # BUG FIX: the cursor previously read from an undefined name `m1`
        # (NameError); it must read the snapped node layer "mf", on which
        # only feature i is currently selected.
        with arcpy.da.SearchCursor("mf", ["SHAPE@XY"]) as curs:
            for xy in curs:
                nearxy.append(xy[0])
    return nearxy
def snap_and_split(gdb_feature_path):
    """Snap bike station points to the closest bike route and split the
    route segments at the station points.

    :param gdb_feature_path: geodatabase path containing Bike_Stations and
        CENTRELINE_BIKEWAY_OD_Layer; Bikeways_Split is written there
    """
    # BUG FIX: arcpy.Snap_edit expects a snap environment *list* of
    # [snap_features, snap_type, distance] entries; the previous call mashed
    # the layer path, "EDGE" and the distance into a single string, which is
    # not a valid snap environment.
    arcpy.Snap_edit(
        (gdb_feature_path + r'/Bike_Stations'),
        [[(gdb_feature_path + r'/CENTRELINE_BIKEWAY_OD_Layer'), "EDGE", "100 Meters"]]
    )
    # split bike route line segments at station points (necessary for proper
    # network analyst calculations)
    arcpy.SplitLineAtPoint_management(
        (gdb_feature_path + r'/CENTRELINE_BIKEWAY_OD_Layer'),
        (gdb_feature_path + r'/Bike_Stations'),
        (gdb_feature_path + r'/Bikeways_Split'),
        "1 Meters"
    )
    print('Snap and Split Complete!')
def snapping_overhead_cable(unsplit):
    """Close endpoint gaps within each named overhead-cable circuit, then
    dissolve the unsplit lines into multipart circuit features.

    :param unsplit: line feature class of unsplit cable segments
    :return: result of the final Dissolve (KVL_Dissolve_Temp)
    """
    # First dissolve: used only to enumerate the distinct circuit names.
    KVL_dissolve = arcpy.Dissolve_management(
        unsplit, "KVL_Dissolve_Temp",
        ["Name", "Voltage", "Start", "End", "Circuit", "Operate_Name"],
        multi_part="MULTI_PART")
    rows = arcpy.da.SearchCursor(KVL_dissolve, ['Name'])
    for row in rows:
        # Select one circuit by name and snap its segment ENDs to themselves,
        # closing gaps of up to 10 (linear units of the data) between
        # endpoints of the same circuit.
        line_selection = arcpy.MakeFeatureLayer_management(
            unsplit, "Line_Selection", "Name = '{0}'".format(row[0]))
        arcpy.Snap_edit(line_selection, [[line_selection, "END", 10]])
    # Second dissolve re-runs on the now-snapped lines with the full
    # attribute set, overwriting KVL_Dissolve_Temp.
    KVL_dissolve = arcpy.Dissolve_management(
        unsplit, "KVL_Dissolve_Temp", [
            "Name", "Voltage", "Start", "End", "Circuit", "Operate_Name",
            "Trace_Version", "Status"
        ],
        multi_part="MULTI_PART")
    return KVL_dissolve
def main(proj_path, in_network, out_name, surveyed_dams=None, conservation_areas=None, conservation_easements=None):
    """
    For each stream segment, assigns a conservation and restoration class
    :param proj_path: The file path to the BRAT project folder
    :param in_network: The input Combined Capacity Network
    :param out_name: The output name for the Conservation Restoration Model
    :param surveyed_dams: The dams shapefile
    :param conservation_areas: The conservation areas shapefile
    :param conservation_easements: The conservation easements shapefile
    :return: path to the output network shapefile
    """
    arcpy.env.overwriteOutput = True
    out_network = os.path.dirname(in_network) + "/" + out_name + ".shp"
    arcpy.CopyFeatures_management(in_network, out_network)

    # check for oPBRC fields and delete if exists
    old_fields = [f.name for f in arcpy.ListFields(out_network)]
    cons_rest_fields = [
        "oPBRC_UI", "oPBRC_UD", "oPBRC_CR", "DamStrat", "ObsDam", "ConsRest",
        "ConsEase"
    ]
    for f in cons_rest_fields:
        if f in old_fields:
            arcpy.DeleteField_management(out_network, f)

    # add all new fields
    arcpy.AddField_management(out_network, "oPBRC_UI", "TEXT", "", "", 30)
    arcpy.AddField_management(out_network, "oPBRC_UD", "TEXT", "", "", 30)
    arcpy.AddField_management(out_network, "oPBRC_CR", "TEXT", "", "", 40)
    arcpy.AddField_management(out_network, "DamStrat", "TEXT", "", "", 60)
    arcpy.AddField_management(out_network, "ObsDam", "TEXT", "", "", 10)
    arcpy.AddField_management(out_network, "ConsArea", "TEXT", "", "", 10)
    arcpy.AddField_management(out_network, "ConsEase", "TEXT", "", "", 10)

    # use old historic capacity field names if new ones not in combined capacity output
    if 'oVC_PT' in old_fields:
        ovc_hpe = 'oVC_PT'
    else:
        ovc_hpe = 'oVC_Hpe'
    if 'oCC_PT' in old_fields:
        occ_hpe = 'oCC_PT'
    else:
        occ_hpe = 'oCC_HPE'

    # Field order matters: cursor rows below are indexed positionally.
    fields = [
        'oPBRC_UI', 'oPBRC_UD', 'oPBRC_CR', ovc_hpe, 'oVC_EX', occ_hpe,
        'oCC_EX', 'iGeo_Slope', 'mCC_HisDep', 'iPC_VLowLU', 'iPC_HighLU',
        'oPC_Dist', 'iPC_LU', 'iHyd_SPLow', 'iHyd_SP2', 'DamStrat',
        'iPC_RoadX', 'iPC_Canal', 'ObsDam', 'ConsArea', 'ConsEase'
    ]

    # add arbitrarily large value to avoid error
    if 'iPC_Canal' not in old_fields:
        arcpy.AddField_management(out_network, "iPC_Canal", "DOUBLE")
        arcpy.CalculateField_management(out_network, 'iPC_Canal', """500000""", "PYTHON")

    # 'oPBRC_UI' (Areas beavers can build dams, but could be undesireable impacts)
    # arcpy.AddMessage(fields)
    # field_check = [f.name for f in arcpy.ListFields(out_network)]
    # arcpy.AddMessage("--------------------")
    # arcpy.AddMessage(field_check)
    with arcpy.da.UpdateCursor(out_network, fields) as cursor:
        for row in cursor:
            occ_ex = row[6]
            opc_dist = row[11]
            ipc_lu = row[12]
            ipc_canal = row[17]
            if occ_ex <= 0:
                # if capacity is none risk is negligible
                row[0] = "Negligible Risk"
            elif ipc_canal <= 20:
                # if canals are within 20 meters (usually means canal is on the reach)
                row[0] = "Major Risk"
            else:
                # if infrastructure within 30 m or land use is high
                # if capacity is frequent or pervasive risk is considerable
                # if capaicty is rare or ocassional risk is some
                if opc_dist <= 30 or ipc_lu >= 0.66:
                    if occ_ex >= 5.0:
                        row[0] = "Major Risk"
                    else:
                        row[0] = "Considerable Risk"
                # if infrastructure within 30 to 100 m
                # if capacity is frequent or pervasive risk is some
                # if capacity is rare or ocassional risk is minor
                elif opc_dist <= 100:
                    if occ_ex >= 5.0:
                        row[0] = "Considerable Risk"
                    else:
                        row[0] = "Minor Risk"
                # if infrastructure within 100 to 300 m or land use is 0.33 to 0.66 risk is minor
                elif opc_dist <= 300 or ipc_lu >= 0.33:
                    row[0] = "Minor Risk"
                else:
                    row[0] = "Negligible Risk"
            cursor.updateRow(row)

    # 'oPBRC_UD' (Areas beavers can't build dams and why)
    with arcpy.da.UpdateCursor(out_network, fields) as cursor:
        for row in cursor:
            # NOTE: ovc_hpe/occ_hpe locals shadow the field-name variables
            # above; harmless here since `fields` is already built.
            ovc_hpe = row[3]
            ovc_ex = row[4]
            occ_ex = row[6]
            slope = row[7]
            landuse = row[12]
            splow = row[13]
            sp2 = row[14]
            # First deal with vegetation limitations
            # Find places historically veg limited first ('oVC_HPE' None)
            if ovc_hpe <= 0:
                # 'oVC_EX' Occasional, Frequent, or Pervasive (some areas have oVC_EX > oVC_HPE)
                if ovc_ex > 0:
                    row[1] = 'Potential Reservoir or Landuse Conversion'
                else:
                    row[1] = 'Naturally Vegetation Limited'
            # 'iGeo_Slope' > 23%
            elif slope > 0.23:
                row[1] = 'Slope Limited'
            # 'oCC_EX' None (Primary focus of this layer is the places that can't support dams now... so why?)
            elif occ_ex <= 0:
                if landuse > 0.3:
                    row[1] = "Anthropogenically Limited"
                elif splow >= 190 or sp2 >= 2400:
                    row[1] = "Stream Power Limited"
                else:
                    row[1] = "Stream Size Limited"
            else:
                row[1] = 'Dam Building Possible'
            cursor.updateRow(row)

    # 'oPBRC_CR' (Conservation & Restoration Opportunties)
    with arcpy.da.UpdateCursor(out_network, fields) as cursor:
        for row in cursor:
            # 'oPBRC_UI' Negligible Risk or Minor Risk
            opbrc_ui = row[0]
            occ_hpe = row[5]
            occ_ex = row[6]
            mcc_his_dep = row[8]
            ipc_vlow_lu = row[9]
            ipc_high_lu = row[10]
            if opbrc_ui == 'Negligible Risk' or opbrc_ui == 'Minor Risk':
                # 'oCC_EX' Frequent or Pervasive
                # 'mcc_his_dep' <= 3
                if occ_ex >= 5 and mcc_his_dep <= 3:
                    row[2] = 'Easiest - Low-Hanging Fruit'
                # 'oCC_EX' Occasional, Frequent, or Pervasive
                # 'oCC_HPE' Frequent or Pervasive
                # 'mcc_his_dep' <= 3
                # 'ipc_vlow_lu'(i.e., Natural) > 75
                # 'ipc_high_lu' (i.e., Developed) < 10
                elif occ_ex > 1 and mcc_his_dep <= 3 and occ_hpe >= 5 and ipc_vlow_lu > 75 and ipc_high_lu < 10:
                    row[2] = 'Straight Forward - Quick Return'
                # 'oCC_EX' Rare or Occasional
                # 'oCC_HPE' Frequent or Pervasive
                # 'ipc_vlow_lu'(i.e., Natural) > 75
                # 'ipc_high_lu' (i.e., Developed) < 10
                elif occ_hpe >= 5 > occ_ex > 0 and ipc_vlow_lu > 75 and ipc_high_lu < 10:
                    row[2] = 'Strategic - Long-Term Investment'
                else:
                    row[2] = 'NA'
            else:
                row[2] = 'NA'
            cursor.updateRow(row)

    if conservation_areas is not None and surveyed_dams is not None:
        # beaver dam management strategies (derived from TNC project)
        # Initialize all three flags to "No" before the location selections.
        with arcpy.da.UpdateCursor(
                out_network, ["ObsDam", "ConsArea", "ConsEase"]) as cursor:
            for row in cursor:
                row[0] = "No"
                row[1] = "No"
                row[2] = "No"
                cursor.updateRow(row)
        network_lyr = arcpy.MakeFeatureLayer_management(
            out_network, "network_lyr")
        # Work on a snapped copy of the dams so the input is left untouched.
        dams = os.path.join(proj_path, 'tmp_snapped_dams.shp')
        arcpy.CopyFeatures_management(surveyed_dams, dams)
        arcpy.Snap_edit(dams, [[out_network, 'EDGE', '60 Meters']])
        # Flag reaches intersecting dams / conservation areas / easements.
        arcpy.SelectLayerByLocation_management(network_lyr, "INTERSECT", dams,
                                               '', "NEW_SELECTION")
        with arcpy.da.UpdateCursor(network_lyr, ["ObsDam"]) as cursor:
            for row in cursor:
                row[0] = "Yes"
                cursor.updateRow(row)
        arcpy.SelectLayerByLocation_management(network_lyr, "INTERSECT",
                                               conservation_areas, '',
                                               "NEW_SELECTION")
        with arcpy.da.UpdateCursor(network_lyr, ["ConsArea"]) as cursor:
            for row in cursor:
                row[0] = "Yes"
                cursor.updateRow(row)
        arcpy.SelectLayerByLocation_management(network_lyr, "INTERSECT",
                                               conservation_easements, '',
                                               "NEW_SELECTION")
        with arcpy.da.UpdateCursor(network_lyr, ["ConsEase"]) as cursor:
            for row in cursor:
                row[0] = "Yes"
                cursor.updateRow(row)
        # Assign the 'DamStrat' category (row[15]); later assignments in the
        # if-chain deliberately override earlier, lower-priority ones.
        with arcpy.da.UpdateCursor(out_network, fields) as cursor:
            for row in cursor:
                # 'oPBRC_UI' Negligible Risk or Minor Risk
                opbrc_ui = row[0]
                hist_veg = row[3]
                curr_veg = row[4]
                curr_dams = row[6]
                infrastructure_dist = row[11]
                landuse = row[12]
                obs_dams = row[18]
                protected = row[19]
                easement = row[20]
                hist_veg_departure = hist_veg - curr_veg
                urban = landuse > 0.66
                ag = 0.33 < landuse <= 0.66
                no_urban = not urban
                no_ag = not ag
                # default category is 'Other'
                row[15] = 'Other'
                if curr_dams >= 5:
                    if no_urban:
                        if hist_veg_departure >= 4:
                            row[15] = "3a. Vegetation restoration first-priority"
                        else:
                            row[15] = "3. High restoration potential"
                if curr_dams >= 20 and protected == 'Yes':
                    row[15] = "2. Highest restoration potential - translocation"
                if curr_dams >= 20 and easement == 'Yes':
                    row[15] = "2. Highest restoration potential - translocation"
                if 1 <= curr_dams < 5 and no_urban:
                    if hist_veg_departure >= 4:
                        row[15] = "4a. Vegetation restoration first-priority"
                    else:
                        row[15] = "4. Medium-low restoration potential"
                if curr_dams >= 1 and infrastructure_dist <= 30:
                    row[15] = "5. Restoration with infrastructure modification"
                if curr_dams >= 1 and urban:
                    row[15] = "6. Restoration with urban or agricultural modification"
                if curr_dams >= 1 and ag:
                    row[15] = "6. Restoration with urban or agricultural modification"
                if obs_dams == 'Yes' and no_urban and no_ag:
                    row[15] = "1. Beaver conservation"
                cursor.updateRow(row)
        arcpy.Delete_management(dams)
    else:
        # remove strategies map fields if not running this part of the model
        arcpy.DeleteField_management(out_network, "DamStrat")
        arcpy.DeleteField_management(out_network, "ObsDam")
        arcpy.DeleteField_management(out_network, "ConsRest")
        arcpy.DeleteField_management(out_network, "ConsEase")

    make_layers(out_network)
    write_xml(in_network, out_network)
    return out_network
nonet_proj = "NHDFlowline_NonNetwork_albers" arcpy.Project_management(disgages, disgages_proj, pr) arcpy.Project_management(NHD_net_imp, net_proj, pr) arcpy.Project_management(NHD_nonet_imp, nonet_proj, pr) #Take out gages in Alaska and Hawaii (because the NHDv2 does not include these areas arcpy.MakeFeatureLayer_management(disgages_proj, "gages_notinNHDv2_lyr") arcpy.SelectLayerByAttribute_management( "gages_notinNHDv2_lyr", "NEW_SELECTION", 'dec_lat_va_num < 50 AND dec_lat_va_num > 25 ') #Snap gages notinNHD_gages_snap = "gages_notinNHDv2_snap" arcpy.CopyFeatures_management("gages_notinNHDv2_lyr", notinNHD_gages_snap) snap_env = [net_proj, "EDGE", "500 Meters"] arcpy.Snap_edit(notinNHD_gages_snap, [snap_env]) #Check results through near analysis in_features = notinNHD_gages_snap near_features = net_proj location = "LOCATION" angle = "NO_ANGLE" method = "GEODESIC" arcpy.Near_analysis(in_features, near_features, location=location, angle=angle, method=method) #Select those that snapped and output to new feature class arcpy.MakeFeatureLayer_management(notinNHD_gages_snap,
#get new nodes arcpy.AddField_management(links_n, "_ID_", "LONG") arcpy.CalculateField_management(links_n, "_ID_", '!FID!', "PYTHON") arcpy.FeatureVerticesToPoints_management(links_n, nodes_n, "BOTH_ENDS") arcpy.DeleteIdentical_management(nodes_n, ['Shape']) arcpy.AddField_management(nodes_n, "_ID_", "LONG") arcpy.CalculateField_management(nodes_n, "_ID_", '!FID!', "PYTHON") fieldnames = [x for x in [f.name for f in arcpy.ListFields(nodes_n)] if x not in ['FID', 'Shape', 'OID', "_ID_"]] arcpy.DeleteField_management(nodes_n, fieldnames) # get old nodes arcpy.AddField_management(links_o, "_ID_", "LONG") arcpy.CalculateField_management(links_o, "_ID_", '!FID!', "PYTHON") arcpy.FeatureVerticesToPoints_management(links_o, nodes_o, "BOTH_ENDS") arcpy.DeleteIdentical_management(nodes_o, ['Shape']) arcpy.AddField_management(nodes_o, "_ID_", "LONG") arcpy.CalculateField_management(nodes_o, "_ID_", '!FID!', "PYTHON") fieldnames = [x for x in [f.name for f in arcpy.ListFields(nodes_o)] if x not in ['FID', 'Shape', '_ID_']] arcpy.DeleteField_management(nodes_o, fieldnames) # snap nodes to calculate routes arcpy.Copy_management(nodes_o, snapped_old_nodes) # the old nodes are snapped to the new link, to calculate the sum distance arcpy.Snap_edit(snapped_old_nodes, [[links_n, "EDGE", near_snap_dist]])
def get_centerline (feature, dem, workspace, power = 5, eu_cell_size = 10):
    """Returns a center line feature of the given polygon feature based on cost
    over an euclidean distance raster and cost path. points are seeded using
    minimum and maximum elevation.

    Returns a 4-tuple (centerline path, length, slope, error_flag); on any
    failure error_flag is True and length/slope are empty strings.
    """
    centerline = workspace + '\\centerline.shp'
    center_length = 0
    center_slope = 0
    smoothing = 4
    trim_distance = "100 Meters"
    try:
        # Setup extents / environments for the current feature
        ARCPY.env.extent = feature.shape.extent
        desc = ARCPY.Describe(feature)
        XMin_new = desc.extent.XMin - 200
        YMin_new = desc.extent.YMin - 200
        XMax_new = desc.extent.XMax + 200
        YMax_new = desc.extent.YMax + 200
        ARCPY.env.extent = ARCPY.Extent(XMin_new, YMin_new, XMax_new, YMax_new)
        ARCPY.env.overwriteOutput = True
        ARCPY.env.cellSize = eu_cell_size
        ARCPY.env.snapRaster = dem

        # Get minimum and maximum points
        resample = ARCPY.Resample_management (dem, 'in_memory\\sample', eu_cell_size)
        masked_dem = spatial.ExtractByMask (resample, feature.shape)

        # Find the maximum elevation value in the feature, convert them to
        # points and then remove all but one.
        # ('VALUE <> ' here is a raster SQL expression string, not Python.)
        maximum = get_properties (masked_dem, 'MAXIMUM')
        maximum_raster = spatial.SetNull(masked_dem, masked_dem, 'VALUE <> ' + maximum)
        maximum_point = ARCPY.RasterToPoint_conversion(maximum_raster, 'in_memory\\max_point')
        rows = ARCPY.UpdateCursor (maximum_point)
        for row in rows:
            # BUG FIX: replaced the Python-2-only `<>` operator with `!=`
            # (a SyntaxError on Python 3; identical behavior on Python 2).
            if row.pointid != 1:
                rows.deleteRow(row)
        del row, rows

        # Find the minimum elevation value in the feature, convert them to
        # points and then remove all but one.
        minimum = get_properties (masked_dem, 'MINIMUM')
        minimum_raster = spatial.SetNull(masked_dem, masked_dem, 'VALUE <> ' + minimum)
        minimum_point = ARCPY.RasterToPoint_conversion(minimum_raster, 'in_memory\\min_point')
        rows = ARCPY.UpdateCursor (minimum_point)
        for row in rows:
            if row.pointid != 1:
                rows.deleteRow(row)
        del row, rows

        # Calculate euclidean Distance to boundary line for input DEM cells.
        polyline = ARCPY.PolygonToLine_management(feature.shape, 'in_memory\\polyline')
        eucdist = spatial.EucDistance(polyline, "", eu_cell_size, '')
        masked_eucdist = spatial.ExtractByMask (eucdist, feature.shape)

        # Calculate the cost raster by inverting the euclidean distance results,
        # and raising it to the power of x to exaggerate the least expensive route.
        cost_raster = (-1 * masked_eucdist + float(maximum))**power

        # Run the cost distance and cost path function to find the path of least
        # resistance between the minimum and maximum values. The results are set
        # so all values equal 1 (different path segments have different values)
        # and convert the raster line to a poly-line.
        backlink = 'in_memory\\backlink'
        cost_distance = spatial.CostDistance(minimum_point, cost_raster, '', backlink)
        cost_path = spatial.CostPath(maximum_point, cost_distance, backlink, 'EACH_CELL', '')
        cost_path_ones = spatial.Con(cost_path, 1, '', 'VALUE > ' + str(-1))  # Set all resulting pixels to 1
        r_to_p = ARCPY.RasterToPolyline_conversion (cost_path_ones, 'in_memory\\raster_to_polygon')

        del ARCPY.env.extent  # Delete current extents (need here but do not know why)

        # Removes small line segments from the centerline shape. These segments are
        # a byproduct of cost analysis.
        lines = str(ARCPY.GetCount_management(r_to_p))  # check whether we have more than one line segment
        if float(lines) > 1:  # If there is more then one line
            rows = ARCPY.UpdateCursor(r_to_p)
            for row in rows:
                if row.shape.length == eu_cell_size:  # delete all the short 10 m lines
                    rows.deleteRow(row)
            del row, rows
            lines = str(ARCPY.GetCount_management(r_to_p))
            if float(lines) > 1:
                # make sure that the ends of the lines are connected
                ARCPY.Snap_edit(r_to_p, [[r_to_p, "END", "50 Meters"]])
                r_to_p = ARCPY.Dissolve_management(r_to_p, 'in_memory\\raster_to_polygon_dissolve')

        # Smooth the resulting line. Currently smoothing is determined by minimum
        # and maximum distance. The greater change the greater the smoothing.
        smooth_tolerance = (float(maximum) - float(minimum)) / smoothing
        ARCPY.SmoothLine_cartography(r_to_p, centerline, 'PAEK', smooth_tolerance, 'FIXED_CLOSED_ENDPOINT', 'NO_CHECK')

        field_names = []  # List of field names in the file that will be deleted.
        fields_list = ARCPY.ListFields(centerline)
        for field in fields_list:  # Loop through the field names
            if not field.required:
                # If they are not required append them to the list of field names.
                field_names.append(field.name)

        # Add new fields to the center line feature
        ARCPY.AddField_management(centerline, 'GLIMSID', 'TEXT', '', '', '25')
        ARCPY.AddField_management(centerline, 'LENGTH', 'FLOAT')
        ARCPY.AddField_management(centerline, 'SLOPE', 'FLOAT')
        ARCPY.DeleteField_management(centerline, field_names)  # Remove the old fields.

        # Calculate the length of the line segment and populate segment data.
        ARCPY.CalculateField_management(centerline, 'LENGTH', 'float(!shape.length@meters!)', 'PYTHON')
        rows = ARCPY.UpdateCursor (centerline)
        for row in rows:
            row.GLIMSID = feature.GLIMSID  # Get GLIMS ID and add it to segment
            center_length = row.LENGTH  # Get the length of the center line
            # Calculate slope of the line based on change in elevation over length of line
            center_slope = round(math.degrees(math.atan((float(maximum) - float(minimum)) / row.LENGTH)), 2)
            row.SLOPE = center_slope  # Write slope to Segment
            rows.updateRow(row)  # Update the new entry
        del row, rows  # Delete cursors and remove locks

        # Flip Line if needed - Turn min point and end point into a line segment if
        # the length of this line is greater then the threshold set, flip the line.
        end_point = ARCPY.FeatureVerticesToPoints_management(centerline, 'in_memory\\end_point', 'END')
        merged_points = ARCPY.Merge_management ([end_point, minimum_point], 'in_memory\\merged_points')
        merged_line = ARCPY.PointsToLine_management (merged_points, 'in_memory\\merged_line')

        merged_line_length = 0  # Get the line Length
        rows = ARCPY.SearchCursor (merged_line)
        for row in rows:
            merged_line_length += row.shape.length
        del row, rows

        # if the line length is greater then a quarter the entire feature length, flip
        if merged_line_length > (center_length/4):
            ARCPY.FlipLine_edit(centerline)

        # This function attempts to extend the line and clip it back to the
        # feature extents in order to create a line that runs from edge to edge
        #trimmed_line = ARCPY.Merge_management([polyline, centerline], 'in_memory\\line_merge')
        trimmed_line = ARCPY.Append_management (polyline, centerline, 'NO_TEST')
        ARCPY.TrimLine_edit (trimmed_line, trim_distance, "DELETE_SHORT")
        ARCPY.ExtendLine_edit(trimmed_line, trim_distance, "EXTENSION")

        rows = ARCPY.UpdateCursor (trimmed_line)
        for row in rows:
            if row.LENGTH == 0.0:
                rows.deleteRow(row)
        del row, rows

        # Recalculate length. Must be after 0.0 lengths are deleted or they will
        # not be removed above.
        ARCPY.CalculateField_management(centerline, 'LENGTH', 'float(!shape.length@meters!)', 'PYTHON')

        ARCPY.env.overwriteOutput = False
        return centerline, center_length, center_slope, False
    except:
        # Deliberate catch-all: the error flag (4th return value) tells the
        # caller this feature failed; empty strings replace length/slope.
        ARCPY.env.overwriteOutput = False
        return centerline, '', '', True
arcpy.MakeFeatureLayer_management("New_snap", "Snap_layer") # Only snap features that are not already identical arcpy.SelectLayerByLocation_management( "Snap_layer", "are_identical_to", Base_map, invert_spatial_relationship="INVERT") # Optional densify currently disabled - makes snap take much longer. # print(" Densifying new features") # arcpy.Densify_edit("Snap_layer", "DISTANCE", "1") print( " Snapping - takes about 15 mins for OSMM-PHI for a LAD, 12 hours for OSMM-Phase 1 habitats in Oxfordshire, " "70 minutes for merging in Oxfordshire designations") arcpy.Snap_edit("Snap_layer", snap_env) arcpy.Delete_management("Snap_layer") # End of section that needs to be commented out if you have to snap manually in ArcMAP print(''.join([" ## Snapping completed on : ", time.ctime()])) # Correcting slivers and overlaps after the snap. Overlaps occur because we only snap non-identical features. Could change this? print(" Correcting overlaps after snapping") MyFunctions.check_and_repair("New_snap") print( " Unioning. If this fails with an exit code, read the comments in the script for help." ) # Have had mysterious problem here (error 999999, Table not found, Topology error, Duplicate segment) or # 'Process finished with exit code -1073741819 (0xC0000005)' even when the process has worked correctly before with the same data! # Sometimes it will work in ArcMap instead (try not entering a numerical rank or cluster tolerance) # If the designation data and base map polygons have not changed, you can set 'snap_new_features' to false and omit
def set_dam_attributes(brat_output, output_path, dams, req_fields, new_fields, da_threshold):
    """
    Sets all the dam info and updates the output file with that data
    :param brat_output: The polyline we're basing everything on
    :param output_path: The polyline shapefile with BRAT output
    :param dams: The points shapefile of observed dams
    :param req_fields: The fields needed to calculate new fields
    :param new_fields: Fields to add to the network
    :param da_threshold: Drainage area at which stream is presumably too large for dam building
    :return:
    """
    # snap dams within 5 meters to network if above DA threshold, otherwise
    # snap dams within 60 meters
    if da_threshold:
        temp_dir = os.path.join(
            os.path.dirname(
                os.path.dirname(os.path.dirname(
                    os.path.dirname(brat_output)))), 'Temp')
        tmp_above_threshold = arcpy.MakeFeatureLayer_management(
            brat_output, 'tmp_above_threshold')
        above_threshold_shp = os.path.join(temp_dir, 'tmp_above_da_threshold.shp')
        tmp_below_threshold = arcpy.MakeFeatureLayer_management(
            brat_output, 'tmp_below_threshold')
        below_threshold_shp = os.path.join(temp_dir, 'tmp_below_da_threshold.shp')
        # BUG FIX: the queries previously hard-coded 65 instead of using the
        # da_threshold parameter, ignoring the caller-supplied threshold.
        quer_above = """"{}" >= {}""".format('iGeo_DA', da_threshold)
        quer_below = """"{}" < {}""".format('iGeo_DA', da_threshold)
        arcpy.SelectLayerByAttribute_management(tmp_above_threshold,
                                                'NEW_SELECTION', quer_above)
        arcpy.CopyFeatures_management(tmp_above_threshold, above_threshold_shp)
        arcpy.SelectLayerByAttribute_management(tmp_below_threshold,
                                                'NEW_SELECTION', quer_below)
        arcpy.CopyFeatures_management(tmp_below_threshold, below_threshold_shp)
        arcpy.Snap_edit(dams, [[above_threshold_shp, 'EDGE', '5 Meters']])
        arcpy.Snap_edit(dams, [[below_threshold_shp, 'EDGE', '60 Meters']])
    # snap all dams within 60 meters to network if no DA threshold provided
    else:
        arcpy.Snap_edit(dams, [[brat_output, 'EDGE', '60 Meters']])

    # should select all dams snapped to network
    arcpy.SpatialJoin_analysis(brat_output,
                               dams,
                               output_path,
                               join_operation='JOIN_ONE_TO_ONE',
                               join_type='KEEP_ALL',
                               match_option='INTERSECT')
    # add new fields to network
    add_fields(output_path, new_fields)

    # calculate new field values
    with arcpy.da.UpdateCursor(output_path, req_fields) as cursor:
        for row in cursor:
            dam_num = row[-7]  # seventh to last attribute
            seg_length = row[-6]  # sixth to last attribute
            if seg_length is None:
                seg_length = 0
            occ_ex = row[-5]  # fifth to last attribute
            occ_hpe = row[-4]  # fourth to last attribute
            igeo_da = row[-3]  # third to last attribute
            cons_field = row[-2]  # second to last attribute
            mcc_ex_ct = row[-1]  # last attribute
            # eDam_Ct: set equal to join count from snapped dams
            row[0] = dam_num
            # calculate surveyed dam density
            # BUG FIX: guard against seg_length of 0/None, which previously
            # raised ZeroDivisionError.
            row[1] = dam_num / seg_length * 1000 if seg_length else 0
            # BRATvSurv: calculate predicted (BRAT) capacity count vs.
            # observed (surveyed) dam count
            if row[0] == 0:
                row[4] = -1
            else:
                row[4] = mcc_ex_ct / row[0]
            # e_DamPcC: calculate proportion of predicted capacity occupied by dams
            if occ_ex == 0:
                row[2] = 0
            else:
                row[2] = row[1] / occ_ex
            # ConsVRest: differentiate management strategies based on dam occupancy
            if cons_field == "Easiest - Low-Hanging Fruit":
                if row[2] >= 0.25:
                    row[3] = "Immediate: Beaver Conservation"
                else:
                    row[3] = "Immediate: Potential Beaver Translocation"
            elif cons_field == "Straight Forward - Quick Return":
                row[3] = "Mid Term: Process-based Riparian Vegetation Restoration"
            # BUG FIX: this branch previously compared row[-1] (the numeric
            # mCC_EX_CT count) to the category string, so it never matched;
            # the conservation category lives in row[-2] like the branches above.
            elif cons_field == "Strategic - Long-Term Investment":
                row[3] = "Long Term: Riparian Vegetation Reestablishment"
            else:
                row[3] = "Low Capacity Habitat"
            cursor.updateRow(row)

    arcpy.DeleteField_management(output_path, ["Join_Count", "TARGET_FID"])
    add_snapped_attribute(dams, brat_output)
if not arcpy.Exists(workingDirectory + "/vectorMosaic"): mosaicFlowLines = arcpy.RasterToPolyline_conversion(mosaicFlowGrid, workingDirectory + "/vectorMosaic", "NODATA", "", "NO_SIMPLIFY") else: mosaicFlowLines = workingDirectory + "/vectorMosaic" # ========================== # Observed points processing # ========================== # Make a feature layer of the observed points so it can be processed arcpy.MakeFeatureLayer_management(observed, "pointsLyr") # Snap the points to the flowlines arcpy.Snap_edit("pointsLyr", [[mosaicFlowLines, "EDGE", bufferInMeters + " Meters"]]) # Sample the different versions of flow grids so the points can be classified. The points now lie on these grids after snapping sampleOutput = Sample([dem, detailed, truncated], "pointsLyr", workingDirectory + "/sampleTable_" + version, "NEAREST") # Calculate the field that described the location arcpy.AddField_management("pointsLyr", "LocationClass", "TEXT") # Join the sample table to the points layer so the points can be classified arcpy.JoinField_management("pointsLyr", "OBJECTID", sampleOutput, "samplePoints_" + version,
def main():
    """Geoprocessing-tool entry point: compute crash frequency/rate statistics.

    Reads 8 tool parameters (streets-or-intersections layer, crash points,
    time interval, snap distance, optional weight field + lookup table,
    optional ADT field, output path), snaps crashes onto the street network,
    spatially joins them, calculates frequency/rate fields, and writes a
    symbolized output layer.  Scratch datasets are always cleaned up via the
    ``finally`` block.
    """
    scratch_datasets = []
    # Field names added to the output; order matters — indexed below.
    new_fields = [
        'Crash_Count', 'Crash_Count_Weight', 'Crash_Frequency', 'Crash_Rate',
        'Weighted_Crash_Frequency', 'Weighted_Crash_Rate'
    ]
    try:
        streets_intersection = arcpy.GetParameterAsText(0)
        crashes = arcpy.GetParameterAsText(1)
        # Parameter 2 is e.g. "5 Years"; normalize the interval to days.
        time_interval, time_unit = arcpy.GetParameterAsText(2).split(' ')
        time_interval = float(time_interval)
        if time_unit == 'Years':
            time_interval = time_interval * 365
        elif time_unit == 'Weeks':
            time_interval = time_interval * 7
        snap_distance = arcpy.GetParameterAsText(3)
        weight_field = arcpy.GetParameterAsText(4)
        weight_table = arcpy.GetParameter(5)
        adt_field = arcpy.GetParameterAsText(6)
        output_crash_rates = arcpy.GetParameterAsText(7)
        params = arcpy.GetParameterInfo()
        # Polyline input -> street segments; otherwise intersections (points).
        shape_type = arcpy.Describe(streets_intersection).shapeType
        weight_provided = False
        if weight_field is not None and weight_field != '':
            weight_provided = True
        adt_provided = False
        if adt_field is not None and adt_field != '':
            adt_provided = True
        arcpy.SetProgressorLabel("Creating Temporary Crash Layer...")
        arcpy.MakeFeatureLayer_management(crashes, "Crash Layer")
        # Work on a scratch copy so the source crash data is never modified.
        crashes_snap = os.path.join(arcpy.env.scratchGDB, "Crash_Snap")
        if arcpy.Exists(crashes_snap):
            arcpy.Delete_management(crashes_snap)
        arcpy.CopyFeatures_management("Crash Layer", crashes_snap)
        scratch_datasets.append(crashes_snap)
        crash_count_field = new_fields[0]
        crash_count_weight_field = new_fields[1]
        arcpy.AddField_management(crashes_snap, crash_count_field, "Double",
                                  field_alias="Crash Count")
        fields = [crash_count_field]
        if weight_provided:
            arcpy.AddField_management(crashes_snap, crash_count_weight_field,
                                      "Double",
                                      field_alias="Crash Count Weight")
            fields.append(crash_count_weight_field)
            fields.append(weight_field)
            # If the weight field uses a coded-value domain, translate the
            # human-readable values in the weight table back to stored codes
            # so the cursor comparison below matches.
            for field in arcpy.Describe(crashes).fields:
                if field.name == weight_field:
                    if field.domain is not None and field.domain != '':
                        database = get_workspace(crashes)
                        if database is not None:
                            for domain in arcpy.da.ListDomains(database):
                                if domain.name == field.domain:
                                    if domain.domainType == 'CodedValue':
                                        for key, value in domain.codedValues.items():
                                            for i in range(0, weight_table.rowCount):
                                                if weight_table.getValue(i, 0) == value:
                                                    weight_table.setValue(i, 0, str(key))
                                                    break
        # Every crash counts as 1; look up its weight when a table was given.
        with arcpy.da.UpdateCursor(crashes_snap, fields) as cursor:
            for row in cursor:
                row[0] = 1.0
                if len(fields) == 3:
                    value = str(row[2])
                    for i in range(0, weight_table.rowCount):
                        if value == weight_table.getValue(i, 0):
                            row[1] = weight_table.getValue(i, 1)
                            break
                cursor.updateRow(row)
        if (shape_type == "Polyline"):
            arcpy.SetProgressorLabel("Snapping Crashes to Nearest Street...")
        else:
            arcpy.SetProgressorLabel(
                "Snapping Crashes to Nearest Intersection...")
        snapEnv = [streets_intersection, "EDGE", snap_distance]
        arcpy.Snap_edit(crashes_snap, [snapEnv])
        # Build field mappings: carry street attributes through, sum the
        # per-crash count (and weight) fields during the spatial join.
        fms = arcpy.FieldMappings()
        desc = arcpy.Describe(streets_intersection)
        for field in desc.fields:
            if field.type == 'Geometry' or field.type == 'OID' or field.name in new_fields:
                continue
            if shape_type == "Polyline" and field.name == desc.AreaFieldName:
                continue
            fm = arcpy.FieldMap()
            fm.addInputField(streets_intersection, field.name)
            fms.addFieldMap(fm)
        fm = arcpy.FieldMap()
        fm.addInputField(crashes_snap, crash_count_field)
        fm.mergeRule = 'Sum'
        fms.addFieldMap(fm)
        if weight_provided:
            fm = arcpy.FieldMap()
            fm.addInputField(crashes_snap, crash_count_weight_field)
            fm.mergeRule = 'Sum'
            fms.addFieldMap(fm)
        crashes_join = os.path.join(arcpy.env.scratchGDB, "Crash")
        if arcpy.Exists(crashes_join):
            arcpy.Delete_management(crashes_join)
        arcpy.SpatialJoin_analysis(streets_intersection, crashes_snap,
                                   crashes_join, "JOIN_ONE_TO_ONE", "KEEP_ALL",
                                   fms, "Intersect", "0 Feet")
        scratch_datasets.append(crashes_join)
        # A summed weight of 0 means "no crashes joined" — store NULL instead.
        if weight_provided:
            with arcpy.da.UpdateCursor(crashes_join,
                                       [crash_count_weight_field]) as cursor:
                for row in cursor:
                    if row[0] == 0:
                        row[0] = None
                    cursor.updateRow(row)
        arcpy.SetProgressorLabel("Calculating Crash Statistics")
        templateDir = os.path.dirname(__file__)
        crash_frequency_field = new_fields[2]
        crash_rate_field = new_fields[3]
        weighted_crash_frequency_field = new_fields[4]
        weighted_crash_rate_field = new_fields[5]
        add_fields = []
        fields = [crash_count_field]
        # Field aliases differ: per-mile statistics for street segments,
        # per-intersection statistics otherwise.
        if (shape_type == "Polyline"):
            fields.append('SHAPE@')
            add_fields = [[crash_frequency_field, "Crashes Per Mile Per Year"],
                          [crash_rate_field,
                           "Crashes Per Million Vehicle Miles"],
                          [weighted_crash_frequency_field,
                           "Weighted Crashes Per Mile Per Year"],
                          [weighted_crash_rate_field,
                           "Weighted Crashes Per Million Vehicle Miles"]]
        else:
            add_fields = [
                [crash_frequency_field, "Crashes Per Year"],
                [crash_rate_field, "Crashes Per Million Entering Vehicles"],
                [weighted_crash_frequency_field, "Weighted Crashes Per Year"],
                [weighted_crash_rate_field,
                 "Weighted Crashes Per Million Entering Vehicles"]
            ]
        arcpy.AddField_management(crashes_join, add_fields[0][0], "Double",
                                  field_alias=add_fields[0][1])
        fields.append(add_fields[0][0])
        if adt_provided:
            arcpy.AddField_management(crashes_join, add_fields[1][0], "Double",
                                      field_alias=add_fields[1][1])
            fields.append(add_fields[1][0])
            fields.append(adt_field)
        if weight_provided:
            fields.append(crash_count_weight_field)
            arcpy.AddField_management(crashes_join, add_fields[2][0], "Double",
                                      field_alias=add_fields[2][1])
            fields.append(add_fields[2][0])
            if adt_provided:
                arcpy.AddField_management(crashes_join, add_fields[3][0],
                                          "Double",
                                          field_alias=add_fields[3][1])
                fields.append(add_fields[3][0])
        # Compute frequency (per year, per mile for segments) and, with ADT,
        # rate per million vehicle-miles / entering vehicles.
        with arcpy.da.UpdateCursor(crashes_join, fields) as cursor:
            for row in cursor:
                if row[cursor.fields.index(crash_count_field)] is None:
                    continue
                miles = 1.0
                if 'SHAPE@' in cursor.fields:
                    miles = row[cursor.fields.index('SHAPE@')].getLength(
                        'GEODESIC', 'MILES')
                row[cursor.fields.index(crash_frequency_field)] = row[
                    cursor.fields.index(crash_count_field)] / (
                        (time_interval / 365) * miles)
                if crash_count_weight_field in cursor.fields and row[
                        cursor.fields.index(
                            crash_count_weight_field)] is not None:
                    row[cursor.fields.index(
                        weighted_crash_frequency_field
                    )] = row[cursor.fields.index(crash_count_weight_field)] / (
                        (time_interval / 365) * miles)
                if adt_field in cursor.fields and row[cursor.fields.index(
                        adt_field)] is not None:
                    row[cursor.fields.index(crash_rate_field)] = (
                        row[cursor.fields.index(crash_count_field)] * 1000000
                    ) / (time_interval * row[cursor.fields.index(adt_field)] *
                         miles)
                    if crash_count_weight_field in cursor.fields and row[
                            cursor.fields.index(
                                crash_count_weight_field)] is not None:
                        row[cursor.fields.index(weighted_crash_rate_field)] = (
                            row[cursor.fields.index(crash_count_weight_field)]
                            * 1000000) / (time_interval *
                                          row[cursor.fields.index(adt_field)] *
                                          miles)
                cursor.updateRow(row)
        arcpy.SetProgressorLabel("Creating Crash Rate Layer...")
        # Hide bookkeeping fields in the output layer's field info string.
        field_info = ""
        fields_to_hide = ['Join_Count', 'TARGET_FID', new_fields[0]]
        if weight_provided:
            fields_to_hide.append(new_fields[1])
        field_list = arcpy.ListFields(crashes_join)
        for field in field_list:
            if field.name in fields_to_hide:
                field_info = "{0}{1} {1} HIDDEN;".format(
                    field_info, field.name)
            else:
                field_info = "{0}{1} {1} VISIBLE;".format(
                    field_info, field.name)
        # [:-1] trims the trailing ';' from the accumulated field-info string.
        arcpy.MakeFeatureLayer_management(crashes_join, "Output Crash Layer",
                                          field_info=field_info[:-1])
        arcpy.SelectLayerByAttribute_management(
            "Output Crash Layer", "NEW_SELECTION",
            '{0} IS NOT NULL'.format(new_fields[2]))
        arcpy.CopyFeatures_management("Output Crash Layer",
                                      output_crash_rates)
        # Apply symbology from an embedded .lyrx JSON template (point or
        # polyline variant), written out via a temp file.
        lyrx_json = _CRASH_RATE_POINT
        if (shape_type == "Polyline"):
            lyrx_json = _CRASH_RATE_POLYLINE
        with tempfile.NamedTemporaryFile(delete=False) as temp_lyrx:
            temp_lyrx.write(lyrx_json.encode())
        lyrx_path = "{0}.lyrx".format(temp_lyrx.name)
        os.rename(temp_lyrx.name, lyrx_path)
        params[7].symbology = lyrx_path
    finally:
        # Always remove scratch datasets, even on failure.
        for dataset in scratch_datasets:
            if arcpy.Exists(dataset):
                arcpy.Delete_management(dataset)
# snapStreetfile = r"C:\Users\rl53\Desktop\test\OD_test\streets.gdb\drivable_st"

# Script parameters: input point feature class, output folder (scratch GDB
# location), street feature class to snap to, and an optional snap distance.
# NOTE(review): `input` shadows the Python builtin of the same name; kept
# because code following this (truncated) snippet may reference it.
input = arcpy.GetParameterAsText(0)
outputfolder = arcpy.GetParameterAsText(1)
snapStreetfile = arcpy.GetParameterAsText(2)
snapDist = arcpy.GetParameterAsText(3)
if snapDist == '#' or snapDist == '':
    snapDist = "500 Meters"  # default when the parameter was left blank
arcpy.AddMessage(snapDist)

# Create a scratch file geodatabase inside the output folder if needed.
tempgdb = outputfolder + '\\temp.gdb'
if not os.path.exists(tempgdb):
    arcpy.AddMessage("Creating temp geodatabase...")
    arcpy.CreateFileGDB_management(outputfolder, 'temp.gdb')

# Work on a copy so the original input is never edited in place:
# reproject into the street layer's spatial reference when they differ,
# otherwise a plain copy is enough.
ref_input = arcpy.Describe(input).spatialReference
ref_street = arcpy.Describe(snapStreetfile).spatialReference
# Fix: `<>` is Python-2-only syntax; `!=` is semantically identical and
# valid in both Python 2 and Python 3.
if ref_street.name != ref_input.name:
    input_copy = arcpy.Project_management(
        input, tempgdb + '\\' + os.path.split(input)[1] + '_prj',
        snapStreetfile)
else:
    input_copy = arcpy.FeatureClassToFeatureClass_conversion(
        input, tempgdb, os.path.split(input)[1] + '_copy')
arcpy.AddMessage(
    "Start to Snap points to Road... The process may take up to a few days...")
arcpy.Snap_edit(input_copy, [[snapStreetfile, "EDGE", snapDist]])
# to ensure common boundary is coincident # import system modules import arcpy # Set environment settings arcpy.env.workspace = "C:\\Users\\awiegman\\Downloads\\OtterData\\" # Make backup copy of climate regions feature class, since modification with # the Editing tools below is permanent points = "NRCS_easements_OtterCreek\\Points.shp" import os if not os.path.exists("C:/Temp/pointsBackup.shp"): pointsBackup = "C:/Temp/pointsBackup.shp" arcpy.CopyFeatures_management(points, pointsBackup) """ # Densify climate regions feature class to make sure there are enough vertices # to match detail of vegetation layer when layers are snapped arcpy.Densify_edit(climate, "DISTANCE", "10 Feet") """ # Snap climate regions feature class to vegetation layer vertices and edge lines = "MajorStreams\\MajorStreams.shp" # first, snap climate region vertices to the nearest vegetation layer vertex within 30 Feet snapEnv1 = [lines, "VERTEX", "50 meters"] # second, snap climate region vertices to the nearest vegetation layer edge within 20 Feet #snapEnv2 = [veg, "EDGE", "20 Feet"] arcpy.Snap_edit(points, [snapEnv1]) # expects a list of environments
def snap_points(self, points, dissolved):
    """Snap vertices of *points* to *dissolved* within 2 kilometers."""
    # Announce which datasets are being processed, then a blank line.
    print("Snapping points file", points, "to", dissolved)
    print("")
    # Snap_edit takes a list of [target, snap-type, distance] environments.
    snap_environment = [dissolved, "VERTEX", "2 Kilometers"]
    arcpy.Snap_edit(points, [snap_environment])
fieldMapScale = arcpy.GetParameter( 7) #Required input - field map absolute scale digitisingAccuracy = float( arcpy.GetParameter(8) ) #Optional- This overrides value from map scale.(the default is empty) ##other parameters used in this script, FC = Feature Class unitsTempFCName = "Units_temp" unitsUpdatedFCPath = increment_trailing_number(inputUnitsFCPath) ##set the environment to the working geodatabase env.workspace = workingGDBPath ##Snap the contacts vertices to their own ends, the edges of the boundary layer, and then the contacts edges arcpy.Snap_edit(contactsFCPath, [[contactsFCPath, "END", digitisingAccuracy], [boundaryFCPath, "EDGE", digitisingAccuracy], [contactsFCPath, "EDGE", digitisingAccuracy]]) ##Create the unit polygons #turn the contacts and boundary into unit polygons inFeatures = [contactsFCPath, boundaryFCPath] arcpy.FeatureToPolygon_management( inFeatures, os.path.join(digitisingFDPath, unitsTempFCName), "", "NO_ATTRIBUTES", "") ##Get the attributes for Unit polygons V2 from the original Unitsl Feature Class using a Spatial Join arcpy.SpatialJoin_analysis(os.path.join(digitisingFDPath, unitsTempFCName), inputUnitsFCPath, unitsUpdatedFCPath, "", "", "", "HAVE_THEIR_CENTER_IN", "", "")
def BanklinePoints(output_workspace, loop_points, banklines, valleyline, dem, station_distance):
    """Build a station-point dataset along banklines joined with loop/valley data.

    Snaps loop_points onto the banklines, converts banklines and valleyline
    to regularly spaced route points, transfers loop/bend and valley
    attributes via spatial joins, renames the coordinate fields, and returns
    the result through tool parameter 6.  Relies on the module-level helpers
    `line_route_points`, `add_elevation` and `assignLoopAndBend`.
    """
    # Check out the extension licenses
    arcpy.CheckOutExtension("3D")
    # Set environment variables
    arcpy.env.overwriteOutput = True
    arcpy.env.workspace = output_workspace
    # List parameter values
    arcpy.AddMessage("Workspace: {}".format(arcpy.env.workspace))
    arcpy.AddMessage("loop_points: {}".format(arcpy.Describe(loop_points).baseName))
    arcpy.AddMessage("banklines: {}".format(arcpy.Describe(banklines).baseName))
    arcpy.AddMessage("valleyline: {}".format(arcpy.Describe(valleyline).baseName))
    arcpy.AddMessage("DEM: {}".format(arcpy.Describe(dem).baseName))
    arcpy.AddMessage("Station distance: {}".format(str(station_distance)))
    # Snap loop_points to banklines.
    # NOTE(review): Snap_edit's snap_environment is documented as a list of
    # [features, snap type, distance] lists; this passes a single formatted
    # string instead and relies on the tool's string parsing — TODO confirm.
    snap_string = "{} 'EDGE' '50 feet'".format(arcpy.Describe(banklines).baseName)
    arcpy.Snap_edit(in_features = loop_points,
                    snap_environment = snap_string)
    arcpy.AddMessage("loop_points snapped to banklines")
    # Convert banklines to points stationed every `station_distance`
    banklines_points = line_route_points(output_workspace = output_workspace,
                                         line = banklines,
                                         station_distance = station_distance,
                                         route_id_field = "bank_id",
                                         fields = ["bank","ReachName"])
    # Add elevation to banklines_points from the DEM
    add_elevation(banklines_points, dem)
    # Buffer loop_points to use for the spatial join below
    loop_points_buffer = os.path.join(output_workspace, "loop_points_buffer")
    arcpy.Buffer_analysis(in_features = loop_points,
                          out_feature_class = loop_points_buffer,
                          buffer_distance_or_field = "1 Meters")
    # Identify loop_points close to bankline_points and transfer attributes
    bankline_loop_points = os.path.join(output_workspace, "bankline_loop_points")
    arcpy.SpatialJoin_analysis(target_features = banklines_points,
                               join_features = loop_points_buffer,
                               out_feature_class = bankline_loop_points,
                               match_option = "INTERSECT")
    # Drop join bookkeeping fields
    arcpy.DeleteField_management(in_table = bankline_loop_points,
                                 drop_field = ["Join_Count", "TARGET_FID",
                                               "ReachName_1"])
    arcpy.AddMessage("loop_points joined to banklines_points")
    # Assign loop and bend values to bankline_points
    assignLoopAndBend(bankline_loop_points, loop_points)
    # Convert valleyline to points at the same station spacing
    valleyline_points = line_route_points(output_workspace = output_workspace,
                                          line = valleyline,
                                          station_distance = station_distance,
                                          route_id_field = "ReachName",
                                          fields = [])
    # Assign valleyline_points values to bankline_points (nearest match)
    bankline_points = os.path.join(output_workspace, "bankline_points")
    arcpy.SpatialJoin_analysis(target_features = bankline_loop_points,
                               join_features = valleyline_points,
                               out_feature_class = bankline_points,
                               match_option = "CLOSEST")
    # Drop bookkeeping/duplicate fields produced by the joins
    arcpy.DeleteField_management(in_table = bankline_points,
                                 drop_field = ["Join_Count", "TARGET_FID",
                                               "BUFF_DIST", "ORIG_FID",
                                               "ReachName_1", "ReachName_12",
                                               "from_measure", "to_measure",
                                               "InLine_FID", "SmoLnFlag"])
    # Rename the bankline_points coordinate fields
    arcpy.AlterField_management(bankline_points, "POINT_X", 'bank_POINT_X', 'bank_POINT_X')
    arcpy.AlterField_management(bankline_points, "POINT_Y", 'bank_POINT_Y', 'bank_POINT_Y')
    arcpy.AlterField_management(bankline_points, "POINT_M", 'bank_POINT_M', 'bank_POINT_M')
    # Rename the valleyline_points coordinate fields
    arcpy.AlterField_management(bankline_points, "POINT_X_1", 'valley_POINT_X', 'valley_POINT_X')
    arcpy.AlterField_management(bankline_points, "POINT_Y_1", 'valley_POINT_Y', 'valley_POINT_Y')
    arcpy.AlterField_management(bankline_points, "POINT_M_1", 'valley_POINT_M', 'valley_POINT_M')
    # Return the result through tool output parameter 6
    arcpy.SetParameter(6, bankline_points)
    # Cleanup intermediate datasets
    arcpy.Delete_management(banklines_points)
    arcpy.Delete_management(bankline_loop_points)
    arcpy.Delete_management(loop_points_buffer)
    arcpy.Delete_management(valleyline_points)
RS_vector = "RS_mapped_river_network" # RS vector river RS_Raster = "RS_water_mask" # RS raster mask cellsize = 10 delete_length = 300 # area less than 300 m2 will be deleted ##delete error buffer_DEM_clip = "buffer50_" + DEM arcpy.Buffer_analysis(DEM, buffer_DEM_clip, "50 Meters", "FULL", "ROUND", "ALL") RS = "clip_" + RS_vector arcpy.Intersect_analysis([RS_vector, buffer_DEM_clip], RS, "ALL", "0", "LINE") #(RS_vector,buffer_DEM_clip,RS) print "finish delete error" ##snap DEM to RS arcpy.Snap_edit(DEM, [[RS, "EDGE", "50 Meters"]]) print "finish snap" ##Get RS and DEM raster buffer_DEM = "snaped_buffer50_" + DEM arcpy.Buffer_analysis(DEM, buffer_DEM, "50 Meters", "FULL", "ROUND", "ALL") arcpy.AddField_management(buffer_DEM, "value", "LONG", "", "") arcpy.CalculateField_management(buffer_DEM, "value", "2", "PYTHON_9.3") ToRaster_buffer_DEM = "To_Raster_" + buffer_DEM arcpy.PolygonToRaster_conversion(buffer_DEM, "value", ToRaster_buffer_DEM, "CELL_CENTER", "value", cellsize) print "finish RS and DEM raster" extract_RS_Raster = "clip_" + RS_Raster extract_RS_Raster1 = ExtractByMask(RS_Raster, buffer_DEM)
# NOTE(review): fragment — these first lines are the tail of an update-cursor
# loop inside a `shift_photopoints` helper whose header is not visible here;
# the final TableToTable call is truncated mid-statement.
# NOTE(review): `math.sin(math.degrees(...))` looks wrong — sin() expects
# radians, and degrees() converts radians->degrees; `math.radians` was
# probably intended. TODO confirm against the original script.
        y = row[0][1] + y_shift * math.sin(math.degrees(int(row[1])))
        row[0] = (x, y)
        cursor.updateRow(row)
    return
# Shift photo points only when an angle field was supplied.
if AngleField:
    shift_photopoints(PhotoFeatureClass2, 15, 15)
else:
    pass
# Snap photo points to the nearest parcel edge within 30 feet.
snapenv = [ParcelsFeatureClass, "EDGE", "30 Feet"]
arcpy.Snap_edit(PhotoFeatureClass2, [snapenv])
parcelsOID = arcpy.Describe(ParcelsFeatureClass).OIDFieldName
Nearhelper = Geodatabase + "\\NEAR"
NEAR = Nearhelper
# Find the closest parcel (within 5 feet, geodesic) for every photo point.
arcpy.GenerateNearTable_analysis(PhotoFeatureClass2, ParcelsFeatureClass,
                                 NEAR, "5 Feet", "NO_LOCATION", "NO_ANGLE",
                                 "CLOSEST", "0", "GEODESIC")
arcpy.AddMessage("Step 4: Associating passenger photo points to nearest parcel")
arcpy.JoinField_management(NEAR, "NEAR_FID", ParcelsFeatureClass, parcelsOID,
                           ParcelPIN)
# Export non-matched Photos to table (no GPS, wrong attributes, etc.)
arcpy.JoinField_management(PhotoFeatureClass2, "OBJECTID", NEAR, "IN_FID")
arcpy.TableToTable_conversion(PhotoFeatureClass2, Geodatabase,
# NOTE(review): fragment — the final MakeFeatureLayer call is truncated
# mid-statement; the continuation is not in this file.
import arcpy
import os
import json
import sys
import inspect

arcpy.env.overwriteOutput = 1
# Source workspace for the 1:50k generalization process.
duongDanNguon = "C:/Generalize_25_50/50K_Process.gdb"
arcpy.AddMessage("\n\t# Snap other: {0}".format("PhuBeMat"))
# Integrate, densify, then snap the land-cover layer (PhuBeMat) to the
# river/canal snap reference within 35 m, and integrate them together.
arcpy.Integrate_management([[duongDanNguon + "/PhuBeMat/PhuBeMat", 1]],
                           "2 Meters")
arcpy.Densify_edit(duongDanNguon + "/PhuBeMat/PhuBeMat", "DISTANCE",
                   "2 Meters", None, None)
arcpy.Snap_edit(duongDanNguon + "/PhuBeMat/PhuBeMat", [[
    duongDanNguon + "/ThuyHe/SongSuoiL_KenhMuongL_SnapPBM", "EDGE",
    "35 Meters"
]])
arcpy.Integrate_management(
    [[duongDanNguon + "/ThuyHe/SongSuoiL_KenhMuongL_SnapPBM", 1],
     [duongDanNguon + "/PhuBeMat/PhuBeMat", 2]], "2 Meters")
# Gaps opened by snapping: erase the snapped layer from the water-masked
# version, tag the leftover slivers with a sentinel id, and append them back.
arcpy.Erase_analysis(in_features=duongDanNguon +
                     "/PhuBeMat/PhuBeMat_LocMatNuoc",
                     erase_features=duongDanNguon + "/PhuBeMat/PhuBeMat",
                     out_feature_class=duongDanNguon + "/PhuBeMat/PhuBeMat_Lo")
arcpy.CalculateField_management(duongDanNguon + "/PhuBeMat/PhuBeMat_Lo",
                                "maNhanDang", '"temp123"', "PYTHON_9.3")
arcpy.Append_management([duongDanNguon + "/PhuBeMat/PhuBeMat_Lo"],
                        duongDanNguon + "/PhuBeMat/PhuBeMat", "NO_TEST", None,
                        None)
arcpy.MultipartToSinglepart_management(duongDanNguon + "/PhuBeMat/PhuBeMat",
                                       duongDanNguon + "/PhuBeMat/PhuBeMat2")
arcpy.MakeFeatureLayer_management(duongDanNguon + "/PhuBeMat/PhuBeMat2",
# -*- coding: utf-8 -*- """ Created on Thu Jan 18 10:10:19 2018 @author: danwa """ import arcpy from arcpy import env env.workspace = r"C:\\Users\\danwa\\Documents\\Programming\\Trutta\\DuckRiver\\" #fcs = ['T19_straight', 'T21_straight','T22_straight', 'T24_straight', 'T26_straight', 'T26_straight', 'T28_straight', 'T29_straight'] features = [] string = "T" end1 = ".shp" features = [str(i) + end1 for i in range(30, 95)] snapEnv = ['transectLines', 'EDGE', '50 Feet'] for fc in features: arcpy.Snap_edit(fc, [snapEnv])
def simplify(self):
    """Simplify all configured 1:50k polygon and polyline layers.

    Python 2 method.  Reads layer definitions from ConfigSimplify.json
    (next to this script), merges/simplifies polygons and polylines through
    in_memory workspaces, writes the simplified shapes back, re-snaps lines
    to their buffers, patches the land-cover (PhuBeMat) layer, and copies
    the results into the final geodatabase.  All in_memory data is deleted
    in the ``finally`` block.
    """
    try:
        # Init workspace #
        arcpy.env.overwriteOutput = 1
        # Source (process) and destination (final) geodatabases.
        duongDanNguon = "C:/Generalize_25_50/50K_Process.gdb"
        duongDanDich = "C:/Generalize_25_50/50K_Final.gdb"
        urlFile = '/ConfigSimplify.json'
        _algorithm = "BEND_SIMPLIFY"
        _tolerance = "50 Meters"
        _error_option = "NO_CHECK"
        _collapsed_point_option = "NO_KEEP"
        # True when the optional snap-reference and full land-cover layers exist.
        _checkExitLayer = False
        if arcpy.Exists(duongDanNguon + "/ThuyHe/SongSuoiL_KenhMuongL_SnapPBM") and arcpy.Exists(duongDanNguon + "/PhuBeMat/PhuBeMat_Full"):
            #arcpy.CopyFeatures_management(duongDanNguon + "/PhuBeMat/PhuBeMat_LocMatNuoc", duongDanNguon + "/PhuBeMat/PhuBeMat")
            _checkExitLayer = True
        # Read the config file located next to this script.
        s1 = inspect.getfile(inspect.currentframe())
        s2 = os.path.dirname(s1)
        urlFile = s2 + urlFile
        arcpy.AddMessage("\n# Doc file cau hinh: \"{0}\"".format(urlFile))
        if os.path.exists(urlFile):
            fileConfig = open(urlFile)
            listLayerConfig = json.load(fileConfig)
            fileConfig.close()
            ############################### Simplify Polygon ########################################
            arcpy.Integrate_management([[duongDanNguon + "/PhuBeMat/PhuBeMat", 1]], "2 Meters")
            arcpy.AddMessage("\n# Bat dau Simplify Polygon")
            listPolygon = []
            fieldMappings = arcpy.FieldMappings()
            enableFields = []
            inputsMerge = []
            # Collect enabled polygon layers; enabled polylines (except the
            # contour layer DuongBinhDo) are right-buffered by 0.1 m and
            # treated as polygons so they participate in the merge.
            for objConfig in listLayerConfig:
                if objConfig["LayerType"] == "Polygon" and objConfig["RunStatus"] == "True":
                    if not(_checkExitLayer == False and objConfig["LayerName"] == "PhuBeMat_Full"):
                        temp = {
                            "LayerType": objConfig["LayerType"],
                            "DatasetName": objConfig["DatasetName"],
                            "LayerName": objConfig["LayerName"],
                            "featureLayer": "in_memory\\" + objConfig["LayerName"] + "_Layer",
                            "featureCopy": "in_memory\\" + objConfig["LayerName"] + "_Copy",
                            "featureCopyLayer": "in_memory\\" + objConfig["LayerName"] + "_Copy_Layer",
                            "FID_XXX": "FID_" + objConfig["LayerName"]
                        }
                        listPolygon.append(temp)
                elif objConfig["LayerType"] == "Polyline" and objConfig["RunStatus"] == "True" and objConfig["LayerName"] <> "DuongBinhDo":
                    if not(_checkExitLayer == False and objConfig["LayerName"] == "SongSuoiL_KenhMuongL_SnapPBM"):
                        arcpy.AddMessage("\n# Buffer lop: \"{0}\"".format(objConfig["LayerName"]))
                        layerPath = duongDanNguon + "/" + objConfig["DatasetName"] + "/" + objConfig["LayerName"]
                        arcpy.Buffer_analysis(in_features = layerPath, out_feature_class = layerPath + "_Buffer", buffer_distance_or_field = "0.1 Meters", line_side = "RIGHT")
                        temp = {
                            "LayerType": objConfig["LayerType"],
                            "DatasetName": objConfig["DatasetName"],
                            "LayerName": objConfig["LayerName"] + "_Buffer",
                            "featureLayer": "in_memory\\" + objConfig["LayerName"] + "_Buffer_Layer",
                            "featureCopy": "in_memory\\" + objConfig["LayerName"] + "_Buffer_Copy",
                            "featureCopyLayer": "in_memory\\" + objConfig["LayerName"] + "_Buffer_Copy_Layer",
                            "FID_XXX": "FID_" + objConfig["LayerName"]
                        }
                        listPolygon.append(temp)
            # Tag every feature with its source OID in a per-layer FID field,
            # then copy into in_memory for the merge.
            for element in listPolygon:
                arcpy.AddMessage("\n# Xu ly lop: {0}".format(element["LayerName"]))
                layerPath = duongDanNguon + "/" + element["DatasetName"] + "/" + element["LayerName"]
                arcpy.MakeFeatureLayer_management(layerPath, element["featureLayer"])
                arcpy.AddField_management(element["featureLayer"], element["FID_XXX"], "LONG")
                with arcpy.da.UpdateCursor(element["featureLayer"], ["OID@", element["FID_XXX"]]) as cursor:
                    for row in cursor:
                        row[1] = row[0]
                        cursor.updateRow(row)
                arcpy.CopyFeatures_management(layerPath, element["featureCopy"])
                arcpy.MakeFeatureLayer_management(element["featureCopy"], element["featureCopyLayer"])
                ## Field Mappings ##
                enableFields.append(element["FID_XXX"])
                fieldMappings.addTable(element["featureCopyLayer"])
                inputsMerge.append(element["featureCopyLayer"])
            # Keep only the per-layer FID fields in the merged output.
            for field in fieldMappings.fields:
                if field.name not in enableFields:
                    fieldMappings.removeFieldMap(fieldMappings.findFieldMapIndex(field.name))
            ## Merge ##
            arcpy.AddMessage("\n# Merge Polygon...")
            outPathMerge = "in_memory\\outPathMergeTemp"
            #outPathMerge = "C:/Generalize_25_50/50K_Process.gdb/DanCuCoSoHaTang/outPathMergeTemp"
            arcpy.Merge_management (inputsMerge, outPathMerge, fieldMappings)
            ## Simplify Polygon ##
            arcpy.AddMessage("\n# Simplify Polygon...")
            outPathSimplify = "in_memory\\outPathSimplifyTemp"
            #outPathSimplify = "C:/Generalize_25_50/50K_Process.gdb/DanCuCoSoHaTang/outPathSimplifyTemp"
            arcpy.SimplifyPolygon_cartography(in_features = outPathMerge, out_feature_class = outPathSimplify, algorithm = _algorithm, tolerance = _tolerance, minimum_area = "0 SquareMeters", error_option = _error_option, collapsed_point_option = _collapsed_point_option)
            ## MakeLayerFeature ##
            outPathSimplifyLayer = "in_memory\\outPathSimplifyTempLayer"
            arcpy.MakeFeatureLayer_management(outPathSimplify, outPathSimplifyLayer)
            ## Update Shape Feature Class ##
            # Write each simplified shape back to its source feature (matched
            # by the FID tag); source features without a match are deleted.
            arcpy.AddMessage("\n# Update Shape Feature Class:")
            for element in listPolygon:
                arcpy.AddMessage("\n\t# Update {0}...".format(element["LayerName"]))
                ### MakeLayerFeature ###
                layerPath = duongDanNguon + "/" + element["DatasetName"] + "/" + element["LayerName"]
                arcpy.MakeFeatureLayer_management(layerPath, element["featureLayer"])
                ### Select ###
                strQuery = element["FID_XXX"] + " IS NOT NULL"
                arcpy.SelectLayerByAttribute_management(outPathSimplifyLayer, "NEW_SELECTION", strQuery)
                ### Copy To Table Temp ###
                outTableTemp = "in_memory\\outTableTemp"
                arcpy.CopyFeatures_management(outPathSimplifyLayer, outTableTemp)
                ### Transfer simplified geometry back by matching FIDs ###
                with arcpy.da.UpdateCursor(element["featureLayer"], ["OID@", "SHAPE@"]) as cursor:
                    for row in cursor:
                        found = False
                        with arcpy.da.UpdateCursor(outTableTemp, [element["FID_XXX"], "SHAPE@"]) as cursorSub:
                            for rowSub in cursorSub:
                                if row[0] == rowSub[0]:
                                    found = True
                                    row[1] = rowSub[1]
                                    cursor.updateRow(row)
                                    cursorSub.deleteRow()
                                    break
                        if found == False:
                            cursor.deleteRow()
            arcpy.AddMessage("\n# Hoan thanh Simplify Polygon!!!")
            ############################################## Simplify Line #############################
            # Same merge/simplify/write-back pipeline for polylines.
            arcpy.AddMessage("\n# Bat dau Simplify Line")
            listPolyLine = []
            fieldMappingLine = arcpy.FieldMappings()
            enableFieldLine = []
            inputsMergeLine = []
            for objConfig in listLayerConfig:
                if objConfig["LayerType"] == "Polyline" and objConfig["RunStatus"] == "True":
                    if not(_checkExitLayer == False and objConfig["LayerName"] == "SongSuoiL_KenhMuongL_SnapPBM"):
                        temp = {
                            "LayerType": objConfig["LayerType"],
                            "DatasetName": objConfig["DatasetName"],
                            "LayerName": objConfig["LayerName"],
                            "featureLayer": "in_memory\\" + objConfig["LayerName"] + "_Layer",
                            "featureCopy": "in_memory\\" + objConfig["LayerName"] + "_Copy",
                            "featureCopyLayer": "in_memory\\" + objConfig["LayerName"] + "_Copy_Layer",
                            "FID_XXX": "FID_" + objConfig["LayerName"]
                        }
                        listPolyLine.append(temp)
            for element in listPolyLine:
                arcpy.AddMessage("\n# Xu ly lop: {0}".format(element["LayerName"]))
                layerPath = duongDanNguon + "/" + element["DatasetName"] + "/" + element["LayerName"]
                # Contour layer: preserve the original OBJECTID before editing.
                if element["LayerName"] == "DuongBinhDo":
                    arcpy.AddField_management(layerPath, "OLD_OBJECTID", "LONG", None, None, None,"OLD_OBJECTID", "NULLABLE")
                    arcpy.CalculateField_management(layerPath, "OLD_OBJECTID", "!OBJECTID!", "PYTHON_9.3")
                arcpy.MakeFeatureLayer_management(layerPath, element["featureLayer"])
                arcpy.AddField_management(element["featureLayer"], element["FID_XXX"], "LONG")
                with arcpy.da.UpdateCursor(element["featureLayer"], ["OID@", element["FID_XXX"]]) as cursor:
                    for row in cursor:
                        row[1] = row[0]
                        cursor.updateRow(row)
                arcpy.CopyFeatures_management(layerPath, element["featureCopy"])
                arcpy.MakeFeatureLayer_management(element["featureCopy"], element["featureCopyLayer"])
                ## Field Mappings ##
                enableFieldLine.append(element["FID_XXX"])
                fieldMappingLine.addTable(element["featureCopyLayer"])
                inputsMergeLine.append(element["featureCopyLayer"])
            for field in fieldMappingLine.fields:
                if field.name not in enableFieldLine:
                    fieldMappingLine.removeFieldMap(fieldMappingLine.findFieldMapIndex(field.name))
            ## Merge ##
            arcpy.AddMessage("\n# Merge Polyline...")
            outPathMerge = "in_memory\\outPathMergeTemp"
            arcpy.Merge_management (inputsMergeLine, outPathMerge, fieldMappingLine)
            ## Simplify Polyline ##
            arcpy.AddMessage("\n# Simplify Polyline...")
            outPathSimplify = "in_memory\\outPathSimplifyTemp"
            '''
            arcpy.MakeFeatureLayer_management(duongDanNguon + "/ThuyHe/SongSuoiA", "ThuyHe_SongSuoiA_Lyr")
            arcpy.MakeFeatureLayer_management(duongDanNguon + "/ThuyHe/MatNuocTinh", "ThuyHe_MatNuocTinh_Lyr")
            arcpy.MakeFeatureLayer_management(duongDanNguon + "/ThuyHe/KenhMuongA", "ThuyHe_KenhMuongA_Lyr")
            in_barriers_Line = ["ThuyHe_SongSuoiA_Lyr", "ThuyHe_MatNuocTinh_Lyr", "ThuyHe_KenhMuongA_Lyr"]
            '''
            arcpy.SimplifyLine_cartography(in_features = outPathMerge, out_feature_class = outPathSimplify, algorithm = _algorithm, tolerance = _tolerance, collapsed_point_option = _collapsed_point_option)
            ## MakeLayerFeature ##
            outPathSimplifyLayer = "in_memory\\outPathSimplifyTempLayer"
            arcpy.MakeFeatureLayer_management(outPathSimplify, outPathSimplifyLayer)
            ## Update Shape Feature Class ##
            arcpy.AddMessage("\n# Update Shape Feature Class:")
            for element in listPolyLine:
                if element["LayerType"] == "Polyline":
                    arcpy.AddMessage("\n\t# Update {0}...".format(element["LayerName"]))
                    ### MakeLayerFeature ###
                    layerPath = duongDanNguon + "/" + element["DatasetName"] + "/" + element["LayerName"]
                    arcpy.MakeFeatureLayer_management(layerPath, element["featureLayer"])
                    ### Select ###
                    strQuery = element["FID_XXX"] + " IS NOT NULL"
                    arcpy.SelectLayerByAttribute_management(outPathSimplifyLayer, "NEW_SELECTION", strQuery)
                    ### Copy To Table Temp ###
                    outTableTemp = "in_memory\\outTableTemp"
                    arcpy.CopyFeatures_management(outPathSimplifyLayer, outTableTemp)
                    ### Transfer simplified geometry back by matching FIDs ###
                    with arcpy.da.UpdateCursor(element["featureLayer"], ["OID@", "SHAPE@"]) as cursor:
                        for row in cursor:
                            found = False
                            with arcpy.da.UpdateCursor(outTableTemp, [element["FID_XXX"], "SHAPE@"]) as cursorSub:
                                for rowSub in cursorSub:
                                    if row[0] == rowSub[0]:
                                        found = True
                                        row[1] = rowSub[1]
                                        cursor.updateRow(row)
                                        cursorSub.deleteRow()
                                        break
                            if found == False:
                                cursor.deleteRow()
            arcpy.AddMessage("\n# Hoan thanh Simplify Polyline!!!")
            ############################################## Snap Line to Polygon #############################
            # Snap each original line layer back onto its own simplified
            # buffer (the '_Buffer' entries collected above).
            arcpy.AddMessage("\n# Bat dau Snap")
            for elementPolygon in listPolygon:
                if elementPolygon["LayerType"] == "Polyline":
                    lineLayerName = elementPolygon["LayerName"][:elementPolygon["LayerName"].find('_Buffer')]
                    if (lineLayerName <> "DuongBinhDo"):
                        arcpy.AddMessage("\n\t# Snap: {0}".format(lineLayerName))
                        layerBufferPath = duongDanNguon + "/" + elementPolygon["DatasetName"] + "/" + elementPolygon["LayerName"]
                        layerLinePath = duongDanNguon + "/" + elementPolygon["DatasetName"] + "/" + lineLayerName
                        arcpy.Snap_edit(layerLinePath, [[layerBufferPath, "EDGE", self.snap_distance]])
            ############## Snap Other
            # Land-cover (PhuBeMat) patch-up: snap to the river reference,
            # fill gaps with tagged slivers, eliminate them, and re-snap the
            # river lines.
            if _checkExitLayer:
                arcpy.AddMessage("\n\t# Snap other: {0}".format("PhuBeMat"))
                arcpy.Integrate_management([[duongDanNguon + "/PhuBeMat/PhuBeMat", 1]], "2 Meters")
                arcpy.Densify_edit(duongDanNguon + "/PhuBeMat/PhuBeMat", "DISTANCE","2 Meters",None ,None)
                arcpy.Snap_edit(duongDanNguon + "/PhuBeMat/PhuBeMat", [[duongDanNguon + "/ThuyHe/SongSuoiL_KenhMuongL_SnapPBM", "EDGE", "35 Meters"]])
                arcpy.Integrate_management([[duongDanNguon + "/ThuyHe/SongSuoiL_KenhMuongL_SnapPBM", 1],[duongDanNguon + "/PhuBeMat/PhuBeMat", 2]], "2 Meters")
                arcpy.Erase_analysis(in_features = duongDanNguon + "/PhuBeMat/PhuBeMat_Full", erase_features = duongDanNguon + "/PhuBeMat/PhuBeMat", out_feature_class = duongDanNguon + "/PhuBeMat/PhuBeMat_Lo")
                arcpy.CalculateField_management(duongDanNguon + "/PhuBeMat/PhuBeMat_Lo", "maNhanDang", '"temp123"', "PYTHON_9.3")
                arcpy.Append_management([duongDanNguon + "/PhuBeMat/PhuBeMat_Lo"], duongDanNguon + "/PhuBeMat/PhuBeMat", "NO_TEST",None,None)
                arcpy.MultipartToSinglepart_management(duongDanNguon + "/PhuBeMat/PhuBeMat", duongDanNguon + "/PhuBeMat/PhuBeMat2")
                arcpy.MakeFeatureLayer_management(duongDanNguon + "/PhuBeMat/PhuBeMat2", "PhuBeMat_Temp_Lyr")
                arcpy.SelectLayerByAttribute_management("PhuBeMat_Temp_Lyr", "NEW_SELECTION", "maNhanDang = 'temp123'")
                arcpy.Eliminate_management(in_features = "PhuBeMat_Temp_Lyr", out_feature_class = duongDanNguon + "/PhuBeMat/PhuBeMat3", selection = "LENGTH")
                arcpy.Densify_edit(duongDanNguon + "/ThuyHe/SongSuoiL", "DISTANCE","2 Meters",None ,None)
                arcpy.Snap_edit(duongDanNguon + "/ThuyHe/SongSuoiL", [[duongDanNguon + "/ThuyHe/SongSuoiL_KenhMuongL_SnapPBM", "EDGE", "2 Meters"]])
                arcpy.CopyFeatures_management(duongDanNguon + "/PhuBeMat/PhuBeMat3", duongDanNguon + "/PhuBeMat/PhuBeMat")
            ############################################## Copy to final #############################
            # Strip the temporary FID fields and copy results to the final GDB.
            for element in listPolygon:
                if element["LayerType"] == "Polygon":
                    if element["LayerName"] <> "PhuBeMat_Full":
                        layerPath = duongDanNguon + "/" + element["DatasetName"] + "/" + element["LayerName"]
                        layerFinalPath = duongDanDich + "/" + element["DatasetName"] + "/" + element["LayerName"]
                        arcpy.DeleteField_management(layerPath, [element["FID_XXX"]])
                        arcpy.CopyFeatures_management(layerPath, layerFinalPath)
            for element in listPolyLine:
                if element["LayerType"] == "Polyline":
                    if element["LayerName"] <> "SongSuoiL_KenhMuongL_SnapPBM":
                        layerPath = duongDanNguon + "/" + element["DatasetName"] + "/" + element["LayerName"]
                        layerFinalPath = duongDanDich + "/" + element["DatasetName"] + "/" + element["LayerName"]
                        arcpy.DeleteField_management(layerPath, [element["FID_XXX"]])
                        arcpy.CopyFeatures_management(layerPath, layerFinalPath)
            #arcpy.AddMessage("\n# Hoan thanh!!!")
        else:
            arcpy.AddMessage("\n# Khong tim thay file cau hinh: \"{0}\"".format(urlFile))
    except OSError as error:
        arcpy.AddMessage("Error" + error.message)
    except ValueError as error:
        arcpy.AddMessage("Error" + error.message)
    except arcpy.ExecuteError as error:
        arcpy.AddMessage("Error" + error.message)
    finally:
        # Always release the in_memory workspace.
        arcpy.Delete_management("in_memory")
def xu_ly_duong_bo_nuoc(self):
    """Derive the water-edge line layer (ThuyHe/DuongBoNuoc) from water polygons.

    Pipeline: snap alluvial bars onto the water layers; merge rivers
    (SongSuoiA), still water (MatNuocTinh), canals (KenhMuongA) and bars
    (BaiBoiA) into one tagged polygon copy; aggregate touching polygons;
    keep only the largest-area source polygon per aggregate group; convert
    the result to lines; then load the lines into DuongBoNuoc of both the
    working (duong_dan_nguon) and final (duong_dan_dich) geodatabase.
    """
    arcpy.env.overwriteOutput = 1
    # Declare variables: input water-surface polygon layers
    SongSuoiA = self.duong_dan_nguon + "ThuyHe/SongSuoiA"
    MatNuocTinh = self.duong_dan_nguon + "ThuyHe/MatNuocTinh"
    KenhMuongA = self.duong_dan_nguon + "ThuyHe/KenhMuongA"
    lop_bai_boi = self.duong_dan_nguon + "ThuyHe/BaiBoiA"
    # Working copies and intermediate outputs
    SongSuoiA_Copy = SongSuoiA + "_Copy"
    MatNuocTinh_Copy = MatNuocTinh + "_Copy"
    KenhMuongA_Copy = KenhMuongA + "_Copy"
    lop_thuy_he_Copy_Agg = SongSuoiA_Copy + "_Agg"
    lop_thuy_he_Copy_Agg_Tbl = self.duong_dan_nguon + "SongSuoiA_Copy_Agg_Tbl"
    lop_thuy_he_DuongBoNuoc = SongSuoiA_Copy + "_DuongBoNuoc"
    # Snap alluvial-bar polygons onto the edges of the three water layers.
    # self.khoang_Cach is the snap tolerance held on the instance
    # (value/units set elsewhere -- confirm against the class constructor).
    arcpy.Snap_edit(lop_bai_boi,
                    [[KenhMuongA, "EDGE", self.khoang_Cach],
                     [MatNuocTinh, "EDGE", self.khoang_Cach],
                     [SongSuoiA, "EDGE", self.khoang_Cach]])
    # Append: copy each source layer, tag it with a LOAI_RANH_GIOI
    # (boundary-type) code, then merge everything into SongSuoiA_Copy.
    # Codes 6/1/4 are presumably river / still water / canal -- TODO confirm
    # against the data dictionary.
    arcpy.CopyFeatures_management(SongSuoiA, SongSuoiA_Copy)
    arcpy.AddField_management(SongSuoiA_Copy, "LOAI_RANH_GIOI", "LONG", None,
                              None, None, "LOAI_RANH_GIOI", "NULLABLE")
    arcpy.CalculateField_management(SongSuoiA_Copy, "LOAI_RANH_GIOI", 6,
                                    "PYTHON_9.3")
    arcpy.CopyFeatures_management(MatNuocTinh, MatNuocTinh_Copy)
    arcpy.AddField_management(MatNuocTinh_Copy, "LOAI_RANH_GIOI", "LONG", None,
                              None, None, "LOAI_RANH_GIOI", "NULLABLE")
    arcpy.CalculateField_management(MatNuocTinh_Copy, "LOAI_RANH_GIOI", 1,
                                    "PYTHON_9.3")
    arcpy.CopyFeatures_management(KenhMuongA, KenhMuongA_Copy)
    arcpy.AddField_management(KenhMuongA_Copy, "LOAI_RANH_GIOI", "LONG", None,
                              None, None, "LOAI_RANH_GIOI", "NULLABLE")
    arcpy.CalculateField_management(KenhMuongA_Copy, "LOAI_RANH_GIOI", 4,
                                    "PYTHON_9.3")
    arcpy.Append_management(
        [lop_bai_boi, MatNuocTinh_Copy, KenhMuongA_Copy], SongSuoiA_Copy,
        "NO_TEST", None, None)
    # AggregatePolygons: dissolve touching polygons; the tool also writes a
    # table mapping each output polygon (OUTPUT_FID) to its inputs (INPUT_FID)
    arcpy.AggregatePolygons_cartography(SongSuoiA_Copy, lop_thuy_he_Copy_Agg,
                                        "0.001 Meters", "0 SquareMeters",
                                        "0 SquareMeters", "NON_ORTHOGONAL", "",
                                        lop_thuy_he_Copy_Agg_Tbl)
    # Bring the source attributes (incl. LOAI_RANH_GIOI, Shape_Area) onto the
    # mapping table via the input OBJECTID
    DM.JoinField(lop_thuy_he_Copy_Agg_Tbl, "INPUT_FID", SongSuoiA_Copy,
                 "OBJECTID", None)
    # Mark the source polygon with the largest area within each aggregate
    # group (rows are sorted ascending by OUTPUT_FID, so groups are contiguous)
    rows2 = arcpy.SearchCursor(lop_thuy_he_Copy_Agg_Tbl,
                               sort_fields="OUTPUT_FID A")
    _outPut_id = 0
    _area_max = 0
    my_dict = {}  # OUTPUT_FID -> largest Shape_Area seen for that group
    for row2 in rows2:
        if row2.getValue("LOAI_RANH_GIOI") is not None:
            if _outPut_id == row2.getValue("OUTPUT_FID"):
                # same group as previous row: keep the running maximum
                if _area_max < row2.getValue("Shape_Area"):
                    _area_max = row2.getValue("Shape_Area")
                    my_dict[row2.getValue("OUTPUT_FID")] = _area_max
            else:
                # new group: restart the running maximum
                _area_max = row2.getValue("Shape_Area")
                my_dict[row2.getValue("OUTPUT_FID")] = _area_max
            _outPut_id = row2.getValue("OUTPUT_FID")
    # Update the join table: keep only the dominant (largest-area) row of
    # each group, drop rows without a boundary-type code
    rows_update = arcpy.UpdateCursor(lop_thuy_he_Copy_Agg_Tbl)
    for row_update in rows_update:
        if row_update.getValue("LOAI_RANH_GIOI") is None:
            rows_update.deleteRow(row_update)
        else:
            if row_update.getValue("Shape_Area") != my_dict[
                    row_update.getValue("OUTPUT_FID")]:
                rows_update.deleteRow(row_update)
    del row_update
    del rows_update
    # Transfer the surviving attributes onto the aggregated polygons
    DM.JoinField(lop_thuy_he_Copy_Agg, "OBJECTID", lop_thuy_he_Copy_Agg_Tbl,
                 "OUTPUT_FID", None)
    # Remove alluvial-bar polygons (no LOAI_RANH_GIOI) from the aggregate
    rows_update = arcpy.UpdateCursor(lop_thuy_he_Copy_Agg)
    for row_update in rows_update:
        if row_update.getValue("LOAI_RANH_GIOI") is None:
            rows_update.deleteRow(row_update)
    del row_update
    del rows_update
    # FeatureToLine: polygon outlines become the water-edge lines
    arcpy.FeatureToLine_management([lop_thuy_he_Copy_Agg],
                                   lop_thuy_he_DuongBoNuoc, None, "ATTRIBUTES")
    # Rework fields: drop join leftovers, then add the output attributes
    arcpy.DeleteField_management(lop_thuy_he_DuongBoNuoc, [
        "FID_SongSuoiA_Copy2_Agg", "OUTPUT_FID", "INPUT_FID",
        "loaiTrangThaiNuocMat", "ten", "doRong", "SongSuoiA_Rep_ID",
        "SongSuoiA_Rep_OVERRIDE", "RuleID", "Override", "Shape_Length_1",
        "Shape_Area_1", "loaiTrangThaiDuongBoNuoc", "loaiRanhGioiNuocMat"
    ])
    arcpy.AddField_management(lop_thuy_he_DuongBoNuoc,
                              "loaiTrangThaiDuongBoNuoc", "SHORT", None, None,
                              None, "Loai trang thai duong bo nuoc",
                              "NULLABLE", None, "LoaiTrangThaiDuongBoNuoc")
    arcpy.AddField_management(lop_thuy_he_DuongBoNuoc, "loaiRanhGioiNuocMat",
                              "LONG", None, None, None,
                              "Loai ranh gioi nuoc mat", "NULLABLE", None,
                              "LoaiRanhGioiNuocMat")
    arcpy.CalculateField_management(lop_thuy_he_DuongBoNuoc,
                                    "loaiTrangThaiDuongBoNuoc", 1,
                                    "PYTHON_9.3")
    # Carry the boundary-type tag into the schema field, then drop the tag
    arcpy.CalculateField_management(lop_thuy_he_DuongBoNuoc,
                                    "loaiRanhGioiNuocMat",
                                    "!LOAI_RANH_GIOI!", "PYTHON_9.3")
    arcpy.AssignDefaultToField_management(lop_thuy_he_DuongBoNuoc,
                                          "maDoiTuong", "LG01", None)
    arcpy.CalculateField_management(lop_thuy_he_DuongBoNuoc, "maDoiTuong",
                                    "'LG01'", "PYTHON_9.3")
    arcpy.DeleteField_management(lop_thuy_he_DuongBoNuoc, ["LOAI_RANH_GIOI"])
    # Empty the target layer, then load the derived lines into it
    DuongBoNuoc_Path = self.duong_dan_nguon + "ThuyHe/DuongBoNuoc"
    if int(arcpy.GetCount_management(DuongBoNuoc_Path).getOutput(0)) > 0:
        arcpy.DeleteFeatures_management(DuongBoNuoc_Path)
    duongBoNuocFields = [
        "SHAPE@", "maNhanDang", "ngayThuNhan", "ngayCapNhat", "maDoiTuong",
        "loaiTrangThaiDuongBoNuoc", "loaiRanhGioiNuocMat", "nguonDuLieu",
        "maTrinhBay", "tenManh", "soPhienHieuManhBanDo"
    ]
    # same fields plus the representation id set to 1 on insert
    duongBoNuocFields2 = [
        "SHAPE@", "maNhanDang", "ngayThuNhan", "ngayCapNhat", "maDoiTuong",
        "loaiTrangThaiDuongBoNuoc", "loaiRanhGioiNuocMat", "nguonDuLieu",
        "maTrinhBay", "tenManh", "soPhienHieuManhBanDo", "DuongBoNuoc_Rep_ID"
    ]
    with arcpy.da.SearchCursor(lop_thuy_he_DuongBoNuoc,
                               duongBoNuocFields) as sCur:
        with arcpy.da.InsertCursor(DuongBoNuoc_Path,
                                   duongBoNuocFields2) as iCur:
            for sRow in sCur:
                iCur.insertRow([
                    sRow[0], sRow[1], sRow[2], sRow[3], sRow[4], sRow[5],
                    sRow[6], sRow[7], sRow[8], sRow[9], sRow[10], 1
                ])
    # Publish to the final geodatabase
    arcpy.CopyFeatures_management(
        DuongBoNuoc_Path, self.duong_dan_dich + "ThuyHe/DuongBoNuoc")
def CreateDuongDiaGioi(self):
    """Build the administrative-boundary line layer (DuongDiaGioi).

    For each administrative level in DiaPhan (doiTuong 3/2/1 -- presumably
    commune/district/province, confirm against the data dictionary) the
    polygon boundaries are intersected into lines, attributed with the
    admin units on either side, de-duplicated across levels (a commune
    line also present at district level is dropped, likewise district vs
    province), and appended into BienGioiDiaGioi/DuongDiaGioi of both the
    working and the final geodatabase. Errors are reported via
    arcpy.AddMessage; in_memory is always cleaned up.
    """
    try:
        arcpy.env.overwriteOutput = 1
        duongDanNguon = "C:/Generalize_25_50/50K_Process.gdb"
        duongDanDich = "C:/Generalize_25_50/50K_Final.gdb"
        arcpy.env.workspace = duongDanNguon + "/BienGioiDiaGioi"
        # Paths for the source polygons, per-level extracts, intersect
        # (line) outputs, spatial-join scratch outputs and final targets.
        DiaPhan_Name = "DiaPhan"
        DiaPhan_Lyr = "DiaPhan_Lyr"
        DiaPhan_Path = duongDanNguon + "/BienGioiDiaGioi/" + DiaPhan_Name
        DiaPhan_Path_Final = duongDanDich + "/BienGioiDiaGioi/" + DiaPhan_Name
        DiaPhan_Xa_Path = DiaPhan_Path + "_Xa"
        DiaPhan_Huyen_Path = DiaPhan_Path + "_Huyen"
        DiaPhan_Tinh_Path = DiaPhan_Path + "_Tinh"
        intersect_Xa_Path = duongDanNguon + "/BienGioiDiaGioi/DuongDiaGioi_Xa"
        intersect_Huyen_Path = duongDanNguon + "/BienGioiDiaGioi/DuongDiaGioi_Huyen"
        intersect_Tinh_Path = duongDanNguon + "/BienGioiDiaGioi/DuongDiaGioi_Tinh"
        joint_Xa_Path = duongDanNguon + "/BienGioiDiaGioi/DuongDiaGioi_Xa_Join"
        joint_Huyen_Path = duongDanNguon + "/BienGioiDiaGioi/DuongDiaGioi_Huyen_Join"
        joint_Tinh_Path = duongDanNguon + "/BienGioiDiaGioi/DuongDiaGioi_Tinh_Join"
        DuongDiaGioi_Name = "DuongDiaGioi"
        DuongDiaGioi_Path = duongDanNguon + "/BienGioiDiaGioi/" + DuongDiaGioi_Name
        DuongDiaGioi_Dich_Path = duongDanDich + "/BienGioiDiaGioi/" + DuongDiaGioi_Name
        songSuoiL_Path = duongDanNguon + "/ThuyHe/SongSuoiL"
        songSuoiL_Path_Final = duongDanDich + "/ThuyHe/SongSuoiL"
        doanTimDuongBo_Path = duongDanNguon + "/GiaoThong/DoanTimDuongBo"
        doanTimDuongBo_Path_Final = duongDanDich + "/GiaoThong/DoanTimDuongBo"

        # Remove slivers between polygons, then pull the boundaries onto
        # the rivers (25 m tolerance) and road axes (5 m tolerance).
        arcpy.Integrate_management([[DiaPhan_Path, 1]], "1 Meters")
        arcpy.Snap_edit(
            DiaPhan_Path,
            [[duongDanNguon + "/ThuyHe/SongSuoiL", "VERTEX", "25 Meters"],
             [duongDanNguon + "/ThuyHe/SongSuoiL", "EDGE", "25 Meters"]])
        arcpy.Snap_edit(
            DiaPhan_Path,
            [[duongDanNguon + "/GiaoThong/DoanTimDuongBo", "VERTEX", "5 Meters"],
             [duongDanNguon + "/GiaoThong/DoanTimDuongBo", "EDGE", "5 Meters"]])
        arcpy.MakeFeatureLayer_management(DiaPhan_Path, DiaPhan_Lyr)

        def _build_level(doiTuong, level_path, intersect_path, joint_path,
                         fid_field):
            """Derive attributed boundary lines for one admin level.

            Extracts the level's polygons, intersects them into lines,
            then fills loaiHienTrangPhapLy/chieuDai and the left/right
            neighbouring-unit names from a one-to-many spatial join.
            """
            arcpy.SelectLayerByAttribute_management(
                DiaPhan_Lyr, "NEW_SELECTION",
                "doiTuong = {0}".format(doiTuong))
            arcpy.CopyFeatures_management(DiaPhan_Lyr, level_path)
            # Self-intersection turns shared polygon edges into lines;
            # each edge appears once per adjacent polygon, so drop
            # geometric duplicates.
            arcpy.Intersect_analysis([[level_path, 1]], intersect_path,
                                     "ALL", None, "LINE")
            arcpy.DeleteIdentical_management(intersect_path, ["Shape"],
                                             None, None)
            for fname, ftype in [("loaiHienTrangPhapLy", "SHORT"),
                                 ("donViHanhChinhLienKeTrai", "TEXT"),
                                 ("donViHanhChinhLienKePhai", "TEXT"),
                                 ("chieuDai", "DOUBLE")]:
                arcpy.AddField_management(intersect_path, fname, ftype,
                                          None, None, None, fname,
                                          "NULLABLE")
            # Keep only the name-related fields from the join source.
            fieldMappings = arcpy.FieldMappings()
            fieldMappings.addTable(level_path)
            for field in fieldMappings.fields:
                if field.name not in ["doiTuong", "danhTuChung", "diaDanh"]:
                    fieldMappings.removeFieldMap(
                        fieldMappings.findFieldMapIndex(field.name))
            # One row per (line, containing polygon): each boundary line
            # joins to the units on both of its sides.
            arcpy.SpatialJoin_analysis(target_features=intersect_path,
                                       join_features=level_path,
                                       out_feature_class=joint_path,
                                       join_operation="JOIN_ONE_TO_MANY",
                                       join_type="KEEP_ALL",
                                       field_mapping=fieldMappings,
                                       match_option="WITHIN")
            # Read the join rows once (the original reopened a
            # SearchCursor per update row, an accidental O(n*m) pass).
            join_rows = [r for r in arcpy.da.SearchCursor(
                joint_path, ["TARGET_FID", "JOIN_FID", "doiTuong",
                             "danhTuChung", "diaDanh"])]
            with arcpy.da.UpdateCursor(intersect_path, [
                    "OID@", fid_field, "loaiHienTrangPhapLy",
                    "donViHanhChinhLienKeTrai", "donViHanhChinhLienKePhai",
                    "chieuDai", "Shape_Length", "doiTuong"
            ]) as uCur:
                for uRow in uCur:
                    for sRow in join_rows:
                        if uRow[0] == sRow[0] and sRow[2] == doiTuong:
                            uRow[2] = 1
                            uRow[5] = uRow[6]  # chieuDai := Shape_Length
                            uRow[7] = sRow[2]
                            # The polygon the line was generated from is
                            # treated as the left neighbour, any other
                            # containing polygon as the right one.
                            if uRow[1] == sRow[1]:
                                uRow[3] = sRow[3] + " " + sRow[4]
                            else:
                                uRow[4] = sRow[3] + " " + sRow[4]
                            uCur.updateRow(uRow)

        # doiTuong: 3 = Xa (commune), 2 = Huyen (district), 1 = Tinh
        # (province) -- presumed mapping, confirm with the data dictionary.
        _build_level(3, DiaPhan_Xa_Path, intersect_Xa_Path, joint_Xa_Path,
                     "FID_DiaPhan_Xa")
        _build_level(2, DiaPhan_Huyen_Path, intersect_Huyen_Path,
                     joint_Huyen_Path, "FID_DiaPhan_Huyen")
        _build_level(1, DiaPhan_Tinh_Path, intersect_Tinh_Path,
                     joint_Tinh_Path, "FID_DiaPhan_Tinh")

        # Drop commune lines that coincide with district lines.
        arcpy.MakeFeatureLayer_management(intersect_Xa_Path,
                                          "DuongDiaGioi_Xa_Lyr")
        arcpy.MakeFeatureLayer_management(intersect_Huyen_Path,
                                          "DuongDiaGioi_Huyen_Lyr")
        arcpy.SelectLayerByLocation_management(
            in_layer="DuongDiaGioi_Xa_Lyr",
            overlap_type="WITHIN",
            select_features="DuongDiaGioi_Huyen_Lyr",
            selection_type="NEW_SELECTION")
        if int(arcpy.GetCount_management("DuongDiaGioi_Xa_Lyr").getOutput(
                0)) > 0:
            arcpy.DeleteFeatures_management("DuongDiaGioi_Xa_Lyr")
        # Drop district lines that coincide with province lines.
        arcpy.MakeFeatureLayer_management(intersect_Tinh_Path,
                                          "DuongDiaGioi_Tinh_Lyr")
        arcpy.SelectLayerByLocation_management(
            in_layer="DuongDiaGioi_Huyen_Lyr",
            overlap_type="WITHIN",
            select_features="DuongDiaGioi_Tinh_Lyr",
            selection_type="NEW_SELECTION")
        if int(arcpy.GetCount_management("DuongDiaGioi_Huyen_Lyr").getOutput(
                0)) > 0:
            arcpy.DeleteFeatures_management("DuongDiaGioi_Huyen_Lyr")

        # Empty the target layer, then load all three levels into it.
        if int(arcpy.GetCount_management(DuongDiaGioi_Path).getOutput(
                0)) > 0:
            arcpy.DeleteFeatures_management(DuongDiaGioi_Path)
        duongDiaGioiFields = [
            "SHAPE@", "maNhanDang", "ngayThuNhan", "ngayCapNhat",
            "maDoiTuong", "loaiHienTrangPhapLy", "donViHanhChinhLienKeTrai",
            "donViHanhChinhLienKePhai", "chieuDai", "doiTuong", "maTrinhBay",
            "tenManh", "soPhienHieuManhBanDo"
        ]
        # Same fields plus the per-level representation and rule ids.
        duongDiaGioiFields2 = [
            "SHAPE@", "maNhanDang", "ngayThuNhan", "ngayCapNhat",
            "maDoiTuong", "loaiHienTrangPhapLy", "donViHanhChinhLienKeTrai",
            "donViHanhChinhLienKePhai", "chieuDai", "doiTuong", "maTrinhBay",
            "tenManh", "soPhienHieuManhBanDo", "DuongDiaGioi_Rep_ID",
            "RuleID"
        ]

        def _append_level(src_path, rep_id, rule_id):
            """Copy one level's lines into DuongDiaGioi, forcing
            loaiHienTrangPhapLy to 1 and stamping the level's
            representation/rule ids (as the original code did)."""
            with arcpy.da.SearchCursor(src_path,
                                       duongDiaGioiFields) as sCur:
                with arcpy.da.InsertCursor(DuongDiaGioi_Path,
                                           duongDiaGioiFields2) as iCur:
                    for sRow in sCur:
                        iCur.insertRow(
                            list(sRow[:5]) + [1] + list(sRow[6:]) +
                            [rep_id, rule_id])

        _append_level(intersect_Xa_Path, 5, 1)
        _append_level(intersect_Huyen_Path, 3, 3)
        _append_level(intersect_Tinh_Path, 1, 2)

        # Publish results to the final geodatabase.
        arcpy.CopyFeatures_management(DuongDiaGioi_Path,
                                      DuongDiaGioi_Dich_Path)
        arcpy.CopyFeatures_management(songSuoiL_Path, songSuoiL_Path_Final)
        arcpy.CopyFeatures_management(DiaPhan_Path, DiaPhan_Path_Final)
        arcpy.CopyFeatures_management(doanTimDuongBo_Path,
                                      doanTimDuongBo_Path_Final)
    except OSError as error:
        arcpy.AddMessage("Error" + error.message)
    except ValueError as error:
        arcpy.AddMessage("Error" + error.message)
    except arcpy.ExecuteError as error:
        arcpy.AddMessage("Error" + error.message)
    finally:
        # Always release the in_memory workspace.
        arcpy.Delete_management("in_memory")
arcpy.env.overwriteOutput = True # overwrite files if its already present alllinks = "../shp/NHP/nhp_dissolved/nhp.shp" fips = 'C:/Users/pankaj/Desktop/RAIL/gis/standards/FIPS.shp' #road network with length and speed limit networkDataset = "../shp/NHP/nhpnv14-05shp/NHPNLine_ND" arcpy.CalculateField_management(alllinks, "Length", '!Shape.length@miles!', "PYTHON") snapped_dumm = "C:\GIS\deletethis1.shp" # snap FIPS to nearest links arcpy.Copy_management(fips, snapped_dumm) arcpy.Snap_edit(snapped_dumm, [[alllinks, "EDGE", "100 Miles"]]) # update the network dataset if required (this is not required generally) ND = "../shp/NHP/nhp_dissolved/NHP_ND.nd" feature = "C:/GIS/temp.shp" # i guess this is just a temporary layer/ no worries arcpy.MakeRouteLayer_na(ND, "Route", "Length") snapped_nodes = "allnodes_Snapped_rail2.shp" sumlayer = "C:/GIS/temptemptemp.shp" templayer = "C:/GIS/temp1.shp" emptyshapefile = "C:/GIS/empty.shp" o = "C:/GIS/o.shp" # temporary files d = "C:/GIS/d.shp" m = "C:/GIS/m.shp"
# --- Build a clean, dissolved rail network (ORLinks) and its node layer ---
# Break the raw railway lines at every intersection (within XY_tolerance feet)
arcpy.FeatureToLine_management(railway, "in_memory/T1", "{0} Feet".format(XY_tolerance), "NO_ATTRIBUTES")
print("Dissolving all the features, Preparing ORLinks...")
arcpy.Dissolve_management("in_memory/T1", ORLinks, "", "", "SINGLE_PART", "UNSPLIT_LINES")
print("Generating Points at intersections...")
# Endpoints of every dissolved link become candidate network nodes
arcpy.FeatureVerticesToPoints_management(ORLinks, ORNodes, "BOTH_ENDS")
# arcpy.GeneratePointsAlongLines_management (ORLinks, ORNodes, "DISTANCE", Distance='50000000 meters', Include_End_Points='END_POINTS')
# this takes the longest time
print("Deleting identical points...")
arcpy.DeleteIdentical_management(ORNodes, ["Shape"])
print("US Data prepared successfully")
# Snap allnodes to the nearest ORLinks vertex, then split the links at the
# snapped nodes so every node sits on a link endpoint
arcpy.Copy_management(allnodes, SnappedNodes)
arcpy.Snap_edit(SnappedNodes, [[ORLinks, "VERTEX", "10000 Feet"]])
arcpy.SplitLineAtPoint_management(ORLinks, SnappedNodes, temp1)
arcpy.Copy_management(temp1, ORLinks)
# preparing Clipped data: buffer the final links by the clip distance
arcpy.Buffer_analysis(Links_Final2, "in_memory/T2", "{0} Miles".format(clip_distance), "FULL", "ROUND", "NONE", "", "PLANAR")
# arcpy.GetCount_management("in_memory/T3").getOutput (0)
# arcpy.Dissolve_management("in_memory/T2", "in_memory/T3" , "FID", "", "SINGLE_PART", "DISSOLVE_LINES")
# NOTE(review): "in_memory/T3" is only created by the commented-out Dissolve
# above, so the ListFields/GetCount calls below may fail at runtime -- confirm
fieldname = "in_memory/T3"
fields = arcpy.ListFields(fieldname)
arcpy.GetCount_management(fieldname).getOutput(0)
for field in fields:
    # trailing comma: Python 2 print, keeps output on one line
    print("{0}".format(field.name)),