def integrating_network(lines, tolerance="0 Meters"):
    """Clean and consolidate a power-line feature class.

    Splits the input into overhead ('ВЛ') and cable ('КЛ') lines, integrates
    each with its own tolerance, then merges, splits, de-duplicates and
    dissolves the result back into multipart lines.

    :param lines: input line feature class with a Line_Type attribute
    :param tolerance: cluster tolerance used for the overhead lines and for
        FindIdentical (default "0 Meters")
    :return: result of the final Dissolve ("Unsplited_Lines")
    """
    # Separate the two line categories so each can get its own tolerance.
    overhead_fc = arcpy.FeatureClassToFeatureClass_conversion(
        lines, "Network", "Lines_over_p", where_clause="Line_Type = 'ВЛ'")
    cable_fc = arcpy.FeatureClassToFeatureClass_conversion(
        lines, "Network", "Lines_cable_p", where_clause="Line_Type = 'КЛ'")
    arcpy.Integrate_management(overhead_fc, tolerance)
    arcpy.Integrate_management(cable_fc, "0.1 Meters")

    # Recombine, then break every line at its vertices/intersections.
    merged = arcpy.Merge_management([overhead_fc, cable_fc], "Lines_merge")
    segments = arcpy.SplitLine_management(merged, "SplitLine")

    # Tag coincident duplicate segments and drop them.
    identical_tbl = arcpy.FindIdentical_management(
        segments,
        "in_memory/Find_Ident",
        ["Shape", "Name", "Voltage"],
        xy_tolerance=tolerance,
        output_record_option="ONLY_DUPLICATES")
    tagged = arcpy.JoinField_management(segments, "OBJECTID",
                                        identical_tbl, "IN_FID")
    arcpy.DeleteIdentical_management(tagged, ["Shape", "Name", "Voltage"],
                                     "0.1 Meters")

    # Re-assemble the cleaned segments into one multipart line per circuit.
    dissolved = arcpy.Dissolve_management(
        tagged, "Unsplited_Lines", [
            "Name", "Voltage", "Line_Type", "Start", "End", "Circuit",
            "Operate_Name", "Trace_Version", "Status"
        ],
        multi_part="MULTI_PART")
    return dissolved
def integrate(self):
    """Integrate this (non-point) dataset with a unit-appropriate tolerance.

    Uses 0.8 for foot-based coordinate systems and 0.25 for metric ones.
    Raises UnitError for any other linear unit; does nothing for POINT
    feature classes.
    """
    if self.shapeType != "POINT":
        # str.upper() replaces the deprecated string.upper() helper
        # (removed in Python 3); behavior is identical.
        linearUnit = str(self.spatialReferenceLinearUnitName).upper()
        if linearUnit in ("[M*0.30480060960121924]", "FOOT_US", "FOOT",
                          "[M*0.3048006096012192]",
                          "M*0.3048006096012192", "FT"):
            arcpy.Integrate_management(self.catalogPath, "0.8")
        elif linearUnit in ("METER", "M"):
            arcpy.Integrate_management(self.catalogPath, "0.25")
        else:
            raise UnitError
def Generalizacja_drog():
    """Generalize the road layers (road generalization).

    For each source layer: Dissolve (aggregates features by their
    attributes) into a shapefile under D:\\mgr\\M33033CD\\droga, then
    Integrate with a tolerance of 20 to enforce integrity of shared
    boundaries for features within that distance.
    """
    # Same Dissolve + Integrate pair was copy-pasted eight times in the
    # original; drive it from a list instead. Output names drop the
    # underscore: "ZL_101" -> "ZL101.shp".
    source_layers = ("ZL_101", "ZL_103", "ZL_104", "ZL_105",
                     "ZL_106", "ZL_107", "ZL_108", "ZL_018")
    out_dir = "D:\\mgr\\M33033CD\\droga\\"  # explicit backslashes: no accidental escapes
    for layer in source_layers:
        out_shp = out_dir + layer.replace("_", "") + ".shp"
        arcpy.Dissolve_management(layer, out_shp)
        arcpy.Integrate_management(out_shp, 20)
def integrate(featureClass):
    """Integrate *featureClass* with a tolerance based on its linear unit.

    Uses 0.8 for foot-based spatial references and 0.25 for metric ones.

    :param featureClass: path to the feature class to integrate
    :raises ExistsError: if the feature class does not exist
    """
    if not arcpy.Exists(featureClass):
        raise ExistsError
    try:
        fcDesc = arcpy.Describe(featureClass)
        fcSR = fcDesc.SpatialReference
        # str.upper() replaces deprecated string.upper() (removed in Py3).
        linearUnit = str(fcSR.LinearUnitName).upper()
        if linearUnit in ("[M*0.30480060960121924]", "FOOT_US", "FOOT",
                          "[M*0.3048006096012192]",
                          "M*0.3048006096012192", "FT"):
            arcpy.Integrate_management(featureClass, "0.8")
        elif linearUnit in ("METER", "M"):
            arcpy.Integrate_management(featureClass, "0.25")
        else:
            raise UnitError
    except Exception:
        # Narrowed from a bare except so SystemExit/KeyboardInterrupt still
        # propagate; UnitError and arcpy failures are reported the same way
        # as before.
        config.run_error_message(featureClass, "Integration Failure")
    gc.collect()
def ProcessGioiPhuBeMat(self):
    """Integrate the PhuBeMat (surface cover) feature class in the process
    GDB with a 3-meter tolerance, then copy it into the final GDB.

    Any OS, value or geoprocessing error is reported via AddMessage; the
    in_memory workspace is always cleared.
    """
    try:
        arcpy.env.overwriteOutput = 1
        src_gdb = "C:/Generalize_25_50/50K_Process.gdb"
        dst_gdb = "C:/Generalize_25_50/50K_Final.gdb"
        arcpy.env.workspace = src_gdb + "/PhuBeMat"
        arcpy.Integrate_management([[src_gdb + "/PhuBeMat/PhuBeMat", 1]],
                                   "3 Meters")
        arcpy.CopyFeatures_management(src_gdb + "/PhuBeMat/PhuBeMat",
                                      dst_gdb + "/PhuBeMat/PhuBeMat")
    except (OSError, ValueError, arcpy.ExecuteError) as error:
        # The three original handlers were identical; one tuple handler
        # reports the same message for the same exception types.
        arcpy.AddMessage("Error" + error.message)
    finally:
        arcpy.Delete_management("in_memory")
def delete_dangles(KVL_dissolve, input_points_p):
    """Remove dangling line ends that do not terminate at a station point.

    :param KVL_dissolve: dissolved cable-line feature class
    :param input_points_p: point feature class with a Point_Type attribute
    :return: final dissolved line feature class ("KVL_Dissolve")
    """
    # Keep only station-type points; dangles at these are legitimate ends.
    stations = arcpy.FeatureClassToFeatureClass_conversion(
        input_points_p, "in_memory", "Points_Subset",
        "Point_Type IN ('ПС', 'ЭС', 'РУ')")
    station_lyr = arcpy.MakeFeatureLayer_management(stations, "Points_Layer")

    # Clean coincident geometry, split, drop duplicates and re-dissolve.
    arcpy.Integrate_management(KVL_dissolve)
    resplit = arcpy.SplitLine_management(KVL_dissolve, "SplitLine2")
    arcpy.DeleteIdentical_management(resplit, ["SHAPE", "Name"])
    redissolved = arcpy.Dissolve_management(
        resplit, "Unsplited_Lines2", [
            "Name", "Voltage", "Start", "End", "Circuit",
            "Operate_Name", "Trace_Version", "Status"
        ],
        multi_part="MULTI_PART")

    # Break lines at station locations, then find remaining dangles.
    split_at_stations = arcpy.SplitLineAtPoint_management(
        redissolved, stations, "SplitAtPoint", search_radius="1 Meters")
    dangles = arcpy.FeatureVerticesToPoints_management(
        split_at_stations, 'Dangles_KVL', 'DANGLE')
    dangle_lyr = arcpy.MakeFeatureLayer_management(dangles, "Dangles_Layer")
    line_lyr = arcpy.MakeFeatureLayer_management(split_at_stations,
                                                 "Lines_Layer")

    # Select dangles NOT at a station, then delete the segments touching them.
    arcpy.SelectLayerByLocation_management(dangle_lyr, "INTERSECT",
                                           station_lyr)
    arcpy.SelectLayerByAttribute_management(dangle_lyr, "SWITCH_SELECTION")
    arcpy.SelectLayerByLocation_management(line_lyr, "INTERSECT", dangle_lyr)
    arcpy.DeleteFeatures_management(line_lyr)

    result = arcpy.Dissolve_management(
        line_lyr, "KVL_Dissolve", [
            "Name", "Voltage", "Start", "End", "Circuit",
            "Operate_Name", "Status"
        ],
        multi_part="MULTI_PART")
    return result
def orthogonalizepolygons(layer, in_edit=False, threshold=10.0, proceed_groups=True, editor=None):  # Main function
    """Orthogonalize building polygons (square their corners).

    :type layer: string
    :type in_edit: boolean
    :type threshold: double
    :type proceed_groups: boolean
    :type editor: Editor object
    """
    arcpy.env.overwriteOutput = True
    arcpy.env.XYTolerance = "0.1 Meters"
    arcpy.env.XYResolution = "0.01 Meters"
    if not in_edit:
        # Running as a script tool: read parameters from the tool dialog.
        if layer == '':
            layer = arcpy.GetParameterAsText(0)
            threshold = arcpy.GetParameterAsText(1)
            proceed_groups = arcpy.GetParameterAsText(2)
        try:
            threshold = float(threshold)
        except ValueError:
            arcpy.AddMessage(u'Default threshold will be used')
            threshold = 10.0
        try:
            # Tool parameters arrive as strings; 'false' means False.
            if proceed_groups == 'false':
                proceed_groups = False
            else:
                proceed_groups = True
        except NameError:
            proceed_groups = True
        if proceed_groups:
            arcpy.AddMessage(u'-> All buildings will be proceeded')
        else:
            arcpy.AddMessage(u'-> Only detached buildings will be proceeded')
    if in_edit:
        layer_work = layer
    else:
        # When the operation is over the whole layer, work on a backup copy.
        dsc = arcpy.Describe(layer)
        layer_path = dsc.catalogPath
        if len(os.path.splitext(layer_path)[1]) >= 1:
            layer_work = u'{0}_ortho.{1}'.format(
                os.path.splitext(layer_path)[0],
                os.path.splitext(layer_path)[1])
        else:
            layer_work = u'{0}_ortho'.format(layer_path)
        arcpy.AddMessage(u'-> New Layer: {0}'.format(layer_work))
        arcpy.Copy_management(layer_path, layer_work)
    ####################################################################################################################
    # FIXME
    # Integrate fails if path contains restricted characters (spaces etc.)
    # It happens when running a tool for layer, not selection
    try:
        arcpy.Integrate_management(layer_work, '0.1 Meters')
    except RuntimeError:
        arcpy.AddMessage(u'...Failed to run Integrate tool')
    # dictionary created from main layer with {ID: {geom: [Geomentry], isAlone: Boolean}
    poly_dict = {}
    with arcpy.da.SearchCursor(layer_work, ['OID@', 'SHAPE@WKT'],
                               spatial_reference=WEBMERC) as sc:
        for row in sc:
            geom = wkt2list(row[1])
            poly_dict[row[0]] = {'geom': geom, 'isAlone': False}
    if in_edit:
        # Editing a selection: treat all selected polygons as one group 0.
        polygons_groups = [[
            0, [row[0] for row in arcpy.da.SearchCursor(layer_work, 'OID@')]
        ]]
    else:
        polygons_groups = grouppolygons(layer_work)
    # arcpy.AddMessage(u'Threshold: {0} Meters'. format(threshold))
    # ##############################################################
    # TODO
    # for k in xrange(3) - dummy way to run orthogonalization few times in a row.
    # normal way is to track changes and stop when the changes aren't significant
    for g in polygons_groups:
        # repeat = True
        # test_dict_before = {k: poly_dict[k] for k in g[1]}
        # test_dict_before = deepcopy(test_dict_before)
        # cycle = 1
        # while repeat:
        for k in xrange(3):
            # print u'{0} iteration'.format(cycle)
            points_coords, points_id = removepoints(poly_dict=poly_dict,
                                                    group=g,
                                                    threshold=threshold)
            # NOTE(review): original whitespace was lost; the orthogonalize
            # call is placed inside this guard because cluster_index /
            # cluster_angles are only computed here — confirm against VCS.
            if proceed_groups or len(g[1]) == 1:
                cluster_index, cluster_angles = clusterizeangles(
                    poly_dict=poly_dict, group=g, threshold=threshold)
                poly_dict = orthogonalizegroup(poly_dict=poly_dict,
                                               group=g,
                                               index=cluster_index,
                                               angle=cluster_angles,
                                               points_coords=points_coords,
                                               points_id=points_id,
                                               threshold=threshold / 2)
            # test_dict_after = {k: poly_dict[k] for k in g[1]}
            # if test_dict_after == test_dict_before:
            #     repeat = False
            # else:
            #     test_dict_before = deepcopy(test_dict_after)
            # cycle += 1
    arcpy.AddMessage(u'-> Excessive points removed, polygons orthogonalized')
    if in_edit and editor is not None:
        editor.start_operation()
    # Write the adjusted geometries back into the working layer.
    with arcpy.da.UpdateCursor(layer_work, ['SHAPE@', 'OID@'],
                               spatial_reference=WEBMERC) as uc:
        for row in uc:
            row = [creategeometryfromlist(poly_dict[row[1]]['geom']), row[1]]
            uc.updateRow(row)
    if in_edit and editor is not None:
        editor.stop_operation('Orthogonalize')
#logger.info("processing siteID below " + str(siteID)) if siteID in siteIDs: sites.deleteRow(row) #logger.info("delete row with ID: " + str(siteID)) else: siteIDs.append(int(row.getValue("ID_CHAR"))) #logger.info("stored siteID " + str(int(row.getValue("ID"))) + " for search") #del row del sites #6. Calculate Elevation of centroid for XML as separate columns- Elevation script is part of ERIS toolbox # img directory ERIS_clipcopy = os.path.join(scratch, "ERISCC.shp") arcpy.CopyFeatures_management(ERISPR, ERIS_clipcopy) arcpy.Integrate_management(ERIS_clipcopy, ".3 Meters") #10. Calculate Distance with integration and spatial join- can be easily done with Distance tool along with direction if ArcInfo or Advanced license arcpy.ImportToolbox( os.path.join(r'\\cabcvan1gis006\GISData\PSR\python', "ERIS.tbx")) projPointSHP = arcpy.inhouseElevation_ERIS(projPointSHP).getOutput(0) elevationArray = [] Call_Google = '' rows = arcpy.SearchCursor(projPointSHP) for row in rows: #print row.Elevation if row.Elevation == -999: Call_Google = 'YES' break
# PROCESSING arcpy.env.overwriteOutput = True print "Making copy of taxlots into working GDB" taxlot1_loc = testingOutput_loc + "\\" + "taxlot_clip_" + year + month + day arcpy.CopyFeatures_management(in_features=rawTaxlot_loc, out_feature_class=taxlot1_loc) print "Repairing taxlot geometry" arcpy.RepairGeometry_management(in_features=taxlot1_loc) print "Performing integration on taxlots" xyTolerance = "0.5 feet" arcpy.Integrate_management(in_features=taxlot1_loc, cluster_tolerance=xyTolerance) print "Merging closing resource polygons into taxlots in preparation for aggregation" # combine the manually generated polygons which close off the major highway gaps that cannot be automatically # closed while also maintaining detail arcpy.Merge_management(inputs=[taxlot1_loc, testingClosingPolys_loc], output=intermediates_outline[0]) print "Dissolving taxlot layer in preparation for aggregation" # dissolve together all features to make aggregation simpler arcpy.Dissolve_management(in_features=intermediates_outline[0], out_feature_class=intermediates_outline[1]) print "Performing first aggregation" # The first aggregation leaves holes and some island features leftover, must be re-aggregated after holes are filled CA.AggregatePolygons(
def findsinglevertex(layer):
    # find single vertices
    """Find vertices of *layer* that are not shared with any other vertex.

    Returns {feature OID: [vertex position, ...]} mapping each feature to
    the positions of its non-duplicated ("single") vertices.
    """
    # FIXME
    # Integrate fails when path contains spaces
    # Possible way to fix it is to use arcpy.env and call layer by it's name
    try:
        arcpy.Integrate_management(layer, '0.1 Meters')
    except RuntimeError:
        arcpy.AddMessage(u'...Failed to run Integrate tool')
    arcpy.env.addOutputsToMap = False
    vert = u'in_memory\\vert'
    ident = u'in_memory\\ident'
    arcpy.FeatureVerticesToPoints_management(
        in_features = layer,
        out_feature_class = vert,
        point_location = u'ALL')
    # FIXME
    # test way to skip last points in polygons
    # sql-clause doesn't work in in_memory datasets
    oid_field = arcpy.Describe(layer).OIDFieldName
    lastpoints = []
    if arcpy.Describe(layer).shapeType == 'Polygon':
        # In a polygon ring the last vertex repeats the first one; collect
        # the last vertex OID of each feature so it can be discarded.
        prev_id = -1
        prev_row = -1
        with arcpy.da.SearchCursor(vert, ['OID@', 'ORIG_FID']) as sc:
            for row in sc:
                if row[1] != prev_id:
                    # First iteration appends the sentinel -1, which never
                    # matches a real OID below, so it is harmless.
                    lastpoints.append(prev_row)
                    prev_id = row[1]
                prev_row = row[0]
            # append last point of last polygon
            lastpoints.append(row[0])
        with arcpy.da.UpdateCursor(vert, ['OID@']) as uc:
            for row in uc:
                if row[0] in lastpoints:
                    uc.deleteRow()
    # points = {PointFID: [LineID, PointNo], ...}
    points = {}
    lines_id = []
    # NOTE(review): vertices are numbered while scanning in DESCENDING OID
    # order, so PointNo counts from the feature's last vertex — confirm
    # this is intended by the callers.
    clause = (None, 'ORDER BY {0} DESC'.format(oid_field))
    with arcpy.da.SearchCursor(vert, ['OID@', 'ORIG_FID'],
                               sql_clause = clause) as sc:
        feat_num = -1
        vert_num = -1
        for row in sc:
            if row[1] != feat_num:
                feat_num = row[1]
                vert_num = 0
                lines_id.append(feat_num)
            else:
                vert_num += 1
            points[row[0]] = [feat_num, vert_num]
    # Mark vertices that coincide with another vertex within 0.2 m.
    arcpy.FindIdentical_management(
        in_dataset = vert,
        out_dataset = ident,
        fields = u'SHAPE',
        xy_tolerance = u'0.2 Meters',
        output_record_option = u'ONLY_DUPLICATES')
    identical_v = [row[0] for row in
                   arcpy.da.SearchCursor(ident, 'IN_FID')]  # ids of identical vetices
    single_pairs = [val for key, val in points.items()
                    if key not in identical_v]
    single_out = {oid: [] for oid in lines_id}
    for p in single_pairs:
        single_out[p[0]].append(p[1])
    arcpy.Delete_management(vert)
    arcpy.Delete_management(ident)
    return single_out
def simplify(self):
    """Simplify polygon and polyline layers of the 1:50K process GDB.

    Reads layer definitions from ConfigSimplify.json (next to this script),
    merges the enabled layers, runs SimplifyPolygon / SimplifyLine
    (BEND_SIMPLIFY, 50 m), writes the simplified shapes back, snaps lines to
    their buffers, patches holes in the surface cover (PhuBeMat), and copies
    the results into the final GDB.
    """
    try:
        # Init WorkSpase #
        arcpy.env.overwriteOutput = 1
        duongDanNguon = "C:/Generalize_25_50/50K_Process.gdb"
        duongDanDich = "C:/Generalize_25_50/50K_Final.gdb"
        urlFile = '/ConfigSimplify.json'
        _algorithm = "BEND_SIMPLIFY"
        _tolerance = "50 Meters"
        _error_option = "NO_CHECK"
        _collapsed_point_option = "NO_KEEP"
        _checkExitLayer = False
        # The extra snap/patch steps only run when both helper layers exist.
        if arcpy.Exists(duongDanNguon + "/ThuyHe/SongSuoiL_KenhMuongL_SnapPBM") and arcpy.Exists(duongDanNguon + "/PhuBeMat/PhuBeMat_Full"):
            #arcpy.CopyFeatures_management(duongDanNguon + "/PhuBeMat/PhuBeMat_LocMatNuoc", duongDanNguon + "/PhuBeMat/PhuBeMat")
            _checkExitLayer = True
        # Read the JSON config located next to this script file.
        s1 = inspect.getfile(inspect.currentframe())
        s2 = os.path.dirname(s1)
        urlFile = s2 + urlFile
        arcpy.AddMessage("\n# Doc file cau hinh: \"{0}\"".format(urlFile))
        if os.path.exists(urlFile):
            fileConfig = open(urlFile)
            listLayerConfig = json.load(fileConfig)
            fileConfig.close()
            ############################### Simplify Polygon ########################################
            arcpy.Integrate_management([[duongDanNguon + "/PhuBeMat/PhuBeMat", 1]], "2 Meters")
            arcpy.AddMessage("\n# Bat dau Simplify Polygon")
            listPolygon = []
            fieldMappings = arcpy.FieldMappings()
            enableFields = []
            inputsMerge = []
            # Collect enabled polygon layers; polylines (except contour
            # lines "DuongBinhDo") are buffered and treated as polygons too.
            for objConfig in listLayerConfig:
                if objConfig["LayerType"] == "Polygon" and objConfig["RunStatus"] == "True":
                    if not(_checkExitLayer == False and objConfig["LayerName"] == "PhuBeMat_Full"):
                        temp = {
                            "LayerType": objConfig["LayerType"],
                            "DatasetName": objConfig["DatasetName"],
                            "LayerName": objConfig["LayerName"],
                            "featureLayer": "in_memory\\" + objConfig["LayerName"] + "_Layer",
                            "featureCopy": "in_memory\\" + objConfig["LayerName"] + "_Copy",
                            "featureCopyLayer": "in_memory\\" + objConfig["LayerName"] + "_Copy_Layer",
                            "FID_XXX": "FID_" + objConfig["LayerName"]
                        }
                        listPolygon.append(temp)
                elif objConfig["LayerType"] == "Polyline" and objConfig["RunStatus"] == "True" and objConfig["LayerName"] <> "DuongBinhDo":
                    if not(_checkExitLayer == False and objConfig["LayerName"] == "SongSuoiL_KenhMuongL_SnapPBM"):
                        arcpy.AddMessage("\n# Buffer lop: \"{0}\"".format(objConfig["LayerName"]))
                        layerPath = duongDanNguon + "/" + objConfig["DatasetName"] + "/" + objConfig["LayerName"]
                        arcpy.Buffer_analysis(in_features = layerPath, out_feature_class = layerPath + "_Buffer", buffer_distance_or_field = "0.1 Meters", line_side = "RIGHT")
                        temp = {
                            "LayerType": objConfig["LayerType"],
                            "DatasetName": objConfig["DatasetName"],
                            "LayerName": objConfig["LayerName"] + "_Buffer",
                            "featureLayer": "in_memory\\" + objConfig["LayerName"] + "_Buffer_Layer",
                            "featureCopy": "in_memory\\" + objConfig["LayerName"] + "_Buffer_Copy",
                            "featureCopyLayer": "in_memory\\" + objConfig["LayerName"] + "_Buffer_Copy_Layer",
                            "FID_XXX": "FID_" + objConfig["LayerName"]
                        }
                        listPolygon.append(temp)
            # Tag every feature with its source OID (FID_<layer>) so shapes
            # can be matched back after merge + simplify.
            for element in listPolygon:
                arcpy.AddMessage("\n# Xu ly lop: {0}".format(element["LayerName"]))
                layerPath = duongDanNguon + "/" + element["DatasetName"] + "/" + element["LayerName"]
                arcpy.MakeFeatureLayer_management(layerPath, element["featureLayer"])
                arcpy.AddField_management(element["featureLayer"], element["FID_XXX"], "LONG")
                with arcpy.da.UpdateCursor(element["featureLayer"], ["OID@", element["FID_XXX"]]) as cursor:
                    for row in cursor:
                        row[1] = row[0]
                        cursor.updateRow(row)
                arcpy.CopyFeatures_management(layerPath, element["featureCopy"])
                arcpy.MakeFeatureLayer_management(element["featureCopy"], element["featureCopyLayer"])
                ## Field Mappings ##
                enableFields.append(element["FID_XXX"])
                fieldMappings.addTable(element["featureCopyLayer"])
                inputsMerge.append(element["featureCopyLayer"])
            # Keep only the FID_<layer> tag fields in the merged output.
            for field in fieldMappings.fields:
                if field.name not in enableFields:
                    fieldMappings.removeFieldMap(fieldMappings.findFieldMapIndex(field.name))
            ## Merge ##
            arcpy.AddMessage("\n# Merge Polygon...")
            outPathMerge = "in_memory\\outPathMergeTemp"
            #outPathMerge = "C:/Generalize_25_50/50K_Process.gdb/DanCuCoSoHaTang/outPathMergeTemp"
            arcpy.Merge_management (inputsMerge, outPathMerge, fieldMappings)
            ## Simplify Polygon ##
            arcpy.AddMessage("\n# Simplify Polygon...")
            outPathSimplify = "in_memory\\outPathSimplifyTemp"
            #outPathSimplify = "C:/Generalize_25_50/50K_Process.gdb/DanCuCoSoHaTang/outPathSimplifyTemp"
            arcpy.SimplifyPolygon_cartography(in_features = outPathMerge, out_feature_class = outPathSimplify, algorithm = _algorithm, tolerance = _tolerance, minimum_area = "0 SquareMeters", error_option = _error_option, collapsed_point_option = _collapsed_point_option)
            ## MakeLayerFeature ##
            outPathSimplifyLayer = "in_memory\\outPathSimplifyTempLayer"
            arcpy.MakeFeatureLayer_management(outPathSimplify, outPathSimplifyLayer)
            ## Update Shape Feature Class ##
            arcpy.AddMessage("\n# Update Shape Feature Class:")
            for element in listPolygon:
                arcpy.AddMessage("\n\t# Update {0}...".format(element["LayerName"]))
                ### MakeLayerFeature ###
                layerPath = duongDanNguon + "/" + element["DatasetName"] + "/" + element["LayerName"]
                arcpy.MakeFeatureLayer_management(layerPath, element["featureLayer"])
                ### Select ###
                strQuery = element["FID_XXX"] + " IS NOT NULL"
                arcpy.SelectLayerByAttribute_management(outPathSimplifyLayer, "NEW_SELECTION", strQuery)
                ### Copy To Table Temp ###
                outTableTemp = "in_memory\\outTableTemp"
                arcpy.CopyFeatures_management(outPathSimplifyLayer, outTableTemp)
                ### Replace each original shape with its simplified match;
                ### delete features that no longer have a counterpart. ###
                with arcpy.da.UpdateCursor(element["featureLayer"], ["OID@", "SHAPE@"]) as cursor:
                    for row in cursor:
                        found = False
                        with arcpy.da.UpdateCursor(outTableTemp, [element["FID_XXX"], "SHAPE@"]) as cursorSub:
                            for rowSub in cursorSub:
                                if row[0] == rowSub[0]:
                                    found = True
                                    row[1] = rowSub[1]
                                    cursor.updateRow(row)
                                    cursorSub.deleteRow()
                                    break
                        if found == False:
                            cursor.deleteRow()
            arcpy.AddMessage("\n# Hoan thanh Simplify Polygon!!!")
            ############################################## Simplify Line #############################
            arcpy.AddMessage("\n# Bat dau Simplify Line")
            listPolyLine = []
            fieldMappingLine = arcpy.FieldMappings()
            enableFieldLine = []
            inputsMergeLine = []
            for objConfig in listLayerConfig:
                if objConfig["LayerType"] == "Polyline" and objConfig["RunStatus"] == "True":
                    if not(_checkExitLayer == False and objConfig["LayerName"] == "SongSuoiL_KenhMuongL_SnapPBM"):
                        temp = {
                            "LayerType": objConfig["LayerType"],
                            "DatasetName": objConfig["DatasetName"],
                            "LayerName": objConfig["LayerName"],
                            "featureLayer": "in_memory\\" + objConfig["LayerName"] + "_Layer",
                            "featureCopy": "in_memory\\" + objConfig["LayerName"] + "_Copy",
                            "featureCopyLayer": "in_memory\\" + objConfig["LayerName"] + "_Copy_Layer",
                            "FID_XXX": "FID_" + objConfig["LayerName"]
                        }
                        listPolyLine.append(temp)
            for element in listPolyLine:
                arcpy.AddMessage("\n# Xu ly lop: {0}".format(element["LayerName"]))
                layerPath = duongDanNguon + "/" + element["DatasetName"] + "/" + element["LayerName"]
                # Contour lines keep their original OBJECTID in a backup field.
                if element["LayerName"] == "DuongBinhDo":
                    arcpy.AddField_management(layerPath, "OLD_OBJECTID", "LONG", None, None, None,"OLD_OBJECTID", "NULLABLE")
                    arcpy.CalculateField_management(layerPath, "OLD_OBJECTID", "!OBJECTID!", "PYTHON_9.3")
                arcpy.MakeFeatureLayer_management(layerPath, element["featureLayer"])
                arcpy.AddField_management(element["featureLayer"], element["FID_XXX"], "LONG")
                with arcpy.da.UpdateCursor(element["featureLayer"], ["OID@", element["FID_XXX"]]) as cursor:
                    for row in cursor:
                        row[1] = row[0]
                        cursor.updateRow(row)
                arcpy.CopyFeatures_management(layerPath, element["featureCopy"])
                arcpy.MakeFeatureLayer_management(element["featureCopy"], element["featureCopyLayer"])
                ## Field Mappings ##
                enableFieldLine.append(element["FID_XXX"])
                fieldMappingLine.addTable(element["featureCopyLayer"])
                inputsMergeLine.append(element["featureCopyLayer"])
            for field in fieldMappingLine.fields:
                if field.name not in enableFieldLine:
                    fieldMappingLine.removeFieldMap(fieldMappingLine.findFieldMapIndex(field.name))
            ## Merge ##
            arcpy.AddMessage("\n# Merge Polyline...")
            outPathMerge = "in_memory\\outPathMergeTemp"
            arcpy.Merge_management (inputsMergeLine, outPathMerge, fieldMappingLine)
            ## Simplify Polyline ##
            arcpy.AddMessage("\n# Simplify Polyline...")
            outPathSimplify = "in_memory\\outPathSimplifyTemp"
            '''
            arcpy.MakeFeatureLayer_management(duongDanNguon + "/ThuyHe/SongSuoiA", "ThuyHe_SongSuoiA_Lyr")
            arcpy.MakeFeatureLayer_management(duongDanNguon + "/ThuyHe/MatNuocTinh", "ThuyHe_MatNuocTinh_Lyr")
            arcpy.MakeFeatureLayer_management(duongDanNguon + "/ThuyHe/KenhMuongA", "ThuyHe_KenhMuongA_Lyr")
            in_barriers_Line = ["ThuyHe_SongSuoiA_Lyr", "ThuyHe_MatNuocTinh_Lyr", "ThuyHe_KenhMuongA_Lyr"]
            '''
            arcpy.SimplifyLine_cartography(in_features = outPathMerge, out_feature_class = outPathSimplify, algorithm = _algorithm, tolerance = _tolerance, collapsed_point_option = _collapsed_point_option)
            ## MakeLayerFeature ##
            outPathSimplifyLayer = "in_memory\\outPathSimplifyTempLayer"
            arcpy.MakeFeatureLayer_management(outPathSimplify, outPathSimplifyLayer)
            ## Update Shape Feature Class ##
            arcpy.AddMessage("\n# Update Shape Feature Class:")
            for element in listPolyLine:
                if element["LayerType"] == "Polyline":
                    arcpy.AddMessage("\n\t# Update {0}...".format(element["LayerName"]))
                    ### MakeLayerFeature ###
                    layerPath = duongDanNguon + "/" + element["DatasetName"] + "/" + element["LayerName"]
                    arcpy.MakeFeatureLayer_management(layerPath, element["featureLayer"])
                    ### Select ###
                    strQuery = element["FID_XXX"] + " IS NOT NULL"
                    arcpy.SelectLayerByAttribute_management(outPathSimplifyLayer, "NEW_SELECTION", strQuery)
                    ### Copy To Table Temp ###
                    outTableTemp = "in_memory\\outTableTemp"
                    arcpy.CopyFeatures_management(outPathSimplifyLayer, outTableTemp)
                    ### Write simplified shapes back by FID tag. ###
                    with arcpy.da.UpdateCursor(element["featureLayer"], ["OID@", "SHAPE@"]) as cursor:
                        for row in cursor:
                            found = False
                            with arcpy.da.UpdateCursor(outTableTemp, [element["FID_XXX"], "SHAPE@"]) as cursorSub:
                                for rowSub in cursorSub:
                                    if row[0] == rowSub[0]:
                                        found = True
                                        row[1] = rowSub[1]
                                        cursor.updateRow(row)
                                        cursorSub.deleteRow()
                                        break
                            if found == False:
                                cursor.deleteRow()
            arcpy.AddMessage("\n# Hoan thanh Simplify Polyline!!!")
            ############################################## Snap Line to Polygon #############################
            arcpy.AddMessage("\n# Bat dau Snap")
            # Snap each source line back to the right-side buffer that was
            # simplified with it (buffer entries carry the '_Buffer' suffix).
            for elementPolygon in listPolygon:
                if elementPolygon["LayerType"] == "Polyline":
                    lineLayerName = elementPolygon["LayerName"][:elementPolygon["LayerName"].find('_Buffer')]
                    if (lineLayerName <> "DuongBinhDo"):
                        arcpy.AddMessage("\n\t# Snap: {0}".format(lineLayerName))
                        layerBufferPath = duongDanNguon + "/" + elementPolygon["DatasetName"] + "/" + elementPolygon["LayerName"]
                        layerLinePath = duongDanNguon + "/" + elementPolygon["DatasetName"] + "/" + lineLayerName
                        arcpy.Snap_edit(layerLinePath, [[layerBufferPath, "EDGE", self.snap_distance]])
            ############## Snap Other
            if _checkExitLayer:
                # Patch the surface cover: snap it to the hydrography snap
                # layer, fill the holes left by Erase, and eliminate the
                # temporary patch polygons again.
                arcpy.AddMessage("\n\t# Snap other: {0}".format("PhuBeMat"))
                arcpy.Integrate_management([[duongDanNguon + "/PhuBeMat/PhuBeMat", 1]], "2 Meters")
                arcpy.Densify_edit(duongDanNguon + "/PhuBeMat/PhuBeMat", "DISTANCE","2 Meters",None ,None)
                arcpy.Snap_edit(duongDanNguon + "/PhuBeMat/PhuBeMat", [[duongDanNguon + "/ThuyHe/SongSuoiL_KenhMuongL_SnapPBM", "EDGE", "35 Meters"]])
                arcpy.Integrate_management([[duongDanNguon + "/ThuyHe/SongSuoiL_KenhMuongL_SnapPBM", 1],[duongDanNguon + "/PhuBeMat/PhuBeMat", 2]], "2 Meters")
                arcpy.Erase_analysis(in_features = duongDanNguon + "/PhuBeMat/PhuBeMat_Full", erase_features = duongDanNguon + "/PhuBeMat/PhuBeMat", out_feature_class = duongDanNguon + "/PhuBeMat/PhuBeMat_Lo")
                arcpy.CalculateField_management(duongDanNguon + "/PhuBeMat/PhuBeMat_Lo", "maNhanDang", '"temp123"', "PYTHON_9.3")
                arcpy.Append_management([duongDanNguon + "/PhuBeMat/PhuBeMat_Lo"], duongDanNguon + "/PhuBeMat/PhuBeMat", "NO_TEST",None,None)
                arcpy.MultipartToSinglepart_management(duongDanNguon + "/PhuBeMat/PhuBeMat", duongDanNguon + "/PhuBeMat/PhuBeMat2")
                arcpy.MakeFeatureLayer_management(duongDanNguon + "/PhuBeMat/PhuBeMat2", "PhuBeMat_Temp_Lyr")
                arcpy.SelectLayerByAttribute_management("PhuBeMat_Temp_Lyr", "NEW_SELECTION", "maNhanDang = 'temp123'")
                arcpy.Eliminate_management(in_features = "PhuBeMat_Temp_Lyr", out_feature_class = duongDanNguon + "/PhuBeMat/PhuBeMat3", selection = "LENGTH")
                arcpy.Densify_edit(duongDanNguon + "/ThuyHe/SongSuoiL", "DISTANCE","2 Meters",None ,None)
                arcpy.Snap_edit(duongDanNguon + "/ThuyHe/SongSuoiL", [[duongDanNguon + "/ThuyHe/SongSuoiL_KenhMuongL_SnapPBM", "EDGE", "2 Meters"]])
                arcpy.CopyFeatures_management(duongDanNguon + "/PhuBeMat/PhuBeMat3", duongDanNguon + "/PhuBeMat/PhuBeMat")
            ############################################## Copy to final #############################
            for element in listPolygon:
                if element["LayerType"] == "Polygon":
                    if element["LayerName"] <> "PhuBeMat_Full":
                        layerPath = duongDanNguon + "/" + element["DatasetName"] + "/" + element["LayerName"]
                        layerFinalPath = duongDanDich + "/" + element["DatasetName"] + "/" + element["LayerName"]
                        arcpy.DeleteField_management(layerPath, [element["FID_XXX"]])
                        arcpy.CopyFeatures_management(layerPath, layerFinalPath)
            for element in listPolyLine:
                if element["LayerType"] == "Polyline":
                    if element["LayerName"] <> "SongSuoiL_KenhMuongL_SnapPBM":
                        layerPath = duongDanNguon + "/" + element["DatasetName"] + "/" + element["LayerName"]
                        layerFinalPath = duongDanDich + "/" + element["DatasetName"] + "/" + element["LayerName"]
                        arcpy.DeleteField_management(layerPath, [element["FID_XXX"]])
                        arcpy.CopyFeatures_management(layerPath, layerFinalPath)
            #arcpy.AddMessage("\n# Hoan thanh!!!")
        else:
            arcpy.AddMessage("\n# Khong tim thay file cau hinh: \"{0}\"".format(urlFile))
    except OSError as error:
        arcpy.AddMessage("Error" + error.message)
    except ValueError as error:
        arcpy.AddMessage("Error" + error.message)
    except arcpy.ExecuteError as error:
        arcpy.AddMessage("Error" + error.message)
    finally:
        arcpy.Delete_management("in_memory")
import arcpy import os import json import sys import inspect arcpy.env.overwriteOutput = 1 duongDanNguon = "C:/Generalize_25_50/50K_Process.gdb" arcpy.AddMessage("\n\t# Snap other: {0}".format("PhuBeMat")) arcpy.Integrate_management([[duongDanNguon + "/PhuBeMat/PhuBeMat", 1]], "2 Meters") arcpy.Densify_edit(duongDanNguon + "/PhuBeMat/PhuBeMat", "DISTANCE", "2 Meters", None, None) arcpy.Snap_edit(duongDanNguon + "/PhuBeMat/PhuBeMat", [[ duongDanNguon + "/ThuyHe/SongSuoiL_KenhMuongL_SnapPBM", "EDGE", "35 Meters" ]]) arcpy.Integrate_management( [[duongDanNguon + "/ThuyHe/SongSuoiL_KenhMuongL_SnapPBM", 1], [duongDanNguon + "/PhuBeMat/PhuBeMat", 2]], "2 Meters") arcpy.Erase_analysis(in_features=duongDanNguon + "/PhuBeMat/PhuBeMat_LocMatNuoc", erase_features=duongDanNguon + "/PhuBeMat/PhuBeMat", out_feature_class=duongDanNguon + "/PhuBeMat/PhuBeMat_Lo") arcpy.CalculateField_management(duongDanNguon + "/PhuBeMat/PhuBeMat_Lo", "maNhanDang", '"temp123"', "PYTHON_9.3") arcpy.Append_management([duongDanNguon + "/PhuBeMat/PhuBeMat_Lo"], duongDanNguon + "/PhuBeMat/PhuBeMat", "NO_TEST", None, None) arcpy.MultipartToSinglepart_management(duongDanNguon + "/PhuBeMat/PhuBeMat", duongDanNguon + "/PhuBeMat/PhuBeMat2") arcpy.MakeFeatureLayer_management(duongDanNguon + "/PhuBeMat/PhuBeMat2",
def generateFCExtent(server, port, adminUser, adminPass, logFC, mapService, workspace, featureClass, raster, token=None): millisecondsToQuery = 6048000000 # One week hitDict = {} if token is None: token = gentoken(server, port, adminUser, adminPass) # Assign map service if mapService.endswith(".MapServer"): pass else: mapService += ".MapServer" serviceURL = "/arcgis/rest/services/{0}".format( mapService.replace(".", "/")) # Get Extent detail for service serviceURL = serviceURL + "/?Token=" + token fullExtent = getFullExtent(server, port, serviceURL) if not fullExtent: return # Construct URL to query the logs logQueryURL = "/arcgis/admin/logs/query" logFilter = "{'services': ['" + mapService + "']}" startTime = int(round(time.time() * 1000)) endTime = startTime - millisecondsToQuery # Supply the log level, filter, token, and return format params = urllib.urlencode({ 'level': 'FINE', 'startTime': startTime, 'endTime': endTime, 'filter': logFilter, 'token': token, 'f': 'json', 'pageSize': 10000 }) headers = { "Content-type": "application/x-www-form-urlencoded", "Accept": "text/plain" } # Connect to URL and post parameters print "Accessing Logs..." httpConn = httplib.HTTPConnection(server, port) httpConn.request("POST", logQueryURL, params, headers) # Read response response = httpConn.getresponse() if (response.status != 200): httpConn.close() print " Error while querying logs." return else: data = response.read() # Check that data returned is not an error object if not assertJsonSuccess(data): print " Error returned by operation. " + data else: print " Operation completed successfully!" 
# Deserialize response into Python object dataObj = json.loads(data) httpConn.close() # Open Insert Cursor on output output = openCursor(workspace, featureClass, fullExtent["spatialReference"]["wkid"]) if not output: return # Need this variable to track number of events found for ExportMapImage call logEvents = 0 # Need Array to hold Shape shapeArray = arcpy.Array() # Iterate over messages for item in dataObj["logMessages"]: eventDateTime = datetime.datetime.fromtimestamp( float(item["time"]) / 1000) if item["message"].startswith("Extent:"): eventScale = None # Scale eventInvScale = None # Inverse-Scale eventWidth = None # Width eventHeight = None # Height # Cycle through message details for pair in item["message"].replace(" ", "").split(";"): if pair.count(":") == 1: key, val = pair.split(":") # Pick out Extent if key == "Extent" and val.count(",") == 3: # Split into ordinate values MinX, MinY, MaxX, MaxY = val.split(",") MinX = float(MinX) MinY = float(MinY) MaxX = float(MaxX) MaxY = float(MaxY) # Make sure extent is within range if MinX > fullExtent["xmin"] and MaxX < fullExtent[ "xmax"] and MinY > fullExtent[ "ymin"] and MaxY < fullExtent["ymax"]: shapeArray.add(arcpy.Point(MinX, MinY)) shapeArray.add(arcpy.Point(MinX, MaxY)) shapeArray.add(arcpy.Point(MaxX, MaxY)) shapeArray.add(arcpy.Point(MaxX, MinY)) shapeArray.add(arcpy.Point(MinX, MinY)) # Pick out Size if key == "Size" and val.count(",") == 1: eventWidth, eventHeight = val.split(",") eventWidth = float(eventWidth) eventHeight = float(eventHeight) # Pick out Scale if key == "Scale": eventScale = float(val) eventInvScale = 1 / eventScale # Save if Shape created if shapeArray.count > 0: # Create new row newRow = output.newRow() # Add Shape and Event Date newRow.setValue("Shape", shapeArray) newRow.setValue("EventDate", eventDateTime) newRow.setValue("Scale", eventScale) newRow.setValue("InvScale", eventInvScale) newRow.setValue("Width", eventWidth) newRow.setValue("Height", eventHeight) 
output.insertRow(newRow) # Clear out Array points shapeArray.removeAll() logEvents += 1 # Need ArcGIS Desktop Advanced and Spatial Analyst licensed # Create a raster layer from the extents feature class if spatial analyst extension available if (raster != "None"): print "Creating raster from feature class..." extentsFeatureClass = os.path.join(workspace, featureClass) # Convert to points arcpy.FeatureToPoint_management(extentsFeatureClass, "in_memory\\extentPoints", "CENTROID") arcpy.Integrate_management("in_memory\\extentPoints #", "5000 Meters") arcpy.CollectEvents_stats("in_memory\\extentPoints", "in_memory\\extentCollectEvents") # Create density raster # Check out necessary license arcpy.CheckOutExtension("spatial") arcpy.gp.KernelDensity_sa("in_memory\\extentCollectEvents", "ICOUNT", "in_memory\\extentRaster", "50", "20000", "SQUARE_MAP_UNITS") # Remove values that are 0 arcpy.gp.SetNull_sa("in_memory\\extentRaster", "in_memory\\extentRaster", os.path.join(workspace, raster), "VALUE = 0") print "\nDone!\n\nTotal number of events found in logs: {0}".format( logEvents) return
def CreateRanhGioiPhuBeMat(self):
    """Build the surface-cover boundary layer (RanhGioiPhuBeMat).

    Derives boundary lines from the PhuBeMat polygon layer in the 50K
    processing geodatabase, loads them into the RanhGioiPhuBeMat feature
    class, and copies both layers into the final geodatabase.
    Errors are reported via arcpy.AddMessage; in_memory is always cleaned up.
    """
    try:
        arcpy.env.overwriteOutput = 1
        # Source (processing) and destination (final) geodatabases.
        duongDanNguon = "C:/Generalize_25_50/50K_Process.gdb"
        duongDanDich = "C:/Generalize_25_50/50K_Final.gdb"
        arcpy.env.workspace = duongDanNguon + "/PhuBeMat"
        PhuBeMat_Name = "PhuBeMat"
        PhuBeMat_Path = duongDanNguon + "/PhuBeMat/" + PhuBeMat_Name
        PhuBeMat_Dich_Path = duongDanDich + "/PhuBeMat/" + PhuBeMat_Name
        intersect_Path = duongDanNguon + "/PhuBeMat/RanhGioiPhuBeMat_Intersect"
        RanhGioiPhuBeMat_Name = "RanhGioiPhuBeMat"
        RanhGioiPhuBeMat_Path = duongDanNguon + "/PhuBeMat/" + RanhGioiPhuBeMat_Name
        RanhGioiPhuBeMat_Dich_Path = duongDanDich + "/PhuBeMat/" + RanhGioiPhuBeMat_Name
        # Close small gaps between polygons, then self-intersect to extract
        # the shared polygon boundaries as lines.
        arcpy.Integrate_management([[PhuBeMat_Path, 1]], "1 Meters")
        arcpy.Intersect_analysis([[PhuBeMat_Path, 1]], intersect_Path, "ALL",
                                 None, "LINE")
        # Shared boundaries come out doubled (once per adjacent polygon).
        arcpy.DeleteIdentical_management(intersect_Path, ["Shape"], None, None)
        arcpy.AddField_management(intersect_Path, "loaiRanhGioiPhuBeMat",
                                  "SHORT", None, None, None,
                                  "loaiRanhGioiPhuBeMat", "NULLABLE")
        # Copy the data into the RanhGioiPhuBeMat layer (emptying it first).
        if int(
                arcpy.GetCount_management(RanhGioiPhuBeMat_Path).getOutput(
                    0)) > 0:
            arcpy.DeleteFeatures_management(RanhGioiPhuBeMat_Path)
        ranhGioiPhuBeMatFields = [
            "SHAPE@", "maNhanDang", "ngayThuNhan", "ngayCapNhat",
            "maDoiTuong", "loaiRanhGioiPhuBeMat", "nguonDuLieu", "maTrinhBay",
            "tenManh", "soPhienHieuManhBanDo"
        ]
        ranhGioiPhuBeMatFields2 = [
            "SHAPE@", "maNhanDang", "ngayThuNhan", "ngayCapNhat",
            "maDoiTuong", "loaiRanhGioiPhuBeMat", "nguonDuLieu", "maTrinhBay",
            "tenManh", "soPhienHieuManhBanDo", "RanhGioiPhuBeMat_Rep_ID"
        ]
        # Attribute values are carried over except loaiRanhGioiPhuBeMat and
        # the representation ID, which are both hard-coded to 1.
        with arcpy.da.SearchCursor(intersect_Path,
                                   ranhGioiPhuBeMatFields) as sCur:
            with arcpy.da.InsertCursor(RanhGioiPhuBeMat_Path,
                                       ranhGioiPhuBeMatFields2) as iCur:
                for sRow in sCur:
                    iCur.insertRow([
                        sRow[0], sRow[1], sRow[2], sRow[3], sRow[4], 1,
                        sRow[6], sRow[7], sRow[8], sRow[9], 1
                    ])
        # Publish both layers to the final geodatabase.
        arcpy.CopyFeatures_management(RanhGioiPhuBeMat_Path,
                                      RanhGioiPhuBeMat_Dich_Path)
        arcpy.CopyFeatures_management(PhuBeMat_Path, PhuBeMat_Dich_Path)
        #arcpy.AddMessage("\n# Hoan thanh!!!")
    except OSError as error:
        arcpy.AddMessage("Error" + error.message)
    except ValueError as error:
        arcpy.AddMessage("Error" + error.message)
    except arcpy.ExecuteError as error:
        arcpy.AddMessage("Error" + error.message)
    finally:
        # Always release the in_memory workspace.
        arcpy.Delete_management("in_memory")
def generate_ogw_report(order_obj):
    """Generate the PSR Oil, Gas and Water (OGW) wells map report for an order.

    Buffers the order geometry per wells data source, clips and merges the
    wells layer, attaches distance/elevation attributes via spatial joins,
    and exports single-page or multi-page JPEG maps into the report folder.

    :param order_obj: order object exposing geometry, spatial_ref_pcs/_gcs,
        number, and psr.search_radius (dict keyed by data-source OID strings,
        values in miles) — assumed from usage here; confirm against caller.
    """
    arcpy.AddMessage(
        ' -- Start generating PSR Oil, Gas and Water wells map report...')
    start = timeit.default_timer()
    ### set scratch folder
    arcpy.env.workspace = config.scratch_folder
    arcpy.env.overwriteOutput = True
    centre_point = order_obj.geometry.trueCentroid
    elevation = psr_utility.get_elevation(centre_point.X, centre_point.Y)
    if elevation != None:
        centre_point.Z = float(elevation)
    ### create order geometry center shapefile
    order_rows = arcpy.SearchCursor(config.order_geometry_pcs_shp)
    point = arcpy.Point()
    array = arcpy.Array()
    feature_list = []  # built below but never consumed afterwards
    arcpy.CreateFeatureclass_management(
        config.scratch_folder, os.path.basename(config.order_center_pcs),
        "POINT", "", "DISABLED", "DISABLED", order_obj.spatial_ref_pcs)
    arcpy.AddField_management(config.order_center_pcs, "Site_z", "DOUBLE",
                              "12", "6", "", "", "NULLABLE", "NON_REQUIRED",
                              "")
    insert_cursor = arcpy.InsertCursor(config.order_center_pcs)
    feat = insert_cursor.newRow()
    for order_row in order_rows:
        # One centroid point (with elevation) per order geometry row.
        geometry = order_row.SHAPE
        geometry_gcs = geometry.projectAs(order_obj.spatial_ref_gcs)
        site_elevation = psr_utility.get_elevation(geometry_gcs.trueCentroid.X,
                                                   geometry_gcs.trueCentroid.Y)
        point.X = geometry.trueCentroid.X
        point.Y = geometry.trueCentroid.Y
        array.add(point)
        center_point = arcpy.Multipoint(array)
        array.removeAll()
        feature_list.append(center_point)
        feat.shape = point
        # NOTE(review): field was created as "Site_z"; shapefile field names
        # are matched case-insensitively here — confirm.
        feat.Site_Z = float(site_elevation)
        insert_cursor.insertRow(feat)
    # Release cursors/rows so the shapefile is not locked.
    # NOTE(review): 'order_row' is unbound (NameError) if the cursor is empty.
    del feat
    del insert_cursor
    del order_row
    del order_rows
    del point
    del array
    output_jpg_wells = config.output_jpg(order_obj, config.Report_Type.wells)
    # 10685 is the OGW search radius entry; bail out if absent.
    if '10685' not in order_obj.psr.search_radius.keys():
        arcpy.AddMessage(' -- OGW search radius is not availabe')
        return
    config.buffer_dist_ogw = str(
        order_obj.psr.search_radius['10685']) + ' MILES'
    ds_oid_wells_max_radius = '10093'  # 10093 is a federal source, PWSV
    ds_oid_wells = []
    for key in order_obj.psr.search_radius:
        if key not in [
                '9334', '10683', '10684', '10685', '10688', '10689', '10695',
                '10696'
        ]:  # 10695 is US topo, 10696 is HTMC, 10688 and 10689 are radons
            ds_oid_wells.append(key)
            # NOTE(review): the next assignment replaces the *list* with a
            # single key; it looks like it was meant to update
            # ds_oid_wells_max_radius (the largest-radius source) instead —
            # confirm before relying on the multi-source loop below.
            if (order_obj.psr.search_radius[key] >
                    order_obj.psr.search_radius[ds_oid_wells_max_radius]):
                ds_oid_wells = key
    merge_list = []
    # Per data source: buffer the order geometry by that source's radius,
    # clip the wells layer to it, keep only that source's wells.
    for ds_oid in ds_oid_wells:
        buffer_wells_fc = os.path.join(config.scratch_folder,
                                       "order_buffer_" + str(ds_oid) + ".shp")
        arcpy.Buffer_analysis(
            config.order_geometry_pcs_shp, buffer_wells_fc,
            str(order_obj.psr.search_radius[ds_oid]) + " MILES")
        wells_clip = os.path.join(config.scratch_folder,
                                  'wells_clip_' + str(ds_oid) + '.shp')
        arcpy.Clip_analysis(config.eris_wells, buffer_wells_fc, wells_clip)
        arcpy.Select_analysis(
            wells_clip,
            os.path.join(config.scratch_folder,
                         'wells_selected_' + str(ds_oid) + '.shp'),
            "DS_OID =" + str(ds_oid))
        merge_list.append(
            os.path.join(config.scratch_folder,
                         'wells_selected_' + str(ds_oid) + '.shp'))
    arcpy.Merge_management(merge_list, config.wells_merge)
    del config.eris_wells
    # Calculate Distance with integration and spatial join- can be easily done
    # with Distance tool along with direction if ArcInfo or Advanced license
    wells_merge_pcs = os.path.join(config.scratch_folder,
                                   "wells_merge_pcs.shp")
    arcpy.Project_management(config.wells_merge, wells_merge_pcs,
                             order_obj.spatial_ref_pcs)
    arcpy.Integrate_management(wells_merge_pcs, ".5 Meters")
    # Add distance to selected wells
    arcpy.SpatialJoin_analysis(wells_merge_pcs, config.order_geometry_pcs_shp,
                               config.wells_sj, "JOIN_ONE_TO_MANY", "KEEP_ALL",
                               "#", "CLOSEST", "5000 Kilometers",
                               "Distance")  # this is the reported distance
    arcpy.SpatialJoin_analysis(
        config.wells_sj, config.order_center_pcs, config.wells_sja,
        "JOIN_ONE_TO_MANY", "KEEP_ALL", "#", "CLOSEST", "5000 Kilometers",
        "Dist_cent")  # this is used for mapkey calculation
    if int(
            arcpy.GetCount_management(os.path.join(
                config.wells_merge)).getOutput(0)) != 0:
        arcpy.AddMessage(' - Water Wells section, exists water wells')
        add_fields()
        # Fill in elevation for every joined well.
        with arcpy.da.UpdateCursor(config.wells_sja,
                                   ['X', 'Y', 'Elevation']) as update_cursor:
            for row in update_cursor:
                row[2] = psr_utility.get_elevation(row[0], row[1])
                update_cursor.updateRow(row)
        # generate map key
        gis_utility.generate_map_key(config.wells_sja)
        # Add Direction to ERIS sites
        add_direction()
        arcpy.Select_analysis(config.wells_sja, config.wells_final,
                              '"MapKeyTot" = 1')
        arcpy.Sort_management(config.wells_final, config.wells_display,
                              [["MapKeyLoc", "ASCENDING"]])
        arcpy.AddField_management(config.wells_display, "Ele_Diff", "DOUBLE",
                                  "12", "6", "", "", "NULLABLE",
                                  "NON_REQUIRED", "")
        arcpy.CalculateField_management(config.wells_display, 'Ele_Diff',
                                        '!Elevation!-!Site_z!', "PYTHON_9.3",
                                        "")
        arcpy.AddField_management(config.wells_display, "Elev_Rank", "SHORT",
                                  "12", "6", "", "", "NULLABLE",
                                  "NON_REQUIRED", "")
        # categorize elevation for symbology
        elevation_ranking()
        ## create a map with water wells and ogw wells
        mxd_wells = arcpy.mapping.MapDocument(config.mxd_file_wells)
        df_wells = arcpy.mapping.ListDataFrames(mxd_wells, "*")[0]
        df_wells.spatialReference = order_obj.spatial_ref_pcs
        lyr = arcpy.mapping.ListLayers(mxd_wells, "wells", df_wells)[0]
        lyr.replaceDataSource(config.scratch_folder, "SHAPEFILE_WORKSPACE",
                              "wells_display")
    else:
        arcpy.AddMessage(' - WaterWells section, no water wells exists')
        mxd_wells = arcpy.mapping.MapDocument(config.mxd_file_wells)
        df_wells = arcpy.mapping.ListDataFrames(mxd_wells, "*")[0]
        df_wells.spatialReference = order_obj.spatial_ref_pcs
    # Add the per-source buffer rings and the order geometry to the map.
    for item in ds_oid_wells:
        psr_utility.add_layer_to_mxd("order_buffer_" + str(item), df_wells,
                                     config.buffer_lyr_file, 1.1)
    psr_utility.add_layer_to_mxd("order_geometry_pcs", df_wells,
                                 config.order_geom_lyr_file, 1)
    # create single-page
    if not config.if_multi_page or int(
            arcpy.GetCount_management(config.wells_sja).getOutput(0)) == 0:
        mxd_wells.saveACopy(
            os.path.join(config.scratch_folder, "mxd_wells.mxd"))
        arcpy.mapping.ExportToJPEG(mxd_wells, output_jpg_wells, "PAGE_LAYOUT",
                                   480, 640, 150, "False",
                                   "24-BIT_TRUE_COLOR", 85)
        if not os.path.exists(
                os.path.join(config.report_path, 'PSRmaps',
                             order_obj.number)):
            os.mkdir(
                os.path.join(config.report_path, 'PSRmaps', order_obj.number))
        shutil.copy(
            output_jpg_wells,
            os.path.join(config.report_path, 'PSRmaps', order_obj.number))
        # arcpy.AddMessage(' - output jpg image: %s' % os.path.join(config.report_path, 'PSRmaps', order_obj.number,os.path.basename(output_jpg_wells)))
        del mxd_wells
        del df_wells
    else:  # multipage
        grid_lyr_shp = os.path.join(config.scratch_folder,
                                    'grid_lyr_wells.shp')
        # note the tool takes featureclass name only, not the full path
        arcpy.GridIndexFeatures_cartography(
            grid_lyr_shp,
            os.path.join(config.scratch_folder,
                         "order_buffer_" + ds_oid_wells_max_radius + '.shp'),
            "", "", "", config.grid_size, config.grid_size)
        # part 1: the overview map
        # add grid layer
        grid_layer = arcpy.mapping.Layer(config.grid_lyr_file)
        grid_layer.replaceDataSource(config.scratch_folder,
                                     "SHAPEFILE_WORKSPACE", "grid_lyr_wells")
        arcpy.mapping.AddLayer(df_wells, grid_layer, "Top")
        # turn the site label off
        well_lyr = arcpy.mapping.ListLayers(mxd_wells, "wells", df_wells)[0]
        well_lyr.showLabels = False
        df_wells.extent = grid_layer.getExtent()
        df_wells.scale = df_wells.scale * 1.1
        mxd_wells.saveACopy(
            os.path.join(config.scratch_folder, "mxd_wells.mxd"))
        arcpy.mapping.ExportToJPEG(mxd_wells, output_jpg_wells, "PAGE_LAYOUT",
                                   480, 640, 150, "False",
                                   "24-BIT_TRUE_COLOR", 85)
        if not os.path.exists(
                os.path.join(config.report_path, 'PSRmaps',
                             order_obj.number)):
            os.mkdir(
                os.path.join(config.report_path, 'PSRmaps', order_obj.number))
        shutil.copy(
            output_jpg_wells,
            os.path.join(config.report_path, 'PSRmaps', order_obj.number))
        arcpy.AddMessage(
            ' - output jpg image page 1: %s' %
            os.path.join(config.report_path, 'PSRmaps', order_obj.number,
                         os.path.basename(output_jpg_wells)))
        del mxd_wells
        del df_wells
        # part 2: the data driven pages
        page = int(arcpy.GetCount_management(grid_lyr_shp).getOutput(0)) + 1
        mxd_mm_wells = arcpy.mapping.MapDocument(config.mxd_mm_file_wells)
        df_mm_wells = arcpy.mapping.ListDataFrames(mxd_mm_wells)[0]
        df_mm_wells.spatialReference = order_obj.spatial_ref_pcs
        for item in ds_oid_wells:
            psr_utility.add_layer_to_mxd("order_buffer_" + str(item),
                                         df_mm_wells, config.buffer_lyr_file,
                                         1.1)
        psr_utility.add_layer_to_mxd("order_geometry_pcs", df_mm_wells,
                                     config.order_geom_lyr_file, 1)
        grid_layer_mm = arcpy.mapping.ListLayers(mxd_mm_wells, "Grid",
                                                 df_mm_wells)[0]
        grid_layer_mm.replaceDataSource(config.scratch_folder,
                                        "SHAPEFILE_WORKSPACE",
                                        "grid_lyr_wells")
        arcpy.CalculateAdjacentFields_cartography(grid_lyr_shp, 'PageNumber')
        lyr = arcpy.mapping.ListLayers(
            mxd_mm_wells, "wells",
            df_mm_wells)[0]  # "wells" or "Wells" doesn't seem to matter
        lyr.replaceDataSource(config.scratch_folder, "SHAPEFILE_WORKSPACE",
                              "wells_display")
        # Export one JPEG per grid cell, zoomed to that cell.
        for i in range(
                1,
                int(arcpy.GetCount_management(grid_lyr_shp).getOutput(0)) +
                1):
            arcpy.SelectLayerByAttribute_management(
                grid_layer_mm, "NEW_SELECTION", ' "PageNumber" = ' + str(i))
            df_mm_wells.extent = grid_layer_mm.getSelectedExtent(True)
            df_mm_wells.scale = df_mm_wells.scale * 1.1
            arcpy.SelectLayerByAttribute_management(grid_layer_mm,
                                                    "CLEAR_SELECTION")
            title_text = arcpy.mapping.ListLayoutElements(
                mxd_mm_wells, "TEXT_ELEMENT", "MainTitleText")[0]
            title_text.text = "Wells & Additional Sources - Page " + str(i)
            title_text.elementPositionX = 0.6438
            arcpy.RefreshTOC()
            arcpy.mapping.ExportToJPEG(
                mxd_mm_wells, output_jpg_wells[0:-4] + str(i) + ".jpg",
                "PAGE_LAYOUT", 480, 640, 150, "False", "24-BIT_TRUE_COLOR",
                85)
            if not os.path.exists(
                    os.path.join(config.report_path, 'PSRmaps',
                                 order_obj.number)):
                os.mkdir(
                    os.path.join(config.report_path, 'PSRmaps',
                                 order_obj.number))
            shutil.copy(
                output_jpg_wells[0:-4] + str(i) + ".jpg",
                os.path.join(config.report_path, 'PSRmaps', order_obj.number))
        del mxd_mm_wells
        del df_mm_wells
    ### Save wells data in database
    # NOTE(review): 'page' is only assigned on the multi-page branch; the
    # single-page path would raise NameError here — confirm intended flow.
    save_wells_to_db(order_obj, page)
    end = timeit.default_timer()
    arcpy.AddMessage(
        (' -- End generating PSR Oil, Gas and Water wells report. Duration:',
         round(end - start, 4)))
field_length=None, field_alias="", field_is_nullable="NULLABLE", field_is_required="NON_REQUIRED", field_domain="") arcpy.AddField_management(in_table=shppoint, field_name="LATITUDE", field_type="DOUBLE", field_precision=None, field_scale=None, field_length=None, field_alias="", field_is_nullable="NULLABLE", field_is_required="NON_REQUIRED", field_domain="") arcpy.Integrate_management(in_features=[[shppoint, ""]], cluster_tolerance="2.2 Meters") arcpy.CalculateGeometryAttributes_management( in_features=shppoint, geometry_property=[["LONGITUDE", "POINT_X"], ["LATITUDE", "POINT_Y"]], length_unit="", area_unit="", coordinate_system= "GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]]" ) with arcpy.EnvManager(XYTolerance="1 Meters"): arcpy.Dissolve_management(in_features=shppoint, out_feature_class=dissolve, dissolve_field=["LONGITUDE", "LATITUDE"], statistics_fields=[["LONGITUDE", "FIRST"], ["LATITUDE", "FIRST"]], multi_part="MULTI_PART",
def highwaysimp(city):
    """Simplify a city's highway network in place.

    :param city: city name (full lowercase pinyin); the raw network is read
        from the feature class named city + '_highway'
    :return: nothing; the simplified network overwrites city + '_highway'
    """
    # Step 1: drop ramp/link road classes from the raw highway layer.
    print(city + ' Highway' + ' remove link roads...')
    link_classes = ['trunk_link', 'motorway_link']
    arcpy.MakeFeatureLayer_management(city + '_highway', "lyr")
    keep_clause = ('"fclass" NOT IN (\'' +
                   '\',\''.join(map(str, link_classes)) + '\')')
    arcpy.SelectLayerByAttribute_management("lyr", "NEW_SELECTION",
                                            keep_clause)
    arcpy.CopyFeatures_management("lyr", city + "_hnl")
    arcpy.Delete_management("lyr")

    # Step 2: project to Asia Lambert (WKID 102012) for metric tolerances.
    lambert_sr = arcpy.SpatialReference(102012)
    arcpy.Project_management(city + "_hnl", city + "_hnlproj", lambert_sr)
    arcpy.MakeFeatureLayer_management(city + "_hnlproj", 'lyr')
    arcpy.CopyFeatures_management('lyr', city + '_hnl')
    arcpy.Delete_management('lyr')

    # Step 3: explode multiparts and add a constant 'merge' field, which
    # MergeDividedRoads uses to decide which carriageways may be merged.
    print(city + ' Highway' + ' adding field and calculating...')
    arcpy.MultipartToSinglepart_management(city + '_hnl', city + '_hnl_sin')
    arcpy.AddField_management(city + '_hnl_sin', "merge", "SHORT")
    arcpy.CalculateField_management(city + '_hnl_sin', "merge", 1,
                                    "PYTHON_9.3")

    # Step 4: collapse divided carriageways at progressively wider
    # tolerances (3 passes each at 20/50/100/150 m), exploding multiparts
    # between tolerance levels. Output names reproduce the original
    # hand-written chain exactly (..._mer20_20_20_sin_50_50_50_sin_...).
    print(city + ' Highway' + ' merging divided roads...')
    current = city + '_hnl_sin'
    for tol in ('20', '50', '100', '150'):
        for rep in range(3):
            joiner = '_mer' if (tol == '20' and rep == 0) else '_'
            merged = current + joiner + tol
            arcpy.MergeDividedRoads_cartography(current, "merge",
                                                tol + " Meters", merged, "")
            current = merged
        if tol != '150':
            singled = current + '_sin'
            arcpy.MultipartToSinglepart_management(current, singled)
            current = singled

    # Step 5: snap nearby nodes together and drop coincident geometry.
    print(city + " Highway" + " optimize...")
    integrated = city + "_hnl_sin_merge_int"
    arcpy.CopyFeatures_management(current, integrated)
    arcpy.Integrate_management(integrated, "50 Meters")
    deduped = city + "_hnl_sin_merge_int_deleteid"
    arcpy.CopyFeatures_management(integrated, deduped)
    arcpy.DeleteIdentical_management(deduped, "shape")

    # Step 6: project back to WGS84 and overwrite the input layer name.
    wgs84_sr = arcpy.SpatialReference(4326)
    arcpy.Project_management(deduped, city + "_hnlsimpproj", wgs84_sr)
    arcpy.MakeFeatureLayer_management(city + "_hnlsimpproj", 'lyr')
    arcpy.CopyFeatures_management('lyr', city + '_highway')
    arcpy.Delete_management('lyr')
    print(city + ' Highway' + ' Finished!')
def CreateDuongDiaGioi(self):
    """Build the administrative-boundary line layer (DuongDiaGioi).

    From the DiaPhan polygon layer, derives boundary lines separately for
    each administrative level — commune (doiTuong = 3), district (2) and
    province (1) — annotates each line with its legal status, length and
    the adjacent units on both sides, removes commune/district lines that
    coincide with a higher-level boundary, loads everything into the
    DuongDiaGioi feature class, and copies the results to the final GDB.

    The per-level pipeline is identical except for paths, the intersect
    FID field name and the level code, so it is factored into a local
    helper. Errors are reported via arcpy.AddMessage; the in_memory
    workspace is always cleaned up.
    """
    try:
        arcpy.env.overwriteOutput = 1
        # Source (processing) and destination (final) geodatabases.
        duongDanNguon = "C:/Generalize_25_50/50K_Process.gdb"
        duongDanDich = "C:/Generalize_25_50/50K_Final.gdb"
        arcpy.env.workspace = duongDanNguon + "/BienGioiDiaGioi"
        DiaPhan_Name = "DiaPhan"
        DiaPhan_Lyr = "DiaPhan_Lyr"
        DiaPhan_Path = duongDanNguon + "/BienGioiDiaGioi/" + DiaPhan_Name
        DiaPhan_Path_Final = duongDanDich + "/BienGioiDiaGioi/" + DiaPhan_Name
        DiaPhan_Xa_Path = DiaPhan_Path + "_Xa"
        DiaPhan_Huyen_Path = DiaPhan_Path + "_Huyen"
        DiaPhan_Tinh_Path = DiaPhan_Path + "_Tinh"
        intersect_Xa_Path = duongDanNguon + "/BienGioiDiaGioi/DuongDiaGioi_Xa"
        intersect_Huyen_Path = duongDanNguon + "/BienGioiDiaGioi/DuongDiaGioi_Huyen"
        intersect_Tinh_Path = duongDanNguon + "/BienGioiDiaGioi/DuongDiaGioi_Tinh"
        joint_Xa_Path = duongDanNguon + "/BienGioiDiaGioi/DuongDiaGioi_Xa_Join"
        joint_Huyen_Path = duongDanNguon + "/BienGioiDiaGioi/DuongDiaGioi_Huyen_Join"
        joint_Tinh_Path = duongDanNguon + "/BienGioiDiaGioi/DuongDiaGioi_Tinh_Join"
        DuongDiaGioi_Name = "DuongDiaGioi"
        DuongDiaGioi_Path = duongDanNguon + "/BienGioiDiaGioi/" + DuongDiaGioi_Name
        DuongDiaGioi_Dich_Path = duongDanDich + "/BienGioiDiaGioi/" + DuongDiaGioi_Name
        songSuoiL_Path = duongDanNguon + "/ThuyHe/SongSuoiL"
        songSuoiL_Path_Final = duongDanDich + "/ThuyHe/SongSuoiL"
        doanTimDuongBo_Path = duongDanNguon + "/GiaoThong/DoanTimDuongBo"
        doanTimDuongBo_Path_Final = duongDanDich + "/GiaoThong/DoanTimDuongBo"

        # Close small polygon gaps, then pull boundaries toward the river
        # and road-centerline networks so shared edges coincide.
        arcpy.Integrate_management([[DiaPhan_Path, 1]], "1 Meters")
        arcpy.Snap_edit(
            DiaPhan_Path,
            [[duongDanNguon + "/ThuyHe/SongSuoiL", "VERTEX", "25 Meters"],
             [duongDanNguon + "/ThuyHe/SongSuoiL", "EDGE", "25 Meters"]])
        arcpy.Snap_edit(DiaPhan_Path, [[
            duongDanNguon + "/GiaoThong/DoanTimDuongBo", "VERTEX", "5 Meters"
        ], [
            duongDanNguon + "/GiaoThong/DoanTimDuongBo", "EDGE", "5 Meters"
        ]])

        arcpy.MakeFeatureLayer_management(DiaPhan_Path, DiaPhan_Lyr)

        def _build_level(level_path, intersect_path, joint_path, fid_field,
                         level_code):
            """Derive and attribute boundary lines for one admin level.

            Selects the level's polygons, self-intersects them into lines,
            de-duplicates shared edges, then uses a WITHIN spatial join to
            assign the adjacent unit names to the left/right sides.
            """
            arcpy.SelectLayerByAttribute_management(
                DiaPhan_Lyr, "NEW_SELECTION",
                "doiTuong = " + str(level_code))
            arcpy.CopyFeatures_management(DiaPhan_Lyr, level_path)
            arcpy.Intersect_analysis([[level_path, 1]], intersect_path,
                                     "ALL", None, "LINE")
            # Shared boundaries come out doubled (once per adjacent polygon).
            arcpy.DeleteIdentical_management(intersect_path, ["Shape"], None,
                                             None)
            arcpy.AddField_management(intersect_path, "loaiHienTrangPhapLy",
                                      "SHORT", None, None, None,
                                      "loaiHienTrangPhapLy", "NULLABLE")
            arcpy.AddField_management(intersect_path,
                                      "donViHanhChinhLienKeTrai", "TEXT",
                                      None, None, None,
                                      "donViHanhChinhLienKeTrai", "NULLABLE")
            arcpy.AddField_management(intersect_path,
                                      "donViHanhChinhLienKePhai", "TEXT",
                                      None, None, None,
                                      "donViHanhChinhLienKePhai", "NULLABLE")
            arcpy.AddField_management(intersect_path, "chieuDai", "DOUBLE",
                                      None, None, None, "chieuDai",
                                      "NULLABLE")
            # Keep only the naming fields from the polygons in the join.
            fieldMappings = arcpy.FieldMappings()
            fieldMappings.addTable(level_path)
            for field in fieldMappings.fields:
                if field.name not in ["doiTuong", "danhTuChung", "diaDanh"]:
                    fieldMappings.removeFieldMap(
                        fieldMappings.findFieldMapIndex(field.name))
            arcpy.SpatialJoin_analysis(target_features=intersect_path,
                                       join_features=level_path,
                                       out_feature_class=joint_path,
                                       join_operation="JOIN_ONE_TO_MANY",
                                       join_type="KEEP_ALL",
                                       field_mapping=fieldMappings,
                                       match_option="WITHIN")
            # For each boundary line, record legal status (1), its length,
            # and the adjacent unit name on the left (same source polygon)
            # or right (other polygon) side.
            with arcpy.da.UpdateCursor(intersect_path, [
                    "OID@", fid_field, "loaiHienTrangPhapLy",
                    "donViHanhChinhLienKeTrai", "donViHanhChinhLienKePhai",
                    "chieuDai", "Shape_Length", "doiTuong"
            ]) as uCur:
                for uRow in uCur:
                    with arcpy.da.SearchCursor(joint_path, [
                            "TARGET_FID", "JOIN_FID", "doiTuong",
                            "danhTuChung", "diaDanh"
                    ]) as sCur:
                        for sRow in sCur:
                            if uRow[0] == sRow[0] and sRow[2] == level_code:
                                uRow[2] = 1
                                uRow[5] = uRow[6]
                                uRow[7] = sRow[2]
                                if uRow[1] == sRow[1]:
                                    uRow[3] = sRow[3] + " " + sRow[4]
                                else:
                                    uRow[4] = sRow[3] + " " + sRow[4]
                                uCur.updateRow(uRow)

        # Commune (3), district (2), province (1).
        _build_level(DiaPhan_Xa_Path, intersect_Xa_Path, joint_Xa_Path,
                     "FID_DiaPhan_Xa", 3)
        _build_level(DiaPhan_Huyen_Path, intersect_Huyen_Path,
                     joint_Huyen_Path, "FID_DiaPhan_Huyen", 2)
        _build_level(DiaPhan_Tinh_Path, intersect_Tinh_Path, joint_Tinh_Path,
                     "FID_DiaPhan_Tinh", 1)

        # Drop commune lines that coincide with a district boundary.
        arcpy.MakeFeatureLayer_management(intersect_Xa_Path,
                                          "DuongDiaGioi_Xa_Lyr")
        arcpy.MakeFeatureLayer_management(intersect_Huyen_Path,
                                          "DuongDiaGioi_Huyen_Lyr")
        arcpy.SelectLayerByLocation_management(
            in_layer="DuongDiaGioi_Xa_Lyr",
            overlap_type="WITHIN",
            select_features="DuongDiaGioi_Huyen_Lyr",
            selection_type="NEW_SELECTION")
        if int(
                arcpy.GetCount_management("DuongDiaGioi_Xa_Lyr").getOutput(
                    0)) > 0:
            arcpy.DeleteFeatures_management("DuongDiaGioi_Xa_Lyr")
        # Drop district lines that coincide with a province boundary.
        arcpy.MakeFeatureLayer_management(intersect_Tinh_Path,
                                          "DuongDiaGioi_Tinh_Lyr")
        arcpy.SelectLayerByLocation_management(
            in_layer="DuongDiaGioi_Huyen_Lyr",
            overlap_type="WITHIN",
            select_features="DuongDiaGioi_Tinh_Lyr",
            selection_type="NEW_SELECTION")
        if int(
                arcpy.GetCount_management(
                    "DuongDiaGioi_Huyen_Lyr").getOutput(0)) > 0:
            arcpy.DeleteFeatures_management("DuongDiaGioi_Huyen_Lyr")

        # Load all three levels into DuongDiaGioi (emptying it first).
        if int(
                arcpy.GetCount_management(DuongDiaGioi_Path).getOutput(
                    0)) > 0:
            arcpy.DeleteFeatures_management(DuongDiaGioi_Path)
        duongDiaGioiFields = [
            "SHAPE@", "maNhanDang", "ngayThuNhan", "ngayCapNhat",
            "maDoiTuong", "loaiHienTrangPhapLy", "donViHanhChinhLienKeTrai",
            "donViHanhChinhLienKePhai", "chieuDai", "doiTuong", "maTrinhBay",
            "tenManh", "soPhienHieuManhBanDo"
        ]
        duongDiaGioiFields2 = [
            "SHAPE@", "maNhanDang", "ngayThuNhan", "ngayCapNhat",
            "maDoiTuong", "loaiHienTrangPhapLy", "donViHanhChinhLienKeTrai",
            "donViHanhChinhLienKePhai", "chieuDai", "doiTuong", "maTrinhBay",
            "tenManh", "soPhienHieuManhBanDo", "DuongDiaGioi_Rep_ID",
            "RuleID"
        ]
        # Per level: (source lines, representation ID, rule ID). The legal
        # status column is forced to 1 on insert, as in the per-level pass.
        for source_path, rep_id, rule_id in [(intersect_Xa_Path, 5, 1),
                                             (intersect_Huyen_Path, 3, 3),
                                             (intersect_Tinh_Path, 1, 2)]:
            with arcpy.da.SearchCursor(source_path,
                                       duongDiaGioiFields) as sCur:
                with arcpy.da.InsertCursor(DuongDiaGioi_Path,
                                           duongDiaGioiFields2) as iCur:
                    for sRow in sCur:
                        iCur.insertRow([
                            sRow[0], sRow[1], sRow[2], sRow[3], sRow[4], 1,
                            sRow[6], sRow[7], sRow[8], sRow[9], sRow[10],
                            sRow[11], sRow[12], rep_id, rule_id
                        ])

        # Publish results to the final geodatabase.
        arcpy.CopyFeatures_management(DuongDiaGioi_Path,
                                      DuongDiaGioi_Dich_Path)
        arcpy.CopyFeatures_management(songSuoiL_Path, songSuoiL_Path_Final)
        arcpy.CopyFeatures_management(DiaPhan_Path, DiaPhan_Path_Final)
        arcpy.CopyFeatures_management(doanTimDuongBo_Path,
                                      doanTimDuongBo_Path_Final)
    except (OSError, ValueError, arcpy.ExecuteError) as error:
        arcpy.AddMessage("Error" + error.message)
    finally:
        # Always release the in_memory workspace.
        arcpy.Delete_management("in_memory")
def createSegments(contour_at_mean_high_water, contour_at_surge):
    """Segment a coastline into seawall-sized polygons between two contours.

    Pairs random points generated along the mean-high-water contour with
    their nearest neighbours on the surge contour, thins the pairs down to
    segment end points, connects them with lines, splits both contours at
    those points, and polygonizes the result into per-segment polygons
    carrying the corresponding seawall length.

    Parameters:
        contour_at_mean_high_water -- line feature class of the coastline
            at mean high water.
        contour_at_surge -- line feature class of the coastline at the
            surge level of choice.

    Returns the spatial-join result (segment polygons with a "Length"
    field) produced by the module-level spatialJoin() helper.

    NOTE(review): Python 2 only as written -- long(), time.clock(), the
    old-style SearchCursor .next(), and `del r` (which relies on Python 2
    leaking the list-comprehension variable) all break on Python 3.
    Side effects: creates and deletes several shapefiles/layers in
    arcpy.env.workspace ("random0", "random1", "feature0", "feature1",
    "segments0.shp", "connector_lines.shp", "final.shp", ...).
    """
    # Start a timer
    time1 = time.clock()
    arcpy.AddMessage("\nSegmentation of the coastline started at "+str(datetime.now()))

    # Specify a tolerance distance or minimum length of a seawall
    # Users are not yet given control of this
    th = 150

    # Create random points along the lines (mean high water and the surge of choice)
    # The numbers used are just my choice based on iterative observations
    random0 = arcpy.CreateRandomPoints_management(out_path= arcpy.env.workspace, \
                                                  out_name= "random0", \
                                                  constraining_feature_class= contour_at_mean_high_water, \
                                                  number_of_points_or_field= long(1600), \
                                                  minimum_allowed_distance = "{0} Feet".format(th))
    random1 = arcpy.CreateRandomPoints_management(out_path= arcpy.env.workspace, \
                                                  out_name= "random1", \
                                                  constraining_feature_class= contour_at_surge, \
                                                  number_of_points_or_field= long(1600), \
                                                  minimum_allowed_distance = "{0} Feet".format(th))

    # Perform a proximity analysis with the NEAR tool
    # (adds NEAR_FID / NEAR_DIST fields to random0, pointing into random1)
    arcpy.Near_analysis(random0, random1)

    # Give each point a fixed unique ID (FIDs shift when features are
    # deleted/copied, so a frozen copy is kept in "UniqueID")
    # Create the ID field
    arcpy.AddField_management (random0, "UniqueID", "SHORT")
    arcpy.AddField_management (random1, "UniqueID", "SHORT")
    # Add Unique IDs (VB-style "[FID]" expression -- ArcGIS Desktop syntax)
    arcpy.CalculateField_management(random0, "UniqueID", "[FID]")
    arcpy.CalculateField_management(random1, "UniqueID", "[FID]")

    # Categorize/Separate each feature based on their near feature
    # Create a table view of random0
    table0 = arcpy.MakeTableView_management(random0, "random0_table")
    #table1 = arcpy.MakeTableView_management(random1, "random1_table")
    # Sort the near feature for each points in random0
    # (note: output name "random0_sorte.dbf" is missing a 'd' -- kept as-is,
    # it is only an intermediate table)
    random0_sorted = arcpy.Sort_management(table0, "random0_sorte.dbf", [["NEAR_FID", "ASCENDING"]])

    # Create "long enough" lists for each of the field of interests: ID, NEAR_ID,
    # and NEAR_DIST (distance to closest point).
    # [99999] is appended to extend each list and avoid IndexError when the
    # manual index walk further below reads one element past the data.
    list_fid = [r.getValue("UniqueID") for r in arcpy.SearchCursor(random0_sorted, ["UniqueID"])] +[99999]
    list_nearid = [r.getValue("NEAR_FID") for r in arcpy.SearchCursor(random0_sorted, ["NEAR_FID"])]\
                  +[99999]
    list_neardist = [r.getValue("NEAR_DIST") for r in arcpy.SearchCursor(random0_sorted, ["NEAR_DIST"])]\
                    +[99999]
    # NOTE(review): `r` only exists here because Python 2 list comprehensions
    # leak their loop variable; this line raises NameError on Python 3.
    del r

    # Only take points with near feature within the specified threshold. If it's
    # too far, it's not better than the others for a segment point.
    # NOTE(review): despite the name, this list holds NEAR_DIST *distances*
    # (filtered from list_neardist), not FIDs -- the loop below maps each
    # distance back to its FID via list_neardist.index().
    list_fid_filtered = [i for i in list_neardist if i < th]
    # Then initiate a list to contain their Unique ID and Near ID
    first_unique_id = []
    first_near_id = []
    # Get NEAR_ID and Unique ID for each of these points
    # NOTE(review): index() returns the *first* match, so duplicate distances
    # would map to the same point -- presumably rare with real coordinates.
    for i in list_fid_filtered:
        first_unique_id.append(list_fid[list_neardist.index(i)])
        first_near_id.append(list_nearid[list_neardist.index(i)])
    # Only take the unique values in case there are duplicates. This shouldn't
    # happen. Just to make sure.
    first_unique_id = [i for i in set(first_unique_id)]
    first_near_id = [i for i in set(first_near_id)]

    # Now create a new feature out of these points
    # First let's create a Feature Layer
    arcpy.MakeFeatureLayer_management("random0.shp", "random0_lyr")
    # Let's select all points and export them into a new feature
    # NOTE(review): the initial .next() consumes the first cursor row before
    # the for-loop, so the first random point is never tested -- TODO confirm
    # this is intended.
    random0_points = arcpy.SearchCursor(random0, ["UniqueID"])
    point0 = random0_points.next()
    for point0 in random0_points:
        for i in range(len(first_unique_id)):
            if point0.getValue("UniqueID") == first_unique_id[i]:
                selector0 = arcpy.SelectLayerByAttribute_management(\
                    "random0_lyr", "ADD_TO_SELECTION", '"UniqueID" = {0}'.format(first_unique_id[i]))
    del point0, random0_points
    # NOTE(review): if no point matched above, selector0 is unbound here
    # (NameError) -- assumes at least one pair within `th` always exists.
    new_random0 = arcpy.CopyFeatures_management(selector0, "new_random0")
    arcpy.Delete_management('random0_lyr')

    # Now for the new point feature, remove clusters of points around them and
    # take only the ones with minimum NEAR_DIST.
    # First, get the geometry attributes of the new points.
    arcpy.AddGeometryAttributes_management(new_random0, "POINT_X_Y_Z_M", "", "", "")

    # Create long enough list of the field of interest (same [99999] sentinel
    # trick as the previous block)
    pointx = [r.getValue("POINT_X") for r in arcpy.SearchCursor(new_random0, ["POINT_X"])] +[99999]
    pointy = [r.getValue("POINT_Y") for r in arcpy.SearchCursor(new_random0, ["POINT_Y"])] +[99999]
    new_list_fid = [r.getValue("UniqueID") for r in arcpy.SearchCursor(new_random0, ["UniqueID"])]\
                   +[99999]
    new_list_nearid = [r.getValue("NEAR_FID") for r in arcpy.SearchCursor(new_random0, ["NEAR_FID"])]\
                      +[99999]
    new_list_neardist = [r.getValue("NEAR_DIST") for r in arcpy.SearchCursor(new_random0, ["NEAR_DIST"])]\
                        +[99999]
    del r

    # Initiate a list of every point that has already been compared to the near points
    garbage = []
    # Also initiate a list for the new Unique ID and NEAR ID
    new_unique_ID = []
    new_near_ID = []
    # Then, check if the points are right next to each other. If so, add them to
    # a temporary list and find the one with closest near ID (or find minimum of
    # their NEAR_DIST).
    # NOTE(review): rebinding `i` inside the while-loop does NOT advance the
    # outer `for i in range(...)` iterator -- after `break`, the for-loop
    # resumes from its own counter, re-visiting indices (filtered out via
    # `garbage`). The sentinel [99999] entries are what keep the manual
    # `i = i+1` walk from raising IndexError. TODO confirm this interplay
    # produces the intended thinning.
    for i in range(len(pointx)):
        if i+1 < len(pointx):
            # If not within the th range (calculateDistance is a module-level
            # helper defined elsewhere in this file)
            if not calculateDistance(pointx[i], pointy[i], pointx[i+1], pointy[i+1]) < float(th)*1.5:
                # Skip if it's in garbage
                if new_list_nearid[i] in garbage:
                    continue
                else:
                    new_unique_ID.append(new_list_fid[i])
                    new_near_ID.append(new_list_nearid[i])
            # If within the range
            else:
                # Skip if it's in garbage
                if new_list_nearid[i] in garbage:
                    continue
                else:
                    temp_ID = []
                    temp_NEAR = []
                    temp_DIST = []
                    while True:
                        temp_ID.append(new_list_fid[i])
                        temp_NEAR.append(new_list_nearid[i])
                        temp_DIST.append(new_list_neardist[i])
                        garbage.append(new_list_nearid[i])
                        i = i+1
                        # Stop when within the range again.
                        # And add the last point within the range.
                        # NOTE(review): this exit test uses a hard-coded 200
                        # while the entry test uses float(th)*1.5 == 225 --
                        # TODO confirm the asymmetry is intentional.
                        if not calculateDistance(pointx[i], pointy[i], pointx[i+1], pointy[i+1]) < 200:
                            temp_ID.append(new_list_fid[i])
                            temp_NEAR.append(new_list_nearid[i])
                            temp_DIST.append(new_list_neardist[i])
                            garbage.append(new_list_nearid[i])
                            # Calculate the minimum and get the Unique ID and Near ID
                            minD = min(temp_DIST)
                            new_unique_ID.append(new_list_fid[new_list_neardist.index(minD)])
                            new_near_ID.append(new_list_nearid[new_list_neardist.index(minD)])
                            del temp_ID, temp_NEAR, temp_DIST
                            break

    # Now select these final points and export them into new features.
    # These are the end points for the segments to be created.
    # First, make a layer out of all the random points
    arcpy.MakeFeatureLayer_management("random0.shp", "random0_lyr")
    arcpy.MakeFeatureLayer_management("random1.shp", "random1_lyr")
    # Then select and export the end points into feature0 and feature1
    # Based on new_unique_ID for random0
    random0_points = arcpy.SearchCursor(random0, ["UniqueID"])
    point0 = random0_points.next()
    for point0 in random0_points:
        for i in range(len(new_unique_ID)):
            if point0.getValue("UniqueID") == new_unique_ID[i]:
                selected0 = arcpy.SelectLayerByAttribute_management(\
                    "random0_lyr", "ADD_TO_SELECTION", '"UniqueID" = {0}'.format(new_unique_ID[i]))
    feature0 = arcpy.CopyFeatures_management(selected0, "feature0")
    # Based on new_near_ID for random1
    random1_points = arcpy.SearchCursor(random1, ["UniqueID"])
    point1 = random1_points.next()
    for point1 in random1_points:
        for k in range(len(new_near_ID)):
            if point1.getValue("UniqueID") == new_near_ID[k]:
                selected1 = arcpy.SelectLayerByAttribute_management(\
                    "random1_lyr", "ADD_TO_SELECTION", '"UniqueID" = {0}'.format(new_near_ID[k]))
    feature1 = arcpy.CopyFeatures_management(selected1, "feature1")
    del point0, point1, random0_points, random1_points
    arcpy.Delete_management('random0_lyr')
    arcpy.Delete_management('random1_lyr')

    # Now for the actual creation of the coastal segments, which includes
    # creation of polygons and splitting the contours at the corresponding
    # points.
    # STEPS NECESSARY FOR POLYGON CREATION
    # Let's first add geometry attributes to these points
    arcpy.AddGeometryAttributes_management(feature0, "POINT_X_Y_Z_M", "", "", "")
    arcpy.AddGeometryAttributes_management(feature1, "POINT_X_Y_Z_M", "", "", "")
    # Let's create lines that connect points from feature0 to feature1.
    # Initiate a POLYLINE feature class for these lines
    arcpy.CreateFeatureclass_management (arcpy.env.workspace, "connector_lines.shp", "POLYLINE")
    # Then for each of the points in feature0, get the corresponding point in
    # feature1 (matched on feature0.NEAR_FID == feature1.UniqueID) and create a
    # line for each of the two points
    with arcpy.da.SearchCursor(feature0, ["NEAR_FID", "POINT_X", "POINT_Y"]) as features0:
        for feat0 in features0:
            with arcpy.da.SearchCursor(feature1, ["UniqueID", "POINT_X", "POINT_Y"]) as features1:
                x=0
                for feat1 in features1:
                    x = x+1
                    theseTwoPoints = []
                    if feat0[0] == feat1[0]:
                        # Get coordinates
                        X0, Y0 = feat0[1], feat0[2]
                        X1, Y1 = feat1[1], feat1[2]
                        # Append coordinates
                        theseTwoPoints.append(arcpy.PointGeometry(arcpy.Point(X0, Y0)))
                        theseTwoPoints.append(arcpy.PointGeometry(arcpy.Point(X1, Y1)))
                        # Create line from the coordinates
                        subline = arcpy.PointsToLine_management(theseTwoPoints, "subline"+str(x)+".shp")
                        # Append all lines into one feature
                        lines = arcpy.Append_management(["subline"+str(x)+".shp"], "connector_lines.shp")
                        # Then delete subline as it's now unnecessary
                        arcpy.Delete_management(subline)
                        continue
    del feat0, feat1, features0, features1

    # Now that the connectors are created, let's split the segments.
    # Before splitting contours into segments, let's integrate the points and
    # the segments, just in case there are misalignments
    # (Integrate modifies its inputs in place).
    arcpy.Integrate_management([contour_at_mean_high_water, feature0])
    arcpy.Integrate_management([contour_at_surge, feature1])
    segments0 = arcpy.SplitLineAtPoint_management(contour_at_mean_high_water, feature0,
                                                  "segments0.shp", "10 Feet")
    segments1 = arcpy.SplitLineAtPoint_management(contour_at_surge, feature1,
                                                  "segments1.shp", "10 Feet")
    # And let's give a fixed unique ID to each segment
    arcpy.CalculateField_management(segments0, "Id", "[FID]")
    arcpy.CalculateField_management(segments1, "Id", "[FID]")

    # Now with the split segments and connector lines, let's make segment
    # polygons of the segments
    almost_segment_polygons = arcpy.FeatureToPolygon_management([segments0, segments1, lines],\
                                                                "almost_segment_polygons.shp")
    # Adding unique ID to the segment polygons
    arcpy.CalculateField_management(almost_segment_polygons, "Id", "[FID]")

    # The Feature to Polygon process also created polygons that are surrounded
    # by polygons. These are because these areas are surrounded by flooded
    # areas at surge. They are above the surge and technically safe. So, let's
    # remove them.
    arcpy.MakeFeatureLayer_management(almost_segment_polygons, 'almost_segment_polygons_lyr')
    arcpy.MakeFeatureLayer_management(segments0, 'segments0_lyr')
    # Only the polygons within the mean_high_water segments are at risk
    arcpy.SelectLayerByLocation_management('almost_segment_polygons_lyr', 'INTERSECT', 'segments0_lyr')
    final_without_length = arcpy.CopyFeatures_management('almost_segment_polygons_lyr', 'final.shp')
    arcpy.Delete_management('segments0_lyr')
    arcpy.Delete_management('almost_segment_polygons_lyr')

    # For the new polygons, let's add the corresponding seawall length.
    # Let's add a Length field to both first
    # (NOTE(review): SHORT truncates fractional feet and overflows past
    # 32767 -- TODO confirm segment lengths stay in range)
    arcpy.AddField_management(final_without_length, "Length", "SHORT")
    arcpy.AddField_management(segments0, "Length", "SHORT")
    # Calculation of the length
    with arcpy.da.UpdateCursor(segments0, ["SHAPE@LENGTH", "Length"]) as segments_0:
        for segment_0 in segments_0:
            length = segment_0[0]
            segment_0[1] = length
            segments_0.updateRow(segment_0)
    del segment_0, segments_0

    # With spatial join, let's add these results to the segment polygons
    # (spatialJoin is a module-level helper defined elsewhere in this file)
    final = spatialJoin(final_without_length, segments0, "Length", "Length", "max",
                        "joined_segment.shp")

    # Delete the created but now unnecessary files
    arcpy.Delete_management(random0)
    arcpy.Delete_management(random1)

    # Stop the timer
    time2 = time.clock()
    arcpy.AddMessage("Seawall segments and regions successfully created. It took "\
                     +str(time2-time1)+" seconds")

    return final
def createpolyfeatureclass_2d(mainpolylist_2d, postbottomboxlist_2d, minsfirst,
                              minslast, maxfirst, maxlast, prior,
                              ExtendLine_edit_distance,
                              TrimLine_edit_dangle_length,
                              Integrate_management_distance, smooth_2d):
    '''Build a 2-D cross-section polygon feature class from grouped polylines.

    Groups the input polylines by priority tag, dissolves/integrates each
    group in the "in_memory" workspace, cleans the merged line work
    (extend, trim, integrate, repair), converts it to polygons, and exports
    everything into a file geodatabase under the user's home directory.

    Parameters (tool tolerances, quoting the original notes):
        ExtendLine_edit_distance -- the maximum distance a line segment can
            be extended to an intersecting feature
            (e.g. arcpy.ExtendLine_edit(<lines>, "5")).
        TrimLine_edit_dangle_length -- line segments shorter than this
            dangle length that do not touch another line at both endpoints
            (dangles) will be trimmed
            (e.g. arcpy.TrimLine_edit(<lines>, "2", "kEEP_SHORT")).
        Integrate_management_distance -- the distance that determines the
            range in which feature vertices are made coincident; to
            minimize undesired movement of vertices the x,y tolerance
            should be fairly small
            (e.g. arcpy.Integrate_management(<lines>, 0.01)).

    Other parameters: mainpolylist_2d / postbottomboxlist_2d are nested
    lists carrying tagged polyline geometries (shapes documented in the
    inline comments below); minsfirst/minslast/maxfirst/maxlast supply the
    first/last borehole end coordinates; smooth_2d toggles the
    Bezier-smoothed variant. NOTE(review): `prior` is accepted but never
    used in this body -- TODO confirm it can be dropped by callers.

    Returns the path of the created "arcgistempdb_2d.gdb" file
    geodatabase. `joinadd` and `expanduser` are helpers/imports defined
    elsewhere in this file (joinadd presumably wraps os.path.join --
    TODO confirm).
    '''
    #arcpy.CreateFeatureclass_management("C:\Users\usuari\Desktop\Interpretation-test01-2018.mdb", "mainpolyli", "POLYLINE","", "DISABLED", "ENABLED")
    allpolies = list()
    # NOTE(review): infpolies is initialized but never used below.
    infpolies = list()

    ################## 'filenames1'
    # Scratch folder in the user's home directory; the output GDBs are
    # created here at the end.
    homeadd = joinadd(expanduser("~"), "arcgistemp_2d")
    #plnsadd=joinadd(homeadd,"plns_2d")

    ################# 'making priorpolylist [ priority number, [polyline1,polyline2,...] ]'
    # Flatten mainpolylist_2d into [tag, polyline] pairs; each entry of
    # mainpolylist_2d[i][2] contributes (j[0] = tag, j[2] = geometry).
    for i in range(0, len(mainpolylist_2d)):
        for j in mainpolylist_2d[i][2]:
            allpolies.append([j[0], j[2]])
    #######
    #postbottomboxlist=[ [point1coord,point2coord,point 1 borehole or mid (0 or 1) ,point 2 borehole or mid (0 or 1), polyline] ,... ]
    for kk in postbottomboxlist_2d:
        allpolies.append(["post_bottombox_2d", kk[4]])
    ######
    # Vertical closing lines at the first and last borehole
    # (from the max-depth coordinate up to the surface coordinate).
    pointlist = [maxfirst, minsfirst[1]]
    firstbhline_2d = arcpy.Polyline(
        arcpy.Array([arcpy.Point(*coords) for coords in pointlist]), "Unknown",
        False, False)
    allpolies.append(["firstbhline_2d", firstbhline_2d])
    pointlist = [maxlast, minslast[1]]
    lastbhline_2d = arcpy.Polyline(
        arcpy.Array([arcpy.Point(*coords) for coords in pointlist]), "Unknown",
        False, False)
    allpolies.append(["lastbhline_2d", lastbhline_2d])
    #######
    # Group the flat [tag, polyline] pairs into
    # priorpolylist = [[tag, [polyline1, polyline2, ...]], ...]
    priorpolylist = list()
    #print 'allpolies is:', allpolies
    for n in allpolies:
        if [n[0], []] not in priorpolylist:
            priorpolylist.append([n[0], []])
    for m in priorpolylist:
        for b in allpolies:
            if b[0] == m[0]:
                m[1].append(b[1])
    #print 'priorpolylist_2d' , priorpolylist

    ################### 'creating the main polyline featureclass'
    con = 0
    polylineadlistmerge = []
    polylineadlistmerge_smooth = []
    # Accumulator feature class holding ALL lines of every group.
    arcpy.CreateFeatureclass_management("in_memory", "all_lines_2d", "POLYLINE",
                                        "", "DISABLED", "DISABLED", "")
    for ii in range(0, len(priorpolylist)):
        con = con + 1
        # NOTE(review): bare string below is a no-op statement used as a
        # section marker in the original -- kept verbatim.
        'names'
        temppolyname = "temppoly_2d" + str(con)
        #plnslayername=joinadd(homeadd,"plnslayer_2d")+str(con)+".shp"
        #mainplnsadd=joinadd(homeadd,"mainplns_2d")+str(con)+".shp"
        #mainplnsadd2=joinadd(homeadd,"mainplns_2d")+str(con+1)
        ############
        # Per-group scratch feature class, filled with this group's geometries.
        arcpy.CreateFeatureclass_management("in_memory", temppolyname,
                                            "POLYLINE", "", "DISABLED",
                                            "DISABLED", "")
        cursor = arcpy.da.InsertCursor(joinadd("in_memory", temppolyname),
                                       ["SHAPE@"])
        for t in priorpolylist[ii][1]:
            cursor.insertRow([t])
        del cursor
        ############### test 2019
        # Same geometries also appended to the global accumulator.
        cursor = arcpy.da.InsertCursor(joinadd("in_memory", "all_lines_2d"),
                                       ["SHAPE@"])
        for t in priorpolylist[ii][1]:
            cursor.insertRow([t])
        del cursor
        ###################
        # ArcGIS license levels required by the tools used here:
        #dissolve:basic
        #integrate: basic
        #ExtendLine_edit: standard
        #TrimLine_edit: standard
        #Integrate_management:basic
        #RepairGeometry_management:basic
        #FeatureToPolygon_management: advanced
        #
        # Dissolve the group into unsplit lines, snap vertices with a tight
        # 0.01 tolerance, and record the result for the final merge.
        arcpy.Dissolve_management(
            joinadd("in_memory", temppolyname),
            joinadd("in_memory", "plnslayertemp_2d" + str(con)), "", "", "",
            "UNSPLIT_LINES")
        arcpy.Integrate_management(
            joinadd("in_memory", "plnslayertemp_2d" + str(con)), 0.01)
        polylineadlistmerge.append(
            joinadd("in_memory", "plnslayertemp_2d" + str(con)))
        #arcpy.FeatureVerticesToPoints_management(joinadd("in_memory","plnslayertemp_2d"+str(con)),joinadd(homeadd,"dangle"+str(con)),"DANGLE")
        ################
        if smooth_2d == True:
            #smoothing
            arcpy.SmoothLine_cartography(
                joinadd("in_memory", "plnslayertemp_2d" + str(con)),
                joinadd("in_memory", "smoothed" + str(con)),
                "BEZIER_INTERPOLATION", "", "FIXED_CLOSED_ENDPOINT", "")
            polylineadlistmerge_smooth.append(
                joinadd("in_memory", "smoothed" + str(con)))

    #############test 2019
    # Clean up the merged line work: dissolve, extend undershoots, trim
    # dangles, snap vertices, repair geometry, then polygonize.
    arcpy.Dissolve_management(joinadd("in_memory", "all_lines_2d"),
                              joinadd("in_memory", "all_lines_2d_dissolved"),
                              "", "", "", "UNSPLIT_LINES")
    arcpy.ExtendLine_edit(joinadd("in_memory", "all_lines_2d_dissolved"),
                          str(ExtendLine_edit_distance))
    # NOTE(review): "kEEP_SHORT" has odd casing; arcpy keyword strings are
    # typically case-insensitive so this presumably works -- TODO confirm.
    arcpy.TrimLine_edit(joinadd("in_memory", "all_lines_2d_dissolved"),
                        str(TrimLine_edit_dangle_length), "kEEP_SHORT")
    arcpy.Integrate_management(joinadd("in_memory", "all_lines_2d_dissolved"),
                               Integrate_management_distance)
    arcpy.RepairGeometry_management(
        joinadd("in_memory", "all_lines_2d_dissolved"))
    arcpy.FeatureToPolygon_management(
        joinadd("in_memory", "all_lines_2d_dissolved"),
        joinadd("in_memory", "all_lines_2d_dissolved_feat_to_poly"), "0.02",
        "", "")
    polylineadlistmerge.append(
        joinadd("in_memory", "all_lines_2d_dissolved_feat_to_poly"))
    ###################
    #smoothing
    if smooth_2d == True:
        arcpy.SmoothLine_cartography(
            joinadd("in_memory", "all_lines_2d_dissolved"),
            joinadd("in_memory", "all_lines_2d_dissolved_smoothed"),
            "BEZIER_INTERPOLATION", "", "FIXED_CLOSED_ENDPOINT", "")
        arcpy.FeatureToPolygon_management(
            joinadd("in_memory", "all_lines_2d_dissolved_smoothed"),
            joinadd("in_memory",
                    "all_lines_2d_dissolved_feat_to_poly_smoothed"), "0.02",
            "", "")
        polylineadlistmerge_smooth.append(
            joinadd("in_memory",
                    "all_lines_2d_dissolved_feat_to_poly_smoothed"))
        # Export the smoothed variant into its own file GDB.
        # NOTE(review): CreateFileGDB raises if the GDB already exists from a
        # previous run -- TODO confirm callers clean up homeadd between runs.
        arcpy.CreateFileGDB_management(homeadd, "arcgistempdb_2d_smoothed.gdb")
        arcpy.FeatureClassToGeodatabase_conversion(
            polylineadlistmerge_smooth,
            joinadd(homeadd, "arcgistempdb_2d_smoothed.gdb"))
    ###################
    # Final merge: polygonize the per-group results plus the cleaned overall
    # polygon layer, then export everything to the unsmoothed file GDB.
    mergedpolygonsfromlines_2d = joinadd("in_memory",
                                         "mergedpolygonsfromlines_2d")
    arcgistempdb_2d = joinadd(homeadd, "arcgistempdb_2d.gdb")
    arcpy.FeatureToPolygon_management(polylineadlistmerge,
                                      mergedpolygonsfromlines_2d, "", "", "")
    polylineadlistmerge.append(mergedpolygonsfromlines_2d)
    arcpy.CreateFileGDB_management(homeadd, "arcgistempdb_2d.gdb")
    arcpy.FeatureClassToGeodatabase_conversion(polylineadlistmerge,
                                               arcgistempdb_2d)
    #####################
    return arcgistempdb_2d