def NewBNDpoly(old_boundary, modifying_feature, new_bndpoly='boundary_poly',
               vertexdist='25 METERS', snapdist='25 METERS', verbose=True):
    """Snaps the boundary polygon to the shoreline points anywhere they don't
    already match, as long as they are within 25 m of each other."""
    # boundary = input line or polygon of boundary to be modified by newline
    typeFC = arcpy.Describe(old_boundary).shapeType
    if typeFC == "Line" or typeFC == 'Polyline':
        arcpy.FeatureToPolygon_management(old_boundary, new_bndpoly, '1 METER')
    else:
        if len(os.path.split(new_bndpoly)[0]):
            path = os.path.split(new_bndpoly)[0]
        else:
            path = arcpy.env.workspace
        arcpy.FeatureClassToFeatureClass_conversion(old_boundary, path, os.path.basename(new_bndpoly))
    typeFC = arcpy.Describe(modifying_feature).shapeType
    if typeFC == "Line" or typeFC == "Polyline":
        arcpy.Densify_edit(modifying_feature, 'DISTANCE', vertexdist)
    # elif typeFC == "Point" or typeFC == "Multipoint":
    #     arcpy.PointsToLine_management(modifying_feature, modifying_feature + '_line')
    #     modifying_feature = modifying_feature + '_line'
    #     arcpy.Densify_edit(modifying_feature, 'DISTANCE', vertexdist)
    arcpy.Densify_edit(new_bndpoly, 'DISTANCE', vertexdist)
    # arcpy.Densify_edit(modifying_feature, 'DISTANCE', vertexdist)
    arcpy.Snap_edit(new_bndpoly, [[modifying_feature, 'VERTEX', snapdist]])  # Takes a while
    if verbose:
        print("Created: {} ... Should be in your home geodatabase.".format(os.path.basename(new_bndpoly)))
    return new_bndpoly  # string name of new polygon

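# Example call (hypothetical dataset names); assumes arcpy.env.workspace points at the
# geodatabase that holds both the old boundary polygon and the shoreline feature.
new_poly = NewBNDpoly('island_boundary', 'shoreline_pts_2016',
                      new_bndpoly='boundary_poly_2016',
                      vertexdist='25 METERS', snapdist='25 METERS')
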
def fixCurves(fc):
    arcpy.env.overwriteOutput = True
    print("\tProcessing true curves in {0}... this will take awhile to complete".format(fc.name))
    whereOID, cntSource = getCurvy(fc.dataSource, True, False)
    if len(cntSource) == 1:
        whereOID = whereOID.replace(',', '')
    # arcpy.SelectLayerByAttribute_management(fc, "NEW_SELECTION", whereOID)
    # arcpy.CopyFeatures_management(fc, "curvy_" + fc.name.replace(" ", "_"))
    arcpy.Select_analysis(fc.dataSource, "curvy_" + fc.name.replace(" ", "_"), whereOID)
    expression, cntCopy = getCurvy(scratchWksp + "\\curvy_" + fc.name.replace(" ", "_"), False, False)
    arcpy.Densify_edit(scratchWksp + "\\curvy_" + fc.name.replace(" ", "_"),
                       "ANGLE", "200 Feet", "2 Feet", "10")
    arcpy.FeatureVerticesToPoints_management(
        scratchWksp + "\\curvy_" + fc.name.replace(" ", "_"),
        scratchWksp + "\\curvy_" + fc.name.replace(" ", "_") + "_Pnts", "ALL")
    arcpy.PointsToLine_management(
        scratchWksp + "\\curvy_" + fc.name.replace(" ", "_") + "_Pnts",
        scratchWksp + "\\notCurvy_" + fc.name.replace(" ", "_"), "ORIG_FID")
    if getCurvy(scratchWksp + "\\notCurvy_" + fc.name.replace(" ", "_"), False, False):
        print("Something went horribly wrong! {0}".format(fc.name))
    flds = arcpy.ListFields(fc.dataSource)
    # use a list comprehension; removing list objects inside a loop would raise an error
    fldsList = [fld for fld in flds if fld.name not in passFlds]
    # a feature class may have only passFlds, in which case the script would fail
    if fldsList:
        fldNames = []
        for f in fldsList:
            fldNames.append(f.name)
        fldNames = ';'.join(map(str, fldNames))
        if getShapeType(fc) == "Polyline":
            arcpy.TransferAttributes_edit(
                scratchWksp + "\\curvy_" + fc.name.replace(" ", "_"),
                scratchWksp + "\\notCurvy_" + fc.name.replace(" ", "_"),
                fldNames, "1 Feet", "",
                "attTransfer" + fc.name.replace(" ", "_"))
            if fixTrueCurves:
                # delete coincident lines first due to ArcFM Feeder Manager messages;
                # append after delete or ArcFM Feeder Manager will present excessive messages
                arcpy.SelectLayerByAttribute_management(fc, "NEW_SELECTION", whereOID)
                arcpy.DeleteFeatures_management(fc)
                arcpy.Append_management(
                    scratchWksp + "\\notCurvy_" + fc.name.replace(" ", "_"),
                    fc.dataSource, "NO_TEST")
                # pass
            else:
                pass
    print("{0}: {1} Copied: {2} notCurvy: {3}".format(fc.name, len(cntSource), len(cntCopy), len(curveList)))

def simplify_then_densify(input_features):
    """ Simplify and densify the input features to normalize them for processing """
    if os.path.basename(os.path.splitext(input_features)[0]).endswith("_projected"):
        if os.path.basename(input_features).endswith(".shp"):
            processed_output = (os.path.splitext(input_features)[0]).rstrip("_projected") + "_processed.shp"
        else:
            processed_output = (os.path.splitext(input_features)[0]).rstrip("_projected") + "_processed"
    else:
        if os.path.basename(input_features).endswith(".shp"):
            processed_output = os.path.splitext(input_features)[0] + "_processed.shp"
        else:
            processed_output = os.path.splitext(input_features)[0] + "_processed"
    print "Simplifying..."
    arcpy.SimplifyLine_cartography(input_features, processed_output, "BEND_SIMPLIFY",
                                   "100 Feet", collapsed_point_option="NO_KEEP")
    print "Densifying..."
    return arcpy.Densify_edit(processed_output, "Distance", "100 Feet")

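# Example call (hypothetical path); SimplifyLine_cartography writes the "_processed"
# output, which Densify_edit then densifies in place.
processed = simplify_then_densify(r"C:\data\streams_projected.shp")
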
def make_rand_road_pts():
    """
    makes the 'rd_far_fld.shp' file, which is points on roads that are spaced at least
    300 ft from each other and at least 200 ft from any flood points
    :return:
    """
    road_shapefile = "nor_roads_centerlines.shp"
    arcpy.Densify_edit(road_shapefile, densification_method='DISTANCE', distance=30)
    road_pts_file = 'rd_pts_all_1.shp'
    arcpy.FeatureVerticesToPoints_management(road_shapefile, road_pts_file)
    rand_rd_pts_file = 'rand_road.shp'
    rand_rd_pts_lyr = 'rand_road_lyr'
    arcpy.CreateRandomPoints_management(gis_proj_dir, rand_rd_pts_file, road_pts_file,
                                        number_of_points_or_field=50000,
                                        minimum_allowed_distance='200 Feet')
    print "rand_rd_points_file"
    fld_pts_file = 'flooded_points.shp'
    fld_pts_buf = 'fld_pt_buf.shp'
    arcpy.Buffer_analysis(fld_pts_file, fld_pts_buf, buffer_distance_or_field="200 Feet",
                          dissolve_option='ALL')
    print "buffer"
    arcpy.MakeFeatureLayer_management(rand_rd_pts_file, rand_rd_pts_lyr)
    arcpy.SelectLayerByLocation_management(rand_rd_pts_lyr, overlap_type='WITHIN',
                                           select_features=fld_pts_buf,
                                           invert_spatial_relationship='INVERT')
    rd_pts_outside_buf = 'rd_far_fld.shp'
    arcpy.CopyFeatures_management(rand_rd_pts_lyr, rd_pts_outside_buf)
    arcpy.JoinField_management(rd_pts_outside_buf, in_field='CID',
                               join_table=road_pts_file, join_field='FID')
    print "rd_points_outside_buf"

def create_points(lines, current_raster):
    points = temp_loc + "\\Points.shp"  # path to temp file with points from line
    cell_size = read_cell_size(current_raster)  # gets raster cell_size
    dist = str(cell_size / 2) + " Meters"  # calculates distance between points as 1/2 of cell_size
    temp_lines = temp_loc + "\\temp_lines"
    arcpy.FeatureClassToFeatureClass_conversion(lines, temp_loc, "temp_lines", "", "", "")
    arcpy.Densify_edit(lines, "DISTANCE", dist, "1 Meters", "10")  # densifies line vertices to dist
    try:
        arcpy.FeatureVerticesToPoints_management(lines, points, "ALL")  # convert vertices from lines to points
    except arcpy.ExecuteError:
        error_code = get_error_code()  # gets error code
        if error_code == '000725' or error_code == '000872':  # if code means that points already exist
            arcpy.Delete_management(points, "")  # deletes old points
            arcpy.FeatureVerticesToPoints_management(lines, points, "ALL")  # convert vertices again
        else:  # if it's another error, prints message
            print 'Process broken by error. Info below:'
            print error_message
    arcpy.Delete_management(temp_lines, "")
    return points  # returns path to created points

def vulnerabilite_rr(in_rr_layer, in_ras, vul_infra_route, id_vul_infra_route):
    # Select the segments affected by the hazard
    # inRRLayer = 'inRRLayer'
    # arcpy.MakeFeatureLayer_management(inRR, inRRLayer)
    # arcpy.SelectLayerByLocation_management(inRRLayer, "INTERSECT", maskZIl)
    # Copy the selected road-network segments for the functionality analysis
    zi_rr2 = 'zi_rr3a'  # TODO: generate this name automatically
    arcpy.CopyFeatures_management(in_rr_layer, zi_rr2)
    arcpy.Densify_edit(zi_rr2, "DISTANCE", "10")
    # Convert the vertices to points
    arcpy.AddMessage(' Conversion des vertex en points')
    rr_pts = 'RRpts'  # TODO: generate this name automatically
    arcpy.FeatureVerticesToPoints_management(zi_rr2, rr_pts, 'ALL')
    # Extract the flood depth at each point
    arcpy.AddMessage(' Extraction de la profondeur de submersion')
    rr_pts_extract = 'RRptsExtract'  # TODO: generate this name automatically
    arcpy.sa.ExtractValuesToPoints(rr_pts, in_ras, rr_pts_extract)
    # Replace NoData values (-9999 or less) with 0
    rows = arcpy.da.UpdateCursor(rr_pts_extract, ["RASTERVALU"])
    for row in rows:
        if row[0] <= -9999:
            row[0] = 0
            rows.updateRow(row)
    del rows
    arcpy.AddMessage(' Comparaison des profondeur de submersion avec les seuils')
    out_stats_table = 'stats'
    # MAX (positive flood depths) or MIN (negative depths)
    arcpy.Statistics_analysis(rr_pts_extract, out_stats_table, [["RASTERVALU", "MAX"]],
                              [id_vul_infra_route, 'PRECISION'])  # , 'CRCC_NO_SEQ'
    for f in arcpy.ListFields(out_stats_table):
        if 'max' in f.name.lower():  # min or max
            maxfield = f.name
            break
    status_rr = {}
    rows = arcpy.da.SearchCursor(out_stats_table, [id_vul_infra_route, maxfield, "PRECISION"])
    for row in rows:
        # status_rr[road_id] = functionality_level[water depth, flooded/not flooded]
        # print row.getValue(IDvulInfraRou)
        status_rr[row[0]] = [row[1], find_class(row[1], vul_infra_route[row[2]])]
        # '-' in front of the row for "findClass( HERE row.getValue(maxfield)"  # [row.CRCC_NO_SEQ]
    del rows
    return status_rr

def simplifyPolygons(sourceDataset):
    # simplify polygons using approach developed by Chris Bus.
    dla.addMessage("Simplifying (densifying) Geometry")
    arcpy.Densify_edit(sourceDataset)
    simplify = sourceDataset + '_simplified'
    if arcpy.Exists(simplify):
        arcpy.Delete_management(simplify)
    if arcpy.Exists(simplify + '_Pnt'):
        arcpy.Delete_management(simplify + '_Pnt')
    arcpy.SimplifyPolygon_cartography(sourceDataset, simplify, "POINT_REMOVE", "1 Meters")
    return simplify

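# Example call (hypothetical feature class). SimplifyPolygon_cartography also writes a
# companion <output>_Pnt point feature class, which is why the function deletes both the
# '_simplified' output and its '_Pnt' sidecar before re-running.
simplified_fc = simplifyPolygons(r"C:\work\staging.gdb\parcels")
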
def onLine(self, line_geometry):
    self.line_geometry = line_geometry
    try:
        arcpy.CheckOutExtension('SPATIAL')
    except Exception as e:
        arcpy.AddError('Spatial Extension required.\r\n' + str(e))
    raster_layer = raster_layer_menu.raster_layer
    poly = self.line_geometry
    use_units = meas_units.units
    poly_length = poly.getLength(units='{}'.format(use_units))
    dens_distance = int(poly_length / 400)
    arcpy.env.overwriteOutput = True
    arcpy.CopyFeatures_management(poly, 'in_memory\\Route')
    arcpy.Densify_edit(r'in_memory\Route', 'DISTANCE', str(dens_distance) + ' {}'.format(use_units))
    arcpy.FeatureVerticesToPoints_management('in_memory\\Route', 'in_memory\\poly_feature', 'ALL')
    pts = [list(i[0]) for i in arcpy.da.SearchCursor('in_memory\\poly_feature', 'SHAPE@XY')]
    # Derive elevation values from DEM specified.
    arcpy.sa.ExtractMultiValuesToPoints('in_memory\\poly_feature', [[raster_layer, 'Z']])
    arcpy.CheckInExtension('SPATIAL')
    elev = [i[0] for i in arcpy.da.SearchCursor('in_memory\\poly_feature', 'Z')]
    seg_length = int(poly_length / len(elev))
    splits = []
    for i in range(len(elev)):
        splits.append(i * seg_length)
    arcpy.Delete_management('poly_feature')
    # Produce and export elevation profile.
    fig, ax = plt.subplots()
    fig.canvas.set_window_title('Elevation Profile')
    ax.plot(splits, elev, linewidth=2, color='g')
    fig.add_axes(ax)
    plt.xlabel('Feet Along Route')
    plt.ylabel('Elevation')
    try:
        plt.savefig(pythonaddins.SaveDialog(
            'Save Elevation Profile', 'Profile.png',
            os.path.dirname(arcpy.mapping.MapDocument("CURRENT").filePath)))
    except:
        pythonaddins.MessageBox('Error during save. Click reset and re-run process.', 'Alert')

def main(inFC, Vertices, Output):
    try:
        arcpy.AddMessage('\n')
        fields = [f.name for f in arcpy.ListFields(inFC)]
        Count = int(arcpy.GetCount_management(inFC).getOutput(0))
        TempFolder = os.path.dirname(inFC)
        temp = os.path.join(TempFolder, 'temp.lyr')
        merge = []
        for n in xrange(Count):
            arcpy.AddMessage('Processing feature %s of %s' % (n + 1, Count))
            arcpy.MakeFeatureLayer_management(inFC, temp)
            arcpy.SelectLayerByAttribute_management(temp, "NEW_SELECTION", '"Id" = %s' % (n))
            if '.gdb' in TempFolder:  # Geodatabase file
                TempInput = os.path.join(TempFolder, 'temp_input')
                TempOutput = os.path.join(TempFolder, 'temp_%s' % (n))
            else:  # Shapefile
                TempInput = os.path.join(TempFolder, 'temp_input.shp')
                TempOutput = os.path.join(TempFolder, 'temp_%s.shp' % (n))
            arcpy.CopyFeatures_management(temp, TempInput)
            for f in arcpy.da.SearchCursor(TempInput, ['SHAPE@']):
                perim = f[0].length
                Densify = perim / Vertices
            arcpy.Densify_edit(TempInput, "DISTANCE", Densify, "", "")
            merge.append(TempOutput)
            Voronoi_Lines(TempInput, TempOutput)
        arcpy.Merge_management(merge, Output)
        for fname in merge:
            arcpy.Delete_management(fname)
        arcpy.Delete_management(TempInput)
        arcpy.AddMessage('\n')
    except Exception, e:
        arcpy.AddError('%s' % (e))

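# Example call (hypothetical paths). The input feature class is expected to have an "Id"
# field numbered 0..N-1, and Voronoi_Lines is a helper defined elsewhere in this script.
main(r"C:\work\lakes.gdb\lake_polys", Vertices=200, Output=r"C:\work\lakes.gdb\lake_voronoi")
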
def do_with_shp(raw_raster, raw_shp, out_raster, pix_size):
    in_shp = 'xx_in_shp_densi.shp'
    clear_fea(in_shp)
    arcpy.Copy_management(raw_shp, in_shp)
    arcpy.Densify_edit(in_shp, "DISTANCE", "20000")
    print('with shape:')
    in_raster = 'xx_remaple_twice.tif'
    clear_fea(in_raster)
    print(' ' * 4 + 'resample ... ')
    arcpy.Resample_management(raw_raster, in_raster, int(pix_size * 0.9), "CUBIC")
    tmp_raster = 'xx_with_extract.tif'
    tmp_raster2 = 'xx_with_project.tif'
    in_shp_buf = 'xx_shp_buf.shp'
    clear_fea(in_shp_buf)
    arcpy.Buffer_analysis(in_shp, in_shp_buf, pix_size)
    in_shp_proj = 'xx_shp_proj.shp'
    clear_fea(in_shp_proj)
    arcpy.Project_management(in_shp, in_shp_proj, proj3857, '',
                             arcpy.Describe(in_raster).spatialReference)
    clear_fea(tmp_raster)
    clear_fea(tmp_raster2)
    print(' ' * 4 + 'extract ... ')
    outExtractByMask2 = arcpy.sa.ExtractByMask(in_raster, in_shp_buf)
    outExtractByMask2.save(tmp_raster)
    try:
        print('-' * 20)
        print(' to project ...')
        print(tmp_raster)
        print(out_raster)
        arcpy.ProjectRaster_management(tmp_raster, tmp_raster2, proj3857, 'CUBIC', pix_size)
        # clear_fea(tmp_raster)
    except:
        print "Project Raster example failed."
        print arcpy.GetMessages()
    print(' ' * 4 + 'extract ... ')
    outma8 = arcpy.sa.ExtractByMask(tmp_raster2, in_shp_proj)
    outma8.save(out_raster)

def digue(in_lines, in_raster):
    """
    Calculate quantities of sand and bags required for dike construction
    :param in_lines: (string) path of lines shapefile
    :param in_raster: (string) path of water height raster
    """
    # Dike configuration parameters
    sommet = 0.61      # top width (must be at least 2' wide across the top of the dike)
    angle = 45         # side slope angle
    freeboard = 0.61   # area of the dike between the highest floodwater level and the top of the dike (2')
    compaction = 1.05  # 5% of the required height of the dike to account for compaction due to wetting
    volumeSac = 0.02   # 32 kg of sand (20 litres)

    arcpy.env.workspace = arcpy.Describe(in_lines).path
    arcpy.env.overwriteOutput = True

    # Convert the raster to a polygon
    arcpy.CheckOutExtension('Spatial')
    rasBool = arcpy.sa.Int(arcpy.sa.Con(arcpy.sa.IsNull(in_raster), 0, 1))
    filteredRaster = arcpy.sa.SetNull(rasBool, rasBool, "VALUE = 0")
    outPolygon = "zi_polygon.shp"
    arcpy.RasterToPolygon_conversion(filteredRaster, outPolygon, 'NO_SIMPLIFY')

    # Clip the lines with the polygon
    outClip = "clip_lines.shp"
    arcpy.Clip_analysis(in_lines, outPolygon, outClip)

    # Multipart to single part + densify
    outMulti = "multi_lines.shp"
    arcpy.MultipartToSinglepart_management(outClip, outMulti)
    arcpy.Densify_edit(outMulti, 'DISTANCE', 0.2)
    allPts = "allPts.shp"
    arcpy.FeatureVerticesToPoints_management(outMulti, allPts, 'All')

    # Extract the raster value at each point
    allPtsZ = "allPtsZ.shp"
    arcpy.sa.ExtractValuesToPoints(allPts, in_raster, allPtsZ, 'INTERPOLATE')

    arcpy.AddField_management(outMulti, 'Volume', 'FLOAT')
    arcpy.AddField_management(outMulti, 'Nb_Sacs', 'INTEGER')
    arcpy.AddField_management(outMulti, 'Comments', 'TEXT')

    vol_sable, nb_sacs = 0, 0
    with arcpy.da.UpdateCursor(outMulti, ['SHAPE@', 'RIGHT_FID', 'Volume', 'Nb_Sacs', 'Comments']) as ucursor:
        for urow in ucursor:
            volumeTotal = 0
            with arcpy.da.SearchCursor(allPtsZ, ['RASTERVALU'],
                                       """"RIGHT_FID" = {0}""".format(urow[1])) as scursor:
                allDepth = []
                for srow in scursor:
                    allDepth.append(srow[0])
            for d in allDepth:
                if d == -9999:
                    base = baseTrapeze(freeboard, angle, sommet)
                    volumeTotal += volumeTrapeze(freeboard * compaction, sommet, base, 0.2)
                else:
                    base = baseTrapeze(d + freeboard, angle, sommet)
                    volumeTotal += volumeTrapeze((d + freeboard) * compaction, sommet, base, 0.2)
            urow[2] = volumeTotal
            urow[3] = volumeTotal / volumeSac
            vol_sable = volumeTotal
            nb_sacs = volumeTotal / volumeSac
            ucursor.updateRow(urow)

    outDigues = "out_digues.shp"
    arcpy.Dissolve_management(outMulti, outDigues, ['RIGHT_FID', 'Volume', 'Nb_Sacs'])
    arcpy.Delete_management(outPolygon)
    arcpy.Delete_management(outClip)
    arcpy.Delete_management(outMulti)
    arcpy.Delete_management(allPts)
    arcpy.Delete_management(allPtsZ)
    arcpy.Delete_management("lines.shp")
    return vol_sable, nb_sacs

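# The helpers baseTrapeze/volumeTrapeze are not shown in this snippet. A minimal sketch of
# what they plausibly compute, assuming a symmetric trapezoidal dike cross-section (height h,
# top width `sommet`, side slope `angle` in degrees) extruded over each 0.2 m densified segment:
import math

def baseTrapeze(h, angle, sommet):
    # bottom width = top width plus the horizontal run of both sloped sides
    return sommet + 2 * h / math.tan(math.radians(angle))

def volumeTrapeze(h, sommet, base, seg_len):
    # trapezoidal cross-section area times the segment length
    return (sommet + base) / 2.0 * h * seg_len
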
def main():
    fsId = None
    groupLayer = None
    layerMap = None
    matchEntireName = None
    projection = None
    scratchGDB = None
    scratchLayer = None
    fst = None
    rows = None
    fieldList = None
    layerToServiceLayer = None
    matches = False
    try:
        proxy_port = None
        proxy_url = None

        securityinfo = {}
        securityinfo['security_type'] = 'ArcGIS'  # LDAP, NTLM, OAuth, Portal, PKI
        securityinfo['proxy_url'] = proxy_url
        securityinfo['proxy_port'] = proxy_port
        securityinfo['referer_url'] = None
        securityinfo['token_url'] = None
        securityinfo['certificatefile'] = None
        securityinfo['keyfile'] = None
        securityinfo['client_id'] = None
        securityinfo['secret_id'] = None

        fsId = arcpy.GetParameterAsText(1)
        groupLayer = arcpy.GetParameterAsText(0)
        layerMap = arcpy.GetParameterAsText(2)
        matchEntireName = arcpy.GetParameterAsText(3)
        projection = arcpy.GetParameterAsText(4)
        if projection is not None and projection != '#' and projection != '':
            # outputPrinter(message="Projecting %s" % str(projection))
            pass
        else:
            projection = None
            # outputPrinter(message="No Projection defined")
        arcpy.SetParameterAsText(5, "true")
        scratchGDB = arcpy.env.scratchWorkspace
        scratchLayName = "tempAppGrpFS"
        scratchLayer = os.path.join(scratchGDB, scratchLayName)

        groupLayer = arcpy.mapping.Layer(groupLayer)

        fst = featureservicetools.featureservicetools(securityinfo)
        if fst.valid:
            # outputPrinter(message="Security handler created")
            fs = fst.GetFeatureService(itemId=fsId, returnURLOnly=False)
            if not fs is None:
                # Get a cursor to the layer map recordset
                rows = arcpy.SearchCursor(layerMap)
                # Get the fields
                fieldList = arcpy.ListFields(layerMap)
                # Initialize the translation dictionary
                layerToServiceLayer = {}
                # Loop through each input row and add it to the conversion dict
                for row in rows:
                    layerToServiceLayer[row.getValue(fieldList[1].name)] = row.getValue(fieldList[2].name)
                del row
                if groupLayer.isGroupLayer:
                    for lyr in groupLayer:
                        for key, value in layerToServiceLayer.iteritems():
                            if matchEntireName == 'true' and key == lyr.name:
                                matches = True
                            elif matchEntireName == 'false' and key in lyr.name:
                                matches = True
                            else:
                                matches = False
                            if matches:
                                if arcpy.Exists(lyr.name) == True:
                                    result = arcpy.GetCount_management(lyr.name)
                                    count = int(result.getOutput(0))
                                    if count > 0:
                                        layerNameFull = groupLayer.name + '\\' + lyr.name
                                        if projection is not None:
                                            outputPrinter(message="Projecting %s" % lyr.name)
                                            arcpy.Project_management(layerNameFull, scratchLayer, projection,
                                                                     "", "", "NO_PRESERVE_SHAPE", "")
                                        else:
                                            outputPrinter(message="Copying %s" % lyr.name)
                                            arcpy.FeatureClassToFeatureClass_conversion(layerNameFull, scratchGDB,
                                                                                        scratchLayName)
                                        desc = arcpy.Describe(scratchLayer)
                                        if desc.shapeType == 'Polygon':
                                            outputPrinter(message="Densifying %s" % lyr.name)
                                            arcpy.Densify_edit(scratchLayer, "ANGLE", "33 Unknown",
                                                               "0.33 Unknown", "4")
                                        syncLayer(fst, fs, scratchLayer, value, lyr.name)
                                    else:
                                        outputPrinter(message="%s does not contain any features, skipping" % lyr.name)
                                else:
                                    outputPrinter(message="%s does not exist, skipping" % lyr.name)
                                break
                else:
                    outputPrinter(message="Group layer is not a group layer", typeOfMessage='error')
            else:
                outputPrinter(message="Feature Service with id %s was not found" % fsId, typeOfMessage='error')
                arcpy.SetParameterAsText(5, "false")
        else:
            outputPrinter(fst.message, typeOfMessage='error')
            arcpy.SetParameterAsText(5, "false")
    except arcpy.ExecuteError:
        line, filename, synerror = trace()
        outputPrinter(message="error on line: %s" % line, typeOfMessage='error')
        outputPrinter(message="error in file name: %s" % filename, typeOfMessage='error')
        outputPrinter(message="with error message: %s" % synerror, typeOfMessage='error')
        outputPrinter(message="ArcPy Error Message: %s" % arcpy.GetMessages(2), typeOfMessage='error')
        arcpy.SetParameterAsText(5, "false")
    except (common.ArcRestHelperError), e:
        outputPrinter(message=e, typeOfMessage='error')
        arcpy.SetParameterAsText(5, "false")

# convert XY file to .dbf table
boxdbf = arcpy.CreateScratchName('xxx', '.dbf', '', scratch)
boxdbf = os.path.basename(boxdbf)
arcpy.TableToTable_conversion(os.path.join(scratch, 'xxxbox.csv'), scratch, boxdbf)
# make XY event layer from .dbf table
arcpy.MakeXYEventLayer_management(os.path.join(scratch, boxdbf), 'LONGITUDE', 'LATITUDE', 'boxlayer', xycs)
# convert event layer to preliminary line feature class with PointsToLine_management
arcpy.PointsToLine_management('boxlayer', 'xxMapOutline')
# densify MapOutline
arcpy.Densify_edit('xxMapOutline', 'DISTANCE', 0.0001)
# project to correct spatial reference
### THIS ASSUMES THAT OUTPUT COORDINATE SYSTEM IS HARN AND WE ARE IN OREGON OR WASHINGTON!!
if isNAD27:
    geotransformation = 'NAD_1927_To_NAD_1983_NADCON;NAD_1983_To_HARN_OR_WA'
else:
    geotransformation = 'NAD_1983_To_HARN_OR_WA'
geotransformation = ''
arcpy.Project_management('xxMapOutline', 'MapOutline', outSpRef, geotransformation, xycs)

## TICS
# calculate minTicLong, minTicLat, maxTicLong, maxTicLat

def simplify(self):
    try:
        # Init workspace
        # arcpy.env.overwriteOutput = 1
        duongDanNguon = "C:/Generalize_25_50/50K_Process.gdb"
        duongDanDich = "C:/Generalize_25_50/50K_Final.gdb"
        urlFile = '/ConfigSimplify.json'
        _algorithm = "BEND_SIMPLIFY"
        _tolerance = "50 Meters"
        _error_option = "NO_CHECK"
        _collapsed_point_option = "NO_KEEP"
        _checkExitLayer = False
        if arcpy.Exists(duongDanNguon + "/ThuyHe/SongSuoiL_KenhMuongL_SnapPBM") and \
                arcpy.Exists(duongDanNguon + "/PhuBeMat/PhuBeMat_Full"):
            # arcpy.CopyFeatures_management(duongDanNguon + "/PhuBeMat/PhuBeMat_LocMatNuoc", duongDanNguon + "/PhuBeMat/PhuBeMat")
            _checkExitLayer = True

        # Read the config file
        s1 = inspect.getfile(inspect.currentframe())
        s2 = os.path.dirname(s1)
        urlFile = s2 + urlFile
        arcpy.AddMessage("\n# Doc file cau hinh: \"{0}\"".format(urlFile))
        if os.path.exists(urlFile):
            fileConfig = open(urlFile)
            listLayerConfig = json.load(fileConfig)
            fileConfig.close()

            ############################### Simplify Polygon ########################################
            arcpy.Integrate_management([[duongDanNguon + "/PhuBeMat/PhuBeMat", 1]], "2 Meters")
            arcpy.AddMessage("\n# Bat dau Simplify Polygon")
            listPolygon = []
            fieldMappings = arcpy.FieldMappings()
            enableFields = []
            inputsMerge = []
            for objConfig in listLayerConfig:
                if objConfig["LayerType"] == "Polygon" and objConfig["RunStatus"] == "True":
                    if not (_checkExitLayer == False and objConfig["LayerName"] == "PhuBeMat_Full"):
                        temp = {
                            "LayerType": objConfig["LayerType"],
                            "DatasetName": objConfig["DatasetName"],
                            "LayerName": objConfig["LayerName"],
                            "featureLayer": "in_memory\\" + objConfig["LayerName"] + "_Layer",
                            "featureCopy": "in_memory\\" + objConfig["LayerName"] + "_Copy",
                            "featureCopyLayer": "in_memory\\" + objConfig["LayerName"] + "_Copy_Layer",
                            "FID_XXX": "FID_" + objConfig["LayerName"]
                        }
                        listPolygon.append(temp)
                elif objConfig["LayerType"] == "Polyline" and objConfig["RunStatus"] == "True" \
                        and objConfig["LayerName"] <> "DuongBinhDo":
                    if not (_checkExitLayer == False and objConfig["LayerName"] == "SongSuoiL_KenhMuongL_SnapPBM"):
                        arcpy.AddMessage("\n# Buffer lop: \"{0}\"".format(objConfig["LayerName"]))
                        layerPath = duongDanNguon + "/" + objConfig["DatasetName"] + "/" + objConfig["LayerName"]
                        arcpy.Buffer_analysis(in_features=layerPath,
                                              out_feature_class=layerPath + "_Buffer",
                                              buffer_distance_or_field="0.1 Meters",
                                              line_side="RIGHT")
                        temp = {
                            "LayerType": objConfig["LayerType"],
                            "DatasetName": objConfig["DatasetName"],
                            "LayerName": objConfig["LayerName"] + "_Buffer",
                            "featureLayer": "in_memory\\" + objConfig["LayerName"] + "_Buffer_Layer",
                            "featureCopy": "in_memory\\" + objConfig["LayerName"] + "_Buffer_Copy",
                            "featureCopyLayer": "in_memory\\" + objConfig["LayerName"] + "_Buffer_Copy_Layer",
                            "FID_XXX": "FID_" + objConfig["LayerName"]
                        }
                        listPolygon.append(temp)
            for element in listPolygon:
                arcpy.AddMessage("\n# Xu ly lop: {0}".format(element["LayerName"]))
                layerPath = duongDanNguon + "/" + element["DatasetName"] + "/" + element["LayerName"]
                arcpy.MakeFeatureLayer_management(layerPath, element["featureLayer"])
                arcpy.AddField_management(element["featureLayer"], element["FID_XXX"], "LONG")
                with arcpy.da.UpdateCursor(element["featureLayer"], ["OID@", element["FID_XXX"]]) as cursor:
                    for row in cursor:
                        row[1] = row[0]
                        cursor.updateRow(row)
                arcpy.CopyFeatures_management(layerPath, element["featureCopy"])
                arcpy.MakeFeatureLayer_management(element["featureCopy"], element["featureCopyLayer"])
                ## Field Mappings ##
                enableFields.append(element["FID_XXX"])
                fieldMappings.addTable(element["featureCopyLayer"])
                inputsMerge.append(element["featureCopyLayer"])
            for field in fieldMappings.fields:
                if field.name not in enableFields:
                    fieldMappings.removeFieldMap(fieldMappings.findFieldMapIndex(field.name))

            ## Merge ##
            arcpy.AddMessage("\n# Merge Polygon...")
            outPathMerge = "in_memory\\outPathMergeTemp"
            # outPathMerge = "C:/Generalize_25_50/50K_Process.gdb/DanCuCoSoHaTang/outPathMergeTemp"
            arcpy.Merge_management(inputsMerge, outPathMerge, fieldMappings)

            ## Simplify Polygon ##
            arcpy.AddMessage("\n# Simplify Polygon...")
            outPathSimplify = "in_memory\\outPathSimplifyTemp"
            # outPathSimplify = "C:/Generalize_25_50/50K_Process.gdb/DanCuCoSoHaTang/outPathSimplifyTemp"
            arcpy.SimplifyPolygon_cartography(in_features=outPathMerge,
                                              out_feature_class=outPathSimplify,
                                              algorithm=_algorithm,
                                              tolerance=_tolerance,
                                              minimum_area="0 SquareMeters",
                                              error_option=_error_option,
                                              collapsed_point_option=_collapsed_point_option)

            ## MakeLayerFeature ##
            outPathSimplifyLayer = "in_memory\\outPathSimplifyTempLayer"
            arcpy.MakeFeatureLayer_management(outPathSimplify, outPathSimplifyLayer)

            ## Update Shape Feature Class ##
            arcpy.AddMessage("\n# Update Shape Feature Class:")
            for element in listPolygon:
                arcpy.AddMessage("\n\t# Update {0}...".format(element["LayerName"]))
                ### MakeLayerFeature ###
                layerPath = duongDanNguon + "/" + element["DatasetName"] + "/" + element["LayerName"]
                arcpy.MakeFeatureLayer_management(layerPath, element["featureLayer"])
                ### Select ###
                strQuery = element["FID_XXX"] + " IS NOT NULL"
                arcpy.SelectLayerByAttribute_management(outPathSimplifyLayer, "NEW_SELECTION", strQuery)
                ### Copy To Table Temp ###
                outTableTemp = "in_memory\\outTableTemp"
                arcpy.CopyFeatures_management(outPathSimplifyLayer, outTableTemp)
                ### ... ###
                with arcpy.da.UpdateCursor(element["featureLayer"], ["OID@", "SHAPE@"]) as cursor:
                    for row in cursor:
                        found = False
                        with arcpy.da.UpdateCursor(outTableTemp, [element["FID_XXX"], "SHAPE@"]) as cursorSub:
                            for rowSub in cursorSub:
                                if row[0] == rowSub[0]:
                                    found = True
                                    row[1] = rowSub[1]
                                    cursor.updateRow(row)
                                    cursorSub.deleteRow()
                                    break
                        if found == False:
                            cursor.deleteRow()
            arcpy.AddMessage("\n# Hoan thanh Simplify Polygon!!!")

            ############################################## Simplify Line #############################
            arcpy.AddMessage("\n# Bat dau Simplify Line")
            listPolyLine = []
            fieldMappingLine = arcpy.FieldMappings()
            enableFieldLine = []
            inputsMergeLine = []
            for objConfig in listLayerConfig:
                if objConfig["LayerType"] == "Polyline" and objConfig["RunStatus"] == "True":
                    if not (_checkExitLayer == False and objConfig["LayerName"] == "SongSuoiL_KenhMuongL_SnapPBM"):
                        temp = {
                            "LayerType": objConfig["LayerType"],
                            "DatasetName": objConfig["DatasetName"],
                            "LayerName": objConfig["LayerName"],
                            "featureLayer": "in_memory\\" + objConfig["LayerName"] + "_Layer",
                            "featureCopy": "in_memory\\" + objConfig["LayerName"] + "_Copy",
                            "featureCopyLayer": "in_memory\\" + objConfig["LayerName"] + "_Copy_Layer",
                            "FID_XXX": "FID_" + objConfig["LayerName"]
                        }
                        listPolyLine.append(temp)
            for element in listPolyLine:
                arcpy.AddMessage("\n# Xu ly lop: {0}".format(element["LayerName"]))
                layerPath = duongDanNguon + "/" + element["DatasetName"] + "/" + element["LayerName"]
                if element["LayerName"] == "DuongBinhDo":
                    arcpy.AddField_management(layerPath, "OLD_OBJECTID", "LONG", None, None, None,
                                              "OLD_OBJECTID", "NULLABLE")
                    arcpy.CalculateField_management(layerPath, "OLD_OBJECTID", "!OBJECTID!", "PYTHON_9.3")
                arcpy.MakeFeatureLayer_management(layerPath, element["featureLayer"])
                arcpy.AddField_management(element["featureLayer"], element["FID_XXX"], "LONG")
                with arcpy.da.UpdateCursor(element["featureLayer"], ["OID@", element["FID_XXX"]]) as cursor:
                    for row in cursor:
                        row[1] = row[0]
                        cursor.updateRow(row)
                arcpy.CopyFeatures_management(layerPath, element["featureCopy"])
                arcpy.MakeFeatureLayer_management(element["featureCopy"], element["featureCopyLayer"])
                ## Field Mappings ##
                enableFieldLine.append(element["FID_XXX"])
                fieldMappingLine.addTable(element["featureCopyLayer"])
                inputsMergeLine.append(element["featureCopyLayer"])
            for field in fieldMappingLine.fields:
                if field.name not in enableFieldLine:
                    fieldMappingLine.removeFieldMap(fieldMappingLine.findFieldMapIndex(field.name))

            ## Merge ##
            arcpy.AddMessage("\n# Merge Polyline...")
            outPathMerge = "in_memory\\outPathMergeTemp"
            arcpy.Merge_management(inputsMergeLine, outPathMerge, fieldMappingLine)

            ## Simplify Polyline ##
            arcpy.AddMessage("\n# Simplify Polyline...")
            outPathSimplify = "in_memory\\outPathSimplifyTemp"
            '''
            arcpy.MakeFeatureLayer_management(duongDanNguon + "/ThuyHe/SongSuoiA", "ThuyHe_SongSuoiA_Lyr")
            arcpy.MakeFeatureLayer_management(duongDanNguon + "/ThuyHe/MatNuocTinh", "ThuyHe_MatNuocTinh_Lyr")
            arcpy.MakeFeatureLayer_management(duongDanNguon + "/ThuyHe/KenhMuongA", "ThuyHe_KenhMuongA_Lyr")
            in_barriers_Line = ["ThuyHe_SongSuoiA_Lyr", "ThuyHe_MatNuocTinh_Lyr", "ThuyHe_KenhMuongA_Lyr"]
            '''
            arcpy.SimplifyLine_cartography(in_features=outPathMerge,
                                           out_feature_class=outPathSimplify,
                                           algorithm=_algorithm,
                                           tolerance=_tolerance,
                                           collapsed_point_option=_collapsed_point_option)

            ## MakeLayerFeature ##
            outPathSimplifyLayer = "in_memory\\outPathSimplifyTempLayer"
            arcpy.MakeFeatureLayer_management(outPathSimplify, outPathSimplifyLayer)

            ## Update Shape Feature Class ##
            arcpy.AddMessage("\n# Update Shape Feature Class:")
            for element in listPolyLine:
                if element["LayerType"] == "Polyline":
                    arcpy.AddMessage("\n\t# Update {0}...".format(element["LayerName"]))
                    ### MakeLayerFeature ###
                    layerPath = duongDanNguon + "/" + element["DatasetName"] + "/" + element["LayerName"]
                    arcpy.MakeFeatureLayer_management(layerPath, element["featureLayer"])
                    ### Select ###
                    strQuery = element["FID_XXX"] + " IS NOT NULL"
                    arcpy.SelectLayerByAttribute_management(outPathSimplifyLayer, "NEW_SELECTION", strQuery)
                    ### Copy To Table Temp ###
                    outTableTemp = "in_memory\\outTableTemp"
                    arcpy.CopyFeatures_management(outPathSimplifyLayer, outTableTemp)
                    ### ... ###
                    with arcpy.da.UpdateCursor(element["featureLayer"], ["OID@", "SHAPE@"]) as cursor:
                        for row in cursor:
                            found = False
                            with arcpy.da.UpdateCursor(outTableTemp, [element["FID_XXX"], "SHAPE@"]) as cursorSub:
                                for rowSub in cursorSub:
                                    if row[0] == rowSub[0]:
                                        found = True
                                        row[1] = rowSub[1]
                                        cursor.updateRow(row)
                                        cursorSub.deleteRow()
                                        break
                            if found == False:
                                cursor.deleteRow()
            arcpy.AddMessage("\n# Hoan thanh Simplify Polyline!!!")

            ############################################## Snap Line to Polygon #############################
            arcpy.AddMessage("\n# Bat dau Snap")
            for elementPolygon in listPolygon:
                if elementPolygon["LayerType"] == "Polyline":
                    lineLayerName = elementPolygon["LayerName"][:elementPolygon["LayerName"].find('_Buffer')]
                    if lineLayerName <> "DuongBinhDo":
                        arcpy.AddMessage("\n\t# Snap: {0}".format(lineLayerName))
                        layerBufferPath = duongDanNguon + "/" + elementPolygon["DatasetName"] + "/" + elementPolygon["LayerName"]
                        layerLinePath = duongDanNguon + "/" + elementPolygon["DatasetName"] + "/" + lineLayerName
                        arcpy.Snap_edit(layerLinePath, [[layerBufferPath, "EDGE", self.snap_distance]])
            ############## Snap Other
            if _checkExitLayer:
                arcpy.AddMessage("\n\t# Snap other: {0}".format("PhuBeMat"))
                arcpy.Integrate_management([[duongDanNguon + "/PhuBeMat/PhuBeMat", 1]], "2 Meters")
                arcpy.Densify_edit(duongDanNguon + "/PhuBeMat/PhuBeMat", "DISTANCE", "2 Meters", None, None)
                arcpy.Snap_edit(duongDanNguon + "/PhuBeMat/PhuBeMat",
                                [[duongDanNguon + "/ThuyHe/SongSuoiL_KenhMuongL_SnapPBM", "EDGE", "35 Meters"]])
                arcpy.Integrate_management([[duongDanNguon + "/ThuyHe/SongSuoiL_KenhMuongL_SnapPBM", 1],
                                            [duongDanNguon + "/PhuBeMat/PhuBeMat", 2]], "2 Meters")
                arcpy.Erase_analysis(in_features=duongDanNguon + "/PhuBeMat/PhuBeMat_Full",
                                     erase_features=duongDanNguon + "/PhuBeMat/PhuBeMat",
                                     out_feature_class=duongDanNguon + "/PhuBeMat/PhuBeMat_Lo")
                arcpy.CalculateField_management(duongDanNguon + "/PhuBeMat/PhuBeMat_Lo", "maNhanDang",
                                                '"temp123"', "PYTHON_9.3")
                arcpy.Append_management([duongDanNguon + "/PhuBeMat/PhuBeMat_Lo"],
                                        duongDanNguon + "/PhuBeMat/PhuBeMat", "NO_TEST", None, None)
                arcpy.MultipartToSinglepart_management(duongDanNguon + "/PhuBeMat/PhuBeMat",
                                                       duongDanNguon + "/PhuBeMat/PhuBeMat2")
                arcpy.MakeFeatureLayer_management(duongDanNguon + "/PhuBeMat/PhuBeMat2", "PhuBeMat_Temp_Lyr")
                arcpy.SelectLayerByAttribute_management("PhuBeMat_Temp_Lyr", "NEW_SELECTION",
                                                        "maNhanDang = 'temp123'")
                arcpy.Eliminate_management(in_features="PhuBeMat_Temp_Lyr",
                                           out_feature_class=duongDanNguon + "/PhuBeMat/PhuBeMat3",
                                           selection="LENGTH")
                arcpy.Densify_edit(duongDanNguon + "/ThuyHe/SongSuoiL", "DISTANCE", "2 Meters", None, None)
                arcpy.Snap_edit(duongDanNguon + "/ThuyHe/SongSuoiL",
                                [[duongDanNguon + "/ThuyHe/SongSuoiL_KenhMuongL_SnapPBM", "EDGE", "2 Meters"]])
                arcpy.CopyFeatures_management(duongDanNguon + "/PhuBeMat/PhuBeMat3",
                                              duongDanNguon + "/PhuBeMat/PhuBeMat")

            ############################################## Copy to final #############################
            for element in listPolygon:
                if element["LayerType"] == "Polygon":
                    if element["LayerName"] <> "PhuBeMat_Full":
                        layerPath = duongDanNguon + "/" + element["DatasetName"] + "/" + element["LayerName"]
                        layerFinalPath = duongDanDich + "/" + element["DatasetName"] + "/" + element["LayerName"]
                        arcpy.DeleteField_management(layerPath, [element["FID_XXX"]])
                        arcpy.CopyFeatures_management(layerPath, layerFinalPath)
            for element in listPolyLine:
                if element["LayerType"] == "Polyline":
                    if element["LayerName"] <> "SongSuoiL_KenhMuongL_SnapPBM":
                        layerPath = duongDanNguon + "/" + element["DatasetName"] + "/" + element["LayerName"]
                        layerFinalPath = duongDanDich + "/" + element["DatasetName"] + "/" + element["LayerName"]
                        arcpy.DeleteField_management(layerPath, [element["FID_XXX"]])
                        arcpy.CopyFeatures_management(layerPath, layerFinalPath)
            # arcpy.AddMessage("\n# Hoan thanh!!!")
        else:
            arcpy.AddMessage("\n# Khong tim thay file cau hinh: \"{0}\"".format(urlFile))
    except OSError as error:
        arcpy.AddMessage("Error" + error.message)
    except ValueError as error:
        arcpy.AddMessage("Error" + error.message)
    except arcpy.ExecuteError as error:
        arcpy.AddMessage("Error" + error.message)
    finally:
        arcpy.Delete_management("in_memory")

arcpy.MakeFeatureLayer_management('sp_okBlack', 'lyr',
                                  "sp_type = 'land-noeez' AND sp_name IN ('%s')" % "','".join(names_Black))
arcpy.CalculateField_management('lyr', 'sp_type', "'land'", 'PYTHON_9.3')
arcpy.CalculateField_management('lyr', 'rgn_type', "'land'", 'PYTHON_9.3')  # still need to dissolve

# ad-hoc Antarctica
arcpy.env.outputCoordinateSystem = sr_ant
arcpy.MakeFeatureLayer_management('sp_okBlack', 'lyr', "sp_type = 'land' AND sp_name ='Antarctica'")
arcpy.Dissolve_management('lyr', 'aq_land')
arcpy.DeleteFeatures_management('lyr')
arcpy.MakeFeatureLayer_management('sp_okBlack', 'lyr', "sp_type = 'ccamlr'")
arcpy.CalculateField_management('lyr', 'sp_type', "'eez-ccamlr'", 'PYTHON_9.3')
arcpy.Select_analysis('sp_okBlack', 'aq_ccamlr', "sp_type = 'ccamlr'")

# create thiessen polygons used to split slivers
arcpy.Densify_edit('aq_ccamlr', 'DISTANCE', '10 Kilometers')
arcpy.FeatureVerticesToPoints_management('aq_ccamlr', 'aq_ccamlr_pts', 'ALL')

# delete interior points
arcpy.Dissolve_management('aq_ccamlr', 'aq_ccamlr_d')
arcpy.MakeFeatureLayer_management('aq_ccamlr_pts', 'lyr_aq_ccamlr_pts')
arcpy.SelectLayerByLocation_management('lyr_aq_ccamlr_pts', 'WITHIN_CLEMENTINI', 'aq_ccamlr_d')
arcpy.DeleteFeatures_management('lyr_aq_ccamlr_pts')

# generate thiessen polygons of gadm for intersecting with land slivers
arcpy.env.extent = 'aq_land'
arcpy.CreateThiessenPolygons_analysis('aq_ccamlr_pts', 'aq_thiessen', 'ALL')
arcpy.Dissolve_management('aq_thiessen', 'aq_thiessen_d',
                          ['sp_type', 'sp_id', 'sp_name', 'sp_key', 'rgn_type', 'rgn_id', 'rgn_name',
                           'rgn_key', 'cntry_id12', 'rgn_id12', 'rgn_name12'])
arcpy.RepairGeometry_management('aq_thiessen_d')

# get slivers, which are land but not identified by gadm; intersect with thiessen so they break at junctions

def onClick(self):
    # get and update Editor object
    global editor
    editor.get_current()
    # get layer with selection
    workspace = getworkspace()
    # if there is a layer with selection:
    if workspace[u'data'] is not None:
        # for every layer with selection:
        for data in workspace[u'data']:
            # if layer's workspace and editor's workspace are the same location
            if data[u'gdb'] == editor.path:
                # get Layer object
                layer = data[u'lyr']
                timer.start()
                # if WKT - generalize curves
                if json_or_wkt == u'WKT':
                    arcpy.Densify_edit(in_features=layer,
                                       densification_method=u'OFFSET',
                                       max_deviation=u'0.01 Meters')
                editor.start_operation()
                # run processing
                createcurves(layer=layer, jsonwkt=json_or_wkt,
                             angle_th=settings_dict[u'Curves Angle'],
                             radius_th=settings_dict[u'Curves Radius'])
                editor.stop_operation('Create Curves')
                arcpy.RefreshActiveView()
                timer.stop(message=u'Create Curves')
            elif editor.path is None:
                # Fire a tool from the toolbox if the layer isn't in an edit session
                layer = data[u'lyr']
                title = u'Warning'
                message = u'{0} layer is not in edit session\nWould you run a geoprocessing tool?'.format(layer.name)
                mb_type = 1
                answer = pythonaddins.MessageBox(message, title, mb_type)
                if answer == u'OK':
                    relpath = os.path.dirname(__file__)
                    toolbox = os.path.join(relpath, u'scripts', u'toolbox', u'geometry_tools.pyt')
                    pythonaddins.GPToolDialog(toolbox, u'CreateCurves')
    else:
        if editor.path is None:
            # Fire a tool from the toolbox if there is no selection in a layer and no edit session
            title = u'Warning'
            message = u'{0}\nWould you run a geoprocessing tool?'.format(workspace[u'message'])
            mb_type = 1
            answer = pythonaddins.MessageBox(message, title, mb_type)
            if answer == u'OK':
                relpath = os.path.dirname(__file__)
                toolbox = os.path.join(relpath, u'scripts', u'toolbox', u'geometry_tools.pyt')
                pythonaddins.GPToolDialog(toolbox, u'CreateCurves')
        else:
            title = u'Warning'
            message = u'{0}'.format(workspace[u'message'])
            mb_type = 0
            pythonaddins.MessageBox(message, title, mb_type)

current_step += 1
arcpy.AddMessage("[step " + str(current_step) + " of " + str(total_steps) +
                 "] Moving numeric alias values from A1 and A2 to AN fields...")
rows = arcpy.UpdateCursor(outputFeatureClass)
MoveNumericA1orA2ToANfield(rows)
del rows

# delete the temp/scratch layer
arcpy.Delete_management(countySourceTEMP, "")

# remove curves from the data in our schema
current_step += 1
arcpy.AddMessage("[step " + str(current_step) + " of " + str(total_steps) +
                 "] Removing curves, if any...")
arcpy.Densify_edit(outputFeatureClass, "ANGLE", "", "", "")

# ensure that vertices are not too close, causing errors for the Roads and Highways system,
# which does not allow vertices within 1 meter - this tool also removes bezier curves and arc
# segments, converting them to straight lines, so we may not need the Densify tool above,
# but let's keep it for now.
current_step += 1
arcpy.AddMessage("[step " + str(current_step) + " of " + str(total_steps) +
                 "] Generalizing the line features...")
arcpy.Generalize_edit(outputFeatureClass, "2 Meters")

# remove any segments that are not within the county
current_step += 1
arcpy.AddMessage("[step " + str(current_step) + " of " + str(total_steps) +
                 "] Begin removing segments that are outside of the county...")
# add space to county name for county query if BoxElder, SaltLake, or SanJuan
queryCountyName = countyName
if queryCountyName == "BoxElder":
    queryCountyName = "Box Elder"

    out_path = working_gdb,
    out_name = working_fc
)
arcpy.AddMessage("Working feature class created: " + working_gdb + '\\' + working_fc)

# 2) Make Feature Layer from Selection
arcpy.MakeFeatureLayer_management(
    in_features = working_gdb + '\\' + working_fc,
    out_layer = 'road_seg_working',
    where_clause = street_select_expression
)

# 3) Densify
arcpy.Densify_edit(
    in_features = 'road_seg_working',
    densification_method = "DISTANCE",
    distance = densify_distance
)
arcpy.AddMessage("Polylines densified by distance every " + str(densify_distance))

# 4) Feature Vertices To Points
arcpy.FeatureVerticesToPoints_management(
    in_features = 'road_seg_working',
    out_feature_class = fc_Densify_VertPoints,
    point_location = "ALL"
)
arcpy.AddMessage("Polyline vertices saved as Points: " + fc_Densify_VertPoints)

# 5) Project
arcpy.Project_management(
    in_dataset = fc_Densify_VertPoints,

def writeMessageFile():
    global DEBUG_GEOMETRY_CONVERSION, appendFile, foundEmptySIDC, FORCE_UNIQUE_IDs
    try:
        arcpy.AddMessage("Starting: Write/Append Message File")

        # Get input feature class
        inputFC = arcpy.GetParameter(0)
        if (inputFC == "") or (inputFC is None):
            inputFC = os.path.join(MilitaryUtilities.dataPath,
                                   r"/test_inputs.gdb/FriendlyOperations/FriendlyUnits")
        desc = arcpy.Describe(inputFC)
        if desc is None:
            arcpy.AddError("Bad Input Feature Class")
            return
        shapeType = desc.shapeType

        # Get output filename
        outputFile = arcpy.GetParameterAsText(1)
        # Get standard
        standard = arcpy.GetParameterAsText(2)
        # Message Type Field
        messageTypeField = arcpy.GetParameterAsText(3)
        # Sort Order
        orderBy = arcpy.GetParameterAsText(4)
        # Disable Geo Transformation and use default SIDC
        disableGeoTransform = arcpy.GetParameterAsText(5)
        if not ((disableGeoTransform == "") or (disableGeoTransform is None)):
            DEBUG_GEOMETRY_CONVERSION = (disableGeoTransform.upper() == "TRUE")

        arcpy.AddMessage("Running with Parameters:")
        arcpy.AddMessage("0 - Input FC: " + str(inputFC))
        arcpy.AddMessage("1 - outputXMLFile: " + str(outputFile))
        arcpy.AddMessage("2 - symbology standard: " + str(standard))
        arcpy.AddMessage("3 - MessageTypeField: " + messageTypeField)
        arcpy.AddMessage("4 - orderBy: " + orderBy)
        arcpy.AddMessage("5 - disableGeoTransform: " + disableGeoTransform)

        # initialize the standard
        MilitaryUtilities.getGeometryConverterStandard(standard)

        if DEBUG_GEOMETRY_CONVERSION:
            arcpy.AddWarning("Running in Debug Geo-Transformation Mode, symbol will use default/unknown SIDC for shape")

        if not ((messageTypeField == "") or (messageTypeField is None)):
            # make sure the messageTypeField exists in the input
            if messageTypeField in [field.name for field in desc.Fields]:
                MilitaryUtilities.MessageTypeField = messageTypeField
            else:
                arcpy.AddWarning("MessageTypeField does not exist in input: " + messageTypeField + " , using default")

        # Check Output Filename & handle the case where we are appending
        if (outputFile == "") or (outputFile is None):
            # For a standalone test (debug) if no output filename provided
            if DEBUG_GEOMETRY_CONVERSION:
                defaultOutputName = "Mil2525CMessages-NoTransform.xml"
            else:
                defaultOutputName = "Mil2525CMessages.xml"
            outputFile = os.path.join(os.path.dirname(__file__), defaultOutputName)
            messageFile = open(outputFile, "w")
            arcpy.AddWarning("No Output set, using default: " + str(outputFile))
        else:
            arcpy.AddMessage("Append File set to " + str(appendFile))
            if (not appendFile):
                messageFile = open(outputFile, "w")
            elif (not os.path.isfile(outputFile)):
                arcpy.AddWarning("Can't Append: Output File does not exist, creating new file")
                messageFile = open(outputFile, "w")
            else:
                arcpy.AddMessage("Appending Existing File...")
                # Appending the file is a bit more complicated because we have to remove the
                # "</messages>" from the end of the original file, therefore it can't just be
                # opened as an append "a+" - we have to create a temp file, read the original file in,
                # except for the line "</messages>", and then write back out
                fileToAppend = open(outputFile, "r")
                # Workaround/Note: didn't work in ArcCatalog unless I opened the temp file this way
                temporaryFile = tempfile.NamedTemporaryFile(mode="w", delete=False)
                # Copy the file line by line, but don't include the last end tag, ex. </messages>
                finalTag = "</%s>" % MilitaryUtilities.getMessageRootTag()
                finalTagFound = False
                while True:
                    line = fileToAppend.readline()
                    if line:
                        if not finalTag in line:  # ex. "</messages>"
                            temporaryFile.write(line)
                        else:
                            finalTagFound = True
                    else:
                        break
                if (not finalTagFound):
                    arcpy.AddError("XML Append File will be corrupt: Could not find Tag: " + finalTag)
                # now write those lines back
                fileToAppend.close()
                temporaryFile.close()
                messageFile = open(outputFile, "w")
                temporaryFile = open(temporaryFile.name, "r")
                while True:
                    line = temporaryFile.readline()
                    if line:
                        messageFile.write(line)
                    else:
                        break
                temporaryFile.close()

        if (messageFile is None):
            arcpy.AddError("Output file can't be created, exiting")
            return

        ################## Setup for export ############################
        # We need to set overwriteOutput=true or the tools below may fail
        previousOverwriteOutputSetting = arcpy.env.overwriteOutput
        arcpy.env.overwriteOutput = True

        # Densify if this is a polygon FC
        if (shapeType == "Polygon"):
            try:
                densifiedFC = "in_memory/DensifiedFC"
                arcpy.CopyFeatures_management(inputFC, densifiedFC)
                arcpy.Densify_edit(densifiedFC, "ANGLE", "", "", 10)
                inputFC = densifiedFC
            except:
                arcpy.AddWarning("Could not densify polygons, skipping. Densify_edit tool failed - is Desktop Standard License available?")

        # Get fields and coded domains
        CODE_FIELD_NAME = "code"
        DESCRIPTION_FIELD_NAME = "description"
        fieldNameList = []
        fieldNameToDomainName = {}
        for field in desc.Fields:
            if not (field.name in DictionaryConstants.MILFEATURES_FIELD_EXCLUDE_LIST):
                fieldNameList.append(field.name)
                # Get domain if any
                if (field.domain is not None and field.domain != ""):
                    fieldNameToDomainName[field.name] = field.domain
                    dataPath = desc.path
                    gdbPath = dataPath.split(".gdb")[0]
                    gdbPath += ".gdb"
                    arcpy.DomainToTable_management(gdbPath, field.domain, "in_memory/" + field.domain,
                                                   CODE_FIELD_NAME, DESCRIPTION_FIELD_NAME)
        # print fieldNameList

        # restore this setting (set above)
        arcpy.env.overwriteOutput = previousOverwriteOutputSetting

        # Projected or geographic?
        xname = "lon"
        yname = "lat"
        isProjected = desc.spatialReference.type == "Projected"
        if (isProjected):
            xname = "x"
            yname = "y"
        wkid = desc.spatialReference.factoryCode

        ################ Begin Export ##########################

        # Open a search cursor (if possible)
        try:
            rows = arcpy.SearchCursor(inputFC, "", "", "", orderBy)
        except:
            arcpy.AddError("Could not open Feature Class " + str(inputFC))
            if (not ((orderBy == "") and not (orderBy is None))):
                arcpy.AddError("OrderBy Search Option: " + orderBy)
            raise Exception("Bad Feature Class Input")

        # Dictionary to map unique designation to ID
        unitDesignationToId = dict()
        featureFields = desc.fields

        ###################### Write XML file #########################
        if not appendFile:
            # Ex: Next line writes: messageFile.write("<geomessages>\n")
            messageFile.write("<%s>\n" % MilitaryUtilities.getMessageRootTag())

        rowCount = 0
        # Iterate through the rows in the cursor
        for row in rows:
            shape = row.shape.getPart(0)
            arcpy.AddMessage("Processing row: " + str(rowCount))

            ##############################################
            # Map Unique Names to same Unique IDs
            # IMPORTANT: this section tries to keep Unique Designations mapped to the
            # same Message Unique ID (so they will move in Message Processor), so...
            # WARNING: if you have repeated Unique Designations,
            # they are going to get mapped to the same Unique ID
            uniqueId = "{%s}" % str(uuid.uuid4())
            uniqueDesignation = str(rowCount)  # fallback value in case field does not exist
            try:
                uniqueDesignation = row.getValue(MilitaryUtilities.UniqueDesignationField)
                if ((uniqueDesignation is None) or (uniqueDesignation == "")):
                    arcpy.AddWarning("Unique Designation is Empty")
                elif (DEBUG_GEOMETRY_CONVERSION or FORCE_UNIQUE_IDs):
                    pass
                else:
                    # Otherwise, see if we have seen this Designation before
                    if (uniqueDesignation in unitDesignationToId):
                        arcpy.AddMessage("Creating update message for repeated Unique Designation: " + uniqueDesignation)
                        uniqueId = unitDesignationToId[uniqueDesignation]
                    else:
                        unitDesignationToId[uniqueDesignation] = uniqueId
            except:
                arcpy.AddWarning("Could not find Unique Designation field in row: " + str(rowCount))

            ##############################################
            # work with "sidc" or "sic"
            try:
                SymbolIdCodeVal = row.getValue(MilitaryUtilities.SidcFieldChoice1)  # "sic"
            except:
                try:
                    SymbolIdCodeVal = row.getValue(MilitaryUtilities.SidcFieldChoice2)  # "sidc"
                except:
                    SymbolIdCodeVal = None

            # Note/Important: attributes need to be set in converter so needs declared before geometrytoControlPoints
            attributes = {}
            conversionNotes = None
            attributes[DictionaryConstants.Tag_Wkid] = wkid  # needed by conversion

            if (SymbolIdCodeVal is None) or (SymbolIdCodeVal == ""):
                SymbolIdCodeVal = DictionaryConstants.getDefaultSidcForShapeType(shapeType)
                if not (DEBUG_GEOMETRY_CONVERSION):
                    foundEmptySIDC = True
                    msg = "SIDC is not set, using default: " + SymbolIdCodeVal
                    arcpy.AddWarning(msg)
            # TODO: we may need to add an option to Disable the geometry conversion
            # *but* not to change the SIDC to the default one, if you don't want the SIDC to change
            # when "Disable Geometry Conversion" is checked; comment/uncomment these lines to
            # set this to false/disable this behavior:
            # elif False :
            elif DEBUG_GEOMETRY_CONVERSION:
                print "Using Debug SIDC"
                conversionNotes = "Original SIDC: " + SymbolIdCodeVal
                uniqueDesignation = SymbolIdCodeVal  # use this label for debugging
                SymbolIdCodeVal = DictionaryConstants.getDefaultSidcForShapeType(shapeType)

            controlPointsString = MilitaryUtilities.parseGeometryToControlPoints(shape)
            requiresConversion = MilitaryUtilities.geoConverter.requiresConversion(SymbolIdCodeVal)
            if requiresConversion and not DEBUG_GEOMETRY_CONVERSION:
                msg = "SIC: " + SymbolIdCodeVal + " requires conversion/translation"
                print msg
                arcpy.AddMessage(msg)
                transformedPoints, conversionNotes = \
                    MilitaryUtilities.geoConverter.geometrytoControlPoints(SymbolIdCodeVal, controlPointsString, attributes)
                if (conversionNotes == DictionaryConstants.CONVERSION_IGNORE_SECOND_LINE):
                    continue
                elif (transformedPoints is None):
                    arcpy.AddWarning("Conversion FAILED for SIC: " + SymbolIdCodeVal + \
                                     ", Notes: " + conversionNotes + " (using original points)")
                else:
                    controlPointsString = transformedPoints

            # Write Output Message
            # Ex: Next line writes: ex. "\t<geomessage v=\"1.0\">\n"
            messageFile.write("\t<%s v=\"%s\">\n" % (MilitaryUtilities.getMessageTag(), \
                                                     MilitaryUtilities.getMessageVersion()))
            messageFile.write("\t\t<sic>%s</sic>\n" % SymbolIdCodeVal)

            # Try to get a message type if the field exists
            try:
                messageTypeVal = row.getValue(MilitaryUtilities.MessageTypeField)
                messageFile.write("\t\t<_type>%s</_type>\n" % messageTypeVal)
            except:
                # if not, default to position_report
                messageFile.write("\t\t<_type>%s</_type>\n" % DictionaryConstants.DefaultMessageType)

            ## TODO: see if other actions are valid besides just "update"
            messageFile.write("\t\t<_action>%s</_action>\n" % DictionaryConstants.DefaultMessageAction)  # = update
            messageFile.write("\t\t<_id>%s</_id>\n" % uniqueId)
            messageFile.write("\t\t<_control_points>%s</_control_points>\n" % controlPointsString)
            if not ((conversionNotes is None) or (conversionNotes == "")):
                messageFile.write("\t\t<ConversionNotes>%s</ConversionNotes>\n" % conversionNotes)

            # Note: written with attributes below:
            messageFile.write("\t\t<_wkid>%i</_wkid>\n" % wkid)

            if not ((uniqueDesignation is None) or (uniqueDesignation == "")):
                messageFile.write("\t\t<%s>%s</%s>\n" % (DictionaryConstants.Tag_UniqueDesignation, \
                                                         uniqueDesignation, DictionaryConstants.Tag_UniqueDesignation))

            # Check on Military Geometries for Lines/Areas
            if (shapeType is "Point"):
                messageFile.write("\t\t<altitude_depth>%d</altitude_depth>\n" % shape.Z)

            rowCount = rowCount + 1
            messageFile.write("\t\t<MessageCount>%s</MessageCount>\n" % str(rowCount))

            for key in attributes:
                attrValAsString = str(attributes[key])
                messageFile.write("\t\t<" + key + ">" + attrValAsString + "</" + key + ">\n")

            ################### Common Fields/Attributes #####################
            # Write out remaining table fields as Tag attributes
            for field in fieldNameList:
                try:
                    # But don't repeat existing tags we've created
                    if field in DictionaryConstants.MESSAGES_TAG_LIST:
                        rowVal = None
                    else:
                        rowVal = row.getValue(field)
                except:
                    print "Could not get row val for field " + field
                    rowVal = None
                if (rowVal is not None) and (rowVal != ''):
                    try:
                        fieldValAsString = str(row.getValue(field))
                        messageFile.write("\t\t<" + field + ">" + fieldValAsString + "</" + field + ">\n")
                    except:
                        # fixed issue #19
                        fieldValAsString = row.getValue(field)
                        decodedstring = fieldValAsString.encode('ascii', 'ignore')
                        arcpy.AddMessage("trying to fix unicode problem, changing " +
                                         fieldValAsString + " -> " + decodedstring)
                        messageFile.write("\t\t<" + field + ">" + decodedstring + "</" + field + ">\n")
            ################### Common Fields/Attributes #####################

            # Ex: messageFile.write("\t</geomessage>\n")
            messageFile.write("\t</%s>\n" % MilitaryUtilities.getMessageTag())

        # Ex: messageFile.write("</geomessages>")
        messageFile.write("</%s>\n" % MilitaryUtilities.getMessageRootTag())

        arcpy.AddMessage("Rows Processed: " + str(rowCount))
        if foundEmptySIDC:
            arcpy.AddWarning("IMPORTANT: Some rows did not have SIDC set - you may need to run CalcSIDCField tool first.")
        arcpy.AddMessage("Write/Append Message File Complete")
    except:
        print "Exception: "
        tb = traceback.format_exc()
        print tb
        arcpy.AddError("Exception")
        arcpy.AddError(tb)

def main(fcInputCenterline, fcInputPolygon, fcSegmentedPolygons, workspaceTemp,
         dblPointDensity=10.0, dblJunctionBuffer=120.00):
    arcpy.AddMessage("GNAT Divide Polygon By Segment Tool")
    arcpy.AddMessage("GNAT DPS: Saving Polygon Results to: " + fcSegmentedPolygons)
    arcpy.AddMessage("GNAT DPS: Saving Temporary Files to: " + workspaceTemp)

    arcpy.env.OutputMFlag = "Disabled"
    arcpy.env.OutputZFlag = "Disabled"
    arcpy.AddMessage("arcpy M Output Flag: " + str(arcpy.env.OutputMFlag))

    ## Copy Centerline to Temp Workspace
    fcCenterline = gis_tools.newGISDataset(workspaceTemp, "GNAT_DPS_Centerline")
    arcpy.CopyFeatures_management(fcInputCenterline, fcCenterline)

    ## Build Thiessan Polygons
    arcpy.AddMessage("GNAT DPS: Building Thiessan Polygons")
    arcpy.env.extent = fcInputPolygon  ## Set full extent to build Thiessan polygons over entire line network.
    arcpy.Densify_edit(fcCenterline, "DISTANCE", str(dblPointDensity) + " METERS")

    fcTribJunctionPoints = gis_tools.newGISDataset(workspaceTemp, "GNAT_DPS_TribJunctionPoints")
    # All Segment Junctions??
    # gis_tools.findSegmentJunctions(fcCenterline, fcTribJunctionPoints, "ALL")
    arcpy.Intersect_analysis(fcCenterline, fcTribJunctionPoints, output_type="POINT")

    fcThiessanPoints = gis_tools.newGISDataset(workspaceTemp, "GNAT_DPS_ThiessanPoints")
    arcpy.FeatureVerticesToPoints_management(fcCenterline, fcThiessanPoints, "ALL")

    lyrThiessanPoints = gis_tools.newGISDataset("Layer", "lyrThiessanPoints")
    arcpy.MakeFeatureLayer_management(fcThiessanPoints, lyrThiessanPoints)
    arcpy.SelectLayerByLocation_management(lyrThiessanPoints, "INTERSECT", fcTribJunctionPoints,
                                           str(dblJunctionBuffer) + " METERS", "NEW_SELECTION")

    fcThiessanPoly = gis_tools.newGISDataset(workspaceTemp, "GNAT_DPS_ThiessanPoly")
    arcpy.CreateThiessenPolygons_analysis(lyrThiessanPoints, fcThiessanPoly, "ONLY_FID")

    fcThiessanPolyClip = gis_tools.newGISDataset(workspaceTemp, "GNAT_DPS_TheissanPolyClip")
    arcpy.Clip_analysis(fcThiessanPoly, fcInputPolygon, fcThiessanPolyClip)

    ### Code to Split the Junction Thiessan Polys ###
    arcpy.AddMessage("GNAT DPS: Split Junction Thiessan Polygons")
    lyrTribThiessanPolys = gis_tools.newGISDataset("Layer", "lyrTribThiessanPolys")
    arcpy.MakeFeatureLayer_management(fcThiessanPolyClip, lyrTribThiessanPolys)
    arcpy.SelectLayerByLocation_management(lyrTribThiessanPolys, "INTERSECT", fcTribJunctionPoints,
                                           selection_type="NEW_SELECTION")

    fcSplitPoints = gis_tools.newGISDataset(workspaceTemp, "GNAT_DPS_SplitPoints")
    arcpy.Intersect_analysis([lyrTribThiessanPolys, fcCenterline], fcSplitPoints, output_type="POINT")

    arcpy.AddMessage("GNAT DPS: Moving Starting Vertices of Junction Polygons")
    geometry_functions.changeStartingVertex(fcTribJunctionPoints, lyrTribThiessanPolys)
    arcpy.AddMessage("GNAT DPS: Vertices Moved.")

    fcThiessanTribPolyEdges = gis_tools.newGISDataset(workspaceTemp, "GNAT_DPS_ThiessanTribPolyEdges")
    arcpy.FeatureToLine_management(lyrTribThiessanPolys, fcThiessanTribPolyEdges)

    fcSplitLines = gis_tools.newGISDataset(workspaceTemp, "GNAT_DPS_SplitLines")
    arcpy.SplitLineAtPoint_management(fcThiessanTribPolyEdges, fcSplitPoints, fcSplitLines, "0.1 METERS")

    fcMidPoints = gis_tools.newGISDataset(workspaceTemp, "GNAT_DPS_MidPoints")
    arcpy.FeatureVerticesToPoints_management(fcSplitLines, fcMidPoints, "MID")
    arcpy.Near_analysis(fcMidPoints, fcTribJunctionPoints, location="LOCATION")
    arcpy.AddXY_management(fcMidPoints)

    fcTribToMidLines = gis_tools.newGISDataset(workspaceTemp, "GNAT_DPS_TribToMidLines")
    arcpy.XYToLine_management(fcMidPoints, fcTribToMidLines, "POINT_X", "POINT_Y", "NEAR_X", "NEAR_Y")

    ### Select Polys by Centerline ###
    arcpy.AddMessage("GNAT DPS: Select Polygons By Centerline")
    fcThiessanEdges = gis_tools.newGISDataset(workspaceTemp, "GNAT_DPS_ThiessanEdges")
    arcpy.FeatureToLine_management(fcThiessanPolyClip, fcThiessanEdges)

    fcAllEdges = gis_tools.newGISDataset(workspaceTemp, "GNAT_DPS_AllEdges")
    arcpy.Merge_management([fcTribToMidLines, fcThiessanEdges, fcCenterline], fcAllEdges)  # include fcCenterline if needed

    fcAllEdgesPolygons = gis_tools.newGISDataset(workspaceTemp, "GNAT_DPS_AllEdgesPolygons")
    arcpy.FeatureToPolygon_management(fcAllEdges, fcAllEdgesPolygons)

    fcAllEdgesPolygonsClip = gis_tools.newGISDataset(workspaceTemp, "GNAT_DPS_AllEdgesPolygonsClip")
    arcpy.Clip_analysis(fcAllEdgesPolygons, fcInputPolygon, fcAllEdgesPolygonsClip)

    fcPolygonsJoinCenterline = gis_tools.newGISDataset(workspaceTemp, "GNAT_DPS_PolygonsJoinCenterline")
    arcpy.SpatialJoin_analysis(fcAllEdgesPolygonsClip, fcCenterline, fcPolygonsJoinCenterline,
                               "JOIN_ONE_TO_MANY", "KEEP_ALL", match_option="SHARE_A_LINE_SEGMENT_WITH")

    fcPolygonsDissolved = gis_tools.newGISDataset(workspaceTemp, "GNAT_DPS_PolygonsDissolved")
    arcpy.Dissolve_management(fcPolygonsJoinCenterline, fcPolygonsDissolved, "JOIN_FID", multi_part="SINGLE_PART")

    # fcSegmentedPolygons = gis_tools.newGISDataset(workspaceOutput, "SegmentedPolygons")
    lyrPolygonsDissolved = gis_tools.newGISDataset("Layer", "lyrPolygonsDissolved")
    arcpy.MakeFeatureLayer_management(fcPolygonsDissolved, lyrPolygonsDissolved)
    arcpy.SelectLayerByAttribute_management(lyrPolygonsDissolved, "NEW_SELECTION", """ "JOIN_FID" = -1 """)
    arcpy.Eliminate_management(lyrPolygonsDissolved, fcSegmentedPolygons, "LENGTH")

    arcpy.AddMessage("GNAT DPS: Tool Complete.")
    return

def main(*argv): fsId = None layerName = None dataToAppend = None fst = None fs = None results = None fl = None existingDef = None scratchGDB = None scratchLayName = None scratchLayer = None try: arcpy.env.overwriteOutput = True proxy_port = None proxy_url = None securityinfo = {} securityinfo['proxy_url'] = proxy_url securityinfo['proxy_port'] = proxy_port securityinfo['referer_url'] = None securityinfo['token_url'] = None securityinfo['certificatefile'] = None securityinfo['keyfile'] = None securityinfo['client_id'] = None securityinfo['secret_id'] = None username = argv[0] password = argv[1] siteURL = argv[2] version = arcpy.GetInstallInfo()['Version'] if re.search("^10\.[0-2]", version) is not None: bReqUserName = True else: bReqUserName = False if bReqUserName and \ (username == None or username == "#" or str(username).strip() == "" or \ password == None or password== "#" or password== "*" or str(password).strip() == ""): outputPrinter( "{0} Requires a username and password".format(version), typeOfMessage='error') return if bReqUserName: securityinfo[ 'security_type'] = 'Portal' #LDAP, NTLM, OAuth, Portal, PKI securityinfo['username'] = username securityinfo['password'] = password securityinfo['org_url'] = siteURL else: securityinfo[ 'security_type'] = 'ArcGIS' #LDAP, NTLM, OAuth, Portal, PKI fsId = argv[3] layerName = argv[4] dataToAppend = argv[5] projection = argv[6] lowerCaseFieldNames = argv[7] showFullResponse = argv[8] scratchGDB = arcpy.env.scratchWorkspace scratchLayName = random_string_generator() scratchLayer = os.path.join(scratchGDB, scratchLayName) if str(lowerCaseFieldNames).upper() == 'TRUE': lowerCaseFieldNames = True else: lowerCaseFieldNames = False fst = featureservicetools.featureservicetools(securityinfo) if fst.valid: fs = fst.GetFeatureService(itemId=fsId, returnURLOnly=False) if not fs is None: if arcpy.Exists(dataset=dataToAppend) == True: lyr = arcpy.Describe(dataToAppend) result = arcpy.GetCount_management(dataToAppend) count = int(result.getOutput(0)) outputPrinter(message="\t\t%s features" % (count)) if count > 0: if projection is not None and projection != "#" and \ projection.strip() !='' : outputPrinter(message="\t\tProjecting %s" % (lyr.name)) result = arcpy.Project_management( dataToAppend, scratchLayer, projection) else: outputPrinter( message="\t\tCopying %s feature from %s" % (count, lyr.name)) arcpy.FeatureClassToFeatureClass_conversion( dataToAppend, scratchGDB, scratchLayName) desc = arcpy.Describe(scratchLayer) if desc.shapeType == 'Polygon': outputPrinter(message="\t\tDensifying %s" % lyr.name) arcpy.Densify_edit(scratchLayer, "ANGLE", "33 Unknown", "0.33 Unknown", "4") if desc.shapeType == 'Polyline': outputPrinter(message="\t\tDensifying %s" % lyr.name) arcpy.Densify_edit(scratchLayer, "ANGLE", "33 Unknown", "0.33 Unknown", "4") syncLayer(fst, fs, scratchLayer, layerName, lyr.name, lowerCaseFieldNames, showFullResponse) outputPrinter(message="\tComplete") outputPrinter(message="\t") else: outputPrinter( message= "\t\t%s does not contain any features, skipping" % lyr.name) outputPrinter(message="\tComplete") outputPrinter(message="\t") else: outputPrinter(message="\t%s does not exist" % dataToAppend) outputPrinter(message="\tComplete") outputPrinter(message="\t") else: outputPrinter( message="\tFeature Service with id %s was not found" % fsId, typeOfMessage='error') arcpy.SetParameterAsText(9, "false") else: outputPrinter(message=fst.message, typeOfMessage="error") arcpy.SetParameterAsText(9, "false") except arcpy.ExecuteError: line, 
filename, synerror = trace()
outputPrinter(message="error on line: %s" % line, typeOfMessage='error')
outputPrinter(message="error in file name: %s" % filename, typeOfMessage='error')
outputPrinter(message="with error message: %s" % synerror, typeOfMessage='error')
outputPrinter(message="ArcPy Error Message: %s" % arcpy.GetMessages(2), typeOfMessage='error')
arcpy.SetParameterAsText(9, "false")
except common.ArcRestHelperError as e:
    outputPrinter(message=e, typeOfMessage='error')
    arcpy.SetParameterAsText(9, "false")
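# How a script tool would typically hand its parameters to main() above (a sketch;
# the original parameter wiring is not shown in this excerpt):
if __name__ == "__main__":
    argv = tuple(arcpy.GetParameterAsText(i) for i in range(arcpy.GetArgumentCount()))
    main(*argv)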
arcpy.Delete_management(all_lakes_lyr)

# Step 5: Repair geometry
# Optional: to see which features will change:
arcpy.CheckGeometry_management(CONUS_LAKES_FC, 'in_memory/checkgeom_lakes')  # 155 self-intersections
arcpy.RepairGeometry_management(CONUS_LAKES_FC)

# Step 6: Densify features with 2 vertices (circular arcs) using 10 meters as maximum deviation
# (within National Map horizontal accuracy standards)
arcpy.AddField_management(CONUS_LAKES_FC, "VertexCount", "LONG")
arcpy.CalculateField_management(CONUS_LAKES_FC, "VertexCount", "!shape!.pointcount", "PYTHON")
conus_lakes_lyr = arcpy.MakeFeatureLayer_management(CONUS_LAKES_FC)
arcpy.SelectLayerByAttribute_management(conus_lakes_lyr, "NEW_SELECTION", "VertexCount < 4")
# Densify the layer (not the whole feature class) so only the selected low-vertex features are processed
arcpy.Densify_edit(conus_lakes_lyr, "OFFSET", max_deviation="10 Meters")
arcpy.CalculateField_management(conus_lakes_lyr, "VertexCount", "!shape!.pointcount", "PYTHON")
arcpy.Delete_management(conus_lakes_lyr)

# Step 7: Add HU4, HU6, and HU8 based on reach code.
arcpy.AddField_management(CONUS_LAKES_FC, "HU4", "TEXT", field_length=4)
arcpy.AddField_management(CONUS_LAKES_FC, "HU6", "TEXT", field_length=6)
arcpy.AddField_management(CONUS_LAKES_FC, "HU8", "TEXT", field_length=8)
conus_lakes_lyr = arcpy.MakeFeatureLayer_management(CONUS_LAKES_FC)
arcpy.CalculateField_management(conus_lakes_lyr, "HU4", "!ReachCode![0:4]", "PYTHON")
arcpy.CalculateField_management(conus_lakes_lyr, "HU6", "!ReachCode![0:6]", "PYTHON")
arcpy.CalculateField_management(conus_lakes_lyr, "HU8", "!ReachCode![0:8]", "PYTHON")
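# Optional sanity check (a sketch, not part of the original workflow): after the
# OFFSET densify above, most of the formerly 2-vertex arc features should report a
# higher VertexCount; this prints how many remain below 4 vertices.
check_lyr = arcpy.MakeFeatureLayer_management(CONUS_LAKES_FC, "check_low_vertex_lyr",
                                              "VertexCount < 4")
remaining = int(arcpy.GetCount_management(check_lyr).getOutput(0))
arcpy.AddMessage("Lakes still below 4 vertices after densify: {}".format(remaining))
arcpy.Delete_management(check_lyr)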
out_name_bezier = helpers.make_output_name(input_name, 'bezier', bezier_deviation) paek_tolerance = (i + 1) * 2 out_name_paek = helpers.make_output_name(input_name, 'paek', paek_tolerance) if output_workspace.strip().lower().endswith('.gdb'): out_name_bezier = re.sub(r'\.(s|S)(h|H)(p|P)$', '', out_name_bezier) out_name_paek = re.sub(r'\.(s|S)(h|H)(p|P)$', '', out_name_paek) bezier_deviation_str = "{0} Meters".format(bezier_deviation) paek_tolerance_str = "{0} Meters".format(paek_tolerance) arcpy.AddMessage("BEZIER: {0}, PAEK: {1}".format(bezier_deviation_str, paek_tolerance_str)) arcpy.CopyFeatures_management(smooth, tmp_smooth) arcpy.Densify_edit(tmp_smooth, "OFFSET", max_deviation=bezier_deviation_str) arcpy.CopyFeatures_management( tmp_smooth, os.path.join(output_workspace, out_name_bezier)) arcpy.SmoothLine_cartography(input_f, os.path.join(output_workspace, out_name_paek), "PAEK", tolerance=paek_tolerance_str) arcpy.SetParameterAsText(3, output_workspace)
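# helpers.make_output_name() is not shown in this excerpt; below is a hypothetical
# stand-in that illustrates the naming convention assumed above (base name + method
# + tolerance), not the real helper:
import os

def make_output_name_sketch(input_name, method, tolerance):
    base, ext = os.path.splitext(os.path.basename(input_name))
    return "{0}_{1}_{2}{3}".format(base, method, str(tolerance).replace(".", "_"), ext)

# e.g. make_output_name_sketch("streams.shp", "bezier", 5) -> "streams_bezier_5.shp"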
"NAME_0 = '%s'" % gadm) # remove fields which are from global analysis, not to be confused with subcountry fields for fld in [ 'rgn_type', 'rgn_id', 'rgn_name', 'rgn_key', 'area_km2' ]: for fc in ['c_eezland', 'c_eez', 'c_land']: if fld in [x.name for x in arcpy.ListFields(fc)]: arcpy.DeleteField_management(fc, fld) # get administrative land arcpy.Clip_analysis('c_gadm', 'c_land', 'c_states') # create theissen polygons used to split slivers arcpy.Densify_edit('c_states', 'DISTANCE', '1 Kilometers') arcpy.FeatureVerticesToPoints_management( 'c_states', 'c_states_pts', 'ALL') # delete interior points for faster thiessen rendering arcpy.Dissolve_management('c_states', 'c_states_d') arcpy.MakeFeatureLayer_management('c_states_pts', 'lyr_c_states_pts') arcpy.SelectLayerByLocation_management( 'lyr_c_states_pts', 'WITHIN_CLEMENTINI', 'c_states_d') arcpy.DeleteFeatures_management('lyr_c_states_pts') # generate thiessen polygons of gadm for intersecting with land slivers arcpy.env.extent = 'c_eezland' arcpy.CreateThiessenPolygons_analysis( 'c_states_pts', 'c_states_t', 'ALL')
print(shp_file)
if os.path.exists(shp_file):
    print("ok")
else:
    print("file does not exist")
    exit(1)
##################################################################
arcpy.AddField_management(shp_file, "LengthKM", "DOUBLE", "", "", "", "",
                          "NULLABLE", "NON_REQUIRED", "")
arcpy.CalculateField_management(shp_file, "LengthKM",
                                "!shape.geodesicLength@KILOMETERS!", "PYTHON_9.3")
##################################################################
arcpy.Generalize_edit(in_features=shp_file, tolerance="1 Centimeters")
print("1")
# arcpy.Generalize_edit(in_features=shp_file, tolerance="1 Centimeters")
# Replace a layer/table view name with a path to a dataset (which can be a layer file)
# or create the layer/table view within the script.
retval = arcpy.Densify_edit(in_features=shp_file,
                            densification_method="DISTANCE",
                            distance="100 Meters",
                            max_deviation=".1 Meters",
                            max_angle="10")
print("2")
print(retval.status)
# Densify_edit modifies shp_file in place and returns a Result object (not a dataset),
# so there is nothing separate to save here; inspect the Result instead.
print(type(retval.getOutput(0)))
print(type(retval))
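# Optional verification sketch (not in the original script): read back a few of the
# geodesic lengths just calculated into LengthKM.
with arcpy.da.SearchCursor(shp_file, ["LengthKM"]) as cursor:
    for i, (length_km,) in enumerate(cursor):
        print("feature {0}: {1:.3f} km".format(i, length_km))
        if i >= 4:
            break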
# get other basin and merge with basins arcpy.Erase_analysis('eez','basins','eez_e') # NOTE: slivers of eez beyond basins to exclude arcpy.MultipartToSinglepart_management('eez_e', 'eez_e_m') arcpy.Select_analysis('eez_e_m', 'eez_e_m_s', '"Shape_Area" > 1') arcpy.Dissolve_management('eez_e_m_s', 'basin_other') arcpy.AddField_management('basin_other', 'basin_name', 'TEXT') arcpy.CalculateField_management('basin_other', 'basin_name', "'OT'", 'PYTHON_9.3') arcpy.Merge_management(['basins','basin_other'], 'basins_m') # setup for theissen polygons arcpy.Buffer_analysis('eez_basins', 'eez_basins_buf200km', '200 kilometers', dissolve_option='ALL') arcpy.env.extent = 'eez_basins_buf200km' arcpy.env.outputCoordinateSystem = sr_mol arcpy.CopyFeatures_management('basins_m', 'thie') arcpy.Densify_edit('thie', 'DISTANCE', '1 Kilometers') arcpy.FeatureVerticesToPoints_management('thie', 'thie_pts', 'ALL') # delete interior points arcpy.Dissolve_management('thie', 'thie_d') arcpy.MakeFeatureLayer_management('thie_pts', 'lyr_pts') arcpy.SelectLayerByLocation_management('lyr_pts', 'WITHIN_CLEMENTINI', 'thie_d') arcpy.DeleteFeatures_management('lyr_pts') # generate thiessen polygons arcpy.CreateThiessenPolygons_analysis('thie_pts', 'thie_polys', 'ALL') arcpy.env.outputCoordinateSystem = sr_gcs arcpy.Dissolve_management('thie_polys', 'thie_polys_d', ['basin_name']) arcpy.Erase_analysis('thie_polys_d', 'basins_m', 'thie_polys_d_e') arcpy.Merge_management(['thie_polys_d_e','basins_m'], 'thie_polys_d_e_m')
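# The densify -> vertices-to-points -> drop-interior-points -> Thiessen sequence above
# is the same pattern used for the GADM states earlier; a compact sketch of that shared
# pattern (the function and in_memory names here are illustrative, not from the original):
def thiessen_from_polygons(in_polys, out_thiessen, vertex_spacing='1 Kilometers'):
    arcpy.Densify_edit(in_polys, 'DISTANCE', vertex_spacing)
    arcpy.FeatureVerticesToPoints_management(in_polys, 'in_memory/thie_pts', 'ALL')
    arcpy.Dissolve_management(in_polys, 'in_memory/thie_d')
    arcpy.MakeFeatureLayer_management('in_memory/thie_pts', 'lyr_thie_pts')
    arcpy.SelectLayerByLocation_management('lyr_thie_pts', 'WITHIN_CLEMENTINI', 'in_memory/thie_d')
    arcpy.DeleteFeatures_management('lyr_thie_pts')  # interior points only slow down Thiessen generation
    arcpy.CreateThiessenPolygons_analysis('in_memory/thie_pts', out_thiessen, 'ALL')
    return out_thiessen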
def execute(self, inRefGrid, in3DFC, out3DGrid, pScratchWorkspace = None): #def Convert3DLinetoRasterPy(self, inRefGrid, in3DFC, out3DGrid): sOK = apwrutils.C_OK try: # use scratchworkspace to hold intermediate datasets. ..ye, @1/2/2016 9:35:45 AM on ZYE1 if((flooddsconfig.debugLevel & 2)==2): arcpy.AddMessage("in conver3dlinetoraster: os.environ['TMP']={}, os.environ['TEMP']={}".format(os.environ['TMP'], os.environ['TEMP'])) if(pScratchWorkspace!=None): pScratchWKS = pScratchWorkspace arcpy.env.scratchWorkspace = pScratchWorkspace else: pScratchWKS = flooddsconfig.pScratchWorkspace # "%scratchworkspace%" # "in_memory" # # Set current environment state envInitSnapRaster = arcpy.env.snapRaster # snap raster envInitCellSize = arcpy.env.cellSize # cell size envInitEnvExtent = arcpy.env.extent # analysis environment # Set raster processing environment to input DEM grid arcpy.env.snapRaster = inRefGrid outCellSize = float(str(arcpy.GetRasterProperties_management(inRefGrid, "CELLSIZEX"))) arcpy.env.cellSize = outCellSize arcpy.env.extent = inRefGrid # Setting workspace to input fc for the temporary FC copy fullPath = arcpy.Describe(in3DFC).path arcpy.env.workspace = fullPath tmpLineFC = os.path.join(pScratchWKS, "xTmpLine") tmpPntFC = os.path.join(pScratchWKS, "xTmpPnt") if(arcpy.Exists(tmpLineFC)): arcpy.Delete_management(tmpLineFC) if(arcpy.Exists(tmpPntFC)): arcpy.Delete_management(tmpPntFC) #tmpLineFC = fullPath + "\\xxxTmpLine" # temporary 3D line FC #tmpPntFC = fullPath + "\\xxxTmpPnt" # temporary 3D point FC # Start processing # ---------------- dt = time.clock() # Create temporary 3D line FC and densify it (densify). if((self.DebugLevel & 1)==1): arcpy.AddMessage(" Densifying input 3D line feature class...") arcpy.CopyFeatures_management(in3DFC, tmpLineFC) denDistance = outCellSize * 0.1 # set densification distance to be 1/2 of the cell size # arcpy.Densify_edit(tmpLineFC, "DISTANCE", "10 Feet") # need to adjust the densification as function of cell size arcpy.Densify_edit(tmpLineFC, "DISTANCE", denDistance) # need to adjust the densification as function of cell size dt2 = time.clock() if((self.DebugLevel & 1)==1): arcpy.AddMessage(" Densifying input 3D line feature class completed in " + str("%.2f" % (dt2 - dt)) + " seconds.") # Create temporary point FC (feature vertices to points). if((self.DebugLevel & 1)==1): arcpy.AddMessage(" Converting densified 3D line into points...") arcpy.FeatureVerticesToPoints_management(tmpLineFC, tmpPntFC, "ALL") dt3 = time.clock() if((self.DebugLevel & 1)==1): arcpy.AddMessage(" Converting densified 3D line into points completed in " + str("%.2f" % (dt3 - dt2)) + " seconds.") # Create 3D stream grid from points. 
if((self.DebugLevel & 1)==1): arcpy.AddMessage(" Generating 3D line raster...") arcpy.PointToRaster_conversion(tmpPntFC, "Shape.Z", out3DGrid) #arcpy.PointToRaster_conversion(tmpPntFC, apwrutils.FN_ShapeAtZ, out3DGrid) dt4 = time.clock() if((self.DebugLevel & 1)==1): arcpy.AddMessage(" Generating 3D line raster completed in " + str("%.2f" % (dt4 - dt3)) + " seconds.") # Clean up - delete temporary grids and FCs if((self.DebugLevel & 1)==1): arcpy.AddMessage(" Cleaning up...") try: arcpy.Delete_management(tmpLineFC, "") arcpy.Delete_management(tmpPntFC, "") except arcpy.ExecuteError: arcpy.AddWarning(str(arcpy.GetMessages(2))) except: arcpy.AddWarning(str(trace())) except arcpy.ExecuteError: sMsg = str(arcpy.GetMessages(2)) arcpy.AddError(sMsg) except: sMsg = str(trace()) arcpy.AddWarning(sMsg) #arcpy.AddError(str(arcpy.GetMessages(2))) finally: # Setting output variables - needed for outputs for proper chaining arcpy.SetParameterAsText(2,out3DGrid) # output = 3D line grid print ('Function Convert3DLinetoRasterPy finished') if(sOK==apwrutils.C_OK): tResults = (apwrutils.C_OK, out3DGrid) else: tResults = (sOK) return tResults
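# A hypothetical driver for execute() above; the enclosing class is not visible in
# this excerpt, so the class name and paths below are assumed for illustration only:
if __name__ == "__main__":
    tool = Convert3DLineToRaster()                        # assumed class name
    results = tool.execute(r"C:\flood\dem.tif",           # inRefGrid
                           r"C:\flood\data.gdb\lines3d",  # in3DFC
                           r"C:\flood\data.gdb\line3d_ras",
                           pScratchWorkspace="in_memory")
    # on success, results is (apwrutils.C_OK, out3DGrid)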
import arcpy import os import json import sys import inspect arcpy.env.overwriteOutput = 1 duongDanNguon = "C:/Generalize_25_50/50K_Process.gdb" arcpy.AddMessage("\n\t# Snap other: {0}".format("PhuBeMat")) arcpy.Integrate_management([[duongDanNguon + "/PhuBeMat/PhuBeMat", 1]], "2 Meters") arcpy.Densify_edit(duongDanNguon + "/PhuBeMat/PhuBeMat", "DISTANCE", "2 Meters", None, None) arcpy.Snap_edit(duongDanNguon + "/PhuBeMat/PhuBeMat", [[ duongDanNguon + "/ThuyHe/SongSuoiL_KenhMuongL_SnapPBM", "EDGE", "35 Meters" ]]) arcpy.Integrate_management( [[duongDanNguon + "/ThuyHe/SongSuoiL_KenhMuongL_SnapPBM", 1], [duongDanNguon + "/PhuBeMat/PhuBeMat", 2]], "2 Meters") arcpy.Erase_analysis(in_features=duongDanNguon + "/PhuBeMat/PhuBeMat_LocMatNuoc", erase_features=duongDanNguon + "/PhuBeMat/PhuBeMat", out_feature_class=duongDanNguon + "/PhuBeMat/PhuBeMat_Lo") arcpy.CalculateField_management(duongDanNguon + "/PhuBeMat/PhuBeMat_Lo", "maNhanDang", '"temp123"', "PYTHON_9.3") arcpy.Append_management([duongDanNguon + "/PhuBeMat/PhuBeMat_Lo"], duongDanNguon + "/PhuBeMat/PhuBeMat", "NO_TEST", None, None) arcpy.MultipartToSinglepart_management(duongDanNguon + "/PhuBeMat/PhuBeMat", duongDanNguon + "/PhuBeMat/PhuBeMat2") arcpy.MakeFeatureLayer_management(duongDanNguon + "/PhuBeMat/PhuBeMat2",
def main(fcInputCenterline, fcInputPolygon, fcSegmentedPolygons, dblPointDensity=10.0, dblJunctionBuffer=100.00, workspaceTemp="in_memory"): # Manage Environments env_extent = arcpy.env.extent env_outputmflag = arcpy.env.outputMFlag env_outputzflag = arcpy.env.outputZFlag arcpy.env.outputMFlag = "Disabled" arcpy.env.outputZFlag = "Disabled" arcpy.env.extent = fcInputPolygon ## Set full extent to build Thiessan polygons over entire line network. # Copy centerline to temporary workspace fcCenterline = gis_tools.newGISDataset(workspaceTemp, "GNAT_DPS_Centerline") arcpy.CopyFeatures_management(fcInputCenterline, fcCenterline) if "FromID" not in [ field.name for field in arcpy.ListFields(fcCenterline) ]: arcpy.AddField_management(fcCenterline, "FromID", "LONG") arcpy.CalculateField_management( fcCenterline, "FromID", "!{}!".format(arcpy.Describe(fcCenterline).OIDFieldName), "PYTHON_9.3") # Build Thiessan polygons arcpy.AddMessage("GNAT DPS: Building Thiessan polygons") arcpy.Densify_edit(fcCenterline, "DISTANCE", "{} METERS".format(dblPointDensity)) fcTribJunctionPoints = gis_tools.newGISDataset( workspaceTemp, "GNAT_DPS_TribJunctionPoints") arcpy.Intersect_analysis([fcCenterline], fcTribJunctionPoints, output_type="POINT") fcThiessanPoints = gis_tools.newGISDataset(workspaceTemp, "GNAT_DPS_ThiessanPoints") arcpy.FeatureVerticesToPoints_management(fcCenterline, fcThiessanPoints, "ALL") lyrThiessanPoints = gis_tools.newGISDataset("Layer", "lyrThiessanPoints") arcpy.MakeFeatureLayer_management(fcThiessanPoints, lyrThiessanPoints) arcpy.SelectLayerByLocation_management( lyrThiessanPoints, "INTERSECT", fcTribJunctionPoints, "{} METERS".format(dblJunctionBuffer)) fcThiessanPoly = gis_tools.newGISDataset(workspaceTemp, "GNAT_DPS_ThiessanPoly") # arcpy.CreateThiessenPolygons_analysis(lyrThiessanPoints,fcThiessanPoly,"ONLY_FID") arcpy.CreateThiessenPolygons_analysis(lyrThiessanPoints, fcThiessanPoly, "ALL") # Clean polygons # lyrInputPolygon = gis_tools.newGISDataset("Layer", "lyrInputPolygon") # arcpy.MakeFeatureLayer_management(fcInputPolygon, lyrInputPolygon) arcpy.RepairGeometry_management(fcInputPolygon, "KEEP_NULL") fcThiessanPolyClip = gis_tools.newGISDataset(workspaceTemp, "GNAT_DPS_TheissanPolyClip") arcpy.Clip_analysis(fcThiessanPoly, fcInputPolygon, fcThiessanPolyClip) # Split the junction Thiessan polygons arcpy.AddMessage("GNAT DPS: Split junction Thiessan polygons") lyrTribThiessanPolys = gis_tools.newGISDataset("Layer", "lyrTribThiessanPolys") arcpy.MakeFeatureLayer_management(fcThiessanPolyClip, lyrTribThiessanPolys) arcpy.SelectLayerByLocation_management(lyrTribThiessanPolys, "INTERSECT", fcTribJunctionPoints) fcSplitPoints = gis_tools.newGISDataset(workspaceTemp, "GNAT_DPS_SplitPoints") arcpy.Intersect_analysis([lyrTribThiessanPolys, fcCenterline], fcSplitPoints, output_type="POINT") arcpy.AddMessage("GNAT DPS: Moving starting vertices of junction polygons") geometry_functions.changeStartingVertex(fcTribJunctionPoints, lyrTribThiessanPolys) arcpy.AddMessage("GNAT DPS: Vertices moved") fcThiessanTribPolyEdges = gis_tools.newGISDataset( workspaceTemp, "GNAT_DPS_ThiessanTribPolyEdges") arcpy.FeatureToLine_management(lyrTribThiessanPolys, fcThiessanTribPolyEdges) fcSplitLines = gis_tools.newGISDataset(workspaceTemp, "GNAT_DPS_SplitLines") arcpy.SplitLineAtPoint_management(fcThiessanTribPolyEdges, fcSplitPoints, fcSplitLines, "0.1 METERS") fcMidPoints = gis_tools.newGISDataset(workspaceTemp, "GNAT_DPS_MidPoints") arcpy.FeatureVerticesToPoints_management(fcSplitLines, fcMidPoints, "MID") 
arcpy.Near_analysis(fcMidPoints, fcTribJunctionPoints, location="LOCATION") arcpy.AddXY_management(fcMidPoints) fcTribToMidLines = gis_tools.newGISDataset(workspaceTemp, "GNAT_DPS_TribToMidLines") arcpy.XYToLine_management(fcMidPoints, fcTribToMidLines, "POINT_X", "POINT_Y", "NEAR_X", "NEAR_Y") ### Select polygons by centerline ### arcpy.AddMessage("GNAT DPS: Select polygons by centerline") fcThiessanEdges = gis_tools.newGISDataset(workspaceTemp, "GNAT_DPS_ThiessanEdges") arcpy.FeatureToLine_management(fcThiessanPolyClip, fcThiessanEdges) fcAllEdges = gis_tools.newGISDataset(workspaceTemp, "GNAT_DPS_AllEdges") arcpy.Merge_management([fcTribToMidLines, fcThiessanEdges, fcCenterline], fcAllEdges) # include fcCenterline if needed fcAllEdgesPolygons = gis_tools.newGISDataset(workspaceTemp, "GNAT_DPS_AllEdgesPolygons") arcpy.FeatureToPolygon_management(fcAllEdges, fcAllEdgesPolygons) fcAllEdgesPolygonsClip = gis_tools.newGISDataset( workspaceTemp, "GNAT_DPS_AllEdgesPolygonsClip") arcpy.Clip_analysis(fcAllEdgesPolygons, fcInputPolygon, fcAllEdgesPolygonsClip) fcPolygonsJoinCenterline = gis_tools.newGISDataset( workspaceTemp, "GNAT_DPS_PolygonsJoinCenterline") arcpy.SpatialJoin_analysis(fcAllEdgesPolygonsClip, fcCenterline, fcPolygonsJoinCenterline, "JOIN_ONE_TO_MANY", "KEEP_ALL", match_option="SHARE_A_LINE_SEGMENT_WITH") fcPolygonsDissolved = gis_tools.newGISDataset( workspaceTemp, "GNAT_DPS_PolygonsDissolved") arcpy.Dissolve_management(fcPolygonsJoinCenterline, fcPolygonsDissolved, "FromID", multi_part="SINGLE_PART") lyrPolygonsDissolved = gis_tools.newGISDataset("Layer", "lyrPolygonsDissolved") arcpy.MakeFeatureLayer_management(fcPolygonsDissolved, lyrPolygonsDissolved) arcpy.SelectLayerByAttribute_management(lyrPolygonsDissolved, "NEW_SELECTION", """ "FromID" IS NULL """) arcpy.Eliminate_management(lyrPolygonsDissolved, fcSegmentedPolygons, "LENGTH") arcpy.AddMessage("GNAT DPS: Tool complete") # Reset env arcpy.env.extent = env_extent arcpy.env.outputMFlag = env_outputmflag arcpy.env.outputZFlag = env_outputzflag return
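# Minimal usage sketch for this version of the tool (hypothetical paths); note that
# here workspaceTemp is the last parameter and defaults to "in_memory":
if __name__ == "__main__":
    main(r"C:\GNAT\inputs.gdb\centerline",
         r"C:\GNAT\inputs.gdb\valley_polygon",
         r"C:\GNAT\outputs.gdb\segmented_polygons",
         dblPointDensity=10.0,
         dblJunctionBuffer=100.0)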
def ejecta_distance(path, pathdab, infile_center_crater, infile_ejecta_polygon): ''' path = "C:/Users/nilscp/Desktop/testarcpy/DTM/" pathdab = "C:/Users/nilscp/Desktop/testarcpy/DTM/database.gdb/" infile_center_crater = "CENTER_crater001" infile_ejecta_polygon = "continuousej_crater001" I need somehow to get the diameter of the crater and infile_center_crater and infile_ejecta_polygon should have the same coordinates system (preferentially equirectangular with lat, lon of the location of the centre of the crater) ''' # change to directory of interest os.chdir(path) # define paths and workspace (I need to create the gdb at some points) env.workspace = env.scratchWorkspace = pathdab # first we need to densify the number vertices along the polygon arcpy.Densify_edit(in_features=infile_ejecta_polygon, densification_method="ANGLE", max_angle="1.0") #max angle of 0.5 was making way too many points # create name for new point shapefile tmpstr = infile_ejecta_polygon.split("_") infile_ejecta_vertices = tmpstr[0] + "_vertices_" + tmpstr[1] # Feature vertice to points arcpy.FeatureVerticesToPoints_management(in_features=infile_ejecta_polygon, out_feature_class=infile_ejecta_vertices, point_location="ALL") # add xy coordinates for the crater centre and ejecta vertices arcpy.AddXY_management(in_features=infile_ejecta_vertices) arcpy.AddXY_management(in_features=infile_center_crater) # For the crater center with arcpy.da.SearchCursor(infile_center_crater, ["POINT_X", "POINT_Y"]) as cursor: for row in cursor: xcenter = row[0] ycenter = row[1] # For the vertices from the polygon n = int(arcpy.GetCount_management(infile_ejecta_vertices)[0]) xvertices = np.zeros(n) yvertices = np.zeros(n) with arcpy.da.SearchCursor(infile_ejecta_vertices, ["POINT_X", "POINT_Y"]) as cursor: ix = 0 for row in cursor: xvertices[ix] = row[0] yvertices[ix] = row[1] ix = ix + 1 # calculating the distance to each vertice a = (yvertices-ycenter)**2.0 b = (xvertices-xcenter)**2.0 dist = np.sqrt(a + b) # get the min, 25p, median, 75p and max distance ej_min_distance = np.min(dist) ej_25p_distance = np.percentile(dist,25.0) ej_median_distance = np.percentile(dist,50.0) ej_75p_distance = np.percentile(dist,75.0) ej_max_distance = np.max(dist) return (ej_min_distance, ej_25p_distance, ej_median_distance, ej_75p_distance, ej_max_distance)
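# Example call (paths and dataset names come from the docstring above); the function
# assumes arcpy, numpy as np, os, and arcpy's env are already imported in this module:
if __name__ == "__main__":
    stats = ejecta_distance("C:/Users/nilscp/Desktop/testarcpy/DTM/",
                            "C:/Users/nilscp/Desktop/testarcpy/DTM/database.gdb/",
                            "CENTER_crater001",
                            "continuousej_crater001")
    print("ejecta distance (min, 25p, median, 75p, max): {0}".format(stats))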