def spreadsheetToLinePolygon(dataFile, geometryType, xField, yField, spreadsheetUniqueID,
                             inputCoordinateSystemName, inputCoordinateSystem,
                             outputCoordinateSystemName, outputCoordinateSystem, transformation):
    # If Excel spreadsheet, convert it to a table first
    if dataFile.lower().endswith(('.xls', '.xlsx')):
        dataFile = arcpy.ExcelToTable_conversion(dataFile, "in_memory\\DatasetExcel", "")

    # If projection needed
    if (transformation.lower() != "none"):
        printMessage("Importing CSV/Excel...", "info")
        arcpy.MakeXYEventLayer_management(dataFile, xField, yField, "InputLayer", inputCoordinateSystem, "")
        printMessage("Projecting layer from " + inputCoordinateSystemName + " to " + outputCoordinateSystemName + "...", "info")
        arcpy.Project_management("InputLayer", os.path.join(arcpy.env.scratchGDB, "Layer_Projected"),
                                 outputCoordinateSystem, transformation, inputCoordinateSystem,
                                 "NO_PRESERVE_SHAPE", "")
    else:
        printMessage("Importing CSV/Excel...", "info")
        arcpy.MakeXYEventLayer_management(dataFile, xField, yField, "Layer", inputCoordinateSystem, "")
        arcpy.CopyFeatures_management("Layer", "in_memory\\Dataset", "", "0", "0", "0")

    if (transformation.lower() != "none"):
        dataset = os.path.join(arcpy.env.scratchGDB, "Layer_Projected")
    else:
        dataset = "in_memory\\Dataset"

    printMessage("Creating layer...", "info")
    if (geometryType.lower() == "line"):
        # Convert the points to lines, using the unique identifier field to create each unique line
        arcpy.PointsToLine_management(dataset, "in_memory\\DatasetLine", spreadsheetUniqueID, "", "NO_CLOSE")
        output = arcpy.MakeFeatureLayer_management("in_memory\\DatasetLine", "Layer", "", "", "")
    if (geometryType.lower() == "polygon"):
        # Convert the points to lines, using the unique identifier field to create each unique line, then close the final line
        arcpy.PointsToLine_management(dataset, "in_memory\\DatasetLine", spreadsheetUniqueID, "", "CLOSE")
        # Convert the lines to polygons and join on attributes from the lines
        arcpy.FeatureToPolygon_management("in_memory\\DatasetLine", "in_memory\\DatasetPolygon", "", "ATTRIBUTES", "")
        arcpy.JoinField_management("in_memory\\DatasetPolygon", "OID", "in_memory\\DatasetLine", "OID", spreadsheetUniqueID)
        output = arcpy.MakeFeatureLayer_management("in_memory\\DatasetPolygon", "Layer", "", "", "")
    return output
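# A minimal usage sketch for spreadsheetToLinePolygon() above. The spreadsheet path,
# field names and coordinate systems are placeholders, not values from the original
# script, and printMessage() is assumed to be defined elsewhere in the same module.
wgs84 = arcpy.SpatialReference(4326).exportToString()
nztm = arcpy.SpatialReference(2193).exportToString()
layer = spreadsheetToLinePolygon(
    r"C:\temp\tracks.xlsx",    # hypothetical spreadsheet
    "line",                    # or "polygon"
    "Longitude", "Latitude",   # hypothetical coordinate fields
    "TrackID",                 # hypothetical unique-ID field
    "WGS 1984", wgs84,
    "NZGD 2000 NZTM", nztm,
    "none")                    # "none" skips the projection branch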
def create_gpx_features(path2GPXFiles):
    gpxDir = path2GPXFiles
    arcpy.CreateFileGDB_management(gpxDir, "heatmap", "CURRENT")
    arcpy.env.workspace = os.path.join(gpxDir, "heatmap.gdb")
    # Iterate over gpx files, generate feature classes
    featuresToMerge = []  # create empty list
    for file in os.listdir(gpxDir):
        if file.endswith(".gpx"):
            ptFeatureClass = file.replace(".gpx", "_pt")  # point feature class name
            try:
                arcpy.GPXtoFeatures_conversion(os.path.join(gpxDir, file), ptFeatureClass)
                print str(file) + " converted to features"
            except Exception as E:
                print str(file) + " failed..."
                print E
            lineFeatureClass = file.replace(".gpx", "_line")  # line feature class name
            try:
                arcpy.PointsToLine_management(ptFeatureClass, lineFeatureClass, "", "", "NO_CLOSE")
                print str(file) + " converted to features"
            except Exception as E:
                print str(file) + " failed..."
                print E
            featuresToMerge.append(lineFeatureClass)
    mergedFeatureClass = "merged_lines"
    arcpy.Merge_management(featuresToMerge, mergedFeatureClass, "")  # merge line feature classes
    arcpy.gp.LineDensity_sa(mergedFeatureClass, "NONE", "heat_map", ".00001", "0.0001", "SQUARE_MAP_UNITS")  # do the heatmap
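# Hedged usage sketch for create_gpx_features(): the folder path below is a
# placeholder, a Spatial Analyst licence is assumed for the Line Density step,
# and arcpy/os are assumed to be imported at module level.
arcpy.CheckOutExtension("Spatial")
create_gpx_features(r"C:\data\gpx_tracks")  # hypothetical folder of .gpx files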
def grafo2(edificio):
    auxiliar4 = root + "\\AUXILIAR4"
    os.mkdir(auxiliar4)
    output = "AUXILIAR2\\" + str(edificio) + "_9.dbf"
    row2s = arcpy.SearchCursor(output)
    for row2 in row2s:
        output = "AUXILIAR2\\" + str(edificio) + "_9.shp"
        clause = '"FID" = ' + str(row2.FID)
        output2 = "AUXILIAR4\\" + str(row2.FID) + ".shp"
        arcpy.Select_analysis(output, output2, clause)
        if row2.VIVIENDA == 0:
            output = "AUXILIAR3\\" + str(edificio) + "_0_20.shp"
        else:
            output = "AUXILIAR3\\" + str(edificio) + "_1_20.shp"
        output3 = "AUXILIAR4\\" + str(row2.FID) + "b.shp"
        arcpy.Merge_management([output2, output], output3)
        output = "AUXILIAR4\\" + str(row2.FID) + "c.shp"
        arcpy.PointsToLine_management(output3, output)
        outputb = "AUXILIAR4\\" + str(row2.FID) + "c.dbf"
        arcpy.AddField_management(outputb, "VIVIENDA", "SHORT")
        arcpy.CalculateField_management(outputb, "VIVIENDA", row2.VIVIENDA, "PYTHON_9.3")
        output2 = "AUXILIAR3\\" + str(edificio) + "_21.shp"
        output3 = "AUXILIAR4\\" + str(edificio) + "_21.shp"
        if row2.FID == 0:
            arcpy.Copy_management(output, output2)
        else:
            arcpy.Merge_management([output2, output], output3)
            arcpy.Copy_management(output3, output2)
    shutil.rmtree(auxiliar4)
def grafo(vivienda):
    output = "AUXILIAR2\\" + str(vivienda) + ".shp"
    output2 = "AUXILIAR3\\" + str(vivienda) + "_20.shp"
    output3 = "AUXILIAR2\\" + str(vivienda) + "_6.shp"
    try:
        arcpy.FeatureToPoint_management(output3, output2)
    except Exception:
        arcpy.FeatureToPoint_management(output, output2)
    output2 = "AUXILIAR3\\" + str(vivienda) + "_21.shp"
    arcpy.Select_analysis("AUXILIAR2\\0_1.shp", output2, "FID = -1")  # create an empty (blank) layer
    auxiliar4 = root + "\\AUXILIAR4"
    os.mkdir(auxiliar4)
    output = "AUXILIAR2\\" + str(vivienda) + "_9.dbf"
    row2s = arcpy.SearchCursor(output)
    for row2 in row2s:
        output = "AUXILIAR2\\" + str(vivienda) + "_9.shp"
        clause = '"FID" = ' + str(row2.FID)
        output2 = "AUXILIAR4\\" + str(row2.FID) + ".shp"
        arcpy.Select_analysis(output, output2, clause)
        output = "AUXILIAR3\\" + str(vivienda) + "_20.shp"
        output3 = "AUXILIAR4\\" + str(row2.FID) + "b.shp"
        arcpy.Merge_management([output2, output], output3)
        output = "AUXILIAR4\\" + str(row2.FID) + "c.shp"
        arcpy.PointsToLine_management(output3, output)
        output2 = "AUXILIAR3\\" + str(vivienda) + "_21.shp"
        output3 = "AUXILIAR4\\" + str(vivienda) + "_21.shp"
        if row2.FID == 0:
            arcpy.Copy_management(output, output2)
        else:
            arcpy.Merge_management([output2, output], output3)
            arcpy.Copy_management(output3, output2)
    shutil.rmtree(auxiliar4)
def grafo(edificio, vivienda):
    outputb = "AUXILIAR2\\" + str(edificio) + "_7.dbf"
    row2s = arcpy.SearchCursor(outputb)
    for row2 in row2s:
        if row2.DWELLING == vivienda:
            output = "AUXILIAR2\\" + str(edificio) + "_7.shp"
            output2 = "AUXILIAR4\\" + str(row2.FID) + "_1.shp"
            clause = '"FID" = ' + str(row2.FID)
            arcpy.Select_analysis(output, output2, clause)
            output = "AUXILIAR2\\" + str(edificio) + "_4.shp"
            output3 = "AUXILIAR4\\" + str(row2.FID) + "_2.shp"
            clause = '"DWELLING" = ' + str(row2.DWELLING)
            arcpy.Select_analysis(output, output3, clause)
            output = "AUXILIAR4\\" + str(row2.FID) + "_3.shp"
            arcpy.Merge_management([output2, output3], output)
            output3 = "AUXILIAR4\\" + str(row2.FID) + "_4.shp"
            arcpy.PointsToLine_management(output, output3)
            output2 = "AUXILIAR2\\" + str(edificio) + "_8.shp"
            output = "AUXILIAR4\\" + str(row2.FID) + "_5.shp"
            arcpy.Merge_management([output2, output3], output)
            arcpy.Copy_management(output, output2)
def fixCurves(fc):
    arcpy.env.overwriteOutput = True
    print("\tProcessing true curves in {0}... this will take a while to complete".format(fc.name))
    whereOID, cntSource = getCurvy(fc.dataSource, True, False)
    if len(cntSource) == 1:
        whereOID = whereOID.replace(',', '')
    #arcpy.SelectLayerByAttribute_management(fc,"NEW_SELECTION",whereOID)
    #arcpy.CopyFeatures_management(fc,"curvy_" + fc.name.replace(" ","_"))
    arcpy.Select_analysis(fc.dataSource, "curvy_" + fc.name.replace(" ", "_"), whereOID)
    expression, cntCopy = getCurvy(scratchWksp + "\\curvy_" + fc.name.replace(" ", "_"), False, False)
    arcpy.Densify_edit(scratchWksp + "\\curvy_" + fc.name.replace(" ", "_"), "ANGLE", "200 Feet", "2 Feet", "10")
    arcpy.FeatureVerticesToPoints_management(
        scratchWksp + "\\curvy_" + fc.name.replace(" ", "_"),
        scratchWksp + "\\curvy_" + fc.name.replace(" ", "_") + "_Pnts", "ALL")
    arcpy.PointsToLine_management(
        scratchWksp + "\\curvy_" + fc.name.replace(" ", "_") + "_Pnts",
        scratchWksp + "\\notCurvy_" + fc.name.replace(" ", "_"), "ORIG_FID")
    if getCurvy(scratchWksp + "\\notCurvy_" + fc.name.replace(" ", "_"), False, False):
        print("Something went horribly wrong! {0}".format(fc.name))
    flds = arcpy.ListFields(fc.dataSource)
    # Use a list comprehension; removing list objects while looping over the list raises an error
    fldsList = [fld for fld in flds if fld.name not in passFlds]
    # A feature class may contain only passFlds, in which case the script fails
    if fldsList:
        fldNames = []
        cnt = 1
        for f in fldsList:
            if cnt < len(fldsList):
                fldNames.append(f.name)
            elif cnt == len(fldsList):
                fldNames.append(f.name)
            cnt = cnt + 1
        fldNames = ';'.join(map(str, fldNames))
        if getShapeType(fc) == "Polyline":
            arcpy.TransferAttributes_edit(
                scratchWksp + "\\curvy_" + fc.name.replace(" ", "_"),
                scratchWksp + "\\notCurvy_" + fc.name.replace(" ", "_"),
                fldNames, "1 Feet", "", "attTransfer" + fc.name.replace(" ", "_"))
            if fixTrueCurves:
                # Delete coincident lines first due to ArcFM Feeder Manager messages;
                # append after delete or ArcFM Feeder Manager will present excessive messages
                arcpy.SelectLayerByAttribute_management(fc, "NEW_SELECTION", whereOID)
                arcpy.DeleteFeatures_management(fc)
                arcpy.Append_management(
                    scratchWksp + "\\notCurvy_" + fc.name.replace(" ", "_"),
                    fc.dataSource, "NO_TEST")
                #pass
            else:
                pass
    print("{0}: {1} Copied: {2} notCurvy: {3}".format(fc.name, len(cntSource), len(cntCopy), len(curveList)))
def grafo2(edificio):
    auxiliar4 = root + "\\AUXILIAR4"
    os.mkdir(auxiliar4)
    outputb = "AUXILIAR2\\" + str(edificio) + "_4.dbf"
    row2s = arcpy.SearchCursor(outputb)
    for row2 in row2s:
        output = "AUXILIAR2\\" + str(edificio) + "_4.shp"
        output2 = "AUXILIAR4\\" + str(row2.FID) + "_1.shp"
        clause = '"FID" = ' + str(row2.FID)
        arcpy.Select_analysis(output, output2, clause)
        output = "AUXILIAR2\\" + str(edificio) + "_2.shp"
        output3 = "AUXILIAR4\\" + str(row2.FID) + "_3.shp"
        arcpy.Merge_management([output2, output], output3)
        output = "AUXILIAR4\\" + str(row2.FID) + "_4.shp"
        arcpy.PointsToLine_management(output3, output)
        output2 = "AUXILIAR2\\" + str(edificio) + "_5.shp"
        output3 = "AUXILIAR4\\" + str(edificio) + "_5.shp"
        if row2.FID == 0:
            arcpy.Copy_management(output, output2)
        else:
            arcpy.Merge_management([output2, output], output3)
            arcpy.Copy_management(output3, output2)
    shutil.rmtree(auxiliar4)
def create_line_features(in_fc, gdb, unique_event_list, dictionary):
    for event in unique_event_list:
        event_lines = os.path.join(gdb, 'event_' + str(event))
        if arcpy.Exists(event_lines) == False:
            print('Creating lines for event ' + str(event))
            layer = 'in_memory\\event_lyr_' + str(event)
            query = '"EVENT_ID" = ' + str(event)
            arcpy.MakeFeatureLayer_management(in_fc, layer, query, "#", "#")
            arcpy.PointsToLine_management(layer, event_lines, "PLAYER_ID", "GAME_TIME", "NO_CLOSE")
            arcpy.AddField_management(os.path.join(gdb, 'event_' + str(event)), "TEAM_ID", "LONG", "", "", "")
            arcpy.AddField_management(os.path.join(gdb, 'event_' + str(event)), "QUARTER", "SHORT", "", "", "")
            arcpy.AddField_management(os.path.join(gdb, 'event_' + str(event)), "START_TIME", "TEXT", "", "", 30)
            # Get start time of lines
            with arcpy.da.SearchCursor(layer, ('GAME_TIME', 'QUARTER')) as cursor:
                first_time = cursor.next()
                print(first_time[0])
            # Add start time of lines to line features
            with arcpy.da.UpdateCursor(os.path.join(gdb, 'event_' + str(event)),
                                       ('START_TIME', 'PLAYER_ID', 'TEAM_ID', 'QUARTER')) as cursor:
                for row in cursor:
                    row[0] = first_time[0]
                    row[2] = dictionary[row[1]]
                    row[3] = first_time[1]
                    cursor.updateRow(row)
def convert_points2line(inputpointsshpfile, outputlineshpfilepath, line_field=None):
    """Convert all of the points into a single line.

    inputpointsshpfile: (str) path to the input point shapefile
    outputlineshpfilepath: (str) path for the output line shapefile
    line_field: (str, optional) field used to group points into separate lines"""
    arcpy.PointsToLine_management(inputpointsshpfile, outputlineshpfilepath,
                                  Line_Field=line_field)
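# Minimal usage sketch for convert_points2line(); the shapefile paths and the
# "ROUTE_ID" grouping field are placeholders, not names from the original code.
convert_points2line(r"C:\data\gps_points.shp",
                    r"C:\data\gps_track.shp",
                    line_field="ROUTE_ID")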
def analyzePolylines(fcs, outDir, loadCSVtoFeatureClass=False, spatialRef=None):
    """
    This is the only function you need to call. Given a list of Polyline feature
    classes, compute the curve data for each curve (circular arc segment) that
    passes through each triplet of points.
    :param fcs: Name of feature class to be processed (or list of feature classes)
    :param outDir: Directory to put the resulting csv files. (CSV names are autogenerated)
    :param loadCSVtoFeatureClass: Optional. Load the csv file back into ArcMap as a confidence check
    :param spatialRef: Coordinate system to which to project point coordinates and show length units
    :return: None
    """
    try:
        validate_or_create_outDir(outDir)
    except:
        arcPrint("Unable to create output directory. No files processed.")
        return
    successList = []  # initialized here; the original relied on a list defined elsewhere
    for fc in fcs:
        try:
            arcPrint("Now processing {0}".format(fc))
            csvName = processFCforCogoAnalysis(fc, outDir, spatialRef=spatialRef)
            successList.extend(csvName)
            arcPrint("File created: {0}".format(csvName))
            arcPrint(" ")
        except NotPolylineError:
            arcPrint("{0} not processed because it is not a Polyline Feature Class.".format(fc))
        except arcpy.ExecuteError:
            arcPrint("Arc Error while processing Feature Class: {0}".format(fc))
        except Exception as e:
            arcPrint("Unexpected error: {0}".format(e.message))
            raise
    if loadCSVtoFeatureClass and len(successList) > 0:
        tempPoints = 'tempPoints___'
        mxd = arcpy.mapping.MapDocument('CURRENT')
        dataFrame = mxd.activeDataFrame
        try:
            for csv in successList:
                baseName = os.path.basename(csv)[:-4] + "_check"
                newLayerName = arcpy.env.workspace + '/' + baseName
                arcpy.AddMessage('Workspace: {0}'.format(arcpy.env.workspace))
                arcpy.MakeXYEventLayer_management(csv, 'X', 'Y', tempPoints, spatial_reference=spatialRef)
                arcpy.AddMessage('Attempting to Add Layer: {0}'.format(baseName))
                arcpy.PointsToLine_management(tempPoints, newLayerName)
                layerObj = arcpy.mapping.Layer(newLayerName)
                arcpy.mapping.AddLayer(dataFrame, layerObj, 'BOTTOM')
                arcpy.AddMessage('Added Layer: {0}'.format(baseName))
        finally:
            arcpy.Delete_management(tempPoints)
            del mxd
    else:
        arcpy.AddMessage('Loading check layers was not requested.')
        arcpy.AddMessage(' ')
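# Hedged usage sketch for analyzePolylines(); the feature class names and output
# folder are placeholders, and helpers such as processFCforCogoAnalysis, arcPrint
# and validate_or_create_outDir are assumed to be defined in the same module.
sr = arcpy.SpatialReference(26910)  # hypothetical projected coordinate system
analyzePolylines(["roads_centerlines", "parcel_boundaries"],
                 r"C:\cogo_output",
                 loadCSVtoFeatureClass=False,
                 spatialRef=sr)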
def snap_junction_points(from_line_lyr, to_line_lyr, search_distance):
    """
    Shifts junction points (i.e. tributary confluences) in the 'From' network to the same
    coordinates as junction points in the 'To' network, found within a user-specified search distance.
    :param from_line_lyr: polyline layer representing 'From' stream network
    :param to_line_lyr: polyline layer representing 'To' stream network
    :param search_distance: buffer distance around each 'To' junction point, in meters
    :return: line feature class
    """
    tempWorkspace = "in_memory"
    arcpy.AddMessage("GNAT TLA: snapping junction points in 'From' network to 'To' network")
    snapped_from_line = gis_tools.newGISDataset(tempWorkspace, "snapped_from_line")
    arcpy.CopyFeatures_management(from_line_lyr, snapped_from_line)
    snap_line_lyr = gis_tools.newGISDataset("Layer", "snap_line_lyr")
    arcpy.MakeFeatureLayer_management(snapped_from_line, snap_line_lyr)
    list_field_objects = arcpy.ListFields(snap_line_lyr)
    list_from_fields = [f.name for f in list_field_objects if f.type != "OID" and f.type != "Geometry"]

    # Plot junction points for 'From' and 'To' stream networks
    from_junction_pnts = plot_junction_points(snap_line_lyr, "from")
    to_junction_pnts = plot_junction_points(to_line_lyr, "to")
    lyr_from_junc_pnts = gis_tools.newGISDataset("Layer", "lyr_from_junc_pnts")
    arcpy.MakeFeatureLayer_management(from_junction_pnts, lyr_from_junc_pnts)
    from_line_oidfield = arcpy.Describe(snap_line_lyr).OIDFieldName
    from_vrtx = gis_tools.newGISDataset(tempWorkspace, "from_vrtx")
    arcpy.FeatureVerticesToPoints_management(snap_line_lyr, from_vrtx, point_location="ALL")
    arcpy.AddXY_management(from_vrtx)
    from_vrtx_lyr = gis_tools.newGISDataset("Layer", "from_vrtx_lyr")
    arcpy.MakeFeatureLayer_management(from_vrtx, from_vrtx_lyr)
    arcpy.Near_analysis(from_vrtx_lyr, to_junction_pnts, search_distance, "LOCATION")
    arcpy.SelectLayerByLocation_management(from_vrtx_lyr, "INTERSECT", lyr_from_junc_pnts, "#", "NEW_SELECTION")
    update_xy_coord(from_vrtx_lyr)
    arcpy.MakeXYEventLayer_management(from_vrtx_lyr, "POINT_X", "POINT_Y", "xy_events", from_vrtx_lyr)
    xy_events_pnt = gis_tools.newGISDataset(tempWorkspace, "xy_events_pnt")
    arcpy.CopyFeatures_management("xy_events", xy_events_pnt)
    arcpy.MakeFeatureLayer_management(xy_events_pnt, "xy_events_lyr")
    xy_line = gis_tools.newGISDataset(tempWorkspace, "xy_line")
    arcpy.PointsToLine_management("xy_events_lyr", xy_line, "ORIG_FID")
    arcpy.JoinField_management(xy_line, "ORIG_FID", snap_line_lyr, from_line_oidfield, list_from_fields)
    arcpy.DeleteFeatures_management(snap_line_lyr)
    arcpy.Append_management(xy_line, snap_line_lyr, "NO_TEST")
    return snap_line_lyr
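# Hedged usage sketch for snap_junction_points(); the layer names and the 10 m
# search distance are placeholders, and gis_tools, plot_junction_points and
# update_xy_coord are assumed to come from the surrounding toolbox code.
arcpy.MakeFeatureLayer_management(r"C:\gnat\from_network.shp", "from_lyr")
arcpy.MakeFeatureLayer_management(r"C:\gnat\to_network.shp", "to_lyr")
snapped = snap_junction_points("from_lyr", "to_lyr", 10)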
def plotfencefromraster(self, outputWorspace, outputFeatureName):
    try:
        outFilePath = os.path.join(outputWorspace, outputFeatureName)
        arcpy.CreateFeatureclass_management(outputWorspace, outputFeatureName, "POLYLINE")
        arcpy.AddField_management(outFilePath, "RasterID", "TEXT")
        for rasterDataObject in self.rasterDataCollection:
            rasterName = os.path.basename(rasterDataObject.path)
            tempPoint = os.path.join(arcpy.env.scratchGDB, "TempRasterFencePoint")
            arcpy.CreateFeatureclass_management(arcpy.env.scratchGDB, "TempRasterFencePoint", "POINT")
            tempLine = os.path.join(arcpy.env.scratchGDB, "TempRasterFenceLine")
            i = 0
            for xCoord in rasterDataObject.fenceXcoords:
                cursor = arcpy.da.InsertCursor(tempPoint, ["SHAPE@XY"])
                row = (xCoord, rasterDataObject.fenceYcoords[i])
                cursor.insertRow([row])
                del cursor
                i += 1
            arcpy.PointsToLine_management(tempPoint, tempLine)
            arcpy.AddField_management(tempLine, "RasterID", "TEXT")
            arcpy.CalculateField_management(tempLine, "RasterID", rasterName)
            arcpy.CopyFeatures_management(tempLine, outFilePath)
            arcpy.Delete_management(tempLine)
            arcpy.Delete_management(tempPoint)
    except Exception:
        arcpy.AddMessage(traceback.format_exc())
    finally:
        if arcpy.Exists(tempLine):
            arcpy.Delete_management(tempLine)
        if arcpy.Exists(tempPoint):
            arcpy.Delete_management(tempPoint)
def onClick(self):
    """Build the network (lines and stations) and add it to the document."""
    rep_data = "D:\\ProgSIG\\data"
    gdb = os.path.join(rep_data, "TD_itinearaire.gdb")
    arcpy.env.workspace = gdb
    layer_stations = "Layer_stations_ligne_"
    fc_stations = "Stations_ligne_"
    fc_line = "Ligne_"
    # Build the stations and lines for each text file in the directory
    txt_files = [f for f in os.listdir(rep_data) if f.endswith(".txt")]
    for txt_file in txt_files:
        num_line = txt_file[1:-4]  # drop the first letter and the last 4 characters
        arcpy.MakeXYEventLayer_management(
            txt_file, "x", "y", layer_stations + num_line,
            "PROJCS['RGF_1993_Lambert_93',GEOGCS['GCS_RGF_1993',DATUM['D_RGF_1993',SPHEROID['GRS_1980',6378137.0,298.257222101]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Lambert_Conformal_Conic'],PARAMETER['False_Easting',700000.0],PARAMETER['False_Northing',6600000.0],PARAMETER['Central_Meridian',3.0],PARAMETER['Standard_Parallel_1',44.0],PARAMETER['Standard_Parallel_2',49.0],PARAMETER['Latitude_Of_Origin',46.5],UNIT['Meter',1.0]];-35597500 -23641900 10000;-100000 10000;-100000 10000;0,001;0,001;0,001;IsHighPrecision",
            "")
        arcpy.FeatureClassToFeatureClass_conversion(
            layer_stations + num_line, gdb, fc_stations + num_line, "",
            "x \"x\" true true false 8 Double 0 0 ,First,#,stations_layer,x,-1,-1;y \"y\" true true false 8 Double 0 0 ,First,#,stations_layer,y,-1,-1;nom \"nom\" true true false 8000 Text 0 0 ,First,#,stations_layer,nom,-1,-1",
            "")
        arcpy.PointsToLine_management(fc_stations + num_line, fc_line + num_line, "", "", "NO_CLOSE")
    # Merge all the stations into a single feature class
    fcs_stations = []
    fcs = arcpy.ListFeatureClasses()
    for fc in fcs:
        if fc_stations in fc:
            fcs_stations.append(fc)
    arcpy.Merge_management(fcs_stations, FC_ALL_STATIONS)
    arcpy.DeleteIdentical_management(FC_ALL_STATIONS, "Shape")
    # Add it to the document
    add_layer(mxd, FL_ALL_STATIONS)
def doArcpyStuff(outputDIR):
    import arcpy
    arcpy.env.overwriteOutput = True
    gdb = os.path.join(outputDIR, "f.gdb")
    if not arcpy.Exists(gdb):
        arcpy.CreateFileGDB_management(outputDIR, "f.gdb")
    travelCSV = glob.glob(outputDIR + '\*.csv')
    for csv in travelCSV:
        teamName = os.path.splitext(os.path.basename(csv))[0].replace(" ", "")
        teamName = teamName.replace('.', '')
        print(teamName)
        arcpy.MakeXYEventLayer_management(
            csv, "Field6", "Field7", teamName,
            "GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]];-400 -400 1000000000;-100000 10000;-100000 10000;8.98315284119522E-09;0.001;0.001;IsHighPrecision",
            "#")
        outputPoint = os.path.join(gdb, teamName + "_pts")  # "in_memory\\TempPoint"
        arcpy.CopyFeatures_management(teamName, outputPoint, "#", "0", "0", "0")
        finalLines = os.path.join(gdb, teamName)
        arcpy.PointsToLine_management(outputPoint, finalLines, "", "Field1", "NO_CLOSE")
        arcpy.AddField_management(finalLines, "distance", "DOUBLE")
        arcpy.AddField_management(finalLines, "team", "TEXT", "", "", 50)  # 50 is the field length
        arcpy.CalculateField_management(finalLines, "team", "'" + teamName + "'", "PYTHON_9.3")
        arcpy.CalculateField_management(finalLines, "distance", "!shape.length@kilometers!", "PYTHON_9.3", "#")
    return gdb
)
log.write(str(datetime.datetime.today().strftime('%H:%M:%S')) +
          ' Make Query Table Management Complete to {0}.\n'.format('RigSched'))
print str(datetime.datetime.today().strftime('%H:%M:%S')) + \
    ' Make Query Table Management Complete to {0}.\n'.format('RigSched')

#TEST OUTPUT
#FeatureClassToFeatureClass_conversion (in_features, out_path, out_name, {where_clause}, {field_mapping}, {config_keyword})
#arcpy.FeatureClassToFeatureClass_conversion ('Test1', fgdbPath, 'Test')

#Point to Line
#PointsToLine_management (Input_Features, Output_Feature_Class, {Line_Field}, {Sort_Field}, {Close_Line})
arcpy.PointsToLine_management(RigSched, 'in_memory\\tempriglines', 'RigId', 'StartDate')
log.write(str(datetime.datetime.today().strftime('%H:%M:%S')) + ' Points to Line Complete.\n')
print str(datetime.datetime.today().strftime('%H:%M:%S')) + ' Points to Line Complete.\n'

#one time
#arcpy.FeatureClassToFeatureClass_conversion ('in_memory\\tempriglines',GISpegistestPath,'EPEX_RigLines')
infc = 'in_memory\\tempriglines'
outfc = 'pegistest.GIS.EPEX_RigLines'

#Truncate
#TruncateTable_management (in_table)
arcpy.TruncateTable_management(outfc)
log.write(
def get_centerline(feature, dem, workspace, power=5, eu_cell_size=10):
    """Returns a center line feature of the given polygon feature based on cost
    over an euclidean distance raster and cost path. Points are seeded using
    minimum and maximum elevation."""
    centerline = workspace + '\\centerline.shp'
    center_length = 0
    center_slope = 0
    smoothing = 4
    trim_distance = "100 Meters"

    try:
        # Setup extents / environments for the current feature
        ARCPY.env.extent = feature.shape.extent
        desc = ARCPY.Describe(feature)
        XMin_new = desc.extent.XMin - 200
        YMin_new = desc.extent.YMin - 200
        XMax_new = desc.extent.XMax + 200
        YMax_new = desc.extent.YMax + 200
        ARCPY.env.extent = ARCPY.Extent(XMin_new, YMin_new, XMax_new, YMax_new)

        ARCPY.env.overwriteOutput = True
        ARCPY.env.cellSize = eu_cell_size
        ARCPY.env.snapRaster = dem

        # Get minimum and maximum points
        resample = ARCPY.Resample_management(dem, 'in_memory\\sample', eu_cell_size)
        masked_dem = spatial.ExtractByMask(resample, feature.shape)

        # Find the maximum elevation value in the feature, convert them to
        # points and then remove all but one.
        maximum = get_properties(masked_dem, 'MAXIMUM')
        maximum_raster = spatial.SetNull(masked_dem, masked_dem, 'VALUE <> ' + maximum)
        maximum_point = ARCPY.RasterToPoint_conversion(maximum_raster, 'in_memory\\max_point')
        rows = ARCPY.UpdateCursor(maximum_point)
        for row in rows:
            if row.pointid <> 1:
                rows.deleteRow(row)
        del row, rows

        # Find the minimum elevation value in the feature, convert them to
        # points and then remove all but one.
        minimum = get_properties(masked_dem, 'MINIMUM')
        minimum_raster = spatial.SetNull(masked_dem, masked_dem, 'VALUE <> ' + minimum)
        minimum_point = ARCPY.RasterToPoint_conversion(minimum_raster, 'in_memory\\min_point')
        rows = ARCPY.UpdateCursor(minimum_point)
        for row in rows:
            if row.pointid <> 1:
                rows.deleteRow(row)
        del row, rows

        # Calculate euclidean distance to the boundary line for input DEM cells.
        polyline = ARCPY.PolygonToLine_management(feature.shape, 'in_memory\\polyline')
        eucdist = spatial.EucDistance(polyline, "", eu_cell_size, '')
        masked_eucdist = spatial.ExtractByMask(eucdist, feature.shape)

        # Calculate the cost raster by inverting the euclidean distance results,
        # and raising it to the power of x to exaggerate the least expensive route.
        cost_raster = (-1 * masked_eucdist + float(maximum)) ** power

        # Run the cost distance and cost path functions to find the path of least
        # resistance between the minimum and maximum values. The results are set
        # so all values equal 1 (different path segments have different values)
        # and the raster line is converted to a poly-line.
        backlink = 'in_memory\\backlink'
        cost_distance = spatial.CostDistance(minimum_point, cost_raster, '', backlink)
        cost_path = spatial.CostPath(maximum_point, cost_distance, backlink, 'EACH_CELL', '')
        cost_path_ones = spatial.Con(cost_path, 1, '', 'VALUE > ' + str(-1))  # Set all resulting pixels to 1
        r_to_p = ARCPY.RasterToPolyline_conversion(cost_path_ones, 'in_memory\\raster_to_polygon')

        del ARCPY.env.extent  # Delete current extents (needed here, but it is not clear why)

        # Removes small line segments from the centerline shape. These segments are
        # a byproduct of cost analysis.
        lines = str(ARCPY.GetCount_management(r_to_p))  # check whether we have more than one line segment
        if float(lines) > 1:
            # If there is more than one line
            rows = ARCPY.UpdateCursor(r_to_p)
            for row in rows:
                if row.shape.length == eu_cell_size:  # delete all the short 10 m lines
                    rows.deleteRow(row)
            del row, rows
            lines = str(ARCPY.GetCount_management(r_to_p))
            if float(lines) > 1:
                ARCPY.Snap_edit(r_to_p, [[r_to_p, "END", "50 Meters"]])  # make sure that the ends of the lines are connected
                r_to_p = ARCPY.Dissolve_management(r_to_p, 'in_memory\\raster_to_polygon_dissolve')

        # Smooth the resulting line. Currently smoothing is determined by minimum
        # and maximum distance. The greater the change, the greater the smoothing.
        smooth_tolerance = (float(maximum) - float(minimum)) / smoothing
        ARCPY.SmoothLine_cartography(r_to_p, centerline, 'PAEK', smooth_tolerance,
                                     'FIXED_CLOSED_ENDPOINT', 'NO_CHECK')

        field_names = []  # List of field names in the file that will be deleted.
        fields_list = ARCPY.ListFields(centerline)
        for field in fields_list:  # Loop through the field names
            if not field.required:
                # If they are not required, append them to the list of field names.
                field_names.append(field.name)

        # Add new fields to the center line feature
        ARCPY.AddField_management(centerline, 'GLIMSID', 'TEXT', '', '', '25')
        ARCPY.AddField_management(centerline, 'LENGTH', 'FLOAT')
        ARCPY.AddField_management(centerline, 'SLOPE', 'FLOAT')
        ARCPY.DeleteField_management(centerline, field_names)  # Remove the old fields.

        # Calculate the length of the line segment and populate segment data.
        ARCPY.CalculateField_management(centerline, 'LENGTH', 'float(!shape.length@meters!)', 'PYTHON')
        rows = ARCPY.UpdateCursor(centerline)
        for row in rows:
            row.GLIMSID = feature.GLIMSID  # Get GLIMS ID and add it to segment
            center_length = row.LENGTH  # Get the length of the center line
            # Calculate slope of the line based on change in elevation over length of line
            center_slope = round(math.degrees(math.atan((float(maximum) - float(minimum)) / row.LENGTH)), 2)
            row.SLOPE = center_slope  # Write slope to segment
            rows.updateRow(row)  # Update the new entry
        del row, rows  # Delete cursors and remove locks

        # Flip line if needed - turn the min point and the end point into a line segment; if
        # the length of this line is greater than the threshold set, flip the line.
        end_point = ARCPY.FeatureVerticesToPoints_management(centerline, 'in_memory\\end_point', 'END')
        merged_points = ARCPY.Merge_management([end_point, minimum_point], 'in_memory\\merged_points')
        merged_line = ARCPY.PointsToLine_management(merged_points, 'in_memory\\merged_line')

        merged_line_length = 0  # Get the line length
        rows = ARCPY.SearchCursor(merged_line)
        for row in rows:
            merged_line_length += row.shape.length
        del row, rows

        # If the line length is greater than a quarter of the entire feature length, flip
        if merged_line_length > (center_length / 4):
            ARCPY.FlipLine_edit(centerline)

        # This function attempts to extend the line and clip it back to the
        # feature extents in order to create a line that runs from edge to edge
        #trimmed_line = ARCPY.Merge_management([polyline, centerline], 'in_memory\\line_merge')
        trimmed_line = ARCPY.Append_management(polyline, centerline, 'NO_TEST')
        ARCPY.TrimLine_edit(trimmed_line, trim_distance, "DELETE_SHORT")
        ARCPY.ExtendLine_edit(trimmed_line, trim_distance, "EXTENSION")

        rows = ARCPY.UpdateCursor(trimmed_line)
        for row in rows:
            if row.LENGTH == 0.0:
                rows.deleteRow(row)
        del row, rows

        # Recalculate length. This must be done after 0.0 lengths are deleted or they will
        # not be removed above.
        ARCPY.CalculateField_management(centerline, 'LENGTH', 'float(!shape.length@meters!)', 'PYTHON')

        ARCPY.env.overwriteOutput = False
        return centerline, center_length, center_slope, False
    except:
        ARCPY.env.overwriteOutput = False
        return centerline, '', '', True
tejeria_dsm_medium_tif = "D:/GullyGeoChallenge/data/tejeria_dsm_medium.tif"
GPS_Points_Tejeria_shp = "D:/GullyGeoChallenge/data/GPS_points/GPS_Points_Tejeria.shp"

# Process: Select
# Set GPS points in the expression below.
# Replace one of the following combinations for the 001 and 003 in the expression.
# Valid combinations are: (001, 003); (0013, 0016); (0026, 0028); (039, 041); (048, 050);
# (073, 075); (078, 079); (0115, 0117); (0146, 0147); (0148, 0149)
Expression = "\"id\" = 'TEJGUL0026' OR \"id\" = 'TEJGUL0028'"
Selected_Points = "D:/GullyGeoChallenge/data/Step.gdb/Selected_Points"
arcpy.Select_analysis(GPS_Points_Tejeria_shp, Selected_Points, Expression)

# Process: Points To Line
Points_to_Line = "D:/GullyGeoChallenge/data/Step.gdb/Points_to_Line"
arcpy.PointsToLine_management(Selected_Points, Points_to_Line, "", "y", "NO_CLOSE")

# Process: Create Points Along Lines
Points_along_Line = "D:/GullyGeoChallenge/data/Step.gdb/Points_along_Line"
arcpy.CreatePointsAlongLines_alonglines(Points_to_Line, Points_along_Line, "0.05", "VALUE", "NO_END_POINTS")

# Process: Extract Values to Points
Values_Extracted_to_Points = "D:/GullyGeoChallenge/data/Step.gdb/Values_Extracted_to_Points"
arcpy.gp.ExtractValuesToPoints_sa(Points_along_Line, tejeria_dsm_medium_tif, Values_Extracted_to_Points, "NONE", "VALUE_ONLY")

# Process: Add Field
arcpy.AddField_management(Values_Extracted_to_Points, "Distance", "FLOAT", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
outTxt = dirout + "\\" + cordexDom + ".txt"
outLayer = dirout + "\\" + cordexDom + ".lyr"
outPtsShp = dirout + "\\" + cordexDom + "_pts.shp"
outLinShp = dirout + "\\" + cordexDom + "_lin.shp"
outPolShp = dirout + "\\" + cordexDom + ".shp"
outGlbShp = dirout + "\\cordex.shp"

if not os.path.exists(outPolShp):
    print cordexDom
    wFile = open(outTxt, "w")
    wFile.write("pol" + "\t" + "lat" + "\t" + "lon" + "\n")
    wFile.write(str(corDic[cordexDom]))
    wFile.close()

    arcpy.MakeXYEventLayer_management(outTxt, "lon", "lat", outLayer, "", "")
    arcpy.CopyFeatures_management(outLayer, outPtsShp)
    arcpy.PointsToLine_management(outPtsShp, outLinShp, "", "pol", "CLOSE")
    arcpy.FeatureToPolygon_management(outLinShp, outPolShp, "", "", "")
    arcpy.Delete_management(outLinShp)
    arcpy.Delete_management(outPtsShp)

list = list + ';' + outPolShp

LIST = "\"" + list[1:] + "\""
arcpy.Merge_management(LIST, outGlbShp, "")

print "Process done!!!"
#For every extension field added in the try block, insert an appropriate AddField
except Exception, ErrorDesc:
    arcpy.AddError(str(ErrorDesc))

arcpy.env.workspace = outPath
rows = arcpy.InsertCursor(outFC)
recComplete = 0

# walk through each trkpt, create and insert a record into the feature class for each
for pt, ele, t in trkpt2dict(gpxfile):
    # Add in each value within the Extension Try block
    row = rows.newRow()
    row.SHAPE = pt
    row.Elevation = ele
    row.Date_Time = parse_timestamp(t)[0]
    #row.hr = hr  #Extension element
    #Add an entry for each additional entry in the Extension Try block within the dictionary
    rows.insertRow(row)
    recComplete += 1
    if (recComplete % 2000) == 0:
        arcpy.AddMessage("Processed " + str(recComplete) + " records.")

#If "Point to Line" is required, create a second output of a line feature class
if arcpy.GetParameter(2) == 1:
    arcpy.PointsToLine_management(outFC, outFC + "_line")
    arcpy.SetParameterAsText(3, outFC + "_line")
# ---------------------------------------------------------------------------
# Author: LiaoChenchen
# Created on: 2020/9/28 9:21
# Reference:
"""
Description: Reproduce one of the ArcGIS tools (Points To Line).
Usage:
"""
# ---------------------------------------------------------------------------
import arcpy
import os

# Create a scratch folder
# scratch_folder = "scratchfolder"
# if not os.path.isdir(scratch_folder):
#     os.mkdir(scratch_folder)
# arcpy.env.workspace = scratch_folder

"""----------------------------- create geodatabase -------------------------"""
"""-------------------------------------------------------------------------"""
"""-------------------------------------------------------------------------"""
# make dir
from hybag import ezarcpy
"""-------------------------------------------------------------------------"""
scratch_path = ezarcpy.initialize_environment()[0]
scratch_gdb = ezarcpy.initialize_environment()[1]

inFeatures = os.path.join(scratch_path, "Export_Output.shp")  # the file extension is required
outFeatures = os.path.join(scratch_gdb, "line")
# Points to line
arcpy.PointsToLine_management(inFeatures, outFeatures)
        # Output file geodatabase
        GDB = arcpy.GetParameterAsText(1)
        #GDB = "C:\\150827\\Maras.gdb"
        # Should a KMZ be created or not?
        KMZ = arcpy.GetParameterAsText(2)
    except:
        arcpy.AddMessage("\nProblem while reading the input parameters: " + arcpy.GetMessages(2))
        raise Exception

    try:
        number = int(arcpy.GetCount_management(SHP).getOutput(0))
        for x in range(0, number, 1):
            whereClause = "FID" + "=" + str("%d") % (x)
            fields = ['FID', 'ProjectNam']
            with arcpy.da.SearchCursor(SHP, fields, whereClause) as cursor:
                for RES in cursor:
                    res2 = RES[1]
                    arcpy.AddMessage(res2 + " is being processed")
                    duzelt = res2.split()
                    YOL = GDB + "\\" + duzelt[0] + duzelt[1]
                    if not arcpy.Exists(YOL):
                        Clause = "ProjectNam" + " = '" + res2 + "'" + " AND PointType='K'"
                        arcpy.SelectLayerByAttribute_management(SHP, "NEW_SELECTION", Clause)
                        arcpy.PointsToLine_management(SHP, YOL, "", "", "CLOSE")
                    else:
                        arcpy.SelectLayerByAttribute_management(SHP, "CLEAR_SELECTION")
    except:
        arcpy.AddMessage("\nProblem during the calculations: " + arcpy.GetMessages(2))
        raise Exception
except:
    arcpy.AddError("\nError running script")
    raise Exception
Point3dName = GDBworkspace + "\\" + 'point3d'
Point23dName = GDBworkspace + "\\" + 'point23d'
# inputSR = r"GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]];-400 -400 1000000000;-100000 10000;-100000 10000;8.98315284119522E-09;0.001;0.001;IsHighPrecision"
inputSR = r"PROJCS['WGS_1984_Web_Mercator_Auxiliary_Sphere',GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Mercator_Auxiliary_Sphere'],PARAMETER['False_Easting',0.0],PARAMETER['False_Northing',0.0],PARAMETER['Central_Meridian',0.0],PARAMETER['Standard_Parallel_1',0.0],PARAMETER['Auxiliary_Sphere_Type',0.0],UNIT['Meter',1.0]]"

try:
    # Generate the 3D pipelines
    arcpy.ExcelToTable_conversion(Input_Excel_File=outTempTable, Output_Table=connectsheet.name, Sheet=connectsheet.name)
    arcpy.ExcelToTable_conversion(Input_Excel_File=outTempTable, Output_Table=pointsheet.name, Sheet=pointsheet.name)
    arcpy.ExcelToTable_conversion(Input_Excel_File=outTempTable, Output_Table=pointsheet2.name, Sheet=pointsheet2.name)
    arcpy.ExcelToTable_conversion(Input_Excel_File=outTempTable, Output_Table=allpointssheet.name, Sheet=allpointssheet.name)
    arcpy.MakeXYEventLayer_management(connect_table, "x", "y", "templayer", spatial_reference=inputSR, in_z_field="z")
    logging.info('MakeXYEventLayer_management')
    arcpy.FeatureClassToFeatureClass_conversion(in_features="templayer", out_path="in_memory", out_name="tempPoint")
    arcpy.PointsToLine_management(Input_Features="in_memory" + "\\" + "tempPoint",
                                  Output_Feature_Class="in_memory" + "\\" + "resultLine",
                                  Line_Field="id")
    arcpy.MakeFeatureLayer_management("in_memory" + "\\" + "resultLine", "layer")
    arcpy.AddJoin_management("layer", "id", connect_table, "id")
    logging.info('AddJoin_management')
    arcpy.FeatureClassToFeatureClass_conversion("layer", out_path=outputFile, out_name=PipeFeatureName)
    logging.info('Buffer3D_3d')
    arcpy.Buffer3D_3d(outputFile + "\\" + PipeFeatureName, GDBPipeName, 'width', 'STRAIGHT', 30)
    logging.info('Append pipeline3d data')
    arcpy.Append_management(inputs=GDBPipeName, target=PipeName)
    arcpy.Delete_management("in_memory")

    # Generate the 3D pipe points
    logging.info('create point3d')
    arcpy.MakeXYEventLayer_management(point_table, "x", "y", "templayer", spatial_reference=inputSR, in_z_field="z")
    arcpy.FeatureClassToFeatureClass_conversion(in_features="templayer", out_path="in_memory", out_name="tempPoint")
    logging.info('FeatureClassToFeatureClass_conversion')
def createSegments(contour_at_mean_high_water, contour_at_surge):
    # Start a timer
    time1 = time.clock()
    arcpy.AddMessage("\nSegmentation of the coastline started at " + str(datetime.now()))

    # Specify a tolerance distance or minimum length of a seawall
    # Users are not yet given control of this
    th = 150

    # Create random points along the lines (mean high water and the surge of choice)
    # The numbers used are just my choice based on iterative observations
    random0 = arcpy.CreateRandomPoints_management(out_path=arcpy.env.workspace,
                                                  out_name="random0",
                                                  constraining_feature_class=contour_at_mean_high_water,
                                                  number_of_points_or_field=long(1600),
                                                  minimum_allowed_distance="{0} Feet".format(th))
    random1 = arcpy.CreateRandomPoints_management(out_path=arcpy.env.workspace,
                                                  out_name="random1",
                                                  constraining_feature_class=contour_at_surge,
                                                  number_of_points_or_field=long(1600),
                                                  minimum_allowed_distance="{0} Feet".format(th))

    # Perform a proximity analysis with the NEAR tool
    arcpy.Near_analysis(random0, random1)

    # Give each point a fixed unique ID
    # Create the ID field
    arcpy.AddField_management(random0, "UniqueID", "SHORT")
    arcpy.AddField_management(random1, "UniqueID", "SHORT")
    # Add unique IDs
    arcpy.CalculateField_management(random0, "UniqueID", "[FID]")
    arcpy.CalculateField_management(random1, "UniqueID", "[FID]")

    # Categorize/separate each feature based on its near feature
    # Create a table view of random0
    table0 = arcpy.MakeTableView_management(random0, "random0_table")
    #table1 = arcpy.MakeTableView_management(random1, "random1_table")
    # Sort the near feature for each point in random0
    random0_sorted = arcpy.Sort_management(table0, "random0_sorte.dbf", [["NEAR_FID", "ASCENDING"]])

    # Create "long enough" lists for each of the fields of interest: ID, NEAR_ID, and NEAR_DIST
    # (distance to closest point). I added [99999] here to extend the list length and avoid IndexError
    list_fid = [r.getValue("UniqueID") for r in arcpy.SearchCursor(random0_sorted, ["UniqueID"])] + [99999]
    list_nearid = [r.getValue("NEAR_FID") for r in arcpy.SearchCursor(random0_sorted, ["NEAR_FID"])] + [99999]
    list_neardist = [r.getValue("NEAR_DIST") for r in arcpy.SearchCursor(random0_sorted, ["NEAR_DIST"])] + [99999]
    del r

    # Only take points with a near feature within the specified threshold. If it's too far, it's no better
    # than the others for a segment point
    list_fid_filtered = [i for i in list_neardist if i < th]
    # Then initiate lists to contain their Unique ID and Near ID
    first_unique_id = []
    first_near_id = []
    # Get NEAR_ID and Unique ID for each of these points
    for i in list_fid_filtered:
        first_unique_id.append(list_fid[list_neardist.index(i)])
        first_near_id.append(list_nearid[list_neardist.index(i)])

    # Only take the unique values in case there are duplicates. This shouldn't happen. Just to make sure.
    first_unique_id = [i for i in set(first_unique_id)]
    first_near_id = [i for i in set(first_near_id)]

    # Now create a new feature out of these points
    # First let's create a feature layer
    arcpy.MakeFeatureLayer_management("random0.shp", "random0_lyr")
    # Let's select all points and export them into a new feature
    random0_points = arcpy.SearchCursor(random0, ["UniqueID"])
    point0 = random0_points.next()
    for point0 in random0_points:
        for i in range(len(first_unique_id)):
            if point0.getValue("UniqueID") == first_unique_id[i]:
                selector0 = arcpy.SelectLayerByAttribute_management(
                    "random0_lyr", "ADD_TO_SELECTION",
                    '"UniqueID" = {0}'.format(first_unique_id[i]))
    del point0, random0_points
    new_random0 = arcpy.CopyFeatures_management(selector0, "new_random0")
    arcpy.Delete_management('random0_lyr')

    # Now for the new point feature, remove clusters of points around them and take only the ones
    # with minimum NEAR_DIST
    # First, get the geometry attributes of the new points
    arcpy.AddGeometryAttributes_management(new_random0, "POINT_X_Y_Z_M", "", "", "")

    # Create long enough lists of the fields of interest (same as previously)
    pointx = [r.getValue("POINT_X") for r in arcpy.SearchCursor(new_random0, ["POINT_X"])] + [99999]
    pointy = [r.getValue("POINT_Y") for r in arcpy.SearchCursor(new_random0, ["POINT_Y"])] + [99999]
    new_list_fid = [r.getValue("UniqueID") for r in arcpy.SearchCursor(new_random0, ["UniqueID"])] + [99999]
    new_list_nearid = [r.getValue("NEAR_FID") for r in arcpy.SearchCursor(new_random0, ["NEAR_FID"])] + [99999]
    new_list_neardist = [r.getValue("NEAR_DIST") for r in arcpy.SearchCursor(new_random0, ["NEAR_DIST"])] + [99999]
    del r

    # Initiate a list of every point that has already been compared to the near points
    garbage = []
    # Also initiate lists for the new Unique ID and NEAR ID
    new_unique_ID = []
    new_near_ID = []

    # Then, check if the points are right next to each other. If so, add them to a temporary list
    # and find the one with the closest near ID (or find the minimum of their NEAR_DIST)
    for i in range(len(pointx)):
        if i + 1 < len(pointx):
            # If not within the th range
            if not calculateDistance(pointx[i], pointy[i], pointx[i+1], pointy[i+1]) < float(th) * 1.5:
                # Skip if it's in garbage
                if new_list_nearid[i] in garbage:
                    continue
                else:
                    new_unique_ID.append(new_list_fid[i])
                    new_near_ID.append(new_list_nearid[i])
            # If within the range
            else:
                # Skip if it's in garbage
                if new_list_nearid[i] in garbage:
                    continue
                else:
                    temp_ID = []
                    temp_NEAR = []
                    temp_DIST = []
                    while True:
                        temp_ID.append(new_list_fid[i])
                        temp_NEAR.append(new_list_nearid[i])
                        temp_DIST.append(new_list_neardist[i])
                        garbage.append(new_list_nearid[i])
                        i = i + 1
                        # Stop when within the range again. And add the last point within the range
                        if not calculateDistance(pointx[i], pointy[i], pointx[i+1], pointy[i+1]) < 200:
                            temp_ID.append(new_list_fid[i])
                            temp_NEAR.append(new_list_nearid[i])
                            temp_DIST.append(new_list_neardist[i])
                            garbage.append(new_list_nearid[i])
                            # Calculate the minimum and get the Unique ID and Near ID
                            minD = min(temp_DIST)
                            new_unique_ID.append(new_list_fid[new_list_neardist.index(minD)])
                            new_near_ID.append(new_list_nearid[new_list_neardist.index(minD)])
                            del temp_ID, temp_NEAR, temp_DIST
                            break

    # Now select these final points and export them into a new feature.
    # These are the end points for the segments to be created
    # First, make layers out of all the random points
    arcpy.MakeFeatureLayer_management("random0.shp", "random0_lyr")
    arcpy.MakeFeatureLayer_management("random1.shp", "random1_lyr")

    # Then select and export the end points into feature0 and feature1
    # Based on new_unique_ID for random0
    random0_points = arcpy.SearchCursor(random0, ["UniqueID"])
    point0 = random0_points.next()
    for point0 in random0_points:
        for i in range(len(new_unique_ID)):
            if point0.getValue("UniqueID") == new_unique_ID[i]:
                selected0 = arcpy.SelectLayerByAttribute_management(
                    "random0_lyr", "ADD_TO_SELECTION",
                    '"UniqueID" = {0}'.format(new_unique_ID[i]))
    feature0 = arcpy.CopyFeatures_management(selected0, "feature0")

    # Based on new_near_ID for random1
    random1_points = arcpy.SearchCursor(random1, ["UniqueID"])
    point1 = random1_points.next()
    for point1 in random1_points:
        for k in range(len(new_near_ID)):
            if point1.getValue("UniqueID") == new_near_ID[k]:
                selected1 = arcpy.SelectLayerByAttribute_management(
                    "random1_lyr", "ADD_TO_SELECTION",
                    '"UniqueID" = {0}'.format(new_near_ID[k]))
    feature1 = arcpy.CopyFeatures_management(selected1, "feature1")

    del point0, point1, random0_points, random1_points
    arcpy.Delete_management('random0_lyr')
    arcpy.Delete_management('random1_lyr')

    # Now for the actual creation of the coastal segments,
    # which includes creating polygons and splitting the contours at the corresponding points

    # STEPS NECESSARY FOR POLYGON CREATION
    # Let's first add geometry attributes to these points
    arcpy.AddGeometryAttributes_management(feature0, "POINT_X_Y_Z_M", "", "", "")
    arcpy.AddGeometryAttributes_management(feature1, "POINT_X_Y_Z_M", "", "", "")

    # Let's create lines that connect points from feature0 to feature1
    # Initiate a POLYLINE feature class for these lines
    arcpy.CreateFeatureclass_management(arcpy.env.workspace, "connector_lines.shp", "POLYLINE")

    # Then for each of the points in feature0, get the corresponding point in feature1
    # and create a line for each pair of points
    with arcpy.da.SearchCursor(feature0, ["NEAR_FID", "POINT_X", "POINT_Y"]) as features0:
        for feat0 in features0:
            with arcpy.da.SearchCursor(feature1, ["UniqueID", "POINT_X", "POINT_Y"]) as features1:
                x = 0
                for feat1 in features1:
                    x = x + 1
                    theseTwoPoints = []
                    if feat0[0] == feat1[0]:
                        # Get coordinates
                        X0, Y0 = feat0[1], feat0[2]
                        X1, Y1 = feat1[1], feat1[2]
                        # Append coordinates
                        theseTwoPoints.append(arcpy.PointGeometry(arcpy.Point(X0, Y0)))
                        theseTwoPoints.append(arcpy.PointGeometry(arcpy.Point(X1, Y1)))
                        # Create a line from the coordinates
                        subline = arcpy.PointsToLine_management(theseTwoPoints, "subline" + str(x) + ".shp")
                        # Append all lines into one feature
                        lines = arcpy.Append_management(["subline" + str(x) + ".shp"], "connector_lines.shp")
                        # Then delete the subline as it's now unnecessary
                        arcpy.Delete_management(subline)
                        continue
    del feat0, feat1, features0, features1

    # Now that the connectors are created, let's split the segments
    # Before splitting contours into segments, let's integrate the points and the segments
    # just in case there are misalignments
    arcpy.Integrate_management([contour_at_mean_high_water, feature0])
    arcpy.Integrate_management([contour_at_surge, feature1])
    segments0 = arcpy.SplitLineAtPoint_management(contour_at_mean_high_water, feature0, "segments0.shp", "10 Feet")
    segments1 = arcpy.SplitLineAtPoint_management(contour_at_surge, feature1, "segments1.shp", "10 Feet")

    # And let's give a fixed unique ID to each segment
    arcpy.CalculateField_management(segments0, "Id", "[FID]")
    arcpy.CalculateField_management(segments1, "Id", "[FID]")

    # Now with the split segments and connector lines, let's make segment polygons out of the segments
    almost_segment_polygons = arcpy.FeatureToPolygon_management([segments0, segments1, lines],
                                                                "almost_segment_polygons.shp")
    # Add a unique ID to the segment polygons
    arcpy.CalculateField_management(almost_segment_polygons, "Id", "[FID]")

    # The Feature To Polygon process also created polygons that are surrounded by other polygons.
    # These exist because those areas are surrounded by flooded areas at surge;
    # they are above the surge and technically safe, so let's remove them.
    arcpy.MakeFeatureLayer_management(almost_segment_polygons, 'almost_segment_polygons_lyr')
    arcpy.MakeFeatureLayer_management(segments0, 'segments0_lyr')
    # Only the polygons within the mean_high_water segments are at risk
    arcpy.SelectLayerByLocation_management('almost_segment_polygons_lyr', 'INTERSECT', 'segments0_lyr')
    final_without_length = arcpy.CopyFeatures_management('almost_segment_polygons_lyr', 'final.shp')
    arcpy.Delete_management('segments0_lyr')
    arcpy.Delete_management('almost_segment_polygons_lyr')

    # For the new polygons, let's add the corresponding seawall length
    # Let's add a Length field to both first
    arcpy.AddField_management(final_without_length, "Length", "SHORT")
    arcpy.AddField_management(segments0, "Length", "SHORT")
    # Calculate the length
    with arcpy.da.UpdateCursor(segments0, ["SHAPE@LENGTH", "Length"]) as segments_0:
        for segment_0 in segments_0:
            length = segment_0[0]
            segment_0[1] = length
            segments_0.updateRow(segment_0)
    del segment_0, segments_0

    # With a spatial join, let's add these results to the segment polygons
    final = spatialJoin(final_without_length, segments0, "Length", "Length", "max", "joined_segment.shp")

    # Delete the created but now unnecessary files
    arcpy.Delete_management(random0)
    arcpy.Delete_management(random1)

    # Stop the timer
    time2 = time.clock()
    arcpy.AddMessage("Seawall segments and regions successfully created. It took " +
                     str(time2 - time1) + " seconds")

    return final
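# Hedged usage sketch for createSegments(); the workspace and the two contour
# shapefile names are placeholders, and calculateDistance()/spatialJoin() are
# assumed to be helper functions defined alongside the original tool.
arcpy.env.workspace = r"C:\coastal\scratch"
segments = createSegments("contour_mhw.shp", "contour_surge_3ft.shp")
arcpy.AddMessage("Output segments: {0}".format(segments))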
print("FlightTracks has already been renamed/FlightTracksOld already exists")

#change work environment to scratch workspace
arcpy.env.workspace = output_ws_plane
FCList = arcpy.ListFeatureClasses('*')

#Once everything has been checked, the Points To Line tool is run!
#point to line
lineField = "Filename"
sortField = "ID"
for fc in FCList:
    outfc = os.path.join(output_ws_plane, "{}_line".format(os.path.basename(fc)))
    arcpy.PointsToLine_management(fc, outfc, lineField, sortField)
print("All points turned to line features")

#Smooth lines REQUIRES ADVANCED LICENSE
FCList = arcpy.ListFeatureClasses("*_line")
for fc in FCList:
    outfc = os.path.join(output_ws_plane, "{}_smooth".format(os.path.basename(fc)))
    arcpy.SmoothLine_cartography(fc, outfc, "PAEK", "1000 Meters", "FIXED_CLOSED_ENDPOINT", "NO_CHECK")
print("You're a Smooth Criminal!")

#Add and calculate fields
FCList = arcpy.ListFeatureClasses('*_smooth')
#New field and field type
AFields = [("Year", "LONG"), ("Month", "LONG"), ("Week", "LONG"),
def tableToPolyline(inputTable, inputCoordinateFormat, inputXField, inputYField,
                    outputPolylineFeatures, inputLineField, inputSortField,
                    inputSpatialReference):
    '''
    Converts a table of vertices to one or more polyline features.

    inputTable - input table, each row is a vertex
    inputCoordinateFormat - coordinate notation format of input vertices
    inputXField - field in inputTable for vertex x-coordinate, or full coordinate
    inputYField - field in inputTable for vertex y-coordinate, or None
    outputPolylineFeatures - polyline feature class to create
    inputLineField - field in inputTable to identify separate polylines
    inputSortField - field in inputTable to sort vertices
    inputSpatialReference - spatial reference of input coordinates

    returns polyline feature class

    inputCoordinateFormat must be one of the following:
    * DD_1: Both longitude and latitude values are in a single field. Two values are separated by a space, a comma, or a slash.
    * DD_2: Longitude and latitude values are in two separate fields.
    * DDM_1: Both longitude and latitude values are in a single field. Two values are separated by a space, a comma, or a slash.
    * DDM_2: Longitude and latitude values are in two separate fields.
    * DMS_1: Both longitude and latitude values are in a single field. Two values are separated by a space, a comma, or a slash.
    * DMS_2: Longitude and latitude values are in two separate fields.
    * GARS: Global Area Reference System. Based on latitude and longitude, it divides and subdivides the world into cells.
    * GEOREF: World Geographic Reference System. A grid-based system that divides the world into 15-degree quadrangles and then subdivides into smaller quadrangles.
    * UTM_ZONES: The letter N or S after the UTM zone number designates only North or South hemisphere.
    * UTM_BANDS: The letter after the UTM zone number designates one of the 20 latitude bands. N or S does not designate a hemisphere.
    * USNG: United States National Grid. Almost exactly the same as MGRS but uses North American Datum 1983 (NAD83) as its datum.
    * MGRS: Military Grid Reference System. Follows the UTM coordinates and divides the world into 6-degree longitude and 20 latitude bands, but MGRS then further subdivides the grid zones into smaller 100,000-meter grids. These 100,000-meter grids are then divided into 10,000-meter, 1,000-meter, 100-meter, 10-meter, and 1-meter grids.
    '''
    try:
        env.overwriteOutput = True
        deleteme = []
        joinFieldName = "JoinID"
        scratch = '%scratchGDB%'
        if env.scratchWorkspace:
            scratch = env.scratchWorkspace
        inputSpatialReference = _checkSpatialRef(inputSpatialReference)

        copyRows = os.path.join(scratch, "copyRows")
        arcpy.CopyRows_management(inputTable, copyRows)
        addUniqueRowID(copyRows, joinFieldName)

        copyCCN = os.path.join(scratch, "copyCCN")
        arcpy.ConvertCoordinateNotation_management(copyRows, copyCCN, inputXField, inputYField,
                                                   inputCoordinateFormat, "DD_NUMERIC",
                                                   joinFieldName, inputSpatialReference)

        arcpy.PointsToLine_management(copyCCN, outputPolylineFeatures, inputLineField,
                                      inputSortField, "NO_CLOSE")
        return outputPolylineFeatures

    except arcpy.ExecuteError:
        # Get the tool error messages
        msgs = arcpy.GetMessages()
        arcpy.AddError(msgs)
        print(msgs)

    except:
        # Get the traceback object
        tb = sys.exc_info()[2]
        tbinfo = traceback.format_tb(tb)[0]

        # Concatenate information together concerning the error into a message string
        pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n" + str(sys.exc_info()[1])
        msgs = "ArcPy ERRORS:\n" + arcpy.GetMessages() + "\n"

        # Return python error messages for use in script tool or Python Window
        arcpy.AddError(pymsg)
        arcpy.AddError(msgs)

        # Print Python error messages for use in Python / Python Window
        print(pymsg + "\n")
        print(msgs)

    finally:
        if len(deleteme) > 0:
            # cleanup intermediate datasets
            if debug == True:
                arcpy.AddMessage("Removing intermediate datasets...")
            for i in deleteme:
                if debug == True:
                    arcpy.AddMessage("Removing: " + str(i))
                arcpy.Delete_management(i)
            if debug == True:
                arcpy.AddMessage("Done")
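# Hedged usage sketch for tableToPolyline(); the CSV path, field names and the
# WGS 1984 spatial reference are placeholders, and _checkSpatialRef/addUniqueRowID
# are assumed to be helpers defined in the same module.
sr_wgs84 = arcpy.SpatialReference(4326)
tableToPolyline(r"C:\data\vertices.csv",     # hypothetical input table
                "DD_2",                       # longitude/latitude in two separate fields
                "Longitude", "Latitude",
                r"in_memory\tracks",          # hypothetical output feature class
                "TrackID", "Sequence",        # hypothetical grouping and sort fields
                sr_wgs84)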
genf.write('{},{}\n'.format(str(minLong), str(minLat)))
genf.write('{},{}\n'.format(str(minLong), str(maxLat)))
genf.close()

# convert XY file to .dbf table
boxdbf = arcpy.CreateScratchName('xxx', '.dbf', '', scratch)
boxdbf = os.path.basename(boxdbf)
arcpy.TableToTable_conversion(os.path.join(scratch, 'xxxbox.csv'), scratch, boxdbf)

# make XY event layer from .dbf table
arcpy.MakeXYEventLayer_management(os.path.join(scratch, boxdbf), 'LONGITUDE', 'LATITUDE', 'boxlayer', xycs)

# convert event layer to preliminary line feature class with PointsToLine_management
arcpy.PointsToLine_management('boxlayer', 'xxMapOutline')

# densify MapOutline
arcpy.Densify_edit('xxMapOutline', 'DISTANCE', 0.0001)

# project to correct spatial reference
### THIS ASSUMES THAT OUTPUT COORDINATE SYSTEM IS HARN AND WE ARE IN OREGON OR WASHINGTON!!
if isNAD27:
    geotransformation = 'NAD_1927_To_NAD_1983_NADCON;NAD_1983_To_HARN_OR_WA'
else:
    geotransformation = 'NAD_1983_To_HARN_OR_WA'
geotransformation = ''
arcpy.Project_management('xxMapOutline', 'MapOutline', outSpRef, geotransformation, xycs)
if arcpy.Exists(startEndtemp) == True:
    arcpy.Delete_management(startEndtemp)
print "\nInfo: Data container is cleaned\n"

# Get geospatial geometries from Latitude and Longitude in the data file, exported as a point layer
arcpy.MakeXYEventLayer_management(smFilePath, smlatField, smlonField, smPointLyName, ukCoordSystem)

# Define the produced SMARTscan input features
smInputPoint = os.path.join(gdbDatabasePath, smPointLyName)
smOutputLine = os.path.join(gdbDatabasePath, smLineLyName)

# Get line layers and start & end node coordinates
arcpy.FeatureClassToShapefile_conversion(smPointLyName, gdbDatabasePath)
arcpy.PointsToLine_management(smInputPoint, smOutputLine, smlineField)
print "\nInfo: SMARTscan geospatial points and lines are produced"

# Get start node and end node layers
arcpy.AddGeometryAttributes_management(smOutputLine, "LINE_START_MID_END")
startLayer = arcpy.MakeXYEventLayer_management(smOutputLine, "START_X", "START_Y", "startLayer", ukCoordSystem)
endLayer = arcpy.MakeXYEventLayer_management(smOutputLine, "END_X", "END_Y", "endLayer", ukCoordSystem)

# Convert layers into geodatabase datafiles
arcpy.FeatureClassToFeatureClass_conversion(startLayer, gdbDatabasePath, "StartEN")
arcpy.FeatureClassToFeatureClass_conversion(endLayer, gdbDatabasePath, "EndEN")
for text_file in text_file_list:
    in_table = os.path.join(in_folder, text_file)
    record = pd.read_csv(in_table)
    activity = (record['sport'].unique())
    name = text_file.replace(".tcx.csv", "")  # replace the suffix; strip() removes characters, not a suffix
    XYoutfc = os.path.join("in_memory", name)
    print(name)
    layer_xy = arcpy.management.XYTableToPoint(in_table, XYoutfc, x_coords, y_coords, z_coords,
                                               arcpy.SpatialReference(4326))
    layer_line = arcpy.PointsToLine_management(layer_xy, "line" + name, "", "")
    arcpy.AddField_management(layer_line, "activity", "TEXT", "", "", "", "", "", "", "")
    arcpy.Delete_management(layer_xy)  # the original deleted an undefined "layer" variable
    field = ['activity']
    with arcpy.da.UpdateCursor(layer_line, field) as cursor:
        for row in cursor:
            row[0] = str(activity[0])  # write the activity type into the new field
            cursor.updateRow(row)
    path_list.append(layer_line)
arcpy.Merge_management(path_list, "merged_fc")
arcpy.Delete_management("in_memory")
# create a list of all circuit tables
filepaths = []  # collect paths into a list; the original overwrote a single string
for subdir, dirs, files in os.walk("c:/Users/151268/Documents/BFD/tables/prepped_tables"):
    for file in files:
        filepath = subdir + os.sep + file
        if filepath.endswith(".csv"):
            print(filepath)
            filepaths.append(filepath)

# Set constants
x_coord = "long"
y_coord = "lat"
sp_ref = arcpy.SpatialReference(2249)

# List circuit numbers, will need to manually change
# Make sure these match up with filenames / csv files
# Perhaps there is a better way?
circuits = range(1, 11)
print(circuits)

# Make an XY event layer by looping over filepaths
for filepath, circuit in zip(filepaths, circuits):
    arcpy.MakeXYEventLayer_management(filepath, x_coord, y_coord, "event_layer", sp_ref)
    print("circuit " + str(circuit) + " event created")
    arcpy.CopyFeatures_management("event_layer", "points_circuit_" + str(circuit))
    print("circuit " + str(circuit) + " points created")
    # output name assumed; the original call omitted the required output parameter
    arcpy.PointsToLine_management("points_circuit_" + str(circuit), "line_circuit_" + str(circuit))

# Copy features to geodatabase
costBndry = arcpy.FeatureToLine_management(costBndry, "in_memory" + os.sep + "xxline", "", "NO_ATTRIBUTES")
costBndry = arcpy.Buffer_analysis(costBndry, "in_memory" + os.sep + "buf", CellSize + " Meters", "FULL", "ROUND", "NONE", "")
costEnv = arcpy.FeatureToLine_management(costEnv, "in_memory" + os.sep + "xxenv", "", "NO_ATTRIBUTES")
EnvPnt = arcpy.FeatureVerticesToPoints_management(costEnv, "in_memory" + os.sep + "xxpnt")
fc = arcpy.MakeFeatureLayer_management(EnvPnt, "pLayer")
oid_fieldname = arcpy.Describe(fc).OIDFieldName
arcpy.SelectLayerByAttribute_management(fc, "NEW_SELECTION", oid_fieldname + " = 1 OR " + oid_fieldname + " = 3")
diag_1 = arcpy.PointsToLine_management(fc, "in_memory" + os.sep + "diag_1")
arcpy.SelectLayerByAttribute_management(fc, "SWITCH_SELECTION")
diag_2 = arcpy.PointsToLine_management(fc, "in_memory" + os.sep + "diag_2")
costEnv = arcpy.Merge_management([costEnv, diag_1, diag_2], "in_memory" + os.sep + "merge_line")
costEnv = arcpy.FeatureToPolygon_management(costEnv, "in_memory" + os.sep + "xxenv", "", "NO_ATTRIBUTES")
costSource = arcpy.Intersect_analysis([costBndry, costEnv], "in_memory" + os.sep + "sources")

##tmpExtent = arcpy.env.extent
##arcpy.env.extent = 'MAXOF'
##
######
##fc = arcpy.Buffer_analysis(xxblank, "in_memory" + os.sep + "buf", CellSize + " Meters", "RIGHT", "FLAT", "NONE", "")