def main(SpatialReference, workingdir, gis_dir, grid_featureclass, grid_featureclass_proj, logfile):
    """Intersect withdrawal (cup well) locations with the model grid.

    Reads x,y withdrawal points from ``withdrawal_point_locations_and_rates.csv``
    in ``workingdir``, builds an XY event layer in ``SpatialReference``,
    re-projects it with ``grid_featureclass_proj``, intersects the points with
    ``grid_featureclass`` and exports (WellId, layer, row, col, Q_cfd) records
    to ``wells_to_add.csv`` in ``workingdir``.

    Parameters:
        SpatialReference: spatial reference (or .prj path) of the input x,y coordinates.
        workingdir: directory holding the input CSV and receiving the output CSV.
        gis_dir: directory containing ``cup.gdb`` (becomes the arcpy workspace).
        grid_featureclass: model-grid feature class to intersect against.
        grid_featureclass_proj: projection applied to the points before intersecting.
        logfile: path of the run log; every progress message is appended to it.

    Returns:
        None.  All results are written to cup.gdb and the output CSV.
    """

    def _log(message):
        # Progress messages are both printed and appended to the run log;
        # extracted because the original repeated this pattern at every step.
        print(message)
        with open(logfile, 'a') as lf:
            lf.write(message)

    _log("\n\tInitializing process for intersecting withdrawal locations with model grid (takes a few seconds) . . .\n")

    # Define the cup geodatabase and make it the active workspace.
    cup_gdb = os.path.join(gis_dir, 'cup.gdb')
    arcpy.env.workspace = cup_gdb

    # ---------------------------------------------
    # Set up the new XY event layer
    # ---------------------------------------------
    in_Table = os.path.join(workingdir, 'withdrawal_point_locations_and_rates.csv')
    x_coords = "XCoord"
    y_coords = "YCoord"
    cup_wells_layer_state_plane_north = 'cup_wells_layer_state_plane_north'
    cup_wells_layer = 'cup_wells_layer'
    cupWells_fc = r'cup_wells_fc'  # feature class

    _log("\tCleaning out old cup well event layer")
    if arcpy.Exists(cup_wells_layer_state_plane_north):
        arcpy.Delete_management(cup_wells_layer_state_plane_north)

    # Make the XY event layer...
    _log("\tImporting x,y coordinates for withdrawal points")
    # syntax: MakeXYEventLayer_management(table, in_x_field, in_y_field, out_layer, {spatial_reference}, {in_z_field})
    arcpy.MakeXYEventLayer_management(in_Table, x_coords, y_coords,
                                      cup_wells_layer_state_plane_north,
                                      SpatialReference)

    # Count the total number of imported wells.
    number_of_withdrawal_points = arcpy.GetCount_management(cup_wells_layer_state_plane_north)
    _log("\t{0} withdrawal points imported\n".format(number_of_withdrawal_points))

    # ---------------------------------------------
    # Save the event layer into a feature class
    # ---------------------------------------------
    if arcpy.Exists(cup_wells_layer):
        arcpy.Delete_management(cup_wells_layer)
    # Re-project the wells into the grid's projection (NFSEG is Albers).
    # syntax: Project_management(Input, Output, Projection)
    arcpy.Project_management(cup_wells_layer_state_plane_north, cup_wells_layer,
                             grid_featureclass_proj)
    _log('\tWithdrawal points projected\n\tSaving to feature class\n')

    if arcpy.Exists(cupWells_fc):
        arcpy.Delete_management(cupWells_fc)
    arcpy.CopyFeatures_management(cup_wells_layer, cupWells_fc)
    _log("\tSaved feature class\n")

    # ---------------------------------------------
    # Find the model-grid cells (row, col, layer) that intersect
    # the well coordinates, save them to the gdb, export to CSV.
    # NOTE: arcpy.Identity_analysis was replaced with arcpy.Intersect_analysis
    # because Identity was not available with a basic ArcMap license (20190917).
    # ---------------------------------------------
    outFeatures = os.path.join(cup_gdb, "cup_wells_with_grid_info")
    csvOutputFile = os.path.join(workingdir, "wells_to_add.csv")

    _log("\tIntersecting withdrawal point locations with model grid ...\n")
    if arcpy.Exists(outFeatures):
        arcpy.Delete_management(outFeatures)
    # Find the intersecting points.
    # syntax: Intersect_analysis(in_features, out_feature_class, {join_attributes}, {cluster_tolerance}, {output_type})
    arcpy.Intersect_analysis([cupWells_fc, grid_featureclass], outFeatures)
    _log('\tIntersection complete\n\tExporting well(s) row,column,layer information to .csv file ...\n')

    # Export to a .csv file.
    if arcpy.Exists(csvOutputFile):
        arcpy.Delete_management(csvOutputFile)
    arcpy.ExportXYv_stats(outFeatures,
                          ["WellId", "layer", "row", "col", "Q_cfd"], "COMMA",
                          csvOutputFile, "ADD_FIELD_NAMES")

    _log("\n\tFinished (row, col) identification for withdrawal points\n\n")
    return
# Local variables: WCSS_SOURCE = "D:/SpatialAdapters/ArcCatalog/OLE DB ConnectionWCSS.odc/WCSS.BR_AXEBLADE_MV" WCSS_Layer = "WCSS_Layer" SIIGIS_WG_LAYER = "d:/SpatialAdapters/ArcCatalog/Connection to US1190SQLI01 as gismaster.sde/WellsGIS.GIS_MASTER.WG_BR_AXEBLADE" now = datetime.datetime.now() print "GisBuild of BR_AXEBLADE" print "Started: " + str(now) try: if arcpy.Exists(WCSS_SOURCE): # Process: Make XY Event Layer print "Source Table Exists" arcpy.MakeXYEventLayer_management(WCSS_SOURCE, "longitude", "latitude", WCSS_Layer, "GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]];-400 -400 1000000000;-100000 10000;-100000 10000;8.98315284119522E-09;0.001;0.001;IsHighPrecision", "") print "Make XY Layer Succeeded" if arcpy.Exists(WCSS_Layer): print "Proceed to Copy Features" # Process: Copy Features if arcpy.Exists(SIIGIS_WG_LAYER): print "Delete FeatureClass" arcpy.Delete_management(SIIGIS_WG_LAYER) print "Delete FeatureClass Succeeded" print "Begin Copy Features Management" arcpy.CopyFeatures_management(WCSS_Layer, SIIGIS_WG_LAYER, "", "0", "0", "0") print "All Features Copied\n" now = datetime.datetime.now() print "Ended: " + str(now) + "\n" except:
USGS_QUAD = r'L:\Data\GIS\USGS\Topographic Maps\USGS_24k_Topo_Map_Boundaries.shp' # --------------------------------------------------------------------------------------------------------------------- # CONVERSION TO GIS FEATURE CLASS-------------------------------------------------------------------------------------- # --------------------------------------------------------------------------------------------------------------------- arcpy.TableToTable_conversion(INPUT_CSV, Scratch_GDB, "downloaded_table") arcpy.AddMessage("Completed: Covert CSV to Table") tempEnvironment0 = arcpy.env.workspace arcpy.env.workspace = Scratch_GDB #HTMP QUERY NOT NECESARY FOR DRI #arcpy.MakeQueryTable_management(downloaded_table, "QueryTable", "ADD_VIRTUAL_KEY_FIELD", "", "", SQL_Expression) #arcpy.AddMessage("Completed: SQL Query") arcpy.MakeXYEventLayer_management(downloaded_table, "_longitude", "_latitude", "HTMP_XY_Event_Layer", GCS_WGS_1984_SpatRef) arcpy.AddMessage("Completed: Make XY Point Layer") arcpy.Project_management("HTMP_XY_Event_Layer", HTMP_Albers, NAD_1983_Albers_SpatRef) arcpy.AddMessage("Completed: Project to NAD_1983_Albers") # --------------------------------------------------------------------------------------------------------------------- # ONE TO ONE JOINS ---------------------------------------------------------------------------------------------------- # --------------------------------------------------------------------------------------------------------------------- arcpy.env.workspace = r'in_memory' #change to in_memory for performance (the Project tool above cannot use in_memory workspace) #Spatial Join (1) GovLands Intersect DW_FUNCTIONS.SpatJoin_1to1_Intersect("AGENCY_AREANAME", ["AGENCY_AREANAME"], HTMP_Albers, SCE_Gov_Lands_20190313, "HTMP_GovLands")
if action == "": action = "TA" nOk = nOk + 1 if ult_connection <> connIn: ult_connection = connIn arcpy.AddMessage("\n" + connIn) arcpy.AddMessage("\n" + action + " Input = " + tablein + " ==> " + tableOut) if arcpy.Exists(os.path.join(connIn, tablein)): if arcpy.Exists(os.path.join(connOut, tableOut)): if find_field(os.path.join(connIn, tablein), fieldX): if find_field(os.path.join(connIn, tablein), fieldY): if fieldX <> "" and fieldY <> "": sr = arcpy.SpatialReference(int(sheet1.cell(rownum, 4).value)) try: arcpy.MakeXYEventLayer_management(os.path.join(connIn, tablein), fieldX, fieldY, "salida_tmp", sr, "") if action == "T": arcpy.TruncateTable_management(os.path.join(connOut, tableOut)) elif action == "A": arcpy.Append_management("salida_tmp", os.path.join(connOut, tableOut), "TEST", "", "") elif action == "TA": arcpy.TruncateTable_management(os.path.join(connOut, tableOut)) arcpy.Append_management("salida_tmp", os.path.join(connOut, tableOut), "TEST", "", "") except: problem = arcpy.GetMessages() arcpy.AddMessage("\n==============================================") arcpy.AddMessage(problem) m = False if "MakeXYEventLayer" in problem:
of2longname = tblPrclLabel + "_" + of2 if fieldInfo.getFieldName(index) == of2longname: fieldInfo.setNewName(index, of2) index += 1 print('\tField names converted.') arcpy.MakeTableView_management("tmptbl", "tmptbl2", "", "", fieldInfo) print('\tMade table view.') arcpy.TableToTable_conversion("tmptbl2", labelGDBpath, tblCdstrLabel) ### http://gis.stackexchange.com/questions/48353/rename-feature-layer-fields ####################### End of block to set field names in parcel_label_pt correctly ############################################################################################ print('\tExported table.') arcpy.RemoveJoin_management("cadplyr") ### Create a point feature class based on the exported table # Make a temporary event layer arcpy.MakeXYEventLayer_management(tblCLblPath, xlbl, ylbl, "XYeventlyr", spRef) # Output the event layer to the point feature class arcpy.FeatureClassToFeatureClass_conversion("XYeventlyr", labelGDBpath, pLabelPt) endtime = datetime.datetime.now() elapsedtime = endtime - starttime print('{} DONE. Time taken... {} H:MM:SS.dddddd').format( os.path.basename(__file__), elapsedtime) ### Note that mxd should be opened to check extents and create annotations print( '***NOTE: next step/s = check data extents and create annotations... --> Open ArcGIS' ) except:
def crs5_prepare_for_labels(args):
    """Prepare a local labels file GDB from SDE staging data.

    args: sequence of [working folder, labels GDB name, SDE path, SDE data prefix].
    Copies the configured feature classes and tables from SDE into a local GDB,
    adds label-point x/y fields to the cadastre, exports "P"-category parcels,
    joins them to the parcel label table (fixing joined field names), and
    builds the "PARCEL_LABEL_PT" point feature class from the exported table.

    Returns (err_message, log_messages); err_message is None on success.
    NOTE(review): the two early exits inside the copy loops return only
    err_message (not the tuple) — callers should be checked; left as-is here.
    NOTE(review): relies on module-level names (fcCadastre, tblPrclLabel,
    tblCdstrLabel, fcsToCopy, tblsToCopy, xlbl, ylbl, joinFieldP1/P2,
    outfieldsP1/P2, spRef, pLabelPt, script_name, log_msg, delete_layer,
    log_messages) defined elsewhere in the file.
    """
    wkgFolder = args[0]
    labelGDBname = args[1]
    sdePath = args[2]
    dataSDEprefix = args[3]
    # log = args[4]
    # Set locations, etc
    labelGDBpath = os.path.join(wkgFolder, labelGDBname)
    fcCdstrPath = os.path.join(labelGDBpath, fcCadastre)
    fcCadP = fcCadastre + "_P"
    fcCadPPath = os.path.join(labelGDBpath, fcCadP)
    tblPLblPath = os.path.join(labelGDBpath, tblPrclLabel)
    tblCLblPath = os.path.join(labelGDBpath, tblCdstrLabel)
    # Set environment
    arcpy.env.workspace = wkgFolder
    arcpy.env.overwriteOutput = True
    arcpy.env.configkeyword = "GEOMETRY"
    # variables
    err_message = None
    try:
        # log function
        log_msg('calling {}'.format(script_name))
        err_message = None

        ### Create labels GDB - check for existence first
        log_msg('Creating working labels GDB...')
        if arcpy.Exists(labelGDBpath):
            log_msg('WARNING: {} already exists!'.format(labelGDBpath))
        else:
            arcpy.CreateFileGDB_management(wkgFolder, labelGDBname)

        ### Copy feature classes from staging database (SDE) to local GDB
        log_msg('Copying feature classes...')
        for fc in fcsToCopy:
            inFCname = dataSDEprefix + fc
            inFCpath = os.path.join(sdePath, inFCname)
            outFCpath = os.path.join(labelGDBpath, fc)
            # Check whether FC exists in GDB, if so - overwrite
            if arcpy.Exists(outFCpath):
                log_msg('WARNING: {} already exists - overwriting...'.format(fc))
            # Check whether table exists in SDE, if so - continue
            if arcpy.Exists(inFCpath):
                arcpy.Copy_management(inFCpath, outFCpath)
                # Count features and report number - warn if not equal
                inCount = arcpy.GetCount_management(inFCpath).getOutput(0)
                outCount = arcpy.GetCount_management(outFCpath).getOutput(0)
                if inCount == outCount:
                    log_msg('{0} - Copied {1} features to {2}'.format(
                        inFCname, inCount, fc))
                else:
                    log_msg(
                        'ERROR: {0} features copied from {1} - {2} features resultant in {3}'
                        .format(inCount, inFCname, outCount, fc))
            else:
                err_message = '{} does not exist - exit...'.format(fc)
                return err_message

        ### Copy tables from staging database (SDE) to local GDB
        log_msg('Copying tables...')
        for tbl in tblsToCopy:
            inTBLname = dataSDEprefix + tbl
            inTBLpath = os.path.join(sdePath, inTBLname)
            outTBLpath = os.path.join(labelGDBpath, tbl)
            # Check whether table exists in GDB, if so - overwrite
            if arcpy.Exists(outTBLpath):
                log_msg('WARNING: {} already exists - overwriting...'.format(tbl))
            # Check whether table exists in SDE, if so - continue
            if arcpy.Exists(inTBLpath):
                arcpy.Copy_management(inTBLpath, outTBLpath)
                # Count features and report number - warn if not equal
                inCount = arcpy.GetCount_management(inTBLpath).getOutput(0)
                outCount = arcpy.GetCount_management(outTBLpath).getOutput(0)
                if inCount == outCount:
                    log_msg('{0} - Copied {1} entries to {2}'.format(
                        inTBLname, inCount, tbl))
                else:
                    log_msg(
                        'ERROR: {0} features copied from {1} - {2} features resultant in {3}'
                        .format(inCount, inTBLname, outCount, tbl))
            else:
                err_message = '{} does not exist - exit...'.format(tbl)
                return err_message

        ### Work on cadastre dataset
        log_msg('Adding fields to cadastre...')
        # Change workspace location
        arcpy.env.workspace = labelGDBpath
        ## Add fields for label coordinates
        arcpy.AddField_management(fcCadastre, xlbl, "DOUBLE")
        arcpy.AddField_management(fcCadastre, ylbl, "DOUBLE")

        ## Calculate x,y values from each parcel's label point
        log_msg('calculating xlabel, ylabel field values ...')
        with arcpy.da.UpdateCursor(fcCdstrPath,
                                   ["OID@", "SHAPE@", xlbl, ylbl]) as cursor:
            for row in cursor:
                lPt = row[1].labelPoint
                row[2] = lPt.X
                row[3] = lPt.Y
                cursor.updateRow(row)
        # Delete cursor and row objects
        del cursor, row

        ### Select "P" type parcels and export
        log_msg('Select P type parcels and export ...')
        # Check whether dataset exists already (overwriteOutput is True, so the
        # export below still proceeds either way).
        if arcpy.Exists(fcCadPPath):
            log_msg('"P" type parcel dataset already exists; overwriting...')
        else:
            log_msg('Exporting "P" type parcels...')
        # Select "P" type parcels
        delete_layer("cadastrelyr")
        arcpy.MakeFeatureLayer_management(fcCdstrPath, "cadastrelyr")
        parcelClause = '"PARCEL_CATEGORY" = ' + "'P'"
        arcpy.SelectLayerByAttribute_management("cadastrelyr", "NEW_SELECTION",
                                                parcelClause)
        # Export selected parcels
        arcpy.CopyFeatures_management("cadastrelyr", fcCadPPath)
        # print('\t{} created.').format(fcCadP)

        ### Join "P" parcel data to label table and export
        log_msg('Joining "P" type parcels to label table...')
        delete_layer("cadplyr")
        arcpy.MakeFeatureLayer_management(fcCadPPath, "cadplyr")
        arcpy.MakeTableView_management(tblPLblPath, "labelview")
        arcpy.AddJoin_management("cadplyr", joinFieldP1, tblPLblPath,
                                 joinFieldP2, "KEEP_COMMON")
        # # print('\tJoin successfully created...')
        # inCount = arcpy.GetCount_management("cadplyr").getOutput(0)
        # print('\tNumber of rows = {}').format(inCount)

        ############################################################################################
        ####################### Block to set field names in parcel_label_pt correctly
        arcpy.TableToTable_conversion("cadplyr", labelGDBpath, "junktable")
        log_msg('Junk table created.')
        delete_layer("tmptbl")
        arcpy.MakeTableView_management("junktable", "tmptbl")
        log_msg('Describing temporary table...')
        desc = arcpy.Describe("tmptbl")
        fieldInfo = desc.fieldInfo
        index = 0
        log_msg('Updating field names...')
        # Strip the "<tblPrclLabel>_" join prefix from each output field;
        # outfieldsP1 names get a "_1" suffix, outfieldsP2 names are restored as-is.
        while index < fieldInfo.count:
            for of1 in outfieldsP1:
                of1longname = tblPrclLabel + "_" + of1
                of1_1 = of1 + "_1"
                if fieldInfo.getFieldName(index) == of1longname:
                    fieldInfo.setNewName(index, of1_1)
            for of2 in outfieldsP2:
                of2longname = tblPrclLabel + "_" + of2
                if fieldInfo.getFieldName(index) == of2longname:
                    fieldInfo.setNewName(index, of2)
            index += 1
        log_msg('Field names converted.')
        delete_layer("tmptbl2")
        arcpy.MakeTableView_management("tmptbl", "tmptbl2", "", "", fieldInfo)
        log_msg('Made table view.')
        arcpy.TableToTable_conversion("tmptbl2", labelGDBpath, tblCdstrLabel)
        ### http://gis.stackexchange.com/questions/48353/rename-feature-layer-fields
        ####################### End of block to set field names in parcel_label_pt correctly
        ############################################################################################
        log_msg('Exported table.')
        arcpy.RemoveJoin_management("cadplyr")

        ### Create a point feature class based on the exported table
        # Make a temporary event layer
        arcpy.MakeXYEventLayer_management(tblCLblPath, xlbl, ylbl, "XYeventlyr",
                                          spRef)
        # Output the event layer to the point feature class
        arcpy.FeatureClassToFeatureClass_conversion("XYeventlyr", labelGDBpath,
                                                    pLabelPt)
        log_msg('Created "PARCEL_LABEL_PT" featureclass.')
    except Exception as e:
        print("ERROR: {}".format(e))
        err_message = "ERROR while running {0}: {1}".format(script_name, e)
    return err_message, log_messages
def llenar_ly_taps(self):
    """Populate the ``ly_taps`` point feature class from ``self.tabla``.

    Copies the template ``ly_taps`` feature class from ``self.gdb`` into
    ``self.pathgdb``, builds a WGS84 (EPSG:4326) XY event layer from
    ``self.tabla`` and transfers its rows into the copy, then derives the
    MT* attribute fields by slicing the coded ``COD_TAP``, ``MTDESDIR`` and
    ``MTTIPO`` values, and finally fills in fixed-value/coordinate fields.

    Side effects only (writes to ``pathpuntos``); prints progress and
    per-row warnings (Spanish messages kept verbatim).
    """
    pathpuntos = os.path.join(self.pathgdb, "ly_taps")
    arcpy.CopyFeatures_management(os.path.join(self.gdb, "ly_taps"), pathpuntos)
    ly_taps = arcpy.MakeXYEventLayer_management(self.tabla, "X", "Y",
                                                "in_memory\\tabla_tmp",
                                                arcpy.SpatialReference(4326))
    self.ly_taps = arcpy.CopyFeatures_management(
        ly_taps, os.path.join(self.scratch, "ly_taps"))

    # Source (ly_taps) and destination (pathpuntos) fields are position-matched.
    fields_pathpuntos = ["MTREFER", "COMENTARIO", "COD_TAP", "MTDESDIR",
                         "MTNUMERO", "MTPISO"]  # "MTTIPO"
    fields_ly_taps = ["ITEM_PLAN", "CTO_SIROPE", "CTO_GIS_CMS", "DIRECCION",
                      "NUMERO", "PISO"]  # "TIPO"
    with arcpy.da.SearchCursor(self.ly_taps,
                               ["SHAPE@X", "SHAPE@Y"] + fields_ly_taps) as sCur:
        with arcpy.da.InsertCursor(pathpuntos,
                                   ["SHAPE@X", "SHAPE@Y"] + fields_pathpuntos) as iCur:
            for row in sCur:
                iCur.insertRow(row)

    print("COD_TAP - MTCODNOD - MTTIPTRO - MTTRONCAL - MTEXTLIN - MTTAP - MTNUMPLA")
    i = 0
    with arcpy.da.UpdateCursor(pathpuntos,
                               ["SHAPE@X", "SHAPE@Y", "COD_TAP", "MTCODNOD",
                                "MTTIPTRO", "MTTRONCAL", "MTEXTLIN", "MTTAP",
                                "MTNUMPLA"]) as cursor:
        for x in cursor:
            i += 1
            # Use identity comparison with None (was `!= None`).
            if x[2] is not None:
                if len(x[2]) > 9:
                    # Slice the 10-char COD_TAP code into its components.
                    x[3] = x[2][0:2]
                    x[4] = x[2][2]
                    x[5] = x[2][3:6]
                    x[6] = x[2][6:8]
                    x[7] = x[2][8:10]
                    x[8] = x[4] + x[5]
                else:
                    print("el indice {} no tiene la cantidad de caracteres necesaria".format(i))
            else:
                print("el indice {} no tiene datos".format(i))
            cursor.updateRow(x)

    print("MTDESDIR - MTTIPVIA - MTTIPO - MTPISO")
    i = 0
    with arcpy.da.UpdateCursor(pathpuntos,
                               ["SHAPE@X", "SHAPE@Y", "MTDESDIR", "MTTIPVIA",
                                "MTTIPO", "MTPISO"]) as cursor:
        for x in cursor:
            i += 1
            mtdesdir = x[2]
            if mtdesdir is not None:
                if len(mtdesdir) > 4:
                    # First two chars are the street-type code; the address
                    # proper starts at position 4.
                    x[3] = mtdesdir[0:2]
                    x[2] = mtdesdir[4:]
                else:
                    print("el indice {} no tiene la cantidad de caracteres necesaria".format(i))
            else:
                print("el indice {} no tiene datos".format(i))
            if x[4] is not None:
                if len(x[4]) > 4:
                    x[4] = "B" if x[4][0] == "P" else x[4][0]
                    # NOTE(review): x[4] is a single character at this point, so
                    # x[4][-2:] equals x[4]; possibly the original MTTIPO tail
                    # was intended — behavior preserved, confirm with the author.
                    x[5] = x[4][-2:]
                else:
                    print("el indice {} no tiene la cantidad de caracteres necesaria".format(i))
            else:
                print("el indice {} no tiene datos".format(i))
            cursor.updateRow(x)

    print("NUMCOO_X - NUMCOO_Y - MTIMPEDA - MTNUMBOR - MTCNTBORLBR - MTCNTBOROCU - NRO_POSTE")
    with arcpy.da.UpdateCursor(pathpuntos,
                               ["SHAPE@X", "SHAPE@Y", "NUMCOO_X", "NUMCOO_Y",
                                "MTIMPEDA", "MTNUMBOR", "MTCNTBORLBR",
                                "MTCNTBOROCU", "NRO_POSTE"]) as cursor:
        for x in cursor:
            # Copy the geometry into the coordinate fields and set the
            # fixed default codes used for every tap point.
            x[2] = x[0]
            x[3] = x[1]
            x[4] = "99"
            x[5] = "08"
            x[6] = "8"
            x[7] = "0"
            x[8] = "0"
            cursor.updateRow(x)
checkLine = inp.readline() if checkLine == 'x,y,z\n': return file else: skipHeader = checkLine == 'x y z\n' return makeCSV(file, skipHeader) xyz = arcpy.GetParameterAsText(0) outRast = arcpy.GetParameterAsText(1) gridSize = arcpy.GetParameterAsText(2) sr = arcpy.GetParameterAsText(3) rastNames = outRast.split('\\') lastName = rastNames[len(rastNames) - 1] if (rastNames[len(rastNames) - 1][0] in '0123456789'): rastNames[len(rastNames) - 1] = 'a' + lastName outRast = '\\'.join(rastNames) arcpy.AddMessage( "Can't begin file names with a number. Changing name to " + outRast) arcpy.MakeXYEventLayer_management(checkHeader(xyz), 'x', 'y', 'temprastpoints', sr, 'z') rast = Idw('temprastpoints', "z", gridSize, 2, RadiusVariable(1, float(gridSize) / 2.)) arcpy.Delete_management('temprastpoints') rast.save(outRast) arcpy.AddMessage("Raster created.")
def EBK_ga(out_file, zField):
    """Run Empirical Bayesian Kriging interpolation for one Excel workbook.

    Converts ``out_file`` (an .xls workbook, sheet "Sheet1") to a GDB table
    and a dBASE table, builds a WGS84 XY event layer from its Long/Lat
    columns, saves it as a layer file and shapefile, interpolates ``zField``
    with EmpiricalBayesianKriging_ga to a .tif raster under a dated output
    folder, and finally calls ``ExtractRange`` on the raster.

    Parameters:
        out_file: path of the input .xls workbook.
        zField: name of the value field to interpolate (also used in the
            output folder path).

    NOTE(review): relies on module-level names (out_gdb, outdBASEPath,
    outLayerPath, outShpPath, Extent, ExtractRange) defined elsewhere.
    """
    normaltime = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
    arcpy.AddMessage(normaltime + ":" + out_file + "正在进行经验贝叶斯克里金插值...")
    # BUG FIX: str.strip(".xls") strips any of the characters '.','x','l','s'
    # from both ends (e.g. "sales.xls" -> "ale"); use splitext to drop the
    # extension correctly.
    outTableName = arcpy.ValidateTableName(
        os.path.splitext(os.path.basename(out_file))[0], out_gdb)
    print(outTableName)
    outTable = os.path.join(out_gdb, outTableName)
    print('Converting sheet1 to {}'.format(outTable))
    # Perform the conversion
    dbfTable = os.path.join(outdBASEPath, outTableName + '.dbf')
    try:
        arcpy.ExcelToTable_conversion(out_file, outTable, "Sheet1")  # Excel to Table
        arcpy.TableToDBASE_conversion(outTable, outdBASEPath)  # Table to dbf
    except Exception as err:
        print("{} is existing".format(dbfTable))
        # str(err) works on both Python 2 and 3 (err.message is Py2-only).
        arcpy.AddMessage(str(err))

    x_coords = 'Long'
    y_coords = 'Lat'
    outLayerName = outTableName + '.lyr'
    outLayer = os.path.join(outLayerPath, outLayerName)
    spRef = "Coordinate Systems\Geographic Coordinate Systems\World\WGS 1984.prj"
    try:
        arcpy.MakeXYEventLayer_management(dbfTable, x_coords, y_coords,
                                          outLayerName, spRef)
    except Exception as err:
        arcpy.AddMessage("MakeXYEventLayer_management: " + outLayerName +
                         " created Failed")
        arcpy.AddMessage(str(err))
    try:
        arcpy.SaveToLayerFile_management(outLayerName, outLayer)
    except Exception as err:
        arcpy.AddMessage("SaveToLayerFile_management: " + outLayer +
                         " created Failed")
        arcpy.AddMessage(str(err))
    try:
        # lyr to shp
        arcpy.FeatureClassToShapefile_conversion(outLayer, outShpPath)
    except Exception as err:
        arcpy.AddMessage("FeatureClassToShapefile_conversion: " + outShpPath +
                         " created Failed")
        arcpy.AddMessage(str(err))

    # Set local variables for the kriging run.
    inPointFeatures = os.path.join(outShpPath, outTableName + '_lyr.shp')
    Output_geostatistical_layer = ""
    outRasNa = outTableName + '.tif'
    nt = time.strftime('%Y%m%d', time.localtime(time.time()))
    dt = time.strftime('%m%d%H', time.localtime(time.time()))
    outFilePath = "F:\\zouhangshuju\\" + nt + "\\" + dt + "\\" + zField + "\\" + "tif"
    try:
        os.makedirs(outFilePath)
    except OSError:
        # Folder already exists - nothing to do (was a bare except/print).
        pass
    outRaster = os.path.join(outFilePath, outRasNa)
    cellSize = 0.001
    transformation = "NONE"
    maxLocalPoints = 50
    overlapFactor = 0.5
    numberSemivariograms = 100
    # Set variables for search neighborhood
    radius = 0.14896191744041393
    smooth = 0.2
    try:
        searchNeighbourhood = arcpy.SearchNeighborhoodSmoothCircular(radius, smooth)
    except Exception as err:
        arcpy.AddMessage("SearchNeighborhoodSmoothCircular: " + " Failed")
        arcpy.AddMessage(str(err))
    outputType = "PREDICTION"
    quantileValue = ""
    thresholdType = ""
    probabilityThreshold = ""
    # NOTE(review): the original assigned semivariogram = "POWER" but never
    # passed it - the tool runs with its default semivariogram model.
    tempEnvironment0 = arcpy.env.extent
    arcpy.env.extent = Extent
    # Execute EmpiricalBayesianKriging
    try:
        arcpy.EmpiricalBayesianKriging_ga(
            inPointFeatures, zField, Output_geostatistical_layer, outRaster,
            cellSize, transformation, maxLocalPoints, overlapFactor,
            numberSemivariograms, searchNeighbourhood, outputType,
            quantileValue, thresholdType, probabilityThreshold)
        print('Converting {} to {}'.format(inPointFeatures, outRasNa))
        arcpy.AddMessage(normaltime + ":" + "经验贝叶斯克里金插值完成")
    except Exception as err:
        arcpy.AddMessage("EmpiricalBayesianKriging_ga: " + " Failed")
        arcpy.AddMessage(str(err))
    arcpy.env.extent = tempEnvironment0
    if (os.path.exists(outRaster)):
        normaltime = time.strftime('%Y-%m-%d %H:%M:%S',
                                   time.localtime(time.time()))
        arcpy.AddMessage(normaltime + ":" + outRaster + "开始划分污染区域范围")
        ExtractRange(outRaster, zField)
def main(SpatialReference_input, workingdir, gis_dir, logfile):
    """Intersect withdrawal (cup well) locations with the NFSEG model grid.

    Variant of the grid-intersection workflow that resolves the input
    spatial reference from a keyword, works in a local ``wellpkg_update.gdb``
    and copies the intersected result back to ``cup.gdb``.

    Parameters:
        SpatialReference_input: keyword selecting the input projection -
            'state_plane_north' or 'utm_zone17N_linear_unit_meters_sjr'.
        workingdir: directory holding ``wellpkg_update.gdb``, the input CSV
            and the output ``wells_to_add.csv``.
        gis_dir: directory containing ``cup.gdb`` and the ``projections`` folder.
        logfile: path of the run log; progress messages are appended to it.

    Raises:
        ValueError: if ``SpatialReference_input`` is not a known keyword.

    Returns:
        None.  Results are written to the geodatabases and the output CSV.
    """

    def _log(message):
        # Progress messages are both printed and appended to the run log;
        # extracted because the original repeated this pattern at every step.
        print(message)
        with open(logfile, 'a') as lf:
            lf.write(message)

    _log("\n\tInitializing process for intersecting withdrawal locations with model grid (takes a few seconds) . . .\n")

    my_gdb = os.path.join(workingdir, 'wellpkg_update.gdb')
    arcpy.env.workspace = my_gdb
    cup_gdb = os.path.join(gis_dir, 'cup.gdb')
    # Define the location where the map projections are located
    map_projections_dir = os.path.join(gis_dir, 'projections')

    # Set the local variables
    in_Table = os.path.join(workingdir, 'withdrawal_point_locations_and_rates.csv')
    #in_Table = "test_input.csv"
    x_coords = "XCoord"
    y_coords = "YCoord"
    cup_wells_layer_state_plane_north = 'cup_wells_layer_state_plane_north'
    cup_wells_layer = 'cup_wells_layer'
    cupWells_fc = r'cup_wells_fc'
    #cupWells_fc_exported_to_gis_dir = r'cup_wells'

    # Resolve the spatial reference keyword to a .prj file.
    if SpatialReference_input == 'state_plane_north':
        SpatialReference = os.path.join(map_projections_dir,
                                        'state_plane_north.prj')
    elif SpatialReference_input == 'utm_zone17N_linear_unit_meters_sjr':
        SpatialReference = os.path.join(
            map_projections_dir, 'utm_zone17N_linear_unit_meters_sjr.prj')
    else:
        # Previously an unknown keyword fell through silently and caused a
        # NameError further down; fail fast with a clear message instead.
        raise ValueError(
            'Unknown SpatialReference_input: {0!r}'.format(SpatialReference_input))
    spRef_nfseg = os.path.join(map_projections_dir, 'nfseg_v1_1_grid.prj')

    # Make the XY event layer...
    _log("\tCleaning out old layer")
    if arcpy.Exists(cup_wells_layer_state_plane_north):
        arcpy.Delete_management(cup_wells_layer_state_plane_north)

    _log("\tImporting x,y coordinates for withdrawal points")
    #arcpy.MakeXYEventLayer_management(in_Table, x_coords, y_coords, cup_wells_layer, spRef_nfseg)
    arcpy.MakeXYEventLayer_management(in_Table, x_coords, y_coords,
                                      cup_wells_layer_state_plane_north,
                                      SpatialReference)

    # Print the total rows
    number_of_withdrawal_points = arcpy.GetCount_management(cup_wells_layer_state_plane_north)
    _log("\tImported data for {0} withdrawal points\n".format(number_of_withdrawal_points))

    # Project the new cup wells layer (NFSEG is Albers).
    if arcpy.Exists(cup_wells_layer):
        arcpy.Delete_management(cup_wells_layer)
    # arcpy.Project_management(Input, Output, Projection)
    arcpy.Project_management(cup_wells_layer_state_plane_north, cup_wells_layer,
                             spRef_nfseg)
    if arcpy.Exists(cupWells_fc):
        arcpy.Delete_management(cupWells_fc)
    _log("\tProjected locations of {0} withdrawal points\n".format(number_of_withdrawal_points))

    # Export to feature class
    arcpy.CopyFeatures_management(cup_wells_layer, cupWells_fc)
    _log("\tCopied layer containing withdrawal points to new feature class\n")

    # Set local parameters
    inFeatures = cupWells_fc
    idFeatures = os.path.join(cup_gdb, "nfseg_v1_1_grid")
    outFeatures = os.path.join(my_gdb, "cup_wells_with_grid_info")
    outFeatures_for_export_to_cup_gdb = os.path.join(cup_gdb, "cup_wells_with_grid_info")
    csvOutputFile = os.path.join(workingdir, "wells_to_add.csv")

    _log("\tIntersecting withdrawal point locations with model grid ...\n")

    # 1. Find the model grid rows/columns intersecting the well coordinates.
    #    (arcpy.Identity_analysis was replaced with arcpy.Intersect_analysis
    #    because Intersect works with a basic ArcMap license.)
    if arcpy.Exists(outFeatures):
        arcpy.Delete_management(outFeatures)
    #arcpy.Identity_analysis(inFeatures, idFeatures, outFeatures)
    arcpy.Intersect_analysis([inFeatures, idFeatures], outFeatures)

    # 2. Copy feature to cup.gdb
    if arcpy.Exists(outFeatures_for_export_to_cup_gdb):
        arcpy.Delete_management(outFeatures_for_export_to_cup_gdb)
    _log("\tIdentity operation complete, now copy features to cup.gdb and export .csv file\n")
    # BUG FIX: the original joined cup_gdb onto an already-absolute path
    # (os.path.join discards the first argument when the second is absolute);
    # pass the target path directly.
    arcpy.CopyFeatures_management(outFeatures, outFeatures_for_export_to_cup_gdb)

    # 3. Export to a .csv file
    if arcpy.Exists(csvOutputFile):
        arcpy.Delete_management(csvOutputFile)
    arcpy.ExportXYv_stats(outFeatures,
                          ["WellId", "layer", "row", "col", "Q_cfd"], "COMMA",
                          csvOutputFile, "ADD_FIELD_NAMES")

    _log("\nFinished (row, col) identification for withdrawal points\n\n")
    return
# Build the EMU_Smartrak point feature class from the vehicle CSV.
# NOTE(review): Python 2 script (print statements); outputGDB and csvFile are
# defined earlier in the file (outside this excerpt).
arcpy.env.overwriteOutput = 1
arcpy.env.workspace = outputGDB

#todo: make Spatial Reference
sr = arcpy.SpatialReference(4326)  # WGS84 geographic

#todo: make XY Event Layer
print "\nMake EMU event Layer"
fi = arcpy.FieldInfo()
arcpy.MakeTableView_management(in_table=csvFile, out_view="EMU_VIEW", field_info=fi)
arcpy.MakeXYEventLayer_management(table="EMU_VIEW",
                                  in_x_field="Longitude_wgs84",
                                  in_y_field="Latitude_wgs84",
                                  out_layer="EMU",
                                  spatial_reference=sr)

#todo: copy shapefile
print "\nCopy EMU_test feature class"
# Replace any previous run's output before copying the event layer.
if arcpy.Exists("EMU_Smartrak"):
    arcpy.Delete_management("EMU_Smartrak")
arcpy.CopyFeatures_management("EMU", "EMU_Smartrak")

#todo: update Timetamp field and get Unique VehicleID
lstVehicleID = []
print "\nUpdate Timestamp field"
# Re-point "EMU" at the persisted feature class for the update pass below.
arcpy.MakeFeatureLayer_management("EMU_Smartrak", "EMU")
recs = arcpy.UpdateCursor("EMU")
def getStations(clipShapefile, bufferSize, noaaDatabase, outputShapefile):
    """Download the NOAA station inventory, clean it, and clip the stations
    to a buffer around *clipShapefile*.

    clipShapefile   -- shapefile defining the area of interest
    bufferSize      -- buffer distance in miles around the clip shapefile
    noaaDatabase    -- 'GHCN (Global Historical Climatology Network)' or
                       'GSOD (Global Summary of the Day)'
    outputShapefile -- path of the clipped station point shapefile

    Raises RuntimeError if the FTP server cannot be reached after 1000 tries.
    """
    user = os.environ['USERNAME']
    installDir = arcpy.GetInstallInfo("desktop")["InstallDir"]
    installDir = installDir.replace('\\', '/')
    tempDir = os.environ['TEMP']
    tempDir = tempDir.replace('\\', '/')

    # Input
    noaaFtpSite = 'ftp.ncdc.noaa.gov'
    if noaaDatabase == 'GHCN (Global Historical Climatology Network)':
        stationLink = 'pub/data/ghcn/daily/ghcnd-stations.txt'
    else:
        stationLink = 'pub/data/inventories/ISH-HISTORY.TXT'
    wgs84file = installDir + r'\Coordinate Systems\Geographic Coordinate Systems\World\WGS 1984.prj'
    if env.scratchWorkspace is None:  # was "== None"
        env.scratchWorkspace = tempDir

    # Intermediate files
    stationFile = tempDir + '/stationInfo.txt'
    cleanedFile = tempDir + '/stationInfo_cleaned.txt'
    globalStations = tempDir + '/globalStations.shp'
    buffer50Mile = tempDir + '/buffer50Mile.shp'
    localOutput = tempDir + '/stationLocations.shp'

    arcpy.AddMessage(
        "Retrieve station text file (may take awhile depending on server)...")
    tries = 0
    downloaded = False
    # BUG FIX: track success with a flag; the original raised even when the
    # download succeeded on the 1000th attempt (tries == 1000 after break).
    while tries < 1000 and not downloaded:
        tries += 1
        try:
            arcpy.AddMessage(" Trying " + noaaFtpSite)
            ftp = FTP(noaaFtpSite)
            ftp.login()
            ftpCall = 'RETR ' + stationLink
            # BUG FIX: open in binary mode for retrbinary, and make sure the
            # handle is closed even if the transfer fails part-way.
            f = open(stationFile, 'wb')
            try:
                ftp.retrbinary(ftpCall, f.write)
            finally:
                f.close()
            ftp.quit()
            downloaded = True
        except Exception:  # was a bare "except:" (also swallowed SystemExit)
            arcpy.AddMessage(" Server error, waiting 5 seconds...")
            # BUG FIX: print_exc() returns None; format_exc() gives the text.
            arcpy.AddMessage(traceback.format_exc())
            time.sleep(5)
    if not downloaded:
        # BUG FIX: "Error" was undefined (NameError); raise a real exception.
        raise RuntimeError("Tried server 1000 times...")

    # Fixed-width column layouts for the two inventory formats
    if noaaDatabase == 'GHCN (Global Historical Climatology Network)':
        fixedWidths = [12, 9, 10, 7, 3, 31, 4, 4, 5]
        dtypes = np.dtype([('ID', 'S12'),
                           ('LAT', float),
                           ('LON', float),
                           ('C4', 'S7'),
                           ('STATE', 'S3'),
                           ('NAME', 'S31'),
                           ('GSN', 'S4'),
                           ('HCN', 'S4'),
                           ('C9', 'S5')])
    else:
        fixedWidths = [7, 6, 30, 6, 3, 5, 7, 8, 10, 9, 9]
        dtypes = np.dtype([('USAF', 'S7'),
                           ('WBAN', 'S6'),
                           ('STATION NAME', 'S30'),
                           ('CTRY', 'S6'),
                           ('ST', 'S3'),
                           ('CALL', 'S5'),
                           ('LAT', float),
                           ('LON', float),
                           ('ELEV', float),
                           ('BEGIN', 'S9'),
                           ('END', 'S9')])

    # GSOD files carry a 22-line header; GHCN has none
    if noaaDatabase == 'GHCN (Global Historical Climatology Network)':
        skiprows = 0
    else:
        skiprows = 22

    arcpy.AddMessage("Reading station text file on local disk...")
    # BUG FIX: float(np.version.version[0:3]) misparses versions such as
    # '1.10' (yielding 1.1); compare (major, minor) as integers instead.
    npMajor, npMinor = (int(''.join(c for c in part if c.isdigit()) or 0)
                        for part in np.version.version.split('.')[:2])
    # numpy >= 1.7 renamed genfromtxt's "skiprows" to "skip_header"
    if (npMajor, npMinor) >= (1, 7):
        stationData = np.genfromtxt(stationFile, dtype=dtypes,
                                    skip_header=skiprows,
                                    delimiter=fixedWidths)
    else:
        stationData = np.genfromtxt(stationFile, dtype=dtypes,
                                    skiprows=skiprows,
                                    delimiter=fixedWidths)

    if noaaDatabase == 'GSOD (Global Summary of the Day)':
        # Drop sentinel / missing coordinates
        stationData = stationData[stationData['LAT'] != -99999.0]
        stationData = stationData[stationData['LAT'] != 0.0]
        stationData = stationData[np.invert(np.isnan(stationData['LAT']))]
        stationData = stationData[stationData['LON'] != -99999.0]
        stationData = stationData[stationData['LON'] != 0.0]
        stationData = stationData[np.invert(np.isnan(stationData['LON']))]
        # GSOD stores lat/lon in thousandths of a degree
        stationData['LAT'] = stationData['LAT'] / 1000.
        stationData['LON'] = stationData['LON'] / 1000.

    if noaaDatabase == 'GHCN (Global Historical Climatology Network)':
        stationData = stationData[['ID', 'LAT', 'LON']]
        stationData['ID'] = np.char.rstrip(stationData['ID'], ' ')
        fmt = '%s,%f,%f'
    else:
        stationData = stationData[[
            'USAF', 'WBAN', 'LAT', 'LON', 'BEGIN', 'END'
        ]]
        for col in ['USAF', 'WBAN', 'BEGIN', 'END']:
            stationData[col] = np.char.rstrip(stationData[col], ' ')
            stationData[col] = np.char.lstrip(stationData[col], ' ')
        fmt = '%s,%s,%f,%f,%s,%s'

    arcpy.AddMessage("Writing cleaned station file to local disk...")
    # Context manager so the file is closed even on error (was open/close)
    with open(cleanedFile, 'w') as f:
        f.write(','.join(stationData.dtype.names) + '\n')
        np.savetxt(f, stationData, fmt=fmt)

    arcpy.AddMessage("Creating shapefile and clipping...")
    arcpy.MakeXYEventLayer_management(cleanedFile, "LON", "LAT",
                                      "latLongPoints", wgs84file)
    arcpy.CopyFeatures_management("latLongPoints", globalStations)
    bufferSize = str(bufferSize) + " Miles"
    arcpy.Buffer_analysis(clipShapefile, buffer50Mile, bufferSize)
    env.outputCoordinateSystem = clipShapefile
    arcpy.Clip_analysis(globalStations, buffer50Mile, localOutput)

    # Store each station's coordinates in explicit X/Y fields
    arcpy.AddField_management(localOutput, 'X', 'FLOAT')
    arcpy.AddField_management(localOutput, 'Y', 'FLOAT')
    desc = arcpy.Describe(localOutput)
    shapefieldname = desc.ShapeFieldName
    rows = arcpy.UpdateCursor(localOutput)
    for row in rows:
        feat = row.getValue(shapefieldname)
        pnt = feat.getPart()
        row.X = pnt.X
        row.Y = pnt.Y
        rows.updateRow(row)
    del row, rows
    arcpy.DeleteField_management(localOutput, ['LAT', 'LON'])
    arcpy.CopyFeatures_management(localOutput, outputShapefile)

    # ---------------#
    # Create metadata
    # ---------------#
    metadataFile = os.path.dirname(outputShapefile) + '/README.txt'
    outputShapefile = os.path.basename(outputShapefile)
    timeNow = time.strftime("%a, %d %b %Y %H:%M:%S", time.gmtime())
    scriptFile = r'"\code\climate\createShapefileOfClimateStationsInWisconsin_tool.py"'
    metadataOut = (outputShapefile + " was created/changed by " + user
                   + " on " + timeNow + ' using ' + scriptFile + '\n\n')
    with open(metadataFile, 'a+') as f:
        f.write(metadataOut)
# ---- Script arguments ------------------------------------------------------
seriouswt = float(arcpy.GetParameterAsText(3))     # weight for serious crashes
nonseriouswt = float(arcpy.GetParameterAsText(4))  # weight for nonserious crashes
possiblewt = float(arcpy.GetParameterAsText(5))    # weight for possible crashes
# number of crashes to qualify an intersection as high crash
IntersectionThreshold = arcpy.GetParameterAsText(6)
# number of crashes to qualify a segment as high crash
SegmentThreshold = arcpy.GetParameterAsText(7)

# Create a uniquely (timestamp) named output geodatabase
TimeDate = datetime.now()
TimeDateStr = "CrashLocations" + TimeDate.strftime('%Y%m%d%H%M')
outputGDB = arcpy.CreateFileGDB_management(GDBspot, TimeDateStr)

# Convert the GCAT txt file to a gdb table and add it to the map
NewTable = arcpy.TableToTable_conversion(GCATfile, outputGDB,
                                         "GCAT_LUCWOO_nofreeways")

# Display XY data and export to a new feature class
PointFile = arcpy.MakeXYEventLayer_management(
    NewTable, "ODOT_LONGITUDE_NBR", "ODOT_LATITUDE_NBR", "GCAT_LUCWOO_xy",
    arcpy.SpatialReference("NAD 1983"))

# Count fields and the attribute queries that populate them.
# FIX: renamed from "dict", which shadowed the builtin.
count_queries = {
    'fatalities_Count': "FATALITIES_NBR<>0",
    'incapac_inj_count': "Incapac_injuries_NBR<>0 and fatalities_nbr=0",
    'non_incapac_inj_count': "non_incapac_injuries_NBR<>0 and fatalities_nbr=0 and incapac_injuries_nbr=0",
    'possible_inj_count': "possible_injuries_nbr<>0 and FATALITIES_NBR=0 and non_incapac_injuries_nbr=0 and incapac_injuries_nbr=0"
}

# Add each count field and populate it: 1 where the query matches, 0 elsewhere
for key in count_queries:
    arcpy.AddField_management(PointFile, key, "LONG")
    arcpy.SelectLayerByAttribute_management(PointFile, "NEW_SELECTION",
                                            count_queries[key])
    arcpy.CalculateField_management(PointFile, key, 1)
    # Invert the selection and zero the remaining rows
    # (keyword normalized to the documented "SWITCH_SELECTION")
    arcpy.SelectLayerByAttribute_management(PointFile, "SWITCH_SELECTION")
    arcpy.CalculateField_management(PointFile, key, 0)
# Merge one or more crime CSV files into a single GDB table and build the
# STL_CRIME_POINTS feature class from its XCoord/YCoord fields.
arcpy.env.overwriteOutput = True

infilename = arcpy.GetParameterAsText(0)  # input CSV path(s), as text
infile = arcpy.GetParameter(0)            # same parameter, iterable values
outfile = arcpy.GetParameterAsText(1)     # output file geodatabase
outname = outfile + '\\AllPoints'         # merged table inside the GDB
csvFile = os.path.basename(infilename)
spRef = arcpy.SpatialReference(
    "NAD 1983 StatePlane Missouri East FIPS 2401 (US Feet)")

# Create the output GDB on first run
if not arcpy.Exists(outfile):
    arcpy.AddMessage("Creating GDB...")
    arcpy.CreateFileGDB_management(os.path.dirname(outfile),
                                   os.path.basename(outfile))

arcpy.AddMessage("Copying Rows...")
for inputs in infile:
    arcpy.AddMessage(inputs)
    if not arcpy.Exists(outname):
        # BUG FIX: copy the current input rather than the bare basename of
        # the first parameter (csvFile), which resolved relative to the cwd.
        arcpy.CopyRows_management(inputs, outname)
    else:
        # Append subsequent inputs to the same table (path unified to outname)
        arcpy.Append_management(inputs, outname, 'NO_TEST', '', '')

arcpy.AddMessage("Making Point Features...")
arcpy.MakeXYEventLayer_management(outname, "XCoord", "YCoord", "Temp_Points",
                                  spRef, "")
arcpy.FeatureClassToFeatureClass_conversion("Temp_Points", outfile,
                                            'STL_CRIME_POINTS')
arcpy.Delete_management(outname)
arcpy.CheckOutExtension("Spatial")

# Environment settings
env.workspace = "F:\\NFIE_SI_2016\\groupProject\\iricOutput\\"
# Output location of the shapefiles
outPathshp = "F:\\NFIE_SI_2016\\groupProject\\postprocessOutput\\shapefiles\\"
# Path of the CSV files
csvDir = "F:\\NFIE_SI_2016\\groupProject\\iricOutput\\"

# Name of the shapefile to create
outFC = csvName + ".shp"

# Add the XY data and export the event layer as a shapefile
arcpy.MakeXYEventLayer_management(csvFile, "X", "Y", "tempLay", spatialRef)
arcpy.FeatureClassToFeatureClass_conversion("tempLay", outPathshp, outFC)

# Add the classification fields to the attribute table
arcpy.AddField_management(outPathshp + outFC, "fldext", "TEXT")
arcpy.AddField_management(outPathshp + outFC, "wd_2ft", "TEXT")
arcpy.AddField_management(outPathshp + outFC, "fv_7mph", "TEXT")
arcpy.AddField_management(outPathshp + outFC, "crit_2_7", "TEXT")

# Flag each point: inside the flood extent, and depth of 2 ft or more
upCur = arcpy.UpdateCursor(outPathshp + outFC)
for row in upCur:
    row.fldext = 1 if row.Depth > 0 else 0
    row.wd_2ft = 1 if row.Depth >= 2 else 0
# Script arguments
Jeopardy_CSV_File = arcpy.GetParameterAsText(0)
if Jeopardy_CSV_File == '#' or not Jeopardy_CSV_File:
    # Provide a default value if unspecified
    Jeopardy_CSV_File = "C:\\Users\\jvwhit\\Documents\\GitHub\\Python-For-ArcGIS-2017\\Advanced_PythonForArcGIS\\Data\\Answers_Adv\\CSV\\JeopardyContestants_LatLon.csv"

# Local variables:
JeopardyContestants_Table = "C:\\Users\\jvwhit\\Documents\\GitHub\\Python-For-ArcGIS-2017\\Advanced_PythonForArcGIS\\Data\\Answers_Adv\\Illinois.gdb\\JeopardyContestants_Table"
Jeopardy_Contestants = "Jeopardy Contestants"
Jeopardy_Contestants_Feature_Class_Output = "C:\\Users\\jvwhit\\Documents\\GitHub\\Python-For-ArcGIS-2017\\Advanced_PythonForArcGIS\\Data\\Answers_Adv\\Illinois.gdb\\JeopardyContestants"
Jeopardy_Contestants_Buffer_Output = "C:\\Users\\jvwhit\\Documents\\GitHub\\Python-For-ArcGIS-2017\\Advanced_PythonForArcGIS\\Data\\Answers_Adv\\Illinois.gdb\\JeopardyContestants_Buffer"

# Process: Copy Rows
arcpy.CopyRows_management(Jeopardy_CSV_File, JeopardyContestants_Table, "")

# Process: Make XY Event Layer (WGS 1984)
arcpy.MakeXYEventLayer_management(
    JeopardyContestants_Table, "lon", "lat", Jeopardy_Contestants,
    "GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]];-400 -400 1000000000;-100000 10000;-100000 10000;8.98315284119522E-09;0.001;0.001;IsHighPrecision",
    "")

# Process: Select
# BUG FIX: was "... IS NOT NULL OR ...", which kept rows that have only one
# of the two coordinates (and therefore no valid point geometry). A contestant
# needs BOTH lat and lon to be plotted.
arcpy.Select_analysis(Jeopardy_Contestants,
                      Jeopardy_Contestants_Feature_Class_Output,
                      "\"lat\" IS NOT NULL AND \"lon\" IS NOT NULL")

# Process: Buffer (5-mile geodesic buffer around each contestant)
arcpy.Buffer_analysis(Jeopardy_Contestants_Feature_Class_Output,
                      Jeopardy_Contestants_Buffer_Output, "5 Miles", "FULL",
                      "ROUND", "NONE", "", "GEODESIC")
# Collect the CSV files in the workspace, then turn each into a layer file.
if arcpy.Exists(r"K:\School\UTDallas Lab\GISC 6317\Project"):
    # NOTE: "list" is a name defined earlier in this script
    for csv_name in arcpy.ListFiles("*.csv"):
        list.append(csv_name)

# Loop through the list to make individual layer files
for x in list:
    in_Table = x
    x_coords = "Long"
    y_coords = "Lat"

    # Layer name: the csv name without its extension
    filename, file_extension = os.path.splitext(x)
    saved_layer = filename + ".lyr"

    # Make the XY event layer
    lyr = arcpy.MakeXYEventLayer_management(in_Table, x_coords, y_coords,
                                            filename)

    # Report the total row count
    count = arcpy.GetCount_management(filename)
    print(count)

    # Save to a layer file
    arcpy.SaveToLayerFile_management(filename, saved_layer)

    """
    # Save layer file to a shapefile for added functionality
    arcpy.CopyFeatures_management(saved_Layer, "candidates.shp")
    """

print("Layer files saved")
print(best) savebest = "C:\\Users\\sjl170230\\Documents\\UTD_Viewshed_V3\\OutTables\\bestobservers.csv" best.to_csv(savebest) print('Best observers CSV exported to: {}'.format(savebest)) print('\n') print('PROCESS COMPLETE.') print('BEST OBSERVERS: ') print(best) print("NEW PART!!!!!!!!!!!!!!!") # Get spatial reference from surface spatialref = arcpy.Describe(arcpy.Raster(surface)).spatialReference # Make layer and save to feature class from bestobservers.csv arcpy.MakeXYEventLayer_management(savebest, 'POINT_X', 'POINT_Y', "BestObs_Lyr", spatialref) arcpy.FeatureClassToFeatureClass_conversion("BestObs_Lyr", path, "BestObservers") print("Best Observers feature class saved.") # Add blank text field to flight points for observers arcpy.AddField_management(flightpts, "Observers", "TEXT") arcpy.CalculateField_management(flightpts, "Observers", "\"\"", "PYTHON_9.3") print('Observers field added.') # For each pass... for c in range(0, count): print(c) # For each row in original flight points with arcpy.da.UpdateCursor(flightpts, ['OBJECTID', 'Observers']) as cursor: for row in cursor:
# Project the input layer to WGS84 for KMZ export
arcpy.Project_management(input_layer,
                         os.path.join(scratch_gdb, "INPUT_LAYER_WGS84"),
                         wgs_1984)
wgs84_fc = os.path.join(scratch_gdb, "INPUT_LAYER_WGS84")

# Input layer: carry the original symbology over to the projected copy
input_lyr = arcpy.mapping.Layer(wgs84_fc)
input_lyr.name = output_kmz_name
arcpy.ApplySymbologyFromLayer_management(input_lyr, input_layer)

# Create label points at the feature centroids
arcpy.AddField_management(wgs84_fc, "XCENTROID", "DOUBLE")
arcpy.CalculateField_management(wgs84_fc, "XCENTROID",
                                "!SHAPE.CENTROID.X!", "PYTHON_9.3")
arcpy.AddField_management(wgs84_fc, "YCENTROID", "DOUBLE")
arcpy.CalculateField_management(wgs84_fc, "YCENTROID",
                                "!SHAPE.CENTROID.Y!", "PYTHON_9.3")
arcpy.MakeXYEventLayer_management(wgs84_fc, "XCENTROID", "YCENTROID",
                                  "KMZ_LABELS", wgs_1984)
arcpy.CopyFeatures_management("KMZ_LABELS",
                              os.path.join(scratch_gdb, "KMZ_LABELS"))
input_labels = os.path.join(scratch_gdb, "KMZ_LABELS")
arcpy.DeleteField_management(input_labels, ["XCENTROID", "YCENTROID"])

# Wire the label layer into the template map document
mxd = arcpy.mapping.MapDocument(os.path.join(cwd, "KMZ_WITH_LABELS.mxd"))
df = arcpy.mapping.ListDataFrames(mxd, "Layers")[0]
input_labels_lyr = arcpy.mapping.ListLayers(mxd, "", df)[1]
input_labels_lyr.replaceDataSource(scratch_gdb, "FILEGDB_WORKSPACE",
                                   "KMZ_LABELS")
input_labels_lyr.labelClasses[0].expression = "[" + label_field + "]"
kmz_group_lyr = arcpy.mapping.ListLayers(mxd, "", df)[0]
arcpy.mapping.AddLayerToGroup(df, kmz_group_lyr, input_lyr, "TOP")
# ==============================================================================
# CREATE A SHAPEFILE OF XY VALUES
# A layer file is created first, after which it is converted into a point
# feature class.
# ==============================================================================

# Filepaths for the .lyr and .shp outputs
lyr = 'del_obst'
out_lyr = r"C:filepath_to_output_lyr_file\output\testi_ajo_kopio\VSS_pnt.lyr"
out_pnt_class = r"C:filepath_to_shp_file\output\VSS_pnt_to_class.shp"

# Build a point event layer of the flight obstacles for display on a map
arcpy.MakeXYEventLayer_management(output_csv, 'Longitude', 'Latitude', lyr)

# Sanity check: all rows should be included
print(arcpy.GetCount_management(lyr))

# Save as a .lyr file first ...
arcpy.SaveToLayerFile_management(lyr, out_lyr)

# ... then convert the layer file into a point feature class
arcpy.FeatureToPoint_management(out_lyr, out_pnt_class, "INSIDE")

# ==============================================================================
print('DATA PROCESSING IS READY!')
def execute(self, parameters, messages):
    """The source code of the tool."""
    # Environment: a fresh file GDB named by the first tool parameter
    arcpy.CreateFileGDB_management("E:/gina/poker/gdb",
                                   parameters[0].valueAsText)
    arcpy.env.workspace = ("E:/gina/poker/gdb/"
                           + parameters[0].valueAsText + ".gdb")
    arcpy.env.overwriteOutput = True

    # Fixed project datasets and intermediate outputs
    adnr_lo_shp = "E:/gina/poker/shp/wip/land_ownership_data/adnr_gls_dls_merge_20170823_v1.shp"
    pfrr_popn_places = "E:/gina/poker/shp/wip/popn_places_data/pokerflat_popn_places_gcs_wgs84_to_akalbers_2.shp"
    pipTable = "E:/gina/poker/dbf/predicted_impact_xy.dbf"
    pip_point_shp = "E:/gina/poker/pip/pip_point.shp"
    pip_point_3338 = "E:/gina/poker/pip/pip_point_3338.shp"
    pip_buffer_shp = "E:/gina/poker/pip/pip_buffer.shp"
    pip_range_rings_shp = "E:/gina/poker/pip/pip_range_rings.shp"
    pip_lo_in_buffer_shp = "E:/gina/poker/pip/pip_lo_in_buffer.shp"
    pip_lo_in_buf_sum_dbf = "E:/gina/poker/pip/pip_lo_in_buf_sum.dbf"
    pip_lo_in_buf_sum_csv = "E:/gina/poker/pip/pip_lo_in_buf_sum.csv"
    pip_popn_places_in_buffer_shp = "E:/gina/poker/pip/pip_popn_places_in_buffer.shp"

    # Tool parameters: impact point (x, y) and radius r
    x = parameters[1].valueAsText
    y = parameters[2].valueAsText
    r = parameters[3].valueAsText + " NauticalMiles"
    # Range rings at one third, two thirds, and the full radius
    rr1 = (float(parameters[3].valueAsText)) / 3
    rr2 = (rr1 * 2)
    rrs = str(rr1) + ";" + str(rr2) + ";" + r.split(" ")[0]
    pipLayer = "pipLayer1"
    srs = arcpy.SpatialReference("Alaska Albers Equal Area Conic")
    intersect_fc1 = [adnr_lo_shp, pip_buffer_shp]
    intersect_fc2 = [pfrr_popn_places, pip_buffer_shp]
    mxd = arcpy.mapping.MapDocument("current")
    dataframe = arcpy.mapping.ListDataFrames(mxd)[0]
    sourceLoSymbologyLayer = arcpy.mapping.Layer("E:/gina/poker/lyr/lo2.lyr")
    sourcePipSymbologyLayer = arcpy.mapping.Layer("E:/gina/poker/lyr/pip2.lyr")
    sourceRrsSymbologyLayer = arcpy.mapping.Layer("E:/gina/poker/lyr/rrs.lyr")

    # Write the impact coordinates into the table
    arcpy.CalculateField_management(pipTable, "Lon", x, "PYTHON", "")
    arcpy.CalculateField_management(pipTable, "Lat", y, "PYTHON", "")

    # Make the XY event layer for the predicted impact point (WGS84)
    arcpy.MakeXYEventLayer_management(
        pipTable, "Lon", "Lat", pipLayer,
        "GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]];-400 -400 1000000000;-100000 10000;-100000 10000;8.98315284119522E-09;0.001;0.001;IsHighPrecision",
        "")

    # Persist the point, project it to Alaska Albers, then buffer it
    arcpy.CopyFeatures_management(pipLayer, pip_point_shp, "", "0", "0", "0")
    arcpy.Project_management(pip_point_shp, pip_point_3338, srs)
    arcpy.Buffer_analysis(pip_point_3338, pip_buffer_shp, r, "FULL", "ROUND",
                          "NONE", "", "PLANAR")
    arcpy.MultipleRingBuffer_analysis(pip_point_3338, pip_range_rings_shp,
                                      rrs, "NauticalMiles", "", "NONE",
                                      "FULL")

    # Intersect the buffer with land ownership and with populated places
    arcpy.Intersect_analysis(intersect_fc1, pip_lo_in_buffer_shp, "ALL", "",
                             "INPUT")
    arcpy.Intersect_analysis(intersect_fc2, pip_popn_places_in_buffer_shp,
                             "ALL", "", "INPUT")

    # Make feature layers and add them to the map
    fclist = arcpy.ListFeatureClasses()
    arcpy.MakeFeatureLayer_management(pip_point_3338,
                                      "Predicted Impact Point")
    arcpy.MakeFeatureLayer_management(
        pip_lo_in_buffer_shp,
        "Land Ownership within 3sigma of Predicted Impact Point")
    arcpy.MakeFeatureLayer_management(pip_range_rings_shp, "Range Rings")
    # Populated-places layer only when the intersection is not empty
    popn_places_records = int(
        arcpy.GetCount_management(pip_popn_places_in_buffer_shp).getOutput(0))
    if popn_places_records > 0:
        arcpy.MakeFeatureLayer_management(
            pip_popn_places_in_buffer_shp,
            "Populated Places within 3sigma of Predicted Impact Point")
        addPipPopnPlacesLayer = arcpy.mapping.Layer(
            "Populated Places within 3sigma of Predicted Impact Point")
        arcpy.mapping.AddLayer(dataframe, addPipPopnPlacesLayer)
    addPipPointLayer = arcpy.mapping.Layer("Predicted Impact Point")
    arcpy.mapping.AddLayer(dataframe, addPipPointLayer)
    add3sigmaLoLayer = arcpy.mapping.Layer(
        "Land Ownership within 3sigma of Predicted Impact Point")
    arcpy.mapping.AddLayer(dataframe, add3sigmaLoLayer)
    addRangeRings = arcpy.mapping.Layer("Range Rings")
    arcpy.mapping.AddLayer(dataframe, addRangeRings)

    # Add and calculate an Acres field for the intersected land ownership
    arcpy.AddField_management(pip_lo_in_buffer_shp, "Acres", "DOUBLE")
    arcpy.CalculateField_management(pip_lo_in_buffer_shp, "Acres",
                                    "!shape.area@acres!", "PYTHON_9.3", "")

    # Summarize the intersected land ownership by owner with total acres
    arcpy.Statistics_analysis(pip_lo_in_buffer_shp, pip_lo_in_buf_sum_dbf,
                              "Acres SUM", "OWNER")
    # NOTE(review): MakeTableView is called without an output view name —
    # confirm this is accepted by the installed ArcGIS release.
    arcpy.MakeTableView_management(pip_lo_in_buf_sum_dbf)
    add3sigmaLoSumTbl = arcpy.mapping.TableView(pip_lo_in_buf_sum_dbf)
    arcpy.mapping.AddTableView(dataframe, add3sigmaLoSumTbl)

    # Symbolize and refresh
    lo_layer = arcpy.mapping.ListLayers(
        mxd, "*Land Ownership within 3sigma of Predicted Impact Point*",
        dataframe)[0]
    arcpy.mapping.UpdateLayer(dataframe, lo_layer, sourceLoSymbologyLayer,
                              True)
    lo_layer.symbology.addAllValues()
    pip_layer = arcpy.mapping.ListLayers(mxd, "*Predicted Impact Point*",
                                         dataframe)[0]
    arcpy.mapping.UpdateLayer(dataframe, pip_layer, sourcePipSymbologyLayer,
                              True)
    rr_layer = arcpy.mapping.ListLayers(mxd, "*Range Rings*", dataframe)[0]
    arcpy.mapping.UpdateLayer(dataframe, rr_layer, sourceRrsSymbologyLayer,
                              True)
    arcpy.RefreshTOC()
    arcpy.RefreshActiveView()

    # Populate the mission GDB
    mission_layers = [pip_point_3338, pip_lo_in_buffer_shp,
                      pip_popn_places_in_buffer_shp, pip_range_rings_shp]
    arcpy.FeatureClassToGeodatabase_conversion(mission_layers,
                                               arcpy.env.workspace)
    return
# -*- coding:utf-8 -*-
# Name:         Table2shp.py
# Copyright:    Rima 2017/12/14
# Description:  Display XY & define coordinate system as WGS1984 & convert to .shp
# Requirements: 3D Analyst Extension

# Import system modules
import arcpy
from arcpy import env

print("begin")

# Set the XY position and make a .shp for every table in the geodatabase.
# FIX: raw strings so sequences like "\U..." are never interpreted as
# escape sequences (a SyntaxError under Python 3).
env.workspace = r"C:\Users\hero\Desktop\workspace\geodata.gdb"
outLocation = r"C:\Users\hero\Desktop\workspace\geodata.gdb"
tableList = arcpy.ListTables()
for table in tableList:
    print(table)
    # Execute MakeXYEventLayer & define the coordinate system as GCS_WGS_1984.
    # The X field is longitude ("东经"), the Y field is latitude ("北纬").
    # FIX: pass an explicit SpatialReference object instead of the bare WKID.
    arcpy.MakeXYEventLayer_management(table, "东经", "北纬", table + "layer",
                                      arcpy.SpatialReference(4326))
    # Execute FeatureClassToFeatureClass to persist the event layer
    arcpy.FeatureClassToFeatureClass_conversion(table + "layer", outLocation,
                                                table + "conv")
    print(table + "success")
def DrawRadialFlows():
    """Draw radial flow lines (and optionally destination nodes) from a
    table of start/end coordinates supplied as tool parameters, and return
    the created feature classes through derived output parameter 12.
    """
    # Tool parameters
    inTable = arcpy.GetParameterAsText(0)
    startX_field = arcpy.GetParameterAsText(1)
    startY_field = arcpy.GetParameterAsText(2)
    endX_field = arcpy.GetParameterAsText(3)
    endY_field = arcpy.GetParameterAsText(4)
    id_field = arcpy.GetParameterAsText(5)
    lineType_str = arcpy.GetParameterAsText(6)
    spRef = arcpy.GetParameterAsText(7)
    joinFields = arcpy.GetParameterAsText(8)
    joinFields = joinFields.split(";")
    isChecked_AddNodes = arcpy.GetParameter(9)
    outFlowsLyrName = arcpy.GetParameterAsText(10)
    outNodesLyrName = arcpy.GetParameterAsText(11)

    if inTable and inTable != "#":
        try:
            # Collect every output layer so they can be returned together
            outList = []

            if isChecked_AddNodes and outNodesLyrName != '':
                # Temporary XY event layer for the flow destinations.
                # NOTE(review): unlike the XYToLine calls below, no
                # spatial_reference is passed here — confirm whether spRef
                # should also apply to the nodes layer.
                nodesXY = r"in_memory\nodes_lyr"
                arcpy.AddMessage('Creating Nodes at Flow Destinations ...')
                arcpy.SetProgressorLabel(
                    'Creating Nodes at Flow Destinations ...')
                arcpy.MakeXYEventLayer_management(table=inTable,
                                                  in_x_field=endX_field,
                                                  in_y_field=endY_field,
                                                  out_layer=nodesXY)
                # Persist the event layer to a feature class
                nodesOutputFC = os.path.join(arcpy.env.scratchGDB,
                                             outNodesLyrName)
                arcpy.CopyFeatures_management(
                    in_features=nodesXY, out_feature_class=nodesOutputFC)
                outList.append(nodesOutputFC)

            # XY To Line
            flowsOutputFC = os.path.join(arcpy.env.scratchGDB,
                                         outFlowsLyrName)
            arcpy.AddMessage('Saved Flow Lines to: ' + flowsOutputFC)
            arcpy.SetProgressorLabel('Creating Radial Flow Lines ...')
            if id_field:
                arcpy.XYToLine_management(in_table=inTable,
                                          out_featureclass=flowsOutputFC,
                                          startx_field=startX_field,
                                          starty_field=startY_field,
                                          endx_field=endX_field,
                                          endy_field=endY_field,
                                          line_type=lineType_str,
                                          id_field=id_field,
                                          spatial_reference=spRef)
                if joinFields[0] != '':
                    # Copy rows first so the join table has an OID, then
                    # join the selected fields onto the flow lines.
                    arcpy.AddMessage('Creating Temporary Join Table ...')
                    arcpy.SetProgressorLabel(
                        'Creating Temporary Join Table ...')
                    outTable = r"in_memory\tempTable"
                    arcpy.CopyRows_management(inTable, outTable)
                    arcpy.AddMessage('Joining Selected Fields ...')
                    arcpy.SetProgressorLabel('Joining Selected Fields ...')
                    arcpy.JoinField_management(in_data=flowsOutputFC,
                                               in_field=id_field,
                                               join_table=outTable,
                                               join_field=id_field,
                                               fields=joinFields)
                else:
                    arcpy.AddWarning(
                        "WARNING: No join fields have been selected. Only the ID field will be copied to the output feature class!"
                    )
            else:
                arcpy.XYToLine_management(in_table=inTable,
                                          out_featureclass=flowsOutputFC,
                                          startx_field=startX_field,
                                          starty_field=startY_field,
                                          endx_field=endX_field,
                                          endy_field=endY_field,
                                          line_type=lineType_str,
                                          spatial_reference=spRef)
            outList.append(flowsOutputFC)

            # Send string of (derived) output parameters back to the tool
            results = ";".join(outList)
            arcpy.SetParameterAsText(12, results)
            arcpy.ResetProgressor()
        except Exception as e:
            # BUG FIX: the original reported e.args[0], which raises
            # IndexError for exceptions constructed without arguments;
            # format the exception object itself instead.
            arcpy.AddError('An error occurred: {}'.format(e))
#if(arcpy.GetParameterAsText(7) == ""): # saved_Layer = r"display_points2" #else: # saved_Layer = arcpy.GetParameterAsText(7) # Set the spatial reference spRef = r"Coordinate Systems\Geographic Coordinate System\World\WGS 1984" #spRef = r"Coordinate Systems\Projected Coordinate Systems\World\WGS_1984_Web_Mercator_Auxiliary_Sphere.prj" #spRef = arcpy.GetParameter(7) #spRef = r"Coordinate Systems\Projected Coordinate Systems\Utm\Nad 1983\NAD 1983 UTM Zone 11N.prj" # Make the XY event layer... for i in range(1, 10): try: arcpy.MakeXYEventLayer_management( tablePath, x_coords, y_coords, out_Layer, spRef) arcpy.SaveToLayerFile_management( out_Layer, saved_Layer) mxd = arcpy.mapping.MapDocument("CURRENT") dataFrame = arcpy.mapping.ListDataFrames(mxd, "*")[0] addlayer = arcpy.mapping.Layer(out_Layer) break except: if ("ERROR 000725" in str(arcpy.GetMessage(3))): saved_Layer_new = saved_Layer + "_" + str(i) out_Layer_new = out_Layer + "_" + str(i) try: arcpy.MakeXYEventLayer_management( tablePath, x_coords, y_coords, out_Layer_new, spRef) arcpy.SaveToLayerFile_management(
def mainFunction(dataFile, geometryType, inputCoordinateSystemName, outputCoordinateSystemName, xField, yField, spreadsheetUniqueID, output):
    """Convert a data file into an arcpy layer, optionally projecting it.

    Dispatches on the file extension of ``dataFile``:
      * ``.xls``/``.xlsx`` — imported via ExcelToTable, then an XY event layer
        is built from ``xField``/``yField`` (or a line/polygon feature via
        ``spreadsheetToLinePolygon`` when ``geometryType`` says so).
      * ``.zip``          — treated as a zipped shapefile and extracted.
      * ``.gpx``          — converted with GPXtoFeatures.
      * ``.kml``/``.kmz`` — converted with KMLToLayer; ``geometryType`` picks
        the Placemarks dataset (Polylines / Polygons / Points).
      * ``.csv``          — XY event layer (or line/polygon via
        ``spreadsheetToLinePolygon``), like the Excel path.
    If ``dataFile`` looks like a URL it is first downloaded to the arcpy
    scratch folder in 16 KB chunks.

    Parameters:
        dataFile                  -- path or URL of the input data.
        geometryType              -- "line", "polygon", or anything else for point.
        inputCoordinateSystemName / outputCoordinateSystemName
                                  -- names resolved by getCoordinateDetails()
                                     into spatial references + a transformation
                                     ("none" means no projection step).
        xField, yField            -- coordinate field names for Excel/CSV input.
        spreadsheetUniqueID       -- ID field used to group rows into line/polygon
                                     features.
        output                    -- result slot; overwritten with the created
                                     layer (see NOTE below about shadowing).

    Returns:
        The created layer when not running as a GP tool (and ``output`` is
        truthy); otherwise the result is published with arcpy.SetParameter(7, ...).
        Error paths call sys.exit() or fall through after logging.

    Relies on module-level helpers/globals defined elsewhere in this file:
    printMessage, getCoordinateDetails, spreadsheetToLinePolygon, urllib2
    (Python 2 style download), arcgisDesktop, enableLogging, logger,
    logMessage, sendErrorEmail, sendEmail.
    """
    try:
        # --------------------------------------- Start of code --------------------------------------- #
        # Resolve coordinate system names into spatial references and a
        # datum transformation ("none" when no projection is required).
        inputCoordinateSystem, outputCoordinateSystem, transformation = getCoordinateDetails(
            inputCoordinateSystemName, outputCoordinateSystemName)

        # If url set as data file: download it locally first.
        urlCheck = ['http', 'https']
        if any(file in dataFile for file in urlCheck):
            printMessage("Downloading file from - " + dataFile + "...", "info")
            # Download the file from the link
            # NOTE(review): `file` shadows the builtin here.
            file = urllib2.urlopen(dataFile)
            fileName, fileExt = os.path.splitext(dataFile)
            # Download in chunks
            fileChunk = 16 * 1024
            # NOTE(review): the `with ... as output` rebinds the `output`
            # parameter to a file handle; every successful branch below
            # reassigns it, but the shadowing is worth confirming/renaming.
            with open(os.path.join(arcpy.env.scratchFolder, "Data" + fileExt), 'wb') as output:
                while True:
                    chunk = file.read(fileChunk)
                    if not chunk:
                        break
                    # Write chunk to output file
                    output.write(chunk)
            # NOTE(review): the `with` block has already closed the file;
            # this explicit close() is a harmless no-op.
            output.close()
            # Point the rest of the function at the downloaded copy.
            dataFile = os.path.join(arcpy.env.scratchFolder, "Data" + fileExt)

        # If data type is excel
        if dataFile.lower().endswith(('.xls', '.xlsx')):
            # If x and y fields provided
            if ((xField) and (yField)):
                # Get geometry type - line or polygon
                if ((geometryType.lower() == "line") or (geometryType.lower() == "polygon")):
                    # If unique Id provided
                    if (spreadsheetUniqueID):
                        # Call function to get layer from spreadsheet
                        output = spreadsheetToLinePolygon(
                            dataFile, geometryType, xField, yField, spreadsheetUniqueID,
                            inputCoordinateSystemName, inputCoordinateSystem,
                            outputCoordinateSystemName, outputCoordinateSystem, transformation)
                    else:
                        printMessage(
                            "Please provide a ID field in the spreadsheet to uniquely identify each feature...",
                            "error")
                        sys.exit()
                # Get geometry type - point
                else:
                    # If projection needed
                    if (transformation.lower() != "none"):
                        printMessage("Importing Excel sheet...", "info")
                        # Import the sheet into an in-memory table, build an XY
                        # event layer from it, then project to the target CRS.
                        arcpy.ExcelToTable_conversion(dataFile, "in_memory\\Dataset", "")
                        arcpy.MakeXYEventLayer_management(
                            "in_memory\\Dataset", xField, yField, "InputLayer", inputCoordinateSystem, "")
                        printMessage(
                            "Projecting layer from " + inputCoordinateSystemName + " to "
                            + outputCoordinateSystemName + "...", "info")
                        arcpy.Project_management(
                            "InputLayer", os.path.join(arcpy.env.scratchGDB, "Layer_Projected"),
                            outputCoordinateSystem, transformation, inputCoordinateSystem,
                            "NO_PRESERVE_SHAPE", "")
                        printMessage("Creating layer...", "info")
                        output = arcpy.MakeFeatureLayer_management(
                            os.path.join(arcpy.env.scratchGDB, "Layer_Projected"), "Layer", "", "", "")
                    else:
                        # No projection needed: the event layer itself is the result.
                        printMessage("Importing Excel sheet...", "info")
                        arcpy.ExcelToTable_conversion(dataFile, "in_memory\\Dataset", "")
                        printMessage("Creating layer...", "info")
                        output = arcpy.MakeXYEventLayer_management(
                            "in_memory\\Dataset", xField, yField, "Layer", inputCoordinateSystem, "")
            else:
                printMessage("Please provide an X and Y field for the Excel file...", "error")
                sys.exit()
        # If data type is shapefile
        elif dataFile.lower().endswith('.zip'):
            printMessage("Importing Shapefile...", "info")
            # Extract the zip file to a temporary location
            # NOTE(review): `zip` shadows the builtin here.
            zip = zipfile.ZipFile(dataFile, mode="r")
            tempFolder = arcpy.CreateFolder_management(
                arcpy.env.scratchFolder, "Data-" + str(uuid.uuid1()))
            zip.extractall(str(tempFolder))
            # Get the extracted shapefile (most recently modified .shp wins).
            shapefile = max(glob.iglob(str(tempFolder) + r"\*.shp"), key=os.path.getmtime)
            # If projection needed
            if (transformation.lower() != "none"):
                printMessage(
                    "Projecting layer from " + inputCoordinateSystemName + " to "
                    + outputCoordinateSystemName + "...", "info")
                arcpy.Project_management(
                    shapefile, os.path.join(arcpy.env.scratchGDB, "Layer_Projected"),
                    outputCoordinateSystem, transformation, inputCoordinateSystem,
                    "NO_PRESERVE_SHAPE", "")
                printMessage("Creating layer...", "info")
                output = arcpy.MakeFeatureLayer_management(
                    os.path.join(arcpy.env.scratchGDB, "Layer_Projected"), "Layer", "", "", "")
            else:
                printMessage("Creating layer...", "info")
                output = arcpy.MakeFeatureLayer_management(shapefile, "Layer", "", "", "")
        # If data type is gpx
        elif dataFile.lower().endswith('.gpx'):
            printMessage("Importing GPX...", "info")
            arcpy.GPXtoFeatures_conversion(dataFile, "in_memory\\Dataset")
            # If projection needed
            if (transformation.lower() != "none"):
                printMessage(
                    "Projecting layer from " + inputCoordinateSystemName + " to "
                    + outputCoordinateSystemName + "...", "info")
                arcpy.Project_management(
                    "in_memory\\Dataset", os.path.join(arcpy.env.scratchGDB, "Layer_Projected"),
                    outputCoordinateSystem, transformation, inputCoordinateSystem,
                    "NO_PRESERVE_SHAPE", "")
                printMessage("Creating layer...", "info")
                output = arcpy.MakeFeatureLayer_management(
                    os.path.join(arcpy.env.scratchGDB, "Layer_Projected"), "Layer", "", "", "")
            else:
                printMessage("Creating layer...", "info")
                output = arcpy.MakeFeatureLayer_management("in_memory\\Dataset", "Layer", "", "", "")
        # If data type is kml
        elif dataFile.lower().endswith(('.kml', '.kmz')):
            # If kml geometry type provided
            if (geometryType):
                printMessage("Importing KML...", "info")
                arcpy.KMLToLayer_conversion(dataFile, arcpy.env.scratchFolder, "KML", "NO_GROUNDOVERLAY")
                outputGeodatabase = os.path.join(arcpy.env.scratchFolder, "KML.gdb")
                # Get the kml dataset as specified (KMLToLayer writes fixed
                # Placemarks feature classes inside KML.gdb).
                if (geometryType.lower() == "line"):
                    kmlDataset = os.path.join(outputGeodatabase, "Placemarks\Polylines")
                elif (geometryType.lower() == "polygon"):
                    kmlDataset = os.path.join(outputGeodatabase, "Placemarks\Polygons")
                else:
                    kmlDataset = os.path.join(outputGeodatabase, "Placemarks\Points")
                # If projection needed
                if (transformation.lower() != "none"):
                    printMessage(
                        "Projecting layer from " + inputCoordinateSystemName + " to "
                        + outputCoordinateSystemName + "...", "info")
                    arcpy.Project_management(
                        kmlDataset, os.path.join(arcpy.env.scratchGDB, "Layer_Projected"),
                        outputCoordinateSystem, transformation, inputCoordinateSystem,
                        "NO_PRESERVE_SHAPE", "")
                    printMessage("Creating layer...", "info")
                    output = arcpy.MakeFeatureLayer_management(
                        os.path.join(arcpy.env.scratchGDB, "Layer_Projected"), "Layer", "", "", "")
                else:
                    printMessage("Creating layer...", "info")
                    output = arcpy.MakeFeatureLayer_management(kmlDataset, "Layer", "", "", "")
            else:
                printMessage("Please provide a geometry type for the KML file...", "error")
                sys.exit()
        # If data type is csv
        elif dataFile.lower().endswith('.csv'):
            # If x and y fields provided
            if ((xField) and (yField)):
                # Get geometry type - line or polygon
                if ((geometryType.lower() == "line") or (geometryType.lower() == "polygon")):
                    # If unique Id provided
                    if (spreadsheetUniqueID):
                        # Call function to get layer from spreadsheet
                        output = spreadsheetToLinePolygon(
                            dataFile, geometryType, xField, yField, spreadsheetUniqueID,
                            inputCoordinateSystemName, inputCoordinateSystem,
                            outputCoordinateSystemName, outputCoordinateSystem, transformation)
                    else:
                        printMessage(
                            "Please provide a ID field in the spreadsheet to uniquely identify each feature...",
                            "error")
                        sys.exit()
                # Get geometry type - point
                else:
                    # If projection needed
                    if (transformation.lower() != "none"):
                        printMessage("Importing CSV...", "info")
                        # CSV can feed MakeXYEventLayer directly (no table
                        # conversion step, unlike the Excel path).
                        arcpy.MakeXYEventLayer_management(
                            dataFile, xField, yField, "InputLayer", inputCoordinateSystem, "")
                        printMessage(
                            "Projecting layer from " + inputCoordinateSystemName + " to "
                            + outputCoordinateSystemName + "...", "info")
                        arcpy.Project_management(
                            "InputLayer", os.path.join(arcpy.env.scratchGDB, "Layer_Projected"),
                            outputCoordinateSystem, transformation, inputCoordinateSystem,
                            "NO_PRESERVE_SHAPE", "")
                        printMessage("Creating layer...", "info")
                        output = arcpy.MakeFeatureLayer_management(
                            os.path.join(arcpy.env.scratchGDB, "Layer_Projected"), "Layer", "", "", "")
                    else:
                        printMessage("Importing CSV...", "info")
                        printMessage("Creating layer...", "info")
                        output = arcpy.MakeXYEventLayer_management(
                            dataFile, xField, yField, "Layer", inputCoordinateSystem, "")
            else:
                printMessage("Please provide an X and Y field for the CSV file...", "error")
                sys.exit()
        else:
            printMessage(
                "Not a valid data file. Please use .csv,.xls,.xlsx,.zip,.gpx,.kml or .kmz...",
                "error")
            sys.exit()
        # --------------------------------------- End of code --------------------------------------- #
        # If called from gp tool return the arcpy parameter
        # NOTE(review): this tests the *module's* __name__ from inside the
        # function — it distinguishes "run as a script/GP tool" from
        # "imported and called"; confirm this matches how the tool invokes it.
        if __name__ == '__main__':
            # Return the output if there is any
            if output:
                # If ArcGIS desktop installed
                if (arcgisDesktop == "true"):
                    arcpy.SetParameter(7, output)
                # ArcGIS desktop not installed
                else:
                    return output
        # Otherwise return the result
        else:
            # Return the output if there is any
            if output:
                return output
        # Logging
        if (enableLogging == "true"):
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logMessage.flush()
            logMessage.close()
            logger.handlers = []
    # If arcpy error
    except arcpy.ExecuteError:
        # Build and show the error message (severity-2 geoprocessing messages).
        errorMessage = arcpy.GetMessages(2)
        printMessage(errorMessage, "error")
        # Logging
        if (enableLogging == "true"):
            # Log error
            logger.error(errorMessage)
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logMessage.flush()
            logMessage.close()
            logger.handlers = []
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage)
    # If python error
    except Exception as e:
        errorMessage = ""
        # Build and show the error message by joining all exception args,
        # with Python 2/3-specific unicode handling.
        # If many arguments
        if (e.args):
            for i in range(len(e.args)):
                if (i == 0):
                    # Python version check
                    if sys.version_info[0] >= 3:
                        # Python 3.x
                        errorMessage = str(
                            e.args[i]).encode('utf-8').decode('utf-8')
                    else:
                        # Python 2.x
                        errorMessage = unicode(e.args[i]).encode('utf-8')
                else:
                    # Python version check
                    if sys.version_info[0] >= 3:
                        # Python 3.x
                        errorMessage = errorMessage + " " + str(
                            e.args[i]).encode('utf-8').decode('utf-8')
                    else:
                        # Python 2.x
                        errorMessage = errorMessage + " " + unicode(
                            e.args[i]).encode('utf-8')
        # Else just one argument
        else:
            # NOTE(review): leaves errorMessage bound to the exception object
            # itself rather than a string — downstream printMessage/logger
            # apparently tolerates that; confirm.
            errorMessage = e
        printMessage(errorMessage, "error")
        # Logging
        if (enableLogging == "true"):
            # Log error
            logger.error(errorMessage)
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logMessage.flush()
            logMessage.close()
            logger.handlers = []
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage)
# Fix the inflection points of the smoothed line (i.e. lateral offset tolerance) on the inFC line. ncurrentstep += 1 arcpy.AddMessage("Creating temporary inflection points feature - Step " + str(ncurrentstep) + "/" + str(nstep)) MakeToPts = arcpy.MakeFeatureLayer_management(ToPts, "%ScratchWorkspace%\\MakeToPts") Selection = arcpy.SelectLayerByAttribute_management(MakeToPts, "NEW_SELECTION", "\"Inflection\" = 1") NearTable = arcpy.GenerateNearTable_analysis(Selection, inFC, "%ScratchWorkspace%\\NearTable", "", "LOCATION", "NO_ANGLE", "") SpatialRef = arcpy.Describe(inFC).spatialReference ProxyPtsTEMP = arcpy.MakeXYEventLayer_management(NearTable, "NEAR_X", "NEAR_Y", "ProxyPtsTEMP", SpatialRef, "") PtsForInflLine = arcpy.CopyFeatures_management( ProxyPtsTEMP, "%ScratchWorkspace%\\PtsForInflLine") PtsForSplitting = arcpy.CopyFeatures_management( ProxyPtsTEMP, "%ScratchWorkspace%\\PtsForSplitting") arcpy.JoinField_management(PtsForInflLine, "IN_FID", ToPts, "OBJECTID", ["Order_ID", "ORIG_FID", "NEAR_X", "NEAR_Y"]) arcpy.JoinField_management(PtsForSplitting, "IN_FID", ToPts, "OBJECTID", ["Order_ID", "ORIG_FID", "NEAR_X", "NEAR_Y"]) # Shaping the inflection points ncurrentstep += 1 arcpy.AddMessage( "Formating inflection points feature in order to create the final inflection line - Step "
# df_species_total = pd.read_csv(r"D:\URI\Spring2021\NRS528\Data\05_Scripts\Challenge5_Anguilla_Pishagqua.csv") # df_Anguilla = df_species_total[df_species_total["Soil_name"]=="Anguilla"] # df_Pishagqua = df_species_total[df_species_total["Soil_name"]=="Pishagqua"] # df_Anguilla.to_csv('Challenge5_Anguilla.csv') # df_Pishagqua.to_csv('Challenge5_Pishagqua.csv') # 1. Convert Challenge5_Anguilla.csv to a shapefile. in_Table = r"Challenge5_Anguilla_Pishagqua.csv" x_coords = "lon" y_coords = "lat" out_Layer = "Anguilla" saved_Layer = r"Challenge5_Anguilla_Pishagqua_Output.shp" # Set the spatial reference spRef = arcpy.SpatialReference(4326) # 4326 == WGS 1984 lyr = arcpy.MakeXYEventLayer_management(in_Table, x_coords, y_coords, out_Layer, spRef, "") # Print the total rows print(arcpy.GetCount_management(out_Layer)) # Save to a layer file arcpy.CopyFeatures_management(lyr, saved_Layer) if arcpy.Exists(saved_Layer): print("Created file successfully!") # 2. Extact the Extent, i.e. XMin, XMax, YMin, YMax of the generated shapefile. desc = arcpy.Describe(saved_Layer) XMin = desc.extent.XMin XMax = desc.extent.XMax YMin = desc.extent.YMin YMax = desc.extent.YMax
altitude = arcpy.GetParameter(6) # Create folder to store all output files if arcpy.Exists(outfolderpath + r'\\' + name) == False: arcpy.CreateFolder_management(outfolderpath, name) # Establish workspace in which output files are generated arcpy.env.workspace = outfolderpath + r'\\' + name # Overwrite duplicate files per program execution arcpy.env.overwriteOutput = True # Retrieve 3D Analyst license for ArcGIS to generate files arcpy.CheckOutExtension('3D') # Create the shape file based off of a river's Cartesian coordinates arcpy.MakeXYEventLayer_management(coordinates, 'X', 'Y', name + '_Layer') arcpy.FeatureClassToFeatureClass_conversion(name + '_Layer', arcpy.env.workspace, name + '_Points') arcpy.AddMessage(name + '_Layer.shp generated.') # Read in ordered FID values with open(boundaryData, 'r') as file: reader = csv.reader(file) for num in reader: boundaryIDs.append(int(num[0])) # Look through coordinates to extract boundary points with arcpy.da.SearchCursor(arcpy.env.workspace + r'\\' + name + '_Points.shp', ['FID', 'X', 'Y']) as pointsCursor: for row in pointsCursor:
##out_folder = workspace ##out_gdb = "Locations.gdb" ###Create File GDB ##arcpy.CreateFileGDB_management(out_folder, out_gdb) ## ##print out_gdb," was created." #Set local variables: in_table = "Overton.xls\Coordinates$" x_coord = "Long" y_coord = "Lat" out_layer = "location_GCS1984.lyr" spat_ref = arcpy.SpatialReference(4326)#GCS_WGS_1984 #Make the XY event layer arcpy.MakeXYEventLayer_management(in_table, x_coord, y_coord, out_layer, spat_ref) print "Your layer ", out_layer," was created" #for debugging purposes #Save to feature class out_fc = "locationsFC_GCS1984" out_gdb = "Locations.gdb" arcpy.FeatureClassToFeatureClass_conversion(out_layer, out_gdb, out_fc) print "Your feature class ", out_fc," was created" #for debugging purposes #Update Workspace: env.workspace =r"C:\GIS_Work\Cphd_Locations\Locations.gdb" #2) change projection from GCS to PCS in_fc = out_fc out_fc2 = "Overton_Locations"
mlp.fit(x,y.values.ravel()) arcpy.AddMessage("Starting Training") #//////////////////////////////////making prediction/////////////////////////// tahmin=mlp.predict_proba(pre) mlp_cls=pd.DataFrame(data=tahmin,index=range(s_analiz),columns=["zeros","ones"]) K=pd.concat([koor,mlp_cls],axis=1) arcpy.AddMessage("Saving Prediction Data as mlp.csv") mlp_result=os.path.join(sf,"mlp.csv") K.to_csv(mlp_result,columns=["x","y","ones"]) #///////////////////////////////Saving Prediction Data as mlp.csv//////////// arcpy.AddMessage("mlp best loss value: {}".format(mlp.best_loss_))# if desired MLP best loss value can be obtained #//////////////////////////Creating Susceptibility map///////////////////////// arcpy.AddMessage("Analysis finished") mlp_sus_map=os.path.join(sf,"mlp_sus") arcpy.AddMessage("Creating SUSCEPTIBILITY Map and Calculating ROC ") arcpy.MakeXYEventLayer_management(mlp_result,"x","y","model",koordinat,"ones") arcpy.PointToRaster_conversion("model","ones",mlp_sus_map,"MOST_FREQUENT","",cell_size) arcpy.AddMessage("Susceptibility Map was created in {} folder as mlp_sus raster file".format(sf)) #////////////////////////////CALCULATING PERFORMANCE/////////////////////////// mx=float (arcpy.GetRasterProperties_management (mlp_sus_map, "MAXIMUM").getOutput (0)) mn=float (arcpy.GetRasterProperties_management (mlp_sus_map, "MINIMUM").getOutput (0)) e=(float(mx)-float(mn))/100 d=[] x=0 y=0 z=0 for f in range (100):