def getAuthoritativeData(self):
    """Load the active hurricane layers from ArcGIS and hand them off for processing."""
    self.logger.info('Load hurricane data')

    def _fetch(layer, layer_query):
        # Build the full query URL for one layer and pull it into a FeatureSet.
        fs = arcpy.FeatureSet()
        fs.load(self.activeHurricaneUrl + layer + layer_query +
                self.querySuffix + self.serverTokenParam)
        return fs

    self.observedPositionFs = _fetch(self.observedPositionLayer, self.observedPositionQuery)
    self.observedTrackFs = _fetch(self.observedTrackLayer, self.observedTrackQuery)
    # NOTE: the forecast error-cone layer was deliberately disabled in the
    # original implementation; only observed position/track and forecast
    # position are fetched.
    self.forecastPositionFs = _fetch(self.forecastPositionLayer, self.forecastPositionQuery)

    # Parse the raw JSON payload of each loaded feature set.
    self.posJson = json.loads(self.observedPositionFs.JSON)
    self.trackJson = json.loads(self.observedTrackFs.JSON)
    self.forecastPositionJson = json.loads(self.forecastPositionFs.JSON)

    self.processAuthoritativeData()
def geocodificarOSM(direccion):
    """Geocode an address with OSM Nominatim and return it as a point FeatureSet.

    Args:
        direccion: Address string to geocode.

    Returns:
        arcpy.FeatureSet containing one WGS84 point for the geocoded address.
    """
    from geopy.geocoders import Nominatim
    import arcpy
    arcpy.env.overwriteOutput = True

    geolocalizador = Nominatim(user_agent="Google Chrome")
    direccionesGeo = geolocalizador.geocode(direccion, timeout=15)
    # Bug fix: SHAPE@XY expects an (x, y) tuple, i.e. (longitude, latitude);
    # the original inserted a single "lat,lon" string, which is not a valid
    # geometry value.
    coordenadas = ((direccionesGeo.longitude, direccionesGeo.latitude), )
    print(coordenadas)

    # Create an in_memory feature class to contain the coordinate pair.
    feature_class = arcpy.CreateFeatureclass_management(
        "in_memory", "tempfc", "POINT", spatial_reference="4326")[0]
    print(feature_class)

    # Insert the point, then release the cursor (and its edit lock) before
    # loading the feature class into the FeatureSet. The original also loaded
    # the still-empty feature class into a FeatureSet before inserting, which
    # was redundant and has been removed.
    cursor = arcpy.da.InsertCursor(feature_class, ["SHAPE@XY"])
    cursor.insertRow(coordenadas)
    del cursor

    # Create a FeatureSet object and load the in_memory feature class.
    feature_set = arcpy.FeatureSet()
    feature_set.load(feature_class)
    # Bug fix: the original built the FeatureSet but never returned it.
    return feature_set
def APIdownload(URL, name, itersize, ingeom, proj):
    """Download every feature from an ArcGIS REST layer in OBJECTID chunks.

    Downloads chunks of up to `itersize` OBJECTIDs into temporary feature
    classes named `<name>_<lo>_<hi>`, then merges them into `name` and deletes
    the temporaries.

    Args:
        URL: Feature-service layer endpoint (query string is appended).
        name: Base name for the merged output feature class.
        itersize: Number of OBJECTIDs requested per chunk.
        ingeom: Unused here — TODO confirm intended use (the original had a
            truncated `if ingeom is str` condition that never compiled).
        proj: Unused here — TODO confirm intended use.
    """
    # Bug fixes vs. original: the line `if ingeom is str and IDrange = ...`
    # was a syntax error; Python 2-only `itertools.izip` and `long` replaced
    # with `zip`/`int`; the mangled `&para` -> `¶meterValues` encoding in the
    # query string restored to `&parameterValues=`.
    id_range = range(0, 10000000, itersize)
    for i, j in zip(id_range, id_range[1:]):
        where = "OBJECTID>{0} AND OBJECTID<{1}".format(i, j)
        query = ("?where={}&&text=&objectIds=&time=&geometry=&geometryType=esriGeometryPolygon&inSR=&"
                 "spatialRel=esriSpatialRelIntersects&relationParam=&outFields=*&returnGeometry=true&returnTrueCurves=false&"
                 "maxAllowableOffset=&geometryPrecision=&outSR=&returnIdsOnly=false&returnCountOnly=false&orderByFields=&"
                 "groupByFieldsForStatistics=&outStatistics=&returnZ=false&returnM=false&gdbVersion=&returnDistinctValues=false&"
                 "resultOffset=&resultRecordCount=&queryByDistance=&returnExtentsOnly=false&datumTransformation=&parameterValues=&"
                 "rangeValues=&f=pjson").format(where)
        fsURL = URL + query
        fs = arcpy.FeatureSet()
        fs.load(fsURL)
        if int(arcpy.GetCount_management(fs)[0]) > 0:
            outname = '{0}_{1}_{2}'.format(name, i, j)
            arcpy.CopyFeatures_management(fs, outname)
            print(outname)
        else:
            # An empty chunk means we are past the last OBJECTID.
            break
    print('Downloaded up to {} features'.format(i))
    # Merge the chunk feature classes into the final output, then clean up.
    arcpy.Merge_management(arcpy.ListFeatureClasses('{}_*'.format(name)),
                           output='{}'.format(name))
    for fc in arcpy.ListFeatureClasses('{}_*'.format(name)):
        arcpy.Delete_management(fc)
def execute(self, params, messages):
    """Refresh the Biotics feature-service layers from a local geodatabase.

    For each service layer: delete all its features, then append the matching
    feature class from the input GDB.

    Args:
        params: Tool parameters; params[0] is the source GDB path.
        messages: Tool messages object (unused directly).
    """
    gdb = params[0].valueAsText

    # Establish REST endpoint URLs.
    eo_ptreps = r'https://maps.waterlandlife.org/arcgis/rest/services/PNHP/BioticsEdit/FeatureServer/0'
    eo_reps = r'https://maps.waterlandlife.org/arcgis/rest/services/PNHP/BioticsEdit/FeatureServer/1'
    eo_sourcept = r'https://maps.waterlandlife.org/arcgis/rest/services/PNHP/BioticsEdit/FeatureServer/2'
    eo_sourceln = r'https://maps.waterlandlife.org/arcgis/rest/services/PNHP/BioticsEdit/FeatureServer/3'
    eo_sourcepy = r'https://maps.waterlandlife.org/arcgis/rest/services/PNHP/BioticsEdit/FeatureServer/4'
    et = r'https://maps.waterlandlife.org/arcgis/rest/services/PNHP/BioticsEdit/FeatureServer/5'  # NOTE: not refreshed below

    # Matching local GDB feature classes / table.
    eo_ptreps_gdb = os.path.join(gdb, "eo_ptreps")
    eo_reps_gdb = os.path.join(gdb, "eo_reps")
    eo_sourcept_gdb = os.path.join(gdb, "eo_sourcept")
    eo_sourceln_gdb = os.path.join(gdb, "eo_sourceln")
    eo_sourcepy_gdb = os.path.join(gdb, "eo_sourcepy")
    et_gdb = os.path.join(gdb, "ET")  # NOTE: not refreshed below

    feature_service_layers = [eo_ptreps, eo_reps, eo_sourcept, eo_sourceln, eo_sourcepy]
    gdb_layers = [eo_ptreps_gdb, eo_reps_gdb, eo_sourcept_gdb, eo_sourceln_gdb, eo_sourcepy_gdb]

    for f, g in zip(feature_service_layers, gdb_layers):
        fs = arcpy.FeatureSet()
        fs.load(f)
        arcpy.AddMessage("Deleting features from " + f + " at " + datetime.now().strftime("%H:%M:%S"))
        # Bug fix: arcpy.da.UpdateCursor (original had lowercase `updateCursor`,
        # an AttributeError), and the cursor must iterate `fs` — FeatureSet.load()
        # returns None, which the original passed to the cursor.
        # NOTE(review): confirm that deletes against the FeatureSet propagate to
        # the service as intended; cursoring the URL `f` directly may be needed.
        with arcpy.da.UpdateCursor(fs, "*") as cursor:
            for row in cursor:
                cursor.deleteRow()
        arcpy.AddMessage("Appending features to " + f + " at " + datetime.now().strftime("%H:%M:%S"))
        arcpy.Append_management(g, f, "NO_TEST")
def CreateFC(self, points, sr, name, z=None):
    """Create a point feature class from a delimited coordinate string.

    Args:
        points: Coordinate pairs as 'x y' separated by ';',
            e.g. '-34.04 68.5;-34.05 68.6'.
        sr: Spatial reference applied to each point geometry.
        name: Output feature class name inside <scratch>/scratch.gdb.
        z: Optional Z value applied to every point.

    Returns:
        arcpy.FeatureSet loaded from the created feature class (also stored
        on self.pt). Returns None if an arcpy error is raised.
    """
    try:
        coordpairs = points.split(';')
        pointGeometryList = []
        # Bug fix: removed a dead `coords = points.split(' ')` that ran before
        # the loop and was immediately overwritten; coordinates are converted
        # to float instead of being assigned as strings.
        for coordpair in coordpairs:
            coords = coordpair.split(' ')
            pt = arcpy.Point()
            pt.X = float(coords[0])
            pt.Y = float(coords[1])
            if z:
                pt.Z = z
            pointGeometryList.append(arcpy.PointGeometry(pt, sr))
        path = self.scratch + os.sep + 'scratch.gdb' + os.sep + name
        arcpy.AddMessage('path to sourcept: ' + path)
        arcpy.AddMessage(path)
        arcpy.CopyFeatures_management(pointGeometryList, path)
        fset = arcpy.FeatureSet()
        fset.load(path)
        self.pt = fset
        return fset
    except arcpy.ExecuteError:
        EH = ErrorHandling.ErrorHandling()
        line, filename, err = EH.trace()
        m = "Python error on " + line + " of " + __file__ + \
            " : with error - " + err
        arcpy.AddError(m)
def main():
    """Main entry point: validate the AOI, download the layers, zip the results."""
    # Input parameters.
    layers_to_clip = arcpy.GetParameterAsText(0)
    area_of_interest = arcpy.GetParameter(1)

    # A valid AOI must contain at least one polygon feature.
    aoi_featset = arcpy.FeatureSet()
    aoi_featset.load(area_of_interest)
    if int(arcpy.GetCount_management(aoi_featset)[0]) == 0:
        arcpy.AddError("Provided AOI has no polygon features." +
                       " Please provide valid AOI for analysis.")
        return

    # Download data for the requested layers; bail out on failure.
    if not loop_req_layers(layers_to_clip, area_of_interest):
        return

    # Bundle the generated excel files into the output zip.
    output_zip_file = create_zip_folder()
    if not output_zip_file:
        return
    arcpy.AddMessage("Zip file created at : " + str(output_zip_file))
    arcpy.SetParameter(2, output_zip_file)
def as_feature_set(
    dataset_path: Union[Path, str],
    *,
    field_names: Optional[Iterable[str]] = None,
    dataset_where_sql: Optional[str] = None,
    force_record_set: bool = False,
) -> arcpy.FeatureSet:
    """Return dataset as feature set.

    Args:
        dataset_path: Path to dataset.
        field_names: Collection of field names to include in output. If set to
            None, all fields will be included.
        dataset_where_sql: SQL where-clause property for dataset subselection.
        force_record_set: If True, return record set. If False, return feature
            set if spatial dataset & record set if non-spatial.
    """
    selected_fields = list(field_names) if field_names is not None else None
    view = DatasetView(
        Path(dataset_path),
        field_names=selected_fields,
        dataset_where_sql=dataset_where_sql,
    )
    with view:
        if view.is_spatial and not force_record_set:
            return arcpy.FeatureSet(table=view.name)
        return arcpy.RecordSet(table=view.name)
def createBookmarksFromLayer(self, url, labelfield):
    """Build bookmark JSON entries from the features of a service layer.

    Args:
        url: Feature-service layer URL to query (all features, geometry on).
        labelfield: Field whose value (title-cased) becomes each bookmark name.

    Returns:
        List of bookmark JSON strings; empty list if a ValueError occurs.
    """
    import arcpy
    bmarks = []
    try:
        where = '1=1'
        fields = '*'
        token = ''  # todo when to use token?
        if (url.find("arcgis.com") > 0):
            token = self.user.token
        # The above variables construct the query.
        query = "/query?where={}&outFields={}&returnGeometry=true&f=json&token={}".format(
            where, fields, token)
        fsURL = url + query
        fs = arcpy.FeatureSet()
        fs.load(fsURL)
        fieldnames = [labelfield, "SHAPE@"]
        wkid = "4326"  # use geographic for bookmarks, easier to confirm
        myCursor = arcpy.da.SearchCursor(fs, fieldnames, "", wkid)
        for row in myCursor:
            bm = bookmark()
            extent = row[1].extent
            # Handle points: a zero-width extent gets a small tolerance box.
            if extent.lowerLeft.X == extent.upperRight.X:
                myX = extent.lowerLeft.X
                myY = extent.lowerLeft.Y
                nTol = .05
                myLL = arcpy.Point(myX - nTol, myY - nTol)
                myUR = arcpy.Point(myX + nTol, myY + nTol)
                bm.extent.xmin = myLL.X
                bm.extent.ymin = myLL.Y
                bm.extent.xmax = myUR.X
                bm.extent.ymax = myUR.Y
            else:
                bm.extent.xmin = extent.lowerLeft.X
                bm.extent.ymin = extent.lowerLeft.Y
                bm.extent.xmax = extent.upperRight.X
                bm.extent.ymax = extent.upperRight.Y
            bm.extent.SpatialReference.wkid = wkid
            bm.name = row[0].title()
            bmarks.append(bm.to_JSON2())
    except ValueError as e:
        # Bug fix: replaced the Python 2-only `print 'Error: ' + e.message`
        # (print statement + nonexistent .message in Python 3).
        print('Error: ' + str(e))
    return bmarks
def testCountyEnvelope(self, Parcel):
    """Validate a parcel's XY point against the statewide LTSB parcel service.

    Returns:
        "Valid" when the first returned feature matches Parcel.shapeXY to 2
        decimal places, when no features are returned, or when any error
        occurs (a failure to compare is not evidence of misalignment);
        "Not Confirmed" when the first feature does not match.
    """
    try:
        baseURL = "http://mapservices.legis.wisconsin.gov/arcgis/rest/services/WLIP/PARCELS/FeatureServer/0/query"
        where = str(Parcel.parcelid)
        query = "?f=json&where=UPPER(PARCELID)%20=%20UPPER(%27{}%27)&returnGeometry=true&spatialRel=esriSpatialRelIntersects&outFields=OBJECTID%2CPARCELID%2CTAXPARCELID%2CCONAME%2CPARCELSRC&outSR=3071&resultOffset=0&resultRecordCount=10000".format(where)
        fsURL = baseURL + query
        arcpy.AddMessage(fsURL)
        fs = arcpy.FeatureSet()
        fs.load(fsURL)
        with arcpy.da.UpdateCursor(fs, ["SHAPE@XY"]) as cursorLTSB:
            for rowLTSB in cursorLTSB:
                v2x = round(rowLTSB[0][0], 2)
                v1x = round(Parcel.shapeXY[0], 2)
                v2y = round(rowLTSB[0][1], 2)
                v1y = round(Parcel.shapeXY[1], 2)
                if (v2x == v1x) and (v2y == v1y):
                    arcpy.AddMessage("Parcel geometry validated.")
                    return "Valid"
                else:
                    # NOTE(review): the message implies another record should be
                    # attempted, but the original returned immediately on the
                    # first mismatch; behavior preserved here.
                    arcpy.AddMessage("Parcel geometry not yet validated, will attempt another record.")
                    return "Not Confirmed"
        # Call it valid if the query returns no features (failure to return
        # features would not be caused by a misalignment).
        return "Valid"
    except Exception:
        # Call it valid if an error happens (an error would not be caused by a
        # misalignment). Bug fix: narrowed the bare `except:` (which also
        # swallowed KeyboardInterrupt/SystemExit) and removed the unreachable
        # trailing `return "Valid"` after the try/except.
        return "Valid"
def featureclass_to_json(fc):
    """Convert a feature class to its JSON dictionary representation."""
    # Load the feature layer into a feature set; Describe exposes its json
    # property, which we parse into a plain dict.
    described = arcpy.Describe(arcpy.FeatureSet(fc))
    return json.loads(described.json)
def test__surfaceContainsPoint(self):
    '''
    Check if elevation dataset contains the specified point
    '''
    runToolMessage = ".....VisibilityUtilityTestCase.test__surfaceContainsPoint"
    arcpy.AddMessage(runToolMessage)
    Configuration.Logger.info(runToolMessage)
    # List of coordinates
    coordinates = [[-117.196717216, 34.046944853]]
    # Create an in_memory feature class to initially contain the coordinate pairs
    feature_class = arcpy.CreateFeatureclass_management(
        "in_memory", "tempfc", "POINT")[0]
    # Open an insert cursor
    with arcpy.da.InsertCursor(feature_class, ["SHAPE@XY"]) as cursor:
        # Iterate through list of coordinates and add to cursor
        for (x, y) in coordinates:
            cursor.insertRow([(x, y)])
    # Create a FeatureSet object and load in_memory feature class
    feature_set = arcpy.FeatureSet()
    feature_set.load(feature_class)
    Point_Input = "in_memory\\tempPoints"
    arcpy.CopyFeatures_management(feature_set, Point_Input)
    # Bug fix: `self.assetEqual` was a typo (AttributeError at runtime);
    # the unittest assertion is `assertEqual`.
    self.assertEqual(
        True, Viewshed.surfaceContainsPoint(Point_Input, Viewshed.elevation))
def clip_service_layers(layer, clipping_polygon, output_name):
    """Clip map and feature service layers using the clipping polygon."""
    service_layer = task_utils.ServiceLayer(layer, clipping_polygon.extent.JSON, 'esriGeometryEnvelope')
    arcpy.env.overwriteOutput = True
    out_features = None
    group_cnt = service_layer.object_ids_cnt
    # Query the service one OBJECTID group at a time, clipping each batch.
    for done, group in enumerate(service_layer.object_ids, start=1):
        group = [oid for oid in group if oid]
        where = '{0} IN {1}'.format(service_layer.oid_field_name, tuple(group))
        url = layer + "/query?where={}&outFields={}&returnGeometry=true&f=json".format(where, '*')
        feature_set = arcpy.FeatureSet()
        try:
            feature_set.load(url)
        except Exception:
            # Skip batches that fail to load (progress is not reported for them).
            continue
        if not out_features:
            # First successful batch establishes the output feature class.
            out_features = arcpy.Clip_analysis(feature_set, clipping_polygon, output_name)
        else:
            # Subsequent batches are clipped to a scratch FC and appended.
            clip_features = arcpy.Clip_analysis(feature_set, clipping_polygon, 'in_memory/features')
            arcpy.Append_management(clip_features, out_features, 'NO_TEST')
            try:
                arcpy.Delete_management(clip_features)
            except arcpy.ExecuteError:
                pass
        status_writer.send_percent(float(done) / group_cnt * 100, '', 'clip_data_by_features')
def getAuthoritativeData(self):
    """Load active wildfire report features and hand them off for processing."""
    self.logger.info('Load wildfire data')
    # Pull the active-fire layer from the ArcGIS wildfire activity service.
    request_url = self.wildFireActivityUrl + self.activeFireLayer + self.wildFireActivityQuery
    self.wildfireReportsFs = arcpy.FeatureSet()
    self.wildfireReportsFs.load(request_url)
    self.processAuthoritativeData()
def getAuthoritativeData(self):
    """Load NWS event features (by size/severity) and hand them off for processing."""
    self.logger.info('Load wildfire data')
    # Pull events-by-size/severity from the ArcGIS service; the server token
    # parameter is appended to authorize the request.
    request_url = self.eventsBySizeSeverityUrl + self.eventsBySizeSeverityQuery + self.serverTokenParam
    self.nwsReportsFs = arcpy.FeatureSet()
    self.nwsReportsFs.load(request_url)
    self.processAuthoritativeData()
def FetchCoordinates(path):
    """Return the XY coordinates of every feature in `path` as a numpy array."""
    feature_set = arcpy.FeatureSet(path)
    xy_pairs = [row[0] for row in arcpy.da.SearchCursor(feature_set, ["SHAPE@XY"])]
    return numpy.array(xy_pairs)
def createViewshed(self):
    """Run a Spatial Analyst Visibility analysis and return the result as a
    dissolved, clipped polygon FeatureSet.

    Reads self.buffer (extent/mask), self.cellsize, self.islyr (surface),
    self.obsproc (observers), self.height, self.radius, self.mask, and
    self.scratchgdb. Returns an arcpy.FeatureSet, or None on arcpy error.
    """
    try:
        # Save the current geoprocessing environment so it can be restored
        # before the vector post-processing steps.
        tempEnvironment0 = arcpy.env.extent
        arcpy.env.extent = self.buffer
        tempEnvironment1 = arcpy.env.cellSize
        arcpy.env.cellSize = self.cellsize
        arcpy.AddMessage("cellsize: " + str(arcpy.env.cellSize))
        tempEnvironment2 = arcpy.env.mask
        arcpy.env.mask = self.buffer
        #outraster = sa.Viewshed(self.islyr, self.obsproc, 1, "FLAT_EARTH", 0.13)
        # FREQUENCY analysis with earth curvature correction; observer height
        # and radius come from instance state.
        outraster = sa.Visibility(self.islyr, self.obsproc, analysis_type="FREQUENCY",
                                  nonvisible_cell_value="ZERO", z_factor=1,
                                  curvature_correction="CURVED_EARTH",
                                  refractivity_coefficient=0.13,
                                  observer_offset=self.height,
                                  outer_radius=self.radius,
                                  vertical_upper_angle=90, vertical_lower_angle=-90)
        #outrastertemp = os.path.join(r"C:\GEE\visibility", 'outvis')
        #outraster.save(outrastertemp)
        # Scratch locations for the raster-to-polygon conversion chain.
        vshedtmp = os.path.join("in_memory", 'vshedtmp')
        vsheddis = os.path.join("in_memory", 'vsheddis')
        #vshed_proj = os.path.join(self.scratchgdb, 'vshedproj')
        arcpy.AddMessage("temp vshed fc:" + vshedtmp)
        arcpy.AddMessage("dissolved vshed fc: " + vsheddis)
        # Restore the saved environment settings before vector processing.
        arcpy.env.extent = tempEnvironment0
        arcpy.env.cellSize = tempEnvironment1
        arcpy.env.mask = tempEnvironment2
        # Convert the visibility raster to polygons and dissolve on gridcode.
        arcpy.RasterToPolygon_conversion(outraster, vshedtmp, "NO_SIMPLIFY", "VALUE")
        arcpy.Dissolve_management(vshedtmp, vsheddis, "gridcode", "", "MULTI_PART",
                                  "DISSOLVE_LINES")
##        if(self.wkidproc != self.wkidout):
##            arcpy.AddMessage("Projecting output vshed...")
##            arcpy.AddMessage("projected vshed fc: " + vshed_proj)
##            arcpy.Project_management(vsheddis, vshed_proj, self.srOut)
##            vshed=vshed_proj
##        else:
##            vshed=vsheddis
        #vistmp = os.path.join('in_memory', 'visibility')
        # Clip the dissolved viewshed to the analysis mask and load the result.
        vis = os.path.join(self.scratchgdb, 'visibility')
        arcpy.AddMessage('creating output viewshed: ' + vis)
        arcpy.Clip_analysis(vsheddis, self.mask, vis, "")
        arcpy.AddMessage("Coppying to output...")
        #arcpy.CopyFeatures_management(vistmp, vis)
        fset = arcpy.FeatureSet()
        fset.load(vis)
        return fset
    except arcpy.ExecuteError:
        # Report the failing line and message through the shared error handler.
        EH = ErrorHandling.ErrorHandling()
        line, filename, err = EH.trace()
        m = "Python error on " + line + " of " + __file__ + \
            " : with error - " + err
        arcpy.AddError(m)
def featureclass_to_json(fc):
    """converts a feature class to JSON"""
    if arcpyFound == False:
        raise Exception("ArcPy is required to use this function")
    # Tables have no geometry and must go through the record-set path.
    data_type = arcpy.Describe(fc).dataType
    if data_type in ("Table", "TableView"):
        return recordset_to_json(table=fc)
    return arcpy.FeatureSet(fc).JSON
def build_feature_class(ships):
    """Build the scratch-GDB ship point feature class and publish it as output.

    Creates (overwriting) a WGS84 point feature class named `fcname` in the
    scratch GDB, inserts one point per ship, then hands the result to the GP
    framework via arcpy.SetParameter(1, ...).

    Args:
        ships: Iterable of ship objects exposing id/name/mmsi/... attributes
            and a `las_k_pos` dict with 'course' and GeoJSON-style 'geometry'.
    """
    arcpy.env.overwriteOutput = True
    arcpy.env.workspace = wk = arcpy.env.scratchGDB
    sr = arcpy.SpatialReference(4326)
    # Creates a new FC each time and overwrites the old one.
    arcpy.CreateFeatureclass_management(wk, fcname, "POINT", spatial_reference=sr)
    arcpy.management.AddFields(
        fcname,
        [
            ["ID", "TEXT", "ID", 50],
            ["NAME", 'TEXT', "NAME", 50],
            ["MMSI", 'TEXT', "MMSI", 20],
            ['SHIP_TYPE', 'TEXT', 'SHIP_TYPE', 50],
            ["SHIP_CLASS", 'TEXT', "SHIP_CLASS", 50],
            ["FLAG", 'TEXT', "FLAG", 10],
            ["UPDATED_AT", 'TEXT', "UPDATED_AT", 100],
            ["GEN_CLASS", 'TEXT', "GEN_CLASS", 50],
            ["IND_CLASS", 'TEXT', "IND_CLASS", 50],
            ["COURSE", "FLOAT", "COURSE", 10],
        ])
    entslist = [
        "ID", "NAME", "MMSI", "SHIP_TYPE", "SHIP_CLASS", "FLAG",
        "UPDATED_AT", "GEN_CLASS", "IND_CLASS", "COURSE", "SHAPE@XY",
    ]
    iCur = arcpy.da.InsertCursor(fcname, entslist)
    try:
        for ship in ships:
            pos = ship.las_k_pos
            # Coordinates arrive [lon, lat]; SHAPE@XY wants (x=lon, y=lat).
            lon = pos['geometry']['coordinates'][0]
            lat = pos['geometry']['coordinates'][1]
            iCur.insertRow([
                ship.id, ship.name, ship.mmsi, ship.ship_type, ship.ship_class,
                ship.flag, ship.updated_at, ship.gen_class, ship.ind_class,
                pos['course'], (lon, lat),
            ])
    finally:
        # Bug fix: release the insert cursor (and its schema/edit lock) BEFORE
        # loading the feature class into the FeatureSet; the original deleted
        # it only after SetParameter, and leaked it on an insert error.
        del iCur
    fs = arcpy.FeatureSet()
    fs.load(os.path.join(wk, fcname))
    arcpy.SetParameter(1, fs)
def rowsToJson(dataset):
    """Convert a feature class/table to its JSON dictionary representation."""
    try:
        # Spatial datasets load into a FeatureSet...
        rows = arcpy.FeatureSet(dataset)
    except Exception:
        # ...tables fall back to a RecordSet. Bug fix: the bare `except:` is
        # narrowed so KeyboardInterrupt/SystemExit are no longer swallowed.
        rows = arcpy.RecordSet(dataset)
    desc = arcpy.Describe(rows)  # use the json property of the feature set
    return json.loads(desc.json)
def sendfile_for_custom(recipient, f):
    """Run the CreateCustomAnalysis server tool on an uploaded zipped shapefile.

    Returns a list of status/error messages for the caller.
    """
    msg = []
    try:
        arcpy.ImportToolbox(
            "http://REMOVED/arcgis/services;CreateCustomAnalysis", "Custom")
        # Fetch the uploaded file.
        zipfold, errmsg = get_uploaded_folder(f, path)
        if errmsg:
            return errmsg
        if not zipfile.is_zipfile(zipfold):
            msg.extend([
                "The zipfile upload failed.",
                "The folder isn't a valid zip folder.",
                "Please check that you're sending a compressed folder and try again.",
            ])
            return msg
        # Unzip the folder and extract the shapefile(s).
        zipit = zipfile.ZipFile(zipfold, 'r')
        filelist, errmsg = extract_shp(zipit, path)
        if errmsg:
            return errmsg
        if filelist:
            # Run the server tool once per valid shapefile name found.
            for name in filelist:
                folderpth, rootname = os.path.split(name)
                shapename = rootname + ".shp"
                shapeloc = os.path.join(path, name + ".shp")
                # Asynchronous tool: execution exits after the tool is called.
                boundary = arcpy.FeatureSet()
                boundary.load(shapeloc)
                result = arcpy.createCustomAnalysis_Custom(recipient, boundary, rootname)
            msg.extend([
                "Thank you.",
                "Processing is in progress.",
                "You will receive an email when the files are ready.",
            ])
        else:
            msg.extend([
                "The zipfile upload failed.",
                "No valid shapefile names were found in the zip folder.",
                "Please check the contents of your compressed folder and try again.",
            ])
    except arcpy.ExecuteError:
        msg.append("We have a problem.")
        msg.append(arcpy.GetMessages())
    except Exception:
        msg.append("We have a problem.")
        msg.append(traceback.format_exc())
    return msg
def create_recordset(fc):
    """ create Record Set from Feature Class """
    arcpy.AddMessage("Creating record set...")
    # Round-trip through JSON so convert_fs_to_rs can rewrite the structure.
    fs_dict = json.loads(arcpy.FeatureSet(fc).JSON)
    recset_text = json.dumps(convert_fs_to_rs(fs_dict))
    # AsShape with esri_json=True turns the text back into a RecordSet object.
    return arcpy.AsShape(recset_text, True)
def execute(self, parameters, messages):
    """The source code of the tool.

    Appends the input features to the target hosted layer in parallel, one
    worker thread per chunk of features.

    Parameters (by index): 0 = target layer URL, 1 = input feature set,
    2 = field-matching option, 3 = number of threads.
    """
    # Bug fix: corrected the user-facing progressor strings ("Accesing to a
    # destinational resouse", "Prepearing", "appendence").
    arcpy.SetProgressor("default", message="Accessing the destination resource")
    # Access the target layer through the active portal session.
    token = arcpy.GetSigninToken()
    portal_url = arcpy.GetActivePortalURL()
    gis = GIS(portal_url, token=token['token'])
    layer = FeatureLayer(parameters[0].valueAsText)
    arcpy.SetProgressorLabel("Preparing input data")
    # Prepare the input features as plain dicts.
    feature_set = arcpy.FeatureSet(parameters[1].valueAsText)
    feature_set_dict = json.loads(feature_set.JSON)
    # Field-matching parameter.
    matching = parameters[2].value
    # Split the features into one chunk per requested thread.
    list_of_lists = chunkIt(feature_set_dict['features'], parameters[3].value)
    threads = []
    arcpy.SetProgressorLabel("Starting threads")
    # Start one append worker per chunk.
    for feature_list in list_of_lists:
        threads.append(
            Thread(target=create_and_append,
                   args=[feature_list, arcpy.GetSigninToken(), portal_url,
                         parameters[0].valueAsText, matching]))
        threads[-1].start()
    # Wait for every worker to finish.
    arcpy.SetProgressorLabel("Executing append")
    for thread in threads:
        thread.join()
    return
def crashes_as_json(buffs):
    """prepares crashes for GP Service by converting output to points as FeatureSet()

    Required:
        buffs -- intersection buffers
    """
    # Summarize crashes per intersection buffer polygon.
    summarized = summarize_crashes(buffs)
    # Convert the summarized buffer polygons to points. arcpy.FeatureToPoint
    # requires an advanced license; the polyfill avoids that requirement at
    # the ArcGIS Server level.
    out_points = arcpy.CreateUniqueName('CrashPoints', 'in_memory')
    featureToPointPolyfill(summarized, out_points)
    # Publish as JSON-capable FeatureSet output.
    arcpy.SetParameter(1, arcpy.FeatureSet(out_points))
def sendfile_for_boundary(recipient, f):
    """Run the CreateCustomShapefile server tool on an uploaded zipped shapefile.

    Returns a list of status/error messages for the caller. Bug fix: the
    original printed errors and fell through, returning None on every error
    path; it now always returns the message list, matching the sibling
    sendfile_for_custom.
    """
    logfile = None
    msg = []
    path = dj_media_root
    try:
        logfile = open(os.path.join(path, "zipfiletest.txt"), 'w')
        logfile.write("media root: " + dj_media_root)
        logfile.write("recipient: " + recipient + "\nzip file name: " + f.name)
        arcpy.ImportToolbox("http://REMOVED/arcgis/services;CreateCustomShapefile", "CreateCustomShapefile")
        # Fetch the uploaded file.
        zipfold = get_uploaded_folder(f, path)
        logfile.write("\nzip file after read in: " + zipfold)
        if not zipfile.is_zipfile(zipfold):
            logfile.write("\nFolder isn't a valid zip folder")
            msg.append("Folder isn't a valid zip folder")
            return msg
        # Unzip the folder and extract the shapefile(s).
        zipit = zipfile.ZipFile(zipfold, 'r')
        filelist = extract_shp(zipit, path)
        logfile.write("\nafter zip extract: number of files = " + str(len(filelist)))
        if filelist:
            # Run the server tool once per valid shapefile name found.
            for name in filelist:
                folderpth, rootname = os.path.split(name)
                shapename = rootname + ".shp"
                shapeloc = os.path.join(path, name + ".shp")
                arcpy.AddMessage("name: " + rootname + " and fullname: " + shapeloc)
                # Asynchronous tool: execution exits after the tool is called.
                boundary = arcpy.FeatureSet()
                boundary.load(shapeloc)
                result = arcpy.CreateCustomShapefile_CustomShapefile(recipient, boundary, rootname)
            msg.append("Thank you")
            msg.append("Processing is in progress")
            msg.append("You will receive an email when the files are ready")
        else:
            msg.append("No valid shapefile names found in the zip folder")
        return msg
    except arcpy.ExecuteError:
        print(arcpy.GetMessages())
        msg.append(arcpy.GetMessages())
        return msg
    except Exception:
        print(traceback.format_exc())
        msg.append(traceback.format_exc())
        return msg
    finally:
        # Close the log file whether or not an error occurred.
        if logfile:
            logfile.close()
def createViewshed(self):
    """Run a Spatial Analyst Viewshed analysis and return the result as a
    dissolved, clipped polygon FeatureSet.

    Reads self.buffer (extent/mask), self.cellsize, self.islyr (surface),
    self.obsproc (observers), self.mask, and self.scratchgdb. Returns an
    arcpy.FeatureSet, or None on arcpy error.
    """
    try:
        # Save the current geoprocessing environment so it can be restored
        # before the vector post-processing steps.
        tempEnvironment0 = arcpy.env.extent
        arcpy.env.extent = self.buffer
        tempEnvironment1 = arcpy.env.cellSize
        arcpy.env.cellSize = self.cellsize
        tempEnvironment2 = arcpy.env.mask
        arcpy.env.mask = self.buffer
        # Viewshed with earth curvature correction (refractivity 0.13).
        outraster = sa.Viewshed(self.islyr, self.obsproc, 1, "CURVED_EARTH", 0.13)
        #outrastertemp = os.path.join(self.scratch, 'outvis')
        #outraster.save(outrastertemp)
        # Scratch locations for the raster-to-polygon conversion chain.
        vshedtmp = os.path.join("in_memory", 'vshedtmp')
        vsheddis = os.path.join("in_memory", 'vsheddis')
        #vshed_proj = os.path.join(self.scratchgdb, 'vshedproj')
        arcpy.AddMessage("temp vshed fc:" + vshedtmp)
        arcpy.AddMessage("dissolved vshed fc: " + vsheddis)
        # Restore the saved environment settings before vector processing.
        arcpy.env.extent = tempEnvironment0
        arcpy.env.cellSize = tempEnvironment1
        arcpy.env.mask = tempEnvironment2
        # Convert the viewshed raster to polygons and dissolve on gridcode.
        arcpy.RasterToPolygon_conversion(outraster, vshedtmp, "SIMPLIFY", "VALUE")
        arcpy.Dissolve_management(vshedtmp, vsheddis, "gridcode", "", "MULTI_PART",
                                  "DISSOLVE_LINES")
##        if(self.wkidproc != self.wkidout):
##            arcpy.AddMessage("Projecting output vshed...")
##            arcpy.AddMessage("projected vshed fc: " + vshed_proj)
##            arcpy.Project_management(vsheddis, vshed_proj, self.srOut)
##            vshed=vshed_proj
##        else:
##            vshed=vsheddis
        #vistmp = os.path.join('in_memory', 'visibility')
        # Clip the dissolved viewshed to the analysis mask and load the result.
        vis = os.path.join(self.scratchgdb, 'visibility')
        arcpy.AddMessage('creating output viewshed: ' + vis)
        arcpy.Clip_analysis(vsheddis, self.mask, vis, "")
        arcpy.AddMessage("Coppying to output...")
        #arcpy.CopyFeatures_management(vistmp, vis)
        fset = arcpy.FeatureSet()
        fset.load(vis)
        return fset
    except arcpy.ExecuteError:
        # Report the failing line and message through the shared error handler.
        EH = ErrorHandling.ErrorHandling()
        line, filename, err = EH.trace()
        m = "Python error on " + line + " of " + __file__ + \
            " : with error - " + err
        arcpy.AddError(m)
def QueryESRIFeatureServiceReturnFeatureSet(strAGS_URL, strToken, strWhere, strFields):
    """Query an ArcGIS feature service and return the result as a FeatureSet.

    Args:
        strAGS_URL: Feature-service layer URL (without /query).
        strToken: Auth token appended to the request.
        strWhere: SQL where clause.
        strFields: Comma-separated output field list.

    Returns:
        arcpy.FeatureSet on success, the string "error" on failure.
    """
    try:
        strBaseURL = strAGS_URL + "/query"
        strQuery = "?where={}&outFields={}&returnGeometry=true&f=json&token={}".format(strWhere, strFields, strToken)
        print(strQuery)
        strFsURL = strBaseURL + strQuery
        print(strFsURL)
        fs = arcpy.FeatureSet()
        fs.load(strFsURL)
        return fs
    except Exception as e:
        # If an error occurred, print line number and error message.
        import traceback, sys
        tb = sys.exc_info()[2]
        print("QueryESRIFeatureServiceReturnFeatureSet: Line %i" % tb.tb_lineno)
        # Bug fix: Exception has no .message attribute in Python 3; use str(e).
        print(str(e))
        return "error"
def getInputPointFCFromXY(x, y):
    '''
    returns an in_memory point feature class built from X and Y coords
    (longitude, latitude) in WGS 84
    '''
    sr = arcpy.SpatialReference(4326)  # GCS_WGS_1984
    # Bug fix: the spatial reference must be passed to the PointGeometry
    # constructor; the original assigned a nonexistent `.spatial_reference`
    # attribute after construction, which had no effect on the geometry.
    inWGS84Point = arcpy.PointGeometry(arcpy.Point(x, y), sr)
    # create an in_memory feature class to initially contain the input point
    fc = arcpy.CreateFeatureclass_management("in_memory", "tempfc", "POINT",
                                             None, "DISABLED", "DISABLED", sr)[0]
    # open an insert cursor and add the point
    with arcpy.da.InsertCursor(fc, ["SHAPE@"]) as cursor:
        cursor.insertRow([inWGS84Point])
    # NOTE: the original also built a FeatureSet here but never used or
    # returned it; callers receive the feature class path, as before.
    return fc
def featureclass_to_json(fc, sanitize=True):
    """
    Converts a feature class of feature layer to json.
    Input:
        fc - string - path to feature class
    Output:
        JSON string
    """
    # Load the feature layer into a feature set; Describe exposes its json
    # property, which we parse into a plain dict.
    described = arcpy.Describe(arcpy.FeatureSet(fc))
    data = json.loads(described.json)
    return __unicode_convert(data) if sanitize else data
def merge_feature(in_file1, in_file2, output_file):
    """Merge two feature classes, mapping their num_Doc and suit fields.

    Args:
        in_file1: First input feature class (dir or name).
        in_file2: Second input feature class (dir or name).
        output_file: Output feature class path (deleted first if it exists).

    Returns:
        arcpy.FeatureSet loaded from the merged output.
    """
    # Create the required FieldMap and FieldMappings objects.
    fm_num_Doc = arcpy.FieldMap()
    fm_suit = arcpy.FieldMap()
    fms = arcpy.FieldMappings()
    # Field names of vegetation type and suitability for both original files.
    file1_num_Doc = "num_Doc"
    file2_num_Doc = "num_Doc"
    file1_suit = "suit"
    file2_suit = "suit"
    # Add fields to their corresponding FieldMap objects.
    fm_num_Doc.addInputField(in_file1, file1_num_Doc)
    fm_num_Doc.addInputField(in_file2, file2_num_Doc)
    fm_suit.addInputField(in_file1, file1_suit)
    fm_suit.addInputField(in_file2, file2_suit)
    # Set the output field properties for both FieldMap objects.
    num_Doc_name = fm_num_Doc.outputField
    num_Doc_name.name = 'num_Doc'
    fm_num_Doc.outputField = num_Doc_name
    suit_name = fm_suit.outputField
    suit_name.name = 'suit'
    fm_suit.outputField = suit_name
    # Add the FieldMap objects to the FieldMappings object.
    fms.addFieldMap(fm_num_Doc)
    fms.addFieldMap(fm_suit)
    # Bug fix: os.path.exists cannot see datasets inside a geodatabase, so the
    # pre-delete silently never ran for GDB outputs; arcpy.Exists handles both
    # filesystem paths and GDB feature classes.
    if arcpy.Exists(output_file):
        arcpy.Delete_management(output_file)
    # Merge the two feature classes.
    arcpy.Merge_management([in_file1, in_file2], output_file, fms)
    return arcpy.FeatureSet(output_file)
def update_featureclass_agol(base_URL, update_feature, count):
    """Page features from an AGOL service and update local geometry by FACILITYID.

    Downloads the service features in 1000-record pages into an in-memory
    feature class, then moves each matching local feature to the downloaded
    point location.

    Args:
        base_URL: Feature-service layer URL.
        update_feature: Local feature class whose point geometry is updated.
        count: Number of pages to request (features / 1000).
    """
    # OBJECTID offset used to page through the service.
    n = 0
    # Template feature class used when creating the in-memory FC.
    template = r"C:\PythonBook\Scripts\SanFrancisco.gdb\SanFrancisco\Bus_Stops"
    FC = arcpy.CreateFeatureclass_management("in_memory", "FC", "POINT", template,
                                             "DISABLED", "DISABLED", "", "", "0", "0", "0")
    # Generate an access token.
    token = return_token(service_url, username, password)
    # Request `count` pages of up to 1000 features each (the service page limit).
    for x in range(count):
        where = "OBJECTID>" + str(n)
        query = "/query?where={}&returnGeometry=true&outSR=2227&outFields=*&f=json&token={}".format(
            where, token)
        fs_URL = base_URL + query  # build the final url
        fs = arcpy.FeatureSet()
        fs.load(fs_URL)
        arcpy.Append_management(fs, FC, "NO_TEST")
        n += 1000  # advance to the next page
        # Bug fix: Python 2 `print n` statement replaced with print().
        print(n)
    with arcpy.da.SearchCursor(FC, ['OID@', 'SHAPE@XY', "FACILITYID"]) as cursor:
        for row in cursor:
            pointx = row[1][0]  # x value from 'SHAPE@XY'
            pointy = row[1][1]  # y value from 'SHAPE@XY'
            fid = row[2]
            # NOTE(review): assumes FACILITYID is numeric; a text field would
            # need quoting in this where clause — confirm the schema.
            fid_sql = "FACILITYID = {0}".format(fid)
            # Bug fix: the inner cursor was also named `cursor`, shadowing the
            # outer search cursor; renamed to keep the two distinct.
            with arcpy.da.UpdateCursor(update_feature, ['SHAPE@'], fid_sql) as ucursor:
                for urow in ucursor:
                    print("FACILITYID updated is", fid)
                    # Move the local point to the downloaded x/y location.
                    urow[0] = arcpy.Point(pointx, pointy)
                    ucursor.updateRow(urow)