def _get_covid_info():
    """Fetches covid case count info from the OC Open Data Portal."""
    data_item = GIS().content.get(settings.ARCGIS_ITEM_ID)
    dataset = pd.read_csv(data_item.get_data(try_json=False))
    # Drop the trailing row (last line of the CSV is not a city record).
    dataset.drop(dataset.tail(1).index, inplace=True)
    # Columns that are present in the CSV but are not actual city names.
    non_city_columns = {'DateSpecCollect', 'Unknown', 'Other', 'Total'}
    all_cities = [col for col in dataset.keys() if col not in non_city_columns]
    return all_cities, data_item, dataset
def get_feature_service_list():
    """Return the URLs of every layer (tables excluded) of the Feature
    Service items published on the active portal."""
    # Authentication on the active portal using the token arcpy holds.
    sign_in = arcpy.GetSigninToken()
    portal_url = arcpy.GetActivePortalURL()
    gis = GIS(portal_url, token=sign_in['token'])
    # Query up to 200 Feature Service items from the portal's content.
    search_results = gis.content.search(query='',
                                        item_type='Feature Service',
                                        max_items=200)
    layer_urls = []
    for item in search_results:
        try:
            # Only layers, not tables.
            layer_urls.extend(layer.url for layer in item.layers)
        except TypeError:
            # Items whose .layers is not iterable are skipped.
            pass
    return layer_urls
def fetch_geojson(
    service_item_id: str,
    output_dir: str,
    selected_layers: Optional[Sequence[str]] = None,
) -> None:
    """Save selected layers of the arcgis service item."""
    gis = GIS()
    item = gis.content.get(service_item_id)
    if selected_layers is not None:
        # Warn the caller if the selection does not match the item's layers.
        available_names = [layer.properties.name for layer in item.layers]
        suggest_changing_selected_layers(service_item_id, available_names,
                                         selected_layers)
    for layer in item.layers:
        layer_name = layer.properties.name
        # When a selection was supplied, skip layers outside of it.
        if selected_layers is not None and layer_name not in selected_layers:
            continue
        results = layer.query(return_all_records=True, out_sr=4326)
        file_name = f"{service_item_id}_{layer.properties.id}.json"
        print(f"Saving {layer_name} layer to {file_name}")
        results.save(output_dir, file_name)
def downloadPictures(meterDate, direction, pictureLoc):
    """Download every Survey123 attachment created on meterDate for the
    given route direction into pictureLoc."""
    print('Beginning ' + direction + ' Route Survey123 Image Download.')
    print('Logging into ArcGIS.')
    ago = GIS(url=None, username=gis_user, password=gis_password)
    # One hosted feature layer item per route direction.
    featureID = ""
    if direction == "North":
        featureID = "6e4bcc172e844736ad1aa8505537aed4"
    if direction == "South":
        featureID = "22af1b6e3ff04c0497fc12b57692b0e8"
    attachments_layer = ago.content.get(featureID).layers[0]
    oid_field = attachments_layer.properties.objectIdField
    # Restrict to records created within the requested day.
    where_clause = ("CreationDate > '" + meterDate +
                    " 12:00:00 AM' AND CreationDate < '" + meterDate +
                    " 11:59:59 PM'")
    records = attachments_layer.query(where=where_clause,
                                      out_fields=oid_field)
    for feature in records.features:
        oid = feature.get_value(oid_field)
        for attachment in attachments_layer.attachments.get_list(oid):
            downloaded = attachments_layer.attachments.download(
                oid=oid,
                attachment_id=attachment['id'],
                save_path=pictureLoc)
            print("Downloaded:" + downloaded[0])
def init_GIS(portal_url):
    """Create the module-wide GIS connection using the stored app credentials."""
    global gis
    gis = GIS(
        url=portal_url,
        username=app_credentials['username'],
        password=app_credentials['password'],
        # Portal may present a self-signed certificate — TODO confirm.
        verify_cert=False,
    )
def create_and_append(feature_list=None, token=None, portal_url=None,
                      service_url=None, matching=None):
    """Append features to a hosted feature layer in batches.

    feature_list: source features (dicts with 'attributes' and 'geometry').
    token / portal_url: portal credentials; when omitted they are fetched
        from the active arcpy session.
    service_url: URL of the target FeatureLayer.
    matching: sequence of (target_field, source_field) pairs used to map
        source attribute values onto target fields.
    """
    # Bug fix: the passed-in token/portal_url were previously discarded and
    # unconditionally re-fetched here, which defeats callers that capture a
    # token on the main thread before spawning worker threads. Use the
    # provided values and only fall back to arcpy when they are missing.
    if token is None:
        token = arcpy.GetSigninToken()
    if portal_url is None:
        portal_url = arcpy.GetActivePortalURL()
    gis = GIS(portal_url, token=token['token'])
    layer = FeatureLayer(service_url)

    batch = []
    for feature in feature_list or []:
        new_feature = {'attributes': {}, 'geometry': feature['geometry']}
        # Copy each mapped source field's value onto its target field.
        for target_field, source_field in matching:
            new_feature['attributes'][target_field] = \
                feature['attributes'][source_field]
        batch.append(new_feature)
        # Flush in chunks to keep individual edit requests small.
        if len(batch) > 500:
            layer.edit_features(adds=batch)
            batch = []
    if batch:
        layer.edit_features(adds=batch)
def read_gis(item_id='96c4fb36182f409a9b141f3bbaad6ab1', user=None, passw=None):
    """Log into ArcGIS Online and return the first layer of the given
    item as a spatially enabled DataFrame."""
    print('Reading GIS data.')
    connection = GIS(username=user, password=passw)
    item = connection.content.get(item_id)
    frame = pd.DataFrame.spatial.from_layer(item.layers[0])
    print('Complete.')
    return frame
def get_localities_layer(url, username, password):
    """Sign in to ArcGIS Online and return the first layer of the first
    item titled "Field Localities"."""
    gis = GIS(url, username, password)
    # First matching item; assumes the search returns at least one result.
    matches = gis.content.search(query="title:Field Localities",
                                 item_type="Feature Layer")
    return matches[0].layers[0]
def init_gis(username, password, portal_url, hfl_id):
    """Connect to the GIS, get the relevant HFS, return needed feature layer"""
    logger.info("Connecting to GIS portal")
    portal = GIS(portal_url, username, password)
    logger.info("Finished Connecting...Getting the HFL")
    hosted_item = portal.content.get(hfl_id)
    # First layer holds the features; first table holds the visit records.
    return hosted_item.layers[0], hosted_item.tables[0]
def main():
    """Pull COVID tracking data and sync it into the hosted AGOL table."""
    # Start counter
    dtStart = datetime.now()
    logging.info("Initiating {}\n".format(__file__))
    # Call the API
    api = covidTrackingAPI()
    table = None
    try:
        # Create the GIS connection
        gis = GIS(profile='AGOL')
        logging.info("logged in as {}".format(gis.users.me.username))
        logging.info("Getting layer from AGOL")
        # Hosted feature service ID; modify if the hosted service changes.
        fcId = 'fa8f69c34bc8418cad7ba9e0cfa6b568'
        # Load the cases table from AGOL
        table = gis.content.get(fcId).tables[0]
    except Exception:
        logging.error('There was an error accessing the feature on AGOL.',
                      exc_info=True)
    # Bug fix: previously the update block ran even when the setup above
    # failed, so `table` was unbound and the real error was masked by a
    # confusing NameError. Only attempt edits when the table was loaded.
    if table is not None:
        try:
            # Commit the edits to the feature service
            logging.info("Getting updated features")
            # Removes previous records since data is updated retroactively
            query = table.query(where='1=1', return_geometry=False,
                                out_fields='*')
            # Process the table
            filtered_json = tableProcessing(api, query)
            add_features, update_features = buildUpdates(table, filtered_json)
            # Commit edits
            if len(update_features) > 0:
                logging.info("Updating {} records".format(len(update_features)))
                table.edit_features(updates=update_features)
            else:
                logging.info(
                    "No updates available. Please verify table is populated and COVIDTracking API is up."
                )
            if len(add_features) > 0:
                logging.info("Appending {} new records".format(len(add_features)))
                table.edit_features(adds=add_features)
            else:
                logging.info("No new records")
            logging.info("All data updated")
        except Exception:
            logging.error(
                'There was an error appending the data to the table on AGOL.',
                exc_info=True)
    script_run = datetime.now() - dtStart
    logging.info("Script run time: {}".format(script_run))
    logging.info("SCRIPT COMPLETE\n")
def get_localities(url, username, password):
    """Return the "Field Localities" layer as a spatially enabled DataFrame."""
    # Access to ArcGIS Online account.
    gis = GIS(url, username, password)
    # First "Field Localities" item, first layer in its collection.
    layer = gis.content.search(query="title:Field Localities",
                               item_type="Feature Layer")[0].layers[0]
    # Build and return the spatially enabled dataframe.
    return pd.DataFrame.spatial.from_layer(layer)
def __init__(self, url, username, password):
    """Connect to the portal and load every Insights Workbook belonging
    to the signed-in user's organization."""
    portal = GIS(url, username, password)
    me = portal.users.me
    # Restrict the content search to Insights Workbooks in this org.
    workbook_query = f'+type:"Insights Workbook" +orgid:{me.orgId}'
    self._url = url
    self._user = me
    self._users = portal.users.search("")
    self._org_id = me.orgId
    self._workbooks = [Workbook(item)
                       for item in portal.content.search(workbook_query)]
    self._gis = portal
def execute(self, parameters, messages):
    """The source code of the tool."""
    arcpy.SetProgressor("default",
                        message="Accesing to a destinational resouse")
    # Sign in with the active portal's token and open the target layer.
    sign_in = arcpy.GetSigninToken()
    portal_url = arcpy.GetActivePortalURL()
    gis = GIS(portal_url, token=sign_in['token'])
    layer = FeatureLayer(parameters[0].valueAsText)
    arcpy.SetProgressorLabel("Prepearing input data")
    # Parse the input feature set into plain dicts.
    feature_set = arcpy.FeatureSet(parameters[1].valueAsText)
    features = json.loads(feature_set.JSON)['features']
    # Field-matching parameter.
    matching = parameters[2].value
    # Distribute the features across the requested number of threads.
    chunks = chunkIt(features, parameters[3].value)
    workers = []
    arcpy.SetProgressorLabel("Starting threads")
    for chunk in chunks:
        worker = Thread(target=create_and_append,
                        args=[
                            chunk,
                            arcpy.GetSigninToken(), portal_url,
                            parameters[0].valueAsText, matching
                        ])
        workers.append(worker)
        worker.start()
    # Wait for every worker to finish.
    arcpy.SetProgressorLabel("Executing appendence")
    for worker in workers:
        worker.join()
    return
def overwrite_csv():
    """Overwrite the hosted table with a local file.

    Credentials, item id and file path are placeholders that must be
    filled in before use. Stores the session in the module-level `gis`.
    """
    global gis
    # Initiate GIS session.
    username = ""
    password = ""
    PortalUrl = ''
    gis = GIS(PortalUrl, username, password)
    item = gis.content.get('layer id')
    update_flayer = FeatureLayerCollection.fromitem(item)
    try:
        update_flayer.manager.overwrite('path to file')
        print('Item updated!')
    except Exception as exc:
        # Bug fix: the bare `except:` also caught SystemExit and
        # KeyboardInterrupt and hid the reason for the failure entirely.
        print('Update failed.')
        print(exc)
def upload_data(in_geo_data, outDir, properties_dict):
    """Zip a feature class as a shapefile and upload it to ArcGIS Online.

    in_geo_data: path to the input feature class or shapefile.
    outDir: scratch directory for the converted and zipped data.
    properties_dict: item properties for the uploaded archive.
    Returns the portal item created by the upload.
    """
    # SRC: https://developers.arcgis.com/labs/python/import-data/
    import glob  # local import: used only to collect shapefile sidecars

    simple_name = os.path.split(in_geo_data)[1]
    inFile = in_geo_data
    if arcpy.Describe(inFile).dataType != 'ShapeFile':
        arcpy.FeatureClassToFeatureClass_conversion(in_geo_data, outDir,
                                                    simple_name)
        inFile = os.path.join(outDir, simple_name + '.shp')
    zipPath = os.path.join(outDir, simple_name + '.zip')
    with ZipFile(zipPath, 'w') as myzip:
        # Bug fix: a shapefile consists of several files (.shp, .shx, .dbf,
        # .prj, ...) and the upload needs all of them; the old code zipped
        # only the .shp and stored its full directory path inside the
        # archive. Zip every sidecar under its bare file name instead.
        for part in glob.glob(os.path.splitext(inFile)[0] + '.*'):
            if not part.lower().endswith('.zip'):
                myzip.write(part, arcname=os.path.basename(part))
    # Login to AGOL using pro credentials.
    gis = GIS('pro')
    # Upload the data using input properties and data location.
    shp = gis.content.add(properties_dict, data=zipPath)
    return shp
def __init__(self, url, username, password, analysis_folder=None,
             common_tags=None):
    """Connect to the portal and record the output folder and shared tags.

    analysis_folder: optional folder name created for analysis outputs.
    common_tags: optional list of tags applied to produced items.
    """
    self.gis = GIS(url=url, username=username, password=password)
    if analysis_folder:
        # Create the folder up front so outputs have somewhere to land.
        self.gis.content.create_folder(analysis_folder)
        self.analysis_folder = analysis_folder
    else:
        self.analysis_folder = None
    # Only a non-empty list counts; anything else means "no common tags".
    self.common_tags = (common_tags
                        if common_tags and isinstance(common_tags, list)
                        else [])
def main():
    """Export the Idaho feature layer from AGOL to a dated CSV file."""
    logging.basicConfig(filename='app.log', filemode='a',
                        format='%(asctime)s - %(message)s',
                        level=logging.INFO)
    portal = GIS()
    item = portal.content.get("2d0c7fa5ea95405d8e8761801a456c4d")
    flayer = item.layers[0]
    logging.info("Received Idaho Data", exc_info=False)
    frame = pd.DataFrame.spatial.from_layer(flayer)
    # Strip bookkeeping and geometry columns before the export.
    drop_columns = [
        "shape_leng", "globalid", "created_user", "last_edited_user",
        "Shape__Length", "Shape__Area", "SHAPE"
    ]
    frame = frame.drop(drop_columns, axis=1)
    frame.to_csv("out/ID_" + datetime.now().strftime('%Y%m%d') + ".csv")
    logging.info("Wrote Idaho Data", exc_info=False)
def grabSurvey123(meterDate, direction, exportLoc):
    """Export one day of Survey123 records for a route direction to an
    Excel workbook in AGOL, download it, then delete the AGOL copy."""
    print('Beginning ' + direction + ' Route Survey123 Download.')
    print('Logging into .')
    ago = GIS(url=None, username=gis_user, password=gis_password)
    # meterDate appears to be MM/DD/YYYY — the export is named MMDD.
    month = meterDate[:2]
    day = meterDate[3:5]
    year = meterDate[6:10]  # currently unused, kept for parity
    featureID = ""
    if direction == "North":
        featureID = "6e4bcc172e844736ad1aa8505537aed4"
    if direction == "South":
        featureID = "22af1b6e3ff04c0497fc12b57692b0e8"
    survey_item = ago.content.get(featureID)
    itemExportName = month + day
    # Limit the export to records created within the requested day.
    exportParameters = {
        "layers": [{
            "id": 0,
            "where": "CreationDate > '" + meterDate +
                     " 12:00:00 AM' AND CreationDate < '" + meterDate +
                     " 11:59:59 PM'"
        }]
    }
    print('Generating Excel in ArcGIS Online.')
    survey_item.export(title=itemExportName, export_format="Excel",
                       parameters=exportParameters, wait=True)
    # Find the freshly exported item by its title and fetch it.
    exported_id = ago.content.search(query=itemExportName)[0].id
    exported_item = ago.content.get(exported_id)
    print('Downloading to: ' + exportLoc)
    exported_item.download(save_path=exportLoc,
                           file_name="{}.xlsx".format(itemExportName))
    print('Download Complete! Removing ArcGIS Online file.')
    exported_item.delete()
def agol_update(event, context):
    """AWS Lambda entry point: pull API data to a CSV and overwrite the
    matching hosted service on the portal."""
    # All configuration comes from the lambda environment.
    data_url = os.environ['data_url']
    portal_url = os.environ['portal_url']
    portal_user = os.environ['portal_user']
    portal_password = os.environ['portal_password']
    portal_item = os.environ['portal_item']
    file_name = os.environ['file']
    # /tmp is the writable scratch space in the lambda container.
    os.chdir('/tmp/')
    # Snapshot the API payload to a local CSV.
    api_data = requests.get(data_url).json()
    pandas.DataFrame(api_data).to_csv(file_name, index=False)
    # Overwrite the hosted AGOL service with the fresh CSV.
    portal = GIS(url=portal_url, username=portal_user,
                 password=portal_password)
    item = portal.content.get(portal_item)
    feature_service = item.layers[0].container
    feature_service.manager.overwrite(file_name)
def read_gis(item_id='96c4fb36182f409a9b141f3bbaad6ab1', user=None, passw=None):
    """
    Extracts a spatially-enabled DataFrame from ArcGIS Online containing
    census and LAPD divisions data.

    Parameters:
        item_id: ID of FeatureLayer.
        user: ArcGIS username.
        passw: ArcGIS password.

    Returns:
        Spatially-enabled DataFrame.
    """
    print('Reading GIS data.')
    portal = GIS(username=user, password=passw)
    item = portal.content.get(item_id)
    result = pd.DataFrame.spatial.from_layer(item.layers[0])
    print('Complete.')
    return result
def fetch_geocoding_data_with_caching(input_data, reverse=False):
    """Geocode (or reverse-geocode) input_data, caching results on disk.

    input_data: an address string, or a [lon, lat] pair when reverse=True.
    reverse: when True call reverse_geocode, otherwise geocode.
    Returns the raw API response, possibly served from the cache.
    """
    global HIT_API_YET
    if reverse:
        # Convert the longitude/latitude pair into a string to serve as a
        # key in the cache dictionary.
        cache_key = str(input_data[0]) + ', ' + str(input_data[1])
    else:
        cache_key = input_data
    if cache_key in CACHE_DICTION:
        # print("** Pulling data from cache **")
        return CACHE_DICTION[cache_key]
    print("** Fetching new data from API **")
    # Establish the (anonymous) GIS session once per process.
    if not HIT_API_YET:
        gis = GIS()
        HIT_API_YET = True
    if reverse:
        data = reverse_geocode(input_data)  # reverse_geocode() argument is a list
    else:
        data = geocode(cache_key)  # geocode() argument is a string
    CACHE_DICTION[cache_key] = data
    # Bug fix: `with` guarantees the cache file is closed even if the
    # serialization raises; the old manual open/close was not exception-safe.
    with open(ARCGIS_CACHE_FILE_NAME, "w") as cache_file:
        cache_file.write(json.dumps(CACHE_DICTION, indent=4))
    return data
def main():
    """Benchmark batch vs one-by-one projection of random points from
    WGS84 (4326) to Web Mercator (3857) and check they agree."""
    # Get lat and long.
    points = generateRandomLatLong(1000)
    print(points)
    # Connect to gis.
    gis = GIS()
    # Project every point in a single batched request.
    batch_start = time.time()
    allGeoms = arcgis.geometry.project(geometries=points, in_sr=4326,
                                       out_sr=3857)
    batch_end = time.time()
    print(allGeoms)
    # Project the points one request at a time.
    singleGeoms = []
    single_start = time.time()
    for point in points:
        projected = arcgis.geometry.project(geometries=[point], in_sr=4326,
                                            out_sr=3857)[0]
        singleGeoms.append(projected)
    single_end = time.time()
    print(singleGeoms)
    # The two strategies must agree element-for-element.
    for a, b in zip(singleGeoms, allGeoms):
        if a != b:
            print("a does not equal b")
            return
    print("Time for single geometries: %f" % (single_end - single_start))
    print("Time for all geometries: %f" % (batch_end - batch_start))
from arcgis import GIS from getpass import getpass import glob, os os.getcwd() file_list = [os.path.join(os.getcwd(), f) for f in glob.glob("*.shp")] username = getpass("Enter username: "******"Enter password: "******"https://mcmaster.maps.arcgis.com/home/signin.html", username, password) for f in file_list: feature_properties = { 'title':f, 'description':'Buildings per Toronto neighborhoods', 'tags':'buildings, neighborhoods, toronto' } gis.content.add(item_properties = feature_properties, data =
if (os.path.exists(newZipFN)): print('Unable to Delete' + newZipFN) return False zipobj = zipfile.ZipFile(newZipFN, 'w') for infile in glob.glob(inShapefile.lower().replace(".shp", ".*")): if os.path.splitext(infile)[1].lower() != ".zip": print("Zipping %s" % (infile)) zipobj.write(infile, os.path.basename(infile), zipfile.ZIP_DEFLATED) zipobj.close() return True arcpy.AddMessage("Zipping temp file...") zipShapefilesInDir(stageFolder, localDir) arcpy.AddMessage("Connecting to portal...") gis = GIS(url=portal, username=user, password=pw) arcpy.AddMessage("Adding temp item to portal...") tempItem = gis.content.add({"title": outName}, outZip) arcpy.AddMessage("Publishing temp file...") tempLyr = tempItem.publish(None) arcpy.AddMessage("Deleting unnecessary files...") del outZip arcpy.Delete_management(tempBuff) arcpy.Delete_management(tempClip) arcpy.Delete_management(tempShp)
#!/usr/bin/env python # coding: utf-8 #import library from arcgis import GIS import csv import pandas as pd #connect to the organization #replace "USERNAME" with an AGO user #you can add ,"PASSWORD" if you dont want to type your password when the script is run (not recommended) gis = GIS("https://YOUR_AGO_ADDRESS.com", "YOUR_USERNAME") #You will be prompted for a password after the above code runs. all_items = gis.content.search('*', max_items=400) all_keys = list(all_items[0].keys()) len(all_items) #Below this line i iterate through all_items no_views = [] error_IDs = [] x = 0 for item in all_items: try: df = item.usage(date_range='30D', as_df=True) ID = list(item.values())[0] if df is not None: if x == 0: df = df.sum(axis=0, skipna=True) df['ID'] = ID
# Dash/Plotly app setup: pulls BC COVID data both from an ArcGIS feature
# layer and from the BCCDC case-details CSV.
import dash
import plotly.graph_objects as go
import dash_core_components as dcc
import dash_html_components as html
from dash.dependencies import Input, Output
from flask import Flask
import plotly.express as px
from arcgis import GIS
import numpy as np
import pandas as pd

# Anonymous connection to ArcGIS Online.
gis = GIS()
item = gis.content.get("f7d1318260b14ac2b334e81e55ee5c9e#data")
flayer = item.layers[0]
# Health-authority level numbers from the hosted feature layer.
daily_df = pd.DataFrame.spatial.from_layer(flayer)
daily_df.rename(columns={
    'Cases': 'Total Cases',
    'ActiveCases': 'Active Cases',
    'HA_Name': 'Health Authority',
    'Recovered': 'Recovered Cases'
}, inplace=True)
# Per-case line listing published by the BCCDC.
df = pd.read_csv(
    "http://www.bccdc.ca/Health-Info-Site/Documents/BCCDC_COVID19_Dashboard_Case_Details.csv"
)
df.rename(columns={"Reported_Date": "Reported Date"}, inplace=True)
# Last row's reported date — assumes the source CSV is ordered by date;
# NOTE(review): confirm the feed stays date-sorted.
most_recent = df['Reported Date'].iloc[-1]
## daily count of covid cases
direction = 'RIGHT' buffer = arcpy.Buffer_analysis(no_csd_uid, os.path.join(changesGDB, 'buffer_' + d), '5 METERS', direction) print('Making Spatial Join') sj = arcpy.SpatialJoin_analysis(buffer, CSD_data, os.path.join(changesGDB, d + '_sj'), join_operation= 'JOIN_ONE_TO_ONE', join_type= 'KEEP_ALL') joined = arcpy.AddJoin_management(no_csd_uid, 'OBJECTID', sj, 'TARGET_FID') print('Calculating CSD_UIDS') arcpy.CalculateField_management(joined, 'redline_geom.CSD_UID_' + d.upper(), '!{}.CSD_UID!'.format(d + '_sj'), 'PYTHON3') arcpy.RemoveJoin_management(no_csd_uid, d + '_sj') #--------------------------------------------------------------------------- # Upload Logic # Use pro login info as before gis = GIS('pro') print('Logged in as: ' + str(gis.properties.user.username)) # delete old versions if they exist for item in gis.content.search('title: ' + fl_title): item.delete() geom_changes = pd.DataFrame.spatial.from_featureclass(os.path.join(changesGDB, changes_layer), sr= '3347') print( 'Uploading feature layer with ' + str(len(geom_changes)) + ' records to AGOL') geom_fl = geom_changes.spatial.to_featurelayer( title= fl_title, gis= GIS('pro'), tags= 'NGD_AL, Redline, ' + str(date.today())) # Make into a feature layer collection to change properties geom_flc = FeatureLayerCollection.fromitem(geom_fl)
print(d) out_dir = os.path.join(target_folder, item_id) if not os.path.exists(out_dir): os.mkdir(out_dir) out_file = os.path.join(out_dir, 'info.json') with open(out_file, 'w') as f: f.write(json.dumps(item, indent=4)) data = item.get_data(try_json=True) if data: out_file = os.path.join(out_dir, 'data.json') with open(out_file, 'w') as f: f.write(json.dumps(data, indent=4)) item.download_thumbnail(out_dir) if item.type == "Feature Service": _get_service_gdb(out_dir, item) id_val = '4071068fe442459f87b276caf563e73c' save_dir = r'C:\tmp\aaa' gis = GIS(profile="agol_graphc") save_json(save_dir, id_val) print('Done')
# Notebook-style script: load New York City address points from a CSV
# and plot them on an OSM basemap in an arcgis map widget.
from arcgis import GIS
import pandas as pd
from arcgis.features import GeoAccessor, GeoSeriesAccessor
from arcgis.geocoding import batch_geocode
from arcgis.features import SpatialDataFrame

# Sign in to the portal as 'sazdrake' (no password given here — the API
# prompts interactively in this form).
gis = GIS('https://siarcgisweb01.trssllc.com/portal/home', 'sazdrake')
map1 = gis.map('New York, NY')
map1.basemap = "osm"
# Bare expression: displays the widget when run in a notebook cell.
map1
# CSV is expected to carry LON/LAT columns for each address.
address_frame = pd.read_csv('data/city_of_new_york.csv')
address_shp = SpatialDataFrame.from_xy(address_frame, 'LON', 'LAT')
address_shp.spatial.plot(map_widget=map1)
share_item(gis, service_url, type, item) # call our method if __name__ == '__main__': # ## Set up env and connect to gis env = Env() env.read_env() username = env.str('AGO_USERNAME') password = env.str('AGO_PASSWORD') ags_username = env.str('AGS_USERNAME') ags_password = env.str('AGS_PASSWORD') ags_url = env.str('AGS_URL') ago_url = env.str('AGO_URL') prefix = env.str('SERVER_PREFIX', default='') print(env) gis = GIS(ago_url, username, password) # ## Get a token # In[4]: token = get_token(ags_url, ags_username, ags_password) share_unshared_items(gis, ags_url, token, prefix)