def get_unused(instance, username=None, password=None):
    """Report portal services that no web map references.

    Connects to *instance*, gathers all non-Esri map image, feature,
    vector tile and image services, then walks every web map's basemap
    and operational layers; any service whose URL appears in a map is
    considered "used".  The remaining services are printed as unused.

    Args:
        instance: portal URL (whatever ``connect`` accepts).
        username: optional portal user name.
        password: optional portal password.
    """
    from arcgis.mapping import WebMap

    # logs into active portal in ArcGIS Pro
    target = connect(instance, username, password)

    # all map image, feature, vector tile and image services
    # (up to 10000 of each) in the portal
    services = []
    for item_type in ("Map Service", "Feature Service",
                      "Vector Tile Service", "Image Service"):
        services += target.content.search(query="NOT owner:esri*",
                                          item_type=item_type,
                                          max_items=10000)

    # all webmaps in the portal
    web_maps = target.content.search(query="NOT owner:esri*",
                                     item_type="Web Map", max_items=10000)

    # BUG FIX: the original called ``services.remove(...)`` while iterating
    # over ``services``, which silently skips the element after each removal.
    # Instead, collect the ids of used services first, then filter once.
    used_ids = set()
    for item in web_maps:
        web_map = WebMap(item)
        # URLs referenced by this map: basemap layers carry 'styleUrl'
        # (vector tile services) or 'url'; operational layers likewise.
        refs = []
        for bm in web_map.basemap['baseMapLayers']:
            refs.append(bm.get('styleUrl') or bm.get('url') or '')
        for layer in web_map.layers:
            refs.append(getattr(layer, 'styleUrl', None)
                        or getattr(layer, 'url', None) or '')
        for service in services:
            if any(service.url in ref for ref in refs):
                used_ids.add(service.id)

    services = [s for s in services if s.id not in used_ids]

    print('The following services are not used in any Web Maps in {}'.format(target))
    for service in services:
        print("{} | {}".format(service.title,
                               target.url + r'home/item.html?id=' + service.id))
    print("There are a total of {} unused services in your portal".format(
        str(len(services))))
def create_new_webmap(project_name, layer_names, *args):
    """ creates a web map, adds feature layers to web map, and defines
    properties for layers and the web map

    Args:
        project_name (str): name of project
        layer_names (list): list of layer names to be added to web map

    Raises:
        TypeError: if project name is not type of string
        TypeError: if layer names is not type of list
        TypeError: if layer name is not type of string
    """
    # --- input validation -------------------------------------------------
    if not isinstance(project_name, str):
        raise TypeError('expected project name to be type of str')
    if not isinstance(layer_names, list):
        raise TypeError('expected layer names to be type of list')
    if any(not isinstance(name, str) for name in layer_names):
        raise TypeError('expected layer name to be type of str')

    extra_info = list(args)

    # --- feature layer collection: fetch, update properties, protect ------
    fl_collection = get_feature_layers_collection(project_name)
    fl_properties = get_properties_from_project(
        project_name=project_name,
        content_type='Feature Layer',
        project_additional_info=extra_info)
    fl_collection.update(item_properties=fl_properties)
    protect_share_item(fl_collection)

    # --- build the web map -------------------------------------------------
    webmap = WebMap()
    print('creating a new web map')
    for fl in fl_collection.layers:
        webmap.add_layer(layer=fl)
        print('adding', fl_collection.title, 'to web map')

    # properties for the web map item itself
    webmap_properties = get_properties_from_project(
        project_name=project_name,
        content_type='WebMap',
        project_additional_info=extra_info)

    # configure popups before saving
    create_popups(web_map=webmap, project_name=project_name,
                  layer_names=layer_names)

    # persist the web map to the portal
    webmap.save(item_properties=webmap_properties)
    print('saving web map in portal')
def wm_search(services):
    """Search every web map in the active portal for references to *services*.

    For each web map, each layer is checked against each service URL fragment;
    vector tile layers are matched on ``styleUrl``, all others on ``url``.
    Matches are reported via ``arcpy.AddMessage`` as "<map title> | <url>".

    Args:
        services: iterable of service URL fragments (strings) to look for.
    """
    arcpy.AddMessage('Searching webmaps in ' + arcpy.GetActivePortalURL())
    web_maps = gis.content.search(query="", item_type="Web Map",
                                  max_items=10000)
    for item in web_maps:
        web_map = WebMap(item)
        for layer in web_map.layers:
            # loops through all input services
            for service in services:
                # BUG FIX: the original used a bare ``except:`` which hides
                # real bugs (and even KeyboardInterrupt).  Only the expected
                # failures are skipped: layers missing 'layerType',
                # 'styleUrl' or 'url'.
                try:
                    if layer['layerType'] == "VectorTileLayer":
                        if service.lower() in layer.styleUrl.lower():
                            arcpy.AddMessage(
                                f"{item.title} | {layer.styleUrl}")
                    elif service.lower() in layer.url.lower():
                        arcpy.AddMessage(f"{item.title} | {layer.url}")
                except (AttributeError, KeyError, TypeError):
                    continue
    arcpy.AddMessage('Search Complete')
import os
import json
from IPython.display import display
import arcgis
from arcgis.gis import GIS, ContentManager
from arcgis.mapping import WebMap

# Enter portal details
gis = GIS(url="", username="", password="")

# Web map that receives one published layer per CSV.
wm_Z1 = WebMap()

folderpath = r'M:\Planning\GIS\GIS Staff\Tayyab\Hussam\DSS per users\CSVs Per User\Zone 1'
zone1Areas = os.listdir(folderpath)

# Loop through all the csvs in the mentioned folder, and add them in the GIS
# as items, publish them as layers, and add them to the above created web map.
for areacsv in zone1Areas:
    if areacsv != '.cekey':  # Ignore Lock files
        csv_file = folderpath + '\\' + areacsv
        csv_item = gis.content.add({'type': 'CSV'}, csv_file)
        lyr = csv_item.publish()
        # BUG FIX: the original added layers to (and saved) ``wm_Z3``, which
        # is never defined in this script and raises NameError; the web map
        # created above is ``wm_Z1``.
        wm_Z1.add_layer(lyr)

# Set Map properties and save.
web_map_properties = {
    'title': 'Zone 1 - GMT Distribution Substations Routine check MAP',
    'snippet': '',
    'tags': 'MEDC'
}
web_map_item = wm_Z1.save(item_properties=web_map_properties)
# In[17]: print(gensD7.drawing_info) # In[4]: #Make a connection to my portal gis2 = GIS("https://caltrans.maps.arcgis.com", "Saffia.Hossainzadeh") # In[6]: # Create a web map of this new data: # Generators from CEC intersected by District 7's polygon from arcgis.mapping import WebMap, WebScene wm = WebMap() wm.definition # In[7]: wm.add_layer(gensD7) # In[10]: web_map_properties = { 'title': 'Generators within District 7', 'snippet': 'This map service shows the generators that are within the Caltrans district 7 jurisdiction. The original data source is the California Energy Commission', 'tags': 'webmap creation from arcgis api for python' } web_map_item = wm.save(item_properties=web_map_properties)
print(resultSorted) # Create break values and labels break0 = int(resultSorted[1]) label0 = "0 - " + str(break0) break1 = int(resultSorted[2]) label1 = str(break0) + " - " + str(break1) break2 = int(resultSorted[3]) label2 = str(break1) + " - " + str(break2) break3 = int(resultSorted[4]) label3 = str(break2) + " - " + str(break3) # bring webmap webmapItem = '6ed1633b7839452fbd81455c411d22ab' item = Item(gisuser, webmapItem, itemdict=None) totalActionsMap = WebMap(item) # modify webmap # breaks totalActionsMap['operationalLayers'][0]['layerDefinition']['drawingInfo'][ 'renderer']['classBreakInfos'][0]['classMaxValue'] = break0 totalActionsMap['operationalLayers'][0]['layerDefinition']['drawingInfo'][ 'renderer']['classBreakInfos'][1]['classMaxValue'] = break1 totalActionsMap['operationalLayers'][0]['layerDefinition']['drawingInfo'][ 'renderer']['classBreakInfos'][2]['classMaxValue'] = break2 totalActionsMap['operationalLayers'][0]['layerDefinition']['drawingInfo'][ 'renderer']['classBreakInfos'][3]['classMaxValue'] = break3 # labels totalActionsMap['operationalLayers'][0]['layerDefinition']['drawingInfo'][ 'renderer']['classBreakInfos'][0]['label'] = label0
# Clone the source items into the target org, then rename/share each clone
# for the current emergency (Thomas fire).
new_items = target_gis.content.clone_items(source_items,
                                           folder='Forest Fire',
                                           copy_data=False,
                                           search_existing_items=False)
print("Cloning items complete. Renaming items")

# Everything gets shared to the responders' group.
target_group = target_gis.groups.search('Properties at risk')[0]

for cloned in new_items:
    kind = cloned.type
    if kind == 'Web Map':
        cloned.update({'title': 'Thomas fire responder map',
                       'snippet': 'Web map for assessing damage and saving properties at risk'})
        cloned.share(groups=[target_group])
        print('Renamed web map -> Thomas fire responder map')
        # add informative web layers to the cloned map
        responder_map = WebMap(cloned)
        responder_map.add_layer(modis_hotspots)
        responder_map.add_layer(parcels_at_risk_item)  # empty web layer at first
        print(' Added informative web layers')
    elif kind == 'Operation View':
        cloned.update({'title': 'Thomas fire response dashboard',
                       'snippet': 'Operations dashboard for assessing damage and saving properties at risk'})
        cloned.share(groups=[target_group])
        print('Renamed dashboard -> Thomas fire response dashboard')
    elif kind == 'Feature Service':
        cloned.update({'title': 'Thomas fire assessment layer',
                       'snippet': 'Feature layer for collecting assessment report'})
        cloned.share(groups=[target_group])
        print('Renamed web layer -> Thomas fire assessment layer')
def processMap(processOrg, fldr, mapTitle, mpNum, mpLayerTags, termsOfUse):
    """Rebuild a Harvard WorldMap map as an ArcGIS Online web map.

    Scrapes the WorldMap page for map *mpNum* with Selenium, extracts the
    embedded map-configuration JSON from a page <script> tag, looks up each
    layer's published Feature Layer in the org (by snippet), derives a
    renderer from the layer's SLD style where possible, and saves the result
    as a new web map item.

    Args:
        processOrg: an already-connected GIS object.
        fldr: processing folder name under C:\\projects\\HarvardMap\\Data.
        mapTitle: title for the new web map (also used for log-table lookups).
        mpNum: WorldMap map number to scrape.
        mpLayerTags: list of per-layer tag lists; flattened into the map tags.
        termsOfUse: license text for the saved web map item.
    """
    # Flatten and get the unique Tags
    mpTags = reduce(operator.concat, mpLayerTags)
    uniqueMpTags = set(mpTags)
    # Accesses the map metadata to understand the layers that are used in the map
    # Creates a list of the layers found
    styleFrontUrl = "http://worldmap.harvard.edu/geoserver/styles/"
    folderProcess = fldr  #"HarvardForestDataMap"
    mTitle = mapTitle  #'Harvard Forest Data Map'
    mapNum = mpNum  #17305
    procSpace = r'C:\projects\HarvardMap\Data\{}'.format(folderProcess)
    # Dictionary lookup for sld styles
    sldGeomLookup = {}
    sldGeomLookup['MultiPolygon'] = "Polygon"
    sldGeomLookup['MultiLineString'] = "Polyline"
    sldGeomLookup['Point'] = "Point"
    # log into the org
    #gis = GIS(profile='harvardworldmap')
    gis = processOrg  #(profile="myorg")
    # Access the map inventory Excel file and load into a dataframe for later access
    #mapDF = pd.read_csv(r"C:\projects\HarvardMap\Data\maps_tab\maps_tab_excelExport.csv",sep=',')
    #seaMP = mapDF[mapDF.id == '{}'.format(mapNum)]
    # read in the Published map data layers from conversion script
    # Add to a dataframe for later access
    # Connect to the log table to determine
    tbIT = gis.content.get('5ca2378ea96e48e7b5739b6121557c3e')
    # c2169b562b4f407e8acf2e2b73b11a4a # 5ca2378ea96e48e7b5739b6121557c3e
    tbl = tbIT.tables[0]
    tbDF = tbl.query(where="mapname='{}'".format(mapTitle), as_df=True)
    lyrlgPubPath = '{}\{}PubLyrLog.txt'.format(procSpace, mTitle)
    #pubLyrDataFrame = pd.read_csv(lyrlgPubPath,index_col='indxCol')
    # Headless scrape of the WorldMap page; requires a local chromedriver.
    browser = webdriver.Chrome(
        executable_path=r"C:\projects\chromedriver_win32\chromedriver.exe")
    #url = "https://worldmap.harvard.edu/maps/17305/info/"
    url = "https://worldmap.harvard.edu/maps/{}".format(mapNum)
    browser.implicitly_wait(60)
    a = browser.get(url)
    # FireFox
    # browser = webdriver.Firefox(executable_path=r"C:\Software\geckodriver-v0.20.0-win64\geckodriver.exe")
    # browser.get(url)
    # opnFile = open(r"C:\Dev\Python\OsirisTools\pythclass.html",'r')
    # pgSource = BeautifulSoup(opnFile,'html.parser')
    pgSource = BeautifulSoup(browser.page_source, 'html.parser')
    # Create an empty webmap
    empty_webmap = WebMap()
    # NOTE(review): the script-tag index (18) and the regex-match index
    # (76) below are hard-coded against the current WorldMap page layout;
    # any page change silently breaks extraction — confirm when the page
    # template changes.
    s = pgSource.head.find_all('script')
    jj = s[18]
    regex = re.compile('\n^(.*?){(.*?)$|,', re.MULTILINE)
    js_text = re.findall(
        regex, jj.string)  #Change jj.text to jj.string based on error
    mapJsonpre = js_text[76][1]
    # Re-attach the opening brace stripped by the regex; drop the trailing
    # two characters so the fragment parses as JSON.
    mapJson = '{0}{1}'.format('{', mapJsonpre[:-2])
    mpJ = json.loads(mapJson)
    # Map Description from the JSON
    if mpJ['about']['introtext']:
        mapDesc = mpJ['about']['introtext']
    else:
        mapDesc = mTitle
    # Map scale
    #if mpJ['map']['maxResolution']:
    #mapScale = ['map']['maxResolution']
    grpDict = {}
    # Pull the group names into a list to reverse their order to preserve layer order from the map
    grpNames = [grp['group'] for grp in mpJ['map']['groups']]
    grpNames.reverse()
    for grp in grpNames:  #mpJ['map']['groups']:
        grpName = grp  #['group']
        print('GroupName :: {} '.format(grpName))
        grpLyrs = []
        for lyr in mpJ['map']['layers']:
            if lyr['group'] == grpName:
                print(lyr['title'])
                #if lyr['visibility']:
                #if lyr['visibility']: #visibile layer start
                # Debug breakpoint hook left by the original author.
                if lyr['title'] == 'Population Density (2010)km2':
                    print('need to stop')
                # # use the name to link over to the item ID
                # if not 'geonode:' in lyr['name']:
                #     llkUP = lyr['detail_url'].split("/")[-1]
                #     print(llkUP)
                # else:
                #     llkUP = lyr['name']
                # isolate and test if the current layer is published vectordata
                #publyrLookupID = tbDF.query("geonodeSRC == '{}'".format(llkUP))
                # search for the published Layer by snippet
                addLayer = gis.content.search(query='snippet:"{}"'.format(
                    lyr['name']), item_type="Feature Layer")
                if len(addLayer) == 1:
                    # Check for styles in layer
                    if 'styles' in lyr and lyr['styles'] != '':
                        lyrStylesUrl = styleFrontUrl
                        sldFileURLTest = "{}{}".format(styleFrontUrl,
                                                       lyr['styles'])
                        if ".sld" in sldFileURLTest:
                            sldFileURL = sldFileURLTest
                        else:
                            sldFileURL = "{}.sld".format(sldFileURLTest)
                    else:
                        #Check the layer info page
                        lyrInfo = urllib.request.urlopen(
                            'http://worldmap.harvard.edu/data/{}'.format(
                                lyr['name']))
                        lyrInfoRD = lyrInfo.read()
                        lyrInfoRDDC = lyrInfoRD.decode('utf-8')
                        lyrInfoSoup = BeautifulSoup(lyrInfoRDDC)
                        spnTyle = lyrInfoSoup.find(
                            'span', attrs={'class': 'styles-list'})
                        if spnTyle:
                            styles = spnTyle.find_all('a')
                            if len(styles) > 0:
                                sldFileURL = styles[0].attrs['href']
                        # NOTE(review): if no style link is found here,
                        # sldFileURL may be unbound below — the bare except
                        # around the renderer work masks that case.
                    # Need to write in how to distinguish renderer type
                    #simple_renderer = {"renderer": "autocast", "type": "simple"}
                    # Check the style Geometry
                    #test the url for a valid connectiono
                    try:
                        stylOpen = urllib.request.urlopen(sldFileURL).read()
                        stylOpenDecode = stylOpen.decode('utf-8')
                        # Infer geometry from which symbolizer the SLD uses.
                        if 'PolygonSymbolizer' in stylOpenDecode:
                            sldGeom = 'Polygon'
                        elif 'LineSymbolizer' in stylOpenDecode:
                            sldGeom = 'Polyline'
                        elif 'PointSymbolizer' in stylOpenDecode:
                            sldGeom = 'Point'
                        # Find the geometry type and pass it to the right render Geometry
                        #sldGeom = sldGeomLookup[publyrLookupID.geometryType.values[0]]
                        renren = ""
                        if sldGeom == 'Point':
                            pntRenderer = symbolsPoints.processPointSymbol(
                                sldFileURL)
                            renren = pntRenderer
                        if sldGeom == 'Polygon':
                            polyRenderer = symbolsPolygon.processPolygonSymbol(
                                sldFileURL)
                            if polyRenderer[1] == 'classbreaks':
                                sdf = pd.DataFrame.spatial.from_layer(
                                    addLayer[0].layers[0])
                                # Field names starting with '_' are prefixed
                                # with 'F' in the published service schema.
                                if polyRenderer[0]['field'][0] == '_':
                                    sdfRenFLD = 'F{}'.format(
                                        polyRenderer[0]['field'])
                                else:
                                    sdfRenFLD = polyRenderer[0]['field']
                                genRen = generate_renderer('Polygon',
                                                           sdf,
                                                           render_type='c',
                                                           field=sdfRenFLD)
                                brks = polyRenderer[0]['classBreakInfos']
                                genRen['classBreakInfos'] = brks
                                polyRenderer.pop(0)
                                polyRenderer.insert(0, genRen)
                                renren = polyRenderer
                        if sldGeom == 'Polyline':
                            lnRenderer = symbolsLines.processLineSymbol(
                                sldFileURL)
                            if lnRenderer[1] == 'classbreaks':
                                sdf = pd.DataFrame.spatial.from_layer(
                                    addLayer[0].layers[0])
                                if lnRenderer[0]['field'][0] == '_':
                                    sdfRenFLD = 'F{}'.format(
                                        lnRenderer[0]['field'])
                                else:
                                    sdfRenFLD = lnRenderer[0]['field']
                                genRen = generate_renderer('Polyline',
                                                           sdf,
                                                           render_type='c',
                                                           field=sdfRenFLD)
                                brks = lnRenderer[0]['classBreakInfos']
                                genRen['classBreakInfos'] = brks
                                lnRenderer.pop(0)
                                lnRenderer.insert(0, genRen)
                                renren = lnRenderer
                        #renren[0]["renderer"] = "autocast"
                        # Get the item from the org
                        #addLayer = gis.content.search(query='snippet:"{}"')
                        #addLayer = gis.content.get(publyrLookupID.itemID.values[0])
                        if renren != "":
                            empty_webmap.add_layer(
                                addLayer[0].layers[0], {
                                    "type": "FeatureLayer",
                                    "renderer": renren[0],
                                    'visibility': lyr['visibility']
                                })
                        else:
                            empty_webmap.add_layer(
                                addLayer[0].layers[0], {
                                    "type": "FeatureLayer",
                                    'visibility': lyr['visibility']
                                })
                    except:
                        # Fallback: add the layer with its default renderer.
                        # NOTE(review): bare except hides real failures
                        # (network, unbound sldFileURL, renderer bugs).
                        empty_webmap.add_layer(
                            addLayer[0].layers[0], {
                                "type": "FeatureLayer",
                                'visibility': lyr['visibility']
                            })
                        pass
                    # Add an item update to remove the tracking snippet
                    # addLayer.update(item_properties={'snippet': "{}".format(lyr['title'])})
                    grpLyrs.append(lyr)
                else:
                    # NOTE(review): the placeholder '{}' is never formatted —
                    # the message prints literally.
                    print('Current Layer {} does not have a featurelayer')
                    grpLyrs.append(lyr)
                # # use the name to link over to the item ID
                # if not 'geonode:' in lyr['name']:
                #     llkUP = lyr['detail_url'].split("/")[-1]
                #     print(llkUP)
                # else:
                #     llkUP = lyr['name']
                # # publyrLookupID = pubLyrDataFrame.query("geonodeSRC == '{}'".format(llkUP))
        grpDict[grpName] = grpLyrs
    # Save the assembled web map back to the org.
    webmapprops = {
        'title': '{}'.format(mTitle),
        'description': mapDesc,
        'licenseInfo': termsOfUse,
        'snippet': ' ',
        'tags': uniqueMpTags
    }
    empty_webmap.save(item_properties=webmapprops)
# # https://developers.arcgis.com/labs/python/display-a-web-map/ # # Display a web map # In[1]: #imports from arcgis.gis import GIS from arcgis.mapping import WebMap from IPython.display import display gis = GIS() webmap = gis.content.get('41281c51f9de45edaf1c8ed44bb10e30') display(webmap) la_parks_trails = WebMap(webmap) la_parks_trails # In[ ]: # Desafío # In[2]: operational_layers = la_parks_trails.layers n = len(operational_layers) print("El Webmap tiene {} capas".format(n)) # In[3]: for layer in operational_layers:
# Paths to saved layer-definition JSON snippets used when building the map.
RangeTenure_JSON_File = r'S:\Gis\Python\AGO\Map Base JSON\RangeTenure_JSON.txt'
RangePasture_JSON_File = r'S:\Gis\Python\AGO\Map Base JSON\RangePasture_JSON.txt'
SoilSurvey_JSON_File = r'S:\Gis\Python\AGO\Map Base JSON\SoilSurvey_JSON.txt'
LivestockLarge_JSON_File = r'S:\Gis\Python\AGO\Map Base JSON\LivestockLarge_JSON.txt'
IndianReserves_JSON_File = r'S:\Gis\Python\AGO\Map Base JSON\IndianReserves_JSON.txt'
Greenspace_JSON_File = r'S:\Gis\Python\AGO\Map Base JSON\Greenspace_JSON.txt'
BCParks_JSON_File = r'S:\Gis\Python\AGO\Map Base JSON\BCParks_JSON.txt'
NationalParks_JSON_File = r'S:\Gis\Python\AGO\Map Base JSON\NationalParks_JSON.txt'

# Prompt for credentials and connect to the BC government AGO organization.
# BUG FIX: the original credential line was mangled (apparently by password
# redaction) and was not valid Python; reconstructed as a getpass prompt
# followed by the portal connection, which the gis.content.get calls below
# require.
username = input("Username :")
password = getpass.getpass(prompt='Password: ')
gis = GIS('https://governmentofbc.maps.arcgis.com/', username, password)

# Initialize New Web Map Content
wm = WebMap()

# Get Existing AGO Content (BCGW) Using Item ID
ALR = gis.content.get('0ce279465c97462e8b583e6d67987bdd')
AgCAP = gis.content.get('3fb6d970b6204c3aaee400c7a73d68e7')
PMBC = gis.content.get('ce7fd87476b54100a3b158c9dae7e9b7')
CrownTenures = gis.content.get('a009af6874154d53bafa540c29c5faf8')
RangeTenure = gis.content.get('439b18bcd65b4fb0bf943876960b172a')
RangePasture = gis.content.get('743270b1b89c45b49efb5d4241baf97a')
SoilSurvey = gis.content.get('ee6879ca62194b88aee3a18f954a70f3')
LivestockLarge = gis.content.get('1649d11f18f443ecae7003afa2df99b3')
IndianReserve = gis.content.get('6f1e8fc7bd24460cad0ae533eb4acfe2')
Greenspace = gis.content.get('7523d1849d6e46f5b997d833e849d88b')
BCParks = gis.content.get('8d1d458346bd42adbedbd9754dac0b33')
NationalParks = gis.content.get('5ade04eb4d7145eab978ba0cff1e6ba8')
from arcgis.mapping import WebMap from arcgis.gis import GIS import json with open('config.json', 'r') as f: config = json.load(f) gis = GIS(config['url'], config['username'], config['password']) wm = WebMap() os_boundaryline = gis.content.get("5b60fac33976436ab900e05eb1a33216") wm.add_layer(os_boundaryline) len(wm.layers) fillSymbolDiagonal = { "color": [0, 0, 0, 64], "outline": { "color": [0, 0, 0, 255], "width": 1, "type": "esriSLS", "style": "esriSLSSolid" }, "type": "esriSFS", "style": "esriSFSDiagonalCross" } i = 0 while i < len(wm.layers): if wm.definition.operationalLayers[i].title == "Historic counties": print("Layer found in index: ", i) wm.definition.operationalLayers[ i].title = "Historic counties (renamed)"
#API Reference https://developers.arcgis.com/rest/packaging/api-reference/create-map-area.htm from arcgis.gis import GIS from arcgis.mapping import WebMap import json with open('creds\creds.json') as json_data: # Grab the credential object d = json.load(json_data) # setup the portal gis = GIS(d[1]['portal'], d[1]['user'], d[1]['pass']) wm_itemid = 'c9db4302670347a4830e94927764187e' #get the webmap to use offline offline_map_item = gis.content.get(wm_itemid) offline_webmap = WebMap(offline_map_item) #loop through the webmap's bookmarks and try to create a map area for each one offline_areas = offline_webmap.offline_areas #Cleanup and remove existing preplanned areas for ids in offline_areas.list(): ids.delete() for bookmark in offline_webmap.definition.bookmarks: name = bookmark.name try: offline_area = offline_areas.create( area=name,
#!/usr/bin/env python # coding: utf-8 # Работа с веб-картами по какой-то причине не доступна, объекты не грузятся. # In[58]: from arcgis.mapping import WebMap empty_webmap = WebMap() # In[59]: empty_webmap.layers # In[61]: ebola_map = WebMap(ebola_map_item) # # Модуль arcgis.mapping # Использование виджета карты # In[4]:
def update_maps(q, old_id, new_layer):
    """Replace a tiled-service layer in every matching web map.

    Searches the portal for web maps matching *q*; in each map, any
    ArcGISTiledMapServiceLayer whose itemId equals *old_id* is removed and
    *new_layer* is added (titled "Clatsop County").  Updated maps are saved
    in place and a comment is added to the web map item.

    Args:
        q: content-search query string.
        old_id: item id of the layer to replace.
        new_layer: item/layer to add in place of the old one.

    Returns:
        The number of web maps examined (not the number updated).
    """
    count = 0
    msg = ''
    maps = gis.content.search(q, max_items=1000, sort_field="title",
                              sort_order="asc", outside_org=False,
                              item_type="Web Map")
    print("Found %d maps." % len(maps))
    # Renamed the loop variable from ``map`` (shadowed the builtin).
    for map_item in maps:
        i_did_update = False
        map_info = "%s (%s) %s" % (map_item.title, map_item.owner, map_item.type)
        count += 1
        web_map = WEBMAP(map_item)
        for layer in web_map.layers:
            # Labels use a different workflow and are replaced directly in
            # Portal, so VectorTileLayer handling stays disabled here.
            # Some layers carry no itemId/layerType; treat them as no-match.
            try:
                layerId = layer.itemId
                layerType = layer.layerType
            except AttributeError:
                layerId = ''
                layerType = ''
            if layerType == 'ArcGISTiledMapServiceLayer' and layerId == old_id:
                msg += map_info + "\n"
                msg += "\ttitle:\"%s\" id:%s itemId:%s\n" % (layer.title,
                                                             layer.id, layerId)
                # BUG FIX: the original printed the undefined name ``new_id``
                # (NameError on the replacement path); report the actual
                # replacement layer instead.
                print("new layer", new_layer)
                # CAVEAT (original author): add_layer puts the new layer at
                # the TOP of the layer list, not in the old layer's position.
                web_map.remove_layer(layer)
                web_map.add_layer(new_layer, options={"title": "Clatsop County"})
                i_did_update = True
            else:
                print("32%s \"%s\" %s" % (layerId, layer.title, layerType))
        if i_did_update:
            # "update" saves changes made to the web map in place;
            # "save" would create a new copy of the map instead.
            web_map.update()
            comment = "%s updated by \"%s\"" % (datestamp, myname)
            item = ITEM(gis, web_map.item.id)
            item.add_comment(comment)
            """
            # SAVE (alternative path, intentionally disabled)
            old_title = web_map.item.title
            # I need to grab the existing thumbnail from Portal
            #watermarked_thumbnail = "thumbnail_" + datestring + ".jpg"
            #watermark("thumbnail.jpg", watermarked_thumbnail, old_title, (0,200,200,128))
            # comment indicates new map created
            comment = "%s created from \"%s\" by \"%s\"" % (datestamp, web_map.item.title, myname)
            item = web_map.save(item_properties={
                # These are REQUIRED
                "title": "CHANGED " + old_title + ' ' + datestamp,
                "snippet": web_map.item.snippet,
                "tags": web_map.item.tags
                },
                #thumbnail = watermarked_thumbnail,
                # folder = "TESTING_Brian"
            )
            item.add_comment(comment)
            """
    print(msg)
    return count
# connect to EMEF's map server, get the points and the admin boundaries ef_points = MapImageLayer( 'https://geopub.epa.gov/arcgis/rest/services/EMEF/efpoints/MapServer/') ef_boundaries = MapImageLayer( 'https://geopub.epa.gov/arcgis/rest/services/EMEF/Boundaries/MapServer/') superfund_points = ef_points.layers[0] toxic_releases = ef_points.layers[1] counties = ef_boundaries.layers[5] states = ef_boundaries.layers[6] # we know which layers to add, so create blank webmap to add it to. This one has dark gray canvas darkGray = my_gis.content.search('title:dark', outside_org=True, item_type='web map')[0] my_webmap = WebMap(darkGray) # add the feature layers to the dark gray canvas map my_webmap.add_layer(counties) my_webmap.add_layer(states) my_webmap.add_layer(superfund_points) my_webmap.add_layer(toxic_releases) # give webmap a name (can be changed to user-input prompt) webmap_name = 'my webmap' # search for existing webmap, delete if necessary existing_webmap = my_gis.content.search(webmap_name, item_type="Web Map") if len(existing_webmap) > 0: answer = input(
password = "******") #Specify the desired existing web map using the query and item type# webmap_search = gis.content.search(query = "Knoxville_QC_Map_Live", item_type = "Web Map") #There should only be one result, but you still must specify its index, which is [0]# webmap_result = webmap_search[0] #Set the Title of the web map# name = webmap_result.title print ("The title of the map is " + name) print("\n") #Place the search result into the WebMap class# webmap = WebMap(webmap_result) #Collect and display a list of the layers within the web map# layersList = webmap.layers print ("Layers: \n") for item in layersList: print (item.title) print("\n") #Establish the required Item Properties# itemProperties = {'title':'Knoxville_QC_Map_Live_BACKUP_' + today, 'snippet':'Backup Knoxville_QC_Map_Live map created using Python API', 'tags':['automation', 'Backup']}
def main():
    """Report portal services unused by any web map (ArcGIS Pro script tool).

    Logs into the active portal, gathers all map image, feature, vector tile
    and image services, then walks every web map's basemap and operational
    layers; services whose URL appears nowhere are reported via
    ``arcpy.AddMessage``.
    """
    # logs into active portal in ArcGIS Pro
    gis = GIS('pro')
    arcpy.AddMessage("Logged into {} as {}".format(
        arcpy.GetActivePortalURL(), gis.properties['user']['username']))

    # all map image, feature, vector tile and image services
    # (up to 10000 of each) in the active portal
    services = []
    for item_type in ("Map Service", "Feature Service",
                      "Vector Tile Service", "Image Service"):
        services += gis.content.search(query="", item_type=item_type,
                                       max_items=10000)

    arcpy.AddMessage('Searching webmaps in {}'.format(
        arcpy.GetActivePortalURL()))
    # all webmaps in the active portal
    web_maps = gis.content.search(query="", item_type="Web Map",
                                  max_items=10000)

    # BUG FIX: the original removed entries from ``services`` while iterating
    # over it, which skips the element following each removal.  Collect the
    # ids of used services first, then filter once.
    used_ids = set()
    for item in web_maps:
        web_map = WebMap(item)
        # URLs referenced by this map: vector tile layers carry 'styleUrl',
        # everything else 'url' — in both basemap and operational layers.
        refs = [bm.get('styleUrl') or bm.get('url') or ''
                for bm in web_map.basemap['baseMapLayers']]
        refs += [getattr(layer, 'styleUrl', None)
                 or getattr(layer, 'url', None) or ''
                 for layer in web_map.layers]
        for service in services:
            if any(service.url in ref for ref in refs):
                used_ids.add(service.id)

    services = [s for s in services if s.id not in used_ids]

    arcpy.AddMessage(
        'The following services are not used in any webmaps in {}'.format(
            arcpy.GetActivePortalURL()))
    for service in services:
        arcpy.AddMessage("{} | {}".format(
            service.title,
            arcpy.GetActivePortalURL() + r'home/item.html?id=' + service.id))
    arcpy.AddMessage(
        "There are a total of {} unused services in your portal".format(
            str(len(services))))
def initialize_dro(
        dro_id,
        gis,
        dro_template_id='2df64ef2bc874bdb8393255375feb894',
        sit_template_id='a1dbcdad380840249d26ccc520d1c441',
        ops_template_id='e9c20858fdb342c9a6b0e514e7c9f9f7',
        dir_template_id='9e36639d9da741138b475e05b2f79f14'):
    """Initializes mapping items for a disaster relief operation.

    Creates a DRO folder, copies and publishes the feature template, then
    builds the situational awareness map, operations dashboard, and
    director's brief from their templates.  Progress is shown via a spinner;
    on any failure the spinner reports the error and the function returns
    early (no cleanup/rollback of items created so far).

    Arguments:
        dro_id           Disaster relief operation identifier.
        gis              RCViewGIS object.
        dro_template_id  Item ID of DRO feature file geodatabase template.
        sit_template_id  Item ID of situational awareness web map template.
        ops_template_id  Item ID of operations dashboard template.
        dir_template_id  Item ID of director's brief story map template.
    """
    # create DRO folder (reuse it if one with this title already exists)
    spinner = _RCSpinner('Creating folder')
    spinner.start()
    folders = gis.users.me.folders
    if not dro_id in [f['title'] for f in folders]:
        dro_folder = gis.content.create_folder(dro_id)
        if not dro_folder:
            # NOTE: 'Intialization' typo is preserved in these user-facing
            # messages (sic) — fix across all of them in one pass if desired.
            spinner.fail('Failed to create DRO folder. Intialization aborted.')
            return
    else:
        dro_folder = [f for f in folders if f['title'] == dro_id][0]
    # copy DRO features template (item title must be a valid service name,
    # hence non-word characters are replaced with underscores)
    spinner.text = 'Copying features template'
    dro_id_under = re.sub('\W+', '_', dro_id)
    dro_template_item = gis.content.get(dro_template_id)
    dro_fgdb = dro_template_item.copy(title=dro_id_under + '_Features')
    move_result = dro_fgdb.move(dro_folder)
    if not move_result['success']:
        spinner.fail('Failed to move features template to DRO folder. Intialization aborted.')
        return
    # publish DRO feature service
    spinner.text = 'Publishing feature service'
    dro_features = dro_fgdb.publish()
    if not dro_features:
        spinner.fail('Failed to publish DRO feature service. Initialization aborted.')
        return
    # create situational awareness map from its template and wire in the
    # freshly published features
    spinner.text = 'Creating situational awareness map'
    sit_template_item = gis.content.get(sit_template_id)
    sit_map_item = sit_template_item.copy(title=dro_id + ' Situational Awareness Map')
    if not sit_map_item:
        spinner.fail('Failed to copy situational awareness map. Initialization aborted.')
        return
    move_result = sit_map_item.move(dro_folder)
    if not move_result['success']:
        spinner.fail('Failed to move situational awareness map to DRO folder. Intialization aborted.')
        return
    sit_map = WebMap(sit_map_item)
    add_result = sit_map.add_layer(dro_features)
    if not add_result:
        spinner.fail('Failed to add features to situational awareness map. Initialization aborted.')
        return
    update_result = sit_map.update()
    if not update_result:
        spinner.fail('Failed to update situational awareness map. Initialization aborted.')
        return
    # create operations dashboard and repoint every widget's data source at
    # the new feature service's operations table
    spinner.text = 'Creating operations dashboard'
    ops_template_item = gis.content.get(ops_template_id)
    ops_item = ops_template_item.copy(title=dro_id + ' Operations Dashboard')
    move_result = ops_item.move(dro_folder)
    if not move_result['success']:
        spinner.fail('Failed to move operations dashboard to DRO folder. Intialization aborted.')
        return
    ops_template_data = ops_template_item.get_data()
    ops_table = dro_features.tables[0]
    # NOTE(review): ``.properties`` on arcgis layers/tables is usually a
    # dict-like PropertyMap; calling it as ``properties('id')`` may need to
    # be ``properties['id']`` — confirm against the installed arcgis version.
    ops_table_id = ops_table.properties('id')
    for widget in ops_template_data['widgets']:
        dataSource = widget['datasets'][0]['dataSource']
        dataSource['itemId'] = dro_features.itemid
        dataSource['name'] = 'operations ({})'.format(dro_features.title)
        dataSource['layerId'] = ops_table_id
    update_result = ops_item.update(data=json.dumps(ops_template_data))
    if not update_result:
        spinner.fail('Failed to update operations dashboard. Intialization aborted.')
        return
    # create director's brief: link its story entries to the situational
    # awareness map and the operations dashboard created above
    dir_template_item = gis.content.get(dir_template_id)
    dir_item = dir_template_item.copy(title=dro_id + " Director's Brief")
    move_result = dir_item.move(dro_folder)
    if not move_result['success']:
        spinner.fail("Failed to move director's brief to DRO folder. Intialization aborted.")
        return
    dir_template_data = dir_template_item.get_data()
    dir_template_data['values']['title'] = dro_id + " Relief Operation Director's Brief"
    dir_template_data['values']['story']['entries'][0]['media']['webmap']['id'] = sit_map_item.id
    dir_template_data['values']['story']['entries'][1]['media']['webpage']['hash'] = '/' + ops_item.id
    dir_template_data['values']['story']['entries'][1]['media']['webpage']['url'] = 'https://maps.rcview.redcross.org/portal/apps/opsdashboard/index.html#/' + ops_item.id
    update_result = dir_item.update(
        item_properties={'url': 'https://maps.rcview.redcross.org/portal/apps/MapSeries/index.html?appid=' + dir_item.id},
        data=json.dumps(dir_template_data)
    )
    if not update_result:
        spinner.fail("Failed to update director's brief. Intialization aborted.")
        return
    spinner.succeed('Finished initializing DRO.')
def run(self):
    """Run method that performs all the real work.

    Shows the plugin dialog; when OK is pressed, connects to an ArcGIS
    portal with the credentials typed into the dialog, turns every Planet
    tiled WMTS layer of the current QGIS project into a web-map
    "WebTiledLayer" operational layer, and creates (or updates) a portal
    web map item carrying those layers, finally setting the item extent
    from the WMTS capabilities document.
    """
    # Create the dialog with elements (after translation) and keep reference
    # Only create GUI ONCE in callback, so that it will only load when the plugin is started
    if self.first_start == True:
        self.first_start = False
        self.dlg = GenerateWebMapWithWmtsDialog()

    # show the dialog
    self.dlg.show()
    # Run the dialog event loop
    result = self.dlg.exec_()
    # See if OK was pressed
    if result:
        # NOTE(review): bare attribute access — a no-op left over from the
        # plugin template stub; confirm it can be deleted.
        self.dlg.leUser
        # NOTE(review): `project` is never used below — confirm it can go.
        project = QgsProject.instance()
        # Timestamp embedded in the web map item description.
        current_utc = datetime.datetime.utcnow().strftime(
            "%Y-%m-%d %H:%M:%S")
        # set web map title
        wmtitle = self.dlg.webmapname.text()
        snippet = self.dlg.snippet.text()
        # connect to a portal
        print('Connecting....')
        gis = GIS(username=self.dlg.leUser.text(),
                  password=self.dlg.lePass.text(),
                  verify_cert=False)
        # NOTE(review): `user` is never used below.
        user = gis.users.me.username
        # empty array for operationalyers to updated in the web map
        operationallayers = []
        existingwm = gis.content.search(query=wmtitle, item_type='Web Map')

        def keepnonWMTSlayers(existingwebmap):
            # Copy the existing web map's operational layers into the
            # enclosing `operationallayers` list so that non-Planet layers
            # survive the update.
            opl = existingwebmap.layers
            for o in opl:
                try:
                    print(f'Keeping {o.url} as Operational layers')
                    operationallayers.append(dict(o))
                except:
                    # Layer raised while reading `url` — presumably a Planet
                    # tiled layer; it is dropped here and re-added fresh
                    # from the QGIS project by createOpLayer().
                    print('Existing Planet layers will be updated')

        # createOpLayer creates the json dictionary for each planet layer that is the saved .aprx first map if the layers source
        # are not Planet tiled sourced WMTS layers they will passed over
        def createOpLayer():
            # Globals hand the WMTS bounding box back to the enclosing
            # scope for the item-extent update further below.
            # NOTE(review): they are only assigned when a Planet layer is
            # found — otherwise the later reads raise NameError; confirm.
            global extentlower, extentmax
            layers = QgsProject.instance().mapLayers().values()
            for l in layers:
                # QGIS source string is "...url=<template url>"; take the
                # text after "url" and strip the leading '='.
                split = l.source().split('url')
                templateurl = split[1][1:]
                # Only Planet tile servers are converted.
                if templateurl.startswith('https://tiles'):
                    print(
                        f'''Planet WMTS {templateurl} will be added to the Web Map as Operational Layer'''
                    )
                    # token = gis._con.token
                    # s = requests.Session()
                    # Pull the layer item id out of ".../layers/<itemid>/..."
                    halfurl = templateurl.split('layers/')
                    splitupurl = halfurl[1].split('/')
                    itemid = splitupurl[0]
                    # api key is the query-string tail after "api_key="
                    apiurl = templateurl.split('api_key=')
                    api_key = apiurl[1]
                    wmtsurl = f'https://tiles.planet.com/data/v1/layers/wmts/{itemid}?api_key={api_key}'
                    # proxies = {
                    #     # 'http': 'http://10.10.1.10:3128',
                    #     'https': 'localhost:8899',
                    # }
                    # Fetch and parse the WMTS GetCapabilities XML.
                    response = requests.get(wmtsurl)
                    xmlparsed = xmltodict.parse(response.text)
                    # templateUrl = xmlparsed['Capabilities']['Contents']['Layer']['ResourceURL']['@template']
                    extentlower = xmlparsed['Capabilities']['Contents'][
                        'Layer']['ows:WGS84BoundingBox']['ows:LowerCorner']
                    extentmax = xmlparsed['Capabilities']['Contents'][
                        'Layer']['ows:WGS84BoundingBox']['ows:UpperCorner']
                    xyminmax = xmlparsed['Capabilities']['Contents'][
                        'TileMatrixSet']['TileMatrix'][0]['TopLeftCorner']
                    # NOTE(review): `extent` is never used below.
                    extent = xyminmax.split(' ')[1]
                    # Web-map JSON for one Planet WebTiledLayer; full extent
                    # is the whole Web Mercator world.
                    oplayerjson = {
                        "templateUrl": f'{templateurl}{api_key}',
                        "copyright": "Planet",
                        "fullExtent": {
                            "xmin": -20037508.342787,
                            "ymin": -20037508.342787,
                            "xmax": 20037508.342787,
                            "ymax": 20037508.342787,
                            "spatialReference": {
                                "wkid": 102100
                            }
                        },
                        "id": "WebTiled_54",
                        "title": f"{l.name()}",
                        "type": "WebTiledLayer",
                        "layerType": "WebTiledLayer",
                        "tileInfo": {},
                        "wmtsInfo": {
                            "url": f'''{wmtsurl}''',
                            "layerIdentifier": "Combined scene layer",
                            "tileMatrixSet": "GoogleMapsCompatible23"
                        },
                        "visibility": True,
                        "opacity": 1,
                        "minScale": 700000,
                        "maxScale": 1500
                    }
                    operationallayers.append(oplayerjson)
                # else:
                #     print(f'Cant Get {glayer} data source url /n Make sure you only have 1 group level /non top of the actual wmts')
            # else:
            #     print('Layer is not a planet WMTS or a higher level group layer with no data source')

        # createOpLayer function
        createOpLayer()
        # set the data parameter for the item properties dictionary of the new web map item
        data = {
            "operationalLayers": operationallayers,
            "baseMap": {
                "baseMapLayers": [{
                    "id": "defaultBasemap",
                    "layerType": "ArcGISTiledMapServiceLayer",
                    "url": "https://services.arcgisonline.com/ArcGIS/rest/services/World_Topo_Map/MapServer",
                    "visibility": True,
                    "opacity": 1,
                    "title": "Topographic"
                }],
                "title": "Topographic"
            },
            "spatialReference": {
                "wkid": 102100,
                "latestWkid": 3857
            },
            "authoringApp": "WebMapViewer",
            "authoringAppVersion": "4.7",
            "version": "2.8"
        }
        # Fill this out as you see fit, the key part is the "text" attribute
        item_properties_dict = {
            "type": "Web Map",
            "title": wmtitle,
            "description": f"Updated: {current_utc}",
            "tags": ["WebTiledLaye", "pythonapi"],
            "snippet": snippet,
            "text": data
        }
        # format the extent string so it can be used to set the initial extent of the web map
        lowerext = [str(extentlower).replace(' ', ', ')]
        upperext = [str(extentmax).replace(' ', ', ')]
        wmextent = [lowerext, upperext]
        cleanextent = str(wmextent).replace("'", "")
        # Re-query without the item-type filter; truthiness decides between
        # updating an existing map and adding a new one.
        existingwm = gis.content.search(wmtitle)
        if existingwm:
            webmap = WebMap(existingwm[0])
            keepnonWMTSlayers(webmap)
            print(len(operationallayers))
            # print(f
            # Update the extent
            item = gis.content.get(existingwm[0].id)
            print(
                f'''Update Operational Layers: {item.update(item_properties=item_properties_dict)}'''
            )
            print(
                f'''Set Web Map Extent to WMTS Layer: {item.update(item_properties={'extent':cleanextent})}'''
            )
        else:
            newmap = gis.content.add(item_properties=item_properties_dict)
            print(f'created new web map: {newmap}')
            wm = WebMap(newmap)
            # featurecollection = gis.content.get('')
            # permits = featurecollection.layers[10]
            # print(f'''Added OSMRE Permit Boundary Layer: {wm.add_layer(permits)}''')
            wm.update()
            # Update the extent
            item = gis.content.get(newmap.id)
            print(
                f'''Set Web Map Extent to WMTS Layer: {item.update(item_properties={'extent':cleanextent})}'''
            )
def _get_webmap_layer_items(self, webmap_item):
    """Return the portal items backing the layers of *webmap_item*.

    Layers that carry no "itemId" entry (e.g. layers added by raw URL)
    are skipped.
    """
    web_map = WebMap(webmap_item)
    return [
        self._gis.content.get(layer["itemId"])
        for layer in web_map.layers
        if "itemId" in layer
    ]
# Notebook-style snippet: connect to a portal and wrap a previously
# searched web map item in a WebMap object.
# Fix: the original began with the garbled token "umfrom", a SyntaxError;
# imports are also grouped at the top per convention.
from arcgis.gis import GIS
from arcgis.mapping import WebMap
from IPython.display import display

gis = GIS("https://python.playground.esri.com/portal", "arcgis_python",
          "amazing_arcgis_123")

# NOTE(review): `search_result` must be produced by an earlier cell, e.g.
# search_result = gis.content.search("...", item_type="Web Map") — confirm.
wm_item = search_result[1]
web_map_obj = WebMap(wm_item)
def main(args):
    """Build a fully-joined Workforce assignment view and, optionally, a dashboard.

    Chains five joined views (assignments × types, × workers, × dispatchers,
    then the combinations) on an ArcGIS Online Workforce v2 project, and when
    ``args.create_dashboard`` is set clones a template dashboard pointed at
    the final joined view.

    NOTE(review): relies on module-level names not visible here:
    ``create_joined_view``, ``change_source_field_name_to_joined_field_name``
    and the ``assignment_fields`` / ``assignment_type_fields`` /
    ``worker_fields`` / ``dispatcher_fields`` lists (which are mutated in
    place by the rename step).
    """
    # initialize logging
    formatter = logging.Formatter(
        "[%(asctime)s] [%(filename)30s:%(lineno)4s - %(funcName)30s()]\
 [%(threadName)5s] [%(name)10.10s] [%(levelname)8s] %(message)s"
    )
    # Grab the root logger
    logger = logging.getLogger()
    # Set the root logger logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)
    logger.setLevel(logging.DEBUG)
    # Create a handler to print to the console
    sh = logging.StreamHandler(sys.stdout)
    sh.setFormatter(formatter)
    sh.setLevel(logging.INFO)
    # Create a handler to log to the specified file
    if args.log_file:
        rh = logging.handlers.RotatingFileHandler(args.log_file,
                                                  mode='a',
                                                  maxBytes=10485760)
        rh.setFormatter(formatter)
        rh.setLevel(logging.DEBUG)
        logger.addHandler(rh)
    # Add the handlers to the root logger
    logger.addHandler(sh)
    # Create the GIS
    logger.info("Authenticating...")
    gis = GIS(args.org,
              args.username,
              args.password,
              verify_cert=not args.skip_ssl_verification)
    if gis.properties["isPortal"]:
        raise RuntimeError("This script only works with ArcGIS Online")
    logger.info("Getting Workforce Project...")
    item = gis.content.get(args.project_id)
    if item is None:
        raise RuntimeError("Invalid Project Id")
    # NOTE(review): `item` was already fetched above — this second
    # content.get could simply reuse it; confirm and simplify.
    project = Project(gis.content.get(args.project_id))
    try:
        # Only v2 (offline-enabled) projects expose the schemas used below.
        if not project._is_v2_project:
            raise Exception(
                "The project provided is not a v2 project. You can only use v2 (offline-enabled) projects with this script"
            )
    except AttributeError:
        # Attribute missing entirely — API too old to know about v2 projects.
        raise Exception(
            "Cannot find the attribute is v2 project. Are you sure you have the API version 1.8.3 or greater installed? "
            "Check with `arcgis.__version__` in your Python console")
    logger.info("Phase 1: Joining assignments to assignment types...")
    # Timestamp suffix keeps intermediate view names unique across runs.
    d = int(datetime.datetime.now().timestamp())
    assignments_to_types = create_joined_view(
        gis, project.assignments_layer, project.assignment_types_table,
        project._assignment_schema.assignment_type,
        project._assignment_types.global_id,
        f"{project.title} Intermediate View 0 {d}", assignment_fields,
        assignment_type_fields)
    logger.info("Phase 2: Joining assignments to workers...")
    assignments_to_workers = create_joined_view(
        gis,
        project.assignments_layer,
        project.workers_layer,
        project._assignment_schema.worker_id,
        project._worker_schema.global_id,
        f"{project.title} Intermediate View 1 {d}",
        assignment_fields,
        worker_fields,
    )
    logger.info("Phase 3: Joining assignments to dispatchers...")
    assignments_to_dispatchers = create_joined_view(
        gis, project.assignments_layer, project.dispatchers_layer,
        project._assignment_schema.dispatcher_id,
        project._dispatcher_schema.global_id,
        f"{project.title} Intermediate View 2 {d}", assignment_fields,
        dispatcher_fields)
    # The joined views expose source fields under joined names; rewrite the
    # field lists (in place) before they are used for the next-level joins.
    change_source_field_name_to_joined_field_name(assignment_type_fields)
    change_source_field_name_to_joined_field_name(worker_fields)
    change_source_field_name_to_joined_field_name(dispatcher_fields)
    logger.info(
        "Phase 4: Joining assignments and assignment types to assignments and workers..."
    )
    assignments_types_workers = create_joined_view(
        gis, assignments_to_types.layers[0], assignments_to_workers.layers[0],
        project._assignment_schema.global_id,
        project._assignment_schema.global_id,
        f"{project.title} Intermediate View 3 {d}",
        assignment_type_fields + assignment_fields, worker_fields)
    logger.info(
        "Phase 5: Joining assignments and types and workers to assignments and workers..."
    )
    if args.name:
        name = args.name
    else:
        name = f"{project.title} Joined View {d}"
    # Final view: assignments + types + workers + dispatchers.
    final_item = create_joined_view(
        gis, assignments_types_workers.layers[0],
        assignments_to_dispatchers.layers[0],
        project._assignment_schema.global_id,
        project._assignment_schema.global_id, name,
        assignment_fields + assignment_type_fields + worker_fields,
        dispatcher_fields)
    logger.info(f"Final Item: {final_item.title}")
    if args.create_dashboard:
        logger.info("Creating dashboard")
        # create new webmap
        map_item = project.dispatcher_webmap.save(
            item_properties={
                "title": project.title + " Dashboard Map",
                "tags": [],
                "snippet": "Dashboard Map"
            })
        new_webmap = WebMap(map_item)
        # swizzle in joined layer instead of assignments layer
        for i, layer in enumerate(new_webmap.layers):
            if layer["id"] == "Assignments_0":
                new_webmap.remove_layer(layer)
                new_webmap.add_layer(final_item)
                # Keep the original layer id so dashboard widgets that
                # reference "Assignments_0" keep working.
                new_webmap.layers[i]["id"] = "Assignments_0"
                break
        new_webmap.update()
        # clone dashboard with your data instead of our data
        # (hard-coded ids below belong to the public template dashboard)
        item = gis.content.get("af7cd356c21a4ded87d8cdd452fd8be3")
        item_mapping = {
            '377b2b2014f24b0ab9b053d9b2fed113': final_item.id,
            'e1904f5c56484163a021155f447adf34': project.workers_item.id,
            'bb7d2b495ecc4ea7810b28f16ef71cba': new_webmap.item.id
        }
        cloned_items = gis.content.clone_items([item],
                                               item_mapping=item_mapping,
                                               search_existing_items=False)
        if len(cloned_items) == 0:
            raise ValueError("Creating dashboard failed")
        # Save new name and share to group
        logger.info("Updating title and sharing to project group")
        new_title = project.title + " Dashboard"
        cloned_items[0].update(item_properties={"title": new_title})
        cloned_items[0].share(groups=[project.group])
        logger.info("Dashboard creation completed")
    logger.info("Script completed")
def create_map_add_views(views_dict,
                         web_map_title='web map title',
                         web_map_snippet='web map snippet',
                         web_map_tags='web map tags'):
    """Create a web map with one class-breaks layer per view and hide all layers.

    Args:
        views_dict: mapping of field name -> portal item id of the view layer
            to add; the field name drives both the renderer and the size
            visual variable.
        web_map_title / web_map_snippet / web_map_tags: item properties for
            the saved web map.

    Returns:
        The saved web map Item (all operational layers set invisible).

    NOTE(review): relies on a module-level ``gis`` connection.
    """

    def _class_break(color, label, class_max):
        # One entry of the green class-breaks ramp; only fill color, label
        # and upper bound vary between the four breaks.
        return {
            "symbol": {
                "color": color,
                "outline": {
                    "color": [194, 194, 194, 64],
                    "width": 0.75,
                    "type": "esriSLS",
                    "style": "esriSLSSolid"
                },
                "type": "esriSFS",
                "style": "esriSFSSolid"
            },
            "label": label,
            "classMaxValue": class_max
        }

    # create empty map
    web_map = WebMap()
    # add views to the map
    for key, value in views_dict.items():
        print(key)
        map_renderer = {
            "renderer": "autocast",  # This tells python to use JS autocasting
            "type": "classBreaks",
            "field": key,
            "minValue": 1
        }
        # Shrink symbols as the view scale grows (invisible past ~1:1.27M).
        map_renderer["visualVariables"] = [{
            "type": "sizeInfo",
            "expression": "view.scale",
            "field": key,
            "stops": [{
                "size": 1.5,
                "value": 50921
            }, {
                "size": 0.75,
                "value": 159129
            }, {
                "size": 0.375,
                "value": 636517
            }, {
                "size": 0,
                "value": 1273034
            }]
        }]
        # Four breaks for values 1-4 on a light-to-dark green ramp.
        map_renderer["classBreakInfos"] = [
            _class_break([90, 106, 56, 255], "1", 1),
            _class_break([117, 144, 67, 255], "2", 2),
            _class_break([143, 178, 77, 255], "3", 3),
            _class_break([200, 223, 158, 255], "4", 4)
        ]
        web_map.add_layer(
            gis.content.get(views_dict[key]), {
                "type": "FeatureLayer",
                "renderer": map_renderer,
                "field_name": key,
                "minValue": 1
            })
    # save the map
    web_map_properties = {
        'title': web_map_title,
        'snippet': web_map_snippet,
        'tags': web_map_tags
    }
    web_map_item = web_map.save(item_properties=web_map_properties)
    # Fix: operate on the item we just saved. The original re-searched the
    # portal by title and took the first hit, which can return a different
    # item with a similar title (or miss the new item before the search
    # index catches up).
    map_json = web_map_item.get_data()
    # set visibility to false for every operational layer
    for layer in map_json['operationalLayers']:
        layer['visibility'] = False
    # update the json of the web map item
    web_map_item.update(item_properties={"text": json.dumps(map_json)})
    return web_map_item
# q = "owner:bwilson@CLATSOP"
q = '*'
list_of_maps = gis.content.search(q,
                                  item_type='web map',
                                  outside_org=False,
                                  max_items=5000)
print("Maps found %d" % len(list_of_maps))

# Build a dictionary with each layer as the index
# and a list of the maps that the layer participates in
layer_dict = {}
for item in list_of_maps:
    # Look up the layers of this web map.
    wm = WebMap(item)
    mapId = wm.item.id
    for l in wm.layers:
        try:
            layerId = l.itemId
        except Exception:
            # Layer has no itemId (e.g. added by raw URL); key on the
            # layer's own id instead.
            layerId = l.id
        # Fix: the original only appended mapId in the itemId branch, so
        # URL-only layers ended up with an empty map list; setdefault also
        # replaces the manual key-initialization dance.
        layer_dict.setdefault(layerId, []).append(mapId)
config = configparser.ConfigParser() config.read('config.ini') gisUrl = config["ENVIRONMENT"]["portalurl"] config.read('credentials.ini') username, password = config["CREDENTIALS"]['portalusername'], config["CREDENTIALS"]['portalpassword'] objGIS = GIS(gisUrl, username, password) token = objGIS._con.token services = [] items=objGIS.content.search(query='*', item_type="Web Map", max_items=500) for item in items: if hasattr(item, "contentStatus"): print(f'Collecting service and layer info from {item.title} \n') wm_item = objGIS.content.get(item.id) wm = WebMap(wm_item) try: for layer in wm.layers: url = layer.url # print(url) parsed = urllib.parse.urlsplit(url) servicename= ((parsed.path.split('/')[-4]+'/'+parsed.path.split('/')[-3] +'/'+parsed.path.split('/')[-2]) + f"/{layer.title}") print(servicename +'\n') appInfo = (wm_item.title, servicename) #empty quotes to skip the OBJECTID field services.append(appInfo) except AttributeError: url = layer.templateUrl print(url) parsed = urllib.parse.urlsplit(url) servicename= ((parsed.path.split('/')[-8]+'/'+parsed.path.split('/')[-7]) + f"/{layer.title}")