def create_new_webmap(project_name, layer_names, *args):
    """Create a web map, add a project's feature layers to it, and define
    properties for both the layers and the web map.

    Args:
        project_name (str): name of project
        layer_names (list): list of layer names to be added to web map
        *args: additional project info forwarded to
            ``get_properties_from_project`` as ``project_additional_info``

    Raises:
        TypeError: if project name is not type of string
        TypeError: if layer names is not type of list
        TypeError: if layer name is not type of string
    """
    # Validate all inputs up front so we fail before touching the portal.
    if not isinstance(project_name, str):
        raise TypeError('expected project name to be type of str')
    if not isinstance(layer_names, list):
        raise TypeError('expected layer names to be type of list')
    for layer_name in layer_names:
        if not isinstance(layer_name, str):
            raise TypeError('expected layer name to be type of str')

    # get feature layers collection and update its properties
    feature_layers = get_feature_layers_collection(project_name)
    feature_layers_properties = get_properties_from_project(
        project_name=project_name,
        content_type='Feature Layer',
        project_additional_info=list(args))
    feature_layers.update(item_properties=feature_layers_properties)
    protect_share_item(feature_layers)

    # create a new web map
    new_web_map = WebMap()
    print('creating a new web map')

    # add feature layers to the web map
    # BUGFIX: previously the collection title was printed once per layer,
    # misleadingly implying each layer was named after the collection.
    print('adding layers from', feature_layers.title, 'to web map')
    for feature_layer in feature_layers.layers:
        new_web_map.add_layer(layer=feature_layer)

    # define properties for the web map
    web_map_properties = get_properties_from_project(
        project_name=project_name,
        content_type='WebMap',
        project_additional_info=list(args))

    # create popups for map layers
    create_popups(web_map=new_web_map,
                  project_name=project_name,
                  layer_names=layer_names)

    # save the web map
    new_web_map.save(item_properties=web_map_properties)
    print('saving web map in portal')
# Clone the template items (web map, operation view, feature service) into the
# target org's 'Forest Fire' folder.  copy_data=False keeps the cloned layers
# empty; search_existing_items=False forces fresh copies instead of reusing
# items already present in the target org.
new_items = target_gis.content.clone_items(source_items,
                                           folder='Forest Fire',
                                           copy_data=False,
                                           search_existing_items=False)
print("Cloning items complete. Renaming items")

# Customize items for current emergency
# NOTE(review): search()[0] assumes a group named 'Properties at risk' already
# exists in the target org — raises IndexError otherwise.
target_group = target_gis.groups.search('Properties at risk')[0]
for new_item in new_items:
    if new_item.type == 'Web Map':
        new_item.update({
            'title': 'Thomas fire responder map',
            'snippet': 'Web map for assessing damage and saving properties at risk'
        })
        new_item.share(groups=[target_group])
        print('Renamed web map -> Thomas fire responder map')
        # add informative web layers to the web map
        wm = WebMap(new_item)
        wm.add_layer(modis_hotspots)
        wm.add_layer(parcels_at_risk_item)  # empty web layer at first
        # NOTE(review): wm is never saved/updated here, so the added layers
        # may not persist to the portal item — confirm downstream.
        print(' Added informative web layers')
    elif new_item.type == 'Operation View':
        new_item.update({
            'title': 'Thomas fire response dashboard',
            'snippet': 'Operations dashboard for assessing damage and saving properties at risk'
        })
        new_item.share(groups=[target_group])
        print('Renamed dashboard -> Thomas fire response dashboard')
    elif new_item.type == 'Feature Service':
        new_item.update({
            'title': 'Thomas fire assessment layer',
            'snippet': 'Feature layer for collecting assessment report'
        })
        new_item.share(groups=[target_group])
        print('Renamed web layer -> Thomas fire assessment layer')
def main(args):
    """Build a joined assignments+types+workers+dispatchers view for a
    Workforce project, and optionally clone a dashboard wired to that view.

    Args:
        args: parsed command-line options; this function reads
            org, username, password, skip_ssl_verification, project_id,
            log_file, name and create_dashboard.
            # assumes an argparse.Namespace — TODO confirm with caller

    Raises:
        RuntimeError: if the org is a Portal (ArcGIS Online only) or the
            project id does not resolve to an item.
        Exception: if the project is not a v2 (offline-enabled) project or
            the installed arcgis API is too old to report it.
        ValueError: if dashboard cloning returns no items.
    """
    # initialize logging
    formatter = logging.Formatter(
        "[%(asctime)s] [%(filename)30s:%(lineno)4s - %(funcName)30s()]\
 [%(threadName)5s] [%(name)10.10s] [%(levelname)8s] %(message)s"
    )
    # Grab the root logger
    logger = logging.getLogger()
    # Set the root logger logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)
    logger.setLevel(logging.DEBUG)
    # Create a handler to print to the console
    sh = logging.StreamHandler(sys.stdout)
    sh.setFormatter(formatter)
    sh.setLevel(logging.INFO)
    # Create a handler to log to the specified file (DEBUG level, rotating)
    if args.log_file:
        rh = logging.handlers.RotatingFileHandler(args.log_file, mode='a',
                                                  maxBytes=10485760)
        rh.setFormatter(formatter)
        rh.setLevel(logging.DEBUG)
        logger.addHandler(rh)
    # Add the handlers to the root logger
    logger.addHandler(sh)

    # Create the GIS
    logger.info("Authenticating...")
    gis = GIS(args.org, args.username, args.password,
              verify_cert=not args.skip_ssl_verification)
    # Joined views below rely on ArcGIS Online behavior only.
    if gis.properties["isPortal"]:
        raise RuntimeError("This script only works with ArcGIS Online")
    logger.info("Getting Workforce Project...")
    item = gis.content.get(args.project_id)
    if item is None:
        raise RuntimeError("Invalid Project Id")
    project = Project(gis.content.get(args.project_id))
    try:
        if not project._is_v2_project:
            raise Exception(
                "The project provided is not a v2 project. You can only use v2 (offline-enabled) projects with this script"
            )
    except AttributeError:
        # Older arcgis API versions don't expose _is_v2_project at all.
        raise Exception(
            "Cannot find the attribute is v2 project. Are you sure you have the API version 1.8.3 or greater installed? "
            "Check with `arcgis.__version__` in your Python console")
    logger.info("Phase 1: Joining assignments to assignment types...")
    # Timestamp suffix keeps the generated view item names unique per run.
    d = int(datetime.datetime.now().timestamp())
    assignments_to_types = create_joined_view(
        gis, project.assignments_layer, project.assignment_types_table,
        project._assignment_schema.assignment_type,
        project._assignment_types.global_id,
        f"{project.title} Intermediate View 0 {d}", assignment_fields,
        assignment_type_fields)
    logger.info("Phase 2: Joining assignments to workers...")
    assignments_to_workers = create_joined_view(
        gis,
        project.assignments_layer,
        project.workers_layer,
        project._assignment_schema.worker_id,
        project._worker_schema.global_id,
        f"{project.title} Intermediate View 1 {d}",
        assignment_fields,
        worker_fields,
    )
    logger.info("Phase 3: Joining assignments to dispatchers...")
    assignments_to_dispatchers = create_joined_view(
        gis, project.assignments_layer, project.dispatchers_layer,
        project._assignment_schema.dispatcher_id,
        project._dispatcher_schema.global_id,
        f"{project.title} Intermediate View 2 {d}", assignment_fields,
        dispatcher_fields)
    # Mutates the module-level field lists in place so the second-round joins
    # reference the joined (aliased) field names, not the source field names.
    change_source_field_name_to_joined_field_name(assignment_type_fields)
    change_source_field_name_to_joined_field_name(worker_fields)
    change_source_field_name_to_joined_field_name(dispatcher_fields)
    logger.info(
        "Phase 4: Joining assignments and assignment types to assignments and workers..."
    )
    assignments_types_workers = create_joined_view(
        gis, assignments_to_types.layers[0], assignments_to_workers.layers[0],
        project._assignment_schema.global_id,
        project._assignment_schema.global_id,
        f"{project.title} Intermediate View 3 {d}",
        assignment_type_fields + assignment_fields, worker_fields)
    logger.info(
        "Phase 5: Joining assignments and types and workers to assignments and workers..."
    )
    if args.name:
        name = args.name
    else:
        name = f"{project.title} Joined View {d}"
    final_item = create_joined_view(
        gis, assignments_types_workers.layers[0],
        assignments_to_dispatchers.layers[0],
        project._assignment_schema.global_id,
        project._assignment_schema.global_id, name,
        assignment_fields + assignment_type_fields + worker_fields,
        dispatcher_fields)
    logger.info(f"Final Item: {final_item.title}")
    if args.create_dashboard:
        logger.info("Creating dashboard")
        # create new webmap
        map_item = project.dispatcher_webmap.save(
            item_properties={
                "title": project.title + " Dashboard Map",
                "tags": [],
                "snippet": "Dashboard Map"
            })
        new_webmap = WebMap(map_item)
        # swizzle in joined layer instead of assignments layer
        for i, layer in enumerate(new_webmap.layers):
            if layer["id"] == "Assignments_0":
                new_webmap.remove_layer(layer)
                new_webmap.add_layer(final_item)
                # Keep the original layer id so dashboard widgets that
                # reference "Assignments_0" keep working.
                new_webmap.layers[i]["id"] = "Assignments_0"
                break
        new_webmap.update()
        # clone dashboard with your data instead of our data
        # (hard-coded ids below are the public template item and its sources)
        item = gis.content.get("af7cd356c21a4ded87d8cdd452fd8be3")
        item_mapping = {
            '377b2b2014f24b0ab9b053d9b2fed113': final_item.id,
            'e1904f5c56484163a021155f447adf34': project.workers_item.id,
            'bb7d2b495ecc4ea7810b28f16ef71cba': new_webmap.item.id
        }
        cloned_items = gis.content.clone_items([item],
                                               item_mapping=item_mapping,
                                               search_existing_items=False)
        if len(cloned_items) == 0:
            raise ValueError("Creating dashboard failed")
        # Save new name and share to group
        logger.info("Updating title and sharing to project group")
        new_title = project.title + " Dashboard"
        cloned_items[0].update(item_properties={"title": new_title})
        cloned_items[0].share(groups=[project.group])
        logger.info("Dashboard creation completed")
    logger.info("Script completed")
def processMap(processOrg, fldr, mapTitle, mpNum, mpLayerTags, termsOfUse):
    """Scrape a Harvard WorldMap map page, rebuild its layers (with SLD-derived
    renderers where available) as an ArcGIS Online web map, and save it.

    Args:
        processOrg: authenticated GIS connection to the target org.
        fldr (str): processing folder name under C:\\projects\\HarvardMap\\Data.
        mapTitle (str): title of the source WorldMap map (used to query the
            publish-log table).
        mpNum: WorldMap map number used to build the scrape URL.
        mpLayerTags (list of lists): per-layer tag lists, flattened into the
            new web map's tags.
        termsOfUse: text stored as the new item's licenseInfo.
    """
    # Flatten and get the unique Tags
    mpTags = reduce(operator.concat, mpLayerTags)
    uniqueMpTags = set(mpTags)
    # Accesses the map metadata to understand the layers that are used in the map
    # Creates a list of the layers found
    styleFrontUrl = "http://worldmap.harvard.edu/geoserver/styles/"
    folderProcess = fldr  #"HarvardForestDataMap"
    mTitle = mapTitle  #'Harvard Forest Data Map'
    mapNum = mpNum  #17305
    procSpace = r'C:\projects\HarvardMap\Data\{}'.format(folderProcess)
    # Dictionary lookup for sld styles (GeoJSON geometry -> Esri geometry name)
    sldGeomLookup = {}
    sldGeomLookup['MultiPolygon'] = "Polygon"
    sldGeomLookup['MultiLineString'] = "Polyline"
    sldGeomLookup['Point'] = "Point"
    # log into the org
    #gis = GIS(profile='harvardworldmap')
    gis = processOrg  #(profile="myorg")
    # Access the map inventory Excel file and load into a dataframe for later access
    #mapDF = pd.read_csv(r"C:\projects\HarvardMap\Data\maps_tab\maps_tab_excelExport.csv",sep=',')
    #seaMP = mapDF[mapDF.id == '{}'.format(mapNum)]
    # read in the Published map data layers from conversion script
    # Add to a dataframe for later access
    # Connect to the log table to determine
    tbIT = gis.content.get('5ca2378ea96e48e7b5739b6121557c3e')
    # c2169b562b4f407e8acf2e2b73b11a4a  # 5ca2378ea96e48e7b5739b6121557c3e
    tbl = tbIT.tables[0]
    tbDF = tbl.query(where="mapname='{}'".format(mapTitle), as_df=True)
    lyrlgPubPath = '{}\{}PubLyrLog.txt'.format(procSpace, mTitle)
    #pubLyrDataFrame = pd.read_csv(lyrlgPubPath,index_col='indxCol')
    # Scrape the WorldMap page with Selenium (Chrome) to get the map config JS
    browser = webdriver.Chrome(
        executable_path=r"C:\projects\chromedriver_win32\chromedriver.exe")
    #url = "https://worldmap.harvard.edu/maps/17305/info/"
    url = "https://worldmap.harvard.edu/maps/{}".format(mapNum)
    # url = "http://trainingapps.esri.com/osiris/TrainingEvents/Search?PageSize=20&OrderBy=StartDate&Direction=asc&InstructorName={0}+{1}&StartDate=09%2F12%2F2016&EndDate=12%2F31%2F2016".format(firstName,lastName)
    browser.implicitly_wait(60)
    a = browser.get(url)
    # FireFox
    # browser = webdriver.Firefox(executable_path=r"C:\Software\geckodriver-v0.20.0-win64\geckodriver.exe")
    # browser.get(url)
    # opnFile = open(r"C:\Dev\Python\OsirisTools\pythclass.html",'r')
    # pgSource = BeautifulSoup(opnFile,'html.parser')
    pgSource = BeautifulSoup(browser.page_source, 'html.parser')
    # Create an empty webmap
    empty_webmap = WebMap()
    s = pgSource.head.find_all('script')
    # NOTE(review): hard-coded script index and regex match index below are
    # fragile — they break if WorldMap changes its page layout.
    jj = s[18]
    regex = re.compile('\n^(.*?){(.*?)$|,', re.MULTILINE)
    js_text = re.findall(
        regex, jj.string)  #Change jj.text to jj.string based on error
    mapJsonpre = js_text[76][1]
    # Re-wrap the captured fragment as a JSON object ('{' was consumed by the
    # regex; the trailing two characters are stripped).
    mapJson = '{0}{1}'.format('{', mapJsonpre[:-2])
    mpJ = json.loads(mapJson)
    # Map Description from the JSON
    if mpJ['about']['introtext']:
        mapDesc = mpJ['about']['introtext']
    else:
        mapDesc = mTitle
    # Map scale
    #if mpJ['map']['maxResolution']:
    #mapScale = ['map']['maxResolution']
    grpDict = {}
    # Pull the group names into a list to reverse their order to preserve layer order from the map
    grpNames = [grp['group'] for grp in mpJ['map']['groups']]
    grpNames.reverse()
    for grp in grpNames:  #mpJ['map']['groups']:
        grpName = grp  #['group']
        print('GroupName :: {} '.format(grpName))
        grpLyrs = []
        for lyr in mpJ['map']['layers']:
            if lyr['group'] == grpName:
                print(lyr['title'])
                #if lyr['visibility']:
                #if lyr['visibility']:  #visibile layer start
                if lyr['title'] == 'Population Density (2010)km2':
                    print('need to stop')  # debugging breakpoint marker
                # # use the name to link over to the item ID
                # if not 'geonode:' in lyr['name']:
                #     llkUP = lyr['detail_url'].split("/")[-1]
                #     print(llkUP)
                # else:
                #     llkUP = lyr['name']
                # isolate and test if the current layer is published vectordata
                #publyrLookupID = tbDF.query("geonodeSRC == '{}'".format(llkUP))
                # search for the published Layer by snippet
                addLayer = gis.content.search(query='snippet:"{}"'.format(
                    lyr['name']), item_type="Feature Layer")
                if len(addLayer) == 1:
                    # Check for styles in layer
                    if 'styles' in lyr and lyr['styles'] != '':
                        lyrStylesUrl = styleFrontUrl
                        sldFileURLTest = "{}{}".format(styleFrontUrl,
                                                       lyr['styles'])
                        # Normalize to a .sld URL
                        if ".sld" in sldFileURLTest:
                            sldFileURL = sldFileURLTest
                        else:
                            sldFileURL = "{}.sld".format(sldFileURLTest)
                    else:
                        #Check the layer info page
                        lyrInfo = urllib.request.urlopen(
                            'http://worldmap.harvard.edu/data/{}'.format(
                                lyr['name']))
                        lyrInfoRD = lyrInfo.read()
                        lyrInfoRDDC = lyrInfoRD.decode('utf-8')
                        lyrInfoSoup = BeautifulSoup(lyrInfoRDDC)
                        spnTyle = lyrInfoSoup.find(
                            'span', attrs={'class': 'styles-list'})
                        if spnTyle:
                            styles = spnTyle.find_all('a')
                            if len(styles) > 0:
                                # first listed style wins
                                sldFileURL = styles[0].attrs['href']
                    # Need to write in how to distinguish renderer type
                    #simple_renderer = {"renderer": "autocast", "type": "simple"}
                    # Check the style Geometry
                    #test the url for a valid connectiono
                    try:
                        stylOpen = urllib.request.urlopen(sldFileURL).read()
                        stylOpenDecode = stylOpen.decode('utf-8')
                        # Infer geometry type from which SLD symbolizer appears
                        if 'PolygonSymbolizer' in stylOpenDecode:
                            sldGeom = 'Polygon'
                        elif 'LineSymbolizer' in stylOpenDecode:
                            sldGeom = 'Polyline'
                        elif 'PointSymbolizer' in stylOpenDecode:
                            sldGeom = 'Point'
                        # Find the geometry type and pass it to the right render Geometry
                        #sldGeom = sldGeomLookup[publyrLookupID.geometryType.values[0]]
                        renren = ""
                        if sldGeom == 'Point':
                            pntRenderer = symbolsPoints.processPointSymbol(
                                sldFileURL)
                            renren = pntRenderer
                        if sldGeom == 'Polygon':
                            polyRenderer = symbolsPolygon.processPolygonSymbol(
                                sldFileURL)
                            if polyRenderer[1] == 'classbreaks':
                                sdf = pd.DataFrame.spatial.from_layer(
                                    addLayer[0].layers[0])
                                # Leading '_' field names were prefixed with 'F'
                                # when published — mirror that here.
                                if polyRenderer[0]['field'][0] == '_':
                                    sdfRenFLD = 'F{}'.format(
                                        polyRenderer[0]['field'])
                                else:
                                    sdfRenFLD = polyRenderer[0]['field']
                                genRen = generate_renderer('Polygon',
                                                           sdf,
                                                           render_type='c',
                                                           field=sdfRenFLD)
                                brks = polyRenderer[0]['classBreakInfos']
                                genRen['classBreakInfos'] = brks
                                # Replace the parsed renderer head with the
                                # generated one (keeps SLD class breaks).
                                polyRenderer.pop(0)
                                polyRenderer.insert(0, genRen)
                                renren = polyRenderer
                        if sldGeom == 'Polyline':
                            lnRenderer = symbolsLines.processLineSymbol(
                                sldFileURL)
                            if lnRenderer[1] == 'classbreaks':
                                sdf = pd.DataFrame.spatial.from_layer(
                                    addLayer[0].layers[0])
                                if lnRenderer[0]['field'][0] == '_':
                                    sdfRenFLD = 'F{}'.format(
                                        lnRenderer[0]['field'])
                                else:
                                    sdfRenFLD = lnRenderer[0]['field']
                                genRen = generate_renderer('Polyline',
                                                           sdf,
                                                           render_type='c',
                                                           field=sdfRenFLD)
                                brks = lnRenderer[0]['classBreakInfos']
                                genRen['classBreakInfos'] = brks
                                lnRenderer.pop(0)
                                lnRenderer.insert(0, genRen)
                                renren = lnRenderer
                        #renren[0]["renderer"] = "autocast"
                        # Get the item from the org
                        #addLayer = gis.content.search(query='snippet:"{}"')
                        #addLayer = gis.content.get(publyrLookupID.itemID.values[0])
                        if renren != "":
                            empty_webmap.add_layer(
                                addLayer[0].layers[0], {
                                    "type": "FeatureLayer",
                                    "renderer": renren[0],
                                    'visibility': lyr['visibility']
                                })
                        else:
                            empty_webmap.add_layer(
                                addLayer[0].layers[0], {
                                    "type": "FeatureLayer",
                                    'visibility': lyr['visibility']
                                })
                    except:
                        # NOTE(review): bare except — any failure in style
                        # download/parsing falls back to adding the layer with
                        # default symbology (deliberate best-effort).
                        empty_webmap.add_layer(
                            addLayer[0].layers[0], {
                                "type": "FeatureLayer",
                                'visibility': lyr['visibility']
                            })
                        pass
                    # Add an item update to remove the tracking snippet
                    # addLayer.update(item_properties={'snippet': "{}".format(lyr['title'])})
                    grpLyrs.append(lyr)
                else:
                    # zero or multiple search hits — layer is recorded but not added
                    print('Current Layer {} does not have a featurelayer')
                    grpLyrs.append(lyr)
                    # # use the name to link over to the item ID
                    # if not 'geonode:' in lyr['name']:
                    #     llkUP = lyr['detail_url'].split("/")[-1]
                    #     print(llkUP)
                    # else:
                    #     llkUP = lyr['name']
                    #
                    # publyrLookupID = pubLyrDataFrame.query("geonodeSRC == '{}'".format(llkUP))
        grpDict[grpName] = grpLyrs
    webmapprops = {
        'title': '{}'.format(mTitle),
        'description': mapDesc,
        'licenseInfo': termsOfUse,
        'snippet': ' ',
        'tags': uniqueMpTags
    }
    empty_webmap.save(item_properties=webmapprops)
def initialize_dro(
        dro_id,
        gis,
        dro_template_id='2df64ef2bc874bdb8393255375feb894',
        sit_template_id='a1dbcdad380840249d26ccc520d1c441',
        ops_template_id='e9c20858fdb342c9a6b0e514e7c9f9f7',
        dir_template_id='9e36639d9da741138b475e05b2f79f14'):
    """Initializes mapping items for a disaster relief operation.

    Copies the template items into a new DRO folder, publishes the feature
    service, wires the situational awareness map, operations dashboard and
    director's brief to it, and reports progress via a spinner.  Each step
    that fails aborts initialization with a spinner failure message.

    Arguments:
    dro_id           Disaster relief operation identifier.
    gis              RCViewGIS object.
    dro_template_id  Item ID of DRO feature file geodatabase template.
    sit_template_id  Item ID of situational awareness web map template.
    ops_template_id  Item ID of operations dashboard template.
    dir_template_id  Item ID of director's brief story map template.
    """
    # create DRO folder (reuse an existing folder with the same title)
    spinner = _RCSpinner('Creating folder')
    spinner.start()
    folders = gis.users.me.folders
    if not dro_id in [f['title'] for f in folders]:
        dro_folder = gis.content.create_folder(dro_id)
        if not dro_folder:
            spinner.fail('Failed to create DRO folder. Intialization aborted.')
            return
    else:
        dro_folder = [f for f in folders if f['title'] == dro_id][0]
    # copy DRO features template
    spinner.text = 'Copying features template'
    # sanitize the id for use in an item/service name
    dro_id_under = re.sub('\W+', '_', dro_id)
    dro_template_item = gis.content.get(dro_template_id)
    dro_fgdb = dro_template_item.copy(title=dro_id_under + '_Features')
    move_result = dro_fgdb.move(dro_folder)
    if not move_result['success']:
        spinner.fail('Failed to move features template to DRO folder. Intialization aborted.')
        return
    # publish DRO feature service
    spinner.text = 'Publishing feature service'
    dro_features = dro_fgdb.publish()
    if not dro_features:
        spinner.fail('Failed to publish DRO feature service. Initialization aborted.')
        return
    # create situational awareness map
    spinner.text = 'Creating situational awareness map'
    sit_template_item = gis.content.get(sit_template_id)
    sit_map_item = sit_template_item.copy(title=dro_id + ' Situational Awareness Map')
    if not sit_map_item:
        spinner.fail('Failed to copy situational awareness map. Initialization aborted.')
        return
    move_result = sit_map_item.move(dro_folder)
    if not move_result['success']:
        spinner.fail('Failed to move situational awareness map to DRO folder. Intialization aborted.')
        return
    sit_map = WebMap(sit_map_item)
    add_result = sit_map.add_layer(dro_features)
    if not add_result:
        spinner.fail('Failed to add features to situational awareness map. Initialization aborted.')
        return
    update_result = sit_map.update()
    if not update_result:
        spinner.fail('Failed to update situational awareness map. Initialization aborted.')
        return
    # create operations dashboard
    spinner.text = 'Creating operations dashboard'
    ops_template_item = gis.content.get(ops_template_id)
    ops_item = ops_template_item.copy(title=dro_id + ' Operations Dashboard')
    move_result = ops_item.move(dro_folder)
    if not move_result['success']:
        spinner.fail('Failed to move operations dashboard to DRO folder. Intialization aborted.')
        return
    ops_template_data = ops_template_item.get_data()
    ops_table = dro_features.tables[0]
    ops_table_id = ops_table.properties('id')
    # repoint every dashboard widget's data source at the new feature service
    for widget in ops_template_data['widgets']:
        dataSource = widget['datasets'][0]['dataSource']
        dataSource['itemId'] = dro_features.itemid
        dataSource['name'] = 'operations ({})'.format(dro_features.title)
        dataSource['layerId'] = ops_table_id
    update_result = ops_item.update(data=json.dumps(ops_template_data))
    if not update_result:
        spinner.fail('Failed to update operations dashboard. Intialization aborted.')
        return
    # create director's brief
    dir_template_item = gis.content.get(dir_template_id)
    dir_item = dir_template_item.copy(title=dro_id + " Director's Brief")
    move_result = dir_item.move(dro_folder)
    if not move_result['success']:
        spinner.fail("Failed to move director's brief to DRO folder. Intialization aborted.")
        return
    dir_template_data = dir_template_item.get_data()
    dir_template_data['values']['title'] = dro_id + " Relief Operation Director's Brief"
    # entry 0: embedded situational awareness web map; entry 1: dashboard page
    dir_template_data['values']['story']['entries'][0]['media']['webmap']['id'] = sit_map_item.id
    dir_template_data['values']['story']['entries'][1]['media']['webpage']['hash'] = '/' + ops_item.id
    dir_template_data['values']['story']['entries'][1]['media']['webpage']['url'] = 'https://maps.rcview.redcross.org/portal/apps/opsdashboard/index.html#/' + ops_item.id
    update_result = dir_item.update(
        item_properties={'url': 'https://maps.rcview.redcross.org/portal/apps/MapSeries/index.html?appid=' + dir_item.id},
        data=json.dumps(dir_template_data)
    )
    if not update_result:
        spinner.fail("Failed to update director's brief. Intialization aborted.")
        return
    spinner.succeed('Finished initializing DRO.')
# Fetch the source portal items by id for the layers to be added to the map.
SoilSurvey = gis.content.get('ee6879ca62194b88aee3a18f954a70f3')
LivestockLarge = gis.content.get('1649d11f18f443ecae7003afa2df99b3')
IndianReserve = gis.content.get('6f1e8fc7bd24460cad0ae533eb4acfe2')
Greenspace = gis.content.get('7523d1849d6e46f5b997d833e849d88b')
BCParks = gis.content.get('8d1d458346bd42adbedbd9754dac0b33')
NationalParks = gis.content.get('5ade04eb4d7145eab978ba0cff1e6ba8')
# List of Items to Add - Order Matters (First item will be at the bottom of the table of contents)
# NOTE(review): RangePasture, RangeTenure, CrownTenures, PMBC, AgCAP and ALR
# are defined outside this view — confirm they exist before this runs.
Layers_toadd = [
    NationalParks, BCParks, Greenspace, IndianReserve, LivestockLarge,
    SoilSurvey, RangePasture, RangeTenure, CrownTenures, PMBC, AgCAP, ALR
]
# Iterate Trhough and Add Items to Map
for layer in Layers_toadd:
    wm.add_layer(layer)
# Dictionary of Items : Original Title, JSON File Path, New Title (if desired), Visibility True/False
# NOTE(review): this literal is truncated at the edge of the visible chunk.
LayerDict = {
    'ALC ALR Polygons': [ALR_JSON_File, 'Agricultural Land Reserve', True],
    'Ag_Capability': [AgCAP_JSON_File, "Agricultural Capability", False],
    'ParcelMap BC Parcel Fabric': [PMBC_JSON_File, None, False],
    'TANTALIS - Crown Tenures': [CrownTenureAll_File, 'Crown Tenures (All)', False],
    'Range Tenure': [RangeTenure_JSON_File, None, False],
    'Range Pastures': [RangePasture_JSON_File, None, False],
    'Soil Survey Spatial View': [SoilSurvey_JSON_File, 'Soil Survey', False],
    'Livestock at Large Regulations in British Columbia': [LivestockLarge_JSON_File, 'Livestock at Large Regulations', False],
    'Indian Reserves & Band Names - Administrative Boundaries': [IndianReserves_JSON_File, 'Indian Reserves', False],
#Make a connection to my portal gis2 = GIS("https://caltrans.maps.arcgis.com", "Saffia.Hossainzadeh") # In[6]: # Create a web map of this new data: # Generators from CEC intersected by District 7's polygon from arcgis.mapping import WebMap, WebScene wm = WebMap() wm.definition # In[7]: wm.add_layer(gensD7) # In[10]: web_map_properties = { 'title': 'Generators within District 7', 'snippet': 'This map service shows the generators that are within the Caltrans district 7 jurisdiction. The original data source is the California Energy Commission', 'tags': 'webmap creation from arcgis api for python' } web_map_item = wm.save(item_properties=web_map_properties) # In[11]: web_map_item
def create_map_add_views(views_dict,
                         web_map_title='web map title',
                         web_map_snippet='web map snippet',
                         web_map_tags='web map tags'):
    """Create a web map containing one class-breaks-rendered layer per view,
    then turn all layers' visibility off in the saved item's JSON.

    Args:
        views_dict (dict): maps a field name (used as the renderer field) to
            the portal item id of the view layer to add.
        web_map_title (str): title for the saved web map item.
        web_map_snippet (str): snippet for the saved web map item.
        web_map_tags (str): tags for the saved web map item.

    Returns:
        The saved web map item.
    """
    # create a map and add the views to it and then set the visibility of all them to false
    # create empty map
    web_map = WebMap()
    # add views to the map
    for key, value in views_dict.items():
        print(key)
        map_renderer = {
            "renderer": "autocast",  #This tells python to use JS autocasting
            "type": "classBreaks",
            "field": key,
            "minValue": 1
        }
        # shrink symbols as the view scale zooms out
        map_renderer["visualVariables"] = [{
            "type": "sizeInfo",
            "expression": "view.scale",
            "field": key,
            "stops": [{
                "size": 1.5,
                "value": 50921
            }, {
                "size": 0.75,
                "value": 159129
            }, {
                "size": 0.375,
                "value": 636517
            }, {
                "size": 0,
                "value": 1273034
            }]
        }]
        # four classes (1-4), green ramp from dark to light
        map_renderer["classBreakInfos"] = [{
            "symbol": {
                "color": [90, 106, 56, 255],
                "outline": {
                    "color": [194, 194, 194, 64],
                    "width": 0.75,
                    "type": "esriSLS",
                    "style": "esriSLSSolid"
                },
                "type": "esriSFS",
                "style": "esriSFSSolid"
            },
            "label": "1",
            "classMaxValue": 1
        }, {
            "symbol": {
                "color": [117, 144, 67, 255],
                "outline": {
                    "color": [194, 194, 194, 64],
                    "width": 0.75,
                    "type": "esriSLS",
                    "style": "esriSLSSolid"
                },
                "type": "esriSFS",
                "style": "esriSFSSolid"
            },
            "label": "2",
            "classMaxValue": 2
        }, {
            "symbol": {
                "color": [143, 178, 77, 255],
                "outline": {
                    "color": [194, 194, 194, 64],
                    "width": 0.75,
                    "type": "esriSLS",
                    "style": "esriSLSSolid"
                },
                "type": "esriSFS",
                "style": "esriSFSSolid"
            },
            "label": "3",
            "classMaxValue": 3
        }, {
            "symbol": {
                "color": [200, 223, 158, 255],
                "outline": {
                    "color": [194, 194, 194, 64],
                    "width": 0.75,
                    "type": "esriSLS",
                    "style": "esriSLSSolid"
                },
                "type": "esriSFS",
                "style": "esriSFSSolid"
            },
            "label": "4",
            "classMaxValue": 4
        }]
        web_map.add_layer(
            gis.content.get(views_dict[key]), {
                "type": "FeatureLayer",
                "renderer": map_renderer,
                "field_name": key,
                "minValue": 1
            })
    # save the map
    web_map_properties = {
        'title': web_map_title,
        'snippet': web_map_snippet,
        'tags': web_map_tags
    }
    web_map_item = web_map.save(item_properties=web_map_properties)
    # get json data of the web map
    # NOTE(review): re-finding the item by title search (instead of using
    # web_map_item directly) assumes the title is unique in the org.
    map_search = gis.content.search(web_map_title)
    map_item = map_search[0]
    map_json = map_item.get_data()
    # set visibility to false
    for layer in map_json['operationalLayers']:
        layer['visibility'] = False
    # update the json file of the web map
    item_properties = {"text": json.dumps(map_json)}
    item = gis.content.get(map_item.id)
    item.update(item_properties=item_properties)
    return web_map_item
# EPA Envirofacts boundaries map service; ef_points is defined above this
# chunk — presumably another EPA service with superfund/toxic-release layers.
ef_boundaries = MapImageLayer(
    'https://geopub.epa.gov/arcgis/rest/services/EMEF/Boundaries/MapServer/')
superfund_points = ef_points.layers[0]
toxic_releases = ef_points.layers[1]
counties = ef_boundaries.layers[5]
states = ef_boundaries.layers[6]
# we know which layers to add, so create blank webmap to add it to. This one has dark gray canvas
darkGray = my_gis.content.search('title:dark',
                                 outside_org=True,
                                 item_type='web map')[0]
my_webmap = WebMap(darkGray)
# add the feature layers to the dark gray canvas map
# (order matters: polygons first so points draw on top)
my_webmap.add_layer(counties)
my_webmap.add_layer(states)
my_webmap.add_layer(superfund_points)
my_webmap.add_layer(toxic_releases)
# give webmap a name (can be changed to user-input prompt)
webmap_name = 'my webmap'
# search for existing webmap, delete if necessary
existing_webmap = my_gis.content.search(webmap_name, item_type="Web Map")
if len(existing_webmap) > 0:
    answer = input(
        '"{}" webmap exists, delete it? (y/n): '.format(webmap_name))
    for i in existing_webmap:
        delete_map(answer, i, webmap_name)
from arcgis.mapping import WebMap
from arcgis.gis import GIS
import json

# Load portal URL and credentials from a local config file.
with open('config.json', 'r') as f:
    config = json.load(f)
gis = GIS(config['url'], config['username'], config['password'])
wm = WebMap()
os_boundaryline = gis.content.get("5b60fac33976436ab900e05eb1a33216")
wm.add_layer(os_boundaryline)
len(wm.layers)  # notebook-style cell: display layer count
fillSymbolDiagonal = {
    "color": [0, 0, 0, 64],
    "outline": {
        "color": [0, 0, 0, 255],
        "width": 1,
        "type": "esriSLS",
        "style": "esriSLSSolid"
    },
    "type": "esriSFS",
    "style": "esriSFSDiagonalCross"
}
i = 0
# Scan operational layers for "Historic counties" and rename it in place.
# NOTE(review): no `i += 1` is visible in this loop body — as shown it would
# never advance; the snippet may be truncated here. Confirm against the
# original source before relying on this loop.
while i < len(wm.layers):
    if wm.definition.operationalLayers[i].title == "Historic counties":
        print("Layer found in index: ", i)
        wm.definition.operationalLayers[
            i].title = "Historic counties (renamed)"
def update_maps(q, old_id, new_layer):
    """Search web maps matching query `q`, and in each one replace the tiled
    layer whose itemId equals `old_id` with `new_layer`, saving in place.

    Args:
        q (str): portal search query selecting candidate web maps.
        old_id (str): item id of the tiled map service layer to remove.
        new_layer: portal item of the replacement layer.

    Returns:
        int: number of maps examined (not the number updated).

    Relies on module globals: gis, WEBMAP, ITEM, datestamp, myname, new_id.
    """
    # for each map on the server,
    # Find a reference to the new layer
    # In the map,
    # Find the old layer
    # Remove it
    # Insert the new layer in the same place
    count = 0
    msg = ''
    maps = gis.content.search(q,
                              max_items=1000,
                              sort_field="title",
                              sort_order="asc",
                              outside_org=False,
                              item_type="Web Map")
    print("Found %d maps." % len(maps))
    for map in maps:
        i_did_update = False
        map_info = "%s (%s) %s" % (map.title, map.owner, map.type)
        #display(web_map)
        count += 1
        web_map = WEBMAP(map)
        for layer in web_map.layers:
            # I dont care about labels right now because i use a different workflow.
            # They can be directly replaced in Portal.
            # if layer.layerType == 'VectorTileLayer' and layer.title == old_label_name:
            #     # I see you, old labels.
            #     print("county labels updated")
            #     i_did_update = True
            #     web_map.remove_layer(layer)
            #web_map.add_layer(new_label)
            # Not every layer dict has itemId/layerType (e.g. basemaps).
            try:
                layerId = layer.itemId
                layerType = layer.layerType
            except AttributeError:
                layerId = ''
                layerType = ''
            if layerType == 'ArcGISTiledMapServiceLayer' and layerId == old_id:
                msg += map_info + "\n"
                msg += "\ttitle:\"%s\" id:%s itemId:%s\n" % (layer.title,
                                                             layer.id, layerId)
                # NOTE(review): `new_id` is not defined in this function —
                # presumably a module-level global; verify before running.
                print("new layer", new_id)
                # ALAS ALAS
                # this puts the "added" layer at the TOP of the layer list.
                # making this script, well, useless
                web_map.remove_layer(layer)
                web_map.add_layer(new_layer,
                                  options={"title": "Clatsop County"})
                i_did_update = True
            else:
                print("32%s \"%s\" %s" % (layerId, layer.title, layerType))
                pass
        if i_did_update:
            # "update" will save changes made to the web map
            # alternatively "save" will create a new copy of the map.
            # Thumbnail can be a local file (to upload) or a URL.
            # UPDATE
            web_map.update()
            comment = "%s updated by \"%s\"" % (datestamp, myname)
            item = ITEM(gis, web_map.item.id)
            item.add_comment(comment)
            """
            # SAVE
            old_title = web_map.item.title
            # I need to grab the existing thumbnail from Portal
            #watermarked_thumbnail = "thumbnail_" + datestring + ".jpg"
            #watermark("thumbnail.jpg", watermarked_thumbnail, old_title, (0,200,200,128))
            # comment indicates new map created
            comment = "%s created from \"%s\" by \"%s\"" % (datestamp, web_map.item.title, myname)
            item = web_map.save(item_properties={
                # These are REQUIRED
                "title": "CHANGED " + old_title + ' ' + datestamp,
                "snippet": web_map.item.snippet,
                "tags": web_map.item.tags
                },
                #thumbnail = watermarked_thumbnail,
                # folder = "TESTING_Brian"
            )
            item.add_comment(comment)
            """
    print(msg)
    return count