def overwrite_csv(username: str, password: str, new_df: DataFrame, old_csv_name: str):
    """Overwrite the hosted table/feature layer named `old_csv_name` using `new_df`.

    Only works if `new_df` has the same columns as the old feature/table.
    (Create an existing table/feature layer by manually uploading a csv to
    arcGIS and selecting the "Publish this file as a hosted layer" option.)

    :param username: ArcGIS Online user name
    :param password: ArcGIS Online password
    :param new_df: replacement data; columns must match the hosted layer's schema
    :param old_csv_name: title of the existing hosted item (also used as the
        temp csv file name)
    """
    gis = GIS(url='https://www.arcgis.com', username=username, password=password)
    csv_file_name = f"{old_csv_name}.csv"
    new_df.to_csv(csv_file_name, index=False)
    try:
        # First search hit is assumed to be the target layer (IndexError if none).
        old_item = gis.content.search(f"title: {old_csv_name}", 'Feature Layer')[0]
        old_feature_layer = FeatureLayerCollection.fromitem(old_item)
        logger.info(
            f"Overwriting feature layer named '{old_csv_name}'.... there will now be {len(new_df)} features."
        )
        overwrite_response = old_feature_layer.manager.overwrite(csv_file_name)
        logger.info(
            f'Done overwriting feature layer. Response: {overwrite_response}')
    finally:
        # BUG FIX: the temp csv used to leak whenever the search or the
        # overwrite raised; always clean it up.
        os.remove(csv_file_name)
def update_from_csv(gis: GIS, f):
    """Overwrite the hosted feature layer that corresponds to csv file `f`.

    `f` is looked up in the module-level `files` mapping — presumably
    file name -> AGOL item id (TODO confirm against the module definition).
    The fresh csv is read from file_path()/f.
    """
    # Load csv from and add the csv as an item
    latest_csv_file = os.path.join(file_path(), f)
    # Look up the hosted item by its id
    latest_csv_item = gis.content.get(files[f])
    print("-----")
    print("Accessing feature server: " + latest_csv_item.url)
    print("Found feature layer %s on server" % latest_csv_item.title)
    # Get feature layer collection from item
    flc = FeatureLayerCollection.fromitem(latest_csv_item)
    print(type(flc))
    print("Overwriting existing feature with %s ..." % f)
    # Overwrite old item with new item
    res = flc.manager.overwrite(latest_csv_file)
    print(res)
    # TODO: not working, the overwrite below removes the new field names again
    # update_fields_from_csv(gis, f, latest_csv_item)
    if f == 'dd-covid19-openzh-switzerland-latest.csv':
        # Extra post-processing only for the Switzerland-wide latest file
        print("Updating existing feature with %s ..." % f)
        update_fields_in_switzerland_latest_file(f, latest_csv_item)
def overwrite_service(weblyrname, service_def):
    """Replace the content of the hosted layer titled `weblyrname` with `service_def`.

    NOTE: relies on a module-level `gis` connection. The first search hit is
    taken as the target (IndexError if nothing matches).
    """
    matches = gis.content.search('title: {}'.format(weblyrname))
    target = matches[0]
    collection = FeatureLayerCollection.fromitem(target)
    print("Overwriting {}".format(weblyrname))
    collection.manager.overwrite(service_def)
def overwrite_service(gis, data, item):
    """Fetch portal item `item` and overwrite its feature layer collection with `data`."""
    # get item, create collection, overwrite collection
    portal_item = gis.content.get(item)
    collection = FeatureLayerCollection.fromitem(portal_item)
    collection.manager.overwrite(data)
    print("[SUCCESS]: overwrote item: {0} with data {1}".format(item, data))
def set_editability(self, layer):
    '''Enable or disable editing on `layer` according to self.allow_edits.

    allow_edits False -> query-only; True -> full editing; anything else
    (e.g. unset/None) leaves the layer definition untouched, matching the
    original if/elif behaviour. Re-raises after printing on failure.
    '''
    try:
        # Idiom fix: `== False`/`== True` replaced with identity checks that
        # preserve the original tri-state behaviour; the duplicated
        # FeatureLayerCollection construction is collapsed.
        if self.allow_edits is False:
            capabilities = 'Query'
        elif self.allow_edits is True:
            capabilities = 'Create,Delete,Query,Update,Editing'
        else:
            return  # allow_edits not a strict boolean: do nothing (as before)
        capabilities_dict = {'capabilities': capabilities, 'syncEnabled': False}
        published_flc = FeatureLayerCollection.fromitem(layer)
        published_flc.manager.update_definition(capabilities_dict)
    except Exception as e:
        print(e.args[0])
        raise
def uploadArc():
    """Upload the COGCC zip files to ArcGIS Online: add and publish items that
    don't exist yet, otherwise overwrite the existing feature layer.

    Relies on module globals: LOG, gis, items, baseDir, zipUpFolder, zipExtension.
    """
    LOG("\nUpload COGCC files to ArcGIS Online directory at: " + time.asctime() + "\n\n")
    foldername = 'Nightly COGCC'
    gis.content.create_folder(foldername)

    def add():
        # brief pause to avoid hammering the portal API
        time.sleep(5)
        gis.content.add(item_properties={"type": "Shapefile"},
                        data=baseDir + zipUpFolder + os.sep + items[item]["fileOut"] + zipExtension,
                        folder=foldername)
        LOG("\t" + items[item]["fileOut"] + " was uploaded.\n")

    def publish():
        time.sleep(5)
        file = gis.content.search(query="title:{} AND type:{}".format(
            items[item]["fileOut"], "Shapefile"))
        file = file[0]
        file.publish(publish_parameters={
            "name": items[item]["fileOut"] + "_published",
            "maxRecordCount": 2000
        }).layers[0]
        LOG("\t" + items[item]["fileOut"] + " was published.\n")

    def findfile(files, filetitle):
        # Return the item in `files` whose title matches exactly.
        for candidate in files:
            if candidate.title == filetitle:
                return candidate
        # explicit raise instead of assert: asserts vanish under `python -O`
        raise AssertionError("no item titled " + filetitle)

    for item in items:
        file = gis.content.search(query="title:{} AND type:{}".format(
            items[item]["fileOut"], "Shapefile"))
        file2 = gis.content.search(query="title:{} AND type:{}".format(
            items[item]["fileOut"], "Feature"))
        # BUG FIX: compare ints with ==, not `is` — identity on small ints is a
        # CPython caching accident, not a guarantee (SyntaxWarning on 3.8+).
        if (len(file) == 0) or (len(file2) == 0):
            if len(file) == 0:
                add()
            if len(file2) == 0:
                publish()
        else:
            LOG("\toverwriting: " + items[item]["fileOut"] + "\n")
            filelist = gis.content.search(query="title:{} AND type:{}".format(
                items[item]["fileOut"], "Feature"))
            file = findfile(filelist, items[item]["fileOut"])
            newfile = FeatureLayerCollection.fromitem(file)
            newfile.manager.overwrite(baseDir + zipUpFolder + os.sep +
                                      items[item]["fileOut"] + zipExtension)
            LOG("\t" + items[item]["fileOut"] + " was overwritten.\n")
    LOG("\n\tCOMPLETED: " + time.asctime() + "\n")
def overwrite_map():
    """Overwrite the hosted map's feature layer from a local file.

    Relies on a module-level `gis` connection; the item id and file path are
    placeholder strings to be filled in.
    """
    # overwrites hosted map
    it_2 = gis.content.get('layer id')
    update_flayer = FeatureLayerCollection.fromitem(it_2)
    try:
        update_flayer.manager.overwrite('path to file')
        print("Map updated!")
    except Exception as e:
        # BUG FIX: narrowed from a bare `except:` (which also swallowed
        # KeyboardInterrupt/SystemExit) and report why the update failed.
        print("Update failed.")
        print(e)
def updateMap():
    """Locate the suburb-info feature layer and overwrite it from the local json export."""
    gis = GIS(username="******", password="******")
    search_result = gis.content.search(query="info", item_type="Feature Layer")
    # take the first hit as the target layer
    suburb = search_result[0]
    print(search_result[0])
    suburb_collection = FeatureLayerCollection.fromitem(suburb)
    print(suburb_collection.properties)
    suburb_collection.manager.overwrite('suburb_info-1589933179300.json')
def style_ago(gis, item, style, overwrites=DEFAULT_AGO_LAYER_CONFIG):
    """ Style feature with name item """
    service = exact_find(gis, item, 'Feature ')
    print('\nStyle {}'.format(service))
    flc = FeatureLayerCollection.fromitem(service)
    # Apply the requested style to the first sublayer,
    flc.layers[0].manager.update_definition(style)
    # then add the standard definitions for a secure FeatureService
    # (disable editing).
    flc.manager.update_definition(overwrites)
def upload_to_ago(**kwargs):
    """Overwrite the AGO feature layer kwargs['id'] with the file at kwargs['filepath']."""
    from arcgis.features import FeatureLayerCollection

    gis = GIS("https://detroitmi.maps.arcgis.com",
              Variable.get('AGO_USER'),
              Variable.get('AGO_PASS'))
    # kwargs['id'] is the ID of the FeatureLayer, not the ID of the .json file
    target = gis.content.get(kwargs['id'])
    collection = FeatureLayerCollection.fromitem(target)
    collection.manager.overwrite(kwargs['filepath'])
def uploadArc(gis, ziproot, sf_name, folder_name):
    """Upload the zipped shapefile `sf_name` from `ziproot` to the portal:
    add/publish it when new, otherwise overwrite the existing feature layer.

    :param gis: authenticated GIS connection
    :param ziproot: directory containing `<sf_name>.zip`
    :param sf_name: shapefile base name / item title
    :param folder_name: portal folder to create/upload into
    """
    gis.content.create_folder(folder_name)
    zipname = os.path.join(ziproot, sf_name) + '.zip'

    def add():
        # brief pause to avoid hammering the portal API
        time.sleep(5)
        gis.content.add(item_properties={"type": "Shapefile"},
                        data=zipname,
                        folder=folder_name)
        print("\t" + sf_name + " was uploaded.\n")

    def publish():
        time.sleep(5)
        file = gis.content.search(
            query="title:{} AND type:{}".format(sf_name, "Shapefile"))
        file = file[0]
        file.publish(publish_parameters={
            "name": sf_name + "_published",
            "maxRecordCount": 2000
        }).layers[0]
        print("\t" + sf_name + " was published.\n")

    def findfile(files, filetitle):
        # Return the item in `files` whose title matches exactly.
        for candidate in files:
            if candidate.title == filetitle:
                return candidate
        # explicit raise instead of assert: asserts vanish under `python -O`
        raise AssertionError("no item titled " + filetitle)

    file = gis.content.search(
        query="title:{} AND type:{}".format(sf_name, "Shapefile"))
    file2 = gis.content.search(
        query="title:{} AND type:{}".format(sf_name, "Feature"))
    # BUG FIX: compare ints with ==, not `is` — identity on small ints is a
    # CPython caching accident, not a guarantee (SyntaxWarning on 3.8+).
    if (len(file) == 0) or (len(file2) == 0):
        if len(file) == 0:
            add()
        if len(file2) == 0:
            publish()
    else:
        print("\toverwriting: " + sf_name + "\n")
        filelist = gis.content.search(
            query="title:{} AND type:{}".format(sf_name, "Feature"))
        file = findfile(filelist, sf_name)
        newfile = FeatureLayerCollection.fromitem(file)
        newfile.manager.overwrite(zipname)
        print("\t" + sf_name + " was overwritten.\n")
def create_joined_view(gis, source_layer, join_layer, primary_key_field,
                       foreign_key_field, name, source_fields, join_fields):
    """ Create a joined layer view between 2 layers
    :param gis: The gis to create the layer with
    :param source_layer: The source layer to join
    :param join_layer: The layer to join with the source layer
    :param primary_key_field: The primary key field in the source layer
    :param foreign_key_field: The foreign key field in the join layer
    :param name: The name of the new layer that will be created
    :param source_fields: The list of field configuration objects in the source layer to keep in the resulting joined layer
    :param join_fields: The list of field configuration objects in the join layer to keep in the resulting joined layer
    :return: The new item
    """
    # Create an empty, query-only, non-editable hosted view service that will
    # host the joined layer definition.
    new_item = gis.content.create_service(
        name=name,
        is_view=True,
        create_params={
            "currentVersion": 10.7,
            "serviceDescription": "",
            "hasVersionedData": False,
            "supportsDisconnectedEditing": False,
            "hasStaticData": True,
            "maxRecordCount": 2000,
            "supportedQueryFormats": "JSON",
            "capabilities": "Query",
            "description": "",
            "copyrightText": "",
            "allowGeometryUpdates": False,
            "syncEnabled": False,
            "editorTrackingInfo": {
                "enableEditorTracking": False,
                "enableOwnershipAccessControl": False,
                "allowOthersToUpdate": True,
                "allowOthersToDelete": True
            },
            "xssPreventionInfo": {
                "xssPreventionEnabled": True,
                "xssPreventionRule": "InputOnly",
                "xssInputRule": "rejectInvalid"
            },
            "tables": [],
            "name": f"{name}"
        })
    fc = FeatureLayerCollection.fromitem(new_item)
    # Build the join layer definition and attach it to the new view service.
    layer_def = generate_layer_definition(source_layer, join_layer,
                                          primary_key_field,
                                          foreign_key_field, name,
                                          source_fields, join_fields)
    fc.manager.add_to_definition(layer_def)
    return new_item
def overwrite_csv():
    """Overwrite a hosted table on the portal from a local file.

    The credentials, portal url, item id and file path are placeholders to be
    filled in; sets the module-level `gis` as a side effect.
    """
    # Overwrites hosted table
    global gis
    # Initiate GIS session
    username = ""
    password = ""
    PortalUrl = ''
    gis = GIS(PortalUrl, username, password)
    it_1 = gis.content.get('layer id')
    update_flayer = FeatureLayerCollection.fromitem(it_1)
    try:
        update_flayer.manager.overwrite('path to file')
        print('Item updated!')
    except Exception as e:
        # BUG FIX: narrowed from a bare `except:` (which also swallowed
        # KeyboardInterrupt/SystemExit) and report why the update failed.
        print('Update failed.')
        print(e)
def find_feature(search_text):
    """Search the portal for `search_text` and return the first matching
    Feature Layer Collection, raising if nothing is found."""
    matches = gis.content.search(search_text, item_type="Feature Layer Collection")
    if not matches:
        print_text_log('Search Empty!')
        raise Exception('Search Empty!')
    print_text_log(
        f"{len(matches)} layers were found. Selecting the first one")
    found_names = [
        '{}; {}; {}'.format(str(m.title), str(m.type), str(m.id))
        for m in matches
    ]
    logger.debug(str(found_names))
    return FeatureLayerCollection.fromitem(matches[0])
def overwrite_feature(username, password, new_df, old_feature_name):
    """Overwrite the hosted feature layer titled `old_feature_name` with `new_df`.

    Writes `new_df` to a temp csv, overwrites the first matching Feature Layer
    item with it, and removes the csv afterwards.
    """
    gis = GIS(url='https://www.arcgis.com', username=username, password=password)
    # print("Logged in as " + str(gis.properties.user.username))
    csv_file_name = f"{old_feature_name}.csv"
    new_df.to_csv(csv_file_name, index=False)
    try:
        # get first search result (IndexError if nothing matches)
        old_jobs_item = gis.content.search(f"title: {old_feature_name}",
                                           'Feature Layer')[0]
        old_feature_layer = FeatureLayerCollection.fromitem(old_jobs_item)
        myprint(
            f"Overwriting feature layer.... there will now be {len(new_df)} features."
        )
        old_feature_layer.manager.overwrite(csv_file_name)
        myprint('Done overwriting feature layer.')
    finally:
        # BUG FIX: the temp csv used to leak whenever the search or the
        # overwrite raised; always clean it up.
        os.remove(csv_file_name)
def updateItemsByID(portal, config):
    """Overwrite each configured feature layer on the portal from the cf_gdb
    workspace and refresh its description metadata.

    `portal` supplies the connection info and the per-portal feature list
    (already filtered by the agol/enterprise flags — see portal_config());
    `config` supplies the source geodatabase path.
    """
    # connect to the portal you are interested in publishing to
    gis = GIS(portal['portal'], portal['user'], portal['password'])
    portal_type = portal['portal_type']
    cf_gdb = config['cf_gdb']
    for feature in portal['features']:
        print(f'Updating:')
        print(feature['title'])
        item_id = feature[f'{portal_type}_id']
        feature_layer = gis.content.get(item_id)
        print(item_id)
        # get the feature layer collection by unique id
        flc = FeatureLayerCollection.fromitem(feature_layer)
        print(flc.properties.description)
        # Feature Layer Collection docs:
        # https://developers.arcgis.com/python/api-reference/arcgis.features.toc.html?highlight=update%20metadata#featurelayercollection
        flc.manager.overwrite(
            os.path.join(cf_gdb, f'{feature["title"]}_REGISTERED'))
        # Refresh the item description:
        # https://developers.arcgis.com/python/api-reference/arcgis.features.managers.html?highlight=update_defin#arcgis.features.managers.FeatureLayerCollectionManager.update_definition
        flc.manager.update_definition({'description': feature['description']})
def publish(zipfile, gis, folder=None):
    """ Push layer to ArcgisOnline

    Adds and publishes the zipped shapefile when it doesn't exist yet,
    otherwise overwrites the existing feature layer, then shares the service
    with everyone.
    """
    if folder:
        gis.content.create_folder(folder)
    name = os.path.split(zipfile)[1].replace('.zip', '')
    shapefile = exact_find(gis, name, 'Shapefile')
    if not shapefile:
        item_properties = {'title': name}
        shapefile = gis.content.add(item_properties, zipfile, folder=folder)
        print('Shapefile {} created'.format(shapefile))
        # Don't use overwrite argument, it behaves very funny
        service = shapefile.publish()
        print('Shapefile {} published'.format(shapefile))
    else:
        service = exact_find(gis, name, 'Feature Layer')
        print(service)
        layer = FeatureLayerCollection.fromitem(service)
        layer.manager.overwrite(zipfile)
        # BUG FIX: this branch used to print `shapefile`, which is falsy here;
        # report the source zipfile that was actually used for the overwrite.
        print('Feature Layer {} updated with {}'.format(service, zipfile))
    service.share(everyone=True)
    print('Service {} shared with everyone'.format(service))
def upload_arcgis():
    """Push TEMP_PATH to ArcGIS Online as FILENAME: overwrite the existing
    hosted feature layer when one exists, otherwise add the csv and publish
    it as a new public coordinate-based layer."""
    ARCGIS_USER = config.get("ARCGIS", "USER")
    ARCGIS_PW = config.get("ARCGIS", "PW")
    ARCGIS_URL = config.get("ARCGIS", "URL")
    gis = GIS(ARCGIS_URL, ARCGIS_USER, ARCGIS_PW)
    content_data = f"title:{FILENAME} type:CSV owner:{ARCGIS_USER}"
    items = gis.content.search(content_data)
    item_params = dict(title=FILENAME)
    if len(items) > 0:
        logger.info("Overwriting layer")
        items = gis.content.search(f"type: Feature Service owner:{ARCGIS_USER}")
        # NOTE(review): raises StopIteration if no feature service titled
        # FILENAME exists — consider next(..., None) plus an explicit guard.
        feature_layer_item = next((n for n in items if n.title == FILENAME))
        feature_layer = FeatureLayerCollection.fromitem(feature_layer_item)
        feature_layer.manager.overwrite(TEMP_PATH)
        return
    logger.info("Uploading file to Arcgis")
    item = gis.content.add(item_params, data=TEMP_PATH)
    item.share(everyone=True)
    logger.info("Publishing layer")
    # X/Y columns of the csv are treated as coordinates
    publish_params = dict(
        name=FILENAME,
        type="csv",
        locationType="coordinates",
        latitudeFieldName="Y",
        longitudeFieldName="X",
    )
    # NOTE(review): `overwrite` is not defined in this function — presumably a
    # module-level flag; confirm it exists, otherwise this raises NameError.
    item = item.publish(publish_parameters=publish_params, overwrite=overwrite)
    item.share(everyone=True)
def update_service_definition(args_parser):
    """Run update_template over every layer and table of the feature layer
    collection identified by args_parser.itemId.

    args_parser must provide: url, username, password, itemId.
    Errors are reported through arcpy's message stream.
    """
    try:
        gis = GIS(args_parser.url, args_parser.username, args_parser.password)
        featureLayerItem = gis.content.get(args_parser.itemId)
        featureLayerCollection = FeatureLayerCollection.fromitem(
            featureLayerItem)
        layers = featureLayerCollection.manager.layers
        tables = featureLayerCollection.manager.tables
        arcpy.AddMessage("Updating Service Definition..")
        # enumerate replaces the original O(n^2) list.index() calls and is
        # also correct when two entries compare equal.
        for layer_index, layer in enumerate(layers):
            update_template(featureLayerCollection, layer, layer_index, False)
        for table_index, table in enumerate(tables):
            update_template(featureLayerCollection, table, table_index, True)
        arcpy.AddMessage("Updated Service Definition..")
    except Exception as e:
        # BUG FIX: arcpy has no Fail() function — the original handler itself
        # raised AttributeError and hid the real error. AddError reports the
        # failure to the geoprocessing tool instead.
        arcpy.AddError(str(e))
def create_defined_view_for_service(self, item, item_properties):
    """Create a view on feature layer `item`, apply metadata from
    `item_properties`, and restrict every view layer with the configured
    viewDefinitionQuery — or filter out ALL rows when the security field is
    missing. Returns the new view item, or None when creation fails (the
    error is logged).
    """
    flc = FeatureLayerCollection.fromitem(item)
    try:
        view_item = flc.manager.create_view(name=item_properties['title'],
                                            allow_schema_changes=False)
        view_item.update(
            item_properties={
                'description': item_properties['description'],
                'tags': item_properties['tags'],
                'commentsEnabled': item_properties['commentsEnabled'],
                'access': item_properties['access']
            })
        for a_layer in view_item.layers:
            # does this view layer carry the security field?
            if [
                    fd for fd in a_layer.manager.properties.fields
                    if fd.name.upper() == item_properties['securityfield'].upper()
            ]:
                self.log(item_properties['viewDefinitionQuery'])
                a_layer.manager.update_definition({
                    "viewDefinitionQuery":
                    item_properties['viewDefinitionQuery']
                })
            else:
                # filter out all the data and log
                # BUG FIX: removed the double negative ("doesn't not") that
                # made the log message say the opposite of what it meant.
                self.log(
                    f"{a_layer.url} layer in the view does not have {item_properties['securityfield']} field - filtering ALL data!"
                )
                a_layer.manager.update_definition(
                    {"viewDefinitionQuery": "OBJECTID < 1"})
                #raise Exception(f"layer in the view does not have {item_properties['securityfield']} field - aborting!")
        return view_item
    except Exception as e:
        # the view exists but hasn't been shared with this group - so we
        # shouldn't just delete it or re-share it - log it!
        self.errorlog(e)
# Import libraries import pandas as pd from arcgis.gis import GIS from arcgis import features from arcgis.features import FeatureLayerCollection # Connect to the GIS gis = GIS("https://ddrobotec.maps.arcgis.com", "cybermax") # Load csv from and add the csv as an item canton_latest_csv_file = "../data-cantons-csv/dd-covid19-ch-cantons-latest.csv" #switzerland_latest_csv_file = "../data-switzerland-csv/dd-covid19-ch-switzerland-latest.csv" # Add the csv as an item canton_latest_item = gis.content.get("c646a9a1727743aaa4162235d798b058") #switzerland_latest_item = gis.content("1100451fd22d42c1bc2b18776429a8a4") feature_layer_collection = FeatureLayerCollection.fromitem(canton_latest_item) res = feature_layer_collection.manager.overwrite(canton_latest_csv_file) print(res)
# remove all '$' and ',' from Valuation field - assign the values back to the
# column being changed.
# BUG FIX: pass regex=False — with the regex engine (the default in pandas 1.x)
# '$' is the end-of-string anchor, so .str.replace('$', '') removed nothing and
# left the dollar signs in place.
deepNestDF.Valuation = deepNestDF.Valuation.str.replace('$', '', regex=False)
deepNestDF.Valuation = deepNestDF.Valuation.str.replace(',', '', regex=False)
# remove YYYY- from date field
deepNestDF.Issued_Date = deepNestDF.Issued_Date.str.slice(5)
# convert to CSV
deepNestDF.to_csv(
    file_csv_30, index=False
)  # don't include an index column (no column header value in spreadsheet)
print('Done converting to CSV!')
# Update AGOL with new CSV
print("")
print("Overwriting existing AGOL data ...")
csv_item = gis.content.get(agoID)  # Find New Applications
#print(csv_item)
csv_flayer_collection = FeatureLayerCollection.fromitem(
    csv_item)  # Get FeatureLayerCollection
csv_flayer_collection.manager.overwrite(file_csv_30)  # Update with overwrite
csv_flayer = csv_item.layers[0]  # there is only 1 layer
print("")
print("Issued permit count: {}".format(
    csv_flayer.query(return_count_only=True)))  # New count
print("")
print("Stopping time: {}".format(time.asctime(time.localtime(time.time()))))
# ## Overwrite the feature layer # Let us overwrite the feature layer using the new csv file we just created. To overwrite, we will use the `overwrite()` method. # In[90]: #item id of the feature layer in AGOL Organization Engines_featureLayer_item = gis.content.get('Enter Feature Layer ID Here') # In[91]: from arcgis.features import FeatureLayerCollection Engines_flayer_collection = FeatureLayerCollection.fromitem(Engines_featureLayer_item) # ### Access the overwritten feature layer # Let us query the feature layer and verify the number of features has increased to `51`. # In[92]: #call the overwrite() method which can be accessed using the manager property Engines_flayer_collection.manager.overwrite('DFFM_Engines.csv (file location)') # In[93]:
# Upload Logic # Use pro login info as before gis = GIS('pro') print('Logged in as: ' + str(gis.properties.user.username)) # delete old versions if they exist for item in gis.content.search('title: ' + fl_title): item.delete() geom_changes = pd.DataFrame.spatial.from_featureclass(os.path.join(changesGDB, changes_layer), sr= '3347') print( 'Uploading feature layer with ' + str(len(geom_changes)) + ' records to AGOL') geom_fl = geom_changes.spatial.to_featurelayer( title= fl_title, gis= GIS('pro'), tags= 'NGD_AL, Redline, ' + str(date.today())) # Make into a feature layer collection to change properties geom_flc = FeatureLayerCollection.fromitem(geom_fl) #Change settings to allow extracts from other users desc = f''''Geometry changes from NGD_Redline extracted on {str(date.today())}. Date Range: From - {os.getenv('FROM_DATE_TIME')} To - {os.getenv('TO_DATE_TIME')} ''' geom_flc.manager.update_definition({'description' : desc, 'capabilities' : 'Query,Extract' }) #print(geom_flc.properties) print('Sharing Layer with NGD') geom_fl.share( groups= gis.groups.search('title:NGD')[0].groupid) print('Upload Complete')
# --- GNSS metadata field definitions ---------------------------------------

# Coded-value domain for ESRIGNSS_POSITIONSOURCETYPE.
_GNSS_POSITIONSOURCETYPE_DOMAIN = {
    'type': 'codedValue',
    'name': 'ESRI_POSITIONSOURCETYPE_DOMAIN',
    'codedValues': [
        {'name': 'Unknown', 'code': 0},
        {'name': 'User defined', 'code': 1},
        {'name': 'Integrated (System) Location Provider', 'code': 2},
        {'name': 'External GNSS Receiver', 'code': 3},
        {'name': 'Network Location Provider', 'code': 4},
    ],
}

# Coded-value domain for ESRIGNSS_FIXTYPE.
_GNSS_FIXTYPE_DOMAIN = {
    'type': 'codedValue',
    'name': 'ESRI_FIX_TYPE_DOMAIN',
    'codedValues': [
        {'name': 'Fix not valid', 'code': 0},
        {'name': 'GPS', 'code': 1},
        {'name': 'Differential GPS', 'code': 2},
        {'name': 'RTK Fixed', 'code': 4},
        {'name': 'RTK Float', 'code': 5},
    ],
}

# Range domain for ESRIGNSS_NUMSATS.
_GNSS_NUMSATS_DOMAIN = {'type': 'range', 'name': 'ESRI_NUM_SATS_DOMAIN', 'range': [0, 99]}

# Field names removed in --remove mode (original deletion order preserved).
_GNSS_FIELD_NAMES_TO_DELETE = [
    'ESRIGNSS_DIRECTION', 'ESRIGNSS_SPEED', 'ESRISNSR_AZIMUTH',
    'ESRIGNSS_POSITIONSOURCETYPE', 'ESRIGNSS_FIXDATETIME', 'ESRIGNSS_RECEIVER',
    'ESRIGNSS_H_RMS', 'ESRIGNSS_V_RMS', 'ESRIGNSS_LATITUDE',
    'ESRIGNSS_LONGITUDE', 'ESRIGNSS_ALTITUDE', 'ESRIGNSS_PDOP',
    'ESRIGNSS_HDOP', 'ESRIGNSS_VDOP', 'ESRIGNSS_CORRECTIONAGE',
    'ESRIGNSS_FIXTYPE', 'ESRIGNSS_STATIONID', 'ESRIGNSS_AVG_H_RMS',
    'ESRIGNSS_AVG_V_RMS', 'ESRIGNSS_AVG_POSITIONS', 'ESRIGNSS_H_STDDEV',
    'ESRIGNSS_NUMSATS',
]


def _gnss_field(name, field_type, alias, **extra):
    """Build one GNSS metadata field definition with the shared defaults
    (sqlTypeOther, nullable, editable, no domain, no default)."""
    field = {
        'name': name,
        'type': field_type,
        'alias': alias,
        'sqlType': 'sqlTypeOther',
        'nullable': True,
        'editable': True,
        'domain': None,
        'defaultValue': None,
    }
    field.update(extra)
    return field


def _append_if_missing(existing_fields, pending, field):
    """Queue `field` for add_to_definition unless the layer already has it."""
    if not any(f['name'] == field['name'] for f in existing_fields):
        pending['fields'].append(field)


def _ensure_domain(existing_fields, pending, operations, field, domain):
    """Handle a domained GNSS field: patch the domain onto an existing field
    (scheduling an updateDefinition run), or queue the full field for adding
    when it is missing entirely."""
    matches = [f for f in existing_fields if f['name'] == field['name']]
    if matches:
        # Field does exist; check if the domain is set.
        if matches[0]['domain'] is None:
            # BUG FIX: the original appended 'updateDefinition' only when it was
            # ALREADY in `operations` (inverted membership test), so the domain
            # repair never executed.
            if 'updateDefinition' not in operations:
                operations.append('updateDefinition')
            matches[0]['domain'] = domain
    else:
        pending['fields'].append(dict(field, domain=domain))


def searchItems_addGNSSMetadataFields(args_parser):
    """Add (or repair the domains of) the standard ESRI GNSS metadata fields on
    a hosted point feature layer, or delete them when args_parser.remove is set.

    args_parser must provide: url, username, password, itemId, layerIndex, remove.
    Raises ValueError (reported via arcpy) if the target layer is not a point
    layer; all other errors are caught and forwarded to arcpy.AddError.
    """
    # Search ItemIds
    gis = GIS(args_parser.url, args_parser.username, args_parser.password)
    arcpy.AddMessage("Signed into organization..")
    itemId = args_parser.itemId
    try:
        featureLayerItem = gis.content.get(itemId)
        # Construct a FeatureLayerCollection from the portal item.
        featureLayerCollection = FeatureLayerCollection.fromitem(featureLayerItem)
        # Feature Layer index (falls back to 0, like the original conditional).
        featureLayerIndex = args_parser.layerIndex if args_parser.layerIndex else 0
        layer_manager = featureLayerCollection.manager.layers[featureLayerIndex]
        # Extract fields from the Feature layer service definition.
        featureLayerFields = layer_manager.properties['fields']
        # GNSS metadata is only meaningful on point layers.
        if layer_manager.properties['geometryType'] != 'esriGeometryPoint':
            arcpy.AddError("Feature layer is not a point layer")
            raise ValueError("Feature layer is not a point layer")
        gnssMetadataFields = {'fields': []}  # new fields which need to be added
        operations = []  # service-definition operations to run
        arcpy.AddMessage("Adding/Updating gnss fields..")
        if not args_parser.remove:
            # Field order below matches the original script exactly.
            _ensure_domain(featureLayerFields, gnssMetadataFields, operations,
                           _gnss_field('ESRIGNSS_POSITIONSOURCETYPE', 'esriFieldTypeInteger', 'Position source type'),
                           _GNSS_POSITIONSOURCETYPE_DOMAIN)
            _append_if_missing(featureLayerFields, gnssMetadataFields,
                               _gnss_field('ESRIGNSS_RECEIVER', 'esriFieldTypeString', 'Receiver Name', length=50))
            _append_if_missing(featureLayerFields, gnssMetadataFields,
                               _gnss_field('ESRIGNSS_LATITUDE', 'esriFieldTypeDouble', 'Latitude'))
            _append_if_missing(featureLayerFields, gnssMetadataFields,
                               _gnss_field('ESRIGNSS_LONGITUDE', 'esriFieldTypeDouble', 'Longitude'))
            _append_if_missing(featureLayerFields, gnssMetadataFields,
                               _gnss_field('ESRIGNSS_ALTITUDE', 'esriFieldTypeDouble', 'Altitude'))
            _append_if_missing(featureLayerFields, gnssMetadataFields,
                               _gnss_field('ESRIGNSS_H_RMS', 'esriFieldTypeDouble', 'Horizontal Accuracy (m)'))
            _append_if_missing(featureLayerFields, gnssMetadataFields,
                               _gnss_field('ESRIGNSS_V_RMS', 'esriFieldTypeDouble', 'Vertical Accuracy (m)'))
            _append_if_missing(featureLayerFields, gnssMetadataFields,
                               _gnss_field('ESRIGNSS_FIXDATETIME', 'esriFieldTypeDate', 'Fix Time', length=0))
            _ensure_domain(featureLayerFields, gnssMetadataFields, operations,
                           _gnss_field('ESRIGNSS_FIXTYPE', 'esriFieldTypeInteger', 'Fix Type'),
                           _GNSS_FIXTYPE_DOMAIN)
            _append_if_missing(featureLayerFields, gnssMetadataFields,
                               _gnss_field('ESRIGNSS_CORRECTIONAGE', 'esriFieldTypeDouble', 'Correction Age'))
            _append_if_missing(featureLayerFields, gnssMetadataFields,
                               _gnss_field('ESRIGNSS_STATIONID', 'esriFieldTypeInteger', 'Station ID'))
            _ensure_domain(featureLayerFields, gnssMetadataFields, operations,
                           _gnss_field('ESRIGNSS_NUMSATS', 'esriFieldTypeInteger', 'Number of Satellites'),
                           _GNSS_NUMSATS_DOMAIN)
            _append_if_missing(featureLayerFields, gnssMetadataFields,
                               _gnss_field('ESRIGNSS_PDOP', 'esriFieldTypeDouble', 'PDOP'))
            _append_if_missing(featureLayerFields, gnssMetadataFields,
                               _gnss_field('ESRIGNSS_HDOP', 'esriFieldTypeDouble', 'HDOP'))
            _append_if_missing(featureLayerFields, gnssMetadataFields,
                               _gnss_field('ESRIGNSS_VDOP', 'esriFieldTypeDouble', 'VDOP'))
            _append_if_missing(featureLayerFields, gnssMetadataFields,
                               _gnss_field('ESRIGNSS_DIRECTION', 'esriFieldTypeDouble', 'Direction of travel (°)'))
            _append_if_missing(featureLayerFields, gnssMetadataFields,
                               _gnss_field('ESRIGNSS_SPEED', 'esriFieldTypeDouble', 'Speed (km/h)'))
            _append_if_missing(featureLayerFields, gnssMetadataFields,
                               _gnss_field('ESRISNSR_AZIMUTH', 'esriFieldTypeDouble', 'Compass reading (°)'))
            _append_if_missing(featureLayerFields, gnssMetadataFields,
                               _gnss_field('ESRIGNSS_AVG_H_RMS', 'esriFieldTypeDouble', 'Average Horizontal Accuracy (m)'))
            _append_if_missing(featureLayerFields, gnssMetadataFields,
                               _gnss_field('ESRIGNSS_AVG_V_RMS', 'esriFieldTypeDouble', 'Average Vertical Accuracy (m)'))
            _append_if_missing(featureLayerFields, gnssMetadataFields,
                               _gnss_field('ESRIGNSS_AVG_POSITIONS', 'esriFieldTypeInteger', 'Averaged Positions'))
            _append_if_missing(featureLayerFields, gnssMetadataFields,
                               _gnss_field('ESRIGNSS_H_STDDEV', 'esriFieldTypeDouble', 'Standard Deviation (m)'))
            # Schedule an add only when something is actually missing.
            if gnssMetadataFields['fields']:
                operations.append('addToDefinition')
        else:
            # Remove mode: delete every GNSS metadata field by name.
            operations.append('deleteFromDefinition')
            gnssMetadataFields = {
                'fields': [{'name': name} for name in _GNSS_FIELD_NAMES_TO_DELETE]
            }

        if not operations:
            # ROBUSTNESS FIX: the original fell through and dereferenced an
            # undefined `response` here, raising NameError inside the except.
            arcpy.AddMessage("GNSS fields already present - no changes required..")
            return

        # Add/Delete/Modify the service definition.
        for operation in operations:
            if operation == 'addToDefinition':
                response = layer_manager.add_to_definition(gnssMetadataFields)
                arcpy.AddMessage("Successfully added GNSS fields..")
            elif operation == 'deleteFromDefinition':
                response = layer_manager.delete_from_definition(gnssMetadataFields)
            else:  # updateDefinition: push the patched field list back
                # NOTE(review): passes the bare fields list, as the original
                # did; the REST API usually expects {'fields': [...]} — confirm.
                response = layer_manager.update_definition(featureLayerFields)
                arcpy.AddMessage("Successfully updated GNSS fields..")

        # Only the last response is checked, matching the original behaviour.
        result = response['success']
        if not result:
            print('Failed to update Feature layer service definition..')
        else:
            print('Service definition updated successfully..')
    except Exception as e:
        arcpy.AddError(e)
def promote_sdg(goal_code=None,
                indicator_code=None,
                target_code=None,
                series_code=None,
                property_update_only=False):
    """Promote staged SDG Open Data items to production.

    Phase 1 (production): moves the admin user's current 'Open Data' items
    to the 'Historic Data' folder, unshares them from the production Open
    Data group, swaps the 'Current' tag for 'Historic', and marks them
    non-authoritative.
    Phase 2 (staging): reassigns the staging user's CSV items (and their
    published feature services) to the admin user's 'Open Data' folder.
    Phase 3: shares the promoted non-CSV items publicly, disables editing,
    unshares them from the staging group, protects them from deletion,
    tags them 'Current', and marks them authoritative.

    NOTE(review): goal_code / indicator_code / target_code / series_code /
    property_update_only are currently unused; kept for interface
    compatibility with existing callers.
    """
    try:
        ## Production Site Changes
        # Search all the Items in Production Open Data Group
        user = gis_online_connection.users.get(online_username)
        admin_user = gis_online_connection.users.get(online_username_admin)
        if admin_user is None:
            return
        user_items = admin_user.items(folder='Open Data', max_items=800)
        for item in user_items:
            # Move these items into Archive folder under the Admin User
            print('Moving ' + item.title + ' to archive folder')
            item.move(folder="Historic Data", owner=online_username_admin)
            # Unshare the Items from Open Data Group (Production)
            display('unsharing item ' + item.title + " from the open data group")
            item.unshare(open_data_group_prod["id"])
            # Update Tags (Remove Current add Historic)
            item_properties = {}
            item_properties["tags"] = item.tags
            if 'Current' in item_properties["tags"]:
                # BUG FIX: list.remove() mutates in place and returns None;
                # the old code assigned that None back to the tags entry and
                # then crashed on .append(). Mutate the list in place.
                item_properties["tags"].remove('Current')
                item_properties["tags"].append('Historic')
            item.update(item_properties=item_properties)
            # Mark this item as deprecated (non-authoritative)
            set_content_status(update_item=item, authoratative=False)

        ## Staging Site Changes
        # Get all the Items in the Open Data Folder
        user_items = user.items(folder='Open Data', max_items=800)
        for item in user_items:
            # Move all the CSV Files to the Open Data Folder of the Admin User
            # This will also move the Feature Service Layer
            if item.type == 'CSV':
                # Assign Item to the Admin User
                display('reassigning item ' + item.title + ' from ' +
                        online_username + ' to ' + online_username_admin)
                item.reassign_to(online_username_admin, 'Open Data')

        # Update the Items in the Open Data Folder of the Admin User
        user_items = admin_user.items(folder='Open Data', max_items=800)
        for item in user_items:
            # Update Sharing to Public, Share with Open Data Group
            if item.type != 'CSV':
                display('updating sharing for item ' + item.title)
                item.share(everyone=True, org=True,
                           groups=open_data_group_prod["id"],
                           allow_members_to_edit=False)
                # Disable Editing on the Feature Service
                display('disable editing for ' + item.title)
                item_flc = FeatureLayerCollection.fromitem(item)
                item_flc.manager.update_definition(
                    {"capabilities": "Query, Extract"})
                # Unshare from Staging Group
                display('unsharing item ' + item.title + " from the staging group")
                item.unshare(open_data_group["id"])
                display('enabling delete protection for: ' + item.title)
                item.protect(enable=True)
                # Tag as Current
                display('updating item properties for ' + item.title)
                item_properties = dict()
                # BUG FIX: list.append() returns None; the old code assigned
                # that None as the new tag list. Build the list instead.
                item_properties["tags"] = item.tags + ['Current']
                item.update(item_properties=item_properties)
                # Mark this item as authoritative
                display('marking item ' + item.title + " as authortative")
                set_content_status(update_item=item, authoratative=True)
    except Exception:
        # Best-effort batch job: log the failure, don't propagate.
        # (Narrowed from a bare except so Ctrl-C/SystemExit still work.)
        traceback.print_exc()
        return
keepers = ['latitude', 'longitude', 'name', 'fips'] df2= rawCoordinate[keepers].copy() df2.loc[:,'SHAPE'] = df2.apply(lambda row : arcgis.geometry.Geometry({'x': float(row['longitude']), 'y': float(row['latitude']), 'spatialReference':{'wkid':2346}}), axis=1) #df2.loc[:,'ptvalid'] = df2.apply(lambda row : row['SHAPE'].is_valid, axis = 1) sdf = SpatialDataFrame(df2) sdf.to_featureclass(out_location = r'D:\Projects\ORPHANS\ORPHANS.gdb', out_name = 'fctesttt8') t=time.process_time() #layer = gis.content.import_data(sdf, title='Test3') sdf.to_featurelayer(title= 'prettyplease222', gis=gis) elapsedtime = time.process_time()-t print(elapsedtime) search_results = gis.content.search('title:prettyplease222, type: Feature Service') tooverwrite = search_results[0] from arcgis.features import FeatureLayerCollection flayer_coll = FeatureLayerCollection.fromitem(tooverwrite) flayer_coll.manager.overwrite(sdf)
# coding: utf-8 # Load the classes used from arcgis.gis import GIS from arcgis.features import FeatureLayerCollection # Logging in with my username and password, replace with your own import getpass password = getpass.getpass("Enter password: "******"https://www.arcgis.com", "jtedrick_melbournedev", password) #Looking for the layer to download via search; this is a search that returns one item layerSearch = gis.content.search(query="EXIF", item_type="Feature Layer") exportLayer = FeatureLayerCollection.fromitem(layerSearch[0]) # The exportLayer item has 2 things to check for attachments - the layers and the tables for lyr in exportLayer.layers: if lyr.attachments: query_features = lyr.query(where='1=1') oidField = query_features.object_id_field_name for f in query_features.features: f_id = f.get_value(oidField) attach_list = lyr.attachments.get_list(oid=f_id) for attach in attach_list: attach_id = attach['id'] lyr.attachments.download( oid=f_id, attachment_id=attach_id, save_path="/Users/jame6423/temp/attach")
'Feature Layer')[0] break print("Location Feature Layer Uploaded With ID: %s" % (locations_new_fl.id)) else: locations_new_fl = gis.content.search(data_name, 'Feature Layer')[0] print("Location Feature Layer Already Exists with ID: %s" % (locations_new_fl.id)) fl_layer = FeatureLayer.fromitem(locations_new_fl) fl_layer.manager.truncate() print("Location Data Truncated...") FeatureLayerCollection.fromitem(locations_new_fl).manager.overwrite( 'locations.csv') # Share FeatureLayer with Org locations_new_fl.share(everyone=False, org=True, groups=['CPE'], allow_members_to_edit=False) print("New Location Layer Shared with CPE Group") #Set Thumbnail of FeatureLayer locations_new_fl.update(thumbnail=r'thumbnail.jpg') print("Thumbnail Set") print("Item Sucessfully Published!") os.remove("locations.csv") print("CSV removed")
def overwrite(gis, id, filepath): from arcgis.features import FeatureLayerCollection item = gis.content.get(id) flc = FeatureLayerCollection.fromitem(item) flc.manager.overwrite(filepath) return item
except: arcpy.AddMessage("Failure to publish " + fcListAll[t]) t+=1 arcpy.AddMessage("Successfully published services to AGO.") # Remove underscores from titles arcpy.AddMessage("Removing underscores from titles...") # Search and create a list of content fc = gis.content.search(query="owner: TPP_GIS AND type: Feature Service AND tags: Quarterly",sort_field="title",sort_order="asc", max_items=100 ) # Loop through item list for item in fc: title = item.title newtitle = title.replace("_"," ") arcpy.AddMessage("Changing " + title + " to " + newtitle + "...") item.update(item_properties={'title':newtitle}) print (newtitle) arcpy.AddMessage("Enabling Export...") search_result= gis.content.search("owner: TPP_GIS AND type: Feature Service AND tags: Quarterly", sort_field="title", sort_order="asc", max_items=1000) b = 0 while b < (len(search_result)): item = search_result[b] flc = FeatureLayerCollection.fromitem(item) update_dict = {"capabilities": "Query,Extract"} flc.manager.update_definition(update_dict) arcpy.AddMessage(item) b+=1 number = len(fc) arcpy.AddMessage("Finished publishing " + str(number) + " layers!")
def searchItems_addGNSSMetadataFields(args_parser):
    """Add, update, or remove the standard Esri GNSS metadata fields on a
    hosted point feature layer.

    ``args_parser`` must carry: url, username, password, itemId,
    layerIndex, and the boolean flag ``remove``. When ``remove`` is
    false, each missing GNSS metadata field is added to the layer's
    service definition and the coded-value/range domains are installed on
    existing fields that lack them; when true, every GNSS metadata field
    is deleted from the definition. Errors are reported via arcpy.
    """
    gis = GIS(args_parser.url, args_parser.username, args_parser.password)
    arcpy.AddMessage("Signed into organization..")
    itemId = args_parser.itemId
    try:
        featureLayerItem = gis.content.get(itemId)
        # Construct a FeatureLayerCollection from the portal item.
        featureLayerCollection = FeatureLayerCollection.fromitem(featureLayerItem)

        # Feature Layer index (defaults to the first layer).
        featureLayerIndex = args_parser.layerIndex if args_parser.layerIndex else 0

        # Extract fields from the Feature layer service definition.
        featureLayerFields = featureLayerCollection.manager.layers[
            featureLayerIndex].properties['fields']

        # GNSS metadata only makes sense on point layers.
        if featureLayerCollection.manager.layers[
                featureLayerIndex].properties['geometryType'] != 'esriGeometryPoint':
            arcpy.AddError("Feature layer is not a point layer")
            raise ValueError("Feature layer is not a point layer")

        # New fields which need to be added.
        gnssMetadataFields = {'fields': []}
        # Operations list - Add, Update or delete GNSS Metadata fields.
        operations = []

        def _existing(name):
            # Defined field dicts with this name (empty list or one entry).
            return [field for field in featureLayerFields
                    if field['name'] == name]

        def _queue_missing(spec):
            # Queue *spec* for addToDefinition when the field is absent.
            if not _existing(spec['name']):
                gnssMetadataFields['fields'].append(spec)

        def _queue_update():
            # Record a single 'updateDefinition' operation.
            # BUG FIX: the old code appended 'updateDefinition' only when it
            # was *already* in the list, so domain updates were never sent.
            if 'updateDefinition' not in operations:
                operations.append('updateDefinition')

        def _simple(name, ftype, alias, **extra):
            # Build a plain (domain-less) field definition.
            spec = {'name': name, 'type': ftype, 'alias': alias,
                    'sqlType': 'sqlTypeOther', 'nullable': True,
                    'editable': True, 'domain': None, 'defaultValue': None}
            spec.update(extra)
            return spec

        def _queue_domain_field(name, alias, domain):
            # Add an integer field carrying *domain*, or install the domain
            # on an existing copy of the field that lacks one.
            existing = _existing(name)
            if existing:
                # Field does exist - check if the domain is set.
                if existing[0]['domain'] == None:
                    _queue_update()
                    index = featureLayerFields.index(existing[0])
                    featureLayerFields[index]['domain'] = domain
            else:
                gnssMetadataFields['fields'].append(
                    {'name': name, 'type': 'esriFieldTypeInteger',
                     'alias': alias, 'sqlType': 'sqlTypeOther',
                     'nullable': True, 'editable': True,
                     'domain': domain, 'defaultValue': None})

        # Domains used by the fix-type / station-id / satellite-count fields.
        fixTypeDomain = {'type': 'codedValue', 'name': 'ESRI_FIX_TYPE_DOMAIN',
                         'codedValues': [{'name': 'Fix not valid', 'code': 0},
                                         {'name': 'GPS', 'code': 1},
                                         {'name': 'Differential GPS', 'code': 2},
                                         {'name': 'RTK Fixed', 'code': 4},
                                         {'name': 'RTK Float', 'code': 5}]}
        stationIdDomain = {'type': 'range', 'name': 'ESRI_STATION_ID_DOMAIN',
                           'range': [0, 1023]}
        numSatellitesDomain = {'type': 'range', 'name': 'ESRI_NUM_SATS_DOMAIN',
                               'range': [0, 99]}

        arcpy.AddMessage("Adding/Updating gnss fields..")

        # Add/Update GNSS Metadata fields (original field order preserved).
        if not args_parser.remove:
            _queue_missing(_simple('ESRIGNSS_RECEIVER', 'esriFieldTypeString',
                                   'Receiver Name', length=50))
            _queue_missing(_simple('ESRIGNSS_H_RMS', 'esriFieldTypeDouble',
                                   'Horizontal Accuracy (m)'))
            _queue_missing(_simple('ESRIGNSS_V_RMS', 'esriFieldTypeDouble',
                                   'Vertical Accuracy (m)'))
            _queue_missing(_simple('ESRIGNSS_LATITUDE', 'esriFieldTypeDouble',
                                   'Latitude'))
            _queue_missing(_simple('ESRIGNSS_LONGITUDE', 'esriFieldTypeDouble',
                                   'Longitude'))
            _queue_missing(_simple('ESRIGNSS_ALTITUDE', 'esriFieldTypeDouble',
                                   'Altitude'))
            _queue_missing(_simple('ESRIGNSS_PDOP', 'esriFieldTypeDouble',
                                   'PDOP'))
            _queue_missing(_simple('ESRIGNSS_HDOP', 'esriFieldTypeDouble',
                                   'HDOP'))
            _queue_missing(_simple('ESRIGNSS_VDOP', 'esriFieldTypeDouble',
                                   'VDOP'))
            _queue_domain_field('ESRIGNSS_FIXTYPE', 'Fix Type', fixTypeDomain)
            _queue_missing(_simple('ESRIGNSS_CORRECTIONAGE', 'esriFieldTypeDouble',
                                   'Correction Age'))
            _queue_domain_field('ESRIGNSS_STATIONID', 'Station ID',
                                stationIdDomain)
            _queue_domain_field('ESRIGNSS_NUMSATS', 'Number of Satellites',
                                numSatellitesDomain)
            _queue_missing(_simple('ESRIGNSS_FIXDATETIME', 'esriFieldTypeDate',
                                   'Fix Time', length=0))
            _queue_missing(_simple('ESRIGNSS_AVG_H_RMS', 'esriFieldTypeDouble',
                                   'Average Horizontal Accuracy (m)'))
            _queue_missing(_simple('ESRIGNSS_AVG_V_RMS', 'esriFieldTypeDouble',
                                   'Average Vertical Accuracy (m)'))
            _queue_missing(_simple('ESRIGNSS_AVG_POSITIONS', 'esriFieldTypeInteger',
                                   'Averaged Positions'))
            _queue_missing(_simple('ESRIGNSS_H_STDDEV', 'esriFieldTypeDouble',
                                   'Standard Deviation (m)'))

            # Check if AddToDefinition operation needs to be added.
            if gnssMetadataFields['fields']:
                operations.append('addToDefinition')
        else:
            # Remove every GNSS metadata field from the definition.
            operations.append('deleteFromDefinition')
            gnssMetadataFields = {'fields': [
                {'name': 'ESRIGNSS_FIXDATETIME'}, {'name': 'ESRIGNSS_RECEIVER'},
                {'name': 'ESRIGNSS_H_RMS'}, {'name': 'ESRIGNSS_V_RMS'},
                {'name': 'ESRIGNSS_LATITUDE'}, {'name': 'ESRIGNSS_LONGITUDE'},
                {'name': 'ESRIGNSS_ALTITUDE'}, {'name': 'ESRIGNSS_PDOP'},
                {'name': 'ESRIGNSS_HDOP'}, {'name': 'ESRIGNSS_VDOP'},
                {'name': 'ESRIGNSS_CORRECTIONAGE'}, {'name': 'ESRIGNSS_FIXTYPE'},
                {'name': 'ESRIGNSS_STATIONID'}, {'name': 'ESRIGNSS_AVG_H_RMS'},
                {'name': 'ESRIGNSS_AVG_V_RMS'}, {'name': 'ESRIGNSS_AVG_POSITIONS'},
                {'name': 'ESRIGNSS_H_STDDEV'}, {'name': 'ESRIGNSS_NUMSATS'}]}

        # Add/Delete/Modify service definition.
        response = None
        for operation in operations:
            # Add
            if operation == 'addToDefinition':
                response = featureLayerCollection.manager.layers[
                    featureLayerIndex].add_to_definition(gnssMetadataFields)
                arcpy.AddMessage("Successfully added GNSS fields..")
            # Delete
            elif operation == 'deleteFromDefinition':
                response = featureLayerCollection.manager.layers[
                    featureLayerIndex].delete_from_definition(gnssMetadataFields)
            # Modify (install the domains set on featureLayerFields above)
            else:
                response = featureLayerCollection.manager.layers[
                    featureLayerIndex].update_definition(featureLayerFields)
                arcpy.AddMessage("Successfully updated GNSS fields..")

        # ROBUSTNESS FIX: the old code read response['success'] even when no
        # operation had run, which raised a NameError.
        if response is None:
            print('Nothing to do - GNSS fields already up to date..')
        elif not response['success']:
            print('Failed to update Feature layer service definition..')
        else:
            print('Service definition updated successfully..')
    except Exception as e:
        arcpy.AddError(e)