def update_arcgis(arcuser, arcpassword, arcfeatureid, filename):
    filename = "/tmp/%s" % filename
    gis = arcgis.GIS("http://lahub.maps.arcgis.com", arcuser, arcpassword)
    gis_item = gis.content.get(arcfeatureid)
    gis_layer_collection = arcgis.features.FeatureLayerCollection.fromitem(gis_item)
    gis_layer_collection.manager.overwrite(filename)
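# A minimal usage sketch for update_arcgis(), assuming valid LA Hub credentials,
# a hosted feature layer item ID, and a source file already staged in /tmp. The
# credentials and item ID below are placeholders, not real values.
import arcgis  # required by update_arcgis()

update_arcgis(
    arcuser="my_user",          # hypothetical account name
    arcpassword="my_password",  # hypothetical password
    arcfeatureid="<item-id>",   # hypothetical feature layer item ID
    filename="data.csv",        # resolved to /tmp/data.csv inside the function
)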
def downloadPictures(meterDate, direction, pictureLoc):
    print('Beginning ' + direction + ' Route Survey123 Image Download.')
    print('Logging into ArcGIS.')
    agoLogin = arcgis.GIS(url=None, username=gis_user, password=gis_password)
    featureID = ""
    if direction == "North":
        featureID = "6e4bcc172e844736ad1aa8505537aed4"
    if direction == "South":
        featureID = "22af1b6e3ff04c0497fc12b57692b0e8"
    myFLItem = agoLogin.content.get(featureID)
    attachmentsLayer = myFLItem.layers[0]
    oidField = attachmentsLayer.properties.objectIdField
    myRecords = attachmentsLayer.query(
        where="CreationDate > '" + meterDate + " 12:00:00 AM' AND CreationDate < '" + meterDate + " 11:59:59 PM'",
        out_fields=oidField)
    for r in myRecords.features:
        myOID = r.get_value(oidField)
        attachments = attachmentsLayer.attachments.get_list(myOID)
        for attachment in attachments:
            myDownload = attachmentsLayer.attachments.download(
                oid=myOID, attachment_id=attachment['id'], save_path=pictureLoc)
            print("Downloaded:" + myDownload[0])
def update_code_75(**kwargs):
    # Connecting to ArcGIS
    arcconnection = BaseHook.get_connection("arcgis")
    gis = arcgis.GIS("http://lahub.maps.arcgis.com/",
                     arcconnection.login, arcconnection.password)

    # Getting Code 75 data
    gis_item = gis.content.get(kwargs.get("arcfeatureid"))
    sdf = gis_item.layers[0].query().sdf
    sdf.drop("SHAPE", axis=1, inplace=True)

    # Creating table if does not exist
    engine = PostgresHook.get_hook("postgres_default").get_sqlalchemy_engine()
    engine.connect().execute('CREATE SCHEMA IF NOT EXISTS "public-health"')
    create_table_statement = """CREATE TABLE IF NOT EXISTS "public-health"."code75s" (
        index BIGINT,
        "OBJECTID" BIGINT,
        address TEXT,
        cd FLOAT(53),
        closeddate TEXT,
        createddate TEXT,
        latitude FLOAT(53),
        longitude FLOAT(53),
        reasoncode BIGINT,
        resolutioncode TEXT,
        srnumber TEXT,
        status TEXT
    )"""
    engine.connect().execute(create_table_statement)

    # Deleting old records
    object_id_list = ",".join(list(sdf["OBJECTID"].astype(str)))
    engine.connect().execute(
        'DELETE FROM "public-health".code75s WHERE "OBJECTID" IN (%s)' % object_id_list)

    # Sending updates and new records to postgres
    sdf.to_sql(
        "code75s",
        engine,
        schema="public-health",
        if_exists="append",
    )
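# A sketch of how update_code_75() might be wired into an Airflow DAG, since it
# pulls "arcfeatureid" from its kwargs and uses Airflow hooks internally. The DAG
# name, schedule, and item ID are hypothetical; PythonOperator and op_kwargs are
# standard Airflow 1.x, which the BaseHook/PostgresHook usage above suggests.
from datetime import datetime

from airflow import DAG
from airflow.operators.python_operator import PythonOperator

with DAG("update-code-75", start_date=datetime(2020, 1, 1),
         schedule_interval="@daily") as dag:
    PythonOperator(
        task_id="update_code_75",
        python_callable=update_code_75,
        op_kwargs={"arcfeatureid": "<item-id>"},  # hypothetical item ID
    )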
def grabSurvey123(meterDate, direction, exportLoc):
    print('Beginning ' + direction + ' Route Survey123 Download.')
    print('Logging into ArcGIS.')
    agoLogin = arcgis.GIS(url=None, username=gis_user, password=gis_password)
    month = meterDate[:2]
    day = meterDate[3:5]
    year = meterDate[6:10]
    featureID = ""
    if direction == "North":
        featureID = "6e4bcc172e844736ad1aa8505537aed4"
    if direction == "South":
        featureID = "22af1b6e3ff04c0497fc12b57692b0e8"
    itemToDownload = agoLogin.content.get(featureID)
    itemExportName = month + day
    exportParameters = {
        "layers": [{
            "id": 0,
            "where": "CreationDate > '" + meterDate + " 12:00:00 AM' AND CreationDate < '" + meterDate + " 11:59:59 PM'"
        }]
    }
    print('Generating Excel in ArcGIS Online.')
    itemToDownload.export(title=itemExportName,
                          export_format="Excel",
                          parameters=exportParameters,
                          wait=True)
    searchForExportedItem = agoLogin.content.search(query=itemExportName)
    exportedItemID = searchForExportedItem[0].id
    getTheExportedItem = agoLogin.content.get(exportedItemID)
    print('Downloading to: ' + exportLoc)
    getTheExportedItem.download(save_path=exportLoc,
                                file_name="{}.xlsx".format(itemExportName))
    print('Download Complete! Removing ArcGIS Online file.')
    getTheExportedItem.delete()
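# Hedged usage sketch for the two Survey123 helpers above. Both read the
# module-level gis_user/gis_password globals, so those must be defined first;
# the date string matches the MM/DD/YYYY slicing the functions perform. The
# credentials and paths are placeholders.
gis_user = "my_user"          # hypothetical credentials
gis_password = "my_password"

grabSurvey123("01/15/2021", "North", r"C:\exports")         # Excel export of the day's records
downloadPictures("01/15/2021", "North", r"C:\exports\img")  # matching photo attachments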
def getArcGISConnection(securityinfo):
    """
    Get a connection object for ArcGIS based on configuration options

    :return: Connection object for the ArcGIS service
    :rtype: arcgis.gis.GIS
    :raises: TypeError if securityinfo is not a dict
    :raises: RuntimeError if ArcGIS connection is not valid
    """
    if not isinstance(securityinfo, dict):
        raise TypeError('Argument securityinfo type should be dict')
    try:
        arcGIS = arcgis.GIS(securityinfo['org_url'],
                            securityinfo['username'],
                            securityinfo['password'])
    except RuntimeError as exp:
        logger.error("RuntimeError: getArcGISConnection: {}".format(exp))
        raise RuntimeError(str('ArcGIS connection invalid: {}'.format(exp)))
    return arcGIS
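# Minimal usage sketch for getArcGISConnection(), assuming an ArcGIS Online org;
# the URL and credentials are placeholders, and a module-level `logger` must
# exist for the error path above.
securityinfo = {
    "org_url": "https://myorg.maps.arcgis.com",  # hypothetical org URL
    "username": "my_user",
    "password": "my_password",
}
gis = getArcGISConnection(securityinfo)
print(gis.users.me.username)  # quick sanity check on the signed-in account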
def create_route_for_courier(obj):
    # Geocode the restaurant, then the customer (preferring the order's own
    # delivery address when one was provided)
    restaurant_location = geolocator.geocode(obj.food.restaurant.address)
    if obj.order_holder.address:
        customer_location = geolocator.geocode(obj.order_holder.address)
    else:
        customer_location = geolocator.geocode(obj.order_holder.customer.address)
    stops = f'{restaurant_location.latitude},{restaurant_location.longitude};' \
            f'{customer_location.latitude},{customer_location.longitude}'
    api_key = os.environ.get('ARCGIS_KEY')
    portal = arcgis.GIS("https://www.arcgis.com", api_key=api_key)
    route = arcgis.network.RouteLayer(
        portal.properties.helperServices.route.url, gis=portal)
    result = route.solve(stops=stops,
                         start_time="now",
                         return_directions=True,
                         directions_language="en")
    return result
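# Hedged sketch of consuming the solve() result. RouteLayer.solve() returns the
# route service's JSON response as a dict; the keys below ("directions",
# "features", "attributes", "text") follow the documented route-solve response,
# but treat them as an assumption and inspect the result for your service
# version. `order` is a hypothetical order object.
result = create_route_for_courier(order)
for direction in result.get("directions", []):
    for step in direction.get("features", []):
        print(step["attributes"]["text"])  # turn-by-turn instruction text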
#######
#
# Esri GIS Tech 2018
# Python Clone_items Example
#
# Author: AVZ
# Esri Nederland (c) 2018
#
import arcgis
import config

gis = arcgis.GIS("http://arcgis.com", config.username, config.password)
print("Connected to arcgis.com")

# Create ContentManager object for manipulating ArcGIS content
contentManager = arcgis.gis.ContentManager(gis)
print("contentManager created")

# Look for ArcGIS items that will be cloned
itemsToClone = contentManager.search("<Search Parameter>")  # Search parameter
print("itemsToClone")
print("=================")
print(itemsToClone)

# Print which items were cloned
print("Cloned items")
print("=================")
print(contentManager.clone_items(itemsToClone, folder="<Your folderName>"))

print("Python script completed successfully")
def mainFunction():  # Add parameters sent to the script here e.g. (var1 is 1st parameter,var2 is 2nd parameter,var3 is 3rd parameter)
    try:
        # --------------------------------------- Start of code --------------------------------------- #
        printMessage("Creating extent feature class for queries - " + os.path.join(arcpy.env.scratchGDB, "Extents") + "...", "info")
        arcpy.CreateFishnet_management(os.path.join(arcpy.env.scratchGDB, "Extents"),
                                       extent.split(',')[0] + " " + extent.split(',')[1],
                                       extent.split(',')[0] + " " + str(float(extent.split(',')[1]) + 5),
                                       "", "", "5", "5",
                                       extent.split(',')[2] + " " + extent.split(',')[3],
                                       "NO_LABELS", "", "POLYGON")

        propertyIDsAdded = []
        # Check if the feature class exists
        if arcpy.Exists(outputFeatureClass):
            printMessage("Feature class already exists - " + outputFeatureClass + "...", "info")
            # Search the existing feature class
            with arcpy.da.SearchCursor(outputFeatureClass, "ID") as rows:
                for row in rows:
                    # Add to property IDs added array
                    propertyIDsAdded.append(row[0])
        else:
            printMessage("Creating output feature class - " + outputFeatureClass + "...", "info")
            arcpy.CreateFeatureclass_management(os.path.dirname(outputFeatureClass),
                                                os.path.basename(outputFeatureClass),
                                                "Point", "", "DISABLED", "DISABLED",
                                                "GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]];-400 -400 1000000000;-100000 10000;-100000 10000;8.98315284119522E-09;0.001;0.001;IsHighPrecision",
                                                "", "0", "0", "0")
            arcpy.AddField_management(outputFeatureClass, "ID", "TEXT", "", "", "255", "", "NULLABLE", "NON_REQUIRED", "")
            arcpy.AddField_management(outputFeatureClass, "Address", "TEXT", "", "", "255", "", "NULLABLE", "NON_REQUIRED", "")
            arcpy.AddField_management(outputFeatureClass, "Bedrooms", "SHORT", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
            arcpy.AddField_management(outputFeatureClass, "Bathrooms", "SHORT", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
            arcpy.AddField_management(outputFeatureClass, "SaleDate", "DATE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
            arcpy.AddField_management(outputFeatureClass, "SalePrice", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
            arcpy.AddField_management(outputFeatureClass, "DecadeBuilt", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
            arcpy.AddField_management(outputFeatureClass, "FloorArea", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
            arcpy.AddField_management(outputFeatureClass, "LandArea", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
            arcpy.AddField_management(outputFeatureClass, "CapitalValue", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
            arcpy.AddField_management(outputFeatureClass, "EstimatedValue", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
            arcpy.AddField_management(outputFeatureClass, "EstimatedRent", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")

        with arcpy.da.SearchCursor(os.path.join(arcpy.env.scratchGDB, "Extents"), "SHAPE@") as rows:
            for row in rows:
                # Setup the parameters for the web request
                parameters = urllib.parse.urlencode({
                    'limit': '5000',
                    'just_sold': 'true',
                    'for_sale': 'false',
                    'sale_min': '0',
                    'sale_max': '0',
                    'num_bathrooms': '0',
                    'num_bedrooms': '0',
                    'display_rentals': 'false',
                    'for_rent': 'false',
                    'rent_bathrooms': '0',
                    'rent_bedrooms': '0',
                    'rent_max': '0',
                    'rent_min': '0',
                    'off_market': 'false',
                    'off_market_bathrooms': '0',
                    'off_market_bedrooms': '0',
                    'off_market_max': '0',
                    'off_market_min': '0',
                    'use_expanded_bounding_box': 'true',
                    'nw_lat': str(row[0].extent.YMax),   # YMax
                    'nw_long': str(row[0].extent.XMin),  # XMin
                    'se_lat': str(row[0].extent.YMin),   # YMin
                    'se_long': str(row[0].extent.XMax)   # XMax
                })
                queryString = parameters.encode('utf-8')

                # Query web service
                printMessage("Querying web service - " + propertyMapWebService + " at extent - " + str(row[0].extent.XMin) + "," + str(row[0].extent.YMin) + "," + str(row[0].extent.XMax) + "," + str(row[0].extent.YMax) + "...", "info")
                response = json.loads(urllib.request.urlopen(propertyMapWebService + "?" + parameters).read())

                # Download data
                printMessage("Downloading data - " + os.path.join(tempfile.gettempdir(), "Download.json") + "...", "info")
                with open(os.path.join(tempfile.gettempdir(), "Download.json"), 'w') as downloadFile:
                    downloadFile.write(json.dumps(response))

                # Open data file
                printMessage("Reading downloaded data...", "info")
                propertyMapWebServicePropertyIDsAdded = []
                with open(os.path.join(tempfile.gettempdir(), "Download.json")) as jsonFile:
                    jsonData = json.load(jsonFile)
                    printMessage("Processing " + str(len(jsonData["map_items"])) + " records (Duplicate property IDs will not be added)...", "info")
                    cursor = arcpy.da.InsertCursor(outputFeatureClass, ["ID", "Address", "Bedrooms", "Bathrooms", "SaleDate", "SalePrice", "DecadeBuilt", "FloorArea", "LandArea", "CapitalValue", "EstimatedValue", "EstimatedRent", "SHAPE@XY"])
                    # For each property returned
                    for record in jsonData["map_items"]:
                        # Get location geometry
                        point = (record["point"]["long"], record["point"]["lat"])
                        # If the property has not been added yet
                        if (record["id"] not in propertyIDsAdded):
                            # Query the property card
                            address, bedrooms, bathrooms, saleDate, salePrice = queryPropertyCard(propertyCardWebService, record["url"])
                            # Query the property details
                            decadeBuilt, floorArea, landArea, capitalValue, estimatedValue, estimatedRent = queryPropertyDetails(propertyDetailsWebService, record["id"])
                            # Insert record into feature class
                            cursor.insertRow([record["id"], address, bedrooms, bathrooms, saleDate, salePrice, decadeBuilt, floorArea, landArea, capitalValue, estimatedValue, estimatedRent, point])
                            # Add to property IDs added array
                            propertyIDsAdded.append(record["id"])
                            propertyMapWebServicePropertyIDsAdded.append(record["id"])
                    printMessage("Added " + str(len(propertyMapWebServicePropertyIDsAdded)) + " records...", "info")

        printMessage("Adding and calculating extra fields - " + outputFeatureClass + "...", "info")
        arcpy.AddField_management(outputFeatureClass, "EstimateGrossYield", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.AddField_management(outputFeatureClass, "SalePercentAboveCV", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.CalculateField_management(outputFeatureClass, "EstimateGrossYield", "((!EstimatedRent!*52)/ !EstimatedValue!)*100", "PYTHON_9.3", "")
        arcpy.CalculateField_management(outputFeatureClass, "SalePercentAboveCV", "((!SalePrice!-!CapitalValue!)/ !CapitalValue!)*100", "PYTHON_9.3", "")
        resultCount = arcpy.GetCount_management(outputFeatureClass)
        printMessage("Number of records in " + outputFeatureClass + " - " + str(resultCount[0]) + "...", "info")

        # Setup a temporary folder
        printMessage("Creating temporary folder...", "info")
        tempFolder = arcpy.CreateFolder_management(arcpy.env.scratchFolder, "WebData-" + str(uuid.uuid1()))
        # Copy FGDB to this folder
        arcpy.Copy_management(arcpy.Describe(outputFeatureClass).path,
                              os.path.join(str(tempFolder), arcpy.Describe(outputFeatureClass).path.split(".")[0].split("\\")[-1] + ".gdb"),
                              "Workspace")
        # Zip up FGDB
        printMessage("Zipping up data - " + os.path.join(arcpy.env.scratchFolder, arcpy.Describe(outputFeatureClass).path.split(".")[0].split("\\")[-1] + ".zip") + "...", "info")
        zippedFolder = zipfile.ZipFile(os.path.join(arcpy.env.scratchFolder, arcpy.Describe(outputFeatureClass).path.split(".")[0].split("\\")[-1] + ".zip"), "w", allowZip64=True)
        # Zip up the geodatabase
        root_len = len(os.path.abspath(str(tempFolder)))
        # For each of the directories in the folder
        for root, dirs, files in os.walk(str(tempFolder)):
            archive_root = os.path.abspath(root)[root_len:]
            # For each file
            for f in files:
                fullpath = os.path.join(root, f)
                archive_name = os.path.join(archive_root, f)
                zippedFolder.write(fullpath, archive_name)
        # Close zip file
        zippedFolder.close()

        printMessage("Connecting to GIS Portal - " + portalURL + "...", "info")
        gisPortal = arcgis.GIS(url=portalURL, username=portalUser, password=portalPassword, verify_cert=False)
        # If portal ID provided
        if (portalItemFGDBID):
            # Get the portal item
            item = gisPortal.content.get(portalItemFGDBID)
            # Update the FGDB in portal
            printMessage("Uploading file geodatabase to portal - " + portalItemFGDBID + "...", "info")
            item.update({"title": portalItemTitle,
                         "snippet": portalItemSummary,
                         "description": portalItemDescription,
                         "accessInformation": portalItemCredits,
                         "tags": portalItemTitle},
                        os.path.join(arcpy.env.scratchFolder, arcpy.Describe(outputFeatureClass).path.split(".")[0].split("\\")[-1] + ".zip"))
            printMessage("Publishing feature service...", "info")
            serviceItem = item.publish()
            printMessage("Feature service has been published - " + serviceItem.id + "...", "info")
        else:
            # Upload the FGDB to portal
            printMessage("Uploading file geodatabase to portal...", "info")
            item = gisPortal.content.add({"title": portalItemTitle,
                                          "snippet": portalItemSummary,
                                          "description": portalItemDescription,
                                          "accessInformation": portalItemCredits,
                                          "tags": portalItemTitle,
                                          "type": "File Geodatabase"},
                                         os.path.join(arcpy.env.scratchFolder, arcpy.Describe(outputFeatureClass).path.split(".")[0].split("\\")[-1] + ".zip"))
            printMessage("File geodatabase uploaded - " + item.id + "...", "info")
            printMessage("Publishing feature service...", "info")
            serviceItem = item.publish()
            printMessage("Feature service has been published - " + serviceItem.id + "...", "info")
        # --------------------------------------- End of code --------------------------------------- #
        # If called from ArcGIS GP tool
        if __name__ == '__main__':
            # Return the output if there is any
            if output:
                # If using ArcPy
                if (useArcPy == "true"):
                    arcpy.SetParameter(1, output)
                # ArcGIS desktop not installed
                else:
                    return output
        # Otherwise return the result
        else:
            # Return the output if there is any
            if output:
                return output
        # Logging
        if (enableLogging == "true"):
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logMessage.flush()
            logMessage.close()
            logger.handlers = []
    # If error
    except Exception as e:
        # Build and show the error message
        # errorMessage = arcpy.GetMessages(2)
        errorMessage = ""
        # If many arguments
        if (e.args):
            for i in range(len(e.args)):
                if (i == 0):
                    errorMessage = str(e.args[i]).encode('utf-8').decode('utf-8')
                else:
                    errorMessage = errorMessage + " " + str(e.args[i]).encode('utf-8').decode('utf-8')
        # Else just one argument
        else:
            errorMessage = e
        printMessage(errorMessage, "error")
        # Logging
        if (enableLogging == "true"):
            # Log error
            logger.error(errorMessage)
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logMessage.flush()
            logMessage.close()
            logger.handlers = []
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage, None)
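# printMessage() is called throughout these scripts but never defined in this
# section. A plausible minimal implementation, assuming the common pattern of
# mirroring output to the console and, when available, to the ArcGIS
# geoprocessing message stream; treat it as a sketch, not the original helper.
def printMessage(message, messageType):
    print(message)
    try:
        import arcpy
        if messageType == "error":
            arcpy.AddError(message)
        elif messageType == "warning":
            arcpy.AddWarning(message)
        else:
            arcpy.AddMessage(message)
    except ImportError:
        pass  # arcpy not installed; console output only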
import arcgis
from datetime import date
from dotenv import find_dotenv, load_dotenv

load_dotenv(find_dotenv())

gis_user = "******"
gis_password = "******"
agoLogin = arcgis.GIS(url=None, username=gis_user, password=gis_password)

meterDate = input("Enter the date of the Meter Run (XX/XX/XXXX): ")
month = meterDate[:2]
day = meterDate[3:5]
year = meterDate[6:10]
if int(month) not in list(range(1, 13)):
    raise Exception("Invalid Month!")
if int(day) not in list(range(1, 32)):
    raise Exception("Invalid Day!")
try:
    int(year)
except ValueError:
    raise Exception("Invalid Year!")

itemToDownload = agoLogin.content.get("22af1b6e3ff04c0497fc12b57692b0e8")
exportLoc = r"path to export location"
itemExportName = "South Meter Route " + month + "_" + day + "_" + year
exportParameters = {
    "layers": [{
def mainFunction(
    portalURL, portalUser, portalPassword, currentRole, currentLevel, newRole, newLevel
):  # Add parameters sent to the script here e.g. (var1 is 1st parameter,var2 is 2nd parameter,var3 is 3rd parameter)
    try:
        # --------------------------------------- Start of code --------------------------------------- #
        # Connect to GIS portal
        printMessage("Connecting to GIS Portal - " + portalURL + "...", "info")
        gisPortal = arcgis.GIS(url=portalURL, username=portalUser, password=portalPassword, verify_cert=False)

        # Get the role IDs
        currentRoleID = getRoleId(gisPortal, currentRole)
        newRoleID = getRoleId(gisPortal, newRole)

        # If the current role exists
        if currentRoleID:
            printMessage("Retrieving all users with the role " + currentRole + "...", "info")
            # Get all users with this role
            users = [
                user for user in gisPortal.users.search(max_users=10000)
                if user.roleId == currentRoleID
            ]
            printMessage("There are " + str(len(users)) + " users...", "info")
            if newRoleID:
                # For each user
                for user in users:
                    # If the level is changing and the user's level does not match
                    if ((currentLevel != newLevel) and (user.level != newLevel)):
                        printMessage("Changing " + user.username + " from level " + currentLevel + " to " + newLevel, "info")
                        user.update_level(newLevel)
                    printMessage("Changing " + user.username + " from " + currentRole + " to " + newRole, "info")
                    user.update_role(newRoleID)
            else:
                printMessage("The role " + newRole + " does not exist in this site...", "error")
        else:
            printMessage("The role " + currentRole + " does not exist in this site...", "error")
        # --------------------------------------- End of code --------------------------------------- #
        # If called from ArcGIS GP tool
        if __name__ == '__main__':
            # Return the output if there is any
            if output:
                # If using ArcPy
                if (useArcPy == "true"):
                    arcpy.SetParameter(1, output)
                # ArcGIS desktop not installed
                else:
                    return output
        # Otherwise return the result
        else:
            # Return the output if there is any
            if output:
                return output
        # Logging
        if (enableLogging == "true"):
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logMessage.flush()
            logMessage.close()
            logger.handlers = []
    # If error
    except Exception as e:
        errorMessage = ""
        # Build and show the error message
        # If many arguments
        if (e.args):
            for i in range(len(e.args)):
                if (i == 0):
                    errorMessage = str(e.args[i]).encode('utf-8').decode('utf-8')
                else:
                    errorMessage = errorMessage + " " + str(e.args[i]).encode('utf-8').decode('utf-8')
        # Else just one argument
        else:
            errorMessage = e
        printMessage(errorMessage, "error")
        # Logging
        if (enableLogging == "true"):
            # Log error
            logger.error(errorMessage)
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logMessage.flush()
            logMessage.close()
            logger.handlers = []
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage)
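# getRoleId() is referenced above but not defined in this section. A minimal
# sketch of what it likely does, using the arcgis RoleManager; the helper name
# and case-insensitive matching are assumptions.
def getRoleId(gisPortal, roleName):
    # Look up a custom role by name and return its ID, or None if absent
    for role in gisPortal.users.roles.all(max_roles=1000):
        if role.name.lower() == roleName.lower():
            return role.role_id
    return None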
def mainFunction(siteURL, adminUser, adminPassword, servicesFolder, serviceName, serviceAction):  # Add parameters sent to the script here e.g. (var1 is 1st parameter,var2 is 2nd parameter,var3 is 3rd parameter)
    try:
        # --------------------------------------- Start of code --------------------------------------- #
        # Connect to GIS site
        printMessage("Connecting to GIS site - " + siteURL + "...", "info")
        gis = arcgis.GIS(url=siteURL, username=adminUser, password=adminPassword, verify_cert=False)

        # Get a list of servers in the site
        gisServers = gis.admin.servers.list()

        # If a service name is set
        if (serviceName):
            # Set GIS service found to false
            gisServiceFound = False
        else:
            # Set GIS service found to true
            gisServiceFound = True

        # For each server
        for gisServer in gisServers:
            # If a folder is set
            if (servicesFolder):
                # If the folder does not exist
                if servicesFolder not in gisServer.services.folders:
                    printMessage("Services folder does not exist: " + servicesFolder, "error")

            # For each service in the root directory
            for gisService in gisServer.services.list():
                # If no services folder set (Start/stop services in the root directory)
                if (servicesFolder is None) or (servicesFolder == ""):
                    # If no service name set (Start/stop all services)
                    if (serviceName is None) or (serviceName == ""):
                        # Function - Start or stop the service
                        startStopService(gisService, serviceAction)
                    # Service name set (Just start/stop that service)
                    else:
                        # If the service name and type is equal to the one specified
                        if (serviceName.lower() == gisService.properties.serviceName.lower() + "." + gisService.properties.type.lower()):
                            # Function - Start or stop the service
                            startStopService(gisService, serviceAction)
                            # Set GIS service found to true
                            gisServiceFound = True

            # If a folder is set (guard added so .lower() is never called on None)
            if (servicesFolder):
                # For each folder
                for gisFolder in gisServer.services.folders:
                    # If the services folder is equal to the one specified
                    if (servicesFolder.lower() == gisFolder.lower()):
                        # For each service in the folder
                        for gisService in gisServer.services.list(folder=gisFolder):
                            # If no service name set (Start/stop all services)
                            if (serviceName is None) or (serviceName == ""):
                                # Function - Start or stop the service
                                startStopService(gisService, serviceAction)
                            # Service name set (Just start/stop that service)
                            else:
                                # If the service name and type is equal to the one specified
                                if (serviceName.lower() == gisService.properties.serviceName.lower() + "." + gisService.properties.type.lower()):
                                    # Function - Start or stop the service
                                    startStopService(gisService, serviceAction)
                                    # Set GIS service found to true
                                    gisServiceFound = True

        # If a service name was set but not found
        if (gisServiceFound == False):
            printMessage("Service does not exist: " + serviceName, "error")
        # --------------------------------------- End of code --------------------------------------- #
        # If called from ArcGIS GP tool
        if __name__ == '__main__':
            # Return the output if there is any
            if output:
                # If using ArcPy
                if (useArcPy == "true"):
                    arcpy.SetParameter(1, output)
                # ArcGIS desktop not installed
                else:
                    return output
        # Otherwise return the result
        else:
            # Return the output if there is any
            if output:
                return output
        printMessage("Process ended...", "info")
        if (enableLogTable == "true"):
            # Log end message to table
            currentDate = datetime.datetime.utcnow().strftime("%d/%m/%Y %H:%M:%S")
            logToTable({"Date": currentDate, "Process": os.path.basename(__file__).replace(".py", ""), "Status": "Success", "Organisation": None, "DataName": None, "Message": "Process ended...", "RecordCount": None})
    # If error
    except Exception as e:
        errorMessage = ""
        # Build and show the error message
        # If many arguments
        if (e.args):
            for i in range(len(e.args)):
                if (i == 0):
                    errorMessage = str(e.args[i]).encode('utf-8').decode('utf-8')
                else:
                    errorMessage = errorMessage + " " + str(e.args[i]).encode('utf-8').decode('utf-8')
        # Else just one argument
        else:
            errorMessage = e
        printMessage(errorMessage, "error")
        printMessage("Process ended...", "info")
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage, None)
        if (enableLogTable == "true"):
            # Log end message to table
            currentDate = datetime.datetime.utcnow().strftime("%d/%m/%Y %H:%M:%S")
            logToTable({"Date": currentDate, "Process": os.path.basename(__file__).replace(".py", ""), "Status": "Fail", "Organisation": None, "DataName": None, "Message": errorMessage, "RecordCount": None})
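# startStopService() is used above but not defined in this section. A minimal
# sketch assuming the arcgis server admin Service API, where services expose
# start() and stop(); the helper name and message text are assumptions.
def startStopService(gisService, serviceAction):
    serviceLabel = gisService.properties.serviceName + "." + gisService.properties.type
    if serviceAction.lower() == "start":
        printMessage("Starting service: " + serviceLabel, "info")
        gisService.start()
    elif serviceAction.lower() == "stop":
        printMessage("Stopping service: " + serviceLabel, "info")
        gisService.stop()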
def mainFunction(
    sourcePortalURL, sourcePortalUser, sourcePortalPassword, targetPortalURL, targetPortalUser, targetPortalPassword
):  # Add parameters sent to the script here e.g. (var1 is 1st parameter,var2 is 2nd parameter,var3 is 3rd parameter)
    try:
        # --------------------------------------- Start of code --------------------------------------- #
        # Connect to GIS portals
        printMessage("Connecting to Source GIS Portal - " + sourcePortalURL + "...", "info")
        sourceGISPortal = arcgis.GIS(url=sourcePortalURL, username=sourcePortalUser, password=sourcePortalPassword)
        printMessage("Connecting to Target GIS Portal - " + targetPortalURL + "...", "info")
        targetGISPortal = arcgis.GIS(url=targetPortalURL, username=targetPortalUser, password=targetPortalPassword)

        # Create a list of system accounts that should not be modified
        systemUsers = [
            'system_publisher', 'esri_nav', 'esri_livingatlas',
            'esri_boundaries', 'esri_demographics'
        ]

        # Get a list of all users
        sourceUsers = sourceGISPortal.users.search(query=None, sort_field='username', sort_order='asc', max_users=1000000, outside_org=False)
        targetUsers = targetGISPortal.users.search(query=None, sort_field='username', sort_order='asc', max_users=1000000, outside_org=False)

        # Create a list of groups to not copy
        groupsIgnore = []
        # Get a list of all groups
        sourceGroups = sourceGISPortal.groups.search(query='', sort_field='title', sort_order='asc', max_groups=1000000, outside_org=False, categories=None)
        targetGroups = targetGISPortal.groups.search(query='', sort_field='title', sort_order='asc', max_groups=1000000, outside_org=False, categories=None)

        # Check if groups are already present in target portal
        for sourceGroup in sourceGroups:
            for targetGroup in targetGroups:
                if sourceGroup.title == targetGroup.title:
                    printMessage("Group already exists in target portal - " + targetGroup.title + "...", "warning")
                    groupsIgnore.append(targetGroup.title)

        # Copy all the groups from source to target
        for group in sourceGroups:
            # If not ignoring the group
            if group.title not in groupsIgnore:
                if not group.owner in systemUsers:
                    printMessage("Copying group - " + group.title + "...", "info")
                    newGroup = copyGroup(sourceGISPortal, targetGISPortal, targetPortalUser, targetUsers, group)
                    printMessage("New group created - " + newGroup.groupid + "...", "info")

        # Get a list of all items in the portal
        sourceItems = {}
        for user in sourceUsers:
            num_items = 0
            num_folders = 0
            user_content = user.items()
            # Get item ids from root folder first
            for item in user_content:
                num_items += 1
                sourceItems[item.itemid] = item
            # Get item ids from each of the folders next
            folders = user.folders
            for folder in folders:
                num_folders += 1
                folder_items = user.items(folder=folder['title'])
                for item in folder_items:
                    num_items += 1
                    sourceItems[item.itemid] = item

        # Get the group sharing information for each of the items
        for group in sourceGroups:
            # Iterate through each item shared to the source group
            for group_item in group.content():
                # Get the item (use .get() so items owned outside the collected
                # set return None instead of raising KeyError)
                item = sourceItems.get(group_item.itemid)
                if item is not None:
                    if not 'groups' in item:
                        item['groups'] = []
                    # Assign the target portal's corresponding group's name
                    item['groups'].append(group['title'])

        # Copy all content from source to target
        sourceTargetItems = {}
        for key in sourceItems.keys():
            sourceItem = sourceItems[key]
            printMessage("Copying {} \tfor\t {}".format(sourceItem.title, sourceItem.owner), "info")
            # Copy the item
            targetItem = copyItem(targetGISPortal, sourceItem)
            if targetItem:
                sourceTargetItems[key] = targetItem.itemid
            else:
                sourceTargetItems[key] = None
        # --------------------------------------- End of code --------------------------------------- #
        # If called from ArcGIS GP tool
        if __name__ == '__main__':
            # Return the output if there is any
            if output:
                # If using ArcPy
                if (useArcPy == "true"):
                    arcpy.SetParameter(1, output)
                # ArcGIS desktop not installed
                else:
                    return output
        # Otherwise return the result
        else:
            # Return the output if there is any
            if output:
                return output
        # Logging
        if (enableLogging == "true"):
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logMessage.flush()
            logMessage.close()
            logger.handlers = []
    # If error
    except Exception as e:
        # Build and show the error message
        # errorMessage = arcpy.GetMessages(2)
        errorMessage = ""
        # If many arguments
        if (e.args):
            for i in range(len(e.args)):
                if (i == 0):
                    errorMessage = str(e.args[i]).encode('utf-8').decode('utf-8')
                else:
                    errorMessage = errorMessage + " " + str(e.args[i]).encode('utf-8').decode('utf-8')
        # Else just one argument
        else:
            errorMessage = e
        printMessage(errorMessage, "error")
        # Logging
        if (enableLogging == "true"):
            # Log error
            logger.error(errorMessage)
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logMessage.flush()
            logMessage.close()
            logger.handlers = []
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage)
def mainFunction(
    portalURL, portalUser, portalPassword, inactiveUsersCSV, inactiveUsersID, userPermissionsCSV, userPermissionsID, userContentCSV, userContentID
):  # Add parameters sent to the script here e.g. (var1 is 1st parameter,var2 is 2nd parameter,var3 is 3rd parameter)
    try:
        # --------------------------------------- Start of code --------------------------------------- #
        # Connect to GIS portal
        printMessage("Connecting to GIS Portal - " + portalURL + "...", "info")
        gisPortal = arcgis.GIS(url=portalURL, username=portalUser, password=portalPassword, verify_cert=False)

        # Get a list of all users
        users = gisPortal.users.search(query=None, sort_field='username', sort_order='asc', max_users=1000000, outside_org=False)
        # Get a list of all groups
        groups = gisPortal.groups.search(query='', sort_field='title', sort_order='asc', max_groups=1000000, outside_org=False, categories=None)

        # Create inactive users report
        inactiveUsersReport(gisPortal, inactiveUsersCSV, users)
        # If portal ID provided
        if (inactiveUsersID):
            printMessage("Updating report in portal - " + inactiveUsersID + "...", "info")
            # Get the portal item
            inactiveUsersItem = gisPortal.content.get(inactiveUsersID)
            # Update the CSV in portal
            inactiveUsersItem.update(None, inactiveUsersCSV)

        # Create user permissions report
        userPermissionsReport(gisPortal, userPermissionsCSV, users, groups)
        # If portal ID provided
        if (userPermissionsID):
            printMessage("Updating report in portal - " + userPermissionsID + "...", "info")
            # Get the portal item
            userPermissionsItem = gisPortal.content.get(userPermissionsID)
            # Update the CSV in portal
            userPermissionsItem.update(None, userPermissionsCSV)

        # Create user content report
        userContentReport(gisPortal, userContentCSV, users)
        # If portal ID provided
        if (userContentID):
            printMessage("Updating report in portal - " + userContentID + "...", "info")
            # Get the portal item
            userContentItem = gisPortal.content.get(userContentID)
            # Update the CSV in portal
            userContentItem.update(None, userContentCSV)
        # --------------------------------------- End of code --------------------------------------- #
        # If called from ArcGIS GP tool
        if __name__ == '__main__':
            # Return the output if there is any
            if output:
                # If using ArcPy
                if (useArcPy == "true"):
                    arcpy.SetParameter(1, output)
                # ArcGIS desktop not installed
                else:
                    return output
        # Otherwise return the result
        else:
            # Return the output if there is any
            if output:
                return output
        # Logging
        if (enableLogging == "true"):
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logMessage.flush()
            logMessage.close()
            logger.handlers = []
    # If error
    except Exception as e:
        errorMessage = ""
        # Build and show the error message
        # If many arguments
        if (e.args):
            for i in range(len(e.args)):
                if (i == 0):
                    errorMessage = str(e.args[i]).encode('utf-8').decode('utf-8')
                else:
                    errorMessage = errorMessage + " " + str(e.args[i]).encode('utf-8').decode('utf-8')
        # Else just one argument
        else:
            errorMessage = e
        printMessage(errorMessage, "error")
        # Logging
        if (enableLogging == "true"):
            # Log error
            logger.error(errorMessage)
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logMessage.flush()
            logMessage.close()
            logger.handlers = []
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage)
def main():
    # Script version number
    print(f"\nRunning version: {version.version_str}")

    print("\n=================================================================")
    print("Loading ini file")
    print("=================================================================")
    # The root directory of the script
    root_dir = os.path.dirname(os.path.abspath(__file__))
    print(f"Project Root Directory: {root_dir}\n")

    # Load config ini file
    print("Loading input items from configuration file\n")
    # Routines for handling the loading and parsing of the config ini file
    #
    # A configuration file consists of sections, led by a "[section]" header,
    # and followed by "name: value" entries, with continuations and such in
    # the style of RFC 822.
    config_ini_manager = ConfigManager(root=root_dir, file_name="config.ini")
    # Items we will analyze
    input_items = config_ini_manager.get_config_data(config_type="items")

    print("\n=================================================================")
    print("Authenticate GIS profile")
    print("=================================================================")
    # TODO: Not the best way at all to get the profile property from the config file
    gis_profile = input_items[0]["profile"]
    # initialize GIS object
    gis = arcgis.GIS(profile=gis_profile)
    #gis = arcgis.GIS()
    # just check if there is a token to determine if it's a named user or anonymous
    if gis._con.token is None:
        print("Anonymous sign-in")
    else:
        # Eye candy
        # Get the installation properties and print to stdout
        # initialize User object
        user = gis.users.get(gis_profile)
        install_info = arcpy.GetInstallInfo()
        user_sys = User(user=user, install_info=install_info)
        user_sys.greeting()

    print("\n=================================================================")
    print("Hydrating input data model from config file parameters")
    print("=================================================================")
    # Data model
    data_model_dict = {}
    # Number of items we are working with (derived from the config ini)
    item_count = len(input_items)
    if item_count < 1:
        raise ItemCountNotInRangeError(item_count)
    else:
        print(f"There are {item_count} items")
        for input_item in input_items:
            print(f"{input_item['id']}")
            data_model_dict.update({
                input_item["id"]: {
                    **input_item,
                    **{
                        "token": gis._con.token
                    }
                }
            })

    print("\n=================================================================")
    print("Setting up project and checking folders and directories")
    print("=================================================================")
    # Load status codes
    # TODO Move filename to config
    status_code_config_path = os.path.realpath(root_dir + r"\statusCodes.json")
    status_code_json_exist = FileManager.check_file_exist_by_pathlib(path=status_code_config_path)
    if status_code_json_exist is False:
        # TODO: At this point we really cannot move forward
        raise InputFileNotFoundError(status_code_config_path)
    else:
        status_codes_data_model = FileManager.open_file(path=status_code_config_path)

    # Load admin comments
    print("\n=================================================================")
    print("Checking/Creating comments folder")
    print("=================================================================")
    # TODO Move filename to config
    admin_comments_file_path = os.path.realpath(root_dir + r"\comments.json")
    admin_comments_file_exist = FileManager.check_file_exist_by_pathlib(path=admin_comments_file_path)
    if admin_comments_file_exist is False:
        # TODO: At this point we really cannot move forward
        raise InputFileNotFoundError(admin_comments_file_path)
    else:
        admin_comments_data_model = FileManager.open_file(path=admin_comments_file_path)

    # retrieve the alf statuses
    print("\n=================================================================")
    print("Retrieving and Processing Active Live Feed Processed files")
    print("=================================================================")
    alf_processor_queries = list(
        map(QueryEngine.prepare_alfp_query_params, data_model_dict.items()))
    alf_processor_response = QueryEngine.get_alfp_content(alf_processor_queries)
    alfp_content = list(
        map(QueryEngine.process_alfp_response, alf_processor_response))
    alfp_dict = {}
    for content in alfp_content:
        # check if the alfp content was successfully retrieved
        if content["success"]:
            unique_item_key = content["id"]
            alfp_dict.update({unique_item_key: content["content"]})
        else:
            print(f"ERROR: No ALFP data on record for {content['id']}")

    # Read in the previous status output file
    print("\n=================================================================")
    print("Loading status output from previous run")
    print("=================================================================")
    # TODO Move folder name to config
    # Directory where the output files are stored
    output_status_dir_path = os.path.realpath(root_dir + r"\output")
    # Create a new directory if it does not exists
    FileManager.create_new_folder(output_status_dir_path)
    # TODO Move filename to config
    # Build the path to status file.
    status_file = os.path.realpath(output_status_dir_path + r"\status.json")
    # Check file existence
    file_exist = FileManager.check_file_exist_by_pathlib(path=status_file)
    if file_exist:
        # The status' of all the items in the previous run
        previous_status_output = FileManager.open_file(path=status_file)["items"]
        # iterate through the items in the config file
        for key, value in data_model_dict.items():
            print(f"{key}")
            # iterate through the item output in the status file
            for ele in previous_status_output:
                # item ID
                status_file_key = ele["id"]
                # if the item in the config file is also in the previous run,
                # merge the output from the previous run to the data model
                if key == status_file_key:
                    merged_dict = {**ele, **value}
                    data_model_dict.update({key: merged_dict})
    else:
        # TODO What if the file does not exist?
        # Guard added: fall back to an empty previous-run status so the
        # update_rss_feed() check below can still run on a first execution
        previous_status_output = []

    # Historical "elapsed times" file directory
    # response time directory
    print("\n=================================================================")
    print("Checking/Creating response time data folder")
    print("=================================================================")
    # TODO Move folder name to config
    response_time_data_dir = os.path.realpath(root_dir + r"\ResponseTimeData")
    # Create a new directory if it does not exists
    FileManager.create_new_folder(file_path=response_time_data_dir)

    # Create a new directory to hold the rss feeds (if it does not exist)
    print("\n=================================================================")
    print("Checking/Creating RSS output folder and loading RSS template files")
    print("=================================================================")
    # TODO Move folder name to config
    rss_dir_path = os.path.realpath(root_dir + r"\rss")
    FileManager.create_new_folder(file_path=rss_dir_path)
    # Load RSS template
    # TODO Move filename to config
    rss_manager = RSSManager.RSS(root_dir + r"\rss_template.xml",
                                 root_dir + r"\rss_item_template.xml")

    # Event history
    print("\n=================================================================")
    print("Checking/Creating status event history folder")
    print("=================================================================")
    event_history_dir_path = os.path.realpath(root_dir + r"\event_history")
    FileManager.create_new_folder(file_path=event_history_dir_path)

    print("\n=================================================================")
    print("Current date and time")
    print("=================================================================")
    time_utils_response = TimeUtils.get_current_time_and_date()
    timestamp = time_utils_response["timestamp"]
    print(f"{time_utils_response['datetimeObj']}")

    print("\n=================================================================")
    print("Validating item's unique key and meta-data")
    print("=================================================================")
    data_model_dict = ServiceValidator.validate_items(gis=gis, data_model=data_model_dict)

    print("\n=================================================================")
    print("Validating services")
    print("=================================================================")
    data_model_dict = ServiceValidator.validate_services(data_model=data_model_dict)

    print("\n=================================================================")
    print("Validating layers")
    print("=================================================================")
    data_model_dict = ServiceValidator.validate_layers(data_model=data_model_dict)

    print("\n=================================================================")
    print("Retrieve usage statistics")
    print("=================================================================")
    data_model_dict = QueryEngine.get_usage_details(data_model=data_model_dict)

    print("\n=================================================================")
    print("Retrieve feature counts")
    print("=================================================================")
    data_model_dict = QueryEngine.get_feature_counts(data_model=data_model_dict)

    print("\n=================================================================")
    print("Analyze and process data")
    print("=================================================================")
    for key, value in data_model_dict.items():
        item_id = key
        agol_is_valid = True
        item_is_valid = value["itemIsValid"]
        service_response = value["serviceResponse"]
        service_is_valid = service_response["success"]
        layers_are_valid = value["allLayersAreValid"]
        print(f"{item_id}\t{value['title']}")
        print(f"ArcGIS Online accessible: {agol_is_valid}")
        print(f"Item valid: {item_is_valid}")
        print(f"Service valid: {service_is_valid}")
        print(f"All layers valid: {layers_are_valid}\n")

        print("-------- RETRY COUNT ---------")
        # Process Retry Count
        service_retry_count = QueryEngine.get_retry_count(service_response["retryCount"])
        print(f"Service Retry Count: {service_retry_count}")
        print("------------------------------\n")

        print("-------- ELAPSED TIME --------")
        # Retrieve the elapsed time of the query to the service (not the layers)
        service_elapsed_time = QueryEngine.get_service_elapsed_time(
            service_is_valid, service_response["response"])
        print(f"Service Elapsed Time: {service_elapsed_time}")
        # Retrieve the average elapsed time of layers for the current service (layers only)
        print("Layers Elapsed times (individual)")
        layers_elapsed_time = QueryEngine.get_layers_average_elapsed_time(
            layers_elapsed_times=value['serviceLayersElapsedTimes'])
        print(f"Layers Elapsed Time (average): {layers_elapsed_time}")
        # Sum up the elapsed time for the service and the layers divided by 2
        # We want the total elapsed time of the layers and the FS
        total_elapsed_time = (service_elapsed_time + layers_elapsed_time) / 2
        print(f"Total Elapsed Time average: {total_elapsed_time}")
        print("------------------------------\n\n")

        # Obtain the total elapsed time and counts
        # path to output file
        # This file contains the:
        #   item id
        #   elapsed time
        #   elapsed sums
        response_time_data_file_path = os.path.join(response_time_data_dir, item_id + "." + "json")
        # Check file existence.
        response_time_data_file_path_exist = FileManager.check_file_exist_by_pathlib(
            path=response_time_data_file_path)

        exclude_save = TimeUtils.is_now_excluded(value["exclude_time_ranges"],
                                                 value["exclude_days"],
                                                 value["exclude_specific_dates"],
                                                 timestamp)
        print(f"Exclude response time data from save: {exclude_save}")

        # Does the file exist
        if not response_time_data_file_path_exist:
            # If file does not exist then create it.
            FileManager.create_new_file(response_time_data_file_path)
            FileManager.set_file_permission(response_time_data_file_path)
            if not exclude_save:
                FileManager.save(data={
                    "id": item_id,
                    "elapsed_sums": total_elapsed_time,
                    "elapsed_count": 1
                }, path=response_time_data_file_path)
            # since it's our first entry, the average is the current elapsed time
            elapsed_times_average = total_elapsed_time
        else:
            # Retrieve the elapsed time DIVIDED by count
            print(f"Retrieving response time data from existing json file: {item_id}.json")
            response_time_data = FileManager.get_response_time_data(response_time_data_file_path)
            # total counts
            elapsed_times_count = response_time_data["elapsed_count"]
            print(f"Elapsed count (on file before update): {elapsed_times_count}")
            # sum of all times
            elapsed_times_sum = response_time_data["elapsed_sums"]
            print(f"Elapsed sums (on file before update): {elapsed_times_sum}")
            # calculated average
            elapsed_times_average = elapsed_times_sum / elapsed_times_count
            if not exclude_save:
                # update the response time data file
                FileManager.update_response_time_data(
                    path=response_time_data_file_path,
                    input_data={
                        "id": item_id,
                        "elapsed_count": elapsed_times_count + 1,
                        "elapsed_sums": elapsed_times_sum + total_elapsed_time
                    })
        print(f"Elapsed average: {elapsed_times_average}")

        # retrieve alfp details
        alfp_data = alfp_dict.get(item_id)
        if alfp_data is not None:
            # 10 digit Timestamp 'seconds since epoch' containing time of last
            # Successful Run (and Service update) when data was changed
            value.update({"lastUpdateTimestamp": alfp_data.get("lastUpdateTimestamp", 0)})
            # 10 digit Timestamp 'seconds since epoch' containing time of last
            # Failed run (or Service update failure)
            # feed_last_failure_timestamp = item["lastFailureTimestamp"]
            # 10 digit Timestamp 'seconds since epoch' containing time of last
            # run (having a Success, a Failure, or a No Action flag ('No Data Updates'))
            value.update({"lastRunTimestamp": alfp_data.get("lastRunTimestamp", 0)})
            # Average number of minutes between each successful run (or Service update)
            value.update({"avgUpdateIntervalMins": alfp_data.get("avgUpdateIntervalMins", 0)})
            # Average number of minutes between each run
            value.update({"avgFeedIntervalMins": alfp_data.get("avgFeedIntervalMins", 0)})
            value.update({"consecutiveFailures": alfp_data.get("consecutiveFailures", 0)})
            value.update({"alfpLastStatus": alfp_data["lastStatus"]["code"]})
        else:
            value.update({
                "lastUpdateTimestamp": 0,
                "lastRunTimestamp": 0,
                "avgUpdateIntervalMins": 0,
                "avgFeedIntervalMins": 0,
                "consecutiveFailures": 0,
                "alfpLastStatus": 0
            })

        # initialize the status code
        status_code = StatusManager.get_status_code("000", status_codes_data_model)

        if all([agol_is_valid, item_is_valid, service_is_valid, layers_are_valid]):
            print("AGOL, Item, Service checks normal")

            # 001 Check
            print("\nCHECKING 001")
            # Check elapsed time between now and the last updated time of the feed
            last_update_timestamp_diff = timestamp - value.get("lastUpdateTimestamp", timestamp)
            # Check elapsed time between now and the last run time of the feed
            last_run_timestamp_diff = timestamp - value["lastRunTimestamp"]
            # If the difference exceeds the average update interval by an interval of X, flag it
            last_update_timestamp_diff_minutes = last_update_timestamp_diff / 60
            print(f"Last update timestamp delta:\t{last_update_timestamp_diff_minutes} minutes")
            # Average number of minutes between each successful run (or Service update)
            avg_update_int_threshold = int(value["average_update_interval_factor"]) * value["avgUpdateIntervalMins"]
            print(f"Average update interval threshold: {avg_update_int_threshold}")
            if last_update_timestamp_diff_minutes > avg_update_int_threshold:
                status_code = StatusManager.get_status_code("001", status_codes_data_model)

            print("\nCHECKING 002")
            # 002 Check
            last_run_timestamp_diff_minutes = last_run_timestamp_diff / 60
            print(f"Last run timestamp delta:\t{last_run_timestamp_diff_minutes} minutes")
            # calculate the threshold (Average number of minutes between each run)
            avg_feed_int_threshold = int(value["average_feed_interval_factor"]) * value["avgFeedIntervalMins"]
            print(f"Average Feed Interval threshold: {avg_feed_int_threshold}")
            if last_run_timestamp_diff_minutes > avg_feed_int_threshold:
                status_code = StatusManager.get_status_code("002", status_codes_data_model)

            print("\nCHECKING 003")
            # 003 Check
            if value["alfpLastStatus"] == 2:
                if value["consecutiveFailures"] > int(value["consecutive_failures_threshold"]):
                    status_code = StatusManager.get_status_code("003", status_codes_data_model)

            print("\nCHECKING 004")
            # 004 Check
            if value["alfpLastStatus"] == 3:
                if value["consecutiveFailures"] > int(value["consecutive_failures_threshold"]):
                    status_code = StatusManager.get_status_code("004", status_codes_data_model)

            print("\nCHECKING 005")
            # 005 Check
            if value["alfpLastStatus"] == 1:
                if value["consecutiveFailures"] > int(value["consecutive_failures_threshold"]):
                    status_code = StatusManager.get_status_code("005", status_codes_data_model)

            print("\nCHECKING 006")
            # 006 Check
            if value["alfpLastStatus"] == -1:
                status_code = StatusManager.get_status_code("006", status_codes_data_model)

            print("\nCHECKING 100")
            # 100
            # Check retry count
            if service_retry_count > int(value["default_retry_count"]):
                status_code = StatusManager.get_status_code("100", status_codes_data_model)

            print("\nCHECKING 101")
            # 101
            # Check elapsed time
            # avg_elapsed_time_threshold = float(value["average_elapsed_time_factor"]) * float(elapsed_times_average)
            avg_elapsed_time_threshold = float(value["average_elapsed_time_factor"])
            if total_elapsed_time > avg_elapsed_time_threshold:
                status_code = StatusManager.get_status_code("101", status_codes_data_model)

            LoggingUtils.log_status_code_details(item_id, status_code)
        else:
            print("\nCHECKING 102, 201, 500, 501")
            # If we are at this point, then one or more of the Service states has failed
            #
            # The any() function returns True if any item in an iterable is true, otherwise it returns False.
            if any([agol_is_valid, item_is_valid, service_is_valid]):
                if service_is_valid:
                    print("Service | Success")
                    if item_is_valid:
                        print("Item | Success | AGOL must be down, then why is the item accessible?")
                    else:
                        # 102
                        status_code = StatusManager.get_status_code("102", status_codes_data_model)
                    # 201 Check
                    if layers_are_valid is not True:
                        status_code = StatusManager.get_status_code("201", status_codes_data_model)
                else:
                    # 500
                    if item_is_valid:
                        status_code = StatusManager.get_status_code("500", status_codes_data_model)
                    else:
                        print("Item | Fail")
                        # If ALL of the Service states are False, we have reached a critical failure in the system
                        status_code = StatusManager.get_status_code("501", status_codes_data_model)
            else:
                # If ALL of the Service states are False, we have reached a critical failure in the system
                status_code = StatusManager.get_status_code("501", status_codes_data_model)
            LoggingUtils.log_status_code_details(item_id, status_code)

        # update/add status code in the data model
        # Add the Admin comments (if any)
        # Add the last build time
        # Add the status code
        # Add the current run time of the script
        value.update({
            "comments": admin_comments_data_model.get(item_id, []),
            "lastBuildTime": time_utils_response["datetimeObj"].strftime("%a, %d %b %Y %H:%M:%S +0000"),
            "status": status_code,
            "timestamp": timestamp
        })

        # This file will hold a history of event changes
        current_events_file = os.path.realpath(event_history_dir_path + r"\status_history" + f"_{item_id}.json")
        # Check file existence
        status_history_file_exist = FileManager.check_file_exist_by_pathlib(path=current_events_file)

        # Build the path to RSS output file for the current item. This file is what the RSS reader reads.
        # There should be one output file for each service/item being monitored.
        rss_file_path = os.path.join(rss_dir_path, item_id + "." + value["rss_file_extension"])
        # Check if the output file already exist
        rss_file_exist = FileManager.check_file_exist_by_pathlib(path=rss_file_path)
        if rss_file_exist:
            print(f"RSS file exist: {rss_file_path}")
        else:
            EventsManager.create_history_file(input_data=value, events_file=current_events_file)
            value.update({
                "rss_items": rss_manager.build_item_nodes(input_data=value,
                                                          events_file=current_events_file)
            })
            # Update the RSS output file
            rss_manager.update_rss_contents(input_data=value, rss_file=rss_file_path)
            print(f"RSS file does not exist: {rss_file_path}")

        # If the file exists, check the status/comments between the item's previous status/code comment, and the
        # current status/code comment to determine if the existing RSS file should be updated.
        update_current_feed = StatusManager.update_rss_feed(
            previous_status_output=previous_status_output,
            item=value,
            status_codes_data_model=status_codes_data_model)

        # Check if we need to apply an update
        if update_current_feed:
            print("\nUpdate Required")
            print("---------- Events History --------")
            # This file will hold a history of event changes
            # current_events_file = os.path.realpath(event_history_dir_path + r"\status_history" + f"_{item_id}.json")
            # Check file existence
            # status_history_file_exist = FileManager.check_file_exist_by_pathlib(path=current_events_file)
            if status_history_file_exist:
                print(f"Checking events file: {current_events_file}")
                EventsManager.update_events_file(input_data=value, events_file=current_events_file)
            else:
                print(f"Creating events file: {current_events_file}")
                EventsManager.create_history_file(input_data=value, events_file=current_events_file)
            print("----------------------------------")

            print("\n---------- RSS Updates -----------")
            # Update the dictionary
            # rss_items is the placeholder in the main rss_template file
            value.update({
                "rss_items": rss_manager.build_item_nodes(input_data=value,
                                                          events_file=current_events_file)
            })
            # Update the RSS output file
            rss_manager.update_rss_contents(input_data=value, rss_file=rss_file_path)
            print("----------------------------------")

    print("\n=================================================================")
    print("Saving results")
    print(f"Output file path: {status_file}")
    print("=================================================================")
    # output file
    output_file = {"statusPreparedOn": timestamp, "items": []}
    # hydrate output file
    for key, value in data_model_dict.items():
        output_file["items"].append({
            "id": key,
            "title": value.get("title", value.get("missing_item_title")),
            "snippet": value.get("snippet", value.get("missing_item_snippet")),
            "comments": value.get("comments", ""),
            "lastUpdateTime": value.get("lastUpdateTimestamp", 0),
            "updateRate": value.get("avgUpdateIntervalMins", 0),
            "featureCount": value.get("featureCount", 0),
            "usage": value.get("usage"),
            "nextRunTimestamp": value.get("lastRunTimestamp", 0) + (value.get("avgFeedIntervalMins", 0) * 60),
            "status": {
                "code": value["status"]["code"]
            }
        })

    # Pretty print dictionary
    # If file does not exist then create it.
    # TODO Not correct
    if not file_exist:
        FileManager.create_new_file(status_file)
        FileManager.set_file_permission(status_file)
    FileManager.save(data=output_file, path=status_file)
    print("Script completed...")
import arcgis
#from getpass import getpass
#import tempfile
from iteration_utilities import unique_everseen
from iteration_utilities import duplicates

print("Starting deletion script")
print()

# Initialise the AGOL session
gis = arcgis.GIS("https://org.maps.arcgis.com", "adminuser", "pass")
users = gis.users.search('adminuser')
#print(users)
user = users[0]
print('Authenticated: ' + user.username)
print()

# Get list of all items in user's folder
#print("Adding all user item id's to list")
gdbs_to_delete = []
num_items = 0
num_gdb = 0
#user_content = user.items()
root_content = user.items(folder=None, max_items=400)

# Get item ids from root folder first
for item in root_content:
def main():
    args = cli_args()
    layer_name = args.layer_name
    logger.info(args)
    service_id = CONFIG["service_id"]
    layer_id = CONFIG["layers"][layer_name]["id"]
    segment_id_field = CONFIG["layers"][layer_name]["segment_id_field"]
    modified_date_field = CONFIG["layers"][layer_name]["modified_date_field"]
    gis = arcgis.GIS(url=URL, username=USERNAME, password=PASSWORD)
    service = gis.content.get(service_id)
    layer = service.layers[layer_id]
    date_filter = format_filter_date(args.date)
    logger.info(f"Getting {layer_name} features modified since {date_filter}")
    where = f"{modified_date_field} >= '{date_filter}' AND {segment_id_field} IS NOT NULL"  # noqa: E501
    features = layer.query(
        where=where,
        out_fields=["OBJECTID", modified_date_field, segment_id_field])
    logger.info(f"{len(features)} features to process")
    if not features:
        return
    all_segment_ids = []
    for feature in features:
        # replace stringy segment ids with lists of segment IDs
        segments_string = feature.attributes.get(segment_id_field)
        segments_as_ints = parse_segment_strings(segments_string)
        feature.attributes[segment_id_field] = segments_as_ints
        # collect all segment ids while we're at it
        all_segment_ids += segments_as_ints
    all_segment_ids = list(set(all_segment_ids))
    # fetch all segment features for segment IDs we've collected
    segment_features = get_segment_features(all_segment_ids, gis)
    todos = []
    for feature in features:
        # join segment feature geometries to our features
        segment_ids = feature.attributes.get(segment_id_field)
        feature_geom = build_geometry(segment_ids, segment_features)
        if not feature_geom:
            """
            It is possible that we won't find a matching street segment feature
            given a segment ID. This could happen from a user mis-keying a
            segment ID or because a once-existing segment ID has been removed
            or modified from CTM's street segment layer.
            """
            continue
        object_id = feature.attributes["OBJECTID"]
        todos.append({
            "attributes": {
                "OBJECTID": object_id
            },
            "geometry": feature_geom
        })
    logger.info(f"Updating geometries for {len(todos)} features...")
    for features_chunk in chunks(todos, CHUNK_SIZE):
        logger.info(f"Uploading {len(features_chunk)} records...")
        res = layer.edit_features(updates=features_chunk, rollback_on_failure=False)
        utils.agol.handle_response(res)
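# chunks() is used above (and in the next script) but not defined in this
# section. A common implementation that yields successive fixed-size slices;
# this is an assumption about the helper, not its original source.
def chunks(items, chunk_size):
    # yield successive chunk_size-length slices from items
    for i in range(0, len(items), chunk_size):
        yield items[i:i + chunk_size]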
def main():
    args = utils.args.cli_args(["app-name", "container", "date"])
    logger.info(args)
    container = args.container
    config = CONFIG.get(args.app_name).get(container)
    if not config:
        raise ValueError(
            f"No config entry found for app: {args.app_name}, container: {container}"
        )
    location_field_id = config.get("location_field_id")
    service_id = config["service_id"]
    layer_id = config["layer_id"]
    item_type = config["item_type"]
    client_postgrest = utils.postgrest.Postgrest(PGREST_ENDPOINT, token=PGREST_JWT)
    metadata_knack = utils.postgrest.get_metadata(client_postgrest, APP_ID)
    app = knackpy.App(app_id=APP_ID, metadata=metadata_knack)
    logger.info(f"Downloading records from app {APP_ID}, container {container}.")
    filter_iso_date_str = format_filter_date(args.date)
    data = client_postgrest.select(
        "knack",
        params={
            "select": "record",
            "app_id": f"eq.{APP_ID}",
            "container_id": f"eq.{container}",
            "updated_at": f"gte.{filter_iso_date_str}",
        },
        order_by="record_id",
    )
    logger.info(f"{len(data)} records to process.")
    if not data:
        return
    app.data[container] = [r["record"] for r in data]
    records = app.get(container)
    fields_names_to_sanitize = [
        f.name
        for f in app.field_defs
        if f.type in ["short_text", "paragraph_text"]
        and (f.obj == container or container in f.views)
    ]
    gis = arcgis.GIS(url=URL, username=USERNAME, password=PASSWORD)
    service = gis.content.get(service_id)
    if item_type == "layer":
        layer = service.layers[layer_id]
    elif item_type == "table":
        layer = service.tables[layer_id]
    else:
        raise ValueError(f"Unknown item_type specified: {item_type}")
    logger.info("Building features...")
    features = [
        utils.agol.build_feature(
            record, SPATIAL_REFERENCE, location_field_id, fields_names_to_sanitize
        )
        for record in records
    ]
    if not args.date:
        """
        Completely replace the destination data. arcgis does have a
        layer.manager.truncate() method, but it is not supported on the
        parent layer of parent-child relationships. So we truncate the layer
        by deleting with a "1=1" where expression. We use the "future"
        option to avoid request timeouts on large datasets.
        """
        logger.info("Deleting all features...")
        res = resilient_layer_request(
            layer.delete_features, {"where": "1=1", "future": True}
        )
        # Returns a "<Future>" response class which does not appear to be
        # documented
        while res._state != "FINISHED":
            logger.info(f"Response state: {res._state}. Sleeping for 1 second")
            time.sleep(1)
        utils.agol.handle_response(res._result)
    else:
        """
        Simulate an upsert by deleting features from AGOL if they already
        exist. The arcgis package does have a method that supports upserting:
        append()
        https://developers.arcgis.com/python/api-reference/arcgis.features.toc.html#featurelayer  # noqa: E501
        However this method errored out on multiple datasets and I gave up:
        layer.append(edits=features, upsert=True, upsert_matching_field="id")
        """
        logger.info(f"Deleting {len(features)} features...")
        key = "id"
        keys = [f'\'{f["attributes"][key]}\'' for f in features]
        for key_chunk in chunks(keys, 100):
            key_list_stringified = ",".join(key_chunk)
            res = resilient_layer_request(
                layer.delete_features,
                {"where": f"{key} in ({key_list_stringified})"},
            )
            utils.agol.handle_response(res)
    logger.info("Uploading features...")
    for features_chunk in chunks(features, 500):
        logger.info("Uploading chunk...")
        res = resilient_layer_request(
            layer.edit_features,
            {"adds": features_chunk, "rollback_on_failure": False},
        )
        utils.agol.handle_response(res)
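# resilient_layer_request() is not shown above. A minimal sketch, assuming it
# is a retry wrapper that calls a layer method with a dict of keyword
# arguments (the retry counts and delay are hypothetical):
import time


def resilient_layer_request(method, kwargs, max_retries=3, delay_secs=2):
    """Call `method(**kwargs)`, retrying on transient exceptions."""
    for attempt in range(1, max_retries + 1):
        try:
            return method(**kwargs)
        except Exception:
            if attempt == max_retries:
                raise
            time.sleep(delay_secs * attempt)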
def mainFunction(portalURL, portalUser, portalPassword, itemID, downloadLocation):
    # Add parameters sent to the script here e.g. (var1 is 1st parameter,
    # var2 is 2nd parameter, var3 is 3rd parameter)
    try:
        # --------------------------------------- Start of code --------------------------------------- #
        # Connect to GIS portal
        printMessage("Connecting to GIS Portal - " + portalURL + "...", "info")
        gisPortal = arcgis.GIS(url=portalURL,
                               username=portalUser,
                               password=portalPassword,
                               verify_cert=False)

        # If an item ID is provided
        if (itemID):
            item = gisPortal.content.get(itemID)
            items = []
            items.append(item)
        # Else get all items
        else:
            # Query all items in the portal
            items = gisPortal.content.search(query="", max_items=10000)

        # Create a new folder if it does not already exist
        if not os.path.exists(
                os.path.join(downloadLocation,
                             "AGSOnlineBackup-" + time.strftime("%Y%m%d"))):
            os.makedirs(
                os.path.join(downloadLocation,
                             "AGSOnlineBackup-" + time.strftime("%Y%m%d")))
        downloadLocation = os.path.join(
            downloadLocation, "AGSOnlineBackup-" + time.strftime("%Y%m%d"))

        # For each item
        for item in items:
            # If the item is a feature service
            if (item.type.lower() == "feature service"):
                # If a hosted service
                if "Hosted Service" in item.typeKeywords:
                    printMessage(
                        "Exporting data for feature service - " + item.id +
                        " (Title - " + item.title + ")...", "info")
                    fgdbItem = item.export(item.title, "File Geodatabase")
                    printMessage("Downloading data...", "info")
                    result = fgdbItem.download(downloadLocation)
                    fgdbItem.delete()
                else:
                    # Download the JSON data
                    printMessage(
                        "Downloading data from item - " + item.id +
                        " (Title - " + item.title + ")...", "info")
                    result = item.download(downloadLocation)
            elif (item.type.lower() == "code attachment"):
                printMessage(
                    "Not downloading data for code attachment - " + item.id +
                    " (Title - " + item.title + ")...", "warning")
                result = None
            else:
                # Download the JSON data
                printMessage(
                    "Downloading data from item - " + item.id + " (Title - " +
                    item.title + ")...", "info")
                result = item.download(downloadLocation)

            # If data was downloaded
            if result:
                filePath = result
                fileExtension = os.path.splitext(result)[1]
                # If there is no file extension
                if not fileExtension:
                    # If the file already exists
                    if os.path.isfile(result + ".json"):
                        # Delete the file
                        os.remove(result + ".json")
                    os.rename(result, result + ".json")
                    filePath = result + ".json"
                printMessage("Downloaded data to " + filePath, "info")
            else:
                printMessage(
                    "There was an error downloading the data for item " +
                    item.id, "error")
        # --------------------------------------- End of code --------------------------------------- #

        # If called from ArcGIS GP tool
        if __name__ == '__main__':
            # Return the output if there is any
            if output:
                # If using ArcPy
                if (useArcPy == "true"):
                    arcpy.SetParameter(1, output)
                # ArcGIS desktop not installed
                else:
                    return output
        # Otherwise return the result
        else:
            # Return the output if there is any
            if output:
                return output
        # Logging
        if (enableLogging == "true"):
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logMessage.flush()
            logMessage.close()
            logger.handlers = []
    # If error
    except Exception as e:
        errorMessage = ""
        # Build and show the error message
        # If many arguments
        if (e.args):
            for i in range(len(e.args)):
                if (i == 0):
                    errorMessage = str(e.args[i]).encode('utf-8').decode('utf-8')
                else:
                    errorMessage = errorMessage + " " + str(
                        e.args[i]).encode('utf-8').decode('utf-8')
        # Else just one argument
        else:
            errorMessage = e
        printMessage(errorMessage, "error")
        # Logging
        if (enableLogging == "true"):
            # Log error
            logger.error(errorMessage)
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logMessage.flush()
            logMessage.close()
            logger.handlers = []
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage)
if updateFile and updateExists is False:
    exit("\n\a * ERROR * Unable to locate Update File '{}' *".format(updateFile))

#
# Verify Login
#

print("\nLoading Python API...", end="", flush=True)
import arcgis  # Import Python API
print("Ready!\n", flush=True)

if not usingPro:
    getPassword = None
    if hasattr(arcgis.GIS, "_securely_get_password"):
        # Get password function from GIS object, for Python API v1.6.0
        getPassword = arcgis.GIS()._securely_get_password
    elif hasattr(arcgis.gis, "_impl") and hasattr(arcgis.gis._impl, "_profile"):
        # Get password function from Profile Manager, for Python API v1.7.0+?
        getPassword = arcgis.gis._impl._profile.ProfileManager()._securely_get_password
    else:
        print(" - Cannot check password for Python API v{}, unable to securely get password!".format(arcgis.__version__))

    if getPassword:
        # Check the profile password
        gis_cfg_file_path = os.path.expanduser("~") + '/.arcgisprofile'
        if os.path.isfile(gis_cfg_file_path) and profile:  # v1.5.0
            # Load the config and get the username from the profile
            gisConfig = configparser.ConfigParser()
            gisConfig.read(gis_cfg_file_path)
            username = gisConfig[profile]["username"] if gisConfig.has_option(profile, "username") else None
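# For context, the profile being inspected above is created by the persisted
# profile feature of the ArcGIS API for Python: passing profile= to
# arcgis.GIS() stores the username in ~/.arcgisprofile and the password in the
# OS keyring. A minimal sketch (URL, credentials, and profile name are
# placeholders):
from arcgis.gis import GIS

# First run: credentials are persisted under the named profile
gis = GIS("https://org.maps.arcgis.com", "adminuser", "pass", profile="admin_profile")

# Later runs: the stored profile is enough to authenticate
gis = GIS(profile="admin_profile")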
def add_vector_tile_layers(vector_layers):
    """
    Accepts the dictionary of vector layers, adds them to a map, creates the
    vector tile package locally, deletes the existing layer in AGOL if it
    exists, uploads the tile package to AGOL and then publishes the tile
    package to a hosted tile layer
    """
    aprx = arcpy.mp.ArcGISProject(APRX_PATH)
    m = aprx.listMaps(MAP)[0]
    gis = arcgis.GIS(AGOL, USERNAME, PASSWORD)
    remove_all_layers(m)
    # Get the group object for the mobile suma AGOL group
    mobile_suma_group = gis.groups.get(GROUP_ID)
    LOGGER.debug(f'Group name: {mobile_suma_group.title} found')
    for k, v in vector_layers.items():
        # First add a dummy layer to the map so that it sets the spatial
        # reference of the map to Web Mercator. This is necessary to
        # publish to the AGOL/Google maps tiling scheme.
        LOGGER.info(f'creating spatial reference layer for {k} group')
        arcpy.MakeFeatureLayer_management(SPATIAL_REFERENCE_LAYER,
                                          'spatial_reference_layer')
        m.addDataFromPath(SPATIAL_REFERENCE_LAYER)
        LOGGER.info(f'added spatial reference layer for {k}')
        for i in v:
            layer = arcpy.mp.LayerFile(os.path.join(QDL_PATH, i + ".lyr"))
            LOGGER.info(f'adding layer: {layer} to map')
            m.addLayer(layer)
        # Make sure all the layers are on
        turn_on_layers_in_map(m)
        # Create the vector tile package locally
        LOGGER.info('Creating vector tile package locally')
        arcpy.CreateVectorTilePackage_management(
            m,
            f'mobile_suma_gis/vector_tile_packages/{k}.vtpk',
            'ONLINE',
        )
        LOGGER.info('finished creating vtpk')
        # Delete the old package and tile layer. The name of the old layer and
        # new layer must match or they will break links from web maps.
        # AGOL does not currently support overwriting tile
        # packages, only feature services
        LOGGER.info('Checking if content already exists on AGOL and '
                    'deleting if it does.')
        agol_items = gis.content.search(
            f'owner:{OWNER} AND title:{k} AND group:{GROUP_ID}')
        if len(agol_items) > 0:
            for agol_item in agol_items:
                LOGGER.info(f'Deleting {agol_item}')
                agol_item.delete()
        else:
            LOGGER.info('No items to delete from AGOL.')
        # Publish to AGOL.
        LOGGER.info('Adding vtpk to AGOL')
        vtpk = gis.content.add(
            {
                'description': (f'VTPK for the {k} group layer; '
                                'this layer is updated weekly.')
            },
            f'mobile_suma_gis/vector_tile_packages/{k}.vtpk',
            folder='vector_tiles')
        # When using the share method you must use the gis group object.
        # Using the group id or name does not work, despite what the
        # documentation says.
        vtpk.share(org=True, groups=[mobile_suma_group])
        LOGGER.info('Publishing hosted tile layer')
        publish = vtpk.publish()
        publish.share(org=True, groups=[mobile_suma_group])
        # Remove the layers. All active layers in the map
        # will be included in the vtpk
        LOGGER.info('Removing layers from map.')
        remove_all_layers(m)
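# A hedged usage sketch for the function above: vector_layers maps a package
# name to the .lyr file basenames that belong in it. The names here are
# hypothetical examples, not from the original script:
if __name__ == '__main__':
    vector_layers = {
        'hydrology': ['streams', 'lakes'],
        'transport': ['roads', 'trails'],
    }
    add_vector_tile_layers(vector_layers)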
def mainFunction(mapFile, locator, localOutputLocation, portalURL, portalUser,
                 portalPassword, mobileMapPackageID, title, description, tags):
    # Add parameters sent to the script here e.g. (var1 is 1st parameter,
    # var2 is 2nd parameter, var3 is 3rd parameter)
    try:
        # --------------------------------------- Start of code --------------------------------------- #
        printMessage("Creating mobile map package - " + localOutputLocation + "...", "info")
        arcpy.management.CreateMobileMapPackage(
            mapFile,              # Map file
            localOutputLocation,  # Output map package
            locator,              # Locator
            None,                 # Area of interest
            "DEFAULT",            # Extent
            "SELECT",             # CLIP or SELECT for extent option
            title,                # Title
            description,          # Summary
            description,          # Description
            tags,                 # Tags
            "",                   # Credits
            "")                   # Usage limitations

        # If uploading to portal
        if (portalURL and portalUser and portalPassword and title and description and tags):
            # Connect to GIS portal
            printMessage("Connecting to GIS Portal - " + portalURL + "...", "info")
            gisPortal = arcgis.GIS(url=portalURL,
                                   username=portalUser,
                                   password=portalPassword,
                                   verify_cert=False)
            # If an ID is provided
            if (mobileMapPackageID):
                printMessage("Updating existing mobile map package in portal - " + mobileMapPackageID, "info")
                # Get the portal item
                userContentItem = gisPortal.content.get(mobileMapPackageID)
                # Update the mmpk in portal
                userContentItem.update(None, localOutputLocation)
            else:
                printMessage("Uploading mobile map package to portal...", "info")
                # Get all items for the user
                user = gisPortal.users.get(portalUser)
                userItems = user.items()
                itemExists = False
                # For each item
                for userItem in userItems:
                    # If the item already exists
                    if (title.lower() == userItem.title.lower()):
                        printMessage("Mobile map package already exists in portal - " + userItem.id + "...", "info")
                        # Get the portal item
                        userContentItem = gisPortal.content.get(userItem.id)
                        printMessage("Updating existing mobile map package...", "info")
                        # Update the mmpk in portal
                        userContentItem.update(None, localOutputLocation)
                        itemExists = True
                # If the item doesn't exist in portal
                if (itemExists == False):
                    # Upload the mmpk to portal
                    item = gisPortal.content.add({"title": title, "type": "Mobile Map Package"}, localOutputLocation)
                    printMessage("Mobile map package uploaded - " + item.id + "...", "info")
        # --------------------------------------- End of code --------------------------------------- #

        # If called from ArcGIS GP tool
        if __name__ == '__main__':
            # Return the output if there is any
            if output:
                # If using ArcPy
                if (useArcPy == "true"):
                    arcpy.SetParameter(1, output)
                # ArcGIS desktop not installed
                else:
                    return output
        # Otherwise return the result
        else:
            # Return the output if there is any
            if output:
                return output
        # Logging
        if (enableLogging == "true"):
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logMessage.flush()
            logMessage.close()
            logger.handlers = []
    # If error
    except Exception as e:
        errorMessage = ""
        # Build and show the error message
        # If many arguments
        if (e.args):
            for i in range(len(e.args)):
                if (i == 0):
                    errorMessage = str(e.args[i]).encode('utf-8').decode('utf-8')
                else:
                    errorMessage = errorMessage + " " + str(e.args[i]).encode('utf-8').decode('utf-8')
        # Else just one argument
        else:
            errorMessage = e
        printMessage(errorMessage, "error")
        # Logging
        if (enableLogging == "true"):
            # Log error
            logger.error(errorMessage)
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logMessage.flush()
            logMessage.close()
            logger.handlers = []
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage)
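# A hedged usage sketch for the mobile map package function above; every
# path, URL, and credential here is a placeholder:
mainFunction(
    mapFile=r"C:\Data\Project.mapx",
    locator=r"C:\Data\AddressLocator.loc",
    localOutputLocation=r"C:\Output\Project.mmpk",
    portalURL="https://org.maps.arcgis.com",
    portalUser="adminuser",
    portalPassword="pass",
    mobileMapPackageID="",
    title="Project Mobile Map",
    description="Weekly mobile map package build",
    tags="mobile,mmpk")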
from events.earthquakes import update_earthquakes
from events.fires import update_fires
from events.storms import update_storms
from events.tsunamis import update_tsunamis
import arcgis
import time
import yaml
import os

with open('../config.yaml', 'r') as stream:
    # safe_load avoids the deprecated no-Loader form of yaml.load
    config = yaml.safe_load(stream)

gis = arcgis.GIS(username=os.environ.get('AGOL_USERNAME'),
                 password=os.environ.get('AGOL_PASSWORD'))

if __name__ == '__main__':
    while True:
        update_earthquakes(config['bbox'], config['urls']['USGS'],
                           config['translations']['earthquakes'], gis,
                           config['item_ids']['earthquakes'])
        update_fires(config['bbox'],
                     {k: config['urls'][k] for k in ['MODIS', 'VIIRS']},
                     config['translations']['fires'], gis,
                     config['item_ids']['fires'])
        update_storms(config['bbox'], config['translations']['storms'], gis,
                      config['item_ids']['storms'])
        update_tsunamis(config['bbox'], config['urls']['NOAA'],
                        config['translations']['tsunamis'], gis,
                        config['item_ids']['tsunamis'])
        print('\n\n')
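# The config.yaml read above is not included. Judging only from the keys the
# loop accesses, its structure looks like the following sketch; every value
# here is a hypothetical placeholder:
example_config = {
    "bbox": [-125.0, 32.0, -114.0, 42.0],  # hypothetical bounding box
    "urls": {"USGS": "...", "MODIS": "...", "VIIRS": "...", "NOAA": "..."},
    "translations": {"earthquakes": {}, "fires": {}, "storms": {}, "tsunamis": {}},
    "item_ids": {"earthquakes": "...", "fires": "...", "storms": "...", "tsunamis": "..."},
}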
def mainFunction(portalURL, portalUser, portalPassword, configFile,
                 createFolder, downloadLocation, parallelProcessing):
    # Add parameters sent to the script here e.g. (var1 is 1st parameter,
    # var2 is 2nd parameter, var3 is 3rd parameter)
    try:
        # --------------------------------------- Start of code --------------------------------------- #
        # Connect to GIS portal
        printMessage("Connecting to GIS Portal - {}...".format(portalURL), "info")
        gisPortal = arcgis.GIS(url=portalURL,
                               username=portalUser,
                               password=portalPassword,
                               verify_cert=False)

        # If a config file is provided
        itemDicts = []
        configData = None
        if (configFile):
            # If the config file is valid
            if (os.path.isfile(configFile)):
                # Open the JSON file
                with open(configFile) as jsonFile:
                    configData = json.load(jsonFile)
                if "items" in configData:
                    # For each item
                    for itemConfig in configData["items"]:
                        if "itemID" in itemConfig:
                            # Get the item
                            item = gisPortal.content.get(itemConfig["itemID"])
                            itemDict = {}
                            itemDict["item"] = item
                            itemDict["itemConfig"] = itemConfig
                            itemDicts.append(itemDict)
                        else:
                            printMessage(
                                "Configuration file is not valid, item ID parameter is not present - "
                                + configFile + "...", "error")
                else:
                    printMessage(
                        "Configuration file is not valid, items list is not present - "
                        + configFile + "...", "error")
            else:
                printMessage("Configuration file is not valid - " + configFile + "...", "error")
        # Else get all items
        else:
            # Query all items in the portal
            items = gisPortal.content.search(query="", max_items=10000)
            for item in items:
                itemDict = {}
                itemDict["item"] = item
                itemDict["itemConfig"] = ""
                itemDicts.append(itemDict)

        # If there are items
        if len(itemDicts) > 0:
            # Set up a folder if necessary
            if (createFolder.lower() == "true"):
                if not os.path.exists(
                        os.path.join(downloadLocation,
                                     "AGSBackup-" + time.strftime("%Y%m%d"))):
                    # Create the folder
                    printMessage(
                        "Creating folder - " +
                        os.path.join(downloadLocation,
                                     "AGSBackup-" + time.strftime("%Y%m%d")) +
                        "...", "info")
                    os.makedirs(
                        os.path.join(downloadLocation,
                                     "AGSBackup-" + time.strftime("%Y%m%d")))
                downloadLocation = os.path.join(
                    downloadLocation, "AGSBackup-" + time.strftime("%Y%m%d"))

            # If using parallel processing to download the items
            if (parallelProcessing.lower() == "true"):
                # Download data for items - pool items for processing
                multiprocessing.pool.ThreadPool().starmap(
                    downloadItem,
                    zip(itemDicts, itertools.repeat(configData),
                        itertools.repeat(downloadLocation)))
            else:
                # For each item
                for itemDict in itemDicts:
                    # Download data for the item
                    downloadItem(itemDict, configData, downloadLocation)
        # --------------------------------------- End of code --------------------------------------- #

        # If called from ArcGIS GP tool
        if __name__ == '__main__':
            # Return the output if there is any
            if output:
                # If using ArcPy
                if (useArcPy == "true"):
                    arcpy.SetParameter(1, output)
                # ArcGIS desktop not installed
                else:
                    return output
        # Otherwise return the result
        else:
            # Return the output if there is any
            if output:
                return output
        # Logging
        if (enableLogging == "true"):
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logMessage.flush()
            logMessage.close()
            logger.handlers = []
    # If error
    except Exception as e:
        errorMessage = ""
        # Build and show the error message
        # If many arguments
        if (e.args):
            for i in range(len(e.args)):
                if (i == 0):
                    errorMessage = str(e.args[i]).encode('utf-8').decode('utf-8')
                else:
                    errorMessage = errorMessage + " " + str(
                        e.args[i]).encode('utf-8').decode('utf-8')
        # Else just one argument
        else:
            errorMessage = e
        printMessage(errorMessage, "error")
        # Logging
        if (enableLogging == "true"):
            # Log error
            logger.error(errorMessage)
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logMessage.flush()
            logMessage.close()
            logger.handlers = []
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage)
def mainFunction():
    # Get parameters from ArcGIS Desktop tool by separating by comma e.g.
    # (var1 is 1st parameter, var2 is 2nd parameter, var3 is 3rd parameter)
    try:
        # --------------------------------------- Start of code --------------------------------------- #
        # Connect to GIS portal
        printMessage("Connecting to GIS Portal - " + portalURL + "...", "info")
        gisPortal = arcgis.GIS(url=portalURL,
                               username=portalUser,
                               password=portalPassword,
                               verify_cert=False)

        # Query all web maps in the portal
        printMessage("Querying the web maps in portal...", "info")
        items = gisPortal.content.search(query="",
                                         item_type="Web Map",
                                         max_items=10000)
        printMessage("There are " + str(len(items)) + " web maps in the portal...", "info")

        # Open the web maps CSV file and create a writer
        csvWebmapsFileWrite = open(webmapsCSV, 'w', newline='')
        fieldNames = ["Title", "ID", "Web URL"]
        csvWebmapsWriter = csv.DictWriter(csvWebmapsFileWrite, fieldNames)
        csvWebmapsWriter.writeheader()

        # Open the services CSV file and create a writer
        csvServicesFileWrite = open(servicesCSV, 'w', newline='')
        fieldNames = ["Web URL", "Count"]
        csvServicesWriter = csv.DictWriter(csvServicesFileWrite, fieldNames)
        csvServicesWriter.writeheader()

        # For each item
        webmapList = []
        servicesList = []
        serviceCountsList = []
        for item in items:
            # Get the web map object
            webmap = arcgis.mapping.WebMap(item)
            for basemapLayer in webmap.basemap["baseMapLayers"]:
                # If the layer has a URL reference
                if "url" in basemapLayer:
                    # Add to the web map list
                    webmapDict = {}
                    webmapDict["Title"] = item.title
                    webmapDict["ID"] = item.id
                    webmapDict["Web URL"] = basemapLayer["url"]
                    webmapList.append(webmapDict)
                    # Add to the service list
                    servicesList.append(basemapLayer["url"])
            for layer in webmap.layers:
                # If the layer has a URL reference
                if "url" in layer:
                    # Add to the web map list
                    webmapDict = {}
                    webmapDict["Title"] = item.title
                    webmapDict["ID"] = item.id
                    webmapDict["Web URL"] = layer.url
                    webmapList.append(webmapDict)
                    # Add to the service list
                    servicesList.append(layer.url)

        printMessage("Writing web map and service information to CSV...", "info")
        # Get a count of the number of times services are used in web maps
        serviceCounts = collections.Counter(servicesList).most_common()
        for key, value in serviceCounts:
            # Add to the services count list
            serviceCountDict = {}
            serviceCountDict["Web URL"] = key
            serviceCountDict["Count"] = value
            serviceCountsList.append(serviceCountDict)

        # Write the lists to the CSV files
        csvWebmapsWriter.writerows(webmapList)
        csvServicesWriter.writerows(serviceCountsList)
        # Close the CSV files
        csvWebmapsFileWrite.close()
        csvServicesFileWrite.close()
        # --------------------------------------- End of code --------------------------------------- #

        # If called from ArcGIS GP tool
        if __name__ == '__main__':
            # Return the output if there is any
            if output:
                # If using ArcPy
                if (useArcPy == "true"):
                    arcpy.SetParameter(1, output)
                # ArcGIS desktop not installed
                else:
                    return output
        # Otherwise return the result
        else:
            # Return the output if there is any
            if output:
                return output
        printMessage("Process ended...", "info")
        if (enableLogTable == "true"):
            # Log end message to table
            currentDate = datetime.datetime.utcnow().strftime("%d/%m/%Y %H:%M:%S")
            logToTable({
                "Date": currentDate,
                "Process": os.path.basename(__file__).replace(".py", ""),
                "Status": "Success",
                "Organisation": None,
                "DataName": None,
                "Message": "Process ended...",
                "RecordCount": None
            })
    # If error
    except Exception as e:
        errorMessage = ""
        # Build and show the error message
        # If many arguments
        if (e.args):
            for i in range(len(e.args)):
                if (i == 0):
                    errorMessage = str(e.args[i]).encode('utf-8').decode('utf-8')
                else:
                    errorMessage = errorMessage + " " + str(
                        e.args[i]).encode('utf-8').decode('utf-8')
        # Else just one argument
        else:
            errorMessage = e
        printMessage(errorMessage, "error")
        printMessage("Process ended...", "info")
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage, None)
        if (enableLogTable == "true"):
            # Log end message to table
            currentDate = datetime.datetime.utcnow().strftime("%d/%m/%Y %H:%M:%S")
            logToTable({
                "Date": currentDate,
                "Process": os.path.basename(__file__).replace(".py", ""),
                "Status": "Fail",
                "Organisation": None,
                "DataName": None,
                "Message": errorMessage,
                "RecordCount": None
            })
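# A hedged sketch of reading back the services CSV the audit script writes;
# the column names come from the fieldNames defined above, and servicesCSV is
# assumed to be the same module-level path the script uses:
import csv

with open(servicesCSV, newline='') as f:
    for row in csv.DictReader(f):
        print(row["Web URL"], row["Count"])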
def mainFunction(portalURL, portalUser, portalPassword, csvFileLocation, setItemType):
    # Add parameters sent to the script here e.g. (var1 is 1st parameter,
    # var2 is 2nd parameter, var3 is 3rd parameter)
    try:
        # --------------------------------------- Start of code --------------------------------------- #
        # Connect to GIS portal
        printMessage("Connecting to GIS Portal - " + portalURL + "...", "info")
        gisPortal = arcgis.GIS(url=portalURL,
                               username=portalUser,
                               password=portalPassword,
                               verify_cert=False)

        # If an item type is set
        if (setItemType):
            printMessage("Only creating content of type " + setItemType + "...", "info")

        # Read the CSV file
        with open(csvFileLocation) as csvFile:
            reader = csv.DictReader(csvFile)
            # For each row in the CSV
            for row in reader:
                processRow = True
                # Get the item type from the CSV
                itemType = row["Type"]
                # If an item type is set
                if (setItemType):
                    # If the row does not match the item type set, do not process it
                    if (setItemType.lower().replace(" ", "") != itemType.lower().replace(" ", "")):
                        processRow = False
                # If processing this row
                if (processRow == True):
                    # If a title is provided
                    if (row["Title"].replace(" ", "")):
                        # Set organisation sharing
                        if row["Organisation Sharing"]:
                            organisationSharing = row["Organisation Sharing"]
                        else:
                            organisationSharing = "Private"
                        # Set group sharing
                        if row["Group Sharing"]:
                            # Create a list of group IDs to share with
                            groupSharing = []
                            groupSharingTitles = row["Group Sharing"].split(",")
                            for groupTitle in groupSharingTitles:
                                # Get the item ID of the group
                                itemID = getIDforPortalItem(gisPortal, groupTitle, "group")
                                groupSharing.append(itemID)
                            groupSharing = ','.join(groupSharing)
                        else:
                            groupSharing = ""

                        if (itemType.lower().replace(" ", "") == "group"):
                            # Create group
                            createGroup(gisPortal, row["Title"], row["Summary"],
                                        row["Description"], row["Tags"],
                                        row["Thumbnail"], organisationSharing,
                                        row["Members"])
                        elif (itemType.lower().replace(" ", "") == "webmap"):
                            # Create web map
                            createWebMap(gisPortal, row["Title"], row["Summary"],
                                         row["Description"], row["Tags"],
                                         row["Thumbnail"], organisationSharing,
                                         groupSharing, row["Data"])
                        elif (itemType.lower().replace(" ", "") == "webscene"):
                            # Create web scene
                            createWebScene(gisPortal, row["Title"], row["Summary"],
                                           row["Description"], row["Tags"],
                                           row["Thumbnail"], organisationSharing,
                                           groupSharing, row["Data"])
                        elif (itemType.lower().replace(" ", "") == "webmappingapplication"):
                            # Create web mapping application
                            createWebApplication(portalURL, gisPortal, row["Title"],
                                                 row["Summary"], row["Description"],
                                                 row["Tags"], row["Thumbnail"],
                                                 organisationSharing, groupSharing,
                                                 row["Data"])
                        elif (itemType.lower().replace(" ", "") == "dashboard"):
                            # Create dashboard
                            createDashboard(gisPortal, row["Title"], row["Summary"],
                                            row["Description"], row["Tags"],
                                            row["Thumbnail"], organisationSharing,
                                            groupSharing, row["Data"])
                        elif (itemType.lower().replace(" ", "") == "form"):
                            # Create form
                            createForm(gisPortal, row["Title"], row["Summary"],
                                       row["Description"], row["Tags"],
                                       row["Thumbnail"], organisationSharing,
                                       groupSharing, row["Data"])
                        elif (itemType.lower().replace(" ", "") == "tileservice"):
                            # Create tile service
                            createTileService(gisPortal, row["Title"], row["Summary"],
                                              row["Description"], row["Tags"],
                                              row["Thumbnail"], organisationSharing,
                                              groupSharing, row["Data"])
                        elif (itemType.lower().replace(" ", "") == "featureservice"):
                            # Create feature service
                            createFeatureService(gisPortal, row["Title"], row["Summary"],
                                                 row["Description"], row["Tags"],
                                                 row["Thumbnail"], organisationSharing,
                                                 groupSharing, row["Data"])
                        elif (itemType.lower().replace(" ", "") == "featureserviceview"):
                            # Create feature service view
                            createFeatureServiceView(gisPortal, row["Title"], row["Summary"],
                                                     row["Description"], row["Tags"],
                                                     row["Thumbnail"], organisationSharing,
                                                     groupSharing, row["Data"])
                        else:
                            printMessage(row["Title"] + " item in CSV does not have a valid type set and will not be created...", "warning")
                    else:
                        printMessage("Item in CSV does not have a title set and will not be created...", "warning")
        # --------------------------------------- End of code --------------------------------------- #

        # If called from ArcGIS GP tool
        if __name__ == '__main__':
            # Return the output if there is any
            if output:
                # If using ArcPy
                if (useArcPy == "true"):
                    arcpy.SetParameter(1, output)
                # ArcGIS desktop not installed
                else:
                    return output
        # Otherwise return the result
        else:
            # Return the output if there is any
            if output:
                return output
        # Logging
        if (enableLogging == "true"):
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logMessage.flush()
            logMessage.close()
            logger.handlers = []
    # If error
    except Exception as e:
        # Build and show the error message
        # errorMessage = arcpy.GetMessages(2)
        errorMessage = ""
        # If many arguments
        if (e.args):
            for i in range(len(e.args)):
                if (i == 0):
                    errorMessage = str(e.args[i]).encode('utf-8').decode('utf-8')
                else:
                    errorMessage = errorMessage + " " + str(
                        e.args[i]).encode('utf-8').decode('utf-8')
        # Else just one argument
        else:
            errorMessage = e
        printMessage(errorMessage, "error")
        # Logging
        if (enableLogging == "true"):
            # Log error
            logger.error(errorMessage)
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logMessage.flush()
            logMessage.close()
            logger.handlers = []
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail("Python Script Error", errorMessage, None)
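# A hedged sketch of the CSV layout the content-creation function above
# expects; the column names match those referenced in the code, and the row
# values are hypothetical:
#
# Title,Type,Summary,Description,Tags,Thumbnail,Organisation Sharing,Group Sharing,Members,Data
# Parks Web Map,Web Map,Parks overview,City parks web map,"parks,gis",thumb.png,Organisation,GIS Team,,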