    def setUpClass(self):  # pylint: disable=bad-classmethod-argument
        self.maxDiff = None

        tbx_path = os.path.join(os.path.dirname(CWD), "LargeNetworkAnalysisTools.pyt")
        arcpy.ImportToolbox(tbx_path)

        self.input_data_folder = os.path.join(CWD, "TestInput")
        sf_gdb = os.path.join(self.input_data_folder, "SanFrancisco.gdb")
        self.origins = os.path.join(sf_gdb, "Analysis", "TractCentroids")
        self.destinations = os.path.join(sf_gdb, "Analysis", "Hospitals")
        self.local_nd = os.path.join(sf_gdb, "Transportation", "Streets_ND")
        tms = arcpy.nax.GetTravelModes(self.local_nd)
        self.local_tm_time = tms["Driving Time"]
        self.local_tm_dist = tms["Driving Distance"]
        self.portal_nd = portal_credentials.PORTAL_URL  # Must be arcgis.com for test to work
        self.portal_tm = portal_credentials.PORTAL_TRAVEL_MODE

        arcpy.SignInToPortal(self.portal_nd, portal_credentials.PORTAL_USERNAME, portal_credentials.PORTAL_PASSWORD)

        # Create a unique output directory and gdb for this test
        self.output_folder = os.path.join(
            CWD, "TestOutput", "Output_Tool_" + datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S"))
        os.makedirs(self.output_folder)
        self.output_gdb = os.path.join(self.output_folder, "outputs.gdb")
        arcpy.management.CreateFileGDB(os.path.dirname(self.output_gdb), os.path.basename(self.output_gdb))
Example #2
    def setUpClass(self):  # pylint: disable=bad-classmethod-argument
        self.maxDiff = None

        self.input_data_folder = os.path.join(CWD, "TestInput")
        self.sf_gdb = os.path.join(self.input_data_folder, "SanFrancisco.gdb")
        self.origins = os.path.join(self.sf_gdb, "Analysis", "TractCentroids")
        self.destinations = os.path.join(self.sf_gdb, "Analysis", "Hospitals")
        self.local_nd = os.path.join(self.sf_gdb, "Transportation",
                                     "Streets_ND")
        self.local_tm_time = "Driving Time"
        self.local_tm_dist = "Driving Distance"
        self.portal_nd = portal_credentials.PORTAL_URL
        self.portal_tm = portal_credentials.PORTAL_TRAVEL_MODE

        arcpy.SignInToPortal(self.portal_nd,
                             portal_credentials.PORTAL_USERNAME,
                             portal_credentials.PORTAL_PASSWORD)

        # Create a unique output directory and gdb for this test
        self.output_folder = os.path.join(
            CWD, "TestOutput", "Output_SolveLargeODCM_" +
            datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S"))
        os.makedirs(self.output_folder)
        self.output_gdb = os.path.join(self.output_folder, "outputs.gdb")
        arcpy.management.CreateFileGDB(os.path.dirname(self.output_gdb),
                                       os.path.basename(self.output_gdb))

        # Copy some data to the output gdb to serve as barriers. Do not use tutorial data directly as input because
        # the tests will write network location fields to it, and we don't want to modify the user's original data.
        self.barriers = os.path.join(self.output_gdb, "Barriers")
        arcpy.management.Copy(
            os.path.join(self.sf_gdb, "Analysis", "CentralDepots"),
            self.barriers)

        self.od_args = {
            "origins": self.origins,
            "destinations": self.destinations,
            "network_data_source": self.local_nd,
            "travel_mode": self.local_tm_dist,
            "output_od_lines": os.path.join(self.output_gdb, "OutODLines"),
            "output_origins": os.path.join(self.output_gdb, "OutOrigins"),
            "output_destinations": os.path.join(self.output_gdb,
                                                "OutDestinations"),
            "chunk_size": 20,
            "max_processes": 4,
            "time_units": "Minutes",
            "distance_units": "Miles",
            "cutoff": 2,
            "num_destinations": 1,
            "precalculate_network_locations": True,
            "barriers": [self.barriers]
        }
Example #3
    def sign_into_portal(self):
        portal_config = self.config['portal']
        portal_url = arcpy.GetActivePortalURL()
        if 'username' in portal_config and 'password' in portal_config:
            try:
                portal_info = arcpy.SignInToPortal(portal_url,
                                                   portal_config['username'],
                                                   portal_config['password'])
                token = portal_info['token']
            except (ValueError, KeyError):
                return None
        elif 'app_id' in portal_config and 'refresh_token' in portal_config:
            try:
                payload = {
                    'client_id': portal_config['app_id'],
                    'refresh_token': portal_config['refresh_token'],
                    'grant_type': 'refresh_token'
                }

                req = requests.post(portal_url + '/sharing/rest/oauth2/token',
                                    data=payload,
                                    verify=False)
                req_json = req.json()
                token = req_json['access_token']
            except (ValueError, KeyError):
                return None
        elif 'app_id' in portal_config and 'app_secret' in portal_config:
            try:
                payload = {
                    'client_id': portal_config['app_id'],
                    'client_secret': portal_config['app_secret'],
                    'grant_type': 'client_credentials'
                }

                req = requests.post(portal_url + '/sharing/rest/oauth2/token',
                                    data=payload,
                                    verify=False)
                req_json = req.json()
                token = req_json['access_token']
            except (ValueError, KeyError):
                return None
        else:
            infos = arcpy.GetSigninToken()
            if infos:
                token = infos['token']
            else:
                return None

        self.token = token
        return self.token
Example #4
 def create_sd_files_from_map(self,
                              map_name=None,
                              pro_prjx=None,
                              service_id=""):
     sds = {}
     try:
         the_prj = pro_prjx if pro_prjx else self._config.baseprjx
         aprx = arcpy.mp.ArcGISProject(the_prj)
         if map_name in [m.name for m in aprx.listMaps()]:
             service_object = getattr(self._config, map_name.lower())
             service_name = service_object["servicename"]
             try:
                 service_name = service_name % service_id
             except Exception as e:
                 self.errorlog(str(e))
                 self.log("This is an expected error. ;-)")
             self.log(f"Service name:{service_name}")
             sddraft_file = TempFileName.generate_temporary_file_name(
                 suffix=".sddraft")
             sdfile = TempFileName.generate_temporary_file_name(
                 suffix=".sd")
             self.log(f"Creating:{sddraft_file},{sdfile}")
             m = aprx.listMaps(map_name)[0]
             arcpy.SignInToPortal(self._config.portal, self._config.user,
                                  self._config.password)
             #sharing_draft = m.getWebLayerSharingDraft("HOSTING_SERVER", "FEATURE", service_name)
             # change for JXT
             sharing_draft = m.getWebLayerSharingDraft(
                 "FEDERATED_SERVER", "MAP_IMAGE", service_name)
             sharing_draft.federatedServerUrl = self._config.fed_server
             sharing_draft.copyDataToServer = True
             # change to Map Image server
             sharing_draft.summary = service_object["summary"]
             sharing_draft.tags = service_object["tags"]
             sharing_draft.description = service_object[
                 "description"] + f'<br/>Item updated on {datetime.datetime.now().strftime("%d %B %Y %H:%M:%S")} by automated script.'
             sharing_draft.credits = ""
             sharing_draft.exportToSDDraft(sddraft_file)
             arcpy.StageService_server(sddraft_file, sdfile)
             sds[service_name] = sdfile
     except Exception as e:
         self.errorlog(e)
     return sds
Example #5
    def create_report(self):
        """
        Returns a list of dicts whose keys are column headings and values are the column values:
        [{itemid: 'some_uuid', title: 'AGOL title', ...} ...]
        """
        self.logger.info('Creating AGOL Usage Report...')
        item_info_dicts = []

        try:
            arcpy.SignInToPortal(credentials.ORG, credentials.USERNAME,
                                 credentials.PASSWORD)
        except NameError as ex:  #: Travis CI throws a NameError on arcpy calls.
            self.logger.info(ex)

        org = tools.Organization(self.logger, credentials.ORG,
                                 credentials.USERNAME, credentials.PASSWORD)
        folders = org.get_users_folders()
        items = org.get_feature_services_in_folders(folders)
        open_data_groups = org.get_open_data_groups()

        metatable = tools.Metatable(self.logger)
        sgid_fields = [
            'TABLENAME', 'AGOL_ITEM_ID', 'AGOL_PUBLISHED_NAME', 'Authoritative'
        ]
        agol_fields = [
            'TABLENAME', 'AGOL_ITEM_ID', 'AGOL_PUBLISHED_NAME', 'CATEGORY'
        ]
        metatable.read_metatable(credentials.SGID_METATABLE, sgid_fields)
        metatable.read_metatable(credentials.AGOL_METATABLE, agol_fields)

        for item_tuple in items:
            item, folder = item_tuple
            metatable_category = None
            if item.itemid in metatable.metatable_dict:
                metatable_category = metatable.metatable_dict[
                    item.itemid].category
            item_info_dicts.append(
                org.get_item_info(item, open_data_groups, folder,
                                  metatable_category))

        return item_info_dicts
Example #6
def get_token(base_url: str) -> Optional[str]:
    """get token"""
    token: Optional[str] = None
    try:
        hosted_feature_service: bool = arcpy.GetParameter(0)
        ags_service: bool = arcpy.GetParameter(1)
        portal_url: str = arcpy.GetParameterAsText(2)
        username: str = arcpy.GetParameterAsText(7)
        password: str = arcpy.GetParameterAsText(8)
        if not (is_blank(username) or is_blank(password)):
            arcpy.AddMessage('\nGenerating Token\n')
            # Generate token for hosted feature service
            if hosted_feature_service:
                arcpy.SignInToPortal(portal_url, username, password)
                token = arcpy.GetSigninToken()['token']
            # Generate token for AGS feature service
            elif ags_service:
                token = generate_token(base_url, username, password)
            if token is None:
                raise ValueError('Error generating token')
    except Exception as exc:
        raise Exception('Error generating token') from exc
    return token
Example #7
def UploadGIS_Service():
    try:
        print('-' * 50)
        print("Sign into ArcGIS portal ...")
        callbackUserData = arcpy.SignInToPortal(**args.userData)
        args.userData.update(callbackUserData)

        print("Uploading GIS layers ...")
        sharingDraft = map3D.getWebLayerSharingDraft(args.serverType,
                                                     args.serviceType,
                                                     args.service)
        sharingDraft.summary = args.sharingDraftAttr['summary']
        sharingDraft.tags = args.sharingDraftAttr['tags']
        sharingDraft.description = args.sharingDraftAttr['description']
        sharingDraft.overwriteExistingService = args.sharingDraftAttr[
            'overwriteExistingService']
        sharingDraft.exportToSDDraft(args.sddraftOutputFilename)
        arcpy.server.StageService(args.sddraftOutputFilename,
                                  args.sdOutputFilename)

        outServiceDefinitions = arcpy.server.UploadServiceDefinition(
            **args.uploadAttr)
        for i in range(outServiceDefinitions.outputCount):
            args.serviceDefinitions[args.serviceDefinitionFeatures[
                i]] = outServiceDefinitions.getOutput(i)

        print('Successfully uploaded GIS service!')
        print('Service definitions:')
        pprint(args.serviceDefinitions)

        with open(args.serviceDefinitionsJSON, 'w', encoding='utf-8') as f:
            json.dump(args.serviceDefinitions, f, ensure_ascii=False)

    except Exception as e:
        print(f'\n----------\nError in UploadGIS_Service:\n  {e}\n')

    print('-' * 50)
Example #8
def publish_results():
    # Sign in to portal
    arcpy.SignInToPortal("https://esriau.maps.arcgis.com/","","")
    
    # Set output file names
    outdir = os.path.join(os.path.dirname(__file__),  'sd')
    service = "FS_WazeEllipses_KL"
    sddraft_filename = service + ".sddraft"
    sddraft_output_filename = os.path.join(outdir, sddraft_filename)
    
    # Reference map to publish
    aprx = arcpy.mp.ArcGISProject(os.path.join(os.path.dirname(__file__),  'WazeForPublishing.aprx'))
    m = aprx.listMaps("Waze For Publishing")[0]
    
    # Create FeatureSharingDraft and set service properties
    sharing_draft = m.getWebLayerSharingDraft("HOSTING_SERVER", "FEATURE", service)
    sharing_draft.overwriteExistingService = "True"
    sharing_draft.portalFolder = "Waze"
    sharing_draft.summary = "Waze Ellipses created from a python script that pulls latest Waze data for Kuala Lumpur"
    sharing_draft.tags = "Waze, BGT, Kuala Lumpur"
    sharing_draft.description = "Latest Waze Jam incidents downloaded and a density clustering performed.  Ellipses generated around the main clusters"
    sharing_draft.credits = "Waze CCP"
    sharing_draft.useLimitations = "Demo Purposes Only"
    
    # Create Service Definition Draft file
    sharing_draft.exportToSDDraft(sddraft_output_filename)
    
    # Stage Service
    sd_filename = service + ".sd"
    sd_output_filename = os.path.join(outdir, sd_filename)
    arcpy.StageService_server(sddraft_output_filename, sd_output_filename)
    
    # Share to portal
    print("Uploading Service Definition for publishing")
    arcpy.UploadServiceDefinition_server(sd_output_filename, "My Hosted Services", in_override="OVERRIDE_DEFINITION", in_public="PUBLIC")
    
    print("Successfully Uploaded & Published.")
Example #9
import arcpy
import config
import os
import sys

arcpy.CheckOutExtension("Spatial")
arcpy.env.overwriteOutput = True

# Set workspace and scratch folder
ws = arcpy.env.workspace = config.workspace
sc = arcpy.env.scratchworkspace = config.scratchworkspace

# Sign in Portal
try:
    arcpy.SignInToPortal('https://{}/portal/'.format(config.portalname),
                         config.username, config.password)
    print('Signed in to portal')
except:
    print("Signin error:", sys.exc_info()[0])

# Set input and output data then run the model
intbx = 'hotspot.tbx'
inputfc = os.path.join('HotSpotData.gdb', 'calls')
try:
    inputRaster = arcpy.management.MakeRasterLayer('stowe_elev.tif',
                                                   'outRlayer1')
    distance_method = 'Manhattan'
    arcpy.ImportToolbox(intbx)
    result_item = arcpy.hotspotscript_hotspot(inputfc, inputRaster,
                                              distance_method)
    print("Tool runs successfully.")
Example #10
def one_function_to_rule_them_all(common_info: CommonInfo,
                                  specific_info: SpecificInfo):
    '''Calls all the previous functions in appropriate order

    Args:
        common_info (CommonInfo): Info common to all layers (wfh and operator)
        specific_info (SpecificInfo): Info specific to a particular layer (wfh or operator)

    Raises:
        NotImplementedError: If a method other than 'wfh' or 'operator' is provided
    '''

    print('Getting AGOL references...')
    password = getpass('Enter Password: ')
    # (masked in the source: the AGOL sign-in and the sd_item/fs_item lookups happen here)
    print('Cleaning up scratch areas...')
    if arcpy.Exists(str(common_info.scratch_gdb)):
        print(f'Deleting existing {common_info.scratch_gdb}...')
        arcpy.management.Delete(str(common_info.scratch_gdb))
    print(f'Creating {common_info.scratch_gdb}...')
    arcpy.management.CreateFileGDB(str(common_info.scratch_gdb.parent),
                                   str(common_info.scratch_gdb.name))

    dhrm_data = get_dhrm_dataframe(common_info.employee_data_path)

    if specific_info.method == 'wfh':
        get_wfh_eins(specific_info.data_source, dhrm_data,
                     common_info.csv_path)
    elif specific_info.method == 'operator':
        get_operator_eins(specific_info.data_source, dhrm_data,
                          common_info.csv_path)
    else:
        raise NotImplementedError(
            f'Method {specific_info.method} not recognized...')

    geocode_points(
        str(common_info.csv_path),
        str(common_info.geocoded_points_path),
        str(common_info.locator_path),
        'real_addr',
        'real_zip',
    )

    hex_bin(str(common_info.geocoded_points_path),
            str(common_info.hex_fc_path),
            str(common_info.hexes_fc_path),
            simple_count=specific_info.simple_summary,
            within_table=str(common_info.within_table_path))

    remove_single_count_hexes(str(common_info.hexes_fc_path),
                              str(common_info.trimmed_hex_fc_path))

    sharing_layer, sharing_map = add_layer_to_map(
        str(common_info.project_path), common_info.map_name,
        str(common_info.trimmed_hex_fc_path))

    update_agol_feature_service(sharing_map, sharing_layer, sd_item, fs_item,
                                specific_info)
Example #11
print(f'Current date and time: {dt.datetime.now()}')


# Start timer and print start time in UTC
start_time = time.time()
readable_start = time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime())
print("The script start time is {}".format(readable_start))


# Set variables, get AGOL username and password
portal_url = arcpy.GetActivePortalURL()
print(portal_url)

user = getpass.getpass(prompt='    Enter arcgis.com username:\n')
pw = getpass.getpass(prompt='    Enter arcgis.com password:\n')
arcpy.SignInToPortal(portal_url, user, pw)
del pw

# Updated count numbers are copied from table at 'https://coronavirus.utah.gov/case-counts/'
# CSV file with updates should be named 'COVID_Case_Counts_latest.csv'
# Update this 'work_dir' variable with the folder you store the updated CSV in
work_dir = r'C:\COVID19'
updates = pd.read_csv(os.path.join(work_dir, 'COVID_Case_Counts_latest.csv'))
updates.sort_values('Jurisdiction', inplace=True)


# TEST layer
# counts_service = r'https://services1.arcgis.com/99lidPhWCzftIe9K/ArcGIS/rest/services/EMN_Cases_by_LHD_TEST_v3/FeatureServer/0'
# TEST table
# counts_by_day = r'https://services1.arcgis.com/99lidPhWCzftIe9K/arcgis/rest/services/EMN_Cases_by_LHD_by_Day_TEST_v3/FeatureServer/0'
Example #12
    def setup(self):
        """
        Sets up the Auditor by logging into the ArcGIS org, getting all the items and folders, and reading in the
        metatable(s). To be called in __init__().
        In case of multiple calls (ie, for retry()), all data are re-instantiated/initialized.
        """

        #: temp_dir used by fixes.metadata_fix() to hold xml of sde metadata
        temp_dir = Path(arcpy.env.scratchFolder, 'auditor')
        if not temp_dir.exists():
            if self.verbose:
                print(f'Creating temp directory {temp_dir}...')
            temp_dir.mkdir()

        self.log.info(f'Logging into {credentials.ORG} as {credentials.USERNAME}')

        self.gis = arcgis.gis.GIS(credentials.ORG, credentials.USERNAME, credentials.PASSWORD)

        #: Make sure ArcGIS Pro is properly logged in
        arcpy.SignInToPortal(arcpy.GetActivePortalURL(), credentials.USERNAME, credentials.PASSWORD)

        user_item = self.gis.users.me  # pylint: disable=no-member

        #: Build dict of folders. 'None' gives us the root folder.
        if self.verbose:
            print(f'Getting {self.username}\'s folders...')
        folders = {None: None}
        for folder in user_item.folders:
            folders[folder['id']] = folder['title']

        self.items_to_check = []  #: Clear this out again in case retry calls setup() multiple times.

        #: Get the item object and its corresponding folder for each relevant item
        if self.verbose:
            print('Getting item objects...')

        #: User-provided list
        if self.item_ids:
            for item_id in self.item_ids:
                item = self.gis.content.get(item_id)  # pylint: disable=no-member
                if not item:
                    raise ValueError(f'Item {item_id} not found')
                self.items_to_check.append(item)
                try:
                    self.itemid_and_folder[item.itemid] = folders[item.ownerFolder]
                except KeyError:
                    raise ValueError(f'Folder id {item.ownerFolder} not found (wrong user?)')

        #: No user-provided item ids, get all hosted feature services in every folder
        else:
            for _, name in folders.items():
                for item in user_item.items(name, 1000):
                    if item.type == 'Feature Service':
                        self.items_to_check.append(item)
                        self.itemid_and_folder[item.itemid] = name

        #: Read the metatable into memory as a dictionary based on itemid.
        #: Getting this once so we don't have to re-read every iteration
        if self.verbose:
            print('Getting metatable info...')

        self.metatable = Metatable()

        sgid_fields = ['TABLENAME', 'AGOL_ITEM_ID', 'AGOL_PUBLISHED_NAME', 'Authoritative']
        agol_fields = ['TABLENAME', 'AGOL_ITEM_ID', 'AGOL_PUBLISHED_NAME', 'CATEGORY']
        self.metatable.read_metatable(self.sgid_table, sgid_fields)
        self.metatable.read_metatable(self.agol_table, agol_fields)

        if self.metatable.duplicate_keys:
            raise RuntimeError(f'Duplicate AGOL item IDs found in metatables: {self.metatable.duplicate_keys}')

        #: Get the groups
        if self.verbose:
            print('Getting groups...')
        groups = self.gis.groups.search('title:*')  # pylint: disable=no-member
        self.groups_dict = {g.title: g.id for g in groups}
Example #13
### Title: SEA1_AttachmentViewer_AutoPublisher.py
### Description: This Python script publishes a refreshed hosted feature layer to ArcGIS Online, which is then
###              consumed in Attachment Viewer. This script is intended to be triggered on a nightly basis in
###              order for new data to be visible from the previous day.
### Company: Foresite Group
### Author: Darren Foster
### MUST RUN IN PYTHON 3.6 ###

import arcpy
import os
from arcgis.gis import GIS
from datetime import datetime
arcpy.env.overwriteOutput = True

### Sign in to portal (In this case, sign into ArcGIS Online) ###
arcpy.SignInToPortal('https://www.arcgis.com', '$$$$$', '$$$$$')

### Set constants ###
OUT_DIR = r'\\fginc-file\GIS\Foster\AutoPublishing\SEA1_AttachmentViewer'
SERVICE = 'SEA1_AttViewer_HFS'
SD_DRAFT = SERVICE + '.sddraft'
SD_FILENAME = SERVICE + '.sd'
LOG = r'\\fginc-file\GIS\Foster\AutoPublishing\SEA1_AttachmentViewer\Logger.txt'

### Construct a GIS object ###
gis = GIS('https://www.arcgis.com', '$$$$$', '$$$$$')

### Paths to the Service Definition and the Draft ###
sdDraftOutput = os.path.join(OUT_DIR, SD_DRAFT)
sdPath = os.path.join(OUT_DIR, SD_FILENAME)
Example #14
# changes the os directory using the full path name
os.chdir(path)

# store current working directory
folder_dir = os.getcwd()

# find and store all files with .shp ending
shps = glob.glob(folder_dir + '\*.shp')

for shp_path in shps:
  m.addDataFromPath(shp_path)

aprx.save()

arcpy.SignInToPortal('https://www.arcgis.com', 'REPLACE THIS: USER_NAME', 'REPLACE THIS: USER_PASSWORD')

# Set output file names
# where to store the .sd files
outdir = r"REPLACE THIS: LOCATION TO STORE THE .SD FILES"

# what to name it once published
service = "REPLACE THIS: NAME OF FEATURE LAYER"

sddraft_filename = service + ".sddraft"
sddraft_output_filename = os.path.join(outdir, sddraft_filename)

# Reference map to publish
# get the current map document
aprx = arcpy.mp.ArcGISProject("CURRENT")
Example #15
def CreateSDFile(configFile):
    try:
        with open(configFile) as json_config:
            config = json.load(json_config)
    except Exception as e:
        arcpy.AddMessage("Failed to load config.")
        return

    try:
        toolbox = config["toolbox"]
        alias = config["alias"]
        toolName = config["toolName"]
        toolArgs = config["toolArgs"]
        sddraft = config["sdDraft"]
        sd = config["sd"]
        portalurl = config["portalURL"]
        portalusername = config["portalUsername"]
        portalpassword = config["portalPassword"]
        serverURL = config["serverURL"]
        serviceName = config["serviceName"]
        serverType = config["serverType"]
        connectionFilePath = config["connectionFilePath"]
        copyDataToServer = config["copyDataToServer"]
        folderName = config["folderName"]
        summary = config["summary"]
        tags = config["tags"]
        executionType = config["executionType"]
        resultMapServer = config["resultMapServer"]
        showMessages = config["showMessages"]
        maximumRecords = config["maximumRecords"]
        minInstances = config["minInstances"]
        maxInstances = config["maxInstances"]
        maxUsageTime = config["maxUsageTime"]
        maxWaitTime = config["maxWaitTime"]
        maxIdleTime = config["maxIdleTime"]
        constantValues = config["constantValues"]
    except KeyError as keyErr:
        arcpy.AddMessage(f"Config file missing value: {keyErr}")
        return
    except Exception as e:
        arcpy.AddMessage(f"Error occured in retreiving config values: {e}")
        return

    arcpy.AddMessage("Successfuly read all configuration values.")
    arcpy.ImportToolbox(toolbox, alias)
    arcpy.AddMessage(arcpy.GetMessages(0))
    customToolMethod = getattr(arcpy, f"{toolName}_{alias}")
    result = customToolMethod(*toolArgs)
    arcpy.AddMessage(arcpy.GetMessages(0))
    arcpy.SignInToPortal(portalurl, portalusername, portalpassword)
    arcpy.AddMessage(arcpy.GetMessages(0))

    analyzeMessages = arcpy.CreateGPSDDraft(
        result,
        sddraft,
        serviceName,
        server_type=serverType,
        connection_file_path=connectionFilePath,
        copy_data_to_server=copyDataToServer,
        folder_name=folderName,
        summary=summary,
        tags=tags,
        executionType=executionType,
        resultMapServer=resultMapServer,
        showMessages=showMessages,
        maximumRecords=maximumRecords,
        minInstances=minInstances,
        maxInstances=maxInstances,
        maxUsageTime=maxUsageTime,
        maxWaitTime=maxWaitTime,
        maxIdleTime=maxIdleTime,
        constantValues=constantValues)
    arcpy.AddMessage(arcpy.GetMessages(0))

    # Stage and upload the service if the sddraft analysis did not
    # contain errors
    if analyzeMessages['errors'] == {}:
        # Execute StageService
        arcpy.StageService_server(sddraft, sd)
        arcpy.AddMessage(arcpy.GetMessages(0))
        # Execute UploadServiceDefinition
        # Use URL to a federated server
        arcpy.UploadServiceDefinition_server(sd, serverURL)
        arcpy.AddMessage(arcpy.GetMessages(0))
    else:
        # If the sddraft analysis contained errors, display them
        print(analyzeMessages['errors'])
Example #16
    gis = GIS(Config.PORTAL_URL, Config.PORTAL_USER, Config.PORTAL_PASSWORD)
    if update_service_definition(gis, sdname, service_name):
        print("Service definition has been updated.")

    else:
        print("Uploading definition using \"%s\" %s" % (ags_file, folder))

        # Upload the service definition to SERVER
        # In theory everything needed to publish the service is already in the SD file.
        # https://pro.arcgis.com/en/pro-app/latest/tool-reference/server/upload-service-definition.htm
        # You can override permissions, ownership, groups here too.
        try:
            # In theory ags_file could be Config.SERVER_URL but then where does it get authenticated?
            # arcpy.server.UploadServiceDefinition(sdname, ags_file, in_startupType="STARTED")
            # in_startupType HAS TO BE "STARTED" else no service is started on the SERVER.

            rval = arcpy.SignInToPortal(Config.PORTAL_URL, Config.PORTAL_USER,
                                        Config.PORTAL_PASSWORD)
            rval = arcpy.server.UploadServiceDefinition(
                sdname, Config.SERVER_URL, in_startupType="STARTED")
        except Exception as e:
            print("Upload failed.", e)

    service = Config.SERVER_URL + '/rest/services/'
    if folder:
        service += folder + "/"
    service += service_name + '/MapServer'
    print("Map Image published successfully - ", service)

    fetch_tile(service, "C:/temp/output.png")
    SERVER = "1.2.3.4"
    PORT = "25"
    FROM = "*****@*****.**"
    MAILDOMAIN = '@ME.COM'
    # Data Steward getting the email. Needs to be their email address...without @nps.gov at the end
    userList=["BIG_FOOT"]
    # get a list of usernames from the list of named tuples returned from ListUsers
    userNames = [u for u in userList]
    # take the userNames list and make email addresses by appending the appropriate suffix.
    emailList = [name +  MAILDOMAIN for name in userNames]
    TO = emailList
    # Grab date for the email
    DATE = d

    # Sign in to portal
    arcpy.SignInToPortal('https://www.arcgis.com', 'USER', 'PASSWORD')

    # Set output file names
    outdir = r"C:\PRODUCTION\GRSM_EXOTICS"
    sd_filename = service + ".sd"
    sd_output_filename = os.path.join(outdir, sd_filename)
    sddraft_filename = service + ".sddraft"
    sddraft_output_filename = os.path.join(outdir, sddraft_filename)
    #Delete any left over SD files from failed previous run
    try:
        os.remove(sd_output_filename)
        print("Successfully deleted ", sd_output_filename)
    except:
        print("Error while deleting file ", sd_output_filename, ", perhaps it doesn't exist")
    try:
        os.remove(sddraft_output_filename)
Example #18
sdPath = r"C:\temp\UC2018\PublishingSamples\Output\%s.sd" % (serviceName)
restEndPoint = "https://services.arcgis.com/EguFTd9xPXEoDtC7/arcgis/rest/services/"
queryCriteria = "where=&objectIds=1&time=&geometry=&geometryType=esriGeometryEnvelope&inSR=&spatialRel=esriSpatialRelIntersects&resultType=none&distance=0.0&units=esriSRUnit_Meter&returnGeodetic=false&outFields=&returnGeometry=true&multipatchOption=xyFootprint&maxAllowableOffset=&geometryPrecision=&outSR=&returnIdsOnly=false&returnCountOnly=false&returnExtentOnly=false&returnDistinctValues=false&orderByFields=&groupByFieldsForStatistics=&outStatistics=&resultOffset=&resultRecordCount=&returnZ=false&returnM=false&quantizationParameters=&sqlFormat=none"
baseJSONFile = r'baselines\base_' + serviceName + '_queryResult_Id1.json'

# list the AGO or enterprise url and credentials here
portalURL = r'https://www.arcgis.com'
cred_detail = []
with open("secure/AGO_pass.txt") as f:
        for line in f:
            cred_detail.append(line.splitlines())
username = cred_detail[0][0]
password = cred_detail[1][0]

# Sign into AGO and set as active portal
arcpy.SignInToPortal(portalURL, username, password)

# Maintain a reference of an ArcGISProject object pointing to your project
aprx = arcpy.mp.ArcGISProject(aprxPath)

# Maintain a reference of a Map object pointing to your desired map
m = aprx.listMaps('Map1')[0]

''' The first step in automating the publishing of a map, layer, or list of layers to a hosted web layer with ArcPy, using the newer object-oriented approach.

   Use the "getWebLayerSharingDraft" method to create a FeatureSharingDraft object (reference: http://pro.arcgis.com/en/pro-app/arcpy/sharing/featuresharingdraft-class.htm)
   Syntax: getWebLayerSharingDraft (server_type, service_type, service_name, {...})

   Then export the draft to a Service Definition Draft (.sddraft) file with the "exportToSDDraft" method.
   Syntax: exportToSDDraft (out_sddraft)
'''
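A minimal sketch (continuing Example #18) of the workflow the note above describes: create the FeatureSharingDraft from the map, export it to a .sddraft, stage it to a .sd, then upload it. `m`, `serviceName`, and `sdPath` come from earlier in this example; the summary, tags, and the .sddraft path are hypothetical placeholders.

# Sketch only: serviceName, m, and sdPath are defined earlier in this example;
# sddraftPath, the summary, and the tags are placeholder values.
sddraftPath = r"C:\temp\%s.sddraft" % (serviceName)
sharing_draft = m.getWebLayerSharingDraft("HOSTING_SERVER", "FEATURE", serviceName)
sharing_draft.summary = "Example summary"
sharing_draft.tags = "example, tags"
sharing_draft.exportToSDDraft(sddraftPath)          # write the .sddraft file
arcpy.StageService_server(sddraftPath, sdPath)      # stage it into a .sd file
arcpy.UploadServiceDefinition_server(sdPath, "My Hosted Services")  # publish to the portal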
Example #19
    def process(self):

        #: Set up paths and directories
        feature_service_name = secrets.FEATURE_SERVICE_NAME

        temp_csv_dir = os.path.join(arcpy.env.scratchFolder, 'fleet')
        temp_fc_path = os.path.join(arcpy.env.scratchGDB, feature_service_name)
        sddraft_path = os.path.join(arcpy.env.scratchFolder,
                                    f'{feature_service_name}.sddraft')
        sd_path = sddraft_path[:-5]

        paths = [temp_csv_dir, temp_fc_path, sddraft_path, sd_path]
        for item in paths:
            if arcpy.Exists(item):
                self.log.info(f'Deleting {item} prior to use...')
                arcpy.Delete_management(item)
        os.mkdir(temp_csv_dir)

        if not secrets.KNOWNHOSTS or not os.path.isfile(secrets.KNOWNHOSTS):
            raise FileNotFoundError(
                f'known_hosts file {secrets.KNOWNHOSTS} not found. Please create with ssh-keyscan.'
            )

        #: Download all the files in the upload folder on sftp to temp_csv_dir
        self.log.info(
            f'Downloading all files from {secrets.SFTP_HOST}/upload...')
        connection_opts = pysftp.CnOpts(knownhosts=secrets.KNOWNHOSTS)
        with pysftp.Connection(secrets.SFTP_HOST,
                               username=secrets.SFTP_USERNAME,
                               password=secrets.SFTP_PASSWORD,
                               cnopts=connection_opts) as sftp:
            sftp.get_d('upload', temp_csv_dir, preserve_mtime=True)

        #: Get the latest file
        source_path, source_date = self.get_latest_csv(temp_csv_dir,
                                                       previous_days=7)

        self.log.info(
            f'Converting {source_path} to feature class {temp_fc_path}...')
        wgs84 = arcpy.SpatialReference(4326)
        result = arcpy.management.XYTableToPoint(source_path,
                                                 temp_fc_path,
                                                 'LONGITUDE',
                                                 'LATITUDE',
                                                 coordinate_system=wgs84)
        self.log.debug(result.getMessages())

        try_count = 1
        while True:
            try:
                self.log.info(f'Updating service, try {try_count} of 3...')

                self.log.info(
                    f'Connecting to AGOL as {secrets.AGOL_USERNAME}...')
                gis = arcgis.gis.GIS('https://www.arcgis.com',
                                     secrets.AGOL_USERNAME,
                                     secrets.AGOL_PASSWORD)
                sd_item = gis.content.get(secrets.SD_ITEM_ID)

                self.log.info('Getting map and layer...')
                arcpy.SignInToPortal(arcpy.GetActivePortalURL(),
                                     secrets.AGOL_USERNAME,
                                     secrets.AGOL_PASSWORD)
                layer, fleet_map = self.get_map_layer(secrets.PROJECT_PATH,
                                                      temp_fc_path)

                #: draft, stage, update, publish
                self.log.info('Staging and updating...')
                self.update_agol_feature_service(fleet_map, layer,
                                                 feature_service_name,
                                                 sddraft_path, sd_path,
                                                 sd_item)

                #: Update item description
                self.log.info('Updating item description...')
                feature_item = gis.content.get(secrets.FEATURES_ITEM_ID)
                year = source_date[:4]
                month = source_date[4:6]
                day = source_date[6:]
                description = f'Vehicle location data obtained from Fleet; updated on {year}-{month}-{day}'
                feature_item.update(
                    item_properties={'description': description})

            except Exception as e:
                err_msg = f'Error on attempt {try_count} of 3; retrying.'
                self.log.exception(err_msg)

                #: Fail for good if 3x retry fails, otherwise increment, sleep,
                #: and retry
                if try_count > 3:
                    err_msg = 'Connection errors; giving up after 3 retries'
                    self.log.exception(err_msg)
                    raise e
                sleep(try_count**2)
                try_count += 1
                continue

            #: If we haven't gotten an error, break out of while True.
            break
Example #20
password = config.get('ago', 'password')
proxy = config.get('ago', 'proxy')

# Set arcpy options
arcpy.SetLogHistory(False)
arcpy.env.workspace = os.getcwd()
arcpy.env.overwriteOutput = True

# Define where the .aprx files are located
prjPath = config.get('local', 'prjPath')

# Get a list of the .aprx files to stage the services
aprx_files = glob.glob(prjPath)

# For some reason SDDraft tool needs an explicit arcpy AGO login now
arcpy.SignInToPortal(portal, user, password)

# Define the AGO organization to connect to
gis = GIS(portal, user, password, proxy_host=proxy, proxy_port=8080)

# Files have to be created sequentially because ArcGIS Pro doesn't like multiprocessing
for aprx in aprx_files:
    prj = arcpy.mp.ArcGISProject(aprx)
    prj_mp = prj.listMaps()[0]
    try:
        if checks(prj_mp):
            print(prj_mp.name)
            create_sd(prj_mp.name)
            logger.info('{} service definition successfully created.'.format(
                prj_mp.name))
        else:
Example #21
    "\"Viernes\",\"Sabado\",\"Domingo\")\n    day = inicio.weekday()\n    daystring = weekDay"
    +
    "s[day]\n    label = \'El \' + daystring + \' lloverá \' + str(mm) + \'mm en esta región\'\n    return(l"
    + "abel)")

#import arcgis project aprx
aprx = arcpy.mp.ArcGISProject(project_folder +
                              r'\pronosticos\pronosticos.aprx')

#save project
print('saving project')
aprx.save()
print('project saved')

#connect to arcgis portal
arcpy.SignInToPortal("https://sig.icafe.cr/portal", "adminicafe", "icafesig")

#Create sddraft
print("Creating sddraft")
m = aprx.listMaps('Map')[0]
service = "pronosticos_image"
sddraft_filename = service + ".sddraft"
sddraft_output_filename = os.path.join(project_folder, sddraft_filename)

sharing_draft = m.getWebLayerSharingDraft("FEDERATED_SERVER", "MAP_IMAGE",
                                          service)
sharing_draft.federatedServerUrl = 'https://sig.icafe.cr/server'
sharing_draft.summary = "Pronosticos de precipitación"
sharing_draft.tags = "Pronosticos Precipitación"
sharing_draft.overwriteExistingService = True
sharing_draft.portalFolder = 'Pronosticos'
Example #22
import arcpy, os
from arcgis.gis import GIS
from arcgis.features import FeatureLayerCollection
print("INITIALIZING SCRIPT EXECUTION")

arcpy.env.overwriteOutput=1

#port = 'https://www.arcgis.com'
port = arcpy.GetParameterAsText(0)
#user = '******'
user = arcpy.GetParameterAsText(1)
#passw = '#'
passw = arcpy.GetParameterAsText(2)


arcpy.SignInToPortal(port, user, passw)
gis = GIS(port, user, passw)

open_data = gis.groups.search('title:Apex Open Data')
group = open_data[0]


direct = arcpy.GetParameterAsText(3)
#direct = r"C:\Users\Jlong\Documents\ArcGIS\Projects\PublishAGOLdata"
mapdoc = arcpy.GetParameterAsText(4)
#mapdoc = r"C:\Users\Jlong\Documents\ArcGIS\Projects/PublishAGOLdata/PublishAGOLdata.aprx"

agol_serv_con = 'My Hosted Services'
aprx = arcpy.mp.ArcGISProject(mapdoc)

'''
Example #23
def main():
    # -----------------------------------------------------------------------------
    # Constants
    # -----------------------------------------------------------------------------
    DEFAULT_AGO_PORTAL_URL = "https://governmentofbc.maps.arcgis.com"  # Can also reference a local portal
    DEFAULT_AGO_PORTAL_USER = "******"
    DEFAULT_AGO_FOLDER = "ago_feature_layers_hosted_active"
    # -----------------------------------------------------------------------------
    # Parameters
    # -----------------------------------------------------------------------------
    argParser = argparse.ArgumentParser(
        description=
        "Publishes Feature Classes from a ArcGIS Pro project and updates or creates new Feature Layer AGO Items"
    )
    argParser.add_argument(
        '-user',
        dest='user',
        action='store',
        default=DEFAULT_AGO_PORTAL_USER,
        required=False,
        help='the ArcGIS Online User to publish AGO Items with')
    argParser.add_argument('-pwd',
                           dest='password',
                           action='store',
                           default=None,
                           required=True,
                           help='the ArcGIS Online User password')
    argParser.add_argument(
        '-path',
        dest='prjPath',
        action='store',
        default=None,
        required=True,
        help='the full path and name to the ArcGIS Pro Project')
    argParser.add_argument('-portal',
                           dest='portal',
                           action='store',
                           default=DEFAULT_AGO_PORTAL_URL,
                           required=False,
                           help='AGO portal URL')
    argParser.add_argument(
        '-folder',
        dest='shrFolder',
        action='store',
        default=DEFAULT_AGO_FOLDER,
        required=False,
        help=
        'The ID of the folder that contains all hosted items managed by this script'
    )
    argParser.add_argument(
        '-fcs',
        dest='fcs',
        action='store',
        default=None,
        required=True,
        help=
        'The text file that contains a list of feature classes and update times i.e. E:/apps_data/userdata/scripts/public_2_ago/layers_for_ago2.txt'
    )

    try:
        args = argParser.parse_args()
    except argparse.ArgumentError as e:
        argParser.print_help()
        sys.exit(1)

    start_time = time.time()
    now = time.ctime(int(start_time))
    print('Start Time: ' + now)

    print('Connecting to {}'.format(args.portal))
    arcpy.SignInToPortal(args.portal,
                         username=args.user,
                         password=args.password)
    gis = GIS(url=args.portal,
              username=args.user,
              password=args.password,
              verify_cert=False)

    prjPath = args.prjPath.replace('/', '\\')
    print('Open ArcPro Project ' + prjPath)
    prj = arcpy.mp.ArcGISProject(prjPath)

    print("Logged in as " + str(gis.properties.user.username))

    query_dict = {
        'f': 'json',
        'username': args.user,
        'password': args.password,
        'referer': args.portal
    }
    url = args.portal + "/sharing/rest/generateToken"
    r = requests.post(url + "?f=json", params=query_dict)
    t = json.loads(r.text)

    if "token" not in t:
        print('could not get token, error')
        sys.exit(1)

    token = t['token']
    print('token:  ' + token)

    fn = args.fcs
    with open(fn, 'r') as f:
        for line in f:
            print('')
            print('***********************************************')
            x = line.split(";")
            cron = x[0]
            print('cron expression - ' + cron)
            stalemin = int(x[1])
            print('Stale Minutes - ' + str(stalemin))
            title = x[2]
            print('Title - ' + title)
            metadataurl = x[3]
            print('Metadata Url - ' + metadataurl)
            agourl = x[4]
            print('AGO Item URL - ' + agourl)
            #find GUID of ITEM
            guid = agourl.split('id=')[1]

            #check to see the modified date of the AGO item
            #agoitem = gis.content.get(guid)
            print("search for " + title + ' owner - ' + args.user)
            #agoitemlist = gis.content.search(query="title:"+ title +" AND owner:" + args.user, item_type='Feature Layer')
            agoitemlist = gis.content.search(query="id:" + guid +
                                             " AND owner:" + args.user,
                                             item_type='Feature Layer')

            agoitem = None
            if agoitemlist:
                agoitem = agoitemlist[0]
                print('found ' + agoitem.url)
            else:
                print('not found ' + title)

            if agoitem:
                #print (agoitem)
                #print (agoitem.modified)
                #print (agoitem.url)
                # 2: request the json data for the feature

                query_dict = {"f": "json", "token": token}

                jsonResponse = requests.get(agoitem.url + "/0",
                                            params=query_dict)
                # lastEditDate is in the editingInfo section of the json response
                # to access other sections, change "editingInfo" to the section name ("types" for example)
                # using OrderedDict keeps the file ordered as sent by server, but is not necessary
                #print(jsonResponse.text)
                resptxt = jsonResponse.text

                if resptxt.find("editingInfo") > 0:
                    t = json.loads(jsonResponse.text)
                    lastEditDate = t['editingInfo']['lastEditDate']
                else:
                    jsonResponse = requests.get(agoitem.url + "/1",
                                                params=query_dict)
                    #print(jsonResponse.text)
                    resptxt = jsonResponse.text
                    if resptxt.find("editingInfo") > 0:
                        t = json.loads(jsonResponse.text)
                        lastEditDate = t['editingInfo']['lastEditDate']
                    else:
                        jsonResponse = requests.get(agoitem.url + "/2",
                                                    params=query_dict)
                        resptxt = jsonResponse.text
                        if resptxt.find("editingInfo") > 0:
                            t = json.loads(jsonResponse.text)
                            lastEditDate = t['editingInfo']['lastEditDate']
                        else:
                            jsonResponse = requests.get(agoitem.url + "/3",
                                                        params=query_dict)
                            resptxt = jsonResponse.text
                            if resptxt.find("editingInfo") > 0:
                                t = json.loads(jsonResponse.text)
                                lastEditDate = t['editingInfo']['lastEditDate']

                print(lastEditDate)
                if not lastEditDate:
                    lastEditDate = agoitem.modified
                    print(
                        'using AGO ITEM modified date as feature service edit date is null'
                    )

                editTime = lastEditDate / 1000
                print("Last Edited: " +
                      time.strftime('%c', time.localtime(editTime)))

                converted_d1 = datetime.datetime.fromtimestamp(
                    round(lastEditDate / 1000))
                current_time = datetime.datetime.now()

                #print (current_time)
                #print (converted_d1)
                dateage = current_time - converted_d1
                print(dateage)
                age = int(dateage.total_seconds() / 60)
                print(age)
                diff = stalemin - age
                if diff < 0:
                    print("Older than stale minutes so running update  " +
                          str(diff))
                    # run PRO->SD->feature layer steps
                    publish2ago(DEFAULT_AGO_FOLDER, agoitem, gis, metadataurl,
                                prjPath, prj, args.user, guid)

                else:
                    print("Younger than stale minutes so skipping update  " +
                          str(diff))
            else:
                print(guid + " Not found in AGO - " + title)
Example #24
import arcpy

# Sign in to ArcGIS Online (enter your own ArcGIS Online username and password)
arcpy.SignInToPortal("https://arcgis.com", "username", "password")

# Get the project file object
aprx = arcpy.mp.ArcGISProject("CURRENT")

# Get the map and its layers
map = aprx.listMaps()[0]
lyrs = map.listLayers()
# Set the file names for the service definition draft and service definition files to create
sddraft_output = r"C:\data\output\sample.sddraft"
sd_output = r"C:\data\output\sample.sd"

# Create the service definition draft
sharing_draft = map.getWebLayerSharingDraft("HOSTING_SERVER", "FEATURE",
                                            "Sample", lyrs)
sharing_draft.exportToSDDraft(sddraft_output)

# Create the service definition file (using the Stage Service tool)
arcpy.StageService_server(sddraft_output, sd_output)
# Upload to ArcGIS Online (using the Upload Service Definition tool)
arcpy.UploadServiceDefinition_server(sd_output, "HOSTING_SERVER")
Example #25
with arcpy.EnvManager(
        scratchWorkspace=
        r"C:\Users\ASF\Documents\COVID19\Disasters\Watermaps\Watermaps.gdb",
        workspace=
        r"C:\Users\ASF\Documents\COVID19\Disasters\Watermaps\Watermaps.gdb"):
    try:
        arcpy.AID.AIDISDP(md_rtc, aid_rtc, None)
    except:
        print("AID errors generated and ignored.")
        pass
print('RTC AID package complete.')

# update image services
import keyring
pw = keyring.get_password("portal_creds", "hkristenson_ASF")
arcpy.SignInToPortal(r'https://asf-daac.maps.arcgis.com/', 'hkristenson_ASF',
                     pw)

# print('Updating Watermap Extent Image Service...')
# arcpy.AID.MAIDIS("asf-daac", "Update Service", "test", "None", "HKH_Watermaps_Clipped", None, aid_wm, "Dedicated Instance", "Watermap Extent products generated from Sentinel-1 SAR imagery over flood-prone regions in the Hindu Kush Himalayan (HKH) region for the 2022 monsoon season, generated by ASF.", "Imagery products processed by ASF DAAC HyP3 2022 using GAMMA software. Contains modified Copernicus Sentinel data 2022, processed by ESA.", '', False, False, True, None, None, None, None)
# print('Watermap Extent Image Service updated.')
print('Updating Watermap Extent Image Service...')
arcpy.AID.MAIDIS(
    "asf-daac", "Update Service", "HKH", "None", "HKH_Watermap_Extent", None,
    aid_wm, "Dedicated Instance",
    "Watermap Extent products generated from Sentinel-1 SAR imagery over flood-prone regions in the Hindu Kush Himalayan (HKH) region for the 2022 monsoon season, generated by ASF.",
    "Imagery products processed by ASF DAAC HyP3 2022 using GAMMA software. Contains modified Copernicus Sentinel data 2022, processed by ESA.",
    '', False, False, True, None, None, None, None)
print('Watermap Extent Image Service updated.')
print('Updating RGB Image Service...')
arcpy.AID.MAIDIS(
    "asf-daac", "Update Service", "HKH", "None", "HKH_RGB", None, aid_rgb,
Example #26
    footer = "\n Information Technology  " \
             " \n GIS Administrator " \
             "\n P: 241-3409-7515 "
    message = """From: {0}\r\nTo: {1}\r\nSubject: {2}\r\n\

    {3}\r\n\n\n {4}\r\n\n\n {5}""".format(FROM, ", ".join(TO), subject,
                                          header_mesg, text, footer)

    server = smtplib.SMTP(SERVER)
    server.sendmail(FROM, TO, message)
    server.quit()


arcpy.env.overwriteOutput = 1

arcpy.SignInToPortal('https://www.arcgis.com', 'jlong_apexnc', '#')

projdoc = r"C:\Users\Jlong\Documents\ArcGIS\Projects\RecollectPublishServicesLGDB\RecollectPublishServicesLGDB.aprx"
direct = r"C:\Users\Jlong\Documents\ArcGIS\Projects\RecollectPublishServicesLGDB"


def email(x):
    auto_email(["*****@*****.**"],
               subject="ERROR DETECTED: Recollect Data "
               "Import/Overwrite/Dataupload "
               "Script",
               text=str(x))


def create_service_definition(map_proj, sname, mpname, proj_dir, weblyrname):
    agol_serv_con = 'My Hosted Services'