Example #1
0
 def test_known_cloud_missing_endpoint(self):
     """An endpoint deleted from a known cloud's config is restored by a forced re-init."""
     with mock.patch('azure.cli.core.cloud.CLOUD_CONFIG_FILE', tempfile.mkstemp()[1]) as\
             config_file:
         # Seed the config file with every known cloud.
         init_known_clouds()
         public_cloud = get_cloud(AZURE_PUBLIC_CLOUD.name)
         self.assertEqual(public_cloud.endpoints.batch_resource_id,
                          AZURE_PUBLIC_CLOUD.endpoints.batch_resource_id)
         # Drop a single endpoint from the persisted config, leaving the rest intact.
         parser = get_config_parser()
         parser.read(config_file)
         parser.remove_option(AZURE_PUBLIC_CLOUD.name,
                              'endpoint_batch_resource_id')
         with open(config_file, 'w') as stream:
             parser.write(stream)
         # Confirm the removal round-tripped through the file.
         parser.read(config_file)
         self.assertFalse(
             parser.has_option(AZURE_PUBLIC_CLOUD.name,
                               'endpoint_batch_resource_id'))
         # With 'force', re-initializing the known clouds must add the endpoint back.
         init_known_clouds(force=True)
         parser.read(config_file)
         self.assertTrue(
             parser.has_option(AZURE_PUBLIC_CLOUD.name,
                               'endpoint_batch_resource_id'),
             'Expected the missing endpoint to be added but it was not.')
         restored = parser.get(AZURE_PUBLIC_CLOUD.name,
                               'endpoint_batch_resource_id')
         self.assertEqual(restored,
                          AZURE_PUBLIC_CLOUD.endpoints.batch_resource_id)
Example #2
0
    def test_get_clouds_concurrent(self):
        """Many processes reading the clouds at once must not corrupt the config file."""
        with mock.patch('azure.cli.core.cloud.CLOUD_CONFIG_FILE',
                        tempfile.mkstemp()[1]) as config_file:
            init_known_clouds()

            worker_count = 100
            pool = multiprocessing.Pool(worker_count)
            pool.map(_helper_get_clouds, range(worker_count))
            pool.close()
            pool.join()
            # The file should still parse cleanly after the concurrent access.
            parser = get_config_parser()
            parser.read(config_file)
            for known_cloud in KNOWN_CLOUDS:
                get_cloud(known_cloud.name)
Example #3
0
def _authentication_context_factory(tenant, cache):
    """Create an adal.AuthenticationContext for *tenant* against the active cloud's AAD authority.

    Falls back to the common tenant when *tenant* is falsy. ADFS authorities
    (URL ending in '/adfs') are not tenant-scoped and skip authority validation.
    """
    import adal
    authority = CLOUD.endpoints.active_directory
    adfs = authority.lower().endswith('/adfs')
    if not adfs:
        authority = '/'.join([authority, tenant or _COMMON_TENANT])
    return adal.AuthenticationContext(
        authority, cache=cache, api_version=None, validate_authority=not adfs)

# Module-level alias for the factory; presumably kept so callers/tests can
# substitute a different factory — TODO confirm against usages elsewhere.
_AUTH_CTX_FACTORY = _authentication_context_factory

# Make sure the cloud config contains every known cloud ('force' re-writes
# entries), then resolve the cloud the CLI is currently targeting.
init_known_clouds(force=True)
CLOUD = get_active_cloud()

# Lazy %-formatting keeps this cheap when debug logging is disabled.
logger.debug('Current cloud config:\n%s', str(CLOUD))


def _load_tokens_from_file(file_path):
    all_entries = []
    if os.path.isfile(file_path):
        all_entries = get_file_json(file_path, throw_on_empty=False) or []
    return all_entries


def _delete_file(file_path):
    try:
        os.remove(file_path)
Example #4
0
# Fallback tenant segment appended to the authority URL when no explicit
# tenant is supplied (see _authentication_context_factory).
_COMMON_TENANT = 'common'


def _authentication_context_factory(tenant, cache):
    """Create an adal.AuthenticationContext for *tenant* against the active cloud's AAD authority.

    Falls back to the common tenant when *tenant* is falsy. ADFS authorities
    (URL ending in '/adfs') are not tenant-scoped and skip authority validation.
    """
    import adal  # imported lazily so the module loads without adal installed
    authority_url = CLOUD.endpoints.active_directory
    is_adfs = authority_url.lower().endswith('/adfs')
    if not is_adfs:
        # Scope the authority to the requested (or common) tenant.
        authority_url = authority_url + '/' + (tenant or _COMMON_TENANT)
    return adal.AuthenticationContext(authority_url, cache=cache, api_version=None,
                                      validate_authority=(not is_adfs))


# Module-level alias for the factory; presumably kept so callers/tests can
# substitute a different factory — TODO confirm against usages elsewhere.
_AUTH_CTX_FACTORY = _authentication_context_factory

# Make sure the cloud config contains every known cloud ('force' re-writes
# entries), then resolve the cloud the CLI is currently targeting.
init_known_clouds(force=True)
CLOUD = get_active_cloud()

# Lazy %-formatting keeps this cheap when debug logging is disabled.
logger.debug('Current cloud config:\n%s', str(CLOUD))


def _load_tokens_from_file(file_path):
    """Return the token entries stored at *file_path*.

    A missing file, an empty file, or empty JSON all yield an empty list.
    """
    all_entries = []
    if os.path.isfile(file_path):
        # throw_on_empty=False makes an empty file return a falsy value
        # instead of raising; normalize that to [].
        all_entries = get_file_json(file_path, throw_on_empty=False) or []
    return all_entries


def _delete_file(file_path):
    try:
        os.remove(file_path)