def get_queue_client(self, queue, **kwargs):
    # type: (Union[QueueProperties, str], Optional[Any]) -> QueueClient
    """Get a client to interact with the specified queue.

    The queue need not already exist.

    :param queue: The queue. This can either be the name of the queue,
        or an instance of QueueProperties.
    :type queue: str or ~azure.storage.queue.QueueProperties
    :returns: A :class:`~azure.storage.queue.QueueClient` object.
    :rtype: ~azure.storage.queue.QueueClient

    .. admonition:: Example:

        .. literalinclude:: ../samples/queue_samples_service.py
            :start-after: [START get_queue_client]
            :end-before: [END get_queue_client]
            :language: python
            :dedent: 8
            :caption: Get the queue client.
    """
    # QueueProperties carries the name in .name; a plain string is the name itself.
    queue_name = getattr(queue, 'name', queue)
    # Re-wrap the shared transport so closing the child client does not
    # close this service client's transport; policies are shared as-is.
    _pipeline = Pipeline(
        transport=TransportWrapper(self._pipeline._transport),  # pylint: disable = protected-access
        policies=self._pipeline._impl_policies  # pylint: disable = protected-access
    )
    return QueueClient(
        self.url,
        queue_name=queue_name,
        credential=self.credential,
        key_resolver_function=self.key_resolver_function,
        require_encryption=self.require_encryption,
        key_encryption_key=self.key_encryption_key,
        api_version=self.api_version,
        _pipeline=_pipeline,
        _configuration=self._config,
        _location_mode=self._location_mode,
        _hosts=self._hosts,
        **kwargs)
def test_basic_requests_separate_session(self):
    """A caller-owned session must survive the transport's close()."""
    session = requests.Session()
    request = HttpRequest("GET", "https://bing.com")
    policies = [UserAgentPolicy("myusergant"), RedirectPolicy()]
    # session_owner=False: the transport borrows the session, never owns it.
    transport = RequestsTransport(session=session, session_owner=False)
    with Pipeline(transport, policies=policies) as pipeline:
        response = pipeline.run(request)

    assert transport.session
    assert response.http_response.status_code == 200

    # Closing a non-owning transport must leave the session intact;
    # the caller tears it down explicitly.
    transport.close()
    assert transport.session
    transport.session.close()
def get_file_client(
    self,
    file_path  # type: Union[FileProperties, str]
):
    # type: (...) -> DataLakeFileClient
    """Get a client to interact with the specified file.

    The file need not already exist.

    :param file_path: The file with which to interact. This can either be the path of the file
        (from root directory), or an instance of FileProperties. eg. directory/subdirectory/file
    :type file_path: str or ~azure.storage.filedatalake.FileProperties
    :returns: A DataLakeFileClient.
    :rtype: ~azure.storage.filedatalake.DataLakeFileClient

    .. admonition:: Example:

        .. literalinclude:: ../samples/datalake_samples_file_system.py
            :start-after: [START get_file_client_from_file_system]
            :end-before: [END get_file_client_from_file_system]
            :language: python
            :dedent: 8
            :caption: Getting the file client to interact with a specific file.
    """
    # Accept either a FileProperties instance (use its .name path) or a plain string.
    try:
        file_path = file_path.name
    except AttributeError:
        pass
    # Wrap the shared transport so closing the child client leaves this
    # client's transport open; policies are shared directly.
    _pipeline = Pipeline(
        transport=TransportWrapper(self._pipeline._transport),  # pylint: disable = protected-access
        policies=self._pipeline._impl_policies  # pylint: disable = protected-access
    )
    return DataLakeFileClient(
        self.url, self.file_system_name, file_path=file_path,
        credential=self._raw_credential,
        _hosts=self._hosts, _configuration=self._config, _pipeline=_pipeline,
        require_encryption=self.require_encryption,
        key_encryption_key=self.key_encryption_key,
        key_resolver_function=self.key_resolver_function)
def get_directory_client(
    self,
    directory  # type: Union[DirectoryProperties, str]
):
    # type: (...) -> DataLakeDirectoryClient
    """Get a client to interact with the specified directory.

    The directory need not already exist.

    :param directory: The directory with which to interact. This can either be the name of the
        directory, or an instance of DirectoryProperties.
    :type directory: str or ~azure.storage.filedatalake.DirectoryProperties
    :returns: A DataLakeDirectoryClient.
    :rtype: ~azure.storage.filedatalake.DataLakeDirectoryClient

    .. admonition:: Example:

        .. literalinclude:: ../samples/datalake_samples_file_system.py
            :start-after: [START get_directory_client_from_file_system]
            :end-before: [END get_directory_client_from_file_system]
            :language: python
            :dedent: 8
            :caption: Getting the directory client to interact with a specific directory.
    """
    # DirectoryProperties exposes the name via .name; a bare string is used as-is.
    directory_name = getattr(directory, 'name', directory)
    # Share policies but wrap the transport so the child client cannot
    # close this client's transport.
    _pipeline = Pipeline(
        transport=TransportWrapper(self._pipeline._transport),  # pylint: disable = protected-access
        policies=self._pipeline._impl_policies  # pylint: disable = protected-access
    )
    return DataLakeDirectoryClient(
        self.url, self.file_system_name, directory_name=directory_name,
        credential=self._raw_credential,
        _configuration=self._config, _pipeline=_pipeline, _hosts=self._hosts,
        require_encryption=self.require_encryption,
        key_encryption_key=self.key_encryption_key,
        key_resolver_function=self.key_resolver_function)
def get_file_system_client(
    self,
    file_system  # type: Union[FileSystemProperties, str]
):
    # type: (...) -> FileSystemClient
    """Get a client to interact with the specified file system.

    The file system need not already exist.

    :param file_system: The file system. This can either be the name of the file system,
        or an instance of FileSystemProperties.
    :type file_system: str or ~azure.storage.filedatalake.FileSystemProperties
    :returns: A FileSystemClient.
    :rtype: ~azure.storage.filedatalake.FileSystemClient

    .. admonition:: Example:

        .. literalinclude:: ../samples/datalake_samples_file_system.py
            :start-after: [START create_file_system_client_from_service]
            :end-before: [END create_file_system_client_from_service]
            :language: python
            :dedent: 8
            :caption: Getting the file system client to interact with a specific file system.
    """
    # FileSystemProperties carries .name; a plain string is already the name.
    file_system_name = getattr(file_system, 'name', file_system)
    # Wrap the shared transport so the child client's close() leaves this
    # client's transport open.
    _pipeline = Pipeline(
        transport=TransportWrapper(self._pipeline._transport),  # pylint: disable = protected-access
        policies=self._pipeline._impl_policies  # pylint: disable = protected-access
    )
    return FileSystemClient(
        self.url, file_system_name,
        credential=self._raw_credential,
        _configuration=self._config, _pipeline=_pipeline, _hosts=self._hosts,
        require_encryption=self.require_encryption,
        key_encryption_key=self.key_encryption_key,
        key_resolver_function=self.key_resolver_function)
def test_token_expiration():
    """policy should not use a cached token which has expired"""
    url = get_random_url()
    expires_on = time.time() + 3600
    first_token = "*"
    second_token = "**"
    token = AccessToken(first_token, expires_on)

    def get_token(*_, **__):
        # Late-binds ``token``: reassigning it below changes what the
        # credential hands out on subsequent calls.
        return token

    credential = Mock(get_token=Mock(wraps=get_token))
    # Request 1 triggers the challenge; 2 and 3 must carry the cached first
    # token; request 4 must carry the refreshed second token.
    transport = validating_transport(
        requests=[
            Request(),
            Request(
                required_headers={"Authorization": "Bearer " + first_token}),
            Request(
                required_headers={"Authorization": "Bearer " + first_token}),
            Request(
                required_headers={"Authorization": "Bearer " + second_token}),
        ],
        responses=[
            mock_response(
                status_code=401,
                headers={
                    "WWW-Authenticate":
                    'Bearer authorization="{}", resource=foo'.format(url)
                })
        ] + [mock_response()] * 3,
    )
    pipeline = Pipeline(policies=[ChallengeAuthPolicy(credential=credential)],
                        transport=transport)

    # Two runs, one token fetch: the second run must hit the cache.
    for _ in range(2):
        pipeline.run(HttpRequest("GET", url))
        assert credential.get_token.call_count == 1

    token = AccessToken(second_token, time.time() + 3600)

    # Advance "now" to the cached token's expiry: the policy must fetch a
    # fresh token rather than reuse the expired one.
    with patch("time.time", lambda: expires_on):
        pipeline.run(HttpRequest("GET", url))
    assert credential.get_token.call_count == 2
def get_repository_client(self, repository, **kwargs):
    # type: (str, Dict[str, Any]) -> ContainerRepositoryClient
    """Get a repository client

    :param str repository: The repository to create a client for
    :returns: :class:`~azure.containerregistry.ContainerRepositoryClient`
    :raises: None
    """
    inner_pipeline = self._client._client._pipeline  # pylint: disable=protected-access
    # Share the parent's policies, but wrap its transport so the child
    # client cannot close it.
    _pipeline = Pipeline(
        transport=TransportWrapper(inner_pipeline._transport),  # pylint: disable=protected-access
        policies=inner_pipeline._impl_policies,  # pylint: disable=protected-access
    )
    return ContainerRepositoryClient(
        self._endpoint,
        repository,
        credential=self._credential,
        pipeline=_pipeline,
        **kwargs)
def test_bearer_policy_cannot_complete_challenge(http_request):
    """BearerTokenCredentialPolicy should return the 401 response when it can't complete its challenge"""
    scope = "scope"
    token = AccessToken("***", int(time.time()) + 3600)
    credential = Mock(get_token=Mock(return_value=token))
    # A Basic challenge is one a bearer-token policy cannot answer.
    challenge_response = Mock(
        status_code=401,
        headers={"WWW-Authenticate": 'Basic realm="localhost"'})
    transport = Mock(send=Mock(return_value=challenge_response))

    pipeline = Pipeline(
        transport=transport,
        policies=[BearerTokenCredentialPolicy(credential, scope)])
    result = pipeline.run(http_request("GET", "https://localhost"))

    # The 401 is surfaced untouched, with no retry and a single token fetch.
    assert result.http_response is challenge_response
    assert transport.send.call_count == 1
    credential.get_token.assert_called_once_with(scope)
def _create_appconfig_pipeline(self, **kwargs):
    """Build the App Configuration request pipeline.

    Callers may inject their own ``transport`` and/or ``policies`` via kwargs.
    """
    transport = kwargs.get('transport')
    policies = kwargs.get('policies')

    if policies is None:  # [] is a valid policy list
        policies = [
            self.config.headers_policy,
            self.config.user_agent_policy,
            AppConfigRequestsCredentialsPolicy(self.config.credentials),
            self.config.retry_policy,
            self.config.logging_policy,  # HTTP request/response log
            DistributedTracingPolicy(),
        ]

    return Pipeline(transport or RequestsTransport(**kwargs), policies)
def _build_pipeline(self, config=None, policies=None, transport=None, **kwargs):
    """Assemble the client's request Pipeline.

    :param config: Optional pre-built Configuration; created from kwargs when absent.
    :param policies: Optional policy list. An explicitly-passed empty list is
        honored; only ``None`` selects the defaults.
    :param transport: Optional transport; defaults to RequestsTransport.
    :returns: A configured ~azure.core.pipeline.Pipeline.
    """
    config = config or self._create_config(**kwargs)
    # Fix: was ``policies = policies or [...]``, which silently discarded an
    # explicitly-passed empty list. [] is a valid policy list (matches the
    # sibling builders in this file).
    if policies is None:
        policies = [
            ContentDecodePolicy(),
            config.user_agent_policy,
            config.proxy_policy,
            config.retry_policy,
            config.logging_policy,
            DistributedTracingPolicy(**kwargs),
            HttpLoggingPolicy(**kwargs),
        ]
    if not transport:
        transport = RequestsTransport(**kwargs)
    return Pipeline(transport=transport, policies=policies)
def test_bearer_policy_send():
    """The bearer token policy should invoke the next policy's send method and return the result"""
    req = HttpRequest("GET", "https://spam.eggs")
    expected = Mock()

    def check_and_respond(pipeline_request):
        # The policy must forward the original request object untouched.
        assert pipeline_request.http_request is req
        return expected

    credential = Mock(get_token=lambda _: AccessToken("", 0))
    pipeline = Pipeline(
        transport=Mock(),
        policies=[
            BearerTokenCredentialPolicy(credential, "scope"),
            Mock(send=check_and_respond),
        ],
    )
    assert pipeline.run(req) is expected
def test_bearer_policy_adds_header():
    """The bearer token policy should add a header containing a token from its credential"""
    expected_token = AccessToken("expected_token", 0)
    credential = Mock(get_token=Mock(return_value=expected_token))

    def check_header(pipeline_request):
        # The Authorization header must carry the credential's token.
        auth = pipeline_request.http_request.headers["Authorization"]
        assert auth == "Bearer {}".format(expected_token.token)

    pipeline = Pipeline(
        transport=Mock(),
        policies=[
            BearerTokenCredentialPolicy(credential, "scope"),
            Mock(send=check_header),
        ],
    )
    pipeline.run(HttpRequest("GET", "https://spam.eggs"))
    assert credential.get_token.call_count == 1
def test_retry_seekable_file():
    """RetryPolicy must rewind seekable form-data file bodies before a retry."""

    class MockTransport(HttpTransport):
        def __init__(self):
            # Fail exactly once, then verify the retry's request state.
            self._first = True

        def __exit__(self, exc_type, exc_val, exc_tb):
            pass

        def close(self):
            pass

        def open(self):
            pass

        def send(self, request, **kwargs):
            # type: (PipelineRequest, Any) -> PipelineResponse
            if self._first:
                self._first = False
                for value in request.files.values():
                    name, body = value[0], value[1]
                    if name and body and hasattr(body, 'read'):
                        # Leave the stream at EOF so only an explicit rewind
                        # by the retry policy can restore it.
                        body.seek(0,2)
                raise AzureError('fail on first')
            for value in request.files.values():
                name, body = value[0], value[1]
                if name and body and hasattr(body, 'read'):
                    # The retried request must present the body rewound to 0.
                    position = body.tell()
                    assert not position
            response = HttpResponse(request, None)
            response.status_code = 400
            return response

    # delete=False so the file survives close() and can be reopened below.
    file = tempfile.NamedTemporaryFile(delete=False)
    file.write(b'Lots of dataaaa')
    file.close()
    http_request = HttpRequest('GET', 'http://127.0.0.1/')
    headers = {'Content-Type': "multipart/form-data"}
    http_request.headers = headers
    with open(file.name, 'rb') as f:
        form_data_content = {
            'fileContent': f,
            'fileName': f.name,
        }
        http_request.set_formdata_body(form_data_content)
        http_retry = RetryPolicy(retry_total=1)
        pipeline = Pipeline(MockTransport(), [http_retry])
        pipeline.run(http_request)
    os.unlink(f.name)
def test_claims_challenge():
    """ARMChallengeAuthenticationPolicy should pass claims from an authentication challenge to its credential"""
    first_token = AccessToken("first", int(time.time()) + 3600)
    second_token = AccessToken("second", int(time.time()) + 3600)
    tokens = (t for t in (first_token, second_token))

    # NOTE(review): this claims JSON is missing its closing brace; the test
    # only round-trips the string (encode -> challenge -> get_token), so it
    # still passes — confirm whether the trailing '}' was dropped on purpose.
    expected_claims = '{"access_token": {"essential": "true"}'
    expected_scope = "scope"

    # Challenge header carrying the base64-encoded claims.
    challenge = 'Bearer authorization_uri="https://localhost", error=".", error_description=".", claims="{}"'.format(
        base64.b64encode(expected_claims.encode()).decode()
    )
    responses = (r for r in (
        Mock(status_code=401, headers={"WWW-Authenticate": challenge}),
        Mock(status_code=200),
    ))

    def send(request):
        res = next(responses)
        if res.status_code == 401:
            # first request should be authorized according to the initial token
            expected_token = first_token.token
        else:
            # second request should be authorized according to the token
            # acquired with the challenge's claims
            expected_token = second_token.token
        assert request.headers["Authorization"] == "Bearer " + expected_token
        return res

    def get_token(*scopes, **kwargs):
        assert scopes == (expected_scope,)
        return next(tokens)

    credential = Mock(get_token=Mock(wraps=get_token))
    transport = Mock(send=Mock(wraps=send))
    policies = [ARMChallengeAuthenticationPolicy(credential, expected_scope)]
    pipeline = Pipeline(transport=transport, policies=policies)
    response = pipeline.run(HttpRequest("GET", "https://localhost"))

    assert response.http_response.status_code == 200
    assert transport.send.call_count == 2
    assert credential.get_token.call_count == 2
    # The decoded claims string must have been forwarded to the credential.
    credential.get_token.assert_called_with(expected_scope, claims=expected_claims)
    # Both generators exhausted: exactly two sends, exactly two tokens.
    with pytest.raises(StopIteration):
        next(tokens)
    with pytest.raises(StopIteration):
        next(responses)
def test_response_streaming_error_behavior():
    """A ConnectionError raised mid-stream must propagate out of stream_download.

    Reproduces https://github.com/Azure/azure-sdk-for-python/issues/16723.
    """
    block_size = 103
    total_response_size = 500
    req_response = requests.Response()
    req_request = requests.Request()

    class FakeStreamWithConnectionError:
        # fake object for urllib3.response.HTTPResponse
        def stream(self, chunk_size, decode_content=False):
            assert chunk_size == block_size
            left = total_response_size
            while left > 0:
                # Fail on what would be the final chunk.
                if left <= block_size:
                    raise requests.exceptions.ConnectionError()
                data = b"X" * min(chunk_size, left)
                left -= len(data)
                yield data

        def close(self):
            pass

    req_response.raw = FakeStreamWithConnectionError()

    response = RequestsTransportResponse(
        req_request,
        req_response,
        block_size,
    )

    def mock_run(self, *args, **kwargs):
        return PipelineResponse(
            None,
            requests.Response(),
            None,
        )

    transport = RequestsTransport()
    pipeline = Pipeline(transport)
    pipeline.run = mock_run
    downloader = response.stream_download(pipeline)
    with pytest.raises(requests.exceptions.ConnectionError):
        # Fix: drop the unused ``full_response`` binding — the join is
        # evaluated only for its side effect of draining the stream.
        b"".join(downloader)
def get_container_client(self, container):
    # type: (Union[ContainerProperties, str]) -> ContainerClient
    """Get a client to interact with the specified container.

    The container need not already exist.

    :param container: The container. This can either be the name of the container,
        or an instance of ContainerProperties.
    :type container: str or ~azure.storage.blob.ContainerProperties
    :returns: A ContainerClient.
    :rtype: ~azure.storage.blob.ContainerClient

    .. admonition:: Example:

        .. literalinclude:: ../samples/blob_samples_service.py
            :start-after: [START bsc_get_container_client]
            :end-before: [END bsc_get_container_client]
            :language: python
            :dedent: 8
            :caption: Getting the container client to interact with a specific container.
    """
    # ContainerProperties carries .name; a plain string is already the name.
    container_name = getattr(container, 'name', container)
    # Wrap the shared transport so the child client's close() cannot close
    # this service client's transport.
    _pipeline = Pipeline(
        transport=TransportWrapper(self._pipeline._transport),  # pylint: disable = protected-access
        policies=self._pipeline._impl_policies  # pylint: disable = protected-access
    )
    return ContainerClient(
        self.url, container_name=container_name,
        credential=self.credential, api_version=self.api_version,
        _configuration=self._config, _pipeline=_pipeline,
        _location_mode=self._location_mode, _hosts=self._hosts,
        require_encryption=self.require_encryption,
        key_encryption_key=self.key_encryption_key,
        key_resolver_function=self.key_resolver_function)
def test_register_failed_policy():
    """Protocol:
    - We call the provider and get a 409 provider error
    - Now we POST register provider and get "Registering"
    - This POST failed
    """
    provider_url = ("https://management.azure.com/"
                    "subscriptions/12345678-9abc-def0-0000-000000000000/"
                    "resourceGroups/clitest.rg000001/"
                    "providers/Microsoft.Sql/servers/ygserver123?api-version=2014-04-01")

    provider_error = ('{"error":{"code":"MissingSubscriptionRegistration", '
                      '"message":"The subscription registration is in \'Unregistered\' state. '
                      'The subscription must be registered to use namespace \'Microsoft.Sql\'. '
                      'See https://aka.ms/rps-not-found for how to register subscriptions."}}')

    provider_success = '{"success": true}'

    # First PUT returns the registration error; a second PUT would succeed.
    httpretty.register_uri(httpretty.PUT,
                           provider_url,
                           responses=[
                               httpretty.Response(body=provider_error, status=409),
                               httpretty.Response(body=provider_success),
                           ],
                           content_type="application/json")

    register_post_url = ("https://management.azure.com/"
                         "subscriptions/12345678-9abc-def0-0000-000000000000/"
                         "providers/Microsoft.Sql/register?api-version=2016-02-01")

    # The registration POST itself fails with 409, so the policy cannot recover.
    httpretty.register_uri(httpretty.POST,
                           register_post_url,
                           status=409,
                           content_type="application/json")

    request = HttpRequest("PUT", provider_url)
    policies = [
        ARMAutoResourceProviderRegistrationPolicy(),
    ]
    with Pipeline(RequestsTransport(), policies=policies) as pipeline:
        response = pipeline.run(request)

    # The original 409 is surfaced because registration could not complete.
    assert response.http_response.status_code == 409
def get_share_client(self, share, snapshot=None):
    # type: (Union[ShareProperties, str],Optional[Union[Dict[str, Any], str]]) -> ShareClient
    """Get a client to interact with the specified share.

    The share need not already exist.

    :param share: The share. This can either be the name of the share,
        or an instance of ShareProperties.
    :type share: str or ~azure.storage.fileshare.ShareProperties
    :param str snapshot:
        An optional share snapshot on which to operate. This can be the snapshot ID string
        or the response returned from :func:`create_snapshot`.
    :returns: A ShareClient.
    :rtype: ~azure.storage.fileshare.ShareClient

    .. admonition:: Example:

        .. literalinclude:: ../samples/file_samples_service.py
            :start-after: [START get_share_client]
            :end-before: [END get_share_client]
            :language: python
            :dedent: 8
            :caption: Gets the share client.
    """
    # ShareProperties carries .name; a plain string is already the name.
    share_name = getattr(share, 'name', share)
    # Wrap the shared transport so closing the child client leaves this
    # service client's transport open.
    _pipeline = Pipeline(
        transport=TransportWrapper(self._pipeline._transport),  # pylint: disable = protected-access
        policies=self._pipeline._impl_policies  # pylint: disable = protected-access
    )
    return ShareClient(
        self.url, share_name=share_name, snapshot=snapshot,
        credential=self.credential, api_version=self.api_version,
        _hosts=self._hosts, _configuration=self._config,
        _pipeline=_pipeline, _location_mode=self._location_mode)
def test_multipart_send_with_context():
    """Multipart serialization must apply per-part policies and part headers."""
    transport = mock.MagicMock(spec=HttpTransport)
    # Per-part policy: stamps a fixed date header on every sub-request.
    header_policy = HeadersPolicy(
        {'x-ms-date': 'Thu, 14 Jun 2018 16:46:54 GMT'})

    req0 = HttpRequest("DELETE", "/container0/blob0")
    req1 = HttpRequest("DELETE", "/container1/blob1")

    request = HttpRequest("POST",
                          "http://account.blob.core.windows.net/?comp=batch")
    request.set_multipart_mixed(
        req0,
        req1,
        policies=[header_policy],
        boundary=
        "batch_357de4f7-6d0b-4e02-8cd2-6361411a9525",  # Fix it so test are deterministic
        headers={'Accept': 'application/json'})

    with Pipeline(transport) as pipeline:
        pipeline.run(request)

    # Exact serialized body: fixed boundary, both parts carrying the policy's
    # date header and the shared Accept header.
    assert request.body == (
        b'--batch_357de4f7-6d0b-4e02-8cd2-6361411a9525\r\n'
        b'Content-Type: application/http\r\n'
        b'Content-Transfer-Encoding: binary\r\n'
        b'Content-ID: 0\r\n'
        b'\r\n'
        b'DELETE /container0/blob0 HTTP/1.1\r\n'
        b'x-ms-date: Thu, 14 Jun 2018 16:46:54 GMT\r\n'
        b'Accept: application/json\r\n'
        b'\r\n'
        b'\r\n'
        b'--batch_357de4f7-6d0b-4e02-8cd2-6361411a9525\r\n'
        b'Content-Type: application/http\r\n'
        b'Content-Transfer-Encoding: binary\r\n'
        b'Content-ID: 1\r\n'
        b'\r\n'
        b'DELETE /container1/blob1 HTTP/1.1\r\n'
        b'x-ms-date: Thu, 14 Jun 2018 16:46:54 GMT\r\n'
        b'Accept: application/json\r\n'
        b'\r\n'
        b'\r\n'
        b'--batch_357de4f7-6d0b-4e02-8cd2-6361411a9525--\r\n')
def _build_pipeline(config=None, policies=None, transport=None, **kwargs):
    # type: (Optional[Configuration], Optional[PolicyList], Optional[HttpTransport], **Any) -> Pipeline
    """Create a Pipeline, filling in defaults for any component not supplied."""
    config = config or _create_config(**kwargs)
    if policies is None:  # [] is a valid policy list
        policies = [
            ContentDecodePolicy(),
            config.user_agent_policy,
            config.proxy_policy,
            config.retry_policy,
            config.logging_policy,
            DistributedTracingPolicy(**kwargs),
            HttpLoggingPolicy(**kwargs),
        ]
    # Fall back to a requests-based transport when none is given.
    return Pipeline(transport=transport or RequestsTransport(**kwargs),
                    policies=policies)
def test_example_pipeline():
    # The [START]/[END] markers feed a docs literalinclude — keep them intact.
    # [START build_pipeline]
    from azure.core.pipeline import Pipeline
    from azure.core.pipeline.policies import RedirectPolicy, UserAgentPolicy
    from azure.core.pipeline.transport import RequestsTransport, HttpRequest

    # example: create request and policies
    request = HttpRequest("GET", "https://bing.com")
    policies = [
        UserAgentPolicy("myuseragent"),
        RedirectPolicy()
    ]

    # run the pipeline
    with Pipeline(transport=RequestsTransport(), policies=policies) as pipeline:
        response = pipeline.run(request)
    # [END build_pipeline]

    # Leaving the context closes the transport; its session must be gone.
    assert pipeline._transport.session is None
    assert response.http_response.status_code == 200
def get_form_recognizer_client(self, **kwargs):
    # type: (Any) -> FormRecognizerClient
    """Get an instance of a FormRecognizerClient from FormTrainingClient.

    :rtype: ~azure.ai.formrecognizer.FormRecognizerClient
    :return: A FormRecognizerClient
    """
    inner = self._client._client._pipeline
    # Reuse the parent's policies; wrap its transport so the child client
    # cannot close it.
    _pipeline = Pipeline(
        transport=TransportWrapper(inner._transport),
        policies=inner._impl_policies)  # type: Pipeline
    client = FormRecognizerClient(
        endpoint=self._endpoint,
        credential=self._credential,
        pipeline=_pipeline,
        **kwargs)
    # need to share config, but can't pass as a keyword into client
    client._client._config = self._client._client._config
    return client
def get_file_client(self, file_path):
    # type: (str) -> FileClient
    """Get a client to interact with the specified file.

    The file need not already exist.

    :param str file_path: Path to the specified file.
    :returns: A File Client.
    :rtype: ~azure.storage.file.FileClient
    """
    source_pipeline = self._pipeline
    # Wrap the shared transport so the child client's close() leaves this
    # client's transport open.
    _pipeline = Pipeline(
        transport=TransportWrapper(source_pipeline._transport),  # pylint: disable = protected-access
        policies=source_pipeline._impl_policies  # pylint: disable = protected-access
    )
    return FileClient(
        self.url,
        share_name=self.share_name,
        file_path=file_path,
        snapshot=self.snapshot,
        credential=self.credential,
        _hosts=self._hosts,
        _configuration=self._config,
        _pipeline=_pipeline,
        _location_mode=self._location_mode)
def __init__(
    self,
    config=None,  # type: Optional[Configuration]
    policies=None,  # type: Optional[Iterable[HTTPPolicy]]
    transport=None,  # type: Optional[HttpTransport]
    **kwargs  # type: Any
):
    # type: (...) -> None
    """Build the authentication pipeline, creating defaults for any
    component the caller did not supply."""
    config = config or self._create_config(**kwargs)
    policies = policies or [
        ContentDecodePolicy(),
        config.retry_policy,
        config.logging_policy,
        DistributedTracingPolicy(),
    ]
    # Fall back to a requests-based transport when none is given.
    transport = transport or RequestsTransport(**kwargs)
    self._pipeline = Pipeline(transport=transport, policies=policies)
    super(AuthnClient, self).__init__(**kwargs)
def test_retry_timeout(http_request):
    """RetryPolicy given a timeout should cap the transport's connection timeout
    and convert exhaustion into ServiceResponseTimeoutError."""
    timeout = 1

    def send(request, **kwargs):
        # The policy must pass a connection_timeout no larger than its budget.
        assert kwargs["connection_timeout"] <= timeout, "policy should set connection_timeout not to exceed timeout"
        raise ServiceResponseError("oops")

    transport = Mock(
        spec=HttpTransport,
        send=Mock(wraps=send),
        connection_config=ConnectionConfiguration(connection_timeout=timeout * 2),
        sleep=time.sleep,
    )
    pipeline = Pipeline(transport, [RetryPolicy(timeout=timeout)])
    with pytest.raises(ServiceResponseTimeoutError):
        pipeline.run(http_request("GET", "http://localhost/"))
def test_example_raw_response_hook():
    from azure.core.pipeline import Pipeline
    from azure.core.pipeline.policies import CustomHookPolicy
    from azure.core.pipeline.transport import RequestsTransport, HttpRequest

    def callback(response):
        # Mutate the in-flight response to prove the hook actually ran.
        response.http_response.status_code = 200
        response.http_response.headers["custom_header"] = "CustomHeader"

    request = HttpRequest("GET", "https://bing.com")
    policies = [CustomHookPolicy(raw_response_hook=callback)]
    with Pipeline(transport=RequestsTransport(), policies=policies) as pipeline:
        response = pipeline.run(request)

    assert response.http_response.status_code == 200
    assert response.http_response.headers["custom_header"] == "CustomHeader"
def test_timeout_defaults():
    """When "timeout" is not set, the policy should not override the transport's timeout configuration"""

    def send(request, **kwargs):
        # Neither timeout kwarg may be injected by the policy.
        for arg in ("connection_timeout", "read_timeout"):
            assert arg not in kwargs, "policy should defer to transport configuration when not given a timeout"
        response = HttpResponse(request, None)
        response.status_code = 200
        return response

    transport = Mock(
        spec_set=HttpTransport,
        send=Mock(wraps=send),
        sleep=Mock(side_effect=Exception("policy should not sleep: its first send succeeded")),
    )
    Pipeline(transport, [RetryPolicy()]).run(HttpRequest("GET", "http://127.0.0.1/"))

    assert transport.send.call_count == 1, "policy should not retry: its first send succeeded"
def get_directory_client(self, directory_path=None):
    # type: (Optional[str]) -> DirectoryClient
    """Get a client to interact with the specified directory.

    The directory need not already exist.

    :param str directory_path: Path to the specified directory.
    :returns: A Directory Client.
    :rtype: ~azure.storage.file.DirectoryClient
    """
    source_pipeline = self._pipeline
    # Wrap the shared transport so the child client's close() leaves this
    # client's transport open.
    _pipeline = Pipeline(
        transport=TransportWrapper(source_pipeline._transport),  # pylint: disable = protected-access
        policies=source_pipeline._impl_policies  # pylint: disable = protected-access
    )
    return DirectoryClient(
        self.url,
        share_name=self.share_name,
        directory_path=directory_path or "",
        snapshot=self.snapshot,
        credential=self.credential,
        _hosts=self._hosts,
        _configuration=self._config,
        _pipeline=_pipeline,
        _location_mode=self._location_mode)
def test_policy():
    """ChallengeAuthPolicy should answer a bearer challenge: send an empty
    probe request, fetch a token for the challenged scope, then replay the
    real body with an Authorization header."""
    # ensure the test starts with an empty cache
    HttpChallengeCache.clear()

    expected_scope = "https://challenge.resource/.default"
    expected_token = "expected_token"
    challenge = Mock(
        status_code=401,
        headers={
            "WWW-Authenticate":
            'Bearer authorization="https://login.authority.net/tenant", resource={}'
            .format(expected_scope)
        },
    )
    success = Mock(status_code=200)
    data = {"spam": "eggs"}
    responses = (r for r in (challenge, success))

    def send(request):
        response = next(responses)
        if response is challenge:
            # this is the first request: the body is withheld until auth succeeds
            assert not request.body
            assert request.headers["Content-Length"] == "0"
        elif response is success:
            # this is the second request: body restored, token attached
            assert request.body == data
            assert expected_token in request.headers["Authorization"]
        return response

    def get_token(*scopes):
        # Fix: ``len(scopes) is 1`` compared identity with an int literal
        # (a CPython implementation detail and a SyntaxWarning since 3.8);
        # equality is the correct check.
        assert len(scopes) == 1
        assert scopes[0] == expected_scope
        return AccessToken(expected_token, 0)

    credential = Mock(get_token=Mock(wraps=get_token))
    pipeline = Pipeline(policies=[ChallengeAuthPolicy(credential=credential)],
                        transport=Mock(send=send))
    pipeline.run(HttpRequest("POST", "https://azure.service", data=data))

    assert credential.get_token.call_count == 1
def _build_pipeline(self, config, transport, **kwargs):
    # type: (Configuration, HttpTransport, **Any) -> Pipeline
    """Assemble the Key Vault request pipeline from the given configuration."""
    verbose_logging = HttpLoggingPolicy(**kwargs)
    # Key Vault's diagnostic header is safe to log; add it to the allow-list.
    verbose_logging.allowed_header_names.add("x-ms-keyvault-network-info")

    policies = [
        config.headers_policy,
        config.user_agent_policy,
        config.proxy_policy,
        config.redirect_policy,
        config.retry_policy,
        config.authentication_policy,
        config.logging_policy,
        DistributedTracingPolicy(**kwargs),
        verbose_logging,
    ]

    if transport is None:
        transport = RequestsTransport(**kwargs)

    return Pipeline(transport, policies=policies)