def test_get_queue_client_expired_token(self):
        """
        Exception handler should deal with a bad token by clearing
        cache and retrying.  So if we provide a bad token followed
        by a real one in our mock, we expect it to end up getting
        the real token.
        """
        real_token = StorageUtilities.get_storage_token(self.session)

        with patch('c7n_azure.storage_utils.QueueService.create_queue'
                   ) as create_mock:
            with patch(
                    'c7n_azure.storage_utils.StorageUtilities.get_storage_token'
            ) as token_mock:
                error = AzureHttpError('', 403)
                error.error_code = 'AuthenticationFailed'

                # Two side effects: one with a bad token and an error,
                # and one with a good token and no error
                create_mock.side_effect = [error, None]
                token_mock.side_effect = [TokenCredential('fake'), real_token]

                url = "https://fake.queue.core.windows.net/testcc"
                queue_service, queue_name = \
                    StorageUtilities.get_queue_client_by_uri(url, self.session)

                # We end up with the real token (after a retry)
                self.assertEqual(real_token, queue_service.authentication)
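
The exception handler described in the docstring lives in c7n_azure; a minimal sketch of the clear-cache-and-retry pattern it exercises is shown below. The names _token_cache, acquire_token, and call_with_token_retry are hypothetical and used only for illustration.

from azure.common import AzureHttpError

# Hypothetical per-process cache for the storage credential.
_token_cache = {}

def acquire_token(session):
    # Stand-in for StorageUtilities.get_storage_token(session).
    return session.get_bearer_token()

def get_cached_token(session):
    if 'token' not in _token_cache:
        _token_cache['token'] = acquire_token(session)
    return _token_cache['token']

def call_with_token_retry(operation, session):
    # Run operation(token); on AuthenticationFailed, drop the cached
    # token and retry once with a freshly acquired one.
    try:
        return operation(get_cached_token(session))
    except AzureHttpError as error:
        if getattr(error, 'error_code', None) != 'AuthenticationFailed':
            raise
        _token_cache.clear()
        return operation(get_cached_token(session))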
Example #2
    def _get_azure_storage_token():
        tenant_id = os.getenv("AZ_TENANT_ID", "")
        client_id = os.getenv("AZ_CLIENT_ID", "")
        client_secret = os.getenv("AZ_CLIENT_SECRET", "")
        subscription_id = os.getenv("AZ_SUBSCRIPTION_ID", "")

        if not all([tenant_id, client_id, client_secret, subscription_id]):
            return None

        # note the SP must have "Storage Blob Data Owner" perms for this to work
        import adal
        from azure.storage.common import TokenCredential

        authority_url = "https://login.microsoftonline.com/" + tenant_id

        context = adal.AuthenticationContext(authority_url)

        token = context.acquire_token_with_client_credentials(
            "https://storage.azure.com/",
            client_id,
            client_secret)

        token_credential = TokenCredential(token["accessToken"])

        logging.info("Retrieved SP token credential for client_id: %s", client_id)

        return token_credential
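
A usage sketch for the helper above, assuming the legacy azure-storage-blob 2.x BlockBlobService and a hypothetical AZ_STORAGE_ACCOUNT environment variable; when the service-principal variables are unset the helper returns None and key-based auth would be needed instead.

import os
from azure.storage.blob import BlockBlobService

token_credential = _get_azure_storage_token()
if token_credential is not None:
    service = BlockBlobService(
        account_name=os.getenv("AZ_STORAGE_ACCOUNT", ""),  # hypothetical variable
        token_credential=token_credential)
    # Any data-plane call exercises the AAD token.
    print(service.exists("some-container"))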
Example #3
    def test_token_credential(self):
        token_credential = TokenCredential(self.generate_oauth_token())

        # Action 1: make sure token works
        service = QueueService(self.settings.STORAGE_ACCOUNT_NAME, token_credential=token_credential)
        queues = list(service.list_queues())
        self.assertIsNotNone(queues)

        # Action 2: change token value to make request fail
        token_credential.token = "YOU SHALL NOT PASS"
        with self.assertRaises(AzureException):
            queues = list(service.list_queues())
            self.assertIsNone(queues)

        # Action 3: update token to make it working again
        token_credential.token = self.generate_oauth_token()
        queues = list(service.list_queues())
        self.assertIsNotNone(queues)
Example #4
 def _get_service(self):
     token_credential = TokenCredential(self.get_token_func())
     service = BlockBlobService(token_credential=token_credential,
                                account_name=self.aad_account_name,
                                socket_timeout=120)
     service.retry = ExponentialRetry(initial_backoff=5,
                                      increment_base=5,
                                      max_attempts=5).retry
     return service
Example #5
    def test_token_credential(self):
        token_credential = TokenCredential(self.generate_oauth_token())

        # Action 1: make sure token works
        service = BlockBlobService(self.settings.OAUTH_STORAGE_ACCOUNT_NAME,
                                   token_credential=token_credential)
        result = service.exists("test")
        self.assertIsNotNone(result)

        # Action 2: change token value to make request fail
        token_credential.token = "YOU SHALL NOT PASS"
        with self.assertRaises(AzureException):
            result = service.exists("test")
            self.assertIsNone(result)

        # Action 3: update token to make it working again
        token_credential.token = self.generate_oauth_token()
        result = service.exists("test")
        self.assertIsNotNone(result)
Example #6
 def blob_service(self):
     self._get_or_create_storage_account()
     if not self._block_blob_service:
         if self._access_token:
             token_credential = TokenCredential(self._access_token)
             self._block_blob_service = BlockBlobService(
                 account_name=self.storage_account,
                 token_credential=token_credential)
         else:
             self._block_blob_service = BlockBlobService(
                 account_name=self.storage_account,
                 account_key=self.access_key_result.keys[0].value)
     return self._block_blob_service
Example #7
File: core.py Project: j-r77/adlfs
    def _get_token_from_service_principal(self):
        from azure.common.credentials import ServicePrincipalCredentials
        from azure.storage.common import TokenCredential

        sp_cred = ServicePrincipalCredentials(
            client_id=self.client_id,
            secret=self.client_secret,
            tenant=self.tenant_id,
            resource="https://storage.azure.com/",
        )

        token_cred = TokenCredential(sp_cred.token["access_token"])
        return token_cred
Example #8
    def __init__(self,
                 auth: AzureAuth,
                 account_name: str,
                 container_name: str,
                 base_path: str = ""):
        Storage.__init__(self, base_path=base_path)

        self.container_name = container_name
        self.blob_service = BlockBlobService(
            account_name=account_name,
            account_key=auth.key_token,
            token_credential=TokenCredential(auth.service_principal_token),
            connection_string=auth.connection_string_token)
Example #9
 def get_storage_token(session):
     if session.resource_namespace != RESOURCE_STORAGE:
         session = session.get_session_for_resource(RESOURCE_STORAGE)
     return TokenCredential(session.get_bearer_token())
Example #10
 def _get_user_delegation_key(self, key_start_time, key_expiry_time):
     token_credential = TokenCredential(self.generate_oauth_token())
     service = BlockBlobService(self.settings.OAUTH_STORAGE_ACCOUNT_NAME,
                                token_credential=token_credential)
     return service.get_user_delegation_key(key_start_time, key_expiry_time)
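
The user delegation key returned above is normally fed into a SAS generator. A minimal sketch, assuming azure-storage-blob 2.1+ (where generate_blob_shared_access_signature accepts user_delegation_key) and hypothetical account, container, and blob names:

from datetime import datetime, timedelta
from azure.storage.blob import BlockBlobService, BlobPermissions
from azure.storage.common import TokenCredential

token_credential = TokenCredential(generate_oauth_token())   # assumed token factory
service = BlockBlobService("myoauthaccount", token_credential=token_credential)

start = datetime.utcnow()
expiry = start + timedelta(hours=1)
delegation_key = service.get_user_delegation_key(start, expiry)

sas = service.generate_blob_shared_access_signature(
    "mycontainer", "myblob.txt",              # hypothetical container/blob
    permission=BlobPermissions.READ,
    expiry=expiry,
    user_delegation_key=delegation_key)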
Example #11
def save_aad_auditlogs(auditlog_type, tenant_id, client_id, client_secret,
                       storage_account, storage_container):
    METADATA_LAST_DATETIME = "last_datetime"
    METADATA_LAST_EXECUTION = "last_execution"

    log("Save " + auditlog_type + " to " + storage_account + "/" +
        storage_container)

    # Create AAD authentication context to use for obtaining access tokens
    auth_context = AuthenticationContext(AAD_ENDPOINT_URI + tenant_id)

    # Get access token for storage.azure.com
    storage_token_response = auth_context.acquire_token_with_client_credentials(
        "https://storage.azure.com/", client_id, client_secret)

    # Create Azure Blob service client
    blob_service = BlockBlobService(storage_account,
                                    endpoint_suffix=STORAGE_ENDPOINT_SUFFIX,
                                    token_credential=TokenCredential(
                                        storage_token_response['accessToken']))

    # Create container if it does not yet exist
    blob_service.create_container(storage_container, fail_on_exist=False)

    # Get datetime of last record from container metadata
    # NOTE: Date strings have nanosecond precision so would require numpy.datetime64 for parsing
    container_metadata = blob_service.get_container_metadata(storage_container)
    last_datetime = ""
    if METADATA_LAST_DATETIME in container_metadata:
        last_datetime = container_metadata[METADATA_LAST_DATETIME]
    else:
        last_datetime = datetime.strftime(datetime.now() - timedelta(days=90),
                                          "%Y-%m-%dT%H:%M:%S.%fZ")

    log("Previous value container last_datetime=" + last_datetime + "")

    # Get access token for graph.microsoft.com
    graph_token_response = auth_context.acquire_token_with_client_credentials(
        GRAPH_ENDPOINT_URI, client_id, client_secret)

    # Initial request filtered by latest date time with a batch of 500
    if auditlog_type == "directoryAudits":
        datetime_record_name = "activityDateTime"
        graph_uri = (GRAPH_ENDPOINT_URI +
                     'beta/auditLogs/directoryAudits?$top=500&$filter=' +
                     datetime_record_name + ' gt ' + last_datetime)
    elif auditlog_type == "signIns":
        datetime_record_name = "createdDateTime"
        graph_uri = (GRAPH_ENDPOINT_URI +
                     'beta/auditLogs/signIns?$top=500&$filter=' +
                     datetime_record_name + ' gt ' + last_datetime)
    else:
        log("Unknown auditlog_type = " + auditlog_type)
        return

    max_record_datetime = last_datetime

    # May need to loop multiple times to get all of the data and retry throttled requests with status code 429
    request_count = 0
    error_count = 0
    max_requests = 100
    max_errors = 50
    while request_count < max_requests and error_count < max_errors:
        request_count += 1

        # Issue Graph API request
        session = requests.Session()
        session.headers.update(
            {'Authorization': "Bearer " + graph_token_response['accessToken']})
        response = session.get(graph_uri)
        content_length = len(response.content)
        response_json = response.json()

        log("Get " + graph_uri + " returned status_code=" +
            str(response.status_code) + "; content_length=" +
            str(content_length) + "; requests=" + str(request_count) + "/" +
            str(max_requests) + "; errors=" + str(error_count) + "/" +
            str(max_errors))

        if response.status_code != 200:
            error_count += 1
            log("*** ERROR ***")
            log("Headers: " + str(response.headers))
            log("Content: " + response.text)

            if response.status_code == 403:
                # Exit immediately
                log("Permission denied, existing.")
                return
            elif response.status_code == 429:
                # Pause for longer when throttled
                log("Request was throttled, waiting 10 seconds...")
                time.sleep(10.0)
                continue
            else:
                # Pause before retry
                log("Waiting 5 seconds...")
                time.sleep(5.0)
                continue

        # Check if received valid response
        if 'value' in response_json:
            count = len(response_json['value'])

            # Records are ordered in descending order by activityDateTime/createdDateTime, so first record is the
            # newest and last is the oldest
            if count > 0:
                last_record_datetime = response_json['value'][0][
                    datetime_record_name]
                first_record_datetime = response_json['value'][
                    count - 1][datetime_record_name]

                # Upload logs to blob storage
                blob_name = ("logs_" + first_record_datetime.replace(":", "") +
                             "_" + last_record_datetime.replace(":", "") +
                             "_" + str(count) + ".json")
                blob_service.create_blob_from_text(
                    storage_container,
                    blob_name,
                    json.dumps(response_json),
                    encoding='utf-8',
                    content_settings=ContentSettings(
                        content_type='application/json'))

                log("Uploaded " + blob_name + " to " + storage_account + "/" +
                    storage_container)

                if last_record_datetime > max_record_datetime:
                    max_record_datetime = last_record_datetime
            else:
                log("No new data")

            # If there is next page, go to next page. Otherwise, break out of the loop.
            if "@odata.nextLink" in response_json:
                graph_uri = response_json['@odata.nextLink']
                log("Next page found " + graph_uri)
            else:
                break

    # Record the last activityDateTime to filter next set of logs
    blob_service.set_container_metadata(storage_container,
                                        metadata={
                                            METADATA_LAST_DATETIME:
                                            max_record_datetime,
                                            METADATA_LAST_EXECUTION:
                                            str(datetime.now())
                                        })
    log("Recorded new container last_datetime=" + max_record_datetime)
Example #12
 def setUp(self):
     super(StorageClientTest, self).setUp()
     self.account_name = self.settings.STORAGE_ACCOUNT_NAME
     self.account_key = self.settings.STORAGE_ACCOUNT_KEY
     self.sas_token = '?sv=2015-04-05&st=2015-04-29T22%3A18%3A26Z&se=2015-04-30T02%3A23%3A26Z&sr=b&sp=rw&sip=168.1.5.60-168.1.5.70&spr=https&sig=Z%2FRHIX5Xcg0Mq2rqI3OlWTjEg2tYkboXr1P9ZUXDtkk%3D'
     self.token_credential = TokenCredential('initial_token')
Example #13
 def __init__(self):
     # https://docs.microsoft.com/en-us/azure/storage/common/storage-auth-aad-app
     token = TokenCredential(auth.get_token(STORAGE_ENDPOINT))
     self.blob_service = BlockBlobService(
         STORAGE_ACCOUNT, token_credential=token)
Example #14
 def get_storage_token(session):
     if session.resource_endpoint != STORAGE_AUTH_ENDPOINT:
         session = session.get_session_for_resource(STORAGE_AUTH_ENDPOINT)
     return TokenCredential(session.get_bearer_token())