Example #1
def create_iam_role(self):
     role_name = 'ets-role-name-1-%s' % random_chars(10)
     parsed = self.iam_client.create_role(
         RoleName=role_name, AssumeRolePolicyDocument=DEFAULT_ROLE_POLICY)
     arn = parsed['Role']['Arn']
     self.addCleanup(self.iam_client.delete_role, RoleName=role_name)
     return arn
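The helper above leans on two names that are not shown in these excerpts: random_chars and DEFAULT_ROLE_POLICY. A minimal sketch of plausible definitions, assuming a lowercase-alphanumeric suffix generator and an Elastic Transcoder trust policy (both are assumptions, not the exact values from the original test suite):

import json
import random
import string


def random_chars(num_chars):
    # Assumed helper: random lowercase/digit suffix for unique resource names.
    return ''.join(random.choice(string.ascii_lowercase + string.digits)
                   for _ in range(num_chars))


# Assumed trust policy allowing Elastic Transcoder to assume the role.
DEFAULT_ROLE_POLICY = json.dumps({
    "Version": "2012-10-17",
    "Statement": [{
        "Effect": "Allow",
        "Principal": {"Service": "elastictranscoder.amazonaws.com"},
        "Action": "sts:AssumeRole",
    }],
})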
Example #2
 def create_bucket(self):
     bucket_name = 'ets-bucket-1-%s' % random_chars(50)
     self.s3_client.create_bucket(Bucket=bucket_name)
     waiter = self.s3_client.get_waiter('bucket_exists')
     waiter.wait(Bucket=bucket_name)
     self.addCleanup(self.s3_client.delete_bucket, Bucket=bucket_name)
     return bucket_name
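If the default polling cadence is too slow or too aggressive for a given test account, botocore waiters accept an optional WaiterConfig argument on wait(). A sketch of the same helper with explicit (arbitrary) values:

    def create_bucket_with_custom_polling(self):
        # Sketch: identical to create_bucket above, but overriding the
        # waiter's delay (seconds between polls) and maximum attempts.
        bucket_name = 'ets-bucket-1-%s' % random_chars(50)
        self.s3_client.create_bucket(Bucket=bucket_name)
        waiter = self.s3_client.get_waiter('bucket_exists')
        waiter.wait(Bucket=bucket_name,
                    WaiterConfig={'Delay': 5, 'MaxAttempts': 20})
        self.addCleanup(self.s3_client.delete_bucket, Bucket=bucket_name)
        return bucket_name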
Example #3
    def _wait_for_assume_role(self, role_arn, access_key, secret_key,
                              token, attempts, delay):
        # "Why not use the policy simulator?" you might ask. The answer is
        # that the policy simulator will return success far before you can
        # actually make the calls.
        client = self.parent_session.create_client(
            'sts', aws_access_key_id=access_key,
            aws_secret_access_key=secret_key, aws_session_token=token
        )
        attempts_remaining = attempts
        role_session_name = random_chars(10)
        while attempts_remaining > 0:
            attempts_remaining -= 1
            try:
                result = client.assume_role(
                    RoleArn=role_arn, RoleSessionName=role_session_name)
                return result['Credentials']
            except ClientError as e:
                code = e.response.get('Error', {}).get('Code')
                if code in ["InvalidClientTokenId", "AccessDenied"]:
                    time.sleep(delay)
                else:
                    raise

        raise Exception("Unable to assume role %s" % role_arn)
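A sketch of how a test might drive this helper: create the role, mint an access key for a user that is permitted to assume it, and poll until the assumed-role credentials actually work. The user and key plumbing here is an assumption, not part of the original snippet:

    def test_can_assume_created_role_sketch(self):
        # Hypothetical caller; assumes self.iam_client and create_iam_role
        # from the earlier example, and a user allowed to call sts:AssumeRole.
        role_arn = self.create_iam_role()
        user_name = 'assume-role-user-%s' % random_chars(10)
        self.iam_client.create_user(UserName=user_name)
        self.addCleanup(self.iam_client.delete_user, UserName=user_name)
        key = self.iam_client.create_access_key(UserName=user_name)['AccessKey']
        self.addCleanup(self.iam_client.delete_access_key,
                        UserName=user_name, AccessKeyId=key['AccessKeyId'])
        creds = self._wait_for_assume_role(
            role_arn, key['AccessKeyId'], key['SecretAccessKey'],
            token=None, attempts=30, delay=10)
        self.assertIn('AccessKeyId', creds)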
Example #4
    def wait_for_assume_role(self,
                             role_arn,
                             access_key,
                             secret_key,
                             token=None,
                             attempts=30,
                             delay=10):
        # "Why not use the policy simulator?" you might ask. The answer is
        # that the policy simulator will return success far before you can
        # actually make the calls.
        client = self.parent_session.create_client(
            'sts',
            aws_access_key_id=access_key,
            aws_secret_access_key=secret_key,
            aws_session_token=token)
        attempts_remaining = attempts
        role_session_name = random_chars(10)
        while attempts_remaining > 0:
            attempts_remaining -= 1
            try:
                result = client.assume_role(RoleArn=role_arn,
                                            RoleSessionName=role_session_name)
                return result['Credentials']
            except ClientError as e:
                code = e.response.get('Error', {}).get('Code')
                if code in ["InvalidClientTokenId", "AccessDenied"]:
                    time.sleep(delay)
                else:
                    raise

        raise Exception("Unable to assume role %s" % role_arn)
Example #5
 @classmethod
 def setUpClass(cls):
     cls.session = ibm_botocore.session.get_session()
     cls.stream_name = 'botocore-test-%s' % random_chars(10)
     client = cls.session.create_client('kinesis', cls.REGION)
     client.create_stream(StreamName=cls.stream_name,
                          ShardCount=1)
     waiter = client.get_waiter('stream_exists')
     waiter.wait(StreamName=cls.stream_name)
Example #6
 @classmethod
 def setUpClass(cls):
     cls.session = botocore.session.get_session()
     cls.stream_name = 'botocore-test-%s' % random_chars(10)
     client = cls.session.create_client('kinesis', cls.REGION)
     client.create_stream(StreamName=cls.stream_name,
                          ShardCount=1)
     waiter = client.get_waiter('stream_exists')
     waiter.wait(StreamName=cls.stream_name)
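The excerpt stops at setup; a matching class-level teardown is the natural companion. A minimal sketch, assuming the same cls.session, cls.REGION and cls.stream_name:

    @classmethod
    def tearDownClass(cls):
        # Sketch: delete the stream created in setUpClass and wait until it
        # is gone so later tests do not trip over it.
        client = cls.session.create_client('kinesis', cls.REGION)
        client.delete_stream(StreamName=cls.stream_name)
        waiter = client.get_waiter('stream_not_exists')
        waiter.wait(StreamName=cls.stream_name)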
Example #7
 def create_iam_role(self):
     role_name = 'ets-role-name-1-%s' % random_chars(10)
     parsed = self.iam_client.create_role(
         RoleName=role_name,
         AssumeRolePolicyDocument=DEFAULT_ROLE_POLICY)
     arn = parsed['Role']['Arn']
     self.addCleanup(
         self.iam_client.delete_role, RoleName=role_name)
     return arn
Example #8
 def test_create_table_and_wait(self):
     table_name = "botocoretest-%s" % random_chars(10)
     self.client.create_table(
         TableName=table_name,
         ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
         KeySchema=[{"AttributeName": "foo", "KeyType": "HASH"}],
         AttributeDefinitions=[{"AttributeName": "foo", "AttributeType": "S"}],
     )
     self.addCleanup(self.client.delete_table, TableName=table_name)
     waiter = self.client.get_waiter("table_exists")
     waiter.wait(TableName=table_name)
     parsed = self.client.describe_table(TableName=table_name)
     self.assertEqual(parsed["Table"]["TableStatus"], "ACTIVE")
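The table_exists waiter is essentially a polling loop over describe_table. A hand-rolled equivalent, shown only to make the waiter's behaviour concrete (a sketch, not how botocore implements it; it assumes time and ClientError are imported as in the earlier examples):

    def wait_for_table_active(self, table_name, delay=20, max_attempts=25):
        # Sketch: poll until the table reports ACTIVE, treating
        # ResourceNotFoundException as "not there yet, keep waiting".
        for _ in range(max_attempts):
            try:
                table = self.client.describe_table(TableName=table_name)
                if table['Table']['TableStatus'] == 'ACTIVE':
                    return
            except ClientError as e:
                if e.response['Error']['Code'] != 'ResourceNotFoundException':
                    raise
            time.sleep(delay)
        raise RuntimeError('Table %s never became ACTIVE' % table_name)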
Example #9
 def create_describe_scaling_response(self, page_size=200):
     """Create a valid describe_scaling_activities response."""
     page = []
     date = datetime.now()
     for _ in range(page_size):
         page.append({
             'AutoScalingGroupName': 'test',
             'ActivityId': random_chars(10),
             'Cause': 'test',
             'StartTime': date,
             'StatusCode': '200',
         })
     return {'Activities': page}
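Responses from this factory are meant to be queued on a botocore Stubber. A minimal sketch of the surrounding setUp, assuming from botocore.stub import Stubber (the client and region choices are arbitrary):

    def setUp(self):
        # Sketch: stub an Auto Scaling client so no real API calls are made.
        self.session = botocore.session.get_session()
        self.client = self.session.create_client(
            'autoscaling', region_name='us-west-2')
        self.stubber = Stubber(self.client)
        self.stubber.activate()
        self.addCleanup(self.stubber.deactivate)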
Example #10
 def create_describe_scaling_response(self, page_size=200):
     """Create a valid describe_scaling_activities response."""
     page = []
     date = datetime.now()
     for _ in range(page_size):
         page.append({
             'AutoScalingGroupName': 'test',
             'ActivityId': random_chars(10),
             'Cause': 'test',
             'StartTime': date,
             'StatusCode': '200',
         })
     return {'Activities': page}
Example #11
    def _setup_scaling_pagination(
        self, page_size=200, max_items=100, total_items=600
    ):
        """
        Add to the stubber to test paginating describe_scaling_activities.

        WARNING: This only handles cases where max_items cleanly divides
        page_size.
        """
        requests_per_page = page_size / max_items
        if requests_per_page != ceil(requests_per_page):
            raise NotImplementedError(
                "This only handles setup where max_items is less than "
                "page_size and where max_items evenly divides page_size."
            )
        requests_per_page = int(requests_per_page)
        num_pages = int(ceil(total_items / page_size))

        previous_next_token = None
        for i in range(num_pages):
            page = self.create_describe_scaling_response(page_size=page_size)

            # Don't create a next_token for the final page
            if i + 1 == num_pages:
                next_token = None
            else:
                next_token = random_chars(10)

            expected_args = {}
            if previous_next_token:
                expected_args['StartingToken'] = previous_next_token

            # The same page may be accessed multiple times because we are
            # truncating it at max_items
            for _ in range(requests_per_page - 1):
                # The page is copied because the paginator will modify the
                # response object, causing issues when using the stubber.
                self.stubber.add_response(
                    'describe_scaling_activities', page.copy()
                )

            if next_token is not None:
                page['NextToken'] = next_token

            # Copying the page here isn't necessary because it is about to
            # be blown away anyway.
            self.stubber.add_response(
                'describe_scaling_activities', page
            )

            previous_next_token = next_token
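A sketch of a companion test that consumes the stubbed responses: build a full result truncated at MaxItems, then resume from the returned NextToken until the pages are exhausted (parameter names follow botocore's PaginationConfig):

    def test_consume_all_pages_sketch(self):
        max_items, total_items = 100, 600
        self._setup_scaling_pagination(
            page_size=200, max_items=max_items, total_items=total_items)
        paginator = self.client.get_paginator('describe_scaling_activities')
        config = {'MaxItems': max_items}
        result = paginator.paginate(PaginationConfig=config).build_full_result()
        activities = result['Activities']
        while 'NextToken' in result:
            # Resume where the previous truncated result left off.
            config['StartingToken'] = result['NextToken']
            result = paginator.paginate(
                PaginationConfig=config).build_full_result()
            activities.extend(result['Activities'])
        self.assertEqual(len(activities), total_items)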
Example #12
    def _setup_scaling_pagination(self, page_size=200, max_items=100,
                                 total_items=600):
        """
        Add to the stubber to test paginating describe_scaling_activities.

        WARNING: This only handles cases where max_items cleanly divides
        page_size.
        """
        requests_per_page = page_size / max_items
        if requests_per_page != ceil(requests_per_page):
            raise NotImplementedError(
                "This only handles setup where max_items is less than "
                "page_size and where max_items evenly divides page_size."
            )
        requests_per_page = int(requests_per_page)
        num_pages = int(ceil(total_items / page_size))

        previous_next_token = None
        for i in range(num_pages):
            page = self.create_describe_scaling_response(page_size=page_size)

            # Don't create a next_token for the final page
            if i + 1 == num_pages:
                next_token = None
            else:
                next_token = random_chars(10)

            expected_args = {}
            if previous_next_token:
                expected_args['StartingToken'] = previous_next_token

            # The same page may be accessed multiple times because we are
            # truncating it at max_items
            for _ in range(requests_per_page - 1):
                # The page is copied because the paginator will modify the
                # response object, causing issues when using the stubber.
                self.stubber.add_response(
                    'describe_scaling_activities', page.copy()
                )

            if next_token is not None:
                page['NextToken'] = next_token

            # Copying the page here isn't necessary because it is about to
            # be blown away anyway.
            self.stubber.add_response(
                'describe_scaling_activities', page
            )

            previous_next_token = next_token
Example #13
    def test_create_pipeline(self):
        # In order to create a pipeline, we need to create 2 s3 buckets
        # and 1 iam role.
        input_bucket = self.create_bucket()
        output_bucket = self.create_bucket()
        role = self.create_iam_role()
        pipeline_name = 'botocore-test-create-%s' % random_chars(10)

        parsed = self.client.create_pipeline(
            InputBucket=input_bucket, OutputBucket=output_bucket,
            Role=role, Name=pipeline_name,
            Notifications={'Progressing': '', 'Completed': '',
                           'Warning': '', 'Error': ''})
        pipeline_id = parsed['Pipeline']['Id']
        self.addCleanup(self.client.delete_pipeline, Id=pipeline_id)
        self.assertIn('Pipeline', parsed)
Example #14
    def test_create_pipeline(self):
        # In order to create a pipeline, we need to create 2 s3 buckets
        # and 1 iam role.
        input_bucket = self.create_bucket()
        output_bucket = self.create_bucket()
        role = self.create_iam_role()
        pipeline_name = 'botocore-test-create-%s' % random_chars(10)

        parsed = self.client.create_pipeline(
            InputBucket=input_bucket, OutputBucket=output_bucket,
            Role=role, Name=pipeline_name,
            Notifications={'Progressing': '', 'Completed': '',
                           'Warning': '', 'Error': ''})
        pipeline_id = parsed['Pipeline']['Id']
        self.addCleanup(self.client.delete_pipeline, Id=pipeline_id)
        self.assertIn('Pipeline', parsed)
Example #15
 def test_create_table_and_wait(self):
     table_name = 'botocoretest-%s' % random_chars(10)
     self.client.create_table(TableName=table_name,
                              ProvisionedThroughput={
                                  "ReadCapacityUnits": 5,
                                  "WriteCapacityUnits": 5
                              },
                              KeySchema=[{
                                  "AttributeName": "foo",
                                  "KeyType": "HASH"
                              }],
                              AttributeDefinitions=[{
                                  "AttributeName": "foo",
                                  "AttributeType": "S"
                              }])
     self.addCleanup(self.client.delete_table, TableName=table_name)
     waiter = self.client.get_waiter('table_exists')
     waiter.wait(TableName=table_name)
     parsed = self.client.describe_table(TableName=table_name)
     self.assertEqual(parsed['Table']['TableStatus'], 'ACTIVE')
Example #16
def random_bucketname():
    return 'botocoretest-' + random_chars(10)
Example #17
def random_bucketname():
    # 63 is the max bucket length.
    bucket_name = "botocoretest"
    return bucket_name + random_chars(63 - len(bucket_name))
Example #18
def random_bucketname():
    # 63 is the max bucket length.
    bucket_name = 'botocoretest'
    return bucket_name + random_chars(63 - len(bucket_name))
Example #19
 def test_can_create_and_delete_identity_pool(self):
     pool_name = 'test%s' % random_chars(10)
     response = self.client.create_identity_pool(
         IdentityPoolName=pool_name, AllowUnauthenticatedIdentities=True)
     self.client.delete_identity_pool(
         IdentityPoolId=response['IdentityPoolId'])
Example #20
 def random_name(self):
     return 'botocoretest-' + random_chars(10)
Example #21
 def create_random_credentials(self):
     return Credentials(
         'fake-%s' % random_chars(15),
         'fake-%s' % random_chars(35),
         'fake-%s' % random_chars(45)
     )
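The three positional arguments map to access_key, secret_key and token on the credentials object (assuming Credentials here is botocore.credentials.Credentials). A sketch of a test exercising that:

    def test_random_credentials_fields_sketch(self):
        creds = self.create_random_credentials()
        # Each field keeps the 'fake-' prefix so it is obviously not real.
        self.assertTrue(creds.access_key.startswith('fake-'))
        self.assertTrue(creds.secret_key.startswith('fake-'))
        self.assertTrue(creds.token.startswith('fake-'))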
Example #22
 def test_handles_errors_with_template_body(self):
     # GetTemplate has a customization in handlers.py, so we're ensuring
     # it handles the case when a stack does not exist.
     with self.assertRaises(ClientError):
         self.client.get_template(StackName='does-not-exist-%s' %
                                  random_chars(10))
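assertRaises used as a context manager also exposes the caught exception, so a test could go one step further and check the error code. A sketch; 'ValidationError' is the code CloudFormation is generally reported to return for a missing stack, so treat it as an assumption:

    def test_missing_stack_error_code_sketch(self):
        with self.assertRaises(ClientError) as cm:
            self.client.get_template(
                StackName='does-not-exist-%s' % random_chars(10))
        # Assumed error code for a nonexistent stack.
        self.assertEqual(cm.exception.response['Error']['Code'],
                         'ValidationError')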
Example #23
 def setUp(self):
     self.session = botocore.session.get_session()
     self.client = self.session.create_client('s3', region_name='us-west-2')
     self.bucket_name = 'botocoretest%s' % random_chars(50)
Example #24
def random_bucketname():
    return 'botocoretest-' + random_chars(10)
Example #25
 def test_can_create_and_delete_identity_pool(self):
     pool_name = 'test%s' % random_chars(10)
     response = self.client.create_identity_pool(
         IdentityPoolName=pool_name, AllowUnauthenticatedIdentities=True)
     self.client.delete_identity_pool(IdentityPoolId=response['IdentityPoolId'])
Example #26
 def create_random_credentials(self):
     return Credentials('fake-%s' % random_chars(15),
                        'fake-%s' % random_chars(35),
                        'fake-%s' % random_chars(45))
Example #27
 def create_bucket(self):
     bucket_name = 'ets-bucket-1-%s' % random_chars(50)
     self.s3_client.create_bucket(Bucket=bucket_name)
     self.addCleanup(
         self.s3_client.delete_bucket, Bucket=bucket_name)
     return bucket_name
Example #28
 def test_handles_errors_with_template_body(self):
     # GetTemplate has a customization in handlers.py, so we're ensuring
     # it handles the case when a stack does not exist.
     with self.assertRaises(ClientError):
         self.client.get_template(
             StackName='does-not-exist-%s' % random_chars(10))
Example #29
 def setUp(self):
     self.session = botocore.session.get_session()
     self.client = self.session.create_client('s3', region_name='us-west-2')
     self.bucket_name = 'botocoretest%s' % random_chars(50)
Example #30
 def random_name(self):
     return 'botocoretest-' + random_chars(10)
Example #31
 def create_bucket(self):
     bucket_name = 'ets-bucket-1-%s' % random_chars(50)
     self.s3_client.create_bucket(Bucket=bucket_name)
     self.addCleanup(
         self.s3_client.delete_bucket, Bucket=bucket_name)
     return bucket_name