Example #1
  def test_gets_bucket_with_different_fields_scopes(
      self, fields_scope, number_api_calls, expected_metadata):
    # Minimal response data that passes botocore Stubber validation and lets
    # the test verify that each API endpoint returned a response.
    get_bucket_api_call_and_response = [
        ('get_bucket_location', {'LocationConstraint': ''}),
        ('get_bucket_cors', {'CORSRules': []}),
        ('get_bucket_lifecycle_configuration', {'Rules': []}),
        ('get_bucket_logging', {
            'LoggingEnabled': {
                'TargetBucket': '', 'TargetGrants': [], 'TargetPrefix': ''}}),
        ('get_bucket_request_payment', {'Payer': 'BucketOwner'}),
        ('get_bucket_versioning', {}),
        ('get_bucket_website', {}),
        ('get_bucket_acl', {'Owner': {}, 'Grants': []})
    ]

    # Only queue responses for the API methods this scope will call, so the
    # Stubber does not complain about queued-but-uncalled methods.
    for method, response in get_bucket_api_call_and_response[:number_api_calls]:
      self.stubber.add_response(method=method,
                                expected_params={'Bucket': BUCKET_NAME},
                                service_response=response)

    expected = s3_resource_reference.S3BucketResource(
        storage_url.CloudUrl(SCHEME, BUCKET_NAME), metadata=expected_metadata)
    with self.stubber:
      observed = self.s3_api.get_bucket(BUCKET_NAME, fields_scope)
      self.assertEqual(observed, expected)
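
For context, a minimal standalone sketch of the botocore Stubber mechanics the test above relies on: each add_response queues one canned response, and the stubber checks the actual call arguments against expected_params. The region, credentials, and bucket name below are illustrative, not taken from the test module.

import boto3
from botocore.stub import Stubber

# Dummy credentials so no real AWS configuration is needed.
client = boto3.client('s3', region_name='us-east-1',
                      aws_access_key_id='test', aws_secret_access_key='test')
stubber = Stubber(client)
stubber.add_response(
    method='get_bucket_location',
    expected_params={'Bucket': 'my-bucket'},
    service_response={'LocationConstraint': ''})

with stubber:
  # Parameters are validated against expected_params, and the canned
  # response is returned instead of making a network request.
  response = client.get_bucket_location(Bucket='my-bucket')
  assert response['LocationConstraint'] == ''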
Example #2
    def create_bucket(self,
                      bucket_resource,
                      request_config,
                      fields_scope=None):
        """See super class."""
        del fields_scope  # Unused in S3 client.

        resource_args = request_config.resource_args
        if resource_args.location:
            client = _create_client(resource_args.location)
            location_constraint = resource_args.location
        else:
            client = self.client
            location_constraint = boto3.session.Session().region_name
        metadata = client.create_bucket(
            Bucket=bucket_resource.storage_url.bucket_name,
            CreateBucketConfiguration={
                'LocationConstraint': location_constraint
            })

        if (resource_args.cors_file_path or resource_args.labels_file_path
                or resource_args.lifecycle_file_path
                or resource_args.log_bucket or resource_args.log_object_prefix
                or resource_args.requester_pays or resource_args.versioning
                or resource_args.web_error_page
                or resource_args.web_main_page_suffix):
            return self.patch_bucket(bucket_resource, request_config)

        backend_location = metadata.get('Location')
        return s3_resource_reference.S3BucketResource(
            bucket_resource.storage_url,
            location=backend_location,
            metadata=metadata)
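
For reference, the wrapper above ultimately reduces to a plain boto3 create_bucket call; a minimal sketch of that underlying request, with an illustrative bucket name and region standing in for the values derived from request_config:

import boto3

region = 'us-west-2'  # Illustrative; the wrapper takes this from resource_args.
client = boto3.client('s3', region_name=region)
metadata = client.create_bucket(
    Bucket='example-bucket',  # Illustrative bucket name.
    CreateBucketConfiguration={'LocationConstraint': region})
# The response includes a backend-reported 'Location' for the new bucket.
print(metadata.get('Location'))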
Example #3
  def test_list_buckets_translates_response_to_resources(self, fields_scope):
    names = ['bucket1', 'bucket2']
    cloud_urls = [storage_url.CloudUrl(SCHEME, name) for name in names]
    dates = [datetime.datetime(1, 1, 1), datetime.datetime(2, 1, 1)]
    owner_data = {
        'DisplayName': OWNER_NAME,
        'ID': OWNER_ID,
    }
    s3_response = {
        'Buckets': [
            {'Name': name, 'CreationDate': date}
            for name, date in zip(names, dates)
        ],
        'Owner': owner_data,
    }
    self.stubber.add_response(
        method='list_buckets', expected_params={}, service_response=s3_response)

    expected = []
    for name, url, date in zip(names, cloud_urls, dates):
      expected.append(s3_resource_reference.S3BucketResource(
          url, metadata={'Bucket': {'Name': name, 'CreationDate': date},
                         'Owner': owner_data}))
    with self.stubber:
      observed = self.s3_api.list_buckets(fields_scope=fields_scope)
      self.assertCountEqual(observed, expected)
Example #4
  def test_gets_bucket_with_individual_calls_failing(self, failing_method_name):
    # Each tuple: method name, operation name as it appears in the error
    # message, metadata key, API response, and a flag indicating whether the
    # response has a key mirroring the metadata key.
    get_bucket_api_call_data = [
        (
            'get_bucket_location', 'GetBucketLocation', 'LocationConstraint',
            {'LocationConstraint': ''}, True
        ),
        (
            'get_bucket_cors', 'GetBucketCors', 'CORSRules',
            {'CORSRules': []}, True
        ),
        (
            'get_bucket_lifecycle_configuration',
            'GetBucketLifecycleConfiguration', 'LifecycleConfiguration',
            {'Rules': []}, False
        ),
        (
            'get_bucket_logging', 'GetBucketLogging', 'LoggingEnabled',
            {'LoggingEnabled': {
                'TargetBucket': '', 'TargetGrants': [], 'TargetPrefix': ''}},
            True
        ),
        (
            'get_bucket_request_payment', 'GetBucketRequestPayment', 'Payer',
            {'Payer': 'BucketOwner'}, True
        ),
        (
            'get_bucket_versioning', 'GetBucketVersioning', 'Versioning', {},
            False
        ),
        (
            'get_bucket_website', 'GetBucketWebsite', 'Website', {}, False
        ),
        (
            'get_bucket_acl', 'GetBucketAcl', 'ACL',
            {'Owner': {}, 'Grants': []}, False
        ),
    ]
    expected_metadata = {'Name': BUCKET_NAME}
    for (method_name, error_string_of_method_name, metadata_key, response,
         result_has_key) in get_bucket_api_call_data:
      if failing_method_name == method_name:
        self.stubber.add_client_error(method=method_name)
        expected_metadata[metadata_key] = errors.S3ApiError(
            'An error occurred () when calling the {} operation: '
            .format(error_string_of_method_name))
      else:
        self.stubber.add_response(method=method_name,
                                  expected_params={'Bucket': BUCKET_NAME},
                                  service_response=response)
        expected_metadata[metadata_key] = (
            response[metadata_key] if result_has_key else response)

    expected_resource = s3_resource_reference.S3BucketResource(
        storage_url.CloudUrl(SCHEME, BUCKET_NAME), metadata=expected_metadata)
    with self.stubber:
      observed_resource = self.s3_api.get_bucket(
          BUCKET_NAME, fields_scope=cloud_api.FieldsScope.FULL)
      self.assertEqual(observed_resource, expected_resource)
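
The expected error strings above follow botocore's ClientError formatting: calling add_client_error with only a method name produces an error with an empty code and message, so str(error) becomes 'An error occurred () when calling the <Operation> operation: '. A standalone illustration (dummy credentials and bucket name are arbitrary):

import boto3
import botocore.exceptions
from botocore.stub import Stubber

client = boto3.client('s3', region_name='us-east-1',
                      aws_access_key_id='test', aws_secret_access_key='test')
stubber = Stubber(client)
stubber.add_client_error(method='get_bucket_cors')

with stubber:
  try:
    client.get_bucket_cors(Bucket='any-bucket')
  except botocore.exceptions.ClientError as error:
    # Prints: An error occurred () when calling the GetBucketCors operation:
    print(str(error))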
Example #5
  def test_gets_bucket_receives_all_api_errors(self):
    get_bucket_api_method_names_and_metadata_key = [
        ('get_bucket_location', 'GetBucketLocation', 'LocationConstraint'),
        ('get_bucket_cors', 'GetBucketCors', 'CORSRules'),
        ('get_bucket_lifecycle_configuration',
         'GetBucketLifecycleConfiguration', 'LifecycleConfiguration'),
        ('get_bucket_logging', 'GetBucketLogging', 'LoggingEnabled'),
        ('get_bucket_request_payment', 'GetBucketRequestPayment', 'Payer'),
        ('get_bucket_versioning', 'GetBucketVersioning', 'Versioning'),
        ('get_bucket_website', 'GetBucketWebsite', 'Website'),
        ('get_bucket_acl', 'GetBucketAcl', 'ACL'),
    ]
    expected_metadata = {'Name': BUCKET_NAME}
    for (method_name, error_string_of_method_name,
         metadata_key) in get_bucket_api_method_names_and_metadata_key:
      self.stubber.add_client_error(method=method_name)
      expected_metadata[metadata_key] = errors.S3ApiError(
          'An error occurred () when calling the {} operation: '
          .format(error_string_of_method_name))

    expected_resource = s3_resource_reference.S3BucketResource(
        storage_url.CloudUrl(SCHEME, BUCKET_NAME), metadata=expected_metadata)
    with self.stubber:
      observed_resource = self.s3_api.get_bucket(
          BUCKET_NAME, fields_scope=cloud_api.FieldsScope.FULL)
      self.assertEqual(observed_resource, expected_resource)
Example #6
  def list_buckets(self, fields_scope=cloud_api.FieldsScope.NO_ACL):
    """See super class."""
    try:
      response = self.client.list_buckets()
      for bucket in response['Buckets']:
        if fields_scope == cloud_api.FieldsScope.FULL:
          yield self.get_bucket(bucket['Name'], fields_scope)
        else:
          yield s3_resource_reference.S3BucketResource(
              storage_url.CloudUrl(
                  storage_url.ProviderPrefix.S3, bucket['Name']),
              metadata={'Bucket': bucket, 'Owner': response['Owner']})
    except botocore.exceptions.ClientError as error:
      core_exceptions.reraise(errors.S3ApiError(error))
Example #7
    def get_bucket(self,
                   bucket_name,
                   fields_scope=cloud_api.FieldsScope.NO_ACL):
        """See super class."""
        metadata = {'Name': bucket_name}
        # TODO (b/168716392): As new commands are implemented, they may want
        # specific error handling for different methods.
        try:
            # Low-bandwidth way to determine if bucket exists for FieldsScope.SHORT.
            metadata.update(
                self.client.get_bucket_location(Bucket=bucket_name))
        except botocore.exceptions.ClientError as error:
            _raise_if_not_found_error(error, bucket_name)

            metadata['LocationConstraint'] = errors.S3ApiError(error)

        if fields_scope is not cloud_api.FieldsScope.SHORT:
            # Data for FieldsScope.NO_ACL.
            for key, api_call, result_has_key in [
                ('CORSRules', self.client.get_bucket_cors, True),
                ('ServerSideEncryptionConfiguration',
                 self.client.get_bucket_encryption, True),
                ('LifecycleConfiguration',
                 self.client.get_bucket_lifecycle_configuration, False),
                ('LoggingEnabled', self.client.get_bucket_logging, True),
                ('Payer', self.client.get_bucket_request_payment, True),
                ('Versioning', self.client.get_bucket_versioning, False),
                ('Website', self.client.get_bucket_website, False),
            ]:
                try:
                    api_result = api_call(Bucket=bucket_name)
                    # Some results are wrapped in dictionaries with keys matching "key".
                    metadata[key] = api_result.get(
                        key) if result_has_key else api_result
                except botocore.exceptions.ClientError as error:
                    metadata[key] = errors.S3ApiError(error)

            # User requested ACLs with FieldsScope.FULL.
            if fields_scope is cloud_api.FieldsScope.FULL:
                try:
                    metadata['ACL'] = self.client.get_bucket_acl(
                        Bucket=bucket_name)
                except botocore.exceptions.ClientError as error:
                    metadata['ACL'] = errors.S3ApiError(error)

        return s3_resource_reference.S3BucketResource(
            storage_url.CloudUrl(storage_url.ProviderPrefix.S3, bucket_name),
            metadata=metadata)
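
The result_has_key distinction above reflects the documented boto3 response shapes: get_bucket_cors returns {'CORSRules': [...]}, which shares a key with the metadata entry and gets unwrapped, while get_bucket_versioning returns {'Status': ..., 'MFADelete': ...} and is stored whole. A tiny standalone check of that unwrapping logic on canned responses:

# Canned responses mirroring the documented boto3 shapes; no client needed.
for key, result_has_key, api_result in [
    ('CORSRules', True, {'CORSRules': []}),
    ('Versioning', False, {'Status': 'Enabled'}),
]:
  stored = api_result.get(key) if result_has_key else api_result
  print(key, stored)
# CORSRules []
# Versioning {'Status': 'Enabled'}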
Example #8
    def create_bucket(self, bucket_resource, fields_scope=None):
        """See super class."""
        del fields_scope  # Unused in S3 client.

        if bucket_resource.retention_period:
            raise ValueError(
                'S3 API does not accept retention_period argument for create_bucket.'
            )
        if bucket_resource.storage_class:
            raise ValueError(
                'S3 API does not accept storage_class argument for create_bucket.'
            )
        if bucket_resource.uniform_bucket_level_access:
            raise ValueError(
                'S3 API does not accept uniform_bucket_level_access argument for create_bucket.'
            )

        if bucket_resource.location:
            # Create client with appropriate endpoint for creating regional bucket.
            client = boto3.client(storage_url.ProviderPrefix.S3.value,
                                  region_name=bucket_resource.location)
            create_bucket_configuration = {
                'LocationConstraint': bucket_resource.location
            }
        else:
            client = self.client
            # Must match client's default regional endpoint.
            create_bucket_configuration = {
                'LocationConstraint': boto3.session.Session().region_name
            }

        metadata = client.create_bucket(
            Bucket=bucket_resource.storage_url.bucket_name,
            CreateBucketConfiguration=create_bucket_configuration)
        backend_location = metadata.get('Location')
        return s3_resource_reference.S3BucketResource(
            bucket_resource.storage_url,
            location=backend_location,
            metadata=metadata)
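
Both create_bucket variants above fall back to boto3.session.Session().region_name when no location is given; that value comes from the AWS configuration chain (config files and environment variables such as AWS_DEFAULT_REGION) and is None when no default region is set. A one-line check:

import boto3

# Prints the default session's region, or None if no region is configured.
print(boto3.session.Session().region_name)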
Example #9
  def test_dumps_bucket_metadata_with_errors_in_correct_format(self):
    time = datetime.datetime(1111, 1, 1, tzinfo=datetime.timezone.utc)
    cors_error = errors.S3ApiError(
        'An error occurred () when calling the GetBucketCors operation: ')
    resource = s3_resource_reference.S3BucketResource(
        storage_url.CloudUrl(
            storage_url.ProviderPrefix.S3, bucket_name='bucket'),
        metadata={
            'Error': cors_error,
            'List': [{'TimeInListInDict': time}],
            'Nested': {'ZeroInt': 0, 'DoubleNested': {'NestedTime': time}},
            'String': 'abc',
            'Time': time})

    expected_dump = textwrap.dedent("""\
    {
      "url": "s3://bucket",
      "type": "cloud_bucket",
      "metadata": {
        "Error": "An error occurred () when calling the GetBucketCors operation: ",
        "List": [
          {
            "TimeInListInDict": "1111-01-01T00:00:00+0000"
          }
        ],
        "Nested": {
          "DoubleNested": {
            "NestedTime": "1111-01-01T00:00:00+0000"
          },
          "ZeroInt": 0
        },
        "String": "abc",
        "Time": "1111-01-01T00:00:00+0000"
      }
    }""")
    self.assertEqual(resource.get_metadata_dump(), expected_dump)
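
The expected dump implies datetimes are rendered with a '%Y-%m-%dT%H:%M:%S%z'-style format and error objects as their string form. A minimal sketch of one way such serialization could be done; this is an assumption about the approach, not the actual get_metadata_dump implementation:

import datetime
import json

def _serialize(value):
  # Assumed fallback: timestamp strings for datetimes, str() for anything
  # else json cannot encode natively (e.g. S3ApiError instances).
  if isinstance(value, datetime.datetime):
    return value.strftime('%Y-%m-%dT%H:%M:%S%z')
  return str(value)

time = datetime.datetime(1111, 1, 1, tzinfo=datetime.timezone.utc)
print(json.dumps({'Time': time}, default=_serialize, indent=2, sort_keys=True))
# {
#   "Time": "1111-01-01T00:00:00+0000"
# }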