Example #1
    def get_object_metadata(self,
                            bucket_name,
                            object_name,
                            generation=None,
                            fields_scope=None):
        """See super class."""
        request = {'Bucket': bucket_name, 'Key': object_name}

        # The VersionId keyword argument to head_object is not nullable if it is
        # present, so only include it in the function call if it has a value.
        if generation is not None:
            request['VersionId'] = generation

        try:
            object_dict = self.client.head_object(**request)
        except botocore.exceptions.ClientError as error:
            core_exceptions.reraise(errors.S3ApiError(error))

        # User requested ACLs with FieldsScope.FULL.
        if fields_scope is cloud_api.FieldsScope.FULL:
            try:
                acl_response = self.client.get_object_acl(**request)
                acl_response.pop('ResponseMetadata', None)
                object_dict['ACL'] = acl_response
            except botocore.exceptions.ClientError as error:
                object_dict['ACL'] = errors.S3ApiError(error)

        return self._get_object_resource_from_s3_response(
            object_dict, bucket_name, object_name)
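A minimal standalone sketch of the same head_object pattern with plain boto3 (the function name, bucket, and error handling below are illustrative, not part of the SDK): VersionId is only added to the request when a generation is supplied, because head_object does not accept VersionId=None.

import boto3
import botocore.exceptions

def head_object_metadata(bucket_name, object_name, generation=None):
    client = boto3.client('s3')
    request = {'Bucket': bucket_name, 'Key': object_name}
    # Only pass VersionId when a specific generation was requested.
    if generation is not None:
        request['VersionId'] = generation
    try:
        return client.head_object(**request)
    except botocore.exceptions.ClientError as error:
        # Translate to whatever error type the surrounding code expects.
        raise RuntimeError('head_object failed: {}'.format(error))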
Example #2
    def get_bucket(self,
                   bucket_name,
                   fields_scope=cloud_api.FieldsScope.NO_ACL):
        """See super class."""
        metadata = {'Name': bucket_name}
        # TODO (b/168716392): As new commands are implemented, they may want
        # specific error handling for different methods.
        try:
            # Low-bandwidth way to determine if bucket exists for FieldsScope.SHORT.
            metadata.update(
                self.client.get_bucket_location(Bucket=bucket_name))
        except botocore.exceptions.ClientError as error:
            _raise_if_not_found_error(error, bucket_name)

            metadata['LocationConstraint'] = errors.S3ApiError(error)

        if fields_scope is not cloud_api.FieldsScope.SHORT:
            # Data for FieldsScope.NO_ACL.
            for key, api_call, result_has_key in [
                ('CORSRules', self.client.get_bucket_cors, True),
                ('ServerSideEncryptionConfiguration',
                 self.client.get_bucket_encryption, True),
                ('LifecycleConfiguration',
                 self.client.get_bucket_lifecycle_configuration, False),
                ('LoggingEnabled', self.client.get_bucket_logging, True),
                ('Payer', self.client.get_bucket_request_payment, True),
                ('Versioning', self.client.get_bucket_versioning, False),
                ('Website', self.client.get_bucket_website, False),
            ]:
                try:
                    api_result = api_call(Bucket=bucket_name)
                    # Some results are wrapped in dictionaries with keys matching "key".
                    metadata[key] = api_result.get(
                        key) if result_has_key else api_result
                except botocore.exceptions.ClientError as error:
                    metadata[key] = errors.S3ApiError(error)

            # User requested ACLs with FieldsScope.FULL.
            if fields_scope is cloud_api.FieldsScope.FULL:
                try:
                    metadata['ACL'] = self.client.get_bucket_acl(
                        Bucket=bucket_name)
                except botocore.exceptions.ClientError as error:
                    metadata['ACL'] = errors.S3ApiError(error)

        return s3_resource_reference.S3BucketResource(storage_url.CloudUrl(
            storage_url.ProviderPrefix.S3, bucket_name),
                                                      metadata=metadata)
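The loop above queries each optional bucket setting with its own API call and records a per-key error instead of failing the whole lookup. A rough standalone sketch of that pattern with plain boto3 (function and bucket names are placeholders):

import boto3
import botocore.exceptions

def get_optional_bucket_settings(bucket_name):
    client = boto3.client('s3')
    metadata = {}
    for key, api_call, result_has_key in [
        ('CORSRules', client.get_bucket_cors, True),
        ('Versioning', client.get_bucket_versioning, False),
        ('Website', client.get_bucket_website, False),
    ]:
        try:
            result = api_call(Bucket=bucket_name)
            # Some responses nest the data under a key matching "key".
            metadata[key] = result.get(key) if result_has_key else result
        except botocore.exceptions.ClientError as error:
            # E.g. buckets with no CORS or website configuration raise here.
            metadata[key] = error
    return metadata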
Example #3
    def upload_object(self,
                      upload_stream,
                      upload_resource,
                      progress_callback=None,
                      request_config=None):
        """See super class."""
        # TODO(b/160998556): Implement resumable upload.
        del progress_callback

        kwargs = {
            'Bucket': upload_resource.storage_url.bucket_name,
            'Key': upload_resource.storage_url.object_name,
            'Body': upload_stream.read()
        }
        if request_config and request_config.predefined_acl_string:
            kwargs['ACL'] = _translate_predefined_acl_string_to_s3(
                request_config.predefined_acl_string)

        try:
            response = self.client.put_object(**kwargs)
            return self._get_object_resource_from_s3_response(
                response, upload_resource.storage_url.bucket_name,
                upload_resource.storage_url.object_name)
        except botocore.exceptions.ClientError as error:
            core_exceptions.reraise(errors.S3ApiError(error))
Example #4
  def CopyObject(self,
                 source_resource,
                 destination_resource,
                 progress_callback=None,
                 request_config=None):
    """See super class."""
    del progress_callback

    source_kwargs = {'Bucket': source_resource.storage_url.bucket_name,
                     'Key': source_resource.storage_url.object_name}
    if source_resource.storage_url.generation:
      source_kwargs['VersionId'] = source_resource.storage_url.generation

    kwargs = {'Bucket': destination_resource.storage_url.bucket_name,
              'Key': destination_resource.storage_url.object_name,
              'CopySource': source_kwargs}

    if request_config and request_config.predefined_acl_string:
      kwargs['ACL'] = _TranslatePredefinedAclStringToS3(
          request_config.predefined_acl_string)

    try:
      response = self.client.copy_object(**kwargs)
      return self._GetObjectResourceFromS3Response(
          response, kwargs['Bucket'], kwargs['Key'])
    except botocore.exceptions.ClientError as error:
      core_exceptions.reraise(errors.S3ApiError(error))
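For reference, the copy itself is a single copy_object call where CopySource is a dict that may carry a VersionId; a minimal boto3 sketch with placeholder bucket, key, and version values:

import boto3

client = boto3.client('s3')
client.copy_object(
    Bucket='destination-bucket',
    Key='destination-key',
    CopySource={'Bucket': 'source-bucket',
                'Key': 'source-key',
                'VersionId': 'example-version-id'},  # Optional.
    ACL='bucket-owner-full-control')  # Optional canned ACL.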
Example #5
  def test_gets_bucket_with_individual_calls_failing(self, failing_method_name):
    # Each tuple holds: method name, method name as it appears in the error
    # message, metadata key, API response, and a flag indicating whether the
    # response contains a key matching the metadata key.
    get_bucket_api_call_data = [
        (
            'get_bucket_location', 'GetBucketLocation', 'LocationConstraint',
            {'LocationConstraint': ''}, True
        ),
        (
            'get_bucket_cors', 'GetBucketCors', 'CORSRules',
            {'CORSRules': []}, True
        ),
        (
            'get_bucket_lifecycle_configuration',
            'GetBucketLifecycleConfiguration', 'LifecycleConfiguration',
            {'Rules': []}, False
        ),
        (
            'get_bucket_logging', 'GetBucketLogging', 'LoggingEnabled',
            {'LoggingEnabled': {
                'TargetBucket': '', 'TargetGrants': [], 'TargetPrefix': ''}},
            True
        ),
        (
            'get_bucket_request_payment', 'GetBucketRequestPayment', 'Payer',
            {'Payer': 'BucketOwner'}, True
        ),
        (
            'get_bucket_versioning', 'GetBucketVersioning', 'Versioning', {},
            False
        ),
        (
            'get_bucket_website', 'GetBucketWebsite', 'Website', {}, False
        ),
        (
            'get_bucket_acl', 'GetBucketAcl', 'ACL',
            {'Owner': {}, 'Grants': []}, False
        ),
    ]
    expected_metadata = {'Name': BUCKET_NAME}
    for (method_name, error_string_of_method_name, metadata_key, response,
         result_has_key) in get_bucket_api_call_data:
      if failing_method_name == method_name:
        self.stubber.add_client_error(method=method_name)
        expected_metadata[metadata_key] = errors.S3ApiError(
            'An error occurred () when calling the {} operation: '
            .format(error_string_of_method_name))
      else:
        self.stubber.add_response(method=method_name,
                                  expected_params={'Bucket': BUCKET_NAME},
                                  service_response=response)
        expected_metadata[metadata_key] = (
            response[metadata_key] if result_has_key else response)

    expected_resource = s3_resource_reference.S3BucketResource(
        storage_url.CloudUrl(SCHEME, BUCKET_NAME), metadata=expected_metadata)
    with self.stubber:
      observed_resource = self.s3_api.get_bucket(
          BUCKET_NAME, fields_scope=cloud_api.FieldsScope.FULL)
      self.assertEqual(observed_resource, expected_resource)
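The test relies on botocore's Stubber: canned responses and client errors are queued per method, then consumed in order while the stubber is active. A minimal sketch of that mechanism on a bare client (the bucket name is a placeholder):

import boto3
import botocore.exceptions
from botocore.stub import Stubber

client = boto3.client('s3')
stubber = Stubber(client)
stubber.add_response('get_bucket_location',
                     service_response={'LocationConstraint': ''},
                     expected_params={'Bucket': 'my-bucket'})
stubber.add_client_error('get_bucket_cors')  # Next CORS call raises.

with stubber:
    client.get_bucket_location(Bucket='my-bucket')  # Returns the stubbed dict.
    try:
        client.get_bucket_cors(Bucket='my-bucket')
    except botocore.exceptions.ClientError:
        pass  # The stubbed error surfaces like a real API failure.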
Example #6
  def test_gets_bucket_receives_all_api_errors(self):
    get_bucket_api_method_names_and_metadata_key = [
        ('get_bucket_location', 'GetBucketLocation', 'LocationConstraint'),
        ('get_bucket_cors', 'GetBucketCors', 'CORSRules'),
        ('get_bucket_lifecycle_configuration',
         'GetBucketLifecycleConfiguration', 'LifecycleConfiguration'),
        ('get_bucket_logging', 'GetBucketLogging', 'LoggingEnabled'),
        ('get_bucket_request_payment', 'GetBucketRequestPayment', 'Payer'),
        ('get_bucket_versioning', 'GetBucketVersioning', 'Versioning'),
        ('get_bucket_website', 'GetBucketWebsite', 'Website'),
        ('get_bucket_acl', 'GetBucketAcl', 'ACL'),
    ]
    expected_metadata = {'Name': BUCKET_NAME}
    for (method_name, error_string_of_method_name,
         metadata_key) in get_bucket_api_method_names_and_metadata_key:
      self.stubber.add_client_error(method=method_name)
      expected_metadata[metadata_key] = errors.S3ApiError(
          'An error occurred () when calling the {} operation: '
          .format(error_string_of_method_name))

    expected_resource = s3_resource_reference.S3BucketResource(
        storage_url.CloudUrl(SCHEME, BUCKET_NAME), metadata=expected_metadata)
    with self.stubber:
      observed_resource = self.s3_api.get_bucket(
          BUCKET_NAME, fields_scope=cloud_api.FieldsScope.FULL)
      self.assertEqual(observed_resource, expected_resource)
Example #7
    def _make_patch_request(self, bucket_resource, patch_function,
                            patch_kwargs):
        patch_kwargs['Bucket'] = bucket_resource.storage_url.bucket_name
        try:
            patch_function(**patch_kwargs)
        except botocore.exceptions.ClientError as error:
            _raise_if_not_found_error(error,
                                      bucket_resource.storage_url.bucket_name)
            log.error(errors.S3ApiError(error))
Example #8
 def ListBuckets(self, fields_scope=None):
   """See super class."""
   try:
     response = self.client.list_buckets()
     for bucket in response['Buckets']:
       yield resource_reference.BucketResource(
           storage_url.CloudUrl(storage_url.ProviderPrefix.S3,
                                bucket['Name']),
           metadata={'Bucket': bucket, 'Owner': response['Owner']})
   except botocore.exceptions.ClientError as error:
     core_exceptions.reraise(errors.S3ApiError(error))
Example #9
 def list_buckets(self, fields_scope=cloud_api.FieldsScope.NO_ACL):
   """See super class."""
   try:
     response = self.client.list_buckets()
     for bucket in response['Buckets']:
       if fields_scope == cloud_api.FieldsScope.FULL:
         yield self.get_bucket(bucket['Name'], fields_scope)
       else:
         yield s3_resource_reference.S3BucketResource(
             storage_url.CloudUrl(
                 storage_url.ProviderPrefix.S3, bucket['Name']),
             metadata={'Bucket': bucket, 'Owner': response['Owner']})
   except botocore.exceptions.ClientError as error:
     core_exceptions.reraise(errors.S3ApiError(error))
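Both variants build on the same list_buckets response shape, where the bucket list and a single account-level Owner dict arrive together; a bare boto3 sketch:

import boto3

client = boto3.client('s3')
response = client.list_buckets()
for bucket in response['Buckets']:
    # Every bucket resource above is tagged with the same Owner dict.
    print(bucket['Name'], response['Owner'].get('ID'))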
Example #10
    def upload_object(self,
                      source_stream,
                      destination_resource,
                      progress_callback=None,
                      request_config=None,
                      serialization_data=None,
                      tracker_callback=None,
                      upload_strategy=cloud_api.UploadStrategy.SIMPLE):
        """See super class."""
        del progress_callback, serialization_data, tracker_callback

        if upload_strategy != cloud_api.UploadStrategy.SIMPLE:
            raise command_errors.Error('Invalid upload strategy: {}.'.format(
                upload_strategy.value))

        if request_config is None:
            request_config = cloud_api.RequestConfig()

        # All fields common to both put_object and upload_fileobj are added
        # to the extra_args dict.
        extra_args = {}

        if request_config.predefined_acl_string:
            extra_args['ACL'] = _translate_predefined_acl_string_to_s3(
                request_config.predefined_acl_string)

        if request_config.md5_hash:
            # The upload_fileobj method can perform multipart uploads, so it cannot
            # validate with user-provided MD5 hashes. Hence we use the put_object API
            # method if MD5 validation is requested.
            if request_config.size > MAX_PUT_OBJECT_SIZE:
                raise errors.S3ApiError(
                    'Cannot upload to destination: {url} because MD5 validation can'
                    ' only be performed for file size <= {maxsize} Bytes. Current file'
                    ' size is {filesize} Bytes. You can remove the MD5 validation'
                    ' requirement to complete the upload'.format(
                        url=destination_resource.storage_url.url_string,
                        maxsize=MAX_PUT_OBJECT_SIZE,
                        filesize=request_config.size))
            extra_args['ContentMD5'] = request_config.md5_hash
            return self._upload_using_put_object(source_stream,
                                                 destination_resource,
                                                 extra_args)
        else:
            # We default to calling the upload_fileobj method provided by boto3 which
            # is a managed-transfer utility that can perform multipart uploads
            # automatically. It can be used for non-seekable source_streams as well.
            return self._upload_using_managed_transfer_utility(
                source_stream, destination_resource, extra_args)
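A condensed sketch of the branch above with plain boto3 (names are placeholders): ContentMD5 must be the base64-encoded binary MD5 digest, and because a user-supplied digest cannot be checked against a multipart upload, MD5 validation forces the single-request put_object path instead of the managed upload_fileobj transfer.

import base64
import hashlib
import boto3

def upload_with_optional_md5(stream, bucket_name, object_name, validate_md5=False):
    client = boto3.client('s3')
    if validate_md5:
        data = stream.read()
        content_md5 = base64.b64encode(hashlib.md5(data).digest()).decode('ascii')
        # Single request; S3 rejects the upload if the digest does not match.
        return client.put_object(Bucket=bucket_name, Key=object_name,
                                 Body=data, ContentMD5=content_md5)
    # Managed transfer; may transparently switch to a multipart upload.
    client.upload_fileobj(stream, bucket_name, object_name)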
Example #11
 def ListObjects(self,
                 bucket_name,
                 prefix='',
                 delimiter='',
                 all_versions=None,
                 fields_scope=None):
   """See super class."""
   try:
     paginator = self.client.get_paginator('list_objects_v2')
     page_iterator = paginator.paginate(
         Bucket=bucket_name, Prefix=prefix, Delimiter=delimiter)
     for page in page_iterator:
       for object_dict in page.get('Contents', []):
         yield self._GetObjectResourceFromS3Response(object_dict, bucket_name)
       for prefix_dict in page.get('CommonPrefixes', []):
         prefix = prefix_dict['Prefix']
         yield self._GetPrefixResourceFromS3Response(prefix_dict, bucket_name)
   except botocore.exceptions.ClientError as error:
     core_exceptions.reraise(errors.S3ApiError(error))
Example #12
 def list_objects(self,
                  bucket_name,
                  prefix=None,
                  delimiter=None,
                  all_versions=False,
                  fields_scope=None):
     """See super class."""
     if all_versions:
         api_method_name = 'list_object_versions'
         objects_key = 'Versions'
     else:
         api_method_name = 'list_objects_v2'
         objects_key = 'Contents'
     try:
         paginator = self.client.get_paginator(api_method_name)
         page_iterator = paginator.paginate(
             Bucket=bucket_name,
             Prefix=prefix if prefix is not None else '',
             Delimiter=delimiter if delimiter is not None else '')
         for page in page_iterator:
             for object_dict in page.get(objects_key, []):
                 if fields_scope is cloud_api.FieldsScope.FULL:
                      # The metadata in the list_objects_v2 and
                      # list_object_versions responses is not enough for a
                      # FULL scope, so call get_object_metadata to fetch the
                      # additional metadata and ACL information.
                     yield self.get_object_metadata(
                         bucket_name=bucket_name,
                         object_name=object_dict['Key'],
                         request_config=request_config_factory.
                         get_request_config(
                             storage_url.CloudUrl(
                                 scheme=storage_url.ProviderPrefix.S3)),
                         generation=object_dict.get('VersionId'),
                         fields_scope=fields_scope)
                 else:
                     yield s3_metadata_util.get_object_resource_from_s3_response(
                         object_dict, bucket_name)
             for prefix_dict in page.get('CommonPrefixes', []):
                 yield s3_metadata_util.get_prefix_resource_from_s3_response(
                     prefix_dict, bucket_name)
     except botocore.exceptions.ClientError as error:
         core_exceptions.reraise(errors.S3ApiError(error))
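Both listing variants lean on boto3 paginators; with a '/' delimiter, each page separates object entries ('Contents' or 'Versions') from prefix entries ('CommonPrefixes'), which is what enables directory-style listing. A bare sketch with placeholder names:

import boto3

client = boto3.client('s3')
paginator = client.get_paginator('list_objects_v2')
for page in paginator.paginate(Bucket='my-bucket', Prefix='logs/', Delimiter='/'):
    for object_dict in page.get('Contents', []):
        print('object:', object_dict['Key'])
    for prefix_dict in page.get('CommonPrefixes', []):
        print('prefix:', prefix_dict['Prefix'])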
Example #13
  def GetObjectMetadata(self,
                        bucket_name,
                        object_name,
                        generation=None,
                        fields_scope=None):
    """See super class."""
    request = {'Bucket': bucket_name, 'Key': object_name}

    # The VersionId keyword argument to head_object is not nullable if it is
    # present, so only include it in the function call if it has a value.
    if generation is not None:
      request['VersionId'] = generation

    try:
      object_dict = self.client.head_object(**request)
    except botocore.exceptions.ClientError as error:
      core_exceptions.reraise(errors.S3ApiError(error))

    return self._GetObjectResourceFromS3Response(
        object_dict, bucket_name, object_name)
Example #14
  def test_get_object_metadata_populates_acls_with_error_message(self):
    self.stubber.add_response(
        'head_object',
        service_response={},
        expected_params={
            'Bucket': BUCKET_NAME,
            'Key': OBJECT_NAME
        })
    self.stubber.add_client_error('get_object_acl')

    with self.stubber:
      object_resource = self.s3_api.get_object_metadata(
          BUCKET_NAME, OBJECT_NAME, fields_scope=cloud_api.FieldsScope.FULL)

      expected_resource = s3_resource_reference.S3ObjectResource(
          storage_url.CloudUrl(SCHEME, BUCKET_NAME, OBJECT_NAME),
          metadata={
              'ACL':
                  errors.S3ApiError('An error occurred () when calling the'
                                    ' GetObjectAcl operation: ')
          })
      self.assertEqual(object_resource, expected_resource)
Example #15
    def get_object_metadata(self,
                            bucket_name,
                            object_name,
                            request_config=None,
                            generation=None,
                            fields_scope=None):
        """See super class."""
        del request_config
        request = {'Bucket': bucket_name, 'Key': object_name}

        # The VersionId keyword argument to head_object is not nullable if it is
        # present, so only include it in the function call if it has a value.
        if generation is not None:
            request['VersionId'] = generation

        try:
            object_dict = self.client.head_object(**request)
        except botocore.exceptions.ClientError as e:
            _raise_if_not_found_error(
                e,
                storage_url.CloudUrl(storage_url.ProviderPrefix.S3,
                                     bucket_name, object_name,
                                     generation).url_string)
            raise e

        # User requested ACLs with FieldsScope.FULL.
        if fields_scope is cloud_api.FieldsScope.FULL:
            try:
                acl_response = self.client.get_object_acl(**request)
                acl_response.pop('ResponseMetadata', None)
                object_dict['ACL'] = acl_response
            except botocore.exceptions.ClientError as error:
                object_dict['ACL'] = errors.S3ApiError(error)

        return s3_metadata_util.get_object_resource_from_s3_response(
            object_dict, bucket_name, object_name)
Example #16
 def DownloadObject(self,
                    bucket_name,
                    object_name,
                    download_stream,
                    compressed_encoding=False,
                    decryption_wrapper=None,
                    digesters=None,
                    download_strategy=cloud_api.DownloadStrategy.ONE_SHOT,
                    generation=None,
                    object_size=None,
                    progress_callback=None,
                    serialization_data=None,
                    start_byte=0,
                    end_byte=None):
   """See super class."""
   kwargs = {'Bucket': bucket_name, 'Key': object_name}
   if generation:
     kwargs['VersionId'] = generation
   try:
     response = self.client.get_object(**kwargs)
     download_stream.write(response['Body'].read())
     return response.get('ContentEncoding', None)
   except botocore.exceptions.ClientError as error:
     core_exceptions.reraise(errors.S3ApiError(error))
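This ONE_SHOT implementation ignores the start_byte/end_byte parameters and reads the whole body in one get_object call. For reference, a ranged or versioned read uses the same API with the standard Range header syntax; a sketch with placeholder values:

import boto3

client = boto3.client('s3')
kwargs = {'Bucket': 'my-bucket', 'Key': 'my-object'}
# kwargs['VersionId'] = 'example-version-id'  # Specific generation, if needed.
kwargs['Range'] = 'bytes=0-1023'  # First KiB only.
response = client.get_object(**kwargs)
with open('/tmp/my-object.part', 'wb') as download_stream:
    for chunk in response['Body'].iter_chunks():
        download_stream.write(chunk)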
Example #17
  def test_dumps_object_metadata_with_errors_in_correct_format(self):
    time = datetime.datetime(1111, 1, 1)
    cors_error = errors.S3ApiError(
        'An error occurred () when calling the GetBucketCors operation: ')
    resource = s3_resource_reference.S3ObjectResource(
        storage_url.CloudUrl(
            storage_url.ProviderPrefix.S3, bucket_name='bucket'),
        metadata={
            'Error': cors_error,
            'List': [{'TimeInListInDict': time}],
            'Nested': {'ZeroInt': 0, 'DoubleNested': {'NestedTime': time}},
            'String': 'abc',
            'Time': time})

    expected_dump = textwrap.dedent("""\
    {
      "url": "s3://bucket",
      "type": "cloud_object",
      "metadata": {
        "Error": "An error occurred () when calling the GetBucketCors operation: ",
        "List": [
          {
            "TimeInListInDict": "1111-01-01T00:00:00"
          }
        ],
        "Nested": {
          "DoubleNested": {
            "NestedTime": "1111-01-01T00:00:00"
          },
          "ZeroInt": 0
        },
        "String": "abc",
        "Time": "1111-01-01T00:00:00"
      }
    }""")
    self.assertEqual(resource.get_metadata_dump(), expected_dump)
Example #18
    def get_object_metadata(self,
                            bucket_name,
                            object_name,
                            generation=None,
                            fields_scope=None):
        """See super class."""
        request = {'Bucket': bucket_name, 'Key': object_name}

        # The VersionId keyword argument to head_object is not nullable if it is
        # present, so only include it in the function call if it has a value.
        if generation is not None:
            request['VersionId'] = generation

        try:
            object_dict = self.client.head_object(**request)
        except botocore.exceptions.ClientError as e:
            if e.response.get('ResponseMetadata',
                              {}).get('HTTPStatusCode') == 404:
                # Allows custom error handling.
                raise errors.NotFoundError('Object not found: {}'.format(
                    storage_url.CloudUrl(storage_url.ProviderPrefix.S3,
                                         bucket_name, object_name,
                                         generation).url_string))
            raise e

        # User requested ACLs with FieldsScope.FULL.
        if fields_scope is cloud_api.FieldsScope.FULL:
            try:
                acl_response = self.client.get_object_acl(**request)
                acl_response.pop('ResponseMetadata', None)
                object_dict['ACL'] = acl_response
            except botocore.exceptions.ClientError as error:
                object_dict['ACL'] = errors.S3ApiError(error)

        return _get_object_resource_from_s3_response(object_dict, bucket_name,
                                                     object_name)
Example #19
    def patch_bucket(self,
                     bucket_resource,
                     request_config=None,
                     fields_scope=cloud_api.FieldsScope.NO_ACL):
        """See super class."""
        del fields_scope, request_config  # Unused.

        if ('FullACLConfiguration' in bucket_resource.metadata
                or 'ACL' in bucket_resource.metadata):
            try:
                if 'FullACLConfiguration' in bucket_resource.metadata:
                    # Can contain canned ACL and other settings.
                    # Takes priority over 'ACL' metadata key.
                    kwargs = bucket_resource.metadata['FullACLConfiguration']
                else:
                    # Data returned by get_bucket_acl.
                    kwargs = {
                        'AccessControlPolicy': bucket_resource.metadata['ACL']
                    }
                kwargs['Bucket'] = bucket_resource.name
                self.client.put_bucket_acl(**kwargs)
            except botocore.exceptions.ClientError as error:
                _raise_if_not_found_error(error, bucket_resource.name)
                # Don't return any ACL information in case the failure affected both
                # metadata keys.
                bucket_resource.metadata.pop('FullACLConfiguration', None)
                bucket_resource.metadata.pop('ACL', None)
                log.error(errors.S3ApiError(error))

        patchable_metadata = {  # Key -> (client function, function kwargs).
            'CORSRules': (
                self.client.put_bucket_cors,
                {'CORSConfiguration': {
                    'CORSRules': bucket_resource.metadata.get('CORSRules'),
                }}),
            'ServerSideEncryptionConfiguration': (
                self.client.put_bucket_encryption,
                {'ServerSideEncryptionConfiguration': bucket_resource.metadata.get(
                    'ServerSideEncryptionConfiguration'),
                }),
            'LifecycleConfiguration': (
                self.client.put_bucket_lifecycle_configuration,
                {'LifecycleConfiguration': bucket_resource.metadata.get(
                    'LifecycleConfiguration'),
                }),
            'LoggingEnabled': (
                self.client.put_bucket_logging,
                {'BucketLoggingStatus': {
                    'LoggingEnabled': bucket_resource.metadata.get(
                        'LoggingEnabled'),
                }}),
            'Payer': (
                self.client.put_bucket_request_payment,
                {'RequestPaymentConfiguration': {
                    'Payer': bucket_resource.metadata.get('Payer'),
                }}),
            'Versioning': (
                self.client.put_bucket_versioning,
                {'VersioningConfiguration': bucket_resource.metadata.get(
                    'Versioning'),
                }),
            'Website': (
                self.client.put_bucket_website,
                {'WebsiteConfiguration': bucket_resource.metadata.get('Website')}),
        }
        for metadata_key, (patch_function,
                           patch_kwargs) in patchable_metadata.items():
            if metadata_key not in bucket_resource.metadata:
                continue

            patch_kwargs['Bucket'] = bucket_resource.name
            try:
                patch_function(**patch_kwargs)
            except botocore.exceptions.ClientError as error:
                _raise_if_not_found_error(error, bucket_resource.name)
                log.error(errors.S3ApiError(error))
                del bucket_resource.metadata[metadata_key]

        return bucket_resource
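The dispatch table maps each patchable metadata key to the put_* call and payload that applies it, and each failure is logged per key instead of aborting the whole patch. A trimmed standalone sketch of the same idea with plain boto3 (the function, bucket, and logging below are placeholders):

import boto3
import botocore.exceptions

def patch_bucket_settings(bucket_name, metadata):
    client = boto3.client('s3')
    patchable = {  # Key -> (client function, function kwargs).
        'Versioning': (client.put_bucket_versioning,
                       {'VersioningConfiguration': metadata.get('Versioning')}),
        'Website': (client.put_bucket_website,
                    {'WebsiteConfiguration': metadata.get('Website')}),
    }
    for key, (patch_function, patch_kwargs) in patchable.items():
        if key not in metadata:
            continue
        patch_kwargs['Bucket'] = bucket_name
        try:
            patch_function(**patch_kwargs)
        except botocore.exceptions.ClientError as error:
            print('Failed to patch {}: {}'.format(key, error))

# Example: patch_bucket_settings('my-bucket', {'Versioning': {'Status': 'Enabled'}})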