    def test_object_resources_non_equal_metadata(self):
        resource1 = resource_reference.ObjectResource(storage_url.CloudUrl(
            storage_url.ProviderPrefix.GCS, bucket_name='bucket'),
                                                      metadata={1: 2})
        resource2 = resource_reference.ObjectResource(storage_url.CloudUrl(
            storage_url.ProviderPrefix.GCS, bucket_name='bucket'),
                                                      metadata={1: 3})

        self.assertNotEqual(resource1, resource2)

    def test_bucket_resources_non_equal_etags(self):
        resource1 = resource_reference.BucketResource(storage_url.CloudUrl(
            storage_url.ProviderPrefix.GCS, bucket_name='bucket'),
                                                      etag='e1')
        resource2 = resource_reference.BucketResource(storage_url.CloudUrl(
            storage_url.ProviderPrefix.GCS, bucket_name='bucket'),
                                                      etag='e2')

        self.assertNotEqual(resource1, resource2)

    def test_object_resources_non_equal_types(self):
        resource1 = resource_reference.ObjectResource(
            storage_url.CloudUrl(storage_url.ProviderPrefix.GCS,
                                 bucket_name='bucket'))
        resource2 = resource_reference.BucketResource(
            storage_url.CloudUrl(storage_url.ProviderPrefix.GCS,
                                 bucket_name='bucket'))

        self.assertNotEqual(resource1, resource2)

    def test_resources_non_equal_storage_urls(self):
        resource1 = resource_reference.Resource(
            storage_url.CloudUrl(storage_url.ProviderPrefix.GCS,
                                 bucket_name='bucket1'))

        resource2 = resource_reference.Resource(
            storage_url.CloudUrl(storage_url.ProviderPrefix.GCS,
                                 bucket_name='bucket2'))

        self.assertNotEqual(resource1, resource2)

    def test_equal_resources(self):
        resource1 = resource_reference.CloudResource(
            storage_url.CloudUrl(storage_url.ProviderPrefix.GCS,
                                 bucket_name='bucket'))

        resource2 = resource_reference.CloudResource(
            storage_url.CloudUrl(storage_url.ProviderPrefix.GCS,
                                 bucket_name='bucket'))

        self.assertEqual(resource1, resource2)

    def test_equal_bucket_resources(self):
        resource1 = resource_reference.BucketResource(storage_url.CloudUrl(
            storage_url.ProviderPrefix.GCS, bucket_name='bucket'),
                                                      etag='e',
                                                      metadata={})
        resource2 = resource_reference.BucketResource(storage_url.CloudUrl(
            storage_url.ProviderPrefix.GCS, bucket_name='bucket'),
                                                      etag='e',
                                                      metadata={})

        self.assertEqual(resource1, resource2)

    def test_resources_non_equal_types(self):
        resource1 = resource_reference.Resource(
            storage_url.CloudUrl(storage_url.ProviderPrefix.GCS,
                                 bucket_name='bucket'))

        # Get another class with a storage_url attribute to test that type
        # comparison happens.
        OtherClass = collections.namedtuple('OtherClass', ['storage_url'])
        resource2 = OtherClass(storage_url=storage_url.CloudUrl(
            storage_url.ProviderPrefix.GCS, bucket_name='bucket'))

        self.assertNotEqual(resource1, resource2)
    def test_object_resources_non_equal_generations(self):
        resource1 = resource_reference.ObjectResource(
            storage_url.CloudUrl(storage_url.ProviderPrefix.GCS,
                                 bucket_name='b',
                                 object_name='o',
                                 generation='g'))
        resource2 = resource_reference.ObjectResource(
            storage_url.CloudUrl(storage_url.ProviderPrefix.GCS,
                                 bucket_name='b',
                                 object_name='o',
                                 generation='g2'))

        self.assertNotEqual(resource1, resource2)
Example #9
    def Run(self, args):
        """Command execution logic."""
        if args.path:
            storage_urls = [
                storage_url.storage_url_from_string(path) for path in args.path
            ]
            for url in storage_urls:
                if not isinstance(url, storage_url.CloudUrl):
                    raise errors.InvalidUrlError(
                        'Ls only works for cloud URLs.'
                        ' Error for: {}'.format(url.url_string))
        else:
            storage_urls = [storage_url.CloudUrl(cloud_api.DEFAULT_PROVIDER)]

        display_detail = cloud_list_task.DisplayDetail.SHORT
        if args.full:
            display_detail = cloud_list_task.DisplayDetail.FULL
        if args.json:
            display_detail = cloud_list_task.DisplayDetail.JSON
        if args.long:
            display_detail = cloud_list_task.DisplayDetail.LONG

        tasks = []
        for url in storage_urls:
            tasks.append(
                cloud_list_task.CloudListTask(url,
                                              all_versions=args.all_versions,
                                              display_detail=display_detail,
                                              include_etag=args.etag,
                                              recursion_flag=args.recursive))
        task_executor.ExecuteTasks(tasks, is_parallel=False)
Example #10
  def test_gets_bucket_with_different_fields_scopes(
      self, fields_scope, number_api_calls, expected_metadata):
    # The minimum response data needed to pass Boto Stubber validation and to
    # verify that each API endpoint returned a response.
    get_bucket_api_call_and_response = [
        ('get_bucket_location', {'LocationConstraint': ''}),
        ('get_bucket_cors', {'CORSRules': []}),
        ('get_bucket_lifecycle_configuration', {'Rules': []}),
        ('get_bucket_logging', {
            'LoggingEnabled': {
                'TargetBucket': '', 'TargetGrants': [], 'TargetPrefix': ''}}),
        ('get_bucket_request_payment', {'Payer': 'BucketOwner'}),
        ('get_bucket_versioning', {}),
        ('get_bucket_website', {}),
        ('get_bucket_acl', {'Owner': {}, 'Grants': []})
    ]

    # Don't stub extra API functions, to keep the Stubber from complaining
    # about expected-but-uncalled functions.
    for method, response in get_bucket_api_call_and_response[:number_api_calls]:
      self.stubber.add_response(method=method,
                                expected_params={'Bucket': BUCKET_NAME},
                                service_response=response)

    expected = s3_resource_reference.S3BucketResource(
        storage_url.CloudUrl(SCHEME, BUCKET_NAME), metadata=expected_metadata)
    with self.stubber:
      observed = self.s3_api.get_bucket(BUCKET_NAME, fields_scope)
      self.assertEqual(observed, expected)
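For readers unfamiliar with the stubbing pattern used throughout these tests, here is a minimal, self-contained sketch of botocore's Stubber (illustrative only; the client setup, credentials, and bucket name are assumptions, not part of the snippet above):

import boto3
from botocore.stub import Stubber

# Fake credentials are fine: the Stubber intercepts calls before any request
# is sent over the network.
client = boto3.client('s3', region_name='us-east-1',
                      aws_access_key_id='fake', aws_secret_access_key='fake')
stubber = Stubber(client)
# Queue one canned response; the Stubber fails the call if the actual
# parameters differ from expected_params.
stubber.add_response(method='get_bucket_location',
                     service_response={'LocationConstraint': ''},
                     expected_params={'Bucket': 'bucket'})
with stubber:
    response = client.get_bucket_location(Bucket='bucket')
    print(response['LocationConstraint'])  # Prints an empty string.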
Example #11
  def test_get_object_metadata_populates_acls_with_full_fieldscope(self):
    expected_request_params = {'Bucket': BUCKET_NAME, 'Key': OBJECT_NAME}
    self.stubber.add_response(
        'head_object',
        service_response={},
        expected_params=expected_request_params)
    self.stubber.add_response(
        'get_object_acl',
        service_response={
            'Grants': [],
            'Owner': {
                'ID': '1'
            },
            'ResponseMetadata': {},
        },
        expected_params=expected_request_params)

    with self.stubber:
      object_resource = self.s3_api.get_object_metadata(
          BUCKET_NAME, OBJECT_NAME, fields_scope=cloud_api.FieldsScope.FULL)

      expected_resource = s3_resource_reference.S3ObjectResource(
          storage_url.CloudUrl(SCHEME, BUCKET_NAME, OBJECT_NAME),
          metadata={
              'ACL': {
                  'Grants': [],
                  'Owner': {
                      'ID': '1'
                  },
              },
          })
      self.assertEqual(object_resource, expected_resource)
Example #12
  def test_list_buckets_translates_response_to_resources(self, fields_scope):
    names = ['bucket1', 'bucket2']
    cloud_urls = [storage_url.CloudUrl(SCHEME, name) for name in names]
    dates = [datetime.datetime(1, 1, 1), datetime.datetime(2, 1, 1)]
    owner_data = {
        'DisplayName': OWNER_NAME,
        'ID': OWNER_ID,
    }
    s3_response = {
        'Buckets': [
            {'Name': name, 'CreationDate': date}
            for name, date in zip(names, dates)
        ],
        'Owner': owner_data,
    }
    self.stubber.add_response(
        method='list_buckets', expected_params={}, service_response=s3_response)

    expected = []
    for name, url, date in zip(names, cloud_urls, dates):
      expected.append(s3_resource_reference.S3BucketResource(
          url, metadata={'Bucket': {'Name': name, 'CreationDate': date},
                         'Owner': owner_data}))
    with self.stubber:
      observed = self.s3_api.list_buckets(fields_scope=fields_scope)
      self.assertCountEqual(observed, expected)
Example #13
  def test_gets_bucket_with_individual_calls_failing(self, failing_method_name):
    # Method name, method name as it appears in errors, metadata key, API
    # response, and a flag indicating whether the API response has a key
    # mirroring the metadata key.
    get_bucket_api_call_data = [
        (
            'get_bucket_location', 'GetBucketLocation', 'LocationConstraint',
            {'LocationConstraint': ''}, True
        ),
        (
            'get_bucket_cors', 'GetBucketCors', 'CORSRules',
            {'CORSRules': []}, True
        ),
        (
            'get_bucket_lifecycle_configuration',
            'GetBucketLifecycleConfiguration', 'LifecycleConfiguration',
            {'Rules': []}, False
        ),
        (
            'get_bucket_logging', 'GetBucketLogging', 'LoggingEnabled',
            {'LoggingEnabled': {
                'TargetBucket': '', 'TargetGrants': [], 'TargetPrefix': ''}},
            True
        ),
        (
            'get_bucket_request_payment', 'GetBucketRequestPayment', 'Payer',
            {'Payer': 'BucketOwner'}, True
        ),
        (
            'get_bucket_versioning', 'GetBucketVersioning', 'Versioning', {},
            False
        ),
        (
            'get_bucket_website', 'GetBucketWebsite', 'Website', {}, False
        ),
        (
            'get_bucket_acl', 'GetBucketAcl', 'ACL',
            {'Owner': {}, 'Grants': []}, False
        ),
    ]
    expected_metadata = {'Name': BUCKET_NAME}
    for (method_name, error_string_of_method_name, metadata_key, response,
         result_has_key) in get_bucket_api_call_data:
      if failing_method_name == method_name:
        self.stubber.add_client_error(method=method_name)
        expected_metadata[metadata_key] = errors.S3ApiError(
            'An error occurred () when calling the {} operation: '
            .format(error_string_of_method_name))
      else:
        self.stubber.add_response(method=method_name,
                                  expected_params={'Bucket': BUCKET_NAME},
                                  service_response=response)
        expected_metadata[metadata_key] = (
            response[metadata_key] if result_has_key else response)

    expected_resource = s3_resource_reference.S3BucketResource(
        storage_url.CloudUrl(SCHEME, BUCKET_NAME), metadata=expected_metadata)
    with self.stubber:
      observed_resource = self.s3_api.get_bucket(
          BUCKET_NAME, fields_scope=cloud_api.FieldsScope.FULL)
      self.assertEqual(observed_resource, expected_resource)
Example #14
  def test_gets_bucket_receives_all_api_errors(self):
    get_bucket_api_method_names_and_metadata_key = [
        ('get_bucket_location', 'GetBucketLocation', 'LocationConstraint'),
        ('get_bucket_cors', 'GetBucketCors', 'CORSRules'),
        ('get_bucket_lifecycle_configuration',
         'GetBucketLifecycleConfiguration', 'LifecycleConfiguration'),
        ('get_bucket_logging', 'GetBucketLogging', 'LoggingEnabled'),
        ('get_bucket_request_payment', 'GetBucketRequestPayment', 'Payer'),
        ('get_bucket_versioning', 'GetBucketVersioning', 'Versioning'),
        ('get_bucket_website', 'GetBucketWebsite', 'Website'),
        ('get_bucket_acl', 'GetBucketAcl', 'ACL'),
    ]
    expected_metadata = {'Name': BUCKET_NAME}
    for (method_name, error_string_of_method_name,
         metadata_key) in get_bucket_api_method_names_and_metadata_key:
      self.stubber.add_client_error(method=method_name)
      expected_metadata[metadata_key] = errors.S3ApiError(
          'An error occurred () when calling the {} operation: '
          .format(error_string_of_method_name))

    expected_resource = s3_resource_reference.S3BucketResource(
        storage_url.CloudUrl(SCHEME, BUCKET_NAME), metadata=expected_metadata)
    with self.stubber:
      observed_resource = self.s3_api.get_bucket(
          BUCKET_NAME, fields_scope=cloud_api.FieldsScope.FULL)
      self.assertEqual(observed_resource, expected_resource)
Example #15
    def get_object_metadata(self,
                            bucket_name,
                            object_name,
                            generation=None,
                            fields_scope=None):
        """See super class."""
        request = {'Bucket': bucket_name, 'Key': object_name}

        # The VersionId keyword argument to head_object is not nullable if it is
        # present, so only include it in the function call if it has a value.
        if generation is not None:
            request['VersionId'] = generation

        try:
            object_dict = self.client.head_object(**request)
        except botocore.exceptions.ClientError as e:
            _raise_if_not_found_error(
                e,
                storage_url.CloudUrl(storage_url.ProviderPrefix.S3,
                                     bucket_name, object_name,
                                     generation).url_string)
            raise e

        # User requested ACLs with FieldsScope.FULL.
        if fields_scope is cloud_api.FieldsScope.FULL:
            try:
                acl_response = self.client.get_object_acl(**request)
                acl_response.pop('ResponseMetadata', None)
                object_dict['ACL'] = acl_response
            except botocore.exceptions.ClientError as error:
                object_dict['ACL'] = errors.S3ApiError(error)

        return _get_object_resource_from_s3_response(object_dict, bucket_name,
                                                     object_name)
Example #16
  def Run(self, args):
    """Command execution logic."""
    encryption_util.initialize_key_store(args)
    if args.path:
      storage_urls = [storage_url.storage_url_from_string(path)
                      for path in args.path]
      for url in storage_urls:
        if not isinstance(url, storage_url.CloudUrl):
          raise errors.InvalidUrlError('Ls only works for cloud URLs.'
                                       ' Error for: {}'.format(url.url_string))
    else:
      storage_urls = [storage_url.CloudUrl(cloud_api.DEFAULT_PROVIDER)]

    if args.full:
      display_detail = cloud_list_task.DisplayDetail.FULL
    elif args.json:
      display_detail = cloud_list_task.DisplayDetail.JSON
    elif args.long:
      display_detail = cloud_list_task.DisplayDetail.LONG
    else:
      display_detail = cloud_list_task.DisplayDetail.SHORT

    tasks = []
    for url in storage_urls:
      tasks.append(
          cloud_list_task.CloudListTask(
              url,
              all_versions=args.all_versions,
              buckets_flag=args.buckets,
              display_detail=display_detail,
              include_etag=args.etag,
              readable_sizes=args.readable_sizes,
              recursion_flag=args.recursive))
    task_executor.execute_tasks(tasks, parallelizable=False)
Example #17
def get_temporary_component_resource(source_resource, destination_resource,
                                     random_prefix, component_id):
    """Gets a temporary component destination resource for a composite upload.

  Args:
    source_resource (resource_reference.FileObjectResource): The upload source.
    destination_resource (resource_reference.ObjectResource|UnknownResource):
      The upload destination.
    random_prefix (str): Added to temporary component names to avoid collisions
      between different instances of the CLI uploading to the same destination.
    component_id (int): An id that's not shared by any other component in this
      transfer.

  Returns:
    A resource_reference.UnknownResource representing the component's
    destination.
  """
    component_object_name = _get_temporary_component_name(
        source_resource, random_prefix, component_id)

    destination_url = destination_resource.storage_url
    component_url = storage_url.CloudUrl(destination_url.scheme,
                                         destination_url.bucket_name,
                                         component_object_name)

    return resource_reference.UnknownResource(component_url)
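A hypothetical usage sketch (my illustration: the bucket, object name, prefix, and file path are made up, and the source resource is assumed to be built from a file URL as elsewhere in the SDK):

destination = resource_reference.UnknownResource(
    storage_url.CloudUrl(storage_url.ProviderPrefix.GCS,
                         bucket_name='my-bucket',
                         object_name='composed-object'))
source = resource_reference.FileObjectResource(
    storage_url.storage_url_from_string('/tmp/part.bin'))
component = get_temporary_component_resource(
    source, destination, random_prefix='u1a2b3', component_id=0)
# The result is an UnknownResource in the destination bucket under a
# temporary object name derived from the source, prefix, and component id.
print(component.storage_url.bucket_name)  # my-bucket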
Example #18
  def get_object_metadata(self,
                          bucket_name,
                          object_name,
                          generation=None,
                          fields_scope=cloud_api.FieldsScope.NO_ACL):
    """See super class."""

    # S3 requires a string, but GCS uses an int for generation.
    if generation:
      generation = int(generation)

    projection = self._GetProjection(
        fields_scope, self.messages.StorageObjectsGetRequest)

    # TODO(b/160238394) Decrypt metadata fields if necessary.
    try:
      object_metadata = self.client.objects.Get(
          self.messages.StorageObjectsGetRequest(
              bucket=bucket_name,
              object=object_name,
              generation=generation,
              projection=projection))
    except apitools_exceptions.HttpNotFoundError:
      raise cloud_errors.NotFoundError(
          'Object not found: {}'.format(storage_url.CloudUrl(
              storage_url.ProviderPrefix.GCS, bucket_name, object_name,
              generation).url_string)
      )
    return _object_resource_from_metadata(object_metadata)
Example #19
    def _upload_using_managed_transfer_utility(self, source_stream,
                                               destination_resource,
                                               extra_args):
        """Uploads the data using boto3's managed transfer utility.

    Calls the upload_fileobj method, which performs multi-threaded multipart
    uploads automatically and is slightly faster than the put_object API
    method. However, upload_fileobj cannot perform data integrity checks, so
    we have to use the put_object method in such cases.

    Args:
      source_stream (a file-like object): A file-like object to upload. At a
        minimum, it must implement the read method, and must return bytes.
      destination_resource (resource_reference.ObjectResource|UnknownResource):
        Represents the metadata for the destination object.
      extra_args (dict): Extra arguments that may be passed to the client
        operation.

    Returns:
      resource_reference.ObjectResource with uploaded object's metadata.
    """
        bucket_name = destination_resource.storage_url.bucket_name
        object_name = destination_resource.storage_url.object_name
        self.client.upload_fileobj(Fileobj=source_stream,
                                   Bucket=bucket_name,
                                   Key=object_name,
                                   ExtraArgs=extra_args)
        return self.get_object_metadata(
            bucket_name, object_name,
            request_config_factory.get_request_config(
                storage_url.CloudUrl(scheme=storage_url.ProviderPrefix.S3)))
Example #20
def get_object_resource_from_metadata(metadata):
    """Helper method to generate a ObjectResource instance from GCS metadata.

  Args:
    metadata (messages.Object): Extract resource properties from this.

  Returns:
    ObjectResource with properties populated by metadata.
  """
    if metadata.generation is not None:
        # Generation may be the integer 0, which is valid although falsy.
        generation = str(metadata.generation)
    else:
        generation = None
    url = storage_url.CloudUrl(scheme=storage_url.ProviderPrefix.GCS,
                               bucket_name=metadata.bucket,
                               object_name=metadata.name,
                               generation=generation)

    if metadata.customerEncryption:
        key_hash = metadata.customerEncryption.keySha256
    else:
        key_hash = None

    return gcs_resource_reference.GcsObjectResource(
        url,
        content_type=metadata.contentType,
        creation_time=metadata.timeCreated,
        decryption_key_hash=key_hash,
        etag=metadata.etag,
        crc32c_hash=metadata.crc32c,
        md5_hash=metadata.md5Hash,
        metadata=metadata,
        metageneration=metadata.metageneration,
        size=metadata.size)
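An aside on the `is not None` check above (illustrative, not from the original code): generation 0 is falsy, so a plain truthiness test would silently drop a valid generation.

generation = 0
assert not generation          # 0 is falsy, so `if generation:` would skip it,
assert generation is not None  # yet it is a real, valid generation
assert str(generation) == '0'  # and should survive as the string '0'.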
Example #21
  def test_raises_api_error(self):
    self.stubber.add_client_error('put_object', http_status_code=404)

    upload_resource = resource_reference.UnknownResource(
        storage_url.CloudUrl(SCHEME, BUCKET_NAME, OBJECT_NAME))
    with self.stubber:
      with self.assertRaises(errors.S3ApiError):
        self.s3_api.upload_object(io.BytesIO(BINARY_DATA), upload_resource)
Example #22
  def test_upload_raises_error_for_unrecognized_predefined_acl(self):
    upload_resource = resource_reference.UnknownResource(
        storage_url.CloudUrl(SCHEME, BUCKET_NAME, OBJECT_NAME))
    with self.assertRaisesRegex(
        ValueError, ('Could not translate predefined_acl_string fake_acl to'
                     ' AWS-accepted ACL.')):
      self.s3_api.upload_object(
          io.BytesIO(BINARY_DATA),
          upload_resource,
          request_config=cloud_api.RequestConfig('fake_acl'))
Example #23
  def ListBuckets(self, fields_scope=None):
    """See super class."""
    try:
      response = self.client.list_buckets()
      for bucket in response['Buckets']:
        yield resource_reference.BucketResource(
            storage_url.CloudUrl(storage_url.ProviderPrefix.S3,
                                 bucket['Name']),
            metadata={'Bucket': bucket, 'Owner': response['Owner']})
    except botocore.exceptions.ClientError as error:
      core_exceptions.reraise(errors.S3ApiError(error))
Example #24
def _bucket_resource_from_metadata(metadata):
  """Helper method to generate a BucketResource instance from GCS metadata.

  Args:
    metadata (messages.Bucket): Extract resource properties from this.

  Returns:
    BucketResource with properties populated by metadata.
  """
  url = storage_url.CloudUrl(scheme=storage_url.ProviderPrefix.GCS,
                             bucket_name=metadata.name)
  return gcs_resource_reference.GcsBucketResource(
      url, etag=metadata.etag, metadata=metadata)
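A hypothetical usage sketch (assumes the apitools-generated storage v1 messages module obtained via the Cloud SDK; the bucket name and etag are illustrative):

from googlecloudsdk.api_lib.util import apis as core_apis

messages = core_apis.GetMessagesModule('storage', 'v1')
bucket_metadata = messages.Bucket(name='my-bucket', etag='CAE=')
resource = _bucket_resource_from_metadata(bucket_metadata)
print(resource.storage_url.url_string)  # A gs:// URL for my-bucket.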
Example #25
    def list_objects(self,
                     bucket_name,
                     prefix=None,
                     delimiter=None,
                     all_versions=None,
                     fields_scope=cloud_api.FieldsScope.NO_ACL):
        """See super class."""
        projection = self._get_projection(
            fields_scope, self.messages.StorageObjectsListRequest)
        global_params = None
        if fields_scope == cloud_api.FieldsScope.SHORT:
            global_params = self.messages.StandardQueryParameters()
            global_params.fields = (
                'prefixes,items/name,items/size,items/generation,nextPageToken'
            )

        object_list = None
        while True:
            apitools_request = self.messages.StorageObjectsListRequest(
                bucket=bucket_name,
                prefix=prefix,
                delimiter=delimiter,
                versions=all_versions,
                projection=projection,
                pageToken=object_list.nextPageToken if object_list else None,
                maxResults=cloud_api.NUM_ITEMS_PER_LIST_PAGE)

            try:
                object_list = self.client.objects.List(
                    apitools_request, global_params=global_params)
            except apitools_exceptions.HttpError as e:
                core_exceptions.reraise(
                    cloud_errors.translate_error(e, _ERROR_TRANSLATION))

            # Yield objects.
            # TODO(b/160238394) Decrypt metadata fields if necessary.
            for object_metadata in object_list.items:
                object_metadata.bucket = bucket_name
                yield gcs_metadata_util.get_object_resource_from_metadata(
                    object_metadata)

            # Yield prefixes.
            for prefix_string in object_list.prefixes:
                yield resource_reference.PrefixResource(storage_url.CloudUrl(
                    scheme=storage_url.ProviderPrefix.GCS,
                    bucket_name=bucket_name,
                    object_name=prefix_string),
                                                        prefix=prefix_string)

            if not object_list.nextPageToken:
                break
Example #26
    def _list_objects_side_effect(self, *unused_args, **kwargs):
        """Mock the list_objects API method sourcing data from TEST_OBJECT_NAMES.

    Args:
      **kwargs (dict): Contains arguments dict for list_objects.

    Yields:
      resource_reference.Resource instances consisting of list of
        ObjectResource and/or PrefixResource instanes
        filtered based on the request prefix and request delimiter.
    """
        objects = []
        prefixes = set()

        request_prefix = kwargs['prefix'] or ''
        request_delimiter = kwargs['delimiter']
        filtered_object_suffixes = [
            object_name[len(request_prefix):]
            for object_name in TEST_OBJECT_NAMES
            if object_name.startswith(request_prefix)
        ]

        for object_suffix in filtered_object_suffixes:
            if request_delimiter:
                name, _, suffix = object_suffix.partition(request_delimiter)
                if not suffix:  # Leaf object.
                    objects.append(
                        self.messages.Object(name=request_prefix +
                                             object_suffix,
                                             bucket=TEST_BUCKET_NAME))
                else:
                    prefixes.add('%s%s%s' %
                                 (request_prefix, name, request_delimiter))
            else:
                objects.append(
                    self.messages.Object(name=request_prefix + object_suffix,
                                         bucket=TEST_BUCKET_NAME))

        prefixes = sorted(prefixes)
        objects = self.messages.Objects(items=objects, prefixes=prefixes)

        for o in objects.items:
            yield gcs_api._object_resource_from_metadata(o)

        for prefix_string in objects.prefixes:
            yield resource_reference.PrefixResource(
                storage_url_object=storage_url.CloudUrl(
                    scheme=cloud_api.DEFAULT_PROVIDER,
                    bucket_name=kwargs['bucket_name'],
                    object_name=prefix_string),
                prefix=prefix_string)
Example #27
  def test_list_objects_with_full_fields_scope(self):
    # Test with more than one object to ensure that a get_object_metadata call
    # is made for each object.
    objects = [{'Key': 'obj1'}, {'Key': 'obj2'}]
    self.stubber.add_response(
        'list_objects_v2',
        service_response={
            'Name': BUCKET_NAME,
            'Contents': objects,
        },
        expected_params={
            'Bucket': BUCKET_NAME,
            'Prefix': '',
            'Delimiter': '',
        })

    expected_resources = []
    for object_dict in objects:
      # For each object, we expect a call to the head_object and get_object_acl
      # API methods.
      self.stubber.add_response(
          'head_object',
          service_response={'ETag': 'e'},
          expected_params={
              'Bucket': BUCKET_NAME,
              'Key': object_dict['Key']
          })
      self.stubber.add_response(
          'get_object_acl',
          service_response={'Grants': []},
          expected_params={
              'Bucket': BUCKET_NAME,
              'Key': object_dict['Key']
          })

      expected_resources.append(
          s3_resource_reference.S3ObjectResource(
              storage_url.CloudUrl(SCHEME, BUCKET_NAME, object_dict['Key']),
              etag='e',
              metadata={
                  'ETag': 'e',
                  'ACL': {
                      'Grants': []
                  }
              }))

    with self.stubber:
      observed = self.s3_api.list_objects(
          BUCKET_NAME, fields_scope=cloud_api.FieldsScope.FULL)
      self.assertEqual(list(observed), expected_resources)
Example #28
  def test_uploads_with_unknown_resource(self):
    params = {'Bucket': BUCKET_NAME, 'Key': OBJECT_NAME, 'Body': BINARY_DATA}
    response = {'ETag': ETAG}
    self.stubber.add_response(
        'put_object', service_response=response, expected_params=params)
    upload_resource = resource_reference.UnknownResource(
        storage_url.CloudUrl(SCHEME, BUCKET_NAME, OBJECT_NAME))

    expected_resource = self.s3_api._get_object_resource_from_s3_response(
        response, BUCKET_NAME, OBJECT_NAME)
    with self.stubber:
      observed_resource = self.s3_api.upload_object(
          io.BytesIO(BINARY_DATA), upload_resource)
      self.assertEqual(observed_resource, expected_resource)
Example #29
  def list_buckets(self, fields_scope=cloud_api.FieldsScope.NO_ACL):
    """See super class."""
    try:
      response = self.client.list_buckets()
      for bucket in response['Buckets']:
        if fields_scope == cloud_api.FieldsScope.FULL:
          yield self.get_bucket(bucket['Name'], fields_scope)
        else:
          yield s3_resource_reference.S3BucketResource(
              storage_url.CloudUrl(
                  storage_url.ProviderPrefix.S3, bucket['Name']),
              metadata={'Bucket': bucket, 'Owner': response['Owner']})
    except botocore.exceptions.ClientError as error:
      core_exceptions.reraise(errors.S3ApiError(error))
Example #30
    def _get_prefix_resource_from_s3_response(self, prefix_dict, bucket_name):
        """Creates resource_reference.PrefixResource from S3 API response.

    Args:
      prefix_dict (dict): The S3 API response representing a prefix.
      bucket_name (str): Bucket for the prefix.

    Returns:
      A resource_reference.PrefixResource instance.
    """
        prefix = prefix_dict['Prefix']
        return resource_reference.PrefixResource(storage_url.CloudUrl(
            scheme=self.scheme, bucket_name=bucket_name, object_name=prefix),
                                                 prefix=prefix)
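For illustration (hypothetical wrapper instance and values, not from the original code), an S3 CommonPrefixes entry maps to a PrefixResource whose storage URL points at the prefix within the bucket:

# api: a hypothetical instance of the S3 API wrapper class above, with
# scheme set to the S3 provider prefix.
prefix_resource = api._get_prefix_resource_from_s3_response(
    {'Prefix': 'photos/2021/'}, bucket_name='my-bucket')
print(prefix_resource.prefix)                  # photos/2021/
print(prefix_resource.storage_url.url_string)  # An s3:// URL for the prefix.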