class Bucket(_PropertyMixin):
    """A class representing a Bucket on Cloud Storage.

    :type client: :class:`google.cloud.storage.client.Client`
    :param client: A client which holds credentials and project configuration
                   for the bucket (which requires a project).

    :type name: str
    :param name: The name of the bucket.
    """

    _MAX_OBJECTS_FOR_ITERATION = 256
    """Maximum number of existing objects allowed in iteration.

    This is used in Bucket.delete() and Bucket.make_public().
    """

    _STORAGE_CLASSES = ('STANDARD', 'NEARLINE', 'DURABLE_REDUCED_AVAILABILITY',
                        'MULTI_REGIONAL', 'REGIONAL', 'COLDLINE')

    def __init__(self, client, name=None):
        super(Bucket, self).__init__(name=name)
        self._client = client
        self._acl = BucketACL(self)
        self._default_object_acl = DefaultObjectACL(self)

    def __repr__(self):
        return '<Bucket: %s>' % self.name

    @property
    def client(self):
        """The client bound to this bucket."""
        return self._client

    def blob(self, blob_name, chunk_size=None, encryption_key=None):
        """Factory constructor for blob object.

        .. note::
          This will not make an HTTP request; it simply instantiates
          a blob object owned by this bucket.

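        For example (a sketch; ``bucket`` is assumed to be an existing
        :class:`Bucket`, and no request is made)::

          >>> blob = bucket.blob('my-file.txt')
          >>> blob.name
          'my-file.txt'
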
        :type blob_name: str
        :param blob_name: The name of the blob to be instantiated.

        :type chunk_size: int
        :param chunk_size: The size of a chunk of data whenever iterating,
                           in bytes. This must be a multiple of 256 KB per
                           the API specification.

        :type encryption_key: bytes
        :param encryption_key:
            Optional 32 byte encryption key for customer-supplied encryption.

        :rtype: :class:`google.cloud.storage.blob.Blob`
        :returns: The blob object created.
        """
        return Blob(name=blob_name,
                    bucket=self,
                    chunk_size=chunk_size,
                    encryption_key=encryption_key)

    def exists(self, client=None):
        """Determines whether or not this bucket exists.

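        For example (a sketch; assumes ``bucket`` was obtained from a live
        client)::

          >>> bucket.exists()
          True
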
        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the current bucket.

        :rtype: bool
        :returns: True if the bucket exists in Cloud Storage.
        """
        client = self._require_client(client)
        try:
            # We only need the status code (200 or not) so we seek to
            # minimize the returned payload.
            query_params = {'fields': 'name'}
            # We intentionally pass `_target_object=None` since fields=name
            # would limit the local properties.
            client.connection.api_request(method='GET',
                                          path=self.path,
                                          query_params=query_params,
                                          _target_object=None)
            # NOTE: This will not fail immediately in a batch. However, when
            #       Batch.finish() is called, the resulting `NotFound` will be
            #       raised.
            return True
        except NotFound:
            return False

    def create(self, client=None):
        """Creates current bucket.

        If the bucket already exists, will raise
        :class:`google.cloud.exceptions.Conflict`.

        This implements "storage.buckets.insert".

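        For example (a sketch; ``'my-new-bucket'`` is a hypothetical name,
        and ``client`` is assumed to hold a project)::

          >>> from google.cloud import storage
          >>> client = storage.Client()
          >>> bucket = storage.Bucket(client, name='my-new-bucket')
          >>> bucket.create()
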
        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the current bucket.
        """
        client = self._require_client(client)
        query_params = {'project': client.project}
        properties = {key: self._properties[key] for key in self._changes}
        properties['name'] = self.name
        api_response = client.connection.api_request(method='POST',
                                                     path='/b',
                                                     query_params=query_params,
                                                     data=properties,
                                                     _target_object=self)
        self._set_properties(api_response)

    @property
    def acl(self):
        """Create our ACL on demand."""
        return self._acl

    @property
    def default_object_acl(self):
        """Create our defaultObjectACL on demand."""
        return self._default_object_acl

    @staticmethod
    def path_helper(bucket_name):
        """Relative URL path for a bucket.

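        For example::

          >>> Bucket.path_helper('my-bucket')
          '/b/my-bucket'
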
        :type bucket_name: str
        :param bucket_name: The bucket name in the path.

        :rtype: str
        :returns: The relative URL path for ``bucket_name``.
        """
        return '/b/' + bucket_name

    @property
    def path(self):
        """The URL path to this bucket."""
        if not self.name:
            raise ValueError('Cannot determine path without bucket name.')

        return self.path_helper(self.name)

    def get_blob(self, blob_name, client=None):
        """Get a blob object by name.

        This will return None if the blob doesn't exist::

          >>> from google.cloud import storage
          >>> client = storage.Client()
          >>> bucket = client.get_bucket('my-bucket')
          >>> print(bucket.get_blob('path/to/blob.txt'))
          <Blob: my-bucket, path/to/blob.txt>
          >>> print(bucket.get_blob('does-not-exist.txt'))
          None

        :type blob_name: str
        :param blob_name: The name of the blob to retrieve.

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the current bucket.

        :rtype: :class:`google.cloud.storage.blob.Blob` or None
        :returns: The blob object if it exists, otherwise None.
        """
        client = self._require_client(client)
        blob = Blob(bucket=self, name=blob_name)
        try:
            response = client.connection.api_request(method='GET',
                                                     path=blob.path,
                                                     _target_object=blob)
            # NOTE: We assume response.get('name') matches `blob_name`.
            blob._set_properties(response)
            # NOTE: This will not fail immediately in a batch. However, when
            #       Batch.finish() is called, the resulting `NotFound` will be
            #       raised.
            return blob
        except NotFound:
            return None

    def list_blobs(self,
                   max_results=None,
                   page_token=None,
                   prefix=None,
                   delimiter=None,
                   versions=None,
                   projection='noAcl',
                   fields=None,
                   client=None):
        """Return an iterator used to find blobs in the bucket.

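        For example (a sketch; assumes the bucket stores objects under a
        ``photos/`` prefix)::

          >>> for blob in bucket.list_blobs(prefix='photos/', delimiter='/'):
          ...     print(blob.name)
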
        :type max_results: int
        :param max_results: (Optional) Maximum number of blobs to return.

        :type page_token: str
        :param page_token: (Optional) Opaque marker for the next "page" of
                           blobs. If not passed, will return the first page
                           of blobs.

        :type prefix: str
        :param prefix: (Optional) prefix used to filter blobs.

        :type delimiter: str
        :param delimiter: (Optional) Delimiter, used with ``prefix`` to
                          emulate hierarchy.

        :type versions: bool
        :param versions: (Optional) Whether object versions should be returned
                         as separate blobs.

        :type projection: str
        :param projection: (Optional) If used, must be 'full' or 'noAcl'.
                           Defaults to ``'noAcl'``. Specifies the set of
                           properties to return.

        :type fields: str
        :param fields: (Optional) Selector specifying which fields to include
                       in a partial response. Must be a comma-separated list
                       of fields. For example, to get a partial response with
                       just the next page token and the language of each blob
                       returned: ``'items/contentLanguage,nextPageToken'``.

        :type client: :class:`~google.cloud.storage.client.Client`
        :param client: (Optional) The client to use.  If not passed, falls back
                       to the ``client`` stored on the current bucket.

        :rtype: :class:`~google.cloud.iterator.Iterator`
        :returns: Iterator of all :class:`~google.cloud.storage.blob.Blob`
                  in this bucket matching the arguments.
        """
        extra_params = {}

        if prefix is not None:
            extra_params['prefix'] = prefix

        if delimiter is not None:
            extra_params['delimiter'] = delimiter

        if versions is not None:
            extra_params['versions'] = versions

        extra_params['projection'] = projection

        if fields is not None:
            extra_params['fields'] = fields

        client = self._require_client(client)
        path = self.path + '/o'
        iterator = Iterator(client=client,
                            path=path,
                            item_to_value=_item_to_blob,
                            page_token=page_token,
                            max_results=max_results,
                            extra_params=extra_params,
                            page_start=_blobs_page_start)
        iterator.bucket = self
        iterator.prefixes = set()
        return iterator

    def delete(self, force=False, client=None):
        """Delete this bucket.

        The bucket **must** be empty in order to submit a delete request. If
        ``force=True`` is passed, this will first attempt to delete all the
        objects / blobs in the bucket (i.e. try to empty the bucket).

        If the bucket doesn't exist, this will raise
        :class:`google.cloud.exceptions.NotFound`.  If the bucket is not empty
        (and ``force=False``), will raise
        :class:`google.cloud.exceptions.Conflict`.

        If ``force=True`` and the bucket contains more than 256 objects / blobs
        this will cowardly refuse to delete the objects (or the bucket). This
        is to prevent accidental bucket deletion and to prevent extremely long
        runtime of this method.

        :type force: bool
        :param force: If True, empties the bucket's objects then deletes it.

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the current bucket.

        :raises: :class:`ValueError` if ``force`` is ``True`` and the bucket
                 contains more than 256 objects / blobs.
        """
        client = self._require_client(client)
        if force:
            blobs = list(self.list_blobs(
                max_results=self._MAX_OBJECTS_FOR_ITERATION + 1,
                client=client))
            if len(blobs) > self._MAX_OBJECTS_FOR_ITERATION:
                message = ('Refusing to delete bucket with more than '
                           '%d objects. If you actually want to delete '
                           'this bucket, please delete the objects '
                           'yourself before calling Bucket.delete().') % (
                               self._MAX_OBJECTS_FOR_ITERATION, )
                raise ValueError(message)

            # Ignore 404 errors on delete.
            self.delete_blobs(blobs, on_error=lambda blob: None, client=client)

        # We intentionally pass `_target_object=None` since a DELETE
        # request has no response value (whether in a standard request or
        # in a batch request).
        client.connection.api_request(method='DELETE',
                                      path=self.path,
                                      _target_object=None)

    def delete_blob(self, blob_name, client=None):
        """Deletes a blob from the current bucket.

        If the blob isn't found (backend 404), raises a
        :class:`google.cloud.exceptions.NotFound`.

        For example::

          >>> from google.cloud.exceptions import NotFound
          >>> from google.cloud import storage
          >>> client = storage.Client()
          >>> bucket = client.get_bucket('my-bucket')
          >>> list(bucket.list_blobs())
          [<Blob: my-bucket, my-file.txt>]
          >>> bucket.delete_blob('my-file.txt')
          >>> try:
          ...   bucket.delete_blob('doesnt-exist')
          ... except NotFound:
          ...   pass

        :type blob_name: str
        :param blob_name: A blob name to delete.

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the current bucket.

        :raises: :class:`google.cloud.exceptions.NotFound` (to suppress
                 the exception, call ``delete_blobs``, passing a no-op
                 ``on_error`` callback, e.g.::

                 >>> bucket.delete_blobs([blob], on_error=lambda blob: None)
        """
        client = self._require_client(client)
        blob_path = Blob.path_helper(self.path, blob_name)
        # We intentionally pass `_target_object=None` since a DELETE
        # request has no response value (whether in a standard request or
        # in a batch request).
        client.connection.api_request(method='DELETE',
                                      path=blob_path,
                                      _target_object=None)

    def delete_blobs(self, blobs, on_error=None, client=None):
        """Deletes a list of blobs from the current bucket.

        Uses :func:`Bucket.delete_blob` to delete each individual blob.

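        For example (hypothetical names; missing blobs are skipped via the
        no-op ``on_error`` callback)::

          >>> bucket.delete_blobs(['stale-1.txt', 'stale-2.txt'],
          ...                     on_error=lambda blob: None)
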
        :type blobs: list of string or :class:`google.cloud.storage.blob.Blob`
        :param blobs: A list of blob names or Blob objects to delete.

        :type on_error: a callable taking (blob)
        :param on_error: If not ``None``, called once for each blob raising
                         :class:`google.cloud.exceptions.NotFound`;
                         otherwise, the exception is propagated.

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the current bucket.

        :raises: :class:`google.cloud.exceptions.NotFound` (if
                 `on_error` is not passed).
        """
        for blob in blobs:
            try:
                blob_name = blob
                if not isinstance(blob_name, six.string_types):
                    blob_name = blob.name
                self.delete_blob(blob_name, client=client)
            except NotFound:
                if on_error is not None:
                    on_error(blob)
                else:
                    raise

    def copy_blob(self,
                  blob,
                  destination_bucket,
                  new_name=None,
                  client=None,
                  preserve_acl=True):
        """Copy the given blob to the given bucket, optionally with a new name.

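        For example (``backup_bucket`` is a hypothetical second bucket)::

          >>> blob = bucket.blob('report.csv')
          >>> copy = bucket.copy_blob(blob, backup_bucket, 'report-copy.csv')
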
        :type blob: :class:`google.cloud.storage.blob.Blob`
        :param blob: The blob to be copied.

        :type destination_bucket: :class:`google.cloud.storage.bucket.Bucket`
        :param destination_bucket: The bucket into which the blob should be
                                   copied.

        :type new_name: str
        :param new_name: (optional) the new name for the copied file.

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the current bucket.

        :type preserve_acl: bool
        :param preserve_acl: Optional. Copies ACL from old blob to new blob.
                             Default: True.

        :rtype: :class:`google.cloud.storage.blob.Blob`
        :returns: The new Blob.
        """
        client = self._require_client(client)
        if new_name is None:
            new_name = blob.name
        new_blob = Blob(bucket=destination_bucket, name=new_name)
        api_path = blob.path + '/copyTo' + new_blob.path
        copy_result = client.connection.api_request(method='POST',
                                                    path=api_path,
                                                    _target_object=new_blob)
        if not preserve_acl:
            new_blob.acl.save(acl={}, client=client)
        new_blob._set_properties(copy_result)
        return new_blob

    def rename_blob(self, blob, new_name, client=None):
        """Rename the given blob using copy and delete operations.

        Effectively, copies blob to the same bucket with a new name, then
        deletes the blob.

        .. warning::

          This method will first duplicate the data and then delete the
          old blob.  This means that with very large objects renaming
          could be a (temporarily) costly or slow operation.

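        For example (``blob`` is assumed to exist in this bucket)::

          >>> new_blob = bucket.rename_blob(blob, 'new-name.txt')
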
        :type blob: :class:`google.cloud.storage.blob.Blob`
        :param blob: The blob to be renamed.

        :type new_name: str
        :param new_name: The new name for this blob.

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the current bucket.

        :rtype: :class:`Blob`
        :returns: The newly-renamed blob.
        """
        new_blob = self.copy_blob(blob, self, new_name, client=client)
        blob.delete(client=client)
        return new_blob

    @property
    def cors(self):
        """Retrieve CORS policies configured for this bucket.

        See: http://www.w3.org/TR/cors/ and
             https://cloud.google.com/storage/docs/json_api/v1/buckets

        :rtype: list of dictionaries
        :returns: A sequence of mappings describing each CORS policy.
        """
        return [
            copy.deepcopy(policy)
            for policy in self._properties.get('cors', ())
        ]

    @cors.setter
    def cors(self, entries):
        """Set CORS policies configured for this bucket.

        See: http://www.w3.org/TR/cors/ and
             https://cloud.google.com/storage/docs/json_api/v1/buckets

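        For example (a sketch of a single policy; keys follow the JSON API
        ``cors`` resource)::

          >>> bucket.cors = [{
          ...     'origin': ['https://example.com'],
          ...     'method': ['GET'],
          ...     'responseHeader': ['Content-Type'],
          ...     'maxAgeSeconds': 3600,
          ... }]
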
        :type entries: list of dictionaries
        :param entries: A sequence of mappings describing each CORS policy.
        """
        self._patch_property('cors', entries)

    @property
    def etag(self):
        """Retrieve the ETag for the bucket.

        See: http://tools.ietf.org/html/rfc2616#section-3.11 and
             https://cloud.google.com/storage/docs/json_api/v1/buckets

        :rtype: str or ``NoneType``
        :returns: The bucket etag or ``None`` if the property is not
                  set locally.
        """
        return self._properties.get('etag')

    @property
    def id(self):
        """Retrieve the ID for the bucket.

        See: https://cloud.google.com/storage/docs/json_api/v1/buckets

        :rtype: str or ``NoneType``
        :returns: The ID of the bucket or ``None`` if the property is not
                  set locally.
        """
        return self._properties.get('id')

    @property
    def lifecycle_rules(self):
        """Lifecycle rules configured for this bucket.

        See: https://cloud.google.com/storage/docs/lifecycle and
             https://cloud.google.com/storage/docs/json_api/v1/buckets

        :rtype: list(dict)
        :returns: A sequence of mappings describing each lifecycle rule.
        """
        info = self._properties.get('lifecycle', {})
        return [copy.deepcopy(rule) for rule in info.get('rule', ())]

    @lifecycle_rules.setter
    def lifecycle_rules(self, rules):
        self._patch_property('lifecycle', {'rule': rules})

    location = _scalar_property('location')
    """Retrieve location configured for this bucket.

    See: https://cloud.google.com/storage/docs/json_api/v1/buckets and
    https://cloud.google.com/storage/docs/concepts-techniques#specifyinglocations

    If the property is not set locally, returns ``None``.

    :rtype: str or ``NoneType``
    """

    def get_logging(self):
        """Return info about access logging for this bucket.

        See: https://cloud.google.com/storage/docs/accesslogs#status

        :rtype: dict or None
        :returns: a dict with keys ``logBucket`` and ``logObjectPrefix``
                  (if logging is enabled), or None (if not).
        """
        info = self._properties.get('logging')
        return copy.deepcopy(info)

    def enable_logging(self, bucket_name, object_prefix=''):
        """Enable access logging for this bucket.

        See: https://cloud.google.com/storage/docs/accesslogs#delivery

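        For example (``'my-log-bucket'`` is a hypothetical bucket name)::

          >>> bucket.enable_logging('my-log-bucket', object_prefix='access-')
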
        :type bucket_name: str
        :param bucket_name: name of bucket in which to store access logs

        :type object_prefix: str
        :param object_prefix: prefix for access log filenames
        """
        info = {'logBucket': bucket_name, 'logObjectPrefix': object_prefix}
        self._patch_property('logging', info)

    def disable_logging(self):
        """Disable access logging for this bucket.

        See: https://cloud.google.com/storage/docs/accesslogs#disabling
        """
        self._patch_property('logging', None)

    @property
    def metageneration(self):
        """Retrieve the metageneration for the bucket.

        See: https://cloud.google.com/storage/docs/json_api/v1/buckets

        :rtype: int or ``NoneType``
        :returns: The metageneration of the bucket or ``None`` if the property
                  is not set locally.
        """
        metageneration = self._properties.get('metageneration')
        if metageneration is not None:
            return int(metageneration)

    @property
    def owner(self):
        """Retrieve info about the owner of the bucket.

        See: https://cloud.google.com/storage/docs/json_api/v1/buckets

        :rtype: dict or ``NoneType``
        :returns: Mapping of owner's role/ID. If the property is not set
                  locally, returns ``None``.
        """
        return copy.deepcopy(self._properties.get('owner'))

    @property
    def project_number(self):
        """Retrieve the number of the project to which the bucket is assigned.

        See: https://cloud.google.com/storage/docs/json_api/v1/buckets

        :rtype: int or ``NoneType``
        :returns: The project number that owns the bucket or ``None`` if the
                  property is not set locally.
        """
        project_number = self._properties.get('projectNumber')
        if project_number is not None:
            return int(project_number)

    @property
    def self_link(self):
        """Retrieve the URI for the bucket.

        See: https://cloud.google.com/storage/docs/json_api/v1/buckets

        :rtype: str or ``NoneType``
        :returns: The self link for the bucket or ``None`` if the property is
                  not set locally.
        """
        return self._properties.get('selfLink')

    @property
    def storage_class(self):
        """Retrieve the storage class for the bucket.

        See: https://cloud.google.com/storage/docs/storage-classes

        :rtype: str or ``NoneType``
        :returns: If set, one of "MULTI_REGIONAL", "REGIONAL",
                  "NEARLINE", "COLDLINE", "STANDARD", or
                  "DURABLE_REDUCED_AVAILABILITY", else ``None``.
        """
        return self._properties.get('storageClass')

    @storage_class.setter
    def storage_class(self, value):
        """Set the storage class for the bucket.

        See: https://cloud.google.com/storage/docs/storage-classes

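        For example::

          >>> bucket.storage_class = 'NEARLINE'
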
        :type value: str
        :param value: one of "MULTI_REGIONAL", "REGIONAL", "NEARLINE",
                      "COLDLINE", "STANDARD", or "DURABLE_REDUCED_AVAILABILITY"
        """
        if value not in self._STORAGE_CLASSES:
            raise ValueError('Invalid storage class: %s' % (value, ))
        self._patch_property('storageClass', value)

    @property
    def time_created(self):
        """Retrieve the timestamp at which the bucket was created.

        See: https://cloud.google.com/storage/docs/json_api/v1/buckets

        :rtype: :class:`datetime.datetime` or ``NoneType``
        :returns: Datetime object parsed from RFC3339 valid timestamp, or
                  ``None`` if the property is not set locally.
        """
        value = self._properties.get('timeCreated')
        if value is not None:
            return _rfc3339_to_datetime(value)

    @property
    def versioning_enabled(self):
        """Is versioning enabled for this bucket?

        See:  https://cloud.google.com/storage/docs/object-versioning for
        details.

        :rtype: bool
        :returns: True if enabled, else False.
        """
        versioning = self._properties.get('versioning', {})
        return versioning.get('enabled', False)

    @versioning_enabled.setter
    def versioning_enabled(self, value):
        """Enable versioning for this bucket.

        See:  https://cloud.google.com/storage/docs/object-versioning for
        details.

        :type value: convertible to boolean
        :param value: should versioning be enabled for the bucket?
        """
        self._patch_property('versioning', {'enabled': bool(value)})

    def configure_website(self, main_page_suffix=None, not_found_page=None):
        """Configure website-related properties.

        See: https://developers.google.com/storage/docs/website-configuration

        .. note::
          This only works if your bucket name is a domain name
          (which requires verifying ownership of the domain with Google).

        If you want this bucket to host a website, just provide the name
        of an index page and a page to use when a blob isn't found::

          >>> from google.cloud import storage
          >>> client = storage.Client()
          >>> bucket = client.get_bucket(bucket_name)
          >>> bucket.configure_website('index.html', '404.html')

        You probably should also make the whole bucket public::

          >>> bucket.make_public(recursive=True, future=True)

        This says: "Make the bucket public, and all the stuff already in
        the bucket, and anything else I add to the bucket.  Just make it
        all public."

        :type main_page_suffix: str
        :param main_page_suffix: The page to use as the main page
                                 of a directory.
                                 Typically something like index.html.

        :type not_found_page: str
        :param not_found_page: The file to use when a page isn't found.
        """
        data = {
            'mainPageSuffix': main_page_suffix,
            'notFoundPage': not_found_page,
        }
        self._patch_property('website', data)

    def disable_website(self):
        """Disable the website configuration for this bucket.

        This is really just a shortcut for setting the website-related
        attributes to ``None``.
        """
        return self.configure_website(None, None)

    def make_public(self, recursive=False, future=False, client=None):
        """Make a bucket public.

        If ``recursive=True`` and the bucket contains more than 256
        objects / blobs this will cowardly refuse to make the objects public.
        This is to prevent extremely long runtime of this method.

        :type recursive: bool
        :param recursive: If True, this will make all blobs inside the bucket
                          public as well.

        :type future: bool
        :param future: If True, this will make all objects created in the
                       future public as well.

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the current bucket.
        """
        self.acl.all().grant_read()
        self.acl.save(client=client)

        if future:
            doa = self.default_object_acl
            if not doa.loaded:
                doa.reload(client=client)
            doa.all().grant_read()
            doa.save(client=client)

        if recursive:
            blobs = list(self.list_blobs(
                projection='full',
                max_results=self._MAX_OBJECTS_FOR_ITERATION + 1,
                client=client))
            if len(blobs) > self._MAX_OBJECTS_FOR_ITERATION:
                message = (
                    'Refusing to make public recursively with more than '
                    '%d objects. If you actually want to make every object '
                    'in this bucket public, please do it on the objects '
                    'yourself.') % (self._MAX_OBJECTS_FOR_ITERATION, )
                raise ValueError(message)

            for blob in blobs:
                blob.acl.all().grant_read()
                blob.acl.save(client=client)
class Blob(_PropertyMixin):
    """A wrapper around Cloud Storage's concept of an ``Object``.

    :type name: str
    :param name: The name of the blob.  This corresponds to the
                 unique path of the object in the bucket.

    :type bucket: :class:`google.cloud.storage.bucket.Bucket`
    :param bucket: The bucket to which this blob belongs.

    :type chunk_size: int
    :param chunk_size: The size of a chunk of data whenever iterating,
                       in bytes. This must be a multiple of 256 KB per the
                       API specification.

    :type encryption_key: bytes
    :param encryption_key:
        Optional 32 byte encryption key for customer-supplied encryption.
        See https://cloud.google.com/storage/docs/encryption#customer-supplied
    """

    _chunk_size = None  # Default value for each instance.

    _CHUNK_SIZE_MULTIPLE = 256 * 1024
    """Number (256 KB, in bytes) that must divide the chunk size."""

    _STORAGE_CLASSES = (
        'NEARLINE',
        'MULTI_REGIONAL',
        'REGIONAL',
        'COLDLINE',
        'STANDARD',  # alias for MULTI_REGIONAL/REGIONAL, based on location
    )
    """Allowed values for :attr:`storage_class`.

    See:
    https://cloud.google.com/storage/docs/json_api/v1/objects#storageClass
    https://cloud.google.com/storage/docs/per-object-storage-class

    .. note::
       This list does not include 'DURABLE_REDUCED_AVAILABILITY', which
       is only documented for buckets (and deprecated).

    .. note::
       The documentation does *not* mention 'STANDARD', but it is the value
       assigned by the back-end for objects created in buckets with 'STANDARD'
       set as their 'storage_class'.
    """
    def __init__(self, name, bucket, chunk_size=None, encryption_key=None):
        super(Blob, self).__init__(name=name)

        self.chunk_size = chunk_size  # Check that setter accepts value.
        self.bucket = bucket
        self._acl = ObjectACL(self)
        self._encryption_key = encryption_key

    @property
    def chunk_size(self):
        """Get the blob's default chunk size.

        :rtype: int or ``NoneType``
        :returns: The current blob's chunk size, if it is set.
        """
        return self._chunk_size

    @chunk_size.setter
    def chunk_size(self, value):
        """Set the blob's default chunk size.

        :type value: int
        :param value: (Optional) The current blob's chunk size, if it is set.

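        For example::

          >>> blob.chunk_size = 1024 * 1024  # 1 MB, a multiple of 256 KB
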
        :raises: :class:`ValueError` if ``value`` is not ``None`` and is not a
                 multiple of 256 KB.
        """
        if value is not None and value % self._CHUNK_SIZE_MULTIPLE != 0:
            raise ValueError('Chunk size must be a multiple of %d.' %
                             (self._CHUNK_SIZE_MULTIPLE, ))
        self._chunk_size = value

    @staticmethod
    def path_helper(bucket_path, blob_name):
        """Relative URL path for a blob.

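        For example (note that the blob name is URL-quoted)::

          >>> Blob.path_helper('/b/my-bucket', 'path/to/file.txt')
          '/b/my-bucket/o/path%2Fto%2Ffile.txt'
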
        :type bucket_path: str
        :param bucket_path: The URL path for a bucket.

        :type blob_name: str
        :param blob_name: The name of the blob.

        :rtype: str
        :returns: The relative URL path for ``blob_name``.
        """
        return bucket_path + '/o/' + quote(blob_name, safe='')

    @property
    def acl(self):
        """Create our ACL on demand."""
        return self._acl

    def __repr__(self):
        if self.bucket:
            bucket_name = self.bucket.name
        else:
            bucket_name = None

        return '<Blob: %s, %s>' % (bucket_name, self.name)

    @property
    def path(self):
        """Getter property for the URL path to this Blob.

        :rtype: str
        :returns: The URL path to this Blob.
        """
        if not self.name:
            raise ValueError('Cannot determine path without a blob name.')

        return self.path_helper(self.bucket.path, self.name)

    @property
    def client(self):
        """The client bound to this blob."""
        return self.bucket.client

    @property
    def public_url(self):
        """The public URL for this blob's object.

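        For example (hypothetical names)::

          >>> bucket.blob('kitten.png').public_url
          'https://storage.googleapis.com/my-bucket/kitten.png'
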
        :rtype: str
        :returns: The public URL for this blob.
        """
        return '{storage_base_url}/{bucket_name}/{quoted_name}'.format(
            storage_base_url='https://storage.googleapis.com',
            bucket_name=self.bucket.name,
            quoted_name=quote(self.name, safe=''))

    def generate_signed_url(self,
                            expiration,
                            method='GET',
                            content_type=None,
                            generation=None,
                            response_disposition=None,
                            response_type=None,
                            client=None,
                            credentials=None):
        """Generates a signed URL for this blob.

        .. note::

            If you are on Google Compute Engine, you can't generate a signed
            URL. Follow `Issue 922`_ for updates on this. If you'd like to
            be able to generate a signed URL from GCE, you can use a standard
            service account from a JSON file rather than a GCE service account.

        .. _Issue 922: https://github.com/GoogleCloudPlatform/\
                       google-cloud-python/issues/922

        If you have a blob that you want to allow access to for a set
        amount of time, you can use this method to generate a URL that
        is only valid within a certain time period.

        This is particularly useful if you don't want publicly
        accessible blobs, but don't want to require users to explicitly
        log in.

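        For example (a sketch granting read access for one hour)::

          >>> import datetime
          >>> url = blob.generate_signed_url(
          ...     expiration=datetime.timedelta(hours=1))
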
        :type expiration: int, long, datetime.datetime, datetime.timedelta
        :param expiration: When the signed URL should expire.

        :type method: str
        :param method: The HTTP verb that will be used when requesting the URL.

        :type content_type: str
        :param content_type: (Optional) The content type of the object
                             referenced by ``resource``.

        :type generation: str
        :param generation: (Optional) A value that indicates which generation
                           of the resource to fetch.

        :type response_disposition: str
        :param response_disposition: (Optional) Content disposition of
                                     responses to requests for the signed URL.
                                     For example, to enable the signed URL
                                     to initiate a download of ``blob.png``,
                                     use the value
                                     ``'attachment; filename=blob.png'``.

        :type response_type: str
        :param response_type: (Optional) Content type of responses to requests
                              for the signed URL. Used to over-ride the content
                              type of the underlying blob/object.

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: (Optional) The client to use.  If not passed, falls back
                       to the ``client`` stored on the blob's bucket.


        :type credentials: :class:`oauth2client.client.OAuth2Credentials` or
                           :class:`NoneType`
        :param credentials: (Optional) The OAuth2 credentials to use to sign
                            the URL. Defaults to the credentials stored on the
                            client used.

        :rtype: str
        :returns: A signed URL you can use to access the resource
                  until expiration.
        """
        resource = '/{bucket_name}/{quoted_name}'.format(
            bucket_name=self.bucket.name,
            quoted_name=quote(self.name, safe=''))

        if credentials is None:
            client = self._require_client(client)
            credentials = client._base_connection.credentials

        return generate_signed_url(credentials,
                                   resource=resource,
                                   api_access_endpoint=_API_ACCESS_ENDPOINT,
                                   expiration=expiration,
                                   method=method,
                                   content_type=content_type,
                                   response_type=response_type,
                                   response_disposition=response_disposition,
                                   generation=generation)

    def exists(self, client=None):
        """Determines whether or not this blob exists.

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the blob's bucket.

        :rtype: bool
        :returns: True if the blob exists in Cloud Storage.
        """
        client = self._require_client(client)
        try:
            # We only need the status code (200 or not) so we seek to
            # minimize the returned payload.
            query_params = {'fields': 'name'}
            # We intentionally pass `_target_object=None` since fields=name
            # would limit the local properties.
            client._connection.api_request(method='GET',
                                           path=self.path,
                                           query_params=query_params,
                                           _target_object=None)
            # NOTE: This will not fail immediately in a batch. However, when
            #       Batch.finish() is called, the resulting `NotFound` will be
            #       raised.
            return True
        except NotFound:
            return False

    def delete(self, client=None):
        """Deletes a blob from Cloud Storage.

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the blob's bucket.

        :rtype: :class:`Blob`
        :returns: The blob that was just deleted.
        :raises: :class:`google.cloud.exceptions.NotFound`
                 (propagated from
                 :meth:`google.cloud.storage.bucket.Bucket.delete_blob`).
        """
        return self.bucket.delete_blob(self.name, client=client)

    def download_to_file(self, file_obj, client=None):
        """Download the contents of this blob into a file-like object.

        .. note::

           If the server-set property, :attr:`media_link`, is not yet
           initialized, makes an additional API request to load it.

        Downloading a file that has been encrypted with a `customer-supplied`_
        encryption key:

        .. literalinclude:: storage_snippets.py
            :start-after: [START download_to_file]
            :end-before: [END download_to_file]

        The ``encryption_key`` should be a str or bytes with a length of at
        least 32.

        .. _customer-supplied: https://cloud.google.com/storage/docs/\
                               encryption#customer-supplied

        :type file_obj: file
        :param file_obj: A file handle to which to write the blob's data.

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the blob's bucket.

        :raises: :class:`google.cloud.exceptions.NotFound`
        """
        client = self._require_client(client)
        if self.media_link is None:  # not yet loaded
            self.reload(client=client)

        download_url = self.media_link

        # Use apitools 'Download' facility.
        download = Download.from_stream(file_obj)

        if self.chunk_size is not None:
            download.chunksize = self.chunk_size

        headers = _get_encryption_headers(self._encryption_key)

        request = Request(download_url, 'GET', headers)

        # Use ``_base_connection`` rather than ``_connection`` since the
        # current connection may be a batch. A batch wraps a client's
        # connection, but does not store the ``http`` object. The rest
        # (API_BASE_URL and build_api_url) are also defined on the Batch
        # class, but we just use the wrapped connection since it has all
        # three (http, API_BASE_URL and build_api_url).
        download.initialize_download(request, client._base_connection.http)

    def download_to_filename(self, filename, client=None):
        """Download the contents of this blob into a named file.

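        For example (a hypothetical path)::

          >>> blob.download_to_filename('/tmp/report.csv')
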
        :type filename: str
        :param filename: A filename to be passed to ``open``.

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the blob's bucket.

        :raises: :class:`google.cloud.exceptions.NotFound`
        """
        with open(filename, 'wb') as file_obj:
            self.download_to_file(file_obj, client=client)

        mtime = time.mktime(self.updated.timetuple())
        os.utime(filename, (mtime, mtime))

    def download_as_string(self, client=None):
        """Download the contents of this blob as a string.

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the blob's bucket.

        :rtype: bytes
        :returns: The data stored in this blob.
        :raises: :class:`google.cloud.exceptions.NotFound`
        """
        string_buffer = BytesIO()
        self.download_to_file(string_buffer, client=client)
        return string_buffer.getvalue()

    def _create_upload(self,
                       client,
                       file_obj=None,
                       size=None,
                       content_type=None,
                       chunk_size=None,
                       strategy=None,
                       extra_headers=None):
        """Helper for upload methods.

        Creates a :class:`google.cloud.core.streaming.Upload` object to handle
        the details of uploading a file to Cloud Storage.

        :type client: :class:`~google.cloud.storage.client.Client` or
            ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
            to the ``client`` stored on the blob's bucket.

        :type file_obj: file
        :param file_obj: A file handle open for reading.

        :type size: int
        :param size: The size of the upload, in bytes.

        :type content_type: str
        :param content_type: Optional type of content being uploaded.

        :type chunk_size: int
        :param chunk_size: The size of each chunk when doing resumable and
            media uploads.

        :type strategy: str
        :param strategy: Either
            :attr:`google.cloud.core.streaming.transfer.SIMPLE_UPLOAD` or
            :attr:`google.cloud.core.streaming.transfer.RESUMABLE_UPLOAD`.

        :type extra_headers: dict
        :param extra_headers: Additional headers to be sent with the upload
            initiation request.

        :rtype: Tuple[google.cloud.core.streaming.Upload,
                      google.cloud.core.streaming.Request,
                      google.cloud.core.streaming.Response]
        :returns: The Upload object, the upload HTTP request, and the upload
                  initiation response.
        """

        client = self._require_client(client)

        # Use ``_base_connection`` rather than ``_connection`` since the
        # current connection may be a batch. A batch wraps a client's
        # connection, but does not store the ``http`` object. The rest
        # (API_BASE_URL and build_api_url) are also defined on the Batch
        # class, but we just use the wrapped connection since it has all
        # three (http, API_BASE_URL and build_api_url).
        connection = client._base_connection

        content_type = (content_type or self._properties.get('contentType')
                        or 'application/octet-stream')

        headers = {
            'Accept': 'application/json',
            'Accept-Encoding': 'gzip, deflate',
            'User-Agent': connection.USER_AGENT,
        }

        if extra_headers:
            headers.update(extra_headers)

        headers.update(_get_encryption_headers(self._encryption_key))

        # Use apitools' Upload functionality
        upload = Upload(file_obj,
                        content_type,
                        total_size=size,
                        auto_transfer=False)

        if chunk_size is not None:
            upload.chunksize = chunk_size

        if strategy is not None:
            # Honor the requested strategy (SIMPLE_UPLOAD or
            # RESUMABLE_UPLOAD) rather than forcing RESUMABLE_UPLOAD.
            upload.strategy = strategy

        url_builder = _UrlBuilder(bucket_name=self.bucket.name,
                                  object_name=self.name)
        upload_config = _UploadConfig()

        # Temporary URL until strategy is determined.
        base_url = connection.API_BASE_URL + '/upload'
        upload_url = connection.build_api_url(api_base_url=base_url,
                                              path=self.bucket.path + '/o')

        # Configure the upload request parameters.
        request = Request(upload_url, 'POST', headers)

        if self._properties:
            headers['content-type'] = 'application/json'
            request.body = json.dumps(self._properties)

        upload.configure_request(upload_config, request, url_builder)

        # Configure final URL
        query_params = url_builder.query_params
        base_url = connection.API_BASE_URL + '/upload'
        request.url = connection.build_api_url(api_base_url=base_url,
                                               path=self.bucket.path + '/o',
                                               query_params=query_params)

        # Start the upload session
        response = upload.initialize_upload(request, connection.http)

        return upload, request, response

    @staticmethod
    def _check_response_error(request, http_response):
        """Helper for :meth:`upload_from_file`."""
        info = http_response.info
        status = int(info['status'])
        if not 200 <= status < 300:
            faux_response = httplib2.Response({'status': status})
            raise make_exception(faux_response,
                                 http_response.content,
                                 error_info=request.url)

    def upload_from_file(self,
                         file_obj,
                         rewind=False,
                         size=None,
                         content_type=None,
                         num_retries=6,
                         client=None):
        """Upload the contents of this blob from a file-like object.

        The content type of the upload will either be

        - The value passed in to the function (if any)
        - The value stored on the current blob
        - The default value of 'application/octet-stream'

        .. note::
           The effect of uploading to an existing blob depends on the
           "versioning" and "lifecycle" policies defined on the blob's
           bucket.  In the absence of those policies, upload will
           overwrite any existing contents.

           See the `object versioning
           <https://cloud.google.com/storage/docs/object-versioning>`_ and
           `lifecycle <https://cloud.google.com/storage/docs/lifecycle>`_
           API documents for details.

        Uploading a file with a `customer-supplied`_ encryption key:

        .. literalinclude:: storage_snippets.py
            :start-after: [START upload_from_file]
            :end-before: [END upload_from_file]

        The ``encryption_key`` should be a str or bytes with a length of at
        least 32.

        .. _customer-supplied: https://cloud.google.com/storage/docs/\
                               encryption#customer-supplied

        :type file_obj: file
        :param file_obj: A file handle open for reading.

        :type rewind: bool
        :param rewind: If True, seek to the beginning of the file handle before
                       writing the file to Cloud Storage.

        :type size: int
        :param size: The number of bytes to read from the file handle.
                     If not provided, we'll try to guess the size using
                     :func:`os.fstat`. (If the file handle is not from the
                     filesystem this won't be possible.)

        :type content_type: str
        :param content_type: Optional type of content being uploaded.

        :type num_retries: int
        :param num_retries: Number of upload retries. Defaults to 6.

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the blob's bucket.

        :raises: :class:`ValueError` if size is not passed in and cannot be
                 determined; :class:`google.cloud.exceptions.GoogleCloudError`
                 if the upload response returns an error status.
        """
        client = self._require_client(client)
        # Use ``_base_connection`` rather than ``_connection`` since the
        # current connection may be a batch. A batch wraps a client's
        # connection, but does not store the ``http`` object. The rest
        # (API_BASE_URL and build_api_url) are also defined on the Batch
        # class, but we just use the wrapped connection since it has all
        # three (http, API_BASE_URL and build_api_url).
        connection = client._base_connection

        # Rewind the file if desired.
        if rewind:
            file_obj.seek(0, os.SEEK_SET)

        # Get the basic stats about the file.
        total_bytes = size
        if total_bytes is None:
            if hasattr(file_obj, 'fileno'):
                try:
                    total_bytes = os.fstat(file_obj.fileno()).st_size
                except (OSError, UnsupportedOperation):
                    pass  # Assuming fd is not an actual file (maybe socket).

        chunk_size = None
        strategy = None
        if self.chunk_size is not None:
            chunk_size = self.chunk_size

            if total_bytes is None:
                strategy = RESUMABLE_UPLOAD
        elif total_bytes is None:
            raise ValueError('total bytes could not be determined. Please '
                             'pass an explicit size, or supply a chunk size '
                             'for a streaming transfer.')

        upload, request, _ = self._create_upload(client,
                                                 file_obj=file_obj,
                                                 size=total_bytes,
                                                 content_type=content_type,
                                                 chunk_size=chunk_size,
                                                 strategy=strategy)

        if upload.strategy == RESUMABLE_UPLOAD:
            http_response = upload.stream_file(use_chunks=True)
        else:
            http_response = make_api_request(connection.http,
                                             request,
                                             retries=num_retries)

        self._check_response_error(request, http_response)
        response_content = http_response.content

        if not isinstance(response_content,
                          six.string_types):  # pragma: NO COVER  Python3
            response_content = response_content.decode('utf-8')
        self._set_properties(json.loads(response_content))

    def upload_from_filename(self, filename, content_type=None, client=None):
        """Upload this blob's contents from the content of a named file.

        The content type of the upload will either be

        - The value passed in to the function (if any)
        - The value stored on the current blob
        - The value given by ``mimetypes.guess_type``

        .. note::
           The effect of uploading to an existing blob depends on the
           "versioning" and "lifecycle" policies defined on the blob's
           bucket.  In the absence of those policies, upload will
           overwrite any existing contents.

           See the `object versioning
           <https://cloud.google.com/storage/docs/object-versioning>`_ and
           `lifecycle <https://cloud.google.com/storage/docs/lifecycle>`_
           API documents for details.

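        For example (a hypothetical path; the content type is guessed from
        the extension)::

          >>> blob.upload_from_filename('/tmp/report.csv')
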
        :type filename: str
        :param filename: The path to the file.

        :type content_type: str
        :param content_type: Optional type of content being uploaded.

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the blob's bucket.
        """
        content_type = content_type or self._properties.get('contentType')
        if content_type is None:
            content_type, _ = mimetypes.guess_type(filename)

        with open(filename, 'rb') as file_obj:
            self.upload_from_file(file_obj,
                                  content_type=content_type,
                                  client=client)

    def upload_from_string(self, data, content_type='text/plain', client=None):
        """Upload contents of this blob from the provided string.

        .. note::
           The effect of uploading to an existing blob depends on the
           "versioning" and "lifecycle" policies defined on the blob's
           bucket.  In the absence of those policies, upload will
           overwrite any existing contents.

           See the `object versioning
           <https://cloud.google.com/storage/docs/object-versioning>`_ and
           `lifecycle <https://cloud.google.com/storage/docs/lifecycle>`_
           API documents for details.

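        For example::

          >>> blob.upload_from_string(b'binary contents',
          ...                         content_type='application/octet-stream')
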
        :type data: bytes or str
        :param data: The data to store in this blob.  If the value is
                     text, it will be encoded as UTF-8.

        :type content_type: str
        :param content_type: Optional type of content being uploaded. Defaults
                             to ``'text/plain'``.

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the blob's bucket.
        """
        if isinstance(data, six.text_type):
            data = data.encode('utf-8')
        string_buffer = BytesIO()
        string_buffer.write(data)
        self.upload_from_file(file_obj=string_buffer,
                              rewind=True,
                              size=len(data),
                              content_type=content_type,
                              client=client)

    def create_resumable_upload_session(self,
                                        content_type=None,
                                        size=None,
                                        origin=None,
                                        client=None):
        """Create a resumable upload session.

        Resumable upload sessions allow you to start an upload session from
        one client and complete the session in another. This method is called
        by the initiator to set the metadata and limits. The initiator then
        passes the session URL to the client that will upload the binary data.
        The client performs a PUT request on the session URL to complete the
        upload. This process allows untrusted clients to upload to an
        access-controlled bucket. For more details, see the
        `documentation on signed URLs`_.

        .. _documentation on signed URLs: https://cloud.google.com/storage\
                /docs/access-control/signed-urls#signing-resumable

        The content type of the upload will be determined in order
        of precedence:

        - The value passed in to this method (if any)
        - The value stored on the current blob
        - The default value of ``'application/octet-stream'``

        .. note::
           The effect of uploading to an existing blob depends on the
           "versioning" and "lifecycle" policies defined on the blob's
           bucket.  In the absence of those policies, upload will
           overwrite any existing contents.

           See the `object versioning
           <https://cloud.google.com/storage/docs/object-versioning>`_ and
           `lifecycle <https://cloud.google.com/storage/docs/lifecycle>`_
           API documents for details.

        If :attr:`encryption_key` is set, the blob will be `encrypted`_.

        .. _encrypted: https://cloud.google.com/storage/docs/\
                               encryption#customer-supplied

        :type size: int
        :param size: Optional, the maximum number of bytes that can be
            uploaded using this session. If the size is not known when creating
            the session, this should be left blank.

        :type content_type: str
        :param content_type: Optional type of content being uploaded. This can
            be used to restrict the allowed file type that can be uploaded
            via this session.

        :type origin: str
        :param origin: Optional origin. If set, the upload can only be
            completed by a user-agent that uploads from the given origin. This
            can be useful when passing the session to a web client.

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the blob's bucket.

        :rtype: str
        :returns: The resumable upload session URL. The upload can be
            completed by making an HTTP PUT request with the file's contents.

        :raises: :class:`google.cloud.exceptions.GoogleCloudError`
                 if the session creation response returns an error status.
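
        Example (illustrative sketch; the ``requests`` library shown for
        the client-side PUT is an assumption, not a dependency of this
        package)::

            session_url = blob.create_resumable_upload_session(
                content_type='application/octet-stream')

            # On the (possibly untrusted) uploading client:
            import requests
            with open('payload.bin', 'rb') as file_obj:
                requests.put(session_url, data=file_obj)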
        """

        extra_headers = {}

        if origin is not None:
            # This header is specifically for client-side uploads; it
            # determines the origins allowed for CORS.
            extra_headers['Origin'] = origin

        _, _, start_response = self._create_upload(client,
                                                   size=size,
                                                   content_type=content_type,
                                                   strategy=RESUMABLE_UPLOAD,
                                                   extra_headers=extra_headers)

        # The location header contains the session URL. This can be used
        # to continue the upload.
        resumable_upload_session_url = start_response.info['location']

        return resumable_upload_session_url

    def make_public(self, client=None):
        """Make this blob public giving all users read access.

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the blob's bucket.
        """
        self.acl.all().grant_read()
        self.acl.save(client=client)

    def compose(self, sources, client=None):
        """Concatenate source blobs into this one.

        :type sources: list of :class:`Blob`
        :param sources: blobs whose contents will be composed into this blob.

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the blob's bucket.

        :raises: :exc:`ValueError` if this blob does not have its
                 :attr:`content_type` set.
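
        Example (sketch; assumes ``bucket`` already contains blobs
        ``part-1`` and ``part-2``)::

            destination = bucket.blob('combined')
            destination.content_type = 'text/plain'
            destination.compose([bucket.blob('part-1'),
                                 bucket.blob('part-2')])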
        """
        if self.content_type is None:
            raise ValueError("Destination 'content_type' not set.")
        client = self._require_client(client)
        request = {
            'sourceObjects': [{
                'name': source.name
            } for source in sources],
            'destination': self._properties.copy(),
        }
        api_response = client._connection.api_request(method='POST',
                                                      path=self.path +
                                                      '/compose',
                                                      data=request,
                                                      _target_object=self)
        self._set_properties(api_response)

    def rewrite(self, source, token=None, client=None):
        """Rewrite source blob into this one.

        :type source: :class:`Blob`
        :param source: blob whose contents will be rewritten into this blob.

        :type token: str
        :param token: Optional. Token returned from an earlier, incomplete
                      call to rewrite the same source blob. If passed, the
                      result reflects the updated status and total bytes
                      written so far.

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the blob's bucket.

        :rtype: tuple
        :returns: ``(token, bytes_rewritten, total_bytes)``, where ``token``
                  is a rewrite token (``None`` if the rewrite is complete),
                  ``bytes_rewritten`` is the number of bytes rewritten so far,
                  and ``total_bytes`` is the total number of bytes to be
                  rewritten.
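
        Example (sketch; assumes ``dest_blob`` and ``source_blob`` are
        :class:`Blob` instances, and drives the rewrite to completion)::

            token, rewritten, total = dest_blob.rewrite(source_blob)
            while token is not None:
                token, rewritten, total = dest_blob.rewrite(
                    source_blob, token=token)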
        """
        client = self._require_client(client)
        headers = _get_encryption_headers(self._encryption_key)
        headers.update(
            _get_encryption_headers(source._encryption_key, source=True))

        if token:
            query_params = {'rewriteToken': token}
        else:
            query_params = {}

        api_response = client._connection.api_request(
            method='POST',
            path=source.path + '/rewriteTo' + self.path,
            query_params=query_params,
            data=self._properties,
            headers=headers,
            _target_object=self)
        self._set_properties(api_response['resource'])
        rewritten = int(api_response['totalBytesRewritten'])
        size = int(api_response['objectSize'])

        if api_response['done']:
            return None, rewritten, size

        return api_response['rewriteToken'], rewritten, size

    def update_storage_class(self, new_class, client=None):
        """Update blob's storage class via a rewrite-in-place.

        See:
        https://cloud.google.com/storage/docs/per-object-storage-class

        :type new_class: str
        :param new_class: new storage class for the object

        :type client: :class:`~google.cloud.storage.client.Client`
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the blob's bucket.
        """
        if new_class not in self._STORAGE_CLASSES:
            raise ValueError("Invalid storage class: %s" % (new_class, ))

        client = self._require_client(client)
        headers = _get_encryption_headers(self._encryption_key)
        headers.update(
            _get_encryption_headers(self._encryption_key, source=True))

        api_response = client._connection.api_request(
            method='POST',
            path=self.path + '/rewriteTo' + self.path,
            data={'storageClass': new_class},
            headers=headers,
            _target_object=self)
        self._set_properties(api_response['resource'])

    cache_control = _scalar_property('cacheControl')
    """HTTP 'Cache-Control' header for this object.

    See: https://tools.ietf.org/html/rfc7234#section-5.2 and
         https://cloud.google.com/storage/docs/json_api/v1/objects

    If the property is not set locally, returns ``None``.

    :rtype: str or ``NoneType``
    """

    content_disposition = _scalar_property('contentDisposition')
    """HTTP 'Content-Disposition' header for this object.

    See: https://tools.ietf.org/html/rfc6266 and
         https://cloud.google.com/storage/docs/json_api/v1/objects

    If the property is not set locally, returns ``None``.

    :rtype: str or ``NoneType``
    """

    content_encoding = _scalar_property('contentEncoding')
    """HTTP 'Content-Encoding' header for this object.

    See: https://tools.ietf.org/html/rfc7231#section-3.1.2.2 and
         https://cloud.google.com/storage/docs/json_api/v1/objects

    If the property is not set locally, returns ``None``.

    :rtype: str or ``NoneType``
    """

    content_language = _scalar_property('contentLanguage')
    """HTTP 'Content-Language' header for this object.

    See: http://tools.ietf.org/html/bcp47 and
         https://cloud.google.com/storage/docs/json_api/v1/objects

    If the property is not set locally, returns ``None``.

    :rtype: str or ``NoneType``
    """

    content_type = _scalar_property('contentType')
    """HTTP 'Content-Type' header for this object.

    See: https://tools.ietf.org/html/rfc2616#section-14.17 and
         https://cloud.google.com/storage/docs/json_api/v1/objects

    If the property is not set locally, returns ``None``.

    :rtype: str or ``NoneType``
    """

    crc32c = _scalar_property('crc32c')
    """CRC32C checksum for this object.

    See: http://tools.ietf.org/html/rfc4960#appendix-B and
         https://cloud.google.com/storage/docs/json_api/v1/objects

    If the property is not set locally, returns ``None``.

    :rtype: str or ``NoneType``
    """

    @property
    def component_count(self):
        """Number of underlying components that make up this object.

        See: https://cloud.google.com/storage/docs/json_api/v1/objects

        :rtype: int or ``NoneType``
        :returns: The component count (in case of a composed object) or
                  ``None`` if the property is not set locally. This property
                  will not be set on objects not created via ``compose``.
        """
        component_count = self._properties.get('componentCount')
        if component_count is not None:
            return int(component_count)

    @property
    def etag(self):
        """Retrieve the ETag for the object.

        See: http://tools.ietf.org/html/rfc2616#section-3.11 and
             https://cloud.google.com/storage/docs/json_api/v1/objects

        :rtype: str or ``NoneType``
        :returns: The blob etag or ``None`` if the property is not set locally.
        """
        return self._properties.get('etag')

    @property
    def generation(self):
        """Retrieve the generation for the object.

        See: https://cloud.google.com/storage/docs/json_api/v1/objects

        :rtype: int or ``NoneType``
        :returns: The generation of the blob or ``None`` if the property
                  is not set locally.
        """
        generation = self._properties.get('generation')
        if generation is not None:
            return int(generation)

    @property
    def id(self):
        """Retrieve the ID for the object.

        See: https://cloud.google.com/storage/docs/json_api/v1/objects

        :rtype: str or ``NoneType``
        :returns: The ID of the blob or ``None`` if the property is not
                  set locally.
        """
        return self._properties.get('id')

    md5_hash = _scalar_property('md5Hash')
    """MD5 hash for this object.

    See: http://tools.ietf.org/html/rfc4960#appendix-B and
         https://cloud.google.com/storage/docs/json_api/v1/objects

    If the property is not set locally, returns ``None``.

    :rtype: str or ``NoneType``
    """

    @property
    def media_link(self):
        """Retrieve the media download URI for the object.

        See: https://cloud.google.com/storage/docs/json_api/v1/objects

        :rtype: str or ``NoneType``
        :returns: The media link for the blob or ``None`` if the property is
                  not set locally.
        """
        return self._properties.get('mediaLink')

    @property
    def metadata(self):
        """Retrieve arbitrary/application specific metadata for the object.

        See: https://cloud.google.com/storage/docs/json_api/v1/objects

        :rtype: dict or ``NoneType``
        :returns: The metadata associated with the blob or ``None`` if the
                  property is not set locally.
        """
        return copy.deepcopy(self._properties.get('metadata'))

    @metadata.setter
    def metadata(self, value):
        """Update arbitrary/application specific metadata for the object.

        See: https://cloud.google.com/storage/docs/json_api/v1/objects

        :type value: dict
        :param value: (Optional) The blob metadata to set.
        """
        self._patch_property('metadata', value)

    @property
    def metageneration(self):
        """Retrieve the metageneration for the object.

        See: https://cloud.google.com/storage/docs/json_api/v1/objects

        :rtype: int or ``NoneType``
        :returns: The metageneration of the blob or ``None`` if the property
                  is not set locally.
        """
        metageneration = self._properties.get('metageneration')
        if metageneration is not None:
            return int(metageneration)

    @property
    def owner(self):
        """Retrieve info about the owner of the object.

        See: https://cloud.google.com/storage/docs/json_api/v1/objects

        :rtype: dict or ``NoneType``
        :returns: Mapping of owner's role/ID. If the property is not set
                  locally, returns ``None``.
        """
        return copy.deepcopy(self._properties.get('owner'))

    @property
    def self_link(self):
        """Retrieve the URI for the object.

        See: https://cloud.google.com/storage/docs/json_api/v1/objects

        :rtype: str or ``NoneType``
        :returns: The self link for the blob or ``None`` if the property is
                  not set locally.
        """
        return self._properties.get('selfLink')

    @property
    def size(self):
        """Size of the object, in bytes.

        See: https://cloud.google.com/storage/docs/json_api/v1/objects

        :rtype: int or ``NoneType``
        :returns: The size of the blob or ``None`` if the property
                  is not set locally.
        """
        size = self._properties.get('size')
        if size is not None:
            return int(size)

    @property
    def storage_class(self):
        """Retrieve the storage class for the object.

        See: https://cloud.google.com/storage/docs/storage-classes

        :rtype: str or ``NoneType``
        :returns: If set, one of "MULTI_REGIONAL", "REGIONAL",
                  "NEARLINE", "COLDLINE", "STANDARD", or
                  "DURABLE_REDUCED_AVAILABILITY", else ``None``.
        """
        return self._properties.get('storageClass')

    @property
    def time_deleted(self):
        """Retrieve the timestamp at which the object was deleted.

        See: https://cloud.google.com/storage/docs/json_api/v1/objects

        :rtype: :class:`datetime.datetime` or ``NoneType``
        :returns: Datetime object parsed from RFC3339 valid timestamp, or
                  ``None`` if the property is not set locally. If the blob has
                  not been deleted, this will never be set.
        """
        value = self._properties.get('timeDeleted')
        if value is not None:
            return _rfc3339_to_datetime(value)

    @property
    def time_created(self):
        """Retrieve the timestamp at which the object was created.

        See: https://cloud.google.com/storage/docs/json_api/v1/objects

        :rtype: :class:`datetime.datetime` or ``NoneType``
        :returns: Datetime object parsed from RFC3339 valid timestamp, or
                  ``None`` if the property is not set locally.
        """
        value = self._properties.get('timeCreated')
        if value is not None:
            return _rfc3339_to_datetime(value)

    @property
    def updated(self):
        """Retrieve the timestamp at which the object was updated.

        See: https://cloud.google.com/storage/docs/json_api/v1/objects

        :rtype: :class:`datetime.datetime` or ``NoneType``
        :returns: Datetime object parsed from RFC3339 valid timestamp, or
                  ``None`` if the property is not set locally.
        """
        value = self._properties.get('updated')
        if value is not None:
            return _rfc3339_to_datetime(value)


class Bucket(_PropertyMixin):
    """A class representing a Bucket on Cloud Storage.

    :type client: :class:`google.cloud.storage.client.Client`
    :param client: A client which holds credentials and project configuration
                   for the bucket (which requires a project).

    :type name: str
    :param name: The name of the bucket. Bucket names must start and end with a
                 number or letter.

    :type user_project: str
    :param user_project: (Optional) the project ID to be billed for API
                         requests made via this instance.
    """

    _MAX_OBJECTS_FOR_ITERATION = 256
    """Maximum number of existing objects allowed in iteration.

    This is used in Bucket.delete() and Bucket.make_public().
    """

    _STORAGE_CLASSES = (
        'MULTI_REGIONAL',
        'REGIONAL',
        'NEARLINE',
        'COLDLINE',
        'STANDARD',  # alias for MULTI_REGIONAL/REGIONAL, based on location
        'DURABLE_REDUCED_AVAILABILITY',  # deprecated
    )
    """Allowed values for :attr:`storage_class`.

    See
    https://cloud.google.com/storage/docs/json_api/v1/buckets#storageClass
    https://cloud.google.com/storage/docs/storage-classes
    """
    def __init__(self, client, name=None, user_project=None):
        name = _validate_name(name)
        super(Bucket, self).__init__(name=name)
        self._client = client
        self._acl = BucketACL(self)
        self._default_object_acl = DefaultObjectACL(self)
        self._user_project = user_project

    def __repr__(self):
        return '<Bucket: %s>' % (self.name, )

    @property
    def client(self):
        """The client bound to this bucket."""
        return self._client

    @property
    def user_project(self):
        """Project ID to be billed for API requests made via this bucket.

        If unset, API requests are billed to the bucket owner.

        :rtype: str
        """
        return self._user_project

    def blob(self, blob_name, chunk_size=None, encryption_key=None):
        """Factory constructor for blob object.

        .. note::
          This will not make an HTTP request; it simply instantiates
          a blob object owned by this bucket.

        :type blob_name: str
        :param blob_name: The name of the blob to be instantiated.

        :type chunk_size: int
        :param chunk_size: The size of a chunk of data whenever iterating
                           (1 MB). This must be a multiple of 256 KB per the
                           API specification.

        :type encryption_key: bytes
        :param encryption_key:
            Optional 32 byte encryption key for customer-supplied encryption.

        :rtype: :class:`google.cloud.storage.blob.Blob`
        :returns: The blob object created.
        """
        return Blob(name=blob_name,
                    bucket=self,
                    chunk_size=chunk_size,
                    encryption_key=encryption_key)

    def exists(self, client=None):
        """Determines whether or not this bucket exists.

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the current bucket.

        :rtype: bool
        :returns: True if the bucket exists in Cloud Storage.
        """
        client = self._require_client(client)
        # We only need the status code (200 or not) so we seek to
        # minimize the returned payload.
        query_params = {'fields': 'name'}

        if self.user_project is not None:
            query_params['userProject'] = self.user_project

        try:
            # We intentionally pass `_target_object=None` since fields=name
            # would limit the local properties.
            client._connection.api_request(method='GET',
                                           path=self.path,
                                           query_params=query_params,
                                           _target_object=None)
            # NOTE: This will not fail immediately in a batch. However, when
            #       Batch.finish() is called, the resulting `NotFound` will be
            #       raised.
            return True
        except NotFound:
            return False

    def create(self, client=None):
        """Creates current bucket.

        If the bucket already exists, will raise
        :class:`google.cloud.exceptions.Conflict`.

        This implements "storage.buckets.insert".

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the current bucket.
        """
        if self.user_project is not None:
            raise ValueError("Cannot create bucket with 'user_project' set.")

        client = self._require_client(client)
        query_params = {'project': client.project}
        properties = {key: self._properties[key] for key in self._changes}
        properties['name'] = self.name
        api_response = client._connection.api_request(
            method='POST',
            path='/b',
            query_params=query_params,
            data=properties,
            _target_object=self)
        self._set_properties(api_response)

    @property
    def acl(self):
        """Create our ACL on demand."""
        return self._acl

    @property
    def default_object_acl(self):
        """Create our defaultObjectACL on demand."""
        return self._default_object_acl

    @staticmethod
    def path_helper(bucket_name):
        """Relative URL path for a bucket.

        :type bucket_name: str
        :param bucket_name: The bucket name in the path.

        :rtype: str
        :returns: The relative URL path for ``bucket_name``.
        """
        return '/b/' + bucket_name

    @property
    def path(self):
        """The URL path to this bucket."""
        if not self.name:
            raise ValueError('Cannot determine path without bucket name.')

        return self.path_helper(self.name)

    def get_blob(self, blob_name, client=None):
        """Get a blob object by name.

        This will return None if the blob doesn't exist:

        .. literalinclude:: snippets.py
          :start-after: [START get_blob]
          :end-before: [END get_blob]

        :type blob_name: str
        :param blob_name: The name of the blob to retrieve.

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the current bucket.

        :rtype: :class:`google.cloud.storage.blob.Blob` or None
        :returns: The blob object if it exists, otherwise None.
        """
        client = self._require_client(client)
        query_params = {}

        if self.user_project is not None:
            query_params['userProject'] = self.user_project

        blob = Blob(bucket=self, name=blob_name)
        try:
            response = client._connection.api_request(
                method='GET',
                path=blob.path,
                query_params=query_params,
                _target_object=blob)
            # NOTE: We assume response.get('name') matches `blob_name`.
            blob._set_properties(response)
            # NOTE: This will not fail immediately in a batch. However, when
            #       Batch.finish() is called, the resulting `NotFound` will be
            #       raised.
            return blob
        except NotFound:
            return None

    def list_blobs(self,
                   max_results=None,
                   page_token=None,
                   prefix=None,
                   delimiter=None,
                   versions=None,
                   projection='noAcl',
                   fields=None,
                   client=None):
        """Return an iterator used to find blobs in the bucket.

        :type max_results: int
        :param max_results: (Optional) Maximum number of blobs to return.

        :type page_token: str
        :param page_token: (Optional) Opaque marker for the next "page" of
                           blobs. If not passed, will return the first page
                           of blobs.

        :type prefix: str
        :param prefix: (Optional) prefix used to filter blobs.

        :type delimiter: str
        :param delimiter: (Optional) Delimiter, used with ``prefix`` to
                          emulate hierarchy.

        :type versions: bool
        :param versions: (Optional) Whether object versions should be returned
                         as separate blobs.

        :type projection: str
        :param projection: (Optional) If used, must be 'full' or 'noAcl'.
                           Defaults to ``'noAcl'``. Specifies the set of
                           properties to return.

        :type fields: str
        :param fields: (Optional) Selector specifying which fields to include
                       in a partial response. Must be a comma-separated list
                       of fields. For example, to get a partial response with
                       just the next page token and the language of each blob
                       returned: ``'items/contentLanguage,nextPageToken'``.

        :type client: :class:`~google.cloud.storage.client.Client`
        :param client: (Optional) The client to use.  If not passed, falls back
                       to the ``client`` stored on the current bucket.

        :rtype: :class:`~google.cloud.iterator.Iterator`
        :returns: Iterator of all :class:`~google.cloud.storage.blob.Blob`
                  in this bucket matching the arguments.
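
        Example (sketch; emulating "directories" under ``logs/``)::

            iterator = bucket.list_blobs(prefix='logs/', delimiter='/')
            blobs = list(iterator)       # blobs directly under 'logs/'
            subdirs = iterator.prefixes  # populated while iterating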
        """
        extra_params = {'projection': projection}

        if prefix is not None:
            extra_params['prefix'] = prefix

        if delimiter is not None:
            extra_params['delimiter'] = delimiter

        if versions is not None:
            extra_params['versions'] = versions

        if fields is not None:
            extra_params['fields'] = fields

        if self.user_project is not None:
            extra_params['userProject'] = self.user_project

        client = self._require_client(client)
        path = self.path + '/o'
        iterator = HTTPIterator(client=client,
                                path=path,
                                item_to_value=_item_to_blob,
                                page_token=page_token,
                                max_results=max_results,
                                extra_params=extra_params,
                                page_start=_blobs_page_start)
        iterator.bucket = self
        iterator.prefixes = set()
        return iterator

    def delete(self, force=False, client=None):
        """Delete this bucket.

        The bucket **must** be empty in order to submit a delete request. If
        ``force=True`` is passed, this will first attempt to delete all the
        objects / blobs in the bucket (i.e. try to empty the bucket).

        If the bucket doesn't exist, this will raise
        :class:`google.cloud.exceptions.NotFound`.  If the bucket is not empty
        (and ``force=False``), will raise
        :class:`google.cloud.exceptions.Conflict`.

        If ``force=True`` and the bucket contains more than 256 objects / blobs
        this will cowardly refuse to delete the objects (or the bucket). This
        is to prevent accidental bucket deletion and to prevent extremely long
        runtime of this method.

        :type force: bool
        :param force: If True, empties the bucket's objects then deletes it.

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the current bucket.

        :raises: :class:`ValueError` if ``force`` is ``True`` and the bucket
                 contains more than 256 objects / blobs.
        """
        client = self._require_client(client)
        query_params = {}

        if self.user_project is not None:
            query_params['userProject'] = self.user_project

        if force:
            blobs = list(
                self.list_blobs(max_results=self._MAX_OBJECTS_FOR_ITERATION +
                                1,
                                client=client))
            if len(blobs) > self._MAX_OBJECTS_FOR_ITERATION:
                message = ('Refusing to delete bucket with more than '
                           '%d objects. If you actually want to delete '
                           'this bucket, please delete the objects '
                           'yourself before calling Bucket.delete().') % (
                               self._MAX_OBJECTS_FOR_ITERATION, )
                raise ValueError(message)

            # Ignore 404 errors on delete.
            self.delete_blobs(blobs, on_error=lambda blob: None, client=client)

        # We intentionally pass `_target_object=None` since a DELETE
        # request has no response value (whether in a standard request or
        # in a batch request).
        client._connection.api_request(method='DELETE',
                                       path=self.path,
                                       query_params=query_params,
                                       _target_object=None)

    def delete_blob(self, blob_name, client=None):
        """Deletes a blob from the current bucket.

        If the blob isn't found (backend 404), raises a
        :class:`google.cloud.exceptions.NotFound`.

        For example:

        .. literalinclude:: snippets.py
          :start-after: [START delete_blob]
          :end-before: [END delete_blob]

        :type blob_name: str
        :param blob_name: A blob name to delete.

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the current bucket.

        :raises: :class:`google.cloud.exceptions.NotFound` (to suppress
                 the exception, call ``delete_blobs``, passing a no-op
                 ``on_error`` callback, e.g.:

        .. literalinclude:: snippets.py
            :start-after: [START delete_blobs]
            :end-before: [END delete_blobs]

        """
        client = self._require_client(client)
        query_params = {}

        if self.user_project is not None:
            query_params['userProject'] = self.user_project

        blob_path = Blob.path_helper(self.path, blob_name)
        # We intentionally pass `_target_object=None` since a DELETE
        # request has no response value (whether in a standard request or
        # in a batch request).
        client._connection.api_request(method='DELETE',
                                       path=blob_path,
                                       query_params=query_params,
                                       _target_object=None)

    def delete_blobs(self, blobs, on_error=None, client=None):
        """Deletes a list of blobs from the current bucket.

        Uses :meth:`delete_blob` to delete each individual blob.

        :type blobs: list
        :param blobs: A list of :class:`~google.cloud.storage.blob.Blob`-s or
                      blob names to delete.

        :type on_error: callable
        :param on_error: (Optional) Takes single argument: ``blob``. Called
                         called once for each blob raising
                         :class:`~google.cloud.exceptions.NotFound`;
                         otherwise, the exception is propagated.

        :type client: :class:`~google.cloud.storage.client.Client`
        :param client: (Optional) The client to use.  If not passed, falls back
                       to the ``client`` stored on the current bucket.

        :raises: :class:`~google.cloud.exceptions.NotFound` (if
                 `on_error` is not passed).
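
        Example (sketch; silently skips blobs that are already gone)::

            bucket.delete_blobs(
                list(bucket.list_blobs(prefix='tmp/')),
                on_error=lambda blob: None)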
        """
        for blob in blobs:
            try:
                blob_name = blob
                if not isinstance(blob_name, six.string_types):
                    blob_name = blob.name
                self.delete_blob(blob_name, client=client)
            except NotFound:
                if on_error is not None:
                    on_error(blob)
                else:
                    raise

    def copy_blob(self,
                  blob,
                  destination_bucket,
                  new_name=None,
                  client=None,
                  preserve_acl=True):
        """Copy the given blob to the given bucket, optionally with a new name.

        :type blob: :class:`google.cloud.storage.blob.Blob`
        :param blob: The blob to be copied.

        :type destination_bucket: :class:`google.cloud.storage.bucket.Bucket`
        :param destination_bucket: The bucket into which the blob should be
                                   copied.

        :type new_name: str
        :param new_name: (Optional) The new name for the copied file.

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the current bucket.

        :type preserve_acl: bool
        :param preserve_acl: Optional. Copies ACL from old blob to new blob.
                             Default: True.

        :rtype: :class:`google.cloud.storage.blob.Blob`
        :returns: The new Blob.
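
        Example (sketch; assumes ``client`` is a
        :class:`~google.cloud.storage.client.Client` and both buckets
        exist)::

            source_bucket = client.bucket('source-bucket')
            destination_bucket = client.bucket('backup-bucket')
            blob = source_bucket.blob('data.csv')
            new_blob = source_bucket.copy_blob(
                blob, destination_bucket, new_name='data-backup.csv')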
        """
        client = self._require_client(client)
        query_params = {}

        if self.user_project is not None:
            query_params['userProject'] = self.user_project

        if new_name is None:
            new_name = blob.name

        new_blob = Blob(bucket=destination_bucket, name=new_name)
        api_path = blob.path + '/copyTo' + new_blob.path
        copy_result = client._connection.api_request(
            method='POST',
            path=api_path,
            query_params=query_params,
            _target_object=new_blob,
        )

        if not preserve_acl:
            new_blob.acl.save(acl={}, client=client)

        new_blob._set_properties(copy_result)
        return new_blob

    def rename_blob(self, blob, new_name, client=None):
        """Rename the given blob using copy and delete operations.

        Effectively, copies blob to the same bucket with a new name, then
        deletes the blob.

        .. warning::

          This method will first duplicate the data and then delete the
          old blob.  For very large objects this means renaming can be
          a (temporarily) costly or slow operation.

        :type blob: :class:`google.cloud.storage.blob.Blob`
        :param blob: The blob to be renamed.

        :type new_name: str
        :param new_name: The new name for this blob.

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the current bucket.

        :rtype: :class:`Blob`
        :returns: The newly-renamed blob.
        """
        new_blob = self.copy_blob(blob, self, new_name, client=client)
        blob.delete(client=client)
        return new_blob

    @property
    def cors(self):
        """Retrieve or set CORS policies configured for this bucket.

        See http://www.w3.org/TR/cors/ and
             https://cloud.google.com/storage/docs/json_api/v1/buckets

        :setter: Set CORS policies for this bucket.
        :getter: Gets the CORS policies for this bucket.

        :rtype: list of dictionaries
        :returns: A sequence of mappings describing each CORS policy.
        """
        return [
            copy.deepcopy(policy)
            for policy in self._properties.get('cors', ())
        ]

    @cors.setter
    def cors(self, entries):
        """Set CORS policies configured for this bucket.

        See http://www.w3.org/TR/cors/ and
             https://cloud.google.com/storage/docs/json_api/v1/buckets

        :type entries: list of dictionaries
        :param entries: A sequence of mappings describing each CORS policy.
        """
        self._patch_property('cors', entries)

    @property
    def labels(self):
        """Retrieve or set CORS policies configured for this bucket.

        See
        https://cloud.google.com/storage/docs/json_api/v1/buckets#labels

        :setter: Set labels for this bucket.
        :getter: Gets the labels for this bucket.

        :rtype: :class:`dict`
        :returns: Name-value pairs (string->string) labelling the bucket.
        """
        labels = self._properties.get('labels')
        if labels is None:
            return {}
        return copy.deepcopy(labels)

    @labels.setter
    def labels(self, mapping):
        """Set CORS policies configured for this bucket.

        See
        https://cloud.google.com/storage/docs/json_api/v1/buckets#labels

        :type mapping: :class:`dict`
        :param mapping: Name-value pairs (string->string) labelling the bucket.
        """
        self._patch_property('labels', copy.deepcopy(mapping))

    @property
    def etag(self):
        """Retrieve the ETag for the bucket.

        See https://tools.ietf.org/html/rfc2616#section-3.11 and
             https://cloud.google.com/storage/docs/json_api/v1/buckets

        :rtype: str or ``NoneType``
        :returns: The bucket etag or ``None`` if the property is not
                  set locally.
        """
        return self._properties.get('etag')

    @property
    def id(self):
        """Retrieve the ID for the bucket.

        See https://cloud.google.com/storage/docs/json_api/v1/buckets

        :rtype: str or ``NoneType``
        :returns: The ID of the bucket or ``None`` if the property is not
                  set locally.
        """
        return self._properties.get('id')

    @property
    def lifecycle_rules(self):
        """Retrieve or set lifecycle rules configured for this bucket.

        See https://cloud.google.com/storage/docs/lifecycle and
             https://cloud.google.com/storage/docs/json_api/v1/buckets

        :setter: Set lifecycle rules for this bucket.
        :getter: Gets the lifecycle rules for this bucket.

        :rtype: list(dict)
        :returns: A sequence of mappings describing each lifecycle rule.
        """
        info = self._properties.get('lifecycle', {})
        return [copy.deepcopy(rule) for rule in info.get('rule', ())]

    @lifecycle_rules.setter
    def lifecycle_rules(self, rules):
        """Set lifestyle rules configured for this bucket.

        See https://cloud.google.com/storage/docs/lifecycle and
             https://cloud.google.com/storage/docs/json_api/v1/buckets

        :type rules: list of dictionaries
        :param rules: A sequence of mappings describing each lifecycle rule.
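
        Example (sketch; deletes objects older than 30 days; assumes the
        ``patch()`` helper inherited from ``_PropertyMixin``)::

            bucket.lifecycle_rules = [{
                'action': {'type': 'Delete'},
                'condition': {'age': 30},
            }]
            bucket.patch()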
        """
        self._patch_property('lifecycle', {'rule': rules})

    location = _scalar_property('location')
    """Retrieve location configured for this bucket.

    See https://cloud.google.com/storage/docs/json_api/v1/buckets and
    https://cloud.google.com/storage/docs/bucket-locations

    If the property is not set locally, returns ``None``.

    :rtype: str or ``NoneType``
    """

    def get_logging(self):
        """Return info about access logging for this bucket.

        See https://cloud.google.com/storage/docs/access-logs#status

        :rtype: dict or None
        :returns: a dict with keys ``logBucket`` and ``logObjectPrefix``
                  (if logging is enabled), or None (if not).
        """
        info = self._properties.get('logging')
        return copy.deepcopy(info)

    def enable_logging(self, bucket_name, object_prefix=''):
        """Enable access logging for this bucket.

        See https://cloud.google.com/storage/docs/access-logs

        :type bucket_name: str
        :param bucket_name: name of bucket in which to store access logs

        :type object_prefix: str
        :param object_prefix: prefix for access log filenames
        """
        info = {'logBucket': bucket_name, 'logObjectPrefix': object_prefix}
        self._patch_property('logging', info)

    def disable_logging(self):
        """Disable access logging for this bucket.

        See https://cloud.google.com/storage/docs/access-logs#disabling
        """
        self._patch_property('logging', None)

    @property
    def metageneration(self):
        """Retrieve the metageneration for the bucket.

        See https://cloud.google.com/storage/docs/json_api/v1/buckets

        :rtype: int or ``NoneType``
        :returns: The metageneration of the bucket or ``None`` if the property
                  is not set locally.
        """
        metageneration = self._properties.get('metageneration')
        if metageneration is not None:
            return int(metageneration)

    @property
    def owner(self):
        """Retrieve info about the owner of the bucket.

        See https://cloud.google.com/storage/docs/json_api/v1/buckets

        :rtype: dict or ``NoneType``
        :returns: Mapping of owner's role/ID. If the property is not set
                  locally, returns ``None``.
        """
        return copy.deepcopy(self._properties.get('owner'))

    @property
    def project_number(self):
        """Retrieve the number of the project to which the bucket is assigned.

        See https://cloud.google.com/storage/docs/json_api/v1/buckets

        :rtype: int or ``NoneType``
        :returns: The project number that owns the bucket or ``None`` if the
                  property is not set locally.
        """
        project_number = self._properties.get('projectNumber')
        if project_number is not None:
            return int(project_number)

    @property
    def self_link(self):
        """Retrieve the URI for the bucket.

        See https://cloud.google.com/storage/docs/json_api/v1/buckets

        :rtype: str or ``NoneType``
        :returns: The self link for the bucket or ``None`` if the property is
                  not set locally.
        """
        return self._properties.get('selfLink')

    @property
    def storage_class(self):
        """Retrieve or set the storage class for the bucket.

        See https://cloud.google.com/storage/docs/storage-classes

        :setter: Set the storage class for this bucket.
        :getter: Gets the storage class for this bucket.

        :rtype: str or ``NoneType``
        :returns: If set, one of "MULTI_REGIONAL", "REGIONAL",
                  "NEARLINE", "COLDLINE", "STANDARD", or
                  "DURABLE_REDUCED_AVAILABILITY", else ``None``.
        """
        return self._properties.get('storageClass')

    @storage_class.setter
    def storage_class(self, value):
        """Set the storage class for the bucket.

        See https://cloud.google.com/storage/docs/storage-classes

        :type value: str
        :param value: one of "MULTI_REGIONAL", "REGIONAL", "NEARLINE",
                      "COLDLINE", "STANDARD", or "DURABLE_REDUCED_AVAILABILITY"
        """
        if value not in self._STORAGE_CLASSES:
            raise ValueError('Invalid storage class: %s' % (value, ))
        self._patch_property('storageClass', value)

    @property
    def time_created(self):
        """Retrieve the timestamp at which the bucket was created.

        See https://cloud.google.com/storage/docs/json_api/v1/buckets

        :rtype: :class:`datetime.datetime` or ``NoneType``
        :returns: Datetime object parsed from RFC3339 valid timestamp, or
                  ``None`` if the property is not set locally.
        """
        value = self._properties.get('timeCreated')
        if value is not None:
            return _rfc3339_to_datetime(value)

    @property
    def versioning_enabled(self):
        """Is versioning enabled for this bucket?

        See  https://cloud.google.com/storage/docs/object-versioning for
        details.

        :setter: Update whether versioning is enabled for this bucket.
        :getter: Query whether versioning is enabled for this bucket.

        :rtype: bool
        :returns: True if enabled, else False.
        """
        versioning = self._properties.get('versioning', {})
        return versioning.get('enabled', False)

    @versioning_enabled.setter
    def versioning_enabled(self, value):
        """Enable versioning for this bucket.

        See  https://cloud.google.com/storage/docs/object-versioning for
        details.

        :type value: convertible to boolean
        :param value: should versioning be enabled for the bucket?
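
        Example (sketch; assumes the ``patch()`` helper inherited from
        ``_PropertyMixin``)::

            bucket.versioning_enabled = True
            bucket.patch()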
        """
        self._patch_property('versioning', {'enabled': bool(value)})

    @property
    def requester_pays(self):
        """Does the requester pay for API requests for this bucket?

        .. note::

           No public docs exist yet for the "requester pays" feature.

        :setter: Update whether requester pays for this bucket.
        :getter: Query whether requester pays for this bucket.

        :rtype: bool
        :returns: True if requester pays for API requests for the bucket,
                  else False.
        """
        billing = self._properties.get('billing', {})
        return billing.get('requesterPays', False)

    @requester_pays.setter
    def requester_pays(self, value):
        """Update whether requester pays for API requests for this bucket.

        See  https://cloud.google.com/storage/docs/<DOCS-MISSING> for
        details.

        :type value: convertible to boolean
        :param value: should requester pay for API requests for the bucket?
        """
        self._patch_property('billing', {'requesterPays': bool(value)})

    def configure_website(self, main_page_suffix=None, not_found_page=None):
        """Configure website-related properties.

        See https://cloud.google.com/storage/docs/hosting-static-website

        .. note::
          This only works if your bucket name is a domain name (which
          requires that you verify ownership of the domain with Google).

        If you want this bucket to host a website, just provide the name
        of an index page and a page to use when a blob isn't found:

        .. literalinclude:: snippets.py
          :start-after: [START configure_website]
          :end-before: [END configure_website]

        You probably should also make the whole bucket public:

        .. literalinclude:: snippets.py
            :start-after: [START make_public]
            :end-before: [END make_public]

        This says: "Make the bucket public, and all the stuff already in
        the bucket, and anything else I add to the bucket.  Just make it
        all public."

        :type main_page_suffix: str
        :param main_page_suffix: The page to use as the main page
                                 of a directory.
                                 Typically something like index.html.

        :type not_found_page: str
        :param not_found_page: The file to use when a page isn't found.
        """
        data = {
            'mainPageSuffix': main_page_suffix,
            'notFoundPage': not_found_page,
        }
        self._patch_property('website', data)

    def disable_website(self):
        """Disable the website configuration for this bucket.

        This is really just a shortcut for setting the website-related
        attributes to ``None``.
        """
        return self.configure_website(None, None)

    def get_iam_policy(self, client=None):
        """Retrieve the IAM policy for the bucket.

        See
        https://cloud.google.com/storage/docs/json_api/v1/buckets/getIamPolicy

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the current bucket.

        :rtype: :class:`google.cloud.iam.Policy`
        :returns: the policy instance, based on the resource returned from
                  the ``getIamPolicy`` API request.
        """
        client = self._require_client(client)
        query_params = {}

        if self.user_project is not None:
            query_params['userProject'] = self.user_project

        info = client._connection.api_request(method='GET',
                                              path='%s/iam' % (self.path, ),
                                              query_params=query_params,
                                              _target_object=None)
        return Policy.from_api_repr(info)

    def set_iam_policy(self, policy, client=None):
        """Update the IAM policy for the bucket.

        See
        https://cloud.google.com/storage/docs/json_api/v1/buckets/setIamPolicy

        :type policy: :class:`google.cloud.iam.Policy`
        :param policy: policy instance used to update bucket's IAM policy.

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the current bucket.

        :rtype: :class:`google.cloud.iam.Policy`
        :returns: the policy instance, based on the resource returned from
                  the ``setIamPolicy`` API request.
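
        Example (sketch; assumes the set-based accessors of
        :class:`google.cloud.iam.Policy`)::

            policy = bucket.get_iam_policy()
            policy.viewers.add(policy.user('bob@example.com'))
            bucket.set_iam_policy(policy)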
        """
        client = self._require_client(client)
        query_params = {}

        if self.user_project is not None:
            query_params['userProject'] = self.user_project

        resource = policy.to_api_repr()
        resource['resourceId'] = self.path
        info = client._connection.api_request(method='PUT',
                                              path='%s/iam' % (self.path, ),
                                              query_params=query_params,
                                              data=resource,
                                              _target_object=None)
        return Policy.from_api_repr(info)

    def test_iam_permissions(self, permissions, client=None):
        """API call:  test permissions

        See
        https://cloud.google.com/storage/docs/json_api/v1/buckets/testIamPermissions

        :type permissions: list of string
        :param permissions: the permissions to check

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the current bucket.

        :rtype: list of string
        :returns: the permissions returned by the ``testIamPermissions`` API
                  request.
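
        Example (sketch)::

            allowed = bucket.test_iam_permissions(
                ['storage.buckets.get', 'storage.objects.list'])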
        """
        client = self._require_client(client)
        query_params = {'permissions': permissions}

        if self.user_project is not None:
            query_params['userProject'] = self.user_project

        path = '%s/iam/testPermissions' % (self.path, )
        resp = client._connection.api_request(method='GET',
                                              path=path,
                                              query_params=query_params)
        return resp.get('permissions', [])

    def make_public(self, recursive=False, future=False, client=None):
        """Make a bucket public.

        If ``recursive=True`` and the bucket contains more than 256
        objects / blobs this will cowardly refuse to make the objects public.
        This is to prevent extremely long runtime of this method.

        :type recursive: bool
        :param recursive: If True, this will make all blobs inside the bucket
                          public as well.

        :type future: bool
        :param future: If True, this will make all objects created in the
                       future public as well.

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the current bucket.
        """
        self.acl.all().grant_read()
        self.acl.save(client=client)

        if future:
            doa = self.default_object_acl
            if not doa.loaded:
                doa.reload(client=client)
            doa.all().grant_read()
            doa.save(client=client)

        if recursive:
            blobs = list(
                self.list_blobs(projection='full',
                                max_results=self._MAX_OBJECTS_FOR_ITERATION +
                                1,
                                client=client))
            if len(blobs) > self._MAX_OBJECTS_FOR_ITERATION:
                message = (
                    'Refusing to make public recursively with more than '
                    '%d objects. If you actually want to make every object '
                    'in this bucket public, please do it on the objects '
                    'yourself.') % (self._MAX_OBJECTS_FOR_ITERATION, )
                raise ValueError(message)

            for blob in blobs:
                blob.acl.all().grant_read()
                blob.acl.save(client=client)

    def generate_upload_policy(self, conditions, expiration=None, client=None):
        """Create a signed upload policy for uploading objects.

        This method generates and signs a policy document. You can use
        `policy documents`_ to allow visitors to a website to upload files to
        Google Cloud Storage without giving them direct write access.

        For example:

        .. literalinclude:: snippets.py
            :start-after: [START policy_document]
            :end-before: [END policy_document]

        .. _policy documents:
            https://cloud.google.com/storage/docs/xml-api\
            /post-object#policydocument

        :type conditions: list
        :param conditions: A list of conditions as described in the
                           `policy documents`_ documentation.

        :type expiration: datetime
        :param expiration: Optional expiration in UTC. If not specified, the
                           policy will expire in 1 hour.

        :type client: :class:`~google.cloud.storage.client.Client`
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the current bucket.

        :rtype: dict
        :returns: A dictionary of (form field name, form field value) of form
                  fields that should be added to your HTML upload form in order
                  to attach the signature.
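
        Example (an illustrative sketch; the conditions shown are arbitrary
        and follow the `policy documents`_ format)::

            >>> fields = bucket.generate_upload_policy([
            ...     {'acl': 'private'},
            ...     ['content-length-range', 0, 1024 * 1024],
            ... ])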
        """
        client = self._require_client(client)
        credentials = client._base_connection.credentials

        if not isinstance(credentials, google.auth.credentials.Signing):
            auth_uri = ('http://google-cloud-python.readthedocs.io/en/latest/'
                        'google-cloud-auth.html#setting-up-a-service-account')
            raise AttributeError(
                'You need a private key to sign credentials. '
                'The credentials you are currently using (%s) '
                'just contain a token. See %s for more '
                'details.' % (type(credentials), auth_uri))

        if expiration is None:
            expiration = _NOW() + datetime.timedelta(hours=1)

        conditions = conditions + [
            {
                'bucket': self.name
            },
        ]

        policy_document = {
            'expiration': _datetime_to_rfc3339(expiration),
            'conditions': conditions,
        }

        encoded_policy_document = base64.b64encode(
            json.dumps(policy_document).encode('utf-8'))
        signature = base64.b64encode(
            credentials.sign_bytes(encoded_policy_document))

        fields = {
            'bucket': self.name,
            'GoogleAccessId': credentials.signer_email,
            'policy': encoded_policy_document.decode('utf-8'),
            'signature': signature.decode('utf-8'),
        }

        return fields


class Blob(_PropertyMixin):
    """A wrapper around Cloud Storage's concept of an ``Object``.

    :type name: string
    :param name: The name of the blob.  This corresponds to the
                 unique path of the object in the bucket.

    :type bucket: :class:`google.cloud.storage.bucket.Bucket`
    :param bucket: The bucket to which this blob belongs.

    :type chunk_size: integer
    :param chunk_size: The size of a chunk of data whenever iterating (1 MB).
                       This must be a multiple of 256 KB per the API
                       specification.
    """

    _chunk_size = None  # Default value for each instance.

    _CHUNK_SIZE_MULTIPLE = 256 * 1024
    """Number (256 KB, in bytes) that must divide the chunk size."""

    def __init__(self, name, bucket, chunk_size=None):
        super(Blob, self).__init__(name=name)

        self.chunk_size = chunk_size  # Check that setter accepts value.
        self.bucket = bucket
        self._acl = ObjectACL(self)

    @property
    def chunk_size(self):
        """Get the blob's default chunk size.

        :rtype: integer or ``NoneType``
        :returns: The current blob's chunk size, if it is set.
        """
        return self._chunk_size

    @chunk_size.setter
    def chunk_size(self, value):
        """Set the blob's default chunk size.

        :type value: integer or ``NoneType``
        :param value: The current blob's chunk size, if it is set.

        :raises: :class:`ValueError` if ``value`` is not ``None`` and is not a
                 multiple of 256 KB.
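
        Example (1 MB is a valid multiple of 256 KB; other values raise
        :class:`ValueError`)::

            >>> blob.chunk_size = 1024 * 1024  # 1 MB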
        """
        if value is not None and value % self._CHUNK_SIZE_MULTIPLE != 0:
            raise ValueError('Chunk size must be a multiple of %d.' %
                             (self._CHUNK_SIZE_MULTIPLE, ))
        self._chunk_size = value

    @staticmethod
    def path_helper(bucket_path, blob_name):
        """Relative URL path for a blob.

        :type bucket_path: string
        :param bucket_path: The URL path for a bucket.

        :type blob_name: string
        :param blob_name: The name of the blob.

        :rtype: string
        :returns: The relative URL path for ``blob_name``.
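
        Example (the blob name is URL-quoted, including slashes)::

            >>> Blob.path_helper('/b/my-bucket', 'nested/file name.txt')
            '/b/my-bucket/o/nested%2Ffile%20name.txt'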
        """
        return bucket_path + '/o/' + quote(blob_name, safe='')

    @property
    def acl(self):
        """Create our ACL on demand."""
        return self._acl

    def __repr__(self):
        if self.bucket:
            bucket_name = self.bucket.name
        else:
            bucket_name = None

        return '<Blob: %s, %s>' % (bucket_name, self.name)

    @property
    def path(self):
        """Getter property for the URL path to this Blob.

        :rtype: string
        :returns: The URL path to this Blob.
        """
        if not self.name:
            raise ValueError('Cannot determine path without a blob name.')

        return self.path_helper(self.bucket.path, self.name)

    @property
    def client(self):
        """The client bound to this blob."""
        return self.bucket.client

    @property
    def public_url(self):
        """The public URL for this blob's object.

        :rtype: string
        :returns: The public URL for this blob.
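
        Example (assuming a bucket named ``my-bucket`` and a blob named
        ``my-blob``)::

            >>> blob.public_url
            'https://storage.googleapis.com/my-bucket/my-blob'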
        """
        return '{storage_base_url}/{bucket_name}/{quoted_name}'.format(
            storage_base_url='https://storage.googleapis.com',
            bucket_name=self.bucket.name,
            quoted_name=quote(self.name, safe=''))

    def generate_signed_url(self,
                            expiration,
                            method='GET',
                            content_type=None,
                            generation=None,
                            response_disposition=None,
                            response_type=None,
                            client=None,
                            credentials=None):
        """Generates a signed URL for this blob.

        .. note::

            If you are on Google Compute Engine, you can't generate a signed
            URL. Follow `Issue 922`_ for updates on this. If you'd like to
            be able to generate a signed URL from GCE, you can use a standard
            service account from a JSON file rather than a GCE service account.

        .. _Issue 922: https://github.com/GoogleCloudPlatform/\
                       google-cloud-python/issues/922

        If you have a blob that you want to allow access to for a set
        amount of time, you can use this method to generate a URL that
        is only valid within a certain time period.

        This is particularly useful if you don't want publicly
        accessible blobs, but don't want to require users to explicitly
        log in.
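
        Example (a sketch; assumes ``blob`` is an existing :class:`Blob`
        and the bound client has signing credentials)::

            >>> import datetime
            >>> url = blob.generate_signed_url(
            ...     expiration=datetime.timedelta(hours=1))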

        :type expiration: int, long, datetime.datetime, datetime.timedelta
        :param expiration: When the signed URL should expire.

        :type method: str
        :param method: The HTTP verb that will be used when requesting the URL.

        :type content_type: str
        :param content_type: (Optional) The content type of the object
                             referenced by ``resource``.

        :type generation: str
        :param generation: (Optional) A value that indicates which generation
                           of the resource to fetch.

        :type response_disposition: str
        :param response_disposition: (Optional) Content disposition of
                                     responses to requests for the signed URL.
                                     For example, to enable the signed URL
                                     to initiate a download of ``blob.png``,
                                     use the value
                                     ``'attachment; filename=blob.png'``.

        :type response_type: str
        :param response_type: (Optional) Content type of responses to requests
                              for the signed URL. Used to over-ride the content
                              type of the underlying blob/object.

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: (Optional) The client to use.  If not passed, falls back
                       to the ``client`` stored on the blob's bucket.

        :type credentials: :class:`oauth2client.client.OAuth2Credentials` or
                           :class:`NoneType`
        :param credentials: (Optional) The OAuth2 credentials to use to sign
                            the URL. Defaults to the credentials stored on the
                            client used.

        :rtype: str
        :returns: A signed URL you can use to access the resource
                  until expiration.
        """
        resource = '/{bucket_name}/{quoted_name}'.format(
            bucket_name=self.bucket.name,
            quoted_name=quote(self.name, safe=''))

        if credentials is None:
            client = self._require_client(client)
            credentials = client._connection.credentials

        return generate_signed_url(credentials,
                                   resource=resource,
                                   api_access_endpoint=_API_ACCESS_ENDPOINT,
                                   expiration=expiration,
                                   method=method,
                                   content_type=content_type,
                                   response_type=response_type,
                                   response_disposition=response_disposition,
                                   generation=generation)

    def exists(self, client=None):
        """Determines whether or not this blob exists.

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the blob's bucket.

        :rtype: boolean
        :returns: True if the blob exists in Cloud Storage.
        """
        client = self._require_client(client)
        try:
            # We only need the status code (200 or not) so we seek to
            # minimize the returned payload.
            query_params = {'fields': 'name'}
            # We intentionally pass `_target_object=None` since fields=name
            # would limit the local properties.
            client.connection.api_request(method='GET',
                                          path=self.path,
                                          query_params=query_params,
                                          _target_object=None)
            # NOTE: This will not fail immediately in a batch. However, when
            #       Batch.finish() is called, the resulting `NotFound` will be
            #       raised.
            return True
        except NotFound:
            return False

    def delete(self, client=None):
        """Deletes a blob from Cloud Storage.

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the blob's bucket.

        :rtype: :class:`Blob`
        :returns: The blob that was just deleted.
        :raises: :class:`google.cloud.exceptions.NotFound`
                 (propagated from
                 :meth:`google.cloud.storage.bucket.Bucket.delete_blob`).
        """
        return self.bucket.delete_blob(self.name, client=client)

    def download_to_file(self, file_obj, encryption_key=None, client=None):
        """Download the contents of this blob into a file-like object.

        .. note::

           If the server-set property, :attr:`media_link`, is not yet
           initialized, makes an additional API request to load it.

        Downloading a file that has been encrypted with a `customer-supplied`_
        encryption key::

            >>> from google.cloud import storage
            >>> from google.cloud.storage import Blob

            >>> client = storage.Client(project='my-project')
            >>> bucket = client.get_bucket('my-bucket')
            >>> encryption_key = 'aa426195405adee2c8081bb9e7e74b19'
            >>> blob = Blob('secure-data', bucket)
            >>> with open('/tmp/my-secure-file', 'wb') as file_obj:
            ...     blob.download_to_file(file_obj,
            ...                           encryption_key=encryption_key)

        The ``encryption_key`` should be a str or bytes with a length of at
        least 32.

        .. _customer-supplied: https://cloud.google.com/storage/docs/\
                               encryption#customer-supplied

        :type file_obj: file
        :param file_obj: A file handle to which to write the blob's data.

        :type encryption_key: str or bytes
        :param encryption_key: Optional 32 byte encryption key for
                               customer-supplied encryption.

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the blob's bucket.

        :raises: :class:`google.cloud.exceptions.NotFound`
        """
        client = self._require_client(client)
        if self.media_link is None:  # not yet loaded
            self.reload(client=client)

        download_url = self.media_link

        # Use apitools 'Download' facility.
        download = Download.from_stream(file_obj)

        if self.chunk_size is not None:
            download.chunksize = self.chunk_size

        headers = {}
        if encryption_key:
            _set_encryption_headers(encryption_key, headers)

        request = Request(download_url, 'GET', headers)

        # Use the private ``_connection`` rather than the public
        # ``.connection``, since the public connection may be a batch. A
        # batch wraps a client's connection, but does not store the `http`
        # object. The rest (API_BASE_URL and build_api_url) are also defined
        # on the Batch class, but we just use the wrapped connection since
        # it has all three (http, API_BASE_URL and build_api_url).
        download.initialize_download(request, client._connection.http)

    def download_to_filename(self, filename, encryption_key=None, client=None):
        """Download the contents of this blob into a named file.

        :type filename: string
        :param filename: A filename to be passed to ``open``.

        :type encryption_key: str or bytes
        :param encryption_key: Optional 32 byte encryption key for
                               customer-supplied encryption.

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the blob's bucket.

        :raises: :class:`google.cloud.exceptions.NotFound`
        """
        with open(filename, 'wb') as file_obj:
            self.download_to_file(file_obj,
                                  encryption_key=encryption_key,
                                  client=client)

        if self.updated is not None:
            # Mirror the blob's server-side update time on the local file.
            mtime = time.mktime(self.updated.timetuple())
            os.utime(file_obj.name, (mtime, mtime))

    def download_as_string(self, encryption_key=None, client=None):
        """Download the contents of this blob as a string.

        :type encryption_key: str or bytes
        :param encryption_key: Optional 32 byte encryption key for
                               customer-supplied encryption.

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the blob's bucket.

        :rtype: bytes
        :returns: The data stored in this blob.
        :raises: :class:`google.cloud.exceptions.NotFound`
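
        Example (buffers the entire blob in memory; see
        :meth:`download_to_file` for streaming to a file instead)::

            >>> payload = blob.download_as_string()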
        """
        string_buffer = BytesIO()
        self.download_to_file(string_buffer,
                              encryption_key=encryption_key,
                              client=client)
        return string_buffer.getvalue()

    @staticmethod
    def _check_response_error(request, http_response):
        """Helper for :meth:`upload_from_file`."""
        info = http_response.info
        status = int(info['status'])
        if not 200 <= status < 300:
            faux_response = httplib2.Response({'status': status})
            raise make_exception(faux_response,
                                 http_response.content,
                                 error_info=request.url)

    # pylint: disable=too-many-locals
    def upload_from_file(self,
                         file_obj,
                         rewind=False,
                         size=None,
                         encryption_key=None,
                         content_type=None,
                         num_retries=6,
                         client=None):
        """Upload the contents of this blob from a file-like object.

        The content type of the upload will be, in order of precedence:

        - The value passed in to the function (if any)
        - The value stored on the current blob
        - The default value of ``'application/octet-stream'``

        .. note::
           The effect of uploading to an existing blob depends on the
           "versioning" and "lifecycle" policies defined on the blob's
           bucket.  In the absence of those policies, upload will
           overwrite any existing contents.

           See the `object versioning
           <https://cloud.google.com/storage/docs/object-versioning>`_ and
           `lifecycle <https://cloud.google.com/storage/docs/lifecycle>`_
           API documents for details.

        Uploading a file with a `customer-supplied`_ encryption key::

            >>> from google.cloud import storage
            >>> from google.cloud.storage import Blob

            >>> client = storage.Client(project='my-project')
            >>> bucket = client.get_bucket('my-bucket')
            >>> encryption_key = 'aa426195405adee2c8081bb9e7e74b19'
            >>> blob = Blob('secure-data', bucket)
            >>> with open('my-file', 'rb') as my_file:
            ...     blob.upload_from_file(my_file,
            ...                           encryption_key=encryption_key)

        The ``encryption_key`` should be a str or bytes with a length of at
        least 32.

        .. _customer-supplied: https://cloud.google.com/storage/docs/\
                               encryption#customer-supplied

        :type file_obj: file
        :param file_obj: A file handle open for reading.

        :type rewind: boolean
        :param rewind: If True, seek to the beginning of the file handle before
                       writing the file to Cloud Storage.

        :type size: int
        :param size: The number of bytes to read from the file handle.
                     If not provided, we'll try to guess the size using
                     :func:`os.fstat`. (If the file handle is not from the
                     filesystem this won't be possible.)

        :type encryption_key: str or bytes
        :param encryption_key: Optional 32 byte encryption key for
                               customer-supplied encryption.

        :type content_type: string or ``NoneType``
        :param content_type: Optional type of content being uploaded.

        :type num_retries: integer
        :param num_retries: Number of upload retries. Defaults to 6.

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the blob's bucket.

        :raises: :class:`ValueError` if size is not passed in and can not be
                 determined; :class:`google.cloud.exceptions.GoogleCloudError`
                 if the upload response returns an error status.
        """
        client = self._require_client(client)
        # Use the private ``_connection`` rather than the public
        # ``.connection``, since the public connection may be a batch. A
        # batch wraps a client's connection, but does not store the `http`
        # object. The rest (API_BASE_URL and build_api_url) are also defined
        # on the Batch class, but we just use the wrapped connection since
        # it has all three (http, API_BASE_URL and build_api_url).
        connection = client._connection
        content_type = (content_type or self._properties.get('contentType')
                        or 'application/octet-stream')

        # Rewind the file if desired.
        if rewind:
            file_obj.seek(0, os.SEEK_SET)

        # Get the basic stats about the file.
        total_bytes = size
        if total_bytes is None:
            if hasattr(file_obj, 'fileno'):
                try:
                    total_bytes = os.fstat(file_obj.fileno()).st_size
                except (OSError, UnsupportedOperation):
                    pass  # Assuming fd is not an actual file (maybe socket).

        headers = {
            'Accept': 'application/json',
            'Accept-Encoding': 'gzip, deflate',
            'User-Agent': connection.USER_AGENT,
        }

        if encryption_key:
            _set_encryption_headers(encryption_key, headers)

        upload = Upload(file_obj,
                        content_type,
                        total_bytes,
                        auto_transfer=False)

        if self.chunk_size is not None:
            upload.chunksize = self.chunk_size

            if total_bytes is None:
                upload.strategy = RESUMABLE_UPLOAD
        elif total_bytes is None:
            raise ValueError('total bytes could not be determined. Please '
                             'pass an explicit size, or supply a chunk size '
                             'for a streaming transfer.')

        url_builder = _UrlBuilder(bucket_name=self.bucket.name,
                                  object_name=self.name)
        upload_config = _UploadConfig()

        # Temporary URL, until we know simple vs. resumable.
        base_url = connection.API_BASE_URL + '/upload'
        upload_url = connection.build_api_url(api_base_url=base_url,
                                              path=self.bucket.path + '/o')

        # Use apitools 'Upload' facility.
        request = Request(upload_url, 'POST', headers)

        upload.configure_request(upload_config, request, url_builder)
        query_params = url_builder.query_params
        base_url = connection.API_BASE_URL + '/upload'
        request.url = connection.build_api_url(api_base_url=base_url,
                                               path=self.bucket.path + '/o',
                                               query_params=query_params)
        upload.initialize_upload(request, connection.http)

        if upload.strategy == RESUMABLE_UPLOAD:
            http_response = upload.stream_file(use_chunks=True)
        else:
            http_response = make_api_request(connection.http,
                                             request,
                                             retries=num_retries)

        self._check_response_error(request, http_response)
        response_content = http_response.content

        if not isinstance(response_content,
                          six.string_types):  # pragma: NO COVER  Python3
            response_content = response_content.decode('utf-8')
        self._set_properties(json.loads(response_content))

    # pylint: enable=too-many-locals

    def upload_from_filename(self,
                             filename,
                             content_type=None,
                             encryption_key=None,
                             client=None):
        """Upload this blob's contents from the content of a named file.

        The content type of the upload will be, in order of precedence:

        - The value passed in to the function (if any)
        - The value stored on the current blob
        - The value given by ``mimetypes.guess_type``

        .. note::
           The effect of uploading to an existing blob depends on the
           "versioning" and "lifecycle" policies defined on the blob's
           bucket.  In the absence of those policies, upload will
           overwrite any existing contents.

           See the `object versioning
           <https://cloud.google.com/storage/docs/object-versioning>`_ and
           `lifecycle <https://cloud.google.com/storage/docs/lifecycle>`_
           API documents for details.

        :type filename: string
        :param filename: The path to the file.

        :type content_type: string or ``NoneType``
        :param content_type: Optional type of content being uploaded.

        :type encryption_key: str or bytes
        :param encryption_key: Optional 32 byte encryption key for
                               customer-supplied encryption.

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the blob's bucket.
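
        Example (here the content type would be guessed from the filename
        via :mod:`mimetypes`)::

            >>> blob.upload_from_filename('/path/to/report.pdf')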
        """
        content_type = content_type or self._properties.get('contentType')
        if content_type is None:
            content_type, _ = mimetypes.guess_type(filename)

        with open(filename, 'rb') as file_obj:
            self.upload_from_file(file_obj,
                                  content_type=content_type,
                                  encryption_key=encryption_key,
                                  client=client)

    def upload_from_string(self,
                           data,
                           content_type='text/plain',
                           encryption_key=None,
                           client=None):
        """Upload contents of this blob from the provided string.

        .. note::
           The effect of uploading to an existing blob depends on the
           "versioning" and "lifecycle" policies defined on the blob's
           bucket.  In the absence of those policies, upload will
           overwrite any existing contents.

           See the `object versioning
           <https://cloud.google.com/storage/docs/object-versioning>`_ and
           `lifecycle <https://cloud.google.com/storage/docs/lifecycle>`_
           API documents for details.

        :type data: bytes or text
        :param data: The data to store in this blob.  If the value is
                     text, it will be encoded as UTF-8.

        :type content_type: string
        :param content_type: Optional type of content being uploaded. Defaults
                             to ``'text/plain'``.

        :type encryption_key: str or bytes
        :param encryption_key: Optional 32 byte encryption key for
                               customer-supplied encryption.

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the blob's bucket.
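
        Example (text is encoded as UTF-8 before upload; bytes are sent
        as-is)::

            >>> blob.upload_from_string(u'New contents!')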
        """
        if isinstance(data, six.text_type):
            data = data.encode('utf-8')
        string_buffer = BytesIO()
        string_buffer.write(data)
        self.upload_from_file(file_obj=string_buffer,
                              rewind=True,
                              size=len(data),
                              content_type=content_type,
                              encryption_key=encryption_key,
                              client=client)

    def make_public(self, client=None):
        """Make this blob public giving all users read access.

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the blob's bucket.
        """
        self.acl.all().grant_read()
        self.acl.save(client=client)

    cache_control = _scalar_property('cacheControl')
    """HTTP 'Cache-Control' header for this object.

    See: https://tools.ietf.org/html/rfc7234#section-5.2 and
         https://cloud.google.com/storage/docs/json_api/v1/objects

    If the property is not set locally, returns ``None``.

    :rtype: string or ``NoneType``
    """

    content_disposition = _scalar_property('contentDisposition')
    """HTTP 'Content-Disposition' header for this object.

    See: https://tools.ietf.org/html/rfc6266 and
         https://cloud.google.com/storage/docs/json_api/v1/objects

    If the property is not set locally, returns ``None``.

    :rtype: string or ``NoneType``
    """

    content_encoding = _scalar_property('contentEncoding')
    """HTTP 'Content-Encoding' header for this object.

    See: https://tools.ietf.org/html/rfc7231#section-3.1.2.2 and
         https://cloud.google.com/storage/docs/json_api/v1/objects

    If the property is not set locally, returns ``None``.

    :rtype: string or ``NoneType``
    """

    content_language = _scalar_property('contentLanguage')
    """HTTP 'Content-Language' header for this object.

    See: http://tools.ietf.org/html/bcp47 and
         https://cloud.google.com/storage/docs/json_api/v1/objects

    If the property is not set locally, returns ``None``.

    :rtype: string or ``NoneType``
    """

    content_type = _scalar_property('contentType')
    """HTTP 'Content-Type' header for this object.

    See: https://tools.ietf.org/html/rfc2616#section-14.17 and
         https://cloud.google.com/storage/docs/json_api/v1/objects

    If the property is not set locally, returns ``None``.

    :rtype: string or ``NoneType``
    """

    crc32c = _scalar_property('crc32c')
    """CRC32C checksum for this object.

    See: http://tools.ietf.org/html/rfc4960#appendix-B and
         https://cloud.google.com/storage/docs/json_api/v1/objects

    If the property is not set locally, returns ``None``.

    :rtype: string or ``NoneType``
    """

    @property
    def component_count(self):
        """Number of underlying components that make up this object.

        See: https://cloud.google.com/storage/docs/json_api/v1/objects

        :rtype: integer or ``NoneType``
        :returns: The component count (in case of a composed object) or
                  ``None`` if the property is not set locally. This property
                  will not be set on objects not created via ``compose``.
        """
        component_count = self._properties.get('componentCount')
        if component_count is not None:
            return int(component_count)

    @property
    def etag(self):
        """Retrieve the ETag for the object.

        See: http://tools.ietf.org/html/rfc2616#section-3.11 and
             https://cloud.google.com/storage/docs/json_api/v1/objects

        :rtype: string or ``NoneType``
        :returns: The blob etag or ``None`` if the property is not set locally.
        """
        return self._properties.get('etag')

    @property
    def generation(self):
        """Retrieve the generation for the object.

        See: https://cloud.google.com/storage/docs/json_api/v1/objects

        :rtype: integer or ``NoneType``
        :returns: The generation of the blob or ``None`` if the property
                  is not set locally.
        """
        generation = self._properties.get('generation')
        if generation is not None:
            return int(generation)

    @property
    def id(self):
        """Retrieve the ID for the object.

        See: https://cloud.google.com/storage/docs/json_api/v1/objects

        :rtype: string or ``NoneType``
        :returns: The ID of the blob or ``None`` if the property is not
                  set locally.
        """
        return self._properties.get('id')

    md5_hash = _scalar_property('md5Hash')
    """MD5 hash for this object.

    See: https://tools.ietf.org/html/rfc1321 and
         https://cloud.google.com/storage/docs/json_api/v1/objects

    If the property is not set locally, returns ``None``.

    :rtype: string or ``NoneType``
    """

    @property
    def media_link(self):
        """Retrieve the media download URI for the object.

        See: https://cloud.google.com/storage/docs/json_api/v1/objects

        :rtype: string or ``NoneType``
        :returns: The media link for the blob or ``None`` if the property is
                  not set locally.
        """
        return self._properties.get('mediaLink')

    @property
    def metadata(self):
        """Retrieve arbitrary/application specific metadata for the object.

        See: https://cloud.google.com/storage/docs/json_api/v1/objects

        :rtype: dict or ``NoneType``
        :returns: The metadata associated with the blob or ``None`` if the
                  property is not set locally.
        """
        return copy.deepcopy(self._properties.get('metadata'))

    @metadata.setter
    def metadata(self, value):
        """Update arbitrary/application specific metadata for the object.

        See: https://cloud.google.com/storage/docs/json_api/v1/objects

        :type value: dict or ``NoneType``
        :param value: The blob metadata to set.
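
        Example (keys and values are arbitrary application strings; the
        change is staged locally and sent when the blob is patched)::

            >>> blob.metadata = {'color': 'blue'}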
        """
        self._patch_property('metadata', value)

    @property
    def metageneration(self):
        """Retrieve the metageneration for the object.

        See: https://cloud.google.com/storage/docs/json_api/v1/objects

        :rtype: integer or ``NoneType``
        :returns: The metageneration of the blob or ``None`` if the property
                  is not set locally.
        """
        metageneration = self._properties.get('metageneration')
        if metageneration is not None:
            return int(metageneration)

    @property
    def owner(self):
        """Retrieve info about the owner of the object.

        See: https://cloud.google.com/storage/docs/json_api/v1/objects

        :rtype: dict or ``NoneType``
        :returns: Mapping of owner's role/ID. If the property is not set
                  locally, returns ``None``.
        """
        return copy.deepcopy(self._properties.get('owner'))

    @property
    def self_link(self):
        """Retrieve the URI for the object.

        See: https://cloud.google.com/storage/docs/json_api/v1/objects

        :rtype: string or ``NoneType``
        :returns: The self link for the blob or ``None`` if the property is
                  not set locally.
        """
        return self._properties.get('selfLink')

    @property
    def size(self):
        """Size of the object, in bytes.

        See: https://cloud.google.com/storage/docs/json_api/v1/objects

        :rtype: integer or ``NoneType``
        :returns: The size of the blob or ``None`` if the property
                  is not set locally.
        """
        size = self._properties.get('size')
        if size is not None:
            return int(size)

    @property
    def storage_class(self):
        """Retrieve the storage class for the object.

        See: https://cloud.google.com/storage/docs/storage-classes,
             https://cloud.google.com/storage/docs/nearline-storage, and
             https://cloud.google.com/storage/docs/durable-reduced-availability

        :rtype: string or ``NoneType``
        :returns: If set, one of "STANDARD", "NEARLINE", "MULTI_REGIONAL",
                  "REGIONAL", "COLDLINE", or "DURABLE_REDUCED_AVAILABILITY";
                  else ``None``.
        """
        return self._properties.get('storageClass')

    @property
    def time_deleted(self):
        """Retrieve the timestamp at which the object was deleted.

        See: https://cloud.google.com/storage/docs/json_api/v1/objects

        :rtype: :class:`datetime.datetime` or ``NoneType``
        :returns: Datetime object parsed from RFC3339 valid timestamp, or
                  ``None`` if the property is not set locally. If the blob has
                  not been deleted, this will never be set.
        """
        value = self._properties.get('timeDeleted')
        if value is not None:
            return _rfc3339_to_datetime(value)

    @property
    def updated(self):
        """Retrieve the timestamp at which the object was updated.

        See: https://cloud.google.com/storage/docs/json_api/v1/objects

        :rtype: :class:`datetime.datetime` or ``NoneType``
        :returns: Datetime object parsed from RFC3339 valid timestamp, or
                  ``None`` if the property is not set locally.
        """
        value = self._properties.get('updated')
        if value is not None:
            return _rfc3339_to_datetime(value)