Example #1
def lookup_bucket(bucket_name, connection=None):
    """Get a bucket by name, returning None if not found.

    You can use this if you would rather check for a ``None`` value
    than catch an exception::

      >>> from gcloud import storage
      >>> bucket = storage.lookup_bucket('doesnt-exist')
      >>> print bucket
      None
      >>> bucket = storage.lookup_bucket('my-bucket')
      >>> print bucket
      <Bucket: my-bucket>

    :type bucket_name: string
    :param bucket_name: The name of the bucket to get.

    :type connection: :class:`gcloud.storage.connection.Connection` or
                      ``NoneType``
    :param connection: Optional. The connection to use when sending requests.
                       If not provided, falls back to default.

    :rtype: :class:`gcloud.storage.bucket.Bucket`
    :returns: The bucket matching the name provided or None if not found.
    """
    connection = _require_connection(connection)
    try:
        return get_bucket(bucket_name, connection=connection)
    except NotFound:
        return None
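A common pattern built on top of ``lookup_bucket`` is "get or create": check for ``None`` and create the bucket only when it is missing. A minimal sketch, assuming the default connection and project are already configured in the environment:

    from gcloud import storage

    bucket = storage.lookup_bucket('my-bucket')
    if bucket is None:
        # Bucket is absent; create it (a concurrent creator could
        # still win the race and cause a Conflict here).
        bucket = storage.create_bucket('my-bucket')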
Example #2
def create_bucket(bucket_name, project=None, connection=None):
    """Create a new bucket.

    For example::

      >>> from gcloud import storage
      >>> bucket = storage.create_bucket('my-bucket')
      >>> print bucket
      <Bucket: my-bucket>

    This implements "storage.buckets.insert".

    If the bucket already exists, this will raise
    :class:`gcloud.exceptions.Conflict`.

    :type project: string
    :param project: Optional. The project to use when creating bucket.
                    If not provided, falls back to default.

    :type bucket_name: string
    :param bucket_name: The bucket name to create.

    :type connection: :class:`gcloud.storage.connection.Connection` or
                      ``NoneType``
    :param connection: Optional. The connection to use when sending requests.
                       If not provided, falls back to default.

    :rtype: :class:`gcloud.storage.bucket.Bucket`
    :returns: The newly created bucket.
    """
    connection = _require_connection(connection)
    bucket = Bucket(bucket_name)
    bucket.create(project, connection=connection)
    return bucket
Example #3
def create_bucket(bucket_name, project=None, connection=None):
    """Create a new bucket.

    For example::

      >>> from gcloud import storage
      >>> bucket = storage.create_bucket('my-bucket')
      >>> print bucket
      <Bucket: my-bucket>

    This implements "storage.buckets.insert".

    If the bucket already exists, this will raise
    :class:`gcloud.exceptions.Conflict`.

    :type project: string
    :param project: Optional. The project to use when creating bucket.
                    If not provided, falls back to default.

    :type bucket_name: string
    :param bucket_name: The bucket name to create.

    :type connection: :class:`gcloud.storage.connection.Connection` or
                      ``NoneType``
    :param connection: Optional. The connection to use when sending requests.
                       If not provided, falls back to default.

    :rtype: :class:`gcloud.storage.bucket.Bucket`
    :returns: The newly created bucket.
    """
    connection = _require_connection(connection)
    bucket = Bucket(bucket_name, connection=connection)
    bucket.create(project, connection=connection)
    return bucket
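The converse of the lookup pattern is to attempt the create and fall back when the name is taken. A hedged sketch, again assuming a configured default connection; the bucket name is a placeholder:

    from gcloud import storage
    from gcloud.exceptions import Conflict

    try:
        bucket = storage.create_bucket('my-bucket')
    except Conflict:
        # The bucket already exists; fetch it instead.
        bucket = storage.get_bucket('my-bucket')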
Example #4
    def delete_blobs(self, blobs, on_error=None, connection=None):
        """Deletes a list of blobs from the current bucket.

        Uses :func:`Bucket.delete_blob` to delete each individual blob.

        :type blobs: list of string or :class:`gcloud.storage.blob.Blob`
        :param blobs: A list of blob names or Blob objects to delete.

        :type on_error: a callable taking (blob)
        :param on_error: If not ``None``, called once for each blob raising
                         :class:`gcloud.exceptions.NotFound`;
                         otherwise, the exception is propagated.

        :type connection: :class:`gcloud.storage.connection.Connection` or
                          ``NoneType``
        :param connection: Optional. The connection to use when sending
                           requests. If not provided, falls back to default.

        :raises: :class:`gcloud.exceptions.NotFound` (if
                 `on_error` is not passed).
        """
        connection = _require_connection(connection)
        for blob in blobs:
            try:
                blob_name = blob
                if not isinstance(blob_name, six.string_types):
                    blob_name = blob.name
                self.delete_blob(blob_name, connection=connection)
            except NotFound:
                if on_error is not None:
                    on_error(blob)
                else:
                    raise
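The ``on_error`` callback turns per-blob ``NotFound`` failures into a policy decision instead of an exception. For instance, a caller can record missing blobs and keep going (a sketch; ``missing`` and the blob names are illustrative, not part of the API):

    missing = []
    bucket.delete_blobs(['a.txt', 'b.txt', 'c.txt'],
                        on_error=missing.append)
    # Names that were already gone end up in `missing`; the rest
    # are deleted.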
Example #5
    def save(self, acl=None, connection=None):
        """Save this ACL for the current bucket.

        :type acl: :class:`gcloud.storage.acl.ACL`, or a compatible list.
        :param acl: The ACL object to save.  If left blank, this will save
                    current entries.

        :type connection: :class:`gcloud.storage.connection.Connection` or None
        :param connection: explicit connection to use for API request;
                           defaults to instance property.
        """
        if acl is None:
            acl = self
            save_to_backend = acl.loaded
        else:
            save_to_backend = True

        if save_to_backend:
            path = self.save_path
            connection = _require_connection(connection)
            result = connection.api_request(
                method='PATCH',
                path=path,
                data={self._URL_PATH_ELEM: list(acl)},
                query_params={'projection': 'full'})
            self.entities.clear()
            for entry in result.get(self._URL_PATH_ELEM, ()):
                self.add_entity(self.entity_from_dict(entry))
            self.loaded = True
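In practice ``save`` is called after mutating the ACL's entries in place, which is exactly what ``Bucket.make_public`` does internally. A minimal sketch, assuming ``bucket`` was obtained via ``storage.get_bucket``:

    acl = bucket.acl
    acl.all().grant_read()  # grant READ to all users
    acl.save()              # PATCH the updated entries back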
Example #6
def get_bucket(bucket_name, connection=None):
    """Get a bucket by name.

    If the bucket isn't found, this will raise a
    :class:`gcloud.exceptions.NotFound`.

    For example::

      >>> from gcloud import storage
      >>> from gcloud.exceptions import NotFound
      >>> try:
      ...   bucket = storage.get_bucket('my-bucket')
      ... except NotFound:
      ...   print 'Sorry, that bucket does not exist!'

    This implements "storage.buckets.get".

    :type bucket_name: string
    :param bucket_name: The name of the bucket to get.

    :type connection: :class:`gcloud.storage.connection.Connection` or
                      ``NoneType``
    :param connection: Optional. The connection to use when sending requests.
                       If not provided, falls back to default.

    :rtype: :class:`gcloud.storage.bucket.Bucket`
    :returns: The bucket matching the name provided.
    :raises: :class:`gcloud.exceptions.NotFound`
    """
    connection = _require_connection(connection)
    bucket = Bucket(bucket_name)
    bucket.reload(connection=connection)
    return bucket
Example #7
    def download_to_file(self, file_obj, connection=None):
        """Download the contents of this blob into a file-like object.

        :type file_obj: file
        :param file_obj: A file handle to which to write the blob's data.

        :type connection: :class:`gcloud.storage.connection.Connection` or
                          ``NoneType``
        :param connection: Optional. The connection to use when sending
                           requests. If not provided, falls back to default.

        :raises: :class:`gcloud.exceptions.NotFound`
        """
        connection = _require_connection(connection)
        download_url = self.media_link

        # Use apitools 'Download' facility.
        download = transfer.Download.FromStream(file_obj, auto_transfer=False)
        headers = {}
        if self.chunk_size is not None:
            download.chunksize = self.chunk_size
            headers['Range'] = 'bytes=0-%d' % (self.chunk_size - 1, )
        request = http_wrapper.Request(download_url, 'GET', headers)

        download.InitializeDownload(request, connection.http)

        # Should we be passing callbacks through from caller?  We can't
        # pass them as None, because apitools wants to print to the console
        # by default.
        download.StreamInChunks(callback=lambda *args: None,
                                finish_callback=lambda *args: None)
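A typical call site opens a local file in binary mode and hands the handle over. A sketch, assuming the blob's ``media_link`` is populated because the blob came from ``bucket.get_blob``:

    blob = bucket.get_blob('my-file.txt')
    with open('/tmp/my-file.txt', 'wb') as file_obj:
        blob.download_to_file(file_obj)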
Example #8
    def create(self, project=None, connection=None):
        """Creates current bucket.

        If the bucket already exists, this will raise
        :class:`gcloud.exceptions.Conflict`.

        This implements "storage.buckets.insert".

        :type project: string
        :param project: Optional. The project to use when creating bucket.
                        If not provided, falls back to default.

        :type connection: :class:`gcloud.storage.connection.Connection` or
                          ``NoneType``
        :param connection: Optional. The connection to use when sending
                           requests. If not provided, falls back to default.

        :rtype: :class:`gcloud.storage.bucket.Bucket`
        :returns: The newly created bucket.
        :raises: :class:`EnvironmentError` if the project is not given and
                 can't be inferred.
        """
        connection = _require_connection(connection)
        if project is None:
            project = get_default_project()
        if project is None:
            raise EnvironmentError('Project could not be inferred '
                                   'from environment.')

        query_params = {'project': project}
        api_response = connection.api_request(
            method='POST', path='/b', query_params=query_params,
            data={'name': self.name}, _target_object=self)
        self._set_properties(api_response)
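Callers that cannot rely on ``get_default_project()`` can avoid the ``EnvironmentError`` by passing the project explicitly (a sketch with a placeholder project ID):

    bucket = Bucket('my-bucket')
    bucket.create(project='my-project-id')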
Example #9
    def __init__(self, bucket, extra_params=None, connection=None):
        connection = _require_connection(connection)
        self.bucket = bucket
        self.prefixes = ()
        super(_BlobIterator, self).__init__(
            connection=connection, path=bucket.path + '/o',
            extra_params=extra_params)
Example #10
def get_bucket(bucket_name, connection=None):
    """Get a bucket by name.

    If the bucket isn't found, this will raise a
    :class:`gcloud.exceptions.NotFound`.

    For example::

      >>> from gcloud import storage
      >>> from gcloud.exceptions import NotFound
      >>> try:
      ...   bucket = storage.get_bucket('my-bucket')
      ... except NotFound:
      ...   print 'Sorry, that bucket does not exist!'

    This implements "storage.buckets.get".

    :type bucket_name: string
    :param bucket_name: The name of the bucket to get.

    :type connection: :class:`gcloud.storage.connection.Connection` or
                      ``NoneType``
    :param connection: Optional. The connection to use when sending requests.
                       If not provided, falls back to default.

    :rtype: :class:`gcloud.storage.bucket.Bucket`
    :returns: The bucket matching the name provided.
    :raises: :class:`gcloud.exceptions.NotFound`
    """
    connection = _require_connection(connection)
    bucket = Bucket(bucket_name, connection=connection)
    bucket.reload(connection=connection)
    return bucket
Example #11
    def copy_blob(self, blob, destination_bucket, new_name=None,
                  connection=None):
        """Copy the given blob to the given bucket, optionally with a new name.

        :type blob: string or :class:`gcloud.storage.blob.Blob`
        :param blob: The blob to be copied.

        :type destination_bucket: :class:`gcloud.storage.bucket.Bucket`
        :param destination_bucket: The bucket into which the blob should be
                                   copied.

        :type new_name: string
        :param new_name: (optional) the new name for the copied file.

        :type connection: :class:`gcloud.storage.connection.Connection` or
                          ``NoneType``
        :param connection: Optional. The connection to use when sending
                           requests. If not provided, falls back to default.

        :rtype: :class:`gcloud.storage.blob.Blob`
        :returns: The new Blob.
        """
        connection = _require_connection(connection)
        if new_name is None:
            new_name = blob.name
        new_blob = Blob(bucket=destination_bucket, name=new_name)
        api_path = blob.path + '/copyTo' + new_blob.path
        copy_result = connection.api_request(method='POST', path=api_path,
                                             _target_object=new_blob)
        new_blob._set_properties(copy_result)
        return new_blob
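For example, copying a blob into a second bucket under a new name might look like this (a sketch assuming both buckets exist and a default connection is configured):

    source = storage.get_bucket('source-bucket')
    destination = storage.get_bucket('destination-bucket')
    blob = source.get_blob('report.csv')
    new_blob = source.copy_blob(blob, destination,
                                new_name='report-copy.csv')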
Example #12
    def exists(self, connection=None):
        """Determines whether or not this bucket exists.

        :type connection: :class:`gcloud.storage.connection.Connection` or
                          ``NoneType``
        :param connection: Optional. The connection to use when sending
                           requests. If not provided, falls back to default.

        :rtype: boolean
        :returns: True if the bucket exists in Cloud Storage.
        """
        connection = _require_connection(connection)
        try:
            # We only need the status code (200 or not) so we seek to
            # minimize the returned payload.
            query_params = {'fields': 'name'}
            # We intentionally pass `_target_object=None` since fields=name
            # would limit the local properties.
            connection.api_request(method='GET',
                                   path=self.path,
                                   query_params=query_params,
                                   _target_object=None)
            # NOTE: This will not fail immediately in a batch. However, when
            #       Batch.finish() is called, the resulting `NotFound` will be
            #       raised.
            return True
        except NotFound:
            return False
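Because only the ``name`` field is requested, ``exists`` is a cheap pre-flight check. A sketch with a placeholder bucket name:

    bucket = Bucket('maybe-bucket')
    if not bucket.exists():
        bucket.create()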
Example #13
    def rename(self, new_name, connection=None):
        """Renames this blob using copy and delete operations.

        Effectively, copies blob to the same bucket with a new name, then
        deletes the blob.

        .. warning::
          This method will first duplicate the data and then delete the
          old blob.  This means that renaming very large objects can be
          a (temporarily) costly and slow operation.

        :type new_name: string
        :param new_name: The new name for this blob.

        :type connection: :class:`gcloud.storage.connection.Connection` or
                          ``NoneType``
        :param connection: Optional. The connection to use when sending
                           requests. If not provided, falls back to default.

        :rtype: :class:`Blob`
        :returns: The newly-copied blob.
        """
        connection = _require_connection(connection)
        new_blob = self.bucket.copy_blob(self, self.bucket, new_name,
                                         connection=connection)
        self.delete(connection=connection)
        return new_blob
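Note that the copy and the delete are two separate requests, so a rename is not atomic: a failure between them can leave the data under both names. A minimal usage sketch, assuming ``bucket`` holds the blob:

    blob = bucket.get_blob('old-name.txt')
    new_blob = blob.rename('new-name.txt')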
Example #14
    def exists(self, connection=None):
        """Determines whether or not this blob exists.

        :type connection: :class:`gcloud.storage.connection.Connection` or
                          ``NoneType``
        :param connection: Optional. The connection to use when sending
                           requests. If not provided, falls back to default.

        :rtype: boolean
        :returns: True if the blob exists in Cloud Storage.
        """
        connection = _require_connection(connection)
        try:
            # We only need the status code (200 or not) so we seek to
            # minimize the returned payload.
            query_params = {'fields': 'name'}
            # We intentionally pass `_target_object=None` since fields=name
            # would limit the local properties.
            connection.api_request(method='GET', path=self.path,
                                   query_params=query_params,
                                   _target_object=None)
            # NOTE: This will not fail immediately in a batch. However, when
            #       Batch.finish() is called, the resulting `NotFound` will be
            #       raised.
            return True
        except NotFound:
            return False
Example #15
    def delete(self, force=False, connection=None):
        """Delete this bucket.

        The bucket **must** be empty in order to submit a delete request. If
        ``force=True`` is passed, this will first attempt to delete all the
        objects / blobs in the bucket (i.e. try to empty the bucket).

        If the bucket doesn't exist, this will raise
        :class:`gcloud.exceptions.NotFound`.  If the bucket is not empty
        (and ``force=False``), will raise :class:`gcloud.exceptions.Conflict`.

        If ``force=True`` and the bucket contains more than 256 objects /
        blobs, this will cowardly refuse to delete the objects (or the
        bucket). This is to prevent accidental bucket deletion and to
        prevent an extremely long runtime for this method.

        :type force: boolean
        :param force: If True, empties the bucket's objects then deletes it.

        :type connection: :class:`gcloud.storage.connection.Connection` or
                          ``NoneType``
        :param connection: Optional. The connection to use when sending
                           requests. If not provided, falls back to default.

        :raises: :class:`ValueError` if ``force`` is ``True`` and the bucket
                 contains more than 256 objects / blobs.
        """
        connection = _require_connection(connection)
        if force:
            blobs = list(
                self.list_blobs(
                    max_results=self._MAX_OBJECTS_FOR_BUCKET_DELETE + 1))
            if len(blobs) > self._MAX_OBJECTS_FOR_BUCKET_DELETE:
                message = ('Refusing to delete bucket with more than '
                           '%d objects. If you actually want to delete '
                           'this bucket, please delete the objects '
                           'yourself before calling Bucket.delete().') % (
                               self._MAX_OBJECTS_FOR_BUCKET_DELETE, )
                raise ValueError(message)

            # Ignore 404 errors on delete.
            self.delete_blobs(blobs,
                              on_error=lambda blob: None,
                              connection=connection)

        # We intentionally pass `_target_object=None` since a DELETE
        # request has no response value (whether in a standard request or
        # in a batch request).
        connection.api_request(method='DELETE',
                               path=self.path,
                               _target_object=None)
Example #16
    def delete(self, force=False, connection=None):
        """Delete this bucket.

        The bucket **must** be empty in order to submit a delete request. If
        ``force=True`` is passed, this will first attempt to delete all the
        objects / blobs in the bucket (i.e. try to empty the bucket).

        If the bucket doesn't exist, this will raise
        :class:`gcloud.exceptions.NotFound`.  If the bucket is not empty
        (and ``force=False``), will raise :class:`gcloud.exceptions.Conflict`.

        If ``force=True`` and the bucket contains more than 256 objects /
        blobs, this will cowardly refuse to delete the objects (or the
        bucket). This is to prevent accidental bucket deletion and to
        prevent an extremely long runtime for this method.

        :type force: boolean
        :param force: If True, empties the bucket's objects then deletes it.

        :type connection: :class:`gcloud.storage.connection.Connection` or
                          ``NoneType``
        :param connection: Optional. The connection to use when sending
                           requests. If not provided, falls back to default.

        :raises: :class:`ValueError` if ``force`` is ``True`` and the bucket
                 contains more than 256 objects / blobs.
        """
        connection = _require_connection(connection)
        if force:
            blobs = list(self.list_blobs(
                max_results=self._MAX_OBJECTS_FOR_ITERATION + 1,
                connection=connection))
            if len(blobs) > self._MAX_OBJECTS_FOR_ITERATION:
                message = (
                    'Refusing to delete bucket with more than '
                    '%d objects. If you actually want to delete '
                    'this bucket, please delete the objects '
                    'yourself before calling Bucket.delete().'
                ) % (self._MAX_OBJECTS_FOR_ITERATION,)
                raise ValueError(message)

            # Ignore 404 errors on delete.
            self.delete_blobs(blobs, on_error=lambda blob: None,
                              connection=connection)

        # We intentionally pass `_target_object=None` since a DELETE
        # request has no response value (whether in a standard request or
        # in a batch request).
        connection.api_request(method='DELETE', path=self.path,
                               _target_object=None)
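Tying this together, a forced delete of a small scratch bucket reads as follows (a sketch; remember the 256-object ceiling documented above):

    bucket = storage.get_bucket('scratch-bucket')
    bucket.delete(force=True)  # empties the bucket, then deletes it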
Example #17
    def make_public(self, recursive=False, future=False, connection=None):
        """Make a bucket public.

        If ``recursive=True`` and the bucket contains more than 256
        objects / blobs, this will cowardly refuse to make the objects
        public. This is to prevent an extremely long runtime for this method.

        :type recursive: boolean
        :param recursive: If True, this will make all blobs inside the bucket
                          public as well.

        :type future: boolean
        :param future: If True, this will make all objects created in the
                       future public as well.

        :type connection: :class:`gcloud.storage.connection.Connection` or
                          ``NoneType``
        :param connection: Optional. The connection to use when sending
                           requests. If not provided, falls back to default.
        """
        connection = _require_connection(connection)

        self.acl.all().grant_read()
        self.acl.save(connection=connection)

        if future:
            doa = self.default_object_acl
            if not doa.loaded:
                doa.reload(connection=connection)
            doa.all().grant_read()
            doa.save(connection=connection)

        if recursive:
            blobs = list(self.list_blobs(
                projection='full',
                max_results=self._MAX_OBJECTS_FOR_ITERATION + 1,
                connection=connection))
            if len(blobs) > self._MAX_OBJECTS_FOR_ITERATION:
                message = (
                    'Refusing to make public recursively with more than '
                    '%d objects. If you actually want to make every object '
                    'in this bucket public, please do it on the objects '
                    'yourself.'
                ) % (self._MAX_OBJECTS_FOR_ITERATION,)
                raise ValueError(message)

            for blob in blobs:
                blob.acl.all().grant_read()
                blob.acl.save(connection=connection)
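For instance, making a bucket and everything already in it world-readable, including future objects, is a single call (a sketch; per the code above, more than 256 existing blobs raises ``ValueError``):

    bucket = storage.get_bucket('www-bucket')
    bucket.make_public(recursive=True, future=True)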
Example #18
    def delete(self, connection=None):
        """Deletes a blob from Cloud Storage.

        :type connection: :class:`gcloud.storage.connection.Connection` or
                          ``NoneType``
        :param connection: Optional. The connection to use when sending
                           requests. If not provided, falls back to default.

        :rtype: :class:`Blob`
        :returns: The blob that was just deleted.
        :raises: :class:`gcloud.exceptions.NotFound`
                 (propagated from
                 :meth:`gcloud.storage.bucket.Bucket.delete_blob`).
        """
        connection = _require_connection(connection)
        return self.bucket.delete_blob(self.name, connection=connection)
Example #19
    def reload(self, connection=None):
        """Reload the ACL data from Cloud Storage.

        :type connection: :class:`gcloud.storage.connection.Connection` or None
        :param connection: explicit connection to use for API request;
                           defaults to instance property.
        """
        path = self.reload_path
        connection = _require_connection(connection)

        self.entities.clear()

        found = connection.api_request(method='GET', path=path)
        self.loaded = True
        for entry in found.get('items', ()):
            self.add_entity(self.entity_from_dict(entry))
Example #20
    def _client_or_connection(client):
        """Temporary method to get a connection from a client.

        If the client is null, gets the connection from the environment.

        :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to default connection.

        :rtype: :class:`gcloud.storage.connection.Connection`
        :returns: The connection determined from the ``client`` or environment.
        """
        if client is None:
            return _require_connection()
        else:
            return client.connection
Example #21
    def generate_signed_url(self,
                            expiration,
                            method='GET',
                            connection=None,
                            credentials=None):
        """Generates a signed URL for this blob.

        If you have a blob that you want to allow access to for a set
        amount of time, you can use this method to generate a URL that
        is only valid within a certain time period.

        This is particularly useful if you don't want publicly
        accessible blobs, but don't want to require users to explicitly
        log in.

        :type expiration: int, long, datetime.datetime, datetime.timedelta
        :param expiration: When the signed URL should expire.

        :type method: string
        :param method: The HTTP verb that will be used when requesting the URL.

        :type connection: :class:`gcloud.storage.connection.Connection` or
                          ``NoneType``
        :param connection: Optional. The connection to use when sending
                           requests. If not provided, falls back to default.

        :type credentials: :class:`oauth2client.client.OAuth2Credentials` or
                           :class:`NoneType`
        :param credentials: The OAuth2 credentials to use to sign the URL.

        :rtype: string
        :returns: A signed URL you can use to access the resource
                  until expiration.
        """
        resource = '/{bucket_name}/{quoted_name}'.format(
            bucket_name=self.bucket.name,
            quoted_name=quote(self.name, safe=''))

        if credentials is None:
            connection = _require_connection(connection)
            credentials = connection.credentials

        return generate_signed_url(credentials,
                                   resource=resource,
                                   api_access_endpoint=_API_ACCESS_ENDPOINT,
                                   expiration=expiration,
                                   method=method)
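Since ``expiration`` accepts a ``datetime.timedelta``, relative lifetimes are easy to express. A sketch, assuming the connection's credentials are able to sign URLs:

    import datetime

    url = blob.generate_signed_url(
        expiration=datetime.timedelta(hours=1), method='GET')
    # `url` can now be handed out without further authentication.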
Example #22
    def delete_blob(self, blob_name, connection=None):
        """Deletes a blob from the current bucket.

        If the blob isn't found (backend 404), raises a
        :class:`gcloud.exceptions.NotFound`.

        For example::

          >>> from gcloud.exceptions import NotFound
          >>> from gcloud import storage
          >>> connection = storage.get_connection()
          >>> bucket = storage.get_bucket('my-bucket', connection=connection)
          >>> print bucket.list_blobs()
          [<Blob: my-bucket, my-file.txt>]
          >>> bucket.delete_blob('my-file.txt')
          >>> try:
          ...   bucket.delete_blob('doesnt-exist')
          ... except NotFound:
          ...   pass

        :type blob_name: string
        :param blob_name: A blob name to delete.

        :type connection: :class:`gcloud.storage.connection.Connection` or
                          ``NoneType``
        :param connection: Optional. The connection to use when sending
                           requests. If not provided, falls back to default.

        :raises: :class:`gcloud.exceptions.NotFound` (to suppress
                 the exception, call ``delete_blobs``, passing a no-op
                 ``on_error`` callback, e.g.::

                 >>> bucket.delete_blobs([blob], on_error=lambda blob: None)
        """
        connection = _require_connection(connection)
        blob_path = Blob.path_helper(self.path, blob_name)
        # We intentionally pass `_target_object=None` since a DELETE
        # request has no response value (whether in a standard request or
        # in a batch request).
        connection.api_request(method='DELETE',
                               path=blob_path,
                               _target_object=None)
Example #23
    def get_blob(self, blob_name, connection=None):
        """Get a blob object by name.

        This will return None if the blob doesn't exist::

          >>> from gcloud import storage
          >>> connection = storage.get_connection()
          >>> bucket = storage.get_bucket('my-bucket', connection=connection)
          >>> print bucket.get_blob('/path/to/blob.txt')
          <Blob: my-bucket, /path/to/blob.txt>
          >>> print bucket.get_blob('/does-not-exist.txt')
          None

        :type blob_name: string
        :param blob_name: The name of the blob to retrieve.

        :type connection: :class:`gcloud.storage.connection.Connection` or
                          ``NoneType``
        :param connection: Optional. The connection to use when sending
                           requests. If not provided, falls back to default.

        :rtype: :class:`gcloud.storage.blob.Blob` or None
        :returns: The blob object if it exists, otherwise None.
        """
        connection = _require_connection(connection)
        blob = Blob(bucket=self, name=blob_name)
        try:
            response = connection.api_request(method='GET',
                                              path=blob.path,
                                              _target_object=blob)
            # NOTE: We assume response.get('name') matches `blob_name`.
            blob._set_properties(response)
            # NOTE: This will not fail immediately in a batch. However, when
            #       Batch.finish() is called, the resulting `NotFound` will be
            #       raised.
            return blob
        except NotFound:
            return None
Example #24
    def __init__(self, connection, extra_params=None):
        connection = _require_connection(connection)
        super(_BucketIterator, self).__init__(
            connection=connection, path='/b', extra_params=extra_params)
Example #25
def list_buckets(project=None,
                 max_results=None,
                 page_token=None,
                 prefix=None,
                 projection='noAcl',
                 fields=None,
                 connection=None):
    """Get all buckets in the project.

    This will not populate the list of blobs available in each
    bucket.

      >>> from gcloud import storage
      >>> for bucket in storage.list_buckets():
      ...   print bucket

    This implements "storage.buckets.list".

    :type project: string or ``NoneType``
    :param project: Optional. The project to use when listing all buckets.
                    If not provided, falls back to default.

    :type max_results: integer or ``NoneType``
    :param max_results: Optional. Maximum number of buckets to return.

    :type page_token: string or ``NoneType``
    :param page_token: Optional. Opaque marker for the next "page" of buckets.
                       If not passed, will return the first page of buckets.

    :type prefix: string or ``NoneType``
    :param prefix: Optional. Filter results to buckets whose names begin with
                   this prefix.

    :type projection: string or ``NoneType``
    :param projection: If used, must be 'full' or 'noAcl'. Defaults to
                       'noAcl'. Specifies the set of properties to return.

    :type fields: string or ``NoneType``
    :param fields: Selector specifying which fields to include in a
                   partial response. Must be a comma-separated list of
                   fields. For example, to get a partial response with just
                   the next page token and the ID of each bucket returned:
                   'items/id,nextPageToken'

    :type connection: :class:`gcloud.storage.connection.Connection` or
                      ``NoneType``
    :param connection: Optional. The connection to use when sending requests.
                       If not provided, falls back to default.

    :rtype: iterable of :class:`gcloud.storage.bucket.Bucket` objects.
    :returns: All buckets belonging to this project.
    """
    connection = _require_connection(connection)
    if project is None:
        project = get_default_project()
    extra_params = {'project': project}

    if max_results is not None:
        extra_params['maxResults'] = max_results

    if prefix is not None:
        extra_params['prefix'] = prefix

    extra_params['projection'] = projection

    if fields is not None:
        extra_params['fields'] = fields

    result = _BucketIterator(connection=connection, extra_params=extra_params)
    # Page token must be handled specially since the base `Iterator`
    # class has it as a reserved property.
    if page_token is not None:
        result.next_page_token = page_token
    return result
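For example, listing only the buckets whose names start with a given prefix (a sketch assuming a default project and connection):

    from gcloud import storage

    for bucket in storage.list_buckets(prefix='staging-'):
        print(bucket.name)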
Example #26
    def upload_from_file(self,
                         file_obj,
                         rewind=False,
                         size=None,
                         content_type=None,
                         num_retries=6,
                         connection=None):
        """Upload the contents of this blob from a file-like object.

        The content type of the upload will be one of:

        - The value passed in to the function (if any)
        - The value stored on the current blob
        - The default value of 'application/octet-stream'

        .. note::
           The effect of uploading to an existing blob depends on the
           "versioning" and "lifecycle" policies defined on the blob's
           bucket.  In the absence of those policies, upload will
           overwrite any existing contents.

           See the `object versioning
           <https://cloud.google.com/storage/docs/object-versioning>`_ and
           `lifecycle <https://cloud.google.com/storage/docs/lifecycle>`_
           API documents for details.

        :type file_obj: file
        :param file_obj: A file handle open for reading.

        :type rewind: boolean
        :param rewind: If True, seek to the beginning of the file handle before
                       writing the file to Cloud Storage.

        :type size: int
        :param size: The number of bytes to read from the file handle.
                     If not provided, we'll try to guess the size using
                     :func:`os.fstat`

        :type content_type: string or ``NoneType``
        :param content_type: Optional type of content being uploaded.

        :type num_retries: integer
        :param num_retries: Number of upload retries. Defaults to 6.

        :type connection: :class:`gcloud.storage.connection.Connection` or
                          ``NoneType``
        :param connection: Optional. The connection to use when sending
                           requests. If not provided, falls back to default.
        """
        connection = _require_connection(connection)
        content_type = (content_type or self._properties.get('contentType')
                        or 'application/octet-stream')

        # Rewind the file if desired.
        if rewind:
            file_obj.seek(0, os.SEEK_SET)

        # Get the basic stats about the file.
        total_bytes = size or os.fstat(file_obj.fileno()).st_size
        headers = {
            'Accept': 'application/json',
            'Accept-Encoding': 'gzip, deflate',
            'User-Agent': connection.USER_AGENT,
        }

        upload = transfer.Upload(file_obj,
                                 content_type,
                                 total_bytes,
                                 auto_transfer=False,
                                 chunksize=self.chunk_size)

        url_builder = _UrlBuilder(bucket_name=self.bucket.name,
                                  object_name=self.name)
        upload_config = _UploadConfig()

        # Temporary URL, until we know simple vs. resumable.
        base_url = connection.API_BASE_URL + '/upload'
        upload_url = connection.build_api_url(api_base_url=base_url,
                                              path=self.bucket.path + '/o')

        # Use apitools 'Upload' facility.
        request = http_wrapper.Request(upload_url, 'POST', headers)

        upload.ConfigureRequest(upload_config, request, url_builder)
        query_params = url_builder.query_params
        base_url = connection.API_BASE_URL + '/upload'
        request.url = connection.build_api_url(api_base_url=base_url,
                                               path=self.bucket.path + '/o',
                                               query_params=query_params)
        upload.InitializeUpload(request, connection.http)

        # Should we be passing callbacks through from caller?  We can't
        # pass them as None, because apitools wants to print to the console
        # by default.
        if upload.strategy == transfer.RESUMABLE_UPLOAD:
            http_response = upload.StreamInChunks(
                callback=lambda *args: None,
                finish_callback=lambda *args: None)
        else:
            http_response = http_wrapper.MakeRequest(connection.http,
                                                     request,
                                                     retries=num_retries)
        response_content = http_response.content
        if not isinstance(response_content,
                          six.string_types):  # pragma: NO COVER  Python3
            response_content = response_content.decode('utf-8')
        self._set_properties(json.loads(response_content))
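End to end, an upload from a local file might look like this (a sketch; ``bucket`` and the filename are placeholders, and the content type falls back as described above):

    blob = Blob(bucket=bucket, name='photo.jpg')
    with open('photo.jpg', 'rb') as file_obj:
        blob.upload_from_file(file_obj, content_type='image/jpeg')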
Example #27
    def _callFUT(self, connection=None):
        from gcloud.storage._helpers import _require_connection
        return _require_connection(connection=connection)
Example #28
def list_buckets(project=None, max_results=None, page_token=None, prefix=None,
                 projection='noAcl', fields=None, connection=None):
    """Get all buckets in the project.

    This will not populate the list of blobs available in each
    bucket.

      >>> from gcloud import storage
      >>> for bucket in storage.list_buckets():
      ...   print bucket

    This implements "storage.buckets.list".

    :type project: string or ``NoneType``
    :param project: Optional. The project to use when listing all buckets.
                    If not provided, falls back to default.

    :type max_results: integer or ``NoneType``
    :param max_results: Optional. Maximum number of buckets to return.

    :type page_token: string or ``NoneType``
    :param page_token: Optional. Opaque marker for the next "page" of buckets.
                       If not passed, will return the first page of buckets.

    :type prefix: string or ``NoneType``
    :param prefix: Optional. Filter results to buckets whose names begin with
                   this prefix.

    :type projection: string or ``NoneType``
    :param projection: If used, must be 'full' or 'noAcl'. Defaults to
                       'noAcl'. Specifies the set of properties to return.

    :type fields: string or ``NoneType``
    :param fields: Selector specifying which fields to include in a
                   partial response. Must be a comma-separated list of
                   fields. For example, to get a partial response with just
                   the next page token and the ID of each bucket returned:
                   'items/id,nextPageToken'

    :type connection: :class:`gcloud.storage.connection.Connection` or
                      ``NoneType``
    :param connection: Optional. The connection to use when sending requests.
                       If not provided, falls back to default.

    :rtype: iterable of :class:`gcloud.storage.bucket.Bucket` objects.
    :returns: All buckets belonging to this project.
    """
    connection = _require_connection(connection)
    if project is None:
        project = get_default_project()
    extra_params = {'project': project}

    if max_results is not None:
        extra_params['maxResults'] = max_results

    if prefix is not None:
        extra_params['prefix'] = prefix

    extra_params['projection'] = projection

    if fields is not None:
        extra_params['fields'] = fields

    result = _BucketIterator(connection=connection,
                             extra_params=extra_params)
    # Page token must be handled specially since the base `Iterator`
    # class has it as a reserved property.
    if page_token is not None:
        result.next_page_token = page_token
    return iter(result)