def copy_blob(self, blob, destination_bucket, new_name=None, client=None):
    """Copy ``blob`` into ``destination_bucket``, optionally renaming it.

    :type blob: :class:`gcloud.storage.blob.Blob`
    :param blob: The blob to be copied.

    :type destination_bucket: :class:`gcloud.storage.bucket.Bucket`
    :param destination_bucket: The bucket receiving the copy.

    :type new_name: string
    :param new_name: (optional) the new name for the copied file.

    :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
    :param client: Optional. The client to use.  If not passed, falls back
                   to the ``client`` stored on the current bucket.

    :rtype: :class:`gcloud.storage.blob.Blob`
    :returns: The new Blob.
    """
    client = self._require_client(client)
    # Keep the source blob's name unless the caller supplied one.
    target_name = blob.name if new_name is None else new_name
    target = Blob(bucket=destination_bucket, name=target_name)
    copy_path = '%s/copyTo%s' % (blob.path, target.path)
    result = client.connection.api_request(
        method='POST', path=copy_path, _target_object=target)
    target._set_properties(result)
    return target
def _makeOne(self, *args, **kw):
    # Test helper: construct a Blob and seed its property dict directly,
    # bypassing any API round trip.
    from gcloud.storage.blob import Blob
    props = kw.pop("properties", None)
    instance = Blob(*args, **kw)
    if props:
        instance._properties = props
    else:
        instance._properties = {}
    return instance
def get_blob(self, blob_name, client=None):
    """Fetch a blob by name, returning ``None`` when it does not exist::

      >>> from gcloud import storage
      >>> client = storage.Client()
      >>> bucket = client.get_bucket('my-bucket')
      >>> print bucket.get_blob('/path/to/blob.txt')
      <Blob: my-bucket, /path/to/blob.txt>
      >>> print bucket.get_blob('/does-not-exist.txt')
      None

    :type blob_name: string
    :param blob_name: The name of the blob to retrieve.

    :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
    :param client: Optional. The client to use.  If not passed, falls back
                   to default connection.

    :rtype: :class:`gcloud.storage.blob.Blob` or None
    :returns: The blob object if it exists, otherwise None.
    """
    connection = self._client_or_connection(client)
    target = Blob(bucket=self, name=blob_name)
    try:
        found = connection.api_request(method="GET", path=target.path,
                                       _target_object=target)
        # The backend echoes the object's metadata; its 'name' field is
        # assumed to match ``blob_name``.
        target._set_properties(found)
        # In a batch this does not fail immediately; the NotFound is
        # raised later, when Batch.finish() is called.
        return target
    except NotFound:
        return None
def copy_blob(blob, destination_bucket, new_name=None, connection=None):
    """Copy the given blob to the given bucket, optionally with a new name.

    :type blob: :class:`gcloud.storage.blob.Blob`
    :param blob: The blob to be copied.  Note: this must be a ``Blob``
        instance (its ``name`` and ``path`` attributes are used below);
        a plain string name is not accepted, despite what earlier
        documentation suggested.

    :type destination_bucket: :class:`gcloud.storage.bucket.Bucket`
    :param destination_bucket: The bucket into which the blob should be
                               copied.

    :type new_name: string
    :param new_name: (optional) the new name for the copied file.

    :type connection: :class:`gcloud.storage.connection.Connection` or
                      ``NoneType``
    :param connection: Optional. The connection to use when sending
                       requests.  If not provided, falls back to default.

    :rtype: :class:`gcloud.storage.blob.Blob`
    :returns: The new Blob.
    """
    connection = _require_connection(connection)
    if new_name is None:
        # Default to keeping the source blob's name in the destination.
        new_name = blob.name
    new_blob = Blob(bucket=destination_bucket, name=new_name)
    api_path = blob.path + '/copyTo' + new_blob.path
    copy_result = connection.api_request(method='POST', path=api_path,
                                         _target_object=new_blob)
    new_blob._set_properties(copy_result)
    return new_blob
def get_blob(self, blob_name):
    """Fetch a blob object by name; ``None`` when it does not exist::

      >>> from gcloud import storage
      >>> connection = storage.get_connection()
      >>> bucket = storage.get_bucket('my-bucket', connection=connection)
      >>> print bucket.get_blob('/path/to/blob.txt')
      <Blob: my-bucket, /path/to/blob.txt>
      >>> print bucket.get_blob('/does-not-exist.txt')
      None

    :type blob_name: string
    :param blob_name: The name of the blob to retrieve.

    :rtype: :class:`gcloud.storage.blob.Blob` or None
    :returns: The blob object if it exists, otherwise None.
    """
    # Build a throwaway blob just to derive the request path.
    probe = Blob(bucket=self, name=blob_name)
    try:
        found = self.connection.api_request(method='GET', path=probe.path)
        # Rebuild the result from the name the backend reports (expected
        # to equal ``blob_name``) and attach the returned metadata.
        result = Blob(found.get('name'), bucket=self)
        result._properties = found
        return result
    except NotFound:
        return None
def copy_blob(self, blob, destination_bucket, new_name=None, client=None):
    """Copy the given blob to the given bucket, optionally with a new name.

    :type blob: :class:`gcloud.storage.blob.Blob`
    :param blob: The blob to be copied.

    :type destination_bucket: :class:`gcloud.storage.bucket.Bucket`
    :param destination_bucket: The bucket into which the blob should be
                               copied.

    :type new_name: string
    :param new_name: (optional) the new name for the copied file.

    :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
    :param client: Optional. The client to use.  If not passed, falls back
                   to the ``client`` stored on the current bucket.

    :rtype: :class:`gcloud.storage.blob.Blob`
    :returns: The new Blob.
    """
    client = self._require_client(client)
    if new_name is None:
        new_name = blob.name
    destination = Blob(bucket=destination_bucket, name=new_name)
    # The copy endpoint is addressed as <source-path>/copyTo<target-path>.
    request_path = ''.join([blob.path, '/copyTo', destination.path])
    response = client.connection.api_request(
        method='POST', path=request_path, _target_object=destination)
    destination._set_properties(response)
    return destination
def upload_blob(self, source_path, destination_path, destination_bucket_name, content_type):
    """Upload the local file at ``source_path`` to ``destination_path``
    in the named bucket, tagged with ``content_type``."""
    bucket = self.__client.get_bucket(destination_bucket_name)
    target = Blob(destination_path, bucket)
    # Stream the file in binary mode; the handle is closed automatically.
    with open(source_path, 'rb') as source:
        target.upload_from_file(source, content_type=content_type)
def __init__(self, name, mode, storage, buffer_size=None):
    """Bind this file wrapper to ``name`` within ``storage``'s bucket."""
    self.name = name
    self._storage = storage
    self._mode = mode
    # Remote handle for this object within the storage bucket.
    self.blob = Blob(self.name, storage.bucket)
    # Local file handle is created lazily; nothing has been written yet.
    self._file = None
    self._is_dirty = False
def _save(self, name, content):
    """
    Save new content to the file specified by name.  The content should
    be a proper File object or any python file-like object, ready to be
    read from the beginning.
    """
    # NOTE(review): the stored path is derived from ``content.name``,
    # not the ``name`` argument — confirm this is intentional.
    if self._location:
        path = os.path.join(self._location, content.name)
    else:
        path = content.name
    target = Blob(path, self._bucket)
    target.upload_from_file(content, size=content.size)
    return target.name
class GoogleCloudFile(File):
    """Django ``File`` wrapper backed by a Google Cloud Storage blob.

    Reads are buffered through a :class:`SpooledTemporaryFile`; writes are
    accumulated locally and flushed to the blob on :meth:`close`.
    """

    def __init__(self, name, mode, storage, buffer_size=None):
        self.name = name
        self._mode = mode
        self._storage = storage
        # Remote handle for this object within the storage bucket.
        self.blob = Blob(self.name, storage.bucket)
        # Local spooled buffer; created lazily in _get_file.
        self._file = None
        self._is_dirty = False

    @property
    def size(self):
        # Size as reported by the blob's metadata, not the local buffer.
        return self.blob.size

    def _get_file(self):
        """Return the local buffer, creating (and for read modes,
        populating) it on first access."""
        if self._file is None:
            self._file = SpooledTemporaryFile(
                max_size=self._storage.max_memory_size,
                suffix=".GSStorageFile",
                dir=setting("FILE_UPLOAD_TEMP_DIR", None))
            if 'r' in self._mode:
                self._is_dirty = False
                # Pull the remote content down, then rewind for the caller.
                self.blob.download_to_file(self._file)
                self._file.seek(0)
        return self._file

    def _set_file(self, value):
        self._file = value

    # Expose the buffer as a property so assignment replaces it wholesale.
    file = property(_get_file, _set_file)

    def read(self, *args, **kwargs):
        """Read from the buffer; only valid when opened with 'r'."""
        if 'r' not in self._mode:
            raise AttributeError("File was not opened in read mode.")
        return super(GoogleCloudFile, self).read(*args, **kwargs)

    def write(self, content, *args, **kwargs):
        """Write to the buffer; only valid when opened with 'w'.
        Marks the file dirty so close() uploads it."""
        if 'w' not in self._mode:
            raise AttributeError("File was not opened in write mode.")
        self._is_dirty = True
        return super(GoogleCloudFile, self).write(force_bytes(content), *args, **kwargs)

    def close(self):
        """Flush dirty content to the blob, then close the local buffer."""
        if self._file is not None:
            if self._is_dirty:
                self.file.seek(0)
                # Guess the type from the filename, but prefer an explicit
                # content_type attribute on the buffer if one exists.
                content_type, _ = mimetypes.guess_type(self.name)
                content_type = getattr(self.file, 'content_type', content_type)
                # NOTE(review): no default here — raises AttributeError if
                # the buffer has no ``size`` attribute; confirm intended.
                size = getattr(self.file, 'size')
                self.blob.upload_from_file(self.file, content_type=content_type, size=size)
            self._file.close()
            self._file = None
def get_items_from_response(self, response):
    """Yield :class:`.storage.blob.Blob` items from ``response``.

    :type response: dict
    :param response: The JSON API response for a page of blobs.
    """
    # Record any prefixes (pseudo-directories) returned with this page.
    self.prefixes = tuple(response.get('prefixes', ()))
    for resource in response.get('items', []):
        entry = Blob(resource.get('name'), bucket=self.bucket)
        entry._set_properties(resource)
        yield entry
def get_items_from_response(self, response):
    """Yield :class:`.storage.blob.Blob` items from ``response``.

    :type response: dict
    :param response: The JSON API response for a page of blobs.
    """
    # Record any prefixes (pseudo-directories) returned with this page.
    self.prefixes = tuple(response.get('prefixes', ()))
    for resource in response.get('items', []):
        entry = Blob(resource.get('name'), bucket=self.bucket)
        # Attach the raw resource dict as the blob's properties.
        entry._properties = resource
        yield entry
def get_items_from_response(self, response):
    """Yield :class:`.storage.blob.Blob` items from ``response``.

    :type response: dict
    :param response: The JSON API response for a page of blobs.
    """
    # Track this page's prefixes and fold them into the running set.
    page_prefixes = tuple(response.get("prefixes", ()))
    self._current_prefixes = page_prefixes
    self.prefixes.update(page_prefixes)
    for resource in response.get("items", []):
        entry = Blob(resource.get("name"), bucket=self.bucket)
        entry._set_properties(resource)
        yield entry
def copy_blob(self, blob, destination_bucket, new_name=None, client=None,
              versions=False):
    """Copy the given blob to the given bucket, optionally with a new name.

    :type blob: :class:`gcloud.storage.blob.Blob`
    :param blob: The blob to be copied.

    :type destination_bucket: :class:`gcloud.storage.bucket.Bucket`
    :param destination_bucket: The bucket into which the blob should be
                               copied.

    :type new_name: string
    :param new_name: (optional) the new name for the copied file.

    :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
    :param client: Optional. The client to use.  If not passed, falls back
                   to the ``client`` stored on the current bucket.

    :type versions: boolean
    :param versions: Optional. Copy each version.

    :rtype: :class:`gcloud.storage.blob.Blob`
    :returns: The new Blob if versions is ``False``, or will return a list
              of new blob versions, and their old blob version
              counterparts.
    """
    client = self._require_client(client)
    if new_name is None:
        new_name = blob.name
    # tmp_blob exists only to build the copyTo request path; it is
    # discarded immediately after.
    tmp_blob = Blob(bucket=destination_bucket, name=new_name)
    api_path = blob.path + '/copyTo' + tmp_blob.path
    del tmp_blob
    # TODO(tsinha): Support multi-page results from list_blobs
    # NOTE(review): prefix matching may also pick up blobs whose names
    # merely start with ``blob.name`` — confirm exact-name intent.
    old_blobs = list(self.list_blobs(prefix=blob.name, versions=versions))
    new_blobs = []
    # Copy each listed generation to the same destination name.
    for old_blob in old_blobs:
        new_blob = Blob(bucket=destination_bucket, name=new_name)
        copy_result = client.connection.api_request(
            method='POST', path=api_path,
            query_params={'sourceGeneration': old_blob.generation},
            _target_object=new_blob)
        new_blob._set_properties(copy_result)
        new_blobs.append(new_blob)
    if versions:
        return (new_blobs, old_blobs)
    else:
        # NOTE(review): raises IndexError when the listing returned no
        # blobs — confirm whether a clearer error is wanted here.
        return new_blobs[0]
def delete_blob(self, blob_name):
    """Delete a blob from the current bucket.

    If the blob isn't found (backend 404), raises a
    :class:`gcloud.exceptions.NotFound`.  For example::

      >>> from gcloud.exceptions import NotFound
      >>> from gcloud import storage
      >>> connection = storage.get_connection()
      >>> bucket = storage.get_bucket('my-bucket', connection=connection)
      >>> print bucket.list_blobs()
      [<Blob: my-bucket, my-file.txt>]
      >>> bucket.delete_blob('my-file.txt')
      >>> try:
      ...   bucket.delete_blob('doesnt-exist')
      ... except NotFound:
      ...   pass

    :type blob_name: string
    :param blob_name: A blob name to delete.

    :raises: :class:`gcloud.exceptions.NotFound` (to suppress the
             exception, call ``delete_blobs``, passing a no-op
             ``on_error`` callback, e.g.::

             >>> bucket.delete_blobs([blob], on_error=lambda blob: None)
    """
    target_path = Blob.path_helper(self.path, blob_name)
    self.connection.api_request(method='DELETE', path=target_path)
def get_blob(self, blob):
    """Fetch a blob object by name or handle; ``None`` if missing::

      >>> from gcloud import storage
      >>> connection = storage.get_connection(project)
      >>> bucket = connection.get_bucket('my-bucket')
      >>> print bucket.get_blob('/path/to/blob.txt')
      <Blob: my-bucket, /path/to/blob.txt>
      >>> print bucket.get_blob('/does-not-exist.txt')
      None

    :type blob: string or :class:`gcloud.storage.blob.Blob`
    :param blob: The name of the blob to retrieve.

    :rtype: :class:`gcloud.storage.blob.Blob` or None
    :returns: The blob object if it exists, otherwise None.
    """
    # Accept either a Blob or a plain name; normalize to a Blob first.
    target = self.new_blob(blob)
    try:
        found = self.connection.api_request(method='GET', path=target.path)
        return Blob(None, bucket=self, properties=found)
    except NotFound:
        return None
def upload_file_object(self, file_obj, blob_name=None, connection=None):
    """Shortcut method to upload a file object into this bucket.

    Use this method to quickly put a local file in Cloud Storage.

    For example::

      >>> from gcloud import storage
      >>> connection = storage.get_connection()
      >>> bucket = storage.get_bucket('my-bucket', connection=connection)
      >>> bucket.upload_file_object(open('~/my-file.txt'),
      ...                           'remote-text-file.txt')
      >>> print bucket.list_blobs()
      [<Blob: my-bucket, remote-text-file.txt>]

    If you don't provide a blob name, we will try to upload the file
    using the local filename (**not** the complete path)::

      >>> from gcloud import storage
      >>> connection = storage.get_connection()
      >>> bucket = storage.get_bucket('my-bucket', connection=connection)
      >>> bucket.upload_file_object(open('~/my-file.txt'))
      >>> print bucket.list_blobs()
      [<Blob: my-bucket, my-file.txt>]

    :type file_obj: file
    :param file_obj: A file handle open for reading.

    :type blob_name: string
    :param blob_name: The name of the blob to upload the file to. If this
                      is blank, we will try to upload the file to the root
                      of the bucket with the same name as on your local
                      file system.

    :type connection: :class:`gcloud.storage.connection.Connection` or
                      ``NoneType``
    :param connection: Optional. The connection to use when sending
                       requests.  If not provided, falls back to default.

    :rtype: :class:`Blob`
    :returns: The updated Blob object.
    """
    if blob_name is None:
        # Fall back to the basename of the underlying file handle.
        blob_name = os.path.basename(file_obj.name)
    blob = Blob(bucket=self, name=blob_name)
    blob.upload_from_file(file_obj, connection=connection)
    return blob
def get_blob(self, blob_name, client=None, generation=None):
    """Fetch a blob by name (optionally a specific generation).

    Returns ``None`` if the blob doesn't exist::

      >>> from gcloud import storage
      >>> client = storage.Client()
      >>> bucket = client.get_bucket('my-bucket')
      >>> print bucket.get_blob('/path/to/blob.txt')
      <Blob: my-bucket, /path/to/blob.txt>
      >>> print bucket.get_blob('/does-not-exist.txt')
      None
      >>> print bucket.get_blob(
      ...     '/path/to/versioned_blob.txt',
      ...     generation=generation_id)
      <Blob: my-bucket, /path/to/versioned_blob.txt>

    :type blob_name: string
    :param blob_name: The name of the blob to retrieve.

    :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
    :param client: Optional. The client to use.  If not passed, falls back
                   to the ``client`` stored on the current bucket.

    :type generation: int
    :param generation: Optional. The generation id to retrieve in a bucket
                       that supports versioning.

    :rtype: :class:`gcloud.storage.blob.Blob` or None
    :returns: The blob object if it exists, otherwise None.
    """
    client = self._require_client(client)
    target = Blob(bucket=self, name=blob_name, generation=generation)
    # path_with_params folds the generation into the query string.
    request_path, params = target.path_with_params
    try:
        found = client.connection.api_request(
            method='GET', path=request_path, query_params=params,
            _target_object=target)
        # The response's 'name' is assumed to match ``blob_name``.
        target._set_properties(found)
        # In a batch this does not fail immediately; the NotFound is
        # raised later, when Batch.finish() is called.
        return target
    except NotFound:
        return None
def upload_file(self, filename, blob_name=None, client=None):
    """Shortcut method to upload a file into this bucket.

    Use this method to quickly put a local file in Cloud Storage.

    For example::

      >>> from gcloud import storage
      >>> client = storage.Client()
      >>> bucket = client.get_bucket('my-bucket')
      >>> bucket.upload_file('~/my-file.txt', 'remote-text-file.txt')
      >>> print bucket.list_blobs()
      [<Blob: my-bucket, remote-text-file.txt>]

    If you don't provide a blob name, we will try to upload the file
    using the local filename (**not** the complete path)::

      >>> from gcloud import storage
      >>> client = storage.Client()
      >>> bucket = client.get_bucket('my-bucket')
      >>> bucket.upload_file('~/my-file.txt')
      >>> print bucket.list_blobs()
      [<Blob: my-bucket, my-file.txt>]

    :type filename: string
    :param filename: Local path to the file you want to upload.

    :type blob_name: string
    :param blob_name: The name of the blob to upload the file to. If this
                      is blank, we will try to upload the file to the root
                      of the bucket with the same name as on your local
                      file system.

    :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
    :param client: Optional. The client to use.  If not passed, falls back
                   to the ``client`` stored on the current bucket.

    :rtype: :class:`Blob`
    :returns: The updated Blob object.
    """
    # Default the remote name to the local basename.
    target_name = os.path.basename(filename) if blob_name is None else blob_name
    target = Blob(bucket=self, name=target_name)
    target.upload_from_filename(filename, client=client)
    return target
def get_items_from_response(self, response):
    """Yield :class:`.storage.blob.Blob` items from ``response``.

    :type response: dict
    :param response: The JSON API response for a page of blobs.
    """
    # Record any prefixes (pseudo-directories) returned with this page.
    self.prefixes = tuple(response.get('prefixes', ()))
    for resource in response.get('items', []):
        # Blob name comes from the properties dict, so pass None here.
        yield Blob(None, properties=resource, bucket=self.bucket)
def copydir(self, path):
    """
    Copy the contents of the local directory given by path to google
    cloud.  Maintain the same directory structure on remote.

    This is (intentionally) a blocking call, so clients can report errors
    if the transfer fails.

    :type path: string
    :param path: relative or absolute path to the directory that needs to
        be copied

    :return: True when transfer is complete

    :raises OSError: path doesn't exist or permission denied
    :raises ValueError: if the library cannot determine the file size
    :raises gcloud.exceptions.GCloudError: if upload status gives error
        response
    """
    # os.access() returns False both for unreadable AND nonexistent
    # paths; distinguish the two so the error message is accurate.
    if not os.path.exists(path):
        raise OSError('No such file or directory: %s' % path)
    if not os.access(path, os.R_OK):
        raise OSError('Permission denied')
    for filename in find_files(path):
        blob = Blob(filename, self)
        blob.upload_from_filename(filename)
    return True
def upload_file(self, filename, blob_name=None):
    """Shortcut method to upload a file into this bucket.

    Use this method to quickly put a local file in Cloud Storage.

    For example::

      >>> from gcloud import storage
      >>> connection = storage.get_connection()
      >>> bucket = storage.get_bucket('my-bucket', connection=connection)
      >>> bucket.upload_file('~/my-file.txt', 'remote-text-file.txt')
      >>> print bucket.get_all_blobs()
      [<Blob: my-bucket, remote-text-file.txt>]

    If you don't provide a blob name, we will try to upload the file
    using the local filename (**not** the complete path)::

      >>> from gcloud import storage
      >>> connection = storage.get_connection()
      >>> bucket = storage.get_bucket('my-bucket', connection=connection)
      >>> bucket.upload_file('~/my-file.txt')
      >>> print bucket.get_all_blobs()
      [<Blob: my-bucket, my-file.txt>]

    :type filename: string
    :param filename: Local path to the file you want to upload.

    :type blob_name: string
    :param blob_name: The name of the blob to upload the file to. If this
                      is blank, we will try to upload the file to the root
                      of the bucket with the same name as on your local
                      file system.

    :rtype: :class:`Blob`
    :returns: The updated Blob object.
    """
    # Default the remote name to the local basename.
    target_name = os.path.basename(filename) if blob_name is None else blob_name
    target = Blob(bucket=self, name=target_name)
    target.upload_from_filename(filename)
    return target
def get_blob(self, blob_name, connection=None):
    """Fetch a blob by name, returning ``None`` when it does not exist::

      >>> from gcloud import storage
      >>> connection = storage.get_connection()
      >>> bucket = storage.get_bucket('my-bucket', connection=connection)
      >>> print bucket.get_blob('/path/to/blob.txt')
      <Blob: my-bucket, /path/to/blob.txt>
      >>> print bucket.get_blob('/does-not-exist.txt')
      None

    :type blob_name: string
    :param blob_name: The name of the blob to retrieve.

    :type connection: :class:`gcloud.storage.connection.Connection` or
                      ``NoneType``
    :param connection: Optional. The connection to use when sending
                       requests.  If not provided, falls back to default.

    :rtype: :class:`gcloud.storage.blob.Blob` or None
    :returns: The blob object if it exists, otherwise None.
    """
    connection = _require_connection(connection)
    target = Blob(bucket=self, name=blob_name)
    try:
        found = connection.api_request(method='GET', path=target.path,
                                       _target_object=target)
        # The response's 'name' is assumed to match ``blob_name``.
        target._set_properties(found)
        # In a batch this does not fail immediately; the NotFound is
        # raised later, when Batch.finish() is called.
        return target
    except NotFound:
        return None
def copy_blob(self, blob, destination_bucket, new_name=None):
    """Copy the given blob to the given bucket, optionally with a new name.

    :type blob: :class:`gcloud.storage.blob.Blob`
    :param blob: The blob to be copied.  Note: this must be a ``Blob``
        instance (its ``name`` and ``path`` attributes are used below);
        a plain string name is not accepted, despite what earlier
        documentation suggested.

    :type destination_bucket: :class:`gcloud.storage.bucket.Bucket`
    :param destination_bucket: The bucket into which the blob should be
                               copied.

    :type new_name: string
    :param new_name: (optional) the new name for the copied file.

    :rtype: :class:`gcloud.storage.blob.Blob`
    :returns: The new Blob.
    """
    if new_name is None:
        # Default to keeping the source blob's name in the destination.
        new_name = blob.name
    new_blob = Blob(bucket=destination_bucket, name=new_name)
    api_path = blob.path + '/copyTo' + new_blob.path
    copy_result = self.connection.api_request(method='POST', path=api_path)
    new_blob._properties = copy_result
    return new_blob
def delete_blob(self, blob_name, client=None, generation=None):
    """Delete a blob (optionally a specific generation) from this bucket.

    If the blob isn't found (backend 404), raises a
    :class:`gcloud.exceptions.NotFound`.  For example::

      >>> from gcloud.exceptions import NotFound
      >>> from gcloud import storage
      >>> client = storage.Client()
      >>> bucket = client.get_bucket('my-bucket')
      >>> print bucket.list_blobs()
      [<Blob: my-bucket, my-file.txt>]
      >>> bucket.delete_blob('my-file.txt')
      >>> try:
      ...   bucket.delete_blob('doesnt-exist')
      ... except NotFound:
      ...   pass

    :type blob_name: string
    :param blob_name: A blob name to delete.

    :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
    :param client: Optional. The client to use.  If not passed, falls back
                   to the ``client`` stored on the current bucket.

    :type generation: int
    :param generation: Optional. The generation of this object to delete.
                       Only works on buckets with versioning enabled.

    :raises: :class:`gcloud.exceptions.NotFound` (to suppress the
             exception, call ``delete_blobs``, passing a no-op
             ``on_error`` callback, e.g.::

             >>> bucket.delete_blobs([blob], on_error=lambda blob: None)
    """
    client = self._require_client(client)
    blob_path = Blob.path_helper(self.path, blob_name)
    # Only send a generation filter when one was requested.
    params = {} if generation is None else {'generation': generation}
    # `_target_object=None` because a DELETE has no response value,
    # whether issued directly or inside a batch.
    client.connection.api_request(method='DELETE', path=blob_path,
                                  query_params=params, _target_object=None)
def blob(self, blob_name, chunk_size=None):
    """Factory constructor for blob object.

    .. note::
      This performs no HTTP request; it simply builds a blob object
      owned by this bucket.

    :type blob_name: string
    :param blob_name: The name of the blob to be instantiated.

    :type chunk_size: integer
    :param chunk_size: The size of a chunk of data whenever iterating
                       (1 MB).  This must be a multiple of 256 KB per the
                       API specification.

    :rtype: :class:`gcloud.storage.blob.Blob`
    :returns: The blob object created.
    """
    factory_kwargs = {'name': blob_name, 'bucket': self,
                      'chunk_size': chunk_size}
    return Blob(**factory_kwargs)
def delete_blob(self, blob_name, connection=None):
    """Delete a blob from the current bucket.

    If the blob isn't found (backend 404), raises a
    :class:`gcloud.exceptions.NotFound`.  For example::

      >>> from gcloud.exceptions import NotFound
      >>> from gcloud import storage
      >>> connection = storage.get_connection()
      >>> bucket = storage.get_bucket('my-bucket', connection=connection)
      >>> print bucket.list_blobs()
      [<Blob: my-bucket, my-file.txt>]
      >>> bucket.delete_blob('my-file.txt')
      >>> try:
      ...   bucket.delete_blob('doesnt-exist')
      ... except NotFound:
      ...   pass

    :type blob_name: string
    :param blob_name: A blob name to delete.

    :type connection: :class:`gcloud.storage.connection.Connection` or
                      ``NoneType``
    :param connection: Optional. The connection to use when sending
                       requests.  If not provided, falls back to default.

    :raises: :class:`gcloud.exceptions.NotFound` (to suppress the
             exception, call ``delete_blobs``, passing a no-op
             ``on_error`` callback, e.g.::

             >>> bucket.delete_blobs([blob], on_error=lambda blob: None)
    """
    connection = _require_connection(connection)
    target_path = Blob.path_helper(self.path, blob_name)
    # `_target_object=None` because a DELETE has no response value,
    # whether issued directly or inside a batch.
    connection.api_request(method='DELETE', path=target_path,
                           _target_object=None)
def new_blob(self, blob):
    """Given path name (or Blob), return a :class:`Blob` object.

    Useful when you're not sure whether you hold a ``Blob`` instance or a
    string path name: either is coerced to the corresponding ``Blob``.

    :type blob: string or :class:`gcloud.storage.blob.Blob`
    :param blob: A path name or actual blob object.

    :rtype: :class:`gcloud.storage.blob.Blob`
    :returns: A Blob object with the path provided.
    """
    if isinstance(blob, Blob):
        # Already a Blob: hand it back untouched.
        return blob
    elif isinstance(blob, six.string_types):
        return Blob(bucket=self, name=blob)
    raise TypeError('Invalid blob: %s' % blob)
def __contains__(self, blob_name):
    # Membership test issues an existence probe against the backend.
    probe = Blob(blob_name, bucket=self)
    return probe.exists()
def _makeOne(self, *args, **kw):
    # Test helper: build a Blob, seeding its properties (default: empty).
    from gcloud.storage.blob import Blob
    seeded = kw.pop('properties', None) or {}
    made = Blob(*args, **kw)
    made._properties = seeded
    return made
def exists(self, name):
    """
    Return True if a file referenced by the given name already exists in
    the storage system, or False if the name is available for a new file.
    """
    probe = Blob(name, self._bucket)
    return probe.exists()
def _makeOne(self, *args, **kw):
    # Test helper: construct the Blob under test with the given args.
    from gcloud.storage.blob import Blob
    instance = Blob(*args, **kw)
    return instance
def _open(self, name, mode):
    """
    Retrieve the specified file from storage.

    NOTE(review): ``mode`` is currently ignored and a raw Blob (not a
    File wrapper) is returned — confirm this matches caller expectations.
    """
    remote = Blob(name, self._bucket)
    return remote
def url(self, name):
    """
    Return an absolute URL where the file's contents can be accessed
    directly by a Web browser.
    """
    remote = Blob(name, self._bucket)
    return remote.public_url
def test_new_blob_existing(self):
    # new_blob must return the very same instance when handed a Blob.
    from gcloud.storage.blob import Blob
    bucket = self._makeOne()
    preexisting = Blob(None, bucket=bucket)
    self.assertTrue(bucket.new_blob(preexisting) is preexisting)