def copy_blob(self, blob, destination_bucket, new_name=None, client=None):
    """Copy the given blob to the given bucket, optionally with a new name.

    :type blob: :class:`google.cloud.storage.blob.Blob`
    :param blob: The blob to be copied.

    :type destination_bucket: :class:`google.cloud.storage.bucket.Bucket`
    :param destination_bucket: The bucket into which the blob should be
                               copied.

    :type new_name: string
    :param new_name: (optional) the new name for the copied file.

    :type client: :class:`~google.cloud.storage.client.Client` or
                  ``NoneType``
    :param client: Optional. The client to use. If not passed, falls back
                   to the ``client`` stored on the current bucket.

    :rtype: :class:`google.cloud.storage.blob.Blob`
    :returns: The new Blob.
    """
    client = self._require_client(client)
    if new_name is None:
        new_name = blob.name
    new_blob = Blob(bucket=destination_bucket, name=new_name)
    api_path = blob.path + '/copyTo' + new_blob.path
    copy_result = client.connection.api_request(
        method='POST', path=api_path, _target_object=new_blob)
    new_blob._set_properties(copy_result)
    return new_blob
def get_blob(self, blob_name, client=None):
    """Get a blob object by name.

    This will return None if the blob doesn't exist:

    .. literalinclude:: storage_snippets.py
        :start-after: [START get_blob]
        :end-before: [END get_blob]

    :type blob_name: str
    :param blob_name: The name of the blob to retrieve.

    :type client: :class:`~google.cloud.storage.client.Client` or
                  ``NoneType``
    :param client: Optional. The client to use. If not passed, falls back
                   to the ``client`` stored on the current bucket.

    :rtype: :class:`google.cloud.storage.blob.Blob` or None
    :returns: The blob object if it exists, otherwise None.
    """
    client = self._require_client(client)
    blob = Blob(bucket=self, name=blob_name)
    try:
        response = client._connection.api_request(
            method='GET', path=blob.path, _target_object=blob)
        # NOTE: We assume response.get('name') matches `blob_name`.
        blob._set_properties(response)
        # NOTE: This will not fail immediately in a batch. However, when
        #       Batch.finish() is called, the resulting `NotFound` will be
        #       raised.
        return blob
    except NotFound:
        return None
def copy_blob(self, blob, destination_bucket, new_name=None, client=None,
              preserve_acl=True, source_generation=None):
    """Copy the given blob to the given bucket, optionally with a new name.

    If :attr:`user_project` is set, bills the API request to that project.

    :type blob: :class:`google.cloud.storage.blob.Blob`
    :param blob: The blob to be copied.

    :type destination_bucket: :class:`google.cloud.storage.bucket.Bucket`
    :param destination_bucket: The bucket into which the blob should be
                               copied.

    :type new_name: str
    :param new_name: (optional) the new name for the copied file.

    :type client: :class:`~google.cloud.storage.client.Client` or
                  ``NoneType``
    :param client: Optional. The client to use. If not passed, falls back
                   to the ``client`` stored on the current bucket.

    :type preserve_acl: bool
    :param preserve_acl: Optional. Copies ACL from old blob to new blob.
                         Default: True.

    :type source_generation: long
    :param source_generation: Optional. The generation of the blob to be
                              copied.

    :rtype: :class:`google.cloud.storage.blob.Blob`
    :returns: The new Blob.
    """
    client = self._require_client(client)
    query_params = {}

    if self.user_project is not None:
        query_params['userProject'] = self.user_project

    if source_generation is not None:
        query_params['sourceGeneration'] = source_generation

    if new_name is None:
        new_name = blob.name

    new_blob = Blob(bucket=destination_bucket, name=new_name)
    api_path = blob.path + '/copyTo' + new_blob.path
    copy_result = client._connection.api_request(
        method='POST',
        path=api_path,
        query_params=query_params,
        _target_object=new_blob,
    )

    if not preserve_acl:
        new_blob.acl.save(acl={}, client=client)

    new_blob._set_properties(copy_result)
    return new_blob
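A minimal usage sketch for the method above, assuming default credentials and that the bucket and object names below (all placeholders) already exist:

from google.cloud import storage

client = storage.Client()
source_bucket = client.get_bucket('source-bucket')            # placeholder name
destination_bucket = client.get_bucket('destination-bucket')  # placeholder name

# Fetch the source blob; assumes 'path/to/object.txt' exists in the bucket.
blob = source_bucket.get_blob('path/to/object.txt')

# Copy it into the destination bucket under a new name, keeping the source
# object's ACL (preserve_acl defaults to True).
new_blob = source_bucket.copy_blob(
    blob, destination_bucket, new_name='copied/object.txt')
print(new_blob.name)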
class GoogleCloudFile(File):
    def __init__(self, name, mode, storage):
        self.name = name
        self.mime_type = mimetypes.guess_type(name)[0]
        self._mode = mode
        self._storage = storage
        self.blob = storage.bucket.get_blob(name)
        if not self.blob and 'w' in mode:
            self.blob = Blob(self.name, storage.bucket)
        self._file = None
        self._is_dirty = False

    @property
    def size(self):
        return self.blob.size

    def _get_file(self):
        if self._file is None:
            self._file = SpooledTemporaryFile(
                max_size=self._storage.max_memory_size,
                suffix=".GSStorageFile",
                dir=setting("FILE_UPLOAD_TEMP_DIR", None)
            )
            if 'r' in self._mode:
                self._is_dirty = False
                self.blob.download_to_file(self._file)
                self._file.seek(0)
        return self._file

    def _set_file(self, value):
        self._file = value

    file = property(_get_file, _set_file)

    def read(self, num_bytes=None):
        if 'r' not in self._mode:
            raise AttributeError("File was not opened in read mode.")

        if num_bytes is None:
            num_bytes = -1

        return super(GoogleCloudFile, self).read(num_bytes)

    def write(self, content):
        if 'w' not in self._mode:
            raise AttributeError("File was not opened in write mode.")
        self._is_dirty = True
        return super(GoogleCloudFile, self).write(force_bytes(content))

    def close(self):
        if self._file is not None:
            if self._is_dirty:
                self.file.seek(0)
                self.blob.upload_from_file(self.file,
                                           content_type=self.mime_type)
            self._file.close()
            self._file = None
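A minimal sketch of how this wrapper is used, assuming `storage` is an already-configured backend instance (e.g. django-storages' GoogleCloudStorage) exposing the `bucket` and `max_memory_size` attributes the class expects; the object name is a placeholder:

# Write path: buffer locally, then upload on close().
f = GoogleCloudFile('reports/latest.txt', 'w', storage)
f.write('hello world')    # marks the file dirty; content goes through force_bytes()
f.close()                 # uploads the buffered content with the guessed MIME type

# Read path: download into a spooled temporary file on first access.
f = GoogleCloudFile('reports/latest.txt', 'r', storage)
print(f.read())           # returns the blob contents as bytes
f.close()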
def get_blob(self, blob_name, client=None, encryption_key=None, **kwargs):
    """Get a blob object by name.

    This will return None if the blob doesn't exist:

    .. literalinclude:: snippets.py
        :start-after: [START get_blob]
        :end-before: [END get_blob]

    If :attr:`user_project` is set, bills the API request to that project.

    :type blob_name: str
    :param blob_name: The name of the blob to retrieve.

    :type client: :class:`~google.cloud.storage.client.Client` or
                  ``NoneType``
    :param client: Optional. The client to use. If not passed, falls back
                   to the ``client`` stored on the current bucket.

    :type encryption_key: bytes
    :param encryption_key:
        Optional 32 byte encryption key for customer-supplied encryption.
        See
        https://cloud.google.com/storage/docs/encryption#customer-supplied.

    :type kwargs: dict
    :param kwargs: Keyword arguments to pass to the
                   :class:`~google.cloud.storage.blob.Blob` constructor.

    :rtype: :class:`google.cloud.storage.blob.Blob` or None
    :returns: The blob object if it exists, otherwise None.
    """
    client = self._require_client(client)
    query_params = {}

    if self.user_project is not None:
        query_params['userProject'] = self.user_project

    blob = Blob(bucket=self, name=blob_name, encryption_key=encryption_key,
                **kwargs)
    try:
        headers = _get_encryption_headers(encryption_key)
        response = client._connection.api_request(
            method='GET',
            path=blob.path,
            query_params=query_params,
            headers=headers,
            _target_object=blob,
        )
        # NOTE: We assume response.get('name') matches `blob_name`.
        blob._set_properties(response)
        # NOTE: This will not fail immediately in a batch. However, when
        #       Batch.finish() is called, the resulting `NotFound` will be
        #       raised.
        return blob
    except NotFound:
        return None
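A short sketch of the calling side, assuming default credentials and that the placeholder bucket and object names exist:

from google.cloud import storage

client = storage.Client()
bucket = client.get_bucket('my-bucket')           # placeholder bucket name
blob = bucket.get_blob('path/to/object.txt')      # placeholder object name
if blob is None:
    print('object does not exist')
else:
    # Properties were populated from the GET response.
    print(blob.size, blob.updated)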
def get_items_from_response(self, response):
    """Yield :class:`.storage.blob.Blob` items from response.

    :type response: dict
    :param response: The JSON API response for a page of blobs.
    """
    self._current_prefixes = tuple(response.get('prefixes', ()))
    self.prefixes.update(self._current_prefixes)
    for item in response.get('items', []):
        name = item.get('name')
        blob = Blob(name, bucket=self.bucket)
        blob._set_properties(item)
        yield blob
def download_blob_to_file(self, blob_or_uri, file_obj, start=None, end=None):
    """Download the contents of a blob object or blob URI into a file-like
    object.

    Args:
        blob_or_uri (Union[ \
        :class:`~google.cloud.storage.blob.Blob`, \
        str, \
        ]): The blob resource to download, or a ``gs://`` URI identifying it.
        file_obj (file): A file handle to which to write the blob's data.
        start (int): Optional. The first byte in a range to be downloaded.
        end (int): Optional. The last byte in a range to be downloaded.

    Examples:
        Download a blob using a blob resource.

        >>> from google.cloud import storage
        >>> client = storage.Client()
        >>> bucket = client.get_bucket('my-bucket-name')
        >>> blob = storage.Blob('path/to/blob', bucket)

        >>> with open('file-to-download-to', 'wb') as file_obj:
        >>>     client.download_blob_to_file(blob, file_obj)  # API request.

        Download a blob using a URI.

        >>> from google.cloud import storage
        >>> client = storage.Client()

        >>> with open('file-to-download-to', 'wb') as file_obj:
        >>>     client.download_blob_to_file(
        >>>         'gs://bucket_name/path/to/blob', file_obj)

    """
    try:
        blob_or_uri.download_to_file(file_obj, client=self, start=start,
                                     end=end)
    except AttributeError:
        scheme, netloc, path, query, frag = urlsplit(blob_or_uri)
        if scheme != "gs":
            raise ValueError("URI scheme must be gs")
        bucket = Bucket(self, name=netloc)
        blob_or_uri = Blob(path, bucket)
        blob_or_uri.download_to_file(file_obj, client=self, start=start,
                                     end=end)
def __init__(self, name, mode, storage):
    self.name = name
    self._mode = mode
    self._storage = storage
    self.blob = storage.bucket.get_blob(name)
    if not self.blob and 'w' in mode:
        self.blob = Blob(self.name, storage.bucket)
    self._file = None
    self._is_dirty = False
def save_file(self, file, filename, folder=None, randomize=False,
              extensions=None, acl=None, replace=False, headers=None):
    """
    :param file: file-like object to save
    :param filename: local filename
    :param folder: relative path of sub-folder
    :param randomize: randomize the filename
    :param extensions: iterable of allowed extensions, if not default
    :param acl: ACL policy (if None then uses default)
    :param replace: replace an existing blob of the same name
    :param headers: not used by this backend
    :returns: modified filename
    """
    extensions = extensions or self.extensions

    if not self.filename_allowed(filename, extensions):
        raise FileNotAllowed()

    filename = utils.secure_filename(
        os.path.basename(filename)
    )

    if randomize:
        filename = utils.random_filename(filename)

    if folder:
        filename = folder + "/" + filename

    content_type, _ = mimetypes.guess_type(filename)
    content_type = content_type or 'application/octet-stream'

    blob = self.get_bucket().get_blob(filename)

    # If the blob exists and we explicitly asked not to replace it,
    # leave it untouched and return the computed filename.
    if blob and not replace:
        return filename

    # If the blob doesn't exist yet, create it.
    if not blob:
        blob = Blob(filename, self.get_bucket())

    blob.cache_control = self.cache_control

    file.seek(0)

    acl = acl or self.acl
    blob.upload_from_file(file, rewind=True, content_type=content_type,
                          predefined_acl=acl)
    return filename
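A hedged usage sketch, assuming `store` is an instance of the storage helper this method belongs to, already configured with a bucket, default ACL, cache control, and an extension whitelist that allows `csv` (all names below are placeholders):

import io

with io.BytesIO(b'col_a,col_b\n1,2\n') as fobj:
    stored_name = store.save_file(
        fobj,
        'report.csv',        # placeholder local filename
        folder='exports',    # stored under exports/...
        randomize=True,      # randomize the name to avoid collisions
    )
print(stored_name)           # e.g. 'exports/<random>.csv'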
def _item_to_blob(iterator, item):
    """Convert a JSON blob to the native object.

    .. note::

        This assumes that the ``bucket`` attribute has been
        added to the iterator after being created.

    :type iterator: :class:`~google.cloud.iterator.Iterator`
    :param iterator: The iterator that has retrieved the item.

    :type item: dict
    :param item: An item to be converted to a blob.

    :rtype: :class:`.Blob`
    :returns: The next blob in the page.
    """
    name = item.get('name')
    blob = Blob(name, bucket=iterator.bucket)
    blob._set_properties(item)
    return blob
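Callers normally never invoke this converter directly; a minimal sketch of how the converted blobs surface, assuming this function backs the iterator returned by `Bucket.list_blobs()` and that the placeholder bucket exists:

from google.cloud import storage

client = storage.Client()
bucket = client.get_bucket('my-bucket')          # placeholder bucket name
for blob in bucket.list_blobs(prefix='logs/'):   # each JSON item becomes a Blob
    print(blob.name, blob.size)                  # properties come from the item dict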
def save(self, name, fobj, max_length=None, blob_object=None):
    if not blob_object:
        blob = Blob(name, self.bucket)
    else:
        blob = blob_object

    # Determine the current file's mimetype based on the name.
    content_type = self._determine_content_type(name)

    # Force the current file to be at file location 0,
    # because that's what Google wants.
    fobj.seek(0)

    if self._is_file_empty(fobj):
        logging.warning("Stopping the upload of an empty file: {}".format(name))
        return name

    blob.upload_from_file(
        fobj,
        content_type=content_type,
    )
    return name
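A hedged usage sketch, assuming `gcs_store` is an instance of the class this method belongs to, with `bucket` pointing at an existing `google.cloud.storage` bucket (the object name is a placeholder):

import io

with io.BytesIO(b'{"status": "ok"}') as fobj:
    saved_name = gcs_store.save('results/run-1.json', fobj)
print(saved_name)   # 'results/run-1.json'; empty files are skipped with a warning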
def get_blob(self, blob_name, client=None):
    """Get a blob object by name.

    This will return None if the blob doesn't exist::

        >>> from google.cloud import storage
        >>> client = storage.Client()
        >>> bucket = client.get_bucket('my-bucket')
        >>> print(bucket.get_blob('/path/to/blob.txt'))
        <Blob: my-bucket, /path/to/blob.txt>
        >>> print(bucket.get_blob('/does-not-exist.txt'))
        None

    :type blob_name: string
    :param blob_name: The name of the blob to retrieve.

    :type client: :class:`~google.cloud.storage.client.Client` or
                  ``NoneType``
    :param client: Optional. The client to use. If not passed, falls back
                   to the ``client`` stored on the current bucket.

    :rtype: :class:`google.cloud.storage.blob.Blob` or None
    :returns: The blob object if it exists, otherwise None.
    """
    client = self._require_client(client)
    blob = Blob(bucket=self, name=blob_name)
    try:
        response = client.connection.api_request(
            method='GET', path=blob.path, _target_object=blob)
        # NOTE: We assume response.get('name') matches `blob_name`.
        blob._set_properties(response)
        # NOTE: This will not fail immediately in a batch. However, when
        #       Batch.finish() is called, the resulting `NotFound` will be
        #       raised.
        return blob
    except NotFound:
        return None
def delete_blob(self, blob_name, client=None):
    """Deletes a blob from the current bucket.

    If the blob isn't found (backend 404), raises a
    :class:`google.cloud.exceptions.NotFound`.

    For example:

    .. literalinclude:: snippets.py
        :start-after: [START delete_blob]
        :end-before: [END delete_blob]

    If :attr:`user_project` is set, bills the API request to that project.

    :type blob_name: str
    :param blob_name: A blob name to delete.

    :type client: :class:`~google.cloud.storage.client.Client` or
                  ``NoneType``
    :param client: Optional. The client to use. If not passed, falls back
                   to the ``client`` stored on the current bucket.

    :raises: :class:`google.cloud.exceptions.NotFound` (to suppress
             the exception, call ``delete_blobs``, passing a no-op
             ``on_error`` callback, e.g.:

             .. literalinclude:: snippets.py
                 :start-after: [START delete_blobs]
                 :end-before: [END delete_blobs]

    """
    client = self._require_client(client)
    query_params = {}

    if self.user_project is not None:
        query_params['userProject'] = self.user_project

    blob_path = Blob.path_helper(self.path, blob_name)
    # We intentionally pass `_target_object=None` since a DELETE
    # request has no response value (whether in a standard request or
    # in a batch request).
    client._connection.api_request(
        method='DELETE',
        path=blob_path,
        query_params=query_params,
        _target_object=None)
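A minimal calling sketch, assuming default credentials and placeholder bucket and object names:

from google.cloud import storage
from google.cloud.exceptions import NotFound

client = storage.Client()
bucket = client.get_bucket('my-bucket')        # placeholder bucket name
try:
    bucket.delete_blob('stale/object.txt')     # placeholder object name
except NotFound:
    print('object was already gone')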
def delete_blob(self, blob_name, client=None):
    """Deletes a blob from the current bucket.

    If the blob isn't found (backend 404), raises a
    :class:`google.cloud.exceptions.NotFound`.

    For example::

        >>> from google.cloud.exceptions import NotFound
        >>> from google.cloud import storage
        >>> client = storage.Client()
        >>> bucket = client.get_bucket('my-bucket')
        >>> print(bucket.list_blobs())
        [<Blob: my-bucket, my-file.txt>]
        >>> bucket.delete_blob('my-file.txt')
        >>> try:
        ...     bucket.delete_blob('doesnt-exist')
        ... except NotFound:
        ...     pass

    :type blob_name: string
    :param blob_name: A blob name to delete.

    :type client: :class:`~google.cloud.storage.client.Client` or
                  ``NoneType``
    :param client: Optional. The client to use. If not passed, falls back
                   to the ``client`` stored on the current bucket.

    :raises: :class:`google.cloud.exceptions.NotFound` (to suppress
             the exception, call ``delete_blobs``, passing a no-op
             ``on_error`` callback, e.g.::

             >>> bucket.delete_blobs([blob], on_error=lambda blob: None)
    """
    client = self._require_client(client)
    blob_path = Blob.path_helper(self.path, blob_name)
    # We intentionally pass `_target_object=None` since a DELETE
    # request has no response value (whether in a standard request or
    # in a batch request).
    client.connection.api_request(method='DELETE', path=blob_path,
                                  _target_object=None)