def download_blob_to_file(self, blob_or_uri, file_obj, start=None, end=None):
    """Download the contents of a blob object or blob URI into a file-like object.

    Args:
        blob_or_uri (Union[ \
            :class:`~google.cloud.storage.blob.Blob`, \
            str, \
        ]):
            The blob resource to pass or URI to download.
        file_obj (file):
            A file handle to which to write the blob's data.
        start (int):
            Optional. The first byte in a range to be downloaded.
        end (int):
            Optional. The last byte in a range to be downloaded.

    Examples:
        Download a blob using a blob resource.

        >>> from google.cloud import storage
        >>> client = storage.Client()
        >>> bucket = client.get_bucket('my-bucket-name')
        >>> blob = storage.Blob('path/to/blob', bucket)
        >>> with open('file-to-download-to', 'wb') as file_obj:
        >>>     client.download_blob_to_file(blob, file_obj)  # API request.

        Download a blob using a URI.

        >>> from google.cloud import storage
        >>> client = storage.Client()
        >>> with open('file-to-download-to', 'wb') as file_obj:
        >>>     client.download_blob_to_file(
        >>>         'gs://bucket_name/path/to/blob', file_obj)

    """
    try:
        blob_or_uri.download_to_file(file_obj, client=self, start=start, end=end)
    except AttributeError:
        scheme, netloc, path, query, frag = urlsplit(blob_or_uri)
        if scheme != "gs":
            raise ValueError("URI scheme must be gs")
        bucket = Bucket(self, name=netloc)
        blob_or_uri = Blob(path, bucket)
        blob_or_uri.download_to_file(file_obj, client=self, start=start, end=end)
def __init__(self, blob: Blob, chunk_size: int = default_chunk_size,
             async_queue: Optional[AsyncQueue] = None):
    assert chunk_size >= 1
    if blob.size is None:
        blob.reload()
    if blob.chunk_size is None:
        # Induce google.cloud.storage.blob to use either
        # google.resumable_media.requests.ChunkedDownload or
        # google.resumable_media.requests.RawChunkedDownload, which do not
        # attempt to perform data-integrity checks for chunk downloads
        # (checksum headers are not available for chunks).
        blob.chunk_size = ceil(chunk_size / _BLOB_CHUNK_SIZE_UNIT) * _BLOB_CHUNK_SIZE_UNIT
    self.blob = blob
    self.chunk_size = chunk_size
    self._buffer = bytearray()
    self._pos = 0
    self.number_of_chunks = ceil(self.blob.size / self.chunk_size) if 0 < self.blob.size else 1
    self._unfetched_chunks = [i for i in range(self.number_of_chunks)]
    self.future_chunk_downloads: Optional[AsyncQueue]
    if async_queue is not None:
        self.future_chunk_downloads = async_queue
        for chunk_number in self._unfetched_chunks:
            self.future_chunk_downloads.put(self._fetch_chunk, chunk_number)
    else:
        self.future_chunk_downloads = None
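# A minimal, self-contained sketch of the chunk-size alignment used above:
# Blob.chunk_size must be a multiple of 256 KiB, so the requested size is
# rounded up to the next multiple. The value of _BLOB_CHUNK_SIZE_UNIT is an
# assumption here (262144 bytes, per the GCS API's 256 KB requirement).
from math import ceil

_BLOB_CHUNK_SIZE_UNIT = 262144  # 256 KiB, assumed per the GCS API specification

def aligned_chunk_size(requested: int) -> int:
    """Round `requested` up to the nearest multiple of 256 KiB."""
    return ceil(requested / _BLOB_CHUNK_SIZE_UNIT) * _BLOB_CHUNK_SIZE_UNIT

assert aligned_chunk_size(1) == 262144
assert aligned_chunk_size(262144) == 262144
assert aligned_chunk_size(262145) == 524288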
def upload(file_path):
    if bucket is None:
        return
    name = file_path.replace(os.path.abspath(os.curdir) + '/', '')
    blob = Blob(name, bucket)
    blob.upload_from_filename(file_path)
def save(self, name, fobj, max_length=None):
    blob = Blob(name, self.bucket)
    # Force the current file to be at position 0,
    # because that's what Google wants.
    fobj.seek(0)
    blob.upload_from_file(fobj)
    return name
def copy_blob(self, blob, destination_bucket, new_name=None, client=None):
    """Copy the given blob to the given bucket, optionally with a new name.

    :type blob: :class:`google.cloud.storage.blob.Blob`
    :param blob: The blob to be copied.

    :type destination_bucket: :class:`google.cloud.storage.bucket.Bucket`
    :param destination_bucket: The bucket into which the blob should be
                               copied.

    :type new_name: string
    :param new_name: (optional) the new name for the copied file.

    :type client: :class:`~google.cloud.storage.client.Client` or
                  ``NoneType``
    :param client: Optional. The client to use. If not passed, falls back
                   to the ``client`` stored on the current bucket.

    :rtype: :class:`google.cloud.storage.blob.Blob`
    :returns: The new Blob.
    """
    client = self._require_client(client)
    if new_name is None:
        new_name = blob.name
    new_blob = Blob(bucket=destination_bucket, name=new_name)
    api_path = blob.path + '/copyTo' + new_blob.path
    copy_result = client.connection.api_request(
        method='POST', path=api_path, _target_object=new_blob)
    new_blob._set_properties(copy_result)
    return new_blob
def _transform_mobile_image(
    self, original_image_blob: Blob, new_image_blob: Blob
) -> Optional[Blob]:
    """
    Create smaller image size to be served on mobile devices.

    :param Blob original_image_blob: Original image blob.
    :param Blob new_image_blob: Newly created blob for the mobile image.

    :returns: Optional[Blob]
    """
    img_meta = self._get_image_meta(original_image_blob)
    img_bytes = original_image_blob.download_as_bytes()
    if img_bytes:
        stream = BytesIO(img_bytes)
        im = Image.open(stream)
        try:
            with BytesIO() as output:
                new_image = im.reduce(2)
                new_image.save(output, format=img_meta["format"])
                new_image_blob.upload_from_string(
                    output.getvalue(), content_type=img_meta["content-type"]
                )
                LOGGER.success(f"Created mobile image `{new_image_blob.name}`")
                return new_image_blob
        except GoogleCloudError as e:
            LOGGER.error(
                f"GoogleCloudError while saving mobile image `{new_image_blob.name}`: {e}"
            )
        except Exception as e:
            LOGGER.error(
                f"Unexpected exception while saving mobile image `{new_image_blob.name}`: {e}"
            )
def get_blob(self, blob_name, client=None):
    """Get a blob object by name.

    This will return None if the blob doesn't exist:

    .. literalinclude:: storage_snippets.py
        :start-after: [START get_blob]
        :end-before: [END get_blob]

    :type blob_name: str
    :param blob_name: The name of the blob to retrieve.

    :type client: :class:`~google.cloud.storage.client.Client` or
                  ``NoneType``
    :param client: Optional. The client to use. If not passed, falls back
                   to the ``client`` stored on the current bucket.

    :rtype: :class:`google.cloud.storage.blob.Blob` or None
    :returns: The blob object if it exists, otherwise None.
    """
    client = self._require_client(client)
    blob = Blob(bucket=self, name=blob_name)
    try:
        response = client._connection.api_request(
            method='GET', path=blob.path, _target_object=blob)
        # NOTE: We assume response.get('name') matches `blob_name`.
        blob._set_properties(response)
        # NOTE: This will not fail immediately in a batch. However, when
        #       Batch.finish() is called, the resulting `NotFound` will be
        #       raised.
        return blob
    except NotFound:
        return None
def save(self, name, fobj, max_length=None, blob_object=None):
    if not blob_object:
        blob = Blob(name, self.bucket)
    else:
        blob = blob_object
    # Determine the current file's mimetype based on the name.
    content_type = self._determine_content_type(name)
    # Force the current file to be at position 0,
    # because that's what Google wants.
    fobj.seek(0)
    if self._is_file_empty(fobj):
        logging.warning(
            "Stopping the upload of an empty file: {}".format(name))
        return name
    # Set a max-age if we're uploading to content/databases.
    if self.is_database_file(name):
        blob.cache_control = 'private, max-age={}, no-transform'.format(
            CONTENT_DATABASES_MAX_AGE)
    blob.upload_from_file(
        fobj,
        content_type=content_type,
    )
    return name
def _upload(bucket: Bucket, src: str, prefix=None):
    dest = src.replace(os.path.abspath(os.curdir) + '/', '')
    if prefix:
        dest = os.path.join(prefix, dest)
    blob = Blob(dest, bucket)
    print(f'Uploading: {dest}')
    blob.upload_from_filename(src)
def __init__(self, blob: Blob, chunk_size: int = default_chunk_size):
    assert chunk_size >= 1
    if blob.size is None:
        # Fetch the blob's metadata so that blob.size is populated.
        blob.reload()
    self.blob = blob
    self.chunk_size = chunk_size
    self._buffer = bytearray()
    self._unfetched_chunks = list(range(self.number_of_chunks()))
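# The reader above assumes a `number_of_chunks()` method on the same class.
# A plausible sketch, mirroring the ceil-division used by the async variant
# earlier in this collection (an assumption, not the original source):
#
#     def number_of_chunks(self) -> int:
#         return ceil(self.blob.size / self.chunk_size) if self.blob.size > 0 else 1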
def get_blob(self, blob_name, client=None, encryption_key=None, **kwargs):
    """Get a blob object by name.

    This will return None if the blob doesn't exist:

    .. literalinclude:: snippets.py
        :start-after: [START get_blob]
        :end-before: [END get_blob]

    If :attr:`user_project` is set, bills the API request to that project.

    :type blob_name: str
    :param blob_name: The name of the blob to retrieve.

    :type client: :class:`~google.cloud.storage.client.Client` or
                  ``NoneType``
    :param client: Optional. The client to use. If not passed, falls back
                   to the ``client`` stored on the current bucket.

    :type encryption_key: bytes
    :param encryption_key:
        Optional 32 byte encryption key for customer-supplied encryption.
        See https://cloud.google.com/storage/docs/encryption#customer-supplied.

    :type kwargs: dict
    :param kwargs: Keyword arguments to pass to the
                   :class:`~google.cloud.storage.blob.Blob` constructor.

    :rtype: :class:`google.cloud.storage.blob.Blob` or None
    :returns: The blob object if it exists, otherwise None.
    """
    client = self._require_client(client)
    query_params = {}

    if self.user_project is not None:
        query_params['userProject'] = self.user_project
    blob = Blob(bucket=self, name=blob_name, encryption_key=encryption_key,
                **kwargs)
    try:
        headers = _get_encryption_headers(encryption_key)
        response = client._connection.api_request(
            method='GET',
            path=blob.path,
            query_params=query_params,
            headers=headers,
            _target_object=blob,
        )
        # NOTE: We assume response.get('name') matches `blob_name`.
        blob._set_properties(response)
        # NOTE: This will not fail immediately in a batch. However, when
        #       Batch.finish() is called, the resulting `NotFound` will be
        #       raised.
        return blob
    except NotFound:
        return None
def copy_blob(self, blob, destination_bucket, new_name=None, client=None,
              preserve_acl=True, source_generation=None):
    """Copy the given blob to the given bucket, optionally with a new name.

    If :attr:`user_project` is set, bills the API request to that project.

    :type blob: :class:`google.cloud.storage.blob.Blob`
    :param blob: The blob to be copied.

    :type destination_bucket: :class:`google.cloud.storage.bucket.Bucket`
    :param destination_bucket: The bucket into which the blob should be
                               copied.

    :type new_name: str
    :param new_name: (optional) the new name for the copied file.

    :type client: :class:`~google.cloud.storage.client.Client` or
                  ``NoneType``
    :param client: Optional. The client to use. If not passed, falls back
                   to the ``client`` stored on the current bucket.

    :type preserve_acl: bool
    :param preserve_acl: Optional. Copies ACL from old blob to new blob.
                         Default: True.

    :type source_generation: long
    :param source_generation: Optional. The generation of the blob to be
                              copied.

    :rtype: :class:`google.cloud.storage.blob.Blob`
    :returns: The new Blob.
    """
    client = self._require_client(client)
    query_params = {}

    if self.user_project is not None:
        query_params['userProject'] = self.user_project

    if source_generation is not None:
        query_params['sourceGeneration'] = source_generation

    if new_name is None:
        new_name = blob.name
    new_blob = Blob(bucket=destination_bucket, name=new_name)
    api_path = blob.path + '/copyTo' + new_blob.path
    copy_result = client._connection.api_request(
        method='POST',
        path=api_path,
        query_params=query_params,
        _target_object=new_blob,
    )
    if not preserve_acl:
        new_blob.acl.save(acl={}, client=client)
    new_blob._set_properties(copy_result)
    return new_blob
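# Hedged usage sketch for copy_blob above; the bucket and object names are
# illustrative assumptions, and a storage.Client with suitable credentials
# is required.
from google.cloud import storage

def copy_example():
    client = storage.Client()
    src_bucket = client.get_bucket('source-bucket')       # assumed name
    dst_bucket = client.get_bucket('destination-bucket')  # assumed name
    blob = src_bucket.blob('path/to/object')
    # The copy happens server-side within GCS; no data passes through the caller.
    copied = src_bucket.copy_blob(blob, dst_bucket, new_name='new/path/object')
    return copied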
def upload(file_path):
    if bucket is None:
        return
    # Remove prefix /app.
    name = file_path.replace(os.path.abspath(os.curdir) + '/', '')
    blob = Blob(name, bucket)
    logging.info('uploading {}'.format(name))
    blob.upload_from_filename(file_path)
def __init__(self, name, mode, storage):
    self.name = name
    self._mode = mode
    self._storage = storage
    self.blob = storage.bucket.get_blob(name)
    if not self.blob and 'w' in mode:
        self.blob = Blob(self.name, storage.bucket)
    self._file = None
    self._is_dirty = False
class GoogleCloudFile(File):
    def __init__(self, name, mode, storage):
        self.name = name
        self.mime_type = mimetypes.guess_type(name)[0]
        self._mode = mode
        self._storage = storage
        self.blob = storage.bucket.get_blob(name)
        if not self.blob and 'w' in mode:
            self.blob = Blob(self.name, storage.bucket)
        self._file = None
        self._is_dirty = False

    @property
    def size(self):
        return self.blob.size

    def _get_file(self):
        if self._file is None:
            self._file = SpooledTemporaryFile(
                max_size=self._storage.max_memory_size,
                suffix=".GSStorageFile",
                dir=setting("FILE_UPLOAD_TEMP_DIR", None)
            )
            if 'r' in self._mode:
                self._is_dirty = False
                self.blob.download_to_file(self._file)
                self._file.seek(0)
        return self._file

    def _set_file(self, value):
        self._file = value

    file = property(_get_file, _set_file)

    def read(self, num_bytes=None):
        if 'r' not in self._mode:
            raise AttributeError("File was not opened in read mode.")
        if num_bytes is None:
            num_bytes = -1
        return super(GoogleCloudFile, self).read(num_bytes)

    def write(self, content):
        if 'w' not in self._mode:
            raise AttributeError("File was not opened in write mode.")
        self._is_dirty = True
        return super(GoogleCloudFile, self).write(force_bytes(content))

    def close(self):
        if self._file is not None:
            if self._is_dirty:
                self.file.seek(0)
                self.blob.upload_from_file(self.file, content_type=self.mime_type)
            self._file.close()
            self._file = None
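# Hedged usage sketch for GoogleCloudFile, assuming a django-storages-style
# GoogleCloudStorage backend instance named `storage` (illustrative, not from
# the original source):
#
#     f = GoogleCloudFile('reports/2020.csv', 'w', storage)
#     f.write(b'col_a,col_b\n1,2\n')
#     f.close()  # the dirty spooled temp file is uploaded to GCS on close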
def put_object(self, bucket_name, key, obj, **kwargs):
    try:
        bucket = self.client.get_bucket(bucket_name)
        blob = Blob(key, bucket)
        blob.upload_from_string(obj)
    except Exception as e:
        logging.exception(
            'Exception in [GoogleCloudStorage.put_object] with bucket_name {} and key {}'
            .format(bucket_name, key))
        raise e
def upload(file_path):
    if bucket is None:
        return
    name = file_path.replace(os.path.abspath(os.curdir) + '/', '')
    blob = Blob(name, bucket)
    try:
        blob.upload_from_filename(file_path, timeout=300)
    except Exception as e:
        print(str(e))
def copy_blob(self, blob, destination_bucket, new_name=None, client=None,
              preserve_acl=True):
    """Copy the given blob to the given bucket, optionally with a new name.

    If :attr:`user_project` is set, bills the API request to that project.

    :type blob: :class:`google.cloud.storage.blob.Blob`
    :param blob: The blob to be copied.

    :type destination_bucket: :class:`google.cloud.storage.bucket.Bucket`
    :param destination_bucket: The bucket into which the blob should be
                               copied.

    :type new_name: str
    :param new_name: (optional) the new name for the copied file.

    :type client: :class:`~google.cloud.storage.client.Client` or
                  ``NoneType``
    :param client: Optional. The client to use. If not passed, falls back
                   to the ``client`` stored on the current bucket.

    :type preserve_acl: bool
    :param preserve_acl: Optional. Copies ACL from old blob to new blob.
                         Default: True.

    :rtype: :class:`google.cloud.storage.blob.Blob`
    :returns: The new Blob.
    """
    client = self._require_client(client)
    query_params = {}

    if self.user_project is not None:
        query_params['userProject'] = self.user_project

    if new_name is None:
        new_name = blob.name
    new_blob = Blob(bucket=destination_bucket, name=new_name)
    api_path = blob.path + '/copyTo' + new_blob.path
    copy_result = client._connection.api_request(
        method='POST',
        path=api_path,
        query_params=query_params,
        _target_object=new_blob,
    )
    if not preserve_acl:
        new_blob.acl.save(acl={}, client=client)
    new_blob._set_properties(copy_result)
    return new_blob
def get_items_from_response(self, response):
    """Yield :class:`.storage.blob.Blob` items from response.

    :type response: dict
    :param response: The JSON API response for a page of blobs.
    """
    self._current_prefixes = tuple(response.get('prefixes', ()))
    self.prefixes.update(self._current_prefixes)
    for item in response.get('items', []):
        name = item.get('name')
        blob = Blob(name, bucket=self.bucket)
        blob._set_properties(item)
        yield blob
def _item_to_value(self, item):
    """Convert a JSON blob to the native object.

    :type item: dict
    :param item: An item to be converted to a blob.

    :rtype: :class:`.Blob`
    :returns: The next blob in the page.
    """
    name = item.get('name')
    blob = Blob(name, bucket=self._parent.bucket)
    blob._set_properties(item)
    return blob
def test_move_files(storage):
    from_bucket = mock.create_autospec(Bucket)
    from_bucket.name = 'FROM_BUCKET'
    to_bucket = mock.create_autospec(Bucket)
    to_bucket.name = 'TO_BUCKET'
    blob1 = Blob('doc1.txt', bucket=from_bucket)
    blob2 = Blob('doc2.txt', bucket=from_bucket)
    from_bucket.list_blobs.return_value = [blob1, blob2]
    prefix = 'vibe-messages-final'

    storage.client.move_files(prefix, from_bucket, to_bucket)

    from_bucket.copy_blob.assert_any_call(blob1, to_bucket)
    from_bucket.copy_blob.assert_any_call(blob2, to_bucket)
    from_bucket.list_blobs.assert_called_once_with(prefix=prefix)
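# The test above exercises a custom `move_files` wrapper that is not shown in
# this collection. A minimal sketch consistent with the assertions (an
# assumption, not the original source; a true "move" would presumably also
# delete each source blob, which the test does not assert):
#
#     def move_files(self, prefix, from_bucket, to_bucket):
#         for blob in from_bucket.list_blobs(prefix=prefix):
#             from_bucket.copy_blob(blob, to_bucket)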
def _download_blob(blob: Blob, dst_url: str) -> None:
    """
    Downloads and saves a single large binary object to the local file system.
    """
    download_path = os.path.join(dst_url, blob.name)
    if not os.path.exists(os.path.dirname(download_path)):
        try:
            os.makedirs(os.path.dirname(download_path))
        except FileExistsError:
            # Guard against a race during parallel downloads, when makedirs
            # may be called several times for the same directory.
            pass
    LOG.info(f'Downloading "{blob.name}" to "{download_path}"')
    blob.download_to_filename(download_path)
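# Hedged sketch of a parallel driver for _download_blob, since the comment
# above anticipates concurrent callers. The executor wiring and worker count
# are illustrative assumptions, not part of the original source.
from concurrent.futures import ThreadPoolExecutor

def _download_all(blobs, dst_url: str, max_workers: int = 8) -> None:
    # Download many blobs concurrently; _download_blob already tolerates
    # the makedirs race documented above.
    with ThreadPoolExecutor(max_workers=max_workers) as pool:
        for blob in blobs:
            pool.submit(_download_blob, blob, dst_url)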
def save_file(self, file, filename, folder=None, randomize=False,
              extensions=None, acl=None, replace=False, headers=None):
    """
    :param filename: local filename
    :param folder: relative path of sub-folder
    :param randomize: randomize the filename
    :param extensions: iterable of allowed extensions, if not default
    :param acl: ACL policy (if None then uses default)
    :returns: modified filename
    """
    extensions = extensions or self.extensions

    if not self.filename_allowed(filename, extensions):
        raise FileNotAllowed()

    filename = utils.secure_filename(os.path.basename(filename))

    if randomize:
        filename = utils.random_filename(filename)

    if folder:
        filename = folder + "/" + filename

    content_type, _ = mimetypes.guess_type(filename)
    content_type = content_type or 'application/octet-stream'

    blob = self.get_bucket().get_blob(filename)
    # If the file exists and we explicitly asked not to replace it: ignore it.
    if blob and not replace:
        return filename
    # If the file doesn't exist: create it.
    if not blob:
        blob = Blob(filename, self.get_bucket())

    blob.cache_control = self.cache_control
    file.seek(0)
    acl = acl or self.acl
    blob.upload_from_file(file, rewind=True, content_type=content_type,
                          predefined_acl=acl)
    return filename
def generate_signed_url(self, url, expiration_time=30):
    """
    url: GS url "gs://xzy/test/test.pdf"
    expiration_time: validity of signed url, by default 30 mins.
    """
    u = urlparse.urlsplit(url)
    if u.scheme == 'gs':
        blob_name = u.path[1:]
        blob = Blob(blob_name, self.__bucket)
        expiry = (datetime.datetime.now() +
                  datetime.timedelta(minutes=expiration_time)).timetuple()
        expiry = int(time.mktime(expiry))
        return blob.generate_signed_url(expiry)
    else:
        return "Invalid GS url"
def _make_one(*args, **kw):
    from google.cloud.storage.blob import Blob

    properties = kw.pop('properties', {})
    blob = Blob(*args, **kw)
    blob._properties.update(properties)
    return blob
def blob(self, blob_name, chunk_size=None, encryption_key=None):
    """Factory constructor for blob object.

    .. note::
        This will not make an HTTP request; it simply instantiates
        a blob object owned by this bucket.

    :type blob_name: str
    :param blob_name: The name of the blob to be instantiated.

    :type chunk_size: int
    :param chunk_size: The size of a chunk of data whenever iterating
                       (1 MB). This must be a multiple of 256 KB per the
                       API specification.

    :type encryption_key: bytes
    :param encryption_key:
        Optional 32 byte encryption key for customer-supplied encryption.

    :rtype: :class:`google.cloud.storage.blob.Blob`
    :returns: The blob object created.
    """
    return Blob(name=blob_name, bucket=self, chunk_size=chunk_size,
                encryption_key=encryption_key)
def _read(spath: str):
    if _is_local(spath):
        with open(spath, "r") as f:
            return f.read()
    blob = Blob.from_string(spath, gcs_client())
    return blob.download_as_string().decode("utf-8")
def _exists(spath: str) -> bool:
    if _is_local(spath):
        return exists(spath)
    client = storage.Client()
    blob = Blob.from_string(spath, client)
    return blob.exists()
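# Both helpers above assume an `_is_local` predicate that is not shown. A
# plausible sketch (an assumption, not the original source): treat anything
# that is not a gs:// URI as a local filesystem path.
def _is_local(spath: str) -> bool:
    # Paths without the gs:// scheme fall through to the local filesystem.
    return not spath.startswith("gs://")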
def test_has_file(storage):
    bucket = mock.create_autospec(Bucket)
    bucket.name = 'FROM_BUCKET'
    blob = Blob('doc1.txt', bucket=bucket)
    bucket.list_blobs.return_value = [blob]

    assert storage.client.has_file(bucket=bucket)

    empty_bucket = mock.create_autospec(Bucket)
    empty_bucket.name = 'FROM_BUCKET'
    has_file_cond = not CloudStorage.factory('PROJECT').has_file(bucket=empty_bucket)
    assert has_file_cond
def save(self, name, fobj, max_length=None, blob_object=None):
    if not blob_object:
        blob = Blob(name, self.bucket)
    else:
        blob = blob_object
    # Determine the current file's mimetype based on the name.
    content_type = self._determine_content_type(name)
    # Force the current file to be at position 0,
    # because that's what Google wants.
    fobj.seek(0)
    blob.upload_from_file(
        fobj,
        content_type=content_type,
    )
    return name
def generate_signed_url(output_uri):
    expiration_time = datetime.timedelta(minutes=5)
    blob = Blob.from_string(output_uri, client=client)
    signed_url = blob.generate_signed_url(expiration=expiration_time,
                                          version='v4',
                                          response_disposition='attachment')
    return signed_url
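# Hedged usage sketch for generate_signed_url above. The URI is illustrative,
# and the module-level `client` the function closes over is assumed to be a
# storage.Client whose credentials can sign URLs (e.g. a service account).
#
#     url = generate_signed_url('gs://my-bucket/exports/report.pdf')
#     # `url` is shareable, forces a download via the 'attachment'
#     # response disposition, and expires after 5 minutes.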
def _item_to_blob(iterator, item):
    """Convert a JSON blob to the native object.

    .. note::
        This assumes that the ``bucket`` attribute has been
        added to the iterator after being created.

    :type iterator: :class:`~google.cloud.iterator.Iterator`
    :param iterator: The iterator that has retrieved the item.

    :type item: dict
    :param item: An item to be converted to a blob.

    :rtype: :class:`.Blob`
    :returns: The next blob in the page.
    """
    name = item.get('name')
    blob = Blob(name, bucket=iterator.bucket)
    blob._set_properties(item)
    return blob
def save(self, name, fobj, max_length=None, blob_object=None):
    if not blob_object:
        blob = Blob(name, self.bucket)
    else:
        blob = blob_object
    # Determine the current file's mimetype based on the name.
    content_type = self._determine_content_type(name)
    # Force the current file to be at position 0,
    # because that's what Google wants.
    fobj.seek(0)
    if self._is_file_empty(fobj):
        logging.warning("Stopping the upload of an empty file: {}".format(name))
        return name
    blob.upload_from_file(
        fobj,
        content_type=content_type,
    )
    return name
def get_blob(self, blob_name, client=None):
    """Get a blob object by name.

    This will return None if the blob doesn't exist::

      >>> from google.cloud import storage
      >>> client = storage.Client()
      >>> bucket = client.get_bucket('my-bucket')
      >>> print(bucket.get_blob('/path/to/blob.txt'))
      <Blob: my-bucket, /path/to/blob.txt>
      >>> print(bucket.get_blob('/does-not-exist.txt'))
      None

    :type blob_name: string
    :param blob_name: The name of the blob to retrieve.

    :type client: :class:`~google.cloud.storage.client.Client` or
                  ``NoneType``
    :param client: Optional. The client to use. If not passed, falls back
                   to the ``client`` stored on the current bucket.

    :rtype: :class:`google.cloud.storage.blob.Blob` or None
    :returns: The blob object if it exists, otherwise None.
    """
    client = self._require_client(client)
    blob = Blob(bucket=self, name=blob_name)
    try:
        response = client.connection.api_request(
            method='GET', path=blob.path, _target_object=blob)
        # NOTE: We assume response.get('name') matches `blob_name`.
        blob._set_properties(response)
        # NOTE: This will not fail immediately in a batch. However, when
        #       Batch.finish() is called, the resulting `NotFound` will be
        #       raised.
        return blob
    except NotFound:
        return None
def delete_blob(self, blob_name, client=None):
    """Deletes a blob from the current bucket.

    If the blob isn't found (backend 404), raises a
    :class:`google.cloud.exceptions.NotFound`.

    For example:

    .. literalinclude:: snippets.py
        :start-after: [START delete_blob]
        :end-before: [END delete_blob]

    If :attr:`user_project` is set, bills the API request to that project.

    :type blob_name: str
    :param blob_name: A blob name to delete.

    :type client: :class:`~google.cloud.storage.client.Client` or
                  ``NoneType``
    :param client: Optional. The client to use. If not passed, falls back
                   to the ``client`` stored on the current bucket.

    :raises: :class:`google.cloud.exceptions.NotFound` (to suppress
             the exception, call ``delete_blobs``, passing a no-op
             ``on_error`` callback, e.g.:

    .. literalinclude:: snippets.py
        :start-after: [START delete_blobs]
        :end-before: [END delete_blobs]

    """
    client = self._require_client(client)
    query_params = {}

    if self.user_project is not None:
        query_params['userProject'] = self.user_project

    blob_path = Blob.path_helper(self.path, blob_name)
    # We intentionally pass `_target_object=None` since a DELETE
    # request has no response value (whether in a standard request or
    # in a batch request).
    client._connection.api_request(
        method='DELETE',
        path=blob_path,
        query_params=query_params,
        _target_object=None)
def delete_blob(self, blob_name, client=None):
    """Deletes a blob from the current bucket.

    If the blob isn't found (backend 404), raises a
    :class:`google.cloud.exceptions.NotFound`.

    For example::

      >>> from google.cloud.exceptions import NotFound
      >>> from google.cloud import storage
      >>> client = storage.Client()
      >>> bucket = client.get_bucket('my-bucket')
      >>> print(bucket.list_blobs())
      [<Blob: my-bucket, my-file.txt>]
      >>> bucket.delete_blob('my-file.txt')
      >>> try:
      ...   bucket.delete_blob('doesnt-exist')
      ... except NotFound:
      ...   pass

    :type blob_name: string
    :param blob_name: A blob name to delete.

    :type client: :class:`~google.cloud.storage.client.Client` or
                  ``NoneType``
    :param client: Optional. The client to use. If not passed, falls back
                   to the ``client`` stored on the current bucket.

    :raises: :class:`google.cloud.exceptions.NotFound` (to suppress
             the exception, call ``delete_blobs``, passing a no-op
             ``on_error`` callback, e.g.::

             >>> bucket.delete_blobs([blob], on_error=lambda blob: None)
    """
    client = self._require_client(client)
    blob_path = Blob.path_helper(self.path, blob_name)
    # We intentionally pass `_target_object=None` since a DELETE
    # request has no response value (whether in a standard request or
    # in a batch request).
    client.connection.api_request(method='DELETE', path=blob_path,
                                  _target_object=None)