def upload_blob(source: str, destination: str) -> None:
    """Upload a local file to a Cloud Storage object.

    Args:
        source: Local path to the file, e.g. "local/path/to/file".
        destination: Full GCS URI of the target object,
            e.g. "gs://your-bucket-name/storage-object-name".
    """
    storage_client = storage.Client()
    # Blob.from_string parses the full gs:// URI itself, so the separate
    # (and unused) Bucket.from_string lookup the original did is dropped.
    blob = Blob.from_string(destination, storage_client)
    blob.upload_from_filename(source)
    # BUG FIX: the original f-string printed the literal word
    # "destination" instead of interpolating the destination URI.
    print(f"File {source} uploaded to {destination}.")
def sign_url(uri, expiration=None, content_type=None):
    """Generate a signed URL for a Cloud Storage object.

    Args:
        uri: Object URI; only the "gs://" scheme is supported.
        expiration: Passed through to ``Blob.generate_signed_url``
            (datetime/timedelta/int per the GCS client library).
        content_type: Optional content type the signed request must use.

    Returns:
        The signed URL string.

    Raises:
        NotImplementedError: If the URI scheme is not "gs".
    """
    parsed = urlparse(uri)
    if parsed.scheme != 'gs':
        # Guard clause with context — the original raised a bare
        # NotImplementedError, which gives the caller no hint why.
        raise NotImplementedError(
            f"sign_url only supports gs:// URIs, got scheme {parsed.scheme!r}"
        )
    # Imported lazily so callers that never sign URLs avoid the dependency
    # cost at module import time.
    from google.cloud.storage import Client, Blob
    global cs_client
    if cs_client is None:
        # Cache the client at module level; constructing one is expensive.
        cs_client = Client()
    blob = Blob.from_string(uri, client=cs_client)
    return blob.generate_signed_url(expiration=expiration,
                                    content_type=content_type)
for bucket in buckets: print(bucket.name) # output bucket_name # get all the blobs in a bucket bucket = client.get_bucket("bucket_name") blobs = list(bucket.list_blobs()) for blob in blobs: print (blob) # print the blobs: blob_name #check if the blob exists assert isinstance(bucket.get_blob('blob_name'), Blob) #get the blob from path my_blob = Blob.from_string("gs://bucket_name/blob_name") # List the files in a folder files = bucket.list_blobs(prefix='folder_name') for f in files: print(f.name) def download_blob(bucket_name, source_blob_name, destination_file_name): """Downloads a blob from the bucket.""" storage_client = storage.Client() bucket = storage_client.get_bucket(bucket_name) blob = bucket.blob(source_blob_name) blob.download_to_filename(destination_file_name)
def clean_up_remote_files(files):
    """Delete every gs:// object referenced in *files* from Cloud Storage.

    Args:
        files: Iterable of parsed URLs (objects exposing ``.scheme`` and
            ``.geturl()``, e.g. ``urllib.parse.ParseResult``). Entries
            whose scheme is not "gs" are skipped untouched.
    """
    client = storage.Client()
    # Filter down to gs:// entries first, then delete each one.
    gs_uris = (parsed.geturl() for parsed in files if parsed.scheme == "gs")
    for uri in gs_uris:
        Blob.from_string(uri, client=client).delete()