def _upload_blob_gcp(
    project_name: str,
    bucket_name: str,
    source_file_name: str,
    destination_blob_name: str,
):

    """
    Upload blob to GCP storage bucket

    Example
    -------
    >>> _upload_blob_gcp(project_name='GCP-Essentials', bucket_name='test-pycaret-gcp', \
                         source_file_name='model-101.pkl', destination_blob_name='model-101.pkl')

    Parameters
    ----------
    project_name : str
        Name of the project on the GCP platform (must already have been created from the console).

    bucket_name : str
        Name of the storage bucket, to be created if it does not exist already.

    source_file_name : str
        Name of the local blob/file to copy to GCP.

    destination_blob_name : str
        Name of the destination file to be stored on GCP.

    Returns
    -------
    None
    """

    logger = get_logger()

    # bucket_name = "your-bucket-name"
    # source_file_name = "local/path/to/file"
    # destination_blob_name = "storage-object-name"
    import google.auth.exceptions
    from google.cloud import storage

    try:
        storage_client = storage.Client(project_name)
    except google.auth.exceptions.DefaultCredentialsError:
        logger.error(
            "Environment variable GOOGLE_APPLICATION_CREDENTIALS not set. For more information,"
            " please see https://cloud.google.com/docs/authentication/getting-started"
        )
        raise ValueError(
            "Environment variable GOOGLE_APPLICATION_CREDENTIALS not set. For more information,"
            " please see https://cloud.google.com/docs/authentication/getting-started"
        )

    bucket = storage_client.bucket(bucket_name)
    blob = bucket.blob(destination_blob_name)
    blob.upload_from_filename(source_file_name)

    logger.info(
        "File {} uploaded to {}.".format(source_file_name, destination_blob_name)
    )
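# A minimal usage sketch for _upload_blob_gcp (not part of the original module).
# The credential key path is a hypothetical placeholder; the project, bucket, and
# file names are taken from the docstring example above. The storage client reads
# credentials from GOOGLE_APPLICATION_CREDENTIALS, as the error message describes.
if __name__ == "__main__":
    import os

    # Point the Google client libraries at a service-account key (hypothetical path).
    os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = "path/to/service-account-key.json"

    _upload_blob_gcp(
        project_name="GCP-Essentials",
        bucket_name="test-pycaret-gcp",
        source_file_name="model-101.pkl",
        destination_blob_name="model-101.pkl",
    )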
def upload_disk(self):
    # Assumption: in the original file these imports live at module level.
    import os

    from google.cloud import storage
    from google.oauth2 import service_account

    # self.credentials holds the parsed service-account JSON as a dict.
    credentials = service_account.Credentials.from_service_account_info(
        self.credentials
    )
    gcs = storage.Client(project=self.project_id, credentials=credentials)
    try:
        bucket = gcs.get_bucket(bucket_or_name=self.bucket_name)
    except TypeError:
        raise KumoException("Error while getting bucket on gcp gcs.")

    # Upload the exported disk image ("<server_name>.vhd") from the local volume directory.
    blob_name = "{0}{1}{2}".format(self.server_name, ".", "vhd")
    blob = bucket.blob(blob_name=blob_name)
    filename = "/home/ubuntu/volume/{0}".format(blob_name)
    try:
        blob.upload_from_filename(filename=filename)
    except TypeError:
        raise KumoException("Error while uploading blob to gcp gcs.")

    # Remove the local copy once the upload has completed.
    disk = "/home/ubuntu/volume/{0}".format(blob_name)
    print("disk size (bytes): {}".format(os.path.getsize(disk)))
    os.remove(disk)
    print("disk status: deleted")
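# A minimal sketch (not part of the original class) of the credential shape that
# upload_disk expects: from_service_account_info() takes the parsed service-account
# JSON as a dict, unlike from_service_account_file(), which takes a path. The key
# file path below is a hypothetical placeholder.
if __name__ == "__main__":
    import json

    from google.oauth2 import service_account

    with open("path/to/service-account-key.json") as fh:
        credentials_info = json.load(fh)  # dict form expected in self.credentials

    credentials = service_account.Credentials.from_service_account_info(credentials_info)
    print("Loaded credentials for:", credentials.service_account_email)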