def upload_image(self, location: str, label: str, image_bytes, metadata: dict) -> typing.Optional[str]:
    """Upload a PNG image to the bucket under ``location/label`` and make it public.

    Args:
        location: Folder-like prefix for the object name.
        label: Object name within the prefix.
        image_bytes: File-like object holding the PNG payload.
        metadata: Custom metadata to attach to the blob.

    Returns:
        The public URL of the uploaded blob.
    """
    target = Blob(f"{location}/{label}", self.bucket)
    target.metadata = metadata
    target.upload_from_file(image_bytes, content_type="image/png")
    # Grant public read access so the returned URL is usable without auth.
    target.make_public()
    return target.public_url
class Uploder():
    """Uploads images from a fixed local directory to the 'argus_space' GCS bucket."""

    def __init__(self):
        """Create the storage client and resolve the bucket.

        On failure ``self.bucket`` stays ``None`` (previously it was left
        unassigned, so any later use raised a confusing AttributeError).
        """
        self.IMAGE_DIR = "/home/dumingzhex/Projects/WintersWrath/webspider/Image/"
        self.storage_client = storage.Client()
        self.bucket = None
        self.blob = None  # set by generator(); None until the first upload
        try:
            self.bucket = self.storage_client.get_bucket('argus_space')
            print("bucket")
        except Exception as e:
            print(e)
            print('Sorry, that bucket does not exist!')

    def generator(self, file_name):
        """Upload ``IMAGE_DIR/file_name`` to the bucket and make it public."""
        self.blob = Blob(file_name, self.bucket, encryption_key=None)
        self.blob.upload_from_filename(self.IMAGE_DIR + file_name)
        self.blob.make_public()

    def get_media_link(self):
        """Return the media link of the most recently uploaded blob."""
        return self.blob.media_link

    def get_public_link(self):
        """Return the public URL of the most recently uploaded blob."""
        return self.blob.public_url

    def get_dir(self, dir_name):
        """List the entries of *dir_name* on the local filesystem."""
        return os.listdir(dir_name)
def upload_public_file(client, bkt, file_name):
    """Upload a local file to *bkt* under the same name and make it publicly readable."""
    # file_name in Blob constructor is the file name you want to have on GCS
    target = Blob(file_name, bkt)
    # file_name in open function is the one that actually sits on your hard drive
    with open(file_name, 'rb') as source:
        target.upload_from_file(source)
    # after uploading the blob, we set it to public, so that it's accessible with a simple link
    target.make_public(client)
def upload_to_gcp(src_path, gcp_path):
    """Upload the local file *src_path* to *gcp_path* in the configured bucket.

    Returns the public URL of the uploaded object.
    """
    print('###### start upload from %s to %s' % (src_path, gcp_path))
    # Authenticate with the service-account key configured in settings.
    gcs_client = storage.Client.from_service_account_json(settings.GCP_KEY_PATH)
    target_bucket = gcs_client.get_bucket(settings.BUCKET_NAME)
    target = Blob(gcp_path, target_bucket)
    target.upload_from_filename(src_path)
    target.make_public()
    url = target.public_url
    print('##### upload success: %s' % url)
    return url
def uploadNewPhoto(self, file, bucket):
    """Upload the local file *file* into *bucket* and return its gs:// URI."""
    target = Blob(file, bucket)
    # TODO: create csv/tuple for the input?
    with open(file, "rb") as source:
        target.upload_from_file(source)
    # Make the object publicly readable after the upload completes.
    target.make_public()
    return r"gs://" + bucket.name + r"/" + file
def get_gcloud_url(stream):
    """Upload *stream* as a randomly named PNG and return its public URL."""
    # 16 random lowercase/digit characters keep object names effectively unique.
    alphabet = string.ascii_lowercase + string.digits
    filename = ''.join(random.choice(alphabet) for _ in range(16)) + ".png"
    client = storage.Client()
    bucket = client.get_bucket('hackgt-catalyst2018-photostyle')
    blob = Blob(filename, bucket)
    blob.upload_from_file(stream, content_type="image/png", client=client)
    blob.make_public(client=client)
    return blob.public_url
def _save(self, name, content):
    """Store *content* in the bucket under a non-clashing name; return the name used."""
    name = os.path.basename(name)
    candidate = name
    attempt = 0
    # Probe for a free object name, suffixing ".1", ".2", ... on collision.
    while True:
        blob = Blob(candidate, self.bucket, chunk_size=1024 * 256)
        if not blob.exists():
            break
        attempt += 1
        candidate = name + '.%d' % attempt
    blob.upload_from_file(content)
    blob.make_public()
    return candidate
class GCloudStorage(object):
    """Thin wrapper around a Google Cloud Storage client for public uploads."""

    def __init__(self, bucket_name, project_name):
        """Build the authenticated client and resolve the target bucket.

        Note: ``self.bucket`` is initialized to None rather than the bucket
        *name* string (the old string value was dead — immediately overwritten
        with the Bucket object by ``__create_bucket`` — and type-inconsistent).
        """
        self.client = None
        self.blob = None
        self.bucket = None  # set to the Bucket object by __create_bucket
        self.__create_client(project_name=project_name)
        self.__create_bucket(bucket_name=bucket_name)

    def __create_client(self, project_name):
        """Create a storage client authenticated via the service-account key file."""
        credentials = service_account.Credentials.from_service_account_file(
            KEY_PATH)
        self.client = storage.Client(project_name, credentials=credentials)

    def __create_bucket(self, bucket_name):
        """Resolve and cache the Bucket object (raises if it does not exist)."""
        self.bucket = self.client.get_bucket(bucket_name)

    def upload_file(self, file_name, file_stream):
        """Upload *file_stream* as *file_name* and make it publicly readable."""
        self.blob = Blob(file_name, self.bucket)
        self.blob.upload_from_file(file_stream)
        self.blob.make_public()
class Uploder():
    """Uploads local files to the 'argus_space' GCS bucket and exposes their links."""

    def __init__(self):
        """Create the storage client and resolve the bucket.

        Fixes two defects of the original: the caught exception ``e`` was
        captured but never logged (the real failure cause was discarded), and
        ``self.bucket`` was left unassigned on failure, turning later uploads
        into a confusing AttributeError.
        """
        self.storage_client = storage.Client()
        self.bucket = None  # stays None when the bucket lookup fails
        self.blob = None  # set by generator(); None until the first upload
        try:
            self.bucket = self.storage_client.get_bucket('argus_space')
            logging.debug("成功获得GCP存储空间.")
        except Exception as e:
            logging.error('指定存储空间不存在,请检查GCP.')
            # Record the underlying error so the failure is diagnosable.
            logging.exception(e)

    def generator(self, file_name):
        """Upload the local file *file_name* to the bucket and make it public."""
        self.blob = Blob(file_name, self.bucket, encryption_key=None)
        self.blob.upload_from_filename(file_name)
        self.blob.make_public()

    def get_media_link(self):
        """Return the media link of the most recently uploaded blob."""
        return self.blob.media_link

    def get_public_link(self):
        """Return the public URL of the most recently uploaded blob."""
        return self.blob.public_url

    def get_dir(self, dir_name):
        """List the entries of *dir_name* on the local filesystem."""
        return os.listdir(dir_name)
def patched_scale(self, factory=None, **parameters):
    """Monkey-patched scale lookup: return cached scale info for *parameters*,
    regenerating via *factory* when missing or stale, and mirroring the image
    bytes to the GCS BUCKET so the info carries a ``google_url``.

    Returns the info dict (or None when nothing is cached and no factory
    produced a result).
    """
    key = self.hash(**parameters)
    storage = self.storage
    info = self.get_info_by_hash(key)
    # Drop the cached entry if the source image changed since it was stored.
    if info is not None and self._modified_since(info['modified']):
        del storage[info['uid']]  # invalidate when the image was updated
        info = None
    if info is None and factory:
        result = factory(**parameters)
        if result is not None:
            # storage will be modified:
            # good time to also cleanup
            #self._cleanup()  # commented line
            data, format, dimensions = result
            width, height = dimensions
            uid = str(uuid4())
            # Upload the freshly generated scale to GCS and publish it.
            # NOTE(review): `data` presumably supports .open('r') returning a
            # file-like object of the image bytes — confirm against the caller.
            gblob = Blob(uid, BUCKET)  # added line
            blob = data.open('r')  # added line
            gblob.content_type = 'image/%s' % format.lower()  # added line
            gblob.upload_from_file(blob)  # added line
            blob.close()  # added line
            gblob.make_public()  # added line
            google_url = gblob.public_url  # added line
            # Release the image payload; only the public URL is cached.
            del data  # added line
            info = dict(
                uid=uid,
                data=None,  # patched line: data is not cached, only google_url
                width=width,
                height=height,
                mimetype='image/%s' % format.lower(),
                key=key,
                modified=self.modified_time,
                google_url=google_url)  # patched line, added google_url
            storage[uid] = info
    return info
def publish_file(self, file_path, publish_path, mime_type=None):
    """Upload *file_path* to the publication bucket at *publish_path*.

    Args:
        file_path: Local path of the file to publish.
        publish_path: Destination object name in the publication bucket.
        mime_type: Optional content type to record on the blob.

    Returns:
        The public URL of the published object.
    """
    target_bucket = self.client.bucket(self._publication_bucket)
    published = Blob(publish_path, target_bucket)
    published.upload_from_filename(file_path, content_type=mime_type)
    published.make_public(client=self.client)
    return published.public_url