def __init__(self, key, secret, owner=None, date_str=None):
    """Initializes a GCPCloudStorage object

    https://libcloud.readthedocs.io/en/latest/storage/drivers/google_storage.html

    Args:
        key: service account email
        secret: service account private key string, or a dict containing a
            'private_key' entry (e.g. a parsed service-account JSON file)
        owner: owner metadata (default: {None})
        date_str: date created metadata (default: {None})
    """
    # was super().__init__(self): self is already bound implicitly.
    super().__init__()
    self.key = key
    self.container_name = 'smcs-123'
    # was isJson(secret): a NameError, and the "is JSON string" branch would
    # then crash indexing a str with 'private_key'. The workable intent is:
    # accept either a parsed credentials dict or the raw key string.
    if isinstance(secret, dict):
        self.secret = secret['private_key']
    else:
        self.secret = secret
    self.driver = GoogleStorageDriver(key=self.key, secret=self.secret)
    # was bare driver.get_container(...): NameError, must go through self;
    # reuse self.container_name instead of repeating the literal.
    self.container = self.driver.get_container(
        container_name=self.container_name)
    self.metaData = {'meta_data': {}}
    # was bare setMetaData(owner, date_str): NameError, and the sibling
    # method signature requires an object_name argument — pass None.
    self.setMetaData(owner, date_str, None)
def upload_log_tarballs():
    """Upload per-design run tarballs to a cloud storage bucket.

    Reads LOG_UPLOAD_INFO from the environment, expected in the form
    "{platform}:{bucket_name}:{encoded_data}" where encoded_data is
    base64-encoded service-account JSON. Only the "gcp" platform is
    supported. Tarballs are discovered under <gh.root>/designs/*/runs/
    and uploaded as <gh.run_id>/<design>.tar.gz; per-file upload failures
    are logged and skipped.

    Exits with status 0 when no key is provided, and with os.EX_CONFIG on
    a malformed key or unsupported platform.
    """
    log_upload_info = os.getenv("LOG_UPLOAD_INFO")
    if log_upload_info is None or log_upload_info.strip() == "":
        print("No upload key was provided. Log tarballs will not be uploaded.",
              file=sys.stderr)
        # was exit(0): the exit() builtin is site-provided and not guaranteed
        sys.exit(0)

    # maxsplit=2 tolerates ':' characters inside the encoded payload; the
    # original unguarded 3-way unpack raised a raw ValueError on bad input.
    parts = log_upload_info.split(":", 2)
    if len(parts) != 3:
        print("Malformed LOG_UPLOAD_INFO. Ensure your key is formatted as "
              "{platform}:{bucket_name}:{encoded_data}.", file=sys.stderr)
        sys.exit(os.EX_CONFIG)
    platform, bucket_name, encoded_data = parts

    driver: StorageDriver = None
    container: Container = None
    if platform == "gcp":
        from libcloud.storage.drivers.google_storage import GoogleStorageDriver
        try:
            data = json.loads(base64.b64decode(encoded_data).decode("utf8"))
        except ValueError as err:
            # binascii.Error (bad base64), UnicodeDecodeError and
            # JSONDecodeError all subclass ValueError; the original bare
            # `except:` also swallowed KeyboardInterrupt/SystemExit.
            raise Exception("Invalid base64 data or resultant JSON file.") from err
        driver = GoogleStorageDriver(key=data["client_email"],
                                     secret=data["private_key"])
        container = driver.get_container(bucket_name)
    else:
        print(
            "Platform not supported. Ensure your key is formatted correctly as {platform}:{bucket_name}:{encoded_data}.",
            file=sys.stderr)
        sys.exit(os.EX_CONFIG)

    tarball_glob = os.path.join(gh.root, "designs", "*", "runs", "*.tar.gz")
    tarballs = glob.glob(tarball_glob)
    if len(tarballs) == 0:
        print("No tarballs found.", file=sys.stderr)

    for tarball in tarballs:
        # designs/<design_name>/runs/<file>.tar.gz -> design_name
        design_name = os.path.basename(
            os.path.dirname(os.path.dirname(tarball)))
        tarball_name = f"{design_name}.tar.gz"
        final_key = os.path.join(gh.run_id, tarball_name)
        try:
            driver.upload_object(file_path=tarball,
                                 container=container,
                                 object_name=final_key)
            print(f"Uploaded {design_name}'s tarball to {final_key}.")
        except Exception as e:
            print(e, file=sys.stderr)
            print(traceback.format_exc(), file=sys.stderr)
            print(f"Failed to upload tarball for {design_name}, skipping…",
                  file=sys.stderr)
    print("Done.")
def connect_storage(self):
    """Build a GoogleStorageDriver from the configured JSON key file.

    Reads the service-account JSON at ``self.config.key_file`` (with
    ``~`` expanded) and returns an authenticated libcloud driver.
    """
    key_path = os.path.expanduser(self.config.key_file)
    with io.open(key_path, 'r', encoding='utf-8') as handle:
        creds = json.load(handle)
    return GoogleStorageDriver(
        key=creds['client_email'],
        secret=creds['private_key'],
        project=creds['project_id'],
    )
class GCPCloudStorage(CloudStorage):
    """Cloud storage backend backed by Google Cloud Storage via libcloud.

    https://libcloud.readthedocs.io/en/latest/storage/drivers/google_storage.html
    """

    def __init__(self, key, secret, owner=None, date_str=None):
        """Initializes a GCPCloudStorage object

        Args:
            key: service account email
            secret: service account private key string, or a dict containing
                a 'private_key' entry (e.g. a parsed service-account JSON)
            owner: owner metadata (default: {None})
            date_str: date created metadata (default: {None})
        """
        # was super().__init__(self): self is already bound implicitly.
        super().__init__()
        self.key = key
        self.container_name = 'smcs-123'
        # was isJson(secret): a NameError, and the "is JSON string" branch
        # then crashed indexing a str with 'private_key'. Accept either a
        # parsed credentials dict or the raw key string.
        if isinstance(secret, dict):
            self.secret = secret['private_key']
        else:
            self.secret = secret
        self.driver = GoogleStorageDriver(key=self.key, secret=self.secret)
        # was bare driver.get_container(...): NameError, must use self.driver
        self.container = self.driver.get_container(
            container_name=self.container_name)
        self.metaData = {'meta_data': {}}
        self.setMetaData(owner, date_str)

    def setMetaData(self, owner, date_str, object_name=None):
        """Populate self.metaData['meta_data'] with the given values.

        object_name now defaults to None so the two-argument call from
        __init__ is valid (the original signature required three args).
        """
        # was bare `metaData`: NameError — must go through self.
        meta = self.metaData['meta_data']
        if owner:
            meta['owner'] = owner
        if date_str:
            meta['created'] = date_str
        if object_name is not None:
            # NOTE(review): original stored this under the empty-string key
            # ''; 'name' looks like the intent — confirm against consumers.
            meta['name'] = object_name

    def createContainer(self, container_name):
        """Create a new container (bucket) with the given name."""
        self.driver.create_container(container_name)

    def deleteContainer(self, container_name):
        """Delete the named container.

        was: self.driver.delete_container() — libcloud requires the
        Container object, so look it up first.
        """
        container = self.driver.get_container(container_name=container_name)
        self.driver.delete_container(container)

    def listFiles(self, container_name):
        """Return the list of objects in the named container.

        was an empty stub returning None; now returns libcloud Object
        instances (callers that ignored the None result are unaffected).
        """
        container = self.driver.get_container(container_name=container_name)
        return self.driver.list_container_objects(container)

    def uploadFile(self):
        # TODO: not implemented in the original; signature takes no file
        # argument, so the intended contract cannot be inferred here.
        pass

    def uploadFiles(self):
        # TODO: not implemented in the original (see uploadFile).
        pass

    def getFile(self, file_name) -> bytes:
        """Download an object and return its first streamed chunk as bytes.

        NOTE(review): only the first chunk of the download stream is
        returned, matching the original; large objects would be truncated —
        confirm whether a full-object read is intended.
        """
        # was bare `driver`: NameError — must use self.driver.
        obj = self.driver.get_object(self.container_name, file_name)
        stream = self.driver.download_object_as_stream(obj)
        return next(stream)

    def getFiles(self, files):
        """Return a list with getFile()'s result for each name in files.

        was: appended to a local list via a bare getFile() call (NameError)
        and never returned the list.
        """
        return [self.getFile(f) for f in files]

    @staticmethod
    def isJson(myjson):
        """Return True if myjson is a JSON-decodable string/bytes value.

        was missing both @staticmethod and a self parameter, so any
        instance call raised; TypeError (non-string input) is now also
        treated as "not JSON" instead of propagating.
        """
        try:
            json.loads(myjson)
        except (ValueError, TypeError):
            return False
        return True


# FILE_PATH = 'rabbit.svg'
# cls = get_driver(Provider.GOOGLE_STORAGE)
# driver = GoogleStorageDriver(key=client_email, secret=private_key, ...)
# container = driver.get_container(container_name='SMCS') # extra = {'meta_data': {'owner': 'myuser', 'created': '2018-11-14'}} # with open(FILE_PATH, 'rb') as iterator: # obj = driver.upload_object_via_stream(iterator=iterator, # container=container, # object_name='rabbit.svg', # extra=extra)
def _get_driver():
    """Return a GoogleStorageDriver for the configured service account.

    Uses the module-level COMPUTE_ENGINE_SERVICE_ACCOUNT, PEM_FILE and
    PROJECT_NAME settings.
    """
    storage_driver = GoogleStorageDriver(
        key=COMPUTE_ENGINE_SERVICE_ACCOUNT,
        secret=PEM_FILE,
        project=PROJECT_NAME,
    )
    return storage_driver
def get_driver(self):
    """Build a libcloud GoogleStorageDriver from the stored credentials.

    NOTE(review): ``self.credentials_path`` (a file path) is passed as the
    driver secret rather than the private key itself — libcloud accepts a
    key-file path as the secret, but confirm this is intentional given that
    ``client_email`` and ``project_id`` come from ``self.credentials``.
    """
    client_email = self.credentials['client_email']
    project_id = self.credentials["project_id"]
    return GoogleStorageDriver(client_email,
                               self.credentials_path,
                               project=project_id)