def gs_copy(self, src: str, dest: str, context: str):
    """
    Copy a google storage (gs://) object.

    context must be one of {'local', 'remote'}, which specifies where the
    command should be run.
    When uploading to gs://, the destination gs:// directory does not have to exist.
    When downloading from gs://, the destination parent directory must exist.
    """
    assert context in {'local', 'remote'}
    # Whichever side is the gs:// path drives directory detection and
    # requester-pays checks.
    gs_obj = src if src.startswith('gs://') else dest
    try:
        # gs_obj[5:] strips the leading "gs://"; the first component is the
        # bucket name, the rest is the object path within the bucket.
        components = gs_obj[5:].split('/')
        object_path = '/'.join(components[1:])
        bucket = _getblob_bucket(None, components[0], None)
        blobs = {
            blob.name
            for page in bucket.list_blobs(
                prefix=object_path,
                fields='items/name,nextPageToken').pages
            for blob in page
        }
        if object_path not in blobs:
            # No blob matches the exact path, so gs_obj is a directory prefix
            canine_logging.print("Copying directory:", gs_obj)
            return self.gs_dircp(src, os.path.dirname(dest), context)
    except Exception:
        # Best-effort directory detection: on any failure, log the traceback
        # and fall through to a regular gs_copy below.
        traceback.print_exc()
    command = "gsutil -o GSUtil:check_hashes=if_fast_else_skip -o GSUtil:parallel_composite_upload_threshold=150M {} cp {} {}".format(
        '-u {}'.format(self.project) if self.get_requester_pays(gs_obj) else '',
        src,
        dest
    )
    if context == 'remote':
        # Run gsutil on the remote backend and surface any non-zero exit
        rc, sout, serr = self.backend.invoke(command, True)
        check_call(command, rc, sout, serr)
    else:
        subprocess.check_call(command, shell=True)
def get_user_session():
    """
    Gets an authorized session usable for the Lapdog API.

    Currently not sufficient for register and submit endpoints, which may
    require additional credentials to authenticate buckets.

    Lazily initializes the module-global LapdogToken under the module lock;
    on BadDomain falls back to a dummy token object carrying a legacy OAuth
    session. Also performs a best-effort check of global Lapdog alerts on
    first initialization.
    """
    global _GLOBAL_LD_TOKEN_INTERNAL
    with _GLOBAL_LD_LOCK_INTERNAL:
        if _GLOBAL_LD_TOKEN_INTERNAL is None:
            try:
                _GLOBAL_LD_TOKEN_INTERNAL = LapdogToken()
            except BadDomain:
                print("LapdogToken system does not support non-Broad emails")
                print("Switching to legacy OAuth system")
                _GLOBAL_LD_TOKEN_INTERNAL = lambda x: None  # dummy object
                # BUGFIX: attach the session to the dummy token object.
                # Previously this was set on _GLOBAL_LD_LOCK_INTERNAL (the
                # lock), so the return statement below raised AttributeError
                # on the legacy-OAuth path.
                _GLOBAL_LD_TOKEN_INTERNAL.authorized_session = generate_default_session(
                    scopes=[
                        'profile',
                        'email',
                        'openid',
                        'https://www.googleapis.com/auth/devstorage.read_write',
                    ])
            try:
                # Best-effort: surface any critical global alerts at startup
                from hound.client import _getblob_bucket
                for blob in _getblob_bucket(None, 'lapdog-alerts', None).list_blobs():
                    content = json.loads(blob.download_as_string().decode())
                    if content['type'] == 'critical':
                        # Older alert payloads used 'content' instead of 'text'
                        text = content['text'] if 'text' in content else content['content']
                        print(crayons.red("Critical Alert:"), text)
            except Exception:
                traceback.print_exc()
                warnings.warn(
                    "Unable to check Lapdog global alerts during startup")
    return _GLOBAL_LD_TOKEN_INTERNAL.authorized_session