def gcs_maker(populate=False):
    """Yield a fresh ``GCSFileSystem`` pointed at an emptied test bucket.

    The bucket is created if needed (HTTP errors from an existing bucket are
    ignored), wiped clean, and — when *populate* is true — loaded with the
    standard test file fixtures. On teardown every object found under the
    bucket is removed on a best-effort basis.
    """
    gcs = GCSFileSystem(TEST_PROJECT, token=GOOGLE_TOKEN)
    gcs.invalidate_cache()
    try:
        try:
            gcs.mkdir(
                TEST_BUCKET, default_acl="authenticatedread", acl="publicReadWrite"
            )
        except gcsfs.utils.HttpError:
            # Bucket already exists; that's fine for the tests.
            pass
        # ensure we're empty.
        gcs.rm(TEST_BUCKET, recursive=True)
        for key in [a, b, c, d]:
            try:
                gcs.rm(key)
            except FileNotFoundError:
                pass
        if populate:
            for fixture_map in [files, csv_files, text_files]:
                for fname, payload in fixture_map.items():
                    with gcs.open(TEST_BUCKET + "/" + fname, "wb") as handle:
                        handle.write(payload)
        gcs.invalidate_cache()
        yield gcs
    finally:
        # Best-effort teardown: remove whatever objects remain.
        for leftover in gcs.find(TEST_BUCKET):
            try:
                gcs.rm(leftover)
            except:  # noqa: E722
                pass
def gcs_maker(populate=False, **kwargs):
    """Yield a ``GCSFileSystem`` on an empty test bucket.

    Parameters
    ----------
    populate : bool
        If true, upload the ``allfiles`` fixtures into the bucket before
        yielding.
    **kwargs
        Extra keyword arguments forwarded to ``GCSFileSystem``.

    Yields
    ------
    GCSFileSystem
        Filesystem with an invalidated listing cache; the bucket's contents
        are deleted again on teardown (best effort).
    """
    gcs = GCSFileSystem(TEST_PROJECT, token=GOOGLE_TOKEN, **kwargs)
    gcs.invalidate_cache()
    try:
        # ensure we're empty.
        try:
            gcs.rm(TEST_BUCKET, recursive=True)
        except FileNotFoundError:
            pass
        try:
            gcs.mkdir(
                TEST_BUCKET, default_acl="authenticatedread", acl="publicReadWrite"
            )
        except Exception:
            # Bucket may already exist or the ACLs may be rejected; the tests
            # only require that the bucket is present.
            pass
        if populate:
            gcs.pipe({TEST_BUCKET + "/" + k: v for k, v in allfiles.items()})
        gcs.invalidate_cache()
        yield gcs
    finally:
        # Best-effort teardown. Fix: was a bare `except:`, which also swallows
        # KeyboardInterrupt/SystemExit during cleanup; Exception is broad
        # enough for any API failure while staying interruptible.
        try:
            gcs.rm(gcs.find(TEST_BUCKET))
        except Exception:
            pass
def gcs_maker(populate=False):
    """Yield a ``GCSFileSystem`` on the test bucket, optionally populated.

    Creates the bucket if needed, removes the known test keys, uploads the
    standard file fixtures when *populate* is true, and deletes everything
    found under the bucket on teardown (best effort).
    """
    gcs = GCSFileSystem(TEST_PROJECT, token=GOOGLE_TOKEN)
    gcs.invalidate_cache()
    try:
        try:
            gcs.mkdir(
                TEST_BUCKET, default_acl="authenticatedread", acl="publicReadWrite"
            )
        except Exception:
            # Fix: was a bare `except:`; Exception still tolerates an
            # already-existing bucket without swallowing KeyboardInterrupt.
            pass
        for k in [a, b, c, d]:
            try:
                gcs.rm(k)
            except FileNotFoundError:
                # Fix: narrowed from a bare `except:` — only a missing key is
                # an expected, ignorable condition here (matches the sibling
                # fixture variant in this file).
                pass
        if populate:
            for flist in [files, csv_files, text_files]:
                for fname, data in flist.items():
                    with gcs.open(TEST_BUCKET + "/" + fname, "wb") as f:
                        f.write(data)
        yield gcs
    finally:
        # Best-effort teardown; Exception (not bare except) keeps cleanup
        # interruptible.
        for f in gcs.find(TEST_BUCKET):
            try:
                gcs.rm(f)
            except Exception:
                pass