def test_read_keys_from_bucket():
    """Every uploaded key is readable via ``cat``, and the plain
    ``<bucket>/<key>`` path and the explicit ``gcs://`` URL form
    return identical bytes."""
    with gcs_maker(True) as gcs:
        # Plain bucket-relative path returns the original payload.
        for key, payload in files.items():
            assert gcs.cat("/".join([TEST_BUCKET, key])) == payload
        # The gcs:// scheme prefix resolves to the same content.
        assert all(
            gcs.cat("/".join([TEST_BUCKET, key]))
            == gcs.cat("gcs://" + "/".join([TEST_BUCKET, key]))
            for key in files
        )
def test_readline():
    """``readline`` on a binary handle yields the first line including
    its trailing newline, or the entire content when no newline exists."""
    with gcs_maker(True) as gcs:
        # Exercise the plain, CSV, and text fixture payloads alike.
        for key, payload in chain(
            files.items(), csv_files.items(), text_files.items()
        ):
            with gcs.open("/".join([TEST_BUCKET, key]), "rb") as handle:
                # partition() gives head + separator: the separator is
                # b"\n" when present and b"" otherwise — exactly what
                # readline is expected to return for the first line.
                head, newline, _ = payload.partition(b"\n")
                assert handle.readline() == head + newline
def test_readline(token_restore):
    """Check ``readline`` returns the first line (with its newline when
    present), using the ``token_restore`` fixture.

    NOTE(review): this redefines the earlier fixture-less
    ``test_readline`` in the same module, so pytest collects only this
    later definition — confirm whether both are meant to exist.
    """
    with gcs_maker(True) as gcs:
        every_item = chain(
            files.items(), csv_files.items(), text_files.items()
        )
        for key, payload in every_item:
            target = '/'.join([TEST_BUCKET, key])
            with gcs.open(target, 'rb') as handle:
                # head + sep reproduces: first segment plus b'\n' if the
                # payload contains any newline, else just the payload.
                head, sep, _ = payload.partition(b'\n')
                assert handle.readline() == head + sep