async def test_blob_storage_get_blob(
    aiohttp_server: _TestServerFactory, make_client: _MakeClient
) -> None:
    """GET of a single blob returns its stats and streams the exact body.

    Spins up a local aiohttp server that mimics the blob-storage
    GET_OBJECT endpoint, then checks that the client:
      * propagates the ``b3`` tracing header,
      * hits the expected ``/blob/o/{bucket}/{key}`` path,
      * exposes the response metadata as a ``BlobListing`` in ``ret.stats``,
      * streams the body unmodified.
    """
    bucket_name = "foo"
    key = "text.txt"
    mtime1 = datetime.now()
    body = b"W" * 1000

    async def handler(request: web.Request) -> web.StreamResponse:
        # The client must attach the b3 tracing header to every request.
        assert "b3" in request.headers
        assert request.path == f"/blob/o/{bucket_name}/{key}"
        assert request.match_info == {"bucket": bucket_name, "path": key}

        resp = web.StreamResponse(status=200)
        resp.headers.update({"ETag": '"12312908asd"'})
        resp.last_modified = mtime1
        resp.content_length = len(body)
        # FIX: was the invalid MIME type "plain/text"; the correct media
        # type for plain text is "text/plain" (RFC 2046 / IANA registry).
        resp.content_type = "text/plain"
        await resp.prepare(request)
        await resp.write(body)
        return resp

    app = web.Application()
    app.router.add_get(BlobUrlRotes.GET_OBJECT, handler)
    srv = await aiohttp_server(app)

    async with make_client(srv.make_url("/")) as client:
        async with client.blob_storage.get_blob(bucket_name, key=key) as ret:
            # Metadata is derived from the response headers; last_modified
            # has second precision, hence the int() of the timestamp.
            assert ret.stats == BlobListing(
                key=key,
                size=1000,
                modification_time=int(mtime1.timestamp()),
                bucket_name=bucket_name,
            )
            assert await ret.body_stream.read() == body
async def test_blob_storage_list_blobs(
    aiohttp_server: _TestServerFactory, make_client: _MakeClient
) -> None:
    """Non-recursive listing paginates via the continuation token.

    The mock server serves two pages.  With ``max_keys=3`` the client must
    stop after the first page; with ``max_keys=6`` it must follow the
    continuation token, requesting only the 3 remaining entries, and merge
    both pages into a single (blobs, prefixes) result.
    """
    bucket_name = "foo"
    token = "cool_token"
    ts_a = datetime.now()
    ts_b = datetime.now()

    first_page = {
        "contents": [
            {"key": "test.json", "size": 213, "last_modified": ts_a.timestamp()}
        ],
        "common_prefixes": [{"prefix": "empty/"}, {"prefix": "folder1/"}],
        "is_truncated": True,
        "continuation_token": token,
    }
    second_page = {
        "contents": [
            {"key": "test1.txt", "size": 111, "last_modified": ts_a.timestamp()},
            {"key": "test2.txt", "size": 222, "last_modified": ts_b.timestamp()},
        ],
        "common_prefixes": [{"prefix": "folder2/"}],
        "is_truncated": False,
        "continuation_token": None,
    }

    async def handler(request: web.Request) -> web.Response:
        # Tracing header must be present on every request.
        assert "b3" in request.headers
        assert request.path == BlobUrlRotes.LIST_OBJECTS.format(bucket=bucket_name)

        if "continuation_token" in request.query:
            # Follow-up request: the token is echoed back and max_keys is
            # the remainder (6 total - 3 already received = 3).
            assert request.query == {
                "recursive": "false",
                "max_keys": "3",
                "continuation_token": token,
            }
            return web.json_response(second_page)

        # Initial request: either test case (max_keys 3 or 6) lands here.
        assert request.query["recursive"] == "false"
        assert request.query["max_keys"] in ("3", "6")
        return web.json_response(first_page)

    app = web.Application()
    app.router.add_get(BlobUrlRotes.LIST_OBJECTS, handler)
    srv = await aiohttp_server(app)

    async with make_client(srv.make_url("/")) as client:
        ret = await client.blob_storage.list_blobs(bucket_name, max_keys=3)
        assert ret == (
            [
                BlobListing(
                    key="test.json",
                    size=213,
                    modification_time=int(ts_a.timestamp()),
                    bucket_name=bucket_name,
                ),
            ],
            [
                PrefixListing(prefix="empty/", bucket_name=bucket_name),
                PrefixListing(prefix="folder1/", bucket_name=bucket_name),
            ],
        )

    async with make_client(srv.make_url("/")) as client:
        ret = await client.blob_storage.list_blobs(bucket_name, max_keys=6)
        assert ret == (
            [
                BlobListing(
                    key="test.json",
                    size=213,
                    modification_time=int(ts_a.timestamp()),
                    bucket_name=bucket_name,
                ),
                BlobListing(
                    key="test1.txt",
                    size=111,
                    modification_time=int(ts_a.timestamp()),
                    bucket_name=bucket_name,
                ),
                BlobListing(
                    key="test2.txt",
                    size=222,
                    modification_time=int(ts_b.timestamp()),
                    bucket_name=bucket_name,
                ),
            ],
            [
                PrefixListing(prefix="empty/", bucket_name=bucket_name),
                PrefixListing(prefix="folder1/", bucket_name=bucket_name),
                PrefixListing(prefix="folder2/", bucket_name=bucket_name),
            ],
        )
async def test_blob_storage_list_blobs_recursive(
    aiohttp_server: _TestServerFactory, make_client: _MakeClient
) -> None:
    """Recursive listing with a prefix returns flat keys and no prefixes.

    The client is expected to send ``recursive=true``, the default
    ``max_keys=1000`` and the supplied ``prefix``; the (single-page)
    response is mapped to ``BlobListing`` objects with an empty prefix
    list.
    """
    bucket_name = "foo"
    ts_a = datetime.now()
    ts_b = datetime.now()

    page = {
        "contents": [
            {
                "key": "folder1/xxx.txt",
                "size": 1,
                "last_modified": ts_a.timestamp(),
            },
            {
                "key": "folder1/yyy.json",
                "size": 2,
                "last_modified": ts_b.timestamp(),
            },
            {
                "key": "folder2/big_file",
                "size": 120 * 1024 * 1024,
                "last_modified": ts_a.timestamp(),
            },
        ],
        "common_prefixes": [],
        "is_truncated": False,
    }

    async def handler(request: web.Request) -> web.Response:
        # Tracing header must be present.
        assert "b3" in request.headers
        assert request.path == BlobUrlRotes.LIST_OBJECTS.format(bucket=bucket_name)
        assert request.query == {
            "recursive": "true",
            "max_keys": "1000",
            "prefix": "folder",
        }
        return web.json_response(page)

    app = web.Application()
    app.router.add_get(BlobUrlRotes.LIST_OBJECTS, handler)
    srv = await aiohttp_server(app)

    async with make_client(srv.make_url("/")) as client:
        ret = await client.blob_storage.list_blobs(
            bucket_name, recursive=True, prefix="folder"
        )
        expected_blobs = [
            BlobListing(
                key="folder1/xxx.txt",
                size=1,
                modification_time=int(ts_a.timestamp()),
                bucket_name=bucket_name,
            ),
            BlobListing(
                key="folder1/yyy.json",
                size=2,
                modification_time=int(ts_b.timestamp()),
                bucket_name=bucket_name,
            ),
            BlobListing(
                key="folder2/big_file",
                size=120 * 1024 * 1024,
                modification_time=int(ts_a.timestamp()),
                bucket_name=bucket_name,
            ),
        ]
        assert ret == (expected_blobs, [])
class TestBlobFormatter:
    """Tests for the blob-storage listing formatters (simple and long).

    Shared fixtures cover the three listing kinds the formatters accept:
    buckets, blob entries, and "directory" prefixes.
    """

    # Bucket fixtures: one per permission level (MANAGE / READ / WRITE),
    # so the long formatter's permission column shows m / r / w.
    buckets = [
        BucketListing(
            name="neuro-my-bucket",
            creation_time=int(datetime(2018, 1, 1, 3).timestamp()),
            permission=Action.MANAGE,
        ),
        BucketListing(
            name="neuro-public-bucket",
            creation_time=int(datetime(2018, 1, 1, 13, 1, 5).timestamp()),
            permission=Action.READ,
        ),
        BucketListing(
            name="neuro-shared-bucket",
            creation_time=int(datetime(2018, 1, 1, 17, 2, 4).timestamp()),
            permission=Action.WRITE,
        ),
    ]

    # Blob fixtures: sizes chosen to exercise human-readable scaling
    # (1024 and 1_024_001 bytes), a small file, and a zero-size key ending
    # in "/" (a folder marker object).
    blobs = [
        BlobListing(
            bucket_name="neuro-public-bucket",
            key="file1024.txt",
            modification_time=int(datetime(2018, 1, 1, 14, 0, 0).timestamp()),
            size=1024,
        ),
        BlobListing(
            bucket_name="neuro-public-bucket",
            key="file_bigger.txt",
            modification_time=int(datetime(2018, 1, 1).timestamp()),
            size=1_024_001,
        ),
        BlobListing(
            bucket_name="neuro-shared-bucket",
            key="folder2/info.txt",
            modification_time=int(datetime(2018, 1, 2).timestamp()),
            size=240,
        ),
        BlobListing(
            bucket_name="neuro-shared-bucket",
            key="folder2/",
            modification_time=int(datetime(2018, 1, 2).timestamp()),
            size=0,
        ),
    ]

    # Prefix fixtures as produced by a non-recursive listing.
    folders = [
        PrefixListing(bucket_name="neuro-public-bucket", prefix="folder1/"),
        PrefixListing(bucket_name="neuro-shared-bucket", prefix="folder2/"),
    ]

    # Combined input: blobs first, then prefixes, upcast to the common
    # ListResult type the formatters consume.
    list_results: List[ListResult] = (
        cast(List[ListResult], blobs) + cast(List[ListResult], folders)
    )

    def test_simple_formatter(self) -> None:
        # The simple formatter emits one "blob:" URI per entry, for blob
        # and prefix entries alike, preserving input order.
        formatter = SimpleBlobFormatter(color=False)
        assert list(formatter(self.list_results)) == [
            "blob:neuro-public-bucket/file1024.txt",
            "blob:neuro-public-bucket/file_bigger.txt",
            "blob:neuro-shared-bucket/folder2/info.txt",
            "blob:neuro-shared-bucket/folder2/",
            "blob:neuro-public-bucket/folder1/",
            "blob:neuro-shared-bucket/folder2/",
        ]
        # Buckets render as bare bucket URIs.
        assert list(formatter(self.buckets)) == [
            "blob:neuro-my-bucket",
            "blob:neuro-public-bucket",
            "blob:neuro-shared-bucket",
        ]

    def test_long_formatter(self) -> None:
        # Long format for blobs: size column, modification time, URI;
        # prefixes have no size/time, buckets show the permission letter.
        formatter = LongBlobFormatter(human_readable=False, color=False)
        assert list(formatter(self.list_results)) == [
            " 1024 2018-01-01 14:00:00 blob:neuro-public-bucket/file1024.txt",
            " 1024001 2018-01-01 00:00:00 blob:neuro-public-bucket/file_bigger.txt",
            " 240 2018-01-02 00:00:00 blob:neuro-shared-bucket/folder2/info.txt",
            " 0 2018-01-02 00:00:00 blob:neuro-shared-bucket/folder2/",
            " blob:neuro-public-bucket/folder1/",
            " blob:neuro-shared-bucket/folder2/",
        ]
        assert list(formatter(self.buckets)) == [
            "m 2018-01-01 03:00:00 blob:neuro-my-bucket",
            "r 2018-01-01 13:01:05 blob:neuro-public-bucket",
            "w 2018-01-01 17:02:04 blob:neuro-shared-bucket",
        ]
        # human_readable=True scales sizes (1024 -> 1.0K, 1_024_001 ->
        # 1000.0K) but leaves small sizes and bucket lines unchanged.
        formatter = LongBlobFormatter(human_readable=True, color=False)
        assert list(formatter(self.list_results)) == [
            " 1.0K 2018-01-01 14:00:00 blob:neuro-public-bucket/file1024.txt",
            " 1000.0K 2018-01-01 00:00:00 blob:neuro-public-bucket/file_bigger.txt",
            " 240 2018-01-02 00:00:00 blob:neuro-shared-bucket/folder2/info.txt",
            " 0 2018-01-02 00:00:00 blob:neuro-shared-bucket/folder2/",
            " blob:neuro-public-bucket/folder1/",
            " blob:neuro-shared-bucket/folder2/",
        ]
        assert list(formatter(self.buckets)) == [
            "m 2018-01-01 03:00:00 blob:neuro-my-bucket",
            "r 2018-01-01 13:01:05 blob:neuro-public-bucket",
            "w 2018-01-01 17:02:04 blob:neuro-shared-bucket",
        ]

    @pytest.mark.parametrize(
        "formatter",
        [
            (SimpleBlobFormatter(color=False)),
            (LongBlobFormatter(human_readable=False, color=False)),
        ],
    )
    def test_formatter_with_empty_files(self, formatter: BaseBlobFormatter) -> None:
        # Both formatters must yield nothing for an empty listing.
        files: List[LsResult] = []
        assert [] == list(formatter(files))