def test_lazybfs_policy(
    live_server, aiosession, event_loop, source_tree_policy, tmp_requests
):
    """Check the order in which LazyBFS sends SWHIDs to the backend."""
    open(tmp_requests, "w").close()
    api_url = url_for("index", _external=True)
    node_info = MerkleNodeInfo()
    init_merkle_node_info(source_tree_policy, node_info, {"known"})
    bfs_policy = LazyBFS(source_tree_policy, node_info)
    api_client = Client(api_url, aiosession)
    event_loop.run_until_complete(bfs_policy.run(api_client))
    requested_swhids = get_backend_swhids_order(tmp_requests)

    # The root directory must be queried first.
    assert (
        requested_swhids[0]
        == "swh:1:dir:fe8cd7076bef324eb8865f818ef08617879022ce"
    )

    # the second request must contain 3 SWHIDs related to directories and one content
    type_counts = {"dir": 0, "cnt": 0}
    for swhid in requested_swhids[1:5]:
        if CoreSWHID.from_string(swhid).object_type == ObjectType.DIRECTORY:
            type_counts["dir"] += 1
        else:
            type_counts["cnt"] += 1
    assert type_counts["dir"] == 3
    assert type_counts["cnt"] == 1

    # the last swhid must be a content related to the unknown directory
    # "sample-folder-policy/toexclude"
    assert (
        requested_swhids[5]
        == "swh:1:cnt:5f1cfce26640056bed3710cfaf3062a6a326a119"
    )
def test_add_origin(event_loop, live_server, aiosession, source_tree, nodes_data):
    """After add_origin runs, every node's "origin" attribute matches the
    fake origin registered for the source tree's root SWHID."""
    api_url = url_for("index", _external=True)
    init_merkle_node_info(source_tree, nodes_data, {"known", "origin"})
    api_client = Client(api_url, aiosession)
    event_loop.run_until_complete(add_origin(source_tree, nodes_data, api_client))
    expected_origin = fake_origin[str(source_tree.swhid())]
    for _node, attrs in nodes_data.items():
        assert attrs["origin"] == expected_origin
async def test_greedy_bfs_get_nodes_chunks(live_server, aiosession, big_source_tree):
    """GreedyBFS splits the big source tree into exactly two chunks, the
    last element of the second chunk being a content node."""
    api_url = url_for("index", _external=True)
    node_info = MerkleNodeInfo()
    init_merkle_node_info(big_source_tree, node_info, {"known"})
    bfs_policy = GreedyBFS(big_source_tree, node_info)
    api_client = Client(api_url, aiosession)
    tree_size = source_size(big_source_tree)
    chunks = []
    async for node_chunk in bfs_policy.get_nodes_chunks(api_client, tree_size):
        chunks.append(node_chunk)
    assert len(chunks) == 2
    assert chunks[1][-1].object_type == "content"
def test_greedy_bfs_policy(
    live_server, event_loop, aiosession, big_source_tree, tmp_requests
):
    """With GreedyBFS the very last SWHID sent to the backend is a content."""
    open(tmp_requests, "w").close()
    api_url = url_for("index", _external=True)
    node_info = MerkleNodeInfo()
    init_merkle_node_info(big_source_tree, node_info, {"known"})
    bfs_policy = GreedyBFS(big_source_tree, node_info)
    api_client = Client(api_url, aiosession)
    event_loop.run_until_complete(bfs_policy.run(api_client))
    requested_swhids = get_backend_swhids_order(tmp_requests)
    final_swhid = requested_swhids[-1]
    assert CoreSWHID.from_string(final_swhid).object_type == ObjectType.CONTENT
def test_file_priority_policy(
    live_server, aiosession, event_loop, source_tree_policy, tmp_requests
):
    """FilePriority queries contents before directories."""
    open(tmp_requests, "w").close()
    api_url = url_for("index", _external=True)
    node_info = MerkleNodeInfo()
    init_merkle_node_info(source_tree_policy, node_info, {"known"})
    priority_policy = FilePriority(source_tree_policy, node_info)
    api_client = Client(api_url, aiosession)
    event_loop.run_until_complete(priority_policy.run(api_client))
    requested_swhids = get_backend_swhids_order(tmp_requests)

    # The first four requests must all be contents.
    for swhid in requested_swhids[0:4]:
        assert CoreSWHID.from_string(swhid).object_type == ObjectType.CONTENT
    # NOTE(review): index 4 is skipped by the two slices below/above —
    # presumably its object type is not deterministic for this fixture;
    # confirm against the source_tree_policy fixture.
    for swhid in requested_swhids[5:]:
        assert CoreSWHID.from_string(swhid).object_type == ObjectType.DIRECTORY
def test_init_merkle_not_supported_node_info(source_tree):
    """Requesting an unsupported info key must raise."""
    node_info = MerkleNodeInfo()
    with pytest.raises(Exception):
        init_merkle_node_info(source_tree, node_info, {"unsupported_info"})
def test_init_merkle_supported_node_info(source_tree):
    """Supported info keys ("known", "origin") are initialized on every node.

    Bug fix: the original assertion `assert "known" and "origin" in
    node_attrs.keys()` parses as `("known") and ("origin" in ...)`; the
    string literal "known" is always truthy, so only "origin" was ever
    actually checked. Assert each membership separately instead.
    """
    nodes_data = MerkleNodeInfo()
    init_merkle_node_info(source_tree, nodes_data, {"known", "origin"})
    for _, node_attrs in nodes_data.items():
        assert "known" in node_attrs
        assert "origin" in node_attrs