def benchmark_full_construction(
    sizes: Tuple[Tuple[str, int, int], ...],
) -> None:
    """Benchmark proof-construction throughput for several content sizes.

    ``sizes`` is a tuple of ``(name, content_size, iteration_count)``
    entries; results are logged as a text table.
    """
    # Build one deterministic content buffer large enough for the biggest
    # requested size; each benchmark slices its prefix from it.
    largest_size = max(size for _, size, _ in sizes)
    chunk_count = (largest_size + 31) // 32
    base_content = b"".join(
        hashlib.sha256(i.to_bytes(32, "big")).digest() for i in range(chunk_count)
    )

    proof_performances: List[Tuple[float, int]] = []

    for _, size, iteration_count in sizes:
        content = base_content[:size]
        started_at = time.monotonic()
        for _ in range(iteration_count):
            compute_proof(content, sedes=content_sedes)
        proof_performances.append((time.monotonic() - started_at, iteration_count))

    proofs_per_second = tuple(
        iterations / elapsed for elapsed, iterations in proof_performances
    )

    table_header = ("name", "size", "elapsed", "proofs/sec", "iterations")
    table_rows = tuple(
        (name, size, elapsed, rate, iteration_count)
        for (name, size, iteration_count), (elapsed, _), rate in zip(
            sizes, proof_performances, proofs_per_second
        )
    )

    table = texttable.Texttable()
    table.set_cols_align(("l", "r", "r", "r", "r"))
    table.header(table_header)
    table.add_rows(table_rows, header=False)

    logger.info("\n######################################")
    logger.info("benchmark: Proof Generation Speed")
    logger.info("######################################\n")
    logger.info(table.draw())
def test_ssz_partial_proof_fuzzy(data):
    """Fuzz partial-proof construction over random content and slices."""
    content = data.draw(st.binary(min_size=1, max_size=10240))
    content_length = len(content)

    start = data.draw(
        st.integers(min_value=0, max_value=max(0, content_length - 1))
    )
    stop = data.draw(st.integers(min_value=start, max_value=content_length))
    data_slice = slice(start, stop)

    full_proof = compute_proof(content, sedes=content_sedes)

    # NOTE(review): the requested length is one byte short of the slice;
    # the proven data still covers the full slice below — presumably due
    # to chunk alignment of the proof. Confirm against `to_partial`.
    slice_length = max(0, data_slice.stop - data_slice.start - 1)

    partial_proof = full_proof.to_partial(
        start_at=data_slice.start,
        partial_data_length=slice_length,
    )
    assert partial_proof.get_hash_tree_root() == full_proof.get_hash_tree_root()

    validate_proof(partial_proof)
    assert is_proof_valid(partial_proof)

    partial = partial_proof.get_proven_data()
    assert partial[data_slice] == content[data_slice]
def test_partial_proof_serialization_and_deserialization(data):
    """A fuzzed partial proof round-trips through serialize/deserialize."""
    content = data.draw(st.binary(min_size=1, max_size=10240))
    start = data.draw(st.integers(min_value=0, max_value=len(content) - 1))
    stop = data.draw(st.integers(min_value=start, max_value=len(content)))
    data_slice = slice(start, stop)

    full_proof = compute_proof(content, sedes=content_sedes)

    partial_proof = full_proof.to_partial(
        start_at=data_slice.start,
        partial_data_length=max(0, data_slice.stop - data_slice.start),
    )
    assert partial_proof.get_hash_tree_root() == full_proof.get_hash_tree_root()

    serialized = partial_proof.serialize()
    result = Proof.deserialize(serialized)
    validate_proof(result)
    assert result == partial_proof

    # The deserialized proof still proves the originally requested slice.
    partial = result.get_proven_data()
    assert partial[data_slice] == content[data_slice]
async def test_advertisement_collector_acks_false_if_advertisements_already_known(
    alice,
    bob,
    alice_alexandria_client,
    bob_alexandria_network,
    autojump_clock,
):
    """An already-known advertisement is acked with ``False`` and emits no event."""
    content = ContentFactory(2048)
    proof = compute_proof(content, sedes=content_sedes)
    advertisement = AdvertisementFactory(
        private_key=bob.private_key,
        hash_tree_root=proof.get_hash_tree_root(),
    )
    bob_alexandria_network.commons_content_storage.set_content(
        advertisement.content_key,
        content,
    )
    # Pre-seed bob's local db so the incoming advertisement is already known.
    bob_alexandria_network.local_advertisement_db.add(advertisement)

    # The `new_advertisement` event must never fire for a known ad, hence
    # the expected timeout around `subscribe_and_wait`.
    with pytest.raises(trio.TooSlowError):
        with trio.fail_after(10):
            collector = bob_alexandria_network.advertisement_collector
            async with collector.new_advertisement.subscribe_and_wait():  # noqa: E501
                ack_message = await alice_alexandria_client.advertise(
                    bob.node_id,
                    bob.endpoint,
                    advertisements=(advertisement,),
                )
                assert len(ack_message.payload.acked) == 1
                assert ack_message.payload.acked[0] is False
async def test_advertisement_collector_handle_existing_valid_remote_advertisement(
    alice,
    bob,
    alice_alexandria_network,
    bob_alexandria_network,
    autojump_clock,
):
    """Handling an already-known remote advertisement does not re-emit the event."""
    content = ContentFactory(2048)
    proof = compute_proof(content, sedes=content_sedes)
    ad_collector = alice_alexandria_network.advertisement_collector

    advertisement = AdvertisementFactory(
        private_key=bob.private_key,
        hash_tree_root=proof.get_hash_tree_root(),
    )
    bob_alexandria_network.commons_content_storage.set_content(
        advertisement.content_key,
        content,
    )

    # Pre-seed alice's remote db so the advertisement is already known.
    alice_alexandria_network.remote_advertisement_db.add(advertisement)
    assert alice_alexandria_network.remote_advertisement_db.exists(advertisement)

    async with ad_collector.new_advertisement.subscribe() as subscription:
        with trio.fail_after(5):
            await ad_collector.handle_advertisement(advertisement)

        # No `new_advertisement` event should fire for a known advertisement.
        with pytest.raises(trio.TooSlowError):
            with trio.fail_after(5):
                await subscription.receive()

    assert alice_alexandria_network.remote_advertisement_db.exists(advertisement)
async def test_advertisement_collector_handle_new_valid_remote_advertisement(
    alice,
    bob,
    alice_alexandria_network,
    bob_alexandria_network,
):
    """A previously unknown valid remote advertisement is stored and emits the event."""
    content = ContentFactory(2048)
    proof = compute_proof(content, sedes=content_sedes)
    ad_collector = alice_alexandria_network.advertisement_collector

    advertisement = AdvertisementFactory(
        private_key=bob.private_key,
        hash_tree_root=proof.get_hash_tree_root(),
    )
    bob_alexandria_network.commons_content_storage.set_content(
        advertisement.content_key,
        content,
    )

    # Sanity check: the advertisement is not yet known to alice.
    assert not alice_alexandria_network.remote_advertisement_db.exists(advertisement)

    with trio.fail_after(5):
        async with ad_collector.new_advertisement.subscribe_and_wait():
            await ad_collector.handle_advertisement(advertisement)

    assert alice_alexandria_network.remote_advertisement_db.exists(advertisement)
def benchmark_serialization(
    benchmark_name: str,
    content: bytes,
    segments: Tuple[Tuple[str, Tuple[int, int]], ...],
) -> None:
    """Log a table of serialized partial-proof sizes for the given segments.

    ``segments`` is a tuple of ``(name, (start_at, partial_data_length))``
    entries describing which slices of ``content`` to prove.
    """
    full_proof = compute_proof(content, sedes=content_sedes)

    partials = tuple(
        full_proof.to_partial(start_at=start_at, partial_data_length=length)
        for _, (start_at, length) in segments
    )
    serialized_sizes = tuple(len(partial.serialize()) for partial in partials)

    table_header = ("name", "start", "end", "length", "size")
    table_rows = tuple(
        (name, start_at, start_at + length, length, size)
        for (name, (start_at, length)), size in zip(segments, serialized_sizes)
    )

    table = texttable.Texttable()
    table.set_cols_align(("l", "r", "r", "r", "r"))
    table.header(table_header)
    table.add_rows(table_rows, header=False)

    logger.info("\n##########################")
    logger.info(f"benchmark: {benchmark_name}")
    logger.info("##########################\n")
    logger.info(table.draw())
async def test_content_provider_restricts_max_chunks(
    alice,
    bob,
    alice_alexandria_network,
    bob_alexandria_client,
):
    """The provider caps the number of chunks served per request."""
    content = ContentFactory(length=1024 * 10)
    content_key = b"test-content-key"
    content_storage = MemoryContentStorage({content_key: content})
    proof = compute_proof(content, sedes=content_sedes)

    content_provider = ContentProvider(
        bob_alexandria_client,
        (content_storage,),
        max_chunks_per_request=16,
    )
    async with background_trio_service(content_provider):
        # This ensures that the provider's subscription is in place.
        await content_provider.ready()

        with trio.fail_after(2):
            proof = await alice_alexandria_network.get_content_proof(
                bob.node_id,
                hash_tree_root=proof.get_hash_tree_root(),
                content_key=content_key,
                start_chunk_index=0,
                max_chunks=100,
                endpoint=bob.endpoint,
            )
            validate_proof(proof)
            leaf_elements = tuple(
                element
                for element in proof.elements
                if len(element.path) == proof.path_bit_length
            )
            # 100 chunks were requested but the provider serves at most 16.
            assert len(leaf_elements) == 16
async def test_content_provider_serves_large_content(
    alice,
    bob,
    alice_alexandria_network,
    bob_alexandria_client,
):
    """Large content is served as a proof and reassembles to the original bytes."""
    content = ContentFactory(length=1024 * 10)
    content_key = b"test-content-key"
    content_storage = MemoryContentStorage({content_key: content})
    proof = compute_proof(content, sedes=content_sedes)

    content_provider = ContentProvider(bob_alexandria_client, (content_storage,))
    async with background_trio_service(content_provider):
        async with alice_alexandria_network.client.subscribe(
            ContentMessage
        ) as subscription:
            # This ensures that the provider's subscription is in place.
            await content_provider.ready()

            with trio.fail_after(2):
                content_retrieval_ctx = alice_alexandria_network.retrieve_content(
                    content_key,
                    proof.get_hash_tree_root(),
                )
                async with content_retrieval_ctx as content_retrieval:
                    await content_retrieval.node_queue.add(bob.node_id)
                    result = await content_retrieval.wait_content_proof()

            validate_proof(result)
            assert result.get_proven_data()[0:len(content)] == content

            # Large content must have been transmitted as a proof.
            response = await subscription.receive()
            assert response.message.payload.is_proof is True
def test_proof_get_elements_under_api():
    """``get_elements_under`` returns, in order, the elements beneath a prefix path."""
    content = bytes(range(160))
    proof = compute_proof(content, sedes=short_content_sedes)

    def bits(pattern):
        # "0011" -> (False, False, True, True)
        return tuple(ch == "1" for ch in pattern)

    # All element paths present in this proof, in order.
    paths = tuple(
        bits(pattern)
        for pattern in (
            "00000", "00001", "00010", "00011",
            "00100", "00101", "0011", "01", "1",
        )
    )

    def to_paths(elements):
        return tuple(el.path for el in elements)

    assert to_paths(proof.get_elements_under(bits("0000"))) == paths[0:2]
    assert to_paths(proof.get_elements_under(bits("000"))) == paths[0:4]
    assert to_paths(proof.get_elements_under(bits("00"))) == paths[0:7]
    assert to_paths(proof.get_elements_under(bits("0001"))) == paths[2:4]
    assert to_paths(proof.get_elements_under(bits("001"))) == paths[4:7]
    assert to_paths(proof.get_elements_under(bits("01"))) == (paths[7],)
    assert to_paths(proof.get_elements_under(bits("1"))) == (paths[8],)
def test_proof_get_element_api():
    """``get_element`` finds exact paths and raises ``IndexError`` for absent ones."""
    content = bytes(range(160))
    proof = compute_proof(content, sedes=short_content_sedes)

    def bits(pattern):
        # "0011" -> (False, False, True, True)
        return tuple(ch == "1" for ch in pattern)

    # Every path present in the proof round-trips through get_element.
    for pattern in (
        "00000", "00001", "00010", "00011",
        "00100", "00101", "0011", "01", "1",
    ):
        path = bits(pattern)
        assert proof.get_element(path).path == path

    # Paths not present in the proof raise IndexError.
    for pattern in ("0", "00", "010", "011", "10"):
        with pytest.raises(IndexError):
            proof.get_element(bits(pattern))
def test_full_proof_serialization_and_deserialization(content):
    """A full proof round-trips through serialize/deserialize unchanged."""
    proof = compute_proof(content, sedes=content_sedes)
    result = Proof.deserialize(proof.serialize())
    assert result.get_hash_tree_root() == proof.get_hash_tree_root()
    validate_proof(result)
    assert result == proof
async def test_content_retrieval_with_griefing_peer_sending_tiny_chunks(
    alice,
    bob,
    alice_alexandria_network,
    bob_alexandria_client,
):
    """Retrieval still completes against a peer that serves one chunk at a time."""
    content = ContentFactory(length=1024 * 10)
    proof = compute_proof(content, sedes=content_sedes)
    content_retrieval = ContentRetrieval(
        alice_alexandria_network,
        content_key=b"test-key",
        hash_tree_root=proof.get_hash_tree_root(),
    )

    async with bob_alexandria_client.subscribe(GetContentMessage) as subscription:
        async with trio.open_nursery() as nursery:

            async def _serve():
                async for request in subscription:
                    offset = request.message.payload.start_chunk_index * 32
                    # Grief: only ever return a proof for a single 32-byte
                    # chunk, no matter how many chunks were requested.
                    limit = min(len(content), offset + 32)
                    partial = proof.to_partial(offset, limit - offset)
                    await bob_alexandria_client.send_content(
                        request.sender_node_id,
                        request.sender_endpoint,
                        is_proof=True,
                        payload=partial.serialize(),
                        request_id=request.request_id,
                    )

            nursery.start_soon(_serve)

            await content_retrieval.node_queue.add(bob.node_id)

            with trio.fail_after(10):
                async with background_trio_service(content_retrieval):
                    with trio.fail_after(5):
                        result = await content_retrieval.wait_content_proof()

            validate_proof(result)
            assert result.get_proven_data()[0:len(content)] == content

            nursery.cancel_scope.cancel()
async def test_alexandria_network_get_content_proof_api(
    alice,
    bob,
    alice_alexandria_network,
    bob_alexandria_client,
    content_size,
):
    """``get_content_proof`` returns a validated partial proof for the requested chunks."""
    content = ContentFactory(length=content_size)
    proof = compute_proof(content, sedes=content_sedes)

    async with bob_alexandria_client.subscribe(GetContentMessage) as subscription:
        async with trio.open_nursery() as nursery:

            async def _serve():
                request = await subscription.receive()
                if content_size > 1024:
                    # Large content is served as a partial proof.
                    partial_response = proof.to_partial(
                        request.message.payload.start_chunk_index * 32,
                        request.message.payload.max_chunks * 32,
                    )
                    payload = partial_response.serialize()
                    is_proof = True
                else:
                    # Small content is sent raw.
                    payload = content
                    is_proof = False
                await bob_alexandria_client.send_content(
                    request.sender_node_id,
                    request.sender_endpoint,
                    is_proof=is_proof,
                    payload=payload,
                    request_id=request.request_id,
                )

            nursery.start_soon(_serve)

            with trio.fail_after(2):
                partial = await alice_alexandria_network.get_content_proof(
                    bob.node_id,
                    hash_tree_root=proof.get_hash_tree_root(),
                    content_key=b"test-content-key",
                    start_chunk_index=0,
                    max_chunks=16,
                )
                validate_proof(partial)
                partial_data = partial.get_proven_data()
                assert partial_data[0:16 * 32] == content[0:16 * 32]
def test_ssz_proof_get_missing_segments_last_chunk_not_full():
    """Missing-segment lengths respect a final chunk that is not 32-byte full."""
    content = ContentFactory(180)  # 12 bytes short of a full chunk boundary
    full_proof = compute_proof(content, sedes=short_content_sedes)
    assert tuple(full_proof.get_missing_segments()) == ()

    partial = full_proof.to_partial(64, 64)
    missing_segments = tuple(partial.get_missing_segments())
    assert len(missing_segments) == 2
    segment_a, segment_b = missing_segments

    assert segment_a.start_at == 0
    assert segment_a.length == 64
    assert segment_b.start_at == 128
    # Only 52 bytes of the 180-byte content remain past offset 128.
    assert segment_b.length == 52
def test_ssz_full_proofs(content):
    """A full proof is valid, matches the hash tree root, and proves all content."""
    expected_root = get_hash_tree_root(content, sedes=content_sedes)
    proof = compute_proof(content, sedes=content_sedes)

    validate_proof(proof)
    assert is_proof_valid(proof)
    assert proof.get_hash_tree_root() == expected_root

    # A full proof has exactly one proven segment: the whole content.
    segments = proof.get_proven_data_segments()
    assert len(segments) == 1
    start_index, proven_segment = segments[0]
    assert start_index == 0
    assert proven_segment == content

    proven_data = proof.get_proven_data()
    assert proven_data[0:len(content)] == content
def test_ssz_proof_get_missing_segments_only_head():
    """A tail-only partial proof reports a single missing head segment."""
    content = ContentFactory(512)
    full_proof = compute_proof(content, sedes=short_content_sedes)
    assert tuple(full_proof.get_missing_segments()) == ()

    partial = full_proof.to_partial(160, 352)
    missing_segments = tuple(partial.get_missing_segments())
    assert len(missing_segments) == 1
    (segment,) = missing_segments

    assert segment.start_at == 0
    # The length is 128 instead of 160 because the segment just before the
    # partial boundary gets included as part of the proof since the proof
    # starts in the middle of two sibling leaf nodes.
    assert segment.length == 128
def test_ssz_partial_proof_merge_fuzzy(data):
    """Merging two randomly chosen partial proofs preserves both proven ranges."""
    content = data.draw(st.binary(min_size=1, max_size=10240))
    full_proof = compute_proof(content, sedes=content_sedes)

    def draw_slice():
        # Draw a (start, stop) pair within the content bounds.
        start = data.draw(
            st.integers(min_value=0, max_value=max(0, len(content) - 1))
        )
        stop = data.draw(st.integers(min_value=start, max_value=len(content)))
        return slice(start, stop)

    data_slice_a = draw_slice()
    length_a = max(0, data_slice_a.stop - data_slice_a.start - 1)

    data_slice_b = draw_slice()
    length_b = max(0, data_slice_b.stop - data_slice_b.start - 1)

    partial_a = full_proof.to_partial(
        start_at=data_slice_a.start,
        partial_data_length=length_a,
    )
    partial_a_data = partial_a.get_proven_data()

    partial_b = full_proof.to_partial(
        start_at=data_slice_b.start,
        partial_data_length=length_b,
    )
    partial_b_data = partial_b.get_proven_data()

    combined_proof = partial_a.merge(partial_b)
    assert combined_proof.get_hash_tree_root() == full_proof.get_hash_tree_root()
    validate_proof(combined_proof)

    # The merged proof proves both original slices.
    combined_data = combined_proof.get_proven_data()
    assert combined_data[data_slice_a] == partial_a_data[data_slice_a]
    assert combined_data[data_slice_a] == content[data_slice_a]
    assert combined_data[data_slice_b] == partial_b_data[data_slice_b]
    assert combined_data[data_slice_b] == content[data_slice_b]
def test_ssz_partial_proof_construction(content, data_slice):
    """A partial proof over ``data_slice`` proves exactly that data."""
    full_proof = compute_proof(content, sedes=short_content_sedes)

    partial_proof = full_proof.to_partial(
        start_at=data_slice.start,
        partial_data_length=data_slice.stop - data_slice.start,
    )
    assert partial_proof.get_hash_tree_root() == full_proof.get_hash_tree_root()

    validate_proof(partial_proof)
    assert is_proof_valid(partial_proof)

    partial = partial_proof.get_proven_data()
    assert partial[data_slice] == content[data_slice]
async def test_alexandria_network_get_content(
    tester,
    alice,
):
    """End-to-end content retrieval across a small Alexandria network."""
    content = ContentFactory(4096)
    proof = compute_proof(content, sedes=content_sedes)
    hash_tree_root = proof.get_hash_tree_root()
    content_key = b"test-key"

    async with AsyncExitStack() as stack:
        networks = await stack.enter_async_context(
            tester.alexandria.network_group(4)
        )

        # Every node advertises the content and stores it locally.
        for network in networks:
            advertisement = Advertisement.create(
                content_key=content_key,
                hash_tree_root=hash_tree_root,
                private_key=network.client.local_private_key,
            )
            network.local_advertisement_db.add(advertisement)
            network.pinned_content_storage.set_content(content_key, content)

        # Give the network some time to interconnect.
        with trio.fail_after(30):
            for _ in range(1000):
                await trio.lowlevel.checkpoint()

        bootnodes = tuple(network.enr_manager.enr for network in networks[:2])
        alice_alexandria_network = await stack.enter_async_context(
            alice.alexandria.network(bootnodes=bootnodes)
        )

        # Give alice some time to interconnect too.
        with trio.fail_after(30):
            for _ in range(1000):
                await trio.lowlevel.checkpoint()

        with trio.fail_after(60):
            result = await alice_alexandria_network.get_content(
                content_key, hash_tree_root=hash_tree_root
            )

        assert result == proof
        assert result.get_content() == content
def test_ssz_proof_get_missing_segments_only_middle():
    """Merged head+tail partial proofs report one missing middle segment."""
    content = ContentFactory(512)
    full_proof = compute_proof(content, sedes=short_content_sedes)
    assert tuple(full_proof.get_missing_segments()) == ()

    head_proof = full_proof.to_partial(0, 160)
    tail_proof = full_proof.to_partial(352, 160)
    partial = head_proof.merge(tail_proof)

    missing_segments = tuple(partial.get_missing_segments())
    assert len(missing_segments) == 1
    (segment,) = missing_segments

    # The segment starts at 192 because the starting proof ends between two
    # sibling nodes, causing an extra leaf node to be included.
    assert segment.start_at == 192
    # The length is 128 instead of 192 because the tail segment starts in
    # the middle of two sibling leaves which causes one extra leaf to be
    # included.
    assert segment.length == 128
async def test_advertisement_collector_validate_remote_fail_custody_proof_check(
    bob,
    alice_alexandria_network,
    bob_alexandria_network,
    autojump_clock,
):
    """Validation fails with a custody-proof error when the advertiser serves
    an insufficient partial proof.

    Bug fix: the original set ``did_serve_initial_proof = True`` directly in
    the nursery body — unconditionally, before ``_respond`` ever ran — which
    made the final assertion vacuous. The flag is now set inside ``_respond``
    (via ``nonlocal``) only after the response has actually been sent.
    """
    content = ContentFactory(2048)
    proof = compute_proof(content, sedes=content_sedes)
    ad_collector = alice_alexandria_network.advertisement_collector

    advertisement = AdvertisementFactory(
        private_key=bob.private_key,
        hash_tree_root=proof.get_hash_tree_root(),
    )

    async with bob_alexandria_network.client.subscribe(
        GetContentMessage
    ) as subscription:
        did_serve_initial_proof = False

        async with trio.open_nursery() as nursery:

            async def _respond():
                nonlocal did_serve_initial_proof
                request = await subscription.receive()
                # Serve a proof covering only the first 64 bytes so the
                # custody check on alice's side fails.
                partial = proof.to_partial(0, 64)
                await bob_alexandria_network.client.send_content(
                    request.sender_node_id,
                    request.sender_endpoint,
                    is_proof=True,
                    payload=partial.serialize(),
                    request_id=request.request_id,
                )
                did_serve_initial_proof = True

            nursery.start_soon(_respond)

            with pytest.raises(ValidationError,
                               match="Proof of custody check failed"):
                await ad_collector.validate_advertisement(advertisement)

            assert did_serve_initial_proof is True
def test_ssz_partial_proof_merge():
    """Two disjoint partial proofs merge into one covering both ranges."""
    full_proof = compute_proof(CONTENT_12345, sedes=short_content_sedes)

    proof_a = full_proof.to_partial(0, 64)
    proof_b = full_proof.to_partial(64, 64)

    proof_a_data = proof_a.get_proven_data()
    proof_b_data = proof_b.get_proven_data()

    # Each partial only proves its own range...
    with pytest.raises(IndexError):
        proof_a_data[64:128]
    with pytest.raises(IndexError):
        proof_b_data[0:64]
    # ...and neither proves the combined range on its own.
    with pytest.raises(IndexError):
        proof_a_data[0:128]
    with pytest.raises(IndexError):
        proof_b_data[0:128]

    combined_proof = proof_a.merge(proof_b)
    validate_proof(combined_proof)

    combined_data = combined_proof.get_proven_data()
    assert combined_data[0:128] == CONTENT_12345[0:128]
def test_full_proof_get_padding_elements_api(content, expected):
    """``get_padding_elements`` returns elements at the expected padding paths."""
    proof = compute_proof(content, sedes=short_content_sedes)
    padding_paths = tuple(
        element.path for element in proof.get_padding_elements()
    )
    assert padding_paths == expected
def test_proof_get_minimal_proof_elements_mixed_depths():
    """
    Build a sparse proof whose elements sit at mixed tree depths — some
    subtrees already collapsed to a single intermediate hash — and check
    that ``get_minimal_proof_elements`` still returns the minimal covering
    elements for ranges spanning those depths.
    """
    full_proof = compute_proof(CONTENT_512, sedes=short_content_sedes)

    # Assemble a sparse proof from a mix of leaf-level sections and
    # pre-collapsed intermediate nodes, plus the length node under (True,).
    sparse_elements = (
        full_proof.get_elements_under(p(0, 0, 0, 0))
        + full_proof.get_minimal_proof_elements(2, 2)
        + full_proof.get_elements_under(p(0, 0, 1))
        + full_proof.get_minimal_proof_elements(8, 4)
        + full_proof.get_elements_under(p(0, 1, 1, 0))
        + full_proof.get_minimal_proof_elements(14, 2)
        + (full_proof.get_element((True,)),)
    )
    sparse_proof = Proof(
        elements=sparse_elements,
        sedes=short_content_sedes,
    )
    validate_proof(sparse_proof)

    # A range spanning nodes at different levels collapses to one element.
    elements_a = sparse_proof.get_minimal_proof_elements(0, 4)
    assert len(elements_a) == 1
    assert elements_a[0].path == p(0, 0, 0)
    assert elements_a[0].value == hash_eth2(
        hash_eth2(CONTENT_512[0:64]) + hash_eth2(CONTENT_512[64:128])
    )

    elements_b = sparse_proof.get_minimal_proof_elements(8, 8)
    assert len(elements_b) == 1
    assert elements_b[0].path == p(0, 1)
    hash_010 = hash_eth2(
        hash_eth2(CONTENT_512[256:320]) + hash_eth2(CONTENT_512[320:384])
    )
    hash_011 = hash_eth2(
        hash_eth2(CONTENT_512[384:448]) + hash_eth2(CONTENT_512[448:512])
    )
    assert elements_b[0].value == hash_eth2(hash_010 + hash_011)
def test_proof_get_minimal_proof_elements_all_leaves():
    """
    Exercise ``get_minimal_proof_elements`` over a fully populated proof of
    CONTENT_512 (16 leaf chunks at depth 5): single chunks, sibling pairs,
    non-sibling pairs, three-chunk spans, and an unaligned eight-chunk span.
    """
    proof = compute_proof(CONTENT_512, sedes=short_content_sedes)

    # A single chunk is returned as the leaf itself.
    (el,) = proof.get_minimal_proof_elements(0, 1)
    assert el.path == p(0, 0, 0, 0, 0)
    assert el.value == CONTENT_512[:32]

    (el,) = proof.get_minimal_proof_elements(1, 1)
    assert el.path == p(0, 0, 0, 0, 1)
    assert el.value == CONTENT_512[32:64]

    (el,) = proof.get_minimal_proof_elements(7, 1)
    assert el.path == p(0, 0, 1, 1, 1)
    assert el.value == CONTENT_512[224:256]

    # A pair of sibling chunks collapses to their parent hash.
    (el,) = proof.get_minimal_proof_elements(0, 2)
    assert el.path == p(0, 0, 0, 0)
    assert el.value == hash_eth2(CONTENT_512[0:64])

    (el,) = proof.get_minimal_proof_elements(4, 2)
    assert el.path == p(0, 0, 1, 0)
    assert el.value == hash_eth2(CONTENT_512[128:192])

    # A pair of non-sibling chunks stays as two separate leaves.
    el_a, el_b = proof.get_minimal_proof_elements(1, 2)
    assert el_a.path == p(0, 0, 0, 0, 1)
    assert el_a.value == CONTENT_512[32:64]
    assert el_b.path == p(0, 0, 0, 1, 0)
    assert el_b.value == CONTENT_512[64:96]

    # Aligned three-chunk span: one collapsed pair plus a trailing leaf.
    el_a, el_b = proof.get_minimal_proof_elements(0, 3)
    assert el_a.path == p(0, 0, 0, 0)
    assert el_a.value == hash_eth2(CONTENT_512[0:64])
    assert el_b.path == p(0, 0, 0, 1, 0)
    assert el_b.value == CONTENT_512[64:96]

    # Unaligned three-chunk span: a leading leaf plus one collapsed pair.
    el_a, el_b = proof.get_minimal_proof_elements(1, 3)
    assert el_a.path == p(0, 0, 0, 0, 1)
    assert el_a.value == CONTENT_512[32:64]
    assert el_b.path == p(0, 0, 0, 1)
    assert el_b.value == hash_eth2(CONTENT_512[64:128])

    # Unaligned span of 8 chunks: collapses to four elements of mixed depth.
    el_a, el_b, el_c, el_d = proof.get_minimal_proof_elements(1, 8)
    assert el_a.path == p(0, 0, 0, 0, 1)
    assert el_a.value == CONTENT_512[32:64]
    assert el_b.path == p(0, 0, 0, 1)
    assert el_b.value == hash_eth2(CONTENT_512[64:128])
    assert el_c.path == p(0, 0, 1)
    assert el_c.value == hash_eth2(
        hash_eth2(CONTENT_512[128:192]) + hash_eth2(CONTENT_512[192:256])
    )
    assert el_d.path == p(0, 1, 0, 0, 0)
    assert el_d.value == CONTENT_512[256:288]
def short_proof():
    """Return a proof over five distinct 32-byte chunks (0x01..0x05)."""
    data = b"".join(bytes([i]) * 32 for i in range(1, 6))
    return compute_proof(data, sedes=short_content_sedes)
def test_proof_get_first_padding_chunk_path_api(content, expected):
    """The first padding chunk path matches the parametrized expectation."""
    proof = compute_proof(content, sedes=short_content_sedes)
    assert proof.get_first_padding_chunk_path() == expected
def test_proof_get_elements_api():
    """``get_elements`` returns elements in the half-open range
    ``[left, right)``, with ``right_inclusive=True`` closing the right end.
    """
    content = bytes(range(160))
    proof = compute_proof(content, sedes=short_content_sedes)

    def bits(pattern):
        # "0011" -> (False, False, True, True)
        return tuple(ch == "1" for ch in pattern)

    # All element paths present in this proof, in order.
    paths = tuple(
        bits(pattern)
        for pattern in (
            "00000", "00001", "00010", "00011",
            "00100", "00101", "0011", "01", "1",
        )
    )
    path_0, path_1, path_2 = paths[0], paths[1], paths[2]

    def to_paths(elements):
        return tuple(el.path for el in elements)

    # No bounds: everything.
    assert to_paths(proof.get_elements()) == paths

    # Right bound only.
    assert to_paths(proof.get_elements(right=path_0)) == ()
    assert to_paths(
        proof.get_elements(right=path_0, right_inclusive=True)
    ) == (path_0,)

    # Degenerate range [path_0, path_0).
    assert to_paths(proof.get_elements(left=path_0, right=path_0)) == ()
    assert to_paths(
        proof.get_elements(left=path_0, right=path_0, right_inclusive=True)
    ) == (path_0,)

    assert to_paths(proof.get_elements(right=path_1)) == (path_0,)
    assert to_paths(
        proof.get_elements(right=path_1, right_inclusive=True)
    ) == (path_0, path_1)

    # A left bound that is a prefix of the first path behaves the same.
    assert to_paths(proof.get_elements(left=(False,), right=path_1)) == (path_0,)
    assert to_paths(
        proof.get_elements(left=(False,), right=path_1, right_inclusive=True)
    ) == (path_0, path_1)

    assert to_paths(proof.get_elements(left=path_0, right=path_1)) == (path_0,)
    assert to_paths(
        proof.get_elements(left=path_0, right=path_1, right_inclusive=True)
    ) == (path_0, path_1)

    assert to_paths(proof.get_elements(left=path_1, right=path_1)) == ()
    assert to_paths(
        proof.get_elements(left=path_1, right=path_1, right_inclusive=True)
    ) == (path_1,)

    assert to_paths(
        proof.get_elements(left=path_0, right=path_2)
    ) == (path_0, path_1)
    assert to_paths(proof.get_elements(left=path_1, right=path_2)) == (path_1,)

    # Prefix-path bounds select whole subtrees.
    assert to_paths(
        proof.get_elements(left=p(0, 0, 0, 0), right=p(0, 0, 0, 1))
    ) == (path_0, path_1)
    assert to_paths(
        proof.get_elements(left=p(0, 0, 0, 0), right=p(0, 0, 1, 0))
    ) == (path_0, path_1, path_2, paths[3])

    # Left bound only, and both bounds as prefixes.
    assert to_paths(proof.get_elements(left=p(0, 0, 0, 1))) == paths[2:]
    assert to_paths(
        proof.get_elements(left=p(0, 0, 0, 1), right=p(1))
    ) == paths[2:-1]
def test_proof_get_first_padding_chunk_path_full_proof():
    """A fully packed proof has no padding chunk, so lookup raises IndexError."""
    content = bytes(i % 256 for i in range(512))
    proof = compute_proof(content, sedes=short_content_sedes)
    with pytest.raises(IndexError):
        proof.get_first_padding_chunk_path()