Example #1
def test_partial_proof_serialization_and_deserialization(data):
    content = data.draw(st.binary(min_size=1, max_size=10240))

    slice_start = data.draw(st.integers(min_value=0, max_value=len(content) - 1))
    slice_stop = data.draw(st.integers(min_value=slice_start, max_value=len(content)))
    data_slice = slice(slice_start, slice_stop)

    full_proof = compute_proof(content, sedes=content_sedes)

    slice_length = max(0, data_slice.stop - data_slice.start)

    partial_proof = full_proof.to_partial(
        start_at=data_slice.start, partial_data_length=slice_length,
    )
    assert partial_proof.get_hash_tree_root() == full_proof.get_hash_tree_root()

    serialized = partial_proof.serialize()
    result = Proof.deserialize(serialized)

    validate_proof(result)

    assert result == partial_proof

    partial = result.get_proven_data()
    data_from_partial = partial[data_slice]
    assert data_from_partial == content[data_slice]
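The fuzzy tests in this collection (Examples #1, #2, and #8) draw their inputs through hypothesis's st.data() strategy, so each needs a @given decorator and the corresponding imports, which appear to have been elided from these listings. A minimal sketch of the assumed harness, with a placeholder test name:

from hypothesis import given
from hypothesis import strategies as st

@given(data=st.data())
def test_example_fuzzy(data):
    # `data.draw(...)` pulls values from strategies at run time, exactly
    # as the tests above do with st.binary and st.integers.
    content = data.draw(st.binary(min_size=1, max_size=10240))
    assert len(content) >= 1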
Example #2
def test_ssz_partial_proof_fuzzy(data):
    content = data.draw(st.binary(min_size=1, max_size=10240))
    content_length = len(content)

    slice_start = data.draw(
        st.integers(min_value=0, max_value=max(0, content_length - 1)))
    slice_stop = data.draw(
        st.integers(min_value=slice_start, max_value=content_length))
    data_slice = slice(slice_start, slice_stop)

    full_proof = compute_proof(content, sedes=content_sedes)

    slice_length = max(0, data_slice.stop - data_slice.start)

    partial_proof = full_proof.to_partial(
        start_at=data_slice.start,
        partial_data_length=slice_length,
    )
    assert partial_proof.get_hash_tree_root() == full_proof.get_hash_tree_root()

    validate_proof(partial_proof)
    assert is_proof_valid(partial_proof)

    partial = partial_proof.get_proven_data()
    data_from_partial = partial[data_slice]
    assert data_from_partial == content[data_slice]
Example #3
async def test_content_provider_restricts_max_chunks(
    alice,
    bob,
    alice_alexandria_network,
    bob_alexandria_client,
):
    content = ContentFactory(length=1024 * 10)
    content_key = b"test-content-key"
    content_storage = MemoryContentStorage({content_key: content})
    proof = compute_proof(content, sedes=content_sedes)

    content_provider = ContentProvider(bob_alexandria_client,
                                       (content_storage, ),
                                       max_chunks_per_request=16)
    async with background_trio_service(content_provider):
        # this ensures that the subscription is in place.
        await content_provider.ready()

        with trio.fail_after(2):
            # Use a distinct name so the full proof computed above is
            # not shadowed by the partial proof returned here.
            partial_proof = await alice_alexandria_network.get_content_proof(
                bob.node_id,
                hash_tree_root=proof.get_hash_tree_root(),
                content_key=content_key,
                start_chunk_index=0,
                max_chunks=100,
                endpoint=bob.endpoint,
            )
            validate_proof(partial_proof)
            num_leaf_elements = len(tuple(
                element
                for element in partial_proof.elements
                if len(element.path) == partial_proof.path_bit_length
            ))
            assert num_leaf_elements == 16
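The leaf-count check above can be factored into a small helper. count_leaf_elements is a name invented here; it relies only on attributes the test itself uses (elements, path, path_bit_length):

def count_leaf_elements(proof):
    # Leaf elements are those whose path reaches the full depth of the
    # tree (proof.path_bit_length); shorter paths belong to collapsed
    # intermediate hash nodes.
    return sum(
        1
        for element in proof.elements
        if len(element.path) == proof.path_bit_length
    )

With it, the final assertion reads assert count_leaf_elements(partial_proof) == 16.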
Example #4
async def test_content_provider_serves_large_content(
    alice,
    bob,
    alice_alexandria_network,
    bob_alexandria_client,
):
    content = ContentFactory(length=1024 * 10)
    content_key = b"test-content-key"
    content_storage = MemoryContentStorage({content_key: content})
    proof = compute_proof(content, sedes=content_sedes)

    content_provider = ContentProvider(bob_alexandria_client,
                                       (content_storage, ))
    async with background_trio_service(content_provider):
        async with alice_alexandria_network.client.subscribe(
                ContentMessage) as subscription:
            # this ensures that the subscription is in place.
            await content_provider.ready()

            with trio.fail_after(2):
                content_retrieval_ctx = alice_alexandria_network.retrieve_content(
                    content_key,
                    proof.get_hash_tree_root(),
                )
                async with content_retrieval_ctx as content_retrieval:
                    await content_retrieval.node_queue.add(bob.node_id)
                    result = await content_retrieval.wait_content_proof()

            validate_proof(result)
            result_data = result.get_proven_data()
            assert result_data[0:len(content)] == content

            response = await subscription.receive()
            assert response.message.payload.is_proof is True
Example #5
def test_full_proof_serialization_and_deserialization(content):
    proof = compute_proof(content, sedes=content_sedes)

    serialized = proof.serialize()
    result = Proof.deserialize(serialized)

    assert result.get_hash_tree_root() == proof.get_hash_tree_root()

    validate_proof(result)

    assert result == proof
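Examples #1 and #5 exercise the same serialize/deserialize round-trip property. Under the same imports those tests assume, it can be captured in one hypothetical helper:

def assert_proof_roundtrip(proof):
    # Serialization followed by deserialization must reproduce an
    # equal, valid proof with the same hash tree root.
    restored = Proof.deserialize(proof.serialize())
    validate_proof(restored)
    assert restored == proof
    assert restored.get_hash_tree_root() == proof.get_hash_tree_root()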
Example #6
async def test_content_retrieval_with_griefing_peer_sending_tiny_chunks(
    alice,
    bob,
    alice_alexandria_network,
    bob_alexandria_client,
):
    content = ContentFactory(length=1024 * 10)
    proof = compute_proof(content, sedes=content_sedes)

    content_retrieval = ContentRetrieval(
        alice_alexandria_network,
        content_key=b"test-key",
        hash_tree_root=proof.get_hash_tree_root(),
    )

    async with bob_alexandria_client.subscribe(
            GetContentMessage) as subscription:
        async with trio.open_nursery() as nursery:

            async def _serve():
                async for request in subscription:
                    start_at = request.message.payload.start_chunk_index * 32
                    # We only ever return a proof for a single chunk of data.
                    end_at = min(len(content), start_at + 32)
                    partial = proof.to_partial(start_at, end_at - start_at)
                    payload = partial.serialize()
                    await bob_alexandria_client.send_content(
                        request.sender_node_id,
                        request.sender_endpoint,
                        is_proof=True,
                        payload=payload,
                        request_id=request.request_id,
                    )

            nursery.start_soon(_serve)

            await content_retrieval.node_queue.add(bob.node_id)

            with trio.fail_after(10):
                async with background_trio_service(content_retrieval):
                    with trio.fail_after(5):
                        result = await content_retrieval.wait_content_proof()

                validate_proof(result)
                result_data = result.get_proven_data()
                assert result_data[0:len(content)] == content

            nursery.cancel_scope.cancel()
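Both the test above and the next one convert between chunk indices and byte offsets with a hard-coded factor of 32, matching the 32-byte SSZ chunk size. A sketch of that arithmetic, with names invented here for clarity:

CHUNK_SIZE = 32  # SSZ chunks are 32 bytes

def chunk_range_to_byte_range(start_chunk_index, max_chunks, content_length):
    # Convert a chunked request into the half-open byte range it
    # covers, clamped to the end of the content (as in _serve above).
    start_at = start_chunk_index * CHUNK_SIZE
    end_at = min(content_length, start_at + max_chunks * CHUNK_SIZE)
    return start_at, end_at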
Example #7
async def test_alexandria_network_get_content_proof_api(
    alice,
    bob,
    alice_alexandria_network,
    bob_alexandria_client,
    content_size,
):
    content = ContentFactory(length=content_size)
    proof = compute_proof(content, sedes=content_sedes)

    async with bob_alexandria_client.subscribe(
            GetContentMessage) as subscription:
        async with trio.open_nursery() as nursery:

            async def _serve():
                request = await subscription.receive()
                if content_size > 1024:
                    partial = proof.to_partial(
                        request.message.payload.start_chunk_index * 32,
                        request.message.payload.max_chunks * 32,
                    )
                    payload = partial.serialize()
                    is_proof = True
                else:
                    payload = content
                    is_proof = False
                await bob_alexandria_client.send_content(
                    request.sender_node_id,
                    request.sender_endpoint,
                    is_proof=is_proof,
                    payload=payload,
                    request_id=request.request_id,
                )

            nursery.start_soon(_serve)

            with trio.fail_after(2):
                partial = await alice_alexandria_network.get_content_proof(
                    bob.node_id,
                    hash_tree_root=proof.get_hash_tree_root(),
                    content_key=b"test-content-key",
                    start_chunk_index=0,
                    max_chunks=16,
                )
                validate_proof(partial)
                partial_data = partial.get_proven_data()
                assert partial_data[0:16 * 32] == content[0:16 * 32]
Example #8
def test_ssz_partial_proof_merge_fuzzy(data):
    content = data.draw(st.binary(min_size=1, max_size=10240))

    full_proof = compute_proof(content, sedes=content_sedes)

    slice_a_start = data.draw(
        st.integers(min_value=0, max_value=max(0, len(content) - 1)))
    slice_a_stop = data.draw(
        st.integers(min_value=slice_a_start, max_value=len(content)))
    data_slice_a = slice(slice_a_start, slice_a_stop)
    slice_a_length = max(0, data_slice_a.stop - data_slice_a.start)

    slice_b_start = data.draw(
        st.integers(min_value=0, max_value=max(0, len(content) - 1)))
    slice_b_stop = data.draw(
        st.integers(min_value=slice_b_start, max_value=len(content)))
    data_slice_b = slice(slice_b_start, slice_b_stop)
    slice_b_length = max(0, data_slice_b.stop - data_slice_b.start)

    partial_a = full_proof.to_partial(
        start_at=data_slice_a.start,
        partial_data_length=slice_a_length,
    )
    partial_a_data = partial_a.get_proven_data()

    partial_b = full_proof.to_partial(
        start_at=data_slice_b.start,
        partial_data_length=slice_b_length,
    )
    partial_b_data = partial_b.get_proven_data()

    combined_proof = partial_a.merge(partial_b)
    assert combined_proof.get_hash_tree_root() == full_proof.get_hash_tree_root()

    validate_proof(combined_proof)

    combined_data = combined_proof.get_proven_data()

    assert combined_data[data_slice_a] == partial_a_data[data_slice_a]
    assert combined_data[data_slice_a] == content[data_slice_a]

    assert combined_data[data_slice_b] == partial_b_data[data_slice_b]
    assert combined_data[data_slice_b] == content[data_slice_b]
Example #9
def test_ssz_full_proofs(content):
    expected_hash_tree_root = get_hash_tree_root(content, sedes=content_sedes)
    proof = compute_proof(content, sedes=content_sedes)

    validate_proof(proof)
    assert is_proof_valid(proof)
    assert proof.get_hash_tree_root() == expected_hash_tree_root

    proven_data_segments = proof.get_proven_data_segments()

    assert len(proven_data_segments) == 1
    start_index, proven_data = proven_data_segments[0]
    assert start_index == 0
    assert proven_data == content

    proven_data = proof.get_proven_data()
    assert proven_data[0:len(content)] == content
Example #10
def test_ssz_partial_proof_construction(content, data_slice):
    full_proof = compute_proof(content, sedes=short_content_sedes)

    slice_length = data_slice.stop - data_slice.start

    partial_proof = full_proof.to_partial(
        start_at=data_slice.start,
        partial_data_length=slice_length,
    )
    assert partial_proof.get_hash_tree_root() == full_proof.get_hash_tree_root()

    validate_proof(partial_proof)
    assert is_proof_valid(partial_proof)

    partial = partial_proof.get_proven_data()
    data_from_partial = partial[data_slice]
    assert data_from_partial == content[data_slice]
Example #11
def test_ssz_partial_proof_merge():
    full_proof = compute_proof(CONTENT_12345, sedes=short_content_sedes)

    proof_a = full_proof.to_partial(0, 64)
    proof_b = full_proof.to_partial(64, 64)

    proof_a_data = proof_a.get_proven_data()
    proof_b_data = proof_b.get_proven_data()

    with pytest.raises(IndexError):
        proof_a_data[64:128]
    with pytest.raises(IndexError):
        proof_b_data[0:64]
    with pytest.raises(IndexError):
        proof_a_data[0:128]
    with pytest.raises(IndexError):
        proof_b_data[0:128]

    combined_proof = proof_a.merge(proof_b)
    validate_proof(combined_proof)

    combined_data = combined_proof.get_proven_data()

    assert combined_data[0:128] == CONTENT_12345[0:128]
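As the pytest.raises blocks above show, the object returned by get_proven_data() raises IndexError for any range the partial proof does not cover, rather than returning padding bytes. A hypothetical convenience wrapper over that behaviour:

def read_if_proven(proven_data, start, stop):
    # Return the requested bytes, or None when the range is not fully
    # covered by this partial proof.
    try:
        return proven_data[start:stop]
    except IndexError:
        return None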
Example #12
    async def _worker(
        self, worker_id: int, send_channel: trio.abc.SendChannel[Proof]
    ) -> None:
        worker_name = f"Worker[{self.content_key.hex()}:{worker_id}]"
        while True:
            async with self.node_queue.reserve() as node_id:
                self.logger.debug("%s: reserved node: %s", worker_name, node_id.hex())
                async with self._segment_queue.reserve() as segment:
                    start_data_index, data_length = segment

                    start_chunk_index = start_data_index // CHUNK_SIZE
                    max_chunks = get_chunk_count_for_data_length(data_length)

                    self.logger.debug(
                        "%s: reserved chunk: start_index=%d  max_chunks=%d",
                        worker_name,
                        start_chunk_index,
                        max_chunks,
                    )

                    try:
                        proof = await self._network.get_content_proof(
                            node_id,
                            hash_tree_root=self.hash_tree_root,
                            content_key=self.content_key,
                            start_chunk_index=start_chunk_index,
                            max_chunks=max_chunks,
                        )
                    except trio.TooSlowError:
                        self.logger.debug(
                            "%s: timeout: node=%s", worker_name, node_id.hex(),
                        )
                        continue

                    try:
                        validate_proof(proof)
                    except ValidationError:
                        self.logger.debug(
                            "%s: removing node for sending invalid proof: node=%s",
                            worker_name,
                            node_id.hex(),
                        )
                        # If a peer gives us an invalid proof, remove them
                        # from rotation.
                        await self.node_queue.remove(node_id)
                        continue

                    # Check that the proof contains, at minimum, the first chunk we requested.
                    if not proof.has_chunk(start_chunk_index):
                        self.logger.debug(
                            "%s: removing node for not returning requested chunk: node=%s",
                            worker_name,
                            node_id.hex(),
                        )
                        # If the peer didn't include the start chunk,
                        # remove them from rotation.
                        await self.node_queue.remove(node_id)
                        continue

                    self.logger.debug(
                        "%s: sending partial proof: node=%s",
                        worker_name,
                        node_id.hex(),
                    )
                    await send_channel.send(proof)

                    # Determine if there are any subsections to this
                    # segment that are still missing.
                    remaining_segments = segment.intersection(
                        tuple(proof.get_missing_segments())
                    )

                    # This *timeout* ensures that the workers will not deadlock on
                    # a full `segment_queue`.  In the case where we hit this
                    # timeout we may end up re-requesting a proof that we already
                    # have but that is *ok* and doesn't cause anything to break.
                    with trio.move_on_after(2):
                        # Remove the segment and push any still missing
                        # sub-segments onto the queue
                        for sub_segment in remaining_segments:
                            await self._segment_queue.add(sub_segment)

                        # It is important that the removal happen *after*
                        # we push the sub-segments on, otherwise, if we
                        # timeout after the main segment has been removed
                        # but before the sub-segments have been added,
                        # we'll lose track of the still missing
                        # sub-segments.
                        await self._segment_queue.remove(segment)
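The worker sizes its request with get_chunk_count_for_data_length. A plausible reconstruction, assuming plain ceiling division by the 32-byte CHUNK_SIZE used throughout these examples (this is not the library source):

def get_chunk_count_for_data_length(data_length: int) -> int:
    # Ceiling division: the number of 32-byte chunks needed to cover
    # `data_length` bytes.
    CHUNK_SIZE = 32
    return (data_length + CHUNK_SIZE - 1) // CHUNK_SIZE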
Example #13
def test_proof_get_minimal_proof_elements_mixed_depths():
    r"""
    0:                                X
                                     / \
                                   /     \
    1:                            0       1
                                 / \   (length)
                               /     \
                             /         \
                           /             \
                         /                 \
                       /                     \
                     /                         \
    2:              0                           1
                  /   \                       /   \
                /       \                   /       \
              /           \               /           \
    3:       0             1             0             1
            / \           / \           / \           / \
           /   \         /   \         /   \         /   \
    4:    0     1       0     1       X     X       0     1
         / \   / \     / \   / \     / \   / \     / \   / \
    5:  0   1 X   X   0   1 0   1   X   X X   X   0   1 X   X

    Nodes marked with `X` have already been collapsed

    Tests:
    A: |<--------->|
    B:                             |<----------------------->|
    """
    full_proof = compute_proof(CONTENT_512, sedes=short_content_sedes)

    section_a = full_proof.get_elements_under(p(0, 0, 0, 0))
    section_b = full_proof.get_minimal_proof_elements(2, 2)
    section_c = full_proof.get_elements_under(p(0, 0, 1))
    section_d = full_proof.get_minimal_proof_elements(8, 4)
    section_e = full_proof.get_elements_under(p(0, 1, 1, 0))
    section_f = full_proof.get_minimal_proof_elements(14, 2)
    section_g = (full_proof.get_element((True,)),)

    sparse_elements = sum(
        (
            section_a,
            section_b,
            section_c,
            section_d,
            section_e,
            section_f,
            section_g,
        ),
        (),
    )
    sparse_proof = Proof(
        elements=sparse_elements,
        sedes=short_content_sedes,
    )
    validate_proof(sparse_proof)

    # spans nodes at different levels
    elements_a = sparse_proof.get_minimal_proof_elements(0, 4)
    assert len(elements_a) == 1
    assert elements_a[0].path == p(0, 0, 0)
    assert elements_a[0].value == hash_eth2(
        hash_eth2(CONTENT_512[0:64]) + hash_eth2(CONTENT_512[64:128]))

    elements_b = sparse_proof.get_minimal_proof_elements(8, 8)
    assert len(elements_b) == 1
    assert elements_b[0].path == p(0, 1)
    hash_010 = hash_eth2(
        hash_eth2(CONTENT_512[256:320]) + hash_eth2(CONTENT_512[320:384]))
    hash_011 = hash_eth2(
        hash_eth2(CONTENT_512[384:448]) + hash_eth2(CONTENT_512[448:512]))
    assert elements_b[0].value == hash_eth2(hash_010 + hash_011)
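The p(...) calls above build tree paths. Judging from full_proof.get_element((True,)), a path is a tuple of booleans, one left/right bit per level below the root, so p is presumably a thin converter along these lines (a reconstruction, not the test module's source):

def p(*crumbs: int):
    # Convert 0/1 arguments into the tuple-of-bools path representation
    # that Proof.get_element expects.
    return tuple(bool(crumb) for crumb in crumbs)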