Example #1
    def handle_ATOMIC_BATCH(self, sock: BufferedSocket) -> None:
        # Header: two little-endian uint32s -- the number of key/value pairs
        # to write and the number of keys to delete.
        kv_pair_and_delete_count_data = sock.read_exactly(DOUBLE_LEN_BYTES)
        kv_pair_count, delete_count = struct.unpack(
            '<II', kv_pair_and_delete_count_data)
        total_kv_count = 2 * kv_pair_count

        if kv_pair_count or delete_count:
            # Size table: one uint32 each for every key and value, followed
            # by one uint32 for every key to delete.
            kv_and_delete_sizes_data = sock.read_exactly(
                DOUBLE_LEN_BYTES * kv_pair_count + LEN_BYTES * delete_count)
            fmt_str = '<' + 'I' * (total_kv_count + delete_count)
            kv_and_delete_sizes = struct.unpack(fmt_str,
                                                kv_and_delete_sizes_data)

            kv_sizes = kv_and_delete_sizes[:total_kv_count]
            delete_sizes = kv_and_delete_sizes[total_kv_count:total_kv_count +
                                               delete_count]

            # Apply all writes and deletes as a single atomic batch.
            with self.db.atomic_batch() as batch:
                for key_size, value_size in partition(2, kv_sizes):
                    combined_size = key_size + value_size
                    key_and_value_data = sock.read_exactly(combined_size)
                    key = key_and_value_data[:key_size]
                    value = key_and_value_data[key_size:]
                    batch[key] = value
                for key_size in delete_sizes:
                    key = sock.read_exactly(key_size)
                    del batch[key]

        sock.sendall(SUCCESS_BYTE)
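
A note on the pairing step above: partition is assumed to be toolz.partition (the call signature matches), which regroups a flat sequence into fixed-width tuples, so the alternating key/value sizes pair up cleanly:

from toolz import partition

# (key1_size, value1_size, key2_size, value2_size) -> ((k1, v1), (k2, v2))
kv_sizes = (3, 5, 4, 6)
assert list(partition(2, kv_sizes)) == [(3, 5), (4, 6)]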
Example #2
def hash_layer(child_layer: Sequence[bytes]) -> Tuple[Hash32, ...]:
    if len(child_layer) % 2 != 0:
        raise ValueError("Layer must have an even number of elements")

    child_pairs = partition(2, child_layer)
    parent_layer = tuple(
        hash_eth2(left_child + right_child)
        for left_child, right_child in child_pairs)
    return parent_layer
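
A minimal sketch of the same pairing step, with hashlib.sha256 standing in for hash_eth2 (an assumption; the real function is eth2's hash of the 64-byte concatenation of two children):

import hashlib

from toolz import partition

leaves = tuple(bytes([i]) * 32 for i in range(4))
parents = tuple(
    hashlib.sha256(left + right).digest()  # stand-in for hash_eth2
    for left, right in partition(2, leaves))
assert len(parents) == len(leaves) // 2  # each parent covers two children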
Example #3
    def mset(self, *args: Union[int, Hash32]) -> "HashTree":
        if len(args) % 2 != 0:
            raise TypeError(
                f"mset must be called with an even number of arguments, got {len(args)}"
            )

        evolver = self.evolver()
        for index, value in partition(2, args):
            evolver[index] = value
        return evolver.persistent()
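
The calling convention is positional interleaving: an odd argument count fails the modulo check, and an even count unpacks into (index, value) pairs. A small illustration, assuming toolz's partition:

from toolz import partition

args = (0, b"\xaa" * 32, 1, b"\xbb" * 32)  # index, value, index, value
assert list(partition(2, args)) == [(0, b"\xaa" * 32), (1, b"\xbb" * 32)]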
Example #4
def nibbles_to_bytes(nibbles):
    if any(nibble not in VALID_NIBBLES for nibble in nibbles):
        raise InvalidNibbles(
            "Nibbles contained invalid value.  Must be constrained between [0, 15]"
        )

    if len(nibbles) % 2:
        raise InvalidNibbles("Nibbles must be even in length")

    value = bytes(REVERSE_NIBBLES_LOOKUP[pair] for pair in partition(2, nibbles))
    return value
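
REVERSE_NIBBLES_LOOKUP is assumed to map each (high, low) nibble pair back to its byte; the same packing can be written arithmetically:

from toolz import partition

nibbles = (0xA, 0xB, 0x1, 0x2)
packed = bytes((high << 4) | low for high, low in partition(2, nibbles))
assert packed == b"\xab\x12"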
Example #5
def hash_layer(child_layer: RawHashTreeLayer,
               layer_index: int) -> RawHashTreeLayer:
    if len(child_layer) % 2 == 0:
        padded_child_layer = child_layer
    else:
        padded_child_layer = child_layer.append(ZERO_HASHES[layer_index])

    child_pairs = partition(2, padded_child_layer)
    parent_layer = pvector(
        hash_eth2(left_child + right_child)
        for left_child, right_child in child_pairs)
    return parent_layer
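
Unlike Example #2, an odd-length layer is not an error here: it is first padded with the precomputed zero hash for this depth, which makes the pair count well-defined. A quick illustration, with b"\x00" * 32 standing in for ZERO_HASHES[layer_index]:

from toolz import partition

layer = (b"\xaa" * 32,) * 3       # odd number of children
padded = layer + (b"\x00" * 32,)  # stand-in for ZERO_HASHES[layer_index]
assert len(list(partition(2, padded))) == 2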
Example #6
def _attach_committees_to_block_tree(state, block_tree, committees_by_slot,
                                     config, forking_asymmetry):
    for level, committees in zip(_iter_block_tree_by_slot(block_tree),
                                 committees_by_slot):
        block_count = len(level)
        partitions = partition(block_count, committees)
        for block, committee in zip(_iter_block_level_by_block(level),
                                    partitions):
            if forking_asymmetry and random.choice([True, False]):
                # randomly drop this block out to create fork asymmetry
                continue
            _attach_committee_to_block(block, first(committee))
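
Here partition chunks the flat committee sequence by the number of blocks in the level, so the zip pairs block i with chunk i and each block takes the first committee of its chunk. A sketch of the grouping (names hypothetical):

from toolz import partition

level = ("block_0", "block_1")
committees = ("c0", "c1", "c2", "c3")
chunks = list(partition(len(level), committees))
assert chunks == [("c0", "c1"), ("c2", "c3")]  # one chunk per block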
Example #7
def get_appended_chunks(
        *, appended_elements: Sequence[bytes], element_size: int,
        num_padding_elements: int) -> Generator[Hash32, None, None]:
    """Get the sequence of appended chunks."""
    if len(appended_elements) <= num_padding_elements:
        return

    elements_per_chunk = CHUNK_SIZE // element_size

    chunk_partitioned_elements = partition(
        elements_per_chunk,
        appended_elements[num_padding_elements:],
        pad=b"\x00" * element_size,
    )
    for elements_in_chunk in chunk_partitioned_elements:
        yield Hash32(b"".join(elements_in_chunk))
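
The pad keyword is what keeps the final chunk full-width: toolz's partition fills the last group with the pad element instead of truncating it. For instance:

from toolz import partition

elements = (b"\x01", b"\x02", b"\x03", b"\x04")
chunks = list(partition(3, elements, pad=b"\x00"))
assert chunks == [(b"\x01", b"\x02", b"\x03"), (b"\x04", b"\x00", b"\x00")]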
Example #8
    def _deserialize_stream(self, stream: IO[bytes]) -> Iterable[TDeserialized]:
        if self.element_sedes.is_fixed_sized:
            element_size = self.element_sedes.get_fixed_size()
            data = stream.read()
            if len(data) % element_size != 0:
                raise DeserializationError(
                    f"Invalid length. List is comprised of a fixed size sedes "
                    f"but total serialized data is not an even multiple of the "
                    f"element size. data length: {len(data)}  element size: "
                    f"{element_size}"
                )
            # partition over a bytes object yields tuples of ints, so each
            # segment is converted back to bytes before deserializing
            for segment in partition(element_size, data):
                yield self.element_sedes.deserialize(bytes(segment))
        else:
            stream_zero_loc = stream.tell()
            try:
                first_offset = s_decode_offset(stream)
            except DeserializationError:
                if stream.tell() == stream_zero_loc:
                    # Empty list
                    return
                else:
                    raise

            num_remaining_offset_bytes = first_offset - stream.tell()
            if num_remaining_offset_bytes % OFFSET_SIZE != 0:
                raise DeserializationError(
                    f"Offset bytes was not a multiple of {OFFSET_SIZE}.  Got "
                    f"{num_remaining_offset_bytes}"
                )

            num_remaining_offsets = num_remaining_offset_bytes // OFFSET_SIZE
            tail_offsets = tuple(s_decode_offset(stream) for _ in range(num_remaining_offsets))

            offsets = tuple(cons(first_offset, tail_offsets))

            for left_offset, right_offset in sliding_window(2, offsets):
                element_length = right_offset - left_offset
                element_data = read_exact(element_length, stream)
                yield self.element_sedes.deserialize(element_data)

            # simply reading to the end of the current stream gives us all of the final element data
            final_element_data = stream.read()
            yield self.element_sedes.deserialize(final_element_data)
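
In the variable-size branch, sliding_window(2, offsets) (assumed to be toolz's sliding_window) yields consecutive offset pairs, and each pair delimits one element; the last element has no right offset, which is why it is read to the end of the stream. A minimal illustration:

from toolz import sliding_window

offsets = (8, 13, 21)  # hypothetical byte offsets of three elements
lengths = [right - left for left, right in sliding_window(2, offsets)]
assert lengths == [5, 8]  # the third element's length comes from stream end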
Example #9
def pack(serialized_values: Sequence[bytes]) -> Tuple[Hash32, ...]:
    if len(serialized_values) == 0:
        return (EMPTY_CHUNK, )

    item_size = len(serialized_values[0])
    items_per_chunk = get_items_per_chunk(item_size)

    number_of_items = len(serialized_values)
    number_of_chunks = (number_of_items +
                        (items_per_chunk - 1)) // items_per_chunk

    chunk_partitions = partition(items_per_chunk, serialized_values, pad=b"")
    chunks_unpadded = (b"".join(chunk_partition)
                       for chunk_partition in chunk_partitions)

    full_chunks = tuple(
        Hash32(chunk) for chunk in take(number_of_chunks - 1, chunks_unpadded))
    last_chunk = first(chunks_unpadded)
    if len(tuple(chunks_unpadded)) > 0:
        raise Exception("Invariant: all chunks have been taken")

    return full_chunks + (Hash32(last_chunk.ljust(CHUNK_SIZE, b"\x00")), )
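
Two details worth noting: number_of_chunks is ceiling division written with integer arithmetic, and pad=b"" fills the final partition with empty byte strings so the b"".join is unaffected. For example:

from toolz import partition

number_of_items, items_per_chunk = 5, 2
assert (number_of_items + items_per_chunk - 1) // items_per_chunk == 3

parts = list(partition(2, (b"ab", b"cd", b"ef"), pad=b""))
assert [b"".join(p) for p in parts] == [b"abcd", b"ef"]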