Example 1
def merkleize(chunks: Sequence[Hash32]) -> Hash32:
    padded_chunks = pad_chunks(chunks)
    number_of_layers = int(math.log2(len(padded_chunks))) + 1

    layers = take(number_of_layers, iterate(hash_layer, padded_chunks))
    root, = last(layers)
    return root
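The pipeline here is the iterate/take/last combination (assuming these come from toolz, as re-exported by eth_utils.toolz): iterate(f, x) lazily yields x, f(x), f(f(x)), ..., take(n, ...) cuts that stream to the expected number of layers, and last keeps only the final, single-element layer. A minimal sketch of the same pattern, with a summing step standing in for the project-specific hash_layer:

from toolz import iterate, last, take

def combine_pairs(layer):
    # stand-in for hash_layer: merge adjacent pairs into the next layer up
    return [layer[i] + layer[i + 1] for i in range(0, len(layer), 2)]

leaves = [1, 2, 3, 4]                             # 4 leaves -> 3 layers
layers = take(3, iterate(combine_pairs, leaves))  # [1, 2, 3, 4], [3, 7], [10]
root, = last(layers)                              # unpack the single-element top layer
assert root == 10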
Example 2
    def get_validators(self, block_height: int) -> List[bytes]:
        if not self._epochs:
            raise ValueError("No epochs have been added yet")
        if block_height > self.max_height:
            raise ValueError(
                f"Validator sets are only known until height {self.max_height}"
            )

        try:
            epoch_start_height = last(
                takewhile(
                    lambda start_height: start_height <= block_height,
                    self._ordered_start_heights,
                ))
        except IndexError:
            raise ValueError(
                f"Block #{block_height} is earlier than the first epoch")
        else:
            epoch = self._epochs[epoch_start_height]
            return epoch.validators
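last(takewhile(...)) over the ascending start heights selects the greatest start height that is still <= block_height, i.e. the epoch the block falls into; if the block predates every known epoch, takewhile yields nothing and toolz's last raises the IndexError handled above. The lookup in isolation, with illustrative heights:

from itertools import takewhile
from toolz import last

ordered_start_heights = [5, 10, 20]   # ascending epoch start heights

# block 12 falls into the epoch that starts at height 10
assert last(takewhile(lambda h: h <= 12, ordered_start_heights)) == 10

# block 3 is earlier than the first epoch: takewhile is empty, last raises IndexError
try:
    last(takewhile(lambda h: h <= 3, ordered_start_heights))
except IndexError:
    print("Block #3 is earlier than the first epoch")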
Example 3
    def _is_relevant(self, epoch: Epoch) -> bool:
        """Check if an epoch should be added to the already known set of epochs.

        An epoch is considered relevant if the preceding epoch does not stem from a later validator
        definition range. For instance, if there's only one known epoch with start height 10 from
        the 5th validator definition range, an epoch starting at height 11 would be relevant if it
        belongs to the 5th or later validator definition range, but not if it belongs to the 4th or
        earlier one.
        """
        earlier_epoch_start_heights = takewhile(
            lambda start_height: start_height <= epoch.start_height,
            self._ordered_start_heights,
        )
        try:
            previous_epoch_start_height = last(earlier_epoch_start_heights)
        except IndexError:
            return True
        else:
            previous_epoch = self._epochs[previous_epoch_start_height]
            return (previous_epoch.validator_definition_index <=
                    epoch.validator_definition_index)
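The rule spelled out in the docstring can be exercised on its own. A minimal sketch (a hypothetical simplification, not the class above) that applies the same comparison to plain start-height/definition-index pairs:

from itertools import takewhile
from toolz import last

known_epochs = {10: 5}            # start height 10 stems from the 5th definition range
ordered_start_heights = [10]

def is_relevant(start_height, definition_index):
    earlier = takewhile(lambda h: h <= start_height, ordered_start_heights)
    try:
        previous_start_height = last(earlier)
    except IndexError:
        return True               # no preceding epoch, so always relevant
    return known_epochs[previous_start_height] <= definition_index

assert is_relevant(11, 5)         # 5th or later definition range: relevant
assert not is_relevant(11, 4)     # preceding epoch stems from a later range: not relevant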
Example 4
def _generate_vm_configuration(
    *fork_start_blocks: ForkStartBlocks,
    dao_start_block: Union[int, bool, None] = None
) -> Generator[VMStartBlock, None, None]:  # noqa: E501
    """
    fork_start_blocks should be 2-tuples of (start_block, fork_name_or_vm_class)

    dao_start_block determines whether the Homestead fork will support the DAO
    fork and if so, at what block.

        - dao_start_block = None: perform the DAO fork at the same block as the
          Homestead start block.
        - dao_start_block = False: do not perform the DAO fork.
        - dao_start_block = <int>: perform the DAO fork at the given block number.
    """
    # if no configuration was passed in, initialize the chain with the *latest*
    # Mainnet VM rules active at block 0.
    if not fork_start_blocks:
        yield (0, last(MAINNET_VMS.values()))
        return

    # Validate that there are no fork names which are not represented in the
    # mainnet chain.
    fork_names = set(fork_name for _, fork_name in fork_start_blocks
                     if isinstance(fork_name, str))
    unknown_forks = sorted(fork_names.difference(MAINNET_VMS.keys()))
    if unknown_forks:
        raise ValidationError(
            "Configuration contains unknown forks: {0}".format(unknown_forks))

    # Validate that *if* an explicit value was passed in for dao_start_block
    # that the Homestead fork rules are part of the VM configuration.
    if dao_start_block is not None and 'homestead' not in fork_names:
        raise ValidationError(
            "The `dao_start_block` parameter is only valid for the 'homestead' "
            "fork rules.  The 'homestead' VM was not included in the provided "
            "fork configuration")

    # If no VM is set to start at block 0, default to the frontier VM
    start_blocks = set(start_block for start_block, _ in fork_start_blocks)
    if 0 not in start_blocks:
        yield 0, MAINNET_VMS['frontier']

    ordered_fork_start_blocks = sorted(fork_start_blocks,
                                       key=operator.itemgetter(0))

    # Iterate over the parameters, generating a tuple of 2-tuples in the form:
    # (start_block, vm_class)
    for start_block, fork in ordered_fork_start_blocks:
        if isinstance(fork, type) and issubclass(fork, VirtualMachineAPI):
            vm_class = fork
        elif isinstance(fork, str):
            vm_class = MAINNET_VMS[fork]
        else:
            raise Exception("Invariant: unreachable code path")

        if issubclass(vm_class, HomesteadVM):
            if dao_start_block is False:
                yield (start_block, vm_class.configure(support_dao_fork=False))
            elif dao_start_block is None:
                yield (start_block,
                       vm_class.configure(_dao_fork_block_number=start_block))
            elif isinstance(dao_start_block, int):
                validate_gte(dao_start_block, start_block)
                yield (start_block,
                       vm_class.configure(
                           _dao_fork_block_number=dao_start_block))
            else:
                raise Exception("Invariant: unreachable code path")
        else:
            yield (start_block, vm_class)
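A hedged usage sketch of the generator above; the fork names match the keys the function itself references, and the block numbers are the well-known mainnet Homestead and DAO fork heights, used purely for illustration:

vm_configuration = tuple(
    _generate_vm_configuration(
        (0, 'frontier'),
        (1150000, 'homestead'),
        dao_start_block=1920000,  # perform the DAO fork at this block
    )
)
# roughly: ((0, <frontier VM>), (1150000, <homestead VM configured with the DAO fork at 1920000>))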
Example 5
    def latest_id(self) -> uuid.UUID:
        """
        Returns the id of the latest changeset
        """
        return last(self.journal_data.keys())
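last works here because dict keys keep insertion order (guaranteed since Python 3.7), so the final key is the id of the most recently recorded changeset. The idiom in isolation, with a plain dict standing in for journal_data:

import uuid
from toolz import last

journal_data = {}                      # stand-in for self.journal_data
journal_data[uuid.uuid4()] = 'first changeset'
latest = uuid.uuid4()
journal_data[latest] = 'second changeset'
assert last(journal_data.keys()) == latest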