Example #1
    def from_genesis(cls,
                     base_db: BaseDB,
                     genesis_params: Dict[str, HeaderParams],
                     genesis_state: AccountState = None) -> 'BaseChain':
        """
        Initializes the Chain from a genesis state.
        """
        genesis_vm_class = cls.get_vm_class_for_block_number(BlockNumber(0))

        account_db = genesis_vm_class.get_state_class().get_account_db_class()(
            base_db,
            BLANK_ROOT_HASH,
        )

        if genesis_state is None:
            genesis_state = {}

        # mutation: apply the genesis state to the freshly created account DB in place
        apply_state_dict(account_db, genesis_state)
        account_db.persist()

        if 'state_root' not in genesis_params:
            # If the genesis state_root was not specified, use the value
            # computed from the initialized state database.
            genesis_params = assoc(genesis_params, 'state_root',
                                   account_db.state_root)
        elif genesis_params['state_root'] != account_db.state_root:
            # If the genesis state_root was specified, validate that it matches
            # the computed state from the initialized state database.
            raise ValidationError(
                "The provided genesis state root does not match the computed "
                "genesis state root.  Got {0}.  Expected {1}".format(
                    account_db.state_root,
                    genesis_params['state_root'],
                ))

        genesis_header = BlockHeader(**genesis_params)
        return cls.from_genesis_header(base_db, genesis_header)
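The branching on 'state_root' above follows a fill-in-or-cross-check pattern: adopt the computed root when none was supplied, otherwise require the supplied root to match. A minimal, dependency-free sketch of that pattern (resolve_state_root and computed_root are illustrative stand-ins, not part of the library):

class ValidationError(Exception):
    pass

def resolve_state_root(genesis_params: dict, computed_root: bytes) -> dict:
    if 'state_root' not in genesis_params:
        # No root supplied: adopt the one computed from the state database.
        return {**genesis_params, 'state_root': computed_root}
    if genesis_params['state_root'] != computed_root:
        # A root was supplied but disagrees with the computed state.
        raise ValidationError("Provided genesis state root does not match the computed one")
    return genesis_params

print(resolve_state_root({'difficulty': 1}, b'\x00' * 32))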
Example #2
    def validate_chain(self,
                       chain: Tuple[BlockHeader, ...],
                       seal_check_random_sample_rate: int = 1) -> None:
        parent = self.chaindb.get_block_header_by_hash(chain[0].parent_hash)
        all_indices = list(range(len(chain)))
        if seal_check_random_sample_rate == 1:
            headers_to_check_seal = set(all_indices)
        else:
            sample_size = len(all_indices) // seal_check_random_sample_rate
            headers_to_check_seal = set(random.sample(all_indices,
                                                      sample_size))

        for i, header in enumerate(chain):
            if header.parent_hash != parent.hash:
                raise ValidationError(
                    "Invalid header chain; {} has parent {}, but expected {}".
                    format(header, header.parent_hash, parent.hash))
            vm_class = self.get_vm_class_for_block_number(header.block_number)
            if i in headers_to_check_seal:
                vm_class.validate_header(header, parent, check_seal=True)
            else:
                vm_class.validate_header(header, parent, check_seal=False)
            parent = header
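The seal-check sampling above trades completeness for speed: with a sample rate of N, only roughly 1/N of the headers get the expensive seal check, chosen at random. A self-contained sketch of just the sampling step (sample_indices is an illustrative helper, not part of the chain API):

import random

def sample_indices(chain_length: int, seal_check_random_sample_rate: int) -> set:
    all_indices = list(range(chain_length))
    if seal_check_random_sample_rate == 1:
        # A rate of 1 means every header gets its seal checked.
        return set(all_indices)
    sample_size = chain_length // seal_check_random_sample_rate
    return set(random.sample(all_indices, sample_size))

print(sorted(sample_indices(10, 1)))  # all ten headers selected
print(sorted(sample_indices(10, 5)))  # roughly two randomly chosen headers selected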
Example #3
def _generate_vm_configuration(
    *fork_start_blocks: ForkStartBlocks,
    dao_start_block: Union[int, bool, None] = None
) -> Generator[VMStartBlock, None, None]:  # noqa: E501
    """
    fork_start_blocks should be 2-tuples of (start_block, fork_name_or_vm_class)

    dao_start_block determines whether the Homestead fork will support the DAO
    fork and if so, at what block.

        - dao_start_block = None: perform the DAO fork at the same block as the
          Homestead start block.
        - dao_start_block = False: do not perform the DAO fork.
        - dao_start_block = <int>: perform the DAO fork at the given block number.
    """
    # if no configuration was passed in, initialize the chain with the *latest*
    # Mainnet VM rules active at block 0.
    if not fork_start_blocks:
        yield (0, last(MAINNET_VMS.values()))
        return

    # Validate that there are no fork names which are not represented in the
    # mainnet chain.
    fork_names = set(fork_name for _, fork_name in fork_start_blocks
                     if isinstance(fork_name, str))
    unknown_forks = sorted(fork_names.difference(MAINNET_VMS.keys()))
    if unknown_forks:
        raise ValidationError(
            "Configuration contains unknown forks: {0}".format(unknown_forks))

    # Validate that *if* an explicit value was passed in for dao_start_block
    # that the Homestead fork rules are part of the VM configuration.
    if dao_start_block is not None and 'homestead' not in fork_names:
        raise ValidationError(
            "The `dao_start_block` parameter is only valid for the 'homestead' "
            "fork rules.  The 'homestead' VM was not included in the provided "
            "fork configuration")

    # If no VM is set to start at block 0, default to the frontier VM
    start_blocks = set(start_block for start_block, _ in fork_start_blocks)
    if 0 not in start_blocks:
        yield 0, MAINNET_VMS['frontier']

    ordered_fork_start_blocks = sorted(fork_start_blocks,
                                       key=operator.itemgetter(0))

    # Iterate over the parameters, generating a tuple of 2-tuples in the form:
    # (start_block, vm_class)
    for start_block, fork in ordered_fork_start_blocks:
        if isinstance(fork, type) and issubclass(fork, BaseVM):
            vm_class = fork
        elif isinstance(fork, str):
            vm_class = MAINNET_VMS[fork]
        else:
            raise Exception("Invariant: unreachable code path")

        if issubclass(vm_class, HomesteadVM):
            if dao_start_block is False:
                yield (start_block, vm_class.configure(support_dao_fork=False))
            elif dao_start_block is None:
                yield (start_block,
                       vm_class.configure(dao_fork_block_number=start_block))
            elif isinstance(dao_start_block, int):
                validate_gte(dao_start_block, start_block)
                yield (start_block,
                       vm_class.configure(
                           dao_fork_block_number=dao_start_block))
            else:
                raise Exception("Invariant: unreachable code path")
        else:
            yield (start_block, vm_class)
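The three dao_start_block cases documented in the docstring map onto three different Homestead configurations. A simplified, runnable illustration of that dispatch (configure_homestead is a hypothetical stand-in for HomesteadVM.configure, returning plain dicts instead of VM classes):

from typing import Union

def configure_homestead(start_block: int,
                        dao_start_block: Union[int, bool, None]) -> dict:
    if dao_start_block is False:
        # Explicitly disabled: never apply the DAO fork rules.
        return {'support_dao_fork': False}
    if dao_start_block is None:
        # Default: the DAO fork activates at the Homestead start block itself.
        return {'support_dao_fork': True, 'dao_fork_block_number': start_block}
    if isinstance(dao_start_block, int):
        # Explicit block number: the DAO fork activates there.
        return {'support_dao_fork': True, 'dao_fork_block_number': dao_start_block}
    raise Exception("Invariant: unreachable code path")

print(configure_homestead(1150000, None))
print(configure_homestead(1150000, False))
print(configure_homestead(1150000, 1920000))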
Example #4
def check_body_size(body):
    if len(body) != COLLATION_SIZE:
        raise ValidationError("{} byte collation body does not match the required size of {} bytes".format(
            len(body),
            COLLATION_SIZE,
        ))
    return body
Example #5
def validate_lt(value, maximum, title="Value"):
    if value >= maximum:
        raise ValidationError(
            "{title} {0} is not less than {1}".format(value, maximum, title=title)
        )
    validate_is_integer(value, title=title)
Example #6
def validate_gt(value, minimum, title="Value"):
    if value <= minimum:
        raise ValidationError(
            "{title} {0} is not greater than {1}".format(value, minimum, title=title)
        )
    validate_is_integer(value, title=title)
Example #7
def validate_is_integer(value, title="Value"):
    if not isinstance(value, int) or isinstance(value, bool):
        raise ValidationError(
            "{title} must be a an integer.  Got: {0}".format(type(value), title=title)
        )
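The extra isinstance(value, bool) clause is there because bool is a subclass of int in Python, so a plain isinstance check would let True and False through. A self-contained usage sketch (a minimal ValidationError is re-declared so the snippet runs on its own):

class ValidationError(Exception):
    pass

def validate_is_integer(value, title="Value"):
    # bool must be excluded explicitly: isinstance(True, int) is True
    if not isinstance(value, int) or isinstance(value, bool):
        raise ValidationError(
            "{title} must be an integer.  Got: {0}".format(type(value), title=title)
        )

validate_is_integer(5)           # passes silently
try:
    validate_is_integer(True)    # rejected: booleans do not count as integers
except ValidationError as exc:
    print(exc)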
Example #8
def validate_is_bytes(value, title="Value"):
    if not isinstance(value, bytes):
        raise ValidationError(
            "{title} must be a byte string.  Got: {0}".format(type(value), title=title)
        )
Example #9
def validate_is_boolean(value, title="Value"):
    if not isinstance(value, bool):
        raise ValidationError(
            "{title} must be an boolean.  Got type: {0}".format(type(value), title=title)
        )
Example #10
def validate_multiple_of(value, multiple_of, title="Value"):
    if not value % multiple_of == 0:
        raise ValidationError(
            "{title} {0} is not a multiple of {1}".format(value, multiple_of, title=title)
        )
Example #11
def validate_canonical_address(value, title="Value"):
    if not isinstance(value, bytes) or not len(value) == 20:
        raise ValidationError(
            "{title} {0} is not a valid canonical address".format(value, title=title)
        )
Example #12
    def begin_work(self) -> None:
        if self._last_start is not None:
            raise ValidationError(
                "Cannot start the ThroughputTracker again without completing it"
            )
        self._last_start = time.perf_counter()
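The None check guards against starting the tracker twice without completing the previous measurement. A stand-alone sketch of the same begin/complete guard (SimpleTracker and end_work are hypothetical stand-ins, not the ThroughputTracker API):

import time

class ValidationError(Exception):
    pass

class SimpleTracker:
    def __init__(self) -> None:
        self._last_start = None

    def begin_work(self) -> None:
        if self._last_start is not None:
            raise ValidationError("Cannot start again without completing the previous run")
        self._last_start = time.perf_counter()

    def end_work(self) -> float:
        # Completing the measurement clears the guard so begin_work can run again.
        elapsed = time.perf_counter() - self._last_start
        self._last_start = None
        return elapsed

tracker = SimpleTracker()
tracker.begin_work()
print(tracker.end_work())  # seconds elapsed between begin_work and end_work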
Example #13
def get_scaled_batches(
    scaled_workers: Dict[Worker, float],
    source: List[Work],
) -> Dict[Worker, List[Work]]:
    """
    Group elements from source into scaled batches. Each element from source will be present
    in exactly one of the batches. Batch lengths always round down, and any elements left over
    from rounding are assigned to the worker with the largest scale.

    :param scaled_workers: mapping from each worker to its scale - scales must be >=0 and not NaN
    :param source: list of elements to group into scaled batches

    :return: mapping from worker to its batch of elements. Batches *may be empty*.
    """
    scales = tuple(scaled_workers.values())
    if len(set(source)) != len(source):
        raise ValidationError("Elements to batch must be unique")
    elif len(scales) == 0:
        raise ValidationError(
            "Must have at least one target to batch elements into")
    elif any(math.isnan(scale) for scale in scales):
        raise ValidationError(
            "All scale values must be a number (ie~ not a NaN)")

    scale_sum = sum(scales)
    if scale_sum == 0:
        normalized_scales = {worker: 1.0 for worker in scaled_workers.keys()}
        total = float(len(scaled_workers))
    elif any(math.isinf(scale) for scale in scales):
        normalized_scales = {
            worker: 1.0 if math.isinf(scale) else 0.0
            for worker, scale in scaled_workers.items()
        }
        total = sum(normalized_scales.values())
    else:
        normalized_scales = scaled_workers
        total = scale_sum

    fractional_scales = {
        worker: scale / total
        for worker, scale in normalized_scales.items()
    }

    num_elements = len(source)
    element_iter = iter(source)
    batches = {}
    for worker, fraction in fractional_scales.items():
        num_to_take = math.floor(fraction * num_elements)
        if num_to_take >= 1:
            batch = list(take(num_to_take, element_iter))
            batches[worker] = batch

    # any elements missed due to rounding error will go to the largest scaled worker
    remaining = list(element_iter)
    if remaining:
        largest_worker = max(fractional_scales.keys(),
                             key=fractional_scales.get)
        if largest_worker in batches:
            batches[largest_worker] += remaining
        else:
            batches[largest_worker] = remaining

    return batches
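The floor-then-remainder arithmetic is easiest to see with concrete numbers: each worker gets floor(fraction * n) elements, and whatever rounding leaves behind goes to the largest-scaled worker. A tiny, self-contained illustration of that arithmetic (plain dict of scales, no Worker objects):

import math

scales = {'fast': 3.0, 'slow': 1.0}
total = sum(scales.values())
n = 10
# Each worker's share rounds down, so the raw sizes may not add up to n.
sizes = {worker: math.floor(scale / total * n) for worker, scale in scales.items()}
leftover = n - sum(sizes.values())
# Any leftover elements are handed to the largest-scaled worker.
largest = max(scales, key=scales.get)
sizes[largest] += leftover
print(sizes)  # {'fast': 8, 'slow': 2}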
Example #14
def validate_homestead_transaction(account_db, transaction):
    if transaction.s > SECPK1_N // 2 or transaction.s == 0:
        raise ValidationError("Invalid signature S value")

    validate_frontier_transaction(account_db, transaction)
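The s-value check enforces the Homestead (EIP-2) low-s rule: a signature is only valid when s is non-zero and lies in the lower half of the secp256k1 curve order, which removes the malleability of flipped-s signatures. A self-contained sketch of just that predicate (SECPK1_N is the secp256k1 curve order):

# secp256k1 curve order
SECPK1_N = 0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141

def is_valid_homestead_s(s: int) -> bool:
    # Valid only if 0 < s <= n // 2 (the low-s half of the range)
    return 0 < s <= SECPK1_N // 2

print(is_valid_homestead_s(1))             # True
print(is_valid_homestead_s(SECPK1_N - 1))  # False: high-s signatures are rejected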