Example #1
0
def update_element_in_chunk(original_chunk: Hash32, index: int,
                            element: bytes) -> Hash32:
    """Replace part of a chunk with a given element.

    The chunk is interpreted as a concatenated sequence of equally sized elements. This function
    replaces the element given by its index in the chunk with the given data.

    If the length of the element is zero or not a divisor of the chunk size, a `ValueError` is
    raised. If the index is out of range, an `IndexError` is raised.

    Example:
        >>> update_element_in_chunk(b"aabbcc", 1, b"xx")
        b'aaxxcc'
    """
    element_size = len(element)
    chunk_size = len(original_chunk)

    if element_size == 0:
        raise ValueError(f"Element size is zero")
    if chunk_size % element_size != 0:
        raise ValueError(
            f"Element size is not a divisor of chunk size: {element_size}")
    if not 0 <= index < chunk_size // element_size:
        raise IndexError(
            f"Index out of range for element size {element_size}: {index}")

    first_byte_index = index * element_size
    last_byte_index = first_byte_index + element_size

    prefix = original_chunk[:first_byte_index]
    suffix = original_chunk[last_byte_index:]
    return Hash32(prefix + element + suffix)
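A quick, hedged sanity check of the error branches described in the docstring (assuming the function above is importable; `Hash32` from `eth_typing` behaves like plain bytes at runtime):

chunk = b"aabbcc"
assert update_element_in_chunk(chunk, 0, b"xx") == b"xxbbcc"
assert update_element_in_chunk(chunk, 2, b"zz") == b"aabbzz"

try:
    update_element_in_chunk(chunk, 0, b"")        # zero-length element
except ValueError:
    pass
try:
    update_element_in_chunk(chunk, 0, b"xxxx")    # 4 does not divide 6
except ValueError:
    pass
try:
    update_element_in_chunk(chunk, 3, b"xx")      # valid indices are 0..2
except IndexError:
    pass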
Example #2
0
def _w3_get_block(w3: Web3, *args: Any, **kwargs: Any) -> Eth1Block:
    block_dict = w3.eth.getBlock(*args, **kwargs)
    return Eth1Block(
        block_hash=Hash32(block_dict["hash"]),
        number=BlockNumber(block_dict["number"]),
        timestamp=Timestamp(block_dict["timestamp"]),
    )
Example #3
0
def parse_byhash_uri(parsed: urllib.parse.ParseResult) -> Checkpoint:
    scheme, netloc, query = parsed.scheme, parsed.netloc, parsed.query

    try:
        parsed_query = urllib.parse.parse_qsl(query)
    except ValueError as e:
        raise ValidationError(str(e))

    query_dict = dict(parsed_query)

    # we allow any kind of separator for a nicer UX. e.g. instead of "11487662456884849810705"
    # one can use "114 876 624 568 848 498 107 05" or "11,487,662,456,884,849,810,705". This also
    # allows copying out a value from e.g etherscan.
    score = remove_non_digits(query_dict.get('score', ''))

    parts = PurePosixPath(parsed.path).parts

    if len(parts) != 3 or scheme != 'eth' or netloc != 'block' or not score:
        raise ValidationError('checkpoint string must be of the form '
                              '"eth://block/byhash/<hash>?score=<score>"')

    block_hash = parts[2]

    if not is_block_hash(block_hash):
        raise ValidationError(
            f'Block hash must be valid hex string, got: {block_hash}')

    if not score.isdigit():
        raise ValidationError(
            f'Score (total difficulty) must be an integer, got: {score}')

    return Checkpoint(Hash32(decode_hex(block_hash)), int(score))
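The `remove_non_digits` helper referenced above is not shown; a minimal sketch consistent with the separator comment (the body is an assumption, not the original implementation):

import re

def remove_non_digits(value: str) -> str:
    # Strip everything that is not 0-9 so "11,487,..." and "11 487 ..."
    # normalise to the same digit string before the isdigit() check.
    return re.sub(r"\D", "", value)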
Example #4
0
def parse_byetherscan_uri(parsed: urllib.parse.ParseResult,
                          network_id: int) -> Checkpoint:

    try:
        network = Network(network_id)
    except ValueError:
        raise ValidationError(
            f"Can not resolve checkpoint through Etherscan API"
            f"for network {network_id}. Network not supported")

    try:
        etherscan_api_key = os.environ['TRINITY_ETHERSCAN_API_KEY']
    except KeyError:
        raise RuntimeError(
            "Etherscan API key missing. Assign your Etherscan API key "
            "to the TRINITY_ETHERSCAN_API_KEY environment variable.")

    etherscan_api = Etherscan(etherscan_api_key)

    latest_block_number = etherscan_api.get_latest_block(network)
    checkpoint_block_number = latest_block_number - BLOCKS_FROM_TIP
    checkpoint_block_response = etherscan_api.get_block_by_number(
        checkpoint_block_number, network)
    checkpoint_score = to_int(
        hexstr=checkpoint_block_response['totalDifficulty'])
    checkpoint_hash = checkpoint_block_response['hash']

    return Checkpoint(Hash32(decode_hex(checkpoint_hash)), checkpoint_score)
Example #5
0
 def get_block(self,
               arg: web3.types.BlockIdentifier) -> Optional[Eth1Block]:
     # If `arg` is block number
     if isinstance(arg, int):
         block_time = self._get_block_time(BlockNumber(arg))
         return Eth1Block(
             block_hash=Hash32(int(arg).to_bytes(32, byteorder="big")),
             number=BlockNumber(arg),
             timestamp=Timestamp(block_time),
         )
     # If `arg` is block hash
     elif isinstance(arg, bytes):
         block_hash = Hash32(arg)
         # Why are we interpreting the block hash as a block number here?
         block_number = int.from_bytes(block_hash, byteorder="big")
         latest_block_number = self._get_latest_block_number()
         # Block that's way in the future is presumed to be fake eth1 block in genesis state.
         # Return the block at `start_block_number` in this case.
         # The magic number `100` here stands for distance that's way in the future.
         if block_number > latest_block_number + 100:
             return Eth1Block(
                 block_hash=Hash32(
                     self.start_block_number.to_bytes(32, byteorder="big")),
                 number=BlockNumber(self.start_block_number),
                 timestamp=Timestamp(self.start_block_timestamp),
             )
         block_time = self._get_block_time(BlockNumber(block_number))
         return Eth1Block(
             block_hash=block_hash,
             number=BlockNumber(block_number),
             timestamp=Timestamp(block_time),
         )
     elif arg == "latest":
         latest_block_number = self._get_latest_block_number()
         block_time = self._get_block_time(latest_block_number)
         return Eth1Block(
             block_hash=Hash32(
                 latest_block_number.to_bytes(32, byteorder="big")),
             number=BlockNumber(latest_block_number),
             timestamp=block_time,
         )
     elif isinstance(arg, str):
         raise NotImplementedError(f"get_block({arg!r}) is not implemented")
     else:
         raise TypeError(
             f"Argument {arg!r} to get_block was an unexpected type: {type(arg)}"
         )
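The bytes branch above can reinterpret a block hash as a block number only because this fake provider manufactures hashes as the big-endian encoding of the number, so the two round-trip; a standalone illustration:

number = 12345
fake_hash = number.to_bytes(32, byteorder="big")             # how get_block(int) builds block_hash
assert int.from_bytes(fake_hash, byteorder="big") == number  # how get_block(bytes) recovers it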
Example #6
0
    def _is_node_missing(self, node_hash: Hash32) -> bool:
        if len(node_hash) != 32:
            raise ValidationError(
                f"Must request node by its 32-byte hash: 0x{node_hash.hex()}")

        self.logger.debug2("checking if node 0x%s is present", node_hash.hex())

        return node_hash not in self._db
Example #7
0
    def apply_proper_event(self, event: AttributeDict) -> None:
        event_name = event.event

        if event_name == TRANSFER_EVENT_NAME:
            transfer_hash = compute_transfer_hash(event)
            self.transfer_hashes.add(transfer_hash)
            self.transfer_events[transfer_hash] = event
        elif event_name == CONFIRMATION_EVENT_NAME:
            transfer_hash = Hash32(bytes(event.args.transferHash))
            assert len(transfer_hash) == 32
            self.confirmation_hashes.add(transfer_hash)
        elif event_name == COMPLETION_EVENT_NAME:
            transfer_hash = Hash32(bytes(event.args.transferHash))
            assert len(transfer_hash) == 32
            self.completion_hashes.add(transfer_hash)
        else:
            raise ValueError(f"Got unknown event {event}")
Example #8
0
 def parent_hash(self) -> Optional[Hash32]:
     if self._parent_hash is None:
         if self.block_number == 0:
             return GENESIS_PARENT_HASH
         else:
             return None
     else:
         return Hash32(self._parent_hash)
Example #9
0
 def _get_finalized_head(cls,
                         db: BaseDB,
                         block_class: Type[BaseBeaconBlock]) -> BaseBeaconBlock:
     try:
         finalized_head_root = db[SchemaV1.make_finalized_head_root_lookup_key()]
     except KeyError:
         raise CanonicalHeadNotFound("No finalized head set for this chain")
     return cls._get_block_by_root(db, Hash32(finalized_head_root), block_class)
Example #10
0
async def retrieve_header(w3: Web3, block_number: int) -> BlockHeader:
    logger.debug('Retrieving header #%d', block_number)
    fancy_header = await trio.to_thread.run_sync(w3.eth.getBlock, block_number)
    header = BlockHeader(
        difficulty=fancy_header['difficulty'],
        block_number=fancy_header['number'],
        gas_limit=fancy_header['gasLimit'],
        timestamp=fancy_header['timestamp'],
        coinbase=to_canonical_address(fancy_header['miner']),
        parent_hash=Hash32(fancy_header['parentHash']),
        uncles_hash=Hash32(fancy_header['sha3Uncles']),
        state_root=Hash32(fancy_header['stateRoot']),
        transaction_root=Hash32(fancy_header['transactionsRoot']),
        receipt_root=Hash32(fancy_header['receiptsRoot']),  # type: ignore
        bloom=big_endian_to_int(bytes(fancy_header['logsBloom'])),
        gas_used=fancy_header['gasUsed'],
        extra_data=bytes(fancy_header['extraData']),
        mix_hash=Hash32(fancy_header['mixHash']),
        nonce=bytes(fancy_header['nonce']),
    )
    if header.hash != Hash32(fancy_header['hash']):
        raise ValueError(
            f"Reconstructed header hash does not match expected: "
            f"expected={encode_hex(fancy_header['hash'])}  actual={header.hex_hash}"
        )
    return header
Example #11
0
async def retrieve_header(w3: Web3, block_number: int) -> BlockHeader:
    logger.debug("Retrieving header #%d", block_number)
    w3_header = await trio.to_thread.run_sync(w3.eth.getBlock, block_number)
    header = BlockHeader(
        difficulty=w3_header["difficulty"],
        block_number=w3_header["number"],
        gas_limit=w3_header["gasLimit"],
        timestamp=w3_header["timestamp"],
        coinbase=to_canonical_address(w3_header["miner"]),
        parent_hash=Hash32(w3_header["parentHash"]),
        uncles_hash=Hash32(w3_header["sha3Uncles"]),
        state_root=Hash32(w3_header["stateRoot"]),
        transaction_root=Hash32(w3_header["transactionsRoot"]),
        receipt_root=Hash32(w3_header["receiptsRoot"]),
        bloom=big_endian_to_int(bytes(w3_header["logsBloom"])),
        gas_used=w3_header["gasUsed"],
        extra_data=bytes(w3_header["extraData"]),
        mix_hash=Hash32(w3_header["mixHash"]),
        nonce=bytes(w3_header["nonce"]),
    )
    if header.hash != Hash32(w3_header["hash"]):
        raise ValueError(
            f"Reconstructed header hash does not match expected: "
            f"expected={encode_hex(w3_header['hash'])}  actual={header.hex_hash}"
        )
    return header
Example #12
0
def int_to_bytes32(value: Union[int, bool]) -> Hash32:
    if not isinstance(value, int) or isinstance(value, bool):
        raise ValueError(f"Value must be an integer: Got: {type(value)}")
    if value < 0:
        raise ValueError(f"Value cannot be negative: Got: {value}")
    if value > UINT_256_MAX:
        raise ValueError(f"Value exeeds maximum UINT256 size.  Got: {value}")
    value_bytes = value.to_bytes(32, 'big')
    return Hash32(value_bytes)
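A few hedged usage checks (assuming the function above is importable and `UINT_256_MAX` is `2**256 - 1`):

assert int_to_bytes32(0) == b"\x00" * 32
assert int_to_bytes32(2 ** 256 - 1) == b"\xff" * 32
try:
    int_to_bytes32(True)   # bools are explicitly rejected despite being ints
except ValueError:
    pass
try:
    int_to_bytes32(-1)     # negative values cannot be encoded
except ValueError:
    pass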
Example #13
0
 def _get_checkpoints(cls, db: DatabaseAPI) -> Tuple[Hash32, ...]:
     concatenated_checkpoints = db.get(
         SchemaV1.make_checkpoint_headers_key())
     if concatenated_checkpoints is None:
         return ()
     else:
         return tuple(
             Hash32(concatenated_checkpoints[index:index + 32])
             for index in range(0, len(concatenated_checkpoints), 32))
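The slicing above simply splits one flat byte string into consecutive 32-byte hashes; independent of the database layer:

concatenated = b"\x01" * 32 + b"\x02" * 32
checkpoints = tuple(concatenated[i:i + 32] for i in range(0, len(concatenated), 32))
assert checkpoints == (b"\x01" * 32, b"\x02" * 32)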
Example #14
0
 def get_block(self, arg: Union[Hash32, int, str]) -> Optional[Eth1Block]:
     block_dict = self.w3.eth.getBlock(arg)
     if block_dict is None:
         raise BlockNotFound
     return Eth1Block(
         block_hash=Hash32(block_dict["hash"]),
         number=BlockNumber(block_dict["number"]),
         timestamp=Timestamp(block_dict["timestamp"]),
     )
Example #15
0
    def get_ancestor_hash(self, block_number: int) -> Hash32:
        """
        Return the hash for the ancestor block with number ``block_number``.
        Return the empty bytestring ``b''`` if the block number is outside of the
        range of available block numbers (typically the last 255 blocks).
        """
        ancestor_depth = self.block_number - block_number - 1
        is_ancestor_depth_out_of_range = (
            ancestor_depth >= MAX_PREV_HEADER_DEPTH or ancestor_depth < 0
            or block_number < 0)
        if is_ancestor_depth_out_of_range:
            return Hash32(b'')

        try:
            return nth(ancestor_depth, self.execution_context.prev_hashes)
        except StopIteration:
            # Ancestor with specified depth not present
            return Hash32(b'')
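The depth arithmetic is easier to see with concrete numbers (a sketch assuming `MAX_PREV_HEADER_DEPTH` is 255, as for the EVM's BLOCKHASH window):

current = 100
assert current - 99 - 1 == 0      # parent block -> first entry of prev_hashes
assert current - 90 - 1 == 9      # ten blocks back -> tenth entry
assert current - 100 - 1 == -1    # asking for the current block itself is out of range
assert current - (-5) - 1 == 104  # without the block_number < 0 guard this would look valid (104 < 255)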
Example #16
0
def create_mock_genesis_validator_deposits(
        num_validators: int,
        config: BeaconConfig,
        pubkeys: Sequence[BLSPubkey],
        keymap: Dict[BLSPubkey, int]) -> Tuple[Deposit, ...]:
    # Mock data
    withdrawal_credentials = Hash32(b'\x22' * 32)
    deposit_timestamp = Timestamp(0)
    fork = Fork(
        previous_version=config.GENESIS_FORK_VERSION,
        current_version=config.GENESIS_FORK_VERSION,
        epoch=config.GENESIS_EPOCH,
    )

    genesis_validator_deposits = tuple(
        Deposit(
            branch=tuple(
                Hash32(b'\x11' * 32)
                for j in range(10)
            ),
            index=i,
            deposit_data=DepositData(
                deposit_input=DepositInput(
                    pubkey=pubkeys[i],
                    withdrawal_credentials=withdrawal_credentials,
                    proof_of_possession=sign_proof_of_possession(
                        deposit_input=DepositInput(
                            pubkey=pubkeys[i],
                            withdrawal_credentials=withdrawal_credentials,
                        ),
                        privkey=keymap[pubkeys[i]],
                        fork=fork,
                        slot=config.GENESIS_SLOT,
                        slots_per_epoch=config.SLOTS_PER_EPOCH,
                    ),
                ),
                amount=config.MAX_DEPOSIT_AMOUNT,
                timestamp=deposit_timestamp,
            ),
        )
        for i in range(num_validators)
    )

    return genesis_validator_deposits
Example #17
0
 def get_block(self, arg: Union[Hash32, int, str]) -> Eth1Block:
     block_dict = self.w3.eth.getBlock(arg)
     if block_dict is None:
         raise Exception("block not found")
     return Eth1Block(
         block_hash=Hash32(block_dict["hash"]),
         parent_hash=block_dict["parentHash"],
         number=BlockNumber(block_dict["number"]),
         timestamp=Timestamp(block_dict["timestamp"]),
     )
Example #18
0
def parse_byetherscan_uri(parsed: urllib.parse.ParseResult) -> Checkpoint:

    latest_block_number = get_latest_block()
    checkpoint_block_number = latest_block_number - BLOCKS_FROM_TIP
    checkpoint_block_response = get_block_by_number(checkpoint_block_number)
    checkpoint_score = to_int(
        hexstr=checkpoint_block_response['totalDifficulty'])
    checkpoint_hash = checkpoint_block_response['hash']

    return Checkpoint(Hash32(decode_hex(checkpoint_hash)), checkpoint_score)
Example #19
0
def make_deposit_proof(
    list_deposit_data: Sequence[DepositData],
    deposit_tree: MerkleTree,
    deposit_tree_root: Hash32,
    deposit_index: int,
) -> Tuple[Hash32, ...]:
    length_mix_in = Hash32(len(list_deposit_data).to_bytes(32, byteorder="little"))
    merkle_proof = get_merkle_proof(deposit_tree, deposit_index)
    merkle_proof_with_mix_in = merkle_proof + (length_mix_in,)
    return merkle_proof_with_mix_in
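The appended length mix-in is just the list length encoded as a 32-byte little-endian word, matching the way SSZ mixes a list's length into its hash tree root; for example:

length_mix_in = (3).to_bytes(32, byteorder="little")
assert length_mix_in == b"\x03" + b"\x00" * 31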
Example #20
0
 def get_block(self,
               arg: web3.types.BlockIdentifier) -> Optional[Eth1Block]:
     block_dict = self.w3.eth.getBlock(arg)
     if block_dict is None:
         raise BlockNotFound
     return Eth1Block(
         block_hash=Hash32(block_dict["hash"]),
         number=BlockNumber(block_dict["number"]),
         timestamp=Timestamp(block_dict["timestamp"]),
     )
Example #21
0
def _hash_eip191_message(signable_message: SignableMessage) -> Hash32:
    version = signable_message.version
    if len(version) != 1:
        raise ValidationError(
            f"The supplied message version is {version!r}. "
            "The EIP-191 signable message standard only supports one-byte versions."
        )

    joined = b'\x19' + version + signable_message.header + signable_message.body
    return Hash32(keccak(joined))
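For the common personal-sign case (version byte 0x45, i.e. b"E"), the concatenation above produces the familiar "\x19Ethereum Signed Message:\n<length>" prefix; a hedged illustration of the framing, assuming the SignableMessage(version, header, body) layout used by eth_account:

body = b"hello"
version = b"E"
header = b"thereum Signed Message:\n" + str(len(body)).encode("ascii")
framed = b"\x19" + version + header + body
assert framed == b"\x19Ethereum Signed Message:\n5hello"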
Example #22
0
    def __str__(self) -> str:
        # TODO: use eth_utils.humanize_bytes once it is released
        if len(self.data) > 4:
            pretty_data = humanize_hash(Hash32(self.data))
        else:
            pretty_data = self.data.hex()

        if len(self.topics) == 0:  # type: ignore
            pretty_topics = "(anonymous)"
        else:
            pretty_topics = "|".join((
                humanize_hash(Hash32(topic.topic))
                for topic in self.topics  # type: ignore
            ))

        return (f"Log[#{self.idx} "  # type: ignore
                f"addr={humanize_hash(self.address)} "
                f"data={pretty_data} "
                f"topics={pretty_topics}"
                "]")
Example #23
0
def extract_genesis_params(genesis_config: RawEIP1085Dict) -> GenesisParams:
    raw_params = genesis_config['genesis']

    return GenesisParams(
        nonce=decode_hex(raw_params['nonce']),
        difficulty=to_int(hexstr=raw_params['difficulty']),
        extra_data=Hash32(decode_hex(raw_params['extraData'])),
        gas_limit=to_int(hexstr=raw_params['gasLimit']),
        coinbase=Address(decode_hex(raw_params['author'])),
        timestamp=to_int(hexstr=raw_params['timestamp']),
    )
Example #24
0
 def _randao_provider_of_epoch_signature(
     public_key: BLSPubkey, epoch: Epoch
 ) -> BLSSignature:
     private_key = private_key_provider(public_key)
     # TODO: fix how we get the signing root
     message = Hash32(epoch.to_bytes(32, byteorder="big"))
     domain = Domain(
         b"\x00" * 4 + signature_domain_to_domain_type(SignatureDomain.DOMAIN_RANDAO)
     )
     sig = bls.sign(message, private_key, domain)
     return sig
Example #25
0
def hash_eth2(data: Union[bytes, bytearray]) -> Hash32:
    """
    Return SHA-256 hashed result.

    Note: this API is currently under active research/development so is subject to change
    without a major version bump.

    Note: it's a placeholder and we aim to migrate to a S[T/N]ARK-friendly hash function in
    a future Ethereum 2.0 deployment phase.
    """
    return Hash32(hashlib.sha256(data).digest())
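Since this is plain SHA-256, the well-known empty-input digest makes a convenient sanity check:

assert hash_eth2(b"") == bytes.fromhex(
    "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
)
assert len(hash_eth2(b"anything")) == 32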
Example #26
0
 def get_block(self, arg: Union[Hash32, int, str]) -> Optional[Eth1Block]:
     # If `arg` is block number
     if isinstance(arg, int):
         block_time = self._get_block_time(BlockNumber(arg))
         return Eth1Block(
             block_hash=Hash32(int(arg).to_bytes(32, byteorder='big')),
             number=BlockNumber(arg),
             timestamp=Timestamp(block_time),
         )
     # If `arg` is block hash
     elif isinstance(arg, bytes):
         block_number = int.from_bytes(arg, byteorder='big')
         latest_block_number = self._get_latest_block_number()
         # Check if provided block number is in valid range
         earliest_follow_block_number = self.start_block_number - ETH1_FOLLOW_DISTANCE
         is_beyond_follow_distance = block_number < earliest_follow_block_number
         if (is_beyond_follow_distance or block_number > latest_block_number):
             # If provided block number does not make sense,
             # assume it's the block at `earliest_follow_block_number`.
             return Eth1Block(
                 block_hash=Hash32(earliest_follow_block_number.to_bytes(32, byteorder='big')),
                 number=BlockNumber(earliest_follow_block_number),
                 timestamp=Timestamp(
                     self.start_block_timestamp - ETH1_FOLLOW_DISTANCE * AVERAGE_BLOCK_TIME,
                 ),
             )
         block_time = self._get_block_time(BlockNumber(block_number))
         return Eth1Block(
             block_hash=arg,
             number=BlockNumber(block_number),
             timestamp=Timestamp(block_time),
         )
     else:
         # Assume `arg` == 'latest'
         latest_block_number = self._get_latest_block_number()
         block_time = self._get_block_time(latest_block_number)
         return Eth1Block(
             block_hash=Hash32(latest_block_number.to_bytes(32, byteorder='big')),
             number=BlockNumber(latest_block_number),
             timestamp=block_time,
         )
Example #27
0
 def get_logs(self, block_number: BlockNumber) -> Tuple[DepositLog, ...]:
     block_hash = block_number.to_bytes(32, byteorder="big")
     logs: Tuple[DepositLog, ...] = tuple()
     if block_number < self.start_block_number:
         return logs
     elif block_number == self.start_block_number:
         logs = tuple(
             DepositLog(
                 block_hash=Hash32(block_hash),
                 pubkey=deposit.pubkey,
                 withdrawal_credentials=deposit.withdrawal_credentials,
                 signature=deposit.signature,
                 amount=deposit.amount,
             ) for deposit in self.deposits)
         return logs
     else:
         amount: Gwei = Gwei(32 * GWEI_PER_ETH)
         for seed in range(self.num_deposits_per_block):
             # Multiply `block_number` by 10 to shift it one digit to the left so
             # the input to the function is generated deterministically but does not
             # conflict with blocks in the future.
             pubkey, privkey = mk_key_pair_from_seed_index(block_number *
                                                           10 + seed)
             withdrawal_credentials = Hash32(b'\x12' * 32)
             deposit_data_message = DepositMessage.create(
                 pubkey=pubkey,
                 withdrawal_credentials=withdrawal_credentials,
                 amount=amount,
             )
             signature = sign_proof_of_possession(
                 deposit_message=deposit_data_message,
                 privkey=privkey,
             )
             logs = logs + (DepositLog(
                 block_hash=Hash32(block_hash),
                 pubkey=pubkey,
                 withdrawal_credentials=withdrawal_credentials,
                 signature=signature,
                 amount=amount,
             ), )
         return logs
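The multiply-by-ten seed scheme from the loop above keeps key seeds from different blocks apart, provided there are at most ten deposits per block; for example:

block_number, num_deposits_per_block = 7, 3
seeds = [block_number * 10 + seed for seed in range(num_deposits_per_block)]
assert seeds == [70, 71, 72]   # block 8 would start at 80, so the ranges never overlap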
Example #28
0
    def _apply_web3_event(self, event: AttributeDict) -> None:
        event_name = event.event

        if event_name == TRANSFER_EVENT_NAME:
            if event.args.value == 0 or is_same_address(
                    event.args["from"], ZERO_ADDRESS):
                logger.warning(f"skipping event {event}")
                return
            transfer_hash = compute_transfer_hash(event)
            self.transfer_hashes.add(transfer_hash)
            self.transfer_events[transfer_hash] = event
        elif event_name == CONFIRMATION_EVENT_NAME:
            transfer_hash = Hash32(bytes(event.args.transferHash))
            assert len(transfer_hash) == 32
            self.confirmation_hashes.add(transfer_hash)
        elif event_name == COMPLETION_EVENT_NAME:
            transfer_hash = Hash32(bytes(event.args.transferHash))
            assert len(transfer_hash) == 32
            self.completion_hashes.add(transfer_hash)
        else:
            raise ValueError(f"Got unknown event {event}")
Example #29
0
def denormalize_get_proofs_payload(
        raw_payload: GetProofsV1Raw) -> GetProofsPayload:
    request_id, raw_proof_requests = raw_payload
    proof_requests = tuple(
        ProofRequest(
            block_hash,
            None if storage_key == b'' else Hash32(storage_key),
            state_key,
            from_level,
        ) for (block_hash, storage_key, state_key,
               from_level) in raw_proof_requests)
    return GetProofsPayload(request_id, proof_requests)
Example #30
0
    async def _spawn_predictive_nodes(self, node: bytes,
                                      priority: int) -> None:
        """
        Identify node hashes for nodes we might need in the future, and insert them to the
        predictive node queue.
        """
        # priority is the depth of the node away from an urgent node, plus one.
        # For example, the child of an urgent node has priority 2
        if priority > 3:
            # We would simply download all nodes if we kept adding predictions, so
            # instead we cut it off at a certain depth
            return

        try:
            decoded_node = rlp.decode(node)
        except rlp.DecodingError:
            # Could not decode rlp, it's probably a bytecode, carry on...
            return

        if len(decoded_node) == 17 and (priority <= 2
                                        or all(decoded_node[:16])):
            # if this is a fully filled branch node, then spawn predictive node tasks
            predictive_room = min(
                self._maybe_useful_nodes._maxsize -
                len(self._maybe_useful_nodes),
                16,
            )
            request_nodes = tuple(
                Hash32(h) for h in decoded_node[:16]
                if _is_hash(h) and Hash32(h) not in self._maybe_useful_nodes)
            queue_hashes = set(request_nodes[:predictive_room])
            for sub_hash in queue_hashes:
                self._hash_to_priority[sub_hash] = priority

            new_nodes = tuple(h for h in queue_hashes
                              if h not in self._maybe_useful_nodes)
            # this should always complete immediately because of the drop above
            await self._maybe_useful_nodes.add(new_nodes)
        else:
            self.logger.debug2("Not predicting node: %r", decoded_node)
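The _is_hash helper used above is not shown; a minimal sketch of what it plausibly checks (an assumption, not the original implementation):

def _is_hash(maybe_hash: bytes) -> bool:
    # A child referenced by hash in a trie branch node is exactly 32 bytes;
    # shorter entries are inlined sub-nodes or b'' for empty slots.
    return isinstance(maybe_hash, bytes) and len(maybe_hash) == 32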