Code example #1
 async def _handle_new_block_hashes(self) -> None:
     async for event in self._event_bus.stream(NewBlockHashesEvent):
         self.logger.debug(
             "Received NewBlockHashes from %s: %s",
             event.session,
             [humanize_hash(item.hash) for item in event.command.payload],
         )
         for new_block_hash in event.command.payload:
             try:
                 with trio.fail_after(5):
                     # Sometimes we get a NewBlock/NewBlockHashes msg before the BeamSyncer
                     # service has started, in which case there will be no subscribers to
                     # FetchBlockWitness. This ensures we wait for it to start before
                     # attempting to fire FetchBlockWitness requests.
                     await self._event_bus.wait_until_any_endpoint_subscribed_to(
                         FetchBlockWitness)
             except trio.TooSlowError:
                 self.logger.warning(
                     "No subscribers for FetchBlockWitness, couldn't fetch witness for %s",
                     humanize_hash(new_block_hash.hash),
                 )
                 continue
             self.manager.run_task(
                 self._event_bus.request,
                 FetchBlockWitness(event.session, new_block_hash.hash,
                                   new_block_hash.number))
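Note: the trio.fail_after(5) guard converts a missing FetchBlockWitness subscriber into a trio.TooSlowError after five seconds, so the affected hash is logged and skipped rather than blocking the NewBlockHashes stream indefinitely.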
Code example #2
File: full.py  Project: big-c-note/trinity-eth2
 def _on_orphan_block(self, block: SignedBeaconBlock,
                      _exc: ParentNotFoundError) -> None:
     self.logger.debug(
         "failed to import block with root %s: missing parent with root %s",
         humanize_hash(block.hash_tree_root),
         humanize_hash(block.parent_root),
     )
     self._block_pool.add(block)
Code example #3
def orphan_header_chain(session: orm.Session,
                        orphans: Sequence[HeaderIR]) -> None:
    with session.begin_nested():
        header_hashes = set(header_ir.hash for header_ir in orphans)

        # Set all the now orphaned headers as being non-canonical
        session.query(Header).filter(  # type: ignore
            Header.hash.in_(header_hashes)).update({"is_canonical": False},
                                                   synchronize_session=False)

        # Unlink each transaction from the block.  We query across the
        # `BlockTransaction` join table because the
        # `Transaction.block_header_hash` field may have already been set to
        # null in the case that this transaction has already been part of
        # another re-org.

        # We can't perform an `.update()` call if we do this with a join so first
        # we pull the transaction hashes above and then we execute the update.
        transactions = (
            session.query(Transaction)  # type: ignore
            .join(
                BlockTransaction,
                Transaction.hash == BlockTransaction.transaction_hash).filter(
                    or_(
                        BlockTransaction.block_header_hash.in_(header_hashes),
                        Transaction.block_header_hash.in_(header_hashes),
                    )).all())

        if not transactions:
            logger.debug("No orphaned transactions to unlink....")

        for transaction in transactions:
            logger.debug(
                "Unlinking txn: %s from block %s",
                humanize_hash(transaction.hash),
                humanize_hash(transaction.block_header_hash),
            )
            transaction.block_header_hash = None

            if transaction.receipt is not None:
                for log_idx, log in enumerate(transaction.receipt.logs):
                    logger.debug("Deleting log #%d", log_idx)
                    for logtopic in log.logtopics:
                        logger.debug(
                            "Deleting logtopic #%d: %s",
                            logtopic.idx,
                            humanize_hash(logtopic.topic_topic),
                        )
                        with session.begin_nested():
                            session.delete(logtopic)  # type: ignore
                    with session.begin_nested():
                        session.delete(log)  # type: ignore
                logger.debug("Deleting txn receipt: %s",
                             humanize_hash(transaction.hash))
                with session.begin_nested():
                    session.delete(transaction.receipt)  # type: ignore
            else:
                logger.debug("Txn %s already has null receipt")
Code example #4
    def import_block(
        self,
        block: BaseBeaconBlock,
        perform_validation: bool = True
    ) -> Tuple[BaseBeaconBlock,
               Tuple[BaseBeaconBlock, ...],
               Tuple[BaseBeaconBlock, ...]]:
        """
        Import a complete block and returns a 3-tuple

        - the imported block
        - a tuple of blocks which are now part of the canonical chain.
        - a tuple of blocks which were canonical and now are no longer canonical.
        """
        self.logger.debug(
            "attempting import of block with slot %s and signing_root %s",
            block.slot,
            humanize_hash(block.signing_root),
        )

        try:
            parent_block = self.get_block_by_root(block.parent_root)
        except BlockNotFound:
            raise ValidationError(
                "Attempt to import block #{}.  Cannot import block {} before importing "
                "its parent block at {}".format(
                    block.slot,
                    humanize_hash(block.signing_root),
                    humanize_hash(block.parent_root),
                ))

        state_machine = self.get_state_machine(at_slot=parent_block.slot)
        state_class = state_machine.get_state_class()
        state = self.chaindb.get_state_by_root(parent_block.state_root,
                                               state_class)

        state, imported_block = state_machine.import_block(
            block, state, check_proposer_signature=perform_validation)

        # Validate the imported block.
        if perform_validation:
            validate_imported_block_unchanged(imported_block, block)

        # TODO: Now it just persists all state. Should design how to clean up the old state.
        self.chaindb.persist_state(state)

        fork_choice_scoring = state_machine.get_fork_choice_scoring()
        (new_canonical_blocks,
         old_canonical_blocks) = self.chaindb.persist_block(
             imported_block, imported_block.__class__, fork_choice_scoring)

        self.logger.debug(
            "successfully imported block at slot %s with signing root %s",
            imported_block.slot,
            humanize_hash(imported_block.signing_root),
        )

        return imported_block, new_canonical_blocks, old_canonical_blocks
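A hypothetical call-site sketch (the chain and logger names are assumptions, not from the source) showing how the returned 3-tuple can drive re-org handling:

imported, new_canonical, old_canonical = chain.import_block(block)
for stale in old_canonical:
    # Blocks that dropped out of the canonical chain in this re-org.
    logger.debug("orphaned: %s", humanize_hash(stale.signing_root))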
Code example #5
File: chain.py  Project: Gauddel/trinity
    async def _display_stats(self) -> None:
        while self.is_operational:
            await self.sleep(5)
            self.logger.debug(
                "(in progress, queued, max size) of bodies, receipts: %r. Write capacity? %s",
                [(q.num_in_progress(), len(q), q._maxsize) for q in (
                    self._block_body_tasks,
                    self._receipt_tasks,
                )],
                "yes" if self._db_buffer_capacity.is_set() else "no",
            )

            stats = self.tracker.report()
            utcnow = int(datetime.datetime.utcnow().timestamp())
            head_age = utcnow - stats.latest_head.timestamp
            self.logger.info(
                ("blks=%-4d  "
                 "txs=%-5d  "
                 "bps=%-3d  "
                 "tps=%-4d  "
                 "elapsed=%0.1f  "
                 "head=#%d %s  "
                 "age=%s"),
                stats.num_blocks,
                stats.num_transactions,
                stats.blocks_per_second,
                stats.transactions_per_second,
                stats.elapsed,
                stats.latest_head.block_number,
                humanize_hash(stats.latest_head.hash),
                humanize_seconds(head_age),
            )
Code example #6
File: full.py  Project: big-c-note/trinity-eth2
 async def _sync_batch(self, batch: SyncRequest, start_time: float,
                       start_slot: Slot) -> bool:
     try:
         count = 0
         async for block in self._host.get_blocks_by_range(
                 batch.peer_id, batch.start_slot, batch.count):
             count += 1
             imported = self.on_block(block)
             if not imported:
                 self.logger.warning(
                     "an issue with sync of this block %s",
                     humanize_hash(block.hash_tree_root),
                 )
                 return False
             if count % 8 == 0:
                 now = time.time()
                 slots = block.slot - start_slot
                 slots_per_second = slots / (now - start_time)
                 self.logger.info(
                     "synced to slot %d, syncing at [ %2f slots/sec ]",
                     block.slot,
                     slots_per_second,
                 )
     except Exception as e:
         self.logger.exception(e)
         return False
     return True
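Note: progress is reported only every eighth block (count % 8 == 0), keeping log volume down while still producing a running slots/sec figure; the broad except clause turns any failure into a False return so the sync degrades instead of crashing the service.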
Code example #7
File: full.py  Project: big-c-note/trinity-eth2
 def _on_block_imported(self, block: SignedBeaconBlock) -> None:
     self.logger.debug(
         "successfully imported signed block at slot %d with root %s",
         block.slot,
         humanize_hash(block.hash_tree_root),
     )
     self._try_import_orphan(block.message.hash_tree_root)
Code example #8
File: validator_api.py  Project: pamir-s/trinity
 async def _post_block_proposal(self, request: web.Request) -> web.Response:
     block_data = await request.json()
     block = from_formatted_dict(block_data, BeaconBlock)
     self.logger.info("broadcasting block with root %s",
                      humanize_hash(block.hash_tree_root))
     # TODO: the actual broadcast
     return web.Response()
Code example #9
File: full.py  Project: big-c-note/trinity-eth2
 def _on_block_failure(self, block: SignedBeaconBlock,
                       exc: Exception) -> None:
     self.logger.exception(
         "failed to import block with root %s: %s",
         humanize_hash(block.hash_tree_root),
         exc,
     )
Code example #10
    def __str__(self) -> str:
        # TODO: use eth_utils.humanize_bytes once it is released
        if len(self.data) > 4:
            pretty_data = humanize_hash(Hash32(self.data))
        else:
            pretty_data = self.data.hex()

        if len(self.topics) == 0:  # type: ignore
            pretty_topics = "(anonymous)"
        else:
            pretty_topics = "|".join((
                humanize_hash(Hash32(topic.topic))
                for topic in self.topics  # type: ignore
            ))

        return f"Log[#{self.idx} A={humanize_hash(self.address)} D={pretty_data}/T={pretty_topics}]"  # type: ignore  # noqa: E501
Code example #11
    async def _full_skeleton_sync(
            self, skeleton_syncer: SkeletonSyncer[TChainPeer]) -> None:
        skeleton_generator = skeleton_syncer.next_skeleton_segment()
        try:
            first_segment = await skeleton_generator.__anext__()
        except StopAsyncIteration:
            self.logger.debug(
                "Skeleton %s was cancelled before first header was returned",
                skeleton_syncer.peer,
            )
            return

        self.logger.debug(
            "Skeleton syncer asserts that parent (%s) of the first header (%s) is already present",
            humanize_hash(first_segment[0].parent_hash),
            first_segment[0],
        )
        first_parent = await self._db.coro_get_block_header_by_hash(
            first_segment[0].parent_hash)
        try:
            self._stitcher.set_finished_dependency(first_parent)
        except DuplicateTasks:
            # the first header of this segment was already registered: no problem, carry on
            pass

        self._stitcher.register_tasks(first_segment, ignore_duplicates=True)

        previous_segment = first_segment
        async for segment in skeleton_generator:
            self._stitcher.register_tasks(segment, ignore_duplicates=True)

            gap_length = (
                segment[0].block_number - previous_segment[-1].block_number - 1
            )
            if gap_length > MAX_HEADERS_FETCH:
                raise ValidationError(
                    f"Header skeleton gap of {gap_length} > {MAX_HEADERS_FETCH}"
                )
            elif gap_length == 0:
                # no need to fill in when there is no gap, just verify against previous header
                await self._chain.coro_validate_chain(
                    previous_segment[-1],
                    segment,
                    SEAL_CHECK_RANDOM_SAMPLE_RATE,
                )
            elif gap_length < 0:
                raise ValidationError(
                    f"Invalid headers: {gap_length} gap from {previous_segment} to {segment}"
                )
            else:
                # if the header filler is overloaded, this will pause
                await self._meat.schedule_segment(
                    previous_segment[-1],
                    gap_length,
                    skeleton_syncer.peer,
                )
            previous_segment = segment

            # Don't race ahead if the consumer is lagging
            await self._buffer_capacity.wait()
Code example #12
File: headers.py  Project: teotoplak/trinity
    async def _fetch_segment(self, peer: TChainPeer,
                             parent_header: BlockHeader,
                             length: int) -> Tuple[BlockHeader, ...]:
        if length > peer.max_headers_fetch:
            raise ValidationError(
                f"Can't request {length} headers, because peer maximum is {peer.max_headers_fetch}"
            )

        headers = await self._request_headers(
            peer,
            BlockNumber(parent_header.block_number + 1),
            length,
        )
        if not headers:
            return tuple()
        elif headers[0].parent_hash != parent_header.hash:
            # Segment doesn't match leading peer, drop this peer
            # Eventually, we'll do something smarter, in case the leading peer is the divergent one
            self.logger.warning(
                "%s returned segment starting %s & parent %s, doesn't match %s, ignoring result...",
                peer,
                headers[0],
                humanize_hash(headers[0].parent_hash),
                parent_header,
            )
            return tuple()
        elif len(headers) != length:
            self.logger.debug(
                "Ignoring %d headers from %s, because wanted %d",
                len(headers),
                peer,
                length,
            )
            return tuple()
        else:
            try:
                await self.wait(
                    self._chain.coro_validate_chain(
                        parent_header,
                        headers,
                        SEAL_CHECK_RANDOM_SAMPLE_RATE,
                    ))
            except ValidationError as e:
                self.logger.warning(
                    "Received invalid header segment from %s against known parent %s, "
                    ": %s",
                    peer,
                    parent_header,
                    e,
                )
                return tuple()
            else:
                # stitch headers together in order, ignoring duplicates
                self._stitcher.register_tasks(headers, ignore_duplicates=True)
                if self.sync_progress:
                    last_received_header = headers[-1]
                    self.sync_progress = self.sync_progress.update_current_block(
                        last_received_header.block_number)
                return headers
Code example #13
    async def _periodically_report_import(self) -> None:
        last_reported_height = None
        last_reported_at = None

        import_rate_ema = None

        while self.manager.is_running:
            async for _ in every(5, initial_delay=2):  # noqa: F841
                if self._last_loaded_block is None:
                    self.logger.info("Waiting for first block to load...")
                    continue

                last_loaded_block = self._last_loaded_block
                last_loaded_height = last_loaded_block.header.block_number

                # If this is our *first* report
                if last_reported_height is None or last_reported_at is None:
                    last_reported_height = last_loaded_height
                    last_reported_at = time.monotonic()
                    continue

                if last_loaded_height < last_reported_height:
                    raise Exception("Invariant")
                elif last_loaded_height == last_reported_height:
                    continue

                num_imported = last_loaded_height - last_reported_height
                total_rows = query_row_count(self._session,
                                             last_reported_height,
                                             last_loaded_height)
                duration = time.monotonic() - last_reported_at
                blocks_per_second = num_imported / duration
                items_per_second = total_rows / duration

                if import_rate_ema is None:
                    import_rate_ema = EMA(blocks_per_second, 0.05)
                    items_rate_ema = EMA(items_per_second, 0.05)
                else:
                    import_rate_ema.update(blocks_per_second)
                    items_rate_ema.update(items_per_second)

                self.logger.info(
                    "head=%d (%s) blocks=%d rows=%d bps=%s bps_ema=%s ips=%s, ips_ema=%s",
                    last_loaded_height,
                    humanize_hash(last_loaded_block.header.hash),
                    num_imported,
                    total_rows,
                    (int(blocks_per_second)
                     if blocks_per_second > 2 else f"{blocks_per_second:.2f}"),
                    (int(import_rate_ema.value) if import_rate_ema.value > 2
                     else f"{import_rate_ema.value:.2f}"),
                    (int(items_per_second)
                     if items_per_second > 2 else f"{items_per_second:.2f}"),
                    (int(items_rate_ema.value) if items_rate_ema.value > 2 else
                     f"{items_rate_ema.value:.2f}"),
                )

                last_reported_height = last_loaded_height
                last_reported_at = time.monotonic()
Code example #14
 def block_identifier(self) -> str:
     if isinstance(self.block_number_or_hash, int):
         return str(self.block_number_or_hash)
     elif isinstance(self.block_number_or_hash, bytes):
         return humanize_hash(self.block_number_or_hash)
     else:
         raise Exception(f"Unexpected type for block identifier: "
                         f"{type(self.block_number_or_hash)}")
Code example #15
    async def handle_first_tick(self, slot: Slot) -> None:
        head = self.chain.get_canonical_head()
        state_machine = self.chain.get_state_machine()
        state = self.chain.get_head_state()
        self.logger.debug(
            bold_green(
                "status at slot %s in epoch %s: state_root %s, finalized_checkpoint %s"
            ),
            state.slot,
            state.current_epoch(self.slots_per_epoch),
            humanize_hash(head.message.state_root),
            state.finalized_checkpoint,
        )
        self.logger.debug(
            ("status at slot %s in epoch %s:"
             " previous_justified_checkpoint %s, current_justified_checkpoint %s"
             ),
            state.slot,
            state.current_epoch(self.slots_per_epoch),
            state.previous_justified_checkpoint,
            state.current_justified_checkpoint,
        )
        self.logger.debug(
            ("status at slot %s in epoch %s:"
             " previous_epoch_attestations %s, current_epoch_attestations %s"),
            state.slot,
            state.current_epoch(self.slots_per_epoch),
            state.previous_epoch_attestations,
            state.current_epoch_attestations,
        )

        # To see if a validator is assigned to propose during the slot, the beacon state must
        # be in the epoch in question. At the epoch boundaries, the validator must run an
        # epoch transition into the epoch to successfully check the proposal assignment of the
        # first slot.
        temp_state = state_machine.state_transition.apply_state_transition(
            state,
            future_slot=slot,
        )
        proposer_index = get_beacon_proposer_index(
            temp_state,
            CommitteeConfig(state_machine.config),
        )

        # `latest_proposed_epoch` is used to prevent the validator from erroneously
        # proposing twice in the same epoch after a service crash.
        epoch = compute_epoch_at_slot(slot, self.slots_per_epoch)
        if proposer_index in self.validator_privkeys:
            has_proposed = epoch <= self.latest_proposed_epoch[proposer_index]
            if not has_proposed:
                await self.propose_block(
                    proposer_index=proposer_index,
                    slot=slot,
                    state=state,
                    state_machine=state_machine,
                    head_block=head,
                )
                self.latest_proposed_epoch[proposer_index] = epoch
Code example #16
File: validator_api.py  Project: pamir-s/trinity
 async def _post_attestation(self, request: web.Request) -> web.Response:
     attestation_data = await request.json()
     attestation = from_formatted_dict(attestation_data, Attestation)
     self.logger.info(
         "broadcasting attestation with root %s",
         humanize_hash(attestation.hash_tree_root),
     )
     # TODO: the actual broadcast
     return web.Response()
Code example #17
File: full.py  Project: big-c-note/trinity-eth2
 def _on_slashable_block(self, block: SignedBeaconBlock,
                         exc: SlashableBlockError) -> None:
     self.logger.warning(
         "failed to import block with root %s: %s",
         humanize_hash(block.hash_tree_root),
         exc,
     )
     # NOTE: chain will write the block in ``on_block`` but not the block's state
     # See the place that exception is raised for further rationale.
     # TODO: pipe to "slasher" software...
     self._slashable_block_pool.add(block)
Code example #18
File: full.py  Project: big-c-note/trinity-eth2
 def _try_import_orphan(self, imported_parent_root: Root) -> None:
     for orphan in self._block_pool:
         if orphan.message.parent_root == imported_parent_root:
             self._block_pool.discard(orphan)
             imported = self.on_block(orphan)
             if not imported:
                 self.logger.warning(
                     "failed to import orphan with root %s",
                     humanize_hash(orphan.hash_tree_root),
                 )
                 return
Code example #19
File: validator.py  Project: lemonclik/trinity
 async def handle_first_tick(self, slot: Slot) -> None:
     head = self.chain.get_canonical_head()
     state_machine = self.chain.get_state_machine()
     state = self.chain.get_head_state()
     self.logger.debug(
         bold_green(
             "status at slot %s in epoch %s: state_root %s, finalized_checkpoint %s"
         ),
         state.slot,
         state.current_epoch(self.slots_per_epoch),
         humanize_hash(head.state_root),
         state.finalized_checkpoint,
     )
     self.logger.debug(
         ("status at slot %s in epoch %s:"
          " previous_justified_checkpoint %s, current_justified_checkpoint %s"
          ),
         state.slot,
         state.current_epoch(self.slots_per_epoch),
         state.previous_justified_checkpoint,
         state.current_justified_checkpoint,
     )
     self.logger.debug(
         ("status at slot %s in epoch %s:"
          " previous_epoch_attestations %s, current_epoch_attestations %s"),
         state.slot,
         state.current_epoch(self.slots_per_epoch),
         state.previous_epoch_attestations,
         state.current_epoch_attestations,
     )
     proposer_index = _get_proposer_index(
          state.copy(slot=slot),
         state_machine.config,
     )
      # `latest_proposed_epoch` is used to prevent the validator from erroneously
      # proposing twice in the same epoch after a service crash.
     epoch = compute_epoch_of_slot(slot, self.slots_per_epoch)
     if proposer_index in self.validator_privkeys:
         has_proposed = epoch <= self.latest_proposed_epoch[proposer_index]
         if not has_proposed:
             await self.propose_block(
                 proposer_index=proposer_index,
                 slot=slot,
                 state=state,
                 state_machine=state_machine,
                 head_block=head,
             )
             self.latest_proposed_epoch[proposer_index] = epoch
Code example #20
File: utils.py  Project: wschwab/trinity
def extract_privkeys_from_dir(dir_path: Path) -> Dict[BLSPubkey, int]:
    validator_keymap: Dict[BLSPubkey, int] = {}  # pub -> priv
    try:
        key_files = os.listdir(dir_path)
    except FileNotFoundError:
        logger.debug('Could not find key directory: %s', str(dir_path))
        return validator_keymap
    for key_file_name in key_files:
        key_file_path = dir_path / key_file_name
        privkey = _read_privkey(key_file_path)
        pubkey = bls.privtopub(privkey)
        validator_keymap[pubkey] = privkey
        logger.debug('imported public key: %s', humanize_hash(Hash32(pubkey)))
    return validator_keymap
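A hypothetical invocation sketch (the directory path is an assumption):

from pathlib import Path

keymap = extract_privkeys_from_dir(Path("validator_keys"))
logger.debug("loaded %d validator key pairs", len(keymap))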
Code example #21
def scrape_action(args: argparse.Namespace) -> None:
    config = Config(args)
    xdg_ethpmcli_root = get_xdg_ethpmcli_root()
    chain_data_path = xdg_ethpmcli_root / IPFS_CHAIN_DATA
    validate_chain_data_store(chain_data_path, config.w3)
    cli_logger.info("Loading IPFS scraper...")
    start_block = args.start_block if args.start_block else 0
    last_scraped_block = scrape(config.w3, xdg_ethpmcli_root, start_block)
    last_scraped_block_hash = config.w3.eth.getBlock(last_scraped_block)["hash"]
    cli_logger.info(
        "All blocks scraped up to # %d: %s.",
        last_scraped_block,
        humanize_hash(last_scraped_block_hash),
    )
    cli_logger.debug(
        "All blocks scraped up to # %d: %s.",
        last_scraped_block,
        last_scraped_block_hash,
    )
Code example #22
File: component.py  Project: onyb/trinity
    def _generate_network_as_json(cls, args: Namespace,
                                  trinity_config: TrinityConfig) -> None:
        config = _get_eth2_config(args.config_profile)
        validator_count = config.MIN_GENESIS_ACTIVE_VALIDATOR_COUNT
        output_file_path = (args.output if args.output else
                            _get_network_config_path_from())

        cls.logger.info(
            "generating a configuration file at '%s'"
            " for %d validators and the genesis state containing them...",
            output_file_path,
            validator_count,
        )
        validator_key_pairs = create_key_pairs_for(validator_count)
        deposits = create_genesis_deposits_from(
            validator_key_pairs,
            withdrawal_credentials_provider=mk_withdrawal_credentials_from(
                config.BLS_WITHDRAWAL_PREFIX.to_bytes(1, byteorder="little")),
            amount_provider=lambda _public_key: config.MAX_EFFECTIVE_BALANCE,
        )
        eth1_block_hash = ZERO_HASH32
        eth1_timestamp = config.MIN_GENESIS_TIME
        genesis_state = initialize_beacon_state_from_eth1(
            eth1_block_hash=eth1_block_hash,
            eth1_timestamp=Timestamp(eth1_timestamp),
            deposits=deposits,
            config=config,
        )
        output = {
            "eth2_config": serialize(config),
            "genesis_validator_key_pairs": mk_genesis_key_map(
                validator_key_pairs, genesis_state),
            "genesis_state": to_formatted_dict(genesis_state),
        }
        cls.logger.info(
            "configuration generated; genesis state has root %s",
            humanize_hash(genesis_state.hash_tree_root),
        )
        output_file_path.parent.mkdir(parents=True, exist_ok=True)
        with open(output_file_path, "w") as output_file:
            output_file.write(json.dumps(output))
Code example #23
    def _generate_network_as_json(cls, args: Namespace,
                                  trinity_config: TrinityConfig) -> None:
        if args.genesis_time:
            genesis_time = args.genesis_time
        elif args.genesis_delay:
            genesis_time = int(time.time()) + args.genesis_delay
        else:
            genesis_time = None

        genesis_config = generate_genesis_config(args.config_profile,
                                                 genesis_time)

        output_file_path = args.output
        cls.logger.info(
            "configuration generated; genesis state has root %s with genesis time %d; "
            "writing to '%s'",
            humanize_hash(genesis_config["genesis_state"].hash_tree_root),
            genesis_config["genesis_state"].genesis_time,
            output_file_path,
        )
        output_file_path.parent.mkdir(parents=True, exist_ok=True)
        with open(output_file_path, "w") as output_file:
            output_file.write(json.dumps(genesis_config))
Code example #24
File: validator.py  Project: big-c-note/trinity-eth2
async def _get_chain_info(context: Context, request: Request) -> Response:
    head = context.chain.get_canonical_head()
    return {"slot": head.slot, "root": humanize_hash(head.hash_tree_root)}
Code example #25
File: exfiltration.py  Project: matt783/alexandria
 def __str__(self) -> str:
     return '<BlockHeader #{0} {1}>'.format(
         self.block_number,
         humanize_hash(self.hash),
     )
Code example #26
def humanize_node_id(node_id: NodeID) -> str:
    node_id_bytes = node_id.to_bytes(32, 'big')
    return humanize_hash(Hash32(node_id_bytes))
Code example #27
    def __init__(self, snapshot_id: "SnapshotID"):
        if isinstance(snapshot_id, bytes):
            # Is block hash
            snapshot_id = humanize_hash(snapshot_id)  # type: ignore

        super().__init__(f"Unknown snapshot ID '{str(snapshot_id)}'.")
Code example #28
 def __str__(self) -> str:
     clipped_hash = humanize_hash(self.hash)
     return f"Block #{self.number}-0x{clipped_hash}"
Code example #29
def test_humanize_hash(hash32, expected):
    assert humanize_hash(hash32) == expected
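For context, a minimal sketch of how the hash32/expected pair might be supplied via pytest parametrization (the expected string is an assumption; verify the exact abbreviated form against your eth_utils version):

import pytest
from eth_utils import humanize_hash

@pytest.mark.parametrize(
    "hash32, expected",
    (
        # Assumed head..tail abbreviation; adjust to match your eth_utils version.
        (b"\x00" * 32, "0000..0000"),
    ),
)
def test_humanize_hash(hash32, expected):
    assert humanize_hash(hash32) == expected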
Code example #30
def humanize_node_id(node_id: NodeID) -> str:
    return humanize_hash(node_id)  # type: ignore
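Note the contrast with code example #26: there NodeID is an integer and must first be converted to 32 big-endian bytes, while here it is already a bytes-like value that humanize_hash can abbreviate directly (hence the bare type: ignore).

As a closing illustration, a small self-contained demo of humanize_hash itself (a sketch assuming eth_utils and eth-typing are installed; the exact abbreviation width can vary between versions):

from eth_typing import Hash32
from eth_utils import humanize_hash

block_hash = Hash32(bytes(range(32)))
# Prints an abbreviated head..tail form of the 64-character hex digest,
# e.g. "0001..1e1f".
print(humanize_hash(block_hash))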