def finalize_block(block: l5_block_model.L5BlockModel, last_confirmed_block: Dict[str, Any], confirmed_txn_hash: str) -> None:
    """Sign a confirmed L5 block, persist it, and notify contributing L1s.

    NOTE(review): this module defines ``finalize_block`` twice; this
    elasticsearch-based variant is shadowed by the storage-based definition
    that follows it, so only the later one takes effect at runtime.
    """
    _log.info(f"[L5] Block {block.block_id} confirmed")
    # Chain this block onto the previous confirmed block's proof, if present.
    prior_proof = last_confirmed_block["proof"].get("proof")
    if prior_proof:
        block.prev_proof = prior_proof
    _log.info("[L5] Signing block")
    block.transaction_hash = [confirmed_txn_hash]
    block.proof = keys.get_my_keys().sign_block(block)
    _log.info("[L5] Storing new block and moving pointers")
    elasticsearch.put_index_in_storage("BLOCK", block.block_id, block)
    set_last_confirmed_block(block)
    # Notify L1s that contributed to L5 block
    broadcast.dispatch(block)
def broadcast_to_public_chain(l5_block: l5_block_model.L5BlockModel) -> None:
    """Publish an L5 block's hash to the public network and persist the result.

    NOTE(review): this module defines ``broadcast_to_public_chain`` twice;
    this INTERCHAIN/elasticsearch variant is shadowed by the later
    ``_interchain_client``/redisearch definition, which is the one in effect.
    """
    _log.info("[L5] Preparing to broadcast")
    # Hash the block and publish the block to a public network
    public_hash = keys.get_my_keys().hash_l5_for_public_broadcast(l5_block)
    transaction_hash = INTERCHAIN.publish_to_public_network(public_hash)
    _log.info("[L5] After Publish to public network, setting new broadcast time")
    _log.info(f"[L5] transaction_hash {transaction_hash}")
    # Record the publish: transaction hash list, last-sent block height, network
    l5_block.transaction_hash.append(transaction_hash)
    l5_block.block_last_sent_at = INTERCHAIN.get_current_block()
    l5_block.network = INTERCHAIN_NETWORK
    _log.info(f"[L5] ADDING TO BLOCK key BLOCK/{l5_block.block_id}")
    _log.info(f"[L5] ADDING TO BLOCK val {l5_block.export_as_at_rest()}")
    elasticsearch.put_index_in_storage("BLOCK", l5_block.block_id, l5_block)
def finalize_block(block: l5_block_model.L5BlockModel, last_confirmed_block: Dict[str, Any], confirmed_txn_hash: str) -> None:
    """Sign a confirmed L5 block, write it to storage, and notify contributing L1s.

    Sets the block's prev_proof from the last confirmed block (when one
    exists), records the confirming transaction hash, signs the block,
    stores it, advances the last-confirmed pointer, and dispatches the
    broadcast to the L1 chains that contributed to this block.
    """
    _log.info(f"[L5] Block {block.block_id} confirmed")
    # Chain this block onto the previous confirmed block's proof, if present.
    prior_proof = last_confirmed_block["proof"].get("proof")
    if prior_proof:
        block.prev_proof = prior_proof
    _log.info("[L5] Signing block")
    block.transaction_hash = [confirmed_txn_hash]
    block.proof = keys.get_my_keys().sign_block(block)
    _log.info("[L5] Storing new block and moving pointers")
    storage.put_object_as_json(f"BLOCK/{block.block_id}", block.export_as_at_rest())
    # In the future if we change/add indexes to an L5 block, it may need to be re-indexed here.
    # For now, no re-indexing is necessary, only a storage update
    set_last_confirmed_block(block)
    # Notify L1s that contributed to L5 block
    broadcast.dispatch(block)
def broadcast_to_public_chain(l5_block: l5_block_model.L5BlockModel) -> None:
    """Publish an L5 block's hash to the public network, then store and index it.

    Hashes the block for public broadcast, publishes that hash via the
    configured interchain client, records the resulting transaction hash and
    current public-chain height on the block, then writes the block to
    storage and to the redisearch block index.
    """
    _log.info("[L5] Preparing to broadcast")
    # Hash the block and publish the block to a public network
    public_hash = keys.get_my_keys().hash_l5_for_public_broadcast(l5_block)
    transaction_hash = _interchain_client.publish_l5_hash_to_public_network(public_hash)
    _log.info("[L5] After Publish to public network, setting new broadcast time")
    _log.info(f"[L5] transaction_hash {transaction_hash}")
    # Record the publish: transaction hash list, last-sent block height, network
    l5_block.transaction_hash.append(transaction_hash)
    l5_block.block_last_sent_at = _interchain_client.get_current_block()
    l5_block.network = INTERCHAIN_NETWORK
    storage_key = f"BLOCK/{l5_block.block_id}"
    _log.info(f"[L5] Adding to storage at {storage_key} and creating index")
    storage.put_object_as_json(storage_key, l5_block.export_as_at_rest())
    redisearch.put_document(redisearch.Indexes.block.value, l5_block.block_id, l5_block.export_as_search_index())
def set_last_confirmed_block(l5_block: l5_block_model.L5BlockModel) -> None:
    """Persist a pointer to the most recently confirmed L5 block.

    Stores only the block id and its proof (taken from the at-rest export)
    under the well-known BROADCAST/LAST_CONFIRMED_BLOCK storage key.
    """
    pointer = {
        "block_id": l5_block.block_id,
        "proof": l5_block.export_as_at_rest()["proof"],
    }
    storage.put_object_as_json("BROADCAST/LAST_CONFIRMED_BLOCK", pointer)