def sign_block(l2_block: l2_block_model.L2BlockModel) -> None:
    """Attach a proof to an L2 block in place.

    Uses proof-of-work when PROOF_SCHEME is "work", otherwise a trust
    signature from this chain's keys.
    """
    my_keys = keys.get_my_keys()
    if PROOF_SCHEME == "work":
        _log.info("[L2] Performing PoW on block")
        l2_block.proof, l2_block.nonce = my_keys.pow_block(l2_block)
    else:
        _log.info("[L2] Signing block")
        l2_block.proof = my_keys.sign_block(l2_block)
def sign_block(l4_block: l4_block_model.L4BlockModel) -> None:
    """Attach a proof to an L4 block in place and log the finished block.

    Uses proof-of-work when PROOF_SCHEME is "work", otherwise a trust
    signature from this chain's keys.
    """
    my_keys = keys.get_my_keys()
    if PROOF_SCHEME == "work":
        _log.info("[L4] Performing PoW on block")
        l4_block.proof, l4_block.nonce = my_keys.pow_block(l4_block)
    else:
        _log.info("[L4] Signing block")
        l4_block.proof = my_keys.sign_block(l4_block)
    _log.info(f"[L4] Finished Block:\n{l4_block.export_as_at_rest()}")
def sign_block(block: l1_block_model.L1BlockModel) -> None:
    """Strip payloads from an L1 block, then attach proof and scheme in place.

    Proof-of-work is used when PROOF_SCHEME is "work"; otherwise the block
    is trust-signed with this chain's keys.
    """
    _log.info("[L1] Stripping payloads and signing")
    block.strip_payloads()
    my_keys = keys.get_my_keys()
    if PROOF_SCHEME == "work":
        _log.info("[L1] Doing PoW...")
        block.proof, block.nonce = my_keys.pow_block(block)
        block.scheme = "work"
    else:
        _log.info("[L1] Signing block...")
        block.proof = my_keys.sign_block(block)
        block.scheme = "trust"
def register_new_key_with_matchmaking() -> str:
    """Make a new auth key and register it with matchmaking

    Returns:
        auth key string of the newly shared key

    Raises:
        RuntimeError when bad response from chain or couldn't save to storage
    """
    auth_key = api_key_model.gen_auth_key()
    # Sign the key so matchmaking can verify it came from this chain
    signature = keys.get_my_keys().make_signature(f"matchmaking_{auth_key}".encode("utf-8"), crypto.SupportedHashes.sha256)
    new_key = {"dcid": keys.get_public_id(), "key": auth_key, "signature": signature}
    try:
        r = requests.post(f"{matchmaking.MATCHMAKING_ADDRESS}/auth-register", json=new_key, timeout=30)
    except Exception as e:
        # Chain the original exception so the underlying network failure isn't lost
        raise RuntimeError(f"Unable to register shared auth key with matchmaking\nError: {e}") from e
    if r.status_code < 200 or r.status_code >= 300:
        raise RuntimeError(f"Unable to register shared auth key with matchmaking\nStatus code: {r.status_code}")
    if not save_matchmaking_auth_key(auth_key):
        # Fixed copy-paste message: this is the matchmaking key, not an interchain key
        raise RuntimeError("Unable to add new matchmaking auth key to storage")
    return auth_key
def register_new_interchain_key_with_remote(interchain_dcid: str) -> str:
    """Make a new auth key and register it with a remote dragonchain for inter-level communication

    Args:
        interchain_dcid: chain id of the interchain sharing this key

    Returns:
        auth key string of the newly shared key

    Raises:
        RuntimeError when bad response from chain or couldn't save to storage
    """
    # We need to establish a shared HMAC key for this chain before we can post
    auth_key = gen_auth_key()
    # Sign the key so the remote chain can verify it came from this chain
    signature = keys.get_my_keys().make_signature(f"{interchain_dcid}_{auth_key}".encode("utf-8"), crypto.SupportedHashes.sha256)
    new_key = {"dcid": keys.get_public_id(), "key": auth_key, "signature": signature}
    try:
        r = requests.post(f"{matchmaking.get_dragonchain_address(interchain_dcid)}/v1/interchain-auth-register", json=new_key, timeout=30)
    except Exception as e:
        # Chain the original exception so the underlying network failure isn't lost
        raise RuntimeError(f"Unable to register shared auth key with dragonchain {interchain_dcid}\nError: {e}") from e
    if r.status_code < 200 or r.status_code >= 300:
        raise RuntimeError(f"Unable to register shared auth key with dragonchain {interchain_dcid}\nStatus code: {r.status_code}")
    if not save_interchain_auth_key(interchain_dcid, auth_key):
        raise RuntimeError("Unable to add new interchain auth key to storage")
    return auth_key
def sign_transaction(transaction: "transaction_model.TransactionModel", block_id: str) -> None:
    """Assign a block id to a transaction and sign it in place.

    Args:
        transaction: TransactionModel to be signed
        block_id: block id stamped onto the transaction before signing
    """
    transaction.block_id = block_id
    transaction.full_hash, transaction.signature = keys.get_my_keys().sign_transaction(transaction)
def finalize_block(block: l5_block_model.L5BlockModel, last_confirmed_block: Dict[str, Any], confirmed_txn_hash: str) -> None:
    """Finalize a confirmed L5 block: chain prev proof, sign, store, and broadcast."""
    _log.info(f"[L5] Block {block.block_id} confirmed")
    prior_proof = last_confirmed_block["proof"].get("proof")
    if prior_proof:
        block.prev_proof = prior_proof
    _log.info("[L5] Signing block")
    block.transaction_hash = [confirmed_txn_hash]
    block.proof = keys.get_my_keys().sign_block(block)
    _log.info("[L5] Storing new block and moving pointers")
    elasticsearch.put_index_in_storage("BLOCK", block.block_id, block)
    set_last_confirmed_block(block)
    # Notify L1s that contributed to L5 block
    broadcast.dispatch(block)
def broadcast_to_public_chain(l5_block: l5_block_model.L5BlockModel) -> None:
    """Publish an L5 block's hash to the public network and persist the updated block."""
    _log.info("[L5] Preparing to broadcast")
    # Hash the block and publish the block to a public network
    broadcast_hash = keys.get_my_keys().hash_l5_for_public_broadcast(l5_block)
    txn_hash = INTERCHAIN.publish_to_public_network(broadcast_hash)
    _log.info("[L5] After Publish to public network, setting new broadcast time")
    _log.info(f"[L5] transaction_hash {txn_hash}")
    # Record the public transaction hash, network, and block height at send time
    l5_block.transaction_hash += [txn_hash]
    l5_block.block_last_sent_at = INTERCHAIN.get_current_block()
    l5_block.network = INTERCHAIN_NETWORK
    _log.info(f"[L5] ADDING TO BLOCK key BLOCK/{l5_block.block_id}")
    _log.info(f"[L5] ADDING TO BLOCK val {l5_block.export_as_at_rest()}")
    elasticsearch.put_index_in_storage("BLOCK", l5_block.block_id, l5_block)
def finalize_block(block: l5_block_model.L5BlockModel, last_confirmed_block: Dict[str, Any], confirmed_txn_hash: str) -> None:
    """Finalize a confirmed L5 block: chain prev proof, sign, store, and broadcast."""
    _log.info(f"[L5] Block {block.block_id} confirmed")
    prior_proof = last_confirmed_block["proof"].get("proof")
    if prior_proof:
        block.prev_proof = prior_proof
    _log.info("[L5] Signing block")
    block.transaction_hash = [confirmed_txn_hash]
    block.proof = keys.get_my_keys().sign_block(block)
    _log.info("[L5] Storing new block and moving pointers")
    storage.put_object_as_json(f"BLOCK/{block.block_id}", block.export_as_at_rest())
    # In the future if we change/add indexes to an L5 block, it may need to be re-indexed here.
    # For now, no re-indexing is necessary, only a storage update
    set_last_confirmed_block(block)
    # Notify L1s that contributed to L5 block
    broadcast.dispatch(block)
def broadcast_to_public_chain(l5_block: l5_block_model.L5BlockModel) -> None:
    """Publish an L5 block's hash to the public network, then store and index the block."""
    _log.info("[L5] Preparing to broadcast")
    # Hash the block and publish the block to a public network
    broadcast_hash = keys.get_my_keys().hash_l5_for_public_broadcast(l5_block)
    txn_hash = _interchain_client.publish_l5_hash_to_public_network(broadcast_hash)
    _log.info("[L5] After Publish to public network, setting new broadcast time")
    _log.info(f"[L5] transaction_hash {txn_hash}")
    # Record the public transaction hash, network, and block height at send time
    l5_block.transaction_hash += [txn_hash]
    l5_block.block_last_sent_at = _interchain_client.get_current_block()
    l5_block.network = INTERCHAIN_NETWORK
    storage_key = f"BLOCK/{l5_block.block_id}"
    _log.info(f"[L5] Adding to storage at {storage_key} and creating index")
    storage.put_object_as_json(storage_key, l5_block.export_as_at_rest())
    redisearch.put_document(redisearch.Indexes.block.value, l5_block.block_id, l5_block.export_as_search_index())
def sign(message: bytes) -> str:
    """Return this chain's SHA-256 signature over the given message bytes."""
    my_keys = keys.get_my_keys()
    return my_keys.make_signature(message, crypto.SupportedHashes.sha256)