def sign(self, key):
    """Sign this transaction in place with a private key.

    Any existing signature (v, r, s) is overwritten, and ``self.sender``
    is set to the address derived from *key*.

    :param key: private key as 32 raw bytes, or a 64/66-character hex string.
    :raises ImportError: if the `bitcoin`/`secp256k1` packages are missing.
    :raises ValueError: if *key* matches one of the known zero-key encodings.
    :returns: self, for chaining.
    """
    if not is_bitcoin_available() or not is_secp256k1_available():
        raise ImportError(
            "In order to sign transactions the "
            "`bitcoin` and `secp256k1` packages must be installed."
        )
    from bitcoin import privtopub
    from secp256k1 import PrivateKey

    # Reject the obvious zero-key encodings up front; secp256k1 cannot
    # sign with a zero scalar.
    if key in (0, b'', b'\x00' * 32, b'0' * 64):
        raise ValueError("Zero privkey cannot sign")
    # The signing hash covers the *unsigned* form of the transaction.
    rawhash = keccak(rlp.encode(self, UnsignedTransaction))

    if len(key) in {64, 66}:
        # we need a binary key
        key = decode_hex(key)

    pk = PrivateKey(key, raw=True)
    sig_bytes, rec_id = pk.ecdsa_recoverable_serialize(
        pk.ecdsa_sign_recoverable(rawhash, raw=True)
    )
    signature = sig_bytes + force_bytes(chr(rec_id))
    # v is the recovery id shifted into the pre-EIP-155 27/28 range.
    # signature[64] is an int on py3 and a 1-char string on py2.
    self.v = (ord(signature[64]) if is_string(signature[64]) else signature[64]) + 27
    self.r = decode_big_endian_int(signature[0:32])
    self.s = decode_big_endian_int(signature[32:64])

    # Sender address: last 20 bytes of keccak(pubkey without the 0x04 prefix).
    self.sender = to_normalized_address(keccak(privtopub(key)[1:])[-20:])
    return self
def sender(self):
    """Recover (and cache) the sender address from this transaction's signature.

    Returns the normalized hex address of the signer, or 0 for unsigned
    transactions (``v == 0``).

    :raises ImportError: if the `bitcoin`/`secp256k1` packages are missing.
    :raises ValueError: on out-of-range or otherwise invalid signature values.
    """
    if not self._sender:
        if not is_bitcoin_available() or not is_secp256k1_available():
            raise ImportError(
                "In order to derive the sender for transactions the "
                "`bitcoin` and `secp256k1` packages must be installed."
            )
        from bitcoin import N
        from secp256k1 import PublicKey, ALL_FLAGS
        # Determine sender
        if self.v:
            # Pre-EIP-155 rules: r and s must be in [1, N), v in {27, 28}.
            has_invalid_signature_values = (
                self.r >= N or
                self.s >= N or
                self.v < 27 or
                self.v > 28 or
                self.r == 0 or
                self.s == 0
            )
            if has_invalid_signature_values:
                raise ValueError("Invalid signature values!")
            # Recovery runs over the hash of the *unsigned* encoding.
            rlpdata = rlp.encode(self, UnsignedTransaction)
            rawhash = keccak(rlpdata)

            pk = PublicKey(flags=ALL_FLAGS)
            try:
                pk.public_key = pk.ecdsa_recover(
                    rawhash,
                    pk.ecdsa_recoverable_deserialize(
                        # 64-byte compact signature: r || s, each left-padded to 32.
                        pad_left(
                            int_to_big_endian(self.r),
                            32,
                            b'\x00',
                        ) + pad_left(
                            int_to_big_endian(self.s),
                            32,
                            b'\x00',
                        ),
                        self.v - 27
                    ),
                    raw=True
                )
                pub = pk.serialize(compressed=False)
            except Exception:
                raise ValueError("Invalid signature values (x^3+7 is non-residue)")

            if pub[1:] == b"\x00" * (len(pub) - 1):
                raise ValueError("Invalid signature (zero privkey cannot sign)")
            # Address: last 20 bytes of keccak(uncompressed pubkey sans prefix byte).
            self._sender = to_normalized_address(keccak(pub[1:])[-20:])
            assert self.sender == self._sender
        else:
            self._sender = 0
    return self._sender
async def _handle_peer(self, peer: ETHPeer) -> None:
    """Consume sub-protocol messages from *peer* until cancelled.

    Only ``NodeData`` messages are processed (their nodes are fed into the
    state-sync scheduler); anything else is logged and ignored.
    """
    while True:
        try:
            cmd, msg = await peer.read_sub_proto_msg(self.cancel_token)
        except OperationCancelled:
            # Either our cancel token or the peer's has been triggered, so break out of the
            # loop.
            break
        if isinstance(cmd, eth.NodeData):
            self.logger.debug("Processing NodeData with %d entries", len(msg))
            for node in msg:
                self._total_processed_nodes += 1
                node_key = keccak(node)
                try:
                    self.scheduler.process([(node_key, node)])
                except SyncRequestAlreadyProcessed:
                    # This means we received a node more than once, which can happen when we
                    # retry after a timeout.
                    pass
                # A node may be received more than once, so pop() with a default value.
                self._pending_nodes.pop(node_key, None)
        else:
            # It'd be very convenient if we could ignore everything that is not a NodeData
            # when doing a StateSync, but need to double check because peers may consider that
            # "Bad Form" and disconnect from us.
            self.logger.debug("Ignoring %s(%s) while doing a StateSync", cmd, msg)
def signTransaction(self, transaction_dict, private_key):
    '''
    Sign a transaction described by a mapping of fields.

    @param private_key in bytes, str, or int.
    '''
    # Only mapping-like transaction descriptions are accepted.
    assert isinstance(transaction_dict, Mapping)

    account = self.privateKeyToAccount(private_key)

    # sign transaction
    v, r, s, rlp_encoded = sign_transaction_dict(account._key_obj, transaction_dict)

    signed = {
        'rawTransaction': HexBytes(rlp_encoded),
        'hash': HexBytes(keccak(rlp_encoded)),
        'r': r,
        's': s,
        'v': v,
    }
    return AttributeDict(signed)
async def _handshake(initiator: 'HandshakeInitiator', reader: asyncio.StreamReader,
                     writer: asyncio.StreamWriter
                     ) -> Tuple[bytes, bytes, PreImage, PreImage]:
    """See the handshake() function above.

    This code was factored out into this helper so that we can create Peers with directly
    connected readers/writers for our tests.
    """
    # Fresh random nonce for this session, hashed to a fixed length.
    initiator_nonce = keccak(os.urandom(HASH_LEN))
    auth_msg = initiator.create_auth_message(initiator_nonce)
    auth_init = initiator.encrypt_auth_message(auth_msg)
    writer.write(auth_init)

    # The responder replies with an encrypted auth-ack of a known length.
    auth_ack = await reader.read(ENCRYPTED_AUTH_ACK_LEN)

    ephemeral_pubkey, responder_nonce = initiator.decode_auth_ack_message(auth_ack)
    # Both raw ciphertexts feed into the MAC secrets, so they are passed along.
    aes_secret, mac_secret, egress_mac, ingress_mac = initiator.derive_secrets(
        initiator_nonce,
        responder_nonce,
        ephemeral_pubkey,
        auth_init,
        auth_ack
    )
    return aes_secret, mac_secret, egress_mac, ingress_mac
async def get_account(
        self, block_hash: bytes, address: bytes, cancel_token: CancelToken) -> Account:
    """Fetch and proof-check the account at *address* in the state of *block_hash*."""
    hashed_address = keccak(address)
    proof = await self._get_proof(
        cancel_token, block_hash, account_key=b'', key=hashed_address)
    header = await self.get_block_header_by_hash(block_hash, cancel_token)
    # Verify the returned account against the header's state root.
    account_rlp = HexaryTrie.get_from_proof(header.state_root, hashed_address, proof)
    return rlp.decode(account_rlp, sedes=Account)
def get_block_hash_for_testing(self, block_number):
    """Return a deterministic pseudo block hash for tests.

    Only the 256 most recent ancestors have hashes; anything else maps to b''.
    """
    oldest_available = self.block_number - 256
    if oldest_available <= block_number < self.block_number:
        return keccak(text="{0}".format(block_number))
    return b''
def test_build_filter_topic_signature(web3):
    # Build a filter over the Increased event and restrict the indexed argument.
    contract = web3.eth.contract(abi=CONTRACT_ABI)
    builder = contract.events.Increased.build_filter()
    builder.args['value'].match_any(100, 200, 300)

    deployed_filter = builder.deploy(web3)

    event_topic = HexBytes(keccak(text="Increased(uint256)")).hex()
    assert deployed_filter.filter_params == {'topics': (event_topic,)}
    assert deployed_filter.data_filter_set == (('uint256', (100, 200, 300)),)
def hash_log_entries(log_entries):
    """
    Helper function for computing the RLP hash of the logs from transaction
    execution.
    """
    from evm.rlp.logs import Log

    log_objects = [Log(*fields) for fields in log_entries]
    return keccak(rlp.encode(log_objects))
def validate_block(self, block):
    """Validate *block* against its parent header and consensus rules.

    For non-genesis blocks: gas-limit bounds, extra-data length, and strictly
    increasing timestamps. For all blocks: uncle count and validity, presence
    of the state root in the db, and that the uncles hash matches the header.

    :raises ValidationError: on any rule violation.
    """
    if not block.is_genesis:
        parent_header = get_parent_header(block.header, self.chaindb)

        validate_gas_limit(block.header.gas_limit, parent_header.gas_limit)
        validate_length_lte(block.header.extra_data, 32, title="BlockHeader.extra_data")

        # timestamp
        if block.header.timestamp < parent_header.timestamp:
            raise ValidationError(
                "`timestamp` is before the parent block's timestamp.\n"
                "- block : {0}\n"
                "- parent : {1}. ".format(
                    block.header.timestamp,
                    parent_header.timestamp,
                )
            )
        elif block.header.timestamp == parent_header.timestamp:
            raise ValidationError(
                "`timestamp` is equal to the parent block's timestamp\n"
                "- block : {0}\n"
                "- parent: {1}. ".format(
                    block.header.timestamp,
                    parent_header.timestamp,
                )
            )

    if len(block.uncles) > MAX_UNCLES:
        raise ValidationError(
            "Blocks may have a maximum of {0} uncles. Found "
            "{1}.".format(MAX_UNCLES, len(block.uncles))
        )

    for uncle in block.uncles:
        self.validate_uncle(block, uncle)

    if not self.state.is_key_exists(block.header.state_root):
        raise ValidationError(
            "`state_root` was not found in the db.\n"
            "- state_root: {0}".format(
                block.header.state_root,
            )
        )
    local_uncle_hash = keccak(rlp.encode(block.uncles))
    if local_uncle_hash != block.header.uncles_hash:
        raise ValidationError(
            "`uncles_hash` and block `uncles` do not match.\n"
            " - num_uncles : {0}\n"
            " - block uncle_hash : {1}\n"
            " - header uncle_hash: {2}".format(
                len(block.uncles),
                local_uncle_hash,
                # Bug fix: the header attribute is `uncles_hash` (as used in
                # the comparison above); `uncle_hash` raised AttributeError
                # while building this very error message.
                block.header.uncles_hash,
            )
        )
def test_pack():
    # Round-trip a PING packet through _pack/_unpack.
    sender, recipient = random_address(), random_address()
    wire_version = rlp.sedes.big_endian_int.serialize(discovery.PROTO_VERSION)
    ping_payload = [wire_version, sender.to_endpoint(), recipient.to_endpoint()]
    signing_key = keys.PrivateKey(keccak(b"seed"))

    packet = discovery._pack(discovery.CMD_PING.id, ping_payload, signing_key)
    pubkey, cmd_id, decoded_payload, _ = discovery._unpack(packet)

    assert pubkey == signing_key.public_key
    assert cmd_id == discovery.CMD_PING.id
    assert len(decoded_payload) == discovery.CMD_PING.elem_count
def get_cache(block_number):
    """Return the ethash cache for *block_number*'s epoch, memoized by seed.

    ``cache_seeds`` is extended lazily (each epoch seed is the keccak of the
    previous one); ``cache_by_seed`` is an LRU-bounded ordered mapping.
    """
    while len(cache_seeds) <= block_number // EPOCH_LENGTH:
        cache_seeds.append(keccak(cache_seeds[-1]))

    seed = cache_seeds[block_number // EPOCH_LENGTH]
    if seed in cache_by_seed:
        c = cache_by_seed.pop(seed)  # pop and append at end to mark as most recently used
        cache_by_seed[seed] = c
        return c
    c = mkcache_bytes(block_number)
    cache_by_seed[seed] = c
    if len(cache_by_seed) > cache_by_seed.max_items:
        cache_by_seed.popitem(last=False)  # evict the least recently accessed entry
    return c
def sha3(primitive=None, text=None, hexstr=None):
    """Compute the keccak-256 hash of the given input.

    Exactly one of *primitive* (bytes/int), *text*, or *hexstr* should carry
    the payload; anything else is rejected with a usage hint.
    """
    if not isinstance(primitive, (bytes, int, type(None))):
        raise TypeError(
            "You called sha3 with first arg %r and keywords %r. You must call it with one of "
            "these approaches: sha3(text='txt'), sha3(hexstr='0x747874'), "
            "sha3(b'\\x74\\x78\\x74'), or sha3(0x747874)." % (
                primitive,
                {'text': text, 'hexstr': hexstr}
            )
        )
    return keccak(to_bytes(primitive, hexstr=hexstr, text=text))
def apply_create_message(self): snapshot = self.vm_state.snapshot() # EIP161 nonce incrementation with self.vm_state.state_db() as state_db: state_db.increment_nonce(self.msg.storage_address) computation = self.apply_message() if computation.is_error: self.vm_state.revert(snapshot) return computation else: contract_code = computation.output if contract_code and len(contract_code) >= EIP170_CODE_SIZE_LIMIT: computation._error = OutOfGas( "Contract code size exceeds EIP170 limit of {0}. Got code of " "size: {1}".format( EIP170_CODE_SIZE_LIMIT, len(contract_code), ) ) self.vm_state.revert(snapshot) elif contract_code: contract_code_gas_cost = len(contract_code) * constants.GAS_CODEDEPOSIT try: computation.gas_meter.consume_gas( contract_code_gas_cost, reason="Write contract code for CREATE", ) except OutOfGas as err: # Different from Frontier: reverts state on gas failure while # writing contract code. computation._error = err self.vm_state.revert(snapshot) else: if self.logger: self.logger.debug( "SETTING CODE: %s -> length: %s | hash: %s", encode_hex(self.msg.storage_address), len(contract_code), encode_hex(keccak(contract_code)) ) with self.vm_state.state_db() as state_db: state_db.set_code(self.msg.storage_address, contract_code) self.vm_state.commit(snapshot) else: self.vm_state.commit(snapshot) return computation
def derive_secrets(self, initiator_nonce: bytes, responder_nonce: bytes, remote_ephemeral_pubkey: datatypes.PublicKey, auth_init_ciphertext: bytes, auth_ack_ciphertext: bytes ) -> Tuple[bytes, bytes, PreImage, PreImage]: """Derive base secrets from ephemeral key agreement.""" # ecdhe-shared-secret = ecdh.agree(ephemeral-privkey, remote-ephemeral-pubk) ecdhe_shared_secret = ecies.ecdh_agree( self.ephemeral_privkey, remote_ephemeral_pubkey) # shared-secret = keccak(ecdhe-shared-secret || keccak(nonce || initiator-nonce)) shared_secret = keccak( ecdhe_shared_secret + keccak(responder_nonce + initiator_nonce)) # aes-secret = keccak(ecdhe-shared-secret || shared-secret) aes_secret = keccak(ecdhe_shared_secret + shared_secret) # mac-secret = keccak(ecdhe-shared-secret || aes-secret) mac_secret = keccak(ecdhe_shared_secret + aes_secret) # setup keccak instances for the MACs # egress-mac = keccak_with_digest.new(mac-secret ^ recipient-nonce || auth-sent-init) mac1 = keccak_with_digest.new( sxor(mac_secret, responder_nonce) + auth_init_ciphertext ) # ingress-mac = keccak_with_digest.new(mac-secret ^ initiator-nonce || auth-recvd-ack) mac2 = keccak_with_digest.new( sxor(mac_secret, initiator_nonce) + auth_ack_ciphertext ) if self._is_initiator: egress_mac, ingress_mac = mac1, mac2 else: egress_mac, ingress_mac = mac2, mac1 return aes_secret, mac_secret, egress_mac, ingress_mac
def create_auth_message(self, nonce: bytes) -> bytes:
    """Build the plaintext RLPx auth message for *nonce*."""
    shared_secret = ecies.ecdh_agree(self.privkey, self.remote.pubkey)

    # S(ephemeral-privk, ecdh-shared-secret ^ nonce)
    signature = self.ephemeral_privkey.sign_msg_hash(
        sxor(shared_secret, nonce)).to_bytes()

    # S || H(ephemeral-pubk) || pubk || nonce || 0x0
    return b''.join((
        signature,
        keccak(self.ephemeral_pubkey.to_bytes()),
        self.pubkey.to_bytes(),
        nonce,
        b'\x00',
    ))
def test_secret_registry_register_batch(secret_registry_proxy):
    # Register four random secrets in one batch; expect one event each.
    secrets = [get_random_bytes(32) for _ in range(4)]
    expected_hashes = [keccak(s) for s in secrets]

    event_filter = secret_registry_proxy.secret_registered_filter()
    secret_registry_proxy.register_secret_batch(secrets)

    logs = event_filter.get_all_entries()
    assert len(logs) == 4

    block = secret_registry_proxy.get_register_block_for_secrethash(expected_hashes[0])
    events = [secret_registry_proxy.proxy.decode_event(entry) for entry in logs]
    assert all(event['blockNumber'] == block for event in events)

    seen_hashes = [event['args']['secrethash'] for event in events]
    assert all(secrethash in seen_hashes for secrethash in expected_hashes)
def test_secret_registry(secret_registry_proxy):
    # register secret
    secret = get_random_bytes(32)
    event_filter = secret_registry_proxy.secret_registered_filter()
    secret_registry_proxy.register_secret(secret)

    # check if event is raised
    logs = event_filter.get_all_entries()
    assert len(logs) == 1
    decoded_event = secret_registry_proxy.proxy.decode_event(logs[0])
    secrethash = keccak(secret)
    assert decoded_event['args']['secrethash'] == secrethash

    # check if registration block matches
    registration_block = secret_registry_proxy.get_register_block_for_secrethash(secrethash)
    assert logs[0]['blockNumber'] == registration_block

    # test non-existing secret
    unknown_hash = b'\x11' * 32
    assert secret_registry_proxy.get_register_block_for_secrethash(unknown_hash) == 0
def import_block(self, block):
    """Replay *block* on top of the current chain state and mine it."""
    header = block.header
    self.configure_header(
        coinbase=header.coinbase,
        gas_limit=header.gas_limit,
        timestamp=header.timestamp,
        extra_data=header.extra_data,
        mix_hash=header.mix_hash,
        nonce=header.nonce,
        uncles_hash=keccak(rlp.encode(block.uncles)),
    )

    # run all of the transactions.
    for transaction in block.transactions:
        self.apply_transaction(transaction)

    # transfer the list of uncles.
    self.block.uncles = block.uncles

    return self.mine_block()
def pack_block(self, block, *args, **kwargs):
    """
    Pack block for mining.

    :param bytes coinbase: 20-byte public address to receive block reward
    :param bytes uncles_hash: 32 bytes
    :param bytes state_root: 32 bytes
    :param bytes transaction_root: 32 bytes
    :param bytes receipt_root: 32 bytes
    :param int bloom:
    :param int gas_used:
    :param bytes extra_data: 32 bytes
    :param bytes mix_hash: 32 bytes
    :param bytes nonce: 8 bytes
    """
    if 'uncles' in kwargs:
        block.uncles = kwargs.pop('uncles')
        kwargs.setdefault('uncles_hash', keccak(rlp.encode(block.uncles)))

    header = block.header
    provided_fields = set(kwargs.keys())
    known_fields = set(tuple(zip(*BlockHeader.fields))[0])
    unknown_fields = provided_fields.difference(known_fields)

    if unknown_fields:
        # Bug fix: report the *unknown* fields as the ones that cannot be
        # set; previously the full list of known fields was interpolated
        # here, producing a misleading error. sorted() makes the message
        # deterministic.
        raise AttributeError(
            "Unable to set the field(s) {0} on the `BlockHeader` class. "
            "The allowed fields are: {1}.".format(
                ", ".join(sorted(unknown_fields)),
                ", ".join(sorted(known_fields)),
            )
        )

    for key, value in kwargs.items():
        setattr(header, key, value)

    # Perform validation
    self.validate_block(block)

    return block
def apply_create_message(self):
    """Apply a contract-creation message under Homestead rules.

    Runs the init code and, on success, charges GAS_CODEDEPOSIT per byte of
    the returned code before storing it; a gas shortfall while writing code
    reverts the whole creation (unlike Frontier).
    """
    snapshot = self.vm_state.snapshot()

    computation = self.apply_message()

    if computation.is_error:
        self.vm_state.revert(snapshot)
        return computation
    else:
        contract_code = computation.output

        if contract_code:
            contract_code_gas_cost = len(contract_code) * constants.GAS_CODEDEPOSIT
            try:
                computation.gas_meter.consume_gas(
                    contract_code_gas_cost,
                    reason="Write contract code for CREATE",
                )
            except OutOfGas as err:
                # Different from Frontier: reverts state on gas failure while
                # writing contract code.
                computation._error = err
                self.vm_state.revert(snapshot)
            else:
                if self.logger:
                    self.logger.debug(
                        "SETTING CODE: %s -> length: %s | hash: %s",
                        encode_hex(self.msg.storage_address),
                        len(contract_code),
                        encode_hex(keccak(contract_code))
                    )
                with self.vm_state.state_db() as state_db:
                    state_db.set_code(self.msg.storage_address, contract_code)
                self.vm_state.commit(snapshot)
        else:
            # No output: nothing to deploy; keep the message's effects.
            self.vm_state.commit(snapshot)
    return computation
import os

import pytest

from eth_keys import KeyAPI
from eth_keys.backends import CoinCurveECCBackend
from eth_keys.backends import NativeECCBackend

from eth_utils import (
    keccak,
)

# Fixed message/digest pair shared by the signing tests parametrized below.
MSG = b'message'
MSGHASH = keccak(MSG)

# The native backend is always available; coincurve is optional unless the
# environment explicitly requires it via REQUIRE_COINCURVE.
backends = [
    NativeECCBackend(),
]

try:
    import coincurve
    backends.append(CoinCurveECCBackend())
except ImportError:
    if 'REQUIRE_COINCURVE' in os.environ:
        raise


def backend_id_fn(backend):
    # Use the backend class name as the pytest parameter id.
    return type(backend).__name__


@pytest.fixture(params=backends, ids=backend_id_fn)
# Demonstration script: reconstruct a mainnet transaction's signed RLP
# encoding and verify that its keccak hash matches the node-reported hash.
from web3.auto.infura import w3

from rlp.sedes import (Binary, big_endian_int, binary)
import rlp
from eth_utils import (
    keccak,
)

# 20-byte address sedes; empty is allowed (contract-creation `to`).
address = Binary.fixed_length(20, allow_empty=True)


class _Transaction(rlp.Serializable):
    # Field order must match the canonical signed-transaction RLP layout.
    fields = [
        ('nonce', big_endian_int),
        ('gas_price', big_endian_int),
        ('gas', big_endian_int),
        ('to', address),
        ('value', big_endian_int),
        ('data', binary),
        ('v', big_endian_int),
        ('r', big_endian_int),
        ('s', big_endian_int)]


tx = w3.eth.getTransaction(
    0x95604ade1cd64e12851e9626270fe1b61b3034293ae6d6d85fcf27b19d146c28)
raw_tx = _Transaction(
    tx.nonce,
    tx.gasPrice,
    tx.gas,
    w3.toBytes(hexstr=tx.to),
    tx.value,
    w3.toBytes(hexstr=w3.toHex(hexstr=tx.input)),
    tx.v,
    w3.toInt(tx.r),
    w3.toInt(tx.s))
rlp_tx = rlp.encode(raw_tx)
# The transaction hash is the keccak of the signed RLP encoding.
assert tx.hash == keccak(rlp_tx)
print(True)
def sha3(data: bytes) -> bytes:
    """Return the keccak-256 digest of *data*."""
    digest = keccak(data)
    return digest
def execute_transaction(self, transaction):
    """Execute a sharding transaction and return the resulting computation.

    Phases: validate, build the VM message (CREATE2-style address for
    code-bearing transactions), apply it, then settle refunds, fees, and
    self-destructed accounts.
    """
    # state_db context manager that restricts access as specified in the transaction
    state_db_cm = functools.partial(self.state_db, access_list=transaction.prefix_list)

    #
    # 1) Pre Computation
    #

    # Validate the transaction
    transaction.validate()
    self.validate_transaction(transaction)

    with state_db_cm() as state_db:
        # Setup VM Message
        message_gas = transaction.gas - transaction.intrinsic_gas

        if transaction.code:
            # Code-bearing transaction: deterministic CREATE2-style address.
            contract_address = generate_CREATE2_contract_address(
                b'',
                transaction.code,
            )
            data = b''
            code = transaction.code
            is_create = True
        else:
            contract_address = None
            data = transaction.data
            code = state_db.get_code(transaction.to)
            is_create = False

    self.logger.info(
        (
            "TRANSACTION: to: %s | gas: %s | "
            "gas-price: %s | data-hash: %s | code-hash: %s"
        ),
        encode_hex(transaction.to),
        transaction.gas,
        transaction.gas_price,
        encode_hex(keccak(transaction.data)),
        encode_hex(keccak(transaction.code)),
    )

    message = ShardingMessage(
        gas=message_gas,
        to=transaction.to,
        sender=ENTRY_POINT,
        value=0,
        data=data,
        code=code,
        is_create=is_create,
        access_list=transaction.prefix_list,
    )
    transaction_context = self.get_transaction_context_class()(
        gas_price=transaction.gas_price,
        origin=ENTRY_POINT,
        sig_hash=transaction.sig_hash,
        transaction_gas_limit=transaction.gas,
    )

    #
    # 2) Apply the message to the VM.
    #
    if message.is_create:
        with state_db_cm(read_only=True) as state_db:
            is_collision = state_db.account_has_code(contract_address)

        # Check if contract address provided by transaction is correct
        if contract_address != transaction.to:
            computation = self.get_computation(message, transaction_context)
            computation._error = IncorrectContractCreationAddress(
                "Contract address calculated: {0} but {1} is provided".format(
                    encode_hex(contract_address),
                    encode_hex(transaction.to),
                )
            )
            self.logger.debug(
                "Contract address calculated: %s but %s is provided",
                encode_hex(contract_address),
                encode_hex(transaction.to),
            )
        elif is_collision:
            # The address of the newly created contract has collided
            # with an existing contract address.
            computation = self.get_computation(message, transaction_context)
            computation._error = ContractCreationCollision(
                "Address collision while creating contract: {0}".format(
                    encode_hex(contract_address),
                )
            )
            self.logger.debug(
                "Address collision while creating contract: %s",
                encode_hex(contract_address),
            )
        else:
            computation = self.get_computation(
                message,
                transaction_context,
            ).apply_create_message()
    else:
        computation = self.get_computation(
            message, transaction_context).apply_message()

    #
    # 3) Post Computation
    #

    # Self Destruct Refunds
    num_deletions = len(computation.get_accounts_for_deletion())
    if num_deletions:
        computation.gas_meter.refund_gas(REFUND_SELFDESTRUCT * num_deletions)

    # Gas Refunds
    transaction_fee, gas_refund_amount = computation.compute_transaction_fee_and_refund()

    if gas_refund_amount:
        self.logger.debug(
            'TRANSACTION REFUND: %s -> %s',
            gas_refund_amount,
            encode_hex(message.to),
        )
        with state_db_cm() as state_db:
            state_db.delta_balance(message.to, gas_refund_amount)

    # Miner Fees
    self.logger.debug(
        'TRANSACTION FEE: %s',
        transaction_fee,
    )

    # Process Self Destructs
    with state_db_cm() as state_db:
        for account, beneficiary in computation.get_accounts_for_deletion():
            # TODO: need to figure out how we prevent multiple selfdestructs from
            # the same account and if this is the right place to put this.
            self.logger.debug('DELETING ACCOUNT: %s', encode_hex(account))

            # TODO: this balance setting is likely superflous and can be
            # removed since `delete_account` does this.
            state_db.set_balance(account, 0)
            state_db.delete_account(account)

    return computation
def test_transfer_hash_computation(transfer_event):
    computed = compute_transfer_hash(transfer_event)

    # The fixture's log index is 5, which big-endian-encodes to b"\x05".
    assert transfer_event.logIndex == 5
    expected = keccak(transfer_event.transactionHash + b"\x05")
    assert computed == expected
def validate_block(self, block):
    """Validate *block* against its parent header and consensus rules.

    For non-genesis blocks: gas-limit bounds, extra-data length, and strictly
    increasing timestamps. For all blocks: the transaction trie root, uncle
    count and validity, presence of the state root in the db, and that the
    uncles hash matches the header.

    :raises ValidationError: on any rule violation.
    """
    if not block.is_genesis:
        parent_header = get_parent_header(block.header, self.chaindb)

        validate_gas_limit(block.header.gas_limit, parent_header.gas_limit)
        validate_length_lte(block.header.extra_data, 32, title="BlockHeader.extra_data")

        # timestamp
        if block.header.timestamp < parent_header.timestamp:
            raise ValidationError(
                "`timestamp` is before the parent block's timestamp.\n"
                "- block : {0}\n"
                "- parent : {1}. ".format(
                    block.header.timestamp,
                    parent_header.timestamp,
                )
            )
        elif block.header.timestamp == parent_header.timestamp:
            raise ValidationError(
                "`timestamp` is equal to the parent block's timestamp\n"
                "- block : {0}\n"
                "- parent: {1}. ".format(
                    block.header.timestamp,
                    parent_header.timestamp,
                )
            )

    # The header must commit to exactly the transactions in the block body.
    tx_root_hash, _ = make_trie_root_and_nodes(block.transactions)
    if tx_root_hash != block.header.transaction_root:
        raise ValidationError(
            "Block's transaction_root ({0}) does not match expected value: {1}".format(
                block.header.transaction_root, tx_root_hash))

    if len(block.uncles) > MAX_UNCLES:
        raise ValidationError(
            "Blocks may have a maximum of {0} uncles. Found "
            "{1}.".format(MAX_UNCLES, len(block.uncles))
        )

    for uncle in block.uncles:
        self.validate_uncle(block, uncle)

    if not self.state.is_key_exists(block.header.state_root):
        raise ValidationError(
            "`state_root` was not found in the db.\n"
            "- state_root: {0}".format(
                block.header.state_root,
            )
        )
    local_uncle_hash = keccak(rlp.encode(block.uncles))
    if local_uncle_hash != block.header.uncles_hash:
        raise ValidationError(
            "`uncles_hash` and block `uncles` do not match.\n"
            " - num_uncles : {0}\n"
            " - block uncle_hash : {1}\n"
            " - header uncle_hash: {2}".format(
                len(block.uncles),
                local_uncle_hash,
                # Bug fix: the header attribute is `uncles_hash` (as used in
                # the comparison above); `uncle_hash` raised AttributeError
                # while building this very error message.
                block.header.uncles_hash,
            )
        )
def sha3(data: bytes) -> bytes:
    """Alias for keccak-256 (the pre-standard 'sha3' used by Ethereum)."""
    return keccak(data)
def random_node():
    """Build a kademlia Node with a random key and a random address."""
    letters = "".join(random.sample(string.ascii_lowercase, 10))
    priv_key = keys.PrivateKey(keccak(to_bytes(text=letters)))
    return kademlia.Node(priv_key.public_key, random_address())
def __hash__(self) -> int:
    """Integer hash derived from the keccak of the serialized form."""
    digest = keccak(self.to_bytes())
    return big_endian_to_int(digest)
def hash(self) -> Hash32:
    """Keccak hash of the cached RLP encoding, computed lazily and memoized."""
    if self._hash is not None:
        return self._hash
    self._hash = keccak(self._cached_rlp)
    return self._hash
def _execute_frontier_transaction(vm_state, transaction):
    """Execute *transaction* under Frontier rules.

    Buys gas, increments the sender nonce, builds and applies the VM message
    (contract creation when `to` is the CREATE address), then settles gas
    refunds, miner fees, and self-destructed accounts.
    """
    # Reusable for other forks

    #
    # 1) Pre Computation
    #

    # Validate the transaction
    transaction.validate()
    vm_state.validate_transaction(transaction)

    gas_fee = transaction.gas * transaction.gas_price
    with vm_state.mutable_state_db() as state_db:
        # Buy Gas
        state_db.delta_balance(transaction.sender, -1 * gas_fee)

        # Increment Nonce
        state_db.increment_nonce(transaction.sender)

        # Setup VM Message
        message_gas = transaction.gas - transaction.intrinsic_gas

        if transaction.to == constants.CREATE_CONTRACT_ADDRESS:
            # The nonce was already incremented above, hence the -1.
            contract_address = generate_contract_address(
                transaction.sender,
                state_db.get_nonce(transaction.sender) - 1,
            )
            data = b''
            code = transaction.data
        else:
            contract_address = None
            data = transaction.data
            code = state_db.get_code(transaction.to)

    vm_state.logger.info(
        (
            "TRANSACTION: sender: %s | to: %s | value: %s | gas: %s | "
            "gas-price: %s | s: %s | r: %s | v: %s | data-hash: %s"
        ),
        encode_hex(transaction.sender),
        encode_hex(transaction.to),
        transaction.value,
        transaction.gas,
        transaction.gas_price,
        transaction.s,
        transaction.r,
        transaction.v,
        encode_hex(keccak(transaction.data)),
    )

    message = Message(
        gas=message_gas,
        to=transaction.to,
        sender=transaction.sender,
        value=transaction.value,
        data=data,
        code=code,
        create_address=contract_address,
    )
    transaction_context = vm_state.get_transaction_context_class()(
        gas_price=transaction.gas_price,
        origin=transaction.sender,
    )

    #
    # 2) Apply the message to the VM.
    #
    if message.is_create:
        is_collision = vm_state.read_only_state_db.account_has_code_or_nonce(
            contract_address)

        if is_collision:
            # The address of the newly created contract has *somehow* collided
            # with an existing contract address.
            computation = vm_state.get_computation(message, transaction_context)
            computation._error = ContractCreationCollision(
                "Address collision while creating contract: {0}".format(
                    encode_hex(contract_address),
                )
            )
            vm_state.logger.debug(
                "Address collision while creating contract: %s",
                encode_hex(contract_address),
            )
        else:
            computation = vm_state.get_computation(
                message,
                transaction_context,
            ).apply_create_message()
    else:
        computation = vm_state.get_computation(
            message, transaction_context).apply_message()

    #
    # 3) Post Computation
    #

    # Self Destruct Refunds
    num_deletions = len(computation.get_accounts_for_deletion())
    if num_deletions:
        computation.gas_meter.refund_gas(REFUND_SELFDESTRUCT * num_deletions)

    # Gas Refunds
    gas_remaining = computation.get_gas_remaining()
    gas_refunded = computation.get_gas_refund()
    gas_used = transaction.gas - gas_remaining
    # The refund is capped at half the gas actually consumed.
    gas_refund = min(gas_refunded, gas_used // 2)
    gas_refund_amount = (gas_refund + gas_remaining) * transaction.gas_price

    if gas_refund_amount:
        vm_state.logger.debug(
            'TRANSACTION REFUND: %s -> %s',
            gas_refund_amount,
            encode_hex(message.sender),
        )
        with vm_state.mutable_state_db() as state_db:
            state_db.delta_balance(message.sender, gas_refund_amount)

    # Miner Fees
    transaction_fee = (transaction.gas - gas_remaining - gas_refund) * transaction.gas_price
    vm_state.logger.debug(
        'TRANSACTION FEE: %s -> %s',
        transaction_fee,
        encode_hex(vm_state.coinbase),
    )
    with vm_state.mutable_state_db() as state_db:
        state_db.delta_balance(vm_state.coinbase, transaction_fee)

    # Process Self Destructs
    with vm_state.mutable_state_db() as state_db:
        for account, beneficiary in computation.get_accounts_for_deletion():
            # TODO: need to figure out how we prevent multiple selfdestructs from
            # the same account and if this is the right place to put this.
            vm_state.logger.debug('DELETING ACCOUNT: %s', encode_hex(account))

            # TODO: this balance setting is likely superflous and can be
            # removed since `delete_account` does this.
            state_db.set_balance(account, 0)
            state_db.delete_account(account)

    return computation
def _set_root_node(self, root_node):
    """Encode *root_node*, record its hash as the new root, and persist it."""
    validate_is_node(root_node)
    encoded = rlp.encode(root_node)
    self.root_hash = keccak(encoded)
    self.db[self.root_hash] = encoded
    compute_leaf_key,
    compute_extension_key,
    is_blank_node,
    is_extension_node,
    is_leaf_node,
    consume_common_prefix,
    key_starts_with,
)
from trie.validation import (
    validate_is_node,
    validate_is_bytes,
)


# sanity check: the blank-node constants must agree with keccak
assert BLANK_NODE_HASH == keccak(rlp.encode(b''))
assert BLANK_HASH == keccak(b'')


class HexaryTrie(object):
    # Backing key/value store (node hash -> encoded node).
    db = None
    # Hash of the current root node.
    root_hash = None

    # Shortcuts
    BLANK_NODE_HASH = BLANK_NODE_HASH
    BLANK_NODE = BLANK_NODE

    def __init__(self, db, root_hash=BLANK_NODE_HASH):
        """Create a trie view over *db* rooted at *root_hash*."""
        self.db = db
        validate_is_bytes(root_hash)
        self.root_hash = root_hash
def __init__(self, pubkey: datatypes.PublicKey, address: Address) -> None:
    """Create a node identified by *pubkey* and reachable at *address*."""
    self.pubkey = pubkey
    self.address = address
    key_digest = keccak(pubkey.to_bytes())
    self.id = big_endian_to_int(key_digest)
def __init__(self, pubkey: datatypes.PublicKey, address: Address) -> None:
    """Store the node's public key and address; derive its integer id."""
    self.pubkey = pubkey
    self.address = address
    digest = keccak(pubkey.to_bytes())
    self.id = big_endian_to_int(digest)
def get_discovery_protocol(seed=b"seed"):
    """Build a DiscoveryProtocol with a deterministic key derived from *seed*."""
    node_key = keys.PrivateKey(keccak(seed))
    listen_address = random_address()
    return discovery.DiscoveryProtocol(node_key, listen_address, bootstrap_nodes=[])
def mining_hash(self):
    """Header hash with the PoW fields (mix_hash, nonce) excluded."""
    header_sedes = BlockHeader.exclude(['mix_hash', 'nonce'])
    return keccak(rlp.encode(self, header_sedes))
def test_persist_header_to_db_unknown_parent(chaindb, header, seed):
    # Point the header at a parent hash that is not present in the database.
    header.parent_hash = keccak(seed)

    with pytest.raises(ParentNotFound):
        chaindb.persist_header_to_db(header)
def random_node():
    """Return a kademlia Node with a freshly generated random identity."""
    random_letters = "".join(random.sample(string.ascii_lowercase, 10))
    priv_key = keys.PrivateKey(keccak(force_bytes(random_letters)))
    return kademlia.Node(priv_key.public_key, random_address())
def compute_transfer_hash(transfer_event: AttributeDict) -> Hash32:
    """Hash identifying a transfer: keccak(transactionHash || big-endian logIndex)."""
    preimage = bytes(transfer_event.transactionHash) + int_to_big_endian(
        transfer_event.logIndex)
    return Hash32(keccak(preimage))
def hash(self):
    """Keccak-256 hash of the encoded representation."""
    encoded = self.encoded
    return keccak(encoded)
from raiden.utils.keys import privatekey_to_address
from raiden.utils.signing import sha3
from raiden.utils.typing import ChainID, List, Port, PrivateKey, TokenAmount
from raiden_contracts.constants import NETWORKNAME_TO_ID

# Cluster layout: a few geth nodes backing several funded Raiden accounts.
NUM_GETH_NODES = 3
NUM_RAIDEN_ACCOUNTS = 10
START_PORT = 30301
START_RPCPORT = 8101

# Deterministic account keys derived from "host:port" seed strings.
DEFAULT_ACCOUNTS_SEEDS = [
    "127.0.0.1:{}".format(START_PORT + i).encode() for i in range(NUM_RAIDEN_ACCOUNTS)
]
DEFAULT_ACCOUNTS_KEYS: List[PrivateKey] = [
    PrivateKey(keccak(seed)) for seed in DEFAULT_ACCOUNTS_SEEDS
]
DEFAULT_ACCOUNTS = [
    AccountDescription(privatekey_to_address(key), TokenAmount(DEFAULT_BALANCE))
    for key in DEFAULT_ACCOUNTS_KEYS
]


def main() -> None:
    tmpdir = tempfile.mkdtemp()

    geth_nodes = []
    for i in range(NUM_GETH_NODES):
        # The first node is the miner.
        is_miner = i == 0
        node_key = PrivateKey(sha3(f"node:{i}".encode()))
def recover_public_key_from_msg(self, message: bytes) -> PublicKey:
    """Keccak-hash *message* and recover the signer's public key."""
    return self.recover_public_key_from_msg_hash(keccak(message))
def _encode(self, value: Any) -> HexStr:
    """ABI-encode *value* as a hex topic; dynamic types are packed and hashed."""
    if not is_dynamic_sized_type(self.arg_type):
        return to_hex(self.abi_codec.encode_single(self.arg_type, value))
    packed = encode_single_packed(self.arg_type, value)
    return to_hex(keccak(packed))
def private_keys() -> List[str]:
    """Deterministic per-node private keys derived from a fixed offset."""
    base = 14789632
    keys = []
    for node_index in range(NUMBER_OF_NODES):
        keys.append(encode_hex(keccak(base + node_index)))
    return keys
def test_upgrading(testerchain, deploy_contract):
    """Exercise the Dispatcher upgrade/rollback lifecycle for Adjudicator.

    Deploys v1 behind a Dispatcher, upgrades to a v2 mock, verifies the new
    storage/ABI, checks bad upgrades are rejected, rolls back, and finally
    inspects the StateVerified/UpgradeFinished events emitted along the way.
    """
    creator = testerchain.client.accounts[0]
    # NOTE(review): `secret` / `secret2` are module-level fixtures defined
    # elsewhere in this file; the dispatcher requires their hashes.
    secret_hash = keccak(secret)
    secret2_hash = keccak(secret2)

    # Only escrow contract is allowed in Adjudicator constructor
    with pytest.raises((TransactionFailed, ValueError)):
        deploy_contract('Adjudicator', creator, ALGORITHM_KECCAK256, 1, 2, 3, 4)

    # Deploy contracts
    escrow1, _ = deploy_contract('StakingEscrowForAdjudicatorMock')
    escrow2, _ = deploy_contract('StakingEscrowForAdjudicatorMock')
    address1 = escrow1.address
    address2 = escrow2.address
    contract_library_v1, _ = deploy_contract(
        'Adjudicator', address1, ALGORITHM_KECCAK256, 1, 2, 3, 4)
    dispatcher, _ = deploy_contract(
        'Dispatcher', contract_library_v1.address, secret_hash)

    # Deploy second version of the contract
    contract_library_v2, _ = deploy_contract(
        'AdjudicatorV2Mock', address2, ALGORITHM_SHA256, 5, 6, 7, 8)
    # Bind the v2 ABI to the dispatcher address, so calls go through the proxy.
    contract = testerchain.client.get_contract(
        abi=contract_library_v2.abi,
        address=dispatcher.address,
        ContractFactoryClass=Contract)

    # Can't call `finishUpgrade` and `verifyState` methods outside upgrade lifecycle
    with pytest.raises((TransactionFailed, ValueError)):
        tx = contract_library_v1.functions.finishUpgrade(
            contract.address).transact({'from': creator})
        testerchain.wait_for_receipt(tx)
    with pytest.raises((TransactionFailed, ValueError)):
        tx = contract_library_v1.functions.verifyState(
            contract.address).transact({'from': creator})
        testerchain.wait_for_receipt(tx)

    # Upgrade to the second version: first confirm the v1 storage values ...
    assert address1 == contract.functions.escrow().call()
    assert ALGORITHM_KECCAK256 == contract.functions.hashAlgorithm().call()
    assert 1 == contract.functions.basePenalty().call()
    assert 2 == contract.functions.penaltyHistoryCoefficient().call()
    assert 3 == contract.functions.percentagePenaltyCoefficient().call()
    assert 4 == contract.functions.rewardCoefficient().call()
    tx = dispatcher.functions.upgrade(
        contract_library_v2.address, secret, secret2_hash).transact({'from': creator})
    testerchain.wait_for_receipt(tx)

    # Check constructor and storage values
    assert contract_library_v2.address == dispatcher.functions.target().call()
    assert address2 == contract.functions.escrow().call()
    assert ALGORITHM_SHA256 == contract.functions.hashAlgorithm().call()
    assert 5 == contract.functions.basePenalty().call()
    assert 6 == contract.functions.penaltyHistoryCoefficient().call()
    assert 7 == contract.functions.percentagePenaltyCoefficient().call()
    assert 8 == contract.functions.rewardCoefficient().call()

    # Check new ABI
    tx = contract.functions.setValueToCheck(3).transact({'from': creator})
    testerchain.wait_for_receipt(tx)
    assert 3 == contract.functions.valueToCheck().call()

    # Can't upgrade to the previous version or to the bad version
    contract_library_bad, _ = deploy_contract('AdjudicatorBad')
    with pytest.raises(TransactionFailed):
        tx = dispatcher.functions.upgrade(contract_library_v1.address, secret2, secret_hash) \
            .transact({'from': creator})
        testerchain.wait_for_receipt(tx)
    with pytest.raises(TransactionFailed):
        tx = dispatcher.functions.upgrade(contract_library_bad.address, secret2, secret_hash) \
            .transact({'from': creator})
        testerchain.wait_for_receipt(tx)

    # But can rollback
    tx = dispatcher.functions.rollback(secret2, secret_hash).transact({'from': creator})
    testerchain.wait_for_receipt(tx)
    assert contract_library_v1.address == dispatcher.functions.target().call()
    assert address1 == contract.functions.escrow().call()
    assert ALGORITHM_KECCAK256 == contract.functions.hashAlgorithm().call()
    assert 1 == contract.functions.basePenalty().call()
    assert 2 == contract.functions.penaltyHistoryCoefficient().call()
    assert 3 == contract.functions.percentagePenaltyCoefficient().call()
    assert 4 == contract.functions.rewardCoefficient().call()

    # After rollback new ABI is unavailable
    with pytest.raises(TransactionFailed):
        tx = contract.functions.setValueToCheck(2).transact({'from': creator})
        testerchain.wait_for_receipt(tx)

    # Try to upgrade to the bad version
    with pytest.raises(TransactionFailed):
        tx = dispatcher.functions.upgrade(contract_library_bad.address, secret, secret2_hash) \
            .transact({'from': creator})
        testerchain.wait_for_receipt(tx)

    # One StateVerified event per verification attempt (upgrade x2, rollback, bad upgrade).
    events = dispatcher.events.StateVerified.createFilter(
        fromBlock=0).get_all_entries()
    assert 4 == len(events)
    event_args = events[0]['args']
    assert contract_library_v1.address == event_args['testTarget']
    assert creator == event_args['sender']
    event_args = events[1]['args']
    assert contract_library_v2.address == event_args['testTarget']
    assert creator == event_args['sender']
    assert event_args == events[2]['args']
    event_args = events[3]['args']
    assert contract_library_v2.address == event_args['testTarget']
    assert creator == event_args['sender']

    # UpgradeFinished fired for: initial deploy (v1), upgrade (v2), rollback (v1).
    events = dispatcher.events.UpgradeFinished.createFilter(
        fromBlock=0).get_all_entries()
    assert 3 == len(events)
    event_args = events[0]['args']
    assert contract_library_v1.address == event_args['target']
    assert creator == event_args['sender']
    event_args = events[1]['args']
    assert contract_library_v2.address == event_args['target']
    assert creator == event_args['sender']
    event_args = events[2]['args']
    assert contract_library_v1.address == event_args['target']
    assert creator == event_args['sender']
def verify_msg(self, message: bytes, public_key: PublicKey) -> bool:
    """Return True when this signature is valid for ``message`` and ``public_key``."""
    return self.verify_msg_hash(keccak(message), public_key)
def get_discovery_protocol(seed=b"seed"):
    """Create a DiscoveryProtocol whose private key is derived from ``seed``."""
    private_key = keys.PrivateKey(keccak(seed))
    listen_address = random_address()
    return discovery.DiscoveryProtocol(private_key, listen_address, bootstrap_nodes=[])
def test_register_secret_happy_path(secret_registry_proxy: SecretRegistry, contract_manager):
    """Test happy path of SecretRegistry with a single secret.

    Test that `register_secret` changes the smart contract state by registering
    the secret, this can be verified by the block height and the existence of
    the SecretRegistered event.
    """
    secret = make_secret()
    secrethash = keccak(secret)
    # A second secret that is never registered, used as a negative control.
    secret_unregistered = make_secret()
    secrethash_unregistered = keccak(secret_unregistered)
    secret_registered_filter = secret_registry_proxy.secret_registered_filter()

    # Sanity: neither secret may be known before the test acts.
    assert not secret_registry_proxy.is_secret_registered(
        secrethash=secrethash,
        block_identifier='latest',
    ), 'Test setup is invalid, secret must be unknown'
    assert not secret_registry_proxy.is_secret_registered(
        secrethash=secrethash_unregistered,
        block_identifier='latest',
    ), 'Test setup is invalid, secret must be unknown'

    chain = BlockChainService(
        jsonrpc_client=secret_registry_proxy.client,
        contract_manager=contract_manager,
    )
    # Advance past the pruning horizon so that querying block 0 below is
    # guaranteed to hit pruned state.
    chain.wait_until_block(STATE_PRUNING_AFTER_BLOCKS + 1)

    # Querying a pruned block identifier must fail loudly.
    with pytest.raises(NoStateForBlockIdentifier):
        secret_registry_proxy.is_secret_registered(
            secrethash=secrethash_unregistered,
            block_identifier=0,
        )

    secret_registry_proxy.register_secret(
        secret=secret,
        given_block_identifier='latest',
    )

    logs = [
        secret_registry_proxy.proxy.decode_event(encoded_log)
        for encoded_log in secret_registered_filter.get_all_entries()
    ]
    secret_registered = must_have_event(
        logs,
        {
            'event': 'SecretRevealed',
            'args': {
                'secrethash': secrethash,
            },
        },
    )
    msg = 'SecretRegistry.register_secret returned but the SecretRevealed event was not emitted.'
    assert secret_registered, msg

    # The stored registration block must agree with the event's block number.
    registered_block = secret_registry_proxy.get_secret_registration_block_by_secrethash(
        secrethash=secrethash,
        block_identifier='latest',
    )
    msg = (
        'Block height returned by the SecretRegistry.get_secret_registration_block_by_secrethash '
        'does not match the block from the SecretRevealed event.')
    assert secret_registered['blockNumber'] == registered_block, msg

    # The control secret was never registered, so it must have no block.
    block = secret_registry_proxy.get_secret_registration_block_by_secrethash(
        secrethash=secrethash_unregistered,
        block_identifier='latest',
    )
    assert block is None, 'The secret that was not registered must not change block height!'
def test_upgrading(testerchain):
    """Exercise the Dispatcher upgrade/rollback lifecycle for PolicyManager.

    Deploys v1 behind a Dispatcher, upgrades to a v2 mock, verifies the new
    storage/ABI, checks bad upgrades are rejected, rolls back, and finally
    inspects the StateVerified/UpgradeFinished events emitted along the way.
    """
    creator = testerchain.client.accounts[0]
    # NOTE(review): `secret` / `secret2` are module-level fixtures defined
    # elsewhere in this file; the dispatcher requires their hashes.
    secret_hash = keccak(secret)
    secret2_hash = keccak(secret2)

    # Only escrow contract is allowed in PolicyManager constructor
    with pytest.raises((TransactionFailed, ValueError)):
        testerchain.deploy_contract('PolicyManager', creator)

    # Deploy contracts
    escrow1, _ = testerchain.deploy_contract('StakingEscrowForPolicyMock', 1)
    escrow2, _ = testerchain.deploy_contract('StakingEscrowForPolicyMock', 1)
    address1 = escrow1.address
    address2 = escrow2.address
    contract_library_v1, _ = testerchain.deploy_contract(
        'PolicyManager', address1)
    dispatcher, _ = testerchain.deploy_contract('Dispatcher', contract_library_v1.address, secret_hash)

    # Deploy second version of the contract
    contract_library_v2, _ = testerchain.deploy_contract(
        'PolicyManagerV2Mock', address2)
    # Bind the v2 ABI to the dispatcher address, so calls go through the proxy.
    contract = testerchain.client.get_contract(
        abi=contract_library_v2.abi,
        address=dispatcher.address,
        ContractFactoryClass=Contract)

    # Can't call `finishUpgrade` and `verifyState` methods outside upgrade lifecycle
    with pytest.raises((TransactionFailed, ValueError)):
        tx = contract_library_v1.functions.finishUpgrade(
            contract.address).transact({'from': creator})
        testerchain.wait_for_receipt(tx)
    with pytest.raises((TransactionFailed, ValueError)):
        tx = contract_library_v1.functions.verifyState(
            contract.address).transact({'from': creator})
        testerchain.wait_for_receipt(tx)

    # Upgrade to the second version
    assert address1 == contract.functions.escrow().call()
    tx = dispatcher.functions.upgrade(
        contract_library_v2.address, secret, secret2_hash).transact({'from': creator})
    testerchain.wait_for_receipt(tx)
    # Check constructor and storage values
    assert contract_library_v2.address == dispatcher.functions.target().call()
    assert address2 == contract.functions.escrow().call()
    # Check new ABI
    tx = contract.functions.setValueToCheck(3).transact({'from': creator})
    testerchain.wait_for_receipt(tx)
    assert 3 == contract.functions.valueToCheck().call()

    # Can't upgrade to the previous version or to the bad version
    contract_library_bad, _ = testerchain.deploy_contract(
        'PolicyManagerBad', address2)
    with pytest.raises((TransactionFailed, ValueError)):
        tx = dispatcher.functions.upgrade(contract_library_v1.address, secret2, secret_hash)\
            .transact({'from': creator})
        testerchain.wait_for_receipt(tx)
    with pytest.raises((TransactionFailed, ValueError)):
        tx = dispatcher.functions.upgrade(contract_library_bad.address, secret2, secret_hash)\
            .transact({'from': creator})
        testerchain.wait_for_receipt(tx)

    # But can rollback
    tx = dispatcher.functions.rollback(secret2, secret_hash).transact({'from': creator})
    testerchain.wait_for_receipt(tx)
    assert contract_library_v1.address == dispatcher.functions.target().call()
    assert address1 == contract.functions.escrow().call()

    # After rollback new ABI is unavailable
    with pytest.raises((TransactionFailed, ValueError)):
        tx = contract.functions.setValueToCheck(2).transact({'from': creator})
        testerchain.wait_for_receipt(tx)

    # Try to upgrade to the bad version
    with pytest.raises((TransactionFailed, ValueError)):
        tx = dispatcher.functions.upgrade(contract_library_bad.address, secret, secret2_hash)\
            .transact({'from': creator})
        testerchain.wait_for_receipt(tx)

    # One StateVerified event per verification attempt (upgrade x2, rollback, bad upgrade).
    events = dispatcher.events.StateVerified.createFilter(
        fromBlock=0).get_all_entries()
    assert 4 == len(events)
    event_args = events[0]['args']
    assert contract_library_v1.address == event_args['testTarget']
    assert creator == event_args['sender']
    event_args = events[1]['args']
    assert contract_library_v2.address == event_args['testTarget']
    assert creator == event_args['sender']
    assert event_args == events[2]['args']
    event_args = events[3]['args']
    assert contract_library_v2.address == event_args['testTarget']
    assert creator == event_args['sender']

    # UpgradeFinished fired for: initial deploy (v1), upgrade (v2), rollback (v1).
    events = dispatcher.events.UpgradeFinished.createFilter(
        fromBlock=0).get_all_entries()
    assert 3 == len(events)
    event_args = events[0]['args']
    assert contract_library_v1.address == event_args['target']
    assert creator == event_args['sender']
    event_args = events[1]['args']
    assert contract_library_v2.address == event_args['target']
    assert creator == event_args['sender']
    event_args = events[2]['args']
    assert contract_library_v1.address == event_args['target']
    assert creator == event_args['sender']
def gen_eth_address_from_eos_public_key(pub_key):
    """Derive a hex-encoded Ethereum address from an EOS public key string."""
    # Drop the first three characters (presumably the "EOS" prefix — TODO
    # confirm) and base58-decode the remainder.
    decoded = base58.b58decode(pub_key[3:])
    # The trailing 4 bytes look like a checksum; hash the bare key material
    # and keep the last 20 bytes of the 32-byte keccak digest.
    digest = eth_utils.keccak(decoded[:-4])
    return digest[12:].hex()
def _execute_frontier_transaction(vm_state, transaction):
    """Apply ``transaction`` to ``vm_state`` using Frontier-rule semantics.

    Validates, buys gas, bumps the sender nonce, runs the VM message
    (create or call), then settles refunds, miner fees, and self-destructs.
    Returns the resulting computation object.
    """
    # Reusable for other forks
    #
    # 1) Pre Computation
    #
    # Validate the transaction
    transaction.validate()
    vm_state.validate_transaction(transaction)
    gas_fee = transaction.gas * transaction.gas_price
    with vm_state.state_db() as state_db:
        # Buy Gas: debit the full gas allowance up-front.
        state_db.delta_balance(transaction.sender, -1 * gas_fee)
        # Increment Nonce
        state_db.increment_nonce(transaction.sender)
        # Setup VM Message
        message_gas = transaction.gas - transaction.intrinsic_gas
        if transaction.to == constants.CREATE_CONTRACT_ADDRESS:
            # Contract creation: address is derived from sender + pre-increment
            # nonce (hence the -1), code comes from the tx data.
            contract_address = generate_contract_address(
                transaction.sender,
                state_db.get_nonce(transaction.sender) - 1,
            )
            data = b''
            code = transaction.data
        else:
            # Plain message call: code is loaded from the recipient account.
            contract_address = None
            data = transaction.data
            code = state_db.get_code(transaction.to)
    vm_state.logger.info(
        (
            "TRANSACTION: sender: %s | to: %s | value: %s | gas: %s | "
            "gas-price: %s | s: %s | r: %s | v: %s | data-hash: %s"
        ),
        encode_hex(transaction.sender),
        encode_hex(transaction.to),
        transaction.value,
        transaction.gas,
        transaction.gas_price,
        transaction.s,
        transaction.r,
        transaction.v,
        encode_hex(keccak(transaction.data)),
    )
    message = Message(
        gas=message_gas,
        to=transaction.to,
        sender=transaction.sender,
        value=transaction.value,
        data=data,
        code=code,
        create_address=contract_address,
    )
    transaction_context = vm_state.get_transaction_context_class()(
        gas_price=transaction.gas_price,
        origin=transaction.sender,
    )
    #
    # 2) Apply the message to the VM.
    #
    if message.is_create:
        with vm_state.state_db(read_only=True) as state_db:
            is_collision = state_db.account_has_code_or_nonce(contract_address)
        if is_collision:
            # The address of the newly created contract has *somehow* collided
            # with an existing contract address.
            computation = vm_state.get_computation(message, transaction_context)
            computation._error = ContractCreationCollision(
                "Address collision while creating contract: {0}".format(
                    encode_hex(contract_address),
                )
            )
            vm_state.logger.debug(
                "Address collision while creating contract: %s",
                encode_hex(contract_address),
            )
        else:
            computation = vm_state.get_computation(
                message,
                transaction_context,
            ).apply_create_message()
    else:
        computation = vm_state.get_computation(message, transaction_context).apply_message()
    #
    # 3) Post Computation
    #
    # Self Destruct Refunds
    num_deletions = len(computation.get_accounts_for_deletion())
    if num_deletions:
        computation.gas_meter.refund_gas(REFUND_SELFDESTRUCT * num_deletions)
    # Gas Refunds: refunds are capped at half the gas actually used.
    gas_remaining = computation.get_gas_remaining()
    gas_refunded = computation.get_gas_refund()
    gas_used = transaction.gas - gas_remaining
    gas_refund = min(gas_refunded, gas_used // 2)
    gas_refund_amount = (gas_refund + gas_remaining) * transaction.gas_price
    if gas_refund_amount:
        vm_state.logger.debug(
            'TRANSACTION REFUND: %s -> %s',
            gas_refund_amount,
            encode_hex(message.sender),
        )
        with vm_state.state_db() as state_db:
            state_db.delta_balance(message.sender, gas_refund_amount)
    # Miner Fees: the coinbase earns the net gas consumed.
    transaction_fee = (transaction.gas - gas_remaining - gas_refund) * transaction.gas_price
    vm_state.logger.debug(
        'TRANSACTION FEE: %s -> %s',
        transaction_fee,
        encode_hex(vm_state.coinbase),
    )
    with vm_state.state_db() as state_db:
        state_db.delta_balance(vm_state.coinbase, transaction_fee)
    # Process Self Destructs
    with vm_state.state_db() as state_db:
        for account, beneficiary in computation.get_accounts_for_deletion():
            # TODO: need to figure out how we prevent multiple selfdestructs from
            # the same account and if this is the right place to put this.
            vm_state.logger.debug('DELETING ACCOUNT: %s', encode_hex(account))
            # TODO: this balance setting is likely superflous and can be
            # removed since `delete_account` does this.
            state_db.set_balance(account, 0)
            state_db.delete_account(account)
    return computation
def from_seed(cls, seed: bytes, *args: Any, **kwargs: Any) -> DiscoveryProtocol:
    """Alternate constructor: derive the node's private key from ``seed``."""
    derived_key = keys.PrivateKey(keccak(seed))
    return cls(*args, privkey=derived_key, **kwargs)
    UINT256_MAX,
)
from raiden.utils import sha3
from raiden.tests.utils.tests import fixture_all_combinations
from raiden.tests.utils.factories import make_privkey_address, UNIT_CHAIN_ID
from raiden.transfer.state import EMPTY_MERKLE_ROOT
from raiden.messages import (
    DirectTransfer,
    Lock,
    LockedTransfer,
    RefundTransfer,
)

# Shared fixtures for the message tests below.
PRIVKEY, ADDRESS = make_privkey_address()
CHANNEL_ID = keccak(b'somechannel')
# Byte strings of the wrong length (an address is 20 bytes).
INVALID_ADDRESSES = [
    b' ',
    b' ' * 19,
    b' ' * 21,
]
# Seven distinct 32-byte secrets: b'AAAA...', b'BBBB...', etc.
VALID_SECRETS = [
    letter.encode() * 32 for letter in string.ascii_uppercase[:7]
]
# NOTE(review): name looks like a typo of "SECRETHASHES_SECRETS"; kept as-is
# since it is referenced elsewhere.
SECRETHASHES_SECRESTS = {
    sha3(secret): secret for secret in VALID_SECRETS
}
VALID_SECRETHASHES = list(SECRETHASHES_SECRESTS.keys())
def _type_hash(primaryType: str, types) -> str:
    """Keccak hash of the encoded type signature for ``primaryType``."""
    type_signature = _encode_type(primaryType, types)
    return keccak(type_signature.encode())
def get_block_transaction_hashes(self, block_header):
    """Yield the keccak hash of each encoded transaction in the given block."""
    encoded_transactions = self._get_block_transaction_data(block_header)
    yield from (keccak(encoded) for encoded in encoded_transactions)
def _struct_hash(primaryType: str, data: any, types) -> str:
    """Keccak hash of the encoded struct ``data`` for ``primaryType``."""
    encoded = _encode_data(primaryType, data, types)
    return keccak(encoded)
async def get_directly_linked_peers_without_handshake(
        peer1_class=LESPeer, peer1_chaindb=None,
        peer2_class=LESPeer, peer2_chaindb=None):
    """See get_directly_linked_peers().

    Neither the P2P handshake nor the sub-protocol handshake will be performed here.
    """
    if peer1_chaindb is None:
        peer1_chaindb = get_fresh_mainnet_chaindb()
    if peer2_chaindb is None:
        peer2_chaindb = get_fresh_mainnet_chaindb()
    peer1_private_key = ecies.generate_privkey()
    peer2_private_key = ecies.generate_privkey()
    # Each peer's "remote" is the *other* peer's identity.
    peer1_remote = kademlia.Node(
        peer2_private_key.public_key, kademlia.Address('0.0.0.0', 0, 0))
    peer2_remote = kademlia.Node(
        peer1_private_key.public_key, kademlia.Address('0.0.0.0', 0, 0))
    initiator = auth.HandshakeInitiator(peer1_remote, peer1_private_key)
    peer2_reader = asyncio.StreamReader()
    peer1_reader = asyncio.StreamReader()
    # Link the peer1's writer to the peer2's reader, and the peer2's writer to the
    # peer1's reader.
    peer2_writer = type(
        "mock-streamwriter",
        (object,),
        {"write": peer1_reader.feed_data,
         "close": lambda: None}
    )
    peer1_writer = type(
        "mock-streamwriter",
        (object,),
        {"write": peer2_reader.feed_data,
         "close": lambda: None}
    )

    peer1, peer2 = None, None
    handshake_finished = asyncio.Event()

    async def do_handshake():
        # Runs the initiator side concurrently; fills in peer1/peer2 above.
        nonlocal peer1, peer2
        aes_secret, mac_secret, egress_mac, ingress_mac = await auth._handshake(
            initiator, peer1_reader, peer1_writer)

        # Need to copy those before we pass them on to the Peer constructor because they're
        # mutable. Also, the 2nd peer's ingress/egress MACs are reversed from the first peer's.
        peer2_ingress = egress_mac.copy()
        peer2_egress = ingress_mac.copy()

        peer1 = peer1_class(
            remote=peer1_remote, privkey=peer1_private_key, reader=peer1_reader,
            writer=peer1_writer, aes_secret=aes_secret, mac_secret=mac_secret,
            egress_mac=egress_mac, ingress_mac=ingress_mac, chaindb=peer1_chaindb,
            network_id=1)

        peer2 = peer2_class(
            remote=peer2_remote, privkey=peer2_private_key, reader=peer2_reader,
            writer=peer2_writer, aes_secret=aes_secret, mac_secret=mac_secret,
            egress_mac=peer2_egress, ingress_mac=peer2_ingress, chaindb=peer2_chaindb,
            network_id=1)

        handshake_finished.set()

    asyncio.ensure_future(do_handshake())

    # Play the responder side inline while do_handshake() drives the initiator.
    responder = auth.HandshakeResponder(peer2_remote, peer2_private_key)
    auth_msg = await peer2_reader.read(constants.ENCRYPTED_AUTH_MSG_LEN)

    # Can't assert return values, but checking that the decoder doesn't raise
    # any exceptions at least.
    _, _ = responder.decode_authentication(auth_msg)

    peer2_nonce = keccak(os.urandom(constants.HASH_LEN))
    auth_ack_msg = responder.create_auth_ack_message(peer2_nonce)
    auth_ack_ciphertext = responder.encrypt_auth_ack_message(auth_ack_msg)
    peer2_writer.write(auth_ack_ciphertext)

    await handshake_finished.wait()

    return peer1, peer2
def generate_typed_data_hash(typedData: EIP712TypedData) -> str:
    """Compute the 0x-prefixed signing hash for ``typedData``:
    keccak(0x1901 || domain struct hash || message struct hash)."""
    domain_hash = _struct_hash(
        'EIP712Domain', typedData['domain'], typedData['types'])
    message_hash = _struct_hash(
        typedData['primaryType'], typedData['message'], typedData['types'])
    digest = keccak(b"\x19\x01" + domain_hash + message_hash)
    return '0x' + digest.hex()