async def sha3(self, data: str) -> str:
    """ Returns Keccak-256 of the given data. """
    raw_bytes = decode_hex(data)
    return encode_hex(keccak(raw_bytes))
async def contract_code_hash(genesis_state, simple_contract_address):
    """Keccak-256 hash of the contract code stored at the fixture address."""
    code = genesis_state[simple_contract_address]['code']
    return keccak(code)
def get_eth_address(self) -> str:
    """Derive the 40-hex-char Ethereum address (no 0x prefix) for this key.

    The leading byte of the serialized key is dropped before hashing, and the
    low 20 bytes of the keccak digest form the address.
    """
    digest = keccak(self.to_bytes()[1:])
    return digest.hex()[-40:]
def bytecode_hash(bytecode):
    """Keccak-256 digest of the supplied bytecode."""
    digest = keccak(bytecode)
    return digest
from datetime import timedelta
from decimal import Decimal
from eth_hash.auto import keccak
from eth_utils import encode_hex, decode_hex
from eth_abi import decode_single
from notify.events import event_high

# Tag(s) attached to events produced by this trigger module.
EVENT_TAGS = ['ogn']

# Hex-encoded keccak topic signatures for the two staking events this
# module matches against log topic_0 values.
SIG_EVENT_NEW_DURATIONS = encode_hex(
    keccak(b"NewDurations(address,uint256[])"))
SIG_EVENT_NEW_RATES = encode_hex(keccak(b"NewRates(address,uint256[])"))
# Seconds in a 365-day year.
DAYS_365_SECONDS = 31536000


def get_rates_events(logs):
    """ Get NewRates events """
    return logs.filter(topic_0=SIG_EVENT_NEW_RATES).order_by('block_number')


def get_durations_event(logs, tx_hash):
    """ NewDurations is triggered during the same transaction and needs
    to match the NewRates one to figure out how they relate """
    # Scan every log from the same transaction in block order and return
    # the first NewDurations log found; None if the tx has none.
    for log in logs.filter(transaction_hash=tx_hash).order_by('block_number'):
        if log.topic_0 == SIG_EVENT_NEW_DURATIONS:
            return log
    return None


def run_trigger(new_logs):
    """ Template trigger """
    # NOTE(review): the rest of this function lies outside the visible
    # portion of the file; only the accumulator initialization is shown.
    events = []
def add_uncle(self, uncle: BlockHeader) -> "FrontierBlock":
    """Append ``uncle`` to this block, refresh the header's uncles hash,
    and return ``self`` for chaining."""
    self.uncles.append(uncle)
    encoded_uncles = rlp.encode(self.uncles)
    self.header.uncles_hash = keccak(encoded_uncles)
    return self
def __delitem__(self, key):
    # Entries live in the underlying trie under the keccak hash of the key.
    hashed_key = keccak(key)
    del self._trie[hashed_key]
def mining_hash(self) -> Hash32:
    """Keccak hash over all header fields except the final two,
    serialized with the MiningHeader sedes."""
    encoded = rlp.encode(self[:-2], MiningHeader)
    return keccak(encoded)
def __setitem__(self, key, value):
    # Keys are hashed with keccak before touching the backing trie.
    hashed_key = keccak(key)
    self._trie[hashed_key] = value
def __getitem__(self, key):
    # Look the value up under the keccak hash of the key.
    hashed_key = keccak(key)
    return self._trie[hashed_key]
def __init__(self, bootnodes):
    # A fixed seed makes the node's identity deterministic across test runs.
    privkey = keys.PrivateKey(keccak(b"seed"))
    token = CancelToken("discovery-test")
    super().__init__(privkey, random_address(), bootnodes, token)
def random_node():
    """Build a kademlia Node with a randomly seeded private key and address."""
    letters = "".join(random.sample(string.ascii_lowercase, 10))
    seed = to_bytes(text=letters)
    priv_key = keys.PrivateKey(keccak(seed))
    return kademlia.Node(priv_key.public_key, random_address())
def address_with_storage_hash(address_with_storage):
    # Fixture: the keccak hash of the storage-bearing address.
    return keccak(address_with_storage)
async def get_directly_linked_peers_without_handshake(peer1_class=LESPeer,
                                                      peer1_headerdb=None,
                                                      peer2_class=LESPeer,
                                                      peer2_headerdb=None):
    """See get_directly_linked_peers().

    Neither the P2P handshake nor the sub-protocol handshake will be performed here.
    """
    cancel_token = CancelToken("get_directly_linked_peers_without_handshake")
    if peer1_headerdb is None:
        peer1_headerdb = get_fresh_mainnet_headerdb()
    if peer2_headerdb is None:
        peer2_headerdb = get_fresh_mainnet_headerdb()
    peer1_private_key = ecies.generate_privkey()
    peer2_private_key = ecies.generate_privkey()
    # Each peer's "remote" carries the *other* peer's public key.
    peer1_remote = kademlia.Node(peer2_private_key.public_key,
                                 kademlia.Address('0.0.0.0', 0, 0))
    peer2_remote = kademlia.Node(peer1_private_key.public_key,
                                 kademlia.Address('0.0.0.0', 0, 0))
    use_eip8 = False
    initiator = auth.HandshakeInitiator(peer1_remote, peer1_private_key,
                                        use_eip8, cancel_token)
    peer2_reader = asyncio.StreamReader()
    peer1_reader = asyncio.StreamReader()
    # Link the peer1's writer to the peer2's reader, and the peer2's writer to the
    # peer1's reader.
    peer2_writer = MockStreamWriter(peer1_reader.feed_data)
    peer1_writer = MockStreamWriter(peer2_reader.feed_data)
    peer1, peer2 = None, None
    handshake_finished = asyncio.Event()

    async def do_handshake():
        # Runs peer1's (initiator's) side of the auth handshake concurrently
        # with the responder logic below, then signals completion.
        nonlocal peer1
        aes_secret, mac_secret, egress_mac, ingress_mac = await auth._handshake(
            initiator, peer1_reader, peer1_writer, cancel_token)
        peer1 = peer1_class(remote=peer1_remote,
                            privkey=peer1_private_key,
                            reader=peer1_reader,
                            writer=peer1_writer,
                            aes_secret=aes_secret,
                            mac_secret=mac_secret,
                            egress_mac=egress_mac,
                            ingress_mac=ingress_mac,
                            headerdb=peer1_headerdb,
                            network_id=1)
        handshake_finished.set()

    asyncio.ensure_future(do_handshake())

    # Responder (peer2) side: decode the initiator's auth message, reply with
    # an auth-ack, then derive the shared session secrets.
    use_eip8 = False
    responder = auth.HandshakeResponder(peer2_remote, peer2_private_key,
                                        use_eip8, cancel_token)
    auth_cipher = await peer2_reader.read(constants.ENCRYPTED_AUTH_MSG_LEN)
    initiator_ephemeral_pubkey, initiator_nonce, _ = decode_authentication(
        auth_cipher, peer2_private_key)
    responder_nonce = keccak(os.urandom(constants.HASH_LEN))
    auth_ack_msg = responder.create_auth_ack_message(responder_nonce)
    auth_ack_ciphertext = responder.encrypt_auth_ack_message(auth_ack_msg)
    peer2_writer.write(auth_ack_ciphertext)

    await handshake_finished.wait()

    aes_secret, mac_secret, egress_mac, ingress_mac = responder.derive_secrets(
        initiator_nonce, responder_nonce, initiator_ephemeral_pubkey,
        auth_cipher, auth_ack_ciphertext)
    # Sanity check: the two sides must have derived mirrored MAC states.
    assert egress_mac.digest() == peer1.ingress_mac.digest()
    assert ingress_mac.digest() == peer1.egress_mac.digest()
    peer2 = peer2_class(remote=peer2_remote,
                        privkey=peer2_private_key,
                        reader=peer2_reader,
                        writer=peer2_writer,
                        aes_secret=aes_secret,
                        mac_secret=mac_secret,
                        egress_mac=egress_mac,
                        ingress_mac=ingress_mac,
                        headerdb=peer2_headerdb,
                        network_id=1)
    return peer1, peer2
def __contains__(self, key):
    # Membership is tested against the keccak-hashed key.
    hashed_key = keccak(key)
    return hashed_key in self._trie
def hash(self) -> Hash32:
    """Keccak hash of this object's RLP encoding, computed lazily and cached."""
    if self._hash is not None:
        return self._hash
    self._hash = keccak(rlp.encode(self))
    return self._hash
def test_keccak_256(keccak, prehash, expected_result): assert keccak(prehash) == expected_result
def _persist_uncles(db: BaseDB, uncles: Tuple[BlockHeader, ...]) -> Hash32:
    """Store the RLP-encoded uncles in ``db`` keyed by their keccak hash.

    :param db: the key/value store to write into.
    :param uncles: any number of uncle headers (``Tuple[BlockHeader, ...]`` is
        the variadic form; the previous ``Tuple[BlockHeader]`` annotated a
        tuple of exactly one element).
    :return: the hash under which the encoded uncles were stored.
    """
    uncles_hash = keccak(rlp.encode(uncles))
    db.set(
        uncles_hash,
        rlp.encode(uncles, sedes=rlp.sedes.CountableList(BlockHeader)))
    return uncles_hash
def hash(self) -> Hash32:
    """Keccak-256 of this object's RLP encoding."""
    encoded = rlp.encode(self)
    return keccak(encoded)
# For every registered filler: serialize the formatted filler to JSON on
# disk, then generate the corresponding filled test fixture next to it.
for test_group, tests in test_groups.items():
    for filler, filler_kwargs in tests:
        test_name = get_test_name(filler)
        filename = test_name + ".json"
        filler_src_path = os.path.join(filler_dir, test_group, filename)
        filler_path = os.path.join(FILLER_PARENT_DIR, filler_src_path)
        test_path = os.path.join(TEST_PARENT_DIR, test_dir, test_group, filename)
        # Ensure both destination directories exist before writing.
        for path in [filler_path, test_path]:
            os.makedirs(os.path.dirname(path), exist_ok=True)
        formatted_filler = filler_formatter(filler)
        filler_string = json.dumps(formatted_filler, indent=4, sort_keys=True)
        with open(filler_path, "w") as filler_file:
            filler_file.write(filler_string)
        # The fixture's info block records the filler's source path and the
        # keccak hash of the exact serialized filler bytes.
        filler_hash = keccak(filler_string.encode("ascii"))
        info = {
            "source": filler_src_path,
            "sourceHash": encode_hex(filler_hash),
        }
        test = fill_test(filler, info=info, **filler_kwargs or {})
        with open(test_path, "w") as test_file:
            json.dump(test, test_file, indent=4, sort_keys=True)
def test_get_from_proof_empty():
    # An empty proof must be rejected for a non-empty state root.
    state_root = keccak(b'state root')
    key = keccak(b'some key')
    proof = []
    with pytest.raises(BadTrieProof):
        HexaryTrie.get_from_proof(state_root, key, proof)
def ETH_Address(un_pubk_bytes):
    """0x-prefixed Ethereum address derived from an uncompressed public key.

    Drops the leading format byte, hashes the rest with keccak, and keeps
    the low 20 bytes of the digest.
    """
    digest = keccak(un_pubk_bytes[1:])
    return '0x' + digest[-20:].hex()
def generate_contract_address(address: bytes, nonce: bytes) -> bytes:
    """Contract address: low 20 bytes of keccak(rlp([address, nonce]))."""
    encoded = rlp.encode([address, nonce])
    return keccak(encoded)[-20:]
# NOTE(review): the first statements below continue a test whose beginning
# lies outside this view; `body`, `test_chunks`, and CHUNK_SIZE are defined
# there.
next(iterate_chunks(body))

# A trailing stray byte that is not a full chunk must be rejected.
chunks = test_chunks
body = b"".join(chunks) + b"\x00"
with pytest.raises(ValidationError):
    next(iterate_chunks(body))

# An extra full zero chunk must also be rejected.
chunks = test_chunks + [b"\x00" * CHUNK_SIZE]
body = b"".join(chunks)
with pytest.raises(ValidationError):
    next(iterate_chunks(body))


@pytest.mark.parametrize(
    "leaves,root",
    [([b"single leaf"], keccak(b"single leaf")),
     ([b"left", b"right"], keccak(keccak(b"left") + keccak(b"right"))),
     ([b"1", b"2", b"3", b"4"],
      keccak(
          keccak(keccak(b"1") + keccak(b"2")) +
          keccak(keccak(b"3") + keccak(b"4"))))])
def test_merkle_root_calculation(leaves, root):
    # Expected roots are built by pairwise keccak hashing up to one digest.
    assert calc_merkle_root(leaves) == root


@pytest.mark.parametrize("leave_number", [0, 3, 5, 6, 7, 9])
def test_invalid_merkle_root_calculation(leave_number):
    # Leaf counts that are not a positive power of two must raise.
    with pytest.raises(ValidationError):
        calc_merkle_root([b""] * leave_number)
def address_with_balance_hash(address_with_balance):
    # Fixture: the keccak hash of the funded address.
    return keccak(address_with_balance)