def create_mock_signed_attestation(state,
                                   shard_committee,
                                   voting_committee_indices,
                                   attestation_data):
    """
    Build an ``Attestation`` signed by the committee members selected in
    ``voting_committee_indices``.

    The signed message is the RLP-encoded ``attestation_data`` with a single
    zero byte appended (phase 0 assumes all custody bits are 0).
    """
    message = hash_eth2(
        rlp.encode(attestation_data) + (0).to_bytes(1, "big")
    )

    # Each participating validator signs the message under the
    # attestation domain for the attestation's slot.
    signatures = []
    for committee_index in voting_committee_indices:
        validator_index = shard_committee.committee[committee_index]
        signatures.append(
            bls.sign(
                message,
                privkeys[validator_index],
                domain=get_domain(
                    fork_data=state.fork_data,
                    slot=attestation_data.slot,
                    domain_type=SignatureDomain.DOMAIN_ATTESTATION,
                ),
            )
        )

    # Aggregate the signatures and build the matching participation bitfield.
    participation_bitfield, aggregate_signature = aggregate_votes(
        bitfield=get_empty_bitfield(len(shard_committee.committee)),
        sigs=(),
        voting_sigs=signatures,
        voting_committee_indices=voting_committee_indices,
    )

    # Assemble the attestation from the data, participant bitfield, and
    # aggregated signature.
    return Attestation(
        data=attestation_data,
        participation_bitfield=participation_bitfield,
        custody_bitfield=b'',
        aggregate_signature=aggregate_signature,
    )
def shuffle(values: Sequence[Any], seed: Hash32) -> Iterable[Any]:
    """
    Return the shuffled ``values`` with ``seed`` as entropy.

    Mainly for shuffling active validators in-protocol.

    Raises ``ValueError`` when ``len(values)`` is not strictly less than
    ``RAND_MAX``, since the RNG's range bounds the size of a list that can
    be shuffled without bias.

    Spec: https://github.com/ethereum/eth2.0-specs/blob/70cef14a08de70e7bd0455d75cf380eb69694bfb/specs/core/0_beacon-chain.md#helper-functions  # noqa: E501
    """
    values_count = len(values)

    # The range of the RNG places an upper-bound on the size of the list that
    # may be shuffled. It is a logic error to supply an oversized list.
    if values_count >= RAND_MAX:
        raise ValueError(
            "values_count (%s) should be less than RAND_MAX (%s)." %
            (values_count, RAND_MAX)
        )

    output = list(values)
    source = seed
    index = 0
    while index < values_count - 1:
        # Re-hash the `source` to obtain a new pattern of bytes.
        source = hash_eth2(source)
        # Iterate through the `source` bytes in 3-byte chunks.
        for position in range(0, 32 - (32 % RAND_BYTES), RAND_BYTES):
            # Determine the number of indices remaining in `values` and exit
            # once the last index is reached.
            remaining = values_count - index
            if remaining == 1:
                break

            # Read 3-bytes of `source` as a 24-bit big-endian integer.
            sample_from_source = int.from_bytes(
                source[position:position + RAND_BYTES], 'big'
            )

            # Sample values greater than or equal to `sample_max` would cause
            # modulo bias when mapped into the `remaining` range; such samples
            # are discarded and a new one is read on the next iteration.
            sample_max = RAND_MAX - RAND_MAX % remaining
            if sample_from_source < sample_max:
                # Select a replacement index for the current index.
                replacement_position = (sample_from_source % remaining) + index
                # Swap the current index with the replacement index.
                output[index], output[replacement_position] = (
                    output[replacement_position], output[index]
                )
                index += 1

    return output
def hash_to_G2(message: bytes, domain: int) -> Tuple[FQ2, FQ2, FQ2]:
    """
    Map ``message`` (namespaced by ``domain``) to a point on the G2 curve.

    The message is hashed into a candidate x coordinate, which is then
    incremented until a valid y coordinate (a quadratic residue) exists;
    the resulting point is multiplied by the G2 cofactor.
    """
    domain_bytes = domain.to_bytes(8, 'big')

    # Initial candidate x coordinate: real part tagged with 0x01,
    # imaginary part with 0x02.
    real_part = big_endian_to_int(hash_eth2(domain_bytes + b'\x01' + message))
    imag_part = big_endian_to_int(hash_eth2(domain_bytes + b'\x02' + message))
    x_coordinate = FQ2([real_part, imag_part])  # real_part + imag_part * i

    # Walk x upward until y^2 = x^3 + 4(i + 1) has a square root.
    while True:
        y_coordinate = modular_squareroot(x_coordinate ** 3 + FQ2([4, 4]))
        if y_coordinate is not None:
            # Quadratic residue found.
            break
        x_coordinate += FQ2([1, 0])

    return multiply((x_coordinate, y_coordinate, FQ2([1, 0])), G2_cofactor)
def test_get_merkle_root():
    """A single leaf is its own root; a two-leaf tree's root is the hash of
    the concatenated leaves."""
    leaf_a = b"0" * 32
    assert get_merkle_root((leaf_a,)) == leaf_a

    leaf_b = b"1" * 32
    assert get_merkle_root((leaf_a, leaf_b)) == hash_eth2(leaf_a + leaf_b)
def get_new_validator_registry_delta_chain_tip(
        current_validator_registry_delta_chain_tip: Hash32,
        index: int,
        pubkey: int,
        flag: int) -> Hash32:
    """
    Compute the next hash in the validator registry delta hash chain.

    The new tip is the hash of the previous tip concatenated with the
    big-endian serializations of ``flag`` (1 byte), ``index`` (3 bytes),
    and ``pubkey`` (32 bytes).
    """
    # TODO: currently, we use a 256-bit pubkey which is different from the
    # spec.
    serialized = b''.join((
        current_validator_registry_delta_chain_tip,
        flag.to_bytes(1, 'big'),
        index.to_bytes(3, 'big'),
        pubkey.to_bytes(32, 'big'),
    ))
    return hash_eth2(serialized)
def verify_merkle_proof(root: Hash32,
                        item: Union[bytes, bytearray],
                        item_index: int,
                        proof: MerkleProof) -> bool:
    """
    Verify a Merkle proof against a root hash.

    Starting from the hashed ``item``, each sibling node from ``proof`` is
    combined with the running node — ordered by whether the branch index is
    a left (even) or right (odd) child — and the final result is compared
    to ``root``.
    """
    current = hash_eth2(item)
    branch_indices = get_branch_indices(item_index, len(proof))
    for sibling, branch_index in zip(proof, branch_indices):
        if branch_index % 2 == 0:
            # Current node is a left child: sibling goes on the right.
            current = _calc_parent_hash(current, sibling)
        else:
            # Current node is a right child: sibling goes on the left.
            current = _calc_parent_hash(sibling, current)
    return current == root
def validate_serenity_attestation_aggregate_signature(state: BeaconState,
                                                      attestation: Attestation,
                                                      epoch_length: int) -> None:
    """
    Validate the ``aggregate_signature`` field of ``attestation``.
    Raise ``ValidationError`` if it's invalid.

    Note: This is the phase 0 version of ``aggregate_signature`` validation.
    All proof of custody bits are assumed to be 0 within the signed data.
    This will change to reflect real proof of custody bits in Phase 1.
    """
    participant_indices = get_attestation_participants(
        state=state,
        slot=attestation.data.slot,
        shard=attestation.data.shard,
        participation_bitfield=attestation.participation_bitfield,
        epoch_length=epoch_length,
    )

    # Aggregate the public keys of every participating validator.
    group_public_key = bls.aggregate_pubkeys(tuple(
        state.validator_registry[validator_index].pubkey
        for validator_index in participant_indices
    ))

    # TODO: change to tree hashing when we have SSZ
    # TODO: Replace with AttestationAndCustodyBit data structure
    # The signed message is the attestation data plus a zero custody byte.
    message = hash_eth2(rlp.encode(attestation.data) + (0).to_bytes(1, "big"))

    is_valid_signature = bls.verify(
        message=message,
        pubkey=group_public_key,
        signature=attestation.aggregate_signature,
        domain=get_domain(
            fork_data=state.fork_data,
            slot=attestation.data.slot,
            domain_type=SignatureDomain.DOMAIN_ATTESTATION,
        ),
    )
    if not is_valid_signature:
        raise ValidationError(
            "Attestation ``aggregate_signature`` is invalid.")
def calc_merkle_tree(items: Sequence[Union[bytes, bytearray]]) -> MerkleTree:
    """
    Calculate the Merkle tree corresponding to a list of items.

    Each item is hashed to form a leaf, and the tree is built from the
    resulting leaves.
    """
    return calc_merkle_tree_from_leaves(tuple(map(hash_eth2, items)))
def test_hash_is_keccak256():
    """``hash_eth2`` must agree with plain keccak-256."""
    payload = b'foo'
    assert hash_eth2(payload) == keccak(payload)
def test_chaindb_persist_block_and_unknown_parent(chaindb, block, seed):
    """Persisting a block whose ``parent_root`` is not in the database
    raises ``ParentNotFound``."""
    orphan = block.copy(parent_root=hash_eth2(seed))
    with pytest.raises(ParentNotFound):
        chaindb.persist_block(orphan, orphan.__class__)
def hash(self) -> Hash32:
    """Return the hash of this object's RLP encoding, computed lazily on
    first access and cached in ``self._hash`` thereafter."""
    if self._hash is not None:
        return self._hash
    self._hash = hash_eth2(rlp.encode(self))
    return self._hash
def test_hash():
    """``hash_eth2`` produces a 32-byte digest."""
    digest = hash_eth2(b'helloworld')
    assert len(digest) == 32
def test_hash(sample_beacon_state_params):
    """The ``root`` of a ``BeaconState`` equals the hash of its RLP
    encoding."""
    state = BeaconState(**sample_beacon_state_params)
    expected_root = hash_eth2(rlp.encode(state))
    assert state.root == expected_root
hash_eth2, ) from eth._utils.merkle import ( get_merkle_root_from_items, calc_merkle_tree, get_root, get_merkle_proof, get_merkle_root, verify_merkle_proof, ) @pytest.mark.parametrize("leaves,tree", [ ( (b"single leaf", ), ((hash_eth2(b"single leaf"), ), ), ), ( (b"left", b"right"), ( (hash_eth2(hash_eth2(b"left") + hash_eth2(b"right")), ), (hash_eth2(b"left"), hash_eth2(b"right")), ), ), ( (b"1", b"2", b"3", b"4"), ( (hash_eth2( hash_eth2(hash_eth2(b"1") + hash_eth2(b"2")) + hash_eth2(hash_eth2(b"3") + hash_eth2(b"4"))), ), (
def privkeys():
    """Return 100 deterministic private keys, each derived by hashing the
    decimal index string and reading the first 4 digest bytes as a
    big-endian integer."""
    keys = []
    for i in range(100):
        digest = hash_eth2(str(i).encode('utf-8'))
        keys.append(int.from_bytes(digest[:4], 'big'))
    return keys
def _calc_parent_hash(left_node: Hash32, right_node: Hash32) -> Hash32:
    """
    Calculate the parent hash of a node and its sibling.
    """
    concatenated = left_node + right_node
    return hash_eth2(concatenated)
def root(self) -> Hash32:
    """Return the hash of this object's RLP encoding, computed lazily on
    first access and cached in ``self._root`` thereafter."""
    if self._root is not None:
        return self._root
    self._root = hash_eth2(rlp.encode(self))
    return self._root