async def test_sending_collations(request, event_loop):
    sender, receiver = await get_directly_linked_peers(
        request,
        event_loop,
        ShardingPeer,
        None,
        ShardingPeer,
        None,
    )

    c1 = Collation(CollationHeader(0, b"\x11" * 32, 2, b"\x33" * 20), b"\x44" * COLLATION_SIZE)
    c2 = Collation(CollationHeader(1, b"\x11" * 32, 2, b"\x33" * 20), b"\x44" * COLLATION_SIZE)
    c3 = Collation(CollationHeader(2, b"\x11" * 32, 2, b"\x33" * 20), b"\x44" * COLLATION_SIZE)

    sender.sub_proto.send_collations([c1])
    received_c1 = await asyncio.wait_for(receiver.incoming_collation_queue.get(), timeout=1)
    assert received_c1 == c1
    assert receiver.known_collation_hashes == set([c1.hash])

    sender.sub_proto.send_collations([c2, c3])
    received_c2 = await asyncio.wait_for(receiver.incoming_collation_queue.get(), timeout=1)
    received_c3 = await asyncio.wait_for(receiver.incoming_collation_queue.get(), timeout=1)
    assert set([received_c2, received_c3]) == set([c2, c3])
    assert receiver.known_collation_hashes == set([c1.hash, c2.hash, c3.hash])
def collation_header():
    return CollationHeader(
        0,
        b"\x11" * 32,
        2,
        b"\x33" * 20,
    )
def vm():
    header = CollationHeader(
        shard_id=0,
        expected_period_number=2,
        period_start_prevhash=decode_hex(
            "3c4cc7b99c7eb9281e9a8d15cd4b2f98c5df085e929f15388c699b41cdde78d7"
        ),
        parent_hash=ZERO_HASH32,
        transaction_root=EMPTY_SHA3,
        coinbase=to_canonical_address("8888f1f195afa192cfee860698584c030f4c9db1"),
        state_root=EMPTY_SHA3,
        receipt_root=EMPTY_SHA3,
        number=10,
    )
    chaindb = ChainDB(
        get_db_backend(),
        account_state_class=ShardingAccountStateDB,
        trie_class=BinaryTrie,
    )
    vm = ShardingVM(header=header, chaindb=chaindb)
    vm_state = vm.state
    with vm_state.state_db() as statedb:
        for address, code in HELPER_CONTRACTS.items():
            statedb.set_code(address, code)
        statedb.set_balance(ACCOUNT_ADDRESS, INITIAL_BALANCE)
    # Update state_root manually
    vm.block.header.state_root = vm_state.state_root
    return vm
def collation_header():
    return CollationHeader(
        shard_id=0,
        chunk_root=b"\x11" * 32,
        period=2,
        proposer_address=b"\x22" * 20,
    )
def mk_testing_colhdr(vmc_handler,
                      shard_id,
                      parent_hash,
                      number,
                      coinbase=test_keys[0].public_key.to_canonical_address()):
    period_length = vmc_handler.config['PERIOD_LENGTH']
    current_block_number = vmc_handler.web3.eth.blockNumber
    expected_period_number = (current_block_number + 1) // period_length
    logger.debug("mk_testing_colhdr: expected_period_number=%s", expected_period_number)

    period_start_prevblock_number = expected_period_number * period_length - 1
    period_start_prev_block = vmc_handler.web3.eth.getBlock(period_start_prevblock_number)
    period_start_prevhash = period_start_prev_block['hash']
    logger.debug("mk_testing_colhdr: period_start_prevhash=%s", period_start_prevhash)

    transaction_root = b"tx_list " * 4
    state_root = b"post_sta" * 4
    receipt_root = b"receipt " * 4

    collation_header = CollationHeader(
        shard_id=shard_id,
        expected_period_number=expected_period_number,
        period_start_prevhash=period_start_prevhash,
        parent_hash=parent_hash,
        transaction_root=transaction_root,
        coinbase=coinbase,
        state_root=state_root,
        receipt_root=receipt_root,
        number=number,
    )
    return collation_header
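# Worked example of the period arithmetic above (illustrative numbers, not taken
# from the tests): with PERIOD_LENGTH = 5 and the chain at block 12, the next
# collation targets period 2 and commits to the hash of block 9, the last block
# before that period starts.
PERIOD_LENGTH = 5
current_block_number = 12
assert (current_block_number + 1) // PERIOD_LENGTH == 2
assert 2 * PERIOD_LENGTH - 1 == 9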
def random_collation(shard_id, period):
    body = zpad_right(int_to_big_endian(random.getrandbits(8 * 32)), COLLATION_SIZE)
    header = CollationHeader(
        shard_id=shard_id,
        period=period,
        chunk_root=calc_chunk_root(body),
        proposer_address=b"\xff" * 20,
    )
    return Collation(header, body)
def test_smc_encoding_decoding(collation_header):
    encoded = collation_header.encode_for_smc()
    assert len(encoded) == CollationHeader.smc_encoded_size
    assert encoded == b"".join([
        b"\x00" * 32,
        b"\x11" * 32,
        b"\x00" * 31 + b"\x02",
        b"\x00" * 12 + b"\x22" * 20,
    ])

    decoded = CollationHeader.decode_from_smc(encoded)
    assert decoded == collation_header
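# Sketch of the field layout implied by the assertion above; this helper is
# illustrative only and not part of the test suite. The SMC encoding is four
# 32-byte words, so the fields can be recovered by slicing.
def split_smc_encoding(encoded):
    assert len(encoded) == 4 * 32
    return {
        "shard_id": encoded[0:32],                  # big-endian integer, left-padded
        "chunk_root": encoded[32:64],               # 32-byte hash, used as-is
        "period": encoded[64:96],                   # big-endian integer, left-padded
        "proposer_address": encoded[96:128][-20:],  # 20-byte address, left-padded to 32
    }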
@to_dict  # assumed: eth_utils.to_dict collects the yielded key/value pairs into a dict
def parse_collation_added_log(log):
    # `shard_id` is the first indexed entry, hence the second entry in topics
    shard_id_bytes32 = log['topics'][1]
    data_bytes = decode_hex(log['data'])
    header_bytes = shard_id_bytes32 + data_bytes[:-64]
    is_new_head = bool(big_endian_to_int(data_bytes[-64:-32]))
    score = big_endian_to_int(data_bytes[-32:])
    collation_header = CollationHeader.from_bytes(header_bytes)
    yield 'header', collation_header
    yield 'is_new_head', is_new_head
    yield 'score', score
@to_dict  # assumed: eth_utils.to_dict collects the yielded key/value pairs into a dict
def parse_collation_added_log(log):
    # assume `shard_id` is the first indexed argument, which makes it the second element in topics
    shard_id_bytes32 = log['topics'][1]
    data_hex = log['data']
    data_bytes = decode_hex(data_hex)
    score = big_endian_to_int(data_bytes[-32:])
    is_new_head = bool(big_endian_to_int(data_bytes[-64:-32]))
    header_bytes = shard_id_bytes32 + data_bytes[:-64]
    collation_header = CollationHeader.from_bytes(header_bytes)
    yield 'header', collation_header
    yield 'is_new_head', is_new_head
    yield 'score', score
def propose(self) -> Collation:
    """Broadcast a new collation to the network, add it to the local shard, and return it."""
    # create collation for current period
    period = self.get_current_period()
    body = zpad_right(str(self).encode("utf-8"), COLLATION_SIZE)
    header = CollationHeader(self.shard.shard_id, calc_chunk_root(body), period, b"\x11" * 20)
    collation = Collation(header, body)

    self.logger.debug("Proposing collation {}".format(collation))

    # add collation to local chain
    self.shard.add_collation(collation)

    # broadcast collation
    for peer in self.peer_pool.peers:
        cast(ShardingPeer, peer).send_collations([collation])

    return collation
def generate_collations():
    explicit_params = {}
    for period in itertools.count():
        default_params = {
            "shard_id": 0,
            "period": period,
            "body": zpad_right(b"body%d" % period, COLLATION_SIZE),
            "proposer_address": zpad_right(b"proposer%d" % period, 20),
        }
        # only calculate chunk root if it wouldn't be replaced anyway
        if "chunk_root" not in explicit_params:
            default_params["chunk_root"] = calc_chunk_root(default_params["body"])

        params = merge(default_params, explicit_params)
        header = CollationHeader(
            shard_id=params["shard_id"],
            chunk_root=params["chunk_root"],
            period=params["period"],
            proposer_address=params["proposer_address"],
        )
        collation = Collation(header, params["body"])

        explicit_params = (yield collation) or {}
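# Usage sketch for the generator above (illustrative only; the shard_id override is
# chosen here for the example and not taken from the tests). Plain iteration yields
# collations built from the default parameters, while send() overrides selected
# fields for the next collation only.
collations = generate_collations()
c0 = next(collations)                  # period 0, defaults
c1 = collations.send({"shard_id": 1})  # period 1, but placed on shard 1
c2 = next(collations)                  # period 2, back to defaults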
def test_get_witness_nodes(populated_shard_chaindb_and_root_hash):
    chaindb, root_hash = populated_shard_chaindb_and_root_hash
    header = CollationHeader(
        shard_id=1,
        expected_period_number=0,
        period_start_prevhash=ZERO_HASH32,
        parent_hash=ZERO_HASH32,
        number=0,
        state_root=root_hash,
    )
    prefixes = [
        get_balance_key(A_ADDRESS),
        get_balance_key(B_ADDRESS),
        get_storage_key(A_ADDRESS, big_endian_to_int(b"key1")),
        get_storage_key(B_ADDRESS, big_endian_to_int(b"key1")),
        get_storage_key(B_ADDRESS, big_endian_to_int(b"key2")),
        get_storage_key(B_ADDRESS, big_endian_to_int(b"key")),
        get_storage_key(B_ADDRESS, big_endian_to_int(b"")),
    ]
    witness_nodes = chaindb.get_witness_nodes(header, prefixes)
    assert len(witness_nodes) == len(set(witness_nodes))  # no duplicates
    assert list(witness_nodes) == sorted(witness_nodes)  # sorted
def test_parse_collation_added_log(log, expected_header_dict, expected_is_new_head, expected_score):
    parsed_data = parse_collation_added_log(log)
    assert parsed_data['header'] == CollationHeader(**expected_header_dict)
    assert parsed_data['is_new_head'] == expected_is_new_head
    assert parsed_data['score'] == expected_score
def header(body):
    return CollationHeader(
        shard_id=0,
        chunk_root=calc_chunk_root(body),
        period=2,
        proposer_address=b"\x22" * 20,
    )
async def test_collation_requests(request, event_loop):
    # setup two peers
    sender, receiver = await get_directly_linked_sharding_peers(request, event_loop)
    receiver_peer_pool = MockPeerPoolWithConnectedPeers([receiver])

    # setup shard db for request receiving node
    receiver_db = ShardDB(MemoryDB())
    receiver_shard = Shard(receiver_db, 0)

    # create three collations and add two to the shard of the receiver
    # body is shared to avoid unnecessary chunk root calculation
    body = zpad_right(b"body", COLLATION_SIZE)
    chunk_root = calc_chunk_root(body)
    c1 = Collation(CollationHeader(0, chunk_root, 0, zpad_right(b"proposer1", 20)), body)
    c2 = Collation(CollationHeader(0, chunk_root, 1, zpad_right(b"proposer2", 20)), body)
    c3 = Collation(CollationHeader(0, chunk_root, 2, zpad_right(b"proposer3", 20)), body)
    for collation in [c1, c2]:
        receiver_shard.add_collation(collation)

    # start shard syncer
    receiver_syncer = ShardSyncer(receiver_shard, receiver_peer_pool)
    asyncio.ensure_future(receiver_syncer.run())

    def finalizer():
        event_loop.run_until_complete(receiver_syncer.cancel())
    request.addfinalizer(finalizer)

    cancel_token = CancelToken("test")

    # request single collation
    received_collations = await asyncio.wait_for(
        sender.get_collations([c1.hash], cancel_token),
        timeout=1,
    )
    assert received_collations == set([c1])

    # request multiple collations
    received_collations = await asyncio.wait_for(
        sender.get_collations([c1.hash, c2.hash], cancel_token),
        timeout=1,
    )
    assert received_collations == set([c1, c2])

    # request no collations
    received_collations = await asyncio.wait_for(
        sender.get_collations([], cancel_token),
        timeout=1,
    )
    assert received_collations == set()

    # request unknown collation
    received_collations = await asyncio.wait_for(
        sender.get_collations([c3.hash], cancel_token),
        timeout=1,
    )
    assert received_collations == set()

    # request multiple collations, including unknown one
    received_collations = await asyncio.wait_for(
        sender.get_collations([c1.hash, c2.hash, c3.hash], cancel_token),
        timeout=1,
    )
    assert received_collations == set([c1, c2])
def test_from_bytes_valid_bytes_length(header_bytes, expected_header_dict):
    actual_collation_header = CollationHeader.from_bytes(header_bytes)
    expected_collation_header = CollationHeader(**expected_header_dict)
    assert actual_collation_header == expected_collation_header
def test_state_fixtures(fixture, fixture_vm_class):
    if fixture_vm_class is not ShardingVMForTesting:
        account_state_class = MainAccountStateDB
        trie_class = HexaryTrie
        header = BlockHeader(
            coinbase=fixture['env']['currentCoinbase'],
            difficulty=fixture['env']['currentDifficulty'],
            block_number=fixture['env']['currentNumber'],
            gas_limit=fixture['env']['currentGasLimit'],
            timestamp=fixture['env']['currentTimestamp'],
            parent_hash=fixture['env']['previousHash'],
        )
    else:
        account_state_class = ShardingAccountStateDB
        trie_class = BinaryTrie
        header = CollationHeader(
            shard_id=fixture['env']['shardID'],
            expected_period_number=fixture['env']['expectedPeriodNumber'],
            period_start_prevhash=fixture['env']['periodStartHash'],
            parent_hash=fixture['env']['previousHash'],
            coinbase=fixture['env']['currentCoinbase'],
            number=fixture['env']['currentNumber'],
        )

    chaindb = ChainDB(
        get_db_backend(),
        account_state_class=account_state_class,
        trie_class=trie_class,
    )
    vm = fixture_vm_class(header=header, chaindb=chaindb)

    vm_state = vm.state
    with vm_state.mutable_state_db() as state_db:
        state_db.apply_state_dict(fixture['pre'])
    # Update state_root manually
    vm.block.header.state_root = vm_state.state_root

    if 'secretKey' in fixture['transaction']:
        unsigned_transaction = vm.create_unsigned_transaction(
            nonce=fixture['transaction']['nonce'],
            gas_price=fixture['transaction']['gasPrice'],
            gas=fixture['transaction']['gasLimit'],
            to=fixture['transaction']['to'],
            value=fixture['transaction']['value'],
            data=fixture['transaction']['data'],
        )
        private_key = keys.PrivateKey(fixture['transaction']['secretKey'])
        transaction = unsigned_transaction.as_signed_transaction(private_key=private_key)
    elif 'vrs' in fixture['transaction']:
        v, r, s = (
            fixture['transaction']['v'],
            fixture['transaction']['r'],
            fixture['transaction']['s'],
        )
        transaction = vm.create_transaction(
            nonce=fixture['transaction']['nonce'],
            gas_price=fixture['transaction']['gasPrice'],
            gas=fixture['transaction']['gasLimit'],
            to=fixture['transaction']['to'],
            value=fixture['transaction']['value'],
            data=fixture['transaction']['data'],
            v=v,
            r=r,
            s=s,
        )
    else:
        # sharding transaction
        transaction = vm.create_transaction(
            chain_id=fixture['transaction']['chainID'],
            shard_id=fixture['transaction']['shardID'],
            to=fixture['transaction']['to'],
            data=fixture['transaction']['data'],
            gas=fixture['transaction']['gasLimit'],
            gas_price=fixture['transaction']['gasPrice'],
            access_list=fixture['transaction']['accessList'],
            code=fixture['transaction']['code'],
        )

    try:
        computation, _ = vm.apply_transaction(transaction)
    except ValidationError as err:
        transaction_error = err
        LOGGER.warn("Got transaction error", exc_info=True)
    else:
        transaction_error = False

    if not transaction_error:
        log_entries = computation.get_log_entries()
        actual_logs_hash = hash_log_entries(log_entries)
        if 'logs' in fixture['post']:
            expected_logs_hash = fixture['post']['logs']
            assert expected_logs_hash == actual_logs_hash
        elif log_entries:
            raise AssertionError("Got {0} log entries. hash:{1}".format(
                len(log_entries),
                actual_logs_hash,
            ))

        if 'out' in fixture:
            expected_output = fixture['out']
            if isinstance(expected_output, int):
                assert len(computation.output) == expected_output
            else:
                assert computation.output == expected_output

    assert vm.block.header.state_root == fixture['post']['hash']
def test_from_bytes_invalid_bytes_length(header_bytes):
    with pytest.raises(ValidationError):
        CollationHeader.from_bytes(header_bytes)