import pytest
import rlp

# NOTE: import paths below assume the pyquarkchain source layout; adjust them
# if the local module structure differs.
from quarkchain.cluster.shard import Shard
from quarkchain.core import TokenBalances
from quarkchain.db import InMemoryDb
from quarkchain.evm.state import _Account
from quarkchain.evm.trie import BLANK_ROOT, Trie
from quarkchain.utils import token_id_encode


def print_shard_balance(env, rb, full_shard_id):
    shard = Shard(env, full_shard_id, None)
    state = shard.state
    state.init_from_root_block(rb)

    print("Full shard id: %d" % full_shard_id)
    print("Block height: %d" % state.header_tip.height)
    print("Trie hash: %s" % state.meta_tip.hash_evm_state_root.hex())

    # Walk the EVM state trie and sum every account's QKC balance.
    trie = Trie(state.raw_db, state.meta_tip.hash_evm_state_root)
    key = trie.next(bytes(32))
    total = 0
    while key is not None:
        rlpdata = trie.get(key)
        o = rlp.decode(rlpdata, _Account)
        tb = TokenBalances(o.token_balances, state.raw_db)
        balance = tb.balance(token_id_encode("QKC"))
        print("Key: %s, Balance: %s" % (key.hex(), balance))
        total += balance
        key = trie.next(key)

    print("Total balance in shard: %d" % total)
    return total, state.header_tip.height

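# A minimal sketch (hypothetical helper, not part of the original file): reuse
# print_shard_balance to sum QKC balances across several shards. The caller is
# assumed to supply the list of full shard ids (e.g. taken from the cluster
# config).
def print_all_shard_balances(env, rb, full_shard_ids):
    grand_total = 0
    for fsid in full_shard_ids:
        total, height = print_shard_balance(env, rb, fsid)
        print("Shard %d at height %d holds %d" % (fsid, height, total))
        grand_total += total
    print("Total balance across shards: %d" % grand_total)
    return grand_total
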
def test_encode_zero_balance(encoding, mapping):
    # starting from blank account
    b0 = TokenBalances(b"", InMemoryDb())
    for k, v in mapping.items():
        b0._balances[k] = v
    assert b0._balances == mapping
    assert b0.serialize() == encoding

    # starting from RLP encoding
    b1 = TokenBalances(encoding, InMemoryDb())
    assert b1._balances == {k: v for k, v in mapping.items() if v != 0}
    if b1._balances:
        assert b1.serialize() == encoding
    else:
        assert b1.serialize() == b""

def test_encode_bytes(encoding, mapping):
    # starting from blank account
    b0 = TokenBalances(b"", InMemoryDb())
    for k, v in mapping.items():
        b0._balances[k] = v
    assert b0._balances == mapping
    assert b0.serialize() == encoding

    # starting from RLP encoding
    b1 = TokenBalances(encoding, InMemoryDb())
    assert b1._balances == mapping
    assert b1.serialize() == encoding

def test_blank_account():
    b = TokenBalances(b"", InMemoryDb())
    assert b._balances == {}
    assert b.serialize() == b""
    assert b.is_blank()

def test_reset_balance_in_trie_and_revert():
    db = InMemoryDb()
    b = TokenBalances(b"", db)
    mapping = {
        token_id_encode("Q" + chr(65 + i)): int(i * 1e3) + 42 for i in range(17)
    }
    b._balances = mapping.copy()
    b.commit()

    journal = []
    b.set_balance(journal, 999, 999)
    assert b.balance(999) == 999

    b.reset(journal)
    assert b.is_blank()
    assert b.to_dict() == {}

    # replaying the journal entries reverts both the set_balance and the reset
    for op in journal:
        op()
    assert not b.is_blank()
    assert b.to_dict() == mapping

def test_encoding_change_from_dict_to_trie():
    db = InMemoryDb()
    b = TokenBalances(b"", db)

    # start with 16 entries - right below the threshold
    mapping = {
        token_id_encode("Q" + chr(65 + i)): int(i * 1e3) + 42 for i in range(16)
    }
    b._balances = mapping.copy()
    b.commit()
    assert b.serialize().startswith(b"\x00")
    assert b.token_trie is None

    # add one more entry and expect changes
    journal = []
    new_token = token_id_encode("QKC")
    b.set_balance(journal, new_token, 123)
    assert b.balance(new_token) == 123
    b.commit()
    assert b.token_trie is not None
    assert b.serialize().startswith(b"\x01")
    root1 = b.token_trie.root_hash

    # clear all balances except QKC
    for k in mapping:
        b.set_balance(journal, k, 0)
    # still have those token keys in balance map
    assert b.balance(token_id_encode("QA")) == 0
    assert b.to_dict() == {new_token: 123}

    # trie hash should change after serialization
    b.commit()
    serialized = b.serialize()
    root2 = b.token_trie.root_hash
    assert serialized == b"\x01" + root2
    assert root1 != root2

    # balance map truncated, but accessing will bring it back to map with val 0
    assert b._balances == {}
    assert b.balance(token_id_encode("QB")) == 0
    assert len(b._balances) == 1
    assert b.to_dict() == {new_token: 123}
    assert not b.is_blank()

    # remove the last entry
    b.set_balance(journal, new_token, 0)
    assert b.to_dict() == {}
    b.commit()
    assert b.token_trie.root_hash == BLANK_ROOT
    assert b._balances == {}

def test_encoding_in_trie():
    encoding = b"\x01\x84\x8dBq\xe4N\xa4\x14f\xfe5Ua\xddC\xb1f\xc9'\xd2\xec\xa0\xa8\xdd\x90\x1a\x8edi\xec\xde\xb1"
    mapping = {
        token_id_encode("Q" + chr(65 + i)): int(i * 1e3) + 42 for i in range(17)
    }
    db = InMemoryDb()

    # starting from blank account
    b0 = TokenBalances(b"", db)
    b0._balances = mapping.copy()
    b0.commit()
    assert b0.serialize() == encoding
    # check internal states
    assert b0.token_trie is not None

    # starting from RLP encoding
    b1 = TokenBalances(encoding, db)
    assert b1.to_dict() == mapping
    assert b1.serialize() == encoding
    # check internal states
    assert b1._balances == {}
    assert b1.token_trie is not None
    assert not b1.is_blank()

    assert b1.balance(token_id_encode("QC")) == mapping[token_id_encode("QC")]
    # underlying balance map populated
    assert len(b1._balances) == 1

    # serialize without commit should fail
    try:
        b1.serialize()
        pytest.fail()
    except AssertionError:
        pass
    # otherwise should succeed
    b1.commit()
    b1.serialize()

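# A minimal sketch (hypothetical helper, not part of the tested API): classify a
# serialized TokenBalances blob using the prefixes exercised by the tests above
# (b"" = blank account, b"\x00" = inline dict encoding, b"\x01" followed by a
# 32-byte root hash = trie encoding).
def describe_token_balances_encoding(serialized: bytes) -> str:
    if serialized == b"":
        return "blank account"
    if serialized[:1] == b"\x00":
        return "dict encoding, %d payload bytes" % (len(serialized) - 1)
    if serialized[:1] == b"\x01":
        return "trie encoding, root hash %s" % serialized[1:].hex()
    return "unknown encoding"
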
def test_encoding_order_of_balance():
    b0 = TokenBalances(b"", InMemoryDb())
    b0._balances[0] = 100
    b0._balances[1] = 100
    b0._balances[2] = 100
    b0._balances[3] = 100

    b1 = TokenBalances(b"", InMemoryDb())
    b1._balances[3] = 100
    b1._balances[2] = 100
    b1._balances[1] = 100
    b1._balances[0] = 100

    assert b0.serialize() == b1.serialize()

def test_encoding_singularity():
    b0 = TokenBalances(b"", InMemoryDb())
    b0._balances[0] = 100
    b0._balances[1] = 100
    b0._balances[2] = 100
    b0._balances[3] = 100

    b1 = TokenBalances(b"", InMemoryDb())
    b1._balances[3] = 100
    b1._balances[2] = 100
    b1._balances[1] = 100
    b1._balances[0] = 100

    assert b0.serialize() == b1.serialize()