def test_reset_balance_in_trie_and_revert():
    db = InMemoryDb()
    b = TokenBalances(b"", db)
    # 17 entries - right above the threshold, so balances end up in a trie
    mapping = {
        token_id_encode("Q" + chr(65 + i)): int(i * 1e3) + 42 for i in range(17)
    }
    b._balances = mapping.copy()
    b.commit()

    journal = []
    b.set_balance(journal, 999, 999)
    assert b.balance(999) == 999

    b.reset(journal)
    assert b.is_blank()
    assert b.to_dict() == {}

    # replaying the recorded journal ops undoes both the set_balance and the reset
    for op in journal:
        op()
    assert not b.is_blank()
    assert b.to_dict() == mapping
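

# A minimal sketch (not part of the original suite) isolating the journal/undo
# pattern used above. It assumes set_balance() appends an undo op to `journal`
# that restores the previous (absent/zero) balance when invoked.
def test_set_balance_revert_sketch():
    b = TokenBalances(b"", InMemoryDb())
    journal = []
    token = token_id_encode("QKC")
    b.set_balance(journal, token, 100)
    assert b.balance(token) == 100
    # replay the undo op recorded by set_balance
    for op in journal:
        op()
    assert b.balance(token) == 0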


def test_encoding_change_from_dict_to_trie():
    db = InMemoryDb()
    b = TokenBalances(b"", db)
    # start with 16 entries - right below the threshold
    mapping = {
        token_id_encode("Q" + chr(65 + i)): int(i * 1e3) + 42 for i in range(16)
    }
    b._balances = mapping.copy()
    b.commit()
    assert b.serialize().startswith(b"\x00")
    assert b.token_trie is None

    # add one more entry and expect changes
    journal = []
    new_token = token_id_encode("QKC")
    b.set_balance(journal, new_token, 123)
    assert b.balance(new_token) == 123
    b.commit()
    assert b.token_trie is not None
    assert b.serialize().startswith(b"\x01")
    root1 = b.token_trie.root_hash

    copied_mapping = mapping.copy()
    copied_mapping[new_token] = 123
    assert b.to_dict() == copied_mapping

    # clear all balances except QKC
    for k in mapping:
        b.set_balance(journal, k, 0)
    # still have those token keys in balance map
    assert b.balance(token_id_encode("QA")) == 0
    assert b.to_dict() == {new_token: 123}

    # trie hash should change after serialization
    b.commit()
    serialized = b.serialize()
    root2 = b.token_trie.root_hash
    assert serialized == b"\x01" + root2
    assert root1 != root2

    # balance map truncated, entries with 0 value will be ignored
    assert b._balances == {}
    assert b.balance(token_id_encode("QB")) == 0
    assert len(b._balances) == 0
    assert b.to_dict() == {new_token: 123}
    assert not b.is_blank()

    # remove the last entry
    b.set_balance(journal, new_token, 0)
    assert b.to_dict() == {}
    b.commit()
    assert b.token_trie.root_hash == BLANK_ROOT
    assert b._balances == {}
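

# A minimal sketch (not part of the original suite) of the dict-encoding round
# trip below the trie threshold. It assumes the b"\x00"-prefixed encoding can be
# fed back through the constructor, the same way the trie encoding is in
# test_encoding_in_trie below.
def test_dict_encoding_roundtrip_sketch():
    db = InMemoryDb()
    b0 = TokenBalances(b"", db)
    mapping = {token_id_encode("QA"): 1, token_id_encode("QB"): 2}
    b0._balances = mapping.copy()
    b0.commit()
    serialized = b0.serialize()
    assert serialized.startswith(b"\x00")
    assert b0.token_trie is None

    # deserializing should reproduce the same balances and bytes
    b1 = TokenBalances(serialized, db)
    assert b1.to_dict() == mapping
    assert b1.serialize() == serialized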


def test_encoding_in_trie():
    encoding = b"\x01\x84\x8dBq\xe4N\xa4\x14f\xfe5Ua\xddC\xb1f\xc9'\xd2\xec\xa0\xa8\xdd\x90\x1a\x8edi\xec\xde\xb1"
    # 17 entries - right above the threshold, so balances end up in a trie
    mapping = {
        token_id_encode("Q" + chr(65 + i)): int(i * 1e3) + 42 for i in range(17)
    }
    db = InMemoryDb()

    # starting from blank account
    b0 = TokenBalances(b"", db)
    b0._balances = mapping.copy()
    b0.commit()
    assert b0.serialize() == encoding
    # check internal states
    assert b0.token_trie is not None

    # starting from RLP encoding
    b1 = TokenBalances(encoding, db)
    assert b1.to_dict() == mapping
    assert b1.serialize() == encoding
    # check internal states
    assert b1._balances == {}
    assert b1.token_trie is not None
    assert not b1.is_blank()

    assert b1.balance(token_id_encode("QC")) == mapping[token_id_encode("QC")]
    # underlying balance map populated by the read above
    assert len(b1._balances) == 1

    # serialize without commit should fail now that the balance map is populated
    try:
        b1.serialize()
        pytest.fail()
    except AssertionError:
        pass
    # otherwise should succeed
    b1.commit()
    b1.serialize()


def test_blank_account():
    b = TokenBalances(b"", InMemoryDb())
    assert b._balances == {}
    assert b.serialize() == b""
    assert b.is_blank()
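

# A minimal sketch (not part of the original suite). It assumes a blank account
# simply reports a zero balance for any token id and an empty balance dict.
def test_blank_account_zero_balance_sketch():
    b = TokenBalances(b"", InMemoryDb())
    assert b.balance(token_id_encode("QKC")) == 0
    assert b.to_dict() == {}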