def test_encode_bytes(encoding, mapping):
    """Round-trip: a balance dict serializes to `encoding`, and decoding
    `encoding` reproduces the dict and re-serializes identically."""
    # Build from a blank account by populating the balance map directly.
    fresh = TokenBalances(b"", InMemoryDb())
    fresh._balances.update(mapping)
    assert fresh._balances == mapping
    assert fresh.serialize() == encoding

    # Reconstruct from the RLP encoding and expect identical state.
    decoded = TokenBalances(encoding, InMemoryDb())
    assert decoded._balances == mapping
    assert decoded.serialize() == encoding
def test_encoding_change_from_dict_to_trie():
    """Exercise the transition from plain-dict encoding (prefix 0x00) to
    trie-backed encoding (prefix 0x01) as the token count crosses the
    threshold, and back down to an empty trie."""
    db = InMemoryDb()
    tb = TokenBalances(b"", db)

    # Seed 16 entries - right below the threshold.
    seed = {
        token_id_encode("Q" + chr(65 + i)): int(i * 1e3) + 42 for i in range(16)
    }
    tb._balances = seed.copy()
    tb.commit()
    assert tb.serialize().startswith(b"\x00")
    assert tb.token_trie is None

    # One more entry pushes the account into trie mode.
    journal = []
    qkc = token_id_encode("QKC")
    tb.set_balance(journal, qkc, 123)
    assert tb.balance(qkc) == 123
    tb.commit()
    assert tb.token_trie is not None
    assert tb.serialize().startswith(b"\x01")
    hash_before = tb.token_trie.root_hash

    expected = seed.copy()
    expected[qkc] = 123
    assert tb.to_dict() == expected

    # Zero out every balance except QKC.
    for token in seed:
        tb.set_balance(journal, token, 0)
    # Zeroed tokens still sit in the in-memory balance map for now.
    assert tb.balance(token_id_encode("QA")) == 0
    assert tb.to_dict() == {qkc: 123}

    # The trie root hash must change after re-serialization.
    tb.commit()
    serialized = tb.serialize()
    hash_after = tb.token_trie.root_hash
    assert serialized == b"\x01" + hash_after
    assert hash_before != hash_after

    # Balance map truncated: zero-valued entries are dropped by commit.
    assert tb._balances == {}
    assert tb.balance(token_id_encode("QB")) == 0
    assert len(tb._balances) == 0
    assert tb.to_dict() == {qkc: 123}
    assert not tb.is_blank()

    # Removing the final entry leaves an empty (blank-root) trie.
    tb.set_balance(journal, qkc, 0)
    assert tb.to_dict() == {}
    tb.commit()
    assert tb.token_trie.root_hash == BLANK_ROOT
    assert tb._balances == {}
def test_encoding_order_of_balance():
    """Serialization must be canonical: independent of insertion order."""
    ascending = TokenBalances(b"", InMemoryDb())
    for token_id in (0, 1, 2, 3):
        ascending._balances[token_id] = 100

    descending = TokenBalances(b"", InMemoryDb())
    for token_id in (3, 2, 1, 0):
        descending._balances[token_id] = 100

    assert ascending.serialize() == descending.serialize()
def test_encoding_singularity():
    """The encoding is singular: two accounts holding the same balances
    serialize to the same bytes regardless of the order entries were set."""
    forward = TokenBalances(b"", InMemoryDb())
    for token_id in (0, 1, 2, 3):
        forward.balances[token_id] = 100

    backward = TokenBalances(b"", InMemoryDb())
    for token_id in (3, 2, 1, 0):
        backward.balances[token_id] = 100

    assert forward.serialize() == backward.serialize()
def test_encode_zero_balance(encoding, mapping):
    """Zero-valued balances survive in a directly-populated map but are
    pruned when the account is decoded from its RLP encoding."""
    # Populate a blank account directly; zeros stay in the raw map.
    direct = TokenBalances(b"", InMemoryDb())
    direct.balances.update(mapping)
    assert direct.balances == mapping
    assert direct.serialize() == encoding

    # Decoding drops zero-valued entries.
    decoded = TokenBalances(encoding, InMemoryDb())
    nonzero = {token: value for token, value in mapping.items() if value != 0}
    assert decoded.balances == nonzero
    if decoded.balances:
        assert decoded.serialize() == encoding
    else:
        # An account with nothing but zeros serializes to empty bytes.
        assert decoded.serialize() == b""
def test_encoding_in_trie():
    """Accounts with 17 tokens (above the dict threshold) serialize via a
    trie: a 0x01 prefix followed by the trie root hash.

    Fix: the original checked "serialize without commit fails" with a manual
    ``try: serialize(); pytest.fail() / except AssertionError`` dance, which
    is fragile and unidiomatic — replaced with ``pytest.raises``.
    """
    encoding = b"\x01\x84\x8dBq\xe4N\xa4\x14f\xfe5Ua\xddC\xb1f\xc9'\xd2\xec\xa0\xa8\xdd\x90\x1a\x8edi\xec\xde\xb1"
    mapping = {
        token_id_encode("Q" + chr(65 + i)): int(i * 1e3) + 42 for i in range(17)
    }
    db = InMemoryDb()

    # Starting from a blank account.
    b0 = TokenBalances(b"", db)
    b0._balances = mapping.copy()
    b0.commit()
    assert b0.serialize() == encoding
    # Check internal states: 17 entries forces the trie representation.
    assert b0.token_trie is not None

    # Starting from the RLP encoding (same backing db holds the trie nodes).
    b1 = TokenBalances(encoding, db)
    assert b1.to_dict() == mapping
    assert b1.serialize() == encoding
    # Check internal states: the balance map starts empty and fills lazily.
    assert b1._balances == {}
    assert b1.token_trie is not None
    assert not b1.is_blank()
    assert b1.balance(token_id_encode("QC")) == mapping[token_id_encode("QC")]
    # The balance() lookup above cached exactly one entry.
    assert len(b1._balances) == 1

    # Serializing with uncommitted cached entries must fail ...
    with pytest.raises(AssertionError):
        b1.serialize()
    # ... and succeed once committed.
    b1.commit()
    b1.serialize()
def test_blank_account():
    """An account built from empty bytes is blank and serializes to b""."""
    account = TokenBalances(b"", InMemoryDb())
    assert account._balances == {}
    assert account.serialize() == b""
    assert account.is_blank()