def test_proof_serialize_deserialize():
    node_trie = Trie(PersistentDB(KeyValueStorageInMemory()))
    client_trie = Trie(PersistentDB(KeyValueStorageInMemory()))
    keys = {k.encode(): [rlp_encode([v]), ]
            for k, v in [('k1', 'v1'), ('k2', 'v2'), ('k35', 'v55'), ('k70', 'v99')]}
    for k, v in keys.items():
        node_trie.update(k, v[0])
    # Generate a serialized proof for each key
    for k in keys:
        keys[k].append(node_trie.generate_state_proof(k, serialize=True))
    # A serialized proof is bytes and verifies with serialized=True
    for k in keys:
        prf = keys[k][1]
        assert isinstance(prf, bytes)
        assert client_trie.verify_spv_proof(node_trie.root_hash, k, keys[k][0],
                                            prf, serialized=True)
def test_proof_prefix_only_prefix_nodes():
    node_trie = Trie(PersistentDB(KeyValueStorageInMemory()))
    client_trie = Trie(PersistentDB(KeyValueStorageInMemory()))
    prefix = 'abcdefgh'
    keys_suffices = set()
    while len(keys_suffices) != 20:
        keys_suffices.add(randint(25, 25000))
    key_vals = {'{}{}'.format(prefix, k): str(randint(3000, 5000))
                for k in keys_suffices}
    for k, v in key_vals.items():
        node_trie.update(k.encode(), rlp_encode([v]))

    proof_nodes, val = node_trie.generate_state_proof_for_keys_with_prefix(
        prefix.encode(), get_value=True)
    encoded = {k.encode(): rlp_encode([v]) for k, v in key_vals.items()}
    # Check returned values match the actual values
    assert encoded == val
    assert client_trie.verify_spv_proof_multi(node_trie.root_hash, encoded,
                                              proof_nodes)
    # Check without value
    proof_nodes = node_trie.generate_state_proof_for_keys_with_prefix(
        prefix.encode(), get_value=False)
    assert client_trie.verify_spv_proof_multi(node_trie.root_hash, encoded,
                                              proof_nodes)
def test_proof_specific_root():
    node_trie = Trie(PersistentDB(KeyValueStorageInMemory()))
    client_trie = Trie(PersistentDB(KeyValueStorageInMemory()))
    kvs = OrderedDict({'k1': 'v1', 'k2': 'v2', 'x3': 'v3',
                       'x4': 'v5', 'x5': 'v7', 'y99': 'v6'})
    size = len(kvs)

    # Only add some keys
    old_keys = set()
    i = 0
    for k, v in kvs.items():
        node_trie.update(k.encode(), rlp_encode([v]))
        old_keys.add(k)
        i += 1
        if i >= size // 2:
            break

    # Record the root
    root_hash_0 = node_trie.root_hash
    root_node_0 = deepcopy(node_trie.root_node)

    # Add remaining keys
    new_keys = set()
    i = 0
    for k, v in reversed(kvs.items()):
        node_trie.update(k.encode(), rlp_encode([v]))
        new_keys.add(k)
        i += 1
        if i >= size // 2:
            break

    # Record new roots
    root_hash_1 = node_trie.root_hash
    root_node_1 = deepcopy(node_trie.root_node)

    # Check each key is present
    for k, v in kvs.items():
        assert node_trie.get(k.encode()) == rlp_encode([v])

    # Old and new roots should be different
    assert root_hash_0 != root_hash_1
    assert root_node_0 != root_node_1

    # Generate and verify proof for both old (if key was present) and new roots
    for k, v in kvs.items():
        # Membership in old_keys must be checked before encoding, since
        # old_keys holds the un-encoded (str) keys
        in_old_root = k in old_keys
        k, v = k.encode(), rlp_encode([v])
        if in_old_root:
            old_root_proof = node_trie.generate_state_proof(k, root=root_node_0)
            assert client_trie.verify_spv_proof(root_hash_0, k, v, old_root_proof)
        new_root_proof = node_trie.generate_state_proof(k, root=root_node_1)
        assert client_trie.verify_spv_proof(root_hash_1, k, v, new_root_proof)
def test_get_proof_and_value():
    # Non prefix nodes
    num_keys = 100
    test_data = gen_test_data(num_keys)
    node_trie = Trie(PersistentDB(KeyValueStorageInMemory()))
    client_trie = Trie(PersistentDB(KeyValueStorageInMemory()))
    for k, v in test_data.items():
        node_trie.update(k, v)
    for k in test_data:
        proof, v = node_trie.produce_spv_proof(k, get_value=True)
        proof.append(deepcopy(node_trie.root_node))
        assert v == test_data[k]
        assert client_trie.verify_spv_proof(node_trie.root_hash, k, v, proof)
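# NOTE: gen_test_data is a helper used by the tests above and below but is
# defined elsewhere in the test module. The sketch below shows what such a
# helper could look like; the body (and the reuse of randomString, which the
# other tests import) is an assumption for illustration, not the project's
# actual implementation. All the tests need is num_keys unique byte keys
# mapped to rlp-encoded values.
def gen_test_data(num_keys, max_key_size=64, max_val_size=256):
    data = {}
    while len(data) < num_keys:
        key = randomString(randint(1, max_key_size)).encode()
        data[key] = rlp_encode([randomString(randint(1, max_val_size))])
    return data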
def test_verify_proof_random_data():
    """
    Add some key value pairs in trie. Generate and verify proof for them.
    :return:
    """
    num_keys = 100
    test_data = gen_test_data(num_keys)
    partitions = 4
    partition_size = num_keys // partitions
    keys = [list(list(test_data.keys())[i:i + partition_size])
            for i in range(0, len(test_data), partition_size)]
    node_trie = Trie(PersistentDB(KeyValueStorageInMemory()))
    client_trie = Trie(PersistentDB(KeyValueStorageInMemory()))
    root_hashes = []
    proofs = []
    for i in range(0, partitions):
        for k in keys[i]:
            node_trie.update(k, test_data[k])
        root_hashes.append(node_trie.root_hash)
        proofs.append({k: node_trie.generate_state_proof(k) for k in keys[i]})
        assert all([client_trie.verify_spv_proof(root_hashes[i], k,
                                                 test_data[k], proofs[i][k])
                    for k in keys[i]])

    # Pick any keys from any partition and verify the already generated proof
    for _ in range(400):
        p = randint(0, partitions - 1)
        key = choice(keys[p])
        assert client_trie.verify_spv_proof(root_hashes[p], key,
                                            test_data[key], proofs[p][key])

    # Pick any key randomly, generate new proof corresponding to current root
    # and verify proof
    all_keys = [k for i in keys for k in i]
    root_hash = node_trie.root_hash
    for _ in range(400):
        key = choice(all_keys)
        proof = node_trie.generate_state_proof(key)
        assert client_trie.verify_spv_proof(root_hash, key, test_data[key], proof)
def test_proof_prefix_only_prefix_nodes():
    node_trie = Trie(PersistentDB(KeyValueStorageInMemory()))
    client_trie = Trie(PersistentDB(KeyValueStorageInMemory()))
    prefix = 'abcdefgh'
    keys_suffices = set()
    while len(keys_suffices) != 20:
        keys_suffices.add(randint(25, 25000))
    key_vals = {'{}{}'.format(prefix, k): str(randint(3000, 5000))
                for k in keys_suffices}
    for k, v in key_vals.items():
        node_trie.update(k.encode(), rlp_encode([v]))
    proof_nodes = node_trie.generate_state_proof_for_key_prfx(prefix.encode())
    assert client_trie.verify_spv_proof_multi(
        node_trie.root_hash,
        {k.encode(): rlp_encode([v]) for k, v in key_vals.items()},
        proof_nodes)
def test_proof_specific_root():
    node_trie = Trie(PersistentDB(KeyValueStorageInMemory()))
    client_trie = Trie(PersistentDB(KeyValueStorageInMemory()))
    node_trie.update('k1'.encode(), rlp_encode(['v1']))
    node_trie.update('k2'.encode(), rlp_encode(['v2']))
    node_trie.update('x3'.encode(), rlp_encode(['v3']))
    root_hash_0 = node_trie.root_hash
    root_node_0 = node_trie.root_node

    node_trie.update('x4'.encode(), rlp_encode(['v5']))
    node_trie.update('y99'.encode(), rlp_encode(['v6']))
    node_trie.update('x5'.encode(), rlp_encode(['v7']))
    # root_hash_1 = node_trie.root_hash
    # root_node_1 = node_trie.root_node

    # A proof generated against an older, saved root node still verifies
    # against the corresponding old root hash
    k, v = 'k1'.encode(), rlp_encode(['v1'])
    old_root_proof = node_trie.generate_state_proof(k, root=root_node_0)
    assert client_trie.verify_spv_proof(root_hash_0, k, v, old_root_proof)
def test_proof_multiple_prefix_nodes():
    node_trie = Trie(PersistentDB(KeyValueStorageInMemory()))
    prefix_1 = 'abcdefgh'
    prefix_2 = 'abcdefxy'    # Prefix overlaps with previous
    prefix_3 = 'pqrstuvw'
    prefix_4 = 'mnoptuvw'    # Suffix overlaps
    all_prefixes = (prefix_1, prefix_2, prefix_3, prefix_4)

    other_nodes_count = 1000
    prefix_nodes_count = 100

    # Some nodes before prefix nodes
    for _ in range(other_nodes_count):
        k, v = randomString(randint(8, 19)).encode(), rlp_encode([randomString(15)])
        node_trie.update(k, v)

    keys_suffices = set()
    while len(keys_suffices) != prefix_nodes_count:
        keys_suffices.add(randint(25, 250000))
    key_vals = {'{}{}'.format(prefix, k): str(randint(3000, 5000))
                for prefix in all_prefixes for k in keys_suffices}
    for k, v in key_vals.items():
        node_trie.update(k.encode(), rlp_encode([v]))

    # Some nodes after prefix nodes
    for _ in range(other_nodes_count):
        node_trie.update(randomString(randint(8, 19)).encode(),
                         rlp_encode([randomString(15)]))

    for prefix in all_prefixes:
        client_trie = Trie(PersistentDB(KeyValueStorageInMemory()))
        proof_nodes, val = node_trie.generate_state_proof_for_keys_with_prefix(
            prefix.encode(), get_value=True)
        encoded = {k.encode(): rlp_encode([v])
                   for k, v in key_vals.items() if k.startswith(prefix)}
        # Check returned values match the actual values
        assert encoded == val
        assert client_trie.verify_spv_proof_multi(node_trie.root_hash, encoded,
                                                  proof_nodes)
        # Check without value
        proof_nodes = node_trie.generate_state_proof_for_keys_with_prefix(
            prefix.encode(), get_value=False)
        assert client_trie.verify_spv_proof_multi(node_trie.root_hash, encoded,
                                                  proof_nodes)
        # Verify keys with a different prefix
        encoded = {k.encode(): rlp_encode([v])
                   for k, v in key_vals.items() if not k.startswith(prefix)}
        assert not client_trie.verify_spv_proof_multi(node_trie.root_hash,
                                                      encoded, proof_nodes)
def test_get_prefix_nodes():
    trie = Trie(PersistentDB(KeyValueStorageInMemory()))
    prefix = 'abcd'
    prefix_nibbles = bin_to_nibbles(to_string(prefix))
    key1 = prefix + '1'
    key2 = prefix + '2'
    key3 = prefix + '3'
    trie.update(key1.encode(), rlp_encode(['v1']))
    last_node = trie._get_last_node_for_prfx(trie.root_node, prefix_nibbles)
    # The last node should be a leaf since only 1 key
    assert trie._get_node_type(last_node) == NODE_TYPE_LEAF

    # The queried key is larger than prefix, results in blank node
    last_node_ = trie._get_last_node_for_prfx(
        trie.root_node, bin_to_nibbles(to_string(prefix + '5')))
    assert last_node_ == BLANK_NODE

    trie.update(key2.encode(), rlp_encode(['v2']))
    last_node = trie._get_last_node_for_prfx(trie.root_node, prefix_nibbles)
    # The last node should be an extension since more than 1 key
    assert trie._get_node_type(last_node) == NODE_TYPE_EXTENSION

    trie.update(key3.encode(), rlp_encode(['v3']))
    last_node = trie._get_last_node_for_prfx(trie.root_node, prefix_nibbles)
    assert trie._get_node_type(last_node) == NODE_TYPE_EXTENSION

    last_node_key = without_terminator(unpack_to_nibbles(last_node[0]))
    # Key for the fetched prefix nodes (ignore last nibble) is same as prefix nibbles
    assert last_node_key[:-1] == prefix_nibbles

    # The extension node is correctly decoded.
    decoded_extension = trie._decode_to_node(last_node[1])
    assert decoded_extension[1] == [b' ', rlp_encode(['v1'])]
    assert decoded_extension[2] == [b' ', rlp_encode(['v2'])]
    assert decoded_extension[3] == [b' ', rlp_encode(['v3'])]
def test_verify_proof():
    node_trie = Trie(PersistentDB(KeyValueStorageInMemory()))
    client_trie = Trie(PersistentDB(KeyValueStorageInMemory()))
    node_trie.update('k1'.encode(), rlp_encode(['v1']))
    node_trie.update('k2'.encode(), rlp_encode(['v2']))
    root_hash_0 = node_trie.root_hash
    p0 = node_trie.produce_spv_proof('k2'.encode())
    p0.append(deepcopy(node_trie.root_node))
    p00 = deepcopy(p0)
    assert client_trie.verify_spv_proof(root_hash_0, 'k2'.encode(),
                                        rlp_encode(['v2']), p0)
    assert p00 == p0

    node_trie.update('k3'.encode(), rlp_encode(['v3']))
    node_trie.update('k4'.encode(), rlp_encode(['v4']))
    node_trie.update('x1'.encode(), rlp_encode(['y1']))
    node_trie.update('x2'.encode(), rlp_encode(['y2']))
    root_hash_1 = node_trie.root_hash

    # Generate 1 proof and then verify that proof
    p1 = node_trie.produce_spv_proof('k1'.encode())
    p1.append(node_trie.root_node)
    assert client_trie.verify_spv_proof(root_hash_1, 'k1'.encode(),
                                        rlp_encode(['v1']), p1)
    p2 = node_trie.produce_spv_proof('x2'.encode())
    p2.append(node_trie.root_node)
    assert client_trie.verify_spv_proof(root_hash_1, 'x2'.encode(),
                                        rlp_encode(['y2']), p2)

    # Generate more than 1 proof and then verify all proofs
    p3 = node_trie.produce_spv_proof('k3'.encode())
    p3.append(node_trie.root_node)
    p4 = node_trie.produce_spv_proof('x1'.encode())
    p4.append(node_trie.root_node)
    assert client_trie.verify_spv_proof(root_hash_1, 'k3'.encode(),
                                        rlp_encode(['v3']), p3)
    assert client_trie.verify_spv_proof(root_hash_1, 'x1'.encode(),
                                        rlp_encode(['y1']), p4)

    # Proof is correct but value is different
    assert not client_trie.verify_spv_proof(root_hash_1, 'x1'.encode(),
                                            rlp_encode(['y99']), p4)

    # Verify same proof again
    assert client_trie.verify_spv_proof(root_hash_1, 'k3'.encode(),
                                        rlp_encode(['v3']), p3)
    assert p00 == p0
    assert client_trie.verify_spv_proof(root_hash_0, 'k2'.encode(),
                                        rlp_encode(['v2']), p0)
def test_verify_proof_generated_using_helper():
    node_trie = Trie(PersistentDB(KeyValueStorageInMemory()))
    client_trie = Trie(PersistentDB(KeyValueStorageInMemory()))
    node_trie.update('k1'.encode(), rlp_encode(['v1']))
    node_trie.update('k2'.encode(), rlp_encode(['v2']))
    root_hash_0 = node_trie.root_hash
    p0 = node_trie.generate_state_proof('k2'.encode())
    assert client_trie.verify_spv_proof(root_hash_0, 'k2'.encode(),
                                        rlp_encode(['v2']), p0)

    node_trie.update('k3'.encode(), rlp_encode(['v3']))
    node_trie.update('k4'.encode(), rlp_encode(['v4']))
    node_trie.update('x1'.encode(), rlp_encode(['y1']))
    node_trie.update('x2'.encode(), rlp_encode(['y2']))
    root_hash_1 = node_trie.root_hash

    # Generate 1 proof and then verify that proof
    p1 = node_trie.generate_state_proof('k1'.encode())
    assert client_trie.verify_spv_proof(root_hash_1, 'k1'.encode(),
                                        rlp_encode(['v1']), p1)
    p2 = node_trie.generate_state_proof('x2'.encode())
    assert client_trie.verify_spv_proof(root_hash_1, 'x2'.encode(),
                                        rlp_encode(['y2']), p2)

    # Generate more than 1 proof and then verify all proofs
    p3 = node_trie.generate_state_proof('k3'.encode())
    p4 = node_trie.generate_state_proof('x1'.encode())
    assert client_trie.verify_spv_proof(root_hash_1, 'k3'.encode(),
                                        rlp_encode(['v3']), p3)
    assert client_trie.verify_spv_proof(root_hash_1, 'x1'.encode(),
                                        rlp_encode(['y1']), p4)

    # Proof is correct but value is different
    assert not client_trie.verify_spv_proof(root_hash_1, 'x1'.encode(),
                                            rlp_encode(['y99']), p4)

    # Verify same proof again
    assert client_trie.verify_spv_proof(root_hash_1, 'k3'.encode(),
                                        rlp_encode(['v3']), p3)
    assert client_trie.verify_spv_proof(root_hash_0, 'k2'.encode(),
                                        rlp_encode(['v2']), p0)

    # Proof generated using non-existent key fails verification
    p5 = node_trie.generate_state_proof('x909'.encode())
    assert not client_trie.verify_spv_proof(root_hash_1, 'x909'.encode(),
                                            rlp_encode(['y909']), p5)
def test_proof_prefix_with_other_nodes():
    node_trie = Trie(PersistentDB(KeyValueStorageInMemory()))
    client_trie = Trie(PersistentDB(KeyValueStorageInMemory()))
    prefix = 'abcdefgh'
    other_nodes_count = 1000
    prefix_nodes_count = 100

    # Some nodes before prefix nodes
    for _ in range(other_nodes_count):
        node_trie.update(randomString(randint(8, 19)).encode(),
                         rlp_encode([randomString(15)]))

    keys_suffices = set()
    while len(keys_suffices) != prefix_nodes_count:
        keys_suffices.add(randint(25, 250000))
    key_vals = {'{}{}'.format(prefix, k): str(randint(3000, 5000))
                for k in keys_suffices}
    for k, v in key_vals.items():
        node_trie.update(k.encode(), rlp_encode([v]))

    # Some nodes after prefix nodes
    for _ in range(other_nodes_count):
        node_trie.update(randomString(randint(8, 19)).encode(),
                         rlp_encode([randomString(15)]))

    proof_nodes, val = node_trie.generate_state_proof_for_keys_with_prefix(
        prefix.encode(), get_value=True)
    encoded = {k.encode(): rlp_encode([v]) for k, v in key_vals.items()}
    # Check returned values match the actual values
    assert encoded == val
    assert client_trie.verify_spv_proof_multi(node_trie.root_hash, encoded,
                                              proof_nodes)
    # Check without value
    proof_nodes = node_trie.generate_state_proof_for_keys_with_prefix(
        prefix.encode(), get_value=False)
    assert client_trie.verify_spv_proof_multi(node_trie.root_hash, encoded,
                                              proof_nodes)

    # Change the value of one randomly chosen key; verification should fail
    encoded_new = deepcopy(encoded)
    random_key = next(iter(encoded_new.keys()))
    encoded_new[random_key] = rlp_encode(
        [rlp_decode(encoded_new[random_key])[0] + b'2212'])
    assert not client_trie.verify_spv_proof_multi(node_trie.root_hash,
                                                  encoded_new, proof_nodes)
def test_get_prefix_nodes():
    trie = Trie(PersistentDB(KeyValueStorageInMemory()))
    prefix = 'abcd'
    prefix_nibbles = bin_to_nibbles(prefix)
    key1 = prefix + '1'
    key2 = prefix + '2'
    key3 = prefix + '3'
    trie.update(key1.encode(), rlp_encode(['v1']))
    seen_prefix = []
    last_node = trie._get_last_node_for_prfx(trie.root_node, prefix_nibbles,
                                             seen_prfx=seen_prefix)
    # The last node should be a leaf since only 1 key
    assert trie._get_node_type(last_node) == NODE_TYPE_LEAF
    # Seen prefix matches the prefix exactly
    assert seen_prefix == []

    # The queried key is larger than prefix, results in blank node
    last_node_ = trie._get_last_node_for_prfx(trie.root_node,
                                              bin_to_nibbles(prefix + '5'), [])
    assert last_node_ == BLANK_NODE

    seen_prefix = []
    trie.update(key2.encode(), rlp_encode(['v2']))
    last_node = trie._get_last_node_for_prfx(trie.root_node, prefix_nibbles,
                                             seen_prfx=seen_prefix)
    # The last node should be an extension since more than 1 key
    assert trie._get_node_type(last_node) == NODE_TYPE_EXTENSION
    assert seen_prefix == []

    seen_prefix = []
    trie.update(key3.encode(), rlp_encode(['v3']))
    last_node = trie._get_last_node_for_prfx(trie.root_node, prefix_nibbles,
                                             seen_prfx=seen_prefix)
    assert trie._get_node_type(last_node) == NODE_TYPE_EXTENSION
    assert seen_prefix == []

    last_node_key = without_terminator(unpack_to_nibbles(last_node[0]))
    # Key for the fetched prefix nodes (ignore last nibble) is same as prefix nibbles
    assert last_node_key[:-1] == prefix_nibbles

    # The extension node is correctly decoded.
    decoded_extension = trie._decode_to_node(last_node[1])
    assert decoded_extension[1] == [b' ', rlp_encode(['v1'])]
    assert decoded_extension[2] == [b' ', rlp_encode(['v2'])]
    assert decoded_extension[3] == [b' ', rlp_encode(['v3'])]

    # Add keys with extended prefix
    extended_prefix = '1'
    key4 = prefix + extended_prefix + '85'
    trie.update(key4.encode(), rlp_encode(['v11']))
    key5 = prefix + extended_prefix + '96'
    trie.update(key5.encode(), rlp_encode(['v12']))
    seen_prefix = []
    new_prefix_nibbs = bin_to_nibbles(prefix + extended_prefix)
    last_node = trie._get_last_node_for_prfx(trie.root_node, new_prefix_nibbs,
                                             seen_prfx=seen_prefix)
    assert trie._get_node_type(last_node) == NODE_TYPE_BRANCH
    assert new_prefix_nibbs == seen_prefix
    assert seen_prefix == bin_to_nibbles(prefix + '1')

    # Traverse to the next node
    remaining_key4_nibbs = bin_to_nibbles(key4)[len(seen_prefix):]
    remaining_key5_nibbs = bin_to_nibbles(key5)[len(seen_prefix):]
    next_nibble = (remaining_key4_nibbs[0]
                   if remaining_key4_nibbs[0] > remaining_key5_nibbs[0]
                   else remaining_key5_nibbs[0])
    next_node = trie._decode_to_node(last_node[next_nibble])
    assert trie._get_node_type(next_node) == NODE_TYPE_BRANCH
    # The 8th index should lead to a node with key '5', key4 ended in '85'
    assert trie._get_node_type(next_node[8]) == NODE_TYPE_LEAF
    assert without_terminator(unpack_to_nibbles(next_node[8][0])) == bin_to_nibbles('5')
    # The 9th index should lead to a node with key '6', key5 ended in '96'
    assert trie._get_node_type(next_node[9]) == NODE_TYPE_LEAF
    assert without_terminator(unpack_to_nibbles(next_node[9][0])) == bin_to_nibbles('6')

    prefix_1 = prefix + 'efgh'
    prefix_1_nibbles = bin_to_nibbles(prefix_1)
    key1 = prefix_1 + '1'
    key2 = prefix_1 + '2'
    key3 = prefix_1 + '3'
    trie.update(key1.encode(), rlp_encode(['v1']))
    trie.update(key2.encode(), rlp_encode(['v1']))
    trie.update(key3.encode(), rlp_encode(['v1']))
    seen_prefix = []
    last_node = trie._get_last_node_for_prfx(trie.root_node, prefix_1_nibbles,
                                             seen_prfx=seen_prefix)
    assert trie._get_node_type(last_node) == NODE_TYPE_EXTENSION
    assert len(seen_prefix) > 0
    assert starts_with(prefix_1_nibbles, seen_prefix)
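# A note on the nibble encoding relied on by the assertions above (inferred
# from how bin_to_nibbles is used here; the actual helper lives in the trie
# utilities): each byte of a key is split into its high and low 4-bit nibbles,
# so for example
#
#     bin_to_nibbles('abcd')  ->  [6, 1, 6, 2, 6, 3, 6, 4]   # 'a' is 0x61, ...
#     bin_to_nibbles('85')    ->  [3, 8, 3, 5]               # '8' is 0x38, '5' is 0x35
#
# That is why, after the shared high nibble 3 selects the first branch child,
# next_node[8] and next_node[9] are indexed by the low nibbles of '8' and '9',
# and the remaining leaf keys compare equal to bin_to_nibbles('5') and
# bin_to_nibbles('6').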
class PruningState(State):
    """
    This class is used to store the committed root hash of the trie in the db.
    The committed root hash is only updated once a batch gets written to the
    ledger. It might happen that a few batches are in 3 phase commit and the
    node crashes. Now when the node restarts, it restores the db from the
    committed root hash and all entries for uncommitted batches will be
    ignored.
    """

    # SOME KEY THAT DOES NOT COLLIDE WITH ANY STATE VARIABLE'S NAME
    rootHashKey = b'\x88\xc8\x88 \x9a\xa7\x89\x1b'

    def __init__(self, keyValueStorage: KeyValueStorage):
        self._kv = keyValueStorage
        if self.rootHashKey in self._kv:
            rootHash = bytes(self._kv.get(self.rootHashKey))
        else:
            rootHash = BLANK_ROOT
            self._kv.put(self.rootHashKey, BLANK_ROOT)
        self._trie = Trie(PersistentDB(self._kv), rootHash)

    @property
    def head(self):
        # The current head of the state, if the state is a merkle tree then
        # head is the root
        return self._trie.root_node

    @property
    def committedHead(self):
        # The committed head of the state, if the state is a merkle tree then
        # head is the root
        return self._hash_to_node(self.committedHeadHash)

    def get_head_by_hash(self, root_hash):
        # return node of a merkle tree by given hash
        return self._hash_to_node(root_hash)

    def _hash_to_node(self, node_hash):
        if node_hash == BLANK_ROOT:
            return BLANK_NODE
        return self._trie._decode_to_node(node_hash)

    def set(self, key: bytes, value: bytes):
        self._trie.update(key, rlp_encode([value]))

    def get(self, key: bytes, isCommitted: bool = True) -> Optional[bytes]:
        if not isCommitted:
            val = self._trie.get(key)
        else:
            val = self._trie._get(self.committedHead,
                                  bin_to_nibbles(to_string(key)))
        if val:
            return self.get_decoded(val)

    def get_for_root_hash(self, root_hash, key: bytes) -> Optional[bytes]:
        root = self._hash_to_node(root_hash)
        val = self._trie._get(root, bin_to_nibbles(to_string(key)))
        if val:
            return self.get_decoded(val)

    def get_all_leaves_for_root_hash(self, root_hash):
        node = self._hash_to_node(root_hash)
        leaves = self._trie.to_dict(node)
        return leaves

    def remove(self, key: bytes):
        self._trie.delete(key)

    def commit(self, rootHash=None, rootNode=None):
        if rootNode:
            rootHash = self._trie._encode_node(rootNode)
        elif rootHash and isHex(rootHash):
            if isinstance(rootHash, str):
                rootHash = rootHash.encode()
            rootHash = unhexlify(rootHash)
        elif rootHash:
            rootHash = rootHash
        else:
            rootHash = self.headHash
        self._kv.put(self.rootHashKey, rootHash)

    def revertToHead(self, headHash=None):
        head = self._hash_to_node(headHash)
        self._trie.replace_root_hash(self._trie.root_node, head)

    # Proofs are always generated over committed state
    def generate_state_proof(self, key: bytes, root=None, serialize=False,
                             get_value=False):
        return self._trie.generate_state_proof(key, root, serialize,
                                               get_value=get_value)

    def generate_state_proof_for_keys_with_prefix(self, key_prfx, root=None,
                                                  serialize=False,
                                                  get_value=False):
        return self._trie.generate_state_proof_for_keys_with_prefix(
            key_prfx, root, serialize, get_value=get_value)

    @staticmethod
    def verify_state_proof(root, key, value, proof_nodes, serialized=False):
        encoded_key, encoded_value = PruningState.encode_kv_for_verification(
            key, value)
        return Trie.verify_spv_proof(root, encoded_key, encoded_value,
                                     proof_nodes, serialized)

    @staticmethod
    def verify_state_proof_multi(root, key_values, proof_nodes,
                                 serialized=False):
        encoded_key_values = dict(
            PruningState.encode_kv_for_verification(k, v)
            for k, v in key_values.items())
        return Trie.verify_spv_proof_multi(root, encoded_key_values,
                                           proof_nodes, serialized)

    @staticmethod
    def encode_kv_for_verification(key, value):
        encoded_key = key.encode() if isinstance(key, str) else key
        encoded_value = rlp_encode([value]) if value is not None else b''
        return encoded_key, encoded_value

    @property
    def as_dict(self):
        d = self._trie.to_dict()
        return {k: self.get_decoded(v) for k, v in d.items()}

    @property
    def headHash(self):
        """
        The hash of the current head of the state, if the state is a merkle
        tree then hash of the root
        :return:
        """
        return self._trie.root_hash

    @property
    def committedHeadHash(self):
        return self._kv.get(self.rootHashKey)

    @property
    def closed(self):
        return not self._kv or self._kv.closed

    @property
    def isEmpty(self):
        return self._kv and self.committedHeadHash == BLANK_ROOT

    def close(self):
        if self._kv:
            self._kv.close()
            self._kv = None

    @staticmethod
    def get_decoded(encoded):
        return rlp_decode(encoded)[0]
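# Illustrative usage of PruningState (a sketch, not part of the class): the
# working head moves on every set(), while the committed head only moves on
# commit(), and revertToHead() rolls the working trie back to a saved root.
# KeyValueStorageInMemory is assumed here only as a convenient KeyValueStorage
# implementation for the example.
#
#     state = PruningState(KeyValueStorageInMemory())
#     state.set(b'key1', b'value1')
#     assert state.get(b'key1', isCommitted=False) == b'value1'
#     assert state.get(b'key1', isCommitted=True) is None    # nothing committed yet
#     state.commit()                                          # commit the current head
#     assert state.get(b'key1', isCommitted=True) == b'value1'
#     state.set(b'key2', b'value2')                           # uncommitted write
#     state.revertToHead(state.committedHeadHash)             # discard it
#     assert state.get(b'key2', isCommitted=False) is None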
class PruningState(State):
    # This class is used to store the
    # committed root hash of the trie in the db.
    # The committed root hash is only updated once a batch gets written to the
    # ledger. It might happen that a few batches are in 3 phase commit and the
    # node crashes. Now when the node restarts, it restores the db from the
    # committed root hash and all entries for uncommitted batches will be
    # ignored

    # some key that does not collide with any state variable's name
    rootHashKey = b'\x88\xc8\x88 \x9a\xa7\x89\x1b'

    def __init__(self, keyValueStorage: KeyValueStorage):
        self._kv = keyValueStorage
        if self.rootHashKey in self._kv:
            rootHash = bytes(self._kv.get(self.rootHashKey))
        else:
            rootHash = BLANK_ROOT
            self._kv.put(self.rootHashKey, BLANK_ROOT)
        self._trie = Trie(PersistentDB(self._kv), rootHash)

    @property
    def head(self):
        # The current head of the state, if the state is a merkle tree then
        # head is the root
        return self._trie.root_node

    @property
    def committedHead(self):
        # The committed head of the state, if the state is a merkle tree then
        # head is the root
        if self.committedHeadHash == BLANK_ROOT:
            return BLANK_NODE
        else:
            return self._trie._decode_to_node(self.committedHeadHash)

    def set(self, key: bytes, value: bytes):
        self._trie.update(key, rlp_encode([value]))

    def get(self, key: bytes, isCommitted: bool = True):
        if not isCommitted:
            val = self._trie.get(key)
        else:
            val = self._trie._get(self.committedHead,
                                  bin_to_nibbles(to_string(key)))
        if val:
            return rlp_decode(val)[0]

    def remove(self, key: bytes):
        self._trie.delete(key)

    def commit(self, rootHash=None, rootNode=None):
        if rootNode:
            rootHash = self._trie._encode_node(rootNode)
        elif rootHash and isHex(rootHash):
            if isinstance(rootHash, str):
                rootHash = rootHash.encode()
            rootHash = unhexlify(rootHash)
        elif rootHash:
            rootHash = rootHash
        else:
            rootHash = self.headHash
        self._kv.put(self.rootHashKey, rootHash)

    def revertToHead(self, headHash=None):
        if headHash != BLANK_ROOT:
            head = self._trie._decode_to_node(headHash)
        else:
            head = BLANK_NODE
        self._trie.replace_root_hash(self._trie.root_node, head)

    @property
    def as_dict(self):
        d = self._trie.to_dict()
        return {k: rlp_decode(v)[0] for k, v in d.items()}

    @property
    def headHash(self):
        """
        The hash of the current head of the state, if the state is a merkle
        tree then hash of the root
        :return:
        """
        return self._trie.root_hash

    @property
    def committedHeadHash(self):
        return self._kv.get(self.rootHashKey)

    @property
    def isEmpty(self):
        return self.committedHeadHash == BLANK_ROOT

    def close(self):
        if self._kv:
            self._kv.close()
            self._kv = None
def test_get_values_at_roots_in_memory():
    # Update key with different values but preserve root after each update
    # Check values of keys with different previous roots and check that they
    # are correct
    trie = Trie(PersistentDB(KeyValueStorageInMemory()))
    trie.update('k1'.encode(), rlp_encode(['v1']))
    # print state.root_hash.encode('hex')
    # print state.root_node
    val = trie.get('k1')
    print(rlp_decode(val))
    oldroot1 = trie.root_node
    old_root1_hash = trie.root_hash
    assert trie._decode_to_node(old_root1_hash) == oldroot1

    trie.update('k1'.encode(), rlp_encode(['v1a']))
    val = trie.get('k1')
    assert rlp_decode(val) == [b'v1a', ]
    # Already saved roots help in getting previous values
    oldval = trie.get_at(oldroot1, 'k1')
    assert rlp_decode(oldval) == [b'v1', ]
    oldroot1a = trie.root_node

    trie.update('k1'.encode(), rlp_encode([b'v1b']))
    val = trie.get('k1')
    assert rlp_decode(val) == [b'v1b']
    oldval = trie.get_at(oldroot1a, 'k1')
    assert rlp_decode(oldval) == [b'v1a', ]
    oldval = trie.get_at(oldroot1, 'k1')
    assert rlp_decode(oldval) == [b'v1', ]
    oldroot1b = trie.root_node

    trie.update('k1'.encode(), rlp_encode([b'v1c']))
    val = trie.get('k1')
    assert rlp_decode(val) == [b'v1c', ]
    oldval = trie.get_at(oldroot1b, 'k1')
    assert rlp_decode(oldval) == [b'v1b', ]
    oldval = trie.get_at(oldroot1a, 'k1')
    assert rlp_decode(oldval) == [b'v1a', ]
    oldval = trie.get_at(oldroot1, 'k1')
    assert rlp_decode(oldval) == [b'v1', ]
    oldroot1c = trie.root_node

    trie.delete('k1'.encode())
    assert trie.get('k1') == BLANK_NODE
    oldval = trie.get_at(oldroot1c, 'k1')
    assert rlp_decode(oldval) == [b'v1c', ]
    oldval = trie.get_at(oldroot1b, 'k1')
    assert rlp_decode(oldval) == [b'v1b', ]
    oldval = trie.get_at(oldroot1a, 'k1')
    assert rlp_decode(oldval) == [b'v1a', ]
    oldval = trie.get_at(oldroot1, 'k1')
    assert rlp_decode(oldval) == [b'v1', ]

    trie.root_node = oldroot1c
    val = trie.get('k1')
    assert rlp_decode(val) == [b'v1c', ]
    trie.root_node = oldroot1
    val = trie.get('k1')
    assert rlp_decode(val) == [b'v1', ]