def _get_fees(self, is_committed=False, with_proof=False):
    fees = {}
    proof = None
    try:
        if with_proof:
            # Build a state proof for the fees key and wrap it together with
            # the BLS multi-signature stored for the corresponding state root.
            proof, serz = self.state.generate_state_proof(
                self.fees_state_key, serialize=True, get_value=True)
            if serz:
                serz = rlp_decode(serz)[0]
            root_hash = self.state.committedHeadHash if is_committed else self.state.headHash
            encoded_root_hash = state_roots_serializer.serialize(
                bytes(root_hash))
            multi_sig = self.bls_store.get(encoded_root_hash)
            if multi_sig:
                encoded_proof = proof_nodes_serializer.serialize(proof)
                proof = {
                    MULTI_SIGNATURE: multi_sig.as_dict(),
                    ROOT_HASH: encoded_root_hash,
                    PROOF_NODES: encoded_proof
                }
            else:
                proof = {}
        else:
            serz = self.state.get(self.fees_state_key,
                                  isCommitted=is_committed)
        if serz:
            fees = self.state_serializer.deserialize(serz)
    except KeyError:
        pass
    if with_proof:
        return fees, proof
    return fees
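A minimal usage sketch (illustrative only; `handler` stands for an instance of the class above with its state populated): without `with_proof` the call returns just the deserialized fees mapping, while `with_proof=True` additionally returns the proof dict built above, which is empty when no BLS multi-signature is stored for the root hash.

fees = handler._get_fees(is_committed=True)
fees, proof = handler._get_fees(is_committed=True, with_proof=True)
# proof == {} when bls_store has no multi-signature for the root hash;
# otherwise it carries MULTI_SIGNATURE, ROOT_HASH and PROOF_NODES.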
Example #2
def update_state(self, txn, prev_result, request, is_committed=False):
    self._validate_txn_type(txn)
    seq_no = get_seq_no(txn)
    txn_time = get_txn_time(txn)
    _, taa_list = self.state.generate_state_proof_for_keys_with_prefix(
        StaticTAAHelper.state_path_taa_digest(""),
        serialize=False,
        get_value=True)
    for encode_key, encode_data in taa_list.items():
        taa = rlp_decode(encode_data)
        taa, last_seq_no, last_update_time = self._decode_state_value(
            taa[0])
        digest = StaticTAAHelper.get_digest_from_state_key(encode_key)
        if TXN_AUTHOR_AGREEMENT_RETIREMENT_TS not in taa or taa.get(
                TXN_AUTHOR_AGREEMENT_RETIREMENT_TS, 0) > txn_time:
            self._set_taa_to_state(
                digest,
                seq_no,
                txn_time,
                taa[TXN_AUTHOR_AGREEMENT_TEXT],
                taa[TXN_AUTHOR_AGREEMENT_VERSION],
                taa.get(TXN_AUTHOR_AGREEMENT_RATIFICATION_TS,
                        last_update_time),
                retirement_ts=txn_time)
    self.state.remove(StaticTAAHelper.state_path_taa_latest())
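The loop above rewrites every agreement that is not yet retired, or whose retirement time still lies after `txn_time`, stamping it with `retirement_ts=txn_time`. Restating that predicate in isolation (hypothetical helper name, same constants as imported by the handler above):

def needs_retirement(taa, txn_time):
    # True when no retirement timestamp is set yet, or the recorded
    # retirement timestamp is still later than the current txn_time.
    return (TXN_AUTHOR_AGREEMENT_RETIREMENT_TS not in taa
            or taa.get(TXN_AUTHOR_AGREEMENT_RETIREMENT_TS, 0) > txn_time)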
Example #3
    def get_result(self, request: Request):
        address = request.operation[ADDRESS]
        from_seqno = request.operation.get(FROM_SEQNO)
        encoded_root_hash = state_roots_serializer.serialize(
            bytes(self.state.committedHeadHash))
        proof, rv = self.state.generate_state_proof_for_keys_with_prefix(
            address, serialize=True, get_value=True)
        multi_sig = self.database_manager.bls_store.get(encoded_root_hash)
        if multi_sig:
            encoded_proof = proof_nodes_serializer.serialize(proof)
            proof = {
                MULTI_SIGNATURE: multi_sig.as_dict(),
                ROOT_HASH: encoded_root_hash,
                PROOF_NODES: encoded_proof
            }
        else:
            proof = {}

        # The outputs need to be returned in sorted order since each node's reply should be the same.
        # Since the number of outputs can be large, a conscious choice was made not to use
        # `operator.attrgetter` on an already constructed list.
        outputs = SortedItems()
        for k, v in rv.items():
            addr, seq_no = TokenStaticHelper.parse_state_key(k.decode())
            amount = rlp_decode(v)[0]
            if not amount:
                continue
            outputs.add(Output(addr, int(seq_no), int(amount)))

        utxos = outputs.sorted_list
        next_seqno = None
        if from_seqno:
            idx = next((i for i, utxo in enumerate(utxos)
                        if utxo.seqNo >= from_seqno), None)
            # Compare against None (not truthiness) so that a match at
            # index 0 is not mistaken for "no match".
            if idx is not None:
                utxos = utxos[idx:]
            else:
                utxos = []
        if len(utxos) > UTXO_LIMIT:
            next_seqno = utxos[UTXO_LIMIT].seqNo
            utxos = utxos[:UTXO_LIMIT]

        result = {
            f.IDENTIFIER.nm: request.identifier,
            f.REQ_ID.nm: request.reqId,
            OUTPUTS: utxos
        }

        result.update(request.operation)
        if next_seqno:
            result[NEXT_SEQNO] = next_seqno
        if proof:
            res_sub = deepcopy(result)
            res_sub[STATE_PROOF] = proof
            if len(json.dumps(res_sub)) <= self._msg_limit:
                result = res_sub
        return result
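Because the reply is truncated to `UTXO_LIMIT` outputs and exposes `NEXT_SEQNO` when truncation happened, a caller is expected to page through an address's UTXOs by resending the request with `FROM_SEQNO` set to that value. A rough client-side sketch (the `send_get_utxo` callable and its reply shape are assumptions for illustration):

def fetch_all_utxos(send_get_utxo, address):
    # Hypothetical pagination loop over replies shaped like the result above.
    utxos, from_seqno = [], None
    while True:
        reply = send_get_utxo(address, from_seqno)  # assumed transport helper
        utxos.extend(reply[OUTPUTS])
        from_seqno = reply.get(NEXT_SEQNO)
        if from_seqno is None:  # last page reached
            return utxos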
Example #4
def test_proof_prefix_with_other_nodes():
    node_trie = Trie(PersistentDB(KeyValueStorageInMemory()))
    client_trie = Trie(PersistentDB(KeyValueStorageInMemory()))
    prefix = 'abcdefgh'

    other_nodes_count = 1000
    prefix_nodes_count = 100

    # Some nodes before prefix node
    for _ in range(other_nodes_count):
        node_trie.update(
            randomString(randint(8, 19)).encode(),
            rlp_encode([randomString(15)]))

    keys_suffices = set()
    while len(keys_suffices) != prefix_nodes_count:
        keys_suffices.add(randint(25, 250000))

    key_vals = {
        '{}{}'.format(prefix, k): str(randint(3000, 5000))
        for k in keys_suffices
    }
    for k, v in key_vals.items():
        node_trie.update(k.encode(), rlp_encode([v]))

    # Some nodes after prefix node
    for _ in range(other_nodes_count):
        node_trie.update(
            randomString(randint(8, 19)).encode(),
            rlp_encode([randomString(15)]))

    proof_nodes, val = node_trie.generate_state_proof_for_keys_with_prefix(
        prefix.encode(), get_value=True)
    encoded = {k.encode(): rlp_encode([v]) for k, v in key_vals.items()}
    # Check returned values match the actual values
    assert encoded == val
    assert client_trie.verify_spv_proof_multi(node_trie.root_hash, encoded,
                                              proof_nodes)
    # Check without value
    proof_nodes = node_trie.generate_state_proof_for_keys_with_prefix(
        prefix.encode(), get_value=False)
    assert client_trie.verify_spv_proof_multi(node_trie.root_hash, encoded,
                                              proof_nodes)

    # Change the value of one key; the proof should no longer verify against it
    encoded_new = deepcopy(encoded)
    random_key = next(iter(encoded_new.keys()))
    encoded_new[random_key] = rlp_encode(
        [rlp_decode(encoded_new[random_key])[0] + b'2212'])
    assert not client_trie.verify_spv_proof_multi(node_trie.root_hash,
                                                  encoded_new, proof_nodes)
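The same prefix-proof round trip in condensed form, using only the calls exercised by the test above (a sketch under the same imports; the tiny key set is illustrative):

node_trie = Trie(PersistentDB(KeyValueStorageInMemory()))
client_trie = Trie(PersistentDB(KeyValueStorageInMemory()))
node_trie.update(b'unrelated', rlp_encode(['x']))
kvs = {b'fee1': rlp_encode(['10']), b'fee2': rlp_encode(['20'])}
for k, v in kvs.items():
    node_trie.update(k, v)
proof_nodes, values = node_trie.generate_state_proof_for_keys_with_prefix(
    b'fee', get_value=True)
# The returned values and the multi-key proof should match what was stored.
assert values == kvs
assert client_trie.verify_spv_proof_multi(node_trie.root_hash, kvs, proof_nodes)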
Example #6
    def get_all_utxo(self, request: Request):
        address = request.operation[ADDRESS]
        encoded_root_hash = state_roots_serializer.serialize(
            bytes(self.state.committedHeadHash))
        proof, rv = self.state.generate_state_proof_for_keys_with_prefix(
            address, serialize=True, get_value=True)
        multi_sig = self.bls_store.get(encoded_root_hash)
        if multi_sig:
            encoded_proof = proof_nodes_serializer.serialize(proof)
            proof = {
                MULTI_SIGNATURE: multi_sig.as_dict(),
                ROOT_HASH: encoded_root_hash,
                PROOF_NODES: encoded_proof
            }
        else:
            proof = {}

        # The outputs need to be returned in sorted order since each node's reply should be the same.
        # Since the number of outputs can be large, a conscious choice was made not to use
        # `operator.attrgetter` on an already constructed list.
        outputs = SortedItems()
        for k, v in rv.items():
            addr, seq_no = self.parse_state_key(k.decode())
            amount = rlp_decode(v)[0]
            if not amount:
                continue
            outputs.add(Output(addr, int(seq_no), int(amount)))

        result = {
            f.IDENTIFIER.nm: request.identifier,
            f.REQ_ID.nm: request.reqId,
            OUTPUTS: outputs.sorted_list
        }
        if proof:
            result[STATE_PROOF] = proof

        result.update(request.operation)
        return result