Example #1
def test_state_proof_for_get_fee(looper, helpers,
                                 nodeSetWithIntegratedTokenPlugin,
                                 sdk_pool_handle):
    fees_1 = {NYM_FEES_ALIAS: 1}
    fees_2 = {NYM_FEES_ALIAS: 2}
    node_set = [n.nodeIbStasher for n in nodeSetWithIntegratedTokenPlugin]

    helpers.general.do_set_fees(fees_1)
    response1 = helpers.general.do_get_fees()
    check_state_proof(response1, build_path_for_set_fees(),
                      JsonSerializer().serialize(fees_1))

    config_state = nodeSetWithIntegratedTokenPlugin[0].states[2]
    assert config_state.headHash == config_state.committedHeadHash

    # We delay commit messages to get different committed and uncommitted roots for ledger
    with delay_rules(node_set, cDelay()):
        helpers.general.set_fees_without_waiting(fees_2)
        looper.runFor(3)
        response2 = helpers.general.do_get_fees()
        # Returned state proof for first set_fees, which is committed
        check_state_proof(response2, build_path_for_set_fees(),
                          JsonSerializer().serialize(fees_1))
        # Let's check that uncommitted state differs from committed
        assert config_state.headHash != config_state.committedHeadHash
Example #2
def test_transform_txn_for_catchup_rep(alh, db_manager,
                                       initial_domain_size, initial_pool_size, initial_config_size):
    do_apply_audit_txn(alh,
                       txns_count=10, ledger_id=DOMAIN_LEDGER_ID,
                       view_no=0, pp_sq_no=1, txn_time=10000,
                       has_audit_txn=True)

    audit_txn_after_serialization = \
        JsonSerializer.loads(
            JsonSerializer.dumps(
                alh.ledger.get_last_txn()
            )
        )

    transformed_audit_txn = alh.transform_txn_for_ledger(audit_txn_after_serialization)
    check_audit_txn(txn=transformed_audit_txn,
                    view_no=0, pp_seq_no=1,
                    seq_no=1, txn_time=10000,
                    ledger_id=DOMAIN_LEDGER_ID,
                    txn_root=db_manager.get_ledger(DOMAIN_LEDGER_ID).uncommitted_root_hash,
                    state_root=db_manager.get_state(DOMAIN_LEDGER_ID).headHash,
                    pool_size=initial_pool_size,
                    domain_size=initial_domain_size + 10,
                    config_size=initial_config_size,
                    last_pool_seqno=None,
                    last_domain_seqno=None,
                    last_config_seqno=None)
Example #3
def migrate_all_hash_stores(node_data_directory):
    # the new hash store (merkle tree) will be recovered from the new transaction log after re-start
    # just delete the current hash store
    new_merkle_nodes = os.path.join(node_data_directory, '_merkleNodes')
    new_merkle_leaves = os.path.join(node_data_directory, '_merkleLeaves')
    new_merkle_nodes_bin = os.path.join(
        node_data_directory, '_merkleNodes.bin')
    new_merkle_leaves_bin = os.path.join(
        node_data_directory, '_merkleLeaves.bin')
    new_merkle_nodes_config_bin = os.path.join(
        node_data_directory, 'config_merkleNodes.bin')
    new_merkle_leaves_config_bin = os.path.join(
        node_data_directory, 'config_merkleLeaves.bin')

    if os.path.exists(new_merkle_nodes):
        shutil.rmtree(new_merkle_nodes)
    if os.path.exists(new_merkle_leaves):
        shutil.rmtree(new_merkle_leaves)
    if os.path.exists(new_merkle_nodes_bin):
        os.remove(new_merkle_nodes_bin)
    if os.path.exists(new_merkle_leaves_bin):
        os.remove(new_merkle_leaves_bin)
    if os.path.exists(new_merkle_nodes_config_bin):
        os.remove(new_merkle_nodes_config_bin)
    if os.path.exists(new_merkle_leaves_config_bin):
        os.remove(new_merkle_leaves_config_bin)

    # open new Ledgers
    fields = getTxnOrderedFields()
    __open_old_ledger(node_data_directory, config.poolTransactionsFile,
                      'pool', serializer=JsonSerializer())
    __open_old_ledger(node_data_directory, config.domainTransactionsFile,
                      'domain', serializer=CompactSerializer(fields=fields))
    __open_old_ledger(node_data_directory, config.configTransactionsFile,
                      'config', serializer=JsonSerializer())
Example #4
def migrate_genesis_txn(base_dir):
    for suffix in ('sandbox', 'live', 'local'):
        old_domain_genesis = os.path.join(
            base_dir, 'transactions_{}'.format(suffix))
        old_pool_genesis = os.path.join(
            base_dir, 'pool_transactions_{}'.format(suffix))

        new_domain_genesis = os.path.join(
            base_dir, 'domain_transactions_{}_genesis'.format(suffix))
        new_pool_genesis = os.path.join(
            base_dir, 'pool_transactions_{}_genesis'.format(suffix))

        if os.path.exists(old_domain_genesis):
            os.remove(old_domain_genesis)
        if os.path.exists(old_pool_genesis):
            os.remove(old_pool_genesis)

        if os.path.exists(new_domain_genesis):
            old_ser = CompactSerializer(getTxnOrderedFields())
            new_ser = JsonSerializer()
            with open(new_domain_genesis, 'r') as f1:
                with open(old_domain_genesis, 'w') as f2:
                    for line in store_utils.cleanLines(f1):
                        txn = new_ser.deserialize(line)
                        txn = old_ser.serialize(txn)
                        f2.write(txn)
            os.remove(new_domain_genesis)
        if os.path.exists(new_pool_genesis):
            os.rename(new_pool_genesis, old_pool_genesis)
Example #5
 def __init__(self,
              config_state: PruningState,
              utxo_cache: UTXOCache):
     super().__init__()
     self.config_state = config_state
     self.utxo_cache = utxo_cache
     self.state_serializer = JsonSerializer()
Example #6
def migrate_genesis_txn(base_dir):
    for suffix in ('sandbox', 'live', 'local'):
        old_domain_genesis = os.path.join(
            base_dir, 'transactions_{}'.format(suffix))
        old_pool_genesis = os.path.join(
            base_dir, 'pool_transactions_{}'.format(suffix))

        new_domain_genesis = os.path.join(
            base_dir, 'domain_transactions_{}_genesis'.format(suffix))
        new_pool_genesis = os.path.join(
            base_dir, 'pool_transactions_{}_genesis'.format(suffix))

        if os.path.exists(old_domain_genesis):
            os.remove(old_domain_genesis)
        if os.path.exists(old_pool_genesis):
            os.remove(old_pool_genesis)

        if os.path.exists(new_domain_genesis):
            old_ser = CompactSerializer(getTxnOrderedFields())
            new_ser = JsonSerializer()
            with open(new_domain_genesis, 'r') as f1:
                with open(old_domain_genesis, 'w') as f2:
                    for line in store_utils.cleanLines(f1):
                        txn = new_ser.deserialize(line)
                        txn = old_ser.serialize(txn)
                        f2.write(txn)
            os.remove(new_domain_genesis)
        if os.path.exists(new_pool_genesis):
            os.rename(new_pool_genesis, old_pool_genesis)
Example #7
def explorer():
    #args = read_args()
    config = getConfig()
    result = []
    ledger_data_dir = get_ledger_dir("", "")
    read_copy_ledger_data_dir = None
    try:
        # RocksDB supports real read-only mode and does not need to have a ledger copy.
        if config.hashStore['type'].lower() != HS_ROCKSDB:
            config.db_transactions_config = None
            # NOTE: this approach works well only for small ledgers.
            tmp = make_copy_of_ledger(ledger_data_dir)

            # Let's be paranoid and avoid removing the ledger instead of its copy.
            ledger_path = Path(ledger_data_dir)
            ledger_copy_path = Path(tmp)
            assert ledger_path != ledger_copy_path
            assert ledger_copy_path not in ledger_path.parents

            read_copy_ledger_data_dir = tmp
            ledger_data_dir = read_copy_ledger_data_dir
        elif config.db_transactions_config is not None:
            # This avoids debug log creation on each read_ledger run
            config.db_transactions_config['db_log_dir'] = '/dev/null'
        storage = get_storage("domain", ledger_data_dir)
        serializer = JsonSerializer()
        for seqNo, txn in storage.iterator(start=0, end=100):
            txn = ledger_txn_serializer.deserialize(txn)
            result.append(serializer.serialize(txn, toBytes=False))
        return result
    finally:
        # Remove the temporary ledger copy only after the txns have been read.
        if read_copy_ledger_data_dir:
            shutil.rmtree(read_copy_ledger_data_dir)
Example #8
def check_audit_txn(txn,
                    view_no,
                    pp_seq_no,
                    seq_no,
                    txn_time,
                    txn_roots,
                    state_roots,
                    pool_size,
                    domain_size,
                    config_size,
                    last_domain_seqno,
                    last_pool_seqno,
                    last_config_seqno,
                    primaries,
                    digest='',
                    other_sizes={}):
    expectedLedgerRoots = {}
    txn_roots = {k: Ledger.hashToStr(v) for k, v in txn_roots.items()}
    state_roots = {k: Ledger.hashToStr(v) for k, v in state_roots.items()}
    # we expect deltas here, that is, the difference between the current audit ledger txn
    # and the audit txn where the corresponding ledger was updated
    if last_domain_seqno:
        expectedLedgerRoots[1] = seq_no - last_domain_seqno
    if last_pool_seqno:
        expectedLedgerRoots[0] = seq_no - last_pool_seqno
    if last_config_seqno:
        expectedLedgerRoots[2] = seq_no - last_config_seqno
    expectedLedgerRoots.update(txn_roots)
    ledger_size = {0: pool_size, 1: domain_size, 2: config_size}
    ledger_size.update(other_sizes)

    expected = {
        "reqSignature": {},
        "txn": {
            "data": {
                "ledgerRoot": expectedLedgerRoots,
                "ver": "1",
                "viewNo": view_no,
                "ppSeqNo": pp_seq_no,
                "ledgerSize": ledger_size,
                "stateRoot": state_roots,
                "primaries": primaries,
                "digest": digest,
            },
            "metadata": {},
            "protocolVersion": CURRENT_PROTOCOL_VERSION,
            "type": "2",  # AUDIT
        },
        "txnMetadata": {
            "seqNo": seq_no,
            "txnTime": txn_time
        },
        "ver": "1"
    }
    txn = JsonSerializer().serialize(txn)
    expected = JsonSerializer().serialize(expected)
    print(txn)
    print(expected)
    assert expected == txn
Example #9
def test_genesis_txn_file_initiator(tempdir, init_genesis_txn_file, genesis_txns):
    # Check that the initiator of genesis txns works:
    # it uses a text file with JsonSerializer by default
    genesis_file = genesis_txn_file(
        os.path.join(tempdir, init_genesis_txn_file))
    assert os.path.exists(genesis_file)
    i = 0
    serializer = JsonSerializer()
    with open(genesis_file) as f:
        for line in store_utils.cleanLines(f.readlines()):
            assert sorted(serializer.deserialize(line).items()
                          ) == sorted(genesis_txns[i].items())
            i += 1
Example #10
def test_genesis_txn_file_initiator(tempdir, init_genesis_txn_file, genesis_txns):
    # Check that the initiator of genesis txns works:
    # it uses a text file with JsonSerializer by default
    genesis_file = genesis_txn_file(
        os.path.join(tempdir, init_genesis_txn_file))
    assert os.path.exists(genesis_file)
    i = 0
    serializer = JsonSerializer()
    with open(genesis_file) as f:
        for line in store_utils.cleanLines(f.readlines()):
            assert sorted(serializer.deserialize(line).items()
                          ) == sorted(genesis_txns[i].items())
            i += 1
Example #11
def view_change_digest(msg: ViewChange) -> str:
    msg_as_dict = msg.__dict__
    msg_as_dict['checkpoints'] = [
        cp.__dict__ for cp in msg_as_dict['checkpoints']
    ]
    serialized = JsonSerializer().dumps(msg_as_dict)
    return sha256(serialized).hexdigest()
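The digest above is deterministic because JsonSerializer emits sorted, compact JSON (the testJsonSerializer example further below shows this); a minimal sketch of that property, using serialize in place of dumps and illustrative field names only:

from hashlib import sha256

from common.serializers.json_serializer import JsonSerializer

# Key order in the source dict does not matter: equal content gives equal bytes,
# hence an equal digest. view_change_digest above relies on dumps() the same way.
d1 = {'viewNo': 1, 'stableCheckpoint': 10}
d2 = {'stableCheckpoint': 10, 'viewNo': 1}
assert sha256(JsonSerializer().serialize(d1)).hexdigest() == \
       sha256(JsonSerializer().serialize(d2)).hexdigest()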
Example #12
def txn_serializer(request):
    if request.param == 'MsgPack':
        return MsgPackSerializer()
    if request.param == 'Json':
        return JsonSerializer()
    if request.param == 'Compact':
        return CompactSerializer(orderedFields)
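A minimal usage sketch for a fixture like the one above, restricted to the MsgPack and Json cases (the Compact case needs the project's orderedFields); the fixture and test names here are illustrative only:

import pytest

from common.serializers.json_serializer import JsonSerializer
from common.serializers.msgpack_serializer import MsgPackSerializer


@pytest.fixture(params=['MsgPack', 'Json'])
def simple_serializer(request):
    if request.param == 'MsgPack':
        return MsgPackSerializer()
    return JsonSerializer()


def test_round_trip(simple_serializer):
    # Both serializers are expected to round-trip a plain mapping unchanged.
    data = {'alias': 'Node1', 'services': ['VALIDATOR'], 'port': 9701}
    assert simple_serializer.deserialize(simple_serializer.serialize(data)) == data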
Example #13
def migrate_all_ledgers_for_node(node_data_directory):
    # using default ledger names
    __migrate_ledger(node_data_directory,
                     config.poolTransactionsFile, config.poolTransactionsFile,
                     serializer=JsonSerializer())
    __migrate_ledger(
        node_data_directory,
        config.configTransactionsFile,
        config.configTransactionsFile,
        serializer=JsonSerializer())

    # domain ledger uses custom CompactSerializer and old file name
    fields = getTxnOrderedFields()
    __migrate_ledger(node_data_directory,
                     config.domainTransactionsFile.replace(
                         'domain_', ''), config.domainTransactionsFile,
                     serializer=CompactSerializer(fields=fields))
Example #14
def check_audit_txn(txn, view_no, pp_seq_no, seq_no, txn_time, ledger_id,
                    txn_root, state_root, pool_size, domain_size, config_size,
                    last_domain_seqno, last_pool_seqno, last_config_seqno):
    expectedLedgerRoots = {}
    # we expect deltas here, that is, the difference between the current audit ledger txn
    # and the audit txn where the corresponding ledger was updated
    if last_domain_seqno:
        expectedLedgerRoots[1] = seq_no - last_domain_seqno
    if last_pool_seqno:
        expectedLedgerRoots[0] = seq_no - last_pool_seqno
    if last_config_seqno:
        expectedLedgerRoots[2] = seq_no - last_config_seqno
    expectedLedgerRoots[ledger_id] = Ledger.hashToStr(txn_root)

    expected = {
        "reqSignature": {},
        "txn": {
            "data": {
                "ledgerRoot": expectedLedgerRoots,
                "ver": "1",
                "viewNo": view_no,
                "ppSeqNo": pp_seq_no,
                "ledgerSize": {
                    0: pool_size,
                    1: domain_size,
                    2: config_size
                },
                "stateRoot": {
                    ledger_id: Ledger.hashToStr(state_root),
                }
            },
            "metadata": {},
            "protocolVersion": CURRENT_PROTOCOL_VERSION,
            "type": "2",  # AUDIT
        },
        "txnMetadata": {
            "seqNo": seq_no,
            "txnTime": txn_time
        },
        "ver": "1"
    }
    txn = JsonSerializer().serialize(txn)
    expected = JsonSerializer().serialize(expected)
    print(txn)
    print(expected)
    assert expected == txn
Example #15
def taa_value(result, text, version):
    return JsonSerializer().serialize({
        "val": {
            TXN_AUTHOR_AGREEMENT_TEXT: text,
            TXN_AUTHOR_AGREEMENT_VERSION: version
        },
        "lsn": result[TXN_METADATA_SEQ_NO],
        "lut": result[TXN_METADATA_TIME]
    })
Example #16
def hash_serializer(request):
    if request.param == 'MsgPack':
        return MsgPackSerializer()
    if request.param == 'Json':
        return JsonSerializer()
    if request.param == 'Signing':
        return SigningSerializer()
    if request.param == 'Compact':
        return CompactSerializer(orderedFields)
Example #17
def check_audit_txn(txn, view_no, pp_seq_no, seq_no, txn_time, ledger_id,
                    txn_root, state_root, pool_size, domain_size, config_size,
                    last_domain_seqno, last_pool_seqno, last_config_seqno):
    expectedLedgerRoots = {}
    if last_domain_seqno:
        expectedLedgerRoots["1"] = last_domain_seqno
    if last_pool_seqno:
        expectedLedgerRoots["0"] = last_pool_seqno
    if last_config_seqno:
        expectedLedgerRoots["2"] = last_config_seqno
    expectedLedgerRoots[str(ledger_id)] = Ledger.hashToStr(txn_root)

    expected = {
        "reqSignature": {},
        "txn": {
            "data": {
                "ledgerRoot": expectedLedgerRoots,
                "ver": "1",
                "viewNo": view_no,
                "ppSeqNo": pp_seq_no,
                "ledgerSize": {
                    "0": pool_size,
                    "1": domain_size,
                    "2": config_size
                },
                "stateRoot": {
                    str(ledger_id): Ledger.hashToStr(state_root),
                }
            },
            "metadata": {},
            "protocolVersion": CURRENT_PROTOCOL_VERSION,
            "type": "2",  # AUDIT
        },
        "txnMetadata": {
            "seqNo": seq_no,
            "txnTime": txn_time
        },
        "ver": "1"
    }
    txn = JsonSerializer().serialize(txn)
    expected = JsonSerializer().serialize(expected)
    print(txn)
    print(expected)
    assert expected == txn
Example #18
def taa_aml_value(result, version, aml, context):
    return JsonSerializer().serialize({
        "val": {
            AML_VERSION: version,
            AML: aml,
            AML_CONTEXT: context
        },
        "lsn": result[TXN_METADATA_SEQ_NO],
        "lut": result[TXN_METADATA_TIME]
    })
Example #19
def test_pool_genesis_txns(bootstrap, pool_genesis_file):
    serializer = JsonSerializer()
    with open(pool_genesis_file) as f:
        for line in store_utils.cleanLines(f.readlines()):
            txn = serializer.deserialize(line)
            assert get_seq_no(txn)
            assert get_txn_id(txn)
            assert get_payload_data(txn)
            assert get_type(txn) == NODE
            assert get_version(txn) == "1"
            assert get_payload_data(txn)[TARGET_NYM]
            data = get_payload_data(txn).get(DATA)
            assert data
            assert data[ALIAS]
            assert data[CLIENT_IP]
            assert data[CLIENT_PORT]
            assert data[NODE_IP]
            assert data[NODE_PORT]
            assert data[SERVICES] == [VALIDATOR]
            assert data[BLS_KEY]
Example #20
def taa_value(result, text, version, digest, retired=False):
    value = {
        TXN_AUTHOR_AGREEMENT_TEXT: text,
        TXN_AUTHOR_AGREEMENT_VERSION: version,
        TXN_AUTHOR_AGREEMENT_DIGEST: digest
    }
    if retired:
        value[TXN_AUTHOR_AGREEMENT_RETIREMENT_TS] = retired
    return JsonSerializer().serialize({
        "val": value,
        "lsn": result[TXN_METADATA_SEQ_NO],
        "lut": result[TXN_METADATA_TIME]
    })
Example #21
def test_pool_genesis_txns(bootstrap, pool_genesis_file):
    serializer = JsonSerializer()
    with open(pool_genesis_file) as f:
        for line in store_utils.cleanLines(f.readlines()):
            txn = serializer.deserialize(line)
            assert get_seq_no(txn)
            assert get_txn_id(txn)
            assert get_payload_data(txn)
            assert get_type(txn) == NODE
            assert get_version(txn) == "1"
            assert get_protocol_version(txn) is None
            assert get_payload_data(txn)[TARGET_NYM]
            data = get_payload_data(txn).get(DATA)
            assert data
            assert data[ALIAS]
            assert data[CLIENT_IP]
            assert data[CLIENT_PORT]
            assert data[NODE_IP]
            assert data[NODE_PORT]
            assert data[SERVICES] == [VALIDATOR]
            assert data[BLS_KEY]
            assert data[BLS_KEY_PROOF]
Example #22
def test_domain_genesis_txns(bootstrap, domain_genesis_file):
    serializer = JsonSerializer()
    with open(domain_genesis_file) as f:
        i = 0
        for line in store_utils.cleanLines(f.readlines()):
            txn = serializer.deserialize(line)
            assert get_seq_no(txn)
            assert get_payload_data(txn)
            assert get_type(txn) == NYM
            assert get_version(txn) == "1"
            assert get_payload_data(txn)[VERKEY]
            assert get_payload_data(txn)[TARGET_NYM]
            assert ALIAS not in get_payload_data(txn)

            # expect Trustees, then Stewards, then Clients
            if 0 <= i < TRUSTEE_COUNT:
                expected_role = TRUSTEE
            elif TRUSTEE_COUNT <= i < TRUSTEE_COUNT + NODE_COUNT:
                expected_role = STEWARD
            else:
                expected_role = None
            assert get_payload_data(txn).get(ROLE) == expected_role
            i += 1
Example #23
    def static_validation(self, request: Request):
        self._validate_request_type(request)
        try:
            content_as_dict = JsonSerializer.loads(
                request.operation[RS_CONTENT])
        except ValueError:
            raise InvalidClientRequest(
                request.identifier, request.reqId,
                "'{}' must be a JSON serialized string".format(RS_CONTENT))

        if self.is_json_ld_content():
            self.do_static_validation_json_ld(content_as_dict, request)

        self.do_static_validation_content(content_as_dict, request)
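A minimal sketch of what this validation expects; the content payload is hypothetical and only the "RS_CONTENT must be a JSON-serialized string" rule is exercised:

from common.serializers.json_serializer import JsonSerializer

# Hypothetical rich-schema content; RS_CONTENT is the request field referenced above.
content = {'@id': 'some-schema-id', 'name': 'degree'}
rs_content = JsonSerializer().serialize(content, toBytes=False)

# This mirrors what static_validation does with request.operation[RS_CONTENT]:
assert JsonSerializer.loads(rs_content) == content

# A non-JSON string fails with ValueError, which static_validation turns into
# InvalidClientRequest.
try:
    JsonSerializer.loads('not a json string')
    raised = False
except ValueError:
    raised = True
assert raised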
Example #24
def test_domain_genesis_txns(bootstrap, domain_genesis_file):
    serializer = JsonSerializer()
    with open(domain_genesis_file) as f:
        i = 0
        for line in store_utils.cleanLines(f.readlines()):
            txn = serializer.deserialize(line)
            assert get_seq_no(txn)
            assert get_payload_data(txn)
            assert get_type(txn) == NYM
            assert get_version(txn) == "1"
            assert get_protocol_version(txn) is None
            assert get_payload_data(txn)[VERKEY]
            assert get_payload_data(txn)[TARGET_NYM]
            assert ALIAS not in get_payload_data(txn)

            # expect Trustees, then Stewards, then Clients
            if 0 <= i < TRUSTEE_COUNT:
                expected_role = TRUSTEE
            elif TRUSTEE_COUNT <= i < TRUSTEE_COUNT + NODE_COUNT:
                expected_role = STEWARD
            else:
                expected_role = None
            assert get_payload_data(txn).get(ROLE) == expected_role
            i += 1
Example #25
    def get_result(self, request: Request):
        auction_id = request.operation.get(DATA).get("auction_id").encode()
        serialized_auction = self.state.get(auction_id, isCommitted=True)

        auction = JsonSerializer().deserialize(
            serialized_auction) if serialized_auction else None

        return {
            **request.operation,
            **{
                f.IDENTIFIER.nm: request.identifier,
                f.REQ_ID.nm: request.reqId,
                auction_id: auction
            }
        }
Example #26
def taa_value(result, text, version, digest, retired=None, ratified=None):
    if ratified is None:
        ratified = result[TXN_METADATA_TIME]
    value = {
        TXN_AUTHOR_AGREEMENT_TEXT: text,
        TXN_AUTHOR_AGREEMENT_VERSION: version,
        TXN_AUTHOR_AGREEMENT_DIGEST: digest,
        TXN_AUTHOR_AGREEMENT_RATIFICATION_TS: ratified
    }
    if retired:
        value[TXN_AUTHOR_AGREEMENT_RETIREMENT_TS] = retired
    return JsonSerializer().serialize({
        "val": value,
        "lsn": result[TXN_METADATA_SEQ_NO],
        "lut": result[TXN_METADATA_TIME]
    })
Example #27
def print_txns(storage, args):
    serializer = None
    if args.serializer == 'json':
        serializer = JsonSerializer()
    else:
        print("Unknown serializer for output: {}".format(args.serializer))
        exit()

    # --count
    count = args.count
    if count:
        print_count(storage)
        return

    # --seq_no
    seq_no = args.seq_no
    if seq_no:
        print_by_seq_no(storage, seq_no, serializer)
        return

    # print all (--from --to)
    print_all(storage, serializer)
Example #28
    def do_dynamic_validation_content(self, request):
        # it has been checked on static validation step that the content is a valid JSON.
        # and it has schema and mapping fields
        content_as_dict = JsonSerializer.loads(request.operation[RS_CONTENT])
        schema_id = content_as_dict[RS_CRED_DEF_SCHEMA]
        mapping_id = content_as_dict[RS_CRED_DEF_MAPPING]

        # 1. check that the schema field points to an existing object on the ledger
        schema, _, _ = self.get_from_state(schema_id)
        if not schema:
            raise InvalidClientRequest(
                request.identifier, request.reqId,
                "Can not find a referenced '{}' with id={}; please make sure that it has been added to the ledger"
                .format(RS_CRED_DEF_SCHEMA, schema_id))

        # 2. check that the mapping field points to an existing object on the ledger
        mapping, _, _ = self.get_from_state(mapping_id)
        if not mapping:
            raise InvalidClientRequest(
                request.identifier, request.reqId,
                "Can not find a referenced '{}' with id={}; please make sure that it has been added to the ledger"
                .format(RS_CRED_DEF_MAPPING, mapping_id))

        # 3. check that the schema field points to an object of the Schema type
        if schema.get(RS_TYPE) != RS_SCHEMA_TYPE_VALUE:
            raise InvalidClientRequest(
                request.identifier, request.reqId,
                "'{}' field must reference a schema with {}={}".format(
                    RS_CRED_DEF_SCHEMA, RS_TYPE, RS_SCHEMA_TYPE_VALUE))

        # 4. check that the mapping fields points to an object of the Mapping type
        if mapping.get(RS_TYPE) != RS_MAPPING_TYPE_VALUE:
            raise InvalidClientRequest(
                request.identifier, request.reqId,
                "'{}' field must reference a mapping with {}={}".format(
                    RS_CRED_DEF_MAPPING, RS_TYPE, RS_MAPPING_TYPE_VALUE))
Example #29
 def _get_msg_digest(self, msg: NewView):
     msg_dict = msg._asdict()
     return sha256(JsonSerializer().dumps(msg_dict)).hexdigest()
Example #30
class FeesAuthorizer(AbstractAuthorizer):
    def __init__(self, config_state: PruningState, utxo_cache: UTXOCache):
        super().__init__()
        self.config_state = config_state
        self.utxo_cache = utxo_cache
        self.state_serializer = JsonSerializer()

    @staticmethod
    def has_fees(request) -> bool:
        return hasattr(request, FEES) and request.fees is not None

    @staticmethod
    def get_change_for_fees(request) -> list:
        return request.fees[1] if len(request.fees) >= 2 else []

    @staticmethod
    def calculate_fees_from_req(utxo_cache, request):
        if hasattr(request, FEES):
            inputs = request.fees[0]
            outputs = FeesAuthorizer.get_change_for_fees(request)
        else:
            inputs = request.operation[INPUTS]
            outputs = request.operation[OUTPUTS]
        try:
            sum_inputs = utxo_cache.sum_inputs(inputs, is_committed=False)
        except Exception as e:
            logger.error(
                "Unexpected exception while sum_inputs calculating: {}".format(
                    e))
            return 0

        sum_outputs = sum([a[AMOUNT] for a in outputs])
        return sum_inputs - sum_outputs

    def can_pay_fees(self, request, required_fees):
        try:
            self._can_pay_fees(request, required_fees)
        except (InvalidFundsError, UnauthorizedClientRequest, ExtraFundsError,
                InsufficientFundsError, InvalidClientMessageException) as e:
            return False, str(e)

        return True, ''

    def _can_pay_fees(self, request, required_fees):

        if request.operation[TXN_TYPE] == XFER_PUBLIC:
            # Fees in XFER_PUBLIC is part of operation[INPUTS]
            inputs = request.operation[INPUTS]
            outputs = request.operation[OUTPUTS]
            self._validate_fees_can_pay(request, inputs, outputs,
                                        required_fees)
        else:
            inputs = request.fees[0]
            outputs = self.get_change_for_fees(request)
            self._validate_fees_can_pay(request, inputs, outputs,
                                        required_fees)

    def _validate_fees_can_pay(self, request, inputs, outputs, required_fees):
        """
        Calculate and verify that inputs and outputs for fees can both be paid and change is properly specified

        This function ASSUMES that validation of the fees for the request has already been done.

        :param request:
        :param required_fees:
        :return:
        """

        try:
            sum_inputs = self.utxo_cache.sum_inputs(inputs, is_committed=False)
        except UTXOError as ex:
            raise InvalidFundsError(request.identifier, request.reqId,
                                    "{}".format(ex))
        except Exception as ex:
            error = 'Exception {} while processing inputs/outputs'.format(ex)
            raise UnauthorizedClientRequest(request.identifier, request.reqId,
                                            error)
        else:
            change_amount = sum([a[AMOUNT] for a in outputs])
            expected_amount = change_amount + required_fees
            TokenStaticHelper.validate_given_inputs_outputs(
                sum_inputs, change_amount, expected_amount, request,
                'fees: {}'.format(required_fees))

    def _get_fees_alias_from_constraint(self, auth_constraint: AuthConstraint):
        if auth_constraint.metadata:
            if FEES_FIELD_NAME in auth_constraint.metadata:
                return auth_constraint.metadata[FEES_FIELD_NAME]

    def _get_fees_from_state(self):
        key = build_path_for_set_fees()
        serz = self.config_state.get(key, isCommitted=False)
        if not serz:
            return {}
        return self.state_serializer.deserialize(serz)

    def authorize(self,
                  request,
                  auth_constraint: AuthConstraint,
                  auth_action: AbstractAuthAction = None):
        fees_alias = self._get_fees_alias_from_constraint(auth_constraint)
        fees = self._get_fees_from_state()
        fees_amount = fees.get(fees_alias, 0)
        is_fees_required = fees_amount > 0
        if request.txn_type != XFER_PUBLIC:
            if is_fees_required and not self.has_fees(request):
                logger.warning(
                    "Validation error: Fees are required for this txn type")
                return False, "Fees are required for this txn type"
            if not is_fees_required and self.has_fees(request) \
                    and self.calculate_fees_from_req(self.utxo_cache, request) > 0:
                logger.warning(
                    "Validation error: Fees are not required for this txn type"
                )
                return False, "Fees are not required for this txn type"
            if not is_fees_required and not self.has_fees(request):
                return True, ""
        return self.can_pay_fees(request, fees_amount)
Example #31
class StaticFeesReqHandler(FeeReqHandler):
    write_types = FeeReqHandler.write_types.union({SET_FEES, FEE_TXN})
    query_types = FeeReqHandler.query_types.union({GET_FEES, GET_FEE})
    set_fees_validator_cls = SetFeesMsg
    get_fee_validator_cls = GetFeeMsg
    state_serializer = JsonSerializer()

    def __init__(self,
                 ledger,
                 state,
                 token_ledger,
                 token_state,
                 utxo_cache,
                 domain_state,
                 bls_store,
                 node,
                 write_req_validator,
                 ts_store=None):

        super().__init__(ledger,
                         state,
                         domain_state,
                         idrCache=node.idrCache,
                         upgrader=node.upgrader,
                         poolManager=node.poolManager,
                         poolCfg=node.poolCfg,
                         write_req_validator=node.write_req_validator,
                         bls_store=bls_store,
                         ts_store=ts_store)

        self.token_ledger = token_ledger
        self.token_state = token_state
        self.utxo_cache = utxo_cache
        self.domain_state = domain_state
        self.bls_store = bls_store
        self.write_req_validator = write_req_validator

        self._add_query_handler(GET_FEES, self.get_fees)
        self._add_query_handler(GET_FEE, self.get_fee)

        # Tracks count of transactions paying sovtokenfees while a batch is being
        # processed. Reset to zero once a batch is created (not committed)
        self.fee_txns_in_current_batch = 0
        # Tracks amount of deducted sovtokenfees for a transaction
        self.deducted_fees = {}
        self.token_tracker = LedgerUncommittedTracker(
            token_state.committedHeadHash, token_ledger.uncommitted_root_hash,
            token_ledger.size)

    @property
    def fees(self):
        return self._get_fees(is_committed=False)

    @staticmethod
    def get_ref_for_txn_fees(ledger_id, seq_no):
        return '{}:{}'.format(ledger_id, seq_no)

    def get_txn_fees(self, request) -> int:
        return self.fees.get(request.operation[TXN_TYPE], 0)

    # TODO: Fix this to match signature of `FeeReqHandler` and extract
    # the params from `kwargs`
    def deduct_fees(self, request, cons_time, ledger_id, seq_no, txn):
        txn_type = request.operation[TXN_TYPE]
        fees_key = "{}#{}".format(txn_type, seq_no)
        if txn_type != XFER_PUBLIC and FeesAuthorizer.has_fees(request):
            inputs, outputs, signatures = getattr(request, f.FEES.nm)
            # This is correct since FEES is changed from config ledger whose
            # transactions have no fees
            fees = FeesAuthorizer.calculate_fees_from_req(
                self.utxo_cache, request)
            sigs = {i[ADDRESS]: s for i, s in zip(inputs, signatures)}
            txn = {
                OPERATION: {
                    TXN_TYPE: FEE_TXN,
                    INPUTS: inputs,
                    OUTPUTS: outputs,
                    REF: self.get_ref_for_txn_fees(ledger_id, seq_no),
                    FEES: fees,
                },
                f.SIGS.nm: sigs,
                f.REQ_ID.nm: get_req_id(txn),
                f.PROTOCOL_VERSION.nm: 2,
            }
            txn = reqToTxn(txn)
            self.token_ledger.append_txns_metadata([txn], txn_time=cons_time)
            _, txns = self.token_ledger.appendTxns(
                [TokenReqHandler.transform_txn_for_ledger(txn)])
            self.updateState(txns)
            self.fee_txns_in_current_batch += 1
            self.deducted_fees[fees_key] = fees
            return txn

    def doStaticValidation(self, request: Request):
        operation = request.operation
        if operation[TXN_TYPE] in (SET_FEES, GET_FEES, GET_FEE):
            try:
                if operation[TXN_TYPE] == SET_FEES:
                    self.set_fees_validator_cls(**request.operation)
                elif operation[TXN_TYPE] == GET_FEE:
                    self.get_fee_validator_cls(**request.operation)
            except TypeError as exc:
                raise InvalidClientRequest(request.identifier, request.reqId,
                                           exc)
        else:
            super().doStaticValidation(request)

    def _fees_specific_validation(self, request: Request):
        operation = request.operation
        current_fees = self._get_fees()
        constraint = self.get_auth_constraint(operation)
        wrong_aliases = []
        self._validate_metadata(self.fees, constraint, wrong_aliases)
        if len(wrong_aliases) > 0:
            raise InvalidClientMessageException(
                request.identifier, request.reqId,
                "Fees alias(es) {} does not exist in current fees {}. "
                "Please add the alias(es) via SET_FEES transaction first.".
                format(", ".join(wrong_aliases), current_fees))

    def _validate_metadata(self, current_fees, constraint: AuthConstraint,
                           wrong_aliases):
        if constraint.constraint_id != ConstraintsEnum.ROLE_CONSTRAINT_ID:
            for constr in constraint.auth_constraints:
                self._validate_metadata(current_fees, constr, wrong_aliases)
        else:
            meta_alias = constraint.metadata.get(FEES_FIELD_NAME, None)
            if meta_alias and meta_alias not in current_fees:
                wrong_aliases.append(meta_alias)

    def validate(self, request: Request):
        operation = request.operation
        if operation[TXN_TYPE] == SET_FEES:
            return self.write_req_validator.validate(request, [
                AuthActionEdit(
                    txn_type=SET_FEES, field="*", old_value="*", new_value="*")
            ])
        else:
            super().validate(request)
        if operation[TXN_TYPE] == AUTH_RULE:
            # metadata validation
            self._fees_specific_validation(request)

    def updateState(self, txns, isCommitted=False):
        for txn in txns:
            self._update_state_with_single_txn(txn, is_committed=isCommitted)
        super().updateState(txns, isCommitted=isCommitted)

    def get_fees(self, request: Request):
        fees, proof = self._get_fees(is_committed=True, with_proof=True)
        result = {
            f.IDENTIFIER.nm: request.identifier,
            f.REQ_ID.nm: request.reqId,
            FEES: fees
        }
        if proof:
            result[STATE_PROOF] = proof
        result.update(request.operation)
        return result

    def get_fee(self, request: Request):
        alias = request.operation.get(ALIAS)
        fee, proof = self._get_fee(alias, is_committed=True, with_proof=True)
        result = {
            f.IDENTIFIER.nm: request.identifier,
            f.REQ_ID.nm: request.reqId,
            FEE: fee
        }
        if proof:
            result[STATE_PROOF] = proof
        result.update(request.operation)
        return result

    def post_batch_created(self, ledger_id, state_root):
        # it means that all tracker bookkeeping was already done in the onBatchCreated phase for TokenReqHandler
        self.token_tracker.apply_batch(self.token_state.headHash,
                                       self.token_ledger.uncommitted_root_hash,
                                       self.token_ledger.uncommitted_size)
        if ledger_id == TOKEN_LEDGER_ID:
            return
        if self.fee_txns_in_current_batch > 0:
            state_root = self.token_state.headHash
            TokenReqHandler.on_batch_created(self.utxo_cache, state_root)
            # TODO: investigate the effect of removing the reset of this var to 0
            self.fee_txns_in_current_batch = 0

    def post_batch_rejected(self, ledger_id):
        uncommitted_hash, uncommitted_txn_root, txn_count = self.token_tracker.reject_batch(
        )
        if ledger_id == TOKEN_LEDGER_ID:
            # TODO: Need to improve this logic for the case when we get an XFER txn with fees.
            # All other txns with fees take two steps, "apply txn" and "apply fees",
            # but for an XFER txn with fees we only do "apply fees" as part of the transfer.
            return
        if txn_count == 0 or self.token_ledger.uncommitted_root_hash == uncommitted_txn_root or \
                self.token_state.headHash == uncommitted_hash:
            return 0
        self.token_state.revertToHead(uncommitted_hash)
        self.token_ledger.discardTxns(txn_count)
        count_reverted = TokenReqHandler.on_batch_rejected(self.utxo_cache)
        logger.info("Reverted {} txns with fees".format(count_reverted))

    def post_batch_committed(self, ledger_id, pp_time, committed_txns,
                             state_root, txn_root):
        token_state_root, token_txn_root, _ = self.token_tracker.commit_batch()
        if ledger_id == TOKEN_LEDGER_ID:
            return
        committed_seq_nos_with_fees = [
            get_seq_no(t) for t in committed_txns
            if get_type(t) != XFER_PUBLIC and "{}#{}".format(
                get_type(t), get_seq_no(t)) in self.deducted_fees
        ]
        if len(committed_seq_nos_with_fees) > 0:
            r = TokenReqHandler.__commit__(
                self.utxo_cache, self.token_ledger, self.token_state,
                len(committed_seq_nos_with_fees), token_state_root,
                txn_root_serializer.serialize(token_txn_root), pp_time)
            i = 0
            for txn in committed_txns:
                if get_seq_no(txn) in committed_seq_nos_with_fees:
                    txn[FEES] = r[i]
                    i += 1
            self.fee_txns_in_current_batch = 0

    def _get_fees(self, is_committed=False, with_proof=False):
        result = self._get_fee_from_state(is_committed=is_committed,
                                          with_proof=with_proof)
        if with_proof:
            fees, proof = result
            return (fees, proof) if fees is not None else ({}, proof)
        else:
            return result if result is not None else {}

    def _get_fee(self, alias, is_committed=False, with_proof=False):
        return self._get_fee_from_state(fees_alias=alias,
                                        is_committed=is_committed,
                                        with_proof=with_proof)

    def _get_fee_from_state(self,
                            fees_alias=None,
                            is_committed=False,
                            with_proof=False):
        fees = None
        proof = None
        try:
            fees_key = build_path_for_set_fees(alias=fees_alias)
            if with_proof:
                proof, serz = self.state.generate_state_proof(fees_key,
                                                              serialize=True,
                                                              get_value=True)
                if serz:
                    serz = rlp_decode(serz)[0]
                root_hash = self.state.committedHeadHash if is_committed else self.state.headHash
                encoded_root_hash = state_roots_serializer.serialize(
                    bytes(root_hash))
                multi_sig = self.bls_store.get(encoded_root_hash)
                if multi_sig:
                    encoded_proof = proof_nodes_serializer.serialize(proof)
                    proof = {
                        MULTI_SIGNATURE: multi_sig.as_dict(),
                        ROOT_HASH: encoded_root_hash,
                        PROOF_NODES: encoded_proof
                    }
                else:
                    proof = {}
            else:
                serz = self.state.get(fees_key, isCommitted=is_committed)
            if serz:
                fees = self.state_serializer.deserialize(serz)
        except KeyError:
            pass
        if with_proof:
            return fees, proof
        return fees

    def _set_to_state(self, key, val):
        val = self.state_serializer.serialize(val)
        key = key.encode()
        self.state.set(key, val)

    def _update_state_with_single_txn(self, txn, is_committed=False):
        typ = get_type(txn)
        if typ == SET_FEES:
            payload = get_payload_data(txn)
            fees_from_req = payload.get(FEES)
            current_fees = self._get_fees()
            current_fees.update(fees_from_req)
            for fees_alias, fees_value in fees_from_req.items():
                self._set_to_state(build_path_for_set_fees(alias=fees_alias),
                                   fees_value)
            self._set_to_state(build_path_for_set_fees(), current_fees)

        elif typ == FEE_TXN:
            for utxo in txn[TXN_PAYLOAD][TXN_PAYLOAD_DATA][INPUTS]:
                TokenReqHandler.spend_input(state=self.token_state,
                                            utxo_cache=self.utxo_cache,
                                            address=utxo[ADDRESS],
                                            seq_no=utxo[SEQNO],
                                            is_committed=is_committed)
            seq_no = get_seq_no(txn)
            for output in txn[TXN_PAYLOAD][TXN_PAYLOAD_DATA][OUTPUTS]:
                TokenReqHandler.add_new_output(state=self.token_state,
                                               utxo_cache=self.utxo_cache,
                                               output=Output(
                                                   output[ADDRESS], seq_no,
                                                   output[AMOUNT]),
                                               is_committed=is_committed)

    @staticmethod
    def _handle_incorrect_funds(sum_inputs, sum_outputs, expected_amount,
                                required_fees, request):
        if sum_inputs < expected_amount:
            error = 'Insufficient funds, sum of inputs is {} ' \
                    'but required is {} (sum of outputs: {}, ' \
                    'fees: {})'.format(sum_inputs, expected_amount, sum_outputs, required_fees)
            raise InsufficientFundsError(request.identifier, request.reqId,
                                         error)
        if sum_inputs > expected_amount:
            error = 'Extra funds, sum of inputs is {} ' \
                    'but required is: {} -- sum of outputs: {} ' \
                    '-- fees: {})'.format(sum_inputs, expected_amount, sum_outputs, required_fees)
            raise ExtraFundsError(request.identifier, request.reqId, error)

    @staticmethod
    def transform_txn_for_ledger(txn):
        """
        Some transactions need to be updated before they can be stored in the
        ledger
        """
        return txn

    def postCatchupCompleteClbk(self):
        self.token_tracker.set_last_committed(
            self.token_state.committedHeadHash,
            self.token_ledger.uncommitted_root_hash, self.token_ledger.size)
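A minimal sketch of the state layout implied by _update_state_with_single_txn above; build_path_for_set_fees belongs to the fees plugin, so a hypothetical stand-in path helper is used here:

from common.serializers.json_serializer import JsonSerializer


def fees_path(alias=None):
    # Hypothetical stand-in for build_path_for_set_fees(alias=...).
    return 'fees' if alias is None else 'fees:{}'.format(alias)


js = JsonSerializer()
fees_from_req = {'nym_alias': 10, 'attrib_alias': 5}

# Each alias is written under its own key, and the whole map under the common key,
# all values JSON-serialized before being put into the config state.
writes = {fees_path(a): js.serialize(v) for a, v in fees_from_req.items()}
writes[fees_path()] = js.serialize(fees_from_req)

# _get_fee_from_state / _get_fees later deserialize whatever they read back.
assert js.deserialize(writes[fees_path('nym_alias')]) == 10
assert js.deserialize(writes[fees_path()]) == fees_from_req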
Example #32
from typing import Mapping

from common.serializers.base58_serializer import Base58Serializer
from common.serializers.base64_serializer import Base64Serializer
from common.serializers.json_serializer import JsonSerializer
from common.serializers.msgpack_serializer import MsgPackSerializer
from common.serializers.signing_serializer import SigningSerializer

signing_serializer = SigningSerializer()
ledger_txn_serializer = MsgPackSerializer()
ledger_hash_serializer = MsgPackSerializer()
domain_state_serializer = JsonSerializer()
pool_state_serializer = JsonSerializer()
client_req_rep_store_serializer = JsonSerializer()
multi_sig_store_serializer = JsonSerializer()
state_roots_serializer = Base58Serializer()
txn_root_serializer = Base58Serializer()
proof_nodes_serializer = Base64Serializer()
multi_signature_value_serializer = MsgPackSerializer()
invalid_index_serializer = JsonSerializer()


# TODO: separate data, metadata and signature, so that we don't need to have topLevelKeysToIgnore
def serialize_msg_for_signing(msg: Mapping, topLevelKeysToIgnore=None):
    """
    Serialize a message for signing.

    :param msg: the message to sign
    :param topLevelKeysToIgnore: the top level keys of the Mapping that should
    not be included in the serialized form
    :return: a utf-8 encoded version of `msg`
Example #33
 def _updateStateWithSingleTxn(self, txn, isCommitted=False):
     # Dummy update so that state root is non empty
     data = get_payload_data(txn)
     for k, v in data.items():
         self.state.set(k.encode(), JsonSerializer.dumps(v))
Example #34
def testJsonSerializer():
    sz = JsonSerializer()
    m1 = {'integer': 36, 'name': 'Foo', 'surname': 'Bar', 'float': 14.8639,
          'index': 1, 'index_start_at': 56, 'email': '*****@*****.**',
          'fullname': 'Foo Bar', 'bool': False}
    m1s = '{"bool":false,"email":"*****@*****.**","float":14.8639,"fullname":"Foo Bar",' \
          '"index":1,"index_start_at":56,"integer":36,"name":"Foo","surname":"Bar"}'

    m2 = {'latitude': 31.351883, 'longitude': -97.466179,
          'tags': ['foo', 'bar', 'baz', 'alice', 'bob',
                   'carol', 'dave']}
    m2s = '{"latitude":31.351883,"longitude":-97.466179,' \
          '"tags":["foo","bar","baz","alice","bob","carol","dave"]}'

    m3 = {'name': 'Alice Bob', 'website': 'example.com', 'friends': [
        {
          'id': 0,
          'name': 'Dave'
          },
        {
            'id': 1,
            'name': 'Carol'
        },
        {
            'id': 2,
            'name': 'Dave'
        }]}
    m3s = '{"friends":[{"id":0,"name":"Dave"},{"id":1,"name":"Carol"},' \
          '{"id":2,"name":"Dave"}],' \
          '"name":"Alice Bob","website":"example.com"}'

    assert sz.serialize(m1) == m1s.encode()
    assert sz.serialize(m1, toBytes=False) == m1s
    assert sz.serialize(m2) == m2s.encode()
    assert sz.serialize(m2, toBytes=False) == m2s
    assert sz.serialize(m3) == m3s.encode()
    assert sz.serialize(m3, toBytes=False) == m3s

    assert sz.deserialize(m1s) == m1
    assert sz.deserialize(m1s.encode()) == m1
    assert sz.deserialize(m2s) == m2
    assert sz.deserialize(m2s.encode()) == m2
    assert sz.deserialize(m3s) == m3
    assert sz.deserialize(m3s.encode()) == m3
Example #35
 def _updateStateWithSingleTxn(self, txn, isCommitted=False):
     # Dummy update so that state root is non empty
     data = get_payload_data(txn)
     for k, v in data.items():
         self.state.set(k.encode(), JsonSerializer.dumps(v))
Example #36
class StaticFeesReqHandler(FeeReqHandler):
    valid_txn_types = {SET_FEES, GET_FEES, FEE_TXN}
    write_types = {SET_FEES, FEE_TXN}
    query_types = {
        GET_FEES,
    }
    _fees_validator = FeesStructureField()
    MinSendersForFees = 3
    fees_state_key = b'fees'
    state_serializer = JsonSerializer()

    def __init__(self, ledger, state, token_ledger, token_state, utxo_cache,
                 domain_state, bls_store):
        super().__init__(ledger, state)
        self.token_ledger = token_ledger
        self.token_state = token_state
        self.utxo_cache = utxo_cache
        self.domain_state = domain_state
        self.bls_store = bls_store

        # In-memory map of sovtokenfees, changes on SET_FEES txns
        self.fees = self._get_fees(is_committed=True)

        self.query_handlers = {
            GET_FEES: self.get_fees,
        }

        # Tracks count of transactions paying sovtokenfees while a batch is being
        # processed. Reset to zero once a batch is created (not committed)
        self.fee_txns_in_current_batch = 0
        # Tracks amount of deducted sovtokenfees for a transaction
        self.deducted_fees = {}
        # Since inputs are spent in XFER. FIND A BETTER SOLUTION
        self.deducted_fees_xfer = {}
        # Tracks txn and state root for each batch with at least 1 transaction
        # paying sovtokenfees
        self.uncommitted_state_roots_for_batches = []

    @staticmethod
    def has_fees(request) -> bool:
        return hasattr(request, FEES) and request.fees is not None

    @staticmethod
    def get_change_for_fees(request) -> list:
        return request.fees[1] if len(request.fees) >= 2 else []

    @staticmethod
    def get_ref_for_txn_fees(ledger_id, seq_no):
        return '{}:{}'.format(ledger_id, seq_no)

    def get_txn_fees(self, request) -> int:
        return self.fees.get(request.operation[TXN_TYPE], 0)

    def can_pay_fees(self, request):
        required_fees = self.get_txn_fees(request)

        if request.operation[TXN_TYPE] == XFER_PUBLIC:
            # Fees in XFER_PUBLIC is part of operation[INPUTS]
            inputs = request.operation[INPUTS]
            outputs = request.operation[OUTPUTS]
            self._validate_fees_can_pay(request, inputs, outputs,
                                        required_fees)
            self.deducted_fees_xfer[request.key] = required_fees
        elif required_fees:
            if StaticFeesReqHandler.has_fees(request):
                inputs = request.fees[0]
                outputs = self.get_change_for_fees(request)
                self._validate_fees_can_pay(request, inputs, outputs,
                                            required_fees)
            else:
                raise InvalidClientMessageException(
                    getattr(request, f.IDENTIFIER.nm, None),
                    getattr(request, f.REQ_ID.nm, None),
                    'Fees are required for this txn type')
        else:
            if StaticFeesReqHandler.has_fees(request):
                raise InvalidClientMessageException(
                    getattr(request, f.IDENTIFIER.nm, None),
                    getattr(request, f.REQ_ID.nm, None),
                    'Fees are not allowed for this txn type')

    # TODO: Fix this to match signature of `FeeReqHandler` and extract
    # the params from `kwargs`
    def deduct_fees(self, request, cons_time, ledger_id, seq_no, txn):
        txn_type = request.operation[TXN_TYPE]
        fees_key = "{}#{}".format(txn_type, seq_no)
        if txn_type == XFER_PUBLIC:
            if request.key in self.deducted_fees_xfer:
                self.deducted_fees[fees_key] = self.deducted_fees_xfer.pop(
                    request.key)
        else:
            if self.has_fees(request):
                inputs, outputs, signatures = getattr(request, f.FEES.nm)
                # This is correct since FEES is changed from config ledger whose
                # transactions have no fees
                fees = self.get_txn_fees(request)
                sigs = {i[ADDRESS]: s for i, s in zip(inputs, signatures)}
                txn = {
                    OPERATION: {
                        TXN_TYPE: FEE_TXN,
                        INPUTS: inputs,
                        OUTPUTS: outputs,
                        REF: self.get_ref_for_txn_fees(ledger_id, seq_no),
                        FEES: fees,
                    },
                    f.SIGS.nm: sigs,
                    f.REQ_ID.nm: get_req_id(txn),
                    f.PROTOCOL_VERSION.nm: 2,
                }
                txn = reqToTxn(txn)
                self.token_ledger.append_txns_metadata([txn],
                                                       txn_time=cons_time)
                _, txns = self.token_ledger.appendTxns(
                    [TokenReqHandler.transform_txn_for_ledger(txn)])
                self.updateState(txns)
                self.fee_txns_in_current_batch += 1
                self.deducted_fees[fees_key] = fees
                return txn

    def doStaticValidation(self, request: Request):
        operation = request.operation
        if operation[TXN_TYPE] in (SET_FEES, GET_FEES):
            error = ''
            if operation[TXN_TYPE] == SET_FEES:
                error = self._fees_validator.validate(operation.get(FEES))
            if error:
                raise InvalidClientRequest(request.identifier, request.reqId,
                                           error)
        else:
            super().doStaticValidation(request)

    def validate(self, request: Request):
        operation = request.operation
        if operation[TXN_TYPE] == SET_FEES:
            validate_multi_sig_txn(request, TRUSTEE, self.domain_state,
                                   self.MinSendersForFees)
        else:
            super().validate(request)

    def get_query_response(self, request: Request):
        return self.query_handlers[request.operation[TXN_TYPE]](request)

    def updateState(self, txns, isCommitted=False):
        for txn in txns:
            self._update_state_with_single_txn(txn, is_committed=isCommitted)

    def get_fees(self, request: Request):
        fees, proof = self._get_fees(is_committed=True, with_proof=True)
        result = {
            f.IDENTIFIER.nm: request.identifier,
            f.REQ_ID.nm: request.reqId,
            FEES: fees
        }
        if proof:
            result[STATE_PROOF] = proof
        result.update(request.operation)
        return result

    def post_batch_created(self, ledger_id, state_root):
        if self.fee_txns_in_current_batch > 0:
            state_root = self.token_state.headHash
            txn_root = self.token_ledger.uncommittedRootHash
            self.uncommitted_state_roots_for_batches.append(
                (txn_root, state_root))
            TokenReqHandler.on_batch_created(self.utxo_cache, state_root)
            self.fee_txns_in_current_batch = 0

    def post_batch_rejected(self, ledger_id):
        if self.fee_txns_in_current_batch > 0:
            TokenReqHandler.on_batch_rejected(self.utxo_cache)
            self.fee_txns_in_current_batch = 0

    def post_batch_committed(self, ledger_id, pp_time, committed_txns,
                             state_root, txn_root):
        committed_seq_nos_with_fees = [
            get_seq_no(t) for t in committed_txns
            if "{}#{}".format(get_type(t), get_seq_no(t)) in self.deducted_fees
            and get_type(t) != XFER_PUBLIC
        ]
        if len(committed_seq_nos_with_fees) > 0:
            txn_root, state_root = self.uncommitted_state_roots_for_batches.pop(
                0)
            r = TokenReqHandler.__commit__(
                self.utxo_cache, self.token_ledger, self.token_state,
                len(committed_seq_nos_with_fees), state_root,
                txn_root_serializer.serialize(txn_root), pp_time)
            i = 0
            for txn in committed_txns:
                if get_seq_no(txn) in committed_seq_nos_with_fees:
                    txn[FEES] = r[i]
                    i += 1

    def _validate_fees_can_pay(self, request, inputs, outputs, required_fees):
        """
        Calculate and verify that inputs and outputs for fees can both be paid and change is properly specified

        This function ASSUMES that validation of the fees for the request has already been done.

        :param request:
        :param required_fees:
        :return:
        """

        try:
            sum_inputs = self.utxo_cache.sum_inputs(inputs, is_committed=False)
        except UTXOError as ex:
            raise InvalidFundsError(request.identifier, request.reqId,
                                    "{}".format(ex))
        except Exception as ex:
            error = 'Exception {} while processing inputs/outputs'.format(ex)
            raise UnauthorizedClientRequest(request.identifier, request.reqId,
                                            error)
        else:
            change_amount = sum([a[AMOUNT] for a in outputs])
            expected_amount = change_amount + required_fees
            TokenReqHandler.validate_given_inputs_outputs(
                sum_inputs, change_amount, expected_amount, request,
                'fees: {}'.format(required_fees))

    def _get_fees(self, is_committed=False, with_proof=False):
        fees = {}
        proof = None
        try:
            if with_proof:
                proof, serz = self.state.generate_state_proof(
                    self.fees_state_key, serialize=True, get_value=True)
                if serz:
                    serz = rlp_decode(serz)[0]
                root_hash = self.state.committedHeadHash if is_committed else self.state.headHash
                encoded_root_hash = state_roots_serializer.serialize(
                    bytes(root_hash))
                multi_sig = self.bls_store.get(encoded_root_hash)
                if multi_sig:
                    encoded_proof = proof_nodes_serializer.serialize(proof)
                    proof = {
                        MULTI_SIGNATURE: multi_sig.as_dict(),
                        ROOT_HASH: encoded_root_hash,
                        PROOF_NODES: encoded_proof
                    }
                else:
                    proof = {}
            else:
                serz = self.state.get(self.fees_state_key,
                                      isCommitted=is_committed)
            if serz:
                fees = self.state_serializer.deserialize(serz)
        except KeyError:
            pass
        if with_proof:
            return fees, proof
        return fees

    def _update_state_with_single_txn(self, txn, is_committed=False):
        typ = get_type(txn)
        if typ == SET_FEES:
            payload = get_payload_data(txn)
            existing_fees = self._get_fees(is_committed=is_committed)
            existing_fees.update(payload[FEES])
            val = self.state_serializer.serialize(existing_fees)
            self.state.set(self.fees_state_key, val)
            self.fees = existing_fees
        elif typ == FEE_TXN:
            for utxo in txn[TXN_PAYLOAD][TXN_PAYLOAD_DATA][INPUTS]:
                TokenReqHandler.spend_input(state=self.token_state,
                                            utxo_cache=self.utxo_cache,
                                            address=utxo[ADDRESS],
                                            seq_no=utxo[SEQNO],
                                            is_committed=is_committed)
            seq_no = get_seq_no(txn)
            for output in txn[TXN_PAYLOAD][TXN_PAYLOAD_DATA][OUTPUTS]:
                TokenReqHandler.add_new_output(state=self.token_state,
                                               utxo_cache=self.utxo_cache,
                                               output=Output(
                                                   output[ADDRESS], seq_no,
                                                   output[AMOUNT]),
                                               is_committed=is_committed)
        else:
            logger.warning('Unknown type {} found while updating '
                           'state with txn {}'.format(typ, txn))

    @staticmethod
    def _handle_incorrect_funds(sum_inputs, sum_outputs, expected_amount,
                                required_fees, request):
        if sum_inputs < expected_amount:
            error = 'Insufficient funds, sum of inputs is {} ' \
                    'but required is {} (sum of outputs: {}, ' \
                    'fees: {})'.format(sum_inputs, expected_amount, sum_outputs, required_fees)
            raise InsufficientFundsError(request.identifier, request.reqId,
                                         error)
        if sum_inputs > expected_amount:
            error = 'Extra funds, sum of inputs is {} ' \
                    'but required is: {} -- sum of outputs: {} ' \
                    '-- fees: {})'.format(sum_inputs, expected_amount, sum_outputs, required_fees)
            raise ExtraFundsError(request.identifier, request.reqId, error)

    @staticmethod
    def transform_txn_for_ledger(txn):
        """
        Some transactions need to be updated before they can be stored in the
        ledger
        """
        return txn