def test_append_seq_no_when_adding(ledger, looper, sdk_wallet_client):
    """Adding txns assigns consecutive seq_nos continuing from ledger.seqNo."""
    new_txns = create_txns(looper, sdk_wallet_client)
    expected_seq_no = ledger.seqNo
    for new_txn in new_txns:
        expected_seq_no += 1
        # a freshly built txn carries no seq_no until the ledger assigns one
        assert get_seq_no(new_txn) is None
        ledger.add(new_txn)
        assert get_seq_no(new_txn) == expected_seq_no
def test_proof_in_write_reply(looper, txnPoolNodeSet, sdk_pool_handle, sdk_wallet_client):
    """A write reply carries a state proof with a valid multi-signature."""
    replies = sdk_send_random_and_check(looper, txnPoolNodeSet,
                                        sdk_pool_handle, sdk_wallet_client, 1)
    request = replies[0][0]
    reply = replies[0][1]['result']
    assert reply

    # the reply echoes the request identity
    assert get_type(reply) == "buy"
    assert get_from(reply) == request[f.IDENTIFIER.nm]
    assert get_req_id(reply) == request[f.REQ_ID.nm]
    assert get_seq_no(reply)
    assert get_txn_time(reply)

    assert STATE_PROOF in reply
    state_proof = reply[STATE_PROOF]
    for proof_field in (ROOT_HASH, MULTI_SIGNATURE, PROOF_NODES):
        assert proof_field in state_proof

    multi_sig = state_proof[MULTI_SIGNATURE]
    for sig_field in (MULTI_SIGNATURE_SIGNATURE,
                      MULTI_SIGNATURE_PARTICIPANTS,
                      MULTI_SIGNATURE_VALUE):
        assert sig_field in multi_sig

    multi_sig_value = multi_sig[MULTI_SIGNATURE_VALUE]
    for value_field in (MULTI_SIGNATURE_VALUE_LEDGER_ID,
                        MULTI_SIGNATURE_VALUE_STATE_ROOT,
                        MULTI_SIGNATURE_VALUE_TXN_ROOT,
                        MULTI_SIGNATURE_VALUE_POOL_STATE_ROOT,
                        MULTI_SIGNATURE_VALUE_TIMESTAMP):
        assert value_field in multi_sig_value

    assert validate_multi_signature(state_proof, txnPoolNodeSet)
    assert validate_proof_for_write(reply)
def test_dirty_read(looper, txnPoolNodeSet, sdk_pool_handle, sdk_wallet_client):
    """
    Tests the case when read request comes before write request is
    not executed on some nodes
    """
    for slow_node in list(txnPoolNodeSet)[2:4]:
        logger.debug("Making node {} slow".format(slow_node))
        make_node_slow(slow_node)

    write_replies = sdk_send_random_and_check(looper, txnPoolNodeSet,
                                              sdk_pool_handle,
                                              sdk_wallet_client, 1)
    written_seq_no = get_seq_no(write_replies[0][1]["result"])

    _, did = sdk_wallet_client
    get_txn_req = sdk_build_get_txn_request(looper, did, written_seq_no)
    req_handle = sdk_sign_and_send_prepared_request(looper, sdk_wallet_client,
                                                    sdk_pool_handle,
                                                    get_txn_req)
    read_replies = sdk_get_and_check_replies(looper, [req_handle])
    payloads = [str(get_payload_data(reply['result'][DATA]))
                for _, reply in read_replies]
    # every node must answer the read with the same txn payload
    assert len(set(payloads)) == 1
def test_get_txn_response_as_expected(looper, txnPoolNodeSet, sdk_pool_handle,
                                      sdk_wallet_steward):
    """GET_TXN for a just-written NYM returns exactly the write reply's txn."""
    seed = randomString(32)
    wh, _ = sdk_wallet_steward

    # filling nym request and getting steward did
    # if role == None, we are adding client
    nym_request, new_did = looper.loop.run_until_complete(
        prepare_nym_request(sdk_wallet_steward, seed, None, None))

    # sending request using 'sdk_' functions
    request_couple = sdk_sign_and_send_prepared_request(
        looper, sdk_wallet_steward, sdk_pool_handle, nym_request)
    write_result = sdk_get_and_check_replies(
        looper, [request_couple])[0][1]['result']

    _, steward_did = sdk_wallet_steward
    get_txn_request = sdk_build_get_txn_request(looper, steward_did,
                                                get_seq_no(write_result))
    request_couple = sdk_sign_and_send_prepared_request(
        looper, sdk_wallet_steward, sdk_pool_handle, get_txn_request)
    read_result = sdk_get_and_check_replies(
        looper, [request_couple])[0][1]['result']

    assert write_result == read_result['data']
def add(self, txn):
    """Append a txn to the ledger, assigning the next seq_no if it lacks one."""
    if get_seq_no(txn) is None:
        self._append_seq_no([txn], self.seqNo)
    info = super().add(txn)
    # seqNo already lives inside the transaction itself; drop the duplicate
    # key from the merkle info (tolerate its absence)
    info.pop(F.seqNo.name, None)
    return info
def _nymReply(self, result, preparedReq):
    """Record the seq_no of a committed NYM write on the tracked identity."""
    nym = get_reply_nym(result)
    identity = self._trustAnchored.get(nym)
    if not identity:
        logger.warning(
            "Target {} not found in trust anchored".format(nym))
        return
    identity.seqNo = get_seq_no(result)
def _attribReply(self, result, preparedReq):
    """Stamp matching stored attributes with the seq_no from the reply."""
    origin, dest, val = self._attrib_data_from_reply(result)
    for attrib in self.getAttributesForNym(dest):
        # RAW attributes are stored as JSON text; decode before comparing
        stored_val = (json.loads(attrib.value)
                      if attrib.ledgerStore == LedgerStore.RAW
                      else attrib.value)
        if attrib.origin == origin and stored_val == val:
            attrib.seqNo = get_seq_no(result)
def test_append_seq_no(ledger, looper, sdk_wallet_client):
    """_append_seq_no numbers txns consecutively after the given start."""
    start = 10
    numbered = ledger._append_seq_no(
        create_txns(looper, sdk_wallet_client), start)
    for offset, txn in enumerate(numbered, start=1):
        assert get_seq_no(txn) == start + offset
def test_claim_def_dynamic_validation_without_ref_to_not_schema(
        claim_def_request, schema_request,
        claim_def_handler: ClaimDefHandler, creator):
    """A CLAIM_DEF whose REF points at a non-SCHEMA txn (a NYM) is rejected."""
    nym = reqToTxn(Request(identifier=creator, operation={'type': NYM}))
    claim_def_request.operation[REF] = get_seq_no(nym)
    claim_def_handler.ledger.appendTxns([nym])

    with pytest.raises(InvalidClientRequest) as e:
        claim_def_handler.dynamic_validation(claim_def_request)
    # use pytest's public ExceptionInfo.value accessor rather than the
    # private _excinfo tuple
    assert "Mentioned seqNo ({}) isn't seqNo of the schema.".format(
        claim_def_request.operation[REF]) in e.value.args[0]
def domain_ledger():
    """Fake ledger backed by a plain dict keyed by seq_no."""
    ledger = FakeSomething()
    ledger.txn_list = {}

    def _by_seq_no(seq_no):
        return ledger.txn_list[seq_no]

    def _append(txns):
        ledger.txn_list.update({get_seq_no(txn): txn for txn in txns})

    # committed and uncommitted lookups share the same backing dict
    ledger.getBySeqNo = _by_seq_no
    ledger.appendTxns = _append
    ledger.get_by_seq_no_uncommitted = _by_seq_no
    return ledger
def check_fee_request_on_ledger(helpers, client_address, nym_result):
    """Verify the fee txn written to the token ledger for a NYM with fees."""
    transactions = helpers.node.get_last_ledger_transaction_on_nodes(
        TOKEN_LEDGER_ID)
    expected_change = MINT_TOKEN_AMOUNT - TXN_FEES[NYM]
    for fee_txn in transactions:
        fee_data = get_payload_data(fee_txn)
        # change output returns the minted amount minus the NYM fee
        assert fee_data[OUTPUTS] == [{ADDRESS: client_address,
                                      AMOUNT: expected_change}]
        assert fee_data[FEES] == TXN_FEES[NYM]
        assert get_seq_no(fee_txn) == 2

    helpers.node.assert_deducted_fees(NYM, get_seq_no(nym_result),
                                      TXN_FEES[NYM])

    demo_logger.log_header(step7_info)
    demo_logger.log_blue("Fee transaction found on Payment ledger:")
    demo_logger.log_yellow(demo_logger.format_json(transactions[0]))
def update_state(self, txn, prev_result, request, is_committed=False):
    """Write the TAA AML payload to state under both the 'latest' key and
    its version-specific key."""
    self._validate_txn_type(txn)
    payload = get_payload_data(txn)
    encoded = encode_state_value(payload,
                                 get_seq_no(txn),
                                 get_txn_time(txn),
                                 serializer=config_state_serializer)
    self.state.set(StaticTAAHelper.state_path_taa_aml_latest(), encoded)
    self.state.set(
        StaticTAAHelper.state_path_taa_aml_version(payload[AML_VERSION]),
        encoded)
def post_batch_committed(self, ledger_id, pp_time, committed_txns, state_root, txn_root):
    # Commit fee side-effects for a batch: collect the committed txns that
    # had fees deducted (tracked under "<type>#<seq_no>" keys in
    # self.deducted_fees). XFER_PUBLIC is excluded — it carries its fee
    # within the transfer itself.
    committed_seq_nos_with_fees = [get_seq_no(t) for t in committed_txns
                                   if "{}#{}".format(get_type(t), get_seq_no(t)) in self.deducted_fees
                                   and get_type(t) != XFER_PUBLIC
                                   ]
    if len(committed_seq_nos_with_fees) > 0:
        # Use the token roots captured when this batch was applied (FIFO),
        # not the roots passed in for the ledger this callback fired for.
        txn_root, state_root = self.uncommitted_state_roots_for_batches.pop(0)
        r = TokenReqHandler.__commit__(self.utxo_cache, self.token_ledger,
                                       self.token_state,
                                       len(committed_seq_nos_with_fees),
                                       state_root,
                                       txn_root_serializer.serialize(txn_root),
                                       pp_time)
        # Attach each committed fee txn (in order) to its originating txn so
        # replies can report the fee payment.
        i = 0
        for txn in committed_txns:
            if get_seq_no(txn) in committed_seq_nos_with_fees:
                txn[FEES] = r[i]
                i += 1
def _fill_primaries(self, txn, three_pc_batch, last_audit_txn):
    """Set AUDIT_TXN_PRIMARIES on a new audit txn: either the full primaries
    list, or — when primaries are unchanged — an int delta pointing back to
    the last audit txn that stored the full list."""
    last_audit_txn_data = get_payload_data(last_audit_txn) if last_audit_txn is not None else None
    last_txn_value = last_audit_txn_data[AUDIT_TXN_PRIMARIES] if last_audit_txn_data else None
    current_primaries = three_pc_batch.primaries
    # 1. First audit txn: nothing to refer back to, store the full list
    if last_audit_txn_data is None:
        txn[AUDIT_TXN_PRIMARIES] = current_primaries
    # 2. Previous primaries field contains primary list
    # If primaries did not changed, we will store seq_no delta
    # between current txn and last persisted primaries, i.e.
    # we can find seq_no of last actual primaries, like:
    # last_audit_txn_seq_no - last_audit_txn[AUDIT_TXN_PRIMARIES]
    elif isinstance(last_txn_value, Iterable):
        if last_txn_value == current_primaries:
            txn[AUDIT_TXN_PRIMARIES] = 1
        else:
            txn[AUDIT_TXN_PRIMARIES] = current_primaries
    # 3. Previous primaries field is delta: resolve it back to the txn that
    # holds the full list, then either extend the chain or store a new list
    elif isinstance(last_txn_value, int) and last_txn_value < self.ledger.uncommitted_size:
        last_primaries_seq_no = get_seq_no(last_audit_txn) - last_txn_value
        last_primaries = get_payload_data(
            self.ledger.get_by_seq_no_uncommitted(
                last_primaries_seq_no))[AUDIT_TXN_PRIMARIES]
        if isinstance(last_primaries, Iterable):
            if last_primaries == current_primaries:
                # unchanged again: extend the delta chain by one
                txn[AUDIT_TXN_PRIMARIES] = last_txn_value + 1
            else:
                txn[AUDIT_TXN_PRIMARIES] = current_primaries
        else:
            raise LogicError(
                'Value, mentioned in primaries field must be a '
                'seq_no of a txn with primaries')
    # 4. That cannot be
    else:
        raise LogicError(
            'Incorrect primaries field in audit ledger (seq_no: {}. value: {})'
            .format(get_seq_no(last_audit_txn), last_txn_value))
def test_multiple_inputs_outputs_without_change(
    helpers,
    addresses,
    initial_mint
):
    """Spend three whole mint UTXOs into two outputs consuming the full sum."""
    [address1, address2, address3, address4, address5] = addresses
    mint_seq_no = get_seq_no(initial_mint)

    inputs = [{"address": addr, "seqNo": mint_seq_no}
              for addr in (address1, address2, address3)]
    outputs = [
        {"address": address4, "amount": 200},
        {"address": address5, "amount": 100},
    ]

    request = helpers.request.transfer(inputs, outputs)
    response = helpers.sdk.send_and_check_request_objects([request])
    assert response[0][1]["result"]["reqSignature"] != {}
    xfer_seq_no = get_seq_no(helpers.sdk.get_first_result(response))

    (address1_utxos, address2_utxos, address3_utxos,
     address4_utxos, address5_utxos) = \
        helpers.general.get_utxo_addresses(addresses)

    # all three source addresses are fully spent
    for spent_utxos in (address1_utxos, address2_utxos, address3_utxos):
        assert spent_utxos == []
    # destinations keep their mint UTXO plus the transferred amount
    assert address4_utxos == [
        {"address": address4, "seqNo": mint_seq_no, "amount": 100},
        {"address": address4, "seqNo": xfer_seq_no, "amount": 200},
    ]
    assert address5_utxos == [
        {"address": address5, "seqNo": mint_seq_no, "amount": 100},
        {"address": address5, "seqNo": xfer_seq_no, "amount": 100},
    ]
def test_revert_for_all_after_view_change(looper, helpers,
                                          nodeSetWithIntegratedTokenPlugin,
                                          sdk_pool_handle, fees_set,
                                          mint_tokens, addresses, fees):
    # Batches ordered while commits/prepares are delayed must be reverted on
    # a view change and then successfully re-ordered afterwards.
    node_set = nodeSetWithIntegratedTokenPlugin
    current_amount = get_amount_from_token_txn(mint_tokens)
    seq_no = get_seq_no(mint_tokens)
    reverted_node = nodeSetWithIntegratedTokenPlugin[-1]

    # baseline: one NYM-with-fees and one transfer go through normally
    current_amount, seq_no, _ = send_and_check_nym_with_fees(
        helpers, fees_set, seq_no, looper, addresses, current_amount)
    current_amount, seq_no, _ = send_and_check_transfer(
        helpers, addresses, fees, looper, current_amount, seq_no)
    ensure_all_nodes_have_same_data(looper, node_set)

    # stall ordering (COMMITs and PREPAREs delayed) and send two more requests
    with delay_rules([n.nodeIbStasher for n in node_set], cDelay(), pDelay()):
        len_batches_before = len(reverted_node.master_replica.batches)
        current_amount, seq_no, resp1 = send_and_check_transfer(
            helpers, addresses, fees, looper, current_amount, seq_no,
            check_reply=False)
        current_amount, seq_no, resp2 = send_and_check_nym_with_fees(
            helpers, fees_set, seq_no, looper, addresses, current_amount,
            check_reply=False)
        looper.runFor(
            waits.expectedPrePrepareTime(
                len(nodeSetWithIntegratedTokenPlugin)))
        len_batches_after = len(reverted_node.master_replica.batches)
        """
        Checks, that we have a 2 new batches
        """
        assert len_batches_after - len_batches_before == 2
        # trigger a view change while those batches are still un-ordered
        for n in node_set:
            n.view_changer.on_master_degradation()
        ensure_view_change_complete(looper, nodeSetWithIntegratedTokenPlugin)
        looper.run(
            eventually(
                lambda: assertExp(reverted_node.mode == Mode.participating)))
    ensure_all_nodes_have_same_data(looper, node_set)

    # the stalled requests are re-ordered after the revert and get replies
    sdk_get_and_check_replies(looper, resp1)
    sdk_get_and_check_replies(looper, resp2)
    send_and_check_nym_with_fees(helpers, fees_set, seq_no, looper,
                                 addresses, current_amount)
    ensure_all_nodes_have_same_data(looper, node_set)
def updateNym(self, nym, txn, isCommitted=True):
    """Update the nym and refresh its identity-cache entry from the txn."""
    updatedData = super().updateNym(nym, txn, isCommitted=isCommitted)
    self.idrCache.set(nym,
                      seqNo=get_seq_no(txn),
                      txnTime=get_txn_time(txn),
                      ta=updatedData.get(f.IDENTIFIER.nm),
                      role=updatedData.get(ROLE),
                      verkey=updatedData.get(VERKEY),
                      isCommitted=isCommitted)
def update_state(self, txn, prev_result, request, is_committed=False):
    """Write the frozen-ledgers list derived from the txn into state."""
    self._validate_txn_type(txn)
    frozen_ledgers = self.make_frozen_ledgers_list(
        get_payload_data(txn)[LEDGERS_IDS])
    encoded = encode_state_value(frozen_ledgers,
                                 get_seq_no(txn),
                                 get_txn_time(txn))
    self.state.set(
        StaticLedgersFreezeHelper.make_state_path_for_frozen_ledgers(),
        encoded)
    return txn
def updateNym(self, nym, txn, isCommitted=True):
    """Delegate the nym update, then mirror the result into the idr cache."""
    updated = super().updateNym(nym, txn, isCommitted=isCommitted)
    seq_no = get_seq_no(txn)
    txn_time = get_txn_time(txn)
    self.idrCache.set(nym,
                      seqNo=seq_no,
                      txnTime=txn_time,
                      ta=updated.get(f.IDENTIFIER.nm),
                      role=updated.get(ROLE),
                      verkey=updated.get(VERKEY),
                      isCommitted=isCommitted)
def _get_last_audited_primaries(self):
    """Return the primaries from the last committed audit txn, resolving
    the stored seq_no delta if that is what the txn holds."""
    audit_ledger = self.node.getLedger(AUDIT_LEDGER_ID)
    last_txn = audit_ledger.get_last_committed_txn()
    primaries = get_payload_data(last_txn)[AUDIT_TXN_PRIMARIES]
    if isinstance(primaries, int):
        # an int is a delta back to the audit txn holding the actual list
        referenced_seq_no = get_seq_no(last_txn) - primaries
        primaries = get_payload_data(
            audit_ledger.getBySeqNo(referenced_seq_no))[AUDIT_TXN_PRIMARIES]
    return primaries
def claim_def_request(creator, schema):
    """Build a CLAIM_DEF request referencing the given schema txn."""
    operation = {
        'type': CLAIM_DEF,
        'ref': get_seq_no(schema),
        'verkey': randomString(),
        'data': {}
    }
    return Request(identifier=creator,
                   reqId=5,
                   signature="sig",
                   operation=operation)
def update_state(self, txn, prev_result, request, is_committed=False):
    """Refresh the idr-cache entry for the txn's target nym from prev_result."""
    nym = get_payload_data(txn).get(TARGET_NYM)
    self.database_manager.idr_cache.set(
        nym,
        seqNo=get_seq_no(txn),
        txnTime=get_txn_time(txn),
        ta=prev_result.get(f.IDENTIFIER.nm),
        role=prev_result.get(ROLE),
        verkey=prev_result.get(VERKEY),
        isCommitted=is_committed)
def test_get_utxo_utxos_in_order(helpers, addresses):
    """
    In response of GET_UTXO make sure all UTXOs are ordered in the same way;
    ascending order of seq_no
    """
    address_1, address_2 = addresses
    total = 1000
    mint_result = helpers.general.do_mint(
        [{"address": address_1, "amount": total}])
    seq_no = get_seq_no(mint_result)

    # 10 transfers, each moving a small random amount to address_2 and the
    # change back to address_1; each transfer spends the previous change UTXO
    remaining = total
    for _ in range(10):
        amount = randint(1, 10)
        inputs = [{"address": address_1, "seqNo": seq_no}]
        outputs = [
            {"address": address_2, "amount": amount},
            {"address": address_1, "amount": remaining - amount},
        ]
        request = helpers.request.transfer(inputs, outputs)
        response = helpers.sdk.send_and_check_request_objects([request])
        seq_no = get_seq_no(helpers.sdk.get_first_result(response))
        remaining -= amount

    request = helpers.request.get_utxo(address_2)
    responses = helpers.sdk.send_and_check_request_objects([request])
    for response in responses:
        result = response[1]['result']
        seq_nos = [output["seqNo"] for output in result[OUTPUTS]]
        assert seq_nos == sorted(seq_nos)
def prepare_schema_for_state(txn):
    """Build the (path, encoded value) state pair for a SCHEMA txn."""
    path = make_state_path_for_schema(get_from(txn),
                                      get_txn_schema_name(txn),
                                      get_txn_schema_version(txn))
    value = {SCHEMA_ATTR_NAMES: get_txn_schema_attr_names(txn)}
    value_bytes = encode_state_value(value,
                                     get_seq_no(txn),
                                     get_txn_time(txn))
    return path, value_bytes
def test_same_input_address_multiple_seq_nos(
        tokens_distributed,
        looper,  # noqa
        sdk_pool_handle,
        sdk_wallet_client,
        seller_token_wallet,
        seller_address,
        user1_address,
        user2_address,
        user3_address,
        user1_token_wallet,
        user2_token_wallet):
    # Send a PUBLIC_XFER with inputs using the same address but different
    # sequence nos.
    global seller_gets
    seq_no_1 = tokens_distributed

    # Each user sends their whole balance to the seller, so the seller ends
    # up with several UTXOs (distinct seq_nos) on the same address.
    for (w, a) in [(user1_token_wallet, user1_address),
                   (user2_token_wallet, user2_address)]:
        inputs = [
            [w, a, seq_no_1],
        ]
        amount = w.get_total_address_amount(address=a)
        outputs = [{"address": seller_address, "amount": amount}]
        send_xfer(looper, inputs, outputs, sdk_pool_handle)

    res1 = send_get_utxo(looper, seller_address, sdk_wallet_client,
                         sdk_pool_handle)
    assert len(res1[OUTPUTS]) > 1

    update_token_wallet_with_result(seller_token_wallet, res1)
    # Spend every UTXO on the seller's single address (same address, several
    # seq_nos) in one transfer to user3.
    inputs = []
    output_amount = 0
    for s, amt in list(
            seller_token_wallet.get_all_address_utxos(
                seller_address).values())[0]:
        inputs.append([seller_token_wallet, seller_address, s])
        output_amount += amt
    outputs = [{"address": user3_address, "amount": output_amount}]
    new_seq_no = get_seq_no(
        send_xfer(looper, inputs, outputs, sdk_pool_handle))

    res2 = send_get_utxo(looper, user3_address, sdk_wallet_client,
                         sdk_pool_handle)
    assert len(res2[OUTPUTS]) > 0
    # user3 must hold one UTXO at the new seq_no with the combined amount
    for output in res2[OUTPUTS]:
        if output["seqNo"] == new_seq_no:
            assert output["amount"] == output_amount
            break
    else:
        raise AssertionError('Needed to find output {}:{} with val {} but not '
                             'found'.format(user3_address, new_seq_no,
                                            output_amount))
def test_xfer_breakdown_and_consolidate(helpers, addresses):
    """
    Breakdown utxo into separate utxos with different sequence numbers and
    then combine them.
    """
    amount = 10
    [address1, address2] = addresses

    # Mint an extra token, so all xfer requests can have a change value.
    outputs = [{ADDRESS: address1, AMOUNT: amount + 1}]
    mint_result = helpers.general.do_mint(outputs)
    seq_no = get_seq_no(mint_result)
    # the first transfer below lands at the next seq_no after the mint
    xfer_seq_no = seq_no + 1

    # Break the minted UTXO into `amount` single-token UTXOs on address2,
    # each transfer spending the previous change UTXO on address1.
    for change in range(0, amount):
        inputs = [{"source": utxo_from_addr_and_seq_no(address1, seq_no)}]
        outputs = [
            {ADDRESS: address2, AMOUNT: 1},
            {ADDRESS: address1, AMOUNT: amount - change}
        ]
        result = helpers.general.do_transfer(inputs, outputs)
        seq_no = get_seq_no(result)

    utxos = helpers.general.get_utxo_addresses([address2])[0]
    # address2 now holds one 1-token UTXO per transfer, in seq_no order
    for seq_no in range(xfer_seq_no, xfer_seq_no + amount):
        assert utxos[seq_no - xfer_seq_no][PAYMENT_ADDRESS] == address2
        assert utxos[seq_no - xfer_seq_no][AMOUNT] == 1

    # Consolidate: spend all the single-token UTXOs back into one.
    expected_utxos = [
        {"source": utxo_from_addr_and_seq_no(address2, seq_no)}
        for seq_no in range(xfer_seq_no, xfer_seq_no + amount)
    ]
    outputs = [{ADDRESS: address2, AMOUNT: amount}]
    result = helpers.general.do_transfer(expected_utxos, outputs)

    utxos = helpers.general.get_utxo_addresses([address2])[0]
    assert utxos[0][PAYMENT_ADDRESS] == address2
    assert utxos[0][AMOUNT] == amount
def _update_state_xfer_public(self, txn, is_committed=False):
    """Apply an XFER_PUBLIC txn to state: spend every input, then create
    every output at the txn's own seq_no."""
    payload = get_payload_data(txn)
    for inp in payload[INPUTS]:
        self._spend_input(inp["address"], inp["seqNo"],
                          is_committed=is_committed)
    # the txn's seq_no is the same for all outputs — hoist the loop-invariant
    # lookup out of the loop
    seq_no = get_seq_no(txn)
    for output in payload[OUTPUTS]:
        self._add_new_output(
            Output(output["address"], seq_no, output["amount"]),
            is_committed=is_committed)
def test_mint_after_paying_fees(helpers, addresses, mint_tokens, fees):
    """After an XFER that paid fees, a fresh mint adds a second UTXO."""
    xfer_result = send_transfer_request(helpers, mint_tokens, fees, addresses)
    address_giver = addresses[0]
    mint_result = helpers.general.do_mint(
        [{ADDRESS: address_giver, AMOUNT: 1000}])

    expected_utxos = [
        # change from the transfer: 900 minus the XFER fee
        {ADDRESS: address_giver, SEQNO: get_seq_no(xfer_result),
         AMOUNT: 900 - fees[XFER_PUBLIC]},
        # the new mint
        {ADDRESS: address_giver, SEQNO: get_seq_no(mint_result),
         AMOUNT: 1000},
    ]
    assert helpers.general.do_get_utxo(address_giver)[OUTPUTS] == expected_utxos
def test_mint_after_set_fees(helpers, fees_set):
    # Try another minting after setting fees
    address = helpers.wallet.create_address()
    mint_result = helpers.general.do_mint([{ADDRESS: address, AMOUNT: 60}])
    utxos = helpers.general.do_get_utxo(address)[OUTPUTS]
    assert utxos == [{ADDRESS: address,
                      SEQNO: get_seq_no(mint_result),
                      AMOUNT: 60}]
def prepare_schema_for_state(txn):
    """Build the (path, encoded value) state pair for a SCHEMA txn."""
    payload = get_payload_data(txn)
    schema_data = deepcopy(payload.get(DATA))
    # NAME and VERSION become path components, not part of the stored value
    name = schema_data.pop(NAME)
    version = schema_data.pop(VERSION)
    path = make_state_path_for_schema(get_from(txn), name, version)
    value_bytes = encode_state_value(schema_data,
                                     get_seq_no(txn),
                                     get_txn_time(txn))
    return path, value_bytes
def prepare_schema_for_state(txn):
    """Derive the state path and encoded value for a SCHEMA txn."""
    author = get_from(txn)
    schema_payload = deepcopy(get_payload_data(txn).get(DATA))
    # the name/version pair identifies the schema in the path; the rest of
    # the payload is the stored value
    path = make_state_path_for_schema(author,
                                      schema_payload.pop(NAME),
                                      schema_payload.pop(VERSION))
    seq_no = get_seq_no(txn)
    txn_time = get_txn_time(txn)
    return path, encode_state_value(schema_payload, seq_no, txn_time)
def test_revert_set_fees_and_view_change_all_nodes(
        nodeSetWithIntegratedTokenPlugin, xfer_mint_tokens, helpers, looper,
        xfer_addresses):
    """
    Send SET_FEES and init view change. Check that it is reverted and
    transaction passes with old fees
    """
    def _get_len_preprepares(n):
        # primaries count sent pre-prepares; non-primaries count received ones
        replica = n.master_replica
        return len(replica._ordering_service.sent_preprepares
                   if replica.isPrimary
                   else replica._ordering_service.prePrepares)

    def _check_len_pprs(old_pprs_len):
        # all nodes must agree on the count, exactly one above the baseline
        _len_pprs = set([
            _get_len_preprepares(n)
            for n in nodeSetWithIntegratedTokenPlugin
        ])
        _len_ppr = _len_pprs.pop()
        assert old_pprs_len + 1 == _len_ppr

    helpers.general.do_set_fees({NYM_FEES_ALIAS: 3})
    nodes = nodeSetWithIntegratedTokenPlugin
    node_stashers = [n.nodeIbStasher for n in nodeSetWithIntegratedTokenPlugin]
    seq_no = get_seq_no(xfer_mint_tokens)
    _old_len_pprs = set(
        [_get_len_preprepares(n) for n in nodeSetWithIntegratedTokenPlugin])
    assert len(_old_len_pprs)
    _old_len_ppr = _old_len_pprs.pop()

    # delay COMMITs so the SET_FEES(5) batch is pre-prepared but never committed
    with delay_rules_without_processing(node_stashers, cDelay()):
        helpers.general.set_fees_without_waiting({NYM_FEES_ALIAS: 5})
        looper.run(
            eventually(functools.partial(_check_len_pprs, _old_len_ppr)))
        send_and_check_nym_with_fees(helpers, {FEES: {NYM_FEES_ALIAS: 5}},
                                     seq_no, looper, xfer_addresses, 1000,
                                     check_reply=False)
        # catch-up discards the uncommitted batches, reverting SET_FEES(5)
        for n in nodeSetWithIntegratedTokenPlugin:
            n.start_catchup()
        for n in nodes:
            looper.run(
                eventually(lambda: assertExp(n.mode == Mode.participating)))
    ensure_all_nodes_have_same_data(looper, nodes)
    # the original fee (3) is still in effect after the revert
    send_and_check_nym_with_fees(helpers, {FEES: {NYM_FEES_ALIAS: 3}},
                                 seq_no, looper, xfer_addresses, 1000,
                                 check_reply=False)
    ensure_all_nodes_have_same_data(looper, nodes)
def test_multiple_inputs_outputs_without_change(helpers, addresses,
                                                initial_mint):
    """Spend every UTXO of three addresses into two outputs with no change."""
    [address1, address2, address3, address4, address5] = addresses

    inputs = helpers.general.get_utxo_addresses(
        [address1, address2, address3])
    inputs = [utxo for utxos in inputs for utxo in utxos]
    outputs = [
        {"address": address4, "amount": 200},
        {"address": address5, "amount": 100},
    ]
    request = helpers.request.transfer(inputs, outputs)
    response = helpers.sdk.send_and_check_request_objects([request])
    assert response[0][1]["result"]["reqSignature"] != {}
    # NOTE(review): the original also captured the mint and transfer seq_nos
    # here but never asserted on them; those dead assignments were removed.

    [
        address1_utxos,
        address2_utxos,
        address3_utxos,
        address4_utxos,
        address5_utxos
    ] = helpers.general.get_utxo_addresses(addresses)

    # all three source addresses are fully spent
    assert address1_utxos == []
    assert address2_utxos == []
    assert address3_utxos == []
    # destinations keep their mint UTXO (100) plus the transferred amounts
    assert address4_utxos[0][PAYMENT_ADDRESS] == address4
    assert address4_utxos[1][PAYMENT_ADDRESS] == address4
    assert address4_utxos[0][AMOUNT] == 100
    assert address4_utxos[1][AMOUNT] == 200
    assert address5_utxos[0][PAYMENT_ADDRESS] == address5
    assert address5_utxos[1][PAYMENT_ADDRESS] == address5
    assert address5_utxos[0][AMOUNT] == 100
    assert address5_utxos[1][AMOUNT] == 100
def test_two_mints_to_same_address(addresses, helpers):
    """Two mints to the same addresses create two distinct UTXOs apiece."""
    outputs = [{ADDRESS: address, AMOUNT: 100} for address in addresses]
    first_mint_seq_no = get_seq_no(helpers.general.do_mint(outputs))
    outputs = [{ADDRESS: address, AMOUNT: 200} for address in addresses]
    second_mint_seq_no = get_seq_no(helpers.general.do_mint(outputs))

    # the two mints landed at different ledger positions
    assert first_mint_seq_no != second_mint_seq_no

    all_utxos = helpers.general.get_utxo_addresses(addresses)
    for address, utxos in zip(addresses, all_utxos):
        assert utxos == [
            {ADDRESS: address, SEQNO: first_mint_seq_no, AMOUNT: 100},
            {ADDRESS: address, SEQNO: second_mint_seq_no, AMOUNT: 200},
        ]
def test_xfer_to_negative_output(helpers, addresses, initial_mint):
    """A transfer with a negative output amount must be NACKed."""
    [source_address, dest_address, *_] = addresses
    inputs = [{ADDRESS: source_address, SEQNO: get_seq_no(initial_mint)}]
    outputs = [
        {ADDRESS: dest_address, AMOUNT: -50},
        {ADDRESS: source_address, AMOUNT: 150},
    ]
    with pytest.raises(RequestNackedException):
        helpers.general.do_transfer(inputs, outputs)
def test_address_utxos(helpers, addresses):
    """
    Mint tokens and get the utxos for an address
    """
    address = addresses[0]
    outputs = [{"address": address, "amount": 1000}]
    # the mint's seq_no was previously captured into a local that was never
    # asserted on; the dead assignment was removed
    helpers.general.do_mint(outputs)

    get_utxo_result = helpers.general.get_utxo_addresses([address])[0]
    assert get_utxo_result[0][PAYMENT_ADDRESS] == address
    assert get_utxo_result[0][AMOUNT] == 1000
def test_claim_def_dynamic_validation_for_unknown_identifier(
        claim_def_request, schema_request,
        claim_def_handler: ClaimDefHandler):
    """A CLAIM_DEF from an identifier that fails authorization is rejected."""
    # force the authorization check to raise
    claim_def_handler.write_request_validator.validate = get_exception(True)
    schema_txn = reqToTxn(schema_request)
    claim_def_request.operation[REF] = get_seq_no(schema_txn)
    claim_def_handler.ledger.appendTxns([schema_txn])

    unauthorized_request = Request(identifier=randomString(),
                                   reqId=claim_def_request.reqId,
                                   operation=claim_def_request.operation)
    with pytest.raises(UnauthorizedClientRequest):
        claim_def_handler.dynamic_validation(unauthorized_request)
def post_batch_committed(self, ledger_id, pp_time, committed_txns, state_root,
                         txn_root):
    # Commit the token-tracker batch first to obtain the token ledger's
    # state/txn roots captured when this batch was applied.
    token_state_root, token_txn_root, _ = self.token_tracker.commit_batch()
    if ledger_id == TOKEN_LEDGER_ID:
        # fee deduction only applies to non-token ledgers
        return
    # Seq_nos of committed txns that had fees deducted (tracked under
    # "<type>#<seq_no>" keys); XFER_PUBLIC carries its fee inline.
    committed_seq_nos_with_fees = [
        get_seq_no(t)
        for t in committed_txns
        if get_type(t) != XFER_PUBLIC and "{}#{}".format(
            get_type(t), get_seq_no(t)) in self.deducted_fees
    ]
    if len(committed_seq_nos_with_fees) > 0:
        r = TokenReqHandler.__commit__(
            self.utxo_cache, self.token_ledger, self.token_state,
            len(committed_seq_nos_with_fees), token_state_root,
            txn_root_serializer.serialize(token_txn_root), pp_time)
        # Attach each committed fee txn (in order) to its originating txn.
        i = 0
        for txn in committed_txns:
            if get_seq_no(txn) in committed_seq_nos_with_fees:
                txn[FEES] = r[i]
                i += 1
        self.fee_txns_in_current_batch = 0
def assert_valid_minting(helpers, amount):
    """Mint `amount` to a fresh address and assert the resulting UTXO exists."""
    address = helpers.wallet.create_address()
    outputs = [{ADDRESS: address, AMOUNT: amount}]
    result = helpers.general.do_mint(outputs)
    utxos = helpers.general.get_utxo_addresses([address])[0]
    expected = {
        ADDRESS: address,
        SEQNO: get_seq_no(result),
        AMOUNT: amount
    }
    # The original filtered with `utxo is expected` — an identity comparison
    # that can never match a freshly-built dict — and never asserted the
    # result, so this helper could not fail. Compare by equality and assert.
    assert expected in utxos
def _addNym(self, txn, isCommitted=False) -> None:
    """Extract the target nym from a NYM txn and delegate to updateNym.

    The original also built a `data` dict (identifier/seq_no/txn_time/role/
    verkey) that was never used by anything — dead code removed; updateNym
    derives everything it needs from the txn itself.
    """
    txn_data = get_payload_data(txn)
    nym = txn_data.get(TARGET_NYM)
    self.updateNym(nym, txn, isCommitted=isCommitted)
def _addNym(self, txn, isCommitted=False) -> None:
    """Extract the target nym from a NYM txn and delegate to updateNym.

    A `data` dict was previously assembled here but never read or passed
    anywhere — dead code removed; updateNym works from the txn directly.
    """
    txn_data = get_payload_data(txn)
    nym = txn_data.get(TARGET_NYM)
    self.updateNym(nym, txn, isCommitted=isCommitted)
def prepare_attr_for_state(txn):
    """
    Make key(path)-value pair for state from ATTRIB or GET_ATTR

    :return: state path, state value, value for attribute store
    """
    assert get_type(txn) == ATTRIB
    txn_data = get_payload_data(txn)
    attr_type, attr_key, value = parse_attr_txn(txn_data)
    # state stores only a hash of the attribute value (empty if no value)
    hashed_value = hash_of(value) if value else ''
    value_bytes = encode_state_value(hashed_value,
                                     get_seq_no(txn),
                                     get_txn_time(txn))
    path = make_state_path_for_attr(txn_data[TARGET_NYM], attr_key,
                                    attr_type == HASH)
    return attr_type, path, value, hashed_value, value_bytes
def prepare_claim_def_for_state(txn):
    """Build the (path, encoded value) state pair for a CLAIM_DEF txn."""
    txn_data = get_payload_data(txn)
    schema_seq_no = txn_data.get(REF)
    if schema_seq_no is None:
        raise ValueError("'{}' field is absent, "
                         "but it must contain schema seq no".format(REF))
    data = txn_data.get(DATA)
    if data is None:
        raise ValueError("'{}' field is absent, "
                         "but it must contain components of keys"
                         .format(DATA))
    # signature type defaults to CL when not specified
    path = make_state_path_for_claim_def(get_from(txn),
                                         schema_seq_no,
                                         txn_data.get(SIGNATURE_TYPE, 'CL'))
    value_bytes = encode_state_value(data,
                                     get_seq_no(txn),
                                     get_txn_time(txn))
    return path, value_bytes
def prepare_revoc_reg_entry_accum_for_state(txn):
    """Build the (path, encoded value) state pair for a revocation registry
    entry accumulator."""
    author_did = get_from(txn)
    txn_data = get_payload_data(txn)
    revoc_reg_def_id = txn_data.get(REVOC_REG_DEF_ID)
    seq_no = get_seq_no(txn)
    txn_time = get_txn_time(txn)
    # all of these must be present on a committed txn
    for required in (author_did, revoc_reg_def_id, seq_no, txn_time):
        assert required
    path = make_state_path_for_revoc_reg_entry_accum(
        revoc_reg_def_id=revoc_reg_def_id)
    # TODO: do not duplicate seqNo here
    # doing this now just for backward-compatibility
    txn_data = deepcopy(txn_data)
    txn_data[f.SEQ_NO.nm] = seq_no
    txn_data[TXN_TIME] = txn_time
    value_bytes = encode_state_value(txn_data, seq_no, txn_time)
    return path, value_bytes
def put_into_seq_no_db(txn):
    """Index a txn's (ledger_id, seq_no) under its request digest; return
    the digest, or None for genesis txns."""
    # If there is no reqId, then it's genesis txn
    if get_req_id(txn) is None:
        return
    txn_copy = copy.deepcopy(txn)
    operation = get_payload_data(txn_copy)
    operation[TXN_TYPE] = get_type(txn_copy)
    request_dict = {
        f.IDENTIFIER.nm: get_from(txn_copy),
        f.REQ_ID.nm: get_req_id(txn_copy),
        OPERATION: operation,
    }
    protocol_version = get_protocol_version(txn_copy)
    if protocol_version is not None:
        request_dict[f.PROTOCOL_VERSION.nm] = protocol_version
    # the digest is computed exactly as for a signed client request
    digest = sha256(serialize_msg_for_signing(request_dict)).hexdigest()
    ledger_id = get_ledger_id_by_txn_type(operation[TXN_TYPE])
    record = str(ledger_id) + ReqIdrToTxn.delimiter + str(get_seq_no(txn_copy))
    dest_seq_no_db_storage.put(digest, record)
    return digest
def prepare_revoc_def_for_state(txn):
    """Build the (path, encoded value) state pair for a REVOC_REG_DEF txn."""
    author_did = get_from(txn)
    txn_data = get_payload_data(txn)
    cred_def_id = txn_data.get(CRED_DEF_ID)
    revoc_def_type = txn_data.get(REVOC_TYPE)
    revoc_def_tag = txn_data.get(TAG)
    # every path component must be present
    for component in (author_did, cred_def_id, revoc_def_type, revoc_def_tag):
        assert component
    path = make_state_path_for_revoc_def(author_did, cred_def_id,
                                         revoc_def_type, revoc_def_tag)
    seq_no = get_seq_no(txn)
    txn_time = get_txn_time(txn)
    assert seq_no
    assert txn_time
    value_bytes = encode_state_value(txn_data, seq_no, txn_time)
    return path, value_bytes
def appendTxns(self, txns: List):
    """Append txns to the uncommitted tree and return the covered
    (start, end) seq_no range with the txns.

    These transactions are not yet committed, so they do not go to the
    ledger; they must all carry a seq_no already.
    """
    missing_seq_no = [txn for txn in txns if get_seq_no(txn) is None]
    if missing_seq_no:
        raise PlenumValueError(
            'txns', txns,
            ("all txns should have defined seq_no, undefined in {}"
             .format(missing_seq_no))
        )
    old_uncommitted_size = self.size + len(self.uncommittedTxns)
    self.uncommittedTree = self.treeWithAppliedTxns(txns,
                                                    self.uncommittedTree)
    self.uncommittedRootHash = self.uncommittedTree.root_hash
    self.uncommittedTxns.extend(txns)
    if not txns:
        # empty append: report a degenerate range
        return (old_uncommitted_size, old_uncommitted_size), txns
    return (old_uncommitted_size + 1,
            old_uncommitted_size + len(txns)), txns
def test_pool_genesis_txns(bootstrap, pool_genesis_file):
    """Every pool genesis txn is a well-formed NODE txn with full node data."""
    serializer = JsonSerializer()
    with open(pool_genesis_file) as f:
        for line in store_utils.cleanLines(f.readlines()):
            txn = serializer.deserialize(line)
            assert get_seq_no(txn)
            assert get_txn_id(txn)
            assert get_payload_data(txn)
            assert get_type(txn) == NODE
            assert get_version(txn) == "1"
            assert get_protocol_version(txn) is None
            assert get_payload_data(txn)[TARGET_NYM]
            data = get_payload_data(txn).get(DATA)
            assert data
            # every connectivity/identity field must be populated
            for field in (ALIAS, CLIENT_IP, CLIENT_PORT, NODE_IP, NODE_PORT,
                          BLS_KEY, BLS_KEY_PROOF):
                assert data[field]
            assert data[SERVICES] == [VALIDATOR]
def test_domain_genesis_txns(bootstrap, domain_genesis_file):
    """Domain genesis txns are NYMs ordered Trustees, Stewards, Clients."""
    serializer = JsonSerializer()
    with open(domain_genesis_file) as f:
        for i, line in enumerate(store_utils.cleanLines(f.readlines())):
            txn = serializer.deserialize(line)
            assert get_seq_no(txn)
            assert get_payload_data(txn)
            assert get_type(txn) == NYM
            assert get_version(txn) == "1"
            assert get_protocol_version(txn) is None
            payload = get_payload_data(txn)
            assert payload[VERKEY]
            assert payload[TARGET_NYM]
            assert ALIAS not in payload

            # expect Trustees, then Stewards, then Clients
            if i < TRUSTEE_COUNT:
                expected_role = TRUSTEE
            elif i < TRUSTEE_COUNT + NODE_COUNT:
                expected_role = STEWARD
            else:
                expected_role = None
            assert payload.get(ROLE) == expected_role
def _submitData(result, error):
    """Extract (payload data, seq_no) from a reply result."""
    return get_payload_data(result), get_seq_no(result)
def test_get_seq_no_none(txn):
    """get_seq_no returns None when txn metadata has no seqNo."""
    metadata = txn["txnMetadata"]
    # remove the key if present (tolerate its absence)
    metadata.pop("seqNo", None)
    assert get_seq_no(txn) is None
def _poolConfigReply(self, result, preparedReq):
    """Record the committed seq_no on the tracked pool config."""
    _, cfgKey = preparedReq
    self.getPoolConfig(cfgKey).seqNo = get_seq_no(result)
def _poolUpgradeReply(self, result, preparedReq):
    """Record the committed seq_no on the tracked pool upgrade."""
    _, upgKey = preparedReq
    self.getPoolUpgrade(upgKey).seqNo = get_seq_no(result)
def _nodeReply(self, result, preparedReq):
    """Record the committed seq_no on the tracked node."""
    _, nodeKey = preparedReq
    self.getNode(nodeKey).seqNo = get_seq_no(result)
def test_append_txns_correct_seq_nos(ledger_with_batches_appended):
    """Uncommitted txns are numbered consecutively after the committed seqNo."""
    ledger = ledger_with_batches_appended
    base = ledger.seqNo
    for offset, txn in enumerate(ledger.uncommittedTxns, start=1):
        assert get_seq_no(txn) == base + offset
def test_get_seq_no(txn):
    """get_seq_no extracts the seqNo carried by the fixture txn."""
    expected_seq_no = 144
    assert get_seq_no(txn) == expected_seq_no