def test_token_req_handler_apply_MINT_PUBLIC_success_with_inputs(
        helpers, addresses, token_handler_a):
    """A MINT_PUBLIC request with (extraneous) inputs still applies successfully.

    The applied txn must match a freshly built TxnResponse in payload,
    req id, sender and signatures.
    """
    [address1, address2] = addresses
    outputs = [{"address": address1, "amount": 40},
               {"address": address2, "amount": 20}]
    request = helpers.request.mint(outputs)
    request.operation[INPUTS] = [[address1, 1]]
    seq_no, txn = token_handler_a.apply(request, CONS_TIME)
    expected = TxnResponse(
        MINT_PUBLIC,
        request.operation,
        signatures=request.signatures,
        req_id=request.reqId,
        frm=request._identifier,
    ).form_response()
    assert get_payload_data(txn) == get_payload_data(expected)
    assert get_req_id(txn) == get_req_id(expected)
    assert get_from(txn) == get_from(expected)
    # Fixed: the original asserted get_sorted_signatures(txn) against itself,
    # which is vacuously true; the signatures must be compared to `expected`.
    assert get_sorted_signatures(txn) == get_sorted_signatures(expected)
def check_request_ordered(node, request: Request):
    """Return True iff *request* was ordered into *node*'s domain ledger.

    Raises ValueError when no txn with the request's reqId and identifier
    is found.
    """
    # it's ok to iterate through all txns since this is a test
    for _, txn in node.domainLedger.getAllTxn():
        txn_req_id = get_req_id(txn)
        txn_frm = get_from(txn)
        if txn_req_id is None or txn_frm is None:
            continue
        if txn_req_id == request.reqId and txn_frm == request.identifier:
            return True
    raise ValueError('{} request not ordered by node {}'.format(request, node.name))
def prepare_buy_for_state(txn):
    """Build the (key, serialized value) pair a buy txn stores in state."""
    from common.serializers.serialization import domain_state_serializer
    payload = get_payload_data(txn)
    serialized_value = domain_state_serializer.serialize({"amount": payload['amount']})
    state_key = TestDomainRequestHandler.prepare_buy_key(get_from(txn), get_req_id(txn))
    return state_key, serialized_value
def update_state(self, txn, prev_result, request, is_committed=False):
    """Apply a NYM txn to state and return the merged nym record.

    Merges the txn's role/verkey into the existing record (creating it with
    None defaults and the author as TrustAnchor when new), updates the
    steward count, and persists the serialized record.
    """
    self._validate_txn_type(txn)
    nym = get_payload_data(txn).get(TARGET_NYM)
    existing_data = get_nym_details(self.state, nym, is_committed=is_committed)
    txn_data = get_payload_data(txn)
    new_data = {}
    if not existing_data:
        # New nym being added to state, set the TrustAnchor
        new_data[f.IDENTIFIER.nm] = get_from(txn)
        # New nym being added to state, set the role and verkey to None, this makes
        # the state data always have a value for `role` and `verkey` since we allow
        # clients to omit specifying `role` and `verkey` in the request consider a
        # default value of None
        new_data[ROLE] = None
        new_data[VERKEY] = None

    # Fixed: the original did `new_data[ROLE] = txn_data.get(ROLE, None)`
    # unconditionally, which reset an existing nym's role to None on any txn
    # that omits ROLE (e.g. a plain verkey rotation). Guard like VERKEY and
    # like the sibling nym handlers do.
    if ROLE in txn_data:
        new_data[ROLE] = txn_data[ROLE]
    if VERKEY in txn_data:
        new_data[VERKEY] = txn_data[VERKEY]
    new_data[F.seqNo.name] = get_seq_no(txn)
    new_data[TXN_TIME] = get_txn_time(txn)
    self.__update_steward_count(new_data, existing_data)
    existing_data.update(new_data)
    val = self.state_serializer.serialize(existing_data)
    key = self.gen_state_key(txn)
    self.state.set(key, val)
    return existing_data
def check_ledger_after_upgrade(
        node_set,
        allowed_actions,
        ledger_size,
        expected_version,
        allowed_txn_types=(NODE_UPGRADE,),
        node_ids=None):
    """Assert every node's config ledger reflects a completed upgrade.

    Checks ledger size, that all txns are of an allowed type and action,
    that the txn authors match *node_ids* (when given), and that exactly
    one version — *expected_version* — was recorded.

    Fixed: the mutable default `allowed_txn_types=[NODE_UPGRADE]` is now an
    immutable tuple, and the builtin `type` is no longer shadowed.
    """
    versions = set()
    for node in node_set:
        assert len(node.configLedger) == ledger_size
        ids = set()
        for _, txn in node.configLedger.getAllTxn():
            txn_type = get_type(txn)
            assert txn_type in allowed_txn_types
            txn_data = get_payload_data(txn)
            data = txn_data
            if txn_type == NODE_UPGRADE:
                # NODE_UPGRADE wraps its action/version under DATA.
                data = txn_data[DATA]
            assert data[ACTION]
            assert data[ACTION] in allowed_actions
            ids.add(get_from(txn))
            assert data[VERSION]
            versions.add(data[VERSION])
        ids.add(node.id)
        if node_ids:
            assert ids == set(node_ids)
    assert len(versions) == 1
    assert list(versions)[0] == expected_version
def test_proof_in_write_reply(looper, txnPoolNodeSet, sdk_pool_handle, sdk_wallet_client):
    """A write reply for a 'buy' txn must carry a complete, verifiable state proof."""
    resp = sdk_send_random_and_check(looper, txnPoolNodeSet, sdk_pool_handle, sdk_wallet_client, 1)
    req = resp[0][0]
    result = resp[0][1]['result']
    assert result
    assert get_type(result) == "buy"
    # The reply must echo the original request's identity and carry ordering info.
    assert get_from(result) == req[f.IDENTIFIER.nm]
    assert get_req_id(result) == req[f.REQ_ID.nm]
    assert get_seq_no(result)
    assert get_txn_time(result)
    # The state proof must be structurally complete: root hash,
    # BLS multi-signature and the proof nodes.
    assert STATE_PROOF in result
    state_proof = result[STATE_PROOF]
    assert ROOT_HASH in state_proof
    assert MULTI_SIGNATURE in state_proof
    assert PROOF_NODES in state_proof
    multi_sig = state_proof[MULTI_SIGNATURE]
    assert MULTI_SIGNATURE_SIGNATURE in multi_sig
    assert MULTI_SIGNATURE_PARTICIPANTS in multi_sig
    assert MULTI_SIGNATURE_VALUE in multi_sig
    multi_sig_value = multi_sig[MULTI_SIGNATURE_VALUE]
    assert MULTI_SIGNATURE_VALUE_LEDGER_ID in multi_sig_value
    assert MULTI_SIGNATURE_VALUE_STATE_ROOT in multi_sig_value
    assert MULTI_SIGNATURE_VALUE_TXN_ROOT in multi_sig_value
    assert MULTI_SIGNATURE_VALUE_POOL_STATE_ROOT in multi_sig_value
    assert MULTI_SIGNATURE_VALUE_TIMESTAMP in multi_sig_value
    # Finally, cryptographic validation against the current pool.
    assert validate_multi_signature(state_proof, txnPoolNodeSet)
    assert validate_proof_for_write(result)
def test_fill_ts_store_after_catchup(txnPoolNodeSet, looper, sdk_pool_handle,
                                     sdk_wallet_steward, tconf, tdir,
                                     allPluginsPath):
    """A node that catches up missed txns must also populate its timestamp store.

    Disconnects one node, orders txns without it, restarts it, and verifies
    that timestamp -> state-root lookups resolve the caught-up txns.
    """
    sdk_send_random_and_check(looper, txnPoolNodeSet,
                              sdk_pool_handle, sdk_wallet_steward, 5)
    node_to_disconnect = txnPoolNodeSet[-1]
    disconnect_node_and_ensure_disconnected(looper, txnPoolNodeSet,
                                            node_to_disconnect)
    looper.removeProdable(name=node_to_disconnect.name)
    # Order txns while the node is down; it must obtain them via catch-up.
    sdk_replies = sdk_send_random_and_check(looper, txnPoolNodeSet,
                                            sdk_pool_handle,
                                            sdk_wallet_steward, 2)
    node_to_disconnect = start_stopped_node(node_to_disconnect, looper, tconf,
                                            tdir, allPluginsPath)
    txnPoolNodeSet[-1] = node_to_disconnect
    looper.run(checkNodesConnected(txnPoolNodeSet))
    waitNodeDataEquality(looper, node_to_disconnect, *txnPoolNodeSet,
                         exclude_from_check=['check_last_ordered_3pc_backup'])
    req_handler = node_to_disconnect.read_manager.request_handlers[GET_BUY]
    for reply in sdk_replies:
        key = BuyHandler.prepare_buy_key(get_from(reply[1]['result']),
                                         get_req_id(reply[1]['result']))
        # The ts store must map each txn's time to a state root that
        # already contains that txn's state entry.
        root_hash = req_handler.database_manager.ts_store.get_equal_or_prev(
            get_txn_time(reply[1]['result']))
        assert root_hash
        from_state = req_handler.state.get_for_root_hash(root_hash=root_hash,
                                                         key=key)
        assert domain_state_serializer.deserialize(from_state)['amount'] == \
            get_payload_data(reply[1]['result'])['amount']
def update_state(self, txn, prev_result, request, is_committed=False) -> None:
    """Store a rich-schema object in state under its id and its (type, name, version) key.

    The secondary key maps to the primary key, giving a two-step lookup.
    """
    self._validate_txn_type(txn)
    payload = get_payload_data(txn)
    primary_key = payload[RS_ID].encode()
    secondary_key = self.make_secondary_key(payload[RS_TYPE],
                                            payload[RS_NAME],
                                            payload[RS_VERSION])
    value = {
        RS_ID: payload[RS_ID],
        RS_TYPE: payload[RS_TYPE],
        RS_NAME: payload[RS_NAME],
        RS_VERSION: payload[RS_VERSION],
        RS_CONTENT: payload[RS_CONTENT],
        TXN_PAYLOAD_METADATA_FROM: get_from(txn),
        TXN_PAYLOAD_METADATA_ENDORSER: get_endorser(txn),
        TXN_PAYLOAD_VERSION: get_payload_txn_version(txn),
    }
    value_bytes = encode_state_value(value, get_seq_no(txn), get_txn_time(txn))
    self.state.set(primary_key, value_bytes)
    # Secondary index: resolves (type, name, version) to the object's id.
    self.state.set(secondary_key, primary_key)
async def getSchema(self, id: ID) -> Optional[Schema]:
    """Fetch a schema from the ledger by key (issuer, name, version) or by seq no.

    Raises SchemaNotFoundError when no schema with attribute names is found.
    """
    data = None
    issuer_id = None
    if id.schemaKey:
        # Lookup by (issuer DID, name, version) via GET_SCHEMA.
        issuer_id = id.schemaKey.issuerId
        op = {
            TARGET_NYM: issuer_id,
            TXN_TYPE: GET_SCHEMA,
            DATA: {
                NAME: id.schemaKey.name,
                VERSION: id.schemaKey.version,
            }
        }
        data, seqNo = await self._sendGetReq(op)
    else:
        # Lookup by ledger sequence number via GET_TXNS.
        op = {
            f.LEDGER_ID.nm: DOMAIN_LEDGER_ID,
            TXN_TYPE: GET_TXNS,
            DATA: id.schemaId
        }
        res, seqNo = await self._sendGetReq(op)
        if res and get_type(res) == SCHEMA:
            issuer_id = get_from(res)
            data = get_payload_data(res)[DATA]
    if not data or ATTR_NAMES not in data:
        raise SchemaNotFoundError('No schema with ID={} and key={}'.format(
            id.schemaId, id.schemaKey))
    return Schema(name=data[NAME],
                  version=data[VERSION],
                  attrNames=data[ATTR_NAMES],
                  issuerId=issuer_id,
                  seqId=seqNo)
def updateNym(self, nym, txn, isCommitted=True):
    """Merge a NYM txn into the state record for *nym* and return the merged record."""
    existingData = self.getNymDetails(self.state, nym, isCommitted=isCommitted)
    txn_data = get_payload_data(txn)
    newData = {}
    if not existingData:
        # First occurrence of this nym: record the txn author (TrustAnchor)
        # and give `role`/`verkey` explicit None defaults so the state record
        # always carries both keys, even when the request omitted them.
        newData = {f.IDENTIFIER.nm: get_from(txn), ROLE: None, VERKEY: None}
    for field in (ROLE, VERKEY):
        if field in txn_data:
            newData[field] = txn_data[field]
    newData[F.seqNo.name] = get_seq_no(txn)
    newData[TXN_TIME] = get_txn_time(txn)
    existingData.update(newData)
    serialized = self.stateSerializer.serialize(existingData)
    self.state.set(self.nym_to_state_key(nym), serialized)
    return existingData
def check_ledger_after_upgrade(
        node_set,
        allowed_actions,
        ledger_size,
        expected_version,
        allowed_txn_types=(NODE_UPGRADE,),
        node_ids=None):
    """Assert every node's config ledger reflects a completed upgrade.

    Checks ledger size, that all txns are of an allowed type and action,
    that the txn authors match *node_ids* (when given), and that exactly
    one version — *expected_version* — was recorded.

    Fixed: removed the debug `print` left in the loop, replaced the mutable
    default `allowed_txn_types=[NODE_UPGRADE]` with an immutable tuple, and
    stopped shadowing the builtin `type`.
    """
    versions = set()
    for node in node_set:
        assert len(node.configLedger) == ledger_size
        ids = set()
        for _, txn in node.configLedger.getAllTxn():
            txn_type = get_type(txn)
            assert txn_type in allowed_txn_types
            txn_data = get_payload_data(txn)
            data = txn_data
            if txn_type == NODE_UPGRADE:
                # NODE_UPGRADE wraps its action/version under DATA.
                data = txn_data[DATA]
            assert data[ACTION]
            assert data[ACTION] in allowed_actions
            ids.add(get_from(txn))
            assert data[VERSION]
            versions.add(data[VERSION])
        ids.add(node.id)
        if node_ids:
            assert ids == set(node_ids)
    assert len(versions) == 1
    assert list(versions)[0] == expected_version
def prepare_buy_for_state(txn):
    """Build the (key, serialized value) pair a buy txn stores in state."""
    amount = get_payload_data(txn)['amount']
    serialized_value = domain_state_serializer.serialize({"amount": amount})
    state_key = BuyHandler.prepare_buy_key(get_from(txn), get_req_id(txn))
    return state_key, serialized_value
def _addRevocRegEntry(self, txn, isCommitted=False) -> None:
    """Apply a revocation-registry entry via the strategy for its issuance type."""
    payload = get_payload_data(txn)
    current_entry, revoc_def = self._get_current_revoc_entry_and_revoc_def(
        author_did=get_from(txn),
        revoc_reg_def_id=payload[REVOC_REG_DEF_ID],
        req_id=get_req_id(txn))
    # The registry definition's issuance type selects the write strategy.
    strategy_cls = self.get_revocation_strategy(revoc_def[VALUE][ISSUANCE_TYPE])
    strategy_cls(self.state).write(current_entry, txn)
def update_state(self, txn, prev_result, request, is_committed=False):
    """Apply a revocation-registry-entry txn to state via its issuance strategy."""
    self._validate_txn_type(txn)
    current_entry, revoc_def = self._get_current_revoc_entry_and_revoc_def(
        author_did=get_from(txn),
        revoc_reg_def_id=get_payload_data(txn)[REVOC_REG_DEF_ID],
        req_id=get_req_id(txn))
    # NOTE(review): sibling handlers read revoc_def[VALUE][ISSUANCE_TYPE];
    # here the issuance type is read from the top level of revoc_def —
    # confirm which revoc-def state layout this handler version expects.
    writer_cls = self.get_revocation_strategy(revoc_def[ISSUANCE_TYPE])
    writer = writer_cls(self.state)
    writer.write(current_entry, txn)
def prepare_schema_for_state(txn):
    """Return the (path, encoded value) state entry for a SCHEMA txn."""
    path = make_state_path_for_schema(get_from(txn),
                                      get_txn_schema_name(txn),
                                      get_txn_schema_version(txn))
    value = {SCHEMA_ATTR_NAMES: get_txn_schema_attr_names(txn)}
    value_bytes = encode_state_value(value, get_seq_no(txn), get_txn_time(txn))
    return path, value_bytes
def _addRevocRegEntry(self, txn, isCommitted=False) -> None:
    """Apply a revocation-registry entry via the strategy for its issuance type."""
    current_entry, revoc_def = self._get_current_revoc_entry_and_revoc_def(
        author_did=get_from(txn),
        revoc_reg_def_id=get_payload_data(txn)[REVOC_REG_DEF_ID],
        req_id=get_req_id(txn)
    )
    # The registry definition's issuance type selects the write strategy.
    writer_cls = self.get_revocation_strategy(
        revoc_def[VALUE][ISSUANCE_TYPE])
    writer = writer_cls(self.state)
    writer.write(current_entry, txn)
def updateState(self, txns, isCommitted=False):
    """Merge a batch of NODE txns into state, one node record per target nym."""
    for txn in txns:
        nodeNym = get_payload_data(txn).get(TARGET_NYM)
        data = get_payload_data(txn).get(DATA, {})
        existingData = self.getNodeData(nodeNym, isCommitted=isCommitted)
        # Node data did not exist in state, so this is a new node txn,
        # hence store the author of the txn (steward of node)
        if not existingData:
            existingData[f.IDENTIFIER.nm] = get_from(txn)
        existingData.update(data)
        self.updateNodeData(nodeNym, existingData)
def prepare_schema_for_state(txn):
    """Return the (path, encoded value) state entry for a SCHEMA txn.

    Name and version are popped out of a copy of DATA (they live in the
    state path, not the value); the remainder is encoded as the value.
    """
    txn_data = get_payload_data(txn)
    schema_data = deepcopy(txn_data.get(DATA))
    name = schema_data.pop(NAME)
    version = schema_data.pop(VERSION)
    path = make_state_path_for_schema(get_from(txn), name, version)
    value_bytes = encode_state_value(schema_data,
                                     get_seq_no(txn),
                                     get_txn_time(txn))
    return path, value_bytes
def _attrib_data_from_reply(self, result):
    """Extract (origin, dest, value) from an attrib reply.

    RAW attributes are JSON-decoded; ENC and HASH are returned as-is;
    value is None when none of the three is present.
    """
    payload = get_payload_data(result)
    dest = payload[TARGET_NYM]
    origin = get_from(result)
    if RAW in payload:
        val = json.loads(payload[RAW])
    elif ENC in payload:
        val = payload[ENC]
    elif HASH in payload:
        val = payload[HASH]
    else:
        val = None
    return origin, dest, val
def update_state(self, txn, prev_result, request, is_committed=False):
    """Merge a NODE txn's data into the node's state record and persist it."""
    self._validate_txn_type(txn)
    payload = get_payload_data(txn)
    node_nym = payload.get(TARGET_NYM)
    existing_data = self.get_from_state(node_nym, is_committed=is_committed)
    # Node data did not exist in state, so this is a new node txn,
    # hence store the author of the txn (steward of node)
    if not existing_data:
        existing_data[f.IDENTIFIER.nm] = get_from(txn)
    existing_data.update(payload.get(DATA, {}))
    serialized = self.state_serializer.serialize(existing_data)
    self.state.set(self.gen_state_key(txn), serialized)
def update_version(self, txn):
    """Track upgrade txns to learn the pool's version over time.

    POOL_UPGRADE/START: recompute fault tolerance f from the schedule size.
    NODE_UPGRADE/COMPLETE: count per-version votes; once more than f nodes
    reported a version, record it at the txn time and keep only votes for
    strictly newer versions.
    """
    if get_type(txn) == POOL_UPGRADE and get_payload_data(txn).get(ACTION) == START:
        # The schedule has one entry per node, so its size is the pool size N.
        N = len(get_payload_data(txn).get(SCHEDULE, {}))
        self._f = (N - 1) // 3
    elif get_type(txn) == NODE_UPGRADE and get_payload_data(txn)[DATA][ACTION] == COMPLETE:
        version = get_payload_data(txn)[DATA][VERSION]
        self._votes_for_new_version.setdefault(version, set())
        self._votes_for_new_version[version].add(get_from(txn))
        if len(self._votes_for_new_version[version]) > self._f:
            # Quorum (> f) reached: the pool runs `version` from this txn's time.
            self._versions[get_txn_time(txn)] = version
            # Drop votes for versions that are not newer than the accepted one.
            self._votes_for_new_version = SortedDict({v: senders
                                                      for v, senders in self._votes_for_new_version.items()
                                                      if v > version})
def _addNym(self, txn, isCommitted=False) -> None:
    """Route a NYM txn to updateNym for the txn's target nym.

    Fixed: the original built a `data` dict (identifier, seqNo, txnTime,
    optional role/verkey) that was never used — `updateNym` receives the
    txn itself and re-derives all of that. The dead construction is removed;
    behavior is unchanged since the removed calls were pure reads.
    """
    nym = get_payload_data(txn).get(TARGET_NYM)
    self.updateNym(nym, txn, isCommitted=isCommitted)
def prepare_context_for_state(txn, path_only=False):
    """Return the state path for a context txn, plus the encoded value unless path_only."""
    origin = get_from(txn)
    path = ContextHandler.make_state_path_for_context(origin,
                                                      get_txn_context_name(txn),
                                                      get_txn_context_version(txn))
    if path_only:
        return path
    value = {
        CONTEXT_CONTEXT_ARRAY: get_txn_context_context_array(txn)
    }
    value_bytes = encode_state_value(value, get_seq_no(txn), get_txn_time(txn))
    return path, value_bytes
def _parse_pool_transaction_file(ledger, nodeReg, cliNodeReg, nodeKeys, activeValidators, ledger_size=None):
    """
    helper function for parseLedgerForHaAndKeys

    Walks NODE txns in *ledger* (up to *ledger_size*), filling the given
    registries in place: node/client HAs, node verkeys, and the set of
    nodes whose SERVICES list contains VALIDATOR.
    """
    for _, txn in ledger.getAllTxn(to=ledger_size):
        if get_type(txn) == NODE:
            txn_data = get_payload_data(txn)
            nodeName = txn_data[DATA][ALIAS]
            clientStackName = nodeName + CLIENT_STACK_SUFFIX
            # HAs are optional; register only when both ip and port are present.
            nHa = (txn_data[DATA][NODE_IP], txn_data[DATA][NODE_PORT]) \
                if (NODE_IP in txn_data[DATA] and NODE_PORT in txn_data[DATA]) \
                else None
            cHa = (txn_data[DATA][CLIENT_IP], txn_data[DATA][CLIENT_PORT]) \
                if (CLIENT_IP in txn_data[DATA] and CLIENT_PORT in txn_data[DATA]) \
                else None
            if nHa:
                nodeReg[nodeName] = HA(*nHa)
            if cHa:
                cliNodeReg[clientStackName] = HA(*cHa)
            try:
                # TODO: Need to handle abbreviated verkey
                # key_type records which conversion was in flight so the
                # error message names the right field.
                key_type = 'verkey'
                verkey = cryptonymToHex(str(txn_data[TARGET_NYM]))
                key_type = 'identifier'
                cryptonymToHex(get_from(txn))
            except ValueError:
                logger.exception(
                    'Invalid {}. Rebuild pool transactions.'.format(
                        key_type))
                exit('Invalid {}. Rebuild pool transactions.'.format(
                    key_type))
            nodeKeys[nodeName] = verkey
            services = txn_data[DATA].get(SERVICES)
            if isinstance(services, list):
                if VALIDATOR in services:
                    activeValidators.add(nodeName)
                else:
                    activeValidators.discard(nodeName)
def prepare_claim_def_for_state(txn):
    """Return the (path, encoded value) state entry for a CLAIM_DEF txn.

    Raises ValueError when REF (schema seq no) or DATA (key components)
    is missing.
    """
    txn_data = get_payload_data(txn)
    schema_seq_no = txn_data.get(REF)
    if schema_seq_no is None:
        raise ValueError("'{}' field is absent, "
                         "but it must contain schema seq no".format(REF))
    data = txn_data.get(DATA)
    if data is None:
        raise ValueError("'{}' field is absent, "
                         "but it must contain components of keys".format(DATA))
    path = make_state_path_for_claim_def(get_from(txn),
                                         schema_seq_no,
                                         txn_data.get(SIGNATURE_TYPE, 'CL'))
    value_bytes = encode_state_value(data, get_seq_no(txn), get_txn_time(txn))
    return path, value_bytes
def prepare_revoc_def_for_state(txn):
    """Return the (path, encoded value) state entry for a revocation-registry-definition txn."""
    author_did = get_from(txn)
    txn_data = get_payload_data(txn)
    cred_def_id = txn_data.get(CRED_DEF_ID)
    revoc_def_type = txn_data.get(REVOC_TYPE)
    revoc_def_tag = txn_data.get(TAG)
    # Sanity checks: every path component must be present.
    assert author_did
    assert cred_def_id
    assert revoc_def_type
    assert revoc_def_tag
    seq_no = get_seq_no(txn)
    txn_time = get_txn_time(txn)
    assert seq_no
    assert txn_time
    path = make_state_path_for_revoc_def(author_did,
                                         cred_def_id,
                                         revoc_def_type,
                                         revoc_def_tag)
    value_bytes = encode_state_value(txn_data, seq_no, txn_time)
    return path, value_bytes
def prepare_claim_def_for_state(txn):
    """Return the (path, encoded value) state entry for a CLAIM_DEF txn.

    Raises ValueError when REF (schema seq no) or DATA (key components)
    is missing.
    """
    txn_data = get_payload_data(txn)
    origin = get_from(txn)
    schema_seq_no = txn_data.get(REF)
    if schema_seq_no is None:
        raise ValueError("'{}' field is absent, "
                         "but it must contain schema seq no".format(REF))
    data = txn_data.get(DATA)
    if data is None:
        raise ValueError("'{}' field is absent, "
                         "but it must contain components of keys"
                         .format(DATA))
    # 'CL' is the default signature type when the txn does not specify one.
    signature_type = txn_data.get(SIGNATURE_TYPE, 'CL')
    path = make_state_path_for_claim_def(origin, schema_seq_no, signature_type)
    seq_no = get_seq_no(txn)
    txn_time = get_txn_time(txn)
    value_bytes = encode_state_value(data, seq_no, txn_time)
    return path, value_bytes
def prepare_revoc_reg_entry_accum_for_state(txn):
    """Return the (path, encoded value) state entry for a revoc-reg-entry accumulator."""
    author_did = get_from(txn)
    txn_data = get_payload_data(txn)
    revoc_reg_def_id = txn_data.get(REVOC_REG_DEF_ID)
    seq_no = get_seq_no(txn)
    txn_time = get_txn_time(txn)
    # Sanity checks: author, registry id and ordering info must be present.
    assert author_did
    assert revoc_reg_def_id
    assert seq_no
    assert txn_time
    path = make_state_path_for_revoc_reg_entry_accum(revoc_reg_def_id=revoc_reg_def_id)
    # TODO: do not duplicate seqNo here
    # doing this now just for backward-compatibility
    txn_data = deepcopy(txn_data)
    txn_data[f.SEQ_NO.nm] = seq_no
    txn_data[TXN_TIME] = txn_time
    return path, encode_state_value(txn_data, seq_no, txn_time)
def put_into_seq_no_db(txn):
    """Store txn digest -> "ledger_id~seq_no" in the seq-no DB; return the digest.

    Recomputes the original request digest from the txn payload. Returns
    None for genesis txns (which have no reqId).
    """
    # If there is no reqId, then it's genesis txn
    if get_req_id(txn) is None:
        return
    # Deep-copy before mutating: TXN_TYPE is injected into the payload
    # to reconstruct the original request's operation.
    txn_new = copy.deepcopy(txn)
    operation = get_payload_data(txn_new)
    operation[TXN_TYPE] = get_type(txn_new)
    dct = {
        f.IDENTIFIER.nm: get_from(txn_new),
        f.REQ_ID.nm: get_req_id(txn_new),
        OPERATION: operation,
    }
    if get_protocol_version(txn_new) is not None:
        dct[f.PROTOCOL_VERSION.nm] = get_protocol_version(txn_new)
    # NOTE(review): this variant stores the digest as bytes (.encode());
    # a sibling variant stores str — confirm which the storage expects.
    digest = sha256(serialize_msg_for_signing(dct)).hexdigest().encode()
    seq_no = get_seq_no(txn_new)
    ledger_id = get_ledger_id_by_txn_type(operation[TXN_TYPE])
    line_to_record = str(ledger_id) + ReqIdrToTxn.delimiter + str(seq_no)
    dest_seq_no_db_storage.put(digest, line_to_record)
    return digest
def prepare_claim_def_for_state(txn):
    """Return the (path, encoded value) state entry for a CLAIM_DEF txn (tagged path form).

    Raises ValueError when the schema ref or the public keys are missing.
    """
    schema_seq_no = get_txn_claim_def_schema_ref(txn)
    if schema_seq_no is None:
        raise ValueError(
            "'{}' field is absent, "
            "but it must contain schema seq no".format(CLAIM_DEF_SCHEMA_REF))
    public_keys = get_txn_claim_def_public_keys(txn)
    if public_keys is None:
        raise ValueError("'{}' field is absent, "
                         "but it must contain components of keys".format(
                             CLAIM_DEF_PUBLIC_KEYS))
    path = make_state_path_for_claim_def(get_from(txn),
                                         schema_seq_no,
                                         get_txn_claim_def_signature_type(txn),
                                         get_txn_claim_def_tag(txn))
    value_bytes = encode_state_value(public_keys,
                                     get_seq_no(txn),
                                     get_txn_time(txn))
    return path, value_bytes
def put_into_seq_no_db(txn):
    """Store txn digest -> "ledger_id~seq_no" in the seq-no DB; return the digest.

    Recomputes the original request digest from the txn payload. Returns
    None for genesis txns (which have no reqId).
    """
    # If there is no reqId, then it's genesis txn
    if get_req_id(txn) is None:
        return
    # Deep-copy before mutating: TXN_TYPE is injected into the payload
    # to reconstruct the original request's operation.
    txn_new = copy.deepcopy(txn)
    operation = get_payload_data(txn_new)
    operation[TXN_TYPE] = get_type(txn_new)
    dct = {
        f.IDENTIFIER.nm: get_from(txn_new),
        f.REQ_ID.nm: get_req_id(txn_new),
        OPERATION: operation,
    }
    if get_protocol_version(txn_new) is not None:
        dct[f.PROTOCOL_VERSION.nm] = get_protocol_version(txn_new)
    # NOTE(review): this variant stores the digest as a str; a sibling
    # variant stores bytes — confirm which the storage expects.
    digest = sha256(serialize_msg_for_signing(dct)).hexdigest()
    seq_no = get_seq_no(txn_new)
    ledger_id = get_ledger_id_by_txn_type(operation[TXN_TYPE])
    line_to_record = str(ledger_id) + ReqIdrToTxn.delimiter + str(seq_no)
    dest_seq_no_db_storage.put(digest, line_to_record)
    return digest
def build_demote_node_req(node_name, txn):
    """Build a NODE request that demotes *node_name* by emptying its SERVICES list.

    Network addresses and target nym are copied from *txn*; the request is
    authored by the txn's original sender.
    """
    payload = get_payload_data(txn)
    node_data = payload['data']
    operation = {
        TYPE: NODE,
        DATA: {
            ALIAS: node_name,
            CLIENT_IP: node_data[CLIENT_IP],
            CLIENT_PORT: node_data[CLIENT_PORT],
            NODE_IP: node_data[NODE_IP],
            NODE_PORT: node_data[NODE_PORT],
            SERVICES: [],
        },
        TARGET_NYM: payload[TARGET_NYM],
    }
    return Request(operation=operation,
                   reqId=1513945121191691,
                   protocolVersion=CURRENT_PROTOCOL_VERSION,
                   identifier=get_from(txn))
def _parse_pool_transaction_file(
        ledger, nodeReg, cliNodeReg, nodeKeys, activeValidators,
        ledger_size=None):
    """
    helper function for parseLedgerForHaAndKeys

    Walks NODE txns in *ledger* (up to *ledger_size*), filling the given
    registries in place: node/client HAs, node verkeys, and the set of
    nodes whose SERVICES list contains VALIDATOR.
    """
    for _, txn in ledger.getAllTxn(to=ledger_size):
        if get_type(txn) == NODE:
            txn_data = get_payload_data(txn)
            nodeName = txn_data[DATA][ALIAS]
            clientStackName = nodeName + CLIENT_STACK_SUFFIX
            # HAs are optional; register only when both ip and port are present.
            nHa = (txn_data[DATA][NODE_IP], txn_data[DATA][NODE_PORT]) \
                if (NODE_IP in txn_data[DATA] and NODE_PORT in txn_data[DATA]) \
                else None
            cHa = (txn_data[DATA][CLIENT_IP], txn_data[DATA][CLIENT_PORT]) \
                if (CLIENT_IP in txn_data[DATA] and CLIENT_PORT in txn_data[DATA]) \
                else None
            if nHa:
                nodeReg[nodeName] = HA(*nHa)
            if cHa:
                cliNodeReg[clientStackName] = HA(*cHa)
            try:
                # TODO: Need to handle abbreviated verkey
                # key_type records which conversion was in flight so the
                # error message names the right field.
                key_type = 'verkey'
                verkey = cryptonymToHex(str(txn_data[TARGET_NYM]))
                key_type = 'identifier'
                cryptonymToHex(get_from(txn))
            except ValueError:
                logger.exception(
                    'Invalid {}. Rebuild pool transactions.'.format(key_type))
                exit('Invalid {}. Rebuild pool transactions.'.format(key_type))
            nodeKeys[nodeName] = verkey
            services = txn_data[DATA].get(SERVICES)
            if isinstance(services, list):
                if VALIDATOR in services:
                    activeValidators.add(nodeName)
                else:
                    activeValidators.discard(nodeName)
def write(self, current_reg_entry, txn):
    """Merge the previous ISSUED set into *txn* and persist it to state.

    The result's ISSUED list is (previous issued − txn's REVOKED) ∪ txn's
    ISSUED, and REVOKED is emptied, so state always holds the full issued
    set. When there is no current entry, the txn is stored as-is.

    Fixed: removed the pointless trailing `del txn` — the local goes out of
    scope immediately anyway; the deepcopy already protects the caller's txn.
    """
    # Deep-copy: the payload's VALUE is mutated below.
    txn = deepcopy(txn)
    txn_data = get_payload_data(txn)
    self.set_parameters_from_txn(author_did=get_from(txn),
                                 revoc_reg_def_id=txn_data.get(REVOC_REG_DEF_ID),
                                 req_id=get_req_id(txn))
    if current_reg_entry is not None:
        value_from_state = current_reg_entry.get(VALUE)
        assert value_from_state
        indices = value_from_state.get(ISSUED, [])
        value_from_txn = txn_data.get(VALUE)
        issued_from_txn = value_from_txn.get(ISSUED, [])
        revoked_from_txn = value_from_txn.get(REVOKED, [])
        # set with all previous issued minus revoked from txn
        result_indicies = set(indices).difference(revoked_from_txn)
        result_indicies.update(issued_from_txn)
        value_from_txn[REVOKED] = []
        value_from_txn[ISSUED] = list(result_indicies)
        txn_data[VALUE] = value_from_txn
    # contains already changed txn
    self.set_to_state(txn)
def write(self, current_reg_entry, txn):
    """Fold the previous ISSUED set into *txn* (minus its REVOKED) and persist it."""
    txn = deepcopy(txn)
    payload = get_payload_data(txn)
    self.set_parameters_from_txn(
        author_did=get_from(txn),
        revoc_reg_def_id=payload.get(REVOC_REG_DEF_ID),
        req_id=get_req_id(txn))
    if current_reg_entry is not None:
        state_value = current_reg_entry.get(VALUE)
        assert state_value
        previously_issued = state_value.get(ISSUED, [])
        txn_value = payload.get(VALUE)
        newly_issued = txn_value.get(ISSUED, [])
        newly_revoked = txn_value.get(REVOKED, [])
        # All previously issued indices minus the ones this txn revokes,
        # plus the ones it newly issues.
        merged = set(previously_issued).difference(newly_revoked)
        merged.update(newly_issued)
        txn_value[REVOKED] = []
        txn_value[ISSUED] = list(merged)
        payload[VALUE] = txn_value
    # txn now carries the merged indices
    self.set_to_state(txn)
def update_state(self, txn, prev_result, is_committed=False):
    """Apply a NYM txn to state and return the merged nym record."""
    self._validate_txn_type(txn)
    nym = get_payload_data(txn).get(TARGET_NYM)
    existing_data = get_nym_details(self.state, nym, is_committed=is_committed)
    txn_data = get_payload_data(txn)
    new_data = {}
    if not existing_data:
        # New nym: record the txn author and default role/verkey to None so
        # the state record always carries both keys.
        new_data[f.IDENTIFIER.nm] = get_from(txn)
        new_data[ROLE] = None
        new_data[VERKEY] = None
    # Only overwrite role/verkey when the txn explicitly carries them.
    if ROLE in txn_data:
        new_data[ROLE] = txn_data[ROLE]
    if VERKEY in txn_data:
        new_data[VERKEY] = txn_data[VERKEY]
    new_data[F.seqNo.name] = get_seq_no(txn)
    new_data[TXN_TIME] = get_txn_time(txn)
    existing_data.update(new_data)
    val = self.state_serializer.serialize(existing_data)
    key = nym_to_state_key(nym)
    self.state.set(key, val)
    return existing_data
async def getSchema(self, id: ID) -> Optional[Schema]: data = None issuer_id = None if id.schemaKey: issuer_id = id.schemaKey.issuerId op = { TARGET_NYM: issuer_id, TXN_TYPE: GET_SCHEMA, DATA: { NAME: id.schemaKey.name, VERSION: id.schemaKey.version, } } data, seqNo = await self._sendGetReq(op) else: op = { f.LEDGER_ID.nm: DOMAIN_LEDGER_ID, TXN_TYPE: GET_TXNS, DATA: id.schemaId } res, seqNo = await self._sendGetReq(op) if res and get_type(res) == SCHEMA: issuer_id = get_from(res) data = get_payload_data(res)[DATA] if not data or ATTR_NAMES not in data: raise SchemaNotFoundError( 'No schema with ID={} and key={}'.format( id.schemaId, id.schemaKey)) return Schema(name=data[NAME], version=data[VERSION], attrNames=data[ATTR_NAMES], issuerId=issuer_id, seqId=seqNo)
def test_get_from(txn):
    """get_from returns the sender DID recorded in the txn."""
    expected_did = "6ouriXMZkLeHsuXrN1X1fd"
    assert get_from(txn) == expected_did
def test_get_from_none(txn):
    """get_from returns None when the txn metadata lacks a 'from' field."""
    metadata = txn["txn"]["metadata"]
    metadata.pop("from", None)
    assert get_from(txn) is None