async def test_pool_open(pool_name, pool_genesis_txn_path, pool_genesis_txn_file):
    """Exercise NodePool config validation plus open/close/re-open cycles.

    In this NodePool version both an unknown extra property and a non-boolean
    'auto-remove' are rejected at construction time with JSONValidation.
    """
    for pool_cfg in ({'extra-property': True}, {'auto-remove': 'non-boolean'}):
        try:
            NodePool(pool_name, pool_genesis_txn_path, pool_cfg)
            assert False  # construction must raise before reaching here
        except JSONValidation:
            pass
    # auto-remove True: pool artifacts cleaned up on close
    p = NodePool(pool_name, pool_genesis_txn_path, {'auto-remove': True})
    await p.open()
    assert p.handle is not None
    await p.close()
    p = NodePool(pool_name, pool_genesis_txn_path)  # auto-remove default: False
    await p.open()
    assert p.handle is not None
    await p.close()
    p = NodePool(
        pool_name,
        pool_genesis_txn_path,
        {'auto-remove': True})  # check survival on re-opening existing pool
    await p.open()
    assert p.handle is not None
    await p.close()
def __init__(self):
    """Wire up the org-book holder: node pool plus a VON holder-prover wallet."""
    hi_cfg = hyperledger_indy.config()
    self.pool = NodePool('the-org-book-holder', hi_cfg['genesis_txn_path'])
    holder_wallet = Wallet(self.pool.name, WALLET_SEED,
                           'TheOrgBook Holder Wallet')
    self.instance = VonHolderProver(self.pool, holder_wallet)
async def test_wallet(pool_name, pool_genesis_txn_path, pool_genesis_txn_file):
    """Open a numbered wallet, capture its DID/verkey, then re-open at the
    next wallet number from the same seed and check the identity persists.
    """
    p = NodePool(pool_name, pool_genesis_txn_path)
    await p.open()
    assert p.handle is not None
    seed = '00000000000000000000000000000000'
    base_name = 'my-wallet'
    w = Wallet(p.name, seed, base_name, 0)
    await w.open()
    num = w.num
    # Fixed: compare against None with identity (`is not None`), not `!=`,
    # per standard Python practice — `!=` invokes __ne__ and can surprise.
    assert num is not None
    assert w.did
    assert w.verkey
    (did, verkey) = (w.did, w.verkey)
    await w.close()
    num += 1
    # Re-open under the next wallet number: same seed must yield same identity.
    x = Wallet(p.name, seed, base_name, num)
    await x.open()
    assert did == x.did
    assert verkey == x.verkey
    await x.close()
    await p.close()
async def test_wallet_did_creation(pool_name, pool_genesis_txn_path, pool_genesis_txn_file):
    """Create two DIDs from distinct seeds in one wallet; check stored-DID listing."""
    # NOTE(review): key is 'auto_remove' here but 'auto-remove' in other
    # variants — confirm which spelling this NodePool version validates.
    p = NodePool(pool_name, pool_genesis_txn_path, {'auto_remove': True})
    seed1 = '00000000000000000000000000000000'
    seed2 = '11111111111111111111111111111111'
    name = 'my-wallet'
    w = Wallet(p.name, name)
    await w.open()
    await w.create_did(seed=seed1)
    assert w.did
    assert w.verkey
    (did1, verkey1) = (w.did, w.verkey)
    await w.create_did(seed=seed2)
    (did2, verkey2) = (w.did, w.verkey)
    # assumes stored_dids() lists entries in creation order — TODO confirm
    stored_dids = await w.stored_dids()
    assert len(stored_dids) == 2
    assert stored_dids[0]['did'] == did1
    assert stored_dids[0]['verkey'] == verkey1
    assert stored_dids[1]['did'] == did2
    assert stored_dids[1]['verkey'] == verkey2
    await w.close()
def __init__(self):
    """Assemble the permitify verifier: node pool + VON verifier over a virtual wallet."""
    genesis_config = genesis.config()
    self.pool = NodePool(
        'permitify-verifier', genesis_config['genesis_txn_path'])
    wallet_name = config['name'] + '_Verifier_Wallet'
    # TODO force to virtual for now
    verifier_type = 'virtual'
    verifier_config = {'freshness_time': 0}
    verifier_creds = {'key': ''}
    logger.debug('Using virtual Cfg: {} Creds: {}'.format(
        verifier_config, verifier_creds))
    logger.debug("Verifier __init__>>> {} {} {}".format(
        verifier_type, verifier_config, verifier_creds))
    verifier_wallet = Wallet(
        self.pool,
        WALLET_SEED,
        wallet_name,
        verifier_type,
        verifier_config,
        verifier_creds)
    logger.debug("Verifier __init__>>> {} {} {}".format(
        verifier_type, verifier_config, verifier_creds))
    self.instance = VonVerifier(
        # self.pool,
        verifier_wallet
    )
def __init__(self):
    """Assemble the permitify issuer: node pool + VON issuer over a virtual wallet."""
    genesis_config = genesis.config()
    self.pool = NodePool(
        'permitify-issuer', genesis_config['genesis_txn_path'])
    wallet_name = config['name'] + '_Issuer_Wallet'
    # TODO force to virtual for now
    issuer_type = 'virtual'
    issuer_config = {'freshness_time': 0}
    issuer_creds = {'key': ''}
    logger.debug('Using virtual Cfg: {} Creds: {}'.format(
        issuer_config, issuer_creds))
    logger.debug("Issuer __init__>>> create wallet {} {} {}".format(
        issuer_type, issuer_config, issuer_creds))
    issuer_wallet = Wallet(
        self.pool,
        WALLET_SEED,
        wallet_name,
        issuer_type,
        issuer_config,
        issuer_creds)
    logger.debug("Issuer __init__>>> done {} {} {}".format(
        issuer_type, issuer_config, issuer_creds))
    self.instance = VonIssuer(
        # self.pool,
        issuer_wallet
    )
def __init__(self):
    """Build the org-book verifier from environment configuration.

    Reads INDY_WALLET_SEED from the environment and constructs the node
    pool, a virtual verifier wallet, and the VON verifier instance.

    :raises Exception: if INDY_WALLET_SEED is unset or not 32 characters
    """
    WALLET_SEED = os.environ.get('INDY_WALLET_SEED')
    # Fixed: value comparison must use !=, not `is not` — identity tests
    # against int literals are implementation-dependent (CPython warns).
    if not WALLET_SEED or len(WALLET_SEED) != 32:
        raise Exception(
            'INDY_WALLET_SEED must be set and be 32 characters long.')
    self.__logger = logging.getLogger(__name__)
    config = hyperledger_indy.config()
    self.pool = NodePool('the-org-book-verifier', config['genesis_txn_path'])
    wallet_name = 'TheOrgBook_Verifier_Wallet'
    verifier_type = 'virtual'
    verifier_config = {'freshness_time': 0}
    verifier_creds = {'key': ''}
    self.__logger.debug("Verifier __init__>>> {} {} {}".format(
        verifier_type, verifier_config, verifier_creds))
    verifier_wallet = Wallet(self.pool, WALLET_SEED, wallet_name,
                             verifier_type, verifier_config, verifier_creds)
    self.__logger.debug("Verifier __init__>>> {} {} {}".format(
        verifier_type, verifier_config, verifier_creds))
    self.instance = VonVerifier(
        # self.pool,
        verifier_wallet)
async def test_wallet(pool_name, pool_genesis_txn_path, pool_genesis_txn_file):
    """Exercise Wallet config validation, default open/close, and re-open from file."""
    p = NodePool(pool_name, pool_genesis_txn_path, {'auto-remove': True})
    await p.open()
    assert p.handle is not None
    seed = '00000000000000000000000000000000'
    name = 'my-wallet'
    # 1. Exercise configuration: auto-remove must be boolean if present, but extra properties are OK
    try:
        Wallet(p.name, seed, name, {'auto-remove': 'non-boolean'})
        assert False  # constructor must reject non-boolean auto-remove
    except JSONValidation:
        pass
    Wallet(p.name, seed, name, {'auto-remove': True, 'extra-property': 'ok'})
    # 2. Default configuration (auto-remove=False)
    w = Wallet(p.name, seed, name)
    await w.open()
    assert w.did
    assert w.verkey
    (did, verkey) = (w.did, w.verkey)
    await w.close()
    # 3. Make sure wallet opens from extant file
    x = Wallet(p.name, seed, name, {'auto-remove': True})
    await x.open()
    assert did == x.did
    assert verkey == x.verkey
    await x.close()
    await p.close()
async def test_wallet_did_loading(pool_name, pool_genesis_txn_path, pool_genesis_txn_file):
    """Create two DIDs, then load the first back and check it becomes current again."""
    p = NodePool(pool_name, pool_genesis_txn_path, {'auto_remove': True})
    seed1 = '00000000000000000000000000000000'
    seed2 = '11111111111111111111111111111111'
    name = 'my-wallet'
    w = Wallet(p.name, name)
    await w.open()
    await w.create_did(seed=seed1)
    assert w.did
    assert w.verkey
    (did1, verkey1) = (w.did, w.verkey)
    # second create_did makes a new identity current
    await w.create_did(seed=seed2)
    assert w.did != did1
    assert w.verkey != verkey1
    # load_did restores a previously created identity as current
    await w.load_did(did1)
    assert w.did == did1
    assert w.verkey == verkey1
    await w.close()
def __init__(self):
    """Wire up the org-book holder agent against the local VON endpoint."""
    hi_cfg = hyperledger_indy.config()
    self.pool = NodePool('the-org-book-holder', hi_cfg['genesis_txn_path'])
    self.instance = VonHolderProver(
        self.pool,
        'the_org_book_holder_000000000000',
        'TheOrgBook Holder Wallet',
        None,
        '127.0.0.1',
        9703,
        'api/v0')
async def test_pool_open(pool_name, pool_genesis_txn_path, pool_genesis_txn_file):
    """Open and close the node pool three times: auto_remove, defaults, auto_remove."""
    for pool_cfg in ({'auto_remove': True}, None, {'auto_remove': True}):
        if pool_cfg is None:
            node_pool = NodePool(pool_name, pool_genesis_txn_path)
        else:
            node_pool = NodePool(pool_name, pool_genesis_txn_path, pool_cfg)
        await node_pool.open()
        assert node_pool.handle is not None
        await node_pool.close()
def __init__(self):
    """Wire up the permitify holder: node pool plus a VON holder-prover wallet."""
    gen_cfg = genesis.config()
    self.pool = NodePool('permitify-holder', gen_cfg['genesis_txn_path'])
    holder_wallet = Wallet(self.pool.name, WALLET_SEED,
                           config['name'] + ' Holder Wallet')
    self.instance = VonHolderProver(self.pool, holder_wallet)
async def main():
    """Open the sandbox test pool, then close it, tracing progress to stdout."""
    genesis_path = '.indy/pool_transactions_sandbox_genesis'
    node_pool = NodePool('test', genesis_path)
    print('\n\n\nOpening pool...\n\n\n')
    await node_pool.open()
    print('\n\n\nFinished opening pool\n\n\n')
    print('\n\n\nClosing pool...\n\n\n')
    await node_pool.close()
    print('\n\n\nFinished closing pool\n\n\n')
async def test_pool_open(path_home, pool_name, pool_genesis_txn_path, pool_genesis_txn_file):
    """Exercise pool config validation, remove(), and auto-remove path cleanup."""
    path = Path(path_home, 'pool', pool_name)
    # non-boolean auto-remove must be rejected at construction time
    try:
        NodePool(pool_name, pool_genesis_txn_path, {'auto-remove': 'non-boolean'})
        assert False
    except JSONValidation:
        pass
    # extra config properties are tolerated in this NodePool version
    try:
        pool = NodePool(pool_name, pool_genesis_txn_path, {
            'auto-remove': True,
            'extra-property': True
        })
        await pool.remove()
        assert not path.exists(), 'Pool path {} still present'.format(path)
    except JSONValidation:
        assert False
    # auto-remove True: pool dir gone after close
    pool = NodePool(pool_name, pool_genesis_txn_path, {'auto-remove': True})
    await pool.open()
    assert pool.handle is not None
    await pool.close()
    assert not path.exists(), 'Pool path {} still present'.format(path)
    pool = NodePool(pool_name, pool_genesis_txn_path)  # auto-remove default: False
    await pool.open()
    assert pool.handle is not None
    await pool.close()
    assert path.exists(), 'Pool path {} not present'.format(path)
    pool = NodePool(
        pool_name,
        pool_genesis_txn_path,
        {'auto-remove': True})  # check survival re-opening existing pool
    await pool.open()
    assert pool.handle is not None
    await pool.close()
    assert not path.exists(), 'Pool path {} still present'.format(path)
async def boot():
    """Open the sandbox pool and a trust-anchor agent, bound to module globals."""
    global pool
    global trust_anchor
    pool = NodePool(
        'nodepool',
        '/home/indy/.indy-cli/networks/sandbox/pool_transactions_genesis')
    await pool.open()
    # trustee seed is the well-known sandbox Trustee1 seed
    trust_anchor = TrustAnchorAgent(
        pool,
        '000000000000000000000000Trustee1',
        'trustee_wallet',
        None,
        '127.0.0.1',
        9700,
        'api/v0')
    await trust_anchor.open()
async def convert_seed_to_did(seed):
    """Derive and return the DID for *seed* via a throwaway base agent."""
    gen_cfg = genesis.config()
    node_pool = NodePool('util-agent', gen_cfg['genesis_txn_path'])
    seed_wallet = Wallet(node_pool.name, seed, seed + '-wallet')
    agent = _BaseAgent(node_pool, seed_wallet)
    await agent.open()
    derived_did = agent.did
    await agent.close()
    return derived_did
async def boot():
    """Open the sandbox pool, create the trustee wallet, and open the registrar.

    Results are bound to the module globals `pool` and `trust_anchor`.
    """
    global pool
    global trust_anchor
    pool = NodePool(
        'nodepool',
        '/home/indy/.indy-cli/networks/sandbox/pool_transactions_genesis')
    # well-known sandbox Trustee1 seed
    wallet = Wallet(pool, '000000000000000000000000Trustee1', 'trustee_wallet')
    await pool.open()
    await wallet.create()
    trust_anchor = AgentRegistrar(wallet)
    await trust_anchor.open()
def __init__(self, legal_entity_id: str = None):
    """Build a per-thread org-book holder from environment configuration.

    :param legal_entity_id: virtual-wallet partition key passed through in
        wallet credentials (presumably selects the org's sub-wallet — TODO
        confirm against the wallet service)
    :raises Exception: if INDY_WALLET_SEED is unset or not 32 characters
    """
    WALLET_SEED = os.environ.get('INDY_WALLET_SEED')
    # Fixed: value comparison must use !=, not `is not` — identity tests
    # against int literals are implementation-dependent (CPython warns).
    if not WALLET_SEED or len(WALLET_SEED) != 32:
        raise Exception(
            'INDY_WALLET_SEED must be set and be 32 characters long.')
    self.__logger = logging.getLogger(__name__)
    config = hyperledger_indy.config()
    thread_id = threading.get_ident()
    # one pool name and wallet name per thread, to keep handles distinct
    self.pool = NodePool('the-org-book-holder-' + str(thread_id),
                         config['genesis_txn_path'])
    wallet_name = 'TheOrgBook_Holder_Wallet' + '$$' + str(thread_id)
    holder_type = os.environ.get('INDY_WALLET_TYPE')
    if holder_type == 'remote':
        # wallet_name = wallet_name + "$$" + str(thread_id)
        holder_url = os.environ.get('INDY_WALLET_URL')
        holder_config = {
            'endpoint': holder_url,
            'ping': 'schema/',
            'auth': 'api-token-auth/',
            'keyval': 'keyval/',
            'freshness_time': 0
        }
        holder_creds = {
            'auth_token': apps.get_remote_wallet_token(),
            'virtual_wallet': legal_entity_id
        }
        self.__logger.debug('Using remote Cfg: {} Creds: {}'.format(
            holder_config, holder_creds))
    else:
        # TODO force to virtual for now
        holder_type = 'virtual'
        holder_config = {'freshness_time': 0}
        holder_creds = {'key': '', 'virtual_wallet': legal_entity_id}
        self.__logger.debug('Using virtual Cfg: {} Creds: {}'.format(
            holder_config, holder_creds))
    self.__logger.debug("Holder __init__>>> {} {} {}".format(
        holder_type, holder_config, holder_creds))
    holder_wallet = Wallet(self.pool, WALLET_SEED, wallet_name, holder_type,
                           holder_config, holder_creds)
    self.__logger.debug("Holder __init__>>> {} {} {}".format(
        holder_type, holder_config, holder_creds))
    self.instance = VonHolderProver(
        # self.pool,
        holder_wallet)
def __init__(self, legal_entity_id: str = None):
    """Assemble the dflow issuer: node pool, remote-or-virtual wallet, VonIssuer.

    :param legal_entity_id: virtual-wallet partition key carried in wallet
        credentials — presumably selects the org's sub-wallet; confirm
    """
    logger.debug("Issuer __init__>>>")
    genesis_config = genesis.config()
    thread_id = threading.get_ident()
    self.pool = NodePool(
        # 'dflow-issuer-' + str(thread_id),
        'dflow-issuer',
        genesis_config['genesis_txn_path'])
    wallet_name = config['name'] + '_Issuer_Wallet'
    issuer_type = os.environ.get('INDY_WALLET_TYPE')
    if issuer_type == 'remote':
        # per-thread wallet name keeps remote-wallet sessions distinct
        wallet_name = wallet_name + "$$" + str(thread_id)
        holder_url = os.environ.get('INDY_WALLET_URL')
        issuer_config = {
            'endpoint': holder_url,
            'ping': 'schema/',
            'auth': 'api-token-auth/',
            'keyval': 'keyval/',
            'freshness_time': 0
        }
        issuer_creds = {
            'auth_token': apps.get_remote_wallet_token(),
            'virtual_wallet': legal_entity_id
        }
        logger.debug('Using remote Cfg: {} Creds: {}'.format(
            issuer_config, issuer_creds))
    else:
        # TODO force to virtual for now
        issuer_type = 'virtual'
        issuer_config = {'freshness_time': 0}
        issuer_creds = {'key': '', 'virtual_wallet': legal_entity_id}
        logger.debug('Using virtual Cfg: {} Creds: {}'.format(
            issuer_config, issuer_creds))
    logger.debug("Issuer __init__>>> create wallet {} {} {}".format(
        issuer_type, issuer_config, issuer_creds))
    issuer_wallet = Wallet(self.pool, WALLET_SEED, wallet_name, issuer_type,
                           issuer_config, issuer_creds)
    logger.debug("Issuer __init__>>> done {} {} {}".format(
        issuer_type, issuer_config, issuer_creds))
    self.instance = VonIssuer(
        # self.pool,
        issuer_wallet)
    logger.debug("Issuer __init__>>> created VonIssuer")
async def convert_seed_to_did(seed):
    """Derive and return the DID for *seed* using a freshly named throwaway wallet."""
    gen_cfg = genesis.config()
    node_pool = NodePool('util-agent', gen_cfg['genesis_txn_path'])
    agent_wallet = Wallet(node_pool, seed,
                          seed + '-' + random_string() + '-wallet')
    agent = _BaseAgent(
        # node_pool,
        agent_wallet,
    )
    await node_pool.open()
    await agent_wallet.create()
    await agent.open()
    derived_did = agent.did
    await agent.close()
    return derived_did
def __init__(self, legal_entity_id: str = None):
    """Assemble the dflow verifier: node pool, remote-or-virtual wallet, VonVerifier.

    :param legal_entity_id: virtual-wallet partition key carried in wallet
        credentials — presumably selects the org's sub-wallet; confirm
    """
    logger.debug("Verifier __init__>>>")
    genesis_config = genesis.config()
    self.pool = NodePool('dflow-verifier', genesis_config['genesis_txn_path'])
    wallet_name = config['name'] + '_Verifier_Wallet'
    verifier_type = os.environ.get('INDY_WALLET_TYPE')
    if verifier_type == 'remote':
        holder_url = os.environ.get('INDY_WALLET_URL')
        verifier_config = {
            'endpoint': holder_url,
            'ping': 'schema/',
            'auth': 'api-token-auth/',
            'keyval': 'keyval/',
            'freshness_time': 0
        }
        verifier_creds = {
            'auth_token': apps.get_remote_wallet_token(),
            'virtual_wallet': legal_entity_id
        }
        logger.debug('Using remote Cfg: {} Creds: {}'.format(
            verifier_config, verifier_creds))
    else:
        # TODO force to virtual for now
        verifier_type = 'virtual'
        verifier_config = {'freshness_time': 0}
        verifier_creds = {'key': '', 'virtual_wallet': legal_entity_id}
        logger.debug('Using virtual Cfg: {} Creds: {}'.format(
            verifier_config, verifier_creds))
    logger.debug("Verifier __init__>>> {} {} {}".format(
        verifier_type, verifier_config, verifier_creds))
    verifier_wallet = Wallet(self.pool, WALLET_SEED, wallet_name,
                             verifier_type, verifier_config, verifier_creds)
    logger.debug("Verifier __init__>>> {} {} {}".format(
        verifier_type, verifier_config, verifier_creds))
    self.instance = VonVerifier(
        # self.pool,
        verifier_wallet)
async def start(self):
    """Open pool and OrgBook agent (module globals) and set the master secret."""
    global pool
    global orgbook
    print('connecting to node pool with genesis txn file:')
    print('/opt/app-root/genesis')
    pool = NodePool(
        # Hack to use different pool names. Agent lib doesn't support
        # reopening existing pool config...
        'theorgbook',
        '/opt/app-root/genesis')
    await pool.open()
    orgbook = OrgBookAgent(pool, 'The-Org-Book-Agent-0000000000000',
                           'the-org-book-agent-wallet', None, '127.0.0.1',
                           9702, 'api/v0')
    await orgbook.open()
    # HolderProver needs a master secret before it can store claims
    await orgbook.create_master_secret('secret')
async def test_wallet(pool_name, pool_genesis_txn_path, pool_genesis_txn_file):
    """Create the same DID from one seed in two wallet sessions; check it persists."""
    node_pool = NodePool(pool_name, pool_genesis_txn_path,
                         {'auto_remove': True})
    seed = '00000000000000000000000000000000'
    wallet_name = 'my-wallet'

    first = Wallet(node_pool.name, wallet_name)
    await first.open()
    await first.create_did(seed=seed)
    assert first.did
    assert first.verkey
    (first_did, first_verkey) = (first.did, first.verkey)
    await first.close()

    second = Wallet(node_pool.name, wallet_name)
    await second.open()
    await second.create_did(seed=seed)
    assert first_did == second.did
    assert first_verkey == second.verkey
    await second.close()
def __init__(self, wallet_config: WalletConfig, instance_cls, issuer_type: str,
             ext_cfg=None):
    """Wrap a VON service class with its own node pool and wallet.

    :param wallet_config: wallet settings; must carry a genesis_path
    :param instance_cls: VON class to instantiate from the wallet later
    :param issuer_type: suffix distinguishing pool/wallet names per service
    :param ext_cfg: optional extra configuration forwarded to the instance
    :raises ValueError: if wallet_config has no genesis_path
    """
    if not wallet_config.genesis_path:
        raise ValueError("Missing genesis_path for wallet configuration")
    # pool and wallet names are namespaced by service type to avoid clashes
    self._pool = NodePool(
        wallet_config.name + "-" + issuer_type,
        wallet_config.genesis_path
    )
    self._instance_cls = instance_cls
    self._instance = None  # created later from _instance_cls
    self._wallet = Wallet(
        self._pool,
        wallet_config.seed,
        wallet_config.name + "-" + issuer_type + "-Wallet",
        wallet_config.type,
        wallet_config.params,
        wallet_config.creds,
    )
    self._ext_cfg = ext_cfg
    # NOTE(review): _opened/_keep_open look like open-state bookkeeping used
    # elsewhere in the class — confirm their semantics against the open/close
    # methods, which are not visible in this chunk.
    self._opened = None
    self._keep_open = False
def go():
    """Boot an agent for the configured role and cache pool + agent in mem_cache.

    Supported roles: trust-anchor (registers itself, originates schemata) and
    sri / org-book / bc-registrar (register via the trust anchor's REST API).
    `do(...)` runs a coroutine to completion synchronously.
    """
    logger = logging.getLogger(__name__)
    cfg = do(mem_cache.get('config'))
    role = (cfg['Agent']['role'] or '').lower().replace(
        ' ', '')  # will be a dir as a pool name: spaces are evil
    profile = environ.get('AGENT_PROFILE').lower().replace(
        ' ', '')  # several profiles may share a role
    logger.debug('Starting agent; profile={}, role={}'.format(
        profile, role))
    pool = NodePool('pool.{}'.format(profile), cfg['Pool']['genesis.txn.path'])
    do(pool.open())
    assert pool.handle
    do(mem_cache.set('pool', pool))
    ag = None
    if role == 'trust-anchor':
        bootstrap_json = cfg['Agent']
        ag = TrustAnchorAgent(
            do(Wallet(pool, cfg['Agent']['seed'], profile).create()),
            BootSequence.agent_config_for(cfg))
        do(ag.open())
        assert ag.did
        tag_did = ag.did
        # register trust anchor if need be
        if not json.loads(do(ag.get_nym(ag.did))):
            do(ag.send_nym(ag.did, ag.verkey, ag.wallet.profile))
        if not json.loads(do(ag.get_endpoint(ag.did))):
            do(ag.send_endpoint())
        # originate schemata if need be
        do(BootSequence.originate(ag, cfg))
    elif role in ('sri', 'org-book', 'bc-registrar'):
        # create agent by role
        if role == 'sri':
            ag = SRIAgent(
                do(Wallet(pool, cfg['Agent']['seed'], profile).create()),
                BootSequence.agent_config_for(cfg))
        elif role == 'org-book':
            ag = OrgBookAgent(
                do(Wallet(pool, cfg['Agent']['seed'], profile).create()),
                BootSequence.agent_config_for(cfg))
        elif role == 'bc-registrar':
            ag = BCRegistrarAgent(
                do(Wallet(pool, cfg['Agent']['seed'], profile).create()),
                BootSequence.agent_config_for(cfg))
        do(ag.open())
        logger.debug('profile {}; ag class {}'.format(
            profile, ag.__class__.__name__))
        trust_anchor_base_url = 'http://{}:{}/api/v0'.format(
            cfg['Trust Anchor']['host'], cfg['Trust Anchor']['port'])
        # get nym: if not registered; get trust-anchor host & port, post an agent-nym-send form
        if not json.loads(do(ag.get_nym(ag.did))):
            try:
                r = requests.get('{}/did'.format(trust_anchor_base_url))
                if not r.ok:
                    logger.error(
                        'Agent {} nym is not on the ledger, but trust anchor is not responding'
                        .format(profile))
                    r.raise_for_status()
                tag_did = r.json()
                logger.debug('{}; tag_did {}'.format(profile, tag_did))
                assert tag_did
                with open(
                        pjoin(BootSequence.dir_proto, 'agent-nym-send.json'),
                        'r') as proto:
                    j = proto.read()
                logger.debug('{}; sending {}'.format(
                    profile, j % (ag.did, ag.verkey)))
                r = requests.post(
                    '{}/agent-nym-send'.format(trust_anchor_base_url),
                    json=json.loads(j % (ag.did, ag.verkey)))
                r.raise_for_status()
            except Exception:
                raise ServerError(
                    'Agent {} requires Trust Anchor agent, but it is not responding'
                    .format(profile))
        # get endpoint: if not present, send it
        if not json.loads(do(ag.get_endpoint(ag.did))):
            do(ag.send_endpoint())
        if role in ('bc-registrar', 'sri'):
            # originate schemata if need be
            do(BootSequence.originate(ag, cfg))
        # NOTE(review): `('org-book')` is a plain string, not a tuple, so this
        # is substring membership (e.g. 'book' would also match) — it happens
        # to behave for the roles above, but `role == 'org-book'` or
        # `role in ('org-book',)` was presumably intended; confirm.
        if role in ('org-book'):
            # set master secret
            from os import getpid
            # append pid to avoid re-using a master secret on restart of HolderProver agent; indy-sdk library
            # is shared, so it remembers and forbids it unless we shut down all processes
            do(
                ag.create_master_secret(cfg['Agent']['master.secret'] + '.' +
                                        str(getpid())))
    else:
        raise ServerError(
            'Agent profile {} configured for unsupported role {}'.format(
                profile, role))
    assert ag is not None
    do(mem_cache.set('agent', ag))
def __init__(self):
    """Wire up the permitify holder agent against the local VON endpoint."""
    wallet_label = config['name'] + ' Holder Wallet'
    self.pool = NodePool('permitify-holder', '/app/.genesis')
    self.instance = VonHolderProver(
        self.pool, WALLET_SEED, wallet_label, None, '127.0.0.1', 9703,
        'api/v0')
def ready(self):
    """Boot the agent for the configured role and cache pool + agent.

    trust-anchor registers itself and sends the schema; sri / the-org-book /
    bc-registrar register via the trust anchor's REST API, seed the schema
    cache, and (per role) send the claim def or set a master secret.
    `do(...)` runs a coroutine to completion synchronously.
    """
    logger = logging.getLogger(__name__)
    cfg = init_config()
    base_api_url_path = PATH_PREFIX_SLASH.strip('/')
    role = (cfg['Agent']['role'] or '').lower().replace(
        ' ', '')  # will be a dir as a pool name: spaces are evil
    p = None  # the node pool
    p = NodePool('pool.{}'.format(role), cfg['Pool']['genesis.txn.path'])
    do(p.open())
    assert p.handle
    cache.set('pool', p)
    ag = None
    if role == 'trust-anchor':
        bootstrap_json = cfg['Agent']
        ag = TrustAnchorAgent(p, cfg['Agent']['seed'],
                              'wallet-{}'.format(role), None,
                              cfg['Agent']['host'],
                              int(cfg['Agent']['port']), base_api_url_path)
        do(ag.open())
        assert ag.did
        tag_did = ag.did
        # register trust anchor if need be
        if not json.loads(do(ag.get_nym(ag.did))):
            do(ag.send_nym(ag.did, ag.verkey))
        if not json.loads(do(ag.get_endpoint(ag.did))):
            do(ag.send_endpoint())
        # send schema if need be, seeding schema cache en passant
        with open(
                pjoin(dirname(abspath(__file__)), 'protocol',
                      'schema-lookup.json'), 'r') as proto:
            j = proto.read()
        if not json.loads(
                do(
                    ag.process_post(
                        json.loads(j % (ag.did, cfg['Schema']['name'],
                                        cfg['Schema']['version']))))):
            with open(
                    pjoin(dirname(abspath(__file__)), 'protocol',
                          'schema-send.json'), 'r') as proto:
                j = proto.read()
            schema = do(
                ag.process_post(
                    json.loads(j % (ag.did, cfg['Schema']['name'],
                                    cfg['Schema']['version']))))
            assert schema
    elif role in ('sri', 'the-org-book', 'bc-registrar'):
        logging.debug("check {} 1".format(role))
        # create agent via factory by role
        if role == 'sri':
            ag = SRIAgent(p, cfg['Agent']['seed'], 'wallet-{}'.format(role),
                          None, cfg['Agent']['host'],
                          int(cfg['Agent']['port']), base_api_url_path)
        elif role == 'the-org-book':
            ag = OrgBookAgent(p, cfg['Agent']['seed'],
                              'wallet-{}'.format(role), None,
                              cfg['Agent']['host'],
                              int(cfg['Agent']['port']),
                              PATH_PREFIX_SLASH.strip('/'))
        elif role == 'bc-registrar':
            ag = BCRegistrarAgent(p, cfg['Agent']['seed'],
                                  'wallet-{}'.format(role), None,
                                  cfg['Agent']['host'],
                                  int(cfg['Agent']['port']),
                                  base_api_url_path)
        do(ag.open())
        logging.debug("role {}; ag class {}".format(
            role, ag.__class__.__name__))
        trust_anchor_host = cfg['Trust Anchor']['host']
        trust_anchor_port = cfg['Trust Anchor']['port']
        # trust anchor DID is necessary
        r = requests.get('http://{}:{}/{}/did'.format(
            trust_anchor_host, trust_anchor_port, base_api_url_path))
        r.raise_for_status()
        tag_did = r.json()
        assert tag_did
        logging.debug("{}; tag_did {}".format(role, tag_did))
        # get nym: if not registered; get trust-anchor host & port, post an agent-nym-send form
        if not json.loads(do(ag.get_nym(ag.did))):
            with open(
                    pjoin(dirname(abspath(__file__)), 'protocol',
                          'agent-nym-send.json'), 'r') as proto:
                j = proto.read()
            logging.debug("{}; sending {}".format(role,
                                                  j % (ag.did, ag.verkey)))
            r = requests.post('http://{}:{}/{}/agent-nym-send'.format(
                trust_anchor_host, trust_anchor_port, base_api_url_path),
                              json=json.loads(j % (ag.did, ag.verkey)))
            r.raise_for_status()
        # get endpoint: if not present, send it
        if not json.loads(do(ag.get_endpoint(ag.did))):
            do(ag.send_endpoint())
        # Post a schema_lookup, seeding schema cache and obviating need to specify schema in POST messages
        with open(
                pjoin(dirname(abspath(__file__)), 'protocol',
                      'schema-lookup.json'), 'r') as proto:
            j = proto.read()
        schema_json = do(
            ag.process_post(
                json.loads(j % (tag_did, cfg['Schema']['name'],
                                cfg['Schema']['version']))))
        assert json.loads(schema_json)
        if role in ('bc-registrar', 'sri'):
            # issuer send claim def
            do(ag.send_claim_def(schema_json))
            logging.debug("\n== check {} 8".format(role))
        if role in ('the-org-book', 'sri'):
            # set master secret
            from os import getpid
            # append pid to avoid re-using a master secret on restart of HolderProver agent; indy-sdk library
            # is shared, so it remembers and forbids it unless we shut down all processes
            do(
                ag.create_master_secret(cfg['Agent']['master.secret'] + '.'
                                        + str(getpid())))
    else:
        raise ValueError('Unsupported agent role [{}]'.format(role))
    assert ag is not None
    assert ag._schema_cache
    cache.set('agent', ag)
    atexit.register(_cleanup)
async def test_agents_direct(pool_name, pool_genesis_txn_path, seed_trustee1,
                             pool_genesis_txn_file, path_home):
    """End-to-end direct-call test over five agents: registration on the
    ledger, schema and claim-def publication, claim issue/store, proof
    creation and verification, and an SRI completion claim round-trip.
    """
    # 1. Open pool, init agents
    p = NodePool(pool_name, pool_genesis_txn_path)
    await p.open()
    assert p.handle
    tag = TrustAnchorAgent(p, seed_trustee1, 'trustee_wallet', None,
                           '127.0.0.1', 8000, 'api/v0')
    sag = SRIAgent(p, 'SRI-Agent-0000000000000000000000', 'sri-agent-wallet',
                   None, '127.0.0.1', 8001, 'api/v0')
    pspcobag = OrgBookAgent(p, 'PSPC-Org-Book-Agent-000000000000',
                            'pspc-org-book-agent-wallet', None, '127.0.0.1',
                            8002, 'api/v0')
    bcobag = OrgBookAgent(p, 'BC-Org-Book-Agent-00000000000000',
                          'bc-org-book-agent-wallet', None, '127.0.0.1', 8003,
                          'api/v0')
    bcrag = BCRegistrarAgent(p, 'BC-Registrar-Agent-0000000000000',
                             'bc-registrar-agent-wallet', None, '127.0.0.1',
                             8004, 'api/v0')
    await tag.open()
    await sag.open()
    await pspcobag.open()
    await bcobag.open()
    await bcrag.open()
    # 2. Publish agent particulars to ledger if not yet present
    for ag in (tag, sag, pspcobag, bcobag, bcrag):
        if not json.loads(await tag.get_nym(ag.did)):
            await tag.send_nym(ag.did, ag.verkey)
        if not json.loads(await tag.get_endpoint(ag.did)):
            await ag.send_endpoint()
    nyms = {
        'tag': await tag.get_nym(tag.did),
        'sag': await tag.get_nym(sag.did),
        'pspcobag': await tag.get_nym(pspcobag.did),
        'bcobag': await tag.get_nym(bcobag.did),
        'bcrag': await tag.get_nym(bcrag.did)
    }
    endpoints = {
        'tag': await tag.get_endpoint(tag.did),
        'sag': await tag.get_endpoint(sag.did),
        'pspcobag': await tag.get_endpoint(pspcobag.did),
        'bcobag': await tag.get_endpoint(bcobag.did),
        'bcrag': await tag.get_endpoint(bcrag.did)
    }
    print('\n\n== 1 == nyms {}\nendpoints {}\n'.format(nyms, endpoints))
    for k in nyms:
        assert 'dest' in nyms[k]
    for k in endpoints:
        assert 'host' in endpoints[k]
        assert 'port' in endpoints[k]
    # 3. Publish schema to ledger if not yet present; get from ledger
    schema_data = {
        'name': 'supplier-registration',
        'version': '1.1',
        'attr_names': [
            'id', 'busId', 'orgTypeId', 'jurisdictionId', 'LegalName',
            'effectiveDate', 'endDate', 'sriRegDate'
        ]
    }
    try:
        schema_json = await tag.get_schema(tag.did, 'Xxxx',
                                           'X.x')  # Bad version number
    except IndyError as e:
        assert ErrorCode.LedgerInvalidTransaction == e.error_code
    schema_json = await tag.get_schema(tag.did, schema_data['name'],
                                       schema_data['version'])  # may exist
    if not json.loads(schema_json):
        schema_json = await tag.send_schema(json.dumps(schema_data))
    schema_json = await tag.get_schema(tag.did, schema_data['name'],
                                       schema_data['version']
                                       )  # should exist now
    schema = json.loads(schema_json)
    assert schema
    print('\n\n== 2 == SCHEMA {}'.format(ppjson(schema)))
    # 4. BC Registrar and SRI agents (as Issuers) create, store, and publish claim definitions to ledger
    # print('TAG DID {}'.format(tag.did))            # V4SG...
    # print('SAG DID {}'.format(sag.did))            # FaBA...
    # print('PSPCOBAG DID {}'.format(pspcobag.did))  # 45Ue...
    # print('BCOBAG DID {}'.format(bcobag.did))      # Rzra...
    # print('BCRAG DID {}'.format(bcrag.did))        # Q4zq...
    non_claim_def_json = await bcobag.get_claim_def(
        999999, bcrag.did)  # ought not exist
    assert not json.loads(non_claim_def_json)
    claim_def_json = await bcrag.send_claim_def(schema_json)
    claim_def_json = await bcobag.get_claim_def(
        schema['seqNo'], bcrag.did)  # ought to exist now
    await sag.get_schema(tag.did, schema_data['name'],
                         schema_data['version'])  # seed schema cache
    sri_claim_def_json = await sag.send_claim_def(schema_json)
    sri_claim_def_json = await pspcobag.get_claim_def(schema['seqNo'], sag.did)
    assert json.loads(claim_def_json)['ref'] == schema['seqNo']
    assert json.loads(sri_claim_def_json)['ref'] == schema['seqNo']
    print('\n\n== 3 == claim def {}'.format(ppjson(
        json.loads(claim_def_json))))
    # 5. Setup master secrets, claim reqs at HolderProver agents
    await bcobag.create_master_secret('MasterSecret')
    await pspcobag.create_master_secret('SecretMaster')
    for ag in (bcobag, pspcobag):
        wallet_num = ag.wallet.num
        # makes sure later ops are OK on reset wallet
        assert (await ag.reset_wallet()) > wallet_num
    await bcobag.store_claim_offer(bcrag.did, schema['seqNo'])
    await pspcobag.store_claim_offer(sag.did, schema['seqNo'])
    claim_req_json = await bcobag.store_claim_req(bcrag.did, claim_def_json)
    sri_claim_req_json = await pspcobag.store_claim_req(
        sag.did, sri_claim_def_json)
    print('\n\n== 4 == BC reg claim req {}\n\nSRI claim req {}'.format(
        claim_req_json, sri_claim_req_json))
    # 6. BC Reg agent (as Issuer) issues claims and stores at HolderProver: get claim req, create claim, store claim
    claims = [{
        'id': claim_value_pair('1'),
        'busId': claim_value_pair('11121398'),
        'orgTypeId': claim_value_pair('2'),
        'jurisdictionId': claim_value_pair('1'),
        'LegalName': claim_value_pair('The Original House of Pies'),
        'effectiveDate': claim_value_pair('2010-10-10'),
        'endDate': claim_value_pair(None),
        'sriRegDate': claim_value_pair(None)
    }, {
        'id': claim_value_pair('2'),
        'busId': claim_value_pair('11133333'),
        'orgTypeId': claim_value_pair('1'),
        'jurisdictionId': claim_value_pair('1'),
        'LegalName': claim_value_pair('Planet Cake'),
        'effectiveDate': claim_value_pair('2011-10-01'),
        'endDate': claim_value_pair(None),
        'sriRegDate': claim_value_pair(None)
    }, {
        'id': claim_value_pair('3'),
        'busId': claim_value_pair('11144444'),
        'orgTypeId': claim_value_pair('2'),
        'jurisdictionId': claim_value_pair('1'),
        'LegalName': claim_value_pair('Tart City'),
        'effectiveDate': claim_value_pair('2012-12-01'),
        'endDate': claim_value_pair(None),
        'sriRegDate': claim_value_pair(None)
    }]
    for c in claims:
        (_, claim_json) = await bcrag.create_claim(claim_req_json, c)
        assert json.loads(claim_json)
        await bcobag.store_claim(claim_json)
    # 7. BC Org Book agent (as HolderProver) finds claims
    by_attr = {
        'nonce': '1234',
        'name': 'proof_req_0',
        'version': '0',
        'requested_attrs': {
            '{}_uuid'.format(attr): {
                'schema_seq_no': schema['seqNo'],
                'name': attr
            }
            for attr in claims[0]
        },
        'requested_predicates': {}
    }
    (claim_uuids_all,
     claims_found_json) = await bcobag.get_claims(json.dumps(by_attr))
    print('\n\n== 5 == claims by attr, no filter {}; {}'.format(
        claim_uuids_all, ppjson(claims_found_json)))
    claims_found = json.loads(claims_found_json)
    display_pruned_postfilt = claims_for(
        claims_found, {'LegalName': claims[2]['LegalName'][0]})
    print(
        '\n\n== 6 == display claims filtered post-hoc matching {}: {}'.format(
            claims[2]['LegalName'][0], ppjson(display_pruned_postfilt)))
    display_pruned = prune_claims_json({k for k in display_pruned_postfilt},
                                       claims_found)
    print('\n\n== 7 == stripped down {}'.format(ppjson(display_pruned)))
    filter_enc = {
        k: claims[2][k][0]
        for k in claims[2] if k in ('sriRegDate', 'busId')
    }
    (claim_uuids_filt, claims_found_json) = await bcobag.get_claims(
        json.dumps(by_attr), filter_enc)
    print('\n\n== 8 == claims by attr, filtered a priori {}; {}'.format(
        claim_uuids_filt, ppjson(claims_found_json)))
    assert set([*display_pruned_postfilt]) == claim_uuids_filt
    assert len(display_pruned_postfilt) == 1
    claim_uuid = claim_uuids_filt.pop()
    # 8. BC Org Book (as HolderProver) creates proof for claim specified by filter
    claims_found = json.loads(claims_found_json)
    requested_claims = {
        'self_attested_attributes': {},
        'requested_attrs': {
            attr: [claim_uuid, True]
            for attr in by_attr['requested_attrs']
            if attr in claims_found['attrs']
        },
        'requested_predicates':
            {pred: claim_uuid
             for pred in by_attr['requested_predicates']}
    }
    proof_json = await bcobag.create_proof(json.dumps(by_attr), schema,
                                           json.loads(claim_def_json),
                                           requested_claims)
    print('\n\n== 9 == proof (by filter) {}'.format(ppjson(proof_json)))
    # 9. SRI agent (as Verifier) verifies proof (by filter)
    rc_json = await sag.verify_proof(json.dumps(by_attr),
                                     json.loads(proof_json), schema,
                                     json.loads(claim_def_json))
    print('\n\n== 10 == the proof (by filter) verifies as {}'.format(
        ppjson(rc_json)))
    assert json.loads(rc_json)
    # 10. BC Org Book (as HolderProver) finds claim by claim-uuid, no claim by non-claim-uuid
    claim_found_by_uuid = json.loads(await bcobag.get_claim_by_claim_uuid(
        schema_json, claim_uuid))
    print('\n\n== 11 == claim by claim-uuid={}: {}'.format(
        claim_uuid, ppjson(claim_found_by_uuid)))
    assert claim_found_by_uuid
    assert claim_found_by_uuid['attrs']
    non_claim_by_uuid = json.loads(await bcobag.get_claim_by_claim_uuid(
        schema_json, 'claim::ffffffff-ffff-ffff-ffff-ffffffffffff'))
    assert non_claim_by_uuid
    print('\n\n== 12 == non-claim: {}'.format(ppjson(non_claim_by_uuid)))
    assert all(not non_claim_by_uuid['attrs'][attr]
               for attr in non_claim_by_uuid['attrs'])
    # 11. BC Org Book (as HolderProver) creates proof for claim specified by claim-uuid
    requested_claims = {
        'self_attested_attributes': {},
        'requested_attrs': {
            attr: [claim_uuid, True]
            for attr in claim_found_by_uuid['attrs']
        },
        'requested_predicates': {}
    }
    proof_json = await bcobag.create_proof(json.dumps(by_attr), schema,
                                           json.loads(claim_def_json),
                                           requested_claims)
    proof = json.loads(proof_json)
    print('\n\n== 13 == proof by claim-uuid={} {}'.format(
        claim_uuid, ppjson(proof_json)))
    # 12. SRI agent (as Verifier) verifies proof
    rc_json = await sag.verify_proof(json.dumps(by_attr), proof, schema,
                                     json.loads(claim_def_json))
    print('\n\n== 14 == the proof by claim-uuid={} verifies as {}'.format(
        claim_uuid, ppjson(rc_json)))
    assert json.loads(rc_json)
    # 13. Create and store SRI registration completion claim from verified proof
    sri_claim = revealed_attrs(proof)
    yyyy_mm_dd = datetime.date.today().strftime('%Y-%m-%d')
    sri_claim['sriRegDate'] = claim_value_pair(yyyy_mm_dd)
    print('\n\n== 15 == sri_claim: {}'.format(ppjson(sri_claim)))
    # NOTE(review): `c` here is the last claim dict left over from the step-6
    # loop, not the sri_claim just assembled above — looks like a bug
    # (sri_claim presumably intended); confirm before changing behavior.
    (_, sri_claim_json) = await sag.create_claim(sri_claim_req_json, c)
    assert json.loads(sri_claim_json)
    await pspcobag.store_claim(sri_claim_json)
    # 14. PSPC Org Book Agent (as HolderProver) finds claim
    (sri_claim_uuids_all,
     sri_claims_found_json) = await pspcobag.get_claims(json.dumps(by_attr))
    print('\n\n== 16 == SRI claims by attr, no filter {}; {}'.format(
        sri_claim_uuids_all, ppjson(sri_claims_found_json)))
    assert len(sri_claim_uuids_all) == 1
    sri_claim_uuid = sri_claim_uuids_all.pop()
    sri_claims_found = json.loads(sri_claims_found_json)
    # 15. PSPC Org Book Agent (as HolderProver) creates proof
    sri_requested_claims = {
        'self_attested_attributes': {},
        'requested_attrs': {
            attr: [sri_claim_uuid, True]
            for attr in sri_claims_found['attrs']
        },
        'requested_predicates': {}
    }
    sri_proof_json = await pspcobag.create_proof(
        json.dumps(by_attr), schema, json.loads(sri_claim_def_json),
        sri_requested_claims)
    print('\n\n== 17 == PSPC Org Book proof on claim-uuid={} {}'.format(
        sri_claim_uuid, ppjson(sri_proof_json)))
    # 16. SRI (as Verifier) verify proof
    rc_json = await sag.verify_proof(json.dumps(by_attr),
                                     json.loads(sri_proof_json), schema,
                                     json.loads(sri_claim_def_json))
    print('\n\n== 18 == the SRI proof by claim-uuid={} verifies as {}'.format(
        sri_claim_uuid, ppjson(rc_json)))
    assert json.loads(rc_json)
    await bcrag.close()
    await bcobag.close()
    await pspcobag.close()
    await sag.close()
    await tag.close()
    await p.close()
async def test_wallet(pool_name, pool_genesis_txn_path, pool_genesis_txn_file, path_home):
    """Exercise the Wallet lifecycle against a live node pool.

    Covers creation/open/close with auto-remove on, the default
    (no-auto-remove) configuration, re-opening an extant wallet, and the
    double-open and closed-pool error paths.
    """

    pool = NodePool(pool_name, pool_genesis_txn_path, {'auto-remove': True})
    await pool.open()
    assert pool.handle is not None

    seed = '00000000000000000000000000000000'
    name = 'my-wallet'
    path = Path(path_home, 'wallet', name)
    path_seed2did = path.with_name('{}.seed2did'.format(path.name))

    # 1. Configuration with auto-remove set: on-disk artifacts vanish on close
    wallet = Wallet(pool, seed, name, None, {'auto-remove': True})
    await wallet.create()
    assert path.exists(), 'Wallet path {} not present'.format(path)
    await wallet.open()
    assert wallet.did
    assert wallet.verkey
    await wallet.close()
    assert not path.exists(), 'Wallet path {} still present'.format(path)
    assert not path_seed2did.exists(), 'Wallet path {} still present'.format(
        path_seed2did)
    print('\n\n== 1 == New wallet with auto-remove OK')

    # 2. Default configuration (auto-remove=False): artifacts persist past close
    wallet = Wallet(pool, seed, name)
    await wallet.create()
    assert path.exists(), 'Wallet path {} not present'.format(path)
    assert not path_seed2did.exists(), 'Wallet path {} still present'.format(
        path_seed2did)
    await wallet.open()
    assert wallet.did
    assert wallet.verkey
    did = wallet.did
    verkey = wallet.verkey
    await wallet.close()
    assert path.exists(), 'Wallet path {} not present'.format(path)
    assert not path_seed2did.exists(), 'Wallet path {} still present'.format(
        path_seed2did)
    print('\n\n== 2 == New wallet with default config (no auto-remove) OK')

    # 3. Re-open the extant wallet: the same DID and verkey must come back
    reopened = Wallet(pool, seed, name, None, {'auto-remove': True})
    await reopened.create()
    async with reopened:
        assert reopened.did == did
        assert reopened.verkey == verkey
    assert not path.exists(), 'Wallet path {} still present'.format(path)
    assert not path_seed2did.exists(), 'Wallet path {} still present'.format(
        path_seed2did)
    print('\n\n== 3 == Re-use extant wallet OK')

    # 4. Opening an already-open wallet must raise WalletAlreadyOpenedError
    try:
        doubled = await Wallet(pool, seed, name, None, {
            'auto-remove': True
        }).create()
        async with doubled:
            async with doubled:
                assert False
    except IndyError as err:
        assert err.error_code == ErrorCode.WalletAlreadyOpenedError
    assert not path.exists(), 'Wallet path {} still present'.format(path)
    assert not path_seed2did.exists(), 'Wallet path {} still present'.format(
        path_seed2did)

    await pool.close()

    # 5. Opening a wallet against a closed pool must raise ClosedPool
    try:
        orphan = await Wallet(pool, seed, name, None, {
            'auto-remove': True
        }).create()
        await orphan.open()
        assert False
    except ClosedPool:
        pass
    assert not path.exists(), 'Wallet path {} still present'.format(path)
    assert not path_seed2did.exists(), 'Wallet path {} still present'.format(
        path_seed2did)
    print('\n\n== 4 == Error cases error as expected')
async def test_agents_process_forms_local(
        pool_name,
        pool_genesis_txn_path,
        seed_trustee1,
        pool_genesis_txn_file,
        path_home):
    """Drive the full agent form-processing flow against a local pool.

    Boots a trust anchor, an SRI agent, two org-book agents, and a
    BC-registrar agent, then exercises nym/endpoint registration, schema and
    claim-def publication, claim issuance and storage, proof creation and
    verification, and the helper GET calls -- all through each agent's
    process_post()/process_get_*() API rather than direct method calls.
    """

    # 1. Open pool, init agents
    async with NodePool(pool_name, pool_genesis_txn_path) as p, (
            TrustAnchorAgent(
                p,
                seed_trustee1,
                'trustee_wallet',
                None,
                '127.0.0.1',
                8000,  # port as int, consistent with the sibling agents below
                'api/v0')) as tag, (
            SRIAgent(
                p,
                'SRI-Agent-0000000000000000000000',
                'sri-agent-wallet',
                None,
                '127.0.0.1',
                8001,
                'api/v0')) as sag, (
            OrgBookAgent(
                p,
                'PSPC-Org-Book-Agent-000000000000',
                'pspc-org-book-agent-wallet',
                None,
                '127.0.0.1',
                8003,
                'api/v0')) as pspcobag, (
            OrgBookAgent(
                p,
                'BC-Org-Book-Agent-00000000000000',
                'bc-org-book-agent-wallet',
                None,
                '127.0.0.1',
                8003,  # NOTE(review): same port as pspcobag -- confirm intended
                'api/v0')) as bcobag, (
            BCRegistrarAgent(
                p,
                'BC-Registrar-Agent-0000000000000',
                'bc-reg-agent-wallet',
                None,
                '127.0.0.1',
                8004,
                'api/v0')) as bcrag:

        assert p.handle is not None

        # 2. Publish agent particulars to ledger if not yet present
        for ag in (tag, sag, pspcobag, bcobag, bcrag):
            nym_lookup_form = {
                'type': 'agent-nym-lookup',
                'data': {
                    'agent-nym': {
                        'did': ag.did
                    }
                }
            }
            nym = json.loads(await ag.process_post(nym_lookup_form))
            if not nym:
                # only the trust anchor may write nyms to the ledger
                await tag.process_post({
                    'type': 'agent-nym-send',
                    'data': {
                        'agent-nym': {
                            'did': ag.did,
                            'verkey': ag.verkey
                        }
                    }
                })
                nym = json.loads(await ag.process_post(nym_lookup_form))
            assert nym

            endpoint_lookup_form = {
                'type': 'agent-endpoint-lookup',
                'data': {
                    'agent-endpoint': {
                        'did': ag.did
                    }
                }
            }
            endpoint = json.loads(await tag.process_post(endpoint_lookup_form))
            if not endpoint:
                # each agent sends its own endpoint
                await ag.process_post({
                    'type': 'agent-endpoint-send',
                    'data': {}
                })
                endpoint = json.loads(await ag.process_post(endpoint_lookup_form))
            assert endpoint

        try:  # Make sure only a trust anchor can register an agent
            await sag.process_post({
                'type': 'agent-nym-send',
                'data': {
                    'agent-nym': {
                        'did': sag.did,
                        'verkey': sag.verkey
                    }
                }
            })
            assert False
        except NotImplementedError:
            pass

        # 3. Publish schema to ledger if not yet present; get from ledger
        schema_data = {
            'name': 'supplier-registration',
            'version': '1.1',
            'attr_names': [
                'id', 'busId', 'orgTypeId', 'jurisdictionId', 'LegalName',
                'effectiveDate', 'endDate', 'sriRegDate'
            ]
        }
        schema_lookup_form = {
            'type': 'schema-lookup',
            'data': {
                'schema': {
                    'issuer-did': tag.did,
                    'name': schema_data['name'],
                    'version': 'xxxx'
                },
            }
        }
        try:
            schema_json = await tag.process_post(
                schema_lookup_form)  # Bad version number
            assert False
        except IndyError:
            pass

        schema_lookup_form['data']['schema']['version'] = '999.999'
        assert not json.loads(
            await tag.process_post(schema_lookup_form))  # ought not exist
        schema_lookup_form['data']['schema']['version'] = '1.1'
        schema_json = await tag.process_post(schema_lookup_form)  # may exist
        if not json.loads(schema_json):
            schema_send = json.loads(await tag.process_post({
                'type': 'schema-send',
                'data': {
                    'schema': {
                        'issuer-did': tag.did,
                        'name': schema_data['name'],
                        'version': schema_data['version']
                    },
                    'attr-names': schema_data['attr_names']
                }
            }))
            assert schema_send
            schema_json = await tag.process_post(schema_lookup_form)
        schema = json.loads(schema_json)  # should exist now
        assert schema
        print('\n\n== 2 == SCHEMA {}'.format(ppjson(schema)))

        try:  # Make sure only an origin can send a schema
            await sag.process_post({
                'type': 'schema-send',
                'data': {
                    'schema': {
                        'issuer-did': tag.did,
                        'name': schema_data['name'],
                        'version': schema_data['version']
                    },
                    'attr-names': schema_data['attr_names']
                }
            })
            assert False
        except NotImplementedError:
            pass

        # 4. BC Registrar and SRI agents (as Issuers) create, store, and
        # publish claim def to ledger
        claim_def_send_form = {'type': 'claim-def-send', 'data': {}}
        try:  # schema unspecified, ought to fail
            await bcrag.process_post(claim_def_send_form)
            assert False  # fix: fail the test if no error was raised
        except ValueError:
            pass
        await bcrag.process_post(
            schema_lookup_form)  # seed Issuers' schema caches
        await sag.process_post(schema_lookup_form)
        await bcrag.process_post(claim_def_send_form)
        await sag.process_post(claim_def_send_form)
        claim_def_json = await bcobag.get_claim_def(
            schema['seqNo'],
            bcrag.did)  # ought to exist now (short-circuit)
        assert json.loads(claim_def_json)['ref'] == schema['seqNo']

        # 5. Setup master secrets, claim reqs at HolderProver agents
        master_secret_set_form = {
            'type': 'master-secret-set',
            'data': {
                'label': 'maestro'
            }
        }
        claim_hello_form = {
            'type': 'claim-hello',
            'data': {
                'issuer-did': bcrag.did
            }
        }
        try:  # master secret unspecified, ought to fail
            await bcobag.process_post(claim_hello_form)
            assert False  # fix: fail the test if no error was raised
        except ValueError:
            pass
        await bcobag.process_post(master_secret_set_form)
        try:  # schema unspecified, ought to fail
            claim_req_json = await bcobag.process_post(claim_hello_form)
            assert False  # fix: fail the test if no error was raised
        except ValueError:
            pass
        claims_reset_form = {'type': 'claims-reset', 'data': {}}
        claims_reset_resp = json.loads(
            await bcobag.process_post(claims_reset_form))
        # make sure later ops are OK on reset wallet -- response is {} if OK
        assert not claims_reset_resp
        await bcobag.process_post(
            schema_lookup_form)  # seed HolderProver's schema cache
        claim_req_json = await bcobag.process_post(claim_hello_form)
        claim_req = json.loads(claim_req_json)
        assert claim_req

        # 6. BC Reg agent (as Issuer) issues claims and stores at
        # HolderProver: get claim req, create claim, store claim
        claims = [{
            'id': 1,
            'busId': 11121398,
            'orgTypeId': 2,
            'jurisdictionId': 1,
            'LegalName': 'The Original House of Pies',
            'effectiveDate': '2010-10-10',
            'endDate': None,
            'sriRegDate': None
        }, {
            'id': 2,
            'busId': 11133333,
            'orgTypeId': 1,
            'jurisdictionId': 1,
            'LegalName': 'Planet Cake',
            'effectiveDate': '2011-10-01',
            'endDate': None,
            'sriRegDate': None
        }, {
            'id': 3,
            'busId': 11144444,
            'orgTypeId': 2,
            'jurisdictionId': 1,
            'LegalName': 'Tart City',
            'effectiveDate': '2012-12-01',
            'endDate': None,
            'sriRegDate': None
        }]
        for c in claims:
            claim_json = await bcrag.process_post({
                'type': 'claim-create',
                'data': {
                    'claim-req': claim_req,
                    'claim-attrs': c
                }
            })
            await bcobag.process_post({
                'type': 'claim-store',
                'data': {
                    'claim': json.loads(claim_json)
                }
            })

        # 7. BC Org Book agent (as HolderProver) finds claims
        claims_all = json.loads(await bcobag.process_post({
            'type': 'claim-request',
            'data': {
                'claim-filter': {
                    'attr-match': {},
                    'predicate-match': []
                }
            }
        }))
        print('\n\n== 3 == claims by attr, no filter, process-post {}'.format(
            ppjson(claims_all)))
        display_pruned_postfilt = claims_for(
            claims_all['claims'], {'LegalName': claims[2]['LegalName']})
        print('\n\n== 4 == display claims filtered post-hoc matching {}: {}'.
              format(claims[2]['LegalName'], ppjson(display_pruned_postfilt)))
        display_pruned = prune_claims_json(
            {k for k in display_pruned_postfilt}, claims_all['claims'])
        print('\n\n== 5 == stripped down {}'.format(ppjson(display_pruned)))
        claims_prefilt_json = await bcobag.process_post({
            'type': 'claim-request',
            'data': {
                'claim-filter': {
                    'attr-match': {
                        k: claims[2][k]
                        for k in claims[2] if k in ('sriRegDate', 'busId')
                    },
                    'predicate-match': []
                }
            }
        })
        claims_prefilt = json.loads(claims_prefilt_json)
        print(
            '\n\n== 6 == claims by attr, with filter a priori, process-post {}'
            .format(ppjson(claims_prefilt)))
        display_pruned_prefilt = claims_for(claims_prefilt['claims'])
        print('\n\n== 7 == display claims filtered a priori matching {}: {}'.
              format(claims[2]['LegalName'], ppjson(display_pruned_prefilt)))
        assert set([*display_pruned_postfilt
                    ]) == set([*display_pruned_prefilt])
        assert len(display_pruned_postfilt) == 1

        # 8. BC Org Book (as HolderProver) creates proof (by filter)
        proof_resp = json.loads(await bcobag.process_post({
            'type': 'proof-request',
            'data': {
                'claim-filter': {
                    'attr-match': {
                        k: claims[2][k]
                        for k in claims[2] if k in ('sriRegDate', 'busId')
                    },
                    'predicate-match': []
                }
            }
        }))
        print('\n\n== 8 == proof response (by filter) {}'.format(
            ppjson(proof_resp)))

        # 9. SRI agent (as Verifier) verifies proof (by filter)
        rc_json = await sag.process_post({
            'type': 'verification-request',
            'data': proof_resp
        })
        print('\n\n== 9 == the proof (by filter) verifies as {}'.format(
            ppjson(rc_json)))
        assert json.loads(rc_json)

        # 10. BC Org Book (as HolderProver) creates proof (by claim-uuid)
        claim_uuid = set([*display_pruned_prefilt]).pop()
        proof_resp = json.loads(await bcobag.process_post({
            'type': 'proof-request-by-claim-uuid',
            'data': {
                'claim-uuid': claim_uuid
            }
        }))
        print('\n\n== 10 == proof response by claim-uuid={}: {}'.format(
            claim_uuid, ppjson(proof_resp)))

        # 11. BC Org Book (HolderProver) creates non-proof (by non-claim-uuid)
        non_claim_uuid = 'claim::ffffffff-ffff-ffff-ffff-ffffffffffff'
        try:
            await bcobag.process_post({
                'type': 'proof-request-by-claim-uuid',
                'data': {
                    'claim-uuid': non_claim_uuid
                }
            })
            # fix: fail if no error was raised; do not rebind proof_resp --
            # step 12 verifies the genuine proof from step 10
            assert False
        except ValueError:
            pass

        # 12. SRI agent (as Verifier) verifies proof (by claim-uuid)
        rc_json = await sag.process_post({
            'type': 'verification-request',
            'data': proof_resp
        })
        print('\n\n== 12 == the proof by claim_uuid={} verifies as {}'.format(
            claim_uuid, ppjson(rc_json)))
        assert json.loads(rc_json)

        # 13. Finish bootstrapping PSPC Org Book as HolderProver
        master_secret_set_form['data']['label'] = 'shhhh'
        await pspcobag.process_post(master_secret_set_form)
        await pspcobag.process_post(schema_lookup_form)  # seed schema cache
        sri_claims_reset_resp = json.loads(
            await pspcobag.process_post(claims_reset_form))
        # make sure later ops are OK on reset wallet -- response is {} if OK
        assert not sri_claims_reset_resp
        sri_claim_def_json = await bcobag.get_claim_def(
            schema['seqNo'], sag.did)
        assert json.loads(sri_claim_def_json)['ref'] == schema['seqNo']
        await sag.process_post(schema_lookup_form)  # seed schema cache

        # 14. Create and store SRI registration completion claim
        # from verified proof
        claim_hello_form['data']['issuer-did'] = sag.did
        sri_claim_req_json = await pspcobag.process_post(claim_hello_form)
        sri_claim_req = json.loads(sri_claim_req_json)
        assert sri_claim_req
        sri_claim = revealed_attrs(proof_resp['proof'])
        yyyy_mm_dd = datetime.date.today().strftime('%Y-%m-%d')
        sri_claim['sriRegDate'] = yyyy_mm_dd
        print('\n\n== 13 == sri_claim: {}'.format(ppjson(sri_claim)))
        sri_claim_json = await sag.process_post({
            'type': 'claim-create',
            'data': {
                'claim-req': sri_claim_req,
                'claim-attrs': sri_claim
            }
        })
        await pspcobag.process_post({
            'type': 'claim-store',
            'data': {
                'claim': json.loads(sri_claim_json)
            }
        })
        assert json.loads(sri_claim_json)

        # 15. PSPC Org Book (as HolderProver) finds claim
        sri_claims_all = json.loads(await pspcobag.process_post({
            'type': 'claim-request',
            'data': {
                'claim-filter': {
                    'attr-match': {},
                    'predicate-match': []
                }
            }
        }))
        print('\n\n== 14 == SRI claim {}'.format(ppjson(sri_claims_all)))

        # 16. PSPC Org Book (as HolderProver) creates proof
        sri_display = claims_for(sri_claims_all['claims'])
        sri_claim_uuid = set([*sri_display]).pop()
        sri_proof_resp = json.loads(await pspcobag.process_post({
            'type': 'proof-request-by-claim-uuid',
            'data': {
                'claim-uuid': sri_claim_uuid
            }
        }))
        print('\n\n== 15 == SRI proof response by claim-uuid={}: {}'.format(
            sri_claim_uuid, ppjson(sri_proof_resp)))

        # 17. SRI (as Verifier) verifies proof
        rc_json = await sag.process_post({
            'type': 'verification-request',
            'data': sri_proof_resp
        })
        print('\n\n== 16 == the SRI proof by claim_uuid={} verifies as {}'.
              format(sri_claim_uuid, ppjson(rc_json)))
        assert json.loads(rc_json)

        # 18. Exercise helper GET calls
        txn_json = await sag.process_get_txn(schema['seqNo'])
        print('\n\n== 17 == schema by txn #{}: {}'.format(
            schema['seqNo'], ppjson(txn_json)))
        assert json.loads(txn_json)
        txn_json = await sag.process_get_txn(99999)  # ought not exist
        assert not json.loads(txn_json)
        did_json = await bcrag.process_get_did()
        print('\n\n== 18 == bcrag did: {}'.format(ppjson(did_json)))
        assert json.loads(did_json)