async def test_pool_open(path_home, pool_name, pool_genesis_txn_data, pool_ip):
    """
    Exercise NodePool config validation, open/close via async context manager,
    double-close tolerance, and invalid-timeout error handling.

    :param path_home: home directory for indy-sdk artifacts
    :param pool_name: name of the node pool ledger configuration
    :param pool_genesis_txn_data: genesis transaction data for pool config creation
    :param pool_ip: IP address of the node pool under test (display only here)
    """
    print(
        Ink.YELLOW(
            '\n\n== Testing Node Pool Config vs. IP {} =='.format(pool_ip)))

    # 'extra' is not a recognized NodePool config key: constructor must raise
    try:
        NodePool(pool_name, config={'extra': 'not allowed'})
        assert False
    except JSONValidation:
        pass

    # Set up node pool ledger config and wallets, open pool, init anchors
    p_mgr = NodePoolManager()
    if pool_name not in await p_mgr.list():
        await p_mgr.add_config(pool_name, pool_genesis_txn_data)
    pool = p_mgr.get(pool_name)
    async with pool:
        assert pool.handle is not None
    assert pool.handle is None  # context exit closes the pool
    await pool.close()  # exercise double-close: should silently carry on

    # non-integer timeout must surface as CommonInvalidStructure on open
    pool.config['timeout'] = 'should be an integer'
    try:
        async with pool:
            assert False
    except IndyError as x_indy:
        assert x_indy.error_code == ErrorCode.CommonInvalidStructure
    pool.config.pop('timeout')  # restore valid config

    print(
        '\n\n== 1 == Pool {} opens and closes OK from existing ledger configuration'
        .format(pool))
async def main(pool_name: str, wallet_name: str) -> None:
    """
    Main line for revocation registry builder operating in external process on
    behalf of issuer agent.

    :param pool_name: name of (running) node pool
    :param wallet_name: wallet name - must match that of issuer with existing wallet
    """
    logging.basicConfig(level=logging.WARN, format='%(levelname)-8s | %(name)-12s | %(message)s')
    logging.getLogger('indy').setLevel(logging.ERROR)

    pool = NodePool(pool_name)

    # issuer drops a one-shot '.start' sentinel file holding wallet and logging
    # parameters; consume it, then delete so a stale file cannot re-trigger
    path_start = join(RevRegBuilder.dir_tails_sentinel(wallet_name), '.start')

    with open(path_start, 'r') as fh_start:
        start_data = json.loads(fh_start.read())
    remove(path_start)

    # mirror the issuer's logging configuration in this external process
    logging.getLogger(__name__).setLevel(start_data['logging']['level'])
    for path_log in start_data['logging']['paths']:
        logging.getLogger(__name__).addHandler(logging.FileHandler(path_log))

    # open wallet then serve as external rev reg builder (rrbx=True) until done
    async with Wallet(
            wallet_name,
            start_data['wallet']['storage_type'],
            start_data['wallet']['config'],
            start_data['wallet']['access_creds']) as wallet, (
                RevRegBuilder(wallet, pool, rrbx=True)) as rrban:
        await rrban.serve()
def close_pool(pool: NodePool) -> None:
    """
    Close node pool, blocking until the close operation completes.

    :param pool: node pool to close
    """
    coro = pool.close()
    do_wait(coro)
async def _setup_pool(self) -> None:
    """
    Initialize the Indy NodePool, fetching the genesis transaction if necessary.

    No-op when the pool has already been opened.
    """
    if self._opened:
        return

    await self._check_genesis_path()

    # pass a protocol override only when one is configured and non-empty
    protocol = self._protocol_version
    pool_cfg = {'protocol': protocol} if protocol is not None and len(protocol) > 0 else None

    self._pool = NodePool(self._name, self._genesis_path, pool_cfg)
    await self._pool.open()
    self._opened = True
def __init__(
        self,
        pool_name: str = 'nodepool',
        genesis_path: str = '/home/indy/.indy-cli/networks/sandbox/pool_transactions_genesis',
        wallet_seed: str = '000000000000000000000000Trustee1',
        wallet_name: str = 'trustee_wallet') -> None:
    """
    Initialize service state and construct (but do not open) node pool and wallet.

    Generalization: the previously hard-coded pool name, genesis transaction
    path, wallet seed, and wallet name become keyword parameters; the defaults
    reproduce the original values, so existing no-argument callers are unchanged.

    :param pool_name: name of the Indy node pool configuration
    :param genesis_path: path to the pool genesis transaction file
    :param wallet_seed: trustee wallet seed (NOTE(review): positional
        Wallet(seed, name) usage mirrors the original call - confirm signature)
    :param wallet_name: trustee wallet name
    """
    self._cache = None
    self._instance = None
    self._ready = False
    self._sync_lock = None

    pool_cfg = None  # {'protocol': protocol_version}
    self._pool = NodePool(
        pool_name,
        genesis_path,
        pool_cfg,
    )
    self._wallet = Wallet(
        wallet_seed,
        wallet_name,
    )
def __init__(self):
    """
    Initialize service state and construct (but do not open) node pool and wallet.
    """
    # lazily-populated runtime state
    self._cache = None
    self._instance = None
    self._ready = False
    self._ledger_lock = None
    self._sync_lock = None

    pool_cfg = None  # {'protocol': protocol_version}
    self._pool = NodePool('nodepool', get_genesis_file(), pool_cfg)
    self._wallet = Wallet(LEDGER_SEED, 'trustee_wallet')
async def test_pool_open(
        path_home, pool_name, pool_genesis_txn_path, pool_genesis_txn_file):
    """
    Exercise NodePool protocol designations, config validation, open/close,
    and the auto-remove behavior against the on-disk pool path.

    :param path_home: home directory for indy-sdk artifacts
    :param pool_name: name of the node pool ledger configuration
    :param pool_genesis_txn_path: path to pool genesis transaction file
    :param pool_genesis_txn_file: pool genesis transaction file fixture
    """
    # protocol versions from 1.4 onward share one indy-sdk designation
    assert Protocol.V_13.indy() != Protocol.V_14.indy()
    assert Protocol.V_14.indy() == Protocol.V_15.indy()
    assert Protocol.V_15.indy() == Protocol.V_16.indy()
    assert Protocol.V_16.indy() == Protocol.DEFAULT.indy()

    path = Path(path_home, 'pool', pool_name)

    # non-boolean auto-remove must fail config validation
    try:
        NodePool(pool_name, pool_genesis_txn_path, {'auto-remove': 'non-boolean'})
        assert False
    except JSONValidation:
        pass

    # malformed protocol version must fail config validation
    try:
        NodePool(pool_name, pool_genesis_txn_path, {'auto-remove': True, 'protocol': '0.0a'})
        assert False
    except JSONValidation:
        pass

    # unrecognized extra properties are tolerated (no validation error)
    try:
        pool = NodePool(pool_name, pool_genesis_txn_path, {'auto-remove': True, 'extra-property': True})
        await pool.remove()
        assert not path.exists(), 'Pool path {} still present'.format(path)
    except JSONValidation:
        assert False

    # explicit protocol + auto-remove: path cleaned up on close
    pool = NodePool(pool_name, pool_genesis_txn_path, {'auto-remove': True, 'protocol': '1.6'})
    await pool.open()
    assert pool.handle is not None
    await pool.close()
    assert not path.exists(), 'Pool path {} still present'.format(path)

    pool = NodePool(pool_name, pool_genesis_txn_path)  # auto-remove default: False, protocol default: latest
    await pool.open()
    assert pool.handle is not None
    await pool.close()
    assert path.exists(), 'Pool path {} not present'.format(path)

    pool = NodePool(pool_name, pool_genesis_txn_path, {'auto-remove': True})  # check survival re-opening existing pool
    await pool.open()
    assert pool.handle is not None
    await pool.close()
    assert not path.exists(), 'Pool path {} still present'.format(path)
async def test_pool_open(path_home, pool_name, pool_genesis_txn_data, pool_ip):
    """
    Exercise NodePool config validation plus a plain open/close cycle via
    the NodePoolManager.

    :param path_home: home directory for indy-sdk artifacts
    :param pool_name: name of the node pool ledger configuration
    :param pool_genesis_txn_data: genesis transaction data for pool config creation
    :param pool_ip: IP address of the node pool under test (display only here)
    """
    print(
        Ink.YELLOW(
            '\n\n== Testing Node Pool Config vs. IP {} =='.format(pool_ip)))

    # unrecognized config key must raise on construction
    try:
        NodePool(pool_name, config={'extra': 'not allowed'})
        assert False
    except JSONValidation:
        pass

    # Set up node pool ledger config and wallets, open pool, init anchors
    pool_manager = NodePoolManager()
    known_configs = await pool_manager.list()
    if pool_name not in known_configs:
        await pool_manager.add_config(pool_name, pool_genesis_txn_data)

    pool = pool_manager.get(pool_name)
    await pool.open()
    assert pool.handle is not None
    await pool.close()

    print(
        '\n\n== 1 == Pool {} opens and closes OK from existing ledger configuration'
        .format(pool_name))
async def test_von_tails(pool_ip, genesis_txn_file, path_cli_ini, cli_ini,
                         path_setnym_ini, setnym_ini):
    """
    End-to-end tails server test: start server, set anchor nyms, issue
    revocable credentials to generate tails files, then exercise sync upload,
    sync download, listing views, and admin deletion.

    :param pool_ip: IP address of node pool under test (display only here)
    :param genesis_txn_file: pool genesis transaction file fixture
    :param path_cli_ini: dict of tails client config file paths, by profile
    :param cli_ini: tails client config fixture
    :param path_setnym_ini: dict of setnym config file paths, by profile
    :param setnym_ini: setnym config fixture
    """
    print(
        Ink.YELLOW('\n\n== Testing tails server vs. IP {} =='.format(pool_ip)))

    # Set config for tails clients
    config = {}
    i = 0
    for profile in path_cli_ini:
        cli_config = inis2dict(str(path_cli_ini[profile]))
        config[profile] = cli_config
        with open(path_cli_ini[profile], 'r') as fh_cfg:
            print('\n\n== 0.{} == {} tails sync configuration:\n{}'.format(
                i, profile, fh_cfg.read()))
        i += 1

    # Start tails server
    print('\n\n== 1 == Starting tails server on port {}'.format(
        config['issuer']['Tails Server']['port']))
    tsrv = TailsServer(config['issuer']['Tails Server']['port'])
    started = tsrv.start()
    if not started:
        # a running server would retain prior state and invalidate the test
        print(
            '\n\n== X == Server already running - stop it to run test from scratch'
        )
        assert False

    assert tsrv.is_up()
    print(
        '\n\n== 2 == Started tails server, docker-compose port-forwarded via localhost:{}'
        .format(tsrv.port))
    atexit.register(shutdown)  # ensure server stops even on test failure

    # Set nyms (operation creates pool if need be)
    i = 0
    setnym_config = {}
    for profile in path_setnym_ini:
        cli_config = inis2dict(str(path_setnym_ini[profile]))
        if profile == 'admin':  # tails server anchor on ledger a priori
            continue
        setnym_config[profile] = cli_config
        with open(path_setnym_ini[profile], 'r') as fh_cfg:
            print('\n\n== 3.{} == {} setnym configuration:\n{}'.format(
                i, profile, fh_cfg.read()))
        sub_proc = subprocess.run(
            ['von_anchor_setnym', str(path_setnym_ini[profile])],
            stdout=subprocess.PIPE,
            stderr=subprocess.DEVNULL)
        assert not sub_proc.returncode
        i += 1
    print('\n\n== 4 == Setnym ops completed OK')

    # wallets = {profile: Wallet(setnym_config[profile]['VON Anchor']['name']) for profile in setnym_config}
    # wallets['admin'] = Wallet(config['admin']['VON Anchor']['name'])
    wallets = await get_wallets(
        {
            **{
                profile: setnym_config[profile]['VON Anchor']
                for profile in setnym_config
            },
            'admin': config['admin']['VON Anchor']
        },
        open_all=False)

    # Open pool and anchors, issue creds to create tails files
    async with wallets['issuer'] as w_issuer, (
            wallets['prover']) as w_prover, (NodePool(
                config['issuer']['Node Pool']['name'])) as pool, (RegistrarAnchor(
                    w_issuer, pool)) as ian, (OrgBookAnchor(w_prover, pool)) as pan:

        # Get nyms from ledger for display
        i = 0
        for an in (ian, pan):
            print('\n\n== 5.{} == {} nym on ledger: {}'.format(
                i, an.wallet.name, ppjson(await an.get_nym())))
            i += 1

        # Publish schema to ledger
        S_ID = schema_id(ian.did, 'rainbow', '{}.0'.format(int(time())))
        schema_data = {
            'name': schema_key(S_ID).name,
            'version': schema_key(S_ID).version,
            'attr_names': ['numeric', 'sha256']
        }

        S_KEY = schema_key(S_ID)
        try:
            await ian.get_schema(S_KEY)  # may exist (almost certainly not)
        except AbsentSchema:
            await ian.send_schema(json.dumps(schema_data))
        schema_json = await ian.get_schema(S_KEY)
        schema = json.loads(schema_json)
        print('\n\n== 6 == SCHEMA [{} v{}]: {}'.format(S_KEY.name,
                                                       S_KEY.version,
                                                       ppjson(schema)))
        assert schema  # should exist now

        # Setup link secret for creation of cred req or proof
        await pan.create_link_secret('LinkSecret')

        # Issuer anchor create, store, publish cred definitions to ledger; create cred offers
        await ian.send_cred_def(S_ID, revo=True)
        cd_id = cred_def_id(S_KEY.origin_did, schema['seqNo'], pool.protocol)

        # revocable cred def must have produced a linked tails file
        assert ((not Tails.unlinked(ian.dir_tails)) and
                [f for f in Tails.links(ian.dir_tails, ian.did) if cd_id in f])

        cred_def_json = await ian.get_cred_def(cd_id)  # ought to exist now
        cred_def = json.loads(cred_def_json)
        print('\n\n== 7.0 == Cred def [{} v{}]: {}'.format(
            S_KEY.name, S_KEY.version, ppjson(json.loads(cred_def_json))))
        assert cred_def.get('schemaId', None) == str(schema['seqNo'])

        cred_offer_json = await ian.create_cred_offer(schema['seqNo'])
        cred_offer = json.loads(cred_offer_json)
        print('\n\n== 7.1 == Credential offer [{} v{}]: {}'.format(
            S_KEY.name, S_KEY.version, ppjson(cred_offer_json)))

        (cred_req_json, cred_req_metadata_json) = await pan.create_cred_req(
            cred_offer_json, cd_id)
        cred_req = json.loads(cred_req_json)
        print('\n\n== 8 == Credential request [{} v{}]: metadata {}, cred {}'.
              format(S_KEY.name, S_KEY.version,
                     ppjson(cred_req_metadata_json), ppjson(cred_req_json)))
        assert json.loads(cred_req_json)

        # Issuer anchor issues creds and stores at HolderProver: get cred req, create cred, store cred
        cred_data = []

        CREDS = 450  # enough to build 4 rev regs
        print('\n\n== 9 == creating and storing {} credentials:'.format(CREDS))
        for number in range(CREDS):
            (cred_json, _) = await ian.create_cred(
                cred_offer_json, cred_req_json, {
                    'numeric': str(number),
                    'sha256': sha256(str(number).encode()).hexdigest(),
                })

            cred_id = await pan.store_cred(cred_json, cred_req_metadata_json)
            # progress dots, line break every 100 creds
            print('.',
                  end='' if (number + 1) % 100 else '{}\n'.format(number + 1),
                  flush=True)

        # Exercise list view, least to most specific
        for tails_list_path in ('all', ian.did, cd_id):
            url = url_for(tsrv.port, 'tails/list/{}'.format(tails_list_path))
            r = requests.get(url)
            assert r.status_code == 200
            assert not r.json()  # nothing uploaded yet
        rr_ids_up = {
            basename(link)
            for link in Tails.links(ian.dir_tails, ian.did)
        }
        for rr_id in rr_ids_up:
            url = url_for(tsrv.port, 'tails/list/{}'.format(rr_id))
            r = requests.get(url)
            assert r.status_code == 200
            assert not r.json()
        print(
            '\n\n== 10 == All listing views at server come back OK and empty as expected'
        )

        rv = pexpect.run('python ../src/sync/sync.py {}'.format(
            path_cli_ini['issuer']))
        print('\n\n== 11 == Issuer sync uploaded local tails files')
        for tails_list_path in ('all', ian.did, cd_id):
            url = url_for(tsrv.port, 'tails/list/{}'.format(tails_list_path))
            r = requests.get(url)
            assert r.status_code == 200
            assert {rr for rr in r.json()} == rr_ids_up
        for rr_id in rr_ids_up:
            url = url_for(tsrv.port, 'tails/list/{}'.format(rr_id))
            r = requests.get(url)
            assert r.status_code == 200
            assert r.json() == [rr_id]  # list with one rr_id should come back

        # Exercise list view, least to most specific
        for tails_list_path in ('all', ian.did, cd_id):
            url = url_for(tsrv.port, 'tails/list/{}'.format(tails_list_path))
            r = requests.get(url)
            assert r.status_code == 200
            assert len(r.json()) == len(rr_ids_up)
        print(
            '\n\n== 12 == All listing views at server come back OK with {} uploaded files'
            .format(len(rr_ids_up)))

        rv = pexpect.run('python ../src/sync/sync.py {}'.format(
            path_cli_ini['prover']))
        print('\n\n== 13 == Prover sync downloaded remote tails files')
        rr_ids_down = {
            basename(link)
            for link in Tails.links(
                config['prover']['Tails Client']['tails.dir'], ian.did)
        }
        assert rr_ids_down == rr_ids_up

        # Exercise admin-delete
        rv = pexpect.run('python ../src/admin/delete.py {} all'.format(
            path_cli_ini['admin']))
        print('\n\n== 14 == Admin called for deletion at tails server')

        # Check tails server deletion
        url = url_for(tsrv.port, 'tails/list/all')
        r = requests.get(url)
        assert r.status_code == 200
        assert not r.json()
        print(
            '\n\n== 15 == All listing views at server come back OK and empty as expected'
        )

        rv = pexpect.run('python ../src/sync/multisync.py 1 {}'.format(
            path_cli_ini['issuer']))
        print(
            '\n\n== 16 == Issuer multisync on 1 sync iteration uploaded local tails files'
        )
        for tails_list_path in ('all', ian.did, cd_id):
            url = url_for(tsrv.port, 'tails/list/{}'.format(tails_list_path))
            r = requests.get(url)
            assert r.status_code == 200
            assert {rr for rr in r.json()} == rr_ids_up
        for rr_id in rr_ids_up:
            url = url_for(tsrv.port, 'tails/list/{}'.format(rr_id))
            r = requests.get(url)
            assert r.status_code == 200
            assert r.json() == [rr_id]  # list with one rr_id should come back

        # Exercise list view, least to most specific
        for tails_list_path in ('all', ian.did, cd_id):
            url = url_for(tsrv.port, 'tails/list/{}'.format(tails_list_path))
            r = requests.get(url)
            assert r.status_code == 200
            assert len(r.json()) == len(rr_ids_up)
        print(
            '\n\n== 17 == All listing views at server come back OK with {} uploaded files'
            .format(len(rr_ids_up)))

        # Remove tails server anchor wallet
        await wallets['admin'].remove()
        print('\n\n== 18 == Removed admin (tails server anchor {}) wallet'.
              format(wallets['admin'].name))