Ejemplo n.º 1
0
def survey(dir_tails: str,
           host: str,
           port: int,
           issuer_did: str = None) -> tuple:
    """
    Survey tails files of interest locally and at the tails server.

    Raise ConnectionError on connection failure.

    :param dir_tails: local tails directory
    :param host: tails server host
    :param port: tails server port
    :param issuer_did: issuer DID of interest for local and remote tails file survey (default all)
    :return: pair (local paths to tails symbolic links, remote rev reg ids)
    """

    local_links = Tails.links(dir_tails, issuer_did)

    # server listing endpoint: narrow by issuer DID when given, otherwise list all
    list_url = 'http://{}:{}/tails/list/{}'.format(host, port, issuer_did or 'all')
    remote_rr_ids = set(requests.get(list_url).json())

    logging.debug('Survey: local=%s, remote=%s', ppjson(local_links), ppjson(remote_rr_ids))
    return (local_links, remote_rr_ids)
Ejemplo n.º 2
0
async def test_pack():
    """Exercise wallet pack/unpack round trips: anonymous, authenticated, and multi-recipient."""

    print(Ink.YELLOW('\n\n== Testing packing/unpacking =='))

    wallets = await get_wallets(
        {
            'agent-13': 'Agent-13-00000000000000000000000',
            'agent-86': 'Agent-86-00000000000000000000000',
            'agent-99': 'Agent-99-00000000000000000000000'
        },
        open_all=False,
        auto_remove=True)

    # Open wallets and operate
    async with wallets['agent-13'] as w13, (
            wallets['agent-86']) as w86, (
            wallets['agent-99']) as w99:

        dids = {name: wallets[name].did for name in wallets}
        print('\n\n== 1 == DIDs: {}'.format(ppjson(dids)))

        # Agent 86 packs and unpacks to and from itself anonymously, implicitly and explicitly
        plain = 'Hello World'
        packed = await w86.pack(plain)
        print('\n\n== 2 == Plaintext: {}, packed JWE: {}'.format(plain, packed))
        unpacked = await w86.unpack(packed)
        # unpack yields (message, recipient verkey, sender verkey); anonymous pack has no sender
        assert unpacked == (plain, w86.verkey, None)
        packed = await w86.pack(plain, w86.verkey)
        unpacked = await w86.unpack(packed)
        assert unpacked == (plain, w86.verkey, None)
        packed = await w86.pack(plain, [w86.verkey])
        unpacked = await w86.unpack(packed)
        assert unpacked == (plain, w86.verkey, None)
        print('\n\n== 3 == {} packed and unpacked anonymous message: {}'.format(w86.name, unpacked[0]))

        # Agent 86 signs and packs to itself, then unpacks, with anchor verkey and loc did verkey
        packed = await w86.pack(plain, None, w86.verkey)
        unpacked = await w86.unpack(packed)
        assert unpacked == (plain, w86.verkey, w86.verkey)
        loc_did_info = await w86.create_local_did('Shoe-Phone-000000000000000000000')
        packed = await w86.pack(plain, None, loc_did_info.verkey)
        unpacked = await w86.unpack(packed)
        assert unpacked == (plain, w86.verkey, loc_did_info.verkey)
        print('\n\n== 4 == {} packed and unpacked authenticated message: {}'.format(w86.name, unpacked[0]))

        # Agent 86 signs and packs to agents 13 and 99, fails to unpack
        packed = await w86.pack(plain, [w13.verkey, w99.verkey], loc_did_info.verkey)
        unpacked = await w13.unpack(packed)
        assert unpacked == (plain, w13.verkey, loc_did_info.verkey)
        print('\n\n== 5.0 == {} auth-packed, {} unpacked: {}'.format(w86.name, w13.name, unpacked[0]))
        unpacked = await w99.unpack(packed)
        assert unpacked == (plain, w99.verkey, loc_did_info.verkey)
        print('\n\n== 5.1 == {} auth-packed, {} unpacked: {}'.format(w86.name, w99.name, unpacked[0]))
        try:
            # sender was not a recipient, so its wallet holds no key able to decrypt
            unpacked = await w86.unpack(packed)
            assert False
        except AbsentRecord:
            pass
        print('\n\n== 5.2 == {} correctly failed to unpack ciphertext'.format(w86.name))
Ejemplo n.º 3
0
async def sync_issuer(
        dir_tails: str,
        host: str,
        port: int,
        local_only: set,
        noman: NominalAnchor) -> None:
    """
    Synchronize for issuer: upload any tails files appearing locally but not remotely.

    :param dir_tails: local tails directory
    :param host: tails server host
    :param port: tails server port
    :param local_only: paths to local tails symbolic links (rev reg ids) without corresponding remote tails files
    :param noman: open issuer anchor
    """

    logging.debug('Sync-issuer: local-only=%s', ppjson(local_only))
    if not local_only:
        return  # nothing missing remotely; no work to do

    for rr_id in local_only:
        # restrict POSTs to the issuer anchor's own tails files
        if not ok_rev_reg_id(rr_id, noman.did):
            logging.debug(
                'Sync-issuer: local-only %s is not a rev reg id for issuer %s (%s)',
                rr_id,
                noman.did,
                noman.wallet.name)
            continue

        timestamp = int(time())
        upload_url = 'http://{}:{}/tails/{}/{}'.format(host, port, quote(rr_id), timestamp)
        tails_path = Tails.linked(dir_tails, rr_id)
        with open(tails_path, 'rb') as fh:
            tails_bytes = fh.read()
            # signature over epoch and file content authenticates the upload to the server
            sig = await noman.sign('{}||{}'.format(timestamp, tails_bytes))
        try:
            resp = requests.post(
                upload_url,
                files={
                    'tails-file': (basename(tails_path), tails_bytes),
                    'signature': ('signature', sig)
                })
            logging.info('Upload: url %s status %s', upload_url, resp.status_code)
        except RequestsConnectionError:
            logging.error('POST connection refused: %s', upload_url)
Ejemplo n.º 4
0
async def test_a2a():
    """Exercise DIDDoc de/serialization, accessors, canonical references, and failure paths."""
    print(Ink.YELLOW('\n\n== Testing DID Doc wranglers =='))

    # One authn key by reference
    dd_in = {
        '@context': 'https://w3id.org/did/v1',
        'id': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
        'publicKey': [
            {
                'id': '3',
                'type': 'RsaVerificationKey2018',
                'controller': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
                'publicKeyPem': '-----BEGIN PUBLIC X...'
            },
            {
                'id': '4',
                'type': 'RsaVerificationKey2018',
                'controller': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
                'publicKeyPem': '-----BEGIN PUBLIC 9...'
            },
            {
                'id': '6',
                'type': 'RsaVerificationKey2018',
                'controller': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
                'publicKeyPem': '-----BEGIN PUBLIC A...'
            }
        ],
        'authentication': [
            {
                'type': 'RsaSignatureAuthentication2018',
                'publicKey': 'did:sov:LjgpST2rjsoxYegQDRm7EL#4'
            }
        ],
        'service': [
            {
                'id': '0',
                'type': 'Agency',
                'serviceEndpoint': 'did:sov:Q4zqM7aXqm7gDQkUVLng9h'
            }
        ]
    }

    dd = DIDDoc.deserialize(dd_in)
    assert len(dd.pubkey) == len(dd_in['publicKey'])
    assert len(dd.authnkey) == len(dd_in['authentication'])

    dd_out = dd.serialize()
    print('\n\n== 1 == DID Doc {} on abbreviated identifiers: {}'.format(dd, ppjson(dd_out)))

    # Exercise JSON, de/serialization
    dd_json = dd.to_json()
    dd_copy = dd.from_json(dd_json)
    assert dd_copy.did == dd.did
    assert all(dd_copy.authnkey[k].to_dict() == dd.authnkey[k].to_dict() for k in dd_copy.authnkey)
    assert {k for k in dd_copy.authnkey} == {k for k in dd.authnkey}
    assert all(dd_copy.pubkey[k].to_dict() == dd.pubkey[k].to_dict() for k in dd_copy.pubkey)
    assert {k for k in dd_copy.pubkey} == {k for k in dd.pubkey}
    assert all(dd_copy.service[k].to_dict() == dd.service[k].to_dict() for k in dd_copy.service)
    assert {k for k in dd_copy.service} == {k for k in dd.service}
    print('\n\n== 2 == DID Doc de/serialization operates OK:')

    # Exercise accessors
    dd.did = dd_out['id']
    assert dd.did == canon_did(dd_out['id'])
    try:
        # set() accepts only public keys and services; anything else must raise
        dd.set(['neither a service', 'nor a public key'])
        assert False
    except BadDIDDocItem:
        pass
    assert dd.service[[k for k in dd.service][0]].did == dd.did
    print('\n\n== 3 == DID Doc accessors operate OK')

    # One authn key embedded, all possible refs canonical
    dd_in = {
        '@context': 'https://w3id.org/did/v1',
        'id': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
        'publicKey': [
            {
                'id': '3',
                'type': 'RsaVerificationKey2018',
                'controller': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
                'publicKeyPem': '-----BEGIN PUBLIC X...'
            },
            {
                'id': 'did:sov:LjgpST2rjsoxYegQDRm7EL#4',
                'type': 'RsaVerificationKey2018',
                'controller': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
                'publicKeyPem': '-----BEGIN PUBLIC 9...'
            }
        ],
        'authentication': [
            {
                'type': 'RsaSignatureAuthentication2018',
                'publicKey': 'did:sov:LjgpST2rjsoxYegQDRm7EL#4'
            },
            {
                'id': 'did:sov:LjgpST2rjsoxYegQDRm7EL#6',
                'type': 'RsaVerificationKey2018',
                'controller': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
                'publicKeyPem': '-----BEGIN PUBLIC A...'
            }
        ],
        'service': [
            {
                'id': 'did:sov:LjgpST2rjsoxYegQDRm7EL;0',
                'type': 'Agency',
                'serviceEndpoint': 'https://www.von.ca'
            }
        ]
    }

    dd = DIDDoc.deserialize(dd_in)
    # embedded authn key adds one beyond the publicKey entries
    assert len(dd.pubkey) == len(dd_in['publicKey']) + 1
    assert len(dd.authnkey) == len(dd_in['authentication'])

    dd_out = dd.serialize()
    print('\n\n== 4 == DID Doc on mixed reference styles, embedded and ref style authn keys: {}'.format(ppjson(dd_out)))

    # All references canonical where possible; one authn key embedded and one by reference
    dd_in = {
        '@context': 'https://w3id.org/did/v1',
        'id': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
        'publicKey': [
            {
                'id': 'did:sov:LjgpST2rjsoxYegQDRm7EL#3',
                'type': 'RsaVerificationKey2018',
                'controller': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
                'publicKeyPem': '-----BEGIN PUBLIC X...'
            },
            {
                'id': 'did:sov:LjgpST2rjsoxYegQDRm7EL#4',
                'type': 'RsaVerificationKey2018',
                'controller': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
                'publicKeyPem': '-----BEGIN PUBLIC 9...'
            }
        ],
        'authentication': [
            {
                'type': 'RsaSignatureAuthentication2018',
                'publicKey': 'did:sov:LjgpST2rjsoxYegQDRm7EL#4'
            },
            {
                'id': 'did:sov:LjgpST2rjsoxYegQDRm7EL#6',
                'type': 'RsaVerificationKey2018',
                'controller': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
                'publicKeyPem': '-----BEGIN PUBLIC A...'
            }
        ],
        'service': [
            {
                'id': 'did:sov:LjgpST2rjsoxYegQDRm7EL;0',
                'type': 'DidMessaging',
                'serviceEndpoint': 'https://www.von.ca'
            }
        ]
    }

    dd = DIDDoc.deserialize(dd_in)
    assert len(dd.pubkey) == len(dd_in['publicKey']) + 1
    assert len(dd.authnkey) == len(dd_in['authentication'])

    dd_out = dd.serialize()
    print('\n\n== 5 == DID Doc on canonical refs: {}'.format(ppjson(dd_out)))

    # Minimal as per indy-agent test suite without explicit identifiers
    dd_in = {
        '@context': 'https://w3id.org/did/v1',
        'publicKey': [
            {
                'id': 'LjgpST2rjsoxYegQDRm7EL#keys-1',
                'type': 'Ed25519VerificationKey2018',
                'controller': 'LjgpST2rjsoxYegQDRm7EL',
                'publicKeyBase58': '~XXXXXXXXXXXXXXXX'
            }
        ],
        'service': [
            {
                'type': 'DidMessaging',
                'recipientKeys': ['~XXXXXXXXXXXXXXXX'],
                'serviceEndpoint': 'https://www.von.ca'
            }
        ]
    }

    dd = DIDDoc.deserialize(dd_in)
    assert len(dd.pubkey) == len(dd_in['publicKey'])
    assert len(dd.authnkey) == 0

    dd_out = dd.serialize()
    print('\n\n== 6 == DID Doc miminal style, implcit DID document identifier: {}'.format(
        ppjson(dd_out)))

    # Minimal + ids as per indy-agent test suite with explicit identifiers; novel service recipient key on raw base58
    dd_in = {
        '@context': 'https://w3id.org/did/v1',
        'id': 'LjgpST2rjsoxYegQDRm7EL',
        'publicKey': [
            {
                'id': 'LjgpST2rjsoxYegQDRm7EL#keys-1',
                'type': 'Ed25519VerificationKey2018',
                'controller': 'LjgpST2rjsoxYegQDRm7EL',
                'publicKeyBase58': '~XXXXXXXXXXXXXXXX'
            }
        ],
        'service': [
            {
                'id': 'LjgpST2rjsoxYegQDRm7EL;indy',
                'type': 'DidMessaging',
                'priority': 1,
                'recipientKeys': ['~YYYYYYYYYYYYYYYY'],
                'serviceEndpoint': 'https://www.von.ca'
            }
        ]
    }

    dd = DIDDoc.deserialize(dd_in)
    # novel raw base58 recipient key adds one pubkey beyond the declared ones
    assert len(dd.pubkey) == 1 + len(dd_in['publicKey'])
    assert len(dd.authnkey) == 0

    dd_out = dd.serialize()
    print('\n\n== 7 == DID Doc miminal style plus explicit idents and novel raw base58 service recip key: {}'.format(
        ppjson(dd_out)))

    # Minimal + ids as per indy-agent test suite with explicit identifiers; novel service recipient key on raw base58
    dd_in = {
        '@context': 'https://w3id.org/did/v1',
        'id': 'LjgpST2rjsoxYegQDRm7EL',
        'publicKey': [
            {
                'id': 'LjgpST2rjsoxYegQDRm7EL#keys-1',
                'type': 'Ed25519VerificationKey2018',
                'controller': 'LjgpST2rjsoxYegQDRm7EL',
                'publicKeyBase58': '~XXXXXXXXXXXXXXXX'
            },
            {
                'id': 'LjgpST2rjsoxYegQDRm7EL#keys-2',
                'type': 'Ed25519VerificationKey2018',
                'controller': 'LjgpST2rjsoxYegQDRm7EL',
                'publicKeyBase58': '~YYYYYYYYYYYYYYYY'
            },
            {
                'id': 'LjgpST2rjsoxYegQDRm7EL#keys-3',
                'type': 'Secp256k1VerificationKey2018',
                'controller': 'LjgpST2rjsoxYegQDRm7EL',
                'publicKeyHex': '02b97c30de767f084ce3080168ee293053ba33b235d7116a3263d29f1450936b71'
            },
            {
                'id': 'did:sov:LjgpST2rjsoxYegQDRm7EL#keys-4',
                'type': 'RsaVerificationKey2018',
                'controller': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
                'publicKeyPem': '-----BEGIN PUBLIC A...'
            }
        ],
        'service': [
            {
                'id': 'LjgpST2rjsoxYegQDRm7EL;indy',
                'type': 'DidMessaging',
                'priority': 0,
                'recipientKeys': ['~ZZZZZZZZZZZZZZZZ'],
                'serviceEndpoint': 'did:sov:LjgpST2rjsoxYegQDRm7EL;1'
            },
            {
                'id': '1',
                'type': 'one',
                'priority': 1,
                'recipientKeys': [
                    '~XXXXXXXXXXXXXXXX',
                    'did:sov:LjgpST2rjsoxYegQDRm7EL#keys-1'
                ],
                'routingKeys': [
                    'did:sov:LjgpST2rjsoxYegQDRm7EL#keys-4'
                ],
                'serviceEndpoint': 'LjgpST2rjsoxYegQDRm7EL;2'
            },
            {
                'id': '2',
                'type': 'two',
                'priority': 2,
                'recipientKeys': [
                    '~XXXXXXXXXXXXXXXX',
                    'did:sov:LjgpST2rjsoxYegQDRm7EL#keys-1'
                ],
                'routingKeys': [
                    'did:sov:LjgpST2rjsoxYegQDRm7EL#keys-4'
                ],
                'serviceEndpoint': 'https://www.two.ca/two'
            }
        ]
    }

    dd = DIDDoc.deserialize(dd_in)
    assert len(dd.pubkey) == 1 + len(dd_in['publicKey'])
    assert len(dd.authnkey) == 0
    assert {s.priority for s in dd.service.values()} == {0, 1, 2}
    assert len(dd.service) == 3
    assert all(len(dd.service[k].to_dict()['recipientKeys']) == 1 for k in dd.service)
    assert 'routingKeys' not in dd.service['did:sov:LjgpST2rjsoxYegQDRm7EL;indy'].to_dict()
    assert all(len(dd.service[k].to_dict()['routingKeys']) == 1
        for k in ('did:sov:LjgpST2rjsoxYegQDRm7EL;1', 'did:sov:LjgpST2rjsoxYegQDRm7EL;2'))


    dd_out = dd.serialize()
    print('\n\n== 8 == DID Doc on mixed service routing and recipient keys: {}'.format(ppjson(dd_out)))

    # add a new authn-capable public key via set(), then a service referencing it
    pk = PublicKey(
        dd.did,
        '99',
        '~AAAAAAAAAAAAAAAA',
        PublicKeyType.ED25519_SIG_2018,
        dd.did,
        True)
    dd.set(pk)
    assert len(dd.pubkey) == 2 + len(dd_in['publicKey'])
    assert canon_ref(dd.did, '99', '#') in dd.pubkey
    assert len(dd.authnkey) == 1

    service = Service(
        dd.did,
        'abc',
        'IndyAgent',
        [pk],
        [pk],
        'http://www.abc.ca/123'
    )
    dd.set(service)
    assert len(dd.service) == 4
    assert canon_ref(dd.did, 'abc', ';') in dd.service
    print('\n\n== 9 == DID Doc adds public key and service via set() OK')

    # Exercise missing service recipient key
    dd_in = {
        '@context': 'https://w3id.org/did/v1',
        'id': 'LjgpST2rjsoxYegQDRm7EL',
        'publicKey': [
            {
                'id': 'LjgpST2rjsoxYegQDRm7EL#keys-1',
                'type': 'Ed25519VerificationKey2018',
                'controller': 'LjgpST2rjsoxYegQDRm7EL',
                'publicKeyBase58': '~XXXXXXXXXXXXXXXX'
            }
        ],
        'service': [
            {
                'id': 'LjgpST2rjsoxYegQDRm7EL;indy',
                'type': 'DidMessaging',
                'priority': 1,
                'recipientKeys': [
                    'did:sov:LjgpST2rjsoxYegQDRm7EL#keys-3'
                ],
                'serviceEndpoint': 'https://www.von.ca'
            }
        ]
    }

    try:
        dd = DIDDoc.deserialize(dd_in)
        assert False
    except AbsentDIDDocItem:
        pass
    print('\n\n== 10 == DID Doc on underspecified service key fails as expected')

    # Minimal as per W3C Example 2, draft 0.12
    dd_in = {
        '@context': 'https://w3id.org/did/v1',
        'id': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
        'authentication': [
            {
                'id': 'LjgpST2rjsoxYegQDRm7EL#keys-1',
                'type': 'Ed25519VerificationKey2018',
                'controller': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
                'publicKeyBase58': '~XXXXXXXXXXXXXXXX'
            }
        ],
        'service': [
            {
                'type': 'DidMessaging',
                'serviceEndpoint': 'https://example.com/endpoint/8377464'
            }
        ]
    }

    dd = DIDDoc.deserialize(dd_in)
    assert len(dd.pubkey) == 1
    assert len(dd.authnkey) == 1
    assert len(dd.service) == 1

    dd_out = dd.serialize()
    print('\n\n== 11 == Minimal DID Doc (no pubkey except authentication) as per W3C spec parses OK: {}'.format(
        ppjson(dd_out)))

    # Exercise no-identifier case
    dd_in = {
        '@context': 'https://w3id.org/did/v1',
        'authentication': [
            {
                'type': 'Ed25519VerificationKey2018',
                'controller': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
                'publicKeyBase58': '~XXXXXXXXXXXXXXXX'
            }
        ],
        'service': [
            {
                'type': 'DidMessaging',
                'serviceEndpoint': 'https://example.com/endpoint/8377464'
            }
        ]
    }

    try:
        dd = DIDDoc.deserialize(dd_in)
        assert False
    except AbsentDIDDocItem:
        pass
    print('\n\n== 12 == DID Doc without identifier rejected as expected')

    # Exercise reference canonicalization, including failure paths
    try:
        canon_ref('not-a-DID', ref=dd.did, delimiter='#')
        assert False
    except BadIdentifier:
        pass

    try:
        canon_ref(dd.did, ref='did:sov:not-a-DID', delimiter='#')
        assert False
    except BadIdentifier:
        pass

    urlref = 'https://www.clafouti-quasar.ca:8443/supply-management/fruit/index.html'
    assert canon_ref(dd.did, ref=urlref) == urlref
    print('\n\n== 13 == Reference canonicalization operates as expected')

    assert PublicKeyType.get('no-such-type') is None
    pubkey0 = dd.pubkey[[k for k in dd.pubkey][0]]
    was_authn = pubkey0.authn
    pubkey0.authn = not was_authn
    assert pubkey0.authn != was_authn
    print('\n\n== 14 == Changed authentication setting for DIDDoc {} in public key {}, now {}'.format(
        pubkey0.did,
        pubkey0.id,
        repr(pubkey0)))
Ejemplo n.º 5
0
async def test_von_tails(pool_ip, genesis_txn_file, path_cli_ini, cli_ini,
                         path_setnym_ini, setnym_ini):
    """End-to-end tails server test: start server, sync up/down, exercise listing views, admin delete, multisync."""

    print(
        Ink.YELLOW('\n\n== Testing tails server vs. IP {} =='.format(pool_ip)))

    # Set config for tails clients
    config = {}
    i = 0
    for profile in path_cli_ini:
        cli_config = inis2dict(str(path_cli_ini[profile]))
        config[profile] = cli_config
        with open(path_cli_ini[profile], 'r') as fh_cfg:
            print('\n\n== 0.{} == {} tails sync configuration:\n{}'.format(
                i, profile, fh_cfg.read()))
        i += 1

    # Start tails server
    print('\n\n== 1 == Starting tails server on port {}'.format(
        config['issuer']['Tails Server']['port']))
    tsrv = TailsServer(config['issuer']['Tails Server']['port'])
    started = tsrv.start()
    if not started:
        print(
            '\n\n== X == Server already running - stop it to run test from scratch'
        )
        assert False

    assert tsrv.is_up()
    print(
        '\n\n== 2 == Started tails server, docker-compose port-forwarded via localhost:{}'
        .format(tsrv.port))
    atexit.register(shutdown)

    # Set nyms (operation creates pool if need be)
    i = 0
    setnym_config = {}
    for profile in path_setnym_ini:
        cli_config = inis2dict(str(path_setnym_ini[profile]))
        if profile == 'admin':  # tails server anchor on ledger a priori
            continue
        setnym_config[profile] = cli_config
        with open(path_setnym_ini[profile], 'r') as fh_cfg:
            print('\n\n== 3.{} == {} setnym configuration:\n{}'.format(
                i, profile, fh_cfg.read()))
        sub_proc = subprocess.run(
            ['von_anchor_setnym',
             str(path_setnym_ini[profile])],
            stdout=subprocess.PIPE,
            stderr=subprocess.DEVNULL)
        assert not sub_proc.returncode
        i += 1
    print('\n\n== 4 == Setnym ops completed OK')

    # wallets = {profile: Wallet(setnym_config[profile]['VON Anchor']['name']) for profile in setnym_config}
    # wallets['admin'] = Wallet(config['admin']['VON Anchor']['name'])
    wallets = await get_wallets(
        {
            **{
                profile: setnym_config[profile]['VON Anchor']
                for profile in setnym_config
            }, 'admin': config['admin']['VON Anchor']
        },
        open_all=False)

    # Open pool and anchors, issue creds to create tails files
    async with wallets['issuer'] as w_issuer, (
        wallets['prover']) as w_prover, (NodePool(
            config['issuer']['Node Pool']['name'])) as pool, (RegistrarAnchor(
                w_issuer, pool)) as ian, (OrgBookAnchor(w_prover,
                                                        pool)) as pan:

        # Get nyms from ledger for display
        i = 0
        for an in (ian, pan):
            print('\n\n== 5.{} == {} nym on ledger: {}'.format(
                i, an.wallet.name, ppjson(await an.get_nym())))
            i += 1

        # Publish schema to ledger
        S_ID = schema_id(ian.did, 'rainbow', '{}.0'.format(int(time())))
        schema_data = {
            'name': schema_key(S_ID).name,
            'version': schema_key(S_ID).version,
            'attr_names': ['numeric', 'sha256']
        }

        S_KEY = schema_key(S_ID)
        try:
            await ian.get_schema(S_KEY)  # may exist (almost certainly not)
        except AbsentSchema:
            await ian.send_schema(json.dumps(schema_data))
        schema_json = await ian.get_schema(S_KEY)
        schema = json.loads(schema_json)
        print('\n\n== 6 == SCHEMA [{} v{}]: {}'.format(S_KEY.name,
                                                       S_KEY.version,
                                                       ppjson(schema)))
        assert schema  # should exist now

        # Setup link secret for creation of cred req or proof
        await pan.create_link_secret('LinkSecret')

        # Issuer anchor create, store, publish cred definitions to ledger; create cred offers
        await ian.send_cred_def(S_ID, revo=True)

        cd_id = cred_def_id(S_KEY.origin_did, schema['seqNo'], pool.protocol)

        assert ((not Tails.unlinked(ian.dir_tails)) and
                [f for f in Tails.links(ian.dir_tails, ian.did) if cd_id in f])

        cred_def_json = await ian.get_cred_def(cd_id)  # ought to exist now
        cred_def = json.loads(cred_def_json)
        print('\n\n== 7.0 == Cred def [{} v{}]: {}'.format(
            S_KEY.name, S_KEY.version, ppjson(json.loads(cred_def_json))))
        assert cred_def.get('schemaId', None) == str(schema['seqNo'])

        cred_offer_json = await ian.create_cred_offer(schema['seqNo'])
        cred_offer = json.loads(cred_offer_json)
        print('\n\n== 7.1 == Credential offer [{} v{}]: {}'.format(
            S_KEY.name, S_KEY.version, ppjson(cred_offer_json)))

        (cred_req_json, cred_req_metadata_json) = await pan.create_cred_req(
            cred_offer_json, cd_id)
        cred_req = json.loads(cred_req_json)
        print('\n\n== 8 == Credential request [{} v{}]: metadata {}, cred {}'.
              format(S_KEY.name, S_KEY.version, ppjson(cred_req_metadata_json),
                     ppjson(cred_req_json)))
        assert json.loads(cred_req_json)

        # Issuer anchor issues creds and stores at HolderProver: get cred req, create cred, store cred
        cred_data = []

        CREDS = 450  # enough to build 4 rev regs
        print('\n\n== 9 == creating and storing {} credentials:'.format(CREDS))
        for number in range(CREDS):
            (cred_json, _) = await ian.create_cred(
                cred_offer_json, cred_req_json, {
                    'numeric': str(number),
                    'sha256': sha256(str(number).encode()).hexdigest(),
                })

            cred_id = await pan.store_cred(cred_json, cred_req_metadata_json)
            print('.',
                  end='' if (number + 1) % 100 else '{}\n'.format(number + 1),
                  flush=True)

        # Exercise list view, least to most specific
        for tails_list_path in ('all', ian.did, cd_id):
            url = url_for(tsrv.port, 'tails/list/{}'.format(tails_list_path))
            r = requests.get(url)
            assert r.status_code == 200
            assert not r.json()  # nothing uploaded yet
        rr_ids_up = {
            basename(link)
            for link in Tails.links(ian.dir_tails, ian.did)
        }
        for rr_id in rr_ids_up:
            url = url_for(tsrv.port, 'tails/list/{}'.format(rr_id))
            r = requests.get(url)
            assert r.status_code == 200
            assert not r.json()
        print(
            '\n\n== 10 == All listing views at server come back OK and empty as expected'
        )

        rv = pexpect.run('python ../src/sync/sync.py {}'.format(
            path_cli_ini['issuer']))
        print('\n\n== 11 == Issuer sync uploaded local tails files')

        for tails_list_path in ('all', ian.did, cd_id):
            url = url_for(tsrv.port, 'tails/list/{}'.format(tails_list_path))
            r = requests.get(url)
            assert r.status_code == 200
            assert {rr for rr in r.json()} == rr_ids_up
        for rr_id in rr_ids_up:
            url = url_for(tsrv.port, 'tails/list/{}'.format(rr_id))
            r = requests.get(url)
            assert r.status_code == 200
            assert r.json() == [rr_id]  # list with one rr_id should come back

        # Exercise list view, least to most specific
        for tails_list_path in ('all', ian.did, cd_id):
            url = url_for(tsrv.port, 'tails/list/{}'.format(tails_list_path))
            r = requests.get(url)
            assert r.status_code == 200
            assert len(r.json()) == len(rr_ids_up)
        print(
            '\n\n== 12 == All listing views at server come back OK with {} uploaded files'
            .format(len(rr_ids_up)))

        rv = pexpect.run('python ../src/sync/sync.py {}'.format(
            path_cli_ini['prover']))
        print('\n\n== 13 == Prover sync downloaded remote tails files')

        rr_ids_down = {
            basename(link)
            for link in Tails.links(
                config['prover']['Tails Client']['tails.dir'], ian.did)
        }
        assert rr_ids_down == rr_ids_up

        # Exercise admin-delete
        rv = pexpect.run('python ../src/admin/delete.py {} all'.format(
            path_cli_ini['admin']))
        print('\n\n== 14 == Admin called for deletion at tails server')

        # Check tails server deletion
        url = url_for(tsrv.port, 'tails/list/all')
        r = requests.get(url)
        assert r.status_code == 200
        assert not r.json()
        print(
            '\n\n== 15 == All listing views at server come back OK and empty as expected'
        )

        rv = pexpect.run('python ../src/sync/multisync.py 1 {}'.format(
            path_cli_ini['issuer']))
        print(
            '\n\n== 16 == Issuer multisync on 1 sync iteration uploaded local tails files'
        )

        for tails_list_path in ('all', ian.did, cd_id):
            url = url_for(tsrv.port, 'tails/list/{}'.format(tails_list_path))
            r = requests.get(url)
            assert r.status_code == 200
            assert {rr for rr in r.json()} == rr_ids_up
        for rr_id in rr_ids_up:
            url = url_for(tsrv.port, 'tails/list/{}'.format(rr_id))
            r = requests.get(url)
            assert r.status_code == 200
            assert r.json() == [rr_id]  # list with one rr_id should come back

        # Exercise list view, least to most specific
        for tails_list_path in ('all', ian.did, cd_id):
            url = url_for(tsrv.port, 'tails/list/{}'.format(tails_list_path))
            r = requests.get(url)
            assert r.status_code == 200
            assert len(r.json()) == len(rr_ids_up)
        print(
            '\n\n== 17 == All listing views at server come back OK with {} uploaded files'
            .format(len(rr_ids_up)))

        # Remove tails server anchor wallet
        await wallets['admin'].remove()
        print('\n\n== 18 == Removed admin (tails server anchor {}) wallet'.
              format(wallets['admin'].name))
Ejemplo n.º 6
0
async def test_anchors_tails_load(
        pool_name,
        pool_genesis_txn_data,
        seed_trustee1):
    """
    Load-test revocation registry (tails file) management against a node pool.

    Issues enough credentials to roll over at least one revocation registry,
    timing credential creation per registry, and asserts that an external
    rev reg builder process keeps worst-case issue times bounded.

    :param pool_name: name of node pool ledger configuration
    :param pool_genesis_txn_data: genesis transaction data for node pool
    :param seed_trustee1: seed for trustee anchor wallet
    """

    rrbx = True  # True: exercise external rev reg builder process; False: in-process
    print(Ink.YELLOW('\n\n== Load-testing tails on {}ternal rev reg builder ==').format("ex" if rrbx else "in"))

    await RevRegBuilder.stop(WALLET_NAME)  # in case of re-run

    # Set up node pool ledger config and wallets, open pool, init anchors
    p_mgr = NodePoolManager()
    if pool_name not in await p_mgr.list():
        await p_mgr.add_config(pool_name, pool_genesis_txn_data)
    pool = p_mgr.get(pool_name)
    await pool.open()

    w_mgr = WalletManager()
    wallets = {
        'trustee-anchor': {
            'seed': seed_trustee1,
            'storage_type': None,
            'config': None,
            'access_creds': None
        },
        WALLET_NAME: {
            'seed': 'Superstar-Anchor-000000000000000',
            'storage_type': None,
            'config': None,
            'access_creds': {
                'key': 'rrbx-test'
            }
        }
    }
    # Create each wallet, falling back to lookup if it already exists; open either way
    for (name, wdata) in wallets.items():
        try:
            wdata['wallet'] = await w_mgr.create({
                'id': name,
                'seed': wdata['seed']
            })
        except ExtantWallet:
            wdata['wallet'] = w_mgr.get({'id': name})
        finally:
            await wdata['wallet'].open()

    tan = TrusteeAnchor(wallets['trustee-anchor']['wallet'], pool)
    no_prox = rrbx_prox()  # baseline count of rev reg builder processes before anchor start
    san = OrgHubAnchor(wallets[WALLET_NAME]['wallet'], pool, rrbx=rrbx)
    if rrbx:
        # Give the external builder process time to start, then verify exactly one came up
        await beep('external rev reg builder process on {}'.format(WALLET_NAME), 15)
        if rrbx_prox() != no_prox + 1:
            await RevRegBuilder.stop(WALLET_NAME)
            assert False, "External rev reg builder process did not start"
        async with OrgHubAnchor(
                wallets[WALLET_NAME]['wallet'],
                pool,
                rrbx=rrbx):  # check for exactly 1 external rev reg builder process
            await beep('external rev reg builder process uniqueness test on {}'.format(WALLET_NAME), 5)
            if rrbx_prox() != no_prox + 1:
                await RevRegBuilder.stop(WALLET_NAME)
                assert False, "External rev reg builder process was not unique"

    assert pool.handle

    await tan.open()
    await san.open()

    # Publish anchor particulars to ledger if not yet present
    for an in (tan, san):
        if not json.loads(await tan.get_nym(an.did)):
            await tan.send_nym(an.did, an.verkey, an.wallet.name, an.least_role())

    nyms = {
        'tan': json.loads(await tan.get_nym(tan.did)),
        'san': json.loads(await tan.get_nym(san.did))
    }
    print('\n\n== 1 == nyms: {}'.format(ppjson(nyms)))

    for k in nyms:
        assert 'dest' in nyms[k]

    # Publish schema to ledger if not yet present; get from ledger
    # Version keyed on current epoch to force a fresh schema per run
    S_ID = schema_id(san.did, 'tails_load', '{}.0'.format(int(time.time())))
    S_KEY = schema_key(S_ID)

    schema_data = {
        'name': schema_key(S_ID).name,
        'version': schema_key(S_ID).version,
        'attr_names': [
            'number',
            'remainder'
        ]
    }

    try:
        await san.get_schema(S_KEY)  # may exist (almost certainly not)
    except AbsentSchema:
        await san.send_schema(json.dumps(schema_data))
    schema_json = await san.get_schema(S_KEY)
    schema = json.loads(schema_json)
    assert schema  # should exist now
    print('\n\n== 2 == SCHEMA [{} v{}]: {}'.format(S_KEY.name, S_KEY.version, ppjson(schema)))

    # Setup link secret for creation of cred req or proof
    await san.create_link_secret('LinkSecret')

    # SRI anchor create, store, publish cred definitions to ledger; create cred offers
    await san.send_cred_def(S_ID, revo=True)
    cd_id = cred_def_id(S_KEY.origin_did, schema['seqNo'], pool.protocol)

    # Cred def publication should have created a tails file link for the new cred def
    assert ((not Tails.unlinked(san.dir_tails)) and
        [f for f in Tails.links(san.dir_tails, san.did) if cd_id in f])

    cred_def_json = await san.get_cred_def(cd_id)  # ought to exist now
    cred_def = json.loads(cred_def_json)
    print('\n\n== 3.0 == Cred def [{} v{}]: {}'.format(
        S_KEY.name,
        S_KEY.version,
        ppjson(json.loads(cred_def_json))))
    assert cred_def.get('schemaId', None) == str(schema['seqNo'])

    cred_offer_json = await san.create_cred_offer(schema['seqNo'])
    print('\n\n== 3.1 == Credential offer [{} v{}]: {}'.format(
        S_KEY.name,
        S_KEY.version,
        ppjson(cred_offer_json)))

    (cred_req_json, cred_req_metadata_json) = await san.create_cred_req(cred_offer_json, cd_id)
    print('\n\n== 4 == Credential request [{} v{}]: metadata {}, cred-req {}'.format(
        S_KEY.name,
        S_KEY.version,
        ppjson(cred_req_metadata_json),
        ppjson(cred_req_json)))
    assert json.loads(cred_req_json)

    # BC Reg anchor (as Issuer) issues creds and stores at HolderProver: get cred req, create cred, store cred
    CREDS = 4034  # enough to kick off rev reg on size 4096 and issue two creds in it: 1 needing set-rev-reg, 1 not
    print('\n\n== 5 == creating {} credentials'.format(CREDS))
    swatch = Stopwatch(2)
    optima = {}  # per rev-reg, fastest/slowest pairs
    for number in range(CREDS):
        swatch.mark()
        (cred_json, _) = await san.create_cred(
            cred_offer_json,
            cred_req_json,
            {
                'number': str(number),
                'remainder': str(number % 100)
            })
        elapsed = swatch.mark()
        # Key timing stats on the tag of the rev reg currently receiving creds
        tag = rev_reg_id2tag(Tails.current_rev_reg_id(san.dir_tails, cd_id))
        if tag not in optima:
            optima[tag] = (elapsed, elapsed)
        else:
            optima[tag] = (min(optima[tag][0], elapsed), max(optima[tag][1], elapsed))
        print('.', end='', flush=True)
        if ((number + 1) % 100) == 0:
            print('{}: #{}: {:.2f}-{:.2f}s'.format(number + 1, tag, *optima[tag]), flush=True)

        assert json.loads(cred_json)
    print('{}: #{}: {:.2f}-{:.2f}s'.format(number + 1, tag, *optima[tag]), flush=True)

    print('\n\n== 6 == best, worst times by revocation registry: {}'.format(ppjson(optima)))
    assert (not rrbx) or (max(optima[tag][1] for tag in optima) <
        4 * min(optima[tag][1] for tag in optima if int(tag) > 0))  # if waiting on rr beyond #0, sizes increase as 2^n

    # Tear down: close anchors, stop external builder, close wallets and pool
    await san.close()
    if rrbx:
        await RevRegBuilder.stop(WALLET_NAME)
    await tan.close()
    for (name, wdata) in wallets.items():
        await wdata['wallet'].close()
    await pool.close()
# Ejemplo n.º 7  (scraper artifact: example separator; commented out so the file parses)
# 0             (scraper artifact: vote count)
async def test_setnym(pool_ip, pool_name, pool_genesis_txn_data, seed_trustee1,
                      path_setnym_ini, setnym_ini_file):
    """
    Exercise the setnym.py operation script end-to-end against a node pool.

    Repeatedly rewrites the setnym ini configuration in place and runs
    setnym.py as a subprocess, checking ledger state (nym presence, role,
    seqNo) and error codes after each variation: seed vs DID, role defaults,
    bad role, shared wallets, new/missing/bad pool configuration, and absent
    trustee wallet.

    :param pool_ip: node pool IP address (for display)
    :param pool_name: name of node pool ledger configuration
    :param pool_genesis_txn_data: genesis transaction data for node pool
    :param seed_trustee1: seed for trustee anchor wallet
    :param path_setnym_ini: path to setnym configuration file (rewritten in place below)
    :param setnym_ini_file: fixture ensuring setnym ini file is present
    """

    print(
        Ink.YELLOW('\n\n== Testing setnym operation on node pool {} =='.format(
            pool_ip)))

    with open(path_setnym_ini, 'r') as cfg_fh:
        print('\n\n== 1 == Initial configuration:\n{}'.format(cfg_fh.read()))
    cfg = inis2dict(str(path_setnym_ini))

    # Set up node pool ledger config and wallets, open pool, init anchors
    p_mgr = NodePoolManager()
    if pool_name not in await p_mgr.list():
        await p_mgr.add_config(pool_name, pool_genesis_txn_data)

    wallets = await get_wallets(
        {
            'trustee-anchor': {
                'seed': seed_trustee1
            },
            cfg['VON Anchor']['name']: {
                'seed': cfg['VON Anchor']['seed']
            },
            'x-anchor': {
                'seed': 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
            }
        },
        open_all=True)

    # Exercise anchor operation against absent pool: expect AbsentPool
    try:
        async with NominalAnchor(wallets['x-anchor']) as xan:
            await xan.get_nym()
    except AbsentPool:
        pass
    wallets.pop('x-anchor')

    # Open pool, check if nym already present
    pool = p_mgr.get(pool_name)
    await pool.open()
    assert pool.handle

    tan = TrusteeAnchor(wallets['trustee-anchor'], pool)
    await tan.open()

    noman = NominalAnchor(wallets[cfg['VON Anchor']['name']], pool)

    nym = json.loads(await noman.get_nym(noman.did))
    print('\n\n== 2 == Nym {} on ledger for anchor {} on DID {}'.format(
        '{} already'.format(ppjson(nym)) if nym else 'not yet',
        noman.wallet.name, noman.did))

    await tan.close()
    await pool.close()

    # Run setnym on initial configuration, check ledger
    sub_proc = subprocess.run([
        'python',
        join(dirname(dirname(dirname(realpath(__file__)))), 'von_anchor', 'op',
             'setnym.py'),
        str(path_setnym_ini)
    ],
                              stdout=subprocess.PIPE,
                              stderr=subprocess.DEVNULL)
    assert not sub_proc.returncode
    print('\n\n== 3 == Set nym with TRUST_ANCHOR role on {} for {}'.format(
        noman.did, noman.wallet.name))

    await pool.open()
    await noman.open()
    nym = json.loads(await noman.get_nym(noman.did))
    assert nym and Role.get(nym['role']) == Role.TRUST_ANCHOR
    print('\n\n== 4 == Got nym transaction from ledger for DID {} ({}): {}'.
          format(noman.did, noman.wallet.name, ppjson(nym)))
    await noman.close()
    await pool.close()

    # Run setnym on configuration with DID and explicit storage type, check ledger
    with open(path_setnym_ini, 'w+') as ini_fh:
        for section in cfg:
            print('[{}]'.format(section), file=ini_fh)
            for (key, value) in cfg[section].items():
                if section == 'VON Anchor':
                    if key == 'seed':
                        # swap seed out for the DID it resolved to
                        print('did={}'.format(noman.did), file=ini_fh)
                    elif key == 'wallet.type':
                        print('wallet.type=default', file=ini_fh)
                    else:
                        print('{}={}'.format(key, value), file=ini_fh)
                else:
                    print('{}={}'.format(key, value), file=ini_fh)
            print(file=ini_fh)
    with open(path_setnym_ini, 'r') as cfg_fh:
        print(
            '\n\n== 5 == Next configuration, on DID instead of seed and explicit wallet type:\n{}'
            .format(cfg_fh.read()))

    sub_proc = subprocess.run([
        'python',
        join(dirname(dirname(dirname(realpath(__file__)))), 'von_anchor', 'op',
             'setnym.py'),
        str(path_setnym_ini)
    ],
                              stdout=subprocess.PIPE,
                              stderr=subprocess.DEVNULL)
    assert not sub_proc.returncode
    print('\n\n== 6 == Set nym with TRUST_ANCHOR role on {} for {}'.format(
        noman.did, noman.wallet.name))

    await pool.open()
    await noman.open()
    nym = json.loads(await noman.get_nym(noman.did))
    assert nym and Role.get(nym['role']) == Role.TRUST_ANCHOR
    last_nym_seqno = nym['seqNo']
    print('\n\n== 7 == Got nym transaction from ledger for DID {} ({}): {}'.
          format(noman.did, noman.wallet.name, ppjson(nym)))
    await noman.close()
    await pool.close()

    # Run setnym on configuration with no seeds nor VON Anchor role, check ledger
    with open(path_setnym_ini, 'w+') as ini_fh:
        for section in cfg:
            print('[{}]'.format(section), file=ini_fh)
            for (key, value) in cfg[section].items():
                if key in ('seed', 'genesis.txn.path'):
                    continue
                print('{}={}'.format(
                    key, '${X_ROLE:-}' if key == 'role' else value),
                      file=ini_fh)  # exercise default
            print(file=ini_fh)
    with open(path_setnym_ini, 'r') as cfg_fh:
        print(
            '\n\n== 8 == Next configuration, no seeds, no VON Anchor role:\n{}'
            .format(cfg_fh.read()))

    sub_proc = subprocess.run([
        'python',
        join(dirname(dirname(dirname(realpath(__file__)))), 'von_anchor', 'op',
             'setnym.py'),
        str(path_setnym_ini)
    ],
                              stdout=subprocess.PIPE,
                              stderr=subprocess.DEVNULL)
    assert not sub_proc.returncode
    print('\n\n== 9 == Set nym with USER role on {} for {}'.format(
        noman.did, noman.wallet.name))

    await pool.open()
    await noman.open()
    nym = json.loads(await noman.get_nym(noman.did))
    assert nym and Role.get(nym['role']) == Role.USER
    last_nym_seqno = nym['seqNo']
    print('\n\n== 10 == Got nym transaction from ledger for DID {} ({}): {}'.
          format(noman.did, noman.wallet.name, ppjson(nym)))
    await noman.close()
    await pool.close()

    # Run again to check idempotence
    sub_proc = subprocess.run([
        'python',
        join(dirname(dirname(dirname(realpath(__file__)))), 'von_anchor', 'op',
             'setnym.py'),
        str(path_setnym_ini)
    ],
                              stdout=subprocess.PIPE,
                              stderr=subprocess.DEVNULL)
    assert not sub_proc.returncode
    print('\n\n== 11 == Set nym again with default role on {} for {}'.format(
        noman.did, noman.wallet.name))

    await pool.open()
    await noman.open()
    nym = json.loads(await noman.get_nym(noman.did))
    last_nym_seqno = nym['seqNo']
    print(
        '\n\n== 12 == Got (same) nym transaction from ledger for DID {} ({}): {}'
        .format(noman.did, noman.wallet.name, ppjson(nym)))
    await noman.close()
    await pool.close()

    # Run setnym on configuration with no seeds and bad VON Anchor role, check ledger
    with open(path_setnym_ini, 'w+') as ini_fh:
        for section in cfg:
            print('[{}]'.format(section), file=ini_fh)
            for (key, value) in cfg[section].items():
                if key in ('seed', 'genesis.txn.path'):
                    continue
                print('{}={}'.format(key,
                                     'BAD_ROLE' if key == 'role' else value),
                      file=ini_fh)
            print(file=ini_fh)
    with open(path_setnym_ini, 'r') as cfg_fh:
        print(
            '\n\n== 13 == Next configuration, no seeds, bad VON Anchor role:\n{}'
            .format(cfg_fh.read()))

    sub_proc = subprocess.run([
        'python',
        join(dirname(dirname(dirname(realpath(__file__)))), 'von_anchor', 'op',
             'setnym.py'),
        str(path_setnym_ini)
    ],
                              stdout=subprocess.PIPE,
                              stderr=subprocess.DEVNULL)
    assert sub_proc.returncode  # bad role: expect failure with BadRole error code on stdout
    assert str(int(ErrorCode.BadRole)) in sub_proc.stdout.decode()
    print(
        '\n\n== 14 == Called to set bad role for {}, got error text {}'.format(
            noman.wallet.name, sub_proc.stdout.decode()))

    await pool.open()
    await noman.open()
    nym = json.loads(await noman.get_nym(noman.did))
    noman_role = await noman.get_nym_role()
    assert nym and nym['seqNo'] == last_nym_seqno  # failed run must not have touched the ledger
    await noman.close()
    await pool.close()
    print('\n\n== 15 == Got nym transaction from ledger for DID {} ({}): {}'.
          format(noman.did, noman.wallet.name, ppjson(nym)))

    # Exercise reseed, ensure no side effect to role on ledger
    await pool.open()
    pan = ProctorAnchor(wallets[cfg['VON Anchor']['name']], pool, rrbx=False)
    await pan.open()
    next_seed = '{}000000000000VonAnchor1'.format(int(time()) + 1)
    await pan.reseed(next_seed)
    nym = json.loads(await pan.get_nym(noman.did))
    pan_role = await pan.get_nym_role()
    await pool.close()
    assert nym and nym['seqNo'] != last_nym_seqno  # reseed writes a new nym txn
    assert pan_role == noman_role  # ... but must not change the role
    print(
        '\n\n== 16 == As Proctor Anchor, reseeded, then got nym transaction from ledger for DID {} ({}): {}'
        .format(pan.did, pan.wallet.name, ppjson(nym)))
    last_nym_seqno = nym['seqNo']

    # Run setnym on configuration with same wallet for trustee and VON anchor
    with open(path_setnym_ini, 'w+') as ini_fh:
        for section in cfg:
            print('[{}]'.format(section), file=ini_fh)
            for (key, value) in cfg[section].items():
                if section == 'VON Anchor' and key == 'name':
                    print('{}={}'.format(key, cfg['Trustee Anchor']['name']),
                          file=ini_fh)
                else:
                    print('{}={}'.format(key, value), file=ini_fh)
            print(file=ini_fh)
    with open(path_setnym_ini, 'r') as cfg_fh:
        print(
            '\n\n== 17 == Next configuration, same wallet for trustee anchor and VON anchor:\n{}'
            .format(cfg_fh.read()))

    sub_proc = subprocess.run([
        'python',
        join(dirname(dirname(dirname(realpath(__file__)))), 'von_anchor', 'op',
             'setnym.py'),
        str(path_setnym_ini)
    ],
                              stdout=subprocess.PIPE,
                              stderr=subprocess.DEVNULL)
    assert sub_proc.returncode  # wallet collision: expect ExtantWallet error code
    assert str(int(ErrorCode.ExtantWallet)) in sub_proc.stdout.decode()
    print(
        '\n\n== 18 == Called with same wallet for trustee anchor and VON anchor, got error text {}'
        .format(sub_proc.stdout.decode()))

    # Run setnym on configuration with new ledger node pool configuration
    genesis_tmp = NamedTemporaryFile(mode='w+b', buffering=0, delete=False)
    with genesis_tmp:
        genesis_tmp.write(pool_genesis_txn_data.encode())
    pool_copy = '{}.{}'.format(cfg['Node Pool']['name'], int(time()))
    with open(path_setnym_ini, 'w+') as ini_fh:
        for section in cfg:
            print('[{}]'.format(section), file=ini_fh)
            for (key, value) in cfg[section].items():
                if section == 'Node Pool':
                    if key == 'name':
                        print('name={}'.format(pool_copy), file=ini_fh)
                    elif key == 'genesis.txn.path':
                        print('genesis.txn.path={}'.format(genesis_tmp.name),
                              file=ini_fh)  # includes /tmp/ path
                    else:
                        print('{}={}.xxx'.format(key, value), file=ini_fh)
                else:
                    print('{}={}'.format(key, value), file=ini_fh)
            print(file=ini_fh)
    with open(path_setnym_ini, 'r') as cfg_fh:
        print(
            '\n\n== 19 == Next configuration, calling for copy of node pool ledger config:\n{}'
            .format(cfg_fh.read()))

    sub_proc = subprocess.run([
        'python',
        join(dirname(dirname(dirname(realpath(__file__)))), 'von_anchor', 'op',
             'setnym.py'),
        str(path_setnym_ini)
    ],
                              stdout=subprocess.PIPE,
                              stderr=subprocess.DEVNULL)
    assert not sub_proc.returncode
    print('\n\n== 20 == Called for new copy {} of node pool ledger config'.
          format(pool_copy))

    # Clean up the temp genesis file and the pool config copy before checking ledger
    unlink(genesis_tmp.name)
    await p_mgr.remove(pool_copy)
    await pool.open()
    await pan.open()
    nym = json.loads(await pan.get_nym(pan.did))
    assert nym and Role.get(nym['role']) == Role.TRUST_ANCHOR
    assert nym and nym['seqNo'] != last_nym_seqno
    print('\n\n== 21 == Got nym transaction from ledger for DID {} ({}): {}'.
          format(pan.did, pan.wallet.name, ppjson(nym)))
    await pan.close()
    await pool.close()

    # Run setnym on configuration with wrong genesis transaction path
    with open(path_setnym_ini, 'w+') as ini_fh:
        for section in cfg:
            print('[{}]'.format(section), file=ini_fh)
            for (key, value) in cfg[section].items():
                if section == 'Node Pool':
                    print('{}={}.xxx'.format(key, value), file=ini_fh)
                else:
                    print('{}={}'.format(key, value), file=ini_fh)
            print(file=ini_fh)
    with open(path_setnym_ini, 'r') as cfg_fh:
        print(
            '\n\n== 22 == Next configuration, missing pool and bad genesis txn path:\n{}'
            .format(cfg_fh.read()))

    sub_proc = subprocess.run([
        'python',
        join(dirname(dirname(dirname(realpath(__file__)))), 'von_anchor', 'op',
             'setnym.py'),
        str(path_setnym_ini)
    ],
                              stdout=subprocess.PIPE,
                              stderr=subprocess.DEVNULL)
    assert sub_proc.returncode  # no such pool, bad genesis path: expect AbsentPool error code
    assert str(int(ErrorCode.AbsentPool)) in sub_proc.stdout.decode()
    print(
        '\n\n== 23 == Called with missing pool and bad genesis txn path, got error text {}'
        .format(sub_proc.stdout.decode()))

    # Run setnym on configuration with no node pool ledger configuration
    with open(path_setnym_ini, 'w+') as ini_fh:
        for section in cfg:
            print('[{}]'.format(section), file=ini_fh)
            for (key, value) in cfg[section].items():
                if section == 'Node Pool':
                    if key == 'name':
                        print('{}={}.xxx'.format(key, value), file=ini_fh)
                    elif key == 'genesis.txn.path':
                        print('genesis.txn.path=', file=ini_fh)
                    else:
                        print('{}={}'.format(key, value), file=ini_fh)
                else:
                    print('{}={}'.format(key, value), file=ini_fh)
            print(file=ini_fh)
    with open(path_setnym_ini, 'r') as cfg_fh:
        print(
            '\n\n== 24 == Next configuration, missing pool and no genesis txn path:\n{}'
            .format(cfg_fh.read()))

    sub_proc = subprocess.run([
        'python',
        join(dirname(dirname(dirname(realpath(__file__)))), 'von_anchor', 'op',
             'setnym.py'),
        str(path_setnym_ini)
    ],
                              stdout=subprocess.PIPE,
                              stderr=subprocess.DEVNULL)
    assert sub_proc.returncode
    assert str(int(ErrorCode.AbsentPool)) in sub_proc.stdout.decode()
    print(
        '\n\n== 25 == Called with missing pool and no genesis txn path, got error text {}'
        .format(sub_proc.stdout.decode()))

    # Run again without trustee anchor wallet present
    await wallets['trustee-anchor'].close()
    await wallets['trustee-anchor'].remove()
    wallets.pop('trustee-anchor')
    noman = NominalAnchor(wallets[cfg['VON Anchor']['name']], pool)

    # Restore the original configuration verbatim
    with open(path_setnym_ini, 'w+') as ini_fh:
        for section in cfg:
            print('[{}]'.format(section), file=ini_fh)
            for (key, value) in cfg[section].items():
                print('{}={}'.format(key, value), file=ini_fh)
            print(file=ini_fh)
    with open(path_setnym_ini, 'r') as cfg_fh:
        print(
            '\n\n== 26 == Set VON anchor configuration, no Trustee anchor wallet a priori:\n{}'
            .format(cfg_fh.read()))

    sub_proc = subprocess.run([
        'python',
        join(dirname(dirname(dirname(realpath(__file__)))), 'von_anchor', 'op',
             'setnym.py'),
        str(path_setnym_ini)
    ],
                              stdout=subprocess.PIPE,
                              stderr=subprocess.DEVNULL)
    assert not sub_proc.returncode
    print('\n\n== 27 == Set nym with TRUST_ANCHOR role on {} for {}'.format(
        noman.did, noman.wallet.name))

    await pool.open()
    await noman.open()
    nym = json.loads(await noman.get_nym(noman.did))
    assert nym and Role.get(nym['role']) == Role.TRUST_ANCHOR
    print('\n\n== 28 == Got nym transaction from ledger for DID {} ({}): {}'.
          format(noman.did, noman.wallet.name, ppjson(nym)))
    await noman.close()
    await pool.close()

    await pan.close()
    for name in wallets:
        await wallets[name].close()
# Ejemplo n.º 8  (scraper artifact: example separator; commented out so the file parses)
# 0             (scraper artifact: vote count)
async def test_a2a():
    """
    Exercise DIDDoc serialization and deserialization over a range of input styles:
    abbreviated vs canonical identifiers, embedded vs referenced authentication
    keys, implicit vs explicit DID document identifiers, novel service recipient
    keys on raw base58, mixed routing/recipient keys, and a missing service key
    (which must raise AbsentDIDDocItem).
    """
    print(Ink.YELLOW('\n\n== Testing DID Doc wranglers =='))

    # One authn key by reference
    dd_in = {
        '@context': 'https://w3id.org/did/v1',
        'id': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
        'publicKey': [
            {
                'id': '3',
                'type': 'RsaVerificationKey2018',
                'controller': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
                'publicKeyPem': '-----BEGIN PUBLIC X...'
            },
            {
                'id': '4',
                'type': 'RsaVerificationKey2018',
                'controller': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
                'publicKeyPem': '-----BEGIN PUBLIC 9...'
            },
            {
                'id': '6',
                'type': 'RsaVerificationKey2018',
                'controller': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
                'publicKeyPem': '-----BEGIN PUBLIC A...'
            }
        ],
        'authentication': [
            {
                'type': 'RsaSignatureAuthentication2018',
                'publicKey': 'did:sov:LjgpST2rjsoxYegQDRm7EL#4'
            }
        ],
        'service': [
            {
                'id': '0',
                'type': 'Agency',
                'serviceEndpoint': 'did:sov:Q4zqM7aXqm7gDQkUVLng9h'
            }
        ]
    }

    dd = DIDDoc.deserialize(dd_in)
    assert len(dd.pubkey) == len(dd_in['publicKey'])
    assert len(dd.authnkey) == len(dd_in['authentication'])

    dd_out = dd.serialize()
    print('\n\n== 1 == DID Doc on abbreviated identifiers: {}'.format(ppjson(dd_out)))

    # One authn key embedded, all possible refs canonical;
    # the embedded authn key adds one to the pubkey count on deserialization
    dd_in = {
        '@context': 'https://w3id.org/did/v1',
        'id': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
        'publicKey': [
            {
                'id': '3',
                'type': 'RsaVerificationKey2018',
                'controller': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
                'publicKeyPem': '-----BEGIN PUBLIC X...'
            },
            {
                'id': 'did:sov:LjgpST2rjsoxYegQDRm7EL#4',
                'type': 'RsaVerificationKey2018',
                'controller': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
                'publicKeyPem': '-----BEGIN PUBLIC 9...'
            }
        ],
        'authentication': [
            {
                'type': 'RsaSignatureAuthentication2018',
                'publicKey': 'did:sov:LjgpST2rjsoxYegQDRm7EL#4'
            },
            {
                'id': 'did:sov:LjgpST2rjsoxYegQDRm7EL#6',
                'type': 'RsaVerificationKey2018',
                'controller': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
                'publicKeyPem': '-----BEGIN PUBLIC A...'
            }
        ],
        'service': [
            {
                'id': 'did:sov:LjgpST2rjsoxYegQDRm7EL;0',
                'type': 'Agency',
                'serviceEndpoint': 'https://www.von.ca'
            }
        ]
    }

    dd = DIDDoc.deserialize(dd_in)
    assert len(dd.pubkey) == len(dd_in['publicKey']) + 1  # embedded authn key counts as a pubkey
    assert len(dd.authnkey) == len(dd_in['authentication'])

    dd_out = dd.serialize()
    print('\n\n== 2 == DID Doc on mixed reference styles, embedded and ref style authn keys: {}'.format(ppjson(dd_out)))

    # All references canonical where possible; one authn key embedded and one by reference
    dd_in = {
        '@context': 'https://w3id.org/did/v1',
        'id': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
        'publicKey': [
            {
                'id': 'did:sov:LjgpST2rjsoxYegQDRm7EL#3',
                'type': 'RsaVerificationKey2018',
                'controller': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
                'publicKeyPem': '-----BEGIN PUBLIC X...'
            },
            {
                'id': 'did:sov:LjgpST2rjsoxYegQDRm7EL#4',
                'type': 'RsaVerificationKey2018',
                'controller': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
                'publicKeyPem': '-----BEGIN PUBLIC 9...'
            }
        ],
        'authentication': [
            {
                'type': 'RsaSignatureAuthentication2018',
                'publicKey': 'did:sov:LjgpST2rjsoxYegQDRm7EL#4'
            },
            {
                'id': 'did:sov:LjgpST2rjsoxYegQDRm7EL#6',
                'type': 'RsaVerificationKey2018',
                'controller': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
                'publicKeyPem': '-----BEGIN PUBLIC A...'
            }
        ],
        'service': [
            {
                'id': 'did:sov:LjgpST2rjsoxYegQDRm7EL;0',
                'type': 'DidMessaging',
                'serviceEndpoint': 'https://www.von.ca'
            }
        ]
    }

    dd = DIDDoc.deserialize(dd_in)
    assert len(dd.pubkey) == len(dd_in['publicKey']) + 1
    assert len(dd.authnkey) == len(dd_in['authentication'])

    dd_out = dd.serialize()
    print('\n\n== 3 == DID Doc on canonical refs: {}'.format(ppjson(dd_out)))

    # Minimal as per indy-agent test suite without explicit identifiers
    dd_in = {
        '@context': 'https://w3id.org/did/v1',
        'publicKey': [
            {
                'id': 'LjgpST2rjsoxYegQDRm7EL#keys-1',
                'type': 'Ed25519VerificationKey2018',
                'controller': 'LjgpST2rjsoxYegQDRm7EL',
                'publicKeyBase58': '~XXXXXXXXXXXXXXXX'
            }
        ],
        'service': [
            {
                'type': 'DidMessaging',
                'recipientKeys': ['~XXXXXXXXXXXXXXXX'],
                'serviceEndpoint': 'https://www.von.ca'
            }
        ]
    }

    dd = DIDDoc.deserialize(dd_in)
    assert len(dd.pubkey) == len(dd_in['publicKey'])
    assert len(dd.authnkey) == 0

    dd_out = dd.serialize()
    print('\n\n== 4 == DID Doc minimal style, implicit DID document identifier: {}'.format(
        ppjson(dd_out)))

    # Minimal + ids as per indy-agent test suite with explicit identifiers; novel service recipient key on raw base58
    dd_in = {
        '@context': 'https://w3id.org/did/v1',
        'id': 'LjgpST2rjsoxYegQDRm7EL',
        'publicKey': [
            {
                'id': 'LjgpST2rjsoxYegQDRm7EL#keys-1',
                'type': 'Ed25519VerificationKey2018',
                'controller': 'LjgpST2rjsoxYegQDRm7EL',
                'publicKeyBase58': '~XXXXXXXXXXXXXXXX'
            }
        ],
        'service': [
            {
                'id': 'LjgpST2rjsoxYegQDRm7EL;indy',
                'type': 'DidMessaging',
                'priority': 1,
                'recipientKeys': ['~YYYYYYYYYYYYYYYY'],
                'serviceEndpoint': 'https://www.von.ca'
            }
        ]
    }

    dd = DIDDoc.deserialize(dd_in)
    assert len(dd.pubkey) == 1 + len(dd_in['publicKey'])  # novel recipient key adds a pubkey
    assert len(dd.authnkey) == 0

    dd_out = dd.serialize()
    print('\n\n== 5 == DID Doc minimal style plus explicit idents and novel raw base58 service recip key: {}'.format(
        ppjson(dd_out)))

    # Multiple services with mixed endpoint styles, routing and recipient keys
    dd_in = {
        '@context': 'https://w3id.org/did/v1',
        'id': 'LjgpST2rjsoxYegQDRm7EL',
        'publicKey': [
            {
                'id': 'LjgpST2rjsoxYegQDRm7EL#keys-1',
                'type': 'Ed25519VerificationKey2018',
                'controller': 'LjgpST2rjsoxYegQDRm7EL',
                'publicKeyBase58': '~XXXXXXXXXXXXXXXX'
            },
            {
                'id': 'LjgpST2rjsoxYegQDRm7EL#keys-2',
                'type': 'Ed25519VerificationKey2018',
                'controller': 'LjgpST2rjsoxYegQDRm7EL',
                'publicKeyBase58': '~YYYYYYYYYYYYYYYY'
            },
            {
                'id': 'did:sov:LjgpST2rjsoxYegQDRm7EL#keys-3',
                'type': 'RsaVerificationKey2018',
                'controller': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
                'publicKeyPem': '-----BEGIN PUBLIC A...'
            }
        ],
        'service': [
            {
                'id': 'LjgpST2rjsoxYegQDRm7EL;indy',
                'type': 'DidMessaging',
                'priority': 0,
                'recipientKeys': ['~ZZZZZZZZZZZZZZZZ'],
                'serviceEndpoint': 'did:sov:LjgpST2rjsoxYegQDRm7EL;1'
            },
            {
                'id': '1',
                'type': 'one',
                'priority': 1,
                'recipientKeys': [
                    '~XXXXXXXXXXXXXXXX',
                    'did:sov:LjgpST2rjsoxYegQDRm7EL#keys-1'
                ],
                'routingKeys': [
                    'did:sov:LjgpST2rjsoxYegQDRm7EL#keys-3'
                ],
                'serviceEndpoint': 'LjgpST2rjsoxYegQDRm7EL;2'
            },
            {
                'id': '2',
                'type': 'two',
                'priority': 2,
                'recipientKeys': [
                    '~XXXXXXXXXXXXXXXX',
                    'did:sov:LjgpST2rjsoxYegQDRm7EL#keys-1'
                ],
                'routingKeys': [
                    'did:sov:LjgpST2rjsoxYegQDRm7EL#keys-3'
                ],
                'serviceEndpoint': 'https://www.two.ca/two'
            }
        ]
    }

    dd = DIDDoc.deserialize(dd_in)
    assert len(dd.pubkey) == 1 + len(dd_in['publicKey'])
    assert len(dd.authnkey) == 0
    assert {s.priority for s in dd.service.values()} == {0, 1, 2}
    assert len(dd.service) == 3

    dd_out = dd.serialize()
    print('\n\n== 6 == DID Doc on mixed service routing and recipient keys: {}'.format(
        ppjson(dd_out)))

    # Exercise missing service recipient key: referenced key #keys-3 is not in publicKey
    dd_in = {
        '@context': 'https://w3id.org/did/v1',
        'id': 'LjgpST2rjsoxYegQDRm7EL',
        'publicKey': [
            {
                'id': 'LjgpST2rjsoxYegQDRm7EL#keys-1',
                'type': 'Ed25519VerificationKey2018',
                'controller': 'LjgpST2rjsoxYegQDRm7EL',
                'publicKeyBase58': '~XXXXXXXXXXXXXXXX'
            }
        ],
        'service': [
            {
                'id': 'LjgpST2rjsoxYegQDRm7EL;indy',
                'type': 'DidMessaging',
                'priority': 1,
                'recipientKeys': [
                    'did:sov:LjgpST2rjsoxYegQDRm7EL#keys-3'
                ],
                'serviceEndpoint': 'https://www.von.ca'
            }
        ]
    }

    try:
        dd = DIDDoc.deserialize(dd_in)
        assert False
    except AbsentDIDDocItem:
        pass  # expected: service references an undeclared key
    print('\n\n== 7 == DID Doc on underspecified service key fails as expected')
# Ejemplo n.º 9  (scraper artifact: example separator; commented out so the file parses)
# 0             (scraper artifact: vote count)
async def test_setnym(pool_ip, pool_name, pool_genesis_txn_data, seed_trustee1,
                      path_setnym_ini, setnym_ini_file):
    """
    End-to-end exercise of the setnym.py operation script against a live node pool.

    Sequence (order-dependent, each step depends on ledger state from the prior one):
      1.  Echo the initial setnym ini configuration.
      2.  Ensure pool ledger config exists; create trustee, VON, and throwaway wallets.
      3.  Confirm an anchor with no pool attachment raises AbsentPool on ledger access.
      4.  Check whether the VON anchor nym is already on the ledger.
      5.  Run setnym.py as a subprocess: expect nym written with TRUST_ANCHOR role.
      6.  Rewrite the ini without seeds and with role from an (unset) env var,
          run setnym.py again: expect nym re-written with default (USER) role.
      7.  Run setnym.py once more on the same config: idempotent re-set of the nym.
      8.  Rewrite the ini with a bad role value, run setnym.py: expect nonzero
          return code and an unchanged nym (same seqNo) on the ledger.
      9.  As SRI anchor on the same wallet, reseed and confirm the nym transaction
          changes (new seqNo) but the ledger role is not side-effected.

    :param pool_ip: IP address of the node pool under test (for display only here)
    :param pool_name: name of the node pool ledger configuration
    :param pool_genesis_txn_data: genesis transaction data to create pool config if absent
    :param seed_trustee1: seed for the trustee anchor wallet
    :param path_setnym_ini: path to the setnym ini configuration file (rewritten in-test)
    :param setnym_ini_file: fixture ensuring the setnym ini file exists
    """

    print(
        Ink.YELLOW('\n\n== Testing setnym operation on node pool {} =='.format(
            pool_ip)))

    # Echo the starting configuration, then parse it for wallet name and seed
    with open(path_setnym_ini, 'r') as cfg_fh:
        print('\n\n== 1 == Initial configuration:\n{}'.format(cfg_fh.read()))
    cfg = inis2dict(str(path_setnym_ini))

    # Set up node pool ledger config and wallets, open pool, init anchors
    manager = NodePoolManager()
    if pool_name not in await manager.list():
        await manager.add_config(pool_name, pool_genesis_txn_data)

    seeds = {
        'trustee-anchor': seed_trustee1,
        cfg['VON Anchor']['wallet.name']: cfg['VON Anchor']['seed'],
        'x-anchor': 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
    }
    wallets = await get_wallets(seeds, True)

    # An anchor constructed without a pool cannot reach the ledger: expect AbsentPool
    try:
        async with NominalAnchor(wallets['x-anchor']) as xan:
            await xan.get_nym()
    except AbsentPool:
        pass
    wallets.pop('x-anchor')

    # Open pool, check if nym already present
    pool = manager.get(pool_name)
    await pool.open()
    assert pool.handle

    tan = TrusteeAnchor(wallets['trustee-anchor'], pool)
    await tan.open()

    noman = NominalAnchor(wallets[cfg['VON Anchor']['wallet.name']], pool)

    nym = json.loads(await noman.get_nym(noman.did))
    print('\n\n== 2 == Nym {} on ledger for anchor {} on DID {}'.format(
        '{} already'.format(ppjson(nym)) if nym else 'not yet',
        noman.wallet.name, noman.did))

    # Close pool handles so the setnym.py subprocess can open the pool itself
    await tan.close()
    await pool.close()

    # Run setnym.py out-of-process on the initial ini (role = TRUST_ANCHOR)
    sub_proc = subprocess.run([
        'python',
        join(dirname(dirname(dirname(realpath(__file__)))), 'von_anchor', 'op',
             'setnym.py'),
        str(path_setnym_ini)
    ],
                              stdout=subprocess.PIPE,
                              stderr=subprocess.DEVNULL)
    assert not sub_proc.returncode
    print('\n\n== 3 == Set nym with TRUST_ANCHOR role on {} for {}'.format(
        noman.did, noman.wallet.name))

    # Re-open and confirm the nym landed with the TRUST_ANCHOR role
    await pool.open()
    await noman.open()
    nym = json.loads(await noman.get_nym(noman.did))
    assert nym and Role.get(nym['role']) == Role.TRUST_ANCHOR
    print('\n\n== 4 == Got nym transaction from ledger for DID {} ({}): {}'.
          format(noman.did, noman.wallet.name, ppjson(nym)))
    await noman.close()
    await pool.close()

    # Rewrite ini: drop seeds/genesis path, set role from unset env var (exercises default)
    with open(path_setnym_ini, 'w+') as ini_fh:
        for section in cfg:
            print('[{}]'.format(section), file=ini_fh)
            for (key, value) in cfg[section].items():
                if key in ('seed', 'genesis.txn.path'):
                    continue
                print('{}={}'.format(
                    key, '${X_ROLE:-}' if key == 'role' else value),
                      file=ini_fh)  # exercise default
            print(file=ini_fh)
    with open(path_setnym_ini, 'r') as cfg_fh:
        print(
            '\n\n== 5 == Next configuration, no seeds, no VON Anchor role:\n{}'
            .format(cfg_fh.read()))

    # Run setnym.py again: role defaults to USER
    sub_proc = subprocess.run([
        'python',
        join(dirname(dirname(dirname(realpath(__file__)))), 'von_anchor', 'op',
             'setnym.py'),
        str(path_setnym_ini)
    ],
                              stdout=subprocess.PIPE,
                              stderr=subprocess.DEVNULL)
    assert not sub_proc.returncode
    print('\n\n== 6 == Set nym with default role on {} for {}'.format(
        noman.did, noman.wallet.name))

    # Confirm nym now carries default (USER) role; remember its seqNo
    await pool.open()
    await noman.open()
    nym = json.loads(await noman.get_nym(noman.did))
    assert nym and Role.get(nym['role']) == Role.USER
    last_nym_seqno = nym['seqNo']
    print('\n\n== 7 == Got nym transaction from ledger for DID {} ({}): {}'.
          format(noman.did, noman.wallet.name, ppjson(nym)))
    await noman.close()
    await pool.close()

    sub_proc = subprocess.run(  #  do it again
        [
            'python',
            join(dirname(dirname(dirname(realpath(__file__)))), 'von_anchor',
                 'op', 'setnym.py'),
            str(path_setnym_ini)
        ],
        stdout=subprocess.PIPE,
        stderr=subprocess.DEVNULL)
    assert not sub_proc.returncode
    print('\n\n== 8 == Set nym again with default role on {} for {}'.format(
        noman.did, noman.wallet.name))

    # Idempotent re-set: fetch the nym again and track its seqNo
    await pool.open()
    await noman.open()
    nym = json.loads(await noman.get_nym(noman.did))
    last_nym_seqno = nym['seqNo']
    print(
        '\n\n== 9 == Got (same) nym transaction from ledger for DID {} ({}): {}'
        .format(noman.did, noman.wallet.name, ppjson(nym)))
    await noman.close()
    await pool.close()

    # Rewrite ini with a bad role value to exercise the failure path
    with open(path_setnym_ini, 'w+') as ini_fh:
        for section in cfg:
            print('[{}]'.format(section), file=ini_fh)
            for (key, value) in cfg[section].items():
                if key in ('seed', 'genesis.txn.path'):
                    continue
                print('{}={}'.format(key,
                                     'BAD_ROLE' if key == 'role' else value),
                      file=ini_fh)
            print(file=ini_fh)
    with open(path_setnym_ini, 'r') as cfg_fh:
        print(
            '\n\n== 10 == Next configuration, no seeds, bad VON Anchor role:\n{}'
            .format(cfg_fh.read()))

    # Run setnym.py on the bad config: expect a nonzero return code
    sub_proc = subprocess.run([
        'python',
        join(dirname(dirname(dirname(realpath(__file__)))), 'von_anchor', 'op',
             'setnym.py'),
        str(path_setnym_ini)
    ],
                              stdout=subprocess.PIPE,
                              stderr=subprocess.DEVNULL)
    assert sub_proc.returncode
    print(
        '\n\n== 11 == Called to set bad role for {}, got error text {}'.format(
            noman.wallet.name, sub_proc.stdout.decode()))

    # Failed run must not have touched the ledger: same seqNo as before
    await pool.open()
    await noman.open()
    nym = json.loads(await noman.get_nym(noman.did))
    noman_role = await noman.get_nym_role()
    assert nym and nym['seqNo'] == last_nym_seqno
    await noman.close()
    await pool.close()

    print('\n\n== 12 == Got nym transaction from ledger for DID {} ({}): {}'.
          format(noman.did, noman.wallet.name, ppjson(nym)))

    # Reseed as SRI anchor: nym transaction changes (new verkey -> new seqNo) ...
    await pool.open()
    san = SRIAnchor(wallets[cfg['VON Anchor']['wallet.name']], pool)
    await san.open()
    next_seed = "{}000000000000VonAnchor1".format(int(time()) + 1)
    await san.reseed(next_seed)
    nym = json.loads(await san.get_nym(noman.did))
    san_role = await san.get_nym_role()
    assert nym and nym['seqNo'] != last_nym_seqno
    assert san_role == noman_role  # ensure that reseed does not side-effect role on ledger

    print(
        '\n\n== 13 == As SRI Anchor, reseeded, then got nym transaction from ledger for DID {} ({}): {}'
        .format(san.did, san.wallet.name, ppjson(nym)))

    # Tear down: close anchor, pool, and all wallets
    await san.close()
    await pool.close()
    for name in wallets:
        await wallets[name].close()
Ejemplo n.º 10
0
async def test_non_secrets():
    """
    Exercise wallet non-secrets (generic record) storage and search.

    Covers: silent delete/get of absent records, tag value type checking
    (flat {str: str} only), metadata augment/override vs. replace, clear
    (~-prefixed) vs. encrypted tags, record replacement, record type respect,
    WQL queries ($neq, $not-$in, $like, equality, $or, $lte, $not and
    equality on encrypted tags), WQL search pagination past the default
    chunk size, and the hard result limit.

    Fixes vs. prior revision: the '== 8 ==' banner now correctly reads
    '$like' (was '$not-$in') and '== 10 ==' reads '$or' (was 'equality').
    """

    print(Ink.YELLOW('\n\n== Testing non-secrets operations =='))

    wallets = await get_wallets(
        {
            'multipass': '******'
        },
        open_all=False,
        auto_remove=True)

    # Open wallet and operate
    async with wallets['multipass'] as w:
        await w.delete_non_secret('a-type', 'id0')  # not present: silently carries on
        assert await w.get_non_secret('a-type', 'id0') == {}

        try: # exercise tag value type checking
            NonSecret('a-type', 'id0', 'value', {'a_tag': 123})
            assert False
        except BadRecord:
            pass

        # Store non-secret records
        ns = [
            NonSecret('a-type', '0', 'value 0'),
            NonSecret('a-type', '1', 'value 1', {'epoch': str(int(time()))})
        ]
        assert ns[0] != ns[1]

        await w.write_non_secret(ns[0])
        await w.write_non_secret(ns[1])
        recs = await w.get_non_secret(ns[1].type, ns[1].id)
        print('\n\n== 1 == Stored and got {} record{} for id 1: {}'.format(
            len(recs or {}),
            '' if len(recs or {}) == 1 else 's',
            ppjson({k: vars(recs[k]) for k in recs})))
        assert {k for k in recs[ns[1].id].tags} == {'epoch'}

        # Exercise tag type checking: non-str values, nesting, non-str keys all fail
        for tags in [{'price': 4.95}, {'too': {'deep': ''}}, {(0,1): 'key-str'}]:
            try:
                ns[0].tags = tags
                assert False
            except BadRecord:
                pass
        ns[1].tags['score'] = 7
        try:
            await w.write_non_secret(ns[1])
            assert False
        except BadRecord:
            pass
        print('\n\n== 2 == Tags type validation enforces flat {str: str} dict')

        # Augment/override vs. replace metadata
        ns[1].tags = {'score': '7'}
        await w.write_non_secret(ns[1])
        recs = await w.get_non_secret(ns[1].type, ns[1].id)
        assert {k for k in recs[ns[1].id].tags} == {'epoch', 'score'}
        await w.write_non_secret(ns[1], replace_meta=True)
        recs = await w.get_non_secret(ns[1].type, ns[1].id)
        assert {k for k in recs[ns[1].id].tags} == {'score'}
        print('\n\n== 3 == Metadata augment/override vs. replace metadata behaviour OK')

        ns[1].tags['~clear'] = 'text'  # exercise clear/encr tags
        assert {k for k in ns[1].clear_tags} == {'~clear'}
        assert {k for k in ns[1].encr_tags} == {'score'}

        # Full record replacement: new value, tags wiped
        ns[1].value = 'value 0'
        ns[1].tags = None
        await w.write_non_secret(ns[1], replace_meta=True)
        recs = await w.get_non_secret(ns[1].type, ns[1].id)
        assert recs[ns[1].id].tags is None and recs[ns[1].id].value == 'value 0'
        print('\n\n== 4 == Record replacement OK')

        # Same id under a different type must not collide
        nsb = NonSecret('b-type', ns[1].id, ns[1].value, ns[1].tags)
        await w.write_non_secret(nsb)
        recs = await w.get_non_secret(nsb.type, nsb.id)
        assert recs[nsb.id].type == 'b-type' and recs[nsb.id].tags is None and recs[nsb.id].value == 'value 0'
        recs = await w.get_non_secret('a-type', nsb.id)
        assert recs[nsb.id].type == 'a-type' and recs[nsb.id].tags is None and recs[nsb.id].value == 'value 0'
        print('\n\n== 5 == Check for record type respect passes OK')
        await w.delete_non_secret('b-type', nsb.id)

        # Populate records for WQL searches: one clear tag (~epoch), one encrypted (encr)
        ns = []
        epoch = int(time())
        for i in range(5):
            await w.write_non_secret(NonSecret(
                'searchable',
                str(i),
                str(i),
                {
                    '~epoch': str(epoch),
                    'encr': str(i)
                }))

        # Get by WQL $neq
        recs = await w.get_non_secret(
            'searchable',
            {
                '~epoch': {
                    '$neq': epoch + 1  # exercise to-str canonicalization
                }
            })
        print('\n\n== 6 == Got {} record{} from by WQL on $neq: {}'.format(
            len(recs or {}),
            '' if len(recs or {}) == 1 else 's',
            ppjson({k: vars(recs[k]) for k in recs})))
        assert len(recs) == 5

        # Get by WQL $not-$in
        recs = await w.get_non_secret(
            'searchable',
            {
                '$not': {
                    '~epoch': {
                        '$in': [epoch - 1, epoch + 1]
                    }
                }
            })
        print('\n\n== 7 == Got {} record{} from by WQL on $not-$in: {}'.format(
            len(recs or {}),
            '' if len(recs or {}) == 1 else 's',
            ppjson({k: vars(recs[k]) for k in recs})))
        assert len(recs) == 5

        # Get by WQL $like
        recs = await w.get_non_secret(
            'searchable',
            {
                '~epoch': {
                    '$like': '{}%'.format(epoch)
                }
            })
        print('\n\n== 8 == Got {} record{} from by WQL on $like: {}'.format(
            len(recs or {}),
            '' if len(recs or {}) == 1 else 's',
            ppjson({k: vars(recs[k]) for k in recs})))
        assert len(recs) == 5

        # Get by WQL equality
        recs = await w.get_non_secret(
            'searchable',
            {
                '~epoch': epoch
            })
        print('\n\n== 9 == Got {} record{} from by WQL on equality: {}'.format(
            len(recs or {}),
            '' if len(recs or {}) == 1 else 's',
            ppjson({k: vars(recs[k]) for k in recs})))
        assert len(recs) == 5

        # Get by WQL $or
        recs = await w.get_non_secret(
            'searchable',
            {
                '$or': [
                    {
                        '~epoch': epoch
                    },
                    {
                        '~epoch': epoch + 1
                    }
                ]
            })
        print('\n\n== 10 == Got {} record{} from by WQL on $or: {}'.format(
            len(recs or {}),
            '' if len(recs or {}) == 1 else 's',
            ppjson({k: vars(recs[k]) for k in recs})))
        assert len(recs) == 5

        # Get by WQL $lte
        recs = await w.get_non_secret(
            'searchable',
            {
                '~epoch': {
                    '$lte': epoch
                }
            })
        print('\n\n== 11 == Got {} record{} from by WQL on $lte: {}'.format(
            len(recs or {}),
            '' if len(recs or {}) == 1 else 's',
            ppjson({k: vars(recs[k]) for k in recs})))
        assert len(recs) == 5

        # Get by WQL $not on encrypted tag values
        recs = await w.get_non_secret(
            'searchable',
            {
                '$not': {
                    'encr': str(0)
                }
            })
        print('\n\n== 12 == Got {} record{} from by WQL on $not for encrypted tag value: {}'.format(
            len(recs or {}),
            '' if len(recs or {}) == 1 else 's',
            ppjson({k: vars(recs[k]) for k in recs})))
        assert len(recs) == 4

        # Get by WQL equality on encrypted tag values
        recs = await w.get_non_secret(
            'searchable',
            {
                'encr': str(0)
            })
        print('\n\n== 13 == Got {} record{} from by WQL on equality for encrypted tag value: {}'.format(
            len(recs or {}),
            '' if len(recs or {}) == 1 else 's',
            ppjson({k: vars(recs[k]) for k in recs})))
        assert len(recs) == 1

        # Exercise WQL search pagination: store more records than one chunk holds
        cardinality = Wallet.DEFAULT_CHUNK + 16
        nsw = [
            NonSecret('wql', str(i), 'value {}'.format(i), {'~meta': str(i)}) for i in range(cardinality)
        ]

        for i in range(cardinality):
            await w.write_non_secret(nsw[i])

        recs = await w.get_non_secret(
            'wql',
            {
                '~meta': {
                    '$gte': 0
                }
            })

        print('\n\n== 14 == Stored and got {} record{} using WQL pagination'.format(
            len(recs or {}),
            '' if len(recs or {}) == 1 else 's'))
        assert len(recs) == cardinality
        assert {i for i in range(cardinality)} == {int(k) for k in recs}

        # Exercise limit: caller-specified cap truncates the result set
        recs = await w.get_non_secret(
            'wql',
            {
                '~meta': {
                    '$gte': 0
                }
            },
            limit=Wallet.DEFAULT_CHUNK)

        print('\n\n== 15 == Stored and got {} record{} using hard limit of {}: {}'.format(
            len(recs or {}),
            '' if len(recs or {}) == 1 else 's',
            Wallet.DEFAULT_CHUNK,
            ppjson({k: vars(recs[k]) for k in recs})))
        assert len(recs) == Wallet.DEFAULT_CHUNK
        assert all(int(k) in range(cardinality) for k in recs)
Ejemplo n.º 11
0
async def test_pairwise():

    print(Ink.YELLOW('\n\n== Testing pairwise operations =='))

    wallets = await get_wallets(
        {
            'multipass': '******',
            'agent-86': 'Agent-86-00000000000000000000000',
            'agent-99': 'Agent-99-00000000000000000000000',
        },
        open_all=False,
        auto_remove=True)

    pairwises = {}  # naive pairwise info, with anchor dids and verkeys
    for name in wallets:
        if name != 'multipass':
            async with wallets[name] as their_wallet:  # engage auto-remove
                pairwises[name] = PairwiseInfo(
                    their_wallet.did,
                    their_wallet.verkey,
                    wallets['multipass'].did,
                    wallets['multipass'].verkey,
                    None)

    assert pairwises['agent-86'] != pairwises['agent-99']
    baseline_meta = {'their_verkey', 'their_did', 'my_verkey', 'my_did'}

    # Open wallets and operate
    async with wallets['multipass'] as w:
        print('\n\n== 1 == Pairwise DIDs: {}'.format(ppjson(pairwises)))

        await w.delete_pairwise(pairwises['agent-86'].their_did)  # not present: silently carries on
        await w.delete_pairwise(pairwises['agent-99'].their_did)  # not present: silently carries on
        assert await w.get_pairwise(pairwises['agent-86'].their_did) == {}

        # Store record for agent 86, 99; get by DID
        metadata = {'epoch': int(time())}  # preparing to exercise metadata int to str
        await w.write_pairwise(
            pairwises['agent-99'].their_did,
            pairwises['agent-99'].their_verkey,
            wallets['multipass'].did,
            metadata)
        await w.write_pairwise(
            pairwises['agent-86'].their_did,
            pairwises['agent-86'].their_verkey,
            wallets['multipass'].did)
        records = await w.get_pairwise(pairwises['agent-86'].their_did)
        print('\n\n== 2 == Stored and got {} record{} for agent-86: {}'.format(
            len(records or {}),
            '' if len(records or {}) == 1 else 's',
            ppjson({k: vars(records[k]) for k in records})))
        assert {k for k in records[pairwises['agent-86'].their_did].metadata} == baseline_meta
        assert records[pairwises['agent-86'].their_did].my_did == wallets['multipass'].did
        assert records[pairwises['agent-86'].their_did].my_verkey == wallets['multipass'].verkey

        # Set metadata; get by DID
        await w.write_pairwise(
            pairwises['agent-86'].their_did,
            pairwises['agent-86'].their_verkey,
            wallets['multipass'].did,
            metadata)
        records = await w.get_pairwise(pairwises['agent-86'].their_did)
        print('\n\n== 3 == Stored metadata and got {} record{} for agent-86: {}'.format(
            len(records or {}),
            '' if len(records or {}) == 1 else 's',
            ppjson({k: vars(records[k]) for k in records})))
        assert {k for k in records[pairwises['agent-86'].their_did].metadata} == baseline_meta | {'epoch'}

        # Augment metadata; get by DID
        metadata = {'clearance': 'galactic'}
        await w.write_pairwise(
            pairwises['agent-86'].their_did,
            pairwises['agent-86'].their_verkey,
            wallets['multipass'].did,
            metadata)
        records = await w.get_pairwise(pairwises['agent-86'].their_did)
        print('\n\n== 4 == Stored metadata and got {} record{} for agent-86: {}'.format(
            len(records or {}),
            '' if len(records or {}) == 1 else 's',
            ppjson({k: vars(records[k]) for k in records})))
        assert {k for k in records[pairwises['agent-86'].their_did].metadata} == baseline_meta | {'epoch', 'clearance'}

        # Replace metadata; get by DID
        metadata = {'secrecy': 'hover cover'}
        await w.write_pairwise(
            pairwises['agent-86'].their_did,
            pairwises['agent-86'].their_verkey,
            wallets['multipass'].did,
            metadata,
            replace_meta=True)
        records = await w.get_pairwise(pairwises['agent-86'].their_did)
        print('\n\n== 5 == Replaced metadata and got {} record{} for agent-86: {}'.format(
            len(records or {}),
            '' if len(records or {}) == 1 else 's',
            ppjson({k: vars(records[k]) for k in records})))
        assert {k for k in records[pairwises['agent-86'].their_did].metadata} == baseline_meta | {'secrecy'}

        # Update metadata with ~tags, exercise equivalence; get by DID
        metadata = {'~clearance': 'cosmic'}
        await w.write_pairwise(
            pairwises['agent-86'].their_did,
            pairwises['agent-86'].their_verkey,
            wallets['multipass'].did,
            metadata)  # update metadata should overwrite prior (clearance) attr on ~
        records = await w.get_pairwise(pairwises['agent-86'].their_did)
        print('\n\n== 6 == Updated metadata on ~tags and got {} record{} for agent-86: {}'.format(
            len(records or {}),
            '' if len(records or {}) == 1 else 's',
            ppjson({k: vars(records[k]) for k in records})))
        assert ({k for k in records[pairwises['agent-86'].their_did].metadata} ==
            baseline_meta | {'secrecy', 'clearance'})

        # Replace metadata on ~tags, exercise equivalence; get by DID
        metadata = {'~secrecy': 'hover cover'}
        await w.write_pairwise(
            pairwises['agent-86'].their_did,
            pairwises['agent-86'].their_verkey,
            wallets['multipass'].did,
            metadata,
            replace_meta=True)
        records = await w.get_pairwise(pairwises['agent-86'].their_did)
        print('\n\n== 7 == Replaced metadata on ~tags and got {} record{} for agent-86: {}'.format(
            len(records or {}),
            '' if len(records or {}) == 1 else 's',
            ppjson({k: vars(records[k]) for k in records})))
        assert {k for k in records[pairwises['agent-86'].their_did].metadata} == baseline_meta | {'secrecy'}

        # Vacuous storage changing nothing: show intact metadata; get by DID
        await w.write_pairwise(
            pairwises['agent-86'].their_did,
            pairwises['agent-86'].their_verkey,
            wallets['multipass'].did)
        records = await w.get_pairwise(pairwises['agent-86'].their_did)
        print('\n\n== 8 == Wrote non-delta and got {} record{} for agent-86: {}'.format(
            len(records or {}),
            '' if len(records or {}) == 1 else 's',
            ppjson({k: vars(records[k]) for k in records})))
        assert {k for k in records[pairwises['agent-86'].their_did].metadata} == baseline_meta | {'secrecy'}

        # Clear metadata, show retention of did and verkey base line; get by DID
        metadata = None
        await w.write_pairwise(
            pairwises['agent-86'].their_did,
            pairwises['agent-86'].their_verkey,
            wallets['multipass'].did,
            metadata,
            replace_meta=True)
        records = await w.get_pairwise(pairwises['agent-86'].their_did)
        print('\n\n== 9 == Cleared metadata and got {} record{} for agent-86: {}'.format(
            len(records or {}),
            '' if len(records or {}) == 1 else 's',
            ppjson({k: vars(records[k]) for k in records})))
        assert {k for k in records[pairwises['agent-86'].their_did].metadata} == baseline_meta

        # Restore epoch to metadata; get all
        metadata = {'epoch': int(time())}
        await w.write_pairwise(
            pairwises['agent-86'].their_did,
            pairwises['agent-86'].their_verkey,
            wallets['multipass'].did,
            metadata,
            replace_meta=True)
        records = await w.get_pairwise()
        print('\n\n== 10 == Got {} record{} from get-all: {}'.format(
            len(records or {}),
            '' if len(records or {}) == 1 else 's',
            ppjson({k: vars(records[k]) for k in records})))
        assert len(records) == 2
        assert all({k for k in records[pairwises[name].their_did].metadata} ==
            baseline_meta | {'epoch'} for name in pairwises)

        # Get by WQL $neq
        records = await w.get_pairwise(json.dumps({
            'their_verkey': {
                '$neq': pairwises['agent-99'].their_verkey
            }
        }))
        print('\n\n== 11 == Got {} record{} from by WQL on $neq: {}'.format(
            len(records or {}),
            '' if len(records or {}) == 1 else 's',
            ppjson({k: vars(records[k]) for k in records})))
        assert len(records) == 1
        assert {k for k in records[pairwises['agent-86'].their_did].metadata} == baseline_meta | {'epoch'}

        # Get by WQL $in
        records = await w.get_pairwise(json.dumps({
            'their_verkey': {
                '$in': [pairwises[name].their_verkey for name in pairwises]
            }
        }))
        print('\n\n== 12 == Got {} record{} from by WQL on $in: {}'.format(
            len(records or {}),
            '' if len(records or {}) == 1 else 's',
            ppjson({k: vars(records[k]) for k in records})))
        assert len(records) == 2
        assert all({k for k in records[pairwises[name].their_did].metadata} ==
            baseline_meta | {'epoch'} for name in pairwises)

        # Get by WQL $or
        records = await w.get_pairwise(json.dumps({
            '$or': [
                {
                    'their_verkey': pairwises['agent-86'].their_verkey,
                },
                {
                    'their_did': pairwises['agent-99'].their_did,
                }
            ]
        }))
        print('\n\n== 13 == Got {} record{} from by WQL on $or: {}'.format(
            len(records or {}),
            '' if len(records or {}) == 1 else 's',
            ppjson({k: vars(records[k]) for k in records})))
        assert len(records) == 2
        assert all({k for k in records[pairwises[name].their_did].metadata} ==
            baseline_meta | {'epoch'} for name in pairwises)

        # Get by WQL $neq
        records = await w.get_pairwise(json.dumps({
            'their_verkey': {
                '$neq': pairwises['agent-99'].their_verkey
            }
        }))
        print('\n\n== 14 == Got {} record{} from by WQL on $neq: {}'.format(
            len(records or {}),
            '' if len(records or {}) == 1 else 's',
            ppjson({k: vars(records[k]) for k in records})))
        assert len(records) == 1
        assert {k for k in records[pairwises['agent-86'].their_did].metadata} == baseline_meta | {'epoch'}

        # Get by WQL $lte
        records = await w.get_pairwise(json.dumps({
            'epoch': {
                '$lte': int(time())
            }
        }))
        print('\n\n== 15 == Got {} record{} from by WQL on $lte: {}'.format(
            len(records or {}),
            '' if len(records or {}) == 1 else 's',
            ppjson({k: vars(records[k]) for k in records})))
        assert len(records) == 2
        assert all({k for k in records[pairwises[name].their_did].metadata} ==
            baseline_meta | {'epoch'} for name in pairwises)

        # Get by WQL $like
        records = await w.get_pairwise(json.dumps({
            'their_did': {
                '$like': '{}%'.format(pairwises['agent-86'].their_did)
            }
        }))
        print('\n\n== 16 == Got {} record{} from by WQL on $like: {}'.format(
            len(records or {}),
            '' if len(records or {}) == 1 else 's',
            ppjson({k: vars(records[k]) for k in records})))
        assert len(records) == 1
        assert {k for k in records[pairwises['agent-86'].their_did].metadata} == baseline_meta | {'epoch'}

        # Get by WQL equality
        records = await w.get_pairwise(json.dumps({
            'their_did': pairwises['agent-86'].their_did
        }))
        print('\n\n== 17 == Got {} record{} from by WQL on equality: {}'.format(
            len(records or {}),
            '' if len(records or {}) == 1 else 's',
            ppjson({k: vars(records[k]) for k in records})))
        assert len(records) == 1
        assert {k for k in records[pairwises['agent-86'].their_did].metadata} == baseline_meta | {'epoch'}

        # Get by nested WQL $or-$like
        records = await w.get_pairwise(json.dumps({
            '$or': [
                {
                    'their_verkey': {
                        '$like': '{}%'.format(pairwises['agent-86'].their_verkey)
                    }
                },
                {
                    'their_verkey': {
                        '$like': '{}%'.format(pairwises['agent-99'].their_verkey)
                    }
                }
            ]
        }))
        print('\n\n== 18 == Got {} record{} from by nested $or-$like WQL: {}'.format(
            len(records or {}),
            '' if len(records or {}) == 1 else 's',
            ppjson({k: vars(records[k]) for k in records})))
        assert len(records) == 2
        assert all({k for k in records[pairwises[name].their_did].metadata} ==
            baseline_meta | {'epoch'} for name in pairwises)

        # Get by nested WQL
        records = await w.get_pairwise(json.dumps({
            '$not': {
                'my_did': None
            },
            '$not': {
                'epoch': {
                    '$in': [1, 2, 3, 4, 5]
                }
            },
            'epoch': {
                '$gt': 0,
            },
            '$or': [
                {
                    'their_verkey': {
                        '$like': '{}%'.format(pairwises['agent-86'].their_verkey)
                    }
                },
                {
                    'their_verkey': {
                        '$like': '{}%'.format(pairwises['agent-99'].their_verkey)
                    }
                }
            ]
        }))
        print('\n\n== 19 == Got {} record{} from by nested WQL: {}'.format(
            len(records or {}),
            '' if len(records or {}) == 1 else 's',
            ppjson({k: vars(records[k]) for k in records})))
        assert len(records) == 2
        assert all({k for k in records[pairwises[name].their_did].metadata} ==
            baseline_meta | {'epoch'} for name in pairwises)

        # Delete
        # Remove the agent-86 pairwise record, then confirm a lookup returns nothing.
        await w.delete_pairwise(pairwises['agent-86'].their_did)
        records = await w.get_pairwise(pairwises['agent-86'].their_did)
        print('\n\n== 20 == Deleted agent-86 record and checked its absence')
        assert not records

        # Exercise the above writes without specifying local DID; ensure operation creates new local DIDs and verkeys

        metadata = {'epoch': int(time())}  # preparing to exercise metadata int to str
        # Write agent-99 with metadata but no explicit local DID (3rd arg None).
        await w.write_pairwise(
            pairwises['agent-99'].their_did,
            pairwises['agent-99'].their_verkey,
            None,
            metadata)
        # Write agent-86 with neither local DID nor metadata: minimal-arg path.
        await w.write_pairwise(
            pairwises['agent-86'].their_did,
            pairwises['agent-86'].their_verkey)
        records = await w.get_pairwise(pairwises['agent-86'].their_did)
        print('\n\n== 21 == Stored and got {} record{} for agent-86: {}'.format(
            len(records or {}),
            '' if len(records or {}) == 1 else 's',
            ppjson({k: vars(records[k]) for k in records})))
        # With no metadata supplied, only the baseline keys should be present.
        assert {k for k in records[pairwises['agent-86'].their_did].metadata} == baseline_meta
        p86_my_did = records[pairwises['agent-86'].their_did].my_did
        p86_my_verkey = records[pairwises['agent-86'].their_did].my_verkey
        # The wallet must have minted a fresh local DID/verkey pair rather than
        # reusing the wallet's own anchor DID/verkey.
        assert p86_my_did != wallets['multipass'].did
        assert p86_my_verkey != wallets['multipass'].verkey

        # Set metadata; get by DID
        await w.write_pairwise(
            pairwises['agent-86'].their_did,
            pairwises['agent-86'].their_verkey,
            None,
            metadata)
        records = await w.get_pairwise(pairwises['agent-86'].their_did)
        print('\n\n== 22 == Stored metadata and got {} record{} for agent-86: {}'.format(
            len(records or {}),
            '' if len(records or {}) == 1 else 's',
            ppjson({k: vars(records[k]) for k in records})))
        # Epoch key now merged into the baseline metadata set.
        assert {k for k in records[pairwises['agent-86'].their_did].metadata} == baseline_meta | {'epoch'}
        assert records[pairwises['agent-86'].their_did].my_did != wallets['multipass'].did
        assert records[pairwises['agent-86'].their_did].my_verkey != wallets['multipass'].verkey
        # NOTE(review): each DID-less write appears to mint yet another local
        # DID/verkey — these asserts pin that the pair differs from == 21's.
        assert records[pairwises['agent-86'].their_did].my_did != p86_my_did
        assert records[pairwises['agent-86'].their_did].my_verkey != p86_my_verkey

        # Augment metadata; get by DID
        metadata = {'clearance': 'galactic'}
        await w.write_pairwise(
            pairwises['agent-86'].their_did,
            pairwises['agent-86'].their_verkey,
            None,
            metadata)
        records = await w.get_pairwise(pairwises['agent-86'].their_did)
        print('\n\n== 23 == Stored metadata and got {} record{} for agent-86: {}'.format(
            len(records or {}),
            '' if len(records or {}) == 1 else 's',
            ppjson({k: vars(records[k]) for k in records})))
        # Default (replace_meta=False) write merges new keys with existing ones.
        assert {k for k in records[pairwises['agent-86'].their_did].metadata} == baseline_meta | {'epoch', 'clearance'}

        # Replace metadata; get by DID
        metadata = {'secrecy': 'hover cover'}
        await w.write_pairwise(
            pairwises['agent-86'].their_did,
            pairwises['agent-86'].their_verkey,
            None,
            metadata,
            replace_meta=True)
        records = await w.get_pairwise(pairwises['agent-86'].their_did)
        print('\n\n== 24 == Replaced metadata and got {} record{} for agent-86: {}'.format(
            len(records or {}),
            '' if len(records or {}) == 1 else 's',
            ppjson({k: vars(records[k]) for k in records})))
        # replace_meta=True drops prior epoch/clearance keys, leaving baseline + secrecy.
        assert {k for k in records[pairwises['agent-86'].their_did].metadata} == baseline_meta | {'secrecy'}

        # Update metadata with ~tags, exercise equivalence; get by DID
        # '~'-prefixed keys denote unencrypted wallet tags; the API is expected
        # to treat '~clearance' and 'clearance' as the same logical attribute.
        metadata = {'~clearance': 'cosmic'}
        await w.write_pairwise(
            pairwises['agent-86'].their_did,
            pairwises['agent-86'].their_verkey,
            None,
            metadata)  # update metadata should overwrite prior (clearance) attr on ~
        records = await w.get_pairwise(pairwises['agent-86'].their_did)
        print('\n\n== 25 == Updated metadata on ~tags and got {} record{} for agent-86: {}'.format(
            len(records or {}),
            '' if len(records or {}) == 1 else 's',
            ppjson({k: vars(records[k]) for k in records})))
        # Merge semantics again: secrecy survives, clearance re-appears (sans '~').
        assert ({k for k in records[pairwises['agent-86'].their_did].metadata} ==
            baseline_meta | {'secrecy', 'clearance'})

        # Replace metadata on ~tags, exercise equivalence; get by DID
        metadata = {'~secrecy': 'hover cover'}
        await w.write_pairwise(
            pairwises['agent-86'].their_did,
            pairwises['agent-86'].their_verkey,
            None,
            metadata,
            replace_meta=True)
        records = await w.get_pairwise(pairwises['agent-86'].their_did)
        print('\n\n== 26 == Replaced metadata on ~tags and got {} record{} for agent-86: {}'.format(
            len(records or {}),
            '' if len(records or {}) == 1 else 's',
            ppjson({k: vars(records[k]) for k in records})))
        # Replacement via '~secrecy' lands as plain 'secrecy' atop the baseline.
        assert {k for k in records[pairwises['agent-86'].their_did].metadata} == baseline_meta | {'secrecy'}

        # Vacuous storage changing nothing: show intact metadata; get by DID
        await w.write_pairwise(
            pairwises['agent-86'].their_did,
            pairwises['agent-86'].their_verkey)
        records = await w.get_pairwise(pairwises['agent-86'].their_did)
        print('\n\n== 27 == Wrote non-delta and got {} record{} for agent-86: {}'.format(
            len(records or {}),
            '' if len(records or {}) == 1 else 's',
            ppjson({k: vars(records[k]) for k in records})))
        # A metadata-free write must not disturb existing metadata.
        assert {k for k in records[pairwises['agent-86'].their_did].metadata} == baseline_meta | {'secrecy'}

        # Clear metadata, show retention of did and verkey base line; get by DID
        metadata = None
        await w.write_pairwise(
            pairwises['agent-86'].their_did,
            pairwises['agent-86'].their_verkey,
            None,
            metadata,
            replace_meta=True)
        records = await w.get_pairwise(pairwises['agent-86'].their_did)
        print('\n\n== 28 == Cleared metadata and got {} record{} for agent-86: {}'.format(
            len(records or {}),
            '' if len(records or {}) == 1 else 's',
            ppjson({k: vars(records[k]) for k in records})))
        # replace_meta=True with metadata=None wipes user metadata; the
        # baseline (DID/verkey bookkeeping) keys remain.
        assert {k for k in records[pairwises['agent-86'].their_did].metadata} == baseline_meta

        # Restore epoch to metadata; get all
        metadata = {'epoch': int(time())}
        await w.write_pairwise(
            pairwises['agent-86'].their_did,
            pairwises['agent-86'].their_verkey,
            None,
            metadata,
            replace_meta=True)
        # No DID argument: get_pairwise() returns every pairwise record in the wallet.
        records = await w.get_pairwise()
        print('\n\n== 29 == Got {} record{} from get-all: {}'.format(
            len(records or {}),
            '' if len(records or {}) == 1 else 's',
            ppjson({k: vars(records[k]) for k in records})))
        # Expect exactly the two pairwise relations (agent-86, agent-99),
        # each carrying baseline metadata plus its epoch.
        assert len(records) == 2
        assert all({k for k in records[pairwises[name].their_did].metadata} ==
            baseline_meta | {'epoch'} for name in pairwises)

        # Delete
        # Final cleanup parallels == 20: delete agent-86 and verify absence.
        await w.delete_pairwise(pairwises['agent-86'].their_did)
        records = await w.get_pairwise(pairwises['agent-86'].their_did)
        print('\n\n== 30 == Deleted agent-86 record and checked its absence')
        assert not records