Code Example #1
async def test_canon():
    print(Ink.YELLOW('\n\n== Testing Attribute Canonicalization =='))
    assert canon('testAttr') == 'testattr'
    assert canon(' test Attr ') == 'testattr'
    assert canon('testattr') == 'testattr'
    assert canon('testAttrZeroOneTwoThree') == 'testattrzeroonetwothree'
    print('\n\n== Canonicalization for attr values works as expected')
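A minimal sketch of the behaviour these assertions imply, assuming canon() merely lowercases an attribute name and strips its whitespace (canon_sketch is a hypothetical stand-in, not the library implementation):

def canon_sketch(raw_attr_name):
    # lowercase and drop all whitespace, mirroring the assertions above
    return ''.join(raw_attr_name.split()).lower()

assert canon_sketch(' test Attr ') == 'testattr'
assert canon_sketch('testAttrZeroOneTwoThree') == 'testattrzeroonetwothree'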
Code Example #2
File: test_pool.py Project: swcurran/von_anchor
async def test_pool_open(path_home, pool_name, pool_genesis_txn_data, pool_ip):

    print(
        Ink.YELLOW(
            '\n\n== Testing Node Pool Config vs. IP {} =='.format(pool_ip)))

    try:
        NodePool(pool_name, config={'extra': 'not allowed'})
        assert False
    except JSONValidation:
        pass

    # Set up node pool ledger config and wallets, open pool, init anchors
    p_mgr = NodePoolManager()
    if pool_name not in await p_mgr.list():
        await p_mgr.add_config(pool_name, pool_genesis_txn_data)
    pool = p_mgr.get(pool_name)
    async with pool:
        assert pool.handle is not None
    assert pool.handle is None
    await pool.close()  # exercise double-close: should silently carry on

    pool.config['timeout'] = 'should be an integer'
    try:
        async with pool:
            assert False
    except IndyError as x_indy:
        assert x_indy.error_code == ErrorCode.CommonInvalidStructure
    pool.config.pop('timeout')

    print(
        '\n\n== 1 == Pool {} opens and closes OK from existing ledger configuration'
        .format(pool))
Code Example #3
File: test_cache.py Project: shlemph/von_anchor
async def test_cache_multithread():
    global cache_test_done

    print(Ink.YELLOW('\n\n== Testing Cache Multithreading =='))

    THREADS = 64
    MODULUS = 5
    epoch_start = epoch()
    cache_threads = []

    dot_thread = Thread(target=_dot)
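    # the worker threads below request only MODULUS distinct keys (ser_no % MODULUS),
    # so most lookups collide on the same few cache entries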
    for ser_no in range(THREADS):
        cache_threads.append(
            Thread(target=get,
                   args=(ser_no % MODULUS, _ser_no2did(ser_no % MODULUS))))

    dot_thread.start()

    shuffle(cache_threads)
    for thread in cache_threads:
        # print('Starting thread {}'.format(cache_threads.index(thread)))
        thread.start()

    for thread in cache_threads:
        thread.join()

    elapsed = ceil(epoch() - epoch_start)
    assert elapsed < 2 * MODULUS * DELAY  # shouldn't get caught waiting more than once per cache write

    cache_test_done = True
    dot_thread.join()
Code Example #4
File: test_wallet.py Project: nrempel/von_anchor
async def test_pack():

    print(Ink.YELLOW('\n\n== Testing packing/unpacking =='))

    wallets = await get_wallets(
        {
            'agent-13': 'Agent-13-00000000000000000000000',
            'agent-86': 'Agent-86-00000000000000000000000',
            'agent-99': 'Agent-99-00000000000000000000000'
        },
        open_all=False,
        auto_remove=True)

    # Open wallets and operate
    async with wallets['agent-13'] as w13, (
            wallets['agent-86']) as w86, (
            wallets['agent-99']) as w99:

        dids = {name: wallets[name].did for name in wallets}
        print('\n\n== 1 == DIDs: {}'.format(ppjson(dids)))

        # Agent 86 packs and unpacks to and from itself anonymously, implicitly and explicitly
        plain = 'Hello World'
        packed = await w86.pack(plain)
        print('\n\n== 2 == Plaintext: {}, packed JWE: {}'.format(plain, packed))
        unpacked = await w86.unpack(packed)
        assert unpacked == (plain, w86.verkey, None)
        packed = await w86.pack(plain, w86.verkey)
        unpacked = await w86.unpack(packed)
        assert unpacked == (plain, w86.verkey, None)
        packed = await w86.pack(plain, [w86.verkey])
        unpacked = await w86.unpack(packed)
        assert unpacked == (plain, w86.verkey, None)
        print('\n\n== 3 == {} packed and unpacked anonymous message: {}'.format(w86.name, unpacked[0]))

        # Agent 86 signs and packs to itself, then unpacks, with anchor verkey and loc did verkey
        packed = await w86.pack(plain, None, w86.verkey)
        unpacked = await w86.unpack(packed)
        assert unpacked == (plain, w86.verkey, w86.verkey)
        loc_did_info = await w86.create_local_did('Shoe-Phone-000000000000000000000')
        packed = await w86.pack(plain, None, loc_did_info.verkey)
        unpacked = await w86.unpack(packed)
        assert unpacked == (plain, w86.verkey, loc_did_info.verkey)
        print('\n\n== 4 == {} packed and unpacked authenticated message: {}'.format(w86.name, unpacked[0]))

        # Agent 86 signs and packs to agents 13 and 99; they unpack, but agent 86 itself cannot
        packed = await w86.pack(plain, [w13.verkey, w99.verkey], loc_did_info.verkey)
        unpacked = await w13.unpack(packed)
        assert unpacked == (plain, w13.verkey, loc_did_info.verkey)
        print('\n\n== 5.0 == {} auth-packed, {} unpacked: {}'.format(w86.name, w13.name, unpacked[0]))
        unpacked = await w99.unpack(packed)
        assert unpacked == (plain, w99.verkey, loc_did_info.verkey)
        print('\n\n== 5.1 == {} auth-packed, {} unpacked: {}'.format(w86.name, w99.name, unpacked[0]))
        try:
            unpacked = await w86.unpack(packed)
            assert False
        except AbsentRecord:
            pass
        print('\n\n== 5.2 == {} correctly failed to unpack ciphertext'.format(w86.name))
Code Example #5
async def test_protocol():

    print(Ink.YELLOW('\n\n== Testing Node Pool Protocols =='))

    assert Protocol.V_13.indy() != Protocol.V_14.indy()  # all the same except indy-node 1.3
    assert Protocol.V_14.indy() == Protocol.V_15.indy()
    assert Protocol.V_15.indy() == Protocol.V_16.indy()
    assert Protocol.V_16.indy() == Protocol.V_17.indy()
    assert Protocol.V_17.indy() == Protocol.V_18.indy()
    assert Protocol.V_18.indy() == Protocol.DEFAULT.indy()

    print('\n\n== 1 == Protocols OK')
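By transitivity of the equalities asserted above, every protocol from V_14 onward resolves to the same indy-node value as DEFAULT; a minimal spot check under that reading:

assert Protocol.V_15.indy() == Protocol.DEFAULT.indy()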
Code Example #6
File: test_cache.py Project: shlemph/von_anchor
async def test_schema_cache():
    print(Ink.YELLOW('\n\n== Testing Schema Cache =='))
    N = 32
    s_key = []
    schema = []
    for i in range(N):
        s_key.append(
            SchemaKey('Q4zqM7aXqm7gDQkUVLng{:02d}'.format(i).replace('0', 'Q'),
                      'schema-{}'.format(i // 5), '{}'.format(i % 5)))
        schema.append({
            # 'id': schema_id(s_key[i].origin_did, s_key[i].name, s_key[i].version),
            'id': schema_id(*s_key[i]),
            'name': s_key[i].name,
            'version': s_key[i].version,
            'seqNo': i,
            'attrNames': ['attr-{}-{}'.format(i, j) for j in range(N)],
            'ver': '1.0'
        })

    for i in range(N):
        if i % 2:
            SCHEMA_CACHE[s_key[i]] = schema[i]
        else:
            SCHEMA_CACHE[schema[i]['seqNo']] = schema[i]

    for i in range(N):
        assert SCHEMA_CACHE.contains(s_key[i])
        assert SCHEMA_CACHE.contains(schema[i]['seqNo'])
        assert SCHEMA_CACHE[s_key[i]] == SCHEMA_CACHE[schema[i]['seqNo']]

    assert len(SCHEMA_CACHE.index()) == N
    assert not SCHEMA_CACHE.contains(-1)

    try:
        SCHEMA_CACHE[-1]
        assert False
    except CacheIndex:
        pass

    # Exercise cache clearing and feeding
    cached = SCHEMA_CACHE.schemata()
    assert SCHEMA_CACHE.schemata()
    cached_json = json.dumps(cached)
    SCHEMA_CACHE.clear()
    assert not SCHEMA_CACHE.schemata()
    SCHEMA_CACHE.feed(json.loads(cached_json))
    assert len(SCHEMA_CACHE.schemata()) == len(cached)
Code Example #7
async def test_box_ids():
    print(Ink.YELLOW('\n\n== Testing Box Identifier Checks =='))
    
    assert ok_did('Q4zqM7aXqm7gDQkUVLng9h')  # quibble: not technically a box id
    assert not ok_did('Q4zqM7aXqm7gDQkUVLng9I')
    assert not ok_did('Q4zqM7aXqm7gDQkUVLng')

    assert Tails.ok_hash('Q4zqM7aXqm7gDQkUVLng9hQ4zqM7aXqm7gDQkUVLng9h')
    assert Tails.ok_hash('Q4zqM7aXqm7gDQkUVLng9hQ4zqM7aXqm7gDQkUVLng')
    assert not Tails.ok_hash('Q4zqM7aXqm7gDQkUVLng9h')
    assert not Tails.ok_hash('Q4zqM7aXqm7gDQkUVLng9hQ4zqM7aXqm7gDQkUVLng9hx')
    assert not Tails.ok_hash('Q4zqM7aXqm7gDQkUVLng9hQ4zqM7aXqm7gDQkUVLng90')

    assert ok_schema_id('Q4zqM7aXqm7gDQkUVLng9h:2:bc-reg:1.0')
    assert not ok_schema_id('Q4zqM7aXqm7gDQkUVLng9h:3:bc-reg:1.0')
    assert not ok_schema_id('Q4zqM7aXqm7gDQkUVLng9h::bc-reg:1.0')
    assert not ok_schema_id('Q4zqM7aXqm7gDQkUVLng9h:bc-reg:1.0')
    assert not ok_schema_id('Q4zqM7aXqm7gDQkUVLng9h:2:1.0')
    assert not ok_schema_id('Q4zqM7aXqm7gDQkUVLng9h:2::1.0')
    assert not ok_schema_id('Q4zqM7aXqm7gDQkUVLng9h:2:bc-reg:')
    assert not ok_schema_id('Q4zqM7aXqm7gDQkUVLng9h:2:bc-reg:1.0a')
    assert not ok_schema_id('Q4zqM7aXqm7gDQkUVLng9I:2:bc-reg:1.0')  # I is not in base58

    assert ok_cred_def_id('Q4zqM7aXqm7gDQkUVLng9h:3:CL:18:tag')  # protocol >= 1.4
    assert not ok_cred_def_id('Q4zqM7aXqm7gDQkUVLng9h:4:CL:18:0')
    assert not ok_cred_def_id('Q4zqM7aXqm7gDQkUVLng9h::CL:18:0')
    assert not ok_cred_def_id('Q4zqM7aXqm7gDQkUVLng9I:3:CL:18:tag')
    assert not ok_cred_def_id('Q4zqM7aXqm7gDQkUVLng9h:3::18:tag')
    assert not ok_cred_def_id('Q4zqM7aXqm7gDQkUVLng9h:3:18:tag')
    assert not ok_cred_def_id('Q4zqM7aXqm7gDQkUVLng9h:3:CL:18z:tag')
    assert ok_cred_def_id('Q4zqM7aXqm7gDQkUVLng9h:3:CL:18')  # protocol == 1.3

    assert ok_rev_reg_id('LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:20:tag:CL_ACCUM:1')  # protocol >= 1.4
    assert not ok_rev_reg_id('LjgpST2rjsoxYegQDRm7EL:5:LjgpST2rjsoxYegQDRm7EL:3:CL:20:tag:CL_ACCUM:1')
    assert not ok_rev_reg_id('LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:4:CL:20:0:CL_ACCUM:1')
    assert not ok_rev_reg_id('LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL::CL:20:0:CL_ACCUM:1')
    assert not ok_rev_reg_id('LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:NOT_CL:20:tag:CL_ACCUM:1')
    assert not ok_rev_reg_id('LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:20z:tag:CL_ACCUM:1')
    assert not ok_rev_reg_id('LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:20::CL_ACCUM:1')
    assert not ok_rev_reg_id('LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:20:tag::1')
    assert not ok_rev_reg_id('LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:20:tag:1')
    assert not ok_rev_reg_id('LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:20:tag:CL_ACCUM:')
    assert not ok_rev_reg_id('LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:20:tag:CL_ACCUM')
    assert ok_rev_reg_id('LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:20:CL_ACCUM:1')  # protocol == 1.3
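The positive and negative cases above imply the identifier layouts under test; a hedged sketch (sketch_cred_def_id and sketch_rev_reg_id are hypothetical helpers, not library code):

def sketch_cred_def_id(origin_did, schema_seq_no, tag='tag'):
    # protocol >= 1.4 layout implied above: <did>:3:CL:<schema seq no>:<tag>
    return '{}:3:CL:{}:{}'.format(origin_did, schema_seq_no, tag)

def sketch_rev_reg_id(origin_did, cred_def_id, tag='1'):
    # protocol >= 1.4 layout implied above: <did>:4:<cred def id>:CL_ACCUM:<tag>
    return '{}:4:{}:CL_ACCUM:{}'.format(origin_did, cred_def_id, tag)

assert ok_cred_def_id(sketch_cred_def_id('Q4zqM7aXqm7gDQkUVLng9h', 18))
assert ok_rev_reg_id(sketch_rev_reg_id(
    'LjgpST2rjsoxYegQDRm7EL',
    sketch_cred_def_id('LjgpST2rjsoxYegQDRm7EL', 20)))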
Code Example #8
async def test_encode():
    print(Ink.YELLOW('\n\n== Testing encode for string of length up to 1024'))

    for printable_len in range(0, 1025):
        orig = ''.join(choice(printable) for _ in range(printable_len))
        print(
            '.',
            end='' if (printable_len + 1) % 100 else '{}\n'.format(printable_len),
            flush=True)
        enc = encode(orig)
        assert int(log(max(int(enc), 1)) / log(2)) < 256
    print('\n\n== Random printable string test passed')

    print('\n\n== Typical cases - (type) orig -> encoded:')
    for orig in (chr(0), chr(1), chr(2), 'Alice', 'Bob', 'J.R. "Bob" Dobbs',
                 None, True, False, -5, 0, 1024, 2**31 - 1, 2**31, 2**31 + 1,
                 -2**31 - 1, -2**31, -2**31 + 1, 0.0, '0.0', 0.1, -0.1,
                 -1.9234856120348166e+37, 1.9234856120348166e+37,
                 -19234856120348165921835629183561023142.55,
                 19234856120348165921835629183561023142.55, float_info.max,
                 'Hello', '', 'True', 'False', '1234', '-12345', [],
                 [0, 1, 2, 3], {
                     'a': 1,
                     'b': 2,
                     'c': 3
                 }, [{}, {
                     'a': [0, 0.1],
                     'b': [0.0, float_info.min]
                 }, True]):
        enc = encode(orig)
        print('  ({})({}) -> {}'.format(
            type(orig).__name__,
            '0x{:02x}'.format(ord(orig)) if orig in (chr(0), chr(1), chr(2))
                else '%f' % orig if isinstance(orig, float)
                else orig,
            enc))
        assert isinstance(enc, str)
        assert re.match(r'-?[0-9]+$', enc)
        if int(enc) == orig:
            assert isinstance(orig, int) and (-I32_BOUND <= orig < I32_BOUND)  # includes bools
        else:
            assert not (isinstance(orig, int) and (-I32_BOUND <= orig < I32_BOUND))
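Read together, the assertions imply a simple contract: encode() always returns a decimal string whose value stays below 2**256, integers in the 32-bit range (including bools) encode to their own value, and every other input maps to some other large integer. A spot check under that reading:

assert int(encode(42)) == 42          # 32-bit-range int: value preserved
assert int(encode('Alice')) < 2**256  # non-integer input: large integer below 2**256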
Code Example #9
File: test_wallet.py Project: nrempel/von_anchor
async def test_local_dids():

    print(Ink.YELLOW('\n\n== Testing local DID operations =='))

    wallets = await get_wallets(
        {
            'multipass': '******',
        },
        open_all=False,
        auto_remove=True)

    # Open wallet and operate
    async with wallets['multipass'] as w:
        did_info = await w.create_local_did(None, '55GkHamhTU1ZbTbV2ab9DE')
        print('\n\n== 1 == Created local known DID: {}'.format(did_info))
        assert did_info.did and did_info.verkey and len(did_info.metadata) == 1  # 'since'
        assert did_info == await w.get_local_did_info(did_info.did)
        assert did_info == await w.get_local_did_info(did_info.verkey)

        did_info = await w.create_local_did()
        print('\n\n== 2 == Created random local DID: {}'.format(did_info))
        assert did_info.did and did_info.verkey and len(did_info.metadata) == 1
        assert did_info == await w.get_local_did_info(did_info.did)
        assert did_info == await w.get_local_did_info(did_info.verkey)

        did_info = await w.create_local_did('Agent-44-00000000000000000000000')
        print('\n\n== 3 == Created local DID on seed: {}'.format(did_info))
        assert did_info.did and did_info.verkey and len(did_info.metadata)
        assert did_info == await w.get_local_did_info(did_info.did)
        assert did_info == await w.get_local_did_info(did_info.verkey)

        did_info = await w.create_local_did(metadata={'hello': 'world'})
        print('\n\n== 4 == Created random local DID with metadata: {}'.format(did_info))
        assert did_info.did and did_info.verkey and len(did_info.metadata) == 2
        assert did_info == await w.get_local_did_info(did_info.did)
        assert did_info == await w.get_local_did_info(did_info.verkey)

        did_info = await w.create_local_did('Agent-13-00000000000000000000000', metadata={'hello': 'world'})
        print('\n\n== 5 == Created local DID on seed with metadata: {}'.format(did_info))
        assert did_info.did and did_info.verkey and len(did_info.metadata) == 2
        assert did_info == await w.get_local_did_info(did_info.did)
        assert did_info == await w.get_local_did_info(did_info.verkey)
Code Example #10
async def test_formalisms(pool_ip, pool_name, pool_genesis_txn_data,
                          seed_trustee1, path_setnym_ini, setnym_ini_file):

    print(Ink.YELLOW('\n\n== Testing usage screed and data structures'))

    # Run setnym with no parameters to engage usage message
    sub_proc = subprocess.run([
        'python',
        join(dirname(dirname(dirname(realpath(__file__)))), 'von_anchor', 'op',
             'setnym.py')
    ],
                              stdout=subprocess.PIPE,
                              stderr=subprocess.DEVNULL)
    assert sub_proc.returncode == 1
    print('\n\n== 1 == Missing parameter invokes usage message OK')

    # Exercise namedtuples for syntax
    nodepool_data = NodePoolData('name', None)
    anchor_data = AnchorData('role', 'name', 'seed', 'did', 'wallet_create',
                             'wallet_type', 'wallet_access')
    print('\n\n== 2 == Data structures create OK')
Code Example #11
async def test_pool_open(path_home, pool_name, pool_genesis_txn_data, pool_ip):

    print(
        Ink.YELLOW(
            '\n\n== Testing Node Pool Config vs. IP {} =='.format(pool_ip)))

    try:
        NodePool(pool_name, config={'extra': 'not allowed'})
        assert False
    except JSONValidation:
        pass

    # Set up node pool ledger config and wallets, open pool, init anchors
    manager = NodePoolManager()
    if pool_name not in await manager.list():
        await manager.add_config(pool_name, pool_genesis_txn_data)
    pool = manager.get(pool_name)
    await pool.open()
    assert pool.handle is not None
    await pool.close()

    print(
        '\n\n== 1 == Pool {} opens and closes OK from existing ledger configuration'
        .format(pool_name))
Code Example #12
async def test_setnym(pool_ip, pool_name, pool_genesis_txn_data, seed_trustee1,
                      path_setnym_ini, setnym_ini_file):

    print(
        Ink.YELLOW('\n\n== Testing setnym operation on node pool {} =='.format(
            pool_ip)))

    with open(path_setnym_ini, 'r') as cfg_fh:
        print('\n\n== 1 == Initial configuration:\n{}'.format(cfg_fh.read()))
    cfg = inis2dict(str(path_setnym_ini))

    # Set up node pool ledger config and wallets, open pool, init anchors
    p_mgr = NodePoolManager()
    if pool_name not in await p_mgr.list():
        await p_mgr.add_config(pool_name, pool_genesis_txn_data)

    wallets = await get_wallets(
        {
            'trustee-anchor': {
                'seed': seed_trustee1
            },
            cfg['VON Anchor']['name']: {
                'seed': cfg['VON Anchor']['seed']
            },
            'x-anchor': {
                'seed': 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
            }
        },
        open_all=True)

    try:
        async with NominalAnchor(wallets['x-anchor']) as xan:
            await xan.get_nym()
    except AbsentPool:
        pass
    wallets.pop('x-anchor')

    # Open pool, check if nym already present
    pool = p_mgr.get(pool_name)
    await pool.open()
    assert pool.handle

    tan = TrusteeAnchor(wallets['trustee-anchor'], pool)
    await tan.open()

    noman = NominalAnchor(wallets[cfg['VON Anchor']['name']], pool)

    nym = json.loads(await noman.get_nym(noman.did))
    print('\n\n== 2 == Nym {} on ledger for anchor {} on DID {}'.format(
        '{} already'.format(ppjson(nym)) if nym else 'not yet',
        noman.wallet.name, noman.did))

    await tan.close()
    await pool.close()

    # Run setnym on initial configuration, check ledger
    sub_proc = subprocess.run([
        'python',
        join(dirname(dirname(dirname(realpath(__file__)))), 'von_anchor', 'op',
             'setnym.py'),
        str(path_setnym_ini)
    ],
                              stdout=subprocess.PIPE,
                              stderr=subprocess.DEVNULL)
    assert not sub_proc.returncode
    print('\n\n== 3 == Set nym with TRUST_ANCHOR role on {} for {}'.format(
        noman.did, noman.wallet.name))

    await pool.open()
    await noman.open()
    nym = json.loads(await noman.get_nym(noman.did))
    assert nym and Role.get(nym['role']) == Role.TRUST_ANCHOR
    print('\n\n== 4 == Got nym transaction from ledger for DID {} ({}): {}'.
          format(noman.did, noman.wallet.name, ppjson(nym)))
    await noman.close()
    await pool.close()

    # Run setnym on configuration with DID and explicit storage type, check ledger
    with open(path_setnym_ini, 'w+') as ini_fh:
        for section in cfg:
            print('[{}]'.format(section), file=ini_fh)
            for (key, value) in cfg[section].items():
                if section == 'VON Anchor':
                    if key == 'seed':
                        print('did={}'.format(noman.did), file=ini_fh)
                    elif key == 'wallet.type':
                        print('wallet.type=default', file=ini_fh)
                    else:
                        print('{}={}'.format(key, value), file=ini_fh)
                else:
                    print('{}={}'.format(key, value), file=ini_fh)
            print(file=ini_fh)
    with open(path_setnym_ini, 'r') as cfg_fh:
        print(
            '\n\n== 5 == Next configuration, on DID instead of seed and explicit wallet type:\n{}'
            .format(cfg_fh.read()))

    sub_proc = subprocess.run([
        'python',
        join(dirname(dirname(dirname(realpath(__file__)))), 'von_anchor', 'op',
             'setnym.py'),
        str(path_setnym_ini)
    ],
                              stdout=subprocess.PIPE,
                              stderr=subprocess.DEVNULL)
    assert not sub_proc.returncode
    print('\n\n== 6 == Set nym with TRUST_ANCHOR role on {} for {}'.format(
        noman.did, noman.wallet.name))

    await pool.open()
    await noman.open()
    nym = json.loads(await noman.get_nym(noman.did))
    assert nym and Role.get(nym['role']) == Role.TRUST_ANCHOR
    last_nym_seqno = nym['seqNo']
    print('\n\n== 7 == Got nym transaction from ledger for DID {} ({}): {}'.
          format(noman.did, noman.wallet.name, ppjson(nym)))
    await noman.close()
    await pool.close()

    # Run setnym on configuration with no seeds nor VON Anchor role, check ledger
    with open(path_setnym_ini, 'w+') as ini_fh:
        for section in cfg:
            print('[{}]'.format(section), file=ini_fh)
            for (key, value) in cfg[section].items():
                if key in ('seed', 'genesis.txn.path'):
                    continue
                print('{}={}'.format(
                    key, '${X_ROLE:-}' if key == 'role' else value),
                      file=ini_fh)  # exercise default
            print(file=ini_fh)
    with open(path_setnym_ini, 'r') as cfg_fh:
        print(
            '\n\n== 8 == Next configuration, no seeds, no VON Anchor role:\n{}'
            .format(cfg_fh.read()))

    sub_proc = subprocess.run([
        'python',
        join(dirname(dirname(dirname(realpath(__file__)))), 'von_anchor', 'op',
             'setnym.py'),
        str(path_setnym_ini)
    ],
                              stdout=subprocess.PIPE,
                              stderr=subprocess.DEVNULL)
    assert not sub_proc.returncode
    print('\n\n== 9 == Set nym with USER role on {} for {}'.format(
        noman.did, noman.wallet.name))

    await pool.open()
    await noman.open()
    nym = json.loads(await noman.get_nym(noman.did))
    assert nym and Role.get(nym['role']) == Role.USER
    last_nym_seqno = nym['seqNo']
    print('\n\n== 10 == Got nym transaction from ledger for DID {} ({}): {}'.
          format(noman.did, noman.wallet.name, ppjson(nym)))
    await noman.close()
    await pool.close()

    # Run again to check idempotence
    sub_proc = subprocess.run([
        'python',
        join(dirname(dirname(dirname(realpath(__file__)))), 'von_anchor', 'op',
             'setnym.py'),
        str(path_setnym_ini)
    ],
                              stdout=subprocess.PIPE,
                              stderr=subprocess.DEVNULL)
    assert not sub_proc.returncode
    print('\n\n== 11 == Set nym again with default role on {} for {}'.format(
        noman.did, noman.wallet.name))

    await pool.open()
    await noman.open()
    nym = json.loads(await noman.get_nym(noman.did))
    last_nym_seqno = nym['seqNo']
    print(
        '\n\n== 12 == Got (same) nym transaction from ledger for DID {} ({}): {}'
        .format(noman.did, noman.wallet.name, ppjson(nym)))
    await noman.close()
    await pool.close()

    # Run setnym on configuration with no seeds and bad VON Anchor role, check ledger
    with open(path_setnym_ini, 'w+') as ini_fh:
        for section in cfg:
            print('[{}]'.format(section), file=ini_fh)
            for (key, value) in cfg[section].items():
                if key in ('seed', 'genesis.txn.path'):
                    continue
                print('{}={}'.format(key,
                                     'BAD_ROLE' if key == 'role' else value),
                      file=ini_fh)
            print(file=ini_fh)
    with open(path_setnym_ini, 'r') as cfg_fh:
        print(
            '\n\n== 13 == Next configuration, no seeds, bad VON Anchor role:\n{}'
            .format(cfg_fh.read()))

    sub_proc = subprocess.run([
        'python',
        join(dirname(dirname(dirname(realpath(__file__)))), 'von_anchor', 'op',
             'setnym.py'),
        str(path_setnym_ini)
    ],
                              stdout=subprocess.PIPE,
                              stderr=subprocess.DEVNULL)
    assert sub_proc.returncode
    assert str(int(ErrorCode.BadRole)) in sub_proc.stdout.decode()
    print(
        '\n\n== 14 == Called to set bad role for {}, got error text {}'.format(
            noman.wallet.name, sub_proc.stdout.decode()))

    await pool.open()
    await noman.open()
    nym = json.loads(await noman.get_nym(noman.did))
    noman_role = await noman.get_nym_role()
    assert nym and nym['seqNo'] == last_nym_seqno
    await noman.close()
    await pool.close()
    print('\n\n== 15 == Got nym transaction from ledger for DID {} ({}): {}'.
          format(noman.did, noman.wallet.name, ppjson(nym)))

    # Exercise reseed, ensure no side effect to role on ledger
    await pool.open()
    pan = ProctorAnchor(wallets[cfg['VON Anchor']['name']], pool, rrbx=False)
    await pan.open()
    next_seed = '{}000000000000VonAnchor1'.format(int(time()) + 1)
    await pan.reseed(next_seed)
    nym = json.loads(await pan.get_nym(noman.did))
    pan_role = await pan.get_nym_role()
    await pool.close()
    assert nym and nym['seqNo'] != last_nym_seqno
    assert pan_role == noman_role
    print(
        '\n\n== 16 == As Proctor Anchor, reseeded, then got nym transaction from ledger for DID {} ({}): {}'
        .format(pan.did, pan.wallet.name, ppjson(nym)))
    last_nym_seqno = nym['seqNo']

    # Run setnym on configuration with same wallet for trustee and VON anchor
    with open(path_setnym_ini, 'w+') as ini_fh:
        for section in cfg:
            print('[{}]'.format(section), file=ini_fh)
            for (key, value) in cfg[section].items():
                if section == 'VON Anchor' and key == 'name':
                    print('{}={}'.format(key, cfg['Trustee Anchor']['name']),
                          file=ini_fh)
                else:
                    print('{}={}'.format(key, value), file=ini_fh)
            print(file=ini_fh)
    with open(path_setnym_ini, 'r') as cfg_fh:
        print(
            '\n\n== 17 == Next configuration, same wallet for trustee anchor and VON anchor:\n{}'
            .format(cfg_fh.read()))

    sub_proc = subprocess.run([
        'python',
        join(dirname(dirname(dirname(realpath(__file__)))), 'von_anchor', 'op',
             'setnym.py'),
        str(path_setnym_ini)
    ],
                              stdout=subprocess.PIPE,
                              stderr=subprocess.DEVNULL)
    assert sub_proc.returncode
    assert str(int(ErrorCode.ExtantWallet)) in sub_proc.stdout.decode()
    print(
        '\n\n== 18 == Called with same wallet for trustee anchor and VON anchor, got error text {}'
        .format(sub_proc.stdout.decode()))

    # Run setnym on configuration with new ledger node pool configuration
    genesis_tmp = NamedTemporaryFile(mode='w+b', buffering=0, delete=False)
    with genesis_tmp:
        genesis_tmp.write(pool_genesis_txn_data.encode())
    pool_copy = '{}.{}'.format(cfg['Node Pool']['name'], int(time()))
    with open(path_setnym_ini, 'w+') as ini_fh:
        for section in cfg:
            print('[{}]'.format(section), file=ini_fh)
            for (key, value) in cfg[section].items():
                if section == 'Node Pool':
                    if key == 'name':
                        print('name={}'.format(pool_copy), file=ini_fh)
                    elif key == 'genesis.txn.path':
                        print('genesis.txn.path={}'.format(genesis_tmp.name),
                              file=ini_fh)  # includes /tmp/ path
                    else:
                        print('{}={}.xxx'.format(key, value), file=ini_fh)
                else:
                    print('{}={}'.format(key, value), file=ini_fh)
            print(file=ini_fh)
    with open(path_setnym_ini, 'r') as cfg_fh:
        print(
            '\n\n== 19 == Next configuration, calling for copy of node pool ledger config:\n{}'
            .format(cfg_fh.read()))

    sub_proc = subprocess.run([
        'python',
        join(dirname(dirname(dirname(realpath(__file__)))), 'von_anchor', 'op',
             'setnym.py'),
        str(path_setnym_ini)
    ],
                              stdout=subprocess.PIPE,
                              stderr=subprocess.DEVNULL)
    assert not sub_proc.returncode
    print('\n\n== 20 == Called for new copy {} of node pool ledger config'.
          format(pool_copy))

    unlink(genesis_tmp.name)
    await p_mgr.remove(pool_copy)
    await pool.open()
    await pan.open()
    nym = json.loads(await pan.get_nym(pan.did))
    assert nym and Role.get(nym['role']) == Role.TRUST_ANCHOR
    assert nym and nym['seqNo'] != last_nym_seqno
    print('\n\n== 21 == Got nym transaction from ledger for DID {} ({}): {}'.
          format(pan.did, pan.wallet.name, ppjson(nym)))
    await pan.close()
    await pool.close()

    # Run setnym on configuration with wrong genesis transaction path
    with open(path_setnym_ini, 'w+') as ini_fh:
        for section in cfg:
            print('[{}]'.format(section), file=ini_fh)
            for (key, value) in cfg[section].items():
                if section == 'Node Pool':
                    print('{}={}.xxx'.format(key, value), file=ini_fh)
                else:
                    print('{}={}'.format(key, value), file=ini_fh)
            print(file=ini_fh)
    with open(path_setnym_ini, 'r') as cfg_fh:
        print(
            '\n\n== 22 == Next configuration, missing pool and bad genesis txn path:\n{}'
            .format(cfg_fh.read()))

    sub_proc = subprocess.run([
        'python',
        join(dirname(dirname(dirname(realpath(__file__)))), 'von_anchor', 'op',
             'setnym.py'),
        str(path_setnym_ini)
    ],
                              stdout=subprocess.PIPE,
                              stderr=subprocess.DEVNULL)
    assert sub_proc.returncode
    assert str(int(ErrorCode.AbsentPool)) in sub_proc.stdout.decode()
    print(
        '\n\n== 23 == Called with missing pool and bad genesis txn path, got error text {}'
        .format(sub_proc.stdout.decode()))

    # Run setnym on configuration with no node pool ledger configuration
    with open(path_setnym_ini, 'w+') as ini_fh:
        for section in cfg:
            print('[{}]'.format(section), file=ini_fh)
            for (key, value) in cfg[section].items():
                if section == 'Node Pool':
                    if key == 'name':
                        print('{}={}.xxx'.format(key, value), file=ini_fh)
                    elif key == 'genesis.txn.path':
                        print('genesis.txn.path=', file=ini_fh)
                    else:
                        print('{}={}'.format(key, value), file=ini_fh)
                else:
                    print('{}={}'.format(key, value), file=ini_fh)
            print(file=ini_fh)
    with open(path_setnym_ini, 'r') as cfg_fh:
        print(
            '\n\n== 24 == Next configuration, missing pool and no genesis txn path:\n{}'
            .format(cfg_fh.read()))

    sub_proc = subprocess.run([
        'python',
        join(dirname(dirname(dirname(realpath(__file__)))), 'von_anchor', 'op',
             'setnym.py'),
        str(path_setnym_ini)
    ],
                              stdout=subprocess.PIPE,
                              stderr=subprocess.DEVNULL)
    assert sub_proc.returncode
    assert str(int(ErrorCode.AbsentPool)) in sub_proc.stdout.decode()
    print(
        '\n\n== 25 == Called with missing pool and no genesis txn path, got error text {}'
        .format(sub_proc.stdout.decode()))

    # Run again without trustee anchor wallet present
    await wallets['trustee-anchor'].close()
    await wallets['trustee-anchor'].remove()
    wallets.pop('trustee-anchor')
    noman = NominalAnchor(wallets[cfg['VON Anchor']['name']], pool)

    with open(path_setnym_ini, 'w+') as ini_fh:
        for section in cfg:
            print('[{}]'.format(section), file=ini_fh)
            for (key, value) in cfg[section].items():
                print('{}={}'.format(key, value), file=ini_fh)
            print(file=ini_fh)
    with open(path_setnym_ini, 'r') as cfg_fh:
        print(
            '\n\n== 26 == Set VON anchor configuration, no Trustee anchor wallet a priori:\n{}'
            .format(cfg_fh.read()))

    sub_proc = subprocess.run([
        'python',
        join(dirname(dirname(dirname(realpath(__file__)))), 'von_anchor', 'op',
             'setnym.py'),
        str(path_setnym_ini)
    ],
                              stdout=subprocess.PIPE,
                              stderr=subprocess.DEVNULL)
    assert not sub_proc.returncode
    print('\n\n== 27 == Set nym with TRUST_ANCHOR role on {} for {}'.format(
        noman.did, noman.wallet.name))

    await pool.open()
    await noman.open()
    nym = json.loads(await noman.get_nym(noman.did))
    assert nym and Role.get(nym['role']) == Role.TRUST_ANCHOR
    print('\n\n== 28 == Got nym transaction from ledger for DID {} ({}): {}'.
          format(noman.did, noman.wallet.name, ppjson(nym)))
    await noman.close()
    await pool.close()

    await pan.close()
    for name in wallets:
        await wallets[name].close()
Code Example #13
File: test_a2a.py Project: nrempel/von_anchor
async def test_a2a():
    print(Ink.YELLOW('\n\n== Testing DID Doc wranglers =='))

    # One authn key by reference
    dd_in = {
        '@context': 'https://w3id.org/did/v1',
        'id': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
        'publicKey': [
            {
                'id': '3',
                'type': 'RsaVerificationKey2018',
                'controller': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
                'publicKeyPem': '-----BEGIN PUBLIC X...'
            },
            {
                'id': '4',
                'type': 'RsaVerificationKey2018',
                'controller': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
                'publicKeyPem': '-----BEGIN PUBLIC 9...'
            },
            {
                'id': '6',
                'type': 'RsaVerificationKey2018',
                'controller': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
                'publicKeyPem': '-----BEGIN PUBLIC A...'
            }
        ],
        'authentication': [
            {
                'type': 'RsaSignatureAuthentication2018',
                'publicKey': 'did:sov:LjgpST2rjsoxYegQDRm7EL#4'
            }
        ],
        'service': [
            {
                'id': '0',
                'type': 'Agency',
                'serviceEndpoint': 'did:sov:Q4zqM7aXqm7gDQkUVLng9h'
            }
        ]
    }

    dd = DIDDoc.deserialize(dd_in)
    assert len(dd.pubkey) == len(dd_in['publicKey'])
    assert len(dd.authnkey) == len(dd_in['authentication'])

    dd_out = dd.serialize()
    print('\n\n== 1 == DID Doc on abbreviated identifiers: {}'.format(ppjson(dd_out)))

    # One authn key embedded, all possible refs canonical
    dd_in = {
        '@context': 'https://w3id.org/did/v1',
        'id': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
        'publicKey': [
            {
                'id': '3',
                'type': 'RsaVerificationKey2018',
                'controller': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
                'publicKeyPem': '-----BEGIN PUBLIC X...'
            },
            {
                'id': 'did:sov:LjgpST2rjsoxYegQDRm7EL#4',
                'type': 'RsaVerificationKey2018',
                'controller': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
                'publicKeyPem': '-----BEGIN PUBLIC 9...'
            }
        ],
        'authentication': [
            {
                'type': 'RsaSignatureAuthentication2018',
                'publicKey': 'did:sov:LjgpST2rjsoxYegQDRm7EL#4'
            },
            {
                'id': 'did:sov:LjgpST2rjsoxYegQDRm7EL#6',
                'type': 'RsaVerificationKey2018',
                'controller': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
                'publicKeyPem': '-----BEGIN PUBLIC A...'
            }
        ],
        'service': [
            {
                'id': 'did:sov:LjgpST2rjsoxYegQDRm7EL;0',
                'type': 'Agency',
                'serviceEndpoint': 'https://www.von.ca'
            }
        ]
    }

    dd = DIDDoc.deserialize(dd_in)
    assert len(dd.pubkey) == len(dd_in['publicKey']) + 1
    assert len(dd.authnkey) == len(dd_in['authentication'])

    dd_out = dd.serialize()
    print('\n\n== 2 == DID Doc on mixed reference styles, embedded and ref style authn keys: {}'.format(ppjson(dd_out)))

    # All references canonical where possible; one authn key embedded and one by reference
    dd_in = {
        '@context': 'https://w3id.org/did/v1',
        'id': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
        'publicKey': [
            {
                'id': 'did:sov:LjgpST2rjsoxYegQDRm7EL#3',
                'type': 'RsaVerificationKey2018',
                'controller': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
                'publicKeyPem': '-----BEGIN PUBLIC X...'
            },
            {
                'id': 'did:sov:LjgpST2rjsoxYegQDRm7EL#4',
                'type': 'RsaVerificationKey2018',
                'controller': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
                'publicKeyPem': '-----BEGIN PUBLIC 9...'
            }
        ],
        'authentication': [
            {
                'type': 'RsaSignatureAuthentication2018',
                'publicKey': 'did:sov:LjgpST2rjsoxYegQDRm7EL#4'
            },
            {
                'id': 'did:sov:LjgpST2rjsoxYegQDRm7EL#6',
                'type': 'RsaVerificationKey2018',
                'controller': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
                'publicKeyPem': '-----BEGIN PUBLIC A...'
            }
        ],
        'service': [
            {
                'id': 'did:sov:LjgpST2rjsoxYegQDRm7EL;0',
                'type': 'DidMessaging',
                'serviceEndpoint': 'https://www.von.ca'
            }
        ]
    }

    dd = DIDDoc.deserialize(dd_in)
    assert len(dd.pubkey) == len(dd_in['publicKey']) + 1
    assert len(dd.authnkey) == len(dd_in['authentication'])

    dd_out = dd.serialize()
    print('\n\n== 3 == DID Doc on canonical refs: {}'.format(ppjson(dd_out)))

    # Minimal as per indy-agent test suite without explicit identifiers
    dd_in = {
        '@context': 'https://w3id.org/did/v1',
        'publicKey': [
            {
                'id': 'LjgpST2rjsoxYegQDRm7EL#keys-1',
                'type': 'Ed25519VerificationKey2018',
                'controller': 'LjgpST2rjsoxYegQDRm7EL',
                'publicKeyBase58': '~XXXXXXXXXXXXXXXX'
            }
        ],
        'service': [
            {
                'type': 'DidMessaging',
                'recipientKeys': ['~XXXXXXXXXXXXXXXX'],
                'serviceEndpoint': 'https://www.von.ca'
            }
        ]
    }

    dd = DIDDoc.deserialize(dd_in)
    assert len(dd.pubkey) == len(dd_in['publicKey'])
    assert len(dd.authnkey) == 0

    dd_out = dd.serialize()
    print('\n\n== 4 == DID Doc minimal style, implicit DID document identifier: {}'.format(
        ppjson(dd_out)))

    # Minimal + ids as per indy-agent test suite with explicit identifiers; novel service recipient key on raw base58
    dd_in = {
        '@context': 'https://w3id.org/did/v1',
        'id': 'LjgpST2rjsoxYegQDRm7EL',
        'publicKey': [
            {
                'id': 'LjgpST2rjsoxYegQDRm7EL#keys-1',
                'type': 'Ed25519VerificationKey2018',
                'controller': 'LjgpST2rjsoxYegQDRm7EL',
                'publicKeyBase58': '~XXXXXXXXXXXXXXXX'
            }
        ],
        'service': [
            {
                'id': 'LjgpST2rjsoxYegQDRm7EL;indy',
                'type': 'DidMessaging',
                'priority': 1,
                'recipientKeys': ['~YYYYYYYYYYYYYYYY'],
                'serviceEndpoint': 'https://www.von.ca'
            }
        ]
    }

    dd = DIDDoc.deserialize(dd_in)
    assert len(dd.pubkey) == 1 + len(dd_in['publicKey'])
    assert len(dd.authnkey) == 0

    dd_out = dd.serialize()
    print('\n\n== 5 == DID Doc minimal style plus explicit identifiers and novel raw base58 service recipient key: {}'.format(
        ppjson(dd_out)))

    # Explicit identifiers, multiple public keys and services, mixing recipient and routing key references
    dd_in = {
        '@context': 'https://w3id.org/did/v1',
        'id': 'LjgpST2rjsoxYegQDRm7EL',
        'publicKey': [
            {
                'id': 'LjgpST2rjsoxYegQDRm7EL#keys-1',
                'type': 'Ed25519VerificationKey2018',
                'controller': 'LjgpST2rjsoxYegQDRm7EL',
                'publicKeyBase58': '~XXXXXXXXXXXXXXXX'
            },
            {
                'id': 'LjgpST2rjsoxYegQDRm7EL#keys-2',
                'type': 'Ed25519VerificationKey2018',
                'controller': 'LjgpST2rjsoxYegQDRm7EL',
                'publicKeyBase58': '~YYYYYYYYYYYYYYYY'
            },
            {
                'id': 'did:sov:LjgpST2rjsoxYegQDRm7EL#keys-3',
                'type': 'RsaVerificationKey2018',
                'controller': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
                'publicKeyPem': '-----BEGIN PUBLIC A...'
            }
        ],
        'service': [
            {
                'id': 'LjgpST2rjsoxYegQDRm7EL;indy',
                'type': 'DidMessaging',
                'priority': 0,
                'recipientKeys': ['~ZZZZZZZZZZZZZZZZ'],
                'serviceEndpoint': 'did:sov:LjgpST2rjsoxYegQDRm7EL;1'
            },
            {
                'id': '1',
                'type': 'one',
                'priority': 1,
                'recipientKeys': [
                    '~XXXXXXXXXXXXXXXX',
                    'did:sov:LjgpST2rjsoxYegQDRm7EL#keys-1'
                ],
                'routingKeys': [
                    'did:sov:LjgpST2rjsoxYegQDRm7EL#keys-3'
                ],
                'serviceEndpoint': 'LjgpST2rjsoxYegQDRm7EL;2'
            },
            {
                'id': '2',
                'type': 'two',
                'priority': 2,
                'recipientKeys': [
                    '~XXXXXXXXXXXXXXXX',
                    'did:sov:LjgpST2rjsoxYegQDRm7EL#keys-1'
                ],
                'routingKeys': [
                    'did:sov:LjgpST2rjsoxYegQDRm7EL#keys-3'
                ],
                'serviceEndpoint': 'https://www.two.ca/two'
            }
        ]
    }

    dd = DIDDoc.deserialize(dd_in)
    assert len(dd.pubkey) == 1 + len(dd_in['publicKey'])
    assert len(dd.authnkey) == 0
    assert {s.priority for s in dd.service.values()} == {0, 1, 2}
    assert len(dd.service) == 3

    dd_out = dd.serialize()
    print('\n\n== 6 == DID Doc on mixed service routing and recipient keys: {}'.format(
        ppjson(dd_out)))

    # Exercise missing service recipient key
    dd_in = {
        '@context': 'https://w3id.org/did/v1',
        'id': 'LjgpST2rjsoxYegQDRm7EL',
        'publicKey': [
            {
                'id': 'LjgpST2rjsoxYegQDRm7EL#keys-1',
                'type': 'Ed25519VerificationKey2018',
                'controller': 'LjgpST2rjsoxYegQDRm7EL',
                'publicKeyBase58': '~XXXXXXXXXXXXXXXX'
            }
        ],
        'service': [
            {
                'id': 'LjgpST2rjsoxYegQDRm7EL;indy',
                'type': 'DidMessaging',
                'priority': 1,
                'recipientKeys': [
                    'did:sov:LjgpST2rjsoxYegQDRm7EL#keys-3'
                ],
                'serviceEndpoint': 'https://www.von.ca'
            }
        ]
    }

    try:
        dd = DIDDoc.deserialize(dd_in)
        assert False
    except AbsentDIDDocItem:
        pass
    print('\n\n== 7 == DID Doc on underspecified service key fails as expected')
Code Example #14
async def test_manager(path_home, pool_genesis_txn_data, pool_ip):

    print(
        Ink.YELLOW(
            '\n\n== Testing Node Pool Manager vs. IP {} =='.format(pool_ip)))

    # Create node pool manager
    manager = NodePoolManager()
    assert manager.protocol == Protocol.DEFAULT

    # Create new pool on raw data
    name = 'pool-{}'.format(int(time()))
    assert name not in await manager.list()
    print('\n\n== 1 == Pool {} not initially configured'.format(name))

    await manager.add_config(name, pool_genesis_txn_data)
    assert name in await manager.list()
    print(
        '\n\n== 2 == Added pool {} configuration on genesis transaction data'.
        format(name))

    try:
        await manager.add_config(name, pool_genesis_txn_data)
        assert False
    except ExtantPool:
        pass

    try:
        pool = manager.get('no-such-pool.{}'.format(int(time())))
        await pool.open()
        assert False
    except AbsentPool:
        pass

    pool = manager.get(name)
    await pool.open()
    await pool.refresh()
    assert pool.handle is not None
    await pool.close()
    print(
        '\n\n== 3 == Opened, refreshed, and closed pool {} on default configuration'
        .format(name))

    cache_id = pool.cache_id
    sleep(1)
    x_name = 'pool-{}'.format(int(time()))
    await manager.add_config(
        x_name,
        '\n'.join(pool_genesis_txn_data.split('\n')[::-1]))
    x_pool = manager.get(x_name)
    assert x_pool.cache_id == cache_id
    await manager.remove(x_name)
    print('\n\n== 4 == Confirmed cache id consistency: {}'.format(cache_id))

    pool = manager.get(name, {'timeout': 3600, 'extended_timeout': 7200})
    await pool.open()
    await pool.refresh()
    assert pool.handle is not None
    await pool.close()
    print(
        '\n\n== 5 == Opened, refreshed, and closed pool {} on explicit configuration'
        .format(name))

    await manager.remove(name)
    assert name not in await manager.list()
    print('\n\n== 6 == Removed pool {} configuration'.format(name))

    with NamedTemporaryFile(mode='w+b', buffering=0) as fh_gen:
        fh_gen.write(pool_genesis_txn_data.encode())
        await manager.add_config(name, fh_gen.name)
    assert name in await manager.list()
    print(
        '\n\n== 7 == Added pool {} configuration on genesis transaction file'.
        format(name))

    pool = manager.get(name, {'timeout': 3600, 'extended_timeout': 7200})
    await pool.open()
    await pool.refresh()
    assert pool.handle is not None
    await pool.close()
    print(
        '\n\n== 8 == Opened, refreshed, and closed pool {} on explicit configuration'
        .format(name))

    await manager.remove(name)
    assert name not in await manager.list()
    print('\n\n== 9 == Removed pool {} configuration'.format(name))
Code Example #15
async def test_ids():
    print(Ink.YELLOW('\n\n== Testing Identifier Checks =='))

    assert ok_wallet_reft('49ad0727-8663-45ae-a115-12b09860f9c6')
    assert not ok_wallet_reft('Q4zqM7aXqm7gDQkUVLng9I')
    assert not ok_wallet_reft('49ad0727-45ae-a115-12b09860f9c6')
    print('\n\n== 1 == Wallet referent identifier checks pass OK')

    assert ok_did('Q4zqM7aXqm7gDQkUVLng9h')
    assert not ok_did('Q4zqM7aXqm7gDQkUVLng9I')  # 'I' not a base58 char
    assert not ok_did('Q4zqM7aXqm7gDQkUVLng')  # too short
    print('\n\n== 2 == Distributed identifier checks pass OK')

    for value in (None, 'TRUSTEE', 'STEWARD', 'TRUST_ANCHOR', ''):
        assert ok_role(value)
    for value in (123, 'TRUSTY', 'STEW', 'ANCHOR', ' '):
        assert not ok_role(value)
    print('\n\n== 3 == Role identifier checks pass OK')

    assert Tails.ok_hash('Q4zqM7aXqm7gDQkUVLng9hQ4zqM7aXqm7gDQkUVLng9h')
    assert Tails.ok_hash('Q4zqM7aXqm7gDQkUVLng9hQ4zqM7aXqm7gDQkUVLng')
    assert not Tails.ok_hash('Q4zqM7aXqm7gDQkUVLng9h')
    assert not Tails.ok_hash('Q4zqM7aXqm7gDQkUVLng9hQ4zqM7aXqm7gDQkUVLng9hx')
    assert not Tails.ok_hash('Q4zqM7aXqm7gDQkUVLng9hQ4zqM7aXqm7gDQkUVLng90')
    print('\n\n== 4 == Tails hash identifier checks pass OK')

    assert ok_schema_id('Q4zqM7aXqm7gDQkUVLng9h:2:bc-reg:1.0')
    assert not ok_schema_id('Q4zqM7aXqm7gDQkUVLng9h:3:bc-reg:1.0')
    assert not ok_schema_id('Q4zqM7aXqm7gDQkUVLng9h::bc-reg:1.0')
    assert not ok_schema_id('Q4zqM7aXqm7gDQkUVLng9h:bc-reg:1.0')
    assert not ok_schema_id('Q4zqM7aXqm7gDQkUVLng9h:2:1.0')
    assert not ok_schema_id('Q4zqM7aXqm7gDQkUVLng9h:2::1.0')
    assert not ok_schema_id('Q4zqM7aXqm7gDQkUVLng9h:2:bc-reg:')
    assert not ok_schema_id('Q4zqM7aXqm7gDQkUVLng9h:2:bc-reg:1.0a')
    assert not ok_schema_id(
        'Q4zqM7aXqm7gDQkUVLng9I:2:bc-reg:1.0')  # I is not in base58
    print('\n\n== 5 == Schema identifier checks pass OK')

    assert ok_cred_def_id(
        'Q4zqM7aXqm7gDQkUVLng9h:3:CL:18:tag')  # protocol >= 1.4
    assert ok_cred_def_id('Q4zqM7aXqm7gDQkUVLng9h:3:CL:18:tag',
                          'Q4zqM7aXqm7gDQkUVLng9h')
    assert not ok_cred_def_id('Q4zqM7aXqm7gDQkUVLng9h:3:CL:18:tag',
                              'Xxxxxxxxxxxxxxxxxxxxxx')
    assert not ok_cred_def_id('Q4zqM7aXqm7gDQkUVLng9h:4:CL:18:0')
    assert not ok_cred_def_id('Q4zqM7aXqm7gDQkUVLng9h::CL:18:0')
    assert not ok_cred_def_id('Q4zqM7aXqm7gDQkUVLng9I:3:CL:18:tag')
    assert not ok_cred_def_id('Q4zqM7aXqm7gDQkUVLng9h:3::18:tag')
    assert not ok_cred_def_id('Q4zqM7aXqm7gDQkUVLng9h:3:18:tag')
    assert not ok_cred_def_id('Q4zqM7aXqm7gDQkUVLng9h:3:CL:18z:tag')
    assert ok_cred_def_id('Q4zqM7aXqm7gDQkUVLng9h:3:CL:18')  # protocol == 1.3
    assert ok_cred_def_id('Q4zqM7aXqm7gDQkUVLng9h:3:CL:18',
                          'Q4zqM7aXqm7gDQkUVLng9h')
    assert not ok_cred_def_id('Q4zqM7aXqm7gDQkUVLng9h:3:CL:18',
                              'Xxxxxxxxxxxxxxxxxxxxxx')
    print('\n\n== 6 == Credential definition identifier checks pass OK')

    assert ok_rev_reg_id(
        'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:20:tag:CL_ACCUM:1'
    )  # protocol >= 1.4
    assert ok_rev_reg_id(
        'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:20:tag:CL_ACCUM:1',
        'LjgpST2rjsoxYegQDRm7EL')
    assert not ok_rev_reg_id(
        'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:20:tag:CL_ACCUM:1',
        'Xxxxxxxxxxxxxxxxxxxxxx')
    assert not ok_rev_reg_id(
        'LjgpST2rjsoxYegQDRm7EL:5:LjgpST2rjsoxYegQDRm7EL:3:CL:20:tag:CL_ACCUM:1'
    )
    assert not ok_rev_reg_id(
        'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:4:CL:20:0:CL_ACCUM:1')
    assert not ok_rev_reg_id(
        'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL::CL:20:0:CL_ACCUM:1')
    assert not ok_rev_reg_id(
        'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:NOT_CL:20:tag:CL_ACCUM:1'
    )
    assert not ok_rev_reg_id(
        'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:20z:tag:CL_ACCUM:1'
    )
    assert not ok_rev_reg_id(
        'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:20::CL_ACCUM:1')
    assert not ok_rev_reg_id(
        'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:20:tag::1')
    assert not ok_rev_reg_id(
        'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:20:tag:1')
    assert not ok_rev_reg_id(
        'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:20:tag:CL_ACCUM:'
    )
    assert not ok_rev_reg_id(
        'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:20:tag:CL_ACCUM')
    assert ok_rev_reg_id(
        'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:20:CL_ACCUM:1'
    )  # protocol == 1.3
    assert ok_rev_reg_id(
        'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:20:CL_ACCUM:1',
        'LjgpST2rjsoxYegQDRm7EL')
    assert not ok_rev_reg_id(
        'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:20:CL_ACCUM:1',
        'Xxxxxxxxxxxxxxxxxxxxxx')
    print('\n\n== 7 == Revocation registry identifier checks pass OK')

    assert ok_endpoint('10.0.0.2:9702')
    assert ok_endpoint('0.0.0.0:0')
    assert not ok_endpoint('canada.gc.ca:8088')
    assert not ok_endpoint(':37')
    assert not ok_endpoint('http://url-wrong')
    assert not ok_endpoint('2.3.4.5')
    assert not ok_endpoint('2.3.4:8080')
    assert not ok_endpoint('1.2.3.4:abc')
    assert not ok_endpoint('1.2.3.4:1234.56')
    print('\n\n== 8 == Endpoint checks pass OK')
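The endpoint checks above suggest ok_endpoint() accepts only a dotted-quad IPv4 address plus a numeric port, rejecting host names, bare addresses, and non-numeric ports; a spot check on that assumption:

assert ok_endpoint('127.0.0.1:8080')   # dotted-quad IPv4 plus numeric port
assert not ok_endpoint('1.2.3.4:abc')  # non-numeric port, as asserted above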
Code Example #16
async def test_pool_open(path_home, pool_name, pool_genesis_txn_path,
                         pool_genesis_txn_file):

    print(Ink.YELLOW('\n\n== Testing Pool Config =='))

    assert Protocol.V_13.indy() != Protocol.V_14.indy()
    assert Protocol.V_14.indy() == Protocol.V_15.indy()
    assert Protocol.V_15.indy() == Protocol.V_16.indy()
    assert Protocol.V_16.indy() == Protocol.DEFAULT.indy()

    path = Path(path_home, 'pool', pool_name)

    try:
        NodePool(pool_name, pool_genesis_txn_path,
                 {'auto-remove': 'non-boolean'})
        assert False
    except JSONValidation:
        pass

    try:
        NodePool(pool_name, pool_genesis_txn_path, {
            'auto-remove': True,
            'protocol': '0.0a'
        })
        assert False
    except JSONValidation:
        pass

    try:
        pool = NodePool(pool_name, pool_genesis_txn_path, {
            'auto-remove': True,
            'extra-property': True
        })
        await pool.remove()
        assert not path.exists(), 'Pool path {} still present'.format(path)
    except JSONValidation:
        assert False

    pool = NodePool(pool_name, pool_genesis_txn_path, {
        'auto-remove': True,
        'protocol': '1.6'
    })
    await pool.open()
    assert pool.handle is not None
    await pool.close()
    assert not path.exists(), 'Pool path {} still present'.format(path)

    # auto-remove default: False, protocol default: latest
    pool = NodePool(pool_name, pool_genesis_txn_path)
    await pool.open()
    assert pool.handle is not None
    await pool.close()
    assert path.exists(), 'Pool path {} not present'.format(path)

    pool = NodePool(
        pool_name, pool_genesis_txn_path,
        {'auto-remove': True})  # check survival re-opening existing pool
    await pool.open()
    assert pool.handle is not None
    await pool.close()
    assert not path.exists(), 'Pool path {} still present'.format(path)
Code Example #17
File: test_wallet.py Project: shlemph/von_anchor
async def test_wallet(path_home):

    print(Ink.YELLOW('\n\n== Testing Wallet Configuration + Context =='))

    seed = '00000000000000000000000000000000'
    name = 'my-wallet'
    path = Path(path_home, 'wallet', name)
    path_seed2did = path.with_name('{}.seed2did'.format(path.name))

    # 1. Configuration with auto-remove set
    w = Wallet(seed, name, None, {'auto-remove': True})
    await w.create()
    assert path.exists(), 'Wallet path {} not present'.format(path)
    await w.open()
    assert w.did
    assert w.verkey
    await w.close()
    assert not path.exists(), 'Wallet path {} still present'.format(path)
    assert not path_seed2did.exists(), 'Wallet path {} still present'.format(
        path_seed2did)
    print('\n\n== 1 == New wallet with auto-remove OK')

    # 2. Default configuration (auto-remove=False)
    w = Wallet(seed, name)
    await w.create()
    assert path.exists(), 'Wallet path {} not present'.format(path)
    assert not path_seed2did.exists(), 'Wallet path {} still present'.format(
        path_seed2did)

    await w.open()
    assert w.did
    assert w.verkey
    (w_did, w_verkey) = (w.did, w.verkey)
    await w.close()
    assert path.exists(), 'Wallet path {} not present'.format(path)
    assert not path_seed2did.exists(), 'Wallet path {} still present'.format(
        path_seed2did)
    print('\n\n== 2 == New wallet with default config (no auto-remove) OK')

    # 3. Make sure wallet opens from extant file
    x = Wallet(seed, name, None, {'auto-remove': True})
    await x.create()

    async with x:
        assert x.did == w_did
        assert x.verkey == w_verkey

    assert not path.exists(), 'Wallet path {} still present'.format(path)
    assert not path_seed2did.exists(), 'Wallet path {} still present'.format(
        path_seed2did)
    print('\n\n== 3 == Re-use extant wallet OK')

    # 4. Double-open
    try:
        async with await Wallet(seed, name, None, {
                'auto-remove': True
        }).create() as w:
            async with w:
                assert False
    except IndyError as e:
        assert e.error_code == ErrorCode.WalletAlreadyOpenedError

    assert not path.exists(), 'Wallet path {} still present'.format(path)
    assert not path_seed2did.exists(), 'Wallet path {} still present'.format(
        path_seed2did)

    # 5. Bad config
    try:
        Wallet(seed, name, None, {'auto-remove': 'a suffusion of yellow'})
    except JSONValidation:
        pass
    print('\n\n== 4 == Error cases error as expected')
Code Example #18
File: test_id_valid.py Project: nrempel/von_anchor
async def test_box_ids():
    print(Ink.YELLOW('\n\n== Testing Identifier Checks =='))

    assert ok_wallet_reft('49ad0727-8663-45ae-a115-12b09860f9c6')
    assert not ok_wallet_reft('Q4zqM7aXqm7gDQkUVLng9I')
    assert not ok_wallet_reft('49ad0727-45ae-a115-12b09860f9c6')

    assert ok_did(
        'Q4zqM7aXqm7gDQkUVLng9h')  # quibble: not technically a box id
    assert not ok_did('Q4zqM7aXqm7gDQkUVLng9I')
    assert not ok_did('Q4zqM7aXqm7gDQkUVLng')

    for value in (None, 'TRUSTEE', 'STEWARD', 'TRUST_ANCHOR', ''):
        assert ok_role(value)
    for value in (123, 'TRUSTY', 'STEW', 'ANCHOR', ' '):
        assert not ok_role(value)

    assert Tails.ok_hash('Q4zqM7aXqm7gDQkUVLng9hQ4zqM7aXqm7gDQkUVLng9h')
    assert Tails.ok_hash('Q4zqM7aXqm7gDQkUVLng9hQ4zqM7aXqm7gDQkUVLng')
    assert not Tails.ok_hash('Q4zqM7aXqm7gDQkUVLng9h')
    assert not Tails.ok_hash('Q4zqM7aXqm7gDQkUVLng9hQ4zqM7aXqm7gDQkUVLng9hx')
    assert not Tails.ok_hash('Q4zqM7aXqm7gDQkUVLng9hQ4zqM7aXqm7gDQkUVLng90')

    assert ok_schema_id('Q4zqM7aXqm7gDQkUVLng9h:2:bc-reg:1.0')
    assert not ok_schema_id('Q4zqM7aXqm7gDQkUVLng9h:3:bc-reg:1.0')
    assert not ok_schema_id('Q4zqM7aXqm7gDQkUVLng9h::bc-reg:1.0')
    assert not ok_schema_id('Q4zqM7aXqm7gDQkUVLng9h:bc-reg:1.0')
    assert not ok_schema_id('Q4zqM7aXqm7gDQkUVLng9h:2:1.0')
    assert not ok_schema_id('Q4zqM7aXqm7gDQkUVLng9h:2::1.0')
    assert not ok_schema_id('Q4zqM7aXqm7gDQkUVLng9h:2:bc-reg:')
    assert not ok_schema_id('Q4zqM7aXqm7gDQkUVLng9h:2:bc-reg:1.0a')
    assert not ok_schema_id(
        'Q4zqM7aXqm7gDQkUVLng9I:2:bc-reg:1.0')  # I is not in base58

    assert ok_cred_def_id(
        'Q4zqM7aXqm7gDQkUVLng9h:3:CL:18:tag')  # protocol >= 1.4
    assert ok_cred_def_id('Q4zqM7aXqm7gDQkUVLng9h:3:CL:18:tag',
                          'Q4zqM7aXqm7gDQkUVLng9h')
    assert not ok_cred_def_id('Q4zqM7aXqm7gDQkUVLng9h:3:CL:18:tag',
                              'Xxxxxxxxxxxxxxxxxxxxxx')
    assert not ok_cred_def_id('Q4zqM7aXqm7gDQkUVLng9h:4:CL:18:0')
    assert not ok_cred_def_id('Q4zqM7aXqm7gDQkUVLng9h::CL:18:0')
    assert not ok_cred_def_id('Q4zqM7aXqm7gDQkUVLng9I:3:CL:18:tag')
    assert not ok_cred_def_id('Q4zqM7aXqm7gDQkUVLng9h:3::18:tag')
    assert not ok_cred_def_id('Q4zqM7aXqm7gDQkUVLng9h:3:18:tag')
    assert not ok_cred_def_id('Q4zqM7aXqm7gDQkUVLng9h:3:CL:18z:tag')
    assert ok_cred_def_id('Q4zqM7aXqm7gDQkUVLng9h:3:CL:18')  # protocol == 1.3
    assert ok_cred_def_id('Q4zqM7aXqm7gDQkUVLng9h:3:CL:18',
                          'Q4zqM7aXqm7gDQkUVLng9h')
    assert not ok_cred_def_id('Q4zqM7aXqm7gDQkUVLng9h:3:CL:18',
                              'Xxxxxxxxxxxxxxxxxxxxxx')

    assert ok_rev_reg_id(
        'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:20:tag:CL_ACCUM:1'
    )  # protocol >= 1.4
    assert ok_rev_reg_id(
        'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:20:tag:CL_ACCUM:1',
        'LjgpST2rjsoxYegQDRm7EL')
    assert not ok_rev_reg_id(
        'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:20:tag:CL_ACCUM:1',
        'Xxxxxxxxxxxxxxxxxxxxxx')
    assert not ok_rev_reg_id(
        'LjgpST2rjsoxYegQDRm7EL:5:LjgpST2rjsoxYegQDRm7EL:3:CL:20:tag:CL_ACCUM:1'
    )
    assert not ok_rev_reg_id(
        'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:4:CL:20:0:CL_ACCUM:1')
    assert not ok_rev_reg_id(
        'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL::CL:20:0:CL_ACCUM:1')
    assert not ok_rev_reg_id(
        'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:NOT_CL:20:tag:CL_ACCUM:1'
    )
    assert not ok_rev_reg_id(
        'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:20z:tag:CL_ACCUM:1'
    )
    assert not ok_rev_reg_id(
        'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:20::CL_ACCUM:1')
    assert not ok_rev_reg_id(
        'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:20:tag::1')
    assert not ok_rev_reg_id(
        'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:20:tag:1')
    assert not ok_rev_reg_id(
        'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:20:tag:CL_ACCUM:'
    )
    assert not ok_rev_reg_id(
        'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:20:tag:CL_ACCUM')

    assert ok_rev_reg_id(
        'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:20:CL_ACCUM:1'
    )  # protocol == 1.3
    assert ok_rev_reg_id(
        'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:20:CL_ACCUM:1',
        'LjgpST2rjsoxYegQDRm7EL')
    assert not ok_rev_reg_id(
        'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:20:CL_ACCUM:1',
        'Xxxxxxxxxxxxxxxxxxxxxx')

    assert ok_endpoint('10.0.0.2:9702')
    assert ok_endpoint('0.0.0.0:0')
    assert not ok_endpoint('canada.gc.ca:8088')
    assert not ok_endpoint(':37')
    assert not ok_endpoint('http://url-wrong')
    assert not ok_endpoint('2.3.4.5')
    assert not ok_endpoint('2.3.4:8080')
    assert not ok_endpoint('1.2.3.4:abc')
    assert not ok_endpoint('1.2.3.4:1234.56')
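
The endpoint assertions above pin down the accepted shape: a dotted-quad IPv4 host plus a numeric port, nothing else. The following is a minimal sketch of a check with that behaviour, inferred from these assertions only; it is not von_anchor's ok_endpoint implementation.

import re

# Illustrative only: dotted-quad IPv4 host plus decimal port, per the
# ok_endpoint assertions above (the actual implementation may differ).
IP_PORT = re.compile(r'^\d{1,3}(\.\d{1,3}){3}:\d+$')

def looks_like_endpoint(value: str) -> bool:
    if not IP_PORT.match(value):
        return False
    (host, _) = value.rsplit(':', 1)
    return all(0 <= int(octet) <= 255 for octet in host.split('.'))

assert looks_like_endpoint('10.0.0.2:9702')
assert not looks_like_endpoint('canada.gc.ca:8088')
assert not looks_like_endpoint('1.2.3.4:1234.56')
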
Code example #19
async def test_setnym(pool_ip, pool_name, pool_genesis_txn_data, seed_trustee1,
                      path_setnym_ini, setnym_ini_file):

    print(
        Ink.YELLOW('\n\n== Testing setnym operation on node pool {} =='.format(
            pool_ip)))

    with open(path_setnym_ini, 'r') as cfg_fh:
        print('\n\n== 1 == Initial configuration:\n{}'.format(cfg_fh.read()))
    cfg = inis2dict(str(path_setnym_ini))

    # Set up node pool ledger config and wallets, open pool, init anchors
    manager = NodePoolManager()
    if pool_name not in await manager.list():
        await manager.add_config(pool_name, pool_genesis_txn_data)

    seeds = {
        'trustee-anchor': seed_trustee1,
        cfg['VON Anchor']['wallet.name']: cfg['VON Anchor']['seed'],
        'x-anchor': 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
    }
    wallets = await get_wallets(seeds, True)

    try:
        async with NominalAnchor(wallets['x-anchor']) as xan:
            await xan.get_nym()
    except AbsentPool:
        pass
    wallets.pop('x-anchor')

    # Open pool, check if nym already present
    pool = manager.get(pool_name)
    await pool.open()
    assert pool.handle

    tan = TrusteeAnchor(wallets['trustee-anchor'], pool)
    await tan.open()

    noman = NominalAnchor(wallets[cfg['VON Anchor']['wallet.name']], pool)

    nym = json.loads(await noman.get_nym(noman.did))
    print('\n\n== 2 == Nym {} on ledger for anchor {} on DID {}'.format(
        '{} already'.format(ppjson(nym)) if nym else 'not yet',
        noman.wallet.name, noman.did))

    await tan.close()
    await pool.close()

    sub_proc = subprocess.run([
        'python',
        join(dirname(dirname(dirname(realpath(__file__)))), 'von_anchor', 'op',
             'setnym.py'),
        str(path_setnym_ini)
    ],
                              stdout=subprocess.PIPE,
                              stderr=subprocess.DEVNULL)
    assert not sub_proc.returncode
    print('\n\n== 3 == Set nym with TRUST_ANCHOR role on {} for {}'.format(
        noman.did, noman.wallet.name))

    await pool.open()
    await noman.open()
    nym = json.loads(await noman.get_nym(noman.did))
    assert nym and Role.get(nym['role']) == Role.TRUST_ANCHOR
    print('\n\n== 4 == Got nym transaction from ledger for DID {} ({}): {}'.
          format(noman.did, noman.wallet.name, ppjson(nym)))
    await noman.close()
    await pool.close()

    with open(path_setnym_ini, 'w+') as ini_fh:
        for section in cfg:
            print('[{}]'.format(section), file=ini_fh)
            for (key, value) in cfg[section].items():
                if key in ('seed', 'genesis.txn.path'):
                    continue
                print('{}={}'.format(
                    key, '${X_ROLE:-}' if key == 'role' else value),
                      file=ini_fh)  # exercise default
            print(file=ini_fh)
    with open(path_setnym_ini, 'r') as cfg_fh:
        print(
            '\n\n== 5 == Next configuration, no seeds, no VON Anchor role:\n{}'
            .format(cfg_fh.read()))

    sub_proc = subprocess.run([
        'python',
        join(dirname(dirname(dirname(realpath(__file__)))), 'von_anchor', 'op',
             'setnym.py'),
        str(path_setnym_ini)
    ],
                              stdout=subprocess.PIPE,
                              stderr=subprocess.DEVNULL)
    assert not sub_proc.returncode
    print('\n\n== 6 == Set nym with default role on {} for {}'.format(
        noman.did, noman.wallet.name))

    await pool.open()
    await noman.open()
    nym = json.loads(await noman.get_nym(noman.did))
    assert nym and Role.get(nym['role']) == Role.USER
    last_nym_seqno = nym['seqNo']
    print('\n\n== 7 == Got nym transaction from ledger for DID {} ({}): {}'.
          format(noman.did, noman.wallet.name, ppjson(nym)))
    await noman.close()
    await pool.close()

    sub_proc = subprocess.run(  #  do it again
        [
            'python',
            join(dirname(dirname(dirname(realpath(__file__)))), 'von_anchor',
                 'op', 'setnym.py'),
            str(path_setnym_ini)
        ],
        stdout=subprocess.PIPE,
        stderr=subprocess.DEVNULL)
    assert not sub_proc.returncode
    print('\n\n== 8 == Set nym again with default role on {} for {}'.format(
        noman.did, noman.wallet.name))

    await pool.open()
    await noman.open()
    nym = json.loads(await noman.get_nym(noman.did))
    last_nym_seqno = nym['seqNo']
    print(
        '\n\n== 9 == Got (same) nym transaction from ledger for DID {} ({}): {}'
        .format(noman.did, noman.wallet.name, ppjson(nym)))
    await noman.close()
    await pool.close()

    with open(path_setnym_ini, 'w+') as ini_fh:
        for section in cfg:
            print('[{}]'.format(section), file=ini_fh)
            for (key, value) in cfg[section].items():
                if key in ('seed', 'genesis.txn.path'):
                    continue
                print('{}={}'.format(key,
                                     'BAD_ROLE' if key == 'role' else value),
                      file=ini_fh)
            print(file=ini_fh)
    with open(path_setnym_ini, 'r') as cfg_fh:
        print(
            '\n\n== 10 == Next configuration, no seeds, bad VON Anchor role:\n{}'
            .format(cfg_fh.read()))

    sub_proc = subprocess.run([
        'python',
        join(dirname(dirname(dirname(realpath(__file__)))), 'von_anchor', 'op',
             'setnym.py'),
        str(path_setnym_ini)
    ],
                              stdout=subprocess.PIPE,
                              stderr=subprocess.DEVNULL)
    assert sub_proc.returncode
    print(
        '\n\n== 11 == Called to set bad role for {}, got error text {}'.format(
            noman.wallet.name, sub_proc.stdout.decode()))

    await pool.open()
    await noman.open()
    nym = json.loads(await noman.get_nym(noman.did))
    noman_role = await noman.get_nym_role()
    assert nym and nym['seqNo'] == last_nym_seqno
    await noman.close()
    await pool.close()

    print('\n\n== 12 == Got nym transaction from ledger for DID {} ({}): {}'.
          format(noman.did, noman.wallet.name, ppjson(nym)))

    await pool.open()
    san = SRIAnchor(wallets[cfg['VON Anchor']['wallet.name']], pool)
    await san.open()
    next_seed = "{}000000000000VonAnchor1".format(int(time()) + 1)
    await san.reseed(next_seed)
    nym = json.loads(await san.get_nym(noman.did))
    san_role = await san.get_nym_role()
    assert nym and nym['seqNo'] != last_nym_seqno
    assert san_role == noman_role  # ensure that reseed does not side-effect role on ledger

    print(
        '\n\n== 13 == As SRI Anchor, reseeded, then got nym transaction from ledger for DID {} ({}): {}'
        .format(san.did, san.wallet.name, ppjson(nym)))

    await san.close()
    await pool.close()
    for name in wallets:
        await wallets[name].close()
Code example #20
File: test_a2a.py Project: swcurran/von_anchor
async def test_a2a():
    print(Ink.YELLOW('\n\n== Testing DID Doc wranglers =='))

    # One authn key by reference
    dd_in = {
        '@context': 'https://w3id.org/did/v1',
        'id': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
        'publicKey': [
            {
                'id': '3',
                'type': 'RsaVerificationKey2018',
                'controller': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
                'publicKeyPem': '-----BEGIN PUBLIC X...'
            },
            {
                'id': '4',
                'type': 'RsaVerificationKey2018',
                'controller': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
                'publicKeyPem': '-----BEGIN PUBLIC 9...'
            },
            {
                'id': '6',
                'type': 'RsaVerificationKey2018',
                'controller': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
                'publicKeyPem': '-----BEGIN PUBLIC A...'
            }
        ],
        'authentication': [
            {
                'type': 'RsaSignatureAuthentication2018',
                'publicKey': 'did:sov:LjgpST2rjsoxYegQDRm7EL#4'
            }
        ],
        'service': [
            {
                'id': '0',
                'type': 'Agency',
                'serviceEndpoint': 'did:sov:Q4zqM7aXqm7gDQkUVLng9h'
            }
        ]
    }

    dd = DIDDoc.deserialize(dd_in)
    assert len(dd.pubkey) == len(dd_in['publicKey'])
    assert len(dd.authnkey) == len(dd_in['authentication'])

    dd_out = dd.serialize()
    print('\n\n== 1 == DID Doc {} on abbreviated identifiers: {}'.format(dd, ppjson(dd_out)))

    # Exercise JSON, de/serialization
    dd_json = dd.to_json()
    dd_copy = dd.from_json(dd_json)
    assert dd_copy.did == dd.did
    assert all(dd_copy.authnkey[k].to_dict() == dd.authnkey[k].to_dict() for k in dd_copy.authnkey)
    assert {k for k in dd_copy.authnkey} == {k for k in dd.authnkey}
    assert all(dd_copy.pubkey[k].to_dict() == dd.pubkey[k].to_dict() for k in dd_copy.pubkey)
    assert {k for k in dd_copy.pubkey} == {k for k in dd.pubkey}
    assert all(dd_copy.service[k].to_dict() == dd.service[k].to_dict() for k in dd_copy.service)
    assert {k for k in dd_copy.service} == {k for k in dd.service}
    print('\n\n== 2 == DID Doc de/serialization operates OK:')

    # Exercise accessors
    dd.did = dd_out['id']
    assert dd.did == canon_did(dd_out['id'])
    try:
        dd.set(['neither a service', 'nor a public key'])
        assert False
    except BadDIDDocItem:
        pass
    assert dd.service[[k for k in dd.service][0]].did == dd.did
    print('\n\n== 3 == DID Doc accessors operate OK')

    # One authn key embedded, all possible refs canonical
    dd_in = {
        '@context': 'https://w3id.org/did/v1',
        'id': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
        'publicKey': [
            {
                'id': '3',
                'type': 'RsaVerificationKey2018',
                'controller': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
                'publicKeyPem': '-----BEGIN PUBLIC X...'
            },
            {
                'id': 'did:sov:LjgpST2rjsoxYegQDRm7EL#4',
                'type': 'RsaVerificationKey2018',
                'controller': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
                'publicKeyPem': '-----BEGIN PUBLIC 9...'
            }
        ],
        'authentication': [
            {
                'type': 'RsaSignatureAuthentication2018',
                'publicKey': 'did:sov:LjgpST2rjsoxYegQDRm7EL#4'
            },
            {
                'id': 'did:sov:LjgpST2rjsoxYegQDRm7EL#6',
                'type': 'RsaVerificationKey2018',
                'controller': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
                'publicKeyPem': '-----BEGIN PUBLIC A...'
            }
        ],
        'service': [
            {
                'id': 'did:sov:LjgpST2rjsoxYegQDRm7EL;0',
                'type': 'Agency',
                'serviceEndpoint': 'https://www.von.ca'
            }
        ]
    }

    dd = DIDDoc.deserialize(dd_in)
    assert len(dd.pubkey) == len(dd_in['publicKey']) + 1
    assert len(dd.authnkey) == len(dd_in['authentication'])

    dd_out = dd.serialize()
    print('\n\n== 4 == DID Doc on mixed reference styles, embedded and ref style authn keys: {}'.format(ppjson(dd_out)))

    # All references canonical where possible; one authn key embedded and one by reference
    dd_in = {
        '@context': 'https://w3id.org/did/v1',
        'id': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
        'publicKey': [
            {
                'id': 'did:sov:LjgpST2rjsoxYegQDRm7EL#3',
                'type': 'RsaVerificationKey2018',
                'controller': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
                'publicKeyPem': '-----BEGIN PUBLIC X...'
            },
            {
                'id': 'did:sov:LjgpST2rjsoxYegQDRm7EL#4',
                'type': 'RsaVerificationKey2018',
                'controller': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
                'publicKeyPem': '-----BEGIN PUBLIC 9...'
            }
        ],
        'authentication': [
            {
                'type': 'RsaSignatureAuthentication2018',
                'publicKey': 'did:sov:LjgpST2rjsoxYegQDRm7EL#4'
            },
            {
                'id': 'did:sov:LjgpST2rjsoxYegQDRm7EL#6',
                'type': 'RsaVerificationKey2018',
                'controller': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
                'publicKeyPem': '-----BEGIN PUBLIC A...'
            }
        ],
        'service': [
            {
                'id': 'did:sov:LjgpST2rjsoxYegQDRm7EL;0',
                'type': 'DidMessaging',
                'serviceEndpoint': 'https://www.von.ca'
            }
        ]
    }

    dd = DIDDoc.deserialize(dd_in)
    assert len(dd.pubkey) == len(dd_in['publicKey']) + 1
    assert len(dd.authnkey) == len(dd_in['authentication'])

    dd_out = dd.serialize()
    print('\n\n== 5 == DID Doc on canonical refs: {}'.format(ppjson(dd_out)))

    # Minimal as per indy-agent test suite without explicit identifiers
    dd_in = {
        '@context': 'https://w3id.org/did/v1',
        'publicKey': [
            {
                'id': 'LjgpST2rjsoxYegQDRm7EL#keys-1',
                'type': 'Ed25519VerificationKey2018',
                'controller': 'LjgpST2rjsoxYegQDRm7EL',
                'publicKeyBase58': '~XXXXXXXXXXXXXXXX'
            }
        ],
        'service': [
            {
                'type': 'DidMessaging',
                'recipientKeys': ['~XXXXXXXXXXXXXXXX'],
                'serviceEndpoint': 'https://www.von.ca'
            }
        ]
    }

    dd = DIDDoc.deserialize(dd_in)
    assert len(dd.pubkey) == len(dd_in['publicKey'])
    assert len(dd.authnkey) == 0

    dd_out = dd.serialize()
    print('\n\n== 6 == DID Doc minimal style, implicit DID document identifier: {}'.format(
        ppjson(dd_out)))

    # Minimal + ids as per indy-agent test suite with explicit identifiers; novel service recipient key on raw base58
    dd_in = {
        '@context': 'https://w3id.org/did/v1',
        'id': 'LjgpST2rjsoxYegQDRm7EL',
        'publicKey': [
            {
                'id': 'LjgpST2rjsoxYegQDRm7EL#keys-1',
                'type': 'Ed25519VerificationKey2018',
                'controller': 'LjgpST2rjsoxYegQDRm7EL',
                'publicKeyBase58': '~XXXXXXXXXXXXXXXX'
            }
        ],
        'service': [
            {
                'id': 'LjgpST2rjsoxYegQDRm7EL;indy',
                'type': 'DidMessaging',
                'priority': 1,
                'recipientKeys': ['~YYYYYYYYYYYYYYYY'],
                'serviceEndpoint': 'https://www.von.ca'
            }
        ]
    }

    dd = DIDDoc.deserialize(dd_in)
    assert len(dd.pubkey) == 1 + len(dd_in['publicKey'])
    assert len(dd.authnkey) == 0

    dd_out = dd.serialize()
    print('\n\n== 7 == DID Doc minimal style plus explicit idents and novel raw base58 service recip key: {}'.format(
        ppjson(dd_out)))

    # Minimal + ids as per indy-agent test suite with explicit identifiers; novel service recipient key on raw base58
    dd_in = {
        '@context': 'https://w3id.org/did/v1',
        'id': 'LjgpST2rjsoxYegQDRm7EL',
        'publicKey': [
            {
                'id': 'LjgpST2rjsoxYegQDRm7EL#keys-1',
                'type': 'Ed25519VerificationKey2018',
                'controller': 'LjgpST2rjsoxYegQDRm7EL',
                'publicKeyBase58': '~XXXXXXXXXXXXXXXX'
            },
            {
                'id': 'LjgpST2rjsoxYegQDRm7EL#keys-2',
                'type': 'Ed25519VerificationKey2018',
                'controller': 'LjgpST2rjsoxYegQDRm7EL',
                'publicKeyBase58': '~YYYYYYYYYYYYYYYY'
            },
            {
                'id': 'LjgpST2rjsoxYegQDRm7EL#keys-3',
                'type': 'Secp256k1VerificationKey2018',
                'controller': 'LjgpST2rjsoxYegQDRm7EL',
                'publicKeyHex': '02b97c30de767f084ce3080168ee293053ba33b235d7116a3263d29f1450936b71'
            },
            {
                'id': 'did:sov:LjgpST2rjsoxYegQDRm7EL#keys-4',
                'type': 'RsaVerificationKey2018',
                'controller': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
                'publicKeyPem': '-----BEGIN PUBLIC A...'
            }
        ],
        'service': [
            {
                'id': 'LjgpST2rjsoxYegQDRm7EL;indy',
                'type': 'DidMessaging',
                'priority': 0,
                'recipientKeys': ['~ZZZZZZZZZZZZZZZZ'],
                'serviceEndpoint': 'did:sov:LjgpST2rjsoxYegQDRm7EL;1'
            },
            {
                'id': '1',
                'type': 'one',
                'priority': 1,
                'recipientKeys': [
                    '~XXXXXXXXXXXXXXXX',
                    'did:sov:LjgpST2rjsoxYegQDRm7EL#keys-1'
                ],
                'routingKeys': [
                    'did:sov:LjgpST2rjsoxYegQDRm7EL#keys-4'
                ],
                'serviceEndpoint': 'LjgpST2rjsoxYegQDRm7EL;2'
            },
            {
                'id': '2',
                'type': 'two',
                'priority': 2,
                'recipientKeys': [
                    '~XXXXXXXXXXXXXXXX',
                    'did:sov:LjgpST2rjsoxYegQDRm7EL#keys-1'
                ],
                'routingKeys': [
                    'did:sov:LjgpST2rjsoxYegQDRm7EL#keys-4'
                ],
                'serviceEndpoint': 'https://www.two.ca/two'
            }
        ]
    }

    dd = DIDDoc.deserialize(dd_in)
    assert len(dd.pubkey) == 1 + len(dd_in['publicKey'])
    assert len(dd.authnkey) == 0
    assert {s.priority for s in dd.service.values()} == {0, 1, 2}
    assert len(dd.service) == 3
    assert all(len(dd.service[k].to_dict()['recipientKeys']) == 1 for k in dd.service)
    assert 'routingKeys' not in dd.service['did:sov:LjgpST2rjsoxYegQDRm7EL;indy'].to_dict()
    assert all(len(dd.service[k].to_dict()['routingKeys']) == 1
        for k in ('did:sov:LjgpST2rjsoxYegQDRm7EL;1', 'did:sov:LjgpST2rjsoxYegQDRm7EL;2'))


    dd_out = dd.serialize()
    print('\n\n== 8 == DID Doc on mixed service routing and recipient keys: {}'.format(ppjson(dd_out)))

    pk = PublicKey(
        dd.did,
        '99',
        '~AAAAAAAAAAAAAAAA',
        PublicKeyType.ED25519_SIG_2018,
        dd.did,
        True)
    dd.set(pk)
    assert len(dd.pubkey) == 2 + len(dd_in['publicKey'])
    assert canon_ref(dd.did, '99', '#') in dd.pubkey
    assert len(dd.authnkey) == 1

    service = Service(
        dd.did,
        'abc',
        'IndyAgent',
        [pk],
        [pk],
        'http://www.abc.ca/123'
    )
    dd.set(service)
    assert len(dd.service) == 4
    assert canon_ref(dd.did, 'abc', ';') in dd.service
    print('\n\n== 9 == DID Doc adds public key and service via set() OK')

    # Exercise missing service recipient key
    dd_in = {
        '@context': 'https://w3id.org/did/v1',
        'id': 'LjgpST2rjsoxYegQDRm7EL',
        'publicKey': [
            {
                'id': 'LjgpST2rjsoxYegQDRm7EL#keys-1',
                'type': 'Ed25519VerificationKey2018',
                'controller': 'LjgpST2rjsoxYegQDRm7EL',
                'publicKeyBase58': '~XXXXXXXXXXXXXXXX'
            }
        ],
        'service': [
            {
                'id': 'LjgpST2rjsoxYegQDRm7EL;indy',
                'type': 'DidMessaging',
                'priority': 1,
                'recipientKeys': [
                    'did:sov:LjgpST2rjsoxYegQDRm7EL#keys-3'
                ],
                'serviceEndpoint': 'https://www.von.ca'
            }
        ]
    }

    try:
        dd = DIDDoc.deserialize(dd_in)
        assert False
    except AbsentDIDDocItem:
        pass
    print('\n\n== 10 == DID Doc on underspecified service key fails as expected')

    # Minimal as per W3C Example 2, draft 0.12
    dd_in = {
        '@context': 'https://w3id.org/did/v1',
        'id': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
        'authentication': [
            {
                'id': 'LjgpST2rjsoxYegQDRm7EL#keys-1',
                'type': 'Ed25519VerificationKey2018',
                'controller': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
                'publicKeyBase58': '~XXXXXXXXXXXXXXXX'
            }
        ],
        'service': [
            {
                'type': 'DidMessaging',
                'serviceEndpoint': 'https://example.com/endpoint/8377464'
            }
        ]
    }

    dd = DIDDoc.deserialize(dd_in)
    assert len(dd.pubkey) == 1
    assert len(dd.authnkey) == 1
    assert len(dd.service) == 1

    dd_out = dd.serialize()
    print('\n\n== 11 == Minimal DID Doc (no pubkey except authentication) as per W3C spec parses OK: {}'.format(
        ppjson(dd_out)))

    # Exercise no-identifier case
    dd_in = {
        '@context': 'https://w3id.org/did/v1',
        'authentication': [
            {
                'type': 'Ed25519VerificationKey2018',
                'controller': 'did:sov:LjgpST2rjsoxYegQDRm7EL',
                'publicKeyBase58': '~XXXXXXXXXXXXXXXX'
            }
        ],
        'service': [
            {
                'type': 'DidMessaging',
                'serviceEndpoint': 'https://example.com/endpoint/8377464'
            }
        ]
    }

    try:
        dd = DIDDoc.deserialize(dd_in)
        assert False
    except AbsentDIDDocItem:
        pass
    print('\n\n== 12 == DID Doc without identifier rejected as expected')

    # Exercise reference canonicalization, including failure paths
    try:
        canon_ref('not-a-DID', ref=dd.did, delimiter='#')
        assert False
    except BadIdentifier:
        pass

    try:
        canon_ref(dd.did, ref='did:sov:not-a-DID', delimiter='#')
        assert False
    except BadIdentifier:
        pass
    
    urlref = 'https://www.clafouti-quasar.ca:8443/supply-management/fruit/index.html'
    assert canon_ref(dd.did, ref=urlref) == urlref
    print('\n\n== 13 == Reference canonicalization operates as expected')

    assert PublicKeyType.get('no-such-type') is None
    pubkey0 = dd.pubkey[[k for k in dd.pubkey][0]]
    was_authn = pubkey0.authn
    pubkey0.authn = not was_authn
    assert pubkey0.authn != was_authn
    print('\n\n== 14 == Changed authentication setting for DIDDoc {} in public key {}, now {}'.format(
        pubkey0.did,
        pubkey0.id,
        repr(pubkey0)))
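
The assertions above pin down two behaviours of canon_ref(): a full URL reference passes through unchanged, and a bare identifier gets qualified against its DID with the given delimiter (the qualified forms land as keys in dd.pubkey and dd.service). A brief usage sketch follows; the exact qualified strings are assumptions, so they appear only as comments.

did = 'LjgpST2rjsoxYegQDRm7EL'

# Expected to yield fully qualified references along the lines of
# 'did:sov:LjgpST2rjsoxYegQDRm7EL#99' and 'did:sov:LjgpST2rjsoxYegQDRm7EL;abc'
# (assumed canonical forms).
print(canon_ref(did, '99', '#'))
print(canon_ref(did, 'abc', ';'))

# URL references pass through untouched, as asserted in the test above.
urlref = 'https://www.clafouti-quasar.ca:8443/supply-management/fruit/index.html'
assert canon_ref(did, ref=urlref) == urlref
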
Code example #21
async def test_anchors_tails_load(
        pool_name,
        pool_genesis_txn_data,
        seed_trustee1):

    rrbx = True
    print(Ink.YELLOW('\n\n== Load-testing tails on {}ternal rev reg builder ==').format("ex" if rrbx else "in"))

    await RevRegBuilder.stop(WALLET_NAME)  # in case of re-run

    # Set up node pool ledger config and wallets, open pool, init anchors
    p_mgr = NodePoolManager()
    if pool_name not in await p_mgr.list():
        await p_mgr.add_config(pool_name, pool_genesis_txn_data)
    pool = p_mgr.get(pool_name)
    await pool.open()

    w_mgr = WalletManager()
    wallets = {
        'trustee-anchor': {
            'seed': seed_trustee1,
            'storage_type': None,
            'config': None,
            'access_creds': None
        },
        WALLET_NAME: {
            'seed': 'Superstar-Anchor-000000000000000',
            'storage_type': None,
            'config': None,
            'access_creds': {
                'key': 'rrbx-test'
            }
        }
    }
    for (name, wdata) in wallets.items():
        try:
            wdata['wallet'] = await w_mgr.create({
                'id': name,
                'seed': wdata['seed']
            })
        except ExtantWallet:
            wdata['wallet'] = w_mgr.get({'id': name})
        finally:
            await wdata['wallet'].open()

    tan = TrusteeAnchor(wallets['trustee-anchor']['wallet'], pool)
    no_prox = rrbx_prox()
    san = OrgHubAnchor(wallets[WALLET_NAME]['wallet'], pool, rrbx=rrbx)
    if rrbx:
        await beep('external rev reg builder process on {}'.format(WALLET_NAME), 15)
        if rrbx_prox() != no_prox + 1:
            await RevRegBuilder.stop(WALLET_NAME)
            assert False, "External rev reg builder process did not start"
        async with OrgHubAnchor(
                wallets[WALLET_NAME]['wallet'],
                pool,
                rrbx=rrbx):  # check for exactly 1 external rev reg builder process
            await beep('external rev reg builder process uniqueness test on {}'.format(WALLET_NAME), 5)
            if rrbx_prox() != no_prox + 1:
                await RevRegBuilder.stop(WALLET_NAME)
                assert False, "External rev reg builder process was not unique"

    assert pool.handle

    await tan.open()
    await san.open()

    # Publish anchor particulars to ledger if not yet present
    for an in (tan, san):
        if not json.loads(await tan.get_nym(an.did)):
            await tan.send_nym(an.did, an.verkey, an.wallet.name, an.least_role())

    nyms = {
        'tan': json.loads(await tan.get_nym(tan.did)),
        'san': json.loads(await tan.get_nym(san.did))
    }
    print('\n\n== 1 == nyms: {}'.format(ppjson(nyms)))

    for k in nyms:
        assert 'dest' in nyms[k]

    # Publish schema to ledger if not yet present; get from ledger
    S_ID = schema_id(san.did, 'tails_load', '{}.0'.format(int(time.time())))
    S_KEY = schema_key(S_ID)

    schema_data = {
        'name': schema_key(S_ID).name,
        'version': schema_key(S_ID).version,
        'attr_names': [
            'number',
            'remainder'
        ]
    }

    try:
        await san.get_schema(S_KEY)  # may exist (almost certainly not)
    except AbsentSchema:
        await san.send_schema(json.dumps(schema_data))
    schema_json = await san.get_schema(S_KEY)
    schema = json.loads(schema_json)
    assert schema  # should exist now
    print('\n\n== 2 == SCHEMA [{} v{}]: {}'.format(S_KEY.name, S_KEY.version, ppjson(schema)))

    # Setup link secret for creation of cred req or proof
    await san.create_link_secret('LinkSecret')

    # SRI anchor create, store, publish cred definitions to ledger; create cred offers
    await san.send_cred_def(S_ID, revo=True)
    cd_id = cred_def_id(S_KEY.origin_did, schema['seqNo'], pool.protocol)

    assert ((not Tails.unlinked(san.dir_tails)) and
        [f for f in Tails.links(san.dir_tails, san.did) if cd_id in f])

    cred_def_json = await san.get_cred_def(cd_id)  # ought to exist now
    cred_def = json.loads(cred_def_json)
    print('\n\n== 3.0 == Cred def [{} v{}]: {}'.format(
        S_KEY.name,
        S_KEY.version,
        ppjson(json.loads(cred_def_json))))
    assert cred_def.get('schemaId', None) == str(schema['seqNo'])

    cred_offer_json = await san.create_cred_offer(schema['seqNo'])
    print('\n\n== 3.1 == Credential offer [{} v{}]: {}'.format(
        S_KEY.name,
        S_KEY.version,
        ppjson(cred_offer_json)))

    (cred_req_json, cred_req_metadata_json) = await san.create_cred_req(cred_offer_json, cd_id)
    print('\n\n== 4 == Credential request [{} v{}]: metadata {}, cred-req {}'.format(
        S_KEY.name,
        S_KEY.version,
        ppjson(cred_req_metadata_json),
        ppjson(cred_req_json)))
    assert json.loads(cred_req_json)

    # BC Reg anchor (as Issuer) issues creds and stores at HolderProver: get cred req, create cred, store cred
    CREDS = 4034  # enough to kick off rev reg on size 4096 and issue two creds in it: 1 needing set-rev-reg, 1 not
    print('\n\n== 5 == creating {} credentials'.format(CREDS))
    swatch = Stopwatch(2)
    optima = {}  # per rev-reg, fastest/slowest pairs
    for number in range(CREDS):
        swatch.mark()
        (cred_json, _) = await san.create_cred(
            cred_offer_json,
            cred_req_json,
            {
                'number': str(number),
                'remainder': str(number % 100)
            })
        elapsed = swatch.mark()
        tag = rev_reg_id2tag(Tails.current_rev_reg_id(san.dir_tails, cd_id))
        if tag not in optima:
            optima[tag] = (elapsed, elapsed)
        else:
            optima[tag] = (min(optima[tag][0], elapsed), max(optima[tag][1], elapsed))
        print('.', end='', flush=True)
        if ((number + 1) % 100) == 0:
            print('{}: #{}: {:.2f}-{:.2f}s'.format(number + 1, tag, *optima[tag]), flush=True)

        assert json.loads(cred_json)
    print('{}: #{}: {:.2f}-{:.2f}s'.format(number + 1, tag, *optima[tag]), flush=True)

    print('\n\n== 6 == best, worst times by revocation registry: {}'.format(ppjson(optima)))
    assert (not rrbx) or (max(optima[tag][1] for tag in optima) <
        4 * min(optima[tag][1] for tag in optima if int(tag) > 0))  # if waiting on rr beyond #0, sizes increase as 2^n

    await san.close()
    if rrbx:
        await RevRegBuilder.stop(WALLET_NAME)
    await tan.close()
    for (name, wdata) in wallets.items():
        await wdata['wallet'].close()
    await pool.close()
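
The wallet setup above uses a create-or-get pattern against WalletManager. A small helper capturing that pattern, sketched with exactly the calls exercised in this test:

async def create_or_get_wallet(w_mgr: WalletManager, name: str, seed: str):
    # Create the wallet if it does not exist yet; fall back to the existing
    # one on ExtantWallet, mirroring the setup loop in the test above.
    try:
        return await w_mgr.create({'id': name, 'seed': seed})
    except ExtantWallet:
        return w_mgr.get({'id': name})
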
Code example #22
async def test_enco_deco():
    print(Ink.YELLOW('\n\n== Testing encode/decode for string of length up to 1024'))

    for printable_len in range(0, 1025):
        orig = ''.join(choice(printable) for _ in range(printable_len))
        print('.', end='' if (printable_len + 1) % 100 else '{}\n'.format(printable_len), flush=True)
        enc = encode(orig)
        dec = decode(enc)
        assert cred_attr_value(orig) == {'raw': raw(orig), 'encoded': enc}
        assert orig == dec
    print('\n\n== Random printable string test passed')

    print('\n\n== Typical cases - (type) orig -> encoded -> (type) decoded:')
    for orig in (
            chr(0),
            chr(1),
            chr(2),
            'Alice',
            'Bob',
            'J.R. "Bob" Dobbs',
            None,
            True,
            False,
            -5,
            0,
            1024,
            2**31 - 1,
            2**31,
            2**31 + 1,
            -2**31 - 1,
            -2**31,
            -2**31 + 1,
            0.0,
            '0.0',
            0.1,
            -0.1,
            -1.9234856120348166e+37,
            1.9234856120348166e+37,
            -19234856120348165921835629183561023142.55,
            19234856120348165921835629183561023142.55,
            float_info.max,
            'Hello',
            '',
            'True',
            'False',
            '1234',
            '-12345',
            [],
            [0, 1, 2, 3],
            {'a': 1, 'b': 2, 'c': 3},
            [{}, {'a': [0, 0.1], 'b': [0.0, float_info.min]}, True],
            ):
        enc = encode(orig)
        dec = decode(enc)
        print('  ({})({}) -> {} -> ({})({})'.format(
            type(orig).__name__,
            '0x{:02x}'.format(ord(orig))
                if orig in (chr(0), chr(1), chr(2))
                else "%f" % orig if isinstance(orig, float)
                else orig,
            enc,
            type(dec).__name__,
            '0x{:02x}'.format(ord(dec))
                if dec in (chr(0), chr(1), chr(2))
                else "%f" % dec if isinstance(dec, float)
                else dec))
        assert orig == dec

    for i in range(32):
        orig = ''.join(map(chr, [0] * i))
        enc = encode(orig)
        dec = decode(enc)
        assert cred_attr_value(orig) == {'raw': raw(orig), 'encoded': enc}
        assert orig == dec
    print('Tests OK for (str)(chr(0) multiples)')
Code example #23
async def test_aries_compliance():
    print(
        Ink.YELLOW(
            '\n\n== Testing encoding for Aries Interop Profile compliance'))
    values = {
        "address2": {
            "raw":
            "101 Wilson Lane",
            "encoded":
            "68086943237164982734333428280784300550565381723532936263016368251445461241953"
        },
        "zip": {
            "raw": "87121",
            "encoded": "87121"
        },
        "city": {
            "raw":
            "SLC",
            "encoded":
            "101327353979588246869873249766058188995681113722618593621043638294296500696424"
        },
        "address1": {
            "raw":
            "101 Tela Lane",
            "encoded":
            "63690509275174663089934667471948380740244018358024875547775652380902762701972"
        },
        "state": {
            "raw":
            "UT",
            "encoded":
            "93856629670657830351991220989031130499313559332549427637940645777813964461231"
        },
        "Empty": {
            "raw":
            "",
            "encoded":
            "102987336249554097029535212322581322789799900648198034993379397001115665086549"
        },
        "Null": {
            "raw":
            None,
            "encoded":
            "99769404535520360775991420569103450442789945655240760487761322098828903685777"
        },
        "str None": {
            "raw":
            "None",
            "encoded":
            "99769404535520360775991420569103450442789945655240760487761322098828903685777"
        },
        "bool True": {
            "raw": True,
            "encoded": "1"
        },
        "bool False": {
            "raw": False,
            "encoded": "0",
        },
        "str True": {
            "raw":
            "True",
            "encoded":
            "27471875274925838976481193902417661171675582237244292940724984695988062543640"
        },
        "str False": {
            "raw":
            "False",
            "encoded":
            "43710460381310391454089928988014746602980337898724813422905404670995938820350"
        },
        "max i32": {
            "raw": 2147483647,
            "encoded": "2147483647"
        },
        "max i32 + 1": {
            "raw":
            2147483648,
            "encoded":
            "26221484005389514539852548961319751347124425277437769688639924217837557266135"
        },
        "min i32": {
            "raw": -2147483648,
            "encoded": "-2147483648"
        },
        "min i32 - 1": {
            "raw":
            -2147483649,
            "encoded":
            "68956915425095939579909400566452872085353864667122112803508671228696852865689"
        },
        "float 0.0": {
            "raw":
            0.0,
            "encoded":
            "62838607218564353630028473473939957328943626306458686867332534889076311281879"
        },
        "str 0.0": {
            "raw":
            "0.0",
            "encoded":
            "62838607218564353630028473473939957328943626306458686867332534889076311281879"
        },
        "chr 0": {
            "raw":
            chr(0),
            "encoded":
            "49846369543417741186729467304575255505141344055555831574636310663216789168157"
        },
        "chr 1": {
            "raw":
            chr(1),
            "encoded":
            "34356466678672179216206944866734405838331831190171667647615530531663699592602"
        },
        "chr 2": {
            "raw":
            chr(2),
            "encoded":
            "99398763056634537812744552006896172984671876672520535998211840060697129507206"
        }
    }

    for (tag, attr) in values.items():
        assert encode(attr['raw']) == attr['encoded']
        print('.. OK: {}'.format(tag))
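
The expected values above are the standard Aries Interop Profile attribute encodings: anything that fits a signed 32-bit integer (including its string form) passes through as a decimal string, and everything else becomes the big-endian integer of the SHA-256 digest of str(value). The sketch below reproduces that rule and the table above; it is not the invertible encode()/decode() pair of the previous example, and not necessarily von_anchor's exact code.

from hashlib import sha256

I32_BOUND = 2**31

def aries_encode(orig) -> str:
    # 32-bit ints (and bools) pass through as decimal strings.
    if isinstance(orig, (bool, int)) and -I32_BOUND <= int(orig) < I32_BOUND:
        return str(int(orig))
    try:
        i32 = int(str(orig))  # '87121' qualifies; '0.0', 0.0, 'True' do not
        if -I32_BOUND <= i32 < I32_BOUND:
            return str(i32)
    except (ValueError, TypeError):
        pass
    # Everything else: decimal form of the SHA-256 digest of str(orig).
    return str(int.from_bytes(sha256(str(orig).encode()).digest(), 'big'))

assert aries_encode('87121') == '87121'
assert aries_encode(True) == '1'
assert aries_encode(2147483647) == '2147483647'
assert aries_encode(2147483648) != '2147483648'   # out of i32 range: hashed
assert aries_encode(None) == aries_encode('None')  # both hash str(None)
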
Code example #24
async def test_ids():
    print(Ink.YELLOW('\n\n== Testing Identifier Checks =='))

    assert ok_wallet_reft('49ad0727-8663-45ae-a115-12b09860f9c6')
    assert not ok_wallet_reft('Q4zqM7aXqm7gDQkUVLng9I')
    assert not ok_wallet_reft('49ad0727-45ae-a115-12b09860f9c6')
    print('\n\n== 1 == Wallet referent identifier checks pass OK')

    assert ok_did('Q4zqM7aXqm7gDQkUVLng9h')
    assert not ok_did('Q4zqM7aXqm7gDQkUVLng9I')  # 'I' not a base58 char
    assert not ok_did('Q4zqM7aXqm7gDQkUVLng')  # too short
    print('\n\n== 2 == Distributed identifier checks pass OK')

    for value in (None, 'TRUSTEE', 'STEWARD', 'TRUST_ANCHOR', ''):
        assert ok_role(value)
    for value in (123, 'TRUSTY', 'STEW', 'ANCHOR', ' '):
        assert not ok_role(value)
    print('\n\n== 3 == Role identifier checks pass OK')

    assert Tails.ok_hash('Q4zqM7aXqm7gDQkUVLng9hQ4zqM7aXqm7gDQkUVLng9h')
    assert Tails.ok_hash('Q4zqM7aXqm7gDQkUVLng9hQ4zqM7aXqm7gDQkUVLng')
    assert not Tails.ok_hash('Q4zqM7aXqm7gDQkUVLng9h')
    assert not Tails.ok_hash('Q4zqM7aXqm7gDQkUVLng9hQ4zqM7aXqm7gDQkUVLng9hx')
    assert not Tails.ok_hash('Q4zqM7aXqm7gDQkUVLng9hQ4zqM7aXqm7gDQkUVLng90')
    print('\n\n== 4 == Tails hash identifier checks pass OK')

    assert ok_schema_id('Q4zqM7aXqm7gDQkUVLng9h:2:bc-reg:1.0')
    assert not ok_schema_id('Q4zqM7aXqm7gDQkUVLng9h:3:bc-reg:1.0')
    assert not ok_schema_id('Q4zqM7aXqm7gDQkUVLng9h::bc-reg:1.0')
    assert not ok_schema_id('Q4zqM7aXqm7gDQkUVLng9h:bc-reg:1.0')
    assert not ok_schema_id('Q4zqM7aXqm7gDQkUVLng9h:2:1.0')
    assert not ok_schema_id('Q4zqM7aXqm7gDQkUVLng9h:2::1.0')
    assert not ok_schema_id('Q4zqM7aXqm7gDQkUVLng9h:2:bc-reg:')
    assert not ok_schema_id('Q4zqM7aXqm7gDQkUVLng9h:2:bc-reg:1.0a')
    assert not ok_schema_id(
        'Q4zqM7aXqm7gDQkUVLng9I:2:bc-reg:1.0')  # I is not in base58
    print('\n\n== 5 == Schema identifier checks pass OK')

    assert ok_cred_def_id(
        'Q4zqM7aXqm7gDQkUVLng9h:3:CL:18:tag')  # protocol >= 1.4
    assert ok_cred_def_id(
        'Q4zqM7aXqm7gDQkUVLng9h:3:CL:Q4zqM7aXqm7gDQkUVLng9h:2:schema_name:1.0:tag'
    )  # long form
    assert ok_cred_def_id('Q4zqM7aXqm7gDQkUVLng9h:3:CL:18:tag',
                          'Q4zqM7aXqm7gDQkUVLng9h')  # issuer-did
    assert ok_cred_def_id(
        'Q4zqM7aXqm7gDQkUVLng9h:3:CL:Q999999999999999999999:2:schema_name:1.0:tag',
        'Q4zqM7aXqm7gDQkUVLng9h')  # long form, issuer-did
    assert not ok_cred_def_id('Q4zqM7aXqm7gDQkUVLng9h:3:CL:18:tag',
                              'Xxxxxxxxxxxxxxxxxxxxxx')
    assert not ok_cred_def_id(
        'Q4zqM7aXqm7gDQkUVLng9h:3:CL:Q4zqM7aXqm7gDQkUVLng9h:2:schema_name:1.0:tag',
        'Xxxxxxxxxxxxxxxxxxxxxx')  # long form, issuer-did
    assert ok_cred_def_id(
        'Q4zqM7aXqm7gDQkUVLng9h:3:CL:Q4zqM7aXqm7gDQkUVLng9h:2:schema_name:1.0:tag'
    )  # long form
    assert not ok_cred_def_id(
        'Q4zqM7aXqm7gDQkUVLng9h:3:CL:Q4zqM7aXqm7gDQkUVLng9h:schema_name:1.0:tag'
    )  # no :2:
    assert not ok_cred_def_id(
        'Q4zqM7aXqm7gDQkUVLng9h:3:CL:QIIIIIIIII7gDQkUVLng9h:schema_name:1.0:tag'
    )  # I not base58
    assert not ok_cred_def_id(
        'Q4zqM7aXqm7gDQkUVLng9h:3:CL:QIIIIIIIII7gDQkUVLng9h:schema_name:v1.0:tag'
    )  # bad version
    assert not ok_cred_def_id('Q4zqM7aXqm7gDQkUVLng9h:4:CL:18:0')
    assert not ok_cred_def_id('Q4zqM7aXqm7gDQkUVLng9h::CL:18:0')
    assert not ok_cred_def_id('Q4zqM7aXqm7gDQkUVLng9I:3:CL:18:tag')
    assert not ok_cred_def_id('Q4zqM7aXqm7gDQkUVLng9h:3::18:tag')
    assert not ok_cred_def_id('Q4zqM7aXqm7gDQkUVLng9h:3:18:tag')
    assert not ok_cred_def_id('Q4zqM7aXqm7gDQkUVLng9h:3:CL:18z:tag')
    assert ok_cred_def_id('Q4zqM7aXqm7gDQkUVLng9h:3:CL:18')  # protocol == 1.3
    assert ok_cred_def_id('Q4zqM7aXqm7gDQkUVLng9h:3:CL:18',
                          'Q4zqM7aXqm7gDQkUVLng9h')
    assert not ok_cred_def_id('Q4zqM7aXqm7gDQkUVLng9h:3:CL:18',
                              'Xxxxxxxxxxxxxxxxxxxxxx')
    assert ok_cred_def_id(
        rev_reg_id2cred_def_id(
            'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:Q4zqM7aXqm7gDQkUVLng9h:2:schema_name:1.0:tag:CL_ACCUM:1'
        ))
    print('\n\n== 6 == Credential definition identifier checks pass OK')

    assert ok_rev_reg_id(
        'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:20:tag:CL_ACCUM:1'
    )  # protocol >= 1.4
    assert ok_rev_reg_id(
        'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:20:tag:CL_ACCUM:1',
        'LjgpST2rjsoxYegQDRm7EL')
    assert ok_rev_reg_id(  # long form
        'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:Q4zqM7aXqm7gDQkUVLng9h:2:schema_name:1.0:tag:CL_ACCUM:1'
    )
    assert ok_rev_reg_id(  # long form
        'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:Q4zqM7aXqm7gDQkUVLng9h:2:schema_name:1.0:tag:CL_ACCUM:1',
        'LjgpST2rjsoxYegQDRm7EL')
    assert not ok_rev_reg_id(
        'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:20:tag:CL_ACCUM:1',
        'Xxxxxxxxxxxxxxxxxxxxxx')
    assert not ok_rev_reg_id(
        'LjgpST2rjsoxYegQDRm7EL:5:LjgpST2rjsoxYegQDRm7EL:3:CL:20:tag:CL_ACCUM:1'
    )
    assert not ok_rev_reg_id(
        'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:4:CL:20:0:CL_ACCUM:1')
    assert not ok_rev_reg_id(
        'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL::CL:20:0:CL_ACCUM:1')
    assert not ok_rev_reg_id(
        'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:NOT_CL:20:tag:CL_ACCUM:1'
    )
    assert not ok_rev_reg_id(
        'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:20z:tag:CL_ACCUM:1'
    )
    assert not ok_rev_reg_id(
        'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:20::CL_ACCUM:1')
    assert not ok_rev_reg_id(
        'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:20:tag::1')
    assert not ok_rev_reg_id(
        'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:20:tag:1')
    assert not ok_rev_reg_id(
        'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:20:tag:CL_ACCUM:'
    )
    assert not ok_rev_reg_id(
        'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:20:tag:CL_ACCUM')
    assert ok_rev_reg_id(
        'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:20:CL_ACCUM:1'
    )  # protocol == 1.3
    assert ok_rev_reg_id(
        'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:20:CL_ACCUM:1',
        'LjgpST2rjsoxYegQDRm7EL')
    assert not ok_rev_reg_id(
        'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:20:CL_ACCUM:1',
        'Xxxxxxxxxxxxxxxxxxxxxx')
    print('\n\n== 7 == Revocation registry identifier checks pass OK')
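
The identifiers checked above share a colon-delimited anatomy: a schema id is <origin DID>:2:<name>:<version>; a cred def id is <issuer DID>:3:CL:<schema seq no or long-form schema id>:<tag> (the tag is absent in the protocol-1.3 form); a rev reg id is <issuer DID>:4:<cred def id>:CL_ACCUM:<rev reg tag>. A short worked example of peeling the cred def id out of a rev reg id, in the spirit of rev_reg_id2cred_def_id() above (illustration only):

rr_id = 'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:20:tag:CL_ACCUM:1'

# Split off '<issuer DID>:4:' at the front and ':CL_ACCUM:<rev reg tag>' at
# the back; what remains is the embedded cred def id.
(issuer_did, marker, rest) = rr_id.split(':', 2)
assert marker == '4'
cd_id = rest.rsplit(':', 2)[0]
assert cd_id == 'LjgpST2rjsoxYegQDRm7EL:3:CL:20:tag'
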
Code example #25
File: test_server.py Project: sklump/von_tails
async def test_von_tails(pool_ip, genesis_txn_file, path_cli_ini, cli_ini,
                         path_setnym_ini, setnym_ini):

    print(
        Ink.YELLOW('\n\n== Testing tails server vs. IP {} =='.format(pool_ip)))

    # Set config for tails clients
    config = {}
    i = 0
    for profile in path_cli_ini:
        cli_config = inis2dict(str(path_cli_ini[profile]))
        config[profile] = cli_config
        with open(path_cli_ini[profile], 'r') as fh_cfg:
            print('\n\n== 0.{} == {} tails sync configuration:\n{}'.format(
                i, profile, fh_cfg.read()))
        i += 1

    # Start tails server
    print('\n\n== 1 == Starting tails server on port {}'.format(
        config['issuer']['Tails Server']['port']))
    tsrv = TailsServer(config['issuer']['Tails Server']['port'])
    started = tsrv.start()
    if not started:
        print(
            '\n\n== X == Server already running - stop it to run test from scratch'
        )
        assert False

    assert tsrv.is_up()
    print(
        '\n\n== 2 == Started tails server, docker-compose port-forwarded via localhost:{}'
        .format(tsrv.port))
    atexit.register(shutdown)

    # Set nyms (operation creates pool if need be)
    i = 0
    setnym_config = {}
    for profile in path_setnym_ini:
        cli_config = inis2dict(str(path_setnym_ini[profile]))
        if profile == 'admin':  # tails server anchor on ledger a priori
            continue
        setnym_config[profile] = cli_config
        with open(path_setnym_ini[profile], 'r') as fh_cfg:
            print('\n\n== 3.{} == {} setnym configuration:\n{}'.format(
                i, profile, fh_cfg.read()))
        sub_proc = subprocess.run(
            ['von_anchor_setnym',
             str(path_setnym_ini[profile])],
            stdout=subprocess.PIPE,
            stderr=subprocess.DEVNULL)
        assert not sub_proc.returncode
        i += 1
    print('\n\n== 4 == Setnym ops completed OK')

    # wallets = {profile: Wallet(setnym_config[profile]['VON Anchor']['name']) for profile in setnym_config}
    # wallets['admin'] = Wallet(config['admin']['VON Anchor']['name'])
    wallets = await get_wallets(
        {
            **{
                profile: setnym_config[profile]['VON Anchor']
                for profile in setnym_config
            }, 'admin': config['admin']['VON Anchor']
        },
        open_all=False)

    # Open pool and anchors, issue creds to create tails files
    async with wallets['issuer'] as w_issuer, (
        wallets['prover']) as w_prover, (NodePool(
            config['issuer']['Node Pool']['name'])) as pool, (RegistrarAnchor(
                w_issuer, pool)) as ian, (OrgBookAnchor(w_prover,
                                                        pool)) as pan:

        # Get nyms from ledger for display
        i = 0
        for an in (ian, pan):
            print('\n\n== 5.{} == {} nym on ledger: {}'.format(
                i, an.wallet.name, ppjson(await an.get_nym())))
            i += 1

        # Publish schema to ledger
        S_ID = schema_id(ian.did, 'rainbow', '{}.0'.format(int(time())))
        schema_data = {
            'name': schema_key(S_ID).name,
            'version': schema_key(S_ID).version,
            'attr_names': ['numeric', 'sha256']
        }

        S_KEY = schema_key(S_ID)
        try:
            await ian.get_schema(S_KEY)  # may exist (almost certainly not)
        except AbsentSchema:
            await ian.send_schema(json.dumps(schema_data))
        schema_json = await ian.get_schema(S_KEY)
        schema = json.loads(schema_json)
        print('\n\n== 6 == SCHEMA [{} v{}]: {}'.format(S_KEY.name,
                                                       S_KEY.version,
                                                       ppjson(schema)))
        assert schema  # should exist now

        # Setup link secret for creation of cred req or proof
        await pan.create_link_secret('LinkSecret')

        # Issuer anchor create, store, publish cred definitions to ledger; create cred offers
        await ian.send_cred_def(S_ID, revo=True)

        cd_id = cred_def_id(S_KEY.origin_did, schema['seqNo'], pool.protocol)

        assert ((not Tails.unlinked(ian.dir_tails)) and
                [f for f in Tails.links(ian.dir_tails, ian.did) if cd_id in f])

        cred_def_json = await ian.get_cred_def(cd_id)  # ought to exist now
        cred_def = json.loads(cred_def_json)
        print('\n\n== 7.0 == Cred def [{} v{}]: {}'.format(
            S_KEY.name, S_KEY.version, ppjson(json.loads(cred_def_json))))
        assert cred_def.get('schemaId', None) == str(schema['seqNo'])

        cred_offer_json = await ian.create_cred_offer(schema['seqNo'])
        cred_offer = json.loads(cred_offer_json)
        print('\n\n== 7.1 == Credential offer [{} v{}]: {}'.format(
            S_KEY.name, S_KEY.version, ppjson(cred_offer_json)))

        (cred_req_json, cred_req_metadata_json) = await pan.create_cred_req(
            cred_offer_json, cd_id)
        cred_req = json.loads(cred_req_json)
        print('\n\n== 8 == Credential request [{} v{}]: metadata {}, cred {}'.
              format(S_KEY.name, S_KEY.version, ppjson(cred_req_metadata_json),
                     ppjson(cred_req_json)))
        assert json.loads(cred_req_json)

        # Issuer anchor issues creds and stores at HolderProver: get cred req, create cred, store cred
        cred_data = []

        CREDS = 450  # enough to build 4 rev regs
        print('\n\n== 9 == creating and storing {} credentials:'.format(CREDS))
        for number in range(CREDS):
            (cred_json, _) = await ian.create_cred(
                cred_offer_json, cred_req_json, {
                    'numeric': str(number),
                    'sha256': sha256(str(number).encode()).hexdigest(),
                })

            cred_id = await pan.store_cred(cred_json, cred_req_metadata_json)
            print('.',
                  end='' if (number + 1) % 100 else '{}\n'.format(number + 1),
                  flush=True)

        # Exercise list view, least to most specific
        for tails_list_path in ('all', ian.did, cd_id):
            url = url_for(tsrv.port, 'tails/list/{}'.format(tails_list_path))
            r = requests.get(url)
            assert r.status_code == 200
            assert not r.json()
        rr_ids_up = {
            basename(link)
            for link in Tails.links(ian.dir_tails, ian.did)
        }
        for rr_id in rr_ids_up:
            url = url_for(tsrv.port, 'tails/list/{}'.format(rr_id))
            r = requests.get(url)
            assert r.status_code == 200
            assert not r.json()
        print(
            '\n\n== 10 == All listing views at server come back OK and empty as expected'
        )

        rv = pexpect.run('python ../src/sync/sync.py {}'.format(
            path_cli_ini['issuer']))
        print('\n\n== 11 == Issuer sync uploaded local tails files')

        for tails_list_path in ('all', ian.did, cd_id):
            url = url_for(tsrv.port, 'tails/list/{}'.format(tails_list_path))
            r = requests.get(url)
            assert r.status_code == 200
            assert {rr for rr in r.json()} == rr_ids_up
        for rr_id in rr_ids_up:
            url = url_for(tsrv.port, 'tails/list/{}'.format(rr_id))
            r = requests.get(url)
            assert r.status_code == 200
            assert r.json() == [rr_id]  # list with one rr_id should come back

        # Exercise list view, least to most specific
        for tails_list_path in ('all', ian.did, cd_id):
            url = url_for(tsrv.port, 'tails/list/{}'.format(tails_list_path))
            r = requests.get(url)
            assert r.status_code == 200
            assert len(r.json()) == len(rr_ids_up)
        print(
            '\n\n== 12 == All listing views at server come back OK with {} uploaded files'
            .format(len(rr_ids_up)))

        rv = pexpect.run('python ../src/sync/sync.py {}'.format(
            path_cli_ini['prover']))
        print('\n\n== 13 == Prover sync downloaded remote tails files')

        rr_ids_down = {
            basename(link)
            for link in Tails.links(
                config['prover']['Tails Client']['tails.dir'], ian.did)
        }
        assert rr_ids_down == rr_ids_up

        # Exercise admin-delete
        rv = pexpect.run('python ../src/admin/delete.py {} all'.format(
            path_cli_ini['admin']))
        print('\n\n== 14 == Admin called for deletion at tails server')

        # Check tails server deletion
        url = url_for(tsrv.port, 'tails/list/all')
        r = requests.get(url)
        assert r.status_code == 200
        assert not r.json()
        print(
            '\n\n== 15 == All listing views at server come back OK and empty as expected'
        )

        rv = pexpect.run('python ../src/sync/multisync.py 1 {}'.format(
            path_cli_ini['issuer']))
        print(
            '\n\n== 16 == Issuer multisync on 1 sync iteration uploaded local tails files'
        )

        for tails_list_path in ('all', ian.did, cd_id):
            url = url_for(tsrv.port, 'tails/list/{}'.format(tails_list_path))
            r = requests.get(url)
            assert r.status_code == 200
            assert {rr for rr in r.json()} == rr_ids_up
        for rr_id in rr_ids_up:
            url = url_for(tsrv.port, 'tails/list/{}'.format(rr_id))
            r = requests.get(url)
            assert r.status_code == 200
            assert r.json() == [rr_id]  # list with one rr_id should come back

        # Exercise list view, least to most specific
        for tails_list_path in ('all', ian.did, cd_id):
            url = url_for(tsrv.port, 'tails/list/{}'.format(tails_list_path))
            r = requests.get(url)
            assert r.status_code == 200
            assert len(r.json()) == len(rr_ids_up)
        print(
            '\n\n== 17 == All listing views at server come back OK with {} uploaded files'
            .format(len(rr_ids_up)))

        # Remove tails server anchor wallet
        await wallets['admin'].remove()
        print('\n\n== 18 == Removed admin (tails server anchor {}) wallet'.
              format(wallets['admin'].name))
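
The listing views exercised above run from least to most specific: all, issuer DID, cred def id, rev reg id, each returning a JSON list of rev reg ids (possibly empty). A sketch of the URL shapes, with an assumed localhost base in place of the url_for() helper:

base = 'http://localhost:6543'  # assumed port, for illustration only
issuer_did = 'LjgpST2rjsoxYegQDRm7EL'
cd_id = 'LjgpST2rjsoxYegQDRm7EL:3:CL:20:tag'
rr_id = 'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:20:tag:CL_ACCUM:1'

# Each GET on these views returns HTTP 200 with a JSON list of rev reg ids.
listing_urls = ['{}/tails/list/{}'.format(base, spec)
                for spec in ('all', issuer_did, cd_id, rr_id)]
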
Code example #26
File: test_canon.py  Project: swcurran/von_anchor
async def test_canon_non_secret_wql():
    print(Ink.YELLOW('\n\n== Testing non-secret WQL canonicalization =='))
    invariant = [
        {},
        {
            '$or': [{
                '~redness': '0'
            }, {
                '~redness': {
                    '$gte': '204'
                }
            }]
        },
        {  # and
            'name': {
                '$in': [
                    'blue', 'vermilion', 'crimson', 'scarlet', 'claret',
                    'burgundy'
                ]
            },
            '~hello': {
                '$like': 'world%'
            }
        }
    ]

    assert all(canon_non_secret_wql(q) == q for q in invariant)
    print(
        '\n\n== Canonicalization for invariant non-secret WQL works as expected'
    )

    # simplest case
    q = {'item': 1}
    canon_q = canon_non_secret_wql(q)
    assert all(canon_q[k] == raw(q[k]) for k in q)

    # and
    q = {'yes': 1, 'no': 0}
    canon_q = canon_non_secret_wql(q)
    assert all(canon_q[k] == raw(q[k]) for k in q)

    # or
    q = {'$or': [{'YES': '1'}, {'NO': '0'}]}
    canon_q = canon_non_secret_wql(q)
    assert canon_q['$or'] == [
        {
            'YES': '1'
        },
        {
            'NO': '0'
        }  # do not touch tag names
    ]

    # and, not, like
    q = {
        '~value': {
            '$like': '%'
        },
        '$not': {
            '$or': [
                {
                    '~value': 0
                },
                {
                    '~value': 1
                },
                {
                    '~value': {
                        '$gt': 10
                    }
                },
                {
                    '~value': {
                        '$in': [-3, -7]
                    }
                },
            ]
        }
    }
    canon_q = canon_non_secret_wql(q)
    assert canon_q['~value'] == {'$like': '%'}
    canon_q.pop('~value')
    assert canon_q['$not']['$or'] == [
        {
            '~value': '0'
        },
        {
            '~value': '1'
        },
        {
            '~value': {
                '$gt': '10'
            }
        },
        {
            '~value': {
                '$in': ['-3', '-7']
            }
        },
    ]
    canon_q.pop('$not')
    assert not canon_q

    # bad 'or'
    q = {'$or': {'~value': 0, '~value': 1}}
    try:
        canon_q = canon_non_secret_wql(q)
        assert False
    except BadWalletQuery:
        pass

    print('\n\n== Canonicalization for non-secret WQL works as expected')
Code example #27
File: test_pool.py  Project: swcurran/von_anchor
async def test_manager(path_home, pool_genesis_txn_data, pool_ip):

    print(
        Ink.YELLOW(
            '\n\n== Testing Node Pool Manager vs. IP {} =='.format(pool_ip)))

    # Create node pool manager
    p_mgr = NodePoolManager()
    p_mgr.protocol = Protocol.V_18
    assert p_mgr.protocol == Protocol.DEFAULT  # 1.8 is the default protocol

    # Create new pool on raw data
    name = 'pool-{}'.format(int(time()))
    assert name not in await p_mgr.list()
    print('\n\n== 1 == Pool {} not initially configured'.format(name))

    try:  # exercise bad pool addition
        await p_mgr.add_config(name, 'Not genesis transaction data')
        assert False
    except AbsentPool:
        pass

    await p_mgr.add_config(name, pool_genesis_txn_data)
    assert name in await p_mgr.list()
    print(
        '\n\n== 2 == Added pool {} configuration on genesis transaction data'.
        format(name))

    try:
        await p_mgr.add_config(name, pool_genesis_txn_data)
        assert False
    except ExtantPool:
        pass

    try:
        pool = p_mgr.get('no-such-pool.{}'.format(int(time())))
        await pool.open()
        assert False
    except AbsentPool:
        pass

    pool = p_mgr.get(name)
    await pool.open()
    await pool.refresh()
    assert pool.handle is not None
    await pool.close()
    print(
        '\n\n== 3 == Opened, refreshed, and closed pool {} on default configuration'
        .format(name))

    pool = p_mgr.get(name, {'timeout': 3600, 'extended_timeout': 7200})
    await pool.open()
    await pool.refresh()
    assert pool.handle is not None
    await pool.close()
    print(
        '\n\n== 4 == Opened, refreshed, and closed pool {} on explicit configuration'
        .format(name))

    await p_mgr.remove(name)
    assert name not in await p_mgr.list()
    print('\n\n== 5 == Removed pool {} configuration'.format(name))

    with NamedTemporaryFile(mode='w+b', buffering=0) as fh_gen:
        fh_gen.write(pool_genesis_txn_data.encode())
        await p_mgr.add_config(name, fh_gen.name)
    assert name in await p_mgr.list()
    print(
        '\n\n== 6 == Added pool {} configuration on genesis transaction file'.
        format(name))

    pool = p_mgr.get(name, {'timeout': 3600, 'extended_timeout': 7200})
    await pool.open()
    await pool.refresh()
    assert pool.handle is not None
    try:
        await p_mgr.remove(name)  # exercise non-removal of open pool
        assert False
    except ExtantPool:
        pass
    await pool.close()
    print(
        '\n\n== 7 == Opened, refreshed, and closed pool {} on explicit configuration'
        .format(name))

    await p_mgr.remove(name)
    assert name not in await p_mgr.list()
    print('\n\n== 8 == Removed pool {} configuration'.format(name))
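Distilled from steps == 1 == through == 8 == above, a minimal NodePoolManager round trip might look like the sketch below (the von_anchor.nodepool import path is an assumption here; a reachable ledger matching the genesis transactions is required):

from von_anchor.nodepool import NodePoolManager

async def pool_round_trip(name, genesis_txn_data):
    # configure, open, use, and discard a pool in one pass
    p_mgr = NodePoolManager()
    if name not in await p_mgr.list():
        await p_mgr.add_config(name, genesis_txn_data)
    pool = p_mgr.get(name, {'timeout': 3600, 'extended_timeout': 7200})
    await pool.open()
    await pool.refresh()
    assert pool.handle is not None
    await pool.close()
    await p_mgr.remove(name)  # raises ExtantPool if attempted while the pool is still open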
Code example #28
File: test_canon.py  Project: swcurran/von_anchor
async def test_canon_pairwise_wql():
    print(Ink.YELLOW('\n\n== Testing pairwise WQL canonicalization =='))
    invariant = [
        {
            '$or': [{
                '~redness': '0'
            }, {
                '~redness': {
                    '$gte': '204'
                }
            }]
        },
        {  # and
            '~name': {
                '$in': [
                    'blue', 'vermilion', 'crimson', 'scarlet', 'claret',
                    'burgundy'
                ]
            },
            '~hello': {
                '$like': 'world%'
            }
        }
    ]

    assert all(canon_pairwise_wql(q) == q for q in invariant)
    print(
        '\n\n== Canonicalization for invariant pairwise WQL works as expected')

    # empty query
    assert canon_pairwise_wql({}) == {'~their_did': {'$neq': ''}}

    # simplest case
    q = {'item': 1}
    canon_q = canon_pairwise_wql(q)
    assert all(canon_q[canon_pairwise_tag(k)] == raw(q[k]) for k in q)

    # and
    q = {'yes': 1, 'no': 0}
    canon_q = canon_pairwise_wql(q)
    assert all(canon_q[canon_pairwise_tag(k)] == raw(q[k]) for k in q)

    # or
    q = {'$or': [{'YES': '1'}, {'NO': '0'}]}
    canon_q = canon_pairwise_wql(q)
    assert canon_q['$or'] == [
        {
            '~YES': '1'
        },
        {
            '~NO': '0'
        }  # mark for unencrypted storage
    ]

    # and, not, like
    q = {
        '~value': {
            '$like': '%'
        },
        '$not': {
            '$or': [
                {
                    'value': 0
                },
                {
                    'value': 1
                },
                {
                    '~value': {
                        '$gt': 10
                    }
                },
                {
                    '~value': {
                        '$in': [-3, -7]
                    }
                },
            ]
        }
    }
    canon_q = canon_pairwise_wql(q)
    assert canon_q['~value'] == {'$like': '%'}
    canon_q.pop('~value')
    assert canon_q['$not']['$or'] == [
        {
            '~value': '0'
        },
        {
            '~value': '1'
        },
        {
            '~value': {
                '$gt': '10'
            }
        },
        {
            '~value': {
                '$in': ['-3', '-7']
            }
        },
    ]
    canon_q.pop('$not')
    assert not canon_q

    # bad 'or'
    q = {'$or': {'~value': 0, '~value': 1}}
    try:
        canon_q = canon_pairwise_wql(q)
        assert False
    except BadWalletQuery:
        pass

    print('\n\n== Canonicalization for pairwise WQL works as expected')
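Read against the non-secret test above, the difference is that canon_pairwise_wql marks bare tag names for unencrypted storage with a '~' prefix, while canon_non_secret_wql leaves tag names untouched; both render values through raw(). A minimal side-by-side, assuming von_anchor.canon as the import path:

from von_anchor.canon import canon_non_secret_wql, canon_pairwise_wql

q = {'item': 1}
assert canon_non_secret_wql(q) == {'item': '1'}  # value stringified, tag name untouched
assert canon_pairwise_wql(q) == {'~item': '1'}   # '~' marks the tag for unencrypted storage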
Code example #29
File: test_pool.py  Project: swcurran/von_anchor
async def test_protocol():

    print(Ink.YELLOW('\n\n== Testing Node Pool Protocols =='))

    # all the same except indy-node 1.3
    assert Protocol.V_13.indy() != Protocol.V_14.indy()
    assert Protocol.V_14.indy() == Protocol.V_15.indy()
    assert Protocol.V_15.indy() == Protocol.V_16.indy()
    assert Protocol.V_16.indy() == Protocol.V_17.indy()
    assert Protocol.V_17.indy() == Protocol.V_18.indy()
    assert Protocol.V_18.indy() == Protocol.DEFAULT.indy()
    assert Protocol.get('1.8') == Protocol.DEFAULT
    print('\n\n== 1 == Protocol enum values correspond OK to indy values')

    issuer_did = 'ZqhtaRvibYPQ23456789ee'
    seq_no = 123
    assert Protocol.V_13.cred_def_id(issuer_did, seq_no) == (
        '{}:3:CL:{}'.format(issuer_did, seq_no))
    assert Protocol.DEFAULT.cred_def_id(issuer_did, seq_no) == (
        '{}:3:CL:{}:tag'.format(issuer_did, seq_no))

    # indy-sdk refuses an empty string on issue-cred-def, hence 'tag' when not for a box id
    assert Protocol.V_13.cd_id_tag(for_box_id=True) == ''
    assert Protocol.V_13.cd_id_tag(for_box_id=False) == 'tag'
    assert Protocol.DEFAULT.cd_id_tag(for_box_id=True) == ':tag'
    assert Protocol.DEFAULT.cd_id_tag(for_box_id=False) == 'tag'
    print(
        '\n\n== 2 == Protocol enum values build cred def id and tags as expected'
    )

    txn_13 = json.loads('''{
        "op": "REPLY",
        "result": {
            "data": {
                "identifier": "WgWxqztrNooG92RXvxSTWv",
                "data": {
                    "name": "green",
                    "version": "1.0",
                    "...": "..."
                },
                "...": "..."
            },
            "txnTime": 1234567890,
            "...": "..."
        },
        "...": "..."
    }''')
    assert json.loads(
        Protocol.V_13.txn2data(txn_13)) == txn_13['result']['data']
    assert Protocol.V_13.txn2epoch(txn_13) == 1234567890
    assert Protocol.V_13.txn_data2schema_key(
        json.loads(Protocol.V_13.txn2data(txn_13))) == SchemaKey(
            'WgWxqztrNooG92RXvxSTWv', 'green', '1.0')

    txn_18 = json.loads('''{
        "op": "REPLY",
        "result": {
            "data": {
                "txn": {
                    "data": {
                        "data": {
                            "name": "green",
                            "version": "1.0",
                            "...": "..."
                        }
                    },
                    "metadata": {
                        "from": "WgWxqztrNooG92RXvxSTWv",
                        "...": "..."
                    },
                    "...": "..."
                },
                "...": "..."
            },
            "txnMetadata": {
                "txnTime": 1234567890,
                "...": "..."
            },
            "...": "..."
        },
        "...": "..."
    }''')
    assert json.loads(
        Protocol.DEFAULT.txn2data(txn_18)) == txn_18['result']['data']['txn']
    assert Protocol.DEFAULT.txn2epoch(txn_18) == 1234567890
    assert Protocol.DEFAULT.txn_data2schema_key(
        json.loads(Protocol.DEFAULT.txn2data(txn_18))) == SchemaKey(
            'WgWxqztrNooG92RXvxSTWv', 'green', '1.0')
    print(
        '\n\n== 3 == Protocol enum values extricate transaction data as expected'
    )
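Because txn2data, txn2epoch, and txn_data2schema_key absorb the layout differences between protocol versions, a caller can stay protocol-agnostic. The helper below is hypothetical (not part of von_anchor) and relies only on the calls exercised above:

import json

def schema_key_and_time(proto, reply):
    # hypothetical convenience wrapper over the Protocol enum methods above
    data = json.loads(proto.txn2data(reply))
    return proto.txn_data2schema_key(data), proto.txn2epoch(reply)

# e.g. both schema_key_and_time(Protocol.V_13, txn_13) and
# schema_key_and_time(Protocol.DEFAULT, txn_18) yield
# (SchemaKey('WgWxqztrNooG92RXvxSTWv', 'green', '1.0'), 1234567890)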
Code example #30
File: test_canon.py  Project: swcurran/von_anchor
async def test_canon_cred_wql():

    print(Ink.YELLOW('\n\n== Testing credential WQL canonicalization =='))

    invariant = [
        {},
        {
            'attr::test::marker': '1'
        },
        {
            'schema_id': None
        },
        {
            '$or': [{
                'attr::test::value': '0'
            }, {
                'attr::test::value': {
                    '$gt': '10'
                }
            }, {
                'attr::test::value': {
                    '$lt': '-10'
                }
            }]
        },
        {  # and
            'attr::test::marker': '1',
            'attr::test::value': {
                '$in': ['1', '2', '3', '5', '8', '13']
            },
            'attr::another::value': {
                '$like': 'hello%'
            }
        }
    ]

    assert all(canon_cred_wql(q) == q for q in invariant)
    print(
        '\n\n== Canonicalization for invariant credential WQL works as expected'
    )

    # simplest case
    q = {'attr::testAttributeName::marker': 1}
    canon_q = canon_cred_wql(q)
    assert all(canon_q[canon(k)] == raw(q[k]) for k in q)

    # and
    q = {
        'attr::testAttributeName::marker': 1,
        'attr::testAttributeName::value': 0
    }
    canon_q = canon_cred_wql(q)
    assert all(canon_q[canon(k)] == raw(q[k]) for k in q)

    # or
    q = {
        '$or': [{
            'attr::testAttributeName::value': 0
        }, {
            'attr::testAttributeName::value': 1
        }, {
            'attr::testAttributeName::value': 2
        }]
    }
    canon_q = canon_cred_wql(q)
    assert canon_q['$or'] == [
        {
            'attr::testattributename::value': '0'
        },
        {
            'attr::testattributename::value': '1'
        },
        {
            'attr::testattributename::value': '2'
        }  # canonicalize tag names
    ]

    # and, not, like
    q = {
        'attr::testAttributeName::value': {
            '$like': '%'
        },
        '$not': {
            '$or': [
                {
                    'attr::testAttributeName::value': 0
                },
                {
                    'attr::testAttributeName::value': 1
                },
                {
                    'attr::testAttributeName::value': {
                        '$gt': 10
                    }
                },
                {
                    'attr::testAttributeName::value': {
                        '$in': [-3, -7]
                    }
                },
            ]
        }
    }
    canon_q = canon_cred_wql(q)
    assert canon_q['attr::testattributename::value'] == {'$like': '%'}
    canon_q.pop('attr::testattributename::value')
    assert canon_q['$not']['$or'] == [
        {
            'attr::testattributename::value': '0'
        },
        {
            'attr::testattributename::value': '1'
        },
        {
            'attr::testattributename::value': {
                '$gt': '10'
            }
        },
        {
            'attr::testattributename::value': {
                '$in': ['-3', '-7']
            }
        },
    ]
    canon_q.pop('$not')
    assert not canon_q

    # bad 'or'
    q = {
        '$or': {
            'attr::testAttributeName::value': 0,
            'attr::testAttributeName::value': 1
        }
    }
    try:
        canon_q = canon_cred_wql(q)
        assert False
    except BadWalletQuery:
        pass

    print('\n\n== Canonicalization for credential WQL works as expected')
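As the assertions above show, credential WQL canonicalization differs from the other two variants only in its tag names: the attribute name embedded in attr::<name>::value and attr::<name>::marker tags is lower-cased, while values are stringified as usual. A one-line illustration with a made-up attribute name, assuming von_anchor.canon as the import path:

from von_anchor.canon import canon_cred_wql

assert canon_cred_wql({'attr::GivenName::value': 0}) == {'attr::givenname::value': '0'}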