def _main():
    """
    Run this test case.

    If a URL is specified, then the integration test will run pointing to that
    ledger implementation. If a URL is _not_ supplied, then a local sandbox is
    spun up on a random port, the integration test is run, and the sandbox is
    shut down when it is complete.
    """
    import argparse
    argparser = argparse.ArgumentParser()
    argparser.add_argument('--sandbox-version')
    # --url and --port are mutually exclusive: either connect to an existing
    # ledger or spin up a fresh sandbox on the given port.
    backend_args = argparser.add_mutually_exclusive_group()
    backend_args.add_argument('--url', required=False,
                              help='The URL of the *existing* server to connect to')
    backend_args.add_argument('--port', required=False, type=int,
                              help='The port to spin up a NEW Sandbox')
    argparser.add_argument('--keep-alive', action='store_true')
    args = argparser.parse_args()

    if args.url:
        LOG.info('Running the test against an existing sandbox: %s', args.url)
        # Fix: forward keep_alive here too so that --keep-alive is not
        # silently ignored when targeting an existing ledger. The default
        # (False, from store_true) keeps the old behavior when the flag is
        # absent.
        run_test(args.url, args.keep_alive)
    else:
        LOG.info('Spinning up a local sandbox as part of the test...')
        # Prefer the compiled DAR when it exists; fall back to the .daml source.
        with sandbox(dar_file if dar_file.exists() else daml_file,
                     port=args.port,
                     backend=args.sandbox_version,
                     extra_args=['-w']) as damli_proc:
            run_test(damli_proc.url, args.keep_alive)
def test_select_reflects_archive_events(self):
    """
    Verify that select() reflects archive events: every on_created callback
    for an OperatorNotification sees exactly the full set of notifications.
    """
    notification_count = 3
    # we expect that, upon each on_created notification of an OperatorNotification contract,
    # when we query the ACS, we get precisely the same number of contracts.
    expected_select_count = notification_count * notification_count
    actual_select_count = 0

    def on_notification_contract(_, __):
        # Count how many notifications the ACS reports at the moment each
        # notification's on_created fires.
        nonlocal actual_select_count
        actual_select_count += len(party_client.select(OperatorNotification))

    with sandbox(DAML_FILE) as proc:
        with create_client(participant_url=proc.url, parties=[PARTY]) as client:
            party_client = client.client(PARTY)
            party_client.on_ready(lambda *args, **kwargs: create(
                OperatorRole, {'operator': PARTY}))
            # Fix: use notification_count rather than a hard-coded 3 so the
            # expected_select_count computation above stays in sync if the
            # count is ever changed.
            party_client.on_created(
                OperatorRole, lambda cid, cdata: exercise(
                    cid, 'PublishMany', dict(count=notification_count)))
            party_client.on_created(
                OperatorNotification, lambda cid, _: exercise(cid, 'Archive'))
            party_client.on_created(OperatorNotification, on_notification_contract)
            client.run_until_complete()
            # All notifications were archived immediately, so the final ACS
            # must be empty.
            final_select_count = len(party_client.select(OperatorNotification))
            self.assertEqual(actual_select_count, expected_select_count)
            self.assertEqual(0, final_select_count)
def main():
    """
    Main method: run the sample application against a fresh sandbox and dump
    all captured ledger data when it finishes (even on failure).
    """
    with sandbox(daml_file) as damli_proc, \
            create_client(participant_url=damli_proc.url,
                          parties=['POSTMAN'] + [m['party_name'] for m in MEMBERS]) \
            as client:
        app = set_up()

        # Print the "app" on the command line.
        print(app)

        register_application(client, app)

        # Set up a plugin that will simply capture all data for later output on
        # the console.
        inspector = LedgerCapturePlugin.stdout()
        client.register(inspector)

        # Expect program that is expected to terminate. It will be considered done
        # only once all pending commands have been completed (including any of their
        # follow-ups)
        try:
            client.run_until_complete()
        finally:
            # Always dump the captured data, even if the run raised.
            # (Removed a dead trailing `pass` that followed this block.)
            inspector.dump_all()
def test_static_dump_and_tail(self):
    """Dump the initial ACS, then tail creates while contracts trickle in."""
    with sandbox(daml_path=DAML_PATH) as proc:
        seen_contracts = []
        network = Network()
        network.set_config(url=proc.url)
        client = network.simple_party('PARTY')

        @client.ledger_ready()
        def dump_initial_acs(event):
            # Show whatever is active the moment the ledger is ready.
            print(event.acs_find_active('*'))

        @client.ledger_created('*')
        def tail_creates(event):
            # Log and remember every contract created after startup.
            print(event.cid, event.cdata)
            seen_contracts.append(event.cid)

        async def publish_some_contracts():
            aclient = network.aio_party('PARTY')
            for i in range(5):
                sleep_interval = random()
                print(f'Publishing contract {i}, then sleeping for {sleep_interval} seconds...')
                await aclient.submit_create('Main.PostmanRole', {'postman': 'PARTY'})
                await sleep(sleep_interval)
            # All contracts published; stop the event loop.
            network.shutdown()

        network.run_forever(publish_some_contracts())
def test_some_party_receives_public_contract(self):
    """
    SOME_PARTY should observe both its private contract and the published
    public contract; PUBLISHER should observe only the public one.
    """
    some_party_cids = []
    publisher_cids = []

    def record_into(bucket):
        # Handler factory: append each created contract's id to `bucket`.
        return lambda e: bucket.append(e.cid)

    with sandbox(AllParty, extra_args=None) as proc:
        network = Network()
        network.set_config(url=proc.url, party_groups=[ALL_PARTY])

        some_client = network.aio_party(SOME_PARTY)
        some_client.add_ledger_ready(
            lambda _: create(PrivateContract, {'someParty': SOME_PARTY}))

        publisher_client = network.aio_party(PUBLISHER)
        publisher_client.add_ledger_ready(
            lambda _: create(PublicContract,
                             {'publisher': PUBLISHER, 'allParty': ALL_PARTY}))

        # Track both template types for both parties; visibility rules decide
        # which events actually arrive.
        some_client.add_ledger_created(PublicContract, record_into(some_party_cids))
        some_client.add_ledger_created(PrivateContract, record_into(some_party_cids))
        publisher_client.add_ledger_created(PublicContract, record_into(publisher_cids))
        publisher_client.add_ledger_created(PrivateContract, record_into(publisher_cids))

        network.run_until_complete()

        print(
            f'got to the end with some_party contracts: {some_party_cids} and publisher contracts: {publisher_cids}'
        )
        self.assertEqual(len(some_party_cids), 2)
        self.assertEqual(len(publisher_cids), 1)
def test_threadsafe_methods(self):
    """Drive the blocking (thread-safe) client API through create/exercise/query."""
    with sandbox(DAML_FILE) as proc:
        with simple_client(proc.url, PARTY) as client:
            client.ready()
            client.submit_create(OperatorRole, {'operator': PARTY})
            operator_cid, _ = client.find_one(OperatorRole)

            client.submit_exercise(operator_cid, 'PublishMany', dict(count=5))
            notifications = client.find_nonempty(
                OperatorNotification, {'operator': PARTY}, min_count=5)

            # Archive every notification whose text is 3 or less.
            doomed = [cid for cid, cdata in notifications.items()
                      if int(cdata['text']) <= 3]
            client.submit([exercise(cid, 'Archive') for cid in doomed])

            client.submit_exercise(operator_cid, 'PublishMany', dict(count=3))
            print(client.find_active('*'))
def test_complicated_map_support():
    """Round-trip a contract whose map fields use non-text (record/variant/map) keys."""
    # Note: Python `dict`s are not hashable, so the only way to write these
    # keys out is with `frozendict`.
    payload = {
        'party': 'Test',
        'keyIsMap': {frozendict(A='b'): 'mmm'},
        'keyIsRecord': {frozendict(x=2, y=4): 'rrr'},
        'keyIsRecordWithTypeParam': {frozendict(x=2, y=4): 'rrr'},
        'keyIsVariant': {frozendict(Apple=''): 'ttt'},
    }
    with sandbox(MapSupport) as proc:
        with simple_client(url=proc.url, party='Test') as client:
            client.ready()
            client.submit_create('MapSupport.ComplicatedSample', payload)
            logging.info(client.find_active('*'))
def test_select_reflects_archive_events(self):
    """
    Asyncio-API variant: verify find_active reflects archive events — each
    notification's created-handler sees the full set of notifications.
    """
    notification_count = 3
    # we expect that, upon each on_created notification of an OperatorNotification
    # contract, when we query the ACS, we get precisely the same number of contracts.
    expected_select_count = notification_count * notification_count
    actual_select_count = 0

    def on_notification_contract(event):
        # Count the notifications visible in the ACS at this event.
        nonlocal actual_select_count
        actual_select_count += len(event.acs_find_active(OperatorNotification))

    with sandbox(Simple) as proc:
        network = Network()
        network.set_config(url=proc.url)
        party_client = network.aio_party(PARTY)
        party_client.add_ledger_ready(
            lambda e: create(OperatorRole, {'operator': PARTY}))
        # Fix: use notification_count rather than a hard-coded 3 so the
        # expected_select_count computation above stays in sync.
        party_client.add_ledger_created(
            OperatorRole,
            lambda e: exercise(e.cid, 'PublishMany', dict(count=notification_count)))
        party_client.add_ledger_created(
            OperatorNotification, lambda e: exercise(e.cid, 'Archive'))
        party_client.add_ledger_created(OperatorNotification, on_notification_contract)
        network.run_until_complete()
        # Every notification was archived immediately, so nothing remains.
        final_select_count = len(party_client.find_active(OperatorNotification))
        self.assertEqual(actual_select_count, expected_select_count)
        self.assertEqual(0, final_select_count)
def test_package_events():
    """
    Verify package-added events fire for both the ledger's initial packages
    (initial=True handler) and for DARs uploaded while the client is running.
    """
    # Fresh event loop for this test; avoids reusing a loop a previous test
    # may have closed.
    set_event_loop(new_event_loop())
    initial_events = []
    follow_up_events = []
    with sandbox([]) as proc:
        network = Network()
        network.set_config(url=proc.url)
        client = network.aio_party('TestParty')

        async def upload_dars_and_verify():
            # make sure the client is "ready" before uploading DARs, because we are explicitly
            # checking to make sure proper reporting of packages that are uploaded after a
            # client is running and operational
            await client.ready()
            await upload_test_dars(network)
            # give the client some time to pick up the new packages; unfortunately there isn't
            # much more to do here except wait
            await sleep(10)

        # The initial=True handler also receives packages present at startup;
        # the plain handler should only see the post-upload additions.
        client.add_ledger_packages_added(initial_events.append, initial=True)
        client.add_ledger_packages_added(follow_up_events.append)
        network.run_until_complete(upload_dars_and_verify())
    assert len(initial_events) == 2
    assert len(follow_up_events) == 1
def test_select_template_retrieves_contracts(self):
    """Each AccountRequest becomes an Account, numbered off a shared Counter."""
    number_of_contracts = 10
    with sandbox(Pending) as proc:
        network = Network()
        network.set_config(url=proc.url)
        party_client = network.aio_party(PARTY)

        def seed_ledger(_):
            # One shared counter, then the whole batch of account requests.
            requests = [create(AccountRequest, {'owner': PARTY})
                        for _ in range(number_of_contracts)]
            return [create(Counter, {'owner': PARTY, 'value': 0}), *requests]

        party_client.add_ledger_ready(seed_ledger)

        @party_client.ledger_created(AccountRequest)
        async def on_account_request(event):
            # Turn the request into an account numbered with the counter's
            # current value, then bump the counter.
            counter_cid, counter_cdata = await event.acs_find_one(Counter)
            return [
                exercise(event.cid, 'CreateAccount',
                         dict(accountId=counter_cdata['value'])),
                exercise(counter_cid, 'Increment'),
            ]

        network.run_until_complete()
        data = party_client.find_active(Account)
        self.assertEqual(len(data), number_of_contracts)
def test_parties_can_be_added_after_run_forever(self):
    """
    Verify a party registered *after* run_forever() has started ('Party C')
    still receives its events; its AuthorRole creation shuts the network down.
    """
    with sandbox(daml_path=DAML_PATH) as proc:
        network = Network()
        network.set_config(url=proc.url)
        operator = network.aio_party('Operator')
        party_a = network.aio_party('Party A')
        party_b = network.aio_party('Party B')

        @operator.ledger_ready()
        def _(event):
            return create('Main.PostmanRole', {'postman': 'Operator'})

        @operator.ledger_created('Main.PostmanRole')
        def __(event):
            # 'Party C' is deliberately invited before any local client for it
            # exists — that is the scenario under test.
            return [
                exercise(event.cid, 'InviteParticipant', {
                    'party': party,
                    'address': 'whatevs'
                }) for party in ('Party A', 'Party B', 'Party C')
            ]

        @party_a.ledger_created('Main.InviteAuthorRole')
        async def _(event):
            # Only now — with the network already running — is Party C added.
            party_c = network.aio_party('Party C')

            @party_c.ledger_created('Main.AuthorRole')
            def ___(event):
                # Party C saw its AuthorRole: the test is done.
                network.shutdown()

            cid, cdata = await party_c.find_one('Main.InviteAuthorRole')
            # NOTE(review): unlike find_one above, this call is not awaited
            # even though we are in an async handler — confirm submit_exercise
            # here is fire-and-forget and not a coroutine that never runs.
            party_c.submit_exercise(cid, 'AcceptInviteAuthorRole')

        network.run_forever()
def test_env_ls():
    """
    `dazl ls` should succeed when connection details are supplied purely via
    environment variables.
    """
    import os
    with sandbox(daml_path=DAML_PATH) as proc:
        # Fix: stash and restore the environment so this test does not leak
        # DAML_LEDGER_* configuration into other tests in the same process.
        saved = {key: os.environ.get(key)
                 for key in ('DAML_LEDGER_URL', 'DAML_LEDGER_PARTY')}
        os.environ['DAML_LEDGER_URL'] = proc.url
        os.environ['DAML_LEDGER_PARTY'] = 'Alice'
        try:
            exit_code = _main('dazl ls'.split(' '))
        finally:
            for key, value in saved.items():
                if value is None:
                    os.environ.pop(key, None)
                else:
                    os.environ[key] = value
    assert exit_code == 0
def test_event_handler_exceptions(self):
    """Register a ready-handler that raises, then drive the client to ready."""
    with sandbox(daml_path=DAML_PATH) as proc:
        with simple_client(proc.url, SAMPLE_PARTY) as client:
            def explode(event: ReadyEvent):
                # Deliberately blow up with a recognizable exception type.
                raise MagicException(event.ledger_id)

            client.add_ledger_ready(explode)
            client.ready()
def test_maps(self):
    """Create a MappyContract carrying an empty Map field via the blocking API."""
    payload = {'operator': PARTY, 'value': {'Map_internal': []}}
    with sandbox(TEST_DAML) as proc:
        with create_client(participant_url=proc.url, parties=[PARTY]) as client:
            party_client = client.client(PARTY)
            party_client.on_ready(
                lambda *args, **kwargs: create('AllKindsOf.MappyContract', payload))
            client.run_until_complete()
def test_event_order():
    """Initialize the ledger in stage 1, then verify event ordering in stage 2."""
    stage1 = Stage1LedgerInit()
    stage2 = Stage2LedgerVerify()
    with sandbox(SimpleDar) as proc:
        # Both stages run against the same sandbox, in order.
        for stage in (stage1, stage2):
            stage.run(proc.url)
    for event, cid, _ in stage2.events:
        print(event, cid)
def main_boilerplate(globals_, run_test): __name__ = globals_['__name__'] # DOC_BEGIN: MAIN-BOILERPLATE if __name__ == '__main__': import sys with sandbox(DAML_FILE) as server: exit_code = run_test(server.url) sys.exit(int(exit_code))
def some_sample_app():
    """Run the two-stage sample app against a fresh sandbox and print its events."""
    setup_default_logger(logging.INFO)
    stage1 = Stage1LedgerInit()
    stage2 = Stage2LedgerVerify()
    with sandbox(SIMPLE_DAML) as proc:
        # Run both stages against the same sandbox, in order.
        for stage in (stage1, stage2):
            stage.run(proc.url)
    for event, cid, _ in stage2.events:
        print(event, cid)
def test_select_unknown_template_retrieves_empty_set(self):
    """Selecting a template name the ledger has never seen yields nothing."""
    with sandbox(DAML_FILE) as proc:
        with create_client(participant_url=proc.url, parties=[PARTY]) as client:
            party_client = client.client(PARTY)
            party_client.on_ready(
                lambda *args, **kwargs: create(OperatorRole, {'operator': PARTY}))
            client.run_until_complete()
            # The role contract exists, but this query names no real template.
            data = party_client.select('NonExistentTemplate')
            self.assertEqual(len(data), 0)
def test_record_dotted_fields_submit(self):
    """Dotted field names should be accepted as flattened record fields on create."""
    # Nested `address` record expressed in its flattened dotted form.
    fields = {
        'person': 'Test',
        'address.address': '1 Test Place',
        'address.city': 'Somewhere',
        'address.state': 'ZZ',
        'address.zip': '99999',
    }
    with sandbox(daml_path=DottedFields) as proc:
        with simple_client(url=proc.url, party='Test') as client:
            client.ready()
            client.submit_create('DottedFields.American', fields)
            print(client.find_active('DottedFields.American'))
def test_select_template_retrieves_contracts():
    """A contract created at ready-time is visible via find_active afterwards."""
    with sandbox(Simple) as proc:
        network = Network()
        network.set_config(url=proc.url)
        operator = network.aio_party(PARTY)
        operator.add_ledger_ready(
            lambda e: create(OperatorRole, {'operator': PARTY}))
        network.run_until_complete()
        active = operator.find_active(OperatorRole)
        assert len(active) == 1
def test_map_support(self):
    """Create a Sample contract with an int-keyed map and an empty optional text."""
    sample = {
        'party': 'Test',
        'mappings': {65: 'A', 97: 'a'},
        'text': None,
    }
    with sandbox(daml_path=DAML_FILE) as proc:
        with simple_client(url=proc.url, party='Test') as client:
            client.ready()
            client.submit_create('MapSupport.Sample', sample)
            print(client.find_active('*'))
def test_select_unknown_template_retrieves_empty_set(self):
    """find_active on a template name that does not exist returns an empty set."""
    with sandbox(Simple) as proc:
        network = Network()
        network.set_config(url=proc.url)
        operator = network.aio_party(PARTY)
        operator.add_ledger_ready(
            lambda e: create(OperatorRole, {'operator': PARTY}))
        network.run_until_complete()
        # A contract exists on the ledger, but not under this template name.
        data = operator.find_active('NonExistentTemplate')
        self.assertEqual(len(data), 0)
def _sandbox_test(self, extra_args=None):
    """
    Spin up a sandbox (optionally with extra CLI args) and check that exactly
    one OperatorRole contract is created and observed.
    """
    observed = []
    with sandbox(Simple, extra_args=extra_args) as proc:
        network = Network()
        network.set_config(url=proc.url)
        operator = network.aio_party(PARTY)
        operator.add_ledger_ready(
            lambda _: create(OperatorRole, {'operator': PARTY}))
        operator.add_ledger_created(
            OperatorRole, lambda e: observed.append(e.cid))
        network.run_until_complete()
    print('got to the end with contracts: ', observed)
    self.assertEqual(len(observed), 1)
def test_simple_client_api(self):
    """Walk the simplest blocking-client workflow: ready, create, then query."""
    party = 'abc'
    print('creating sandbox')
    with sandbox(daml_path=DAML_FILE) as proc:
        print('creating client')
        with simple_client(url=proc.url, party=party) as client:
            client.ready()
            print('submitting')
            client.submit_create('Main.PostmanRole', {'postman': party})
            print('getting contracts')
            contracts = client.find_active('*')
            print('got the contracts')
            # Exactly the one PostmanRole we just created should be active.
            self.assertEqual(1, len(contracts))
def test_ssl_connectivity(self):
    """Connect over TLS to an SSL-enabled sandbox and confirm one round-trip create."""
    client_ssl_settings, server_ssl_settings = create_ssl_test_package()
    messages_received = []
    with sandbox(TEMPLATE_DAML_FILE, ssl_settings=server_ssl_settings) as proc:
        with create_client(participant_url=proc.url,
                           parties=['SOME_PARTY'],
                           ca_file=client_ssl_settings.ca_file,
                           cert_file=client_ssl_settings.cert_file,
                           cert_key_file=client_ssl_settings.cert_key_file) as client_mgr:
            client = client_mgr.client('SOME_PARTY')
            client.on_ready(
                lambda *args, **kwargs: create('Main.PostmanRole',
                                               dict(party='SOME_PARTY')))
            client.on_created('Main.PostmanRole',
                              lambda cid, cdata: messages_received.append(cid))
            ledger_run = client_mgr.run_until_complete()
            # A clean run over TLS, with exactly the one expected create seen.
            self.assertEqual(ledger_run.exit_code, 0)
            self.assertEqual(len(messages_received), 1)
def test_map_support():
    """Create a Sample contract with a text-keyed map, then log the ACS."""
    fields = {
        'party': 'Test',
        'mappings': {'65': 'A', '97': 'a'},
        'text': None,
    }
    with sandbox(MapSupport) as proc:
        with simple_client(url=proc.url, party='Test') as client:
            client.ready()
            client.submit_create('MapSupport.Sample', fields)
            logging.info(client.find_active('*'))
def test_duplicate_ledger(self):
    """
    Run the same application concurrently against two independent sandboxes.
    """
    all_parties = ['POSTMAN'] + [m['party_name'] for m in MEMBERS]
    with sandbox(TEMPLATE_DAML_FILE) as damli_proc_1, \
            sandbox(TEMPLATE_DAML_FILE) as damli_proc_2, \
            create_client(participant_url=damli_proc_1.url,
                          parties=all_parties) as client_1, \
            create_client(participant_url=damli_proc_2.url,
                          parties=all_parties) as client_2:
        app = set_up()
        register_application(client_1, app)
        register_application(client_2, app)

        # Expect program that is expected to terminate. It will be considered
        # done only once all pending commands have been completed (including
        # any of their follow-ups).
        event_loop = asyncio.get_event_loop()
        event_loop.run_until_complete(
            asyncio.gather(client_1.main(False), client_2.main(False)))
        LOG.info('Application finished.')
def _sandbox_test(self, extra_args=None):
    """
    Blocking-API variant: spin up a sandbox with optional extra args and
    expect exactly one OperatorRole contract to be created and observed.
    """
    observed = []
    with sandbox(DAML_FILE, extra_args=extra_args) as proc:
        with create_client(participant_url=proc.url, parties=[PARTY]) as client:
            party_client = client.client(PARTY)
            party_client.on_ready(
                lambda *args, **kwargs: create(OperatorRole, {'operator': PARTY}))
            party_client.on_created(
                OperatorRole, lambda cid, cdata: observed.append(cid))
            client.run_until_complete()
    print('got to the end with contracts: ', observed)
    self.assertEqual(len(observed), 1)
def test_set_static_time_two_clients(self):
    """
    Run a slightly complicated test involving manipulation of static time and
    multiple clients:
     * Client 1 sends a command at ledger startup.
     * Client 2 also listens for the command.
     * When both Client 1 and Client 2 have heard the original command,
       Client 1 advances the time and shuts down WITHOUT producing a new
       command.
     * After Client 1 is shut down, Client 2 manually syncs its local time
       and submits a command to the Sandbox.
     * Observe Client 2 receives its corresponding event.
    """
    loop = get_event_loop()
    with sandbox(TEMPLATE_DAML_FILE) as damli_proc:
        # The scenario itself lives in a helper class; just drive it.
        scenario = _TestSetStaticTimeTwoClients(damli_proc.url)
        loop.run_until_complete(scenario.main())
def test_maps(self):
    """Create a MappyContract with an empty internal map via the asyncio API."""
    payload = {'operator': PARTY, 'value': {'Map_internal': []}}
    with sandbox(AllKindsOf) as proc:
        network = Network()
        network.set_config(url=proc.url)
        member = network.aio_party(PARTY)
        member.add_ledger_ready(
            lambda e: create('AllKindsOf.MappyContract', payload))
        network.run_until_complete()