def test_simple_flask_integration(sandbox):
    """Round-trip test: a Flask app backed by a dazl client answers a request correctly."""
    setup_default_logger(logging.INFO)

    network = Network()
    network.set_config(url=sandbox)

    client = network.simple_new_party()

    # Seed the ledger with some initial state.
    client.add_ledger_ready(create_initial_state)

    network.start_in_background()

    LOG.info('Waiting for the client to be ready...')
    client.ready()

    # TODO: This is currently necessary because there is a bug that prevents ready() from
    #  waiting on ledger_ready events even when those callbacks are added at the
    #  appropriate time.
    sleep(10)
    LOG.info('Client is ready.')

    # Serve the Flask app on a background thread so this test can drive HTTP calls at it.
    LOG.info('Starting up the flask app...')
    flask_thread = Thread(target=run_flask_app, args=(client, 9999))
    flask_thread.start()

    response_body = run_flask_test(9999)
    assert response_body == {'postman': client.party}

    # Tear everything down; the Flask thread must exit once the client shuts down.
    network.shutdown()
    network.join(30)

    flask_thread.join(30)
    if flask_thread.is_alive():
        raise Exception('The Flask thread should have terminated, but did not.')
def some_sample_app():
    """Run the two-stage sample flow against a throwaway sandbox, then dump its events."""
    setup_default_logger(logging.INFO)

    init_stage = Stage1LedgerInit()
    verify_stage = Stage2LedgerVerify()

    # Both stages talk to the same short-lived sandbox process.
    with sandbox(SIMPLE_DAML) as proc:
        init_stage.run(proc.url)
        verify_stage.run(proc.url)

    # Report what the verification stage observed.
    for event, cid, _ in verify_stage.events:
        print(event, cid)
def test_incomplete_package_loading(self):
    """Loading only a DALF must be tolerated by the package machinery.

    Attempt to load only the DALF into the Sandbox; this will cause the
    PackageService to return a nonsensical result, the lack of an exception
    being thrown signals that we can tolerate this condition.
    """
    d = {}
    setup_default_logger(logging.INFO)
    with sandbox(DALF_FILE) as proc:
        network = Network()
        network.set_config(url=proc.url)
        client = network.aio_party('TestParty')
        # Capture the package store observed at ledger-ready time.
        client.add_ledger_ready(lambda event: setitem(d, 'metadata', event.package_store))
        network.run_until_complete()

    store: PackageStore = d['metadata']
    actual_package_ids = store.package_ids()

    # When the only package has no dependencies, the PackageStore should remain empty.
    # BUG FIX: this was self.assertFalse(set(), set(actual_package_ids)), which always
    # passes — the first argument (an empty set) is falsy and the second is silently
    # treated as the failure message. The intent is a set-equality check.
    self.assertEqual(set(), set(actual_package_ids))
def test_simple_flask_integration():
    """End-to-end check that a Flask app served by a dazl simple client responds correctly."""
    setup_default_logger(logging.INFO)

    with sandbox(daml_path=DAML_PATH) as proc:
        with simple_client(proc.url, SAMPLE_PARTY) as client:
            # Seed the ledger with some initial state.
            client.add_ledger_ready(create_initial_state)

            LOG.info('Waiting for the client to be ready...')
            client.ready()
            LOG.info('Client is ready.')

            # Run the Flask app on its own thread and poke it over HTTP.
            LOG.info('Starting up the flask app...')
            app_thread = Thread(target=run_flask_app, args=(client, 9999))
            app_thread.start()

            payload = run_flask_test(9999)
            assert payload == {'postman': SAMPLE_PARTY}

            app_thread.join(30)
            if app_thread.is_alive():
                raise Exception(
                    'The Flask thread should have terminated, but did not.')
import logging
import unittest
from asyncio import gather, get_event_loop, ensure_future
from datetime import datetime
from pathlib import Path

from dazl import create, exercise, sandbox, Network, setup_default_logger

# Path to the shared template DAML model exercised by this test.
TEMPLATE_DAML_FILE = Path(
    __file__).parent.parent.parent / '_template' / 'Main.daml'

LOG = logging.getLogger('test_static_time')

setup_default_logger(logging.DEBUG)

# Party name used throughout the static-time test.
PARTY = 'POSTMAN'


class TestStaticTime(unittest.TestCase):

    def test_set_static_time(self):
        """
        Run a simple test involving manipulation of static time:
         * Send a command at ledger startup.
         * Upon receipt of a corresponding event, advance the time and submit a new command
         * Observe a corresponding event has been received.
        """
        # NOTE(review): only the sandbox/network setup is visible here — the rest of
        # this test body appears to be truncated in this view; confirm against the
        # original file before relying on it.
        with sandbox(TEMPLATE_DAML_FILE) as damli_proc:
            network = Network()
            network.set_config(url=damli_proc.url)
# NOTE(review): `network` is not defined in this fragment — these first lines appear
# to be the tail of an enclosing function whose beginning is outside this view.
other_party = network.aio_party('TestB')
other_party.add_ledger_ready(ready)
dump_state()
network.run_forever()


def ready(event: ReadyEvent):
    # Runs once the ledger connection is established; returns a command that
    # dazl submits, creating the initial contract.
    print('The ledger is now ready.')
    return create('Main:PostmanRole', {'postman': event.party})


def created(event: ContractCreateEvent):
    # Log every contract-creation event observed on the stream.
    print(f'Created happened for {event.cdata}')


def dump_state():
    """Print a timestamped memory summary (via muppy/summary) and re-arm itself.

    Reschedules on the running event loop every 10 seconds, so a dump is printed
    periodically for as long as the loop runs.
    """
    loop = get_event_loop()
    print(datetime.now())
    all_objects = muppy.get_objects()
    sum1 = summary.summarize(all_objects)
    summary.print_(sum1, limit=100)
    # Re-arm so the dump repeats periodically.
    loop.call_later(10, dump_state)


if __name__ == '__main__':
    setup_default_logger()
    main(sys.argv[1])
import os
import logging
import uuid
import time

import dazl
from dazl import create, exercise, exercise_by_key

dazl.setup_default_logger(logging.INFO)


class Chat:
    """Fully-qualified template identifiers for the Chat.V3 DAML model."""
    AliasesRequest = 'Chat.V3:AliasesRequest'
    Chat = 'Chat.V3:Chat'
    CreateChatRequest = 'Chat.V3:CreateChatRequest'
    ForwardToSlack = 'Chat.V3:ForwardToSlack'
    Message = 'Chat.V3:Message'
    Operator = 'Chat.V3:Operator'
    SelfAlias = 'Chat.V3:SelfAlias'
    # FIX: this constant previously read '******' (a redaction/placeholder value),
    # breaking the 'Chat.V3:<Name>' pattern every sibling constant follows; restore
    # it so template lookups for User resolve like the others.
    User = 'Chat.V3:User'
    UserAccountRequest = 'Chat.V3:UserAccountRequest'
    ArchiveBotRequest = 'Chat.V3:ArchiveBotRequest'
    UserSettings = 'Chat.V3:UserSettings'


class SlackIntegration:
    """Fully-qualified template identifiers for the SlackIntegration DAML model."""
    OutboundMessage = 'SlackIntegration.OutboundMessage:OutboundMessage'


def main():
    # NOTE(review): only the first statement of main() is visible in this view;
    # the ledger URL is read from the environment.
    url = os.getenv('DAML_LEDGER_URL')
# NOTE(review): these first two assignments appear to be the tail of an __init__
# defined outside this view. `store` maps contract id -> contract data for live
# contracts; `events` records (kind, cid, cdata) tuples in observation order.
self.store = {}
self.events = []

def run(self, url):
    """Connect to the ledger at `url` as the operator and record notification events.

    Blocks in network.run_until_complete() and appends a final 'finished'
    marker once the run ends.
    """
    network = Network()
    network.set_config(url=url)
    operator = network.aio_party(OPERATOR_PARTY)
    operator.add_ledger_ready(self.on_ready)
    operator.add_ledger_created(Simple.OperatorNotification, self.on_notification_created)
    operator.add_ledger_archived(Simple.OperatorNotification, self.on_notification_archived)
    network.run_until_complete()
    # Sentinel so callers can assert on the overall event ordering.
    self.events.append(('finished', (), ()))

def on_ready(self, _):
    # Ledger connection established; payload fields are unused for this marker.
    self.events.append(('ready', (), ()))

def on_notification_created(self, event):
    # Track the newly created contract by its contract id.
    self.events.append(('created', event.cid, event.cdata))
    self.store[event.cid] = event.cdata

def on_notification_archived(self, event):
    # Contract left the active set; drop it from the local view.
    self.events.append(('archived', event.cid, ()))
    del self.store[event.cid]


if __name__ == '__main__':
    from dazl import setup_default_logger
    setup_default_logger(logging.INFO)
    # NOTE(review): test_event_order is not defined in this view — presumably a
    # sibling function in the original file; confirm before running standalone.
    test_event_order()