import dazl
import pickle
import json

# Ledger connection for the towerGuard party (local sandbox).
network = dazl.Network()
network.set_config(url='http://localhost:6865')
towerGuard = network.simple_party('towerGuard')


@towerGuard.ledger_created(
    'ContactRelatedContracts.ContactConfirmationContract')
def addToMap(event):
    # Persist the new ContactConfirmationContract's contract id under the
    # 'towerGuard' key in the pickled map stored in datafile.txt.
    # NOTE(review): `dict` shadows the builtin and `pickle_off` is never
    # closed; pickle.load on a shared file is also unsafe for untrusted data.
    pickle_off = open("datafile.txt", "rb")
    dict = pickle.load(pickle_off)
    dict['towerGuard'] = event.cid
    #print(dict)
    with open('datafile.txt', 'wb') as fh:
        pickle.dump(dict, fh)


@towerGuard.ledger_created(
    'ContactRelatedContracts.L1HighInfectionDangerContract')
def automate_l2(event):
    # When an L1HighInfectionDangerContract appears for some other party,
    # exercise CreateModerateInfectionDanger on the contract id previously
    # cached in datafile.txt by addToMap above.
    print('towerGuard')
    pickle_off = open("datafile.txt", "rb")
    dict = pickle.load(pickle_off)
    if 'towerGuard' != event.cdata['userPartyId']:
        print("HERE")
        # NOTE(review): the source chunk is truncated here — the choice
        # argument dict continues beyond this view.
        towerGuard.submit_exercise(
            dict['towerGuard'], 'CreateModerateInfectionDanger', {
def main():
    """Run the DABL Chat operator bot.

    Connects to the ledger given by DAML_LEDGER_URL as DAML_LEDGER_PARTY,
    ensures an Operator contract exists, and registers handlers that
    acknowledge user sessions, forward messages to Slack, answer alias
    requests, add new users to public chats, and respond to chat-creation
    requests. Blocks forever in network.run_forever().
    """
    url = os.getenv('DAML_LEDGER_URL')
    party = os.getenv('DAML_LEDGER_PARTY')
    public_party = os.getenv('DABL_PUBLIC_PARTY')

    network = dazl.Network()
    network.set_config(url=url)

    # Fixed log-message typo: was "starting a the operator_bot".
    logging.info(f'starting the operator_bot for party {party}')
    logging.info(f'public party is {public_party}')
    client = network.aio_party(party)

    @client.ledger_ready()
    def create_operator(event):  # pylint: disable=unused-variable
        """Create the Operator contract for this party if it does not exist."""
        logging.info(f'On Ledger Ready')
        res = client.find_active(CHAT.Operator)
        logging.info(f'found {len(res)} {CHAT.Operator} contracts')

        if not res:
            logging.info(f'Creating Operator contract for {party}...')
            return client.submit_create(CHAT.Operator, {
                'operator': client.party,
                'publicParty': public_party
            })
        else:
            logging.info(f'Operator {party} is ready')

    @client.ledger_created(CHAT.Operator)
    def invite_users(event):  # pylint: disable=unused-variable
        """Acknowledge every pending UserSession once the Operator exists."""
        logging.info(f'On {CHAT.Operator} created!')
        user_sessions = client.find_active(CHAT.UserSession)
        logging.info(
            f'found {len(user_sessions)} {CHAT.UserSession} contracts')

        return [
            exercise(cid, 'UserSessionAck') for cid in user_sessions.keys()
        ]

    @client.ledger_created(CHAT.UserSession)
    def invite_user_to_chat(event):  # pylint: disable=unused-variable
        """Acknowledge a newly created UserSession."""
        logging.info(f'On {CHAT.UserSession} created!')
        return client.submit_exercise(event.cid, 'UserSessionAck')

    @client.ledger_created(CHAT.Message)
    def send_to_slack_channel(event):  # pylint: disable=unused-variable
        """Forward a chat message to every Slack channel subscribed to its chat."""
        cdata = event.cdata
        logging.info(f"on message!\n{cdata}")
        forwards = client.find_active(CHAT.ForwardToSlack,
                                      {'chatId': cdata['chatId']})
        logging.info(f'Found {len(forwards)} {CHAT.ForwardToSlack} contracts')
        posted_at = time.strftime("%a, %d %b %Y %H:%M:%S %Z",
                                  time.localtime(float(cdata['postedAt'])))
        message_text = f"`From:` {cdata['sender']}\n`Posted At:` {posted_at}\n" \
            f"`DABL Chat Id:` {cdata['chatId']}\n`Message:` {cdata['message']}"
        return [
            create(
                SLACK_INTEGRATION.OUTBOUND_MESSAGE.OutboundMessage, {
                    'integrationParty': client.party,
                    'slackChannel': f['slackChannelId'],
                    'messageText': message_text,
                    'attemptCount': 3
                }) for (_, f) in forwards.items()
        ]

    @client.ledger_created(CHAT.AliasesRequest)
    def divulge_aliases(event):  # pylint: disable=unused-variable
        """Answer an AliasesRequest with the list of known user -> alias mappings."""
        logging.info(f'On {CHAT.AliasesRequest}')
        aliases = client.find_active(CHAT.SelfAlias)
        logging.info(f'found {len(aliases)} {CHAT.SelfAlias} contracts')
        mappings = [
            f"{cdata['user']} -> {cdata['alias']}"
            for _, cdata in aliases.items()
        ]
        mappings_str = '\n'.join(mappings)
        commands = []
        # Archive the request, then post the answer into the requester's chat.
        commands.append(exercise(event.cid, 'Archive', {}))
        commands.append(exercise_by_key(
            CHAT.Chat,
            {'_1': client.party, '_2': event.cdata['user']},
            'ChatPostMessage',
            {
                'poster': client.party,
                'message': f"Here is the list of known users:\n```scala\n{mappings_str}\n```" \
                    if len(mappings) > 0 else "I couldn't find any known users!",
                'postedAt': f"{int(time.time())}"
            }
        ))
        return client.submit(commands)

    @client.ledger_created(CHAT.User)
    def add_to_public_chats(event):  # pylint: disable=unused-variable
        """Add a newly created user to every public chat they are not yet in."""
        logging.info(f'On {CHAT.User} created!')
        if event.cdata['operator'] != client.party:
            return

        chats = client.find_active(CHAT.Chat, {'isPublic': True})
        logging.info(f'found {len(chats)} public {CHAT.Chat} contracts')
        new_user = event.cdata['user']
        commands = []
        for (cid, cdata) in chats.items():
            if new_user not in cdata['members']:
                commands.append(
                    exercise(cid, 'ChatAddMembers', {
                        'member': client.party,
                        'newMembers': [new_user]
                    }))
                logging.info(f"adding {new_user} to\n{cdata['name']}...")
        return client.submit(commands)

    @client.ledger_created(CHAT.CreateChatRequest)
    def respond_to_chat_request(event):  # pylint: disable=unused-variable
        """Respond to a chat-creation request with the resolved member parties.

        Public chats include every known user; private chats only those whose
        name matches (case-insensitively) a requested member. Rejects the
        request if no members resolve.
        """
        logging.info(f'On {CHAT.CreateChatRequest} created!')
        cdata = event.cdata
        if cdata['operator'] != client.party:
            return

        user_contracts = client.find_active(CHAT.User)
        party_members = []
        is_public = cdata['isPublic']

        for (_, data) in user_contracts.items():
            if is_public or data['user'].lower() in map(
                    lambda m: m.lower(), cdata['members']):
                party_members.append(data['user'])

        if party_members:
            return client.submit_exercise(event.cid, 'CreateChatRequestRespond', {
                'partyMembers': party_members,
                'chatId': str(uuid.uuid4())
            })
        else:
            return client.submit_exercise(event.cid, 'CreateChatRequestReject')

    network.run_forever()
def setUp(self) -> None:
    """Start a background dazl network against the sandbox for each test."""
    net = dazl.Network()
    net.set_config(url=sandbox_url)
    net.start_in_background()
    self.network = net
def main():
    """Run the auto-archiving user bot.

    Connects as DAML_LEDGER_PARTY, keeps a min-heap of this party's messages
    ordered by post time, and archives each message once it is older than the
    user's configured retention. Also services explicit
    ArchiveMessagesRequest contracts. Blocks in network.run_forever().
    """
    url = os.getenv('DAML_LEDGER_URL')
    party = os.getenv('DAML_LEDGER_PARTY')
    network = dazl.Network()
    network.set_config(url=url)
    # Fixed log-message typo: was "starting a the user_bot".
    logging.info(f'starting the user_bot for party {party}')
    client = network.aio_party(party)

    # default to 14 days as defined in DAML
    archive_state = ArchiveState(message_heap=[],
                                 archive_after=int(
                                     timedelta(days=14).total_seconds()))

    def to_sec(time, unit) -> int:
        """Convert a {'time': n, 'unit': 's'|'m'|'h'|'d'} pair to seconds.

        NOTE: parameter names are part of the contract — callers expand the
        ledger's archiveMessagesAfter record via to_sec(**record).
        """
        switch = {'s': 'seconds', 'm': 'minutes', 'h': 'hours', 'd': 'days'}
        period = switch[unit]
        return int(timedelta(**{period: time}).total_seconds())

    def expired(after: int, posted_at: int) -> bool:
        """Return True if a message posted at `posted_at` is older than `after` seconds."""
        return (datetime.fromtimestamp(posted_at) +
                timedelta(seconds=after)) < datetime.now()

    async def batch_submit(commands, size):
        """Submit `commands` to the ledger in chunks of at most `size`."""
        batched_commands = [
            commands[i * size:(i + 1) * size]
            for i in range((len(commands) + size - 1) // size)
        ]
        for cmds in batched_commands:
            await client.submit(cmds)

    @client.ledger_ready()
    async def bot_ready(_):
        """Seed the heap with existing messages, then poll and archive forever."""
        existing_messages = client.find_active(Chat.Message,
                                               {'sender': client.party})
        archive_state.message_heap = [
            Message(cdata['postedAt'], cid)
            for cid, cdata in existing_messages.items()
        ]
        message_heap = archive_state.message_heap
        heapq.heapify(message_heap)
        logging.info(f"Message cache loaded.")

        (_, settings_cdata) = await client.find_one(Chat.UserSettings,
                                                    {'user': client.party})
        archive_state.archive_after = to_sec(
            **settings_cdata['archiveMessagesAfter'])

        logging.info(f'started auto-archiving bot for party {party}')
        while True:
            try:
                # Pop and archive every message at the top of the heap that
                # has passed the retention window.
                while len(message_heap) > 0 and expired(
                        archive_state.archive_after, message_heap[0].post_at):
                    top = message_heap[0]
                    logging.info(
                        f'archiving {Chat.Message}:{top.cid}'
                        f' expired after {archive_state.archive_after}s')
                    message_to_archive = client.find_by_id(top.cid)
                    if not message_to_archive or not message_to_archive.active:
                        logging.info(
                            f'Message: {top.cid} archived somewhere else, skip.'
                        )
                        heapq.heappop(message_heap)
                        continue
                    await client.submit(exercise(top.cid, 'Archive'))
                    heapq.heappop(message_heap)
                    logging.info(f'{Chat.Message}:{top.cid} archived.')
            # Was a bare `except:` — that also swallowed SystemExit,
            # KeyboardInterrupt and task cancellation; narrowed to Exception.
            except Exception:
                logging.error(
                    f"Could not auto archive messages: {traceback.print_exc()}"
                )
            if len(message_heap) > 0:
                logging.info(
                    f'waiting for next message to archive: {message_heap[0].cid}'
                )
            await asyncio.sleep(bot_polling_sec)

    @client.ledger_created(Chat.ArchiveMessagesRequest)
    async def archive_stale_messages(event):
        """Archive all of this party's expired messages on explicit request."""
        logging.info(f'On {Chat.ArchiveMessagesRequest} created!')
        try:
            (_, settings_cdata) = await client.find_one(Chat.UserSettings,
                                                        {'user': client.party})
            user_messages = client.find_active(Chat.Message,
                                               {'sender': client.party})
            # BUG FIX: archiveMessagesAfter is a {'time', 'unit'} record (it
            # is expanded with to_sec(**...) everywhere else); passing the raw
            # dict made expired() raise on every call, so this handler always
            # fell through to the except path and archived nothing.
            archive_after = to_sec(**settings_cdata['archiveMessagesAfter'])
            commands = [
                exercise(cid, 'Archive')
                for (cid, cdata) in user_messages.items()
                if expired(archive_after, cdata['postedAt'])
            ]
            logging.info(f"Will archive {len(commands)} message(s)")
            commands.append(exercise(event.cid, 'Archive'))
            await batch_submit(commands, 50)
        # Was a bare `except:`; narrowed to Exception (see bot_ready).
        except Exception:
            logging.error(f"Could not archive stale messages")
            # Best effort: still archive the request itself.
            await client.submit_exercise(event.cid, 'Archive')

    @client.ledger_created(Chat.Message)
    async def message_heapify(event):
        """Track each new message sent by this party as an archive candidate."""
        message_heap = archive_state.message_heap
        logging.info(f'On {Chat.Message} created')
        if event.cdata['sender'] == client.party:
            logging.info(f'New {Chat.Message} archive candidate added.')
            heapq.heappush(message_heap,
                           Message(event.cdata['postedAt'], event.cid))

    @client.ledger_created(Chat.UserSettings)
    async def archive_bot(event):
        """Pick up changes to the user's retention setting."""
        archive_state.archive_after = to_sec(
            **event.cdata['archiveMessagesAfter'])
        logging.info(
            f"New auto archiving setting: {event.cdata['archiveMessagesAfter']}s"
        )

    network.run_forever()
def main():
    """Run the DA Marketplace <> Exberry exchange adapter.

    Connects as DAML_LEDGER_PARTY (default "Exchange") and translates between
    Marketplace contracts (listing/order/cancel requests) and Exberry
    integration contracts (instrument/order requests and their outcomes).
    Blocks in network.run_forever().
    """
    url = os.getenv('DAML_LEDGER_URL')
    exchange = os.getenv('DAML_LEDGER_PARTY')
    exchange_party = "Exchange" if not exchange else exchange
    network = dazl.Network()
    network.set_config(url=url)
    logging.info(f'Integration will run under party: {exchange_party}')
    client = network.aio_party(exchange_party)

    async def find_and_run(template_name, fn):
        """Invoke handler `fn` on every active contract of `template_name`,
        collecting the (possibly list-valued) commands it returns."""
        commands = []
        templates = client.find_active(template_name)
        for (cid, item) in templates.items():
            event = FakeContractEvent(cid, item)
            # Handlers are a mix of sync and async functions.
            result = await fn(event) if inspect.iscoroutinefunction(
                fn) else fn(event)
            if isinstance(result, list):
                commands.extend(result)
            else:
                commands.append(result)
        return commands

    async def collect_run_commands(template_pairs):
        """Run every (template, handler) pair and concatenate their commands."""
        commands = []
        for (template, fn) in template_pairs:
            commands.extend(await find_and_run(template, fn))
        return commands

    @client.ledger_ready()
    def say_hello(event):
        """On startup, replay all handlers over contracts created while offline."""
        logging.info("DA Marketplace <> Exberry adapter is ready!")
        # NOTE(review): returns a coroutine from a sync callback — assumes
        # dazl awaits returned awaitables; confirm against dazl's docs.
        return collect_run_commands([
            (MARKETPLACE.CreateListingRequest, handle_new_listing),
            (MARKETPLACE.CreateOrderRequest, handle_order_request),
            (EXBERRY.Instrument, handle_new_listing_success),
            (EXBERRY.FailedInstrumentRequest, handle_new_listing_failure),
            (EXBERRY.NewOrderSuccess, handle_new_order_success),
            (EXBERRY.NewOrderFailure, handle_new_order_failure),
            (EXBERRY.ExecutionReport, handle_execution_report),
            (MARKETPLACE.CancelOrderRequest, handle_order_cancel_request),
            (EXBERRY.CancelOrderSuccess, handle_cancel_order_success),
            (EXBERRY.CancelOrderFailure, handle_cancel_order_failure),
        ])

    # Marketplace --> Exberry
    @client.ledger_created(MARKETPLACE.CreateOrderRequest)
    def handle_order_request(event):
        """Validate the attached exchange fee, then forward the order to Exberry."""
        logging.info(f'Received Create Order Request - {event}')
        order = event.cdata['details']
        opt_fee_contract_id = order['optExchangeFee']
        fee_amount = 0.0
        # `is not None` instead of `!= None` (identity check for the singleton).
        if opt_fee_contract_id is not None:
            fee_deposit = client.find_by_id(opt_fee_contract_id)
            fee_amount = fee_deposit.cdata['asset']['quantity']
        fee_schedules = client.find_active(MARKETPLACE.FeeSchedule,
                                           {'provider': exchange_party})
        for (_, schedule) in fee_schedules.items():
            if schedule['currentFee']['amount'] > fee_amount:
                return [
                    exercise(
                        event.cid, 'RejectRequest', {
                            'errorCode': '790',
                            'errorMessage': 'Fee does not match exchange fee'
                        })
                ]
        return create(
            EXBERRY.NewOrderRequest, {
                'order': {
                    # Variant fields arrive as single-key dicts; the key is the tag.
                    'orderType': list(order['orderType'])[0],
                    'instrument': order['listingId'],
                    'quantity': float(order['asset']['quantity']),
                    'price': float(-1)
                    if list(order['orderType'])[0] == 'Market'
                    else float(order['orderType']['Limit']['price']),
                    'side': order['side'],
                    'timeInForce': list(order['timeInForce'])[0],
                    'expiryDate': int(-1)
                    if list(order['timeInForce'])[0] != 'GTD'
                    else int(order['timeInForce']['GTD']['expiryDate']),
                    'mpOrderId': int(order['id']),  # This will be the SID for now
                    'userId': make_user_user_id(event.cdata['provider']),
                },
                'integrationParty': client.party
            })

    # Marketplace <-- Exberry
    @client.ledger_created(EXBERRY.NewOrderSuccess)
    async def handle_new_order_success(event):
        """Acknowledge the originating order request with Exberry's order id."""
        return [
            exercise_by_key(MARKETPLACE.CreateOrderRequest, {
                '_1': client.party,
                '_2': event.cdata['sid']
            }, 'AcknowledgeRequest',
                            {'providerOrderId': event.cdata['orderId']}),
            exercise(event.cid, 'Archive', {})
        ]

    # Marketplace <-- Exberry
    @client.ledger_created(EXBERRY.NewOrderFailure)
    async def handle_new_order_failure(event):
        """Reject the originating order request with Exberry's error details."""
        return [
            exercise_by_key(MARKETPLACE.CreateOrderRequest, {
                '_1': client.party,
                '_2': event.cdata['sid']
            }, 'RejectRequest', {
                'errorCode': event.cdata['errorCode'],
                'errorMessage': event.cdata['errorMessage']
            }),
            exercise(event.cid, 'Archive', {})
        ]

    # Marketplace <-- Exberry
    @client.ledger_created(EXBERRY.NewOrderCancelled)
    async def handle_new_order_cancel(event):
        """Propagate an exchange-side cancellation back to the order request."""
        return [
            exercise_by_key(
                MARKETPLACE.CreateOrderRequest, {
                    '_1': client.party,
                    '_2': event.cdata['mpOrderId']
                }, 'CancelRequest', {
                    'providerOrderId': event.cdata['orderId'],
                    'cancelledQuantity': event.cdata['cancelledQuantity']
                }),
            exercise(event.cid, 'Archive', {})
        ]

    # Marketplace --> Exberry
    @client.ledger_created(MARKETPLACE.CreateListingRequest)
    def handle_new_listing(event):
        """Translate a Marketplace listing request into an Exberry instrument request."""
        logging.info(f'Received Listing request - {event}')
        listing = event.cdata
        symbol = listing['symbol']
        description = listing['description']
        calendar_id = listing['calendarId']
        quote_currency = listing['quotedAssetId']['label']
        price_precision = listing['quotedAssetPrecision']
        quantity_precision = listing['tradedAssetPrecision']
        min_quantity = listing['minimumTradableQuantity']
        max_quantity = listing['maximumTradableQuantity']
        status = listing['status']
        return create(
            EXBERRY.CreateInstrumentRequest, {
                'integrationParty': client.party,
                'symbol': symbol,
                'quoteCurrency': quote_currency,
                'instrumentDescription': description,
                'calendarId': calendar_id,
                'pricePrecision': price_precision,
                'quantityPrecision': quantity_precision,
                'minQuantity': min_quantity,
                'maxQuantity': max_quantity,
                'status': status
            })

    # Exberry --> Marketplace
    @client.ledger_created(EXBERRY.Instrument)
    def handle_new_listing_success(event):
        """Report a successfully created instrument back to the listing request."""
        return [
            exercise_by_key(MARKETPLACE.CreateListingRequest, {
                '_1': client.party,
                '_2': event.cdata['symbol']
            }, 'ListingRequestSuccess',
                            {'providerId': event.cdata['instrumentId']}),
            exercise(event.cid, 'Archive', {})
        ]

    # Exberry --> Marketplace
    @client.ledger_created(EXBERRY.FailedInstrumentRequest)
    def handle_new_listing_failure(event):
        """Report an instrument-creation failure back to the listing request."""
        return [
            exercise_by_key(MARKETPLACE.CreateListingRequest, {
                '_1': client.party,
                '_2': event.cdata['symbol']
            }, 'ListingRequestFailure', {
                'message': event.cdata['message'],
                'name': event.cdata['name'],
                'code': event.cdata['code']
            }),
            exercise(event.cid, 'Archive', {})
        ]

    # Marketplace --> Exberry
    @client.ledger_created(MARKETPLACE.CancelOrderRequest)
    async def handle_order_cancel_request(event):
        """Forward an order-cancel request to Exberry."""
        cancel_request = event.cdata
        return create(
            EXBERRY.CancelOrderRequest, {
                'integrationParty': client.party,
                'instrument': cancel_request['details']['listingId'],
                'mpOrderId': cancel_request['details']['id'],
                'userId': make_user_user_id(cancel_request['provider'])
            })

    # Marketplace <-- Exberry
    @client.ledger_created(EXBERRY.CancelOrderSuccess)
    async def handle_cancel_order_success(event):
        """Acknowledge a successful cancellation on the cancel request."""
        return [
            exercise_by_key(MARKETPLACE.CancelOrderRequest, {
                '_1': client.party,
                '_2': event.cdata['sid']
            }, 'AcknowledgeCancel', {}),
            exercise(event.cid, 'Archive', {})
        ]

    # Marketplace <-- Exberry
    @client.ledger_created(EXBERRY.CancelOrderFailure)
    async def handle_cancel_order_failure(event):
        """Report a failed cancellation on the cancel request."""
        return [
            exercise_by_key(MARKETPLACE.CancelOrderRequest, {
                '_1': client.party,
                '_2': event.cdata['sid']
            }, 'FailureCancel', {
                'errorCode': event.cdata['errorCode'],
                'errorMessage': event.cdata['errorMessage']
            }),
            exercise(event.cid, 'Archive', {})
        ]

    # Marketplace <-- Exberry
    @client.ledger_created(EXBERRY.ExecutionReport)
    async def handle_execution_report(event):
        """Feed an Exberry execution into the Marketplace matching service."""
        execution = event.cdata
        # TODO: Check if we should be calling the matching service directly
        return [
            exercise_by_key(
                MARKETPLACE.MatchingService, client.party, 'MatchOrders', {
                    'execution': {
                        'matchId': execution['matchId'],
                        'makerOrderId': execution['makerMpOrderId'],
                        'takerOrderId': execution['takerMpOrderId'],
                        'quantity': execution['executedQuantity'],
                        'price': execution['executedPrice'],
                        'timestamp': execution['eventTimestamp']
                    }
                }),
            exercise(event.cid, 'Archive', {})
        ]

    network.run_forever()