def test_update_attr_values_default_dict():
    '''Default of a nested TYPED_DICT child attr can be set via a dot-notated path.'''
    inner = ATTR.TYPED_DICT(dict={'key': ATTR.STR()})
    _update_attr_values(
        attr=ATTR.TYPED_DICT(dict={'attr': inner}),
        value='default',
        value_path='attr.key',
        value_val='test_update_attr_values',
    )
    # The child attr object is mutated in place by _update_attr_values
    assert inner._args['dict']['key']._default == 'test_update_attr_values'
def test_update_attr_values_default_dict_nested_list():
    '''Default of a LIST element inside a TYPED_DICT can be set via a ':index' path.'''
    inner = ATTR.TYPED_DICT(dict={'key': ATTR.LIST(list=[ATTR.STR()])})
    _update_attr_values(
        attr=ATTR.TYPED_DICT(dict={'attr': inner}),
        value='default',
        value_path='attr.key:0',
        value_val='test_update_attr_values',
    )
    list_child = inner._args['dict']['key']._args['list'][0]
    assert list_child._default == 'test_update_attr_values'
async def test_validate_attr_DICT_nested_dict_invalid():
    '''Non-dict value for a nested TYPED_DICT key raises InvalidAttrException.'''
    nested_type = ATTR.TYPED_DICT(
        dict={
            'key1': ATTR.STR(),
            'key2': ATTR.TYPED_DICT(dict={'child_key': ATTR.INT()}),
        }
    )
    with pytest.raises(InvalidAttrException):
        await validate_attr(
            attr_name='test_validate_attr_DICT',
            attr_type=nested_type,
            attr_val={'key1': 'value', 'key2': 2},
            mode='create',
        )
async def test_validate_attr_DICT_None_allow_none():
    '''`None` value is accepted for TYPED_DICT in `update` mode and returned as-is.'''
    attr_val = await validate_attr(
        attr_name='test_validate_attr_DICT',
        attr_type=ATTR.TYPED_DICT(dict={'key': ATTR.STR()}),
        attr_val=None,
        mode='update',
    )
    # PEP 8: compare to None with `is`, not `==`
    assert attr_val is None
async def test_validate_attr_DICT_nested_dict():
    '''A fully-valid nested TYPED_DICT value is returned unchanged.'''
    nested_val = {
        'key1': 'value',
        'key2': {'child_key': 2},
    }
    nested_type = ATTR.TYPED_DICT(
        dict={
            'key1': ATTR.STR(),
            'key2': ATTR.TYPED_DICT(dict={'child_key': ATTR.INT()}),
        }
    )
    result = await validate_attr(
        attr_name='test_validate_attr_DICT',
        attr_type=nested_type,
        attr_val=nested_val,
        mode='create',
    )
    assert result == nested_val
async def test_validate_attr_DICT_None():
    '''`None` value for TYPED_DICT in `create` mode raises InvalidAttrException.'''
    typed_dict = ATTR.TYPED_DICT(dict={'key': ATTR.STR()})
    with pytest.raises(InvalidAttrException):
        await validate_attr(
            attr_name='test_validate_attr_DICT',
            attr_type=typed_dict,
            attr_val=None,
            mode='create',
        )
def test_generate_attr_TYPED_DICT():
    '''Generated TYPED_DICT value has exactly the declared keys, each with a
    value matching the declared attr type.'''
    attr_val = utils.generate_attr(
        attr_type=ATTR.TYPED_DICT(
            dict={
                'foo': ATTR.INT(),
                'bar': ATTR.STR(),
            }
        )
    )
    # `len(d)` over `len(d.keys())`; `isinstance` over `type(x) == T` (PEP 8)
    assert len(attr_val) == 2
    assert set(attr_val) == {'foo', 'bar'}
    assert isinstance(attr_val['foo'], int)
    assert isinstance(attr_val['bar'], str)
async def test_validate_attr_DICT_default_int_allow_none():
    '''Invalid (non-dict) value in `update` mode resolves to `None`, even when
    the attr type carries a default.'''
    attr_type = ATTR.TYPED_DICT(dict={'key': ATTR.STR()})
    attr_type._default = 'test_validate_attr_DICT'
    attr_val = await validate_attr(
        attr_name='test_validate_attr_DICT',
        attr_type=attr_type,
        attr_val=1,
        mode='update',
    )
    # PEP 8: compare to None with `is`, not `==`
    assert attr_val is None
async def test_validate_doc_allow_update_kv_dict_typed_dict_time_dict_dot_notated():
    '''Dot-notated update path through KV_DICT -> TYPED_DICT -> TIME validates.'''
    shift_attr = ATTR.KV_DICT(
        key=ATTR.STR(pattern=r'[0-9]{2}'),
        val=ATTR.TYPED_DICT(dict={'start': ATTR.TIME(), 'end': ATTR.TIME()}),
    )
    doc = {'shift.01.start': '09:00'}
    await utils.validate_doc(doc=doc, attrs={'shift': shift_attr}, mode='update')
    # The dot-notated key must survive validation untouched
    assert doc == {'shift.01.start': '09:00'}
def test_generate_attr_UNION():
    '''Generated UNION value matches one of the declared union member types.'''
    attr_val1 = utils.generate_attr(
        attr_type=ATTR.UNION(union=[ATTR.STR(), ATTR.INT()])
    )
    attr_val2 = utils.generate_attr(
        attr_type=ATTR.UNION(
            union=[
                ATTR.LIST(list=[ATTR.STR()], min=1),
                ATTR.TYPED_DICT(dict={'foo': ATTR.FLOAT()}),
            ]
        )
    )
    # `isinstance` over `type(x) == T` / `type(x) in [...]` (PEP 8)
    assert isinstance(attr_val1, (str, int))
    assert (isinstance(attr_val2, list) and isinstance(attr_val2[0], str)) or (
        isinstance(attr_val2, dict) and isinstance(attr_val2['foo'], float)
    )
async def test_validate_attr_DICT_simple_dict_Any_None_value():
    '''`None` for an ANY key with no default raises InvalidAttrException.'''
    any_dict_type = ATTR.TYPED_DICT(dict={'key1': ATTR.ANY(), 'key2': ATTR.ANY()})
    with pytest.raises(InvalidAttrException):
        await validate_attr(
            attr_name='test_validate_attr_DICT',
            attr_type=any_dict_type,
            attr_val={
                'key1': '',  # [DOC] This is accepted
                'key2': None,  # [DOC] This would fail, raising exception
            },
            mode='create',
        )
async def test_validate_attr_DICT_simple_dict_Any_default_None_value():
    '''`None` for an ANY key is accepted when the ANY attr has a `None` default.'''
    any_with_none_default = ATTR.ANY()
    any_with_none_default._default = None
    dict_attr_val = {
        'key1': None,
        'key2': '',
    }
    result = await validate_attr(
        attr_name='test_validate_attr_DICT',
        attr_type=ATTR.TYPED_DICT(
            dict={'key1': any_with_none_default, 'key2': any_with_none_default}
        ),
        attr_val=dict_attr_val,
        mode='create',
    )
    assert result == dict_attr_val
async def test_validate_doc_allow_update_list_typed_dict_locale_dot_notated(
    preserve_state,
):
    '''Dot-notated LOCALE update inside LIST of TYPED_DICT validates.'''
    with preserve_state(config, 'Config'):
        # Temporarily register a second locale so the jp_JP path is valid
        config.Config.locales = ['en_GB', 'jp_JP']
        config.Config.locale = 'en_GB'
        val_attr = ATTR.LIST(
            list=[
                ATTR.TYPED_DICT(
                    dict={'address': ATTR.LOCALE(), 'coords': ATTR.GEO()}
                )
            ]
        )
        doc = {'val.0.address.jp_JP': 'new_address'}
        await utils.validate_doc(doc=doc, attrs={'val': val_attr}, mode='update')
        assert doc == {'val.0.address.jp_JP': 'new_address'}
async def test_validate_attr_DICT_nested_list_dict():
    '''Nested LIST child values are coerced during validation (here '2' -> 2).'''
    result = await validate_attr(
        attr_name='test_validate_attr_DICT',
        attr_type=ATTR.TYPED_DICT(
            dict={
                'key1': ATTR.STR(),
                'key2': ATTR.LIST(list=[ATTR.INT()]),
            }
        ),
        attr_val={'key1': 'value', 'key2': [1, '2', 3]},
        mode='create',
    )
    expected = {
        'key1': 'value',
        'key2': [1, 2, 3],
    }
    assert result == expected
version='1.0.0', gateways={ 'mailgun_messages': mailgun_messages_gateway, 'mailgun_newsletters': mailgun_newsletters_gateway, }, vars_types={ 'mailgun': ATTR.TYPED_DICT( dict={ 'key': ATTR.STR(), 'newsletters': ATTR.KV_DICT(key=ATTR.STR(), val=ATTR.STR()), 'senders': ATTR.KV_DICT( key=ATTR.STR(), val=ATTR.TYPED_DICT( dict={ 'uri': ATTR.URI_WEB(allowed_domains=['api.mailgun.net'], strict=True), 'sender_name': ATTR.STR(), 'sender_email': ATTR.EMAIL(), }), ), }) }, )
from typing import Dict, Any, TypedDict

from pyfcm import FCMNotification

# Shape of the auth var read from the registry. Uses the dict-based functional
# TypedDict syntax: the keyword-argument form (TypedDict('X', token=str)) is
# deprecated since Python 3.11 and removed in 3.13.
GATEWAY_FCM_AUTH = TypedDict('GATEWAY_FCM_AUTH', {'token': str})


def fcm_gateway(
    registration_id: str,
    message_title: str,
    message_body: str,
    data_message: Dict[str, Any],
    fcm_auth: GATEWAY_FCM_AUTH = None,
):
    '''Send a single FCM push notification via `pyfcm`.

    If `fcm_auth` is not provided (falsy), credentials are read from the
    registry var `fcm`, whose `token` key supplies the API key.
    '''
    if not fcm_auth:
        fcm_auth = Registry.var('fcm')
    push_service = FCMNotification(api_key=fcm_auth['token'])
    push_service.notify_single_device(
        registration_id=registration_id,
        message_title=message_title,
        message_body=message_body,
        data_message=data_message,
    )


config = PACKAGE_CONFIG(
    api_level='1.0',
    version='1.0.0',
    gateways={'fcm': fcm_gateway},
    vars_types={'fcm': ATTR.TYPED_DICT(dict={'token': ATTR.STR()})},
)
class Analytic(BaseModule):
    '''`Analytic` module provides data type and controller for `Analytics Workflow`
    and accompanying analytics docs. It uses the `pre_create` handler to assure no
    event duplications occur and all occurrences of the same event are recorded in
    one doc.'''

    collection = 'analytics'
    attrs = {
        'user': ATTR.ID(desc='`_id` of `User` doc the doc belongs to.'),
        'event': ATTR.STR(desc='Analytics event name.'),
        'subevent': ATTR.ANY(
            desc='Analytics subevent distinguishing attribute. This is usually `STR`, or `ID` but it is introduced in the module as `ANY` to allow wider use-cases by developers.'
        ),
        'date': ATTR.DATE(
            desc='Analytics event date. This allows clustering of events occupancies to limit doc size.'
        ),
        'occurrences': ATTR.LIST(
            desc='All occurrences of the event as list.',
            list=[
                ATTR.TYPED_DICT(
                    desc='Single occurrence of the event details.',
                    dict={
                        'args': ATTR.KV_DICT(
                            desc='Key-value `dict` containing event args, if any.',
                            key=ATTR.STR(),
                            val=ATTR.ANY(),
                        ),
                        'score': ATTR.INT(
                            desc='Numerical score for occurrence of the event.'
                        ),
                        'create_time': ATTR.DATETIME(
                            desc='Python `datetime` ISO format of the occurrence of the event.'
                        ),
                    },
                )
            ],
        ),
        'score': ATTR.INT(
            desc='Total score of all scores of all occurrences of the event. This can be used for data analysis.'
        ),
    }
    unique_attrs = [('user', 'event', 'subevent', 'date')]
    methods = {
        'read': METHOD(permissions=[PERM(privilege='read')]),
        'create': METHOD(
            permissions=[PERM(privilege='__sys')],
            doc_args={
                'event': ATTR.STR(),
                'subevent': ATTR.ANY(),
                'args': ATTR.KV_DICT(key=ATTR.STR(), val=ATTR.ANY()),
            },
        ),
        'update': METHOD(permissions=[PERM(privilege='__sys')]),
        'delete': METHOD(permissions=[PERM(privilege='delete')]),
    }

    async def pre_create(self, skip_events, env, query, doc, payload):
        '''Merge the new event occurrence into an existing doc for the same
        (user, event, subevent, date) cluster, or reshape `doc` for a fresh
        insert when no such doc exists.

        Raises the module exception if updating the existing doc fails.'''
        # Look for an existing doc clustering this user/event/subevent/date.
        analytic_results = await self.read(
            skip_events=[Event.PERM],
            env=env,
            query=[
                {
                    'user': env['session'].user._id,
                    'event': doc['event'],
                    'subevent': doc['subevent'],
                    'date': datetime.date.today().isoformat(),
                },
                {'$limit': 1},
            ],
        )
        # Hoisted: replaces three repetitions of
        # `doc['score'] if 'score' in doc.keys() else 0`
        score = doc.get('score', 0)
        if analytic_results.args.count:
            # Existing doc: append the occurrence and bump the total score.
            analytic_results = await self.update(
                skip_events=[Event.PERM],
                env=env,
                query=[{'_id': analytic_results.args.docs[0]._id}],
                doc={
                    'occurrences': {
                        '$append': {
                            'args': doc['args'],
                            'score': score,
                            'create_time': datetime.datetime.utcnow().isoformat(),
                        }
                    },
                    'score': {'$add': score},
                },
            )
            if analytic_results.status == 200:
                # Short-circuit the create call, returning the update results.
                return (skip_events, env, query, doc, {'__results': analytic_results})

            raise self.exception(
                status=analytic_results.status,
                msg=analytic_results.msg,
                args=analytic_results.args,
            )

        # No existing doc: reshape `doc` into the module's attrs layout.
        doc = {
            'event': doc['event'],
            'subevent': doc['subevent'],
            'date': datetime.date.today().isoformat(),
            'occurrences': [
                {
                    'args': doc['args'],
                    'score': score,
                    'create_time': datetime.datetime.utcnow().isoformat(),
                }
            ],
            'score': score,
        }
        return (skip_events, env, query, doc, payload)
# Ancora Imparo.
from typing import TypedDict

from otsdc.rest.client import OTSRestClient
from otsdc.url.http_url import HttpOTSUrl

from nawah.classes import PACKAGE_CONFIG, ATTR
from nawah.registry import Registry

# Shape of the auth var read from the registry. Uses the dict-based functional
# TypedDict syntax: the keyword-argument form (TypedDict('X', sid=str)) is
# deprecated since Python 3.11 and removed in 3.13.
GATEWAY_UNIFONIC_AUTH = TypedDict('GATEWAY_UNIFONIC_AUTH', {'sid': str})


def unifonic_gateway(
    phone: str,
    content: str,
    unifonic_auth: GATEWAY_UNIFONIC_AUTH = None,
):
    '''Send SMS `content` to `phone` via the Unifonic REST API.

    If `unifonic_auth` is not provided (falsy), credentials are read from the
    registry var `unifonic`, whose `sid` key supplies the app SID.
    '''
    if not unifonic_auth:
        unifonic_auth = Registry.var('unifonic')
    client = OTSRestClient(appSid=unifonic_auth['sid'])
    msg = client.messageResource
    msg.send(phone, content)


config = PACKAGE_CONFIG(
    api_level='1.0',
    version='1.0.0',
    gateways={'unifonic': unifonic_gateway},
    vars_types={'unifonic': ATTR.TYPED_DICT(dict={'sid': ATTR.STR()})},
)