async def test_validate_attr_DICT_nested_dict_invalid():
    """A nested KV_DICT must reject flat scalar values in place of dicts."""
    bad_val = {
        'key1': 'value',
        'key2': 2,
    }
    with pytest.raises(InvalidAttrException):
        await validate_attr(
            mode='create',
            attr_name='test_validate_attr_DICT',
            attr_type=ATTR.KV_DICT(
                key=ATTR.STR(),
                val=ATTR.KV_DICT(key=ATTR.STR(), val=ATTR.INT()),
            ),
            attr_val=bad_val,
        )
async def test_validate_attr_DICT_nested_dict():
    """A valid dict-of-dicts passes through a nested KV_DICT unchanged."""
    nested_val = {
        'key1': {'child_key': 1},
        'key2': {'child_key': 2},
    }
    result = await validate_attr(
        mode='create',
        attr_name='test_validate_attr_DICT',
        attr_type=ATTR.KV_DICT(
            key=ATTR.STR(),
            val=ATTR.KV_DICT(key=ATTR.STR(), val=ATTR.INT()),
        ),
        attr_val=nested_val,
    )
    assert result == nested_val
async def test_validate_attr_DICT_None_allow_none():
    """In 'update' mode a None value is accepted and returned as-is."""
    result = await validate_attr(
        mode='update',
        attr_name='test_validate_attr_DICT',
        attr_type=ATTR.KV_DICT(key=ATTR.STR(), val=ATTR.INT()),
        attr_val=None,
    )
    assert result is None
async def test_validate_attr_DICT_min_req_max_dict():
    """A 4-key dict satisfies min=3/max=4/req=['key3']; '4' is coerced to int."""
    result = await validate_attr(
        mode='create',
        attr_name='test_validate_attr_DICT',
        attr_type=ATTR.KV_DICT(
            key=ATTR.STR(), val=ATTR.INT(), min=3, max=4, req=['key3']
        ),
        attr_val={'key1': '4', 'key2': 3, 'key3': 0, 'key4': 5},
    )
    assert result == {'key1': 4, 'key2': 3, 'key3': 0, 'key4': 5}
async def test_validate_attr_DICT_min_req_max_dict_invalid():
    """A 5-key dict exceeds max=4, so validation must fail."""
    with pytest.raises(InvalidAttrException):
        await validate_attr(
            mode='create',
            attr_name='test_validate_attr_DICT',
            attr_type=ATTR.KV_DICT(
                key=ATTR.STR(), val=ATTR.INT(), min=3, max=4, req=['key3']
            ),
            attr_val={'key1': '4', 'key2': 3, 'key3': 0, 'key4': 5, 'key5': 2},
        )
async def test_validate_attr_DICT_req_dict():
    """req=['key3'] is satisfied; string value '4' is coerced to int."""
    result = await validate_attr(
        mode='create',
        attr_name='test_validate_attr_DICT',
        attr_type=ATTR.KV_DICT(key=ATTR.STR(), val=ATTR.INT(), req=['key3']),
        attr_val={'key1': '4', 'key2': 1, 'key3': 0},
    )
    assert result == {'key1': 4, 'key2': 1, 'key3': 0}
async def test_validate_attr_DICT_None():
    """In 'create' mode a None value for KV_DICT must be rejected."""
    with pytest.raises(InvalidAttrException):
        await validate_attr(
            mode='create',
            attr_name='test_validate_attr_DICT',
            attr_type=ATTR.KV_DICT(key=ATTR.STR(), val=ATTR.ANY()),
            attr_val=None,
        )
async def test_validate_attr_DICT_default_int_allow_none():
    """In 'update' mode an invalid (non-dict) value for a defaulted KV_DICT resolves to None."""
    dict_type = ATTR.KV_DICT(key=ATTR.STR(), val=ATTR.INT())
    dict_type._default = 'test_validate_attr_DICT'
    result = await validate_attr(
        mode='update',
        attr_name='test_validate_attr_DICT',
        attr_type=dict_type,
        attr_val=1,
    )
    assert result is None
async def test_validate_attr_DICT_nested_list_dict_invalid():
    """A non-int item inside a nested LIST value must fail validation."""
    with pytest.raises(InvalidAttrException):
        await validate_attr(
            mode='create',
            attr_name='test_validate_attr_DICT',
            attr_type=ATTR.KV_DICT(key=ATTR.STR(), val=ATTR.LIST(list=[ATTR.INT()])),
            attr_val={
                'key1': ['a'],
            },
        )
async def test_validate_doc_allow_update_kv_dict_typed_dict_time_dict_dot_notated():
    """A dot-notated update into KV_DICT-of-TYPED_DICT validates and leaves the doc untouched."""
    attrs = {
        'shift': ATTR.KV_DICT(
            key=ATTR.STR(pattern=r'[0-9]{2}'),
            val=ATTR.TYPED_DICT(dict={'start': ATTR.TIME(), 'end': ATTR.TIME()}),
        )
    }
    doc = {'shift.01.start': '09:00'}
    await utils.validate_doc(mode='update', doc=doc, attrs=attrs)
    assert doc == {'shift.01.start': '09:00'}
async def test_validate_attr_DICT_nested_list_dict():
    """Items inside nested LIST values are validated and coerced to int."""
    result = await validate_attr(
        mode='create',
        attr_name='test_validate_attr_DICT',
        attr_type=ATTR.KV_DICT(key=ATTR.STR(), val=ATTR.LIST(list=[ATTR.INT()])),
        attr_val={'key1': ['4'], 'key2': [1, '2', 3]},
    )
    assert result == {
        'key1': [4],
        'key2': [1, 2, 3],
    }
async def test_validate_attr_DICT_simple_dict():
    """ANY-typed values pass through a KV_DICT unchanged."""
    simple_val = {
        'key1': 3,
        'key2': 2,
    }
    result = await validate_attr(
        mode='create',
        attr_name='test_validate_attr_DICT',
        attr_type=ATTR.KV_DICT(key=ATTR.STR(), val=ATTR.ANY()),
        attr_val=simple_val,
    )
    assert result == simple_val
async def test_validate_attr_LIST_nested_dict():
    """Each KV_DICT item inside a LIST is validated, coercing '2' to 2."""
    result = await validate_attr(
        mode='create',
        attr_name='test_validate_attr_LIST',
        attr_type=ATTR.LIST(
            list=[ATTR.KV_DICT(key=ATTR.STR(), val=ATTR.INT())]
        ),
        attr_val=[{'key': 1}, {'key': '2'}],
    )
    assert result == [{'key': 1}, {'key': 2}]
async def test_validate_attr_LIST_nested_dict_invalid():
    """A non-int value inside a nested KV_DICT list item must fail."""
    with pytest.raises(InvalidAttrException):
        await validate_attr(
            mode='create',
            attr_name='test_validate_attr_LIST',
            attr_type=ATTR.LIST(
                list=[ATTR.KV_DICT(key=ATTR.STR(), val=ATTR.INT())]
            ),
            attr_val=[{'key': 1}, {'key': 'val'}],
        )
class Group(BaseModule):
    '''`Group` module provides data type and controller for groups in Nawah eco-system.'''

    collection = 'groups'
    # Doc attrs: every group doc carries these Nawah ATTR-typed fields.
    attrs = {
        'user': ATTR.ID(desc='`_id` of `User` doc the doc belongs to.'),
        'name': ATTR.LOCALE(desc='Name of the groups as `LOCALE`.'),
        'desc': ATTR.LOCALE(
            desc=
            'Description of the group as `LOCALE`. This can be used for dynamic generated groups that are meant to be exposed to end-users.'
        ),
        'privileges': ATTR.KV_DICT(
            desc='Privileges that any user is a member of the group has.',
            key=ATTR.STR(),
            val=ATTR.LIST(list=[ATTR.STR()]),
        ),
        'settings': ATTR.KV_DICT(
            desc=
            '`Setting` docs to be created, or required for members users when added to the group.',
            key=ATTR.STR(),
            val=ATTR.ANY(),
        ),
        'create_time': ATTR.DATETIME(
            desc='Python `datetime` ISO format of the doc creation.'),
    }
    # Default values applied when a doc omits the attr; empty desc per locale.
    defaults = {
        'desc': {locale: '' for locale in Config.locales},
        'privileges': {},
        'settings': {},
    }
    # Method permissions: admin-only, except update/delete which owners may
    # perform on their own groups (query_mod) with privileges stripped (doc_mod).
    methods = {
        'read': METHOD(permissions=[PERM(privilege='admin')]),
        'create': METHOD(permissions=[PERM(privilege='admin')]),
        'update': METHOD(
            permissions=[
                PERM(privilege='admin'),
                PERM(
                    privilege='update',
                    query_mod={'user': '******'},
                    doc_mod={'privileges': None},
                ),
            ],
            query_args={'_id': ATTR.ID()},
        ),
        'delete': METHOD(
            permissions=[
                PERM(privilege='admin'),
                PERM(privilege='delete', query_mod={'user': '******'}),
            ],
            query_args={'_id': ATTR.ID()},
        ),
    }

    async def pre_update(self, skip_events, env, query, doc, payload):
        '''Merge incoming `attrs` updates into the existing doc's `attrs` so a partial update does not clobber attrs not present in the request.'''
        # [DOC] Make sure no attrs overwriting would happen
        if 'attrs' in doc.keys():
            results = await self.read(skip_events=[Event.PERM], env=env, query=query)
            if not results.args.count:
                raise self.exception(status=400, msg='Group is invalid.', args={'code': 'INVALID_GROUP'})
            if results.args.count > 1:
                # Merging attrs against multiple docs is ambiguous; refuse.
                raise self.exception(
                    status=400,
                    msg=
                    'Updating group attrs can be done only to individual groups.',
                    args={'code': 'MULTI_ATTRS_UPDATE'},
                )
            # Overlay only non-None, non-empty values onto the stored attrs,
            # then replace the update payload with the merged result.
            results.args.docs[0]['attrs'].update({
                attr: doc['attrs'][attr]
                for attr in doc['attrs'].keys()
                if doc['attrs'][attr] != None and doc['attrs'][attr] != ''
            })
            doc['attrs'] = results.args.docs[0]['attrs']
        return (skip_events, env, query, doc, payload)
class User(BaseModule):
    '''`User` module provides data type and controller for users in Nawah eco-system. The permissions of the module methods are designed to be as secure for exposed calls, and as flexible for privileged-access.'''

    collection = 'users'
    # Doc attrs describing a user.
    attrs = {
        'name': ATTR.LOCALE(desc='Name of the user as `LOCALE`.'),
        'locale': ATTR.LOCALES(desc='Default locale of the user.'),
        'create_time': ATTR.DATETIME(
            desc='Python `datetime` ISO format of the doc creation.'),
        'login_time': ATTR.DATETIME(desc='Python `datetime` ISO format of the last login.'),
        'groups': ATTR.LIST(
            desc='List of `_id` for every group the user is member of.',
            list=[ATTR.ID(desc='`_id` of Group doc the user is member of.')],
        ),
        'privileges': ATTR.KV_DICT(
            desc=
            'Privileges of the user. These privileges are always available to the user regardless of whether groups user is part of have them or not.',
            key=ATTR.STR(),
            val=ATTR.LIST(list=[ATTR.STR()]),
        ),
        'status': ATTR.LITERAL(
            desc=
            'Status of the user to determine whether user has access to the app or not.',
            literal=['active', 'banned', 'deleted', 'disabled_password'],
        ),
    }
    defaults = {
        'login_time': None,
        'status': 'active',
        'groups': [],
        'privileges': {}
    }
    unique_attrs = []
    # Permissions: users can read/update/delete their own doc ($__user);
    # group and file management is privileged.
    methods = {
        'read': METHOD(permissions=[
            PERM(privilege='admin'),
            PERM(privilege='read', query_mod={'_id': '$__user'}),
        ]),
        'create': METHOD(permissions=[PERM(privilege='admin')]),
        'update': METHOD(
            permissions=[
                PERM(privilege='admin', doc_mod={'groups': None}),
                PERM(
                    privilege='update',
                    query_mod={'_id': '$__user'},
                    doc_mod={
                        'groups': None,
                        'privileges': None
                    },
                ),
            ],
            query_args={'_id': ATTR.ID()},
        ),
        'delete': METHOD(
            permissions=[
                PERM(privilege='admin'),
                PERM(privilege='delete', query_mod={'_id': '$__user'}),
            ],
            query_args={'_id': ATTR.ID()},
        ),
        'read_privileges': METHOD(
            permissions=[
                PERM(privilege='admin'),
                PERM(privilege='read', query_mod={'_id': '$__user'}),
            ],
            query_args={'_id': ATTR.ID()},
        ),
        'add_group': METHOD(
            permissions=[PERM(privilege='admin')],
            query_args={'_id': ATTR.ID()},
            # doc_args accepts either a single group _id or a list of them.
            doc_args=[{
                'group': ATTR.ID()
            }, {
                'group': ATTR.LIST(list=[ATTR.ID()])
            }],
        ),
        'delete_group': METHOD(
            permissions=[PERM(privilege='admin')],
            query_args={
                '_id': ATTR.ID(),
                'group': ATTR.ID()
            },
        ),
        'retrieve_file': METHOD(permissions=[PERM(privilege='__sys')], get_method=True),
        'create_file': METHOD(permissions=[PERM(privilege='__sys')]),
        'delete_file': METHOD(permissions=[PERM(privilege='__sys')]),
    }

    async def on_read(self, results, skip_events, env, query, doc, payload):
        '''Strip auth-attr hashes from read results and overlay user doc settings.'''
        for i in range(len(results['docs'])):
            user = results['docs'][i]
            # Never expose password/token hashes to callers.
            for auth_attr in Config.user_attrs.keys():
                del user[f'{auth_attr}_hash']
            if len(Config.user_doc_settings):
                # Fetch Setting docs that act as virtual user-doc attrs.
                setting_results = await Registry.module('setting').read(
                    skip_events=[Event.PERM, Event.ARGS],
                    env=env,
                    query=[{
                        'user': user._id,
                        'var': {
                            '$in': Config.user_doc_settings
                        }
                    }],
                )
                user_doc_settings = copy.copy(Config.user_doc_settings)
                if setting_results.args.count:
                    for setting_doc in setting_results.args.docs:
                        user_doc_settings.remove(setting_doc['var'])
                        user[setting_doc['var']] = setting_doc['val']
                # [DOC] Forward-compatibility: If user was created before presence of any user_doc_settings, add them with default value
                for setting_attr in user_doc_settings:
                    user[setting_attr] = Config.user_settings[
                        setting_attr].default
                    # [DOC] Set NAWAH_VALUES.NONE_VALUE to None if it was default
                    if user[setting_attr] == NAWAH_VALUES.NONE_VALUE:
                        user[setting_attr] = None
        return (results, skip_events, env, query, doc, payload)

    async def pre_create(self, skip_events, env, query, doc, payload):
        '''Assign the default group and validate/collect user settings into payload for `on_create`.'''
        if Event.ARGS not in skip_events:
            # Default membership group for newly created users.
            doc['groups'] = [ObjectId('f00000000000000000000013')]
        user_settings = {}
        for attr in Config.user_settings.keys():
            if Config.user_settings[attr].type == 'user_sys':
                # System-managed setting: always seeded from its default.
                user_settings[attr] = copy.deepcopy(
                    Config.user_settings[attr].default)
            else:
                if attr in doc.keys():
                    try:
                        await validate_attr(
                            mode='create',
                            attr_name=attr,
                            attr_type=Config.user_settings[attr].val_type,
                            attr_val=doc[attr],
                        )
                        user_settings[attr] = doc[attr]
                    except:
                        # NOTE(review): bare except also swallows KeyboardInterrupt
                        # etc.; presumably meant to catch validation failure only.
                        raise self.exception(
                            status=400,
                            msg=
                            f'Invalid settings attr \'{attr}\' for \'create\' request on module \'CORE_USER\'',
                            args={'code': 'INVALID_ATTR'},
                        )
                else:
                    # Setting absent from doc: required if it has no default.
                    if Config.user_settings[
                            attr].default == NAWAH_VALUES.NONE_VALUE:
                        raise self.exception(
                            status=400,
                            msg=
                            f'Missing settings attr \'{attr}\' for \'create\' request on module \'CORE_USER\'',
                            args={'code': 'MISSING_ATTR'},
                        )
                    else:
                        user_settings[attr] = copy.deepcopy(
                            Config.user_settings[attr].default)
        # Stash collected settings for on_create to persist as Setting docs.
        payload['user_settings'] = user_settings
        return (skip_events, env, query, doc, payload)

    async def on_create(self, results, skip_events, env, query, doc, payload):
        '''Persist the settings collected in `pre_create` as `Setting` docs for the new user.'''
        if 'user_settings' in payload.keys():
            for setting in payload['user_settings'].keys():
                setting_results = await Registry.module('setting').create(
                    skip_events=[Event.PERM, Event.ARGS],
                    env=env,
                    doc={
                        'user': results['docs'][0]._id,
                        'var': setting,
                        'val_type': encode_attr_type(
                            attr_type=Config.user_settings[setting].val_type),
                        'val': payload['user_settings'][setting],
                        'type': Config.user_settings[setting].type,
                    },
                )
                # Propagate the first failure instead of continuing.
                if setting_results.status != 200:
                    return setting_results
        return (results, skip_events, env, query, doc, payload)

    async def read_privileges(self, skip_events=[], env={}, query=[], doc={}):
        '''Read a user doc with privileges expanded by union of all group privileges.'''
        # [DOC] Confirm _id is valid
        results = await self.read(skip_events=[Event.PERM],
                                  env=env,
                                  query=[{
                                      '_id': query['_id'][0]
                                  }])
        if not results.args.count:
            raise self.exception(status=400,
                                 msg='User is invalid.',
                                 args={'code': 'INVALID_USER'})
        user = results.args.docs[0]
        # Merge each group's privileges into the user's own, deduplicated.
        for group in user.groups:
            group_results = await Registry.module('group').read(
                skip_events=[Event.PERM], env=env, query=[{
                    '_id': group
                }])
            group = group_results.args.docs[0]
            for privilege in group.privileges.keys():
                if privilege not in user.privileges.keys():
                    user.privileges[privilege] = []
                for i in range(len(group.privileges[privilege])):
                    if group.privileges[privilege][i] not in user.privileges[
                            privilege]:
                        user.privileges[privilege].append(
                            group.privileges[privilege][i])
        return results

    async def add_group(self, skip_events=[], env={}, query=[], doc={}):
        '''Add one group (or each of a list of groups) to the user's `groups` and refresh session privileges if needed.'''
        # [DOC] Check for list group attr
        if type(doc['group']) == list:
            # Recurse for all but the last group, then fall through to
            # process the last one in this call.
            for i in range(0, len(doc['group']) - 1):
                await self.add_group(
                    skip_events=skip_events,
                    env=env,
                    query=query,
                    doc={'group': doc['group'][i]},
                )
            doc['group'] = doc['group'][-1]
        # [DOC] Confirm all basic args are provided
        doc['group'] = ObjectId(doc['group'])
        # [DOC] Confirm group is valid
        results = await Registry.module('group').read(skip_events=[Event.PERM],
                                                      env=env,
                                                      query=[{
                                                          '_id': doc['group']
                                                      }])
        if not results.args.count:
            raise self.exception(status=400, msg='Group is invalid.', args={'code': 'INVALID_GROUP'})
        # [DOC] Get user details
        results = await self.read(skip_events=[Event.PERM], env=env, query=query)
        if not results.args.count:
            raise self.exception(status=400, msg='User is invalid.', args={'code': 'INVALID_USER'})
        user = results.args.docs[0]
        # [DOC] Confirm group was not added before
        if doc['group'] in user.groups:
            raise self.exception(
                status=400,
                msg='User is already a member of the group.',
                args={'code': 'GROUP_ADDED'},
            )
        user.groups.append(doc['group'])
        # [DOC] Update the user
        results = await self.update(skip_events=[Event.PERM],
                                    env=env,
                                    query=query,
                                    doc={'groups': user.groups})
        # [DOC] if update fails, return update results
        if results.status != 200:
            return results
        # [DOC] Check if the updated User doc belongs to current session and update it
        if env['session'].user._id == user._id:
            user_results = await self.read_privileges(skip_events=[Event.PERM],
                                                      env=env,
                                                      query=[{
                                                          '_id': user._id
                                                      }])
            env['session']['user'] = user_results.args.docs[0]
        return results

    async def delete_group(self, skip_events=[], env={}, query=[], doc={}):
        '''Remove a group from the user's `groups` and refresh session privileges if needed.'''
        # [DOC] Confirm group is valid
        results = await Registry.module('group').read(skip_events=[Event.PERM],
                                                      env=env,
                                                      query=[{
                                                          '_id': query['group'][0]
                                                      }])
        if not results.args.count:
            raise self.exception(status=400, msg='Group is invalid.', args={'code': 'INVALID_GROUP'})
        # [DOC] Get user details
        results = await self.read(skip_events=[Event.PERM],
                                  env=env,
                                  query=[{
                                      '_id': query['_id'][0]
                                  }])
        if not results.args.count:
            raise self.exception(status=400, msg='User is invalid.', args={'code': 'INVALID_USER'})
        user = results.args.docs[0]
        # [DOC] Confirm group was not added before
        if query['group'][0] not in user.groups:
            raise self.exception(
                status=400,
                msg='User is not a member of the group.',
                args={'code': 'GROUP_NOT_ADDED'},
            )
        # [DOC] Update the user
        results = await self.update(
            skip_events=[Event.PERM],
            env=env,
            query=[{
                '_id': query['_id'][0]
            }],
            doc={'groups': {
                '$del_val': [query['group'][0]]
            }},
        )
        # [DOC] if update fails, return update results
        if results.status != 200:
            return results
        # [DOC] Check if the updated User doc belongs to current session and update it
        if env['session'].user._id == user._id:
            user_results = await self.read_privileges(skip_events=[Event.PERM],
                                                      env=env,
                                                      query=[{
                                                          '_id': user._id
                                                      }])
            env['session']['user'] = user_results.args.docs[0]
        return results
# Package configuration: registers the two Mailgun gateways and declares the
# shape of the `mailgun` var the gateways read (API key, newsletters map, and
# per-sender connection details restricted to api.mailgun.net).
# FIX: the original statement left the `vars_types` dict and the
# PACKAGE_CONFIG(...) call unclosed (unbalanced brackets — a syntax error).
config = PACKAGE_CONFIG(
    api_level='1.0',
    version='1.0.0',
    gateways={
        'mailgun_messages': mailgun_messages_gateway,
        'mailgun_newsletters': mailgun_newsletters_gateway,
    },
    vars_types={
        'mailgun': ATTR.TYPED_DICT(
            dict={
                'key': ATTR.STR(),
                'newsletters': ATTR.KV_DICT(key=ATTR.STR(), val=ATTR.STR()),
                'senders': ATTR.KV_DICT(
                    key=ATTR.STR(),
                    val=ATTR.TYPED_DICT(
                        dict={
                            'uri': ATTR.URI_WEB(
                                allowed_domains=['api.mailgun.net'], strict=True
                            ),
                            'sender_name': ATTR.STR(),
                            'sender_email': ATTR.EMAIL(),
                        }
                    ),
                ),
            }
        ),
    },
)
def test_generate_attr_KV_DICT_no_args():
    """With no constraints, a generated KV_DICT is empty."""
    generated = utils.generate_attr(
        attr_type=ATTR.KV_DICT(key=ATTR.STR(), val=ATTR.INT())
    )
    assert len(generated) == 0
def test_generate_attr_KV_DICT_key_LITERAL():
    """LITERAL keys with min=2 generate exactly the two literal keys."""
    generated = utils.generate_attr(
        attr_type=ATTR.KV_DICT(
            key=ATTR.LITERAL(literal=['foo', 'bar']), val=ATTR.INT(), min=2
        )
    )
    assert len(generated) == 2
    assert set(generated) == {'foo', 'bar'}
def test_generate_attr_KV_DICT_min():
    """min=2 generates exactly two str-keyed, int-valued entries."""
    generated = utils.generate_attr(
        attr_type=ATTR.KV_DICT(key=ATTR.STR(), val=ATTR.INT(), min=2)
    )
    assert len(generated) == 2
    assert {type(key) for key in generated} == {str}
    assert {type(value) for value in generated.values()} == {int}
class Diff(BaseModule):
    '''`Diff` module provides data type and controller for `Diff Workflow`. It is meant for use by internal calls only.'''

    collection = 'diff'
    # Doc attrs for a diff record pointing back at the original doc.
    attrs = {
        'user': ATTR.ID(desc='`_id` of `User` doc the doc belongs to.'),
        'module': ATTR.STR(desc='Name of the module the original doc is part of.'),
        'doc': ATTR.ID(desc='`_id` of the original doc.'),
        'vars': ATTR.KV_DICT(
            desc='Key-value `dict` containing all attrs that have been updated from the original doc.',
            key=ATTR.STR(),
            val=ATTR.ANY(),
        ),
        'remarks': ATTR.STR(
            desc='Human-readable remarks of the doc. This is introduced to allow developers to add log messages to diff docs.'
        ),
        'create_time': ATTR.DATETIME(
            desc='Python `datetime` ISO format of the doc creation.'
        ),
    }
    defaults = {'doc': None, 'remarks': ''}
    methods = {
        'read': METHOD(permissions=[PERM(privilege='read')]),
        'create': METHOD(permissions=[PERM(privilege='__sys')]),
        'delete': METHOD(permissions=[PERM(privilege='delete')]),
    }

    async def pre_create(self, skip_events, env, query, doc, payload):
        '''Resolve which original doc(s) the diff applies to and pin `doc['doc']` to a single `_id`, fanning out extra diff docs when the query matched several.'''
        # [DOC] format Doc Oper with prefixed underscores to avoid data errors
        doc = self.format_doc_oper(doc=doc)
        # [DOC] Detect non-_id update query:
        if '_id' not in query:
            results = await Registry.module(doc['module']).read(
                skip_events=[Event.PERM], env=env, query=query
            )
            if results.args.count > 1:
                # Pin the query to the exact set of matched _ids.
                query.append(
                    {'_id': {'$in': [matched_doc._id for matched_doc in results.args.docs]}}
                )
            elif results.args.count == 1:
                query.append({'_id': results.args.docs[0]._id})
            else:
                raise self.exception(
                    status=400, msg='No update docs matched.', args={'code': 'NO_MATCH'}
                )
        if '_id' in query and type(query['_id'][0]) == list:
            # Fan out: create one diff doc per matched _id, keeping the last
            # _id for this call to finish creating.
            for i in range(len(query['_id'][0]) - 1):
                # BUGFIX: `self.create` is a coroutine method; the original code
                # called it without `await`, so the per-_id diff docs were never
                # actually created (the coroutine was discarded unawaited).
                await self.create(
                    skip_events=[Event.PERM],
                    env=env,
                    query=[{'_id': query['_id'][0][i]}],
                    doc=doc,
                )
            query['_id'][0] = query['_id'][0][-1]
        doc['doc'] = ObjectId(query['_id'][0])
        return (skip_events, env, query, doc, payload)

    def format_doc_oper(self, *, doc: NAWAH_DOC):
        '''Return a copy of *doc* with `$`-prefixed Doc Oper keys renamed to `__$...`, recursing into nested dicts, to avoid data errors when persisting.'''
        shadow_doc: NAWAH_DOC = {}
        for attr in doc.keys():
            if attr[0] == '$':
                shadow_doc[f'__{attr}'] = doc[attr]
            elif type(doc[attr]) == dict:
                shadow_doc[attr] = self.format_doc_oper(doc=doc[attr])
            else:
                shadow_doc[attr] = doc[attr]
        return shadow_doc
class Analytic(BaseModule):
    '''`Analytic` module provides data type and controller from `Analytics Workflow` and accompanying analytics docs. It uses `pre_create` handler to assure no events duplications occur and all occurrences of the same event are recorded in one doc.'''

    collection = 'analytics'
    # Doc attrs: one analytics doc clusters all occurrences of one
    # (user, event, subevent) combination for a single date.
    attrs = {
        'user': ATTR.ID(desc='`_id` of `User` doc the doc belongs to.'),
        'event': ATTR.STR(desc='Analytics event name.'),
        'subevent': ATTR.ANY(
            desc=
            'Analytics subevent distinguishing attribute. This is usually `STR`, or `ID` but it is introduced in the module as `ANY` to allow wider use-cases by developers.'
        ),
        'date': ATTR.DATE(
            desc=
            'Analytics event date. This allows clustering of events occupancies to limit doc size.'
        ),
        'occurrences': ATTR.LIST(
            desc='All occurrences of the event as list.',
            list=[
                ATTR.TYPED_DICT(
                    desc='Single occurrence of the event details.',
                    dict={
                        'args': ATTR.KV_DICT(
                            desc=
                            'Key-value `dict` containing event args, if any.',
                            key=ATTR.STR(),
                            val=ATTR.ANY(),
                        ),
                        'score': ATTR.INT(
                            desc='Numerical score for occurrence of the event.'
                        ),
                        'create_time': ATTR.DATETIME(
                            desc=
                            'Python `datetime` ISO format of the occurrence of the event.'
                        ),
                    },
                )
            ],
        ),
        'score': ATTR.INT(
            desc=
            'Total score of all scores of all occurrences of the event. This can be used for data analysis.'
        ),
    }
    # One doc per user/event/subevent/date combination.
    unique_attrs = [('user', 'event', 'subevent', 'date')]
    methods = {
        'read': METHOD(permissions=[PERM(privilege='read')]),
        'create': METHOD(
            permissions=[PERM(privilege='__sys')],
            doc_args={
                'event': ATTR.STR(),
                'subevent': ATTR.ANY(),
                'args': ATTR.KV_DICT(key=ATTR.STR(), val=ATTR.ANY()),
            },
        ),
        'update': METHOD(permissions=[PERM(privilege='__sys')]),
        'delete': METHOD(permissions=[PERM(privilege='delete')]),
    }

    async def pre_create(self, skip_events, env, query, doc, payload):
        '''Deduplicate event docs: append to today's existing doc for the same (user, event, subevent) if one exists, otherwise shape a fresh doc with the first occurrence.'''
        # Look up today's doc for this user/event/subevent cluster.
        analytic_results = await self.read(
            skip_events=[Event.PERM],
            env=env,
            query=[
                {
                    'user': env['session'].user._id,
                    'event': doc['event'],
                    'subevent': doc['subevent'],
                    'date': datetime.date.today().isoformat(),
                },
                {
                    '$limit': 1
                },
            ],
        )
        if analytic_results.args.count:
            # Existing doc: append the occurrence and add to the total score.
            analytic_results = await self.update(
                skip_events=[Event.PERM],
                env=env,
                query=[{
                    '_id': analytic_results.args.docs[0]._id
                }],
                doc={
                    'occurrences': {
                        '$append': {
                            'args': doc['args'],
                            'score': doc['score'] if 'score' in doc.keys() else 0,
                            'create_time': datetime.datetime.utcnow().isoformat(),
                        }
                    },
                    'score': {
                        '$add': doc['score'] if 'score' in doc.keys() else 0
                    },
                },
            )
            if analytic_results.status == 200:
                # Short-circuit the create: report the update results instead.
                return (skip_events, env, query, doc, {
                    '__results': analytic_results
                })
            else:
                raise self.exception(
                    status=analytic_results.status,
                    msg=analytic_results.msg,
                    args=analytic_results.args,
                )
        else:
            # No doc yet today: build the initial doc with one occurrence.
            doc = {
                'event': doc['event'],
                'subevent': doc['subevent'],
                'date': datetime.date.today().isoformat(),
                'occurrences': [{
                    'args': doc['args'],
                    'score': doc['score'] if 'score' in doc.keys() else 0,
                    'create_time': datetime.datetime.utcnow().isoformat(),
                }],
                'score': doc['score'] if 'score' in doc.keys() else 0,
            }
            return (skip_events, env, query, doc, payload)