async def test_validate_attr_DICT_simple_dict_Any_None_value():
    """Assert TYPED_DICT with ANY values rejects a None value for a key."""
    typed_dict = ATTR.TYPED_DICT(dict={'key1': ATTR.ANY(), 'key2': ATTR.ANY()})
    # 'key1' is an empty string, which ANY accepts; 'key2' is None, which
    # ANY (without a default) rejects, so validation must raise
    dict_val = {'key1': '', 'key2': None}
    with pytest.raises(InvalidAttrException):
        await validate_attr(
            attr_name='test_validate_attr_DICT',
            attr_type=typed_dict,
            attr_val=dict_val,
            mode='create',
        )
def test_compile_query_step_one_step_match_attr():
    """Assert _compile_query_step appends a plain attr step to
    aggregate_match, leaving prefix and suffix stages untouched."""
    draft_filters = ('__deleted', '__create_draft', '__update_draft')
    prefix = [{'$match': {f: {'$exists': False}}} for f in draft_filters]
    suffix = [{'$group': {'_id': '$_id'}}]
    match = []
    _query._compile_query_step(
        aggregate_prefix=prefix,
        aggregate_suffix=suffix,
        aggregate_match=match,
        collection_name='collection_name',
        attrs={'attr': ATTR.ANY()},
        step={'attr': 'match_term'},
        watch_mode=False,
    )
    # Prefix and suffix must come out exactly as they went in
    assert prefix == [{'$match': {f: {'$exists': False}}} for f in draft_filters]
    assert suffix == [{'$group': {'_id': '$_id'}}]
    # The step is appended verbatim as a match term
    assert match == [{'attr': 'match_term'}]
async def test_validate_attr_ANY_None():
    """Assert ANY rejects None when no default is defined."""
    any_type = ATTR.ANY()
    with pytest.raises(InvalidAttrException):
        await validate_attr(
            attr_name='test_validate_attr_ANY',
            attr_type=any_type,
            attr_val=None,
            mode='create',
        )
async def test_validate_attr_ANY_str():
    """Assert ANY passes a str value through unchanged."""
    test_val = 'test_validate_attr_ANY'
    returned_val = await validate_attr(
        attr_name='test_validate_attr_ANY',
        attr_type=ATTR.ANY(),
        attr_val=test_val,
        mode='create',
    )
    assert returned_val == test_val
async def test_validate_attr_DICT_None():
    """Assert KV_DICT rejects None as an attr value."""
    kv_dict_type = ATTR.KV_DICT(key=ATTR.STR(), val=ATTR.ANY())
    with pytest.raises(InvalidAttrException):
        await validate_attr(
            attr_name='test_validate_attr_DICT',
            attr_type=kv_dict_type,
            attr_val=None,
            mode='create',
        )
async def test_validate_attr_ANY_default_None():
    """Assert ANY falls back to its _default when the value is None."""
    default_val = 'test_validate_attr_ANY'
    any_type = ATTR.ANY()
    any_type._default = default_val
    returned_val = await validate_attr(
        attr_name='test_validate_attr_ANY',
        attr_type=any_type,
        attr_val=None,
        mode='create',
    )
    assert returned_val == default_val
async def test_validate_attr_DICT_simple_dict():
    """Assert KV_DICT with STR keys and ANY values accepts a plain dict."""
    simple_dict = {'key1': 3, 'key2': 2}
    returned_val = await validate_attr(
        attr_name='test_validate_attr_DICT',
        attr_type=ATTR.KV_DICT(key=ATTR.STR(), val=ATTR.ANY()),
        attr_val=simple_dict,
        mode='create',
    )
    assert returned_val == simple_dict
async def test_validate_attr_DICT_simple_dict_Any_default_None_value():
    """Assert TYPED_DICT accepts a None value when the ANY type
    defines a None default."""
    any_with_default = ATTR.ANY()
    any_with_default._default = None
    typed_dict = ATTR.TYPED_DICT(
        dict={'key1': any_with_default, 'key2': any_with_default}
    )
    dict_val = {'key1': None, 'key2': ''}
    returned_val = await validate_attr(
        attr_name='test_validate_attr_DICT',
        attr_type=typed_dict,
        attr_val=dict_val,
        mode='create',
    )
    assert returned_val == dict_val
def test_compile_query_attrs_mixed_in_attrs():
    """Assert _compile_query projects only attrs present in the module
    attrs when '$attrs' mixes known and unknown attr names."""
    skip, limit, sort, group, aggregate_query = _query._compile_query(
        collection_name='collection_name',
        attrs={'attr1': ATTR.ANY()},
        query=Query([{'$attrs': ['attr1', 'attr2']}]),
        watch_mode=False,
    )
    # [FIX] Identity comparison for None (PEP 8) instead of `== None`
    assert skip is None
    assert limit is None
    assert sort == {'_id': -1}
    assert group is None
    assert aggregate_query == [
        {'$match': {'__deleted': {'$exists': False}}},
        {'$match': {'__create_draft': {'$exists': False}}},
        {'$match': {'__update_draft': {'$exists': False}}},
        # 'attr2' is not defined in module attrs, so only 'attr1' is projected
        {'$group': {'_id': '$_id', 'attr1': {'$first': '$attr1'}}},
    ]
class Group(BaseModule):
    '''`Group` module provides data type and controller for groups in Nawah eco-system.'''

    collection = 'groups'
    attrs = {
        'user': ATTR.ID(desc='`_id` of `User` doc the doc belongs to.'),
        'name': ATTR.LOCALE(desc='Name of the groups as `LOCALE`.'),
        'desc': ATTR.LOCALE(
            desc='Description of the group as `LOCALE`. This can be used for dynamic generated groups that are meant to be exposed to end-users.'
        ),
        'privileges': ATTR.KV_DICT(
            desc='Privileges that any user is a member of the group has.',
            key=ATTR.STR(),
            val=ATTR.LIST(list=[ATTR.STR()]),
        ),
        'settings': ATTR.KV_DICT(
            desc='`Setting` docs to be created, or required for members users when added to the group.',
            key=ATTR.STR(),
            val=ATTR.ANY(),
        ),
        'create_time': ATTR.DATETIME(
            desc='Python `datetime` ISO format of the doc creation.'
        ),
    }
    defaults = {
        'desc': {locale: '' for locale in Config.locales},
        'privileges': {},
        'settings': {},
    }
    methods = {
        'read': METHOD(permissions=[PERM(privilege='admin')]),
        'create': METHOD(permissions=[PERM(privilege='admin')]),
        'update': METHOD(
            permissions=[
                PERM(privilege='admin'),
                PERM(
                    privilege='update',
                    query_mod={'user': '******'},
                    doc_mod={'privileges': None},
                ),
            ],
            query_args={'_id': ATTR.ID()},
        ),
        'delete': METHOD(
            permissions=[
                PERM(privilege='admin'),
                PERM(privilege='delete', query_mod={'user': '******'}),
            ],
            query_args={'_id': ATTR.ID()},
        ),
    }

    async def pre_update(self, skip_events, env, query, doc, payload):
        '''Merge updated `attrs` into the matched group doc's existing
        `attrs`, skipping None and empty-string values, so attrs that are
        not being updated are not overwritten.

        Raises 400 when the query matches no group, or more than one.'''
        if 'attrs' in doc:
            results = await self.read(skip_events=[Event.PERM], env=env, query=query)
            if not results.args.count:
                raise self.exception(
                    status=400, msg='Group is invalid.', args={'code': 'INVALID_GROUP'}
                )
            if results.args.count > 1:
                raise self.exception(
                    status=400,
                    msg='Updating group attrs can be done only to individual groups.',
                    args={'code': 'MULTI_ATTRS_UPDATE'},
                )
            # [DOC] Make sure no attrs overwriting would happen: start from the
            # stored attrs and only apply non-empty updated values
            # [FIX] `is not None` identity check instead of `!= None` (PEP 8)
            results.args.docs[0]['attrs'].update(
                {
                    attr: val
                    for attr, val in doc['attrs'].items()
                    if val is not None and val != ''
                }
            )
            doc['attrs'] = results.args.docs[0]['attrs']
        return (skip_events, env, query, doc, payload)
def test_generate_attr_ANY():
    """Smoke-test that generate_attr handles the ANY attr type without raising."""
    any_type = ATTR.ANY()
    utils.generate_attr(attr_type=any_type)
class Analytic(BaseModule):
    '''`Analytic` module provides data type and controller from `Analytics Workflow` and accompanying analytics docs. It uses `pre_create` handler to assure no events duplications occur and all occurrences of the same event are recorded in one doc.'''

    collection = 'analytics'
    attrs = {
        'user': ATTR.ID(desc='`_id` of `User` doc the doc belongs to.'),
        'event': ATTR.STR(desc='Analytics event name.'),
        'subevent': ATTR.ANY(
            desc='Analytics subevent distinguishing attribute. This is usually `STR`, or `ID` but it is introduced in the module as `ANY` to allow wider use-cases by developers.'
        ),
        'date': ATTR.DATE(
            desc='Analytics event date. This allows clustering of events occupancies to limit doc size.'
        ),
        'occurrences': ATTR.LIST(
            desc='All occurrences of the event as list.',
            list=[
                ATTR.TYPED_DICT(
                    desc='Single occurrence of the event details.',
                    dict={
                        'args': ATTR.KV_DICT(
                            desc='Key-value `dict` containing event args, if any.',
                            key=ATTR.STR(),
                            val=ATTR.ANY(),
                        ),
                        'score': ATTR.INT(
                            desc='Numerical score for occurrence of the event.'
                        ),
                        'create_time': ATTR.DATETIME(
                            desc='Python `datetime` ISO format of the occurrence of the event.'
                        ),
                    },
                )
            ],
        ),
        'score': ATTR.INT(
            desc='Total score of all scores of all occurrences of the event. This can be used for data analysis.'
        ),
    }
    unique_attrs = [('user', 'event', 'subevent', 'date')]
    methods = {
        'read': METHOD(permissions=[PERM(privilege='read')]),
        'create': METHOD(
            permissions=[PERM(privilege='__sys')],
            doc_args={
                'event': ATTR.STR(),
                'subevent': ATTR.ANY(),
                'args': ATTR.KV_DICT(key=ATTR.STR(), val=ATTR.ANY()),
            },
        ),
        'update': METHOD(permissions=[PERM(privilege='__sys')]),
        'delete': METHOD(permissions=[PERM(privilege='delete')]),
    }

    async def pre_create(self, skip_events, env, query, doc, payload):
        '''Deduplicate analytics events: if a doc for the same user, event,
        subevent, and today's date exists, append an occurrence and bump the
        score on it; otherwise shape `doc` as a fresh analytics doc.

        Raises when the dedup update call fails (propagates its status/msg).'''
        # [FIX] Hoist repeated `doc['score'] if 'score' in doc.keys() else 0`
        # into one idiomatic dict.get lookup; same for today's ISO date
        score = doc.get('score', 0)
        event_date = datetime.date.today().isoformat()
        analytic_results = await self.read(
            skip_events=[Event.PERM],
            env=env,
            query=[
                {
                    'user': env['session'].user._id,
                    'event': doc['event'],
                    'subevent': doc['subevent'],
                    'date': event_date,
                },
                {'$limit': 1},
            ],
        )
        if analytic_results.args.count:
            # [DOC] Event already recorded today: append occurrence, add score
            analytic_results = await self.update(
                skip_events=[Event.PERM],
                env=env,
                query=[{'_id': analytic_results.args.docs[0]._id}],
                doc={
                    'occurrences': {
                        '$append': {
                            'args': doc['args'],
                            'score': score,
                            'create_time': datetime.datetime.utcnow().isoformat(),
                        }
                    },
                    'score': {'$add': score},
                },
            )
            if analytic_results.status != 200:
                raise self.exception(
                    status=analytic_results.status,
                    msg=analytic_results.msg,
                    args=analytic_results.args,
                )
            # [DOC] Short-circuit the create call; update results are returned
            return (skip_events, env, query, doc, {'__results': analytic_results})

        # [DOC] First occurrence of the event today: shape the doc accordingly
        doc = {
            'event': doc['event'],
            'subevent': doc['subevent'],
            'date': event_date,
            'occurrences': [
                {
                    'args': doc['args'],
                    'score': score,
                    'create_time': datetime.datetime.utcnow().isoformat(),
                }
            ],
            'score': score,
        }
        return (skip_events, env, query, doc, payload)
class Diff(BaseModule):
    '''`Diff` module provides data type and controller for `Diff Workflow`. It is meant for use by internal calls only.'''

    collection = 'diff'
    attrs = {
        'user': ATTR.ID(desc='`_id` of `User` doc the doc belongs to.'),
        'module': ATTR.STR(desc='Name of the module the original doc is part of.'),
        'doc': ATTR.ID(desc='`_id` of the original doc.'),
        'vars': ATTR.KV_DICT(
            desc='Key-value `dict` containing all attrs that have been updated from the original doc.',
            key=ATTR.STR(),
            val=ATTR.ANY(),
        ),
        'remarks': ATTR.STR(
            desc='Human-readable remarks of the doc. This is introduced to allow developers to add log messages to diff docs.'
        ),
        'create_time': ATTR.DATETIME(
            desc='Python `datetime` ISO format of the doc creation.'
        ),
    }
    defaults = {'doc': None, 'remarks': ''}
    methods = {
        'read': METHOD(permissions=[PERM(privilege='read')]),
        'create': METHOD(permissions=[PERM(privilege='__sys')]),
        'delete': METHOD(permissions=[PERM(privilege='delete')]),
    }

    async def pre_create(self, skip_events, env, query, doc, payload):
        '''Resolve the query to concrete `_id`s and create one diff doc per
        matched original doc, leaving the last `_id` for the default create
        flow of this call.

        Raises 400 when a non-`_id` query matches no docs.'''
        # [DOC] format Doc Oper with prefixed underscores to avoid data errors
        doc = self.format_doc_oper(doc=doc)
        # [DOC] Detect non-_id update query and resolve it to _id terms:
        if '_id' not in query:
            results = await Registry.module(doc['module']).read(
                skip_events=[Event.PERM], env=env, query=query
            )
            if results.args.count > 1:
                query.append({'_id': {'$in': [doc._id for doc in results.args.docs]}})
            elif results.args.count == 1:
                query.append({'_id': results.args.docs[0]._id})
            else:
                raise self.exception(
                    status=400, msg='No update docs matched.', args={'code': 'NO_MATCH'}
                )
        # [FIX] isinstance() instead of `type(...) == list`
        if '_id' in query and isinstance(query['_id'][0], list):
            # [DOC] Multiple matched docs: create a diff doc for each except
            # the last, which falls through to the default create below
            for matched_id in query['_id'][0][:-1]:
                # [FIX] `self.create` is a coroutine (awaited elsewhere in this
                # codebase); without `await` the intermediate diff docs were
                # never actually created
                await self.create(
                    skip_events=[Event.PERM],
                    env=env,
                    query=[{'_id': matched_id}],
                    doc=doc,
                )
            query['_id'][0] = query['_id'][0][-1]
        doc['doc'] = ObjectId(query['_id'][0])
        return (skip_events, env, query, doc, payload)

    def format_doc_oper(self, *, doc: NAWAH_DOC) -> NAWAH_DOC:
        '''Return a copy of `doc` where every `$`-prefixed Doc Oper key is
        renamed to `__$...`, recursing into nested dicts, so opers are stored
        as plain data rather than interpreted.'''
        shadow_doc: NAWAH_DOC = {}
        for attr, val in doc.items():
            # [FIX] startswith() instead of `attr[0] == '$'` (safe for '' keys)
            if attr.startswith('$'):
                shadow_doc[f'__{attr}'] = val
            # [FIX] isinstance() instead of `type(...) == dict`
            elif isinstance(val, dict):
                shadow_doc[attr] = self.format_doc_oper(doc=val)
            else:
                shadow_doc[attr] = val
        return shadow_doc
class Setting(BaseModule):
    '''`Setting` module provides data type and controller for settings in Nawah eco-system. This is used by `User` module to provide additional user-wise settings. It also allows for global-typed settings.'''

    collection = 'settings'
    attrs = {
        'user': ATTR.ID(desc='`_id` of `User` doc the doc belongs to.'),
        'var': ATTR.STR(
            desc='Name of the setting. This is unique for every `user` in the module.'
        ),
        'val': ATTR.ANY(desc='Value of the setting.'),
        'val_type': ATTR.DYNAMIC_ATTR(),
        'type': ATTR.LITERAL(
            desc='Type of the setting. This sets whether setting is global, or belong to user, and whether use can update it or not.',
            literal=['global', 'user', 'user_sys'],
        ),
    }
    diff = True
    unique_attrs = [('user', 'var', 'type')]
    extns = {
        'val': ATTR.TYPE(type=attr_extn_val),
    }
    methods = {
        'read': METHOD(
            permissions=[
                PERM(privilege='admin', query_mod={'$limit': 1}),
                PERM(
                    privilege='read',
                    query_mod={
                        'user': '******',
                        'type': ATTR.TYPE(type=attr_query_mod_type),
                        '$limit': 1,
                    },
                ),
            ],
            query_args=[
                {
                    '_id': ATTR.ID(),
                    'type': ATTR.LITERAL(literal=['global', 'user', 'user_sys']),
                },
                {
                    'var': ATTR.STR(),
                    'type': ATTR.LITERAL(literal=['global']),
                },
                {
                    'var': ATTR.STR(),
                    'user': ATTR.ID(),
                    'type': ATTR.LITERAL(literal=['user', 'user_sys']),
                },
            ],
        ),
        'create': METHOD(
            permissions=[
                PERM(privilege='admin'),
                PERM(privilege='create', doc_mod={'type': 'user'}),
            ]
        ),
        'update': METHOD(
            permissions=[
                PERM(privilege='admin', query_mod={'$limit': 1}),
                PERM(
                    privilege='update',
                    query_mod={'type': 'user', 'user': '******', '$limit': 1},
                    doc_mod={'var': None, 'val_type': None, 'type': None},
                ),
            ],
            query_args=[
                {
                    '_id': ATTR.ID(),
                    'type': ATTR.LITERAL(literal=['global', 'user', 'user_sys']),
                },
                {
                    'var': ATTR.STR(),
                    'type': ATTR.LITERAL(literal=['global']),
                },
                {
                    'var': ATTR.STR(),
                    'user': ATTR.ID(),
                    'type': ATTR.LITERAL(literal=['user', 'user_sys']),
                },
            ],
        ),
        'delete': METHOD(
            permissions=[PERM(privilege='admin', query_mod={'$limit': 1})],
            query_args=[{'_id': ATTR.ID()}, {'var': ATTR.STR()}],
        ),
        'retrieve_file': METHOD(
            permissions=[PERM(privilege='*', query_mod={'type': 'global'})],
            get_method=True,
        ),
    }

    async def on_create(self, results, skip_events, env, query, doc, payload):
        '''Mirror user-typed settings onto the in-session user doc when the
        setting var is registered in `Config.user_doc_settings`.'''
        if doc['type'] in ['user', 'user_sys']:
            if (
                doc['user'] == env['session'].user._id
                and doc['var'] in Config.user_doc_settings
            ):
                env['session'].user[doc['var']] = doc['val']
        return (results, skip_events, env, query, doc, payload)

    async def pre_update(self, skip_events, env, query, doc, payload):
        '''Validate the updated `val` against the Setting doc's `val_type`
        before allowing the update.

        Raises 400 when `val` is missing from `doc`, the query matches no
        Setting doc, or the value fails `val_type` validation / is None.'''
        # [DOC] Locate the 'val' attr (or a dot-notation child of it) in doc
        for attr in doc:
            if attr == 'val' or attr.startswith('val.'):
                val_attr = attr
                break
        else:
            raise self.exception(
                status=400,
                msg='Could not match doc with any of the required doc_args. Failed sets:[\'val\': Missing]',
                args={'code': 'INVALID_DOC'},
            )
        setting_results = await self.read(skip_events=[Event.PERM], env=env, query=query)
        if not setting_results.args.count:
            raise self.exception(
                status=400, msg='Invalid Setting doc', args={'code': 'INVALID_SETTING'}
            )
        setting = setting_results.args.docs[0]
        # [DOC] Attempt to validate val against Setting val_type. Any failure
        # (including a None value) rejects the update
        # [FIX] Replaced the `exception_raised: Exception = None` sentinel
        # (mis-annotated as non-Optional) with a plain validity flag
        val_valid = False
        try:
            setting_val_type, _ = generate_dynamic_attr(dynamic_attr=setting.val_type)
            await validate_doc(
                mode='update',
                doc=doc,
                attrs={'val': setting_val_type},
                skip_events=skip_events,
                env=env,
                query=query,
            )
            # [FIX] `is not None` identity check instead of `== None` (PEP 8)
            val_valid = doc[val_attr] is not None
        except Exception:
            val_valid = False
        if not val_valid:
            raise self.exception(
                status=400,
                # [FIX] Removed duplicated word ('for for') in error message
                msg=f'Invalid value for Setting doc of type \'{type(doc[val_attr])}\' with required type \'{setting.val_type}\'',
                args={'code': 'INVALID_ATTR'},
            )
        return (skip_events, env, query, doc, payload)

    async def on_update(self, results, skip_events, env, query, doc, payload):
        '''Mirror updates to user-typed settings onto the in-session user doc,
        applying Doc Opers ($add, $multiply, $append, ...) locally. Failures
        are deliberately swallowed: this mirror is best-effort only.'''
        # [TODO] Update according to the changes of Doc Opers
        try:
            if (
                query['type'][0] in ['user', 'user_sys']
                and query['user'][0] == env['session'].user._id
                and query['var'][0] in Config.user_doc_settings
            ):
                user_doc = env['session'].user
                var = query['var'][0]
                val = doc['val']
                # [FIX] Single isinstance() check on a hoisted local instead of
                # repeated `type(doc['val']) == dict` per branch
                if isinstance(val, dict) and '$add' in val:
                    user_doc[var] += val['$add']
                elif isinstance(val, dict) and '$multiply' in val:
                    user_doc[var] *= val['$multiply']
                elif isinstance(val, dict) and '$append' in val:
                    user_doc[var].append(val['$append'])
                elif isinstance(val, dict) and '$set_index' in val:
                    # NOTE(review): the index is read from '$index' while the
                    # oper key checked is '$set_index' — confirm this matches
                    # the Doc Oper format
                    user_doc[var][val['$index']] = val['$set_index']
                elif isinstance(val, dict) and '$del_val' in val:
                    for del_val in val['$del_val']:
                        user_doc[var].remove(del_val)
                elif isinstance(val, dict) and '$del_index' in val:
                    # NOTE(review): same '$index' assumption as '$set_index'
                    del user_doc[var][val['$index']]
                else:
                    user_doc[var] = val
        except Exception:
            # [FIX] Narrowed bare `except:` so SystemExit/KeyboardInterrupt are
            # not swallowed; mirror remains best-effort
            pass
        return (results, skip_events, env, query, doc, payload)