async def _check_permissions(
    skip_events: List[Event],
    env: NAWAH_ENV,
    query: Union[NAWAH_QUERY, Query],
    doc: NAWAH_DOC,
    module: 'BaseModule',
    permissions: List[PERM],
):
    '''Check method `permissions` against the privileges of the current session user.

    Iterates `permissions` in order; the first permission whose privilege the user
    holds wins, and its parsed `query_mod`/`doc_mod` are returned as a dict with
    keys `query_mod` and `doc_mod`. Raises `InvalidPermissionsExcpetion` when no
    permission check passes.
    '''
    user = env['session'].user
    # [DOC] Deep-copy so that permission-args parsing can never mutate the
    # method's shared PERM objects across calls
    permissions = copy.deepcopy(permissions)

    for permission in permissions:
        logger.debug(f'checking permission: {permission} against: {user.privileges}')
        permission_pass = False
        # [DOC] Wildcard privilege passes unconditionally
        if permission.privilege == '*':
            permission_pass = True

        if not permission_pass:
            # [DOC] Privilege is either 'attr' (implies current module) or
            # 'module.attr'; split once instead of calling find/split repeatedly
            if '.' in permission.privilege:
                privilege_parts = permission.privilege.split('.')
                permission_module = privilege_parts[0]
                permission_attr = privilege_parts[1]
            else:
                permission_module = module.module_name
                permission_attr = permission.privilege

            # NOTE(review): The following mutates the session user's privileges
            # in-place (original behaviour kept) — confirm this cross-call side
            # effect on `env['session'].user` is intended
            if '*' in user.privileges.keys():
                user.privileges[permission_module] = copy.deepcopy(user.privileges['*'])

            if permission_module in user.privileges.keys():
                # [DOC] A module-level '*' expands to every privilege the target
                # module declares
                if (
                    isinstance(user.privileges[permission_module], list)
                    and '*' in user.privileges[permission_module]
                ):
                    user.privileges[permission_module] += copy.deepcopy(
                        Registry.module(permission_module).privileges
                    )
            if permission_module not in user.privileges.keys():
                user.privileges[permission_module] = []

            if permission_attr in user.privileges[permission_module]:
                permission_pass = True

        if permission_pass:
            query_mod = await _parse_permission_args(
                skip_events=skip_events,
                env=env,
                query=query,
                doc=doc,
                permission_args=permission.query_mod,
            )
            doc_mod = await _parse_permission_args(
                skip_events=skip_events,
                env=env,
                query=query,
                doc=doc,
                permission_args=permission.doc_mod,
            )
            return {'query_mod': query_mod, 'doc_mod': doc_mod}

    # [DOC] If all permission checks fail
    raise InvalidPermissionsExcpetion()
async def retrieve_cache_results(self, skip_events=None, env=None, query=None, doc=None):
    '''Return the results and query time of one cached query of a module.

    Reads the module name from `query['module'][0]`, the cache-set index from
    `query['cache_set'][0]`, and the positional query index from
    `query['query'][0]`; responds with the cached `results` and the ISO-format
    `query_time` of that entry.
    '''
    # [DOC] Avoid shared mutable default arguments
    skip_events = [] if skip_events is None else skip_events
    env = {} if env is None else env
    query = [] if query is None else query
    doc = {} if doc is None else doc
    # [DOC] Hoist the repeated Registry lookup chain into one cache-set reference
    cache_set = Registry.module(query['module'][0]).cache[query['cache_set'][0]]
    # [DOC] `queries` is a dict; resolve the positional index to its key
    cache_set_query = list(cache_set.queries.keys())[query['query'][0]]
    return self.status(
        status=200,
        msg='Module Cache Sets results retrieved.',
        args={
            'results': cache_set.queries[cache_set_query].results,
            'query_time': cache_set.queries[cache_set_query].query_time.isoformat(),
        },
    )
class File(BaseModule): '''`File` module provides functionality for `File Upload Workflow`.''' collection = 'files' attrs = { 'user': ATTR.ID(desc='`_id` of `User` doc file belongs to.'), 'file': ATTR.FILE(desc='File object.'), 'create_time': ATTR.DATETIME( desc='Python `datetime` ISO format of the doc creation.' ), } methods = { 'read': METHOD(permissions=[PERM(privilege='__sys')]), 'create': METHOD( permissions=[PERM(privilege='create')], post_method=True, ), 'delete': METHOD(permissions=[PERM(privilege='__sys')]), } async def on_read(self, results, skip_events, env, query, doc, payload): for i in range(len(results['docs'])): results['docs'][i]['file']['lastModified'] = int( results['docs'][i]['file']['lastModified'] ) return (results, skip_events, env, query, doc, payload) async def pre_create(self, skip_events, env, query, doc, payload): if Config.file_upload_limit != -1 and len(doc['file']) > Config.file_upload_limit: raise self.exception( status=400, msg=f'File size is beyond allowed limit.', args={ 'code': 'INVALID_SIZE', 'attr': doc['__attr'].decode('utf-8'), 'name': doc['name'].decode('utf-8'), }, ) if (module := doc['__module'].decode('utf-8')) not in Config.modules.keys(): raise self.exception( status=400, msg=f'Invalid module \'{module}\'', args={'code': 'INVALID_MODULE'}, ) try: attr_type = _extract_attr( scope=Registry.module(module).attrs, attr_path='$__' + (attr := doc['__attr'].decode('utf-8')), )
async def retrieve_cache_queries(self, skip_events=None, env=None, query=None, doc=None):
    '''Return the list of cached query keys of one cache set of a module.

    Reads the module name from `query['module'][0]` and the cache-set index from
    `query['cache_set'][0]`.
    '''
    # [DOC] Avoid shared mutable default arguments
    skip_events = [] if skip_events is None else skip_events
    env = {} if env is None else env
    query = [] if query is None else query
    doc = {} if doc is None else doc
    cache_set = Registry.module(query['module'][0]).cache[query['cache_set'][0]]
    return self.status(
        status=200,
        msg='Module Cache Sets queries retrieved.',
        args={'queries': list(cache_set.queries.keys())},
    )
async def retrieve_cache_sets(self, skip_events=None, env=None, query=None, doc=None):
    '''Return the condition lambda bodies of all cache sets of a module.

    Reads the module name from `query['module'][0]` and maps each cache set to
    the extracted source of its `condition` lambda.
    '''
    # [DOC] Avoid shared mutable default arguments
    skip_events = [] if skip_events is None else skip_events
    env = {} if env is None else env
    query = [] if query is None else query
    doc = {} if doc is None else doc
    return self.status(
        status=200,
        msg='Module Cache Sets retrieved.',
        args={
            'sets': [
                _extract_lambda_body(cache_set.condition)
                for cache_set in Registry.module(query['module'][0]).cache
            ]
        },
    )
async def auth(self, skip_events=None, env=None, query=None, doc=None):
    '''Authenticate a user and create a session.

    Expects `doc` to carry exactly one of the user module's unique attrs (the
    credential key) plus `hash` (the credential secret verified against the
    stored `{key}_hash`). On success creates a `Session` doc (persisting only
    the token hash), attaches the user's privileges, best-effort-creates
    CONN_AUTH / USER_AUTH Analytic docs per `Config.analytics_events`, and
    returns a 200 status whose args carry the session (including the plain
    `token`). Raises `self.exception` on bad credentials or a blocked user.
    '''
    # [DOC] Avoid shared mutable default arguments
    skip_events = [] if skip_events is None else skip_events
    env = {} if env is None else env
    query = [] if query is None else query
    doc = {} if doc is None else doc

    # [DOC] Find which unique attr the caller supplied as the credential key
    key = None
    for attr in Registry.module('user').unique_attrs:
        if attr in doc.keys():
            key = attr
            break
    if key is None:
        # [DOC] Original code raised NameError (unbound `key`) here; fail with a
        # clean auth error instead
        raise self.exception(
            status=403,
            msg='Wrong auth credentials.',
            args={'code': 'INVALID_CREDS'},
        )

    user_query = [{key: doc[key], '$limit': 1}]
    if 'groups' in doc.keys():
        user_query.append(
            [{'groups': {'$in': doc['groups']}}, {'privileges': {'*': ['*']}}]
        )
    user_results = await Registry.module('user').read(
        skip_events=[Event.PERM, Event.ON], env=env, query=user_query
    )
    if not user_results.args.count or not pbkdf2_sha512.verify(
        doc['hash'],
        user_results.args.docs[0][f'{key}_hash'],
    ):
        raise self.exception(
            status=403, msg='Wrong auth credentials.', args={'code': 'INVALID_CREDS'}
        )
    user = user_results.args.docs[0]

    # [DOC] Status checks are skippable with Event.ON (e.g. internal calls)
    if Event.ON not in skip_events:
        if user.status in ['banned', 'deleted']:
            raise self.exception(
                status=403, msg=f'User is {user.status}.', args={'code': 'INVALID_USER'}
            )
        elif user.status == 'disabled_password':
            raise self.exception(
                status=403,
                msg='User password is disabled.',
                args={'code': 'INVALID_USER'},
            )

    # [DOC] Only the hash of the session token is persisted; the plain token is
    # returned to the client once
    token = secrets.token_urlsafe(32)
    session = {
        'user': user._id,
        'groups': doc['groups'] if 'groups' in doc.keys() else [],
        'host_add': env['REMOTE_ADDR'],
        'user_agent': env['HTTP_USER_AGENT'],
        'expiry': (
            datetime.datetime.utcnow() + datetime.timedelta(days=30)
        ).isoformat(),
        'token_hash': pbkdf2_sha512.using(rounds=100000).hash(token),
    }
    results = await self.create(skip_events=[Event.PERM], env=env, doc=session)
    if results.status != 200:
        return results

    session['_id'] = results.args.docs[0]._id
    session['user'] = user
    del session['token_hash']
    session['token'] = token
    results.args.docs[0] = BaseModel(session)

    # [DOC] read user privileges and return them
    user_results = await Registry.module('user').read_privileges(
        skip_events=[Event.PERM], env=env, query=[{'_id': user._id}]
    )
    if user_results.status != 200:
        return user_results
    results.args.docs[0]['user'] = user_results.args.docs[0]

    # [DOC] Create CONN_AUTH Analytic doc
    if Config.analytics_events['session_conn_auth']:
        await self._create_auth_analytic_doc(
            env=env,
            analytic_doc={
                'event': 'CONN_AUTH',
                'subevent': env['client_app'],
                'args': {
                    'user': user_results.args.docs[0]._id,
                    'session': results.args.docs[0]._id,
                    'REMOTE_ADDR': env['REMOTE_ADDR'],
                    'HTTP_USER_AGENT': env['HTTP_USER_AGENT'],
                },
            },
        )
    # [DOC] Create USER_AUTH Analytic doc
    if Config.analytics_events['session_user_auth']:
        await self._create_auth_analytic_doc(
            env=env,
            analytic_doc={
                'event': 'USER_AUTH',
                'subevent': user_results.args.docs[0]._id,
                'args': {
                    'session': results.args.docs[0]._id,
                    'REMOTE_ADDR': env['REMOTE_ADDR'],
                    'HTTP_USER_AGENT': env['HTTP_USER_AGENT'],
                    'client_app': env['client_app'],
                },
            },
        )

    return self.status(
        status=200,
        msg='You were successfully authed.',
        args={'session': results.args.docs[0]},
    )

async def _create_auth_analytic_doc(self, env, analytic_doc):
    '''Create one Analytic doc, logging (not raising) on failure — best-effort.'''
    analytic_results = await Registry.module('analytic').create(
        skip_events=[Event.PERM], env=env, doc=analytic_doc
    )
    if analytic_results.status != 200:
        logger.error(
            f'Failed to create \'Analytic\' doc: {analytic_doc}. Results: {analytic_results}'
        )