def setup_test(*, modules=None, l10n_dicts=None, vars=None, types=None):
    """Reset mock registry and Config state, then install AsyncMock handlers.

    `modules` maps module classes to {method_name: return_value}. Plain-dict
    return values shaped exactly like {'status', 'msg', 'args'} are promoted
    to DictObj results (with 'docs' entries wrapped in DictObj as well).

    NOTE(review): `l10n_dicts`, `vars` and `types` are accepted but their
    values are never used beyond resetting the corresponding Config attrs —
    confirm whether population was intended. (`vars`/`types` also shadow
    builtins, but renaming them would break keyword callers.)
    """
    MockRegistry.modules = {}
    Config.l10n = {}
    Config.vars = {}
    Config.types = {}
    if not modules:
        return
    for module_class in modules.keys():
        module = module_class()
        # [DOC] Package name is the second-to-last segment of the module path, upper-cased
        module.package_name = (
            module.__module__.replace('modules.', '').upper().split('.')[-2]
        )
        # [DOC] Convert CamelCase class name to snake_case module name
        class_name = module.__class__.__name__
        module.module_name = re.sub(
            r'([A-Z])', r'_\1', class_name[0].lower() + class_name[1:]
        ).lower()
        for method_name in modules[module_class].keys():
            mock_method = mock.AsyncMock()
            # [FIX] Compare types with `is`, not `==` (flake8 E721); behavior identical
            if type(return_value := modules[module_class][method_name]) is dict:
                if set(return_value.keys()) == {'status', 'msg', 'args'}:
                    return_value = modules[module_class][method_name] = DictObj(
                        status=return_value['status'],
                        msg=return_value['msg'],
                        args=DictObj(return_value['args']),
                    )
                    if 'docs' in return_value.args:
                        return_value.args['docs'] = [
                            DictObj(doc) for doc in return_value.args.docs
                        ]
            mock_method.return_value = return_value
            setattr(module, method_name, mock_method)
        MockRegistry.modules[module.module_name] = module
def _(*, session=None, user=None, privileges=None):
    """Build a mock connection env dict, defaulting to the ANON session/user."""
    default_session = {
        '_id': ObjectId('f00000000000000000000012'),
        'user': None,
        'host_add': '127.0.0.1',
        'user_agent': '__ANON_TOKEN_000000000000000000000000',
        'timestamp': '1970-01-01T00:00:00',
        'expiry': '1970-01-01T00:00:00',
        'token': '__ANON_TOKEN_000000000000000000000000',
        'token_hash': '__ANON_TOKEN_000000000000000000000000',
    }
    default_user = {
        '_id': ObjectId('f00000000000000000000011'),
        'name': {'na_NA': '__ANON'},
        'groups': [],
        'privileges': None,
        'locale': 'na_NA',
    }
    # Falsy inputs (None, {}) fall back to the ANON defaults, as before
    session_obj = DictObj(session if session else default_session)
    user_obj = DictObj(user if user else default_user)
    user_obj['privileges'] = privileges if privileges else {}
    session_obj['user'] = user_obj
    return {
        'id': 0,
        'conn': None,
        'REMOTE_ADDR': '127.0.0.1',
        'ws': None,
        'watch_tasks': {},
        'init': False,
        'last_call': datetime.datetime.utcnow(),
        'quota': {
            'counter': 0,
            'last_check': datetime.datetime.utcnow(),
        },
        'HTTP_USER_AGENT': '',
        'HTTP_ORIGIN': '',
        'session': session_obj,
    }
def _(
    status: int,
    count: int,
    doc: Optional[Dict[str, Any]] = None,
    code: Optional[str] = None,
):
    """Build a mock results DictObj carrying `count` docs.

    Each doc is a DictObj of `doc`, or a fresh {'_id': ObjectId()} when `doc`
    is not given.

    [FIX] `doc` and `code` defaulted to None without Optional annotations;
    PEP 484 forbids implicit Optional.
    """
    return DictObj({
        'status': status,
        'args': DictObj({
            'count': count,
            'code': code,
            'docs': [
                DictObj(doc if doc else {'_id': ObjectId()})
                for __ in range(count)
            ],
        }),
    })
async def return_results(
    self,
    ws: Optional[WebSocketResponse],
    results: DictObj,
    call_id: Optional[str],
) -> Optional[DictObj]:
    """Push results over the websocket for real calls; hand them back otherwise.

    A missing call_id or the '__TEST__' sentinel means no websocket delivery:
    the results are simply returned to the caller.
    """
    if not call_id or call_id == '__TEST__':
        return results
    results.args['call_id'] = call_id
    socket = cast(WebSocketResponse, ws)
    await socket.send_str(JSONEncoder().encode(results))
    return None
async def watch_loop(
    self,
    ws: WebSocketResponse,
    stream: AsyncGenerator[DictObj, DictObj],
    call_id: str,
    watch_task: WATCH_TASK,
) -> None:
    """Relay every result yielded by `stream` to the websocket, tagged with call_id.

    Items carrying a 'stream' key are control messages: they refresh
    watch_task['stream'] and are not forwarded.
    """
    logger.debug('Preparing async loop at BaseMethod')
    async for chunk in stream:
        logger.debug(f'Received watch results at BaseMethod: {chunk}')
        # [DOC] Update watch_task stream value with stream object
        if 'stream' in chunk.keys():
            watch_task['stream'] = chunk['stream']
            continue
        payload = DictObj(chunk)
        try:
            payload['args'] = DictObj(payload.args)
        except Exception:
            payload['args'] = DictObj({})
        payload.args['call_id'] = call_id
        payload.args['watch'] = call_id
        await ws.send_str(JSONEncoder().encode(payload))
    logger.debug('Generator ended at BaseMethod.')
def exception(self, *, status, msg, args=None):
    """Build a MethodException wrapping a {'status', 'msg', 'args'} DictObj.

    When args carries a 'code', it is namespaced as PACKAGE_MODULE_code so
    error codes are globally unique across modules.

    [FIX] The original duplicated this logic in two branches gated on
    `type(args) == DictObj` / `type(args) == dict` (E721); a single
    `isinstance(args, (DictObj, dict))` check covers both.
    """
    status_dict = {'status': status, 'msg': msg, 'args': {}}
    if args and isinstance(args, (DictObj, dict)):
        if 'code' in args:
            args['code'] = (
                f'{self.package_name.upper()}_{self.module_name.upper()}_{args["code"]}'
            )
        status_dict['args'] = args
    return MethodException(DictObj(status_dict))
async def _(
    *,
    module,
    handler,
    results,
    skip_events=None,
    env=None,
    query=None,
    doc=None,
    payload=None,
):
    """Invoke a mocked module handler registered via setup_test.

    Normalizes `query` into a Query object and wraps any 'docs' entries of
    `results` in DictObj before dispatch.

    [FIX] Mutable default arguments ([] / {}) are shared across calls in
    Python; replaced with None sentinels plus per-call guards (behavior for
    all existing callers is unchanged).
    """
    skip_events = skip_events if skip_events is not None else []
    query = query if query is not None else []
    doc = doc if doc is not None else {}
    payload = payload if payload is not None else {}
    if not env:
        env = mock_env()
    query = Query(query)
    if 'docs' in results.keys():
        results['docs'] = [DictObj(results_doc) for results_doc in results['docs']]
    return await getattr(MockRegistry.modules[module], handler)(
        results=results,
        skip_events=skip_events,
        env=env,
        query=query,
        doc=doc,
        payload=payload,
    )
async def signout(self, skip_events=None, env=None, query=None, doc=None):
    """Sign out the session identified by query['_id'][0].

    Rejects sign-out of the ANON session, verifies the session exists,
    deletes it, then best-effort records CONN_DEAUTH / USER_DEAUTH analytics
    (failures are logged, not raised).

    [FIX] Mutable default arguments ([] / {}) replaced with None sentinels
    plus guards; `skip_events` and `doc` are part of the standard method
    interface but unused here.
    """
    skip_events = skip_events if skip_events is not None else []
    env = env if env is not None else {}
    query = query if query is not None else []
    doc = doc if doc is not None else {}
    if str(query['_id'][0]) == 'f00000000000000000000012':
        # NOTE(review): 'Singout' typo preserved — it is a runtime message that
        # clients (and handle_msg) may match on; fix in one coordinated change.
        raise self.exception(
            status=400,
            msg='Singout is not allowed for \'__ANON\' user.',
            args={'code': 'ANON_SIGNOUT'},
        )
    results = await self.read(
        skip_events=[Event.PERM], env=env, query=[{'_id': query['_id'][0]}]
    )
    if not results.args.count:
        raise self.exception(
            status=403, msg='Session is invalid.', args={'code': 'INVALID_SESSION'}
        )
    results = await self.delete(
        skip_events=[Event.PERM], env=env, query=[{'_id': env['session']._id}]
    )
    # [DOC] Create CONN_AUTH Analytic doc
    if Config.analytics_events['session_conn_deauth']:
        analytic_doc = {
            'event': 'CONN_DEAUTH',
            'subevent': env['client_app'],
            'args': {
                'user': env['session'].user._id,
                'session': env['session']._id,
                'REMOTE_ADDR': env['REMOTE_ADDR'],
                'HTTP_USER_AGENT': env['HTTP_USER_AGENT'],
            },
        }
        analytic_results = await Registry.module('analytic').create(
            skip_events=[Event.PERM], env=env, doc=analytic_doc
        )
        if analytic_results.status != 200:
            logger.error(
                f'Failed to create \'Analytic\' doc: {analytic_doc}. Results: {analytic_results}'
            )
    # [DOC] Create USER_AUTH Analytic doc
    if Config.analytics_events['session_user_deauth']:
        analytic_doc = {
            'event': 'USER_DEAUTH',
            'subevent': env['session'].user._id,
            'args': {
                'session': env['session']._id,
                'REMOTE_ADDR': env['REMOTE_ADDR'],
                'HTTP_USER_AGENT': env['HTTP_USER_AGENT'],
                'client_app': env['client_app'],
            },
        }
        analytic_results = await Registry.module('analytic').create(
            skip_events=[Event.PERM], env=env, doc=analytic_doc
        )
        if analytic_results.status != 200:
            logger.error(
                f'Failed to create \'Analytic\' doc: {analytic_doc}. Results: {analytic_results}'
            )
    return self.status(
        status=200,
        msg='You are successfully signed-out.',
        args={'session': DictObj({'_id': 'f00000000000000000000012'})},
    )
async def _config_data():
    """Validate app configuration and bootstrap system data at startup.

    Steps, in order: validate app packages and API levels; register cron
    jobs; warn on default secrets; resolve $__env. config attrs; write the
    data CA cert; build the system env/connection; handle Azure-Mongo
    sharding and test-mode collection renaming; verify/create the ADMIN and
    ANON users, ANON session, DEFAULT and app-specific groups; create data
    indexes; create app-specific docs. Calls exit(1) on any fatal mismatch.

    Fixes applied vs. original:
    - DEFAULT-group update checked `anon_results.status` (copy-paste bug,
      could also NameError) — now checks `default_results.status`.
    - ANON-token check wrote `anon_doc_update[attr]` using a stale `attr`
      left over from a previous loop, clobbering an arbitrary user attr —
      line removed; only the `{auth_attr}_hash` entry is written.
    - Env-var loop compared `attr_name == 'ssl'` but the keys are
      'data_ssl' etc., so the documented "set data_ssl to True" branch was
      dead — now compares 'data_ssl'.
    - Bare `except:` on the version check narrowed to `except Exception:`.
    """
    from nawah.utils import generate_attr

    # [TODO] Add validator for user_attrs, user_doc_settings
    # [DOC] Check app packages
    if Config._app_packages or len(Config.packages_versions.keys()) > 2:
        logger.debug(
            'Found \'_app_packages\' Config Attr. Attempting to validate all loaded packages are matching _app_packages Config Attr value.'
        )
        Config._app_packages['core'] = Config.packages_versions['core']
        Config._app_packages[
            Config._app_default_package] = Config.packages_versions[
                Config._app_default_package]
        missing_packages = [
            package for package in Config._app_packages.keys()
            if package not in Config.packages_versions.keys()
        ]
        if missing_packages:
            logger.error(
                f'At least one package is missing that is required by app. Missing package[s]: \'{", ".join(missing_packages)}\'. Exiting.'
            )
            exit(1)
        extra_packages = [
            package for package in Config.packages_versions.keys()
            if package not in Config._app_packages.keys()
        ]
        if extra_packages:
            logger.error(
                f'At least one extra package is present in \'packages\' folder that is not required by app. Extra package[s]: \'{", ".join(extra_packages)}\'. Exiting.'
            )
            exit(1)
        # [DOC] Check for version mismatch
        for package, version in Config._app_packages.items():
            # [DOC] Skip core and default_packages
            if package in ['core', Config._app_default_package]:
                continue
            if version != Config.packages_versions[package]:
                logger.error(
                    f'Package \'{package}\' version \'{Config.packages_versions[package]}\' is added to app but not matching required version \'{version}\'. Exiting.'
                )
                exit(1)
    # [DOC] Check API version
    if not Config.packages_api_levels:
        logger.warning(
            'No API-level specified for the app. Nawah would continue to run the app, but the developer should consider adding API-level to eliminate specs mismatch.'
        )
    else:
        nawah_level = '.'.join(Config._nawah_version.split('.')[0:2])
        for package, api_level in Config.packages_api_levels.items():
            if api_level != nawah_level:
                logger.error(
                    f'Nawah framework is on API-level \'{nawah_level}\', but the app package \'{package}\' requires API-level \'{api_level}\'. Exiting.'
                )
                exit(1)
        try:
            versions = ((requests.get(
                'https://raw.githubusercontent.com/masaar/nawah_versions/master/versions.txt'
            ).content).decode('utf-8').split('\n'))
            version_detected = ''
            # [DOC] Keep the highest patch release matching our API-level
            for version in versions:
                if version.startswith(f'{nawah_level}.'):
                    if version_detected and int(version.split('.')[-1]) < int(
                            version_detected.split('.')[-1]):
                        continue
                    version_detected = version
            if version_detected and version_detected != Config._nawah_version:
                logger.warning(
                    f'Your app is using Nawah version \'{Config._nawah_version}\' while newer version \'{version_detected}\' of the API-level is available. Please, update.'
                )
        # [FIX] Was a bare `except:`; best-effort update check should not
        # swallow KeyboardInterrupt/SystemExit
        except Exception:
            logger.warning(
                'An error occurred while attempting to check for latest update to Nawah. Please, check for updates on your own.'
            )
    # [DOC] Check for jobs
    if Config.jobs:
        # [DOC] Check jobs schedule validity
        Config._jobs_base = datetime.datetime.utcnow()
        for job_name in Config.jobs.keys():
            job = Config.jobs[job_name]
            if not croniter.is_valid(job.schedule):
                logger.error(
                    f'Job with schedule \'{job_name}\' schedule is invalid. Exiting.'
                )
                exit(1)
            job._cron_schedule = croniter(job.schedule, Config._jobs_base)
            job._next_time = datetime.datetime.fromtimestamp(
                job._cron_schedule.get_next(),
                datetime.timezone.utc).isoformat()[:16]
    # [DOC] Check for presence of user_auth_attrs
    if not Config.user_attrs.keys():
        logger.error('No \'user_attrs\' are provided. Exiting.')
        exit(1)
    # [DOC] Check default values
    security_warning = '[SECURITY WARNING] {config_attr} is not explicitly set. It has been defaulted to \'{val}\' but in production environment you should consider setting it to your own to protect your app from breaches.'
    if Config.admin_password == '__ADMIN':
        logger.warning(
            security_warning.format(config_attr='Admin password', val='__ADMIN'))
    if Config.anon_token == '__ANON_TOKEN_f00000000000000000000012':
        logger.warning(
            security_warning.format(
                config_attr='Anon token',
                val='__ANON_TOKEN_f00000000000000000000012',
            ))
    # [DOC] Check for Env Vars
    attrs_defaults = {
        'data_server': 'mongodb://localhost',
        'data_name': 'nawah_data',
        'data_ssl': False,
        'data_ca_name': False,
        'data_ca': False,
        'emulate_test': False,
    }
    for attr_name in attrs_defaults.keys():
        attr_val = getattr(Config, attr_name)
        if isinstance(attr_val, str) and attr_val.startswith('$__env.'):
            logger.debug(
                f'Detected Env Variable for config attr \'{attr_name}\'')
            if not os.getenv(attr_val[7:]):
                logger.warning(
                    f'Couldn\'t read Env Variable for config attr \'{attr_name}\'. Defaulting to \'{attrs_defaults[attr_name]}\''
                )
                setattr(Config, attr_name, attrs_defaults[attr_name])
            else:
                # [DOC] Set data_ssl to True rather than string Env Variable value
                # [FIX] Was `attr_name == 'ssl'` — a key that never occurs in
                # attrs_defaults, making this branch dead
                if attr_name == 'data_ssl':
                    attr_val = True
                else:
                    attr_val = os.getenv(attr_val[7:])
                logger.warning(
                    f'Setting Env Variable for config attr \'{attr_name}\' to \'{attr_val}\''
                )
                setattr(Config, attr_name, attr_val)
    # [DOC] Check SSL settings
    if Config.data_ca and Config.data_ca_name:
        __location__ = os.path.realpath(os.path.join('.'))
        if not os.path.exists(os.path.join(__location__, 'certs')):
            os.makedirs(os.path.join(__location__, 'certs'))
        with open(os.path.join(__location__, 'certs', Config.data_ca_name),
                  'w') as f:
            f.write(Config.data_ca)

    from nawah import data as Data

    # [DOC] Create default env dict
    anon_user = _compile_anon_user()
    anon_session = DictObj(_compile_anon_session())
    anon_session = cast(BaseModel, anon_session)
    anon_session['user'] = DictObj(anon_user)
    Config._sys_conn = Data.create_conn()
    Config._sys_env = {
        'conn': Config._sys_conn,
        'REMOTE_ADDR': '127.0.0.1',
        'HTTP_USER_AGENT': 'Nawah',
        'client_app': '__sys',
        'session': anon_session,
        'watch_tasks': {},
    }
    if Config.data_azure_mongo:
        for module in Config.modules.keys():
            try:
                if Config.modules[module].collection:
                    logger.debug(
                        f'Attempting to create shard collection: {Config.modules[module].collection}.'
                    )
                    Config._sys_conn[Config.data_name].command(
                        'shardCollection',
                        f'{Config.data_name}.{Config.modules[module].collection}',
                        key={'_id': 'hashed'},
                    )
                else:
                    logger.debug(f'Skipping service module: {module}.')
            except Exception as err:
                logger.error(err)
    # [DOC] Check test mode
    if Config.test:
        logger.debug('Test mode detected.')
        logger.setLevel(logging.DEBUG)
        __location__ = os.path.realpath(os.path.join('.'))
        if not os.path.exists(os.path.join(__location__, 'tests')):
            os.makedirs(os.path.join(__location__, 'tests'))
        for module in Config.modules.keys():
            module_collection = Config.modules[module].collection
            if module_collection:
                logger.debug(
                    f'Updating collection name \'{module_collection}\' of module {module}'
                )
                module_collection = Config.modules[
                    module].collection = f'test_{module_collection}'
                if Config.test:
                    logger.debug(
                        f'Flushing test collection \'{module_collection}\'')
                    await Data.drop(
                        env=Config._sys_env,
                        collection_name=module_collection,
                    )
            else:
                logger.debug(f'Skipping service module {module}')
    # [DOC] Test user_settings
    logger.debug('Testing user_settings.')
    if Config.user_settings:
        for user_setting in Config.user_settings.keys():
            logger.debug(f'Testing {user_setting}')
            if type(Config.user_settings[user_setting]) != USER_SETTING:
                logger.error(
                    f'Invalid Config Attr \'user_settings\' with key \'{user_setting}\' of type \'{type(Config.user_settings[user_setting])}\' with required type \'USER_SETTING\'. Exiting.'
                )
                exit(1)
            # [DOC] Validate USER_SETTING
            Config.user_settings[user_setting]._validate()
    # [DOC] Checking users collection
    # [TODO] Updated sequence to handle users
    logger.debug('Testing users collection.')
    user_results = await Config.modules['user'].read(
        skip_events=[Event.PERM, Event.ON],
        env=Config._sys_env,
        query=[{
            '_id': 'f00000000000000000000010'
        }],
    )
    if not user_results.args.count:
        logger.debug('ADMIN user not found, creating it.')
        # [DOC] Prepare base ADMIN user doc
        admin_create_doc = {
            '_id': ObjectId('f00000000000000000000010'),
            'name': {
                Config.locale: '__ADMIN'
            },
            'groups': [],
            'privileges': {
                '*': ['*']
            },
            'locale': Config.locale,
        }
        # [DOC] Update ADMIN user doc with admin_doc Config Attr
        admin_create_doc.update(Config.admin_doc)
        for auth_attr in Config.user_attrs.keys():
            admin_create_doc[f'{auth_attr}_hash'] = pbkdf2_sha512.using(
                rounds=100000
            ).hash(
                f'{auth_attr}{admin_create_doc[auth_attr]}{Config.admin_password}{Config.anon_token}'
                .encode('utf-8'))
        admin_results = await Config.modules['user'].create(
            skip_events=[Event.PERM],
            env=Config._sys_env,
            doc=admin_create_doc,
        )
        logger.debug(f'ADMIN user creation results: {admin_results}')
        if admin_results.status != 200:
            logger.error('Config step failed. Exiting.')
            exit(1)
    elif not Config.force_admin_check:
        logger.warning(
            'ADMIN user found, skipping check due to force_admin_check Config Attr.'
        )
    else:
        logger.warning(
            'ADMIN user found, checking it due to force_admin_check Config Attr.'
        )
        admin_doc: BaseModel = user_results.args.docs[0]
        admin_doc_update = {}
        for attr in Config.admin_doc.keys():
            if (attr not in admin_doc or not admin_doc[attr]
                    or Config.admin_doc[attr] != admin_doc[attr]):
                # [DOC] Locale dicts only count as changed when the current
                # locale's value differs
                if (type(Config.admin_doc[attr]) == dict
                        and Config.locale in Config.admin_doc[attr].keys()
                        and type(admin_doc[attr]) == dict
                        and ((Config.locale in admin_doc[attr].keys()
                              and Config.admin_doc[attr][Config.locale]
                              == admin_doc[attr][Config.locale])
                             or (Config.locale not in admin_doc[attr].keys()))):
                    continue
                logger.debug(
                    f'Detected change in \'admin_doc.{attr}\' Config Attr.')
                admin_doc_update[attr] = Config.admin_doc[attr]
        for auth_attr in Config.user_attrs.keys():
            auth_attr_hash = pbkdf2_sha512.using(rounds=100000).hash(
                f'{auth_attr}{admin_doc[auth_attr]}{Config.admin_password}{Config.anon_token}'
                .encode('utf-8'))
            if (f'{auth_attr}_hash' not in admin_doc
                    or auth_attr_hash != admin_doc[f'{auth_attr}_hash']):
                logger.debug(
                    f'Detected change in \'admin_password\' Config Attr.')
                admin_doc_update[f'{auth_attr}_hash'] = auth_attr_hash
        if len(admin_doc_update.keys()):
            logger.debug(
                f'Attempting to update ADMIN user with doc: \'{admin_doc_update}\''
            )
            admin_results = await Config.modules['user'].update(
                skip_events=[Event.PERM, Event.PRE, Event.ON],
                env=Config._sys_env,
                query=[{
                    '_id': ObjectId('f00000000000000000000010')
                }],
                doc=admin_doc_update,
            )
            logger.debug(f'ADMIN user update results: {admin_results}')
            if admin_results.status != 200:
                logger.error('Config step failed. Exiting.')
                exit(1)
        else:
            logger.debug('ADMIN user is up-to-date.')
    Config._sys_docs[ObjectId('f00000000000000000000010')] = SYS_DOC(
        module='user')
    # [DOC] Test if ANON user exists
    user_results = await Config.modules['user'].read(
        skip_events=[Event.PERM, Event.ON],
        env=Config._sys_env,
        query=[{
            '_id': 'f00000000000000000000011'
        }],
    )
    if not user_results.args.count:
        logger.debug('ANON user not found, creating it.')
        anon_results = await Config.modules['user'].create(
            skip_events=[Event.PERM, Event.PRE, Event.ON],
            env=Config._sys_env,
            doc=_compile_anon_user(),
        )
        logger.debug(f'ANON user creation results: {anon_results}')
        if anon_results.status != 200:
            logger.error('Config step failed. Exiting.')
            exit(1)
    else:
        logger.debug('ANON user found, checking it.')
        anon_doc = _compile_anon_user()
        anon_doc_update = {}
        for attr in Config.user_attrs.keys():
            if attr not in anon_doc or not anon_doc[attr]:
                logger.debug(
                    f'Detected change in \'anon_doc.{attr}\' Config Attr.')
                anon_doc_update[attr] = generate_attr(
                    attr_type=Config.user_attrs[attr])
        for module in Config.anon_privileges.keys():
            if module not in anon_doc or set(anon_doc[module]) != set(
                    Config.anon_privileges[module]):
                logger.debug(
                    f'Detected change in \'anon_privileges\' Config Attr.')
                anon_doc_update[
                    f'privileges.{module}'] = Config.anon_privileges[module]
        for auth_attr in Config.user_attrs.keys():
            if (f'{auth_attr}_hash' not in anon_doc
                    or anon_doc[f'{auth_attr}_hash'] != Config.anon_token):
                logger.debug(f'Detected change in \'anon_token\' Config Attr.')
                # [FIX] Removed `anon_doc_update[attr] = Config.anon_token`:
                # `attr` was stale from the earlier user_attrs loop and would
                # overwrite an unrelated user attr with the token
                anon_doc_update[f'{auth_attr}_hash'] = Config.anon_token
        if len(anon_doc_update.keys()):
            logger.debug(
                f'Attempting to update ANON user with doc: \'{anon_doc_update}\''
            )
            anon_results = await Config.modules['user'].update(
                skip_events=[Event.PERM, Event.PRE, Event.ON],
                env=Config._sys_env,
                query=[{
                    '_id': ObjectId('f00000000000000000000011')
                }],
                doc=anon_doc_update,
            )
            logger.debug(f'ANON user update results: {anon_results}')
            if anon_results.status != 200:
                logger.error('Config step failed. Exiting.')
                exit(1)
        else:
            logger.debug('ANON user is up-to-date.')
    Config._sys_docs[ObjectId('f00000000000000000000011')] = SYS_DOC(
        module='user')
    logger.debug('Testing sessions collection.')
    # [DOC] Test if ANON session exists
    session_results = await Config.modules['session'].read(
        skip_events=[Event.PERM, Event.ON],
        env=Config._sys_env,
        query=[{
            '_id': 'f00000000000000000000012'
        }],
    )
    if not session_results.args.count:
        logger.debug('ANON session not found, creating it.')
        anon_results = await Config.modules['session'].create(
            skip_events=[Event.PERM, Event.PRE, Event.ON],
            env=Config._sys_env,
            doc=_compile_anon_session(),
        )
        logger.debug(f'ANON session creation results: {anon_results}')
        if anon_results.status != 200:
            logger.error('Config step failed. Exiting.')
            exit(1)
    Config._sys_docs[ObjectId('f00000000000000000000012')] = SYS_DOC(
        module='session')
    logger.debug('Testing groups collection.')
    # [DOC] Test if DEFAULT group exists
    group_results = await Config.modules['group'].read(
        skip_events=[Event.PERM, Event.ON],
        env=Config._sys_env,
        query=[{
            '_id': 'f00000000000000000000013'
        }],
    )
    if not group_results.args.count:
        logger.debug('DEFAULT group not found, creating it.')
        group_create_doc = {
            '_id': ObjectId('f00000000000000000000013'),
            'user': ObjectId('f00000000000000000000010'),
            'name': {locale: '__DEFAULT'
                     for locale in Config.locales},
            'bio': {locale: '__DEFAULT'
                    for locale in Config.locales},
            'privileges': Config.default_privileges,
        }
        group_results = await Config.modules['group'].create(
            skip_events=[Event.PERM, Event.PRE, Event.ON],
            env=Config._sys_env,
            doc=group_create_doc,
        )
        logger.debug(f'DEFAULT group creation results: {group_results}')
        if group_results.status != 200:
            logger.error('Config step failed. Exiting.')
            exit(1)
    else:
        logger.debug('DEFAULT group found, checking it.')
        default_doc = group_results.args.docs[0]
        default_doc_update: Dict[str, Any] = {}
        for module in Config.default_privileges.keys():
            if module not in default_doc.privileges.keys() or set(
                    default_doc.privileges[module]) != set(
                        Config.default_privileges[module]):
                logger.debug(
                    f'Detected change in \'default_privileges\' Config Attr.')
                default_doc_update[
                    f'privileges.{module}'] = Config.default_privileges[module]
        if len(default_doc_update.keys()):
            logger.debug(
                f'Attempting to update DEFAULT group with doc: \'{default_doc_update}\''
            )
            default_results = await Config.modules['group'].update(
                skip_events=[Event.PERM, Event.PRE, Event.ON],
                env=Config._sys_env,
                query=[{
                    '_id': ObjectId('f00000000000000000000013')
                }],
                doc=default_doc_update,
            )
            logger.debug(f'DEFAULT group update results: {default_results}')
            # [FIX] Was `anon_results.status` — copy-paste from the ANON block;
            # anon_results may not even be bound here (NameError)
            if default_results.status != 200:
                logger.error('Config step failed. Exiting.')
                exit(1)
        else:
            logger.debug('DEFAULT group is up-to-date.')
    Config._sys_docs[ObjectId('f00000000000000000000013')] = SYS_DOC(
        module='group')
    # [DOC] Test app-specific groups
    logger.debug('Testing app-specific groups collection.')
    for group in Config.groups:
        group_results = await Config.modules['group'].read(
            skip_events=[Event.PERM, Event.ON],
            env=Config._sys_env,
            query=[{
                '_id': group['_id']
            }],
        )
        if not group_results.args.count:
            logger.debug(
                f'App-specific group with name \'{group["name"]}\' not found, creating it.'
            )
            group_results = await Config.modules['group'].create(
                skip_events=[Event.PERM, Event.PRE, Event.ON],
                env=Config._sys_env,
                doc=group,
            )
            logger.debug(
                f'App-specific group with name {group["name"]} creation results: {group_results}'
            )
            if group_results.status != 200:
                logger.error('Config step failed. Exiting.')
                exit(1)
        else:
            logger.debug(
                f'App-specific group with name \'{group["name"]}\' found, checking it.'
            )
            group_doc = group_results.args.docs[0]
            group_doc_update = {}
            if 'privileges' in group.keys():
                for module in group['privileges'].keys():
                    if module not in group_doc.privileges.keys() or set(
                            group_doc.privileges[module]) != set(
                                group['privileges'][module]):
                        logger.debug(
                            f'Detected change in \'privileges\' Doc Arg for group with name \'{group["name"]}\'.'
                        )
                        group_doc_update[f'privileges.{module}'] = group[
                            'privileges'][module]
            if len(group_doc_update.keys()):
                logger.debug(
                    f'Attempting to update group with name \'{group["name"]}\' with doc: \'{group_doc_update}\''
                )
                group_results = await Config.modules['group'].update(
                    skip_events=[Event.PERM, Event.PRE, Event.ON],
                    env=Config._sys_env,
                    query=[{
                        '_id': group['_id']
                    }],
                    doc=group_doc_update,
                )
                logger.debug(
                    f'Group with name \'{group["name"]}\' update results: {group_results}'
                )
                if group_results.status != 200:
                    logger.error('Config step failed. Exiting.')
                    exit(1)
            else:
                logger.debug(
                    f'Group with name \'{group["name"]}\' is up-to-date.')
        Config._sys_docs[ObjectId(group['_id'])] = SYS_DOC(module='group')
    # [DOC] Test app-specific data indexes
    logger.debug('Testing data indexes')
    for index in Config.data_indexes:
        logger.debug(f'Attempting to create data index: {index}')
        try:
            Config._sys_conn[Config.data_name][
                index['collection']].create_index(index['index'])
        except Exception as e:
            logger.error(
                f'Failed to create data index: {index}, with error: {e}')
            logger.error('Evaluate error and take action manually.')
    logger.debug(
        'Creating \'var\', \'type\', \'user\' data indexes for settings collections.'
    )
    Config._sys_conn[Config.data_name]['settings'].create_index([('var', 1)])
    Config._sys_conn[Config.data_name]['settings'].create_index([('type', 1)])
    Config._sys_conn[Config.data_name]['settings'].create_index([('user', 1)])
    logger.debug(
        'Creating \'user\', \'event\', \'subevent\' data indexes for analytics collections.'
    )
    Config._sys_conn[Config.data_name]['analytics'].create_index([('user', 1)])
    Config._sys_conn[Config.data_name]['analytics'].create_index([('event', 1)
                                                                  ])
    Config._sys_conn[Config.data_name]['analytics'].create_index([('subevent',
                                                                   1)])
    logger.debug('Creating \'__deleted\' data indexes for all collections.')
    for module in Config.modules:
        if Config.modules[module].collection:
            logger.debug(
                f'Attempting to create \'__deleted\' data index for collection: {Config.modules[module].collection}'
            )
            Config._sys_conn[Config.data_name][
                Config.modules[module].collection].create_index([('__deleted',
                                                                  1)])
    # [DOC] Test app-specific docs
    logger.debug('Testing docs.')
    for doc in Config.docs:
        if type(doc) != SYS_DOC:
            logger.error(f'Invalid Config Attr \'docs\'. Exiting.')
            exit(1)
        doc_results = await Config.modules[doc.module].read(
            skip_events=[Event.PERM, Event.PRE, Event.ON, Event.ARGS],
            env=Config._sys_env,
            query=[{
                doc.key: doc.key_value
            }],  # type: ignore
        )
        if not doc_results.args.count:
            skip_events = [Event.PERM]
            if doc.skip_args == True:
                skip_events.append(Event.ARGS)
            doc.doc = cast(NAWAH_DOC, doc.doc)
            doc_results = await Config.modules[doc.module].create(
                skip_events=skip_events, env=Config._sys_env, doc=doc.doc)
            logger.debug(
                'App-specific doc with %s \'%s\' of module \'%s\' creation results: %s',
                doc.key,
                doc.key_value,
                doc.module,
                doc_results,
            )
            if doc_results.status != 200:
                logger.error('Config step failed. Exiting.')
                exit(1)
        Config._sys_docs[ObjectId(
            doc_results.args.docs[0]._id)] = SYS_DOC(module=doc.module)
    # [DOC] Check for emulate_test mode
    if Config.emulate_test:
        Config.test = True
async def handle_msg(
    env: NAWAH_ENV,
    msg: aiohttp.WSMessage,
    decline_quota: Optional[str] = None,
):
    """Decode, validate and dispatch one websocket message for a connection.

    Flow: refresh last_call; ensure a session (falling back to a fresh ANON
    session); JWT-decode the payload with the session token; enforce quota
    (`decline_quota` is 'ip', 'session' or None); handle the conn/verify
    handshake while env['init'] is False; service conn/close, heart/beat and
    watch-delete control endpoints; finally resolve module/method and fire
    the call as an asyncio task. Any unexpected exception is caught by the
    outermost handler and reported as a 500 frame.
    """
    try:
        env['last_call'] = datetime.datetime.utcnow()
        try:
            # [DOC] Probe for a usable session token; on any failure rebuild an
            # ANON session (EAFP — session may be missing or malformed)
            env['session'].token
        except Exception:
            anon_user = _compile_anon_user()
            anon_session = _compile_anon_session()
            anon_session['user'] = DictObj(anon_user)
            env['session'] = BaseModel(anon_session)
        res = json.loads(msg.data)
        try:
            # [DOC] The request body is a JWT signed with the session token
            res = jwt.decode(res['token'], env['session'].token, algorithms=['HS256'])
        except Exception:
            await env['ws'].send_str(JSONEncoder().encode({
                'status': 403,
                'msg': 'Request token is not accepted.',
                'args': {
                    'call_id': res['call_id'] if 'call_id' in res.keys() else None,
                    'code': 'CORE_REQ_INVALID_TOKEN',
                },
            }))
            # [DOC] Unverified connections are dropped outright on a bad token
            if env['init'] == False:
                await env['ws'].close()
                return
            else:
                return
        # [DOC] Check if msg should be denied for quota hit
        if decline_quota == 'ip':
            await env['ws'].send_str(JSONEncoder().encode({
                'status': 429,
                'msg': 'You have hit calls quota from this IP.',
                'args': {
                    'call_id': res['call_id'] if 'call_id' in res.keys() else None,
                    'code': 'CORE_REQ_IP_QUOTA_HIT',
                },
            }))
            return
        elif decline_quota == 'session':
            await env['ws'].send_str(JSONEncoder().encode({
                'status': 429,
                'msg': 'You have hit calls quota.',
                'args': {
                    'call_id': res['call_id'] if 'call_id' in res.keys() else None,
                    'code': 'CORE_REQ_SESSION_QUOTA_HIT',
                },
            }))
            return
        logger.debug(f'Decoded request: {JSONEncoder().encode(res)}')
        if 'endpoint' not in res.keys():
            await env['ws'].send_str(JSONEncoder().encode({
                'status': 400,
                'msg': 'Request missing endpoint.',
                'args': {
                    'call_id': res['call_id'] if 'call_id' in res.keys() else None,
                    'code': 'CORE_REQ_NO_ENDPOINT',
                },
            }))
            return
        # [DOC] Until verified, only conn/verify is accepted on this connection
        if env['init'] == False:
            if res['endpoint'] != 'conn/verify':
                await env['ws'].send_str(JSONEncoder().encode({
                    'status': 1008,
                    'msg': 'Request token is not accepted.',
                    'args': {
                        'call_id': res['call_id'] if 'call_id' in res.keys() else None,
                        'code': 'CORE_REQ_NO_VERIFY',
                    },
                }))
                await env['ws'].close()
                return
            else:
                # [DOC] With client_apps configured, the declared app must exist
                # and (for web apps) the Origin header must match
                if len(Config.client_apps.keys()) and (
                        'doc' not in res.keys()
                        or 'app' not in res['doc'].keys()
                        or res['doc']['app'] not in Config.client_apps.keys()
                        or (Config.client_apps[res['doc']['app']]['type'] == 'web'
                            and env['HTTP_ORIGIN'] not in Config.client_apps[
                                res['doc']['app']]['origin'])):
                    await env['ws'].send_str(JSONEncoder().encode({
                        'status': 1008,
                        'msg': 'Request token is not accepted.',
                        'args': {
                            'call_id': res['call_id'] if 'call_id' in res.keys() else None,
                            'code': 'CORE_REQ_NO_VERIFY',
                        },
                    }))
                    await env['ws'].close()
                    return
                else:
                    env['init'] = True
                    if not Config.client_apps:
                        env['client_app'] = '__public'
                    else:
                        env['client_app'] = res['doc']['app']
                    logger.debug(
                        f'Connection on session #\'{env["id"]}\' is verified.'
                    )
                    if Config.analytics_events['app_conn_verified']:
                        # [DOC] Fire-and-forget analytics; result is not awaited
                        asyncio.create_task(
                            Config.modules['analytic'].create(
                                skip_events=[Event.PERM],
                                env=env,
                                doc={
                                    'event': 'CONN_VERIFIED',
                                    'subevent': env['client_app'],
                                    'args': {
                                        'REMOTE_ADDR': env['REMOTE_ADDR'],
                                        'HTTP_USER_AGENT': env['HTTP_USER_AGENT'],
                                    },
                                },
                            ))
                    await env['ws'].send_str(JSONEncoder().encode({
                        'status': 200,
                        'msg': 'Connection established',
                        'args': {
                            'call_id': res['call_id'] if 'call_id' in res.keys() else None,
                            'code': 'CORE_CONN_OK',
                        },
                    }))
                    return
        if res['endpoint'] == 'conn/close':
            logger.debug(
                f'Received connection close instructions on session #\'{env["id"]}\'.'
            )
            await env['ws'].close()
            return
        if res['endpoint'] == 'heart/beat':
            logger.debug(
                f'Received connection heartbeat on session #\'{env["id"]}\'.'
            )
            await env['ws'].send_str(JSONEncoder().encode({
                'status': 200,
                'msg': 'Heartbeat received.',
                'args': {
                    'call_id': res['call_id'] if 'call_id' in res.keys() else None,
                    'code': 'CORE_HEARTBEAT_OK',
                },
            }))
            return
        res['endpoint'] = res['endpoint'].lower()
        # [DOC] Session id 'f000...012' is the ANON session sentinel
        if (res['endpoint'] in ['session/auth', 'session/reauth']
                and str(env['session']._id) != 'f00000000000000000000012'):
            await env['ws'].send_str(JSONEncoder().encode({
                'status': 400,
                'msg': 'You are already authed.',
                'args': {
                    'call_id': res['call_id'] if 'call_id' in res.keys() else None,
                    'code': 'CORE_SESSION_ALREADY_AUTHED',
                },
            }))
            return
        elif (res['endpoint'] == 'session/signout'
              and str(env['session']._id) == 'f00000000000000000000012'):
            await env['ws'].send_str(JSONEncoder().encode({
                'status': 400,
                'msg': 'Singout is not allowed for \'__ANON\' user.',
                'args': {
                    'call_id': res['call_id'] if 'call_id' in res.keys() else None,
                    'code': 'CORE_SESSION_ANON_SIGNOUT',
                },
            }))
            return
        if 'query' not in res.keys():
            res['query'] = []
        if 'doc' not in res.keys():
            res['doc'] = {}
        if 'call_id' not in res.keys():
            res['call_id'] = ''
        # NOTE(review): unlike query/doc/call_id, 'sid' has no presence guard —
        # a request without it raises KeyError here, surfacing as the generic
        # 500 from the outer handler. Confirm whether that is intended.
        request = {
            'call_id': res['call_id'],
            'sid': res['sid'] or False,
            'query': res['query'],
            'doc': res['doc'],
            'path': res['endpoint'].split('/'),
        }
        if len(request['path']) != 2:
            await env['ws'].send_str(JSONEncoder().encode({
                'status': 400,
                'msg': 'Endpoint path is invalid.',
                'args': {
                    'call_id': request['call_id'],
                    'code': 'CORE_REQ_INVALID_PATH',
                },
            }))
            return
        module = request['path'][0].lower()
        # [DOC] watch/delete is handled inline: cancel one or all watch tasks
        if module == 'watch' and request['path'][1].lower() == 'delete':
            logger.debug(
                'Received watch task delete request for: %s',
                request['query'][0]['watch'],
            )
            try:
                if request['query'][0]['watch'] == '__all':
                    for watch_task in env['watch_tasks'].values():
                        watch_task['stream'].close()
                        watch_task['task'].cancel()
                    await env['ws'].send_str(JSONEncoder().encode({
                        'status': 200,
                        'msg': 'All watch tasks deleted.',
                        'args': {
                            'call_id': request['call_id'],
                            'watch': list(env['watch_tasks'].keys()),
                        },
                    }))
                    env['watch_tasks'] = {}
                else:
                    env['watch_tasks'][request['query'][0]
                                       ['watch']]['stream'].close()
                    env['watch_tasks'][request['query'][0]
                                       ['watch']]['task'].cancel()
                    await env['ws'].send_str(JSONEncoder().encode({
                        'status': 200,
                        'msg': 'Watch task deleted.',
                        'args': {
                            'call_id': request['call_id'],
                            'watch': [request['query'][0]['watch']],
                        },
                    }))
                    del env['watch_tasks'][request['query'][0]['watch']]
            # NOTE(review): bare except — also masks errors raised while
            # sending the success frame, not only bad watch ids; consider
            # narrowing to (KeyError, IndexError)
            except:
                await env['ws'].send_str(JSONEncoder().encode({
                    'status': 400,
                    'msg': 'Watch is invalid.',
                    'args': {
                        'call_id': request['call_id'],
                        'code': 'CORE_WATCH_INVALID_WATCH',
                    },
                }))
            return
        if module not in Config.modules.keys():
            await env['ws'].send_str(JSONEncoder().encode({
                'status': 400,
                'msg': 'Endpoint module is invalid.',
                'args': {
                    'call_id': request['call_id'],
                    'code': 'CORE_REQ_INVALID_MODULE',
                },
            }))
            return
        if request['path'][1].lower(
        ) not in Config.modules[module].methods.keys():
            await env['ws'].send_str(JSONEncoder().encode({
                'status': 400,
                'msg': 'Endpoint method is invalid.',
                'args': {
                    'call_id': request['call_id'],
                    'code': 'CORE_REQ_INVALID_METHOD',
                },
            }))
            return
        # [DOC] GET methods are only reachable over HTTP, not the websocket
        if Config.modules[module].methods[request['path']
                                          [1].lower()].get_method:
            await env['ws'].send_str(JSONEncoder().encode({
                'status': 400,
                'msg': 'Endpoint method is a GET method.',
                'args': {
                    'call_id': request['call_id'],
                    'code': 'CORE_REQ_GET_METHOD',
                },
            }))
            return
        # [DOC] No sid supplied: fall back to the ANON session id
        if not request['sid']:
            request['sid'] = 'f00000000000000000000012'
        method = Config.modules[module].methods[request['path'][1].lower()]
        query = request['query']
        doc = request['doc']
        await _process_file_obj(doc=doc, modules=Config.modules, env=env)
        # [DOC] Dispatch asynchronously; the method sends its own reply frame
        asyncio.create_task(
            method(
                skip_events=[],
                env=env,
                query=query,
                doc=doc,
                call_id=request['call_id'],
            ))
    except Exception as e:
        # [DOC] Catch-all boundary: any failure above becomes a 500 frame
        # (with details only when Config.debug)
        logger.error(f'An error occurred. Details: {traceback.format_exc()}.')
        if Config.debug:
            await env['ws'].send_str(JSONEncoder().encode({
                'status': 500,
                'msg': f'Unexpected error has occurred [{str(e)}].',
                'args': {
                    'code': 'CORE_SERVER_ERROR',
                    'err': str(e)
                },
            }))
        else:
            await env['ws'].send_str(JSONEncoder().encode({
                'status': 500,
                'msg': 'Unexpected error has occurred.',
                'args': {
                    'code': 'CORE_SERVER_ERROR'
                },
            }))
async def http_handler(request: aiohttp.web.Request):
    '''Serve one HTTP call to a Nawah GET-method endpoint.

    Pipeline: answer CORS pre-flight, enforce the per-IP call quota,
    validate the URL match-info args against the target method's
    query_args sets, resolve the session from X-Auth headers (falling
    back to the anonymous session), invoke the target module method,
    and translate its results into an HTTP response per the method's
    'return' mode (json, file, or msg).
    '''
    headers = MultiDict([
        ('Server', 'Nawah'),
        ('Powered-By', 'Nawah, https://nawah.masaar.com'),
        ('Access-Control-Allow-Origin', '*'),
        ('Access-Control-Allow-Methods', 'GET,POST,OPTIONS'),
        (
            'Access-Control-Allow-Headers',
            'Content-Type,X-Auth-Bearer,X-Auth-Token,X-Auth-App',
        ),
        ('Access-Control-Expose-Headers', 'Content-Disposition'),
    ])

    logger.debug(f'Received new {request.method} request: {request.match_info}')

    if request.method == 'OPTIONS':
        # [DOC] CORS pre-flight: reply immediately with the allow-* headers
        return aiohttp.web.Response(
            status=200,
            headers=headers,
            body=JSONEncoder().encode({
                'status': 200,
                'msg': 'OPTIONS request is allowed.',
            }),
        )

    # [DOC] Check for IP quota
    if str(request.remote) not in ip_quota:
        ip_quota[str(request.remote)] = {
            'counter': Config.quota_ip_min,
            'last_check': datetime.datetime.utcnow(),
        }
    else:
        # [DOC] Reset the counter once the quota window (259s) has elapsed
        if (datetime.datetime.utcnow() -
                ip_quota[str(request.remote)]['last_check']).seconds > 259:
            ip_quota[str(request.remote)]['last_check'] = datetime.datetime.utcnow()
            ip_quota[str(request.remote)]['counter'] = Config.quota_ip_min
        else:
            if ip_quota[str(request.remote)]['counter'] - 1 <= 0:
                logger.warning(
                    f'Denying \'{request.method}\' request from \'{request.remote}\' for hitting IP quota.'
                )
                headers['Content-Type'] = 'application/json; charset=utf-8'
                return aiohttp.web.Response(
                    status=429,
                    headers=headers,
                    body=JSONEncoder().encode({
                        'status': 429,
                        'msg': 'You have hit calls quota from this IP.',
                        'args': {
                            'code': 'CORE_REQ_IP_QUOTA_HIT'
                        },
                    }),
                )
            else:
                ip_quota[str(request.remote)]['counter'] -= 1

    module = request.url.parts[1].lower()
    method = request.url.parts[2].lower()
    request_args = dict(request.match_info.items())

    # [DOC] Extract Args Sets based on request.method
    args_sets = Config.modules[module].methods[method].query_args
    args_sets = cast(List[Dict[str, ATTR]], args_sets)

    # [DOC] Attempt to validate query as doc
    for args_set in args_sets:
        # [FIX] The original condition compared args_set.keys() against
        # itself (always true), so the first args_set was always used.
        # Match the incoming request args against the args_set instead.
        if len(args_set.keys()) == len(request_args.keys()) and sum(
                1 for arg in args_set.keys()
                if arg in request_args.keys()) == len(args_set.keys()):
            # [DOC] Check presence and validate all attrs in doc args
            try:
                await validate_doc(mode='create', doc=request_args, attrs=args_set)  # type: ignore
            except InvalidAttrException as e:
                headers['Content-Type'] = 'application/json; charset=utf-8'
                return aiohttp.web.Response(
                    status=400,
                    headers=headers,
                    body=JSONEncoder().encode({
                        'status': 400,
                        'msg': f'{str(e)} for \'{request.method}\' request on module \'{Config.modules[module].package_name.upper()}_{module.upper()}\'.',
                        'args': {
                            'code': f'{Config.modules[module].package_name.upper()}_{module.upper()}_INVALID_ATTR'
                        },
                    }).encode('utf-8'),
                )
            except ConvertAttrException as e:
                headers['Content-Type'] = 'application/json; charset=utf-8'
                return aiohttp.web.Response(
                    status=400,
                    headers=headers,
                    body=JSONEncoder().encode({
                        'status': 400,
                        'msg': f'{str(e)} for \'{request.method}\' request on module \'{Config.modules[module].package_name.upper()}_{module.upper()}\'.',
                        'args': {
                            'code': f'{Config.modules[module].package_name.upper()}_{module.upper()}_CONVERT_INVALID_ATTR'
                        },
                    }).encode('utf-8'),
                )
            break

    conn = Data.create_conn()
    env: NAWAH_ENV = {
        'conn': conn,
        'REMOTE_ADDR': request.remote,
        'client_app': '__public',
    }

    try:
        env['HTTP_USER_AGENT'] = request.headers['user-agent']
        env['HTTP_ORIGIN'] = request.headers['origin']
    except KeyError:
        # [DOC] Headers absent; fall back to empty strings
        env['HTTP_USER_AGENT'] = ''
        env['HTTP_ORIGIN'] = ''

    if 'X-Auth-Bearer' in request.headers or 'X-Auth-Token' in request.headers:
        logger.debug('Detected \'X-Auth\' header[s].')
        # [DOC] All three X-Auth headers must be present together
        if ('X-Auth-Bearer' not in request.headers
                or 'X-Auth-Token' not in request.headers
                or 'X-Auth-App' not in request.headers):
            logger.debug('Denying request due to missing \'X-Auth\' header.')
            headers['Content-Type'] = 'application/json; charset=utf-8'
            return aiohttp.web.Response(
                status=400,
                headers=headers,
                body=JSONEncoder().encode({
                    'status': 400,
                    'msg': 'One \'X-Auth\' headers was set but not the other.',
                }).encode('utf-8'),
            )
        # [DOC] Verify calling client_app and, for web apps, its origin
        if len(Config.client_apps.keys()) and (
                request.headers['X-Auth-App'] not in Config.client_apps.keys()
                or (Config.client_apps[request.headers['X-Auth-App']]['type'] == 'web'
                    and env['HTTP_ORIGIN'] not in Config.client_apps[
                        request.headers['X-Auth-App']]['origin'])):
            logger.debug('Denying request due to unauthorised client_app.')
            headers['Content-Type'] = 'application/json; charset=utf-8'
            return aiohttp.web.Response(
                status=403,
                headers=headers,
                body=JSONEncoder().encode({
                    'status': 403,
                    'msg': 'X-Auth headers could not be verified.',
                    'args': {
                        'code': 'CORE_SESSION_INVALID_XAUTH'
                    },
                }).encode('utf-8'),
            )
        try:
            session_results = await Config.modules['session'].read(
                skip_events=[Event.PERM],
                env=env,
                query=[{
                    '_id': request.headers['X-Auth-Bearer'],
                }],
            )
        except Exception as e:
            # [FIX] The original referenced an unbound name 'exception'
            # here, raising NameError instead of reporting the real error.
            headers['Content-Type'] = 'application/json; charset=utf-8'
            if Config.debug:
                return aiohttp.web.Response(
                    status=500,
                    headers=headers,
                    body=JSONEncoder().encode({
                        'status': 500,
                        'msg': f'Unexpected error has occurred [{str(e)}].',
                        'args': {
                            'code': 'CORE_SERVER_ERROR',
                            'err': str(e)
                        },
                    }).encode('utf-8'),
                )
            else:
                return aiohttp.web.Response(
                    status=500,
                    headers=headers,
                    body=JSONEncoder().encode({
                        'status': 500,
                        'msg': 'Unexpected error has occurred.',
                        'args': {
                            'code': 'CORE_SERVER_ERROR'
                        },
                    }).encode('utf-8'),
                )

        if not session_results.args.count or not pbkdf2_sha512.verify(
                request.headers['X-Auth-Token'],
                session_results.args.docs[0].token_hash,
        ):
            logger.debug('Denying request due to missing failed Call Authorisation.')
            headers['Content-Type'] = 'application/json; charset=utf-8'
            return aiohttp.web.Response(
                status=403,
                headers=headers,
                body=JSONEncoder().encode({
                    'status': 403,
                    'msg': 'X-Auth headers could not be verified.',
                    'args': {
                        'code': 'CORE_SESSION_INVALID_XAUTH'
                    },
                }).encode('utf-8'),
            )
        else:
            session = session_results.args.docs[0]
            session_results = await Config.modules['session'].reauth(
                skip_events=[Event.PERM],
                env=env,
                query=[{
                    '_id': request.headers['X-Auth-Bearer'],
                    'token': request.headers['X-Auth-Token'],
                }],
            )
            if session_results.status != 200:
                # [FIX] Log denial only when reauth actually failed; the
                # original logged it unconditionally before the check.
                logger.debug('Denying request due to fail to reauth.')
                headers['Content-Type'] = 'application/json; charset=utf-8'
                return aiohttp.web.Response(
                    status=403,
                    headers=headers,
                    body=JSONEncoder().encode(session_results).encode('utf-8'),
                )
            else:
                session = session_results.args.session
    else:
        # [DOC] No X-Auth headers: run the call as the anonymous user
        anon_user = _compile_anon_user()
        anon_session = _compile_anon_session()
        anon_session['user'] = DictObj(anon_user)
        session = DictObj(anon_session)

    env['session'] = session

    doc_content = await request.content.read()
    try:
        doc = json.loads(doc_content)
    except Exception:
        # [DOC] Body is not JSON; attempt to parse it as multipart form-data
        try:
            multipart_content_type = request.headers['Content-Type']
            doc = {
                part.headers[b'Content-Disposition'].decode('utf-8').replace(
                    'form-data; name=', '').replace('"', '').split(';')[0]:
                part.content
                for part in decoder.MultipartDecoder(
                    doc_content, multipart_content_type).parts
            }
        except Exception:
            doc = {}

    results = await Config.modules[module].methods[method](
        env=env, query=[request_args], doc=doc)

    logger.debug('Closing connection.')
    env['conn'].close()

    # [DOC] 'return' arg selects the response mode; default is json
    if 'return' not in results.args or results.args['return'] == 'json':
        if 'return' in results.args:
            del results.args['return']
        headers['Content-Type'] = 'application/json; charset=utf-8'
        if results.status == 404:
            return aiohttp.web.Response(
                status=results.status,
                headers=headers,
                body=JSONEncoder().encode({
                    'status': 404,
                    'msg': 'Requested content not found.'
                }).encode('utf-8'),
            )
        else:
            return aiohttp.web.Response(
                status=results.status,
                headers=headers,
                body=JSONEncoder().encode(results),
            )
    elif results.args['return'] == 'file':
        del results.args['return']
        # [DOC] Serve the first doc as a cacheable file response (30-day expiry)
        expiry_time = datetime.datetime.utcnow() + datetime.timedelta(days=30)
        headers['lastModified'] = str(results.args.docs[0].lastModified)
        headers['Content-Type'] = results.args.docs[0].type
        headers['Cache-Control'] = 'public, max-age=31536000'
        headers['Expires'] = expiry_time.strftime('%a, %d %b %Y %H:%M:%S GMT')
        return aiohttp.web.Response(
            status=results.status,
            headers=headers,
            body=results.args.docs[0].content,
        )
    elif results.args['return'] == 'msg':
        del results.args['return']
        headers['Content-Type'] = 'application/json; charset=utf-8'
        return aiohttp.web.Response(
            status=results.status, headers=headers, body=results.msg)

    # [DOC] Unknown 'return' mode
    headers['Content-Type'] = 'application/json; charset=utf-8'
    return aiohttp.web.Response(
        status=405,
        headers=headers,
        body=JSONEncoder().encode({
            'status': 405,
            'msg': 'METHOD NOT ALLOWED'
        }),
    )
async def __call__(
    self,
    *,
    skip_events: NAWAH_EVENTS = None,
    env: NAWAH_ENV = None,
    query: Union[NAWAH_QUERY, Query] = None,
    doc: NAWAH_DOC = None,
    call_id: str = None,
) -> Optional[DictObj]:
    '''Execute the wrapped module method as a full Nawah call.

    Pipeline: default-fill call args, fire matching analytics sets,
    enforce permissions (merging any query_mod/doc_mod into the call),
    validate query and doc against the method's declared args lists,
    then run the implementation — either as a websocket watch task
    (streaming results) or as a regular awaited call — and deliver the
    results via return_results.

    Returns None for watch-method calls (results stream over the ws);
    otherwise returns whatever return_results produces.
    '''
    # [DOC] None sentinels instead of mutable defaults; fill in here
    if skip_events is None:
        skip_events = []
    if env is None:
        env = {}
    if query is None:
        query = []
    if doc is None:
        doc = {}
    skip_events = cast(NAWAH_EVENTS, skip_events)
    env = cast(NAWAH_ENV, env)
    query = cast(Union[NAWAH_QUERY, Query], query)
    doc = cast(NAWAH_DOC, doc)
    call_id = cast(str, call_id)

    # [DOC] Convert list query to Query object
    query = Query(copy.deepcopy(query))
    # [DOC] deepcopy() doc object to prevent mutating original doc
    doc = copy.deepcopy(doc)

    logger.debug(
        f'Calling: {self.module.module_name}.{self.method}, with skip_events:{skip_events}, query:{str(query)[:250]}, doc.keys:{doc.keys()}'
    )

    if call_id:
        for analytics_set in self.module.analytics:
            if analytics_set.condition(
                    skip_events=skip_events,
                    env=env,
                    query=query,
                    doc=doc,
                    method=self.method,
            ):
                try:
                    analytic_doc = analytics_set.doc(
                        skip_events=skip_events,
                        env=env,
                        query=query,
                        doc=doc,
                        method=self.method,
                    )
                    analytic_results = await Config.modules['analytic'].create(
                        skip_events=[Event.PERM], env=env, doc=analytic_doc)
                    # [FIX] Status check moved inside the try: the original ran
                    # it after the except, hitting unbound analytic_results
                    # whenever doc()/create() raised.
                    if analytic_results.status != 200:
                        logger.error(
                            f'Failed to create \'Analytic\' doc: {analytic_doc}. Results: {analytic_results}'
                        )
                except Exception:
                    # [FIX] The original handler referenced analytic_doc and
                    # analytic_results, which are unbound when the failure
                    # happened while building them — log the traceback instead.
                    logger.error(
                        f'Failed to create \'Analytic\' doc. Details: {traceback.format_exc()}'
                    )

    if Event.PERM not in skip_events and env['session']:
        try:
            permissions_check = await _check_permissions(
                skip_events=skip_events,
                env=env,
                query=query,
                doc=doc,
                module=self.module,
                permissions=self.permissions,
            )
            logger.debug(f'permissions_check: Pass.')
        except Exception as e:
            logger.debug(f'permissions_check: Fail.')
            # [DOC] InvalidAttrException, usually raised by Attr Type TYPE
            if type(e) == InvalidAttrException:
                return await self.return_results(
                    ws=env['ws'] if 'ws' in env.keys() else None,
                    results=DictObj({
                        'status': 400,
                        'msg': str(e),
                        'args': DictObj({'code': 'INVALID_ARGS'}),
                    }),
                    call_id=call_id,
                )
            # [DOC] Any other exception, treat as server error
            elif type(e) != InvalidPermissionsExcpetion:
                logger.error(
                    f'An error occurred. Details: {traceback.format_exc()}.')
                tb = sys.exc_info()[2]
                if tb is not None:
                    # [DOC] Walk to the innermost frame to log its locals
                    prev = tb
                    current = tb.tb_next
                    while current is not None:
                        prev = current
                        current = current.tb_next
                    logger.error(
                        f'Scope variables: {JSONEncoder().encode(prev.tb_frame.f_locals)}'
                    )
                return await self.return_results(
                    ws=env['ws'] if 'ws' in env.keys() else None,
                    results=DictObj({
                        'status': 500,
                        'msg': 'Unexpected error has occurred.',
                        'args': DictObj({'code': 'CORE_SERVER_ERROR'}),
                    }),
                    call_id=call_id,
                )
            # [DOC] Regular InvalidPermissionsExcpetion failure
            return await self.return_results(
                ws=env['ws'] if 'ws' in env.keys() else None,
                results=DictObj({
                    'status': 403,
                    'msg': 'You don\'t have permissions to access this endpoint.',
                    'args': DictObj({'code': 'CORE_SESSION_FORBIDDEN'}),
                }),
                call_id=call_id,
            )
        else:
            if type(permissions_check['query_mod']) == dict:
                permissions_check['query_mod'] = [
                    permissions_check['query_mod']
                ]
            for i in range(len(permissions_check['query_mod'])):
                # [DOC] attempt to process query_set as nested-list (OR) even if it's dict
                if type(permissions_check['query_mod'][i]) == dict:
                    query_set_list = [permissions_check['query_mod'][i]]
                elif type(permissions_check['query_mod'][i]) == list:
                    query_set_list = permissions_check['query_mod'][i]
                # [DOC] loop over query_set_list, query_set
                for query_set in query_set_list:
                    del_args = []
                    for attr in query_set.keys():
                        # [DOC] Flag attr for deletion if value is None
                        # [TODO] Check why the condition included (or type(query_set[attr]) == ATTR_MOD:)
                        if query_set[attr] == None:
                            del_args.append(attr)
                    for attr in del_args:
                        del query_set[attr]
            # [DOC] Append query permissions args to query
            query.append(permissions_check['query_mod'])

            del_args = []
            for attr in permissions_check['doc_mod'].keys():
                # [DOC] Replace None value with NONE_VALUE to bypass later validate step
                if permissions_check['doc_mod'][attr] == None:
                    permissions_check['doc_mod'][attr] = NAWAH_VALUES.NONE_VALUE
            # [TODO] del_args is never populated here, so this loop is a
            # no-op; confirm whether ATTR_MOD handling was dropped.
            for attr in del_args:
                del permissions_check['doc_mod'][attr]
            # [DOC] Update doc with doc permissions args
            doc.update(permissions_check['doc_mod'])
            doc = {
                attr: doc[attr]
                for attr in doc.keys()
                if doc[attr] != NAWAH_VALUES.NONE_VALUE
            }

    if Event.ARGS not in skip_events:
        # [DOC] Validate query against the method's query_args sets
        try:
            await _validate_args(args=query,
                                 args_list_label='query',
                                 args_list=self.query_args)
        except InvalidCallArgsException as e:
            test_query = e.args[0]
            for i in range(len(test_query)):
                test_query[i] = ('[' + ', '.join([
                    f'\'{arg}\': {val.capitalize()}'
                    for arg, val in test_query[i].items() if val != True
                ]) + ']')
            return await self.return_results(
                ws=env['ws'] if 'ws' in env.keys() else None,
                results=DictObj({
                    'status': 400,
                    'msg': 'Could not match query with any of the required query_args. Failed sets:'
                    + ', '.join(test_query),
                    'args': DictObj({
                        'code': f'{self.module.package_name.upper()}_{self.module.module_name.upper()}_INVALID_QUERY'
                    }),
                }),
                call_id=call_id,
            )
        # [DOC] Validate doc against the method's doc_args sets
        try:
            await _validate_args(args=doc,
                                 args_list_label='doc',
                                 args_list=self.doc_args)
        except InvalidCallArgsException as e:
            test_doc = e.args[0]
            for i in range(len(test_doc)):
                test_doc[i] = ('[' + ', '.join([
                    f'\'{arg}\': {val.capitalize()}'
                    for arg, val in test_doc[i].items() if val != True
                ]) + ']')
            return await self.return_results(
                ws=env['ws'] if 'ws' in env.keys() else None,
                results=DictObj({
                    'status': 400,
                    'msg': 'Could not match doc with any of the required doc_args. Failed sets:'
                    + ', '.join(test_doc),
                    'args': DictObj({
                        'code': f'{self.module.package_name.upper()}_{self.module.module_name.upper()}_INVALID_DOC'
                    }),
                }),
                call_id=call_id,
            )

    # [DOC] Collapse BaseModel doc values to their _id
    for arg in doc.keys():
        if type(doc[arg]) == BaseModel:
            doc[arg] = doc[arg]._id  # type: ignore

    # [DOC] check if $soft oper is set to add it to events
    if '$soft' in query and query['$soft'] == True:
        skip_events.append(Event.SOFT)
        del query['$soft']

    # [DOC] check if $extn oper is set to add it to events
    if '$extn' in query and query['$extn'] == False:
        skip_events.append(Event.EXTN)
        del query['$extn']

    try:
        # [DOC] Use getattr to get the method implementation as module._method_METHOD_NAME, which is a fake name that allows BaseModule.__getattribute__ to correctly return the implementation rather than BaseMethod
        method = getattr(self.module, f'_method_{self.method}')
        # [DOC] Call method function
        if self.watch_method:
            await env['ws'].send_str(JSONEncoder().encode({
                'status': 200,
                'msg': 'Created watch task.',
                'args': {
                    'code': 'CORE_WATCH_OK',
                    'watch': call_id,
                    'call_id': call_id,
                },
            }))
            # [FIX] Register the watch task entry BEFORE building the loop:
            # the original passed env['watch_tasks'][call_id] to watch_loop
            # before the key existed, raising KeyError on every watch call.
            env['watch_tasks'][call_id] = {}
            watch_loop = self.watch_loop(
                ws=env['ws'],
                stream=method(skip_events=skip_events,
                              env=env,
                              query=query,
                              doc=doc),
                call_id=call_id,
                watch_task=env['watch_tasks'][call_id],
            )
            env['watch_tasks'][call_id]['watch'] = watch_loop
            env['watch_tasks'][call_id]['task'] = asyncio.create_task(watch_loop)
            return None
        else:
            try:
                results = await method(skip_events=skip_events,
                                       env=env,
                                       query=query,
                                       doc=doc)
            except MethodException as e:
                results = e.args[0]

            if type(results) == coroutine:
                raise TypeError(
                    'Method returned coroutine rather than acceptable results format.'
                )

            results = DictObj(results)
            try:
                results['args'] = DictObj(results.args)
            except Exception:
                results['args'] = DictObj({})

            logger.debug(f'Call results: {JSONEncoder().encode(results)}')
            # [DOC] Check for session in results
            if 'session' in results.args:
                if results.args.session._id == 'f00000000000000000000012':
                    # [DOC] Updating session to __ANON
                    anon_user = _compile_anon_user()
                    anon_session = _compile_anon_session()
                    anon_session['user'] = DictObj(anon_user)
                    env['session'] = BaseModel(anon_session)
                else:
                    # [DOC] Updating session to user
                    env['session'] = results.args.session

            return await self.return_results(
                ws=env['ws'] if 'ws' in env.keys() else None,
                results=results,
                call_id=call_id)
    except Exception as e:
        logger.error(f'An error occurred. Details: {traceback.format_exc()}.')
        tb = sys.exc_info()[2]
        if tb is not None:
            # [DOC] Walk to the innermost frame to log its locals
            prev = tb
            current = tb.tb_next
            while current is not None:
                prev = current
                current = current.tb_next
            logger.error(
                f'Scope variables: {JSONEncoder().encode(prev.tb_frame.f_locals)}'
            )
        query = Query([])
        if Config.debug:
            return await self.return_results(
                ws=env['ws'] if 'ws' in env.keys() else None,
                results=DictObj({
                    'status': 500,
                    'msg': f'Unexpected error has occurred [method:{self.module.module_name}.{self.method}] [{str(e)}].',
                    'args': DictObj({
                        'code': 'CORE_SERVER_ERROR',
                        'method': f'{self.module.module_name}.{self.method}',
                        'err': str(e),
                    }),
                }),
                call_id=call_id,
            )
        else:
            return await self.return_results(
                ws=env['ws'] if 'ws' in env.keys() else None,
                results=DictObj({
                    'status': 500,
                    'msg': 'Unexpected error has occurred.',
                    'args': DictObj({'code': 'CORE_SERVER_ERROR'}),
                }),
                call_id=call_id,
            )