class Analytic(BaseModule):
	'''`Analytic` module provides data type and controller from `Analytics Workflow` and accompanying analytics docs. It uses `pre_create` handler to assure no events duplications occur and all occurrences of the same event are recorded in one doc.'''

	collection = 'analytics'
	attrs = {
		'user': ATTR.ID(desc='`_id` of `User` doc the doc belongs to.'),
		'event': ATTR.STR(desc='Analytics event name.'),
		'subevent': ATTR.ANY(
			desc='Analytics subevent distinguishing attribute. This is usually `STR`, or `ID` but it is introduced in the module as `ANY` to allow wider use-cases by developers.'
		),
		'occurrences': ATTR.LIST(
			desc='All occurrences of the event as list.',
			list=[
				ATTR.DICT(
					desc='Single occurrence of the event details.',
					dict={
						'args': ATTR.DICT(
							desc='Key-value `dict` containing event args, if any.',
							dict={'__key': ATTR.STR(), '__val': ATTR.ANY()},
						),
						'score': ATTR.INT(
							desc='Numerical score for occurrence of the event.'
						),
						'create_time': ATTR.DATETIME(
							desc='Python `datetime` ISO format of the occurrence of the event.'
						),
					},
				)
			],
		),
		'score': ATTR.INT(
			desc='Total score of all scores of all occurrences of the event. This can be used for data analysis.'
		),
	}
	unique_attrs = [('user', 'event', 'subevent')]
	methods = {
		'read': {'permissions': [PERM(privilege='read')]},
		'create': {
			'permissions': [PERM(privilege='__sys')],
			'doc_args': {
				'event': ATTR.STR(),
				'subevent': ATTR.ANY(),
				'args': ATTR.DICT(dict={'__key': ATTR.STR(), '__val': ATTR.ANY()}),
			},
		},
		'update': {'permissions': [PERM(privilege='__sys')]},
		'delete': {'permissions': [PERM(privilege='delete')]},
	}

	async def pre_create(self, skip_events, env, query, doc, payload):
		'''De-duplicate analytics events per (user, event, subevent).

		If a doc already exists for the session user and event pair, append the
		new occurrence to it and add its score to the running total, returning
		the update results directly (which aborts the pending create). Otherwise
		rewrite `doc` so a fresh doc with a single occurrence is created.
		'''
		# [DOC] Look for an existing doc for the current session user and event.
		existing_results = await self.read(
			skip_events=[Event.PERM],
			env=env,
			query=[
				{
					'user': env['session'].user._id,
					'event': doc['event'],
					'subevent': doc['subevent'],
				},
				{'$limit': 1},
			],
		)
		# [DOC] Score defaults to 0 when the caller did not supply one.
		occurrence_score = doc['score'] if 'score' in doc.keys() else 0
		occurrence = {
			'args': doc['args'],
			'score': occurrence_score,
			'create_time': datetime.datetime.utcnow().isoformat(),
		}
		if existing_results.args.count:
			# [DOC] Doc exists: append occurrence, bump total score, and
			# short-circuit the create call by returning results directly.
			update_results = await self.update(
				skip_events=[Event.PERM],
				env=env,
				query=[{'_id': existing_results.args.docs[0]._id}],
				doc={
					'occurrences': {'$append': occurrence},
					'score': {'$add': occurrence_score},
				},
			)
			return update_results
		# [DOC] No doc exists: rebuild `doc` with a single-occurrence list so
		# the create call proceeds normally.
		doc = {
			'event': doc['event'],
			'subevent': doc['subevent'],
			'occurrences': [occurrence],
			'score': occurrence_score,
		}
		return (skip_events, env, query, doc, payload)
class Diff(BaseModule):
	'''`Diff` module provides data type and controller for `Diff Workflow`. It is meant for use by internal calls only. Best practice to accessing diff docs is by creating proxy modules or writing LIMP methods that expose the diff docs.'''

	collection = 'diff'
	attrs = {
		'user': ATTR.ID(desc='`_id` of `User` doc the doc belongs to.'),
		'module': ATTR.STR(desc='Name of the module the original doc is part of.'),
		'doc': ATTR.ID(desc='`_id` of the original doc.'),
		'vars': ATTR.DICT(
			desc='Key-value `dict` containing all attrs that have been updated from the original doc.',
			dict={'__key': ATTR.STR(), '__val': ATTR.ANY()},
		),
		'remarks': ATTR.STR(
			desc='Human-readable remarks of the doc. This is introduced to allow developers to add log messages to diff docs.'
		),
		'create_time': ATTR.DATETIME(
			desc='Python `datetime` ISO format of the doc creation.'
		),
	}
	defaults = {'doc': None, 'remarks': ''}
	methods = {
		'read': {'permissions': [PERM(privilege='read')]},
		'create': {'permissions': [PERM(privilege='__sys')]},
		'delete': {'permissions': [PERM(privilege='delete')]},
	}

	async def pre_create(self, skip_events, env, query, doc, payload):
		'''Resolve the target doc(s) of the diff and pin `doc['doc']` to one `_id`.

		When the incoming query has no `_id`, the target module is read with the
		same query to resolve matching `_id`s. When multiple `_id`s are matched,
		one diff doc is created per `_id`, with the last one handled by the
		current create call.
		'''
		# [DOC] Detect non-_id update query:
		if '_id' not in query:
			results = await Config.modules[doc['module']].read(
				skip_events=[Event.PERM], env=env, query=query)
			if results.args.count > 1:
				# [DOC] Renamed comprehension variable so it no longer shadows
				# the `doc` handler arg.
				query.append(
					{'_id': {'$in': [matched_doc._id for matched_doc in results.args.docs]}})
			elif results.args.count == 1:
				query.append({'_id': results.args.docs[0]._id})
			else:
				return self.status(status=400,
								   msg='No update docs matched.',
								   args={'code': 'NO_MATCH'})
		if '_id' in query and isinstance(query['_id'][0], list):
			# [DOC] Fan out: create one diff doc per matched _id except the
			# last, which the current create call handles.
			for i in range(len(query['_id'][0]) - 1):
				# [FIX] `self.create` is a coroutine; it was previously called
				# without `await`, so the fan-out diff docs were never created.
				await self.create(
					skip_events=[Event.PERM],
					env=env,
					query=[{'_id': query['_id'][0][i]}],
					doc=doc,
				)
			query['_id'][0] = query['_id'][0][-1]
		doc['doc'] = ObjectId(query['_id'][0])
		return (skip_events, env, query, doc, payload)
class User(BaseModule):
	'''`User` module provides data type and controller for users in LIMP eco-system. This module is supposed to be used for internal calls only, however it has wide-access permissions in order to allow admins, proxy modules to easily expose the methods.'''

	collection = 'users'
	attrs = {
		'name': ATTR.LOCALE(desc='Name of the user as `LOCALE`.'),
		'locale': ATTR.LOCALES(desc='Default locale of the user.'),
		'create_time': ATTR.DATETIME(
			desc='Python `datetime` ISO format of the doc creation.'),
		'login_time': ATTR.DATETIME(desc='Python `datetime` ISO format of the last login.'),
		'groups': ATTR.LIST(
			desc='List of `_id` for every group the user is member of.',
			list=[ATTR.ID(desc='`_id` of Group doc the user is member of.')]),
		'privileges': ATTR.DICT(
			desc='Privileges of the user. These privileges are always available to the user regardless of whether groups user is part of have them or not.',
			dict={'__key': ATTR.STR(), '__val': ATTR.LIST(list=[ATTR.STR()])}),
		'status': ATTR.LITERAL(
			desc='Status of the user to determine whether user has access to the app or not.',
			literal=['active', 'banned', 'deleted', 'disabled_password']),
	}
	defaults = {
		'login_time': None,
		'status': 'active',
		'groups': [],
		'privileges': {}
	}
	unique_attrs = []
	methods = {
		'read': {
			'permissions': [
				PERM(privilege='admin'),
				# [DOC] Non-admins can only read their own doc.
				PERM(privilege='read', query_mod={'_id': '$__user'}),
			]
		},
		'create': {'permissions': [PERM(privilege='admin')]},
		'update': {
			'permissions': [
				PERM(privilege='admin', doc_mod={'groups': None}),
				# [DOC] Non-admins can only update their own doc, and cannot
				# touch `groups` or `privileges`.
				PERM(
					privilege='update',
					query_mod={'_id': '$__user'},
					doc_mod={'groups': None, 'privileges': None},
				),
			],
			'query_args': {'_id': ATTR.ID()},
		},
		'delete': {
			'permissions': [
				PERM(privilege='admin'),
				PERM(privilege='delete', query_mod={'_id': '$__user'}),
			],
			'query_args': {'_id': ATTR.ID()},
		},
		'read_privileges': {
			'permissions': [
				PERM(privilege='admin'),
				PERM(privilege='read', query_mod={'_id': '$__user'}),
			],
			'query_args': {'_id': ATTR.ID()},
		},
		'add_group': {
			'permissions': [PERM(privilege='admin')],
			'query_args': {'_id': ATTR.ID()},
			# [DOC] `group` accepts either a single `_id` or a list of `_id`s.
			'doc_args': [{'group': ATTR.ID()}, {'group': ATTR.LIST(list=[ATTR.ID()])}],
		},
		'delete_group': {
			'permissions': [PERM(privilege='admin')],
			'query_args': {'_id': ATTR.ID(), 'group': ATTR.ID()},
		},
		'retrieve_file': {'permissions': [PERM(privilege='__sys')], 'get_method': True},
		'create_file': {'permissions': [PERM(privilege='__sys')]},
		'delete_file': {'permissions': [PERM(privilege='__sys')]},
	}

	async def on_read(self, results, skip_events, env, query, doc, payload):
		'''Post-read handler: strips auth-attr hashes from every returned user
		doc and injects the user's `settings` from the `Setting` module for the
		vars listed in `Config.user_doc_settings`.'''
		for i in range(len(results['docs'])):
			user = results['docs'][i]
			user['settings'] = {}
			# [DOC] Never expose password/token hashes to callers.
			for auth_attr in Config.user_auth_attrs:
				del user[f'{auth_attr}_hash']
			if len(Config.user_doc_settings):
				setting_results = await Config.modules['setting'].read(
					skip_events=[Event.PERM, Event.ARGS],
					env=env,
					query=[{
						'user': user._id,
						'var': {'$in': Config.user_doc_settings}
					}],
				)
				if setting_results.args.count:
					user['settings'] = {
						setting_doc['var']: setting_doc['val']
						for setting_doc in setting_results.args.docs
					}
		return (results, skip_events, env, query, doc, payload)

	async def pre_create(self, skip_events, env, query, doc, payload):
		'''Pre-create handler: assigns the default group (realm default when
		realm mode is on, otherwise the hard-coded DEFAULT group `_id`) and
		stashes any `settings` from the doc into `payload` for `on_create`.'''
		if Event.ARGS not in skip_events:
			if Config.realm:
				realm_results = await Config.modules['realm'].read(
					skip_events=[Event.PERM], env=env)
				realm = realm_results.args.docs[0]
				doc['groups'] = [realm.default]
			else:
				# [DOC] Hard-coded `_id` of the framework DEFAULT group.
				doc['groups'] = [ObjectId('f00000000000000000000013')]
		if 'settings' in doc.keys():
			payload['settings'] = doc['settings']
		return (skip_events, env, query, doc, payload)

	async def on_create(self, results, skip_events, env, query, doc, payload):
		'''Post-create handler: creates a `Setting` doc per entry stashed in
		`payload['settings']` by `pre_create`. A setting `val` may be a callable,
		in which case it is invoked with the handler args to produce the value.
		Aborts with the failing results if any setting creation fails.'''
		if 'settings' in payload.keys():
			for setting in payload['settings'].keys():
				if callable(payload['settings'][setting]['val']):
					setting_val = payload['settings'][setting]['val'](
						skip_events=skip_events, env=env, query=query, doc=doc)
				else:
					setting_val = payload['settings'][setting]['val']
				setting_results = await Config.modules['setting'].create(
					skip_events=[Event.PERM, Event.ARGS],
					env=env,
					doc={
						'user': results['docs'][0]._id,
						'var': setting,
						'val': setting_val,
						'type': payload['settings'][setting]['type'],
					},
				)
				if setting_results.status != 200:
					# [DOC] Propagate the failure to the caller immediately.
					return setting_results
		return (results, skip_events, env, query, doc, payload)

	async def read_privileges(self, skip_events=[], env={}, query=[], doc={}):
		'''Return the user doc with `privileges` expanded in-place to the union
		of the user's own privileges and those of every group the user is a
		member of.'''
		# [DOC] Confirm _id is valid
		results = await self.read(skip_events=[Event.PERM],
								  env=env,
								  query=[{'_id': query['_id'][0]}])
		if not results.args.count:
			return self.status(status=400,
							   msg='User is invalid.',
							   args={'code': 'INVALID_USER'})
		user = results.args.docs[0]
		# [DOC] Merge each group's privileges into the user doc, de-duplicated.
		for group in user.groups:
			group_results = await Config.modules['group'].read(
				skip_events=[Event.PERM], env=env, query=[{'_id': group}])
			group = group_results.args.docs[0]
			for privilege in group.privileges.keys():
				if privilege not in user.privileges.keys():
					user.privileges[privilege] = []
				for i in range(len(group.privileges[privilege])):
					if group.privileges[privilege][i] not in user.privileges[privilege]:
						user.privileges[privilege].append(
							group.privileges[privilege][i])
		return results

	async def add_group(self, skip_events=[], env={}, query=[], doc={}):
		'''Add the user matched by `query` to the group(s) in `doc['group']`.
		Accepts a single group `_id` or a list; a list is handled by recursing
		per group.'''
		# [DOC] Check for list group attr
		if type(doc['group']) == list:
			# [DOC] Recurse for all but the last group; fall through for the last.
			for i in range(0, len(doc['group']) - 1):
				await self.add_group(
					skip_events=skip_events,
					env=env,
					query=query,
					doc={'group': doc['group'][i]},
				)
			doc['group'] = doc['group'][-1]
		# [DOC] Confirm all basic args are provided
		doc['group'] = ObjectId(doc['group'])
		# [DOC] Confirm group is valid
		results = await Config.modules['group'].read(skip_events=[Event.PERM],
													 env=env,
													 query=[{'_id': doc['group']}])
		if not results.args.count:
			return self.status(status=400,
							   msg='Group is invalid.',
							   args={'code': 'INVALID_GROUP'})
		# [DOC] Get user details
		results = await self.read(skip_events=[Event.PERM], env=env, query=query)
		if not results.args.count:
			return self.status(status=400,
							   msg='User is invalid.',
							   args={'code': 'INVALID_USER'})
		user = results.args.docs[0]
		# [DOC] Confirm group was not added before
		if doc['group'] in user.groups:
			return self.status(
				status=400,
				msg='User is already a member of the group.',
				args={'code': 'GROUP_ADDED'},
			)
		user.groups.append(doc['group'])
		# [DOC] Update the user
		results = await self.update(skip_events=[Event.PERM],
									env=env,
									query=query,
									doc={'groups': user.groups})
		return results

	async def delete_group(self, skip_events=[], env={}, query=[], doc={}):
		'''Remove the group `query['group']` from the user `query['_id']`,
		validating both exist and that the user is actually a member.'''
		# [DOC] Confirm group is valid
		results = await Config.modules['group'].read(
			skip_events=[Event.PERM], env=env, query=[{'_id': query['group'][0]}])
		if not results.args.count:
			return self.status(status=400,
							   msg='Group is invalid.',
							   args={'code': 'INVALID_GROUP'})
		# [DOC] Get user details
		results = await self.read(skip_events=[Event.PERM],
								  env=env,
								  query=[{'_id': query['_id'][0]}])
		if not results.args.count:
			return self.status(status=400,
							   msg='User is invalid.',
							   args={'code': 'INVALID_USER'})
		user = results.args.docs[0]
		# [DOC] Confirm group was not added before
		if query['group'][0] not in user.groups:
			return self.status(
				status=400,
				msg='User is not a member of the group.',
				args={'code': 'GROUP_NOT_ADDED'},
			)
		# [DOC] Update the user
		results = await self.update(
			skip_events=[Event.PERM],
			env=env,
			query=[{'_id': query['_id'][0]}],
			doc={'groups': {'$remove': [query['group'][0]]}},
		)
		return results
class Group(BaseModule):
	'''`Group` module provides data type and controller for groups in LIMP eco-system.'''

	collection = 'groups'
	attrs = {
		'user': ATTR.ID(desc='`_id` of `User` doc the doc belongs to.'),
		'name': ATTR.LOCALE(desc='Name of the groups as `LOCALE`.'),
		'desc': ATTR.LOCALE(
			desc='Description of the group as `LOCALE`. This can be used for dynamic generated groups that are meant to be exposed to end-users.'
		),
		'privileges': ATTR.DICT(
			desc='Privileges that any user is a member of the group has.',
			dict={'__key': ATTR.STR(), '__val': ATTR.LIST(list=[ATTR.STR()])}),
		'settings': ATTR.DICT(
			desc='`Setting` docs to be created, or required for members users when added to the group.',
			dict={'__key': ATTR.STR(), '__val': ATTR.ANY()}),
		'create_time': ATTR.DATETIME(
			desc='Python `datetime` ISO format of the doc creation.'),
	}
	defaults = {
		'desc': {locale: '' for locale in Config.locales},
		'privileges': {},
		'settings': {},
	}
	methods = {
		'read': {'permissions': [PERM(privilege='admin')]},
		'create': {'permissions': [PERM(privilege='admin')]},
		'update': {
			'permissions': [
				PERM(privilege='admin'),
				# NOTE(review): '******' looks like a redacted query_mod value
				# (possibly '$__user') — confirm against upstream source.
				PERM(
					privilege='update',
					query_mod={'user': '******'},
					doc_mod={'privileges': None},
				),
			],
			'query_args': {'_id': ATTR.ID()},
		},
		'delete': {
			'permissions': [
				PERM(privilege='admin'),
				PERM(privilege='delete', query_mod={'user': '******'}),
			],
			'query_args': {'_id': ATTR.ID()},
		},
	}

	async def pre_create(self, skip_events, env, query, doc, payload):
		'''No-op pre-create handler; passes the handler args through unchanged.'''
		return (skip_events, env, query, doc, payload)

	async def pre_update(self, skip_events, env, query, doc, payload):
		'''Pre-update handler guarding `attrs` updates: only a single group may
		be targeted, and incoming attr values that are `None` or empty string
		are ignored so existing values are not overwritten.'''
		# [DOC] Make sure no attrs overwriting would happen
		if 'attrs' not in doc.keys():
			return (skip_events, env, query, doc, payload)
		results = await self.read(skip_events=[Event.PERM], env=env, query=query)
		if not results.args.count:
			return self.status(status=400,
							   msg='Group is invalid.',
							   args={'code': 'INVALID_GROUP'})
		if results.args.count > 1:
			return self.status(
				status=400,
				msg='Updating group attrs can be done only to individual groups.',
				args={'code': 'MULTI_ATTRS_UPDATE'},
			)
		# [DOC] Merge non-empty incoming values onto the stored attrs.
		merged_attrs = results.args.docs[0]['attrs']
		for attr in doc['attrs'].keys():
			if doc['attrs'][attr] != None and doc['attrs'][attr] != '':
				merged_attrs[attr] = doc['attrs'][attr]
		doc['attrs'] = merged_attrs
		return (skip_events, env, query, doc, payload)
def validate_attr(
	*,
	attr_name: str,
	attr_type: ATTR,
	attr_val: Any,
	allow_opers: bool = False,
	allow_none: bool = False,
	skip_events: List[str] = None,
	env: Dict[str, Any] = None,
	query: Union[LIMP_QUERY, Query] = None,
	doc: LIMP_DOC = None,
	scope: LIMP_DOC = None,
):
	'''Validate (and where possible coerce) `attr_val` against `attr_type`.

	Flow:
	1. Try `validate_default` first; if it succeeds its result is returned
	   (failures are deliberately swallowed and validation continues).
	2. When `allow_opers` is set, unwrap a single update-operator dict
	   (`$add`/`$multiply`/`$append`[`$unique`]/`$remove`) and remember the
	   operator so `return_valid_attr` can re-wrap the validated value.
	3. Dispatch on `attr_type._type` and return via `return_valid_attr`.

	On validation/conversion failure: returns `None` when `allow_none`, else
	the attr default when one is set, else raises `InvalidAttrException` (or
	`ConvertAttrException` for failed coercions).
	'''
	from config import Config

	# [DOC] Defaults take priority; any failure here just falls through to
	# full validation below.
	try:
		return validate_default(
			attr_type=attr_type,
			attr_val=attr_val,
			skip_events=skip_events,
			env=env,
			query=query,
			doc=doc,
			scope=scope if scope else doc,
			allow_none=allow_none,
		)
	except:
		pass
	# [DOC] Unwrap update operators so the inner value is what gets validated.
	attr_oper = False
	if allow_opers and type(attr_val) == dict:
		if '$add' in attr_val.keys():
			attr_oper = '$add'
			attr_val = attr_val['$add']
		elif '$multiply' in attr_val.keys():
			attr_oper = '$multiply'
			attr_val = attr_val['$multiply']
		elif '$append' in attr_val.keys():
			attr_oper = '$append'
			if '$unique' in attr_val.keys() and attr_val['$unique'] == True:
				attr_oper = '$append__unique'
			# [DOC] Wrap in a list so LIST validation below applies to it.
			attr_val = [attr_val['$append']]
		elif '$remove' in attr_val.keys():
			attr_oper = '$remove'
			attr_val = attr_val['$remove']
	try:
		if attr_type._type == 'ANY':
			# [DOC] ANY accepts everything as-is.
			return return_valid_attr(attr_val=attr_val, attr_oper=attr_oper)
		elif attr_type._type == 'ACCESS':
			# [DOC] Exact shape: {'anon': bool, 'users': list, 'groups': list}.
			if (type(attr_val) == dict
					and set(attr_val.keys()) == {'anon', 'users', 'groups'}
					and type(attr_val['anon']) == bool
					and type(attr_val['users']) == list
					and type(attr_val['groups']) == list):
				return return_valid_attr(attr_val=attr_val, attr_oper=attr_oper)
		elif attr_type._type == 'BOOL':
			if type(attr_val) == bool:
				return return_valid_attr(attr_val=attr_val, attr_oper=attr_oper)
		elif attr_type._type == 'DATE':
			# [DOC] ISO date: YYYY-MM-DD.
			if re.match(r'^[0-9]{4}-[0-9]{2}-[0-9]{2}$', attr_val):
				return return_valid_attr(attr_val=attr_val, attr_oper=attr_oper)
		elif attr_type._type == 'DATETIME':
			# [DOC] ISO datetime with optional seconds and microseconds.
			if re.match(
					r'^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}(:[0-9]{2}(\.[0-9]{6})?)?$',
					attr_val,
			):
				return return_valid_attr(attr_val=attr_val, attr_oper=attr_oper)
		elif attr_type._type == 'DICT':
			if type(attr_val) == dict:
				if '__key' in attr_type._args['dict'].keys():
					# [DOC] Generic dict: every key validated against '__key',
					# every value against '__val'; optional '__min'/'__max'
					# bound the number of keys, '__req' lists mandatory keys.
					if '__min' in attr_type._args['dict'].keys():
						if len(attr_val.keys()) < attr_type._args['dict']['__min']:
							raise InvalidAttrException(
								attr_name=attr_name,
								attr_type=attr_type,
								val_type=type(attr_val),
							)
					if '__max' in attr_type._args['dict'].keys():
						if len(attr_val.keys()) > attr_type._args['dict']['__max']:
							raise InvalidAttrException(
								attr_name=attr_name,
								attr_type=attr_type,
								val_type=type(attr_val),
							)
					shadow_attr_val = {}
					for child_attr_val in attr_val.keys():
						# [DOC] The validated key becomes the key of the
						# rebuilt (shadow) dict.
						shadow_attr_val[validate_attr(
							attr_name=f'{attr_name}.{child_attr_val}',
							attr_type=attr_type._args['dict']['__key'],
							attr_val=child_attr_val,
							allow_opers=allow_opers,
							allow_none=allow_none,
							skip_events=skip_events,
							env=env,
							query=query,
							doc=doc,
							scope=attr_val,
						)] = validate_attr(
							attr_name=f'{attr_name}.{child_attr_val}',
							attr_type=attr_type._args['dict']['__val'],
							attr_val=attr_val[child_attr_val],
							allow_opers=allow_opers,
							allow_none=allow_none,
							skip_events=skip_events,
							env=env,
							query=query,
							doc=doc,
							scope=attr_val,
						)
					if '__req' in attr_type._args['dict'].keys():
						for req_key in attr_type._args['dict']['__req']:
							if req_key not in shadow_attr_val.keys():
								raise InvalidAttrException(
									attr_name=attr_name,
									attr_type=attr_type,
									val_type=type(attr_val),
								)
					return return_valid_attr(attr_val=shadow_attr_val,
											 attr_oper=attr_oper)
				else:
					# [DOC] Typed dict: fixed child attrs; missing children are
					# set to None before validation (so defaults/allow_none of
					# the child type decide).
					for child_attr_type in attr_type._args['dict'].keys():
						if child_attr_type not in attr_val.keys():
							attr_val[child_attr_type] = None
						attr_val[child_attr_type] = validate_attr(
							attr_name=f'{attr_name}.{child_attr_type}',
							attr_type=attr_type._args['dict'][child_attr_type],
							attr_val=attr_val[child_attr_type],
							allow_opers=allow_opers,
							allow_none=allow_none,
							skip_events=skip_events,
							env=env,
							query=query,
							doc=doc,
							scope=attr_val,
						)
					return return_valid_attr(attr_val=attr_val, attr_oper=attr_oper)
		elif attr_type._type == 'EMAIL':
			if re.match(r'^[^@]+@[^@]+\.[^@]+$', attr_val):
				return return_valid_attr(attr_val=attr_val, attr_oper=attr_oper)
		elif attr_type._type == 'FILE':
			# [DOC] A single-item list is unwrapped to its first element after
			# recursively validating it as FILE.
			if type(attr_val) == list and len(attr_val):
				try:
					validate_attr(
						attr_name=attr_name,
						attr_type=attr_type,
						attr_val=attr_val[0],
						allow_opers=allow_opers,
						allow_none=allow_none,
						skip_events=skip_events,
						env=env,
						query=query,
						doc=doc,
						scope=attr_val,
					)
					attr_val = attr_val[0]
				except:
					raise InvalidAttrException(
						attr_name=attr_name,
						attr_type=attr_type,
						val_type=type(attr_val),
					)
			# [DOC] Exact file-dict shape check; 'content' may be BSON Binary
			# or raw bytes.
			file_type = (
				type(attr_val) == dict and set(attr_val.keys()) == {
					'name', 'lastModified', 'type', 'size', 'content'
				} and type(attr_val['name']) == str
				and type(attr_val['lastModified']) == int
				and type(attr_val['size']) == int
				and type(attr_val['content']) in [binary.Binary, bytes])
			if not file_type:
				raise InvalidAttrException(attr_name=attr_name,
										   attr_type=attr_type,
										   val_type=type(attr_val))
			if attr_type._args['types']:
				# [DOC] MIME filter: major type must match; minor type matches
				# exactly or via wildcard 'major/*'.
				for file_type in attr_type._args['types']:
					if attr_val['type'].split('/')[0] == file_type.split('/')[0]:
						if (file_type.split('/')[1] == '*'
								or attr_val['type'].split('/')[1]
								== file_type.split('/')[1]):
							return return_valid_attr(attr_val=attr_val,
													 attr_oper=attr_oper)
			else:
				return return_valid_attr(attr_val=attr_val, attr_oper=attr_oper)
		elif attr_type._type == 'FLOAT':
			# [DOC] Coerce numeric strings and ints to float first.
			if type(attr_val) == str and re.match(r'^[0-9]+(\.[0-9]+)?$', attr_val):
				attr_val = float(attr_val)
			elif type(attr_val) == int:
				attr_val = float(attr_val)
			if type(attr_val) == float:
				if attr_type._args['range']:
					# [DOC] Range membership is tested on int(attr_val) —
					# presumably because 'range' is a Python range object;
					# NOTE(review): fractional parts are ignored here.
					if int(attr_val) in attr_type._args['range']:
						return return_valid_attr(attr_val=attr_val,
												 attr_oper=attr_oper)
				else:
					return return_valid_attr(attr_val=attr_val, attr_oper=attr_oper)
		elif attr_type._type == 'GEO':
			# [DOC] GeoJSON Point: {'type': 'Point', 'coordinates': [x, y]}.
			if (type(attr_val) == dict
					and list(attr_val.keys()) == ['type', 'coordinates']
					and attr_val['type'] in ['Point']
					and type(attr_val['coordinates']) == list
					and len(attr_val['coordinates']) == 2
					and type(attr_val['coordinates'][0]) in [int, float]
					and type(attr_val['coordinates'][1]) in [int, float]):
				return return_valid_attr(attr_val=attr_val, attr_oper=attr_oper)
		elif attr_type._type == 'ID':
			# [DOC] Accept a model/doc object (use its _id), a ready ObjectId,
			# or a string convertible to ObjectId.
			if type(attr_val) == BaseModel or type(attr_val) == DictObj:
				return return_valid_attr(attr_val=attr_val._id, attr_oper=attr_oper)
			elif type(attr_val) == ObjectId:
				return return_valid_attr(attr_val=attr_val, attr_oper=attr_oper)
			elif type(attr_val) == str:
				try:
					return return_valid_attr(attr_val=ObjectId(attr_val),
											 attr_oper=attr_oper)
				except:
					raise ConvertAttrException(
						attr_name=attr_name,
						attr_type=attr_type,
						val_type=type(attr_val),
					)
		elif attr_type._type == 'INT':
			# [DOC] Coerce digit-only strings to int first.
			if type(attr_val) == str and re.match(r'^[0-9]+$', attr_val):
				attr_val = int(attr_val)
			if type(attr_val) == int:
				if attr_type._args['range']:
					if attr_val in attr_type._args['range']:
						return return_valid_attr(attr_val=attr_val,
												 attr_oper=attr_oper)
				else:
					return return_valid_attr(attr_val=attr_val, attr_oper=attr_oper)
		elif attr_type._type == 'IP':
			# [DOC] IPv4 dotted-quad, each octet 0-255.
			if re.match(
					r'^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$',
					attr_val,
			):
				return return_valid_attr(attr_val=attr_val, attr_oper=attr_oper)
		elif attr_type._type == 'LIST':
			# [DOC] Each item must validate against at least one of the child
			# types in 'list'; the first successful validation wins and its
			# (possibly coerced) value replaces the item in place.
			if type(attr_val) == list:
				for i in range(len(attr_val)):
					child_attr_val = attr_val[i]
					child_attr_check = False
					for child_attr_type in attr_type._args['list']:
						try:
							attr_val[i] = validate_attr(
								attr_name=attr_name,
								attr_type=child_attr_type,
								attr_val=child_attr_val,
								allow_opers=allow_opers,
								allow_none=allow_none,
								skip_events=skip_events,
								env=env,
								query=query,
								doc=doc,
								scope=attr_val,
							)
							child_attr_check = True
							break
						except:
							pass
					if not child_attr_check:
						raise InvalidAttrException(
							attr_name=attr_name,
							attr_type=attr_type,
							val_type=type(attr_val),
						)
				return return_valid_attr(attr_val=attr_val, attr_oper=attr_oper)
		elif attr_type._type == 'LOCALE':
			# [DOC] Validate as a generic dict keyed by configured locales,
			# requiring the default locale; then backfill any missing locales
			# with the default locale's value.
			attr_val = validate_attr(
				attr_name=attr_name,
				attr_type=ATTR.DICT(
					dict={
						'__key': ATTR.LITERAL(
							literal=[locale for locale in Config.locales]),
						'__val': ATTR.STR(),
						'__min': 1,
						'__req': [Config.locale],
					}),
				attr_val=attr_val,
				allow_opers=allow_opers,
				allow_none=allow_none,
				skip_events=skip_events,
				env=env,
				query=query,
				doc=doc,
				scope=attr_val,
			)
			attr_val = {
				locale: attr_val[locale] if locale in attr_val.keys() else
				attr_val[Config.locale]
				for locale in Config.locales
			}
			return return_valid_attr(attr_val=attr_val, attr_oper=attr_oper)
		elif attr_type._type == 'LOCALES':
			if attr_val in Config.locales:
				return return_valid_attr(attr_val=attr_val, attr_oper=attr_oper)
		elif attr_type._type == 'PHONE':
			if attr_type._args['codes']:
				# [DOC] Restrict to the configured country codes.
				for phone_code in attr_type._args['codes']:
					if re.match(fr'^\+{phone_code}[0-9]+$', attr_val):
						return return_valid_attr(attr_val=attr_val,
												 attr_oper=attr_oper)
			else:
				if re.match(r'^\+[0-9]+$', attr_val):
					return return_valid_attr(attr_val=attr_val, attr_oper=attr_oper)
		elif attr_type._type == 'STR':
			if type(attr_val) == str:
				if attr_type._args['pattern']:
					# [DOC] Pattern is anchored on both ends.
					if re.match(f'^{attr_type._args["pattern"]}$', attr_val):
						return return_valid_attr(attr_val=attr_val,
												 attr_oper=attr_oper)
				else:
					return return_valid_attr(attr_val=attr_val, attr_oper=attr_oper)
		elif attr_type._type == 'TIME':
			# [DOC] ISO time with optional seconds and microseconds.
			if re.match(r'^[0-9]{2}:[0-9]{2}(:[0-9]{2}(\.[0-9]{6})?)?$', attr_val):
				return return_valid_attr(attr_val=attr_val, attr_oper=attr_oper)
		elif attr_type._type == 'URI_WEB':
			if re.match(r'^https?:\/\/(?:[\w\-\_]+\.)(?:\.?[\w]{2,})+([\?\/].*)?$',
						attr_val):
				return return_valid_attr(attr_val=attr_val, attr_oper=attr_oper)
		elif attr_type._type == 'LITERAL':
			if attr_val in attr_type._args['literal']:
				return return_valid_attr(attr_val=attr_val, attr_oper=attr_oper)
		elif attr_type._type == 'UNION':
			# [DOC] First member type that validates wins; note the ORIGINAL
			# (uncoerced) attr_val is returned, not the validated result.
			for child_attr in attr_type._args['union']:
				try:
					validate_attr(
						attr_name=attr_name,
						attr_type=child_attr,
						attr_val=attr_val,
						allow_opers=allow_opers,
						allow_none=allow_none,
						skip_events=skip_events,
						env=env,
						query=query,
						doc=doc,
						scope=attr_val,
					)
				except:
					continue
				return return_valid_attr(attr_val=attr_val, attr_oper=attr_oper)
		elif attr_type._type == 'TYPE':
			# [DOC] Delegate to the developer-registered type callable.
			return return_valid_attr(
				attr_val=Config.types[attr_type._args['type']](
					attr_name=attr_name, attr_type=attr_type, attr_val=attr_val),
				attr_oper=attr_oper,
			)
	except Exception as e:
		# [DOC] Only validation/conversion failures get the allow_none/default
		# fallback treatment; anything else falls through to the final raise.
		if type(e) in [InvalidAttrException, ConvertAttrException]:
			if allow_none:
				return None
			elif attr_type._default != LIMP_VALUES.NONE_VALUE:
				return attr_type._default
			else:
				raise e
	raise InvalidAttrException(attr_name=attr_name,
							   attr_type=attr_type,
							   val_type=type(attr_val))