async def test_validate_attr_LITERAL_None():
    """`None` is rejected for a LITERAL attr when mode is 'create'."""
    literal_type = ATTR.LITERAL(literal=['str', 0, 1.1])
    with pytest.raises(InvalidAttrException):
        await validate_attr(
            mode='create',
            attr_name='test_validate_attr_LITERAL',
            attr_type=literal_type,
            attr_val=None,
        )
async def test_validate_attr_LITERAL_None_allow_none():
    """`None` passes through for a LITERAL attr when mode is 'update'."""
    literal_type = ATTR.LITERAL(literal=['str', 0, 1.1])
    validated = await validate_attr(
        mode='update',
        attr_name='test_validate_attr_LITERAL',
        attr_type=literal_type,
        attr_val=None,
    )
    assert validated == None
async def test_validate_attr_LITERAL_int():
    """A value present in the literal set (0) validates unchanged in 'create' mode."""
    validated = await validate_attr(
        mode='create',
        attr_name='test_validate_attr_LITERAL',
        attr_type=ATTR.LITERAL(literal=['str', 0, 1.1]),
        attr_val=0,
    )
    assert validated == 0
async def test_validate_attr_LITERAL_default_int_allow_none():
    """With a default set and mode='update', value 1 resolves to None."""
    literal_type = ATTR.LITERAL(literal=['str', 0, 1.1])
    literal_type._default = 'test_validate_attr_LITERAL'
    validated = await validate_attr(
        mode='update',
        attr_name='test_validate_attr_LITERAL',
        attr_type=literal_type,
        attr_val=1,
    )
    assert validated == None
async def attr_query_mod_type(
    *,
    mode,
    attr_name,
    attr_type,
    attr_val,
    skip_events,
    env,
    query,
    doc,
    scope,
):
    """Query-mod callable guarding the Setting 'type' query arg.

    Rejects the request when 'type' is absent from the query, or when the
    caller attempts to query 'user_sys' settings (only 'global' and 'user'
    are permitted through this path).
    """
    type_absent = 'type' not in query
    if type_absent or query['type'][0] == 'user_sys':
        raise InvalidAttrException(
            attr_name='type',
            attr_type=ATTR.LITERAL(literal=['global', 'user']),
            val_type=str,
        )
class User(BaseModule):
    '''`User` module provides data type and controller for users in Nawah eco-system. The permissions of the module methods are designed to be as secure for exposed calls, and as flexible for privileged-access.'''

    # MongoDB-style collection backing this module.
    collection = 'users'
    # Attr schema for User docs.
    attrs = {
        'name': ATTR.LOCALE(desc='Name of the user as `LOCALE`.'),
        'locale': ATTR.LOCALES(desc='Default locale of the user.'),
        'create_time': ATTR.DATETIME(
            desc='Python `datetime` ISO format of the doc creation.'),
        'login_time': ATTR.DATETIME(desc='Python `datetime` ISO format of the last login.'),
        'groups': ATTR.LIST(
            desc='List of `_id` for every group the user is member of.',
            list=[ATTR.ID(desc='`_id` of Group doc the user is member of.')],
        ),
        'privileges': ATTR.KV_DICT(
            desc='Privileges of the user. These privileges are always available to the user regardless of whether groups user is part of have them or not.',
            key=ATTR.STR(),
            val=ATTR.LIST(list=[ATTR.STR()]),
        ),
        'status': ATTR.LITERAL(
            desc='Status of the user to determine whether user has access to the app or not.',
            literal=['active', 'banned', 'deleted', 'disabled_password'],
        ),
    }
    # Defaults applied when the corresponding attr is absent from a create doc.
    defaults = {
        'login_time': None,
        'status': 'active',
        'groups': [],
        'privileges': {}
    }
    unique_attrs = []
    # Exposed methods and their permission sets. '$__user' query-mods scope
    # non-admin calls to the session's own user doc.
    methods = {
        'read': METHOD(permissions=[
            PERM(privilege='admin'),
            PERM(privilege='read', query_mod={'_id': '$__user'}),
        ]),
        'create': METHOD(permissions=[PERM(privilege='admin')]),
        'update': METHOD(
            permissions=[
                PERM(privilege='admin', doc_mod={'groups': None}),
                PERM(
                    privilege='update',
                    query_mod={'_id': '$__user'},
                    # Non-admin updates may not touch groups or privileges.
                    doc_mod={
                        'groups': None,
                        'privileges': None
                    },
                ),
            ],
            query_args={'_id': ATTR.ID()},
        ),
        'delete': METHOD(
            permissions=[
                PERM(privilege='admin'),
                PERM(privilege='delete', query_mod={'_id': '$__user'}),
            ],
            query_args={'_id': ATTR.ID()},
        ),
        'read_privileges': METHOD(
            permissions=[
                PERM(privilege='admin'),
                PERM(privilege='read', query_mod={'_id': '$__user'}),
            ],
            query_args={'_id': ATTR.ID()},
        ),
        'add_group': METHOD(
            permissions=[PERM(privilege='admin')],
            query_args={'_id': ATTR.ID()},
            # Accepts either a single group _id or a list of them.
            doc_args=[{
                'group': ATTR.ID()
            }, {
                'group': ATTR.LIST(list=[ATTR.ID()])
            }],
        ),
        'delete_group': METHOD(
            permissions=[PERM(privilege='admin')],
            query_args={
                '_id': ATTR.ID(),
                'group': ATTR.ID()
            },
        ),
        'retrieve_file': METHOD(permissions=[PERM(privilege='__sys')], get_method=True),
        'create_file': METHOD(permissions=[PERM(privilege='__sys')]),
        'delete_file': METHOD(permissions=[PERM(privilege='__sys')]),
    }

    async def on_read(self, results, skip_events, env, query, doc, payload):
        """Post-read hook: strip auth-attr hashes from returned docs and merge
        per-user Setting docs (Config.user_doc_settings) into each user doc."""
        for i in range(len(results['docs'])):
            user = results['docs'][i]
            # Never expose credential hashes to callers.
            for auth_attr in Config.user_attrs.keys():
                del user[f'{auth_attr}_hash']
            if len(Config.user_doc_settings):
                setting_results = await Registry.module('setting').read(
                    skip_events=[Event.PERM, Event.ARGS],
                    env=env,
                    query=[{
                        'user': user._id,
                        'var': {
                            '$in': Config.user_doc_settings
                        }
                    }],
                )
                # Track which doc-settings were NOT found as Setting docs.
                user_doc_settings = copy.copy(Config.user_doc_settings)
                if setting_results.args.count:
                    for setting_doc in setting_results.args.docs:
                        user_doc_settings.remove(setting_doc['var'])
                        user[setting_doc['var']] = setting_doc['val']
                # [DOC] Forward-compatibility: If user was created before presence of any user_doc_settings, add them with default value
                for setting_attr in user_doc_settings:
                    user[setting_attr] = Config.user_settings[
                        setting_attr].default
                    # [DOC] Set NAWAH_VALUES.NONE_VALUE to None if it was default
                    if user[setting_attr] == NAWAH_VALUES.NONE_VALUE:
                        user[setting_attr] = None
        return (results, skip_events, env, query, doc, payload)

    async def pre_create(self, skip_events, env, query, doc, payload):
        """Pre-create hook: assign the default group and validate/collect
        user-settings from the doc into payload['user_settings'] so that
        on_create can persist them as Setting docs."""
        if Event.ARGS not in skip_events:
            # Default group _id for every new user. NOTE(review): hard-coded
            # ObjectId — presumably the framework's DEFAULT group; confirm.
            doc['groups'] = [ObjectId('f00000000000000000000013')]
        user_settings = {}
        for attr in Config.user_settings.keys():
            if Config.user_settings[attr].type == 'user_sys':
                # System-managed settings always take the configured default.
                user_settings[attr] = copy.deepcopy(
                    Config.user_settings[attr].default)
            else:
                if attr in doc.keys():
                    try:
                        await validate_attr(
                            mode='create',
                            attr_name=attr,
                            attr_type=Config.user_settings[attr].val_type,
                            attr_val=doc[attr],
                        )
                        user_settings[attr] = doc[attr]
                    # NOTE(review): bare except swallows all errors, including
                    # non-validation ones — consider narrowing.
                    except:
                        raise self.exception(
                            status=400,
                            msg=f'Invalid settings attr \'{attr}\' for \'create\' request on module \'CORE_USER\'',
                            args={'code': 'INVALID_ATTR'},
                        )
                else:
                    # NONE_VALUE default means the setting is required.
                    if Config.user_settings[
                            attr].default == NAWAH_VALUES.NONE_VALUE:
                        raise self.exception(
                            status=400,
                            msg=f'Missing settings attr \'{attr}\' for \'create\' request on module \'CORE_USER\'',
                            args={'code': 'MISSING_ATTR'},
                        )
                    else:
                        user_settings[attr] = copy.deepcopy(
                            Config.user_settings[attr].default)
        payload['user_settings'] = user_settings
        return (skip_events, env, query, doc, payload)

    async def on_create(self, results, skip_events, env, query, doc, payload):
        """Post-create hook: persist the settings collected by pre_create as
        Setting docs tied to the newly created user."""
        if 'user_settings' in payload.keys():
            for setting in payload['user_settings'].keys():
                setting_results = await Registry.module('setting').create(
                    skip_events=[Event.PERM, Event.ARGS],
                    env=env,
                    doc={
                        'user': results['docs'][0]._id,
                        'var': setting,
                        'val_type': encode_attr_type(
                            attr_type=Config.user_settings[setting].val_type),
                        'val': payload['user_settings'][setting],
                        'type': Config.user_settings[setting].type,
                    },
                )
                # Abort and surface the failing Setting create result.
                if setting_results.status != 200:
                    return setting_results
        return (results, skip_events, env, query, doc, payload)

    # NOTE(review): mutable default args ([], {}) below are shared across
    # calls if ever mutated — framework convention here, but worth confirming.
    async def read_privileges(self, skip_events=[], env={}, query=[], doc={}):
        """Read the user and merge in the privileges of every group the user
        belongs to (union, without duplicates); returns the read results with
        docs[0].privileges updated in place."""
        # [DOC] Confirm _id is valid
        results = await self.read(skip_events=[Event.PERM],
                                  env=env,
                                  query=[{
                                      '_id': query['_id'][0]
                                  }])
        if not results.args.count:
            raise self.exception(status=400,
                                 msg='User is invalid.',
                                 args={'code': 'INVALID_USER'})
        user = results.args.docs[0]
        for group in user.groups:
            group_results = await Registry.module('group').read(
                skip_events=[Event.PERM], env=env, query=[{
                    '_id': group
                }])
            group = group_results.args.docs[0]
            for privilege in group.privileges.keys():
                if privilege not in user.privileges.keys():
                    user.privileges[privilege] = []
                for i in range(len(group.privileges[privilege])):
                    if group.privileges[privilege][i] not in user.privileges[
                            privilege]:
                        user.privileges[privilege].append(
                            group.privileges[privilege][i])
        return results

    async def add_group(self, skip_events=[], env={}, query=[], doc={}):
        """Add one group (or each group of a list) to the user matched by
        `query`; refreshes the session user's privileges if it is the same
        user. Raises on unknown group/user or duplicate membership."""
        # [DOC] Check for list group attr
        if type(doc['group']) == list:
            # Recurse for all but the last group, then fall through to handle
            # the last one in this call.
            for i in range(0, len(doc['group']) - 1):
                await self.add_group(
                    skip_events=skip_events,
                    env=env,
                    query=query,
                    doc={'group': doc['group'][i]},
                )
            doc['group'] = doc['group'][-1]
        # [DOC] Confirm all basic args are provided
        doc['group'] = ObjectId(doc['group'])
        # [DOC] Confirm group is valid
        results = await Registry.module('group').read(skip_events=[Event.PERM],
                                                      env=env,
                                                      query=[{
                                                          '_id': doc['group']
                                                      }])
        if not results.args.count:
            raise self.exception(status=400,
                                 msg='Group is invalid.',
                                 args={'code': 'INVALID_GROUP'})
        # [DOC] Get user details
        results = await self.read(skip_events=[Event.PERM],
                                  env=env,
                                  query=query)
        if not results.args.count:
            raise self.exception(status=400,
                                 msg='User is invalid.',
                                 args={'code': 'INVALID_USER'})
        user = results.args.docs[0]
        # [DOC] Confirm group was not added before
        if doc['group'] in user.groups:
            raise self.exception(
                status=400,
                msg='User is already a member of the group.',
                args={'code': 'GROUP_ADDED'},
            )
        user.groups.append(doc['group'])
        # [DOC] Update the user
        results = await self.update(skip_events=[Event.PERM],
                                    env=env,
                                    query=query,
                                    doc={'groups': user.groups})
        # [DOC] if update fails, return update results
        if results.status != 200:
            return results
        # [DOC] Check if the updated User doc belongs to current session and update it
        if env['session'].user._id == user._id:
            user_results = await self.read_privileges(skip_events=[Event.PERM],
                                                      env=env,
                                                      query=[{
                                                          '_id': user._id
                                                      }])
            env['session']['user'] = user_results.args.docs[0]
        return results

    async def delete_group(self, skip_events=[], env={}, query=[], doc={}):
        """Remove a group from the user's `groups` via the `$del_val` doc
        oper; refreshes the session user's privileges if it is the same user.
        Raises on unknown group/user or missing membership."""
        # [DOC] Confirm group is valid
        results = await Registry.module('group').read(skip_events=[Event.PERM],
                                                      env=env,
                                                      query=[{
                                                          '_id': query['group'][0]
                                                      }])
        if not results.args.count:
            raise self.exception(status=400,
                                 msg='Group is invalid.',
                                 args={'code': 'INVALID_GROUP'})
        # [DOC] Get user details
        results = await self.read(skip_events=[Event.PERM],
                                  env=env,
                                  query=[{
                                      '_id': query['_id'][0]
                                  }])
        if not results.args.count:
            raise self.exception(status=400,
                                 msg='User is invalid.',
                                 args={'code': 'INVALID_USER'})
        user = results.args.docs[0]
        # [DOC] Confirm group was not added before
        if query['group'][0] not in user.groups:
            raise self.exception(
                status=400,
                msg='User is not a member of the group.',
                args={'code': 'GROUP_NOT_ADDED'},
            )
        # [DOC] Update the user
        results = await self.update(
            skip_events=[Event.PERM],
            env=env,
            query=[{
                '_id': query['_id'][0]
            }],
            doc={'groups': {
                '$del_val': [query['group'][0]]
            }},
        )
        # [DOC] if update fails, return update results
        if results.status != 200:
            return results
        # [DOC] Check if the updated User doc belongs to current session and update it
        if env['session'].user._id == user._id:
            user_results = await self.read_privileges(skip_events=[Event.PERM],
                                                      env=env,
                                                      query=[{
                                                          '_id': user._id
                                                      }])
            env['session']['user'] = user_results.args.docs[0]
        return results
def test_generate_attr_LITERAL():
    """generate_attr for a LITERAL type must pick one of the declared values."""
    options = ['abc', 321, False, 12.34, 'foo', 'bar', 'baz']
    generated = utils.generate_attr(attr_type=ATTR.LITERAL(literal=options))
    assert generated in options
def test_generate_attr_KV_DICT_key_LITERAL():
    """KV_DICT with a two-value LITERAL key and min=2 yields exactly those keys."""
    kv_type = ATTR.KV_DICT(
        key=ATTR.LITERAL(literal=['foo', 'bar']), val=ATTR.INT(), min=2)
    generated = utils.generate_attr(attr_type=kv_type)
    assert len(generated) == 2
    assert set(generated) == {'foo', 'bar'}
class Setting(BaseModule):
    '''`Setting` module provides data type and controller for settings in Nawah eco-system. This is used by `User` module to provide additional user-wise settings. It also allows for global-typed settings.'''

    collection = 'settings'
    # Attr schema for Setting docs.
    attrs = {
        'user': ATTR.ID(desc='`_id` of `User` doc the doc belongs to.'),
        'var': ATTR.STR(
            desc='Name of the setting. This is unique for every `user` in the module.'
        ),
        'val': ATTR.ANY(desc='Value of the setting.'),
        # Dynamic attr definition used to validate 'val' on update.
        'val_type': ATTR.DYNAMIC_ATTR(),
        'type': ATTR.LITERAL(
            desc='Type of the setting. This sets whether setting is global, or belong to user, and whether use can update it or not.',
            literal=['global', 'user', 'user_sys'],
        ),
    }
    diff = True
    unique_attrs = [('user', 'var', 'type')]
    extns = {
        'val': ATTR.TYPE(type=attr_extn_val),
    }
    methods = {
        'read': METHOD(
            permissions=[
                PERM(privilege='admin', query_mod={'$limit': 1}),
                PERM(
                    privilege='read',
                    query_mod={
                        # NOTE(review): '******' looks like a redacted dynamic
                        # value (cf. '$__user' in the User module) — confirm.
                        'user': '******',
                        # attr_query_mod_type rejects missing/'user_sys' types.
                        'type': ATTR.TYPE(type=attr_query_mod_type),
                        '$limit': 1,
                    },
                ),
            ],
            # Three accepted query shapes: by _id, by global var, by user var.
            query_args=[
                {
                    '_id': ATTR.ID(),
                    'type': ATTR.LITERAL(literal=['global', 'user', 'user_sys']),
                },
                {
                    'var': ATTR.STR(),
                    'type': ATTR.LITERAL(literal=['global']),
                },
                {
                    'var': ATTR.STR(),
                    'user': ATTR.ID(),
                    'type': ATTR.LITERAL(literal=['user', 'user_sys']),
                },
            ],
        ),
        'create': METHOD(
            permissions=[
                PERM(privilege='admin'),
                PERM(privilege='create', doc_mod={'type': 'user'}),
            ]
        ),
        'update': METHOD(
            permissions=[
                PERM(privilege='admin', query_mod={'$limit': 1}),
                PERM(
                    privilege='update',
                    # NOTE(review): '******' — see note on 'read' above.
                    query_mod={'type': 'user', 'user': '******', '$limit': 1},
                    # Non-admin updates may not change var/val_type/type.
                    doc_mod={'var': None, 'val_type': None, 'type': None},
                ),
            ],
            query_args=[
                {
                    '_id': ATTR.ID(),
                    'type': ATTR.LITERAL(literal=['global', 'user', 'user_sys']),
                },
                {
                    'var': ATTR.STR(),
                    'type': ATTR.LITERAL(literal=['global']),
                },
                {
                    'var': ATTR.STR(),
                    'user': ATTR.ID(),
                    'type': ATTR.LITERAL(literal=['user', 'user_sys']),
                },
            ],
        ),
        'delete': METHOD(
            permissions=[PERM(privilege='admin', query_mod={'$limit': 1})],
            query_args=[{'_id': ATTR.ID()}, {'var': ATTR.STR()}],
        ),
        'retrieve_file': METHOD(
            permissions=[PERM(privilege='*', query_mod={'type': 'global'})],
            get_method=True,
        ),
    }

    async def on_create(self, results, skip_events, env, query, doc, payload):
        """Post-create hook: mirror a newly created user-scoped setting onto
        the session user doc when it is one of Config.user_doc_settings."""
        if doc['type'] in ['user', 'user_sys']:
            if doc['user'] == env['session'].user._id and doc['var'] in Config.user_doc_settings:
                env['session'].user[doc['var']] = doc['val']
        return (results, skip_events, env, query, doc, payload)

    async def pre_update(self, skip_events, env, query, doc, payload):
        """Pre-update hook: require a 'val' (or 'val.*') doc attr and validate
        it against the stored Setting's dynamic val_type; rejects None."""
        # Find the 'val' / 'val.<sub>' doc attr; for-else raises when absent.
        for attr in doc.keys():
            if attr == 'val' or attr.startswith('val.'):
                val_attr = attr
                break
        else:
            raise self.exception(
                status=400,
                msg='Could not match doc with any of the required doc_args. Failed sets:[\'val\': Missing]',
                args={'code': 'INVALID_DOC'},
            )
        setting_results = await self.read(skip_events=[Event.PERM], env=env, query=query)
        if not setting_results.args.count:
            raise self.exception(
                status=400, msg='Invalid Setting doc', args={'code': 'INVALID_SETTING'}
            )
        setting = setting_results.args.docs[0]
        # [DOC] Attempt to validate val against Setting val_type
        try:
            exception_raised: Exception = None
            setting_val_type, _ = generate_dynamic_attr(dynamic_attr=setting.val_type)
            await validate_doc(
                mode='update',
                doc=doc,
                attrs={'val': setting_val_type},
                skip_events=skip_events,
                env=env,
                query=query,
            )
        except Exception as e:
            exception_raised = e
        # NOTE(review): message below contains a duplicated 'for for' — left
        # untouched here as it is runtime output, not a comment.
        if exception_raised or doc[val_attr] == None:
            raise self.exception(
                status=400,
                msg=f'Invalid value for for Setting doc of type \'{type(doc[val_attr])}\' with required type \'{setting.val_type}\'',
                args={'code': 'INVALID_ATTR'},
            )
        return (skip_events, env, query, doc, payload)

    async def on_update(self, results, skip_events, env, query, doc, payload):
        """Post-update hook: best-effort mirror of the update onto the session
        user doc for user-scoped settings, interpreting doc-oper dicts
        ($add, $multiply, $append, $set_index, $del_val, $del_index)."""
        # [TODO] Update according to the changes of Doc Opers
        try:
            if (
                query['type'][0] in ['user', 'user_sys']
                and query['user'][0] == env['session'].user._id
                and query['var'][0] in Config.user_doc_settings
            ):
                if type(doc['val']) == dict and '$add' in doc['val'].keys():
                    env['session'].user[query['var'][0]] += doc['val']['$add']
                elif type(doc['val']) == dict and '$multiply' in doc['val'].keys():
                    env['session'].user[query['var'][0]] *= doc['val']['$multiply']
                elif type(doc['val']) == dict and '$append' in doc['val'].keys():
                    env['session'].user[query['var'][0]].append(doc['val']['$append'])
                elif type(doc['val']) == dict and '$set_index' in doc['val'].keys():
                    # NOTE(review): index read from '$index' while value comes
                    # from '$set_index' — confirm this matches the doc-oper
                    # shape; looks asymmetric.
                    env['session'].user[query['var'][0]][doc['val']['$index']] = doc['val'][
                        '$set_index'
                    ]
                elif type(doc['val']) == dict and '$del_val' in doc['val'].keys():
                    for val in doc['val']['$del_val']:
                        env['session'].user[query['var'][0]].remove(val)
                elif type(doc['val']) == dict and '$del_index' in doc['val'].keys():
                    del env['session'].user[query['var'][0]][doc['val']['$index']]
                else:
                    env['session'].user[query['var'][0]] = doc['val']
        # Deliberate best-effort: session mirroring must never fail the update.
        except:
            pass
        return (results, skip_events, env, query, doc, payload)
def generate_attr(*, attr_type: ATTR) -> Any:
    """Generate a random value conforming to `attr_type`.

    Dispatches on ``attr_type._type`` and reads type arguments from
    ``attr_type._args`` (assumes all ATTR instances expose these two
    attributes). Used to produce test fixtures.

    Args:
        attr_type: The ATTR definition to generate a value for.

    Returns:
        A randomly generated value of the appropriate Python type.

    Raises:
        Exception: If ``attr_type._type`` is not a known generator type.
    """
    attr_val: Any
    if attr_type._type == 'ANY':
        return '__any'
    elif attr_type._type == 'ACCESS':
        return {'anon': True, 'users': [], 'groups': []}
    elif attr_type._type == 'BOOL':
        attr_val = random.choice([True, False])
        return attr_val
    elif attr_type._type == 'COUNTER':
        # Extract '$__values:N' and '$__counters.name' placeholders from the
        # counter pattern.
        counter_groups = re.findall(
            r'(\$__(?:values:[0-9]+|counters\.[a-z0-9_]+))', attr_type._args['pattern']
        )
        attr_val = attr_type._args['pattern']
        # [FIX] Was a duplicated nested loop (`for group in counter_groups:`
        # twice), which invoked every values-callable len(counter_groups)
        # times redundantly. A single pass replaces each placeholder once.
        for group in counter_groups:
            if group.startswith('$__values:'):
                value_callable = attr_type._args['values'][int(group.replace('$__values:', ''))]
                attr_val = attr_val.replace(
                    group, str(value_callable(skip_events=[], env={}, query=[], doc={}))
                )
            elif group.startswith('$__counters.'):
                # Counters have no backing store here; use a fixed placeholder.
                attr_val = attr_val.replace(group, str(42))
        return attr_val
    elif attr_type._type == 'DATE':
        if attr_type._args['ranges']:
            datetime_range = attr_type._args['ranges'][0]
            # [DOC] Be lazy! Find whether start or end of range is a literal
            # date and base the value on it
            if datetime_range[0][0] in ['+', '-'] and datetime_range[1][0] in ['+', '-']:
                # [DOC] Both start, end are dynamic, process start
                datetime_range_delta = {}
                if datetime_range[0][-1] == 'd':
                    datetime_range_delta = {'days': int(datetime_range[0][:-1])}
                elif datetime_range[0][-1] == 'w':
                    datetime_range_delta = {'weeks': int(datetime_range[0][:-1])}
                attr_val = (
                    (datetime.datetime.utcnow() + datetime.timedelta(**datetime_range_delta))
                    .isoformat()
                    .split('T')[0]
                )
            else:
                if datetime_range[0][0] not in ['+', '-']:
                    attr_val = datetime_range[0]
                else:
                    # Start is dynamic: pick a date just inside the literal end.
                    attr_val = (
                        (datetime.datetime.fromisoformat(datetime_range[1]) - datetime.timedelta(days=1))
                        .isoformat()
                        .split('T')[0]
                    )
        else:
            attr_val = datetime.datetime.utcnow().isoformat().split('T')[0]
        return attr_val
    elif attr_type._type == 'DATETIME':
        if attr_type._args['ranges']:
            datetime_range = attr_type._args['ranges'][0]
            # [DOC] Be lazy! Find whether start or end of range is a literal
            # datetime and base the value on it
            if datetime_range[0][0] in ['+', '-'] and datetime_range[1][0] in ['+', '-']:
                # [DOC] Both start, end are dynamic, process start
                datetime_range_delta = {}
                if datetime_range[0][-1] == 'd':
                    datetime_range_delta = {'days': int(datetime_range[0][:-1])}
                elif datetime_range[0][-1] == 's':
                    datetime_range_delta = {'seconds': int(datetime_range[0][:-1])}
                elif datetime_range[0][-1] == 'm':
                    datetime_range_delta = {'minutes': int(datetime_range[0][:-1])}
                elif datetime_range[0][-1] == 'h':
                    datetime_range_delta = {'hours': int(datetime_range[0][:-1])}
                elif datetime_range[0][-1] == 'w':
                    datetime_range_delta = {'weeks': int(datetime_range[0][:-1])}
                attr_val = (
                    datetime.datetime.utcnow() + datetime.timedelta(**datetime_range_delta)
                ).isoformat()
            else:
                if datetime_range[0][0] not in ['+', '-']:
                    attr_val = datetime_range[0]
                else:
                    attr_val = (
                        datetime.datetime.fromisoformat(datetime_range[1])
                        - datetime.timedelta(days=1)
                    ).isoformat()
        else:
            attr_val = datetime.datetime.utcnow().isoformat()
        return attr_val
    elif attr_type._type == 'KV_DICT':
        attr_val = {}
        # Required keys first, each with a generated value.
        if attr_type._args['req']:
            attr_val = {
                generate_attr(attr_type=ATTR.LITERAL(literal=[req])): generate_attr(
                    attr_type=attr_type._args['val']
                )
                for req in attr_type._args['req']
            }
        for _ in range(attr_type._args['min'] or 0):
            attr_val[generate_attr(attr_type=attr_type._args['key'])] = generate_attr(
                attr_type=attr_type._args['val']
            )
        # Random keys may collide; retry recursively until 'min' is honored.
        if len(attr_val.keys()) < (attr_type._args['min'] or 0):
            attr_val = generate_attr(attr_type=attr_type)
        return attr_val
    elif attr_type._type == 'TYPED_DICT':
        attr_val = {
            child_attr: generate_attr(attr_type=attr_type._args['dict'][child_attr])
            for child_attr in attr_type._args['dict'].keys()
        }
        return attr_val
    elif attr_type._type == 'EMAIL':
        attr_val = f'some-{math.ceil(random.random() * 10000)}@mail.provider.com'
        if attr_type._args['allowed_domains']:
            # Strict mode replaces the full host; otherwise only the apex.
            if attr_type._args['strict']:
                domain = 'mail.provider.com'
            else:
                domain = 'provider.com'
            attr_val = attr_val.replace(
                domain, random.choice(attr_type._args['allowed_domains'])
            )
        return attr_val
    elif attr_type._type == 'FILE':
        attr_file_type = 'text/plain'
        attr_file_extension = 'txt'
        if attr_type._args['types']:
            # 'a/b' entries set the MIME type, '*.ext' entries the extension.
            for file_type in attr_type._args['types']:
                if '/' in file_type:
                    attr_file_type = file_type
                if '*.' in file_type:
                    attr_file_extension = file_type.replace('*.', '')
        file_name = f'__file-{math.ceil(random.random() * 10000)}.{attr_file_extension}'
        return {
            'name': file_name,
            'lastModified': 100000,
            'type': attr_file_type,
            'size': 6,
            'content': b'__file',
        }
    elif attr_type._type == 'FLOAT':
        if attr_type._args['ranges']:
            attr_val = random.choice(
                range(
                    math.ceil(attr_type._args['ranges'][0][0]),
                    math.floor(attr_type._args['ranges'][0][1]),
                )
            )
            # Nudge off the integer so the value is a genuine float while
            # staying inside the range.
            if (
                attr_val != attr_type._args['ranges'][0][0]
                and (attr_val - 0.01) != attr_type._args['ranges'][0][0]
            ):
                attr_val -= 0.01
            elif (attr_val + 0.01) < attr_type._args['ranges'][0][1]:
                attr_val += 0.01
            else:
                attr_val = float(attr_val)
        else:
            attr_val = random.random() * 10000
        return attr_val
    elif attr_type._type == 'GEO':
        return {
            'type': 'Point',
            'coordinates': [
                math.ceil(random.random() * 100000) / 1000,
                math.ceil(random.random() * 100000) / 1000,
            ],
        }
    elif attr_type._type == 'ID':
        return ObjectId()
    elif attr_type._type == 'INT':
        if attr_type._args['ranges']:
            attr_val = random.choice(
                range(attr_type._args['ranges'][0][0], attr_type._args['ranges'][0][1])
            )
        else:
            attr_val = math.ceil(random.random() * 10000)
        return attr_val
    elif attr_type._type == 'IP':
        return '127.0.0.1'
    elif attr_type._type == 'LIST':
        return [
            generate_attr(attr_type=random.choice(attr_type._args['list']))
            for _ in range(attr_type._args['min'] or 0)
        ]
    elif attr_type._type == 'LOCALE':
        return {
            locale: f'__locale-{math.ceil(random.random() * 10000)}'
            for locale in Config.locales
        }
    elif attr_type._type == 'LOCALES':
        return Config.locale
    elif attr_type._type == 'PHONE':
        attr_phone_code = '000'
        if attr_type._args['codes']:
            attr_phone_code = random.choice(attr_type._args['codes'])
        return f'+{attr_phone_code}{math.ceil(random.random() * 10000)}'
    elif attr_type._type == 'STR':
        if attr_type._args['pattern']:
            logger.warning('Generator for Attr Type STR can\'t handle patterns. Ignoring.')
        return f'__str-{math.ceil(random.random() * 10000)}'
    elif attr_type._type == 'TIME':
        if attr_type._args['ranges']:
            datetime_range = attr_type._args['ranges'][0]
            # [DOC] Be lazy! Find whether start or end of range is a literal
            # time and base the value on it
            if datetime_range[0][0] in ['+', '-'] and datetime_range[1][0] in ['+', '-']:
                # [DOC] Both start, end are dynamic, process start
                datetime_range_delta = {}
                if datetime_range[0][-1] == 's':
                    datetime_range_delta = {'seconds': int(datetime_range[0][:-1])}
                elif datetime_range[0][-1] == 'm':
                    datetime_range_delta = {'minutes': int(datetime_range[0][:-1])}
                elif datetime_range[0][-1] == 'h':
                    datetime_range_delta = {'hours': int(datetime_range[0][:-1])}
                attr_val = (
                    (datetime.datetime.utcnow() + datetime.timedelta(**datetime_range_delta))
                    .isoformat()
                    .split('T')[1]
                )
            else:
                if datetime_range[0][0] not in ['+', '-']:
                    attr_val = datetime_range[0]
                else:
                    # [REF]: https://stackoverflow.com/a/656394/2393762
                    attr_val = (
                        (
                            datetime.datetime.combine(
                                datetime.date.today(), datetime.time.fromisoformat(datetime_range[1])
                            )
                            - datetime.timedelta(minutes=1)
                        )
                        .isoformat()
                        .split('T')[1]
                    )
        else:
            attr_val = datetime.datetime.utcnow().isoformat().split('T')[1]
        return attr_val
    elif attr_type._type == 'URI_WEB':
        attr_val = f'https://sub.domain.com/page-{math.ceil(random.random() * 10000)}/'
        if attr_type._args['allowed_domains']:
            # Strict mode replaces the full host; otherwise only the apex.
            if attr_type._args['strict']:
                domain = 'sub.domain.com'
            else:
                domain = 'domain.com'
            attr_val = attr_val.replace(
                domain, random.choice(attr_type._args['allowed_domains'])
            )
        return attr_val
    elif attr_type._type == 'LITERAL':
        attr_val = random.choice(attr_type._args['literal'])
        return attr_val
    elif attr_type._type == 'UNION':
        attr_val = generate_attr(attr_type=random.choice(attr_type._args['union']))
        return attr_val
    raise Exception(f'Unknown generator attr \'{attr_type}\'')