Esempio n. 1
0
async def test_validate_attr_ID_None_allow_none():
    '''In `update` mode `None` is allowed for an ID attr and is returned as-is.'''
    attr_val = await validate_attr(
        attr_name='test_validate_attr_ID',
        attr_type=ATTR.ID(),
        attr_val=None,
        mode='update',
    )
    # PEP 8: comparisons to singletons like None must use `is`, not `==`
    assert attr_val is None
Esempio n. 2
0
async def test_validate_attr_ID_None():
    '''In `create` mode `None` is rejected for an ID attr with `InvalidAttrException`.'''
    with pytest.raises(InvalidAttrException):
        await validate_attr(
            mode='create',
            attr_name='test_validate_attr_ID',
            attr_type=ATTR.ID(),
            attr_val=None,
        )
Esempio n. 3
0
async def test_validate_attr_ID_objectid():
    '''A valid `ObjectId` passes ID attr validation unchanged in `create` mode.'''
    oid = ObjectId('000000000000000000000000')
    returned_val = await validate_attr(
        mode='create',
        attr_name='test_validate_attr_ID',
        attr_type=ATTR.ID(),
        attr_val=oid,
    )
    assert returned_val == oid
Esempio n. 4
0
async def test_validate_attr_ID_default_int_allow_none():
    '''In `update` mode an invalid `int` value resolves to `None` rather than the default.'''
    attr_type = ATTR.ID()
    attr_type._default = 'test_validate_attr_ID'
    attr_val = await validate_attr(
        attr_name='test_validate_attr_ID',
        attr_type=attr_type,
        attr_val=1,
        mode='update',
    )
    # PEP 8: comparisons to singletons like None must use `is`, not `==`
    assert attr_val is None
Esempio n. 5
0
async def test_validate_attr_ID_default_int():
    '''In `create` mode an invalid `int` value falls back to the configured default.'''
    id_attr = ATTR.ID()
    id_attr._default = 'test_validate_attr_ID'
    returned_val = await validate_attr(
        mode='create',
        attr_name='test_validate_attr_ID',
        attr_type=id_attr,
        attr_val=1,
    )
    assert returned_val == 'test_validate_attr_ID'
Esempio n. 6
0
class File(BaseModule):
	'''`File` module provides functionality for `File Upload Workflow`.'''

	# Collection backing this module's docs
	collection = 'files'
	# Attrs of a File doc: owning user, the file object itself, and creation time
	attrs = {
		'user': ATTR.ID(desc='`_id` of `User` doc file belongs to.'),
		'file': ATTR.FILE(desc='File object.'),
		'create_time': ATTR.DATETIME(
			desc='Python `datetime` ISO format of the doc creation.'
		),
	}
	# 'read' and 'delete' are restricted to internal (__sys) calls;
	# 'create' is exposed as a POST method for uploads
	methods = {
		'read': METHOD(permissions=[PERM(privilege='__sys')]),
		'create': METHOD(
			permissions=[PERM(privilege='create')],
			post_method=True,
		),
		'delete': METHOD(permissions=[PERM(privilege='__sys')]),
	}

	async def on_read(self, results, skip_events, env, query, doc, payload):
		'''Normalise `file.lastModified` of every returned doc to `int`.'''
		# Iterate docs directly instead of indexing by range(len(...))
		for results_doc in results['docs']:
			# int() drops any fractional part; assumes the stored value is numeric — TODO confirm upstream type
			results_doc['file']['lastModified'] = int(results_doc['file']['lastModified'])
		return (results, skip_events, env, query, doc, payload)

	async def pre_create(self, skip_events, env, query, doc, payload):
		if Config.file_upload_limit != -1 and len(doc['file']) > Config.file_upload_limit:
			raise self.exception(
				status=400,
				msg=f'File size is beyond allowed limit.',
				args={
					'code': 'INVALID_SIZE',
					'attr': doc['__attr'].decode('utf-8'),
					'name': doc['name'].decode('utf-8'),
				},
			)
		if (module := doc['__module'].decode('utf-8')) not in Config.modules.keys():
			raise self.exception(
				status=400,
				msg=f'Invalid module \'{module}\'',
				args={'code': 'INVALID_MODULE'},
			)

		try:
			attr_type = _extract_attr(
				scope=Registry.module(module).attrs,
				attr_path='$__' + (attr := doc['__attr'].decode('utf-8')),
			)
Esempio n. 7
0
def test_generate_attr_ID():
    '''`generate_attr` produces an `ObjectId` for the ID attr type.'''
    attr_val = utils.generate_attr(attr_type=ATTR.ID())
    # isinstance is the idiomatic type check (type(...) == ... needlessly rejects subclasses)
    assert isinstance(attr_val, ObjectId)
Esempio n. 8
0
class Analytic(BaseModule):
    '''`Analytic` module provides data type and controller from `Analytics Workflow` and accompanying analytics docs. It uses `pre_create` handler to assure no events duplications occur and all occurrences of the same event are recorded in one doc.'''

    collection = 'analytics'
    attrs = {
        'user': ATTR.ID(desc='`_id` of `User` doc the doc belongs to.'),
        'event': ATTR.STR(desc='Analytics event name.'),
        'subevent': ATTR.ANY(
            desc='Analytics subevent distinguishing attribute. This is usually `STR`, or `ID` but it is introduced in the module as `ANY` to allow wider use-cases by developers.'
        ),
        'date': ATTR.DATE(
            desc='Analytics event date. This allows clustering of events occupancies to limit doc size.'
        ),
        'occurrences': ATTR.LIST(
            desc='All occurrences of the event as list.',
            list=[
                ATTR.TYPED_DICT(
                    desc='Single occurrence of the event details.',
                    dict={
                        'args': ATTR.KV_DICT(
                            desc='Key-value `dict` containing event args, if any.',
                            key=ATTR.STR(),
                            val=ATTR.ANY(),
                        ),
                        'score': ATTR.INT(
                            desc='Numerical score for occurrence of the event.'
                        ),
                        'create_time': ATTR.DATETIME(
                            desc='Python `datetime` ISO format of the occurrence of the event.'
                        ),
                    },
                )
            ],
        ),
        'score': ATTR.INT(
            desc='Total score of all scores of all occurrences of the event. This can be used for data analysis.'
        ),
    }
    unique_attrs = [('user', 'event', 'subevent', 'date')]
    methods = {
        'read': METHOD(permissions=[PERM(privilege='read')]),
        'create': METHOD(
            permissions=[PERM(privilege='__sys')],
            doc_args={
                'event': ATTR.STR(),
                'subevent': ATTR.ANY(),
                'args': ATTR.KV_DICT(key=ATTR.STR(), val=ATTR.ANY()),
            },
        ),
        'update': METHOD(permissions=[PERM(privilege='__sys')]),
        'delete': METHOD(permissions=[PERM(privilege='delete')]),
    }

    async def pre_create(self, skip_events, env, query, doc, payload):
        '''Deduplicate analytics events.

        If a doc for the same (user, event, subevent, date) already exists,
        append this occurrence to it and add its score; otherwise rewrite
        `doc` so the normal create flow records the first occurrence.
        '''
        # [DOC] Look for an existing doc for same user/event/subevent/date
        analytic_results = await self.read(
            skip_events=[Event.PERM],
            env=env,
            query=[
                {
                    'user': env['session'].user._id,
                    'event': doc['event'],
                    'subevent': doc['subevent'],
                    'date': datetime.date.today().isoformat(),
                },
                {'$limit': 1},
            ],
        )
        if analytic_results.args.count:
            # [DOC] Matching doc found: append the occurrence and add its score.
            # 'score' Doc Arg is optional and defaults to 0.
            analytic_results = await self.update(
                skip_events=[Event.PERM],
                env=env,
                query=[{'_id': analytic_results.args.docs[0]._id}],
                doc={
                    'occurrences': {
                        '$append': {
                            'args': doc['args'],
                            'score': doc.get('score', 0),
                            'create_time': datetime.datetime.utcnow().isoformat(),
                        }
                    },
                    'score': {'$add': doc.get('score', 0)},
                },
            )
            if analytic_results.status == 200:
                return (skip_events, env, query, doc, {'__results': analytic_results})
            raise self.exception(
                status=analytic_results.status,
                msg=analytic_results.msg,
                args=analytic_results.args,
            )
        # [DOC] No matching doc: rewrite doc for creation of a fresh analytics doc
        doc = {
            'event': doc['event'],
            'subevent': doc['subevent'],
            'date': datetime.date.today().isoformat(),
            'occurrences': [
                {
                    'args': doc['args'],
                    'score': doc.get('score', 0),
                    'create_time': datetime.datetime.utcnow().isoformat(),
                }
            ],
            'score': doc.get('score', 0),
        }
        return (skip_events, env, query, doc, payload)
Esempio n. 9
0
class Diff(BaseModule):
	'''`Diff` module provides data type and controller for `Diff Workflow`. It is meant for use by internal calls only.'''

	collection = 'diff'
	attrs = {
		'user': ATTR.ID(desc='`_id` of `User` doc the doc belongs to.'),
		'module': ATTR.STR(desc='Name of the module the original doc is part of.'),
		'doc': ATTR.ID(desc='`_id` of the original doc.'),
		'vars': ATTR.KV_DICT(
			desc='Key-value `dict` containing all attrs that have been updated from the original doc.',
			key=ATTR.STR(),
			val=ATTR.ANY(),
		),
		'remarks': ATTR.STR(
			desc='Human-readable remarks of the doc. This is introduced to allow developers to add log messages to diff docs.'
		),
		'create_time': ATTR.DATETIME(
			desc='Python `datetime` ISO format of the doc creation.'
		),
	}
	defaults = {'doc': None, 'remarks': ''}
	methods = {
		'read': METHOD(permissions=[PERM(privilege='read')]),
		'create': METHOD(permissions=[PERM(privilege='__sys')]),
		'delete': METHOD(permissions=[PERM(privilege='delete')]),
	}

	async def pre_create(self, skip_events, env, query, doc, payload):
		'''Resolve the update query into concrete `_id`s and create one diff doc per matched doc.'''
		# [DOC] format Doc Oper with prefixed underscores to avoid data errors
		doc = self.format_doc_oper(doc=doc)
		# [DOC] Detect non-_id update query:
		if '_id' not in query:
			results = await Registry.module(doc['module']).read(
				skip_events=[Event.PERM], env=env, query=query
			)
			if results.args.count > 1:
				# [DOC] renamed comprehension variable: it previously shadowed the `doc` parameter
				query.append(
					{'_id': {'$in': [results_doc._id for results_doc in results.args.docs]}}
				)
			elif results.args.count == 1:
				query.append({'_id': results.args.docs[0]._id})
			else:
				raise self.exception(
					status=400, msg='No update docs matched.', args={'code': 'NO_MATCH'}
				)
		if '_id' in query and type(query['_id'][0]) == list:
			# [DOC] Multiple matched _ids: create a diff doc for each but the last,
			# then let the normal create flow handle the last one
			for i in range(len(query['_id'][0]) - 1):
				# [DOC] BUGFIX: self.create is a coroutine and was previously never awaited
				await self.create(
					skip_events=[Event.PERM],
					env=env,
					query=[{'_id': query['_id'][0][i]}],
					doc=doc,
				)
			query['_id'][0] = query['_id'][0][-1]
		doc['doc'] = ObjectId(query['_id'][0])
		return (skip_events, env, query, doc, payload)

	def format_doc_oper(self, *, doc: NAWAH_DOC):
		'''Return a copy of `doc` with every Doc Oper key (leading `$`) prefixed with `__`, recursing into nested dicts.'''
		shadow_doc: NAWAH_DOC = {}
		for attr in doc.keys():
			# [DOC] startswith avoids IndexError on an empty-string key (attr[0] would raise)
			if attr.startswith('$'):
				shadow_doc[f'__{attr}'] = doc[attr]
			elif type(doc[attr]) == dict:
				shadow_doc[attr] = self.format_doc_oper(doc=doc[attr])
			else:
				shadow_doc[attr] = doc[attr]
		return shadow_doc
Esempio n. 10
0
async def _import_modules():
    '''Import all Nawah packages and their Python modules, populate `Config.modules`
    and `Config.l10n`, then validate package-required Vars and initialise modules.

    Calls `exit(1)` on any fatal configuration problem (reserved/duplicate module
    name, conflicting user attrs, missing/invalid Var).
    '''
    sys.path.append(os.path.join(nawah.__path__[0], 'packages'))
    sys.path.append(os.path.join(Config._app_path, 'packages'))

    # [DOC] Iterate over packages in modules folder
    for _, pkgname, ispkg in pkgutil.iter_modules([
            os.path.join(nawah.__path__[0], 'packages'),
            os.path.join(Config._app_path, 'packages'),
    ]):
        if not ispkg:
            continue

        logger.debug(f'Importing package: {pkgname}')

        # [DOC] Load package and attempt to load config
        package = __import__(pkgname, fromlist='*')
        _process_config(config=package.config, pkgname=pkgname)

        # [DOC] Add package to loaded packages dict
        Config.modules_packages[pkgname] = []

        # [DOC] Iterate over Python modules in package
        # (inner loop previously shadowed outer `ispkg`; the flag is unused here)
        package_prefix = package.__name__ + '.'
        for _, modname, _ in pkgutil.iter_modules(package.__path__, package_prefix):
            module = __import__(modname, fromlist='*')
            # [DOC] For __l10n__ Python module, extract all l10n dicts and skip processing
            if modname.endswith('__l10n__'):
                for l10n_name in dir(module):
                    if type(getattr(module, l10n_name)) == L10N:
                        if l10n_name not in Config.l10n:
                            Config.l10n[l10n_name] = {}
                        Config.l10n[l10n_name].update(getattr(module, l10n_name))
                continue

            for clsname in dir(module):
                cls = getattr(module, clsname)
                # [DOC] Confirm class is subclass of BaseModule
                if (clsname != 'BaseModule' and inspect.isclass(cls)
                        and getattr(cls, '_nawah_module', False)):
                    # [DOC] Deny loading Nawah-reserved named Nawah modules
                    if clsname.lower() in ['conn', 'heart', 'watch']:
                        logger.error(
                            f'Module with Nawah-reserved name \'{clsname.lower()}\' was found. Exiting.'
                        )
                        exit(1)
                    # [DOC] Convert CamelCase class name to snake_case module name
                    module_name = re.sub(
                        r'([A-Z])', r'_\1', clsname[0].lower() + clsname[1:]
                    ).lower()
                    # [DOC] Deny duplicate Nawah modules names
                    if module_name in Config.modules:
                        logger.error(f'Duplicate module name \'{module_name}\'. Exiting.')
                        exit(1)
                    # [DOC] Add module to loaded modules dict
                    Config.modules[module_name] = cls()
                    Config.modules_packages[pkgname].append(module_name)

    # [DOC] Update User, Session modules with populated attrs
    Config.modules['user'].attrs.update(Config.user_attrs)
    if any(attr in Config.user_attrs for attr in Config.user_settings):
        # [DOC] FIX: message previously read 'At least on attr'
        logger.error(
            'At least one attr from \'user_settings\' is conflicting with an attr from \'user_attrs\'. Exiting.'
        )
        exit(1)
    Config.modules['user'].defaults['locale'] = Config.locale
    for attr in Config.user_attrs:
        Config.modules['user'].unique_attrs.append(attr)
        Config.modules['user'].attrs[f'{attr}_hash'] = ATTR.STR()
        # [DOC] Register both groups-aware and plain auth Doc Args sets for the attr
        Config.modules['session'].methods['auth'].doc_args.append({
            'hash': ATTR.STR(),
            attr: Config.user_attrs[attr],
            'groups': ATTR.LIST(list=[ATTR.ID()]),
        })
        Config.modules['session'].methods['auth'].doc_args.append({
            'hash': ATTR.STR(),
            attr: Config.user_attrs[attr],
        })

    # [DOC] Attempt to validate all packages required vars (via vars_types Config Attr) are met
    for var in Config.vars_types:
        if var not in Config.vars:
            logger.error(
                f'Package \'{Config.vars_types[var]["package"]}\' requires \'{var}\' Var, but not found in App Config. Exiting.'
            )
            exit(1)
        try:
            await validate_attr(
                mode='create',
                attr_name=var,
                attr_type=Config.vars_types[var]['type'],
                attr_val=Config.vars[var],
            )
        except Exception:
            # [DOC] FIX: was a bare 'except:' which also swallowed SystemExit/KeyboardInterrupt
            logger.error(
                f'Package \'{Config.vars_types[var]["package"]}\' requires \'{var}\' Var of type \'{Config.vars_types[var]["type"]._type}\', but validation failed. Exiting.'
            )
            exit(1)

    # [DOC] Call update_modules, effectively finalise initialising modules
    for module in Config.modules.values():
        module._initialise()
Esempio n. 11
0
def test_compile_query_step_one_step_match_attr_ID_val_str():
    '''A valid ObjectId string matched against an ID attr is converted to
    `ObjectId` in the compiled match step; prefix and suffix stay untouched.'''
    draft_flags = ('__deleted', '__create_draft', '__update_draft')
    aggregate_prefix = [
        {'$match': {flag: {'$exists': False}}} for flag in draft_flags
    ]
    aggregate_suffix = [{'$group': {'_id': '$_id'}}]
    aggregate_match = []
    _query._compile_query_step(
        aggregate_prefix=aggregate_prefix,
        aggregate_suffix=aggregate_suffix,
        aggregate_match=aggregate_match,
        collection_name='collection_name',
        attrs={'attr': ATTR.ID()},
        step={'attr': '000000000000000000000000'},
        watch_mode=False,
    )
    assert aggregate_prefix == [
        {'$match': {flag: {'$exists': False}}} for flag in draft_flags
    ]
    assert aggregate_suffix == [{'$group': {'_id': '$_id'}}]
    assert aggregate_match == [{'attr': ObjectId('000000000000000000000000')}]
Esempio n. 12
0
def test_compile_query_step_one_step_match_attr_ID_val_invalid_id(caplog):
    '''A non-ObjectId string matched against an ID attr logs a warning and is
    left unconverted in the compiled match step.'''
    draft_flags = ('__deleted', '__create_draft', '__update_draft')
    aggregate_prefix = [
        {'$match': {flag: {'$exists': False}}} for flag in draft_flags
    ]
    aggregate_suffix = [{'$group': {'_id': '$_id'}}]
    aggregate_match = []
    _query._compile_query_step(
        aggregate_prefix=aggregate_prefix,
        aggregate_suffix=aggregate_suffix,
        aggregate_match=aggregate_match,
        collection_name='collection_name',
        attrs={'attr': ATTR.ID()},
        step={'attr': 'match_term'},
        watch_mode=False,
    )
    for record in caplog.records:
        assert record.levelname == 'WARNING'
        assert record.message == 'Failed to convert attr to ObjectId: match_term'
    assert aggregate_prefix == [
        {'$match': {flag: {'$exists': False}}} for flag in draft_flags
    ]
    assert aggregate_suffix == [{'$group': {'_id': '$_id'}}]
    assert aggregate_match == [{'attr': 'match_term'}]
Esempio n. 13
0
def test_compile_query_step_one_step_match_attr_ID_val_list_invalid_id(caplog):
    '''An `$in` list holding a non-ObjectId string logs a warning per value and
    leaves the match step values unconverted.'''
    aggregate_prefix = [
        {
            '$match': {
                '__deleted': {
                    '$exists': False
                }
            }
        },
        {
            '$match': {
                '__create_draft': {
                    '$exists': False
                }
            }
        },
        {
            '$match': {
                '__update_draft': {
                    '$exists': False
                }
            }
        },
    ]
    aggregate_suffix = [{'$group': {'_id': '$_id'}}]
    aggregate_match = []
    collection_name = 'collection_name'
    attrs = {'attr': ATTR.ID()}
    step = {'attr': {'$in': ['match_term']}}
    watch_mode = False
    _query._compile_query_step(
        aggregate_prefix=aggregate_prefix,
        aggregate_suffix=aggregate_suffix,
        aggregate_match=aggregate_match,
        collection_name=collection_name,
        attrs=attrs,
        step=step,
        watch_mode=watch_mode,
    )
    # [DOC] Allow caplog.records to exceed 1 in case other emitters log warnings
    assert len(caplog.records) >= 1
    for record in caplog.records:
        assert record.levelname == 'WARNING'
        assert record.message == 'Failed to convert child_attr to ObjectId: match_term'
    assert aggregate_prefix == [
        {
            '$match': {
                '__deleted': {
                    '$exists': False
                }
            }
        },
        {
            '$match': {
                '__create_draft': {
                    '$exists': False
                }
            }
        },
        {
            '$match': {
                '__update_draft': {
                    '$exists': False
                }
            }
        },
    ]
    assert aggregate_suffix == [{'$group': {'_id': '$_id'}}]
    assert aggregate_match == [{'attr': {'$in': ['match_term']}}]
Esempio n. 14
0
class Setting(BaseModule):
	'''`Setting` module provides data type and controller for settings in Nawah eco-system. This is used by `User` module to provide additional user-wise settings. It also allows for global-typed settings.'''

	collection = 'settings'
	attrs = {
		'user': ATTR.ID(desc='`_id` of `User` doc the doc belongs to.'),
		'var': ATTR.STR(
			desc='Name of the setting. This is unique for every `user` in the module.'
		),
		'val': ATTR.ANY(desc='Value of the setting.'),
		'val_type': ATTR.DYNAMIC_ATTR(),
		'type': ATTR.LITERAL(
			desc='Type of the setting. This sets whether setting is global, or belong to user, and whether use can update it or not.',
			literal=['global', 'user', 'user_sys'],
		),
	}
	diff = True
	unique_attrs = [('user', 'var', 'type')]
	extns = {
		'val': ATTR.TYPE(type=attr_extn_val),
	}
	methods = {
		'read': METHOD(
			permissions=[
				PERM(privilege='admin', query_mod={'$limit': 1}),
				PERM(
					privilege='read',
					query_mod={
						'user': '******',
						'type': ATTR.TYPE(type=attr_query_mod_type),
						'$limit': 1,
					},
				),
			],
			query_args=[
				{
					'_id': ATTR.ID(),
					'type': ATTR.LITERAL(literal=['global', 'user', 'user_sys']),
				},
				{
					'var': ATTR.STR(),
					'type': ATTR.LITERAL(literal=['global']),
				},
				{
					'var': ATTR.STR(),
					'user': ATTR.ID(),
					'type': ATTR.LITERAL(literal=['user', 'user_sys']),
				},
			],
		),
		'create': METHOD(
			permissions=[
				PERM(privilege='admin'),
				PERM(privilege='create', doc_mod={'type': 'user'}),
			]
		),
		'update': METHOD(
			permissions=[
				PERM(privilege='admin', query_mod={'$limit': 1}),
				PERM(
					privilege='update',
					query_mod={'type': 'user', 'user': '******', '$limit': 1},
					doc_mod={'var': None, 'val_type': None, 'type': None},
				),
			],
			query_args=[
				{
					'_id': ATTR.ID(),
					'type': ATTR.LITERAL(literal=['global', 'user', 'user_sys']),
				},
				{
					'var': ATTR.STR(),
					'type': ATTR.LITERAL(literal=['global']),
				},
				{
					'var': ATTR.STR(),
					'user': ATTR.ID(),
					'type': ATTR.LITERAL(literal=['user', 'user_sys']),
				},
			],
		),
		'delete': METHOD(
			permissions=[PERM(privilege='admin', query_mod={'$limit': 1})],
			query_args=[{'_id': ATTR.ID()}, {'var': ATTR.STR()}],
		),
		'retrieve_file': METHOD(
			permissions=[PERM(privilege='*', query_mod={'type': 'global'})],
			get_method=True,
		),
	}

	async def on_create(self, results, skip_events, env, query, doc, payload):
		'''Mirror a newly created user setting onto the cached session user doc.'''
		if doc['type'] in ['user', 'user_sys']:
			if doc['user'] == env['session'].user._id and doc['var'] in Config.user_doc_settings:
				env['session'].user[doc['var']] = doc['val']
		return (results, skip_events, env, query, doc, payload)

	async def pre_update(self, skip_events, env, query, doc, payload):
		'''Validate the updated `val` against the Setting doc `val_type` before allowing the update.'''
		# [DOC] Locate 'val' (or a 'val.' child attr) in the update doc
		for attr in doc.keys():
			if attr == 'val' or attr.startswith('val.'):
				val_attr = attr
				break
		else:
			raise self.exception(
				status=400,
				msg='Could not match doc with any of the required doc_args. Failed sets:[\'val\': Missing]',
				args={'code': 'INVALID_DOC'},
			)

		setting_results = await self.read(skip_events=[Event.PERM], env=env, query=query)
		if not setting_results.args.count:
			raise self.exception(
				status=400, msg='Invalid Setting doc', args={'code': 'INVALID_SETTING'}
			)

		setting = setting_results.args.docs[0]
		# [DOC] Attempt to validate val against Setting val_type.
		# FIX: initialisation moved out of the try block; the old
		# 'exception_raised: Exception = None' annotation was also wrong (None is not an Exception).
		exception_raised = None
		try:
			setting_val_type, _ = generate_dynamic_attr(dynamic_attr=setting.val_type)
			await validate_doc(
				mode='update',
				doc=doc,
				attrs={'val': setting_val_type},
				skip_events=skip_events,
				env=env,
				query=query,
			)
		except Exception as e:
			exception_raised = e

		# [DOC] PEP 8: compare to None with 'is'
		if exception_raised or doc[val_attr] is None:
			raise self.exception(
				status=400,
				msg=f'Invalid value for Setting doc of type \'{type(doc[val_attr])}\' with required type \'{setting.val_type}\'',
				args={'code': 'INVALID_ATTR'},
			)

		return (skip_events, env, query, doc, payload)

	async def on_update(self, results, skip_events, env, query, doc, payload):
		'''Mirror successful user-setting updates onto the cached session user doc.'''
		# [TODO] Update according to the changes of Doc Opers
		try:
			if (
				query['type'][0] in ['user', 'user_sys']
				and query['user'][0] == env['session'].user._id
				and query['var'][0] in Config.user_doc_settings
			):
				user_var = query['var'][0]
				val = doc['val']
				val_is_oper = type(val) == dict
				if val_is_oper and '$add' in val:
					env['session'].user[user_var] += val['$add']
				elif val_is_oper and '$multiply' in val:
					env['session'].user[user_var] *= val['$multiply']
				elif val_is_oper and '$append' in val:
					env['session'].user[user_var].append(val['$append'])
				elif val_is_oper and '$set_index' in val:
					# NOTE(review): index is read from '$index', not '$set_index' — confirm against Doc Oper format
					env['session'].user[user_var][val['$index']] = val['$set_index']
				elif val_is_oper and '$del_val' in val:
					for del_val in val['$del_val']:
						env['session'].user[user_var].remove(del_val)
				elif val_is_oper and '$del_index' in val:
					del env['session'].user[user_var][val['$index']]
				else:
					env['session'].user[user_var] = val
		except Exception:
			# [DOC] Best-effort cache update; failures are deliberately ignored
			# (FIX: narrowed from a bare 'except:' which also caught SystemExit/KeyboardInterrupt)
			pass
		return (results, skip_events, env, query, doc, payload)
Esempio n. 15
0
class Session(BaseModule):
	'''`Session` module provides data type and controller for sessions in Nawah eco-system. CRUD methods of the module are supposed to used for internal calls only, while methods `auth`, `reauth`, and `signout` are available for use by API as well as internal calls when needed.'''

	collection = 'sessions'
	attrs = {
		'user': ATTR.ID(desc='`_id` of `User` doc the doc belongs to.'),
		'groups': ATTR.LIST(
			desc='List of `_id` for every group the session is authenticated against. This attr is set by `auth` method when called with `groups` Doc Arg for Controller Auth Sequence.',
			list=[ATTR.ID(desc='`_id` of Group doc the session is authenticated against.')],
		),
		'host_add': ATTR.IP(desc='IP of the host the user used to authenticate.'),
		'user_agent': ATTR.STR(desc='User-agent of the app the user used to authenticate.'),
		'expiry': ATTR.DATETIME(desc='Python `datetime` ISO format of session expiry.'),
		'token_hash': ATTR.STR(desc='Hashed system-generated session token.'),
		'create_time': ATTR.DATETIME(
			desc='Python `datetime` ISO format of the doc creation.'
		),
	}
	defaults = {'groups': []}
	extns = {'user': EXTN(module='user', attrs=['*'], force=True)}
	methods = {
		'read': METHOD(permissions=[PERM(privilege='read', query_mod={'user': '******'})]),
		'create': METHOD(permissions=[PERM(privilege='create')]),
		'update': METHOD(
			permissions=[
				PERM(
					privilege='update',
					query_mod={'user': '******'},
					doc_mod={'user': None},
				)
			],
			query_args={'_id': ATTR.ID()},
		),
		'delete': METHOD(
			permissions=[PERM(privilege='delete', query_mod={'user': '******'})],
			query_args={'_id': ATTR.ID()},
		),
		'auth': METHOD(permissions=[PERM(privilege='*')], doc_args=[]),
		'reauth': METHOD(
			permissions=[PERM(privilege='*')],
			query_args=[
				{
					'_id': ATTR.ID(),
					'token': ATTR.STR(),
					'groups': ATTR.LIST(list=[ATTR.ID()]),
				},
				{'_id': ATTR.ID(), 'token': ATTR.STR()},
			],
		),
		'signout': METHOD(
			permissions=[PERM(privilege='*')],
			query_args={'_id': ATTR.ID()},
		),
	}

	async def _record_analytic(self, env, *, event, subevent, args):
		'''Creates an `Analytic` doc for given event, logging an error on failure.

		Best-effort helper shared by `auth`, `reauth`, and `signout`; a failed
		Analytic create never affects the calling auth flow.
		'''
		analytic_doc = {'event': event, 'subevent': subevent, 'args': args}
		analytic_results = await Registry.module('analytic').create(
			skip_events=[Event.PERM], env=env, doc=analytic_doc
		)
		if analytic_results.status != 200:
			logger.error(
				f'Failed to create \'Analytic\' doc: {analytic_doc}. Results: {analytic_results}'
			)

	async def auth(self, skip_events=None, env=None, query=None, doc=None):
		'''Authenticates a user against one of `User` module unique attrs and creates a `Session` doc.

		`doc` must carry one unique attr of `User` module plus `hash` of the
		credentials; optional `groups` enables Controller Auth Sequence. On
		success returns status 200 with the created session, including the
		clear-text session `token` and the user doc with resolved privileges.
		Raises `INVALID_CREDS` for wrong or missing credentials, and
		`INVALID_USER` for banned/deleted/password-disabled users (unless
		`Event.ON` is skipped).
		'''
		# [DOC] Normalise args to avoid shared mutable defaults across calls
		skip_events = skip_events if skip_events is not None else []
		env = env if env is not None else {}
		query = query if query is not None else []
		doc = doc if doc is not None else {}
		key = None
		for attr in Registry.module('user').unique_attrs:
			if attr in doc.keys():
				key = attr
				break
		# [DOC] Fix: previously an unmatched doc raised NameError on unbound 'key';
		#       reject it explicitly with the same code as wrong credentials
		if key is None:
			raise self.exception(
				status=403,
				msg='Wrong auth credentials.',
				args={'code': 'INVALID_CREDS'},
			)

		user_query = [{key: doc[key], '$limit': 1}]
		if 'groups' in doc.keys():
			user_query.append([{'groups': {'$in': doc['groups']}}, {'privileges': {'*': ['*']}}])
		user_results = await Registry.module('user').read(
			skip_events=[Event.PERM, Event.ON], env=env, query=user_query
		)
		if not user_results.args.count or not pbkdf2_sha512.verify(
			doc['hash'],
			user_results.args.docs[0][f'{key}_hash'],
		):
			raise self.exception(
				status=403,
				msg='Wrong auth credentials.',
				args={'code': 'INVALID_CREDS'},
			)

		user = user_results.args.docs[0]

		if Event.ON not in skip_events:
			if user.status in ['banned', 'deleted']:
				raise self.exception(
					status=403,
					msg=f'User is {user.status}.',
					args={'code': 'INVALID_USER'},
				)

			elif user.status == 'disabled_password':
				raise self.exception(
					status=403,
					msg='User password is disabled.',
					args={'code': 'INVALID_USER'},
				)

		# [DOC] Session token is generated with CSPRNG and only its hash is persisted
		token = secrets.token_urlsafe(32)
		session = {
			'user': user._id,
			'groups': doc['groups'] if 'groups' in doc.keys() else [],
			'host_add': env['REMOTE_ADDR'],
			'user_agent': env['HTTP_USER_AGENT'],
			'expiry': (datetime.datetime.utcnow() + datetime.timedelta(days=30)).isoformat(),
			'token_hash': pbkdf2_sha512.using(rounds=100000).hash(token),
		}

		results = await self.create(skip_events=[Event.PERM], env=env, doc=session)
		if results.status != 200:
			return results

		# [DOC] Return the clear-text token to the caller, never the stored hash
		session['_id'] = results.args.docs[0]._id
		session['user'] = user
		del session['token_hash']
		session['token'] = token
		results.args.docs[0] = BaseModel(session)

		# [DOC] read user privileges and return them
		user_results = await Registry.module('user').read_privileges(
			skip_events=[Event.PERM], env=env, query=[{'_id': user._id}]
		)
		if user_results.status != 200:
			return user_results
		results.args.docs[0]['user'] = user_results.args.docs[0]

		# [DOC] Create CONN_AUTH Analytic doc
		if Config.analytics_events['session_conn_auth']:
			await self._record_analytic(
				env,
				event='CONN_AUTH',
				subevent=env['client_app'],
				args={
					'user': user_results.args.docs[0]._id,
					'session': results.args.docs[0]._id,
					'REMOTE_ADDR': env['REMOTE_ADDR'],
					'HTTP_USER_AGENT': env['HTTP_USER_AGENT'],
				},
			)
		# [DOC] Create USER_AUTH Analytic doc
		if Config.analytics_events['session_user_auth']:
			await self._record_analytic(
				env,
				event='USER_AUTH',
				subevent=user_results.args.docs[0]._id,
				args={
					'session': results.args.docs[0]._id,
					'REMOTE_ADDR': env['REMOTE_ADDR'],
					'HTTP_USER_AGENT': env['HTTP_USER_AGENT'],
					'client_app': env['client_app'],
				},
			)

		return self.status(
			status=200,
			msg='You were successfully authed.',
			args={'session': results.args.docs[0]},
		)

	async def reauth(self, skip_events=None, env=None, query=None, doc=None):
		'''Re-authenticates an existing session by `_id` and clear-text `token`, renewing its expiry.

		Raises `ANON_REAUTH` for the anonymous (`__ANON`) session,
		`INVALID_SESSION` for unknown sessions, `INVALID_REAUTH_HASH` for a
		wrong token, and `SESSION_EXPIRED` (deleting the session first) when
		past expiry. On success updates the user `login_time`, extends the
		session expiry by 30 days, and returns the session with resolved user
		privileges.
		'''
		# [DOC] Normalise args to avoid shared mutable defaults across calls
		skip_events = skip_events if skip_events is not None else []
		env = env if env is not None else {}
		query = query if query is not None else []
		doc = doc if doc is not None else {}
		if str(query['_id'][0]) == 'f00000000000000000000012':
			raise self.exception(
				status=400,
				msg='Reauth is not required for \'__ANON\' user.',
				args={'code': 'ANON_REAUTH'},
			)

		session_query = [{'_id': query['_id'][0]}]
		if 'groups' in query:
			session_query.append({'groups': {'$in': query['groups'][0]}})
		results = await self.read(skip_events=[Event.PERM], env=env, query=session_query)
		if not results.args.count:
			raise self.exception(
				status=403, msg='Session is invalid.', args={'code': 'INVALID_SESSION'}
			)

		if not pbkdf2_sha512.verify(query['token'][0], results.args.docs[0].token_hash):
			raise self.exception(
				status=403,
				msg='Reauth token hash invalid.',
				args={'code': 'INVALID_REAUTH_HASH'},
			)

		del results.args.docs[0]['token_hash']
		results.args.docs[0]['token'] = query['token'][0]

		# [DOC] ISO-format strings compare chronologically, so string comparison is safe here
		if results.args.docs[0].expiry < datetime.datetime.utcnow().isoformat():
			results = await self.delete(
				skip_events=[Event.PERM, Event.SOFT],
				env=env,
				query=[{'_id': env['session']._id}],
			)
			raise self.exception(
				status=403, msg='Session had expired.', args={'code': 'SESSION_EXPIRED'}
			)

		# [DOC] update user's last_login timestamp
		await Registry.module('user').update(
			skip_events=[Event.PERM],
			env=env,
			query=[{'_id': results.args.docs[0].user}],
			doc={'login_time': datetime.datetime.utcnow().isoformat()},
		)
		await self.update(
			skip_events=[Event.PERM],
			env=env,
			query=[{'_id': results.args.docs[0]._id}],
			doc={
				'expiry': (datetime.datetime.utcnow() + datetime.timedelta(days=30)).isoformat()
			},
		)
		# [DOC] read user privileges and return them
		user_results = await Registry.module('user').read_privileges(
			skip_events=[Event.PERM],
			env=env,
			query=[{'_id': results.args.docs[0].user._id}],
		)
		results.args.docs[0]['user'] = user_results.args.docs[0]

		# [DOC] Create CONN_REAUTH Analytic doc
		if Config.analytics_events['session_conn_reauth']:
			await self._record_analytic(
				env,
				event='CONN_REAUTH',
				subevent=env['client_app'],
				args={
					'user': user_results.args.docs[0]._id,
					'session': results.args.docs[0]._id,
					'REMOTE_ADDR': env['REMOTE_ADDR'],
					'HTTP_USER_AGENT': env['HTTP_USER_AGENT'],
				},
			)
		# [DOC] Create USER_REAUTH Analytic doc
		if Config.analytics_events['session_user_reauth']:
			await self._record_analytic(
				env,
				event='USER_REAUTH',
				subevent=user_results.args.docs[0]._id,
				args={
					'session': results.args.docs[0]._id,
					'REMOTE_ADDR': env['REMOTE_ADDR'],
					'HTTP_USER_AGENT': env['HTTP_USER_AGENT'],
					'client_app': env['client_app'],
				},
			)

		return self.status(
			status=200,
			msg='You were successfully reauthed.',
			args={'session': results.args.docs[0]},
		)

	async def signout(self, skip_events=None, env=None, query=None, doc=None):
		'''Signs out the session identified by `query['_id']` by deleting the current session doc.

		Raises `ANON_SIGNOUT` for the anonymous (`__ANON`) session and
		`INVALID_SESSION` for unknown sessions. On success returns the
		anonymous session placeholder so the connection falls back to `__ANON`.
		'''
		# [DOC] Normalise args to avoid shared mutable defaults across calls
		skip_events = skip_events if skip_events is not None else []
		env = env if env is not None else {}
		query = query if query is not None else []
		doc = doc if doc is not None else {}
		if str(query['_id'][0]) == 'f00000000000000000000012':
			raise self.exception(
				status=400,
				# [DOC] Fix: corrected 'Singout' typo in user-facing message
				msg='Signout is not allowed for \'__ANON\' user.',
				args={'code': 'ANON_SIGNOUT'},
			)

		results = await self.read(
			skip_events=[Event.PERM], env=env, query=[{'_id': query['_id'][0]}]
		)

		if not results.args.count:
			raise self.exception(
				status=403, msg='Session is invalid.', args={'code': 'INVALID_SESSION'}
			)

		results = await self.delete(
			skip_events=[Event.PERM], env=env, query=[{'_id': env['session']._id}]
		)

		# [DOC] Create CONN_DEAUTH Analytic doc
		if Config.analytics_events['session_conn_deauth']:
			await self._record_analytic(
				env,
				event='CONN_DEAUTH',
				subevent=env['client_app'],
				args={
					'user': env['session'].user._id,
					'session': env['session']._id,
					'REMOTE_ADDR': env['REMOTE_ADDR'],
					'HTTP_USER_AGENT': env['HTTP_USER_AGENT'],
				},
			)
		# [DOC] Create USER_DEAUTH Analytic doc
		if Config.analytics_events['session_user_deauth']:
			await self._record_analytic(
				env,
				event='USER_DEAUTH',
				subevent=env['session'].user._id,
				args={
					'session': env['session']._id,
					'REMOTE_ADDR': env['REMOTE_ADDR'],
					'HTTP_USER_AGENT': env['HTTP_USER_AGENT'],
					'client_app': env['client_app'],
				},
			)

		return self.status(
			status=200,
			msg='You are successfully signed-out.',
			args={'session': DictObj({'_id': 'f00000000000000000000012'})},
		)
Esempio n. 16
0
class User(BaseModule):
    '''`User` module provides data type and controller for users in Nawah eco-system. The permissions of the module methods are designed to be as secure for exposed calls, and as flexible for privileged-access.'''

    collection = 'users'
    attrs = {
        'name':
        ATTR.LOCALE(desc='Name of the user as `LOCALE`.'),
        'locale':
        ATTR.LOCALES(desc='Default locale of the user.'),
        'create_time':
        ATTR.DATETIME(
            desc='Python `datetime` ISO format of the doc creation.'),
        'login_time':
        ATTR.DATETIME(desc='Python `datetime` ISO format of the last login.'),
        'groups':
        ATTR.LIST(
            desc='List of `_id` for every group the user is member of.',
            list=[ATTR.ID(desc='`_id` of Group doc the user is member of.')],
        ),
        'privileges':
        ATTR.KV_DICT(
            desc=
            'Privileges of the user. These privileges are always available to the user regardless of whether groups user is part of have them or not.',
            key=ATTR.STR(),
            val=ATTR.LIST(list=[ATTR.STR()]),
        ),
        'status':
        ATTR.LITERAL(
            desc=
            'Status of the user to determine whether user has access to the app or not.',
            literal=['active', 'banned', 'deleted', 'disabled_password'],
        ),
    }
    defaults = {
        'login_time': None,
        'status': 'active',
        'groups': [],
        'privileges': {}
    }
    unique_attrs = []
    methods = {
        'read':
        METHOD(permissions=[
            PERM(privilege='admin'),
            PERM(privilege='read', query_mod={'_id': '$__user'}),
        ]),
        'create':
        METHOD(permissions=[PERM(privilege='admin')]),
        'update':
        METHOD(
            permissions=[
                PERM(privilege='admin', doc_mod={'groups': None}),
                PERM(
                    privilege='update',
                    query_mod={'_id': '$__user'},
                    doc_mod={
                        'groups': None,
                        'privileges': None
                    },
                ),
            ],
            query_args={'_id': ATTR.ID()},
        ),
        'delete':
        METHOD(
            permissions=[
                PERM(privilege='admin'),
                PERM(privilege='delete', query_mod={'_id': '$__user'}),
            ],
            query_args={'_id': ATTR.ID()},
        ),
        'read_privileges':
        METHOD(
            permissions=[
                PERM(privilege='admin'),
                PERM(privilege='read', query_mod={'_id': '$__user'}),
            ],
            query_args={'_id': ATTR.ID()},
        ),
        'add_group':
        METHOD(
            permissions=[PERM(privilege='admin')],
            query_args={'_id': ATTR.ID()},
            doc_args=[{
                'group': ATTR.ID()
            }, {
                'group': ATTR.LIST(list=[ATTR.ID()])
            }],
        ),
        'delete_group':
        METHOD(
            permissions=[PERM(privilege='admin')],
            query_args={
                '_id': ATTR.ID(),
                'group': ATTR.ID()
            },
        ),
        'retrieve_file':
        METHOD(permissions=[PERM(privilege='__sys')], get_method=True),
        'create_file':
        METHOD(permissions=[PERM(privilege='__sys')]),
        'delete_file':
        METHOD(permissions=[PERM(privilege='__sys')]),
    }

    async def on_read(self, results, skip_events, env, query, doc, payload):
        '''Strips credential hashes from read results and merges user-doc Settings into each `User` doc.'''
        for user in results['docs']:
            # [DOC] Never expose credential hashes to callers
            for auth_attr in Config.user_attrs.keys():
                del user[f'{auth_attr}_hash']
            if len(Config.user_doc_settings):
                setting_results = await Registry.module('setting').read(
                    skip_events=[Event.PERM, Event.ARGS],
                    env=env,
                    query=[{
                        'user': user._id,
                        'var': {
                            '$in': Config.user_doc_settings
                        }
                    }],
                )
                user_doc_settings = copy.copy(Config.user_doc_settings)
                if setting_results.args.count:
                    for setting_doc in setting_results.args.docs:
                        user_doc_settings.remove(setting_doc['var'])
                        user[setting_doc['var']] = setting_doc['val']
                # [DOC] Forward-compatibility: If user was created before presence of any user_doc_settings, add them with default value
                for setting_attr in user_doc_settings:
                    user[setting_attr] = Config.user_settings[
                        setting_attr].default
                    # [DOC] Set NAWAH_VALUES.NONE_VALUE to None if it was default
                    if user[setting_attr] == NAWAH_VALUES.NONE_VALUE:
                        user[setting_attr] = None
        return (results, skip_events, env, query, doc, payload)

    async def pre_create(self, skip_events, env, query, doc, payload):
        '''Assigns the default group and collects/validates user Settings values into `payload['user_settings']`.

        Raises `INVALID_ATTR` when a provided settings attr fails validation,
        and `MISSING_ATTR` when a required settings attr (one whose default is
        `NAWAH_VALUES.NONE_VALUE`) is absent from `doc`.
        '''
        if Event.ARGS not in skip_events:
            # [DOC] Default group all users are members of
            doc['groups'] = [ObjectId('f00000000000000000000013')]
        user_settings = {}
        for attr in Config.user_settings.keys():
            if Config.user_settings[attr].type == 'user_sys':
                # [DOC] deepcopy guards the shared default against later mutation
                user_settings[attr] = copy.deepcopy(
                    Config.user_settings[attr].default)
            else:
                if attr in doc.keys():
                    try:
                        await validate_attr(
                            mode='create',
                            attr_name=attr,
                            attr_type=Config.user_settings[attr].val_type,
                            attr_val=doc[attr],
                        )
                        user_settings[attr] = doc[attr]
                    # [DOC] Narrowed from bare 'except' so system-exiting
                    #       exceptions propagate
                    except Exception:
                        raise self.exception(
                            status=400,
                            msg=
                            f'Invalid settings attr \'{attr}\' for \'create\' request on module \'CORE_USER\'',
                            args={'code': 'INVALID_ATTR'},
                        )

                else:
                    if Config.user_settings[
                            attr].default == NAWAH_VALUES.NONE_VALUE:
                        raise self.exception(
                            status=400,
                            msg=
                            f'Missing settings attr \'{attr}\' for \'create\' request on module \'CORE_USER\'',
                            args={'code': 'MISSING_ATTR'},
                        )

                    else:
                        user_settings[attr] = copy.deepcopy(
                            Config.user_settings[attr].default)
        payload['user_settings'] = user_settings
        return (skip_events, env, query, doc, payload)

    async def on_create(self, results, skip_events, env, query, doc, payload):
        '''Creates one `Setting` doc per entry of `payload['user_settings']` for the newly-created user.'''
        if 'user_settings' in payload.keys():
            for setting in payload['user_settings'].keys():
                setting_results = await Registry.module('setting').create(
                    skip_events=[Event.PERM, Event.ARGS],
                    env=env,
                    doc={
                        'user':
                        results['docs'][0]._id,
                        'var':
                        setting,
                        'val_type':
                        encode_attr_type(
                            attr_type=Config.user_settings[setting].val_type),
                        'val':
                        payload['user_settings'][setting],
                        'type':
                        Config.user_settings[setting].type,
                    },
                )
                if setting_results.status != 200:
                    return setting_results
        return (results, skip_events, env, query, doc, payload)

    async def read_privileges(self, skip_events=None, env=None, query=None, doc=None):
        '''Reads the user of `query['_id']` and merges privileges of all groups the user is member of into `user.privileges`.

        Raises `INVALID_USER` if the `_id` matches no user.
        '''
        # [DOC] Normalise args to avoid shared mutable defaults across calls
        skip_events = skip_events if skip_events is not None else []
        env = env if env is not None else {}
        query = query if query is not None else []
        doc = doc if doc is not None else {}
        # [DOC] Confirm _id is valid
        results = await self.read(skip_events=[Event.PERM],
                                  env=env,
                                  query=[{
                                      '_id': query['_id'][0]
                                  }])
        if not results.args.count:
            raise self.exception(status=400,
                                 msg='User is invalid.',
                                 args={'code': 'INVALID_USER'})
        user = results.args.docs[0]
        for group in user.groups:
            group_results = await Registry.module('group').read(
                skip_events=[Event.PERM], env=env, query=[{
                    '_id': group
                }])
            group = group_results.args.docs[0]
            # [DOC] Union group privileges into user privileges, avoiding duplicates
            for privilege in group.privileges.keys():
                if privilege not in user.privileges.keys():
                    user.privileges[privilege] = []
                for group_privilege in group.privileges[privilege]:
                    if group_privilege not in user.privileges[privilege]:
                        user.privileges[privilege].append(group_privilege)
        return results

    async def add_group(self, skip_events=None, env=None, query=None, doc=None):
        '''Adds the group (or list of groups) of `doc['group']` to the user of `query['_id']`.

        Raises `INVALID_GROUP` for unknown groups, `INVALID_USER` for unknown
        users, and `GROUP_ADDED` when the user is already a member. If the
        updated user is the current session user, the session user doc is
        refreshed with the new privileges.
        '''
        # [DOC] Normalise args to avoid shared mutable defaults across calls
        skip_events = skip_events if skip_events is not None else []
        env = env if env is not None else {}
        query = query if query is not None else []
        doc = doc if doc is not None else {}
        # [DOC] Check for list group attr
        if isinstance(doc['group'], list):
            # [DOC] Recursively add all but the last group; the last one is
            #       handled by the remainder of this call
            for group in doc['group'][:-1]:
                await self.add_group(
                    skip_events=skip_events,
                    env=env,
                    query=query,
                    doc={'group': group},
                )
            doc['group'] = doc['group'][-1]
        # [DOC] Confirm all basic args are provided
        doc['group'] = ObjectId(doc['group'])
        # [DOC] Confirm group is valid
        results = await Registry.module('group').read(skip_events=[Event.PERM],
                                                      env=env,
                                                      query=[{
                                                          '_id':
                                                          doc['group']
                                                      }])
        if not results.args.count:
            raise self.exception(status=400,
                                 msg='Group is invalid.',
                                 args={'code': 'INVALID_GROUP'})
        # [DOC] Get user details
        results = await self.read(skip_events=[Event.PERM],
                                  env=env,
                                  query=query)
        if not results.args.count:
            raise self.exception(status=400,
                                 msg='User is invalid.',
                                 args={'code': 'INVALID_USER'})
        user = results.args.docs[0]
        # [DOC] Confirm group was not added before
        if doc['group'] in user.groups:
            raise self.exception(
                status=400,
                msg='User is already a member of the group.',
                args={'code': 'GROUP_ADDED'},
            )
        user.groups.append(doc['group'])
        # [DOC] Update the user
        results = await self.update(skip_events=[Event.PERM],
                                    env=env,
                                    query=query,
                                    doc={'groups': user.groups})
        # [DOC] if update fails, return update results
        if results.status != 200:
            return results
        # [DOC] Check if the updated User doc belongs to current session and update it
        if env['session'].user._id == user._id:
            user_results = await self.read_privileges(skip_events=[Event.PERM],
                                                      env=env,
                                                      query=[{
                                                          '_id': user._id
                                                      }])
            env['session']['user'] = user_results.args.docs[0]

        return results

    async def delete_group(self, skip_events=None, env=None, query=None, doc=None):
        '''Removes the group of `query['group']` from the user of `query['_id']`.

        Raises `INVALID_GROUP` for unknown groups, `INVALID_USER` for unknown
        users, and `GROUP_NOT_ADDED` when the user is not a member. If the
        updated user is the current session user, the session user doc is
        refreshed with the new privileges.
        '''
        # [DOC] Normalise args to avoid shared mutable defaults across calls
        skip_events = skip_events if skip_events is not None else []
        env = env if env is not None else {}
        query = query if query is not None else []
        doc = doc if doc is not None else {}
        # [DOC] Confirm group is valid
        results = await Registry.module('group').read(skip_events=[Event.PERM],
                                                      env=env,
                                                      query=[{
                                                          '_id':
                                                          query['group'][0]
                                                      }])
        if not results.args.count:
            raise self.exception(status=400,
                                 msg='Group is invalid.',
                                 args={'code': 'INVALID_GROUP'})
        # [DOC] Get user details
        results = await self.read(skip_events=[Event.PERM],
                                  env=env,
                                  query=[{
                                      '_id': query['_id'][0]
                                  }])
        if not results.args.count:
            raise self.exception(status=400,
                                 msg='User is invalid.',
                                 args={'code': 'INVALID_USER'})
        user = results.args.docs[0]
        # [DOC] Confirm user is in fact a member of the group
        if query['group'][0] not in user.groups:
            raise self.exception(
                status=400,
                msg='User is not a member of the group.',
                args={'code': 'GROUP_NOT_ADDED'},
            )
        # [DOC] Update the user using '$del_val' Doc Oper
        results = await self.update(
            skip_events=[Event.PERM],
            env=env,
            query=[{
                '_id': query['_id'][0]
            }],
            doc={'groups': {
                '$del_val': [query['group'][0]]
            }},
        )
        # [DOC] if update fails, return update results
        if results.status != 200:
            return results
        # [DOC] Check if the updated User doc belongs to current session and update it
        if env['session'].user._id == user._id:
            user_results = await self.read_privileges(skip_events=[Event.PERM],
                                                      env=env,
                                                      query=[{
                                                          '_id': user._id
                                                      }])
            env['session']['user'] = user_results.args.docs[0]

        return results
Esempio n. 17
0
class Group(BaseModule):
    '''`Group` module provides data type and controller for groups in Nawah eco-system.'''

    collection = 'groups'
    attrs = {
        'user':
        ATTR.ID(desc='`_id` of `User` doc the doc belongs to.'),
        'name':
        ATTR.LOCALE(desc='Name of the groups as `LOCALE`.'),
        'desc':
        ATTR.LOCALE(
            desc=
            'Description of the group as `LOCALE`. This can be used for dynamic generated groups that are meant to be exposed to end-users.'
        ),
        'privileges':
        ATTR.KV_DICT(
            desc='Privileges that any user is a member of the group has.',
            key=ATTR.STR(),
            val=ATTR.LIST(list=[ATTR.STR()]),
        ),
        'settings':
        ATTR.KV_DICT(
            desc=
            '`Setting` docs to be created, or required for members users when added to the group.',
            key=ATTR.STR(),
            val=ATTR.ANY(),
        ),
        'create_time':
        ATTR.DATETIME(
            desc='Python `datetime` ISO format of the doc creation.'),
    }
    defaults = {
        'desc': {locale: ''
                 for locale in Config.locales},
        'privileges': {},
        'settings': {},
    }
    methods = {
        'read':
        METHOD(permissions=[PERM(privilege='admin')]),
        'create':
        METHOD(permissions=[PERM(privilege='admin')]),
        'update':
        METHOD(
            permissions=[
                PERM(privilege='admin'),
                PERM(
                    privilege='update',
                    query_mod={'user': '******'},
                    doc_mod={'privileges': None},
                ),
            ],
            query_args={'_id': ATTR.ID()},
        ),
        'delete':
        METHOD(
            permissions=[
                PERM(privilege='admin'),
                PERM(privilege='delete', query_mod={'user': '******'}),
            ],
            query_args={'_id': ATTR.ID()},
        ),
    }

    async def pre_update(self, skip_events, env, query, doc, payload):
        '''Merges updated `attrs` values over the existing group doc `attrs`, instead of overwriting them wholesale.

        Raises `INVALID_GROUP` if the query matches no group, and
        `MULTI_ATTRS_UPDATE` if it matches more than one, as merging is only
        well-defined for a single group doc.
        '''
        # [DOC] Make sure no attrs overwriting would happen
        if 'attrs' in doc.keys():
            results = await self.read(skip_events=[Event.PERM],
                                      env=env,
                                      query=query)
            if not results.args.count:
                raise self.exception(status=400,
                                     msg='Group is invalid.',
                                     args={'code': 'INVALID_GROUP'})

            if results.args.count > 1:
                raise self.exception(
                    status=400,
                    msg=
                    'Updating group attrs can be done only to individual groups.',
                    args={'code': 'MULTI_ATTRS_UPDATE'},
                )

            # [DOC] Only non-None, non-empty values overwrite existing attrs
            results.args.docs[0]['attrs'].update({
                attr: doc['attrs'][attr]
                for attr in doc['attrs'].keys()
                if doc['attrs'][attr] is not None and doc['attrs'][attr] != ''
            })
            doc['attrs'] = results.args.docs[0]['attrs']
        return (skip_events, env, query, doc, payload)
Esempio n. 18
0
def test_compile_query_step_one_step_match_attr_ID_val_list_id(caplog):
    '''Confirms `_compile_query_step` passes a `$in` list of ObjectId for an `ATTR.ID` attr straight through to `aggregate_match`, without conversion warnings or changes to prefix/suffix stages.'''
    draft_flags = ['__deleted', '__create_draft', '__update_draft']
    aggregate_prefix = [{'$match': {flag: {'$exists': False}}} for flag in draft_flags]
    aggregate_suffix = [{'$group': {'_id': '$_id'}}]
    aggregate_match = []
    oid = ObjectId('000000000000000000000000')
    _query._compile_query_step(
        aggregate_prefix=aggregate_prefix,
        aggregate_suffix=aggregate_suffix,
        aggregate_match=aggregate_match,
        collection_name='collection_name',
        attrs={'attr': ATTR.ID()},
        step={'attr': {'$in': [oid]}},
        watch_mode=False,
    )
    # The value is already an ObjectId, so no conversion failure may be logged
    failure_message = (
        'Failed to convert child_attr to ObjectId: 000000000000000000000000'
    )
    assert all(record.message != failure_message for record in caplog.records)
    # Prefix and suffix stages must be left untouched
    assert aggregate_prefix == [
        {'$match': {flag: {'$exists': False}}} for flag in draft_flags
    ]
    assert aggregate_suffix == [{'$group': {'_id': '$_id'}}]
    # The step itself is forwarded verbatim into the match stages
    assert aggregate_match == [{'attr': {'$in': [oid]}}]