Example #1
class Analytic(BaseModule):
    '''`Analytic` module provides data type and controller for `Analytics Workflow` and accompanying analytics docs. It uses the `pre_create` handler to ensure no duplicate event docs are created and that all occurrences of the same event are recorded in one doc.'''
    collection = 'analytics'
    attrs = {
        'user':
        ATTR.ID(desc='`_id` of `User` doc the doc belongs to.'),
        'event':
        ATTR.STR(desc='Analytics event name.'),
        'subevent':
        ATTR.ANY(
            desc=
            'Analytics subevent distinguishing attribute. This is usually `STR` or `ID`, but it is introduced in the module as `ANY` to allow wider use-cases by developers.'
        ),
        'occurrences':
        ATTR.LIST(
            desc='All occurrences of the event as list.',
            list=[
                ATTR.DICT(
                    desc='Single occurrence of the event details.',
                    dict={
                        'args':
                        ATTR.DICT(
                            desc=
                            'Key-value `dict` containing event args, if any.',
                            dict={
                                '__key': ATTR.STR(),
                                '__val': ATTR.ANY()
                            }),
                        'score':
                        ATTR.INT(
                            desc='Numerical score for occurrence of the event.'
                        ),
                        'create_time':
                        ATTR.DATETIME(
                            desc=
                            'Python `datetime` ISO format of the occurrence of the event.'
                        ),
                    })
            ]),
        'score':
        ATTR.INT(
            desc=
            'Total score accumulated over all occurrences of the event. This can be used for data analysis.'
        ),
    }
    unique_attrs = [('user', 'event', 'subevent')]
    methods = {
        'read': {
            'permissions': [PERM(privilege='read')]
        },
        'create': {
            'permissions': [PERM(privilege='__sys')],
            'doc_args': {
                'event': ATTR.STR(),
                'subevent': ATTR.ANY(),
                'args': ATTR.DICT(dict={
                    '__key': ATTR.STR(),
                    '__val': ATTR.ANY()
                }),
            },
        },
        'update': {
            'permissions': [PERM(privilege='__sys')]
        },
        'delete': {
            'permissions': [PERM(privilege='delete')]
        },
    }

    async def pre_create(self, skip_events, env, query, doc, payload):
        analytic_results = await self.read(
            skip_events=[Event.PERM],
            env=env,
            query=[
                {
                    'user': env['session'].user._id,
                    'event': doc['event'],
                    'subevent': doc['subevent'],
                },
                {
                    '$limit': 1
                },
            ],
        )
        if analytic_results.args.count:
            analytic_results = await self.update(
                skip_events=[Event.PERM],
                env=env,
                query=[{
                    '_id': analytic_results.args.docs[0]._id
                }],
                doc={
                    'occurrences': {
                        '$append': {
                            'args': doc['args'],
                            'score':
                            doc['score'] if 'score' in doc.keys() else 0,
                            'create_time':
                            datetime.datetime.utcnow().isoformat(),
                        }
                    },
                    'score': {
                        '$add': doc['score'] if 'score' in doc.keys() else 0
                    },
                },
            )
            return analytic_results
        else:
            doc = {
                'event':
                doc['event'],
                'subevent':
                doc['subevent'],
                'occurrences': [{
                    'args':
                    doc['args'],
                    'score':
                    doc['score'] if 'score' in doc.keys() else 0,
                    'create_time':
                    datetime.datetime.utcnow().isoformat(),
                }],
                'score':
                doc['score'] if 'score' in doc.keys() else 0,
            }
            return (skip_events, env, query, doc, payload)
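A minimal usage sketch (not part of the module), assuming the `Config` and `Event` objects used throughout these examples are importable and `env` is a valid env dict with an authenticated session. Repeated calls with the same `event` and `subevent` are merged by `pre_create`: the occurrence is appended via `$append` and `score` is accumulated via `$add`.
async def record_checkout_click(env):
    # Internal call; doc keys follow the 'create' doc_args above.
    return await Config.modules['analytic'].create(
        skip_events=[Event.PERM],
        env=env,
        doc={
            'event': 'BUTTON_CLICK',        # illustrative event name
            'subevent': 'checkout_page',    # illustrative subevent
            'args': {'button': 'pay_now'},  # free-form key-value args
        },
    )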
Example #2
File: group.py Project: masaar/limp
class Group(BaseModule):
    '''`Group` module provides data type and controller for groups in LIMP eco-system.'''
    collection = 'groups'
    attrs = {
        'user':
        ATTR.ID(desc='`_id` of `User` doc the doc belongs to.'),
        'name':
        ATTR.LOCALE(desc='Name of the group as `LOCALE`.'),
        'desc':
        ATTR.LOCALE(
            desc=
            'Description of the group as `LOCALE`. This can be used for dynamically generated groups that are meant to be exposed to end-users.'
        ),
        'privileges':
        ATTR.DICT(
            desc='Privileges that any user who is a member of the group has.',
            dict={
                '__key': ATTR.STR(),
                '__val': ATTR.LIST(list=[ATTR.STR()])
            }),
        'settings':
        ATTR.DICT(
            desc=
            '`Setting` docs to be created, or required, for member users when they are added to the group.',
            dict={
                '__key': ATTR.STR(),
                '__val': ATTR.ANY()
            }),
        'create_time':
        ATTR.DATETIME(
            desc='Python `datetime` ISO format of the doc creation.'),
    }
    defaults = {
        'desc': {locale: ''
                 for locale in Config.locales},
        'privileges': {},
        'settings': {},
    }
    methods = {
        'read': {
            'permissions': [PERM(privilege='admin')]
        },
        'create': {
            'permissions': [PERM(privilege='admin')]
        },
        'update': {
            'permissions': [
                PERM(privilege='admin'),
                PERM(
                    privilege='update',
                    query_mod={'user': '******'},
                    doc_mod={'privileges': None},
                ),
            ],
            'query_args': {
                '_id': ATTR.ID()
            },
        },
        'delete': {
            'permissions': [
                PERM(privilege='admin'),
                PERM(privilege='delete', query_mod={'user': '******'}),
            ],
            'query_args': {
                '_id': ATTR.ID()
            },
        },
    }

    async def pre_create(self, skip_events, env, query, doc, payload):
        return (skip_events, env, query, doc, payload)

    async def pre_update(self, skip_events, env, query, doc, payload):
        # [DOC] Make sure attrs are merged into existing values, not overwritten
        if 'attrs' in doc.keys():
            results = await self.read(skip_events=[Event.PERM],
                                      env=env,
                                      query=query)
            if not results.args.count:
                return self.status(status=400,
                                   msg='Group is invalid.',
                                   args={'code': 'INVALID_GROUP'})
            if results.args.count > 1:
                return self.status(
                    status=400,
                    msg=
                    'Updating group attrs can be done only to individual groups.',
                    args={'code': 'MULTI_ATTRS_UPDATE'},
                )
            results.args.docs[0]['attrs'].update({
                attr: doc['attrs'][attr]
                for attr in doc['attrs'].keys()
                if doc['attrs'][attr] != None and doc['attrs'][attr] != ''
            })
            doc['attrs'] = results.args.docs[0]['attrs']
        return (skip_events, env, query, doc, payload)
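Since `privileges` maps module names to lists of privilege names, granting a group an extra privilege is a plain internal `update` call. A minimal sketch under the same runtime assumptions as the previous example; the `blog` module name is purely illustrative, and the dot-notation doc key mirrors the `privileges.{module}` pattern used elsewhere in these examples.
async def grant_blog_privileges(env, group_id):
    # Members of the group gain these privileges in addition to their own.
    return await Config.modules['group'].update(
        skip_events=[Event.PERM],
        env=env,
        query=[{'_id': group_id}],
        doc={'privileges.blog': ['read', 'create']},
    )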
Example #3
class Diff(BaseModule):
    '''`Diff` module provides data type and controller for `Diff Workflow`. It is meant for use by internal calls only. The best practice for accessing diff docs is to create proxy modules or write LIMP methods that expose them.'''
    collection = 'diff'
    attrs = {
        'user':
        ATTR.ID(desc='`_id` of `User` doc the doc belongs to.'),
        'module':
        ATTR.STR(desc='Name of the module the original doc is part of.'),
        'doc':
        ATTR.ID(desc='`_id` of the original doc.'),
        'vars':
        ATTR.DICT(
            desc=
            'Key-value `dict` containing all attrs that have been updated from the original doc.',
            dict={
                '__key': ATTR.STR(),
                '__val': ATTR.ANY()
            }),
        'remarks':
        ATTR.STR(
            desc=
            'Human-readable remarks of the doc. This is introduced to allow developers to add log messages to diff docs.'
        ),
        'create_time':
        ATTR.DATETIME(
            desc='Python `datetime` ISO format of the doc creation.'),
    }
    defaults = {'doc': None, 'remarks': ''}
    methods = {
        'read': {
            'permissions': [PERM(privilege='read')]
        },
        'create': {
            'permissions': [PERM(privilege='__sys')]
        },
        'delete': {
            'permissions': [PERM(privilege='delete')]
        },
    }

    async def pre_create(self, skip_events, env, query, doc, payload):
        # [DOC] Detect non-_id update query:
        if '_id' not in query:
            results = await Config.modules[doc['module']
                                           ].read(skip_events=[Event.PERM],
                                                  env=env,
                                                  query=query)
            if results.args.count > 1:
                query.append(
                    {'_id': {
                        '$in': [doc._id for doc in results.args.docs]
                    }})
            elif results.args.count == 1:
                query.append({'_id': results.args.docs[0]._id})
            else:
                return self.status(status=400,
                                   msg='No update docs matched.',
                                   args={'code': 'NO_MATCH'})
        if '_id' in query and type(query['_id'][0]) == list:
            for i in range(len(query['_id'][0]) - 1):
                self.create(
                    skip_events=[Event.PERM],
                    env=env,
                    query=[{
                        '_id': query['_id'][0][i]
                    }],
                    doc=doc,
                )
            query['_id'][0] = query['_id'][0][-1]
        doc['doc'] = ObjectId(query['_id'][0])
        return (skip_events, env, query, doc, payload)
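Diff docs are created by internal calls, typically right after an update to the original doc. A minimal sketch under the same runtime assumptions; the `blog` module and the captured `old_vars` are illustrative. `pre_create` resolves the query into the original doc `_id` and sets `doc['doc']` accordingly.
async def record_blog_diff(env, blog_id, old_vars):
    return await Config.modules['diff'].create(
        skip_events=[Event.PERM],
        env=env,
        query=[{'_id': blog_id}],
        doc={
            'module': 'blog',
            'vars': old_vars,  # attrs as they were before the update
            'remarks': 'Updated by editor workflow.',  # optional, defaults to ''
        },
    )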
Example #4
    async def config_data(cls) -> None:
        # [DOC] Check API version
        if not cls.api_level:
            logger.warning(
                'No API-level specified for the app. LIMPd would continue to run the app, but the developer should consider adding an API-level to eliminate specs mismatches.'
            )
        elif type(cls.api_level) != str:
            logger.warning(
                'Skipping API-level check due to incompatible \'api_level\' Config Attr value type.'
            )
        else:
            limp_level = '.'.join(cls._limp_version.split('.')[0:2])
            if cls.api_level != limp_level:
                logger.error(
                    f'LIMPd is on API-level \'{limp_level}\', but the app requires API-level \'{cls.api_level}\'. Exiting.'
                )
                exit()
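            # [DOC] Attempt to detect the latest released version of the current API-level and warn if the app is behind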
            try:
                versions = ((requests.get(
                    'https://raw.githubusercontent.com/masaar/limp-versions/master/versions.txt'
                ).content).decode('utf-8').split('\n'))
                version_detected = ''
                for version in versions:
                    if version.startswith(f'{limp_level}.'):
                        if version_detected and int(
                                version.split('.')[-1]) < int(
                                    version_detected.split('.')[-1]):
                            continue
                        version_detected = version
                if version_detected and version_detected != cls._limp_version:
                    logger.warning(
                        f'Your app is using LIMPs version \'{cls._limp_version}\' while a newer version \'{version_detected}\' of the API-level is available. Please update.'
                    )
            except:
                logger.warning(
                    'An error occurred while attempting to check for the latest update to LIMPs. Please check for updates on your own.'
                )

        # [DOC] Check for jobs
        if cls.jobs:
            # [DOC] Create _jobs_env
            cls._jobs_session = DictObj({
                **cls.compile_anon_session(), 'user':
                DictObj(cls.compile_anon_user())
            })
            # [DOC] Check jobs schedule validity
            cls._jobs_base = datetime.datetime.utcnow()
            for job in cls.jobs:
                if not croniter.is_valid(job['schedule']):
                    logger.error(
                        f'Job with schedule \'{job["schedule"]}\' is invalid. Exiting.'
                    )
                    exit()
                else:
                    job['schedule'] = croniter(job['schedule'], cls._jobs_base)
                    job['next_time'] = datetime.datetime.fromtimestamp(
                        job['schedule'].get_next(),
                        datetime.timezone.utc).isoformat()[:16]

        # [DOC] Check for presence of user_auth_attrs
        if len(cls.user_auth_attrs) < 1 or sum(
                1 for attr in cls.user_auth_attrs
                if attr in cls.user_attrs.keys()) != len(cls.user_auth_attrs):
            logger.error(
                'Either no \'user_auth_attrs\' are provided, or one of \'user_auth_attrs\' not present in \'user_attrs\'. Exiting.'
            )
            exit()

        # [DOC] Check default values
        security_warning = '[SECURITY WARNING] {config_attr} is not explicitly set. It has been defaulted to \'{val}\', but in a production environment you should set it to your own value to protect your app from breaches.'
        if cls.admin_password == '__ADMIN':
            logger.warning(
                security_warning.format(config_attr='Admin password',
                                        val='__ADMIN'))
        if cls.anon_token == '__ANON_TOKEN_f00000000000000000000012':
            logger.warning(
                security_warning.format(
                    config_attr='Anon token',
                    val='__ANON_TOKEN_f00000000000000000000012'))

        # [DOC] Check for env data variables
        data_attrs = {
            'server': 'mongodb://localhost',
            'name': 'limp_data',
            'ssl': False,
            'ca_name': False,
            'ca': False,
        }
        for data_attr_name in data_attrs.keys():
            data_attr = getattr(cls, f'data_{data_attr_name}')
            if type(data_attr) == str and data_attr.startswith('$__env.'):
                logger.debug(
                    f'Detected env variable for config attr \'data_{data_attr_name}\''
                )
                if not os.getenv(data_attr[7:]):
                    logger.warning(
                        f'Couldn\'t read env variable for config attr \'data_{data_attr_name}\'. Defaulting to \'{data_attrs[data_attr_name]}\''
                    )
                    setattr(cls, f'data_{data_attr_name}',
                            data_attrs[data_attr_name])
                else:
                    # [DOC] Set data_ssl to True rather than string env variable value
                    if data_attr_name == 'ssl':
                        data_attr = True
                    else:
                        data_attr = os.getenv(data_attr[7:])
                    logger.warning(
                        f'Setting env variable for config attr \'data_{data_attr_name}\' to \'{data_attr}\''
                    )
                    setattr(cls, f'data_{data_attr_name}', data_attr)

        # [DOC] Check SSL settings
        if cls.data_ca:
            __location__ = os.path.realpath(
                os.path.join(os.getcwd(), os.path.dirname(__file__)))
            if not os.path.exists(os.path.join(__location__, 'certs')):
                os.makedirs(os.path.join(__location__, 'certs'))
            with open(os.path.join(__location__, 'certs', cls.data_ca_name),
                      'w') as f:
                f.write(cls.data_ca)

        from data import Data

        # [DOC] Create default env dict
        anon_user = cls.compile_anon_user()
        anon_session = cls.compile_anon_session()
        anon_session['user'] = DictObj(anon_user)
        cls._sys_conn = Data.create_conn()
        cls._sys_env = {
            'conn': cls._sys_conn,
            'REMOTE_ADDR': '127.0.0.1',
            'HTTP_USER_AGENT': 'LIMPd',
            'client_app': '__sys',
            'session': DictObj(anon_session),
            'ws': None,
            'watch_tasks': {},
        }

        if cls.data_azure_mongo:
            for module in cls.modules.keys():
                try:
                    if cls.modules[module].collection:
                        logger.debug(
                            f'Attempting to create shard collection: {cls.modules[module].collection}.'
                        )
                        cls._sys_conn[cls.data_name].command(
                            'shardCollection',
                            f'{cls.data_name}.{cls.modules[module].collection}',
                            key={'_id': 'hashed'},
                        )
                    else:
                        logger.debug(f'Skipping service module: {module}.')
                except Exception as err:
                    logger.error(err)

        # [DOC] Check test mode
        if cls.test or cls.test_collections:
            logger.debug('Test mode or Test Collections Mode detected.')
            __location__ = os.path.realpath(
                os.path.join(os.getcwd(), os.path.dirname(__file__)))
            if not os.path.exists(os.path.join(__location__, 'tests')):
                os.makedirs(os.path.join(__location__, 'tests'))
            if not cls.test_env:
                for module in cls.modules.keys():
                    if cls.modules[module].collection:
                        logger.debug(
                            f'Updating collection name \'{cls.modules[module].collection}\' of module {module}'
                        )
                        cls.modules[
                            module].collection = f'test_{cls.modules[module].collection}'
                        if cls.test and not cls.test_skip_flush:
                            logger.debug(
                                f'Flushing test collection \'{cls.modules[module].collection}\''
                            )
                            await Data.drop(
                                env=cls._sys_env,
                                collection=cls.modules[module].collection,
                            )
                    else:
                        logger.debug(f'Skipping service module {module}')
            else:
                logger.warning(
                    f'Testing on \'{cls.env}\' env. LIMPd would be sleeping for 5 secs to give you a chance to abort the test workflow if this was a mistake.'
                )
                time.sleep(5)

        logger.debug('Testing realm mode.')
        if cls.realm:
            # [DOC] Append realm to env dict
            cls._sys_env['realm'] = '__global'
            # [DOC] Append realm attr to all modules attrs and set it as required in query_args and doc_args
            for module in cls.modules.keys():
                if module != 'realm':
                    logger.debug(
                        f'Updating module \'{module}\' for realm mode.')
                    cls.modules[module].attrs['realm'] = ATTR.STR()
                    for method in cls.modules[module].methods.keys():
                        # [DOC] Attempt required changes to query_args to add realm query_arg
                        if not cls.modules[module].methods[method].query_args:
                            cls.modules[module].methods[method].query_args = [
                                {}
                            ]
                        elif (type(
                                cls.modules[module].methods[method].query_args)
                              == dict):
                            cls.modules[module].methods[method].query_args = [
                                cls.modules[module].methods[method].query_args
                            ]
                        for query_args_set in (cls.modules[module].
                                               methods[method].query_args):
                            query_args_set['realm'] = ATTR.STR()
                        # [DOC] Attempt required changes to doc_args to add realm doc_arg
                        if not cls.modules[module].methods[method].doc_args:
                            cls.modules[module].methods[method].doc_args = [{}]
                        elif type(cls.modules[module].methods[method].doc_args
                                  ) == dict:
                            cls.modules[module].methods[method].doc_args = [
                                cls.modules[module].methods[method].doc_args
                            ]
                        for doc_args_set in (
                                cls.modules[module].methods[method].doc_args):
                            doc_args_set['realm'] = ATTR.STR()
            # [DOC] Query all realms to provide access to available realms and to add realm docs to _sys_docs
            realm_results = await cls.modules['realm'].read(
                skip_events=[Event.PERM, Event.ARGS], env=cls._sys_env)
            logger.debug(
                f'Found {realm_results.args.count} realms. Namely; {[doc.name for doc in realm_results.args.docs]}'
            )
            for doc in realm_results.args.docs:
                cls._realms[doc.name] = doc
                cls._sys_docs[doc._id] = {'module': 'realm'}
            # [DOC] Create __global realm
            if '__global' not in cls._realms:
                logger.debug('GLOBAL realm not found, creating it.')
                realm_results = await cls.modules['realm'].create(
                    skip_events=[Event.PERM, Event.PRE],
                    env=cls._sys_env,
                    doc={
                        '_id': ObjectId('f00000000000000000000014'),
                        'user': ObjectId('f00000000000000000000010'),
                        'name': '__global',
                        'default': 'f00000000000000000000013',
                    },
                )
                logger.debug(f'GLOBAL realm creation results: {realm_results}')
                if realm_results.status != 200:
                    logger.error('Config step failed. Exiting.')
                    exit()

        # [DOC] Checking users collection
        logger.debug('Testing users collection.')
        user_results = await cls.modules['user'].read(
            skip_events=[Event.PERM, Event.ON],
            env=cls._sys_env,
            query=[{
                '_id': 'f00000000000000000000010'
            }],
        )
        if not user_results.args.count:
            logger.debug('ADMIN user not found, creating it.')
            # [DOC] Prepare base ADMIN user doc
            admin_doc = {
                '_id': ObjectId('f00000000000000000000010'),
                'name': {
                    cls.locale: '__ADMIN'
                },
                'groups': [],
                'privileges': {
                    '*': ['*']
                },
                'locale': cls.locale,
            }
            # [DOC] Update ADMIN user doc with admin_doc Config Attr
            admin_doc.update(cls.admin_doc)

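            # [DOC] Generate a hash for every user_auth_attr from admin_password and anon_token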
            for auth_attr in cls.user_auth_attrs:
                admin_doc[f'{auth_attr}_hash'] = (jwt.encode(
                    {
                        'hash': [
                            auth_attr,
                            admin_doc[auth_attr],
                            cls.admin_password,
                            cls.anon_token,
                        ]
                    },
                    cls.admin_password,
                ).decode('utf-8').split('.')[1])
            if cls.realm:
                admin_doc['realm'] = '__global'
            admin_results = await cls.modules['user'].create(
                skip_events=[Event.PERM, Event.PRE, Event.ON],
                env=cls._sys_env,
                doc=admin_doc,
            )
            logger.debug(f'ADMIN user creation results: {admin_results}')
            if admin_results.status != 200:
                logger.error('Config step failed. Exiting.')
                exit()
        elif not cls.force_admin_check:
            logger.warning(
                'ADMIN user found, skipping check as force_admin_check Config Attr is not set.'
            )
        else:
            logger.warning(
                'ADMIN user found, checking it due to force_admin_check Config Attr.'
            )
            admin_doc = user_results.args.docs[0]
            admin_doc_update = {}
            for attr in cls.admin_doc.keys():
                if (attr not in admin_doc or not admin_doc[attr]
                        or cls.admin_doc[attr] != admin_doc[attr]):
                    if (type(cls.admin_doc[attr]) == dict
                            and cls.locale in cls.admin_doc[attr].keys()
                            and type(admin_doc[attr]) == dict
                            and ((cls.locale in admin_doc[attr].keys()
                                  and cls.admin_doc[attr][cls.locale]
                                  == admin_doc[attr][cls.locale]) or
                                 (cls.locale not in admin_doc[attr].keys()))):
                        continue
                    logger.debug(
                        f'Detected change in \'admin_doc.{attr}\' Config Attr.'
                    )
                    admin_doc_update[attr] = cls.admin_doc[attr]
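            # [DOC] Re-generate auth hashes and compare them against the current ADMIN doc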
            for auth_attr in cls.user_auth_attrs:
                auth_attr_hash = (jwt.encode(
                    {
                        'hash': [
                            auth_attr,
                            admin_doc[auth_attr],
                            cls.admin_password,
                            cls.anon_token,
                        ]
                    },
                    cls.admin_password,
                ).decode('utf-8').split('.')[1])
                if (f'{auth_attr}_hash' not in admin_doc
                        or auth_attr_hash != admin_doc[f'{auth_attr}_hash']):
                    logger.debug(
                        f'Detected change in \'admin_password\' Config Attr.')
                    admin_doc_update[f'{auth_attr}_hash'] = auth_attr_hash
            if len(admin_doc_update.keys()):
                logger.debug(
                    f'Attempting to update ADMIN user with doc: \'{admin_doc_update}\''
                )
                admin_results = await cls.modules['user'].update(
                    skip_events=[Event.PERM, Event.PRE, Event.ON],
                    env=cls._sys_env,
                    query=[{
                        '_id': ObjectId('f00000000000000000000010')
                    }],
                    doc=admin_doc_update,
                )
                logger.debug(f'ADMIN user update results: {admin_results}')
                if admin_results.status != 200:
                    logger.error('Config step failed. Exiting.')
                    exit()
            else:
                logger.debug('ADMIN user is up-to-date.')

        cls._sys_docs[ObjectId('f00000000000000000000010')] = {
            'module': 'user'
        }

        # [DOC] Test if ANON user exists
        user_results = await cls.modules['user'].read(
            skip_events=[Event.PERM, Event.ON],
            env=cls._sys_env,
            query=[{
                '_id': 'f00000000000000000000011'
            }],
        )
        if not user_results.args.count:
            logger.debug('ANON user not found, creating it.')
            anon_results = await cls.modules['user'].create(
                skip_events=[Event.PERM, Event.PRE, Event.ON],
                env=cls._sys_env,
                doc=cls.compile_anon_user(),
            )
            logger.debug(f'ANON user creation results: {anon_results}')
            if anon_results.status != 200:
                logger.error('Config step failed. Exiting.')
                exit()
        else:
            logger.debug('ANON user found, checking it.')
            anon_doc = cls.compile_anon_user()
            anon_doc_update = {}
            for attr in cls.user_attrs.keys():
                if attr not in anon_doc or not anon_doc[attr]:
                    logger.debug(
                        f'Detected change in \'anon_doc.{attr}\' Config Attr.')
                    anon_doc_update[attr] = generate_attr(
                        attr_type=cls.user_attrs[attr])
            for module in cls.anon_privileges.keys():
                if module not in anon_doc or set(anon_doc[module]) != set(
                        cls.anon_privileges[module]):
                    logger.debug(
                        f'Detected change in \'anon_privileges\' Config Attr.')
                    anon_doc_update[
                        f'privileges.{module}'] = cls.anon_privileges[module]
            for auth_attr in cls.user_auth_attrs:
                if (f'{auth_attr}_hash' not in anon_doc
                        or anon_doc[f'{auth_attr}_hash'] != cls.anon_token):
                    logger.debug(
                        f'Detected change in \'anon_token\' Config Attr.')
                    anon_doc_update[f'{auth_attr}_hash'] = cls.anon_token
            if len(anon_doc_update.keys()):
                logger.debug(
                    f'Attempting to update ANON user with doc: \'{anon_doc_update}\''
                )
                anon_results = await cls.modules['user'].update(
                    skip_events=[Event.PERM, Event.PRE, Event.ON],
                    env=cls._sys_env,
                    query=[{
                        '_id': ObjectId('f00000000000000000000011')
                    }],
                    doc=anon_doc_update,
                )
                logger.debug(f'ANON user update results: {anon_results}')
                if anon_results.status != 200:
                    logger.error('Config step failed. Exiting.')
                    exit()
            else:
                logger.debug('ANON user is up-to-date.')

        cls._sys_docs[ObjectId('f00000000000000000000011')] = {
            'module': 'user'
        }

        logger.debug('Testing sessions collection.')
        # [DOC] Test if ANON session exists
        session_results = await cls.modules['session'].read(
            skip_events=[Event.PERM, Event.ON],
            env=cls._sys_env,
            query=[{
                '_id': 'f00000000000000000000012'
            }],
        )
        if not session_results.args.count:
            logger.debug('ANON session not found, creating it.')
            anon_results = await cls.modules['session'].create(
                skip_events=[Event.PERM, Event.PRE, Event.ON],
                env=cls._sys_env,
                doc=cls.compile_anon_session(),
            )
            logger.debug(f'ANON session creation results: {anon_results}')
            if anon_results.status != 200:
                logger.error('Config step failed. Exiting.')
                exit()
        cls._sys_docs[ObjectId('f00000000000000000000012')] = {
            'module': 'session'
        }

        logger.debug('Testing groups collection.')
        # [DOC] Test if DEFAULT group exists
        group_results = await cls.modules['group'].read(
            skip_events=[Event.PERM, Event.ON],
            env=cls._sys_env,
            query=[{
                '_id': 'f00000000000000000000013'
            }],
        )
        if not group_results.args.count:
            logger.debug('DEFAULT group not found, creating it.')
            group_doc = {
                '_id': ObjectId('f00000000000000000000013'),
                'user': ObjectId('f00000000000000000000010'),
                'name': {locale: '__DEFAULT'
                         for locale in cls.locales},
                'bio': {locale: '__DEFAULT'
                        for locale in cls.locales},
                'privileges': cls.default_privileges,
            }
            if cls.realm:
                group_doc['realm'] = '__global'
            group_results = await cls.modules['group'].create(
                skip_events=[Event.PERM, Event.PRE, Event.ON],
                env=cls._sys_env,
                doc=group_doc,
            )
            logger.debug(f'DEFAULT group creation results: {group_results}')
            if group_results.status != 200:
                logger.error('Config step failed. Exiting.')
                exit()
        else:
            logger.debug('DEFAULT group found, checking it.')
            default_doc = group_results.args.docs[0]
            default_doc_update = {}
            for module in cls.default_privileges.keys():
                if module not in default_doc.privileges.keys() or set(
                        default_doc.privileges[module]) != set(
                            cls.default_privileges[module]):
                    logger.debug(
                        f'Detected change in \'default_privileges\' Config Attr.'
                    )
                    default_doc_update[
                        f'privileges.{module}'] = cls.default_privileges[
                            module]
            if len(default_doc_update.keys()):
                logger.debug(
                    f'Attempting to update DEFAULT group with doc: \'{default_doc_update}\''
                )
                default_results = await cls.modules['group'].update(
                    skip_events=[Event.PERM, Event.PRE, Event.ON],
                    env=cls._sys_env,
                    query=[{
                        '_id': ObjectId('f00000000000000000000013')
                    }],
                    doc=default_doc_update,
                )
                logger.debug(
                    f'DEFAULT group update results: {default_results}')
                if default_results.status != 200:
                    logger.error('Config step failed. Exiting.')
                    exit()
            else:
                logger.debug('DEFAULT group is up-to-date.')

        cls._sys_docs[ObjectId('f00000000000000000000013')] = {
            'module': 'group'
        }

        # [DOC] Test app-specific groups
        logger.debug('Testing app-specific groups collection.')
        for group in cls.groups:
            group_results = await cls.modules['group'].read(
                skip_events=[Event.PERM, Event.ON],
                env=cls._sys_env,
                query=[{
                    '_id': group['_id']
                }],
            )
            if not group_results.args.count:
                logger.debug(
                    f'App-specific group with name \'{group["name"]}\' not found, creating it.'
                )
                if cls.realm:
                    group['realm'] = '__global'
                group_results = await cls.modules['group'].create(
                    skip_events=[Event.PERM, Event.PRE, Event.ON],
                    env=cls._sys_env,
                    doc=group,
                )
                logger.debug(
                    f'App-specific group with name {group["name"]} creation results: {group_results}'
                )
                if group_results.status != 200:
                    logger.error('Config step failed. Exiting.')
                    exit()
            else:
                logger.debug(
                    f'App-specific group with name \'{group["name"]}\' found, checking it.'
                )
                group_doc = group_results.args.docs[0]
                group_doc_update = {}
                if 'privileges' in group.keys():
                    for module in group['privileges'].keys():
                        if module not in group_doc.privileges.keys() or set(
                                group_doc.privileges[module]) != set(
                                    group['privileges'][module]):
                            logger.debug(
                                f'Detected change in \'privileges\' Doc Arg for group with name \'{group["name"]}\'.'
                            )
                            group_doc_update[f'privileges.{module}'] = group[
                                'privileges'][module]
                if len(group_doc_update.keys()):
                    logger.debug(
                        f'Attempting to update group with name \'{group["name"]}\' with doc: \'{group_doc_update}\''
                    )
                    group_results = await cls.modules['group'].update(
                        skip_events=[Event.PERM, Event.PRE, Event.ON],
                        env=cls._sys_env,
                        query=[{
                            '_id': group['_id']
                        }],
                        doc=group_doc_update,
                    )
                    logger.debug(
                        f'Group with name \'{group["name"]}\' update results: {group_results}'
                    )
                    if group_results.status != 200:
                        logger.error('Config step failed. Exiting.')
                        exit()
                else:
                    logger.debug(
                        f'Group with name \'{group["name"]}\' is up-to-date.')

            cls._sys_docs[ObjectId(group['_id'])] = {'module': 'group'}

        # [DOC] Test app-specific data indexes
        logger.debug('Testing data indexes')
        for index in cls.data_indexes:
            logger.debug(f'Attempting to create data index: {index}')
            cls._sys_conn[cls.data_name][index['collection']].create_index(
                index['index'])
        logger.debug(
            'Creating \'var\', \'type\', \'user\' data indexes for the settings collection.'
        )
        cls._sys_conn[cls.data_name]['settings'].create_index([('var', 1)])
        cls._sys_conn[cls.data_name]['settings'].create_index([('type', 1)])
        cls._sys_conn[cls.data_name]['settings'].create_index([('user', 1)])
        logger.debug(
            'Creating \'user\', \'event\', \'subevent\' data indexes for the analytics collection.'
        )
        cls._sys_conn[cls.data_name]['analytics'].create_index([('user', 1)])
        cls._sys_conn[cls.data_name]['analytics'].create_index([('event', 1)])
        cls._sys_conn[cls.data_name]['analytics'].create_index([('subevent', 1)
                                                                ])
        logger.debug(
            'Creating \'__deleted\' data indexes for all collections.')
        for module in cls.modules:
            if cls.modules[module].collection:
                logger.debug(
                    f'Attempting to create \'__deleted\' data index for collection: {cls.modules[module].collection}'
                )
                cls._sys_conn[cls.data_name][
                    cls.modules[module].collection].create_index([('__deleted',
                                                                   1)])
        if cls.realm:
            logger.debug(
                'Creating \'realm\' data indexes for all collections.')
            for module in cls.modules:
                if module != 'realm' and cls.modules[module].collection:
                    logger.debug(
                        f'Attempting to create \'realm\' data index for collection: {cls.modules[module].collection}'
                    )
                    cls._sys_conn[cls.data_name][
                        cls.modules[module].collection].create_index([
                            ('realm', 'text')
                        ])

        # [DOC] Test app-specific docs
        logger.debug('Testing docs.')
        for doc in cls.docs:
            doc_results = await cls.modules[doc['module']].read(
                skip_events=[Event.PERM, Event.PRE, Event.ON],
                env=cls._sys_env,
                query=[{
                    '_id': doc['doc']['_id']
                }],
            )
            if not doc_results.args.count:
                if cls.realm:
                    doc['doc']['realm'] = '__global'
                skip_events = [Event.PERM]
                if 'skip_args' in doc.keys() and doc['skip_args'] == True:
                    skip_events.append(Event.ARGS)
                doc_results = await cls.modules[doc['module']].create(
                    skip_events=skip_events, env=cls._sys_env, doc=doc['doc'])
                logger.debug(
                    'App-specific doc with _id \'%s\' of module \'%s\' creation results: %s',
                    doc['doc']['_id'],
                    doc['module'],
                    doc_results,
                )
                if doc_results.status != 200:
                    logger.error('Config step failed. Exiting.')
                    exit()
            cls._sys_docs[ObjectId(doc['doc']['_id'])] = {
                'module': doc['module']
            }

        # [DOC] Check for test mode
        if cls.test:
            from test import Test

            logger.debug('Running tests')
            anon_session = cls.compile_anon_session()
            anon_session['user'] = DictObj(cls.compile_anon_user())
            Test.session = DictObj(anon_session)
            Test.env = cls._sys_env
            await Test.run_test(test_name=cls.test)
            exit()

        # [DOC] Check for emulate_test mode
        if cls.emulate_test:
            cls.test = True
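`config_data` consumes several Config Attrs whose shapes are implied by the checks above. A minimal sketch of such values; only the keys actually read in this section (`schedule`, `collection`, `index`, `module`, `doc`, `skip_args`) are grounded in the code, while the concrete values are illustrative.
jobs = [
    # 'schedule' must be a valid cron expression (checked with croniter.is_valid);
    # any further keys of a job entry are not shown in this section.
    {'schedule': '*/15 * * * *'},
]
data_indexes = [
    # Each entry is passed to the driver's create_index as-is.
    {'collection': 'analytics', 'index': [('event', 1), ('subevent', 1)]},
]
docs = [
    # App-specific docs checked and created at startup; 'skip_args' additionally
    # skips Event.ARGS on creation.
    {'module': 'group', 'doc': {'_id': 'f00000000000000000000015'}, 'skip_args': True},
]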
Example #5
File: user.py Project: masaar/limp
class User(BaseModule):
    '''`User` module provides data type and controller for users in LIMP eco-system. This module is supposed to be used for internal calls only; however, it has wide-access permissions in order to allow admins and proxy modules to easily expose its methods.'''
    collection = 'users'
    attrs = {
        'name':
        ATTR.LOCALE(desc='Name of the user as `LOCALE`.'),
        'locale':
        ATTR.LOCALES(desc='Default locale of the user.'),
        'create_time':
        ATTR.DATETIME(
            desc='Python `datetime` ISO format of the doc creation.'),
        'login_time':
        ATTR.DATETIME(desc='Python `datetime` ISO format of the last login.'),
        'groups':
        ATTR.LIST(
            desc='List of `_id` for every group the user is a member of.',
            list=[ATTR.ID(desc='`_id` of Group doc the user is a member of.')]),
        'privileges':
        ATTR.DICT(
            desc=
            'Privileges of the user. These privileges are always available to the user regardless of whether the groups the user is a member of have them or not.',
            dict={
                '__key': ATTR.STR(),
                '__val': ATTR.LIST(list=[ATTR.STR()])
            }),
        'status':
        ATTR.LITERAL(
            desc=
            'Status of the user, determining whether the user has access to the app or not.',
            literal=['active', 'banned', 'deleted', 'disabled_password']),
    }
    defaults = {
        'login_time': None,
        'status': 'active',
        'groups': [],
        'privileges': {}
    }
    unique_attrs = []
    methods = {
        'read': {
            'permissions': [
                PERM(privilege='admin'),
                PERM(privilege='read', query_mod={'_id': '$__user'}),
            ]
        },
        'create': {
            'permissions': [PERM(privilege='admin')]
        },
        'update': {
            'permissions': [
                PERM(privilege='admin', doc_mod={'groups': None}),
                PERM(
                    privilege='update',
                    query_mod={'_id': '$__user'},
                    doc_mod={
                        'groups': None,
                        'privileges': None
                    },
                ),
            ],
            'query_args': {
                '_id': ATTR.ID()
            },
        },
        'delete': {
            'permissions': [
                PERM(privilege='admin'),
                PERM(privilege='delete', query_mod={'_id': '$__user'}),
            ],
            'query_args': {
                '_id': ATTR.ID()
            },
        },
        'read_privileges': {
            'permissions': [
                PERM(privilege='admin'),
                PERM(privilege='read', query_mod={'_id': '$__user'}),
            ],
            'query_args': {
                '_id': ATTR.ID()
            },
        },
        'add_group': {
            'permissions': [PERM(privilege='admin')],
            'query_args': {
                '_id': ATTR.ID()
            },
            'doc_args': [{
                'group': ATTR.ID()
            }, {
                'group': ATTR.LIST(list=[ATTR.ID()])
            }],
        },
        'delete_group': {
            'permissions': [PERM(privilege='admin')],
            'query_args': {
                '_id': ATTR.ID(),
                'group': ATTR.ID()
            },
        },
        'retrieve_file': {
            'permissions': [PERM(privilege='__sys')],
            'get_method': True
        },
        'create_file': {
            'permissions': [PERM(privilege='__sys')]
        },
        'delete_file': {
            'permissions': [PERM(privilege='__sys')]
        },
    }

    async def on_read(self, results, skip_events, env, query, doc, payload):
        for i in range(len(results['docs'])):
            user = results['docs'][i]
            user['settings'] = {}
            for auth_attr in Config.user_auth_attrs:
                del user[f'{auth_attr}_hash']
            if len(Config.user_doc_settings):
                setting_results = await Config.modules['setting'].read(
                    skip_events=[Event.PERM, Event.ARGS],
                    env=env,
                    query=[{
                        'user': user._id,
                        'var': {
                            '$in': Config.user_doc_settings
                        }
                    }],
                )
                if setting_results.args.count:
                    user['settings'] = {
                        setting_doc['var']: setting_doc['val']
                        for setting_doc in setting_results.args.docs
                    }
        return (results, skip_events, env, query, doc, payload)

    async def pre_create(self, skip_events, env, query, doc, payload):
        if Event.ARGS not in skip_events:
            if Config.realm:
                realm_results = await Config.modules['realm'].read(
                    skip_events=[Event.PERM], env=env)
                realm = realm_results.args.docs[0]
                doc['groups'] = [realm.default]
            else:
                doc['groups'] = [ObjectId('f00000000000000000000013')]
        if 'settings' in doc.keys():
            payload['settings'] = doc['settings']
        return (skip_events, env, query, doc, payload)

    async def on_create(self, results, skip_events, env, query, doc, payload):
        if 'settings' in payload.keys():
            for setting in payload['settings'].keys():
                if callable(payload['settings'][setting]['val']):
                    setting_val = payload['settings'][setting]['val'](
                        skip_events=skip_events, env=env, query=query, doc=doc)
                else:
                    setting_val = payload['settings'][setting]['val']
                setting_results = await Config.modules['setting'].create(
                    skip_events=[Event.PERM, Event.ARGS],
                    env=env,
                    doc={
                        'user': results['docs'][0]._id,
                        'var': setting,
                        'val': setting_val,
                        'type': payload['settings'][setting]['type'],
                    },
                )
                if setting_results.status != 200:
                    return setting_results
        return (results, skip_events, env, query, doc, payload)

    async def read_privileges(self, skip_events=[], env={}, query=[], doc={}):
        # [DOC] Confirm _id is valid
        results = await self.read(skip_events=[Event.PERM],
                                  env=env,
                                  query=[{
                                      '_id': query['_id'][0]
                                  }])
        if not results.args.count:
            return self.status(status=400,
                               msg='User is invalid.',
                               args={'code': 'INVALID_USER'})
        user = results.args.docs[0]
        for group in user.groups:
            group_results = await Config.modules['group'].read(
                skip_events=[Event.PERM], env=env, query=[{
                    '_id': group
                }])
            group = group_results.args.docs[0]
            for privilege in group.privileges.keys():
                if privilege not in user.privileges.keys():
                    user.privileges[privilege] = []
                for i in range(len(group.privileges[privilege])):
                    if group.privileges[privilege][i] not in user.privileges[
                            privilege]:
                        user.privileges[privilege].append(
                            group.privileges[privilege][i])
        return results

    async def add_group(self, skip_events=[], env={}, query=[], doc={}):
        # [DOC] Check for list group attr
        if type(doc['group']) == list:
            for i in range(0, len(doc['group']) - 1):
                await self.add_group(
                    skip_events=skip_events,
                    env=env,
                    query=query,
                    doc={'group': doc['group'][i]},
                )
            doc['group'] = doc['group'][-1]
        # [DOC] Convert group _id to ObjectId
        doc['group'] = ObjectId(doc['group'])
        # [DOC] Confirm group is valid
        results = await Config.modules['group'].read(skip_events=[Event.PERM],
                                                     env=env,
                                                     query=[{
                                                         '_id': doc['group']
                                                     }])
        if not results.args.count:
            return self.status(status=400,
                               msg='Group is invalid.',
                               args={'code': 'INVALID_GROUP'})
        # [DOC] Get user details
        results = await self.read(skip_events=[Event.PERM],
                                  env=env,
                                  query=query)
        if not results.args.count:
            return self.status(status=400,
                               msg='User is invalid.',
                               args={'code': 'INVALID_USER'})
        user = results.args.docs[0]
        # [DOC] Confirm group was not added before
        if doc['group'] in user.groups:
            return self.status(
                status=400,
                msg='User is already a member of the group.',
                args={'code': 'GROUP_ADDED'},
            )
        user.groups.append(doc['group'])
        # [DOC] Update the user
        results = await self.update(skip_events=[Event.PERM],
                                    env=env,
                                    query=query,
                                    doc={'groups': user.groups})
        return results

    async def delete_group(self, skip_events=[], env={}, query=[], doc={}):
        # [DOC] Confirm group is valid
        results = await Config.modules['group'].read(skip_events=[Event.PERM],
                                                     env=env,
                                                     query=[{
                                                         '_id':
                                                         query['group'][0]
                                                     }])
        if not results.args.count:
            return self.status(status=400,
                               msg='Group is invalid.',
                               args={'code': 'INVALID_GROUP'})
        # [DOC] Get user details
        results = await self.read(skip_events=[Event.PERM],
                                  env=env,
                                  query=[{
                                      '_id': query['_id'][0]
                                  }])
        if not results.args.count:
            return self.status(status=400,
                               msg='User is invalid.',
                               args={'code': 'INVALID_USER'})
        user = results.args.docs[0]
        # [DOC] Confirm user is a member of the group
        if query['group'][0] not in user.groups:
            return self.status(
                status=400,
                msg='User is not a member of the group.',
                args={'code': 'GROUP_NOT_ADDED'},
            )
        # [DOC] Update the user
        results = await self.update(
            skip_events=[Event.PERM],
            env=env,
            query=[{
                '_id': query['_id'][0]
            }],
            doc={'groups': {
                '$remove': [query['group'][0]]
            }},
        )
        return results
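The `add_group` method above accepts either a single group `_id` or a list of them as the `group` Doc Arg, recursing over all but the last item. A minimal usage sketch under the same runtime assumptions as the earlier examples.
async def add_user_to_groups(env, user_id, group_ids):
    # group_ids may be a single _id or a list of _ids, per the doc_args definition.
    return await Config.modules['user'].add_group(
        skip_events=[Event.PERM],
        env=env,
        query=[{'_id': user_id}],
        doc={'group': group_ids},
    )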
Example #6
class Session(BaseModule):
    '''`Session` module provides data type and controller for sessions in LIMP eco-system. CRUD methods of the module are supposed to be used for internal calls only, while the `auth`, `reauth`, and `signout` methods are available for use by the API as well as internal calls when needed.'''
    collection = 'sessions'
    attrs = {
        'user':
        ATTR.ID(desc='`_id` of `User` doc the doc belongs to.'),
        'groups':
        ATTR.LIST(
            desc=
            'List of `_id` for every group the session is authenticated against. This attr is set by `auth` method when called with `groups` Doc Arg for Controller Auth Sequence.',
            list=[
                ATTR.ID(
                    desc=
                    '`_id` of Group doc the session is authenticated against.')
            ]),
        'host_add':
        ATTR.IP(desc='IP of the host the user used to authenticate.'),
        'user_agent':
        ATTR.STR(desc='User-agent of the app the user used to authenticate.'),
        'expiry':
        ATTR.DATETIME(desc='Python `datetime` ISO format of session expiry.'),
        'token':
        ATTR.STR(desc='System-generated session token.'),
        'create_time':
        ATTR.DATETIME(
            desc='Python `datetime` ISO format of the doc creation.'),
    }
    defaults = {'groups': []}
    extns = {'user': EXTN(module='user', force=True)}
    methods = {
        'read': {
            'permissions':
            [PERM(privilege='read', query_mod={'user': '******'})]
        },
        'create': {
            'permissions': [PERM(privilege='create')]
        },
        'update': {
            'permissions': [
                PERM(
                    privilege='update',
                    query_mod={'user': '******'},
                    doc_mod={'user': None},
                )
            ],
            'query_args': {
                '_id': ATTR.ID()
            },
        },
        'delete': {
            'permissions':
            [PERM(privilege='delete', query_mod={'user': '******'})],
            'query_args': {
                '_id': ATTR.ID()
            },
        },
        'auth': {
            'permissions': [PERM(privilege='*')],
            'doc_args': []
        },
        'reauth': {
            'permissions': [PERM(privilege='*')],
            'query_args': [
                {
                    '_id': ATTR.ID(),
                    'hash': ATTR.STR(),
                    'groups': ATTR.LIST(list=[ATTR.ID()]),
                },
                {
                    '_id': ATTR.ID(),
                    'hash': ATTR.STR()
                },
            ],
        },
        'signout': {
            'permissions': [PERM(privilege='*')],
            'query_args': {
                '_id': ATTR.ID()
            },
        },
    }
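    # [DOC] Note: 'auth' is declared with empty doc_args above; import_modules
    #       (see Example #9 below) appends one auth signature per user auth attr
    #       at load time, so the empty list here is intentional.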

    async def auth(self, skip_events=[], env={}, query=[], doc={}):
        for attr in Config.modules['user'].unique_attrs:
            if attr in doc.keys():
                key = attr
                break
        user_query = [{key: doc[key], f'{key}_hash': doc['hash'], '$limit': 1}]
        if 'groups' in doc.keys():
            user_query.append([{
                'groups': {
                    '$in': doc['groups']
                }
            }, {
                'privileges': {
                    '*': ['*']
                }
            }])
        user_results = await Config.modules['user'].read(
            skip_events=[Event.PERM, Event.ON], env=env, query=user_query)
        if not user_results.args.count:
            return self.status(
                status=403,
                msg='Wrong auth credentials.',
                args={'code': 'INVALID_CREDS'},
            )
        user = user_results.args.docs[0]

        if Event.ON not in skip_events:
            if user.status in ['banned', 'deleted']:
                return self.status(
                    status=403,
                    msg=f'User is {user.status}.',
                    args={'code': 'INVALID_USER'},
                )
            elif user.status == 'disabled_password':
                return self.status(
                    status=403,
                    msg='User password is disabled.',
                    args={'code': 'INVALID_USER'},
                )

        token = secrets.token_urlsafe(32)
        session = {
            'user':
            user._id,
            'groups':
            doc['groups'] if 'groups' in doc.keys() else [],
            'host_add':
            env['REMOTE_ADDR'],
            'user_agent':
            env['HTTP_USER_AGENT'],
            'expiry': (datetime.datetime.utcnow() +
                       datetime.timedelta(days=30)).isoformat(),
            'token':
            token,
        }

        results = await self.create(skip_events=[Event.PERM],
                                    env=env,
                                    doc=session)
        if results.status != 200:
            return results

        session['_id'] = results.args.docs[0]._id
        session['user'] = user
        results.args.docs[0] = BaseModel(session)

        # [DOC] read user privileges and return them
        user_results = await Config.modules['user'].read_privileges(
            skip_events=[Event.PERM], env=env, query=[{
                '_id': user._id
            }])
        if user_results.status != 200:
            return user_results
        results.args.docs[0]['user'] = user_results.args.docs[0]

        # [DOC] Create CONN_AUTH Analytic doc
        if Config.analytics_events['session_conn_auth']:
            analytic_doc = {
                'event': 'CONN_AUTH',
                'subevent': env['client_app'],
                'args': {
                    'user': user_results.args.docs[0]._id,
                    'session': results.args.docs[0]._id,
                    'REMOTE_ADDR': env['REMOTE_ADDR'],
                    'HTTP_USER_AGENT': env['HTTP_USER_AGENT'],
                },
            }
            analytic_results = await Config.modules['analytic'].create(
                skip_events=[Event.PERM], env=env, doc=analytic_doc)
            if analytic_results.status != 200:
                logger.error(
                    f'Failed to create \'Analytic\' doc: {analytic_doc}. Results: {analytic_results}'
                )
        # [DOC] Create USER_AUTH Analytic doc
        if Config.analytics_events['session_user_auth']:
            analytic_doc = {
                'event': 'USER_AUTH',
                'subevent': user_results.args.docs[0]._id,
                'args': {
                    'session': results.args.docs[0]._id,
                    'REMOTE_ADDR': env['REMOTE_ADDR'],
                    'HTTP_USER_AGENT': env['HTTP_USER_AGENT'],
                    'client_app': env['client_app'],
                },
            }
            analytic_results = await Config.modules['analytic'].create(
                skip_events=[Event.PERM], env=env, doc=analytic_doc)
            if analytic_results.status != 200:
                logger.error(
                    f'Failed to create \'Analytic\' doc: {analytic_doc}. Results: {analytic_results}'
                )

        return self.status(
            status=200,
            msg='You were successfully authed.',
            args={'session': results.args.docs[0]},
        )

    async def reauth(self, skip_events=[], env={}, query=[], doc={}):
        if str(query['_id'][0]) == 'f00000000000000000000012':
            return self.status(
                status=400,
                msg='Reauth is not required for \'__ANON\' user.',
                args={'code': 'ANON_REAUTH'},
            )
        session_query = [{'_id': query['_id'][0]}]
        if 'groups' in query:
            session_query.append({'groups': {'$in': query['groups'][0]}})
        results = await self.read(skip_events=[Event.PERM],
                                  env=env,
                                  query=session_query)
        if not results.args.count:
            return self.status(status=403,
                               msg='Session is invalid.',
                               args={'code': 'INVALID_SESSION'})

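        # [DOC] Note: the 'hash' Query Arg is expected to equal the payload
        #       segment (index 1) of a JWT signed with the stored session token
        #       itself; the check below re-encodes the token and compares that
        #       segment only.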
        if (jwt.encode({
                'token': results.args.docs[0].token
        }, results.args.docs[0].token).decode('utf-8').split('.')[1] !=
                query['hash'][0]):
            return self.status(
                status=403,
                msg='Reauth token hash invalid.',
                args={'code': 'INVALID_REAUTH_HASH'},
            )
        if results.args.docs[0].expiry < datetime.datetime.utcnow().isoformat(
        ):
            results = await self.delete(
                skip_events=[Event.PERM, Event.SOFT],
                env=env,
                query=[{
                    '_id': env['session']._id
                }],
            )
            return self.status(status=403,
                               msg='Session has expired.',
                               args={'code': 'SESSION_EXPIRED'})
        # [DOC] Update user's login_time timestamp
        await Config.modules['user'].update(
            skip_events=[Event.PERM],
            env=env,
            query=[{
                '_id': results.args.docs[0].user
            }],
            doc={'login_time': datetime.datetime.utcnow().isoformat()},
        )
        await self.update(
            skip_events=[Event.PERM],
            env=env,
            query=[{
                '_id': results.args.docs[0]._id
            }],
            doc={
                'expiry': (datetime.datetime.utcnow() +
                           datetime.timedelta(days=30)).isoformat()
            },
        )
        # [DOC] read user privileges and return them
        user_results = await Config.modules['user'].read_privileges(
            skip_events=[Event.PERM],
            env=env,
            query=[{
                '_id': results.args.docs[0].user._id
            }],
        )
        results.args.docs[0]['user'] = user_results.args.docs[0]

        # [DOC] Create CONN_REAUTH Analytic doc
        if Config.analytics_events['session_conn_reauth']:
            analytic_doc = {
                'event': 'CONN_REAUTH',
                'subevent': env['client_app'],
                'args': {
                    'user': user_results.args.docs[0]._id,
                    'session': results.args.docs[0]._id,
                    'REMOTE_ADDR': env['REMOTE_ADDR'],
                    'HTTP_USER_AGENT': env['HTTP_USER_AGENT'],
                },
            }
            analytic_results = await Config.modules['analytic'].create(
                skip_events=[Event.PERM], env=env, doc=analytic_doc)
            if analytic_results.status != 200:
                logger.error(
                    f'Failed to create \'Analytic\' doc: {analytic_doc}. Results: {analytic_results}'
                )
        # [DOC] Create USER_REAUTH Analytic doc
        if Config.analytics_events['session_user_reauth']:
            analytic_doc = {
                'event': 'USER_REAUTH',
                'subevent': user_results.args.docs[0]._id,
                'args': {
                    'session': results.args.docs[0]._id,
                    'REMOTE_ADDR': env['REMOTE_ADDR'],
                    'HTTP_USER_AGENT': env['HTTP_USER_AGENT'],
                    'client_app': env['client_app'],
                },
            }
            analytic_results = await Config.modules['analytic'].create(
                skip_events=[Event.PERM], env=env, doc=analytic_doc)
            if analytic_results.status != 200:
                logger.error(
                    f'Failed to create \'Analytic\' doc: {analytic_doc}. Results: {analytic_results}'
                )

        return self.status(
            status=200,
            msg='You were successfully reauthed.',
            args={'session': results.args.docs[0]},
        )

    async def signout(self, skip_events=[], env={}, query=[], doc={}):
        if str(query['_id'][0]) == 'f00000000000000000000012':
            return self.status(
                status=400,
                msg='Signout is not allowed for \'__ANON\' user.',
                args={'code': 'ANON_SIGNOUT'},
            )
        results = await self.read(skip_events=[Event.PERM],
                                  env=env,
                                  query=[{
                                      '_id': query['_id'][0]
                                  }])

        if not results.args.count:
            return self.status(status=403,
                               msg='Session is invalid.',
                               args={'code': 'INVALID_SESSION'})
        results = await self.delete(skip_events=[Event.PERM],
                                    env=env,
                                    query=[{
                                        '_id': env['session']._id
                                    }])

        # [DOC] Create CONN_DEAUTH Analytic doc
        if Config.analytics_events['session_conn_deauth']:
            analytic_doc = {
                'event': 'CONN_DEAUTH',
                'subevent': env['client_app'],
                'args': {
                    'user': env['session'].user._id,
                    'session': env['session']._id,
                    'REMOTE_ADDR': env['REMOTE_ADDR'],
                    'HTTP_USER_AGENT': env['HTTP_USER_AGENT'],
                },
            }
            analytic_results = await Config.modules['analytic'].create(
                skip_events=[Event.PERM], env=env, doc=analytic_doc)
            if analytic_results.status != 200:
                logger.error(
                    f'Failed to create \'Analytic\' doc: {analytic_doc}. Results: {analytic_results}'
                )
        # [DOC] Create USER_DEAUTH Analytic doc
        if Config.analytics_events['session_user_deauth']:
            analytic_doc = {
                'event': 'USER_DEAUTH',
                'subevent': env['session'].user._id,
                'args': {
                    'session': env['session']._id,
                    'REMOTE_ADDR': env['REMOTE_ADDR'],
                    'HTTP_USER_AGENT': env['HTTP_USER_AGENT'],
                    'client_app': env['client_app'],
                },
            }
            analytic_results = await Config.modules['analytic'].create(
                skip_events=[Event.PERM], env=env, doc=analytic_doc)
            if analytic_results.status != 200:
                logger.error(
                    f'Failed to create \'Analytic\' doc: {analytic_doc}. Results: {analytic_results}'
                )

        return self.status(
            status=200,
            msg='You are successfully signed out.',
            args={'session': DictObj({'_id': 'f00000000000000000000012'})},
        )

    def check_permissions(
        self,
        skip_events: List[str],
        env: Dict[str, Any],
        query: Union[LIMP_QUERY, Query],
        doc: LIMP_DOC,
        module: BaseModule,
        permissions: List[PERM],
    ):
        user = env['session'].user

        permissions = copy.deepcopy(permissions)

        for permission in permissions:
            logger.debug(
                f'checking permission: {permission} against: {user.privileges}'
            )
            permission_pass = False
            if permission.privilege == '*':
                permission_pass = True

            if not permission_pass:
                if permission.privilege.find('.') == -1:
                    permission_module = module.module_name
                    permission_attr = permission.privilege
                elif permission.privilege.find('.') != -1:
                    permission_module = permission.privilege.split('.')[0]
                    permission_attr = permission.privilege.split('.')[1]

                if ('*' in user.privileges.keys()
                        and permission_module not in user.privileges.keys()):
                    user.privileges[permission_module] = copy.deepcopy(
                        user.privileges['*'])
                if permission_module in user.privileges.keys():
                    if (type(user.privileges[permission_module]) == list
                            and '*' in user.privileges[permission_module]):
                        user.privileges[permission_module] += copy.deepcopy(
                            module.privileges)
                if permission_module not in user.privileges.keys():
                    user.privileges[permission_module] = []

                if permission_attr in user.privileges[permission_module]:
                    permission_pass = True

            if permission_pass:
                query = self._parse_permission_args(
                    skip_events=skip_events,
                    env=env,
                    query=query,
                    doc=doc,
                    permission_args=permission.query_mod,
                )
                doc = self._parse_permission_args(
                    skip_events=skip_events,
                    env=env,
                    query=query,
                    doc=doc,
                    permission_args=permission.doc_mod,
                )
                return {'query': query, 'doc': doc}
        # [DOC] If all permission checks fail
        return False

    def _parse_permission_args(
        self,
        skip_events: List[str],
        env: Dict[str, Any],
        query: Union[LIMP_QUERY, Query],
        doc: LIMP_DOC,
        permission_args: Any,
    ):
        user = env['session'].user

        if type(permission_args) == list:
            args_iter = range(len(permission_args))
        elif type(permission_args) == dict:
            args_iter = list(permission_args.keys())

        for j in args_iter:
            if type(permission_args[j]) == ATTR_MOD:
                # [DOC] If attr is of type ATTR_MOD, call condition callable
                if permission_args[j].condition(skip_events=skip_events,
                                                env=env,
                                                query=query,
                                                doc=doc):
                    # [DOC] If condition return is True, update attr value
                    if callable(permission_args[j].default):
                        permission_args[j] = permission_args[j].default(
                            skip_events=skip_events,
                            env=env,
                            query=query,
                            doc=doc)
                        if type(permission_args[j]) == Exception:
                            raise permission_args[j]
                    else:
                        permission_args[j] = permission_args[j].default
            elif type(permission_args[j]) == dict:
                # [DOC] Check opers
                for oper in [
                        '$gt',
                        '$lt',
                        '$gte',
                        '$lte',
                        '$bet',
                        '$ne',
                        '$regex',
                        '$all',
                        '$in',
                ]:
                    if oper in permission_args[j].keys():
                        if oper == '$bet':
                            permission_args[j][
                                '$bet'] = self._parse_permission_args(
                                    skip_events=skip_events,
                                    env=env,
                                    query=query,
                                    doc=doc,
                                    permission_args=permission_args[j]['$bet'],
                                )
                        else:
                            permission_args[j][
                                oper] = self._parse_permission_args(
                                    skip_events=skip_events,
                                    env=env,
                                    query=query,
                                    doc=doc,
                                    permission_args=[permission_args[j][oper]],
                                )[0]
                        # [DOC] Continue the iteration
                        continue
                # [DOC] Child args, parse
                permission_args[j] = self._parse_permission_args(
                    skip_events=skip_events,
                    env=env,
                    query=query,
                    doc=doc,
                    permission_args=permission_args[j],
                )
            elif type(permission_args[j]) == list:
                permission_args[j] = self._parse_permission_args(
                    skip_events=skip_events,
                    env=env,
                    query=query,
                    doc=doc,
                    permission_args=permission_args[j],
                )
            elif type(permission_args[j]) == str:
                # [DOC] Check for variables
                if permission_args[j] == '$__user':
                    permission_args[j] = user._id
                elif permission_args[j].startswith('$__user.'):
                    permission_args[j] = extract_attr(
                        scope=user,
                        attr_path=permission_args[j].replace(
                            '$__user.', '$__'),
                    )
                elif permission_args[j] == '$__access':
                    permission_args[j] = {
                        '$__user': user._id,
                        '$__groups': user.groups
                    }
                elif permission_args[j] == '$__datetime':
                    permission_args[j] = datetime.datetime.utcnow().isoformat()
                elif permission_args[j] == '$__date':
                    permission_args[j] = datetime.date.today().isoformat()
                elif permission_args[j] == '$__time':
                    permission_args[j] = datetime.datetime.now().time(
                    ).isoformat()

        return permission_args
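
Both helpers above let `query_mod`/`doc_mod` values carry variables such as `$__user`, `$__access` and `$__datetime`, as well as nested `ATTR_MOD` conditions, which `_parse_permission_args` resolves per request. A hedged sketch of a permission set relying on that substitution; it assumes `PERM` is importable exactly as in the module examples, and the `update_time` attr is hypothetical:

permissions = [
    # Holders of the 'admin' privilege pass with no query or doc changes.
    PERM(privilege='admin'),
    # Everyone else is confined to their own docs: '$__user' is rewritten to
    # the session user's _id and '$__datetime' to the current ISO timestamp
    # by _parse_permission_args.
    PERM(
        privilege='update',
        query_mod={'user': '$__user'},
        doc_mod={'update_time': '$__datetime'},
    ),
]
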
Example #7
class Realm(BaseModule):
	'''`Realm` module provides data type and controller for Realm Mode in LIMP eco-system.'''
	collection = 'realms'
	attrs = {
		'user': ATTR.ID(desc='`_id` of `User` doc the doc belongs to. This is also the ADMIN of the realm.'),
		'name': ATTR.STR(desc='Name of the realm. This is both readable as well as unique name.'),
		'default': ATTR.ID(desc='`_id` of `Group` doc that serves as `DEFAULT` group of the realm.'),
		'create_time': ATTR.DATETIME(desc='Python `datetime` ISO format of the doc creation.'),
	}
	methods = {
		'read': {'permissions': [PERM(privilege='read')]},
		'create': {'permissions': [PERM(privilege='create')]},
		'update': {
			'permissions': [
				PERM(privilege='update', doc_mod={'user': None, 'create_time': None})
			],
			'query_args': {'_id': ATTR.ID()},
		},
		'delete': {
			'permissions': [PERM(privilege='delete')],
			'query_args': {'_id': ATTR.ID()},
		},
	}

	async def pre_create(self, skip_events, env, query, doc, payload):
		user_doc = {attr: doc['user'][attr] for attr in Config.user_attrs}
		user_doc.update(
			{
				'locale': Config.locale,
				'groups': [],
				'privileges': {'*': '*'},
				'status': 'active',
				'attrs': {},
				'realm': doc['name'],
			}
		)
		user_results = await Config.modules['user'].create(
			skip_events=[Event.PERM, Event.ARGS, Event.PRE], env=env, doc=user_doc
		)
		if user_results.status != 200:
			return user_results
		user = user_results.args.docs[0]

		group_results = await Config.modules['group'].create(
			skip_events=[Event.PERM, Event.ARGS],
			env=env,
			doc={
				'user': user._id,
				'name': {locale: '__DEFAULT' for locale in Config.locales},
				'bio': {locale: '__DEFAULT' for locale in Config.locales},
				'privileges': Config.default_privileges,
				'attrs': {},
				'realm': doc['name'],
			},
		)
		if group_results.status != 200:
			return group_results
		group = group_results.args.docs[0]

		skip_events.append(Event.ARGS)
		doc['user'] = user._id
		doc['default'] = group._id
		return (skip_events, env, query, doc, payload)

	async def on_create(self, results, skip_events, env, query, doc, payload):
		for doc in results['docs']:
			realm_results = await self.read(
				skip_events=[Event.PERM, Event.ARGS], env=env, query=[{'_id': doc._id}]
			)
			realm = realm_results.args.docs[0]
			Config._realms[realm.name] = realm
			Config._sys_docs[realm._id] = {'module': 'realm'}
		return (results, skip_events, env, query, doc, payload)
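
`pre_create` and `on_create` above return the full argument tuple so the base controller and any later handlers continue with the mutated values. A minimal hedged sketch of the same convention in a hypothetical module (all names besides the imported classes are made up):

class Note(BaseModule):
    '''Hypothetical module; it only illustrates the handler return convention.'''
    collection = 'notes'
    attrs = {'user': ATTR.ID(), 'body': ATTR.STR()}
    methods = {'create': {'permissions': [PERM(privilege='create')]}}

    async def pre_create(self, skip_events, env, query, doc, payload):
        # Stamp the doc with the session user, then hand every argument back
        # in the same tuple shape Realm.pre_create uses.
        doc['user'] = env['session'].user._id
        return (skip_events, env, query, doc, payload)
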
Example #8
File: utils.py Project: masaar/limp
def validate_attr(
    *,
    attr_name: str,
    attr_type: ATTR,
    attr_val: Any,
    allow_opers: bool = False,
    allow_none: bool = False,
    skip_events: List[str] = None,
    env: Dict[str, Any] = None,
    query: Union[LIMP_QUERY, Query] = None,
    doc: LIMP_DOC = None,
    scope: LIMP_DOC = None,
):
    from config import Config

    try:
        return validate_default(
            attr_type=attr_type,
            attr_val=attr_val,
            skip_events=skip_events,
            env=env,
            query=query,
            doc=doc,
            scope=scope if scope else doc,
            allow_none=allow_none,
        )
    except:
        pass

    attr_oper = False
    if allow_opers and type(attr_val) == dict:
        if '$add' in attr_val.keys():
            attr_oper = '$add'
            attr_val = attr_val['$add']
        elif '$multiply' in attr_val.keys():
            attr_oper = '$multiply'
            attr_val = attr_val['$multiply']
        elif '$append' in attr_val.keys():
            attr_oper = '$append'
            if '$unique' in attr_val.keys() and attr_val['$unique'] == True:
                attr_oper = '$append__unique'
            attr_val = [attr_val['$append']]
        elif '$remove' in attr_val.keys():
            attr_oper = '$remove'
            attr_val = attr_val['$remove']

    try:
        if attr_type._type == 'ANY':
            return return_valid_attr(attr_val=attr_val, attr_oper=attr_oper)

        elif attr_type._type == 'ACCESS':
            if (type(attr_val) == dict
                    and set(attr_val.keys()) == {'anon', 'users', 'groups'}
                    and type(attr_val['anon']) == bool
                    and type(attr_val['users']) == list
                    and type(attr_val['groups']) == list):
                return return_valid_attr(attr_val=attr_val,
                                         attr_oper=attr_oper)

        elif attr_type._type == 'BOOL':
            if type(attr_val) == bool:
                return return_valid_attr(attr_val=attr_val,
                                         attr_oper=attr_oper)

        elif attr_type._type == 'DATE':
            if re.match(r'^[0-9]{4}-[0-9]{2}-[0-9]{2}$', attr_val):
                return return_valid_attr(attr_val=attr_val,
                                         attr_oper=attr_oper)

        elif attr_type._type == 'DATETIME':
            if re.match(
                    r'^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}(:[0-9]{2}(\.[0-9]{6})?)?$',
                    attr_val,
            ):
                return return_valid_attr(attr_val=attr_val,
                                         attr_oper=attr_oper)

        elif attr_type._type == 'DICT':
            if type(attr_val) == dict:
                if '__key' in attr_type._args['dict'].keys():
                    if '__min' in attr_type._args['dict'].keys():
                        if len(attr_val.keys()
                               ) < attr_type._args['dict']['__min']:
                            raise InvalidAttrException(
                                attr_name=attr_name,
                                attr_type=attr_type,
                                val_type=type(attr_val),
                            )
                    if '__max' in attr_type._args['dict'].keys():
                        if len(attr_val.keys()
                               ) > attr_type._args['dict']['__max']:
                            raise InvalidAttrException(
                                attr_name=attr_name,
                                attr_type=attr_type,
                                val_type=type(attr_val),
                            )
                    shadow_attr_val = {}
                    for child_attr_val in attr_val.keys():
                        shadow_attr_val[validate_attr(
                            attr_name=f'{attr_name}.{child_attr_val}',
                            attr_type=attr_type._args['dict']['__key'],
                            attr_val=child_attr_val,
                            allow_opers=allow_opers,
                            allow_none=allow_none,
                            skip_events=skip_events,
                            env=env,
                            query=query,
                            doc=doc,
                            scope=attr_val,
                        )] = validate_attr(
                            attr_name=f'{attr_name}.{child_attr_val}',
                            attr_type=attr_type._args['dict']['__val'],
                            attr_val=attr_val[child_attr_val],
                            allow_opers=allow_opers,
                            allow_none=allow_none,
                            skip_events=skip_events,
                            env=env,
                            query=query,
                            doc=doc,
                            scope=attr_val,
                        )
                    if '__req' in attr_type._args['dict'].keys():
                        for req_key in attr_type._args['dict']['__req']:
                            if req_key not in shadow_attr_val.keys():
                                raise InvalidAttrException(
                                    attr_name=attr_name,
                                    attr_type=attr_type,
                                    val_type=type(attr_val),
                                )
                    return return_valid_attr(attr_val=shadow_attr_val,
                                             attr_oper=attr_oper)
                else:
                    for child_attr_type in attr_type._args['dict'].keys():
                        if child_attr_type not in attr_val.keys():
                            attr_val[child_attr_type] = None
                        attr_val[child_attr_type] = validate_attr(
                            attr_name=f'{attr_name}.{child_attr_type}',
                            attr_type=attr_type._args['dict'][child_attr_type],
                            attr_val=attr_val[child_attr_type],
                            allow_opers=allow_opers,
                            allow_none=allow_none,
                            skip_events=skip_events,
                            env=env,
                            query=query,
                            doc=doc,
                            scope=attr_val,
                        )
                    return return_valid_attr(attr_val=attr_val,
                                             attr_oper=attr_oper)

        elif attr_type._type == 'EMAIL':
            if re.match(r'^[^@]+@[^@]+\.[^@]+$', attr_val):
                return return_valid_attr(attr_val=attr_val,
                                         attr_oper=attr_oper)

        elif attr_type._type == 'FILE':
            if type(attr_val) == list and len(attr_val):
                try:
                    validate_attr(
                        attr_name=attr_name,
                        attr_type=attr_type,
                        attr_val=attr_val[0],
                        allow_opers=allow_opers,
                        allow_none=allow_none,
                        skip_events=skip_events,
                        env=env,
                        query=query,
                        doc=doc,
                        scope=attr_val,
                    )
                    attr_val = attr_val[0]
                except:
                    raise InvalidAttrException(
                        attr_name=attr_name,
                        attr_type=attr_type,
                        val_type=type(attr_val),
                    )
            file_type = (
                type(attr_val) == dict and set(attr_val.keys())
                == {'name', 'lastModified', 'type', 'size', 'content'}
                and type(attr_val['name']) == str
                and type(attr_val['lastModified']) == int
                and type(attr_val['size']) == int
                and type(attr_val['content']) in [binary.Binary, bytes])
            if not file_type:
                raise InvalidAttrException(attr_name=attr_name,
                                           attr_type=attr_type,
                                           val_type=type(attr_val))
            if attr_type._args['types']:
                for file_type in attr_type._args['types']:
                    if attr_val['type'].split('/')[0] == file_type.split(
                            '/')[0]:
                        if (file_type.split('/')[1] == '*'
                                or attr_val['type'].split('/')[1]
                                == file_type.split('/')[1]):
                            return return_valid_attr(attr_val=attr_val,
                                                     attr_oper=attr_oper)
            else:
                return return_valid_attr(attr_val=attr_val,
                                         attr_oper=attr_oper)

        elif attr_type._type == 'FLOAT':
            if type(attr_val) == str and re.match(r'^[0-9]+(\.[0-9]+)?$',
                                                  attr_val):
                attr_val = float(attr_val)
            elif type(attr_val) == int:
                attr_val = float(attr_val)

            if type(attr_val) == float:
                if attr_type._args['range']:
                    if int(attr_val) in attr_type._args['range']:
                        return return_valid_attr(attr_val=attr_val,
                                                 attr_oper=attr_oper)
                else:
                    return return_valid_attr(attr_val=attr_val,
                                             attr_oper=attr_oper)

        elif attr_type._type == 'GEO':
            if (type(attr_val) == dict
                    and list(attr_val.keys()) == ['type', 'coordinates']
                    and attr_val['type'] in ['Point']
                    and type(attr_val['coordinates']) == list
                    and len(attr_val['coordinates']) == 2
                    and type(attr_val['coordinates'][0]) in [int, float]
                    and type(attr_val['coordinates'][1]) in [int, float]):
                return return_valid_attr(attr_val=attr_val,
                                         attr_oper=attr_oper)

        elif attr_type._type == 'ID':
            if type(attr_val) == BaseModel or type(attr_val) == DictObj:
                return return_valid_attr(attr_val=attr_val._id,
                                         attr_oper=attr_oper)
            elif type(attr_val) == ObjectId:
                return return_valid_attr(attr_val=attr_val,
                                         attr_oper=attr_oper)
            elif type(attr_val) == str:
                try:
                    return return_valid_attr(attr_val=ObjectId(attr_val),
                                             attr_oper=attr_oper)
                except:
                    raise ConvertAttrException(
                        attr_name=attr_name,
                        attr_type=attr_type,
                        val_type=type(attr_val),
                    )

        elif attr_type._type == 'INT':
            if type(attr_val) == str and re.match(r'^[0-9]+$', attr_val):
                attr_val = int(attr_val)

            if type(attr_val) == int:
                if attr_type._args['range']:
                    if attr_val in attr_type._args['range']:
                        return return_valid_attr(attr_val=attr_val,
                                                 attr_oper=attr_oper)
                else:
                    return return_valid_attr(attr_val=attr_val,
                                             attr_oper=attr_oper)

        elif attr_type._type == 'IP':
            if re.match(
                    r'^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$',
                    attr_val,
            ):
                return return_valid_attr(attr_val=attr_val,
                                         attr_oper=attr_oper)

        elif attr_type._type == 'LIST':
            if type(attr_val) == list:
                for i in range(len(attr_val)):
                    child_attr_val = attr_val[i]
                    child_attr_check = False
                    for child_attr_type in attr_type._args['list']:
                        try:
                            attr_val[i] = validate_attr(
                                attr_name=attr_name,
                                attr_type=child_attr_type,
                                attr_val=child_attr_val,
                                allow_opers=allow_opers,
                                allow_none=allow_none,
                                skip_events=skip_events,
                                env=env,
                                query=query,
                                doc=doc,
                                scope=attr_val,
                            )
                            child_attr_check = True
                            break
                        except:
                            pass
                    if not child_attr_check:
                        raise InvalidAttrException(
                            attr_name=attr_name,
                            attr_type=attr_type,
                            val_type=type(attr_val),
                        )
                return return_valid_attr(attr_val=attr_val,
                                         attr_oper=attr_oper)

        elif attr_type._type == 'LOCALE':
            attr_val = validate_attr(
                attr_name=attr_name,
                attr_type=ATTR.DICT(
                    dict={
                        '__key':
                        ATTR.LITERAL(
                            literal=[locale for locale in Config.locales]),
                        '__val':
                        ATTR.STR(),
                        '__min':
                        1,
                        '__req': [Config.locale],
                    }),
                attr_val=attr_val,
                allow_opers=allow_opers,
                allow_none=allow_none,
                skip_events=skip_events,
                env=env,
                query=query,
                doc=doc,
                scope=attr_val,
            )
            attr_val = {
                locale: attr_val[locale]
                if locale in attr_val.keys() else attr_val[Config.locale]
                for locale in Config.locales
            }
            return return_valid_attr(attr_val=attr_val, attr_oper=attr_oper)

        elif attr_type._type == 'LOCALES':
            if attr_val in Config.locales:
                return return_valid_attr(attr_val=attr_val,
                                         attr_oper=attr_oper)

        elif attr_type._type == 'PHONE':
            if attr_type._args['codes']:
                for phone_code in attr_type._args['codes']:
                    if re.match(fr'^\+{phone_code}[0-9]+$', attr_val):
                        return return_valid_attr(attr_val=attr_val,
                                                 attr_oper=attr_oper)
            else:
                if re.match(r'^\+[0-9]+$', attr_val):
                    return return_valid_attr(attr_val=attr_val,
                                             attr_oper=attr_oper)

        elif attr_type._type == 'STR':
            if type(attr_val) == str:
                if attr_type._args['pattern']:
                    if re.match(f'^{attr_type._args["pattern"]}$', attr_val):
                        return return_valid_attr(attr_val=attr_val,
                                                 attr_oper=attr_oper)
                else:
                    return return_valid_attr(attr_val=attr_val,
                                             attr_oper=attr_oper)

        elif attr_type._type == 'TIME':
            if re.match(r'^[0-9]{2}:[0-9]{2}(:[0-9]{2}(\.[0-9]{6})?)?$',
                        attr_val):
                return return_valid_attr(attr_val=attr_val,
                                         attr_oper=attr_oper)

        elif attr_type._type == 'URI_WEB':
            if re.match(
                    r'^https?:\/\/(?:[\w\-\_]+\.)(?:\.?[\w]{2,})+([\?\/].*)?$',
                    attr_val):
                return return_valid_attr(attr_val=attr_val,
                                         attr_oper=attr_oper)

        elif attr_type._type == 'LITERAL':
            if attr_val in attr_type._args['literal']:
                return return_valid_attr(attr_val=attr_val,
                                         attr_oper=attr_oper)

        elif attr_type._type == 'UNION':
            for child_attr in attr_type._args['union']:
                try:
                    validate_attr(
                        attr_name=attr_name,
                        attr_type=child_attr,
                        attr_val=attr_val,
                        allow_opers=allow_opers,
                        allow_none=allow_none,
                        skip_events=skip_events,
                        env=env,
                        query=query,
                        doc=doc,
                        scope=attr_val,
                    )
                except:
                    continue
                return return_valid_attr(attr_val=attr_val,
                                         attr_oper=attr_oper)

        elif attr_type._type == 'TYPE':
            return return_valid_attr(
                attr_val=Config.types[attr_type._args['type']](
                    attr_name=attr_name,
                    attr_type=attr_type,
                    attr_val=attr_val),
                attr_oper=attr_oper,
            )

    except Exception as e:
        if type(e) in [InvalidAttrException, ConvertAttrException]:
            if allow_none:
                return None
            elif attr_type._default != LIMP_VALUES.NONE_VALUE:
                return attr_type._default
            else:
                raise e

    raise InvalidAttrException(attr_name=attr_name,
                               attr_type=attr_type,
                               val_type=type(attr_val))
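
On success `validate_attr` returns the (possibly converted) value; on failure it raises `InvalidAttrException` or `ConvertAttrException`, unless `allow_none` or a type default applies. A hedged usage sketch; the import paths are assumptions based on the file header above, not a verified package layout:

from utils import validate_attr   # assumption: the utils.py listed above
from classes import ATTR          # assumption: where the ATTR factory lives

# A 24-hex-char string is converted to a bson ObjectId by the 'ID' branch;
# an unconvertible value would raise ConvertAttrException instead.
doc_id = validate_attr(
    attr_name='_id',
    attr_type=ATTR.ID(),
    attr_val='f00000000000000000000012',
)
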
Example #9
File: utils.py Project: masaar/limp
def import_modules(*, packages=None):
    import modules as package
    from base_module import BaseModule
    from config import Config
    from test import TEST

    # [DOC] Assign required variables
    modules: Dict[str, BaseModule] = {}
    modules_packages: Dict[str, List[str]] = {}
    user_config = {
        'user_attrs': {},
        'user_auth_attrs': [],
        'user_attrs_defaults': {}
    }

    # [DOC] Iterate over packages in modules folder
    package_prefix = package.__name__ + '.'
    for _, pkgname, _ in pkgutil.iter_modules(package.__path__,
                                              package_prefix):  # pylint: disable=unused-variable
        # [DOC] Check if package should be skipped
        if packages and pkgname.replace('modules.', '') not in packages:
            logger.debug(f'Skipping package: {pkgname}')
            continue
        logger.debug(f'Importing package: {pkgname}')

        # [DOC] Load package and attempt to load config
        child_package = __import__(pkgname, fromlist='*')
        for k, v in child_package.config().items():
            if k == 'packages_versions':
                Config.packages_versions[pkgname.replace('modules.', '')] = v
            elif k in ['tests', 'l10n']:
                logger.warning(
                    f'Defining \'{k}\' in package config is not recommended. Define your values in a separate Python module with the name \'__{k}__\'. Refer to LIMP Docs for more.'
                )
            elif k == 'envs':
                if Config.env:
                    if Config.env in v.keys():
                        for kk, vv in v[Config.env].items():
                            setattr(Config, kk, vv)
                    else:
                        logger.warning(
                            f'Package \'{pkgname.replace("modules.", "")}\' has \'envs\' Config Attr defined, but \'env\' definition \'{Config.env}\' not found.'
                        )
            elif k in ['user_attrs', 'user_auth_attrs', 'user_attrs_defaults']:
                user_config[k] = v
                setattr(Config, k, v)
            elif type(v) == dict:
                if not getattr(Config, k):
                    setattr(Config, k, {})
                getattr(Config, k).update(v)
            else:
                setattr(Config, k, v)

        # [DOC] Iterate over python modules in package
        child_prefix = child_package.__name__ + '.'
        for importer, modname, ispkg in pkgutil.iter_modules(
                child_package.__path__, child_prefix):
            # [DOC] Iterate over python classes in module
            module = __import__(modname, fromlist='*')
            if modname.endswith('__tests__'):
                for test_name in dir(module):
                    if type(getattr(module, test_name)) == TEST:
                        Config.tests[test_name] = getattr(module, test_name)
                continue
            elif modname.endswith('__l10n__'):
                for l10n_name in dir(module):
                    if type(getattr(module, l10n_name)) == L10N:
                        Config.l10n[l10n_name] = getattr(module, l10n_name)
                continue
            for clsname in dir(module):
                # [DOC] Confirm class is subclass of BaseModule
                if (clsname != 'BaseModule'
                        and inspect.isclass(getattr(module, clsname))
                        and issubclass(getattr(module, clsname), BaseModule)):
                    # [DOC] Deny loading LIMP modules with LIMPd-reserved names
                    if clsname.lower() in ['conn', 'heart', 'file', 'watch']:
                        logger.error(
                            f'Module with LIMPd-reserved name \'{clsname.lower()}\' was found. Exiting.'
                        )
                        exit()
                    # [DOC] Load LIMP module and assign module_name attr
                    cls = getattr(module, clsname)
                    module_name = re.sub(r'([A-Z])', r'_\1',
                                         clsname[0].lower() +
                                         clsname[1:]).lower()
                    # [DOC] Deny duplicate LIMP module names
                    if module_name in modules.keys():
                        logger.error(
                            f'Duplicate module name \'{module_name}\'. Exiting.'
                        )
                        exit()
                    # [DOC] Add module to loaded modules dict
                    modules[module_name] = cls()
                    if pkgname not in modules_packages.keys():
                        modules_packages[pkgname] = []
                    modules_packages[pkgname].append(module_name)
    # [DOC] Update User, Session modules with populated attrs
    modules['user'].attrs.update(user_config['user_attrs'])
    modules['user'].defaults['locale'] = Config.locale
    for attr in user_config['user_auth_attrs']:
        modules['user'].unique_attrs.append(attr)
        modules['user'].attrs[f'{attr}_hash'] = ATTR.STR()
        modules['session'].methods['auth']['doc_args'].append({
            'hash':
            ATTR.STR(),
            attr:
            user_config['user_attrs'][attr],
            'groups':
            ATTR.LIST(list=[ATTR.ID()]),
        })
        modules['session'].methods['auth']['doc_args'].append({
            'hash':
            ATTR.STR(),
            attr:
            user_config['user_attrs'][attr]
        })
    modules['user'].defaults.update(user_config['user_attrs_defaults'])
    # [DOC] Assign modules to Config and finalise initialising each module
    Config.modules = modules
    for module in modules.values():
        module._initialise()
    # [DOC] Write api_ref if generate_ref mode
    if Config.generate_ref:
        generate_ref(modules_packages=modules_packages, modules=modules)
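
`import_modules` derives each module's registry name from its class name with a CamelCase-to-snake_case transform. The same expression isolated for reference:

import re

def module_name_for(clsname: str) -> str:
    # Identical to the transform applied per BaseModule subclass above.
    return re.sub(r'([A-Z])', r'_\1', clsname[0].lower() + clsname[1:]).lower()

# module_name_for('Session')      -> 'session'
# module_name_for('StaffProfile') -> 'staff_profile'
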
Example #10
class Setting(BaseModule):
    '''`Setting` module provides data type and controller for settings in LIMP eco-system. It is used by `User` module to provide additional per-user settings. It also allows for global-typed settings.'''
    collection = 'settings'
    attrs = {
        'user':
        ATTR.ID(desc='`_id` of `User` doc the doc belongs to.'),
        'var':
        ATTR.STR(
            desc=
            'Name of the setting. This is unique for every `user` in the module.'
        ),
        'val':
        ATTR.ANY(desc='Value of the setting.'),
        'type':
        ATTR.LITERAL(
            desc=
            'Type of the setting. This sets whether the setting is global or belongs to a user, and whether the user can update it.',
            literal=['global', 'user', 'user_sys']),
    }
    diff = True
    unique_attrs = [('user', 'var', 'type')]
    extns = {
        'val':
        ATTR_MOD(
            condition=lambda skip_events, env, query, doc, scope: type(scope)
            == dict and '__extn' in scope.keys(),
            default=lambda skip_events, env, query, doc, scope: {
                '__extn':
                EXTN(
                    module=scope['__extn']['__module'],
                    attrs=scope['__extn']['__attrs'],
                    force=scope['__extn']['__force'],
                ),
                '__val':
                scope['__extn']['__val'],
            },
        )
    }
    methods = {
        'read': {
            'permissions': [
                PERM(privilege='admin', query_mod={'$limit': 1}),
                PERM(
                    privilege='read',
                    query_mod={
                        'user':
                        '$__user',
                        'type':
                        ATTR_MOD(
                            condition=lambda skip_events, env, query, doc:
                            'type' in doc.keys() and doc['type'] == 'user_sys',
                            default=lambda skip_events, env, query, doc:
                            InvalidAttrException(
                                attr_name='type',
                                attr_type=ATTR.LITERAL(literal=
                                                       ['global', 'user']),
                                val_type=str,
                            ),
                        ),
                        '$limit':
                        1,
                    },
                ),
            ],
            'query_args': [
                {
                    '_id': ATTR.ID(),
                    'type':
                    ATTR.LITERAL(literal=['global', 'user', 'user_sys']),
                },
                {
                    'var': ATTR.STR(),
                    'type': ATTR.LITERAL(literal=['global']),
                },
                {
                    'var': ATTR.STR(),
                    'user': ATTR.ID(),
                    'type': ATTR.LITERAL(literal=['user', 'user_sys']),
                },
            ],
        },
        'create': {
            'permissions': [
                PERM(privilege='admin'),
                PERM(privilege='create', doc_mod={'type': 'user'}),
            ]
        },
        'update': {
            'permissions': [
                PERM(privilege='admin', query_mod={'$limit': 1}),
                PERM(
                    privilege='update',
                    query_mod={
                        'type': 'user',
                        'user': '$__user',
                        '$limit': 1
                    },
                    doc_mod={'type': None},
                ),
            ],
            'query_args': [
                {
                    '_id': ATTR.ID(),
                    'type':
                    ATTR.LITERAL(literal=['global', 'user', 'user_sys']),
                },
                {
                    'var': ATTR.STR(),
                    'type': ATTR.LITERAL(literal=['global']),
                },
                {
                    'var': ATTR.STR(),
                    'user': ATTR.ID(),
                    'type': ATTR.LITERAL(literal=['user', 'user_sys']),
                },
            ],
            'doc_args': {
                'val': ATTR.ANY()
            },
        },
        'delete': {
            'permissions': [PERM(privilege='admin', query_mod={'$limit': 1})],
            'query_args': [{
                '_id': ATTR.ID()
            }, {
                'var': ATTR.STR()
            }],
        },
        'retrieve_file': {
            'permissions': [PERM(privilege='*', query_mod={'type': 'global'})],
            'get_method': True,
        },
    }

    async def pre_create(self, skip_events, env, query, doc, payload):
        if (type(doc['val']) == list and len(doc['val']) == 1
                and type(doc['val'][0]) == dict
                and 'content' in doc['val'][0].keys()):
            doc['val'] = doc['val'][0]
        return (skip_events, env, query, doc, payload)

    async def on_create(self, results, skip_events, env, query, doc, payload):
        if doc['type'] in ['user', 'user_sys']:
            if doc['user'] == env['session'].user._id:
                env['session'].user.settings[doc['var']] = doc['val']
        return (results, skip_events, env, query, doc, payload)

    async def pre_update(self, skip_events, env, query, doc, payload):
        if (type(doc['val']) == list and len(doc['val']) == 1
                and type(doc['val'][0]) == dict
                and 'content' in doc['val'][0].keys()):
            doc['val'] = doc['val'][0]
        return (skip_events, env, query, doc, payload)

    async def on_update(self, results, skip_events, env, query, doc, payload):
        if query['type'][0] in ['user', 'user_sys']:
            if query['user'][0] == env['session'].user._id:
                if type(doc['val']) == dict and '$add' in doc['val'].keys():
                    env['session'].user.settings[query['var']
                                                 [0]] += doc['val']['$add']
                elif type(doc['val']
                          ) == dict and '$multiply' in doc['val'].keys():
                    env['session'].user.settings[
                        query['var'][0]] *= doc['val']['$multiply']
                elif type(
                        doc['val']) == dict and '$append' in doc['val'].keys():
                    env['session'].user.settings[query['var'][0]].append(
                        doc['val']['$append'])
                elif type(
                        doc['val']) == dict and '$remove' in doc['val'].keys():
                    env['session'].user.settings[query['var'][0]].remove(
                        doc['val']['$remove'])
                else:
                    env['session'].user.settings[query['var'][0]] = doc['val']
        return (results, skip_events, env, query, doc, payload)
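
`on_update` above mirrors each supported update operator into the cached `env['session'].user.settings` entry. A pure-Python restatement of that dispatch, as a standalone illustration rather than part of the module:

def apply_setting_oper(current, val):
    # Same oper handling as Setting.on_update, expressed as a plain helper.
    if isinstance(val, dict) and '$add' in val:
        return current + val['$add']
    if isinstance(val, dict) and '$multiply' in val:
        return current * val['$multiply']
    if isinstance(val, dict) and '$append' in val:
        return current + [val['$append']]
    if isinstance(val, dict) and '$remove' in val:
        updated = list(current)
        updated.remove(val['$remove'])
        return updated
    return val

# apply_setting_oper(5, {'$add': 2})          -> 7
# apply_setting_oper(['a'], {'$append': 'b'}) -> ['a', 'b']
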