Example #1
    def __init__(self, testing=False, init=False):

        # Input
        self._testing = testing

        # Some initialization
        self._endpoints = []
        self._definitions = {}
        self._configurations = {}
        self._query_params = {}
        self._schemas_map = {}

        # Reading configuration
        confs_path = os.path.join(os.curdir, CONF_PATH)
        defaults_path = CONF_FOLDERS.get('defaults_path', confs_path)
        base_path = CONF_FOLDERS.get('base_path', confs_path)
        projects_path = CONF_FOLDERS.get('projects_path', confs_path)
        submodules_path = CONF_FOLDERS.get('submodules_path', confs_path)

        try:
            self._configurations, self._extended_project, self._extended_path = \
                read_configuration(
                    default_file_path=defaults_path,
                    base_project_path=base_path,
                    projects_path=projects_path,
                    submodules_path=submodules_path
                )
        except AttributeError as e:
            log.exit(e)

        if not init:
            self.do_schema()
            self.find_endpoints()
            self.do_swagger()
Example #2
    def custom_init(self,
                    pinit=False,
                    pdestroy=False,
                    abackend=None,
                    **kwargs):
        # NOTE: we ignore args here

        # if pinit and not self.variables.get('external'):
        #     log.debug("waiting for internal certificates")
        #     # should actually connect with user and password
        #     # and verify if GSI is already registered with admin rodsminer
        #     import time
        #     time.sleep(5)

        # recover instance with the parent method
        session = super().custom_init()

        # IF the variable 'IRODS_ANONYMOUS' is set THEN
        # Check if external iRODS / B2SAFE has the 'anonymous' user available
        user = 'anonymous'
        if self.variables.get('external') and self.variables.get(user):
            if not session.query_user_exists(user):
                log.exit(
                    "Cannot find '{}' inside " +
                    "the currently connected iRODS instance",
                    user,
                )

        return session
Example #3
    def retry(self, retry_interval=3, max_retries=-1):
        retry_count = 0

        # Get the exception which will signal a missing connection
        exceptions = self.set_connection_exception()
        if exceptions is None:
            exceptions = (BaseException, )

        # max_retries <= 0 means "retry forever"
        while max_retries <= 0 or retry_count < max_retries:

            retry_count += 1
            if retry_count > 1:
                log.verbose("testing again in {} secs", retry_interval)

            try:
                obj = self.custom_connection()
            except exceptions as e:
                log.error("Caught: {}({})", e.__class__.__name__, e)
                log.exit("Service '{}' not available", self.name)
            else:
                break

            # Increase the sleep time when doing a lot of retries
            if retry_count % 3 == 0:
                log.debug("Incrementing interval")
                retry_interval += retry_interval

        return obj
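
The loop above depends on the rapydo log helpers and connection hooks. A minimal, self-contained sketch of the same retry-with-backoff idea (assuming that max_retries <= 0 means "retry forever" and that the interval doubles every third attempt, as in the original loop) could look like this:

import time


def retry_connection(connect, retry_interval=3, max_retries=-1):
    """Call connect() until it succeeds; max_retries <= 0 retries forever."""
    retry_count = 0
    while max_retries <= 0 or retry_count < max_retries:
        retry_count += 1
        try:
            return connect()
        except Exception as e:
            print("Attempt {} failed: {}".format(retry_count, e))
        # Increase the sleep time when doing a lot of retries
        if retry_count % 3 == 0:
            retry_interval += retry_interval
        time.sleep(retry_interval)
    raise RuntimeError("Service not available")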
Example #4
    def get_models(self):
        """ Read models from base/custom yaml files """

        # BASE definitions
        path = os.path.join(ABS_RESTAPI_PATH, MODELS_DIR)
        try:
            data = load_yaml_file('swagger.yaml', path=path)
        except AttributeError as e:
            log.exit(e)

        # EXTENDED definitions, if any
        extended_models = None
        if EXTENDED_PACKAGE != EXTENDED_PROJECT_DISABLED:
            path = os.path.join(os.curdir, EXTENDED_PACKAGE, MODELS_DIR)
            # NOTE: a missing extended models file is not an error; only log it at verbose level
            try:
                extended_models = load_yaml_file('swagger.yaml', path=path)
            except AttributeError as e:
                log.verbose(e)

        # CUSTOM definitions
        path = os.path.join(os.curdir, CUSTOM_PACKAGE, MODELS_DIR)
        try:
            custom_models = load_yaml_file('swagger.yaml', path=path)
        except AttributeError as e:
            log.verbose(e)
            custom_models = {}

        if extended_models is None:
            return mix(data, custom_models)

        m1 = mix(data, extended_models)
        return mix(m1, custom_models)
Example #5
    def check_configuration(self):

        try:
            self.services_configuration = load_yaml_file(
                file='services.yaml', path=ABS_RESTAPI_CONFSPATH)
        except AttributeError as e:
            log.exit(e)

        for service in self.services_configuration:

            name, prefix = self.prefix_name(service)

            # Was this service enabled by the developer?
            enable_var = str(prefix + 'enable').upper()
            self.available_services[name] = self.get_bool_from_os(enable_var)

            if self.available_services[name]:

                # read variables
                variables = self.load_variables(service, enable_var, prefix)
                service['variables'] = variables

                # set auth service
                if name == self.authentication_name:
                    self.authentication_service = variables.get('service')

        if self.authentication_service is None:
            log.info("No service defined for authentication")
        else:
            log.info(
                "Authentication based on '{}' service",
                self.authentication_service
            )
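
get_bool_from_os() is defined elsewhere on the detector and is not shown in these snippets. A hypothetical, standalone sketch of an environment-variable boolean reader with the semantics the loop above seems to rely on (unset or false-like values leave the service disabled) might be:

import os


def get_bool_from_os(name, default=False):
    # Hypothetical helper: treat unset and common false-like strings as False
    value = os.environ.get(name)
    if value is None:
        return default
    return value.strip().lower() not in ('0', 'false', 'no', '')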
Example #6
    def custom_init(self, pinit=False, pdestroy=False, abackend=None, **kwargs):
        """ Note: we ignore args here """

        # recover instance with the parent method
        graph = super().custom_init()

        # db.init_app(self.app)

        with self.app.app_context():

            if pdestroy:
                log.critical("Destroy current Neo4j data")
                from neomodel import clear_neo4j_database

                clear_neo4j_database(graph.db)

            if pinit:

                auto_index = self.variables.get("autoindexing", 'True') == 'True'

                if auto_index:
                    try:
                        from neomodel import remove_all_labels, install_all_labels
                        remove_all_labels()
                        install_all_labels()
                    except BaseException as e:
                        log.exit(str(e))

        return graph
Example #7
    def import_secret(self, abs_filename):
        """
        Load the jwt secret from a file
        """

        try:
            # Use a context manager so the file handle is always closed
            with open(abs_filename, 'rb') as secret_file:
                self.JWT_SECRET = secret_file.read()
            return self.JWT_SECRET
        except IOError:
            log.exit("Jwt secret file {} not found", abs_filename)
Example #8
    def do_swagger(self):

        # SWAGGER read endpoints definition
        swag = BeSwagger(self._endpoints, self)
        swag_dict = swag.swaggerish()

        # TODO: update internal endpoints from swagger
        self._endpoints = swag._endpoints[:]

        # SWAGGER validation
        if not swag.validation(swag_dict):
            log.exit("Current swagger definition is invalid")

        self._definitions = swag_dict
Example #9
    def load_classes(self):

        for service in self.services_configuration:

            name, _ = self.prefix_name(service)

            if not self.available_services.get(name):
                continue
            log.verbose("Looking for class {}", name)

            variables = service.get('variables')
            ext_name = service.get('class')

            # Get the existing class
            try:
                MyClass = self.load_class_from_module(ext_name, service=service)

                # Passing variables
                MyClass.set_variables(variables)

                if service.get('load_models'):

                    base_models = self.meta.import_models(
                        name, BACKEND_PACKAGE, exit_on_fail=True
                    )
                    if EXTENDED_PACKAGE == EXTENDED_PROJECT_DISABLED:
                        extended_models = {}
                    else:
                        extended_models = self.meta.import_models(
                            name, EXTENDED_PACKAGE, exit_on_fail=False
                        )

                    custom_models = self.meta.import_models(
                        name, CUSTOM_PACKAGE, exit_on_fail=False
                    )

                    MyClass.set_models(base_models, extended_models, custom_models)

            except AttributeError as e:
                log.error(str(e))
                log.exit('Invalid Extension class: {}', ext_name)

            # Save
            self.services_classes[name] = MyClass
            log.debug("Got class definition for {}", MyClass)

        if len(self.services_classes) < 1:
            raise KeyError("No classes were recovered!")

        return self.services_classes
Example #10
    def myinit(cls):

        credentials = get_project_configuration(
            "variables.backend.credentials")

        if credentials.get('username') is not None:
            log.exit("Obsolete use of variables.backend.credentials.username")

        if credentials.get('password') is not None:
            log.exit("Obsolete use of variables.backend.credentials.password")

        # cls.default_user = credentials.get('username', None)
        # cls.default_password = credentials.get('password', None)
        cls.default_user = Detector.get_global_var('AUTH_DEFAULT_USERNAME')
        cls.default_password = Detector.get_global_var('AUTH_DEFAULT_PASSWORD')
        if cls.default_user is None or cls.default_password is None:
            log.exit("Default credentials are unavailable!")

        roles = credentials.get('roles', {})
        cls.default_role = roles.get('default')
        cls.role_admin = roles.get('admin')
        cls.default_roles = [
            roles.get('user'),
            roles.get('internal'), cls.role_admin
        ]
        if cls.default_role is None or None in cls.default_roles:
            log.exit("Default roles are not available!")
Example #11
def get_service_address(variables, host_var, port_var, service):

    host = variables.get(host_var)
    if host is None:
        log.exit("Cannot find any variable matching {} for {}", host_var,
                 service)

    port = variables.get(port_var)
    if port is None:
        log.exit("Cannot find any variable matching {} for {}", port_var,
                 service)

    log.debug("Checking address: {}:{}", host, port)

    return host, int(port)
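
For illustration, a call with a plain variables dict (the values are hypothetical; the 'host'/'port' keys match how the wait and verify commands below invoke the function):

variables = {'host': 'postgres.local', 'port': '5432'}
host, port = get_service_address(variables, 'host', 'port', 'postgres')
# host == 'postgres.local', port == 5432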
Example #12
    def load_class_from_module(self, classname, service=None):

        if service is None:
            flaskext = ''
        else:
            flaskext = '.' + service.get('extension')

        # Try inside our extensions
        module = Meta.get_module_from_string(
            modulestring=BACKEND_PACKAGE + '.flask_ext' + flaskext,
            exit_on_fail=True
        )
        if module is None:
            log.exit("Missing {} for {}", flaskext, service)

        return getattr(module, classname)
Example #13
def mywait():
    """
    Wait for each service on its host:port configuration,
    basing the check on a socket connection.
    """
    from restapi.services.detect import detector

    for name, myclass in detector.services_classes.items():

        if name == 'authentication':
            continue

        if name == 'celery':

            broker = myclass.variables.get('broker')

            if broker == 'RABBIT':
                service_vars = detector.load_variables({'prefix': 'rabbitmq'})
            elif broker == 'REDIS':
                service_vars = detector.load_variables({'prefix': 'redis'})
            else:
                log.exit("Invalid celery broker: {}", broker)

            host, port = get_service_address(service_vars, 'host', 'port',
                                             broker)

            wait_socket(host, port, broker)

            backend = myclass.variables.get('backend')
            if backend == 'RABBIT':
                service_vars = detector.load_variables({'prefix': 'rabbitmq'})
            elif backend == 'REDIS':
                service_vars = detector.load_variables({'prefix': 'redis'})
            elif backend == 'MONGODB':
                service_vars = detector.load_variables({'prefix': 'mongo'})
            else:
                log.exit("Invalid celery backend: {}", backend)

            host, port = get_service_address(service_vars, 'host', 'port',
                                             backend)

            wait_socket(host, port, backend)
        else:
            host, port = get_service_address(myclass.variables, 'host', 'port',
                                             name)

            wait_socket(host, port, name)
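
wait_socket() is imported from elsewhere in the project and is not shown here. Based on the docstring above ("basing the check on a socket connection"), a plausible but hypothetical sketch is a simple TCP probe in a loop:

import socket
import time


def wait_socket(host, port, service_name, sleep_time=1, timeout=1):
    # Hypothetical sketch: poll host:port until a TCP connection succeeds
    while True:
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
            s.settimeout(timeout)
            try:
                if s.connect_ex((host, port)) == 0:
                    print("Service {} is reachable".format(service_name))
                    return
            except socket.gaierror:
                pass
        print("Waiting for {} ({}:{})".format(service_name, host, port))
        time.sleep(sleep_time)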
Example #14
def tests(wait, core, file, folder):
    """Run the test suite and compute coverage"""

    if wait:
        while starting_up():
            log.debug('Waiting for service startup')
            time.sleep(5)
        mywait()

    log.debug("Starting unit tests: {}", pretty_errors)

    # launch unittests and also compute coverage
    log.warning("Running all tests and computing coverage.\n" +
                "This may take a few minutes.")

    parameters = []
    if core:
        parameters.append(current_package)
    elif file is not None:
        if not os.path.isfile(os.path.join("tests", file)):
            log.exit("File not found: {}", file)
        else:
            parameters.append("default")
            parameters.append(file)
    elif folder is not None:
        if not os.path.isdir(os.path.join("tests", folder)):
            log.exit("Folder not found: {}", folder)
        else:
            parameters.append("default")
            parameters.append(folder)

    try:

        # TODO: convert the `pyunittests` script from the docker image into python
        # Pattern in plumbum library for executing a shell command
        from plumbum import local
        command = local["pyunittests"]
        log.verbose("Executing command pyunittests {}", parameters)
        output = command(parameters)

    except Exception as e:
        log.error(str(e))
        raise e

    log.info("Completed:\n{}", output)
Example #15
def verify(services):
    """Verify connected service"""
    from restapi.services.detect import detector

    if len(services) == 0:
        log.warning("Empty list of services, nothing to be verified.")
        log.info("Provide list of services by using --services option")

    for service in services:
        myclass = detector.services_classes.get(service)
        if myclass is None:
            log.exit("Service \"{}\" was NOT detected", service)
        log.info("Verifying service: {}", service)
        host, port = get_service_address(myclass.variables, 'host', 'port',
                                         service)
        wait_socket(host, port, service)

    log.info("Completed successfully")
Example #16
def launch():
    """Launch the RAPyDo-based HTTP API server"""
    args = [
        'run',
        '--host',
        BIND_INTERFACE,
        '--port',
        os.environ.get('FLASK_PORT'),
        '--reload',
        '--no-debugger',
        '--eager-loading',
        '--with-threads',
    ]

    if starting_up():
        log.exit(
            "Please wait a few more seconds: resources are still starting up")
    else:
        main(args)
        log.warning("Server shutdown")
Example #17
File: meta.py  Project: fossabot/http-api
    def get_module_from_string(modulestring,
                               prefix_package=False,
                               exit_if_not_found=False,
                               exit_on_fail=False):
        """
        Import a module whose dotted path
        is stored as a string in a variable
        """

        module = None
        if prefix_package:
            modulestring = BACKEND_PACKAGE + '.' + modulestring.lstrip('.')

        # Backward compatibility for Python < 3.6,
        # where ModuleNotFoundError does not exist yet
        try:
            import_exceptions = (ModuleNotFoundError, ImportError)
        except NameError:
            import_exceptions = ImportError

        try:
            # Dynamically import the module from its dotted path
            module = import_module(modulestring)
        except import_exceptions as e:  # pylint:disable=catching-non-exception
            if exit_on_fail:
                raise e
            elif exit_if_not_found:
                log.exit("Failed to load module:\n{}", e)
            # else:
            #     log.warning("Failed to load module:\n{}", e)
        except BaseException as e:
            if exit_on_fail:
                raise e
            else:
                log.error("Module {} not found.\nError: {}", modulestring, e)

        return module
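
Typical usage, mirroring how the endpoint scanner below calls it: the dotted path is only known at runtime, and a missing module aborts the startup.

# Illustrative call; restapi.utilities.logs is one of the modules imported in these snippets
module = Meta.get_module_from_string(
    'restapi.utilities.logs', exit_if_not_found=True)
log = getattr(module, 'log')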
Example #18
"""
SQL-based (SQLAlchemy) handling of the authentication process
"""

import pytz
import sqlalchemy
from datetime import datetime, timedelta
from restapi.services.authentication import BaseAuthentication
from restapi.services.detect import detector
from restapi.exceptions import RestApiException
from restapi.utilities.htmlcodes import hcodes
from restapi.utilities.uuid import getUUID
from restapi.utilities.logs import log

if not detector.check_availability(__name__):
    log.exit("No sqlalchemy service available for authentication")


class Authentication(BaseAuthentication):

    # Also used by POST user
    def create_user(self, userdata, roles):

        if "authmethod" not in userdata:
            userdata["authmethod"] = "credentials"

        if "password" in userdata:
            userdata["password"] = self.get_password_hash(userdata["password"])

        if "uuid" not in userdata:
            userdata["uuid"] = getUUID()
Example #19
# -*- coding: utf-8 -*-
"""
Mongodb based implementation
"""

from pytz import utc
from datetime import datetime, timedelta
from restapi.services.authentication import BaseAuthentication
from restapi.flask_ext.flask_mongo import AUTH_DB
from restapi.utilities.uuid import getUUID
from restapi.services.detect import detector
from restapi.utilities.logs import log

if not detector.check_availability(__name__):
    log.exit("No mongodb service available for authentication")


class Authentication(BaseAuthentication):
    def __init__(self):

        # Read init credentials and configuration
        super().__init__()

        # Get the instance for mongodb
        name = __name__.split('.')[::-1][0]  # returns 'mongo'

        extension = detector.services_classes.get(name)
        self.db = extension().get_instance(dbname=AUTH_DB)

    def custom_user_properties(self, userdata):
        new_userdata = super(Authentication,
Example #20
    def init_services(
        self, app, worker_mode=False, project_init=False, project_clean=False
    ):

        instances = {}
        auth_backend = None

        for service in self.services_configuration:

            name, _ = self.prefix_name(service)

            if not self.available_services.get(name):
                continue

            if name == self.authentication_name and auth_backend is None:
                if self.authentication_service is None:
                    log.warning("No authentication")
                    continue
                else:
                    log.exit(
                        "Auth service '{}' is unreachable".format(
                            self.authentication_service)
                    )

            args = {}
            if name == self.task_service_name:
                args['worker_mode'] = worker_mode

            # Get extension class and build the extension object
            ExtClass = self.services_classes.get(name)
            try:
                ext_instance = ExtClass(app, **args)
            except TypeError as e:
                log.exit('Your class {} is not compliant:\n{}', name, e)
            else:
                self.extensions_instances[name] = ext_instance

            if not project_init:
                do_init = False
            elif name == self.authentication_service:
                do_init = True
            elif name == self.authentication_name:
                do_init = True
            else:
                do_init = False

            # Initialize the real service getting the first service object
            log.debug("Initializing {} (pinit={})", name, do_init)
            service_instance = ext_instance.custom_init(
                pinit=do_init, pdestroy=project_clean, abackend=auth_backend
            )
            instances[name] = service_instance

            if name == self.authentication_service:
                auth_backend = service_instance

            # NOTE: commented out, looks like a duplicate of the try/except above
            # self.extensions_instances[name] = ext_instance

            # Injecting into the Celery Extension Class
            # all celery tasks found in *vanilla_package/tasks*
            if name == self.task_service_name:
                do_init = True

                task_package = "{}.tasks".format(CUSTOM_PACKAGE)

                submodules = self.meta.import_submodules_from_package(
                    task_package, exit_on_fail=True
                )
                for submodule in submodules:
                    tasks = Meta.get_celery_tasks_from_module(submodule)

                    for func_name, funct in tasks.items():
                        setattr(ExtClass, func_name, funct)

        if len(self.extensions_instances) < 1:
            raise KeyError("No instances available for modules")

        # Only once in a lifetime
        if project_init:
            self.project_initialization(instances, app=app)

        return self.extensions_instances
Example #21
# -*- coding: utf-8 -*-

import sys
import os
import yaml
import json

from restapi.utilities.configuration import mix
from restapi.utilities.logs import log

if len(sys.argv) <= 1:
    log.exit("Usage: {} project_name", sys.argv[0])

PROJECT = sys.argv[1]
PROJECT_DIR = "projects/{}/backend/swagger".format(PROJECT)

if not os.path.exists(PROJECT_DIR):
    log.exit("{} folder does not exist", PROJECT_DIR)

if not os.path.isdir(PROJECT_DIR):
    log.exit("{} is not a folder", PROJECT_DIR)

yamls = ["specs", "get", "post", "put", "patch", "delete", "head"]
for swagger_folder in os.listdir(PROJECT_DIR):

    conf_output = ""
    decorators_output = ""
    pfile = None
    pclass = None
    mappings = None
    for y in yamls:
Example #22
from restapi.confs import get_project_configuration


if Detector.get_global_var("AUTH_SECOND_FACTOR_AUTHENTICATION", '') == 'TOTP':
    try:
        import pyotp
        import pyqrcode

        # import base64
        from io import BytesIO
    # FIXME: cannot use the proper exception (available in python 3.6+)
    # because we are stuck on python 3.5 on IMC
    # except ModuleNotFoundError:
    except BaseException:
        log.exit(
            "Missing libraries for TOTP 2FA authentication"
        )


class Authenticator(BaseExtension):
    """
    The generic authentication Flask extension
    """

    def custom_connection(self, **kwargs):

        # What service will hold authentication?
        auth_service = self.variables.get('service')
        auth_module = Meta.get_authentication_module(auth_service)
        custom_auth = auth_module.Authentication()
Example #23
def read_configuration(default_file_path, base_project_path, projects_path,
                       submodules_path):
    """
    Read and merge the default, project and (optional) extended-project configuration
    """

    custom_configuration = load_yaml_file(PROJECT_CONF_FILENAME,
                                          path=base_project_path,
                                          keep_order=True)

    # Verify custom project configuration
    project = custom_configuration.get('project')
    if project is None:
        raise AttributeError("Missing project configuration")

    variables = ['title', 'description', 'version', 'rapydo']

    for key in variables:
        if project.get(key) is None:

            log.exit(
                "Project not configured, missing key '{}' in file {}/{}",
                key,
                base_project_path,
                PROJECT_CONF_FILENAME,
            )

    if default_file_path is None:
        base_configuration = {}
    else:
        base_configuration = load_yaml_file(file=PROJECTS_DEFAULTS_FILE,
                                            path=default_file_path,
                                            keep_order=True)

    extended_project = project.get('extends')

    if extended_project is None:
        # Mix default and custom configuration
        return mix(base_configuration, custom_configuration), None, None

    extends_from = project.get('extends-from', 'projects')

    if extends_from == "projects":
        extend_path = projects_path
    elif extends_from.startswith("submodules/"):
        repository_name = (extends_from.split("/")[1]).strip()
        if repository_name == '':
            log.exit('Invalid repository name in extends-from, name is empty')

        extend_path = submodules_path
    else:
        suggest = "Expected values: 'projects' or 'submodules/${REPOSITORY_NAME}'"
        log.exit("Invalid extends-from parameter: {}.\n{}", extends_from,
                 suggest)

    if not os.path.exists(extend_path):
        log.exit("From project not found: {}", extend_path)

    extend_file = "extended_{}".format(PROJECT_CONF_FILENAME)
    extended_configuration = load_yaml_file(file=extend_file,
                                            path=extend_path,
                                            keep_order=True)
    m1 = mix(base_configuration, extended_configuration)
    return mix(m1, custom_configuration), extended_project, extend_path
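
The mix() helper imported from restapi.utilities.configuration is not shown in these snippets. Assuming it is a recursive dictionary merge in which the second argument wins on conflicts (which is what the calls above rely on), a minimal sketch could be:

def mix(base, custom):
    # Recursive dict merge: values from 'custom' override values from 'base'
    merged = dict(base or {})
    for key, value in (custom or {}).items():
        if isinstance(value, dict) and isinstance(merged.get(key), dict):
            merged[key] = mix(merged[key], value)
        else:
            merged[key] = value
    return merged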
Example #24
import os
import pwd
import re
from datetime import datetime, timedelta

import pytz

from restapi.utilities.logs import log

try:
    from OpenSSL import crypto
    from plumbum import local
    import dateutil.parser
except ImportError as e:
    log.exit("\nThis module requires an extra package:\n{}", e)


class Certificates:

    _dir = os.environ.get('CERTDIR')
    _proxyfile = 'userproxy.crt'

    def __init__(self):
        log.warning(
            "All methods of this class are static, no need to create an instance"
        )

    @classmethod
    def get_dn_from_cert(cls, certdir, certfilename, ext='pem'):
Example #25
    def find_endpoints(self):

        ##################
        # Walk folders looking for endpoints

        endpoints_folders = []
        # base swagger dir (rapydo/http-api)
        endpoints_folders.append({'path': ABS_RESTAPI_PATH, 'iscore': True})

        # swagger dir from extended project, if any
        if self._extended_project is not None:

            endpoints_folders.append({
                'path': os.path.join(os.curdir, self._extended_project),
                'iscore': False,
            })

        # custom swagger dir
        endpoints_folders.append({
            'path': os.path.join(os.curdir, CUSTOM_PACKAGE),
            'iscore': False,
        })

        # already_loaded = {}
        for folder in endpoints_folders:

            base_dir = folder.get('path')
            iscore = folder.get('iscore')
            # get last item of the path
            # normpath is required to strip the final '/', if any
            base_module = os.path.basename(os.path.normpath(base_dir))

            if iscore:
                apis_dir = os.path.join(base_dir, 'resources')
                apiclass_module = '{}.resources'.format(base_module)
            else:
                apis_dir = os.path.join(base_dir, 'apis')
                apiclass_module = '{}.apis'.format(base_module)

            # Look for all files in the apis folder
            for epfiles in os.listdir(apis_dir):

                # get module name (e.g. apis.filename)
                module_file = os.path.splitext(epfiles)[0]
                module_name = "{}.{}".format(apiclass_module, module_file)
                # Convert module name into a module
                log.debug("Importing {}", module_name)
                try:
                    module = Meta.get_module_from_string(
                        module_name, exit_on_fail=True, exit_if_not_found=True)
                except BaseException as e:
                    log.exit("Cannot import {}\nError: {}", module_name, e)

                # Extract classes from the module
                # classes = meta.get_classes_from_module(module)
                classes = meta.get_new_classes_from_module(module)
                for class_name in classes:
                    ep_class = classes.get(class_name)
                    # Filtering out classes without required data
                    if not hasattr(ep_class, "methods"):
                        continue
                    if ep_class.methods is None:
                        continue

                    # if class_name in already_loaded:
                    #     log.warning(
                    #         "Skipping import of {} from {}.{}, already loded from {}",
                    #         class_name,
                    #         apis_dir,
                    #         module_file,
                    #         already_loaded[class_name],
                    #     )
                    #     continue
                    # already_loaded[class_name] = "{}.{}".format(apis_dir, module_file)
                    log.debug("Importing {} from {}.{}", class_name, apis_dir,
                              module_file)
                    if not self._testing:
                        skip = False
                        for var in ep_class.depends_on:
                            pieces = var.strip().split(' ')
                            pieces_num = len(pieces)
                            if pieces_num == 1:
                                dependency = pieces.pop()
                                negate = False
                            elif pieces_num == 2:
                                negate, dependency = pieces
                                negate = negate.lower() == 'not'
                            else:
                                log.exit('Wrong parameter: {}', var)

                            check = detector.get_bool_from_os(dependency)
                            if negate:
                                check = not check

                            # Skip if not meeting the requirements of the dependency
                            if not check:
                                skip = True
                                break

                        if skip:
                            log.debug(
                                "Skipping '{} {}' due to unmet dependency: {}",
                                module_name, class_name, dependency)
                            continue

                    # Building endpoint
                    endpoint = EndpointElements(custom={})

                    endpoint.cls = ep_class
                    endpoint.exists = True
                    endpoint.iscore = iscore

                    # Global tags to be applied to all methods
                    endpoint.tags = ep_class.labels

                    # base URI
                    base = ep_class.baseuri
                    if base not in BASE_URLS:
                        log.warning("Invalid base {}", base)
                        base = API_URL
                    base = base.strip('/')
                    endpoint.base_uri = base

                    endpoint.uris = {}  # attrs python lib bug?
                    endpoint.custom['schema'] = {
                        'expose': ep_class.expose_schema,
                        'publish': {},
                    }

                    endpoint.methods = {}

                    mapping_lists = []
                    for m in ep_class.methods:
                        method_name = "_{}".format(m)
                        if not hasattr(ep_class, method_name):

                            method_name = m
                            if not hasattr(ep_class, method_name):
                                log.warning("{} configuration not found in {}",
                                            m, class_name)
                                continue
                            # Enable this warning to start conversions GET -> _GET
                            # Find other warning like this by searching:
                            # **FASTAPI**
                            # else:
                            #     log.warning(
                            #         "Obsolete dict {} in {}", m, class_name
                            #     )

                        conf = getattr(ep_class, method_name)
                        kk = conf.keys()
                        mapping_lists.extend(kk)
                        endpoint.methods[m.lower()] = copy.deepcopy(conf)

                    if endpoint.custom['schema']['expose']:
                        for uri in mapping_lists:
                            total_uri = '/{}{}'.format(endpoint.base_uri, uri)
                            schema_uri = '{}/schemas{}'.format(API_URL, uri)

                            p = hex(id(endpoint.cls))
                            self._schema_endpoint.uris[uri + p] = schema_uri

                            self._schemas_map[schema_uri] = total_uri

                    self._endpoints.append(endpoint)
Example #26
"""
Remove all nodes and relationships:
MATCH (n) OPTIONAL MATCH (n)-[r]-() DELETE n,r

Remove tokens:
MATCH (a:Token) WHERE NOT (a)<-[]-() DELETE a

"""

from datetime import datetime, timedelta
import pytz
from restapi.utilities.uuid import getUUID
from restapi.services.authentication import BaseAuthentication
from restapi.services.detect import detector
from restapi.utilities.logs import log

if not detector.check_availability(__name__):
    log.exit("No neo4j GraphDB service found for authentication")


class Authentication(BaseAuthentication):
    def get_user_object(self, username=None, payload=None):

        from neomodel.exceptions import DeflateError
        from neo4j.exceptions import ServiceUnavailable

        user = None
        try:
            if username is not None:
                user = self.db.User.nodes.get(email=username)
            if payload is not None and 'user_id' in payload:
                user = self.db.User.nodes.get(uuid=payload['user_id'])
        except ServiceUnavailable as e:
Example #27
    def custom_connection(self, **kwargs):

        if len(kwargs) > 0:
            print("TODO: use args for connection?", kwargs)

        uri = '{}://{}:{}@{}:{}/{}'.format(
            self.variables.get('dbtype', 'postgresql'),
            self.variables.get('user'),
            self.variables.get('password'),
            self.variables.get('host'),
            self.variables.get('port'),
            self.variables.get('db'),
        )

        # TODO: in case we need different connection binds
        # (multiple connections with sql) then:
        # SQLALCHEMY_BINDS = {
        #     'users':        'mysqldb://localhost/users',
        #     'appmeta':      'sqlite:////path/to/appmeta.db'
        # }
        self.app.config['SQLALCHEMY_DATABASE_URI'] = uri

        # self.app.config['SQLALCHEMY_POOL_TIMEOUT'] = 3
        self.app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False

        # pool_size = self.variables.get('poolsize')
        # if pool_size is not None:
        #     # sqlalchemy docs: http://j.mp/2xT0GOc
        #     # defaults: overflow=10, pool_size=5
        #     # self.app.config['SQLALCHEMY_MAX_OVERFLOW'] = 0
        #     self.app.config['SQLALCHEMY_POOL_SIZE'] = int(pool_size)
        #     log.debug("Setting SQLALCHEMY_POOL_SIZE = {}", pool_size)

        obj_name = 'db'
        # search the original sqlalchemy object into models
        db = Meta.obj_from_models(obj_name, self.name, CUSTOM_PACKAGE)

        try:
            from flask_migrate import Migrate

            # The Alembic package, which handles the migration work, does not recognize
            # type changes in columns by default. If you want that fine level of
            # detection you need to enable the compare_type option
            Migrate(self.app, db, compare_type=True)
        except BaseException as e:
            log.warning("Flask Migrate not enabled")
            log.error(str(e))

        # no 'db' set in CUSTOM_PACKAGE, looking for EXTENDED PACKAGE, if any
        if db is None and EXTENDED_PACKAGE != EXTENDED_PROJECT_DISABLED:
            db = Meta.obj_from_models(obj_name, self.name, EXTENDED_PACKAGE)

        if db is None:
            log.warning("No sqlalchemy db imported in custom package")
            db = Meta.obj_from_models(obj_name, self.name, BACKEND_PACKAGE)
        if db is None:
            log.exit("Could not get {} within {} models", obj_name, self.name)

        # Overwrite the db.session created by flask_sqlalchemy, due to errors
        # with transactions on concurrent requests
        from sqlalchemy import create_engine
        from sqlalchemy.orm import scoped_session
        from sqlalchemy.orm import sessionmaker

        db.engine_bis = create_engine(uri)
        db.session = scoped_session(sessionmaker(bind=db.engine_bis))

        return db
Example #28
def create_app(
    name=__name__,
    init_mode=False,
    destroy_mode=False,
    worker_mode=False,
    testing_mode=False,
    skip_endpoint_mapping=False,
    **kwargs,
):
    """ Create the server instance for the Flask application """

    if PRODUCTION and testing_mode:
        log.exit("Unable to execute tests in production")

    # Initialize reading of all files
    mem.customizer = Customizer(testing_mode, init_mode)
    mem.geo_reader = geolite2.reader()
    # when to close??
    # geolite2.close()

    # Add template dir for output in HTML
    kwargs['template_folder'] = os.path.join(ABS_RESTAPI_PATH, 'templates')

    # Flask app instance
    microservice = Flask(name, **kwargs)

    # Add commands to 'flask' binary
    if init_mode:
        microservice.config['INIT_MODE'] = init_mode
        skip_endpoint_mapping = True
    elif destroy_mode:
        microservice.config['DESTROY_MODE'] = destroy_mode
        skip_endpoint_mapping = True
    elif testing_mode:
        microservice.config['TESTING'] = testing_mode
        init_mode = True
    elif worker_mode:
        skip_endpoint_mapping = True

    # Fix proxy wsgi for production calls
    microservice.wsgi_app = ProxyFix(microservice.wsgi_app)

    # CORS
    if not PRODUCTION:
        cors = CORS(
            allow_headers=[
                'Content-Type', 'Authorization', 'X-Requested-With',
                'x-upload-content-length', 'x-upload-content-type',
                'content-range'
            ],
            supports_credentials=['true'],
            methods=['GET', 'POST', 'PUT', 'DELETE', 'PATCH', 'OPTIONS'],
        )

        cors.init_app(microservice)
        log.verbose("FLASKING! Injected CORS")

    # Enabling our internal Flask customized response
    microservice.response_class = InternalResponse

    # Flask configuration from config file
    microservice.config.from_object(config)
    log.debug("Flask app configured")

    if PRODUCTION:
        log.info("Production server mode is ON")

    # Find services and try to connect to the ones available
    extensions = detector.init_services(
        app=microservice,
        worker_mode=worker_mode,
        project_init=init_mode,
        project_clean=destroy_mode,
    )

    if worker_mode:
        microservice.extensions = extensions

    # Restful plugin
    if not skip_endpoint_mapping:
        # Triggering automatic mapping of REST endpoints
        rest_api = Api(catch_all_404s=True)

        # Basic configuration (simple): from example class
        if len(mem.customizer._endpoints) < 1:
            log.error("No endpoints found!")

            raise AttributeError("Follow the docs and define your endpoints")

        for resource in mem.customizer._endpoints:
            # urls = [uri for _, uri in resource.uris.items()]
            urls = list(resource.uris.values())

            # Create the restful resource with it;
            # this method is from RESTful plugin
            rest_api.add_resource(resource.cls, *urls)

            log.verbose("Map '{}' to {}", resource.cls.__name__, urls)

        # Enable all schema endpoints to be mapped with this extra step
        if len(mem.customizer._schema_endpoint.uris) > 0:
            log.debug("Found one or more schema to expose")
            urls = [
                uri for _, uri in mem.customizer._schema_endpoint.uris.items()
            ]
            rest_api.add_resource(mem.customizer._schema_endpoint.cls, *urls)

        # HERE all endpoints will be registered by using FlaskRestful
        rest_api.init_app(microservice)

        microservice.services_instances = {}
        for m in detector.services_classes:
            ExtClass = detector.services_classes.get(m)
            microservice.services_instances[m] = ExtClass(microservice)

        # FlaskApiSpec experimentation
        from apispec import APISpec
        from flask_apispec import FlaskApiSpec
        from apispec.ext.marshmallow import MarshmallowPlugin
        # from apispec_webframeworks.flask import FlaskPlugin

        microservice.config.update({
            'APISPEC_SPEC':
            APISpec(
                title=glom(mem.customizer._configurations,
                           'project.title',
                           default='Your application name'),
                version=glom(mem.customizer._configurations,
                             'project.version',
                             default='0.0.1'),
                openapi_version="2.0",
                # OpenApi 3 not working with FlaskApiSpec
                # -> Duplicate parameter with name body and location body
                # https://github.com/jmcarp/flask-apispec/issues/170
                # Find other warning like this by searching:
                # **FASTAPI**
                # openapi_version="3.0.2",
                plugins=[MarshmallowPlugin()],
            ),
            'APISPEC_SWAGGER_URL':
            '/api/swagger',
            # 'APISPEC_SWAGGER_UI_URL': '/api/swagger-ui',
            # Disable Swagger-UI
            'APISPEC_SWAGGER_UI_URL':
            None,
        })
        docs = FlaskApiSpec(microservice)

        with microservice.app_context():
            for resource in mem.customizer._endpoints:
                urls = list(resource.uris.values())
                try:
                    docs.register(resource.cls)
                except TypeError as e:
                    # log.warning("{} on {}", type(e), resource.cls)
                    # Enable this warning to start conversion to FlaskFastApi
                    # Find other warning like this by searching:
                    # **FASTAPI**
                    log.verbose("{} on {}", type(e), resource.cls)

    # Clean app routes
    ignore_verbs = {"HEAD", "OPTIONS"}

    for rule in microservice.url_map.iter_rules():

        rulename = str(rule)
        # Skip rules that are only exposing schemas
        if '/schemas/' in rulename:
            continue

        endpoint = microservice.view_functions[rule.endpoint]
        if not hasattr(endpoint, 'view_class'):
            continue
        newmethods = ignore_verbs.copy()

        for verb in rule.methods - ignore_verbs:
            method = verb.lower()
            if method in mem.customizer._original_paths[rulename]:
                # remove from flask mapping
                # to allow 405 response
                newmethods.add(verb)
            else:
                log.verbose("Removed method {}.{} from mapping", rulename,
                            verb)

        rule.methods = newmethods

    # Logging responses
    @microservice.after_request
    def log_response(response):

        response.headers["_RV"] = str(__version__)

        PROJECT_VERSION = get_project_configuration("project.version",
                                                    default=None)
        if PROJECT_VERSION is not None:
            response.headers["Version"] = str(PROJECT_VERSION)
        # NOTE: if it is an upload,
        # I must NOT consume request.data or request.json,
        # otherwise the content gets lost
        do_not_log_types = ['application/octet-stream', 'multipart/form-data']

        if request.mimetype in do_not_log_types:
            data = 'STREAM_UPLOAD'
        else:
            try:
                data = handle_log_output(request.data)
                # Limit the parameters string size, sometimes it's too big
                for k in data:
                    try:
                        if isinstance(data[k], dict):
                            for kk in data[k]:
                                v = str(data[k][kk])
                                if len(v) > MAX_CHAR_LEN:
                                    v = v[:MAX_CHAR_LEN] + "..."
                                data[k][kk] = v
                            continue

                        if not isinstance(data[k], str):
                            data[k] = str(data[k])

                        if len(data[k]) > MAX_CHAR_LEN:
                            data[k] = data[k][:MAX_CHAR_LEN] + "..."
                    except IndexError:
                        pass
            except Exception:
                data = 'OTHER_UPLOAD'

        # Obfuscating query parameters
        url = urllib_parse.urlparse(request.url)
        try:
            params = urllib_parse.unquote(
                urllib_parse.urlencode(handle_log_output(url.query)))
            url = url._replace(query=params)
        except TypeError:
            log.error("Unable to url encode the following parameters:")
            print(url.query)

        url = urllib_parse.urlunparse(url)
        log.info("{} {} {} {}", request.method, url, data, response)

        return response

    if send_mail_is_active():
        if not test_smtp_client():
            log.critical("Bad SMTP configuration, unable to create a client")
        else:
            log.info("SMTP configuration verified")
    # and the flask App is ready now:
    log.info("Boot completed")

    if SENTRY_URL is not None:

        if not PRODUCTION:
            log.info("Skipping Sentry, only enabled in PRODUCTION mode")
        else:
            import sentry_sdk
            from sentry_sdk.integrations.flask import FlaskIntegration

            sentry_sdk.init(dsn=SENTRY_URL, integrations=[FlaskIntegration()])
            log.info("Enabled Sentry {}", SENTRY_URL)

    # return our flask app
    return microservice
Example #29
    def read_my_swagger(self, method, endpoint, mapping=None):

        # content has to be a dictionary
        if not isinstance(mapping, dict):
            raise TypeError("Wrong type: {}".format(type(mapping)))

        # read common
        commons = mapping.pop('common', {})
        if commons:
            # Deprecated since 0.7.0
            log.warning("Commons specs are deprecated")

        # Check that there is at least one definition besides 'common'
        if len(mapping) < 1:
            raise ValueError("No definition found in: {}".format(mapping))

        ################################
        # Using 'attrs': a way to save external attributes

        # Instance
        extra = ExtraAttributes()

        ################################
        # Specs should contain only labels written in spec before

        pattern = re.compile(r'\<([^\>]+)\>')

        for label, specs in mapping.items():

            uri = '/{}{}'.format(endpoint.base_uri, label)
            # This will be used by server.py.add
            if uri not in endpoint.uris:
                endpoint.uris[uri] = uri

            ################################
            # add common elements to all specs
            for key, value in commons.items():
                if key not in specs:
                    specs[key] = value

            ################################
            # Separate external definitions

            # Find any custom part which is not swagger definition
            custom = specs.pop('custom', {})

            # Publish the specs on the final Swagger JSON
            # Default is to do it if not otherwise specified
            extra.publish = custom.get('publish', True)
            if not extra.publish:
                # Deprecated since 0.7.0
                log.warning("Publish setting is deprecated")

            # extra.auth = None

            ###########################
            # Strip the uri of the parameter
            # and add it to 'parameters'
            newuri = uri[:]  # create a copy
            if 'parameters' not in specs:
                specs['parameters'] = []

            ###########################
            # Read Form Data Custom parameters
            cparam = specs.pop('custom_parameters', None)
            if cparam is not None:
                for fdp in cparam:

                    params = self._fdp.get(fdp)
                    if params is None:
                        log.exit("No custom form data '{}'", fdp)
                    else:
                        # Do not use list.extend() here: it would add references
                        # to the original objects instead of copies. Without
                        # copying, the same objects would be modified several
                        # times, leading to errors
                        for p in params:
                            specs['parameters'].append(p.copy())

            ###########################
            # Read normal parameters
            for parameter in pattern.findall(uri):

                # create parameters
                x = parameter.split(':')
                xlen = len(x)
                paramtype = 'string'

                if xlen == 1:
                    paramname = x[0]
                elif xlen == 2:
                    paramtype = x[0]
                    paramname = x[1]

                # FIXME: complete for all types
                # http://swagger.io/specification/#data-types-12
                if paramtype == 'int':
                    paramtype = 'number'
                if paramtype == 'path':
                    paramtype = 'string'

                path_parameter = {
                    'name': paramname,
                    'type': paramtype,
                    'in': 'path',
                    'required': True,
                }

                specs['parameters'].append(path_parameter)

                # replace in a new uri
                # <param> -> {param}
                newuri = newuri.replace('<{}>'.format(parameter),
                                        '{{{}}}'.format(paramname))

            # cycle parameters and add them to the endpoint class
            query_params = []
            for param in specs['parameters']:

                if param["in"] != 'path':
                    if uri not in self._parameter_schemas:
                        self._parameter_schemas[uri] = {}

                    if method not in self._parameter_schemas[uri]:
                        self._parameter_schemas[uri][method] = []

                    self._parameter_schemas[uri][method].append(param.copy())

                extrainfo = param.pop('custom', {})

                if len(extrainfo) and endpoint.custom['schema']['expose']:

                    # TODO: read a 'custom.publish' in every yaml
                    # to decide if the /schema uri should be in swagger

                    if uri not in endpoint.custom['params']:
                        endpoint.custom['params'][uri] = {}
                    endpoint.custom['params'][uri][method] = extrainfo

                enum = param.pop("enum", None)
                if enum is not None:
                    param["enum"] = []
                    for option in enum:
                        if isinstance(option, str):
                            param["enum"].append(option)
                        else:
                            # enum [{key1: value1}, {key2: value2}]
                            # becomes enum [key1, key2]
                            for k in option:
                                param["enum"].append(k)

                # handle parameters in URI for Flask
                if param['in'] == 'query':
                    query_params.append(param)

            if len(query_params) > 0:
                self.query_parameters(endpoint.cls,
                                      method=method,
                                      uri=uri,
                                      params=query_params)

            # Swagger does not like empty arrays
            if len(specs['parameters']) < 1:
                specs.pop('parameters')

            ##################
            # Save definition for checking
            if uri not in self._original_paths:
                self._original_paths[uri] = {}
            self._original_paths[uri][method] = specs

            ##################
            # Skip what the developer does not want to be public in swagger
            # NOTE: do not skip if in testing mode
            if not extra.publish and not self._customizer._testing:
                continue

            # Handle global tags
            if 'tags' not in specs and len(endpoint.tags) > 0:
                specs['tags'] = []
            for tag in endpoint.tags:
                self._used_swagger_tags[tag] = True
                if tag not in specs['tags']:
                    specs['tags'].append(tag)

            ##################
            # NOTE: whatever is left inside 'specs' will be
            # passed later on to Swagger Validator...

            # Save definition for publishing
            if newuri not in self._paths:
                self._paths[newuri] = {}
            self._paths[newuri][method] = specs

            log.verbose("Built definition '{}:{}'", method.upper(), newuri)

        endpoint.custom['methods'][method] = extra
        return endpoint
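
The path-parameter handling above can be distilled into a small standalone helper: Flask-style '<type:name>' placeholders become Swagger path parameters and the URI is rewritten to '{name}'. This is an illustrative sketch, not the project's API:

import re

PARAM_PATTERN = re.compile(r'\<([^\>]+)\>')


def extract_path_parameters(uri):
    parameters = []
    newuri = uri
    for match in PARAM_PATTERN.findall(uri):
        pieces = match.split(':')
        if len(pieces) == 1:
            paramtype, paramname = 'string', pieces[0]
        else:
            paramtype, paramname = pieces
        # Map Flask converters to Swagger types (partial, as in the code above)
        if paramtype == 'int':
            paramtype = 'number'
        if paramtype == 'path':
            paramtype = 'string'
        parameters.append(
            {'name': paramname, 'type': paramtype, 'in': 'path', 'required': True}
        )
        newuri = newuri.replace('<{}>'.format(match), '{{{}}}'.format(paramname))
    return newuri, parameters

# extract_path_parameters('/api/data/<int:uuid>')
# -> ('/api/data/{uuid}',
#     [{'name': 'uuid', 'type': 'number', 'in': 'path', 'required': True}])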
Example #30
    def custom_connection(self, **kwargs):

        # set here to avoid warnings like 'Possible hardcoded password'
        EMPTY = ""

        broker = self.variables.get("broker")

        if broker is None:
            log.exit("Unable to start Celery, missing broker service")
            # celery_app = None
            # return celery_app

        # Do not import before loading the ext!
        from restapi.services.detect import Detector

        if broker == 'RABBIT':
            service_vars = Detector.load_variables({'prefix': 'rabbitmq'})
            BROKER_HOST = service_vars.get("host")
            BROKER_PORT = int(service_vars.get("port"))
            BROKER_USER = service_vars.get("user", "")
            BROKER_PASSWORD = service_vars.get("password", "")
            BROKER_VHOST = service_vars.get("vhost", "")
            BROKER_USE_SSL = Detector.get_bool_envvar(
                service_vars.get("ssl_enabled", False)
            )
        elif broker == 'REDIS':
            service_vars = Detector.load_variables({'prefix': 'redis'})
            BROKER_HOST = service_vars.get("host")
            BROKER_PORT = int(service_vars.get("port"))
            BROKER_USER = None
            BROKER_PASSWORD = None
            BROKER_VHOST = ""
            BROKER_USE_SSL = False
        else:
            log.exit("Invalid celery broker: {}", broker)

        if BROKER_USER == "":
            BROKER_USER = None
        if BROKER_PASSWORD == EMPTY:
            BROKER_PASSWORD = None

        if BROKER_VHOST != "":
            BROKER_VHOST = "/{}".format(BROKER_VHOST)

        if BROKER_USER is not None and BROKER_PASSWORD is not None:
            BROKER_CREDENTIALS = '{}:{}@'.format(BROKER_USER, BROKER_PASSWORD)
        else:
            BROKER_CREDENTIALS = ""

        if broker == 'RABBIT':
            BROKER_URL = 'amqp://{}{}:{}{}'.format(
                BROKER_CREDENTIALS,
                BROKER_HOST,
                BROKER_PORT,
                BROKER_VHOST,
            )
            log.info(
                "Configured RabbitMQ as Celery broker {}", obfuscate_url(BROKER_URL))
        elif broker == 'REDIS':
            BROKER_URL = 'redis://{}{}:{}/0'.format(
                BROKER_CREDENTIALS,
                BROKER_HOST,
                BROKER_PORT,
            )
            log.info(
                "Configured Redis as Celery broker {}", obfuscate_url(BROKER_URL))
        else:
            log.error("Unable to start Celery, unknown broker service: {}", broker)
            celery_app = None
            return celery_app

        backend = self.variables.get("backend", broker)

        if backend == 'RABBIT':
            service_vars = Detector.load_variables({'prefix': 'rabbitmq'})
            BACKEND_HOST = service_vars.get("host")
            BACKEND_PORT = int(service_vars.get("port"))
            BACKEND_USER = service_vars.get("user", "")
            BACKEND_PASSWORD = service_vars.get("password", "")
        elif backend == 'REDIS':
            service_vars = Detector.load_variables({'prefix': 'redis'})
            BACKEND_HOST = service_vars.get("host")
            BACKEND_PORT = int(service_vars.get("port"))
            BACKEND_USER = ""
            BACKEND_PASSWORD = None
        elif backend == 'MONGODB':
            service_vars = Detector.load_variables({'prefix': 'mongo'})
            BACKEND_HOST = service_vars.get("host")
            BACKEND_PORT = int(service_vars.get("port"))
            BACKEND_USER = service_vars.get("user", "")
            BACKEND_PASSWORD = service_vars.get("password", "")
        else:
            log.exit("Invalid celery backend: {}", backend)

        if BACKEND_USER == EMPTY:
            BACKEND_USER = None
        if BACKEND_PASSWORD == EMPTY:
            BACKEND_PASSWORD = None

        if BACKEND_USER is not None and BACKEND_PASSWORD is not None:
            BACKEND_CREDENTIALS = '{}:{}@'.format(BACKEND_USER, BACKEND_PASSWORD)
        else:
            BACKEND_CREDENTIALS = ""

        if backend == 'RABBIT':
            BACKEND_URL = 'rpc://{}{}:{}/0'.format(
                BACKEND_CREDENTIALS,
                BACKEND_HOST,
                BACKEND_PORT,
            )
            log.info(
                "Configured RabbitMQ as Celery backend {}", obfuscate_url(BACKEND_URL))
        elif backend == 'REDIS':
            BACKEND_URL = 'redis://{}{}:{}/0'.format(
                BACKEND_CREDENTIALS,
                BACKEND_HOST,
                BACKEND_PORT,
            )
            log.info(
                "Configured Redis as Celery backend {}", obfuscate_url(BACKEND_URL))
        elif backend == 'MONGODB':
            BACKEND_URL = 'mongodb://{}{}:{}'.format(
                BACKEND_CREDENTIALS,
                BACKEND_HOST,
                BACKEND_PORT,
            )
            log.info(
                "Configured MongoDB as Celery backend {}", obfuscate_url(BACKEND_URL))
        else:
            log.exit("Unable to start Celery, unknown backend service: {}", backend)
            # celery_app = None
            # return celery_app

        celery_app = Celery('RestApiQueue', broker=BROKER_URL, backend=BACKEND_URL)
        celery_app.conf['broker_use_ssl'] = BROKER_USE_SSL

        # if not worker_mode:

        #     from celery.task.control import inspect

        #     insp = inspect()
        #     if not insp.stats():
        #         log.warning("No running Celery workers were found")

        # Skip initial warnings, avoiding pickle format (deprecated)
        celery_app.conf.accept_content = ['json']
        celery_app.conf.task_serializer = 'json'
        celery_app.conf.result_serializer = 'json'
        # Max priority default value for all queues
        # Required to be able to set priority parameter on apply_async calls
        # celery_app.conf.task_queue_max_priority = 10

        # default_exchange = Exchange('default', type='direct')
        # priority_exchange = Exchange('priority', type='direct')
        # celery_app.conf.task_queues = [
        #     Queue(
        #         'priority',
        #         priority_exchange,
        #         routing_key='priority',
        #         queue_arguments={
        #             'x-max-priority': 10
        #         }
        #     )
        # ]
        # If you want to apply a more strict priority to items
        # probably prefetching should also be disabled:

        # CELERY_ACKS_LATE = True
        # CELERYD_PREFETCH_MULTIPLIER = 1

        # celery_app.conf.broker_pool_limit = None

        if Detector.get_bool_from_os('CELERY_BEAT_ENABLED'):

            CeleryExt.CELERY_BEAT_SCHEDULER = backend

            if backend == 'MONGODB':
                SCHEDULER_DB = 'celery'
                celery_app.conf['CELERY_MONGODB_SCHEDULER_DB'] = SCHEDULER_DB
                celery_app.conf['CELERY_MONGODB_SCHEDULER_COLLECTION'] = "schedules"
                celery_app.conf['CELERY_MONGODB_SCHEDULER_URL'] = BACKEND_URL

                import mongoengine

                m = mongoengine.connect(SCHEDULER_DB, host=BACKEND_URL)
                log.info("Celery-beat connected to MongoDB: {}", m)
            elif backend == 'REDIS':

                BEAT_BACKEND_URL = 'redis://{}{}:{}/1'.format(
                    BACKEND_CREDENTIALS,
                    BACKEND_HOST,
                    BACKEND_PORT,
                )

                celery_app.conf['REDBEAT_REDIS_URL'] = BEAT_BACKEND_URL
                celery_app.conf['REDBEAT_KEY_PREFIX'] = CeleryExt.REDBEAT_KEY_PREFIX
                log.info("Celery-beat connected to Redis: {}", BEAT_BACKEND_URL)
            else:
                log.warning("Cannot configure celery beat scheduler")

        if CeleryExt.celery_app is None:
            CeleryExt.celery_app = celery_app

        return celery_app
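
obfuscate_url() is used above to log broker and backend URLs without leaking credentials, but its implementation is not shown. A hypothetical sketch that masks the password component could be:

from urllib.parse import urlparse


def obfuscate_url(url):
    # Hypothetical sketch: hide the password before a URL is written to the logs
    parsed = urlparse(url)
    if parsed.password:
        netloc = '{}:****@{}'.format(parsed.username, parsed.hostname)
        if parsed.port:
            netloc += ':{}'.format(parsed.port)
        parsed = parsed._replace(netloc=netloc)
    return parsed.geturl()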