Example #1
    def extract_endpoints(self, base_dir):

        endpoints_classes = []
        # get last item of the path
        # normpath is required to strip final / if any
        base_module = os.path.basename(os.path.normpath(base_dir))

        apis_dir = os.path.join(base_dir, "endpoints")
        apiclass_module = f"{base_module}.endpoints"
        for epfiles in glob.glob(f"{apis_dir}/*.py"):

            # get module name (e.g. endpoints.filename)
            module_file = os.path.basename(os.path.splitext(epfiles)[0])
            module_name = f"{apiclass_module}.{module_file}"
            # Convert module name into a module
            log.debug("Importing {}", module_name)
            module = Meta.get_module_from_string(
                module_name,
                exit_on_fail=True,
            )

            # Extract classes from the module
            # module can't be None because of exit_on_fail=True...
            # but mypy can't infer this
            classes = Meta.get_new_classes_from_module(module)  # type: ignore
            for class_name, epclss in classes.items():
                # Filtering out classes without expected data
                if not hasattr(epclss, "methods") or epclss.methods is None:
                    continue

                log.debug("Importing {} from {}.{}", class_name, apis_dir,
                          module_file)

                skip, dependency = self.skip_endpoint(epclss.depends_on)

                if skip:
                    log.debug(
                        "Skipping '{} {}' due to unmet dependency: {}",
                        module_name,
                        class_name,
                        dependency,
                    )
                    continue

                endpoints_classes.append(epclss)

        return endpoints_classes
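
A standard-library-only sketch of the same discovery pattern (glob the endpoint files of a package, import each module, collect the classes it defines); the names used here are placeholders, not part of the framework above:

import glob
import importlib
import inspect
import os


def discover_classes(base_dir: str, package: str) -> list:
    classes = []
    for path in glob.glob(os.path.join(base_dir, "endpoints", "*.py")):
        module_file = os.path.splitext(os.path.basename(path))[0]
        module = importlib.import_module(f"{package}.endpoints.{module_file}")
        for _, cls in inspect.getmembers(module, inspect.isclass):
            # keep only the classes defined in this module, skip imported ones
            if cls.__module__ == module.__name__:
                classes.append(cls)
    return classes
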
Example #2
    def load_models(connectors: List[str]) -> None:

        for connector in connectors:
            # Models are strictly core-dependent. To enable models starting from a
            # custom connector this function would have to be refactored:
            # 1) models.py is currently only looked up in ABS_RESTAPI_PATH/connector
            # 2) the core model is mandatory
            # 3) the Connector class used to inject models is taken from BACKEND_PACKAGE

            models_path = ABS_RESTAPI_PATH.joinpath(
                CONNECTORS_FOLDER, connector, "models.py"
            )

            if not models_path.is_file():
                log.debug("No model found for {}", connector)
                continue

            log.debug("Loading models from {}", connector)
            base_models = Meta.import_models(connector, BACKEND_PACKAGE, mandatory=True)
            if EXTENDED_PACKAGE == EXTENDED_PROJECT_DISABLED:
                extended_models = {}
            else:
                extended_models = Meta.import_models(connector, EXTENDED_PACKAGE)
            custom_models = Meta.import_models(connector, CUSTOM_PACKAGE)

            log.debug(
                "Models loaded from {}: core {}, extended {}, custom {}",
                connector,
                len(base_models),
                len(extended_models),
                len(custom_models),
            )
            connector_module = Connector.get_module(connector, BACKEND_PACKAGE)
            connector_class = Connector.get_class(connector_module)
            if connector_class:
                connector_class.set_models(base_models, extended_models, custom_models)
            else:  # pragma: no cover
                log.error("Connector class not found for {}", connector)
Example #3
    def load_connectors(cls, path: str, module: str, services: Services) -> Services:

        main_folder = os.path.join(path, CONNECTORS_FOLDER)
        if not os.path.isdir(main_folder):
            log.debug("Connectors folder not found: {}", main_folder)
            return services

        for connector in os.listdir(main_folder):
            connector_path = os.path.join(path, CONNECTORS_FOLDER, connector)
            if not os.path.isdir(connector_path):
                continue
            if connector.startswith("_"):
                continue

            # This is the only exception... we should rename sqlalchemy as alchemy
            if connector == "sqlalchemy":
                variables = Env.load_variables_group(prefix="alchemy")
            else:
                variables = Env.load_variables_group(prefix=connector)

            if not Env.to_bool(
                variables.get("enable_connector", True)
            ):  # pragma: no cover
                log.info("{} connector is disabled", connector)
                continue

            # If host is not in variables (as for Celery), do not consider it external
            external = False
            if "host" in variables:
                if host := variables.get("host"):
                    external = cls.is_external(host)
                # HOST found in variables but empty... never happens during tests
                else:  # pragma: no cover
                    variables["enable"] = "0"

            enabled = Env.to_bool(variables.get("enable"))
            available = enabled or external

            if not available:
                continue

            connector_module = Connector.get_module(connector, module)
            connector_class = Connector.get_class(connector_module)

            # Can't test connector misconfiguration...
            if not connector_class:  # pragma: no cover
                log.error("No connector class found in {}/{}", main_folder, connector)
                continue

            try:
                # Test the Connector compliance, i.e. verify that instance and
                # get_instance exist in the connector module and that the
                # Connector class can be instantiated
                connector_module.instance
                connector_module.get_instance
                connector_class()
            except AttributeError as e:  # pragma: no cover
                print_and_exit(e)

            services[connector] = variables

            log.debug("Got class definition for {}", connector_class)
Example #4
            def wrapper(*args: Any, **kwargs: Any) -> Any:
                # Recover the auth object
                auth_type, token = HTTPTokenAuth.get_authorization_token(
                    allow_access_token_parameter=allow_access_token_parameter)

                # Internal API 'self' reference
                caller = Meta.get_self_reference_from_args(*args)

                if caller is None:  # pragma: no cover
                    # Exiting here is really dangerous, but even if
                    # get_self_reference_from_args can return None, this case is
                    # nearly impossible... and with None the server can't continue
                    print_and_exit(
                        "Server misconfiguration, self reference can't be None!"
                    )

                if auth_type is None or auth_type != HTTPAUTH_SCHEME:
                    # Wrong authentication string
                    msg = (
                        "Missing credentials in headers"
                        f", e.g. {HTTPAUTH_AUTH_FIELD}: '{HTTPAUTH_SCHEME} TOKEN'"
                    )
                    log.debug("Unauthorized request: missing credentials")
                    return caller.response(msg,
                                           code=401,
                                           headers=HTTPAUTH_ERR_HEADER,
                                           allow_html=True)

                # Handle OPTIONS requests forwarded to the application:
                # ignore headers and let them through to avoid unwanted CORS interactions
                if request.method != "OPTIONS":

                    # valid, token, jti, user
                    valid, token, _, user = caller.auth.verify_token(token)
                    # Check authentication
                    if not valid:
                        # Clear TCP receive buffer of any pending data
                        _ = request.data
                        # Mimic the response of a normal endpoint
                        # to keep the same standards
                        # log.info("Invalid token received '{}'", token)
                        log.debug("Invalid token received")
                        return caller.response(
                            "Invalid token received",
                            headers=HTTPAUTH_ERR_HEADER,
                            code=401,
                            allow_html=True,
                        )
                    request.environ[TOKEN_VALIDATED_KEY] = True

                # Check roles
                if not caller.auth.verify_roles(
                        user, roles, required_roles=required_roles):
                    log.info("Unauthorized request: missing privileges.")
                    return caller.response(
                        "You are not authorized: missing privileges",
                        code=401,
                        allow_html=True,
                    )

                caller.authorized_user = user.uuid
                kwargs["user"] = user
                return func(*args, **kwargs)
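
A framework-agnostic sketch of the same idea with plain Flask: a decorator that rejects the request unless a "Bearer <token>" Authorization header verifies; verify_token is a hypothetical callable, not part of the code above:

from functools import wraps
from typing import Any, Callable

from flask import request


def require_bearer_token(verify_token: Callable[[str], bool]) -> Callable:
    def decorator(func: Callable[..., Any]) -> Callable[..., Any]:
        @wraps(func)
        def wrapper(*args: Any, **kwargs: Any) -> Any:
            header = request.headers.get("Authorization", "")
            scheme, _, token = header.partition(" ")
            if scheme != "Bearer" or not verify_token(token):
                # Flask turns a (dict, status) tuple into a JSON response
                return {"error": "Invalid or missing token"}, 401
            return func(*args, **kwargs)

        return wrapper

    return decorator
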
Example #5
from restapi.config import SENTRY_URL
from restapi.exceptions import (
    BadRequest,
    Conflict,
    DatabaseDuplicatedEntry,
    RestApiException,
)
from restapi.models import PartialSchema, fields, validate
from restapi.rest.annotations import inject_apispec_docs
from restapi.rest.bearer import TOKEN_VALIDATED_KEY
from restapi.rest.bearer import HTTPTokenAuth as auth  # imported as alias for endpoints
from restapi.utilities.globals import mem
from restapi.utilities.logs import log

log.debug("Auth loaded {}", auth)
log.debug("Marshal loaded {}", marshal_with)

SYSTEM_EXCEPTIONS = ["AttributeError", "ValueError", "KeyError", "SystemError"]


F = TypeVar("F", bound=Callable[..., Any])


# same definition as in:
# https://github.com/jmcarp/flask-apispec/blob/master/flask_apispec/annotations.py
def use_kwargs(args, location=None, inherit=None, apply=None, **kwargs):
    # this use_kwargs is used to override the default location (json)
    # with a more extensive default location (body)
    # This trick avoids having to add location='body' to almost all models
    # Please note that body is also used as "in" parameter in swagger specs and well
Example #6
 def test_01_x(self, client):
     log.debug("Executing tests from {}", self.__class__.__module__)
     endpoint = API_URI + '/tests/1'
     r = client.get(endpoint)
     assert r.status_code == 200
Example #7
 def save_token(
     self, user: User, token: str, payload: Payload, token_type: Optional[str] = None
 ) -> None:
     log.debug("Token is not saved in base authentication")
Example #8
    def get(self) -> Response:

        specs = mem.docs.spec.to_dict()

        if self.get_user():
            # Set security requirements for endpoint
            for key, data in specs.items():

                # Find endpoint mapping flagged as private
                if key == "paths":
                    for uri, endpoint in data.items():
                        u = uri.replace("{", "<").replace("}", ">")
                        for method, definition in endpoint.items():

                            auth_required = glom(
                                mem.authenticated_endpoints,
                                f"{u}.{method}",
                                default=False,
                            )

                            if auth_required:
                                definition["security"] = [{"Bearer": []}]

            return self.response(specs)

        log.info("Unauthenticated request, filtering out private endpoints")
        # Remove sensitive data
        filtered_specs: Dict[str, Dict[str, Dict[str, Any]]] = {}
        # schemaName => True|False (private|public)
        privatedefs: Dict[str, bool] = {}
        # schemaName => [list of definitions including this]
        parentdefs: Dict[str, List[Any]] = {}
        for key, data in specs.items():

            # Find endpoint mapping flagged as private
            if key == "paths":
                for uri, endpoint in data.items():
                    u = uri.replace("{", "<").replace("}", ">")
                    for method, definition in endpoint.items():

                        is_private = glom(
                            mem.private_endpoints,
                            f"{u}.{method}",
                            default=False,
                        )

                        defs = definition.get("parameters", [])[:]
                        for p in defs:

                            if "schema" not in p:
                                continue
                            if "$ref" not in p["schema"]:
                                continue
                            ref = p["schema"]["$ref"]
                            def_name = ref.replace("#/definitions/", "")

                            privatedefs.setdefault(def_name, True)
                            # Will be True if all occurrences are private
                            privatedefs[def_name] = (
                                privatedefs[def_name] and is_private
                            )

                        if is_private:
                            log.debug("Skipping {} {}", method, uri)
                            continue

                        auth_required = glom(
                            mem.authenticated_endpoints,
                            f"{u}.{method}",
                            default=False,
                        )

                        if auth_required:
                            definition["security"] = [{"Bearer": []}]

                        filtered_specs.setdefault(key, {})
                        filtered_specs[key].setdefault(uri, {})
                        filtered_specs[key][uri].setdefault(method, definition)

            elif key == "definitions":

                # Save definition inclusions; they will be used later to determine
                # whether a definition is private or not.
                # If a definition is referenced by an endpoint, the definition
                # visibility matches the endpoint visibility.
                # If a definition is referenced by other definitions, its visibility
                # will be calculated as AND(parent definitions).
                # Verification is postponed
                for schema, definition in data.items():

                    # parentdefs
                    for d in definition.get("properties", {}).values():
                        if "$ref" in d:
                            ref = d["$ref"]
                            def_name = ref.replace("#/definitions/", "")

                            parentdefs.setdefault(def_name, [])
                            parentdefs[def_name].append(schema)

                        # Generated by Nested?
                        # 'allOf': [{'$ref': '#/definitions/MyReferenceName'}]}
                        if "allOf" in d:
                            for nested in d["allOf"]:
                                ref = nested["$ref"]
                                def_name = ref.replace("#/definitions/", "")

                                parentdefs.setdefault(def_name, [])
                                parentdefs[def_name].append(schema)
            else:
                filtered_specs.setdefault(key, data)

        if "definitions" in specs:

            filtered_specs.setdefault("definitions", {})
            for schema, definition in specs["definitions"].items():

                if self.is_definition_private(schema, privatedefs, parentdefs):
                    log.debug("Skipping private definition {}", schema)
                    continue
                filtered_specs["definitions"].setdefault(schema, definition)

        return self.response(filtered_specs)
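
glom is used above to read nested keys with a default instead of chained .get() calls; a minimal standalone illustration of that call:

from glom import glom

authenticated_endpoints = {"/api/tests/<test_num>": {"get": True}}

# True if the uri/method pair is flagged, False (the default) otherwise
auth_required = glom(
    authenticated_endpoints, "/api/tests/<test_num>.get", default=False
)
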
Example #9
 def save_user(self, user):
     log.debug("User is not saved in base authentication")
Example #10
 def save_token(self, user, token, jti, token_type=None):
     log.debug("Token is not saved in base authentication")
Example #11
from restapi.services.detect import detector

from restapi.utilities.logs import log

################################################
# Reload Flask app code also for the worker
# This is necessary to have the app context available
# app = create_app(worker_mode=True)
app = Flask("beat")

app.extensions = detector.init_services(
    app=app, worker_mode=True, project_init=False, project_clean=False
)

celery_app = app.extensions.get('celery').celery_app
celery_app.app = app


def get_service(service, **kwargs):
    ext = celery_app.app.extensions.get(service)
    if ext is None:
        log.error("{} is not enabled", service)
        return None
    return ext.get_instance(**kwargs)


celery_app.get_service = get_service

log.debug("Celery beat is ready {}", celery_app)
Example #12
    def pre_connection(self, **kwargs):

        session = kwargs.get('user_session')

        external = self.variables.get('external')

        # Retrieve authentication schema
        self.authscheme = kwargs.get('authscheme')
        # Authentication scheme fallback to default from project_configuration
        if self.authscheme is None or self.authscheme.strip() == '':
            self.authscheme = self.variables.get('authscheme')
        # Authentication scheme fallback to default (credentials)
        if self.authscheme is None or self.authscheme.strip() == '':
            self.authscheme = NORMAL_AUTH_SCHEME

        if session is not None:
            user = session.email
        else:
            user = kwargs.get('user')
            self.password = kwargs.get('password')

            gss = kwargs.get('gss', False)
            myproxy_host = self.variables.get("myproxy_host")

            admin = kwargs.get('be_admin', False)
            if user is None:
                user_key = 'default_admin_user' if admin else 'user'
                user = self.variables.get(user_key)

            if self.password is None:
                if self.authscheme == NORMAL_AUTH_SCHEME:
                    self.password = self.variables.get('password')
                elif self.authscheme == PAM_AUTH_SCHEME:
                    self.password = self.variables.get('password')

            log.verbose(
                "Check connection parameters:" +
                "\nexternal[{}], auth[{}], user[{}], admin[{}]",
                external,
                self.authscheme,
                user,
                admin,
            )

            # Check if the user explicitly requested GSI
            if self.authscheme == GSI_AUTH_SCHEME:
                # if self.variables.get('external'):
                gss = True

        if user is None:
            raise AttributeError("No user is defined")
        else:
            self.user = user
            log.debug("Irods user: {}", self.user)

        ######################
        # Irods/b2safe direct credentials
        if session is not None:
            return True
        ######################
        # Identity with GSI
        elif gss:

            if self.authscheme != GSI_AUTH_SCHEME:
                log.debug("Forcing {} authscheme", GSI_AUTH_SCHEME)
                self.authscheme = GSI_AUTH_SCHEME

            proxy_cert_name = "{}{}".format(
                self.variables.get('certificates_prefix', ""),
                kwargs.get("proxy_cert_name"),
            )

            valid_cert = Certificates.globus_proxy(
                proxy_file=kwargs.get('proxy_file'),
                user_proxy=self.user,
                cert_dir=self.variables.get("x509_cert_dir"),
                myproxy_host=myproxy_host,
                cert_name=proxy_cert_name,
                cert_pwd=kwargs.get("proxy_pass"),
            )

            if not valid_cert:
                return False

        elif self.authscheme == PAM_AUTH_SCHEME:
            pass

        elif self.password is not None:
            self.authscheme = NORMAL_AUTH_SCHEME

        else:
            raise NotImplementedError(
                "Unable to create session: invalid iRODS-auth scheme")

        return True
Example #13
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""

RESTful API Python 3 Flask server

"""

import os
import pretty_errors
from restapi.confs import PRODUCTION
from restapi.server import create_app
from restapi.utilities.logs import log

# Connections are internal to containers and the proxy handles all HTTPS calls,
# so we may safely disable HTTPS on OAUTHLIB requests
if PRODUCTION:
    # http://stackoverflow.com/a/27785830/2114395
    os.environ['OAUTHLIB_INSECURE_TRANSPORT'] = '1'
BIND_INTERFACE = "0.0.0.0"

#############################
# BE FLASK
app = create_app(name='REST_API')

if __name__ == "__main__":
    log.debug("Server running (w/ {})", pretty_errors.__name__)
    app.run(host=BIND_INTERFACE, threaded=True)
Example #14
    def send(self, body, routing_key="", exchange="", headers=None):
        """
        Send a message to the RabbitMQ queue

        :param body: the data to be sent.
                     If the message should be json-encoded please use .send_json()
        :param routing_key: RabbitMQ routing key.
        :param exchange: RabbitMQ exchange where the message should be sent.
                         Empty for the default exchange.
        :param headers: optional message headers.
        """

        # Settings for the message:
        permanent_delivery = 2  # make message persistent

        if headers is None:
            headers = {}

        props = pika.BasicProperties(
            delivery_mode=permanent_delivery,
            headers=headers,
            # This should be the same used by the connect method, i.e.:
            # self.variables + kwargs
            # Otherwise it will fail with error:
            # Failed to write message, channel is dead (
            #     (406, "PRECONDITION_FAILED - user_id property
            #            set to 'CUSTOM' but authenticated user was 'BASE'
            #           "
            #     )
            # )
            user_id=self.variables.get("user"),
        )

        try:

            channel = self.get_channel()
            channel.basic_publish(
                exchange=exchange,
                routing_key=routing_key,
                body=body,
                properties=props,
                mandatory=True,
            )
            log.debug("Message sent to RabbitMQ")
            return True
        except UnroutableError as e:
            log.error(e)

        except ConnectionClosed as e:
            # TODO: This happens often. Check if heartbeat solves the problem.
            log.error("Failed to write message, connection is dead ({})", e)

        except AMQPConnectionError as e:
            log.error("Failed to write message, connection failed ({})", e)

        except AMQPChannelError as e:
            log.error("Failed to write message, channel is dead ({})", e)
            self.channel = None

        except AttributeError as e:  # pragma: no cover
            log.error("Failed to write message: {}", e)

        return False
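
A minimal standalone sketch of the same persistent-publish pattern using pika directly, assuming a broker reachable on localhost; queue name and payload are placeholders:

import pika
from pika.exceptions import UnroutableError

connection = pika.BlockingConnection(pika.ConnectionParameters(host="localhost"))
channel = connection.channel()
channel.queue_declare(queue="my-queue", durable=True)
# enable publisher confirms so that unroutable messages raise UnroutableError
channel.confirm_delivery()

try:
    channel.basic_publish(
        exchange="",
        routing_key="my-queue",
        body=b"hello",
        properties=pika.BasicProperties(delivery_mode=2),  # persistent message
        mandatory=True,
    )
except UnroutableError:
    print("Message could not be routed")
finally:
    connection.close()
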
Example #15
    def delete_exchange(self, exchange: str) -> None:

        channel = self.get_channel()
        out = channel.exchange_delete(exchange, if_unused=False)
        log.debug(out)
Example #16
from restapi.connectors import celery
from restapi.server import ServerModes, create_app
from restapi.utilities.logs import log

instance = celery.get_instance()
# Used by Celery to run the instance (--app app)
celery_app = instance.celery_app

# Reload Flask app code for the worker (needed to have the app context available)
celery.CeleryExt.app = create_app(mode=ServerModes.WORKER)

log.debug("Celery worker is ready {}", celery_app)
Example #17
 def modify_user_password(self, user, password):
     log.debug("Changing {} password", user)
     return self.prc.users.modify(user, 'password', password)
Example #18
    def send(
        self,
        body: str,
        subject: str,
        to_address: Optional[str] = None,
        from_address: Optional[str] = None,
        cc: Union[None, str, List[str]] = None,
        bcc: Union[None, str, List[str]] = None,
        plain_body: Optional[str] = None,
    ) -> bool:

        if not to_address:
            to_address = self.instance_variables.get("admin")
        if not to_address:
            log.error("Skipping send email: destination address not configured")
            return False

        if not from_address:
            from_address = self.instance_variables.get("noreply")
        if not from_address:
            from_address = self.instance_variables.get("admin")
        if not from_address:
            log.error("Skipping send email: from address not configured")
            return False

        try:

            if plain_body is not None:
                msg = MIMEMultipart("alternative")
            else:
                msg = MIMEText(body)

            msg["Subject"] = subject
            msg["From"] = from_address
            msg["To"] = to_address

            dest_addresses = [to_address]

            if cc is None:
                pass
            elif isinstance(cc, str):
                msg["Cc"] = cc
                # sendmail expects a flat list of addresses: extend, do not append
                dest_addresses.extend(cc.split(","))
            elif isinstance(cc, list):
                msg["Cc"] = ",".join(cc)
                dest_addresses.extend(cc)
            else:
                log.warning("Invalid CC value: {}", cc)
                cc = None

            if bcc is None:
                pass
            elif isinstance(bcc, str):
                msg["Bcc"] = bcc
                dest_addresses.extend(bcc.split(","))
            elif isinstance(bcc, list):
                msg["Bcc"] = ",".join(bcc)
                dest_addresses.extend(bcc)
            else:
                log.warning("Invalid BCC value: {}", bcc)
                bcc = None

            msg["Date"] = formatdate()

            if plain_body is not None:
                part1 = MIMEText(plain_body, "plain")
                part2 = MIMEText(body, "html")
                msg.attach(part1)
                msg.attach(part2)

            log.debug("Sending email to {}", to_address)

            self.smtp.sendmail(from_address, dest_addresses, msg.as_string())

            log.info(
                "Successfully sent email to {} [cc={}], [bcc={}]",
                to_address,
                cc,
                bcc,
            )

            return True
        except SMTPException as e:
            log.error("Unable to send email to {} ({})", to_address, e)
            # Force the invalidation of this client
            self.disconnect()
            return False
        except Exception as e:
            log.error(str(e))
            return False
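
A standard-library sketch of the multipart/alternative construction used above, assuming an SMTP server on localhost:25; addresses are placeholders:

import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.utils import formatdate

msg = MIMEMultipart("alternative")
msg["Subject"] = "Test"
msg["From"] = "noreply@example.com"
msg["To"] = "admin@example.com"
msg["Date"] = formatdate()
# the last attached part is the one preferred by capable clients
msg.attach(MIMEText("plain text body", "plain"))
msg.attach(MIMEText("<p>html body</p>", "html"))

with smtplib.SMTP("localhost", 25) as smtp:
    smtp.sendmail(msg["From"], [msg["To"]], msg.as_string())
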
Example #19
            # Set up secure data connection
            ftp_tls_conn.prot_p()

            self.connection = ftp_tls_conn
        else:
            ftp_conn = FTP(timeout=10)
            # ftp_conn.debugging = 1

            ftp_conn.connect(host, port)
            ftp_conn.login(user, password)
            ftp_conn.set_pasv(True)

            self.connection = ftp_conn

        self.initialized = True
        log.debug("Current directory: {}", self.connection.pwd())
        return self

    def disconnect(self) -> None:
        self.disconnected = True
        if self.connection and self.initialized and self.connection.sock:
            self.connection.quit()
            self.initialized = False

    def is_connected(self) -> bool:
        # Can't happen because connection is not Optional[]
        if not self.connection:  # pragma: no cover
            return False

        if self.disconnected:
            return False
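
A minimal ftplib sketch of the connect/secure/passive sequence shown above, with placeholder host and credentials:

from ftplib import FTP_TLS

ftp_tls_conn = FTP_TLS(timeout=10)
ftp_tls_conn.connect("ftp.example.com", 21)
ftp_tls_conn.login("user", "password")
ftp_tls_conn.prot_p()        # set up a secure data connection
ftp_tls_conn.set_pasv(True)  # passive mode
print("Current directory:", ftp_tls_conn.pwd())
ftp_tls_conn.quit()
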
Example #20
    def swaggerish(self):
        """
        Go through all endpoints configured by the current project.

        Provide the minimum data required by the Swagger specs.
        """

        # Better to choose this dynamically from endpoint.py
        schemes = ['http']
        if PRODUCTION:
            schemes = ['https']

        # A template base
        output = {
            # TODO: update to 3.0.1? Replace bravado with something else?
            # https://github.com/Yelp/bravado/issues/306
            "swagger": "2.0",
            "info": {
                "version": "0.0.1",
                "title": "Your application name"
            },
            "schemes": schemes,
            # "host": "localhost"  # chosen dynamically
            "basePath": "/",
            "securityDefinitions": {
                "Bearer": {
                    "type": "apiKey",
                    "name": "Authorization",
                    "in": "header"
                }
            },
            "security": [{
                "Bearer": []
            }],
        }

        ###################
        # Set existing values
        proj = self._customizer._configurations['project']
        if 'version' in proj:
            output['info']['version'] = proj['version']
        if 'title' in proj:
            output['info']['title'] = proj['title']

        ###################
        models = self.get_models()
        self._fdp = models.pop('FormDataParameters', {})

        for k in ["definitions", "parameters", "responses"]:
            if k in models:
                output[k] = models.get(k, {})

        output['consumes'] = [
            JSON_APPLICATION,
            # required for parameters with "in: formData"
            "application/x-www-form-urlencoded",
            # required for parameters of "type: file"
            "multipart/form-data"
        ]
        output['produces'] = [JSON_APPLICATION]

        ###################
        # Read endpoints swagger files
        for key, endpoint in enumerate(self._endpoints):

            endpoint.custom['methods'] = {}
            endpoint.custom['params'] = {}

            for method, mapping in endpoint.methods.items():
                # add the custom part to the endpoint

                self._endpoints[key] = self.read_my_swagger(
                    method, endpoint, mapping)

        ###################
        # Save query parameters globally
        self._customizer._query_params = self._qparams
        self._customizer._parameter_schemas = self._parameter_schemas
        output['paths'] = self._paths

        ###################
        tags = []
        for tag, desc in self._customizer._configurations['tags'].items():
            if tag not in self._used_swagger_tags:
                log.debug("Skipping unused tag: {}", tag)
                continue
            tags.append({'name': tag, 'description': desc})
        output['tags'] = tags

        self._customizer._original_paths = self._original_paths
        return output
Example #21
        def get(self, task_id=None):

            data = []
            # Inspect all worker nodes
            celery = self.get_service_instance('celery')

            if task_id is not None:
                task_result = celery.AsyncResult(task_id)
                res = task_result.result
                if not isinstance(res, dict):
                    res = str(res)
                return self.force_response({
                    'status': task_result.status,
                    # 'info': task_result.info,
                    'output': res,
                })

            #############################
            # FAST WAY
            stats = celery.control.inspect().stats()
            workers = list(stats.keys())

            active_tasks = {}
            revoked_tasks = {}
            scheduled_tasks = {}
            reserved_tasks = {}

            for worker in workers:
                i = celery.control.inspect([worker])
                log.debug('checked worker: {}', worker)
                for key, value in i.active().items():
                    active_tasks[key] = value
                for key, value in i.revoked().items():
                    revoked_tasks[key] = value
                for key, value in i.reserved().items():
                    reserved_tasks[key] = value
                for key, value in i.scheduled().items():
                    scheduled_tasks[key] = value

            #############################
            # workers = celery.control.inspect()
            # SLOW WAY
            # active_tasks = workers.active()
            # revoked_tasks = workers.revoked()
            # reserved_tasks = workers.reserved()
            # scheduled_tasks = workers.scheduled()
            # SLOW WAY
            # if active_tasks is None:
            #     active_tasks = []
            # if revoked_tasks is None:
            #     revoked_tasks = []
            # if scheduled_tasks is None:
            #     scheduled_tasks = []
            # if reserved_tasks is None:
            #     reserved_tasks = []

            log.verbose('listing items')
            for worker, tasks in active_tasks.items():
                for task in tasks:
                    if task_id is not None and task["id"] != task_id:
                        continue

                    row = {}
                    row['status'] = 'ACTIVE'
                    row['worker'] = worker
                    row['ETA'] = task["time_start"]
                    row['task_id'] = task["id"]
                    row['task'] = task["name"]
                    row['args'] = task["args"]

                    if task_id is not None:
                        task_result = celery.AsyncResult(task_id)
                        row['task_status'] = task_result.status
                        row['info'] = task_result.info
                    data.append(row)

            for worker, tasks in revoked_tasks.items():
                for task in tasks:
                    if task_id is not None and task != task_id:
                        continue
                    row = {}
                    row['status'] = 'REVOKED'
                    row['task_id'] = task
                    data.append(row)

            for worker, tasks in scheduled_tasks.items():
                for task in tasks:
                    if task_id is not None and task["request"]["id"] != task_id:
                        continue

                    row = {}
                    row['status'] = 'SCHEDULED'
                    row['worker'] = worker
                    row['ETA'] = task["eta"]
                    row['task_id'] = task["request"]["id"]
                    row['priority'] = task["priority"]
                    row['task'] = task["request"]["name"]
                    row['args'] = task["request"]["args"]
                    data.append(row)

            for worker, tasks in reserved_tasks.items():
                for task in tasks:
                    if task_id is not None and task["id"] != task_id:
                        continue

                    data.append(
                        {
                            'status': 'SCHEDULED',
                            'worker': worker,
                            'ETA': task['time_start'],
                            'task_id': task["id"],
                            'priority': task['delivery_info']["priority"],
                            'task': task["name"],
                            'args': task["args"],
                        }
                    )

            # from celery.task.control import inspect
            # tasks = inspect()
            log.verbose('listing completed')

            return self.force_response(data)
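
A condensed sketch of the Celery inspection calls used above, assuming a configured Celery application object named celery_app; the inspection methods return None when no worker answers:

inspector = celery_app.control.inspect()
stats = inspector.stats() or {}

for worker in stats:
    # restrict the inspection to a single worker node
    i = celery_app.control.inspect([worker])
    active = i.active() or {}
    reserved = i.reserved() or {}
    scheduled = i.scheduled() or {}
    print(worker, len(active.get(worker, [])), "active tasks")
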
Example #22
    def upload(self, subfolder=None, force=False):

        if 'file' not in request.files:

            # # the PUT problem for uploading?
            # tmp = request.stream.read()
            # print("TEST", len(tmp))
            # with open('uploaded_image.jpg', 'w') as f:
            #     f.write(request.stream.read())
            # # print("TEST", request.data)

            return self.force_response(
                errors={"Missing file": "No files specified"},
                code=hcodes.HTTP_BAD_METHOD_NOT_ALLOWED,
            )

        myfile = request.files['file']

        # Check file extension?
        if not self.allowed_file(myfile.filename):
            return self.force_response(
                errors={"Wrong extension": "File extension not allowed"})

        # Check file name
        filename = secure_filename(myfile.filename)
        abs_file = self.absolute_upload_file(filename, subfolder)
        log.info("File request for [{}]({})", myfile, abs_file)

        # ## IMPORTANT NOTE TO SELF:
        # If chunks are received here there could be problems:
        # a chunk truncates the connection and triggers a second request,
        # so you would end up with a file that already exists
        # but is corrupted...
        if os.path.exists(abs_file):

            log.warning("Already exists")
            if force:
                os.remove(abs_file)
                log.debug("Forced removal")
            else:
                return self.force_response(
                    errors={
                        "File '" + filename + "' already exists.":
                        "Change file name or use the force parameter"
                    },
                    code=hcodes.HTTP_BAD_REQUEST,
                )

        # Save the file
        try:
            myfile.save(abs_file)
            log.debug("Absolute file path should be '{}'", abs_file)
        except Exception:
            return self.force_response(
                errors={"Permissions": "Failed to write uploaded file"},
                code=hcodes.HTTP_DEFAULT_SERVICE_FAIL,
            )

        # Check exists
        if not os.path.exists(abs_file):
            return self.force_response(
                errors={
                    "Server file system": "Unable to recover the uploaded file"
                },
                code=hcodes.HTTP_DEFAULT_SERVICE_FAIL,
            )

        # Extra info
        ftype = None
        fcharset = None
        try:
            # Check the type
            from plumbum.cmd import file

            out = file["-ib", abs_file]()
            tmp = out.split(';')
            ftype = tmp[0].strip()
            fcharset = tmp[1].split('=')[1].strip()
        except Exception:
            log.warning("Unknown type for '{}'", abs_file)

        ########################
        # ## Final response

        # The default redirect is a 302 status, which makes the client
        # think that the response was unauthorized...
        # see http://dotnet.dzone.com/articles/getting-know-cross-origin

        return self.force_response(
            {
                'filename': filename,
                'meta': {
                    'type': ftype,
                    'charset': fcharset
                }
            },
            code=hcodes.HTTP_OK_BASIC,
        )
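
A plain-Flask sketch of the same upload flow (secure_filename, refuse to overwrite unless forced); UPLOAD_DIR and the route are placeholders, not part of the framework above:

import os

from flask import Flask, request
from werkzeug.utils import secure_filename

UPLOAD_DIR = "/tmp/uploads"
app = Flask(__name__)


@app.route("/upload", methods=["POST"])
def upload():
    if "file" not in request.files:
        return {"error": "No files specified"}, 405
    myfile = request.files["file"]
    filename = secure_filename(myfile.filename)
    abs_file = os.path.join(UPLOAD_DIR, filename)
    force = request.args.get("force") == "true"
    if os.path.exists(abs_file) and not force:
        return {"error": f"File '{filename}' already exists"}, 400
    myfile.save(abs_file)
    return {"filename": filename}, 200
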
Example #23
class BaseAuthentication(metaclass=abc.ABCMeta):

    """
    An almost abstract class with methods
    to be implemented with a new service
    that aims to store credentials of users and roles.
    """

    # Secret loaded from secret.key file
    JWT_SECRET: Optional[bytes] = None
    # JWT_ALGO = 'HS256'
    # HS512 should be faster on 64-bit machines
    JWT_ALGO = "HS512"

    # 1 month in seconds
    DEFAULT_TOKEN_TTL = Env.get_int("AUTH_JWT_TOKEN_TTL", 2_592_000)
    # Grace period before starting to evaluate IP address on token validation
    GRACE_PERIOD = timedelta(seconds=Env.get_int("AUTH_TOKEN_IP_GRACE_PERIOD", 7200))
    SAVE_LAST_ACCESS_EVERY = timedelta(
        seconds=Env.get_int("AUTH_TOKEN_SAVE_FREQUENCY", 60)
    )

    FULL_TOKEN = "f"
    PWD_RESET = "r"
    ACTIVATE_ACCOUNT = "a"
    TOTP = "TOTP"
    MIN_PASSWORD_LENGTH = Env.get_int("AUTH_MIN_PASSWORD_LENGTH", 8)

    SECOND_FACTOR_AUTHENTICATION = Env.get_bool(
        "AUTH_SECOND_FACTOR_AUTHENTICATION", False
    )

    # enabled if explicitly set or if 2FA is enabled
    FORCE_FIRST_PASSWORD_CHANGE = SECOND_FACTOR_AUTHENTICATION or Env.get_bool(
        "AUTH_FORCE_FIRST_PASSWORD_CHANGE", False
    )

    # enabled if explicitly set or if 2FA is enabled
    VERIFY_PASSWORD_STRENGTH = SECOND_FACTOR_AUTHENTICATION or Env.get_bool(
        "AUTH_VERIFY_PASSWORD_STRENGTH", False
    )
    MAX_PASSWORD_VALIDITY: Optional[timedelta] = get_timedelta(
        Env.get_int("AUTH_MAX_PASSWORD_VALIDITY", 0),
        MAX_PASSWORD_VALIDITY_MIN_TESTNIG_VALUE,
    )

    DISABLE_UNUSED_CREDENTIALS_AFTER: Optional[timedelta] = get_timedelta(
        Env.get_int("AUTH_DISABLE_UNUSED_CREDENTIALS_AFTER", 0),
        # min 60 seconds are required when testing
        DISABLE_UNUSED_CREDENTIALS_AFTER_MIN_TESTNIG_VALUE,
    )

    MAX_LOGIN_ATTEMPTS = get_max_login_attempts(
        Env.get_int("AUTH_MAX_LOGIN_ATTEMPTS", 0)
    )

    FAILED_LOGINS_EXPIRATION: timedelta = timedelta(
        seconds=Env.get_int("AUTH_LOGIN_BAN_TIME", 3600)
    )

    default_user: Optional[str] = None
    default_password: Optional[str] = None
    roles: List[str] = []
    roles_data: Dict[str, str] = {}
    default_role: str = Role.USER.value

    # To be stored on DB
    failed_logins: Dict[str, List[FailedLogin]] = {}

    # Executed once by Connector in init_app
    @classmethod
    def module_initialization(cls) -> None:
        cls.load_default_user()
        cls.load_roles()
        cls.import_secret(SECRET_KEY_FILE)

    @staticmethod
    def load_default_user() -> None:

        BaseAuthentication.default_user = Env.get("AUTH_DEFAULT_USERNAME")
        BaseAuthentication.default_password = Env.get("AUTH_DEFAULT_PASSWORD")
        if (
            BaseAuthentication.default_user is None
            or BaseAuthentication.default_password is None
        ):  # pragma: no cover
            print_and_exit("Default credentials are unavailable!")

    @staticmethod
    def load_roles() -> None:
        BaseAuthentication.roles_data = get_project_configuration(
            "variables.roles"
        ).copy()
        if not BaseAuthentication.roles_data:  # pragma: no cover
            print_and_exit("No roles configured")

        BaseAuthentication.default_role = BaseAuthentication.roles_data.pop("default")

        BaseAuthentication.roles = []
        for role, description in BaseAuthentication.roles_data.items():
            if description != ROLE_DISABLED:
                BaseAuthentication.roles.append(role)

        if not BaseAuthentication.default_role:  # pragma: no cover
            print_and_exit(
                "Default role {} not available!", BaseAuthentication.default_role
            )

    def make_login(self, username: str, password: str) -> Tuple[str, Payload, User]:
        """Verify the given credentials and return token, payload and user."""

        try:
            user = self.get_user(username=username)
        except ValueError as e:  # pragma: no cover
            # SqlAlchemy can raise the following error:
            # A string literal cannot contain NUL (0x00) characters.
            log.error(e)
            raise BadRequest("Invalid input received")
        except BaseException as e:  # pragma: no cover
            log.error("Unable to connect to auth backend\n[{}] {}", type(e), e)

            raise ServiceUnavailable("Unable to connect to auth backend")

        if user is None:
            self.register_failed_login(username)

            self.log_event(
                Events.failed_login,
                payload={"username": username},
                user=user,
            )

            raise Unauthorized("Invalid access credentials", is_warning=True)

        # Check if Oauth2 is enabled
        if user.authmethod != "credentials":  # pragma: no cover
            raise BadRequest("Invalid authentication method")

        # New hashing algorithm, based on bcrypt
        if self.verify_password(password, user.password):
            # Token expiration is capped by the user expiration date, if set
            payload, full_payload = self.fill_payload(user, expiration=user.expiration)
            token = self.create_token(payload)

            self.log_event(Events.login, user=user)
            return token, full_payload, user

        self.log_event(
            Events.failed_login,
            payload={"username": username},
            user=user,
        )
        self.register_failed_login(username)
        raise Unauthorized("Invalid access credentials", is_warning=True)

    @classmethod
    def import_secret(cls, abs_filename: str) -> None:
        try:
            with open(abs_filename, "rb") as secret_file:
                cls.JWT_SECRET = secret_file.read()
        except OSError:  # pragma: no cover
            print_and_exit("Jwt secret file {} not found", abs_filename)

    #####################
    # Password handling #
    #####################
    @staticmethod
    def verify_password(plain_password: str, hashed_password: str) -> bool:
        try:
            return cast(bool, pwd_context.verify(plain_password, hashed_password))
        except ValueError as e:  # pragma: no cover
            log.error(e)

            return False

    @staticmethod
    def get_password_hash(password):
        if not password:
            raise Unauthorized("Invalid password")
        return pwd_context.hash(password)

    @staticmethod
    def get_remote_ip() -> str:
        try:
            if forwarded_ips := request.headers.getlist("X-Forwarded-For"):
                # it can be something like: ['IP1, IP2']
                return str(forwarded_ips[-1].split(",")[0].strip())

            if PRODUCTION and not TESTING:  # pragma: no cover
                log.warning(
                    "Production mode is enabled, but X-Forwarded-For header is missing"
                )

            if request.remote_addr:
                return request.remote_addr

        # Raised when get_remote_ip is executed outside request context
        # For example when creating tokens in initialize_testing_environment
        except RuntimeError as e:
            log.debug(e)
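
The pwd_context used above is presumably a passlib CryptContext; a standalone illustration of hashing and verification with bcrypt:

from passlib.context import CryptContext

pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")

hashed = pwd_context.hash("my-secret-password")
assert pwd_context.verify("my-secret-password", hashed)
assert not pwd_context.verify("wrong-password", hashed)
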
Example #24
    def upload_data(self, filename, subfolder=None, force=False):

        filename = secure_filename(filename)

        # Check file extension?
        if not self.allowed_file(filename):
            return self.force_response(
                errors=["Wrong extension, file extension not allowed"])

        content = request.data

        abs_file = self.absolute_upload_file(filename, subfolder)
        log.info("File request for {}", abs_file)

        if os.path.exists(abs_file):

            log.warning("File already exists")
            if force:
                os.remove(abs_file)
                log.debug("Forced removal")
            else:
                return self.force_response(
                    errors=["File '" + filename + "' already exists"],
                    code=hcodes.HTTP_BAD_REQUEST,
                )

        with open(abs_file, "ab") as f:
            f.write(content)

        # Check exists
        if not os.path.exists(abs_file):
            return self.force_response(
                errors=["Server error: unable to recover the uploaded file"],
                code=hcodes.HTTP_DEFAULT_SERVICE_FAIL,
            )

        # Extra info
        ftype = None
        fcharset = None
        try:
            # Check the type
            from plumbum.cmd import file

            out = file["-ib", abs_file]()
            tmp = out.split(';')
            ftype = tmp[0].strip()
            fcharset = tmp[1].split('=')[1].strip()
        except Exception:
            log.warning("Unknown type for '{}'", abs_file)

        ########################
        # ## Final response

        # The default redirect is a 302 status, which makes the client
        # think that the response was unauthorized...
        # see http://dotnet.dzone.com/articles/getting-know-cross-origin

        return self.force_response(
            {
                'filename': filename,
                'meta': {
                    'type': ftype,
                    'charset': fcharset
                }
            },
            code=hcodes.HTTP_OK_BASIC,
        )
Example #25
 def close_connection(self):
     # TODO: This must be called!
     if self.__connection.is_closed or self.__connection.is_closing:
         log.debug('Connection already closed or closing.')
     else:
         self.__connection.close()
Example #26
    def load_connectors(cls, path: Path, module: str, services: Services) -> Services:

        main_folder = path.joinpath(CONNECTORS_FOLDER)
        if not main_folder.is_dir():
            log.debug("Connectors folder not found: {}", main_folder)
            return services

        for connector in main_folder.iterdir():
            if not connector.is_dir():
                continue

            connector_name = connector.name
            if connector_name.startswith("_"):
                continue

            # This is the only exception... we should rename sqlalchemy as alchemy
            if connector_name == "sqlalchemy":
                variables = Env.load_variables_group(prefix="alchemy")
            else:
                variables = Env.load_variables_group(prefix=connector_name)

            if not Env.to_bool(
                variables.get("enable_connector", True)
            ):  # pragma: no cover
                log.debug("{} connector is disabled", connector_name)
                continue

            external = False
            if "host" in variables:
                if host := variables.get("host"):
                    external = cls.is_external(host)
                # HOST found in variables but empty... never happens during tests
                else:  # pragma: no cover
                    variables["enable"] = "0"

            enabled = Env.to_bool(variables.get("enable"))

            # Celery is always enabled, if connector is enabled
            # No further check is needed on host/external
            available = enabled or external or connector_name == "celery"

            if not available:
                continue

            connector_module = Connector.get_module(connector_name, module)
            connector_class = Connector.get_class(connector_module)

            # Can't test connector misconfiguration...
            if not connector_class:  # pragma: no cover
                log.error("No connector class found in {}/{}", main_folder, connector)
                continue

            try:
                # Test the Connector compliance, i.e. verify that instance and
                # get_instance exist in the connector module and that the
                # Connector class can be instantiated
                connector_module.instance
                connector_module.get_instance
                connector_class()
            except AttributeError as e:  # pragma: no cover
                print_and_exit(e)

            services[connector_name] = variables

            log.debug("Got class definition for {}", connector_class)
Example #27
    try:
        if request.mimetype in [
                "application/octet-stream", "multipart/form-data"
        ]:
            return " STREAM_UPLOAD"

        if request.data:
            if data := handle_log_output(request.data):
                return f" {data}"

        if request.form:
            if data := obfuscate_dict(request.form):
                return f" {data}"

    except Exception as e:  # pragma: no cover
        log.debug(e)

    return ""


def handle_response(response: FlaskResponse) -> FlaskResponse:

    response.headers["_RV"] = str(version)

    PROJECT_VERSION = get_project_configuration("project.version", default="0")
    if PROJECT_VERSION is not None:
        response.headers["Version"] = str(PROJECT_VERSION)

    data_string = get_data_from_request()

    url = obfuscate_query_parameters(request.url)
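
handle_response above is presumably registered as a Flask after-request hook; a minimal sketch of that wiring with a hypothetical app and hard-coded versions:

from flask import Flask

app = Flask(__name__)


@app.after_request
def add_version_headers(response):
    # add custom headers to every outgoing response
    response.headers["_RV"] = "1.0.0"
    response.headers["Version"] = "0.1"
    return response
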
Example #28
    def get_instance(
        self: T,
        verification: Optional[int] = None,
        expiration: Optional[int] = None,
        **kwargs: str,
    ) -> T:

        if not Connector.check_availability(self.name):
            raise ServiceUnavailable(f"Service {self.name} is not available")

        if verification is None:
            # this should be the default value for this connector
            verification = Env.to_int(self.variables.get("verification_time"))

        if expiration is None:
            # this should be the default value for this connector
            expiration = Env.to_int(self.variables.get("expiration_time"))

        # When context is empty this is a connection at loading time
        # Do not save it
        if stack.top is None:
            log.debug("First connection for {}", self.name)
            # can raise ServiceUnavailable exception
            obj = self.initialize_connection(expiration, verification, **kwargs)
            return obj

        unique_hash = str(sorted(kwargs.items()))

        obj = self.get_object(name=self.name, key=unique_hash)

        # if an expiration time is set, verify the instance age
        if obj and obj.connection_expiration_time:

            # the instance is invalidated if older than the expiration time
            if datetime.now() >= obj.connection_expiration_time:

                log.info("{} connection is expired", self.name)
                obj.disconnect()
                obj = None

        # If a verification time is set, verify the instance age
        if obj and obj.connection_verification_time:
            now = datetime.now()

            # the instance is verified if older than the verification time
            if now >= obj.connection_verification_time:
                # if the connection is still valid, set a new verification time
                if obj.is_connected():
                    # Set the new verification time
                    ver = timedelta(seconds=verification)
                    obj.connection_verification_time = now + ver
                # if the connection is no longer valid, invalidate the instance
                else:  # pragma: no cover
                    log.info(
                        "{} is no longer connected, connector invalidated", self.name
                    )
                    obj.disconnected = True

        # return the instance only if still connected
        # (and not invalidated by the verification check)
        if obj and not obj.disconnected:
            return obj

        # can raise ServiceUnavailable exception
        obj = self.initialize_connection(expiration, verification, **kwargs)
        self.set_object(name=self.name, obj=obj, key=unique_hash)
        return obj
Example #29
    def get(self, test_num):
        sql = self.get_service_instance('sqlalchemy')

        log.debug(sql)

        return self.response("1")
Example #30
 def callback(self, result, message):
     if result:
         log.debug('Message successfully published on pushpin')
     else:
         log.error('Publish failed on pushpin: {}', message)