Code example #1
File: __init__.py Project: joskid/http-api
    def get_bindings(self, exchange: str) -> Optional[List[Dict[str, str]]]:
        if not self.exchange_exists(exchange):
            log.critical("Exchange does not exist: {}", exchange)
            return None

        host = self.variables.get("host", "")
        schema = ""
        if not host.startswith("http"):
            if Env.to_bool(self.variables.get("ssl_enabled")):
                schema = "https://"
            else:
                schema = "http://"

        port = self.variables.get("management_port")
        # URL-encode unsafe characters, including / (safe="" exempts nothing):
        # / -> %2F
        vhost = urllib.parse.quote(self.variables.get("vhost", "/"), safe="")
        user = self.variables.get("user")
        password = self.variables.get("password")
        # API Reference:
        # A list of all bindings in which a given exchange is the source.
        r = requests.get(
            f"{schema}{host}:{port}/api/exchanges/{vhost}/{exchange}/bindings/source",
            auth=HTTPBasicAuth(user, password),
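            # Note: verify=False disables TLS certificate verification for this request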
            verify=False,
        )
        response = r.json()
        if r.status_code != 200:  # pragma: no cover
            raise RestApiException(
                {"RabbitMQ": response.get("error", "Unknown error")},
                status_code=r.status_code,
            )

        bindings = []
        for row in response:
            # row == {
            #   'source': exchange-name,
            #   'vhost': probably '/',
            #   'destination': queue-or-dest-exchange-name,
            #   'destination_type': 'queue' or 'exchange',
            #   'routing_key': routing_key,
            #   'arguments': Dict,
            #   'properties_key': ?? as routing_key?
            # }

            bindings.append({
                "exchange": row["source"],
                "routing_key": row["routing_key"],
                "queue": row["destination"],
            })

        return bindings
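
As the comment in the example notes, passing safe="" to urllib.parse.quote encodes every unsafe character, including /, so the default vhost "/" becomes %2F in the management API path. A minimal check:

import urllib.parse

assert urllib.parse.quote("/", safe="") == "%2F"          # default vhost
assert urllib.parse.quote("/prod", safe="") == "%2Fprod"  # a named vhost (hypothetical)
# With the default safe="/", the slash would be left untouched:
assert urllib.parse.quote("/") == "/"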
Code example #2
File: __init__.py Project: joskid/http-api
    def connect(self, **kwargs):

        variables = self.variables.copy()
        # Beware: if you specify a user different from the default,
        # the send method will fail with PRECONDITION_FAILED because
        # the user_id will not pass verification
        # Locally save self.variables + kwargs to be used in send()
        variables.update(kwargs)

        ssl_enabled = Env.to_bool(variables.get("ssl_enabled"))

        log.info("Connecting to the Rabbit (SSL = {})", ssl_enabled)

        if (host := variables.get("host")) is None:
            raise ServiceUnavailable("Missing hostname")
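
The copy-then-update pattern above gives the kwargs passed to connect() precedence over the connector defaults stored in self.variables. A minimal illustration (hypothetical values):

defaults = {"host": "rabbit.example.com", "port": "5672"}
overrides = {"port": "5671"}

variables = defaults.copy()
variables.update(overrides)

assert variables == {"host": "rabbit.example.com", "port": "5671"}  # kwargs win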
Code example #3
        variables = self.variables.copy()

        variables.update(kwargs)

        if (host := variables.get("host")) is None:  # pragma: no cover
            raise ServiceUnavailable("Missing hostname")

        if (user := variables.get("user")) is None:  # pragma: no cover
            raise ServiceUnavailable("Missing credentials")

        if (password := variables.get("password")) is None:  # pragma: no cover
            raise ServiceUnavailable("Missing credentials")

        port = Env.get_int(variables.get("port"), 21)

        ssl_enabled = Env.to_bool(variables.get("ssl_enabled"))

        if ssl_enabled:
            context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)

            context.load_default_certs()

            # Disable certificate verification:
            # context.verify_mode = ssl.CERT_NONE
            # Enable certificate verification:
            context.verify_mode = ssl.CERT_REQUIRED
            context.check_hostname = False
            # Path to a PEM file to verify self-signed certificates
            context.load_verify_locations(cafile=SSL_CERTIFICATE)
            # System CA bundle to verify real certificates
            context.load_verify_locations(cafile=certifi.where())
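
The default port 21 suggests this connector targets FTPS. A minimal sketch of using such a context with the standard library (hypothetical usage; the real connector may wrap a different client):

import ftplib
import ssl

# Hypothetical connection values; the context mirrors the one built above
host, port, user, password = "ftp.example.com", 21, "user", "secret"
context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
context.load_default_certs()
context.check_hostname = False

ftp = ftplib.FTP_TLS(context=context)
ftp.connect(host, port)
ftp.login(user, password)
ftp.prot_p()  # protect the data channel with TLS as well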
Code example #4
    def test_env(self, faker: Faker) -> None:

        assert not Env.to_bool(None)
        assert Env.to_bool(None, True)
        assert not Env.to_bool(False)
        assert Env.to_bool(True)
        assert not Env.to_bool(0)
        assert Env.to_bool(1)
        assert Env.to_bool(1 + faker.pyint())
        assert Env.to_bool(-faker.pyint() - 1)
        assert not Env.to_bool("")
        assert not Env.to_bool("false")
        assert not Env.to_bool("False")
        assert not Env.to_bool("FALSE")
        assert Env.to_bool("true")
        assert Env.to_bool("True")
        assert Env.to_bool("TRUE")
        assert Env.to_bool(faker.pystr())
        assert not Env.to_bool(object)
        assert Env.to_bool(object, True)

        random_default = faker.pyint()
        assert Env.to_int(None) == 0
        assert Env.to_int(None, random_default) == random_default
        assert Env.to_int(random_default) == random_default
        assert Env.to_int("42") == 42
        assert Env.to_int("-42") == -42
        assert Env.to_int(str(random_default)) == random_default
        assert Env.to_int(faker.pystr()) == 0
        assert Env.to_int(faker.pystr(), random_default) == random_default
        assert Env.to_int(object) == 0

        prefix = faker.pystr().lower()
        var1 = faker.pystr()
        var2 = faker.pystr().lower()
        var3 = faker.pystr().upper()
        val1 = faker.pystr()
        val2 = faker.pystr()
        val3 = faker.pystr()

        os.environ[f"{prefix}_{var1}"] = val1
        os.environ[f"{prefix}_{var2}"] = val2
        os.environ[f"{prefix}_{var3}"] = val3
        variables = Env.load_variables_group(prefix)
        assert variables is not None
        assert isinstance(variables, dict)
        assert len(variables) == 3
        assert var2 in variables
        assert var3 not in variables
        assert var3.lower() in variables
        assert variables.get(var1.lower()) == val1
        assert variables.get(var2.lower()) == val2
        assert variables.get(var3.lower()) == val3
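
These assertions pin down the semantics of the Env helpers. A minimal sketch consistent with them (the real Env class may differ):

import os
from typing import Any, Dict


class Env:
    @staticmethod
    def to_bool(value: Any, default: bool = False) -> bool:
        if value is None:
            return default
        if isinstance(value, bool):
            return value
        if isinstance(value, int):
            return value != 0
        if isinstance(value, str):
            # "" and any casing of "false" are falsy; any other string is truthy
            return value.lower() not in ("", "false")
        return default

    @staticmethod
    def to_int(value: Any, default: int = 0) -> int:
        try:
            return int(value)
        except (TypeError, ValueError):
            return default

    @staticmethod
    def load_variables_group(prefix: str) -> Dict[str, str]:
        # Strip "<prefix>_" and lowercase the remainder of the key
        marker = f"{prefix}_"
        return {
            key[len(marker):].lower(): value
            for key, value in os.environ.items()
            if key.startswith(marker)
        }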
Code example #5
    def load_connectors(cls, path: Path, module: str, services: Services) -> Services:

        main_folder = path.joinpath(CONNECTORS_FOLDER)
        if not main_folder.is_dir():
            log.debug("Connectors folder not found: {}", main_folder)
            return services

        for connector in main_folder.iterdir():
            if not connector.is_dir():
                continue

            connector_name = connector.name
            if connector_name.startswith("_"):
                continue

            # This is the only exception... we should rename sqlalchemy to alchemy
            if connector_name == "sqlalchemy":
                variables = Env.load_variables_group(prefix="alchemy")
            else:
                variables = Env.load_variables_group(prefix=connector_name)

            if not Env.to_bool(
                variables.get("enable_connector", True)
            ):  # pragma: no cover
                log.debug("{} connector is disabled", connector_name)
                continue

            external = False
            if "host" in variables:
                if host := variables.get("host"):
                    external = cls.is_external(host)
                # HOST found in variables but empty... never happens during tests
                else:  # pragma: no cover
                    variables["enable"] = "0"

            enabled = Env.to_bool(variables.get("enable"))

            # Celery is always enabled, if connector is enabled
            # No further check is needed on host/external
            available = enabled or external or connector_name == "celery"

            if not available:
                continue

            connector_module = Connector.get_module(connector_name, module)
            connector_class = Connector.get_class(connector_module)

            # Can't test connector misconfiguration...
            if not connector_class:  # pragma: no cover
                log.error("No connector class found in {}/{}", main_folder, connector)
                continue

            try:
                # This is to test the Connector compliance,
                # i.e. to verify instance and get_instance in the connector module
                # and verify that the Connector can be instanced
                connector_module.instance
                connector_module.get_instance
                connector_class()
            except AttributeError as e:  # pragma: no cover
                print_and_exit(e)

            services[connector_name] = variables

            log.debug("Got class definition for {}", connector_class)
Code example #6
File: __init__.py Project: joskid/http-api
    def load_connectors(cls, path: str, module: str, services: Services) -> Services:

        main_folder = os.path.join(path, CONNECTORS_FOLDER)
        if not os.path.isdir(main_folder):
            log.debug("Connectors folder not found: {}", main_folder)
            return services

        for connector in os.listdir(main_folder):
            connector_path = os.path.join(main_folder, connector)
            if not os.path.isdir(connector_path):
                continue
            if connector.startswith("_"):
                continue

            # This is the only exception... we should rename sqlalchemy to alchemy
            if connector == "sqlalchemy":
                variables = Env.load_variables_group(prefix="alchemy")
            else:
                variables = Env.load_variables_group(prefix=connector)

            if not Env.to_bool(
                variables.get("enable_connector", True)
            ):  # pragma: no cover
                log.info("{} connector is disabled", connector)
                continue

            # if host is not in variables (like for Celery) do not consider it
            external = False
            if "host" in variables:
                if host := variables.get("host"):
                    external = cls.is_external(host)
                # HOST found in variables but empty... never happens during tests
                else:  # pragma: no cover
                    variables["enable"] = "0"

            enabled = Env.to_bool(variables.get("enable"))
            available = enabled or external

            if not available:
                continue

            connector_module = Connector.get_module(connector, module)
            connector_class = Connector.get_class(connector_module)

            # Can't test connector misconfiguration...
            if not connector_class:  # pragma: no cover
                log.error("No connector class found in {}/{}", main_folder, connector)
                continue

            try:
                # This is to test the Connector compliance,
                # i.e. to verify instance and get_instance in the connector module
                # and verify that the Connector can be instanced
                connector_module.instance
                connector_module.get_instance
                connector_class()
            except AttributeError as e:  # pragma: no cover
                print_and_exit(e)

            services[connector] = variables

            log.debug("Got class definition for {}", connector_class)
Code example #7
File: __init__.py Project: joskid/http-api
    def connect(self, **kwargs):

        variables = self.variables.copy()
        variables.update(kwargs)
        broker = variables.get("broker")

        if broker is None:  # pragma: no cover
            print_and_exit("Unable to start Celery, missing broker service")

        if broker == "RABBIT":
            service_vars = Env.load_variables_group(prefix="rabbitmq")

            self.celery_app.conf.broker_use_ssl = Env.to_bool(
                service_vars.get("ssl_enabled"))

            self.celery_app.conf.broker_url = self.get_rabbit_url(
                service_vars, protocol="amqp")

        elif broker == "REDIS":
            service_vars = Env.load_variables_group(prefix="redis")

            self.celery_app.conf.broker_use_ssl = False

            self.celery_app.conf.broker_url = self.get_redis_url(
                service_vars, protocol="redis")

        else:  # pragma: no cover
            print_and_exit(
                "Unable to start Celery: unknown broker service: {}", broker)

        log.info(
            "Configured {} as broker {}",
            broker,
            obfuscate_url(self.celery_app.conf.broker_url),
        )
        # From the guide: "Default: Taken from broker_url."
        # But this is not true: the connection fails if these are not explicitly set
        self.celery_app.conf.broker_read_url = self.celery_app.conf.broker_url
        self.celery_app.conf.broker_write_url = self.celery_app.conf.broker_url

        backend = variables.get("backend", broker)

        if backend == "RABBIT":
            service_vars = Env.load_variables_group(prefix="rabbitmq")

            log.warning(
                "RABBIT backend is quite limited and not fully supported. "
                "Consider enabling Redis or MongoDB as the backend database")
            self.celery_app.conf.result_backend = self.get_rabbit_url(
                service_vars, protocol="rpc")

        elif backend == "REDIS":
            service_vars = Env.load_variables_group(prefix="redis")

            self.celery_app.conf.result_backend = self.get_redis_url(
                service_vars, protocol="redis")
            # set('redis_backend_use_ssl', kwargs.get('redis_backend_use_ssl'))

        elif backend == "MONGODB":
            service_vars = Env.load_variables_group(prefix="mongo")

            self.celery_app.conf.result_backend = self.get_mongodb_url(
                service_vars, protocol="mongodb")

        else:  # pragma: no cover
            print_and_exit(
                "Unable to start Celery: unknown backend service: {}", backend)

        log.info(
            "Configured {} as backend {}",
            backend,
            obfuscate_url(self.celery_app.conf.result_backend),
        )

        # Should be enabled?
        # Default: Disabled by default (transient messages).
        # If set to True, result messages will be persistent.
        # This means the messages won’t be lost after a broker restart.
        # self.celery_app.conf.result_persistent = True

        # Skip initial warnings, avoiding pickle format (deprecated)
        self.celery_app.conf.accept_content = ["json"]
        self.celery_app.conf.task_serializer = "json"
        self.celery_app.conf.result_serializer = "json"

        # Already enabled by default to use UTC
        # self.celery_app.conf.enable_utc
        # self.celery_app.conf.timezone

        # Not needed, because tasks are dynamically injected
        # self.celery_app.conf.imports
        # self.celery_app.conf.includes

        # Max priority default value for all queues
        # Required to be able to set priority parameter on task calls
        self.celery_app.conf.task_queue_max_priority = 10

        # Default priority for tasks (if not specified)
        self.celery_app.conf.task_default_priority = 5

        # If you want stricter priority handling,
        # prefetching should probably also be disabled:

        # Late ack means the task messages will be acknowledged after the task
        # has been executed, not just before (the default behavior).
        # self.celery_app.conf.task_acks_late = True

        # How many messages to prefetch at a time multiplied by the number
        # of concurrent processes. The default is 4 (four messages for each process).
        # The default setting is usually a good choice, however – if you have very
        # long running tasks waiting in the queue and you have to start the workers,
        # note that the first worker to start will receive four times the number
        # of messages initially. Thus the tasks may not be fairly distributed to
        # the workers. To disable prefetching, set worker_prefetch_multiplier to 1.
        # Changing that setting to 0 will allow the worker to keep consuming as many
        # messages as it wants.
        self.celery_app.conf.worker_prefetch_multiplier = 1

        if Env.get_bool("CELERYBEAT_ENABLED"):

            CeleryExt.CELERYBEAT_SCHEDULER = backend

            if backend == "MONGODB":
                service_vars = Env.load_variables_group(prefix="mongo")
                url = self.get_mongodb_url(service_vars, protocol="mongodb")
                SCHEDULER_DB = "celery"
                self.celery_app.conf[
                    "CELERY_MONGODB_SCHEDULER_DB"] = SCHEDULER_DB
                self.celery_app.conf[
                    "CELERY_MONGODB_SCHEDULER_COLLECTION"] = "schedules"
                self.celery_app.conf["CELERY_MONGODB_SCHEDULER_URL"] = url

                import mongoengine

                m = mongoengine.connect(SCHEDULER_DB, host=url)
                log.info("Celery-beat connected to MongoDB: {}", m)
            elif backend == "REDIS":

                service_vars = Env.load_variables_group(prefix="redis")
                url = self.get_redis_url(service_vars, protocol="redis")

                self.celery_app.conf["REDBEAT_REDIS_URL"] = url
                self.celery_app.conf["REDBEAT_KEY_PREFIX"] = REDBEAT_KEY_PREFIX
                log.info("Celery-beat connected to Redis: {}",
                         obfuscate_url(url))
            else:  # pragma: no cover
                log.warning(
                    "Cannot configure celery beat scheduler with backend: {}",
                    backend)

        # self.disconnected = False

        conf = self.celery_app.conf
        # Replace the previous App with new settings
        self.celery_app = Celery("RAPyDo",
                                 broker=conf["broker_url"],
                                 backend=conf["result_backend"])
        self.celery_app.conf = conf

        for funct in Meta.get_celery_tasks(f"{CUSTOM_PACKAGE}.tasks"):
            # Weird errors due to celery-stubs?
            # "Callable[[], Any]" has no attribute "register"
            # The code is correct... let's ignore it
            self.celery_app.tasks.register(funct)  # type: ignore

        return self
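
With task_queue_max_priority = 10 and task_default_priority = 5 set above, individual calls can override the priority via the standard Celery API (task name is hypothetical):

from mycustom.tasks import my_task  # hypothetical task from the custom package

# Enqueue with an explicit priority between 0 and task_queue_max_priority (10)
my_task.apply_async(args=("payload",), priority=8)

# Without a priority argument, task_default_priority (5) applies
my_task.delay("payload")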
Code example #8
    def connect(self, **kwargs: str) -> "CeleryExt":

        variables = self.variables.copy()
        variables.update(kwargs)
        broker = variables.get("broker_service")

        if HOST_TYPE == DOCS:  # pragma: no cover
            broker = "RABBIT"

        if broker is None:  # pragma: no cover
            print_and_exit("Unable to start Celery, missing broker service")

        if broker == "RABBIT":
            service_vars = Env.load_variables_group(prefix="rabbitmq")

            if Env.to_bool(service_vars.get("ssl_enabled")):
                # The setting can be a dict with the following keys:
                #   ssl_cert_reqs (required): one of the SSLContext.verify_mode values:
                #         ssl.CERT_NONE
                #         ssl.CERT_OPTIONAL
                #         ssl.CERT_REQUIRED
                #   ssl_ca_certs (optional): path to the CA certificate
                #   ssl_certfile (optional): path to the client certificate
                #   ssl_keyfile (optional): path to the client key

                server_hostname = RabbitExt.get_hostname(
                    service_vars.get("host", ""))
                force_self_signed = Env.get_bool("SSL_FORCE_SELF_SIGNED")
                ca_certs = (SSL_CERTIFICATE if server_hostname == "localhost"
                            or force_self_signed else certifi.where())
                self.celery_app.conf.broker_use_ssl = {
                    # 'keyfile': '/var/ssl/private/worker-key.pem',
                    # 'certfile': '/var/ssl/amqp-server-cert.pem',
                    # 'ca_certs': '/var/ssl/myca.pem',
                    # 'cert_reqs': ssl.CERT_REQUIRED
                    # 'cert_reqs': ssl.CERT_OPTIONAL
                    "cert_reqs": ssl.CERT_REQUIRED,
                    "server_hostname": server_hostname,
                    "ca_certs": ca_certs,
                }

            self.celery_app.conf.broker_url = self.get_rabbit_url(
                service_vars, protocol="pyamqp")

        elif broker == "REDIS":
            service_vars = Env.load_variables_group(prefix="redis")

            self.celery_app.conf.broker_use_ssl = False

            self.celery_app.conf.broker_url = self.get_redis_url(
                service_vars, protocol="redis", db=RedisExt.CELERY_BROKER_DB)

        else:  # pragma: no cover
            print_and_exit(
                "Unable to start Celery: unknown broker service: {}", broker)

        log.info(
            "Configured {} as broker {}",
            broker,
            obfuscate_url(self.celery_app.conf.broker_url),
        )
        # From the guide: "Default: Taken from broker_url."
        # But this is not true: the connection fails if these are not explicitly set
        self.celery_app.conf.broker_read_url = self.celery_app.conf.broker_url
        self.celery_app.conf.broker_write_url = self.celery_app.conf.broker_url

        backend = variables.get("backend_service", broker)

        if backend == "RABBIT":
            service_vars = Env.load_variables_group(prefix="rabbitmq")

            log.warning(
                "RABBIT backend is quite limited and not fully supported. "
                "Consider enabling Redis as the backend database")
            self.celery_app.conf.result_backend = self.get_rabbit_url(
                service_vars, protocol="rpc")

        elif backend == "REDIS":
            service_vars = Env.load_variables_group(prefix="redis")

            self.celery_app.conf.result_backend = self.get_redis_url(
                service_vars, protocol="redis", db=RedisExt.CELERY_BACKEND_DB)
            # set('redis_backend_use_ssl', kwargs.get('redis_backend_use_ssl'))

        else:  # pragma: no cover
            print_and_exit(
                "Unable to start Celery: unknown backend service: {}", backend)

        log.info(
            "Configured {} as backend {}",
            backend,
            obfuscate_url(self.celery_app.conf.result_backend),
        )

        # Should be enabled?
        # Default: Disabled by default (transient messages).
        # If set to True, result messages will be persistent.
        # This means the messages won’t be lost after a broker restart.
        # self.celery_app.conf.result_persistent = True

        # Decides if publishing task messages will be retried in the case of
        # connection loss or other connection errors
        self.celery_app.conf.task_publish_retry = True

        # Already enabled by default to use UTC
        # self.celery_app.conf.enable_utc
        # self.celery_app.conf.timezone

        # Not needed, because tasks are dynamically injected
        # self.celery_app.conf.imports
        # self.celery_app.conf.includes

        # Note about priority: multi-queues is better than prioritized tasks
        # https://docs.celeryproject.org/en/master/faq.html#does-celery-support-task-priorities

        # Max priority default value for all queues
        # Required to be able to set priority parameter on task calls
        self.celery_app.conf.task_queue_max_priority = 10

        # Default priority for tasks (if not specified)
        self.celery_app.conf.task_default_priority = 5

        # If you want stricter priority handling,
        # prefetching should probably also be disabled:

        # Late ack means the task messages will be acknowledged after the task
        # has been executed, not just before (the default behavior).
        # self.celery_app.conf.task_acks_late = True

        # How many messages to prefetch at a time multiplied by the number
        # of concurrent processes. The default is 4 (four messages for each process).
        # The default setting is usually a good choice, however – if you have very
        # long running tasks waiting in the queue and you have to start the workers,
        # note that the first worker to start will receive four times the number
        # of messages initially. Thus the tasks may not be fairly distributed to
        # the workers. To disable prefetching, set worker_prefetch_multiplier to 1.
        # Changing that setting to 0 will allow the worker to keep consuming as many
        # messages as it wants.
        self.celery_app.conf.worker_prefetch_multiplier = 1

        # Introduced in Celery 5.1: on connection loss cancels all currently executed
        # tasks with late acknowledgement enabled.
        # These tasks cannot be acknowledged as the connection is gone,
        # and the tasks are automatically redelivered back to the queue.
        # In Celery 5.1 it is set to False by default.
        # The setting will be set to True by default in Celery 6.0.
        self.celery_app.conf.worker_cancel_long_running_tasks_on_connection_loss = True

        if not PRODUCTION:
            # Skip initial warnings by avoiding pickle format (deprecated)
            # Only set in DEV mode since in PROD mode the auth serializer is used
            self.celery_app.conf.accept_content = ["json"]
            self.celery_app.conf.task_serializer = "json"
            self.celery_app.conf.result_serializer = "json"

        if Env.get_bool("CELERYBEAT_ENABLED"):

            CeleryExt.CELERYBEAT_SCHEDULER = backend

            if backend == "REDIS":

                service_vars = Env.load_variables_group(prefix="redis")
                url = self.get_redis_url(service_vars,
                                         protocol="redis",
                                         db=RedisExt.CELERY_BEAT_DB)

                self.celery_app.conf["REDBEAT_REDIS_URL"] = url
                self.celery_app.conf["REDBEAT_KEY_PREFIX"] = REDBEAT_KEY_PREFIX
                log.info("Celery-beat connected to Redis: {}",
                         obfuscate_url(url))
            else:  # pragma: no cover
                log.warning(
                    "Cannot configure celery beat scheduler with backend: {}",
                    backend)

        # self.disconnected = False

        conf = self.celery_app.conf
        # Replace the previous App with new settings
        self.celery_app = Celery("RAPyDo",
                                 broker=conf.broker_url,
                                 backend=conf.result_backend)
        self.celery_app.conf = conf

        if PRODUCTION:

            # https://docs.celeryq.dev/en/stable/userguide/security.html#message-signing
            self.celery_app.conf.update(
                security_key=SSL_SECRET,
                security_certificate=SSL_CERTIFICATE,
                security_cert_store=SSL_CERTIFICATE,
                security_digest="sha256",
                task_serializer="auth",
                result_serializer="auth",
                event_serializer="auth",
                accept_content=["auth"],
            )

            self.celery_app.setup_security()

        for funct in Meta.get_celery_tasks(f"{CUSTOM_PACKAGE}.tasks"):
            # Weird errors due to celery-stubs?
            # "Callable[[], Any]" has no attribute "register"
            # The code is correct... let's ignore it
            self.celery_app.tasks.register(funct)  # type: ignore

        return self
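
get_rabbit_url and get_redis_url are not shown in these examples; a hypothetical sketch of a URL builder consistent with their call sites (the real helpers may differ):

import urllib.parse
from typing import Dict


def get_rabbit_url(variables: Dict[str, str], protocol: str) -> str:
    # Hypothetical sketch: "<protocol>://<user>:<password>@<host>:<port>/<vhost>"
    user = variables.get("user", "")
    password = variables.get("password", "")
    host = variables.get("host", "")
    port = variables.get("port", "")
    vhost = urllib.parse.quote(variables.get("vhost", "/"), safe="")
    return f"{protocol}://{user}:{password}@{host}:{port}/{vhost}"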