Code example #1
    def test_meta(self) -> None:

        # This is a valid package containing other packages... but no tasks will be found
        tasks = Meta.get_celery_tasks("restapi.utilities")
        assert isinstance(tasks, list)
        assert len(tasks) == 0

        tasks = Meta.get_celery_tasks("this-should-not-exist")
        assert isinstance(tasks, list)
        assert len(tasks) == 0

        mcls = Meta.get_classes_from_module(
            "this-should-not-exist")  # type: ignore
        assert isinstance(mcls, dict)
        assert len(mcls) == 0

        assert not Meta.get_module_from_string("this-should-not-exist")

        try:
            Meta.get_module_from_string(
                "this-should-not-exist",
                exit_on_fail=True,
            )
            pytest.fail("ModuleNotFoundError not raised")  # pragma: no cover
        except ModuleNotFoundError:
            pass

        # This method is not very robust... but let's test the current implementation:
        # it basically returns the first argument if it is an instance of certain classes
        assert not Meta.get_self_reference_from_args()
        selfref = Meta.get_self_reference_from_args("test")
        assert selfref == "test"

        models = Meta.import_models("this-should",
                                    "not-exist",
                                    mandatory=False)
        assert isinstance(models, dict)
        assert len(models) == 0

        try:
            Meta.import_models("this-should", "not-exist", mandatory=True)
            pytest.fail("SystemExit not raised")  # pragma: no cover
        except SystemExit:
            pass

        # Check the mandatory default value
        models = Meta.import_models("this-should", "not-exist")
        assert isinstance(models, dict)
        assert len(models) == 0

        assert Meta.get_instance("invalid.path", "InvalidClass") is None
        assert Meta.get_instance("customization", "InvalidClass") is None
        assert Meta.get_instance("customization", "Customizer") is not None
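
The try/except blocks ending in pytest.fail above work, but the same checks are usually written with pytest.raises. A minimal equivalent sketch, assuming the same Meta API as in the test:

    import pytest

    # Equivalent, more idiomatic form of the failure checks above
    with pytest.raises(ModuleNotFoundError):
        Meta.get_module_from_string("this-should-not-exist", exit_on_fail=True)

    with pytest.raises(SystemExit):
        Meta.import_models("this-should", "not-exist", mandatory=True)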
Code example #2
File: __init__.py Project: joskid/http-api
    def connect(self, **kwargs):

        variables = self.variables.copy()
        variables.update(kwargs)
        broker = variables.get("broker")

        if broker is None:  # pragma: no cover
            print_and_exit("Unable to start Celery, missing broker service")

        if broker == "RABBIT":
            service_vars = Env.load_variables_group(prefix="rabbitmq")

            self.celery_app.conf.broker_use_ssl = Env.to_bool(
                service_vars.get("ssl_enabled"))

            self.celery_app.conf.broker_url = self.get_rabbit_url(
                service_vars, protocol="amqp")

        elif broker == "REDIS":
            service_vars = Env.load_variables_group(prefix="redis")

            self.celery_app.conf.broker_use_ssl = False

            self.celery_app.conf.broker_url = self.get_redis_url(
                service_vars, protocol="redis")

        else:  # pragma: no cover
            print_and_exit(
                "Unable to start Celery: unknown broker service: {}", broker)

        log.info(
            "Configured {} as broker {}",
            broker,
            obfuscate_url(self.celery_app.conf.broker_url),
        )
        # From the guide: "Default: Taken from broker_url."
        # In practice this is not true: the connection fails if not explicitly set
        self.celery_app.conf.broker_read_url = self.celery_app.conf.broker_url
        self.celery_app.conf.broker_write_url = self.celery_app.conf.broker_url

        backend = variables.get("backend", broker)

        if backend == "RABBIT":
            service_vars = Env.load_variables_group(prefix="rabbitmq")

            log.warning(
                "RABBIT backend is quite limited and not fully supported. "
                "Consider enabling Redis or MongoDB as the backend database")
            self.celery_app.conf.result_backend = self.get_rabbit_url(
                service_vars, protocol="rpc")

        elif backend == "REDIS":
            service_vars = Env.load_variables_group(prefix="redis")

            self.celery_app.conf.result_backend = self.get_redis_url(
                service_vars, protocol="redis")
            # set('redis_backend_use_ssl', kwargs.get('redis_backend_use_ssl'))

        elif backend == "MONGODB":
            service_vars = Env.load_variables_group(prefix="mongo")

            self.celery_app.conf.result_backend = self.get_mongodb_url(
                service_vars, protocol="mongodb")

        else:  # pragma: no cover
            print_and_exit(
                "Unable to start Celery: unknown backend service: {}", backend)

        log.info(
            "Configured {} as backend {}",
            backend,
            obfuscate_url(self.celery_app.conf.result_backend),
        )

        # Should result_persistent be enabled?
        # From the guide: disabled by default (transient messages).
        # If set to True, result messages will be persistent,
        # meaning they won't be lost after a broker restart.
        # self.celery_app.conf.result_persistent = True

        # Skip initial warnings, avoiding pickle format (deprecated)
        self.celery_app.conf.accept_content = ["json"]
        self.celery_app.conf.task_serializer = "json"
        self.celery_app.conf.result_serializer = "json"

        # Already enabled by default to use UTC
        # self.celery_app.conf.enable_utc
        # self.celery_app.conf.timezone

        # Not needed, because tasks are dynamically injected
        # self.celery_app.conf.imports
        # self.celery_app.conf.includes

        # Max priority default value for all queues
        # Required to be able to set priority parameter on task calls
        self.celery_app.conf.task_queue_max_priority = 10

        # Default priority for tasks (if not specified)
        self.celery_app.conf.task_default_priority = 5
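
        # A hypothetical usage sketch (my_task is not defined in this excerpt):
        # with the two settings above, a caller can pass an explicit priority
        # up to task_queue_max_priority when dispatching a task, e.g.
        #   my_task.apply_async(args=[...], priority=8)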

        # If you want to apply a stricter priority to items,
        # prefetching should probably also be disabled:

        # Late ack means the task messages will be acknowledged after the task
        # has been executed, not just before (the default behavior).
        # self.celery_app.conf.task_acks_late = True

        # How many messages to prefetch at a time multiplied by the number
        # of concurrent processes. The default is 4 (four messages for each process).
        # The default setting is usually a good choice, however – if you have very
        # long running tasks waiting in the queue and you have to start the workers,
        # note that the first worker to start will receive four times the number
        # of messages initially. Thus the tasks may not be fairly distributed to
        # the workers. To disable prefetching, set worker_prefetch_multiplier to 1.
        # Changing that setting to 0 will allow the worker to keep consuming as many
        # messages as it wants.
        self.celery_app.conf.worker_prefetch_multiplier = 1

        if Env.get_bool("CELERYBEAT_ENABLED"):

            CeleryExt.CELERYBEAT_SCHEDULER = backend

            if backend == "MONGODB":
                service_vars = Env.load_variables_group(prefix="mongo")
                url = self.get_mongodb_url(service_vars, protocol="mongodb")
                SCHEDULER_DB = "celery"
                self.celery_app.conf[
                    "CELERY_MONGODB_SCHEDULER_DB"] = SCHEDULER_DB
                self.celery_app.conf[
                    "CELERY_MONGODB_SCHEDULER_COLLECTION"] = "schedules"
                self.celery_app.conf["CELERY_MONGODB_SCHEDULER_URL"] = url

                import mongoengine

                m = mongoengine.connect(SCHEDULER_DB, host=url)
                log.info("Celery-beat connected to MongoDB: {}", m)
            elif backend == "REDIS":

                service_vars = Env.load_variables_group(prefix="redis")
                url = self.get_redis_url(service_vars, protocol="redis")

                self.celery_app.conf["REDBEAT_REDIS_URL"] = url
                self.celery_app.conf["REDBEAT_KEY_PREFIX"] = REDBEAT_KEY_PREFIX
                log.info("Celery-beat connected to Redis: {}",
                         obfuscate_url(url))
            else:  # pragma: no cover
                log.warning(
                    "Cannot configure celery beat scheduler with backend: {}",
                    backend)

        # self.disconnected = False

        conf = self.celery_app.conf
        # Replace the previous App with new settings
        self.celery_app = Celery("RAPyDo",
                                 broker=conf["broker_url"],
                                 backend=conf["result_backend"])
        self.celery_app.conf = conf

        for funct in Meta.get_celery_tasks(f"{CUSTOM_PACKAGE}.tasks"):
            # Weird errors due to celery-stubs?
            # "Callable[[], Any]" has no attribute "register"
            # The code is correct... let's ignore it
            self.celery_app.tasks.register(funct)  # type: ignore

        return self
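
A minimal usage sketch (assumptions: CeleryExt can be instantiated directly and a task module exists under CUSTOM_PACKAGE.tasks; neither is shown in this excerpt):

    ext = CeleryExt()  # hypothetical construction; the real project may differ
    ext.connect(broker="REDIS", backend="REDIS")
    # Dispatch a registered task by name; the explicit priority is allowed
    # because connect() sets task_queue_max_priority
    result = ext.celery_app.send_task(
        "mypackage.tasks.my_task", args=[42], priority=8)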
Code example #3
    def connect(self, **kwargs: str) -> "CeleryExt":

        variables = self.variables.copy()
        variables.update(kwargs)
        broker = variables.get("broker_service")

        if HOST_TYPE == DOCS:  # pragma: no cover
            broker = "RABBIT"

        if broker is None:  # pragma: no cover
            print_and_exit("Unable to start Celery, missing broker service")

        if broker == "RABBIT":
            service_vars = Env.load_variables_group(prefix="rabbitmq")

            if Env.to_bool(service_vars.get("ssl_enabled")):
                # The setting can be a dict with the following keys:
                #   ssl_cert_reqs (required): one of the SSLContext.verify_mode values:
                #         ssl.CERT_NONE
                #         ssl.CERT_OPTIONAL
                #         ssl.CERT_REQUIRED
                #   ssl_ca_certs (optional): path to the CA certificate
                #   ssl_certfile (optional): path to the client certificate
                #   ssl_keyfile (optional): path to the client key

                server_hostname = RabbitExt.get_hostname(
                    service_vars.get("host", ""))
                force_self_signed = Env.get_bool("SSL_FORCE_SELF_SIGNED")
                ca_certs = (SSL_CERTIFICATE if server_hostname == "localhost"
                            or force_self_signed else certifi.where())
                self.celery_app.conf.broker_use_ssl = {
                    # 'keyfile': '/var/ssl/private/worker-key.pem',
                    # 'certfile': '/var/ssl/amqp-server-cert.pem',
                    # 'ca_certs': '/var/ssl/myca.pem',
                    # 'cert_reqs': ssl.CERT_REQUIRED
                    # 'cert_reqs': ssl.CERT_OPTIONAL
                    "cert_reqs": ssl.CERT_REQUIRED,
                    "server_hostname": server_hostname,
                    "ca_certs": ca_certs,
                }

            self.celery_app.conf.broker_url = self.get_rabbit_url(
                service_vars, protocol="pyamqp")

        elif broker == "REDIS":
            service_vars = Env.load_variables_group(prefix="redis")

            self.celery_app.conf.broker_use_ssl = False

            self.celery_app.conf.broker_url = self.get_redis_url(
                service_vars, protocol="redis", db=RedisExt.CELERY_BROKER_DB)

        else:  # pragma: no cover
            print_and_exit(
                "Unable to start Celery: unknown broker service: {}", broker)

        log.info(
            "Configured {} as broker {}",
            broker,
            obfuscate_url(self.celery_app.conf.broker_url),
        )
        # From the guide: "Default: Taken from broker_url."
        # In practice this is not true: the connection fails if not explicitly set
        self.celery_app.conf.broker_read_url = self.celery_app.conf.broker_url
        self.celery_app.conf.broker_write_url = self.celery_app.conf.broker_url

        backend = variables.get("backend_service", broker)

        if backend == "RABBIT":
            service_vars = Env.load_variables_group(prefix="rabbitmq")

            log.warning(
                "RABBIT backend is quite limited and not fully supported. "
                "Consider enabling Redis as the backend database")
            self.celery_app.conf.result_backend = self.get_rabbit_url(
                service_vars, protocol="rpc")

        elif backend == "REDIS":
            service_vars = Env.load_variables_group(prefix="redis")

            self.celery_app.conf.result_backend = self.get_redis_url(
                service_vars, protocol="redis", db=RedisExt.CELERY_BACKEND_DB)
            # set('redis_backend_use_ssl', kwargs.get('redis_backend_use_ssl'))

        else:  # pragma: no cover
            print_and_exit(
                "Unable to start Celery: unknown backend service: {}", backend)

        log.info(
            "Configured {} as backend {}",
            backend,
            obfuscate_url(self.celery_app.conf.result_backend),
        )

        # Should result_persistent be enabled?
        # From the guide: disabled by default (transient messages).
        # If set to True, result messages will be persistent,
        # meaning they won't be lost after a broker restart.
        # self.celery_app.conf.result_persistent = True

        # Decides if publishing task messages will be retried in the case of
        # connection loss or other connection errors
        self.celery_app.conf.task_publish_retry = True

        # Already enabled by default to use UTC
        # self.celery_app.conf.enable_utc
        # self.celery_app.conf.timezone

        # Not needed, because tasks are dynamically injected
        # self.celery_app.conf.imports
        # self.celery_app.conf.includes

        # Note about priority: multi-queues is better than prioritized tasks
        # https://docs.celeryproject.org/en/master/faq.html#does-celery-support-task-priorities

        # Max priority default value for all queues
        # Required to be able to set priority parameter on task calls
        self.celery_app.conf.task_queue_max_priority = 10

        # Default priority for tasks (if not specified)
        self.celery_app.conf.task_default_priority = 5

        # If you want to apply a stricter priority to items,
        # prefetching should probably also be disabled:

        # Late ack means the task messages will be acknowledged after the task
        # has been executed, not just before (the default behavior).
        # self.celery_app.conf.task_acks_late = True

        # How many messages to prefetch at a time multiplied by the number
        # of concurrent processes. The default is 4 (four messages for each process).
        # The default setting is usually a good choice, however – if you have very
        # long running tasks waiting in the queue and you have to start the workers,
        # note that the first worker to start will receive four times the number
        # of messages initially. Thus the tasks may not be fairly distributed to
        # the workers. To disable prefetching, set worker_prefetch_multiplier to 1.
        # Changing that setting to 0 will allow the worker to keep consuming as many
        # messages as it wants.
        self.celery_app.conf.worker_prefetch_multiplier = 1

        # Introduced in Celery 5.1: on connection loss cancels all currently executed
        # tasks with late acknowledgement enabled.
        # These tasks cannot be acknowledged as the connection is gone,
        # and the tasks are automatically redelivered back to the queue.
        # In Celery 5.1 it is set to False by default.
        # The setting will be set to True by default in Celery 6.0.
        self.celery_app.conf.worker_cancel_long_running_tasks_on_connection_loss = True

        if not PRODUCTION:
            # Skip initial warnings by avoiding pickle format (deprecated)
            # Only set in DEV mode since in PROD mode the auth serializer is used
            self.celery_app.conf.accept_content = ["json"]
            self.celery_app.conf.task_serializer = "json"
            self.celery_app.conf.result_serializer = "json"

        if Env.get_bool("CELERYBEAT_ENABLED"):

            CeleryExt.CELERYBEAT_SCHEDULER = backend

            if backend == "REDIS":

                service_vars = Env.load_variables_group(prefix="redis")
                url = self.get_redis_url(service_vars,
                                         protocol="redis",
                                         db=RedisExt.CELERY_BEAT_DB)

                self.celery_app.conf["REDBEAT_REDIS_URL"] = url
                self.celery_app.conf["REDBEAT_KEY_PREFIX"] = REDBEAT_KEY_PREFIX
                log.info("Celery-beat connected to Redis: {}",
                         obfuscate_url(url))
            else:  # pragma: no cover
                log.warning(
                    "Cannot configure celery beat scheduler with backend: {}",
                    backend)

        # self.disconnected = False

        conf = self.celery_app.conf
        # Replace the previous App with new settings
        self.celery_app = Celery("RAPyDo",
                                 broker=conf.broker_url,
                                 backend=conf.result_backend)
        self.celery_app.conf = conf

        if PRODUCTION:

            # https://docs.celeryq.dev/en/stable/userguide/security.html#message-signing
            self.celery_app.conf.update(
                security_key=SSL_SECRET,
                security_certificate=SSL_CERTIFICATE,
                security_cert_store=SSL_CERTIFICATE,
                security_digest="sha256",
                task_serializer="auth",
                result_serializer="auth",
                event_serializer="auth",
                accept_content=["auth"],
            )

            self.celery_app.setup_security()
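            # setup_security() disables untrusted serializers and registers the
            # "auth" serializer configured above, so that task messages are
            # signed and verified with the given key and certificate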

        for funct in Meta.get_celery_tasks(f"{CUSTOM_PACKAGE}.tasks"):
            # Weird errors due to celery-stubs?
            # "Callable[[], Any]" has no attribute "register"
            # The code is correct... let's ignore it
            self.celery_app.tasks.register(funct)  # type: ignore

        return self
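
For the registration loop at the end to find anything, CUSTOM_PACKAGE.tasks must contain Celery task functions. A hypothetical task module sketch (the module path and names are assumptions; the project's actual task decorator and discovery mechanism may differ):

    # mypackage/tasks/example.py  (hypothetical module under CUSTOM_PACKAGE.tasks)
    from celery import shared_task

    @shared_task(name="mypackage.tasks.my_task")
    def my_task(value: int) -> int:
        # trivial body for illustration
        return value * 2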