Example #1
def init_worker(**kwargs):
    from koku.feature_flags import UNLEASH_CLIENT

    LOG.info("Initializing UNLEASH_CLIENT for celery worker.")
    unleash_init_start = datetime.utcnow()
    UNLEASH_CLIENT.initialize_client()
    LOG.info(
        "UNLEASH_CLIENT initialized for celery worker in "
        f"{(datetime.utcnow() - unleash_init_start).total_seconds()} seconds."
    )
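How this hook gets called is not shown in the excerpt; a minimal sketch, assuming it is registered against Celery's worker_process_init signal (the signal sends keyword arguments, which matches the **kwargs signature):

from celery.signals import worker_process_init

# Register the hook so each forked worker process initializes its own Unleash client.
worker_process_init.connect(init_worker)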
Example #2
def enable_hcs_processing(schema_name):  # pragma: no cover #noqa
    """Helper to determine if source is enabled for HCS."""
    if schema_name and not schema_name.startswith("acct"):
        schema_name = f"acct{schema_name}"

    context = {"schema": schema_name}
    LOG.info(f"enable_hcs_processing context: {context}")
    return bool(UNLEASH_CLIENT.is_enabled("hcs-data-processor", context))
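A hypothetical call for illustration (the schema value and the downstream function are made up; the helper prepends "acct" when it is missing, so "10001" is evaluated as "acct10001"):

if enable_hcs_processing("10001"):
    submit_hcs_report()  # hypothetical downstream call guarded by the flag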
Example #3
    def handle(self, *args, **kwargs):
        httpd = start_probe_server(SourcesProbeServer)

        timeout = 5
        # Special case: check_migrations() can return one of three values,
        # and only True means migrations are up-to-date, so compare against True explicitly.
        while check_migrations() != True:  # noqa
            LOG.warning(f"Migrations not done. Sleeping {timeout} seconds.")
            time.sleep(timeout)

        httpd.RequestHandlerClass.ready = True  # Set `ready` to true to indicate migrations are done.

        LOG.info("Initializing UNLEASH_CLIENT for sources-listener.")
        UNLEASH_CLIENT.initialize_client()

        LOG.info("Starting Sources Kafka Handler")
        initialize_sources_integration()
Example #4
    def handle(self, *args, **kwargs):
        """Initialize the prometheus exporter and koku-listener."""
        httpd = start_probe_server(ListenerProbeServer)

        # Special case: check_migrations() can return one of three values,
        # and only True means migrations are up-to-date, so compare against True explicitly.
        while check_migrations() != True:  # noqa
            LOG.warning("Migrations not done. Sleeping")
            time.sleep(5)

        httpd.RequestHandlerClass.ready = True  # Set `ready` to true to indicate migrations are done.

        LOG.info("Initializing UNLEASH_CLIENT for masu-listener.")
        UNLEASH_CLIENT.initialize_client()

        LOG.info("Starting Kafka handler")
        LOG.debug("handle args: %s, kwargs: %s", str(args), str(kwargs))
        initialize_kafka_handler()
Example #5
def enable_trino_processing(source_uuid, source_type, account):  # noqa
    """Helper to determine if source is enabled for Trino."""
    if account and not account.startswith("acct"):
        account = f"acct{account}"

    context = {
        "schema": account,
        "source-type": source_type,
        "source-uuid": source_uuid
    }
    LOG.info(f"enable_trino_processing context: {context}")
    return bool(settings.ENABLE_PARQUET_PROCESSING
                or source_uuid in settings.ENABLE_TRINO_SOURCES
                or source_type in settings.ENABLE_TRINO_SOURCE_TYPE
                or account in settings.ENABLE_TRINO_ACCOUNTS
                or UNLEASH_CLIENT.is_enabled("cost-trino-processor", context))
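A hypothetical call with illustrative arguments only (none of these values come from the excerpt):

use_trino = enable_trino_processing(
    "3f2e8e9c-0000-0000-0000-000000000000",  # hypothetical source UUID
    "AWS",                                    # hypothetical source type
    "10001",                                  # normalized to "acct10001" inside the helper
)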
Example #6
def shutdown_worker(**kwargs):
    from koku.feature_flags import UNLEASH_CLIENT

    LOG.info("Shutting down UNLEASH_CLIENT for celery worker.")
    UNLEASH_CLIENT.destroy()
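The shutdown hook would be registered the same way as the init hook in Example #1, assuming Celery's worker_process_shutdown signal is used:

from celery.signals import worker_process_shutdown

# Tear down the per-worker Unleash client when the worker process exits.
worker_process_shutdown.connect(shutdown_worker)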
Example #7
def worker_exit(server, worker):
    """Called just after a worker has been exited, in the worker process."""
    worker.log.info("Shutting down UNLEASH_CLIENT for gunicorn worker.")
    UNLEASH_CLIENT.destroy()
Example #8
def post_fork(server, worker):
    """Called just after a worker has been forked."""
    UNLEASH_CLIENT.unleash_instance_id += f"_pid_{worker.pid}"
    worker.log.info("Initializing UNLEASH_CLIENT for gunicorn worker.")
    UNLEASH_CLIENT.initialize_client()
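Examples #7 and #8 are gunicorn server hooks, which gunicorn looks up by name in the Python config module it loads. A minimal sketch of that config file, assuming a hypothetical file name, bind address, and WSGI module path (none of these come from the excerpt):

# gunicorn_conf.py (hypothetical name), loaded with: gunicorn -c gunicorn_conf.py koku.wsgi
bind = "0.0.0.0:8000"  # assumed bind address
workers = 4            # assumed worker count

# post_fork(server, worker) and worker_exit(server, worker) from Examples #8 and #7
# are defined at module level here; gunicorn calls them around each worker's lifecycle.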