def test_user_deployment_checksum_changes(template: HelmTemplate):
    """Changing a user deployment's values must change its checksum annotation."""
    pre_upgrade_helm_values = DagsterHelmValues.construct(
        dagsterUserDeployments=UserDeployments(
            enabled=True,
            enableSubchart=True,
            deployments=[
                create_simple_user_deployment("deployment-one"),
                create_simple_user_deployment("deployment-two"),
            ],
        ))
    post_upgrade_helm_values = DagsterHelmValues.construct(
        dagsterUserDeployments=UserDeployments(
            enabled=True,
            enableSubchart=True,
            deployments=[
                create_complex_user_deployment("deployment-one"),
                create_complex_user_deployment("deployment-two"),
            ],
        ))

    pre_upgrade_templates = template.render(pre_upgrade_helm_values)
    post_upgrade_templates = template.render(post_upgrade_helm_values)

    # The deployments changed between renders (simple -> complex), so each
    # deployment's checksum annotation must differ, forcing a redeploy on
    # Helm upgrade.
    for pre_upgrade_user_deployment, post_upgrade_user_deployment in zip(
            pre_upgrade_templates, post_upgrade_templates):
        pre_upgrade_checksum = pre_upgrade_user_deployment.spec.template.metadata.annotations[
            "checksum/dagster-user-deployment"]
        post_upgrade_checksum = post_upgrade_user_deployment.spec.template.metadata.annotations[
            "checksum/dagster-user-deployment"]

        assert pre_upgrade_checksum != post_upgrade_checksum
def test_custom_compute_log_manager_config(template: HelmTemplate):
    """A custom compute log manager's module/class/config flow into dagster.yaml."""
    custom_module = "a_module"
    custom_class = "Class"
    custom_config = {"config_field_one": "1", "config_field_two": "two"}

    helm_values = DagsterHelmValues.construct(
        computeLogManager=ComputeLogManager.construct(
            type=ComputeLogManagerType.CUSTOM,
            config=ComputeLogManagerConfig.construct(
                customComputeLogManager=ConfigurableClass.construct(
                    module=custom_module,
                    class_=custom_class,
                    config=custom_config,
                )
            ),
        )
    )

    configmaps = template.render(helm_values)
    instance = yaml.full_load(configmaps[0].data["dagster.yaml"])

    compute_logs = instance["compute_logs"]
    assert compute_logs["module"] == custom_module
    assert compute_logs["class"] == custom_class
    assert compute_logs["config"] == custom_config
def test_ingress(template_function, output, model, api_version):
    """An enabled ingress renders exactly one rule for the configured dagit host."""
    template = template_function(output, model)
    helm_values = DagsterHelmValues.construct(
        ingress=Ingress.construct(
            enabled=True,
            apiVersion=api_version,
            dagit=DagitIngressConfiguration.construct(
                host="foobar.com",
                path="bing",
                pathType=IngressPathType.IMPLEMENTATION_SPECIFIC,
                precedingPaths=[
                    IngressPath(
                        path="/*",
                        pathType=IngressPathType.IMPLEMENTATION_SPECIFIC,
                        serviceName="ssl-redirect",
                        servicePort="use-annotion",
                    )
                ],
            ),
        )
    )

    rendered = template.render(helm_values)
    assert len(rendered) == 1

    rules = rendered[0].spec.rules
    assert len(rules) == 1
    assert rules[0].host == "foobar.com"
def test_workspace_renders_from_helm_dagit(template: HelmTemplate):
    """Servers listed under dagit.workspace drive the rendered workspace.yaml."""
    expected_servers = [
        Server(host="another-deployment-one", port=4000),
        Server(host="another-deployment-two", port=4001),
        Server(host="another-deployment-three", port=4002),
    ]
    helm_values = DagsterHelmValues.construct(
        dagit=Dagit.construct(
            workspace=Workspace(enabled=True, servers=expected_servers)
        ),
        dagsterUserDeployments=UserDeployments(
            enabled=True,
            enableSubchart=True,
            deployments=[
                create_simple_user_deployment("deployment-one"),
                create_simple_user_deployment("deployment-two"),
            ],
        ),
    )

    rendered = template.render(helm_values)
    assert len(rendered) == 1

    workspace = yaml.full_load(rendered[0].data["workspace.yaml"])
    grpc_entries = workspace["load_from"]
    assert len(grpc_entries) == len(expected_servers)

    for entry, server in zip(grpc_entries, expected_servers):
        grpc_server = entry["grpc_server"]
        assert grpc_server["host"] == server.host
        assert grpc_server["port"] == server.port
        # Location name defaults to the server host.
        assert grpc_server["location_name"] == server.host
def test_ingress_read_only(template: HelmTemplate):
    """With read-only dagit enabled, the ingress exposes both hosts, in order."""
    read_write_ingress = DagitIngressConfiguration.construct(
        host="foobar.com",
        path="bing",
        precedingPaths=[
            IngressPath(path="/*", serviceName="ssl-redirect", servicePort="use-annotion")
        ],
    )
    read_only_ingress = DagitIngressConfiguration.construct(
        host="dagster.io",
        succeedingPaths=[
            IngressPath(path="/*", serviceName="ssl-redirect", servicePort="use-annotion")
        ],
    )
    helm_values = DagsterHelmValues.construct(
        ingress=Ingress.construct(
            enabled=True,
            dagit=read_write_ingress,
            readOnlyDagit=read_only_ingress,
        ),
        dagit=Dagit.construct(enableReadOnly=True),
    )

    rendered = template.render(helm_values)
    assert len(rendered) == 1

    rules = rendered[0].spec.rules
    assert len(rules) == 2
    assert [rule.host for rule in rules] == ["foobar.com", "dagster.io"]
def test_storage_postgres_db_config(template: HelmTemplate, storage: str):
    """Postgres connection settings propagate into the given storage section."""
    username = "******"
    host = "1.1.1.1"
    database = "database"
    params = {
        "connect_timeout": 10,
        "application_name": "myapp",
        "options": "-c synchronous_commit=off",
    }
    port = 8080
    helm_values = DagsterHelmValues.construct(
        postgresql=PostgreSQL.construct(
            postgresqlUsername=username,
            postgresqlHost=host,
            postgresqlDatabase=database,
            postgresqlParams=params,
            service=Service(port=port),
        )
    )

    configmaps = template.render(helm_values)
    assert len(configmaps) == 1

    instance = yaml.full_load(configmaps[0].data["dagster.yaml"])
    assert instance[storage]

    postgres_db = instance[storage]["config"]["postgres_db"]
    assert postgres_db["username"] == username
    # The password is never inlined; it is read from the environment at runtime.
    assert postgres_db["password"] == {"env": "DAGSTER_PG_PASSWORD"}
    assert postgres_db["hostname"] == host
    assert postgres_db["db_name"] == database
    assert postgres_db["port"] == port
    assert postgres_db["params"] == params
def test_celery_backend_with_redis_with_password(template: HelmTemplate):
    """Celery broker/backend URLs embed the redis password and DB numbers."""
    password = "******"
    host = "host"
    port = 6379
    broker_db = 20
    backend_db = 21
    helm_values = DagsterHelmValues.construct(
        generateCeleryConfigSecret=True,
        runLauncher=RunLauncher.construct(type=RunLauncherType.CELERY),
        redis=Redis.construct(
            enabled=True,
            usePassword=True,
            password=password,
            host=host,
            port=port,
            brokerDbNumber=broker_db,
            backendDbNumber=backend_db,
        ),
    )

    [secret] = template.render(helm_values)

    def encoded(url: str) -> str:
        # Kubernetes secret values are stored base64-encoded.
        return base64.b64encode(url.encode("utf-8")).decode("utf-8")

    assert secret.data["DAGSTER_CELERY_BROKER_URL"] == encoded(
        f"redis://:{password}@{host}:{port}/{broker_db}"
    )
    assert secret.data["DAGSTER_CELERY_BACKEND_URL"] == encoded(
        f"redis://:{password}@{host}:{port}/{backend_db}"
    )
def test_queued_run_coordinator(instance_template: HelmTemplate):  # pylint: disable=redefined-outer-name
    """tag_concurrency_limits on the queued run coordinator reach dagster.yaml."""
    helm_values = DagsterHelmValues.construct(
        dagsterDaemon=Daemon.construct(
            runCoordinator=RunCoordinator.construct(
                enabled=True,
                type=RunCoordinatorType.QUEUED,
                config=RunCoordinatorConfig.construct(
                    queuedRunCoordinator=QueuedRunCoordinatorConfig.construct(
                        tagConcurrencyLimits=[
                            TagConcurrencyLimit.construct(key="foo", value="hi", limit=1)
                        ]
                    ),
                ),
            )
        )
    )

    configmaps = instance_template.render(helm_values)
    assert len(configmaps) == 1

    instance = yaml.full_load(configmaps[0].data["dagster.yaml"])
    run_coordinator = instance["run_coordinator"]
    assert run_coordinator["class"] == "QueuedRunCoordinator"
    assert run_coordinator["config"]["tag_concurrency_limits"] == [
        {"key": "foo", "value": "hi", "limit": 1}
    ]
def test_gcs_compute_log_manager(template: HelmTemplate):
    """GCS compute log manager settings are written into dagster.yaml."""
    expected_config = {"bucket": "bucket", "local_dir": "/dir", "prefix": "prefix"}
    helm_values = DagsterHelmValues.construct(
        computeLogManager=ComputeLogManager.construct(
            type=ComputeLogManagerType.GCS,
            config=ComputeLogManagerConfig.construct(
                gcsComputeLogManager=GCSComputeLogManagerModel(
                    bucket="bucket", localDir="/dir", prefix="prefix"
                )
            ),
        )
    )

    configmaps = template.render(helm_values)
    instance = yaml.full_load(configmaps[0].data["dagster.yaml"])

    compute_logs = instance["compute_logs"]
    assert compute_logs["module"] == "dagster_gcp.gcs.compute_log_manager"
    assert compute_logs["class"] == "GCSComputeLogManager"
    assert compute_logs["config"] == expected_config

    # Guard against new config fields being added to the configurable class
    # without this test covering them.
    assert compute_logs["config"].keys() == GCSComputeLogManager.config_type().keys()
def test_job_instance_migrate_renders(template: HelmTemplate):
    """Enabling migrate renders exactly one migration job."""
    helm_values = DagsterHelmValues.construct(migrate=Migrate(enabled=True))

    rendered_jobs = template.render(helm_values)

    assert len(rendered_jobs) == 1
def test_custom_run_coordinator_config(template: HelmTemplate):
    """A custom run coordinator's module/class/config flow into dagster.yaml."""
    custom_module = "a_module"
    custom_class = "Class"
    custom_config = {"config_field_one": "1", "config_field_two": "two"}
    helm_values = DagsterHelmValues.construct(
        dagsterDaemon=Daemon.construct(
            runCoordinator=RunCoordinator.construct(
                enabled=True,
                type=RunCoordinatorType.CUSTOM,
                config=RunCoordinatorConfig.construct(
                    customRunCoordinator=ConfigurableClass.construct(
                        module=custom_module,
                        class_=custom_class,
                        config=custom_config,
                    )
                ),
            )
        )
    )

    configmaps = template.render(helm_values)
    assert len(configmaps) == 1

    instance = yaml.full_load(configmaps[0].data["dagster.yaml"])
    run_coordinator = instance["run_coordinator"]
    assert run_coordinator["module"] == custom_module
    assert run_coordinator["class"] == custom_class
    assert run_coordinator["config"] == custom_config
def test_celery_secret_does_not_render_without_celery_run_launcher(template: HelmTemplate):
    """Requesting the celery secret without a celery run launcher fails to render."""
    helm_values = DagsterHelmValues.construct(generateCeleryConfigSecret=True)

    with pytest.raises(subprocess.CalledProcessError):
        template.render(helm_values)
def test_postgresql_secret_renders(template: HelmTemplate):
    """Enabling the generated postgresql password secret renders one secret."""
    helm_values = DagsterHelmValues.construct(generatePostgresqlPasswordSecret=True)

    rendered_secrets = template.render(helm_values)

    assert len(rendered_secrets) == 1
def test_custom_python_logs_config(template: HelmTemplate):
    """python_logs settings (level, loggers, handler config) reach dagster.yaml."""
    expected_level = "INFO"
    expected_loggers = ["foo", "bar", "baz"]
    expected_handler_config = {
        "handlers": {
            "myHandler": {
                "class": "logging.StreamHandler",
                "level": "INFO",
                "stream": "foo",
            }
        },
        "formatters": {"myFormatter": {"format": "%(message)s"}},
    }
    helm_values = DagsterHelmValues.construct(
        pythonLogs=PythonLogs.construct(
            pythonLogLevel=expected_level,
            managedPythonLoggers=expected_loggers,
            dagsterHandlerConfig=expected_handler_config,
        )
    )

    configmaps = template.render(helm_values)
    instance = yaml.full_load(configmaps[0].data["dagster.yaml"])

    python_logs = instance["python_logs"]
    assert python_logs["python_log_level"] == expected_level
    assert python_logs["managed_python_loggers"] == expected_loggers
    assert python_logs["dagster_handler_config"] == expected_handler_config
def test_chart_does_render(user_deployments: UserDeployments, full_template: HelmTemplate):
    """The full chart renders successfully for the given user deployments."""
    helm_values = DagsterHelmValues.construct(dagsterUserDeployments=user_deployments)

    rendered = full_template.render(helm_values)

    assert rendered
def test_workspace_renders_from_helm_user_deployments(template: HelmTemplate):
    """Each user deployment becomes one grpc_server entry in workspace.yaml."""
    user_deployments = [
        create_simple_user_deployment("deployment-one"),
        create_simple_user_deployment("deployment-two"),
    ]
    helm_values = DagsterHelmValues.construct(
        dagsterUserDeployments=UserDeployments(
            enabled=True,
            enableSubchart=True,
            deployments=user_deployments,
        )
    )

    rendered = template.render(helm_values)
    assert len(rendered) == 1

    workspace = yaml.full_load(rendered[0].data["workspace.yaml"])
    grpc_entries = workspace["load_from"]
    assert len(grpc_entries) == len(user_deployments)

    for entry, deployment in zip(grpc_entries, user_deployments):
        grpc_server = entry["grpc_server"]
        assert grpc_server["host"] == deployment.name
        assert grpc_server["port"] == deployment.port
        # Location name defaults to the deployment name.
        assert grpc_server["location_name"] == deployment.name
def helm_values_single_complex_user_deployment() -> DagsterHelmValues:
    """Helm values with one complex user deployment in an enabled subchart."""
    deployment = create_complex_user_deployment("complex-deployment-one")
    return DagsterHelmValues.construct(
        dagsterUserDeployments=UserDeployments(
            enabled=True,
            enableSubchart=True,
            deployments=[deployment],
        )
    )
def test_job_instance_migrate_does_not_render(template: HelmTemplate, capsys):
    """With migrate disabled, the job template is absent and helm errors out."""
    helm_values = DagsterHelmValues.construct(migrate=Migrate(enabled=False))

    with pytest.raises(subprocess.CalledProcessError):
        template.render(helm_values)

    _, err = capsys.readouterr()
    assert "Error: could not find template" in err
def test_dagit_read_only_disabled(deployment_template: HelmTemplate):
    """By default, dagit is not launched with the --read-only flag."""
    dagit_deployments = deployment_template.render(DagsterHelmValues.construct())

    assert len(dagit_deployments) == 1
    command = "".join(dagit_deployments[0].spec.template.spec.containers[0].command)
    assert "--read-only" not in command
def test_postgresql_secret_does_not_render(template: HelmTemplate, capsys):
    """With the secret disabled, its template is absent and helm errors out."""
    helm_values = DagsterHelmValues.construct(generatePostgresqlPasswordSecret=False)

    with pytest.raises(subprocess.CalledProcessError):
        template.render(helm_values)

    _, err = capsys.readouterr()
    assert "Error: could not find template" in err
def test_telemetry(template: HelmTemplate, enabled: bool):
    """The telemetry.enabled flag is copied verbatim into dagster.yaml."""
    helm_values = DagsterHelmValues.construct(
        telemetry=Telemetry.construct(enabled=enabled)
    )

    configmaps = template.render(helm_values)
    instance = yaml.full_load(configmaps[0].data["dagster.yaml"])

    # Index directly rather than using `.get`: if the "telemetry" section is
    # missing, `.get` returns None and the next line fails with a confusing
    # "'NoneType' object is not subscriptable" instead of a clear KeyError.
    telemetry_config = instance["telemetry"]
    assert telemetry_config["enabled"] == enabled
def helm_values_multi_user_deployment() -> DagsterHelmValues:
    """Helm values with two simple user deployments in an enabled subchart."""
    simple_deployments = [
        create_simple_user_deployment("simple-deployment-one"),
        create_simple_user_deployment("simple-deployment-two"),
    ]
    return DagsterHelmValues.construct(
        dagsterUserDeployments=UserDeployments(
            enabled=True,
            enableSubchart=True,
            deployments=simple_deployments,
        )
    )
def test_deployments_do_not_render(
    user_deployments: UserDeployments, template: HelmTemplate, capsys
):
    """Deployments that should not render cause a missing-template helm error."""
    helm_values = DagsterHelmValues.construct(dagsterUserDeployments=user_deployments)

    with pytest.raises(subprocess.CalledProcessError):
        template.render(helm_values)

    _, err = capsys.readouterr()
    assert "Error: could not find template" in err
def test_celery_secret_renders(template: HelmTemplate):
    """With a celery run launcher, the celery config secret renders once."""
    helm_values = DagsterHelmValues.construct(
        generateCeleryConfigSecret=True,
        runLauncher=RunLauncher.construct(type=RunLauncherType.CELERY),
    )

    rendered_secrets = template.render(helm_values)

    assert len(rendered_secrets) == 1
def test_dagit_db_statement_timeout(deployment_template: HelmTemplate):
    """dagit.dbStatementTimeout is passed through as a CLI flag."""
    timeout_ms = 9000
    helm_values = DagsterHelmValues.construct(
        dagit=Dagit.construct(dbStatementTimeout=timeout_ms)
    )

    dagit_deployments = deployment_template.render(helm_values)

    command = " ".join(dagit_deployments[0].spec.template.spec.containers[0].command)
    assert f"--db-statement-timeout {timeout_ms}" in command
def test_celery_backend_override_only_one(template: HelmTemplate):
    """Overriding only one of broker/backend URL leaves the other at its default."""
    custom_url = "host:6380,password=password,ssl=True"
    default_url = "redis://myhost:6379/0"

    def encoded(url: str) -> str:
        # Kubernetes secret values are stored base64-encoded.
        return base64.b64encode(url.encode("utf-8")).decode("utf-8")

    def render_secret(redis):
        # Render the celery config secret for the given redis settings.
        [secret] = template.render(
            DagsterHelmValues.construct(
                generateCeleryConfigSecret=True,
                runLauncher=RunLauncher.construct(type=RunLauncherType.CELERY),
                redis=redis,
            )
        )
        return secret

    # Override the broker only: the backend falls back to the default URL.
    secret = render_secret(
        Redis.construct(enabled=True, brokerUrl=custom_url, host="myhost")
    )
    assert secret.data["DAGSTER_CELERY_BROKER_URL"] == encoded(custom_url)
    assert secret.data["DAGSTER_CELERY_BACKEND_URL"] == encoded(default_url)

    # Override the backend only: the broker falls back to the default URL.
    secret = render_secret(
        Redis.construct(enabled=True, backendUrl=custom_url, host="myhost")
    )
    assert secret.data["DAGSTER_CELERY_BROKER_URL"] == encoded(default_url)
    assert secret.data["DAGSTER_CELERY_BACKEND_URL"] == encoded(custom_url)
def test_run_monitoring(instance_template: HelmTemplate):  # pylint: disable=redefined-outer-name
    """Enabling run monitoring is reflected in dagster.yaml."""
    helm_values = DagsterHelmValues.construct(
        dagsterDaemon=Daemon.construct(runMonitoring={"enabled": True})
    )

    configmaps = instance_template.render(helm_values)
    assert len(configmaps) == 1

    instance = yaml.full_load(configmaps[0].data["dagster.yaml"])
    # `is True` (not `== True`, flagged by flake8 E712) also pins the YAML
    # value to a real boolean rather than any truthy value.
    assert instance["run_monitoring"]["enabled"] is True
def test_service_account_does_not_render(template: HelmTemplate, capsys):
    """With create=False, no service account template exists to render."""
    helm_values = DagsterHelmValues.construct(
        serviceAccount=ServiceAccount.construct(
            name="service-account-name", create=False
        ),
    )

    with pytest.raises(subprocess.CalledProcessError):
        template.render(helm_values)

    _, err = capsys.readouterr()
    assert "Error: could not find template" in err
def test_chart_does_not_render(
    user_deployments: UserDeployments, full_template: HelmTemplate, capsys
):
    """The full chart refuses to render when the subchart lacks a workspace."""
    helm_values = DagsterHelmValues.construct(dagsterUserDeployments=user_deployments)

    with pytest.raises(subprocess.CalledProcessError):
        full_template.render(helm_values)

    _, err = capsys.readouterr()
    assert (
        "dagster-user-deployments subchart cannot be enabled if workspace.yaml is not created."
        in err
    )
def test_custom_python_logs_missing_config(template: HelmTemplate):
    """Only the configured python_logs keys appear; omitted ones are absent."""
    helm_values = DagsterHelmValues.construct(
        pythonLogs=PythonLogs.construct(pythonLogLevel="INFO")
    )

    configmaps = template.render(helm_values)
    instance = yaml.full_load(configmaps[0].data["dagster.yaml"])

    python_logs = instance["python_logs"]
    assert python_logs["python_log_level"] == "INFO"
    assert "managed_python_loggers" not in python_logs
    assert "dagster_handler_config" not in python_logs