Example #1
def test_user_deployment_checksum_changes(template: HelmTemplate):
    pre_upgrade_helm_values = DagsterHelmValues.construct(
        dagsterUserDeployments=UserDeployments(
            enabled=True,
            enableSubchart=True,
            deployments=[
                create_simple_user_deployment("deployment-one"),
                create_simple_user_deployment("deployment-two"),
            ],
        ))
    post_upgrade_helm_values = DagsterHelmValues.construct(
        dagsterUserDeployments=UserDeployments(
            enabled=True,
            enableSubchart=True,
            deployments=[
                create_complex_user_deployment("deployment-one"),
                create_complex_user_deployment("deployment-two"),
            ],
        ))

    pre_upgrade_templates = template.render(pre_upgrade_helm_values)
    post_upgrade_templates = template.render(post_upgrade_helm_values)

    # User deployments whose Helm values change between releases should get a new
    # checksum annotation, forcing a redeploy on Helm upgrade
    for pre_upgrade_user_deployment, post_upgrade_user_deployment in zip(
            pre_upgrade_templates, post_upgrade_templates):
        pre_upgrade_checksum = pre_upgrade_user_deployment.spec.template.metadata.annotations[
            "checksum/dagster-user-deployment"]
        post_upgrade_checksum = post_upgrade_user_deployment.spec.template.metadata.annotations[
            "checksum/dagster-user-deployment"]

        assert pre_upgrade_checksum != post_upgrade_checksum
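# Note: these tests rely on a create_simple_user_deployment helper (and a
# create_complex_user_deployment variant) defined elsewhere in the module.
# A minimal sketch of the simple helper follows; the repository, tag, pull
# policy, and port are assumptions inferred from Examples #7, #12, and #16.
def create_simple_user_deployment(
    name: str, include_config_in_launched_runs: bool = False
) -> UserDeployment:
    return UserDeployment(
        name=name,
        image=kubernetes.Image(repository=f"repo/{name}", tag="tag1", pullPolicy="Always"),
        dagsterApiGrpcArgs=["-m", name],
        port=3030,
        includeConfigInLaunchedRuns=UserDeploymentIncludeConfigInLaunchedRuns(
            enabled=include_config_in_launched_runs
        ),
    )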
Example #2
def test_workspace_renders_from_helm_user_deployments(template: HelmTemplate):
    deployments = [
        create_simple_user_deployment("deployment-one"),
        create_simple_user_deployment("deployment-two"),
    ]
    helm_values = DagsterHelmValues.construct(
        dagsterUserDeployments=UserDeployments(
            enabled=True,
            enableSubchart=True,
            deployments=deployments,
        ))

    workspace_templates = template.render(helm_values)

    assert len(workspace_templates) == 1

    workspace_template = workspace_templates[0]

    workspace = yaml.full_load(workspace_template.data["workspace.yaml"])
    grpc_servers = workspace["load_from"]

    assert len(grpc_servers) == len(deployments)

    for grpc_server, deployment in zip(grpc_servers, deployments):
        assert grpc_server["grpc_server"]["host"] == deployment.name
        assert grpc_server["grpc_server"]["port"] == deployment.port
        assert grpc_server["grpc_server"]["location_name"] == deployment.name
Example #3
def test_workspace_renders_from_helm_dagit(template: HelmTemplate):
    servers = [
        Server(host="another-deployment-one", port=4000),
        Server(host="another-deployment-two", port=4001),
        Server(host="another-deployment-three", port=4002),
    ]
    helm_values = DagsterHelmValues.construct(
        dagit=Dagit.construct(
            workspace=Workspace(enabled=True, servers=servers)),
        dagsterUserDeployments=UserDeployments(
            enabled=True,
            enableSubchart=True,
            deployments=[
                create_simple_user_deployment("deployment-one"),
                create_simple_user_deployment("deployment-two"),
            ],
        ),
    )

    workspace_templates = template.render(helm_values)

    assert len(workspace_templates) == 1

    workspace_template = workspace_templates[0]

    workspace = yaml.full_load(workspace_template.data["workspace.yaml"])
    grpc_servers = workspace["load_from"]

    assert len(grpc_servers) == len(servers)

    for grpc_server, server in zip(grpc_servers, servers):
        assert grpc_server["grpc_server"]["host"] == server.host
        assert grpc_server["grpc_server"]["port"] == server.port
        assert grpc_server["grpc_server"]["location_name"] == server.host
Example #4
def helm_values_single_complex_user_deployment() -> DagsterHelmValues:
    return DagsterHelmValues.construct(
        dagsterUserDeployments=UserDeployments(
            enabled=True,
            enableSubchart=True,
            deployments=[create_complex_user_deployment("complex-deployment-one")],
        )
    )
def helm_values_multi_user_deployment() -> DagsterHelmValues:
    return DagsterHelmValues.construct(dagsterUserDeployments=UserDeployments(
        enabled=True,
        enableSubchart=True,
        deployments=[
            create_simple_user_deployment("simple-deployment-one"),
            create_simple_user_deployment("simple-deployment-two"),
        ],
    ))
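# A sketch of how such value-builders are typically consumed in this module,
# mirroring the parametrize pattern shown in Examples #21 and #23 (the test
# name here is hypothetical):
@pytest.mark.parametrize(
    "helm_values",
    [
        helm_values_single_complex_user_deployment(),
        helm_values_multi_user_deployment(),
    ],
)
def test_user_deployments_render(template: HelmTemplate, helm_values: DagsterHelmValues):
    user_deployments = template.render(helm_values)
    # One Deployment is rendered per configured user code deployment.
    assert len(user_deployments) == len(helm_values.dagsterUserDeployments.deployments)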
Example #6
def test_workspace_does_not_render(template: HelmTemplate, capsys):
    helm_values = DagsterHelmValues.construct(
        dagsterUserDeployments=UserDeployments(
            enabled=False,
            enableSubchart=False,
            deployments=[create_simple_user_deployment("deployment-one")],
        ))

    with pytest.raises(subprocess.CalledProcessError):
        template.render(helm_values)

    # Read the captured output after the with block so the assertion actually runs.
    _, err = capsys.readouterr()
    assert "Error: could not find template" in err
Example #7
def test_user_deployment_secrets_and_configmaps(
    template: HelmTemplate, include_config_in_launched_runs: bool
):
    name = "foo"

    secrets = [{"name": "my-secret"}, {"name": "my-other-secret"}]

    configmaps = [{"name": "my-configmap"}, {"name": "my-other-configmap"}]

    deployment = UserDeployment(
        name=name,
        image=kubernetes.Image(repository=f"repo/{name}", tag="tag1", pullPolicy="Always"),
        dagsterApiGrpcArgs=["-m", name],
        port=3030,
        envConfigMaps=[
            kubernetes.ConfigMapEnvSource.construct(None, **configmap) for configmap in configmaps
        ],
        envSecrets=[kubernetes.SecretEnvSource.construct(None, **secret) for secret in secrets],
        includeConfigInLaunchedRuns=UserDeploymentIncludeConfigInLaunchedRuns(
            enabled=include_config_in_launched_runs
        ),
    )

    helm_values = DagsterHelmValues.construct(
        dagsterUserDeployments=UserDeployments(
            enabled=True,
            enableSubchart=True,
            deployments=[deployment],
        )
    )

    user_deployments = template.render(helm_values)

    assert len(user_deployments) == 1

    if include_config_in_launched_runs:
        container_context = user_deployments[0].spec.template.spec.containers[0].env[2]
        assert container_context.name == "DAGSTER_CLI_API_GRPC_CONTAINER_CONTEXT"
        assert json.loads(container_context.value) == {
            "k8s": {
                "image_pull_policy": "Always",
                "env_secrets": ["my-secret", "my-other-secret"],
                "env_config_maps": [
                    "release-name-dagster-user-deployments-foo-user-env",
                    "my-configmap",
                    "my-other-configmap",
                ],
            }
        }
    else:
        _assert_no_container_context(user_deployments[0])
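# _assert_no_container_context is not shown in these excerpts; a minimal sketch
# of what it presumably checks, based on how it is used above:
def _assert_no_container_context(user_deployment):
    env_names = [
        env.name for env in (user_deployment.spec.template.spec.containers[0].env or [])
    ]
    assert "DAGSTER_CLI_API_GRPC_CONTAINER_CONTEXT" not in env_names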
Example #8
def test_workspace_renders_fail(template: HelmTemplate, capsys):
    helm_values = DagsterHelmValues.construct(
        dagsterUserDeployments=UserDeployments(
            enabled=False,
            enableSubchart=True,
            deployments=[],
        ))

    with pytest.raises(subprocess.CalledProcessError):
        template.render(helm_values)

    # Read the captured output after the with block so the assertion actually runs.
    _, err = capsys.readouterr()
    assert (
        "dagster-user-deployments subchart cannot be enabled if workspace.yaml is not created."
        in err)
def test_startup_probe_enabled(template: HelmTemplate, enabled: bool):
    deployment = create_simple_user_deployment("foo")
    deployment.startupProbe = kubernetes.StartupProbe.construct(enabled=enabled)
    helm_values = DagsterHelmValues.construct(
        dagsterUserDeployments=UserDeployments.construct(deployments=[deployment])
    )

    dagster_user_deployment = template.render(helm_values)
    assert len(dagster_user_deployment) == 1
    dagster_user_deployment = dagster_user_deployment[0]

    assert len(dagster_user_deployment.spec.template.spec.containers) == 1
    container = dagster_user_deployment.spec.template.spec.containers[0]

    assert (container.startup_probe is not None) == enabled
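# test_startup_probe_enabled takes an extra `enabled` argument; in the full
# module it is presumably parametrized. A fixture-based equivalent would be:
@pytest.fixture(params=[True, False], ids=["startup probe enabled", "startup probe disabled"])
def enabled(request) -> bool:
    return request.param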
Example #10
def test_readiness_probes(template: HelmTemplate):
    deployment = create_simple_user_deployment("foo")
    deployment.readinessProbe = kubernetes.ReadinessProbe.construct(timeout_seconds=3)
    helm_values = DagsterHelmValues.construct(
        dagsterUserDeployments=UserDeployments.construct(deployments=[deployment])
    )

    dagster_user_deployment = template.render(helm_values)
    assert len(dagster_user_deployment) == 1
    dagster_user_deployment = dagster_user_deployment[0]

    assert len(dagster_user_deployment.spec.template.spec.containers) == 1
    container = dagster_user_deployment.spec.template.spec.containers[0]

    assert container.startup_probe is None
    assert container.readiness_probe is not None
Example #11
def test_chart_does_not_render(full_template: HelmTemplate, capsys):
    helm_values = DagsterHelmValues.construct(
        dagsterUserDeployments=UserDeployments(
            enabled=False,
            enableSubchart=True,
            deployments=[
                create_simple_user_deployment("simple-deployment-one")
            ],
        ))

    with pytest.raises(subprocess.CalledProcessError):
        full_template.render(helm_values)

    # Read the captured output after the with block so the assertion actually runs.
    _, err = capsys.readouterr()
    assert (
        "dagster-user-deployments subchart cannot be enabled if workspace.yaml is not created."
        in err)
Example #12
def test_user_deployment_image(template: HelmTemplate):
    deployment = create_simple_user_deployment("foo")
    helm_values = DagsterHelmValues.construct(
        dagsterUserDeployments=UserDeployments(
            enabled=True,
            enableSubchart=True,
            deployments=[deployment],
        ))

    user_deployments = template.render(helm_values)

    assert len(user_deployments) == 1

    image = user_deployments[0].spec.template.spec.containers[0].image
    image_name, image_tag = image.split(":")

    assert image_name == deployment.image.repository
    assert image_tag == deployment.image.tag
Example #13
def test_startup_probe_exec(template: HelmTemplate):
    deployment = create_simple_user_deployment("foo")
    deployment.startupProbe = kubernetes.StartupProbe.construct(
        enabled=True, exec=dict(command=["my", "command"]))
    helm_values = DagsterHelmValues.construct(
        dagsterUserDeployments=UserDeployments.construct(
            deployments=[deployment]))

    dagster_user_deployment = template.render(helm_values)
    assert len(dagster_user_deployment) == 1
    dagster_user_deployment = dagster_user_deployment[0]

    assert len(dagster_user_deployment.spec.template.spec.containers) == 1
    container = dagster_user_deployment.spec.template.spec.containers[0]

    assert container.startup_probe._exec.command == [  # pylint:disable=protected-access
        "my",
        "command",
    ]
Example #14
def test_user_deployment_labels(template: HelmTemplate, include_config_in_launched_runs: bool):
    name = "foo"

    labels = {"my-label-key": "my-label-val", "my-other-label-key": "my-other-label-val"}

    deployment = UserDeployment(
        name=name,
        image=kubernetes.Image(repository=f"repo/{name}", tag="tag1", pullPolicy="Always"),
        dagsterApiGrpcArgs=["-m", name],
        port=3030,
        labels=labels,
        includeConfigInLaunchedRuns=UserDeploymentIncludeConfigInLaunchedRuns(
            enabled=include_config_in_launched_runs
        ),
    )

    helm_values = DagsterHelmValues.construct(
        dagsterUserDeployments=UserDeployments(
            enabled=True,
            enableSubchart=True,
            deployments=[deployment],
        )
    )

    user_deployments = template.render(helm_values)

    assert len(user_deployments) == 1

    if include_config_in_launched_runs:
        container_context = user_deployments[0].spec.template.spec.containers[0].env[2]
        assert container_context.name == "DAGSTER_CLI_API_GRPC_CONTAINER_CONTEXT"
        assert json.loads(container_context.value) == {
            "k8s": {
                "image_pull_policy": "Always",
                "env_config_maps": [
                    "release-name-dagster-user-deployments-foo-user-env",
                ],
                "labels": labels,
            }
        }
    else:
        _assert_no_container_context(user_deployments[0])
Example #15
def test_startup_probe_default_exec(template: HelmTemplate):
    deployment = create_simple_user_deployment("foo")
    deployment.startupProbe = kubernetes.StartupProbe.construct(enabled=True)
    helm_values = DagsterHelmValues.construct(
        dagsterUserDeployments=UserDeployments.construct(
            deployments=[deployment]))

    dagster_user_deployment = template.render(helm_values)
    assert len(dagster_user_deployment) == 1
    dagster_user_deployment = dagster_user_deployment[0]

    assert len(dagster_user_deployment.spec.template.spec.containers) == 1
    container = dagster_user_deployment.spec.template.spec.containers[0]

    assert container.startup_probe._exec.command == [  # pylint: disable=protected-access
        "dagster",
        "api",
        "grpc-health-check",
        "-p",
        str(deployment.port),
    ]
Example #16
def test_user_deployment_include_config(template: HelmTemplate):
    deployment = create_simple_user_deployment("foo", include_config_in_launched_runs=True)
    helm_values = DagsterHelmValues.construct(
        dagsterUserDeployments=UserDeployments(
            enabled=True,
            enableSubchart=True,
            deployments=[deployment],
        )
    )

    user_deployments = template.render(helm_values)

    assert len(user_deployments) == 1

    container_context = user_deployments[0].spec.template.spec.containers[0].env[2]
    assert container_context.name == "DAGSTER_CLI_API_GRPC_CONTAINER_CONTEXT"
    assert json.loads(container_context.value) == {
        "k8s": {
            "image_pull_policy": "Always",
            "env_config_maps": ["release-name-dagster-user-deployments-foo-user-env"],
        }
    }
Example #17
def test_workspace_renders_empty(template: HelmTemplate):
    servers: List[Server] = []
    helm_values = DagsterHelmValues.construct(
        dagit=Dagit.construct(
            workspace=Workspace(enabled=True, servers=servers)),
        dagsterUserDeployments=UserDeployments(
            enabled=True,
            enableSubchart=True,
            deployments=[],
        ),
    )

    workspace_templates = template.render(helm_values)

    assert len(workspace_templates) == 1

    workspace_template = workspace_templates[0]

    workspace = yaml.full_load(workspace_template.data["workspace.yaml"])
    grpc_servers = workspace["load_from"]

    assert len(grpc_servers) == len(servers)
def test_daemon_command_without_user_deployments(template: HelmTemplate):
    repository = "repository"
    tag = "tag"
    helm_values = DagsterHelmValues.construct(
        dagsterDaemon=Daemon.construct(
            image=kubernetes.Image.construct(repository=repository, tag=tag)),
        dagsterUserDeployments=UserDeployments(
            enabled=False,
            enableSubchart=False,
            deployments=[],
        ),
    )
    daemon_deployments = template.render(helm_values)

    assert len(daemon_deployments) == 1

    command = daemon_deployments[0].spec.template.spec.containers[0].command
    assert command == [
        "/bin/bash",
        "-c",
        "dagster-daemon run",
    ]
def test_daemon_command_with_user_deployments(template: HelmTemplate):
    repository = "repository"
    tag = "tag"
    helm_values = DagsterHelmValues.construct(
        dagsterDaemon=Daemon.construct(
            image=kubernetes.Image.construct(repository=repository, tag=tag)),
        dagsterUserDeployments=UserDeployments(
            enabled=True,
            enableSubchart=True,
            deployments=[
                create_simple_user_deployment("simple-deployment-one")
            ],
        ),
    )
    daemon_deployments = template.render(helm_values)

    assert len(daemon_deployments) == 1

    command = daemon_deployments[0].spec.template.spec.containers[0].command
    assert command == [
        "/bin/bash",
        "-c",
        "dagster-daemon run -w /dagster-workspace/workspace.yaml",
    ]
Example #20
def test_user_deployment_volumes(template: HelmTemplate):
    name = "foo"

    volumes = [
        {
            "name": "test-volume",
            "configMap": {
                "name": "test-volume-configmap"
            }
        },
        {
            "name": "test-pvc",
            "persistentVolumeClaim": {
                "claimName": "my_claim",
                "readOnly": False
            }
        },
    ]

    volume_mounts = [{
        "name": "test-volume",
        "mountPath": "/opt/dagster/test_mount_path/volume_mounted_file.yaml",
        "subPath": "volume_mounted_file.yaml",
    }]

    deployment = UserDeployment(
        name=name,
        image=kubernetes.Image(repository=f"repo/{name}",
                               tag="tag1",
                               pullPolicy="Always"),
        dagsterApiGrpcArgs=["-m", name],
        port=3030,
        volumes=[
            kubernetes.Volume.construct(None, **volume) for volume in volumes
        ],
        volumeMounts=[
            kubernetes.VolumeMount.construct(None, **volume_mount)
            for volume_mount in volume_mounts
        ],
    )

    helm_values = DagsterHelmValues.construct(
        dagsterUserDeployments=UserDeployments(
            enabled=True,
            enableSubchart=True,
            deployments=[deployment],
        ))

    user_deployments = template.render(helm_values)

    assert len(user_deployments) == 1

    image = user_deployments[0].spec.template.spec.containers[0].image
    image_name, image_tag = image.split(":")

    deployed_volume_mounts = user_deployments[0].spec.template.spec.containers[
        0].volume_mounts
    assert deployed_volume_mounts == [
        k8s_model_from_dict(k8s_client.models.V1VolumeMount, volume_mount)
        for volume_mount in volume_mounts
    ]

    deployed_volumes = user_deployments[0].spec.template.spec.volumes
    assert deployed_volumes == [
        k8s_model_from_dict(k8s_client.models.V1Volume, volume)
        for volume in volumes
    ]

    assert image_name == deployment.image.repository
    assert image_tag == deployment.image.tag
Example #21
        # Assert resources
        if deployment_values.resources:
            template_resources = t.api_client.sanitize_for_serialization(
                template.spec.template.spec.containers[0].resources
            )
            resource_values = json.loads(deployment_values.resources.json(exclude_none=True))

            assert template_resources == resource_values


@pytest.mark.parametrize(
    "user_deployments",
    [
        UserDeployments(
            enabled=False,
            enableSubchart=False,
            deployments=[create_simple_user_deployment("simple-deployment-one")],
        ),
        UserDeployments(
            enabled=False,
            enableSubchart=True,
            deployments=[create_simple_user_deployment("simple-deployment-one")],
        ),
        UserDeployments(
            enabled=True,
            enableSubchart=False,
            deployments=[create_simple_user_deployment("simple-deployment-one")],
        ),
    ],
    ids=[
        "user deployments disabled, subchart disabled",
Example #22
def test_user_deployment_volumes(template: HelmTemplate, include_config_in_launched_runs: bool):
    name = "foo"

    volumes = [
        {"name": "test-volume", "configMap": {"name": "test-volume-configmap"}},
        {"name": "test-pvc", "persistentVolumeClaim": {"claimName": "my_claim", "readOnly": False}},
    ]

    volume_mounts = [
        {
            "name": "test-volume",
            "mountPath": "/opt/dagster/test_mount_path/volume_mounted_file.yaml",
            "subPath": "volume_mounted_file.yaml",
        }
    ]

    deployment = UserDeployment(
        name=name,
        image=kubernetes.Image(repository=f"repo/{name}", tag="tag1", pullPolicy="Always"),
        dagsterApiGrpcArgs=["-m", name],
        port=3030,
        volumes=[kubernetes.Volume.construct(None, **volume) for volume in volumes],
        volumeMounts=[
            kubernetes.VolumeMount.construct(None, **volume_mount) for volume_mount in volume_mounts
        ],
        includeConfigInLaunchedRuns=UserDeploymentIncludeConfigInLaunchedRuns(
            enabled=include_config_in_launched_runs
        ),
    )

    helm_values = DagsterHelmValues.construct(
        dagsterUserDeployments=UserDeployments(
            enabled=True,
            enableSubchart=True,
            deployments=[deployment],
        )
    )

    user_deployments = template.render(helm_values)

    assert len(user_deployments) == 1

    image = user_deployments[0].spec.template.spec.containers[0].image
    image_name, image_tag = image.split(":")

    deployed_volume_mounts = user_deployments[0].spec.template.spec.containers[0].volume_mounts
    assert deployed_volume_mounts == [
        k8s_model_from_dict(
            k8s_client.models.V1VolumeMount,
            k8s_snake_case_dict(k8s_client.models.V1VolumeMount, volume_mount),
        )
        for volume_mount in volume_mounts
    ]

    deployed_volumes = user_deployments[0].spec.template.spec.volumes
    assert deployed_volumes == [
        k8s_model_from_dict(
            k8s_client.models.V1Volume, k8s_snake_case_dict(k8s_client.models.V1Volume, volume)
        )
        for volume in volumes
    ]

    assert image_name == deployment.image.repository
    assert image_tag == deployment.image.tag

    if include_config_in_launched_runs:
        container_context = user_deployments[0].spec.template.spec.containers[0].env[2]
        assert container_context.name == "DAGSTER_CLI_API_GRPC_CONTAINER_CONTEXT"
        assert json.loads(container_context.value) == {
            "k8s": {
                "env_config_maps": [
                    "release-name-dagster-user-deployments-foo-user-env",
                ],
                "image_pull_policy": "Always",
                "volume_mounts": volume_mounts,
                "volumes": volumes,
            }
        }
    else:
        _assert_no_container_context(user_deployments[0])
Example #23
        if deployment_values.resources:
            template_resources = t.api_client.sanitize_for_serialization(
                template.spec.template.spec.containers[0].resources)
            resource_values = json.loads(
                deployment_values.resources.json(exclude_none=True))

            assert template_resources == resource_values


@pytest.mark.parametrize(
    "helm_values",
    [
        DagsterHelmValues.construct(dagsterUserDeployments=UserDeployments(
            enabled=False,
            enableSubchart=False,
            deployments=[
                create_simple_user_deployment("simple-deployment-one")
            ],
        )),
        DagsterHelmValues.construct(dagsterUserDeployments=UserDeployments(
            enabled=False,
            enableSubchart=True,
            deployments=[
                create_simple_user_deployment("simple-deployment-one")
            ],
        )),
        DagsterHelmValues.construct(dagsterUserDeployments=UserDeployments(
            enabled=True,
            enableSubchart=False,
            deployments=[
                create_simple_user_deployment("simple-deployment-one")