Example #1
def test_create_namespaced_job_fails_outside_cluster(job_spec_file):
    environment = KubernetesJobEnvironment(job_spec_file=job_spec_file)
    storage = Docker(registry_url="test1",
                     image_name="test2",
                     image_tag="test3")

    with pytest.raises(EnvironmentError):
        with set_temporary_config({"cloud.auth_token": "test"}):
            environment.execute(Flow("test", storage=storage))
def test_populate_job_yaml_no_defaults():
    with tempfile.TemporaryDirectory() as directory:

        with open(os.path.join(directory, "job.yaml"), "w+") as file:
            file.write("job")

        environment = KubernetesJobEnvironment(
            job_spec_file=os.path.join(directory, "job.yaml"))

        file_path = os.path.dirname(
            prefect.environments.execution.dask.k8s.__file__)

        with open(os.path.join(file_path, "job.yaml")) as job_file:
            job = yaml.safe_load(job_file)
            job["spec"]["template"]["spec"]["containers"][0] = {}
            del job["metadata"]
            del job["spec"]["template"]["metadata"]

        with set_temporary_config({
                "cloud.graphql": "gql_test",
                "cloud.auth_token": "auth_test"
        }):
            with prefect.context(flow_run_id="id_test",
                                 namespace="namespace_test"):
                yaml_obj = environment._populate_job_spec_yaml(
                    yaml_obj=job,
                    docker_name="test1/test2:test3",
                    flow_file_path="test4",
                )

        assert (yaml_obj["metadata"]["labels"]["identifier"] ==
                environment.identifier_label)
        assert yaml_obj["metadata"]["labels"]["flow_run_id"] == "id_test"
        assert (yaml_obj["spec"]["template"]["metadata"]["labels"]
                ["identifier"] == environment.identifier_label)

        env = yaml_obj["spec"]["template"]["spec"]["containers"][0]["env"]

        assert env[0]["value"] == "gql_test"
        assert env[1]["value"] == "auth_test"
        assert env[2]["value"] == "id_test"
        assert env[3]["value"] == "namespace_test"
        assert env[4]["value"] == "test1/test2:test3"
        assert env[5]["value"] == "test4"
        assert env[10]["value"] == "[]"

        assert (yaml_obj["spec"]["template"]["spec"]["containers"][0]["image"]
                == "test1/test2:test3")

        assert yaml_obj["spec"]["template"]["spec"]["containers"][0][
            "command"] == [
                "/bin/sh",
                "-c",
            ]
        assert yaml_obj["spec"]["template"]["spec"]["containers"][0]["args"] == [
            "python -c 'import prefect; prefect.Flow.load(prefect.context.flow_file_path).environment.run_flow()'"
        ]
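The _populate_* helpers exercised above and in later examples index into a parsed Kubernetes Job manifest. Below is a minimal sketch of the nested dict shape those assertions assume; the values are placeholders and only the nesting and key names are taken from the tests (a real job spec carries many more fields):

job = {
    "apiVersion": "batch/v1",
    "kind": "Job",
    "metadata": {"name": "prefect-job", "labels": {}},
    "spec": {
        "template": {
            "metadata": {"labels": {}},
            "spec": {
                "containers": [
                    {
                        "name": "flow",
                        "image": "registry/image:tag",
                        "command": ["/bin/sh", "-c"],
                        "args": [],
                        "env": [],
                    }
                ]
            },
        }
    },
}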
Example #3
def test_create_k8s_job_environment_identifier_label():
    with tempfile.TemporaryDirectory() as directory:

        with open(os.path.join(directory, "job.yaml"), "w+") as file:
            file.write("job")

        environment = KubernetesJobEnvironment(
            job_spec_file=os.path.join(directory, "job.yaml"))
        assert environment.identifier_label
Example #4
def test_initialize_environment_with_spec_populates(monkeypatch, job_spec_file,
                                                    initial_job_spec,
                                                    default_command_args):
    environment = KubernetesJobEnvironment(job_spec_file=job_spec_file)
    assert environment._job_spec == initial_job_spec
    assert environment._job_spec["spec"]["template"]["spec"]["containers"][0][
        "command"] == ["/bin/sh", "-c"]
    assert (environment._job_spec["spec"]["template"]["spec"]["containers"][0]
            ["args"] == default_command_args)
Example #5
def test_create_k8s_job_environment(job_spec_file):
    environment = KubernetesJobEnvironment(job_spec_file=job_spec_file)
    assert environment.job_spec_file == job_spec_file
    assert environment.unique_job_name is False
    assert environment.executor is not None
    assert environment.labels == set()
    assert environment.on_start is None
    assert environment.on_exit is None
    assert environment.metadata == {}
    assert environment.logger.name == "prefect.KubernetesJobEnvironment"
Example #6
def test_create_k8s_job_callbacks(job_spec_file):
    def f():
        pass

    environment = KubernetesJobEnvironment(
        job_spec_file=job_spec_file, labels=["foo"], on_start=f, on_exit=f,
    )
    assert environment.labels == set(["foo"])
    assert environment.on_start is f
    assert environment.on_exit is f
def test_setup_k8s_job_environment_passes():
    with tempfile.TemporaryDirectory() as directory:

        with open(os.path.join(directory, "job.yaml"), "w+") as file:
            file.write("job")

        environment = KubernetesJobEnvironment(
            job_spec_file=os.path.join(directory, "job.yaml"))
        environment.setup(storage=Docker())
        assert environment
def test_execute_storage_missing_fields():
    with tempfile.TemporaryDirectory() as directory:

        with open(os.path.join(directory, "job.yaml"), "w+") as file:
            file.write("job")

        environment = KubernetesJobEnvironment(
            job_spec_file=os.path.join(directory, "job.yaml"))
        with pytest.raises(ValueError):
            environment.execute(storage=Docker(), flow_location="")
def test_execute_improper_storage():
    with tempfile.TemporaryDirectory() as directory:

        with open(os.path.join(directory, "job.yaml"), "w+") as file:
            file.write("job")

        environment = KubernetesJobEnvironment(
            job_spec_file=os.path.join(directory, "job.yaml"))
        with pytest.raises(TypeError):
            environment.execute(storage=Local(), flow_location="")
Example #10
def test_populate_job_yaml(job_spec_file, job, default_command_args):
    environment = KubernetesJobEnvironment(
        job_spec_file=job_spec_file, unique_job_name=True
    )

    job["spec"]["template"]["spec"]["containers"][0]["env"] = []
    environment._job_spec = job

    with set_temporary_config(
        {
            "cloud.graphql": "gql_test",
            "cloud.auth_token": "auth_test",
            "logging.extra_loggers": "['test_logger']",
        }
    ):
        with prefect.context(flow_run_id="id_test", namespace="namespace_test"):
            yaml_obj = environment._populate_run_time_job_spec_details(
                docker_name="test1/test2:test3",
            )

    assert "prefect-dask-job-" in yaml_obj["metadata"]["name"]
    assert len(yaml_obj["metadata"]["name"]) == 25

    assert (
        yaml_obj["metadata"]["labels"]["prefect.io/identifier"]
        == environment.identifier_label
    )
    assert yaml_obj["metadata"]["labels"]["prefect.io/flow_run_id"] == "id_test"
    assert (
        yaml_obj["spec"]["template"]["metadata"]["labels"]["prefect.io/identifier"]
        == environment.identifier_label
    )

    env = yaml_obj["spec"]["template"]["spec"]["containers"][0]["env"]

    assert env[0]["value"] == "gql_test"
    assert env[1]["value"] == "auth_test"
    assert env[2]["value"] == "id_test"
    assert env[3]["value"] == "namespace_test"
    assert env[4]["value"] == "test1/test2:test3"
    assert env[9]["value"] == "['test_logger']"

    assert (
        yaml_obj["spec"]["template"]["spec"]["containers"][0]["image"]
        == "test1/test2:test3"
    )

    assert yaml_obj["spec"]["template"]["spec"]["containers"][0]["command"] == [
        "/bin/sh",
        "-c",
    ]
    assert (
        yaml_obj["spec"]["template"]["spec"]["containers"][0]["args"]
        == default_command_args
    )
Example #11
def test_initialize_environment_with_spec_populates(monkeypatch):

    with tempfile.TemporaryDirectory() as directory:

        with open(os.path.join(directory, "job.yaml"), "w+") as file:
            file.write("job")

        environment = KubernetesJobEnvironment(
            job_spec_file=os.path.join(directory, "job.yaml"))

        assert environment._job_spec == "job"
Example #12
def test_hash_flow():

    flow = TEST_FLOW.copy()

    integration = prefect_saturn.PrefectCloudIntegration(
        prefect_cloud_project_name=TEST_PREFECT_PROJECT_NAME)

    with patch("prefect_saturn.core.Client", new=MockClient):
        flow_hash = integration._hash_flow(flow)
        assert isinstance(flow_hash, str) and len(flow_hash) > 0

        # should be deterministic
        flow_hash_again = integration._hash_flow(flow)
        assert flow_hash == flow_hash_again

        # should not be impacted by storage
        flow.storage = Webhook(
            build_request_kwargs={},
            build_request_http_method="POST",
            get_flow_request_kwargs={},
            get_flow_request_http_method="GET",
        )
        assert flow_hash == integration._hash_flow(flow)

        # should not be impacted by environment or run_config
        if RUN_CONFIG_AVAILABLE:
            flow.run_config = KubernetesRun()
        elif KUBE_JOB_ENV_AVAILABLE:
            flow.environment = KubernetesJobEnvironment()
        assert flow_hash == integration._hash_flow(flow)

        # should not change if you add a new task
        @task
        def goodbye_task():
            logger = prefect.context.get("logger")
            logger.info("adios")

        flow.tasks = [hello_task, goodbye_task]
        new_flow_hash = integration._hash_flow(flow)

        assert isinstance(new_flow_hash, str) and len(new_flow_hash) > 0
        assert new_flow_hash == flow_hash

        # should change if flow name changes
        flow.name = str(uuid.uuid4())
        new_flow_hash = integration._hash_flow(flow)
        assert new_flow_hash != flow_hash

        # should change if project name changes
        previous_flow_hash = new_flow_hash
        integration.prefect_cloud_project_name = str(uuid.uuid4())
        new_flow_hash = integration._hash_flow(flow)
        assert isinstance(new_flow_hash, str) and len(new_flow_hash) > 0
        assert new_flow_hash != previous_flow_hash
Example #13
def test_create_k8s_job_environment_with_deprecated_executor_kwargs(job_spec_file):
    with set_temporary_config(
        {"engine.executor.default_class": "prefect.engine.executors.LocalDaskExecutor"}
    ):
        with pytest.warns(UserWarning, match="executor_kwargs"):
            environment = KubernetesJobEnvironment(
                job_spec_file=job_spec_file,
                executor_kwargs={"scheduler": "synchronous"},
            )
        assert isinstance(environment.executor, LocalDaskExecutor)
        assert environment.executor.scheduler == "synchronous"
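The deprecation warning above reflects the move from executor_kwargs to passing an executor instance directly (Example #28 and Example #29 below use that form). A minimal sketch of the equivalent, non-deprecated construction, assuming the same synchronous local Dask scheduler:

from prefect.engine.executors import LocalDaskExecutor
from prefect.environments import KubernetesJobEnvironment

# Configure the executor up front instead of passing executor_kwargs
# (assumed equivalent to the deprecated form exercised in the test above).
environment = KubernetesJobEnvironment(
    job_spec_file="job.yaml",
    executor=LocalDaskExecutor(scheduler="synchronous"),
)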
Example #14
def test_create_k8s_job_environment():
    with tempfile.TemporaryDirectory() as directory:

        with open(os.path.join(directory, "job.yaml"), "w+") as file:
            file.write("job")

        environment = KubernetesJobEnvironment(
            job_spec_file=os.path.join(directory, "job.yaml"))
        assert environment
        assert environment.job_spec_file == os.path.join(directory, "job.yaml")
        assert environment.labels == set()
        assert environment.logger.name == "prefect.KubernetesJobEnvironment"
Example #15
def test_create_k8s_job_environment_with_executor_kwargs():
    with tempfile.TemporaryDirectory() as directory:

        with open(os.path.join(directory, "job.yaml"), "w+") as file:
            file.write("job")

        environment = KubernetesJobEnvironment(
            job_spec_file=os.path.join(directory, "job.yaml"),
            executor_kwargs={"test": "here"},
        )
        assert environment
        assert environment.executor_kwargs == {"test": "here"}
Example #16
def test_create_flow_run_job_fails_outside_cluster():
    with tempfile.TemporaryDirectory() as directory:

        with open(os.path.join(directory, "job.yaml"), "w+") as file:
            file.write("job")

        environment = KubernetesJobEnvironment(
            job_spec_file=os.path.join(directory, "job.yaml"))

        with pytest.raises(EnvironmentError):
            with set_temporary_config({"cloud.auth_token": "test"}):
                environment.create_flow_run_job(
                    docker_name="test1/test2:test3", flow_file_path="test4")
Example #17
def test_roundtrip_cloudpickle():
    with tempfile.TemporaryDirectory() as directory:

        with open(os.path.join(directory, "job.yaml"), "w+") as file:
            file.write("job")

        environment = KubernetesJobEnvironment(
            job_spec_file=os.path.join(directory, "job.yaml"))

        assert environment._job_spec == "job"

        new = cloudpickle.loads(cloudpickle.dumps(environment))
        assert isinstance(new, KubernetesJobEnvironment)
        assert new._job_spec == "job"
Example #18
def test_roundtrip_cloudpickle(job_spec_file, initial_job_spec):
    environment = KubernetesJobEnvironment(job_spec_file=job_spec_file)

    assert environment._job_spec == initial_job_spec

    new = cloudpickle.loads(cloudpickle.dumps(environment))
    assert isinstance(new, KubernetesJobEnvironment)
    assert new._job_spec == initial_job_spec

    # Identifier labels do not persist
    assert environment.identifier_label
    assert new.identifier_label

    assert environment.identifier_label != new.identifier_label
Example #19
def test_create_namespaced_job_fails_outside_cluster():
    with tempfile.TemporaryDirectory() as directory:

        with open(os.path.join(directory, "job.yaml"), "w+") as file:
            file.write("job")

        environment = KubernetesJobEnvironment(
            job_spec_file=os.path.join(directory, "job.yaml"))
        storage = Docker(registry_url="test1",
                         image_name="test2",
                         image_tag="test3")

        with pytest.raises(EnvironmentError):
            with set_temporary_config({"cloud.auth_token": "test"}):
                environment.execute(Flow("test", storage=storage))
Example #20
def test_create_k8s_job_environment():
    with tempfile.TemporaryDirectory() as directory:

        with open(os.path.join(directory, "job.yaml"), "w+") as file:
            file.write("job")

        environment = KubernetesJobEnvironment(
            job_spec_file=os.path.join(directory, "job.yaml"))
        assert environment
        assert environment.job_spec_file == os.path.join(directory, "job.yaml")
        assert environment.unique_job_name is False
        assert environment.executor_kwargs == {}
        assert environment.labels == set()
        assert environment.on_start is None
        assert environment.on_exit is None
        assert environment.logger.name == "prefect.KubernetesJobEnvironment"
Example #21
def test_populate_job_yaml_no_defaults(job_spec_file, job):
    environment = KubernetesJobEnvironment(job_spec_file=job_spec_file)

    job["spec"]["template"]["spec"]["containers"][0] = {}
    del job["metadata"]
    del job["spec"]["template"]["metadata"]

    with set_temporary_config({
            "cloud.graphql": "gql_test",
            "cloud.auth_token": "auth_test"
    }):
        with prefect.context(flow_run_id="id_test",
                             namespace="namespace_test"):
            yaml_obj = environment._populate_job_spec_yaml(
                yaml_obj=job,
                docker_name="test1/test2:test3",
            )

    assert (yaml_obj["metadata"]["labels"]["prefect.io/identifier"] ==
            environment.identifier_label)
    assert yaml_obj["metadata"]["labels"][
        "prefect.io/flow_run_id"] == "id_test"
    assert (yaml_obj["spec"]["template"]["metadata"]["labels"]
            ["prefect.io/identifier"] == environment.identifier_label)

    env = yaml_obj["spec"]["template"]["spec"]["containers"][0]["env"]

    assert env[0]["value"] == "gql_test"
    assert env[1]["value"] == "auth_test"
    assert env[2]["value"] == "id_test"
    assert env[3]["value"] == "namespace_test"
    assert env[4]["value"] == "test1/test2:test3"
    assert env[9]["value"] == "[]"

    assert (yaml_obj["spec"]["template"]["spec"]["containers"][0]["image"] ==
            "test1/test2:test3")

    assert yaml_obj["spec"]["template"]["spec"]["containers"][0][
        "command"] == [
            "/bin/sh",
            "-c",
        ]
    assert yaml_obj["spec"]["template"]["spec"]["containers"][0]["args"] == [
        "python -c 'import prefect; prefect.environments.KubernetesJobEnvironment().run_flow()'"
    ]
Example #22
def test_create_k8s_job_callbacks():
    def f():
        pass

    with tempfile.TemporaryDirectory() as directory:

        with open(os.path.join(directory, "job.yaml"), "w+") as file:
            file.write("job")

        environment = KubernetesJobEnvironment(
            job_spec_file=os.path.join(directory, "job.yaml"),
            labels=["foo"],
            on_start=f,
            on_exit=f,
        )
        assert environment.labels == set(["foo"])
        assert environment.on_start is f
        assert environment.on_exit is f
Example #23
def test_create_flow_run_job(monkeypatch):
    file_path = os.path.dirname(
        prefect.environments.execution.dask.k8s.__file__)
    environment = KubernetesJobEnvironment(os.path.join(file_path, "job.yaml"))

    config = MagicMock()
    monkeypatch.setattr("kubernetes.config", config)

    batchv1 = MagicMock()
    monkeypatch.setattr("kubernetes.client",
                        MagicMock(BatchV1Api=MagicMock(return_value=batchv1)))

    with set_temporary_config({"cloud.auth_token": "test"}):
        environment.create_flow_run_job(docker_name="test1/test2:test3",
                                        flow_file_path="test4")

    assert (batchv1.create_namespaced_job.call_args[1]["body"]["apiVersion"] ==
            "batch/v1")
Example #24
def test_populate_job_yaml_no_defaults(job_spec_file, job, default_command_args):
    environment = KubernetesJobEnvironment(job_spec_file=job_spec_file)

    # only command and args are set on the container when the instance
    # is initialized
    job["spec"]["template"]["spec"]["containers"][0] = {
        "command": ["/bin/sh", "-c"],
        "args": default_command_args,
    }
    del job["metadata"]
    del job["spec"]["template"]["metadata"]
    environment._job_spec = job

    with set_temporary_config(
        {"cloud.graphql": "gql_test", "cloud.auth_token": "auth_test"}
    ):
        with prefect.context(flow_run_id="id_test", namespace="namespace_test"):
            yaml_obj = environment._populate_run_time_job_spec_details(
                docker_name="test1/test2:test3",
            )

    assert (
        yaml_obj["metadata"]["labels"]["prefect.io/identifier"]
        == environment.identifier_label
    )
    assert yaml_obj["metadata"]["labels"]["prefect.io/flow_run_id"] == "id_test"
    assert (
        yaml_obj["spec"]["template"]["metadata"]["labels"]["prefect.io/identifier"]
        == environment.identifier_label
    )

    env = yaml_obj["spec"]["template"]["spec"]["containers"][0]["env"]

    assert env[0]["value"] == "gql_test"
    assert env[1]["value"] == "auth_test"
    assert env[2]["value"] == "id_test"
    assert env[3]["value"] == "namespace_test"
    assert env[4]["value"] == "test1/test2:test3"
    assert env[9]["value"] == "[]"

    assert (
        yaml_obj["spec"]["template"]["spec"]["containers"][0]["image"]
        == "test1/test2:test3"
    )
Example #25
def test_run_flow_calls_callbacks(monkeypatch):
    start_func = MagicMock()
    exit_func = MagicMock()

    file_path = os.path.dirname(
        prefect.environments.execution.dask.k8s.__file__)
    environment = KubernetesJobEnvironment(os.path.join(file_path, "job.yaml"),
                                           on_start=start_func,
                                           on_exit=exit_func)

    flow_runner = MagicMock()
    monkeypatch.setattr(
        "prefect.engine.get_default_flow_runner_class",
        MagicMock(return_value=flow_runner),
    )

    with tempfile.TemporaryDirectory() as directory:
        d = Local(directory)
        d.add_flow(prefect.Flow("name"))

        gql_return = MagicMock(return_value=MagicMock(data=MagicMock(flow_run=[
            GraphQLResult({
                "flow":
                GraphQLResult({
                    "name": "name",
                    "storage": d.serialize(),
                })
            })
        ], )))
        client = MagicMock()
        client.return_value.graphql = gql_return
        monkeypatch.setattr("prefect.environments.execution.base.Client",
                            client)

        with set_temporary_config({"cloud.auth_token": "test"
                                   }), prefect.context({"flow_run_id": "id"}):
            environment.run_flow()

        assert flow_runner.call_args[1]["flow"].name == "name"

    assert start_func.called
    assert exit_func.called
Example #26
def test_execute(monkeypatch):
    with tempfile.TemporaryDirectory() as directory:

        with open(os.path.join(directory, "job.yaml"), "w+") as file:
            file.write("job")

        environment = KubernetesJobEnvironment(
            job_spec_file=os.path.join(directory, "job.yaml")
        )
        storage = Docker(registry_url="test1", image_name="test2", image_tag="test3")

        create_flow_run = MagicMock()
        monkeypatch.setattr(
            "prefect.environments.KubernetesJobEnvironment.create_flow_run_job",
            create_flow_run,
        )

        environment.execute(storage=storage, flow_location="")

        assert create_flow_run.call_args[1]["docker_name"] == "test1/test2:test3"
Example #27
def test_execute(monkeypatch):
    file_path = os.path.dirname(prefect.environments.execution.dask.k8s.__file__)
    environment = KubernetesJobEnvironment(os.path.join(file_path, "job.yaml"))

    config = MagicMock()
    monkeypatch.setattr("kubernetes.config", config)

    batchv1 = MagicMock()
    monkeypatch.setattr(
        "kubernetes.client", MagicMock(BatchV1Api=MagicMock(return_value=batchv1))
    )

    storage = Docker(registry_url="test1", image_name="test2", image_tag="test3")

    with set_temporary_config({"cloud.auth_token": "test"}):
        environment.execute(Flow("test", storage=storage))

    assert (
        batchv1.create_namespaced_job.call_args[1]["body"]["apiVersion"] == "batch/v1"
    )
Example #28
    def _get_environment(
        self,
        cluster_kwargs: Dict[str, Any],
        adapt_kwargs: Dict[str, Any],
    ):
        """
        Get an environment that customizes the execution of a Prefect flow run.
        """

        local_tmp_file = "/tmp/prefect-flow-run.yaml"
        with open(local_tmp_file, "w") as f:
            yaml.dump(self._flow_run_job_spec, stream=f, Dumper=yaml.RoundTripDumper)

        # saturn_flow_id is used by Saturn's custom Prefect agent
        k8s_environment = KubernetesJobEnvironment(
            metadata={"saturn_flow_id": self.flow_id, "image": self.image},
            executor=DaskExecutor(
                cluster_class="dask_saturn.SaturnCluster",
                cluster_kwargs=cluster_kwargs,
                adapt_kwargs=adapt_kwargs,
            ),
            job_spec_file=local_tmp_file,
            labels=self._saturn_flow_labels,
            unique_job_name=True,
        )

        # patch command and args to run the user's start script
        new_command = ["/bin/bash", "-ec"]
        k8s_environment._job_spec["spec"]["template"]["spec"]["containers"][0][
            "command"
        ] = new_command

        args_from_prefect = k8s_environment._job_spec["spec"]["template"]["spec"]["containers"][
            0
        ].get("args", [])
        args_from_prefect = " ".join(args_from_prefect)
        new_args = f"source /home/jovyan/.saturn/start_wrapper.sh; {args_from_prefect}"
        k8s_environment._job_spec["spec"]["template"]["spec"]["containers"][0]["args"] = [new_args]

        return k8s_environment
Example #29
def test_environment_run():
    class MyExecutor(LocalDaskExecutor):
        submit_called = False

        def submit(self, *args, **kwargs):
            self.submit_called = True
            return super().submit(*args, **kwargs)

    global_dict = {}

    @prefect.task
    def add_to_dict():
        global_dict["run"] = True

    executor = MyExecutor()
    environment = KubernetesJobEnvironment(executor=executor)
    flow = prefect.Flow("test", tasks=[add_to_dict], environment=environment)

    environment.run(flow=flow)

    assert global_dict.get("run") is True
    assert executor.submit_called
Example #30
def test_populate_job_yaml_command_and_args_not_overridden_at_run_time(job_spec_file):
    environment = KubernetesJobEnvironment(job_spec_file=job_spec_file)

    test_command = ["/bin/bash", "-acdefg"]
    test_args = "echo 'hello'; python -c 'import prefect; prefect.environments.execution.load_and_run_flow()'"
    environment._job_spec["spec"]["template"]["spec"]["containers"][0][
        "command"
    ] = test_command
    environment._job_spec["spec"]["template"]["spec"]["containers"][0][
        "args"
    ] = test_args

    with set_temporary_config(
        {"cloud.graphql": "gql_test", "cloud.auth_token": "auth_test"}
    ):
        with prefect.context(flow_run_id="id_test", namespace="namespace_test"):
            yaml_obj = environment._populate_run_time_job_spec_details(
                docker_name="test1/test2:test3",
            )

    assert (
        yaml_obj["spec"]["template"]["spec"]["containers"][0]["command"] == test_command
    )
    assert yaml_obj["spec"]["template"]["spec"]["containers"][0]["args"] == test_args