Example #1
    def test_docker_basic_run(self):
        repo = self.mk_repo()
        conf = ConfigLoader.load(workspace_dir=repo.working_dir)

        with WorkflowRunner(conf) as r:
            wf_data = {"steps": [{"uses": "popperized/bin/sh@master", "args": ["ls"],}]}
            r.run(WorkflowParser.parse(wf_data=wf_data))

            wf_data = {
                "steps": [
                    {
                        "uses": "docker://alpine:3.9",
                        "args": ["sh", "-c", "echo $FOO > hello.txt ; pwd"],
                        "env": {"FOO": "bar"},
                    }
                ]
            }
            r.run(WorkflowParser.parse(wf_data=wf_data))
            with open(os.path.join(repo.working_dir, "hello.txt"), "r") as f:
                self.assertEqual(f.read(), "bar\n")

            wf_data = {
                "steps": [
                    {
                        "uses": "docker://alpine:3.9",
                        "args": ["nocommandisnamedlikethis"],
                    }
                ]
            }
            self.assertRaises(SystemExit, r.run, WorkflowParser.parse(wf_data=wf_data))

        repo.close()
        shutil.rmtree(repo.working_dir, ignore_errors=True)
Example #2
    def test_skip_steps(self):
        wf_data = {
            "steps": [
                {
                    "uses": "foo",
                    "id": "one"
                },
                {
                    "uses": "bar",
                    "id": "two"
                },
                {
                    "uses": "baz",
                    "id": "three"
                },
            ]
        }
        # skip one step
        wf = WorkflowParser.parse(wf_data=wf_data, skipped_steps=["two"])
        self.assertEqual(2, len(wf.steps))
        self.assertEqual("one", wf.steps[0].id)
        self.assertEqual("three", wf.steps[1].id)

        # more than one
        wf = WorkflowParser.parse(wf_data=wf_data,
                                  skipped_steps=["one", "three"])
        self.assertEqual(1, len(wf.steps))
        self.assertEqual("two", wf.steps[0].id)

        # non-existing name
        self.assertRaises(SystemExit, WorkflowParser.parse, **{
            "wf_data": wf_data,
            "skipped_steps": ["four"]
        })

        # without id
        wf_data = {
            "steps": [
                {
                    "uses": "foo"
                },
                {
                    "uses": "bar"
                },
                {
                    "uses": "baz"
                },
            ]
        }
        wf = WorkflowParser.parse(wf_data=wf_data, skipped_steps=["1", "3"])
        self.assertEqual(1, len(wf.steps))
        self.assertEqual("2", wf.steps[0].id)
Example #3
    def test_empty_file(self):
        # an empty workflow file should cause the parser to raise a SystemExit
        open("test.yml", "a").close()
        self.assertRaises(SystemExit, WorkflowParser.parse, **{"file": "test.yml"})
Example #4
    def test_docker_basic_run(self):
        repo = self.mk_repo()
        conf = ConfigLoader.load(workspace_dir=repo.working_dir)
        test_string = "STEP_INFO:popper:Successfully tagged popperized/bin:master"

        with WorkflowRunner(conf) as r:
            wf_data = {
                "steps": [{
                    "uses": "popperized/bin/sh@master",
                    "args": ["ls"],
                }]
            }
            with self.assertLogs(log, level="STEP_INFO") as cm:
                r.run(WorkflowParser.parse(wf_data=wf_data))
            self.assertTrue(test_string in cm.output,
                            f"Got cmd output: {cm.output}")

            wf_data = {
                "steps": [{
                    "uses": "docker://*****:*****@master",
                    "args": ["ls"],
                }]
            }
            with self.assertLogs(log, level="STEP_INFO") as cm:
                r.run(WorkflowParser.parse(wf_data=wf_data))
            self.assertTrue(test_string not in cm.output)

        repo.close()
        shutil.rmtree(repo.working_dir, ignore_errors=True)
Example #5
    def test_exec_mpi_failure(self, mock_kill):
        config_dict = {
            "engine": {
                "name": "singularity",
                "options": {},
            },
            "resource_manager": {
                "name": "slurm",
                "options": {
                    "1": {
                        "nodes": 2,
                        "nodelist": "worker1,worker2",
                        "overcommit": True
                    }
                },
            },
        }

        config = ConfigLoader.load(workspace_dir="/w", config_file=config_dict)

        self.Popen.set_command(
            "sbatch "
            "--wait --overcommit "
            f"popper_1_{config.wid}.sh",
            returncode=12,
        )

        self.Popen.set_command(f"tail -f popper_1_{config.wid}.out",
                               returncode=0)

        with WorkflowRunner(config) as r:
            wf_data = {"steps": [{"uses": "docker://alpine", "args": ["ls"]}]}
            self.assertRaises(SystemExit, r.run,
                              WorkflowParser.parse(wf_data=wf_data))
Example #6
    def test_check_secrets(self):
        wf_data = {
            "steps": [{
                "uses": "docker://alpine:3.9",
                "args": ["ls", "-ltr"],
                "secrets": ["SECRET_ONE", "SECRET_TWO"],
            }]
        }
        wf = WorkflowParser.parse(wf_data=wf_data)

        # in dry-run, secrets are ignored
        runner = WorkflowRunner(ConfigLoader.load(dry_run=True))
        runner._process_secrets(wf)

        # now go back to not dry-running
        runner = WorkflowRunner(ConfigLoader.load())

        # when CI=true it should fail
        os.environ["CI"] = "true"
        self.assertRaises(SystemExit, runner._process_secrets, wf)

        # add one secret
        os.environ["SECRET_ONE"] = "1234"

        # it should fail again, as we're missing one
        self.assertRaises(SystemExit, runner._process_secrets, wf)

        os.environ.pop("CI")

        # with CI unset, the remaining secret is read interactively
        with patch("getpass.getpass", return_value="5678"):
            runner._process_secrets(wf)

        # pop the other
        os.environ.pop("SECRET_ONE")
Example #7
    def test_clone_repos(self):
        wf_data = {"steps": [{"uses": "popperized/bin/sh@master"}]}
        wf = WorkflowParser.parse(wf_data=wf_data)

        cache_dir = os.path.join(os.environ["HOME"], ".cache/popper/")

        # clone repos in the default cache directory.
        conf = ConfigLoader.load()
        runner = WorkflowRunner(conf)
        runner._clone_repos(wf)
        step_dir = os.path.join(cache_dir, conf.wid,
                                "github.com/popperized/bin")
        self.assertTrue(os.path.exists(step_dir))

        # clone repos in custom cache directory
        os.environ["POPPER_CACHE_DIR"] = "/tmp/smdir"
        conf = ConfigLoader.load()
        runner = WorkflowRunner(conf)
        runner._clone_repos(wf)
        step_dir = os.path.join("/tmp/smdir", conf.wid,
                                "github.com/popperized/bin")
        self.assertTrue(os.path.exists(step_dir))
        os.environ.pop("POPPER_CACHE_DIR")

        # check failure when container is not available and we skip cloning
        shutil.rmtree("/tmp/smdir")
        shutil.rmtree(cache_dir)
        conf = ConfigLoader.load(skip_clone=True)
        runner = WorkflowRunner(conf)
        self.assertRaises(SystemExit, runner._clone_repos, wf)
Example #8
def cli(ctx, file, step, entrypoint, skip_pull, skip_clone, substitution,
        conf):
    """Opens an interactive shell using all the attributes defined in the workflow file
    for the given STEP, but ignoring ``runs`` and ``args``. By default, it invokes
    /bin/bash. If you need to invoke another one, you can specify it in the --entrypoint
    flag.

    NOTE: this command only works for (local) host runner in Docker.
    """
    wf = WorkflowParser.parse(file=file,
                              step=step,
                              immutable=False,
                              substitutions=substitution)

    # override entrypoint
    step = wf.steps[0]
    step.args = []
    step.runs = entrypoint

    # configure runner so containers execute in attached mode and create a tty
    config = ConfigLoader.load(
        engine_name="docker",
        pty=True,
        skip_pull=skip_pull,
        skip_clone=skip_clone,
        config_file=conf,
    )

    with WorkflowRunner(config) as runner:
        try:
            runner.run(wf)
        except Exception as e:
            log.debug(traceback.format_exc())
            log.fail(e)
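
The command above works by rewriting the single parsed step before running it. The snippet below is a minimal sketch of that override performed directly on a parsed workflow; the image, arguments, and the "sh" entrypoint are illustrative rather than taken from the source, and the same imports as in the surrounding examples (WorkflowParser) are assumed.

# Minimal sketch of the entrypoint override performed by the command above.
# The workflow data and the "sh" entrypoint are illustrative.
wf = WorkflowParser.parse(
    wf_data={"steps": [{"uses": "docker://alpine:3.9", "args": ["ls"]}]},
    immutable=False,
)
step = wf.steps[0]
step.args = []          # discard the step's original arguments
step.runs = ("sh",)     # what passing `--entrypoint sh` would set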
Example #9
    def test_new_workflow(self):
        wf_data = {}
        self.assertRaises(SystemExit, WorkflowParser.parse,
                          **{"wf_data": wf_data})

        wf_data = {"unexpected": []}
        self.assertRaises(SystemExit, WorkflowParser.parse,
                          **{"wf_data": wf_data})

        wf_data = {
            "steps": [
                {
                    "uses": "foo",
                    "id": "step",
                    "env": {
                        "EN": "EE"
                    },
                    "secrets": ["S"],
                    "dir": "/path/to/",
                    "options": {
                        "name": "spam"
                    },
                },
                {
                    "uses": "bar",
                    "runs": ["a", "b"],
                    "args": ["c"],
                    "skip_pull": True
                },
            ],
            "options": {
                "env": {
                    "FOO": "bar"
                },
                "secrets": ["Z"],
            },
        }
        wf = WorkflowParser.parse(wf_data=wf_data)

        step = wf.steps[0]
        self.assertEqual("step", step.id)
        self.assertEqual("foo", step.uses)
        self.assertEqual(("Z", "S"), step.secrets)
        self.assertEqual({"EN": "EE", "FOO": "bar"}, step.env)
        self.assertEqual("/path/to/", step.dir)
        self.assertEqual("spam", step.options.name)
        self.assertTrue(not step.runs)
        self.assertTrue(not step.args)
        self.assertFalse(step.skip_pull)

        step = wf.steps[1]
        self.assertEqual("bar", step.uses)
        self.assertEqual(("a", "b"), step.runs)
        self.assertEqual(("c", ), step.args)
        self.assertTrue(step.skip_pull)
        self.assertTrue(not step.dir)
        self.assertEqual({"FOO": "bar"}, step.env)
        self.assertEqual(("Z", ), step.secrets)
        self.assertEqual({"FOO": "bar"}, wf.options.env)
        self.assertEqual(("Z", ), wf.options.secrets)
Example #10
    def test_filter_all_but_given_step(self):
        wf_data = {
            "steps": [
                {
                    "uses": "foo",
                    "id": "one"
                },
                {
                    "uses": "bar",
                    "id": "two"
                },
                {
                    "uses": "baz",
                    "id": "three"
                },
            ]
        }
        wf = WorkflowParser.parse(wf_data=wf_data, step="two")
        self.assertEqual(1, len(wf.steps))
        self.assertEqual("two", wf.steps[0].id)
        self.assertEqual("bar", wf.steps[0].uses)

        # non-existing name
        self.assertRaises(SystemExit, WorkflowParser.parse, **{
            "wf_data": wf_data,
            "step": "four"
        })

        # without id
        wf_data = {
            "steps": [
                {
                    "uses": "foo"
                },
                {
                    "uses": "bar"
                },
                {
                    "uses": "baz"
                },
            ]
        }
        wf = WorkflowParser.parse(wf_data=wf_data, step="2")
        self.assertEqual(1, len(wf.steps))
        self.assertEqual("2", wf.steps[0].id)
Example #11
    def test_host_run(self):

        repo = self.mk_repo()
        conf = ConfigLoader.load(workspace_dir=repo.working_dir)

        with WorkflowRunner(conf) as r:
            wf_data = {
                "steps": [{"uses": "sh", "runs": ["cat"], "args": ["README.md"],}]
            }
            r.run(WorkflowParser.parse(wf_data=wf_data))

            wf_data = {
                "steps": [
                    {
                        "uses": "sh",
                        "runs": ["bash", "-c", "echo $FOO > hello.txt ; pwd"],
                        "env": {"FOO": "bar"},
                    }
                ]
            }
            r.run(WorkflowParser.parse(wf_data=wf_data))
            with open(os.path.join(repo.working_dir, "hello.txt"), "r") as f:
                self.assertEqual(f.read(), "bar\n")

            wf_data = {"steps": [{"uses": "sh", "runs": ["nocommandisnamedlikethis"]}]}
            self.assertRaises(SystemExit, r.run, WorkflowParser.parse(wf_data=wf_data))

            # a step exiting with code 78 stops the workflow without failing it
            wf_data = {
                "steps": [
                    {"uses": "sh", "runs": ["touch", "one.txt"]},
                    {"uses": "sh", "runs": ["bash", "-c", "exit 78"]},
                    {"uses": "sh", "runs": ["touch", "two.txt"]},
                ]
            }
            r.run(WorkflowParser.parse(wf_data=wf_data))
            self.assertTrue(os.path.isfile(os.path.join(repo.working_dir, "one.txt")))
            self.assertFalse(os.path.isfile(os.path.join(repo.working_dir, "two.txt")))

        repo.close()
        shutil.rmtree(repo.working_dir, ignore_errors=True)
Example #12
    def test_run(self, mock_kill):
        config_dict = {
            "engine": {
                "name": "podman",
                "options": {
                    "privileged": True,
                    "hostname": "popper.local",
                    "domainname": "www.example.org",
                    "volumes": ["/path/in/host:/path/in/container"],
                    "environment": {
                        "FOO": "bar"
                    },
                },
            },
            "resource_manager": {
                "name": "slurm"
            },
        }

        config = ConfigLoader.load(workspace_dir="/w", config_file=config_dict)

        self.Popen.set_command(
            f"sbatch --wait --job-name popper_1_{config.wid} "
            f"--output {slurm_cache_dir}/popper_1_{config.wid}.out "
            f"{slurm_cache_dir}/popper_1_{config.wid}.sh",
            returncode=0,
        )

        self.Popen.set_command(
            f"tail -f {slurm_cache_dir}/popper_1_{config.wid}.out",
            returncode=0)

        with WorkflowRunner(config) as r:
            wf_data = {
                "steps": [{
                    "uses": "popperized/bin/sh@master",
                    "runs": ["cat"],
                    "args": ["README.md"],
                }]
            }
            r.run(WorkflowParser.parse(wf_data=wf_data))

        with open(f"{slurm_cache_dir}/popper_1_{config.wid}.sh", "r") as f:
            # fmt: off
            expected = f"""#!/bin/bash
podman rm -f popper_1_{config.wid} || true
podman build -t popperized/bin:master {os.environ['HOME']}/.cache/popper/{config.wid}/github.com/popperized/bin/sh
podman create --name popper_1_{config.wid} --workdir /workspace --entrypoint cat -v /w:/workspace:Z -v /path/in/host:/path/in/container -e FOO=bar   --privileged --hostname popper.local --domainname www.example.org popperized/bin:master README.md
podman start --attach popper_1_{config.wid}"""
            # fmt: on
            actual = f.read()
            self.maxDiff = None
            self.assertEqual(expected, actual)
Example #13
    def test_dry_run(self):
        config = ConfigLoader.load(engine_name="singularity",
                                   resman_name="slurm",
                                   dry_run=True)

        with WorkflowRunner(config) as r:
            wf_data = {
                "steps": [{
                    "uses": "docker://alpine",
                    "runs": ["cat"],
                    "args": ["README.md"],
                }]
            }
            r.run(WorkflowParser.parse(wf_data=wf_data))

        self.assertEqual(self.Popen.all_calls, [])
Example #14
def cli(ctx, file, skip, colors):
    """Creates a graph in the .dot format representing the workflow."""
    wf = WorkflowParser.parse(file=file)

    node_attrs = 'shape=box, style="filled{}", fillcolor=transparent{}'
    wf_attr = node_attrs.format(",rounded", ",color=red" if colors else "")
    act_attr = node_attrs.format("", ",color=cyan" if colors else "")
    dot_str = ""
    dot_str += f'  "Workflow" [{wf_attr}];\n'
    for i, step in enumerate(wf.steps):
        n = wf.steps[i]["id"]
        dot_str += f'  "{n}" [{act_attr}];\n'
        if i == 0:
            parent = "Workflow"
        else:
            parent = wf.steps[i - 1]["id"]
        dot_str += f'  "{parent}" -> "{n}";\n'
    log.info("digraph G { graph [bgcolor=transparent];\n" + dot_str + "}\n")
Example #15
    def test_slurm_singularity_run(self, mock_kill):
        config_dict = {
            "engine": {
                "name": "singularity",
                "options": {
                    "hostname": "popper.local",
                    "bind": ["/path/in/host:/path/in/container"],
                },
            },
            "resource_manager": {
                "name": "slurm"
            },
        }

        config = ConfigLoader.load(workspace_dir="/w", config_file=config_dict)

        # fmt: off
        self.Popen.set_command(
            f"sbatch --wait --job-name popper_1_{config.wid} --output {slurm_cache_dir}/popper_1_{config.wid}.out {slurm_cache_dir}/popper_1_{config.wid}.sh",
            returncode=0,
        )
        # fmt: on

        self.Popen.set_command(
            f"tail -f {slurm_cache_dir}/popper_1_{config.wid}.out",
            returncode=0)

        with WorkflowRunner(config) as r:
            wf_data = {
                "steps": [{
                    "uses": "popperized/bin/sh@master",
                    "args": ["ls"],
                }]
            }
            r.run(WorkflowParser.parse(wf_data=wf_data))

        with open(f"{slurm_cache_dir}/popper_1_{config.wid}.sh", "r") as f:
            # fmt: off
            expected = f"""#!/bin/bash
singularity run --userns --pwd /workspace --bind /w:/workspace --bind /path/in/host:/path/in/container --hostname popper.local {os.environ['HOME']}/.cache/popper/singularity/{config.wid}/popper_1_{config.wid}.sif ls"""
            # fmt: on
            actual = f.read()
        self.assertEqual(expected, actual)
Example #16
    def test_substitutions(self):
        # test wrong format for substitution key
        wf_data = {"steps": [{"uses": "whatever"}]}
        self.assertRaises(
            SystemExit, WorkflowParser.parse, **{
                "wf_data": wf_data,
                "substitutions": ["SUB1=WRONG"]
            })

        # expect error when not all given subs are used
        wf_data = {
            "steps": [{
                "uses": "some_$_SUB1",
                "id": "some other $_SUB2",
                "env": {
                    "FOO": "env_$_SUB3"
                },
                "secrets": ["secret_$_SUB4"],
            }]
        }
        substitutions = [
            "_SUB1=ONE",
            "_SUB2=TWO",
            "_SUB3=THREE",
            "_SUB4=4",
            "_SUB5=UNUSED",
        ]
        self.assertRaises(
            SystemExit, WorkflowParser.parse, **{
                "wf_data": wf_data,
                "substitutions": substitutions
            })

        # allow loose substitutions
        wf = WorkflowParser.parse(wf_data=wf_data,
                                  substitutions=substitutions,
                                  allow_loose=True)
        step = wf.steps[0]
        self.assertEqual("some_ONE", step.uses)
        self.assertEqual("some other TWO", step.id)
        self.assertEqual("env_THREE", step.env["FOO"])
        self.assertEqual(("secret_4", ), step.secrets)
Example #17
    def test_run(self, mock_kill):
        self.maxDiff = None
        config_dict = {
            "engine": {
                "name": "singularity",
                "options": {
                    "hostname": "popper.local",
                    "bind": ["/path/in/host:/path/in/container"],
                },
            },
            "resource_manager": {
                "name": "slurm",
                "options": {
                    "1": {
                        "nodes": 2,
                        "ntasks": 2,
                        "nodelist": "worker1,worker2"
                    }
                },
            },
        }

        config = ConfigLoader.load(workspace_dir="/w", config_file=config_dict)

        # fmt: off
        self.Popen.set_command(
            "sbatch "
            "--wait "
            f"popper_1_{config.wid}.sh",
            returncode=0,
        )
        # fmt: on

        self.Popen.set_command(f"tail -f popper_1_{config.wid}.out",
                               returncode=0)

        with WorkflowRunner(config) as r:
            wf_data = {"steps": [{"uses": "docker://alpine", "args": ["ls"]}]}
            r.run(WorkflowParser.parse(wf_data=wf_data))

        with open(f"popper_1_{config.wid}.sh", "r") as f:
            # fmt: off
            expected = f"""#!/bin/bash
#SBATCH --job-name=popper_1_{config.wid}
#SBATCH --output=popper_1_{config.wid}.out
#SBATCH --nodes=2
#SBATCH --ntasks=2
#SBATCH --ntasks-per-node=1
#SBATCH --nodelist=worker1,worker2
mpirun singularity run --userns --pwd /workspace --bind /w:/workspace --bind /path/in/host:/path/in/container --hostname popper.local {os.environ['HOME']}/.cache/popper/singularity/{config.wid}/popper_1_{config.wid}.sif ls"""
            # fmt: on
            actual = f.read()
            self.assertEqual(expected, actual)

        config_dict = {
            "engine": {
                "name": "singularity",
                "options": {
                    "hostname": "popper.local",
                    "bind": ["/path/in/host:/path/in/container"],
                },
            },
            "resource_manager": {
                "name": "slurm",
                "options": {
                    "1": {
                        "mpi": False,
                        "nodes": 2,
                        "ntasks": 2,
                        "nodelist": "worker1,worker2",
                    }
                },
            },
        }

        config = ConfigLoader.load(workspace_dir="/w", config_file=config_dict)

        self.Popen.set_command(
            f"srun --nodes 2 --ntasks 2 --ntasks-per-node 1 --nodelist worker1,worker2 rm -rf popper_1_{config.wid}.sif",
            returncode=0,
        )

        self.Popen.set_command(
            f"srun --nodes 2 --ntasks 2 --ntasks-per-node 1 --nodelist worker1,worker2 singularity pull docker://alpine:latest",
            returncode=0,
        )

        self.Popen.set_command(
            f"srun --nodes 2 --ntasks 2 --ntasks-per-node 1 --nodelist worker1,worker2 singularity run --userns --pwd /workspace --bind /w:/workspace --bind /path/in/host:/path/in/container --hostname popper.local {os.environ['HOME']}/.cache/popper/singularity/{config.wid}/popper_1_{config.wid}.sif ls",
            returncode=0,
        )

        with WorkflowRunner(config) as r:
            wf_data = {"steps": [{"uses": "docker://alpine", "args": ["ls"]}]}
            r.run(WorkflowParser.parse(wf_data=wf_data))
Example #18
    def test_submit_job_failure(self, mock_kill):
        config_dict = {
            "engine": {
                "name": "docker",
                "options": {}
            },
            "resource_manager": {
                "name": "slurm",
                "options": {}
            },
        }

        config = ConfigLoader.load(workspace_dir="/w", config_file=config_dict)

        self.Popen.set_command(
            f"sbatch --wait --job-name popper_1_{config.wid} "
            f"--output {slurm_cache_dir}/popper_1_{config.wid}.out "
            f"{slurm_cache_dir}/popper_1_{config.wid}.sh",
            returncode=12,
        )

        self.Popen.set_command(
            f"tail -f {slurm_cache_dir}/popper_1_{config.wid}.out",
            returncode=0)

        with WorkflowRunner(config) as r:
            wf_data = {
                "steps": [{
                    "uses": "popperized/bin/sh@master",
                    "runs": ["cat"],
                    "args": ["README.md"],
                }]
            }
            self.assertRaises(SystemExit, r.run,
                              WorkflowParser.parse(wf_data=wf_data))

            call_tail = call.Popen(
                ["tail", "-f", f"{slurm_cache_dir}/popper_1_{config.wid}.out"],
                cwd=os.getcwd(),
                env=None,
                preexec_fn=os.setsid,
                stderr=-2,
                stdout=-1,
                universal_newlines=True,
            )

            call_sbatch = call.Popen(
                [
                    "sbatch",
                    "--wait",
                    "--job-name",
                    f"popper_1_{config.wid}",
                    "--output",
                    f"{slurm_cache_dir}/popper_1_{config.wid}.out",
                    f"{slurm_cache_dir}/popper_1_{config.wid}.sh",
                ],
                cwd=os.getcwd(),
                env=None,
                preexec_fn=os.setsid,
                stderr=-2,
                stdout=-1,
                universal_newlines=True,
            )

            self.assertIn(call_tail, self.Popen.all_calls)
            self.assertIn(call_sbatch, self.Popen.all_calls)
Example #19
    def test_singularity_start(self):
        repo = self.mk_repo()
        conf = ConfigLoader.load(engine_name="singularity",
                                 workspace_dir=repo.working_dir)

        with WorkflowRunner(conf) as r:
            wf_data = {
                "steps": [{
                    "uses": "docker://*****:*****@master",
                    "args": ["ls"],
                }]
            }
            r.run(WorkflowParser.parse(wf_data=wf_data))

            wf_data = {
                "steps": [{
                    "uses": "docker://alpine:3.9",
                    "args": ["sh", "-c", "echo $FOO > hello.txt ; pwd"],
                    "env": {
                        "FOO": "bar"
                    },
                }]
            }
            r.run(WorkflowParser.parse(wf_data=wf_data))
            with open(os.path.join(repo.working_dir, "hello.txt"), "r") as f:
                self.assertEqual(f.read(), "bar\n")

            wf_data = {
                "steps": [{
                    "uses": "docker://alpine:3.9",
                    "args": ["nocommandisnamedlikethis"],
                }]
            }
            self.assertRaises(SystemExit, r.run,
                              WorkflowParser.parse(wf_data=wf_data))

        repo.close()
Example #20
    def test_substitutions(self):
        # test wrong format for substitution key
        wf_data = {"steps": [{"uses": "whatever"}]}
        self.assertRaises(
            SystemExit,
            WorkflowParser.parse,
            **{"wf_data": wf_data, "substitutions": ["SUB1=WRONG"]}
        )

        # allow non-str values
        wf_data = {
            "steps": [
                {
                    "uses": "some_image",
                    "id": "some_$_DUMMY",
                    "env": {"FOO": "BAR"},
                    "secrets": ["a-secret"],
                    "options": {"auto_remove": True},
                }
            ]
        }
        substitutions = [
            "_DUMMY=dummy",
        ]
        wf = WorkflowParser.parse(
            wf_data=wf_data, substitutions=substitutions, allow_loose=True
        )
        step = wf.steps[0]
        self.assertEqual("some_image", step.uses)
        self.assertEqual("some_dummy", step.id)
        self.assertEqual("BAR", step.env["FOO"])
        self.assertEqual(("a-secret",), step.secrets)
        self.assertEqual({"auto_remove": True}, step.options)

        # expect error when not all given subs are used
        wf_data = {
            "steps": [
                {
                    "uses": "some_$_SUB1",
                    "id": "some other $_SUB2",
                    "env": {"FOO": "env_$_SUB3"},
                    "secrets": ["secret_$_SUB4"],
                }
            ]
        }
        substitutions = [
            "_SUB1=ONE",
            "_SUB2=TWO",
            "_SUB3=THREE",
            "_SUB4=4",
            "_SUB5=UNUSED",
        ]
        self.assertRaises(
            SystemExit,
            WorkflowParser.parse,
            **{"wf_data": wf_data, "substitutions": substitutions}
        )

        # substitute nested
        wf_data = {
            "steps": [
                {
                    "uses": "some_$_SUB1",
                    "id": "some other $_SUB2",
                    "env": {"FOO": "env_$_SUB3"},
                    "secrets": ["secret_$_SUB4"],
                    "options": {"labels": {"timestamp": "$_TIMESTAMP"}},
                }
            ]
        }
        substitutions = [
            "_TIMESTAMP=1613916937",
            "_SUB1=ONE",
            "_SUB2=TWO",
            "_SUB3=THREE",
            "_SUB4=4",
        ]
        wf = WorkflowParser.parse(wf_data=wf_data, substitutions=substitutions)
        step = wf.steps[0]
        self.assertEqual("some_ONE", step.uses)
        self.assertEqual("some other TWO", step.id)
        self.assertEqual("env_THREE", step.env["FOO"])
        self.assertEqual(("secret_4",), step.secrets)
        self.assertEqual({"timestamp": "1613916937"}, step.options.labels)

        # allow loose substitutions
        wf = WorkflowParser.parse(
            wf_data=wf_data, substitutions=substitutions, allow_loose=True
        )
        step = wf.steps[0]
        self.assertEqual("some_ONE", step.uses)
        self.assertEqual("some other TWO", step.id)
        self.assertEqual("env_THREE", step.env["FOO"])
        self.assertEqual(("secret_4",), step.secrets)
Example #21
    def test_add_missing_ids(self):
        wf_data = {"steps": [{"uses": "foo"}, {"uses": "bar"}]}
        # ids default to the step's (1-based) position when not specified
        wf = WorkflowParser.parse(wf_data=wf_data)
        self.assertEqual("1", wf.steps[0].id)
        self.assertEqual("2", wf.steps[1].id)
Example #22
def cli(
    ctx,
    step,
    wfile,
    debug,
    dry_run,
    log_file,
    quiet,
    reuse,
    engine,
    resource_manager,
    skip,
    skip_pull,
    skip_clone,
    substitution,
    allow_loose,
    workspace,
    conf,
):
    """Runs a Popper workflow. Only executes STEP if given.

    To use a container engine other than Docker, pass the --engine/-e flag. To
    execute on a resource manager such as SLURM or Kubernetes, use the
    --resource-manager/-r flag. Alternatively, a configuration file can be
    given (--conf flag) that specifies container options, resource manager
    options, or both (see the "Workflow Syntax and Execution Runtime" section
    of the Popper documentation for more).

    If the container engine (-e) or resource manager (-r) is specified via a
    flag and a configuration file is also given, the values passed via the
    flags take precedence over those in the configuration file.
    """
    # set the logging level.
    level = "STEP_INFO"
    if quiet:
        level = "INFO"
    if debug:
        level = "DEBUG"
    log.setLevel(level)

    if dry_run:
        logging.msg_prefix = "DRYRUN: "

    if log_file:
        # also log to a file
        logging.add_log(log, log_file)

    # check conflicting flags and fail if needed
    if skip and step:
        log.fail("`--skip` can not be used when STEP argument is passed.")

    # invoke wf factory; handles formats, validations, filtering
    wf = WorkflowParser.parse(
        wfile,
        step=step,
        skipped_steps=skip,
        substitutions=substitution,
        allow_loose=allow_loose,
    )

    config = ConfigLoader.load(
        engine_name=engine,
        resman_name=resource_manager,
        config_file=conf,
        reuse=reuse,
        dry_run=dry_run,
        skip_pull=skip_pull,
        skip_clone=skip_clone,
        workspace_dir=workspace,
    )

    with WorkflowRunner(config) as runner:
        try:
            runner.run(wf)
        except Exception as e:
            log.debug(traceback.format_exc())
            log.fail(e)
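
As a reference for the --conf flag mentioned in the docstring, the following is a minimal sketch of a configuration that combines engine and resource manager options. It is modeled on the config_dict values used by the SLURM tests earlier in this listing; the hostname, bind path, and node settings are illustrative, and the same imports as in those tests (ConfigLoader) are assumed.

# Minimal sketch of a configuration combining engine and resource manager
# options, modeled on the config_dict values used in the SLURM tests above;
# the specific values are illustrative.
config_dict = {
    "engine": {
        "name": "singularity",
        "options": {
            "hostname": "popper.local",
            "bind": ["/path/in/host:/path/in/container"],
        },
    },
    "resource_manager": {
        "name": "slurm",
        "options": {"1": {"nodes": 2, "nodelist": "worker1,worker2"}},
    },
}
config = ConfigLoader.load(config_file=config_dict)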