# Imports used by these tests; the toil_container package provides the
# parsers, jobs, and exceptions modules exercised below. Names not defined in
# this excerpt (ContainerTestJob, check_help_toil, _TMP_PREFIX, DOCKER_IMAGE,
# SINGULARITY_IMAGE) presumably come from the surrounding test module.
from os.path import join

import click
import pytest
from toil.leader import FailedJobsException  # import path may vary across toil versions

from toil_container import exceptions
from toil_container import jobs
from toil_container import parsers


def assert_parser_volumes(image_flag, image, tmpdir):
    # test valid volumes
    args = [
        image_flag,
        image,
        "--volumes",
        tmpdir.mkdir("vol1").strpath,
        "/vol1",
        "--volumes",
        tmpdir.mkdir("vol2").strpath,
        "/vol2",
        "--workDir",
        tmpdir.mkdir("workDir").strpath,
        "jobstore",
    ]

    assert parsers.ContainerArgumentParser().parse_args(args)

    # test invalid volumes: the destination volume is not an absolute path
    args = [
        "--volumes",
        tmpdir.join("vol1").strpath,
        "vol1",
        image_flag,
        image,
        "jobstore",
    ]

    with pytest.raises(exceptions.ValidationError):
        parsers.ContainerArgumentParser().parse_args(args)
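

# A minimal usage sketch, not part of the original module: the helper above is
# presumably driven by small per-backend tests such as these. The test names
# are hypothetical; the flag names and image constants are taken from the
# tests further below.
def test_parser_volumes_docker(tmpdir):
    assert_parser_volumes("--docker", DOCKER_IMAGE, tmpdir)


def test_parser_volumes_singularity(tmpdir):
    assert_parser_volumes("--singularity", SINGULARITY_IMAGE, tmpdir)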


def assert_pipeline(image_flag, image, tmpdir):
    """
    Make sure parallel jobs work.

                head
                |  |
          child_a  child_b
                |  |
                tail
    """
    jobstore = tmpdir.join("jobstore")
    workdir = tmpdir.mkdir("working_dir")
    local_volume = tmpdir.mkdir("volume")
    container_volume = "/volume"

    # set parser
    parser = parsers.ContainerArgumentParser()
    args = [jobstore.strpath, "--workDir", workdir.strpath]

    if image_flag:
        args += [
            image_flag, image, "--volumes", local_volume.strpath,
            container_volume
        ]

    # set testing variables
    out_file = "bottle.txt"
    tmp_file_local = workdir.join(out_file)
    tmp_file_container = join("/tmp", out_file)
    vol_file_local = local_volume.join(out_file)
    vol_file_container = join(container_volume, out_file)

    if image_flag is None:
        tmp_file_container = tmp_file_local.strpath
        vol_file_container = vol_file_local.strpath

    # create jobs
    options = parser.parse_args(args)
    head = ContainerTestJob(options)
    child_a = ContainerTestJob(options)
    child_b = ContainerTestJob(options)
    tail = ContainerTestJob(options)

    # assign commands and attributes
    cmd = ["/bin/bash", "-c"]

    # test cwd and workDir; _rm_tmp_dir prevents the tmpdir from being removed
    head._rm_tmp_dir = False
    head.cwd = "/bin"
    head.cmd = cmd + ["pwd >> " + tmp_file_container]

    # test env
    child_a.env = {"FOO": "BAR"}
    child_a.cmd = cmd + ["echo $FOO >> " + vol_file_container]

    # test check_output
    child_b.check_output = False
    child_b.cmd = cmd + ["echo check_call >> " + vol_file_container]

    # test volumes
    tail.cmd = cmd + ["echo volume >> " + vol_file_container]

    # build dag
    head.addChild(child_a)
    head.addChild(child_b)
    head.addFollowOn(tail)

    # start pipeline
    jobs.ContainerJob.Runner.startToil(head, options)

    if image_flag:
        pattern = join(_TMP_PREFIX + "*", out_file)
        tmp_file_local = next(workdir.visit(pattern))

    # Test the output
    with open(tmp_file_local.strpath) as f:
        result = f.read()
        assert "/bin" in result

    if image_flag:
        with open(vol_file_local.strpath) as f:
            result = f.read()
            assert "volume" in result
            assert "BAR" in result
            assert "check_call" in result
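

# Hypothetical drivers for assert_pipeline (a sketch, not from the source):
# the helper supports a run without a container (image_flag=None) and a run
# per container backend, using the image constants referenced elsewhere in
# this module. Any gating on docker/singularity being installed is omitted.
def test_pipeline_local(tmpdir):
    assert_pipeline(None, None, tmpdir)


def test_pipeline_docker(tmpdir):
    assert_pipeline("--docker", DOCKER_IMAGE, tmpdir)


def test_pipeline_singularity(tmpdir):
    assert_pipeline("--singularity", SINGULARITY_IMAGE, tmpdir)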


def test_container_parser_cant_use_docker_and_singularity_together():
    with pytest.raises(click.UsageError) as error:
        args = ["--singularity", "i", "--docker", "j", "jobstore"]
        parsers.ContainerArgumentParser().parse_args(args)

    assert "use --singularity or --docker, not both." in str(error.value)


def test_help_toil_container_parser():
    parser = parsers.ContainerArgumentParser()
    check_help_toil(parser)
    assert "container arguments" in parser.format_help()


def test_container_parser_singularity_invalid_image():
    with pytest.raises(exceptions.ValidationError):
        args = ["--singularity", "florentino-ariza-img", "jobstore"]
        assert parsers.ContainerArgumentParser().parse_args(args)


def test_container_parser_singularity_valid_image():
    args = ["--singularity", SINGULARITY_IMAGE, "jobstore"]
    assert parsers.ContainerArgumentParser().parse_args(args)


def test_container_parser_docker_valid_image():
    args = ["--docker", DOCKER_IMAGE, "jobstore"]
    assert parsers.ContainerArgumentParser().parse_args(args)


def test_volumes_only_used_with_containers():
    with pytest.raises(click.UsageError) as error:
        args = ["--volumes", "foo", "bar", "jobstore"]
        parsers.ContainerArgumentParser().parse_args(args)

    assert "--volumes should be used only " in str(error.value)


# Example #9: an extended variant of assert_pipeline that also verifies that a
# failing job's output is surfaced in the main pipeline logs.
def assert_pipeline(image_flag, image, tmpdir, parallel=True):
    """
    Make sure parallel jobs work.

                head
                |  |
          child_a  child_b
                |  |
                tail

    Make sure a failing job's output is sent to the main pipeline logs

                head
                  |
               child_c
    """
    jobstore = tmpdir.join("jobstore")
    workdir = tmpdir.mkdir("working_dir")
    local_volume = tmpdir.mkdir("volume")
    container_volume = "/volume"

    # set parser
    parser = parsers.ContainerArgumentParser()
    args = [jobstore.strpath, "--workDir", workdir.strpath]

    if image_flag:
        args += [
            image_flag, image, "--volumes", local_volume.strpath,
            container_volume
        ]

    # set testing variables
    out_file = "bottle.txt"
    tmp_file_local = workdir.join(out_file)
    tmp_file_container = join("/tmp", out_file)
    vol_file_local = local_volume.join(out_file)
    vol_file_container = join(container_volume, out_file)

    if image_flag is None:
        tmp_file_container = tmp_file_local.strpath
        vol_file_container = vol_file_local.strpath

    # create jobs
    options = parser.parse_args(args)

    if parallel:
        # Run the parallel pipeline
        head = ContainerTestJob(options)
        child_a = ContainerTestJob(options)
        child_b = ContainerTestJob(options)
        tail = ContainerTestJob(options)

        # assign commands and attributes
        cmd = ["/bin/bash", "-c"]

        # test cwd and workDir; _rm_tmp_dir prevents the tmpdir from being removed
        head._rm_tmp_dir = False
        head.cwd = "/bin"
        head.cmd = cmd + ["pwd >> " + tmp_file_container]

        # test env
        child_a.env = {"FOO": "BAR"}
        child_a.cmd = cmd + ["echo $FOO >> " + vol_file_container]

        # test check_output
        child_b.check_output = False
        child_b.cmd = cmd + ["echo check_call >> " + vol_file_container]

        # test volumes
        tail.cmd = cmd + ["echo volume >> " + vol_file_container]

        # build dag
        head.addChild(child_a)
        head.addChild(child_b)
        head.addFollowOn(tail)

        # start pipeline
        jobs.ContainerJob.Runner.startToil(head, options)

        if image_flag:
            pattern = join(_TMP_PREFIX + "*", out_file)
            tmp_file_local = next(workdir.visit(pattern))

        # Test the output
        with open(tmp_file_local.strpath) as f:
            result = f.read()
            assert "/bin" in result

        if image_flag:
            with open(vol_file_local.strpath) as f:
                result = f.read()
                assert "volume" in result
                assert "BAR" in result
                assert "check_call" in result

    else:
        # Run the failing pipeline
        options.retryCount = 0
        head = ContainerTestJob(options)
        child_c = ContainerTestJob(options)
        child_c.cmd = ["rm", "/florentino-arisa"]

        head.addChild(child_c)

        with pytest.raises(FailedJobsException) as captured_error:
            # start pipeline
            jobs.ContainerJob.Runner.startToil(head, options)

        assert (
            "rm: cannot remove '/florentino-arisa': No such file or directory"
        ) in captured_error.value.msg
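

# A sketch of how the failure branch might be exercised (the test name is an
# assumption, not from the source): parallel=False runs head -> child_c, where
# child_c removes a nonexistent file, and the resulting stderr is expected to
# reach the FailedJobsException message.
def test_pipeline_failure_output_is_logged(tmpdir):
    assert_pipeline("--docker", DOCKER_IMAGE, tmpdir, parallel=False)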