Esempio n. 1
0
def test_git_install_skip_existing_nongit(path=None):
    """Installation is skipped when the target path is not a git repo."""
    def make_remotes():
        # Fresh dict per package so no state is shared between repos.
        return {"origin": {"url": "doesnt-matter", "contains": True}}

    # An existing directory that is not a Git repository is skipped.
    with swallow_logs(new_level=logging.WARNING) as cml:
        dist = GitDistribution(
            name="git",
            packages=[GitRepo(path=path, remotes=make_remotes())])
        dist.install_packages()
        assert "not a Git repository; skipping" in cml.out

    # A path that is not a directory at all is likewise skipped.
    with swallow_logs(new_level=logging.WARNING) as cml:
        dist = GitDistribution(
            name="git",
            packages=[GitRepo(path=op.join(path, "foo"),
                              remotes=make_remotes())])
        dist.install_packages()
        assert "not a directory; skipping" in cml.out
Esempio n. 2
0
def test_multi_debian_files():
    """`diff` exits with an error for a spec with multiple Debian dists.

    The error must be reported regardless of which side of the diff the
    offending spec file appears on, so both orderings are exercised.
    """
    # Same runtime string as before, split only for line length.
    expected = ("multiple <class 'reproman.distributions.debian."
                "DebianDistribution'> found")
    for args in [['diff', multi_debian_yaml, diff_1_yaml],
                 ['diff', diff_1_yaml, multi_debian_yaml]]:
        with swallow_logs() as log:
            with raises(SystemExit):
                main(args)
            assert_in_in(expected, log.lines)
Esempio n. 3
0
def test_create_and_start(tmpdir):
    """End-to-end CLI check: `create` writes the inventory `start` needs.

    Creates a shell resource via the `reproman` CLI using a custom config
    that points at a temporary inventory file, then verifies that `start`
    fails with an empty config (no inventory) but works with the original.
    """
    runner = Runner()
    tmpdir = str(tmpdir)
    cfg_file = op.join(tmpdir, "custom.cfg")
    inventory_file = op.join(tmpdir, "inventory.yml")
    # Point the config's inventory at a file under tmpdir.
    with open(cfg_file, "w") as cfg_fh:
        cfg_fh.write("[general]\ninventory_file = {}\n".format(inventory_file))

    def run_reproman(args):
        # Invoke the CLI with the custom config prepended.
        runner(["reproman", "--config", cfg_file] + args,
               expect_stderr=True)

    run_reproman(["create", "--resource-type=shell", "myshell"])

    # The created resource must have been recorded in the inventory.
    with open(inventory_file) as ifh:
        dumped = ifh.read()
    assert "myshell" in dumped
    assert "id" in dumped

    # Running with a different config fails ...
    empty_cfg_file = op.join(tmpdir, "empty.cfg")
    with open(empty_cfg_file, "w"):
        pass

    with swallow_logs(new_level=logging.ERROR) as cml:
        with pytest.raises(CommandError):
            runner(["reproman", "--config", empty_cfg_file,
                    "start", "myshell"])
        # The error detail is only visible when logs go to stderr.
        if os.environ.get("REPROMAN_LOGTARGET", "stderr") == "stderr":
            assert "ResourceNotFoundError" in cml.out
    # ... but using the same config works.
    run_reproman(["start", "myshell"])
Esempio n. 4
0
def test_run_and_fetch(context):
    """Submit a job via a spec file, then fetch it and check bookkeeping.

    After `fetch` with all_=True the registry should hold no job files
    and the job's output file should exist locally.
    """
    path = context["directory"]
    run = context["run_fn"]
    jobs = context["jobs_fn"]
    registry = context["registry"]

    # Job spec: touch an output file named "ok" on the "myshell" resource.
    create_tree(path,
                tree={
                    "js0.yaml": ("resource_name: myshell\n"
                                 "command_str: 'touch ok'\n"
                                 "outputs: ['ok']")
                })

    run(job_specs=["js0.yaml"])

    with swallow_logs(new_level=logging.INFO) as log:
        with swallow_outputs() as output:
            # Before fetching: job shows in status and is registered.
            jobs(queries=[], status=True)
            assert "myshell" in output.out
            assert len(registry.find_job_files()) == 1
            # Fetching removes the job from the registry.
            jobs(queries=[], action="fetch", all_=True)
            assert len(registry.find_job_files()) == 0
            jobs(queries=[], status=True)
            assert "No jobs" in log.out

    # The fetched output landed in the local directory.
    assert op.exists(op.join(path, "ok"))
Esempio n. 5
0
def test_parse_dpkgquery_line():
    """Exercise _parse_dpkgquery_line against canned query results."""
    parse = DebTracer()._parse_dpkgquery_line

    # Map fake query lines to the parsed values the lower-level helper
    # would produce for them.
    canned = {
        # Single owning package: result is passed through (minus rest).
        "unique": {"name": "pkg",
                   "path": "/path/to/file",
                   "pkgs_rest": None},
        # Several packages claiming a directory: entry is dropped.
        "multi_dir": {"name": "pkg",
                      "path": os.getcwd(),
                      "pkgs_rest": ", more, packages"},
        # Several packages claiming a file: kept, but with a warning.
        "multi_file": {"name": "pkg",
                       "path": __file__,
                       "pkgs_rest": ", more, packages"},
    }

    with mock.patch("reproman.distributions.debian.parse_dpkgquery_line",
                    canned.get):
        assert parse("unique") == {"name": "pkg", "path": "/path/to/file"}
        assert parse("multi_dir") is None
        with swallow_logs(new_level=logging.WARNING) as cml:
            assert parse("multi_file") == {"name": "pkg", "path": __file__}
            assert any("multiple packages " in ln for ln in cml.lines)
Esempio n. 6
0
def test_git_install(traced_repo_copy, tmpdir):
    """Install a traced git package and check re-install/dirty behavior.

    Installs into a fresh directory, re-installs idempotently, verifies
    that a dirty working tree is left untouched, and that a clean tree is
    moved to the recorded commit (detached HEAD).
    """
    git_dist = traced_repo_copy["git_dist"]
    git_pkg = git_dist.packages[0]
    tmpdir = str(tmpdir)

    # Install package to a new location.
    install_dir = op.join(tmpdir, "installed")
    git_pkg.path = install_dir

    install(git_dist, install_dir, check=True)
    # Installing a second time works if the root hexsha's match.
    install(git_dist, install_dir, check=True)

    runner = GitRunner(cwd=install_dir)

    # We don't try to change the state of the repository if it's dirty.
    runner(["git", "reset", "--hard", "HEAD^"])
    hexsha_existing = current_hexsha(runner)
    # Untracked file makes the working tree dirty.
    create_tree(install_dir, {"dirt": "dirt"})
    with swallow_logs(new_level=logging.WARNING) as log:
        install(git_dist, install_dir)
        assert "repository is dirty" in log.out
    # The dirty repo stayed at the commit we reset to.
    assert current_hexsha(runner) == hexsha_existing

    # We end up on the intended commit (detached) if the existing installation
    # repo is clean.
    os.remove(op.join(install_dir, "dirt"))
    install(git_dist, install_dir)
    assert current_hexsha(runner) == git_pkg.hexsha
    assert not current_branch(runner)
Esempio n. 7
0
def test_git_install_no_remote():
    """Installing a git package without any remotes logs a warning."""
    pkg = GitRepo(path="/tmp/shouldnt/matter")
    dist = GitDistribution(name="git", packages=[pkg])

    with swallow_logs(new_level=logging.WARNING) as cml:
        dist.initiate()
        dist.install_packages()
        assert "No remote known" in cml.out
Esempio n. 8
0
def test_get_distributions(demo1_spec):
    """Reading the distributions back out of a ReproMan spec file."""
    provenance = Provenance.factory(demo1_spec, 'reproman')

    # DEBUG-level logs are swallowed; only the count is asserted.
    with swallow_logs(new_level=logging.DEBUG):
        dists = provenance.get_distributions()
        assert len(dists) == 2
Esempio n. 9
0
def test_retrace(reprozip_spec2):
    """
    Test installing packages on the localhost.
    """
    with swallow_logs(new_level=logging.DEBUG) as cml:
        main(['retrace', '--spec', reprozip_spec2])
        # The retrace command should report reading the given spec.
        assert_in("reading spec file " + reprozip_spec2, cml.lines)
Esempio n. 10
0
def test_jobs_deleted_local_directory(context):
    """A job whose local directory vanished is reported as an error."""
    local_dir = context["directory"]
    run = context["run_fn"]
    jobs = context["jobs_fn"]

    run(command=["touch", "ok"], outputs=["ok"], resref="myshell")
    # Remove the directory the job was launched from.
    shutil.rmtree(local_dir)
    with swallow_logs(new_level=logging.ERROR) as cml:
        jobs(queries=[], status=True)
        assert "no longer exists" in cml.out
Esempio n. 11
0
def test_orc_log_failed(failed):
    """_log_failed reports the failed-subjob count and stderr locations.

    The expected stderr hint depends on how many subjobs failed.
    """
    nfailed = len(failed)
    with swallow_logs(new_level=logging.INFO) as log:
        orcs.Orchestrator._log_failed("jid", "metadir", failed)
        assert "{} subjob".format(nfailed) in log.out
        assert "jid stderr:" in log.out
        # Many failures: the message points at the whole stderr.* set.
        if nfailed > 6:
            assert "stderr.*" in log.out
        # Single failure: that subjob's stderr file is named directly.
        elif nfailed == 1:
            assert "stderr.{}".format(failed[0]) in log.out
        # A few failures: a brace-style listing is used (stderr.{...}).
        else:
            assert "stderr.{" in log.out
Esempio n. 12
0
def test_run_and_follow_action(context, action):
    """Check the --follow actions' stop/delete behavior on the resource.

    For "-if-success" variants the action (and its unsupported-'stop'
    notice) is skipped when the command fails; plain variants always act.
    Delete variants must leave the resource gone afterwards.
    """
    run = context["run_fn"]
    expect = "does not support the 'stop' feature"
    with swallow_logs(new_level=logging.INFO) as log:
        run(command=["false"], resref="myshell",
            follow=action)
        # A failing command triggers the action only for non-conditional
        # variants.
        if action.endswith("-if-success"):
            assert expect not in log.out
        else:
            assert expect in log.out

    # Re-run with a succeeding command (unless the resource was deleted).
    if action != "delete":
        with swallow_logs(new_level=logging.INFO) as log:
            run(command=["true"], resref="myshell",
                follow=action)
            assert expect in log.out

    # Delete variants should have removed the resource entirely.
    if action.startswith("delete"):
        resman = context["resource_manager"]
        with pytest.raises(ResourceNotFoundError):
            resman.get_resource("myshell", resref_type="name")
Esempio n. 13
0
def test_run_and_follow(context):
    """run with follow=True completes the job and leaves no queued jobs."""
    workdir = context["directory"]
    run = context["run_fn"]
    jobs = context["jobs_fn"]
    registry = context["registry"]

    run(command=["touch", "ok"], outputs=["ok"], resref="myshell", follow=True)

    with swallow_logs(new_level=logging.INFO) as cml:
        jobs(queries=[])
        # Following to completion removed the job from the registry.
        assert len(registry.find_job_files()) == 0
        assert "No jobs" in cml.out

    # The output file was fetched back to the local directory.
    assert op.exists(op.join(workdir, "ok"))
Esempio n. 14
0
def test_orc_datalad_run_failed(job_spec, dataset, shell):
    """A failing command's status and stderr surface in the fetch log."""
    # "iwillfail" is not a real command, so the job is expected to fail.
    job_spec["command_str"] = "iwillfail"
    job_spec["inputs"] = []

    with chpwd(dataset.path):
        orc = orcs.DataladLocalRunOrchestrator(
            shell, submission_type="local", job_spec=job_spec)
        # Standard orchestrator lifecycle: prepare, submit, wait.
        orc.prepare_remote()
        orc.submit()
        orc.follow()
        with swallow_logs(new_level=logging.INFO) as log:
            orc.fetch()
            assert "Job status" in log.out
            assert "stderr:" in log.out
Esempio n. 15
0
def test_jobs_query_unknown(context):
    """A query matching no job ID warns and leaves the registry intact."""
    run = context["run_fn"]
    jobs = context["jobs_fn"]
    registry = context["registry"]

    run(command=["doesntmatter"], resref="myshell")

    jobfiles = registry.find_job_files()
    assert len(jobfiles) == 1
    # Exactly one job is registered; grab its ID.
    jobid, = jobfiles.keys()
    with swallow_logs(new_level=logging.WARNING) as cml:
        jobs(queries=[jobid + "-trailing-garbage"])
        assert "No jobs matched" in cml.out
    # The unmatched query must not have removed anything.
    assert len(registry.find_job_files()) == 1
Esempio n. 16
0
def test_git_install_hexsha_not_found(traced_repo_copy, tmpdir):
    """Installing over a repo that lacks the recorded hexsha warns."""
    git_dist = traced_repo_copy["git_dist"]
    pkg = git_dist.packages[0]

    # Install the package into a fresh location under tmpdir.
    target = op.join(str(tmpdir), "installed")
    pkg.path = target
    install(git_dist, target)

    # Point the package at a hexsha that cannot exist in the clone.
    pkg.hexsha = "0" * 40
    with swallow_logs(new_level=logging.WARNING) as cml:
        install(git_dist, target)
        assert "expected hexsha wasn't found" in cml.out
Esempio n. 17
0
def test_git_install_skip_different_git(git_repo):
    """A root-hexsha mismatch makes install skip the repository."""
    with swallow_logs(new_level=logging.WARNING) as cml:
        pkg = GitRepo(path=git_repo,
                      root_hexsha="definitely doesn't match",
                      remotes={"origin": {"url": "doesnt-matter",
                                          "contains": True}})
        dist = GitDistribution(name="git", packages=[pkg])
        dist.install_packages()
        assert "doesn't match expected hexsha; skipping" in cml.out
Esempio n. 18
0
def test_delete_interface():
    """
    Test deleting a resource.

    Mocks the docker client and the resource inventory, runs the CLI
    `delete` command, and checks both the docker calls made and the log.
    """

    with patch('docker.Client') as client, \
        patch('reproman.resource.ResourceManager._save'), \
        patch('reproman.resource.ResourceManager._get_inventory') as get_inventory, \
        swallow_logs(new_level=logging.DEBUG) as log:

        # One fake running container that matches the inventory entry.
        client.return_value = MagicMock(
            containers=lambda all: [{
                'Id': '326b0fdfbf838',
                'Names': ['/my-resource'],
                'State': 'running'
            }])

        # Inventory entry for the resource being deleted.
        get_inventory.return_value = {
            "my-resource": {
                "status": "running",
                "engine_url": "tcp://127.0.0.1:2375",
                "type": "docker-container",
                "name": "my-resource",
                "id": "326b0fdfbf838"
            }
        }

        args = ['delete', '--skip-confirmation', 'my-resource']
        with patch("reproman.interface.delete.get_manager",
                   return_value=ResourceManager()):
            main(args)

        # The docker client must be constructed against the engine URL
        # and asked to force-remove the matching container.
        calls = [
            call(base_url='tcp://127.0.0.1:2375'),
            call().remove_container(
                {
                    'State': 'running',
                    'Id': '326b0fdfbf838',
                    'Names': ['/my-resource']
                },
                force=True)
        ]
        client.assert_has_calls(calls, any_order=True)

        assert_in('Deleted the environment my-resource', log.lines)
Esempio n. 19
0
def test_venv_install_noop():
    """install_packages is a no-op when only editable packages exist."""
    editable_pkg = VenvPackage(name="imeditable",
                               version="0.1.0",
                               editable=True,
                               local=True)
    env = VenvEnvironment(path="/tmp/doesnt/matter/venv",
                          python_version="3.7",
                          packages=[editable_pkg])
    dist = VenvDistribution(name="venv",
                            path="/tmp/doesnt/matter/",
                            venv_version="15.1.0",
                            environments=[env])

    with swallow_logs(new_level=logging.INFO) as cml:
        dist.install_packages()
        # Editable/local-only environments are skipped with a notice.
        assert "No local, non-editable packages found" in cml.out
Esempio n. 20
0
def test_orc_datalad_pair_merge_conflict(job_spec, dataset, shell):
    """A conflicting local change makes fetch warn and leave an unmerged
    index rather than silently clobbering either side."""
    with chpwd(dataset.path):
        # The job writes "baz" into a file named baz.
        job_spec["_resolved_command_str"] = "sh -c 'echo baz >baz'"
        job_spec["inputs"] = []
        job_spec["outputs"] = []

        orc0 = orcs.DataladPairOrchestrator(shell,
                                            submission_type="local",
                                            job_spec=job_spec)
        orc0.prepare_remote()
        orc0.submit()
        orc0.follow()
        # Introduce a conflict.
        (dataset.pathobj / "baz").write_text("different")
        dataset.save()
        with swallow_logs(new_level=logging.WARNING) as logs:
            orc0.fetch()
            assert "Failed to merge in changes" in logs.out
        # git reports unmerged entries for the conflicted path.
        assert dataset.repo.call_git(["ls-files", "--unmerged"]).strip()
Esempio n. 21
0
def test_jobs_orc_error(context):
    """An orchestrator error during status is logged, not printed."""
    run = context["run_fn"]
    jobs = context["jobs_fn"]
    registry = context["registry"]

    run(command=["doesntmatter1"], resref="myshell")

    jobfiles = registry.find_job_files()
    assert len(jobfiles) == 1

    with swallow_outputs() as output:
        with swallow_logs(new_level=logging.ERROR) as log:
            def die_orc(*args, **kwargs):
                # Stand-in failure raised from inside the status display.
                raise OrchestratorError("resurrection failed")

            with patch("reproman.interface.jobs.show_oneline",
                       side_effect=die_orc):
                jobs(queries=[], status=True)
            # The failing job is absent from stdout; the error goes to
            # the log instead.
            assert "myshell" not in output.out
            assert "resurrection failed" in log.out
Esempio n. 22
0
def test_create_interface():
    """
    Test creating an environment

    Mocks the docker client so `create` can run without a daemon, then
    checks the docker calls and the progress/confirmation log lines.
    """

    with patch('docker.Client') as client, \
        patch('reproman.resource.ResourceManager.save_inventory'), \
        patch('reproman.resource.ResourceManager._get_inventory'), \
        swallow_logs(new_level=logging.DEBUG) as log:

        # Fake docker engine: no existing containers, a two-step pull
        # stream, and a fixed ID for the created container.
        client.return_value = MagicMock(
            containers=lambda all: [],
            pull=lambda repository, stream: [
                b'{ "status" : "status 1", "progress" : "progress 1" }',
                b'{ "status" : "status 2", "progress" : "progress 2" }'
            ],
            create_container=lambda name, image, stdin_open, tty, command: {
                'Id': '18b31b30e3a5'
            }
        )

        args = ['create',
                '--resource-type', 'docker-container',
                '--backend', 'engine_url=tcp://127.0.0.1:2376',
                '--',
                'my-test-resource'
        ]
        with patch("reproman.interface.create.get_manager",
                   return_value=ResourceManager()):
            main(args)

        # The client must be built against the backend URL and the new
        # container started.
        calls = [
            call(base_url='tcp://127.0.0.1:2376'),
            call().start(container='18b31b30e3a5')
        ]
        client.assert_has_calls(calls, any_order=True)

        # Pull progress and the final confirmation are logged.
        assert_in("status 1 progress 1", log.lines)
        assert_in("status 2 progress 2", log.lines)
        assert_in("Created the environment my-test-resource", log.lines)
Esempio n. 23
0
def test_login_interface():
    """
    Test logging into an environment

    Mocks the docker client, the inventory, and dockerpty so `login`
    can run without a daemon or TTY; verifies the client construction
    and the connection log message.
    """

    with patch('docker.Client') as client, \
        patch('reproman.resource.ResourceManager._get_inventory') as get_inventory, \
        patch('dockerpty.start'), \
        swallow_logs(new_level=logging.DEBUG) as log:

        # One fake running container matching the inventory entry.
        client.return_value = MagicMock(
            containers=lambda all: [{
                'Id': '18b31b30e3a5',
                'Names': ['/my-test-resource'],
                'State': 'running'
            }], )

        get_inventory.return_value = {
            "my-test-resource": {
                "status": "running",
                "engine_url": "tcp://127.0.0.1:2375",
                "type": "docker-container",
                "name": "my-test-resource",
                "id": "18b31b30e3a5"
            }
        }

        args = ['login', 'my-test-resource']

        with patch("reproman.interface.login.get_manager",
                   return_value=ResourceManager()):
            main(args)

        # Exactly one client is constructed, against the engine URL.
        assert client.call_count == 1

        calls = [call(base_url='tcp://127.0.0.1:2375')]
        client.assert_has_calls(calls, any_order=True)

        assert_in("Opening TTY connection to docker container.", log.lines)
Esempio n. 24
0
def test_jobs_deleted_resource(context):
    """Jobs on a deleted resource error out without hiding other jobs."""
    run = context["run_fn"]
    jobs = context["jobs_fn"]
    registry = context["registry"]
    resman = context["resource_manager"]

    resman.create("todelete", resource_type="shell")

    # One job on the soon-to-be-deleted resource, one on a live one.
    run(command=["doesntmatter0"], resref="todelete")
    run(command=["doesntmatter1"], resref="myshell")

    resman.delete(resman.get_resource("todelete"))

    assert len(registry.find_job_files()) == 2

    with swallow_outputs() as output:
        with swallow_logs(new_level=logging.ERROR) as cml:
            jobs(queries=[], status=True)
            # The deleted resource surfaces as a logged error ...
            assert "todelete" in cml.out
            # ... is excluded from the status output ...
            assert "todelete" not in output.out
            # ... while the live resource still shows up.
            assert "myshell" in output.out
Esempio n. 25
0
def test_globbedpaths(path=None):
    """Exercise GlobbedPaths expansion: relative/dotted/parent patterns,
    full-path output, '.' handling, expand flag, and no-match logging."""
    dotdir = op.curdir + op.sep

    # Patterns expanded relative to `path` itself.
    for patterns, expected in [
        (["1.txt", "2.dat"], {"1.txt", "2.dat"}),
        ([dotdir + "1.txt", "2.dat"], {dotdir + "1.txt", "2.dat"}),
        (["*.txt", "*.dat"], {"1.txt", "2.dat", u"bβ.dat", "3.txt"}),
        ([dotdir + "*.txt",
          "*.dat"], {dotdir + "1.txt", "2.dat", u"bβ.dat", dotdir + "3.txt"}),
        (["subdir/*.txt"], {"subdir/1.txt", "subdir/2.txt"}),
        ([dotdir + "subdir/*.txt"],
         {dotdir + p
          for p in ["subdir/1.txt", "subdir/2.txt"]}),
        (["*.txt"], {"1.txt", "3.txt"})
    ]:
        gp = GlobbedPaths(patterns, pwd=path)
        eq_(set(gp.expand()), expected)
        eq_(set(gp.expand(full=True)), {op.join(path, p) for p in expected})

    # Same patterns, but with pwd inside the subdirectory; parent-dir
    # ("../") prefixes are preserved in the results.
    pardir = op.pardir + op.sep
    subdir_path = op.join(path, "subdir")
    for patterns, expected in [
        (["*.txt"], {"1.txt", "2.txt"}),
        ([dotdir + "*.txt"], {dotdir + p
                              for p in ["1.txt", "2.txt"]}),
        ([pardir + "*.txt"], {pardir + p
                              for p in ["1.txt", "3.txt"]}),
        ([dotdir + pardir + "*.txt"],
         {dotdir + pardir + p
          for p in ["1.txt", "3.txt"]}), (["subdir/"], {"subdir/"})
    ]:
        gp = GlobbedPaths(patterns, pwd=subdir_path)
        eq_(set(gp.expand()), expected)
        eq_(set(gp.expand(full=True)),
            {op.join(subdir_path, p)
             for p in expected})

    # Full patterns still get returned as relative to pwd.
    gp = GlobbedPaths([op.join(path, "*.dat")], pwd=path)
    eq_(gp.expand(), ["2.dat", u"bβ.dat"])

    # "." gets special treatment.
    gp = GlobbedPaths([".", "*.dat"], pwd=path)
    eq_(set(gp.expand()), {"2.dat", u"bβ.dat", "."})
    eq_(gp.expand(dot=False), ["2.dat", u"bβ.dat"])
    gp = GlobbedPaths(["."], pwd=path, expand=False)
    eq_(gp.expand(), ["."])
    eq_(gp.paths, ["."])

    # Glob outputs are sorted within each pattern (here glob.glob is
    # patched to return canned results).
    glob_results = {"z": "z", "a": ["x", "d", "b"]}
    with patch('glob.glob', glob_results.get):
        gp = GlobbedPaths(["z", "a"])
        eq_(gp.expand(), ["z", "b", "d", "x"])

    # glob expansion for paths property is determined by expand argument.
    for expand, expected in [(True, ["2.dat", u"bβ.dat"]), (False, ["*.dat"])]:
        gp = GlobbedPaths(["*.dat"], pwd=path, expand=expand)
        eq_(gp.paths, expected)

    # A pattern that matches nothing is reported at DEBUG level.
    with swallow_logs(new_level=logging.DEBUG) as cml:
        GlobbedPaths(["not here"], pwd=path).expand()
        assert_in("No matching files found for 'not here'", cml.out)
Esempio n. 26
0
def test_backend_parameters_unknown_resource():
    """Asking for parameters of an unknown backend logs a warning."""
    with swallow_logs(new_level=logging.WARNING) as cml:
        backend_parameters(["i'm-unknown"])
        assert "Failed to import" in cml.out
Esempio n. 27
0
def test_distributions(demo1_spec):
    """Read distributions from a spec and drive a mocked Debian install.

    apt-cache and grep calls are faked via mock_execute_command; the
    snapshot.debian.org lookup is faked via a patched requests.get.
    """

    def mock_execute_command(command, env=None):
        # Canned `apt-cache policy` output for the three packages the
        # spec is expected to install.
        if isinstance(command, list):
            if command == ['apt-cache', 'policy', 'libc6-dev:amd64']:
                return (
                    b'libc6-dev: \
                        Installed: (none) \
                        Candidate: 2.19-18+deb8u4 \
                        Version table: \
                            2.19-18+deb8u4 500 \
                            500 http://archive.ubuntu.com/ubuntu xenial/universe amd64 Packages',
                    0
                )
            if command == ['apt-cache', 'policy', 'afni:amd64']:
                return (
                    b'afni: \
                        Installed: 16.2.07~dfsg.1-2~nd90+1 \
                        Candidate: 16.2.07~dfsg.1-2~nd90+1 \
                        Version table: \
                            16.2.07~dfsg.1-2~nd90+1 500 \
                            500 http://archive.ubuntu.com/ubuntu xenial/universe amd64 Packages',
                    0
                )
            if command == ['apt-cache', 'policy', 'dcm2niix:amd64']:
                return (
                    b'dcm2niix: \
                        Installed: (none) \
                        Candidate: 1:1.0.20171017+git3-g9ccc4c0-1~nd16.04+1 \
                        Version table: \
                            1:1.0.20171017+git3-g9ccc4c0-1~nd16.04+1 500 \
                            500 http://archive.ubuntu.com/ubuntu xenial/universe amd64 Packages',
                    0
                )
        # String commands starting with grep report "no match".
        if isinstance(command, str):
            if command.startswith('grep'):
                return (None, 1)

    provenance = Provenance.factory(demo1_spec)
    distributions = provenance.get_distributions()
    distributions = items_to_dict(distributions)
    assert set(distributions.keys()) == {'conda', 'debian'}
    # Test DebianDistribution class.
    debian_distribution = distributions['debian']
    environment = MagicMock()
    environment.execute_command = mock_execute_command
    environment.exists.return_value = False

    with patch('requests.get') as requests, \
        swallow_logs(new_level=logging.DEBUG) as log:

        # Fake snapshot-archive response pointing at the next snapshot.
        requests.return_value = type("TestObject", (object,), {})()
        requests.return_value.text = '<a href="/archive/debian/20171208T032012Z/dists/sid/">next change</a>'

        debian_distribution.initiate(environment)
        debian_distribution.install_packages(environment)

        # Expected sources.list additions and the install command line.
        assert_in("Adding Debian update to environment command list.", log.lines)
        assert_in("Adding line 'deb http://snapshot.debian.org/archive/debian/20170531T084046Z/ sid main \
contrib non-free' to /etc/apt/sources.list.d/reproman.sources.list", log.lines)
        assert_in("Adding line 'deb http://snapshot-neuro.debian.net:5002/archive/neurodebian/20171208T032012Z/ \
xenial main contrib non-free' to /etc/apt/sources.list.d/reproman.sources.list", log.lines)
        assert_in("Adding line 'deb http://snapshot-neuro.debian.net:5002/archive/neurodebian/20171208T032012Z/ \
xenial main contrib non-free' to /etc/apt/sources.list.d/reproman.sources.list", log.lines)
        assert_in('Installing libc6-dev=2.19-18+deb8u4, afni=16.2.07~dfsg.1-5~nd16.04+1, \
dcm2niix=1:1.0.20171017+git3-g9ccc4c0-1~nd16.04+1', log.lines)

    """
Esempio n. 28
0
 def try_():
     # True once fetch_fn() stops logging the incomplete-job notice,
     # i.e. the job has finished enough to be fetched.
     # NOTE(review): fetch_fn comes from the enclosing scope (not
     # visible in this chunk) — confirm against the outer function.
     with swallow_logs(new_level=logging.INFO) as log:
         fetch_fn()
         return "Not fetching incomplete job" not in log.out