Example #1
def test_matrix_environments(tmpdir, dummy_packages):
    conf = config.Config()

    conf.env_dir = six.text_type(tmpdir.join("env"))

    conf.pythons = [PYTHON_VER1, PYTHON_VER2]
    conf.matrix = {
        "asv_dummy_test_package_1": [DUMMY1_VERSION, None],
        "asv_dummy_test_package_2": DUMMY2_VERSIONS
    }
    environments = list(environment.get_environments(conf, None))

    assert len(environments) == 2 * 2 * 2

    # Only test the first two environments, since this is so time
    # consuming
    for env in environments[:2]:
        env.create()

        output = env.run([
            '-c',
            'import asv_dummy_test_package_1 as p, sys; sys.stdout.write(p.__version__)'
        ],
                         valid_return_codes=None)
        if 'asv_dummy_test_package_1' in env._requirements:
            assert output.startswith(
                six.text_type(env._requirements['asv_dummy_test_package_1']))

        output = env.run([
            '-c',
            'import asv_dummy_test_package_2 as p, sys; sys.stdout.write(p.__version__)'
        ])
        assert output.startswith(
            six.text_type(env._requirements['asv_dummy_test_package_2']))
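Note: these snippets are taken from the asv test suite and omit their module-level setup. A minimal sketch of the imports and constants they rely on is shown below; the version strings are placeholders (asv's own tests compute them from the running interpreter and its bundled dummy packages), and helpers such as tools.generate_test_repo, generate_repo_from_ops, _test_generic_repo and the dummy_packages fixture come from asv's local test helper modules, which are not reproduced here.

import os
from os.path import join

import six
import pytest

from asv import config, environment, repo, util
from asv.repo import get_repo

# Placeholder values -- asv's own tests derive these dynamically.
PYTHON_VER1, PYTHON_VER2 = "3.7", "3.8"
DUMMY1_VERSION = "0.14"
DUMMY2_VERSIONS = ["0.3.7", "0.3.9"]
DOCUTILS_VERSION = "0.14"
COLORAMA_VERSIONS = ["0.3.7", "0.3.9"]
WIN = (os.name == "nt")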
Example #2
def test_environment_select_autodetect():
    conf = config.Config()
    conf.environment_type = "conda"
    conf.pythons = [PYTHON_VER1]
    conf.matrix = {
        "six": ["1.10"],
    }

    # Check autodetect
    environments = list(environment.get_environments(conf,
                                                     [":" + PYTHON_VER1]))
    assert len(environments) == 1
    assert environments[0].python == PYTHON_VER1
    assert environments[0].tool_name in ("virtualenv", "conda")

    # Check interaction with exclude
    conf.exclude = [{'environment_type': 'matches nothing'}]
    environments = list(environment.get_environments(conf,
                                                     [":" + PYTHON_VER1]))
    assert len(environments) == 1

    conf.exclude = [{'environment_type': 'virtualenv|conda'}]
    environments = list(environment.get_environments(conf,
                                                     [":" + PYTHON_VER1]))
    assert len(environments) == 1

    conf.exclude = [{'environment_type': 'conda'}]
    environments = list(
        environment.get_environments(conf, ["conda:" + PYTHON_VER1]))
    assert len(environments) == 1
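The second argument to environment.get_environments is a list of environment specifier strings of the form "tool:python", where the tool part may be left empty for autodetection. A short usage sketch (reusing conf from the example above) contrasting the two spellings exercised here:

# ":<python>"      -> Python version pinned, tool autodetected
# "conda:<python>" -> tool and Python version both pinned
for spec in [":" + PYTHON_VER1, "conda:" + PYTHON_VER1]:
    envs = list(environment.get_environments(conf, [spec]))
    print(spec, [(e.tool_name, e.python) for e in envs])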
Example #3
def test_filter_date_period(tmpdir, dvcs_type):
    tmpdir = six.text_type(tmpdir)

    dates = [
        datetime.datetime(2001, 1, 1),
        datetime.datetime(2001, 1, 2),
        datetime.datetime(2001, 1, 8)
    ]

    dvcs = tools.generate_repo_from_ops(
        tmpdir, dvcs_type,
        [("commit", j, dates[j]) for j in range(len(dates))])
    commits = dvcs.get_branch_hashes()[::-1]
    assert len(commits) == len(dates)

    conf = config.Config()
    conf.dvcs = dvcs_type
    conf.repo = dvcs.path
    r = repo.get_repo(conf)

    # Basic filtering
    weekly_commits = r.filter_date_period(commits, 60*60*24*7)
    assert weekly_commits == [commits[0], commits[2]]

    daily_commits = r.filter_date_period(commits, 60*60*24)
    assert daily_commits == commits

    # Test with old_commits specified
    monthly_commits = r.filter_date_period(commits[1:], 60*60*24*30, commits[:1])
    assert monthly_commits == []
Example #4
def test_matrix_environments(tmpdir, dummy_packages):
    conf = config.Config()

    conf.env_dir = six.text_type(tmpdir.join("env"))

    conf.pythons = [PYTHON_VER1, PYTHON_VER2]
    conf.matrix = {
        "docutils": [DOCUTILS_VERSION, None],
        "colorama": COLORAMA_VERSIONS
    }
    environments = list(environment.get_environments(conf, None))

    assert len(environments) == 2 * 2 * 2

    # Only test the first two environments, since this is so time
    # consuming
    for env in environments[:2]:
        env.create()

        output = env.run([
            '-c',
            'import docutils, sys; sys.stdout.write(docutils.__version__)'
        ],
                         valid_return_codes=None)
        if 'docutils' in env._requirements:
            assert output.startswith(
                six.text_type(env._requirements['docutils']))

        output = env.run([
            '-c',
            'import colorama, sys; sys.stdout.write(colorama.__version__)'
        ])
        assert output.startswith(six.text_type(env._requirements['colorama']))
Example #5
def test_install_env_matrix_values(tmpdir):
    tmpdir = six.text_type(tmpdir)

    dvcs = generate_test_repo(tmpdir, [0], dvcs_type='git')
    commit_hash = dvcs.get_branch_hashes()[0]

    conf = config.Config()
    conf.env_dir = os.path.join(tmpdir, "env")
    conf.pythons = [PYTHON_VER1]
    conf.repo = os.path.abspath(dvcs.path)
    conf.matrix = {
        'env': {
            'SOME_ASV_TEST_BUILD_VALUE': '1'
        },
        'env_nobuild': {
            'SOME_ASV_TEST_NON_BUILD_VALUE': '1'
        }
    }

    repo = get_repo(conf)

    env = list(environment.get_environments(conf, None))[0]
    env.create()
    env.install_project(conf, repo, commit_hash)

    env.run([
        '-c', 'import asv_test_repo.build_time_env as t, sys; '
        'sys.exit(0 if t.env["SOME_ASV_TEST_BUILD_VALUE"] == "1" else 1)'
    ])

    env.run([
        '-c', 'import asv_test_repo.build_time_env as t, sys; '
        'sys.exit(0 if "SOME_ASV_TEST_NON_BUILD_VALUE" not in t.env else 1)'
    ])
Example #6
def test_environment_environ_path(environment_type, tmpdir, monkeypatch):
    # Check that virtualenv binary dirs are in the PATH
    conf = config.Config()
    conf.env_dir = six.text_type(tmpdir.join("env"))
    conf.environment_type = environment_type
    conf.pythons = [PYTHON_VER1]
    conf.matrix = {}

    env, = environment.get_environments(conf, [])
    env.create()
    output = env.run(['-c', 'import os; print(os.environ["PATH"])'])
    paths = output.strip().split(os.pathsep)
    assert os.path.commonprefix([paths[0], conf.env_dir]) == conf.env_dir

    # Check user-site directory is not in sys.path
    output = env.run(['-c', 'import site; print(site.ENABLE_USER_SITE)'])
    usersite_in_syspath = output.strip()
    assert usersite_in_syspath == "False"

    # Check PYTHONPATH is ignored
    monkeypatch.setenv(str('PYTHONPATH'), str(tmpdir))
    output = env.run(
        ['-c', 'import os; print(os.environ.get("PYTHONPATH", ""))'])
    assert output.strip() == ""

    monkeypatch.setenv(str('ASV_PYTHONPATH'), str("Hello python path"))
    output = env.run(['-c', 'import os; print(os.environ["PYTHONPATH"])'])
    assert output.strip() == "Hello python path"
Example #7
def test_build_isolation(tmpdir):
    # build should not fail with build_cache on projects that have pyproject.toml
    tmpdir = six.text_type(tmpdir)

    # Create installable repository with pyproject.toml in it
    dvcs = generate_test_repo(tmpdir, [0], dvcs_type='git')
    fn = os.path.join(dvcs.path, 'pyproject.toml')
    with open(fn, 'w') as f:
        f.write('[build-system]\n' 'requires = ["wheel", "setuptools"]')
    dvcs.add(fn)
    dvcs.commit("Add pyproject.toml")
    commit_hash = dvcs.get_hash("master")

    # Setup config
    conf = config.Config()
    conf.env_dir = os.path.join(tmpdir, "env")
    conf.pythons = [PYTHON_VER1]
    conf.matrix = {}
    conf.repo = os.path.abspath(dvcs.path)
    conf.build_cache_size = 8

    repo = get_repo(conf)

    env = list(environment.get_environments(conf, None))[0]
    env.create()

    # Project installation should succeed
    env.install_project(conf, repo, commit_hash)
Example #8
def test_install_success(tmpdir):
    # Check that install_project really installs the package. (gh-805)
    # This may fail if, e.g., the pip invocation in install_command is confused by
    # an .egg-info directory in its cwd into thinking the package is already installed.
    tmpdir = six.text_type(tmpdir)

    dvcs = generate_test_repo(tmpdir, [0], dvcs_type='git')
    commit_hash = dvcs.get_branch_hashes()[0]

    conf = config.Config()
    conf.env_dir = os.path.join(tmpdir, "env")
    conf.pythons = [PYTHON_VER1]
    conf.repo = os.path.abspath(dvcs.path)
    conf.matrix = {}
    conf.build_cache_size = 0

    repo = get_repo(conf)

    env = list(environment.get_environments(conf, None))[0]
    env.create()
    env.install_project(conf, repo, commit_hash)

    env.run([
        '-c',
        'import asv_test_repo as t, sys; sys.exit(0 if t.dummy_value == 0 else 1)'
    ])
Example #9
def test_root_ceiling(dvcs_type, tmpdir):
    # Check that git/hg does not try to look for repository in parent
    # directories.
    tmpdir = six.text_type(tmpdir)
    dvcs1 = tools.generate_repo_from_ops(tmpdir, dvcs_type, [("commit", 1)])
    dvcs2 = tools.generate_repo_from_ops(tmpdir, dvcs_type, [("commit", 2)])
    commit1 = dvcs1.get_branch_hashes()[0]
    commit2 = dvcs2.get_branch_hashes()[0]

    conf = config.Config()
    conf.branches = []
    conf.dvcs = dvcs_type
    conf.project = join(tmpdir, "repo")
    conf.repo = dvcs1.path

    r = repo.get_repo(conf)

    # Checkout into a subdir inside another repository
    workcopy_dir = join(dvcs2.path, "workcopy")
    r.checkout(workcopy_dir, commit1)

    # Corrupt the checkout
    for pth in ['.hg', '.git']:
        pth = os.path.join(workcopy_dir, pth)
        if os.path.isdir(pth):
            shutil.rmtree(pth)

    # Operation must fail (commit2 is not in dvcs1), not use the
    # parent repository
    with pytest.raises(Exception):
        r.checkout(workcopy_dir, commit2)
Example #10
def test_conda_environment_file(tmpdir, dummy_packages):
    env_file_name = six.text_type(tmpdir.join("environment.yml"))
    with open(env_file_name, "w") as temp_environment_file:
        temp_environment_file.write(
            'name: test_conda_envs\ndependencies:\n  - asv_dummy_test_package_2'
        )

    conf = config.Config()
    conf.env_dir = six.text_type(tmpdir.join("env"))
    conf.environment_type = "conda"
    conf.pythons = [PYTHON_VER1]
    conf.conda_environment_file = env_file_name
    conf.matrix = {"asv_dummy_test_package_1": [DUMMY1_VERSION]}

    environments = list(environment.get_environments(conf, None))

    assert len(environments) == 1 * 1 * 1

    for env in environments:
        env.create()

        output = env.run([
            '-c',
            'import asv_dummy_test_package_1 as p, sys; sys.stdout.write(p.__version__)'
        ])
        assert output.startswith(six.text_type(DUMMY1_VERSION))

        output = env.run([
            '-c',
            'import asv_dummy_test_package_2 as p, sys; sys.stdout.write(p.__version__)'
        ])
        assert output.startswith(six.text_type(DUMMY2_VERSIONS[1]))
Example #11
def test_no_such_name_error(dvcs_type, tmpdir):
    tmpdir = six.text_type(tmpdir)
    dvcs = tools.generate_test_repo(tmpdir, values=[0], dvcs_type=dvcs_type)

    conf = config.Config()
    conf.branches = []
    conf.dvcs = dvcs_type
    conf.project = "project"
    conf.repo = dvcs.path

    r = repo.get_repo(conf)

    # Check that NoSuchNameError gets raised correctly
    assert r.get_hash_from_name(None) == dvcs.get_hash(r._default_branch)
    with pytest.raises(repo.NoSuchNameError):
        r.get_hash_from_name("badbranch")

    if dvcs_type == "git":
        # Corrupted repository/etc should not give NoSuchNameError
        util.long_path_rmtree(join(dvcs.path, ".git"))
        with pytest.raises(Exception) as excinfo:
            r.get_hash_from_name(None)
        assert excinfo.type not in (AssertionError, repo.NoSuchNameError)
    elif dvcs_type == "hg":
        # hglib seems to do some caching, so this doesn't work
        pass
Example #12
def test_iter_env_matrix_combinations():
    conf = config.Config()
    conf.environment_type = 'something'
    conf.pythons = ["2.6"]
    conf.matrix = {}
    conf.include = []

    # (matrix, expected)
    env_matrices = [
        ({'var0': ['val0', 'val1'], 'var1': ['val2', 'val3']},
         [{'var0': 'val0', 'var1': 'val2'},
          {'var0': 'val0', 'var1': 'val3'},
          {'var0': 'val1', 'var1': 'val2'},
          {'var0': 'val1', 'var1': 'val3'}]),
        ({'var0': ['val0', 'val1'], 'var1': ['val2', None]},
         [{'var0': 'val0', 'var1': 'val2'}, {'var0': 'val0'},
          {'var0': 'val1', 'var1': 'val2'}, {'var0': 'val1'}]),
        ({'var0': ['val0', 'val1']},
         [{'var0': 'val0'}, {'var0': 'val1'}]),
        ({}, [{}]),
    ]

    for matrix, expected in env_matrices:
        conf.matrix = {'env': matrix}
        expected = [{('env', key): value for key, value in item.items()}
                    for item in expected]
        for m in expected:
            m['python', None] = "2.6"
        result = _sorted_dict_list(environment.iter_matrix(
            conf.environment_type, conf.pythons, conf))
        assert result == _sorted_dict_list(expected)
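_sorted_dict_list is a small test helper that is not shown in this listing; a sketch sufficient for the comparisons in these examples (not necessarily asv's exact implementation):

def _sorted_dict_list(lst):
    # Bring a list of dicts into a canonical order so that two such lists
    # can be compared with == regardless of their original ordering.
    return list(sorted(lst, key=lambda d: sorted(d.items())))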
Example #13
def test_matrix_expand_include():
    conf = config.Config()
    conf.environment_type = 'something'
    conf.pythons = ["2.6"]
    conf.matrix = {'a': '1'}
    conf.include = [
        {'python': '3.4', 'b': '2'},
        {'sys_platform': sys.platform, 'python': '2.7', 'b': '3'},
        {'sys_platform': sys.platform + 'nope', 'python': '2.7', 'b': '3'},
        {'environment_type': 'nope', 'python': '2.7', 'b': '4'},
        {'environment_type': 'something', 'python': '2.7', 'b': '5'},
    ]

    combinations = _sorted_dict_list(environment.iter_requirement_matrix(conf))
    expected = _sorted_dict_list([
        {'python': '2.6', 'a': '1'},
        {'python': '3.4', 'b': '2'},
        {'python': '2.7', 'b': '3'},
        {'python': '2.7', 'b': '5'}
    ])
    assert combinations == expected

    conf.include = [
        {'b': '2'}
    ]
    with pytest.raises(util.UserError):
        list(environment.iter_requirement_matrix(conf))
Example #14
def test_matrix_expand_basic():
    conf = config.Config()
    conf.environment_type = 'something'
    conf.pythons = ["2.6", "2.7"]
    conf.matrix = {
        'pkg1': None,
        'pkg2': '',
        'pkg3': [''],
        'pkg4': ['1.2', '3.4'],
        'pkg5': []
    }

    combinations = _sorted_dict_list(environment.iter_matrix(
        conf.environment_type, conf.pythons, conf))
    expected = _sorted_dict_list([
        {('python', None): '2.6', ('req', 'pkg2'): '', ('req', 'pkg3'): '',
         ('req', 'pkg4'): '1.2', ('req', 'pkg5'): ''},
        {('python', None): '2.6', ('req', 'pkg2'): '', ('req', 'pkg3'): '',
         ('req', 'pkg4'): '3.4', ('req', 'pkg5'): ''},
        {('python', None): '2.7', ('req', 'pkg2'): '', ('req', 'pkg3'): '',
         ('req', 'pkg4'): '1.2', ('req', 'pkg5'): ''},
        {('python', None): '2.7', ('req', 'pkg2'): '', ('req', 'pkg3'): '',
         ('req', 'pkg4'): '3.4', ('req', 'pkg5'): ''},
    ])
    assert combinations == expected
Example #15
def test_matrix_environments(tmpdir):
    conf = config.Config()

    conf.env_dir = six.text_type(tmpdir.join("env"))

    conf.pythons = ["2.7", "3.4"]
    conf.matrix = {
        "six": ["1.4", None],
        "colorama": ["0.3.1", "0.3.3"]
    }
    environments = list(environment.get_environments(conf))

    assert len(environments) == 2 * 2 * 2

    # Only test the first two environments, since this is so time
    # consuming
    for env in environments[:2]:
        env.create()

        output = env.run(
            ['-c', 'import six, sys; sys.stdout.write(six.__version__)'],
            valid_return_codes=None)
        if 'six' in env._requirements:
            assert output.startswith(six.text_type(env._requirements['six']))

        output = env.run(
            ['-c', 'import colorama, sys; sys.stdout.write(colorama.__version__)'])
        assert output.startswith(six.text_type(env._requirements['colorama']))
Example #16
def test_matrix_empty():
    conf = config.Config()
    conf.environment_type = ""
    conf.pythons = [PYTHON_VER1]
    conf.matrix = {}

    # Check default environment config
    environments = list(environment.get_environments(conf, None))
    items = [env.python for env in environments]
    assert items == [PYTHON_VER1]
Example #17
def test_repo_hg(tmpdir):
    tmpdir = six.text_type(tmpdir)

    conf = config.Config()

    dvcs = tools.generate_test_repo(tmpdir,
                                    list(range(10)),
                                    dvcs_type='hg',
                                    extra_branches=[
                                        ('default~4', 'some-branch',
                                         [11, 12, 13])
                                    ])

    mirror_dir = join(tmpdir, "repo")

    def test_it(is_remote=False):
        conf.project = mirror_dir
        conf.repo = dvcs.path
        _test_generic_repo(conf,
                           tmpdir,
                           hash_range="tip:-4",
                           master="tip",
                           branch="tag5",
                           is_remote=is_remote)

        conf.branches = ['default', 'some-branch']
        branch_commits = {
            'default': [dvcs.get_hash('default'),
                        dvcs.get_hash('default~6')],
            'some-branch':
            [dvcs.get_hash('some-branch'),
             dvcs.get_hash('some-branch~6')]
        }
        _test_branches(conf, branch_commits)

    test_it()

    # local repo, so it should not have cloned it
    assert not os.path.isdir(mirror_dir)

    # try again, pretending the repo is not local
    from asv.plugins.mercurial import Hg
    old_local_method = Hg.is_local_repo
    old_url_match = Hg.url_match
    try:
        Hg.is_local_repo = classmethod(
            lambda cls, path: path != dvcs.path and old_local_method(path))
        Hg.url_match = classmethod(lambda cls, url: os.path.isdir(url))
        test_it(is_remote=True)
        assert os.path.isdir(mirror_dir)
    finally:
        Hg.is_local_repo = old_local_method
        Hg.url_match = old_url_match
Example #18
def test_environment_name_sanitization():
    conf = config.Config()
    conf.environment_type = "conda"
    conf.pythons = ["3.5"]
    conf.matrix = {
        "pip+git+http://github.com/space-telescope/asv.git": [],
    }

    # Check name sanitization
    environments = list(environment.get_environments(conf, []))
    assert len(environments) == 1
    assert environments[0].name == "conda-py3.5-pip+git+http___github.com_space-telescope_asv.git"
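For illustration only, a sanitizer consistent with the expected environment name in this test (not asv's actual implementation) could replace characters that are unsafe in directory names with underscores:

import re

def sanitize_name(name):
    # Keep alphanumerics and '+', '.', '_', '-'; everything else (here ':'
    # and '/') becomes '_', which reproduces the expected name above.
    return re.sub(r"[^0-9a-zA-Z+._-]", "_", name)

sanitize_name("pip+git+http://github.com/space-telescope/asv.git")
# -> 'pip+git+http___github.com_space-telescope_asv.git'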
Example #19
def test_presence_checks(tmpdir, monkeypatch):
    conf = config.Config()

    if WIN:
        # Tell conda to not use hardlinks: on Windows it's not possible
        # to delete hard links to files in use, which causes problems when
        # trying to clean up environments during this test
        monkeypatch.setenv(str('CONDA_ALWAYS_COPY'), str('True'))

    conf.env_dir = str(tmpdir.join("env"))

    conf.pythons = [PYTHON_VER1]
    conf.matrix = {}
    environments = list(environment.get_environments(conf, None))

    for env in environments:
        env.create()
        assert env.check_presence()

        # Check env is recreated when info file is clobbered
        info_fn = os.path.join(env._path, 'asv-env-info.json')
        data = util.load_json(info_fn)
        data['python'] = '0'
        data = util.write_json(info_fn, data)
        env._is_setup = False
        env.create()
        data = util.load_json(info_fn)
        assert data['python'] == PYTHON_VER1
        env.run(['-c', 'import os'])

        # Check env is recreated if crucial things are missing
        pip_fns = [
            os.path.join(env._path, 'bin', 'pip')
        ]
        if WIN:
            pip_fns += [
                os.path.join(env._path, 'bin', 'pip.exe'),
                os.path.join(env._path, 'Scripts', 'pip'),
                os.path.join(env._path, 'Scripts', 'pip.exe')
            ]

        some_removed = False
        for pip_fn in pip_fns:
            if os.path.isfile(pip_fn):
                some_removed = True
                os.remove(pip_fn)
        assert some_removed

        env._is_setup = False
        env.create()
        assert os.path.isfile(pip_fn)
        env.run(['-c', 'import os'])
Example #20
def test_repo_git_annotated_tag_date(tmpdir):
    tmpdir = six.text_type(tmpdir)

    dvcs = tools.generate_test_repo(tmpdir, list(range(5)), dvcs_type='git')

    conf = config.Config()
    conf.project = 'sometest'
    conf.repo = dvcs.path

    r = repo.get_repo(conf)
    d1 = r.get_date('tag1')
    d2 = r.get_date(r.get_hash_from_name('tag1'))
    assert d1 == d2
Example #21
def test_environment_select_autodetect():
    conf = config.Config()
    conf.environment_type = "conda"
    conf.pythons = ["3.4"]
    conf.matrix = {
        "six": ["1.4"],
    }

    # Check autodetect
    environments = list(environment.get_environments(conf, [":2.7"]))
    assert len(environments) == 1
    assert environments[0].python == "2.7"
    assert environments[0].tool_name in ("virtualenv", "conda")
Example #22
def two_branch_repo_case(request, tmpdir):
    r"""
    This fixture builds a repository used to check that we follow the
    first parent in the case of merges.

    The revision graph looks like this:

        @  Revision 6 (default)
        |
        | o  Revision 5 (stable)
        | |
        | o  Merge master
        |/|
        o |  Revision 4
        | |
        o |  Merge stable
        |\|
        o |  Revision 3
        | |
        | o  Revision 2
        |/
        o  Revision 1

    """
    dvcs_type = request.param
    tmpdir = six.text_type(tmpdir)
    if dvcs_type == "git":
        master = "master"
    elif dvcs_type == "hg":
        master = "default"
    dvcs = tools.generate_repo_from_ops(tmpdir, dvcs_type, [
        ("commit", 1),
        ("checkout", "stable", master),
        ("commit", 2),
        ("checkout", master),
        ("commit", 3),
        ("merge", "stable"),
        ("commit", 4),
        ("checkout", "stable"),
        ("merge", master, "Merge master"),
        ("commit", 5),
        ("checkout", master),
        ("commit", 6),
    ])

    conf = config.Config()
    conf.branches = [master, "stable"]
    conf.repo = dvcs.path
    conf.project = join(tmpdir, "repo")
    r = repo.get_repo(conf)
    return dvcs, master, r, conf
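Since two_branch_repo_case reads request.param, it is presumably declared as a pytest fixture parametrized over the DVCS types. A hedged sketch of the declaration and of a consumer (the test name below is illustrative):

@pytest.fixture(params=["git", "hg"])
def two_branch_repo_case(request, tmpdir):
    ...  # body as in the example above

def test_first_parent_is_followed(two_branch_repo_case):
    # Exercise r and conf here; the docstring above describes the expected
    # revision graph.
    dvcs, master, r, conf = two_branch_repo_case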
Example #23
    def test_it(is_remote=False):
        conf = config.Config()

        conf.project = mirror_dir
        conf.repo = dvcs.path
        _test_generic_repo(conf, tmpdir, 'master~4..master', 'master', 'tag5',
                           is_remote=is_remote)

        conf.branches = ['master', 'some-branch']
        branch_commits = {
            'master': [dvcs.get_hash('master'), dvcs.get_hash('master~6')],
            'some-branch': [dvcs.get_hash('some-branch'), dvcs.get_hash('some-branch~6')]
        }
        _test_branches(conf, branch_commits, require_describe=True)
Example #24
def test_matrix_expand_include_detect_env_type():
    conf = config.Config()
    conf.environment_type = None
    conf.pythons = ["2.7"]
    conf.matrix = {}
    conf.exclude = [{}]
    conf.include = [
        {'sys_platform': sys.platform, 'python': '2.7'},
    ]

    combinations = _sorted_dict_list(environment.iter_requirement_matrix(conf))
    expected = _sorted_dict_list([
        {'python': '2.7'},
    ])
    assert combinations == expected
Example #25
def test_matrix_existing():
    conf = config.Config()
    conf.environment_type = "existing"
    conf.pythons = ["same"]
    conf.matrix = {'foo': ['a', 'b'], 'bar': ['c', 'd']}

    # ExistingEnvironment should ignore the matrix
    environments = list(environment.get_environments(conf, None))
    items = [(env.tool_name, tuple(env.requirements.keys())) for env in environments]
    assert items == [('existing', ())]

    conf.exclude = {'environment_type': '.*'}
    environments = list(environment.get_environments(conf, None))
    items = [(env.tool_name, tuple(env.requirements.keys())) for env in environments]
    assert items == [('existing', ())]
Example #26
def test_matrix_expand_include_detect_env_type():
    conf = config.Config()
    conf.environment_type = None
    conf.pythons = [PYTHON_VER1]
    conf.matrix = {}
    conf.exclude = [{}]
    conf.include = [
        {'sys_platform': sys.platform, 'python': PYTHON_VER1},
    ]

    combinations = _sorted_dict_list(environment.iter_matrix(
        conf.environment_type, conf.pythons, conf))
    expected = _sorted_dict_list([
        {('python', None): PYTHON_VER1},
    ])
    assert combinations == expected
Example #27
def test_conda_run_executable(tmpdir):
    # test that we can run an executable (conda itself) inside a conda environment
    conf = config.Config()

    conf.env_dir = six.text_type(tmpdir.join("env"))

    conf.environment_type = "conda"
    conf.pythons = [PYTHON_VER1]
    conf.matrix = {}
    environments = list(environment.get_environments(conf, None))

    assert len(environments) == 1 * 1 * 1

    for env in environments:
        env.create()
        env.run_executable('conda', ['info'])
Example #28
def test_presence_checks(tmpdir):
    conf = config.Config()

    conf.env_dir = six.text_type(tmpdir.join("env"))

    conf.pythons = ["2.7"]
    conf.matrix = {}
    environments = list(environment.get_environments(conf))

    for env in environments:
        env.create()
        assert env.check_presence()

        # Check env is recreated when info file is clobbered
        info_fn = os.path.join(env._path, 'asv-env-info.json')
        data = util.load_json(info_fn)
        data['python'] = '3.4'
        data = util.write_json(info_fn, data)
        env._is_setup = False
        env.create()
        data = util.load_json(info_fn)
        assert data['python'] == '2.7'
        env.run(['-c', 'import os'])

        # Check env is recreated if crucial things are missing
        pip_fns = [
            os.path.join(env._path, 'bin', 'pip')
        ]
        if WIN:
            pip_fns += [
                os.path.join(env._path, 'bin', 'pip.exe'),
                os.path.join(env._path, 'Scripts', 'pip'),
                os.path.join(env._path, 'Scripts', 'pip.exe')
            ]

        some_removed = False
        for pip_fn in pip_fns:
            if os.path.isfile(pip_fn):
                some_removed = True
                os.remove(pip_fn)
        assert some_removed

        env._is_setup = False
        env.create()
        assert os.path.isfile(pip_fn)
        env.run(['-c', 'import os'])
Example #29
def test_installed_commit_hash(tmpdir):
    tmpdir = six.text_type(tmpdir)

    dvcs = generate_test_repo(tmpdir, [0], dvcs_type='git')
    commit_hash = dvcs.get_branch_hashes()[0]

    conf = config.Config()
    conf.env_dir = os.path.join(tmpdir, "env")
    conf.pythons = [PYTHON_VER1]
    conf.repo = os.path.abspath(dvcs.path)
    conf.matrix = {}
    conf.build_cache_size = 0

    repo = get_repo(conf)

    def get_env():
        return list(environment.get_environments(conf, None))[0]

    env = get_env()
    env.create()

    # Check updating installed_commit_hash
    assert env.installed_commit_hash is None
    assert env._env_vars.get('ASV_COMMIT') is None
    env.install_project(conf, repo, commit_hash)
    assert env.installed_commit_hash == commit_hash
    assert env._env_vars.get('ASV_COMMIT') == commit_hash

    env = get_env()
    assert env.installed_commit_hash == commit_hash
    assert env._env_vars.get('ASV_COMMIT') == commit_hash

    # A configuration change results in a reinstall
    env._project = "something"
    assert env.installed_commit_hash is None

    # Uninstall resets hash (but not ASV_COMMIT)
    env = get_env()
    env._uninstall_project()
    assert env.installed_commit_hash is None
    assert env._env_vars.get('ASV_COMMIT') is not None

    env = get_env()
    assert env.installed_commit_hash is None
    assert env._env_vars.get('ASV_COMMIT') is None
Example #30
def test_pypy_virtualenv(tmpdir):
    # test that we can set up a PyPy environment
    conf = config.Config()

    conf.env_dir = six.text_type(tmpdir.join("env"))

    conf.environment_type = "virtualenv"
    conf.pythons = ["pypy"]
    conf.matrix = {}
    environments = list(environment.get_environments(conf, None))

    assert len(environments) == 1

    for env in environments:
        env.create()
        output = env.run(['-c', 'import sys; print(sys.pypy_version_info)'])
        assert output.startswith(six.text_type("(major="))