Example #1
def test_timeout():
    timeout_code = r"""
import sys
import time

sys.stdout.write("Stdout before waiting\n")
sys.stderr.write("Stderr before waiting\n")
sys.stdout.flush()
sys.stderr.flush()
time.sleep(60)
sys.stdout.write("Stdout after waiting\n")
sys.stderr.write("Stderr after waiting\n")
    """

    t = time.time()
    try:
        util.check_output([
            sys.executable, "-c", timeout_code], timeout=1)
    except util.ProcessError as e:
        assert len(e.stdout.strip().split('\n')) == 1
        assert len(e.stderr.strip().split('\n')) == 1
        print(e.stdout)
        assert e.stdout.strip() == "Stdout before waiting"
        assert e.stderr.strip() == "Stderr before waiting"
    else:
        assert False, "Expected timeout exception"
    # Make sure the timeout is triggered in a sufficiently short amount of time
    assert time.time() - t < 5.0
Example #2
def test_timeout():
    timeout_code = r"""
import sys
import time

sys.stdout.write("Stdout before waiting\n")
sys.stderr.write("Stderr before waiting\n")
sys.stdout.flush()
sys.stderr.flush()
time.sleep(60)
sys.stdout.write("Stdout after waiting\n")
sys.stderr.write("Stderr after waiting\n")
    """

    t = time.time()
    try:
        util.check_output([sys.executable, "-c", timeout_code], timeout=1)
    except util.ProcessError as e:
        assert len(e.stdout.strip().split('\n')) == 1
        assert len(e.stderr.strip().split('\n')) == 1
        print(e.stdout)
        assert e.stdout.strip() == "Stdout before waiting"
        assert e.stderr.strip() == "Stderr before waiting"
    else:
        assert False, "Expected timeout exception"
    # Make sure the timeout is triggered in a sufficiently short amount of time
    assert time.time() - t < 5.0
Example #3
def test_stderr_redirect():
    # Check redirecting stderr to stdout works
    code = ("import sys;"
            "sys.stdout.write('OUT\\n');"
            "sys.stdout.flush();"
            "sys.stderr.write('ERR\\n')")
    out = util.check_output([sys.executable, "-c", code], redirect_stderr=True)
    assert out.splitlines() == ['OUT', 'ERR']
    out, err, retcode = util.check_output([sys.executable, "-c", code],
                                          return_stderr=True, redirect_stderr=True)
    assert out.splitlines() == ['OUT', 'ERR']
    assert err == ''
    assert retcode == 0
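
For readers unfamiliar with the redirect_stderr flag exercised above, here is a minimal standard-library sketch of the same merge of stderr into stdout; it assumes plain subprocess semantics rather than asv's util.check_output wrapper.

import subprocess
import sys

code = ("import sys;"
        "sys.stdout.write('OUT\\n');"
        "sys.stdout.flush();"
        "sys.stderr.write('ERR\\n')")
result = subprocess.run(
    [sys.executable, "-c", code],
    stdout=subprocess.PIPE,
    stderr=subprocess.STDOUT,  # send stderr into the same pipe as stdout
    text=True,
)
assert result.stdout.splitlines() == ['OUT', 'ERR']
assert result.returncode == 0
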
Example #4
def test_stderr_redirect():
    # Check redirecting stderr to stdout works
    code = ("import sys;"
            "sys.stdout.write('OUT\\n');"
            "sys.stdout.flush();"
            "sys.stderr.write('ERR\\n')")
    out = util.check_output([sys.executable, "-c", code], redirect_stderr=True)
    assert out.splitlines() == ['OUT', 'ERR']
    out, err, retcode = util.check_output([sys.executable, "-c", code],
                                          return_stderr=True, redirect_stderr=True)
    assert out.splitlines() == ['OUT', 'ERR']
    assert err == ''
    assert retcode == 0
Example #5
def test_timeout():
    timeout_codes = []
    timeout_codes.append(r"""
import sys
import time

sys.stdout.write("Stdout before waiting\n")
sys.stderr.write("Stderr before waiting\n")
sys.stdout.flush()
sys.stderr.flush()
time.sleep(60)
sys.stdout.write("Stdout after waiting\n")
sys.stderr.write("Stderr after waiting\n")
    """)

    # Another example, where timeout is due to a hanging sub-subprocess
    if getattr(os, 'setpgid', None):
        # only on posix
        timeout_codes.append(r"""
import sys
import time
import subprocess

sys.stdout.write("Stdout before waiting\n")
sys.stderr.write("Stderr before waiting\n")
sys.stdout.flush()
sys.stderr.flush()
subprocess.call([sys.executable, "-c",
    "import sys, subprocess; subprocess.call([sys.executable, '-c', 'import time; time.sleep(60)'])"])
sys.stdout.write("Stdout after waiting\n")
sys.stderr.write("Stderr after waiting\n")
        """)

    for timeout_code in timeout_codes:
        t = time.time()
        try:
            util.check_output([
                sys.executable, "-c", timeout_code], timeout=1)
        except util.ProcessError as e:
            assert len(e.stdout.strip().split('\n')) == 1
            assert len(e.stderr.strip().split('\n')) == 1
            print(e.stdout)
            assert e.stdout.strip() == "Stdout before waiting"
            assert e.stderr.strip() == "Stderr before waiting"
            assert e.retcode == util.TIMEOUT_RETCODE
            assert "timed out" in str(e)
        else:
            assert False, "Expected timeout exception"
        # Make sure the timeout is triggered in a sufficiently short amount of time
        assert time.time() - t < 5.0
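
The posix-only branch above (guarded by os.setpgid) exists because killing only the immediate child would leave the grandchild sleeping. Below is a minimal sketch of the process-group technique it relies on, assuming POSIX; this is an illustration, not asv's implementation.

import os
import signal
import subprocess
import sys

# Start the child in a new session so it leads its own process group.
proc = subprocess.Popen(
    [sys.executable, "-c",
     "import subprocess, sys;"
     "subprocess.call([sys.executable, '-c', 'import time; time.sleep(60)'])"],
    start_new_session=True,
)
try:
    proc.wait(timeout=1)
except subprocess.TimeoutExpired:
    # Kill the whole group, which takes the grandchild down as well.
    os.killpg(os.getpgid(proc.pid), signal.SIGTERM)
    proc.wait()
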
Example #6
def test_timeout():
    timeout_codes = []
    timeout_codes.append(r"""
import sys
import time

sys.stdout.write("Stdout before waiting\n")
sys.stderr.write("Stderr before waiting\n")
sys.stdout.flush()
sys.stderr.flush()
time.sleep(60)
sys.stdout.write("Stdout after waiting\n")
sys.stderr.write("Stderr after waiting\n")
    """)

    # Another example, where timeout is due to a hanging sub-subprocess
    timeout_codes.append(r"""
import sys
import subprocess

sys.stdout.write("Stdout before waiting\n")
sys.stderr.write("Stderr before waiting\n")
sys.stdout.flush()
sys.stderr.flush()
subprocess.call([
    sys.executable,
    "-c",
    "import sys, subprocess; subprocess.call("
    "[sys.executable, '-c', 'import time; time.sleep(360)'])"
])
sys.stdout.write("Stdout after waiting\n")
sys.stderr.write("Stderr after waiting\n")
    """)

    for timeout_code in timeout_codes:
        t = time.time()
        try:
            util.check_output([sys.executable, "-c", timeout_code], timeout=1)
        except util.ProcessError as e:
            assert len(e.stdout.strip().split('\n')) == 1
            assert len(e.stderr.strip().split('\n')) == 1
            print(e.stdout)
            assert e.stdout.strip() == "Stdout before waiting"
            assert e.stderr.strip() == "Stderr before waiting"
            assert e.retcode == util.TIMEOUT_RETCODE
            assert "timed out" in str(e)
        else:
            assert False, "Expected timeout exception"
        # Make sure the timeout is triggered in a sufficiently short amount of time
        assert time.time() - t < 5.0
Example #7
    def _setup(self):
        log.info("Creating oggm conda environment for {0}".format(self.name))

        try:
            conda = _find_conda()
        except IOError as e:
            raise util.UserError(str(e))

        env_file = tempfile.NamedTemporaryFile(mode="w",
                                               delete=False,
                                               suffix=".yml")
        try:
            pyver = str(self._python).replace(".", "")[:2]
            oggm_env = OGGM_CONDA_ENVS[pyver]
            req = requests.get(OGGM_CONDA_ENV_URL.format(oggm_env))
            req.raise_for_status()
            env_text = req.text

            for line in env_text.splitlines():
                if line.startswith("prefix:") or self._has_requirement(line):
                    continue
                elif line.startswith("name:"):
                    env_file.write("name: {0}\n".format(self.name))
                else:
                    env_file.write(line + "\n")

            conda_args, pip_args = self._get_requirements(conda)
            env_file.writelines(('  - %s\n' % s for s in conda_args))
            if pip_args:
                env_file.write('  - pip:\n')
                env_file.writelines(('    - %s\n' % s for s in pip_args))

            env_file.close()

            util.check_output([conda] + [
                'env', 'create', '-f', env_file.name, '-p', self._path,
                '--force'
            ])
        except Exception as exc:
            if os.path.isfile(env_file.name):
                with open(env_file.name, "r") as f:
                    text = f.read()
                log.info(
                    "oggm conda env create failed: in {} with:\n{}".format(
                        self._path, text))
            raise
        finally:
            os.unlink(env_file.name)
Example #8
    def _nox_prep_env(self, setup: bool = False) -> None:
        message = f"Running Nox environment update for: {self.name}"
        log.info(message)

        build_root_path = Path(self._build_root)
        env_path = Path(self._path)

        def copy_asv_files(src_parent: Path, dst_parent: Path) -> None:
            """For copying between self._path and a temporary cache."""
            asv_files = list(src_parent.glob("asv*"))
            # build_root_path.name usually == "project" .
            asv_files += [src_parent / build_root_path.name]
            for src_path in asv_files:
                dst_path = dst_parent / src_path.name
                if not dst_path.exists():
                    # Only cache-ing in case Nox has rebuilt the env @
                    #  self._path. If the dst_path already exists: rebuilding
                    #  hasn't happened. Also a non-issue when copying in the
                    #  reverse direction because the cache dir is temporary.
                    if src_path.is_dir():
                        func = copytree
                    else:
                        func = copy2
                    func(src_path, dst_path)

        with TemporaryDirectory(prefix="nox_asv_cache_") as asv_cache:
            asv_cache_path = Path(asv_cache)
            if setup:
                noxfile = self.setup_noxfile
            else:
                # Cache all of ASV's files as Nox may remove and re-build the environment.
                copy_asv_files(env_path, asv_cache_path)
                # Get location of noxfile in cache.
                noxfile_original = (build_root_path / self._repo_subdir /
                                    self.noxfile_rel_path)
                noxfile_subpath = noxfile_original.relative_to(
                    build_root_path.parent)
                noxfile = asv_cache_path / noxfile_subpath

            nox_cmd = [
                "nox",
                f"--noxfile={noxfile}",
                # Place the env in the ASV env directory, instead of the default.
                f"--envdir={env_path.parent}",
                f"--session={self.nox_session_name}",
                f"--python={self._python}",
                "--install-only",
                "--no-error-on-external-run",
                "--verbose",
            ]

            _ = asv_util.check_output(nox_cmd)
            if not env_path.is_dir():
                message = f"Expected Nox environment not found: {env_path}"
                log.error(message)
                raise RuntimeError(message)

            if not setup:
                # Restore ASV's files from the cache (if necessary).
                copy_asv_files(asv_cache_path, env_path)
Example #9
    def _get_nox_session_name(self, python: str) -> str:
        nox_cmd_substring = (f"--noxfile={self.setup_noxfile} "
                             f"--session={self.nox_session_name} "
                             f"--python={python}")

        list_output = asv_util.check_output(
            ["nox", "--list", *nox_cmd_substring.split(" ")],
            display_error=False,
            dots=False,
        )
        list_output = list_output.split("\n")
        list_matches = list(filter(lambda s: s.startswith("*"), list_output))
        matches_count = len(list_matches)

        if matches_count == 0:
            message = f"No Nox sessions found for: {nox_cmd_substring} ."
            log.error(message)
            raise RuntimeError(message)
        elif matches_count > 1:
            message = (
                f"Ambiguous - >1 Nox session found for: {nox_cmd_substring} .")
            log.error(message)
            raise RuntimeError(message)
        else:
            line = list_matches[0]
            session_name = line.split(" ")[1]
            assert isinstance(session_name, str)
            return session_name
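
The filtering above relies on nox --list marking the sessions selected by the given filters with a leading "*". A minimal sketch of that parsing against a hypothetical captured listing (the session name and surrounding text here are illustrative assumptions, not real Nox output):

# Hypothetical output of "nox --list" with session filters applied;
# selected sessions are prefixed with "*".
list_output = """Sessions defined in noxfile.py:

* tests-3.10 -> Run the test suite.
- lint -> Run the linters.
""".split("\n")

list_matches = [line for line in list_output if line.startswith("*")]
assert len(list_matches) == 1
session_name = list_matches[0].split(" ")[1]
assert session_name == "tests-3.10"
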
Example #10
def test_exception():
    code = r"""
import sys
sys.stdout.write("Stdout before error\n")
sys.stderr.write("Stderr before error\n")
sys.exit(1)
"""
    try:
        util.check_output([sys.executable, "-c", code])
    except util.ProcessError as e:
        assert len(e.stdout.strip().split('\n')) == 1
        assert len(e.stderr.strip().split('\n')) == 1
        assert e.stdout.strip() == "Stdout before error"
        assert e.stderr.strip() == "Stderr before error"
    else:
        assert False, "Expected exception"
Example #11
def test_find(capfd, tmpdir):
    values = [
        (None, None),
        (1, 1),
        (3, 1),
        (None, 1),
        (6, None),
        (5, 1),
        (6, 1),
        (6, 1),
        (6, 6),
        (6, 6),
    ]

    tmpdir, local, conf, machine_file = generate_basic_conf(
        tmpdir, values=values, dummy_packages=False)

    # Test find at least runs
    tools.run_asv_with_conf(conf,
                            'find',
                            "master~5..master",
                            "params_examples.track_find_test",
                            _machine_file=machine_file)

    # Check it found the first commit after the initially tested one
    output, err = capfd.readouterr()

    regression_hash = check_output([which('git'), 'rev-parse', 'master^'],
                                   cwd=conf.repo)

    assert "Greatest regression found: {0}".format(
        regression_hash[:8]) in output
Example #12
def test_large_output():
    # More data than a pipe buffer can hold
    data = util.check_output([
        sys.executable, "-c",
        "import sys; [sys.stdout.write('x'*1000) for j in range(5000)]"
    ])
    assert data == 'x' * 5000000
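
The 5 MB payload above exceeds the OS pipe buffer (typically around 64 KiB), so the wrapper has to drain the pipe while the child is still running; otherwise the child blocks on write and the parent blocks on wait, a classic deadlock. A minimal standard-library sketch of the safe pattern, assuming plain subprocess semantics:

import subprocess
import sys

proc = subprocess.Popen(
    [sys.executable, "-c",
     "import sys; [sys.stdout.write('x'*1000) for j in range(5000)]"],
    stdout=subprocess.PIPE,
    text=True,
)
# communicate() drains stdout concurrently; wait() alone would deadlock here.
out, _ = proc.communicate()
assert out == 'x' * 5000000
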
Example #13
    def run_git(self, args, chdir=True, **kwargs):
        if chdir:
            cwd = self.path
        else:
            cwd = None
        kwargs['cwd'] = cwd
        return util.check_output([self._git] + args, **kwargs)
Example #14
def test_no_timeout():
    # Check that timeout=None is allowed.
    code = "import time; time.sleep(0.05)"
    out, err, retcode = util.check_output([sys.executable, "-c", code], timeout=None,
                                          return_stderr=True)
    assert out == ''
    assert err == ''
    assert retcode == 0
Example #15
def test_no_timeout():
    # Check that timeout=None is allowed.
    code = "import time; time.sleep(0.05)"
    out, err, retcode = util.check_output([sys.executable, "-c", code], timeout=None,
                                          return_stderr=True)
    assert out == ''
    assert err == ''
    assert retcode == 0
Example #16
File: tools.py Project: hamogu/asv
    def _run_git(self, args, chdir=True, **kwargs):
        if chdir:
            cwd = self.path
        else:
            cwd = None
        kwargs['cwd'] = cwd
        return util.check_output(
            [self._git] + args, **kwargs)
Example #17
def test_exception():
    code = r"""
import sys
sys.stdout.write("Stdout before error\n")
sys.stderr.write("Stderr before error\n")
sys.exit(1)
"""
    try:
        util.check_output([
            sys.executable, "-c", code])
    except util.ProcessError as e:
        assert len(e.stdout.strip().split('\n')) == 1
        assert len(e.stderr.strip().split('\n')) == 1
        assert e.stdout.strip() == "Stdout before error"
        assert e.stderr.strip() == "Stderr before error"
    else:
        assert False, "Expected exception"
Example #18
def test_popen():
    # Check that return_popen=True gives back a usable Popen object.
    popen = util.check_output([sys.executable, "-c", "pass"], return_popen=True)
    popen.wait()

    assert popen.returncode == 0

    # close handles
    popen.communicate()
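
The final communicate() call above is what drains and closes the pipe handles once the process has exited. The same pattern with a plain subprocess.Popen, given here as a sketch under that assumption:

import subprocess
import sys

popen = subprocess.Popen([sys.executable, "-c", "pass"],
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE)
popen.wait()
assert popen.returncode == 0

# communicate() drains and closes stdout/stderr, avoiding ResourceWarning.
popen.communicate()
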
Example #19
    def run(self, args, **kwargs):
        """
        Run the python executable from our environment.
        """
        if self._outdir is None:
            raise Exception('Cannot run without out dir')

        return util.check_output([path.join(self._outdir, 'bin', 'python')] +
                                 args, **kwargs)
Example #20
def test_output_timeout():
    # Check that timeout is determined based on last output, not based
    # on start time.
    code = r"""
import time
import sys
for j in range(3):
    time.sleep(0.5)
    sys.stdout.write('.')
    sys.stdout.flush()
"""
    output = util.check_output([sys.executable, "-c", code], timeout=0.75)
    assert output == '.'*3

    try:
        util.check_output([sys.executable, "-c", code], timeout=0.25)
    except util.ProcessError as e:
        assert e.retcode == util.TIMEOUT_RETCODE
    else:
        assert False, "Expected exception"
Example #21
def test_output_timeout():
    # Check that timeout is determined based on last output, not based
    # on start time.
    code = r"""
import time
import sys
for j in range(3):
    sys.stdout.write('.')
    sys.stdout.flush()
    time.sleep(1.0)
"""
    output = util.check_output([sys.executable, "-c", code], timeout=1.5)
    assert output == '.' * 3

    try:
        util.check_output([sys.executable, "-c", code], timeout=0.5)
    except util.ProcessError as e:
        assert e.retcode == util.TIMEOUT_RETCODE
    else:
        assert False, "Expected exception"
Example #22
def test_exception():
    code = r"""
import sys
sys.stdout.write("Stdout before error\n")
sys.stderr.write("Stderr before error\n")
sys.exit(1)
"""
    try:
        util.check_output([
            sys.executable, "-c", code])
    except util.ProcessError as e:
        assert len(e.stdout.strip().split('\n')) == 1
        err = [x for x in e.stderr.strip().split('\n')
               if not x.startswith('Coverage')]
        assert len(err) == 1
        assert e.stdout.strip() == "Stdout before error"
        assert err[0] == "Stderr before error"
        assert e.retcode == 1
        assert "returned non-zero exit status 1" in str(e)
    else:
        assert False, "Expected exception"
Example #23
def test_env():
    code = r"""
import os
print(os.environ['TEST_ASV_FOO'])
print(os.environ['TEST_ASV_BAR'])
"""
    env = os.environ.copy()
    env['TEST_ASV_FOO'] = 'foo'
    # Force unicode string on Python 2
    env['TEST_ASV_BAR'] = u'bar'
    output = util.check_output([sys.executable, "-c", code], env=env)
    assert output.splitlines() == ['foo', 'bar']
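
Note that env replaces the child's entire environment, which is why the test starts from a copy of os.environ rather than a fresh dict. A minimal sketch of the same pattern with plain subprocess (TEST_ASV_FOO is just the illustrative variable name used above):

import os
import subprocess
import sys

env = os.environ.copy()       # keep PATH, HOME, etc. intact
env['TEST_ASV_FOO'] = 'foo'   # add the override on top
out = subprocess.run(
    [sys.executable, "-c", "import os; print(os.environ['TEST_ASV_FOO'])"],
    env=env, capture_output=True, text=True, check=True,
).stdout
assert out.splitlines() == ['foo']
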
Example #24
def test_exception():
    code = r"""
import sys
sys.stdout.write("Stdout before error\n")
sys.stderr.write("Stderr before error\n")
sys.exit(1)
"""
    try:
        util.check_output([
            sys.executable, "-c", code])
    except util.ProcessError as e:
        assert len(e.stdout.strip().split('\n')) == 1
        err = [x for x in e.stderr.strip().split('\n')
               if not x.startswith('Coverage')]
        assert len(err) == 1
        assert e.stdout.strip() == "Stdout before error"
        assert err[0] == "Stderr before error"
        assert e.retcode == 1
        assert "returned non-zero exit status 1" in str(e)
    else:
        assert False, "Expected exception"
Example #25
def test_env():
    code = r"""
import os
print(os.environ['TEST_ASV_FOO'])
print(os.environ['TEST_ASV_BAR'])
"""
    env = os.environ.copy()
    env['TEST_ASV_FOO'] = 'foo'
    # Force unicode string on Python 2
    env['TEST_ASV_BAR'] = u'bar'
    output = util.check_output([sys.executable, "-c", code], env=env)
    assert output.splitlines() == ['foo', 'bar']
Example #26
    def install(self, package):
        # Build the checked-out project and its dependencies
        try:
            self._outdir = util.check_output(
                ['nix-build', '--show-trace', '-o',
                 path.join(self._envdir, self.name.replace('/', '_')),
                 '-E', self._expr()],
                cwd=self._build_root).strip()
        except util.ProcessError as e:
            import sys
            sys.stderr.write('STDOUT\n' + e.stdout + '\nEND STDOUT\n')
            sys.stderr.write('STDERR\n' + e.stderr + '\nEND STDERR\n')
            raise e
Example #27
def test_conda_channel_addition(tmpdir,
                                channel_list,
                                expected_channel):
    # test that we can add conda channels to environments
    # and that we respect the specified priority order
    # of channels
    conf = config.Config()
    conf.env_dir = six.text_type(tmpdir.join("env"))
    conf.environment_type = "conda"
    conf.pythons = [PYTHON_VER1]
    conf.matrix = {}
    # these have to be valid channels
    # available for online access
    conf.conda_channels = channel_list
    environments = list(environment.get_environments(conf, None))

    # should have one environment per Python version
    assert len(environments) == 1

    # create the environments
    for env in environments:
        env.create()
        # generate JSON output from conda list
        # and parse to verify added channels
        # for current env
        # (conda info would be more direct, but
        # seems to reflect contents of condarc file,
        # which we are intentionally trying not to modify)
        conda = util.which('conda')
        print("\n**conda being used:", conda)
        out_str = six.text_type(util.check_output([conda,
                                                    'list',
                                                    '-p',
                                                    os.path.normpath(env._path),
                                                    '--json']))
        json_package_list = json.loads(out_str)
        print(json_package_list)
        for installed_package in json_package_list:
            # check only explicitly installed packages
            if installed_package['name'] not in ('python',):
                continue
            print(installed_package)
            assert installed_package['channel'] == expected_channel
Example #28
def test_conda_channel_addition(tmpdir,
                                channel_list,
                                expected_channel):
    # test that we can add conda channels to environments
    # and that we respect the specified priority order
    # of channels
    conf = config.Config()
    conf.env_dir = six.text_type(tmpdir.join("env"))
    conf.environment_type = "conda"
    conf.pythons = [PYTHON_VER1]
    conf.matrix = {}
    # these have to be valid channels
    # available for online access
    conf.conda_channels = channel_list
    environments = list(environment.get_environments(conf, None))

    # should have one environment per Python version
    assert len(environments) == 1

    # create the environments
    for env in environments:
        env.create()
        # generate JSON output from conda list
        # and parse to verify added channels
        # for current env
        # (conda info would be more direct, but
        # seems to reflect contents of condarc file,
        # which we are intentionally trying not to modify)
        conda = util.which('conda')
        print("\n**conda being used:", conda)
        out_str = six.text_type(util.check_output([conda,
                                                    'list',
                                                    '-p',
                                                    os.path.normpath(env._path),
                                                    '--json']))
        json_package_list = json.loads(out_str)
        print(json_package_list)
        for installed_package in json_package_list:
            # check only explicitly installed packages
            if installed_package['name'] not in ('python',):
                continue
            print(installed_package)
            assert installed_package['channel'] == expected_channel
Example #29
def test_find(capfd, basic_conf):
    tmpdir, local, conf, machine_file = basic_conf

    if WIN and os.path.basename(sys.argv[0]).lower().startswith('py.test'):
        # Multiprocessing in spawn mode can result in problems with py.test
        # Find.run calls Setup.run in parallel mode by default
        pytest.skip("Multiprocessing spawn mode on Windows not safe to run "
                    "from py.test runner.")

    # Test find at least runs
    tools.run_asv_with_conf(conf, 'find', "master~5..master", "params_examples.track_find_test",
                            _machine_file=machine_file)

    # Check it found the first commit after the initially tested one
    output, err = capfd.readouterr()

    regression_hash = check_output(
        [which('git'), 'rev-parse', 'master^'], cwd=conf.repo)

    assert "Greatest regression found: {0}".format(regression_hash[:8]) in output
Example #30
def test_find(capfd, basic_conf):
    tmpdir, local, conf, machine_file = basic_conf

    if WIN and os.path.basename(sys.argv[0]).lower().startswith('py.test'):
        # Multiprocessing in spawn mode can result in problems with py.test
        # Find.run calls Setup.run in parallel mode by default
        pytest.skip("Multiprocessing spawn mode on Windows not safe to run "
                    "from py.test runner.")

    # Test find at least runs
    tools.run_asv_with_conf(conf, 'find', "master~5..master", "params_examples.track_find_test",
                            _machine_file=machine_file)

    # Check it found the first commit after the initially tested one
    output, err = capfd.readouterr()

    regression_hash = check_output(
        [which('git'), 'rev-parse', 'master^'], cwd=conf.repo)

    assert "Greatest regression found: {0}".format(regression_hash[:8]) in output
Example #31
def test_find_timeout(capfd, tmpdir):
    values = [(1, 0), (1, 0), (1, -1)]

    tmpdir, local, conf, machine_file = generate_basic_conf(
        tmpdir, values=values, dummy_packages=False)

    # Test find at least runs
    tools.run_asv_with_conf(conf,
                            'find',
                            "-e",
                            "master",
                            "params_examples.time_find_test_timeout",
                            _machine_file=machine_file)

    # Check it found the first commit after the initially tested one
    output, err = capfd.readouterr()

    regression_hash = check_output([which('git'), 'rev-parse', 'master'],
                                   cwd=conf.repo)

    assert "Greatest regression found: {0}".format(
        regression_hash[:8]) in output
    assert "asv: benchmark timed out (timeout 1.0s)" in output
Example #32
    def run_executable(self, executable, args, **kwargs):
        env = dict(kwargs.pop('env', os.environ),
                   PYTHONNOUSERSITE='True').copy()
        env.update(self._env_vars)

        # Insert bin dirs to PATH
        if 'PATH' in env:
            paths = env['PATH'].split(os.pathsep)
        else:
            paths = []

        if util.WIN:
            subpaths = [
                'Library\\mingw-w64\\bin', 'Library\\bin', 'Library\\usr\\bin',
                'Scripts'
            ]
            for sub in subpaths[::-1]:
                paths.insert(0, os.path.join(self._path, sub))
            paths.insert(0, self._path)
        else:
            paths.insert(0, os.path.join(self._path, 'bin'))

        # Discard PYTHONPATH, which can easily break the environment isolation
        if 'ASV_PYTHONPATH' in env:
            env['PYTHONPATH'] = env['ASV_PYTHONPATH']
            env.pop('ASV_PYTHONPATH', None)
        else:
            env.pop('PYTHONPATH', None)

        # When running pip, we need to set PIP_USER to false, as --user (which
        # may have been set from a pip config file) is incompatible with virtualenvs.
        kwargs['env'] = dict(env,
                             PIP_USER=str('false'),
                             PATH=str(os.pathsep.join(paths)))
        conda_cmd = self.activate_conda(executable, args)

        return util.check_output(conda_cmd, **kwargs)
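
The PATH handling above follows a common pattern: prepend the environment's bin directories so its executables win lookup, and drop PYTHONPATH so the host interpreter's packages cannot leak in. A minimal sketch under those assumptions, with env_bin as a hypothetical directory:

import os

env = dict(os.environ)
env_bin = '/tmp/asv-env/bin'  # hypothetical environment bin directory
paths = env['PATH'].split(os.pathsep) if 'PATH' in env else []
paths.insert(0, env_bin)      # environment executables take precedence
env['PATH'] = os.pathsep.join(paths)
env.pop('PYTHONPATH', None)   # keep the environment isolated
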
Example #33
def test_find_inverted(capfd, tmpdir):
    values = [
        (5, 6),
        (6, 6),
        (6, 6),
        (6, 1),
        (6, 1),
    ]

    tmpdir, local, conf, machine_file = generate_basic_conf(
        tmpdir, values=values, dummy_packages=False)
    tools.run_asv_with_conf(*[
        conf, 'find', "-i", "master~4..master",
        "params_examples.track_find_test"
    ],
                            _machine_file=machine_file)

    output, err = capfd.readouterr()

    regression_hash = check_output([which('git'), 'rev-parse', 'master^'],
                                   cwd=conf.repo)

    formatted = "Greatest improvement found: {0}".format(regression_hash[:8])
    assert formatted in output
Example #34
    def _nox_prep_env(self, setup: bool = False) -> None:
        message = f"Running Nox environment update for: {self.name}"
        log.info(message)

        build_root_path = Path(self._build_root)
        env_path = Path(self._path)

        def copy_asv_files(src_parent: Path, dst_parent: Path) -> None:
            """For copying between self._path and a temporary cache."""
            asv_files = list(src_parent.glob("asv*"))
            # build_root_path.name usually == "project" .
            asv_files += [src_parent / build_root_path.name]
            for src_path in asv_files:
                dst_path = dst_parent / src_path.name
                if not dst_path.exists():
                    # Only cache-ing in case Nox has rebuilt the env @
                    #  self._path. If the dst_path already exists: rebuilding
                    #  hasn't happened. Also a non-issue when copying in the
                    #  reverse direction because the cache dir is temporary.
                    if src_path.is_dir():
                        func = copytree
                    else:
                        func = copy2
                    func(src_path, dst_path)

        with TemporaryDirectory(prefix="nox_asv_cache_") as asv_cache:
            asv_cache_path = Path(asv_cache)
            if setup:
                noxfile_path = self.setup_noxfile
            else:
                # Cache all of ASV's files as Nox may remove and re-build the environment.
                copy_asv_files(env_path, asv_cache_path)
                # Get location of noxfile in cache.
                noxfile_path_build = (build_root_path / self._repo_subdir /
                                      NOXFILE_REL_PATH)
                noxfile_path = asv_cache_path / noxfile_path_build.relative_to(
                    build_root_path.parent)

            nox_cmd = [
                "nox",
                f"--noxfile={noxfile_path}",
                f"--envdir={env_path.parent}",
                f"--session={SESSION_NAME}",
                f"--python={self._python}",
                "--install-only",
                "--no-error-on-external-run",
                "--verbose",
            ]

            _ = asv_util.check_output(nox_cmd)
            if not env_path.is_dir():
                message = f"Expected Nox environment not found: {env_path}"
                log.error(message)

            if not setup:
                # Restore ASV's files from the cache (if necessary).
                copy_asv_files(asv_cache_path, env_path)

        if (not setup) and self._extra_reqs_path.is_file():
            # No need during initial ASV setup - this will be run again before
            #  any benchmarks are run.
            cmd = f"{self.conda} env update -f {self._extra_reqs_path} -p {env_path}"
            asv_util.check_output(cmd.split(" "))
Example #35
def test_check_output_exit_code(capsys):
    with pytest.raises(util.ProcessError):
        util.check_output([sys.executable, '-c', 'import sys; sys.exit(1)'])
    out, err = capsys.readouterr()
    assert '(exit status 1)' in out
Example #36
def test_check_output_exit_code(capsys):
    with pytest.raises(util.ProcessError):
        util.check_output([sys.executable, '-c', 'import sys; sys.exit(1)'])
    out, err = capsys.readouterr()
    assert '(exit status 1)' in out
Example #37
    def _prep_env(self) -> None:
        """Run the custom environment script(s) and switch to using that environment."""
        message = f"Running delegated environment management for: {self.name}"
        log.info(message)
        env_path = Path(self._path)

        def copy_asv_files(src_parent: Path, dst_parent: Path) -> None:
            """For copying between self._path and a temporary cache."""
            asv_files = list(src_parent.glob("asv*"))
            # build_root_path.name usually == "project" .
            asv_files += [src_parent / Path(self._build_root).name]
            for src_path in asv_files:
                dst_path = dst_parent / src_path.name
                if not dst_path.exists():
                    # Only caching in case the environment has been rebuilt.
                    #  If the dst_path already exists: rebuilding hasn't
                    #  happened. Also a non-issue when copying in the reverse
                    #  direction because the cache dir is temporary.
                    if src_path.is_dir():
                        func = copytree
                    else:
                        func = copy2
                    func(src_path, dst_path)

        with TemporaryDirectory(prefix="delegated_asv_cache_") as asv_cache:
            asv_cache_path = Path(asv_cache)
            # Cache all of ASV's files as delegated command may remove and
            #  re-build the environment.
            copy_asv_files(env_path.resolve(), asv_cache_path)

            # Adapt the build_dir to the cache location.
            build_root_path = Path(self._build_root)
            build_dir_original = build_root_path / self._repo_subdir
            build_dir_subpath = build_dir_original.relative_to(
                build_root_path.parent)
            build_dir = asv_cache_path / build_dir_subpath

            # Run the script(s) for delegated environment creation/updating.
            # (An adaptation of self._interpolate_and_run_commands).
            for command, env, return_codes, cwd in self._env_commands:
                local_envs = dict(environ)
                local_envs.update(env)
                if cwd is None:
                    cwd = str(build_dir)
                _ = asv_util.check_output(
                    command,
                    timeout=self._install_timeout,
                    cwd=cwd,
                    env=local_envs,
                    valid_return_codes=return_codes,
                )

            # Replace the env that ASV created with a symlink to the env
            #  created/updated by the custom script.
            delegated_env_path = sorted(
                self._delegated_env_parent.glob("*"),
                key=getmtime,
                reverse=True,
            )[0]
            if env_path.resolve() != delegated_env_path:
                try:
                    env_path.unlink(missing_ok=True)
                except IsADirectoryError:
                    rmtree(env_path)
                env_path.symlink_to(delegated_env_path,
                                    target_is_directory=True)

            # Check that environment exists.
            try:
                env_path.resolve(strict=True)
            except FileNotFoundError:
                message = f"Path does not resolve to environment: {env_path}"
                log.error(message)
                raise RuntimeError(message)

            # Restore ASV's files from the cache (if necessary).
            copy_asv_files(asv_cache_path, env_path.resolve())

            # Record new environment information in properties.
            self._update_info()