Example #1
def generate_result_dir(tmpdir, dvcs, values, branches=None):
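    """Build a synthetic asv results tree for the given commit -> value
    mapping: machine.json, one result file per commit for a single
    'time_func' benchmark, and benchmarks.json.  Returns the Config."""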
    result_dir = join(tmpdir, "results")
    os.makedirs(result_dir)
    html_dir = join(tmpdir, "html")
    machine_dir = join(result_dir, "tarzan")
    os.makedirs(machine_dir)

    if branches is None:
        branches = [None]

    conf = config.Config.from_json({
        'results_dir': result_dir,
        'html_dir': html_dir,
        'repo': dvcs.path,
        'project': 'asv',
        'branches': branches or [None],
    })
    repo = get_repo(conf)

    util.write_json(join(machine_dir, "machine.json"), {
        'machine': 'tarzan',
        'version': 1,
    })

    timestamp = datetime.datetime.utcnow()

    benchmark_version = sha256(os.urandom(16)).hexdigest()

    params = None
    param_names = None
    for commit, value in values.items():
        if isinstance(value, dict):
            params = value["params"]
        result = Results({"machine": "tarzan"}, {}, commit,
                         repo.get_date_from_name(commit), "2.7", None)
        value = {
            'result': [value],
            'params': [],
            'started_at': timestamp,
            'ended_at': timestamp,
            'stats': None,
            'samples': None,
            'number': None,
        }
        result.add_result("time_func", value, benchmark_version)
        result.save(result_dir)

    if params:
        param_names = ["param{}".format(k) for k in range(len(params))]

    util.write_json(join(result_dir, "benchmarks.json"), {
        "time_func": {
            "name": "time_func",
            "params": params or [],
            "param_names": param_names or [],
            "version": benchmark_version,
        }
    },
                    api_version=1)
    return conf
Example #2
def locked_cache_dir(config, cache_key, timeout=900, tag=None):
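    """Yield a cache directory under the pytest cache dir, guarded by a
    file lock.  The cached contents are wiped whenever the stored tag
    (asv version plus user tag) no longer matches."""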
    base_dir = config.cache.makedir(cache_key)

    lockfile = join(six.text_type(base_dir), 'lock')
    cache_dir = join(six.text_type(base_dir), 'cache')

    lock = FileLock(lockfile)
    lock.acquire(timeout=timeout)
    try:
        # Clear cache dir contents if it was generated with a different
        # asv version
        tag_fn = join(six.text_type(base_dir), 'tag.json')
        tag_content = [asv.__version__, repr(tag)]
        if os.path.isdir(cache_dir):
            try:
                if util.load_json(tag_fn) != tag_content:
                    raise ValueError()
            except (IOError, ValueError, util.UserError):
                shutil.rmtree(cache_dir)

        if not os.path.isdir(cache_dir):
            os.makedirs(cache_dir)

        yield cache_dir

        util.write_json(tag_fn, tag_content)
    finally:
        lock.release()
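
Since the function body yields, it is presumably wrapped as a context manager in the surrounding module (the decorator is not shown in this snippet). A minimal usage sketch under that assumption, with a hypothetical cache key and payload:

# Sketch only: assumes locked_cache_dir is decorated with
# @contextlib.contextmanager in the real module, and that `config` is the
# pytest config object providing config.cache.makedir().
with locked_cache_dir(config, 'example-cache-key', timeout=60, tag='v1') as cache_dir:
    # Only one process at a time populates or reads the cache contents.
    util.write_json(join(cache_dir, 'data.json'), {'cached': True})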
Example #3
def locked_cache_dir(config, cache_key, timeout=900, tag=None):
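    """Yield a lock-guarded cache directory.  When real file locking is
    unavailable (LockFile is DummyLock), the cache key is suffixed with
    the pytest-xdist worker name so parallel workers get separate,
    uncontended directories."""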
    if LockFile is DummyLock:
        cache_key = cache_key + os.environ.get('PYTEST_XDIST_WORKER', '')

    base_dir = config.cache.makedir(cache_key)

    lockfile = join(six.text_type(base_dir), 'lock')
    cache_dir = join(six.text_type(base_dir), 'cache')

    lock = LockFile(lockfile)
    lock.acquire(timeout=timeout)
    try:
        # Clear cache dir contents if it was generated with a different
        # asv version
        tag_fn = join(six.text_type(base_dir), 'tag.json')
        tag_content = [asv.__version__, repr(tag)]
        if os.path.isdir(cache_dir):
            try:
                if util.load_json(tag_fn) != tag_content:
                    raise ValueError()
            except (IOError, ValueError, util.UserError):
                shutil.rmtree(cache_dir)

        if not os.path.isdir(cache_dir):
            os.makedirs(cache_dir)

        yield cache_dir

        util.write_json(tag_fn, tag_content)
    finally:
        lock.release()
Example #4
File: tools.py Project: philpep/asv
def generate_result_dir(tmpdir, dvcs, values, branches=None):
    result_dir = join(tmpdir, "results")
    os.makedirs(result_dir)
    html_dir = join(tmpdir, "html")
    machine_dir = join(result_dir, "tarzan")
    os.makedirs(machine_dir)

    if branches is None:
        branches = [None]

    conf = config.Config.from_json({
        'results_dir': result_dir,
        'html_dir': html_dir,
        'repo': dvcs.path,
        'project': 'asv',
        'branches': branches or [None],
    })
    repo = get_repo(conf)

    util.write_json(join(machine_dir, "machine.json"), {
        'machine': 'tarzan',
        'version': 1,
    })

    timestamp = datetime.datetime.utcnow()

    benchmark_version = sha256(os.urandom(16)).hexdigest()

    params = None
    param_names = None
    for commit, value in values.items():
        if isinstance(value, dict):
            params = value["params"]
        result = Results({"machine": "tarzan"}, {}, commit,
                         repo.get_date_from_name(commit), "2.7", None)
        value = {
            'result': [value],
            'params': [],
            'started_at': timestamp,
            'ended_at': timestamp,
            'stats': None,
            'samples': None,
            'number': None,
        }
        result.add_result("time_func", value, benchmark_version)
        result.save(result_dir)

    if params:
        param_names = ["param{}".format(k) for k in range(len(params))]

    util.write_json(join(result_dir, "benchmarks.json"), {
        "time_func": {
            "name": "time_func",
            "params": params or [],
            "param_names": param_names or [],
            "version": benchmark_version,
        }
    }, api_version=1)
    return conf
Example #5
def test_json_non_ascii(tmpdir):
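    """write_json/load_json must round-trip non-ASCII keys and values."""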
    non_ascii_data = [{'😼': '難', 'ä': 3}]

    fn = os.path.join(str(tmpdir), "nonascii.json")
    util.write_json(fn, non_ascii_data)
    data = util.load_json(fn)

    assert data == non_ascii_data
Example #7
def test_update_simple(monkeypatch, generate_result_dir):
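    """'asv update' must rename over-long result file names to a hashed
    form while preserving their env_name."""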
    conf, repo, commits = generate_result_dir(5 * [1] + 5 * [10])

    basedir = os.path.abspath(os.path.dirname(conf.results_dir))
    local = os.path.abspath(os.path.dirname(__file__))

    shutil.copyfile(os.path.join(local, 'asv-machine.json'),
                    os.path.join(basedir, 'asv-machine.json'))
    machine_file = 'asv-machine.json'

    conf_values = {}
    for key in ['results_dir', 'html_dir', 'repo', 'project', 'branches']:
        conf_values[key] = getattr(conf, key)

    util.write_json(os.path.join(basedir, 'asv.conf.json'),
                    conf_values,
                    api_version=1)

    # Check renaming of long result files
    machine_dir = os.path.join(basedir, 'results', 'tarzan')

    result_fns = [
        fn for fn in sorted(os.listdir(machine_dir)) if fn != 'machine.json'
    ]
    long_result_fn = 'abbacaca-' + 'a' * 128 + '.json'
    hash_result_fn = ('abbacaca-env-' + hashlib.md5(b'a' * 128).hexdigest() +
                      '.json')

    shutil.copyfile(os.path.join(machine_dir, result_fns[0]),
                    os.path.join(machine_dir, long_result_fn))

    old_env_name = util.load_json(os.path.join(machine_dir,
                                               result_fns[0]))['env_name']

    # Should succeed
    monkeypatch.chdir(basedir)
    tools.run_asv_with_conf(conf, "update", _machine_file=machine_file)

    # Check file rename
    items = [
        fn for fn in sorted(os.listdir(machine_dir)) if fn != 'machine.json'
    ]
    assert long_result_fn.lower() not in [x.lower() for x in items]
    assert hash_result_fn.lower() in [x.lower() for x in items]

    # Check env name is preserved
    new_env_name = util.load_json(os.path.join(machine_dir,
                                               items[0]))['env_name']
    assert old_env_name == new_env_name
Example #8
def test_presence_checks(tmpdir):
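    """Environments must be recreated when the env info file is stale or
    when essential tools such as pip go missing."""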
    conf = config.Config()

    conf.env_dir = six.text_type(tmpdir.join("env"))

    conf.pythons = ["2.7"]
    conf.matrix = {}
    environments = list(environment.get_environments(conf))

    for env in environments:
        env.create()

        # Check env is recreated when info file is clobbered
        info_fn = os.path.join(env._path, 'asv-env-info.json')
        data = util.load_json(info_fn)
        data['python'] = '3.4'
        util.write_json(info_fn, data)
        env._is_setup = False
        env.create()
        data = util.load_json(info_fn)
        assert data['python'] == '2.7'
        env.run(['-c', 'import os'])

        # Check env is recreated if crucial things are missing
        pip_fn = os.path.join(env._path, 'bin', 'pip')
        os.remove(pip_fn)
        env._is_setup = False
        env.create()
        assert os.path.isfile(pip_fn)
        env.run(['-c', 'import os'])
Example #9
File: tools.py Project: wrwrwr/asv
def generate_result_dir(tmpdir, dvcs, values, branches=None):
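    """Older variant of generate_result_dir: add_result takes the value
    and timestamps directly, and benchmarks.json carries no benchmark
    version."""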
    result_dir = join(tmpdir, "results")
    os.makedirs(result_dir)
    html_dir = join(tmpdir, "html")
    machine_dir = join(result_dir, "tarzan")
    os.makedirs(machine_dir)

    if branches is None:
        branches = [None]

    conf = config.Config.from_json({
        'results_dir': result_dir,
        'html_dir': html_dir,
        'repo': dvcs.path,
        'project': 'asv',
        'branches': branches or [None],
    })
    repo = get_repo(conf)

    util.write_json(join(machine_dir, "machine.json"), {
        'machine': 'tarzan',
        'version': 1,
    })

    timestamp = datetime.datetime.utcnow()

    params = None
    for commit, value in values.items():
        if isinstance(value, dict):
            params = value["params"]
        result = Results({"machine": "tarzan"}, {}, commit,
                         repo.get_date_from_name(commit), "2.7", None)
        result.add_result("time_func", value, timestamp, timestamp)
        result.save(result_dir)

    util.write_json(join(result_dir, "benchmarks.json"), {
        "time_func": {
            "name": "time_func",
            "params": params or [],
            "param_names": params or [],
        }
    },
                    api_version=1)
    return conf
Example #10
def test_compare_name_lookup(dvcs_type, capsys, tmpdir, example_results):
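    """'asv compare' must resolve branch names and partial commit
    hashes; comparing a commit against an identical copy of its results
    prints nothing under --only-changed."""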
    tmpdir = str(tmpdir)
    os.chdir(tmpdir)

    repo = tools.generate_test_repo(tmpdir, dvcs_type=dvcs_type)
    branch_name = 'master' if dvcs_type == 'git' else 'default'
    commit_hash = repo.get_branch_hashes(branch_name)[0]

    result_dir = os.path.join(tmpdir, 'results')

    src = os.path.join(example_results, 'cheetah')
    dst = os.path.join(result_dir, 'cheetah')
    os.makedirs(dst)

    for fn in ['feea15ca-py2.7-Cython-numpy1.8.json', 'machine.json']:
        shutil.copyfile(os.path.join(src, fn), os.path.join(dst, fn))

    shutil.copyfile(os.path.join(example_results, 'benchmarks.json'),
                    os.path.join(result_dir, 'benchmarks.json'))

    # Copy to different commit
    fn_1 = os.path.join(dst, 'feea15ca-py2.7-Cython-numpy1.8.json')
    fn_2 = os.path.join(dst, commit_hash[:8] + '-py2.7-Cython-numpy1.8.json')
    data = util.load_json(fn_1)
    data['commit_hash'] = commit_hash
    util.write_json(fn_2, data)

    conf = config.Config.from_json({
        'results_dir': result_dir,
        'repo': repo.path,
        'project': 'asv',
        'environment_type': "shouldn't matter what"
    })

    # Lookup with symbolic name
    tools.run_asv_with_conf(conf, 'compare', branch_name, 'feea15ca',
                            '--machine=cheetah', '--factor=2',
                            '--environment=py2.7-Cython-numpy1.8',
                            '--only-changed')

    # Nothing should be printed since no results were changed
    text, err = capsys.readouterr()
    assert text.strip() == ''
Example #11
File: tools.py Project: craig8/asv
def generate_result_dir(tmpdir, dvcs, values, branches=None):
    result_dir = join(tmpdir, "results")
    os.makedirs(result_dir)
    html_dir = join(tmpdir, "html")
    machine_dir = join(result_dir, "tarzan")
    os.makedirs(machine_dir)

    if branches is None:
        branches = [None]

    conf = config.Config.from_json({
        'results_dir': result_dir,
        'html_dir': html_dir,
        'repo': dvcs.path,
        'project': 'asv',
        'branches': branches or [None],
    })
    repo = get_repo(conf)

    util.write_json(join(machine_dir, "machine.json"), {
        'machine': 'tarzan',
    })

    timestamp = datetime.datetime.utcnow()

    params = None
    for commit, value in values.items():
        if isinstance(value, dict):
            params = value["params"]
        result = Results({"machine": "tarzan"}, {}, commit,
                         repo.get_date_from_name(commit), "2.7", None)
        result.add_result("time_func", value, timestamp, timestamp)
        result.save(result_dir)

    util.write_json(join(result_dir, "benchmarks.json"), {
        "time_func": {
            "name": "time_func",
            "params": params or [],
            "param_names": params or [],
        }
    }, api_version=1)
    return conf
Example #12
def test_compare_name_lookup(dvcs_type, capsys, tmpdir):
    tmpdir = six.text_type(tmpdir)
    os.chdir(tmpdir)

    repo = tools.generate_test_repo(tmpdir, dvcs_type=dvcs_type)
    branch_name = 'master' if dvcs_type == 'git' else 'default'
    commit_hash = repo.get_branch_hashes(branch_name)[0]

    result_dir = os.path.join(tmpdir, 'results')

    src = os.path.join(RESULT_DIR, 'cheetah')
    dst = os.path.join(result_dir, 'cheetah')
    os.makedirs(dst)

    for fn in ['feea15ca-py2.7-Cython-numpy1.8.json', 'machine.json']:
        shutil.copyfile(os.path.join(src, fn), os.path.join(dst, fn))

    shutil.copyfile(os.path.join(RESULT_DIR, 'benchmarks.json'),
                    os.path.join(result_dir, 'benchmarks.json'))

    # Copy to different commit
    fn_1 = os.path.join(dst, 'feea15ca-py2.7-Cython-numpy1.8.json')
    fn_2 = os.path.join(dst, commit_hash[:8] + '-py2.7-Cython-numpy1.8.json')
    data = util.load_json(fn_1)
    data['commit_hash'] = commit_hash
    util.write_json(fn_2, data)

    conf = config.Config.from_json(
        {'results_dir': result_dir,
         'repo': repo.path,
         'project': 'asv',
         'environment_type': "shouldn't matter what"})

    # Lookup with symbolic name
    tools.run_asv_with_conf(conf, 'compare', branch_name, 'feea15ca', '--machine=cheetah',
                            '--factor=2', '--environment=py2.7-Cython-numpy1.8',
                            '--only-changed')

    # Nothing should be printed since no results were changed
    text, err = capsys.readouterr()
    assert text.strip() == ''
Example #13
def test_write_load_json(tmpdir):
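    """Round-trip JSON with and without an api_version wrapper; loading
    with a mismatched or missing version must raise UserError."""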
    data = {
        'a': 1,
        'b': 2,
        'c': 3
    }
    orig_data = dict(data)

    filename = os.path.join(str(tmpdir), 'test.json')

    util.write_json(filename, data)
    data2 = util.load_json(filename)
    assert data == orig_data
    assert data2 == orig_data

    util.write_json(filename, data, 3)
    data2 = util.load_json(filename, 3)
    assert data == orig_data
    assert data2 == orig_data

    # Wrong API version must fail to load
    with pytest.raises(util.UserError):
        util.load_json(filename, 2)
    with pytest.raises(util.UserError):
        util.load_json(filename, 4)
    util.write_json(filename, data)
    with pytest.raises(util.UserError):
        util.load_json(filename, 3)
Example #14
def test_write_load_json(tmpdir):
    data = {
        'a': 1,
        'b': 2,
        'c': 3
    }
    orig_data = dict(data)

    filename = os.path.join(six.text_type(tmpdir), 'test.json')

    util.write_json(filename, data)
    data2 = util.load_json(filename)
    assert data == orig_data
    assert data2 == orig_data

    util.write_json(filename, data, 3)
    data2 = util.load_json(filename, 3)
    assert data == orig_data
    assert data2 == orig_data

    # Wrong API version must fail to load
    with pytest.raises(util.UserError):
        util.load_json(filename, 2)
    with pytest.raises(util.UserError):
        util.load_json(filename, 4)
    util.write_json(filename, data)
    with pytest.raises(util.UserError):
        util.load_json(filename, 3)
Example #15
def test_presence_checks(tmpdir, monkeypatch):
    conf = config.Config()

    if WIN:
        # Tell conda not to use hardlinks: on Windows it is not possible
        # to delete hard links to files in use, which causes problems
        # when trying to clean up environments during this test
        monkeypatch.setenv(str('CONDA_ALWAYS_COPY'), str('True'))

    conf.env_dir = str(tmpdir.join("env"))

    conf.pythons = [PYTHON_VER1]
    conf.matrix = {}
    environments = list(environment.get_environments(conf, None))

    for env in environments:
        env.create()
        assert env.check_presence()

        # Check env is recreated when info file is clobbered
        info_fn = os.path.join(env._path, 'asv-env-info.json')
        data = util.load_json(info_fn)
        data['python'] = '0'
        util.write_json(info_fn, data)
        env._is_setup = False
        env.create()
        data = util.load_json(info_fn)
        assert data['python'] == PYTHON_VER1
        env.run(['-c', 'import os'])

        # Check env is recreated if crucial things are missing
        pip_fns = [
            os.path.join(env._path, 'bin', 'pip')
        ]
        if WIN:
            pip_fns += [
                os.path.join(env._path, 'bin', 'pip.exe'),
                os.path.join(env._path, 'Scripts', 'pip'),
                os.path.join(env._path, 'Scripts', 'pip.exe')
            ]

        some_removed = False
        for pip_fn in pip_fns:
            if os.path.isfile(pip_fn):
                some_removed = True
                os.remove(pip_fn)
        assert some_removed

        env._is_setup = False
        env.create()
        assert os.path.isfile(pip_fn)
        env.run(['-c', 'import os'])
Example #16
def test_presence_checks(tmpdir):
    conf = config.Config()

    conf.env_dir = six.text_type(tmpdir.join("env"))

    conf.pythons = ["2.7"]
    conf.matrix = {}
    environments = list(environment.get_environments(conf))

    for env in environments:
        env.create()
        assert env.check_presence()

        # Check env is recreated when info file is clobbered
        info_fn = os.path.join(env._path, 'asv-env-info.json')
        data = util.load_json(info_fn)
        data['python'] = '3.4'
        util.write_json(info_fn, data)
        env._is_setup = False
        env.create()
        data = util.load_json(info_fn)
        assert data['python'] == '2.7'
        env.run(['-c', 'import os'])

        # Check env is recreated if crucial things are missing
        pip_fns = [
            os.path.join(env._path, 'bin', 'pip')
        ]
        if WIN:
            pip_fns += [
                os.path.join(env._path, 'bin', 'pip.exe'),
                os.path.join(env._path, 'Scripts', 'pip'),
                os.path.join(env._path, 'Scripts', 'pip.exe')
            ]

        some_removed = False
        for pip_fn in pip_fns:
            if os.path.isfile(pip_fn):
                some_removed = True
                os.remove(pip_fn)
        assert some_removed

        env._is_setup = False
        env.create()
        assert os.path.isfile(pip_fn)
        env.run(['-c', 'import os'])
Example #17
def test_presence_checks(tmpdir):
    conf = config.Config()

    conf.env_dir = six.text_type(tmpdir.join("env"))

    conf.pythons = [PYTHON_VER1]
    conf.matrix = {}
    environments = list(environment.get_environments(conf, None))

    for env in environments:
        env.create()
        assert env.check_presence()

        # Check env is recreated when info file is clobbered
        info_fn = os.path.join(env._path, 'asv-env-info.json')
        data = util.load_json(info_fn)
        data['python'] = '0'
        util.write_json(info_fn, data)
        env._is_setup = False
        env.create()
        data = util.load_json(info_fn)
        assert data['python'] == PYTHON_VER1
        env.run(['-c', 'import os'])

        # Check env is recreated if crucial things are missing
        pip_fns = [
            os.path.join(env._path, 'bin', 'pip')
        ]
        if WIN:
            pip_fns += [
                os.path.join(env._path, 'bin', 'pip.exe'),
                os.path.join(env._path, 'Scripts', 'pip'),
                os.path.join(env._path, 'Scripts', 'pip.exe')
            ]

        some_removed = False
        for pip_fn in pip_fns:
            if os.path.isfile(pip_fn):
                some_removed = True
                os.remove(pip_fn)
        assert some_removed

        env._is_setup = False
        env.create()
        assert os.path.isfile(pip_fn)
        env.run(['-c', 'import os'])
Example #18
def test_publish(tmpdir):
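    """'asv publish' must build the HTML output tree, and graphs for a
    non-default branch appear only when it is listed in conf.branches."""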
    tmpdir = six.text_type(tmpdir)
    os.chdir(tmpdir)

    result_dir = join(tmpdir, 'sample_results')
    os.makedirs(result_dir)
    os.makedirs(join(result_dir, 'cheetah'))

    # Synthesize history with two branches that both have commits
    result_files = [fn for fn in os.listdir(join(RESULT_DIR, 'cheetah'))
                    if fn.endswith('.json') and fn != 'machine.json']
    result_files.sort()
    master_values = list(range(len(result_files)*2//3))
    branch_values = list(range(len(master_values), len(result_files)))
    dvcs = tools.generate_test_repo(tmpdir, master_values, 'git',
                                    [('master~6', 'some-branch', branch_values)])

    # Copy and modify result files, fixing commit hashes
    master_commits = dvcs.get_branch_hashes('master')
    only_branch = [x for x in dvcs.get_branch_hashes('some-branch')
                   if x not in master_commits]
    commits = master_commits + only_branch
    for k, item in enumerate(zip(result_files, commits)):
        fn, commit = item
        src = join(RESULT_DIR, 'cheetah', fn)
        dst = join(result_dir, 'cheetah', commit[:8] + fn[8:])
        data = util.load_json(src, cleanup=False)
        data['commit_hash'] = commit
        util.write_json(dst, data)

    shutil.copyfile(join(RESULT_DIR, 'benchmarks.json'),
                    join(result_dir, 'benchmarks.json'))
    shutil.copyfile(join(RESULT_DIR, 'cheetah', 'machine.json'),
                    join(result_dir, 'cheetah', 'machine.json'))

    # Publish the synthesized data
    conf = config.Config.from_json(
        {'benchmark_dir': BENCHMARK_DIR,
         'results_dir': result_dir,
         'html_dir': join(tmpdir, 'html'),
         'repo': dvcs.path,
         'project': 'asv'})

    tools.run_asv_with_conf(conf, 'publish')

    # Check output
    assert isfile(join(tmpdir, 'html', 'index.html'))
    assert isfile(join(tmpdir, 'html', 'index.json'))
    assert isfile(join(tmpdir, 'html', 'asv.js'))
    assert isfile(join(tmpdir, 'html', 'asv.css'))
    assert not isdir(join(tmpdir, 'html', 'graphs', 'Cython', 'arch-x86_64',
                          'branch-some-branch'))
    assert not isdir(join(tmpdir, 'html', 'graphs', 'Cython-null', 'arch-x86_64',
                          'branch-some-branch'))
    index = util.load_json(join(tmpdir, 'html', 'index.json'))
    assert index['params']['branch'] == ['master']

    repo = get_repo(conf)
    revision_to_hash = dict((r, h) for h, r in six.iteritems(repo.get_revisions(commits)))

    def check_file(branch, cython):
        fn = join(tmpdir, 'html', 'graphs', cython, 'arch-x86_64', 'branch-' + branch,
                  'cpu-Intel(R) Core(TM) i5-2520M CPU @ 2.50GHz (4 cores)',
                  'machine-cheetah', 'numpy-1.8', 'os-Linux (Fedora 20)', 'python-2.7', 'ram-8.2G',
                  'time_coordinates.time_latitude.json')
        data = util.load_json(fn, cleanup=False)
        data_commits = [revision_to_hash[x[0]] for x in data]
        if branch == "master":
            assert all(c in master_commits for c in data_commits)
        else:
            # Must contain commits from some-branch
            assert any(c in only_branch for c in data_commits)
            # And commits from master
            assert any(c in master_commits for c in data_commits)

        # Check that revisions are strictly increasing
        assert all(x[0] < y[0] for x, y in zip(data, data[1:]))

    check_file("master", "Cython")
    check_file("master", "Cython-null")

    # Publish with branches set in the config
    conf.branches = ['master', 'some-branch']
    tools.run_asv_with_conf(conf, 'publish')

    # Check output
    check_file("master", "Cython")
    check_file("master", "Cython-null")
    check_file("some-branch", "Cython")
    check_file("some-branch", "Cython-null")

    index = util.load_json(join(tmpdir, 'html', 'index.json'))
    assert index['params']['branch'] == ['master', 'some-branch']
    assert index['params']['Cython'] == ['', None]
    assert index['params']['ram'] == ['8.2G', 8804682956.8]

    expected_graph_list = [{'Cython': cython, 'arch': 'x86_64',
                            'branch': branch,
                            'cpu': 'Intel(R) Core(TM) i5-2520M CPU @ 2.50GHz (4 cores)',
                            'machine': 'cheetah',
                            'numpy': '1.8',
                            'os': 'Linux (Fedora 20)',
                            'python': '2.7',
                            'ram': '8.2G'}
                            for cython in ["", None] for branch in ["master", "some-branch"]]
    d = dict(expected_graph_list[0])
    d['ram'] = 8804682956.8
    expected_graph_list.append(d)

    assert len(index['graph_param_list']) == len(expected_graph_list)
    for item in expected_graph_list:
        assert item in index['graph_param_list']
Example #19
def run_asv(args, current_repo=False):
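    """Run 'asv' for the benchmark suite in a controlled environment
    (ccache/f90cache on PATH, single-threaded OpenBLAS, sanitized
    CFLAGS, memory rlimit), optionally against the current repo."""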
    cwd = os.path.abspath(os.path.dirname(__file__))

    if current_repo:
        try:
            from asv.util import load_json, write_json
            conf = load_json(os.path.join(cwd, 'asv.conf.json'))
            conf['repo'] = os.path.normpath(os.path.join(cwd, '..'))
            cfg_fn = os.path.join(cwd, '.asvconf.tmp')
            write_json(cfg_fn, conf)
            args = ['--config', cfg_fn] + args
        except ImportError:
            pass

    repo_dir = os.path.join(cwd, 'scipy')
    if is_git_repo_root(repo_dir):
        if current_repo:
            url = os.path.normpath(os.path.join(cwd, '..'))
        else:
            url = "https://github.com/scipy/scipy.git"
        subprocess.call(['git', 'remote', 'set-url', "origin", url],
                        cwd=repo_dir)

    cmd = ['asv'] + list(args)
    env = dict(os.environ)

    # Inject ccache/f90cache paths
    if sys.platform.startswith('linux'):
        env['PATH'] = os.pathsep.join(EXTRA_PATH + env.get('PATH', '').split(os.pathsep))

    # Control BLAS and CFLAGS
    env['OPENBLAS_NUM_THREADS'] = '1'
    env['CFLAGS'] = drop_bad_flags(sysconfig.get_config_var('CFLAGS'))

    # Limit memory usage
    try:
        set_mem_rlimit()
    except (ImportError, RuntimeError):
        pass

    # Check scipy version if in dev mode; otherwise clone and setup results
    # repository
    if args and (args[0] == 'dev' or '--python=same' in args):
        import scipy
        print("Running benchmarks for Scipy version %s at %s" % (scipy.__version__, scipy.__file__))

    # Override gh-pages
    if 'gh-pages' in args:
        print("gh-pages command is disabled")
        return 1

    # Run
    try:
        return subprocess.call(cmd, env=env, cwd=cwd)
    except OSError as err:
        if err.errno == 2:
            print("Error when running '%s': %s\n" % (" ".join(cmd), str(err),))
            print("You need to install Airspeed Velocity https://spacetelescope.github.io/asv/")
            print("to run Scipy benchmarks")
            return 1
        raise
Example #20
def generate_result_dir(tmpdir, dvcs, values, branches=None):
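    """Variant of generate_result_dir for the newer results API: values
    are wrapped in runner.BenchmarkResult and benchmarks.json is written
    with api_version=2."""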
    result_dir = join(tmpdir, "results")
    os.makedirs(result_dir)
    html_dir = join(tmpdir, "html")
    machine_dir = join(result_dir, "tarzan")
    os.makedirs(machine_dir)

    if branches is None:
        branches = [None]

    conf = config.Config.from_json({
        'results_dir': result_dir,
        'html_dir': html_dir,
        'repo': dvcs.path,
        'project': 'asv',
        'branches': branches or [None],
    })
    repo = get_repo(conf)

    util.write_json(join(machine_dir, "machine.json"), {
        'machine': 'tarzan',
        'version': 1,
    })

    timestamp = datetime.datetime.utcnow()

    benchmark_version = sha256(os.urandom(16)).hexdigest()

    params = []
    param_names = None
    for commit, value in values.items():
        if isinstance(value, dict):
            params = value["params"]
            value = value["result"]
        else:
            value = [value]
        result = Results({"machine": "tarzan"}, {}, commit,
                         repo.get_date_from_name(commit), "2.7", None, {})
        value = runner.BenchmarkResult(result=value,
                                       samples=[None] * len(value),
                                       number=[None] * len(value),
                                       errcode=0,
                                       stderr='',
                                       profile=None)
        result.add_result(
            {
                "name": "time_func",
                "version": benchmark_version,
                "params": params
            },
            value,
            started_at=timestamp,
            duration=1.0)
        result.save(result_dir)

    if params:
        param_names = ["param{}".format(k) for k in range(len(params))]

    util.write_json(join(result_dir, "benchmarks.json"), {
        "time_func": {
            "name": "time_func",
            "params": params or [],
            "param_names": param_names or [],
            "version": benchmark_version,
        }
    },
                    api_version=2)
    return conf
Example #21
def test_git_submodule(tmpdir):
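    """Checkouts must track submodules and nested sub-submodules as they
    are added, updated, and removed, and cleaning must reach into them."""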
    tmpdir = six.text_type(tmpdir)

    # State 0 (no submodule)
    dvcs = tools.generate_test_repo(tmpdir, values=[0], dvcs_type='git')
    sub_dvcs = tools.generate_test_repo(tmpdir, values=[0], dvcs_type='git')
    ssub_dvcs = tools.generate_test_repo(tmpdir, values=[0], dvcs_type='git')
    commit_hash_0 = dvcs.get_hash("master")

    # State 1 (one submodule)
    dvcs.run_git(['submodule', 'add', sub_dvcs.path, 'sub1'])
    dvcs.commit('Add sub1')
    commit_hash_1 = dvcs.get_hash("master")

    # State 2 (one submodule with sub-submodule)
    dvcs.run_git(['submodule', 'update', '--init'])
    sub1_dvcs = tools.Git(join(dvcs.path, 'sub1'))
    sub_dvcs.run_git(['submodule', 'add', ssub_dvcs.path, 'ssub1'])
    sub_dvcs.commit('Add ssub1')
    sub1_dvcs.run_git(['pull'])
    dvcs.run_git(['add', 'sub1'])
    dvcs.commit('Update sub1')
    sub1_hash_2 = sub1_dvcs.get_hash("master")
    commit_hash_2 = dvcs.get_hash("master")

    # State 3 (one submodule; sub-submodule removed)
    sub_dvcs.run_git(['rm', '-f', 'ssub1'])
    sub_dvcs.commit('Remove ssub1')
    sub1_dvcs.run_git(['pull'])
    dvcs.run_git(['add', 'sub1'])
    dvcs.commit('Update sub1 again')
    commit_hash_3 = dvcs.get_hash("master")

    # State 4 (back to one submodule with sub-submodule)
    sub1_dvcs.run_git(['checkout', sub1_hash_2])
    dvcs.run_git(['add', 'sub1'])
    dvcs.commit('Update sub1 3rd time')
    commit_hash_4 = dvcs.get_hash("master")

    # State 5 (remove final submodule)
    dvcs.run_git(['rm', '-f', 'sub1'])
    dvcs.commit('Remove sub1')
    commit_hash_5 = dvcs.get_hash("master")

    # Verify clean operation
    conf = config.Config()
    conf.branches = [None]
    conf.repo = dvcs.path
    conf.project = join(tmpdir, "repo")
    r = repo.get_repo(conf)

    checkout_dir = join(tmpdir, "checkout")

    # State 0
    r.checkout(checkout_dir, commit_hash_0)
    assert os.path.isfile(join(checkout_dir, 'README'))
    assert not os.path.exists(join(checkout_dir, 'sub1'))

    # State 1
    r.checkout(checkout_dir, commit_hash_1)
    assert os.path.isfile(join(checkout_dir, 'sub1', 'README'))
    assert not os.path.exists(join(checkout_dir, 'sub1', 'ssub1'))

    # State 2
    r.checkout(checkout_dir, commit_hash_2)
    assert os.path.isfile(join(checkout_dir, 'sub1', 'ssub1', 'README'))

    # State 3
    r.checkout(checkout_dir, commit_hash_3)
    assert os.path.isfile(join(checkout_dir, 'sub1', 'README'))
    assert not os.path.exists(join(checkout_dir, 'sub1', 'ssub1'))

    # State 4
    r.checkout(checkout_dir, commit_hash_4)
    assert os.path.isfile(join(checkout_dir, 'sub1', 'ssub1', 'README'))

    # State 4 (check clean -fdx runs in sub-submodules)
    garbage_filename = join(checkout_dir, 'sub1', 'ssub1', '.garbage')
    util.write_json(garbage_filename, {})
    assert os.path.isfile(garbage_filename)
    r.checkout(checkout_dir, commit_hash_4)
    assert not os.path.isfile(garbage_filename)

    # State 5
    r.checkout(checkout_dir, commit_hash_5)
    assert os.path.isfile(join(checkout_dir, 'README'))
    assert not os.path.isdir(join(checkout_dir, 'sub1'))
Example #22
def test_publish(tmpdir, example_results):
    tmpdir = str(tmpdir)
    os.chdir(tmpdir)

    result_dir = join(tmpdir, 'sample_results')
    os.makedirs(result_dir)
    os.makedirs(join(result_dir, 'cheetah'))

    # Synthesize history with two branches that both have commits
    result_files = [fn for fn in os.listdir(join(example_results, 'cheetah'))
                    if fn.endswith('.json') and fn != 'machine.json']
    result_files.sort()
    master_values = list(range(len(result_files) * 2 // 3))
    branch_values = list(range(len(master_values), len(result_files)))
    dvcs = tools.generate_test_repo(tmpdir, master_values, 'git',
                                    [('master~6', 'some-branch', branch_values)])

    # Copy and modify result files, fixing commit hashes
    master_commits = dvcs.get_branch_hashes('master')
    only_branch = [x for x in dvcs.get_branch_hashes('some-branch')
                   if x not in master_commits]
    commits = master_commits + only_branch
    for k, item in enumerate(zip(result_files, commits)):
        fn, commit = item
        src = join(example_results, 'cheetah', fn)
        dst = join(result_dir, 'cheetah', commit[:8] + fn[8:])
        try:
            data = util.load_json(src)
        except util.UserError:
            # intentionally malformed file, ship it as is
            shutil.copyfile(src, dst)
            continue
        data['commit_hash'] = commit
        util.write_json(dst, data)

    shutil.copyfile(join(example_results, 'benchmarks.json'),
                    join(result_dir, 'benchmarks.json'))
    shutil.copyfile(join(example_results, 'cheetah', 'machine.json'),
                    join(result_dir, 'cheetah', 'machine.json'))

    # Publish the synthesized data
    conf = config.Config.from_json(
        {'benchmark_dir': BENCHMARK_DIR,
         'results_dir': result_dir,
         'html_dir': join(tmpdir, 'html'),
         'repo': dvcs.path,
         'project': 'asv'})

    tools.run_asv_with_conf(conf, 'publish')

    # Check output
    assert isfile(join(tmpdir, 'html', 'index.html'))
    assert isfile(join(tmpdir, 'html', 'index.json'))
    assert isfile(join(tmpdir, 'html', 'asv.js'))
    assert isfile(join(tmpdir, 'html', 'asv.css'))
    assert not isdir(join(tmpdir, 'html', 'graphs', 'Cython', 'arch-x86_64',
                          'branch-some-branch'))
    assert not isdir(join(tmpdir, 'html', 'graphs', 'Cython-null', 'arch-x86_64',
                          'branch-some-branch'))
    index = util.load_json(join(tmpdir, 'html', 'index.json'))
    assert index['params']['branch'] == ['master']

    repo = get_repo(conf)
    revision_to_hash = dict((r, h) for h, r in repo.get_revisions(commits).items())

    def check_file(branch, cython):
        fn = join(tmpdir, 'html', 'graphs', cython, 'arch-x86_64', 'branch-' + branch,
                  'cpu-Intel(R) Core(TM) i5-2520M CPU @ 2.50GHz (4 cores)',
                  'machine-cheetah', 'numpy-1.8', 'os-Linux (Fedora 20)', 'python-2.7', 'ram-8.2G',
                  'time_coordinates.time_latitude.json')
        data = util.load_json(fn)
        data_commits = [revision_to_hash[x[0]] for x in data]
        if branch == "master":
            assert all(c in master_commits for c in data_commits)
        else:
            # Must contain commits from some-branch
            assert any(c in only_branch for c in data_commits)
            # And commits from master
            assert any(c in master_commits for c in data_commits)

        # Check that revisions are strictly increasing
        assert all(x[0] < y[0] for x, y in zip(data, data[1:]))

    check_file("master", "Cython")
    check_file("master", "Cython-null")

    # Publish with branches set in the config
    conf.branches = ['master', 'some-branch']
    tools.run_asv_with_conf(conf, 'publish')

    # Check output
    check_file("master", "Cython")
    check_file("master", "Cython-null")
    check_file("some-branch", "Cython")
    check_file("some-branch", "Cython-null")

    index = util.load_json(join(tmpdir, 'html', 'index.json'))
    assert index['params']['branch'] == ['master', 'some-branch']
    assert index['params']['Cython'] == ['', None]
    assert index['params']['ram'] == ['8.2G', 8804682956.8]

    expected_graph_list = [{'Cython': cython, 'arch': 'x86_64',
                            'branch': branch,
                            'cpu': 'Intel(R) Core(TM) i5-2520M CPU @ 2.50GHz (4 cores)',
                            'machine': 'cheetah',
                            'numpy': '1.8',
                            'os': 'Linux (Fedora 20)',
                            'python': '2.7',
                            'ram': '8.2G'}
                           for cython in ["", None] for branch in ["master", "some-branch"]]
    d = dict(expected_graph_list[0])
    d['ram'] = 8804682956.8
    expected_graph_list.append(d)

    assert len(index['graph_param_list']) == len(expected_graph_list)
    for item in expected_graph_list:
        assert item in index['graph_param_list']
Example #23
def basic_html(request):
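    """Build a small published asv site plus its test repo, caching the
    result when py.test is new enough; returns (html_dir, dvcs)."""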
    if hasattr(request.config, 'cache'):
        # Cache the generated html, if py.test is new enough to support it
        cache_dir = request.config.cache.makedir("asv-test_web-basic_html")
        tmpdir = join(six.text_type(cache_dir), 'cached')

        if os.path.isdir(tmpdir):
            # Cached result found
            try:
                if util.load_json(join(tmpdir, 'tag.json')) != [asv.__version__]:
                    raise ValueError()

                html_dir = join(tmpdir, 'html')
                dvcs = tools.Git(join(tmpdir, 'repo'))
                return html_dir, dvcs
            except (IOError, ValueError):
                shutil.rmtree(tmpdir)

        os.makedirs(tmpdir)
    else:
        tmpdir = tempfile.mkdtemp()
        request.addfinalizer(lambda: shutil.rmtree(tmpdir))

    local = abspath(dirname(__file__))
    cwd = os.getcwd()

    os.chdir(tmpdir)
    try:
        machine_file = join(tmpdir, 'asv-machine.json')

        shutil.copyfile(join(local, 'asv-machine.json'),
                        machine_file)

        values = [[x]*2 for x in [0, 0, 0, 0, 0,
                                  1, 1, 1, 1, 1,
                                  3, 3, 3, 3, 3,
                                  2, 2, 2, 2, 2]]
        dvcs = tools.generate_test_repo(tmpdir, values)
        first_tested_commit_hash = dvcs.get_hash('master~14')

        repo_path = dvcs.path
        shutil.move(repo_path, join(tmpdir, 'repo'))
        dvcs = tools.Git(join(tmpdir, 'repo'))

        conf = config.Config.from_json({
            'env_dir': join(tmpdir, 'env'),
            'benchmark_dir': join(local, 'benchmark'),
            'results_dir': join(tmpdir, 'results_workflow'),
            'html_dir': join(tmpdir, 'html'),
            'repo': join(tmpdir, 'repo'),
            'dvcs': 'git',
            'project': 'asv',
            'matrix': {},
            'regressions_first_commits': {
                '.*': first_tested_commit_hash
            },
        })

        tools.run_asv_with_conf(conf, 'run', 'ALL',
                                '--show-stderr', '--quick', '--bench=params_examples.*track_.*',
                                _machine_file=machine_file)

        # Swap CPU info and obtain some results
        info = util.load_json(machine_file, api_version=1)

        # Put in parameter values that need quoting in file names
        info['orangutan']['cpu'] = 'Not /really/ <fast>'
        info['orangutan']['ram'] = '?'
        info['orangutan']['NUL'] = ''

        util.write_json(machine_file, info, api_version=1)

        tools.run_asv_with_conf(conf, 'run', 'master~10..', '--steps=3',
                                '--show-stderr', '--quick', '--bench=params_examples.*track_.*',
                                _machine_file=machine_file)

        # Output
        tools.run_asv_with_conf(conf, 'publish')

        shutil.rmtree(join(tmpdir, 'env'))
    finally:
        os.chdir(cwd)

    util.write_json(join(tmpdir, 'tag.json'), [asv.__version__])

    return conf.html_dir, dvcs
Example #24
def _rebuild_basic_html(basedir):
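    """Regenerate the basic HTML fixture under basedir unless it already
    exists; returns (html_dir, dvcs)."""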
    local = abspath(dirname(__file__))
    cwd = os.getcwd()

    if os.path.isdir(basedir):
        html_dir = join(basedir, 'html')
        dvcs = tools.Git(join(basedir, 'repo'))
        return html_dir, dvcs

    os.makedirs(basedir)
    os.chdir(basedir)
    try:
        machine_file = join(basedir, 'asv-machine.json')

        shutil.copyfile(join(local, 'asv-machine.json'), machine_file)

        values = [[x] * 2 for x in
                  [0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 3, 3, 3, 3, 3, 2, 2, 2, 2, 2]]
        dvcs = tools.generate_test_repo(basedir, values)
        first_tested_commit_hash = dvcs.get_hash('master~14')

        repo_path = dvcs.path
        shutil.move(repo_path, join(basedir, 'repo'))
        dvcs = tools.Git(join(basedir, 'repo'))

        conf = config.Config.from_json({
            'env_dir': join(basedir, 'env'),
            'benchmark_dir': join(local, 'benchmark'),
            'results_dir': join(basedir, 'results_workflow'),
            'html_dir': join(basedir, 'html'),
            'repo': join(basedir, 'repo'),
            'dvcs': 'git',
            'project': 'asv',
            'matrix': {
                "env": {
                    "SOME_TEST_VAR": ["1"]
                }
            },
            'regressions_first_commits': {
                '.*': first_tested_commit_hash
            },
        })

        if WIN:
            # Tell conda not to use hardlinks: on Windows it is not possible
            # to delete hard links to files in use, which causes problems
            # when trying to clean up environments during this test (since
            # the same cache directory may get reused).
            conf.matrix["env"]["CONDA_ALWAYS_COPY"] = ["True"]

        tools.run_asv_with_conf(conf,
                                'run',
                                'ALL',
                                '--show-stderr',
                                '--quick',
                                '--bench=params_examples[a-z0-9_.]*track_',
                                _machine_file=machine_file)

        # Swap CPU info and obtain some results
        info = util.load_json(machine_file, api_version=1)

        # Put in parameter values that need quoting in file names
        info['orangutan']['cpu'] = 'Not /really/ <fast>'
        info['orangutan']['ram'] = '?'
        info['orangutan']['NUL'] = ''

        util.write_json(machine_file, info, api_version=1)

        tools.run_asv_with_conf(conf,
                                'run',
                                'master~10..',
                                '--steps=3',
                                '--show-stderr',
                                '--quick',
                                '--bench=params_examples[a-z0-9_.]*track_',
                                _machine_file=machine_file)

        # Output
        tools.run_asv_with_conf(conf, 'publish')

        shutil.rmtree(join(basedir, 'env'))
    finally:
        os.chdir(cwd)

    return conf.html_dir, dvcs
Example #25
def test_publish(tmpdir):
    tmpdir = six.text_type(tmpdir)
    os.chdir(tmpdir)

    result_dir = join(tmpdir, 'sample_results')
    os.makedirs(result_dir)
    os.makedirs(join(result_dir, 'cheetah'))

    # Synthesize history with two branches that both have commits
    result_files = [fn for fn in os.listdir(join(RESULT_DIR, 'cheetah'))
                    if fn.endswith('.json') and fn != 'machine.json']
    master_values = list(range(len(result_files)*2//3))
    branch_values = list(range(len(master_values), len(result_files)))
    dvcs = tools.generate_test_repo(tmpdir, master_values, 'git',
                                    [('master~6', 'some-branch', branch_values)])

    # Copy and modify result files, fixing commit hashes and setting result
    # dates to distinguish the two branches
    master_commits = dvcs.get_branch_hashes('master')
    only_branch = [x for x in dvcs.get_branch_hashes('some-branch')
                   if x not in master_commits]
    commits = master_commits + only_branch
    for k, item in enumerate(zip(result_files, commits)):
        fn, commit = item
        src = join(RESULT_DIR, 'cheetah', fn)
        dst = join(result_dir, 'cheetah', commit[:8] + fn[8:])
        data = util.load_json(src, cleanup=False)
        data['commit_hash'] = commit
        if commit in only_branch:
            data['date'] = -k
        else:
            data['date'] = k
        util.write_json(dst, data)

    shutil.copyfile(join(RESULT_DIR, 'benchmarks.json'),
                    join(result_dir, 'benchmarks.json'))
    shutil.copyfile(join(RESULT_DIR, 'cheetah', 'machine.json'),
                    join(result_dir, 'cheetah', 'machine.json'))

    # Publish the synthesized data
    conf = config.Config.from_json(
        {'benchmark_dir': BENCHMARK_DIR,
         'results_dir': result_dir,
         'html_dir': join(tmpdir, 'html'),
         'repo': dvcs.path,
         'project': 'asv'})

    Publish.run(conf)

    # Check output
    assert isfile(join(tmpdir, 'html', 'index.html'))
    assert isfile(join(tmpdir, 'html', 'index.json'))
    assert isfile(join(tmpdir, 'html', 'asv.js'))
    assert isfile(join(tmpdir, 'html', 'asv.css'))
    assert not isdir(join(tmpdir, 'html', 'graphs', 'Cython', 'arch-x86_64',
                          'branch-some-branch'))
    index = util.load_json(join(tmpdir, 'html', 'index.json'))
    assert index['params']['branch'] == ['master']

    def check_file(branch):
        fn = join(tmpdir, 'html', 'graphs', 'Cython', 'arch-x86_64', 'branch-' + branch,
                  'cpu-Intel(R) Core(TM) i5-2520M CPU @ 2.50GHz (4 cores)',
                  'machine-cheetah', 'numpy-1.8', 'os-Linux (Fedora 20)', 'python-2.7', 'ram-8.2G',
                  'time_coordinates.time_latitude.json')
        data = util.load_json(fn, cleanup=False)
        if branch == 'master':
            # we set all dates positive for master above
            assert all(x[0] >= 0 for x in data)
        else:
            # we set some dates negative for some-branch above
            assert any(x[0] < 0 for x in data) and any(x[0] >= 0 for x in data)

    check_file("master")

    # Publish with branches set in the config
    conf.branches = ['master', 'some-branch']
    Publish.run(conf)

    # Check output
    check_file("master")
    check_file("some-branch")

    index = util.load_json(join(tmpdir, 'html', 'index.json'))
    assert index['params']['branch'] == ['master', 'some-branch']
Example #26
def _rebuild_basic_html(basedir):
    local = abspath(dirname(__file__))
    cwd = os.getcwd()

    if os.path.isdir(basedir):
        html_dir = join(basedir, 'html')
        dvcs = tools.Git(join(basedir, 'repo'))
        return html_dir, dvcs

    os.makedirs(basedir)
    os.chdir(basedir)
    try:
        machine_file = join(basedir, 'asv-machine.json')

        shutil.copyfile(join(local, 'asv-machine.json'),
                        machine_file)

        values = [[x]*2 for x in [0, 0, 0, 0, 0,
                                  1, 1, 1, 1, 1,
                                  3, 3, 3, 3, 3,
                                  2, 2, 2, 2, 2]]
        dvcs = tools.generate_test_repo(basedir, values)
        first_tested_commit_hash = dvcs.get_hash('master~14')

        repo_path = dvcs.path
        shutil.move(repo_path, join(basedir, 'repo'))
        dvcs = tools.Git(join(basedir, 'repo'))

        conf = config.Config.from_json({
            'env_dir': join(basedir, 'env'),
            'benchmark_dir': join(local, 'benchmark'),
            'results_dir': join(basedir, 'results_workflow'),
            'html_dir': join(basedir, 'html'),
            'repo': join(basedir, 'repo'),
            'dvcs': 'git',
            'project': 'asv',
            'matrix': {},
            'regressions_first_commits': {
                '.*': first_tested_commit_hash
            },
        })

        tools.run_asv_with_conf(conf, 'run', 'ALL',
                                '--show-stderr', '--quick', '--bench=params_examples.*track_.*',
                                _machine_file=machine_file)

        # Swap CPU info and obtain some results
        info = util.load_json(machine_file, api_version=1)

        # Put in parameter values that need quoting in file names
        info['orangutan']['cpu'] = 'Not /really/ <fast>'
        info['orangutan']['ram'] = '?'
        info['orangutan']['NUL'] = ''

        util.write_json(machine_file, info, api_version=1)

        tools.run_asv_with_conf(conf, 'run', 'master~10..', '--steps=3',
                                '--show-stderr', '--quick', '--bench=params_examples.*track_.*',
                                _machine_file=machine_file)

        # Output
        tools.run_asv_with_conf(conf, 'publish')

        shutil.rmtree(join(basedir, 'env'))
    finally:
        os.chdir(cwd)

    return conf.html_dir, dvcs
Example #27
File: test_web.py Project: eteq/asv
def _rebuild_basic_html(basedir):
    local = abspath(dirname(__file__))
    cwd = os.getcwd()

    if os.path.isdir(basedir):
        html_dir = join(basedir, 'html')
        dvcs = tools.Git(join(basedir, 'repo'))
        return html_dir, dvcs

    os.makedirs(basedir)
    os.chdir(basedir)
    try:
        machine_file = join(basedir, 'asv-machine.json')

        shutil.copyfile(join(local, 'asv-machine.json'), machine_file)

        values = [[x] * 2 for x in
                  [0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 3, 3, 3, 3, 3, 2, 2, 2, 2, 2]]
        dvcs = tools.generate_test_repo(basedir, values)
        first_tested_commit_hash = dvcs.get_hash('master~14')

        repo_path = dvcs.path
        shutil.move(repo_path, join(basedir, 'repo'))
        dvcs = tools.Git(join(basedir, 'repo'))

        conf = config.Config.from_json({
            'env_dir': join(basedir, 'env'),
            'benchmark_dir': join(local, 'benchmark'),
            'results_dir': join(basedir, 'results_workflow'),
            'html_dir': join(basedir, 'html'),
            'repo': join(basedir, 'repo'),
            'dvcs': 'git',
            'project': 'asv',
            'matrix': {},
            'regressions_first_commits': {
                '.*': first_tested_commit_hash
            },
        })

        tools.run_asv_with_conf(conf,
                                'run',
                                'ALL',
                                '--show-stderr',
                                '--quick',
                                '--bench=params_examples.*track_.*',
                                _machine_file=machine_file)

        # Swap CPU info and obtain some results
        info = util.load_json(machine_file, api_version=1)

        # Put in parameter values that need quoting in file names
        info['orangutan']['cpu'] = 'Not /really/ <fast>'
        info['orangutan']['ram'] = '?'
        info['orangutan']['NUL'] = ''

        util.write_json(machine_file, info, api_version=1)

        tools.run_asv_with_conf(conf,
                                'run',
                                'master~10..',
                                '--steps=3',
                                '--show-stderr',
                                '--quick',
                                '--bench=params_examples.*track_.*',
                                _machine_file=machine_file)

        # Output
        tools.run_asv_with_conf(conf, 'publish')

        shutil.rmtree(join(basedir, 'env'))
    finally:
        os.chdir(cwd)

    return conf.html_dir, dvcs
Example #28
def test_git_submodule(tmpdir):
    tmpdir = six.text_type(tmpdir)

    # State 0 (no submodule)
    dvcs = tools.generate_test_repo(tmpdir, values=[0], dvcs_type='git')
    sub_dvcs = tools.generate_test_repo(tmpdir, values=[0], dvcs_type='git')
    ssub_dvcs = tools.generate_test_repo(tmpdir, values=[0], dvcs_type='git')
    commit_hash_0 = dvcs.get_hash("master")

    # State 1 (one submodule)
    dvcs.run_git(['submodule', 'add', sub_dvcs.path, 'sub1'])
    dvcs.commit('Add sub1')
    commit_hash_1 = dvcs.get_hash("master")

    # State 2 (one submodule with sub-submodule)
    dvcs.run_git(['submodule', 'update', '--init'])
    sub1_dvcs = tools.Git(join(dvcs.path, 'sub1'))
    sub_dvcs.run_git(['submodule', 'add', ssub_dvcs.path, 'ssub1'])
    sub_dvcs.commit('Add ssub1')
    sub1_dvcs.run_git(['pull'])
    dvcs.run_git(['add', 'sub1'])
    dvcs.commit('Update sub1')
    sub1_hash_2 = sub1_dvcs.get_hash("master")
    commit_hash_2 = dvcs.get_hash("master")

    # State 3 (one submodule; sub-submodule removed)
    sub_dvcs.run_git(['rm', '-f', 'ssub1'])
    sub_dvcs.commit('Remove ssub1')
    sub1_dvcs.run_git(['pull'])
    dvcs.run_git(['add', 'sub1'])
    dvcs.commit('Update sub1 again')
    commit_hash_3 = dvcs.get_hash("master")

    # State 4 (back to one submodule with sub-submodule)
    sub1_dvcs.run_git(['checkout', sub1_hash_2])
    dvcs.run_git(['add', 'sub1'])
    dvcs.commit('Update sub1 3rd time')
    commit_hash_4 = dvcs.get_hash("master")

    # State 5 (remove final submodule)
    dvcs.run_git(['rm', '-f', 'sub1'])
    dvcs.commit('Remove sub1')
    commit_hash_5 = dvcs.get_hash("master")

    # Verify clean operation
    conf = config.Config()
    conf.branches = [None]
    conf.repo = dvcs.path
    conf.project = join(tmpdir, "repo")
    r = repo.get_repo(conf)

    checkout_dir = join(tmpdir, "checkout")

    # State 0
    r.checkout(checkout_dir, commit_hash_0)
    assert os.path.isfile(join(checkout_dir, 'README'))
    assert not os.path.exists(join(checkout_dir, 'sub1'))

    # State 1
    r.checkout(checkout_dir, commit_hash_1)
    assert os.path.isfile(join(checkout_dir, 'sub1', 'README'))
    assert not os.path.exists(join(checkout_dir, 'sub1', 'ssub1'))

    # State 2
    r.checkout(checkout_dir, commit_hash_2)
    assert os.path.isfile(join(checkout_dir, 'sub1', 'ssub1', 'README'))

    # State 3
    r.checkout(checkout_dir, commit_hash_3)
    assert os.path.isfile(join(checkout_dir, 'sub1', 'README'))
    assert not os.path.exists(join(checkout_dir, 'sub1', 'ssub1'))

    # State 4
    r.checkout(checkout_dir, commit_hash_4)
    assert os.path.isfile(join(checkout_dir, 'sub1', 'ssub1', 'README'))

    # State 4 (check clean -fdx runs in sub-submodules)
    garbage_filename = join(checkout_dir, 'sub1', 'ssub1', '.garbage')
    util.write_json(garbage_filename, {})
    assert os.path.isfile(garbage_filename)
    r.checkout(checkout_dir, commit_hash_4)
    assert not os.path.isfile(garbage_filename)

    # State 5
    r.checkout(checkout_dir, commit_hash_5)
    assert os.path.isfile(join(checkout_dir, 'README'))
    assert not os.path.isdir(join(checkout_dir, 'sub1'))
Example #29
def test_publish(tmpdir):
    tmpdir = six.text_type(tmpdir)
    os.chdir(tmpdir)

    result_dir = join(tmpdir, 'sample_results')
    os.makedirs(result_dir)
    os.makedirs(join(result_dir, 'cheetah'))

    # Synthesize history with two branches that both have commits
    result_files = [
        fn for fn in os.listdir(join(RESULT_DIR, 'cheetah'))
        if fn.endswith('.json') and fn != 'machine.json'
    ]
    master_values = list(range(len(result_files) * 2 // 3))
    branch_values = list(range(len(master_values), len(result_files)))
    dvcs = tools.generate_test_repo(
        tmpdir, master_values, 'git',
        [('master~6', 'some-branch', branch_values)])

    # Copy and modify result files, fixing commit hashes and setting result
    # dates to distinguish the two branches
    master_commits = dvcs.get_branch_hashes('master')
    only_branch = [
        x for x in dvcs.get_branch_hashes('some-branch')
        if x not in master_commits
    ]
    commits = master_commits + only_branch
    for k, item in enumerate(zip(result_files, commits)):
        fn, commit = item
        src = join(RESULT_DIR, 'cheetah', fn)
        dst = join(result_dir, 'cheetah', commit[:8] + fn[8:])
        data = util.load_json(src, cleanup=False)
        data['commit_hash'] = commit
        if commit in only_branch:
            data['date'] = -k
        else:
            data['date'] = k
        util.write_json(dst, data)

    shutil.copyfile(join(RESULT_DIR, 'benchmarks.json'),
                    join(result_dir, 'benchmarks.json'))
    shutil.copyfile(join(RESULT_DIR, 'cheetah', 'machine.json'),
                    join(result_dir, 'cheetah', 'machine.json'))

    # Publish the synthesized data
    conf = config.Config.from_json({
        'benchmark_dir': BENCHMARK_DIR,
        'results_dir': result_dir,
        'html_dir': join(tmpdir, 'html'),
        'repo': dvcs.path,
        'project': 'asv'
    })

    tools.run_asv_with_conf(conf, 'publish')

    # Check output
    assert isfile(join(tmpdir, 'html', 'index.html'))
    assert isfile(join(tmpdir, 'html', 'index.json'))
    assert isfile(join(tmpdir, 'html', 'asv.js'))
    assert isfile(join(tmpdir, 'html', 'asv.css'))
    assert not isdir(
        join(tmpdir, 'html', 'graphs', 'Cython', 'arch-x86_64',
             'branch-some-branch'))
    index = util.load_json(join(tmpdir, 'html', 'index.json'))
    assert index['params']['branch'] == ['master']

    def check_file(branch):
        fn = join(tmpdir, 'html', 'graphs', 'Cython', 'arch-x86_64',
                  'branch-' + branch,
                  'cpu-Intel(R) Core(TM) i5-2520M CPU @ 2.50GHz (4 cores)',
                  'machine-cheetah', 'numpy-1.8', 'os-Linux (Fedora 20)',
                  'python-2.7', 'ram-8.2G',
                  'time_coordinates.time_latitude.json')
        data = util.load_json(fn, cleanup=False)
        if branch == 'master':
            # we set all dates positive for master above
            assert all(x[0] >= 0 for x in data)
        else:
            # we set some dates negative for some-branch above
            assert any(x[0] < 0 for x in data) and any(x[0] >= 0 for x in data)

    check_file("master")

    # Publish with branches set in the config
    conf.branches = ['master', 'some-branch']
    tools.run_asv_with_conf(conf, 'publish')

    # Check output
    check_file("master")
    check_file("some-branch")

    index = util.load_json(join(tmpdir, 'html', 'index.json'))
    assert index['params']['branch'] == ['master', 'some-branch']