Example 1
def update_me():
    """
    Update the webservice on Heroku by pushing a commit to this repo.

    Compares the installed versions of a fixed set of packages against the
    latest versions available on the ``conda-forge`` channel; if any are out
    of date, clones the ``conda-forge-webservices`` repo and pushes an empty
    commit to ``master`` to trigger a redeploy.
    """
    pkgs = ["conda-build", "conda-smithy", "conda-forge-pinning"]
    installed_vers = get_installed_version(root_dir, pkgs)
    index = get_index(channel_urls=['conda-forge'])
    r = Resolve(index)

    # package name -> newer version available on the channel
    to_install = {}

    for pkg in pkgs:
        available_versions = [p.version for p in r.get_pkgs(MatchSpec(pkg))]
        available_versions = sorted(available_versions, key=VersionOrder)
        latest_version = available_versions[-1]
        print(latest_version, installed_vers[pkg])
        if VersionOrder(latest_version) > VersionOrder(installed_vers[pkg]):
            to_install[pkg] = latest_version

    # Everything up to date: nothing to push.
    if not to_install:
        return

    with tmp_directory() as tmp_dir:
        repo_name = "conda-forge-webservices"
        clone_dir = os.path.join(tmp_dir, repo_name)
        url = "https://{}@github.com/conda-forge/{}.git".format(
            os.environ['GH_TOKEN'], repo_name)

        repo = Repo.clone_from(url, clone_dir)
        msg_vers = ", ".join(
            ["{}={}".format(k, v) for k, v in to_install.items()])
        author = Actor("conda-forge-admin", "*****@*****.**")
        # BUG FIX: ``author`` was constructed but never passed to the commit,
        # so commits were attributed to whatever the local git config held.
        repo.index.commit(
            "Empty commit to rebuild for {}".format(msg_vers),
            author=author,
            committer=author,
        )
        repo.git.push("origin", "master")
Example 2
def distribution_exists_on_channel(binstar_cli,
                                   meta,
                                   fname,
                                   owner,
                                   channel='main'):
    """
    Determine whether a distribution exists on a specific channel.

    Note from @pelson: As far as I can see, there is no easy way to do this on binstar.

    """
    # Only look at the requested label of the owner's channel.
    label_url = '/'.join([owner, 'label', channel])
    basename = os.path.basename(fname)

    channel_index = get_index([label_url],
                              prepend=False,
                              use_cache=False)

    # Missing filename (or missing 'subdir' on the record) means the
    # distribution is not on this channel.
    try:
        return channel_index[basename]['subdir'] == conda_subdir
    except KeyError:
        return False
Example 3
def main():
    """Get current versions from the heroku app and update if they are old.

    Note this script runs on CircleCI, not on the heroku app.
    """
    # keep these imports here to protect the webservice from memory errors
    # due to conda
    from conda_build.conda_interface import (
        VersionOrder, MatchSpec, get_index, Resolve)

    resp = requests.get(
        "https://conda-forge.herokuapp.com/conda-webservice-update/versions")
    resp.raise_for_status()
    installed_vers = resp.json()

    index = get_index(channel_urls=['conda-forge'])
    r = Resolve(index)

    # to_install: only the out-of-date packages (these trigger a redeploy);
    # final_install: the full package -> version record written to the repo.
    to_install = {}
    final_install = {}

    for pkg in PKGS:
        available_versions = [p.version for p in r.get_pkgs(MatchSpec(pkg))]
        available_versions = sorted(available_versions, key=VersionOrder)
        latest_version = available_versions[-1]
        # BUG FIX: the original eagerly %-formatted the message with ``pkg``
        # but then also passed two lazy args with no placeholders for them,
        # which makes logging's record formatting fail. Use one lazy format
        # string with a placeholder per argument instead.
        LOGGER.info(
            "%s|latest|installed: %s %s",
            pkg, latest_version, installed_vers[pkg])
        if VersionOrder(latest_version) != VersionOrder(installed_vers[pkg]):
            to_install[pkg] = latest_version
            final_install[pkg] = latest_version
        else:
            final_install[pkg] = installed_vers[pkg]

    if to_install:
        tmpdir = None
        try:
            tmpdir = tempfile.mkdtemp('_recipe')

            repo_name = "conda-forge-webservices"
            clone_dir = os.path.join(tmpdir, repo_name)
            url = "https://{}@github.com/conda-forge/{}.git".format(
                os.environ['GH_TOKEN'], repo_name)

            # shallow clone: we only need HEAD to add one commit
            repo = Repo.clone_from(url, clone_dir, depth=1)

            # keep a record around
            pth = os.path.join(clone_dir, "pkg_versions.json")
            with open(pth, "w") as fp:
                json.dump(final_install, fp)
            # BUG FIX: GitPython's IndexFile.add expects an iterable of
            # paths, not a bare string.
            repo.index.add([pth])

            msg_vers = ", ".join(["{}={}".format(k, v) for k, v in to_install.items()])
            repo.index.commit("redeploy for '%s'" % msg_vers)
            repo.git.push("origin", "master")

        finally:
            if tmpdir is not None:
                shutil.rmtree(tmpdir)
Example 4
def test_installable(channel='defaults'):
    """Attempt a dry-run install of every package on ``channel``.

    Iterates all supported platforms, resolving an install for each package
    record in the channel index.  Returns True only if every resolution
    succeeded.
    """
    all_ok = True
    log = get_logger(__name__)
    py_build_re = re.compile(r'py(\d)(\d)')
    platforms = ['osx-64', 'linux-32', 'linux-64', 'win-32', 'win-64']
    for platform in platforms:
        log.info("######## Testing platform %s ########", platform)
        channels = [channel]
        index = get_index(channel_urls=channels,
                          prepend=False,
                          platform=platform)
        for _, rec in iteritems(index):
            # If we give channels at the command line, only look at
            # packages from those channels (not defaults).
            if channel != 'defaults':
                if rec.get('schannel', 'defaults') == 'defaults':
                    continue
            name = rec['name']
            # conda can only be installed in the root environment
            if name in {'conda', 'conda-build'}:
                continue
            # 'virtual' feature records that conda adds to the index for
            # the solver should be ignored here
            if name.endswith('@'):
                continue
            # Don't fail just because the package is a different version of
            # Python than the default.  We should probably check depends
            # rather than the build string.
            build = rec['build']
            match = py_build_re.search(build)
            assert match if 'py' in build else True, build
            additional_packages = []
            if match:
                additional_packages = [
                    'python=%s.%s' % (match.group(1), match.group(2))
                ]

            version = rec['version']
            log.info('Testing %s=%s', name, version)

            try:
                install_steps = check_install(
                    [name + '=' + version] + additional_packages,
                    channel_urls=channels,
                    prepend=False,
                    platform=platform)
                if not install_steps:
                    all_ok = False
            except KeyboardInterrupt:
                raise
            # sys.exit raises an exception that doesn't subclass from Exception
            except BaseException as e:
                all_ok = False
                log.error("FAIL: %s %s on %s with %s (%s)", name, version,
                          platform, additional_packages, e)
    return all_ok
Example 5
def check_install(packages, platform=None, channel_urls=(), prepend=True,
                  minimal_hint=False):
    """Resolve (but do not execute) an install of ``packages``.

    Creates a throwaway prefix, solves the install actions for the given
    specs against ``channel_urls``, displays them, and returns the actions.
    The scratch prefix is always removed.

    :param packages: package specs as strings (e.g. ``['numpy=1.11']``).
    :param platform: conda subdir (e.g. ``'linux-64'``); None for current.
    :param channel_urls: channels to resolve against.
    :param prepend: whether default channels are prepended to the index.
    :param minimal_hint: passed through to ``install_actions``.
    :returns: the resolved actions dict.
    """
    prefix = tempfile.mkdtemp('conda')
    try:
        specs = specs_from_args(packages)
        index = get_index(channel_urls=channel_urls, prepend=prepend,
                          platform=platform, prefix=prefix)
        actions = install_actions(prefix, index, specs, pinned=False,
                                  minimal_hint=minimal_hint)
        display_actions(actions, index)
        # ``finally`` below still runs before this value is delivered.
        return actions
    finally:
        rm_rf(prefix)
    # BUG FIX: removed an unreachable trailing ``return None`` -- every
    # path inside the try either returns or raises.
Example 6
def latest_pkg_version(pkg):
    '''
    :returns: the latest version of the specified conda package available,
        or None if the package cannot be resolved from the index.
    '''
    r = Resolve(get_index())
    try:
        pkg_list = sorted(r.get_pkgs(MatchSpec(pkg)))
    except Exception:
        # BUG FIX: was a bare ``except:``, which also swallowed
        # KeyboardInterrupt and SystemExit.
        pkg_list = None
    if pkg_list:
        pkg_version = parse_version(pkg_list[-1].version)
    else:
        pkg_version = None
    return pkg_version
Example 7 (duplicate of Example 6)
def latest_pkg_version(pkg):
    '''
    :returns: the latest version of the specified conda package available,
        or None if the package cannot be resolved from the index.
    '''
    r = Resolve(get_index())
    try:
        pkg_list = sorted(r.get_pkgs(MatchSpec(pkg)))
    except Exception:
        # BUG FIX: was a bare ``except:``, which also swallowed
        # KeyboardInterrupt and SystemExit.
        pkg_list = None
    if pkg_list:
        pkg_version = parse_version(pkg_list[-1].version)
    else:
        pkg_version = None
    return pkg_version
Example 8 (duplicate of Example 5)
def check_install(packages, platform=None, channel_urls=(), prepend=True,
                  minimal_hint=False):
    """Resolve (but do not execute) an install of ``packages``.

    Creates a throwaway prefix, solves the install actions for the given
    specs against ``channel_urls``, displays them, and returns the actions.
    The scratch prefix is always removed.

    :param packages: package specs as strings (e.g. ``['numpy=1.11']``).
    :param platform: conda subdir (e.g. ``'linux-64'``); None for current.
    :param channel_urls: channels to resolve against.
    :param prepend: whether default channels are prepended to the index.
    :param minimal_hint: passed through to ``install_actions``.
    :returns: the resolved actions dict.
    """
    prefix = tempfile.mkdtemp('conda')
    try:
        specs = specs_from_args(packages)
        index = get_index(channel_urls=channel_urls, prepend=prepend,
                          platform=platform, prefix=prefix)
        actions = install_actions(prefix, index, specs, pinned=False,
                                  minimal_hint=minimal_hint)
        display_actions(actions, index)
        # ``finally`` below still runs before this value is delivered.
        return actions
    finally:
        rm_rf(prefix)
    # BUG FIX: removed an unreachable trailing ``return None`` -- every
    # path inside the try either returns or raises.
Example 9 (duplicate of Example 4)
def test_installable(channel='defaults'):
    """Attempt a dry-run install of every package on ``channel``.

    Iterates all supported platforms, resolving an install for each package
    record in the channel index.  Returns True only if every resolution
    succeeded.
    """
    all_ok = True
    log = get_logger(__name__)
    py_build_re = re.compile(r'py(\d)(\d)')
    platforms = ['osx-64', 'linux-32', 'linux-64', 'win-32', 'win-64']
    for platform in platforms:
        log.info("######## Testing platform %s ########", platform)
        channels = [channel]
        index = get_index(channel_urls=channels, prepend=False,
                          platform=platform)
        for _, rec in iteritems(index):
            # If we give channels at the command line, only look at
            # packages from those channels (not defaults).
            if channel != 'defaults':
                if rec.get('schannel', 'defaults') == 'defaults':
                    continue
            name = rec['name']
            # conda can only be installed in the root environment
            if name in {'conda', 'conda-build'}:
                continue
            # 'virtual' feature records that conda adds to the index for
            # the solver should be ignored here
            if name.endswith('@'):
                continue
            # Don't fail just because the package is a different version of
            # Python than the default.  We should probably check depends
            # rather than the build string.
            build = rec['build']
            match = py_build_re.search(build)
            assert match if 'py' in build else True, build
            additional_packages = []
            if match:
                additional_packages = [
                    'python=%s.%s' % (match.group(1), match.group(2))
                ]

            version = rec['version']
            log.info('Testing %s=%s', name, version)

            try:
                install_steps = check_install(
                    [name + '=' + version] + additional_packages,
                    channel_urls=channels, prepend=False,
                    platform=platform)
                if not install_steps:
                    all_ok = False
            except KeyboardInterrupt:
                raise
            # sys.exit raises an exception that doesn't subclass from Exception
            except BaseException as e:
                all_ok = False
                log.error("FAIL: %s %s on %s with %s (%s)", name, version,
                          platform, additional_packages, e)
    return all_ok
Example 10
def _build_cdt_meta(recipes, dist_arch_slug):
    """Collect CDT metadata for the recipes matching ``dist_arch_slug``.

    Fetches the conda-forge main-label index once, keeps only the noarch
    records keyed by filename, then builds a node -> attrs mapping for every
    matching recipe.
    """
    print("getting conda-forge/label/main channel index...", flush=True)
    channel_url = '/'.join(['conda-forge', 'label', 'main'])
    dist_index = get_index([channel_url], prepend=False, use_cache=False)

    # Restrict the index to noarch records, keyed by package filename.
    channel_index = {}
    for dist_key, record in dist_index.items():
        if record['subdir'] == 'noarch':
            channel_index[dist_key.to_filename()] = record

    cdt_meta = {}
    progress = tqdm.tqdm(recipes, desc='building CDT metadata', ncols=80)
    for recipe in progress:
        # Skip recipes that are not for this distro/arch combination.
        if dist_arch_slug not in os.path.basename(recipe):
            continue
        node, attrs = _get_recipe_attrs(recipe, channel_index)
        cdt_meta[node] = attrs
    return cdt_meta
Example 11
def get_dask_outputs(path,
                     packages=(),
                     filter_dirty=True,
                     git_rev='HEAD',
                     stop_rev=None,
                     steps=0,
                     visualize="",
                     test=False,
                     max_downstream=5,
                     **kwargs):
    """Build dask ``delayed`` jobs for building/testing the recipes under *path*.

    With ``test=False`` both a 'build' and a 'test' run are scheduled;
    with ``test=True`` only 'test'.  For each run and platform a recipe
    graph is constructed and expanded, then one delayed ``_job`` is wired
    up per node in build order, with dependencies between jobs (and from
    each test job to its corresponding build job).

    Returns a list of delayed job outputs, one appended per run/platform
    iteration.

    NOTE(review): ``output.append(results[key_name])`` sits at the platform
    loop level, so only the job of the *last* node of each platform is
    appended, and ``key_name`` is unbound if ``order`` is empty -- confirm
    this is intentional.
    """
    checkout_rev = stop_rev or git_rev
    # key_name -> delayed job, shared across runs so test jobs can
    # depend on build jobs.
    results = {}
    # conda-build CLI flag: '--test' when only testing, '--no-test' otherwise.
    conda_build_test = '--{}test'.format("" if test else "no-")

    runs = ['test']
    # not testing means build and test
    if not test:
        runs.insert(0, 'build')

    output = []
    # One Resolve per platform-arch, cached since building it is expensive.
    indexes = {}
    with checkout_git_rev(checkout_rev, path):
        for run in runs:
            platform_folder = '{}_platforms.d'.format(run)
            # loop over platforms here because each platform may have different dependencies
            # each platform will be submitted with a different label
            for platform in load_platforms(os.path.join(path,
                                                        platform_folder)):
                index_key = '-'.join(
                    [platform['platform'],
                     str(platform['arch'])])
                if index_key not in indexes:
                    indexes[index_key] = Resolve(get_index(platform=index_key))
                g = construct_graph(path,
                                    platform=platform['platform'],
                                    bits=platform['arch'],
                                    folders=packages,
                                    git_rev=git_rev,
                                    stop_rev=stop_rev,
                                    deps_type=run)
                # note that the graph is changed in place here.
                expand_run(g,
                           conda_resolve=indexes[index_key],
                           run=run,
                           steps=steps,
                           max_downstream=max_downstream)
                # sort build order, and also filter so that we have solely dirty nodes in subgraph
                subgraph, order = order_build(g, filter_dirty=filter_dirty)

                for node in order:
                    for configuration in expand_build_matrix(
                            node, path, label=platform['worker_label']):
                        configuration['variables'][
                            'TEST_MODE'] = conda_build_test
                        commit_sha = stop_rev or git_rev
                        # Jobs this node depends on within the same run.
                        dependencies = [
                            results[_platform_package_key(run, n, platform)]
                            for n in subgraph[node].keys() if n in subgraph
                        ]
                        key_name = _platform_package_key(run, node, platform)
                        # make the test run depend on the build run's completion
                        build_key_name = _platform_package_key(
                            "build", node, platform)
                        if build_key_name in results:
                            dependencies.append(results[build_key_name])

                        results[key_name] = delayed(_job, pure=True)(
                            configuration=configuration,
                            dependencies=dependencies,
                            commit_sha=commit_sha,
                            dask_key_name=key_name,
                            passthrough=visualize,
                            **kwargs)

                    output.append(results[key_name])
    return output