Code Example #1
File: cpan.py Project: conda/conda-build
def latest_pkg_version(pkg):
    '''
    :returns: the latest available version of the specified conda package
    '''
    r = Resolve(get_index())
    try:
        pkg_list = sorted(r.get_pkgs(MatchSpec(pkg)))
    except Exception:
        # no package in the index matched the spec
        pkg_list = None
    if pkg_list:
        pkg_version = parse_version(pkg_list[-1].version)
    else:
        pkg_version = None
    return pkg_version
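
A minimal usage sketch (hypothetical call; it assumes the names used above, Resolve, get_index, MatchSpec, and parse_version, are imported and a conda index is reachable):

# hypothetical usage: 'numpy' is just an example package name
latest = latest_pkg_version('numpy')
if latest is not None:
    print('latest numpy available in the index:', latest)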
Code Example #2
def collect_tasks(path,
                  folders,
                  matrix_base_dir,
                  steps=0,
                  test=False,
                  max_downstream=5):
    upload_config_path = os.path.join(matrix_base_dir, 'uploads.d')
    # the separate 'test' run type is disabled; only 'build' runs are collected
    runs = ['build']

    task_graph = nx.DiGraph()
    config = conda_build.api.Config()
    for run in runs:
        platforms = parse_platforms(matrix_base_dir, run)
        # loop over platforms here because each platform may have different dependencies
        # each platform will be submitted with a different label
        for platform in platforms:
            index_key = '-'.join([platform['platform'], str(platform['arch'])])
            config.channel_urls = get_upload_channels(upload_config_path,
                                                      index_key)
            conda_resolve = Resolve(
                get_build_index(subdir=index_key,
                                bldpkgs_dir=config.bldpkgs_dir)[0])
            # this graph is potentially different for platform and for build or test mode ("run")
            g = construct_graph(path,
                                worker=platform,
                                folders=folders,
                                run=run,
                                matrix_base_dir=matrix_base_dir,
                                conda_resolve=conda_resolve)
            # Apply the build label to any nodes that need (re)building or testing
            expand_run(g,
                       conda_resolve=conda_resolve,
                       worker=platform,
                       run=run,
                       steps=steps,
                       max_downstream=max_downstream,
                       recipes_dir=path,
                       matrix_base_dir=matrix_base_dir)
            # merge this graph with the main one
            task_graph = nx.compose(task_graph, g)
    return task_graph
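
A hypothetical invocation of the function above (the paths and package name are placeholders, not part of the original source; it assumes networkx is imported as nx, as in the snippet):

# hypothetical usage: 'recipes' holds recipe folders, 'config' the platform matrix
graph = collect_tasks('recipes', folders=['mypkg'], matrix_base_dir='config')
print(list(nx.topological_sort(graph)))  # one valid build order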
Code Example #3
def collect_tasks(path, folders, matrix_base_dir, channels=None, steps=0, test=False,
                  max_downstream=5, variant_config_files=None, platform_filters=None,
                  clobber_sections_file=None, append_sections_file=None, pass_throughs=None,
                  skip_existing=True):
    # the separate 'test' run type is disabled; only 'build' runs are collected
    runs = ['build']

    task_graph = nx.DiGraph()
    parsed_cli_args = _parse_python_numpy_from_pass_throughs(pass_throughs)
    config = conda_build.api.Config(clobber_sections_file=clobber_sections_file,
                                    append_sections_file=append_sections_file,
                                    skip_existing=skip_existing, **parsed_cli_args)
    platform_filters = ensure_list(platform_filters) if platform_filters else ['*']
    for run in runs:
        platforms = parse_platforms(matrix_base_dir, run, platform_filters)
        # loop over platforms here because each platform may have different dependencies
        # each platform will be submitted with a different label
        for platform in platforms:
            index_key = '-'.join([platform['platform'], str(platform['arch'])])
            config.channel_urls = channels or []
            config.variant_config_files = variant_config_files or []
            conda_resolve = Resolve(get_build_index(subdir=index_key,
                                                    bldpkgs_dir=config.bldpkgs_dir)[0])
            # this graph is potentially different for platform and for build or test mode ("run")
            g = construct_graph(path, worker=platform, folders=folders, run=run,
                                matrix_base_dir=matrix_base_dir, conda_resolve=conda_resolve,
                                config=config)
            # Apply the build label to any nodes that need (re)building or testing
            expand_run(g, config=config.copy(), conda_resolve=conda_resolve, worker=platform,
                       run=run, steps=steps, max_downstream=max_downstream, recipes_dir=path,
                       matrix_base_dir=matrix_base_dir)
            # merge this graph with the main one
            task_graph = nx.compose(task_graph, g)
    return task_graph
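
This variant leans on a private helper, _parse_python_numpy_from_pass_throughs, to lift pass-through CLI arguments into keyword arguments for conda_build.api.Config. Its implementation is not shown here; what follows is a hypothetical sketch of such a helper, an assumption rather than the project's actual code:

def _parse_python_numpy_from_pass_throughs(pass_throughs):
    # hypothetical sketch: pull --python/--numpy values out of
    # pass-through CLI arguments and return them as Config kwargs
    parsed = {}
    args = list(pass_throughs or [])
    for name in ('python', 'numpy'):
        flag = '--' + name
        if flag in args:
            position = args.index(flag)
            if position + 1 < len(args):
                parsed[name] = args[position + 1]
    return parsed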
Code Example #4
def get_dask_outputs(path,
                     packages=(),
                     filter_dirty=True,
                     git_rev='HEAD',
                     stop_rev=None,
                     steps=0,
                     visualize="",
                     test=False,
                     max_downstream=5,
                     **kwargs):
    checkout_rev = stop_rev or git_rev
    results = {}
    conda_build_test = '--{}test'.format("" if test else "no-")

    runs = ['test']
    # not testing means build and test
    if not test:
        runs.insert(0, 'build')

    output = []
    indexes = {}
    with checkout_git_rev(checkout_rev, path):
        for run in runs:
            platform_folder = '{}_platforms.d'.format(run)
            # loop over platforms here because each platform may have different dependencies
            # each platform will be submitted with a different label
            for platform in load_platforms(os.path.join(path, platform_folder)):
                index_key = '-'.join([platform['platform'], str(platform['arch'])])
                if index_key not in indexes:
                    indexes[index_key] = Resolve(get_index(platform=index_key))
                g = construct_graph(path,
                                    platform=platform['platform'],
                                    bits=platform['arch'],
                                    folders=packages,
                                    git_rev=git_rev,
                                    stop_rev=stop_rev,
                                    deps_type=run)
                # note that the graph is changed in place here.
                expand_run(g,
                           conda_resolve=indexes[index_key],
                           run=run,
                           steps=steps,
                           max_downstream=max_downstream)
                # sort build order, and also filter so that we have solely dirty nodes in subgraph
                subgraph, order = order_build(g, filter_dirty=filter_dirty)

                for node in order:
                    for configuration in expand_build_matrix(
                            node, path, label=platform['worker_label']):
                        configuration['variables']['TEST_MODE'] = conda_build_test
                        commit_sha = stop_rev or git_rev
                        dependencies = [
                            results[_platform_package_key(run, n, platform)]
                            for n in subgraph[node].keys() if n in subgraph
                        ]
                        key_name = _platform_package_key(run, node, platform)
                        # make the test run depend on the build run's completion
                        build_key_name = _platform_package_key(
                            "build", node, platform)
                        if build_key_name in results:
                            dependencies.append(results[build_key_name])

                        results[key_name] = delayed(_job, pure=True)(
                            configuration=configuration,
                            dependencies=dependencies,
                            commit_sha=commit_sha,
                            dask_key_name=key_name,
                            passthrough=visualize,
                            **kwargs)

                    output.append(results[key_name])
    return output
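
The delayed objects returned above are lazy; nothing builds until the caller hands them to dask. A hypothetical consumer (placeholder paths and package names):

from dask import compute

# hypothetical usage: schedule the build/test jobs for one package
outputs = get_dask_outputs('recipes', packages=('mypkg',))
compute(*outputs)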
Code Example #5
def collect_tasks(path,
                  folders,
                  matrix_base_dir,
                  channels=None,
                  steps=0,
                  test=False,
                  max_downstream=5,
                  variant_config_files=None,
                  platform_filters=None,
                  clobber_sections_file=None,
                  append_sections_file=None,
                  pass_throughs=None,
                  skip_existing=True,
                  build_config_vars=None):
    """ Return a graph of build tasks """
    # avoid a mutable default argument for build_config_vars
    build_config_vars = build_config_vars or {}
    task_graph = nx.DiGraph()
    parsed_cli_args = _parse_python_numpy_from_pass_throughs(pass_throughs)
    config = conda_build.api.Config(
        clobber_sections_file=clobber_sections_file,
        append_sections_file=append_sections_file,
        skip_existing=skip_existing,
        **parsed_cli_args,
    )
    platform_filters = ensure_list(platform_filters) if platform_filters else ['*']
    platforms = parse_platforms(matrix_base_dir, platform_filters,
                                build_config_vars)
    # loop over platforms here because each platform may have different dependencies
    # each platform will be submitted with a different label
    for platform in platforms:
        subdir = f"{platform['platform']}-{platform['arch']}"
        config.variants = get_package_variants(path, config,
                                               platform.get('variants'))
        config.channel_urls = channels or []
        config.variant_config_files = variant_config_files or []
        conda_resolve = Resolve(
            get_build_index(subdir=subdir,
                            bldpkgs_dir=config.bldpkgs_dir,
                            channel_urls=channels)[0])
        # this graph is potentially different for platform and for build or test mode ("run")
        graph = construct_graph(
            path,
            worker=platform,
            folders=folders,
            run="build",
            matrix_base_dir=matrix_base_dir,
            conda_resolve=conda_resolve,
            config=config,
        )
        # Apply the build label to any nodes that need (re)building or testing
        expand_run(
            graph,
            config=config.copy(),
            conda_resolve=conda_resolve,
            worker=platform,
            run="build",
            steps=steps,
            max_downstream=max_downstream,
            recipes_dir=path,
            matrix_base_dir=matrix_base_dir,
        )
        # merge this graph with the main one
        task_graph = nx.compose(task_graph, graph)
    collapse_noarch_python_nodes(task_graph)
    return task_graph
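
For reference, the subdir key built at the top of the platform loop follows conda's platform-arch naming; a toy check with illustrative values:

# illustrative only: a worker dict shaped the way the loop above expects
platform = {'platform': 'linux', 'arch': '64'}
subdir = f"{platform['platform']}-{platform['arch']}"
assert subdir == 'linux-64'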