Example 1
def _create_recipes(repository, recipes, variant_config_files, variants,
                    channels):
    """
    Create a BuildCommand for each recipe within a repository. The BuildCommand
    will have all of the information needed to build the recipe, as well as to
    create the dependency tree.
    """
    saved_working_directory = os.getcwd()
    os.chdir(repository)

    config_data, _ = build_feedstock.load_package_config()
    outputs = []
    for recipe in config_data.get('recipes', []):
        if recipes and recipe.get('name') not in recipes:
            continue
        packages, run_deps, host_deps, build_deps, test_deps = _get_package_dependencies(
            recipe.get('path'), variant_config_files, variants)
        outputs.append(
            BuildCommand(recipe=recipe.get('name', None),
                         repository=repository,
                         packages=packages,
                         python=variants['python'],
                         build_type=variants['build_type'],
                         mpi_type=variants['mpi_type'],
                         run_dependencies=run_deps,
                         host_dependencies=host_deps,
                         build_dependencies=build_deps,
                         test_dependencies=test_deps,
                         channels=channels if channels else []))

    os.chdir(saved_working_directory)
    return outputs
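
A minimal usage sketch, run in the same module context as the function above and assuming BuildCommand exposes its keyword arguments as attributes; the repository path, recipe name, and variant values are placeholders.

# Hypothetical call; paths, names, and variant values are made up.
variants = {'python': '3.9', 'build_type': 'cpu', 'mpi_type': 'openmpi'}
recipe_commands = _create_recipes(repository='./example-feedstock',
                                  recipes=['example-recipe'],
                                  variant_config_files=['conda_build_config.yaml'],
                                  variants=variants,
                                  channels=['defaults'])
for command in recipe_commands:
    print(command.recipe, command.run_dependencies)
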
Example 2
def _create_recipes(repository, recipes, variant_config_files, variants,
                    channels):
    """
    Create a recipe dictionary for each recipe within a repository. The dictionary
    will have all of the information needed to build the recipe, as well as to
    create the dependency tree.
    """
    saved_working_directory = os.getcwd()
    os.chdir(repository)

    config_data, _ = build_feedstock.load_package_config()
    outputs = []
    for recipe in config_data.get('recipes', []):
        if recipes and recipe.get('name') not in recipes:
            continue
        packages, run_deps, host_deps, build_deps, test_deps = _get_package_dependencies(
            recipe.get('path'), variant_config_files, variants)
        outputs.append({
            'recipe': recipe.get('name', None),
            'repository': repository,
            'packages': packages,
            'run_dependencies': run_deps,
            'host_dependencies': host_deps,
            'build_dependencies': build_deps,
            'test_dependencies': test_deps,
            'channels': channels if channels else []
        })

    os.chdir(saved_working_directory)
    return outputs
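
This variant returns plain dictionaries rather than BuildCommand objects; one entry of the returned list might look like the following (all values are hypothetical).

# Hypothetical shape of a single returned entry.
recipe_info = {
    'recipe': 'example-recipe',
    'repository': './example-feedstock',
    'packages': ['example-package'],
    'run_dependencies': ['numpy'],
    'host_dependencies': ['python'],
    'build_dependencies': ['cmake'],
    'test_dependencies': ['pytest'],
    'channels': []
}
print(recipe_info['recipe'], recipe_info['run_dependencies'])
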
Example 3
def _get_configs():
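    """
    Load the feedstock's package config and a conda_build config whose variant
    config files include the default config plus any recipe-specific config.
    """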
    build_config_data, _ = build_feedstock.load_package_config()
    config = get_or_merge_config(None)
    config.variant_config_files = [utils.DEFAULT_CONDA_BUILD_CONFIG]
    config.verbose = False
    recipe_conda_build_config = build_feedstock.get_conda_build_config()
    if recipe_conda_build_config:
        config.variant_config_files.append(recipe_conda_build_config)
    return build_config_data, config
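
A sketch of how the returned values could feed conda_build.api.render, mirroring the render calls in Examples 5 and 6 (here without an explicit variants argument); the driver loop itself is hypothetical.

# Hypothetical driver; mirrors the render loop used in Examples 5 and 6.
import conda_build.api

build_config_data, config = _get_configs()
for recipe in build_config_data.get('recipes', []):
    metas = conda_build.api.render(recipe['path'],
                                   config=config,
                                   bypass_env_check=True,
                                   finalize=False)
    for meta, _, _ in metas:
        print(meta.meta['package']['name'], meta.meta['package']['version'])
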
Example 4
def _create_commands(repository, runtime_package, recipes,
                     variant_config_files, variants, channels, test_labels):  #pylint: disable=too-many-locals,too-many-arguments
    """
    Returns:
        A list of BuildCommands for each recipe within a repository.
        A list of TestCommands for an entire repository.
    """
    saved_working_directory = os.getcwd()
    os.chdir(repository)

    config_data, _ = build_feedstock.load_package_config(variants=variants)
    combined_config_files = list(variant_config_files)  # copy so the caller's list is not mutated

    feedstock_conda_build_config_file = build_feedstock.get_conda_build_config()
    if feedstock_conda_build_config_file:
        combined_config_files.append(feedstock_conda_build_config_file)
    build_commands = []
    for recipe in config_data.get('recipes', []):
        if recipes and recipe.get('name') not in recipes:
            continue
        packages, run_deps, host_deps, build_deps, test_deps, output_files = _get_package_dependencies(
            recipe.get('path'), combined_config_files, variants)

        build_commands.append(
            BuildCommand(recipe=recipe.get('name', None),
                         repository=repository,
                         packages=packages,
                         runtime_package=runtime_package,
                         output_files=output_files,
                         python=variants['python'],
                         build_type=variants['build_type'],
                         mpi_type=variants['mpi_type'],
                         cudatoolkit=variants['cudatoolkit'],
                         run_dependencies=run_deps,
                         host_dependencies=host_deps,
                         build_dependencies=build_deps,
                         test_dependencies=test_deps,
                         channels=channels if channels else []))

    variant_copy = dict(variants)
    if test_labels:
        for test_label in test_labels:
            variant_copy[test_label] = True
    test_commands = test_feedstock.gen_test_commands(variants=variant_copy)

    os.chdir(saved_working_directory)
    return build_commands, test_commands
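
A small illustration of the test_labels handling above; the label names and variant values are made up.

# Hypothetical: each requested test label becomes a truthy key on a copy of the
# variants dict, which is then passed to test_feedstock.gen_test_commands.
variants = {'python': '3.9', 'build_type': 'cuda', 'mpi_type': 'openmpi',
            'cudatoolkit': '11.2'}
variant_copy = dict(variants)
for test_label in ['long_tests', 'distributed']:
    variant_copy[test_label] = True
# variant_copy now also contains {'long_tests': True, 'distributed': True}
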
Example 5
def main(arg_strings=None):
    '''
    Entry function.
    '''
    parser = make_parser()
    args = parser.parse_args(arg_strings)
    variants = utils.make_variants(args.python_versions, args.build_types,
                                   args.mpi_types)

    build_config_data, _ = build_feedstock.load_package_config()

    pr_branch = utils.get_output("git log -1 --format='%H'")
    utils.run_and_log("git remote set-head origin -a")
    default_branch = utils.get_output(
        "git symbolic-ref refs/remotes/origin/HEAD | sed 's@^refs/remotes/origin/@@'"
    )

    config = get_or_merge_config(None)
    config.variant_config_files = [utils.DEFAULT_CONDA_BUILD_CONFIG]

    recipe_conda_build_config = build_feedstock.get_conda_build_config()
    if recipe_conda_build_config:
        config.variant_config_files.append(recipe_conda_build_config)
    config.verbose = False
    variant_build_results = dict()
    for variant in variants:
        utils.run_and_log("git checkout {}".format(default_branch))
        master_build_numbers = get_build_numbers(build_config_data, config,
                                                 variant)

        utils.run_and_log("git checkout {}".format(pr_branch))
        current_pr_build_numbers = get_build_numbers(build_config_data, config,
                                                     variant)

        print("Build Info for Variant:   {}".format(variant))
        print("Current PR Build Info:    {}".format(current_pr_build_numbers))
        print("Master Branch Build Info: {}".format(master_build_numbers))

        #No build numbers can go backwards without a version change.
        for package in master_build_numbers:
            if package in current_pr_build_numbers and current_pr_build_numbers[
                    package]["version"] == master_build_numbers[package][
                        "version"]:
                assert int(current_pr_build_numbers[package]["number"]) >= int(
                    master_build_numbers[package]["number"]
                ), "If the version doesn't change, the build number can't be reduced."

        #If packages are added or removed, don't require a version change
        if set(master_build_numbers.keys()) != set(
                current_pr_build_numbers.keys()):
            return

        #At least one package needs to increase the build number or change the version.
        checks = [
            current_pr_build_numbers[package]["version"] !=
            master_build_numbers[package]["version"]
            or int(current_pr_build_numbers[package]["number"]) > int(
                master_build_numbers[package]["number"])
            for package in master_build_numbers
        ]
        variant_build_results[utils.variant_string(
            variant["python"], variant["build_type"],
            variant["mpi_type"])] = any(checks)
    assert any(
        variant_build_results.values()
    ), "At least one package needs to increase the build number or change the version in at least one variant."
Example 6
def main(arg_strings=None):
    '''
    Entry function.
    '''
    parser = make_parser()
    args = parser.parse_args(arg_strings)
    variants = utils.make_variants(args.python_versions, args.build_types,
                                   args.mpi_types)

    build_config_data, _ = build_feedstock.load_package_config()

    pr_branch = utils.get_output("git log -1 --format='%H'")
    utils.run_and_log("git remote set-head origin -a")
    default_branch = utils.get_output(
        "git symbolic-ref refs/remotes/origin/HEAD | sed 's@^refs/remotes/origin/@@'"
    )

    config = get_or_merge_config(None)
    config.variant_config_files = [utils.DEFAULT_CONDA_BUILD_CONFIG]
    recipe_conda_build_config = os.path.join(os.getcwd(), "config",
                                             "conda_build_config.yaml")
    if os.path.exists(recipe_conda_build_config):
        config.variant_config_files += [recipe_conda_build_config]
    config.verbose = False

    utils.run_and_log("git checkout {}".format(default_branch))
    master_build_numbers = dict()
    for recipe in build_config_data["recipes"]:
        metas = conda_build.api.render(recipe['path'],
                                       config=config,
                                       variants=variants[0],
                                       bypass_env_check=True,
                                       finalize=False)
        for meta, _, _ in metas:
            master_build_numbers[meta.meta['package']['name']] = {
                "version": meta.meta['package']['version'],
                "number": meta.meta['build']['number']
            }

    utils.run_and_log("git checkout {}".format(pr_branch))
    current_pr_build_numbers = dict()
    for recipe in build_config_data["recipes"]:
        metas = conda_build.api.render(recipe['path'],
                                       config=config,
                                       variants=variants[0],
                                       bypass_env_check=True,
                                       finalize=False)
        for meta, _, _ in metas:
            current_pr_build_numbers[meta.meta['package']['name']] = {
                "version": meta.meta['package']['version'],
                "number": meta.meta['build']['number']
            }

    print("Current PR Build Info:    ", current_pr_build_numbers)
    print("Master Branch Build Info: ", master_build_numbers)

    #No build numbers can go backwards without a version change.
    for package in master_build_numbers:
        if package in current_pr_build_numbers and current_pr_build_numbers[
                package]["version"] == master_build_numbers[package]["version"]:
            assert int(current_pr_build_numbers[package]["number"]) >= int(
                master_build_numbers[package]["number"]
            ), "If the version doesn't change, the build number can't be reduced."

    #If packages are added or removed, don't require a version change
    if set(master_build_numbers.keys()) != set(
            current_pr_build_numbers.keys()):
        return

    #At least one package needs to increase the build number or change the version.
    checks = [
        current_pr_build_numbers[package]["version"] !=
        master_build_numbers[package]["version"]
        or int(current_pr_build_numbers[package]["number"]) > int(
            master_build_numbers[package]["number"])
        for package in master_build_numbers
    ]
    assert any(
        checks
    ), "At least one package needs to increase the build number or change the version."