Example #1
    def __init__(self,
                 env_config_files,
                 python_versions,
                 build_types,
                 mpi_types,
                 repository_folder="./",
                 git_location=utils.DEFAULT_GIT_LOCATION,
                 git_tag_for_env="master",
                 conda_build_config=utils.DEFAULT_CONDA_BUILD_CONFIG):

        self._env_config_files = env_config_files
        self._repository_folder = repository_folder
        self._git_location = git_location
        self._git_tag_for_env = git_tag_for_env
        self._conda_build_config = conda_build_config
        self._external_dependencies = dict()

        # Create a dependency tree that includes recipes for every combination
        # of variants.
        self._possible_variants = utils.make_variants(python_versions,
                                                      build_types, mpi_types)
        self.build_commands = []
        for variant in self._possible_variants:
            try:
                variant_recipes, external_deps = self._create_all_recipes(
                    variant)
            except OpenCEError as exc:
                raise OpenCEError(Error.CREATE_BUILD_TREE, exc.msg) from exc
            self._external_dependencies[str(variant)] = external_deps
            # Add dependency tree information to the packages list
            _add_build_command_dependencies(variant_recipes,
                                            len(self.build_commands))
            self.build_commands += variant_recipes
        self._detect_cycle()
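Every example in this collection builds its variant matrix with `utils.make_variants`, whose definition is not shown. A minimal hypothetical sketch of such a helper, inferred only from the call sites above and below (comma-separated strings or lists in, one dict per combination out, keyed by 'python', 'build_type', 'mpi_type', and optionally 'cudatoolkit'), might look like this; it is a stand-in, not the real Open-CE implementation:

import itertools

def make_variants_sketch(python_versions, build_types, mpi_types, cuda_versions=None):
    '''Hypothetical stand-in for utils.make_variants, inferred from its call sites.'''
    def as_list(value):
        # The examples pass either "3.6,3.7"-style strings or already-split lists.
        return value.split(',') if isinstance(value, str) else list(value)

    keys = ['python', 'build_type', 'mpi_type']
    dimensions = [as_list(python_versions), as_list(build_types), as_list(mpi_types)]
    if cuda_versions is not None:
        # Example #12 reads variant['cudatoolkit'] even after a three-argument call,
        # so the real helper likely fills in a default CUDA version; this sketch
        # simply keeps the key optional.
        keys.append('cudatoolkit')
        dimensions.append(as_list(cuda_versions))

    return [dict(zip(keys, combo)) for combo in itertools.product(*dimensions)]

# make_variants_sketch("3.6,3.7", "cpu", "openmpi") would yield two dicts:
# {'python': '3.6', 'build_type': 'cpu', 'mpi_type': 'openmpi'} and the 3.7 equivalent.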
Example #2
def test_get_repo_for_nonexisting_patch(mocker):
    '''
    Test for `_get_repo` that verifies an exception is thrown when patch application fails
    '''
    env_file = os.path.join(test_dir, 'test-env3.yaml')
    mock_build_tree = TestBuildTree([env_file], "3.6", "cpu", "openmpi",
                                    "10.2")

    dir_tracker = helpers.DirTracker()
    mocker.patch('os.getcwd', side_effect=dir_tracker.mocked_getcwd)
    mocker.patch('os.chdir', side_effect=dir_tracker.validate_chdir)
    mocker.patch('os.system',
                 side_effect=(lambda x: helpers.validate_cli(
                     x,
                     expect=["git apply"],
                     ignore=["git clone", "git checkout"],
                     retval=1)))

    possible_variants = utils.make_variants("3.6", "cpu", "openmpi", "10.2")
    for variant in possible_variants:
        # test-env3.yaml has defined "patches".
        env_config_data_list = env_config.load_env_config_files([env_file],
                                                                variant)
        for env_config_data in env_config_data_list:
            packages = env_config_data.get(env_config.Key.packages.name, [])
            for package in packages:

                # "package211" has specified a non-existing patch
                if package.get(env_config.Key.feedstock.name) == "package211":
                    with pytest.raises(OpenCEError) as exc:
                        _, _ = mock_build_tree._get_repo(
                            env_config_data, package)
                    assert "Failed to apply patch " in str(exc.value)
Example #3
def test_get_repo_with_patches(mocker, capsys):
    '''
    Test for `_get_repo` that verifies the `patches` field is applied
    '''
    env_file = os.path.join(test_dir, 'test-env3.yaml')
    mock_build_tree = TestBuildTree([env_file], "3.6", "cpu", "openmpi",
                                    "10.2")

    dir_tracker = helpers.DirTracker()
    mocker.patch('os.getcwd', side_effect=dir_tracker.mocked_getcwd)
    mocker.patch('os.chdir', side_effect=dir_tracker.validate_chdir)

    mocker.patch(
        'os.system',
        return_value=0,
        side_effect=(lambda x: helpers.validate_cli(
            x, expect=["git apply"], ignore=["git clone", "git checkout"])))

    possible_variants = utils.make_variants("3.6", "cpu", "openmpi", "10.2")
    for variant in possible_variants:
        # test-env3.yaml has specified "patches".
        env_config_data_list = env_config.load_env_config_files([env_file],
                                                                variant)
        for env_config_data in env_config_data_list:
            packages = env_config_data.get(env_config.Key.packages.name, [])
            for package in packages:

                # "package211" has specified a non-existing patch
                if package.get(env_config.Key.feedstock.name) != "package211":
                    _, _ = mock_build_tree._get_repo(env_config_data, package)
                    captured = capsys.readouterr()
                    assert "Patch apply command:  git apply" in captured.out
Example #4
def test_get_repo_git_tag_options(mocker, capsys):
    '''
    Test for `_get_repo` that verifies `git_tag` and `git_tag_for_env` priorities.
    '''
    env_file1 = os.path.join(test_dir, 'test-env1.yaml')
    mock_build_tree = TestBuildTree([env_file1], "3.6", "cpu", "openmpi",
                                    "10.2")

    dir_tracker = helpers.DirTracker()
    mocker.patch('os.getcwd', side_effect=dir_tracker.mocked_getcwd)
    mocker.patch('os.chdir', side_effect=dir_tracker.validate_chdir)
    mocker.patch('os.system',
                 return_value=0,
                 side_effect=(lambda x: helpers.validate_cli(
                     x, possible_expect=["git clone", "git checkout"])))

    possible_variants = utils.make_variants("3.6", "cpu", "openmpi", "10.2")
    for variant in possible_variants:

        # test-env1.yaml has defined "git_tag" and "git_tag_for_env".
        env_config_data_list = env_config.load_env_config_files([env_file1],
                                                                variant)
        for env_config_data in env_config_data_list:
            packages = env_config_data.get(env_config.Key.packages.name, [])
            for package in packages:
                _, _ = mock_build_tree._get_repo(env_config_data, package)
                validate_git_tags(mock_build_tree._git_tag_for_env,
                                  env_config_data, package, capsys)

        # Setting git_tag_for_env in BuildTree should override whatever is in the config file
        mock_build_tree._git_tag_for_env = "test_tag_for_all"
        env_config_data_list = env_config.load_env_config_files([env_file1],
                                                                variant)
        for env_config_data in env_config_data_list:
            packages = env_config_data.get(env_config.Key.packages.name, [])
            for package in packages:
                _, _ = mock_build_tree._get_repo(env_config_data, package)
                validate_git_tags(mock_build_tree._git_tag_for_env,
                                  env_config_data, package, capsys)

        # Set git_tag_for_env in BuildTree back to the default; no git tags
        # are specified in the config file either.
        mocker.patch('os.system',
                     return_value=0,
                     side_effect=(lambda x: helpers.validate_cli(
                         x,
                         possible_expect=["git clone", "git apply"],
                         reject=["git checkout"])))

        mock_build_tree._git_tag_for_env = None
        env_file2 = os.path.join(test_dir, 'test-env3.yaml')
        env_config_data_list = env_config.load_env_config_files([env_file2],
                                                                variant)
        for env_config_data in env_config_data_list:
            packages = env_config_data.get(env_config.Key.packages.name, [])
            for package in packages:
                _, _ = mock_build_tree._get_repo(env_config_data, package)
                validate_git_tags(mock_build_tree._git_tag_for_env,
                                  env_config_data, package, capsys)
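Examples #2 through #4 patch `os.system` with `helpers.validate_cli`, whose definition is not included here. A hypothetical sketch of what such a test double might do, inferred from the keyword arguments used above (`expect`, `possible_expect`, `ignore`, `reject`, `retval`), is:

def validate_cli_sketch(cli, expect=None, possible_expect=None, ignore=None,
                        reject=None, retval=0):
    '''Hypothetical stand-in for helpers.validate_cli (behavior inferred from the tests).

    Used as the side_effect for a mocked os.system: it asserts on the command
    string and returns `retval` so a test can simulate success or failure.
    '''
    if ignore and any(fragment in cli for fragment in ignore):
        return 0  # e.g. "git clone"/"git checkout" pass through unchecked
    if reject:
        assert not any(fragment in cli for fragment in reject), cli
    if expect:
        assert any(fragment in cli for fragment in expect), cli
    if possible_expect:
        assert any(fragment in cli for fragment in possible_expect), cli
    return retval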
Example #5
def main(arg_strings=None):
    '''
    Entry function.
    '''
    parser = make_parser()
    args = parser.parse_args(arg_strings)
    variants = utils.make_variants(args.python_versions, args.build_types,
                                   args.mpi_types, args.cuda_versions)

    pr_branch = utils.get_output("git log -1 --format='%H'")
    utils.run_and_log("git remote set-head origin -a")
    default_branch = utils.get_output(
        "git symbolic-ref refs/remotes/origin/HEAD | sed 's@^refs/remotes/origin/@@'"
    )

    variant_build_results = dict()
    for variant in variants:
        utils.run_and_log("git checkout {}".format(default_branch))
        master_build_config_data, master_config = _get_configs(variant)
        master_build_numbers = _get_build_numbers(master_build_config_data,
                                                  master_config, variant)

        utils.run_and_log("git checkout {}".format(pr_branch))
        pr_build_config_data, pr_config = _get_configs(variant)
        current_pr_build_numbers = _get_build_numbers(pr_build_config_data,
                                                      pr_config, variant)

        print("Build Info for Variant:   {}".format(variant))
        print("Current PR Build Info:    {}".format(current_pr_build_numbers))
        print("Master Branch Build Info: {}".format(master_build_numbers))

        #No build numbers can go backwards without a version change.
        for package in master_build_numbers:
            if package in current_pr_build_numbers and current_pr_build_numbers[
                    package]["version"] == master_build_numbers[package][
                        "version"]:
                assert int(current_pr_build_numbers[package]["number"]) >= int(
                    master_build_numbers[package]["number"]
                ), "If the version doesn't change, the build number can't be reduced."

        #If packages are added or removed, don't require a version change
        if set(master_build_numbers.keys()) != set(
                current_pr_build_numbers.keys()):
            return

        #At least one package needs to increase the build number or change the version.
        checks = [
            current_pr_build_numbers[package]["version"] !=
            master_build_numbers[package]["version"]
            or int(current_pr_build_numbers[package]["number"]) > int(
                master_build_numbers[package]["number"])
            for package in master_build_numbers
        ]
        variant_build_results[utils.variant_string(
            variant["python"], variant["build_type"], variant["mpi_type"],
            variant["cudatoolkit"])] = any(checks)
    assert any(
        variant_build_results.values()
    ), "At least one package needs to increase the build number or change the version in at least one variant."
Example #6
    def _initialize_dependency_dict(self):
        variants = utils.make_variants(self.python_versions, self.build_types,
                                       self.mpi_types)
        for variant in variants:
            key = utils.variant_string(variant['python'],
                                       variant['build_type'],
                                       variant['mpi_type'])
            self.dependency_dict[key] = set()
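`utils.variant_string` appears throughout these examples as the key for dependency dictionaries and generated file names, but its format is never shown. A hypothetical sketch, consistent with how the tests combine it with utils.CONDA_ENV_FILENAME_PREFIX to build file names (the "py" prefix and dash separator are assumptions; only the argument order comes from the examples), could be:

def variant_string_sketch(python, build_type, mpi_type, cudatoolkit=None):
    '''Hypothetical stand-in for utils.variant_string; the exact format is assumed.'''
    parts = ["py" + python, build_type, mpi_type]
    if cudatoolkit is not None:
        parts.append(cudatoolkit)
    return "-".join(parts)

# variant_string_sketch("3.6", "cpu", "openmpi") -> "py3.6-cpu-openmpi" (format assumed)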
Example #7
def build_feedstock_from_command(
        command,  # pylint: disable=too-many-arguments
        recipe_config_file=None,
        output_folder=utils.DEFAULT_OUTPUT_FOLDER,
        extra_channels=None,
        conda_build_config=utils.DEFAULT_CONDA_BUILD_CONFIG,
        local_src_dir=None):
    '''
    Build a feedstock from a build_command object.
    '''
    if not extra_channels:
        extra_channels = []
    saved_working_directory = None
    if command.repository:
        saved_working_directory = os.getcwd()
        os.chdir(os.path.abspath(command.repository))

    recipes_to_build = inputs.parse_arg_list(command.recipe)

    for variant in utils.make_variants(command.python, command.build_type,
                                       command.mpi_type, command.cudatoolkit):
        build_config_data, recipe_config_file = load_package_config(
            recipe_config_file, variant)

        # Build each recipe
        for recipe in build_config_data['recipes']:
            if recipes_to_build and recipe['name'] not in recipes_to_build:
                continue

            config = get_or_merge_config(None, variant=variant)
            config.skip_existing = True
            config.prefix_length = 225
            config.output_folder = output_folder
            config.variant_config_files = [conda_build_config]

            recipe_conda_build_config = os.path.join(
                os.getcwd(), "config", "conda_build_config.yaml")
            if os.path.exists(recipe_conda_build_config):
                config.variant_config_files.append(recipe_conda_build_config)

            config.channel_urls = extra_channels + command.channels + build_config_data.get(
                'channels', [])

            _set_local_src_dir(local_src_dir, recipe, recipe_config_file)

            try:
                conda_build.api.build(os.path.join(os.getcwd(),
                                                   recipe['path']),
                                      config=config)
            except Exception as exc:  # pylint: disable=broad-except
                traceback.print_exc()
                raise OpenCEError(
                    Error.BUILD_RECIPE,
                    recipe['name'] if 'name' in recipe else os.getcwd(),
                    str(exc)) from exc

    if saved_working_directory:
        os.chdir(saved_working_directory)
Example #8
def test_conda_env_file_for_only_selected_py():
    '''
    Tests that the conda env file is generated only for selected configurations.
    '''
    python_versions = "3.7"
    build_types = "cpu,cuda"
    mpi_types = "openmpi,system"
    mock_build_tree = TestBuildTree([], python_versions, build_types,
                                    mpi_types, external_deps)
    mock_build_tree.build_commands = sample_build_commands[
        2:4]  # Build cmds for py3.7

    output_dir = os.path.join(test_dir, '../condabuild')
    mock_conda_env_file_generator = TestCondaEnvFileGenerator(
        python_versions, build_types, mpi_types, None, output_dir)

    expected_channels = ["file:/{}".format(output_dir), "defaults"]
    actual_channels = mock_conda_env_file_generator.channels
    assert actual_channels == expected_channels

    variants = utils.make_variants(python_versions, build_types, mpi_types)
    expected_keys = [
        utils.variant_string(variant['python'], variant['build_type'],
                             variant['mpi_type']) for variant in variants
    ]

    actual_keys = list(mock_conda_env_file_generator.dependency_dict.keys())
    assert Counter(actual_keys) == Counter(expected_keys)

    for build_command in mock_build_tree:
        mock_conda_env_file_generator.update_conda_env_file_content(
            build_command, mock_build_tree)

    mock_conda_env_file_generator.write_conda_env_files(TMP_OPENCE_DIR)

    # Conda env files should be generated only for py3.7-cpu-openmpi and py3.7-cuda-system variants
    expected_files_keys = [
        utils.variant_string("3.7", "cpu", "openmpi"),
        utils.variant_string("3.7", "cuda", "system")
    ]

    # Check if conda env files are created for expected_files_keys
    for file_keys in expected_files_keys:
        cuda_env_file = os.path.join(
            TMP_OPENCE_DIR, "{}{}.yaml".format(utils.CONDA_ENV_FILENAME_PREFIX,
                                               file_keys))
        assert os.path.exists(cuda_env_file)

    # Check that no other env file exists other than the two expected ones
    for (_, _, files) in os.walk(TMP_OPENCE_DIR, topdown=True):
        assert len(files) == 2

    cleanup()
    assert not os.path.exists(TMP_OPENCE_DIR)
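The tests above only check that one YAML file per selected variant appears on disk; the generator that writes them is not shown. A minimal hypothetical sketch of writing such a file (standard conda environment files use `channels:` and `dependencies:` keys; the real CondaEnvFileGenerator and utils.CONDA_ENV_FILENAME_PREFIX may differ) might be:

import os

def write_conda_env_file_sketch(variant_key, dependencies, channels, output_dir,
                                filename_prefix):
    '''Write a minimal conda environment YAML for one variant and return its path.

    filename_prefix plays the role of utils.CONDA_ENV_FILENAME_PREFIX in the tests;
    its actual value is not shown in these examples.
    '''
    os.makedirs(output_dir, exist_ok=True)
    path = os.path.join(output_dir, "{}{}.yaml".format(filename_prefix, variant_key))
    with open(path, "w") as env_file:
        env_file.write("channels:\n")
        for channel in channels:
            env_file.write("  - {}\n".format(channel))
        env_file.write("dependencies:\n")
        for dep in dependencies:
            env_file.write("  - {}\n".format(dep))
    return path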
Example #9
def validate_env(args):
    '''Entry Function'''
    variants = utils.make_variants(args.python_versions, args.build_types,
                                   args.mpi_types)

    for variant in variants:
        try:
            env_config.load_env_config_files(args.env_config_file, variant)
        except OpenCEError as exc:
            raise OpenCEError(Error.VALIDATE_ENV, args.env_config_file,
                              str(variant), exc.msg) from exc
Example #10
def build_feedstock(args_string=None):
    '''
    Entry function.
    '''
    parser = make_parser()
    args = parser.parse_args(args_string)

    saved_working_directory = None
    if args.working_directory:
        saved_working_directory = os.getcwd()
        os.chdir(os.path.abspath(args.working_directory))

    build_config_data, recipe_config_file = load_package_config(
        args.recipe_config_file)

    args.recipes = utils.parse_arg_list(args.recipe_list)

    # Build each recipe
    for recipe in build_config_data['recipes']:
        if args.recipes and recipe['name'] not in args.recipes:
            continue

        config = get_or_merge_config(None)
        config.skip_existing = True
        config.output_folder = args.output_folder
        config.variant_config_files = [args.conda_build_config]

        recipe_conda_build_config = os.path.join(os.getcwd(), "config",
                                                 "conda_build_config.yaml")
        if os.path.exists(recipe_conda_build_config):
            config.variant_config_files.append(recipe_conda_build_config)

        config.channel_urls = args.channels_list + build_config_data.get(
            'channels', [])

        _set_local_src_dir(args.local_src_dir, recipe, recipe_config_file)

        try:
            for variant in utils.make_variants(args.python_versions,
                                               args.build_types,
                                               args.mpi_types):
                conda_build.api.build(os.path.join(os.getcwd(),
                                                   recipe['path']),
                                      config=config,
                                      variants=variant)
        except Exception as exc:  # pylint: disable=broad-except
            traceback.print_exc()
            raise OpenCEError(
                Error.BUILD_RECIPE,
                recipe['name'] if 'name' in recipe else os.getcwd(),
                str(exc)) from exc

    if saved_working_directory:
        os.chdir(saved_working_directory)
Example #11
    def __init__(self,
                 env_config_files,
                 python_versions,
                 build_types,
                 mpi_types,
                 cuda_versions,
                 repository_folder="./",
                 git_location=utils.DEFAULT_GIT_LOCATION,
                 git_tag_for_env=utils.DEFAULT_GIT_TAG,
                 conda_build_config=utils.DEFAULT_CONDA_BUILD_CONFIG,
                 test_labels=None):

        self._env_config_files = env_config_files
        self._repository_folder = repository_folder
        self._git_location = git_location
        self._git_tag_for_env = git_tag_for_env
        self._conda_build_config = conda_build_config
        self._external_dependencies = dict()
        self._conda_env_files = dict()
        self._test_commands = dict()
        self._test_labels = test_labels

        # Create a dependency tree that includes recipes for every combination
        # of variants.
        self._possible_variants = utils.make_variants(python_versions,
                                                      build_types, mpi_types,
                                                      cuda_versions)
        self.build_commands = []
        for variant in self._possible_variants:
            try:
                build_commands, external_deps, test_commands = self._create_all_commands(
                    variant)
            except OpenCEError as exc:
                raise OpenCEError(Error.CREATE_BUILD_TREE, exc.msg) from exc
            variant_string = utils.variant_string(variant["python"],
                                                  variant["build_type"],
                                                  variant["mpi_type"],
                                                  variant["cudatoolkit"])
            self._external_dependencies[variant_string] = external_deps
            self._test_commands[variant_string] = test_commands

            # Add dependency tree information to the packages list and
            # remove build commands from build_commands that are already in self.build_commands
            build_commands = _add_build_command_dependencies(
                build_commands, self.build_commands, len(self.build_commands))
            self.build_commands += build_commands

            installable_packages = self.get_installable_packages(
                variant_string)
            self._conda_env_files[variant_string] = CondaEnvFileGenerator(
                installable_packages)
        self._detect_cycle()
Example #12
def validate_conda_env_files(py_versions=utils.DEFAULT_PYTHON_VERS,
                             build_types=utils.DEFAULT_BUILD_TYPES,
                             mpi_types=utils.DEFAULT_MPI_TYPES):

    # Check if conda env files are created for given python versions and build variants
    variants = utils.make_variants(py_versions, build_types, mpi_types)
    for variant in variants:
        cuda_env_file = os.path.join(os.getcwd(), utils.DEFAULT_OUTPUT_FOLDER,
                                     "{}{}.yaml".format(utils.CONDA_ENV_FILENAME_PREFIX,
                                     utils.variant_string(variant['python'], variant['build_type'], variant['mpi_type'], variant['cudatoolkit'])))

        assert os.path.exists(cuda_env_file)
        # Remove the file once its existence is verified
        os.remove(cuda_env_file)
Example #13
def validate_env(arg_strings=None):
    '''
    Entry function.
    '''
    parser = make_parser()
    args = parser.parse_args(arg_strings)
    variants = utils.make_variants(args.python_versions, args.build_types,
                                   args.mpi_types)

    for variant in variants:
        try:
            env_config.load_env_config_files(args.env_config_file, variant)
        except OpenCEError as exc:
            raise OpenCEError(Error.VALIDATE_ENV, args.env_config_file,
                              str(variant), exc.msg) from exc
Example #14
def validate_config(arg_strings=None):
    '''
    Entry function.
    '''
    args = make_parser().parse_args(arg_strings)
    variants = utils.make_variants(args.python_versions, args.build_types, args.mpi_types)
    for variant in variants:
        print('Validating {} for {}'.format(args.conda_build_config, variant))
        for env_file in args.env_config_file:
            print('Validating {} for {} : {}'.format(args.conda_build_config, env_file, variant))
            try:
                recipes = build_tree.BuildTree([env_file],
                                               variant['python'],
                                               variant['build_type'],
                                               variant['mpi_type'],
                                               repository_folder=args.repository_folder,
                                               conda_build_config=args.conda_build_config)
            except OpenCEError as err:
                print(err.msg)
                print('Error while validating {} for {} : {}'.format(args.conda_build_config, env_file, variant))
                return 1

            packages = [package for recipe in recipes for package in recipe.packages]
            channels = {channel for recipe in recipes for channel in recipe.channels}
            deps = {dep for recipe in recipes for dep in recipe.run_dependencies}
            deps.update(recipes.get_external_dependencies(variant))

            pkg_args = " ".join(["\"{}\"".format(generalize_version(dep)) for dep in deps
                                                                          if not utils.remove_version(dep) in packages])

            channel_args = " ".join({"-c \"{}\"".format(channel) for channel in channels})

            cli = "conda create --dry-run -n test_conda_dependencies {} {}".format(channel_args, pkg_args)

            retval = utils.run_and_log(cli)

            if retval != 0:
                print('Error while validating {} for {} : {}'.format(args.conda_build_config, env_file, variant))
                return 1

            print('Successfully validated {} for {} : {}'.format(args.conda_build_config, env_file, variant))

        print('Successfully validated {} for {}'.format(args.conda_build_config, variant))

    print("{} Successfully validated!".format(args.conda_build_config))
    return 0
Example #15
def test_conda_env_file_content():
    '''
    Tests that the conda env file contents are being populated correctly
    '''
    python_versions = "3.6,3.7"
    build_types = "cpu,cuda"
    mpi_types = "openmpi,system"
    mock_build_tree = TestBuildTree([], python_versions, build_types,
                                    mpi_types, external_deps)
    mock_build_tree.build_commands = sample_build_commands

    output_dir = os.path.join(test_dir, '../condabuild')
    mock_conda_env_file_generator = TestCondaEnvFileGenerator(
        python_versions, build_types, mpi_types, ["some channel"], output_dir)
    expected_channels = [
        "file:/{}".format(output_dir), "some channel", "defaults"
    ]
    actual_channels = mock_conda_env_file_generator.channels
    assert actual_channels == expected_channels

    variants = utils.make_variants(python_versions, build_types, mpi_types)
    expected_keys = [
        utils.variant_string(variant['python'], variant['build_type'],
                             variant['mpi_type']) for variant in variants
    ]
    actual_keys = list(mock_conda_env_file_generator.dependency_dict.keys())
    assert Counter(actual_keys) == Counter(expected_keys)

    for build_command in mock_build_tree:
        mock_conda_env_file_generator.update_conda_env_file_content(
            build_command, mock_build_tree)

    files_generated_for_keys = []
    validate_dependencies(mock_conda_env_file_generator, expected_keys,
                          files_generated_for_keys)
    mock_conda_env_file_generator.write_conda_env_files(TMP_OPENCE_DIR)

    # Check if conda env files are created for all variants
    for key in files_generated_for_keys:
        cuda_env_file = os.path.join(
            TMP_OPENCE_DIR, "{}{}.yaml".format(utils.CONDA_ENV_FILENAME_PREFIX,
                                               key))
        assert os.path.exists(cuda_env_file)

    cleanup()
    assert not os.path.exists(TMP_OPENCE_DIR)
Example #16
    def __init__(
            self,  #pylint: disable=super-init-not-called
            env_config_files,
            python_versions,
            build_types,
            mpi_types,
            cuda_versions,
            repository_folder="./",
            git_location=utils.DEFAULT_GIT_LOCATION,
            git_tag_for_env=utils.DEFAULT_GIT_TAG,
            conda_build_config=utils.DEFAULT_CONDA_BUILD_CONFIG):
        self._env_config_files = env_config_files
        self._repository_folder = repository_folder
        self._git_location = git_location
        self._git_tag_for_env = git_tag_for_env
        self._conda_build_config = conda_build_config
        self._possible_variants = utils.make_variants(python_versions,
                                                      build_types, mpi_types,
                                                      cuda_versions)
Example #17
def main(arg_strings=None):
    '''
    Entry function.
    '''
    parser = make_parser()
    args = parser.parse_args(arg_strings)
    variants = utils.make_variants(args.python_versions, args.build_types,
                                   args.mpi_types)

    build_config_data, _ = build_feedstock.load_package_config()

    pr_branch = utils.get_output("git log -1 --format='%H'")
    utils.run_and_log("git remote set-head origin -a")
    default_branch = utils.get_output(
        "git symbolic-ref refs/remotes/origin/HEAD | sed 's@^refs/remotes/origin/@@'"
    )

    config = get_or_merge_config(None)
    config.variant_config_files = [utils.DEFAULT_CONDA_BUILD_CONFIG]
    recipe_conda_build_config = os.path.join(os.getcwd(), "config",
                                             "conda_build_config.yaml")
    if os.path.exists(recipe_conda_build_config):
        config.variant_config_files += [recipe_conda_build_config]
    config.verbose = False

    utils.run_and_log("git checkout {}".format(default_branch))
    master_build_numbers = dict()
    for recipe in build_config_data["recipes"]:
        metas = conda_build.api.render(recipe['path'],
                                       config=config,
                                       variants=variants[0],
                                       bypass_env_check=True,
                                       finalize=False)
        for meta, _, _ in metas:
            master_build_numbers[meta.meta['package']['name']] = {
                "version": meta.meta['package']['version'],
                "number": meta.meta['build']['number']
            }

    utils.run_and_log("git checkout {}".format(pr_branch))
    current_pr_build_numbers = dict()
    for recipe in build_config_data["recipes"]:
        metas = conda_build.api.render(recipe['path'],
                                       config=config,
                                       variants=variants[0],
                                       bypass_env_check=True,
                                       finalize=False)
        for meta, _, _ in metas:
            current_pr_build_numbers[meta.meta['package']['name']] = {
                "version": meta.meta['package']['version'],
                "number": meta.meta['build']['number']
            }

    print("Current PR Build Info:    ", current_pr_build_numbers)
    print("Master Branch Build Info: ", master_build_numbers)

    #No build numbers can go backwards without a version change.
    for package in master_build_numbers:
        if package in current_pr_build_numbers and current_pr_build_numbers[
                package]["version"] == master_build_numbers[package]["version"]:
            assert int(current_pr_build_numbers[package]["number"]) >= int(
                master_build_numbers[package]["number"]
            ), "If the version doesn't change, the build number can't be reduced."

    #If packages are added or removed, don't require a version change
    if set(master_build_numbers.keys()) != set(
            current_pr_build_numbers.keys()):
        return

    #At least one package needs to increase the build number or change the version.
    checks = [
        current_pr_build_numbers[package]["version"] !=
        master_build_numbers[package]["version"]
        or int(current_pr_build_numbers[package]["number"]) > int(
            master_build_numbers[package]["number"])
        for package in master_build_numbers
    ]
    assert any(
        checks
    ), "At least one package needs to increase the build number or change the version."
Example #18
def validate_config(args):
    '''Entry Function'''
    variants = utils.make_variants(args.python_versions, args.build_types,
                                   args.mpi_types, args.cuda_versions)
    validate_env_config(args.conda_build_config, args.env_config_file,
                        variants, args.repository_folder)
Example #19
def build_env(args):
    '''Entry Function'''
    if args.docker_build:
        if len(args.cuda_versions.split(',')) > 1:
            raise OpenCEError(Error.TOO_MANY_CUDA)
        docker_build.build_with_docker(os.path.abspath(args.output_folder),
                                       args.build_types, args.cuda_versions,
                                       sys.argv)
        for conda_env_file in glob.glob(
                os.path.join(args.output_folder, "*.yaml")):
            utils.replace_conda_env_channels(
                conda_env_file,
                os.path.abspath(
                    os.path.join(docker_build.HOME_PATH,
                                 utils.DEFAULT_OUTPUT_FOLDER)),
                os.path.abspath(args.output_folder))
        return

    # Checking conda-build existence if --docker_build is not specified
    utils.check_if_conda_build_exists()

    # Here, importing BuildTree is intentionally done after checking
    # existence of conda-build as BuildTree uses conda_build APIs.
    from build_tree import BuildTree  # pylint: disable=import-outside-toplevel

    # If repository_folder doesn't exist, create it
    if args.repository_folder and not os.path.exists(args.repository_folder):
        os.mkdir(args.repository_folder)

    variants = utils.make_variants(args.python_versions, args.build_types,
                                   args.mpi_types, args.cuda_versions)
    validate_config.validate_env_config(args.conda_build_config,
                                        args.env_config_file, variants,
                                        args.repository_folder)

    # Create the build tree
    build_tree = BuildTree(
        env_config_files=args.env_config_file,
        python_versions=inputs.parse_arg_list(args.python_versions),
        build_types=inputs.parse_arg_list(args.build_types),
        mpi_types=inputs.parse_arg_list(args.mpi_types),
        cuda_versions=inputs.parse_arg_list(args.cuda_versions),
        repository_folder=args.repository_folder,
        git_location=args.git_location,
        git_tag_for_env=args.git_tag_for_env,
        conda_build_config=args.conda_build_config,
        test_labels=inputs.parse_arg_list(args.test_labels))

    # Generate conda environment files
    conda_env_files = build_tree.write_conda_env_files(
        channels=args.channels_list,
        output_folder=os.path.abspath(args.output_folder),
        path=os.path.abspath(args.output_folder))
    print(
        "Generated conda environment files from the selected build arguments:",
        conda_env_files.values())
    print("INFO: One can use these environment files to create a conda" \
          " environment using \"conda env create -f <conda_env_file_name>.\"")

    if not args.skip_build_packages:
        # Build each package in the packages list
        for build_command in build_tree:
            if not _all_outputs_exist(args.output_folder,
                                      build_command.output_files):
                try:
                    print("Building " + build_command.recipe)
                    build_feedstock.build_feedstock_from_command(
                        build_command,
                        output_folder=os.path.abspath(args.output_folder),
                        extra_channels=[os.path.abspath(args.output_folder)] +
                        args.channels_list,
                        conda_build_config=os.path.abspath(
                            args.conda_build_config))
                except OpenCEError as exc:
                    raise OpenCEError(Error.BUILD_RECIPE,
                                      build_command.repository,
                                      exc.msg) from exc
            else:
                print("Skipping build of " + build_command.recipe +
                      " because it already exists")

    if args.run_tests:
        _run_tests(build_tree, conda_env_files)
                            mpi_type="openmpi",
                            run_dependencies=[
                                "python 3.7", "pack1==1.0", "pack2 <=2.0",
                                "pack3   3.0.*"
                            ]),
    build_tree.BuildCommand(
        "recipe4",
        "repo4", ["package4a", "package4b"],
        python="3.7",
        build_type="cuda",
        mpi_type="system",
        run_dependencies=["pack1==1.0", "pack2 <=2.0", "pack3-suffix 3.0"])
]

external_deps = {}
possible_variants = utils.make_variants(['3.6', '3.7'], ['cpu', 'cuda'],
                                        'openmpi')
for variant in possible_variants:
    external_deps[str(variant)] = [
        "external_pac1    1.2", "external_pack2", "external_pack3=1.2.3"
    ]
TMP_OPENCE_DIR = "/tmp/opence-test/"


def test_conda_env_file_content():
    '''
    Tests that the conda env file contents are being populated correctly
    '''
    python_versions = "3.6,3.7"
    build_types = "cpu,cuda"
    mpi_types = "openmpi,system"
    mock_build_tree = TestBuildTree([], python_versions, build_types,