def test_get_repo_git_tag_options(mocker, caplog):
    ''' Test for `_get_repo` that verifies `git_tag` and `git_tag_for_env` priorities. '''
    env_file1 = os.path.join(test_dir, 'test-env1.yaml')
    mock_build_tree = TestBuildTree([env_file1], "3.6", "cpu", "openmpi", "10.2")
    dir_tracker = helpers.DirTracker()
    mocker.patch('os.getcwd', side_effect=dir_tracker.mocked_getcwd)
    mocker.patch('os.chdir', side_effect=dir_tracker.validate_chdir)
    mocker.patch('os.system', return_value=0,
                 side_effect=(lambda x: helpers.validate_cli(
                     x, possible_expect=["git clone", "git checkout"])))

    for variant in utils.make_variants("3.6", "cpu", "openmpi", "10.2"):

        def check_tags(env_file):
            # Load the env file, clone every package's repo, and validate its git tags.
            for env_data in env_config.load_env_config_files([env_file], [variant]):
                for pkg in env_data.get(env_config.Key.packages.name, []):
                    _ = mock_build_tree._get_repo(env_data, pkg)
                    validate_git_tags(mock_build_tree._git_tag_for_env, env_data, pkg, caplog)

        # test-env1.yaml has defined "git_tag" and "git_tag_for_env".
        check_tags(env_file1)

        # Setting git_tag_for_env in BuildTree should override whatever is in the config file
        mock_build_tree._git_tag_for_env = "test_tag_for_all"
        check_tags(env_file1)

        # Setting git_tag_for_env in BuildTree back to Default and no git tags
        # specified in the config file too.
        mocker.patch('os.system', return_value=0,
                     side_effect=(lambda x: helpers.validate_cli(
                         x, possible_expect=["git clone", "git apply"],
                         reject=["git checkout"])))
        mock_build_tree._git_tag_for_env = None
        check_tags(os.path.join(test_dir, 'test-env3.yaml'))
def validate_env(args):
    '''Entry Function'''
    all_variants = utils.make_variants(args.python_versions, args.build_types,
                                       args.mpi_types, args.cuda_versions)
    for variant in all_variants:
        try:
            env_config.load_env_config_files(args.env_config_file, variant)
        except OpenCEError as exc:
            # Re-raise as a validation error, preserving the original cause.
            raise OpenCEError(Error.VALIDATE_ENV, args.env_config_file,
                              str(variant), exc.msg) from exc
def test_get_repo_for_nonexisting_patch(mocker):
    ''' Test for `_get_repo` that verifies exception is thrown when patch application fails '''
    env_file = os.path.join(test_dir, 'test-env3.yaml')
    mock_build_tree = TestBuildTree([env_file], "3.6", "cpu", "openmpi", "10.2")
    tracker = helpers.DirTracker()
    mocker.patch('os.getcwd', side_effect=tracker.mocked_getcwd)
    mocker.patch('os.chdir', side_effect=tracker.validate_chdir)
    # Force the "git apply" call to fail while letting clone/checkout succeed.
    mocker.patch('os.system', side_effect=(lambda x: helpers.validate_cli(
        x, expect=["git apply"], ignore=["git clone", "git checkout"], retval=1)))
    mocker.patch('shutil.rmtree', return_value=None)

    for variant in utils.make_variants("3.6", "cpu", "openmpi", "10.2"):
        # test-env3.yaml has defined "patches".
        for env_data in env_config.load_env_config_files([env_file], [variant]):
            for pkg in env_data.get(env_config.Key.packages.name, []):
                # "package211" has specified a non-existing patch
                if pkg.get(env_config.Key.feedstock.name) != "package211":
                    continue
                with pytest.raises(OpenCEError) as exc:
                    _ = mock_build_tree._get_repo(env_data, pkg)
                assert "Failed to apply patch " in str(exc.value)
def test_get_repo_with_patches(mocker, caplog):
    ''' Test for `_get_repo` that verifies `patches` field '''
    env_file = os.path.join(test_dir, 'test-env3.yaml')
    mock_build_tree = TestBuildTree([env_file], "3.6", "cpu", "openmpi", "10.2")
    tracker = helpers.DirTracker()
    mocker.patch('os.getcwd', side_effect=tracker.mocked_getcwd)
    mocker.patch('os.chdir', side_effect=tracker.validate_chdir)
    mocker.patch('os.system', return_value=0,
                 side_effect=(lambda x: helpers.validate_cli(
                     x, expect=["git apply"], ignore=["git clone", "git checkout"])))

    for variant in utils.make_variants("3.6", "cpu", "openmpi", "10.2"):
        # test-env3.yaml has specified "patches".
        for env_data in env_config.load_env_config_files([env_file], [variant]):
            for pkg in env_data.get(env_config.Key.packages.name, []):
                if pkg.get(env_config.Key.feedstock.name) != "package22":
                    continue
                _ = mock_build_tree._get_repo(env_data, pkg)
                assert "Patch apply command: git apply" in caplog.text
                break
def _create_nodes(self, variants):
    ''' Create a recipe dictionary for each recipe needed for a given environment file. '''
    env_data_list = env_config.load_env_config_files(self._env_config_files, variants)
    seen_hashes = set()
    external_deps = []
    dep_graph = graph.OpenCEGraph()
    helper_args = []

    # Find all conda_build_configs listed in environment files
    conda_build_configs = []
    for env_data in env_data_list:
        base_path = env_data[env_config.Key.opence_env_file_path.name]
        for config in env_data.get(env_config.Key.conda_build_configs.name, []):
            conda_build_configs.append(utils.expanded_path(config, relative_to=base_path))
    utils.check_conda_build_configs_exist(conda_build_configs)

    # Create recipe dictionaries for each repository in the environment file
    for env_data in env_data_list:
        channels = self._channels + env_data.get(env_config.Key.channels.name, [])
        for feedstock in env_data.get(env_config.Key.packages.name, []) or []:
            feedstock_hash = _make_hash(feedstock)
            if feedstock_hash in seen_hashes:
                continue
            # Create arguments for call to _create_commands_helper
            helper_args.append((variants, env_data, conda_build_configs, feedstock))
            seen_hashes.add(feedstock_hash)
        current_deps = env_data.get(env_config.Key.external_dependencies.name, [])
        for dep in current_deps:
            # Add external dependencies as top level nodes in the graph.
            dep_graph.add_node(DependencyNode({dep}, channels=channels))
        if current_deps:
            external_deps += current_deps

    # Execute _create_commands_helper in parallel
    commands = utils.run_in_parallel(self._create_commands_helper, helper_args)

    # Add the results of _create_commands_helper to the graph
    for command in commands:
        dep_graph = networkx.compose(dep_graph, command)

    return dep_graph, external_deps
def test_check_recipe_path_package_field():
    ''' Test for `runtime_package` field '''
    env_file = os.path.join(test_dir, 'test-env1.yaml')
    for variant in utils.make_variants("3.6", "cpu", "openmpi", "10.2"):
        # test-env1.yaml has defined "recipe_path" as "package11_recipe_path" for "package11".
        for env_data in env_config.load_env_config_files([env_file], [variant]):
            for pkg in env_data.get(env_config.Key.packages.name, []):
                if pkg.get(env_config.Key.feedstock.name) == "package11":
                    assert pkg.get(env_config.Key.recipe_path.name) == "package11_recipe_path"
def _create_all_commands(self, variants):
    ''' Create a recipe dictionary for each recipe needed for a given environment file. '''
    env_data_list = env_config.load_env_config_files(self._env_config_files, variants)
    seen_hashes = set()
    build_commands = []
    external_deps = []

    # Create recipe dictionaries for each repository in the environment file
    for env_data in env_data_list:
        channels = self._channels + env_data.get(env_config.Key.channels.name, [])
        for package in env_data.get(env_config.Key.packages.name, []) or []:
            package_hash = _make_hash(package)
            if package_hash in seen_hashes:
                continue
            repo_dir = self._get_repo(env_data, package)
            runtime_package = package.get(env_config.Key.runtime_package.name, True)
            build_commands += _create_commands(repo_dir,
                                               runtime_package,
                                               package.get(env_config.Key.recipe_path.name),
                                               package.get(env_config.Key.recipes.name),
                                               [os.path.abspath(self._conda_build_config)],
                                               variants,
                                               channels)
            seen_hashes.add(package_hash)
        current_deps = env_data.get(env_config.Key.external_dependencies.name, [])
        if current_deps:
            external_deps += current_deps

    return build_commands, external_deps
def _main(arg_strings=None): # pylint: disable=too-many-locals, too-many-statements
    '''
    Entry point: cut an Open-CE release when the opence-env git_tag has changed.

    Creates (or reuses) the release branch in the primary repo, tags it, tags
    every feedstock repo, generates release notes, and optionally creates a
    draft GitHub release. All push/create actions are skipped unless
    `--not_dry_run` was passed.
    '''
    parser = _make_parser()
    args = parser.parse_args(arg_strings)
    config_file = None
    if args.conda_build_configs:
        config_file = os.path.abspath(args.conda_build_configs)
    primary_repo_path = "./"
    open_ce_env_file = os.path.abspath(os.path.join(primary_repo_path, "envs", "opence-env.yaml"))

    # Nothing to do if the env file's git_tag matches the previous release.
    if not _has_git_tag_changed(primary_repo_path, args.branch, open_ce_env_file):
        print("--->The opence-env git_tag has not changed.")
        print("--->No release is needed.")
        return
    print("--->The opence-env git_tag has changed!")

    # Derive the version and naming pieces from the new tag.
    current_tag = _get_git_tag_from_env_file(open_ce_env_file)
    previous_tag = _get_previous_git_tag_from_env_file(primary_repo_path, args.branch, open_ce_env_file)
    version = _git_tag_to_version(current_tag)
    release_number = ".".join(version.split(".")[:-1])
    bug_fix = version.split(".")[-1]
    branch_name = "open-ce-r{}".format(release_number)
    version_msg = "Open-CE Version {}".format(version)
    release_name = "v{}".format(version)

    env_file_contents = env_config.load_env_config_files([open_ce_env_file],
                                                         utils.ALL_VARIANTS(),
                                                         ignore_urls=True)
    # Every variant of the env file must carry the same git_tag_for_env.
    for env_file_content in env_file_contents:
        env_file_tag = env_file_content.get(env_config.Key.git_tag_for_env.name, None)
        if env_file_tag != current_tag:
            message = "Incorrect {} '{}' found in the following env_file:\n{}".format(
                env_config.Key.git_tag_for_env.name, env_file_tag, env_file_content)
            raise Exception(message)

    if not git_utils.branch_exists(primary_repo_path, branch_name):
        print("--->Creating {} branch in {}".format(branch_name, args.primary_repo))
        git_utils.create_branch(primary_repo_path, branch_name)
    else:
        # BUG FIX: the message previously printed current_tag where the
        # branch name belongs; it now reports the branch that already exists.
        print("--->Branch {} already exists in {}. Not creating it.".format(branch_name, args.primary_repo))

    print("--->Tag Primary Branch")
    git_utils.create_tag(primary_repo_path, current_tag, version_msg)

    if args.not_dry_run:
        print("--->Pushing branch.")
        git_utils.push_branch(primary_repo_path, branch_name)
        print("--->Pushing tag.")
        git_utils.push_branch(primary_repo_path, current_tag)
    else:
        print("--->Skipping pushing branch and tag for dry run.")

    # Gather all feedstock repos (sorted for deterministic output) and tag them.
    repos = _get_all_feedstocks(env_files=env_file_contents,
                                github_org=args.github_org,
                                pat=args.pat,
                                skipped_repos=[args.primary_repo, ".github"]
                                              + inputs.parse_arg_list(args.skipped_repos))
    repos.sort(key=lambda repo: repo["name"])
    tag_all_repos.clone_repos(repos=repos,
                              branch=None,
                              repo_dir=args.repo_dir,
                              prev_tag=previous_tag)
    tag_all_repos.tag_repos(repos=repos,
                            tag=current_tag,
                            tag_msg=version_msg,
                            repo_dir=args.repo_dir)
    if args.not_dry_run:
        tag_all_repos.push_repos(repos=repos,
                                 tag=current_tag,
                                 repo_dir=args.repo_dir,
                                 continue_query=False)
    else:
        print("--->Skipping pushing feedstocks for dry run.")

    print("--->Generating Release Notes.")
    release_notes = _create_release_notes(
        repos,
        version,
        release_number,
        bug_fix,
        current_tag,
        previous_tag,
        utils.ALL_VARIANTS(),
        config_file,
        repo_dir=args.repo_dir,
    )
    print(release_notes)

    if args.not_dry_run:
        print("--->Creating Draft Release.")
        git_utils.create_release(args.github_org, args.primary_repo, args.pat,
                                 current_tag, release_name, release_notes, True)
    else:
        print("--->Skipping release creation for dry run.")