def make_variants(python_versions=DEFAULT_PYTHON_VERS, build_types=DEFAULT_BUILD_TYPES, mpi_types=DEFAULT_MPI_TYPES, cuda_versions=DEFAULT_CUDA_VERS):
    '''Create a cross product of possible variant combinations.'''
    # Each axis of the cross product, keyed by the variant field it populates.
    keys = ('python', 'build_type', 'mpi_type', 'cudatoolkit')
    axes = [inputs.parse_arg_list(raw)
            for raw in (python_versions, build_types, mpi_types, cuda_versions)]
    # One dict per point in the cartesian product of all axes.
    return [dict(zip(keys, combo)) for combo in product(*axes)]
def test_feedstock(conda_env_file, test_labels=None, test_working_dir=utils.DEFAULT_TEST_WORKING_DIRECTORY, working_directory=None):
    """
    Test a particular feedstock, provided by the working_directory argument.
    """
    # Optionally run from the feedstock's directory, remembering where we were.
    previous_cwd = None
    if working_directory:
        previous_cwd = os.getcwd()
        os.chdir(os.path.abspath(working_directory))
    conda_env_file = os.path.abspath(conda_env_file)
    # Recover the variant settings from the environment file's variant string.
    variant_string = conda_env_file_generator.get_variant_string(conda_env_file)
    variants = utils.variant_string_to_dict(variant_string) if variant_string else dict()
    # Each requested test label is switched on in the variant dictionary.
    for label in inputs.parse_arg_list(test_labels):
        variants[label] = True
    commands = gen_test_commands(working_dir=test_working_dir, variants=variants)
    results = run_test_commands(conda_env_file, commands)
    if previous_cwd:
        os.chdir(previous_cwd)
    return results
def _main(arg_strings=None):
    '''Create a release branch and tag in the primary repo, then tag all feedstocks.'''
    args = _make_parser().parse_args(arg_strings)
    # Derive names from the requested version, e.g. 1.2.3 ->
    # tag "open-ce-v1.2.3" on branch "open-ce-r1.2".
    tag_name = "open-ce-v{}".format(args.version)
    release_number = ".".join(args.version.split(".")[:-1])
    branch_name = "open-ce-r{}".format(release_number)
    repo_url = "[email protected]:{}/{}.git".format(args.github_org, args.primary_repo)
    tag_msg = "Open-CE Version {}".format(args.version)
    release_name = "v{}".format(args.version)
    if args.code_name:
        tag_msg = "{} Code-named {}".format(tag_msg, args.code_name)
        release_name = "{} ({})".format(release_name, args.code_name)
    repo_path = os.path.abspath(os.path.join(args.repo_dir, args.primary_repo))
    print("--->Making clone location: " + repo_path)
    os.makedirs(repo_path, exist_ok=True)
    print("--->Cloning {}".format(repo_url))
    git_utils.clone_repo(repo_url, repo_path, args.branch)
    print("--->Creating {} branch in {}".format(tag_name, args.primary_repo))
    git_utils.create_branch(repo_path, branch_name)
    print("--->Updating env files.")
    _update_env_files(repo_path, tag_name)
    print("--->Committing env files.")
    git_utils.commit_changes(repo_path, "Updates for {}".format(release_number))
    print("--->Tag Primary Branch")
    git_utils.create_tag(repo_path, tag_name, tag_msg)
    # Pushes are gated behind interactive confirmation.
    push = git_utils.ask_for_input("Would you like to push changes to primary repo?")
    if push.startswith("y"):
        print("--->Pushing branch.")
        git_utils.push_branch(repo_path, branch_name)
        print("--->Pushing tag.")
        git_utils.push_branch(repo_path, tag_name)
    tag_all_repos.tag_all_repos(github_org=args.github_org,
                                tag=tag_name,
                                tag_msg=tag_msg,
                                branch=args.branch,
                                repo_dir=args.repo_dir,
                                pat=args.pat,
                                skipped_repos=[args.primary_repo, ".github"] + inputs.parse_arg_list(args.skipped_repos),
                                prev_tag=None)
    release = git_utils.ask_for_input("Would you like to create a github release?")
    if release.startswith("y"):
        print("--->Creating Draft Release.")
        git_utils.create_release(args.github_org, args.primary_repo, args.pat,
                                 tag_name, release_name, tag_msg, True)
def test_parse_arg_list_large_string_input():
    ''' Test parse_arg_list with a more complicated input, including spaces. '''
    # Spaces around entries are preserved and a trailing comma yields an empty entry.
    raw = "this,is a, big , test ,"
    expected = ["this", "is a", " big ", " test ", ""]
    assert inputs.parse_arg_list(raw) == expected
def test_parse_arg_list_small_string_input():
    ''' Tests that parse_arg_list works for a simple case. '''
    # A plain comma-separated string becomes a list of its pieces.
    raw = "a,b,c"
    assert inputs.parse_arg_list(raw) == ["a", "b", "c"]
def get_licenses(args):
    """
    Entry point for `get licenses`.
    """
    if not args.conda_env_files:
        raise OpenCEError(Error.CONDA_ENV_FILE_REQUIRED)
    generator = LicenseGenerator()
    # Accumulate license info from every provided conda environment file.
    for env_file in parse_arg_list(args.conda_env_files):
        generator.add_licenses(env_file)
    generator.write_licenses_file(args.output_folder)
    # Optionally render additional files from user-supplied templates.
    if args.template_files:
        for template in parse_arg_list(args.template_files):
            generator.gen_file_from_template(template, args.output_folder)
def build_feedstock_from_command(command, # pylint: disable=too-many-arguments, too-many-locals
                                 recipe_config_file=None,
                                 output_folder=utils.DEFAULT_OUTPUT_FOLDER,
                                 conda_build_config=utils.DEFAULT_CONDA_BUILD_CONFIG,
                                 local_src_dir=None):
    '''
    Build a feedstock from a build_command object.

    Args:
        command: Build command object providing recipe list, repository,
                 variant fields (python, build_type, mpi_type, cudatoolkit)
                 and extra channels.
        recipe_config_file: Optional path to the recipe's config file.
        output_folder: Folder that receives the built packages (also used as a channel).
        conda_build_config: Path to the conda_build_config.yaml to use, if it exists.
        local_src_dir: Optional local source directory passed to _set_local_src_dir.

    Raises:
        OpenCEError: With Error.BUILD_RECIPE when conda-build fails for a recipe.
    '''
    utils.check_if_package_exists('conda-build')

    # pylint: disable=import-outside-toplevel
    import conda_build.api
    from conda_build.config import get_or_merge_config

    # Run from the feedstock's repository if one is given, restoring cwd afterwards.
    saved_working_directory = None
    if command.repository:
        saved_working_directory = os.getcwd()
        os.chdir(os.path.abspath(command.repository))

    recipes_to_build = inputs.parse_arg_list(command.recipe)

    for variant in utils.make_variants(command.python, command.build_type,
                                       command.mpi_type, command.cudatoolkit):
        build_config_data, recipe_config_file = load_package_config(recipe_config_file, variant,
                                                                    command.recipe_path)
        # Build each recipe
        if build_config_data['recipes'] is None:
            build_config_data['recipes'] = []
            print("INFO: No recipe to build for given configuration.")
        for recipe in build_config_data['recipes']:
            # An explicit recipe list acts as a filter; empty list means "build all".
            if recipes_to_build and recipe['name'] not in recipes_to_build:
                continue
            config = get_or_merge_config(None, variant=variant)
            config.skip_existing = True
            config.prefix_length = 225
            config.output_folder = output_folder
            config.variant_config_files = [conda_build_config] if os.path.exists(conda_build_config) else []

            recipe_conda_build_config = get_conda_build_config()
            if recipe_conda_build_config:
                config.variant_config_files.append(recipe_conda_build_config)

            # The local output folder is searched first, then command/recipe channels.
            config.channel_urls = [os.path.abspath(output_folder)]
            config.channel_urls += command.channels
            config.channel_urls += build_config_data.get('channels', [])

            _set_local_src_dir(local_src_dir, recipe, recipe_config_file)
            try:
                conda_build.api.build(os.path.join(os.getcwd(), recipe['path']), config=config)
            except Exception as exc: # pylint: disable=broad-except
                traceback.print_exc()
                # Bug fix: os.getcwd was previously passed as a function object;
                # it must be called to report the actual directory.
                raise OpenCEError(Error.BUILD_RECIPE,
                                  recipe['name'] if 'name' in recipe else os.getcwd(),
                                  str(exc)) from exc

    if saved_working_directory:
        os.chdir(saved_working_directory)
def make_variants(python_versions=DEFAULT_PYTHON_VERS, build_types=DEFAULT_BUILD_TYPES, mpi_types=DEFAULT_MPI_TYPES, cuda_versions=DEFAULT_CUDA_VERS):
    '''Create a cross product of possible variant combinations.'''
    combos = []
    pythons = inputs.parse_arg_list(python_versions)
    mpis = inputs.parse_arg_list(mpi_types)
    for build_type in inputs.parse_arg_list(build_types):
        # Axes of the cross product for this build type; insertion order
        # determines the key order of each resulting variant dict.
        axes = {'python': pythons,
                'build_type': [build_type],
                'mpi_type': mpis}
        # Only cuda builds carry a cudatoolkit axis.
        if build_type == "cuda":
            axes["cudatoolkit"] = inputs.parse_arg_list(cuda_versions)
        combos.extend(dict(zip(axes, values)) for values in product(*axes.values()))
    return combos
def test_feedstock_entry(args):
    '''Entry Function'''
    if not args.conda_env_files:
        raise OpenCEError(Error.CONDA_ENV_FILE_REQUIRED)
    # Collect failures across every provided conda environment file.
    failures = []
    for env_file in inputs.parse_arg_list(args.conda_env_files):
        failures.extend(test_feedstock(env_file,
                                       args.test_labels,
                                       args.test_working_dir,
                                       args.working_directory))
    if failures:
        display_failed_tests(failures)
        raise OpenCEError(Error.FAILED_TESTS, len(failures))
def construct_build_tree(args):
    ''' Common function to make a build_tree from args. '''
    utils.check_conda_build_configs_exist(args.conda_build_configs)

    # If repository_folder doesn't exist, create it
    if args.repository_folder:
        os.makedirs(args.repository_folder, exist_ok=True)

    parse = inputs.parse_arg_list
    # Create the build tree
    return BuildTree(env_config_files=args.env_config_file,
                     python_versions=parse(args.python_versions),
                     build_types=parse(args.build_types),
                     mpi_types=parse(args.mpi_types),
                     cuda_versions=parse(args.cuda_versions),
                     repository_folder=args.repository_folder,
                     channels=args.channels_list,
                     git_location=args.git_location,
                     git_tag_for_env=args.git_tag_for_env,
                     git_up_to_date=args.git_up_to_date,
                     conda_build_config=args.conda_build_configs,
                     packages=parse(args.packages))
def test_feedstock_entry(args):
    '''Entry Function'''
    if not args.conda_env_files:
        raise OpenCEError(Error.CONDA_ENV_FILE_REQUIRED)
    # The feedstock name is the basename of the directory being tested.
    target_dir = args.working_directory if args.working_directory else os.getcwd()
    feedstock_name = os.path.basename(os.path.abspath(target_dir))
    results = {feedstock_name: []}
    for env_file in inputs.parse_arg_list(args.conda_env_files):
        results[feedstock_name] += test_feedstock(env_file,
                                                  args.test_labels,
                                                  args.test_working_dir,
                                                  args.working_directory)
    process_test_results(results, args.test_working_dir, args.test_labels)
def tag_all_repos(github_org, tag, tag_msg, branch, repo_dir, pat, skipped_repos, prev_tag): # pylint: disable=too-many-arguments
    '''
    Clones, then tags all repos with a given tag, and pushes back to remote.
    These steps are performed in separate loops to make debugging easier.
    '''
    excluded = inputs.parse_arg_list(skipped_repos)
    repos = [repo for repo in git_utils.get_all_repos(github_org, pat)
             if repo["name"] not in excluded]

    print("---------------------------Cloning all Repos")
    for repo in repos:
        clone_path = os.path.abspath(os.path.join(repo_dir, repo["name"]))
        print("--->Making clone location: " + clone_path)
        os.makedirs(clone_path, exist_ok=True)
        print("--->Cloning {}".format(repo["name"]))
        git_utils.clone_repo(repo["ssh_url"], clone_path)
        # Prefer an explicit branch; otherwise fall back to the branch
        # containing the previous tag, when one was supplied.
        if branch and git_utils.branch_exists(clone_path, branch):
            print("--->Branch '{}' exists, checking it out.".format(branch))
            git_utils.checkout(clone_path, branch)
        elif prev_tag:
            tag_branch = git_utils.get_tag_branch(clone_path, prev_tag)
            print("--->Checking out branch '{}' which contains tag '{}'.".format(tag_branch, prev_tag))
            git_utils.checkout(clone_path, tag_branch)

    print("---------------------------Tagging all Repos")
    for repo in repos:
        clone_path = os.path.abspath(os.path.join(repo_dir, repo["name"]))
        print("--->Tagging {}".format(repo["name"]))
        git_utils.create_tag(clone_path, tag, tag_msg)

    # Pushing is gated behind interactive confirmation.
    push = git_utils.ask_for_input("Would you like to push all tags to remote?")
    if not push.startswith("y"):
        return

    print("---------------------------Pushing all Repos")
    for repo in repos:
        try:
            clone_path = os.path.abspath(os.path.join(repo_dir, repo["name"]))
            print("--->Pushing {}".format(repo["name"]))
            git_utils.push_branch(clone_path, tag)
        except Exception as exc: # pylint: disable=broad-except
            print("Error encountered when trying to push {}".format(repo["name"]))
            print(exc)
            # On failure, offer to keep going with the remaining repos.
            cont_tag = git_utils.ask_for_input("Would you like to continue tagging other repos?")
            if not cont_tag.startswith("y"):
                raise
def build_env(args):
    '''Entry Function'''
    utils.check_conda_build_configs_exist(args.conda_build_configs)

    if args.container_build:
        # Container builds support only a single CUDA version per invocation.
        if len(args.cuda_versions.split(',')) > 1:
            raise OpenCEError(Error.TOO_MANY_CUDA)
        container_build.build_with_container_tool(args, sys.argv)
        return

    # Importing BuildTree is intentionally done here because it checks for the
    # existence of conda-build as BuildTree uses conda_build APIs.
    from open_ce.build_tree import construct_build_tree # pylint: disable=import-outside-toplevel

    build_tree = construct_build_tree(args)
    output_folder = os.path.abspath(args.output_folder)

    # Generate conda environment files
    conda_env_files = build_tree.write_conda_env_files(output_folder=output_folder,
                                                       path=output_folder)
    log.info("Generated conda environment files from the selected build arguments: %s",
             conda_env_files.values())
    log.info("One can use these environment files to create a conda" \
             " environment using \"conda env create -f <conda_env_file_name>.\"")

    if not args.skip_build_packages:
        # Build each package in the packages list
        for build_command in build_tree:
            if build_command.all_outputs_exist(args.output_folder):
                log.info("Skipping build of %s because it already exists.", build_command.recipe)
                continue
            try:
                log.info("Building %s", build_command.recipe)
                build_feedstock.build_feedstock_from_command(build_command,
                                                             output_folder=output_folder,
                                                             pkg_format=args.conda_pkg_format)
            except OpenCEError as exc:
                raise OpenCEError(Error.BUILD_RECIPE, build_command.repository, exc.msg) from exc

    if args.run_tests:
        _run_tests(build_tree, inputs.parse_arg_list(args.test_labels), conda_env_files, output_folder)
def build_feedstock(args):
    '''Entry Function'''
    # Here, importing BuildCommand is intentionally done here to avoid circular import.
    from open_ce.build_tree import BuildCommand # pylint: disable=import-outside-toplevel

    # Translate CLI arguments into a single BuildCommand for this feedstock.
    command_kwargs = dict(recipe=inputs.parse_arg_list(args.recipe_list),
                          repository=args.working_directory,
                          packages=[],
                          python=args.python_versions,
                          build_type=args.build_types,
                          mpi_type=args.mpi_types,
                          cudatoolkit=args.cuda_versions,
                          channels=args.channels_list,
                          conda_build_configs=args.conda_build_configs)
    build_feedstock_from_command(BuildCommand(**command_kwargs),
                                 recipe_config_file=args.recipe_config_file,
                                 output_folder=args.output_folder,
                                 local_src_dir=args.local_src_dir)
def tag_all_repos(github_org, tag, tag_msg, branch, repo_dir, pat, skipped_repos, prev_tag): # pylint: disable=too-many-arguments
    '''
    Clones, then tags all repos with a given tag, and pushes back to remote.
    These steps are performed in separate loops to make debugging easier.

    Args:
        github_org: GitHub organization to enumerate repositories from.
        tag: Tag name to create in every repository.
        tag_msg: Annotation message for the tag.
        branch: Branch to check out in each clone (handled by clone_repos).
        repo_dir: Local directory under which repositories are cloned.
        pat: GitHub personal access token.
        skipped_repos: Repositories (list or comma-separated string) to exclude.
        prev_tag: Previous tag used to pick a branch when `branch` is absent.
    '''
    skipped_repos = inputs.parse_arg_list(skipped_repos)
    repos = git_utils.get_all_repos(github_org, pat)
    # Bug fix: the filter was inverted (`in`), which processed ONLY the repos
    # that were supposed to be skipped. Exclude them instead, matching the
    # behavior of the non-refactored implementation.
    repos = [repo for repo in repos if repo["name"] not in skipped_repos]

    clone_repos(repos, branch, repo_dir, prev_tag)
    tag_repos(repos, tag, tag_msg, repo_dir)

    # Pushing is gated behind interactive confirmation.
    push = git_utils.ask_for_input("Would you like to push all tags to remote?")
    if not push.startswith("y"):
        return
    push_repos(repos, tag, repo_dir)
def build_runtime_container_image(args):
    """
    Create a runtime image which will have a conda environment created
    using locally built conda packages and environment file.
    """
    if not args.container_tool:
        raise OpenCEError(Error.NO_CONTAINER_TOOL_FOUND)

    channel_dir = os.path.abspath(args.local_conda_channel)
    if not os.path.exists(channel_dir):
        raise OpenCEError(Error.INCORRECT_INPUT_PATHS)

    temp_dir = os.path.join(channel_dir, TEMP_FILES)
    if not os.path.exists(temp_dir):
        os.mkdir(temp_dir)

    for env_file in parse_arg_list(args.conda_env_files):
        env_file = os.path.abspath(env_file)
        if not os.path.exists(env_file):
            raise OpenCEError(Error.INCORRECT_INPUT_PATHS)

        # Copy the conda environment file into the TEMP_FILES dir inside local
        # conda channel with a new name and modify it
        runtime_name = os.path.splitext(os.path.basename(env_file))[0] + '-runtime.yaml'
        runtime_file = os.path.join(channel_dir, TEMP_FILES, runtime_name)
        create_copy(env_file, runtime_file)
        # Point the environment's channels at the channel location inside the image.
        utils.replace_conda_env_channels(runtime_file, r'file:.*', "file:/{}".format(TARGET_DIR))

        image_version = utils.get_open_ce_version(env_file)
        image_name = build_image(args.local_conda_channel, os.path.basename(runtime_file),
                                 args.container_tool, image_version, args.container_build_args)
        print("Docker image with name {} is built successfully.".format(image_name))

    cleanup(channel_dir)
def _main(arg_strings=None): # pylint: disable=too-many-locals, too-many-statements
    '''
    Create a release (branch, tags, feedstock tags, release notes) when the
    opence-env git_tag has changed on the given branch.

    Args:
        arg_strings: Optional argument list for the parser (defaults to sys.argv).
    '''
    parser = _make_parser()
    args = parser.parse_args(arg_strings)

    config_file = None
    if args.conda_build_configs:
        config_file = os.path.abspath(args.conda_build_configs)

    primary_repo_path = "./"
    open_ce_env_file = os.path.abspath(os.path.join(primary_repo_path, "envs", "opence-env.yaml"))
    # No release work is needed unless the env file's git_tag changed.
    if not _has_git_tag_changed(primary_repo_path, args.branch, open_ce_env_file):
        print("--->The opence-env git_tag has not changed.")
        print("--->No release is needed.")
        return
    print("--->The opence-env git_tag has changed!")

    current_tag = _get_git_tag_from_env_file(open_ce_env_file)
    previous_tag = _get_previous_git_tag_from_env_file(primary_repo_path, args.branch, open_ce_env_file)
    version = _git_tag_to_version(current_tag)
    release_number = ".".join(version.split(".")[:-1])
    bug_fix = version.split(".")[-1]
    branch_name = "open-ce-r{}".format(release_number)
    version_msg = "Open-CE Version {}".format(version)
    release_name = "v{}".format(version)

    env_file_contents = env_config.load_env_config_files([open_ce_env_file],
                                                         utils.ALL_VARIANTS(),
                                                         ignore_urls=True)
    # Every loaded env file must agree on the current git_tag.
    for env_file_content in env_file_contents:
        env_file_tag = env_file_content.get(env_config.Key.git_tag_for_env.name, None)
        if env_file_tag != current_tag:
            message = "Incorrect {} '{}' found in the following env_file:\n{}".format(
                env_config.Key.git_tag_for_env.name, env_file_tag, env_file_content)
            raise Exception(message)

    if not git_utils.branch_exists(primary_repo_path, branch_name):
        print("--->Creating {} branch in {}".format(branch_name, args.primary_repo))
        git_utils.create_branch(primary_repo_path, branch_name)
    else:
        # Bug fix: this message previously printed current_tag; the entity
        # that already exists (and was checked above) is branch_name.
        print("--->Branch {} already exists in {}. Not creating it.".format(branch_name,
                                                                            args.primary_repo))

    print("--->Tag Primary Branch")
    git_utils.create_tag(primary_repo_path, current_tag, version_msg)

    if args.not_dry_run:
        print("--->Pushing branch.")
        git_utils.push_branch(primary_repo_path, branch_name)
        print("--->Pushing tag.")
        git_utils.push_branch(primary_repo_path, current_tag)
    else:
        print("--->Skipping pushing branch and tag for dry run.")

    repos = _get_all_feedstocks(env_files=env_file_contents,
                                github_org=args.github_org,
                                pat=args.pat,
                                skipped_repos=[args.primary_repo, ".github"] +
                                              inputs.parse_arg_list(args.skipped_repos))
    repos.sort(key=lambda repo: repo["name"])

    tag_all_repos.clone_repos(repos=repos,
                              branch=None,
                              repo_dir=args.repo_dir,
                              prev_tag=previous_tag)
    tag_all_repos.tag_repos(repos=repos,
                            tag=current_tag,
                            tag_msg=version_msg,
                            repo_dir=args.repo_dir)
    if args.not_dry_run:
        tag_all_repos.push_repos(repos=repos,
                                 tag=current_tag,
                                 repo_dir=args.repo_dir,
                                 continue_query=False)
    else:
        print("--->Skipping pushing feedstocks for dry run.")

    print("--->Generating Release Notes.")
    release_notes = _create_release_notes(repos,
                                          version,
                                          release_number,
                                          bug_fix,
                                          current_tag,
                                          previous_tag,
                                          utils.ALL_VARIANTS(),
                                          config_file,
                                          repo_dir=args.repo_dir,)
    print(release_notes)

    if args.not_dry_run:
        print("--->Creating Draft Release.")
        git_utils.create_release(args.github_org, args.primary_repo, args.pat,
                                 current_tag, release_name, release_notes, True)
    else:
        print("--->Skipping release creation for dry run.")
def test_parse_arg_list_list_input():
    ''' Parse arg list should return the input argument if it's already a list. '''
    # A list passes through unchanged.
    already_a_list = ["a", "b", "c"]
    assert inputs.parse_arg_list(already_a_list) == already_a_list
def _main(arg_strings=None):
    '''Clone each repo, apply the given patches on a new branch, and open review PRs.'''
    args = make_parser().parse_args(arg_strings)

    excluded = inputs.parse_arg_list(args.skipped_repos)
    repos = [repo for repo in git_utils.get_all_repos(args.github_org, args.pat)
             if repo["name"] not in excluded]

    # "key:value" CLI parameters become a substitution dictionary for patches.
    param_dict = {}
    for param in args.params:
        param_dict[param.split(":")[0]] = param.split(":")[1]

    patches = [os.path.abspath(patch_file)
               for patch_file in inputs.parse_arg_list(args.patches)]

    for repo in repos:
        try:
            print("Beginning " + repo["name"] + "---------------------------")
            repo_path = os.path.abspath(os.path.join(args.repo_dir, repo["name"]))
            print("--->Making clone location: " + repo_path)
            os.makedirs(repo_path, exist_ok=True)
            print("--->Cloning {}".format(repo["name"]))
            git_utils.clone_repo(repo["ssh_url"], repo_path)
            head_branch = git_utils.get_current_branch(repo_path)
            git_utils.create_branch(repo_path, args.branch)
            # Apply every patch after filling in its parameters.
            for patch in patches:
                replaced_patch = git_utils.fill_in_params(patch, param_dict,
                                                          default_branch=head_branch)
                print("--->Applying Patch {}".format(replaced_patch))
                git_utils.apply_patch(repo_path, replaced_patch)
            print("--->Pushing Branch")
            git_utils.push_branch(repo_path, args.branch)
            print("--->Creating PR")
            created_pr = git_utils.create_pr(args.github_org, repo["name"], args.pat,
                                             args.commit_msg, args.pr_msg,
                                             args.branch, head_branch)
            print("--->Requesting PR Review")
            git_utils.request_pr_review(args.github_org, repo["name"], args.pat,
                                        created_pr["number"],
                                        inputs.parse_arg_list(args.reviewers),
                                        inputs.parse_arg_list(args.team_reviewers))
            print("---------------------------" + "Finished " + repo["name"])
        except Exception as exc: # pylint: disable=broad-except
            print("Error encountered when trying to patch {}".format(repo["name"]))
            print(exc)
            # On failure, offer to keep going with the remaining repos.
            cont = git_utils.ask_for_input("Would you like to continue applying patches to other repos?")
            if not cont.startswith("y"):
                raise
def build_feedstock_from_command(command, # pylint: disable=too-many-arguments, too-many-locals
                                 recipe_config_file=None,
                                 output_folder=utils.DEFAULT_OUTPUT_FOLDER,
                                 local_src_dir=None,
                                 pkg_format=utils.DEFAULT_PKG_FORMAT,
                                 debug=None,
                                 debug_output_id=None):
    '''
    Build a feedstock from a build_command object.

    Args:
        command: Build command object providing recipe list, repository, variant
                 fields (python, build_type, mpi_type, cudatoolkit), channels and
                 conda_build_configs.
        recipe_config_file: Optional path to the recipe's config file.
        output_folder: Folder that receives the built packages (also used as a channel).
        local_src_dir: Optional local source directory passed to _set_local_src_dir.
        pkg_format: "conda" selects the .conda (v2) package format.
        debug: When truthy, create debug environments instead of building.
        debug_output_id: Output id passed to conda_build.api.debug.

    Raises:
        OpenCEError: With Error.BUILD_RECIPE when conda-build fails for a recipe.
    '''
    utils.check_if_package_exists('conda-build')

    # pylint: disable=import-outside-toplevel
    import conda_build.api
    from conda_build.config import get_or_merge_config

    # Run from the feedstock's repository if one is given, restoring cwd afterwards.
    saved_working_directory = None
    if command.repository:
        saved_working_directory = os.getcwd()
        os.chdir(os.path.abspath(command.repository))

    recipes_to_build = inputs.parse_arg_list(command.recipe)

    for variant in utils.make_variants(command.python, command.build_type,
                                       command.mpi_type, command.cudatoolkit):
        build_config_data, recipe_config_file = load_package_config(recipe_config_file,
                                                                    variant,
                                                                    command.recipe_path)
        # Build each recipe
        if build_config_data['recipes'] is None:
            build_config_data['recipes'] = []
            log.info("No recipe to build for given configuration.")
        for recipe in build_config_data['recipes']:
            # An explicit recipe list acts as a filter; empty list means "build all".
            if recipes_to_build and recipe['name'] not in recipes_to_build:
                continue
            config = get_or_merge_config(None, variant=variant)
            config.skip_existing = False
            config.prefix_length = 225
            config.output_folder = output_folder
            # URLs are downloaded first; only configs that exist locally are used.
            # (Renamed loop variables to avoid shadowing the `config` object above.)
            conda_build_configs = [utils.download_file(cbc) if utils.is_url(cbc) else cbc
                                   for cbc in command.conda_build_configs]
            config.variant_config_files = [cbc for cbc in conda_build_configs
                                           if os.path.exists(cbc)]

            if pkg_format == "conda":
                config.conda_pkg_format = "2" # set to .conda format

            recipe_conda_build_config = get_conda_build_config()
            if recipe_conda_build_config:
                config.variant_config_files.append(recipe_conda_build_config)

            # The local output folder is searched first, then command/recipe channels.
            config.channel_urls = [os.path.abspath(output_folder)]
            config.channel_urls += command.channels
            config.channel_urls += build_config_data.get('channels', [])

            _set_local_src_dir(local_src_dir, recipe, recipe_config_file)
            try:
                if debug:
                    activation_string = conda_build.api.debug(
                        os.path.join(os.getcwd(), recipe['path']),
                        output_id=debug_output_id,
                        config=config)
                    if activation_string:
                        log.info("#" * 80)
                        log.info("Build and/or host environments created for debug output id %s."
                                 "To enter a debugging environment:\n", debug_output_id)
                        log.info(activation_string)
                        log.info("#" * 80)
                else:
                    conda_build.api.build(os.path.join(os.getcwd(), recipe['path']),
                                          config=config)
            except Exception as exc: # pylint: disable=broad-except
                traceback.print_exc()
                # Bug fix: os.getcwd was previously passed as a function object;
                # it must be called to report the actual directory.
                raise OpenCEError(Error.BUILD_RECIPE,
                                  recipe['name'] if 'name' in recipe else os.getcwd(),
                                  str(exc)) from exc

    if saved_working_directory:
        os.chdir(saved_working_directory)
def build_env(args):
    '''Entry Function'''
    utils.check_conda_build_configs_exist(args.conda_build_configs)

    if args.container_build:
        # Container builds support only a single CUDA version per invocation.
        if len(args.cuda_versions.split(',')) > 1:
            raise OpenCEError(Error.TOO_MANY_CUDA)
        container_build.build_with_container_tool(args, sys.argv)
        return

    # Checking conda-build existence if --container_build is not specified
    utils.check_if_package_exists('conda-build')

    # Here, importing BuildTree is intentionally done after checking
    # existence of conda-build as BuildTree uses conda_build APIs.
    from open_ce.build_tree import BuildTree # pylint: disable=import-outside-toplevel

    # If repository_folder doesn't exist, create it
    if args.repository_folder and not os.path.exists(args.repository_folder):
        os.mkdir(args.repository_folder)

    parse = inputs.parse_arg_list
    # Create the build tree
    build_tree = BuildTree(env_config_files=args.env_config_file,
                           python_versions=parse(args.python_versions),
                           build_types=parse(args.build_types),
                           mpi_types=parse(args.mpi_types),
                           cuda_versions=parse(args.cuda_versions),
                           repository_folder=args.repository_folder,
                           channels=args.channels_list,
                           git_location=args.git_location,
                           git_tag_for_env=args.git_tag_for_env,
                           git_up_to_date=args.git_up_to_date,
                           conda_build_config=args.conda_build_configs,
                           packages=parse(args.packages))

    # Generate conda environment files
    output_folder = os.path.abspath(args.output_folder)
    conda_env_files = build_tree.write_conda_env_files(output_folder=output_folder,
                                                       path=output_folder)
    print("Generated conda environment files from the selected build arguments:",
          conda_env_files.values())
    print("INFO: One can use these environment files to create a conda" \
          " environment using \"conda env create -f <conda_env_file_name>.\"")

    if not args.skip_build_packages:
        # Build each package in the packages list
        for build_command in build_tree:
            if build_command.all_outputs_exist(args.output_folder):
                print("Skipping build of " + build_command.recipe + " because it already exists")
                continue
            try:
                print("Building " + build_command.recipe)
                build_feedstock.build_feedstock_from_command(build_command,
                                                             output_folder=output_folder)
            except OpenCEError as exc:
                raise OpenCEError(Error.BUILD_RECIPE, build_command.repository, exc.msg) from exc

    if args.run_tests:
        _run_tests(build_tree, inputs.parse_arg_list(args.test_labels), conda_env_files)