def make_variants(python_versions=DEFAULT_PYTHON_VERS, build_types=DEFAULT_BUILD_TYPES, mpi_types=DEFAULT_MPI_TYPES, cuda_versions=DEFAULT_CUDA_VERS):
    '''Create a cross product of possible variant combinations.'''
    # Normalize each argument into a list, then pair every key with one value
    # from each list via the Cartesian product.
    keys = ('python', 'build_type', 'mpi_type', 'cudatoolkit')
    value_lists = (inputs.parse_arg_list(python_versions),
                   inputs.parse_arg_list(build_types),
                   inputs.parse_arg_list(mpi_types),
                   inputs.parse_arg_list(cuda_versions))
    return [dict(zip(keys, combo)) for combo in product(*value_lists)]
def test_parse_arg_list_large_string_input():
    ''' Test parse_arg_list with a more complicated input, including spaces. '''
    raw_input = "this,is a, big , test ,"
    # Note: interior whitespace and the trailing empty entry are expected to
    # survive the parse unchanged.
    expected = ["this", "is a", " big ", " test ", ""]
    assert inputs.parse_arg_list(raw_input) == expected
def test_parse_arg_list_small_string_input():
    ''' Tests that parse_arg_list works for a simple case. '''
    # A plain comma-separated string should split into its three entries.
    assert inputs.parse_arg_list("a,b,c") == ["a", "b", "c"]
def _main(arg_strings=None):
    '''
    Clone every repo in a GitHub org (except skipped ones), apply a set of patch
    files on a fresh branch in each, push the branch, and open a pull request.

    Args:
        arg_strings: Optional list of CLI argument strings; None means sys.argv.

    Raises:
        Re-raises any per-repo exception when the user declines to continue.
    '''
    parser = make_parser()
    args = parser.parse_args(arg_strings)
    skipped_repos = inputs.parse_arg_list(args.skipped_repos)
    repos = git_utils.get_all_repos(args.github_org, args.pat)
    # Drop any repos the caller explicitly asked to skip.
    repos = [repo for repo in repos if repo["name"] not in skipped_repos]
    # Resolve patch paths before changing directories so relative paths survive.
    patches = [os.path.abspath(arg_file) for arg_file in inputs.parse_arg_list(args.patches)]
    for repo in repos:
        try:
            print("Beginning " + repo["name"] + "---------------------------")
            repo_path = os.path.abspath(os.path.join(args.repo_dir, repo["name"]))
            print("--->Making clone location: " + repo_path)
            os.makedirs(repo_path, exist_ok=True)
            print("--->Cloning {}".format(repo["name"]))
            git_utils.clone_repo(repo["ssh_url"], repo_path)
            # Remember the branch we cloned so the PR targets it as its base.
            head_branch = git_utils.get_current_branch(repo_path)
            git_utils.create_branch(repo_path, args.branch)
            for patch in patches:
                print("--->Applying Patch {}".format(patch))
                git_utils.apply_patch(repo_path, patch)
            print("--->Pushing Branch")
            git_utils.push_branch(repo_path, args.branch)
            print("--->Creating PR")
            git_utils.create_pr(args.github_org, repo["name"], args.pat, args.commit_msg, args.pr_msg, args.branch, head_branch)
            print("---------------------------" + "Finished " + repo["name"])
        except Exception as exc:  # pylint: disable=broad-except
            # One repo failing should not necessarily abort the whole run;
            # ask the operator whether to keep going.
            print("Error encountered when trying to patch {}".format(repo["name"]))
            print(exc)
            cont = git_utils.ask_for_input("Would you like to continue applying patches to other repos?")
            if cont.startswith("y"):
                continue
            raise
def build_feedstock_from_command(command,  # pylint: disable=too-many-arguments
                                 recipe_config_file=None,
                                 output_folder=utils.DEFAULT_OUTPUT_FOLDER,
                                 extra_channels=None,
                                 conda_build_config=utils.DEFAULT_CONDA_BUILD_CONFIG,
                                 local_src_dir=None):
    '''
    Build a feedstock from a build_command object.

    Args:
        command: BuildCommand describing recipes, repository, variants and channels.
        recipe_config_file: Optional path to the feedstock's recipe config file.
        output_folder: Folder where built packages are placed.
        extra_channels: Additional conda channels to search (prepended).
        conda_build_config: Path to the conda_build_config.yaml to use.
        local_src_dir: Optional local source directory override for the recipe.

    Raises:
        OpenCEError: If building any recipe fails.
    '''
    if not extra_channels:
        extra_channels = []
    saved_working_directory = None
    if command.repository:
        saved_working_directory = os.getcwd()
        os.chdir(os.path.abspath(command.repository))
    try:
        recipes_to_build = inputs.parse_arg_list(command.recipe)
        for variant in utils.make_variants(command.python, command.build_type, command.mpi_type, command.cudatoolkit):
            build_config_data, recipe_config_file = load_package_config(recipe_config_file, variant)
            # Build each recipe
            for recipe in build_config_data['recipes']:
                if recipes_to_build and recipe['name'] not in recipes_to_build:
                    continue
                config = get_or_merge_config(None, variant=variant)
                config.skip_existing = True
                config.prefix_length = 225
                config.output_folder = output_folder
                config.variant_config_files = [conda_build_config]
                # A feedstock may carry its own conda_build_config.yaml; if
                # present it is applied on top of the global one.
                recipe_conda_build_config = os.path.join(os.getcwd(), "config", "conda_build_config.yaml")
                if os.path.exists(recipe_conda_build_config):
                    config.variant_config_files.append(recipe_conda_build_config)
                config.channel_urls = extra_channels + command.channels + build_config_data.get('channels', [])
                _set_local_src_dir(local_src_dir, recipe, recipe_config_file)
                try:
                    conda_build.api.build(os.path.join(os.getcwd(), recipe['path']), config=config)
                except Exception as exc:  # pylint: disable=broad-except
                    traceback.print_exc()
                    # BUG FIX: os.getcwd was previously passed as a function
                    # object (missing parentheses); call it so the error
                    # message contains the actual directory path.
                    raise OpenCEError(Error.BUILD_RECIPE,
                                      recipe['name'] if 'name' in recipe else os.getcwd(),
                                      str(exc)) from exc
    finally:
        # Restore the caller's working directory even when a build fails,
        # instead of leaving the process chdir'd into the repository.
        if saved_working_directory:
            os.chdir(saved_working_directory)
def _test_feedstock(args):
    '''Run the feedstock's test commands against a conda env file; return the failure count.'''
    env_file = os.path.abspath(args.conda_env_file)
    # Recover the variant information encoded in the environment file name/contents.
    variant_string = conda_env_file_generator.get_variant_string(env_file)
    variants = utils.variant_string_to_dict(variant_string) if variant_string else {}
    # Mark every requested test label as active in the variant dict.
    for label in inputs.parse_arg_list(args.test_labels):
        variants[label] = True
    commands = gen_test_commands(working_dir=args.test_working_dir, variants=variants)
    failures = run_test_commands(env_file, commands)
    display_failed_tests(failures)
    return len(failures)
def build_feedstock(args):
    '''Entry Function'''
    # Imported locally (not at module level) to avoid a circular import.
    from build_tree import BuildCommand  # pylint: disable=import-outside-toplevel

    command_kwargs = dict(recipe=inputs.parse_arg_list(args.recipe_list),
                          repository=args.working_directory,
                          packages=[],
                          python=args.python_versions,
                          build_type=args.build_types,
                          mpi_type=args.mpi_types,
                          cudatoolkit=args.cuda_versions,
                          channels=args.channels_list)
    build_feedstock_from_command(BuildCommand(**command_kwargs),
                                 recipe_config_file=args.recipe_config_file,
                                 output_folder=args.output_folder,
                                 local_src_dir=args.local_src_dir)
def tag_all_repos(github_org, tag, tag_msg, branch, repo_dir, pat, skipped_repos):  # pylint: disable=too-many-arguments
    '''
    Clones, then tags all repos with a given tag, and pushes back to remote.
    These steps are performed in separate loops to make debugging easier.

    Args:
        github_org: GitHub organization whose repos are tagged.
        tag: Tag name to create.
        tag_msg: Message attached to the tag.
        branch: Branch to check out when cloning.
        repo_dir: Local directory under which repos are cloned.
        pat: Personal access token for the GitHub API.
        skipped_repos: Comma-separated string or list of repo names to skip.

    Raises:
        Re-raises a push failure when the user declines to continue.
    '''
    skipped_repos = inputs.parse_arg_list(skipped_repos)
    repos = git_utils.get_all_repos(github_org, pat)
    # Drop any repos the caller explicitly asked to skip.
    repos = [repo for repo in repos if repo["name"] not in skipped_repos]
    print("---------------------------Cloning all Repos")
    for repo in repos:
        repo_path = os.path.abspath(os.path.join(repo_dir, repo["name"]))
        print("--->Making clone location: " + repo_path)
        os.makedirs(repo_path, exist_ok=True)
        print("--->Cloning {}".format(repo["name"]))
        git_utils.clone_repo(repo["ssh_url"], repo_path, branch)
    print("---------------------------Tagging all Repos")
    for repo in repos:
        repo_path = os.path.abspath(os.path.join(repo_dir, repo["name"]))
        print("--->Tagging {}".format(repo["name"]))
        git_utils.create_tag(repo_path, tag, tag_msg)
    # Pushing is the only remote-mutating step; confirm before doing it.
    push = git_utils.ask_for_input("Would you like to push all tags to remote?")
    if not push.startswith("y"):
        return
    print("---------------------------Pushing all Repos")
    for repo in repos:
        try:
            repo_path = os.path.abspath(os.path.join(repo_dir, repo["name"]))
            print("--->Pushing {}".format(repo["name"]))
            git_utils.push_branch(repo_path, tag)
        except Exception as exc:  # pylint: disable=broad-except
            # One repo failing should not necessarily abort the whole run;
            # ask the operator whether to keep going.
            print("Error encountered when trying to push {}".format(repo["name"]))
            print(exc)
            cont_tag = git_utils.ask_for_input("Would you like to continue tagging other repos?")
            if cont_tag.startswith("y"):
                continue
            raise
def build_env(args):
    '''
    Entry Function.

    Builds every package in the dependency tree described by the env config
    files, either inside a docker container (--docker_build) or directly on
    the host, then optionally runs tests.

    Raises:
        OpenCEError: On multiple CUDA versions with docker builds, or when a
            recipe build fails.
    '''
    if args.docker_build:
        # Docker builds only support a single CUDA version per invocation.
        if len(args.cuda_versions.split(',')) > 1:
            raise OpenCEError(Error.TOO_MANY_CUDA)
        docker_build.build_with_docker(os.path.abspath(args.output_folder), args.build_types, args.cuda_versions, sys.argv)
        # Rewrite channel paths in the generated env files from the
        # container's output location to the host's output folder.
        for conda_env_file in glob.glob(os.path.join(args.output_folder, "*.yaml")):
            utils.replace_conda_env_channels(
                conda_env_file,
                os.path.abspath(os.path.join(docker_build.HOME_PATH, utils.DEFAULT_OUTPUT_FOLDER)),
                os.path.abspath(args.output_folder))
        return

    # Checking conda-build existence if --docker_build is not specified
    utils.check_if_conda_build_exists()

    # Here, importing BuildTree is intentionally done after checking
    # existence of conda-build as BuildTree uses conda_build APIs.
    from build_tree import BuildTree  # pylint: disable=import-outside-toplevel

    # If repository_folder doesn't exist, create it
    if args.repository_folder and not os.path.exists(args.repository_folder):
        os.mkdir(args.repository_folder)

    variants = utils.make_variants(args.python_versions, args.build_types, args.mpi_types, args.cuda_versions)
    validate_config.validate_env_config(args.conda_build_config, args.env_config_file, variants, args.repository_folder)

    # Create the build tree
    build_tree = BuildTree(env_config_files=args.env_config_file,
                           python_versions=inputs.parse_arg_list(args.python_versions),
                           build_types=inputs.parse_arg_list(args.build_types),
                           mpi_types=inputs.parse_arg_list(args.mpi_types),
                           cuda_versions=inputs.parse_arg_list(args.cuda_versions),
                           repository_folder=args.repository_folder,
                           git_location=args.git_location,
                           git_tag_for_env=args.git_tag_for_env,
                           conda_build_config=args.conda_build_config,
                           test_labels=inputs.parse_arg_list(args.test_labels))

    # Generate conda environment files
    conda_env_files = build_tree.write_conda_env_files(
        channels=args.channels_list,
        output_folder=os.path.abspath(args.output_folder),
        path=os.path.abspath(args.output_folder))
    # FIX: this string literal was broken across two physical lines in the
    # source (a syntax error); reconstructed as a single literal.
    print("Generated conda environment files from the selected build arguments:", conda_env_files.values())
    print("INFO: One can use these environment files to create a conda"
          " environment using \"conda env create -f <conda_env_file_name>.\"")

    if not args.skip_build_packages:
        # Build each package in the packages list
        for build_command in build_tree:
            if not _all_outputs_exist(args.output_folder, build_command.output_files):
                try:
                    print("Building " + build_command.recipe)
                    build_feedstock.build_feedstock_from_command(
                        build_command,
                        output_folder=os.path.abspath(args.output_folder),
                        extra_channels=[os.path.abspath(args.output_folder)] + args.channels_list,
                        conda_build_config=os.path.abspath(args.conda_build_config))
                except OpenCEError as exc:
                    raise OpenCEError(Error.BUILD_RECIPE, build_command.repository, exc.msg) from exc
            else:
                print("Skipping build of " + build_command.recipe + " because it already exists")

    if args.run_tests:
        _run_tests(build_tree, conda_env_files)
def test_parse_arg_list_list_input():
    ''' Parse arg list should return the input argument if it's already a list. '''
    already_a_list = ["a", "b", "c"]
    # A list input should pass through unchanged.
    assert inputs.parse_arg_list(already_a_list) == already_a_list