def __init__(self, #pylint: disable=super-init-not-called
             env_config_files, python_versions, build_types, mpi_types,
             cuda_versions, repository_folder="./", channels=None,
             git_location=utils.DEFAULT_GIT_LOCATION,
             git_tag_for_env=utils.DEFAULT_GIT_TAG,
             git_up_to_date=False,
             conda_build_config=utils.DEFAULT_CONDA_BUILD_CONFIG):
    """Record build-tree configuration without invoking the parent initializer."""
    # Plain configuration capture; nothing is cloned or built here.
    self._env_config_files = env_config_files
    self._repository_folder = repository_folder
    self._channels = channels or []
    self._git_location = git_location
    self._git_tag_for_env = git_tag_for_env
    self._git_up_to_date = git_up_to_date
    self._conda_build_config = conda_build_config
    # One variant dict per combination of the requested version axes.
    self._possible_variants = utils.make_variants(python_versions, build_types,
                                                  mpi_types, cuda_versions)
    self._test_feedstocks = {}
    self._initial_package_indices = None
def test_get_repo_for_nonexisting_patch(mocker):
    '''
    Test for `_get_repo` that verifies exception is thrown when patch application fails
    '''
    config_path = os.path.join(test_dir, 'test-env3.yaml')
    build_tree = TestBuildTree([config_path], "3.6", "cpu", "openmpi", "10.2")

    tracker = helpers.DirTracker()
    mocker.patch('os.getcwd', side_effect=tracker.mocked_getcwd)
    mocker.patch('os.chdir', side_effect=tracker.validate_chdir)
    # Force every "git apply" to fail (retval=1) while clone/checkout succeed.
    mocker.patch('os.system',
                 side_effect=(lambda x: helpers.validate_cli(x,
                                                             expect=["git apply"],
                                                             ignore=["git clone", "git checkout"],
                                                             retval=1)))
    mocker.patch('shutil.rmtree', return_value=None)

    for variant in utils.make_variants("3.6", "cpu", "openmpi", "10.2"):
        # test-env3.yaml has defined "patches".
        for config_data in env_config.load_env_config_files([config_path], [variant]):
            for pkg in config_data.get(env_config.Key.packages.name, []):
                # "package211" has specified a non-existing patch
                if pkg.get(env_config.Key.feedstock.name) == "package211":
                    with pytest.raises(OpenCEError) as exc:
                        _ = build_tree._get_repo(config_data, pkg)
                    assert "Failed to apply patch " in str(exc.value)
def validate_and_remove_conda_env_files(py_versions=utils.DEFAULT_PYTHON_VERS,
                                        build_types=utils.DEFAULT_BUILD_TYPES,
                                        mpi_types=utils.DEFAULT_MPI_TYPES,
                                        cuda_versions=utils.DEFAULT_CUDA_VERS,
                                        channels=None):
    '''
    Assert that a conda environment file exists in the default output folder for
    every requested variant, optionally verify its channel list, then delete it.

    Args:
        py_versions: Python versions to expand into variants.
        build_types: Build types to expand into variants.
        mpi_types: MPI types to expand into variants.
        cuda_versions: CUDA versions to expand into variants.
        channels: Optional channels that must all appear in each env file.
    '''
    # Check if conda env files are created for given python versions and build variants
    variants = utils.make_variants(py_versions, build_types, mpi_types, cuda_versions)
    for variant in variants:
        conda_env_file = os.path.join(os.getcwd(),
                                      utils.DEFAULT_OUTPUT_FOLDER,
                                      "{}{}.yaml".format(utils.CONDA_ENV_FILENAME_PREFIX,
                                                         utils.variant_string(variant.get('python'),
                                                                              variant.get('build_type'),
                                                                              variant.get('mpi_type'),
                                                                              variant.get('cudatoolkit'))))
        assert os.path.exists(conda_env_file)
        if channels:
            with open(conda_env_file, 'r') as file_handle:
                env_info = yaml.safe_load(file_handle)
            env_channels = env_info['channels']
            # Generator form: no throwaway list, no redundant parentheses.
            assert all(channel in env_channels for channel in channels)

        # Remove the file once its existence is verified
        os.remove(conda_env_file)
def test_get_repo_with_patches(mocker, caplog):
    '''
    Test for `_get_repo` that verifies `patches` field
    '''
    config_path = os.path.join(test_dir, 'test-env3.yaml')
    build_tree = TestBuildTree([config_path], "3.6", "cpu", "openmpi", "10.2")

    tracker = helpers.DirTracker()
    mocker.patch('os.getcwd', side_effect=tracker.mocked_getcwd)
    mocker.patch('os.chdir', side_effect=tracker.validate_chdir)
    # All git commands succeed; we only verify that "git apply" is issued.
    mocker.patch('os.system',
                 return_value=0,
                 side_effect=(lambda x: helpers.validate_cli(x,
                                                             expect=["git apply"],
                                                             ignore=["git clone", "git checkout"])))

    for variant in utils.make_variants("3.6", "cpu", "openmpi", "10.2"):
        # test-env3.yaml has specified "patches".
        for config_data in env_config.load_env_config_files([config_path], [variant]):
            for pkg in config_data.get(env_config.Key.packages.name, []):
                if pkg.get(env_config.Key.feedstock.name) == "package22":
                    _ = build_tree._get_repo(config_data, pkg)
                    assert "Patch apply command: git apply" in caplog.text
                    break
def test_get_repo_git_tag_options(mocker, caplog):
    '''
    Test for `_get_repo` that verifies `git_tag` and `git_tag_for_env` priorities.
    '''
    env_file1 = os.path.join(test_dir, 'test-env1.yaml')
    mock_build_tree = TestBuildTree([env_file1], "3.6", "cpu", "openmpi", "10.2")

    dir_tracker = helpers.DirTracker()
    mocker.patch('os.getcwd', side_effect=dir_tracker.mocked_getcwd)
    mocker.patch('os.chdir', side_effect=dir_tracker.validate_chdir)
    # First phase: clone/checkout commands are expected to be issued.
    mocker.patch('os.system', return_value=0,
                 side_effect=(lambda x: helpers.validate_cli(x, possible_expect=["git clone", "git checkout"])))

    possible_variants = utils.make_variants("3.6", "cpu", "openmpi", "10.2")
    for variant in possible_variants:
        # test-env1.yaml has defined "git_tag" and "git_tag_for_env".
        env_config_data_list = env_config.load_env_config_files([env_file1], [variant])
        for env_config_data in env_config_data_list:
            packages = env_config_data.get(env_config.Key.packages.name, [])
            for package in packages:
                _ = mock_build_tree._get_repo(env_config_data, package)
                validate_git_tags(mock_build_tree._git_tag_for_env, env_config_data, package, caplog)

        # Setting git_tag_for_env in BuildTree should override whatever is in the config file
        mock_build_tree._git_tag_for_env = "test_tag_for_all"
        env_config_data_list = env_config.load_env_config_files([env_file1], [variant])
        for env_config_data in env_config_data_list:
            packages = env_config_data.get(env_config.Key.packages.name, [])
            for package in packages:
                _ = mock_build_tree._get_repo(env_config_data, package)
                validate_git_tags(mock_build_tree._git_tag_for_env, env_config_data, package, caplog)

        # Setting git_tag_for_env in BuildTree back to Default and no git tags
        # specified in the config file too.
        # Re-patch os.system: with no tag anywhere, "git checkout" must NOT run,
        # hence it moves to the reject list.
        mocker.patch('os.system', return_value=0,
                     side_effect=(lambda x: helpers.validate_cli(x, possible_expect=["git clone", "git apply"], reject=["git checkout"])))
        mock_build_tree._git_tag_for_env = None
        env_file2 = os.path.join(test_dir, 'test-env3.yaml')
        env_config_data_list = env_config.load_env_config_files([env_file2], [variant])
        for env_config_data in env_config_data_list:
            packages = env_config_data.get(env_config.Key.packages.name, [])
            for package in packages:
                _ = mock_build_tree._get_repo(env_config_data, package)
                validate_git_tags(mock_build_tree._git_tag_for_env, env_config_data, package, caplog)
def build_feedstock_from_command(command, # pylint: disable=too-many-arguments, too-many-locals
                                 recipe_config_file=None,
                                 output_folder=utils.DEFAULT_OUTPUT_FOLDER,
                                 conda_build_config=utils.DEFAULT_CONDA_BUILD_CONFIG,
                                 local_src_dir=None):
    '''
    Build a feedstock from a build_command object.

    Args:
        command: Build command describing the feedstock (repository, recipes, variant axes, channels).
        recipe_config_file: Optional recipe config path; resolved via load_package_config.
        output_folder: Destination for built packages; also used as a local channel.
        conda_build_config: Path to a conda_build_config.yaml; skipped when it does not exist.
        local_src_dir: Optional local source directory applied via _set_local_src_dir.
    Raises:
        OpenCEError: If any recipe fails to build.
    '''
    utils.check_if_package_exists('conda-build')

    # pylint: disable=import-outside-toplevel
    import conda_build.api
    from conda_build.config import get_or_merge_config

    # Remember where we started so we can restore the working directory at the end.
    saved_working_directory = None
    if command.repository:
        saved_working_directory = os.getcwd()
        os.chdir(os.path.abspath(command.repository))

    recipes_to_build = inputs.parse_arg_list(command.recipe)

    for variant in utils.make_variants(command.python, command.build_type, command.mpi_type, command.cudatoolkit):
        build_config_data, recipe_config_file = load_package_config(recipe_config_file, variant, command.recipe_path)

        # Build each recipe
        if build_config_data['recipes'] is None:
            build_config_data['recipes'] = []
            print("INFO: No recipe to build for given configuration.")
        for recipe in build_config_data['recipes']:
            if recipes_to_build and recipe['name'] not in recipes_to_build:
                continue

            config = get_or_merge_config(None, variant=variant)
            config.skip_existing = True
            config.prefix_length = 225
            config.output_folder = output_folder
            config.variant_config_files = [conda_build_config] if os.path.exists(conda_build_config) else []

            recipe_conda_build_config = get_conda_build_config()
            if recipe_conda_build_config:
                config.variant_config_files.append(recipe_conda_build_config)

            # The output folder itself is the highest-priority channel.
            config.channel_urls = [os.path.abspath(output_folder)]
            config.channel_urls += command.channels
            config.channel_urls += build_config_data.get('channels', [])

            _set_local_src_dir(local_src_dir, recipe, recipe_config_file)
            try:
                conda_build.api.build(os.path.join(os.getcwd(), recipe['path']), config=config)
            except Exception as exc: # pylint: disable=broad-except
                traceback.print_exc()
                # Bug fix: os.getcwd must be CALLED — previously the function object
                # itself was passed, producing a useless repr in the error message.
                raise OpenCEError(Error.BUILD_RECIPE,
                                  recipe['name'] if 'name' in recipe else os.getcwd(),
                                  str(exc)) from exc

    if saved_working_directory:
        os.chdir(saved_working_directory)
def validate_env(args):
    '''Entry Function'''
    # Validate every variant combination; wrap any failure in a VALIDATE_ENV error.
    for variant in utils.make_variants(args.python_versions, args.build_types,
                                       args.mpi_types, args.cuda_versions):
        try:
            env_config.load_env_config_files(args.env_config_file, variant)
        except OpenCEError as exc:
            raise OpenCEError(Error.VALIDATE_ENV, args.env_config_file,
                              str(variant), exc.msg) from exc
def validate_conda_env_files(py_versions=utils.DEFAULT_PYTHON_VERS,
                             build_types=utils.DEFAULT_BUILD_TYPES,
                             mpi_types=utils.DEFAULT_MPI_TYPES,
                             cuda_versions=utils.DEFAULT_CUDA_VERS):
    '''
    Assert that a conda environment file exists in the default output folder for
    every requested variant, then delete it.

    Args:
        py_versions: Python versions to expand into variants.
        build_types: Build types to expand into variants.
        mpi_types: MPI types to expand into variants.
        cuda_versions: CUDA versions to expand into variants.
    '''
    # Check if conda env files are created for given python versions and build variants
    variants = utils.make_variants(py_versions, build_types, mpi_types, cuda_versions)
    for variant in variants:
        # Renamed from `cuda_env_file`: the file is a conda env file for any build
        # type, matching the naming in validate_and_remove_conda_env_files.
        conda_env_file = os.path.join(os.getcwd(),
                                      utils.DEFAULT_OUTPUT_FOLDER,
                                      "{}{}.yaml".format(utils.CONDA_ENV_FILENAME_PREFIX,
                                                         utils.variant_string(variant['python'],
                                                                              variant['build_type'],
                                                                              variant['mpi_type'],
                                                                              variant['cudatoolkit'])))
        assert os.path.exists(conda_env_file)

        # Remove the file once its existence is verified
        os.remove(conda_env_file)
def test_check_recipe_path_package_field():
    '''
    Test for the `recipe_path` field of a package entry.
    '''
    config_path = os.path.join(test_dir, 'test-env1.yaml')
    for variant in utils.make_variants("3.6", "cpu", "openmpi", "10.2"):
        # test-env1.yaml has defined "recipe_path" as "package11_recipe_path" for "package11".
        for config_data in env_config.load_env_config_files([config_path], [variant]):
            for pkg in config_data.get(env_config.Key.packages.name, []):
                if pkg.get(env_config.Key.feedstock.name) == "package11":
                    assert pkg.get(env_config.Key.recipe_path.name) == "package11_recipe_path"
def main(arg_strings=None):
    '''
    Entry function.
    '''
    args = inputs.parse_args(make_parser(), arg_strings)

    # Check recipes for every variant; keep going after a failure so all
    # failing variants are reported before the final assertion.
    all_valid = True
    for variant in utils.make_variants(args.python_versions, args.build_types,
                                       args.mpi_types, args.cuda_versions):
        config_data, config = get_configs(variant, args.conda_build_config)
        if not check_recipes(config_data, config, variant):
            all_valid = False
            print("Recipe validation failed for variant '{}'.".format(variant))

    assert all_valid, "All recipes must be valid."
def main(arg_strings=None):
    '''
    Entry function.
    '''
    parser = make_parser()
    args = inputs.parse_args(parser, arg_strings)
    variants = utils.make_variants(args.python_versions, args.build_types, args.mpi_types, args.cuda_versions)

    # Capture the PR's HEAD commit, then resolve the repository's default branch
    # so each variant can be compared against it.
    pr_branch = utils.get_output("git log -1 --format='%H'")
    utils.run_and_log("git remote set-head origin -a")
    default_branch = utils.get_output("git symbolic-ref refs/remotes/origin/HEAD | sed 's@^refs/remotes/origin/@@'")

    # Maps variant string -> True if that variant bumped at least one build number/version.
    variant_build_results = dict()
    for variant in variants:
        # Collect build numbers from the default branch, then from the PR branch.
        # NOTE(review): this checks out branches in the working tree — the repo
        # must be clean for this to behave; confirm against the CI setup.
        utils.run_and_log("git checkout {}".format(default_branch))
        main_build_config_data, main_config = get_configs(variant, args.conda_build_config)
        main_build_numbers = get_build_numbers(main_build_config_data, main_config, variant)

        utils.run_and_log("git checkout {}".format(pr_branch))
        pr_build_config_data, pr_config = get_configs(variant, args.conda_build_config)
        current_pr_build_numbers = get_build_numbers(pr_build_config_data, pr_config, variant)

        print("Build Info for Variant:   {}".format(variant))
        print("Current PR Build Info:    {}".format(current_pr_build_numbers))
        print("Main Branch Build Info:   {}".format(main_build_numbers))

        #No build numbers can go backwards without a version change.
        for package in main_build_numbers:
            if package in current_pr_build_numbers and current_pr_build_numbers[package]["version"] == main_build_numbers[package]["version"]:
                assert int(current_pr_build_numbers[package]["number"]) >= int(main_build_numbers[package]["number"]), "If the version doesn't change, the build number can't be reduced."

        #If packages are added or removed, don't require a version change
        # (early return skips the remaining variants as well).
        if set(main_build_numbers.keys()) != set(current_pr_build_numbers.keys()):
            return

        #At least one package needs to increase the build number or change the version.
        checks = [current_pr_build_numbers[package]["version"] != main_build_numbers[package]["version"] or
                  int(current_pr_build_numbers[package]["number"]) > int(main_build_numbers[package]["number"])
                  for package in main_build_numbers]
        variant_build_results[utils.variant_string(variant["python"], variant["build_type"], variant["mpi_type"], variant["cudatoolkit"])] = any(checks)

    # A single variant with a bump is sufficient to pass.
    assert any(variant_build_results.values()), "At least one package needs to increase the build number or change the version in at least one variant."
def __init__(self,
             env_config_files,
             python_versions,
             build_types,
             mpi_types,
             cuda_versions,
             repository_folder="./",
             channels=None,
             git_location=utils.DEFAULT_GIT_LOCATION,
             git_tag_for_env=utils.DEFAULT_GIT_TAG,
             conda_build_config=utils.DEFAULT_CONDA_BUILD_CONFIG,
             packages=None):
    """
    Build the full dependency tree of build commands for every variant
    combination, validate it, and derive conda env files and test feedstocks.

    Args:
        env_config_files: Environment config files to load.
        python_versions / build_types / mpi_types / cuda_versions: Variant axes
            expanded via utils.make_variants.
        repository_folder: Folder feedstock repositories live under.
        channels: Extra conda channels (defaults to an empty list).
        git_location / git_tag_for_env: Where and at which tag to fetch feedstocks.
        conda_build_config: Path to the conda_build_config to use.
        packages: Optional subset of packages to start the build from; when
            None, every discovered package is included.
    Raises:
        OpenCEError: CREATE_BUILD_TREE when command creation fails for a variant.
    """
    self._env_config_files = env_config_files
    self._repository_folder = repository_folder
    self._channels = channels if channels else []
    self._git_location = git_location
    self._git_tag_for_env = git_tag_for_env
    self._conda_build_config = conda_build_config
    self._external_dependencies = dict()   # variant string -> external deps
    self._conda_env_files = dict()         # variant string -> CondaEnvFileGenerator
    self._test_feedstocks = dict()         # variant string -> list of repositories
    self._initial_package_indices = []     # indices of requested starting packages

    # Create a dependency tree that includes recipes for every combination
    # of variants.
    self._possible_variants = utils.make_variants(python_versions, build_types, mpi_types, cuda_versions)
    self.build_commands = []
    for variant in self._possible_variants:
        try:
            build_commands, external_deps = self._create_all_commands(variant)
        except OpenCEError as exc:
            raise OpenCEError(Error.CREATE_BUILD_TREE, exc.msg) from exc
        variant_string = utils.variant_string(variant["python"], variant["build_type"], variant["mpi_type"], variant["cudatoolkit"])
        self._external_dependencies[variant_string] = external_deps

        # Add dependency tree information to the packages list and
        # remove build commands from build_commands that are already in self.build_commands
        build_commands, package_indices = _add_build_command_dependencies(build_commands, self.build_commands, len(self.build_commands))
        self.build_commands += build_commands
        self._detect_cycle()

        # If the packages argument is provided, find the indices into the build_commands for all
        # of the packages that were requested.
        variant_package_indices = []
        if packages:
            for package in packages:
                if package in package_indices:
                    variant_package_indices += package_indices[package]
                else:
                    print("INFO: No recipes were found for " + package + " for variant " + variant_string)
        else:
            # No explicit selection: start from every discovered package.
            for package in package_indices:
                variant_package_indices += package_indices[package]
        self._initial_package_indices += variant_package_indices

        validate_config.validate_build_tree(self.build_commands, external_deps, variant_package_indices)
        installable_packages = get_installable_packages(self.build_commands, external_deps, variant_package_indices)

        # Keep only packages that are part of this tree or are known variant packages.
        filtered_packages = [package for package in installable_packages
                                 if utils.remove_version(package) in package_indices or
                                    utils.remove_version(package) in utils.KNOWN_VARIANT_PACKAGES]
        self._conda_env_files[variant_string] = CondaEnvFileGenerator(filtered_packages)

        # Record the repository of every reachable build command for testing.
        self._test_feedstocks[variant_string] = []
        for build_command in traverse_build_commands(self.build_commands, variant_package_indices):
            self._test_feedstocks[variant_string].append(build_command.repository)
def __init__(self,
             env_config_files,
             python_versions,
             build_types,
             mpi_types,
             cuda_versions,
             repository_folder="./",
             channels=None,
             git_location=utils.DEFAULT_GIT_LOCATION,
             git_tag_for_env=utils.DEFAULT_GIT_TAG,
             git_up_to_date=False,
             conda_build_config=utils.DEFAULT_CONDA_BUILD_CONFIG,
             packages=None):
    """
    Build a networkx dependency graph of build commands covering every variant
    combination, then derive conda env files and test feedstocks per variant.

    Args:
        env_config_files: Environment config files to load.
        python_versions / build_types / mpi_types / cuda_versions: Variant axes
            expanded via utils.make_variants.
        repository_folder: Folder feedstock repositories live under.
        channels: Extra conda channels (defaults to an empty list).
        git_location / git_tag_for_env / git_up_to_date: Feedstock git options.
        conda_build_config: Path to the conda_build_config to use.
        packages: Optional subset of packages to use as graph start nodes; when
            None, all roots (in-degree 0 nodes) are used.
    Raises:
        OpenCEError: CREATE_BUILD_TREE when node creation fails for a variant.
    """
    self._env_config_files = env_config_files
    self._repository_folder = repository_folder
    self._channels = channels if channels else []
    self._git_location = git_location
    self._git_tag_for_env = git_tag_for_env
    self._git_up_to_date = git_up_to_date
    self._conda_build_config = conda_build_config
    self._external_dependencies = dict()   # variant string -> external deps
    self._conda_env_files = dict()         # variant string -> conda env file packages
    self._test_feedstocks = dict()         # variant string -> list of repositories
    self._initial_nodes = []               # starting nodes across all variants

    # Create a dependency tree that includes recipes for every combination
    # of variants.
    self._possible_variants = utils.make_variants(python_versions, build_types, mpi_types, cuda_versions)
    self._tree = networkx.DiGraph()
    validate_args = []
    for variant in self._possible_variants:
        try:
            # Per-variant graph: nodes, then intra-tree edges, then remote deps,
            # finally merged into the cumulative graph.
            variant_tree, external_deps = self._create_nodes(variant)
            variant_tree = _create_edges(variant_tree)
            variant_tree = self._create_remote_deps(variant_tree)
            self._tree = networkx.compose(self._tree, variant_tree)
        except OpenCEError as exc:
            raise OpenCEError(Error.CREATE_BUILD_TREE, exc.msg) from exc
        variant_string = utils.variant_string(variant["python"], variant["build_type"], variant["mpi_type"], variant["cudatoolkit"])
        self._external_dependencies[variant_string] = external_deps

        self._detect_cycle()

        # Roots of the variant graph (no incoming edges) are the default start nodes.
        variant_start_nodes = {n for n,d in variant_tree.in_degree() if d==0}

        # If the packages argument is provided, find the indices into the build_commands for all
        # of the packages that were requested.
        if packages:
            for package in packages:
                if not {n for n in traverse_build_commands(variant_tree, return_node=True) if package in n.packages}:
                    print("INFO: No recipes were found for " + package + " for variant " + variant_string)
            variant_start_nodes = {n for n in traverse_build_commands(variant_tree, return_node=True) if n.packages.intersection(packages)}

        self._initial_nodes += variant_start_nodes

        # Validation is deferred and batched so it can run in parallel below.
        validate_args.append((self._tree, external_deps, variant_start_nodes))

        self._conda_env_files[variant_string] = get_conda_file_packages(self._tree, external_deps, variant_start_nodes)

        # Record the repository of every reachable build command for testing.
        self._test_feedstocks[variant_string] = []
        for build_command in traverse_build_commands(self._tree, variant_start_nodes):
            self._test_feedstocks[variant_string].append(build_command.repository)

    # Execute validate_build_tree in parallel
    utils.run_in_parallel(validate_config.validate_build_tree, validate_args)
def validate_config(args):
    '''Entry Function'''
    # Expand the requested variant axes, then hand everything to the validator.
    all_variants = utils.make_variants(args.python_versions, args.build_types,
                                       args.mpi_types, args.cuda_versions)
    validate_env_config(args.conda_build_config,
                        args.env_config_file,
                        all_variants,
                        args.repository_folder)
def build_feedstock_from_command(command, # pylint: disable=too-many-arguments, too-many-locals
                                 recipe_config_file=None,
                                 output_folder=utils.DEFAULT_OUTPUT_FOLDER,
                                 local_src_dir=None,
                                 pkg_format=utils.DEFAULT_PKG_FORMAT,
                                 debug=None,
                                 debug_output_id=None):
    '''
    Build a feedstock from a build_command object.

    Args:
        command: Build command describing the feedstock (repository, recipes, variant axes, channels).
        recipe_config_file: Optional recipe config path; resolved via load_package_config.
        output_folder: Destination for built packages; also used as a local channel.
        local_src_dir: Optional local source directory applied via _set_local_src_dir.
        pkg_format: "conda" selects the .conda (v2) package format.
        debug: When truthy, create debug environments instead of building.
        debug_output_id: Output id forwarded to conda_build.api.debug.
    Raises:
        OpenCEError: If any recipe fails to build.
    '''
    utils.check_if_package_exists('conda-build')

    # pylint: disable=import-outside-toplevel
    import conda_build.api
    from conda_build.config import get_or_merge_config

    # Remember where we started so we can restore the working directory at the end.
    saved_working_directory = None
    if command.repository:
        saved_working_directory = os.getcwd()
        os.chdir(os.path.abspath(command.repository))

    recipes_to_build = inputs.parse_arg_list(command.recipe)

    for variant in utils.make_variants(command.python, command.build_type, command.mpi_type, command.cudatoolkit):
        build_config_data, recipe_config_file = load_package_config(recipe_config_file, variant, command.recipe_path)

        # Build each recipe
        if build_config_data['recipes'] is None:
            build_config_data['recipes'] = []
            log.info("No recipe to build for given configuration.")
        for recipe in build_config_data['recipes']:
            if recipes_to_build and recipe['name'] not in recipes_to_build:
                continue

            config = get_or_merge_config(None, variant=variant)
            config.skip_existing = False
            config.prefix_length = 225
            config.output_folder = output_folder
            # Download URL-based conda_build_config files first; non-existent paths are dropped.
            conda_build_configs = [utils.download_file(conda_build_config) if utils.is_url(conda_build_config)
                                       else conda_build_config
                                   for conda_build_config in command.conda_build_configs]
            # Comprehension variable renamed from `config` to avoid confusingly
            # reusing the name of the conda-build Config object.
            config.variant_config_files = [cfg for cfg in conda_build_configs if os.path.exists(cfg)]

            if pkg_format == "conda":
                config.conda_pkg_format = "2"   # set to .conda format

            recipe_conda_build_config = get_conda_build_config()
            if recipe_conda_build_config:
                config.variant_config_files.append(recipe_conda_build_config)

            # The output folder itself is the highest-priority channel.
            config.channel_urls = [os.path.abspath(output_folder)]
            config.channel_urls += command.channels
            config.channel_urls += build_config_data.get('channels', [])

            _set_local_src_dir(local_src_dir, recipe, recipe_config_file)
            try:
                if debug:
                    activation_string = conda_build.api.debug(os.path.join(os.getcwd(), recipe['path']),
                                                              output_id=debug_output_id, config=config)
                    if activation_string:
                        log.info("#" * 80)
                        log.info("Build and/or host environments created for debug output id %s."
                                 "To enter a debugging environment:\n", debug_output_id)
                        log.info(activation_string)
                        log.info("#" * 80)
                else:
                    conda_build.api.build(os.path.join(os.getcwd(), recipe['path']), config=config)
            except Exception as exc: # pylint: disable=broad-except
                traceback.print_exc()
                # Bug fix: os.getcwd must be CALLED — previously the function object
                # itself was passed, producing a useless repr in the error message.
                raise OpenCEError(Error.BUILD_RECIPE,
                                  recipe['name'] if 'name' in recipe else os.getcwd(),
                                  str(exc)) from exc

    if saved_working_directory:
        os.chdir(saved_working_directory)