def validate_and_remove_conda_env_files(py_versions=utils.DEFAULT_PYTHON_VERS,
                                        build_types=utils.DEFAULT_BUILD_TYPES,
                                        mpi_types=utils.DEFAULT_MPI_TYPES,
                                        cuda_versions=utils.DEFAULT_CUDA_VERS,
                                        channels=None):
    '''
    Verify that a conda environment file was generated for every build variant,
    optionally check that each expected channel appears in the file, and delete
    each file once it has been verified.

    Args:
        py_versions: Python versions to generate variants for.
        build_types: Build types (e.g. cpu/cuda) to generate variants for.
        mpi_types: MPI types to generate variants for.
        cuda_versions: CUDA toolkit versions to generate variants for.
        channels: Optional list of channels that must be present in each
                  generated environment file's `channels` section.

    Raises:
        AssertionError: If an expected file is missing or a channel is absent.
    '''
    # Check if conda env files are created for given python versions and build variants
    variants = utils.make_variants(py_versions, build_types, mpi_types, cuda_versions)
    for variant in variants:
        conda_env_file = os.path.join(
            os.getcwd(),
            utils.DEFAULT_OUTPUT_FOLDER,
            "{}{}.yaml".format(
                utils.CONDA_ENV_FILENAME_PREFIX,
                utils.variant_string(variant.get('python'), variant.get('build_type'),
                                     variant.get('mpi_type'), variant.get('cudatoolkit'))))
        assert os.path.exists(conda_env_file)
        if channels:
            with open(conda_env_file, 'r') as file_handle:
                env_info = yaml.safe_load(file_handle)
            env_channels = env_info['channels']
            # Generator expression avoids materializing an intermediate list.
            assert all(channel in env_channels for channel in channels)
        # Remove the file once its existence is verified
        os.remove(conda_env_file)
def get_external_dependencies(self, variant):
    '''Return the list of external dependencies for the given variant.'''
    lookup_key = utils.variant_string(variant["python"],
                                      variant["build_type"],
                                      variant["mpi_type"],
                                      variant["cudatoolkit"])
    # Unknown variants simply have no external dependencies.
    return self._external_dependencies.get(lookup_key, [])
def name(self):
    """
    Returns a name representing the Build Command
    """
    variant = utils.variant_string(self.python, self.build_type,
                                   self.mpi_type, self.cudatoolkit)
    pieces = [self.recipe]
    if variant:
        pieces.append(variant)
    # Normalize dots and underscores to dashes so the resulting name is a
    # single dash-separated token.
    return "-".join(pieces).replace(".", "-").replace("_", "-")
def validate_conda_env_files(py_versions=utils.DEFAULT_PYTHON_VERS,
                             build_types=utils.DEFAULT_BUILD_TYPES,
                             mpi_types=utils.DEFAULT_MPI_TYPES,
                             cuda_versions=utils.DEFAULT_CUDA_VERS):
    '''
    Verify that a conda environment file was generated for every build variant
    and delete each file once it has been verified.

    Args:
        py_versions: Python versions to generate variants for.
        build_types: Build types (e.g. cpu/cuda) to generate variants for.
        mpi_types: MPI types to generate variants for.
        cuda_versions: CUDA toolkit versions to generate variants for.

    Raises:
        AssertionError: If an expected environment file is missing.
    '''
    # Check if conda env files are created for given python versions and build variants
    variants = utils.make_variants(py_versions, build_types, mpi_types, cuda_versions)
    for variant in variants:
        # Renamed from `cuda_env_file`: this is the conda environment file,
        # consistent with validate_and_remove_conda_env_files.
        conda_env_file = os.path.join(
            os.getcwd(),
            utils.DEFAULT_OUTPUT_FOLDER,
            "{}{}.yaml".format(
                utils.CONDA_ENV_FILENAME_PREFIX,
                utils.variant_string(variant['python'], variant['build_type'],
                                     variant['mpi_type'], variant['cudatoolkit'])))
        assert os.path.exists(conda_env_file)
        # Remove the file once its existence is verified
        os.remove(conda_env_file)
def main(arg_strings=None):
    '''
    Entry function.
    '''
    parser = make_parser()
    args = inputs.parse_args(parser, arg_strings)
    variants = utils.make_variants(args.python_versions, args.build_types, args.mpi_types, args.cuda_versions)

    # Record the current PR commit, then resolve the repository's default branch
    # so the PR's build numbers can be compared against it.
    pr_branch = utils.get_output("git log -1 --format='%H'")
    utils.run_and_log("git remote set-head origin -a")
    default_branch = utils.get_output("git symbolic-ref refs/remotes/origin/HEAD | sed 's@^refs/remotes/origin/@@'")

    # Maps variant string -> True if that variant bumped a version or build number.
    variant_build_results = dict()
    for variant in variants:
        # Collect build numbers/versions from the default branch for this variant.
        utils.run_and_log("git checkout {}".format(default_branch))
        main_build_config_data, main_config = get_configs(variant, args.conda_build_config)
        main_build_numbers = get_build_numbers(main_build_config_data, main_config, variant)

        # Collect build numbers/versions from the PR branch for this variant.
        utils.run_and_log("git checkout {}".format(pr_branch))
        pr_build_config_data, pr_config = get_configs(variant, args.conda_build_config)
        current_pr_build_numbers = get_build_numbers(pr_build_config_data, pr_config, variant)

        print("Build Info for Variant:   {}".format(variant))
        print("Current PR Build Info:    {}".format(current_pr_build_numbers))
        print("Main Branch Build Info:   {}".format(main_build_numbers))

        #No build numbers can go backwards without a version change.
        for package in main_build_numbers:
            if package in current_pr_build_numbers and current_pr_build_numbers[package]["version"] == main_build_numbers[package]["version"]:
                assert int(current_pr_build_numbers[package]["number"]) >= int(main_build_numbers[package]["number"]), "If the version doesn't change, the build number can't be reduced."

        #If packages are added or removed, don't require a version change
        if set(main_build_numbers.keys()) != set(current_pr_build_numbers.keys()):
            return

        #At least one package needs to increase the build number or change the version.
        checks = [current_pr_build_numbers[package]["version"] != main_build_numbers[package]["version"] or
                  int(current_pr_build_numbers[package]["number"]) > int(main_build_numbers[package]["number"])
                  for package in main_build_numbers]
        variant_build_results[utils.variant_string(variant["python"], variant["build_type"], variant["mpi_type"], variant["cudatoolkit"])] = any(checks)

    # NOTE(review): the assert fires only if no variant bumped anything; a single
    # bump in any one variant satisfies the check.
    assert any(variant_build_results.values()), "At least one package needs to increase the build number or change the version in at least one variant."
def __init__(self,
             env_config_files,
             python_versions,
             build_types,
             mpi_types,
             cuda_versions,
             repository_folder="./",
             channels=None,
             git_location=utils.DEFAULT_GIT_LOCATION,
             git_tag_for_env=utils.DEFAULT_GIT_TAG,
             conda_build_config=utils.DEFAULT_CONDA_BUILD_CONFIG,
             packages=None):
    # Build a dependency tree of build commands for every variant combination.
    #
    # Args:
    #     env_config_files: Environment config files describing what to build.
    #     python_versions/build_types/mpi_types/cuda_versions: Variant axes;
    #         every combination is expanded via utils.make_variants.
    #     repository_folder: Where feedstock repositories are cloned.
    #     channels: Extra conda channels (defaults to an empty list).
    #     git_location / git_tag_for_env: Where/which tag to clone feedstocks from.
    #     conda_build_config: conda_build_config.yaml to use.
    #     packages: Optional subset of packages to build; when omitted, all
    #         discovered packages are built.
    #
    # Raises:
    #     OpenCEError: If creating the build commands for a variant fails.
    self._env_config_files = env_config_files
    self._repository_folder = repository_folder
    self._channels = channels if channels else []
    self._git_location = git_location
    self._git_tag_for_env = git_tag_for_env
    self._conda_build_config = conda_build_config
    self._external_dependencies = dict()   # variant string -> external deps
    self._conda_env_files = dict()         # variant string -> CondaEnvFileGenerator
    self._test_feedstocks = dict()         # variant string -> feedstock repos to test
    self._initial_package_indices = []     # indices of requested starting packages

    # Create a dependency tree that includes recipes for every combination
    # of variants.
    self._possible_variants = utils.make_variants(python_versions, build_types, mpi_types, cuda_versions)
    self.build_commands = []
    for variant in self._possible_variants:
        try:
            build_commands, external_deps = self._create_all_commands(variant)
        except OpenCEError as exc:
            # Re-wrap with build-tree context while preserving the cause chain.
            raise OpenCEError(Error.CREATE_BUILD_TREE, exc.msg) from exc
        variant_string = utils.variant_string(variant["python"], variant["build_type"], variant["mpi_type"], variant["cudatoolkit"])
        self._external_dependencies[variant_string] = external_deps

        # Add dependency tree information to the packages list and
        # remove build commands from build_commands that are already in self.build_commands
        build_commands, package_indices = _add_build_command_dependencies(build_commands, self.build_commands,
                                                                          len(self.build_commands))
        self.build_commands += build_commands
        self._detect_cycle()

        # If the packages argument is provided, find the indices into the build_commands for all
        # of the packages that were requested.
        variant_package_indices = []
        if packages:
            for package in packages:
                if package in package_indices:
                    variant_package_indices += package_indices[package]
                else:
                    print("INFO: No recipes were found for " + package + " for variant " + variant_string)
        else:
            # No explicit package list: build everything discovered for this variant.
            for package in package_indices:
                variant_package_indices += package_indices[package]
        self._initial_package_indices += variant_package_indices

        validate_config.validate_build_tree(self.build_commands, external_deps, variant_package_indices)
        installable_packages = get_installable_packages(self.build_commands, external_deps, variant_package_indices)

        # Only keep packages that are built here or are known variant packages.
        filtered_packages = [package for package in installable_packages
                             if utils.remove_version(package) in package_indices or
                             utils.remove_version(package) in utils.KNOWN_VARIANT_PACKAGES]
        self._conda_env_files[variant_string] = CondaEnvFileGenerator(filtered_packages)

        self._test_feedstocks[variant_string] = []
        for build_command in traverse_build_commands(self.build_commands, variant_package_indices):
            self._test_feedstocks[variant_string].append(build_command.repository)
def __init__(self,
             env_config_files,
             python_versions,
             build_types,
             mpi_types,
             cuda_versions,
             repository_folder="./",
             channels=None,
             git_location=utils.DEFAULT_GIT_LOCATION,
             git_tag_for_env=utils.DEFAULT_GIT_TAG,
             git_up_to_date=False,
             conda_build_config=utils.DEFAULT_CONDA_BUILD_CONFIG,
             packages=None):
    # Build a networkx-based dependency graph of build commands covering every
    # variant combination.
    #
    # Args:
    #     env_config_files: Environment config files describing what to build.
    #     python_versions/build_types/mpi_types/cuda_versions: Variant axes;
    #         every combination is expanded via utils.make_variants.
    #     repository_folder: Where feedstock repositories are cloned.
    #     channels: Extra conda channels (defaults to an empty list).
    #     git_location / git_tag_for_env: Where/which tag to clone feedstocks from.
    #     git_up_to_date: Whether feedstock repos should be brought up to date.
    #     conda_build_config: conda_build_config.yaml to use.
    #     packages: Optional subset of packages to build; when omitted, the
    #         variant's root nodes (in-degree 0) are used as starting points.
    #
    # Raises:
    #     OpenCEError: If creating the graph nodes for a variant fails.
    self._env_config_files = env_config_files
    self._repository_folder = repository_folder
    self._channels = channels if channels else []
    self._git_location = git_location
    self._git_tag_for_env = git_tag_for_env
    self._git_up_to_date = git_up_to_date
    self._conda_build_config = conda_build_config
    self._external_dependencies = dict()   # variant string -> external deps
    self._conda_env_files = dict()         # variant string -> conda env file packages
    self._test_feedstocks = dict()         # variant string -> feedstock repos to test
    self._initial_nodes = []               # starting nodes across all variants

    # Create a dependency tree that includes recipes for every combination
    # of variants.
    self._possible_variants = utils.make_variants(python_versions, build_types, mpi_types, cuda_versions)
    self._tree = networkx.DiGraph()
    validate_args = []
    for variant in self._possible_variants:
        try:
            variant_tree, external_deps = self._create_nodes(variant)
            variant_tree = _create_edges(variant_tree)
            variant_tree = self._create_remote_deps(variant_tree)
            # Merge this variant's subgraph into the combined graph.
            self._tree = networkx.compose(self._tree, variant_tree)
        except OpenCEError as exc:
            # Re-wrap with build-tree context while preserving the cause chain.
            raise OpenCEError(Error.CREATE_BUILD_TREE, exc.msg) from exc
        variant_string = utils.variant_string(variant["python"], variant["build_type"], variant["mpi_type"], variant["cudatoolkit"])
        self._external_dependencies[variant_string] = external_deps

        self._detect_cycle()

        # Default starting points are the variant's root nodes (no incoming edges).
        variant_start_nodes = {n for n,d in variant_tree.in_degree() if d==0}

        # If the packages argument is provided, find the indices into the build_commands for all
        # of the packages that were requested.
        if packages:
            for package in packages:
                if not {n for n in traverse_build_commands(variant_tree, return_node=True) if package in n.packages}:
                    print("INFO: No recipes were found for " + package + " for variant " + variant_string)
            # Restrict the starting nodes to those providing a requested package.
            variant_start_nodes = {n for n in traverse_build_commands(variant_tree, return_node=True)
                                      if n.packages.intersection(packages)}
        self._initial_nodes += variant_start_nodes

        # Validation is deferred and run in parallel after all variants are processed.
        validate_args.append((self._tree, external_deps, variant_start_nodes))

        self._conda_env_files[variant_string] = get_conda_file_packages(self._tree, external_deps, variant_start_nodes)

        self._test_feedstocks[variant_string] = []
        for build_command in traverse_build_commands(self._tree, variant_start_nodes):
            self._test_feedstocks[variant_string].append(build_command.repository)

    # Execute validate_build_tree in parallel
    utils.run_in_parallel(validate_config.validate_build_tree, validate_args)