def _create_remote_deps(self, dep_graph):
    """
    Expand *dep_graph* with dependencies of remotely-provided packages.

    Every node without a build command is a package that must come from a
    conda channel.  For each such package, query the channels for its latest
    metadata and add its dependencies to the graph, repeating (worklist
    style) until no new remote dependencies remain.

    Args:
        dep_graph: A graph of DependencyNode objects.

    Returns:
        The same graph, augmented with remote dependency nodes and edges.

    Raises:
        OpenCEError: Re-raised as REMOTE_PACKAGE_DEPENDENCIES when package
            info cannot be retrieved.
    """
    #pylint: disable=import-outside-toplevel
    from open_ce import conda_utils
    # Seed the worklist with every node that is not built locally.
    deps = {dep for dep in dep_graph.nodes() if dep.build_command is None}
    seen = set()
    try:
        while deps:
            node = deps.pop()
            for package in node.packages:
                package_name = utils.remove_version(package)
                if package_name in seen:
                    continue
                seen.add(package_name)
                package_info = conda_utils.get_latest_package_info(self._channels + node.channels, package)
                # Fix: get_latest_package_info returns an empty string for a
                # virtual package; indexing it for 'dependencies' below would
                # fail, so skip such packages.
                if package_info == "":
                    continue
                dep_graph.add_node(DependencyNode({package}))
                for dep in package_info['dependencies']:
                    dep_name = utils.remove_version(dep)
                    # Prefer an edge to an existing node that already
                    # provides this dependency.
                    local_dest = {dest_node for dest_node in dep_graph.nodes()
                                  if dep_name in map(utils.remove_version, dest_node.packages)}
                    if local_dest:
                        dep_graph.add_edge(node, local_dest.pop())
                    else:
                        # Unknown dependency: add a new node and queue it for
                        # its own remote-dependency expansion.
                        new_dep = DependencyNode({dep})
                        dep_graph.add_edge(node, new_dep)
                        deps.add(new_dep)
        return dep_graph
    except OpenCEError as err:
        raise OpenCEError(Error.REMOTE_PACKAGE_DEPENDENCIES, deps, err.msg) from err
def _create_remote_deps(self, dep_graph):
    """
    Expand *dep_graph* with dependencies of remotely-provided packages.

    Every node without a build command is a package that must come from a
    conda channel.  For each such package, query the channels (ordered by
    priority) for its latest metadata and add its dependencies to the graph,
    repeating (worklist style) until no new remote dependencies remain.

    Args:
        dep_graph: A networkx graph of DependencyNode objects.

    Returns:
        The same graph, augmented with remote dependency nodes and edges.

    Raises:
        OpenCEError: Re-raised as REMOTE_PACKAGE_DEPENDENCIES when package
            info cannot be retrieved.
    """
    #pylint: disable=import-outside-toplevel
    from open_ce import conda_utils
    # Seed the worklist with every node that is not built locally.
    deps = {dep for dep in dep_graph.nodes() if dep.build_command is None}
    seen = set()
    try:
        while deps:
            node = deps.pop()
            # Collect channels from every locally-built ancestor of this node
            # so lookups prefer channels used by the recipes that require it.
            ancestor_build_cmds = {x.build_command for x in networkx.ancestors(dep_graph, node)
                                   if x.build_command is not None}
            channels = []
            ancestor_channels = []
            for cmd in ancestor_build_cmds:
                ancestor_channels += cmd.channels
            # De-duplicate while preserving priority order:
            # node channels first, then ancestor channels, then global ones.
            for channel in node.channels + ancestor_channels + self._channels:
                if not channel in channels:
                    channels += [channel]
            for package in node.packages:
                package_name = utils.remove_version(package)
                if package_name in seen:
                    continue
                seen.add(package_name)
                # Pass in channels ordered by priority.
                package_info = conda_utils.get_latest_package_info(channels, package)
                # package_info is empty for a virtual package.
                # As of now, this is just one case of package_info being empty.
                if package_info == "":
                    continue
                dep_graph.add_node(DependencyNode({package}))
                for dep in package_info['dependencies']:
                    dep_name = utils.remove_version(dep)
                    # Prefer an edge to an existing node that already
                    # provides this dependency.
                    local_dest = {dest_node for dest_node in dep_graph.nodes()
                                  if dep_name in map(utils.remove_version, dest_node.packages)}
                    if local_dest:
                        dep_graph.add_edge(node, local_dest.pop())
                    else:
                        # Unknown dependency: add a new node and queue it for
                        # its own remote-dependency expansion.
                        new_dep = DependencyNode({dep})
                        dep_graph.add_edge(node, new_dep)
                        deps.add(new_dep)
        return dep_graph
    except OpenCEError as err:
        raise OpenCEError(Error.REMOTE_PACKAGE_DEPENDENCIES, deps, err.msg) from err
def get_independent_runtime_deps(tree, node):
    """
    This function gets all run dependencies of a node that don't depend on
    any internal build commands.

    Args:
        tree: A dependency graph whose successors of *node* provide its
            dependencies.
        node: The node whose run dependencies are examined.

    Returns:
        A set of run-dependency strings (with versions, as stored on the
        build command) whose providing nodes are independent per
        ``is_independent``.
    """
    deps = set()
    # Nodes without a build command have no run dependencies to inspect.
    if node.build_command:
        # Ignore run deps that the node's own packages already satisfy.
        run_deps = {x for x in node.build_command.run_dependencies
                    if utils.remove_version(x) not in map(utils.remove_version, node.packages)}
        for run_dep in run_deps:
            # NOTE(review): next() has no default here — this assumes every
            # run dependency has a matching successor node in the tree;
            # otherwise StopIteration is raised. Confirm the graph is always
            # fully wired before this is called.
            run_dep_node = next(x for x in tree.successors(node)
                                if utils.remove_version(run_dep) in map(utils.remove_version, x.packages))
            if is_independent(run_dep_node, tree):
                deps.add(run_dep)
    return deps
def validate_build_tree(tree, external_deps, start_nodes=None):
    '''
    Check a build tree for dependency compatability.

    Performs a `conda create --dry-run` using every installable package that
    is not built within the tree itself, against the union of all recipe and
    environment channels.

    Raises:
        OpenCEError: VALIDATE_BUILD_TREE when the dry-run solve fails.
    '''
    # Fix: `build_tree` was referenced without a visible import. Importing it
    # is intentionally done here (matching the project convention) because it
    # checks for the existence of conda-build, as BuildTree uses conda_build
    # APIs.
    from open_ce import build_tree  # pylint: disable=import-outside-toplevel
    # All packages produced by the tree, and the channels their recipes use.
    packages = [package for recipe in build_tree.traverse_build_commands(tree, start_nodes)
                for package in recipe.packages]
    channels = {channel for recipe in build_tree.traverse_build_commands(tree, start_nodes)
                for channel in recipe.channels}
    env_channels = {channel for node in tree.nodes() for channel in node.channels}
    deps = build_tree.get_installable_packages(tree, external_deps, start_nodes, True)
    # Only ask conda to solve for packages NOT built within this tree.
    pkg_args = " ".join(["\"{}\"".format(utils.generalize_version(dep)) for dep in deps
                         if not utils.remove_version(dep) in packages])
    channel_args = " ".join({"-c \"{}\"".format(channel) for channel in channels.union(env_channels)})
    cli = "conda create --dry-run -n test_conda_dependencies {} {}".format(channel_args, pkg_args)
    ret_code, std_out, std_err = utils.run_command_capture(cli)
    if not ret_code:
        raise OpenCEError(Error.VALIDATE_BUILD_TREE, cli, std_out, std_err)
def _create_edges(tree):
    """
    Wire up dependency edges for every locally-built node in *tree*.

    For each dependency of each node's build command, add an edge to an
    existing node that provides it; if no node provides it, create a new
    DependencyNode for the dependency first.  Returns the mutated tree.
    """
    # Snapshot the node set first: new nodes get added while we iterate.
    snapshot = set(tree.nodes())
    for source in snapshot:
        # Only locally-built nodes carry dependency information.
        if source.build_command is None:
            continue
        for dependency in source.build_command.get_all_dependencies():
            dep_name = utils.remove_version(dependency)
            providers = {candidate for candidate in tree.nodes()
                         if dep_name in map(utils.remove_version, candidate.packages)}
            if not providers:
                # No provider yet: create a placeholder node for this dep.
                placeholder = DependencyNode({dependency})
                tree.add_node(placeholder)
                tree.add_edge(source, placeholder)
                continue
            target = providers.pop()
            # Skip self-loops (a node "depending" on its own package).
            if source != target:
                tree.add_edge(source, target)
    return tree
def validate_build_tree(tree, external_deps, start_nodes=None):
    '''
    Check a build tree for dependency compatability.
    '''
    # Importing BuildTree is intentionally done here because it checks for the
    # existence of conda-build as BuildTree uses conda_build APIs.
    from open_ce import build_tree  # pylint: disable=import-outside-toplevel

    # Gather every package built by the tree and every channel referenced by
    # its recipes in a single traversal.
    packages = []
    channels = set()
    for recipe in build_tree.traverse_build_commands(tree, start_nodes):
        packages += recipe.packages
        channels.update(recipe.channels)
    # Environment-level channels from every node are searched as well.
    for node in tree.nodes():
        channels.update(node.channels)

    deps = build_tree.get_installable_packages(tree, external_deps, start_nodes, True)
    # Only ask conda to solve for packages NOT built within this tree.
    external_only = [dep for dep in deps if utils.remove_version(dep) not in packages]
    pkg_args = " ".join("\"{}\"".format(utils.generalize_version(dep)) for dep in external_only)
    channel_args = " ".join({"-c \"{}\"".format(channel) for channel in channels})
    cli = "conda create --dry-run -n test_conda_dependencies {} {}".format(channel_args, pkg_args)

    ret_code, std_out, std_err = utils.run_command_capture(cli)
    if not ret_code:
        raise OpenCEError(Error.VALIDATE_BUILD_TREE, cli, std_out, std_err)
def __init__(self, env_config_files, python_versions, build_types, mpi_types, cuda_versions,
             repository_folder="./", channels=None, git_location=utils.DEFAULT_GIT_LOCATION,
             git_tag_for_env=utils.DEFAULT_GIT_TAG, conda_build_config=utils.DEFAULT_CONDA_BUILD_CONFIG,
             packages=None):
    """
    Build the full set of build commands for every requested variant.

    For each variant combination of (python, build_type, mpi_type,
    cudatoolkit), this creates the build commands, links their dependencies,
    validates the resulting tree, and records per-variant conda environment
    files and test feedstocks.

    Args:
        env_config_files: Env config files describing what to build.
        python_versions/build_types/mpi_types/cuda_versions: Variant axes
            combined via utils.make_variants.
        repository_folder: Where feedstock repositories live/are cloned.
        channels: Optional extra conda channels (defaults to none).
        git_location/git_tag_for_env: Where to fetch feedstocks from.
        conda_build_config: conda_build_config.yaml to use.
        packages: Optional subset of packages to build; None means all.

    Raises:
        OpenCEError: CREATE_BUILD_TREE when command creation fails for a
            variant.
    """
    self._env_config_files = env_config_files
    self._repository_folder = repository_folder
    # Note: falsy channels (None or []) both normalize to an empty list.
    self._channels = channels if channels else []
    self._git_location = git_location
    self._git_tag_for_env = git_tag_for_env
    self._conda_build_config = conda_build_config
    self._external_dependencies = dict()
    self._conda_env_files = dict()
    self._test_feedstocks = dict()
    self._initial_package_indices = []

    # Create a dependency tree that includes recipes for every combination
    # of variants.
    self._possible_variants = utils.make_variants(python_versions, build_types, mpi_types, cuda_versions)
    self.build_commands = []
    for variant in self._possible_variants:
        try:
            build_commands, external_deps = self._create_all_commands(variant)
        except OpenCEError as exc:
            raise OpenCEError(Error.CREATE_BUILD_TREE, exc.msg) from exc
        variant_string = utils.variant_string(variant["python"], variant["build_type"],
                                              variant["mpi_type"], variant["cudatoolkit"])
        self._external_dependencies[variant_string] = external_deps

        # Add dependency tree information to the packages list and
        # remove build commands from build_commands that are already in self.build_commands
        build_commands, package_indices = _add_build_command_dependencies(build_commands, self.build_commands,
                                                                         len(self.build_commands))
        self.build_commands += build_commands
        # Dependency cycles would make the build order unsolvable; fail fast.
        self._detect_cycle()

        # If the packages argument is provided, find the indices into the build_commands for all
        # of the packages that were requested.
        variant_package_indices = []
        if packages:
            for package in packages:
                if package in package_indices:
                    variant_package_indices += package_indices[package]
                else:
                    print("INFO: No recipes were found for " + package + " for variant " + variant_string)
        else:
            # No explicit package list: build everything for this variant.
            for package in package_indices:
                variant_package_indices += package_indices[package]
        self._initial_package_indices += variant_package_indices

        # Dry-run validation of the dependency set for this variant.
        validate_config.validate_build_tree(self.build_commands, external_deps, variant_package_indices)
        installable_packages = get_installable_packages(self.build_commands, external_deps,
                                                        variant_package_indices)
        # Keep only packages that are built here or are known variant packages.
        filtered_packages = [package for package in installable_packages
                             if utils.remove_version(package) in package_indices or
                             utils.remove_version(package) in utils.KNOWN_VARIANT_PACKAGES]
        self._conda_env_files[variant_string] = CondaEnvFileGenerator(filtered_packages)

        # Record which feedstock repositories need testing for this variant.
        self._test_feedstocks[variant_string] = []
        for build_command in traverse_build_commands(self.build_commands, variant_package_indices):
            self._test_feedstocks[variant_string].append(build_command.repository)
def _add_build_command_dependencies(variant_build_commands, build_commands, start_index=0):
    """
    Create a dependency tree for a list of build commands.

    Each build_command will contain a `build_command_dependencies` key which
    contains a list of integers. Each integer in the list represents the index
    of the dependencies build_commands within the list.

    The start_index indicates the value that the dependency indices should
    start counting from.

    Args:
        variant_build_commands: Build commands for the current variant.
        build_commands: Build commands already collected from earlier variants;
            commands found here are treated as duplicates.
        start_index: Offset applied to indices of newly-added commands.

    Returns:
        A tuple of (variant_build_commands with duplicates removed,
        dict mapping each package name to the list of command indices that
        provide it).
    """
    # Create a packages dictionary that uses all of a recipe's packages as key, with
    # the recipes index as values.
    packages = dict()
    index = 0
    #save indices of build commands which are already present in build_commands
    duplicates = []
    for var_index, build_command in enumerate(variant_build_commands):
        if build_command in build_commands:
            # Duplicate: point its packages at the existing command's index.
            alt_index = build_commands.index(build_command)
            duplicates.append(var_index)
            for package in build_command.packages:
                packages.update({package: [alt_index] + packages.get(package, [])})
        else:
            # New command: its index is offset by start_index; `index` only
            # advances for non-duplicates so indices stay dense.
            for package in build_command.packages:
                packages.update({package: [start_index + index] + packages.get(package, [])})
            index += 1

    # remove build commands that are already added to build_commands
    variant_build_commands = [i for j, i in enumerate(variant_build_commands) if j not in duplicates]

    # Add a list of indices for dependency to a BuildCommand's `build_command_dependencies` value
    # Note: This will filter out all dependencies that aren't in the recipes list.
    for index, build_command in enumerate(variant_build_commands):
        deps = []
        # Collect every kind of dependency, normalized to bare package names.
        dependencies = set()
        dependencies.update({utils.remove_version(dep) for dep in build_command.run_dependencies})
        dependencies.update({utils.remove_version(dep) for dep in build_command.build_dependencies})
        dependencies.update({utils.remove_version(dep) for dep in build_command.host_dependencies})
        dependencies.update({utils.remove_version(dep) for dep in build_command.test_dependencies})
        for dep in dependencies:
            if dep in packages:
                # Exclude the command's own index to avoid self-dependencies.
                # (The lambda's loop variables are safe here: filter() is
                # consumed immediately by the += below.)
                deps += filter(lambda x: x != start_index + index, packages[dep])
        build_command.build_command_dependencies = deps

    return variant_build_commands, packages