def build_in_container(image_name, args, arg_strings):
    """
    Run a build inside of a container using the provided image_name.

    Creates a fresh container, copies the open-ce tree, the conda_build_config
    and any local files into it, runs the build command inside it, and always
    stops the container afterwards.
    """
    time_stamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
    # NOTE(review): the container name is derived from the module-level
    # IMAGE_NAME constant, not the image_name argument — presumably intentional
    # so all build containers share a common prefix; confirm.
    container_name = IMAGE_NAME + "-" + time_stamp

    output_folder = os.path.abspath(args.output_folder)
    # Remote env files (URLs) pass through untouched; local ones become absolute.
    env_files = [
        os.path.abspath(e) if not utils.is_url(e) else e
        for e in args.env_config_file
    ]
    conda_build_config = os.path.abspath(args.conda_build_config)
    home_path = _home_path(args.container_tool)

    # Deduplicate the local folders that must be mounted into the container.
    env_folders = {
        os.path.dirname(env_file)
        for env_file in env_files if not utils.is_url(env_file)
    }
    # Translate each local env file path to its in-container location.
    # Fix: use an order-preserving dedupe (dict.fromkeys) instead of a set
    # comprehension so the env-file arguments are passed to the build in a
    # deterministic order matching the order the user supplied them.
    env_files_in_container = list(dict.fromkeys(
        os.path.join(home_path, "envs",
                     _mount_name(os.path.dirname(env_file)),
                     os.path.basename(env_file))
        if not utils.is_url(env_file) else env_file
        for env_file in env_files))
    arg_strings = env_files_in_container + arg_strings

    _create_container(container_name, image_name, output_folder, env_folders,
                      args.container_tool)

    # Add the open-ce directory
    _copy_to_container(OPEN_CE_PATH, home_path, container_name,
                       args.container_tool)

    # Add the conda_build_config
    _copy_to_container(conda_build_config, home_path, container_name,
                       args.container_tool)
    config_in_container = os.path.join(home_path,
                                       os.path.basename(conda_build_config))
    arg_strings = arg_strings + ["--conda_build_config", config_in_container]

    # Add local_files directory (if it exists)
    if os.path.isdir(LOCAL_FILES_PATH):
        _copy_to_container(LOCAL_FILES_PATH, home_path, container_name,
                           args.container_tool)

    _start_container(container_name, args.container_tool)

    # Execute build command
    cmd = "source $HOME/.bashrc; python {} {} {} {}".format(
        os.path.join(home_path, "open_ce", "open-ce"),
        args.command, args.sub_command,
        ' '.join(arg_strings))
    try:
        _execute_in_container(container_name, cmd, args.container_tool)
    finally:
        # Cleanup
        _stop_container(container_name, args.container_tool)
def parse_args(parser, arg_strings=None):
    '''
    Parses input arguments and handles more complex defaults.
    - conda_build_config: If not passed in the default is with the env file,
      if is passed in, otherwise it is expected to be in the local path.
    '''
    args = parser.parse_args(arg_strings)
    _create_env_config_paths(args)

    # Nothing more to do for commands that take no conda_build_config.
    if "conda_build_config" not in vars(args):
        return args

    # Default: a config next to the first env file, else the packaged default.
    if args.conda_build_config is None:
        env_files = vars(args).get("env_config_file")
        if env_files:
            args.conda_build_config = os.path.join(
                os.path.dirname(env_files[0]), utils.CONDA_BUILD_CONFIG_FILE)
        else:
            args.conda_build_config = utils.DEFAULT_CONDA_BUILD_CONFIG

    # Normalize: download URLs, absolutize existing local paths, warn otherwise.
    if utils.is_url(args.conda_build_config):
        args.conda_build_config = utils.download_file(
            args.conda_build_config, filename=utils.CONDA_BUILD_CONFIG_FILE)
    elif os.path.exists(args.conda_build_config):
        args.conda_build_config = os.path.abspath(args.conda_build_config)
    else:
        print("WARNING: No valid conda_build_config.yaml file was found. Some recipes may fail to build.")

    return args
def _create_env_config_paths(args):
    '''
    If the provided env_config_file's don't exist locally, convert the paths to
    URLs pointing to the GitHub repository for environemnt files.
    '''
    arg_dict = vars(args)
    if "env_config_file" not in arg_dict:
        return
    if "provided_env_files" not in arg_dict:
        args.__dict__["provided_env_files"] = []

    for index, config_file in enumerate(args.env_config_file):
        # Keep a record of what the user actually passed on the command line.
        args.provided_env_files.append(config_file)
        if os.path.exists(config_file) or utils.is_url(config_file):
            continue
        # Organization comes from the last path segment of git_location.
        organization = os.path.basename(args.git_location)
        # Branch: explicit git tag for the env if given, otherwise "main".
        branch = arg_dict.get("git_tag_for_env") or "main"
        # Make sure the file name carries an extension (default ".yaml").
        root, extension = os.path.splitext(config_file)
        file_name = root + (extension if extension else ".yaml")
        new_url = "https://raw.githubusercontent.com/{}/{}/{}/envs/{}".format(
            organization, utils.DEFAULT_ENVS_REPO, branch, file_name)
        log.info("Unable to find '%s' locally. Attempting to use '%s'.",
                 config_file, new_url)
        args.env_config_file[index] = new_url
def _create_commands(repository, runtime_package, recipe_path, recipes,
                     variant_config_files, variants, channels):
    """
    Returns: A tree of nodes containing BuildCommands for each recipe within a repository.
    """
    retval = graph.OpenCEGraph()
    # Recipe configs are loaded relative to the repository, so chdir into it
    # and restore the previous working directory at the end.
    # NOTE(review): the chdir pair is not wrapped in try/finally, so an
    # exception below leaves the process inside `repository`.
    saved_working_directory = os.getcwd()
    os.chdir(repository)
    config_data, _ = build_feedstock.load_package_config(
        variants=variants, recipe_path=recipe_path)
    # URL-based config files are fetched locally; local paths pass through.
    combined_config_files = [
        utils.download_file(config) if utils.is_url(config) else config
        for config in variant_config_files
    ]
    feedstock_conda_build_config_file = build_feedstock.get_conda_build_config()
    if feedstock_conda_build_config_file:
        combined_config_files.append(feedstock_conda_build_config_file)
    recipes_from_config = config_data.get('recipes', [])
    if recipes_from_config is None:
        recipes_from_config = []
    channels_from_config = config_data.get('channels', [])
    if channels_from_config is not None:
        # NOTE: += mutates the caller's `channels` list in place.
        channels += channels_from_config
    for recipe in recipes_from_config:
        # When an explicit recipe filter was given, skip recipes not in it.
        if recipes and not recipe.get('name') in recipes:
            continue
        packages, version, run_deps, host_deps, build_deps, test_deps, output_files = _get_package_dependencies(
            recipe.get('path'), combined_config_files, variants)
        build_command = BuildCommand(recipe=recipe.get('name', None),
                                     repository=repository,
                                     packages=packages,
                                     version=version,
                                     recipe_path=recipe_path,
                                     runtime_package=runtime_package,
                                     output_files=output_files,
                                     python=variants.get('python'),
                                     build_type=variants.get('build_type'),
                                     mpi_type=variants.get('mpi_type'),
                                     cudatoolkit=variants.get('cudatoolkit'),
                                     run_dependencies=run_deps,
                                     host_dependencies=host_deps,
                                     build_dependencies=build_deps,
                                     test_dependencies=test_deps,
                                     channels=channels,
                                     resources=recipe.get('resources'),
                                     conda_build_configs=variant_config_files)
        # One graph node per recipe, keyed by the set of packages it provides.
        package_node = DependencyNode(set(packages), build_command)
        retval.add_node(package_node)
    os.chdir(saved_working_directory)
    return retval
def _clone_repo(self, git_url, repo_dir, env_config_data, package): """ Clone the git repo at repository. """ # Priority is given to command line specified tag, if it is not # specified then package specific tag, and when even that is not specified # then top level git tag specified for env in the env file. And if nothing is # at all specified then fall back to default branch of the repo. git_tag = self._git_tag_for_env git_tag_for_package = None if git_tag is None: git_tag_for_package = package.get(env_config.Key.git_tag.name, None) if package else None if git_tag_for_package: git_tag = git_tag_for_package else: git_tag = env_config_data.get( env_config.Key.git_tag_for_env.name, None) if env_config_data else None clone_successful = utils.git_clone( git_url, git_tag, repo_dir, self._git_up_to_date and not git_tag_for_package) if clone_successful: patches = package.get(env_config.Key.patches.name, []) if package else [] if len(patches) > 0: cur_dir = os.getcwd() os.chdir(repo_dir) for patch in patches: if os.path.isabs(patch) and os.path.exists(patch): patch_file = patch else: # Look for patch relative to where the Open-CE environment file is patch_file = os.path.join( os.path.dirname( env_config_data.get( env_config.Key.opence_env_file_path.name)), patch) if utils.is_url(patch_file): patch_file = utils.download_file(patch_file) patch_apply_cmd = "git apply {}".format(patch_file) log.info("Patch apply command: %s", patch_apply_cmd) patch_apply_res = os.system(patch_apply_cmd) if patch_apply_res != 0: os.chdir(cur_dir) shutil.rmtree(repo_dir) raise OpenCEError( Error.PATCH_APPLICATION, patch, package[env_config.Key.feedstock.name]) os.chdir(cur_dir)
def load_env_config_files(config_files, variants):
    '''
    Load all of the environment config files, plus any that come from "imported_envs"
    within an environment config file.
    '''
    # Worklist of files still to load; URLs stay as-is, paths become absolute.
    pending = [e if utils.is_url(e) else os.path.abspath(e) for e in config_files]
    loaded_data = []
    finished = []

    while pending:
        current = pending[0]
        # Render and validate the file at the head of the worklist using
        # conda-build's API (selectors and jinja2 functions apply).
        env = _validate_config_file(current, variants)

        # Resolve each imported env to an absolute path (relative imports are
        # interpreted relative to the importing file) and keep only those that
        # are neither queued nor already loaded.
        unseen_imports = []
        for imported in env.get(Key.imported_envs.name, []) or []:
            imported = os.path.expanduser(imported)
            if not utils.is_url(imported) and not os.path.isabs(imported):
                imported = os.path.join(os.path.dirname(current), imported)
            if imported not in pending and imported not in finished:
                unseen_imports.append(imported)

        if unseen_imports:
            # Process imports first so dependencies are loaded before their
            # importers; the current file stays queued and is revisited later.
            pending = unseen_imports + pending
        else:
            loaded_data.append(env)
            finished.append(pending.pop(0))

    return loaded_data
def _create_commands_helper(self, variants, env_config_data, env_conda_build_configs, feedstock):
    '''Build the BuildCommand graph for a single feedstock entry.'''
    # Channels from the env file are appended after the globally configured ones.
    all_channels = self._channels + env_config_data.get(env_config.Key.channels.name, [])
    repo_dir = self._get_repo(env_config_data, feedstock)
    # A feedstock is a runtime package unless it explicitly opts out.
    runtime_package = feedstock.get(env_config.Key.runtime_package.name, True)
    # URLs pass through untouched; local config paths become absolute.
    resolved_configs = []
    for config in self._conda_build_config + env_conda_build_configs:
        resolved_configs.append(config if utils.is_url(config) else os.path.abspath(config))
    return _create_commands(repo_dir,
                            runtime_package,
                            feedstock.get(env_config.Key.recipe_path.name),
                            feedstock.get(env_config.Key.recipes.name),
                            resolved_configs,
                            variants,
                            all_channels)
def _validate_config_file(env_file, variants): '''Perform some validation on the environment file after loading it.''' # pylint: disable=import-outside-toplevel from open_ce import conda_utils try: if utils.is_url(env_file): env_file = utils.download_file(env_file) meta_obj = conda_utils.render_yaml(env_file, variants=variants, schema=_ENV_CONFIG_SCHEMA) if not (Key.packages.name in meta_obj.keys() or Key.imported_envs.name in meta_obj.keys()): raise OpenCEError(Error.CONFIG_CONTENT) meta_obj[Key.opence_env_file_path.name] = env_file return meta_obj except (Exception, SystemExit) as exc: #pylint: disable=broad-except raise OpenCEError(Error.ERROR, "Error in {}:\n {}".format(env_file, str(exc))) from exc
def _validate_config_file(env_file, variants): '''Perform some validation on the environment file after loading it.''' # pylint: disable=import-outside-toplevel from open_ce import conda_utils try: if utils.is_url(env_file): env_file = utils.download_file(env_file) # First, partially render yaml to validate builder version number. version_check_obj = conda_utils.render_yaml( env_file, permit_undefined_jinja=True) if Key.builder_version.name in version_check_obj.keys(): if not conda_utils.version_matches_spec( version_check_obj.get(Key.builder_version.name)): raise OpenCEError( Error.SCHEMA_VERSION_MISMATCH, env_file, version_check_obj.get(Key.builder_version.name), open_ce_version) meta_obj = None try: meta_obj = conda_utils.render_yaml(env_file, variants=variants, schema=_ENV_CONFIG_SCHEMA) if not (Key.packages.name in meta_obj.keys() or Key.imported_envs.name in meta_obj.keys()): raise OpenCEError(Error.CONFIG_CONTENT) meta_obj[Key.opence_env_file_path.name] = env_file except OpenCEError as exc: if Key.builder_version.name not in version_check_obj.keys(): show_warning(Error.SCHEMA_VERSION_NOT_FOUND, env_file, Key.builder_version.name) raise exc return meta_obj except (Exception, SystemExit) as exc: #pylint: disable=broad-except raise OpenCEError(Error.ERROR, "Error in {}:\n {}".format(env_file, str(exc))) from exc
def _get_all_feedstocks(env_files, github_org, skipped_repos, pat=None):
    '''
    Collect the feedstock repos referenced by the given env files and return
    repo dicts (name + ssh_url) for the GitHub org, minus any skipped repos.
    '''
    feedstocks = set()
    for env in env_files:
        for package in env.get(env_config.Key.packages.name, []) or []:
            feedstock = package.get(env_config.Key.feedstock.name, "")
            # URL feedstocks point directly at a repo, not at an org feedstock.
            if not utils.is_url(feedstock):
                feedstocks.add(feedstock)

    # Embed the personal access token in the URL when one is supplied.
    credential = pat + "@" if pat else ""
    org_repos = []
    for feedstock in feedstocks:
        org_repos.append({
            "name": "{}-feedstock".format(feedstock),
            "ssh_url": "https://{}github.com/{}/{}-feedstock.git".format(
                credential, github_org, feedstock)
        })
    return [repo for repo in org_repos if repo["name"] not in skipped_repos]
def build_feedstock_from_command(command, # pylint: disable=too-many-arguments, too-many-locals
                                 recipe_config_file=None,
                                 output_folder=utils.DEFAULT_OUTPUT_FOLDER,
                                 local_src_dir=None,
                                 pkg_format=utils.DEFAULT_PKG_FORMAT,
                                 debug=None,
                                 debug_output_id=None):
    '''
    Build a feedstock from a build_command object.

    Args:
        command: BuildCommand describing the recipe(s), variants and channels.
        recipe_config_file: Optional recipe config file; re-resolved per variant.
        output_folder: Destination for built packages (also used as a channel).
        local_src_dir: Optional local source dir, forwarded to _set_local_src_dir.
        pkg_format: "conda" selects the .conda (v2) package format.
        debug / debug_output_id: When debug is set, create debug environments
            for the given output id instead of building.
    Raises:
        OpenCEError: Error.BUILD_RECIPE when conda-build fails for a recipe.
    '''
    utils.check_if_package_exists('conda-build')
    # pylint: disable=import-outside-toplevel
    import conda_build.api
    from conda_build.config import get_or_merge_config
    saved_working_directory = None
    if command.repository:
        saved_working_directory = os.getcwd()
        os.chdir(os.path.abspath(command.repository))
    recipes_to_build = inputs.parse_arg_list(command.recipe)
    for variant in utils.make_variants(command.python, command.build_type,
                                       command.mpi_type, command.cudatoolkit):
        build_config_data, recipe_config_file = load_package_config(
            recipe_config_file, variant, command.recipe_path)
        # Build each recipe
        if build_config_data['recipes'] is None:
            build_config_data['recipes'] = []
            log.info("No recipe to build for given configuration.")
        for recipe in build_config_data['recipes']:
            if recipes_to_build and recipe['name'] not in recipes_to_build:
                continue
            config = get_or_merge_config(None, variant=variant)
            config.skip_existing = False
            config.prefix_length = 225
            config.output_folder = output_folder
            # Fetch any URL-based conda_build_configs to local files.
            # (Renamed the loop variable: the original comprehension shadowed
            # the conda-build `config` object, which was confusing although
            # harmless in Python 3.)
            conda_build_configs = [
                utils.download_file(cbc) if utils.is_url(cbc) else cbc
                for cbc in command.conda_build_configs
            ]
            config.variant_config_files = [
                cbc for cbc in conda_build_configs if os.path.exists(cbc)
            ]
            if pkg_format == "conda":
                config.conda_pkg_format = "2" # set to .conda format
            recipe_conda_build_config = get_conda_build_config()
            if recipe_conda_build_config:
                config.variant_config_files.append(recipe_conda_build_config)
            # The output folder doubles as the highest-priority channel so
            # freshly built packages can satisfy later recipes.
            config.channel_urls = [os.path.abspath(output_folder)]
            config.channel_urls += command.channels
            config.channel_urls += build_config_data.get('channels', [])
            _set_local_src_dir(local_src_dir, recipe, recipe_config_file)
            try:
                if debug:
                    activation_string = conda_build.api.debug(
                        os.path.join(os.getcwd(), recipe['path']),
                        output_id=debug_output_id, config=config)
                    if activation_string:
                        log.info("#" * 80)
                        log.info(
                            "Build and/or host environments created for debug output id %s."
                            "To enter a debugging environment:\n",
                            debug_output_id)
                        log.info(activation_string)
                        log.info("#" * 80)
                else:
                    conda_build.api.build(os.path.join(os.getcwd(), recipe['path']),
                                          config=config)
            except Exception as exc: # pylint: disable=broad-except
                traceback.print_exc()
                # Fix: call os.getcwd() — previously the function object itself
                # was interpolated into the error message instead of the
                # current directory string.
                raise OpenCEError(
                    Error.BUILD_RECIPE,
                    recipe['name'] if 'name' in recipe else os.getcwd(),
                    str(exc)) from exc
    if saved_working_directory:
        os.chdir(saved_working_directory)
def build_env(args):
    '''
    Entry Function.

    Validates the conda_build_config argument, optionally delegates the whole
    build to a container, otherwise constructs a BuildTree, writes conda env
    files, builds each feedstock whose outputs are missing, and optionally
    runs tests.
    '''
    # pylint: disable=too-many-branches
    if not utils.is_url(args.conda_build_config) and not os.path.exists(args.conda_build_config):
        raise OpenCEError(Error.CONDA_BUILD_CONFIG_FILE_NOT_FOUND, args.conda_build_config)
    # pylint: enable=too-many-branches
    if args.container_build:
        # Container builds support only one CUDA version per invocation.
        if len(args.cuda_versions.split(',')) > 1:
            raise OpenCEError(Error.TOO_MANY_CUDA)
        container_build.build_with_container_tool(args, sys.argv)
        return
    # Checking conda-build existence if --container_build is not specified
    utils.check_if_package_exists('conda-build')
    # Here, importing BuildTree is intentionally done after checking
    # existence of conda-build as BuildTree uses conda_build APIs.
    from open_ce.build_tree import BuildTree # pylint: disable=import-outside-toplevel
    # If repository_folder doesn't exist, create it
    if args.repository_folder and not os.path.exists(args.repository_folder):
        os.mkdir(args.repository_folder)
    # Create the build tree
    build_tree = BuildTree(env_config_files=args.env_config_file,
                           python_versions=inputs.parse_arg_list(args.python_versions),
                           build_types=inputs.parse_arg_list(args.build_types),
                           mpi_types=inputs.parse_arg_list(args.mpi_types),
                           cuda_versions=inputs.parse_arg_list(args.cuda_versions),
                           repository_folder=args.repository_folder,
                           channels=args.channels_list,
                           git_location=args.git_location,
                           git_tag_for_env=args.git_tag_for_env,
                           git_up_to_date=args.git_up_to_date,
                           conda_build_config=args.conda_build_config,
                           packages=inputs.parse_arg_list(args.packages))
    # Generate conda environment files
    conda_env_files = build_tree.write_conda_env_files(
        output_folder=os.path.abspath(args.output_folder),
        path=os.path.abspath(args.output_folder))
    print("Generated conda environment files from the selected build arguments:",
          conda_env_files.values())
    print("INFO: One can use these environment files to create a conda" \
          " environment using \"conda env create -f <conda_env_file_name>.\"")
    if not args.skip_build_packages:
        # Build each package in the packages list
        for build_command in build_tree:
            # Skip feedstocks whose outputs are already in the output folder.
            if not build_command.all_outputs_exist(args.output_folder):
                try:
                    print("Building " + build_command.recipe)
                    # NOTE(review): the build_feedstock_from_command signature
                    # visible in this file takes no `conda_build_config`
                    # keyword — confirm these two definitions are in sync, or
                    # this call raises TypeError.
                    build_feedstock.build_feedstock_from_command(
                        build_command,
                        output_folder=os.path.abspath(args.output_folder),
                        conda_build_config=os.path.abspath(args.conda_build_config))
                except OpenCEError as exc:
                    raise OpenCEError(Error.BUILD_RECIPE, build_command.repository, exc.msg) from exc
            else:
                print("Skipping build of " + build_command.recipe + " because it already exists")
    if args.run_tests:
        _run_tests(build_tree, inputs.parse_arg_list(args.test_labels), conda_env_files)
def _create_nodes(self, variants):
    '''
    Create a recipe dictionary for each recipe needed for a given environment file.

    Returns a tuple of (graph of BuildCommand nodes, list of external
    dependencies, list of channels found in the env files).
    '''
    env_config_data_list = env_config.load_env_config_files(
        self._env_config_files, [variants])
    feedstocks_seen = set()
    external_deps = []
    channels_in_env_files = set()
    retval = graph.OpenCEGraph()
    create_commands_args = []
    # Find all conda_build_configs listed in environment files
    conda_build_configs = []
    for env_config_data in env_config_data_list:
        # URLs pass through; local paths are expanded relative to the env file
        # they were listed in.
        conda_build_configs += [
            config if utils.is_url(config) else utils.expanded_path(
                config,
                relative_to=env_config_data[env_config.Key.opence_env_file_path.name])
            for config in env_config_data.get(
                env_config.Key.conda_build_configs.name, [])
        ]
    utils.check_conda_build_configs_exist(conda_build_configs)
    # Create recipe dictionaries for each repository in the environment file
    for env_config_data in env_config_data_list:
        new_channels = env_config_data.get(env_config.Key.channels.name, [])
        channels = self._channels + new_channels
        channels_in_env_files.update(new_channels)
        feedstocks = env_config_data.get(env_config.Key.packages.name, [])
        if not feedstocks:
            feedstocks = []
        for feedstock in feedstocks:
            # Deduplicate feedstock entries across env files by content hash.
            if _make_hash(feedstock) in feedstocks_seen:
                continue
            # Create arguments for call to _create_commands_helper
            create_commands_args.append((variants, env_config_data,
                                         conda_build_configs, feedstock))
            feedstocks_seen.add(_make_hash(feedstock))
        # NOTE(review): unlike `feedstocks`, a null external_dependencies entry
        # is not normalized to [] before iteration — confirm env files never
        # carry an explicit null here.
        current_deps = env_config_data.get(
            env_config.Key.external_dependencies.name, [])
        for dep in current_deps:
            #Add external dependencies as top level nodes in the graph.
            new_dep = DependencyNode({dep}, channels=channels)
            retval.add_node(new_dep)
        if current_deps:
            external_deps += current_deps
    # Execute _create_commands_helper in parallel
    commands = utils.run_in_parallel(self._create_commands_helper,
                                     create_commands_args)
    # Add the results of _create_commands_helper to the graph
    for command in commands:
        retval = networkx.compose(retval, command)
    return retval, external_deps, list(channels_in_env_files)