def _clone_repo(self, git_url, repo_dir, env_config_data, package):
    """
    Clone the git repo at repository.
    """
    # Priority is given to a tag specified on the command line. If that is not
    # set, the package-specific tag is used; when even that is not specified,
    # the top-level git tag from the env file is used. If nothing at all is
    # specified, fall back to the default branch of the repo.
    git_tag = self._git_tag_for_env
    if git_tag is None:
        git_tag_for_package = package.get(env_config.Key.git_tag.name, None) if package else None
        if git_tag_for_package:
            git_tag = git_tag_for_package
        else:
            git_tag = env_config_data.get(env_config.Key.git_tag_for_env.name, None) if env_config_data else None

    clone_cmd = "git clone " + git_url + " " + repo_dir
    print("Clone cmd: ", clone_cmd)
    clone_result = os.system(clone_cmd)

    cur_dir = os.getcwd()
    clone_successful = clone_result == 0
    if clone_successful:
        if git_tag is not None:
            os.chdir(repo_dir)
            checkout_cmd = "git checkout " + git_tag
            print("Checkout branch/tag command: ", checkout_cmd)
            checkout_res = os.system(checkout_cmd)
            os.chdir(cur_dir)
            clone_successful = checkout_res == 0
    else:
        raise OpenCEError(Error.CLONE_REPO, git_url)

    if clone_successful:
        patches = package.get(env_config.Key.patches.name, []) if package else []
        if len(patches) > 0:
            os.chdir(repo_dir)
            for patch in patches:
                if os.path.isabs(patch) and os.path.exists(patch):
                    patch_file = patch
                else:
                    # Look for the patch relative to where the Open-CE environment file is.
                    patch_file = os.path.join(
                        os.path.dirname(env_config_data.get(env_config.Key.opence_env_file_path)),
                        patch)
                patch_apply_cmd = "git apply {}".format(patch_file)
                print("Patch apply command: ", patch_apply_cmd)
                patch_apply_res = os.system(patch_apply_cmd)
                if patch_apply_res != 0:
                    raise OpenCEError(Error.PATCH_APPLICATION, patch, package[env_config.Key.feedstock.name])
            os.chdir(cur_dir)
def test_feedstock(args):
    '''Entry Function'''
    if not args.conda_env_file:
        raise OpenCEError(Error.CONDA_ENV_FILE_REQUIRED)
    test_failures = _test_feedstock(args)
    if test_failures:
        raise OpenCEError(Error.FAILED_TESTS, test_failures)
def _validate_input_paths(local_conda_channel, conda_env_file):
    # Check that both paths exist.
    if not os.path.exists(local_conda_channel) or not os.path.exists(conda_env_file):
        raise OpenCEError(Error.INCORRECT_INPUT_PATHS)

    # Check that the local conda channel path is a subdir of the docker build context.
    if not utils.is_subdir(local_conda_channel, os.path.abspath(BUILD_CONTEXT)):
        raise OpenCEError(Error.LOCAL_CHANNEL_NOT_IN_CONTEXT)
def _validate_config_file(env_file, variants):
    '''Perform some validation on the environment file after loading it.'''
    try:
        meta_obj = conda_build.metadata.MetaData(env_file, variant=variants)
        if not (Key.packages.name in meta_obj.meta.keys() or
                Key.imported_envs.name in meta_obj.meta.keys()):
            raise OpenCEError(Error.CONFIG_CONTENT)
        utils.validate_dict_schema(meta_obj.meta, _ENV_CONFIG_SCHEMA)
        return meta_obj
    except (Exception, SystemExit) as exc: #pylint: disable=broad-except
        raise OpenCEError(Error.ERROR, "Error in {}:\n {}".format(env_file, str(exc))) from exc
def cuda_driver_installed():
    '''Determine if the current machine has the NVIDIA driver installed.'''
    try:
        lsmod_out = subprocess.check_output("lsmod").decode("utf-8").strip()
        return re.search(r"nvidia ", lsmod_out) is not None
    except OSError as err:
        if err.errno == errno.ENOENT:
            raise OpenCEError(Error.ERROR, "lsmod command not found") from err
        raise OpenCEError(Error.ERROR, "lsmod command unexpectedly failed") from err
def get_driver_level():
    '''Return the NVIDIA driver level on the system.'''
    try:
        smi_out = subprocess.check_output("nvidia-smi").decode("utf-8").strip()
        return re.search(r"Driver Version\: (\d+\.\d+\.\d+)", smi_out).group(1)
    except OSError as err:
        if err.errno == errno.ENOENT:
            raise OpenCEError(Error.ERROR, "nvidia-smi command not found") from err
        raise OpenCEError(Error.ERROR, "nvidia-smi command unexpectedly failed") from err
def get_driver_cuda_level():
    '''Return what level of CUDA the driver can support.'''
    try:
        smi_out = subprocess.check_output("nvidia-smi").decode("utf-8").strip()
        return re.search(r"CUDA Version\: (\d+\.\d+)", smi_out).group(1)
    except OSError as err:
        if err.errno == errno.ENOENT:
            raise OpenCEError(Error.ERROR, "nvidia-smi command not found") from err
        raise OpenCEError(Error.ERROR, "nvidia-smi command unexpectedly failed") from err
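# Hedged illustration (not part of the module above): get_driver_level() and
# get_driver_cuda_level() assume the header line that nvidia-smi prints, roughly
# like the sample below. The sample text is made up for illustration; the exact
# header can vary by driver release.
import re

_SAMPLE_SMI_HEADER = "| NVIDIA-SMI 450.102.04   Driver Version: 450.102.04   CUDA Version: 11.0 |"

assert re.search(r"Driver Version\: (\d+\.\d+\.\d+)", _SAMPLE_SMI_HEADER).group(1) == "450.102.04"
assert re.search(r"CUDA Version\: (\d+\.\d+)", _SAMPLE_SMI_HEADER).group(1) == "11.0"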
def _validate_config_file(env_file, variants):
    '''Perform some validation on the environment file after loading it.'''
    try:
        meta_obj = conda_utils.render_yaml(env_file, variants=variants, schema=_ENV_CONFIG_SCHEMA)
        if not (Key.packages.name in meta_obj.keys() or
                Key.imported_envs.name in meta_obj.keys()):
            raise OpenCEError(Error.CONFIG_CONTENT)
        meta_obj[Key.opence_env_file_path] = env_file
        return meta_obj
    except (Exception, SystemExit) as exc: #pylint: disable=broad-except
        raise OpenCEError(Error.ERROR, "Error in {}:\n {}".format(env_file, str(exc))) from exc
def _run_tests(build_tree, conda_env_files):
    """
    Run through all of the tests within a build tree for the given conda environment files.

    Args:
        build_tree (BuildTree): The build tree containing the tests.
        conda_env_files (dict): A dictionary where the key is a variant string and the value
                                is the name of a conda environment file.
    """
    failed_tests = []
    # Run test commands for each conda environment that was generated.
    for variant_string, conda_env_file in conda_env_files.items():
        test_commands = build_tree.get_test_commands(variant_string)
        if test_commands:
            print("\n*** Running tests within the " + os.path.basename(conda_env_file) + " conda environment ***\n")
            for feedstock, feedstock_test_commands in test_commands.items():
                print("Running tests for " + feedstock)
                failed_tests += test_feedstock.run_test_commands(conda_env_file, feedstock_test_commands)

    test_feedstock.display_failed_tests(failed_tests)
    if failed_tests:
        raise OpenCEError(Error.FAILED_TESTS, len(failed_tests))
def validate_build_tree(build_commands, external_deps):
    '''
    Check a build tree for dependency compatibility.
    '''
    packages = [package for recipe in build_commands for package in recipe.packages]
    channels = {channel for recipe in build_commands for channel in recipe.channels}
    deps = {dep for recipe in build_commands for dep in recipe.run_dependencies}
    deps.update(external_deps)

    pkg_args = " ".join(["\"{}\"".format(utils.generalize_version(dep)) for dep in deps
                         if not utils.remove_version(dep) in packages])
    channel_args = " ".join({"-c \"{}\"".format(channel) for channel in channels})

    cli = "conda create --dry-run -n test_conda_dependencies {} {}".format(channel_args, pkg_args)

    ret_code, std_out, std_err = utils.run_command_capture(cli)
    if not ret_code:
        raise OpenCEError(Error.VALIDATE_BUILD_TREE, cli, std_out, std_err)
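# Hedged sketch of the idea behind validate_build_tree() above: a
# "conda create --dry-run" asks the solver to verify that a set of package
# specs is co-installable from a set of channels without creating an
# environment. This standalone helper is an approximation for illustration
# only; it is not the utils.run_command_capture() helper used above, and the
# package/channel names in the usage example are made up.
import subprocess

def _dry_run_solve(package_specs, channels):
    # Build the same style of command string and return whether the solve succeeded.
    channel_args = " ".join('-c "{}"'.format(channel) for channel in channels)
    pkg_args = " ".join('"{}"'.format(spec) for spec in package_specs)
    cli = "conda create --dry-run -n test_conda_dependencies {} {}".format(channel_args, pkg_args)
    result = subprocess.run(cli, shell=True, capture_output=True, text=True, check=False)
    return result.returncode == 0, result.stdout, result.stderr

# Example usage (hypothetical specs):
#   ok, out, err = _dry_run_solve(["numpy >=1.19", "python 3.8.*"], ["defaults"])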
def validate_env_config(conda_build_config, env_config_files, variants, repository_folder):
    '''
    Validate a list of Open-CE env files against a conda build config
    for a given set of variants.
    '''
    for variant in variants:
        for env_file in env_config_files:
            print('Validating {} for {} : {}'.format(conda_build_config, env_file, variant))
            try:
                recipes = build_tree.BuildTree([env_file],
                                               variant['python'],
                                               variant['build_type'],
                                               variant['mpi_type'],
                                               variant['cudatoolkit'],
                                               repository_folder=repository_folder,
                                               conda_build_config=conda_build_config)
                validate_build_tree(recipes, variant)
            except OpenCEError as err:
                raise OpenCEError(Error.VALIDATE_CONFIG, conda_build_config, env_file, variant, err.msg) from err
            print('Successfully validated {} for {} : {}'.format(conda_build_config, env_file, variant))
def load_package_config(config_file=None):
    '''
    Check for a config file. If the user does not provide a recipe config
    file as an argument, it will be assumed that there is only one recipe
    to build, and it is in the directory called 'recipe'.
    '''
    if not config_file and not os.path.exists(utils.DEFAULT_RECIPE_CONFIG_FILE):
        recipe_name = os.path.basename(os.getcwd())
        build_config_data = {'recipes': [{'name': recipe_name, 'path': 'recipe'}]}
    else:
        if not config_file:
            config_file = utils.DEFAULT_RECIPE_CONFIG_FILE
        if not os.path.exists(config_file):
            raise OpenCEError(Error.CONFIG_FILE, config_file)

        with open(config_file, 'r') as stream:
            build_config_data = yaml.safe_load(stream)

    return build_config_data, config_file
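# Hedged example of the recipe config file shape that load_package_config()
# returns. The 'recipes' entries with 'name'/'path' and the optional 'channels'
# list mirror how build_config_data is consumed by the build functions in these
# snippets; the concrete values below are made up for illustration, and a real
# config may carry additional keys (e.g. 'local_src_dir') not shown here.
import yaml

_EXAMPLE_RECIPE_CONFIG = """
recipes:
  - name: example-feedstock
    path: recipe
channels:
  - defaults
"""

_example_build_config_data = yaml.safe_load(_EXAMPLE_RECIPE_CONFIG)
assert _example_build_config_data['recipes'][0]['path'] == 'recipe'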
def _execute_in_container(container_name, command):
    '''Run a command inside the given docker container.'''
    docker_cmd = DOCKER_TOOL + " exec " + container_name + " "
    # Change to the home directory before running the command.
    docker_cmd += "bash -c 'cd " + HOME_PATH + "; " + command + "'"

    if os.system(docker_cmd):
        raise OpenCEError(Error.BUILD_IN_CONTAINER, container_name)
def __init__(self,
             env_config_files,
             python_versions,
             build_types,
             mpi_types,
             repository_folder="./",
             git_location=utils.DEFAULT_GIT_LOCATION,
             git_tag_for_env="master",
             conda_build_config=utils.DEFAULT_CONDA_BUILD_CONFIG):
    self._env_config_files = env_config_files
    self._repository_folder = repository_folder
    self._git_location = git_location
    self._git_tag_for_env = git_tag_for_env
    self._conda_build_config = conda_build_config
    self._external_dependencies = dict()

    # Create a dependency tree that includes recipes for every combination of variants.
    self._possible_variants = utils.make_variants(python_versions, build_types, mpi_types)
    self.build_commands = []
    for variant in self._possible_variants:
        try:
            variant_recipes, external_deps = self._create_all_recipes(variant)
        except OpenCEError as exc:
            raise OpenCEError(Error.CREATE_BUILD_TREE, exc.msg) from exc
        self._external_dependencies[str(variant)] = external_deps

        # Add dependency tree information to the packages list.
        _add_build_command_dependencies(variant_recipes, len(self.build_commands))
        self.build_commands += variant_recipes
    self._detect_cycle()
def _clone_repo(self, git_url, repo_dir, env_config_data, git_tag_from_config):
    """
    Clone the git repo at repository.
    """
    # Priority is given to a tag specified on the command line. If that is not
    # set, the package-specific tag is used; when even that is not specified,
    # the top-level git tag from the env file is used. If nothing at all is
    # specified, fall back to the default branch of the repo.
    git_tag = self._git_tag_for_env
    if git_tag is None:
        if git_tag_from_config:
            git_tag = git_tag_from_config
        else:
            git_tag = env_config_data.get(env_config.Key.git_tag_for_env.name, None)

    if git_tag is None:
        clone_cmd = "git clone " + git_url + " " + repo_dir
    else:
        clone_cmd = "git clone -b " + git_tag + " --single-branch " + git_url + " " + repo_dir

    print("Clone cmd: ", clone_cmd)
    clone_result = os.system(clone_cmd)
    if clone_result != 0:
        raise OpenCEError(Error.CLONE_REPO, git_url)
def _set_local_src_dir(local_src_dir_arg, recipe, recipe_config_file):
    """
    Set the LOCAL_SRC_DIR environment variable if local_src_dir is specified.
    """
    # A local source directory provided as a command line argument has higher
    # priority than what is specified in build-config.yaml.
    if local_src_dir_arg:
        local_src_dir = os.path.expanduser(local_src_dir_arg)
    elif 'local_src_dir' in recipe:
        local_src_dir = os.path.expanduser(recipe.get('local_src_dir'))
        # If a relative path is specified, it should be in relation to the config file.
        if not os.path.isabs(local_src_dir):
            local_src_dir = os.path.join(os.path.dirname(os.path.abspath(recipe_config_file)),
                                         local_src_dir)
    else:
        local_src_dir = None

    if local_src_dir:
        if not os.path.exists(local_src_dir):
            raise OpenCEError(Error.LOCAL_SRC_DIR, local_src_dir)
        os.environ["LOCAL_SRC_DIR"] = local_src_dir
    else:
        if 'LOCAL_SRC_DIR' in os.environ:
            del os.environ['LOCAL_SRC_DIR']
def build_feedstock_from_command(command, # pylint: disable=too-many-arguments
                                 recipe_config_file=None,
                                 output_folder=utils.DEFAULT_OUTPUT_FOLDER,
                                 extra_channels=None,
                                 conda_build_config=utils.DEFAULT_CONDA_BUILD_CONFIG,
                                 local_src_dir=None):
    '''
    Build a feedstock from a build_command object.
    '''
    if not extra_channels:
        extra_channels = []
    saved_working_directory = None
    if command.repository:
        saved_working_directory = os.getcwd()
        os.chdir(os.path.abspath(command.repository))

    recipes_to_build = inputs.parse_arg_list(command.recipe)

    for variant in utils.make_variants(command.python, command.build_type, command.mpi_type, command.cudatoolkit):
        build_config_data, recipe_config_file = load_package_config(recipe_config_file, variant)

        # Build each recipe
        for recipe in build_config_data['recipes']:
            if recipes_to_build and recipe['name'] not in recipes_to_build:
                continue

            config = get_or_merge_config(None, variant=variant)
            config.skip_existing = True
            config.prefix_length = 225
            config.output_folder = output_folder
            config.variant_config_files = [conda_build_config]

            recipe_conda_build_config = os.path.join(os.getcwd(), "config", "conda_build_config.yaml")
            if os.path.exists(recipe_conda_build_config):
                config.variant_config_files.append(recipe_conda_build_config)

            config.channel_urls = extra_channels + command.channels + build_config_data.get('channels', [])

            _set_local_src_dir(local_src_dir, recipe, recipe_config_file)
            try:
                conda_build.api.build(os.path.join(os.getcwd(), recipe['path']), config=config)
            except Exception as exc: # pylint: disable=broad-except
                traceback.print_exc()
                raise OpenCEError(Error.BUILD_RECIPE,
                                  recipe['name'] if 'name' in recipe else os.getcwd(),
                                  str(exc)) from exc

    if saved_working_directory:
        os.chdir(saved_working_directory)
def validate_type(value, schema_type):
    '''Validate a single type instance against a schema type.'''
    if isinstance(schema_type, dict):
        validate_dict_schema(value, schema_type)
    else:
        if not isinstance(value, schema_type):
            raise OpenCEError(Error.ERROR, "{} is not of expected type {}".format(value, schema_type))
def build_feedstock(args_string=None):
    '''
    Entry function.
    '''
    parser = make_parser()
    args = parser.parse_args(args_string)

    saved_working_directory = None
    if args.working_directory:
        saved_working_directory = os.getcwd()
        os.chdir(os.path.abspath(args.working_directory))

    build_config_data, recipe_config_file = load_package_config(args.recipe_config_file)

    args.recipes = utils.parse_arg_list(args.recipe_list)

    # Build each recipe
    for recipe in build_config_data['recipes']:
        if args.recipes and recipe['name'] not in args.recipes:
            continue

        config = get_or_merge_config(None)
        config.skip_existing = True
        config.output_folder = args.output_folder
        config.variant_config_files = [args.conda_build_config]

        recipe_conda_build_config = os.path.join(os.getcwd(), "config", "conda_build_config.yaml")
        if os.path.exists(recipe_conda_build_config):
            config.variant_config_files.append(recipe_conda_build_config)

        config.channel_urls = args.channels_list + build_config_data.get('channels', [])

        _set_local_src_dir(args.local_src_dir, recipe, recipe_config_file)
        try:
            for variant in utils.make_variants(args.python_versions, args.build_types, args.mpi_types):
                conda_build.api.build(os.path.join(os.getcwd(), recipe['path']),
                                      config=config, variants=variant)
        except Exception as exc: # pylint: disable=broad-except
            traceback.print_exc()
            raise OpenCEError(Error.BUILD_RECIPE,
                              recipe['name'] if 'name' in recipe else os.getcwd(),
                              str(exc)) from exc

    if saved_working_directory:
        os.chdir(saved_working_directory)
def validate_env(args):
    '''Entry Function'''
    variants = utils.make_variants(args.python_versions, args.build_types, args.mpi_types)
    for variant in variants:
        try:
            env_config.load_env_config_files(args.env_config_file, variant)
        except OpenCEError as exc:
            raise OpenCEError(Error.VALIDATE_ENV, args.env_config_file, str(variant), exc.msg) from exc
def __init__(self,
             env_config_files,
             python_versions,
             build_types,
             mpi_types,
             cuda_versions,
             repository_folder="./",
             git_location=utils.DEFAULT_GIT_LOCATION,
             git_tag_for_env=utils.DEFAULT_GIT_TAG,
             conda_build_config=utils.DEFAULT_CONDA_BUILD_CONFIG,
             test_labels=None):
    self._env_config_files = env_config_files
    self._repository_folder = repository_folder
    self._git_location = git_location
    self._git_tag_for_env = git_tag_for_env
    self._conda_build_config = conda_build_config
    self._external_dependencies = dict()
    self._conda_env_files = dict()
    self._test_commands = dict()
    self._test_labels = test_labels

    # Create a dependency tree that includes recipes for every combination of variants.
    self._possible_variants = utils.make_variants(python_versions, build_types, mpi_types, cuda_versions)
    self.build_commands = []
    for variant in self._possible_variants:
        try:
            build_commands, external_deps, test_commands = self._create_all_commands(variant)
        except OpenCEError as exc:
            raise OpenCEError(Error.CREATE_BUILD_TREE, exc.msg) from exc

        variant_string = utils.variant_string(variant["python"], variant["build_type"],
                                              variant["mpi_type"], variant["cudatoolkit"])
        self._external_dependencies[variant_string] = external_deps
        self._test_commands[variant_string] = test_commands

        # Add dependency tree information to the packages list and remove build
        # commands from build_commands that are already in self.build_commands.
        build_commands = _add_build_command_dependencies(build_commands, self.build_commands,
                                                         len(self.build_commands))
        self.build_commands += build_commands

        installable_packages = self.get_installable_packages(variant_string)
        self._conda_env_files[variant_string] = CondaEnvFileGenerator(installable_packages)

    self._detect_cycle()
def validate_dict_schema(dictionary, schema):
    '''Recursively validate a dictionary's schema.'''
    for k, (schema_type, required) in schema.items():
        if k not in dictionary:
            if required:
                raise OpenCEError(Error.ERROR, "Required key {} was not found in {}".format(k, dictionary))
            continue
        if isinstance(schema_type, list):
            if dictionary[k] is not None: # Handle if the yaml file has an empty list for this key.
                validate_type(dictionary[k], list)
                for value in dictionary[k]:
                    validate_type(value, schema_type[0])
        else:
            validate_type(dictionary[k], schema_type)
    for k in dictionary:
        if k not in schema:
            raise OpenCEError(Error.ERROR, "Unexpected key {} was found in {}".format(k, dictionary))
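# Hedged example of the schema format consumed by validate_dict_schema() above:
# each key maps to a (type, required) pair, where the type may be a plain type,
# a single-element list meaning "list of that type", or a nested schema dict.
# The schema and document below are made up for illustration; they are not the
# real _ENV_CONFIG_SCHEMA.
_EXAMPLE_SCHEMA = {
    'name': (str, True),                           # required string
    'channels': ([str], False),                    # optional list of strings
    'options': ({'debug': (bool, False)}, False),  # optional nested dictionary
}

# Passes validation: 'name' is a string, 'channels' is a list of strings,
# and the optional 'options' key may be omitted entirely.
validate_dict_schema({'name': 'example-env', 'channels': ['defaults']}, _EXAMPLE_SCHEMA)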
def _detect_cycle(self, max_cycles=10):
    '''Raise an error if the build command dependency graph contains a cycle.'''
    extract_build_tree = [x.build_command_dependencies for x in self.build_commands]
    cycles = []
    for start in range(len(self.build_commands)):
        # Check to see if there are any cycles that start anywhere in the tree.
        cycles += find_all_cycles(extract_build_tree, start)
        if len(cycles) >= max_cycles:
            break
    if cycles:
        cycle_print = "\n".join([" -> ".join([self.build_commands[i].recipe for i in cycle])
                                 for cycle in cycles[:min(max_cycles, len(cycles))]])
        if len(cycles) > max_cycles:
            cycle_print += "\nCycles truncated after {}...".format(max_cycles)
        raise OpenCEError(Error.BUILD_TREE_CYCLE, cycle_print)
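# Hedged sketch of a find_all_cycles() helper compatible with how _detect_cycle()
# above calls it: 'tree' is a list where tree[i] holds the indices of the build
# commands that command i depends on, and the return value is a list of cycles,
# each expressed as a list of indices starting from 'current'. This is only an
# illustrative depth-first search, not the project's actual implementation.
def find_all_cycles(tree, current=0, seen=None):
    if seen is None:
        seen = []
    if current in seen:
        # Reached a node already on the current path: report the cycle portion of the path.
        return [seen[seen.index(current):] + [current]]
    cycles = []
    for dependency in tree[current]:
        cycles += find_all_cycles(tree, dependency, seen + [current])
    return cycles

# Example: with dependencies 0 -> 1 -> 2 -> 0, one cycle is found starting at node 0.
assert find_all_cycles([[1], [2], [0]], 0) == [[0, 1, 2, 0]]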
def _generate_dockerfile_name(build_types, cuda_version):
    '''
    Ensure we have a valid combination, i.e. that a valid cuda version is specified.
    '''
    if 'cuda' in build_types:
        dockerfile = os.path.join(BUILD_CUDA_IMAGE_PATH, "Dockerfile.cuda-" + cuda_version)
        build_image_path = BUILD_CUDA_IMAGE_PATH
        if not os.path.isfile(dockerfile):
            raise OpenCEError(Error.UNSUPPORTED_CUDA, cuda_version)
    else:
        # Build with a cpu based image.
        dockerfile = os.path.join(BUILD_IMAGE_PATH, "Dockerfile")
        build_image_path = BUILD_IMAGE_PATH
    return build_image_path, dockerfile
def validate_env(arg_strings=None):
    '''
    Entry function.
    '''
    parser = make_parser()
    args = parser.parse_args(arg_strings)
    variants = utils.make_variants(args.python_versions, args.build_types, args.mpi_types)

    for variant in variants:
        try:
            env_config.load_env_config_files(args.env_config_file, variant)
        except OpenCEError as exc:
            raise OpenCEError(Error.VALIDATE_ENV, args.env_config_file, str(variant), exc.msg) from exc
def build_image(build_image_path, dockerfile):
    """
    Build a docker image from the Dockerfile in BUILD_IMAGE_PATH.
    Returns the name of the new image.
    """
    image_name = REPO_NAME + ":" + IMAGE_NAME + "-" + str(os.getuid())
    build_cmd = DOCKER_TOOL + " build "
    build_cmd += "-f " + dockerfile + " "
    build_cmd += "-t " + image_name + " "
    build_cmd += "--build-arg BUILD_ID=" + str(os.getuid()) + " "
    build_cmd += "--build-arg GROUP_ID=" + str(os.getgid()) + " "
    build_cmd += build_image_path

    if os.system(build_cmd):
        raise OpenCEError(Error.BUILD_IMAGE, image_name)

    return image_name
def build_with_docker(output_folder, build_types, cuda_versions, arg_strings):
    """
    Create a build image and run a build inside of a container based on that image.
    """
    parser = make_parser()
    _, unused_args = parser.parse_known_args(arg_strings)

    build_image_path, dockerfile = _generate_dockerfile_name(build_types, cuda_versions)

    if 'cuda' not in build_types or _capable_of_cuda_containers(cuda_versions):
        image_name = build_image(build_image_path, dockerfile)
    else:
        raise OpenCEError(Error.INCOMPAT_CUDA, utils.get_driver_level(), cuda_versions)

    build_in_container(image_name, output_folder, unused_args)
def _create_container(container_name, image_name, output_folder):
    """
    Create a docker container.
    """
    # Create the container.
    docker_cmd = DOCKER_TOOL + " create -i --rm --name " + container_name + " "

    # Add output folder
    docker_cmd += _add_volume(os.path.join(os.getcwd(), output_folder),
                              os.path.join(HOME_PATH, output_folder))

    # Add cache directory
    docker_cmd += _add_volume(None, os.path.join(HOME_PATH, ".cache"))

    # Add conda-bld directory
    docker_cmd += _add_volume(None, "/opt/conda/conda-bld")

    docker_cmd += image_name + " bash"
    if os.system(docker_cmd):
        raise OpenCEError(Error.CREATE_CONTAINER, container_name)
def build_image(local_conda_channel, conda_env_file):
    """
    Build a docker image from the Dockerfile in RUNTIME_IMAGE_PATH.
    Returns the name of the new image.
    """
    image_name = REPO_NAME + ":" + IMAGE_NAME + "-" + str(os.getuid())
    build_cmd = DOCKER_TOOL + " build "
    build_cmd += "-f " + os.path.join(RUNTIME_IMAGE_PATH, "Dockerfile") + " "
    build_cmd += "-t " + image_name + " "
    # Assumption: the redacted value here is a module-level OPENCE_USER constant,
    # matching the name of the build arg being set.
    build_cmd += "--build-arg OPENCE_USER=" + OPENCE_USER + " "
    build_cmd += "--build-arg LOCAL_CONDA_CHANNEL=" + local_conda_channel + " "
    build_cmd += "--build-arg CONDA_ENV_FILE=" + conda_env_file + " "
    build_cmd += "--build-arg TARGET_DIR=" + TARGET_DIR + " "
    build_cmd += BUILD_CONTEXT

    print("Docker build command: ", build_cmd)
    if os.system(build_cmd):
        raise OpenCEError(Error.BUILD_IMAGE, image_name)

    return image_name
def validate_build_tree(recipes, variant_string):
    '''
    Check a build tree for dependency compatibility.
    '''
    #packages = [package for recipe in recipes for package in recipe.packages]
    channels = {channel for recipe in recipes for channel in recipe.channels}
    #deps = {dep for recipe in recipes for dep in recipe.run_dependencies}
    #deps.update(recipes.get_external_dependencies(variant))
    deps = recipes.get_installable_packages(variant_string)
    print("Dependencies from build tree: ", deps)

    pkg_args = " ".join(["\"{}\"".format(dep) for dep in deps])
    channel_args = " ".join({"-c \"{}\"".format(channel) for channel in channels})

    cli = "conda create --dry-run -n test_conda_dependencies {} {}".format(channel_args, pkg_args)
    print("cli cmd: ", cli)

    ret_code, std_out, std_err = utils.run_command_capture(cli)
    if not ret_code:
        raise OpenCEError(Error.VALIDATE_BUILD_TREE, cli, std_out, std_err)