def _run_tests(build_tree, test_labels, conda_env_files, output_folder):
    """
    Run through all of the tests within a build tree for the given conda
    environment files.

    Args:
        build_tree (BuildTree): The build tree containing the tests.
        test_labels (list): Labels selecting which tests to run; forwarded to
                            `test_feedstock.test_feedstock`.
        conda_env_files (dict): A dictionary where the key is a variant string
                                and the value is the name of a conda
                                environment file.
        output_folder (str): Directory the aggregated test results are
                             written to.
    """
    test_results = {}
    # Run test commands for each conda environment that was generated
    for variant_string, conda_env_file in conda_env_files.items():
        test_feedstocks = build_tree.get_test_feedstocks(variant_string)
        if test_feedstocks:
            log.info("\n*** Running tests within the %s conda environment ***\n",
                     os.path.basename(conda_env_file))
        for feedstock in test_feedstocks:
            log.info("Running tests for %s", feedstock)
            test_result = test_feedstock.test_feedstock(conda_env_file,
                                                        test_labels=test_labels,
                                                        working_directory=feedstock)
            # Accumulate results per feedstock across every variant.
            if feedstock not in test_results:
                test_results[feedstock] = test_result
            else:
                test_results[feedstock] += test_result
    test_feedstock.process_test_results(test_results, output_folder, test_labels)
def _create_env_config_paths(args):
    '''
    If the provided env_config_file's don't exist locally, convert the paths
    to URLs pointing to the GitHub repository for environment files.
    '''
    if "env_config_file" in vars(args):
        if "provided_env_files" not in vars(args):
            args.__dict__["provided_env_files"] = []
        for index, config_file in enumerate(args.env_config_file):
            args.provided_env_files.append(config_file)
            if not os.path.exists(config_file) and not utils.is_url(config_file):
                # Determine the organization name from the git_location argument
                organization = os.path.basename(args.git_location)

                # Grab the branch name from the git_tag_for_env argument,
                # falling back to "main" when it is absent or empty.
                if vars(args).get("git_tag_for_env"):
                    branch = args.git_tag_for_env
                else:
                    branch = "main"

                # Default the file extension to ".yaml" when none was given.
                file_name, extension = os.path.splitext(config_file)
                file_name += extension if extension else ".yaml"

                new_url = "https://raw.githubusercontent.com/{}/{}/{}/envs/{}".format(
                              organization, utils.DEFAULT_ENVS_REPO, branch, file_name)
                log.info("Unable to find '%s' locally. Attempting to use '%s'.",
                         config_file, new_url)
                args.env_config_file[index] = new_url
def create_copy(src_file, dest_file):
    '''
    Copy src_file to dest_file.

    When both paths refer to the same file, the copy is skipped and an
    informational message is logged instead.
    '''
    try:
        shutil.copy(src=src_file, dst=dest_file)
    except shutil.SameFileError:
        log.info("File '%s' already in local conda channel.", src_file)
def export_graph(args):
    '''Entry Function'''
    # Importing BuildTree is intentionally done here because it checks for the
    # existence of conda-build as BuildTree uses conda_build APIs.
    from open_ce.build_tree import construct_build_tree  # pylint: disable=import-outside-toplevel

    tree = construct_build_tree(args)
    os.makedirs(args.output_folder, exist_ok=True)
    output_path = os.path.join(args.output_folder, utils.DEFAULT_GRAPH_FILE)
    graph.export_image(tree._tree, output_path, args.width, args.height)  # pylint: disable=protected-access
    log.info("Build graph successfully output to: %s", output_path)
def _clone_repo(self, git_url, repo_dir, env_config_data, package):
    """
    Clone the git repo at `git_url` into `repo_dir` and apply any patches
    listed for `package`.

    Tag priority: the command line specified tag, then the package specific
    tag, then the top level git tag specified for the env in the env file.
    If nothing at all is specified, fall back to the default branch of the
    repo.

    Raises:
        OpenCEError: If a patch fails to apply (the clone is removed first).
    """
    git_tag = self._git_tag_for_env
    git_tag_for_package = None
    if git_tag is None:
        git_tag_for_package = package.get(env_config.Key.git_tag.name, None) if package else None
        if git_tag_for_package:
            git_tag = git_tag_for_package
        else:
            git_tag = env_config_data.get(env_config.Key.git_tag_for_env.name, None) \
                          if env_config_data else None

    # Only track the branch (up-to-date mode) when no package specific tag
    # was requested.
    clone_successful = utils.git_clone(git_url, git_tag, repo_dir,
                                       self._git_up_to_date and not git_tag_for_package)

    if clone_successful:
        patches = package.get(env_config.Key.patches.name, []) if package else []
        if patches:
            cur_dir = os.getcwd()
            os.chdir(repo_dir)
            try:
                for patch in patches:
                    if os.path.isabs(patch) and os.path.exists(patch):
                        patch_file = patch
                    else:
                        # Look for patch relative to where the Open-CE environment file is
                        patch_file = os.path.join(
                            os.path.dirname(env_config_data.get(env_config.Key.opence_env_file_path.name)),
                            patch)
                        if utils.is_url(patch_file):
                            patch_file = utils.download_file(patch_file)
                    patch_apply_cmd = "git apply {}".format(patch_file)
                    log.info("Patch apply command: %s", patch_apply_cmd)
                    if os.system(patch_apply_cmd) != 0:
                        # Step out of the clone before removing it.
                        os.chdir(cur_dir)
                        shutil.rmtree(repo_dir)
                        raise OpenCEError(Error.PATCH_APPLICATION, patch,
                                          package[env_config.Key.feedstock.name])
            finally:
                # Always restore the working directory, even when a helper
                # (e.g. utils.download_file) raises mid-loop; the original
                # code leaked the chdir in that case.
                os.chdir(cur_dir)
def validate_config(args):
    '''Entry Function

    Validates a list of Open-CE env files against a conda build config
    for a given set of variants.
    '''
    # Importing BuildTree is intentionally done here because it checks for the
    # existence of conda-build as BuildTree uses conda_build APIs.
    from open_ce.build_tree import construct_build_tree  # pylint: disable=import-outside-toplevel

    # Iterate over a copy, since args.env_config_file is rebound per file.
    for env_file in list(args.env_config_file):
        log.info('Validating %s for %s', args.conda_build_configs, env_file)
        args.env_config_file = [env_file]
        try:
            _ = construct_build_tree(args)
        except OpenCEError as err:
            raise OpenCEError(Error.VALIDATE_CONFIG, args.conda_build_configs,
                              env_file, err.msg) from err
        log.info('Successfully validated %s for %s', args.conda_build_configs, env_file)
def _get_runtime_image_file(container_tool):
    """
    Choose the Dockerfile for the runtime image based on the container tool
    and its version.

    Older docker releases (before 17.09) need the docker-specific Dockerfile;
    docker >= 17.09 and podman >= 2.0.0 can use the newer one. When the tool
    version cannot be determined, fall back to the older Dockerfile.
    """
    tool_ver = utils.get_container_tool_ver(container_tool)
    image_file = os.path.join(RUNTIME_IMAGE_PATH, "docker/Dockerfile")
    if not tool_ver:
        show_warning(Error.CONTAINER_VERSION, container_tool)
        # If we couldn't get the version of the tool, use the older docker
        # supported Dockerfile.
        return image_file

    # Compare versions numerically as a (major, minor) tuple. The previous
    # approach stripped the dots and compared the concatenated digits, which
    # is wrong for versions with different component widths (e.g. podman
    # "3.0" became 30, which compared less than the 200 threshold for 2.0.0).
    try:
        parts = tool_ver.replace("-dev", "").split(".")
        major = int(parts[0])
        minor = int(parts[1]) if len(parts) > 1 else 0
    except ValueError:
        # Unparseable version string: warn and use the older Dockerfile.
        show_warning(Error.CONTAINER_VERSION, container_tool)
        return image_file

    if (container_tool == "docker" and (major, minor) >= (17, 9)) or \
       (container_tool == "podman" and (major, minor) >= (2, 0)):
        # Use the newer docker/podman supported Dockerfile
        image_file = os.path.join(RUNTIME_IMAGE_PATH, "podman/Dockerfile")
    log.info("Dockerfile being used: %s", image_file)
    return image_file
def write_licenses_file(self, output_folder):
    """
    Write all of the license information to the provided path.
    """
    # One line per license entry, sorted for a stable output order.
    contents = "".join(str(lic) + "\n" for lic in sorted(self._licenses))

    os.makedirs(output_folder, exist_ok=True)
    licenses_file = os.path.join(output_folder, utils.DEFAULT_LICENSES_FILE)
    with open(licenses_file, 'w') as file_stream:
        file_stream.write(contents)

    log.info("Licenses file generated: %s", licenses_file)
def run(self, conda_env_file):
    """
    Runs the test.

    Writes the output of `get_test_command` into a temporary bash script,
    executes it, and removes the script afterwards.

    Args:
        conda_env_file (str): The name of the original conda environment file.
    """
    log.info("Running: %s", self.name)
    start_time = time.time()

    # Write the bash commands to a temporary script in the working directory.
    os.makedirs(self.working_dir, exist_ok=True)
    with tempfile.NamedTemporaryFile(mode='w+t', dir=self.working_dir, delete=False) as script:
        script.write(self.get_test_command(conda_env_file))
        script_path = script.name

    # Execute the script, then clean it up.
    retval, stdout, stderr = utils.run_command_capture("bash {}".format(script_path),
                                                       cwd=self.working_dir)
    os.remove(script_path)

    category = os.path.basename(self.feedstock_dir) + ":" + os.path.basename(conda_env_file)
    result = TestResult(conda_env_file, retval,
                        name=self.name,
                        category=category,
                        file=self.test_file,
                        stdout=stdout if not retval else None,
                        stderr=stderr if not retval else None,
                        timestamp=start_time,
                        elapsed_sec=time.time() - start_time)
    if not retval:
        log.error(result.display_failed())
    return result
def build_runtime_container_image(args):
    """
    Create a runtime image which will have a conda environment created
    using locally built conda packages and environment file.
    """
    if not args.container_tool:
        raise OpenCEError(Error.NO_CONTAINER_TOOL_FOUND)

    local_conda_channel = os.path.abspath(args.local_conda_channel)
    if not os.path.exists(local_conda_channel):
        raise OpenCEError(Error.INCORRECT_INPUT_PATHS)
    # Scratch directory inside the channel holding files that must live
    # inside the image's build context.
    os.makedirs(os.path.join(local_conda_channel, TEMP_FILES), exist_ok=True)

    # Build one image per provided conda environment file.
    for conda_env_file in parse_arg_list(args.conda_env_files):
        conda_env_file = os.path.abspath(conda_env_file)
        if not os.path.exists(conda_env_file):
            raise OpenCEError(Error.INCORRECT_INPUT_PATHS)

        # Copy the conda environment file into the TEMP_FILES dir inside local
        # conda channel with a new name and modify it
        conda_env_runtime_filename = os.path.splitext(
            os.path.basename(conda_env_file))[0] + '-runtime.yaml'
        conda_env_runtime_file = os.path.join(local_conda_channel, TEMP_FILES,
                                              conda_env_runtime_filename)
        create_copy(conda_env_file, conda_env_runtime_file)
        # Rewrite any local file: channels so they point at the channel
        # location inside the container (TARGET_DIR).
        utils.replace_conda_env_channels(conda_env_runtime_file, r'file:.*',
                                         "file:/{}".format(TARGET_DIR))

        # The image version comes from the Open-CE version recorded in the
        # env file.
        image_version = utils.get_open_ce_version(conda_env_file)
        image_name = build_image(args.local_conda_channel,
                                 os.path.basename(conda_env_runtime_file),
                                 args.container_tool, image_version,
                                 args.container_build_args)
        log.info("Docker image with name %s is built successfully.", image_name)

    # Remove the temporary files copied into the local conda channel.
    cleanup(local_conda_channel)
def gen_file_from_template(self, template, output_folder):
    """
    Fill in a jinja template file with license information and write the
    new file into the provided output_folder.
    """
    # Group the license info objects by license type.
    license_dict = defaultdict(list)
    for info in self._licenses:
        for license_type in info.license_type:
            license_dict[license_type].append(info)

    # Search for the template next to its own path and in the current directory.
    loader = FileSystemLoader([os.path.dirname(template), os.getcwd()])
    jinja_template = Environment(loader=loader).get_template(os.path.basename(template))
    output = jinja_template.render(licenseInfo=license_dict)

    output_name = os.path.splitext(os.path.basename(template))[0] + ".txt"
    output_path = os.path.join(output_folder, output_name)
    with open(output_path, 'w') as stream:
        stream.write(output)

    log.info("%s generated from %s", output_path, template)
def process_test_results(test_results, output_folder="./", test_labels=None):
    """
    Write test results to a file, display failed tests to stdout,
    and raise an exception if there are test failures.
    """
    label_string = "with labels: {}".format(str(test_labels)) if test_labels else ""

    # One JUnit test suite per feedstock.
    suites = []
    for feedstock in test_results:
        suites.append(TestSuite("Open-CE tests for {} {}".format(feedstock, label_string),
                                test_results[feedstock]))

    with open(os.path.join(output_folder, utils.DEFAULT_TEST_RESULT_FILE), 'w') as outfile:
        outfile.write(to_xml_report_string(suites))

    # Collect every failing result across all feedstocks.
    failed_tests = []
    for results in test_results.values():
        failed_tests.extend(result for result in results if result.is_failure())

    if failed_tests:
        raise OpenCEError(Error.FAILED_TESTS, len(failed_tests),
                          str([failed_test.name for failed_test in failed_tests]))
    log.info("All tests passed!")
def build_image(local_conda_channel, conda_env_file, container_tool, image_version,
                container_build_args=""):
    """
    Build a container image from the Dockerfile in RUNTIME_IMAGE_PATH.
    Returns the name of the new image.
    Raises OpenCEError if the container build command fails.
    """
    # Derive the image tag's variant suffix from the env file name, dropping
    # the conda env filename prefix and the "-runtime" suffix.
    variant = os.path.splitext(conda_env_file)[0].replace(
        utils.CONDA_ENV_FILENAME_PREFIX, "", 1)
    variant = variant.replace("-runtime", "")
    image_name = REPO_NAME + ":" + image_version + "-" + variant

    # Docker version on ppc64le rhel7 doesn't allow Dockerfiles to be out of build context.
    # Hence, copying it in temp_dir inside the build context. This isn't needed with newer
    # docker versions or podman but to be consistent, doing this in all cases.
    dockerfile_path = os.path.join(local_conda_channel, TEMP_FILES, "Dockerfile")
    runtime_img_file = _get_runtime_image_file(container_tool)
    create_copy(runtime_img_file, dockerfile_path)

    build_cmd = container_tool + " build "
    build_cmd += "-f " + dockerfile_path + " "
    build_cmd += "-t " + image_name + " "
    # NOTE(review): the OPENCE_USER value on the next line appears
    # redacted/corrupted ("******") in this source — confirm the original
    # constant against upstream before shipping.
    build_cmd += "--build-arg OPENCE_USER="******" "
    build_cmd += "--build-arg LOCAL_CONDA_CHANNEL=" + "./ "
    build_cmd += "--build-arg CONDA_ENV_FILE=" + os.path.join(
        TEMP_FILES, conda_env_file) + " "
    build_cmd += "--build-arg TARGET_DIR=" + TARGET_DIR + " "
    build_cmd += container_build_args + " "
    build_cmd += local_conda_channel

    log.info("Container build command: %s", build_cmd)
    # os.system returns non-zero when the build command fails.
    if os.system(build_cmd):
        raise OpenCEError(Error.BUILD_IMAGE, image_name)
    return image_name
def build_env(args):
    '''Entry Function'''
    utils.check_conda_build_configs_exist(args.conda_build_configs)

    if args.container_build:
        # Container builds only support a single CUDA version per invocation.
        if len(args.cuda_versions.split(',')) > 1:
            raise OpenCEError(Error.TOO_MANY_CUDA)
        container_build.build_with_container_tool(args, sys.argv)
        return

    # Importing BuildTree is intentionally done here because it checks for the
    # existence of conda-build as BuildTree uses conda_build APIs.
    from open_ce.build_tree import construct_build_tree  # pylint: disable=import-outside-toplevel

    build_tree = construct_build_tree(args)

    # Generate conda environment files
    conda_env_files = build_tree.write_conda_env_files(
        output_folder=os.path.abspath(args.output_folder),
        path=os.path.abspath(args.output_folder))
    log.info("Generated conda environment files from the selected build arguments: %s",
             conda_env_files.values())
    log.info("One can use these environment files to create a conda" \
             " environment using \"conda env create -f <conda_env_file_name>.\"")

    if not args.skip_build_packages:
        # Build each package in the packages list
        for build_command in build_tree:
            # Skip recipes whose outputs are already in the output folder.
            if not build_command.all_outputs_exist(args.output_folder):
                try:
                    log.info("Building %s", build_command.recipe)
                    build_feedstock.build_feedstock_from_command(
                        build_command,
                        output_folder=os.path.abspath(args.output_folder),
                        pkg_format=args.conda_pkg_format)
                except OpenCEError as exc:
                    raise OpenCEError(Error.BUILD_RECIPE,
                                      build_command.repository, exc.msg) from exc
            else:
                log.info("Skipping build of %s because it already exists.",
                         build_command.recipe)

    if args.run_tests:
        _run_tests(build_tree, inputs.parse_arg_list(args.test_labels),
                   conda_env_files, os.path.abspath(args.output_folder))
def git_clone(git_url, git_tag, location, up_to_date=False):
    '''
    Clone a git repository and checkout a certain branch.

    Args:
        git_url (str): URL of the repository to clone.
        git_tag (str): Branch or tag to check out after cloning (optional).
        location (str): Directory the repository is cloned into.
        up_to_date (bool): When True, check out the branch containing
                           `git_tag` (via get_branch_of_tag) instead of the
                           tag itself.

    Returns:
        bool: True when the clone (and checkout, if requested) succeeded.

    Raises:
        OpenCEError: If the clone itself fails.
    '''
    clone_cmd = "git clone " + git_url + " " + location
    log.info("Clone cmd: %s", clone_cmd)
    if os.system(clone_cmd) != 0:
        raise OpenCEError(Error.CLONE_REPO, git_url)

    clone_successful = True
    if git_tag:
        cur_dir = os.getcwd()
        os.chdir(location)
        try:
            if up_to_date:
                git_tag = get_branch_of_tag(git_tag)
            checkout_cmd = "git checkout " + git_tag
            log.info("Checkout branch/tag command: %s", checkout_cmd)
            clone_successful = os.system(checkout_cmd) == 0
        finally:
            # Restore the working directory even if get_branch_of_tag raises;
            # the original code leaked the chdir in that case.
            os.chdir(cur_dir)
    return clone_successful
def run_and_log(command):
    '''Print a shell command and then execute it.'''
    log.info("--->%s", command)
    result = os.system(command)
    return result
def get_output(command):
    '''Print and execute a shell command and then return the output.'''
    log.info("--->%s", command)
    _, captured_out, _ = run_command_capture(command, stderr=subprocess.STDOUT)
    return captured_out.strip()
def build_feedstock_from_command(command, # pylint: disable=too-many-arguments, too-many-locals
                                 recipe_config_file=None,
                                 output_folder=utils.DEFAULT_OUTPUT_FOLDER,
                                 local_src_dir=None,
                                 pkg_format=utils.DEFAULT_PKG_FORMAT,
                                 debug=None,
                                 debug_output_id=None):
    '''
    Build a feedstock from a build_command object.

    Args:
        command: The build_command describing the recipe(s), variants and
                 channels to build with.
        recipe_config_file (str): Optional recipe config; resolved per
                                  variant by load_package_config.
        output_folder (str): Where built packages are placed (also used as
                             the first channel).
        local_src_dir (str): Optional local source directory passed to
                             _set_local_src_dir.
        pkg_format (str): "conda" selects the .conda (v2) package format.
        debug / debug_output_id: When debug is truthy, create debug
                                 environments instead of building.

    Raises:
        OpenCEError: If any recipe fails to build.
    '''
    utils.check_if_package_exists('conda-build')

    # pylint: disable=import-outside-toplevel
    import conda_build.api
    from conda_build.config import get_or_merge_config

    # Build from inside the feedstock repository, restoring cwd afterwards.
    saved_working_directory = None
    if command.repository:
        saved_working_directory = os.getcwd()
        os.chdir(os.path.abspath(command.repository))

    recipes_to_build = inputs.parse_arg_list(command.recipe)

    for variant in utils.make_variants(command.python, command.build_type,
                                       command.mpi_type, command.cudatoolkit):
        build_config_data, recipe_config_file = load_package_config(
            recipe_config_file, variant, command.recipe_path)

        # Build each recipe
        if build_config_data['recipes'] is None:
            build_config_data['recipes'] = []
            log.info("No recipe to build for given configuration.")
        for recipe in build_config_data['recipes']:
            if recipes_to_build and recipe['name'] not in recipes_to_build:
                continue

            config = get_or_merge_config(None, variant=variant)
            config.skip_existing = False
            config.prefix_length = 225
            config.output_folder = output_folder
            # Download any conda_build_config entries that are URLs; keep
            # local paths as-is. (Renamed the comprehension variable so it no
            # longer shadows `config`.)
            conda_build_configs = [utils.download_file(cbc) if utils.is_url(cbc) else cbc
                                   for cbc in command.conda_build_configs]
            config.variant_config_files = [cbc for cbc in conda_build_configs
                                           if os.path.exists(cbc)]

            if pkg_format == "conda":
                config.conda_pkg_format = "2" # set to .conda format

            recipe_conda_build_config = get_conda_build_config()
            if recipe_conda_build_config:
                config.variant_config_files.append(recipe_conda_build_config)

            # The local output folder is the highest priority channel.
            config.channel_urls = [os.path.abspath(output_folder)]
            config.channel_urls += command.channels
            config.channel_urls += build_config_data.get('channels', [])

            _set_local_src_dir(local_src_dir, recipe, recipe_config_file)
            try:
                if debug:
                    activation_string = conda_build.api.debug(
                        os.path.join(os.getcwd(), recipe['path']),
                        output_id=debug_output_id, config=config)
                    if activation_string:
                        log.info("#" * 80)
                        log.info("Build and/or host environments created for debug output id %s."
                                 "To enter a debugging environment:\n", debug_output_id)
                        log.info(activation_string)
                        log.info("#" * 80)
                else:
                    conda_build.api.build(os.path.join(os.getcwd(), recipe['path']),
                                          config=config)
            except Exception as exc: # pylint: disable=broad-except
                traceback.print_exc()
                # Bug fix: the original passed `os.getcwd` (the function
                # object) into the error message instead of calling it.
                raise OpenCEError(Error.BUILD_RECIPE,
                                  recipe['name'] if 'name' in recipe else os.getcwd(),
                                  str(exc)) from exc

    if saved_working_directory:
        os.chdir(saved_working_directory)
def __init__(self,
             env_config_files,
             python_versions,
             build_types,
             mpi_types,
             cuda_versions,
             repository_folder="./",
             channels=None,
             git_location=utils.DEFAULT_GIT_LOCATION,
             git_tag_for_env=utils.DEFAULT_GIT_TAG,
             git_up_to_date=False,
             conda_build_config=None,
             packages=None):
    """
    Build the dependency graph for every variant combination of the given
    Open-CE environment files, collecting external dependencies, conda env
    file contents and test feedstocks per variant along the way.
    """
    self._env_config_files = env_config_files
    self._repository_folder = repository_folder
    self._channels = channels if channels else []
    self._git_location = git_location
    self._git_tag_for_env = git_tag_for_env
    self._git_up_to_date = git_up_to_date
    self._conda_build_config = conda_build_config if conda_build_config else []
    self._external_dependencies = dict()  # variant string -> external deps
    self._conda_env_files = dict()        # variant string -> conda env file contents
    self._test_feedstocks = dict()        # variant string -> set of feedstock repos
    self._initial_nodes = []              # start nodes across all variants

    # Create a dependency tree that includes recipes for every combination
    # of variants.
    self._possible_variants = utils.make_variants(python_versions, build_types,
                                                  mpi_types, cuda_versions)
    self._tree = graph.OpenCEGraph()
    validate_args = []
    for variant in self._possible_variants:
        try:
            # NOTE: `channels` here rebinds the constructor parameter with the
            # channels discovered for this variant's nodes.
            variant_tree, external_deps, channels = self._create_nodes(variant)
            variant_tree = _create_edges(variant_tree)
            variant_tree = self._create_remote_deps(variant_tree)
            # Merge this variant's subgraph into the overall DAG.
            self._tree = networkx.compose(self._tree, variant_tree)
        except OpenCEError as exc:
            raise OpenCEError(Error.CREATE_BUILD_TREE, exc.msg) from exc
        variant_string = utils.variant_string(variant.get("python"),
                                              variant.get("build_type"),
                                              variant.get("mpi_type"),
                                              variant.get("cudatoolkit"))
        self._external_dependencies[variant_string] = external_deps

        self._detect_cycle()

        # Roots of the variant subgraph (no incoming edges).
        variant_start_nodes = {n for n, d in variant_tree.in_degree() if d == 0}

        # If the packages argument is provided, find the indices into the build_commands for all
        # of the packages that were requested.
        if packages:
            for package in packages:
                # Warn when a requested package has no recipe in this variant.
                if not {n for n in traverse_build_commands(variant_tree, return_node=True)
                            if package in n.packages}:
                    log.info("No recipes were found for '%s' for variant '%s'",
                             package, variant_string)
            # Restrict the start nodes to those providing a requested package.
            variant_start_nodes = {n for n in traverse_build_commands(variant_tree,
                                                                      return_node=True)
                                       if n.packages.intersection(packages)}

        self._initial_nodes += variant_start_nodes

        validate_args.append((variant_tree, external_deps, variant_start_nodes))

        self._conda_env_files[variant_string] = get_conda_file_packages(
            variant_tree, external_deps, self._channels + channels, variant_start_nodes)

        self._test_feedstocks[variant_string] = set()
        for build_command in traverse_build_commands(variant_tree, variant_start_nodes):
            self._test_feedstocks[variant_string].add(build_command.repository)

    # Execute validate_build_tree in parallel
    utils.run_in_parallel(validate_config.validate_build_tree, validate_args)
    self.remove_external_deps_from_dag()