def parse_meta_yaml(
    text: str,
    for_pinning=False,
    platform=None,
    arch=None,
    recipe_dir=None,
    cbc_path=None,
    **kwargs: Any,
) -> "MetaYamlTypedDict":
    """Parse the meta.yaml.

    Parameters
    ----------
    text : str
        The raw text in conda-forge feedstock meta.yaml file
    for_pinning : bool, optional
        If True, render the recipe for pinning purposes (passed through to
        ``render_meta_yaml``).
    platform : str, optional
        Platform passed to conda-build's ``Config`` (e.g. "linux", "win").
    arch : str, optional
        Architecture passed to conda-build's ``Config`` (e.g. "64").
    recipe_dir : str, optional
        Path to the recipe directory; only used to decide whether a full
        conda_build_config-based render is possible.
    cbc_path : str, optional
        Path to a conda_build_config.yaml variant file.
    **kwargs
        Extra keyword arguments forwarded to conda-build's ``Config``.

    Returns
    -------
    dict :
        The parsed YAML dict. If parsing fails, returns an empty dict.
    """
    # Imported lazily so importing this module does not require conda-build.
    from conda_build.config import Config
    from conda_build.metadata import parse, ns_cfg

    # Full variant-aware rendering is only possible when all four pieces of
    # configuration are provided; otherwise fall back to a bare Config below.
    if (recipe_dir is not None
            and cbc_path is not None
            and arch is not None
            and platform is not None):
        cbc = Config(
            platform=platform,
            arch=arch,
            variant_config_files=[cbc_path],
            **kwargs,
        )
        cfg_as_dict = ns_cfg(cbc)

        with open(cbc_path, "r") as fp:
            # NOTE(review): yaml.Loader executes python-specific tags; assumes
            # cbc_path is trusted (a local conda_build_config.yaml).
            _cfg_as_dict = yaml.load(fp, Loader=yaml.Loader)

        # Overlay the raw variant file onto the ns_cfg namespace, collapsing
        # multi-valued variant lists to their first entry — except for
        # zip_keys / pin_run_as_build, which are legitimately list-valued.
        for k, v in _cfg_as_dict.items():
            if (isinstance(v, list)
                    and not isinstance(v, str)
                    and len(v) > 0
                    and k not in ["zip_keys", "pin_run_as_build"]):
                v = v[0]
            cfg_as_dict[k] = v
    else:
        # Minimal Config: only forward whatever platform/arch info we have.
        _cfg = {}
        _cfg.update(kwargs)
        if platform is not None:
            _cfg["platform"] = platform
        if arch is not None:
            _cfg["arch"] = arch
        cbc = Config(**_cfg)
        cfg_as_dict = {}

    # Render the jinja2 template with the assembled namespace, then hand the
    # result to conda-build's parser together with the Config.
    if for_pinning:
        content = render_meta_yaml(text, for_pinning=for_pinning, **cfg_as_dict)
    else:
        content = render_meta_yaml(text, **cfg_as_dict)
    return parse(content, cbc)
def parse_variant(
    variant_file_content: str, config: Optional[Config] = None
) -> Dict[
    str,
    Union[
        List[str],
        float,
        List[List[str]],
        Dict[str, Dict[str, str]],
        Dict[str, Dict[str, List[str]]],
    ],
]:
    """Parse a variant (conda_build_config.yaml) file's content.

    Parameters
    ----------
    variant_file_content : str
        The loaded variant contents. This can include selectors etc.
    config : Config, optional
        conda-build configuration used to evaluate selectors; a default
        ``Config()`` is created when omitted.

    Returns
    -------
    dict :
        The parsed variant mapping with empty keys trimmed and a
        ``migration_ts`` float entry (defaults to -1.0 when absent).
    """
    if not config:
        from conda_build.config import Config

        config = Config()
    from conda_build.metadata import select_lines, ns_cfg

    # Evaluate "# [selector]" line filters against the config namespace.
    contents = select_lines(variant_file_content, ns_cfg(config),
                            variants_in_place=False)
    # BaseLoader keeps every scalar as a string; fall back to {} for empty files.
    content = yaml.load(contents, Loader=yaml.loader.BaseLoader) or {}
    variants.trim_empty_keys(content)
    # TODO: Base this default on mtime or something
    content["migration_ts"] = float(content.get("migration_ts", -1.0))
    return content
def parse_config_file(path, config):
    """Read a config file, filter it by conda-build selectors, and parse it.

    Parameters
    ----------
    path : str
        Path to the YAML config file.
    config :
        conda-build ``Config`` used to evaluate ``# [selector]`` lines.

    Returns
    -------
    dict :
        The parsed mapping; an empty dict when the file is empty or all of
        its lines are filtered out by selectors.
    """
    from conda_build.metadata import select_lines, ns_cfg

    with open(path) as f:
        contents = f.read()
    contents = select_lines(contents, ns_cfg(config))
    # yaml.load returns None for empty content; callers iterate the result
    # (e.g. `.keys()`), so normalize to an empty dict like the sibling
    # parse_config_file implementation does.
    content = yaml.load(contents, Loader=yaml.loader.BaseLoader) or {}
    return content
def render(recipe_path, config=None):
    """Render a boa ``recipe.yaml``: parse, expand jinja, flatten selectors.

    Parameters
    ----------
    recipe_path : str
        Path to the recipe.yaml file.
    config : optional
        Build configuration forwarded to the jinja helper functions and to
        ``ns_cfg`` for selector evaluation.

    Returns
    -------
    dict :
        The fully rendered and normalized recipe document.
    """
    # console.print(f"\n[yellow]Rendering {recipe_path}[/yellow]\n")
    # step 1: parse YAML
    with open(recipe_path) as fi:
        loader = YAML(typ="safe")
        ydoc = loader.load(fi)

    # step 2: fill out context dict — each string value may itself be a jinja
    # template referencing earlier context keys, so render them in order.
    context_dict = ydoc.get("context") or {}
    jenv = jinja2.Environment()
    for key, value in context_dict.items():
        if isinstance(value, str):
            tmpl = jenv.from_string(value)
            context_dict[key] = tmpl.render(context_dict)

    # step 3: recursively loop over the entire recipe and render jinja with context
    jenv.globals.update(jinja_functions(config, context_dict))
    for key in ydoc:
        # render_recursive mutates the document in place
        render_recursive(ydoc[key], context_dict, jenv)

    # Resolve "sel(...)" / selector constructs against the config namespace,
    # then normalize the recipe structure.
    flatten_selectors(ydoc, ns_cfg(config))
    ydoc = normalize_recipe(ydoc)

    # console.print("\n[yellow]Normalized recipe[/yellow]\n")
    # console.print(ydoc)
    return ydoc
def skip(self):
    """Return True when any ``build/skip`` selector evaluates truthy.

    Reads the ``skip`` entries from this output's ``build`` section and
    evaluates each selector expression against the configuration namespace.
    """
    skips = self.sections["build"].get("skip")
    if skips:
        # Generator instead of a materialized list: any() can short-circuit
        # on the first truthy selector.
        return any(eval_selector(x, ns_cfg(self.config), []) for x in skips)
    return False
def parse_config_file(path, config):
    """Load a variant config file, filter it by selectors, and parse it.

    Returns the parsed mapping with empty keys trimmed; an empty dict is
    returned when nothing survives selector filtering.
    """
    from conda_build.metadata import select_lines, ns_cfg

    with open(path) as fh:
        raw_text = fh.read()

    filtered = select_lines(raw_text, ns_cfg(config), variants_in_place=False)
    parsed = yaml.load(filtered, Loader=yaml.loader.BaseLoader)
    if not parsed:
        parsed = {}
    trim_empty_keys(parsed)
    return parsed
def skip(self):
    """Evaluate ``build/skip`` selectors; report and return whether to skip.

    Prints the matching selector expressions when this variant is skipped.
    """
    selectors = self.sections["build"].get("skip", [])
    skip_reasons = [
        sel for sel in selectors if eval_selector(sel, ns_cfg(self.config), [])
    ]

    if skip_reasons:
        reason_lines = "\n".join([f" - {sel}" for sel in skip_reasons])
        console.print(
            f"[green]Skipping {self.name} {' | '.join(self.differentiating_variant)} because of[/green]\n"
            + reason_lines
        )

    return len(skip_reasons) != 0
def default_jinja_vars(config):
    """Assemble the default jinja variables exposed to recipe templates.

    Provides ``build_platform`` / ``target_platform`` plus interpreter
    variables (PYTHON, LUA, R, RSCRIPT, PERL) resolved against the install
    prefix of the target platform.
    """
    cfg = ns_cfg(config)
    build_platform = cfg["build_platform"]
    # target_platform falls back to the build platform (native build)
    target_platform = cfg.get("target_platform", build_platform)

    res = {
        "build_platform": build_platform,
        "target_platform": target_platform,
    }

    # Windows recipes use batch-style variable expansion for the prefix.
    prefix = "%PREFIX%" if target_platform.startswith("win") else "$PREFIX"

    # this adds PYTHON, R, RSCRIPT ... etc so that they can be used in the
    # recipe script
    for lang in ["python", "lua", "r", "rscript", "perl"]:
        res[lang.upper()] = getattr(config, "_get_" + lang)(prefix, target_platform)

    return res
def load_packages_from_conda_build_config(conda_build_config, condarc_options):
    """Extract pinned package names from a conda_build_config.yaml file.

    Only the region between the ``# AUTOMATIC PARSING START`` and
    ``# AUTOMATIC PARSING END`` markers is parsed; selector lines are
    filtered with conda-build before YAML-loading.

    Parameters
    ----------
    conda_build_config : str
        Path to the conda_build_config.yaml file.
    condarc_options :
        Options forwarded to ``make_conda_config``.

    Returns
    -------
    tuple :
        ``(packages, package_names_map)`` where *packages* are the mapped
        package names and *package_names_map* translates pin keys to
        package names.

    Raises
    ------
    ValueError
        If either parsing marker is missing from the file.
    """
    with open(conda_build_config, "r") as f:
        content = f.read()

    idx1 = content.find("# AUTOMATIC PARSING START")
    idx2 = content.find("# AUTOMATIC PARSING END")
    if idx1 == -1 or idx2 == -1:
        # str.find returns -1 on a missing marker; slicing with -1 would
        # silently parse the wrong portion of the file instead of failing.
        raise ValueError(
            f"Could not find AUTOMATIC PARSING markers in {conda_build_config}"
        )
    content = content[idx1:idx2]

    # filter out using conda-build specific markers
    from conda_build.metadata import ns_cfg, select_lines

    config = make_conda_config(conda_build_config, None, None, condarc_options)
    content = select_lines(content, ns_cfg(config), variants_in_place=False)

    package_pins = yaml.safe_load(content)
    package_names_map = package_pins.pop("package_names_map")
    # Map each pin key through the names map, keeping the key itself as the
    # fallback when no mapping exists.
    packages = [package_names_map.get(p, p) for p in package_pins.keys()]
    return packages, package_names_map
def main(config=None):
    """Boa CLI entry point: parse args, render the recipe, solve and build.

    Subcommands: ``render`` (print the resolved outputs and exit),
    ``convert`` (recipe.yaml -> meta.yaml), ``build`` (full build flow).
    The *config* parameter is unused here; configuration is derived from
    the recipe folder via ``get_config``.
    """
    print(banner)

    parser = argparse.ArgumentParser(
        description="Boa, the fast, mamba powered-build tool for conda packages."
    )
    subparsers = parser.add_subparsers(help="sub-command help", dest="command")
    parent_parser = argparse.ArgumentParser(add_help=False)
    parent_parser.add_argument("recipe_dir", type=str)

    render_parser = subparsers.add_parser(
        "render", parents=[parent_parser], help="render a recipe"
    )
    convert_parser = subparsers.add_parser(
        "convert", parents=[parent_parser],
        help="convert recipe.yaml to old-style meta.yaml"
    )
    build_parser = subparsers.add_parser(
        "build", parents=[parent_parser], help="build a recipe"
    )

    args = parser.parse_args()
    command = args.command

    # "convert" is handled entirely by its own module and exits early.
    if command == 'convert':
        from boa.cli import convert
        convert.main(args.recipe_dir)
        exit()

    folder = args.recipe_dir
    cbc, config = get_config(folder)

    if not os.path.exists(config.output_folder):
        mkdir_p(config.output_folder)
    print(f"Updating build index: {(config.output_folder)}\n")
    update_index(config.output_folder, verbose=config.debug, threads=1)

    recipe_path = os.path.join(folder, "recipe.yaml")

    # step 1: parse YAML
    with open(recipe_path) as fi:
        loader = YAML(typ="safe")
        ydoc = loader.load(fi)

    # step 2: fill out context dict — string values may be jinja templates
    # referencing earlier context entries, so render them in order.
    context_dict = ydoc.get("context") or {}
    jenv = jinja2.Environment()
    for key, value in context_dict.items():
        if isinstance(value, str):
            tmpl = jenv.from_string(value)
            context_dict[key] = tmpl.render(context_dict)

    # The context section is consumed here and removed from the document.
    if ydoc.get("context"):
        del ydoc["context"]

    # step 3: recursively loop over the entire recipe and render jinja with context
    jenv.globals.update(jinja_functions(config, context_dict))
    for key in ydoc:
        render_recursive(ydoc[key], context_dict, jenv)

    flatten_selectors(ydoc, ns_cfg(config))
    normalize_recipe(ydoc)

    # pprint(ydoc)
    # We need to assemble the variants for each output
    variants = {}
    # if we have a outputs section, use that order the outputs
    if ydoc.get("outputs"):
        for o in ydoc["outputs"]:
            # inherit from global package
            pkg_meta = {}
            pkg_meta.update(ydoc["package"])
            pkg_meta.update(o["package"])
            o["package"] = pkg_meta

            # merge the top-level build section with the output's own
            build_meta = {}
            build_meta.update(ydoc.get("build"))
            build_meta.update(o.get("build") or {})
            o["build"] = build_meta

            variants[o["package"]["name"]] = get_dependency_variants(
                o.get("requirements", {}), cbc, config
            )
    else:
        # we only have one output
        variants[ydoc["package"]["name"]] = get_dependency_variants(
            ydoc.get("requirements", {}), cbc, config
        )

    # this takes in all variants and outputs, builds a dependency tree and returns
    # the final metadata
    sorted_outputs = to_build_tree(ydoc, variants, config)

    # then we need to solve and build from the bottom up
    # we can't first solve all packages without finalizing everything
    #
    # FLOW:
    # =====
    # - solve the package
    # - solv build, add weak run exports to
    # - add run exports from deps!

    print('\n')
    if command == "render":
        for o in sorted_outputs:
            print(o)
        exit()

    # TODO this should be done cleaner
    # The first output determines the shared build id for all outputs.
    top_name = ydoc['package']['name']
    o0 = sorted_outputs[0]
    o0.is_first = True
    o0.config.compute_build_id(top_name)

    solver = MambaSolver(["conda-forge"], context.subdir)
    print("\n")

    # Download sources once up front; re-download below only if an output
    # declares a different source section.
    download_source(MetaData(recipe_path, o0))
    cached_source = o0.sections['source']

    for o in sorted_outputs:
        solver.replace_channels()
        o.finalize_solve(sorted_outputs, solver)
        # print(o)

        # every output shares the first output's build id
        o.config._build_id = o0.config.build_id

        if "build" in o.transactions:
            # start from a clean build prefix for each output
            if isdir(o.config.build_prefix):
                utils.rm_rf(o.config.build_prefix)
            mkdir_p(o.config.build_prefix)
            o.transactions['build'].execute(
                PrefixData(o.config.build_prefix),
                PackageCacheData.first_writable().pkgs_dir)
        if "host" in o.transactions:
            mkdir_p(o.config.host_prefix)
            o.transactions['host'].execute(
                PrefixData(o.config.host_prefix),
                PackageCacheData.first_writable().pkgs_dir)

        meta = MetaData(recipe_path, o)
        o.final_build_id = meta.build_id()

        if cached_source != o.sections['source']:
            download_source(meta)

        build(meta, None)

    for o in sorted_outputs:
        print("\n")
        print(o)
def main(config=None):
    """Early render-only driver: parse configs and recipe, build the variant tree.

    Takes the recipe folder from ``sys.argv[1]``; the *config* parameter is
    overwritten internally via ``get_or_merge_config``.
    """
    folder = sys.argv[1]
    config = get_or_merge_config(None, {})
    config_files = find_config_files(folder)
    parsed_cfg = collections.OrderedDict()
    for f in config_files:
        parsed_cfg[f] = parse_config_file(f, config)
        print(parsed_cfg[f])
        # Also expose dash-separated aliases for underscore-separated keys
        # (e.g. "c_compiler" -> "c-compiler").
        normalized = {}
        for k in parsed_cfg[f].keys():
            if '_' in k:
                n = k.replace('_', '-')
                normalized[n] = parsed_cfg[f][k]
        parsed_cfg[f].update(normalized)
        print(parsed_cfg[f].keys())

    # TODO just using latest config here, should merge!
    cbc = parsed_cfg[config_files[-1]]

    recipe_path = os.path.join(folder, "recipe.yaml")

    # step 1: parse YAML
    with open(recipe_path) as fi:
        loader = YAML(typ='safe')
        ydoc = loader.load(fi)
    print(ydoc)

    # step 2: fill out context dict — string values may themselves be jinja
    # templates referencing earlier context entries.
    context_dict = ydoc.get("context") or {}
    jenv = jinja2.Environment()
    for key, value in context_dict.items():
        if isinstance(value, str):
            tmpl = jenv.from_string(value)
            context_dict[key] = tmpl.render(context_dict)

    # The context section is consumed here and removed from the document.
    if ydoc.get("context"):
        del ydoc["context"]

    # step 3: recursively loop over the entire recipe and render jinja with context
    jenv.globals.update(jinja_functions(config, context_dict))
    for key in ydoc:
        render_recursive(ydoc[key], context_dict, jenv)

    flatten_selectors(ydoc, ns_cfg(config))

    # We need to assemble the variants for each output
    variants = {}
    # if we have a outputs section, use that order the outputs
    if ydoc.get("outputs"):
        # if ydoc.get("build"):
        #     raise InvalidRecipeError("You can either declare outputs, or build?")
        for o in ydoc["outputs"]:
            variants[o["package"]["name"]] = get_dependency_variants(
                o["requirements"], cbc, config)
    else:
        # we only have one output
        variants[ydoc["package"]["name"]] = get_dependency_variants(
            ydoc["requirements"], cbc, config)

    # this takes in all variants and outputs, builds a dependency tree and returns
    # the final metadata
    sorted_outputs = to_build_tree(ydoc, variants, config)
def main(config=None):
    """Early boa CLI driver: parse args/configs, render the recipe, solve, build.

    Subcommands: ``render`` (print resolved outputs and exit) and ``build``.
    The *config* parameter is overwritten internally via
    ``get_or_merge_config``. NOTE(review): the solver subdir is hard-coded
    to "linux-64" in this version.
    """
    print(banner)

    parser = argparse.ArgumentParser(
        description='Boa, the fast build tool for conda packages.')
    subparsers = parser.add_subparsers(help='sub-command help', dest='command')
    parent_parser = argparse.ArgumentParser(add_help=False)
    parent_parser.add_argument('recipe_dir', type=str)

    render_parser = subparsers.add_parser('render', parents=[parent_parser],
                                          help='render a recipe')
    build_parser = subparsers.add_parser('build', parents=[parent_parser],
                                         help='build a recipe')

    args = parser.parse_args()
    command = args.command
    folder = args.recipe_dir
    config = get_or_merge_config(None, {})
    config_files = find_config_files(folder)
    parsed_cfg = collections.OrderedDict()
    for f in config_files:
        parsed_cfg[f] = parse_config_file(f, config)
        # Also expose dash-separated aliases for underscore-separated keys
        # (e.g. "c_compiler" -> "c-compiler").
        normalized = {}
        for k in parsed_cfg[f].keys():
            if "_" in k:
                n = k.replace("_", "-")
                normalized[n] = parsed_cfg[f][k]
        parsed_cfg[f].update(normalized)

    # TODO just using latest config here, should merge!
    if len(config_files):
        cbc = parsed_cfg[config_files[-1]]
    else:
        cbc = {}

    update_index(os.path.dirname(config.output_folder),
                 verbose=config.debug, threads=1)

    recipe_path = os.path.join(folder, "recipe.yaml")

    # step 1: parse YAML
    with open(recipe_path) as fi:
        loader = YAML(typ="safe")
        ydoc = loader.load(fi)

    # step 2: fill out context dict — string values may themselves be jinja
    # templates referencing earlier context entries.
    context_dict = ydoc.get("context") or {}
    jenv = jinja2.Environment()
    for key, value in context_dict.items():
        if isinstance(value, str):
            tmpl = jenv.from_string(value)
            context_dict[key] = tmpl.render(context_dict)

    # The context section is consumed here and removed from the document.
    if ydoc.get("context"):
        del ydoc["context"]

    # step 3: recursively loop over the entire recipe and render jinja with context
    jenv.globals.update(jinja_functions(config, context_dict))
    for key in ydoc:
        render_recursive(ydoc[key], context_dict, jenv)

    flatten_selectors(ydoc, ns_cfg(config))

    # We need to assemble the variants for each output
    variants = {}
    # if we have a outputs section, use that order the outputs
    if ydoc.get("outputs"):
        # if ydoc.get("build"):
        #     raise InvalidRecipeError("You can either declare outputs, or build?")
        for o in ydoc["outputs"]:
            # inherit from global package
            pkg_meta = {}
            pkg_meta.update(ydoc["package"])
            pkg_meta.update(o["package"])
            o["package"] = pkg_meta

            # merge the top-level build section with the output's own
            build_meta = {}
            build_meta.update(ydoc.get("build"))
            build_meta.update(o.get("build") or {})
            o["build"] = build_meta

            variants[o["package"]["name"]] = get_dependency_variants(
                o["requirements"], cbc, config)
    else:
        # we only have one output
        variants[ydoc["package"]["name"]] = get_dependency_variants(
            ydoc["requirements"], cbc, config)

    # this takes in all variants and outputs, builds a dependency tree and returns
    # the final metadata
    sorted_outputs = to_build_tree(ydoc, variants, config)

    # then we need to solve and build from the bottom up
    # we can't first solve all packages without finalizing everything
    # - solve the package
    # - solv build, add weak run exports to
    # - add run exports from deps!

    if command == 'render':
        for o in sorted_outputs:
            print(o)
        exit()

    solver = MambaSolver(["conda-forge"], "linux-64")
    for o in sorted_outputs:
        solver.replace_channels()
        o.finalize_solve(sorted_outputs, solver)
        print(o)

        o.config.compute_build_id(o.name)
        print(o.config.host_prefix)

        if 'build' in o.transactions:
            mkdir_p(o.config.build_prefix)
            print(o.transactions)
            o.transactions['build'].execute(
                PrefixData(o.config.build_prefix),
                PackageCacheData.first_writable().pkgs_dir)
        if 'host' in o.transactions:
            mkdir_p(o.config.host_prefix)
            print(o.transactions)
            o.transactions['host'].execute(
                PrefixData(o.config.host_prefix),
                PackageCacheData.first_writable().pkgs_dir)
        print(o.sections)
        stats = {}

        print("Final variant config")
        print(config.variant)
        print(o.variant)

        build(MetaData(recipe_path, o), None)

    # sorted_outputs
    # print(sorted_outputs[0].config.host_prefix)
    exit()

    # NOTE(review): unreachable — the exit() above always fires before this loop.
    for o in sorted_outputs:
        print("\n")
        print(o)