def get_config(folder, variant=None, additional_files=None):
    """Collect, order, and merge every variant config file for *folder*.

    ``additional_files`` (e.g. from the command line) are appended after
    the discovered hierarchy so that they take precedence when merging.
    Returns the combined spec and the conda-build config object.
    """
    variant = variant or {}
    additional_files = additional_files or []
    config = get_or_merge_config(None, variant)

    # conda-build >= 3.20.5 wants the config passed to find_config_files
    if cb_split_version >= (3, 20, 5):
        config_files = find_config_files(folder, config)
    else:
        config_files = find_config_files(folder)

    all_files = [os.path.abspath(p) for p in config_files + additional_files]

    # Deduplicate, keeping only the LAST occurrence of each path so the
    # command line can overwrite the hierarchy: reverse, keep first
    # occurrences (dict preserves insertion order), restore the order.
    all_files = list(dict.fromkeys(reversed(all_files)))[::-1]

    console.print(f"\nLoading config files: [green]{', '.join(all_files)}\n")

    parsed_cfg = collections.OrderedDict()
    for path in all_files:
        parsed_cfg[path] = parse_config_file(path, config)

    # Merge each of the specs; a debug message is emitted when a later
    # spec overrides a given setting (controlled by config.verbose).
    combined_spec = combine_specs(parsed_cfg, log_output=config.verbose)

    return combined_spec, config
def get_config(folder, variant=None):
    """Load the variant config files discovered for *folder*.

    Returns the parsed content of the last config file found (no merging
    yet — see TODO) along with the conda-build config object.
    """
    config = get_or_merge_config(None, variant or {})

    # conda-build >= 3.20.5 wants the config passed to find_config_files
    if cb_split_version >= (3, 20, 5):
        config_files = find_config_files(folder, config)
    else:
        config_files = find_config_files(folder)

    console.print(f"\nLoading config files: [green]{', '.join(config_files)}\n")

    parsed_cfg = collections.OrderedDict(
        (path, parse_config_file(path, config)) for path in config_files
    )

    # TODO just using latest config here, should merge!
    cbc = parsed_cfg[config_files[-1]] if config_files else {}
    return cbc, config
def get_config(folder):
    """Find and parse the variant config files for *folder*.

    Keys containing underscores are mirrored under a dashed spelling
    (``a_b`` -> ``a-b``) so both forms resolve later.  Returns the last
    parsed config (no merging yet) and the conda-build config object.
    """
    config = get_or_merge_config(None, {})
    config_files = find_config_files(folder)

    parsed_cfg = collections.OrderedDict()
    for path in config_files:
        content = parse_config_file(path, config)
        # Mirror underscore keys under their dashed variant.
        dashed = {
            key.replace("_", "-"): value
            for key, value in content.items()
            if "_" in key
        }
        content.update(dashed)
        parsed_cfg[path] = content

    # TODO just using latest config here, should merge!
    cbc = parsed_cfg[config_files[-1]] if config_files else {}
    return cbc, config
def main(config=None):
    """Render a recipe: load variant configs, render jinja + selectors in
    ``recipe.yaml``, and compute the sorted output metadata tree.

    NOTE(review): the ``config`` parameter is immediately overwritten
    below; it is kept only for interface compatibility.
    """
    folder = sys.argv[1]
    config = get_or_merge_config(None, {})
    config_files = find_config_files(folder)
    parsed_cfg = collections.OrderedDict()
    for f in config_files:
        parsed_cfg[f] = parse_config_file(f, config)
        print(parsed_cfg[f])
        # Mirror underscore keys under their dashed spelling (a_b -> a-b).
        normalized = {}
        for k in parsed_cfg[f].keys():
            if '_' in k:
                n = k.replace('_', '-')
                normalized[n] = parsed_cfg[f][k]
        parsed_cfg[f].update(normalized)
        print(parsed_cfg[f].keys())

    # TODO just using latest config here, should merge!
    # BUG FIX: guard against an empty config_files list — the previous
    # unconditional parsed_cfg[config_files[-1]] raised IndexError when
    # no variant config file exists next to the recipe.
    if config_files:
        cbc = parsed_cfg[config_files[-1]]
    else:
        cbc = {}

    recipe_path = os.path.join(folder, "recipe.yaml")

    # step 1: parse YAML
    with open(recipe_path) as fi:
        loader = YAML(typ='safe')
        ydoc = loader.load(fi)
    print(ydoc)

    # step 2: fill out context dict (values may reference earlier keys)
    context_dict = ydoc.get("context") or {}
    jenv = jinja2.Environment()
    for key, value in context_dict.items():
        if isinstance(value, str):
            tmpl = jenv.from_string(value)
            context_dict[key] = tmpl.render(context_dict)

    if ydoc.get("context"):
        del ydoc["context"]

    # step 3: recursively loop over the entire recipe and render jinja with context
    jenv.globals.update(jinja_functions(config, context_dict))
    for key in ydoc:
        render_recursive(ydoc[key], context_dict, jenv)

    flatten_selectors(ydoc, ns_cfg(config))

    # We need to assemble the variants for each output
    variants = {}
    # if we have a outputs section, use that order the outputs
    if ydoc.get("outputs"):
        # if ydoc.get("build"):
        #     raise InvalidRecipeError("You can either declare outputs, or build?")
        for o in ydoc["outputs"]:
            variants[o["package"]["name"]] = get_dependency_variants(
                o["requirements"], cbc, config)
    else:
        # we only have one output
        variants[ydoc["package"]["name"]] = get_dependency_variants(
            ydoc["requirements"], cbc, config)

    # this takes in all variants and outputs, builds a dependency tree and returns
    # the final metadata
    sorted_outputs = to_build_tree(ydoc, variants, config)
def main(config=None):
    """Boa CLI entry point: parse args, render the recipe, and — for the
    ``build`` command — solve each output's environments and build it.

    NOTE(review): the ``config`` parameter is immediately overwritten
    below; it is kept only for interface compatibility.
    """
    print(banner)

    parser = argparse.ArgumentParser(
        description='Boa, the fast build tool for conda packages.')
    subparsers = parser.add_subparsers(help='sub-command help', dest='command')
    parent_parser = argparse.ArgumentParser(add_help=False)
    parent_parser.add_argument('recipe_dir', type=str)

    render_parser = subparsers.add_parser('render', parents=[parent_parser],
                                          help='render a recipe')
    build_parser = subparsers.add_parser('build', parents=[parent_parser],
                                         help='build a recipe')

    args = parser.parse_args()
    command = args.command
    folder = args.recipe_dir

    config = get_or_merge_config(None, {})
    config_files = find_config_files(folder)
    parsed_cfg = collections.OrderedDict()
    for f in config_files:
        parsed_cfg[f] = parse_config_file(f, config)
        # Mirror underscore keys under their dashed spelling (a_b -> a-b).
        normalized = {}
        for k in parsed_cfg[f].keys():
            if "_" in k:
                n = k.replace("_", "-")
                normalized[n] = parsed_cfg[f][k]
        parsed_cfg[f].update(normalized)

    # TODO just using latest config here, should merge!
    if config_files:
        cbc = parsed_cfg[config_files[-1]]
    else:
        cbc = {}

    update_index(os.path.dirname(config.output_folder),
                 verbose=config.debug, threads=1)

    recipe_path = os.path.join(folder, "recipe.yaml")

    # step 1: parse YAML
    with open(recipe_path) as fi:
        loader = YAML(typ="safe")
        ydoc = loader.load(fi)

    # step 2: fill out context dict (values may reference earlier keys)
    context_dict = ydoc.get("context") or {}
    jenv = jinja2.Environment()
    for key, value in context_dict.items():
        if isinstance(value, str):
            tmpl = jenv.from_string(value)
            context_dict[key] = tmpl.render(context_dict)

    if ydoc.get("context"):
        del ydoc["context"]

    # step 3: recursively loop over the entire recipe and render jinja with context
    jenv.globals.update(jinja_functions(config, context_dict))
    for key in ydoc:
        render_recursive(ydoc[key], context_dict, jenv)

    flatten_selectors(ydoc, ns_cfg(config))

    # We need to assemble the variants for each output
    variants = {}
    # if we have a outputs section, use that order the outputs
    if ydoc.get("outputs"):
        # if ydoc.get("build"):
        #     raise InvalidRecipeError("You can either declare outputs, or build?")
        for o in ydoc["outputs"]:
            # inherit from global package
            pkg_meta = {}
            pkg_meta.update(ydoc["package"])
            pkg_meta.update(o["package"])
            o["package"] = pkg_meta

            build_meta = {}
            # BUG FIX: ydoc.get("build") is None when the recipe has no
            # global build section, and dict.update(None) raises
            # TypeError — default to {} like the per-output line below.
            build_meta.update(ydoc.get("build") or {})
            build_meta.update(o.get("build") or {})
            o["build"] = build_meta
            variants[o["package"]["name"]] = get_dependency_variants(
                o["requirements"], cbc, config)
    else:
        # we only have one output
        variants[ydoc["package"]["name"]] = get_dependency_variants(
            ydoc["requirements"], cbc, config)

    # this takes in all variants and outputs, builds a dependency tree and returns
    # the final metadata
    sorted_outputs = to_build_tree(ydoc, variants, config)

    # then we need to solve and build from the bottom up
    # we can't first solve all packages without finalizing everything
    # - solve the package
    # - solv build, add weak run exports to
    # - add run exports from deps!
    if command == 'render':
        for o in sorted_outputs:
            print(o)
        exit()

    # NOTE(review): channels and platform are hard-coded here — presumably
    # a placeholder; confirm before relying on other platforms.
    solver = MambaSolver(["conda-forge"], "linux-64")
    for o in sorted_outputs:
        solver.replace_channels()
        o.finalize_solve(sorted_outputs, solver)
        print(o)

        o.config.compute_build_id(o.name)

        print(o.config.host_prefix)

        if 'build' in o.transactions:
            mkdir_p(o.config.build_prefix)
            print(o.transactions)
            o.transactions['build'].execute(
                PrefixData(o.config.build_prefix),
                PackageCacheData.first_writable().pkgs_dir)
        if 'host' in o.transactions:
            mkdir_p(o.config.host_prefix)
            print(o.transactions)
            o.transactions['host'].execute(
                PrefixData(o.config.host_prefix),
                PackageCacheData.first_writable().pkgs_dir)
        print(o.sections)
        stats = {}

        print("Final variant config")
        print(config.variant)
        print(o.variant)
        build(MetaData(recipe_path, o), None)

    # sorted_outputs
    # print(sorted_outputs[0].config.host_prefix)
    exit()

    for o in sorted_outputs:
        print("\n")
        print(o)