def main(recipe):
    """Render *recipe* with its configuration and report a successful validation.

    Prints a green confirmation when ``validate`` returns ``None`` (its
    "no errors found" result); otherwise stays silent.
    """
    cbc, cfg = get_config(recipe)
    document = render(recipe, cfg)
    if validate(document) is None:
        console.print("[green]Validation OK[/green]")
def find_all_recipes(target, config):
    """Discover every ``recipe.yaml`` below *target* (or the CWD) and return
    the rendered recipes in topological (build) order.

    Parameters
    ----------
    target : str
        Directory to search; if it is not a directory, the current working
        directory is searched and *target* is treated as a package name to
        build (falsy / unknown names select *all* discovered recipes).
    config : object
        Render configuration forwarded to ``render``.

    Returns
    -------
    list
        Rendered recipe documents, dependency-sorted so that each recipe
        appears after the local recipes it requires.
    """
    cwd = target if os.path.isdir(target) else os.getcwd()

    # BUG FIX: `**` only recurses with recursive=True — without it the old
    # code searched exactly one directory level.  With recursive=True the
    # `**` pattern also matches zero directories, so collect into a set to
    # dedupe the top-level recipe.yaml.
    yamls = set(glob.glob(os.path.join(cwd, "recipe.yaml")))
    yamls |= set(glob.glob(os.path.join(cwd, "**", "recipe.yaml"), recursive=True))

    recipes = {}
    for fn in sorted(yamls):
        yml = render(fn, config=config)
        pkg_name = yml["package"]["name"]
        recipes[pkg_name] = yml
        recipes[pkg_name]["recipe_file"] = fn

        # collect all output names produced by this recipe
        output_names = {pkg_name}
        for output in yml.get("outputs", []):
            output_names.add(output["package"]["name"])
        if "static" in [f["name"] for f in yml.get("features", [])]:
            output_names.add(pkg_name + "-static")
        recipes[pkg_name]["output_names"] = output_names

    sort_recipes = {}

    def get_all_requirements(x):
        # host + run requirements of the document, its features and all outputs
        req = x.get("requirements", {}).get("host", [])
        req += x.get("requirements", {}).get("run", [])
        for feat in x.get("features", []):
            req += feat.get("requirements", {}).get("host", [])
            req += feat.get("requirements", {}).get("run", [])
        for o in x.get("outputs", []):
            req += get_all_requirements(o)
        return req

    def recursive_add(name):
        # keep only the spec's package name, and only deps that are
        # themselves local recipes
        all_requirements = {
            spec.split(" ")[0] for spec in get_all_requirements(recipes[name])
        }
        all_requirements = all_requirements.intersection(recipes.keys())
        sort_recipes[name] = all_requirements
        for req in all_requirements:
            if req not in sort_recipes:
                recursive_add(req)

    if not target or target not in recipes:
        for k in recipes:
            recursive_add(k)
    else:
        recursive_add(target)

    # BUG FIX: toposort.toposort() yields *sets* of names; indexing `recipes`
    # with a set raises TypeError.  toposort_flatten gives a linear order.
    # (Also dropped a leftover debug print of the sort result.)
    sorted_names = toposort.toposort_flatten(sort_recipes)
    return [recipes[name] for name in sorted_names]
def build_recipe(args, recipe_path, cbc, config):
    """Render *recipe_path*, compute per-output variants, then solve and
    build each output bottom-up.

    Parameters
    ----------
    args : argparse.Namespace
        Uses ``features`` (a "[feat, ~other]"-style string or falsy),
        ``command`` ("render" prints and exits) and ``interactive``.
    recipe_path : str
        Path to the recipe file passed to ``render`` / ``MetaData``.
    cbc : dict
        conda_build_config variant data.
    config : object
        Build configuration forwarded to variant computation.
    """
    if args.features:
        assert args.features.startswith("[") and args.features.endswith("]")
        features = [f.strip() for f in args.features[1:-1].split(",")]
    else:
        features = []

    # "~feat" disables a feature, plain "feat" enables it
    selected_features = {}
    for f in features:
        if f.startswith("~"):
            selected_features[f[1:]] = False
        else:
            selected_features[f] = True

    ydoc = render(recipe_path, config=config)

    # We need to assemble the variants for each output
    variants = {}
    # if we have an outputs section, use that order for the outputs
    if ydoc.get("outputs"):
        for o in ydoc["outputs"]:
            # inherit from global package
            pkg_meta = {}
            pkg_meta.update(ydoc["package"])
            pkg_meta.update(o["package"])
            o["package"] = pkg_meta

            build_meta = {}
            # BUG FIX: `or {}` guards a recipe without a global build
            # section — dict.update(None) raises TypeError (the output-level
            # line below already guarded this way).
            build_meta.update(ydoc.get("build") or {})
            build_meta.update(o.get("build") or {})
            o["build"] = build_meta

            o["selected_features"] = selected_features

            variants[o["package"]["name"]] = get_dependency_variants(
                o.get("requirements", {}), cbc, config, features
            )
    else:
        # we only have one output
        variants[ydoc["package"]["name"]] = get_dependency_variants(
            ydoc.get("requirements", {}), cbc, config, features
        )

    # this takes in all variants and outputs, builds a dependency tree and
    # returns the final metadata
    sorted_outputs = to_build_tree(ydoc, variants, config, selected_features)

    # then we need to solve and build from the bottom up
    # we can't first solve all packages without finalizing everything
    #
    # FLOW:
    # =====
    # - solve the package
    # - solv build, add weak run exports to
    # - add run exports from deps!
    if args.command == "render":
        for o in sorted_outputs:
            console.print(o)
        exit()

    # TODO this should be done cleaner
    top_name = ydoc["package"]["name"]
    o0 = sorted_outputs[0]
    o0.is_first = True
    o0.config.compute_build_id(top_name)

    console.print("\n[yellow]Initializing mamba solver[/yellow]\n")
    solver = MambaSolver([], context.subdir)

    console.print("\n[yellow]Downloading source[/yellow]\n")
    download_source(MetaData(recipe_path, o0), args.interactive)
    cached_source = o0.sections["source"]

    for o in sorted_outputs:
        console.print(
            f"\n[yellow]Preparing environment for [bold]{o.name}[/bold][/yellow]\n"
        )
        solver.replace_channels()
        o.finalize_solve(sorted_outputs, solver)
        # every output shares the first output's build id
        o.config._build_id = o0.config.build_id

        if "build" in o.transactions:
            # always start from a clean build prefix
            if os.path.isdir(o.config.build_prefix):
                rm_rf(o.config.build_prefix)
            mkdir_p(o.config.build_prefix)
            try:
                o.transactions["build"].execute(
                    PrefixData(o.config.build_prefix),
                    PackageCacheData.first_writable().pkgs_dir,
                )
            except Exception:
                # This currently enables windows-multi-build...
                print("Could not instantiate build environment")

        if "host" in o.transactions:
            mkdir_p(o.config.host_prefix)
            o.transactions["host"].execute(
                PrefixData(o.config.host_prefix),
                PackageCacheData.first_writable().pkgs_dir,
            )

        meta = MetaData(recipe_path, o)
        o.set_final_build_id(meta)

        # re-download only when this output's source differs from the cached one
        if cached_source != o.sections["source"]:
            download_source(meta, args.interactive)

        console.print(f"\n[yellow]Starting build for [bold]{o.name}[/bold][/yellow]\n")

        build(meta, None, allow_interactive=args.interactive)

    for o in sorted_outputs:
        print("\n\n")
        console.print(o)
def build_recipe(
    command,
    recipe_path,
    cbc,
    config,
    selected_features,
    notest: bool = False,
    skip_existing: bool = False,
    interactive: bool = False,
    skip_fast: bool = False,
):
    """Render *recipe_path*, compute per-output variants, then solve, build
    and (optionally) test every output bottom-up.

    Parameters
    ----------
    command : str
        "render"/"full-render" only print/compute metadata; anything else
        performs the actual build.
    recipe_path : str
        Path to the recipe file passed to ``render`` / ``MetaData``.
    cbc : dict
        conda_build_config variant data.
    config : object
        Build configuration forwarded to variant computation.
    selected_features : dict
        Feature name -> enabled flag, attached to each output.
    notest : bool
        Skip ``run_test`` on the built packages.
    skip_existing : bool
        Skip outputs whose final tarball already exists in the output folder.
    interactive : bool
        Allow interactive prompts during source download / build.
    skip_fast : bool
        Drop outputs whose name-version already exists in the output folder
        before solving anything.

    Returns
    -------
    list
        The sorted output metadata objects (``None`` when everything was
        skipped).
    """
    ydoc = render(recipe_path, config=config)

    # We need to assemble the variants for each output
    variants = {}
    # if we have an outputs section, use that order for the outputs
    if ydoc.get("outputs"):
        for o in ydoc["outputs"]:
            # inherit from global package
            pkg_meta = {}
            pkg_meta.update(ydoc["package"])
            pkg_meta.update(o["package"])
            o["package"] = pkg_meta

            build_meta = {}
            # BUG FIX: `or {}` guards a recipe without a global build
            # section — dict.update(None) raises TypeError (the output-level
            # line below already guarded this way).
            build_meta.update(ydoc.get("build") or {})
            build_meta.update(o.get("build") or {})
            o["build"] = build_meta

            o["selected_features"] = selected_features

            variants[o["package"]["name"]] = get_dependency_variants(
                o.get("requirements", {}), cbc, config
            )
    else:
        # we only have one output
        variants[ydoc["package"]["name"]] = get_dependency_variants(
            ydoc.get("requirements", {}), cbc, config
        )

    # this takes in all variants and outputs, builds a dependency tree and
    # returns the final metadata
    sorted_outputs = to_build_tree(ydoc, variants, config, cbc, selected_features)

    # then we need to solve and build from the bottom up
    # we can't first solve all packages without finalizing everything
    #
    # FLOW:
    # =====
    # - solve the package
    # - solv build, add weak run exports to
    # - add run exports from deps!
    if command == "render":
        if boa_config.json:
            jlist = [o.to_json() for o in sorted_outputs]
            print(json.dumps(jlist, indent=4))
        else:
            for o in sorted_outputs:
                console.print(o)
        return sorted_outputs

    # TODO this should be done cleaner
    top_name = ydoc["package"]["name"]
    o0 = sorted_outputs[0]
    o0.is_first = True
    o0.config.compute_build_id(top_name)

    console.print("\n[yellow]Initializing mamba solver[/yellow]\n")

    # generator form: stop at the first non-skipped output
    if all(o.skip() for o in sorted_outputs):
        console.print("All outputs skipped.\n")
        return

    full_render = command == "full-render"

    if skip_fast:
        # name-version stems of everything already present in the output folder
        build_pkgs = []
        archs = [o0.variant["target_platform"], "noarch"]
        for arch in archs:
            build_pkgs += [
                os.path.basename(x.rsplit("-", 1)[0])
                for x in glob.glob(
                    os.path.join(o0.config.output_folder, arch, "*.tar.bz2")
                )
            ]
        # delete from the back so earlier indices stay valid
        del_idx = [
            i
            for i, out in enumerate(sorted_outputs)
            if f"{out.name}-{out.version}" in build_pkgs
        ]
        for idx in del_idx[::-1]:
            console.print(
                f"[green]Fast skip of {sorted_outputs[idx].name}-{sorted_outputs[idx].version}"
            )
            del sorted_outputs[idx]

    # Do not download source if we might skip
    if not (skip_existing or full_render):
        console.print("\n[yellow]Downloading source[/yellow]\n")
        download_source(MetaData(recipe_path, o0), interactive)
        cached_source = o0.sections["source"]
    else:
        cached_source = {}

    for o in sorted_outputs:
        console.print(
            f"\n[yellow]Preparing environment for [bold]{o.name}[/bold][/yellow]\n"
        )
        refresh_solvers()
        o.finalize_solve(sorted_outputs)
        # every output shares the first output's build id
        o.config._build_id = o0.config.build_id

        meta = MetaData(recipe_path, o)
        o.set_final_build_id(meta)

        if o.skip() or full_render:
            continue

        if skip_existing:
            # NOTE: removed a duplicate `final_name = meta.dist()` that was
            # computed unconditionally and then immediately recomputed here.
            final_name = meta.dist()

            # TODO this doesn't work for noarch!
            if os.path.exists(
                os.path.join(
                    o.config.output_folder,
                    o.variant["target_platform"],
                    final_name + ".tar.bz2",
                )
            ):
                console.print(f"\n[green]Skipping existing {final_name}\n")
                continue

        if "build" in o.transactions:
            # always start from a clean build prefix
            if os.path.isdir(o.config.build_prefix):
                rm_rf(o.config.build_prefix)
            mkdir_p(o.config.build_prefix)
            try:
                o.transactions["build"]["transaction"].execute(
                    PrefixData(o.config.build_prefix),
                )
            except Exception:
                # This currently enables windows-multi-build...
                print("Could not instantiate build environment")

        if "host" in o.transactions:
            mkdir_p(o.config.host_prefix)
            o.transactions["host"]["transaction"].execute(
                PrefixData(o.config.host_prefix)
            )

        # re-download only when this output's source differs from the cached one
        if cached_source != o.sections["source"]:
            download_source(meta, interactive)
            cached_source = o.sections["source"]

        console.print(f"\n[yellow]Starting build for [bold]{o.name}[/bold][/yellow]\n")

        final_outputs = build(meta, None, allow_interactive=interactive)

        stats = {}

        if final_outputs is not None:
            for final_out in final_outputs:
                if not notest:
                    run_test(
                        final_out,
                        o.config,
                        stats,
                        move_broken=False,
                        provision_only=False,
                    )

    # print(stats)
    for o in sorted_outputs:
        print("\n\n")
        console.print(o)

    return sorted_outputs
def build_recipe(args, recipe_path, cbc, config):
    """Render *recipe_path*, compute per-output variants, then solve, build
    and test every output bottom-up.

    Parameters
    ----------
    args : argparse.Namespace
        Uses ``features`` (a "[feat, ~other]"-style string or falsy),
        ``command`` ("render" prints and exits), ``skip_existing`` and
        ``interactive``.
    recipe_path : str
        Path to the recipe file passed to ``render`` / ``MetaData``.
    cbc : dict
        conda_build_config variant data.
    config : object
        Build configuration forwarded to variant computation.
    """
    if args.features:
        assert args.features.startswith("[") and args.features.endswith("]")
        features = [f.strip() for f in args.features[1:-1].split(",")]
    else:
        features = []

    # "~feat" disables a feature, plain "feat" enables it
    selected_features = {}
    for f in features:
        if f.startswith("~"):
            selected_features[f[1:]] = False
        else:
            selected_features[f] = True

    ydoc = render(recipe_path, config=config)

    # We need to assemble the variants for each output
    variants = {}
    # if we have an outputs section, use that order for the outputs
    if ydoc.get("outputs"):
        for o in ydoc["outputs"]:
            # inherit from global package
            pkg_meta = {}
            pkg_meta.update(ydoc["package"])
            pkg_meta.update(o["package"])
            o["package"] = pkg_meta

            build_meta = {}
            # BUG FIX: `or {}` guards a recipe without a global build
            # section — dict.update(None) raises TypeError (the output-level
            # line below already guarded this way).
            build_meta.update(ydoc.get("build") or {})
            build_meta.update(o.get("build") or {})
            o["build"] = build_meta

            o["selected_features"] = selected_features

            variants[o["package"]["name"]] = get_dependency_variants(
                o.get("requirements", {}), cbc, config, features
            )
    else:
        # we only have one output
        variants[ydoc["package"]["name"]] = get_dependency_variants(
            ydoc.get("requirements", {}), cbc, config, features
        )

    # this takes in all variants and outputs, builds a dependency tree and
    # returns the final metadata
    sorted_outputs = to_build_tree(ydoc, variants, config, cbc, selected_features)

    # then we need to solve and build from the bottom up
    # we can't first solve all packages without finalizing everything
    #
    # FLOW:
    # =====
    # - solve the package
    # - solv build, add weak run exports to
    # - add run exports from deps!
    if args.command == "render":
        for o in sorted_outputs:
            console.print(o)
        # BUG FIX: render must stop here — previously execution fell through
        # into a full build (the sibling implementations exit()/return here).
        exit()

    # TODO this should be done cleaner
    top_name = ydoc["package"]["name"]
    o0 = sorted_outputs[0]
    o0.is_first = True
    o0.config.compute_build_id(top_name)

    console.print("\n[yellow]Initializing mamba solver[/yellow]\n")

    # Do not download source if we might skip
    if not args.skip_existing:
        console.print("\n[yellow]Downloading source[/yellow]\n")
        download_source(MetaData(recipe_path, o0), args.interactive)
        cached_source = o0.sections["source"]
    else:
        cached_source = {}

    for o in sorted_outputs:
        console.print(
            f"\n[yellow]Preparing environment for [bold]{o.name}[/bold][/yellow]\n"
        )
        refresh_solvers()
        o.finalize_solve(sorted_outputs)
        # every output shares the first output's build id
        o.config._build_id = o0.config.build_id

        meta = MetaData(recipe_path, o)
        o.set_final_build_id(meta)

        if args.skip_existing:
            final_name = meta.dist()
            if os.path.exists(
                os.path.join(
                    o.config.output_folder,
                    o.variant["target_platform"],
                    final_name + ".tar.bz2",
                )
            ):
                console.print(f"\n[green]Skipping existing {final_name}\n")
                continue

        if "build" in o.transactions:
            # always start from a clean build prefix
            if os.path.isdir(o.config.build_prefix):
                rm_rf(o.config.build_prefix)
            mkdir_p(o.config.build_prefix)
            try:
                o.transactions["build"]["transaction"].execute(
                    PrefixData(o.config.build_prefix),
                    o.transactions["build"]["pkg_cache"],
                )
            except Exception:
                # This currently enables windows-multi-build...
                print("Could not instantiate build environment")

        if "host" in o.transactions:
            mkdir_p(o.config.host_prefix)
            o.transactions["host"]["transaction"].execute(
                PrefixData(o.config.host_prefix),
                o.transactions["host"]["pkg_cache"],
            )

        # re-download only when this output's source differs from the cached one
        if cached_source != o.sections["source"]:
            download_source(meta, args.interactive)
            cached_source = o.sections["source"]

        console.print(f"\n[yellow]Starting build for [bold]{o.name}[/bold][/yellow]\n")

        final_outputs = build(meta, None, allow_interactive=args.interactive)

        stats = {}

        if final_outputs is not None:
            for final_out in final_outputs:
                run_test(
                    final_out,
                    o.config,
                    stats,
                    move_broken=False,
                    provision_only=False,
                )

    # print(stats)
    for o in sorted_outputs:
        print("\n\n")
        console.print(o)