async def input_coroutine():
    """Interactive prompt loop for the boa build console.

    Repeatedly reads a command line (help / glob / edit / ls / build / exit)
    and dispatches it.  Relies on module-level ``session``, ``build_context``,
    ``console`` and the helper functions it calls.

    Raises:
        KeyboardInterrupt: when the user enters ``exit`` (the caller uses
            this to terminate the prompt loop).
    """
    completer = get_completer()
    while True:
        # patch_stdout keeps background prints from garbling the prompt line
        with patch_stdout():
            text = await session.prompt_async(
                "> ", bottom_toolbar=bottom_toolbar, completer=completer
            )
            token = text.split()
            if not token:
                continue
            if token[0] == "help":
                print_help()
            elif token[0] == "glob":
                glob_search(*token[1:])
            elif token[0] == "edit":
                # Guard against a bare "edit" (previously raised IndexError).
                if len(token) < 2:
                    console.print(
                        '[red]"edit" needs a target: recipe, script or file[/red]'
                    )
                elif token[1] == "recipe":
                    subprocess.call(
                        [os.environ["EDITOR"], build_context.meta_path]
                    )
                # was a plain `if`, re-evaluated even after "recipe" matched
                elif token[1] == "script":
                    subprocess.call([
                        os.environ["EDITOR"],
                        os.path.join(build_context.path, "build.sh"),
                    ])
                elif token[1] == "file":
                    # default to the work dir itself when no file is given
                    # (previously raised IndexError)
                    if len(token) > 2:
                        target = os.path.join(
                            build_context.config.work_dir, token[2]
                        )
                    else:
                        target = build_context.config.work_dir
                    subprocess.call([os.environ["EDITOR"], target])
            elif token[0] == "ls":
                # TODO add autocomplete
                subprocess.call([
                    "ls",
                    "-l",
                    "-a",
                    "--color=always",
                    os.path.join(build_context.config.work_dir, *token[1:]),
                ])
            elif token[0] == "build":
                console.print("[yellow]Running build![/yellow]")
                build(build_context, from_interactive=True)
            elif token[0] == "exit":
                raise KeyboardInterrupt()
            else:
                console.print(
                    f'[red]Could not understand command "{token[0]}"[/red]')
def execute_tokens(token):
    """Dispatch a single tokenized interactive command.

    Args:
        token: non-empty list of whitespace-split command words;
            ``token[0]`` is the command name.

    Raises:
        BoaExitException: when the user enters the ``exit`` command.
    """
    if token[0] == "help":
        print_help()
    elif token[0] == "patch":
        generate_patch(token[1:])
    elif token[0] == "glob":
        glob_search(*token[1:])
    elif token[0] == "edit":
        # Guard against a bare "edit" (previously raised IndexError).
        if len(token) < 2:
            console.print(
                '[red]"edit" needs a target: recipe, script or file[/red]'
            )
        elif token[1] == "recipe":
            subprocess.call([get_editor(), build_context.meta_path])
        # was a plain `if`, re-evaluated even after "recipe" matched
        elif token[1] == "script":
            subprocess.call(
                [get_editor(), os.path.join(build_context.path, "build.sh")]
            )
        elif token[1] == "file":
            if len(token) == 3:
                file = os.path.join(build_context.config.work_dir, token[2])
            else:
                # no filename given: open the work dir itself
                file = build_context.config.work_dir
            subprocess.call([get_editor(), file])
    elif token[0] == "ls":
        # TODO add autocomplete
        out = subprocess.check_output([
            "ls",
            "-l",
            "-a",
            "--color=always",
            os.path.join(build_context.config.work_dir, *token[1:]),
        ])
        print(out.decode("utf-8", errors="ignore"))
    elif token[0] == "build":
        console.print("[yellow]Running build![/yellow]")
        build(build_context, from_interactive=True)
    elif token[0] == "exit":
        print("Exiting.")
        raise BoaExitException()
    else:
        console.print(f'[red]Could not understand command "{token[0]}"[/red]')
def build_recipe(args, recipe_path, cbc, config):
    """Render a recipe, compute its variant/build tree and build every output.

    Args:
        args: parsed CLI namespace; reads ``features``, ``command`` and
            ``interactive``.
        recipe_path: path to the recipe file to render.
        cbc: conda-build-config (variant configuration) mapping.
        config: build configuration object passed through to rendering.

    Side effects: downloads sources, creates/removes build prefixes, runs
    builds.  Calls ``exit()`` after printing when ``args.command == "render"``.
    """
    if args.features:
        # NOTE(review): assert is stripped under `python -O`; input validation
        # would be safer as an explicit raise.
        assert args.features.startswith("[") and args.features.endswith("]")
        features = [f.strip() for f in args.features[1:-1].split(",")]
    else:
        features = []

    # "~feature" disables a feature, a plain name enables it
    selected_features = {}
    for f in features:
        if f.startswith("~"):
            selected_features[f[1:]] = False
        else:
            selected_features[f] = True

    ydoc = render(recipe_path, config=config)

    # We need to assemble the variants for each output
    variants = {}
    # if we have a outputs section, use that order the outputs
    if ydoc.get("outputs"):
        for o in ydoc["outputs"]:
            # inherit from global package
            pkg_meta = {}
            pkg_meta.update(ydoc["package"])
            pkg_meta.update(o["package"])
            o["package"] = pkg_meta

            build_meta = {}
            # `or {}` guards against a recipe without a top-level "build"
            # section: dict.update(None) raises TypeError
            build_meta.update(ydoc.get("build") or {})
            build_meta.update(o.get("build") or {})
            o["build"] = build_meta

            o["selected_features"] = selected_features

            variants[o["package"]["name"]] = get_dependency_variants(
                o.get("requirements", {}), cbc, config, features
            )
    else:
        # we only have one output
        variants[ydoc["package"]["name"]] = get_dependency_variants(
            ydoc.get("requirements", {}), cbc, config, features
        )

    # this takes in all variants and outputs, builds a dependency tree and
    # returns the final metadata
    sorted_outputs = to_build_tree(ydoc, variants, config, selected_features)

    # then we need to solve and build from the bottom up
    # we can't first solve all packages without finalizing everything
    #
    # FLOW:
    # =====
    # - solve the package
    # - solv build, add weak run exports to
    # - add run exports from deps!
    if args.command == "render":
        for o in sorted_outputs:
            console.print(o)
        exit()

    # TODO this should be done cleaner
    top_name = ydoc["package"]["name"]
    o0 = sorted_outputs[0]
    o0.is_first = True
    o0.config.compute_build_id(top_name)

    console.print("\n[yellow]Initializing mamba solver[/yellow]\n")
    solver = MambaSolver([], context.subdir)

    console.print("\n[yellow]Downloading source[/yellow]\n")
    download_source(MetaData(recipe_path, o0), args.interactive)
    cached_source = o0.sections["source"]

    for o in sorted_outputs:
        console.print(
            f"\n[yellow]Preparing environment for [bold]{o.name}[/bold][/yellow]\n"
        )
        solver.replace_channels()
        o.finalize_solve(sorted_outputs, solver)

        # all outputs share the top-level build id
        o.config._build_id = o0.config.build_id

        if "build" in o.transactions:
            if os.path.isdir(o.config.build_prefix):
                rm_rf(o.config.build_prefix)
            mkdir_p(o.config.build_prefix)
            try:
                o.transactions["build"].execute(
                    PrefixData(o.config.build_prefix),
                    PackageCacheData.first_writable().pkgs_dir,
                )
            except Exception:
                # This currently enables windows-multi-build...
                print("Could not instantiate build environment")

        if "host" in o.transactions:
            mkdir_p(o.config.host_prefix)
            o.transactions["host"].execute(
                PrefixData(o.config.host_prefix),
                PackageCacheData.first_writable().pkgs_dir,
            )

        meta = MetaData(recipe_path, o)
        o.set_final_build_id(meta)

        # re-download only when this output's source differs from the cached one
        if cached_source != o.sections["source"]:
            download_source(meta, args.interactive)

        console.print(f"\n[yellow]Starting build for [bold]{o.name}[/bold][/yellow]\n")

        build(meta, None, allow_interactive=args.interactive)

    for o in sorted_outputs:
        print("\n\n")
        console.print(o)
def build_recipe(
    command,
    recipe_path,
    cbc,
    config,
    selected_features,
    notest: bool = False,
    skip_existing: bool = False,
    interactive: bool = False,
    skip_fast: bool = False,
):
    """Render, solve and build all outputs of a recipe.

    Args:
        command: CLI subcommand; "render" / "full-render" only print or
            finalize metadata, anything else builds.
        recipe_path: path to the recipe to render.
        cbc: conda-build-config (variant) mapping.
        config: build configuration object.
        selected_features: mapping of feature name -> enabled flag.
        notest: when True, skip running tests on built packages.
        skip_existing: skip outputs whose package already exists in the
            output folder.
        interactive: allow interactive debugging on failure.
        skip_fast: drop outputs whose name-version tarball already exists,
            before solving anything.

    Returns:
        The sorted list of output objects, or None when every output was
        skipped.
    """
    ydoc = render(recipe_path, config=config)

    # We need to assemble the variants for each output
    variants = {}
    # if we have a outputs section, use that order the outputs
    if ydoc.get("outputs"):
        for o in ydoc["outputs"]:
            # inherit from global package
            pkg_meta = {}
            pkg_meta.update(ydoc["package"])
            pkg_meta.update(o["package"])
            o["package"] = pkg_meta

            build_meta = {}
            # `or {}` guards against a recipe without a top-level "build"
            # section: dict.update(None) raises TypeError
            build_meta.update(ydoc.get("build") or {})
            build_meta.update(o.get("build") or {})
            o["build"] = build_meta

            o["selected_features"] = selected_features

            variants[o["package"]["name"]] = get_dependency_variants(
                o.get("requirements", {}), cbc, config
            )
    else:
        # we only have one output
        variants[ydoc["package"]["name"]] = get_dependency_variants(
            ydoc.get("requirements", {}), cbc, config
        )

    # this takes in all variants and outputs, builds a dependency tree and
    # returns the final metadata
    sorted_outputs = to_build_tree(ydoc, variants, config, cbc, selected_features)

    # then we need to solve and build from the bottom up
    # we can't first solve all packages without finalizing everything
    #
    # FLOW:
    # =====
    # - solve the package
    # - solv build, add weak run exports to
    # - add run exports from deps!
    if command == "render":
        if boa_config.json:
            jlist = [o.to_json() for o in sorted_outputs]
            print(json.dumps(jlist, indent=4))
        else:
            for o in sorted_outputs:
                console.print(o)
        return sorted_outputs

    # TODO this should be done cleaner
    top_name = ydoc["package"]["name"]
    o0 = sorted_outputs[0]
    o0.is_first = True
    o0.config.compute_build_id(top_name)

    console.print("\n[yellow]Initializing mamba solver[/yellow]\n")

    if all(o.skip() for o in sorted_outputs):
        console.print("All outputs skipped.\n")
        return

    full_render = command == "full-render"

    if skip_fast:
        # collect "<name>-<version>" stems of packages already present in
        # the output folder for this platform (and noarch)
        build_pkgs = []
        archs = [o0.variant["target_platform"], "noarch"]
        for arch in archs:
            build_pkgs += [
                os.path.basename(x.rsplit("-", 1)[0])
                for x in glob.glob(
                    os.path.join(o0.config.output_folder, arch, "*.tar.bz2")
                )
            ]

        del_idx = []
        for i in range(len(sorted_outputs)):
            if f"{sorted_outputs[i].name}-{sorted_outputs[i].version}" in build_pkgs:
                del_idx.append(i)
        # delete from the back so earlier indices stay valid
        for idx in del_idx[::-1]:
            console.print(
                f"[green]Fast skip of {sorted_outputs[idx].name}-{sorted_outputs[idx].version}"
            )
            del sorted_outputs[idx]

    # Do not download source if we might skip
    if not (skip_existing or full_render):
        console.print("\n[yellow]Downloading source[/yellow]\n")
        download_source(MetaData(recipe_path, o0), interactive)
        cached_source = o0.sections["source"]
    else:
        cached_source = {}

    for o in sorted_outputs:
        console.print(
            f"\n[yellow]Preparing environment for [bold]{o.name}[/bold][/yellow]\n"
        )
        refresh_solvers()
        o.finalize_solve(sorted_outputs)

        # all outputs share the top-level build id
        o.config._build_id = o0.config.build_id

        meta = MetaData(recipe_path, o)
        o.set_final_build_id(meta)

        if o.skip() or full_render:
            continue

        final_name = meta.dist()

        # TODO this doesn't work for noarch!
        if skip_existing:
            # (removed a redundant second `final_name = meta.dist()` here)
            if os.path.exists(
                os.path.join(
                    o.config.output_folder,
                    o.variant["target_platform"],
                    final_name + ".tar.bz2",
                )
            ):
                console.print(f"\n[green]Skipping existing {final_name}\n")
                continue

        if "build" in o.transactions:
            if os.path.isdir(o.config.build_prefix):
                rm_rf(o.config.build_prefix)
            mkdir_p(o.config.build_prefix)
            try:
                o.transactions["build"]["transaction"].execute(
                    PrefixData(o.config.build_prefix),
                )
            except Exception:
                # This currently enables windows-multi-build...
                print("Could not instantiate build environment")

        if "host" in o.transactions:
            mkdir_p(o.config.host_prefix)
            o.transactions["host"]["transaction"].execute(
                PrefixData(o.config.host_prefix)
            )

        # re-download only when this output's source differs from the cached one
        if cached_source != o.sections["source"]:
            download_source(meta, interactive)
            cached_source = o.sections["source"]

        console.print(f"\n[yellow]Starting build for [bold]{o.name}[/bold][/yellow]\n")

        final_outputs = build(meta, None, allow_interactive=interactive)

        stats = {}

        if final_outputs is not None:
            for final_out in final_outputs:
                if not notest:
                    run_test(
                        final_out,
                        o.config,
                        stats,
                        move_broken=False,
                        provision_only=False,
                    )

    # print(stats)
    for o in sorted_outputs:
        print("\n\n")
        console.print(o)

    return sorted_outputs
def build_recipe(args, recipe_path, cbc, config):
    """Render a recipe, compute its variant/build tree and build every output.

    Args:
        args: parsed CLI namespace; reads ``features``, ``command``,
            ``skip_existing`` and ``interactive``.
        recipe_path: path to the recipe file to render.
        cbc: conda-build-config (variant configuration) mapping.
        config: build configuration object passed through to rendering.

    Side effects: downloads sources, creates/removes build prefixes, runs
    builds and package tests.
    """
    if args.features:
        # NOTE(review): assert is stripped under `python -O`; input validation
        # would be safer as an explicit raise.
        assert args.features.startswith("[") and args.features.endswith("]")
        features = [f.strip() for f in args.features[1:-1].split(",")]
    else:
        features = []

    # "~feature" disables a feature, a plain name enables it
    selected_features = {}
    for f in features:
        if f.startswith("~"):
            selected_features[f[1:]] = False
        else:
            selected_features[f] = True

    ydoc = render(recipe_path, config=config)

    # We need to assemble the variants for each output
    variants = {}
    # if we have a outputs section, use that order the outputs
    if ydoc.get("outputs"):
        for o in ydoc["outputs"]:
            # inherit from global package
            pkg_meta = {}
            pkg_meta.update(ydoc["package"])
            pkg_meta.update(o["package"])
            o["package"] = pkg_meta

            build_meta = {}
            # `or {}` guards against a recipe without a top-level "build"
            # section: dict.update(None) raises TypeError
            build_meta.update(ydoc.get("build") or {})
            build_meta.update(o.get("build") or {})
            o["build"] = build_meta

            o["selected_features"] = selected_features

            variants[o["package"]["name"]] = get_dependency_variants(
                o.get("requirements", {}), cbc, config, features
            )
    else:
        # we only have one output
        variants[ydoc["package"]["name"]] = get_dependency_variants(
            ydoc.get("requirements", {}), cbc, config, features
        )

    # this takes in all variants and outputs, builds a dependency tree and
    # returns the final metadata
    sorted_outputs = to_build_tree(ydoc, variants, config, cbc, selected_features)

    # then we need to solve and build from the bottom up
    # we can't first solve all packages without finalizing everything
    #
    # FLOW:
    # =====
    # - solve the package
    # - solv build, add weak run exports to
    # - add run exports from deps!
    if args.command == "render":
        for o in sorted_outputs:
            console.print(o)
        # stop here: rendering previously fell through into a full build
        return

    # TODO this should be done cleaner
    top_name = ydoc["package"]["name"]
    o0 = sorted_outputs[0]
    o0.is_first = True
    o0.config.compute_build_id(top_name)

    console.print("\n[yellow]Initializing mamba solver[/yellow]\n")

    # Do not download source if we might skip
    if not args.skip_existing:
        console.print("\n[yellow]Downloading source[/yellow]\n")
        download_source(MetaData(recipe_path, o0), args.interactive)
        cached_source = o0.sections["source"]
    else:
        cached_source = {}

    for o in sorted_outputs:
        console.print(
            f"\n[yellow]Preparing environment for [bold]{o.name}[/bold][/yellow]\n"
        )
        refresh_solvers()
        o.finalize_solve(sorted_outputs)

        # all outputs share the top-level build id
        o.config._build_id = o0.config.build_id

        meta = MetaData(recipe_path, o)
        o.set_final_build_id(meta)

        if args.skip_existing:
            final_name = meta.dist()
            if os.path.exists(
                os.path.join(
                    o.config.output_folder,
                    o.variant["target_platform"],
                    final_name + ".tar.bz2",
                )
            ):
                console.print(f"\n[green]Skipping existing {final_name}\n")
                continue

        if "build" in o.transactions:
            if os.path.isdir(o.config.build_prefix):
                rm_rf(o.config.build_prefix)
            mkdir_p(o.config.build_prefix)
            try:
                o.transactions["build"]["transaction"].execute(
                    PrefixData(o.config.build_prefix),
                    o.transactions["build"]["pkg_cache"],
                )
            except Exception:
                # This currently enables windows-multi-build...
                print("Could not instantiate build environment")

        if "host" in o.transactions:
            mkdir_p(o.config.host_prefix)
            o.transactions["host"]["transaction"].execute(
                PrefixData(o.config.host_prefix),
                o.transactions["host"]["pkg_cache"],
            )

        # re-download only when this output's source differs from the cached one
        if cached_source != o.sections["source"]:
            download_source(meta, args.interactive)
            cached_source = o.sections["source"]

        console.print(f"\n[yellow]Starting build for [bold]{o.name}[/bold][/yellow]\n")

        final_outputs = build(meta, None, allow_interactive=args.interactive)

        stats = {}

        if final_outputs is not None:
            for final_out in final_outputs:
                run_test(
                    final_out,
                    o.config,
                    stats,
                    move_broken=False,
                    provision_only=False,
                )

    # print(stats)
    for o in sorted_outputs:
        print("\n\n")
        console.print(o)