Example #1
def build_recipe(args, recipe_path, cbc, config):

    if args.features:
        assert args.features.startswith("[") and args.features.endswith("]")
        features = [f.strip() for f in args.features[1:-1].split(",")]
    else:
        features = []

    selected_features = {}
    for f in features:
        if f.startswith("~"):
            selected_features[f[1:]] = False
        else:
            selected_features[f] = True

    ydoc = render(recipe_path, config=config)
    # We need to assemble the variants for each output
    variants = {}
    # if we have an outputs section, use it to order the outputs
    if ydoc.get("outputs"):
        for o in ydoc["outputs"]:
            # inherit from global package
            pkg_meta = {}
            pkg_meta.update(ydoc["package"])
            pkg_meta.update(o["package"])
            o["package"] = pkg_meta

            build_meta = {}
            build_meta.update(ydoc.get("build") or {})
            build_meta.update(o.get("build") or {})
            o["build"] = build_meta

            o["selected_features"] = selected_features

            variants[o["package"]["name"]] = get_dependency_variants(
                o.get("requirements", {}), cbc, config, features
            )
    else:
        # we only have one output
        variants[ydoc["package"]["name"]] = get_dependency_variants(
            ydoc.get("requirements", {}), cbc, config, features
        )

    # this takes in all variants and outputs, builds a dependency tree and returns
    # the final metadata
    sorted_outputs = to_build_tree(ydoc, variants, config, selected_features)

    # then we need to solve and build from the bottom up
    # we can't first solve all packages without finalizing everything
    #
    # FLOW:
    # =====
    # - solve the package
    #   - solve the build environment, add weak run exports
    # - add run exports from deps!

    if args.command == "render":
        for o in sorted_outputs:
            console.print(o)
        exit()

    # TODO this should be done cleaner
    top_name = ydoc["package"]["name"]
    o0 = sorted_outputs[0]
    o0.is_first = True
    o0.config.compute_build_id(top_name)

    console.print("\n[yellow]Initializing mamba solver[/yellow]\n")
    solver = MambaSolver([], context.subdir)

    console.print("\n[yellow]Downloading source[/yellow]\n")
    download_source(MetaData(recipe_path, o0), args.interactive)
    cached_source = o0.sections["source"]

    for o in sorted_outputs:
        console.print(
            f"\n[yellow]Preparing environment for [bold]{o.name}[/bold][/yellow]\n"
        )
        solver.replace_channels()
        o.finalize_solve(sorted_outputs, solver)

        # propagate the first output's build id to every output
        o.config._build_id = o0.config.build_id

        if "build" in o.transactions:
            if os.path.isdir(o.config.build_prefix):
                rm_rf(o.config.build_prefix)
            mkdir_p(o.config.build_prefix)
            try:
                o.transactions["build"].execute(
                    PrefixData(o.config.build_prefix),
                    PackageCacheData.first_writable().pkgs_dir,
                )
            except Exception:
                # This currently enables windows-multi-build...
                print("Could not instantiate build environment")

        if "host" in o.transactions:
            mkdir_p(o.config.host_prefix)
            o.transactions["host"].execute(
                PrefixData(o.config.host_prefix),
                PackageCacheData.first_writable().pkgs_dir,
            )

        meta = MetaData(recipe_path, o)
        o.set_final_build_id(meta)

        if cached_source != o.sections["source"]:
            download_source(meta, args.interactive)

        console.print(f"\n[yellow]Starting build for [bold]{o.name}[/bold][/yellow]\n")

        build(meta, None, allow_interactive=args.interactive)

    for o in sorted_outputs:
        print("\n\n")
        console.print(o)
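
A note on the feature flags parsed at the top of this example: `--features` takes a bracketed, comma-separated list, and a `~` prefix disables a feature. A minimal self-contained sketch of that rule (the `parse_features` helper is hypothetical, not part of boa):

def parse_features(spec: str) -> dict:
    # "[static, ~tests]" -> {"static": True, "tests": False}
    if not spec:
        return {}
    assert spec.startswith("[") and spec.endswith("]")
    selected = {}
    for f in (part.strip() for part in spec[1:-1].split(",")):
        if f.startswith("~"):
            selected[f[1:]] = False  # "~name" turns the feature off
        else:
            selected[f] = True
    return selected

assert parse_features("[static, ~tests]") == {"static": True, "tests": False}
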
Example #2
def main(config=None):
    print(banner)

    parser = argparse.ArgumentParser(
        description="Boa, the fast, mamba powered-build tool for conda packages."
    )
    subparsers = parser.add_subparsers(help="sub-command help", dest="command")
    parent_parser = argparse.ArgumentParser(add_help=False)
    parent_parser.add_argument("recipe_dir", type=str)

    render_parser = subparsers.add_parser(
        "render", parents=[parent_parser], help="render a recipe"
    )
    convert_parser = subparsers.add_parser(
        "convert", parents=[parent_parser], help="convert recipe.yaml to old-style meta.yaml"
    )
    build_parser = subparsers.add_parser(
        "build", parents=[parent_parser], help="build a recipe"
    )
    args = parser.parse_args()

    command = args.command

    if command == 'convert':
        from boa.cli import convert
        convert.main(args.recipe_dir)
        exit()

    folder = args.recipe_dir
    cbc, config = get_config(folder)

    if not os.path.exists(config.output_folder):
        mkdir_p(config.output_folder)
    print(f"Updating build index: {(config.output_folder)}\n")
    update_index(config.output_folder, verbose=config.debug, threads=1)

    recipe_path = os.path.join(folder, "recipe.yaml")

    # step 1: parse YAML
    with open(recipe_path) as fi:
        loader = YAML(typ="safe")
        ydoc = loader.load(fi)

    # step 2: fill out context dict
    context_dict = ydoc.get("context") or {}
    jenv = jinja2.Environment()
    for key, value in context_dict.items():
        if isinstance(value, str):
            tmpl = jenv.from_string(value)
            context_dict[key] = tmpl.render(context_dict)

    if ydoc.get("context"):
        del ydoc["context"]

    # step 3: recursively loop over the entire recipe and render jinja with context
    jenv.globals.update(jinja_functions(config, context_dict))
    for key in ydoc:
        render_recursive(ydoc[key], context_dict, jenv)

    flatten_selectors(ydoc, ns_cfg(config))
    normalize_recipe(ydoc)

    # pprint(ydoc)
    # We need to assemble the variants for each output
    variants = {}
    # if we have an outputs section, use it to order the outputs
    if ydoc.get("outputs"):
        for o in ydoc["outputs"]:
            # inherit from global package
            pkg_meta = {}
            pkg_meta.update(ydoc["package"])
            pkg_meta.update(o["package"])
            o["package"] = pkg_meta

            build_meta = {}
            build_meta.update(ydoc.get("build") or {})
            build_meta.update(o.get("build") or {})
            o["build"] = build_meta
            variants[o["package"]["name"]] = get_dependency_variants(
                o.get("requirements", {}), cbc, config
            )
    else:
        # we only have one output
        variants[ydoc["package"]["name"]] = get_dependency_variants(
            ydoc.get("requirements", {}), cbc, config
        )

    # this takes in all variants and outputs, builds a dependency tree and returns
    # the final metadata
    sorted_outputs = to_build_tree(ydoc, variants, config)

    # then we need to solve and build from the bottom up
    # we can't first solve all packages without finalizing everything
    #
    # FLOW:
    # =====
    # - solve the package
    #   - solve the build environment, add weak run exports
    # - add run exports from deps!

    print('\n')
    if command == "render":
        for o in sorted_outputs:
            print(o)
        exit()


    # TODO this should be done cleaner
    top_name = ydoc['package']['name']
    o0 = sorted_outputs[0]
    o0.is_first = True
    o0.config.compute_build_id(top_name)

    solver = MambaSolver(["conda-forge"], context.subdir)
    print("\n")

    download_source(MetaData(recipe_path, o0))
    cached_source = o0.sections['source']

    for o in sorted_outputs:
        solver.replace_channels()
        o.finalize_solve(sorted_outputs, solver)
        # print(o)

        o.config._build_id = o0.config.build_id

        if "build" in o.transactions:
            if isdir(o.config.build_prefix):
                utils.rm_rf(o.config.build_prefix)
            mkdir_p(o.config.build_prefix)
            o.transactions["build"].execute(
                PrefixData(o.config.build_prefix),
                PackageCacheData.first_writable().pkgs_dir,
            )

        if "host" in o.transactions:
            mkdir_p(o.config.host_prefix)
            o.transactions["host"].execute(
                PrefixData(o.config.host_prefix),
                PackageCacheData.first_writable().pkgs_dir,
            )

        meta = MetaData(recipe_path, o)
        o.final_build_id = meta.build_id()

        if cached_source != o.sections['source']:
            download_source(meta)

        build(meta, None)

    for o in sorted_outputs:
        print("\n")
        print(o)
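
The context rendering in step 2 above treats every string value of the recipe's `context` section as a Jinja2 template rendered against the context itself, so later keys can reference earlier ones (a single pass, in insertion order). A minimal sketch with a hypothetical context dict:

import jinja2

context = {
    "name": "libfoo",
    "version": "1.2.3",
    "tarball": "{{ name }}-{{ version }}.tar.gz",
}
jenv = jinja2.Environment()
for key, value in context.items():
    if isinstance(value, str):
        # render each value against the partially rendered context
        context[key] = jenv.from_string(value).render(context)

assert context["tarball"] == "libfoo-1.2.3.tar.gz"
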
Example #3
def run_test(
    recipedir_or_package_or_metadata,
    config,
    stats,
    move_broken=True,
    provision_only=False,
    solver=None,
):
    """
    Execute any test scripts for the given package.

    :param m: Package's metadata.
    :type m: Metadata
    """

    # we want to know if we're dealing with package input.  If so, we can move the input on success.
    hash_input = {}

    # store this name to keep it consistent.  By changing files, we change the hash later.
    #    It matches the build hash now, so let's keep it around.
    test_package_name = (
        recipedir_or_package_or_metadata.dist()
        if hasattr(recipedir_or_package_or_metadata, "dist")
        else recipedir_or_package_or_metadata
    )

    if not provision_only:
        print("TEST START:", test_package_name)

    if hasattr(recipedir_or_package_or_metadata, "config"):
        metadata = recipedir_or_package_or_metadata
        utils.rm_rf(metadata.config.test_dir)
    else:
        metadata, hash_input = construct_metadata_for_test(
            recipedir_or_package_or_metadata, config
        )

    trace = "-x " if metadata.config.debug else ""

    # Must download *after* computing build id, or else computing build id will change
    #     folder destination
    _extract_test_files_from_package(metadata)

    # When testing a .tar.bz2 in the pkgs dir, clean_pkg_cache() will remove it.
    # Prevent this. When https://github.com/conda/conda/issues/5708 gets fixed
    # I think we can remove this call to clean_pkg_cache().
    in_pkg_cache = (
        not hasattr(recipedir_or_package_or_metadata, "config")
        and os.path.isfile(recipedir_or_package_or_metadata)
        and recipedir_or_package_or_metadata.endswith(CONDA_PACKAGE_EXTENSIONS)
        and os.path.dirname(recipedir_or_package_or_metadata) in pkgs_dirs[0]
    )
    if not in_pkg_cache:
        environ.clean_pkg_cache(metadata.dist(), metadata.config)

    copy_test_source_files(metadata, metadata.config.test_dir)
    # this is also copying tests/source_files from work_dir to testing workdir

    _, pl_files, py_files, r_files, lua_files, shell_files = create_all_test_files(
        metadata
    )

    if (
        not any([py_files, shell_files, pl_files, lua_files, r_files])
        and not metadata.config.test_run_post
    ):
        print("Nothing to test for:", test_package_name)
        return True

    if metadata.config.remove_work_dir:
        for name, prefix in (
            ("host", metadata.config.host_prefix),
            ("build", metadata.config.build_prefix),
        ):
            if os.path.isdir(prefix):
                # move host folder to force hardcoded paths to host env to break during tests
                #    (so that they can be properly addressed by recipe author)
                dest = os.path.join(
                    os.path.dirname(prefix),
                    "_".join(
                        (
                            "%s_prefix_moved" % name,
                            metadata.dist(),
                            getattr(metadata.config, "%s_subdir" % name),
                        )
                    ),
                )
                # Needs to come after create_files in case there's test/source_files
                shutil_move_more_retrying(prefix, dest, "{} prefix".format(prefix))

        # nested if so that there's no warning when we just leave the empty workdir in place
        if metadata.source_provided:
            dest = os.path.join(
                os.path.dirname(metadata.config.work_dir),
                "_".join(("work_moved", metadata.dist(), metadata.config.host_subdir)),
            )
            # Needs to come after create_files in case there's test/source_files
            shutil_move_more_retrying(config.work_dir, dest, "work")
    else:
        log.warning(
            "Not moving work directory after build.  Your package may depend on files "
            "in the work directory that are not included with your package"
        )

    # looks like a dead function to me
    # get_build_metadata(metadata)

    specs = metadata.get_test_deps(py_files, pl_files, lua_files, r_files)

    with utils.path_prepended(metadata.config.test_prefix):
        env = dict(os.environ.copy())
        env.update(environ.get_dict(m=metadata, prefix=config.test_prefix))
        env["CONDA_BUILD_STATE"] = "TEST"
        env["CONDA_BUILD"] = "1"
        if env_path_backup_var_exists:
            env["CONDA_PATH_BACKUP"] = os.environ["CONDA_PATH_BACKUP"]

    if not metadata.config.activate or metadata.name() == "conda":
        # prepend bin (or Scripts) directory
        env = utils.prepend_bin_path(
            env, metadata.config.test_prefix, prepend_prefix=True
        )

    if utils.on_win:
        env["PATH"] = metadata.config.test_prefix + os.pathsep + env["PATH"]

    env["PREFIX"] = metadata.config.test_prefix
    if "BUILD_PREFIX" in env:
        del env["BUILD_PREFIX"]

    # In the future, we will need to support testing cross compiled
    #     packages on physical hardware. until then it is expected that
    #     something like QEMU or Wine will be used on the build machine,
    #     therefore, for now, we use host_subdir.

    # ensure that the test prefix isn't kept between variants
    utils.rm_rf(metadata.config.test_prefix)

    if solver is None:
        solver, pkg_cache_path = get_solver(metadata.config.host_subdir)
    else:
        pkg_cache_path = PackageCacheData.first_writable().pkgs_dir

    solver.replace_channels()
    transaction = solver.solve(specs, [pkg_cache_path])

    downloaded = transaction.fetch_extract_packages(
        pkg_cache_path, solver.repos + list(solver.local_repos.values()),
    )
    if not downloaded:
        raise RuntimeError("Did not succeed in downloading packages.")

    mkdir_p(metadata.config.test_prefix)
    transaction.execute(
        PrefixData(metadata.config.test_prefix), pkg_cache_path,
    )

    with utils.path_prepended(metadata.config.test_prefix):
        env = dict(os.environ.copy())
        env.update(environ.get_dict(m=metadata, prefix=metadata.config.test_prefix))
        env["CONDA_BUILD_STATE"] = "TEST"
        if env_path_backup_var_exists:
            env["CONDA_PATH_BACKUP"] = os.environ["CONDA_PATH_BACKUP"]

    if config.test_run_post:
        from conda_build.utils import get_installed_packages

        installed = get_installed_packages(metadata.config.test_prefix)
        files = installed[metadata.meta["package"]["name"]]["files"]
        replacements = get_all_replacements(metadata.config)
        try_download(metadata, False, True)
        create_info_files(metadata, replacements, files, metadata.config.test_prefix)
        post_build(metadata, files, None, metadata.config.test_prefix, True)

    # when workdir is removed, the source files are unavailable.  There's the test/source_files
    #    entry that lets people keep these files around.  The files are copied into test_dir for
    #    intuitive relative path behavior, though, not work_dir, so we need to adjust where
    #    SRC_DIR points.  The initial CWD during tests is test_dir.
    if metadata.config.remove_work_dir:
        env["SRC_DIR"] = metadata.config.test_dir

    test_script, _ = write_test_scripts(
        metadata, env, py_files, pl_files, lua_files, r_files, shell_files, trace
    )

    if utils.on_win:
        cmd = [os.environ.get("COMSPEC", "cmd.exe"), "/d", "/c", test_script]
    else:
        cmd = (
            [shell_path]
            + (["-x"] if metadata.config.debug else [])
            + ["-o", "errexit", test_script]
        )
    try:
        test_stats = {}
        if not provision_only:
            # rewrite long paths in stdout back to their env variables
            if metadata.config.debug or metadata.config.no_rewrite_stdout_env:
                rewrite_env = None
            else:
                rewrite_env = {k: env[k] for k in ["PREFIX", "SRC_DIR"] if k in env}
                if metadata.config.verbose:
                    for k, v in rewrite_env.items():
                        print(
                            "{0} {1}={2}".format(
                                "set" if test_script.endswith(".bat") else "export",
                                k,
                                v,
                            )
                        )
            utils.check_call_env(
                cmd,
                env=env,
                cwd=metadata.config.test_dir,
                stats=test_stats,
                rewrite_stdout_env=rewrite_env,
            )
            log_stats(test_stats, "testing {}".format(metadata.name()))
            # TODO need to implement metadata.get_used_loop_vars
            # if stats is not None and metadata.config.variants:
            #     stats[
            #         stats_key(metadata, "test_{}".format(metadata.name()))
            #     ] = test_stats
            if os.path.exists(join(metadata.config.test_dir, "TEST_FAILED")):
                raise subprocess.CalledProcessError(-1, "")
            print("TEST END:", test_package_name)

    except subprocess.CalledProcessError as _:  # noqa
        tests_failed(
            metadata,
            move_broken=move_broken,
            broken_dir=metadata.config.broken_dir,
            config=metadata.config,
        )
        raise

    if config.need_cleanup and config.recipe_dir is not None and not provision_only:
        utils.rm_rf(config.recipe_dir)

    return True
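
`run_test` accepts a recipe directory, a built package path, or a MetaData object, and dispatches by duck typing rather than isinstance checks. A stripped-down sketch of that dispatch (the stand-in class and values are hypothetical):

class FakeMetaData:
    def dist(self):
        return "libfoo-1.2.3-h1234_0"

def test_package_label(recipedir_or_package_or_metadata):
    # MetaData-like inputs expose .dist(); plain path strings are used verbatim
    return (
        recipedir_or_package_or_metadata.dist()
        if hasattr(recipedir_or_package_or_metadata, "dist")
        else recipedir_or_package_or_metadata
    )

assert test_package_label(FakeMetaData()) == "libfoo-1.2.3-h1234_0"
assert test_package_label("libfoo-1.2.3.tar.bz2") == "libfoo-1.2.3.tar.bz2"
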
Example #4
def build_recipe(
    command,
    recipe_path,
    cbc,
    config,
    selected_features,
    notest: bool = False,
    skip_existing: bool = False,
    interactive: bool = False,
    skip_fast: bool = False,
):

    ydoc = render(recipe_path, config=config)
    # We need to assemble the variants for each output
    variants = {}
    # if we have an outputs section, use it to order the outputs
    if ydoc.get("outputs"):
        for o in ydoc["outputs"]:
            # inherit from global package
            pkg_meta = {}
            pkg_meta.update(ydoc["package"])
            pkg_meta.update(o["package"])
            o["package"] = pkg_meta

            build_meta = {}
            build_meta.update(ydoc.get("build") or {})
            build_meta.update(o.get("build") or {})
            o["build"] = build_meta

            o["selected_features"] = selected_features

            variants[o["package"]["name"]] = get_dependency_variants(
                o.get("requirements", {}), cbc, config
            )
    else:
        # we only have one output
        variants[ydoc["package"]["name"]] = get_dependency_variants(
            ydoc.get("requirements", {}), cbc, config
        )

    # this takes in all variants and outputs, builds a dependency tree and returns
    # the final metadata
    sorted_outputs = to_build_tree(ydoc, variants, config, cbc, selected_features)

    # then we need to solve and build from the bottom up
    # we can't first solve all packages without finalizing everything
    #
    # FLOW:
    # =====
    # - solve the package
    #   - solve the build environment, add weak run exports
    # - add run exports from deps!

    if command == "render":
        if boa_config.json:
            jlist = [o.to_json() for o in sorted_outputs]
            print(json.dumps(jlist, indent=4))
        else:
            for o in sorted_outputs:
                console.print(o)
        return sorted_outputs

    # TODO this should be done cleaner
    top_name = ydoc["package"]["name"]
    o0 = sorted_outputs[0]
    o0.is_first = True
    o0.config.compute_build_id(top_name)

    console.print("\n[yellow]Initializing mamba solver[/yellow]\n")

    if all([o.skip() for o in sorted_outputs]):
        console.print("All outputs skipped.\n")
        return

    full_render = command == "full-render"

    if skip_fast:
        build_pkgs = []

        archs = [o0.variant["target_platform"], "noarch"]
        for arch in archs:
            build_pkgs += [
                os.path.basename(x.rsplit("-", 1)[0])
                for x in glob.glob(
                    os.path.join(o0.config.output_folder, arch, "*.tar.bz2",)
                )
            ]

        del_idx = []
        for i, out in enumerate(sorted_outputs):
            if f"{out.name}-{out.version}" in build_pkgs:
                del_idx.append(i)

        for idx in del_idx[::-1]:
            console.print(
                f"[green]Fast skip of {sorted_outputs[idx].name}-{sorted_outputs[idx].version}"
            )
            del sorted_outputs[idx]

    # Do not download source if we might skip
    if not (skip_existing or full_render):
        console.print("\n[yellow]Downloading source[/yellow]\n")
        download_source(MetaData(recipe_path, o0), interactive)
        cached_source = o0.sections["source"]
    else:
        cached_source = {}

    for o in sorted_outputs:
        console.print(
            f"\n[yellow]Preparing environment for [bold]{o.name}[/bold][/yellow]\n"
        )
        refresh_solvers()
        o.finalize_solve(sorted_outputs)

        o.config._build_id = o0.config.build_id

        meta = MetaData(recipe_path, o)
        o.set_final_build_id(meta)

        if o.skip() or full_render:
            continue

        # TODO this doesn't work for noarch!
        if skip_existing:
            final_name = meta.dist()

            if os.path.exists(
                os.path.join(
                    o.config.output_folder,
                    o.variant["target_platform"],
                    final_name + ".tar.bz2",
                )
            ):
                console.print(f"\n[green]Skipping existing {final_name}\n")
                continue

        if "build" in o.transactions:
            if os.path.isdir(o.config.build_prefix):
                rm_rf(o.config.build_prefix)
            mkdir_p(o.config.build_prefix)
            try:
                o.transactions["build"]["transaction"].execute(
                    PrefixData(o.config.build_prefix),
                )
            except Exception:
                # This currently enables windows-multi-build...
                print("Could not instantiate build environment")

        if "host" in o.transactions:
            mkdir_p(o.config.host_prefix)
            o.transactions["host"]["transaction"].execute(
                PrefixData(o.config.host_prefix)
            )

        if cached_source != o.sections["source"]:
            download_source(meta, interactive)
            cached_source = o.sections["source"]

        console.print(f"\n[yellow]Starting build for [bold]{o.name}[/bold][/yellow]\n")

        final_outputs = build(meta, None, allow_interactive=interactive)

        stats = {}
        if final_outputs is not None:
            for final_out in final_outputs:
                if not notest:
                    run_test(
                        final_out,
                        o.config,
                        stats,
                        move_broken=False,
                        provision_only=False,
                    )
        # print(stats)

    for o in sorted_outputs:
        print("\n\n")
        console.print(o)
    return sorted_outputs
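
The `skip_fast` block above derives "name-version" keys from already-built tarballs by stripping the trailing build string from each filename. A self-contained sketch of just that filename rule (the sample paths are hypothetical):

import os

existing = [
    "/out/linux-64/libfoo-1.2.3-h1234abc_0.tar.bz2",
    "/out/noarch/bar-0.9-py_0.tar.bz2",
]
# "{name}-{version}-{build}.tar.bz2" -> "{name}-{version}"
built = [os.path.basename(path.rsplit("-", 1)[0]) for path in existing]
assert built == ["libfoo-1.2.3", "bar-0.9"]
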
Example #5
def build_recipe(args, recipe_path, cbc, config):

    if args.features:
        assert args.features.startswith("[") and args.features.endswith("]")
        features = [f.strip() for f in args.features[1:-1].split(",")]
    else:
        features = []

    selected_features = {}
    for f in features:
        if f.startswith("~"):
            selected_features[f[1:]] = False
        else:
            selected_features[f] = True

    ydoc = render(recipe_path, config=config)
    # We need to assemble the variants for each output
    variants = {}
    # if we have an outputs section, use it to order the outputs
    if ydoc.get("outputs"):
        for o in ydoc["outputs"]:
            # inherit from global package
            pkg_meta = {}
            pkg_meta.update(ydoc["package"])
            pkg_meta.update(o["package"])
            o["package"] = pkg_meta

            build_meta = {}
            build_meta.update(ydoc.get("build") or {})
            build_meta.update(o.get("build") or {})
            o["build"] = build_meta

            o["selected_features"] = selected_features

            variants[o["package"]["name"]] = get_dependency_variants(
                o.get("requirements", {}), cbc, config, features
            )
    else:
        # we only have one output
        variants[ydoc["package"]["name"]] = get_dependency_variants(
            ydoc.get("requirements", {}), cbc, config, features
        )

    # this takes in all variants and outputs, builds a dependency tree and returns
    # the final metadata
    sorted_outputs = to_build_tree(ydoc, variants, config, cbc, selected_features)

    # then we need to solve and build from the bottom up
    # we can't first solve all packages without finalizing everything
    #
    # FLOW:
    # =====
    # - solve the package
    #   - solve the build environment, add weak run exports
    # - add run exports from deps!

    if args.command == "render":
        for o in sorted_outputs:
            console.print(o)
        exit()

    # TODO this should be done cleaner
    top_name = ydoc["package"]["name"]
    o0 = sorted_outputs[0]
    o0.is_first = True
    o0.config.compute_build_id(top_name)

    console.print("\n[yellow]Initializing mamba solver[/yellow]\n")

    # Do not download source if we might skip
    if not args.skip_existing:
        console.print("\n[yellow]Downloading source[/yellow]\n")
        download_source(MetaData(recipe_path, o0), args.interactive)
        cached_source = o0.sections["source"]
    else:
        cached_source = {}
    for o in sorted_outputs:
        console.print(
            f"\n[yellow]Preparing environment for [bold]{o.name}[/bold][/yellow]\n"
        )
        refresh_solvers()
        o.finalize_solve(sorted_outputs)

        o.config._build_id = o0.config.build_id

        meta = MetaData(recipe_path, o)
        o.set_final_build_id(meta)

        if args.skip_existing:
            final_name = meta.dist()
            if os.path.exists(
                os.path.join(
                    o.config.output_folder,
                    o.variant["target_platform"],
                    final_name + ".tar.bz2",
                )
            ):
                console.print(f"\n[green]Skipping existing {final_name}\n")
                continue

        if "build" in o.transactions:
            if os.path.isdir(o.config.build_prefix):
                rm_rf(o.config.build_prefix)
            mkdir_p(o.config.build_prefix)
            try:
                o.transactions["build"]["transaction"].execute(
                    PrefixData(o.config.build_prefix),
                    o.transactions["build"]["pkg_cache"],
                )
            except Exception:
                # This currently enables windows-multi-build...
                print("Could not instantiate build environment")

        if "host" in o.transactions:
            mkdir_p(o.config.host_prefix)
            o.transactions["host"]["transaction"].execute(
                PrefixData(o.config.host_prefix), o.transactions["host"]["pkg_cache"],
            )

        if cached_source != o.sections["source"]:
            download_source(meta, args.interactive)
            cached_source = o.sections["source"]

        console.print(f"\n[yellow]Starting build for [bold]{o.name}[/bold][/yellow]\n")

        final_outputs = build(meta, None, allow_interactive=args.interactive)

        stats = {}
        if final_outputs is not None:
            for final_out in final_outputs:
                run_test(
                    final_out, o.config, stats, move_broken=False, provision_only=False,
                )
        # print(stats)

    for o in sorted_outputs:
        print("\n\n")
        console.print(o)
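
The `skip_existing` check above just tests whether the finalized tarball already sits in the output folder for the target platform. A minimal sketch of that lookup (function name and example paths hypothetical):

import os

def already_built(output_folder, target_platform, final_name):
    # final_name is meta.dist(), i.e. "{name}-{version}-{build}"
    return os.path.exists(
        os.path.join(output_folder, target_platform, final_name + ".tar.bz2")
    )

# e.g. already_built("/out", "linux-64", "libfoo-1.2.3-h1234abc_0")
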
Example #6
def main(config=None):
    print(banner)

    parser = argparse.ArgumentParser(
        description='Boa, the fast build tool for conda packages.')
    subparsers = parser.add_subparsers(help='sub-command help', dest='command')
    parent_parser = argparse.ArgumentParser(add_help=False)
    parent_parser.add_argument('recipe_dir', type=str)

    render_parser = subparsers.add_parser('render',
                                          parents=[parent_parser],
                                          help='render a recipe')
    build_parser = subparsers.add_parser('build',
                                         parents=[parent_parser],
                                         help='build a recipe')
    args = parser.parse_args()

    command = args.command

    folder = args.recipe_dir
    config = get_or_merge_config(None, {})
    config_files = find_config_files(folder)
    parsed_cfg = collections.OrderedDict()
    for f in config_files:
        parsed_cfg[f] = parse_config_file(f, config)
        normalized = {}
        for k in parsed_cfg[f].keys():
            if "_" in k:
                n = k.replace("_", "-")
                normalized[n] = parsed_cfg[f][k]
        parsed_cfg[f].update(normalized)

    # TODO just using latest config here, should merge!
    if config_files:
        cbc = parsed_cfg[config_files[-1]]
    else:
        cbc = {}

    update_index(os.path.dirname(config.output_folder),
                 verbose=config.debug,
                 threads=1)

    recipe_path = os.path.join(folder, "recipe.yaml")

    # step 1: parse YAML
    with open(recipe_path) as fi:
        loader = YAML(typ="safe")
        ydoc = loader.load(fi)

    # step 2: fill out context dict
    context_dict = ydoc.get("context") or {}
    jenv = jinja2.Environment()
    for key, value in context_dict.items():
        if isinstance(value, str):
            tmpl = jenv.from_string(value)
            context_dict[key] = tmpl.render(context_dict)

    if ydoc.get("context"):
        del ydoc["context"]

    # step 3: recursively loop over the entire recipe and render jinja with context
    jenv.globals.update(jinja_functions(config, context_dict))
    for key in ydoc:
        render_recursive(ydoc[key], context_dict, jenv)

    flatten_selectors(ydoc, ns_cfg(config))

    # We need to assemble the variants for each output

    variants = {}
    # if we have an outputs section, use it to order the outputs
    if ydoc.get("outputs"):

        # if ydoc.get("build"):
        #     raise InvalidRecipeError("You can either declare outputs, or build?")
        for o in ydoc["outputs"]:

            # inherit from global package
            pkg_meta = {}
            pkg_meta.update(ydoc["package"])
            pkg_meta.update(o["package"])
            o["package"] = pkg_meta

            build_meta = {}
            build_meta.update(ydoc.get("build") or {})
            build_meta.update(o.get("build") or {})
            o["build"] = build_meta
            variants[o["package"]["name"]] = get_dependency_variants(
                o["requirements"], cbc, config)
    else:
        # we only have one output
        variants[ydoc["package"]["name"]] = get_dependency_variants(
            ydoc["requirements"], cbc, config)

    # this takes in all variants and outputs, builds a dependency tree and returns
    # the final metadata
    sorted_outputs = to_build_tree(ydoc, variants, config)

    # then we need to solve and build from the bottom up
    # we can't first solve all packages without finalizing everything

    # - solve the package
    #   - solve the build environment, add weak run exports
    # - add run exports from deps!

    if command == 'render':
        for o in sorted_outputs:
            print(o)
        exit()

    solver = MambaSolver(["conda-forge"], "linux-64")
    for o in sorted_outputs:
        solver.replace_channels()
        o.finalize_solve(sorted_outputs, solver)
        print(o)

        o.config.compute_build_id(o.name)

        print(o.config.host_prefix)

        if 'build' in o.transactions:
            mkdir_p(o.config.build_prefix)
            print(o.transactions)
            o.transactions['build'].execute(
                PrefixData(o.config.build_prefix),
                PackageCacheData.first_writable().pkgs_dir)
        if 'host' in o.transactions:
            mkdir_p(o.config.host_prefix)
            print(o.transactions)
            o.transactions['host'].execute(
                PrefixData(o.config.host_prefix),
                PackageCacheData.first_writable().pkgs_dir)
        print(o.sections)
        stats = {}

        print("Final variant config")
        print(config.variant)
        print(o.variant)
        build(MetaData(recipe_path, o), None)

    for o in sorted_outputs:
        print("\n")
        print(o)
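
The config-file loop in this example duplicates every underscored key of the parsed conda_build_config under its hyphenated spelling, so later lookups work with either convention. The same normalization as a standalone sketch (sample data hypothetical):

parsed = {"c_compiler": ["gcc"], "target_platform": ["linux-64"]}
normalized = {k.replace("_", "-"): v for k, v in parsed.items() if "_" in k}
parsed.update(normalized)
assert parsed["c-compiler"] == ["gcc"]
assert parsed["target-platform"] == ["linux-64"]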