Example #1
def build_recipe(args, recipe_path, cbc, config):

    if args.features:
        assert args.features.startswith("[") and args.features.endswith("]")
        features = [f.strip() for f in args.features[1:-1].split(",")]
    else:
        features = []

    selected_features = {}
    for f in features:
        if f.startswith("~"):
            selected_features[f[1:]] = False
        else:
            selected_features[f] = True

    ydoc = render(recipe_path, config=config)
    # We need to assemble the variants for each output
    variants = {}
    # if we have an outputs section, use it to order the outputs
    if ydoc.get("outputs"):
        for o in ydoc["outputs"]:
            # inherit from global package
            pkg_meta = {}
            pkg_meta.update(ydoc["package"])
            pkg_meta.update(o["package"])
            o["package"] = pkg_meta

            build_meta = {}
            build_meta.update(ydoc.get("build"))
            build_meta.update(o.get("build") or {})
            o["build"] = build_meta

            o["selected_features"] = selected_features

            variants[o["package"]["name"]] = get_dependency_variants(
                o.get("requirements", {}), cbc, config, features
            )
    else:
        # we only have one output
        variants[ydoc["package"]["name"]] = get_dependency_variants(
            ydoc.get("requirements", {}), cbc, config, features
        )

    # this takes in all variants and outputs, builds a dependency tree and returns
    # the final metadata
    sorted_outputs = to_build_tree(ydoc, variants, config, selected_features)

    # then we need to solve and build from the bottom up
    # we can't first solve all packages without finalizing everything
    #
    # FLOW:
    # =====
    # - solve the package
    #   - solve build, add weak run exports to
    # - add run exports from deps!

    if args.command == "render":
        for o in sorted_outputs:
            console.print(o)
        exit()

    # TODO this should be done cleaner
    top_name = ydoc["package"]["name"]
    o0 = sorted_outputs[0]
    o0.is_first = True
    o0.config.compute_build_id(top_name)

    console.print("\n[yellow]Initializing mamba solver[/yellow]\n")
    solver = MambaSolver([], context.subdir)

    console.print("\n[yellow]Downloading source[/yellow]\n")
    download_source(MetaData(recipe_path, o0), args.interactive)
    cached_source = o0.sections["source"]

    for o in sorted_outputs:
        console.print(
            f"\n[yellow]Preparing environment for [bold]{o.name}[/bold][/yellow]\n"
        )
        solver.replace_channels()
        o.finalize_solve(sorted_outputs, solver)

        o.config._build_id = o0.config.build_id

        if "build" in o.transactions:
            if os.path.isdir(o.config.build_prefix):
                rm_rf(o.config.build_prefix)
            mkdir_p(o.config.build_prefix)
            try:
                o.transactions["build"].execute(
                    PrefixData(o.config.build_prefix),
                    PackageCacheData.first_writable().pkgs_dir,
                )
            except Exception:
                # This currently enables windows-multi-build...
                print("Could not instantiate build environment")

        if "host" in o.transactions:
            mkdir_p(o.config.host_prefix)
            o.transactions["host"].execute(
                PrefixData(o.config.host_prefix),
                PackageCacheData.first_writable().pkgs_dir,
            )

        meta = MetaData(recipe_path, o)
        o.set_final_build_id(meta)

        if cached_source != o.sections["source"]:
            download_source(meta, args.interactive)

        console.print(f"\n[yellow]Starting build for [bold]{o.name}[/bold][/yellow]\n")

        build(meta, None, allow_interactive=args.interactive)

    for o in sorted_outputs:
        print("\n\n")
        console.print(o)
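
For reference, the --features string accepted by this first version follows a small ad-hoc grammar: a bracketed, comma-separated list in which a leading "~" disables a feature and a bare name enables it. A minimal standalone sketch of the same parsing (the helper name parse_features is hypothetical):

def parse_features(spec):
    # Mirror the parsing at the top of build_recipe: "[a, ~b]" enables a
    # and disables b.
    assert spec.startswith("[") and spec.endswith("]")
    features = [f.strip() for f in spec[1:-1].split(",")]
    return {
        (f[1:] if f.startswith("~") else f): not f.startswith("~")
        for f in features
    }

print(parse_features("[static, ~docs]"))  # {'static': True, 'docs': False}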
Example #2
def get_metadata(yml, config):
    with open(yml, "r") as fi:
        d = ruamel.yaml.safe_load(fi)
    o = Output(d, config)
    return MetaData(os.path.dirname(yml), o)
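
Note that recent ruamel.yaml releases deprecate the module-level safe_load used above. A minimal sketch of the equivalent load with the instance-based API (the helper name load_recipe_dict is hypothetical; get_metadata itself is unchanged):

from ruamel.yaml import YAML

def load_recipe_dict(path):
    # YAML(typ="safe") replaces the deprecated ruamel.yaml.safe_load.
    yaml = YAML(typ="safe")
    with open(path) as fi:
        return yaml.load(fi)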
Example #3
def build_recipe(
    command,
    recipe_path,
    cbc,
    config,
    selected_features,
    notest: bool = False,
    skip_existing: bool = False,
    interactive: bool = False,
    skip_fast: bool = False,
):

    ydoc = render(recipe_path, config=config)
    # We need to assemble the variants for each output
    variants = {}
    # if we have an outputs section, use it to order the outputs
    if ydoc.get("outputs"):
        for o in ydoc["outputs"]:
            # inherit from global package
            pkg_meta = {}
            pkg_meta.update(ydoc["package"])
            pkg_meta.update(o["package"])
            o["package"] = pkg_meta

            build_meta = {}
            build_meta.update(ydoc.get("build"))
            build_meta.update(o.get("build") or {})
            o["build"] = build_meta

            o["selected_features"] = selected_features

            variants[o["package"]["name"]] = get_dependency_variants(
                o.get("requirements", {}), cbc, config
            )
    else:
        # we only have one output
        variants[ydoc["package"]["name"]] = get_dependency_variants(
            ydoc.get("requirements", {}), cbc, config
        )

    # this takes in all variants and outputs, builds a dependency tree and returns
    # the final metadata
    sorted_outputs = to_build_tree(ydoc, variants, config, cbc, selected_features)

    # then we need to solve and build from the bottom up
    # we can't first solve all packages without finalizing everything
    #
    # FLOW:
    # =====
    # - solve the package
    #   - solve build, add weak run exports to
    # - add run exports from deps!

    if command == "render":
        if boa_config.json:
            jlist = [o.to_json() for o in sorted_outputs]
            print(json.dumps(jlist, indent=4))
        else:
            for o in sorted_outputs:
                console.print(o)
        return sorted_outputs

    # TODO this should be done cleaner
    top_name = ydoc["package"]["name"]
    o0 = sorted_outputs[0]
    o0.is_first = True
    o0.config.compute_build_id(top_name)

    console.print("\n[yellow]Initializing mamba solver[/yellow]\n")

    if all([o.skip() for o in sorted_outputs]):
        console.print("All outputs skipped.\n")
        return

    full_render = command == "full-render"

    if skip_fast:
        build_pkgs = []

        archs = [o0.variant["target_platform"], "noarch"]
        for arch in archs:
            build_pkgs += [
                os.path.basename(x.rsplit("-", 1)[0])
                for x in glob.glob(
                    os.path.join(o0.config.output_folder, arch, "*.tar.bz2")
                )
            ]

        del_idx = []
        for i in range(len(sorted_outputs)):
            if f"{sorted_outputs[i].name}-{sorted_outputs[i].version}" in build_pkgs:
                del_idx.append(i)

        for idx in del_idx[::-1]:
            console.print(
                f"[green]Fast skip of {sorted_outputs[idx].name}-{sorted_outputs[idx].version}"
            )
            del sorted_outputs[idx]

    # Do not download source if we might skip
    if not (skip_existing or full_render):
        console.print("\n[yellow]Downloading source[/yellow]\n")
        download_source(MetaData(recipe_path, o0), interactive)
        cached_source = o0.sections["source"]
    else:
        cached_source = {}

    for o in sorted_outputs:
        console.print(
            f"\n[yellow]Preparing environment for [bold]{o.name}[/bold][/yellow]\n"
        )
        refresh_solvers()
        o.finalize_solve(sorted_outputs)

        o.config._build_id = o0.config.build_id

        meta = MetaData(recipe_path, o)
        o.set_final_build_id(meta)

        if o.skip() or full_render:
            continue

        # TODO this doesn't work for noarch!
        if skip_existing:
            final_name = meta.dist()

            if os.path.exists(
                os.path.join(
                    o.config.output_folder,
                    o.variant["target_platform"],
                    final_name + ".tar.bz2",
                )
            ):
                console.print(f"\n[green]Skipping existing {final_name}\n")
                continue

        if "build" in o.transactions:
            if os.path.isdir(o.config.build_prefix):
                rm_rf(o.config.build_prefix)
            mkdir_p(o.config.build_prefix)
            try:
                o.transactions["build"]["transaction"].execute(
                    PrefixData(o.config.build_prefix),
                )
            except Exception:
                # This currently enables windows-multi-build...
                print("Could not instantiate build environment")

        if "host" in o.transactions:
            mkdir_p(o.config.host_prefix)
            o.transactions["host"]["transaction"].execute(
                PrefixData(o.config.host_prefix)
            )

        if cached_source != o.sections["source"]:
            download_source(meta, interactive)
            cached_source = o.sections["source"]

        console.print(f"\n[yellow]Starting build for [bold]{o.name}[/bold][/yellow]\n")

        final_outputs = build(meta, None, allow_interactive=interactive)

        stats = {}
        if final_outputs is not None:
            for final_out in final_outputs:
                if not notest:
                    run_test(
                        final_out,
                        o.config,
                        stats,
                        move_broken=False,
                        provision_only=False,
                    )
        # print(stats)

    for o in sorted_outputs:
        print("\n\n")
        console.print(o)
    return sorted_outputs
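
The skip_fast path above keys existing artifacts by "<name>-<version>". Splitting a package path on the last "-" drops the "<build>.tar.bz2" tail; a standalone sketch of that trick (the helper name fast_skip_key is hypothetical):

import os

def fast_skip_key(pkg_path):
    # Splitting off the final "-<build>.tar.bz2" segment leaves
    # "<name>-<version>", which skip_fast compares against each output.
    return os.path.basename(pkg_path.rsplit("-", 1)[0])

assert fast_skip_key("/out/linux-64/mypkg-1.2.3-h0123abc_0.tar.bz2") == "mypkg-1.2.3"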
Example #4
def _construct_metadata_for_test_from_package(package, config):
    recipe_dir, need_cleanup = utils.get_recipe_abspath(package)
    config.need_cleanup = need_cleanup
    config.recipe_dir = recipe_dir
    hash_input = {}

    info_dir = os.path.normpath(os.path.join(recipe_dir, "info"))
    with open(os.path.join(info_dir, "index.json")) as f:
        package_data = json.load(f)

    if package_data["subdir"] != "noarch":
        config.host_subdir = package_data["subdir"]
    # We may be testing an (old) package built without filename hashing.
    hash_input_path = os.path.join(info_dir, "hash_input.json")
    if os.path.isfile(hash_input_path):
        with open(hash_input_path) as f:
            hash_input = json.load(f)
    else:
        config.filename_hashing = False
        hash_input = {}
    # not actually used as a variant, since metadata will have been finalized.
    #    This is still necessary for computing the hash correctly though
    config.variant = hash_input
    log = utils.get_logger(__name__)

    # get absolute file location
    local_pkg_location = os.path.normpath(os.path.abspath(os.path.dirname(package)))

    # get last part of the path
    last_element = os.path.basename(local_pkg_location)
    is_channel = any(
        last_element.startswith(platform)
        for platform in ("win-", "linux-", "osx-", "noarch")
    )

    if not is_channel:
        log.warning(
            "Copying package to conda-build croot. No other packages will be "
            "available alongside yours unless you specify -c local. To avoid "
            "this warning, your package must reside in a channel structure "
            "with platform subfolders. See "
            "https://conda.io/docs/user-guide/tasks/create-custom-channels.html "
            "for what a valid channel looks like."
        )

        local_dir = os.path.join(config.croot, config.host_subdir)
        mkdir_p(local_dir)
        local_pkg_location = os.path.join(local_dir, os.path.basename(package))
        utils.copy_into(package, local_pkg_location)
        local_pkg_location = local_dir

    local_channel = os.path.dirname(local_pkg_location)

    # update indices in the channel
    update_index(local_channel, verbose=config.debug, threads=1)

    try:
        metadata = get_metadata(os.path.join(info_dir, "recipe", "recipe.yaml"), config)
    # no recipe in package.  Fudge metadata
    except SystemExit:
        # force the build string to line up - recomputing it would
        #    yield a different result
        metadata = MetaData.fromdict(
            {
                "package": {
                    "name": package_data["name"],
                    "version": package_data["version"],
                },
                "build": {
                    "number": int(package_data["build_number"]),
                    "string": package_data["build"],
                },
                "requirements": {"run": package_data["depends"]},
            },
            config=config,
        )
    # HACK: because the recipe is fully baked, detecting "used" variables no longer works.  The set
    #     of variables in the hash_input suffices, though.

    if metadata.noarch:
        metadata.config.variant["target_platform"] = "noarch"

    metadata.config.used_vars = list(hash_input.keys())
    urls = list(utils.ensure_list(metadata.config.channel_urls))
    local_path = url_path(local_channel)
    # replace local with the appropriate real channel.  Order is maintained.
    urls = [url if url != "local" else local_path for url in urls]
    if local_path not in urls:
        urls.insert(0, local_path)
    metadata.config.channel_urls = urls
    utils.rm_rf(metadata.config.test_dir)
    return metadata, hash_input
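
The channel detection above relies purely on directory naming: a package "lives in a channel" when its parent directory is a platform subfolder. A condensed sketch of the same check, assuming the usual conda subdir names (the helper name in_channel_layout is hypothetical):

import os

def in_channel_layout(pkg_path):
    # True when the package's parent directory looks like a platform
    # subfolder such as win-64, linux-aarch64, osx-arm64, or noarch.
    subdir = os.path.basename(os.path.dirname(os.path.abspath(pkg_path)))
    return subdir.startswith(("win-", "linux-", "osx-", "noarch"))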
Example #5
def build_recipe(args, recipe_path, cbc, config):

    if args.features:
        assert args.features.startswith("[") and args.features.endswith("]")
        features = [f.strip() for f in args.features[1:-1].split(",")]
    else:
        features = []

    selected_features = {}
    for f in features:
        if f.startswith("~"):
            selected_features[f[1:]] = False
        else:
            selected_features[f] = True

    ydoc = render(recipe_path, config=config)
    # We need to assemble the variants for each output
    variants = {}
    # if we have an outputs section, use it to order the outputs
    if ydoc.get("outputs"):
        for o in ydoc["outputs"]:
            # inherit from global package
            pkg_meta = {}
            pkg_meta.update(ydoc["package"])
            pkg_meta.update(o["package"])
            o["package"] = pkg_meta

            build_meta = {}
            build_meta.update(ydoc.get("build") or {})
            build_meta.update(o.get("build") or {})
            o["build"] = build_meta

            o["selected_features"] = selected_features

            variants[o["package"]["name"]] = get_dependency_variants(
                o.get("requirements", {}), cbc, config, features
            )
    else:
        # we only have one output
        variants[ydoc["package"]["name"]] = get_dependency_variants(
            ydoc.get("requirements", {}), cbc, config, features
        )

    # this takes in all variants and outputs, builds a dependency tree and returns
    # the final metadata
    sorted_outputs = to_build_tree(ydoc, variants, config, cbc, selected_features)

    # then we need to solve and build from the bottom up
    # we can't first solve all packages without finalizing everything
    #
    # FLOW:
    # =====
    # - solve the package
    #   - solve build, add weak run exports to
    # - add run exports from deps!

    if args.command == "render":
        for o in sorted_outputs:
            console.print(o)

    # TODO this should be done cleaner
    top_name = ydoc["package"]["name"]
    o0 = sorted_outputs[0]
    o0.is_first = True
    o0.config.compute_build_id(top_name)

    console.print("\n[yellow]Initializing mamba solver[/yellow]\n")

    # Do not download source if we might skip
    if not args.skip_existing:
        console.print("\n[yellow]Downloading source[/yellow]\n")
        download_source(MetaData(recipe_path, o0), args.interactive)
        cached_source = o0.sections["source"]
    else:
        cached_source = {}
    for o in sorted_outputs:
        console.print(
            f"\n[yellow]Preparing environment for [bold]{o.name}[/bold][/yellow]\n"
        )
        refresh_solvers()
        o.finalize_solve(sorted_outputs)

        o.config._build_id = o0.config.build_id

        meta = MetaData(recipe_path, o)
        o.set_final_build_id(meta)

        if args.skip_existing:
            final_name = meta.dist()
            if os.path.exists(
                os.path.join(
                    o.config.output_folder,
                    o.variant["target_platform"],
                    final_name + ".tar.bz2",
                )
            ):
                console.print(f"\n[green]Skipping existing {final_name}\n")
                continue

        if "build" in o.transactions:
            if os.path.isdir(o.config.build_prefix):
                rm_rf(o.config.build_prefix)
            mkdir_p(o.config.build_prefix)
            try:
                o.transactions["build"]["transaction"].execute(
                    PrefixData(o.config.build_prefix),
                    o.transactions["build"]["pkg_cache"],
                )
            except Exception:
                # This currently enables windows-multi-build...
                print("Could not instantiate build environment")

        if "host" in o.transactions:
            mkdir_p(o.config.host_prefix)
            o.transactions["host"]["transaction"].execute(
                PrefixData(o.config.host_prefix), o.transactions["host"]["pkg_cache"],
            )

        if cached_source != o.sections["source"]:
            download_source(meta, args.interactive)
            cached_source = o.sections["source"]

        console.print(f"\n[yellow]Starting build for [bold]{o.name}[/bold][/yellow]\n")

        final_outputs = build(meta, None, allow_interactive=args.interactive)

        stats = {}
        if final_outputs is not None:
            for final_out in final_outputs:
                run_test(
                    final_out, o.config, stats, move_broken=False, provision_only=False,
                )
        # print(stats)

    for o in sorted_outputs:
        print("\n\n")
        console.print(o)
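
The skip_existing branch in this variant reduces to a single path-existence check once the final dist name is known (Example #3's own TODO notes this check does not handle noarch outputs). A standalone sketch (the helper name already_built is hypothetical):

import os

def already_built(output_folder, target_platform, dist_name):
    # A build is skipped when the finalized
    # "<name>-<version>-<build>.tar.bz2" already exists in the output folder.
    return os.path.exists(
        os.path.join(output_folder, target_platform, dist_name + ".tar.bz2")
    )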