Example no. 1
0
File: utils.py  Project: cav71/boa
def get_config(folder, variant=None, additional_files=None):
    """Collect, de-duplicate and merge all conda-build config files for *folder*.

    Parameters
    ----------
    folder : str
        Recipe folder to search for config files.
    variant : dict, optional
        Initial variant mapping merged into the conda-build config.
    additional_files : list, optional
        Extra config file paths appended after the discovered ones.

    Returns
    -------
    tuple
        ``(combined_spec, config)`` — the merged variant spec and the
        conda-build ``Config`` object.
    """
    additional_files = additional_files or []
    variant = variant or {}
    config = get_or_merge_config(None, variant)

    # newer conda-build versions accept the config as a second argument
    if cb_split_version >= (3, 20, 5):
        config_files = find_config_files(folder, config)
    else:
        config_files = find_config_files(folder)

    all_files = [os.path.abspath(path) for path in config_files + additional_files]

    def _unique(items):
        # keep the first occurrence of each entry, preserving order
        seen = set()
        result = []
        for item in items:
            if item not in seen:
                seen.add(item)
                result.append(item)
        return result

    # de-duplicate on the reversed list so that later entries (command line)
    # win over earlier ones, then restore the original ordering
    all_files = _unique(all_files[::-1])[::-1]

    console.print(f"\nLoading config files: [green]{', '.join(all_files)}\n")

    parsed_cfg = collections.OrderedDict(
        (path, parse_config_file(path, config)) for path in all_files
    )

    # merge the specs; emits a debug message when a later spec overrides
    # a setting from an earlier one
    combined_spec = combine_specs(parsed_cfg, log_output=config.verbose)

    return combined_spec, config
Example no. 2
0
def get_config(folder):
    """Parse every conda-build config file found in *folder*.

    Each parsed config gets its underscore keys mirrored with dashed
    aliases (``some_key`` -> ``some-key``) so both spellings resolve.

    Returns
    -------
    tuple
        ``(cbc, config)`` — the parsed content of the last config file
        found (or ``{}`` when none exist) and the conda-build ``Config``
        object.
    """
    config = get_or_merge_config(None, {})
    config_files = find_config_files(folder)
    parsed_cfg = collections.OrderedDict()
    for f in config_files:
        parsed_cfg[f] = parse_config_file(f, config)
        # mirror underscore keys with dash-separated aliases
        normalized = {
            k.replace("_", "-"): v for k, v in parsed_cfg[f].items() if "_" in k
        }
        parsed_cfg[f].update(normalized)

    # TODO just using latest config here, should merge!
    cbc = parsed_cfg[config_files[-1]] if config_files else {}

    return cbc, config
Example no. 3
0
def get_config(folder, variant=None):
    """Parse every conda-build config file found in *folder*.

    Parameters
    ----------
    folder : str
        Recipe folder to search for config files.
    variant : dict, optional
        Initial variant mapping merged into the conda-build config.

    Returns
    -------
    tuple
        ``(cbc, config)`` — the parsed content of the last config file
        found (or ``{}`` when none exist) and the conda-build ``Config``
        object.
    """
    if not variant:
        variant = {}
    config = get_or_merge_config(None, variant)

    # newer conda-build versions accept the config as a second argument
    if cb_split_version >= (3, 20, 5):
        config_files = find_config_files(folder, config)
    else:
        config_files = find_config_files(folder)
    console.print(f"\nLoading config files: [green]{', '.join(config_files)}\n")
    parsed_cfg = collections.OrderedDict()

    for f in config_files:
        parsed_cfg[f] = parse_config_file(f, config)

    # TODO just using latest config here, should merge!
    cbc = parsed_cfg[config_files[-1]] if config_files else {}

    return cbc, config
Example no. 4
0
def create_migration_yaml_creator(migrators: MutableSequence[Migrator], gx: nx.DiGraph):
    """Append a ``MigrationYamlCreator`` to *migrators* for each stale pinning.

    Reads the global pinning file from the active conda environment and, for
    every pinned package whose feedstock version has moved outside the
    current pin's allowed range, schedules a repin migration.

    Parameters
    ----------
    migrators
        Mutable sequence that new ``MigrationYamlCreator`` instances are
        appended to (mutated in place).
    gx
        The feedstock dependency graph; node payloads provide version and
        raw meta.yaml data, ``gx.graph["outputs_lut"]`` maps package names
        to feedstock names.
    """
    # parse the global pinning file relative to the active conda env
    with indir(os.environ["CONDA_PREFIX"]):
        pinnings = parse_config_file(
            "conda_build_config.yaml", config=Config(**CB_CONFIG),
        )
    feedstocks_to_be_repinned = []
    for k, package_pin_list in pinnings.items():
        # we need the package names for the migrator itself but need the
        # feedstock for everything else
        package_name = k
        # exclude non-package keys
        if k not in gx.nodes and k not in gx.graph["outputs_lut"]:
            # conda_build_config.yaml can't have `-` unlike our package names
            k = k.replace("_", "-")
        # replace sub-packages with their feedstock names
        k = gx.graph["outputs_lut"].get(k, k)

        # only consider live, versioned feedstocks not already queued
        if (
            (k in gx.nodes)
            and not gx.nodes[k]["payload"].get("archived", False)
            and gx.nodes[k]["payload"].get("version")
            and k not in feedstocks_to_be_repinned
        ):

            current_pins = list(map(str, package_pin_list))
            current_version = str(gx.nodes[k]["payload"]["version"])

            # we need a special parsing for pinning stuff
            meta_yaml = parse_meta_yaml(
                gx.nodes[k]["payload"]["raw_meta_yaml"], for_pinning=True,
            )

            # find the most stringent max pin for this feedstock if any
            pin_spec = ""
            for block in [meta_yaml] + meta_yaml.get("outputs", []) or []:
                build = block.get("build", {}) or {}
                # and check the exported package is within the feedstock
                exports = [
                    p.get("max_pin", "")
                    for p in build.get("run_exports", [{}])
                    # make certain not direct hard pin
                    if isinstance(p, MutableMapping)
                    # if the pinned package is in an output of the parent feedstock
                    and (
                        gx.graph["outputs_lut"].get(p.get("package_name", ""), "") == k
                        # if the pinned package is the feedstock itself
                        or p.get("package_name", "") == k
                    )
                ]
                if not exports:
                    continue
                # get the most stringent pin spec from the recipe block
                # (longest string, e.g. "x.x.x" beats "x")
                max_pin = max(exports, key=len)
                if len(max_pin) > len(pin_spec):
                    pin_spec = max_pin

            # fall back to the pinning file or "x"
            if not pin_spec:
                pin_spec = (
                    pinnings["pin_run_as_build"].get(k, {}).get("max_pin", "x") or "x"
                )

            # keep only digits and dots in pins/version for comparison
            current_pins = list(
                map(lambda x: re.sub("[^0-9.]", "", x).rstrip("."), current_pins),
            )
            current_version = re.sub("[^0-9.]", "", current_version).rstrip(".")
            if current_pins == [""]:
                continue

            current_pin = str(max(map(VersionOrder, current_pins)))
            # If the current pin and the current version is the same nothing
            # to do even if the pin isn't accurate to the spec
            if current_pin != current_version and _outside_pin_range(
                pin_spec, current_pin, current_version,
            ):
                feedstocks_to_be_repinned.append(k)
                print(package_name, current_version, current_pin, pin_spec)
                migrators.append(
                    MigrationYamlCreator(
                        package_name, current_version, current_pin, pin_spec, k, gx,
                    ),
                )
Example no. 5
0
def create_migration_yaml_creator(migrators: MutableSequence[Migrator],
                                  gx: nx.DiGraph):
    """Append a ``MigrationYamlCreator`` to *migrators* for each stale pinning.

    Reads the global pinning file from the active conda environment and, for
    every pinned package whose feedstock version has moved outside the
    current pin's allowed range, schedules a repin migration. The migrators
    are created against a plucked copy of the graph containing only the
    ``conda-forge-pinning`` node.

    Parameters
    ----------
    migrators
        Mutable sequence that new ``MigrationYamlCreator`` instances are
        appended to (mutated in place).
    gx
        The feedstock dependency graph; node payloads provide version and
        raw meta.yaml data, ``gx.graph["outputs_lut"]`` maps package names
        to sets of feedstock names.
    """
    # reduce a deep copy of the graph down to just conda-forge-pinning;
    # this smaller graph is what each MigrationYamlCreator receives
    cfp_gx = copy.deepcopy(gx)
    for node in list(cfp_gx.nodes):
        if node != "conda-forge-pinning":
            pluck(cfp_gx, node)

    print("pinning migrations", flush=True)
    # parse the global pinning file relative to the active conda env
    with indir(os.environ["CONDA_PREFIX"]):
        pinnings = parse_config_file(
            "conda_build_config.yaml",
            config=Config(**CB_CONFIG),
        )
    feedstocks_to_be_repinned = []
    for pinning_name, package_pin_list in pinnings.items():
        # there are three things:
        # pinning_name - entry in pinning file
        # package_name - the actual package, could differ via `-` -> `_`
        #                from pinning_name
        # feedstock_name - the feedstock that outputs the package
        # we need the package names for the migrator itself but need the
        # feedstock for everything else

        # exclude non-package keys
        if pinning_name not in gx.graph["outputs_lut"]:
            # conda_build_config.yaml can't have `-` unlike our package names
            package_name = pinning_name.replace("_", "-")
        else:
            package_name = pinning_name

        # replace sub-packages with their feedstock names
        # TODO - we are grabbing one element almost at random here
        # the sorted call makes it stable at least?
        fs_name = next(
            iter(
                sorted(gx.graph["outputs_lut"].get(package_name,
                                                   {package_name})), ), )

        # only consider live, versioned feedstocks not already queued
        if ((fs_name in gx.nodes)
                and not gx.nodes[fs_name]["payload"].get("archived", False)
                and gx.nodes[fs_name]["payload"].get("version")
                and fs_name not in feedstocks_to_be_repinned):

            current_pins = list(map(str, package_pin_list))
            current_version = str(gx.nodes[fs_name]["payload"]["version"])

            # we need a special parsing for pinning stuff
            meta_yaml = parse_meta_yaml(
                gx.nodes[fs_name]["payload"]["raw_meta_yaml"],
                for_pinning=True,
            )

            # find the most stringent max pin for this feedstock if any
            pin_spec = ""
            for block in [meta_yaml] + meta_yaml.get("outputs", []) or []:
                build = block.get("build", {}) or {}
                # and check the exported package is within the feedstock
                exports = [
                    p.get("max_pin", "")
                    for p in build.get("run_exports", [{}])
                    # make certain not direct hard pin
                    if isinstance(p, MutableMapping)
                    # ensure the export is for this package
                    and p.get("package_name", "") == package_name
                    # ensure the pinned package is in an output of the parent feedstock
                    and (fs_name in gx.graph["outputs_lut"].get(
                        p.get("package_name", ""), set()))
                ]
                if not exports:
                    continue
                # get the most stringent pin spec from the recipe block
                # (longest string, e.g. "x.x.x" beats "x")
                max_pin = max(exports, key=len)
                if len(max_pin) > len(pin_spec):
                    pin_spec = max_pin

            # fall back to the pinning file or "x"
            if not pin_spec:
                pin_spec = (pinnings["pin_run_as_build"].get(
                    pinning_name, {}).get("max_pin", "x")) or "x"

            # keep only digits and dots in pins/version for comparison
            current_pins = list(
                map(lambda x: re.sub("[^0-9.]", "", x).rstrip("."),
                    current_pins), )
            current_pins = [
                cp.strip() for cp in current_pins if cp.strip() != ""
            ]
            current_version = re.sub("[^0-9.]", "",
                                     current_version).rstrip(".")
            if not current_pins or current_version == "":
                continue

            current_pin = str(max(map(VersionOrder, current_pins)))
            # If the current pin and the current version is the same nothing
            # to do even if the pin isn't accurate to the spec
            if current_pin != current_version and _outside_pin_range(
                    pin_spec,
                    current_pin,
                    current_version,
            ):
                feedstocks_to_be_repinned.append(fs_name)
                print(
                    "    %s:\n"
                    "        curr version: %s\n"
                    "        curr pin: %s\n"
                    "        pin_spec: %s" %
                    (pinning_name, current_version, current_pin, pin_spec),
                    flush=True,
                )
                migrators.append(
                    MigrationYamlCreator(
                        pinning_name,
                        current_version,
                        current_pin,
                        pin_spec,
                        fs_name,
                        cfp_gx,
                        full_graph=gx,
                    ), )
    print(" ", flush=True)
Example no. 6
0
def main(config=None):
    """Render the recipe found in the folder given as first CLI argument.

    Parses conda-build config files, renders the recipe's jinja2
    ``context`` section, flattens selectors, assembles the per-output
    variant matrix and sorts the outputs into build order.

    Parameters
    ----------
    config : optional
        Ignored — a fresh conda-build ``Config`` is always created.
    """
    folder = sys.argv[1]
    config = get_or_merge_config(None, {})
    config_files = find_config_files(folder)
    parsed_cfg = collections.OrderedDict()
    for f in config_files:
        parsed_cfg[f] = parse_config_file(f, config)
        print(parsed_cfg[f])
        # mirror underscore keys with dash-separated aliases
        normalized = {}
        for k in parsed_cfg[f].keys():
            if '_' in k:
                n = k.replace('_', '-')
                normalized[n] = parsed_cfg[f][k]
        parsed_cfg[f].update(normalized)
        print(parsed_cfg[f].keys())
    # TODO just using latest config here, should merge!
    # guard against no config files being found (previously an IndexError)
    cbc = parsed_cfg[config_files[-1]] if config_files else {}

    recipe_path = os.path.join(folder, "recipe.yaml")

    # step 1: parse YAML
    with open(recipe_path) as fi:
        loader = YAML(typ='safe')
        ydoc = loader.load(fi)
    print(ydoc)

    # step 2: render the context section with jinja2 so entries can
    # reference each other
    context_dict = ydoc.get("context") or {}
    jenv = jinja2.Environment()
    for key, value in context_dict.items():
        if isinstance(value, str):
            tmpl = jenv.from_string(value)
            context_dict[key] = tmpl.render(context_dict)

    if ydoc.get("context"):
        del ydoc["context"]

    # step 3: recursively loop over the entire recipe and render jinja with context
    jenv.globals.update(jinja_functions(config, context_dict))
    for key in ydoc:
        render_recursive(ydoc[key], context_dict, jenv)

    flatten_selectors(ydoc, ns_cfg(config))

    # We need to assemble the variants for each output

    variants = {}
    # if we have a outputs section, use that order the outputs
    if ydoc.get("outputs"):
        # if ydoc.get("build"):
        #     raise InvalidRecipeError("You can either declare outputs, or build?")
        for o in ydoc["outputs"]:
            variants[o["package"]["name"]] = get_dependency_variants(
                o["requirements"], cbc, config)
    else:
        # we only have one output
        variants[ydoc["package"]["name"]] = get_dependency_variants(
            ydoc["requirements"], cbc, config)

    # this takes in all variants and outputs, builds a dependency tree and returns
    # the final metadata
    sorted_outputs = to_build_tree(ydoc, variants, config)
Example no. 7
0
def main(config=None):
    """CLI entry point: render or build a conda recipe with boa.

    Parses the ``render``/``build`` subcommands, loads conda-build config
    files, renders the recipe's jinja2 context, assembles per-output
    variants, then solves and builds each output bottom-up.

    Parameters
    ----------
    config : optional
        Ignored — a fresh conda-build ``Config`` is always created.
    """
    print(banner)

    parser = argparse.ArgumentParser(
        description='Boa, the fast build tool for conda packages.')
    subparsers = parser.add_subparsers(help='sub-command help', dest='command')
    parent_parser = argparse.ArgumentParser(add_help=False)
    parent_parser.add_argument('recipe_dir', type=str)

    render_parser = subparsers.add_parser('render',
                                          parents=[parent_parser],
                                          help='render a recipe')
    build_parser = subparsers.add_parser('build',
                                         parents=[parent_parser],
                                         help='build a recipe')
    args = parser.parse_args()

    command = args.command

    folder = args.recipe_dir
    config = get_or_merge_config(None, {})
    config_files = find_config_files(folder)
    parsed_cfg = collections.OrderedDict()
    for f in config_files:
        parsed_cfg[f] = parse_config_file(f, config)
        # mirror underscore keys with dash-separated aliases
        normalized = {}
        for k in parsed_cfg[f].keys():
            if "_" in k:
                n = k.replace("_", "-")
                normalized[n] = parsed_cfg[f][k]
        parsed_cfg[f].update(normalized)

    # TODO just using latest config here, should merge!
    if config_files:
        cbc = parsed_cfg[config_files[-1]]
    else:
        cbc = {}

    update_index(os.path.dirname(config.output_folder),
                 verbose=config.debug,
                 threads=1)

    recipe_path = os.path.join(folder, "recipe.yaml")

    # step 1: parse YAML
    with open(recipe_path) as fi:
        loader = YAML(typ="safe")
        ydoc = loader.load(fi)

    # step 2: render the context section with jinja2 so entries can
    # reference each other
    context_dict = ydoc.get("context") or {}
    jenv = jinja2.Environment()
    for key, value in context_dict.items():
        if isinstance(value, str):
            tmpl = jenv.from_string(value)
            context_dict[key] = tmpl.render(context_dict)

    if ydoc.get("context"):
        del ydoc["context"]

    # step 3: recursively loop over the entire recipe and render jinja with context
    jenv.globals.update(jinja_functions(config, context_dict))
    for key in ydoc:
        render_recursive(ydoc[key], context_dict, jenv)

    flatten_selectors(ydoc, ns_cfg(config))

    # We need to assemble the variants for each output

    variants = {}
    # if we have a outputs section, use that order the outputs
    if ydoc.get("outputs"):

        # if ydoc.get("build"):
        #     raise InvalidRecipeError("You can either declare outputs, or build?")
        for o in ydoc["outputs"]:

            # inherit from global package
            pkg_meta = {}
            pkg_meta.update(ydoc["package"])
            pkg_meta.update(o["package"])
            o["package"] = pkg_meta

            build_meta = {}
            # fall back to an empty dict when the recipe has no global
            # build section (dict.update(None) raised a TypeError before)
            build_meta.update(ydoc.get("build") or {})
            build_meta.update(o.get("build") or {})
            o["build"] = build_meta
            variants[o["package"]["name"]] = get_dependency_variants(
                o["requirements"], cbc, config)
    else:
        # we only have one output
        variants[ydoc["package"]["name"]] = get_dependency_variants(
            ydoc["requirements"], cbc, config)

    # this takes in all variants and outputs, builds a dependency tree and returns
    # the final metadata
    sorted_outputs = to_build_tree(ydoc, variants, config)

    # then we need to solve and build from the bottom up
    # we can't first solve all packages without finalizing everything

    # - solve the package
    #   - solv build, add weak run exports to
    # - add run exports from deps!

    if command == 'render':
        for o in sorted_outputs:
            print(o)
        exit()

    solver = MambaSolver(["conda-forge"], "linux-64")
    for o in sorted_outputs:
        solver.replace_channels()
        o.finalize_solve(sorted_outputs, solver)
        print(o)

        o.config.compute_build_id(o.name)

        print(o.config.host_prefix)

        if 'build' in o.transactions:
            mkdir_p(o.config.build_prefix)
            print(o.transactions)
            o.transactions['build'].execute(
                PrefixData(o.config.build_prefix),
                PackageCacheData.first_writable().pkgs_dir)
        if 'host' in o.transactions:
            mkdir_p(o.config.host_prefix)
            print(o.transactions)
            o.transactions['host'].execute(
                PrefixData(o.config.host_prefix),
                PackageCacheData.first_writable().pkgs_dir)
        print(o.sections)
        stats = {}

        print("Final variant config")
        print(config.variant)
        print(o.variant)
        build(MetaData(recipe_path, o), None)

    # sorted_outputs
    # print(sorted_outputs[0].config.host_prefix)
    exit()

    # NOTE(review): unreachable — the exit() above always fires first
    for o in sorted_outputs:
        print("\n")
        print(o)