Example #1
    def __init__(
        self, graph: nx.DiGraph = None, name: Optional[str] = None, pr_limit: int = 0,
    ):
        super().__init__(graph=graph, pr_limit=pr_limit)
        # We are constraining the scope of this migrator
        with indir("../conda-forge-pinning-feedstock/recipe/migrations"), open(
            "arch_rebuild.txt", "r"
        ) as f:
            self.target_packages = set(f.read().split())

        self.name = name
        # filter the graph down to the target packages
        if self.target_packages:
            packages = self.target_packages.copy()
            for target in self.target_packages:
                if target in self.graph.nodes:
                    packages.update(nx.ancestors(self.graph, target))
            self.graph.remove_nodes_from([n for n in self.graph if n not in packages])
        # filter out stub packages and ignored packages
        for node in list(self.graph.nodes):
            if (
                node.endswith("_stub")
                or (node.startswith("m2-"))
                or (node.startswith("m2w64-"))
                or (node in self.ignored_packages)
                or (
                    self.graph.nodes[node]
                    .get("payload", {})
                    .get("meta_yaml", {})
                    .get("build", {})
                    .get("noarch")
                )
            ):
                pluck(self.graph, node)
        self.graph.remove_edges_from(nx.selfloop_edges(self.graph))
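
This constructor keeps only the target packages plus everything they transitively depend on, then plucks stubs, ignored packages and noarch recipes. Assuming the graph's edges run dependency -> dependent (the convention the later examples build explicitly with add_edge(dep, node)), nx.ancestors(graph, target) yields a target's transitive dependencies. A minimal sketch of that filtering pattern on a toy graph with made-up package names:

import networkx as nx

# Edges run dependency -> dependent, so ancestors of a node are its dependencies.
g = nx.DiGraph()
g.add_edges_from([
    ("zlib", "libpng"),
    ("libpng", "pillow"),
    ("python", "pillow"),
    ("python", "requests"),
])

targets = {"pillow"}
keep = set(targets)
for target in targets:
    if target in g:
        # ancestors == every node with a path *to* the target,
        # i.e. the target's transitive dependencies
        keep.update(nx.ancestors(g, target))

g.remove_nodes_from([n for n in g if n not in keep])
print(sorted(g.nodes))  # ['libpng', 'pillow', 'python', 'zlib'] -- 'requests' is dropped
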
Example #2
    def __init__(
        self,
        graph: nx.DiGraph = None,
        name: Optional[str] = None,
        pr_limit: int = 0,
    ):
        # rebuild the graph to only use edges from the arm osx requirements
        graph2 = nx.create_empty_copy(graph)
        for node, attrs in graph.nodes(data="payload"):
            for plat_arch in self.arches:
                reqs = attrs.get(
                    f"{plat_arch}_requirements",
                    attrs.get("osx_64_requirements",
                              attrs.get("requirements", {})),
                )
                host_deps = set(as_iterable(reqs.get("host", set())))
                run_deps = set(as_iterable(reqs.get("run", set())))
                deps = host_deps.union(run_deps)
                for dep in deps:
                    dep = graph.graph["outputs_lut"].get(dep, dep)
                    graph2.add_edge(dep, node)

        super().__init__(graph=graph2, pr_limit=pr_limit, check_solvable=False)

        assert (not self.check_solvable
                ), "We don't want to check solvability for arm osx!"

        # We are constraining the scope of this migrator
        with indir("../conda-forge-pinning-feedstock/recipe/migrations"), open(
                "osx_arm64.txt",
                "r",
        ) as f:
            self.target_packages = set(f.read().split())

        self.name = name
        # filter the graph down to the target packages
        if self.target_packages:
            self.target_packages.add("python")  # hack that is ~harmless?
            packages = self.target_packages.copy()
            for target in self.target_packages:
                if target in self.graph.nodes:
                    packages.update(nx.ancestors(self.graph, target))
            self.graph.remove_nodes_from(
                [n for n in self.graph if n not in packages])

        for name in self.excluded_dependencies:
            self.graph.remove_nodes_from(nx.descendants(graph, name))
        # filter out stub packages and ignored packages
        for node, attrs in list(self.graph.nodes("payload")):
            if not attrs:
                print(node)
            if (not attrs or node.endswith("_stub") or (node.startswith("m2-"))
                    or (node.startswith("m2w64-"))
                    or (node in self.ignored_packages) or
                (attrs.get("meta_yaml", {}).get("build", {}).get("noarch"))):
                pluck(self.graph, node)

        self.graph.remove_edges_from(nx.selfloop_edges(self.graph))
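
The first loop above rebuilds the dependency edges from per-platform requirement payloads: nx.create_empty_copy keeps the nodes and their attributes but drops every edge, and edges are then re-added one dependency at a time. A small self-contained sketch of that pattern, with an invented, simplified payload layout:

import networkx as nx

g = nx.DiGraph()
g.add_node("curl", payload={"requirements": {"host": {"openssl"}, "run": {"zlib"}}})
g.add_node("openssl", payload={"requirements": {}})
g.add_node("zlib", payload={"requirements": {}})
g.add_edge("stale-dep", "curl")  # an edge we do not want to carry over

g2 = nx.create_empty_copy(g)     # same nodes and attributes, zero edges
for node, attrs in g.nodes(data="payload"):
    reqs = (attrs or {}).get("requirements", {})
    for dep in set(reqs.get("host", set())) | set(reqs.get("run", set())):
        g2.add_edge(dep, node)   # dependency -> dependent

print(sorted(g2.edges()))  # [('openssl', 'curl'), ('zlib', 'curl')]
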
Example #3
def create_rebuild_graph(
    gx: nx.DiGraph,
    package_names: Sequence[str],
    excluded_feedstocks: MutableSet[str] = None,
    include_noarch: bool = False,
) -> nx.DiGraph:
    total_graph = copy.deepcopy(gx)
    excluded_feedstocks = set() if excluded_feedstocks is None else excluded_feedstocks
    # Always exclude the packages themselves from the migration
    for node in package_names:
        excluded_feedstocks.update(gx.graph["outputs_lut"].get(node, {node}))

    included_nodes = set()

    for node, node_attrs in gx.nodes.items():
        # always keep pinning
        if node == "conda-forge-pinning":
            continue
        attrs: "AttrsTypedDict" = node_attrs["payload"]
        requirements = attrs.get("requirements", {})
        host = requirements.get("host", set())
        build = requirements.get("build", set())
        bh = host or build
        only_python = "python" in package_names
        inclusion_criteria = bh & set(package_names) and (
            include_noarch or not all_noarch(attrs, only_python=only_python)
        )
        # get host/build, run and test and launder them through outputs
        # this should fix outputs related issues (eg gdal)
        all_reqs = requirements.get("run", set())
        if inclusion_criteria:
            all_reqs = all_reqs | requirements.get("test", set())
            all_reqs = all_reqs | (host or build)
        rq = get_deps_from_outputs_lut(
            all_reqs,
            gx.graph["outputs_lut"],
        )

        for e in list(total_graph.in_edges(node)):
            if e[0] not in rq:
                total_graph.remove_edge(*e)
        if inclusion_criteria:
            included_nodes.add(node)

    # all nodes have conda-forge-pinning as a child package
    total_graph.add_edges_from([(n, "conda-forge-pinning") for n in total_graph.nodes])
    included_nodes.add("conda-forge-pinning")  # it does not get added above

    # finally remove all nodes that should not be built from the graph
    for node in list(total_graph.nodes):
        # if there isn't a strict dependency or if the feedstock is excluded,
        # remove it while retaining the edges to its parents and children
        if (node not in included_nodes) or (node in excluded_feedstocks):
            pluck(total_graph, node)

    # post plucking we can have several strange cases, let's remove all self-loops
    total_graph.remove_edges_from(nx.selfloop_edges(total_graph))
    return total_graph
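
The "remove it while retaining the edges to its parents and children" comment describes what pluck() does to a node. The helper below is a hypothetical minimal stand-in written only to illustrate that behaviour; it is not the library's implementation:

import networkx as nx

def pluck_sketch(g: nx.DiGraph, node: str) -> None:
    if node in g:
        preds = list(g.predecessors(node))
        succs = list(g.successors(node))
        g.remove_node(node)
        # reconnect every parent to every child; on cycles this can introduce
        # self-loops, which is why the callers strip nx.selfloop_edges() afterwards
        g.add_edges_from((p, s) for p in preds for s in succs)

g = nx.DiGraph([("a", "b"), ("b", "c")])
pluck_sketch(g, "b")
print(sorted(g.edges()))  # [('a', 'c')] -- 'b' is gone, the path survives
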
Example #4
    def __init__(
        self,
        graph: nx.DiGraph = None,
        name: Optional[str] = None,
        pr_limit: int = 0,
        piggy_back_migrations: Optional[Sequence[MiniMigrator]] = None,
    ):
        # rebuild the graph to only use edges from the arm and power requirements
        graph2 = nx.create_empty_copy(graph)
        for node, attrs in graph.nodes(data="payload"):
            for plat_arch in self.arches:
                deps = set().union(
                    *attrs.get(
                        f"{plat_arch}_requirements",
                        attrs.get("requirements", {}),
                    ).values()
                )
                for dep in get_deps_from_outputs_lut(deps, graph.graph["outputs_lut"]):
                    graph2.add_edge(dep, node)

        super().__init__(
            graph=graph2,
            pr_limit=pr_limit,
            check_solvable=False,
            piggy_back_migrations=piggy_back_migrations,
        )

        assert not self.check_solvable, "We don't want to check solvability for aarch!"
        # We are constraining the scope of this migrator
        with indir("../conda-forge-pinning-feedstock/recipe/migrations"), open(
            "arch_rebuild.txt",
        ) as f:
            self.target_packages = set(f.read().split())

        self.name = name
        # filter the graph down to the target packages
        if self.target_packages:
            self.target_packages.add("python")  # hack that is ~harmless?
            packages = self.target_packages.copy()
            for target in self.target_packages:
                if target in self.graph.nodes:
                    packages.update(nx.ancestors(self.graph, target))
            self.graph.remove_nodes_from([n for n in self.graph if n not in packages])

        # filter out stub packages and ignored packages
        for node, attrs in list(self.graph.nodes("payload")):
            if (
                node.endswith("_stub")
                or (node.startswith("m2-"))
                or (node.startswith("m2w64-"))
                or (node in self.ignored_packages)
                or all_noarch(attrs)
            ):
                pluck(self.graph, node)
        self.graph.remove_edges_from(nx.selfloop_edges(self.graph))
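
Several examples launder dependency names through graph.graph["outputs_lut"] before adding edges. From the way it is used elsewhere (a package name maps to the set of feedstocks that produce it), a plausible reading of get_deps_from_outputs_lut is sketched below with made-up data; the real helper may differ:

outputs_lut = {"libblas": {"blas-feedstock"}, "numpy": {"numpy-feedstock"}}

def deps_to_feedstocks(deps, lut):
    # hypothetical stand-in for get_deps_from_outputs_lut()
    out = set()
    for dep in deps:
        out |= set(lut.get(dep, {dep}))
    return out

print(sorted(deps_to_feedstocks({"libblas", "python"}, outputs_lut)))
# ['blas-feedstock', 'python']
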
Example #5
    def __init__(
        self,
        graph: nx.DiGraph = None,
        name: Optional[str] = None,
        pr_limit: int = 0,
        piggy_back_migrations: Optional[Sequence[MiniMigrator]] = None,
    ):
        # rebuild the graph to only use edges from the arm osx requirements
        graph2 = nx.create_empty_copy(graph)
        for node, attrs in graph.nodes(data="payload"):
            for plat_arch in self.arches:
                reqs = attrs.get(
                    f"{plat_arch}_requirements",
                    attrs.get("osx_64_requirements",
                              attrs.get("requirements", {})),
                )
                host_deps = set(as_iterable(reqs.get("host", set())))
                run_deps = set(as_iterable(reqs.get("run", set())))
                deps = host_deps.union(run_deps)

                # We are including the compiler stubs here so that
                # excluded_dependencies work correctly.
                # Edges to these compiler stubs are removed afterwards
                build_deps = set(as_iterable(reqs.get("build", set())))
                for build_dep in build_deps:
                    if build_dep.endswith("_stub"):
                        deps.add(build_dep)
                for dep in get_deps_from_outputs_lut(
                        deps, graph.graph["outputs_lut"]):
                    graph2.add_edge(dep, node)

        super().__init__(
            graph=graph2,
            pr_limit=pr_limit,
            check_solvable=False,
            piggy_back_migrations=piggy_back_migrations,
        )

        assert (not self.check_solvable
                ), "We don't want to check solvability for arm osx!"

        self.name = name

        # Excluded dependencies need to be removed before non-target packages are
        # filtered out, so that if a target_package is excluded, its dependencies
        # are not added to the graph
        for excluded_dep in self.excluded_dependencies:
            self.graph.remove_nodes_from(
                nx.descendants(self.graph, excluded_dep))

        # We are constraining the scope of this migrator
        with indir("../conda-forge-pinning-feedstock/recipe/migrations"), open(
                "osx_arm64.txt") as f:
            self.target_packages = set(f.read().split())

        # filter the graph down to the target packages
        if self.target_packages:
            self.target_packages.add("python")  # hack that is ~harmless?
            packages = self.target_packages.copy()
            for target in self.target_packages:
                if target in self.graph.nodes:
                    packages.update(nx.ancestors(self.graph, target))
            self.graph.remove_nodes_from(
                [n for n in self.graph if n not in packages])

        # filter out stub packages and ignored packages
        for node, attrs in list(self.graph.nodes("payload")):
            if (not attrs or node.endswith("_stub") or (node.startswith("m2-"))
                    or (node.startswith("m2w64-"))
                    or (node in self.ignored_packages) or all_noarch(attrs)):
                pluck(self.graph, node)

        self.graph.remove_edges_from(nx.selfloop_edges(self.graph))
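
The excluded_dependencies handling above drops nx.descendants of each excluded name: with edges running dependency -> dependent, the descendants are everything that transitively depends on it, so the excluded dependency's whole reverse closure disappears while the node itself stays. A toy illustration with made-up names:

import networkx as nx

g = nx.DiGraph([("rust", "cryptography"), ("cryptography", "pyopenssl"), ("zlib", "curl")])
g.remove_nodes_from(nx.descendants(g, "rust"))
print(sorted(g.nodes))  # ['curl', 'rust', 'zlib']
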
Example #6
def create_migration_yaml_creator(migrators: MutableSequence[Migrator],
                                  gx: nx.DiGraph):
    cfp_gx = copy.deepcopy(gx)
    for node in list(cfp_gx.nodes):
        if node != "conda-forge-pinning":
            pluck(cfp_gx, node)

    print("pinning migrations", flush=True)
    with indir(os.environ["CONDA_PREFIX"]):
        pinnings = parse_config_file(
            "conda_build_config.yaml",
            config=Config(**CB_CONFIG),
        )
    feedstocks_to_be_repinned = []
    for pinning_name, package_pin_list in pinnings.items():
        # there are three things:
        # pinning_name - entry in pinning file
        # package_name - the actual package, could differ via `-` -> `_`
        #                from pinning_name
        # feedstock_name - the feedstock that outputs the package
        # we need the package names for the migrator itself but need the
        # feedstock for everything else

        # exclude non-package keys
        if pinning_name not in gx.graph["outputs_lut"]:
            # conda_build_config.yaml can't have `-` unlike our package names
            package_name = pinning_name.replace("_", "-")
        else:
            package_name = pinning_name

        # replace sub-packages with their feedstock names
        # TODO - we are grabbing one element almost at random here
        # the sorted call makes it stable at least?
        fs_name = next(
            iter(
                sorted(gx.graph["outputs_lut"].get(package_name,
                                                   {package_name})), ), )

        if ((fs_name in gx.nodes)
                and not gx.nodes[fs_name]["payload"].get("archived", False)
                and gx.nodes[fs_name]["payload"].get("version")
                and fs_name not in feedstocks_to_be_repinned):

            current_pins = list(map(str, package_pin_list))
            current_version = str(gx.nodes[fs_name]["payload"]["version"])

            # we need a special parsing for pinning stuff
            meta_yaml = parse_meta_yaml(
                gx.nodes[fs_name]["payload"]["raw_meta_yaml"],
                for_pinning=True,
            )

            # find the most stringent max pin for this feedstock if any
            pin_spec = ""
            for block in [meta_yaml] + (meta_yaml.get("outputs", []) or []):
                build = block.get("build", {}) or {}
                # and check the exported package is within the feedstock
                exports = [
                    p.get("max_pin", "")
                    for p in build.get("run_exports", [{}])
                    # make certain not direct hard pin
                    if isinstance(p, MutableMapping)
                    # ensure the export is for this package
                    and p.get("package_name", "") == package_name
                    # ensure the pinned package is in an output of the parent feedstock
                    and (fs_name in gx.graph["outputs_lut"].get(
                        p.get("package_name", ""), set()))
                ]
                if not exports:
                    continue
                # get the most stringent pin spec from the recipe block
                max_pin = max(exports, key=len)
                if len(max_pin) > len(pin_spec):
                    pin_spec = max_pin

            # fall back to the pinning file or "x"
            if not pin_spec:
                pin_spec = (pinnings["pin_run_as_build"].get(
                    pinning_name, {}).get("max_pin", "x")) or "x"

            current_pins = list(
                map(lambda x: re.sub("[^0-9.]", "", x).rstrip("."),
                    current_pins), )
            current_pins = [
                cp.strip() for cp in current_pins if cp.strip() != ""
            ]
            current_version = re.sub("[^0-9.]", "",
                                     current_version).rstrip(".")
            if not current_pins or current_version == "":
                continue

            current_pin = str(max(map(VersionOrder, current_pins)))
            # If the current pin and the current version are the same, there is
            # nothing to do even if the pin isn't accurate to the spec
            if current_pin != current_version and _outside_pin_range(
                    pin_spec,
                    current_pin,
                    current_version,
            ):
                feedstocks_to_be_repinned.append(fs_name)
                print(
                    "    %s:\n"
                    "        curr version: %s\n"
                    "        curr pin: %s\n"
                    "        pin_spec: %s" %
                    (pinning_name, current_version, current_pin, pin_spec),
                    flush=True,
                )
                migrators.append(
                    MigrationYamlCreator(
                        pinning_name,
                        current_version,
                        current_pin,
                        pin_spec,
                        fs_name,
                        cfp_gx,
                        full_graph=gx,
                    ), )
    print(" ", flush=True)
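
Before pins and versions are compared (via conda's VersionOrder), they are normalized by stripping everything except digits and dots. A standalone look at just that step, with invented input strings:

import re

raw_pins = ["9.3.0 *_cpp", "1.11", "1.11.*"]
pins = [re.sub("[^0-9.]", "", p).rstrip(".") for p in raw_pins]
pins = [p.strip() for p in pins if p.strip() != ""]
print(pins)  # ['9.3.0', '1.11', '1.11']
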
Example #7
def add_replacement_migrator(
    migrators: MutableSequence[Migrator],
    gx: nx.DiGraph,
    old_pkg: "PackageName",
    new_pkg: "PackageName",
    rationale: str,
    alt_migrator: Union[Migrator, None] = None,
) -> None:
    """Adds a migrator to replace one package with another.

    Parameters
    ----------
    migrators : list of Migrator
        The list of migrators to run.
    gx : graph
        The conda-forge dependency graph.
    old_pkg : str
        The package to be replaced.
    new_pkg : str
        The package to replace the `old_pkg`.
    rationale : str
        The reason for the migration. Should be a full statement.
    alt_migrator : Replacement migrator or a subclass thereof
        An alternate Replacement migrator to use for special tasks.

    """
    print(
        "========================================"
        "========================================",
        flush=True,
    )
    print(f"making replacement migrator for {old_pkg} -> {new_pkg}",
          flush=True)
    total_graph = copy.deepcopy(gx)

    for node, node_attrs in gx.nodes.items():
        requirements = node_attrs["payload"].get("requirements", {})
        rq = (requirements.get("build", set())
              | requirements.get("host", set())
              | requirements.get("run", set())
              | requirements.get("test", set()))
        pkgs = {old_pkg}
        old_pkg_c = pkgs.intersection(rq)

        if not old_pkg_c:
            pluck(total_graph, node)

    # post plucking we can have several strange cases, let's remove all self-loops
    total_graph.remove_edges_from(nx.selfloop_edges(total_graph))

    if alt_migrator is not None:
        migrators.append(
            alt_migrator(
                old_pkg=old_pkg,
                new_pkg=new_pkg,
                rationale=rationale,
                pr_limit=PR_LIMIT,
                graph=total_graph,
            ), )
    else:
        migrators.append(
            Replacement(
                old_pkg=old_pkg,
                new_pkg=new_pkg,
                rationale=rationale,
                pr_limit=PR_LIMIT,
                graph=total_graph,
            ), )
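
add_replacement_migrator keeps only the feedstocks whose requirements mention old_pkg and plucks everything else before handing the trimmed graph to the Replacement migrator. A compact sketch of that filter with invented payloads (plain node removal stands in for pluck):

import networkx as nx

old_pkg = "pycrypto"
g = nx.DiGraph()
g.add_node("pkg-a", payload={"requirements": {"run": {"pycrypto", "six"}}})
g.add_node("pkg-b", payload={"requirements": {"run": {"requests"}}})

for node, payload in list(g.nodes(data="payload")):
    reqs = payload.get("requirements", {})
    rq = set()
    for section in ("build", "host", "run", "test"):
        rq |= set(reqs.get(section, set()))
    if old_pkg not in rq:
        g.remove_node(node)

print(sorted(g.nodes))  # ['pkg-a']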