Example 1
    def __init__(
        self,
        graph: Optional[nx.DiGraph] = None,
        name: Optional[str] = None,
        pr_limit: int = 0,
    ):
        # rebuild the graph to use only edges from the osx-arm64 requirements
        graph2 = nx.create_empty_copy(graph)
        for node, attrs in graph.nodes(data="payload"):
            for plat_arch in self.arches:
                reqs = attrs.get(
                    f"{plat_arch}_requirements",
                    attrs.get("osx_64_requirements",
                              attrs.get("requirements", {})),
                )
                host_deps = set(as_iterable(reqs.get("host", set())))
                run_deps = set(as_iterable(reqs.get("run", set())))
                deps = host_deps.union(run_deps)
                for dep in deps:
                    dep = graph.graph["outputs_lut"].get(dep, dep)
                    graph2.add_edge(dep, node)

        super().__init__(graph=graph2, pr_limit=pr_limit, check_solvable=False)

        assert (not self.check_solvable
                ), "We don't want to check solvability for arm osx!"

        # We are constraining the scope of this migrator
        with indir("../conda-forge-pinning-feedstock/recipe/migrations"), open(
                "osx_arm64.txt",
                "r",
        ) as f:
            self.target_packages = set(f.read().split())

        self.name = name
        # filter the graph down to the target packages
        if self.target_packages:
            self.target_packages.add("python")  # hack that is ~harmless?
            packages = self.target_packages.copy()
            for target in self.target_packages:
                if target in self.graph.nodes:
                    packages.update(nx.ancestors(self.graph, target))
            self.graph.remove_nodes_from(
                [n for n in self.graph if n not in packages])

        for excluded_dep in self.excluded_dependencies:
            self.graph.remove_nodes_from(
                nx.descendants(self.graph, excluded_dep))
        # filter out stub packages and ignored packages
        for node, attrs in list(self.graph.nodes("payload")):
            if not attrs:
                print(node)  # debugging aid: this node has no payload
            if (not attrs or node.endswith("_stub") or (node.startswith("m2-"))
                    or (node.startswith("m2w64-"))
                    or (node in self.ignored_packages) or
                (attrs.get("meta_yaml", {}).get("build", {}).get("noarch"))):
                pluck(self.graph, node)

        self.graph.remove_edges_from(nx.selfloop_edges(self.graph))
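
Two networkx idioms carry this constructor: nx.create_empty_copy, to rebuild the graph with only the osx-arm64 edges, and nx.ancestors, to keep just the target packages and everything they transitively depend on. A minimal sketch on a toy graph (the names are made up; in the real graph an edge dep -> node means "node depends on dep"):

import networkx as nx

g = nx.DiGraph()
g.add_edges_from([("libfoo", "foo"), ("foo", "bar"), ("baz", "qux")])

# Same nodes, no edges; the constructor then re-adds only the edges
# implied by the per-platform requirements.
g2 = nx.create_empty_copy(g)
assert set(g2.nodes) == set(g.nodes) and not g2.edges

# Keep each target plus everything it transitively depends on.
targets = {"bar"}
keep = targets.copy()
for target in targets:
    keep.update(nx.ancestors(g, target))  # adds "foo" and "libfoo"
g.remove_nodes_from([n for n in g if n not in keep])
assert set(g.nodes) == {"libfoo", "foo", "bar"}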
Example 2
def extract_missing_packages(
    required_imports,
    questionable_imports,
    run_packages,
    package_by_import,
    import_by_package,
    node,
    nodes,
):
    exclude_packages = STATIC_EXCLUDES.union(
        {node, node.replace("-", "_"),
         node.replace("_", "-")}, )

    questionable_packages = set().union(*list(
        as_iterable(package_by_import.get(k, k))
        for k in questionable_imports))
    required_packages = set().union(*list(
        as_iterable(package_by_import.get(k, k)) for k in required_imports))

    run_imports = set().union(*list(
        as_iterable(import_by_package.get(k, k)) for k in run_packages))
    exclude_imports = set().union(*list(
        as_iterable(import_by_package.get(k, k)) for k in exclude_packages))

    d = {}
    # These are all normalized to packages
    # packages whose libraries are not imported
    cf_minus_df = (run_packages - required_packages - exclude_packages -
                   questionable_packages) & nodes
    if cf_minus_df:
        d.update(cf_minus_df=cf_minus_df)

    # These are all normalized to imports
    # imports which have no associated package in the meta.yaml
    df_minus_cf_imports = required_imports - run_imports - exclude_imports
    # Normalize to packages, the native interface for conda-forge
    # Note that the set overlap is a bit of a hack, sources could have imports we don't ship at all
    df_minus_cf = (set().union(*list(
        as_iterable(package_by_import.get(k, k)) for k in df_minus_cf_imports))
                   & nodes)
    if df_minus_cf:
        d.update(df_minus_cf=df_minus_cf)
    return d
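
A toy invocation of the function above, assuming "numpy" is not in STATIC_EXCLUDES and using hypothetical one-entry lookup tables (the real ones map import names to the packages that provide them, and back):

package_by_import = {"yaml": {"pyyaml"}}                      # hypothetical
import_by_package = {"pyyaml": {"yaml"}, "numpy": {"numpy"}}  # hypothetical

report = extract_missing_packages(
    required_imports={"yaml"},     # imports found in the source tree
    questionable_imports=set(),    # e.g. imports guarded by try/except
    run_packages={"numpy"},        # run requirements from meta.yaml
    package_by_import=package_by_import,
    import_by_package=import_by_package,
    node="mypkg",
    nodes={"numpy", "pyyaml"},     # packages known to the graph
)
# report == {"cf_minus_df": {"numpy"}, "df_minus_cf": {"pyyaml"}}
# "numpy" is declared but never imported; the "yaml" import has no
# corresponding run requirement, so "pyyaml" is reported as missing.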
Example 3
def populate_feedstock_attributes(
    name: str,
    sub_graph: LazyJson,
    meta_yaml: typing.Union[str, Response] = "",
    conda_forge_yaml: typing.Union[str, Response] = "",
    mark_not_archived=False,
    # build_sh: typing.Union[str, Response] = "",
    # pre_unlink: typing.Union[str, Response] = "",
    # post_link: typing.Union[str, Response] = "",
    # pre_link: typing.Union[str, Response] = "",
    # activate: typing.Union[str, Response] = "",
) -> LazyJson:
    """Parse the various configuration information into something usable

    Notes
    -----
    If the return is bad hand the response itself in so that it can be parsed
    for meaning.
    """
    sub_graph.update({"feedstock_name": name, "bad": False})

    if mark_not_archived:
        sub_graph.update({"archived": False})

    # handle all the raw strings
    if isinstance(meta_yaml, Response):
        sub_graph["bad"] = f"make_graph: {meta_yaml.status_code}"
        return sub_graph
    sub_graph["raw_meta_yaml"] = meta_yaml

    # Get the conda-forge.yml
    if isinstance(conda_forge_yaml, str):
        sub_graph["conda-forge.yml"] = {
            k: v
            for k, v in yaml.safe_load(conda_forge_yaml).items() if k in {
                "provider",
                "min_r_ver",
                "min_py_ver",
                "max_py_ver",
                "max_r_ver",
                "compiler_stack",
                "bot",
            }
        }

    yaml_dict = ChainDB(*[
        parse_meta_yaml(meta_yaml, platform=plat)
        for plat in ["win", "osx", "linux"]
    ])
    if not yaml_dict:
        logger.error(f"Something odd happened when parsing recipe {name}")
        sub_graph["bad"] = "make_graph: Could not parse"
        return sub_graph
    sub_graph["meta_yaml"] = _convert_to_dict(yaml_dict)
    meta_yaml = sub_graph["meta_yaml"]

    sub_graph["strong_exports"] = False
    # TODO: make certain to remove None
    requirements_dict = defaultdict(set)
    # parenthesized so a null "outputs" section falls back to [] instead of
    # raising TypeError on list + None
    for block in [meta_yaml] + (meta_yaml.get("outputs", []) or []):
        req: "RequirementsTypedDict" = block.get("requirements", {}) or {}
        if isinstance(req, list):
            requirements_dict["run"].update(set(req))
            continue
        for section in ["build", "host", "run"]:
            requirements_dict[section].update(
                list(as_iterable(req.get(section, []) or [])), )
        test: "TestTypedDict" = block.get("test", {})
        requirements_dict["test"].update(test.get("requirements", []) or [])
        requirements_dict["test"].update(test.get("requires", []) or [])
        run_exports = (block.get("build", {}) or {}).get("run_exports", {})
        if isinstance(run_exports, dict) and run_exports.get("strong"):
            sub_graph["strong_exports"] = True
    for k in list(requirements_dict.keys()):
        requirements_dict[k] = {v for v in requirements_dict[k] if v}

    sub_graph["total_requirements"] = dict(requirements_dict)
    sub_graph["requirements"] = {
        k: {pin_sep_pat.split(x)[0].lower()
            for x in v}
        for k, v in sub_graph["total_requirements"].items()
    }

    # handle multi outputs
    if "outputs" in yaml_dict:
        sub_graph["outputs_names"] = sorted(
            list({d.get("name", "")
                  for d in yaml_dict["outputs"]}), )

    # TODO: Write schema for dict
    # TODO: remove this
    req = get_requirements(yaml_dict)
    sub_graph["req"] = req

    keys = [("package", "name"), ("package", "version")]
    missing_keys = [k[1] for k in keys if k[1] not in yaml_dict.get(k[0], {})]
    source = yaml_dict.get("source", [])
    if isinstance(source, collections.abc.Mapping):
        source = [source]
    source_keys: Set[str] = set()
    for s in source:
        if not sub_graph.get("url"):
            sub_graph["url"] = s.get("url")
        source_keys |= s.keys()
    if "url" not in source_keys:
        missing_keys.append("url")
    if missing_keys:
        logger.error(f"Recipe {name} doesn't have a {', '.join(missing_keys)}")
    for k in keys:
        if k[1] not in missing_keys:
            sub_graph[k[1]] = yaml_dict[k[0]][k[1]]
    # reverse-sorted so that, e.g., "sha256" is preferred over "md5"
    kl = sorted(source_keys & hashlib.algorithms_available, reverse=True)
    if kl:
        sub_graph["hash_type"] = kl[0]
    return sub_graph
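
The requirements normalization above keys on pin_sep_pat, which is not shown here; a sketch assuming it splits a spec on the first space or pin character:

import re

pin_sep_pat = re.compile(r" |>|<|=|\[")  # assumed definition

assert pin_sep_pat.split("NumPy >=1.20")[0].lower() == "numpy"
assert pin_sep_pat.split("python_abi=3.9[build=*cp39*]")[0] == "python_abi"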
Example 4
    def filter(self,
               attrs: "AttrsTypedDict",
               not_bad_str_start: str = "") -> bool:
        scripts = as_iterable(
            attrs.get("meta_yaml", {}).get("build", {}).get("script", []), )
        return not bool(set(self.bad_install) & set(scripts))
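
To see the return values, here is a stand-in class exercising the method, with an assumed as_iterable that wraps bare strings in a one-element tuple (the real helper lives in conda-forge-tick's utilities):

def as_iterable(x):
    # assumed behavior: pass lists/tuples/sets through, wrap anything else
    return x if isinstance(x, (list, tuple, set)) else (x,)


class _StubMigrator:
    bad_install = ("python setup.py install",)  # hypothetical bad command

    def filter(self, attrs, not_bad_str_start=""):
        scripts = as_iterable(
            attrs.get("meta_yaml", {}).get("build", {}).get("script", []))
        return not bool(set(self.bad_install) & set(scripts))


m = _StubMigrator()
# A recipe using the bad install command is flagged (filter -> False) ...
assert m.filter(
    {"meta_yaml": {"build": {"script": "python setup.py install"}}}) is False
# ... while any other script passes (filter -> True).
assert m.filter(
    {"meta_yaml": {"build": {"script": ["pip install ."]}}}) is True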
Example 5
    def __init__(
        self,
        graph: Optional[nx.DiGraph] = None,
        name: Optional[str] = None,
        pr_limit: int = 0,
        piggy_back_migrations: Optional[Sequence[MiniMigrator]] = None,
    ):
        # rebuild the graph to use only edges from the osx-arm64 requirements
        graph2 = nx.create_empty_copy(graph)
        for node, attrs in graph.nodes(data="payload"):
            for plat_arch in self.arches:
                reqs = attrs.get(
                    f"{plat_arch}_requirements",
                    attrs.get("osx_64_requirements",
                              attrs.get("requirements", {})),
                )
                host_deps = set(as_iterable(reqs.get("host", set())))
                run_deps = set(as_iterable(reqs.get("run", set())))
                deps = host_deps.union(run_deps)

                # We are including the compiler stubs here so that
                # excluded_dependencies work correctly.
                # Edges to these compiler stubs are removed afterwards
                build_deps = set(as_iterable(reqs.get("build", set())))
                for build_dep in build_deps:
                    if build_dep.endswith("_stub"):
                        deps.add(build_dep)
                for dep in get_deps_from_outputs_lut(
                        deps, graph.graph["outputs_lut"]):
                    graph2.add_edge(dep, node)

        super().__init__(
            graph=graph2,
            pr_limit=pr_limit,
            check_solvable=False,
            piggy_back_migrations=piggy_back_migrations,
        )

        assert (not self.check_solvable
                ), "We don't want to check solvability for arm osx!"

        self.name = name

        # Excluded dependencies need to be removed before non-target packages
        # are filtered out, so that if a target package is excluded, its
        # dependencies are not added to the graph
        for excluded_dep in self.excluded_dependencies:
            self.graph.remove_nodes_from(
                nx.descendants(self.graph, excluded_dep))

        # We are constraining the scope of this migrator
        with indir("../conda-forge-pinning-feedstock/recipe/migrations"), open(
                "osx_arm64.txt", ) as f:
            self.target_packages = set(f.read().split())

        # filter the graph down to the target packages
        if self.target_packages:
            self.target_packages.add("python")  # hack that is ~harmless?
            packages = self.target_packages.copy()
            for target in self.target_packages:
                if target in self.graph.nodes:
                    packages.update(nx.ancestors(self.graph, target))
            self.graph.remove_nodes_from(
                [n for n in self.graph if n not in packages])

        # filter out stub packages and ignored packages
        for node, attrs in list(self.graph.nodes("payload")):
            if (not attrs or node.endswith("_stub") or (node.startswith("m2-"))
                    or (node.startswith("m2w64-"))
                    or (node in self.ignored_packages) or all_noarch(attrs)):
                pluck(self.graph, node)

        self.graph.remove_edges_from(nx.selfloop_edges(self.graph))
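
get_deps_from_outputs_lut is not shown in the example; a plausible sketch, assuming the outputs lookup table maps an output name to the feedstock(s) that build it, exactly as the per-dep graph.graph["outputs_lut"].get(dep, dep) lookup in Example 1 did:

def get_deps_from_outputs_lut(deps, outputs_lut):
    # Map each dependency name through the outputs table; names without
    # an entry are assumed to already be feedstock names.
    mapped = set()
    for dep in deps:
        mapped.update(as_iterable(outputs_lut.get(dep, dep)))
    return mapped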