Esempio n. 1
0
def get_attrs(name, i):
    """Fetch and parse a conda-forge feedstock's ``meta.yaml`` into node attrs.

    Parameters
    ----------
    name : str
        Feedstock name (without the ``-feedstock`` suffix).
    i : int
        Index used only for progress logging.

    Returns
    -------
    dict
        Node attributes; ``"bad"`` is replaced by a reason string when
        fetching or parsing failed.
    """
    sub_graph = {
        "time": time.time(),
        "feedstock_name": name,
        # All feedstocks start out as good
        "bad": False,
    }

    logger.info((i, name))
    r = requests.get("https://raw.githubusercontent.com/"
                     "conda-forge/{}-feedstock/master/recipe/"
                     "meta.yaml".format(name))
    if r.status_code != 200:
        # Logger.warn is deprecated since Python 3.3 -> use warning()
        logger.warning("Something odd happened when fetching recipe "
                       "{}: {}".format(name, r.status_code))
        sub_graph["bad"] = "make_graph: {}".format(r.status_code)
        return sub_graph

    text = r.content.decode("utf-8")
    sub_graph["raw_meta_yaml"] = text
    # Merge the per-platform renderings so selector-gated keys all appear
    yaml_dict = ChainDB(*[
        parse_meta_yaml(text, platform=plat)
        for plat in ["win", "osx", "linux"]
    ])
    if not yaml_dict:
        logger.warning("Something odd happened when parsing recipe "
                       "{}".format(name))
        sub_graph["bad"] = "make_graph: Could not parse"
        return sub_graph
    sub_graph["meta_yaml"] = _convert_to_dict(yaml_dict)

    # TODO: Write schema for dict
    req = get_requirements(yaml_dict)
    sub_graph["req"] = req

    keys = [("package", "name"), ("package", "version")]
    missing_keys = [k[1] for k in keys if k[1] not in yaml_dict.get(k[0], {})]
    source = yaml_dict.get("source", [])
    if isinstance(source, collections.abc.Mapping):
        source = [source]
    source_keys = set()
    for s in source:
        # keep the first url found across the source entries
        if not sub_graph.get("url"):
            sub_graph["url"] = s.get("url")
        source_keys |= s.keys()
    if "url" not in source_keys:
        missing_keys.append("url")
    if missing_keys:
        logger.warning("Recipe {} doesn't have a {}".format(
            name, ", ".join(missing_keys)))
        sub_graph["bad"] = "make_graph: missing {}".format(
            ", ".join(missing_keys))
    for k in keys:
        if k[1] not in missing_keys:
            sub_graph[k[1]] = yaml_dict[k[0]][k[1]]
    # Pick the hash algorithm deterministically: `next(iter(set))` depends on
    # arbitrary set order, so choose the lexicographically largest key, as
    # the newer versions of this function do.
    kl = sorted(source_keys & hashlib.algorithms_available, reverse=True)
    if kl:
        sub_graph["hash_type"] = kl[0]
    return sub_graph
Esempio n. 2
0
 def _emit(self, x):
     """Flatten the nested (name, doc) pairs and forward a single emit.

     The merged document chain resolves in favor of earlier docs; a
     "start" document additionally records its original uid and time.
     """
     # flatten the nested setup into one alternating name/doc sequence
     flat = []
     for pair in x:
         flat.extend(pair)
     names, docs = flat[::2], flat[1::2]
     first_name = names[0]
     if first_name == "start":
         # remember which start document this stream originated from
         first_doc = docs[0]
         first_doc.update(
             original_start_uid=first_doc["uid"],
             original_start_time=first_doc["time"],
         )
     merged = _convert_to_dict(ChainDB(*docs))
     return super()._emit((first_name, merged))
Esempio n. 3
0
def populate_feedstock_attributes(
    name: str,
    sub_graph: LazyJson,
    meta_yaml: typing.Union[str, Response] = "",
    conda_forge_yaml: typing.Union[str, Response] = "",
    mark_not_archived=False,
    # build_sh: typing.Union[str, Response] = "",
    # pre_unlink: typing.Union[str, Response] = "",
    # post_link: typing.Union[str, Response] = "",
    # pre_link: typing.Union[str, Response] = "",
    # activate: typing.Union[str, Response] = "",
) -> LazyJson:
    """Parse the various configuration information into something usable

    Parameters
    ----------
    name : str
        The feedstock name.
    sub_graph : LazyJson
        Node attributes, updated in place and returned.
    meta_yaml : str or Response
        Raw ``meta.yaml`` text, or the failed HTTP response.
    conda_forge_yaml : str or Response
        Raw ``conda-forge.yml`` text, or the failed HTTP response.
    mark_not_archived : bool
        If True, mark the feedstock as not archived.

    Notes
    -----
    If the return is bad hand the response itself in so that it can be parsed
    for meaning.
    """
    sub_graph.update({"feedstock_name": name, "bad": False})

    if mark_not_archived:
        sub_graph.update({"archived": False})

    # handle all the raw strings
    # a Response here means the upstream fetch failed; record its status
    if isinstance(meta_yaml, Response):
        sub_graph["bad"] = f"make_graph: {meta_yaml.status_code}"
        return sub_graph
    sub_graph["raw_meta_yaml"] = meta_yaml

    # Get the conda-forge.yml (keep only the keys the bot consumes)
    if isinstance(conda_forge_yaml, str):
        sub_graph["conda-forge.yml"] = {
            k: v
            for k, v in yaml.safe_load(conda_forge_yaml).items() if k in {
                "provider",
                "min_r_ver",
                "min_py_ver",
                "max_py_ver",
                "max_r_ver",
                "compiler_stack",
                "bot",
            }
        }

    # merge the per-platform renderings so selector-gated keys all appear
    yaml_dict = ChainDB(*[
        parse_meta_yaml(meta_yaml, platform=plat)
        for plat in ["win", "osx", "linux"]
    ])
    if not yaml_dict:
        logger.error(f"Something odd happened when parsing recipe {name}")
        sub_graph["bad"] = "make_graph: Could not parse"
        return sub_graph
    sub_graph["meta_yaml"] = _convert_to_dict(yaml_dict)
    meta_yaml = sub_graph["meta_yaml"]

    sub_graph["strong_exports"] = False
    # TODO: make certain to remove None
    requirements_dict = defaultdict(set)
    # BUG FIX: `[meta_yaml] + meta_yaml.get("outputs", []) or []` parses as
    # `(... + ...) or []`, so a recipe with `outputs: null` raised TypeError
    # and the `or []` guard was dead code; parenthesize so it applies.
    for block in [meta_yaml] + (meta_yaml.get("outputs", []) or []):
        req: "RequirementsTypedDict" = block.get("requirements", {}) or {}
        if isinstance(req, list):
            requirements_dict["run"].update(set(req))
            continue
        for section in ["build", "host", "run"]:
            requirements_dict[section].update(
                list(as_iterable(req.get(section, []) or [])), )
        # guard against `test: null` the same way `requirements` is guarded
        test: "TestTypedDict" = block.get("test", {}) or {}
        requirements_dict["test"].update(test.get("requirements", []) or [])
        requirements_dict["test"].update(test.get("requires", []) or [])
        run_exports = (block.get("build", {}) or {}).get("run_exports", {})
        if isinstance(run_exports, dict) and run_exports.get("strong"):
            sub_graph["strong_exports"] = True
    # drop empty/None entries collected above
    for k in list(requirements_dict.keys()):
        requirements_dict[k] = {v for v in requirements_dict[k] if v}

    sub_graph["total_requirements"] = dict(requirements_dict)
    # "requirements" keeps only the package names (pins/versions stripped)
    sub_graph["requirements"] = {
        k: {pin_sep_pat.split(x)[0].lower()
            for x in v}
        for k, v in sub_graph["total_requirements"].items()
    }

    # handle multi outputs
    if "outputs" in yaml_dict:
        sub_graph["outputs_names"] = sorted(
            list({d.get("name", "")
                  for d in yaml_dict["outputs"]}), )

    # TODO: Write schema for dict
    # TODO: remove this
    req = get_requirements(yaml_dict)
    sub_graph["req"] = req

    keys = [("package", "name"), ("package", "version")]
    missing_keys = [k[1] for k in keys if k[1] not in yaml_dict.get(k[0], {})]
    source = yaml_dict.get("source", [])
    if isinstance(source, collections.abc.Mapping):
        source = [source]
    source_keys: Set[str] = set()
    for s in source:
        # keep the first url found across the source entries
        if not sub_graph.get("url"):
            sub_graph["url"] = s.get("url")
        source_keys |= s.keys()
    if "url" not in source_keys:
        missing_keys.append("url")
    if missing_keys:
        logger.error(f"Recipe {name} doesn't have a {', '.join(missing_keys)}")
    for k in keys:
        if k[1] not in missing_keys:
            sub_graph[k[1]] = yaml_dict[k[0]][k[1]]
    # deterministic choice: lexicographically largest available hash key
    kl = list(sorted(source_keys & hashlib.algorithms_available, reverse=True))
    if kl:
        sub_graph["hash_type"] = kl[0]
    return sub_graph
Esempio n. 4
0
def populate_feedstock_attributes(
    name: str,
    sub_graph: LazyJson,
    meta_yaml: typing.Union[str, Response] = "",
    conda_forge_yaml: typing.Union[str, Response] = "",
    mark_not_archived=False,
    # build_sh: typing.Union[str, Response] = "",
    # pre_unlink: typing.Union[str, Response] = "",
    # post_link: typing.Union[str, Response] = "",
    # pre_link: typing.Union[str, Response] = "",
    # activate: typing.Union[str, Response] = "",
) -> LazyJson:
    """Parse the various configuration information into something usable

    Renders ``meta.yaml`` once per (platform, arch) pair, merges the
    renderings, and stores both the merged metadata and the per-platform
    metadata/requirements on ``sub_graph`` (updated in place and returned).

    Notes
    -----
    If the return is bad hand the response itself in so that it can be parsed
    for meaning.
    """
    sub_graph.update({"feedstock_name": name, "bad": False})

    if mark_not_archived:
        sub_graph.update({"archived": False})

    # handle all the raw strings
    # a Response here means the upstream fetch failed; record its status
    if isinstance(meta_yaml, Response):
        sub_graph["bad"] = f"make_graph: {meta_yaml.status_code}"
        return sub_graph
    sub_graph["raw_meta_yaml"] = meta_yaml

    # Get the conda-forge.yml (keep only the keys the bot consumes)
    if isinstance(conda_forge_yaml, str):
        sub_graph["conda-forge.yml"] = {
            k: v
            for k, v in yaml.safe_load(conda_forge_yaml).items() if k in {
                "provider",
                "min_r_ver",
                "min_py_ver",
                "max_py_ver",
                "max_r_ver",
                "compiler_stack",
                "bot",
            }
        }

    # default platforms, plus any extra "<plat>_<arch>" provider keys
    # declared in conda-forge.yml
    # NOTE(review): if conda_forge_yaml was a Response, "conda-forge.yml"
    # may be absent here and this raises KeyError — confirm with callers.
    plat_arch = [("win", "64"), ("osx", "64"), ("linux", "64")]
    for k in set(sub_graph["conda-forge.yml"].get("provider", {})):
        if "_" in k:
            plat_arch.append(k.split("_"))
    varient_yamls = [
        parse_meta_yaml(meta_yaml, platform=plat, arch=arch)
        for plat, arch in plat_arch
    ]

    # merge the per-platform renderings so selector-gated keys all appear
    yaml_dict = ChainDB(*varient_yamls)
    if not yaml_dict:
        logger.error(f"Something odd happened when parsing recipe {name}")
        sub_graph["bad"] = "make_graph: Could not parse"
        return sub_graph

    sub_graph["meta_yaml"] = _convert_to_dict(yaml_dict)
    meta_yaml = sub_graph["meta_yaml"]

    # store the per-platform rendering and its requirements, e.g.
    # "linux_64_meta_yaml" / "linux_64_requirements"
    for k, v in zip(plat_arch, varient_yamls):
        plat_arch_name = "_".join(k)
        sub_graph[f"{plat_arch_name}_meta_yaml"] = v
        _, sub_graph[
            f"{plat_arch_name}_requirements"], _ = extract_requirements(v)

    (
        sub_graph["total_requirements"],
        sub_graph["requirements"],
        sub_graph["strong_exports"],
    ) = extract_requirements(meta_yaml)

    # handle multi outputs
    if "outputs" in yaml_dict:
        sub_graph["outputs_names"] = sorted(
            list({d.get("name", "")
                  for d in yaml_dict["outputs"]}), )

    # TODO: Write schema for dict
    # TODO: remove this
    req = get_requirements(yaml_dict)
    sub_graph["req"] = req

    # record package name/version when present in the merged metadata
    keys = [("package", "name"), ("package", "version")]
    missing_keys = [k[1] for k in keys if k[1] not in yaml_dict.get(k[0], {})]
    source = yaml_dict.get("source", [])
    if isinstance(source, collections.abc.Mapping):
        source = [source]
    source_keys: Set[str] = set()
    for s in source:
        # keep the first url found across the source entries
        if not sub_graph.get("url"):
            sub_graph["url"] = s.get("url")
        source_keys |= s.keys()
    for k in keys:
        if k[1] not in missing_keys:
            sub_graph[k[1]] = yaml_dict[k[0]][k[1]]
    # deterministic: lexicographically largest hash key present in source
    kl = list(sorted(source_keys & hashlib.algorithms_available, reverse=True))
    if kl:
        sub_graph["hash_type"] = kl[0]
    return sub_graph
Esempio n. 5
0
    def update(self, x, who=None):
        """Emit an event document for incoming data ``x`` from upstream ``who``.

        Normalizes ``x`` (Mapping or scalar) to a tuple aligned with the
        stream's data keys, emits a descriptor document for ``who`` the first
        time data arrives, then emits the event. Returns the list of results
        from the emits performed.
        """
        rl = []
        # If we have a start document ready to go, release it.
        if self.state == "stopped":  # pragma: no coverage
            raise RuntimeError("Can't emit events from a stopped state "
                               "it seems that a start was not emitted")

        descriptor_dict = self.descriptor_dicts[who]
        data_keys = descriptor_dict.setdefault("data_keys", {})

        # If there are no data_keys then we are taking in a dict and the
        # keys of the dict will be the keys for the stream
        if data_keys == {}:
            if not isinstance(x, Mapping):  # pragma: no coverage
                raise TypeError(f"No data keys were provided so expected "
                                f"Mapping, but {type(x)} found")
            data_keys = {k: {} for k in x}

        # If the incoming data is a dict extract the data as a tuple
        # (ordered to match data_keys)
        if isinstance(x, Mapping):
            x = tuple([x[k] for k in data_keys.keys()])
        # normalize the data to a tuple
        if not isinstance(x, tuple):
            tx = tuple([x])
        # XXX: need to do something where the data is a tuple!
        elif len(data_keys) == 1:
            tx = tuple([x])
        else:
            tx = x

        # If we haven't issued a descriptor yet make one
        if who not in self.descriptors:
            # clobber the user supplied metadata and the auto generated
            # metadata via ChainDB with resolution favoring the user's input
            descriptor, self.descriptors[who], _ = self.desc_fac(
                **_convert_to_dict(
                    ChainDB(
                        dict(
                            name="primary",
                            data_keys={
                                k: {
                                    "source": "analysis",
                                    "dtype": get_dtype(xx),
                                    "shape": getattr(xx, "shape", []),
                                    # NOTE(review): `data_keys[k].get(k, {})`
                                    # looks up key k *inside its own entry*;
                                    # possibly meant `data_keys.get(k, {})` —
                                    # confirm intent before changing.
                                    **data_keys[k].get(k, {}),
                                }
                                for k, xx in zip(data_keys, tx)
                            },
                            hints={
                                "analyzer": {
                                    "fields": sorted(list(data_keys))
                                }
                            },
                            object_keys={k: [k]
                                         for k in data_keys},
                        ),
                        descriptor_dict,
                    )),
                validate=False,
            )
            rl.append(self.emit(("descriptor", descriptor)))
            self.state = "described"
        # build and emit the event itself, stamping every key with "now"
        event = self.descriptors[who](
            timestamps={k: time.time()
                        for k in data_keys},
            data={k: v
                  for k, v in zip(data_keys, tx)},
            validate=False,
        )
        rl.append(self.emit(("event", event)))

        return rl
Esempio n. 6
0
def populate_feedstock_attributes(
    name: str,
    sub_graph: typing.MutableMapping,
    meta_yaml: typing.Union[str, Response] = "",
    conda_forge_yaml: typing.Union[str, Response] = "",
    mark_not_archived=False,
    feedstock_dir=None,
) -> typing.MutableMapping:
    """Parse the various configuration information into something usable

    When ``feedstock_dir`` contains rendered ``.ci_support/*.yaml`` configs
    they drive the per-platform renders; otherwise a default platform list
    (plus providers declared in conda-forge.yml) is used.

    Notes
    -----
    If the return is bad hand the response itself in so that it can be parsed
    for meaning.
    """
    sub_graph.update({"feedstock_name": name, "bad": False})

    if mark_not_archived:
        sub_graph.update({"archived": False})

    # handle all the raw strings
    # a Response here means the upstream fetch failed; record its status
    if isinstance(meta_yaml, Response):
        sub_graph["bad"] = f"make_graph: {meta_yaml.status_code}"
        return sub_graph
    sub_graph["raw_meta_yaml"] = meta_yaml

    # Get the conda-forge.yml (keep only the keys the bot consumes)
    if isinstance(conda_forge_yaml, str):
        sub_graph["conda-forge.yml"] = {
            k: v
            for k, v in yaml.safe_load(conda_forge_yaml).items() if k in {
                "provider",
                "min_r_ver",
                "min_py_ver",
                "max_py_ver",
                "max_r_ver",
                "compiler_stack",
                "bot",
            }
        }

    if (feedstock_dir is not None and
            len(glob.glob(os.path.join(feedstock_dir, ".ci_support",
                                       "*.yaml"))) > 0):
        recipe_dir = os.path.join(feedstock_dir, "recipe")
        ci_support_files = glob.glob(
            os.path.join(feedstock_dir, ".ci_support", "*.yaml"), )
        varient_yamls = []
        plat_arch = []
        for cbc_path in ci_support_files:
            # derive (platform, arch) from the config file name, e.g.
            # "linux_aarch64_....yaml" -> ("linux", "aarch64")
            cbc_name = os.path.basename(cbc_path)
            cbc_name_parts = cbc_name.replace(".yaml", "").split("_")
            plat = cbc_name_parts[0]
            if len(cbc_name_parts) == 1:
                arch = "64"
            else:
                if cbc_name_parts[1] in ["64", "aarch64", "ppc64le", "arm64"]:
                    arch = cbc_name_parts[1]
                else:
                    arch = "64"
            plat_arch.append((plat, arch))

            varient_yamls.append(
                parse_meta_yaml(
                    meta_yaml,
                    platform=plat,
                    arch=arch,
                    recipe_dir=recipe_dir,
                    cbc_path=cbc_path,
                ), )

            # collapse them down
            # NOTE(review): this collapse runs on *every* iteration of the
            # cbc_path loop, and the zip loop variable below shadows the
            # `plat_arch` list. The end result appears equivalent to
            # collapsing once after the loop, but this is O(n^2) — confirm
            # ChainDB merge semantics before dedenting.
            final_cfgs = {}
            for plat_arch, varyml in zip(plat_arch, varient_yamls):
                if plat_arch not in final_cfgs:
                    final_cfgs[plat_arch] = []
                final_cfgs[plat_arch].append(varyml)
            for k in final_cfgs:
                ymls = final_cfgs[k]
                final_cfgs[k] = _convert_to_dict(ChainDB(*ymls))
            # rebuild the (deduplicated) parallel lists
            plat_arch = []
            varient_yamls = []
            for k, v in final_cfgs.items():
                plat_arch.append(k)
                varient_yamls.append(v)
    else:
        # default platforms plus any "<plat>_<arch>" provider keys declared
        # in conda-forge.yml
        plat_arch = [("win", "64"), ("osx", "64"), ("linux", "64")]
        for k in set(sub_graph["conda-forge.yml"].get("provider", {})):
            if "_" in k:
                plat_arch.append(k.split("_"))
        varient_yamls = [
            parse_meta_yaml(meta_yaml, platform=plat, arch=arch)
            for plat, arch in plat_arch
        ]

    # this makes certain that we have consistent ordering
    sorted_varient_yamls = [
        x for _, x in sorted(zip(plat_arch, varient_yamls))
    ]
    yaml_dict = ChainDB(*sorted_varient_yamls)
    if not yaml_dict:
        logger.error(f"Something odd happened when parsing recipe {name}")
        sub_graph["bad"] = "make_graph: Could not parse"
        return sub_graph

    sub_graph["meta_yaml"] = _convert_to_dict(yaml_dict)
    meta_yaml = sub_graph["meta_yaml"]

    # store the per-platform rendering and its requirements, e.g.
    # "linux_64_meta_yaml" / "linux_64_requirements"
    for k, v in zip(plat_arch, varient_yamls):
        plat_arch_name = "_".join(k)
        sub_graph[f"{plat_arch_name}_meta_yaml"] = v
        _, sub_graph[
            f"{plat_arch_name}_requirements"], _ = extract_requirements(v)

    (
        sub_graph["total_requirements"],
        sub_graph["requirements"],
        sub_graph["strong_exports"],
    ) = extract_requirements(meta_yaml)

    # handle multi outputs
    if "outputs" in yaml_dict:
        sub_graph["outputs_names"] = sorted(
            list({d.get("name", "")
                  for d in yaml_dict["outputs"]}), )

    # TODO: Write schema for dict
    # TODO: remove this
    req = get_requirements(yaml_dict)
    sub_graph["req"] = req

    # record package name/version when present in the merged metadata
    keys = [("package", "name"), ("package", "version")]
    missing_keys = [k[1] for k in keys if k[1] not in yaml_dict.get(k[0], {})]
    source = yaml_dict.get("source", [])
    if isinstance(source, collections.abc.Mapping):
        source = [source]
    source_keys: Set[str] = set()
    for s in source:
        # keep the first url found across the source entries
        if not sub_graph.get("url"):
            sub_graph["url"] = s.get("url")
        source_keys |= s.keys()
    for k in keys:
        if k[1] not in missing_keys:
            sub_graph[k[1]] = yaml_dict[k[0]][k[1]]
    # deterministic: lexicographically largest hash key present in source
    kl = list(sorted(source_keys & hashlib.algorithms_available, reverse=True))
    if kl:
        sub_graph["hash_type"] = kl[0]
    return sub_graph
Esempio n. 7
0
def get_attrs(name, i):
    """Build the node attributes for feedstock *name*, persisted as LazyJson.

    Parameters
    ----------
    name : str
        Feedstock name (without the ``-feedstock`` suffix).
    i : int
        Index used only for progress logging.

    Returns
    -------
    LazyJson
        The lazily-persisted attribute mapping (``node_attrs/<name>.json``).
    """
    lzj = LazyJson(f'node_attrs/{name}.json')
    with lzj as sub_graph:
        sub_graph.update({
            "feedstock_name": name,
            # All feedstocks start out as good
            "bad": False,
        })

        logger.info((i, name))

        def fetch_file(filepath):
            # Fetch one raw file from the feedstock's master branch; on
            # failure mark the node bad and flag the caller to bail out.
            r = requests.get(
                "https://raw.githubusercontent.com/"
                "conda-forge/{}-feedstock/master/{}".format(name, filepath)
            )
            failed = False
            if r.status_code != 200:
                # Logger.warn is deprecated since Python 3.3 -> use warning()
                logger.warning(
                    "Something odd happened when fetching recipe "
                    "{}: {}".format(name, r.status_code)
                )
                sub_graph["bad"] = "make_graph: {}".format(r.status_code)
                failed = True

            text = r.content.decode("utf-8")
            return text, failed

        text, failed = fetch_file('recipe/meta.yaml')
        if failed:
            return sub_graph
        sub_graph["raw_meta_yaml"] = text
        # merge the per-platform renderings so selector-gated keys all appear
        yaml_dict = ChainDB(
            *[parse_meta_yaml(text, platform=plat) for plat in ["win", "osx", "linux"]]
        )
        if not yaml_dict:
            logger.warning("Something odd happened when parsing recipe " "{}".format(name))
            sub_graph["bad"] = "make_graph: Could not parse"
            return sub_graph
        sub_graph["meta_yaml"] = _convert_to_dict(yaml_dict)

        # Get the conda-forge.yml (keep only the keys the bot consumes)
        text, failed = fetch_file('conda-forge.yml')
        if failed:
            return sub_graph
        sub_graph["conda-forge.yml"] = {
            k: v for k, v in yaml.safe_load(text).items()
            if k in {'provider', 'max_py_ver', 'max_r_ver', 'compiler_stack'}
        }

        # TODO: Write schema for dict
        req = get_requirements(yaml_dict)
        sub_graph["req"] = req

        keys = [("package", "name"), ("package", "version")]
        missing_keys = [k[1] for k in keys if k[1] not in yaml_dict.get(k[0], {})]
        source = yaml_dict.get("source", [])
        if isinstance(source, collections.abc.Mapping):
            source = [source]
        source_keys = set()
        for s in source:
            # keep the first url found across the source entries
            if not sub_graph.get("url"):
                sub_graph["url"] = s.get("url")
            source_keys |= s.keys()
        if "url" not in source_keys:
            missing_keys.append("url")
        if missing_keys:
            logger.warning("Recipe {} doesn't have a {}".format(name, ", ".join(missing_keys)))
        for k in keys:
            if k[1] not in missing_keys:
                sub_graph[k[1]] = yaml_dict[k[0]][k[1]]
        # deterministic: lexicographically largest hash key present in source
        k = sorted(source_keys & hashlib.algorithms_available, reverse=True)
        if k:
            sub_graph["hash_type"] = k[0]
    return lzj
Esempio n. 8
0
def populate_feedstock_attributes(
    name: str,
    sub_graph: typing.MutableMapping,
    meta_yaml: typing.Union[str, Response] = "",
    conda_forge_yaml: typing.Union[str, Response] = "",
    mark_not_archived=False,
    feedstock_dir=None,
) -> typing.MutableMapping:
    """Parse the various configuration information into something usable

    When ``feedstock_dir`` contains rendered ``.ci_support/*.yaml`` configs
    they drive the per-platform renders; otherwise a default platform list
    (plus providers declared in conda-forge.yml) is used. ``sub_graph`` is
    updated in place and returned.

    Notes
    -----
    If the return is bad hand the response itself in so that it can be parsed
    for meaning.
    """
    sub_graph.update({
        "feedstock_name": name,
        "bad": False,
        "branch": "master"
    })

    if mark_not_archived:
        sub_graph.update({"archived": False})

    # handle all the raw strings
    # a Response here means the upstream fetch failed; record its status
    if isinstance(meta_yaml, Response):
        sub_graph["bad"] = f"make_graph: {meta_yaml.status_code}"
        return sub_graph

    # strip out old keys - this removes old platforms when one gets disabled
    for key in list(sub_graph.keys()):
        if key.endswith("meta_yaml") or key.endswith(
                "requirements") or key == "req":
            del sub_graph[key]

    sub_graph["raw_meta_yaml"] = meta_yaml

    # Get the conda-forge.yml (keep only the whitelisted keys)
    if isinstance(conda_forge_yaml, str):
        sub_graph["conda-forge.yml"] = {
            k: v
            for k, v in yaml.safe_load(conda_forge_yaml).items()
            if k in CONDA_FORGE_YML_KEYS_TO_KEEP
        }

    if feedstock_dir is not None:
        LOGGER.debug(
            "# of ci support files: %s",
            len(glob.glob(os.path.join(feedstock_dir, ".ci_support",
                                       "*.yaml"))),
        )

    try:
        if (feedstock_dir is not None and len(
                glob.glob(os.path.join(feedstock_dir, ".ci_support",
                                       "*.yaml"))) > 0):
            recipe_dir = os.path.join(feedstock_dir, "recipe")
            ci_support_files = glob.glob(
                os.path.join(feedstock_dir, ".ci_support", "*.yaml"), )
            varient_yamls = []
            plat_arch = []
            for cbc_path in ci_support_files:
                # derive (platform, arch) from the config file name, e.g.
                # "linux_aarch64_....yaml" -> ("linux", "aarch64")
                cbc_name = os.path.basename(cbc_path)
                cbc_name_parts = cbc_name.replace(".yaml", "").split("_")
                plat = cbc_name_parts[0]
                if len(cbc_name_parts) == 1:
                    arch = "64"
                else:
                    if cbc_name_parts[1] in [
                            "64", "aarch64", "ppc64le", "arm64"
                    ]:
                        arch = cbc_name_parts[1]
                    else:
                        arch = "64"
                plat_arch.append((plat, arch))

                varient_yamls.append(
                    parse_meta_yaml(
                        meta_yaml,
                        platform=plat,
                        arch=arch,
                        recipe_dir=recipe_dir,
                        cbc_path=cbc_path,
                    ), )

                # sometimes the requirements come out to None and this ruins the
                # aggregated meta_yaml
                if "requirements" in varient_yamls[-1]:
                    for section in ["build", "host", "run"]:
                        # We make sure to set a section only if it is actually in
                        # the recipe. Adding a section when it is not there might
                        # confuse migrators trying to move CB2 recipes to CB3.
                        if section in varient_yamls[-1]["requirements"]:
                            val = varient_yamls[-1]["requirements"].get(
                                section, [])
                            varient_yamls[-1]["requirements"][
                                section] = val or []

                # collapse them down
                # NOTE(review): this collapse runs on *every* iteration of
                # the cbc_path loop, and the zip loop variable below shadows
                # the `plat_arch` list. The end result appears equivalent to
                # collapsing once after the loop, but it is O(n^2) — confirm
                # ChainDB merge semantics before dedenting.
                final_cfgs = {}
                for plat_arch, varyml in zip(plat_arch, varient_yamls):
                    if plat_arch not in final_cfgs:
                        final_cfgs[plat_arch] = []
                    final_cfgs[plat_arch].append(varyml)
                for k in final_cfgs:
                    ymls = final_cfgs[k]
                    final_cfgs[k] = _convert_to_dict(ChainDB(*ymls))
                # rebuild the (deduplicated) parallel lists
                plat_arch = []
                varient_yamls = []
                for k, v in final_cfgs.items():
                    plat_arch.append(k)
                    varient_yamls.append(v)
        else:
            # default platforms plus any "<plat>_<arch>" provider keys
            # declared in conda-forge.yml
            plat_arch = [("win", "64"), ("osx", "64"), ("linux", "64")]
            for k in set(sub_graph["conda-forge.yml"].get("provider", {})):
                if "_" in k:
                    plat_arch.append(k.split("_"))
            varient_yamls = [
                parse_meta_yaml(meta_yaml, platform=plat, arch=arch)
                for plat, arch in plat_arch
            ]
    except Exception as e:
        import traceback

        # record the render error on the node, then propagate it
        trb = traceback.format_exc()
        sub_graph["bad"] = f"make_graph: render error {e}\n{trb}"
        raise

    LOGGER.debug("platforms: %s", plat_arch)

    # this makes certain that we have consistent ordering
    sorted_varient_yamls = [
        x for _, x in sorted(zip(plat_arch, varient_yamls))
    ]
    yaml_dict = ChainDB(*sorted_varient_yamls)
    if not yaml_dict:
        LOGGER.error(f"Something odd happened when parsing recipe {name}")
        sub_graph["bad"] = "make_graph: Could not parse"
        return sub_graph

    sub_graph["meta_yaml"] = _convert_to_dict(yaml_dict)
    meta_yaml = sub_graph["meta_yaml"]

    # store the per-platform rendering and its requirements, e.g.
    # "linux_64_meta_yaml" / "linux_64_requirements"
    for k, v in zip(plat_arch, varient_yamls):
        plat_arch_name = "_".join(k)
        sub_graph[f"{plat_arch_name}_meta_yaml"] = v
        _, sub_graph[
            f"{plat_arch_name}_requirements"], _ = _extract_requirements(v)

    (
        sub_graph["total_requirements"],
        sub_graph["requirements"],
        sub_graph["strong_exports"],
    ) = _extract_requirements(meta_yaml)

    # handle multi outputs
    outputs_names = set()
    if "outputs" in yaml_dict:
        outputs_names.update(
            set(list({d.get("name", "")
                      for d in yaml_dict["outputs"]}), ), )
        # handle implicit meta packages
        if "run" in sub_graph.get("meta_yaml", {}).get("requirements", {}):
            outputs_names.add(meta_yaml["package"]["name"])
    # add in single package name
    else:
        outputs_names = {meta_yaml["package"]["name"]}
    # NOTE(review): this stores a set; presumably the mapping's serializer
    # handles sets — confirm for LazyJson-backed callers.
    sub_graph["outputs_names"] = outputs_names

    # TODO: Write schema for dict
    # TODO: remove this
    req = _get_requirements(yaml_dict)
    sub_graph["req"] = req

    # set name and version
    keys = [("package", "name"), ("package", "version")]
    missing_keys = [k[1] for k in keys if k[1] not in yaml_dict.get(k[0], {})]
    for k in keys:
        if k[1] not in missing_keys:
            sub_graph[k[1]] = yaml_dict[k[0]][k[1]]

    # set the url and hash
    # clear stale values so a removed source section doesn't linger
    sub_graph.pop("url", None)
    sub_graph.pop("hash_type", None)

    source = yaml_dict.get("source", [])
    if isinstance(source, collections.abc.Mapping):
        source = [source]
    source_keys: Set[str] = set()
    for s in source:
        # keep the first url found across the source entries
        if not sub_graph.get("url"):
            sub_graph["url"] = s.get("url")
        source_keys |= s.keys()

    # deterministic: lexicographically largest hash key present in source
    kl = list(sorted(source_keys & hashlib.algorithms_available, reverse=True))
    if kl:
        sub_graph["hash_type"] = kl[0]

    return sub_graph