def __setitem__(self, key: str, val: object) -> None:
    '''
    Writes an `ArrayFile`, `EncodedFile`, or `Artifact` to `self.path/key`

    `np.ndarray`-like objects are written as `ArrayFile`s, `Path`-like
    objects are written as `EncodedFile`s, and string-keyed mappings are
    written as subartifacts.

    Attribute access syntax is also supported, and occurrences of "__" in
    `key` are transformed into ".", to support accessing encoded files as
    attributes (i.e. `artifact['name.ext'] = val` is equivalent to
    `artifact.name__ext = val`).
    '''
    path = self.path / key

    # Copy an existing file.
    if isinstance(val, Path):
        assert path.suffix != ''
        _copy_file(path, val)

    # Write a subartifact.
    elif isinstance(val, (Mapping, Artifact)):
        assert path.suffix == ''
        MutableMapping.update(Artifact(path), val)  # type: ignore

    # Write an array.
    else:
        assert path.suffix == ''
        _write_h5(path.with_suffix('.h5'), val)
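# Usage sketch for the `__setitem__` above: a minimal illustration, assuming an
# `Artifact` constructed from a root directory (the method body itself does
# `Artifact(path)`) and that attribute assignment performs the "__" -> "."
# translation the docstring describes. Paths and file names are illustrative.
import numpy as np
from pathlib import Path

art = Artifact(Path('/tmp/demo-artifact'))

art['weights'] = np.zeros((3, 3))      # ArrayFile: written as weights.h5
art['notes.txt'] = Path('README.txt')  # EncodedFile: copies the existing file
art.notes__txt = Path('README.txt')    # equivalent to art['notes.txt'] = ...
art['runs'] = {'seed': np.arange(4)}   # string-keyed mapping: a subartifact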
def populate_feedstock_attributes(
    name: str,
    sub_graph: typing.MutableMapping,
    meta_yaml: typing.Union[str, Response] = "",
    conda_forge_yaml: typing.Union[str, Response] = "",
    mark_not_archived=False,
    feedstock_dir=None,
) -> typing.MutableMapping:
    """Parse the various configuration information into something usable

    Notes
    -----
    If the return is bad, hand the response itself in so that it can be
    parsed for meaning.
    """
    sub_graph.update({"feedstock_name": name, "bad": False})

    if mark_not_archived:
        sub_graph.update({"archived": False})

    # handle all the raw strings
    if isinstance(meta_yaml, Response):
        sub_graph["bad"] = f"make_graph: {meta_yaml.status_code}"
        return sub_graph
    sub_graph["raw_meta_yaml"] = meta_yaml

    # Get the conda-forge.yml
    if isinstance(conda_forge_yaml, str):
        sub_graph["conda-forge.yml"] = {
            k: v
            for k, v in yaml.safe_load(conda_forge_yaml).items()
            if k
            in {
                "provider",
                "min_r_ver",
                "min_py_ver",
                "max_py_ver",
                "max_r_ver",
                "compiler_stack",
                "bot",
            }
        }

    if (
        feedstock_dir is not None
        and len(glob.glob(os.path.join(feedstock_dir, ".ci_support", "*.yaml"))) > 0
    ):
        recipe_dir = os.path.join(feedstock_dir, "recipe")
        ci_support_files = glob.glob(
            os.path.join(feedstock_dir, ".ci_support", "*.yaml"),
        )
        varient_yamls = []
        plat_arch = []
        for cbc_path in ci_support_files:
            cbc_name = os.path.basename(cbc_path)
            cbc_name_parts = cbc_name.replace(".yaml", "").split("_")
            plat = cbc_name_parts[0]
            if len(cbc_name_parts) == 1:
                arch = "64"
            else:
                if cbc_name_parts[1] in ["64", "aarch64", "ppc64le", "arm64"]:
                    arch = cbc_name_parts[1]
                else:
                    arch = "64"
            plat_arch.append((plat, arch))
            varient_yamls.append(
                parse_meta_yaml(
                    meta_yaml,
                    platform=plat,
                    arch=arch,
                    recipe_dir=recipe_dir,
                    cbc_path=cbc_path,
                ),
            )

        # collapse them down
        final_cfgs = {}
        for plat_arch, varyml in zip(plat_arch, varient_yamls):
            if plat_arch not in final_cfgs:
                final_cfgs[plat_arch] = []
            final_cfgs[plat_arch].append(varyml)
        for k in final_cfgs:
            ymls = final_cfgs[k]
            final_cfgs[k] = _convert_to_dict(ChainDB(*ymls))

        plat_arch = []
        varient_yamls = []
        for k, v in final_cfgs.items():
            plat_arch.append(k)
            varient_yamls.append(v)
    else:
        plat_arch = [("win", "64"), ("osx", "64"), ("linux", "64")]
        for k in set(sub_graph["conda-forge.yml"].get("provider", {})):
            if "_" in k:
                # tuples, so these entries compare cleanly with the default
                # tuples above when sorted below
                plat_arch.append(tuple(k.split("_")))
        varient_yamls = [
            parse_meta_yaml(meta_yaml, platform=plat, arch=arch)
            for plat, arch in plat_arch
        ]

    # this makes certain that we have consistent ordering
    sorted_varient_yamls = [x for _, x in sorted(zip(plat_arch, varient_yamls))]
    yaml_dict = ChainDB(*sorted_varient_yamls)
    if not yaml_dict:
        logger.error(f"Something odd happened when parsing recipe {name}")
        sub_graph["bad"] = "make_graph: Could not parse"
        return sub_graph

    sub_graph["meta_yaml"] = _convert_to_dict(yaml_dict)
    meta_yaml = sub_graph["meta_yaml"]
    for k, v in zip(plat_arch, varient_yamls):
        plat_arch_name = "_".join(k)
        sub_graph[f"{plat_arch_name}_meta_yaml"] = v
        _, sub_graph[f"{plat_arch_name}_requirements"], _ = extract_requirements(v)

    (
        sub_graph["total_requirements"],
        sub_graph["requirements"],
        sub_graph["strong_exports"],
    ) = extract_requirements(meta_yaml)

    # handle multi outputs
    if "outputs" in yaml_dict:
        sub_graph["outputs_names"] = sorted(
            {d.get("name", "") for d in yaml_dict["outputs"]},
        )

    # TODO: Write schema for dict
    # TODO: remove this
    req = get_requirements(yaml_dict)
    sub_graph["req"] = req

    keys = [("package", "name"), ("package", "version")]
    missing_keys = [k[1] for k in keys if k[1] not in yaml_dict.get(k[0], {})]
    source = yaml_dict.get("source", [])
    if isinstance(source, collections.abc.Mapping):
        source = [source]
    source_keys: Set[str] = set()
    for s in source:
        if not sub_graph.get("url"):
            sub_graph["url"] = s.get("url")
        source_keys |= s.keys()
    for k in keys:
        if k[1] not in missing_keys:
            sub_graph[k[1]] = yaml_dict[k[0]][k[1]]

    kl = sorted(source_keys & hashlib.algorithms_available, reverse=True)
    if kl:
        sub_graph["hash_type"] = kl[0]
    return sub_graph
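# A standalone restatement of the (platform, arch) inference above -- a hedged
# sketch; the helper name is mine, and the file names are illustrative. The
# convention assumed is "<platform>_<arch>_<variant keys...>.yaml", with the
# arch defaulting to "64" when the second token is not a known architecture.
import os


def _plat_arch_from_ci_support(cbc_path):
    parts = os.path.basename(cbc_path).replace(".yaml", "").split("_")
    plat = parts[0]
    if len(parts) > 1 and parts[1] in {"64", "aarch64", "ppc64le", "arm64"}:
        arch = parts[1]
    else:
        arch = "64"
    return plat, arch


assert _plat_arch_from_ci_support("linux_64_python3.8.yaml") == ("linux", "64")
assert _plat_arch_from_ci_support("osx_arm64_python3.8.yaml") == ("osx", "arm64")
assert _plat_arch_from_ci_support("win_vs2017.yaml") == ("win", "64")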
def populate_feedstock_attributes(
    name: str,
    sub_graph: typing.MutableMapping,
    meta_yaml: typing.Union[str, Response] = "",
    conda_forge_yaml: typing.Union[str, Response] = "",
    mark_not_archived=False,
    feedstock_dir=None,
) -> typing.MutableMapping:
    """Parse the various configuration information into something usable

    Notes
    -----
    If the return is bad, hand the response itself in so that it can be
    parsed for meaning.
    """
    sub_graph.update({"feedstock_name": name, "bad": False, "branch": "master"})

    if mark_not_archived:
        sub_graph.update({"archived": False})

    # handle all the raw strings
    if isinstance(meta_yaml, Response):
        sub_graph["bad"] = f"make_graph: {meta_yaml.status_code}"
        return sub_graph

    # strip out old keys - this removes old platforms when one gets disabled
    for key in list(sub_graph.keys()):
        if key.endswith("meta_yaml") or key.endswith("requirements") or key == "req":
            del sub_graph[key]

    sub_graph["raw_meta_yaml"] = meta_yaml

    # Get the conda-forge.yml
    if isinstance(conda_forge_yaml, str):
        sub_graph["conda-forge.yml"] = {
            k: v
            for k, v in yaml.safe_load(conda_forge_yaml).items()
            if k in CONDA_FORGE_YML_KEYS_TO_KEEP
        }

    if feedstock_dir is not None:
        LOGGER.debug(
            "# of ci support files: %s",
            len(glob.glob(os.path.join(feedstock_dir, ".ci_support", "*.yaml"))),
        )

    try:
        if (
            feedstock_dir is not None
            and len(glob.glob(os.path.join(feedstock_dir, ".ci_support", "*.yaml")))
            > 0
        ):
            recipe_dir = os.path.join(feedstock_dir, "recipe")
            ci_support_files = glob.glob(
                os.path.join(feedstock_dir, ".ci_support", "*.yaml"),
            )
            varient_yamls = []
            plat_arch = []
            for cbc_path in ci_support_files:
                cbc_name = os.path.basename(cbc_path)
                cbc_name_parts = cbc_name.replace(".yaml", "").split("_")
                plat = cbc_name_parts[0]
                if len(cbc_name_parts) == 1:
                    arch = "64"
                else:
                    if cbc_name_parts[1] in ["64", "aarch64", "ppc64le", "arm64"]:
                        arch = cbc_name_parts[1]
                    else:
                        arch = "64"
                plat_arch.append((plat, arch))
                varient_yamls.append(
                    parse_meta_yaml(
                        meta_yaml,
                        platform=plat,
                        arch=arch,
                        recipe_dir=recipe_dir,
                        cbc_path=cbc_path,
                    ),
                )

                # sometimes the requirements come out to None and this ruins
                # the aggregated meta_yaml
                if "requirements" in varient_yamls[-1]:
                    for section in ["build", "host", "run"]:
                        # We make sure to set a section only if it is actually
                        # in the recipe. Adding a section when it is not there
                        # might confuse migrators trying to move CB2 recipes
                        # to CB3.
                        if section in varient_yamls[-1]["requirements"]:
                            val = varient_yamls[-1]["requirements"].get(section, [])
                            varient_yamls[-1]["requirements"][section] = val or []

            # collapse them down
            final_cfgs = {}
            for plat_arch, varyml in zip(plat_arch, varient_yamls):
                if plat_arch not in final_cfgs:
                    final_cfgs[plat_arch] = []
                final_cfgs[plat_arch].append(varyml)
            for k in final_cfgs:
                ymls = final_cfgs[k]
                final_cfgs[k] = _convert_to_dict(ChainDB(*ymls))

            plat_arch = []
            varient_yamls = []
            for k, v in final_cfgs.items():
                plat_arch.append(k)
                varient_yamls.append(v)
        else:
            plat_arch = [("win", "64"), ("osx", "64"), ("linux", "64")]
            for k in set(sub_graph["conda-forge.yml"].get("provider", {})):
                if "_" in k:
                    # tuples, so these entries compare cleanly with the
                    # default tuples above when sorted below
                    plat_arch.append(tuple(k.split("_")))
            varient_yamls = [
                parse_meta_yaml(meta_yaml, platform=plat, arch=arch)
                for plat, arch in plat_arch
            ]
    except Exception as e:
        import traceback

        trb = traceback.format_exc()
        sub_graph["bad"] = f"make_graph: render error {e}\n{trb}"
        raise

    LOGGER.debug("platforms: %s", plat_arch)

    # this makes certain that we have consistent ordering
    sorted_varient_yamls = [x for _, x in sorted(zip(plat_arch, varient_yamls))]
    yaml_dict = ChainDB(*sorted_varient_yamls)
    if not yaml_dict:
        LOGGER.error(f"Something odd happened when parsing recipe {name}")
        sub_graph["bad"] = "make_graph: Could not parse"
        return sub_graph

    sub_graph["meta_yaml"] = _convert_to_dict(yaml_dict)
    meta_yaml = sub_graph["meta_yaml"]
    for k, v in zip(plat_arch, varient_yamls):
        plat_arch_name = "_".join(k)
        sub_graph[f"{plat_arch_name}_meta_yaml"] = v
        _, sub_graph[f"{plat_arch_name}_requirements"], _ = _extract_requirements(v)

    (
        sub_graph["total_requirements"],
        sub_graph["requirements"],
        sub_graph["strong_exports"],
    ) = _extract_requirements(meta_yaml)

    # handle multi outputs
    outputs_names = set()
    if "outputs" in yaml_dict:
        outputs_names.update({d.get("name", "") for d in yaml_dict["outputs"]})
        # handle implicit meta packages
        if "run" in sub_graph.get("meta_yaml", {}).get("requirements", {}):
            outputs_names.add(meta_yaml["package"]["name"])
    # add in single package name
    else:
        outputs_names = {meta_yaml["package"]["name"]}
    sub_graph["outputs_names"] = outputs_names

    # TODO: Write schema for dict
    # TODO: remove this
    req = _get_requirements(yaml_dict)
    sub_graph["req"] = req

    # set name and version
    keys = [("package", "name"), ("package", "version")]
    missing_keys = [k[1] for k in keys if k[1] not in yaml_dict.get(k[0], {})]
    for k in keys:
        if k[1] not in missing_keys:
            sub_graph[k[1]] = yaml_dict[k[0]][k[1]]

    # set the url and hash
    sub_graph.pop("url", None)
    sub_graph.pop("hash_type", None)
    source = yaml_dict.get("source", [])
    if isinstance(source, collections.abc.Mapping):
        source = [source]
    source_keys: Set[str] = set()
    for s in source:
        if not sub_graph.get("url"):
            sub_graph["url"] = s.get("url")
        source_keys |= s.keys()
    kl = sorted(source_keys & hashlib.algorithms_available, reverse=True)
    if kl:
        sub_graph["hash_type"] = kl[0]
    return sub_graph
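# A hedged illustration of the `hash_type` selection at the end of both
# versions above: the source section's keys are intersected with hashlib's
# available algorithms and sorted reverse-lexicographically, so "sha256" beats
# "md5". The sample keys here are illustrative.
import hashlib

source_keys = {"fn", "url", "sha256", "md5"}
kl = sorted(source_keys & hashlib.algorithms_available, reverse=True)
assert kl[0] == "sha256"  # "sha256" > "md5" lexicographically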
# `MM` is assumed (it is not shown in the source) to be a TypeVar bound to
# MutableMapping, since `assign` both calls `.update` on its first argument
# and returns it.
from typing import Mapping, MutableMapping, TypeVar

MM = TypeVar("MM", bound=MutableMapping)


def assign(_dict: MM, *_dicts: Mapping) -> MM:
    """Update `_dict` with each mapping in `_dicts`, in order, and return it."""
    for __dict in _dicts:
        _dict.update(__dict)
    return _dict
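# Usage sketch for `assign`: the first mapping is mutated in place and
# returned, and later mappings win on key conflicts.
base = {"a": 1, "b": 2}
out = assign(base, {"b": 3}, {"c": 4})
assert out is base
assert out == {"a": 1, "b": 3, "c": 4}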