def fix_app_spec_locations(self, app_spec_config: t.MutableMapping, config_path: pathlib.Path):
    """Fix any relative path in the AppSpec configuration.

    Relative paths are resolved against ``config_path``, the location of
    the configuration file.  Rewrites happen in place on
    ``app_spec_config``.

    :param app_spec_config: The configuration of the application.
    :param config_path: The location of the configuration file.
        NOTE(review): locations are joined as ``config_path / location``,
        which assumes config_path is a directory — confirm with callers.
    :return: None; ``app_spec_config`` is mutated.
    """
    # Aliases for proc_input and proc_output.
    # TODO: Deprecate these fields.
    for legacy_key, canonical_key in (('input', 'proc_input'), ('output', 'proc_output')):
        if legacy_key in app_spec_config:
            app_spec_config[canonical_key] = app_spec_config.pop(legacy_key)

    for section_key in ('proc_input', 'proc_output'):
        section = app_spec_config[section_key]
        if section['type'] in self.io_file_handler_types:
            # If the configured location is already absolute, pathlib
            # discards config_path automatically when joining.
            section['location'] = str(config_path / section['location'])
def override(target_dict: typing.MutableMapping, override_dict: typing.Mapping):
    """Apply the updates in override_dict to the dict target_dict.

    This is like dict.update, but recursive: if the existing element is a
    dict, elements of the sub-dict are overridden rather than the whole
    dict being wholesale replaced.

    One special case is added. If a key within override_dict starts with
    '!' then it is interpreted as follows:

    - if the associated value is "REMOVE", the key is removed from the
      parent dict;
    - use '!!' for keys that actually start with '!' and shouldn't be
      removed (the escape is stripped, so "!!name" targets key "!name").

    e.g.
    override(
        {'outer': {'inner': {'key': 'oldValue', 'existingKey': True}}},
        {'outer': {'inner': {'key': 'newValue'}},
         'newKey': {'newDict': True}},
    )
    yields:
    {'outer': {'inner': {'key': 'newValue', 'existingKey': True}},
     'newKey': {'newDict': True}}

    :param target_dict: The mapping updated in place.
    :param override_dict: The mapping whose entries take precedence.
    :return: target_dict, after the overrides have been applied.
    """
    for key, value in override_dict.items():
        #
        # Handle special ! syntax:
        # "!keyname" : "REMOVE", --> remove the key 'keyname' from target_dict
        # "!!keyname": ...       --> literal key "!keyname" (escaped '!')
        #
        if key[0:1] == "!":
            if key[1:2] != "!":
                key = key[1:]
                if value == "REMOVE":
                    target_dict.pop(key, None)
                    continue
            else:
                # BUG FIX: strip the escape marker so "!!name" stores key
                # "!name" as documented; previously the extra '!' leaked
                # through into target_dict.
                key = key[1:]

        current_value = target_dict.get(key)
        if not isinstance(current_value, Mapping):
            # Thing or Mapping overrides Thing or None
            target_dict[key] = value
        elif isinstance(value, Mapping):
            # Mapping overrides mapping, recurse
            target_dict[key] = override(current_value, value)
        else:
            # Thing overrides Mapping
            target_dict[key] = value
    return target_dict
def _func(self, args: t.MutableMapping): # map args back onto the signature. pargs = [] # type: t.List[t.Any] for param in self.positionals: if param.kind == param.VAR_POSITIONAL: pargs.extend(args.pop(param.name)) elif param.kind == param.POSITIONAL_OR_KEYWORD: pargs.append(args.pop(param.name)) for key, value in args.items(): if key.startswith('no_'): args[key[3:]] = args.pop(key) continue return (self.function or (lambda: None))(*pargs, **args)
def prepare_meta(attrs: typing.MutableMapping):
    # pylint: disable=unsupported-membership-test,unsubscriptable-object
    """Validate, extract, and freeze the ``Meta`` entry of *attrs*.

    ``attrs['Meta']`` must exist, expose a ``table`` attribute, and that
    table must be a ``Table`` instance; otherwise RuntimeError is raised.
    The Meta object is removed from *attrs*, its ``__setattr__`` and
    ``__delattr__`` are replaced with ``_frozen_setattrs``, and it is
    returned.

    :param attrs: Class-body attribute mapping; mutated (Meta is popped).
    :return: The frozen Meta object.
    :raises RuntimeError: When Meta is missing or malformed.
    """
    if 'Meta' not in attrs:
        raise RuntimeError('no Meta')
    meta = attrs['Meta']
    if not hasattr(meta, 'table'):
        raise RuntimeError('no table in Meta')
    if not isinstance(meta.table, Table):
        raise RuntimeError('Meta.table is not Table')

    attrs.pop('Meta')
    # NOTE(review): assigning dunders here only affects attribute access on
    # *instances* of Meta if Meta is a class (dunders are looked up on the
    # type) — confirm that is the intended freezing semantics.
    meta.__setattr__ = _frozen_setattrs
    meta.__delattr__ = _frozen_setattrs
    return meta
def populate_feedstock_attributes(
    name: str,
    sub_graph: typing.MutableMapping,
    meta_yaml: typing.Union[str, Response] = "",
    conda_forge_yaml: typing.Union[str, Response] = "",
    mark_not_archived=False,
    feedstock_dir=None,
) -> typing.MutableMapping:
    """Parse the various configuration information into something usable

    Populates *sub_graph* in place with the parsed/rendered metadata of a
    feedstock (per-platform meta.yaml renderings, requirements, output
    names, source url and hash type) and returns it.  On any parse failure
    the "bad" key is set to a diagnostic string instead of raising (except
    for render errors, which are recorded and re-raised).

    Notes
    -----
    If the return is bad hand the response itself in so that it can be
    parsed for meaning.
    """
    sub_graph.update({
        "feedstock_name": name,
        "bad": False,
        "branch": "master"
    })

    if mark_not_archived:
        sub_graph.update({"archived": False})

    # handle all the raw strings
    # A Response here means the fetch of meta.yaml failed; record the HTTP
    # status and bail out early.
    if isinstance(meta_yaml, Response):
        sub_graph["bad"] = f"make_graph: {meta_yaml.status_code}"
        return sub_graph

    # strip out old keys - this removes old platforms when one gets disabled
    for key in list(sub_graph.keys()):
        if key.endswith("meta_yaml") or key.endswith(
                "requirements") or key == "req":
            del sub_graph[key]

    sub_graph["raw_meta_yaml"] = meta_yaml

    # Get the conda-forge.yml
    # NOTE(review): "conda-forge.yml" is only set when conda_forge_yaml is a
    # str; the else-branch below reads sub_graph["conda-forge.yml"]
    # unconditionally — confirm callers always pass a str or pre-populate it.
    if isinstance(conda_forge_yaml, str):
        sub_graph["conda-forge.yml"] = {
            k: v
            for k, v in yaml.safe_load(conda_forge_yaml).items()
            if k in CONDA_FORGE_YML_KEYS_TO_KEEP
        }

    if feedstock_dir is not None:
        LOGGER.debug(
            "# of ci support files: %s",
            len(glob.glob(os.path.join(feedstock_dir, ".ci_support", "*.yaml"))),
        )

    try:
        # Preferred path: render once per .ci_support/*.yaml variant config.
        if (feedstock_dir is not None and len(
                glob.glob(os.path.join(feedstock_dir, ".ci_support",
                                       "*.yaml"))) > 0):
            recipe_dir = os.path.join(feedstock_dir, "recipe")
            ci_support_files = glob.glob(
                os.path.join(feedstock_dir, ".ci_support", "*.yaml"),
            )
            varient_yamls = []
            plat_arch = []
            for cbc_path in ci_support_files:
                # CI file names look like "<platform>[_<arch>]_<rest>.yaml";
                # anything not in the known arch list defaults to "64".
                cbc_name = os.path.basename(cbc_path)
                cbc_name_parts = cbc_name.replace(".yaml", "").split("_")
                plat = cbc_name_parts[0]
                if len(cbc_name_parts) == 1:
                    arch = "64"
                else:
                    if cbc_name_parts[1] in [
                            "64", "aarch64", "ppc64le", "arm64"
                    ]:
                        arch = cbc_name_parts[1]
                    else:
                        arch = "64"
                plat_arch.append((plat, arch))
                varient_yamls.append(
                    parse_meta_yaml(
                        meta_yaml,
                        platform=plat,
                        arch=arch,
                        recipe_dir=recipe_dir,
                        cbc_path=cbc_path,
                    ),
                )

                # sometimes the requirements come out to None and this ruins the
                # aggregated meta_yaml
                if "requirements" in varient_yamls[-1]:
                    for section in ["build", "host", "run"]:
                        # We make sure to set a section only if it is actually in
                        # the recipe. Adding a section when it is not there might
                        # confuse migrators trying to move CB2 recipes to CB3.
                        if section in varient_yamls[-1]["requirements"]:
                            val = varient_yamls[-1]["requirements"].get(
                                section, [])
                            varient_yamls[-1]["requirements"][
                                section] = val or []

            # collapse them down
            # Merge all renderings that share the same (platform, arch) pair.
            # NOTE(review): this loop variable shadows the plat_arch list built
            # above; it is rebuilt from final_cfgs right after, so net effect is
            # de-duplication by (platform, arch).
            final_cfgs = {}
            for plat_arch, varyml in zip(plat_arch, varient_yamls):
                if plat_arch not in final_cfgs:
                    final_cfgs[plat_arch] = []
                final_cfgs[plat_arch].append(varyml)
            for k in final_cfgs:
                ymls = final_cfgs[k]
                final_cfgs[k] = _convert_to_dict(ChainDB(*ymls))

            plat_arch = []
            varient_yamls = []
            for k, v in final_cfgs.items():
                plat_arch.append(k)
                varient_yamls.append(v)
        else:
            # Fallback: render for the default platform set, plus any extra
            # "<platform>_<arch>" providers declared in conda-forge.yml.
            plat_arch = [("win", "64"), ("osx", "64"), ("linux", "64")]
            for k in set(sub_graph["conda-forge.yml"].get("provider", {})):
                if "_" in k:
                    plat_arch.append(k.split("_"))
            varient_yamls = [
                parse_meta_yaml(meta_yaml, platform=plat, arch=arch)
                for plat, arch in plat_arch
            ]
    except Exception as e:
        import traceback
        trb = traceback.format_exc()
        sub_graph["bad"] = f"make_graph: render error {e}\n{trb}"
        raise

    LOGGER.debug("platforms: %s", plat_arch)

    # this makes certain that we have consistent ordering
    # NOTE(review): if two plat_arch keys compare equal, sorted() falls back to
    # comparing the yaml dicts and would raise TypeError — relies on keys being
    # unique; verify.
    sorted_varient_yamls = [
        x for _, x in sorted(zip(plat_arch, varient_yamls))
    ]
    yaml_dict = ChainDB(*sorted_varient_yamls)
    if not yaml_dict:
        LOGGER.error(f"Something odd happened when parsing recipe {name}")
        sub_graph["bad"] = "make_graph: Could not parse"
        return sub_graph

    sub_graph["meta_yaml"] = _convert_to_dict(yaml_dict)
    meta_yaml = sub_graph["meta_yaml"]

    # Store the per-(platform, arch) rendering and its requirements under
    # "<platform>_<arch>_..." keys.
    for k, v in zip(plat_arch, varient_yamls):
        plat_arch_name = "_".join(k)
        sub_graph[f"{plat_arch_name}_meta_yaml"] = v
        _, sub_graph[
            f"{plat_arch_name}_requirements"], _ = _extract_requirements(v)

    (
        sub_graph["total_requirements"],
        sub_graph["requirements"],
        sub_graph["strong_exports"],
    ) = _extract_requirements(meta_yaml)

    # handle multi outputs
    outputs_names = set()
    if "outputs" in yaml_dict:
        outputs_names.update(
            set(list({d.get("name", "") for d in yaml_dict["outputs"]}),
                ),
        )
        # handle implicit meta packages
        if "run" in sub_graph.get("meta_yaml", {}).get("requirements", {}):
            outputs_names.add(meta_yaml["package"]["name"])
    # add in single package name
    else:
        outputs_names = {meta_yaml["package"]["name"]}
    sub_graph["outputs_names"] = outputs_names

    # TODO: Write schema for dict
    # TODO: remove this
    req = _get_requirements(yaml_dict)
    sub_graph["req"] = req

    # set name and version
    keys = [("package", "name"), ("package", "version")]
    missing_keys = [k[1] for k in keys if k[1] not in yaml_dict.get(k[0], {})]
    for k in keys:
        if k[1] not in missing_keys:
            sub_graph[k[1]] = yaml_dict[k[0]][k[1]]

    # set the url and hash
    sub_graph.pop("url", None)
    sub_graph.pop("hash_type", None)

    source = yaml_dict.get("source", [])
    if isinstance(source, collections.abc.Mapping):
        source = [source]

    # Take the first non-empty url across all source entries; collect every
    # source key so we can detect which hash algorithm the recipe uses.
    source_keys: Set[str] = set()
    for s in source:
        if not sub_graph.get("url"):
            sub_graph["url"] = s.get("url")
        source_keys |= s.keys()

    # Pick the lexicographically-last known hash algorithm named by a source
    # key (e.g. prefers "sha256" over "md5" by reverse sort order).
    kl = list(sorted(source_keys & hashlib.algorithms_available, reverse=True))
    if kl:
        sub_graph["hash_type"] = kl[0]

    return sub_graph
def drop(_dict: MM, keys: Iterable) -> MM:
    """Remove every key in *keys* from *_dict* in place.

    Missing keys are ignored.  Returns the same mapping for chaining.
    """
    for unwanted in keys:
        _dict.pop(unwanted, None)
    return _dict
def pick(_dict: MM, keys: Iterable, pop=True, default=None) -> Generator[Any, None, None]:
    """Yield the value of each key in *keys* from *_dict*, lazily.

    When *pop* is true (the default) each looked-up key is removed from the
    mapping; otherwise the mapping is left untouched.  Missing keys yield
    *default*.
    """
    fetch = _dict.pop if pop else _dict.get
    for wanted in keys:
        yield fetch(wanted, default)