def __init__(self, **config: Any):

    self._cache_dir = os.path.join(
        BRING_PKG_CACHE, "resolvers", from_camel_case(self.__class__.__name__)
    )
    ensure_folder(self._cache_dir, mode=0o700)

    self._config: Mapping[str, Any] = get_seeded_dict(PKG_RESOLVER_DEFAULTS, config)
async def _get_required_args(self) -> RecordArg:

    if self._required_args is None:

        base_vars: VarSet = await self.get_base_vars()
        frecklet_args = await self.get_required_args(**base_vars.create_values_dict())
        if frecklet_args is None:
            frecklet_args = {}

        # TODO: maybe check for duplicate keys?
        base_args = await self._get_base_args()
        merged_args = get_seeded_dict(
            base_args.childs, frecklet_args, merge_strategy="update"
        )

        self._required_args = self.tingistry.arg_hive.create_record_arg(merged_args)

    return self._required_args
async def get_pkg_metadata(
    self,
    source_details: Union[str, Mapping[str, Any]],
    bring_index: "BringIndexTing",
    override_config: Optional[Mapping[str, Any]] = None,
) -> Mapping[str, Any]:
    """Return the metadata of a bring package, specified via the provided source details and the current index.

    Returns a dictionary with the following keys:

    *versions*: a list of dictionaries whose keys are package-specific sets of variables that are
        combined to denote one version item, plus a '_meta' key containing arbitrary metadata
    *aliases*: TO BE DONE
    *metadata_check*: timestamp string (incl. timezone) describing the date of the metadata check
    *args*: a mapping describing the args that are required/optional to point to a specific version of a pkg

    Args:
        source_details: the package source details, either a url string or a mapping
        bring_index: the index the package lives in
        override_config: optional config values that override the resolver defaults
    """

    if isinstance(source_details, str):
        _source_details: Mapping[str, Any] = {"url": source_details}
    else:
        _source_details = source_details

    config = get_seeded_dict(self.get_resolver_config(), override_config)

    metadata = await self._get_cached_metadata(
        source_details=_source_details, bring_index=bring_index, config=config
    )

    if metadata:
        return metadata

    metadata = await self._get_pkg_metadata(
        source_details=_source_details,
        bring_index=bring_index,
        config=config,
        cached_only=False,
    )

    pkg_id = self.get_unique_source_id(_source_details, bring_index=bring_index)
    PkgType.metadata_cache.setdefault(self.__class__, {})[pkg_id] = {
        "metadata": metadata,
        "source": _source_details,
        "index": bring_index.full_name,
    }

    return metadata
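# Illustrative sketch only (not part of the API): a rough shape of the metadata dict described
# in the docstring above. The keys inside the version items ('version', 'arch') and all values
# are made-up examples; the exact shape depends on the concrete resolver. The 'args' mapping
# mentioned in the docstring is stored under 'pkg_vars' by _get_pkg_metadata below.
_example_pkg_metadata = {
    "versions": [
        {"version": "1.1.0", "arch": "x86_64", "_meta": {}},
        {"version": "1.0.0", "arch": "x86_64", "_meta": {}},
    ],
    "aliases": {"version": {"latest": "1.1.0"}},
    "metadata_check": "2020-01-01T00:00:00+00:00",
    "pkg_vars": {"args": {}, "version_vars": {}, "mogrify_vars": {}},
}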
async def merge_with_defaults(self, **vars: Any) -> MutableMapping[str, Any]:

    vals: Mapping[str, Any] = await self.get_values(  # type: ignore
        "metadata", "args", resolve=True
    )
    # _pkg_metadata: Mapping[str, Any] = vals["metadata"]
    args: RecordArg = vals["args"]

    pkg_defaults = args.default

    _vars = get_seeded_dict(pkg_defaults, vars, merge_strategy="update")

    filtered: Dict[str, Any] = {}
    for k, v in _vars.items():
        if k in args.arg_names:
            filtered[k] = v

    return filtered
async def calculate_config(
    self, _config_list: Iterable[Union[str, Mapping[str, Any]]]
) -> MutableMapping[str, Any]:

    # TODO: this could be made more efficient by only loading the config dicts that are required
    config_dicts: Mapping[
        str, Mapping[str, Any]
    ] = await self._config_contexts.get_config_dicts()

    result: List[Mapping[str, Any]] = []

    config_list: List[Union[str, Mapping[str, Any]]] = ["__init_dict__", "default"]
    config_list.extend(_config_list)

    for c in config_list:

        temp_dict: Optional[Mapping[str, Any]] = None
        if isinstance(c, str):
            if c == "__init_dict__":
                temp_dict = BRING_CORE_CONFIG
            elif "=" in c:
                k, v = c.split("=", maxsplit=1)
                temp_dict = {k: v}
            elif c in config_dicts.keys():
                temp_dict = config_dicts[c]
        elif isinstance(c, collections.abc.Mapping):
            temp_dict = c

        if temp_dict is None:
            raise FrklException(
                msg=f"Can't parse config item: {c}.",
                reason="Invalid type or config option, must be either the name of a config profile, a key/value pair (separated with '='), or a dict-like object.",
            )

        result.append(temp_dict)

    result_dict = get_seeded_dict(*result, merge_strategy="merge")

    return result_dict
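# Illustrative only: the three accepted forms of a config item, per the checks above. The
# profile name 'dev' and the keys/values shown here are made-up examples.
_example_config_list = [
    "dev",                          # name of a config profile known to the config contexts
    "task_log=tree",                # key/value pair, split on the first '='
    {"default_index": "binaries"},  # dict-like object, used as-is
]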
def calculate_defaults(typistry: Typistry, data: Mapping[str, Any]):

    pm = typistry.get_plugin_manager(DefaultsProducer)

    producers: List[Tuple] = []
    values: Dict[str, Any] = {}

    for k, v in data.items():
        if k.startswith("_"):
            pn = k[1:]
            if pn in pm.plugin_names:
                producers.append((pn, v))
        else:
            values[k] = v

    result = []
    for item in producers:

        plugin: DefaultsProducer = pm.get_plugin(item[0])
        val = item[1]

        if val is False or (isinstance(val, str) and val.lower() == "false"):
            continue

        if not isinstance(val, (bool, Mapping)):
            raise FrklException(
                msg=f"Can't calculate '{item[0]}' defaults for: {val}",
                reason="Value must be a boolean, or a dictionary.",
            )

        if isinstance(val, Mapping):
            result.append(plugin.get_values(**val))
        else:
            result.append(plugin.get_values())

    result.append(values)

    r = get_seeded_dict(*result, merge_strategy="merge")
    return r
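# Illustrative only: how a data dict is split by calculate_defaults. Keys starting with '_'
# whose remainder matches a DefaultsProducer plugin name trigger that producer; everything
# else is kept as a plain value and merged in as-is. The producer names 'env' and 'git' used
# here are hypothetical.
_example_defaults_data = {
    "_env": True,              # run the (hypothetical) 'env' producer with no arguments
    "_git": {"fallback": ""},  # run the (hypothetical) 'git' producer with keyword args
    "version": "1.0.0",        # plain value, merged in unchanged
}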
async def metadata_is_valid(
    self,
    source_details: Union[str, Mapping[str, Any]],
    bring_index: "BringIndexTing",
    override_config: Optional[Mapping[str, Any]] = None,
) -> bool:

    if isinstance(source_details, str):
        _source_details: Mapping[str, Any] = {"url": source_details}
    else:
        _source_details = source_details

    config = get_seeded_dict(self.get_resolver_config(), override_config)

    metadata = await self._get_cached_metadata(
        source_details=_source_details, bring_index=bring_index, config=config
    )

    if metadata:
        return True
    else:
        return False
async def process_vars(
    self,
    source_args: Mapping[str, Any],
    pkg_args: Mapping[str, Any],
    mogrifiers: Union[Iterable, Mapping],
    source_vars: Mapping[str, Any],
    versions: List[Mapping[str, Any]],
    aliases: Mapping[str, Mapping[str, str]],
) -> Mapping[str, Any]:
    """Return the (remaining) args a user can specify to select a version or mogrify options.

    'source_args' can contain more arguments than will eventually be used/displayed to the user.

    Args:
        - *source_args*: a dictionary of args describing the type/schema of an argument
        - *pkg_args*: a dictionary of args created automatically by a specific resolver; used as the base,
            but overwritten by anything in 'source_args'
        - *mogrifiers*: the 'mogrify' section of the pkg 'source'
        - *source_vars*: vars hardcoded in the 'source' section of a package, which can also contain templates
        - *versions*: all available versions of a package
        - *aliases*: a dictionary of value aliases the user can use instead of the 'real' values; aliases are per arg name

    Returns:
        a dictionary with 3 keys: args, version_vars, mogrify_vars
    """

    # calculate args to select a version
    version_vars: MutableMapping[str, Mapping] = {}

    for version in versions:

        for k in version.keys():

            if k == "_meta" or k == "_mogrify":
                continue

            elif k in version_vars.keys():
                val = version[k]
                if val not in version_vars[k]["allowed"]:
                    version_vars[k]["allowed"].append(val)
                continue

            version_vars[k] = {
                # "default": version[k],
                "allowed": [version[k]],
                "type": "string",
            }

    # add aliases to the 'allowed' values of the version select args
    for var_name, alias_details in aliases.items():

        for alias, value in alias_details.items():

            if var_name in version_vars.keys():

                if value not in version_vars[var_name]["allowed"]:
                    log.debug(
                        f"Alias '{alias}' does not have a corresponding value registered ('{value}'). Ignoring it..."
                    )
                    continue

                if alias in version_vars[var_name]["allowed"]:
                    log.debug(
                        f"Alias '{alias}' (for value '{value}') already in possible values for key '{var_name}'. It'll be ignored if specified by the user."
                    )
                else:
                    version_vars[var_name]["allowed"].append(alias)

    mogrify_vars: Mapping[str, Mapping]
    duplicates = {}
    if mogrifiers:
        template_schema = get_template_schema(mogrifiers)
        mogrify_vars = template_schema_to_args(template_schema)

        for k in mogrify_vars.keys():
            if k in version_vars.keys():
                duplicates[k] = (mogrify_vars[k], version_vars[k])
    else:
        mogrify_vars = {}

    computed_vars = get_seeded_dict(
        mogrify_vars, version_vars, merge_strategy="update"
    )

    if source_vars is None:
        source_vars = {}

    required_keys = computed_vars.keys()
    # now try to find keys that are not included in the first/latest version (most of the time there won't be any)

    args = get_seeded_dict(
        pkg_args, computed_vars, source_args, merge_strategy="merge"
    )

    final_args = {}
    for k, v in args.items():
        if k in required_keys and k not in source_vars.keys():
            final_args[k] = v

    return {
        "args": final_args,
        "version_vars": version_vars,
        "mogrify_vars": mogrify_vars,
    }
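# Illustrative only: how the version select args are derived from a versions list. Given two
# hypothetical versions that differ in 'version' and 'arch', and assuming an alias mapping of
# {"version": {"latest": "1.1.0"}}, the computed 'version_vars' would look roughly like this
# (one entry per key, collecting the allowed values plus any matching aliases).
_example_versions = [
    {"version": "1.1.0", "arch": "x86_64", "_meta": {}},
    {"version": "1.0.0", "arch": "aarch64", "_meta": {}},
]
_example_version_vars = {
    "version": {"allowed": ["1.1.0", "1.0.0", "latest"], "type": "string"},
    "arch": {"allowed": ["x86_64", "aarch64"], "type": "string"},
}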
async def _get_pkg_metadata(
    self,
    source_details: Mapping[str, Any],
    bring_index: "BringIndexTing",
    config: Mapping[str, Any],
    cached_only=False,
) -> Optional[Mapping[str, Mapping]]:
    """Utility method that handles (external/non-in-memory) caching of metadata, as well as calculating the 'args' return parameter."""

    id = self.get_unique_source_id(source_details, bring_index=bring_index)
    if not id:
        raise Exception("Unique source id can't be empty")

    id = generate_valid_filename(id, sep="_")
    metadata_file = os.path.join(self._cache_dir, f"{id}.json")

    all_metadata = await self.get_metadata_from_cache_file(metadata_file)

    if self.check_pkg_metadata_valid(
        all_metadata, source_details, bring_index=bring_index, config=config
    ):
        return all_metadata["metadata"]

    if cached_only:
        return None

    metadata = all_metadata.get("metadata", {})

    try:
        result: Mapping[str, Any] = await self._process_pkg_versions(
            source_details=source_details, bring_index=bring_index
        )
        versions: List[MutableMapping[str, Any]] = result["versions"]
        aliases: MutableMapping[str, str] = result.get("aliases", None)
        pkg_args: Mapping[str, Mapping] = result.get("args", None)
        default_args = copy.deepcopy(DEFAULT_ARGS_DICT)
        pkg_args = get_seeded_dict(default_args, pkg_args, merge_strategy="update")

    except Exception as e:
        log.debug(f"Can't retrieve versions for pkg: {e}")
        log.debug(
            f"Error retrieving versions in resolver '{self.__class__.__name__}': {e}",
            exc_info=True,
        )
        raise e

    metadata["versions"] = versions

    if aliases is None:
        aliases = {}

    if "aliases" in source_details.keys():
        pkg_aliases = dict_merge(aliases, source_details["aliases"], copy_dct=False)
    else:
        pkg_aliases = aliases

    metadata["aliases"] = pkg_aliases

    version_aliases = pkg_aliases.setdefault("version", {})

    # register a 'latest' alias and add the artefact mogrifier(s) to each version
    for version in versions:

        if "version" in version.keys() and "latest" not in version_aliases:
            version_aliases["latest"] = version["version"]

        sam = source_details.get("artefact", None)
        if sam:
            if isinstance(sam, str):
                sam = {"type": sam}

            if isinstance(sam, Mapping):
                version["_mogrify"].append(sam)
            else:
                version["_mogrify"].extend(sam)

            continue

        if not hasattr(self, "get_artefact_mogrify"):
            continue

        vam = self.get_artefact_mogrify(source_details, version)

        if vam:
            if isinstance(vam, Mapping):
                version["_mogrify"].append(vam)
            else:
                version["_mogrify"].extend(vam)

    # add the 'mogrify' section of the source details to each version
    mogrifiers = source_details.get("mogrify", None)
    if mogrifiers:
        for version in versions:
            if isinstance(mogrifiers, Mapping):
                version["_mogrify"].append(mogrifiers)
            else:
                version["_mogrify"].extend(mogrifiers)

    # add any pkg-content mogrifier provided by the resolver
    for version in versions:
        pkg_type_mogrifier = self.get_pkg_content_mogrify(source_details, version)
        if pkg_type_mogrifier:
            if isinstance(pkg_type_mogrifier, Mapping):
                version["_mogrify"].append(pkg_type_mogrifier)
            else:
                version["_mogrify"].extend(pkg_type_mogrifier)

    # replace template var names in the mogrify lists with the version's own values
    for version in versions:

        mog = version["_mogrify"]
        # print(version)
        var_names = find_var_names_in_obj(mog)
        if var_names:
            vars = {}
            for k, v in version.items():
                if k.startswith("_"):
                    continue
                vars[k] = v
            new_mog = replace_var_names_in_obj(mog, vars)
            # print(vars)
            # print(new_mog)
            version["_mogrify"] = new_mog

    pkg_vars = await self.process_vars(
        source_args=source_details.get("args", None),
        pkg_args=pkg_args,
        mogrifiers=mogrifiers,
        source_vars=source_details.get("vars", None),
        versions=versions,  # type: ignore
        aliases=pkg_aliases,
    )
    metadata["pkg_vars"] = pkg_vars

    metadata["metadata_check"] = str(arrow.Arrow.now())

    await self.write_metadata(metadata_file, metadata, source_details, bring_index)

    return metadata
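# Illustrative only: how 'artefact' and 'mogrify' entries of a hypothetical source dict end up
# on a version's '_mogrify' list -- single mappings are appended, lists are extended. The
# mogrifier type names shown here are made up, not actual bring mogrifiers.
_example_source_details = {
    "artefact": "archive",                       # shorthand for {"type": "archive"}
    "mogrify": [{"type": "pick_file", "path": "bin/pkg"}],
}
_example_version_after = {
    "version": "1.0.0",
    "_mogrify": [
        {"type": "archive"},                     # from the 'artefact' shorthand
        {"type": "pick_file", "path": "bin/pkg"},  # from the 'mogrify' section
    ],
}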
async def retrieve(self, *value_names: str, **requirements) -> Mapping[str, Any]:

    if not self._index:
        raise FrklException(
            msg=f"Can't retrieve values for PkgTing '{self.full_name}'.",
            reason="Index not set yet.",
        )

    result: Dict[str, Any] = {}
    source = requirements["source"]

    resolver = self._get_resolver(source_dict=source)

    seed_data = await resolver.get_seed_data(source, bring_index=self.bring_index)
    if seed_data is None:
        seed_data = {}

    if "index_name" in value_names:
        result["index_name"] = self.bring_index.name

    if "source" in value_names:
        result["source"] = source

    metadata = None
    if (
        "metadata" in value_names
        or "args" in value_names
        or "aliases" in value_names
        or "metadata_valid" in value_names
    ):
        metadata = await self._get_metadata(source)
        result["metadata"] = metadata

    if "args" in value_names:
        result["args"] = await self._calculate_args(metadata=metadata)

    if "aliases" in value_names:
        result["aliases"] = await self._get_aliases(metadata)

    if "info" in value_names:
        info = requirements.get("info", {})
        result["info"] = get_seeded_dict(
            seed_data.get("info", None), info, merge_strategy="merge"
        )

    if "labels" in value_names:
        labels = requirements.get("labels", {})
        result["labels"] = get_seeded_dict(
            seed_data.get("labels", None), labels, merge_strategy="update"
        )

    # if "tags" in value_names:
    #     tags = requirements.get("tags", [])
    #     result["tags"] = tags

    if "tags" in value_names:
        result["tags"] = requirements.get("tags", [])
        parent_tags: Iterable[str] = seed_data.get("tags", None)
        if parent_tags:
            result["tags"].extend(parent_tags)

    return result
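# Illustrative only: the value names this retrieve() implementation produces, per the branches
# above ('metadata_valid' is checked but only triggers the metadata lookup). Which names a
# caller actually requests depends on the surrounding tings machinery.
_example_value_names = (
    "index_name", "source", "metadata", "args", "aliases", "info", "labels", "tags",
)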