def _parse_blk_settings(self, dic):
    """parse the "config" block"""
    block = self._get_entry(dic, self.key_settings).copy()

    # set defaults
    settings = Settings(None).serialize().get(self.key_settings)
    settings.update(block)

    # resolve minimum version
    if self.key_settings_minversion in settings:
        minversion = settings[self.key_settings_minversion]
        self._check_minversion(minversion)

    # normalize paths
    p = self._norm_path(settings[self.key_settings_dotpath])
    settings[self.key_settings_dotpath] = p
    p = self._norm_path(settings[self.key_settings_workdir])
    settings[self.key_settings_workdir] = p
    p = [self._norm_path(p) for p in settings[Settings.key_filter_file]]
    settings[Settings.key_filter_file] = p
    p = [self._norm_path(p) for p in settings[Settings.key_func_file]]
    settings[Settings.key_func_file] = p

    if self._debug:
        self._debug_dict('settings block:', settings)
    return settings
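# Editorial note, not from the original source: a minimal sketch of the
# "defaults first, user block second" merge used by _parse_blk_settings above,
# with plain dicts standing in for the Settings(None).serialize() output
# (keys and values below are hypothetical):
#
#   defaults = {'dotpath': 'dotfiles', 'ignoreempty': False}
#   block = {'dotpath': '~/.dotfiles'}
#   defaults.update(block)
#   # -> {'dotpath': '~/.dotfiles', 'ignoreempty': False}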
class CfgYaml: # global entries key_settings = 'config' key_dotfiles = 'dotfiles' key_profiles = 'profiles' key_actions = 'actions' old_key_trans_r = 'trans' key_trans_r = 'trans_read' key_trans_w = 'trans_write' key_variables = 'variables' key_dvariables = 'dynvariables' action_pre = 'pre' action_post = 'post' # profiles/dotfiles entries key_dotfile_src = 'src' key_dotfile_dst = 'dst' key_dotfile_link = 'link' key_dotfile_actions = 'actions' key_dotfile_link_children = 'link_children' key_dotfile_noempty = 'ignoreempty' # profile key_profile_dotfiles = 'dotfiles' key_profile_include = 'include' key_profile_variables = 'variables' key_profile_dvariables = 'dynvariables' key_profile_actions = 'actions' key_all = 'ALL' # import entries key_import_actions = 'import_actions' key_import_configs = 'import_configs' key_import_variables = 'import_variables' key_import_profile_dfs = 'import' # settings key_settings_dotpath = 'dotpath' key_settings_workdir = 'workdir' key_settings_link_dotfile_default = 'link_dotfile_default' key_settings_noempty = 'ignoreempty' key_settings_minversion = 'minversion' key_imp_link = 'link_on_import' # link values lnk_nolink = LinkTypes.NOLINK.name.lower() lnk_link = LinkTypes.LINK.name.lower() lnk_children = LinkTypes.LINK_CHILDREN.name.lower() def __init__(self, path, profile=None, debug=False): """ config parser @path: config file path @profile: the selected profile @debug: debug flag """ self.path = os.path.abspath(path) self.profile = profile self.debug = debug self.log = Logger() # config needs to be written self.dirty = False # indicates the config has been updated self.dirty_deprecated = False if not os.path.exists(path): err = 'invalid config path: \"{}\"'.format(path) if self.debug: self.log.dbg(err) raise YamlException(err) self.yaml_dict = self._load_yaml(self.path) # live patch deprecated entries self._fix_deprecated(self.yaml_dict) # parse to self variables self._parse_main_yaml(self.yaml_dict) if self.debug: self.log.dbg('before normalization: {}'.format(self.yaml_dict)) # resolve variables self.variables, self.prokeys = self._merge_variables() # apply variables self._apply_variables() # process imported variables (import_variables) self._import_variables() # process imported actions (import_actions) self._import_actions() # process imported profile dotfiles (import) self._import_profiles_dotfiles() # process imported configs (import_configs) self._import_configs() # process profile include self._resolve_profile_includes() # process profile ALL self._resolve_profile_all() # patch dotfiles paths self._resolve_dotfile_paths() if self.debug: self.log.dbg('after normalization: {}'.format(self.yaml_dict)) def get_variables(self): """retrieve all variables""" return self.variables ######################################################## # parsing ######################################################## def _parse_main_yaml(self, dic): """parse the different blocks""" self.ori_settings = self._get_entry(dic, self.key_settings) self.settings = Settings(None).serialize().get(self.key_settings) self.settings.update(self.ori_settings) # resolve minimum version if self.key_settings_minversion in self.settings: minversion = self.settings[self.key_settings_minversion] self._check_minversion(minversion) # resolve settings paths p = self._norm_path(self.settings[self.key_settings_dotpath]) self.settings[self.key_settings_dotpath] = p p = self._norm_path(self.settings[self.key_settings_workdir]) self.settings[self.key_settings_workdir] = p if self.debug: 
self.log.dbg('settings: {}'.format(self.settings)) # dotfiles self.ori_dotfiles = self._get_entry(dic, self.key_dotfiles) self.dotfiles = deepcopy(self.ori_dotfiles) keys = self.dotfiles.keys() if len(keys) != len(list(set(keys))): dups = [x for x in keys if x not in list(set(keys))] err = 'duplicate dotfile keys found: {}'.format(dups) raise YamlException(err) self.dotfiles = self._norm_dotfiles(self.dotfiles) if self.debug: self.log.dbg('dotfiles: {}'.format(self.dotfiles)) # profiles self.ori_profiles = self._get_entry(dic, self.key_profiles) self.profiles = deepcopy(self.ori_profiles) self.profiles = self._norm_profiles(self.profiles) if self.debug: self.log.dbg('profiles: {}'.format(self.profiles)) # actions self.ori_actions = self._get_entry(dic, self.key_actions, mandatory=False) self.actions = deepcopy(self.ori_actions) self.actions = self._norm_actions(self.actions) if self.debug: self.log.dbg('actions: {}'.format(self.actions)) # trans_r key = self.key_trans_r if self.old_key_trans_r in dic: self.log.warn('\"trans\" is deprecated, please use \"trans_read\"') dic[self.key_trans_r] = dic[self.old_key_trans_r] del dic[self.old_key_trans_r] self.ori_trans_r = self._get_entry(dic, key, mandatory=False) self.trans_r = deepcopy(self.ori_trans_r) if self.debug: self.log.dbg('trans_r: {}'.format(self.trans_r)) # trans_w self.ori_trans_w = self._get_entry(dic, self.key_trans_w, mandatory=False) self.trans_w = deepcopy(self.ori_trans_w) if self.debug: self.log.dbg('trans_w: {}'.format(self.trans_w)) # variables self.ori_variables = self._get_entry(dic, self.key_variables, mandatory=False) if self.debug: self.log.dbg('variables: {}'.format(self.ori_variables)) # dynvariables self.ori_dvariables = self._get_entry(dic, self.key_dvariables, mandatory=False) if self.debug: self.log.dbg('dynvariables: {}'.format(self.ori_dvariables)) def _resolve_dotfile_paths(self): """resolve dotfile paths""" t = Templategen(variables=self.variables) for dotfile in self.dotfiles.values(): # src src = dotfile[self.key_dotfile_src] new = t.generate_string(src) if new != src and self.debug: self.log.dbg('dotfile: {} -> {}'.format(src, new)) src = new src = os.path.join(self.settings[self.key_settings_dotpath], src) dotfile[self.key_dotfile_src] = self._norm_path(src) # dst dst = dotfile[self.key_dotfile_dst] new = t.generate_string(dst) if new != dst and self.debug: self.log.dbg('dotfile: {} -> {}'.format(dst, new)) dst = new dotfile[self.key_dotfile_dst] = self._norm_path(dst) def _rec_resolve_vars(self, variables): """recursive resolve variables""" default = self._get_variables_dict(self.profile) t = Templategen(variables=self._merge_dict(default, variables)) for k in variables.keys(): val = variables[k] while Templategen.var_is_template(val): val = t.generate_string(val) variables[k] = val t.update_variables(variables) return variables def _merge_variables(self): """ resolve all variables across the config apply them to any needed entries and return the full list of variables """ if self.debug: self.log.dbg('get local variables') # get all variables from local and resolve var = self._get_variables_dict(self.profile) # get all dynvariables from local and resolve dvar = self._get_dvariables_dict() # temporarly resolve all variables for "include" merged = self._merge_dict(dvar, var) merged = self._rec_resolve_vars(merged) self._debug_vars(merged) # exec dynvariables self._shell_exec_dvars(dvar.keys(), merged) if self.debug: self.log.dbg('local variables resolved') self._debug_vars(merged) # resolve profile 
includes t = Templategen(variables=merged) for k, v in self.profiles.items(): if self.key_profile_include in v: new = [] for k in v[self.key_profile_include]: new.append(t.generate_string(k)) v[self.key_profile_include] = new # now get the included ones pro_var = self._get_included_variables(self.profile, seen=[self.profile]) pro_dvar = self._get_included_dvariables(self.profile, seen=[self.profile]) # exec incl dynvariables self._shell_exec_dvars(pro_dvar.keys(), pro_dvar) # merge all and resolve merged = self._merge_dict(pro_var, merged) merged = self._merge_dict(pro_dvar, merged) merged = self._rec_resolve_vars(merged) if self.debug: self.log.dbg('resolve all uses of variables in config') self._debug_vars(merged) prokeys = list(pro_var.keys()) + list(pro_dvar.keys()) return merged, prokeys def _apply_variables(self): """template any needed parts of the config""" t = Templategen(variables=self.variables) # import_actions new = [] entries = self.settings.get(self.key_import_actions, []) new = self._template_list(t, entries) if new: self.settings[self.key_import_actions] = new # import_configs entries = self.settings.get(self.key_import_configs, []) new = self._template_list(t, entries) if new: self.settings[self.key_import_configs] = new # import_variables entries = self.settings.get(self.key_import_variables, []) new = self._template_list(t, entries) if new: self.settings[self.key_import_variables] = new # profile's import for k, v in self.profiles.items(): entries = v.get(self.key_import_profile_dfs, []) new = self._template_list(t, entries) if new: v[self.key_import_profile_dfs] = new def _norm_actions(self, actions): """ ensure each action is either pre or post explicitely action entry of the form {action_key: (pre|post, action)} """ if not actions: return actions new = {} for k, v in actions.items(): if k == self.action_pre or k == self.action_post: for key, action in v.items(): new[key] = (k, action) else: new[k] = (self.action_post, v) return new def _norm_profiles(self, profiles): """normalize profiles entries""" if not profiles: return profiles new = {} for k, v in profiles.items(): # add dotfiles entry if not present if self.key_profile_dotfiles not in v: v[self.key_profile_dotfiles] = [] new[k] = v return new def _norm_dotfiles(self, dotfiles): """normalize dotfiles entries""" if not dotfiles: return dotfiles new = {} for k, v in dotfiles.items(): # add 'src' as key' if not present if self.key_dotfile_src not in v: v[self.key_dotfile_src] = k new[k] = v else: new[k] = v # fix deprecated trans key if self.old_key_trans_r in v: msg = '\"trans\" is deprecated, please use \"trans_read\"' self.log.warn(msg) v[self.key_trans_r] = v[self.old_key_trans_r] del v[self.old_key_trans_r] new[k] = v # apply link value if self.key_dotfile_link not in v: val = self.settings[self.key_settings_link_dotfile_default] v[self.key_dotfile_link] = val # apply noempty if undefined if self.key_dotfile_noempty not in v: val = self.settings.get(self.key_settings_noempty, False) v[self.key_dotfile_noempty] = val return new def _get_variables_dict(self, profile): """return enriched variables""" variables = deepcopy(self.ori_variables) # add profile variable if profile: variables['profile'] = profile # add some more variables p = self.settings.get(self.key_settings_dotpath) p = self._norm_path(p) variables['_dotdrop_dotpath'] = p variables['_dotdrop_cfgpath'] = self._norm_path(self.path) p = self.settings.get(self.key_settings_workdir) p = self._norm_path(p) variables['_dotdrop_workdir'] = p return variables 
def _get_dvariables_dict(self): """return dynvariables""" variables = deepcopy(self.ori_dvariables) return variables def _get_included_variables(self, profile, seen): """return included variables""" variables = {} if not profile or profile not in self.profiles.keys(): return variables # profile entry pentry = self.profiles.get(profile) # inherite profile variables for inherited_profile in pentry.get(self.key_profile_include, []): if inherited_profile == profile or inherited_profile in seen: raise YamlException('\"include\" loop') seen.append(inherited_profile) new = self._get_included_variables(inherited_profile, seen) if self.debug: msg = 'included vars from {}: {}' self.log.dbg(msg.format(inherited_profile, new)) variables.update(new) cur = pentry.get(self.key_profile_variables, {}) return self._merge_dict(cur, variables) def _get_included_dvariables(self, profile, seen): """return included dynvariables""" variables = {} if not profile or profile not in self.profiles.keys(): return variables # profile entry pentry = self.profiles.get(profile) # inherite profile dynvariables for inherited_profile in pentry.get(self.key_profile_include, []): if inherited_profile == profile or inherited_profile in seen: raise YamlException('\"include loop\"') seen.append(inherited_profile) new = self._get_included_dvariables(inherited_profile, seen) if self.debug: msg = 'included dvars from {}: {}' self.log.dbg(msg.format(inherited_profile, new)) variables.update(new) cur = pentry.get(self.key_profile_dvariables, {}) return self._merge_dict(cur, variables) def _resolve_profile_all(self): """resolve some other parts of the config""" # profile -> ALL for k, v in self.profiles.items(): dfs = v.get(self.key_profile_dotfiles, None) if not dfs: continue if self.key_all in dfs: if self.debug: self.log.dbg('add ALL to profile {}'.format(k)) v[self.key_profile_dotfiles] = self.dotfiles.keys() def _resolve_profile_includes(self): # profiles -> include other profile for k, v in self.profiles.items(): self._rec_resolve_profile_include(k) def _rec_resolve_profile_include(self, profile): """ recursively resolve include of other profiles's: * dotfiles * actions """ this_profile = self.profiles[profile] # include dotfiles = this_profile.get(self.key_profile_dotfiles, []) actions = this_profile.get(self.key_profile_actions, []) includes = this_profile.get(self.key_profile_include, None) if not includes: # nothing to include return dotfiles, actions if self.debug: self.log.dbg('{} includes: {}'.format(profile, ','.join(includes))) self.log.dbg('{} dotfiles before include: {}'.format( profile, dotfiles)) self.log.dbg('{} actions before include: {}'.format( profile, actions)) seen = [] for i in uniq_list(includes): # ensure no include loop occurs if i in seen: raise YamlException('\"include loop\"') seen.append(i) # included profile even exists if i not in self.profiles.keys(): self.log.warn('include unknown profile: {}'.format(i)) continue # recursive resolve o_dfs, o_actions = self._rec_resolve_profile_include(i) # merge dotfile keys dotfiles.extend(o_dfs) this_profile[self.key_profile_dotfiles] = uniq_list(dotfiles) # merge actions keys actions.extend(o_actions) this_profile[self.key_profile_actions] = uniq_list(actions) dotfiles = this_profile.get(self.key_profile_dotfiles, []) actions = this_profile.get(self.key_profile_actions, []) if self.debug: self.log.dbg('{} dotfiles after include: {}'.format( profile, dotfiles)) self.log.dbg('{} actions after include: {}'.format( profile, actions)) # since dotfiles and actions are 
resolved here # and variables have been already done at the beginning # of the parsing, we can clear these include self.profiles[profile][self.key_profile_include] = None return dotfiles, actions ######################################################## # handle imported entries ######################################################## def _import_variables(self): """import external variables from paths""" paths = self.settings.get(self.key_import_variables, None) if not paths: return paths = self._glob_paths(paths) for p in paths: path = self._norm_path(p) if self.debug: self.log.dbg('import variables from {}'.format(path)) var = self._import_sub(path, self.key_variables, mandatory=False) if self.debug: self.log.dbg('import dynvariables from {}'.format(path)) dvar = self._import_sub(path, self.key_dvariables, mandatory=False) merged = self._merge_dict(dvar, var) merged = self._rec_resolve_vars(merged) # execute dvar self._shell_exec_dvars(dvar.keys(), merged) self._clear_profile_vars(merged) self.variables = self._merge_dict(merged, self.variables) def _clear_profile_vars(self, dic): """remove profile variables from dic if found""" [dic.pop(k, None) for k in self.prokeys] def _import_actions(self): """import external actions from paths""" paths = self.settings.get(self.key_import_actions, None) if not paths: return paths = self._glob_paths(paths) for p in paths: path = self._norm_path(p) if self.debug: self.log.dbg('import actions from {}'.format(path)) new = self._import_sub(path, self.key_actions, mandatory=False, patch_func=self._norm_actions) self.actions = self._merge_dict(new, self.actions) def _import_profiles_dotfiles(self): """import profile dotfiles""" for k, v in self.profiles.items(): imp = v.get(self.key_import_profile_dfs, None) if not imp: continue if self.debug: self.log.dbg('import dotfiles for profile {}'.format(k)) paths = self._glob_paths(imp) for p in paths: current = v.get(self.key_dotfiles, []) path = self._norm_path(p) new = self._import_sub(path, self.key_dotfiles, mandatory=False) v[self.key_dotfiles] = new + current def _import_config(self, path): """import config from path""" path = self._norm_path(path) if self.debug: self.log.dbg('import config from {}'.format(path)) sub = CfgYaml(path, profile=self.profile, debug=self.debug) # settings is ignored self.dotfiles = self._merge_dict(self.dotfiles, sub.dotfiles) self.profiles = self._merge_dict(self.profiles, sub.profiles) self.actions = self._merge_dict(self.actions, sub.actions) self.trans_r = self._merge_dict(self.trans_r, sub.trans_r) self.trans_w = self._merge_dict(self.trans_w, sub.trans_w) self._clear_profile_vars(sub.variables) if self.debug: self.log.dbg('add import_configs var: {}'.format(sub.variables)) self.variables = self._merge_dict(sub.variables, self.variables) def _import_configs(self): """import configs from external files""" # settings -> import_configs imp = self.settings.get(self.key_import_configs, None) if not imp: return paths = self._glob_paths(imp) for path in paths: self._import_config(path) def _import_sub(self, path, key, mandatory=False, patch_func=None): """ import the block "key" from "path" patch_func is applied to each element if defined """ if self.debug: self.log.dbg('import \"{}\" from \"{}\"'.format(key, path)) extdict = self._load_yaml(path) new = self._get_entry(extdict, key, mandatory=mandatory) if patch_func: if self.debug: self.log.dbg('calling patch: {}'.format(patch_func)) new = patch_func(new) if not new and mandatory: err = 'no \"{}\" imported from \"{}\"'.format(key, 
path) self.log.warn(err) raise YamlException(err) if self.debug: self.log.dbg('imported \"{}\": {}'.format(key, new)) return new ######################################################## # add/remove entries ######################################################## def _new_profile(self, key): """add a new profile if it doesn't exist""" if key not in self.profiles.keys(): # update yaml_dict self.yaml_dict[self.key_profiles][key] = { self.key_profile_dotfiles: [] } if self.debug: self.log.dbg('adding new profile: {}'.format(key)) self.dirty = True def add_dotfile_to_profile(self, dotfile_key, profile_key): """add an existing dotfile key to a profile_key""" self._new_profile(profile_key) profile = self.yaml_dict[self.key_profiles][profile_key] if dotfile_key not in profile[self.key_profile_dotfiles]: profile[self.key_profile_dotfiles].append(dotfile_key) if self.debug: msg = 'add \"{}\" to profile \"{}\"'.format( dotfile_key, profile_key) msg.format(dotfile_key, profile_key) self.log.dbg(msg) self.dirty = True return self.dirty def add_dotfile(self, key, src, dst, link): """add a new dotfile""" if key in self.dotfiles.keys(): return False if self.debug: self.log.dbg('adding new dotfile: {}'.format(key)) df_dict = { self.key_dotfile_src: src, self.key_dotfile_dst: dst, } dfl = self.settings[self.key_settings_link_dotfile_default] if str(link) != dfl: df_dict[self.key_dotfile_link] = str(link) self.yaml_dict[self.key_dotfiles][key] = df_dict self.dirty = True def del_dotfile(self, key): """remove this dotfile from config""" if key not in self.yaml_dict[self.key_dotfiles]: self.log.err('key not in dotfiles: {}'.format(key)) return False if self.debug: self.log.dbg('remove dotfile: {}'.format(key)) del self.yaml_dict[self.key_dotfiles][key] if self.debug: dfs = self.yaml_dict[self.key_dotfiles] self.log.dbg('new dotfiles: {}'.format(dfs)) self.dirty = True return True def del_dotfile_from_profile(self, df_key, pro_key): """remove this dotfile from that profile""" if df_key not in self.dotfiles.keys(): self.log.err('key not in dotfiles: {}'.format(df_key)) return False if pro_key not in self.profiles.keys(): self.log.err('key not in profile: {}'.format(pro_key)) return False # get the profile dictionary profile = self.yaml_dict[self.key_profiles][pro_key] if df_key not in profile[self.key_profile_dotfiles]: return True if self.debug: dfs = profile[self.key_profile_dotfiles] self.log.dbg('{} profile dotfiles: {}'.format(pro_key, dfs)) self.log.dbg('remove {} from profile {}'.format(df_key, pro_key)) profile[self.key_profile_dotfiles].remove(df_key) if self.debug: dfs = profile[self.key_profile_dotfiles] self.log.dbg('{} profile dotfiles: {}'.format(pro_key, dfs)) self.dirty = True return True ######################################################## # handle deprecated entries ######################################################## def _fix_deprecated(self, yamldict): """fix deprecated entries""" self._fix_deprecated_link_by_default(yamldict) self._fix_deprecated_dotfile_link(yamldict) def _fix_deprecated_link_by_default(self, yamldict): """fix deprecated link_by_default""" key = 'link_by_default' newkey = self.key_imp_link if self.key_settings not in yamldict: return if not yamldict[self.key_settings]: return config = yamldict[self.key_settings] if key not in config: return if config[key]: config[newkey] = self.lnk_link else: config[newkey] = self.lnk_nolink del config[key] self.log.warn('deprecated \"link_by_default\"') self.dirty = True self.dirty_deprecated = True def 
_fix_deprecated_dotfile_link(self, yamldict): """fix deprecated link in dotfiles""" if self.key_dotfiles not in yamldict: return if not yamldict[self.key_dotfiles]: return for k, dotfile in yamldict[self.key_dotfiles].items(): new = self.lnk_nolink if self.key_dotfile_link in dotfile and \ type(dotfile[self.key_dotfile_link]) is bool: # patch link: <bool> cur = dotfile[self.key_dotfile_link] new = self.lnk_nolink if cur: new = self.lnk_link dotfile[self.key_dotfile_link] = new self.dirty = True self.dirty_deprecated = True self.log.warn('deprecated \"link\" value') elif self.key_dotfile_link_children in dotfile and \ type(dotfile[self.key_dotfile_link_children]) is bool: # patch link_children: <bool> cur = dotfile[self.key_dotfile_link_children] new = self.lnk_nolink if cur: new = self.lnk_children del dotfile[self.key_dotfile_link_children] dotfile[self.key_dotfile_link] = new self.dirty = True self.dirty_deprecated = True self.log.warn('deprecated \"link_children\" value') ######################################################## # yaml utils ######################################################## def save(self): """save this instance and return True if saved""" if not self.dirty: return False content = self._clear_none(self.dump()) # make sure we have the base entries if self.key_settings not in content: content[self.key_settings] = None if self.key_dotfiles not in content: content[self.key_dotfiles] = None if self.key_profiles not in content: content[self.key_profiles] = None if self.dirty_deprecated: # add minversion settings = content[self.key_settings] settings[self.key_settings_minversion] = VERSION # save to file if self.debug: self.log.dbg('saving to {}'.format(self.path)) try: self._yaml_dump(content, self.path) except Exception as e: self.log.err(e) raise YamlException('error saving config: {}'.format(self.path)) if self.dirty_deprecated: warn = 'your config contained deprecated entries' warn += ' and was updated' self.log.warn(warn) self.dirty = False self.cfg_updated = False return True def dump(self): """dump the config dictionary""" return self.yaml_dict def _load_yaml(self, path): """load a yaml file to a dict""" content = {} if not os.path.exists(path): raise YamlException('config path not found: {}'.format(path)) try: content = self._yaml_load(path) except Exception as e: self.log.err(e) raise YamlException('invalid config: {}'.format(path)) return content def _yaml_load(self, path): """load from yaml""" with open(path, 'r') as f: y = yaml() y.typ = 'rt' content = y.load(f) return content def _yaml_dump(self, content, path): """dump to yaml""" with open(self.path, 'w') as f: y = yaml() y.default_flow_style = False y.indent = 2 y.typ = 'rt' y.dump(content, f) ######################################################## # helpers ######################################################## def _merge_dict(self, high, low): """merge high and low dict""" if not high: high = {} if not low: low = {} return {**low, **high} def _get_entry(self, dic, key, mandatory=True): """return entry from yaml dictionary""" if key not in dic: if mandatory: raise YamlException('invalid config: no {} found'.format(key)) dic[key] = {} return dic[key] if mandatory and not dic[key]: # ensure is not none dic[key] = {} return dic[key] def _clear_none(self, dic): """recursively delete all none/empty values in a dictionary.""" new = {} for k, v in dic.items(): newv = v if isinstance(v, dict): newv = self._clear_none(v) if not newv: # no empty dict continue if newv is None: # no None value continue if 
isinstance(newv, list) and not newv: # no empty list continue new[k] = newv return new def _is_glob(self, path): """quick test if path is a glob""" return '*' in path or '?' in path def _glob_paths(self, paths): """glob a list of paths""" if not isinstance(paths, list): paths = [paths] res = [] for p in paths: if not self._is_glob(p): res.append(p) continue p = os.path.expanduser(p) new = glob.glob(p) if not new: raise YamlException('bad path: {}'.format(p)) res.extend(glob.glob(p)) return res def _debug_vars(self, variables): """pretty print variables""" if not self.debug: return self.log.dbg('variables:') for k, v in variables.items(): self.log.dbg('\t\"{}\": {}'.format(k, v)) def _norm_path(self, path): """resolve a path either absolute or relative to config path""" path = os.path.expanduser(path) if not os.path.isabs(path): d = os.path.dirname(self.path) return os.path.join(d, path) return os.path.normpath(path) def _shell_exec_dvars(self, keys, variables): """shell execute dynvariables""" for k in list(keys): ret, out = shell(variables[k], debug=self.debug) if not ret: err = 'var \"{}: {}\" failed: {}'.format(k, variables[k], out) self.log.err(err) raise YamlException(err) if self.debug: self.log.dbg('\"{}\": {} -> {}'.format(k, variables[k], out)) variables[k] = out def _template_list(self, t, entries): """template a list of entries""" new = [] if not entries: return new for e in entries: et = t.generate_string(e) if self.debug and e != et: self.log.dbg('resolved: {} -> {}'.format(e, et)) new.append(et) return new def _check_minversion(self, minversion): if not minversion: return try: cur = tuple([int(x) for x in VERSION.split('.')]) cfg = tuple([int(x) for x in minversion.split('.')]) except Exception: err = 'bad version: \"{}\" VS \"{}\"'.format(VERSION, minversion) raise YamlException(err) if cur < cfg: err = 'current dotdrop version is too old for that config file.' err += ' Please update.' raise YamlException(err)
class CfgYaml:

    # global entries
    key_settings = Settings.key_yaml
    key_dotfiles = 'dotfiles'
    key_profiles = 'profiles'
    key_actions = 'actions'
    old_key_trans_r = 'trans'
    key_trans_r = 'trans_read'
    key_trans_w = 'trans_write'
    key_variables = 'variables'
    key_dvariables = 'dynvariables'
    action_pre = 'pre'
    action_post = 'post'
    # profiles/dotfiles entries
    key_dotfile_src = 'src'
    key_dotfile_dst = 'dst'
    key_dotfile_link = 'link'
    key_dotfile_actions = 'actions'
    key_dotfile_link_children = 'link_children'
    key_dotfile_noempty = 'ignoreempty'
    # profile
    key_profile_dotfiles = 'dotfiles'
    key_profile_include = 'include'
    key_profile_variables = 'variables'
    key_profile_dvariables = 'dynvariables'
    key_profile_actions = 'actions'
    key_all = 'ALL'
    # import entries
    key_import_actions = 'import_actions'
    key_import_configs = 'import_configs'
    key_import_variables = 'import_variables'
    key_import_profile_dfs = 'import'
    key_import_sep = ':'
    key_import_ignore_key = 'optional'
    key_import_fatal_not_found = True
    # settings
    key_settings_dotpath = Settings.key_dotpath
    key_settings_workdir = Settings.key_workdir
    key_settings_link_dotfile_default = Settings.key_link_dotfile_default
    key_settings_noempty = Settings.key_ignoreempty
    key_settings_minversion = Settings.key_minversion
    key_imp_link = Settings.key_link_on_import
    # link values
    lnk_nolink = LinkTypes.NOLINK.name.lower()
    lnk_link = LinkTypes.LINK.name.lower()
    lnk_children = LinkTypes.LINK_CHILDREN.name.lower()

    def __init__(self, path, profile=None, debug=False):
        """
        config parser
        @path: config file path
        @profile: the selected profile
        @debug: debug flag
        """
        self.path = os.path.abspath(path)
        self.profile = profile
        self.debug = debug
        self.log = Logger()
        # config needs to be written
        self.dirty = False
        # indicates the config has been updated
        self.dirty_deprecated = False

        if not os.path.exists(path):
            err = 'invalid config path: \"{}\"'.format(path)
            if self.debug:
                self.log.dbg(err)
            raise YamlException(err)

        self.yaml_dict = self._load_yaml(self.path)
        # live patch deprecated entries
        self._fix_deprecated(self.yaml_dict)
        # parse to self variables
        self._parse_main_yaml(self.yaml_dict)
        if self.debug:
            self.log.dbg('BEFORE normalization: {}'.format(self.yaml_dict))

        # resolve variables
        self.variables, self.prokeys = self._merge_variables()
        # apply variables
        self._apply_variables()

        # process imported variables (import_variables)
        self._import_variables()
        # process imported actions (import_actions)
        self._import_actions()
        # process imported profile dotfiles (import)
        self._import_profiles_dotfiles()
        # process imported configs (import_configs)
        self._import_configs()

        # process profile include
        self._resolve_profile_includes()
        # process profile ALL
        self._resolve_profile_all()
        # patch dotfiles paths
        self._resolve_dotfile_paths()

        if self.debug:
            self.log.dbg('AFTER normalization: {}'.format(self.yaml_dict))

    def get_variables(self):
        """retrieve all variables"""
        return self.variables

    ########################################################
    # parsing
    ########################################################

    def _parse_main_yaml(self, dic):
        """parse the different blocks"""
        self.ori_settings = self._get_entry(dic, self.key_settings)
        self.settings = Settings(None).serialize().get(self.key_settings)
        self.settings.update(self.ori_settings)

        # resolve minimum version
        if self.key_settings_minversion in self.settings:
            minversion = self.settings[self.key_settings_minversion]
            self._check_minversion(minversion)

        # resolve settings paths
        p = self._norm_path(self.settings[self.key_settings_dotpath])
        self.settings[self.key_settings_dotpath] = p
        p = self._norm_path(self.settings[self.key_settings_workdir])
        self.settings[self.key_settings_workdir] = p
        p = [
            self._norm_path(p)
            for p in self.settings[Settings.key_filter_file]
        ]
        self.settings[Settings.key_filter_file] = p
        p = [self._norm_path(p) for p in self.settings[Settings.key_func_file]]
        self.settings[Settings.key_func_file] = p
        if self.debug:
            self._debug_dict('settings', self.settings)

        # dotfiles
        self.ori_dotfiles = self._get_entry(dic, self.key_dotfiles)
        self.dotfiles = deepcopy(self.ori_dotfiles)
        keys = self.dotfiles.keys()
        if len(keys) != len(list(set(keys))):
            dups = [x for x in keys if x not in list(set(keys))]
            err = 'duplicate dotfile keys found: {}'.format(dups)
            raise YamlException(err)
        self.dotfiles = self._norm_dotfiles(self.dotfiles)
        if self.debug:
            self._debug_dict('dotfiles', self.dotfiles)

        # profiles
        self.ori_profiles = self._get_entry(dic, self.key_profiles)
        self.profiles = deepcopy(self.ori_profiles)
        self.profiles = self._norm_profiles(self.profiles)
        if self.debug:
            self._debug_dict('profiles', self.profiles)

        # actions
        self.ori_actions = self._get_entry(dic, self.key_actions,
                                           mandatory=False)
        self.actions = deepcopy(self.ori_actions)
        self.actions = self._norm_actions(self.actions)
        if self.debug:
            self._debug_dict('actions', self.actions)

        # trans_r
        key = self.key_trans_r
        if self.old_key_trans_r in dic:
            self.log.warn('\"trans\" is deprecated, please use \"trans_read\"')
            dic[self.key_trans_r] = dic[self.old_key_trans_r]
            del dic[self.old_key_trans_r]
        self.ori_trans_r = self._get_entry(dic, key, mandatory=False)
        self.trans_r = deepcopy(self.ori_trans_r)
        if self.debug:
            self._debug_dict('trans_r', self.trans_r)

        # trans_w
        self.ori_trans_w = self._get_entry(dic, self.key_trans_w,
                                           mandatory=False)
        self.trans_w = deepcopy(self.ori_trans_w)
        if self.debug:
            self._debug_dict('trans_w', self.trans_w)

        # variables
        self.ori_variables = self._get_entry(dic, self.key_variables,
                                             mandatory=False)
        if self.debug:
            self._debug_dict('variables', self.ori_variables)

        # dynvariables
        self.ori_dvariables = self._get_entry(dic, self.key_dvariables,
                                              mandatory=False)
        if self.debug:
            self._debug_dict('dynvariables', self.ori_dvariables)

    def _resolve_dotfile_paths(self):
        """resolve dotfiles paths"""
        t = Templategen(variables=self.variables,
                        func_file=self.settings[Settings.key_func_file],
                        filter_file=self.settings[Settings.key_filter_file])
        for dotfile in self.dotfiles.values():
            # src
            src = dotfile[self.key_dotfile_src]
            newsrc = self.resolve_dotfile_src(src, templater=t)
            dotfile[self.key_dotfile_src] = newsrc
            # dst
            dst = dotfile[self.key_dotfile_dst]
            newdst = self.resolve_dotfile_dst(dst, templater=t)
            dotfile[self.key_dotfile_dst] = newdst

    def resolve_dotfile_src(self, src, templater=None):
        """resolve dotfile src path"""
        newsrc = ''
        if src:
            new = src
            if templater:
                new = templater.generate_string(src)
            if new != src and self.debug:
                msg = 'dotfile src: \"{}\" -> \"{}\"'.format(src, new)
                self.log.dbg(msg)
            src = new
            src = os.path.join(self.settings[self.key_settings_dotpath], src)
            newsrc = self._norm_path(src)
        return newsrc

    def resolve_dotfile_dst(self, dst, templater=None):
        """resolve dotfile dst path"""
        newdst = ''
        if dst:
            new = dst
            if templater:
                new = templater.generate_string(dst)
            if new != dst and self.debug:
                msg = 'dotfile dst: \"{}\" -> \"{}\"'.format(dst, new)
                self.log.dbg(msg)
            dst = new
            newdst = self._norm_path(dst)
        return newdst
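    # Illustrative sketch (editorial, not from the original source): assuming
    # a hypothetical config at /home/user/dotdrop/config.yaml, a dotpath of
    # "dotfiles" and HOME set to /home/user, the two resolvers above yield:
    #
    #   resolve_dotfile_src('vimrc')    -> '/home/user/dotdrop/dotfiles/vimrc'
    #   resolve_dotfile_dst('~/.vimrc') -> '/home/user/.vimrc'
    #
    # When a templater is passed, both values are first rendered as templates.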
    def _rec_resolve_vars(self, variables):
        """recursive resolve variables"""
        default = self._get_variables_dict(self.profile)
        t = Templategen(variables=self._merge_dict(default, variables),
                        func_file=self.settings[Settings.key_func_file],
                        filter_file=self.settings[Settings.key_filter_file])
        for k in variables.keys():
            val = variables[k]
            while Templategen.var_is_template(val):
                val = t.generate_string(val)
                variables[k] = val
                t.update_variables(variables)
        return variables

    def _get_profile_included_vars(self, tvars):
        """resolve profile included variables/dynvariables"""
        t = Templategen(variables=tvars,
                        func_file=self.settings[Settings.key_func_file],
                        filter_file=self.settings[Settings.key_filter_file])
        for k, v in self.profiles.items():
            if self.key_profile_include in v:
                new = []
                for x in v[self.key_profile_include]:
                    new.append(t.generate_string(x))
                v[self.key_profile_include] = new
        # now get the included ones
        pro_var = self._get_profile_included_item(self.profile,
                                                  self.key_profile_variables,
                                                  seen=[self.profile])
        pro_dvar = self._get_profile_included_item(self.profile,
                                                   self.key_profile_dvariables,
                                                   seen=[self.profile])
        # exec incl dynvariables
        self._shell_exec_dvars(pro_dvar.keys(), pro_dvar)
        return pro_var, pro_dvar

    def _merge_variables(self):
        """
        resolve all variables across the config
        apply them to any needed entries
        and return the full list of variables
        """
        if self.debug:
            self.log.dbg('get local variables')

        # get all variables from local and resolve
        var = self._get_variables_dict(self.profile)
        # get all dynvariables from local and resolve
        dvar = self._get_dvariables_dict()

        # temporarily resolve all variables for "include"
        merged = self._merge_dict(dvar, var)
        merged = self._rec_resolve_vars(merged)
        if self.debug:
            self._debug_dict('variables', merged)
        # exec dynvariables
        self._shell_exec_dvars(dvar.keys(), merged)
        if self.debug:
            self.log.dbg('local variables resolved')
            self._debug_dict('variables', merged)

        # resolve profile included variables/dynvariables
        pro_var, pro_dvar = self._get_profile_included_vars(merged)

        # merge all and resolve
        merged = self._merge_dict(pro_var, merged)
        merged = self._merge_dict(pro_dvar, merged)
        merged = self._rec_resolve_vars(merged)

        if self.debug:
            self.log.dbg('resolve all uses of variables in config')
            self._debug_dict('variables', merged)

        prokeys = list(pro_var.keys()) + list(pro_dvar.keys())
        return merged, prokeys

    def _apply_variables(self):
        """template any needed parts of the config"""
        t = Templategen(variables=self.variables,
                        func_file=self.settings[Settings.key_func_file],
                        filter_file=self.settings[Settings.key_filter_file])

        # import_actions
        new = []
        entries = self.settings.get(self.key_import_actions, [])
        new = self._template_list(t, entries)
        if new:
            self.settings[self.key_import_actions] = new

        # import_configs
        entries = self.settings.get(self.key_import_configs, [])
        new = self._template_list(t, entries)
        if new:
            self.settings[self.key_import_configs] = new

        # import_variables
        entries = self.settings.get(self.key_import_variables, [])
        new = self._template_list(t, entries)
        if new:
            self.settings[self.key_import_variables] = new

        # profile's import
        for k, v in self.profiles.items():
            entries = v.get(self.key_import_profile_dfs, [])
            new = self._template_list(t, entries)
            if new:
                v[self.key_import_profile_dfs] = new

    def _norm_actions(self, actions):
        """
        ensure each action is either pre or post explicitly
        action entry of the form {action_key: (pre|post, action)}
        """
        if not actions:
            return actions
        new = {}
        for k, v in actions.items():
            if k == self.action_pre or k == self.action_post:
                for key, action in v.items():
                    new[key] = (k, action)
            else:
                new[k] = (self.action_post, v)
        return new
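    # Illustrative sketch (editorial, not from the original source): the
    # normalized form produced by _norm_actions above, with hypothetical
    # entries:
    #
    #   {'pre': {'backup': 'cp -r ~/.vim ~/.vim.bak'},
    #    'vimplug': 'vim +PlugInstall +qall'}
    # becomes
    #   {'backup': ('pre', 'cp -r ~/.vim ~/.vim.bak'),
    #    'vimplug': ('post', 'vim +PlugInstall +qall')}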
    def _norm_profiles(self, profiles):
        """normalize profiles entries"""
        if not profiles:
            return profiles
        new = {}
        for k, v in profiles.items():
            if not v:
                # no dotfiles
                continue
            # add dotfiles entry if not present
            if self.key_profile_dotfiles not in v:
                v[self.key_profile_dotfiles] = []
            new[k] = v
        return new

    def _norm_dotfiles(self, dotfiles):
        """normalize dotfiles entries"""
        if not dotfiles:
            return dotfiles
        new = {}
        for k, v in dotfiles.items():
            # add 'src' as key if not present
            if self.key_dotfile_src not in v:
                v[self.key_dotfile_src] = k
                new[k] = v
            else:
                new[k] = v
            # fix deprecated trans key
            if self.old_key_trans_r in v:
                msg = '\"trans\" is deprecated, please use \"trans_read\"'
                self.log.warn(msg)
                v[self.key_trans_r] = v[self.old_key_trans_r]
                del v[self.old_key_trans_r]
                new[k] = v
            # apply link value
            if self.key_dotfile_link not in v:
                val = self.settings[self.key_settings_link_dotfile_default]
                v[self.key_dotfile_link] = val
            # apply noempty if undefined
            if self.key_dotfile_noempty not in v:
                val = self.settings.get(self.key_settings_noempty, False)
                v[self.key_dotfile_noempty] = val
        return new

    def _get_variables_dict(self, profile):
        """return enriched variables"""
        variables = deepcopy(self.ori_variables)
        # add profile variable
        if profile:
            variables['profile'] = profile
        # add some more variables
        p = self.settings.get(self.key_settings_dotpath)
        p = self._norm_path(p)
        variables['_dotdrop_dotpath'] = p
        variables['_dotdrop_cfgpath'] = self._norm_path(self.path)
        p = self.settings.get(self.key_settings_workdir)
        p = self._norm_path(p)
        variables['_dotdrop_workdir'] = p
        return variables

    def _get_dvariables_dict(self):
        """return dynvariables"""
        variables = deepcopy(self.ori_dvariables)
        return variables

    def _get_profile_included_item(self, profile, item, seen):
        """recursively get included <item> from profile"""
        items = {}
        if not profile or profile not in self.profiles.keys():
            return items

        # considered profile entry
        pentry = self.profiles.get(profile)

        # recursively get <item> from inherited profile
        for inherited_profile in pentry.get(self.key_profile_include, []):
            if inherited_profile == profile or inherited_profile in seen:
                raise YamlException('\"include\" loop')
            seen.append(inherited_profile)
            new = self._get_profile_included_item(inherited_profile,
                                                  item, seen)
            if self.debug:
                msg = 'included {} from {}: {}'
                self.log.dbg(msg.format(item, inherited_profile, new))
            items.update(new)

        cur = pentry.get(item, {})
        return self._merge_dict(cur, items)

    def _resolve_profile_all(self):
        """resolve some other parts of the config"""
        # profile -> ALL
        for k, v in self.profiles.items():
            dfs = v.get(self.key_profile_dotfiles, None)
            if not dfs:
                continue
            if self.key_all in dfs:
                if self.debug:
                    self.log.dbg('add ALL to profile {}'.format(k))
                v[self.key_profile_dotfiles] = self.dotfiles.keys()

    def _resolve_profile_includes(self):
        # profiles -> include other profile
        for k, v in self.profiles.items():
            self._rec_resolve_profile_include(k)

    def _rec_resolve_profile_include(self, profile):
        """
        recursively resolve include of other profiles':
        * dotfiles
        * actions
        * variables
        * dynvariables
        variables/dynvariables are directly merged with the global
        variables (self.variables) if these are included in the
        selected profile
        returns dotfiles, actions, variables, dynvariables
        """
        this_profile = self.profiles[profile]

        # considered profile content
        dotfiles = this_profile.get(self.key_profile_dotfiles, []) or []
        actions = this_profile.get(self.key_profile_actions, []) or []
        includes = this_profile.get(self.key_profile_include, []) or []
        pvars = this_profile.get(self.key_profile_variables, {}) or {}
        pdvars = this_profile.get(self.key_profile_dvariables, {}) or {}
        if not includes:
            # nothing to include
            return dotfiles, actions, pvars, pdvars

        if self.debug:
            self.log.dbg('{} includes {}'.format(profile, ','.join(includes)))
            self.log.dbg('{} dotfiles before include: {}'.format(
                profile, dotfiles))
            self.log.dbg('{} actions before include: {}'.format(
                profile, actions))
            self.log.dbg('{} variables before include: {}'.format(
                profile, pvars))
            self.log.dbg('{} dynvariables before include: {}'.format(
                profile, pdvars))

        seen = []
        for i in uniq_list(includes):
            if self.debug:
                self.log.dbg('resolving includes "{}" <- "{}"'.format(
                    profile, i))
            # ensure no include loop occurs
            if i in seen:
                raise YamlException('\"include loop\"')
            seen.append(i)
            # included profile even exists
            if i not in self.profiles.keys():
                self.log.warn('include unknown profile: {}'.format(i))
                continue
            # recursive resolve
            if self.debug:
                self.log.dbg(
                    'recursively resolving includes for profile "{}"'.format(
                        i))
            o_dfs, o_actions, o_v, o_dv = self._rec_resolve_profile_include(i)

            # merge dotfile keys
            if self.debug:
                self.log.dbg('Merging dotfiles {} <- {}: {} <- {}'.format(
                    profile, i, dotfiles, o_dfs))
            dotfiles.extend(o_dfs)
            this_profile[self.key_profile_dotfiles] = uniq_list(dotfiles)

            # merge actions keys
            if self.debug:
                self.log.dbg('Merging actions {} <- {}: {} <- {}'.format(
                    profile, i, actions, o_actions))
            actions.extend(o_actions)
            this_profile[self.key_profile_actions] = uniq_list(actions)

            # merge variables
            if self.debug:
                self.log.dbg('Merging variables {} <- {}: {} <- {}'.format(
                    profile, i, dict(pvars), dict(o_v)))
            pvars = self._merge_dict(o_v, pvars)
            this_profile[self.key_profile_variables] = pvars

            # merge dynvariables
            if self.debug:
                self.log.dbg(
                    'Merging dynamic variables {} <- {}: {} <- {}'.format(
                        profile, i, dict(pdvars), dict(o_dv)))
            pdvars = self._merge_dict(o_dv, pdvars)
            this_profile[self.key_profile_dvariables] = pdvars

        dotfiles = this_profile.get(self.key_profile_dotfiles, [])
        actions = this_profile.get(self.key_profile_actions, [])
        pvars = this_profile.get(self.key_profile_variables, {}) or {}
        pdvars = this_profile.get(self.key_profile_dvariables, {}) or {}
        if self.debug:
            self.log.dbg('{} dotfiles after include: {}'.format(
                profile, dotfiles))
            self.log.dbg('{} actions after include: {}'.format(
                profile, actions))
            self.log.dbg('{} variables after include: {}'.format(
                profile, pvars))
            self.log.dbg('{} dynvariables after include: {}'.format(
                profile, pdvars))

        if profile == self.profile:
            # Only for the selected profile, we execute dynamic variables and
            # we merge variables/dynvariables into the global variables
            self._shell_exec_dvars(pdvars.keys(), pdvars)
            self.variables = self._merge_dict(pvars, self.variables)
            self.variables = self._merge_dict(pdvars, self.variables)

        # since included items are resolved here
        # we can clear these include
        self.profiles[profile][self.key_profile_include] = None
        return dotfiles, actions, pvars, pdvars
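    # Illustrative sketch (editorial, not from the original source): with
    # hypothetical profiles p1 (dotfiles [f_a], include [p2]) and
    # p2 (dotfiles [f_b, f_a]), the recursion above leaves p1 with
    # dotfiles [f_a, f_b]: lists are extended and then de-duplicated with
    # uniq_list. A profile including itself, directly or transitively,
    # raises YamlException('"include loop"').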
    ########################################################
    # handle imported entries
    ########################################################

    def _import_variables(self):
        """import external variables from paths"""
        paths = self.settings.get(self.key_import_variables, None)
        if not paths:
            return
        paths = self._resolve_paths(paths)
        for path in paths:
            if self.debug:
                self.log.dbg('import variables from {}'.format(path))
            var = self._import_sub(path, self.key_variables,
                                   mandatory=False)
            if self.debug:
                self.log.dbg('import dynvariables from {}'.format(path))
            dvar = self._import_sub(path, self.key_dvariables,
                                    mandatory=False)
            merged = self._merge_dict(dvar, var)
            merged = self._rec_resolve_vars(merged)
            # execute dvar
            self._shell_exec_dvars(dvar.keys(), merged)
            self._clear_profile_vars(merged)
            self.variables = self._merge_dict(merged, self.variables)

    def _clear_profile_vars(self, dic):
        """remove profile variables from dic if found"""
        [dic.pop(k, None) for k in self.prokeys]

    def _parse_extended_import_path(self, path_entry):
        """Parse an import path into a tuple (path, fatal_not_found)."""
        if self.debug:
            self.log.dbg('parsing path entry {}'.format(path_entry))
        path, _, attribute = path_entry.rpartition(self.key_import_sep)
        fatal_not_found = attribute != self.key_import_ignore_key
        is_valid_attribute = attribute in ('', self.key_import_ignore_key)
        if not is_valid_attribute:
            # If attribute is not valid it can mean that:
            # - path_entry doesn't contain the separator, and attribute is set
            #   to the whole path by str.rpartition
            # - path_entry contains a separator, but it's in the file path, so
            #   attribute is set to whatever comes after the separator by
            #   str.rpartition
            # In both cases, path_entry is the path we're looking for.
            if self.debug:
                self.log.dbg(
                    'using attribute default values for path {}'.format(
                        path_entry))
            path = path_entry
            fatal_not_found = self.key_import_fatal_not_found
        elif self.debug:
            self.log.dbg(
                'path entry {} has fatal_not_found flag set to {}'.format(
                    path_entry, fatal_not_found))
        return path, fatal_not_found

    def _handle_non_existing_path(self, path, fatal_not_found=True):
        """Raise an exception or log a warning for non-existing paths."""
        error = 'bad path {}'.format(path)
        if fatal_not_found:
            raise YamlException(error)
        self.log.warn(error)

    def _check_path_existence(self, path, fatal_not_found=True):
        """Check if a path exists, raising if necessary."""
        if os.path.exists(path):
            if self.debug:
                self.log.dbg('path {} exists'.format(path))
            return path
        self._handle_non_existing_path(path, fatal_not_found)
        # Explicit return for readability. Anything evaluating to false is ok.
        return None

    def _process_path(self, path_entry):
        """Process a path entry to a normalized form.

        This method processes a path entry. Namely it:
        - Normalizes the path.
        - Expands globs.
        - Checks for path existence, taking into account fatal_not_found.

        This method always returns a list containing only absolute paths
        existing on the filesystem. If the input is not a glob, the list
        contains at most one element, otherwise it could hold more.

        :param path_entry: A path with an optional attribute.
        :type path_entry: str
        :return: A list of normalized existing paths, obtained from the input.
        :rtype: List of str
        """
        path, fatal_not_found = self._parse_extended_import_path(path_entry)
        path = self._norm_path(path)
        paths = self._glob_path(path) if self._is_glob(path) else [path]
        if not paths:
            if self.debug:
                self.log.dbg("glob path {} didn't expand".format(path))
            self._handle_non_existing_path(path, fatal_not_found)
            return []

        checked_paths = (self._check_path_existence(p, fatal_not_found)
                         for p in paths)
        return [p for p in checked_paths if p]

    def _resolve_paths(self, paths):
        """Resolve a list of paths to existing paths.

        This function resolves a list of paths. This means normalizing,
        expanding globs and checking for existence, taking into account
        fatal_not_found flags.

        :param paths: A list of paths. Might contain globs and options.
        :type paths: List of str
        :return: A list of processed paths.
        :rtype: List of str
        """
        processed_paths = (self._process_path(p) for p in paths)
        return list(chain.from_iterable(processed_paths))
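    # Illustrative sketch (editorial, not from the original source): import
    # path entries accept an optional trailing attribute separated by ":".
    # With hypothetical entries:
    #
    #   'variables.d/*.yaml'              -> missing paths raise YamlException
    #   'variables.d/extra.yaml:optional' -> missing paths only log a warning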
    def _import_actions(self):
        """import external actions from paths"""
        paths = self.settings.get(self.key_import_actions, None)
        if not paths:
            return
        paths = self._resolve_paths(paths)
        for path in paths:
            if self.debug:
                self.log.dbg('import actions from {}'.format(path))
            new = self._import_sub(path, self.key_actions,
                                   mandatory=False,
                                   patch_func=self._norm_actions)
            self.actions = self._merge_dict(new, self.actions)

    def _import_profiles_dotfiles(self):
        """import profile dotfiles"""
        for k, v in self.profiles.items():
            imp = v.get(self.key_import_profile_dfs, None)
            if not imp:
                continue
            if self.debug:
                self.log.dbg('import dotfiles for profile {}'.format(k))
            paths = self._resolve_paths(imp)
            for path in paths:
                current = v.get(self.key_dotfiles, [])
                new = self._import_sub(path, self.key_dotfiles,
                                       mandatory=False)
                v[self.key_dotfiles] = new + current

    def _import_config(self, path):
        """import config from path"""
        if self.debug:
            self.log.dbg('import config from {}'.format(path))
        sub = CfgYaml(path, profile=self.profile, debug=self.debug)

        # settings are ignored from external file
        # except for filter_file and func_file
        self.settings[Settings.key_func_file] += [
            self._norm_path(func_file)
            for func_file in sub.settings[Settings.key_func_file]
        ]
        self.settings[Settings.key_filter_file] += [
            self._norm_path(func_file)
            for func_file in sub.settings[Settings.key_filter_file]
        ]

        # merge top entries
        self.dotfiles = self._merge_dict(self.dotfiles, sub.dotfiles)
        self.profiles = self._merge_dict(self.profiles, sub.profiles)
        self.actions = self._merge_dict(self.actions, sub.actions)
        self.trans_r = self._merge_dict(self.trans_r, sub.trans_r)
        self.trans_w = self._merge_dict(self.trans_w, sub.trans_w)
        self._clear_profile_vars(sub.variables)
        if self.debug:
            self._debug_dict('add import_configs var', sub.variables)
        self.variables = self._merge_dict(sub.variables, self.variables)

    def _import_configs(self):
        """import configs from external files"""
        # settings -> import_configs
        imp = self.settings.get(self.key_import_configs, None)
        if not imp:
            return
        paths = self._resolve_paths(imp)
        for path in paths:
            self._import_config(path)

    def _import_sub(self, path, key, mandatory=False, patch_func=None):
        """
        import the block "key" from "path"
        patch_func is applied to each element if defined
        """
        if self.debug:
            self.log.dbg('import \"{}\" from \"{}\"'.format(key, path))
        extdict = self._load_yaml(path)
        new = self._get_entry(extdict, key, mandatory=mandatory)
        if patch_func:
            if self.debug:
                self.log.dbg('calling patch: {}'.format(patch_func))
            new = patch_func(new)
        if not new and mandatory:
            err = 'no \"{}\" imported from \"{}\"'.format(key, path)
            self.log.warn(err)
            raise YamlException(err)
        if self.debug:
            self.log.dbg('imported \"{}\": {}'.format(key, new))
        return new

    ########################################################
    # add/remove entries
    ########################################################

    def _new_profile(self, key):
        """add a new profile if it doesn't exist"""
        if key not in self.profiles.keys():
            # update yaml_dict
            self.yaml_dict[self.key_profiles][key] = {
                self.key_profile_dotfiles: []
            }
            if self.debug:
                self.log.dbg('adding new profile: {}'.format(key))
            self.dirty = True

    def add_dotfile_to_profile(self, dotfile_key, profile_key):
        """add an existing dotfile key to a profile_key"""
        self._new_profile(profile_key)
        profile = self.yaml_dict[self.key_profiles][profile_key]
        if self.key_profile_dotfiles not in profile or \
                profile[self.key_profile_dotfiles] is None:
            profile[self.key_profile_dotfiles] = []
        pdfs = profile[self.key_profile_dotfiles]
        if self.key_all not in pdfs and \
                dotfile_key not in pdfs:
            profile[self.key_profile_dotfiles].append(dotfile_key)
            if self.debug:
                msg = 'add \"{}\" to profile \"{}\"'.format(
                    dotfile_key, profile_key)
                msg.format(dotfile_key, profile_key)
                self.log.dbg(msg)
            self.dirty = True
        return self.dirty

    def get_all_dotfile_keys(self):
        """return all existing dotfile keys"""
        return self.dotfiles.keys()

    def add_dotfile(self, key, src, dst, link):
        """add a new dotfile"""
        if key in self.dotfiles.keys():
            return False
        if self.debug:
            self.log.dbg('adding new dotfile: {}'.format(key))

        df_dict = {
            self.key_dotfile_src: src,
            self.key_dotfile_dst: dst,
        }
        dfl = self.settings[self.key_settings_link_dotfile_default]
        if str(link) != dfl:
            df_dict[self.key_dotfile_link] = str(link)
        self.yaml_dict[self.key_dotfiles][key] = df_dict
        self.dirty = True

    def del_dotfile(self, key):
        """remove this dotfile from config"""
        if key not in self.yaml_dict[self.key_dotfiles]:
            self.log.err('key not in dotfiles: {}'.format(key))
            return False
        if self.debug:
            self.log.dbg('remove dotfile: {}'.format(key))
        del self.yaml_dict[self.key_dotfiles][key]
        if self.debug:
            dfs = self.yaml_dict[self.key_dotfiles]
            self.log.dbg('new dotfiles: {}'.format(dfs))
        self.dirty = True
        return True

    def del_dotfile_from_profile(self, df_key, pro_key):
        """remove this dotfile from that profile"""
        if df_key not in self.dotfiles.keys():
            self.log.err('key not in dotfiles: {}'.format(df_key))
            return False
        if pro_key not in self.profiles.keys():
            self.log.err('key not in profile: {}'.format(pro_key))
            return False
        # get the profile dictionary
        profile = self.yaml_dict[self.key_profiles][pro_key]
        if df_key not in profile[self.key_profile_dotfiles]:
            return True
        if self.debug:
            dfs = profile[self.key_profile_dotfiles]
            self.log.dbg('{} profile dotfiles: {}'.format(pro_key, dfs))
            self.log.dbg('remove {} from profile {}'.format(df_key, pro_key))
        profile[self.key_profile_dotfiles].remove(df_key)
        if self.debug:
            dfs = profile[self.key_profile_dotfiles]
            self.log.dbg('{} profile dotfiles: {}'.format(pro_key, dfs))
        self.dirty = True
        return True

    ########################################################
    # handle deprecated entries
    ########################################################

    def _fix_deprecated(self, yamldict):
        """fix deprecated entries"""
        self._fix_deprecated_link_by_default(yamldict)
        self._fix_deprecated_dotfile_link(yamldict)

    def _fix_deprecated_link_by_default(self, yamldict):
        """fix deprecated link_by_default"""
        key = 'link_by_default'
        newkey = self.key_imp_link
        if self.key_settings not in yamldict:
            return
        if not yamldict[self.key_settings]:
            return
        config = yamldict[self.key_settings]
        if key not in config:
            return
        if config[key]:
            config[newkey] = self.lnk_link
        else:
            config[newkey] = self.lnk_nolink
        del config[key]
        self.log.warn('deprecated \"link_by_default\"')
        self.dirty = True
        self.dirty_deprecated = True

    def _fix_deprecated_dotfile_link(self, yamldict):
        """fix deprecated link in dotfiles"""
        if self.key_dotfiles not in yamldict:
            return
        if not yamldict[self.key_dotfiles]:
            return
        for k, dotfile in yamldict[self.key_dotfiles].items():
            new = self.lnk_nolink
            if self.key_dotfile_link in dotfile and \
                    type(dotfile[self.key_dotfile_link]) is bool:
                # patch link: <bool>
                cur = dotfile[self.key_dotfile_link]
                new = self.lnk_nolink
                if cur:
                    new = self.lnk_link
                dotfile[self.key_dotfile_link] = new
                self.dirty = True
                self.dirty_deprecated = True
                self.log.warn('deprecated \"link\" value')
            elif self.key_dotfile_link_children in dotfile and \
                    type(dotfile[self.key_dotfile_link_children]) is bool:
                # patch link_children: <bool>
                cur = dotfile[self.key_dotfile_link_children]
                new = self.lnk_nolink
                if cur:
                    new = self.lnk_children
                del dotfile[self.key_dotfile_link_children]
                dotfile[self.key_dotfile_link] = new
                self.dirty = True
                self.dirty_deprecated = True
                self.log.warn('deprecated \"link_children\" value')

    ########################################################
    # yaml utils
    ########################################################

    def _prepare_to_save(self, content):
        content = self._clear_none(content)
        # make sure we have the base entries
        if self.key_settings not in content:
            content[self.key_settings] = None
        if self.key_dotfiles not in content:
            content[self.key_dotfiles] = None
        if self.key_profiles not in content:
            content[self.key_profiles] = None
        return content

    def save(self):
        """save this instance and return True if saved"""
        if not self.dirty:
            return False

        content = self._prepare_to_save(self.yaml_dict)

        if self.dirty_deprecated:
            # add minversion
            settings = content[self.key_settings]
            settings[self.key_settings_minversion] = VERSION

        # save to file
        if self.debug:
            self.log.dbg('saving to {}'.format(self.path))
        try:
            with open(self.path, 'w') as f:
                self._yaml_dump(content, f)
        except Exception as e:
            self.log.err(e)
            raise YamlException('error saving config: {}'.format(self.path))

        if self.dirty_deprecated:
            warn = 'your config contained deprecated entries'
            warn += ' and was updated'
            self.log.warn(warn)

        self.dirty = False
        self.cfg_updated = False
        return True

    def dump(self):
        """dump the config dictionary"""
        output = io.StringIO()
        content = self._prepare_to_save(self.yaml_dict.copy())
        self._yaml_dump(content, output)
        return output.getvalue()

    def _load_yaml(self, path):
        """load a yaml file to a dict"""
        content = {}
        if self.debug:
            self.log.dbg('----------start:{}----------'.format(path))
            cfg = '\n'
            with open(path, 'r') as f:
                for line in f:
                    cfg += line
            self.log.dbg(cfg.rstrip())
            self.log.dbg('----------end:{}----------'.format(path))
        try:
            content = self._yaml_load(path)
        except Exception as e:
            self.log.err(e)
            raise YamlException('invalid config: {}'.format(path))
        return content

    def _yaml_load(self, path):
        """load from yaml"""
        with open(path, 'r') as f:
            y = yaml()
            y.typ = 'rt'
            content = y.load(f)
        return content

    def _yaml_dump(self, content, where):
        """dump to yaml"""
        y = yaml()
        y.default_flow_style = False
        y.indent = 2
        y.typ = 'rt'
        y.dump(content, where)

    ########################################################
    # helpers
    ########################################################

    def _merge_dict(self, high, low):
        """merge high and low dict"""
        if not high:
            high = {}
        if not low:
            low = {}
        return {**low, **high}

    def _get_entry(self, dic, key, mandatory=True):
        """return entry from yaml dictionary"""
        if key not in dic:
            if mandatory:
                raise YamlException('invalid config: no {} found'.format(key))
            dic[key] = {}
            return dic[key]
        if mandatory and not dic[key]:
            # ensure is not none
            dic[key] = {}
        return dic[key]

    def _clear_none(self, dic):
        """recursively delete all none/empty values in a dictionary."""
        new = {}
        for k, v in dic.items():
            newv = v
            if isinstance(v, dict):
                newv = self._clear_none(v)
            if not newv:
                # no empty dict
                continue
            if newv is None:
                # no None value
                continue
            if isinstance(newv, list) and not newv:
                # no empty list
                continue
            new[k] = newv
        return new

    def _is_glob(self, path):
        """Quick test if path is a glob."""
        return '*' in path or '?' in path
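    # Illustrative sketch (editorial, not from the original source):
    # _merge_dict above gives precedence to its first argument ("high"):
    #
    #   _merge_dict({'key': 'high'}, {'key': 'low', 'other': 1})
    #   # -> {'key': 'high', 'other': 1}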
    def _glob_path(self, path):
        """Expand a glob."""
        if self.debug:
            self.log.dbg('expanding glob {}'.format(path))
        expanded_path = os.path.expanduser(path)
        return glob.glob(expanded_path, recursive=True)

    def _norm_path(self, path):
        """Resolve a path either absolute or relative to config path"""
        if not path:
            return path
        path = os.path.expanduser(path)
        if not os.path.isabs(path):
            d = os.path.dirname(self.path)
            ret = os.path.join(d, path)
            if self.debug:
                msg = 'normalizing relative to cfg: {} -> {}'
                self.log.dbg(msg.format(path, ret))
            return ret
        ret = os.path.normpath(path)
        if self.debug and path != ret:
            self.log.dbg('normalizing: {} -> {}'.format(path, ret))
        return ret

    def _shell_exec_dvars(self, keys, variables):
        """shell execute dynvariables"""
        for k in list(keys):
            ret, out = shell(variables[k], debug=self.debug)
            if not ret:
                err = 'var \"{}: {}\" failed: {}'.format(k, variables[k], out)
                self.log.err(err)
                raise YamlException(err)
            if self.debug:
                self.log.dbg('\"{}\": {} -> {}'.format(k, variables[k], out))
            variables[k] = out

    def _template_list(self, t, entries):
        """template a list of entries"""
        new = []
        if not entries:
            return new
        for e in entries:
            et = t.generate_string(e)
            if self.debug and e != et:
                self.log.dbg('resolved: {} -> {}'.format(e, et))
            new.append(et)
        return new

    def _check_minversion(self, minversion):
        if not minversion:
            return
        try:
            cur = tuple([int(x) for x in VERSION.split('.')])
            cfg = tuple([int(x) for x in minversion.split('.')])
        except Exception:
            err = 'bad version: \"{}\" VS \"{}\"'.format(VERSION, minversion)
            raise YamlException(err)
        if cur < cfg:
            err = 'current dotdrop version is too old for that config file.'
            err += ' Please update.'
            raise YamlException(err)

    def _debug_dict(self, title, elems):
        """pretty print dict"""
        if not self.debug:
            return
        self.log.dbg('{}:'.format(title))
        if not elems:
            return
        for k, v in elems.items():
            self.log.dbg('\t- \"{}\": {}'.format(k, v))
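# Editorial usage sketch, not part of the original source: load a config,
# print the merged variables and the normalized YAML. The config path and
# profile name below are hypothetical, and the module-level imports used by
# CfgYaml are assumed to be present in the surrounding file.
if __name__ == '__main__':
    import sys
    cfg_path = sys.argv[1] if len(sys.argv) > 1 else 'config.yaml'
    try:
        cfg = CfgYaml(cfg_path, profile='host1', debug=True)
        # all variables resolved for the selected profile
        print(cfg.get_variables())
        # normalized config dumped back to YAML
        print(cfg.dump())
    except YamlException as exc:
        print('config error: {}'.format(exc))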