def init_config_defaults(self):
    """Create a fresh rc-file, seed it with default settings and offer to
    write it to disk.

    NOTE(review): the unconditional assert on the first line makes the whole
    body unreachable -- this method is an unfinished stub (see the TODO text).
    """
    assert False, "TODO: implementing default values for existing settings "
    # Pick the rc-file location: the configured path, or a dotfile in $HOME.
    if self.settings.config_file:
        rc_file = self.settings.config_file
    else:
        rc_file = os.path.join(os.path.expanduser('~'), '.'+self.DEFAULT_RC)
    # Refuse to clobber an existing file; create an empty one and load it.
    assert not os.path.exists(rc_file), "File exists: %s" % rc_file
    os.mknod(rc_file)
    self.settings = confparse.load_path(rc_file)
    # NOTE(review): `config_key`, `settings` and `rc` are not defined in this
    # scope -- if execution ever got here it would raise NameError. Compare
    # init_config(), which defines these names; this block looks copied from
    # there. TODO confirm intended names (self.settings / self.NAME?).
    if config_key:
        setattr(settings, config_key, confparse.Values())
        self.rc = getattr(rc, config_key)
    else:
        self.rc = settings
    "Default some global settings: "
    self.settings.set_source_key('config_file')
    self.settings.config_file = Application.DEFAULT_RC
    "Default program specific settings: "
    self.rc.dbref = Application.DEFAULT_DB
    # NOTE(review): the "%s" in the prompt is never substituted, and
    # raw_input is Python 2 only.
    v = raw_input("Write new config to %s? [Yn]")
    # Empty answer defaults to yes.
    if not v.strip() or v.lower().strip() == 'y':
        self.settings.commit()
        print "File rewritten. "
    else:
        print "Not writing file. "
def init_config(self, **opts): config_key = self.NAME # Create if needed and load config file if self.settings.config_file: config_file = self.settings.config_file #elif self != self.getsource(): # config_file = os.path.join(os.path.expanduser('~'), '.'+self.DEFAULT_RC) if not os.path.exists(config_file): os.mknod(config_file) settings = confparse.load_path(config_file) settings.set_source_key('config_file') settings.config_file = config_file # Reset sub-Values of settings, or use settings itself if config_key: setattr(settings, config_key, Values()) rc = getattr(settings, config_key) assert config_key assert isinstance(rc, Values) #else: # rc = settings assert False, 'TODO update iso reset settings' self.settings = settings self.rc = rc self.init_config_defaults() v = input("Write new config to %s? [Yn]" % settings.getsource().config_file) if not v.strip() or v.lower().strip() == 'y': settings.commit() print("File rewritten. ") else: print("Not writing file. ")
def init():
    """Locate ``domain.rc`` on the config search path and load it.

    Returns the loaded settings object.

    FIX: the original computed ``settings`` and then discarded it (all names
    were locals, never returned or assigned globally), so calling it had no
    effect. Returning the settings is backward compatible for any caller that
    ignored the previous ``None`` result.
    """
    # Configuration filename(s) found on the search path.
    config = list(confparse.expand_config_path('domain.rc'))
    # NOTE(review): assert used for input validation; kept to preserve the
    # exception type callers may expect.
    assert config, "Missing domain.rc"
    if len(config) > 1:
        # Only the first match is used.
        print("XXX multiple rc", config)
    # Static, persisted settings.
    settings = confparse.load_path(config[0])
    return settings
def load_config_(self, config_file, opts=None ):
    """Load settings from ``config_file`` and bind the section named by
    ``opts.config_key`` to ``self.rc``.

    :param config_file: path passed to confparse.load_path.
    :param opts: options object carrying ``config_key``; may be None.

    FIX: ``opts`` defaults to None but the original dereferenced
    ``opts.config_key`` unconditionally, raising AttributeError whenever the
    default was used. A missing ``opts`` now behaves like an empty
    ``config_key`` (global section).
    """
    settings = confparse.load_path(config_file)
    config_key = opts.config_key if opts is not None else None
    if not config_key:
        # No section requested: expose the whole settings tree.
        # NOTE(review): self.rc is set to the *string* 'global' here, not a
        # Values object -- preserved as-is, TODO confirm intent.
        self.rc = 'global'
        self.settings = settings
        return
    if not hasattr(settings, config_key):
        # Section missing from file: fall back to an initializer hook, if any.
        if self.INIT_RC and hasattr(self, self.INIT_RC):
            self.rc = getattr(self, self.INIT_RC)(opts)
        else:
            log.warn("Config key %s does not exist in %s" % (
                config_key, config_file))
    else:
        self.rc = getattr(settings, config_key)
    # Record where the settings came from so commit() can write back.
    settings.set_source_key('config_file')
    settings.config_file = config_file
    self.config_key = config_key
    self.settings = settings
def load_config_(self, config_file, opts=None ):
    """Load settings from ``config_file`` and merge the section named by
    ``opts.config_key`` over ``self.default_rc`` into ``self.rc``.

    :param config_file: path passed to confparse.load_path.
    :param opts: options mapping carrying ``config_key``; may be None.

    FIX: ``opts`` defaults to None but the original dereferenced
    ``opts.config_key`` unconditionally, raising AttributeError whenever the
    default was used. A missing ``opts`` now behaves like an empty
    ``config_key`` (global section).
    """
    settings = confparse.load_path(config_file)
    config_key = opts.config_key if opts is not None else None
    if not config_key:
        # No section requested: merge the whole tree into current settings.
        # NOTE(review): self.rc is set to the *string* 'global' here --
        # preserved as-is, TODO confirm intent.
        self.rc = 'global'
        self.settings.update(settings)
        return
    if hasattr(settings, config_key):
        # NOTE(review): this aliases self.rc to self.default_rc, so the
        # updates below also mutate default_rc -- preserved, TODO confirm.
        self.rc = self.default_rc
        if getattr(settings, config_key):
            self.rc.update(getattr(settings, config_key))
        # Non-empty option values override what the file provided.
        self.rc.update({ k: v for k, v in opts.items() if v })
    else:
        log.warn("Config key %s does not exist in %s" % (
            config_key, config_file))
    # Record where the settings came from so commit() can write back.
    settings.set_source_key('config_file')
    settings.config_file = config_file
    self.config_key = config_key
    self.settings.update(settings)
def main(argv, doc=__doc__, usage=__usage__):
    """ Execute using docopt-mpe options.

    Loads the configured settings file and prints the ``volume`` and
    ``volumes`` sections.
    """
    # Process environment: VOLUME_DB may override the default db reference.
    db = os.getenv( 'VOLUME_DB', __db__ )
    # FIX: was `db is not __db__` -- identity comparison on strings; use
    # equality. (Replacing __db__ with an equal db was a no-op anyway.)
    if db != __db__:
        usage = usage.replace(__db__, db)
    opts = util.get_opts(doc + usage, version=get_version(), argv=argv[1:])
    opts.flags.dbref = taxus.ScriptMixin.assert_dbref(opts.flags.dbref)
    # Load configuration: last match on the search path wins.
    config_file = list(confparse.expand_config_path(opts.flags.config)).pop()
    settings = confparse.load_path(config_file)
    pprint(settings.todict())
    # FIX: the Py2 print statements below are rewritten via %-formatting so
    # the output is byte-identical on both Python 2 and 3.
    print("")
    for v, p in settings.volume.items():
        print("%s %s" % (v, p))
    print("")
    for v, s in settings.volumes.items():
        print("%s %s" % (v, s))
def init_config(path, defaults=None, overrides=None, persist=None):
    """ Get settings from path. Use defaults to seed non-existant keys.
    Overwrite using overrides. Persists allows to indicate which settings are
    persisted. Any override key not in this list will be listed as volatile.
    Normally persist equals defaults.keys.

    FIXes:
    - mutable default arguments (``{}``, ``{}``, ``[]``) replaced with the
      None-sentinel idiom; the call signature stays compatible.
    - ``defaults`` is shallow-copied so the 'volatile' seed below no longer
      mutates the caller's dict between calls.
    - ``persist = list(defaults.keys())`` is materialized: on Python 3 the
      plain keys() view would alias ``defaults`` and pick up the 'volatile'
      key added afterwards.
    """
    defaults = dict(defaults) if defaults else {}
    overrides = overrides if overrides is not None else {}
    settings = confparse.load_path(path)
    if not persist:
        # Snapshot the keys *before* 'volatile' may be added below.
        persist = list(defaults.keys())
    # FIXME: volatile/config_file handling should be in confparse
    if 'volatile' not in settings:
        defaults['volatile'] = ['config_file']
    # Seed keys the file did not provide.
    for k, v in defaults.items():
        if k not in settings:
            setattr(settings, k, v)
    # Apply overrides; anything not persisted is tracked as volatile.
    for k, v in overrides.items():
        if k not in persist:
            settings.volatile.append(k)
        setattr(settings, k, v)
    return settings
Take a set of paths and assert that each real location is an archived path. Move files and symlink to archive root if needed. Exceptions are when the files are either too old or too large. """ import os, sys, re, fnmatch, datetime, optparse, itertools import confparse from cmdline import log config = confparse.expand_config_path('cllct.rc') "Root configuration file." settings = confparse.load_path(config.next()) "Static, persisted settings." # Settings with hard-coded defaults #volumes = settings.rsr.volumes#.getlist([ '%(home)/htdocs/' ]) #"Physically disjunct storage trees." # archive_root = settings.volume.cabinet.root #archive_root = settings.rsr.volumes.archive #.getstr('%(volumes)') "Root for current archive." # #archived = settings.volumes.archives#.getlist([]) #"Roots of older archives." #
import getpass  # FIX: was missing -- getpass.getuser() is called below
import optparse
import os
import pprint
import re
import socket
import subprocess
import sys

from lib import is_versioned, remote_proc, datetime_to_timestamp, timestamp_to_datetime
import confparse

# Configuration filename(s) on the search path.
config = confparse.expand_config_path('cllct.rc')
"Configuration filename."
# Static, persisted settings.
settings = confparse.load_path(*config)
"Static, persisted settings."

hostname = socket.gethostname()
# FIX: previously raised NameError -- getpass was never imported.
username = getpass.getuser()

mapsync_file = settings.mapsync.log_file


def reload():
    """Re-read settings from disk and ensure the 'dynamic' key list exists."""
    # NOTE(review): `hostname` is declared global but never reassigned here;
    # also shadows the Py2 builtin reload().
    global settings, hostname
    settings = settings.reload()
    if 'dynamic' not in settings:
        settings['dynamic'] = []

# List all sync'ed map-id's for this host
# NOTE(review): this chunk starts mid-statement -- the `if` matching the
# `else:` below is outside the visible source, so the indentation of the
# fscard section is a reconstruction (placed inside the else so the Sitefile
# code below stays reachable past sys.exit). TODO confirm against full file.
else:
    path = os.getcwd()
    # Dump the fscard shelve database for `path`, then exit.
    fscard_dbref = list(confparse.find_config_path("fscard", path=path, suffixes=[".db"]))
    if not fscard_dbref:
        print "No fscard file"
        sys.exit(1)
    # NOTE(review): rebinding `shelve` shadows the shelve module from here on.
    shelve = shelve.open(fscard_dbref[0], "r")
    for k in shelve:
        print k, shelve.get(k)
    sys.exit(0)

# Load res.File/res.Dir ignore rules from the nearest Sitefile, falling back
# to ~/Sitefile.yaml.
sitefiles = list(confparse.find_config_path("Sitefile", suffixes=[".json", ".yaml", ".py", ".js"]))
if sitefiles:
    settings = confparse.load_path(sitefiles[0], confparse.YAMLValues).sitefile
else:
    homesitefile = os.path.expanduser("~/Sitefile.yaml")
    settings = confparse.load_path(homesitefile, confparse.YAMLValues).sitefile
res.File.ignore_paths = settings.res.File.ignore_paths
res.File.ignore_names = settings.res.File.ignore_names
res.Dir.ignore_paths = settings.res.Dir.ignore_paths
res.Dir.ignore_names = settings.res.Dir.ignore_names
# Default walk options; the argument list continues past this chunk.
walk_opts = confparse.Values( dict(
    interactive=False,
    recurse=True,
    max_depth=-1,
    include_root=False,
from pprint import pformat
import re
import socket
import sys
import yaml

import confparse

# Locate domain.rc on the config search path.
config = list(confparse.expand_config_path('domain.rc'))
"Configuration filename."
assert config, "Missing domain.rc"
if len(config)> 1:
    # Multiple rc files found; only the first is loaded.
    print "XXX multiple rc", config
settings = confparse.load_path(config[0])
"Static, persisted settings."


def reload():
    # Re-read settings from disk and rebuild derived values.
    # NOTE(review): shadows the Py2 builtin reload(); the interfaces section
    # below is assumed to be inside this function (it pairs with the
    # settings['dynamic'] initialization) -- TODO confirm indentation.
    global settings
    settings = settings.reload()
    if 'dynamic' not in settings:
        settings['dynamic'] = []
    # Reparse interfaces: map each interface MAC back to its host node.
    settings['interfaces'] = confparse.Values({}, root=settings)
    for host in settings.node:
        for mac in settings.node[host].interface:
            # A MAC must belong to exactly one host.
            assert mac not in settings.interfaces
            settings.interfaces[mac] = host
    # Mark 'interfaces' as derived so it is not persisted as static data.
    if 'interfaces' not in settings.dynamic:
        settings.dynamic.append('interfaces')
def main(argv, doc=__doc__, usage=__usage__):
    """ Execute using docopt-mpe options.

        prog [opts] [CTX] ( FILE... | DIR... )

    Resolves FILE/DIR arguments against named path prefixes (from the config
    'prefix-map' plus environment bindings), filters them, and records file
    metadata through the taxus/res layers.

    FIXes applied:
    - ``fnmatch.translating`` does not exist; patterns are now compiled with
      ``re.compile(fnmatch.translate(...))`` (required, since ``.match()`` is
      called on them below).
    - ``db is not __db__`` identity compare on strings -> ``!=``.
    - ``prefixes.next()`` (Py2-only) -> ``next(prefixes)``.
    - FILE list was mutated while being iterated (skipped entries); now
      iterates a copy.
    - Py2 print statements rewritten as print() calls with identical output.
    """
    import re  # for fnmatch pattern compilation; harmless if already imported

    # Process environment: FINFO_DB may override the default db reference.
    db = os.getenv( 'FINFO_DB', __db__ )
    if db != __db__:
        usage = usage.replace(__db__, db)
    ctx = confparse.Values(dict(
        opts = util.get_opts(doc + usage, version=get_version(), argv=argv[1:])
    ))
    ctx.opts.flags.dbref = taxus.ScriptMixin.assert_dbref(ctx.opts.flags.dbref)

    # Load configuration: last match on the search path wins.
    ctx.config_file = list(confparse.expand_config_path(ctx.opts.flags.config)).pop()
    ctx.settings = settings = confparse.load_path(ctx.config_file)

    # Load SA session
    ctx.sa = get_session(ctx.opts.flags.dbref)
    # DEBUG: pprint(ctx.settings.todict())

    # Process arguments
    dirs = []
    # Shift paths from ctx arg: a CTX that is an existing path is a FILE arg.
    if ctx.opts.args.CTX and os.path.exists(ctx.opts.args.CTX):
        ctx.opts.args.FILE.append(ctx.opts.args.CTX)
        ctx.opts.args.CTX = None
    # Sort out dirs from files. FIX: iterate a copy -- .remove() during
    # iteration used to skip the element after each removed dir.
    for arg in list(ctx.opts.args.FILE):
        if os.path.isdir(arg):
            ctx.opts.args.FILE.remove(arg)
            dirs.append(arg)
        elif os.path.isfile(arg):
            pass
        else:
            log.note("Unhandled path %r" % arg)
    ctx.opts.args.DIR = dirs

    # Set default path context
    if ctx.opts.flags.name:
        assert not ctx.opts.args.CTX
        ctx.opts.args.CTX = ctx.opts.flags.name
    elif not ctx.opts.args.CTX:
        ctx.opts.args.CTX = 'current'

    # XXX: create prefixes object on context
    ctx.prefixes = confparse.Values(dict(
        map= settings.finfo['prefix-map'],
        env={},
        map_={}
    ))
    if 'homedir' not in ctx.prefixes.map:
        ctx.prefixes.map['homedir'] = 'HOME=%s' % os.path.expanduser('~')
    if 'current' not in ctx.prefixes.map:
        ctx.prefixes.map['current'] = '$PWD:$HOME'
    if 'pwd' not in ctx.prefixes.map:
        ctx.prefixes.map['pwd'] = 'PWD=%s' % os.path.abspath('.')
    # Map env var names back to their prefixes for specs of the form VAR=path.
    for prefix, path in ctx.prefixes.map.items():
        if '=' in path:
            envvar, path = path.split('=')
            if envvar in ctx.prefixes.env:
                assert ctx.prefixes.env[envvar] == prefix, (
                        ctx.prefixes.env[envvar], prefix )
            ctx.prefixes.env[envvar] = prefix

    # Pre-process binds from --env flags (normalize scalar to list first).
    if not isinstance(ctx.opts.flags.env, list):
        ctx.opts.flags.env = [ ctx.opts.flags.env ]
    for env_map in ctx.opts.flags.env:
        envvar, prefix = env_map.split('=')
        if envvar in ctx.prefixes.env:
            assert prefix == ctx.prefixes.env[envvar]
        else:
            ctx.prefixes.env[envvar] = prefix
        envvalue = os.getenv(envvar, None)
        if envvalue:
            ctx.prefixes.map[prefix] = "%s=%s" % ( envvar, envvalue )
            #ctx.prefixes.map_[prefix] = envvalue.split(':')

    # Post-process prefixes after passed flags, and resolve all values
    for prefix, spec in ctx.prefixes.map.items():
        if '=' in spec:
            envvar, spec = spec.split('=')
            if envvar in ctx.prefixes.env:
                assert ctx.prefixes.env[envvar] == prefix, (
                        ctx.prefixes.env[envvar], prefix )
            ctx.prefixes.env[envvar] = prefix
        specs = spec.split(':')
        set_ = []
        for idx, path in enumerate(specs):
            path = os.path.expanduser(path)
            if varname.match(path):
                # $VAR reference: point at the prefix bound to that env var.
                refpref = ctx.prefixes.env[path[1:]]
                #refpath = ctx.prefixes.map[]
                path = '#prefixes/'+refpref
            elif '$' in path:
                pass
            #else:
            #    path = '#prefixes/'+prefix+':'+str(idx)
            set_.append(path)
        ctx.prefixes.map_[prefix] = set_
    ctx.pathrefs = ctx.prefixes.map_[ctx.opts.args.CTX]
    #DEBUG:
    #print(ctx.opts.todict())
    #print(pformat(ctx.prefixes.todict()))
    #print(pformat(ctx.pathrefs))

    # Preprocess filters to compiled regexes
    if 'FILTER' not in ctx.opts.args:
        ctx.opts.args.FILTER = []
    if not ctx.opts.args.FILTER:
        ctx.opts.args.FILTER = default_filters
    if ctx.opts.flags.documents:
        ctx.opts.args.FILTER = doc_filters + ctx.opts.args.FILTER
    for idx, pattern in enumerate(ctx.opts.args.FILTER):
        if isinstance(pattern, str):
            # FIX: was fnmatch.translating(pattern) -- no such function, and a
            # compiled pattern is required for the .match() calls below.
            ctx.opts.args.FILTER[idx] = re.compile(fnmatch.translate(pattern))

    # Resolve FILE/DIR arguments
    files, dirs = [], []
    for arg in ctx.opts.args.FILE + ctx.opts.args.DIR:
        r = find_local(arg, search_path(ctx.pathrefs, ctx))
        if not r:
            continue
        prefix, file, dir = r
        if not dir:
            raise Exception("No path for %s" % arg)
        elif file:
            files.append((prefix, file))
        else:
            dirs.append((prefix, dir))
    print("Resolved arguments to %s dirs, %s files" % (
        len(dirs), len(files) ))

    # XXX: if not ctx.opts.flags.directories:
    if ctx.opts.flags.recurse:
        # Resolve all dirs to file lists
        for p, d in dirs:
            for top, path_dirs, path_files in os.walk(os.path.join(p, d)):
                # Prune directories that fail any filter.
                for path_dir in list(path_dirs):
                    for filter_ in ctx.opts.args.FILTER:
                        if not filter_.match(os.path.basename(path_dir)):
                            path_dirs.remove(path_dir)
                            break
                if top.startswith('./'):
                    top = top[2:]
                for path_file in list(path_files):
                    filter_ = None
                    for filter_ in ctx.opts.args.FILTER:
                        if filter_.match(os.path.basename(path_file)):
                            break
                    else:
                        # No filter matched: skip this file.
                        continue
                    if not filter_.match(os.path.basename(path_file)):
                        path_files.remove(path_file)
                    if path_file not in path_files:
                        continue
                    files.append((p, os.path.join(top, path_file)))
    print("Continue with %s files" % len(files))

    mfadapter = None
    res.persistence.PersistedMetaObject.stores['metafile'] = mfadapter

    prefix = None
    for p, f in files:
        if ctx.opts.flags.auto_prefix:
            prefixes = find_prefixes(p, ctx)
            assert prefixes
            # FIXME: how come only use first??
            prefix = next(prefixes)  # FIX: was prefixes.next() (Py2-only)
            assert len(ctx.prefixes.map_[prefix]) == 1, prefix
            name = f[len(ctx.prefixes.map_[prefix][0])+1:]
        else:
            prefix = ctx.opts.args.CTX
            name = f[len(p)+1:]
        ref = prefix+':'+name
        if ctx.opts.flags.names_only:
            print(ref)
        else:
            # TODO: get INode through context? Also add mediatype & parameters
            # resolver. But needs access to finfo ctx..
            record = taxus.INode.get_instance(name=ref, _sa=ctx.sa)
            # GNU/Linux: -bi = --brief --mime
            # Darwin/BSD: -bI = --brief --mime
            #mediatype = lib.cmd('file --brief --mime "%s"', path).strip()
            # XXX: see basename-reg?
            #if ctx.opts.flags.update == True:
            # TODO: repopulate metadata;
            mf = res.metafile.Metafile(f)
            assert mf.date_accessed
            record.date_accessed = mf.date_accessed
            assert mf.date_modified
            record.date_modified = mf.date_modified
            if not record.node_id:
                ctx.sa.add(record)
            # %s-join keeps output identical to the old Py2 print statement.
            print("%s %s %s" % (record, record.date_updated, record.date_modified))
    #sys.exit()
    if ctx.opts.flags.update:
        ctx.sa.commit()