def test_readconfig(tmpdir):
    p16 = Proc()
    assert p16.id == 'p16'
    # nothing updated
    p16._readConfig(None, Config())
    assert p16.id == 'p16'

    config = Config()
    config._load({'f20': {'forks': 20}})
    p16.forks = 10
    p16._readConfig({'forks': 30}, config)
    assert p16.id == 'p16'
    assert p16.forks == 10
    assert p16.runner == 'local'
    assert p16.config.runner == '__tmp__'

    p17 = Proc()
    p17.forks = 10
    config = Config()
    config._load({'f30': {'forks': 20}})
    # no such profile in config
    p17._readConfig('dry', config)
    assert p17.forks == 10
    assert p17.runner == 'dry'
    assert p17.config.runner == 'dry'
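# A minimal sketch of the precedence that test_readconfig exercises: attributes
# assigned directly on the process win over profile values, which win over
# defaults. The helper below is hypothetical, not PyPPL's actual _readConfig.
def _resolve(attr, set_attrs, profile_conf, default):
    if attr in set_attrs:
        return set_attrs[attr]
    return profile_conf.get(attr, default)

# p16.forks was set to 10 directly, so the runtime config's forks=30 is ignored:
assert _resolve('forks', {'forks': 10}, {'forks': 30}, 1) == 10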
def test_readconfig_preload(tmpdir):
    config._load({'xyz': {'runner': 'sge', 'forks': 50}})
    p18 = Proc()
    p18._readConfig('xyz', Config())
    assert p18.runner == 'sge'
    assert p18.forks == 50
    assert p18.config.runner == 'xyz'
def tag(options):
    """Tag the version"""
    default_options = Config()
    default_options._load('./.tagitrc')
    default_options._use('TAGIT')
    default_options.update(vars(options['c']))
    publish = default_options.get('publish', False)
    #changelog = default_options.get('changelog', '')
    increment = default_options.get('increment', 'patch')
    versource = default_options.get('versource', '')
    vertoml = default_options.get('vertoml', '')
    #checksource = default_options.get('checksource', True)
    #checktoml = default_options.get('checktoml', True)
    extra = default_options.get('extra', '')
    specver = options[POSITIONAL] or None

    ret = status(default_options, specver, True)
    if not ret:
        return

    tagver = _get_version_from_gittag() or Tag((0, 0, 0))
    specver = specver or tagver.increment(increment)

    if versource:
        _log('Updating version in source file ...')
        _update_version_to_source(_getsrcfile(versource), specver)
    if vertoml:
        _log('Updating version in pyproject.toml ...')
        _update_version_to_toml(specver, vertoml)
    if extra:
        cmd = bash(c=extra).fg
        if cmd.rc != 0:
            raise RuntimeError('Failed to run %r' % extra)

    _log('Committing the change ...')
    try:
        git.commit({'allow-empty': True}, a=True, m=str(specver)).fg
    except CmdyReturnCodeError:
        # pre-commit failed, do it again
        _log('Pre-commit failed, try again ...')
        git.add('.')
        git.commit({'allow-empty': True}, a=True, m=str(specver)).fg

    _log('Pushing the commit to remote ...')
    git.push().fg
    _log('Adding tag %r ...' % specver)
    git.tag(str(specver)).fg
    _log('Pushing the tag to remote ...')
    git.push(tag=True).fg

    if publish:
        _log('Building the release ...')
        poetry.build().fg
        _log('Publishing the release ...')
        poetry.publish().fg
    _log('Done!')
def version(options):  # pylint: disable=unused-argument
    """Get current local version"""
    default_options = Config()
    default_options._load('./.tagitrc')
    default_options._use('TAGIT')
    vertoml = options.vertoml
    ver = _get_version_from_toml(vertoml)
    _log('Current version: %s' % ver)
def __init__(self, conf=None, cfgfile=None):
    """@API
    PyPPL Constructor
    @params:
        conf (dict): the configurations for the pipeline, default: `None`
            - Remember the profile name should be included.
        cfgfile (file): the configuration file for the pipeline, default: `None`
    """
    self.counter = PyPPL.COUNTER
    PyPPL.COUNTER += 1

    self.config = Config()
    # __noloading__ tells processes not to load it, as they have it initiated.
    self.config._load({'default': config, '__noloading__': None})
    if cfgfile:
        self.config._load(cfgfile)
    if conf and isinstance(conf, dict):
        self.config._load(conf)
    if self.config._log.file is True:
        self.config._log.file = (Path('./') / Path(sys.argv[0]).stem).with_suffix(
            '%s.pyppl.log' % ('.' + str(self.counter) if self.counter else ''))

    # reinitiate logger according to new config
    logger.init(self.config)
    logger.pyppl('Version: %s', __version__)
    logger.tips(random.choice(PyPPL.TIPS))

    for cfile in DEFAULT_CFGFILES + (str(cfgfile), ):
        if cfile.endswith('.osenv'):
            logger.config('Read from environment variables with prefix: "%s_"',
                          Path(cfile).name[:-6])
        cfile = Path(cfile).expanduser()
        if not utils.fs.isfile(cfile):
            if cfile == cfgfile:
                logger.warning('Configuration file does not exist: %s', cfile)
            continue
        elif cfile.suffix in ('.yaml', '.yml'):
            try:
                import yaml  # pylint: disable=W0611
            except ImportError:  # pragma: no cover
                logger.warning('Module PyYAML not installed, config file ignored: %s',
                               cfile)
        elif cfile.suffix == '.toml':
            try:
                import toml  # pylint: disable=W0611
            except ImportError:  # pragma: no cover
                logger.warning('Module toml not installed, config file ignored: %s',
                               cfile)
        logger.config('Read from %s', cfile)

    for plgname, plugin in pluginmgr.list_name_plugin():
        logger.plugin('Loaded %s: v%s', plgname,
                      plugin.__version__ if hasattr(plugin, '__version__') else 'Unknown')

    self.tree = ProcTree()
    # save the procs in order for plugin use
    self.procs = []
    pluginmgr.hook.pypplInit(ppl=self)
def test_run(tmpdir, caplog):
    sys.argv = ['pytest']
    p24 = Proc()
    p24.resume = 'resume'
    p24.props.workdir = tmpdir / 'test_run_p24'
    fs.mkdir(p24.workdir)
    (p24.workdir / 'proc.settings.yaml').write_text('input: ')
    p24.run('dry', Config())
    assert 'Previous processes skipped.' in caplog.text
    assert p24.runner == 'dry'

    p25 = Proc()
    p25.resume = 'skip'
    caplog.clear()
    p25.run(None, Config())
    assert 'Pipeline will resume from future processes.' in caplog.text

    p25.resume = 'skip+'
    caplog.clear()
    p25.props.workdir = tmpdir / 'test_run_p25'
    fs.mkdir(p25.workdir)
    (p25.workdir / 'proc.settings.yaml').write_text('input: ')
    p25.run(None, Config())
    assert 'Data loaded, pipeline will resume from future processes.' in caplog.text
def test_readconfig_preset(tmpdir):
    p181 = Proc()
    p181.runner = 'xyz'
    config = Config()
    cfile = tmpdir / 'test_readconfig_preset.ini'
    cfile.write_text("""
[xyz]
runner: sge
forks: 50
""")
    config._load(cfile)
    p181._readConfig('', config)
    assert p181.runner == 'sge'
    assert p181.forks == 50
    assert p181.config.runner == 'xyz'
def setup_module(module):
    pytest.config = Config()
    pytest.config._load(
        {
            'default': {
                'repos': 'pwwang/remotedata',
            },
            'standard': {
                'source': 'github'
            },
            'invalidrepos': {
                'source': 'github',
                'repos': 'a'
            },
            'withbranch': {
                'source': 'github',
                'repos': 'pwwang/remotedata/notmaster'
            },
            'dropbox': {
                'source': 'dropbox',
            }
        },
        '~/.remotedata.yaml',
        'REMOTEDATA.osenv')  # get token from osenv
def load_config(config, opts):
    """Load the configurations from file"""
    if not path.isfile(config):
        raise OSError("Config file does not exist: {}".format(config))
    configs = Config(with_profile=False)
    configs._load(config)
    configs = configs.as_dict()

    ones = []
    if "instance" in configs:
        ones = configs["instance"]
        del configs["instance"]
    opts |= configs

    # pad figtype, figfmt, ggs and devpars to match the number of formulas
    len_fml = len(opts["formula"])
    opts["figtype"].extend([None] * (len_fml - len(opts["figtype"])))
    opts["figfmt"].extend([None] * (len_fml - len(opts["figfmt"])))
    opts["ggs"].extend([None] * (len_fml - len(opts["ggs"])))
    if isinstance(opts["devpars"], list):
        default_devpars = opts["devpars"][0]
        opts["devpars"].extend([default_devpars] *
                               (len_fml - len(opts["devpars"])))
    else:
        default_devpars = opts["devpars"]
        opts["devpars"] = [opts["devpars"]] * len_fml

    for instance in ones:
        if "formula" not in instance:
            raise ValueError(
                "Formula not found in instance: {}".format(instance))
        if "title" not in instance:
            raise ValueError(
                "Title not found in instance: {}".format(instance))
        opts["formula"].append(instance["formula"])
        opts["title"].append(instance["title"])
        opts["figtype"].append(instance.get("figtype"))
        opts["figfmt"].append(instance.get("figfmt"))
        opts["ggs"].append(instance.get("ggs"))
        def_devpars = default_devpars.copy()
        def_devpars.update(instance.get("devpars", {}))
        opts["devpars"].append(def_devpars)
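# For reference, a hypothetical `opts` dict going into load_config; the key
# names follow the function above, but the values are made up. The comments
# show what the padding step turns each entry into for len(formula) == 2.
opts_example = {
    "formula": ["y ~ x", "y ~ z"],
    "title": ["Plot 1", "Plot 2"],
    "figtype": ["scatter"],   # padded to ["scatter", None]
    "figfmt": [],             # padded to [None, None]
    "ggs": [],                # padded to [None, None]
    "devpars": {"res": 100},  # broadcast to [{"res": 100}, {"res": 100}]
}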
def procset(procs):
    # pretend they are running
    procs.p1.runtime_config = procs.p2.runtime_config = \
        procs.p3.runtime_config = Config()
    procs.p1.runtime_config._load({'default': {'dirsig': False}})
    return ProcSet(procs.p1, procs.p2, procs.p3, procs.p4)
def __init__(self, id=None, tag='notag', desc='No description.', **kwargs):
    """@API
    Proc constructor
    @params:
        tag (str) : The tag of the process
        desc (str) : The description of the process
        id (str) : The identity of the process
        **kwargs: Other properties of the process, which can be set by `proc.xxx` later.
    """
    # Do not go through __getattr__ and __setattr__
    # Get configuration from config
    self.__dict__['config'] = Config()
    # computed props
    self.__dict__['props'] = Diot(diot_nest=False)

    defaultconfig = dict.copy(utils.config)
    # The id (actually, it's the showing name) of the process
    defaultconfig['id'] = id if id else utils.varname()

    if ' ' in tag:
        raise ProcTagError("No space allowed in tag.")

    defaultconfig['tag'] = tag
    defaultconfig['desc'] = desc
    # The extra arguments for the process
    defaultconfig['args'] = dict.copy(defaultconfig['args'])
    # The callfront function of the process
    defaultconfig['callfront'] = None
    # The callback function of the process
    defaultconfig['callback'] = None
    # The dependencies specified
    defaultconfig['depends'] = []
    # The input that user specified
    defaultconfig['input'] = ''
    # The output that user specified
    defaultconfig['output'] = ''
    # resume flag of the process
    # ''        : Normal, do not resume
    # 'skip+'   : Load data from previous run, pipeline resumes from future processes
    # 'resume+' : Deduce input from 'skip+' processes
    # 'skip'    : Just skip, do not load data
    # 'resume'  : Load data from previous run, resume pipeline
    defaultconfig['resume'] = ''
    # The template environment, keep the process independent, even for the subconfigs
    defaultconfig['envs'] = utils.tryDeepCopy(defaultconfig['envs'])

    # The output channel of the process
    self.props.channel = Channel.create()
    # The dependencies computed
    self.props.depends = []
    # the computed echo option
    self.props.echo = {}
    # computed expart
    self.props.expart = []
    # computed expect
    self.props.expect = None
    # The computed input
    self.props.input = {}
    # The jobs
    self.props.jobs = []
    # The locker for the process
    self.props.lock = None
    # non-cached job ids
    self.props.ncjobids = []
    # The original name of the process if it's copied
    self.props.origin = defaultconfig['id']
    # The computed output
    self.props.output = OrderedDiot()
    # data for proc.xxx in template
    self.props.procvars = {}
    # Valid return code
    self.props.rc = [0]
    # get the runner from the profile
    self.props.runner = 'local'
    # The computed script. Template object
    self.props.script = None
    # The unique identity of the process; cache the suffix
    self.props._suffix = ''
    # The template class
    self.props.template = None
    # timer for running time
    self.props.timer = None
    # The computed workdir
    self.props.workdir = ''
    # Remember the attrs being set; they have the highest priority
    self.props.sets = set()

    # convert alias to its original name
    for aliaskey, aliasval in Proc.ALIAS.items():
        if aliaskey in kwargs:
            kwargs[aliasval] = kwargs.pop(aliaskey)

    for key in kwargs:
        if key not in defaultconfig:
            raise ProcAttributeError(key)

    # update the conf with kwargs
    defaultconfig.update(kwargs)
    # collapse the loading trace, we don't need it anymore.
    self.config._load({'default': defaultconfig})

    for key, val in kwargs.items():
        if key[0] != '_':
            setattr(self, key, val)
A set of utilities for PyPPL
"""
import re
import inspect
from copy import deepcopy
from queue import PriorityQueue
from os import path, walk, sep as pathsep
from hashlib import md5
from threading import Thread
import psutil
from transitions import Transition, Machine
import cmdy
from simpleconf import Config
from . import _fsutil as fs

config = Config()  # pylint: disable=invalid-name

def loadConfigurations(conf, *cfgfiles):
    """Load the configuration files"""
    conf.clear()
    conf._load(*cfgfiles)

# remove python2 support
# try:
#     from Queue import Queue, PriorityQueue, Empty as QueueEmpty
# except ImportError:  # pragma: no cover
#     from queue import Queue, PriorityQueue, Empty as QueueEmpty
# try:
#     string_types = basestring  # pylint: disable=invalid-name
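# A minimal usage sketch for loadConfigurations: simpleconf's _load accepts
# dicts as well as file paths (as other snippets in this section show). The
# profile dict below is made up, and the file path (commented out) is a
# hypothetical user config location.
loadConfigurations(config, {'default': {'forks': 1}})
# loadConfigurations(config, {'default': {'forks': 1}}, '~/.pyppl.yaml')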
def load_file(self, compfile):
    """Load commands and options from a configuration file"""
    config = Config(with_profile=False)
    config._load(compfile)
    self.load(config)
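# A hypothetical usage, assuming `compfile` is any format simpleconf can read
# (YAML, TOML, ini, ...); the instance name and file path below are made up.
# completer.load_file('~/.myapp/completions.yaml')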
def __init__(self, id=None, tag='notag', desc='No description.', **kwargs):
    """@API
    Proc constructor
    @params:
        tag (str) : The tag of the process
        desc (str) : The description of the process
        id (str) : The identity of the process
        **kwargs: Other properties of the process, which can be set by `proc.xxx` later.
    """
    # Do not go through __getattr__ and __setattr__
    # Get configuration from config
    self.__dict__['config'] = Config()
    # computed props
    self.__dict__['props'] = Box(box_intact_types=[list])

    defaultconfig = dict.copy(utils.config)
    # The id (actually, it's the showing name) of the process
    defaultconfig['id'] = id if id else utils.varname()

    if ' ' in tag:
        raise ProcTagError("No space allowed in tag.")
    if 'depends' in kwargs:
        raise ProcAttributeError(
            "Attribute 'depends' has to be set using `__setattr__`")

    # The extra arguments for the process
    defaultconfig['args'] = dict.copy(defaultconfig['args'])
    # The callfront function of the process
    defaultconfig['callfront'] = None
    # The callback function of the process
    defaultconfig['callback'] = None
    # The dependencies specified
    defaultconfig['depends'] = []
    # The input that user specified
    defaultconfig['input'] = ''
    # The output that user specified
    defaultconfig['output'] = ''
    # resume flag of the process
    # ''        : Normal, do not resume
    # 'skip+'   : Load data from previous run, pipeline resumes from future processes
    # 'resume+' : Deduce input from 'skip+' processes
    # 'skip'    : Just skip, do not load data
    # 'resume'  : Load data from previous run, resume pipeline
    defaultconfig['resume'] = ''
    # The template environment, keep the process independent, even for the subconfigs
    defaultconfig['envs'] = pycopy.deepcopy(defaultconfig['envs'])

    # The output channel of the process
    self.props.channel = Channel.create()
    # The dependencies computed
    self.props.depends = []
    # the computed echo option
    self.props.echo = {}
    # computed expart
    self.props.expart = []
    # computed expect
    self.props.expect = None
    # The computed input
    self.props.input = {}
    # The jobs
    self.props.jobs = []
    # The locker for the process
    self.props.lock = None
    # non-cached job ids
    self.props.ncjobids = []
    # The original name of the process if it's copied
    self.props.origin = defaultconfig['id']
    # The computed output
    self.props.output = OBox()
    # data for proc.xxx in template
    self.props.procvars = {}
    # Valid return code
    self.props.rc = [0]
    # get the runner from the profile
    self.props.runner = 'local'
    # The computed script. Template object
    self.props.script = None
    # The unique identity of the process; cache the suffix
    self.props._suffix = ''
    # The template class
    self.props.template = None
    # timer for running time
    self.props.timer = None
    # The computed workdir
    self.props.workdir = ''
    # remember which property is set, so it will not be overwritten by configurations;
    # do not put any values here because we want the kwargs to be overwritten by
    # the configurations but keep the values set by: p.xxx = xxx
    self.props.sets = set()

    # update the conf with kwargs
    defaultconfig.update(dict(tag=tag, desc=desc, **kwargs))
    # collapse the loading trace, we don't need it anymore.
    self.config._load({'default': defaultconfig})
def status(options, specver=None, ret=False):
    """Get the status of the project"""
    tagver = _get_version_from_gittag()
    exception = None
    gitstatus = git.status(s=True).str()
    cherry = git.cherry(v=True).str()
    if gitstatus or cherry:
        exception = UncleanRepoException(
            'You have changes uncommitted or unpushed.\n\n' +
            git.status().str())
    lastmsg = git.log('-1', pretty="format:%s", _sep='=').strip()
    if lastmsg == str(tagver):
        raise NoChangesSinceLastTagException('No changes since last tag.')
    tagver = tagver or Tag((0, 0, 0))

    rcoptions = Config()
    rcoptions._load('./.tagitrc')
    rcoptions._use('TAGIT')
    rcoptions.update(vars(options))

    changelog = rcoptions.get('changelog', '')
    increment = rcoptions.get('increment', 'patch')
    versource = rcoptions.get('versource', '')
    vertoml = rcoptions.get('vertoml', '')
    checksource = rcoptions.get('checksource', True)
    checktoml = rcoptions.get('checktoml', True)

    _log('Current version: %s' % str(tagver))
    if ret:
        nextver = tagver.increment(increment)
        _log('New version received: %r' % (specver or nextver))
        if exception:
            raise exception
        return _checkver(specver or nextver, changelog, versource, vertoml,
                         checksource, checktoml)

    nextver = tagver.increment('patch')
    _log('Next auto patch version is: %s' % nextver)
    if _checkver(nextver, changelog, versource, vertoml, checksource, checktoml):
        _log('  You are good to go with this version.')
        shortcmd = '`tagit tag`, ' if increment == 'patch' else ''
        _log('  Run %s`tagit tag -c.i patch` or `tagit tag %s`' %
             (shortcmd, nextver))

    nextver = tagver.increment('minor')
    _log('Next auto minor version is: %s' % nextver)
    if _checkver(nextver, changelog, versource, vertoml, checksource, checktoml):
        _log('  You are good to go with this version.')
        shortcmd = '`tagit tag`, ' if increment == 'minor' else ''
        _log('  Run %s`tagit tag -c.i minor` or `tagit tag %s`' %
             (shortcmd, nextver))

    nextver = tagver.increment('major')
    _log('Next auto major version is: %s' % nextver)
    if _checkver(nextver, changelog, versource, vertoml, checksource, checktoml):
        _log('  You are good to go with this version.')
        shortcmd = '`tagit tag`, ' if increment == 'major' else ''
        _log('  Run %s`tagit tag -c.i major` or `tagit tag %s`' %
             (shortcmd, nextver))

    if exception:
        raise exception
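# A standalone sketch of the semver bumping that Tag.increment presumably does
# (the real Tag class is not shown in this section; this hypothetical helper
# only illustrates patch/minor/major semantics on a (major, minor, patch) tuple).
def _increment(ver, part):
    major, minor, patch = ver
    if part == 'major':
        return (major + 1, 0, 0)
    if part == 'minor':
        return (major, minor + 1, 0)
    return (major, minor, patch + 1)  # default: bump the patch level

assert _increment((0, 4, 2), 'patch') == (0, 4, 3)
assert _increment((0, 4, 2), 'minor') == (0, 5, 0)
assert _increment((0, 4, 2), 'major') == (1, 0, 0)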