def testNoYaml(self, testdir):
    """PyPPL falls back to an empty config when PyYAML cannot be imported.

    Writes a YAML config file, evicts the 'yaml' module and empties
    sys.path so any re-import inside PyPPL fails, then asserts the
    parsed config is empty and a log file was still produced (and
    removes it afterwards).
    """
    PyPPL.DEFAULT_CFGFILES = []
    ymlfile = path.join(testdir, 'config.yaml')
    helpers.writeFile(ymlfile, ['default:', ' forks: 10'])
    import sys
    if helpers.moduleInstalled('yaml'):
        import yaml
        del sys.modules['yaml']
    # Empty sys.path so yaml cannot be re-imported inside PyPPL; restore
    # it in a finally block so a failing construction cannot leave the
    # interpreter with a broken import path (the original only restored
    # it on the success path).
    saved_paths = list(sys.path)
    del sys.path[:]
    try:
        with helpers.log2str(levels='all') as (out, err):
            pp = PyPPL(config={'_log': {'file': True}}, cfgfile=ymlfile)
    finally:
        sys.path = saved_paths
    self.assertDictEqual(pp.config, {})
    logfiles = glob(path.splitext(sys.argv[0])[0] + "*.pyppl.log")
    self.assertTrue(logfiles)
    for logfile in logfiles:
        remove(logfile)
def dataProvider_test_resume(self):
    r"""Provide cases for resume-point resolution.

    Dependency graph used by the cases::

                 / p3 --- \
        p1 -- p2           \        / p8
          \     \ p4 \      p7
           p10        p6 --/   \ p9
                 p5 /

    Each case: (pipeline, starts, resumes, skip-flag, expected-procs
    [, exception, exception-msg-regex]).
    """
    with helpers.log2str():
        pp = PyPPL({'_log': {'_file': None}})
    p_resume1 = Proc()
    p_resume2 = Proc()
    p_resume3 = Proc()
    p_resume4 = Proc()
    p_resume5 = Proc()
    p_resume6 = Proc()
    p_resume7 = Proc()
    p_resume8 = Proc()
    p_resume9 = Proc()
    p_resume10 = Proc()
    p_resume9.depends = p_resume7
    p_resume8.depends = p_resume7
    p_resume7.depends = p_resume3, p_resume6
    # NOTE: the original set "p_resume3.depends = p_resume2" twice;
    # the redundant duplicate assignment has been removed.
    p_resume3.depends = p_resume2
    p_resume6.depends = p_resume4, p_resume5
    p_resume4.depends = p_resume2
    p_resume2.depends = p_resume1
    p_resume10.depends = p_resume1
    yield pp, [p_resume1, p_resume5], [p_resume2, p_resume6], True, [
        p_resume1, p_resume5
    ], PyPPLProcRelationError, 'One of the routes cannot be achived from resumed processes: \'p_resume10 <- \[p_resume1\]\''
    yield pp, [p_resume1, p_resume5], [p_resume1, p_resume6], True, [p_resume5]
    yield pp, [p_resume1, p_resume5], [p_resume1, p_resume3, p_resume6
                                       ], False, [p_resume5, p_resume2, p_resume4]
def dataProvider_testBuildInput(self):
    """Provide cases for Proc._buildInput.

    Each case: (proc, input-spec, expected-input-dict
    [, exception, exception-msg, expected-log-lines]).
    """
    pBuildInputDep = Proc()
    pBuildInputDep.props['channel'] = []
    pBuildInput = Proc()
    pBuildInput.depends = pBuildInputDep
    yield pBuildInput, {}, {}
    yield pBuildInput, 'a,b', {
        'a': {'data': [], 'type': 'var'},
        'b': {'data': [], 'type': 'var'}
    }
    yield pBuildInput, 'a:unknowntype', {}, ProcInputError, 'Unknown input type'
    pBuildInputDep1 = Proc()
    pBuildInputDep1.props['channel'] = Channel.create([1, 2])
    pBuildInputDep2 = Proc()
    pBuildInputDep2.props['channel'] = Channel.create([3, 4])
    pBuildInput1 = Proc()
    pBuildInput1.depends = pBuildInputDep1, pBuildInputDep2
    yield pBuildInput1, 'a,b', {
        'a': {'data': [1, 2], 'type': 'var'},
        'b': {'data': [3, 4], 'type': 'var'}
    }
    pBuildInput2 = Proc()
    pBuildInput2.depends = pBuildInputDep1, pBuildInputDep2
    yield pBuildInput2, 'a', {
        'a': {'data': [1, 2], 'type': 'var'}
    }, None, None, ['Not all data are used as input, 1 column(s) wasted.']
    pBuildInput3 = Proc()
    pBuildInput3.depends = pBuildInputDep1, pBuildInputDep2
    # BUG FIX: this case previously yielded pBuildInput2 again (copy-paste
    # error), leaving the freshly configured pBuildInput3 entirely unused.
    yield pBuildInput3, {
        'a,b,c': lambda ch1, ch2: ch1.cbind(ch2)
    }, {
        'a': {'data': [1, 2], 'type': 'var'},
        'b': {'data': [3, 4], 'type': 'var'},
        'c': {'data': ['', ''], 'type': 'var'}
    }, None, None, [
        'No data found for input key "c", use empty strings/lists instead.'
    ]
    pBuildInput4 = Proc()
    yield pBuildInput4, {
        'a': [1],
        'b': 2,
        'c': [1, 2],
        'd:files': [[self.testdir, self.testdir]]
    }, {
        'a': {'data': [1, 1], 'type': 'var'},
        'b': {'data': [2, 2], 'type': 'var'},
        'c': {'data': [1, 2], 'type': 'var'},
        'd': {
            'data': [[self.testdir, self.testdir],
                     [self.testdir, self.testdir]],
            'type': 'files'
        },
    }
    # Build a process once, save its settings, then mark it skip+ so the
    # next case exercises reloading the input from the saved settings.
    pBuildInput5 = Proc()
    pBuildInput5.ppldir = self.testdir
    pBuildInput5.input = {
        'a': ['h"i\'nihao'],
        'b': 2,
        'c': [1, 2],
        'd:files': [[self.testdir, self.testdir]]
    }
    with helpers.log2str():
        pBuildInput5._buildInput()
        pBuildInput5._buildProps()
        pBuildInput5._saveSettings()
    pBuildInput5.props['resume'] = 'skip+'
    yield pBuildInput5, {}, {
        'a': {'data': ['h"i\'nihao', 'h"i\'nihao'], 'type': 'var'},
        'b': {'data': [2, 2], 'type': 'var'},
        'c': {'data': [1, 2], 'type': 'var'},
        'd': {
            'data': [[self.testdir, self.testdir],
                     [self.testdir, self.testdir]],
            'type': 'files'
        },
    }
    # skip+ process with no saved settings file must raise.
    pBuildInput6 = Proc()
    pBuildInput6.ppldir = self.testdir
    pBuildInput6.props['resume'] = 'skip+'
    yield pBuildInput6, {}, {}, ProcInputError, 'Cannot parse input for skip\+/resume process, no such file:'
def testProgressBar(self, jm, jid, loglevel, bar):
    """The progress bar for job `jid` appears on stderr at `loglevel`."""
    with helpers.log2str(levels='all') as (out, err):
        jm.progressbar(jid, loglevel)
    logged = err.getvalue()
    self.assertIn(loglevel.upper(), logged)
    self.assertIn(bar, logged)
def dataProvider_testCheckCached(self):
    """Provide cases for cache checking: (proc, expected, log lines)."""
    # Case 0: caching disabled entirely.
    pCheckCached = Proc()
    pCheckCached.props['cache'] = False
    yield pCheckCached, False, [
        'DEBUG', 'Not cached, because proc.cache is False'
    ]
    # Case 1: every job truly cached.
    pCheckCached1 = Proc()
    pCheckCached1.ppldir = self.testdir
    pCheckCached1.input = {'a': [1, 2]}
    with helpers.log2str():
        pCheckCached1._tidyBeforeRun()
    for job in pCheckCached1.jobs:
        job.cache()
    yield pCheckCached1, True, [
        'INFO', 'Truly cached jobs : ALL', 'Export-cached jobs: []'
    ]
    # Case 2: every job export-cached (rc written, outputs mirrored in exdir).
    pCheckCached2 = Proc()
    pCheckCached2.ppldir = self.testdir
    pCheckCached2.input = {'a': [1, 2]}
    pCheckCached2.output = 'a:file:{{in.a}}.txt'
    pCheckCached2.cache = 'export'
    pCheckCached2.exdir = self.testdir
    with helpers.log2str():
        pCheckCached2._tidyBeforeRun()
    for idx, job in enumerate(pCheckCached2.jobs):
        helpers.writeFile(job.rcfile, 0)
        fname = str(idx + 1) + '.txt'
        helpers.writeFile(path.join(job.outdir, fname))
        helpers.writeFile(path.join(self.testdir, fname))
    yield pCheckCached2, True, [
        'INFO', 'Truly cached jobs : []', 'Export-cached jobs: ALL'
    ]
    # Case 3: only the first job cached -> partial run.
    pCheckCached3 = Proc()
    pCheckCached3.ppldir = self.testdir
    pCheckCached3.input = {'a': [1, 2]}
    with helpers.log2str():
        pCheckCached3._tidyBeforeRun()
    pCheckCached3.jobs[0].cache()
    yield pCheckCached3, False, [
        'INFO', 'Truly cached jobs : 0', 'Export-cached jobs: []',
        'Partly cached, only run non-cached 1 job(s).',
        'DEBUG', 'Jobs to run: 1'
    ]
    # Case 4: nothing cached at all.
    pCheckCached4 = Proc()
    pCheckCached4.ppldir = self.testdir
    pCheckCached4.input = {'a': [1, 2]}
    with helpers.log2str():
        pCheckCached4._tidyBeforeRun()
    yield pCheckCached4, False, [
        'DEBUG', 'Not cached, none of the jobs are cached.',
    ]