def testFromInclude(self):
    '''Test include keyword'''
    with open('inc.sos', 'w') as ts:
        ts.write('''
# a slave script to be included
gv = 1

[A_1]
[A_2]
[B]
''')
    script = SoS_Script('''
%from inc include gv

[0]
''')
    wf = script.workflow()
    Base_Executor(wf).dryrun()
    Base_Executor(wf).run()
    self.assertEqual(env.sos_dict['gv'], 1)
    #
    # include with alias
    script = SoS_Script('''
%from inc include gv as g
res1 = g

[0]
''')
    wf = script.workflow()
    Base_Executor(wf).dryrun()
    Base_Executor(wf).run()
    self.assertEqual(env.sos_dict['res1'], 1)
def testVarOutput(self):
    '''Test early appearance of variable output'''
    script = SoS_Script('''
[0]
seq = range(3)
input: for_each='seq'
output: "test${_seq}.txt"
print(output)
''')
    wf = script.workflow()
    # this did not work until variable output was made available sooner
    Base_Executor(wf).dryrun()
    # however, output should be the same in task
    script = SoS_Script('''
[0]
seq = range(3)
input: for_each='seq'
output: "test${_seq}.txt"
assert(len(output), _index + 1)
task:
assert(len(output), 3)
''')
    wf = script.workflow()
    # this did not work until variable output was made available sooner
    Base_Executor(wf).dryrun()
def testWorkflows(self):
    '''Test workflows defined in SoS script'''
    script = SoS_Script('''[0]''')
    self.assertEqual(sorted(script.workflows), ['default'])
    script = SoS_Script('''[0]\n[1]''')
    self.assertEqual(sorted(script.workflows), ['default'])
    script = SoS_Script('''[0]\n[*_1]''')
    self.assertEqual(sorted(script.workflows), ['default'])
    script = SoS_Script('''[0]\n[*_1]\n[auxiliary:provides='{a}.txt']''')
    self.assertEqual(sorted(script.workflows), ['auxiliary', 'default'])
    script = SoS_Script('''[0]\n[*_1]\n[human_1]''')
    self.assertEqual(sorted(script.workflows), ['default', 'human'])
    script = SoS_Script('''[0]\n[*_1]\n[human_1]\n[mouse_2]''')
    self.assertEqual(sorted(script.workflows), ['default', 'human', 'mouse'])
    script = SoS_Script('''[0]\n[*_1]\n[human_1]\n[mouse_2]\n[s*_2]''')
    self.assertEqual(sorted(script.workflows), ['default', 'human', 'mouse'])
    # skip option is not effective at parsing time
    script = SoS_Script('''[0]\n[*_1]\n[human_1]\n[mouse_2:skip]\n[s*_2]''')
    self.assertEqual(sorted(script.workflows), ['default', 'human', 'mouse'])
    # unnamed
    script = SoS_Script('''[0]\n[*_1]\n[human_1]\n[mouse]\n[s*_2]''')
    self.assertEqual(sorted(script.workflows), ['default', 'human', 'mouse'])
    #
    # workflow name with -
    script = SoS_Script('''[proc-1]\n[test-case_2]''')
    self.assertEqual(sorted(script.workflows), ['proc-1', 'test-case'])
    script.workflow('proc-1')
    script.workflow('proc-1 + test-case:2')
def testOverwriteKeyword(self):
    '''Test overwriting a SoS keyword with a user-defined one.'''
    FileTarget('a.txt').remove('both')
    #
    script = SoS_Script('''
def run(script):
    pass

[1]
run:
    touch a.txt
''')
    wf = script.workflow()
    Base_Executor(wf).run()
    self.assertFalse(os.path.isfile('a.txt'))
    #
    script = SoS_Script('''
parameter: run = 5

[1]
run:
    touch a.txt
''')
    wf = script.workflow()
    self.assertRaises(Exception, Base_Executor(wf).run)
def testTextRepr(self):
    # the " as the last character can lead to problems...
    script = SoS_Script('''
run:
    echo "Hi, This is from bash"''')
    wf = script.workflow()
    Base_Executor(wf).run()
    #
    for text in ('"""a"""', '"b"', r'"""\na\\nb"""', r"'''a\nb'''", """ "a'\\"='" """):
        script = SoS_Script(r'''
a = 1
python:
    with open('tmp.txt', 'w') as tmp:
        tmp.write({} + '{}')
k = """b"""'''.format(text, "${a}"))
        wf = script.workflow()
        Base_Executor(wf).run()
        with open("tmp.txt") as tmp:
            self.assertEqual(tmp.read(), eval(text) + "1")
    os.remove("tmp.txt")
def testInput(self):
    '''Test input directive'''
    self.touch(['a.txt', 'b.txt', 'a.pdf', 'a0', 'a1'])
    script = SoS_Script('''
[0]
files = ['a.txt', 'b.txt']
input: 'a.pdf', files
''')
    wf = script.workflow('default')
    Base_Executor(wf).dryrun()
    #
    # test input types
    script = SoS_Script('''
[0:shared={'i':'input', 'o':'output'}]
files = ("a${i}" for i in range(2))
input: {'a.txt', 'b.txt'}, files
output: ("a${x}" for x in _input)
''')
    wf = script.workflow()
    Base_Executor(wf).dryrun()
    self.assertEqual(sorted(env.sos_dict['i']), ['a.txt', 'a0', 'a1', 'b.txt'])
    self.assertEqual(sorted(env.sos_dict['o']), ['aa.txt', 'aa0', 'aa1', 'ab.txt'])
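# Several tests in this excerpt call a `touch` helper on the test class that is
# not shown here. The sketch below is an assumption about what such a helper
# could look like (it simply creates empty files for a name or list of names);
# the real helper in the test suite may be implemented differently.
def touch(self, files):
    '''Create empty placeholder files so that input targets exist (sketch).'''
    if isinstance(files, str):
        files = [files]
    for f in files:
        with open(f, 'w'):
            pass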
def testInclude(self):
    '''Test include keyword'''
    with open('inc.sos', 'w') as ts:
        ts.write('''
# a slave script to be included
gv = 1

[A_1]
[A_2]
[B]
''')
    script = SoS_Script('''
%include inc
res = inc.gv

[0]
''')
    wf = script.workflow()
    Base_Executor(wf).dryrun()
    Base_Executor(wf).run()
    self.assertEqual(env.sos_dict['res'], 1)
    #
    # include with alias
    script = SoS_Script('''
%include inc as tt
res1 = tt.gv

[0]
''')
    wf = script.workflow()
    Base_Executor(wf).dryrun()
    Base_Executor(wf).run()
    self.assertEqual(env.sos_dict['res1'], 1)
    os.remove('inc.sos')
def testSharedVar(self):
    '''Test shared var with rq queue'''
    script = SoS_Script('''
[work_1: shared = {'data': 'output'}]
input: "1.txt", "2.txt", group_by = 'single', pattern = '{name}.{ext}'
output: expand_pattern('{_name}.out')
task: concurrent = True
run:
    touch ${_output}

[work_2]
input: "1.txt", "2.txt", group_by = 'single', pattern = '{name}.{ext}', paired_with = ['data']
output: expand_pattern('{_name}.out2')
task: concurrent = True
run:
    touch ${_data} ${_output}

[default]
sos_run("work:1+work:2")
''')
    self.touch(['1.txt', '2.txt'])
    subprocess.call('sos remove . -t -y', shell=True)
    wf = script.workflow()
    RQ_Executor(wf).run()
    for f in ['1.out', '1.out2', '2.out', '2.out2']:
        self.assertTrue(FileTarget(f).exists('target'))
        FileTarget(f).remove('both')
def testDynamicNestedWorkflow(self):
    '''Test nested workflow controlled by command line option'''
    script = SoS_Script('''
if 'executed' not in locals():
    executed = []
parameter: wf='a'

[a_1:shared='executed']
executed.append(step_name)
[a_2:shared='executed']
executed.append(step_name)
[a_3:shared='executed']
executed.append(step_name)
[b_1:shared='executed']
executed.append(step_name)
[b_2:shared='executed']
executed.append(step_name)
[b_3:shared='executed']
executed.append(step_name)

[default:shared='executed']
executed.append(step_name)
sos_run(wf)
''')
    wf = script.workflow()
    Base_Executor(wf, args=['--wf', 'b']).run()
    self.assertEqual(env.sos_dict['executed'], ['default_0', 'b_1', 'b_2', 'b_3'])
    #
    Base_Executor(wf, args=['--wf', 'a']).run()
    self.assertEqual(env.sos_dict['executed'], ['default_0', 'a_1', 'a_2', 'a_3'])
def testWarnIf(self):
    '''Test action warn_if'''
    script = SoS_Script(r"""
[0]
warn_if(input is None, 'Expect to see a warning message')
""")
    wf = script.workflow()
    # should see a warning message.
    Base_Executor(wf).dryrun()
    #self.assertRaises(ExecuteError, Base_Executor(wf).run)
    script = SoS_Script(r"""
[0]
input: 'a.txt', 'b.txt'
warn_if(len(input) == 1)
""")
    wf = script.workflow()
def testChainedDepends(self):
    '''Test chained dependencies'''
    shutil.rmtree('.sos')
    os.makedirs('.sos/.runtime')
    script = SoS_Script(r'''
# this step provides variable `var`
[index: provides='{filename}.bam.bai']
input: "${filename}.bam"
sh:
    echo "Generating ${output}"
    touch ${output}

[call: provides='{filename}.vcf']
input: "${filename}.bam"
depends: "${input}.bai"
sh:
    echo "Calling variants from ${input} with ${depends} to ${output}"
    touch ${output}
''')
    FileTarget('a.bam.bai').remove('both')
    FileTarget('a.vcf').remove('both')
    self.touch('a.bam')
    Base_Executor(script.workflow()).run(targets=['a.vcf'])
    for file in ('a.vcf', 'a.bam', 'a.bam.bai'):
        FileTarget(file).remove('both')
def testSh(self):
    '''Test action sh'''
    script = SoS_Script(r'''
[0]
sh:
    echo 'Echo'
''')
    wf = script.workflow()
    Base_Executor(wf).run()
    script = SoS_Script(r'''
[0]
sh:
    echo 'Echo
''')
    wf = script.workflow()
    self.assertRaises(ExecuteError, Base_Executor(wf).run)
def testSoSAction(self):
    '''Test sos_action decorator'''
    script = SoS_Script(r"""
from sos.actions import SoS_Action

@SoS_Action(run_mode='run')
def func_run():
    return 1

@SoS_Action(run_mode=['run', 'dryrun'])
def func_both():
    return 1

[0: shared=('b', 'c')]
b=func_run()
c=func_both()
""")
    wf = script.workflow()
    Base_Executor(wf).dryrun()
    self.assertTrue(isinstance(env.sos_dict['b'], Undetermined))
    self.assertTrue(isinstance(env.sos_dict['c'], Undetermined))
    #
    Base_Executor(wf).run()
    self.assertEqual(env.sos_dict['b'], 1)
    self.assertEqual(env.sos_dict['c'], 1)
def testFailIf(self):
    '''Test action fail_if'''
    self.touch('a.txt')
    script = SoS_Script(r"""
[0]
input: 'a.txt'
fail_if(len(input) == 1)
""")
    wf = script.workflow()
    # should fail
    self.assertRaises(ExecuteError, Base_Executor(wf).run)
    script = SoS_Script(r"""
[0]
input: 'a.txt', 'b.txt'
fail_if(len(input) == 1)
""")
    wf = script.workflow()
def testPatternReuse(self):
    '''Test repeated use of steps that use pattern and produce different files.'''
    #
    for f in ['A1.txt', 'A2.txt', 'B1.txt', 'B1.txt.p', 'B2.txt', 'B2.txt.p']:
        FileTarget(f).remove('both')
    #
    #  A1 <- P <- B1
    #  A1 <- P <- B2
    #  A2
    #
    script = SoS_Script('''
[A_1]
input: 'B1.txt.p', 'B2.txt.p'
output: 'A1.txt'
sh:
    touch A1.txt

[A_2]
sh:
    touch A2.txt

[B1: provides='B1.txt']
sh:
    touch B1.txt

[B2: provides='B2.txt']
sh:
    touch B2.txt

[P: provides='{filename}.p']
input: filename
sh:
    touch ${output}
''')
    wf = script.workflow()
    dag = Base_Executor(wf).initialize_dag()
    self.assertDAG(dag, '''
strict digraph "" {
"P ['B2.txt.p']";
"B1 ['B1.txt']";
"B2 ['B2.txt']";
A_2;
A_1;
"P ['B1.txt.p']";
"P ['B2.txt.p']" -> A_1;
"B1 ['B1.txt']" -> "P ['B1.txt.p']";
"B2 ['B2.txt']" -> "P ['B2.txt.p']";
A_1 -> A_2;
"P ['B1.txt.p']" -> A_1;
}
''')
    Base_Executor(wf).run()
    for f in ['A1.txt', 'A2.txt', 'B1.txt', 'B1.txt.p', 'B2.txt', 'B2.txt.p']:
        t = FileTarget(f)
        self.assertTrue(t.exists())
        t.remove('both')
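# The DAG comparisons in this excerpt rely on an `assertDAG` helper that is not
# included here. The sketch below shows one way such a helper could be written;
# `dag.write_dot` is an assumption about the DAG object's API, and the actual
# helper in the test suite may differ.
def assertDAG(self, dag, content):
    '''Compare the dot representation of `dag` with the expected text (sketch).'''
    from io import StringIO
    out = StringIO()
    dag.write_dot(out)  # assumed: dumps the DAG in graphviz dot format
    def lines(text):
        # node and edge order is not significant, so compare sorted stripped lines
        return sorted(x.strip() for x in text.split('\n') if x.strip())
    self.assertEqual(lines(out.getvalue()), lines(content))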
def testGetOutput(self):
    '''Test utility function get_output'''
    script = SoS_Script(r"""
[0: shared='ret']
ret = get_output('echo blah')
""")
    wf = script.workflow()
    # should be ok
    Base_Executor(wf).run()
    self.assertEqual(env.sos_dict['ret'], 'blah\n')
    #
    script = SoS_Script(r"""
[0: shared='ret']
ret = get_output('echo blah', show_command=True)
""")
    wf = script.workflow()
    # should be ok
    Base_Executor(wf).run()
    self.assertEqual(env.sos_dict['ret'], '$ echo blah\nblah\n')
    #
    script = SoS_Script(r"""
[0: shared='ret']
ret = get_output('echo blah', show_command=True, prompt='% ')
""")
    wf = script.workflow()
    # should be ok
    Base_Executor(wf).run()
    self.assertEqual(env.sos_dict['ret'], '% echo blah\nblah\n')
    #
    script = SoS_Script(r"""
[0]
get_output('catmouse')
""")
    wf = script.workflow()
    # should fail
    self.assertRaises(ExecuteError, Base_Executor(wf).run)
    #
    #
    script = SoS_Script(r"""
[0]
ret = get_output('cat -h')
""")
    wf = script.workflow()
    # this should give a warning and fail
    self.assertRaises(ExecuteError, Base_Executor(wf).run)
def testLiteralConnection(self):
    '''Test the connection of steps by variables.'''
    for f in ['A1.txt']:
        FileTarget(f).remove('both')
    #
    # A_1 introduces a shared variable p; A_3, A_4 and A_5 depend on p but A_2 does not
    #
    script = SoS_Script('''
[A_1: shared='p']
sh:
    touch 'A1.txt'

p = 'A1.txt'

[A_2]
input: None
sh:
    sleep 3

[A_3]
input: p
sh:
    sleep 3

[A_4]
input: p
sh:
    sleep 3

[A_5]
input: dynamic(p)
''')
    wf = script.workflow('A')
    dag = Base_Executor(wf).initialize_dag()
    self.assertDAG(dag, '''
strict digraph "" {
A_1;
A_4;
A_2;
A_3;
A_5;
A_1 -> A_4;
A_1 -> A_3;
A_1 -> A_5;
A_4 -> A_5;
}
''')
    env.max_jobs = 3
    st = time.time()
    MP_Executor(wf).run()
    self.assertLess(time.time() - st, 5)
    for f in ['A1.txt']:
        self.assertTrue(FileTarget(f).exists())
        FileTarget(f).remove('both')
def testYAMLConfig(self):
    '''Test config file in yaml format'''
    with open('myconfig.yml', 'w') as config:
        config.write('''
# Lines beginning with # are skipped when the JSON is parsed, so we can
# put comments into our JSON configuration files
{
    StoreOwner : "John Doe",

    # List of items that we sell
    Fruits: [ "apple", "banana", "pear" ],
    Price: 1.05
}
''')
    with open('config.sos', 'w') as sos:
        sos.write('''
[0]
print(CONFIG['StoreOwner'])
print(CONFIG.get('StoreOwner', 'something'))
print(CONFIG.get('StoreOwnerSpouse', 'someone else'))
print(CONFIG.StoreOwner)
''')
    # run the command
    self.assertEqual(subprocess.call('sos run config.sos -c myconfig.yml',
        stderr=subprocess.DEVNULL, stdout=subprocess.DEVNULL, shell=True), 0)
    # now test the value
    script = SoS_Script(filename='config.sos')
    wf = script.workflow()
    Base_Executor(wf, config={'config_file': 'myconfig.yml'}).run()
    self.assertEqual(env.sos_dict['CONFIG']['Price'], 1.05)
    self.assertEqual(env.sos_dict['CONFIG']['StoreOwner'], 'John Doe')
    self.assertEqual(env.sos_dict['CONFIG']['Fruits'], ['apple', 'banana', 'pear'])
    # configuration items should be readonly
    with open('config.sos', 'w') as sos:
        sos.write('''
[0]
CONFIG['a'] = 'b'
''')
    # the command would fail with error message
    # ERROR: Failed to process statement CONFIG['a'] = 'b'
    # : Cannot modify a readonly dictionary.
    self.assertEqual(subprocess.call('sos run config.sos -c myconfig.yml',
        stderr=subprocess.DEVNULL, stdout=subprocess.DEVNULL, shell=True), 1)
    #
    with open('config.sos', 'w') as sos:
        sos.write('''
[0]
CONFIG.a = 'b'
''')
    # the command would fail with error message
    # ERROR: Failed to process statement CONFIG.a = 'b'
    # : Cannot modify a readonly dictionary.
    self.assertEqual(subprocess.call('sos run config.sos -c myconfig.yml',
        stderr=subprocess.DEVNULL, stdout=subprocess.DEVNULL, shell=True), 1)
    #
    for filename in ['config.sos', 'myconfig.yml']:
        os.remove(filename)
def testBashInDocker(self):
    '''Test action bash in docker environment'''
    script = SoS_Script(r'''
[0]
bash: docker_image='ubuntu'
    echo 'Echo'
''')
    wf = script.workflow()
    Base_Executor(wf).run()
def testYAMLConfig(self):
    '''Test config file in yaml format'''
    with open('myconfig.yaml', 'w') as config:
        config.write('''
# Lines beginning with # are skipped when the JSON is parsed, so we can
# put comments into our JSON configuration files
{
    StoreOwner : "John Doe",

    # List of items that we sell
    Fruits: [ "apple", "banana", "pear" ],
    Price: 1.05
}
''')
    with open('config.sos', 'w') as sos:
        sos.write('''
[0]
print(CONFIG['StoreOwner'])
print(CONFIG.get('StoreOwner', 'something'))
print(CONFIG.get('StoreOwnerSpouse', 'someone else'))
print(CONFIG.StoreOwner)
''')
    # run the command
    self.assertEqual(subprocess.call('sos run config.sos -c myconfig.yaml',
        stderr=subprocess.DEVNULL, stdout=subprocess.DEVNULL, shell=True), 0)
    # now test the value
    script = SoS_Script(filename='config.sos')
    wf = script.workflow()
    Base_Executor(wf, config_file='myconfig.yaml').run()
    self.assertEqual(env.sos_dict['CONFIG']['Price'], 1.05)
    self.assertEqual(env.sos_dict['CONFIG']['StoreOwner'], 'John Doe')
    self.assertEqual(env.sos_dict['CONFIG']['Fruits'], ['apple', 'banana', 'pear'])
    # configuration items should be readonly
    with open('config.sos', 'w') as sos:
        sos.write('''
[0]
CONFIG['a'] = 'b'
''')
    # the command would fail with error message
    # ERROR: Failed to process statement CONFIG['a'] = 'b'
    # : Cannot modify a readonly dictionary.
    self.assertEqual(subprocess.call('sos run config.sos -c myconfig.yaml',
        stderr=subprocess.DEVNULL, stdout=subprocess.DEVNULL, shell=True), 1)
    #
    with open('config.sos', 'w') as sos:
        sos.write('''
[0]
CONFIG.a = 'b'
''')
    # the command would fail with error message
    # ERROR: Failed to process statement CONFIG.a = 'b'
    # : Cannot modify a readonly dictionary.
    self.assertEqual(subprocess.call('sos run config.sos -c myconfig.yaml',
        stderr=subprocess.DEVNULL, stdout=subprocess.DEVNULL, shell=True), 1)
    #
    for filename in ['config.sos', 'myconfig.yaml']:
        os.remove(filename)
def testPython3(self):
    script = SoS_Script(r'''
[0]
python3:
    a = {'1', '2'}
    print(a)
''')
    wf = script.workflow()
    Base_Executor(wf).run()
def testCombinedWorkflow(self):
    '''Test the creation and execution of combined workflow'''
    script = SoS_Script('''
a0 = 0
if 'executed' not in locals():
    executed = []
parameter: a = a0 + 1

[a_1: shared='executed']
executed.append(step_name)
[a_2: shared='executed']
executed.append(step_name)
[a_3: shared='executed']
executed.append(step_name)
[a_4: shared='executed']
executed.append(step_name)
output: 'out_a_4'
[b_1: shared=['executed', 'input_b1']]
executed.append(step_name)
input_b1 = input
[b_2: shared='executed']
executed.append(step_name)
[b_3: shared='executed']
executed.append(step_name)
[b_4: shared='executed']
executed.append(step_name)
[c: shared='executed']
executed.append(step_name)
[d: shared='executed']
executed.append(step_name)
''')
    wf = script.workflow('a+b')
    Base_Executor(wf).dryrun()
    self.assertEqual(env.sos_dict['executed'], ['a_1', 'a_2', 'a_3', 'a_4', 'b_1', 'b_2', 'b_3', 'b_4'])
    self.assertEqual(env.sos_dict['a'], 1)
    self.assertEqual(env.sos_dict['input_b1'], ['out_a_4'])
    #
    wf = script.workflow('a: 1-2 + a:4 + b:3-')
    Base_Executor(wf).dryrun()
    self.assertEqual(env.sos_dict['executed'], ['a_1', 'a_2', 'a_4', 'b_3', 'b_4'])
    #
    wf = script.workflow('a+c+d')
    Base_Executor(wf).dryrun()
    self.assertEqual(env.sos_dict['executed'], ['a_1', 'a_2', 'a_3', 'a_4', 'c_0', 'd_0'])
def testRInDocker(self):
    '''Test action R in docker environment'''
    script = SoS_Script(r'''
[0]
R: docker_image='r-base'
    nums = rnorm(25, mean=100, sd=15)
    mean(nums)
''')
    wf = script.workflow()
    Base_Executor(wf).run()
def testPythonInDocker(self):
    '''Test action python in docker environment'''
    script = SoS_Script(r'''
[0]
python: docker_image='python'
    a = {'1': 2}
    print(a)
''')
    wf = script.workflow()
    Base_Executor(wf).run()
def testProgressBar(self):
    """Test progress bar"""
    env.verbosity = 1
    prog = ProgressBar("test", 100)
    for i in range(100):
        prog.update(i)
    prog.done()
    prog = ProgressBar("test", 100)
    for i in range(20):
        prog.progress(5)
    prog.done()
    #
    script = SoS_Script("""
[1]
[2]
[3]
[4]
[5]
""")
    wf = script.workflow()
    Base_Executor(wf).run()
    # progress bar with nested workflow
    script = SoS_Script("""
import time
time.sleep(0.5)
[sub_1]
[sub_2]
[sub_3]
[sub_4]
[a_1]
[a_2]
[a_3]
sos_run('sub')
[a_4]
[a_5]
""")
    wf = script.workflow("a")
    Base_Executor(wf).run()
def testZsh(self):
    '''Test action zsh'''
    if not shutil.which('zsh'):
        return
    script = SoS_Script(r'''
[0]
zsh:
    echo "Hello World!", $SHELL
''')
    wf = script.workflow()
    Base_Executor(wf).run()
def testPython(self):
    '''Test python command. This might fail if python3 is the default interpreter'''
    script = SoS_Script(r'''
[0]
python:
    a = {'1': 2}
    print(a)
''')
    wf = script.workflow()
    Base_Executor(wf).run()
def testPythonsInDocker(self):
    '''Test action python3 in docker environment'''
    script = SoS_Script(r'''
[0]
python3: docker_image='python'
    #!/usr/bin/env python3
    a = {'1', '2'}
    print(a)
''')
    wf = script.workflow()
    Base_Executor(wf).run()
def testArgs(self):
    '''Test args option of scripts'''
    FileTarget('a.txt').remove('both')
    script = SoS_Script(r'''
[0]
sh: args='-n'
    touch a.txt
''')
    wf = script.workflow()
    Base_Executor(wf).run()
    self.assertFalse(os.path.exists('a.txt'))
def testShInDocker(self):
    '''Test action sh in docker environment'''
    # test docker
    script = SoS_Script(r'''
[0]
sh: docker_image='ubuntu'
    echo 'Echo
''')
    wf = script.workflow()
    self.assertRaises(ExecuteError, Base_Executor(wf).run)
    #
    Base_Executor(wf).dryrun()
def testPerlInDocker(self):
    '''Test action perl in docker environment'''
    script = SoS_Script(r'''
[0]
perl: docker_image='ubuntu'
    use strict;
    use warnings;
    print "hi NAME\n";
''')
    wf = script.workflow()
    Base_Executor(wf).run()
def testR(self):
    '''Test action R'''
    if not shutil.which('R'):
        return
    script = SoS_Script(r'''
[0]
R:
    nums = rnorm(25, mean=100, sd=15)
    mean(nums)
''')
    wf = script.workflow()
    Base_Executor(wf).run()
def testNodeInDocker(self):
    '''Test action node in docker environment'''
    script = SoS_Script(r'''
[0]
node: docker_image='node'
    var args = process.argv.slice(2);
    console.log('Hello ' + args.join(' ') + '!');
''')
    wf = script.workflow()
    Base_Executor(wf).run()
    #
    script = SoS_Script(r'''
[0]
JavaScript: docker_image='node'
    var args = process.argv.slice(2);
    console.log('Hello ' + args.join(' ') + '!');
''')
    wf = script.workflow()
    Base_Executor(wf).run()
def testNode(self):
    '''Test action node'''
    if not shutil.which('node'):
        return
    script = SoS_Script(r'''
[0]
node:
    var args = process.argv.slice(2);
    console.log('Hello ' + args.join(' ') + '!');
''')
    wf = script.workflow()
    Base_Executor(wf).run()
    #
    script = SoS_Script(r'''
[0]
JavaScript:
    var args = process.argv.slice(2);
    console.log('Hello ' + args.join(' ') + '!');
''')
    wf = script.workflow()
    Base_Executor(wf).run()
def testMixedTabAndSpace(self):
    '''Test handling of mixed tab and space'''
    script = SoS_Script('''
[1: shared=['a', 'b', 'c']]
if True:
    a = 1
\tb = 2
\tc= 3
''')
    wf = script.workflow()
    Base_Executor(wf).run()
    self.assertEqual(env.sos_dict['a'], 1)
    self.assertEqual(env.sos_dict['b'], 2)
    self.assertEqual(env.sos_dict['c'], 3)
def testAnalyzeSection(self):
    """Test analysis of sections (statically)"""
    script = SoS_Script("""
g1 = 'a'
g2 = 1
parameter: p1 = 5
parameter: infiles = 'a.txt'

[A_1: shared='b']
b = p1 + 2
input: infiles
output: None
c = 5

[A_2]
b = [1, 2, 3]
input: for_each='b'
depends: 'some.txt', executable('ls')
import time
import random
r = random.randint(1, 5)
time.sleep(r)

[A_3]
input: None
print(p1)
""")
    wf = script.workflow("A")
    for section in wf.sections:
        res = analyze_section(section)
        if section.names[0][1] == "1":
            self.assertTrue(isinstance(res["step_input"], Undetermined))
            self.assertEqual(res["step_depends"], [])
            self.assertEqual(res["step_output"], [])
            self.assertEqual(res["environ_vars"], {"p1", "infiles"})
            self.assertEqual(res["signature_vars"], {"c"})
            self.assertEqual(res["changed_vars"], {"b"})
        elif section.names[0][1] == "2":
            self.assertEqual(res["step_input"], [])
            self.assertEqual(res["step_depends"], ["some.txt", executable("ls")])
            self.assertTrue(isinstance(res["step_output"], Undetermined))
            # for_each will not be used for DAG
            self.assertEqual(res["environ_vars"], {"for_each"})
            self.assertEqual(res["signature_vars"], {"import", "r", "time", "random"})
            self.assertEqual(res["changed_vars"], set())
def testParallelExecution(self):
    '''Test basic parallel execution'''
    '''
    A1 <- None
    A2 <- B2
    '''
    for f in ['A1.txt', 'B2.txt', 'A2.txt']:
        FileTarget(f).remove('both')
    script = SoS_Script('''
[A_1]
output: 'A1.txt'
sh:
    sleep 3
    touch A1.txt

[A_2]
input: 'B2.txt'
output: 'A2.txt'
sh:
    sleep 3
    touch A2.txt

[B: provides='B2.txt']
output: 'B2.txt'
sh:
    touch B2.txt
''')
    wf = script.workflow()
    dag = Base_Executor(wf).initialize_dag()
    self.assertDAG(dag, '''
strict digraph "" {
A_1;
A_2;
"B ['B2.txt']";
"B ['B2.txt']" -> A_2;
}
''')
    env.max_jobs = 4
    st = time.time()
    #env.verbosity = 4
    MP_Executor(wf).run()
    self.assertLess(time.time() - st, 4)
    for f in ['A1.txt', 'B2.txt', 'A2.txt']:
        FileTarget(f).remove('both')
def testDockerImage(self):
    '''Test docker_image option'''
    script = SoS_Script(r'''
[0]
fastq_files = glob.glob('data/*.fastq')
input_volume = os.path.dirname(fastq_files[0])
output_volume = os.getcwd()

run: docker_image='compbio/ngseasy-fastqc:1.0-r001',
    volumes=["${input_volume}:/input_data", "${output_volume}:/output_data"]

    ls -l /input_data
    /usr/local/bin/fastqc /input_data/*.fastq --outdir /output_data
''')
    wf = script.workflow()
    Base_Executor(wf).run()
def testRubyInDocker(self):
    '''Test action ruby in docker environment'''
    script = SoS_Script(r'''
[0]
ruby: docker_image='ruby'
    line1 = "Cats are smarter than dogs";
    line2 = "Dogs also like meat";

    if ( line1 =~ /Cats(.*)/ )
        puts "Line1 contains Cats"
    end

    if ( line2 =~ /Cats(.*)/ )
        puts "Line2 contains Dogs"
    end
''')
    wf = script.workflow()
    Base_Executor(wf).run()
def testSharedDependency(self):
    #
    # shared variable should introduce additional dependency
    #
    for f in ['A1.txt']:
        FileTarget(f).remove('both')
    #
    # A1 introduces a shared variable ss, A3 depends on ss but not A2
    #
    script = SoS_Script('''
[A_1: shared='ss']
ss = 'A1'

[A_2]
input: None
sh:
    sleep 3

[A_3]
input: None
import time
time.sleep(3)
with open("${ss}.txt", 'w') as tmp:
    tmp.write('test')
''')
    wf = script.workflow('A')
    dag = Base_Executor(wf).initialize_dag()
    self.assertDAG(dag, '''
strict digraph "" {
A_3;
A_1;
A_2;
A_1 -> A_3;
}
''')
    env.max_jobs = 3
    st = time.time()
    MP_Executor(wf).run()
    self.assertLess(time.time() - st, 5)
    for f in ['A1.txt']:
        self.assertTrue(FileTarget(f).exists())
        FileTarget(f).remove('both')
def testSkipStep(self):
    '''Test the skip option to skip certain steps'''
    script = SoS_Script('''
parameter: skip = 0

[0: shared={'a':'var'}, skip=skip==0]
var = 0

[1: shared={'b': 'var'}, skip=skip==1]
var = 1
''')
    wf = script.workflow()
    Base_Executor(wf, args=['--skip', '0']).run()
    self.assertEqual(env.sos_dict['b'], 1)
    #
    Base_Executor(wf, args=['--skip', '1']).run()
    self.assertEqual(env.sos_dict['a'], 0)
def testVariableTarget(self):
    '''Test dependency caused by variable usage.'''
    script = SoS_Script(r'''
[A: shared='b']
b = 1

[C: shared={'c':'k'}]
k = 2

[all: shared='p']
depends: sos_variable('c'), sos_variable('b')

p = c + b
''')
    wf = script.workflow('all')
    Base_Executor(wf).run()
    self.assertEqual(env.sos_dict['p'], 3)