def testSkipMode(self):
    '''Test skipping mode of signature'''
    for i in range(4):
        with open(f'a_{i}.txt', 'w') as a:
            a.write(f'a_{i}.txt')
        if os.path.isfile(f'a_{i}.bak'):
            os.remove(f'a_{i}.bak')
    #
    script = SoS_Script(r'''
[A_1]
input: [f'a_{i}.txt' for i in range(4)], group_by=1
output: _input.with_suffix('.bak')

with open(_input) as ifile, open(_output, 'w') as ofile:
    ofile.write(ifile.read())
''')
    wf = script.workflow()

    env.config['sig_mode'] = 'default'
    res = Base_Executor(wf).run()
    self.assertEqual(res['__completed__']['__substep_completed__'], 4)

    env.config['sig_mode'] = 'default'
    res = Base_Executor(wf).run()
    self.assertEqual(res['__completed__']['__substep_skipped__'], 4)
    #
    env.config['sig_mode'] = 'skip'
    res = Base_Executor(wf).run()
    self.assertEqual(res['__completed__']['__substep_skipped__'], 4)
    #
    # if the result file is changed, 'skip' mode will still skip
    for i in range(4):
        with open(f'a_{i}.bak', 'a') as bak:
            bak.write('extra')
    #
    env.config['sig_mode'] = 'skip'
    res = Base_Executor(wf).run()
    self.assertEqual(res['__completed__']['__substep_skipped__'], 4)

    # now if we change to default mode, the substeps will not be skipped
    env.config['sig_mode'] = 'default'
    res = Base_Executor(wf).run()
    self.assertEqual(res['__completed__']['__substep_completed__'], 4)

def testSignatureWithWithoutTask(self):
    '''Test that the inclusion of a task statement does not trigger a rerun'''
    script = SoS_Script(r'''[1]
output: 'aa'
sh:
    echo aa > aa
''')
    if file_target('aa').exists():
        file_target('aa').unlink()
    wf = script.workflow()
    res = Base_Executor(wf).run()
    self.assertEqual(res['__completed__']['__step_completed__'], 1)

    script = SoS_Script(r'''[1]
output: 'aa'
task:
sh:
    echo aa > aa
''')
    wf = script.workflow()
    res = Base_Executor(wf).run()
    self.assertEqual(res['__completed__']['__step_completed__'], 0)

def testStepsWithStepName(self):
    '''Test the from_steps input option'''
    script = SoS_Script('''
[step_10]
output: 'a.txt'
_output.touch()

[step_20]
input: from_steps=step_name.split('_')[0] + '_10'
print(_input)
''')
    wf = script.workflow()
    Base_Executor(wf).run()
    #
    script = SoS_Script('''
[step_10]
output: 'a.txt'
_output.touch()

[step_20]
input: from_steps=10
print(_input)
''')
    wf = script.workflow()
    Base_Executor(wf).run()

def testInput(self):
    '''Test input directive'''
    self.touch(['a.txt', 'b.txt', 'a.pdf', 'a0', 'a1'])
    script = SoS_Script('''
[0]
files = ['a.txt', 'b.txt']
input: 'a.pdf', files
''')
    wf = script.workflow('default')
    Base_Executor(wf).run(mode='dryrun')
    #
    # test input types
    script = SoS_Script('''
[0: shared={'i': '_input', 'o': '_output'}]
files = (f"a{i}" for i in range(2))
input: {'a.txt', 'b.txt'}, files
output: (f"a{x}" for x in _input)
''')
    wf = script.workflow()
    Base_Executor(wf).run(mode='dryrun')
    self.assertEqual(sorted(env.sos_dict['i']), ['a.txt', 'a0', 'a1', 'b.txt'])
    self.assertEqual(sorted(env.sos_dict['o']), ['aa.txt', 'aa0', 'aa1', 'ab.txt'])

def testSharedOption(self):
    '''Test shared option of task'''
    file_target("a.txt").remove("both")
    file_target("a100.txt").remove("both")
    script = SoS_Script('''
[10: shared = {'a': 'a[0]'}]
task: shared={'a': 'int(open("a.txt").read())'}
run:
    echo 100 > a.txt

[20]
run: expand=True
    touch a{a}.txt
''')
    wf = script.workflow()
    Base_Executor(wf, config={'sig_mode': 'force'}).run()
    self.assertTrue(os.path.isfile("a100.txt"))
    # sequence of var or mapping
    file_target("a.txt").remove("both")
    file_target("a100.txt").remove("both")
    script = SoS_Script('''
[10: shared = {'a': 'a[0]', 'b': 'b[0]'}]
task: shared=[{'a': 'int(open("a.txt").read())'}, 'b']
b = 20
run:
    echo 100 > a.txt

[20]
run: expand=True
    touch a{a}_{b}.txt
''')
    wf = script.workflow()
    Base_Executor(wf, config={'sig_mode': 'force'}).run()
    self.assertTrue(os.path.isfile("a100_20.txt"))

def testSignatureWithVars(self):
    '''Test re-evaluation of signature after a variable change'''
    self.touch(('a1.out', 'a2.out'))
    script = SoS_Script('''
parameter: DB = {'input': ['a1.out'], 'output': ['b2.out']}
parameter: input_file = DB['input']
parameter: output_file = DB['output']

[2]
input: input_file, group_by = 1
output: output_file[_index]
run: expand=True
    sleep 2
    touch {_output}
''')
    wf = script.workflow()
    Base_Executor(wf).run()
    ts = os.path.getmtime('b2.out')
    #
    script = SoS_Script('''
parameter: DB = {'input': ['a1.out', 'a2.out'], 'output': ['b2.out', 'b1.out']}
parameter: input_file = DB['input']
parameter: output_file = DB['output']

[2]
input: input_file, group_by = 1
output: output_file[_index]
run: expand=True
    sleep 2
    touch {_output}
''')
    wf = script.workflow()
    Base_Executor(wf).run()
    self.assertEqual(ts, os.path.getmtime('b2.out'))

def testOverwriteKeyword(self):
    '''Test overwriting a SoS keyword with a user-defined one.'''
    if file_target('a.txt').exists():
        file_target('a.txt').unlink()
    #
    script = SoS_Script('''
def run(script):
    pass

[1]
run:
    touch a.txt
''')
    wf = script.workflow()
    Base_Executor(wf).run()
    self.assertFalse(os.path.isfile('a.txt'))
    #
    script = SoS_Script('''
parameter: run = 5

[1]
run:
    touch a.txt
''')
    wf = script.workflow()
    # this is ok, see https://github.com/vatlab/SoS/issues/1221
    Base_Executor(wf).run()

def testRebuildSignature(self):
    '''Test rebuilding signature'''
    if os.path.isfile('a.txt'):
        os.remove('a.txt')
    script = SoS_Script(r'''
[A_1]
output: 'a.txt'
_output.touch()
''')
    wf = script.workflow()

    env.config['sig_mode'] = 'default'
    res = Base_Executor(wf).run()
    self.assertEqual(res['__completed__']['__substep_completed__'], 1)

    env.config['sig_mode'] = 'default'
    res = Base_Executor(wf).run()
    self.assertEqual(res['__completed__']['__substep_skipped__'], 1)

    env.config['sig_mode'] = 'build'
    res = Base_Executor(wf).run()
    self.assertEqual(res['__completed__']['__substep_skipped__'], 1)

    env.config['sig_mode'] = 'default'
    res = Base_Executor(wf).run()
    self.assertEqual(res['__completed__']['__substep_skipped__'], 1)

    # if a.txt is changed, 'build' mode rebuilds the signature from the
    # existing file instead of rerunning the step
    with open('a.txt', 'a') as atxt:
        atxt.write('aaa')
    env.config['sig_mode'] = 'build'
    res = Base_Executor(wf).run()
    self.assertEqual(res['__completed__']['__substep_skipped__'], 1)

    # the rebuilt signature is honored by the subsequent default run
    env.config['sig_mode'] = 'default'
    res = Base_Executor(wf).run()
    self.assertEqual(res['__completed__']['__substep_skipped__'], 1)
    #
    env.config['sig_mode'] = 'force'
    res = Base_Executor(wf).run()
    self.assertEqual(res['__completed__']['__substep_completed__'], 1)
    #
    env.config['sig_mode'] = 'ignore'
    res = Base_Executor(wf).run()
    self.assertEqual(res['__completed__']['__substep_completed__'], 1)

def testTextRepr(self):
    # the " as the last character can lead to problems...
    script = SoS_Script('''
run:
    echo "Hi, This is from bash"''')
    wf = script.workflow()
    Base_Executor(wf).run()
    # windows does not have the #! mechanism so the python code
    # would incorrectly be executed as a bat file
    if sys.platform == 'win32':
        return
    for text in ('"""a"""', '"b"', r'"""\na\\nb"""', r"'''a\nb'''",
                 """ "a'\\"='" """):
        script = SoS_Script(r'''
a = 1
run: expand=True
    #!/usr/bin/env python
    with open('tmp.txt', 'w') as tmp:
        tmp.write({} + '{}')
k = """b"""'''.format(text, '{a}'))
        wf = script.workflow()
        Base_Executor(wf).run()
        with open('tmp.txt') as tmp:
            self.assertEqual(tmp.read(), eval(text) + '1')
    os.remove('tmp.txt')

def test_action_signature(self):
    '''Test action signature'''
    with open('test_action.txt', 'w') as ta:
        ta.write('#something\n')
    script = SoS_Script(r'''
[1]
input: 'test_action.txt'
run: output='lc.txt', expand=True, tracked='test_action.txt'
    sleep 5
    wc -l {_input[0]} > lc.txt
''')
    wf = script.workflow()
    Base_Executor(wf).run()
    # the second time the action should be skipped
    st = time.time()
    Base_Executor(wf).run()
    self.assertLess(time.time() - st, 5)
    # a change to the step itself does not invalidate the action signature
    script = SoS_Script(r'''
[1]
input: 'test_action.txt'
print('step is changed')
run: output='lc.txt', expand=True, tracked='test_action.txt'
    sleep 5
    wc -l {_input[0]} > lc.txt
''')
    wf = script.workflow()
    st = time.time()
    Base_Executor(wf).run()
    self.assertLess(time.time() - st, 5)
    # rebuild the signature
    env.config['sig_mode'] = 'build'
    Base_Executor(wf).run()

def testFromInclude(self):
    '''Test the %from ... include directive'''
    with open('inc.sos', 'w') as ts:
        ts.write('''
# a slave script to be included
gv = 1

[A_1]

[A_2]

[B]
''')
    script = SoS_Script('''
%from inc include gv

[0]
''')
    wf = script.workflow()
    Base_Executor(wf).run(mode='dryrun')
    Base_Executor(wf).run()
    self.assertEqual(env.sos_dict['gv'], 1)
    #
    # include with alias
    script = SoS_Script('''
%from inc include gv as g
res1 = g

[0]
''')
    wf = script.workflow()
    Base_Executor(wf).run(mode='dryrun')
    Base_Executor(wf).run()
    self.assertEqual(env.sos_dict['res1'], 1)

def testIfElse(self):
    '''Test %if/%elif/%else/%endif structural directives'''
    # no matching %endif
    self.assertRaises(ParsingError, SoS_Script, '''
%if 1
a = 1
%else
a = 2
''')
    # %else without a matching %if
    self.assertRaises(ParsingError, SoS_Script, '''
%else
a = 2
''')
    # no condition for %if
    self.assertRaises(ParsingError, SoS_Script, '''
%if
a = 2
%endif
''')
    # no condition for %elif
    self.assertRaises(ParsingError, SoS_Script, '''
%if 1
%elif
a = 2
%endif
[0]
''')
    # test if/else
    script = SoS_Script('''
%if 0
a = 1
%else
a = 2
%endif
[0]
''')
    wf = script.workflow()
    Base_Executor(wf).run(mode='dryrun')
    self.assertEqual(env.sos_dict['a'], 2)

def test_shared_dependency(clear_now_and_after):
    #
    # shared variable should introduce additional dependency
    #
    clear_now_and_after('A1.txt')
    #
    # A_1 introduces a shared variable ss; A_3 depends on ss but not on A_2
    #
    script = SoS_Script(
        textwrap.dedent('''
        [A_1: shared='ss']
        ss = 'A1'

        [A_2]
        input: None
        run:
            sleep 0

        [A_3]
        input: None
        import time
        time.sleep(0)
        with open(f"{ss}.txt", 'w') as tmp:
            tmp.write('test')
        '''))
    wf = script.workflow('A')
    dag = Base_Executor(wf).initialize_dag()
    assertDAG(
        dag,
        textwrap.dedent('''
        strict digraph "" {
            A_3;
            A_1;
            A_2;
            A_1 -> A_3;
        }
        '''))
    env.max_jobs = 3

def testInclude(self):
    '''Test include keyword'''
    with open('inc.sos', 'w') as ts:
        ts.write('''
# a slave script to be included
gv = 1

[A_1]

[A_2]

[B]
''')
    script = SoS_Script('''
%include inc
res = inc.gv

[0]
''')
    wf = script.workflow()
    Base_Executor(wf).run(mode='dryrun')
    Base_Executor(wf).run()
    self.assertEqual(env.sos_dict['res'], 1)
    #
    # include with alias
    script = SoS_Script('''
%include inc as tt
res1 = tt.gv

[0]
''')
    wf = script.workflow()
    Base_Executor(wf).run(mode='dryrun')
    Base_Executor(wf).run()
    self.assertEqual(env.sos_dict['res1'], 1)
    os.remove('inc.sos')

def testDockerBuildLinuxImage(self):
    '''Test action docker_build'''
    script = SoS_Script(r'''
[0]
docker_build: tag='test/docker_build'
#
# Super simple example of a Dockerfile
#
FROM ubuntu:latest
MAINTAINER Andrew Odewahn "*****@*****.**"
RUN apt-get update
WORKDIR /home
''')
    wf = script.workflow()
    Base_Executor(wf).run()
    # build with more options
    script = SoS_Script(r'''
[0]
docker_build: tag='test/docker_build1', label='label with space', compress=True, memory='2G'
#
# Super simple example of a Dockerfile
#
FROM ubuntu:latest
MAINTAINER Andrew Odewahn "*****@*****.**"
WORKDIR /home
''')
    wf = script.workflow()
    Base_Executor(wf).run()

def testUserDefinedFunc(self):
    '''Test the use of user-defined functions in SoS script'''
    script = SoS_Script(r"""
def myfunc():
    return 'a'

[1: shared={'test': '_output'}]
output: myfunc()

myfunc()
""")
    wf = script.workflow()
    Base_Executor(wf).run(mode='dryrun')
    self.assertEqual(env.sos_dict['test'], ['a'])
    # User-defined functions should also work under nested workflows.
    # This is difficult because the 'local namespace' is usually not
    # visible inside a function definition. The solution now is to
    # use a single workspace.
    script = SoS_Script(r"""
def myfunc():
    # test if builtin functions (sum and range) can be used here.
    return 'a' + str(sum(range(10)))

[1: shared={'test': '_output'}]
output: [myfunc() for i in range(10)][0]

myfunc()
""")
    wf = script.workflow()
    Base_Executor(wf).run(mode='dryrun')
    self.assertEqual(env.sos_dict['test'], ['a45'])

def testOverwriteKeyword(self):
    '''Test overwrite sos keyword with user defined one.'''
    file_target('a.txt').remove('both')
    #
    script = SoS_Script('''
def run(script):
    pass

[1]
run:
    touch a.txt
''')
    wf = script.workflow()
    Base_Executor(wf).run()
    self.assertFalse(os.path.isfile('a.txt'))
    #
    script = SoS_Script('''
parameter: run = 5

[1]
run:
    touch a.txt
''')
    wf = script.workflow()
    self.assertRaises(Exception, Base_Executor(wf).run)

def test_local_runtime_max_walltime(self):
    """Test server max_walltime option"""
    script = SoS_Script(
        """
[10]
task:
import time
time.sleep(15)
"""
    )
    wf = script.workflow()
    self.assertRaises(
        Exception,
        Base_Executor(
            wf,
            config={
                "config_file": "~/docker.yml",
                "default_queue": "local_limited",
                "sig_mode": "force",
            },
        ).run,
    )

def test_concurrent_task(self):
    """Test submitting tasks from concurrent substeps"""
    for f in [f"con_{x}.txt" for x in range(5)]:
        if file_target(f).exists():
            file_target(f).unlink()
    script = SoS_Script(
        """
[10]
input: for_each={'i': range(5)}
output: f'con_{i}.txt'

task:
run: expand=True
    echo {i} > {_output}
"""
    )
    wf = script.workflow()
    Base_Executor(
        wf, config={"sig_mode": "force", "default_queue": "localhost"}
    ).run()
    for f in [f"con_{x}.txt" for x in range(5)]:
        self.assertTrue(file_target(f).exists())

def testNestedFromAnotherFile(self):
    '''Test running a nested workflow defined in another file'''
    if os.path.isfile('a.txt'):
        os.remove('a.txt')
    with open('another.sos', 'w') as another:
        another.write('''
[whatever]
run:
    touch 'a.txt'
''')
    script = SoS_Script('''
[default]
sos_run('whatever', source='another.sos')
''')
    wf = script.workflow()
    # this should be ok.
    Base_Executor(wf).run()
    self.assertTrue(
        os.path.isfile('a.txt'),
        'a.txt should have been created by nested workflow from another file')

def testConcurrentTask(self):
    '''Test submitting tasks from concurrent substeps'''
    for f in [f'con_{x}.txt' for x in range(5)]:
        if file_target(f).exists():
            file_target(f).unlink()
    script = SoS_Script('''
[10]
input: for_each={'i': range(5)}
output: f'con_{i}.txt'

task:
run: expand=True
    echo {i} > {_output}
''')
    wf = script.workflow()
    Base_Executor(wf, config={
        'sig_mode': 'force',
        'default_queue': 'localhost'
    }).run()
    for f in [f'con_{x}.txt' for x in range(5)]:
        self.assertTrue(file_target(f).exists())

def testSignatureAfterRemovalOfFiles(self):
    '''Test regeneration of output files after they are removed'''
    if os.path.isfile('largefile.txt'):
        os.remove('largefile.txt')
    script = SoS_Script(r'''
[10]
# generate a file
output: 'largefile.txt'

run: expand='${ }'
    for x in {1..1000}
    do
        echo $x >> ${_output}
    done
''')
    wf = script.workflow()
    res = Base_Executor(wf).run()
    self.assertEqual(res['__completed__']['__step_completed__'], 1)
    # rerun: because this is the final target, it has to be re-generated
    os.remove('largefile.txt')
    res = Base_Executor(wf).run()
    self.assertTrue(os.path.isfile('largefile.txt'))
    self.assertEqual(res['__completed__']['__step_completed__'], 1)
    #
    # even if we discard the signature, the step would still be
    # skipped because the file signature will be calculated
    # during verification
    res = Base_Executor(wf).run()
    self.assertEqual(res['__completed__']['__step_completed__'], 0)
    #
    # now if we touch the file, it needs to be regenerated
    with open('largefile.txt', 'a') as lf:
        lf.write('something')
    res = Base_Executor(wf).run()
    self.assertEqual(res['__completed__']['__step_completed__'], 1)
    file_target('largefile.txt').unlink()

def testDynamicOutput(self):
    '''Testing dynamic output'''
    #
    if not os.path.isdir('temp'):
        os.mkdir('temp')
    #
    script = SoS_Script('''
[10: shared={'test': 'step_output'}]
ofiles = []
output: dynamic(ofiles)

for i in range(4):
    ff = 'temp/something{}.html'.format(i)
    ofiles.append(ff)
    with open(ff, 'w') as h:
        h.write('a')
''')
    wf = script.workflow()
    Base_Executor(wf).run()
    self.assertEqual(env.sos_dict['test'],
                     ['temp/something{}.html'.format(x) for x in range(4)])
    #
    shutil.rmtree('temp')

def test_output_executable(self):
    '''Testing target executable().'''
    # add the current directory to $PATH so that the generated
    # 'lls' can be found as an executable
    os.environ['PATH'] += os.pathsep + '.'
    script = SoS_Script('''
[0]
output: executable('lls')

run:
    touch lls
    chmod +x lls
''')
    wf = script.workflow()
    if file_target('lls').exists():
        file_target('lls').unlink()
    env.config['sig_mode'] = 'force'
    Base_Executor(wf).run()
    # test validation
    env.config['sig_mode'] = 'default'
    Base_Executor(wf).run()
    if file_target('lls').exists():
        file_target('lls').unlink()

def testStepWithMultipleOutput(self):
    '''Test addition of steps with multiple outputs. Each step should be added only once'''
    script = SoS_Script('''
[test_1: provides=['{}.txt'.format(i) for i in range(10)]]
output: ['{}.txt'.format(i) for i in range(10)]
run:
    touch {output}

[test_2: provides=['{}.txt'.format(i) for i in range(10, 20)]]
depends: ['{}.txt'.format(i) for i in range(10)]
output: ['{}.txt'.format(i) for i in range(10, 20)]
run:
    touch {output}

[default]
depends: ['{}.txt'.format(i) for i in range(10, 20)]
''')
    wf = script.workflow()
    Base_Executor(wf, config={'output_dag': 'test'}).initialize_dag()
    with open('test.dot') as dot:
        lc = len(dot.readlines())
    self.assertEqual(lc, 6)

def test_output_in_task(self):
    """Test passing _output to task #1136"""
    script = SoS_Script(
        """
chunks = [1, 2]

[1]
input: for_each = 'chunks'
output: f'{_chunks}.txt'
_output.touch()

[2]
input: group_with = 'chunks'
output: summary_stats = f'{_input}.summary', ld_matrix = f'{_input}.result'

task:
python3: expand="${ }"
    open("${_output['summary_stats']}", 'w').close()
    open("${_output['ld_matrix']}", 'w').close()
"""
    )
    wf = script.workflow()
    Base_Executor(wf, config={"default_queue": "localhost"}).run()

def testSendSymbolicLink(self):
    '''Test to_host with a symbolic link, or a directory that contains a symbolic link. #508'''
    # create a symbolic link
    with open('ttt.py', 'w') as ttt:
        ttt.write('something')
    if os.path.exists('llink'):
        os.remove('llink')
    subprocess.call('ln -s ttt.py llink', shell=True)
    script = SoS_Script('''
[10]
task: to_host='llink'
sz = os.path.getmtime('llink')
''')
    wf = script.workflow()
    Base_Executor(wf, config={
        'config_file': '~/docker.yml',
        # wait for the task to complete
        'wait_for_task': True,
        'default_queue': 'docker',
        'sig_mode': 'force'
    }).run()
    os.remove('llink')

def testSectionActions(self):
    '''Test actions of sections'''
    SoS_Script("""
[0]
func('''
multiline
string''', with_option=1
)
""")
    self.assertRaises(ParsingError, SoS_Script, '''
[0]
func(
''')

def test_shared_var_in_for_each(self):
    '''Test using a shared step variable in a for_each option'''
    self.touch(['1.txt', '2.txt'])
    for file in ('1.out', '2.out', '1.out2', '2.out2'):
        if file_target(file).exists():
            file_target(file).unlink()
    script = SoS_Script('''
[work_1: shared = {'data': 'step_output'}]
input: "1.txt", "2.txt", group_by = 'single', pattern = '{name}.{ext}'
output: expand_pattern('{_name}.out')
run: expand=True
    touch {_output}

[work_2]
depends: sos_variable('data')
input: "1.txt", "2.txt", group_by = 'single', for_each = dict(data=data), pattern = '{name}.{ext}'
output: expand_pattern('{data}_{_name}.out2')
run: expand=True
    touch {_output}
''')
    wf = script.workflow()
    Base_Executor(wf).run()

def testLocalFromHostOption(self):
    '''Test from_host option'''
    if os.path.isfile('llp'):
        os.remove('llp')
    script = SoS_Script('''
[10]
task: from_host='llp'
sh:
    echo "LLP" > llp
''')
    wf = script.workflow()
    Base_Executor(
        wf,
        config={
            'config_file': '~/docker.yml',
            # wait for the task to complete
            'wait_for_task': True,
            'sig_mode': 'force',
            'default_queue': 'localhost',
        }).run()
    self.assertTrue(os.path.isfile('llp'))
    os.remove('llp')