Example 1
    def test_rebuild_signature_with_substeps(self):
        '''Test rebuilding signature'''
        for i in range(4):
            if os.path.isfile(f'a_{i}.txt'):
                os.remove(f'a_{i}.txt')
        script = SoS_Script(r'''
[A_1]
input: for_each=dict(i=range(4))
output: f'a_{i}.txt'
_output.touch()
''')
        wf = script.workflow()
        env.config['sig_mode'] = 'default'
        res = Base_Executor(wf).run()
        self.assertEqual(res['__completed__']['__substep_completed__'], 4)
        env.config['sig_mode'] = 'default'
        res = Base_Executor(wf).run()
        self.assertEqual(res['__completed__']['__substep_skipped__'], 4)
        env.config['sig_mode'] = 'build'
        res = Base_Executor(wf).run()
        self.assertEqual(res['__completed__']['__substep_skipped__'], 4)
        env.config['sig_mode'] = 'default'
        res = Base_Executor(wf).run()
        self.assertEqual(res['__completed__']['__substep_skipped__'], 4)
        # if a.txt is changed, rebuild will rerun
        for i in range(4):
            with open(f'a_{i}.txt', 'a') as atxt:
                atxt.write('aaa')
        env.config['sig_mode'] = 'build'
        res = Base_Executor(wf).run()
        self.assertEqual(res['__completed__']['__substep_skipped__'], 4)
        # rerun?
        env.config['sig_mode'] = 'default'
        res = Base_Executor(wf).run()
        self.assertEqual(res['__completed__']['__substep_skipped__'], 4)
        #
        env.config['sig_mode'] = 'force'
        res = Base_Executor(wf).run()
        self.assertEqual(res['__completed__']['__substep_completed__'], 4)
        #
        env.config['sig_mode'] = 'ignore'
        res = Base_Executor(wf).run()
        self.assertEqual(res['__completed__']['__substep_completed__'], 4)
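All of these examples assume the SoS test-suite imports that the scraped listing leaves out. As a point of reference, a minimal self-contained driver in the same style might look like the sketch below; the import paths (sos.parser, sos.utils, sos.workflow_executor) and the tiny one-step workflow are assumptions for illustration, not part of the original examples.

# Minimal sketch of the boilerplate shared by these examples (assumed imports).
from sos.parser import SoS_Script                 # parses a workflow definition
from sos.utils import env                         # global runtime configuration
from sos.workflow_executor import Base_Executor   # executes a parsed workflow

script = SoS_Script(r'''
[hello]
output: 'hello.txt'
_output.touch()
''')
wf = script.workflow()
env.config['sig_mode'] = 'force'   # rerun unconditionally, ignoring saved signatures
res = Base_Executor(wf).run()
print(res['__completed__'])        # e.g. counts of completed/skipped substeps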
Example 2
def test_variable_target():
    '''Test dependency caused by variable usage.'''
    script = SoS_Script(
        textwrap.dedent(r'''
    [A: shared='b']
    b = 1

    [C: shared={'c':'k'}]
    k = 2

    [all: shared='p']
    depends: sos_variable('c'), sos_variable('b')

    p = c + b

    '''))
    wf = script.workflow('all')
    Base_Executor(wf).run()
    assert env.sos_dict['p'] == 3
Example 3
    def testAnalyzeOutputFrom(self):
        '''Test extraction of the output_from() option from input'''
        script = SoS_Script('''
[A_1]
input:  output_from('B')

[A_2]
input: something_unknown, sos_groups(output_from(['C1', 'C2']), by=2), group_by=1
''')
        wf = script.workflow('A')
        Base_Executor(wf)
        for section in wf.sections:
            res = analyze_section(section)
            if section.names[0][1] == 1:
                self.assertEqual(res['step_depends'],
                                 sos_targets(sos_step('B')))
            if section.names[0][1] == 2:
                self.assertTrue(res['step_depends'] == sos_targets(
                    sos_step('C1'), sos_step('C2')))
Example 4
    def testRuby(self):
        '''Test action ruby'''
        if not shutil.which('ruby'):
            self.skipTest('ruby is not installed')
        script = SoS_Script(r'''
[0]
ruby:
line1 = "Cats are smarter than dogs";
line2 = "Dogs also like meat";

if ( line1 =~ /Cats(.*)/ )
  puts "Line1 contains Cats"
end
if ( line2 =~ /Dogs(.*)/ )
  puts "Line2 contains Dogs"
end
''')
        wf = script.workflow()
        Base_Executor(wf).run()
Example 5
    def testConcurrency(self):
        '''Test concurrency option for runtime environment'''
        env.max_jobs = 5
        env.config['sig_mode'] = 'force'
        script = SoS_Script(r"""
[0]

repeat = range(4)
input: for_each='repeat'

task: 

import time
print('I am {}, waited {} seconds'.format(_index, _repeat + 1))
time.sleep(_repeat + 1)
print('I am {}, done'.format(_index))
""")
        wf = script.workflow()
        Base_Executor(wf).run()
Example 6
    def testDAGofDynamicNestedWorkflow(self):
        #
        # Because we are not sure which workflows would be executed
        # until run time, the DAG should not contain nested workflow
        # until runtime.
        #
        for f in ['B0.txt', 'B0.txt.p', 'B1.txt', 'B1.txt.p', 'B2.txt', 'B2.txt.p']:
            if file_target(f).exists():
                file_target(f).unlink()
        #
        #  A1 <- P <- B
        #  A1 <- P <- B
        #  A2
        #
        #  ALL calls A and B with parameter
        #
        script = SoS_Script('''
[A_1]
parameter: num = 2
input: f"B{num}.txt.p"

[B: provides='B{num}.txt']
run: expand=True
    touch 'B{num[0]}.txt'

[P: provides='{filename}.p']
input: filename
run: expand=True
    touch {_output}

[ALL]

for i in range(3):
    sos_run('A', num=i)


''')
        # ALL invokes nested workflow A at runtime, which in turn triggers B and P
        wf = script.workflow('ALL')
        Base_Executor(wf).run()
        for f in ['B0.txt', 'B0.txt.p', 'B1.txt', 'B1.txt.p', 'B2.txt', 'B2.txt.p']:
            self.assertTrue(file_target(f).target_exists())
            file_target(f).unlink()
Example 7
    def testFunDef(self):
        '''Test definition of a function that can be used by other steps'''
        self.touch(['aa.txt', 'ab.txt'])
        # in nested workflow?
        script = SoS_Script(r"""
def myfunc(a):
    return ['a' + x for x in a]

[mse: shared={'test':'_output'}]
input: myfunc(['a.txt', 'b.txt'])

[1]
sos_run('mse')
""")
        wf = script.workflow()
        Base_Executor(wf).run(mode='dryrun')
        #
        # Names defined in the subworkflow are not returned to the master dict
        self.assertTrue('test' not in env.sos_dict)
Example 8
    def testRmarkdownWithActionOutput(self):
        script = SoS_Script(r'''
[10]
report: output='default_10.md'
A_10

[20]
report: output='default_20.md'
A_20

[100]
# generate report
Rmarkdown(input=['default_10.md', 'default_20.md'], output='output.html')
''')
        wf = script.workflow()
        Base_Executor(wf, config={'report_output': '${step_name}.md'}).run()
        for f in ['default_10.md', 'default_20.md', 'output.html']:
            self.assertTrue(file_target(f).exists())
            file_target(f).remove()
Example 9
    def testProgressBar(self):
        # progress bar with nested workflow
        script = SoS_Script('''
import time
time.sleep(0)
[sub_1]
[sub_2]
[sub_3]
[sub_4]
[a_1]
[a_2]
[a_3]
sos_run('sub')
[a_4]
[a_5]
''')
        env.verbosity = 1
        wf = script.workflow('a')
        Base_Executor(wf).run()
Example 10
    def testNoWait(self):
        '''Test no wait'''
        script = SoS_Script(r'''
[10]
input: for_each=[{'a': range(3)}]

task: concurrent=True
run: expand=True
    echo "a = {a}"
    sleep 20
''')
        wf = script.workflow()
        #st = time.time()
        env.config['sig_mode'] = 'force'
        env.config['wait_for_task'] = False
        ret = Base_Executor(wf).run()
        # sos should quit
        self.assertGreater(len(ret['pending_tasks']), 0)
        #
        time.sleep(18)
        print('RESTART')
        env.config['sig_mode'] = 'default'
        env.config['wait_for_task'] = True
        env.config['resume_mode'] = True
        #st = time.time()
        try:
            Base_Executor(wf).run()
            # sos should wait until all pending tasks finish
            #self.assertLess(time.time() - st, 15)
        except SystemExit:
            # ok if the task has already been completed and there is nothing
            # to resume
            pass
        #
        # rerun task in different mode
        env.config['resume_mode'] = False
        env.config['wait_for_task'] = True
        Base_Executor(wf).run()
        env.config['sig_mode'] = 'assert'
        Base_Executor(wf).run()
        env.config['sig_mode'] = 'build'
        Base_Executor(wf).run()
Example 11
    def testRemoteExecution(self):
        subprocess.check_output('sos purge', shell=True).decode()
        script = SoS_Script('''
[10]
input: for_each={'i': range(5)}
task:

run: expand=True
    echo I am {i}
    sleep {5+i}
''')
        wf = script.workflow()
        res = Base_Executor(wf, config={
                'config_file': '~/docker.yml',
                # do not wait for jobs
                'wait_for_task': False,
                'default_queue': 'docker',
                'max_running_jobs': 5,
                'sig_mode': 'force',
                }).run()
        import time
        # we should be able to get status
        tasks = ' '.join(res['pending_tasks'])
        # wait another 15 seconds?
        time.sleep(15)
        out = subprocess.check_output('sos status {} -c ~/docker.yml -q docker'.format(tasks), shell=True).decode()
        self.assertEqual(out.count('completed'), len(res['pending_tasks']), 'Expect all completed jobs: ' + out)

        Host.reset()
        # now run the workflow again, this time waiting for the tasks
        #st = time.time()
        Base_Executor(wf, config={
                'config_file': '~/docker.yml',
                # do not wait for jobs
                'wait_for_task': True,
                'default_queue': 'docker',
                'resume_mode': True,
                }).run()
        # should finish relatively fast?
        #self.assertLess(time.time() - st, 5)
        out = subprocess.check_output('sos status {} -c ~/docker.yml'.format(tasks), shell=True).decode()
        self.assertEqual(out.count('completed'), len(res['pending_tasks']), 'Expect all completed jobs: ' + out)
Example 12
    def testFromHostOption(self):
        '''Test from_host option'''
        if os.path.isfile('llp'):
            os.remove('llp')
        script = SoS_Script('''
[10]
task: from_host='llp'
with open('llp', 'w') as llp:
    llp.write("LLP")
''')
        wf = script.workflow()
        Base_Executor(
            wf,
            config={
                'config_file': '~/docker.yml',
                'wait_for_task': True,
                'default_queue': 'docker',
                'sig_mode': 'force',
            }).run()
        self.assertTrue(os.path.isfile('llp'))
Example 13
    def test_rmarkdown_with_input(self):
        # Rmarkdown with specified input.
        script = SoS_Script(r'''
[10]
report: output='a.md'
## Some random figure

Generated by matplotlib


[100]
# generate report
output: 'myreport.html'
Rmarkdown(input='a.md', output=_output[0])
''')
        wf = script.workflow()
        Base_Executor(wf).run()
        self.assertTrue(os.path.isfile('myreport.html'))
        if file_target('myreport.html').exists():
            file_target('myreport.html').unlink()
Example 14
    def testLongerCode(self):
        '''Test definition of classes (with intermediate newlines) in step.'''
        script = SoS_Script('''# first block

[0: shared='b']
class A:
    def __init__(self):
        pass

    # the newline above should be fine because SoS treats this as
    # regular lines
    def __call__(self):
        return 0

b = A()()

''')
        wf = script.workflow()
        Base_Executor(wf).run()
        self.assertEqual(env.sos_dict['b'], 0)
Example 15
    def testSharedVarInPairedWith(self):
        self.touch(['1.txt', '2.txt'])
        script = SoS_Script('''
[work_1: shared = {'data': 'step_output'}]
input: "1.txt", "2.txt", group_by = 'single', pattern = '{name}.{ext}'
output: expand_pattern('{_name}.out')
run: expand=True
  touch {_output}

[work_2]
input: "1.txt", "2.txt", group_by = 'single', pattern = '{name}.{ext}', paired_with = ['data']
output: expand_pattern('{_name}.out2')
run: expand=True
  touch {_data[0]} {_output}
''')
        wf = script.workflow()
        Base_Executor(wf).run()
        for file in ('1.out', '2.out', '1.out2', '2.out2'):
            if file_target(file).exists():
                file_target(file).unlink()
Example 16
    def testReverseSharedVariable(self):
        '''Test shared variables defined in auxiliary steps'''
        if file_target('a.txt').exists():
            file_target('a.txt').unlink()
        script = SoS_Script(r'''
[A: shared='b', provides='a.txt']
b = 1
run:
    touch a.txt

[B_1]
depends: 'a.txt'

[B_2]
print(b)

''')
        wf = script.workflow('B')
        Base_Executor(wf).run()
        self.assertEqual(env.sos_dict['b'], 1)
Example 17
    def testForwardStyleDepend(self):
        '''Test the execution of a forward-style workflow with undetermined dependency'''
        if file_target('a.txt.bak').exists():
            file_target('a.txt.bak').unlink()
        self.touch('a.txt')
        script = SoS_Script('''
[10]
input: 'a.txt'
output: f"{_input}.bak"
run: expand=True
    cp {_input} {_output}

[20]
depends: "a.txt.bak"
run: expand=True
    ls {_depends}
''')
        wf = script.workflow()
        Base_Executor(wf).run()
        self.assertTrue(file_target('a.txt.bak').target_exists())
Example 18
    def testNestedFromAnotherFile(self):
        '''Test running a nested workflow defined in another file'''
        if os.path.isfile('a.txt'):
            os.remove('a.txt')
        with open('another.sos', 'w') as another:
            another.write('''
[whatever]
run:
    touch 'a.txt'

''')
        script = SoS_Script('''
[default]
sos_run('whatever', source='another.sos')
''')
        wf = script.workflow()
        # this should be ok.
        Base_Executor(wf).run()
        self.assertTrue(os.path.isfile('a.txt'),
                        'a.txt should have been created by nested workflow from another file')
Example 19
    def testOutputExecutable(self):
        '''Testing target executable.'''
        # change $PATH so that lls can be found in the current
        # directory.
        os.environ['PATH'] += os.pathsep + '.'
        script = SoS_Script('''
[0]
output: executable('lls')
run:
    touch lls
    chmod +x lls
''')
        wf = script.workflow()
        file_target('lls').remove('both')
        env.config['sig_mode'] = 'force'
        Base_Executor(wf).run()
        # test validation
        env.config['sig_mode'] = 'default'
        Base_Executor(wf).run()
        file_target('lls').remove('both')
Example 20
    def testDryrunPlaceholder(self):
        '''Test the creation and removal of placeholder files in dryrun mode'''
        if file_target('1.txt').exists():
            file_target('1.txt').unlink()
        script = SoS_Script('''
a = '1.txt'

[out: provides=a]
output: a
run: expand = True
  touch {a}

[1]
depends: a
''')
        wf = script.workflow()
        # should be ok
        res = Base_Executor(wf).run(mode='dryrun')
        # but the file would be removed afterwards
        self.assertFalse(os.path.isfile('1.txt'))
Example 21
    def testLocalRuntimeMaxWalltime(self):
        '''Test server max_walltime option'''
        script = SoS_Script('''
[10]
task:
import time
time.sleep(15)
''')
        wf = script.workflow()
        self.assertRaises(
            Exception,
            Base_Executor(
                wf,
                config={
                    'config_file': '~/docker.yml',
                    # do not wait for jobs
                    'wait_for_task': True,
                    'default_queue': 'local_limited',
                    'sig_mode': 'force',
                }).run)
Example 22
def test_reverse_shared_variable(clear_now_and_after):
    '''Test shared variables defined in auxiliary steps'''
    clear_now_and_after('a.txt')
    script = SoS_Script(
        textwrap.dedent(r'''
    [A: shared='b', provides='a.txt']
    b = 1
    run:
        touch a.txt

    [B_1]
    depends: 'a.txt'

    [B_2]
    print(b)

    '''))
    wf = script.workflow('B')
    Base_Executor(wf).run()
    assert env.sos_dict['b'] == 1
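Example 22 relies on a clear_now_and_after pytest fixture that is defined elsewhere (the test suite's conftest.py). A minimal sketch of such a fixture, assuming its job is simply to delete the named files before the test and again afterwards, could look like this:

import os

import pytest

@pytest.fixture
def clear_now_and_after():
    # Remember every file handed to the fixture, delete it immediately,
    # and delete it again during teardown once the test has finished.
    registered = []

    def _clear(*files):
        registered.extend(files)
        for f in files:
            if os.path.isfile(f):
                os.remove(f)

    yield _clear
    for f in registered:
        if os.path.isfile(f):
            os.remove(f)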
Example 23
    def testOutputInTask(self):
        '''Test passing _output to task #1136'''
        script = SoS_Script('''
chunks  = [1,2]
[1]
input: for_each = 'chunks'
output: f'{_chunks}.txt'
_output.touch()

[2]
input: group_with = 'chunks'
output: summary_stats = f'{_input}.summary', ld_matrix = f'{_input}.result'
task:

python3: expand="${ }"
       open("${_output['summary_stats']}", 'w').close()
       open("${_output['ld_matrix']}", 'w').close()
''')
        wf = script.workflow()
        Base_Executor(wf, config={'default_queue': 'localhost'}).run()
Example 24
    def test_provides_executable(self):
        """Testing provides executable target."""
        # change $PATH so that lkls can be found in the current
        # directory.
        os.environ["PATH"] += os.pathsep + "."
        if file_target("lls").exists():
            file_target("lls").unlink()
        script = SoS_Script("""
[lls: provides=executable('lkls')]
run:
    touch lkls
    chmod +x lkls

[c]
depends: executable('lkls')

""")
        wf = script.workflow("c")
        Base_Executor(wf).run()
        file_target("lkls").unlink()
Example 25
    def testOutputPattern(self):
        '''Test option pattern of step output'''
        #env.verbosity = 4
        self.touch(['a-20.txt', 'b-10.txt'])
        script = SoS_Script(r"""
[0: shared=['base', 'name', 'par', '_output']]

files = ['a-20.txt', 'b-10.txt']
input: files, pattern=['{name}-{par}.txt', '{base}.txt']
output: expand_pattern('{base}-{name}-{par}.txt'), expand_pattern('{par}.txt')

""")
        wf = script.workflow()
        Base_Executor(wf).run(mode='dryrun')
        self.assertEqual(env.sos_dict['base'], ["a-20", 'b-10'])
        self.assertEqual(env.sos_dict['name'], ["a", 'b'])
        self.assertEqual(env.sos_dict['par'], ["20", '10'])
        self.assertEqual(
            env.sos_dict['_output'],
            ['a-20-a-20.txt', 'b-10-b-10.txt', '20.txt', '10.txt'])
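The pattern option in the step above performs named-field extraction on the input file names (the test expects base=['a-20', 'b-10'], name=['a', 'b'], par=['20', '10']). The following plain-Python sketch illustrates that behaviour only; it is not the SoS implementation:

import re

def extract(pattern, filenames):
    # Turn a '{name}-{par}.txt' style pattern into a regex with named groups,
    # escaping the literal parts, then collect matched fields across all names.
    regex = ''
    for part in re.split(r'(\{\w+\})', pattern):
        regex += f'(?P<{part[1:-1]}>.+?)' if part.startswith('{') else re.escape(part)
    fields = {}
    for name in filenames:
        match = re.fullmatch(regex, name)
        if match:
            for key, value in match.groupdict().items():
                fields.setdefault(key, []).append(value)
    return fields

print(extract('{name}-{par}.txt', ['a-20.txt', 'b-10.txt']))
# -> {'name': ['a', 'b'], 'par': ['20', '10']}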
Example 26
    def test_provides_executable(self):
        '''Testing provides executable target.'''
        # change $PATH so that lkls can be found in the current
        # directory.
        os.environ['PATH'] += os.pathsep + '.'
        if file_target('lls').exists():
            file_target('lls').unlink()
        script = SoS_Script('''
[lls: provides=executable('lkls')]
run:
    touch lkls
    chmod +x lkls

[c]
depends: executable('lkls')

''')
        wf = script.workflow('c')
        Base_Executor(wf).run()
        file_target('lkls').unlink()
Example 27
    def testRemoteExecute(self):
        script = SoS_Script('''
[10]
output: 'result.txt'
task:

run:
  echo 'a' > 'result.txt'

''')
        wf = script.workflow()
        Base_Executor(wf, config={
                'config_file': '~/docker.yml',
                'wait_for_task': True,
                'default_queue': 'docker',
                'sig_mode': 'force',
                }).run()
        self.assertTrue(file_target('result.txt').target_exists())
        with open('result.txt') as res:
            self.assertEqual(res.read(), 'a\n')
Example 28
    def test_removed_depends(self):
        '''Test a case where a dependent file has signature, but
        gets removed before the next run.'''
        script = SoS_Script('''
[tet: provides='a.txt']
run:
    echo "something" > a.txt

[20]
depends: 'a.txt'
output: 'b.txt'
run:
    cat a.txt > b.txt
''')
        wf = script.workflow()
        # this should be ok.
        Base_Executor(wf).run()
        # now let us remove a.txt (but the signature is still there)
        os.remove('a.txt')
        os.remove('b.txt')
        Base_Executor(wf).run()
Example 29
    def testPassingVarsToNestedWorkflow(self):
        '''Test if variables can be passed to nested workflows'''
        script = SoS_Script(r"""
import time
import random

[nested]
print(f'I am nested {nested} with seed {seed}')

[0]
reps = range(5)
input: for_each='reps'
import random
nested = _reps
seed = random.randint(1, 1000)
print(f'Passing {seed} to {nested}')
sos_run('nested', nested=nested, seed=seed)

""")
        wf = script.workflow()
        Base_Executor(wf).run()
Example 30
    def test_max_walltime(self):
        """Test server restriction max_walltime"""
        script = SoS_Script(
            """
[10]
task: walltime='1:00:00'
print('a')
"""
        )
        wf = script.workflow()
        self.assertRaises(
            Exception,
            Base_Executor(
                wf,
                config={
                    "config_file": "~/docker.yml",
                    "default_queue": "docker_limited",
                    "sig_mode": "force",
                },
            ).run,
        )