def test_pipeline_decrement_stage():
    """Check that _decrement_stage() walks the stage cursor backwards and
    clears the completion flag, without touching the stage count."""

    pipeline = Pipeline()

    stages = []
    for _ in range(2):
        task = Task()
        task.executable = ['/bin/date']
        stage = Stage()
        stage.tasks = task
        stages.append(stage)

    pipeline.add_stages(stages)

    # Drive the pipeline to its end first.
    pipeline._increment_stage()
    pipeline._increment_stage()
    assert pipeline._stage_count == 2
    assert pipeline._cur_stage == 2
    assert pipeline._completed_flag.is_set() is True

    # Step back once: cursor drops to 1, completion flag is cleared.
    pipeline._decrement_stage()
    assert pipeline._stage_count == 2
    assert pipeline._cur_stage == 1
    assert pipeline._completed_flag.is_set() is False

    # Step back again: cursor drops to 0, still not complete.
    pipeline._decrement_stage()
    assert pipeline._stage_count == 2
    assert pipeline._cur_stage == 0
    assert pipeline._completed_flag.is_set() is False
def test_pipeline_decrement_stage():
    """Check that _decrement_stage() walks the stage cursor backwards and
    clears the completion flag, without touching the stage count."""

    pipeline = Pipeline()

    stages = []
    for _ in range(2):
        task = Task()
        task.executable = '/bin/date'
        stage = Stage()
        stage.tasks = task
        stages.append(stage)

    pipeline.add_stages(stages)

    # Drive the pipeline to its end first.
    pipeline._increment_stage()
    pipeline._increment_stage()
    assert pipeline._stage_count == 2
    assert pipeline._cur_stage == 2
    assert pipeline._completed_flag.is_set() is True

    # Step back once: cursor drops to 1, completion flag is cleared.
    pipeline._decrement_stage()
    assert pipeline._stage_count == 2
    assert pipeline._cur_stage == 1
    assert pipeline._completed_flag.is_set() is False

    # Step back again: cursor drops to 0, still not complete.
    pipeline._decrement_stage()
    assert pipeline._stage_count == 2
    assert pipeline._cur_stage == 0
    assert pipeline._completed_flag.is_set() is False
def test_stage_exceptions(self, mocked_generate_id, l, i, b, se):
    """
    ***Purpose***: Test if correct exceptions are raised when attributes are
    assigned unacceptable values.

    The fixture parameters l, i, b, se supply a list, an int, a bool and a
    set of invalid candidate values; mocked_generate_id is consumed by the
    patch decorator on the test class.
    """

    # Removed a stale commented-out ValueError check that was dead code.
    s = Stage()
    data_type = [l, i, b, se]

    for data in data_type:

        # Stage.name accepts only strings; every other type must raise.
        if not isinstance(data, str):
            with self.assertRaises(TypeError):
                s.name = data

        # Both the tasks setter and add_tasks() must reject non-Task input.
        with self.assertRaises(TypeError):
            s.tasks = data

        with self.assertRaises(TypeError):
            s.add_tasks(data)
def test_state_order():
    """
    **Purpose**: Test if the Pipeline, Stage and Task are assigned their
    states in the correct order
    """

    def _make_task():
        # One-task factory used for both tasks of the single stage.
        task = Task()
        task.name = 'simulation'
        task.executable = ['/bin/date']
        task.copy_input_data = []
        task.copy_output_data = []
        return task

    pipeline = Pipeline()
    pipeline.name = 'p1'

    stage = Stage()
    stage.name = 's1'
    stage.tasks = _make_task()
    stage.add_tasks(_make_task())

    pipeline.add_stages(stage)

    res_dict = {'resource': 'local.localhost',
                'walltime': 5,
                'cpus': 1,
                'project': ''}

    os.environ['RADICAL_PILOT_DBURL'] = MLAB
    os.environ['RP_ENABLE_OLD_DEFINES'] = 'True'

    appman = Amgr(hostname=hostname, port=port)
    appman.resource_desc = res_dict
    appman.workflow = [pipeline]
    appman.run()

    # Pipelines pass through DESCRIBED -> SCHEDULING -> DONE.
    assert pipeline.state_history == ['DESCRIBED', 'SCHEDULING', 'DONE']

    # Stages additionally reach SCHEDULED before DONE.
    assert pipeline.stages[0].state_history == \
        ['DESCRIBED', 'SCHEDULING', 'SCHEDULED', 'DONE']

    # Tasks traverse the full submit/execute/dequeue lifecycle.
    expected = ['DESCRIBED', 'SCHEDULING', 'SCHEDULED', 'SUBMITTING',
                'SUBMITTED', 'EXECUTED', 'DEQUEUEING', 'DEQUEUED', 'DONE']
    for task in pipeline.stages[0].tasks:
        assert task.state_history == expected
def test_pipeline_stage_addition():
    """Adding a list of stages must register them, in order, on the
    pipeline and advance the stage bookkeeping."""

    pipeline = Pipeline()

    first = Stage()
    task = Task()
    task.executable = ['/bin/date']
    first.tasks = task

    second = Stage()
    task = Task()
    task.executable = ['/bin/date']
    second.tasks = task

    pipeline.add_stages([first, second])

    # Stages are stored as a list, in insertion order.
    assert type(pipeline.stages) is list
    assert pipeline._stage_count == 2
    assert pipeline._cur_stage == 1
    assert pipeline.stages[0] == first
    assert pipeline.stages[1] == second
def test_pipeline_stage_addition():
    """Adding a list of stages must register them, in order, on the
    pipeline and advance the stage bookkeeping."""

    pipeline = Pipeline()

    first = Stage()
    task = Task()
    task.executable = '/bin/date'
    first.tasks = task

    second = Stage()
    task = Task()
    task.executable = '/bin/date'
    second.tasks = task

    pipeline.add_stages([first, second])

    # Stages are stored as a list, in insertion order.
    assert type(pipeline.stages) is list
    assert pipeline._stage_count == 2
    assert pipeline._cur_stage == 1
    assert pipeline.stages[0] == first
    assert pipeline.stages[1] == second
def test_uid_assignment():
    """Assigning tasks/stages must propagate the parent uids downwards:
    task learns both parents, stage learns the pipeline."""

    pipeline = Pipeline()
    stage = Stage()
    task = Task()

    stage.tasks = task
    pipeline.stages = stage

    # The task records both ancestors; the stage records the pipeline.
    assert task._parent_pipeline == pipeline.uid
    assert task._parent_stage == stage.uid
    assert stage._parent_pipeline == pipeline.uid
def test_pipeline_stage_assignment():
    """Assigning a single Stage to Pipeline.stages must wrap it in a list
    and initialise the stage counters."""

    task = Task()
    task.executable = ['/bin/date']

    stage = Stage()
    stage.tasks = task

    pipeline = Pipeline()
    pipeline.stages = stage

    # A scalar assignment is normalised into a one-element list.
    assert type(pipeline.stages) is list
    assert pipeline._stage_count == 1
    assert pipeline._cur_stage == 1
    assert pipeline.stages[0] == stage
def test_stage_task_assignment():
    """
    ***Purpose***: Test if necessary attributes are automatically updated
    upon task assignment
    """

    task = Task()
    task.executable = '/bin/date'

    stage = Stage()
    stage.tasks = task

    # A scalar task is normalised into a set of tasks.
    assert type(stage.tasks) is set
    assert stage._task_count == 1
    assert task in stage.tasks
def test_pipeline_stage_assignment():
    """Assigning a single Stage to Pipeline.stages must wrap it in a list
    and initialise the stage counters."""

    task = Task()
    task.executable = '/bin/date'

    stage = Stage()
    stage.tasks = task

    pipeline = Pipeline()
    pipeline.stages = stage

    # A scalar assignment is normalised into a one-element list.
    assert type(pipeline.stages) is list
    assert pipeline._stage_count == 1
    assert pipeline._cur_stage == 1
    assert pipeline.stages[0] == stage
def test_stage_task_assignment():
    """
    ***Purpose***: Test if necessary attributes are automatically updated
    upon task assignment
    """

    task = Task()
    task.executable = ['/bin/date']

    stage = Stage()
    stage.tasks = task

    # A scalar task is normalised into a set of tasks.
    assert type(stage.tasks) is set
    assert stage._task_count == 1
    assert task in stage.tasks
def test_integration_local():
    """
    **Purpose**: Run an EnTK application on localhost
    """

    def _make_task():
        # Minimal echo task used twice within the single stage.
        task = Task()
        task.name = 'simulation'
        task.executable = '/bin/echo'
        task.arguments = ['hello']
        task.copy_input_data = []
        task.copy_output_data = []
        return task

    pipeline = Pipeline()
    pipeline.name = 'p1'

    stage = Stage()
    stage.name = 's1'
    stage.tasks = _make_task()
    stage.add_tasks(_make_task())

    pipeline.add_stages(stage)

    res_dict = {'resource': 'local.localhost',
                'walltime': 5,
                'cpus': 1,
                'project': ''}

    manager = AppManager(hostname=hostname, port=port)
    manager.resource_desc = res_dict
    manager.workflow = [pipeline]
    manager.run()
def test_integration_local():
    """
    **Purpose**: Run an EnTK application on localhost
    """

    def _make_task():
        # Minimal echo task used twice within the single stage.
        task = Task()
        task.name = 'simulation'
        task.executable = ['/bin/echo']
        task.arguments = ['hello']
        task.copy_input_data = []
        task.copy_output_data = []
        return task

    pipeline = Pipeline()
    pipeline.name = 'p1'

    stage = Stage()
    stage.name = 's1'
    stage.tasks = _make_task()
    stage.add_tasks(_make_task())

    pipeline.add_stages(stage)

    res_dict = {'resource': 'local.localhost',
                'walltime': 5,
                'cpus': 1,
                'project': ''}

    # Point RP at the test MongoDB before launching the app manager.
    os.environ['RADICAL_PILOT_DBURL'] = MLAB

    manager = AppManager(hostname=hostname, port=port)
    manager.resource_desc = res_dict
    manager.workflow = [pipeline]
    manager.run()
def test_amgr_run_mock():
    """Run a one-stage, one-task workflow through the mock RTS end to end."""

    task = Task()
    task.name = 'simulation'
    task.executable = '/bin/date'

    stage = Stage()
    stage.tasks = task

    pipeline = Pipeline()
    pipeline.add_stages(stage)

    res_dict = {'resource': 'local.localhost',
                'walltime': 5,
                'cpus': 1,
                'project': ''}

    # rts="mock" avoids a real pilot submission.
    appman = Amgr(hostname=host, port=port, rts="mock")
    appman.resource_desc = res_dict
    appman.workflow = [pipeline]
    appman.run()
def test_stage_exceptions(t, l, i, b, se):
    """
    ***Purpose***: Test if correct exceptions are raised when attributes are
    assigned unacceptable values.

    The fixture parameters t, l, i, b, se supply invalid candidate values of
    different types (tuple/list/int/bool/set, per the surrounding fixtures).
    """

    s = Stage()
    data_type = [t, l, i, b, se]

    for data in data_type:

        # FIX: converted Python 2 print statement to the print() function;
        # the original line is a SyntaxError under Python 3.
        print('Using: %s, %s' % (data, type(data)))

        # Stage.name accepts only strings; every other type must raise.
        if not isinstance(data, str):
            with pytest.raises(TypeError):
                s.name = data

        # Both the tasks setter and add_tasks() must reject non-Task input.
        with pytest.raises(TypeError):
            s.tasks = data

        with pytest.raises(TypeError):
            s.add_tasks(data)
def test_stage_task_assignment(self, mocked_init):
    """
    ***Purpose***: Test if necessary attributes are automatically updated
    upon task assignment
    """

    collected = set()

    # ------------------------------------------------------------------------------
    #
    def _record_entities(things):
        # Stand-in for Stage._validate_entities: remember what was handed
        # in and return the accumulated set, as the real validator would
        # return a set of tasks.
        nonlocal collected
        collected.add(things)
        return collected

    stage = Stage()
    stage._validate_entities = mock.MagicMock(side_effect=_record_entities)

    task = mock.MagicMock(spec=Task)
    stage.tasks = task

    self.assertIsInstance(stage.tasks, set)
    self.assertEqual(stage._task_count, 1)
    self.assertIn(task, stage.tasks)
def test_assignment_exceptions():
    """Every Stage mutator must raise TypeError for values of the wrong
    type (name additionally accepts plain strings)."""

    stage = Stage()
    bad_values = [1, 'a', True, [1], set([1])]

    for value in bad_values:

        # name only rejects non-strings.
        if not isinstance(value, str):
            with pytest.raises(TypeError):
                stage.name = value

        # tasks / add_tasks reject every value in the sample set.
        with pytest.raises(TypeError):
            stage.tasks = value

        with pytest.raises(TypeError):
            stage.add_tasks(value)

        # remove_tasks and _set_task_state are exercised for non-strings.
        if not isinstance(value, str):
            with pytest.raises(TypeError):
                stage.remove_tasks(value)

            with pytest.raises(TypeError):
                stage._set_task_state(value)
def test_amgr_run_mock():
    """Run a one-stage, one-task workflow through the mock RTS end to end."""

    task = Task()
    task.name = 'simulation'
    task.executable = ['/bin/date']

    stage = Stage()
    stage.tasks = task

    pipeline = Pipeline()
    pipeline.add_stages(stage)

    res_dict = {'resource': 'local.localhost',
                'walltime': 5,
                'cpus': 1,
                'project': ''}

    # rts="mock" avoids a real pilot submission.
    appman = Amgr(hostname=hostname, port=port, rts="mock")
    appman.resource_desc = res_dict
    appman.workflow = [pipeline]
    appman.run()
def test_create_cud_from_task():
    """
    **Purpose**: Test if the 'create_cud_from_task' function generates a RP
    ComputeUnitDescription with the complete Task description
    """

    # Placeholder keys mirror the uids used for the pipeline/stage/task below.
    pipeline = 'p1'
    stage = 's1'
    task = 't1'
    placeholder_dict = {
        pipeline: {
            stage: {
                task: '/home/vivek/some_file.txt'
            }
        }
    }

    # Fully-populated task: exec, resource requirements and all staging lists.
    t1 = Task()
    t1.name = 't1'
    t1.pre_exec = ['module load gromacs']
    t1.executable = ['grompp']
    t1.arguments = ['hello']
    t1.cpu_reqs = {'processes': 4,
                   'process_type': 'MPI',
                   'threads_per_process': 1,
                   'thread_type': 'OpenMP'
                   }
    t1.gpu_reqs = {'processes': 4,
                   'process_type': 'MPI',
                   'threads_per_process': 2,
                   'thread_type': 'OpenMP'
                   }
    t1.post_exec = ['echo test']
    t1.upload_input_data = ['upload_input.dat']
    t1.copy_input_data = ['copy_input.dat']
    t1.link_input_data = ['link_input.dat']
    t1.copy_output_data = ['copy_output.dat']
    t1.download_output_data = ['download_output.dat']

    # Attach the task to a pipeline/stage so parent uids are populated.
    p = Pipeline()
    p.name = 'p1'
    s = Stage()
    s.name = 's1'
    s.tasks = t1
    p.stages = s
    p._assign_uid('test')

    cud = create_cud_from_task(t1, placeholder_dict)

    # CUD name encodes the task's full ancestry.
    assert cud.name == '%s,%s,%s,%s,%s,%s' % (t1.uid, t1.name,
                                              t1.parent_stage['uid'],
                                              t1.parent_stage['name'],
                                              t1.parent_pipeline['uid'],
                                              t1.parent_pipeline['name'])
    assert cud.pre_exec == t1.pre_exec

    # rp returns executable as a string regardless of whether assignment was
    # using string or list
    assert cud.executable == t1.executable
    assert cud.arguments == t1.arguments
    assert cud.cpu_processes == t1.cpu_reqs['processes']
    assert cud.cpu_threads == t1.cpu_reqs['threads_per_process']
    assert cud.cpu_process_type == t1.cpu_reqs['process_type']
    assert cud.cpu_thread_type == t1.cpu_reqs['thread_type']
    assert cud.gpu_processes == t1.gpu_reqs['processes']
    assert cud.gpu_threads == t1.gpu_reqs['threads_per_process']
    assert cud.gpu_process_type == t1.gpu_reqs['process_type']
    assert cud.gpu_thread_type == t1.gpu_reqs['thread_type']
    assert cud.post_exec == t1.post_exec

    # Upload/download entries carry no action; copy/link carry rp actions.
    assert {'source': 'upload_input.dat',
            'target': 'upload_input.dat'} in cud.input_staging
    assert {'source': 'copy_input.dat',
            'action': rp.COPY,
            'target': 'copy_input.dat'} in cud.input_staging
    assert {'source': 'link_input.dat',
            'action': rp.LINK,
            'target': 'link_input.dat'} in cud.input_staging
    assert {'source': 'copy_output.dat',
            'action': rp.COPY,
            'target': 'copy_output.dat'} in cud.output_staging
    assert {'source': 'download_output.dat',
            'target': 'download_output.dat'} in cud.output_staging
# NOTE(review): the next line is the tail of a `create_single_task`-style
# helper whose `def` line lies above this chunk — kept as-is.
    return t1


# ------------------------------------------------------------------------------
# Driver: build two three-stage pipelines and hand them to an AppManager.
#
if __name__ == '__main__':

    p1 = Pipeline()
    p1.name = 'p1'

    p2 = Pipeline()
    p2.name = 'p2'

    stages = 3

    # p1 stages use underscore names, p2 stages use dash names.
    for cnt in range(stages):
        s = Stage()
        s.name = 's_%s' % cnt
        s.tasks = create_single_task()
        s.add_tasks(create_single_task())
        p1.add_stages(s)

    for cnt in range(stages):
        s = Stage()
        s.name = 's-%s' % cnt
        s.tasks = create_single_task()
        s.add_tasks(create_single_task())
        p2.add_stages(s)

    appman = AppManager()
    appman.resubmit_failed = True
    appman.assign_workload(set([p1, p2]))
def test_create_cud_from_task():
    """
    **Purpose**: Test if the 'create_cud_from_task' function generates a RP
    ComputeUnitDescription with the complete Task description.
    """

    # Placeholder keys mirror the uids used for the pipeline/stage/task below.
    pipeline = 'p1'
    stage = 's1'
    task = 't1'
    placeholders = {pipeline: {stage: {task: '/home/vivek/some_file.txt'}}}

    # Fully-populated task: exec, resource requirements and all staging lists.
    t1 = Task()
    t1.name = 't1'
    t1.pre_exec = ['module load gromacs']
    t1.executable = 'grompp'
    t1.arguments = ['hello']
    t1.cpu_reqs = {
        'processes': 4,
        'process_type': 'MPI',
        'threads_per_process': 1,
        'thread_type': 'OpenMP'
    }
    t1.gpu_reqs = {
        'processes': 4,
        'process_type': 'MPI',
        'threads_per_process': 2,
        'thread_type': 'OpenMP'
    }
    t1.post_exec = ['echo test']
    t1.upload_input_data = ['upload_input.dat']
    t1.copy_input_data = ['copy_input.dat']
    t1.link_input_data = ['link_input.dat']
    t1.copy_output_data = ['copy_output.dat']
    t1.download_output_data = ['download_output.dat']

    # Attach the task to a pipeline/stage so parent uids are populated.
    p = Pipeline()
    p.name = 'p1'
    s = Stage()
    s.name = 's1'
    s.tasks = t1
    p.stages = s

    cud = create_cud_from_task(t1, placeholders)

    # CUD name encodes the task's full ancestry.
    assert cud.name == '%s,%s,%s,%s,%s,%s' % (
        t1.uid, t1.name,
        t1.parent_stage['uid'], t1.parent_stage['name'],
        t1.parent_pipeline['uid'], t1.parent_pipeline['name'])
    assert cud.pre_exec == t1.pre_exec

    # rp returns executable as a string regardless of whether assignment was
    # using string or list
    assert cud.executable == t1.executable
    assert cud.arguments == t1.arguments
    assert cud.post_exec == t1.post_exec
    assert cud.cpu_processes == t1.cpu_reqs['processes']
    assert cud.cpu_threads == t1.cpu_reqs['threads_per_process']
    assert cud.cpu_process_type == t1.cpu_reqs['process_type']
    assert cud.cpu_thread_type == t1.cpu_reqs['thread_type']
    assert cud.gpu_processes == t1.gpu_reqs['processes']
    assert cud.gpu_threads == t1.gpu_reqs['threads_per_process']
    assert cud.gpu_process_type == t1.gpu_reqs['process_type']
    assert cud.gpu_thread_type == t1.gpu_reqs['thread_type']

    # Upload/download entries carry no action; copy/link carry rp actions.
    assert {
        'source': 'upload_input.dat',
        'target': 'upload_input.dat'
    } in cud.input_staging
    assert {
        'source': 'copy_input.dat',
        'action': rp.COPY,
        'target': 'copy_input.dat'
    } in cud.input_staging
    assert {
        'source': 'link_input.dat',
        'action': rp.LINK,
        'target': 'link_input.dat'
    } in cud.input_staging
    assert {
        'source': 'copy_output.dat',
        'action': rp.COPY,
        'target': 'copy_output.dat'
    } in cud.output_staging
    assert {
        'source': 'download_output.dat',
        'target': 'download_output.dat'
    } in cud.output_staging
# NOTE(review): the lines up to `return t1` are the tail of a
# `create_single_task`-style helper whose `def` line lies above this chunk.
    t1.name = 'dummy_task'
    t1.executable = ['placeholder']
    t1.arguments = ['a', 'b', 'c']
    t1.copy_input_data = []
    t1.copy_output_data = []
    return t1


# ------------------------------------------------------------------------------
# Driver: four two-stage (prediction -> quality-check) pipelines.
#
if __name__ == '__main__':

    pipes = []
    for i in range(4):

        p = Pipeline()

        s1 = Stage()
        s1.name = 'prediction'
        s1.tasks = create_single_task()
        p.add_stages(s1)

        s2 = Stage()
        s2.name = 'quality-check'
        s2.tasks = create_single_task()
        p.add_stages(s2)

        pipes.append(p)

    appman = AppManager()
    appman.assign_workflow(set(pipes))
    appman.run()