def test_pipeline_to_dict():
    """A freshly constructed Pipeline serialises to its default field values."""
    pipeline = Pipeline()
    expected = {
        'uid': None,
        'name': None,
        'state': states.INITIAL,
        'state_history': [states.INITIAL],
        'completed': False,
    }
    assert pipeline.to_dict() == expected
def test_pipeline_to_dict(self, mocked_init):
    """to_dict() must mirror the private attributes set on the Pipeline.

    The completed flag is mocked so that is_set() reports False without
    touching any real synchronisation primitive.
    """
    p = Pipeline()
    p._uid = 'pipeline.0000'
    p._name = 'test_pipeline'
    p._stages = []
    p._state = states.INITIAL
    p._state_history = [states.INITIAL]
    p._stage_count = len(p._stages)
    p._cur_stage = 0
    p._completed_flag = mock.Mock()
    p._completed_flag.is_set = mock.MagicMock(return_value=False)

    expected = {
        'uid': 'pipeline.0000',
        'name': 'test_pipeline',
        'state': states.INITIAL,
        'state_history': [states.INITIAL],
        'completed': False,
    }
    self.assertEqual(p.to_dict(), expected)
def generate_simulation_pipeline(i):
    """Build a Pipeline for simulation *i* that re-submits itself.

    The pipeline starts with one simulation stage; the stage's post_exec
    callback appends another identical stage after each completion, until
    the aggregator drops a ``stop.aggregator`` file in the run directory.

    :param i: index of this simulation pipeline (used in names and paths)
    :return: the constructed Pipeline

    NOTE(review): relies on module-level names ``run_dir``, ``current_dir``,
    ``PYTHON`` and ``ADIOS_XML`` being defined elsewhere in the file.
    """

    def _add_simulation_stage(p):
        """Create one simulation stage, wire post_exec, append it to *p*.

        Returns the (Stage, Task) pair so callers can inspect/print them.
        Extracted to remove the verbatim duplication between the initial
        stage construction and the re-submission path in post_stage.
        """
        # Stage index before insertion: stage names are 0-based.
        nstages = len(p.stages)

        s = Stage()
        s.name = f"{nstages}"

        t = Task()
        t.cpu_reqs = {
            'processes': 1,
            'process_type': None,
            'threads_per_process': 4,
            'thread_type': 'OpenMP'
        }
        t.gpu_reqs = {
            'processes': 0,
            'process_type': None,
            'threads_per_process': 0,
            'thread_type': None
        }
        # NOTE: the surrounding spaces in the task name are intentional
        # in the original code and are preserved here.
        t.name = f" {i}_{nstages} "
        t.executable = PYTHON
        t.arguments = [
            f'{current_dir}/simulation.py',
            f'{run_dir}/simulations/all/{i}_{nstages}',
            ADIOS_XML
        ]

        # Expose this run in the "new" directory so the aggregator picks it up.
        subprocess.getstatusoutput(
            f'ln -s {run_dir}/simulations/all/{i}_{nstages} '
            f'{run_dir}/simulations/new/{i}_{nstages}'
        )

        s.add_tasks(t)
        s.post_exec = post_stage
        p.add_stages(s)
        return s, t

    def post_stage():
        # Keep re-submitting stages until the aggregator signals completion
        # by creating the stop file.
        if not os.path.exists(f'{run_dir}/aggregator/stop.aggregator'):
            _add_simulation_stage(p)

    p = Pipeline()
    nstages = len(p.stages)       # 0 for a fresh pipeline; used in the log below
    p.name = f"{i}"

    s, t = _add_simulation_stage(p)

    # Typo fixed in the log message: "pipelin" -> "pipeline".
    print(f"In generate_simulation_pipeline({i}): {nstages}")
    print("=" * 20)
    print(p.to_dict())
    print("=" * 20)
    print('-' * 15)
    print(s.to_dict())
    print('-' * 15)
    print('_' * 10)
    print(t.to_dict())
    print('_' * 10)
    return p
'process_type': None, 'threads_per_process': 0, 'thread_type': None } t.name = "aggregator" t.executable = PYTHON t.arguments = [f'{current_dir}/aggregator.py', current_dir, run_dir] subprocess.getstatusoutput(f'mkdir -p {run_dir}/aggregator') s.add_tasks(t) p.add_stages(s) pipelines.append(p) print("After creating an aggregation pipeline") print("=" * 20) print(p.to_dict()) print("=" * 20) print('-' * 15) print(s.to_dict()) print('-' * 15) print('_' * 10) print(t.to_dict()) print('_' * 10) appman = AppManager(hostname=hostname, port=port) print(config) res_dict = { 'resource': RESOURCE, 'walltime': 30,