def test_stage_to_dict():
    """A freshly constructed Stage serializes with every field at its default."""
    stage = Stage()
    expected = {
        'uid': None,
        'name': None,
        'state': states.INITIAL,
        'state_history': [states.INITIAL],
        'parent_pipeline': {'uid': None, 'name': None},
    }
    assert stage.to_dict() == expected
def test_stage_to_dict():
    """A freshly constructed Stage serializes with an auto-assigned uid and defaults."""
    stage = Stage()
    expected = {
        'uid': 'stage.0000',
        'name': None,
        'state': states.INITIAL,
        'state_history': [states.INITIAL],
        'parent_pipeline': {'uid': None, 'name': None},
    }
    assert stage.to_dict() == expected
def test_stage_to_dict(self, mocked_init):
    """to_dict mirrors manually injected private attributes (init is mocked out)."""
    stage = Stage()

    # Bypass the (mocked) constructor and set internal state directly.
    stage._uid = 'stage.0000'
    stage._name = 'test_stage'
    stage._state = states.INITIAL
    stage._state_history = [states.INITIAL]
    stage._p_pipeline = {'uid': 'pipeline.0000', 'name': 'parent'}

    expected = {
        'uid': 'stage.0000',
        'name': 'test_stage',
        'state': states.INITIAL,
        'state_history': [states.INITIAL],
        'parent_pipeline': {'uid': 'pipeline.0000', 'name': 'parent'},
    }
    self.assertEqual(stage.to_dict(), expected)
def generate_simulation_pipeline(i):
    """Build pipeline *i*: one simulation stage that keeps re-queuing itself.

    Each stage runs ``simulation.py`` for this pipeline index; the stage's
    ``post_exec`` callback appends another identical stage until the
    aggregator drops its stop file, at which point the pipeline drains.

    :param i: integer index of this simulation pipeline.
    :return: the configured ``Pipeline`` object.
    """

    def _add_simulation_stage(p):
        # Append one simulation stage to `p`; the stage index is the current
        # stage count, so repeated calls produce stages "0", "1", "2", ...
        # Returns (stage, task, stage_index) for logging by the caller.
        nstages = len(p.stages)

        s = Stage()
        s.name = f"{nstages}"

        t = Task()
        t.cpu_reqs = {
            'processes': 1,
            'process_type': None,
            'threads_per_process': 4,
            'thread_type': 'OpenMP'
        }
        t.gpu_reqs = {
            'processes': 0,
            'process_type': None,
            'threads_per_process': 0,
            'thread_type': None
        }
        # NOTE(review): the surrounding spaces in the task name are preserved
        # from the original — presumably significant downstream; confirm.
        t.name = f" {i}_{nstages} "
        t.executable = PYTHON
        t.arguments = [
            f'{current_dir}/simulation.py',
            f'{run_dir}/simulations/all/{i}_{nstages}',
            ADIOS_XML
        ]

        # Expose this run under simulations/new/ so the aggregator picks it up.
        subprocess.getstatusoutput(
            f'ln -s {run_dir}/simulations/all/{i}_{nstages} {run_dir}/simulations/new/{i}_{nstages}'
        )

        s.add_tasks(t)
        s.post_exec = post_stage
        p.add_stages(s)
        return s, t, nstages

    def post_stage():
        # Keep extending the pipeline with fresh simulation stages until the
        # aggregator signals completion via its stop file.
        if not os.path.exists(f'{run_dir}/aggregator/stop.aggregator'):
            _add_simulation_stage(p)

    p = Pipeline()
    p.name = f"{i}"
    s, t, nstages = _add_simulation_stage(p)

    print(f"In generate_simulation_pipelin({i}): {nstages}")
    print("=" * 20)
    print(p.to_dict())
    print("=" * 20)
    print('-' * 15)
    print(s.to_dict())
    print('-' * 15)
    print('_' * 10)
    print(t.to_dict())
    print('_' * 10)
    return p
} t.name = "aggregator" t.executable = PYTHON t.arguments = [f'{current_dir}/aggregator.py', current_dir, run_dir] subprocess.getstatusoutput(f'mkdir -p {run_dir}/aggregator') s.add_tasks(t) p.add_stages(s) pipelines.append(p) print("After creating an aggregation pipeline") print("=" * 20) print(p.to_dict()) print("=" * 20) print('-' * 15) print(s.to_dict()) print('-' * 15) print('_' * 10) print(t.to_dict()) print('_' * 10) appman = AppManager(hostname=hostname, port=port) print(config) res_dict = { 'resource': RESOURCE, 'walltime': 30, 'cpus': config[RESOURCE]['cores'], 'gpus': config[RESOURCE]['gpus'], 'project': config[RESOURCE]['project'],