def test_function_run(tmp_path: Path) -> None:
    """
    Test the function run method.

    Parameters
    ----------
    tmp_path : Path
        Temporary path
    """
    runner = BoutRunner(RunGraph())

    # No arguments
    script_path = tmp_path.joinpath("return_none.py")
    job = runner.run_function(script_path, return_none)
    job.wait_until_completed()
    assert script_path.is_file()

    # Positional arguments only
    script_path = tmp_path.joinpath("return_sum_of_two.py")
    job = runner.run_function(script_path, return_sum_of_two, (1, 2))
    job.wait_until_completed()
    assert script_path.is_file()

    # Positional and keyword arguments
    script_path = tmp_path.joinpath("return_sum_of_three.py")
    job = runner.run_function(
        script_path, return_sum_of_three, (1, 2), {"number_3": 3}
    )
    job.wait_until_completed()
    assert script_path.is_file()
def __init__(self, run_graph: Optional[RunGraph] = None, wait_time: int = 5) -> None:
    """
    Set the member data.

    Parameters
    ----------
    run_graph : None or RunGraph
        The run graph to be executed
        If None the run graph will be constructed and added parameters from the
        default BoutRunSetup
    wait_time : int
        Time to wait before checking if a job has completed
    """
    self.wait_time = wait_time
    if run_graph is not None:
        self.__run_graph = run_graph
        # A user-supplied graph without any bout_run nodes is probably a mistake
        if not any(
            node.startswith("bout_run") for node in self.__run_graph.nodes
        ):
            logging.warning(
                "The provided run_graph does not contain any bout_runs"
            )
    else:
        # Build the default graph: one RunGroup from the default BoutRunSetup
        self.__run_graph = RunGraph()
        _ = RunGroup(self.__run_graph, BoutRunSetup())
def test_pre_processor(get_bout_run_setup: Callable[[str], BoutRunSetup]) -> None:
    """
    Test the pre-processor.

    Parameters
    ----------
    get_bout_run_setup : function
        Function which returns the BoutRunSetup object based on the conduction
        directory
    """
    bout_run_setup = get_bout_run_setup("test_run_group_pre")
    run_graph = RunGraph()
    run_graph.add_function_node("1")
    run_group_pre = RunGroup(
        run_graph, bout_run_setup, name="test_pre", waiting_for="1"
    )
    # Add two identical pre-processors; each becomes its own node
    for _ in range(2):
        run_group_pre.add_pre_processor(
            {"function": lambda: None, "args": None, "kwargs": None}
        )
    # "1" plus the two pre-processors have no dependencies, hence three roots
    root_nodes = next(run_graph)
    assert len(root_nodes) == 3
def another_complex_graph() -> RunGraph:
    """
    Return another complex graph.

    Returns
    -------
    graph : RunGraph
        A simple graph
    """
    graph = RunGraph()
    # (parent, child) pairs, added in the original insertion order
    edges = (
        ("0", "2"),
        ("0", "3"),
        ("0", "4"),
        ("1", "5"),
        ("1", "6"),
        ("3", "7"),
        ("3", "8"),
        ("4", "9"),
        ("5", "9"),
        ("8", "10"),
        ("9", "10"),
    )
    for parent, child in edges:
        graph.add_edge(parent, child)
    return graph
def test_post_processor(get_bout_run_setup: Callable[[str], BoutRunSetup]) -> None:
    """
    Test the post-processor.

    Parameters
    ----------
    get_bout_run_setup : function
        Function which returns the BoutRunSetup object based on the conduction
        directory
    """
    bout_run_setup = get_bout_run_setup("test_run_group_post")
    run_graph = RunGraph()
    run_graph.add_function_node("1")
    run_group_post = RunGroup(
        run_graph, bout_run_setup, name="test_post", waiting_for="1"
    )
    # Add two identical post-processors; each becomes its own node
    for _ in range(2):
        run_group_post.add_post_processor(
            {"function": lambda: None, "args": None, "kwargs": None}
        )
    expected = {
        "1",
        "bout_run_test_post",
        "post_processor_test_post_0",
        "post_processor_test_post_1",
    }
    assert expected == set(run_graph.get_waiting_for_tuple("1"))
def complex_graph() -> RunGraph:
    """
    Return a complex graph.

    Returns
    -------
    graph : RunGraph
        A simple graph
    """
    graph = RunGraph()
    # (parent, child) pairs, added in the original insertion order
    edges = (
        ("0", "2"),
        ("1", "2"),
        ("2", "3"),
        ("2", "5"),
        ("2", "6"),
        ("2", "7"),
        ("4", "9"),
        ("6", "9"),
        ("7", "9"),
        ("9", "10"),
        ("4", "8"),
        ("6", "8"),
        ("8", "10"),
        ("12", "11"),
        ("11", "4"),
    )
    for parent, child in edges:
        graph.add_edge(parent, child)
    return graph
def test_constructor(get_bout_run_setup: Callable[[str], BoutRunSetup]) -> None:
    """
    Test the constructor.

    Parameters
    ----------
    get_bout_run_setup : function
        Function which returns the BoutRunSetup object based on the conduction
        directory
    """
    bout_run_setup = get_bout_run_setup("test_run_group_constructor")
    run_graph = RunGraph()
    run_graph.add_function_node("1")

    # An unnamed group gets an auto-generated run number as its name
    first_group = RunGroup(run_graph, bout_run_setup)
    first_run_number = int(first_group.bout_run_node_name.split("_")[-1])
    assert first_group.bout_run_node_name == f"bout_run_{first_run_number}"

    # A named group uses the name verbatim
    named_group = RunGroup(run_graph, bout_run_setup, name="test")
    assert named_group.bout_run_node_name == "bout_run_test"

    # Auto-generated run numbers are strictly increasing
    second_group = RunGroup(run_graph, bout_run_setup, waiting_for="1")
    second_run_number = int(second_group.bout_run_node_name.split("_")[-1])
    assert second_run_number > first_run_number
    assert second_group.bout_run_node_name == f"bout_run_{second_run_number}"

    # Only the second group waits for "1"
    expected = (
        "1",
        f"bout_run_{second_run_number}",
    )
    assert expected == run_graph.get_waiting_for_tuple("1")
def make_run_group(
    name: str,
    make_project: Path,
    run_graph: Optional[RunGraph] = None,
    restart_from: Optional[Path] = None,
    waiting_for: Optional[Union[str, Iterable[str]]] = None,
) -> RunGroup:
    """
    Return a basic RunGroup.

    Parameters
    ----------
    name : str
        Name of RunGroup and DatabaseConnector
    make_project : Path
        The path to the conduction example
    run_graph : RunGraph or None
        The RunGraph object
        If None, a fresh RunGraph is created
    restart_from : Path or None
        The path to copy the restart files from
    waiting_for : None or str or iterable
        Name of nodes this node will wait for to finish before executing

    Returns
    -------
    run_group : RunGroup
        A basic run group
    """
    # Make project to save time
    project_path = make_project

    # Create the `bout_paths` object
    bout_paths = BoutPaths(
        project_path=project_path,
        bout_inp_src_dir=project_path.joinpath("data"),
        bout_inp_dst_dir=project_path.joinpath(name),
    )

    # Create the input objects
    # nout=0 keeps the actual run as short as possible
    run_parameters = RunParameters({"global": {"nout": 0}})
    default_parameters = DefaultParameters(bout_paths)
    final_parameters = FinalParameters(default_parameters, run_parameters)
    executor = Executor(
        bout_paths=bout_paths,
        submitter=LocalSubmitter(bout_paths.project_path),
        run_parameters=run_parameters,
        restart_from=restart_from,
    )
    db_connector = DatabaseConnector(name)
    bout_run_setup = BoutRunSetup(executor, db_connector, final_parameters)

    # Create the `run_group`
    if run_graph is None:
        run_graph = RunGraph()
    return RunGroup(run_graph, bout_run_setup, name=name, waiting_for=waiting_for)
def test_get_dot_string() -> None:
    """Test the ability to get the dot string."""
    run_graph = RunGraph()
    run_graph.add_function_node("42")
    # The submitter's repr embeds its memory address, so capture it up front
    submitter_hex_id = hex(id(run_graph["42"]["submitter"]))
    expected = (
        "strict digraph {\n"
        "42 [args=None, function=None, kwargs=None, path=None, status=ready, "
        "submitter=<bout_runners.submitter.local_submitter.LocalSubmitter object at "
        f"{submitter_hex_id}>];\n"
        "}\n"
    )
    assert expected == run_graph.get_dot_string()
def test_add_waiting_for() -> None:
    """Test the ability to let a node wait for other nodes."""
    run_graph = RunGraph()
    for node_name in ("1", "2", "3"):
        run_graph.add_function_node(node_name)
    run_graph.add_waiting_for("2", "1")
    run_graph.add_waiting_for("3", ("2", "1"))
    # Everything downstream of "1" must appear in the waiting-for tuple
    assert ("1", "2", "3") == run_graph.get_waiting_for_tuple("1")
def test_add_edge() -> None:
    """Test ability to add edges, and the ability to detect if a graph is cyclic."""
    run_graph = RunGraph()
    for node_name in ("1", "2"):
        run_graph.add_function_node(node_name)
    run_graph.add_edge("1", "2")
    # The reverse edge would close a cycle and must be rejected
    with pytest.raises(ValueError):
        run_graph.add_edge("2", "1")
    assert set(run_graph.nodes) == {"1", "2"}
def simple_graph() -> RunGraph:
    """
    Return a simple graph.

    Returns
    -------
    graph : RunGraph
        A simple graph
    """
    graph = RunGraph()
    for parent, child in (("0", "1"), ("0", "2"), ("1", "3"), ("1", "4")):
        graph.add_edge(parent, child)
    return graph
def test_add_bout_run_node(get_bout_run_setup: Callable[[str], BoutRunSetup]) -> None:
    """
    Test ability to write and rewrite a BoutRunSetup node.

    Parameters
    ----------
    get_bout_run_setup : function
        Function which returns the BoutRunSetup object based on the conduction
        directory
    """
    run_graph = RunGraph()
    bout_run_setup = get_bout_run_setup("test_run_graph")
    run_graph.add_bout_run_node("test", bout_run_setup)
    assert len(run_graph.nodes) == 1
    assert isinstance(run_graph["test"]["bout_run_setup"], BoutRunSetup)

    # Adding another node under an already-used name must be rejected...
    with pytest.raises(ValueError):
        run_graph.add_function_node("test")
    # ...and must leave the graph unchanged
    assert len(run_graph.nodes) == 1
def test_add_function_node() -> None:
    """Test ability to write and rewrite a function node."""
    run_graph = RunGraph()
    run_graph.add_function_node(
        "test",
        function_dict={"function": None, "args": ("pass", 42), "kwargs": None},
    )
    assert len(run_graph.nodes) == 1

    # The node stores the function dict verbatim and gets a default submitter
    node = run_graph["test"]
    assert node["function"] is None
    assert node["args"] == ("pass", 42)
    assert node["kwargs"] is None
    assert isinstance(node["submitter"], LocalSubmitter)

    # Re-adding under the same name must be rejected without altering the graph
    with pytest.raises(ValueError):
        run_graph.add_function_node("test")
    assert len(run_graph.nodes) == 1
def make_graph() -> RunGraph:
    """
    Return a simple graph.

    Returns
    -------
    run_graph : RunGraph
        A simple graph
    """
    run_graph = RunGraph()
    for node_number in range(6):
        run_graph.add_function_node(str(node_number))
    # (waiter, waited_for) pairs, added in the original insertion order
    dependencies = (("4", "3"), ("5", "3"), ("3", "2"), ("2", "0"), ("1", "0"))
    for waiter, waited_for in dependencies:
        run_graph.add_waiting_for(waiter, waited_for)
    return run_graph
def test_constructor(yield_conduction_path) -> None:
    """
    Test the constructor of BoutRunner.

    Parameters
    ----------
    yield_conduction_path : Path
        Path to the BOUT++ conduction example
        See the yield_conduction_path for more details
    """
    # Assert that auto setting of the setup works
    project_path = yield_conduction_path
    with change_directory(project_path):
        runner = BoutRunner()
    first_node = next(iter(runner.run_graph.nodes.keys()))
    assert isinstance(runner.run_graph[first_node]["bout_run_setup"], BoutRunSetup)

    # Assert that an empty graph can be added
    empty_graph = RunGraph()
    runner = BoutRunner(empty_graph)
    assert len(runner.run_graph.nodes) == 0
def test_pre_and_post_documentation(
    make_project: Path,
    copy_bout_inp: Callable[[Path, str], Path],
    file_state_restorer: FileStateRestorer,
) -> None:
    """
    Test that the pre and post documentation runs without error.

    Parameters
    ----------
    make_project : Path
        The path to the conduction example
    copy_bout_inp : function
        Function which copies BOUT.inp and returns the path to the temporary
        directory
    file_state_restorer : FileStateRestorer
        Object for restoring files to original state
    """
    # NOTE: We are aware of the number of locals, and are here only testing the docs
    # pylint: disable=too-many-locals
    project_path = make_project
    bout_inp_src_dir = copy_bout_inp(project_path, "test_pre_post_documentation_src")
    bout_inp_dst_dir = project_path.joinpath("test_pre_post_documentation_dst")
    # NOTE: bout_inp_src_dir removed by copy_bout_inp teardown
    file_state_restorer.add(bout_inp_dst_dir, force_mark_removal=True)

    bout_paths = BoutPaths(
        project_path=project_path,
        bout_inp_src_dir=bout_inp_src_dir,
        bout_inp_dst_dir=bout_inp_dst_dir,
    )
    default_parameters = DefaultParameters(bout_paths)
    # nout=0 keeps the run itself as short as possible
    run_parameters = RunParameters({"global": {"nout": 0}})
    final_parameters = FinalParameters(default_parameters, run_parameters)

    basic_executor = BoutRunExecutor(
        bout_paths=bout_paths,
        submitter=LocalSubmitter(bout_paths.project_path),
        run_parameters=run_parameters,
    )

    # NOTE: We set the database to bout_inp_dst_dir as this will be removed later
    db_connector = DatabaseConnector("name_of_database", db_root_path=bout_inp_dst_dir)
    file_state_restorer.add(db_connector.db_path, force_mark_removal=True)

    basic_bout_run_setup = BoutRunSetup(basic_executor, db_connector, final_parameters)

    run_graph = RunGraph()
    name = "my_restart_runs"
    basic_run_group = RunGroup(run_graph, basic_bout_run_setup, name=name)

    # New section in the documentation
    basic_run_group.add_post_processor(
        {"function": return_none, "args": None, "kwargs": None}
    )
    expanded_noise_restarts_dir = bout_paths.bout_inp_dst_dir.parent.joinpath(
        "expanded_noise_restarts"
    )
    file_state_restorer.add(expanded_noise_restarts_dir, force_mark_removal=True)
    kwargs = {
        "newNz": 16,
        "path": bout_paths.bout_inp_dst_dir,
        "output": expanded_noise_restarts_dir,
    }
    expand_node_name = basic_run_group.add_post_processor(
        {
            "function": mock_expand,
            "args": None,
            "kwargs": kwargs,
        },
    )

    # New section in the documentation
    # Create the RunGroup
    restart_executor = BoutRunExecutor(
        bout_paths=bout_paths,
        submitter=LocalSubmitter(bout_paths.project_path),
        run_parameters=run_parameters,
        restart_from=expanded_noise_restarts_dir,
    )
    file_state_restorer.add(
        restart_executor.bout_paths.bout_inp_dst_dir, force_mark_removal=True
    )

    restart_bout_run_setup = BoutRunSetup(
        restart_executor, db_connector, final_parameters
    )

    restart_run_group = RunGroup(run_graph, restart_bout_run_setup, name=name)
    kwargs = {"path": expanded_noise_restarts_dir, "scale": 1e-5}
    # The pre-processor waits for the expand node so it sees the expanded files
    restart_run_group.add_pre_processor(
        {
            "function": return_none,
            "args": None,
            "kwargs": kwargs,
        },
        waiting_for=expand_node_name,
    )

    # New section in the documentation
    run_graph.get_dot_string()

    # New section in the documentation
    runner = BoutRunner(run_graph)
    runner.run()
def test_run_bout_run(
    make_project: Path,
    get_bout_run_setup: Callable[[str], BoutRunSetup],
    yield_number_of_rows_for_all_tables: Callable[[DatabaseReader], Dict[str, int]],
    file_state_restorer: FileStateRestorer,
) -> None:
    """
    Test the BOUT++ run method.

    Parameters
    ----------
    make_project : Path
        The path to the conduction example
    get_bout_run_setup : function
        Function which returns the BoutRunSetup object based on the conduction
        directory
    yield_number_of_rows_for_all_tables : function
        Function which returns the number of rows for all tables in a schema
    file_state_restorer : FileStateRestorer
        Object for restoring files to original state
    """
    # Make project to save time
    _ = make_project
    run_graph = RunGraph()
    runner = BoutRunner(run_graph)
    bout_run_setup = get_bout_run_setup("test_run_bout_run")
    bout_paths = bout_run_setup.bout_paths
    db_connector = bout_run_setup.db_connector
    # NOTE: bout_run_setup.bout_paths.bout_inp_dst_dir will be removed in the
    #       yield_bout_path_conduction fixture (through the get_bout_run_setup
    #       fixture)
    #       Hence we do not need to add bout_run_setup.bout_paths.bout_inp_dst_dir
    #       to the file_state_restorer
    file_state_restorer.add(db_connector.db_path, force_mark_removal=True)
    # Run once
    # run_bout_run is truthy when a run was actually submitted
    submitter = bout_run_setup.submitter
    if runner.run_bout_run(bout_run_setup):
        submitter.wait_until_completed()
    # Assert that the run went well
    database_reader = assert_first_run(bout_paths, db_connector)
    # Assert that the number of runs is 1
    assert_tables_have_expected_len(
        database_reader, yield_number_of_rows_for_all_tables, expected_run_number=1
    )
    # Check that the run will not be executed again
    assert not runner.run_bout_run(bout_run_setup)
    # Assert that the number of runs is 1
    assert_tables_have_expected_len(
        database_reader, yield_number_of_rows_for_all_tables, expected_run_number=1
    )
    # Check that force overrides the behaviour
    if runner.run_bout_run(bout_run_setup, force=True):
        submitter.wait_until_completed()
    assert_tables_have_expected_len(
        database_reader, yield_number_of_rows_for_all_tables, expected_run_number=2
    )

    dump_dir_parent = bout_paths.bout_inp_dst_dir.parent
    dump_dir_name = bout_paths.bout_inp_dst_dir.name

    # Check that restart makes another entry
    bout_run_setup.executor.restart_from = bout_run_setup.bout_paths.bout_inp_dst_dir
    copy_restart_files(
        bout_run_setup.executor.restart_from,
        bout_run_setup.bout_paths.bout_inp_dst_dir,
    )
    if runner.run_bout_run(bout_run_setup):
        submitter.wait_until_completed()
    expected_run_number = 3
    assert_tables_have_expected_len(
        database_reader,
        yield_number_of_rows_for_all_tables,
        expected_run_number=expected_run_number,
        restarted=True,
    )
    # NOTE: The test in tests.unit.bout_runners.runner.test_bout_runner is testing
    #       restart_all=True, whether this is testing restart_from_bout_inp_dst=True
    assert_dump_files_exist(dump_dir_parent.joinpath(f"{dump_dir_name}_restart_0"))
    file_state_restorer.add(
        dump_dir_parent.joinpath(f"{dump_dir_name}_restart_0"),
        force_mark_removal=True,
    )
    # ...and yet another entry
    bout_run_setup.executor.restart_from = bout_run_setup.bout_paths.bout_inp_dst_dir
    copy_restart_files(
        bout_run_setup.executor.restart_from,
        bout_run_setup.bout_paths.bout_inp_dst_dir,
    )
    if runner.run_bout_run(bout_run_setup):
        submitter.wait_until_completed()
    assert_tables_have_expected_len(
        database_reader,
        yield_number_of_rows_for_all_tables,
        expected_run_number=expected_run_number + 1,
        restarted=True,
    )
    # NOTE: The test in tests.unit.bout_runners.runner.test_bout_runner is testing
    #       restart_all=True, whether this is testing restart_from_bout_inp_dst=True
    assert_dump_files_exist(dump_dir_parent.joinpath(f"{dump_dir_name}_restart_1"))
    file_state_restorer.add(
        dump_dir_parent.joinpath(f"{dump_dir_name}_restart_1"),
        force_mark_removal=True,
    )
def test_pre_and_post_documentation(
    clean_up_bout_inp_src_and_dst: Callable[[str, str], Tuple[Path, Path, Path]]
) -> None:
    """
    Test that the pre and post documentation runs without error.

    Parameters
    ----------
    clean_up_bout_inp_src_and_dst : function
        Function which adds temporary BOUT.inp directories to removal.
    """
    # NOTE: We are aware of the number of locals, and are here only testing the docs
    # pylint: disable=too-many-locals
    project_path, bout_inp_src_dir, bout_inp_dst_dir = clean_up_bout_inp_src_and_dst(
        "test_pre_post_documentation_src", "test_pre_post_documentation_dst"
    )
    bout_paths = BoutPaths(
        project_path=project_path,
        bout_inp_src_dir=bout_inp_src_dir,
        bout_inp_dst_dir=bout_inp_dst_dir,
    )
    default_parameters = DefaultParameters(bout_paths)
    # nout=0 keeps the run itself as short as possible
    run_parameters = RunParameters({"global": {"nout": 0}})
    final_parameters = FinalParameters(default_parameters, run_parameters)
    basic_executor = Executor(
        bout_paths=bout_paths,
        submitter=LocalSubmitter(bout_paths.project_path),
        run_parameters=run_parameters,
    )
    # NOTE: We set the database to bout_inp_dst_dir as this will be removed later
    db_connector = DatabaseConnector("name_of_database", db_root_path=bout_inp_dst_dir)
    basic_bout_run_setup = BoutRunSetup(basic_executor, db_connector, final_parameters)
    run_graph = RunGraph()
    name = "my_restart_runs"
    basic_run_group = RunGroup(run_graph, basic_bout_run_setup, name=name)

    # New section in the documentation
    basic_run_group.add_post_processor(
        {"function": return_none, "args": None, "kwargs": None}
    )
    expanded_noise_restarts_dir = bout_paths.bout_inp_dst_dir.parent.joinpath(
        "expanded_noise_restarts"
    )
    kwargs = {
        "newNz": 16,
        "path": bout_paths.bout_inp_dst_dir,
        "output": expanded_noise_restarts_dir,
    }
    expand_node_name = basic_run_group.add_post_processor(
        {"function": mock_expand, "args": None, "kwargs": kwargs}
    )

    # New section in the documentation
    # NOTE: Add these for removal
    clean_up_bout_inp_src_and_dst("expanded_noise_restarts", "expanded_noise_restarts")

    # Create the RunGroup
    restart_executor = Executor(
        bout_paths=bout_paths,
        submitter=LocalSubmitter(bout_paths.project_path),
        run_parameters=run_parameters,
        restart_from=expanded_noise_restarts_dir,
    )
    restart_bout_run_setup = BoutRunSetup(
        restart_executor, db_connector, final_parameters
    )
    restart_run_group = RunGroup(run_graph, restart_bout_run_setup, name=name)
    kwargs = {"path": expanded_noise_restarts_dir, "scale": 1e-5}
    # The pre-processor waits for the expand node so it sees the expanded files
    restart_run_group.add_pre_processor(
        {"function": return_none, "args": None, "kwargs": kwargs},
        waiting_for=expand_node_name,
    )

    # New section in the documentation
    run_graph.get_dot_string()

    # New section in the documentation
    runner = BoutRunner(run_graph)
    runner.run()
def test_run_bout_run(
    make_project: Path,
    clean_default_db_dir: Path,
    get_bout_run_setup: Callable[[str], BoutRunSetup],
    yield_number_of_rows_for_all_tables: Callable[[DatabaseReader], Dict[str, int]],
    tear_down_restart_directories: Callable[[Path], None],
) -> None:
    """
    Test the BOUT++ run method.

    Parameters
    ----------
    make_project : Path
        The path to the conduction example
    clean_default_db_dir : Path
        Path to the default database dir
    get_bout_run_setup : function
        Function which returns the BoutRunSetup object based on the conduction
        directory
    yield_number_of_rows_for_all_tables : function
        Function which returns the number of rows for all tables in a schema
    tear_down_restart_directories : function
        Function used for removal of restart directories
    """
    # For automatic clean-up
    _ = clean_default_db_dir
    # Make project to save time
    _ = make_project
    run_graph = RunGraph()
    runner = BoutRunner(run_graph)
    bout_run_setup = get_bout_run_setup("test_run_bout_run")
    tear_down_restart_directories(bout_run_setup.bout_paths.bout_inp_dst_dir)
    bout_paths = bout_run_setup.bout_paths
    db_connector = bout_run_setup.db_connector
    # Run once
    # run_bout_run returns the submitter when a run was submitted, else None
    submitter = runner.run_bout_run(bout_run_setup)
    if submitter is not None:
        submitter.wait_until_completed()
    # Assert that the run went well
    database_reader = assert_first_run(bout_paths, db_connector)
    # Assert that the number of runs is 1
    assert_tables_have_expected_len(
        database_reader, yield_number_of_rows_for_all_tables, expected_run_number=1
    )
    # Check that the run will not be executed again
    assert runner.run_bout_run(bout_run_setup) is None
    # Assert that the number of runs is 1
    assert_tables_have_expected_len(
        database_reader, yield_number_of_rows_for_all_tables, expected_run_number=1
    )
    # Check that force overrides the behaviour
    submitter = runner.run_bout_run(bout_run_setup, force=True)
    if submitter is not None:
        submitter.wait_until_completed()
    assert_tables_have_expected_len(
        database_reader, yield_number_of_rows_for_all_tables, expected_run_number=2
    )

    dump_dir_parent = bout_paths.bout_inp_dst_dir.parent
    dump_dir_name = bout_paths.bout_inp_dst_dir.name

    # Check that restart makes another entry
    submitter = runner.run_bout_run(bout_run_setup, restart_from_bout_inp_dst=True)
    if submitter is not None:
        submitter.wait_until_completed()
    assert_tables_have_expected_len(
        database_reader,
        yield_number_of_rows_for_all_tables,
        expected_run_number=3,
        restarted=True,
    )
    # NOTE: The test in tests.unit.bout_runners.runner.test_bout_runner is testing
    #       restart_all=True, whether this is testing restart_from_bout_inp_dst=True
    assert_dump_files_exist(dump_dir_parent.joinpath(f"{dump_dir_name}_restart_0"))

    # ...and yet another entry
    submitter = runner.run_bout_run(bout_run_setup, restart_from_bout_inp_dst=True)
    if submitter is not None:
        submitter.wait_until_completed()
    assert_tables_have_expected_len(
        database_reader,
        yield_number_of_rows_for_all_tables,
        expected_run_number=4,
        restarted=True,
    )
    # NOTE: The test in tests.unit.bout_runners.runner.test_bout_runner is testing
    #       restart_all=True, whether this is testing restart_from_bout_inp_dst=True
    assert_dump_files_exist(dump_dir_parent.joinpath(f"{dump_dir_name}_restart_1"))
def test_restart_documentation(
    clean_up_bout_inp_src_and_dst: Callable[[str, str], Tuple[Path, Path, Path]]
) -> None:
    """
    Test that the restart documentation runs without error.

    Parameters
    ----------
    clean_up_bout_inp_src_and_dst : function
        Function which adds temporary BOUT.inp directories to removal.
    """
    # NOTE: We are aware of the number of locals, and are here only testing the docs
    # pylint: disable=too-many-locals
    project_path, bout_inp_src_dir, bout_inp_dst_dir = clean_up_bout_inp_src_and_dst(
        "test_restart_documentation_src", "test_restart_documentation_dst"
    )
    bout_paths = BoutPaths(
        project_path=project_path,
        bout_inp_src_dir=bout_inp_src_dir,
        bout_inp_dst_dir=bout_inp_dst_dir,
    )
    default_parameters = DefaultParameters(bout_paths)
    # nout=0 keeps the run itself as short as possible
    run_parameters = RunParameters({"global": {"nout": 0}})
    final_parameters = FinalParameters(default_parameters, run_parameters)
    basic_executor = Executor(
        bout_paths=bout_paths,
        submitter=LocalSubmitter(bout_paths.project_path),
        run_parameters=run_parameters,
    )
    # NOTE: We set the database to bout_inp_dst_dir as this will be removed later
    db_connector = DatabaseConnector("name_of_database", db_root_path=bout_inp_dst_dir)
    basic_bout_run_setup = BoutRunSetup(basic_executor, db_connector, final_parameters)
    run_graph = RunGraph()
    name = "my_restart_runs"
    basic_run_group = RunGroup(run_graph, basic_bout_run_setup, name=name)

    # New section in the documentation
    restart_executor = Executor(
        bout_paths=bout_paths,
        submitter=LocalSubmitter(bout_paths.project_path),
        run_parameters=run_parameters,
        restart_from=bout_paths.bout_inp_dst_dir,
    )
    restart_bout_run_setup = BoutRunSetup(
        restart_executor, db_connector, final_parameters
    )
    RunGroup(
        run_graph,
        restart_bout_run_setup,
        name=name,
        waiting_for=basic_run_group.bout_run_node_name,
    )

    # New section in the documentation
    new_run_parameters = RunParameters({"solver": {"adams_moulton": True}})
    # FIX: was built from the old run_parameters, silently dropping the
    # adams_moulton setting
    new_final_parameters = FinalParameters(default_parameters, new_run_parameters)
    restart_with_changing_parameters_executor = Executor(
        bout_paths=bout_paths,
        submitter=LocalSubmitter(bout_paths.project_path),
        run_parameters=new_run_parameters,
        restart_from=bout_paths.bout_inp_dst_dir,
    )
    # FIX: this BoutRunSetup was previously discarded and the old
    # restart_bout_run_setup reused below, so the changed parameters never ran
    restart_with_changing_parameters_bout_run_setup = BoutRunSetup(
        restart_with_changing_parameters_executor, db_connector, new_final_parameters
    )
    RunGroup(
        run_graph,
        restart_with_changing_parameters_bout_run_setup,
        name=name,
        waiting_for=basic_run_group.bout_run_node_name,
    )

    # New section in the documentation
    run_graph.get_dot_string()

    # New section in the documentation
    runner = BoutRunner(run_graph)
    runner.run()
def make_run_group(
    run_group_parameters: Dict[
        str, Union[str, Optional[RunGraph], Optional[Union[str, Iterable[str]]]]
    ],
    make_project: Path,
    file_state_restorer: FileStateRestorer,
    restart_from: Optional[Path] = None,
) -> RunGroup:
    """
    Return a basic RunGroup.

    Parameters
    ----------
    run_group_parameters : dict
        Parameters to the run_group containing the keys
        - name : str
            Name of the run_group
            Note that the name will also be used for the destination dir and
            the name of the database
        - run_graph: None or RunGraph
            The run_graph to use
        - waiting_for : None or str or iterable of str
            Name of nodes this node will wait for to finish before executing
    make_project : Path
        The path to the conduction example
    file_state_restorer : FileStateRestorer
        Object for restoring files to original state
    restart_from : Path or None
        The path to copy the restart files from

    Returns
    -------
    run_group : RunGroup
        A basic run group

    Raises
    ------
    ValueError
        If the shape or types of the run_group_parameters are wrong
    """
    # NOTE: The following is a mypy guard which could be solved with TypedDict
    #       However, TypedDict is new from 3.8
    if "name" not in run_group_parameters.keys() or not isinstance(
        run_group_parameters["name"], str
    ):
        raise ValueError("'name' must be of string type in run_group_parameters")
    if "run_graph" not in run_group_parameters.keys() or not (
        isinstance(run_group_parameters["run_graph"], RunGraph)
        or run_group_parameters["run_graph"] is None
    ):
        # FIX: message previously said "RunGroup type" although the check is
        # for RunGraph
        raise ValueError(
            "'run_graph' must be of RunGraph type or None in run_group_parameters"
        )
    if (
        "waiting_for" not in run_group_parameters.keys()
        or not (
            hasattr(run_group_parameters["waiting_for"], "__iter__")
            or run_group_parameters["waiting_for"] is None
        )
        or isinstance(run_group_parameters["waiting_for"], RunGraph)
    ):
        # FIX: message previously claimed "RunGroup type"; the accepted values
        # are None, str or an iterable of str
        raise ValueError(
            "'waiting_for' must be None, str or an iterable of str "
            "in run_group_parameters"
        )
    # Make project to save time
    project_path = make_project

    # Create the `bout_paths` object
    bout_paths = BoutPaths(
        project_path=project_path,
        bout_inp_src_dir=project_path.joinpath("data"),
        bout_inp_dst_dir=project_path.joinpath(run_group_parameters["name"]),
    )
    # Create the input objects
    # nout=0 keeps the run itself as short as possible
    run_parameters = RunParameters({"global": {"nout": 0}})
    default_parameters = DefaultParameters(bout_paths)
    final_parameters = FinalParameters(default_parameters, run_parameters)
    submitter = get_submitter()
    if isinstance(submitter, LocalSubmitter):
        submitter.run_path = bout_paths.project_path
    executor = BoutRunExecutor(
        bout_paths=bout_paths,
        submitter=submitter,
        run_parameters=run_parameters,
        restart_from=restart_from,
    )
    db_connector = DatabaseConnector(
        name=run_group_parameters["name"], db_root_path=project_path
    )
    bout_run_setup = BoutRunSetup(executor, db_connector, final_parameters)
    # Create the `run_group`
    run_group = RunGroup(
        run_group_parameters["run_graph"]
        if run_group_parameters["run_graph"] is not None
        else RunGraph(),
        bout_run_setup,
        name=run_group_parameters["name"],
        waiting_for=run_group_parameters["waiting_for"],
    )

    file_state_restorer.add(
        executor.bout_paths.bout_inp_dst_dir, force_mark_removal=True
    )
    file_state_restorer.add(db_connector.db_path, force_mark_removal=True)
    file_state_restorer.add(
        executor.bout_paths.project_path.joinpath("settings_run"),
        force_mark_removal=True,
    )
    return run_group
def assert_waiting_for_graph(
    node_zero_submitter: AbstractSubmitter,
    node_one_submitter: AbstractSubmitter,
    node_two_submitter: AbstractSubmitter,
    node_three_submitter: AbstractSubmitter,
    save_dir,
) -> None:
    """
    Assert that the graph is running in correct order.

    Parameters
    ----------
    node_zero_submitter : AbstractSubmitter
        Submitter object for node zero
    node_one_submitter : AbstractSubmitter
        Submitter object for node one
    node_two_submitter : AbstractSubmitter
        Submitter object for node two
    node_three_submitter : AbstractSubmitter
        Submitter object for node three
    save_dir : Path
        Path to where the job artifacts are stored
    """
    graph = RunGraph()
    # The script file for each node is named after the node itself
    node_table = (
        ("node_zero", node_zero, node_zero_submitter),
        ("node_one", node_one, node_one_submitter),
        ("node_two", node_two, node_two_submitter),
        ("node_three", node_three, node_three_submitter),
    )
    for node_name, function, submitter in node_table:
        graph.add_function_node(
            node_name,
            {"function": function, "args": (save_dir,), "kwargs": None},
            save_dir.joinpath(f"{node_name}.py"),
            submitter,
        )
    graph.add_waiting_for("node_two", "node_one")
    graph.add_waiting_for("node_three", ("node_one", "node_two"))

    runner = BoutRunner(graph)
    runner.run()
    # node_three is last in the chain, so its log proves the whole graph ran
    node_three_submitter.wait_until_completed()
    assert save_dir.joinpath("node_three.log").is_file()
def test_restart_documentation(
    make_project: Path,
    copy_bout_inp: Callable[[Path, str], Path],
    file_state_restorer: FileStateRestorer,
) -> None:
    """
    Test that the restart documentation runs without error.

    Parameters
    ----------
    make_project : Path
        The path to the conduction example
    copy_bout_inp : function
        Function which copies BOUT.inp and returns the path to the temporary
        directory
    file_state_restorer : FileStateRestorer
        Object for restoring files to original state
    """
    # NOTE: We are aware of the number of locals, and are here only testing the docs
    # pylint: disable=too-many-locals
    project_path = make_project
    bout_inp_src_dir = copy_bout_inp(project_path, "test_restart_documentation_src")
    bout_inp_dst_dir = project_path.joinpath("test_restart_documentation_dst")
    # NOTE: bout_inp_src_dir removed by copy_bout_inp teardown
    file_state_restorer.add(bout_inp_dst_dir, force_mark_removal=True)

    bout_paths = BoutPaths(
        project_path=project_path,
        bout_inp_src_dir=bout_inp_src_dir,
        bout_inp_dst_dir=bout_inp_dst_dir,
    )
    default_parameters = DefaultParameters(bout_paths)
    # nout=0 keeps the run itself as short as possible
    run_parameters = RunParameters({"global": {"nout": 0}})
    final_parameters = FinalParameters(default_parameters, run_parameters)
    basic_executor = BoutRunExecutor(
        bout_paths=bout_paths,
        submitter=LocalSubmitter(bout_paths.project_path),
        run_parameters=run_parameters,
    )
    # NOTE: We set the database to bout_inp_dst_dir as this will be removed later
    db_connector = DatabaseConnector("name_of_database", db_root_path=bout_inp_dst_dir)
    file_state_restorer.add(db_connector.db_path, force_mark_removal=True)
    basic_bout_run_setup = BoutRunSetup(basic_executor, db_connector, final_parameters)
    run_graph = RunGraph()
    name = "my_restart_runs"
    basic_run_group = RunGroup(run_graph, basic_bout_run_setup, name=name)

    # New section in the documentation
    restart_executor = BoutRunExecutor(
        bout_paths=bout_paths,
        submitter=LocalSubmitter(bout_paths.project_path),
        run_parameters=run_parameters,
        restart_from=bout_paths.bout_inp_dst_dir,
    )
    file_state_restorer.add(
        restart_executor.bout_paths.bout_inp_dst_dir, force_mark_removal=True
    )
    restart_bout_run_setup = BoutRunSetup(
        restart_executor, db_connector, final_parameters
    )
    RunGroup(
        run_graph,
        restart_bout_run_setup,
        name=name,
        waiting_for=basic_run_group.bout_run_node_name,
    )

    # New section in the documentation
    new_run_parameters = RunParameters({"solver": {"adams_moulton": True}})
    # FIX: was built from the old run_parameters, silently dropping the
    # adams_moulton setting
    new_final_parameters = FinalParameters(default_parameters, new_run_parameters)
    restart_with_changing_parameters_executor = BoutRunExecutor(
        bout_paths=bout_paths,
        submitter=LocalSubmitter(bout_paths.project_path),
        run_parameters=new_run_parameters,
        restart_from=bout_paths.bout_inp_dst_dir,
    )
    file_state_restorer.add(
        restart_with_changing_parameters_executor.bout_paths.bout_inp_dst_dir,
        force_mark_removal=True,
    )
    # FIX: this BoutRunSetup was previously discarded and the old
    # restart_bout_run_setup reused below, so the changed parameters never ran
    restart_with_changing_parameters_bout_run_setup = BoutRunSetup(
        restart_with_changing_parameters_executor, db_connector, new_final_parameters
    )
    RunGroup(
        run_graph,
        restart_with_changing_parameters_bout_run_setup,
        name=name,
        waiting_for=basic_run_group.bout_run_node_name,
    )

    # New section in the documentation
    run_graph.get_dot_string()

    # New section in the documentation
    runner = BoutRunner(run_graph)
    runner.run()