Code example #1
File: make.py Project: fossabot/bout_runners
    def __init__(
        self,
        makefile_root_path: Optional[Path] = None,
        makefile_name: Optional[str] = None,
    ) -> None:
        """
        Call the make file.

        Parameters
        ----------
        makefile_root_path : None or Path or str
            Root path of the makefile
            If None, the directory of the caller of MakeProject will be used
        makefile_name : None or str
            If set to None, it tries the following names, in order:
            'GNUmakefile', 'makefile' and 'Makefile'
        """
        if makefile_root_path is None:
            makefile_root_path = get_caller_dir()
        self.makefile_root_path = Path(makefile_root_path)
        logging.debug("self.makefile_root_path set to %s", makefile_root_path)

        self.makefile_name = makefile_name

        self.makefile_path = get_makefile_path(
            self.makefile_root_path, self.makefile_name
        )
        self.exec_name = get_exec_name(self.makefile_path)
        self.submitter = LocalSubmitter(self.makefile_root_path)
Code example #2
def get_submitter(name: str, *args, **kwargs) -> LocalSubmitter:
    """
    Return a Submitter object.

    Parameters
    ----------
    name : str
        Name of the submitter to use
    args : tuple
        Positional arguments (see the different implementations for details)
    kwargs : dict
        Keyword arguments (see the different implementations for details)

    Returns
    -------
    submitter : AbstractSubmitter
        The implemented submitter class

    Raises
    ------
    NotImplementedError
        If the name is not a supported submitter class
    """
    implemented = ("local",)

    if name == "local":
        submitter = LocalSubmitter(*args, **kwargs)
    else:
        msg = f"{name} is not a valid submitter class, choose from {implemented}"
        raise NotImplementedError(msg)

    logging.debug("%s submitter selected", name)

    return submitter
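
A quick usage sketch of the factory above; the import path is an assumption and is not shown in the snippet:

# Usage sketch -- the import path below is an assumption, not taken from the snippet
from bout_runners.submitter.submitter_factory import get_submitter

# "local" is the only implemented name in this snippet; other names raise
# NotImplementedError
submitter = get_submitter("local")
submitter.submit_command("echo hello")
submitter.wait_until_completed()
print(submitter.std_out)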
Code example #3
File: executor.py Project: fossabot/bout_runners
    def __init__(
        self,
        bout_paths: Optional[BoutPaths] = None,
        submitter: Optional[LocalSubmitter] = None,
        run_parameters: Optional[RunParameters] = None,
        restart_from: Optional[Path] = None,
    ) -> None:
        """
        Set the input parameters.

        Parameters
        ----------
        bout_paths : BoutPaths or None
            Object containing the paths
            If None, default BoutPaths values will be used
        submitter : AbstractSubmitter or None
            Object containing the submitter
            If None, the default LocalSubmitter will be used
        run_parameters : RunParameters or None
            Object containing the run parameters
            If None, default parameters will be used
        restart_from : Path or None
            The path to copy the restart files from
        """
        # Set member data
        self.restart_from = restart_from
        # NOTE: We are not setting the default as a keyword argument
        #       as this would mess up the paths
        self.submitter = submitter if submitter is not None else LocalSubmitter()
        self.__bout_paths = bout_paths if bout_paths is not None else BoutPaths()
        self.__run_parameters = (
            run_parameters if run_parameters is not None else RunParameters()
        )
        self.__make = Make(self.__bout_paths.project_path)
Code example #4
    def add_function_node(
        self,
        name: str,
        function_dict: Optional[Dict[str,
                                     Optional[Union[Callable, Tuple[Any, ...],
                                                    Dict[str, Any]]]]] = None,
        path: Optional[Path] = None,
        submitter: Optional[AbstractSubmitter] = None,
    ) -> None:
        """
        Add a node with an optionally attached callable to the graph.

        Parameters
        ----------
        name : str
            Name of the node
        function_dict : None or dict
            Dict with the function to call
            Of the form
            >>> {'function': None or callable,
            ...  'args': None or tuple,
            ...  'kwargs': None or dict}
        path : None or Path
            Absolute path to store the python file which holds the function and
            its arguments
        submitter : None or AbstractSubmitter
            Submitter to submit the function with
            If None, the default LocalSubmitter will be used

        Raises
        ------
        ValueError
            If the node is already present in the graph
        """
        if name in self.__node_set:
            raise ValueError(f"'{name}' is already present in the graph")

        if function_dict is None:
            function_dict = {"function": None, "args": None, "kwargs": None}

        submitter = submitter if submitter is not None else LocalSubmitter()

        logging.debug(
            "Adding node=%s with function_dict=%s, path=%s and submitter=%s",
            name,
            function_dict,
            path,
            submitter,
        )
        self.__graph.add_node(
            name,
            function=function_dict["function"],
            args=function_dict["args"],
            kwargs=function_dict["kwargs"],
            path=path,
            submitter=submitter,
            status="ready",
        )
        self.__node_set = set(self.__graph.nodes)
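
A minimal sketch of attaching a function node to a run graph; the RunGraph import path and the target file location are assumptions:

# Usage sketch -- the RunGraph import path and the target path are assumptions
from pathlib import Path

from bout_runners.runner.run_graph import RunGraph


def greet(name: str, punctuation: str = "!") -> None:
    """Print a greeting."""
    print(f"Hello, {name}{punctuation}")


run_graph = RunGraph()
# Attach the callable together with its positional and keyword arguments;
# `path` is where the generated python file will be stored
run_graph.add_function_node(
    "greet_node",
    function_dict={
        "function": greet,
        "args": ("world",),
        "kwargs": {"punctuation": "?"},
    },
    path=Path("/tmp/greet_node.py"),
)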
Code example #5
    def submit_command(self, command: str) -> None:
        """
        Submit a command.

        Notes
        -----
        All submitted jobs are held
        Release with self.release
        See [1]_ for details

        Parameters
        ----------
        command : str
            Command to submit

        References
        ----------
        .. [1] https://community.openpbs.org/t/ignoring-finished-dependencies/1976
        """
        # This starts the job anew, so we reset the instance to clear any spurious
        # member data; before doing so, we must capture the waiting_for tuple
        waiting_for = self.waiting_for
        self.reset()
        script_path = self.store_dir.joinpath(f"{self._job_name}.sh")
        with script_path.open("w") as file:
            file.write(self.create_submission_string(command, waiting_for=waiting_for))

        # Make the script executable
        local_submitter = LocalSubmitter(run_path=self.store_dir)
        local_submitter.submit_command(f"chmod +x {script_path}")
        local_submitter.wait_until_completed()

        # Submit the command through a local submitter
        local_submitter.submit_command(
            f"{self._cluster_specific['submit_str']} {script_path}"
        )
        local_submitter.wait_until_completed()
        self._status["job_id"] = self.extract_job_id(local_submitter.std_out)
        logging.info(
            "job_id %s (%s) given to command '%s' in %s",
            self.job_id,
            self.job_name,
            command,
            script_path,
        )
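
A rough sketch of the hold-and-release workflow described in the Notes; it assumes the submitter can be built through a factory like the one in code example #18, and the import path is an assumption:

# Usage sketch -- factory use and import path are assumptions
from pathlib import Path

from bout_runners.submitter.submitter_factory import get_submitter

submitter = get_submitter(
    "pbs", {"job_name": "held_job", "store_directory": Path(".")}
)
submitter.submit_command("echo hello")  # submitted on hold, as described in the Notes
submitter.release()  # must be released explicitly, see code example #9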
Code example #6
File: run.py Project: fossabot/bout_runners
def make_run_group(
    name: str,
    make_project: Path,
    run_graph: Optional[RunGraph] = None,
    restart_from: Optional[Path] = None,
    waiting_for: Optional[Union[str, Iterable[str]]] = None,
) -> RunGroup:
    """
    Return a basic RunGroup.

    Parameters
    ----------
    name : str
        Name of RunGroup and DatabaseConnector
    make_project : Path
        The path to the conduction example
    run_graph : RunGraph or None
        The RunGraph object
        If None, a new RunGraph will be created
    restart_from : Path or None
        The path to copy the restart files from
    waiting_for : None or str or iterable
        Name of nodes this node will wait for to finish before executing

    Returns
    -------
    run_group : RunGroup
        A basic run group
    """
    # Make project to save time
    project_path = make_project
    # Create the `bout_paths` object
    bout_paths = BoutPaths(
        project_path=project_path,
        bout_inp_src_dir=project_path.joinpath("data"),
        bout_inp_dst_dir=project_path.joinpath(name),
    )
    # Create the input objects
    run_parameters = RunParameters({"global": {"nout": 0}})
    default_parameters = DefaultParameters(bout_paths)
    final_parameters = FinalParameters(default_parameters, run_parameters)
    executor = Executor(
        bout_paths=bout_paths,
        submitter=LocalSubmitter(bout_paths.project_path),
        run_parameters=run_parameters,
        restart_from=restart_from,
    )
    db_connector = DatabaseConnector(name)
    bout_run_setup = BoutRunSetup(executor, db_connector, final_parameters)
    # Create the `run_group`
    run_graph = run_graph if run_graph is not None else RunGraph()
    run_group = RunGroup(run_graph,
                         bout_run_setup,
                         name=name,
                         waiting_for=waiting_for)
    return run_group
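
A sketch chaining two run groups with the helper above; the project location is an assumption, and make_run_group is assumed to be in scope (it is the function defined in run.py above):

# Usage sketch -- the RunGraph import path and the project location are assumptions;
# make_run_group is the helper defined above (assumed to be in scope)
from pathlib import Path

from bout_runners.runner.run_graph import RunGraph

project_path = Path("path", "to", "conduction")  # must hold a compiled BOUT++ project
run_graph = RunGraph()
first_group = make_run_group("first_run", project_path, run_graph=run_graph)
# Let the second run wait for the first by referencing its bout run node
second_group = make_run_group(
    "second_run",
    project_path,
    run_graph=run_graph,
    waiting_for=first_group.bout_run_node_name,
)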
Code example #7
def get_git_sha(path: Path) -> str:
    """
    Return the git hash.

    Parameters
    ----------
    path : Path
        Path to query the git hash

    Returns
    -------
    git_sha : str
        The git hash
    """
    try:
        submitter = LocalSubmitter(path)
        submitter.submit_command("git rev-parse HEAD")
        submitter.wait_until_completed()
        git_sha = submitter.std_out
    # FileNotFoundError when `git` is not found
    except (FileNotFoundError, CalledProcessError) as error:
        if isinstance(error, FileNotFoundError):
            error_str = error.args[1]
        elif isinstance(error, CalledProcessError):
            error_str = error.args[2]
        else:
            error_str = "Unknown error"
        logging.warning("Could not retrieve git sha: %s", error_str)
        git_sha = "None"

    git_sha = git_sha if git_sha is not None else "None"

    return git_sha
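
A short usage sketch; the import path is an assumption:

# Usage sketch -- the import path is an assumption
from pathlib import Path

from bout_runners.utils.paths import get_git_sha

# The string "None" is returned if the hash cannot be retrieved
print(get_git_sha(Path(".")))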
Code example #8
    def kill(self) -> None:
        """Kill a job if it exists."""
        if self.job_id is not None and not self.completed():
            logging.info("Killing job_id %s (%s)", self.job_id, self.job_name)
            submitter = LocalSubmitter()
            submitter.submit_command(
                f"{self._cluster_specific['cancel_str']} {self.job_id}"
            )
            submitter.wait_until_completed()
            self._released = True
Code example #9
    def release(self) -> None:
        """Release job if held."""
        if self.job_id is not None and not self._released:
            logging.debug("Releasing job_id %s (%s)", self.job_id, self.job_name)
            submitter = LocalSubmitter()
            submitter.submit_command(
                f"{self._cluster_specific['release_str']} {self.job_id}"
            )
            submitter.wait_until_completed()
            self._released = True
Code example #10
    def run_function(
        path: Path,
        function: Callable,
        args: Optional[Tuple[Any, ...]] = None,
        kwargs: Optional[Dict[str, Any]] = None,
        submitter: Optional[AbstractSubmitter] = None,
    ) -> AbstractSubmitter:
        """
        Submit a function for execution.

        Parameters
        ----------
        path : Path
            Absolute path to store the python file which holds the function and
            its arguments
        function : function
            The function to call
        args : None or tuple
            The positional arguments
        kwargs : None or dict
            The keyword arguments
        submitter : None or AbstractSubmitter
            The submitter to submit the function with
            Uses the default LocalSubmitter if None

        Returns
        -------
        submitter : AbstractSubmitter
            The submitter used
        """
        logging.info(
            "Submitting %s, with positional parameters %s, and keyword parameters %s",
            function.__name__,
            args,
            kwargs,
        )
        submitter = submitter if submitter is not None else LocalSubmitter()
        submitter.write_python_script(path, function, args, kwargs)
        command = f"python3 {path}"
        submitter.submit_command(command)
        return submitter
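
A sketch following the signature shown in this snippet; the surrounding class, its import path, and the static-method exposure are assumptions:

# Usage sketch -- assumes run_function is exposed as a static method on BoutRunner
# and that the import path is correct
from pathlib import Path

from bout_runners.runner.bout_runner import BoutRunner


def add(first: int, second: int = 1) -> int:
    """Return the sum of the two numbers."""
    return first + second


# The function and its arguments are written to a python script, which is then
# submitted with `python3 <path>`
submitter = BoutRunner.run_function(
    Path("/tmp/add_node.py"), add, args=(2,), kwargs={"second": 3}
)
submitter.wait_until_completed()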
Code example #11
    def get_test_executor(bout_paths: BoutPaths) -> BoutRunExecutor:
        """
        Return the executor used for the test (i.e. where nout=0).

        Parameters
        ----------
        bout_paths : BoutPaths
            Object containing the BOUT++ paths

        Returns
        -------
        executor : BoutRunExecutor
            Executor instantiated with the test set up
        """
        run_parameters = RunParameters({"global": {"nout": 0}})
        executor = BoutRunExecutor(
            bout_paths=bout_paths,
            submitter=LocalSubmitter(bout_paths.project_path),
            run_parameters=run_parameters,
        )
        return executor
Code example #12
def submitter_graph_tester(
    tmp_path: Path,
    job_name: str,
    submitter_class: Type[AbstractClusterSubmitter],
    local_node_two: bool,
):
    """
    Test that the class can submit a command.

    Parameters
    ----------
    tmp_path : Path
        Temporary path (pytest fixture)
    job_name : str
        Name of the job
    submitter_class : Type[AbstractClusterSubmitter]
        The submitter to use
    local_node_two : bool
        Whether or not a local submitter should be used for node two
    """
    save_dir = tmp_path.joinpath(job_name)
    save_dir.mkdir()
    node_zero_submitter = submitter_class("node_zero", save_dir)
    node_one_submitter = submitter_class("node_one", save_dir)

    if local_node_two:
        node_two_submitter: AbstractSubmitter = LocalSubmitter(save_dir)
    else:
        node_two_submitter = submitter_class("node_two", save_dir)
    node_three_submitter = submitter_class("node_three", save_dir)

    assert_waiting_for_graph(
        node_zero_submitter,
        node_one_submitter,
        node_two_submitter,
        node_three_submitter,
        save_dir,
    )
Code example #13
def pbs_is_available() -> bool:
    """
    Check if the PBS system is available.

    Returns
    -------
    pbs_available : bool
        True if PBS is available
    """
    # Submit the command through a local submitter
    local_submitter = LocalSubmitter()
    try:
        local_submitter.submit_command("qstat")
        local_submitter.wait_until_completed(raise_error=False)
        pbs_available = not local_submitter.errored()
    except FileNotFoundError:
        # subprocess.Popen throws FileNotFoundError if a command is not in scope
        pbs_available = False

    logging.debug("PBS is%s available", " not" if not pbs_available else "")
    return pbs_available
Code example #14
def slurm_is_available() -> bool:
    """
    Check if the SLURM system is available.

    Returns
    -------
    slurm_available : bool
        True if SLURM is available
    """
    # Submit the command through a local submitter
    local_submitter = LocalSubmitter()
    try:
        local_submitter.submit_command("squeue")
        local_submitter.wait_until_completed(raise_error=False)
        slurm_available = not local_submitter.errored()
    except FileNotFoundError:
        # subprocess.Popen throws FileNotFoundError if a command is not in scope
        slurm_available = False

    logging.debug("SLURM is%s available",
                  " not" if not slurm_available else "")
    return slurm_available
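
Together with pbs_is_available from code example #13, these checks can be combined to pick a scheduler, as in this sketch:

# Usage sketch -- assumes pbs_is_available and slurm_is_available (defined above)
# are in scope
if pbs_is_available():
    scheduler = "pbs"
elif slurm_is_available():
    scheduler = "slurm"
else:
    scheduler = "local"
print(f"Using the '{scheduler}' submitter")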
Code example #15
    def get_trace(self) -> str:
        """
        Return the trace from ``tracejob``.

        Returns
        -------
        trace : str
            Trace obtained from ``tracejob``
            An empty string will be returned if no job_id exists
        """
        if self._status["job_id"] is not None:
            # Submit the command through a local submitter
            local_submitter = LocalSubmitter(run_path=self.store_dir)
            local_submitter.submit_command(
                f"tracejob -n 365 {self._status['job_id']}")
            local_submitter.wait_until_completed()
            trace = (local_submitter.std_out
                     if local_submitter.std_out is not None else "")
            return trace
        return ""
Code example #16
    def get_sacct(self) -> str:
        """
        Return the result from ``sacct``.

        Returns
        -------
        sacct_str : str
            The string obtained from ``sacct``
            An empty string will be returned if no job_id exists
        """
        if self._status["job_id"] is not None:
            # Submit the command through a local submitter
            local_submitter = LocalSubmitter(run_path=self.store_dir)
            local_submitter.submit_command(
                f"sacct "
                f"--starttime {self.__sacct_starttime} "
                f"--j {self._status['job_id']} "
                f"--brief")
            local_submitter.wait_until_completed()
            sacct_str = (local_submitter.std_out
                         if local_submitter.std_out is not None else "")
            return sacct_str
        return ""
Code example #17
def test_function_run(tmp_path: Path) -> None:
    """
    Test the function run method.

    Parameters
    ----------
    tmp_path : Path
        Temporary path (pytest fixture)
    """
    run_graph = RunGraph()
    runner = BoutRunner(run_graph)
    path = tmp_path.joinpath("return_none.py")

    submitter = LocalSubmitter()
    runner.run_function(path, submitter, return_none)
    submitter.wait_until_completed()
    assert path.is_file()

    path = tmp_path.joinpath("return_sum_of_two.py")
    submitter = LocalSubmitter()
    runner.run_function(path, submitter, return_sum_of_two, (1, 2))
    submitter.wait_until_completed()
    assert path.is_file()

    path = tmp_path.joinpath("return_sum_of_three.py")
    submitter = LocalSubmitter()
    runner.run_function(path, submitter, return_sum_of_three, (1, 2),
                        {"number_3": 3})
    submitter.wait_until_completed()
    assert path.is_file()
Code example #18
def get_submitter(
    name: Optional[str] = None,
    argument_dict: Optional[Dict[str, Any]] = None,
) -> AbstractSubmitter:
    """
    Return a Submitter object.

    Parameters
    ----------
    name : str or None
        Name of the submitter to use
        If None, the submitter will be inferred
    argument_dict : dict or None
        Dict containing positional and keyword arguments

    Other Parameters
    ----------------
    The following parameters can be given in ``argument_dict``

    processor_split : ProcessorSplit or None
        Object containing the processor split
        Used for all submitters
    run_path : Path or str or None
        Positional argument
        Directory to run the command from
        Used in LocalSubmitters
    job_name : str or None
        Positional argument
        Name of the job
        Used for cluster submitters
    store_directory : Path or None
        Keyword argument
        Directory to store the scripts
        Used for cluster submitters
    submission_dict : None or dict of str to None or str
        Keyword argument
        Dict containing optional submission options
        Of the form

        >>> {'walltime': None or str,
        ...  'account': None or str,
        ...  'queue': None or str,
        ...  'mail': None or str}

        These options will not be used if the submission_dict is None
        Used for cluster submitters

    Returns
    -------
    submitter : AbstractSubmitter
        The implemented submitter class

    Raises
    ------
    ValueError
        If the input does not match the desired submitter
    NotImplementedError
        If the name is not a supported submitter class
    """
    implemented = ("local", "pbs", "slurm")

    if name is None or argument_dict is None:
        name, argument_dict = infer_submitter()

    logging.debug("Choosing a %s submitter", name)

    if "processor_split" not in argument_dict.keys():
        argument_dict["processor_split"] = ProcessorSplit()
    if name == "local":
        if "run_path" not in argument_dict.keys():
            argument_dict["run_path"] = None
        return LocalSubmitter(
            run_path=argument_dict["run_path"],
            processor_split=argument_dict["processor_split"],
        )
    if name in ("pbs", "slurm"):
        for argument in ("job_name", "store_directory", "submission_dict"):
            if argument not in argument_dict.keys():
                argument_dict[argument] = None
    if name == "pbs":
        return PBSSubmitter(
            job_name=argument_dict["job_name"],
            store_directory=argument_dict["store_directory"],
            submission_dict=argument_dict["submission_dict"],
            processor_split=argument_dict["processor_split"],
        )
    if name == "slurm":
        return SLURMSubmitter(
            job_name=argument_dict["job_name"],
            store_directory=argument_dict["store_directory"],
            submission_dict=argument_dict["submission_dict"],
            processor_split=argument_dict["processor_split"],
        )

    msg = f"{name} is not a valid submitter class, choose from {implemented}"
    logging.critical(msg)
    raise NotImplementedError(msg)
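
A usage sketch of the full factory; the import path is an assumption:

# Usage sketch -- the import path is an assumption
from pathlib import Path

from bout_runners.submitter.submitter_factory import get_submitter

# Explicitly request a local submitter running from the current directory
local = get_submitter("local", {"run_path": Path(".")})

# With no arguments both the name and the argument_dict are inferred
inferred = get_submitter()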
Code example #19
def test_pre_and_post_documentation(
    clean_up_bout_inp_src_and_dst: Callable[[str, str], Tuple[Path, Path,
                                                              Path]]
) -> None:
    """
    Test that the pre and post documentation runs without error.

    Parameters
    ----------
    clean_up_bout_inp_src_and_dst : function
        Function which marks temporary BOUT.inp directories for removal.
    """
    # NOTE: We are aware of the number of locals, and are here only testing the docs
    # pylint: disable=too-many-locals
    project_path, bout_inp_src_dir, bout_inp_dst_dir = clean_up_bout_inp_src_and_dst(
        "test_pre_post_documentation_src", "test_pre_post_documentation_dst")

    bout_paths = BoutPaths(
        project_path=project_path,
        bout_inp_src_dir=bout_inp_src_dir,
        bout_inp_dst_dir=bout_inp_dst_dir,
    )

    default_parameters = DefaultParameters(bout_paths)
    run_parameters = RunParameters({"global": {"nout": 0}})
    final_parameters = FinalParameters(default_parameters, run_parameters)

    basic_executor = Executor(
        bout_paths=bout_paths,
        submitter=LocalSubmitter(bout_paths.project_path),
        run_parameters=run_parameters,
    )

    # NOTE: We set the database to bout_inp_dst_dir as this will be removed later
    db_connector = DatabaseConnector("name_of_database",
                                     db_root_path=bout_inp_dst_dir)

    basic_bout_run_setup = BoutRunSetup(basic_executor, db_connector,
                                        final_parameters)

    run_graph = RunGraph()
    name = "my_restart_runs"
    basic_run_group = RunGroup(run_graph, basic_bout_run_setup, name=name)

    # New section in the documentation

    basic_run_group.add_post_processor({
        "function": return_none,
        "args": None,
        "kwargs": None
    })
    expanded_noise_restarts_dir = bout_paths.bout_inp_dst_dir.parent.joinpath(
        "expanded_noise_restarts")
    kwargs = {
        "newNz": 16,
        "path": bout_paths.bout_inp_dst_dir,
        "output": expanded_noise_restarts_dir,
    }
    expand_node_name = basic_run_group.add_post_processor({
        "function": mock_expand,
        "args": None,
        "kwargs": kwargs
    })

    # New section in the documentation
    # NOTE: Add these for removal
    clean_up_bout_inp_src_and_dst("expanded_noise_restarts",
                                  "expanded_noise_restarts")

    # Create the RunGroup
    restart_executor = Executor(
        bout_paths=bout_paths,
        submitter=LocalSubmitter(bout_paths.project_path),
        run_parameters=run_parameters,
        restart_from=expanded_noise_restarts_dir,
    )

    restart_bout_run_setup = BoutRunSetup(restart_executor, db_connector,
                                          final_parameters)

    restart_run_group = RunGroup(run_graph, restart_bout_run_setup, name=name)

    kwargs = {"path": expanded_noise_restarts_dir, "scale": 1e-5}
    restart_run_group.add_pre_processor(
        {
            "function": return_none,
            "args": None,
            "kwargs": kwargs
        },
        waiting_for=expand_node_name,
    )

    # New section in the documentation

    run_graph.get_dot_string()

    # New section in the documentation

    runner = BoutRunner(run_graph)
    runner.run()
Code example #20
File: make.py Project: fossabot/bout_runners
class Make:
    """
    Class for making the project.

    Attributes
    ----------
    makefile_root_path : Path
        The root directory of the Makefile
    makefile_name : str
        The name of the Makefile
    makefile_path : Path
        Path to the makefile
    exec_name : str
        The name of the executable

    Methods
    -------
    run_make(force=False)
        Runs make in the self.makefile_root_path
    run_clean()
        Runs make clean in the self.makefile_root_path

    Examples
    --------
    >>> from bout_runners.make.make import Make
    >>> from pathlib import Path
    >>> path = Path('path', 'to', 'makefile_root_path')
    >>> make_obj = Make(makefile_root_path=path)
    >>> make_obj.run_make(force=True)
    """

    def __init__(
        self,
        makefile_root_path: Optional[Path] = None,
        makefile_name: Optional[str] = None,
    ) -> None:
        """
        Call the make file.

        Parameters
        ----------
        makefile_root_path : None or Path or str
            Root path of the makefile
            If None, the directory of the caller of MakeProject will be used
        makefile_name : None or str
            If set to None, it tries the following names, in order:
            'GNUmakefile', 'makefile' and 'Makefile'
        """
        if makefile_root_path is None:
            makefile_root_path = get_caller_dir()
        self.makefile_root_path = Path(makefile_root_path)
        logging.debug("self.makefile_root_path set to %s", makefile_root_path)

        self.makefile_name = makefile_name

        self.makefile_path = get_makefile_path(
            self.makefile_root_path, self.makefile_name
        )
        self.exec_name = get_exec_name(self.makefile_path)
        self.submitter = LocalSubmitter(self.makefile_root_path)

    def run_make(self, force: bool = False) -> None:
        """
        Execute the makefile.

        If an executable is found, nothing will be done unless 'force' is set to True

        Parameters
        ----------
        force : bool
            If True, make clean will be called prior to make
        """
        # If force: Run clean so that `made` returns false
        if force:
            self.run_clean()

        # Check if already made
        made = self.makefile_root_path.joinpath(self.exec_name).is_file()

        # Do nothing if already made
        if not made:
            make_str = (
                "make"
                if self.makefile_name is None
                else f"make -f {self.makefile_name}"
            )

            logging.info("Making the program")
            command = f"{make_str}"
            self.submitter.submit_command(command)
            self.submitter.wait_until_completed()

    def run_clean(self) -> None:
        """Run make clean."""
        make_str = (
            "make" if self.makefile_name is None else f"make -f {self.makefile_name}"
        )

        logging.info("Running make clean")
        command = f"{make_str} clean"
        self.submitter.submit_command(command)
        self.submitter.wait_until_completed()
Code example #21
def test_local_submitter() -> None:
    """Test that LocalSubmitter can run a command and raise an error."""
    submitter = LocalSubmitter()
    submitter.submit_command("ls")
    submitter.wait_until_completed()

    submitter.errored()
    assert isinstance(submitter.pid, int)
    assert isinstance(submitter.return_code, int)
    assert isinstance(submitter.std_out, str)
    assert isinstance(submitter.std_err, str)

    with pytest.raises(FileNotFoundError):
        submitter.submit_command("not a real command")
        submitter.wait_until_completed()
        submitter.raise_error()

    with pytest.raises(CalledProcessError):
        submitter.submit_command("ls ThisPathDoesNotExist")
        submitter.wait_until_completed()
        submitter.raise_error()
Code example #22
def test_restart_documentation(
    clean_up_bout_inp_src_and_dst: Callable[[str, str], Tuple[Path, Path,
                                                              Path]]
) -> None:
    """
    Test that the restart documentation runs without error.

    Parameters
    ----------
    clean_up_bout_inp_src_and_dst : function
        Function which marks temporary BOUT.inp directories for removal.
    """
    # NOTE: We are aware of the number of locals, and are here only testing the docs
    # pylint: disable=too-many-locals
    project_path, bout_inp_src_dir, bout_inp_dst_dir = clean_up_bout_inp_src_and_dst(
        "test_restart_documentation_src", "test_restart_documentation_dst")
    bout_paths = BoutPaths(
        project_path=project_path,
        bout_inp_src_dir=bout_inp_src_dir,
        bout_inp_dst_dir=bout_inp_dst_dir,
    )

    default_parameters = DefaultParameters(bout_paths)
    run_parameters = RunParameters({"global": {"nout": 0}})
    final_parameters = FinalParameters(default_parameters, run_parameters)

    basic_executor = Executor(
        bout_paths=bout_paths,
        submitter=LocalSubmitter(bout_paths.project_path),
        run_parameters=run_parameters,
    )

    # NOTE: We set the database to bout_inp_dst_dir as this will be removed later
    db_connector = DatabaseConnector("name_of_database",
                                     db_root_path=bout_inp_dst_dir)

    basic_bout_run_setup = BoutRunSetup(basic_executor, db_connector,
                                        final_parameters)

    run_graph = RunGraph()
    name = "my_restart_runs"
    basic_run_group = RunGroup(run_graph, basic_bout_run_setup, name=name)

    # New section in the documentation

    restart_executor = Executor(
        bout_paths=bout_paths,
        submitter=LocalSubmitter(bout_paths.project_path),
        run_parameters=run_parameters,
        restart_from=bout_paths.bout_inp_dst_dir,
    )

    restart_bout_run_setup = BoutRunSetup(restart_executor, db_connector,
                                          final_parameters)

    RunGroup(
        run_graph,
        restart_bout_run_setup,
        name=name,
        waiting_for=basic_run_group.bout_run_node_name,
    )

    # New section in the documentation

    new_run_parameters = RunParameters({"solver": {"adams_moulton": True}})
    new_final_parameters = FinalParameters(default_parameters, run_parameters)

    restart_with_changing_parameters_executor = Executor(
        bout_paths=bout_paths,
        submitter=LocalSubmitter(bout_paths.project_path),
        run_parameters=new_run_parameters,
        restart_from=bout_paths.bout_inp_dst_dir,
    )

    BoutRunSetup(restart_with_changing_parameters_executor, db_connector,
                 new_final_parameters)

    RunGroup(
        run_graph,
        restart_bout_run_setup,
        name=name,
        waiting_for=basic_run_group.bout_run_node_name,
    )

    # New section in the documentation

    run_graph.get_dot_string()

    # New section in the documentation

    runner = BoutRunner(run_graph)
    runner.run()
Code example #23
def test_pre_and_post_documentation(
    make_project: Path,
    copy_bout_inp: Callable[[Path, str], Path],
    file_state_restorer: FileStateRestorer,
) -> None:
    """
    Test that the pre and post documentation runs without error.

    Parameters
    ----------
    make_project : Path
        The path to the conduction example
    copy_bout_inp : function
        Function which copies BOUT.inp and returns the path to the temporary
        directory
    file_state_restorer : FileStateRestorer
        Object for restoring files to original state
    """
    # NOTE: We are aware of the number of locals, and are here only testing the docs
    # pylint: disable=too-many-locals
    project_path = make_project
    bout_inp_src_dir = copy_bout_inp(project_path,
                                     "test_pre_post_documentation_src")
    bout_inp_dst_dir = project_path.joinpath("test_pre_post_documentation_dst")
    # NOTE: bout_inp_src_dir removed by copy_bout_inp teardown
    file_state_restorer.add(bout_inp_dst_dir, force_mark_removal=True)

    bout_paths = BoutPaths(
        project_path=project_path,
        bout_inp_src_dir=bout_inp_src_dir,
        bout_inp_dst_dir=bout_inp_dst_dir,
    )

    default_parameters = DefaultParameters(bout_paths)
    run_parameters = RunParameters({"global": {"nout": 0}})
    final_parameters = FinalParameters(default_parameters, run_parameters)

    basic_executor = BoutRunExecutor(
        bout_paths=bout_paths,
        submitter=LocalSubmitter(bout_paths.project_path),
        run_parameters=run_parameters,
    )

    # NOTE: We set the database to bout_inp_dst_dir as this will be removed later
    db_connector = DatabaseConnector("name_of_database",
                                     db_root_path=bout_inp_dst_dir)
    file_state_restorer.add(db_connector.db_path, force_mark_removal=True)

    basic_bout_run_setup = BoutRunSetup(basic_executor, db_connector,
                                        final_parameters)

    run_graph = RunGraph()
    name = "my_restart_runs"
    basic_run_group = RunGroup(run_graph, basic_bout_run_setup, name=name)

    # New section in the documentation

    basic_run_group.add_post_processor({
        "function": return_none,
        "args": None,
        "kwargs": None
    })
    expanded_noise_restarts_dir = bout_paths.bout_inp_dst_dir.parent.joinpath(
        "expanded_noise_restarts")
    file_state_restorer.add(expanded_noise_restarts_dir,
                            force_mark_removal=True)
    kwargs = {
        "newNz": 16,
        "path": bout_paths.bout_inp_dst_dir,
        "output": expanded_noise_restarts_dir,
    }
    expand_node_name = basic_run_group.add_post_processor(
        {
            "function": mock_expand,
            "args": None,
            "kwargs": kwargs,
        }
    )

    # New section in the documentation

    # Create the RunGroup
    restart_executor = BoutRunExecutor(
        bout_paths=bout_paths,
        submitter=LocalSubmitter(bout_paths.project_path),
        run_parameters=run_parameters,
        restart_from=expanded_noise_restarts_dir,
    )
    file_state_restorer.add(restart_executor.bout_paths.bout_inp_dst_dir,
                            force_mark_removal=True)

    restart_bout_run_setup = BoutRunSetup(restart_executor, db_connector,
                                          final_parameters)

    restart_run_group = RunGroup(run_graph, restart_bout_run_setup, name=name)

    kwargs = {"path": expanded_noise_restarts_dir, "scale": 1e-5}
    restart_run_group.add_pre_processor(
        {
            "function": return_none,
            "args": None,
            "kwargs": kwargs,
        },
        waiting_for=expand_node_name,
    )

    # New section in the documentation

    run_graph.get_dot_string()

    # New section in the documentation

    runner = BoutRunner(run_graph)
    runner.run()
Code example #24
def test_restart_documentation(
    make_project: Path,
    copy_bout_inp: Callable[[Path, str], Path],
    file_state_restorer: FileStateRestorer,
) -> None:
    """
    Test that the restart documentation runs without error.

    Parameters
    ----------
    make_project : Path
        The path to the conduction example
    copy_bout_inp : function
        Function which copies BOUT.inp and returns the path to the temporary
        directory
    file_state_restorer : FileStateRestorer
        Object for restoring files to original state
    """
    # NOTE: We are aware of the number of locals, and are here only testing the docs
    # pylint: disable=too-many-locals
    project_path = make_project
    bout_inp_src_dir = copy_bout_inp(project_path,
                                     "test_restart_documentation_src")
    bout_inp_dst_dir = project_path.joinpath("test_restart_documentation_dst")
    # NOTE: bout_inp_src_dir removed by copy_bout_inp teardown
    file_state_restorer.add(bout_inp_dst_dir, force_mark_removal=True)

    bout_paths = BoutPaths(
        project_path=project_path,
        bout_inp_src_dir=bout_inp_src_dir,
        bout_inp_dst_dir=bout_inp_dst_dir,
    )

    default_parameters = DefaultParameters(bout_paths)
    run_parameters = RunParameters({"global": {"nout": 0}})
    final_parameters = FinalParameters(default_parameters, run_parameters)

    basic_executor = BoutRunExecutor(
        bout_paths=bout_paths,
        submitter=LocalSubmitter(bout_paths.project_path),
        run_parameters=run_parameters,
    )

    # NOTE: We set the database to bout_inp_dst_dir as this will be removed later
    db_connector = DatabaseConnector("name_of_database",
                                     db_root_path=bout_inp_dst_dir)
    file_state_restorer.add(db_connector.db_path, force_mark_removal=True)

    basic_bout_run_setup = BoutRunSetup(basic_executor, db_connector,
                                        final_parameters)

    run_graph = RunGraph()
    name = "my_restart_runs"
    basic_run_group = RunGroup(run_graph, basic_bout_run_setup, name=name)

    # New section in the documentation

    restart_executor = BoutRunExecutor(
        bout_paths=bout_paths,
        submitter=LocalSubmitter(bout_paths.project_path),
        run_parameters=run_parameters,
        restart_from=bout_paths.bout_inp_dst_dir,
    )
    file_state_restorer.add(restart_executor.bout_paths.bout_inp_dst_dir,
                            force_mark_removal=True)

    restart_bout_run_setup = BoutRunSetup(restart_executor, db_connector,
                                          final_parameters)

    RunGroup(
        run_graph,
        restart_bout_run_setup,
        name=name,
        waiting_for=basic_run_group.bout_run_node_name,
    )

    # New section in the documentation

    new_run_parameters = RunParameters({"solver": {"adams_moulton": True}})
    new_final_parameters = FinalParameters(default_parameters, run_parameters)

    restart_with_changing_parameters_executor = BoutRunExecutor(
        bout_paths=bout_paths,
        submitter=LocalSubmitter(bout_paths.project_path),
        run_parameters=new_run_parameters,
        restart_from=bout_paths.bout_inp_dst_dir,
    )
    file_state_restorer.add(
        restart_with_changing_parameters_executor.bout_paths.bout_inp_dst_dir,
        force_mark_removal=True,
    )

    BoutRunSetup(restart_with_changing_parameters_executor, db_connector,
                 new_final_parameters)

    RunGroup(
        run_graph,
        restart_bout_run_setup,
        name=name,
        waiting_for=basic_run_group.bout_run_node_name,
    )

    # New section in the documentation

    run_graph.get_dot_string()

    # New section in the documentation

    runner = BoutRunner(run_graph)
    runner.run()
Code example #25
File: run_group.py Project: fossabot/bout_runners
    def add_post_processor(
        self,
        function_dict: Dict[str, Optional[Union[Callable, Tuple[Any, ...],
                                                Dict[str, Any]]]],
        directory: Optional[Path] = None,
        submitter: Optional[AbstractSubmitter] = None,
        waiting_for: Optional[Union[str, Iterable[str]]] = None,
    ) -> str:
        """
        Add a post-processor to the BOUT++ run.

        The function and the parameters will be saved to a python script which will
        be submitted

        Parameters
        ----------
        function_dict : dict
            Dict with the function to call
            Of the form
            >>> {'function': callable,
            ...  'args': None or tuple,
            ...  'kwargs': None or dict}
        directory : None or Path
            Absolute path to directory to store the python script
            If None, the destination directory of BoutRun will be used
        waiting_for : None or str or iterable
            Name of nodes this node will wait for to finish before executing
        submitter : None or AbstractSubmitter
            Submitter to submit the function with
            If None, the default LocalSubmitter will be used

        Returns
        -------
        post_processor_node_name : str
            The node name of the post-processor

        Raises
        ------
        ValueError
            If the function in the function_dict is not callable
        """
        if directory is None:
            directory = self.__dst_dir

        if "function" not in function_dict.keys() or not callable(
                function_dict["function"]):
            msg = 'function_dict["function"] must be callable'
            logging.error(msg)
            raise ValueError(msg)

        post_processor_node_name = (
            f"post_processor_{self.__name}_{len(self.__post_processors)}")
        path = directory.joinpath(
            f"{function_dict['function'].__name__}_{post_processor_node_name}.py"
        )
        submitter = submitter if submitter is not None else LocalSubmitter()
        self.__run_graph.add_function_node(
            post_processor_node_name,
            function_dict=function_dict,
            path=path,
            submitter=submitter,
        )
        self.__run_graph.add_edge(self.bout_run_node_name,
                                  post_processor_node_name)
        self.__run_graph.add_waiting_for(post_processor_node_name, waiting_for)
        self.__post_processors.append(post_processor_node_name)
        return post_processor_node_name