Пример #1
0
    def estimated_time(self):
        """
        Estimate the remaining simulation time in seconds.

        A linear extrapolation from the elapsed wall-clock time and the
        completed steps is used; initialization time is assumed negligible.
        For local runs the reference instant is the creation time of the
        first process; for grid runs it is the start time reported by the
        queue system.

        If the run was resumed, the estimation will be wrong.

        Returns:
            float: The estimation of the time to end the simulation, 0 if
            already finished, or NaN if no estimation could be done.

        """
        self._update()
        mode = self.running_mode
        if not mode:
            # Not running: either already done (0 s left) or nothing to estimate.
            return 0 if self._is_finished() else float("nan")
        if mode == "local":
            start = self.processes[0].create_time()
        elif mode == "grid":
            start = self.job["start_time"]
            if start == 0:
                # Queued, but not started
                return float("nan")
        else:
            logger.warning("Invalid running_mode attribute")
            return float("nan")
        current = self.current_step()
        if current <= 0:  # If not dumped yet or error
            return float("nan")
        # Linear model: remaining = elapsed * (steps left / steps done).
        return (time() - start) * (self.total_steps / current - 1)
Пример #2
0
    def get_axes(self, dataset_selector=None, axes_selector=None):
        """
        Describe the axes that remain after applying the given reduction.

        Args:
            dataset_selector: See :func:`~duat.osiris.plot.Diagnostic.get_generator` method.
            axes_selector: See :func:`~duat.osiris.plot.Diagnostic.get_generator` method.

        Returns:
            list of dict: Ordered list of the axes left by the reduction.

        """
        remaining = []
        if dataset_selector is not None:
            # Datasets are reduced by the selector, so they contribute no axis.
            if self.shape[1] == 1:
                logger.warning("Single dataset found. Ignoring the provided dataset_selector.")
        elif self.shape[1] > 1:
            # Multiple datasets and no selector: they behave as an extra axis.
            remaining.append(self.datasets_as_axis)

        if axes_selector is None:
            # No reduction on the axes: all of them survive, in order.
            remaining.extend(self.axes)
        else:
            if len(axes_selector) != len(self.axes):
                raise ValueError(
                    "Invalid axes_selector parameter. Length must be %d. Check the axes of the Diagnostic instance." % len(
                        self.axes))
            # Only axes with no selection (None) survive the reduction.
            remaining.extend(axis for axis, choice in zip(self.axes, axes_selector) if choice is None)
        return remaining
Пример #3
0
    def kill(self):
        """
        Abruptly terminate the OSIRIS processes (if running).

        The :func:`~duat.osiris.run.Run.terminate` method should be used instead to perform a cleaner exit.

        If running is "local", sends SIGKILL to the processes. If "grid", calls qdel.

        Raises:
            subprocess.CalledProcessError: If using a grid and qdel fails.

        """
        self._update()
        mode = self.running_mode
        if mode == "local":
            for proc in self.processes:
                try:
                    proc.kill()
                except psutil.NoSuchProcess:
                    # Already gone; with a multiprocess run this is likely
                    # once one of the processes stops.
                    pass
        elif mode == "grid":
            subprocess.check_call("qdel %d" % self.job["job_number"],
                                  shell=True)
        else:
            logger.warning(
                "Asked for termination of a Run not known to be running.")
Пример #4
0
    def terminate(self):
        """
        Terminate the OSIRIS processes (if running).

        If running is "local", sends SIGINT to the processes. If "grid", calls qdel.

        Raises:
            subprocess.CalledProcessError: If using a grid and qdel fails.

        """
        self._update()
        mode = self.running_mode
        if mode == "local":
            for proc in self.processes:
                try:
                    proc.terminate()
                except psutil.NoSuchProcess:
                    # Already gone; with a multiprocess run this is likely
                    # once one of the processes stops.
                    pass
        elif mode == "grid":
            subprocess.check_call("qdel %d" % self.job["job_number"],
                                  shell=True)
        else:
            logger.warning(
                "Asked for termination of a Run not known to be running.")
Пример #5
0
    def _update(self):
        """Refresh the process/job information from the current system state."""
        candidates = _find_running_exe(path.join(self.run_dir, "osiris"))

        try:
            if candidates:
                # Local processes are running the executable.
                self.processes = list(map(psutil.Process, candidates))
                self.running_mode = "local"
                return
            self.processes = None
            # No local process found: look for a matching job in the queue.
            jobs = _get_grid_jobs()
            if not jobs:  # Either no qstat or empty list
                self.running_mode = ""
                return
            script_path = path.abspath(
                path.join(self.run_dir, "start.sh"))
            matching = [j for j in jobs if j["script"] == script_path]
            if matching:
                if len(matching) > 1:
                    logger.warning(
                        "More than one grid job was found for the run."
                    )
                self.job = matching[0]
                self.running_mode = "grid"
            else:  # No queued job
                self.running_mode = ""

        except psutil.NoSuchProcess:
            # The processes died before this information was gathered.
            self.processes = None
            self.running_mode = ""
Пример #6
0
 def _clean_dataset_key(self, dataset_key):
     """Return the given dataset key as `str`, using human order if `int`. Might raise error or warning."""
     if isinstance(dataset_key, int):
         dataset_key = self.keys[dataset_key]
     elif isinstance(dataset_key, str):
         if dataset_key not in self.keys:
             raise ValueError("Dataset %s does not exist in the file." % dataset_key)
     elif dataset_key is None:
         if len(self.keys) != 1:  # Warn if implicitly selecting one among others.
             logger.warning("No dataset selected when multiple are available. Plotting the first one.")
         dataset_key = self.keys[0]
     else:
         raise TypeError("Unknown dataset type: %s", type(dataset_key))
     return dataset_key
Пример #7
0
 def update(i):
     """Update the plot, returning the artists which must be redrawn."""
     try:
         frame = np.transpose(np.asarray(next(gen)))
     except StopIteration:
         logger.warning("Tried to add a frame to the animation, but all data was used.")
         return
     # BEWARE: The set_array syntax is rather problematic. Depending on the
     # shading used in pcolormesh, the following might not work. For more
     # details, check lumbric's answer to
     # https://stackoverflow.com/questions/18797175/animation-with-pcolormesh-routine-in-matplotlib-how-do-i-initialize-the-data
     plot.set_array(frame[:-1, :-1].ravel())
     ax.set_title('t = {0}'.format(time_list[i]))
     return plot, ax
Пример #8
0
    def __init__(self, label=None, order=None, fixed=True, types=None):
        self.label = label if label else ""
        if not order:
            if fixed:
                logger.warning(
                    "A ConfigSectionOrdered instance with no order defined cannot be fixed."
                )
            self.fixed = False
            self.order = []
        else:
            self.fixed = fixed
            self.order = order

        self.types = types if types else {}
        self.subsections = {}
Пример #9
0
 def __getitem__(self, ind):
     if ind < len(self.lst):
         return self.lst[ind]
     if self.default_type == Section:
         raise ValueError(
             "A subsection cannot be implicitly added to the list due to generic default type."
         )
     if ind > len(self.lst):
         logger.warning(
             "Implicitly creating more than one section in a list.")
     for i in range(len(self.lst), ind + 1):
         if isinstance(self.default_type, str):
             self.append_section(Section(self.default_type))
         else:
             # Note default_type is not Section here, no reason to expect incorrect call arguments in general.
             # Code analyzers may warn though.
             self.append_section(self.default_type())
     return self.lst[ind]
Пример #10
0
 def real_time(self):
     """Find the total time in seconds taken by the simulation if it has finished, otherwise returning nan."""
     # TODO: Update for resuming runs
     timings_path = path.join(self.run_dir, "TIMINGS", "timings.001")
     try:
         with open(timings_path, "r") as f:
             text = f.read()
     except FileNotFoundError:
         # No timings file: the simulation has not finished (or never ran).
         return float("nan")
     found = re.match(r" Total time for loop was(?: *)(.*?)(?: *)seconds",
                      text, re.DOTALL + re.MULTILINE)
     if found:
         return float(found.group(1))
     logger.warning(
         "Bad format in timings file. The real time could not be read."
     )
     return float("nan")
Пример #11
0
 def update(i):
     """Update the plot, returning the artists which must be redrawn."""
     try:
         frame = next(gen)
     except StopIteration:
         logger.warning("Tried to add a frame to the animation, but all data was used.")
         return
     plot_data.set_ydata(frame[:])
     ax.set_title('t = {0}'.format(time_list[i]))
     if scale_mode == "expand":
         # Grow the y-limits so every frame seen so far stays visible.
         lo, hi = ax.get_ylim()
         ax.set_ylim(min(lo, min(frame)), max(hi, max(frame)))
     elif scale_mode == "adjust_always":
         # Fit the y-limits to the current frame only.
         ax.set_ylim(min(frame), max(frame))
     # Any other mode (falsy or "max") keeps the current limits untouched.
     return plot_data, ax
Пример #12
0
 def __init__(self, *args, epilog=None):
     """
     Create a Variation with the given parameters and values.
     
     Args:
         *args (2-:obj:`tuple` of :obj:`list`): Each argument must be a 2-tuple whose first elements is a list of str
                            or int which identifies the parameter in its section and a list of the values the
                            parameter will take. The list can be None to perform no action while passing the
                            parameter to the epilog function (see below).
         epilog (callable): A function of two arguments that will be called with the simulation and the list of
                            parameters when a config is being generated. This can be used for advanced modification,
                            for example, to set two parameters to a related value (like two species temperature).
     """
     self.parameters = args
     self._par_names = [
         "[dummy]" if p[0] is None else p[0][-1] for p in self.parameters
     ]
     self.len_list = [len(p[1]) for p in self.parameters]
     self.epilog = epilog
     if epilog is None and all(p[0] is None for p in self.parameters):
         logger.warning(
             "Trivial Variation generated. Did you forget an epilog?")
Пример #13
0
 def __setitem__(self, ind, value):
     if value is None:
         # TODO: Probably removing an item from a list with the ...= None notation is a bad practice
         raise NotImplementedError(
             "Removal from a list of Sections is not allowed. If you really need this, consider .lst.pop(number)."
         )
     if ind < len(self.lst):
         self.lst[ind] = value
     elif ind == len(self.lst):
         self.lst.append(value)
     else:
         if self.default_type is None:
             raise ValueError(
                 "A subsection cannot be implicitly added to the list due to unknown type."
             )
         logger.warning("Implicitly added subsection(s).")
         for i in range(len(self.lst), ind):
             if isinstance(self.default_type, str):
                 self.append_section(Section(self.default_type))
             else:
                 # Note default_type is not Section here, no reason to expect incorrect call arguments in general.
                 # Code analyzers may warn though.
                 self.append_section(self.default_type())
         self.lst.append(value)
Пример #14
0
def set_osiris_path(folder, warn=True):
    """
    Look for the OSIRIS executables in a folder and update the module paths.

    For each of osiris-1D.e, osiris-2D.e and osiris-3D.e found in the folder,
    the corresponding module-level variable (osiris_1d, osiris_2d, osiris_3d)
    is updated. Variables whose executable is missing are left untouched.

    Args:
        folder (str): Path of the folder with the OSIRIS executables.
        warn (bool): Whether to log a warning for a missing folder or executable.

    """
    global osiris_1d, osiris_2d, osiris_3d
    if not path.isdir(folder):
        if warn:
            logger.warning("%s is not an existing folder." % folder)
        return

    # One pass per dimensionality instead of three copy-pasted checks.
    # The messages produced are identical to the previous per-variable code.
    for dim, var_name in ((1, "osiris_1d"), (2, "osiris_2d"), (3, "osiris_3d")):
        exe = path.join(folder, "osiris-%dD.e" % dim)
        if path.isfile(exe):
            globals()[var_name] = exe
        elif warn:
            logger.warning("osiris-%dD not found in %s" % (dim, folder))
Пример #15
0

# Try to guess the OSIRIS location:
_candidates = []
if "OSIRIS_PATH" in environ:
    # An explicit user setting takes precedence.
    _candidates.append(environ["OSIRIS_PATH"])
_candidates.append(path.join(path.expanduser("~"), "osiris", "bin"))
# BUG FIX: this candidate was previously built as path.join("usr", ...),
# yielding the RELATIVE path "usr/local/osiris/bin"; the intended
# system-wide location is the absolute /usr/local/osiris/bin.
_candidates.append(path.join("/usr", "local", "osiris", "bin"))

# Stop at the first candidate folder providing all three executables.
for t in _candidates:
    set_osiris_path(t, warn=False)
    if osiris_1d and osiris_2d and osiris_3d:
        break

if not (osiris_1d and osiris_2d and osiris_3d):
    if not (osiris_1d or osiris_2d or osiris_3d):
        logger.warning("Warning: no OSIRIS executables were found.")
    else:
        if not osiris_1d:
            logger.warning("Warning: osiris-1D.e not found.")
        if not osiris_2d:
            logger.warning("Warning: osiris-2D.e not found.")
        if not osiris_3d:
            logger.warning("Warning: osiris-3D.e not found.")

    logger.warning(
        "Set the environment variable OSIRIS_PATH to a folder where the OSIRIS executables with names "
        "osiris-1D.e and so on are found.\n"
        "You can also use run.set_osiris_path or set the variables run.osiris_1d (and so on)."
    )
Пример #16
0
def run_config_grid(config,
                    run_dir,
                    prefix=None,
                    run_name="osiris_run",
                    remote_dir=None,
                    clean_dir=True,
                    prolog="",
                    epilog="",
                    create_only=False):
    """
    Queue a OSIRIS run in a compatible grid (e.g., Sun Grid Engine).

    Args:
        config (`ConfigFile`): The instance describing the configuration file.
        run_dir (str): Folder where the run will be carried.
        prefix (str): A prefix to run the command. If None, "mpirun -np X " will be used when a config with multiple
                      nodes is provided.
        run_name (str): Name of the job in the engine.
        remote_dir (str): If provided, a remote directory where the run will be carried, which might be only available
                          in the node selected by the engine. Note that if this option is used, the returned Run
                          instance will not access the remote_dir, but the run_dir. If the remote node is unable to
                          access the path (trying to create it if needed), OSIRIS will be started in the run dir and
                          errors will be logged by the queue system.
        clean_dir (bool): Whether to remove the files in the directory before execution.
        prolog (str): Shell code to run before calling OSIRIS (but once in the remote_dir if asked for).
        epilog (str): Shell code to run after calling OSIRIS.
        create_only (bool): Whether just to create the files, but not queueing the run.

    Returns:
        Run: A Run instance describing the execution.

    """
    # Clean if needed
    if clean_dir:
        # First pass: delete every file below run_dir (directories are kept).
        for root, dirs, files in walk(run_dir):
            for f in files:
                remove(path.join(root, f))

        # NOTE(review): remove() raises on failure, so this second pass can
        # only warn about files that (re)appeared after the first walk —
        # confirm the intended failure handling.
        for root, dirs, files in walk(run_dir):
            for f in files:
                logger.warning("Could not remove file %s" % f)

    # Find the needed amount of nodes
    n = config.get_nodes()
    if prefix is None:
        prefix = "mpirun -np %d " % n if n > 1 else ""
    elif prefix[-1] != " ":
        # Ensure the prefix can be prepended directly to the command.
        prefix += " "

    # copy the input file
    ensure_dir_exists(run_dir)
    config.write(path.join(run_dir, "os-stdin"))

    # Copy the osiris executable
    osiris_path = path.abspath(path.join(run_dir, "osiris"))
    # Pick the 1d/2d/3d executable matching the config's dimensionality.
    osiris = ifd(config.get_d(), osiris_1d, osiris_2d, osiris_3d)
    copyfile(osiris, osiris_path)
    ensure_executable(osiris_path)

    # Create a start.sh file with the launch script
    # (SGE directives, optional copy to remote_dir, prolog/OSIRIS/epilog).
    s = "".join([
        "#!/bin/bash\n#\n#$ -cwd\n#$ -S /bin/bash\n#$ -N %s\n" % run_name,
        "#$ -pe smp %d\n" % n if n > 1 else "", "#\n",
        "NEW_DIR=%s\nmkdir -p $NEW_DIR\ncp -r . $NEW_DIR\ncd $NEW_DIR\n" %
        remote_dir if remote_dir else "", prolog + "\n",
        "\n%s./osiris > out.txt 2> err.txt\n" % prefix, epilog + "\n"
    ])

    with open(path.join(run_dir, "start.sh"), 'w') as f:
        f.write(s)
    ensure_executable(path.join(run_dir, "start.sh"))

    if not create_only:
        # Queue the job. The qsub output is not captured and failures are
        # not checked here; the caller only gets the Run instance below.
        subprocess.Popen("qsub " +
                         path.abspath(path.join(run_dir, "start.sh")),
                         shell=True,
                         cwd=path.abspath(run_dir))

    return Run(run_dir)
Пример #17
0
def run_config(config,
               run_dir,
               prefix=None,
               clean_dir=True,
               blocking=None,
               force=None,
               mpcaller=None,
               create_only=False):
    """
    Initiate a OSIRIS run from a config instance.

    Args:
        config (`ConfigFile`): The instance describing the configuration file.
        run_dir (str): Folder where the run is carried.
        prefix (str): A prefix to run the command. If None, "mpirun -np X " will be used when a config with multiple
                      nodes is provided.
        clean_dir (bool): Whether to remove the files in the directory before execution.
        blocking (bool): Whether to wait for the run to finish.
        force (str): Set what to do if a running executable is found in the directory. Set to "ignore" to launch anyway,
                     possibly resulting in multiple instances running simultaneously; set to "kill" to terminate the
                     existing processes.
        mpcaller (MPCaller): An instance controlling multithreaded calls. If supplied, all calls will be handled by this
                     instance and the blocking parameter will be ignored.
        create_only (bool): Whether just to create the files, but not starting the run.

    Returns:
        Run: A Run instance describing the execution, or None if a running
        executable was found and force was set to neither "ignore" nor "kill".

    """
    # Find the needed amount of nodes
    n = config.get_nodes()
    if prefix is None:
        prefix = "mpirun -np %d " % n if n > 1 else ""
    elif prefix[-1] != " ":
        # Ensure the prefix can be prepended directly to the command.
        prefix += " "

    # Search for possibly running processes
    candidates = _find_running_exe(path.join(run_dir, "osiris"))
    if candidates:
        if force == "ignore":
            logger.warning("Ignored %d running exe found in %s" %
                           (len(candidates), run_dir))
        elif force == "kill":
            logger.warning("Killing %d running exe found in %s" %
                           (len(candidates), run_dir))
            for c in candidates:
                try:
                    psutil.Process(c).terminate()
                except psutil.NoSuchProcess:
                    pass  # If just ended
        else:
            # Neither "ignore" nor "kill": abort and return None.
            logger.warning("Running exe found in %s. Aborting launch." %
                           run_dir)
            return

    # Clean if needed
    if clean_dir:
        for root, dirs, files in walk(run_dir):
            for f in files:
                remove(path.join(root, f))

        # remove() raises on failure, so any file listed here appeared after
        # the first pass.
        for root, dirs, files in walk(run_dir):
            for f in files:
                logger.warning("Could not remove file %s" % f)

    # If the run is restartable, make the if_restart variable explicit
    if "restart" in config and "ndump_fac" in config["restart"] and config[
            "restart"]["ndump_fac"]:
        if "if_restart" not in config["restart"]:
            config["restart"][
                "if_restart"] = False  # This is the default value

    # copy the input file
    ensure_dir_exists(run_dir)
    config.write(path.join(run_dir, "os-stdin"))

    # Copy the osiris executable
    osiris_path = path.abspath(path.join(run_dir, "osiris"))
    osiris = ifd(config.get_d(), osiris_1d, osiris_2d, osiris_3d)
    copyfile(osiris, osiris_path)
    ensure_executable(osiris_path)

    # Create a start.sh file to ease manual launch
    with open(path.join(run_dir, "start.sh"), 'w') as f:
        f.write("#!/bin/bash\n%s./osiris > out.txt 2> err.txt" % prefix)
    ensure_executable(path.join(run_dir, "start.sh"))

    # Create a continue.sh file to ease manual relaunch of aborted executions
    with open(path.join(run_dir, "continue.sh"), 'w') as f:
        # BUG FIX: the relaunch command was previously generated as
        # "./%s osiris" % prefix, which produced e.g. "./mpirun -np 2  osiris".
        # It must match start.sh above: "<prefix>./osiris".
        f.write(
            "#!/bin/bash"
            "\nsed -i -e \"s/if_restart = .false./if_restart = .true./g\" os-stdin"
            "\n%s./osiris >> out.txt 2>> err.txt" % prefix)
    ensure_executable(path.join(run_dir, "continue.sh"))

    if create_only:
        return Run(run_dir)

    if mpcaller is not None:
        run = Run(run_dir)
        # Set the run instance to update the process info when the call is made.
        mpcaller.add_call(Call(_execute_run, prefix, osiris_path, run_dir))
        return run
    else:
        proc = subprocess.Popen(prefix + osiris_path + " > out.txt 2> err.txt",
                                shell=True,
                                cwd=path.abspath(run_dir))
        if blocking:
            proc.wait()
        else:  # Sleep a little to check for quickly appearing errors and to allow the shell to start osiris
            sleep(0.2)

        # BEWARE: Perhaps under extreme circumstances, OSIRIS might have not started despite sleeping.
        # This could be solved calling the update method of the Run instance.
        # Consider this a feature instead of a bug :P

        run = Run(run_dir)

        # Try to detect errors checking the output
        if run._has_error():
            logger.warning(
                "Error detected while launching %s.\nCheck out.txt and err.txt for more information or re-run in console."
                % run_dir)
        return run
Пример #18
0
    def time_2d_animation(self, output_path=None, dataset_selector=None, axes_selector=None, time_selector=None,
                          dpi=200, fps=1, cmap=None, norm=None, rasterized=True, z_min=None,
                          z_max=None, latex_label=True, interval=200):
        """
        Generate a plot of 2d data as a color map which animated in time.

        If an output path with a suitable extension is supplied, the method will export it. Available formats are mp4
        and gif. The returned objects allow for minimal customization and representation. For example in Jupyter you
        might use `IPython.display.HTML(animation.to_html5_video())`, where `animation` is the returned `FuncAnimation`
        instance.

        Note:
            Exporting a high resolution animated gif with many frames might eat your RAM.

        Args:
            output_path (str): The place where the plot is saved. If "" or None, the plot is shown in matplotlib.
            dataset_selector: See :func:`~duat.osiris.plot.Diagnostic.get_generator` method.
            axes_selector: See :func:`~duat.osiris.plot.Diagnostic.get_generator` method.
            time_selector: See :func:`~duat.osiris.plot.Diagnostic.get_generator` method.
            interval (float): Delay between frames in ms. If exporting to mp4, the fps is used instead to generate the
                              file, although the returned objects do use this value.
            dpi (int): The resolution of the frames in dots per inch (only if exporting).
            fps (int): The frames per seconds (only if exporting to mp4).
            latex_label (bool): Whether for use LaTeX code for the plot.
            cmap (str or `matplotlib.colors.Colormap`): The Colormap to use in the plot.
            norm (str or `matplotlib.colors.Normalize`): How to scale the colormap. For advanced manipulation, use some
                           Normalize subclass, e.g., colors.SymLogNorm(0.01). Automatic scales can be selected with
                           the following strings:

                           * "lin": Linear scale from minimum to maximum.
                           * "log": Logarithmic scale from minimum to maximum up to vmax/vmin>1E9, otherwise increasing vmin.


            rasterized (bool): Whether the map is rasterized. This does not apply to axes, title... Note non-rasterized
                               images with large amount of data exported to PDF might be challenging to handle.
        Returns:
            (`matplotlib.figure.Figure`, `matplotlib.axes.Axes`, `matplotlib.animation.FuncAnimation`):
            Objects representing the generated plot and its animation.

        Raises:
            FileNotFoundError: If tried to export to mp4 but ffmpeg is not found in the system.

        """
        # NOTE(review): z_min and z_max are accepted but never used in this
        # method body — confirm whether they should constrain the color scale.
        if output_path:
            ensure_dir_exists(os.path.dirname(output_path))
        axes = self.get_axes(dataset_selector=dataset_selector, axes_selector=axes_selector)
        if len(axes) != 2:
            raise ValueError("Expected 2 axes plot, but %d were provided" % len(axes))

        # Lazy generator yielding one 2d dataset per time snapshot.
        gen = self.get_generator(dataset_selector=dataset_selector, axes_selector=axes_selector,
                                 time_selector=time_selector)

        # Set plot labels
        fig, ax = plt.subplots()
        fig.set_tight_layout(True)

        x_name = axes[0]["LONG_NAME"]
        x_units = axes[0]["UNITS"]
        y_name = axes[1]["LONG_NAME"]
        y_units = axes[1]["UNITS"]
        title_name = self.data_name
        title_units = self.units

        ax.set_xlabel(_create_label(x_name, x_units, latex_label))
        ax.set_ylabel(_create_label(y_name, y_units, latex_label))

        # Gather the points
        x_min, x_max = axes[0]["MIN"], axes[0]["MAX"]
        y_min, y_max = axes[1]["MIN"], axes[1]["MAX"]
        # First snapshot, transposed to matplotlib's (rows=y, cols=x) layout.
        z = np.transpose(np.asarray(next(gen)))

        time_list = self.get_time_list(time_selector)
        if len(time_list) < 2:
            raise ValueError("At least two time snapshots are needed to make an animation")

        # Resolve "lin"/"log"/None into a Normalize instance from the first frame.
        norm = _autonorm(norm, z)

        plot_function = ax.pcolormesh
        if rasterized:
            # Rasterizing in contourf is a bit tricky
            # Cf. http://stackoverflow.com/questions/33250005/size-of-matplotlib-contourf-image-files
            plot = plot_function(axes[0]["LIST"], axes[1]["LIST"], z, norm=norm, cmap=cmap, zorder=-9)
            ax.set_rasterization_zorder(-1)
        else:
            plot = plot_function(axes[0]["LIST"], axes[1]["LIST"], z, norm=norm, cmap=cmap)

        ax.set_xlim(x_min, x_max)
        ax.set_ylim(y_min, y_max)

        ax.set_title(_create_label(title_name, title_units, latex_label))

        _fix_colorbar(fig.colorbar(plot))

        # Prepare a function for the updates
        def update(i):
            """Update the plot, returning the artists which must be redrawn."""
            try:
                new_dataset = np.transpose(np.asarray(next(gen)))
            except StopIteration:
                logger.warning("Tried to add a frame to the animation, but all data was used.")
                return
            label = 't = {0}'.format(time_list[i])
            # BEWARE: The set_array syntax is rather problematic. Depending on the shading used in pcolormesh, the
            #         following might not work.
            plot.set_array(new_dataset[:-1, :-1].ravel())
            # For more details, check lumbric's answer to
            # https://stackoverflow.com/questions/18797175/animation-with-pcolormesh-routine-in-matplotlib-how-do-i-initialize-the-data
            ax.set_title(label)
            return plot, ax

        # NOTE(review): frames stop at len(time_list)-3 here, while the mp4
        # export loop below iterates up to len(time_list)-2 — confirm which
        # end index is intended.
        anim = FuncAnimation(fig, update, frames=range(1, len(time_list) - 2), interval=interval)

        if not output_path:  # "" or None
            pass
        else:
            filename = os.path.basename(output_path)
            if "." in filename:
                extension = output_path.split(".")[-1].lower()
            else:
                extension = None
            if extension == "gif":
                anim.save(output_path, dpi=dpi, writer='imagemagick')
            elif extension == "mp4":
                metadata = dict(title=os.path.split(self.data_path)[-1], artist='duat', comment=self.data_path)
                writer = FFMpegWriter(fps=fps, metadata=metadata)
                with writer.saving(fig, output_path, dpi):
                    # Iterate over frames
                    for i in range(1, len(time_list) - 1):
                        update(i)
                        writer.grab_frame()
                    # Keep showing the last frame for the fixed time
                    writer.grab_frame()
            else:
                logger.warning("Unknown extension in path %s. No output produced." % output_path)

        plt.close()

        return fig, ax, anim
Пример #19
0
def run_variation(config,
                  variation,
                  run_base,
                  caller=None,
                  on_existing=None,
                  **kwargs):
    """
    Make consecutive calls to :func:`~duat.osiris.run.run_config` with ConfigFiles generated from a variation.

    Args:
        config (`ConfigFile`): Base configuration file.
        variation (`Variation`): Description of the variations to apply.
        run_base (str): Path to the directory where the runs will take place, each in a folder named var_number.
        caller (int or `MPCaller`): If supplied, the calls will be managed by a MPCaller instance. If an int is provided
                                    an MPCaller with such a number of threads will be created. Provide an instance if
                                    interested in further controlling.
        on_existing (str): Action to do if a run of the variation exists. Only the names of the subfolders are used for
                           this purpose, which means the run could be different if the variation or the path have
                           changed. Set to "ignore" to leave untouched existing runs or set to "overwrite" to delete the
                           data and run a new instance. Default is like "ignore" but raising a warning.
        **kwargs: Keyword arguments to pass to :func:`~duat.osiris.run.run_config`

    Returns:
        list of Run: List with the Run instances in the variation directory.

    Raises:
        ValueError: If on_existing is neither None, "ignore" nor "overwrite" (case-insensitive).

    """
    r_list = []

    # Validate and normalize on_existing before launching anything
    if on_existing is not None:
        if not isinstance(on_existing, str):
            raise ValueError("Invalid on_existing parameter")
        on_existing = on_existing.lower()
        if on_existing not in ["ignore", "overwrite"]:
            raise ValueError("Invalid on_existing parameter")

    # If there is no need for an MPCaller (serial execution or file creation only)
    if caller is None or kwargs.get("create_only"):
        # NOTE(review): this branch does not consult on_existing — existing runs are
        # handed to run_config unconditionally. Confirm this asymmetry is intended.
        for i, c in enumerate(variation.get_generator(config)):
            r = run_config(c, path.join(run_base, "var_" + str(i)), **kwargs)
            r_list.append(r)
    else:
        if isinstance(caller, int):
            _caller = MPCaller(caller)
        else:
            # Otherwise assume it was a MPCaller instance
            _caller = caller

        for i, c in enumerate(variation.get_generator(config)):
            var_dir = path.join(run_base, "var_" + str(i))
            if path.isfile(path.join(var_dir, "os-stdin")):
                # The run already exists: act according to on_existing
                if on_existing is None:
                    logger.warning("Skipping existing variation item " +
                                   var_dir)
                elif on_existing == "ignore":
                    pass
                else:  # overwrite
                    run_config(c, var_dir, mpcaller=_caller, **kwargs)
            else:
                # The item did not exist
                run_config(c, var_dir, mpcaller=_caller, **kwargs)
            # A Run instance is collected for every item, launched or skipped
            r_list.append(Run(var_dir))

        if isinstance(caller, int):
            # If the MPCaller was created in this method, threads should die after execution
            _caller.wait_calls(blocking=False)
            # Nevertheless, processes seems not to be discarded until a new call to this method is made
    return r_list
Пример #20
0
    def time_1d_animation(self, output_path=None, dataset_selector=None, axes_selector=None, time_selector=None,
                          dpi=200, fps=1, scale_mode="expand",
                          latex_label=True, interval=200):
        """
        Generate a plot of 1d data animated in time.
        
        If an output path with a suitable extension is supplied, the method will export it. Available formats are mp4
        and gif. The returned objects allow for minimal customization and representation. For example in Jupyter you
        might use `IPython.display.HTML(animation.to_html5_video())`, where `animation` is the returned `FuncAnimation`
        instance.
        
        Note:
            Exporting a high resolution animated gif with many frames might eat your RAM.

        Args:
            output_path (str): The place where the plot is saved. If "" or None, the plot is shown in matplotlib.
            dataset_selector: See :func:`~duat.osiris.plot.Diagnostic.get_generator` method.
            axes_selector: See :func:`~duat.osiris.plot.Diagnostic.get_generator` method.
            time_selector: See :func:`~duat.osiris.plot.Diagnostic.get_generator` method.
            interval (float): Delay between frames in ms. If exporting to mp4, the fps is used instead to generate the
                              file, although the returned objects do use this value.
            dpi (int): The resolution of the frames in dots per inch (only if exporting).
            fps (int): The frames per seconds (only if exporting to mp4).
            scale_mode (str): How the scale is changed through time. Available methods are:

                * "expand": The y limits increase when needed, but they don't decrease.
                * "adjust_always": Always change the y limits to those of the data.
                * "max": Use the maximum range from the beginning.

            latex_label (bool): Whether for use LaTeX code for the plot.
            
        Returns:
            (`matplotlib.figure.Figure`, `matplotlib.axes.Axes`, `matplotlib.animation.FuncAnimation`):
            Objects representing the generated plot and its animation.
            
        Raises:
            FileNotFoundError: If tried to export to mp4 but ffmpeg is not found in the system.
            ValueError: If the selected reduction does not leave exactly one axis.

        """
        if output_path:
            # Make sure the target directory exists before any export attempt
            ensure_dir_exists(os.path.dirname(output_path))
        axes = self.get_axes(dataset_selector=dataset_selector, axes_selector=axes_selector)
        if len(axes) != 1:
            raise ValueError("Expected 1 axis plot, but %d were provided" % len(axes))
        axis = axes[0]

        # Generator yielding one 1d array per selected time step
        gen = self.get_generator(dataset_selector=dataset_selector, axes_selector=axes_selector,
                                 time_selector=time_selector)

        # Set plot labels
        fig, ax = plt.subplots()
        fig.set_tight_layout(True)

        x_name = axis["LONG_NAME"]
        x_units = axis["UNITS"]
        y_name = self.data_name
        y_units = self.units

        ax.set_xlabel(_create_label(x_name, x_units, latex_label))
        ax.set_ylabel(_create_label(y_name, y_units, latex_label))

        # Plot the points
        x_min, x_max = axis["MIN"], axis["MAX"]
        # next(gen) here consumes the FIRST frame; the animation frames therefore start at index 1
        plot_data, = ax.plot(axis["LIST"], next(gen))
        ax.set_xlim(x_min, x_max)

        if scale_mode == "max":
            # Get a list (generator) with the mins and maxs in each time step.
            # A fresh generator is built so the main `gen` is not consumed here.
            min_max_list = map(lambda l: [min(l), max(l)],
                               self.get_generator(dataset_selector=dataset_selector, axes_selector=axes_selector,
                                                  time_selector=time_selector))
            f = lambda mins, maxs: (min(mins), max(maxs))
            y_min, y_max = f(*zip(*min_max_list))
            ax.set_ylim(y_min, y_max)

        time_list = self.get_time_list(time_selector)

        # Prepare a function for the updates
        def update(i):
            """Update the plot, returning the artists which must be redrawn."""
            try:
                # Frames are pulled from the shared generator, NOT indexed by i;
                # i is only used to label the frame with its time
                new_dataset = next(gen)
            except StopIteration:
                logger.warning("Tried to add a frame to the animation, but all data was used.")
                return
            label = 't = {0}'.format(time_list[i])
            plot_data.set_ydata(new_dataset[:])
            ax.set_title(label)
            if not scale_mode or scale_mode == "max":
                # Fixed limits: nothing to update
                pass
            elif scale_mode == "expand":
                # Grow the y limits when the new data exceeds them, never shrink
                prev = ax.get_ylim()
                data_limit = [min(new_dataset), max(new_dataset)]
                ax.set_ylim(min(prev[0], data_limit[0]), max(prev[1], data_limit[1]))
            elif scale_mode == "adjust_always":
                ax.set_ylim(min(new_dataset), max(new_dataset))
            return plot_data, ax

        # NOTE(review): frames stop at len(time_list) - 3 here, while the mp4 export
        # below iterates up to len(time_list) - 2 — confirm the off-by-one is intended.
        anim = FuncAnimation(fig, update, frames=range(1, len(time_list) - 2), interval=interval)

        if not output_path:  # "" or None
            pass
        else:
            # Choose the export format from the file extension
            filename = os.path.basename(output_path)
            if "." in filename:
                extension = output_path.split(".")[-1].lower()
            else:
                extension = None
            if extension == "gif":
                anim.save(output_path, dpi=dpi, writer='imagemagick')
            elif extension == "mp4":
                metadata = dict(title=os.path.split(self.data_path)[-1], artist='duat', comment=self.data_path)
                writer = FFMpegWriter(fps=fps, metadata=metadata)
                with writer.saving(fig, output_path, dpi):
                    # Iterate over frames
                    for i in range(1, len(time_list) - 1):
                        update(i)
                        writer.grab_frame()
                    # Keep showing the last frame for the fixed time
                    writer.grab_frame()
            else:
                logger.warning("Unknown extension in path %s. No output produced." % output_path)

        # Close the figure so it is not displayed as a static duplicate
        plt.close()

        return fig, ax, anim
Пример #21
0
def run_variation_grid(config,
                       variation,
                       run_base,
                       run_name="os-var_",
                       remote_dir=None,
                       on_existing=None,
                       **kwargs):
    """
    Make consecutive calls to :func:`~duat.osiris.run.run_config_grid` with ConfigFiles generated from a variation.

    Args:
        config (`ConfigFile`): Base configuration file.
        variation (`Variation`): Description of the variations to apply.
        run_base (str): Path to the directory where the runs will take place, each in a folder named var_number.
        run_name (str): Prefix to the name to use in the grid system.
        remote_dir (str): If provided, a remote directory where the runs will be carried, which might be only available
                          in the node selected by the engine. See :func:`~duat.osiris.run.run_config_grid`.
        on_existing (str): Action to do if a run of the variation exists. Only the names of the subfolders are used for
                           this purpose, which means the run could be different if the variation or the path have
                           changed. Set to "ignore" to leave untouched existing runs or set to "overwrite" to delete the
                           data and run a new instance. Default is like "ignore" but raising a warning.
        **kwargs: Keyword arguments to pass to :func:`~duat.osiris.run.run_config_grid`.

    Returns:
        list of Run: List with the Run instances in the variation directory.

    Raises:
        ValueError: If on_existing is neither None, "ignore" nor "overwrite" (case-insensitive).

    """
    r_list = []

    # Validate and normalize on_existing before launching anything
    if on_existing is not None:
        if not isinstance(on_existing, str):
            raise ValueError("Invalid on_existing parameter")
        on_existing = on_existing.lower()
        if on_existing not in ["ignore", "overwrite"]:
            raise ValueError("Invalid on_existing parameter")

    for i, c in enumerate(variation.get_generator(config)):
        var_name = "var_" + str(i)
        var_dir = path.join(run_base, var_name)
        exists = path.isfile(path.join(var_dir, "os-stdin"))

        if exists and on_existing is None:
            # Default behavior: skip, but warn the user
            logger.warning("Skipping existing variation item " + var_dir)
        elif exists and on_existing == "ignore":
            pass
        else:
            # Either the item did not exist or on_existing == "overwrite".
            # Build the call arguments once instead of duplicating the call:
            # remote_dir is only forwarded when truthy, matching run_config_grid's default otherwise.
            grid_kwargs = dict(kwargs)
            if remote_dir:
                grid_kwargs["remote_dir"] = path.join(remote_dir, var_name)
            run_config_grid(c,
                            var_dir,
                            run_name=run_name + str(i),
                            **grid_kwargs)

        # A Run instance is collected for every item, launched or skipped
        r_list.append(Run(var_dir))

    return r_list
Пример #22
0
    def get_generator(self, dataset_selector=None, axes_selector=None, time_selector=None):
        """
        Get a generator providing data from the file.

        Calling this method returns a generator which, when called, will provide data for increasing times (unless
        modified by time_selector parameter). The data might be reduced either by selecting a position in an axis (or
        a dataset) or by using a function along some axis (or datasets), e.g., a sum.

        This data is provided as numpy arrays where the first axis refers to dataset coordinate (if present) and next
        to (non-reduced) axis in the order they are found in the files.

        Args:
            dataset_selector (str, int or callable): Instructions to reduce datasets. An int selects a dataset in human
                order, a str selects it by name. A function taking a list and returning a scalar can be used to reduce
                the data, e.g., sum, mean...

            axes_selector (tuple): Instructions to reduce axes data. It must be
                a tuple of the same length of the number axes or None to perform no reduction.
                Each element can be of the following types:

                    * int: Select the item in the given position.
                    * None: No reduction is performed in this axis.
                    * ScaledFunction: A function applied in a range selected by simulation units.
                    * callable (default): Reduce the data along this axes using the given function (e.g., mean, max, sum...).



            time_selector (slice or ScaledSlice): A slice or ScaledSlice instance selecting the points in time to take.
                A slice selects times from the list returned by :func:`~duat.osiris.plot.Diagnostic.get_time_list`.
                A ScaledSlice chooses a slice that best represents a choice in terms of time units.

        Returns:
            generator: A generator which provides the data.

        """
        multiple_datasets = False  # If a dataset list is going to be returned
        # Build a per-file dataset reducer; the [:] reads copy the data eagerly
        # so the h5py file can be closed before the data is used.
        if dataset_selector is not None:
            if self.shape[1] == 1:
                logger.warning("Single dataset found. Ignoring the provided dataset_selector.")

                def f_dataset_selector(f):
                    return f[self.keys[0]][:]
            else:
                if isinstance(dataset_selector, int):
                    # Map the positional index to the dataset's name
                    dataset_selector = self.keys[dataset_selector]

                if isinstance(dataset_selector, str):  # If it was int or str
                    def f_dataset_selector(f):
                        return f[dataset_selector][:]
                else:  # Assumed function
                    # Reduce across datasets: axis 0 of the stacked list is the dataset axis
                    def f_dataset_selector(f):
                        return np.apply_along_axis(dataset_selector, 0, [f[key][:] for key in self.keys])

        else:
            if self.shape[1] > 1:
                # No reduction requested and several datasets exist: stack them,
                # so the returned array gets a leading dataset axis
                multiple_datasets = True

                def f_dataset_selector(f):
                    return np.array([f[key][:] for key in self.keys])
            else:
                def f_dataset_selector(f):
                    return f[self.keys[0]][:]

        # Build the axes reducer applied after the dataset reduction
        if axes_selector is not None:
            if len(axes_selector) != len(self.axes):
                raise ValueError(
                    "Invalid axes_selector parameter. Length must be %d. Check the axes of the Diagnostic instance." % len(
                        self.axes))

            def f_axes_selector(x):
                # offset tracks how far array axes have shifted relative to self.axes
                # indices: each completed reduction removes one array axis, so it grows by 1.
                # NOTE(review): the initial value (1 when a leading dataset axis is present)
                # makes axes[i] map to array axis i - 1, while the stacked array would place
                # it at i + 1 — confirm this initialization against upstream duat.
                offset = 1 if multiple_datasets else 0  # If multiple dataset, do not count its axis for reduction
                for i, sel in enumerate(axes_selector):
                    if sel is not None:
                        if isinstance(sel, int):
                            x = np.take(x, sel, axis=i - offset)
                        elif isinstance(sel, ScaledFunction):
                            # Translate the simulation-unit range into a function on this axis' grid
                            x = np.apply_along_axis(sel._get_function(self.axes[i]['LIST']), i - offset, x)
                        else:  # Assumed function
                            x = np.apply_along_axis(sel, i - offset, x)
                        offset += 1
                return x
        else:
            def f_axes_selector(x):
                return x

        # Normalize the time selection into a plain slice (or None)
        if time_selector is not None:
            if isinstance(time_selector, ScaledSlice):
                time_selector = self._scaled_slice_to_slice(time_selector)
            elif not isinstance(time_selector, slice):
                logger.warning("Invalid time_selector parameter ignored. Use a slice or a ScaledSlice instead.")
                time_selector = None

        def gen():
            for file_name in (self.file_list[time_selector] if time_selector else self.file_list):
                with h5py.File(file_name, "r") as f:
                    data = f_dataset_selector(f)
                # Make sure to exit the context manager before yielding
                # h5py might accuse you of murdering identifiers if you don't!
                yield f_axes_selector(data)

        return gen()