    def dispatch_smoothing_tasks(self):
        """
        Dispatch the smoothing tasks in self.tasks that have not been
        submitted yet and have not exceeded the maximum number of reposts.
        """
        from salvus.flow import api as sapi

        dispatching_msg = True
        for task_name, task_dict in self.tasks.items():
            if (not task_dict["submitted"]
                    and task_dict["reposts"] < self.comm.project.max_reposts):
                if dispatching_msg:
                    self.print("Dispatching Smoothing Tasks")
                    dispatching_msg = False

                sims = self.comm.smoother.get_sims_for_smoothing_task(
                    reference_model=task_dict["reference_model"],
                    model_to_smooth=task_dict["model_to_smooth"],
                    smoothing_lengths=task_dict["smoothing_lengths"],
                    smoothing_parameters=task_dict["smoothing_parameters"],
                )

                job = sapi.run_many_async(
                    input_files=sims,
                    site_name=self.comm.project.smoothing_site_name,
                    ranks_per_job=self.comm.project.smoothing_ranks,
                    wall_time_in_seconds_per_job=(
                        self.comm.project.smoothing_wall_time),
                )
                self.tasks[task_name]["submitted"] = True
                self.tasks[task_name]["job_name"] = job.job_array_name
                self._write_tasks(self.tasks)
            elif task_dict["reposts"] >= self.comm.project.max_reposts:
                raise Exception(
                    "Too many reposts in smoothing, "
                    "please check the time steps and the inputs."
                    "and reset the number of reposts in the toml file.")
Example #2
import os
from typing import List, Union

import toml

# LASIFError is assumed to live in lasif.exceptions here; adjust the import
# path to match the installed LASIF version.
from lasif.exceptions import LASIFError


def submit_salvus_simulation(
    comm: object,
    simulations: Union[List[object], object],
    events: Union[List[str], str],
    iteration: str,
    sim_type: str,
) -> object:
    """
    Submit one or more Salvus simulations to the site defined in the
    config file, using the run settings specified there.

    :param comm: The Lasif communicator object
    :type comm: object
    :param simulations: A simulation object, or a list of simulation objects
        to be submitted as a job array.
    :type simulations: Union[List[object], object]
    :param events: Names of the events belonging to the simulations, in the
        same order, so that each job can be matched to its event.
    :type events: Union[List[str], str]
    :param iteration: Name of the iteration, needed to know where to download
        the output files once the jobs are done.
    :type iteration: str
    :param sim_type: Simulation type, either "forward" or "adjoint".
    :type sim_type: str
    :return: SalvusJob object or an array of them
    :rtype: object
    """
    from salvus.flow.api import run_async, run_many_async

    if sim_type not in ["forward", "adjoint"]:
        raise LASIFError("sim_type needs to be forward or adjoint")

    array = False
    if isinstance(simulations, list):
        array = True
        if not isinstance(events, list):
            raise LASIFError(
                "If simulations are given as a list, events must be a list "
                "as well, with the corresponding events in the same order.")
    else:
        if isinstance(events, list):
            raise LASIFError("If there is only one simulation object, "
                             "there should be only one event")

    iteration = comm.iterations.get_long_iteration_name(iteration)

    if sim_type == "forward":
        toml_file = (comm.project.paths["salvus_files"] / iteration /
                     "forward_jobs.toml")
    elif sim_type == "adjoint":
        toml_file = (comm.project.paths["salvus_files"] / iteration /
                     "adjoint_jobs.toml")

    if os.path.exists(toml_file):
        jobs = toml.load(toml_file)
    else:
        jobs = {}

    site_name = comm.project.salvus_settings["site_name"]
    ranks = comm.project.salvus_settings["ranks"]
    wall_time = comm.project.salvus_settings["wall_time_in_s"]

    if array:
        job = run_many_async(
            site_name=site_name,
            input_files=simulations,
            ranks_per_job=ranks,
            wall_time_in_seconds_per_job=wall_time,
        )
        jobs["array_name"] = job.job_array_name
        for _i, j in enumerate(job.jobs):
            jobs[events[_i]] = j.job_name
    else:
        job = run_async(
            site_name=site_name,
            input_file=simulations,
            ranks=ranks,
            wall_time_in_seconds=wall_time,
        )
        jobs[events] = job.job_name

    with open(toml_file, mode="w") as fh:
        toml.dump(jobs, fh)
        print(f"Wrote job information into {toml_file}")
    return job
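
A short usage sketch for submit_salvus_simulation; comm, the simulation objects, and the event names are placeholders, not taken from the code above.

# Hypothetical call submitting two forward simulations as one job array.
# `comm` is an initialized LASIF communicator, `sim_a` and `sim_b` are
# Salvus simulation objects; all three are placeholders here.
job = submit_salvus_simulation(
    comm=comm,
    simulations=[sim_a, sim_b],
    events=["EVENT_A", "EVENT_B"],  # same order as the simulations
    iteration="ITERATION_01",
    sim_type="forward",
)
# A single simulation is submitted without lists, e.g.
# submit_salvus_simulation(comm, sim_a, "EVENT_A", "ITERATION_01", "adjoint")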
Example #3
    def run_remote_smoother(
        self,
        event: str,
    ):
        """
        Run the remote smoother. The settings are specified in the inversion
        toml file; make sure the smoothing config has already been generated.

        :param event: Name of event
        :type event: str
        """
        import os

        from salvus.opt import smoothing
        import salvus.flow.simple_config as sc
        from salvus.flow.api import get_site
        from salvus.flow import api as sapi

        if self.comm.project.meshes == "multi-mesh":
            mesh = self.comm.lasif.find_event_mesh(event)
        else:
            mesh = self.comm.lasif.get_simulation_mesh(event)
        freq = 1.0 / self.comm.project.min_period
        smoothing_lengths = self.comm.project.smoothing_lengths

        # get remote gradient filename
        job = self.comm.salvus_flow.get_job(event, "adjoint")
        output_files = job.get_output_files()
        remote_grad = str(output_files[0][("adjoint", "gradient",
                                           "output_filename")])

        # Set up the remote site and directories (paths hard-coded for now)
        daint = get_site(self.comm.project.site_name)
        username = daint.config["ssh_settings"]["username"]
        remote_diff_dir = os.path.join("/scratch/snx3000", username,
                                       "diff_models")
        local_diff_model_dir = "DIFF_MODELS"

        if not os.path.exists(local_diff_model_dir):
            os.mkdir(local_diff_model_dir)

        if not daint.remote_exists(remote_diff_dir):
            daint.remote_mkdir(remote_diff_dir)

        sims = []
        for param in self.comm.project.inversion_params:
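            # The reference velocity defines the wavelengths used by the
            # anisotropic smoother: velocity parameters are smoothed relative
            # to themselves, and density (RHO) relative to VP or VPV.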
            if param.startswith("V"):
                reference_velocity = param
            elif param == "RHO":
                if "VP" in self.comm.project.inversion_params:
                    reference_velocity = "VP"
                elif "VPV" in self.comm.project.inversion_params:
                    reference_velocity = "VPV"

            unique_id = (
                "_".join([str(i).replace(".", "")
                          for i in smoothing_lengths]) + "_" +
                str(self.comm.project.min_period))

            diff_model_file = unique_id + f"diff_model_{param}.h5"
            if self.comm.project.meshes == "multi-mesh":
                diff_model_file = event + "_" + diff_model_file

            remote_diff_model = os.path.join(remote_diff_dir, diff_model_file)

            diff_model_file = os.path.join(local_diff_model_dir,
                                           diff_model_file)

            if not os.path.exists(diff_model_file):
                smooth = smoothing.AnisotropicModelDependent(
                    reference_frequency_in_hertz=freq,
                    smoothing_lengths_in_wavelengths=smoothing_lengths,
                    reference_model=mesh,
                    reference_velocity=reference_velocity,
                )
                diff_model = smooth.get_diffusion_model(mesh)
                diff_model.write_h5(diff_model_file)

            if not daint.remote_exists(remote_diff_model):
                daint.remote_put(diff_model_file, remote_diff_model)

            sim = sc.simulation.Diffusion(mesh=diff_model_file)

            if self.comm.project.meshes == "multi-mesh":
                tensor_order = 4
            else:
                tensor_order = 2

            sim.domain.polynomial_order = tensor_order
            sim.physics.diffusion_equation.time_step_in_seconds = (
                self.comm.project.smoothing_timestep)
            sim.physics.diffusion_equation.courant_number = 0.06

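            # Paths prefixed with "REMOTE:" refer to files that already exist
            # on the remote site (the gradient and the diffusion model), so
            # Salvus Flow does not upload local copies.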
            sim.physics.diffusion_equation.initial_values.filename = (
                "REMOTE:" + remote_grad)
            sim.physics.diffusion_equation.initial_values.format = "hdf5"
            sim.physics.diffusion_equation.initial_values.field = f"{param}"
            sim.physics.diffusion_equation.final_values.filename = (
                f"{param}.h5")

            sim.domain.mesh.filename = "REMOTE:" + remote_diff_model
            sim.domain.model.filename = "REMOTE:" + remote_diff_model
            sim.domain.geometry.filename = "REMOTE:" + remote_diff_model

            sim.validate()

            # append sim to array
            sims.append(sim)

        job = sapi.run_many_async(
            input_files=sims,
            site_name=self.comm.project.smoothing_site_name,
            ranks_per_job=self.comm.project.smoothing_ranks,
            wall_time_in_seconds_per_job=self.comm.project.smoothing_wall_time,
        )
        if self.comm.project.inversion_mode == "mini-batch":
            self.comm.project.change_attribute(
                f'smoothing_job["{event}"]["name"]', job.job_array_name)
            self.comm.project.change_attribute(
                f'smoothing_job["{event}"]["submitted"]', True)
        else:
            self.comm.project.change_attribute('smoothing_job["name"]',
                                               job.job_array_name)
            self.comm.project.change_attribute('smoothing_job["submitted"]',
                                               True)
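
A possible driver loop for run_remote_smoother in a mini-batch inversion; the surrounding smoother object and the event list are assumptions for illustration.

# Hypothetical usage: smooth the gradients of every event in the current
# mini-batch. `smoother` is an instance of the class defining
# run_remote_smoother and `events_in_batch` is a placeholder list.
events_in_batch = ["EVENT_A", "EVENT_B"]
for event in events_in_batch:
    smoother.run_remote_smoother(event)
# Each call submits one job array containing a diffusion simulation per
# inversion parameter and stores its name in the project's smoothing_job
# attributes for later monitoring.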