Code example #1
    def get_source_object(self, event_name: str):
        """
        Create the source object that the simulation wants

        :param event_name: Name of event
        :type event_name: str
        """

        from salvus.flow.simple_config import source, stf

        iteration = self.comm.project.current_iteration
        src_info = self.comm.lasif.get_source(event_name)
        stf_file = self.comm.lasif.find_stf(iteration)
        side_set = "r1"
        if isinstance(src_info, list):
            src_info = src_info[0]
        if self.comm.project.meshes == "multi-mesh":
            src = source.seismology.SideSetMomentTensorPoint3D(
                latitude=src_info["latitude"],
                longitude=src_info["longitude"],
                depth_in_m=src_info["depth_in_m"],
                mrr=src_info["mrr"],
                mtt=src_info["mtt"],
                mpp=src_info["mpp"],
                mtp=src_info["mtp"],
                mrp=src_info["mrp"],
                mrt=src_info["mrt"],
                source_time_function=stf.Custom(filename=stf_file,
                                                dataset_name="/source"),
                side_set_name=side_set,
            )
            # print(f"Source info: {src_info}")
        else:
            if self.comm.project.ocean_loading["use"]:
                side_set = "r1_ol"
            src = source.seismology.SideSetMomentTensorPoint3D(
                latitude=src_info["latitude"],
                longitude=src_info["longitude"],
                depth_in_m=src_info["depth_in_m"],
                mrr=src_info["mrr"],
                mtt=src_info["mtt"],
                mpp=src_info["mpp"],
                mtp=src_info["mtp"],
                mrp=src_info["mrp"],
                mrt=src_info["mrt"],
                side_set_name=side_set,
                source_time_function=stf.Custom(filename=stf_file,
                                                dataset_name="/source"),
            )

        return src
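For reference, a minimal standalone sketch of the same side-set source construction, with hypothetical event values and a hypothetical local stf.h5 file; in the method above these values come from comm.lasif.get_source() and comm.lasif.find_stf(), and side_set_name is "r1" (or "r1_ol" when ocean loading is enabled):

# Sketch only: hypothetical moment-tensor values, mirroring the call above.
from salvus.flow.simple_config import source, stf

example_src = source.seismology.SideSetMomentTensorPoint3D(
    latitude=63.6,
    longitude=-19.6,
    depth_in_m=10000.0,
    mrr=1.0e17, mtt=1.0e17, mpp=1.0e17,
    mtp=0.0, mrp=0.0, mrt=0.0,
    side_set_name="r1",
    source_time_function=stf.Custom(filename="stf.h5",
                                    dataset_name="/source"),
)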
Code example #2
    def get_source_object(self, event_name: str):
        """
        Create the source object that the simulation wants

        :param event_name: Name of event
        :type event_name: str
        """

        from salvus.flow.simple_config import source
        from salvus.flow.simple_config import stf

        iteration = self.comm.project.current_iteration
        src_info = self.comm.lasif.get_source(event_name)
        stf_file = self.comm.lasif.find_stf(iteration)
        if isinstance(src_info, list):
            src_info = src_info[0]

        src = source.seismology.MomentTensorPoint3D(
            latitude=src_info["latitude"],
            longitude=src_info["longitude"],
            depth_in_m=src_info["depth_in_m"],
            mrr=src_info["mrr"],
            mtt=src_info["mtt"],
            mpp=src_info["mpp"],
            mtp=src_info["mtp"],
            mrp=src_info["mrp"],
            mrt=src_info["mrt"],
            source_time_function=stf.Custom(
                filename=stf_file, dataset_name="/source"
            ),
        )

        return src
Code example #3
from typing import List


def get_adjoint_source(comm: object, event: str,
                       iteration: str) -> List[object]:
    """
    Get a list of adjoint source objects

    :param comm: The lasif communicator object
    :type comm: object
    :param event: Name of event
    :type event: str
    :param iteration: Name of iteration
    :type iteration: str
    :return: Adjoint source objects
    :rtype: List[object]
    """
    from salvus.flow.simple_config import source, stf
    import h5py

    receivers = place_receivers(comm=comm, event=event)
    iteration_name = comm.iterations.get_long_iteration_name(iteration)
    adjoint_filename = str(comm.project.paths["adjoint_sources"] /
                           iteration_name / event / "stf.h5")

    p = h5py.File(adjoint_filename, "r")
    adjoint_recs = list(p.keys())
    adjoint_sources = []
    for rec in receivers:
        if rec["network-code"] + "_" + rec["station-code"] in adjoint_recs:
            adjoint_sources.append(rec)
    p.close()

    adj_src = [
        source.seismology.VectorPoint3DZNE(
            latitude=rec["latitude"],
            longitude=rec["longitude"],
            fz=1.0,
            fn=1.0,
            fe=1.0,
            source_time_function=stf.Custom(
                filename=adjoint_filename,
                dataset_name=f"/{rec['network-code']}_{rec['station-code']}",
            ),
        ) for rec in adjoint_sources
    ]
    return adj_src
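The adjoint sources built above rely on a simple naming convention: each receiver's adjoint source is stored in stf.h5 as a dataset named "<network>_<station>". A minimal sketch with a hypothetical receiver and file shows what a single entry of the returned list looks like:

# Sketch only: hypothetical receiver and a hypothetical stf.h5 that
# contains a "/II_BFO" dataset for this station.
from salvus.flow.simple_config import source, stf

rec = {"network-code": "II", "station-code": "BFO",
       "latitude": 48.33, "longitude": 8.33}
adjoint_source = source.seismology.VectorPoint3DZNE(
    latitude=rec["latitude"],
    longitude=rec["longitude"],
    fz=1.0,
    fn=1.0,
    fe=1.0,
    source_time_function=stf.Custom(
        filename="stf.h5",
        dataset_name=f"/{rec['network-code']}_{rec['station-code']}",
    ),
)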
Code example #4
File: flow_comp.py  Project: bamboo06/Inversionson
    def get_adjoint_source_object(self, event_name: str) -> object:
        """
        Generate the adjoint source object for the respective event

        :param event_name: Name of event
        :type event_name: str
        :return: Adjoint source object for salvus
        :rtype: object
        """
        import h5py
        from salvus.flow.simple_config import source
        from salvus.flow.simple_config import stf

        iteration = self.comm.project.current_iteration
        receivers = self.comm.lasif.get_receivers(event_name)
        adjoint_filename = self.comm.lasif.get_adjoint_source_file(
            event=event_name, iteration=iteration)
        # A workaround needed for a current salvus bug:
        # stf_forward = os.path.join(
        #         self.comm.project.lasif_root,
        #         "SALVUS_INPUT_FILES",
        #         f"ITERATION_{iteration}",
        #         "custom_stf.h5")
        # job_name = self._get_job_name(
        #     event=event_name,
        #     sim_type="forward",
        #     new=False)
        # stf_forward_path = f"/scratch/snx3000/tsolvi/salvus_flow/run/{job_name}/input/custom_stf.h5"
        # copy it:
        # os.system(f"scp daint:{stf_forward_path} {stf_forward}")
        # f = h5py.File(stf_forward)
        # stf_source = f['stf'][()]
        p = h5py.File(adjoint_filename, "r")
        # if 'stf' in p.keys():
        # del p['stf']
        adjoint_recs = list(p.keys())
        # p.create_dataset(name='stf', data=stf_source)
        # p["stf"].attrs["sampling_rate_in_hertz"] = 1 / self.comm.project.time_step
        # p["source"].attrs["spatial-type"] = np.string_("moment_tensor")
        # p["stf"].attrs["start_time_in_seconds"] = -self.comm.project.time_step
        # f.close()
        # rec = receivers[0]
        # Need to make sure I only take receivers with an adjoint source
        adjoint_sources = []
        for rec in receivers:
            if rec["network-code"] + "_" + rec["station-code"] in adjoint_recs:
                adjoint_sources.append(rec)

        p.close()
        adj_src = [
            source.seismology.VectorPoint3DZNE(
                latitude=rec["latitude"],
                longitude=rec["longitude"],
                fz=1.0,
                fn=1.0,
                fe=1.0,
                source_time_function=stf.Custom(
                    filename=adjoint_filename,
                    dataset_name="/" + rec["network-code"] + "_" +
                    rec["station-code"])) for rec in adjoint_sources
        ]

        return adj_src
Code example #5
File: flow_comp.py  Project: geofiber/Inversionson
    def get_adjoint_source_object(self, event_name: str) -> object:
        """
        Generate the adjoint source object for the respective event

        :param event_name: Name of event
        :type event_name: str
        :return: Adjoint source object for salvus
        :rtype: object
        """
        import os

        import h5py
        import numpy as np
        from salvus.flow.simple_config import source, stf

        iteration = self.comm.project.current_iteration
        receivers = self.comm.lasif.get_receivers(event_name)
        adjoint_filename = self.comm.lasif.get_adjoint_source_file(
            event=event_name, iteration=iteration)
        # A workaround needed for a current salvus bug:
        # stf_forward = os.path.join(
        #         self.comm.project.lasif_root,
        #         "SALVUS_INPUT_FILES",
        #         f"ITERATION_{iteration}",
        #         "custom_stf.h5")
        # job_name = self._get_job_name(
        #     event=event_name,
        #     sim_type="forward",
        #     new=False)
        # stf_forward_path = f"/scratch/snx3000/tsolvi/salvus_flow/run/{job_name}/input/custom_stf.h5"
        # copy it:
        # os.system(f"scp daint:{stf_forward_path} {stf_forward}")
        # f = h5py.File(stf_forward)
        # stf_source = f['stf'][()]
        p = h5py.File(adjoint_filename, "r")
        # if 'stf' in p.keys():
        # del p['stf']
        adjoint_recs = list(p.keys())
        # p.create_dataset(name='stf', data=stf_source)
        # p["stf"].attrs["sampling_rate_in_hertz"] = 1 / self.comm.project.time_step
        # p["source"].attrs["spatial-type"] = np.string_("moment_tensor")
        # p["stf"].attrs["start_time_in_seconds"] = -self.comm.project.time_step
        # f.close()
        # rec = receivers[0]
        # Need to make sure I only take receivers with an adjoint source
        adjoint_sources = []
        for rec in receivers:
            if rec["network-code"] + "_" + rec["station-code"] in adjoint_recs:
                adjoint_sources.append(rec)

        p.close()
        if self.comm.project.meshes == "multi-mesh":
            adj_src = [
                source.seismology.VectorPoint3DZNE(
                    latitude=rec["latitude"],
                    longitude=rec["longitude"],
                    fz=1.0,
                    fn=1.0,
                    fe=1.0,
                    source_time_function=stf.Custom(
                        filename=adjoint_filename,
                        dataset_name="/" + rec["network-code"] + "_" +
                        rec["station-code"],
                    ),
                ) for rec in adjoint_sources
            ]
            return adj_src
        # Get path to meta.json to obtain receiver position, use again for adjoint
        meta_json_filename = os.path.join(
            self.comm.project.lasif_root,
            "SYNTHETICS",
            "EARTHQUAKES",
            f"ITERATION_{iteration}",
            event_name,
            "meta.json",
        )

        # Build meta info dict
        import json

        with open(meta_json_filename) as json_file:
            data = json.load(json_file)
        meta_recs = data["forward_run_input"]["output"]["point_data"][
            "receiver"]
        meta_info_dict = {}
        for rec in meta_recs:
            if rec["network_code"] + "_" + rec["station_code"] in adjoint_recs:
                rec_name = rec["network_code"] + "_" + rec["station_code"]
                meta_info_dict[rec_name] = {}
                # this is the rotation from XYZ to ZNE,
                # we still need to transpose to get ZNE -> XYZ
                meta_info_dict[rec_name]["rotation_on_input"] = {
                    "matrix":
                    np.array(rec["rotation_on_output"]["matrix"]).T.tolist()
                }
                meta_info_dict[rec_name]["location"] = rec["location"]

        adj_src = [
            source.cartesian.VectorPoint3D(
                x=meta_info_dict[rec["network-code"] + "_" +
                                 rec["station-code"]]["location"][0],
                y=meta_info_dict[rec["network-code"] + "_" +
                                 rec["station-code"]]["location"][1],
                z=meta_info_dict[rec["network-code"] + "_" +
                                 rec["station-code"]]["location"][2],
                fx=1.0,
                fy=1.0,
                fz=1.0,
                source_time_function=stf.Custom(
                    filename=adjoint_filename,
                    dataset_name="/" + rec["network-code"] + "_" +
                    rec["station-code"],
                ),
                rotation_on_input=meta_info_dict[rec["network-code"] + "_" +
                                                 rec["station-code"]]
                ["rotation_on_input"],
            ) for rec in adjoint_sources
        ]

        return adj_src
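The non-multi-mesh branch above reads receiver information back from the forward run's meta.json. A sketch of the fields it uses, with hypothetical values, clarifies what ends up in meta_info_dict:

# Sketch only: the subset of a meta.json receiver entry that the code
# above reads; all values here are hypothetical.
meta_rec = {
    "network_code": "II",
    "station_code": "BFO",
    "location": [4162437.0, 609816.0, 4768963.0],  # cartesian x, y, z
    "rotation_on_output": {"matrix": [[0.0, 0.0, 1.0],
                                      [0.0, 1.0, 0.0],
                                      [1.0, 0.0, 0.0]]},
}
rec_name = meta_rec["network_code"] + "_" + meta_rec["station_code"]  # "II_BFO"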
Code example #6
def create_salvus_forward_simulation(
    comm: object,
    event: str,
    iteration: str,
    mesh=None,
    side_set: str = None,
):
    """
    Create a Salvus simulation object based on the simulation and
    Salvus-specific parameters specified in the config file.

    :param comm: The lasif communicator object
    :type comm: object
    :param event: Name of event
    :type event: str
    :param iteration: Name of iteration
    :type iteration: str
    :param mesh: Path to mesh or Salvus mesh object, if None it will use
        the domain file from config file, defaults to None
    :type mesh: Union[str, salvus.mesh.unstructured_mesh.UnstructuredMesh],
        optional
    :param side_set: Name of side set on mesh to place receivers,
        defaults to None.
    :type side_set: str, optional
    """
    import os

    import salvus.flow.simple_config as sc
    from salvus.flow.simple_config import stf

    source_info = prepare_source(comm=comm, event=event, iteration=iteration)
    iteration = comm.iterations.get_long_iteration_name(iteration)
    receivers = place_receivers(comm=comm, event=event)
    stf_path = os.path.join(comm.project.paths["salvus_files"], iteration,
                            "stf.h5")

    if mesh is None:
        mesh = comm.project.lasif_config["domain_settings"]["domain_file"]

    if side_set is None:
        recs = [
            sc.receiver.seismology.Point3D(
                latitude=rec["latitude"],
                longitude=rec["longitude"],
                network_code=rec["network-code"],
                station_code=rec["station-code"],
                fields=["displacement"],
            ) for rec in receivers
        ]
    else:
        recs = [
            sc.receiver.seismology.SideSetPoint3D(
                latitude=rec["latitude"],
                longitude=rec["longitude"],
                network_code=rec["network-code"],
                station_code=rec["station-code"],
                side_set_name=side_set,
                fields=["displacement"],
            ) for rec in receivers
        ]

    sources = [
        sc.source.seismology.MomentTensorPoint3D(
            latitude=src["latitude"],
            longitude=src["longitude"],
            depth_in_m=src["depth_in_m"],
            mrr=src["mrr"],
            mtt=src["mtt"],
            mpp=src["mpp"],
            mtp=src["mtp"],
            mrp=src["mrp"],
            mrt=src["mrt"],
            source_time_function=stf.Custom(filename=stf_path,
                                            dataset_name="/source"),
        ) for src in source_info
    ]

    w = sc.simulation.Waveform(
        mesh=mesh,
        sources=sources,
        receivers=recs,
    )
    sim_set = comm.project.simulation_settings
    sal_set = comm.project.salvus_settings
    w.physics.wave_equation.end_time_in_seconds = sim_set["end_time_in_s"]
    w.physics.wave_equation.time_step_in_seconds = sim_set["time_step_in_s"]
    w.physics.wave_equation.start_time_in_seconds = sim_set["start_time_in_s"]
    w.physics.wave_equation.attenuation = sal_set["attenuation"]

    import lasif.domain

    domain = lasif.domain.HDF5Domain(mesh,
                                     sim_set["absorbing_boundaries_in_km"])
    if not domain.is_global_domain():
        absorbing = sc.boundary.Absorbing(
            width_in_meters=sim_set["absorbing_boundaries_in_km"] * 1000.0,
            side_sets=["r0", "t0", "t1", "p0", "p1"],
            taper_amplitude=1.0 / sim_set["minimum_period_in_s"],
        )
        w.physics.wave_equation.boundaries = [absorbing]
    w.output.memory_per_rank_in_MB = 4000.0
    w.output.volume_data.format = "hdf5"
    w.output.volume_data.filename = "output.h5"
    w.output.volume_data.fields = ["adjoint-checkpoint"]
    w.output.volume_data.sampling_interval_in_time_steps = (
        "auto-for-checkpointing")

    w.validate()
    return w
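A minimal standalone sketch of the same assembly, with one hypothetical receiver and source and hypothetical local mesh.h5/stf.h5 files, mirrors the calls used above:

# Sketch only: hypothetical station, event and file names.
import salvus.flow.simple_config as sc
from salvus.flow.simple_config import stf

rec = sc.receiver.seismology.Point3D(
    latitude=48.33,
    longitude=8.33,
    network_code="II",
    station_code="BFO",
    fields=["displacement"],
)
src = sc.source.seismology.MomentTensorPoint3D(
    latitude=63.6,
    longitude=-19.6,
    depth_in_m=10000.0,
    mrr=1.0e17, mtt=1.0e17, mpp=1.0e17,
    mtp=0.0, mrp=0.0, mrt=0.0,
    source_time_function=stf.Custom(filename="stf.h5", dataset_name="/source"),
)
w = sc.simulation.Waveform(mesh="mesh.h5", sources=[src], receivers=[rec])
w.physics.wave_equation.end_time_in_seconds = 600.0
w.validate()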
Code example #7
def get_adjoint_source_object(event_name, adjoint_filename,
                              receiver_json_path, proc_filename,
                              misfits, forward_meta_json_filename) -> object:
    """
    Generate the adjoint source object for the respective event

    :param event_name: Name of event
    :type event_name: str
    :return: Adjoint source object for salvus
    :rtype: object
    """
    import json

    import numpy as np
    from salvus.flow.simple_config import source, stf

    receivers = build_or_get_receiver_info(receiver_json_path, proc_filename)
    adjoint_recs = list(misfits[event_name].keys())

    # Need to make sure I only take receivers with an adjoint source
    adjoint_sources = []
    for rec in receivers:
        if (rec["network-code"] + "_" + rec["station-code"] in adjoint_recs
                or rec["network-code"] + "." + rec["station-code"]
                in adjoint_recs):
            adjoint_sources.append(rec)

    # Build meta_info_dict
    with open(forward_meta_json_filename) as json_file:
        data = json.load(json_file)

    meta_recs = data["forward_run_input"]["output"]["point_data"]["receiver"]
    meta_info_dict = {}
    for rec in meta_recs:
        if (rec["network_code"] + "_" + rec["station_code"] in adjoint_recs
                or rec["network_code"] + "." + rec["station_code"]
                in adjoint_recs):
            rec_name = rec["network_code"] + "_" + rec["station_code"]
            meta_info_dict[rec_name] = {}
            # this is the rotation from XYZ to ZNE,
            # we still need to transpose to get ZNE -> XYZ
            meta_info_dict[rec_name]["rotation_on_input"] = {
                "matrix": np.array(
                    rec["rotation_on_output"]["matrix"]).T.tolist()
            }
            meta_info_dict[rec_name]["location"] = rec["location"]

    adj_src = [
        source.cartesian.VectorPoint3D(
            x=meta_info_dict[rec["network-code"] + "_" + rec["station-code"]][
                "location"
            ][0],
            y=meta_info_dict[rec["network-code"] + "_" + rec["station-code"]][
                "location"
            ][1],
            z=meta_info_dict[rec["network-code"] + "_" + rec["station-code"]][
                "location"
            ][2],
            fx=1.0,
            fy=1.0,
            fz=1.0,
            source_time_function=stf.Custom(
                filename=adjoint_filename,
                dataset_name="/" + rec["network-code"] + "_" + rec[
                    "station-code"],
            ),
            rotation_on_input=meta_info_dict[
                rec["network-code"] + "_" + rec["station-code"]
                ]["rotation_on_input"],
        )
        for rec in adjoint_sources
    ]
    return adj_src
Code example #8
    def get_adjoint_source_object(self, event_name: str) -> object:
        """
        Generate the adjoint source object for the respective event

        :param event_name: Name of event
        :type event_name: str
        :return: Adjoint source object for salvus
        :rtype: object
        """
        import json
        import os

        import h5py
        import numpy as np
        import toml
        from salvus.flow.simple_config import source, stf
        # Note: get_site (used below) is assumed to come from SalvusFlow's
        # API via a module-level import; it returns the configured site.

        iteration = self.comm.project.current_iteration
        receivers = self.comm.lasif.get_receivers(event_name)
        if not self.comm.project.hpc_processing:
            adjoint_filename = self.comm.lasif.get_adjoint_source_file(
                event=event_name, iteration=iteration)
            p = h5py.File(adjoint_filename, "r")
            adjoint_recs = list(p.keys())
            p.close()
        else:
            forward_job = self.get_job(event_name, sim_type="forward")

            # remote synthetics
            remote_meta_path = forward_job.output_path / "meta.json"
            hpc_cluster = get_site(self.comm.project.site_name)
            meta_json_filename = "meta.json"
            if os.path.exists(meta_json_filename):
                os.remove(meta_json_filename)
            hpc_cluster.remote_get(remote_meta_path, meta_json_filename)

            proc_job = self.get_job(event_name, sim_type="hpc_processing")
            remote_misfit_dict_toml = str(proc_job.stdout_path.parent /
                                          "output" / "misfit_dict.toml")
            adjoint_filename = "REMOTE:" + str(
                proc_job.stdout_path.parent / "output" / "stf.h5")
            local_misfit_dict = "misfit_dict.toml"
            if os.path.exists(local_misfit_dict):
                os.remove(local_misfit_dict)
            hpc_cluster.remote_get(remote_misfit_dict_toml, local_misfit_dict)
            misfits = toml.load(local_misfit_dict)
            adjoint_recs = list(misfits[event_name].keys())
            if os.path.exists(local_misfit_dict):
                os.remove(local_misfit_dict)

        # Need to make sure I only take receivers with an adjoint source
        adjoint_sources = []
        for rec in receivers:
            if (rec["network-code"] + "_" + rec["station-code"] in adjoint_recs
                    or rec["network-code"] + "." + rec["station-code"]
                    in adjoint_recs):
                adjoint_sources.append(rec)

        # print(adjoint_sources)

        # Get path to meta.json to obtain receiver position, use again for adjoint
        if not self.comm.project.hpc_processing:
            meta_json_filename = os.path.join(
                self.comm.project.lasif_root,
                "SYNTHETICS",
                "EARTHQUAKES",
                f"ITERATION_{iteration}",
                event_name,
                "meta.json",
            )

        # Build meta info dict

        with open(meta_json_filename) as json_file:
            data = json.load(json_file)
        meta_recs = data["forward_run_input"]["output"]["point_data"][
            "receiver"]
        meta_info_dict = {}
        for rec in meta_recs:
            if (rec["network_code"] + "_" + rec["station_code"] in adjoint_recs
                    or rec["network_code"] + "." + rec["station_code"]
                    in adjoint_recs):
                rec_name = rec["network_code"] + "_" + rec["station_code"]
                meta_info_dict[rec_name] = {}
                # this is the rotation from XYZ to ZNE,
                # we still need to transpose to get ZNE -> XYZ
                meta_info_dict[rec_name]["rotation_on_input"] = {
                    "matrix":
                    np.array(rec["rotation_on_output"]["matrix"]).T.tolist()
                }
                meta_info_dict[rec_name]["location"] = rec["location"]

        adj_src = [
            source.cartesian.VectorPoint3D(
                x=meta_info_dict[rec["network-code"] + "_" +
                                 rec["station-code"]]["location"][0],
                y=meta_info_dict[rec["network-code"] + "_" +
                                 rec["station-code"]]["location"][1],
                z=meta_info_dict[rec["network-code"] + "_" +
                                 rec["station-code"]]["location"][2],
                fx=1.0,
                fy=1.0,
                fz=1.0,
                source_time_function=stf.Custom(
                    filename=adjoint_filename,
                    dataset_name="/" + rec["network-code"] + "_" +
                    rec["station-code"],
                ),
                rotation_on_input=meta_info_dict[rec["network-code"] + "_" +
                                                 rec["station-code"]]
                ["rotation_on_input"],
            ) for rec in adjoint_sources
        ]
        if os.path.exists(
                meta_json_filename) and self.comm.project.hpc_processing:
            os.remove(meta_json_filename)

        return adj_src
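Finally, the rotation handling shared by the cartesian adjoint sources above: meta.json stores the matrix that rotates the forward output from XYZ to ZNE, so its transpose (the inverse of a rotation matrix) is what gets passed as rotation_on_input for the ZNE-oriented adjoint data. A minimal sketch with a hypothetical matrix:

# Sketch only: hypothetical rotation matrix taken from a meta.json entry.
import numpy as np

rotation_on_output = [[0.0, 0.0, 1.0],
                      [0.0, 1.0, 0.0],
                      [1.0, 0.0, 0.0]]
# Transpose to map ZNE back to XYZ, as done when building meta_info_dict.
rotation_on_input = {"matrix": np.array(rotation_on_output).T.tolist()}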