Example 1
    def point_in_domain(self, longitude, latitude):
        """
        Checks if a geographic point lies inside a rotated spherical
        section. It simply rotates the point and checks if it is inside the
        unrotated domain. It therefore works for all possible projections.

        :param longitude: The longitude of the point.
        :param latitude:  The latitude of the point.

        :return: bool
        """
        if self.rotation_angle_in_degree:
            # Rotate the point.
            r_lat, r_lng = rotations.rotate_lat_lon(
                latitude, longitude, self.rotation_axis,
                -1.0 * self.rotation_angle_in_degree)
        else:
            r_lng = longitude
            r_lat = latitude

        bw = self.boundary_width_in_degree

        # Check if in bounds.
        if not ((self.min_latitude + bw) <= r_lat <=
                (self.max_latitude - bw)) or \
                not ((self.min_longitude + bw) <= r_lng <=
                     (self.max_longitude - bw)):
            return False

        return True
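A minimal usage sketch for the method above, assuming a hypothetical `domain` instance of the class that defines it (the coordinate values are made up for illustration):

# `domain` is assumed to expose point_in_domain() as defined above.
inside = domain.point_in_domain(longitude=12.5, latitude=46.0)
if not inside:
    # The point is either outside the rotated section or inside the
    # boundary_width_in_degree buffer along its edges.
    print("Point rejected by the domain check.")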
Example 2
    def point_in_domain(self, longitude, latitude):
        """
        Checks if a geographic point lies inside a rotated spherical
        section. It simply rotates the point and checks if it is inside the
        unrotated domain. It therefore works for all possible projections.

        :param longitude: The longitude of the point.
        :param latitude:  The latitude of the point.

        :return: bool
        """
        if self.rotation_angle_in_degree:
            # Rotate the point.
            r_lat, r_lng = rotations.rotate_lat_lon(
                latitude, longitude, self.rotation_axis,
                -1.0 * self.rotation_angle_in_degree)
        else:
            r_lng = longitude
            r_lat = latitude

        bw = self.boundary_width_in_degree

        # Check if in bounds.
        if not ((self.min_latitude + bw) <= r_lat <=
                (self.max_latitude - bw)) or \
                not ((self.min_longitude + bw) <= r_lng <=
                     (self.max_longitude - bw)):
            return False

        return True
Example 3
def point_in_domain(latitude, longitude, domain,
                    rotation_axis=[0.0, 0.0, 1.0],
                    rotation_angle_in_degree=0.0):
    """
    Simple function checking whether a geographic point lies inside a
    rotated spherical section. It simply rotates the point and checks if it
    is inside the unrotated domain.

    Domain is a dictionary containing at least the following keys:
        * "minimum_latitude"
        * "maximum_latitude"
        * "minimum_longitude"
        * "maximum_longitude"
        * "boundary_width_in_degree"

    Returns True or False.
    """
    from lasif import rotations
    min_latitude = domain["minimum_latitude"] + \
        domain["boundary_width_in_degree"]
    max_latitude = domain["maximum_latitude"] - \
        domain["boundary_width_in_degree"]
    min_longitude = domain["minimum_longitude"] + \
        domain["boundary_width_in_degree"]
    max_longitude = domain["maximum_longitude"] - \
        domain["boundary_width_in_degree"]

    # Rotate the station and check if it is still in bounds.
    r_lat, r_lng = rotations.rotate_lat_lon(
        latitude, longitude, rotation_axis, -1.0 * rotation_angle_in_degree)
    # Check if in bounds. If not continue.
    if not (min_latitude <= r_lat <= max_latitude) or \
            not (min_longitude <= r_lng <= max_longitude):
        return False
    return True
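A hedged usage sketch of the free function above; the dictionary keys are the ones listed in the docstring, while the numeric values and rotation parameters are made up for illustration:

domain = {
    "minimum_latitude": 30.0,
    "maximum_latitude": 60.0,
    "minimum_longitude": -10.0,
    "maximum_longitude": 40.0,
    "boundary_width_in_degree": 2.5,
}
# Unrotated case: the default rotation angle of 0 degrees leaves the point as is.
print(point_in_domain(48.0, 11.0, domain))  # expected True
# Rotated case: the point is first rotated back by -57.5 degrees, then checked.
print(point_in_domain(48.0, 11.0, domain,
                      rotation_axis=[0.0, 1.0, 0.0],
                      rotation_angle_in_degree=57.5))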
Example 4
 def plot_snapshots(self, component, vmin, vmax, outdir, fprx='wavefield',iter0=200, iterf=17000, \
         diter=200, stations=False, res="i", projection='lambert', dpi=300, zoomin=2, geopolygons=None, evlo=None, evla=None ):
     """
     Plot snapshots of a field component at a given depth, with values ranging between "vmin" and "vmax"
     ================================================================================================
     Input parameters:
     component       - component for plotting
                         The currently available "components" are:
                             Material parameters: A, B, C, mu, lambda, rhoinv, vp, vsh, vsv, rho
                             Velocity field snapshots: vx, vy, vz
                             Sensitivity kernels: Q_mu, Q_kappa, alpha_mu, alpha_kappa
     vmin, vmax      - minimum/maximum value for plotting
     outdir          - output directory
     fprx            - output file name prefix
     iter0, iterf    - initial/final iterations for plotting
     diter           - iteration interval
     stations        - plot stations or not
     res             - resolution of the coastline (c, l, i, h, f)
     projection      - projection type (global, regional_ortho, regional_merc)
     dpi             - dots per inch (figure resolution parameter)
     zoomin          - zoom in factor for proj = regional_ortho
     geopolygons     - geological polygons (basins etc.) for the plot
     evlo, evla      - event location for plotting
     =================================================================================================
     """
     if not os.path.isdir(outdir):
         os.makedirs(outdir)
     iterArr=np.arange(iter0, iterf+diter, diter, dtype=int)
     use_default_iter=False
     for iteration in iterArr:
         if str(iteration) not in self[component].keys():
             warnings.warn('Velocity Snapshot:'+str(iteration)+' does not exist!', UserWarning, stacklevel=1)
             # raise KeyError('Velocity Snapshot:'+str(iteration)+' does not exist!')
             use_default_iter=True  # fall back to the iterations actually present
     if use_default_iter: iterArr = self[component].keys()
     self.minlat = self.attrs['lat_min']; self.maxlat = self.attrs['lat_max']
     self.minlon = self.attrs['lon_min']; self.maxlon = self.attrs['lon_max']
     self.n = self.attrs['rotation_axis']; self.rotangle = self.attrs['rotation_angle']
     lat_centre = (self.maxlat+self.minlat)/2.0; lon_centre = (self.maxlon+self.minlon)/2.0
     self.lat_centre, self.lon_centre = rotations.rotate_lat_lon(lat_centre, lon_centre, self.n, -self.rotangle)
     fig=plt.figure(num=None, figsize=(10, 10), dpi=dpi, facecolor='w', edgecolor='k')
     # - Set up the map. ------------------------------------------------------------------------
     m=self._get_basemap(projection=projection)
     try: geopolygons.PlotPolygon(inbasemap=m)
     except: pass
     try:
         evx, evy=m(evlo, evla)
         m.plot(evx, evy, 'yo', markersize=2)
     except: pass
     for iteration in iterArr:
         self._plot_snapshot(inmap=m, component=component, vmin=vmin, vmax=vmax, iteration=iteration, stations=stations)
         outfname=outdir+'/'+fprx+'_%06d.png' %(int(iteration))
         print outfname, outdir
         fig.savefig(outfname, format='png', dpi=dpi)
     return 
Example 5
    def get_waveforms_synthetic(self, event_name, station_id, long_iteration_name):
        """
        Gets the synthetic waveforms for the given event and station as a
        :class:`~obspy.core.stream.Stream` object.

        :param event_name: The name of the event.
        :param station_id: The id of the station in the form NET.STA.
        :param long_iteration_name: The long form of an iteration name.
        """
        from lasif import rotations

        st = self._get_waveforms(event_name, station_id, data_type="synthetic", tag_or_iteration=long_iteration_name)
        network, station = station_id.split(".")

        iteration = self.comm.iterations.get(long_iteration_name)

        # This maps the synthetic channels to ZNE.
        synthetic_coordinates_mapping = {"X": "N", "Y": "E", "Z": "Z", "E": "E", "N": "N"}

        for tr in st:
            tr.stats.network = network
            tr.stats.station = station
            if tr.stats.channel in ["X"]:
                tr.data *= -1.0
            tr.stats.starttime = self.comm.events.get(event_name)["origin_time"]
            tr.stats.channel = synthetic_coordinates_mapping[tr.stats.channel]

        if not "specfem" in iteration.solver_settings["solver"].lower():
            # Also need to be rotated.
            domain = self.comm.project.domain

            # Coordinates are required for the rotation.
            coordinates = self.comm.query.get_coordinates_for_station(event_name, station_id)

            # First rotate the station back to see, where it was
            # recorded.
            lat, lng = rotations.rotate_lat_lon(
                coordinates["latitude"], coordinates["longitude"], domain["rotation_axis"], -domain["rotation_angle"]
            )
            # Rotate the synthetics.
            n, e, z = rotations.rotate_data(
                st.select(channel="N")[0].data,
                st.select(channel="E")[0].data,
                st.select(channel="Z")[0].data,
                lat,
                lng,
                domain["rotation_axis"],
                domain["rotation_angle"],
            )
            st.select(channel="N")[0].data = n
            st.select(channel="E")[0].data = e
            st.select(channel="Z")[0].data = z

        return st
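The rotation step above follows a recurring pattern in these examples: rotate the receiver coordinates back into the unrotated frame with the negative angle, then rotate the three component traces with the positive angle. A self-contained sketch of that pattern, with dummy arrays and made-up rotation parameters:

import numpy as np
from lasif import rotations

rotation_axis = [0.0, 1.0, 0.0]  # assumed domain rotation axis
rotation_angle = -57.5           # assumed rotation angle in degree

# Receiver coordinates in the rotated frame (illustrative values).
station_lat, station_lng = 46.0, 12.5

# Dummy N/E/Z arrays standing in for the synthetic waveform data.
n_data = np.random.randn(1000)
e_data = np.random.randn(1000)
z_data = np.random.randn(1000)

# 1) Rotate the station back to where it was "recorded" in the unrotated frame.
lat, lng = rotations.rotate_lat_lon(
    station_lat, station_lng, rotation_axis, -1.0 * rotation_angle)

# 2) Rotate the three components accordingly.
n, e, z = rotations.rotate_data(
    n_data, e_data, z_data, lat, lng, rotation_axis, rotation_angle)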
Example 6
 def center(self):
     """
     Get the center of the domain.
     """
     c = self.unrotated_center
     Point = collections.namedtuple("CenterPoint", ["longitude",
                                                    "latitude"])
     r_lat, r_lng = rotations.rotate_lat_lon(
         c.latitude, c.longitude, self.rotation_axis,
         self.rotation_angle_in_degree)
     return Point(longitude=r_lng, latitude=r_lat)
Example 7
 def center(self):
     """
     Get the center of the domain.
     """
     c = self.unrotated_center
     Point = collections.namedtuple("CenterPoint",
                                    ["longitude", "latitude"])
     r_lat, r_lng = rotations.rotate_lat_lon(c.latitude, c.longitude,
                                             self.rotation_axis,
                                             self.rotation_angle_in_degree)
     return Point(longitude=r_lng, latitude=r_lat)
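A short usage sketch, assuming `domain` is an instance of the class the method above belongs to and that `center` is exposed as a property (call it as a method otherwise):

c = domain.center
print("Domain center: lon=%.2f, lat=%.2f" % (c.longitude, c.latitude))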
Example 8
    def get_depth_profile(self, component, latitude, longitude):
        """
        Returns a depth profile of the model at the GLL points closest
        to the requested latitude and longitude.

        :param component: The component of the model.
        :param latitude: The latitude.
        :param longitude: The longitude.
        """
        # Need to rotate latitude and longitude.
        if hasattr(self.domain, "rotation_axis") and \
                self.domain.rotation_axis and \
                self.domain.rotation_angle_in_degree:
            latitude, longitude = rotations.rotate_lat_lon(
                latitude, longitude, self.domain.rotation_axis,
                -1.0 * self.domain.rotation_angle_in_degree)

        x_index = self.get_closest_gll_index("latitude", latitude)
        y_index = self.get_closest_gll_index("longitude", longitude)

        data = self.parsed_components[component]
        depths = self.collocation_points_depth
        values = data[x_index, y_index, :]

        lat = self.collocation_points_lats[::-1][x_index]
        lng = self.collocation_points_lngs[y_index]

        # Rotate back.
        if hasattr(self.domain, "rotation_axis") and \
                self.domain.rotation_axis and \
                self.domain.rotation_angle_in_degree:
            lat, lng = rotations.rotate_lat_lon(
                lat, lng, self.domain.rotation_axis,
                self.domain.rotation_angle_in_degree)

        return {
            "depths": depths,
            "values": values,
            "latitude": lat,
            "longitude": lng
        }
Example 9
    def get_depth_profile(self, component, latitude, longitude):
        """
        Returns a depth profile of the model at the GLL points closest
        to the requested latitude and longitude.

        :param component: The component of the model.
        :param latitude: The latitude.
        :param longitude: The longitude.
        """
        # Need to rotate latitude and longitude.
        if hasattr(self.domain, "rotation_axis") and \
                self.domain.rotation_axis and \
                self.domain.rotation_angle_in_degree:
            latitude, longitude = rotations.rotate_lat_lon(
                latitude, longitude, self.domain.rotation_axis,
                -1.0 * self.domain.rotation_angle_in_degree)

        x_index = self.get_closest_gll_index("latitude", latitude)
        y_index = self.get_closest_gll_index("longitude", longitude)

        data = self.parsed_components[component]
        depths = self.collocation_points_depth
        values = data[x_index, y_index, :]

        lat = self.collocation_points_lats[::-1][x_index]
        lng = self.collocation_points_lngs[y_index]

        # Rotate back.
        if hasattr(self.domain, "rotation_axis") and \
                self.domain.rotation_axis and \
                self.domain.rotation_angle_in_degree:
            lat, lng = rotations.rotate_lat_lon(
                lat, lng, self.domain.rotation_axis,
                self.domain.rotation_angle_in_degree)

        return {
            "depths": depths,
            "values": values,
            "latitude": lat,
            "longitude": lng}
Example 10
def test_RotateLatLon():
    """
    Test the lat/lon rotation on a sphere.
    """
    # Rotate north pole to equator.
    lat_new, lon_new = rotations.rotate_lat_lon(90.0, 0.0, [0, 1, 0], 90)
    np.testing.assert_almost_equal(lat_new, 0.0)
    np.testing.assert_almost_equal(lon_new, 0.0)
    # Rotate north pole to the south pole.
    lat_new, lon_new = rotations.rotate_lat_lon(90.0, 0.0, [0, 1, 0], 180)
    np.testing.assert_almost_equal(lat_new, -90.0)
    np.testing.assert_almost_equal(lon_new, 0.0)
    # Rotate north pole to equator, the other way round.
    lat_new, lon_new = rotations.rotate_lat_lon(90.0, 0.0, [0, 1, 0], -90)
    np.testing.assert_almost_equal(lat_new, 0.0)
    np.testing.assert_almost_equal(lon_new, 180.0)
    # Rotate (0/0) to the east
    lat_new, lon_new = rotations.rotate_lat_lon(0.0, 0.0, [0, 0, 1], 90)
    np.testing.assert_almost_equal(lat_new, 0.0)
    np.testing.assert_almost_equal(lon_new, 90.0)
    # Rotate (0/0) to the west
    lat_new, lon_new = rotations.rotate_lat_lon(0.0, 0.0, [0, 0, 1], -90)
    np.testing.assert_almost_equal(lat_new, 0.0)
    np.testing.assert_almost_equal(lon_new, -90.0)
    # Rotate the west to the South Pole. The longitude cannot be tested
    # reliably because it varies infinitely fast directly at a pole.
    lat_new, lon_new = rotations.rotate_lat_lon(0.0, -90.0, [1, 0, 0], 90)
    np.testing.assert_almost_equal(lat_new, -90.0)
Example 11
 def test_RotateLatLon(self):
     """
     Test the lat/lon rotation on a sphere.
     """
     # Rotate north pole to equator.
     lat_new, lon_new = rotations.rotate_lat_lon(90.0, 0.0, [0, 1, 0], 90)
     self.assertAlmostEqual(lat_new, 0.0)
     self.assertAlmostEqual(lon_new, 0.0)
     # Rotate north pole to the south pole.
     lat_new, lon_new = rotations.rotate_lat_lon(90.0, 0.0, [0, 1, 0], 180)
     self.assertAlmostEqual(lat_new, -90.0)
     self.assertAlmostEqual(lon_new, 0.0)
     # Rotate north pole to equator, the other way round.
     lat_new, lon_new = rotations.rotate_lat_lon(90.0, 0.0, [0, 1, 0], -90)
     self.assertAlmostEqual(lat_new, 0.0)
     self.assertAlmostEqual(lon_new, 180.0)
     # Rotate (0/0) to the east
     lat_new, lon_new = rotations.rotate_lat_lon(0.0, 0.0, [0, 0, 1], 90)
     self.assertAlmostEqual(lat_new, 0.0)
     self.assertAlmostEqual(lon_new, 90.0)
     # Rotate (0/0) to the west
     lat_new, lon_new = rotations.rotate_lat_lon(0.0, 0.0, [0, 0, 1], -90)
     self.assertAlmostEqual(lat_new, 0.0)
     self.assertAlmostEqual(lon_new, -90.0)
     # Rotate the west to the South Pole. The longitude cannot be tested
     # reliably because it varies infinitely fast directly at a pole.
     lat_new, lon_new = rotations.rotate_lat_lon(0.0, -90.0, [1, 0, 0], 90)
     self.assertAlmostEqual(lat_new, -90.0)
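The tests above fix the rotation convention: angles are given in degrees, and rotating by an angle and then by its negative recovers the original point. A small round-trip sketch in the same spirit, with an arbitrary unit axis and a test point away from the poles:

import numpy as np
from lasif import rotations

lat, lon = 37.0, -21.5
axis = (np.array([1.0, 1.0, 1.0]) / np.sqrt(3.0)).tolist()  # unit rotation axis
angle = 63.0

r_lat, r_lon = rotations.rotate_lat_lon(lat, lon, axis, angle)
b_lat, b_lon = rotations.rotate_lat_lon(r_lat, r_lon, axis, -angle)

np.testing.assert_almost_equal(b_lat, lat)
np.testing.assert_almost_equal(b_lon, lon)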
Example 12
 def center(self):
     """
     Get the center of the domain.
     """
     domain = self.comm.project.domain
     c = domain.unrotated_center
     Point = collections.namedtuple("CenterPoint",
                                    ["longitude", "latitude"])
     from lasif import rotations
     r_lat, r_lng = rotations.rotate_lat_lon(
         c.latitude, c.longitude, domain.rotation_axis,
         domain.rotation_angle_in_degree)
     return Point(longitude=r_lng, latitude=r_lat)
Example 13
    def _get_maximum_bounds(self, min_lat, max_lat, min_lng, max_lng,
                            rotation_axis, rotation_angle_in_degree):
        """
        Small helper function to get the domain bounds of a rotated spherical
        section.

        :param min_lat: Minimum Latitude of the unrotated section.
        :param max_lat: Maximum Latitude of the unrotated section.
        :param min_lng: Minimum Longitude of the unrotated section.
        :param max_lng: Maximum Longitude of the unrotated section.
        :param rotation_axis: Rotation axis as a list in the form of [x, y, z]
        :param rotation_angle_in_degree: Rotation angle in degree.
        """
        number_of_points_per_side = 50
        north_border = np.empty((number_of_points_per_side, 2))
        south_border = np.empty((number_of_points_per_side, 2))
        east_border = np.empty((number_of_points_per_side, 2))
        west_border = np.empty((number_of_points_per_side, 2))

        north_border[:, 0] = np.linspace(min_lng, max_lng,
                                         number_of_points_per_side)
        north_border[:, 1] = min_lat

        south_border[:, 0] = np.linspace(max_lng, min_lng,
                                         number_of_points_per_side)
        south_border[:, 1] = max_lat

        east_border[:, 0] = max_lng
        east_border[:, 1] = np.linspace(min_lat, max_lat,
                                        number_of_points_per_side)

        west_border[:, 0] = min_lng
        west_border[:, 1] = np.linspace(max_lat, min_lat,
                                        number_of_points_per_side)

        # Rotate everything.
        for border in [north_border, south_border, east_border, west_border]:
            for _i in xrange(number_of_points_per_side):
                border[_i, 1], border[_i, 0] = rotations.rotate_lat_lon(
                    border[_i, 1], border[_i, 0], rotation_axis,
                    rotation_angle_in_degree)

        border = np.concatenate([north_border, south_border, east_border,
                                 west_border])

        min_lng, max_lng = border[:, 0].min(), border[:, 0].max()
        min_lat, max_lat = border[:, 1].min(), border[:, 1].max()

        return min_lat, max_lat, min_lng, max_lng
Example 14
    def _get_maximum_bounds(self, min_lat, max_lat, min_lng, max_lng,
                            rotation_axis, rotation_angle_in_degree):
        """
        Small helper function to get the domain bounds of a rotated spherical
        section.

        :param min_lat: Minimum Latitude of the unrotated section.
        :param max_lat: Maximum Latitude of the unrotated section.
        :param min_lng: Minimum Longitude of the unrotated section.
        :param max_lng: Maximum Longitude of the unrotated section.
        :param rotation_axis: Rotation axis as a list in the form of [x, y, z]
        :param rotation_angle_in_degree: Rotation angle in degree.
        """
        number_of_points_per_side = 50
        north_border = np.empty((number_of_points_per_side, 2))
        south_border = np.empty((number_of_points_per_side, 2))
        east_border = np.empty((number_of_points_per_side, 2))
        west_border = np.empty((number_of_points_per_side, 2))

        north_border[:, 0] = np.linspace(min_lng, max_lng,
                                         number_of_points_per_side)
        north_border[:, 1] = min_lat

        south_border[:, 0] = np.linspace(max_lng, min_lng,
                                         number_of_points_per_side)
        south_border[:, 1] = max_lat

        east_border[:, 0] = max_lng
        east_border[:, 1] = np.linspace(min_lat, max_lat,
                                        number_of_points_per_side)

        west_border[:, 0] = min_lng
        west_border[:, 1] = np.linspace(max_lat, min_lat,
                                        number_of_points_per_side)

        # Rotate everything.
        for border in [north_border, south_border, east_border, west_border]:
            for _i in range(number_of_points_per_side):
                border[_i, 1], border[_i, 0] = rotations.rotate_lat_lon(
                    border[_i, 1], border[_i, 0], rotation_axis,
                    rotation_angle_in_degree)

        border = np.concatenate(
            [north_border, south_border, east_border, west_border])

        min_lng, max_lng = border[:, 0].min(), border[:, 0].max()
        min_lat, max_lat = border[:, 1].min(), border[:, 1].max()

        return min_lat, max_lat, min_lng, max_lng
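A hedged sketch of how the helper above might be called; `obj` stands in for an instance of the class that defines it (the method uses no other instance state), and the bounds and rotation parameters are made up:

min_lat, max_lat, min_lng, max_lng = obj._get_maximum_bounds(
    min_lat=30.0, max_lat=60.0, min_lng=-10.0, max_lng=40.0,
    rotation_axis=[0.0, 1.0, 0.0], rotation_angle_in_degree=57.5)
# The returned values bound the rotated section in geographic coordinates,
# e.g. for choosing a map extent.
print(min_lat, max_lat, min_lng, max_lng)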
Example 15
    def filter_location_fct(latitude, longitude):
        """
        Simple function checking if a geographic point is placed inside a
        rotated spherical section. It simple rotates the point and checks if it
        is inside the unrotated domain. The domain specification are passed in
        as a closure.

        Returns True or False.
        """
        # Rotate the station and check if it is still in bounds.
        r_lat, r_lng = rotations.rotate_lat_lon(latitude, longitude,
            rotation_axis, -1.0 * rotation_angle_in_degree)
        # Check if in bounds. If not continue.
        if not (min_latitude <= r_lat <= max_latitude) or \
                not (min_longitude <= r_lng <= max_longitude):
            return False
        return True
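The bounds and rotation parameters used above are captured from the enclosing scope. A minimal sketch, with made-up values, of how such a filter closure could be built and applied:

from lasif import rotations

def make_location_filter(min_latitude, max_latitude,
                         min_longitude, max_longitude,
                         rotation_axis, rotation_angle_in_degree):
    def filter_location_fct(latitude, longitude):
        # Rotate the point back and check it against the unrotated bounds.
        r_lat, r_lng = rotations.rotate_lat_lon(
            latitude, longitude, rotation_axis,
            -1.0 * rotation_angle_in_degree)
        return (min_latitude <= r_lat <= max_latitude) and \
               (min_longitude <= r_lng <= max_longitude)
    return filter_location_fct

# Example: an unrotated 30-60 N / 10 W-40 E box.
is_inside = make_location_filter(30.0, 60.0, -10.0, 40.0, [0.0, 0.0, 1.0], 0.0)
print(is_inside(46.0, 12.5))  # expected True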
Example 16
 def _plot_snapshot(self, inmap, component, vmin, vmax, iteration, stations):
     """Plot snapshot, private function used by make_animation
     """
     print 'Plotting Snapshot for:',iteration,' steps!'
     subgroup=self[component+'/'+str(iteration)]
     for key in subgroup.keys():
         subdset = subgroup[key]
         field   = subdset[...]
         theta   = subdset.attrs['theta']
         phi     = subdset.attrs['phi']
         lats    = 90.0 - theta * 180.0 / np.pi
         lons    = phi * 180.0 / np.pi
         lon, lat = np.meshgrid(lons, lats)
         if self.rotangle != 0.0:
             lat_rot = np.zeros(np.shape(lon),dtype=float)
             lon_rot = np.zeros(np.shape(lat),dtype=float)
             for idlon in np.arange(len(lons)):
                 for idlat in np.arange(len(lats)):
                     lat_rot[idlat,idlon], lon_rot[idlat,idlon]  = rotations.rotate_lat_lon(lat[idlat,idlon], lon[idlat,idlon],  self.n, -self.rotangle)
                     lat_rot[idlat,idlon] = 90.0-lat_rot[idlat,idlon]
             lon = lon_rot
             lat = lat_rot
         # - colourmap. ---------------------------------------------------------
         cmap = colors.get_colormap('tomo_80_perc_linear_lightness')
         x, y = inmap(lon, lat)
         im = inmap.pcolormesh(x, y, field, shading='gouraud', cmap=cmap, vmin=vmin, vmax=vmax) 
     # - Add colorbar and title. ------------------------------------------------------------------
     cb = inmap.colorbar(im, "right", size="3%", pad='2%')
     if component in UNIT_DICT:
         cb.set_label(UNIT_DICT[component], fontsize="x-large", rotation=0)
     # - Plot stations if available. ------------------------------------------------------------
     # if (self.stations == True) & (stations==True):
     #     x,y = mymap(self.stlons,self.stlats)
     #     for n in range(self.n_stations):
     #         plt.text(x[n],y[n],self.stnames[n][:4])
     #         plt.plot(x[n],y[n],'ro')
     return
Example 17
def point_in_domain(latitude,
                    longitude,
                    domain,
                    rotation_axis=[0.0, 0.0, 1.0],
                    rotation_angle_in_degree=0.0):
    """
    Simple function checking whether a geographic point lies inside a
    rotated spherical section. It simply rotates the point and checks if it
    is inside the unrotated domain.

    Domain is a dictionary containing at least the following keys:
        * "minimum_latitude"
        * "maximum_latitude"
        * "minimum_longitude"
        * "maximum_longitude"
        * "boundary_width_in_degree"

    Returns True or False.
    """
    from lasif import rotations
    min_latitude = domain["minimum_latitude"] + \
        domain["boundary_width_in_degree"]
    max_latitude = domain["maximum_latitude"] - \
        domain["boundary_width_in_degree"]
    min_longitude = domain["minimum_longitude"] + \
        domain["boundary_width_in_degree"]
    max_longitude = domain["maximum_longitude"] - \
        domain["boundary_width_in_degree"]

    # Rotate the station and check if it is still in bounds.
    r_lat, r_lng = rotations.rotate_lat_lon(latitude, longitude, rotation_axis,
                                            -1.0 * rotation_angle_in_degree)
    # Check if in bounds. If not continue.
    if not (min_latitude <= r_lat <= max_latitude) or \
            not (min_longitude <= r_lng <= max_longitude):
        return False
    return True
Example 18
def lasif_generate_dummy_data(args):
    """
    Usage: lasif generate_dummy_data

    Generates some random example event and waveforms. Useful for debugging,
    testing, and following the tutorial.
    """
    import inspect
    from lasif import rotations
    from lasif.adjoint_sources.utils import get_dispersed_wavetrain
    import numpy as np
    import obspy

    if len(args):
        msg = "No arguments allowed."
        raise LASIFCommandLineException(msg)

    proj = _find_project_root(".")

    # Use a seed to make it somewhat predictable.
    random.seed(34235234)
    # Create 8 events.
    d = proj.domain["bounds"]
    b = d["boundary_width_in_degree"] * 1.5
    event_count = 8
    for _i in xrange(event_count):
        lat = random.uniform(d["minimum_latitude"] + b,
            d["maximum_latitude"] - b)
        lon = random.uniform(d["minimum_longitude"] + b,
            d["maximum_longitude"] - b)
        depth_in_m = random.uniform(d["minimum_depth_in_km"],
            d["maximum_depth_in_km"]) * 1000.0
        # Rotate the coordinates.
        lat, lon = rotations.rotate_lat_lon(lat, lon,
            proj.domain["rotation_axis"], proj.domain["rotation_angle"])
        time = obspy.UTCDateTime(random.uniform(
            obspy.UTCDateTime(2008, 1, 1).timestamp,
            obspy.UTCDateTime(2013, 1, 1).timestamp))

        # The moment tensor. XXX: Make sensible values!
        values = [-3.3e+18, 1.43e+18, 1.87e+18, -1.43e+18, -2.69e+17,
            -1.77e+18]
        random.shuffle(values)

        mrr = values[0]
        mtt = values[1]
        mpp = values[2]
        mrt = values[3]
        mrp = values[4]
        mtp = values[5]
        mag = random.uniform(5, 7)
        scalar_moment = 3.661e+25

        event_name = os.path.join(proj.paths["events"],
            "dummy_event_%i.xml" % (_i + 1))

        cat = obspy.core.event.Catalog(events=[
            obspy.core.event.Event(
                event_type="earthquake",
                origins=[obspy.core.event.Origin(
                    latitude=lat, longitude=lon, depth=depth_in_m, time=time)],
                magnitudes=[obspy.core.event.Magnitude(
                    mag=mag, magnitude_type="Mw")],
                focal_mechanisms=[obspy.core.event.FocalMechanism(
                    moment_tensor=obspy.core.event.MomentTensor(
                        scalar_moment=scalar_moment,
                        tensor=obspy.core.event.Tensor(m_rr=mrr, m_tt=mtt,
                            m_pp=mpp, m_rt=mrt, m_rp=mrp, m_tp=mtp)))])])
        cat.write(event_name, format="quakeml", validate=False)
    print "Generated %i random events." % event_count

    # Update the folder structure.
    proj.update_folder_structure()

    names_taken = []

    def _get_random_name(length):
        while True:
            ret = ""
            for i in xrange(length):
                ret += chr(int(random.uniform(ord("A"), ord("Z"))))
            if ret in names_taken:
                continue
            names_taken.append(ret)
            break
        return ret

    # Now generate 30 station coordinates. Use a land-sea mask included in
    # basemap and loop until thirty stations on land are found.
    from mpl_toolkits.basemap import _readlsmask
    from obspy.core.util.geodetics import gps2DistAzimuth
    ls_lon, ls_lat, ls_mask = _readlsmask()
    stations = []
    # Do not use an infinite loop. One could choose a region with no land.
    for i in xrange(10000):
        if len(stations) >= 30:
            break
        lat = random.uniform(d["minimum_latitude"] + b,
            d["maximum_latitude"] - b)
        lon = random.uniform(d["minimum_longitude"] + b,
            d["maximum_longitude"] - b)
        # Rotate the coordinates.
        lat, lon = rotations.rotate_lat_lon(lat, lon,
            proj.domain["rotation_axis"], proj.domain["rotation_angle"])
        if not ls_mask[np.abs(ls_lat - lat).argmin()][
                np.abs(ls_lon - lon).argmin()]:
            continue
        stations.append({"latitude": lat, "longitude": lon,
            "network": "XX", "station": _get_random_name(3)})

    if not len(stations):
        msg = "Could not create stations. Pure ocean region?"
        raise ValueError(msg)

    # Create a RESP file for every channel.
    resp_file_temp = os.path.join(os.path.dirname(os.path.abspath(
        inspect.getfile(inspect.currentframe()))), os.path.pardir, "tools",
        "RESP.template_file")
    with open(resp_file_temp, "rt") as open_file:
        resp_file_template = open_file.read()

    for station in stations:
        for component in ["E", "N", "Z"]:
            filename = os.path.join(proj.paths["resp"], "RESP.%s.%s.%s.BE%s" %
                (station["network"], station["station"], "", component))
            with open(filename, "wt") as open_file:
                open_file.write(resp_file_template.format(
                    station=station["station"], network=station["network"],
                    channel="BH%s" % component))

    print "Generated %i RESP files." % (30 * 3)

    def _empty_sac_trace():
        """
        Helper function to create an empty SAC header.
        """
        sac_dict = {}
        # Floats: -12345.0
        floats = ["a", "mag", "az", "baz", "cmpaz", "cmpinc", "b", "depmax",
            "depmen", "depmin", "dist", "e", "evdp", "evla", "evlo", "f",
            "gcarc", "o", "odelta", "stdp", "stel", "stla", "stlo", "t0", "t1",
            "t2", "t3", "t4", "t5", "t6", "t7", "t8", "t9", "unused10",
            "unused11", "unused12", "unused6", "unused7", "unused8", "unused9",
            "user0", "user1", "user2", "user3", "user4", "user5", "user6",
            "user7", "user8", "user9", "xmaximum", "xminimum", "ymaximum",
            "yminimum"]
        sac_dict.update({key: -12345.0 for key in floats})
        # Integers: -12345
        integers = ["idep", "ievreg", "ievtype", "iftype", "iinst", "imagsrc",
            "imagtyp", "iqual", "istreg", "isynth", "iztype", "lcalda",
            "lovrok", "nevid", "norid", "nwfid"]
        sac_dict.update({key: -12345 for key in integers})
        # Strings: "-12345  "
        strings = ["ka", "kdatrd", "kevnm", "kf", "kinst", "ko", "kt0", "kt1",
            "kt2", "kt3", "kt4", "kt5", "kt6", "kt7", "kt8", "kt9",
            "kuser0", "kuser1", "kuser2"]

        sac_dict.update({key: "-12345  " for key in strings})

        # Header version
        sac_dict["nvhdr"] = 6
        # Data is evenly spaced
        sac_dict["leven"] = 1
        # And a positive polarity.
        sac_dict["lpspol"] = 1

        tr = obspy.Trace()
        tr.stats.sac = obspy.core.AttribDict(sac_dict)
        return tr

    events = proj.get_all_events()
    # Now loop over all events and create SAC file for them.
    for _i, event in enumerate(events):
        lat, lng = event.origins[0].latitude, event.origins[0].longitude
        # Get the distance from the event to each station.
        for station in stations:
            # Add some perturbations.
            distance_in_km = gps2DistAzimuth(lat, lng, station["latitude"],
                station["longitude"])[0] / 1000.0
            a = random.uniform(3.9, 4.1)
            b = random.uniform(0.9, 1.1)
            c = random.uniform(0.9, 1.1)
            body_wave_factor = random.uniform(0.095, 0.015)
            body_wave_freq_scale = random.uniform(0.45, 0.55)
            distance_in_km = random.uniform(0.99 * distance_in_km, 1.01 *
                distance_in_km)
            _, u = get_dispersed_wavetrain(dw=0.001,
                distance=distance_in_km, t_min=0, t_max=900, a=a, b=b, c=c,
                body_wave_factor=body_wave_factor,
                body_wave_freq_scale=body_wave_freq_scale)
            for component in ["E", "N", "Z"]:
                tr = _empty_sac_trace()
                tr.data = u
                tr.stats.network = station["network"]
                tr.stats.station = station["station"]
                tr.stats.location = ""
                tr.stats.channel = "BH%s" % component
                tr.stats.sac.stla = station["latitude"]
                tr.stats.sac.stlo = station["longitude"]
                tr.stats.sac.stdp = 0.0
                tr.stats.sac.stel = 0.0
                path = os.path.join(proj.paths["data"],
                    "dummy_event_%i" % (_i + 1), "raw")
                if not os.path.exists(path):
                    os.makedirs(path)
                tr.write(os.path.join(path, "%s.%s..BH%s.sac" %
                    (station["network"], station["station"], component)),
                    format="sac")
    print "Generated %i waveform files." % (30 * 3 * len(events))
Example 19
    def plot_depth_slice(self, component, depth_in_km):
        """
        Plots a depth slice.
        """
        lat_bounds = [rotations.colat2lat(_i)
            for _i in self.setup["physical_boundaries_x"][::-1]]
        lng_bounds = self.setup["physical_boundaries_y"]
        depth_bounds = [6371 - _i / 1000
            for _i in self.setup["physical_boundaries_z"]]

        data = self.parsed_components[component]

        available_depths = np.linspace(*depth_bounds, num=data.shape[2])[::-1]
        depth_index = np.argmin(np.abs(available_depths - depth_in_km))

        lon, lat = np.meshgrid(
            np.linspace(*lng_bounds, num=data.shape[1]),
            np.linspace(*lat_bounds, num=data.shape[0]))
        if self.rotation_axis and self.rotation_angle_in_degree:
            lon_shape = lon.shape
            lat_shape = lat.shape
            lon.shape = lon.size
            lat.shape = lat.size
            lat, lon = rotations.rotate_lat_lon(lat, lon, self.rotation_axis,
                self.rotation_angle_in_degree)
            lon.shape = lon_shape
            lat.shape = lat_shape

        # Get the center of the map.
        lon_0 = lon.min() + lon.ptp() / 2.0
        lat_0 = lat.min() + lat.ptp() / 2.0

        plt.figure(0)

        # Attempt to zoom into the region of interest.
        max_extend = max(lon.ptp(), lat.ptp())
        extend_used = max_extend / 180.0
        if extend_used < 0.5:
            x_buffer = 0.2 * lon.ptp()
            y_buffer = 0.2 * lat.ptp()

            m = Basemap(projection='merc', resolution="l",
                #lat_0=lat_0, lon_0=lon_0,
                llcrnrlon=lon.min() - x_buffer,
                urcrnrlon=lon.max() + x_buffer,
                llcrnrlat=lat.min() - y_buffer,
                urcrnrlat=lat.max() + y_buffer)
        else:
            m = Basemap(projection='ortho', lon_0=lon_0, lat_0=lat_0,
                resolution="c")


        m.drawcoastlines()
        m.fillcontinents("0.9", zorder=0)
        m.drawmapboundary(fill_color="white")
        m.drawparallels(np.arange(-80.0, 80.0, 10.0), labels=[1, 0, 0, 0])
        m.drawmeridians(np.arange(-170.0, 170.0, 10.0), labels=[0, 0, 0, 1])
        m.drawcountries()

        x, y = m(lon, lat)
        im = m.pcolormesh(x, y, data[::-1, :, depth_index],
            cmap=tomo_colormap)

        # Add colorbar and potentially unit.
        cm = m.colorbar(im, "right", size="3%", pad='2%')
        if component in UNIT_DICT:
            cm.set_label(UNIT_DICT[component], fontsize="x-large", rotation=0)

        plt.suptitle("Depth slice of %s at %i km" % (component,
            int(depth_in_km)), size="large")

        def _on_button_press(event):
            if event.button != 1 or not event.inaxes:
                return
            lon, lat = m(event.xdata, event.ydata, inverse=True)
            # Convert to colat to ease indexing.
            colat = rotations.lat2colat(lat)

            x_range = (self.setup["physical_boundaries_x"][1] -
                self.setup["physical_boundaries_x"][0])
            x_frac = (colat - self.setup["physical_boundaries_x"][0]) / x_range
            x_index = int(((self.setup["boundaries_x"][1] -
                self.setup["boundaries_x"][0]) * x_frac) +
                self.setup["boundaries_x"][0])
            y_range = (self.setup["physical_boundaries_y"][1] -
                self.setup["physical_boundaries_y"][0])
            y_frac = (lon - self.setup["physical_boundaries_y"][0]) / y_range
            y_index = int(((self.setup["boundaries_y"][1] -
                self.setup["boundaries_y"][0]) * y_frac) +
                self.setup["boundaries_y"][0])

            plt.figure(1, figsize=(3, 8))
            depths = available_depths
            values = data[x_index, y_index, :]
            plt.plot(values, depths)
            plt.grid()
            plt.ylim(depths[-1], depths[0])
            plt.show()
            plt.close()
            plt.figure(0)

        plt.gcf().canvas.mpl_connect('button_press_event', _on_button_press)

        plt.show()
Example 20
    def plot_slice(self,
                   depth,
                   min_val_plot=None,
                   max_val_plot=None,
                   colormap='tomo',
                   res='i',
                   save_under=None,
                   verbose=False,
                   lasif_folder=None,
                   vmin=None,
                   vmax=None):
        """
        Plot horizontal slices through an SES3D model.

        plot_slice(self, depth, colormap='tomo', res='i', save_under=None,
        verbose=False)

        depth=depth in km of the slice
        colormap='tomo', 'mono'
        res=resolution of the map; admissible values are: c, l, i, h, f
        save_under=save the figure as *.png under the filename "save_under";
        prevents interactive display of the slice.
        """
        import matplotlib.cm
        from matplotlib.colors import LogNorm
        import matplotlib.pylab as plt

        plt.style.use('seaborn-pastel')

        if not lasif_folder:
            raise NotImplementedError

        from lasif.scripts.lasif_cli import _find_project_comm
        comm = _find_project_comm(lasif_folder, read_only_caches=False)

        plt.figure(figsize=(32, 18))

        depth_position_map = {
            50: (0, 0),
            100: (0, 1),
            150: (1, 0),
            250: (1, 1),
            400: (2, 0),
            600: (2, 1)
        }

        for depth, location in depth_position_map.items():
            ax = plt.subplot2grid((3, 5), location)
            radius = 6371.0 - depth

            # set up a map and colourmap
            m = comm.project.domain.plot(ax=ax)

            import lasif.colors
            my_colormap = lasif.colors.get_colormap(
                "tomo_full_scale_linear_lightness")

            from lasif import rotations

            x, y = np.meshgrid(self.data.longitude, self.data.latitude)

            x_shape = x.shape
            y_shape = y.shape

            lat_r, lon_r = rotations.rotate_lat_lon(
                y.ravel(), x.ravel(), comm.project.domain.rotation_axis,
                comm.project.domain.rotation_angle_in_degree)

            x, y = m(lon_r, lat_r)

            x.shape = x_shape
            y.shape = y_shape

            plot_data = self.data.sel(radius=radius, method="nearest")
            plot_data = np.ma.masked_invalid(plot_data.data)

            # Overwrite colormap things if given.
            if vmin is not None and vmax is not None:
                min_val_plot = vmin
                max_val_plot = vmax
            else:
                mean = plot_data.mean()
                max_diff = max(abs(mean - plot_data.min()),
                               abs(plot_data.max() - mean))
                min_val_plot = mean - max_diff
                max_val_plot = mean + max_diff
                # Plotting essentially constant models.
                min_delta = 0.01 * abs(max_val_plot)
                if (max_val_plot - min_val_plot) < min_delta:
                    max_val_plot = max_val_plot + min_delta
                    min_val_plot = min_val_plot - min_delta

            # Plot.
            im = m.pcolormesh(x,
                              y,
                              plot_data,
                              cmap=my_colormap,
                              vmin=min_val_plot,
                              vmax=max_val_plot,
                              shading="gouraud")

            # make a colorbar and title
            m.colorbar(im, "right", size="3%", pad='2%')
            plt.title(str(depth) + ' km')

        # Depth based statistics.
        plt.subplot2grid((3, 5), (0, 4), rowspan=3)
        plt.title("Depth statistics")
        mean = self.data.mean(axis=(0, 1))
        std = self.data.std(axis=(0, 1))
        _min = self.data.min(axis=(0, 1))
        _max = self.data.max(axis=(0, 1))

        plt.fill_betweenx(self.data.radius,
                          mean - std,
                          mean + std,
                          label="std",
                          color="#FF3C83")
        plt.plot(mean, self.data.radius, label="mean", color="k", lw=2)
        plt.plot(_min, self.data.radius, color="grey", label="min")
        plt.plot(_max, self.data.radius, color="grey", label="max")
        plt.legend(loc="best")
        plt.xlabel("Value")
        plt.ylabel("Radius")

        # Roughness plots.
        plt.subplot2grid((3, 5), (0, 2))
        data = np.abs(self.data.diff("latitude", n=1)).sum("latitude").data
        plt.title("Roughness in latitude direction, Total: %g" % data.sum())
        plt.pcolormesh(self.data.longitude.data,
                       self.data.radius.data,
                       data.T,
                       cmap=matplotlib.cm.viridis,
                       norm=LogNorm(data.max() * 1E-2, data.max()))
        plt.colorbar()
        plt.xlabel("Longitude")
        plt.ylabel("Radius")

        plt.subplot2grid((3, 5), (1, 2))
        data = np.abs(self.data.diff("longitude", n=1)).sum("longitude").data
        plt.title("Roughness in longitude direction. Total: %g" % data.sum())
        plt.pcolormesh(self.data.latitude.data,
                       self.data.radius.data,
                       data.T,
                       cmap=matplotlib.cm.viridis,
                       norm=LogNorm(data.max() * 1E-2, data.max()))
        plt.colorbar()
        plt.xlabel("Latitude")
        plt.ylabel("Radius")

        plt.subplot2grid((3, 5), (2, 2))
        data = np.abs(self.data.diff("radius", n=1)).sum("radius").data
        plt.title("Roughness in radius direction. Total: %g" % data.sum())
        plt.pcolormesh(self.data.longitude.data,
                       self.data.latitude.data,
                       data,
                       cmap=matplotlib.cm.viridis)
        plt.colorbar()
        plt.xlabel("Longitude")
        plt.ylabel("Latitude")

        # L2
        plt.subplot2grid((3, 5), (0, 3))
        data = (self.data**2).sum("latitude").data
        plt.title("L2 Norm in latitude direction, Total: %g" % data.sum())
        plt.pcolormesh(self.data.longitude.data,
                       self.data.radius.data,
                       data.T,
                       cmap=matplotlib.cm.viridis)
        plt.colorbar()
        plt.xlabel("Longitude")
        plt.ylabel("Radius")

        plt.subplot2grid((3, 5), (1, 3))
        data = (self.data**2).sum("longitude").data
        plt.title("L2 Norm in longitude direction, Total: %g" % data.sum())
        plt.pcolormesh(self.data.latitude.data,
                       self.data.radius.data,
                       data.T,
                       cmap=matplotlib.cm.viridis)
        plt.colorbar()
        plt.xlabel("Latitude")
        plt.ylabel("Radius")

        plt.subplot2grid((3, 5), (2, 3))
        data = (self.data**2).sum("radius").data
        plt.title("L2 Norm in radius direction, Total: %g" % data.sum())
        plt.pcolormesh(self.data.longitude.data,
                       self.data.latitude.data,
                       data,
                       cmap=matplotlib.cm.viridis)
        plt.colorbar()
        plt.xlabel("Longitude")
        plt.ylabel("Latitude")

        plt.suptitle("File %s" % self._filename, fontsize=20)

        plt.tight_layout(rect=(0, 0, 1, 0.95))

        # save image if wanted
        if save_under is None:
            plt.show()
        else:
            plt.savefig(save_under, dpi=150)
            plt.close()
Example 21
            def get_value(self):
                station_id, coordinates = self.items[self.current_index]

                data = Stream()
                # Now get the actual waveform files. Also find the
                # corresponding station file and check the coordinates.
                this_waveforms = {
                    _i["channel_id"]: _i
                    for _i in waveforms
                    if _i["channel_id"].startswith(station_id + ".")
                }
                marked_for_deletion = []
                for key, value in this_waveforms.iteritems():
                    value["trace"] = read(value["filename"])[0]
                    data += value["trace"]
                    value["station_file"] = \
                        station_cache.get_station_filename(
                            value["channel_id"],
                            UTCDateTime(value["starttime_timestamp"]))
                    if value["station_file"] is None:
                        marked_for_deletion.append(key)
                        msg = ("Warning: Data and station information for '%s'"
                               " is available, but the station information "
                               "only for the wrong timestamp. You should try "
                               "and retrieve the correct station file.")
                        warnings.warn(msg % value["channel_id"])
                        continue
                    data[-1].stats.station_file = value["station_file"]
                for key in marked_for_deletion:
                    del this_waveforms[key]
                if not this_waveforms:
                    msg = "Could not retrieve data for station '%s'." % \
                        station_id
                    warnings.warn(msg)
                    return None
                # Now attempt to get the synthetics.
                synthetics_filenames = []
                for name, path in synthetic_files.iteritems():
                    if (station_id + ".") in name:
                        synthetics_filenames.append(path)

                if len(synthetics_filenames) != 3:
                    msg = "Found %i not 3 synthetics for station '%s'." % (
                        len(synthetics_filenames), station_id)
                    warnings.warn(msg)
                    return None

                synthetics = Stream()
                # Read all synthetics.
                for filename in synthetics_filenames:
                    synthetics += read(filename)
                for synth in synthetics:
                    if synth.stats.channel in ["X", "Z"]:
                        synth.data *= -1.0
                    synth.stats.channel = SYNTH_MAPPING[synth.stats.channel]
                    synth.stats.starttime = event_info["origin_time"]

                # Process the data.
                len_synth = synthetics[0].stats.endtime - \
                    synthetics[0].stats.starttime
                data.trim(synthetics[0].stats.starttime - len_synth * 0.05,
                          synthetics[0].stats.endtime + len_synth * 0.05)
                if data:
                    max_length = max([tr.stats.npts for tr in data])
                else:
                    max_length = 0
                if max_length == 0:
                    msg = (
                        "Warning: After trimming the waveform data to "
                        "the time window of the synthetics, no more data is "
                        "left. The reference time is the one given in the "
                        "QuakeML file. Make sure it is correct and that "
                        "the waveform data actually contains data in that "
                        "time span.")
                    warnings.warn(msg)
                data.detrend("linear")
                data.taper()

                new_time_array = np.linspace(
                    synthetics[0].stats.starttime.timestamp,
                    synthetics[0].stats.endtime.timestamp,
                    synthetics[0].stats.npts)

                # Simulate the traces.
                for trace in data:
                    # Decimate in case there is a large difference between
                    # synthetic sampling rate and sampling_rate of the data.
                    # XXX: Ugly filter, change!
                    if trace.stats.sampling_rate > (6 *
                                                    synth.stats.sampling_rate):
                        new_nyquist = trace.stats.sampling_rate / 2.0 / 5.0
                        trace.filter("lowpass",
                                     freq=new_nyquist,
                                     corners=4,
                                     zerophase=True)
                        trace.decimate(factor=5, no_filter=True)

                    station_file = trace.stats.station_file
                    if "/SEED/" in station_file:
                        paz = Parser(station_file).getPAZ(
                            trace.id, trace.stats.starttime)
                        trace.simulate(paz_remove=paz)
                    elif "/RESP/" in station_file:
                        trace.simulate(
                            seedresp={
                                "filename": station_file,
                                "units": "VEL",
                                "date": trace.stats.starttime
                            })
                    else:
                        raise NotImplementedError

                    # Make sure that the data array is at least as long as the
                    # synthetics array. Also add some buffer sample for the
                    # spline interpolation to work in any case.
                    buf = synth.stats.delta * 5
                    if synth.stats.starttime < (trace.stats.starttime + buf):
                        trace.trim(starttime=synth.stats.starttime - buf,
                                   pad=True,
                                   fill_value=0.0)
                    if synth.stats.endtime > (trace.stats.endtime - buf):
                        trace.trim(endtime=synth.stats.endtime + buf,
                                   pad=True,
                                   fill_value=0.0)

                    old_time_array = np.linspace(
                        trace.stats.starttime.timestamp,
                        trace.stats.endtime.timestamp, trace.stats.npts)

                    # Interpolation.
                    trace.data = interp1d(old_time_array, trace.data,
                                          kind=1)(new_time_array)
                    trace.stats.starttime = synthetics[0].stats.starttime
                    trace.stats.sampling_rate = \
                        synthetics[0].stats.sampling_rate

                data.filter("bandpass", freqmin=lowpass, freqmax=highpass)
                synthetics.filter("bandpass",
                                  freqmin=lowpass,
                                  freqmax=highpass)

                # Rotate the synthetics if necessary.
                if self.rot_angle:
                    # First rotate the station back to see, where it was
                    # recorded.
                    lat, lng = rotations.rotate_lat_lon(
                        coordinates["latitude"], coordinates["longitude"],
                        self.rot_axis, -self.rot_angle)
                    # Rotate the data.
                    n_trace = synthetics.select(component="N")[0]
                    e_trace = synthetics.select(component="E")[0]
                    z_trace = synthetics.select(component="Z")[0]
                    n, e, z = rotations.rotate_data(n_trace.data, e_trace.data,
                                                    z_trace.data, lat, lng,
                                                    self.rot_axis,
                                                    self.rot_angle)
                    n_trace.data = n
                    e_trace.data = e
                    z_trace.data = z

                return {
                    "data": data,
                    "synthetics": synthetics,
                    "coordinates": coordinates
                }
Example 22
    def plot_slice(self, depth, min_val_plot=None, max_val_plot=None,
                   colormap='tomo', res='i', save_under=None, verbose=False,
                   lasif_folder=None, vmin=None, vmax=None):
        """
        Plot horizontal slices through an SES3D model.

        plot_slice(self, depth, colormap='tomo', res='i', save_under=None,
        verbose=False)

        depth=depth in km of the slice
        colormap='tomo', 'mono'
        res=resolution of the map; admissible values are: c, l, i, h, f
        save_under=save the figure as *.png under the filename "save_under";
        prevents interactive display of the slice.
        """
        import matplotlib.cm
        from matplotlib.colors import LogNorm
        import matplotlib.pylab as plt

        plt.style.use('seaborn-pastel')


        if not lasif_folder:
            raise NotImplementedError

        from lasif.scripts.lasif_cli import _find_project_comm
        comm = _find_project_comm(lasif_folder, read_only_caches=False)

        plt.figure(figsize=(32, 18))

        depth_position_map = {
            50: (0, 0),
            100: (0, 1),
            150: (1, 0),
            250: (1, 1),
            400: (2, 0),
            600: (2, 1)
        }

        for depth, location in depth_position_map.items():
            ax = plt.subplot2grid((3, 5), location)
            radius = 6371.0 - depth

            # set up a map and colourmap
            m = comm.project.domain.plot(ax=ax)

            import lasif.colors
            my_colormap = lasif.colors.get_colormap(
                "tomo_full_scale_linear_lightness")

            from lasif import rotations

            x, y = np.meshgrid(self.data.longitude, self.data.latitude)

            x_shape = x.shape
            y_shape = y.shape

            lat_r, lon_r = rotations.rotate_lat_lon(
                y.ravel(), x.ravel(),
                comm.project.domain.rotation_axis,
                comm.project.domain.rotation_angle_in_degree)

            x, y = m(lon_r, lat_r)

            x.shape = x_shape
            y.shape = y_shape

            plot_data = self.data.sel(radius=radius, method="nearest")
            plot_data = np.ma.masked_invalid(plot_data.data)

            # Overwrite colormap things if given.
            if vmin is not None and vmax is not None:
                min_val_plot = vmin
                max_val_plot = vmax
            else:
                mean = plot_data.mean()
                max_diff = max(abs(mean - plot_data.min()),
                               abs(plot_data.max() - mean))
                min_val_plot = mean - max_diff
                max_val_plot = mean + max_diff
                # Plotting essentially constant models.
                min_delta = 0.01 * abs(max_val_plot)
                if (max_val_plot - min_val_plot) < min_delta:
                    max_val_plot = max_val_plot + min_delta
                    min_val_plot = min_val_plot - min_delta

            # Plot.
            im = m.pcolormesh(
                x, y, plot_data,
                cmap=my_colormap, vmin=min_val_plot, vmax=max_val_plot,
                shading="gouraud")

            # make a colorbar and title
            m.colorbar(im, "right", size="3%", pad='2%')
            plt.title(str(depth) + ' km')


        # Depth based statistics.
        plt.subplot2grid((3, 5), (0, 4), rowspan=3)
        plt.title("Depth statistics")
        mean = self.data.mean(axis=(0, 1))
        std = self.data.std(axis=(0, 1))
        _min = self.data.min(axis=(0, 1))
        _max = self.data.max(axis=(0, 1))

        plt.fill_betweenx(self.data.radius, mean - std, mean + std,
                          label="std", color="#FF3C83")
        plt.plot(mean, self.data.radius, label="mean", color="k", lw=2)
        plt.plot(_min, self.data.radius, color="grey", label="min")
        plt.plot(_max, self.data.radius, color="grey", label="max")
        plt.legend(loc="best")
        plt.xlabel("Value")
        plt.ylabel("Radius")

        # Roughness plots.
        plt.subplot2grid((3, 5), (0, 2))
        data = np.abs(self.data.diff("latitude", n=1)).sum("latitude").data
        plt.title("Roughness in latitude direction, Total: %g" % data.sum())
        plt.pcolormesh(self.data.longitude.data, self.data.radius.data,
                       data.T, cmap=matplotlib.cm.viridis,
                       norm=LogNorm(data.max() * 1E-2, data.max()))
        plt.colorbar()
        plt.xlabel("Longitude")
        plt.ylabel("Radius")

        plt.subplot2grid((3, 5), (1, 2))
        data = np.abs(self.data.diff("longitude", n=1)).sum("longitude").data
        plt.title("Roughness in longitude direction. Total: %g" % data.sum())
        plt.pcolormesh(self.data.latitude.data, self.data.radius.data, data.T,
                       cmap=matplotlib.cm.viridis,
                       norm=LogNorm(data.max() * 1E-2, data.max()))
        plt.colorbar()
        plt.xlabel("Latitude")
        plt.ylabel("Radius")

        plt.subplot2grid((3, 5), (2, 2))
        data = np.abs(self.data.diff("radius", n=1)).sum("radius").data
        plt.title("Roughness in radius direction. Total: %g" % data.sum())
        plt.pcolormesh(self.data.longitude.data, self.data.latitude.data,
                       data, cmap=matplotlib.cm.viridis)
        plt.colorbar()
        plt.xlabel("Longitude")
        plt.ylabel("Latitude")

        # L2
        plt.subplot2grid((3, 5), (0, 3))
        data = (self.data ** 2).sum("latitude").data
        plt.title("L2 Norm in latitude direction, Total: %g" % data.sum())
        plt.pcolormesh(self.data.longitude.data, self.data.radius.data,
                       data.T, cmap=matplotlib.cm.viridis)
        plt.colorbar()
        plt.xlabel("Longitude")
        plt.ylabel("Radius")

        plt.subplot2grid((3, 5), (1, 3))
        data = (self.data ** 2).sum("longitude").data
        plt.title("L2 Norm in longitude direction, Total: %g" % data.sum())
        plt.pcolormesh(self.data.latitude.data, self.data.radius.data, data.T,
                       cmap=matplotlib.cm.viridis)
        plt.colorbar()
        plt.xlabel("Latitude")
        plt.ylabel("Radius")

        plt.subplot2grid((3, 5), (2, 3))
        data = (self.data ** 2).sum("radius").data
        plt.title("L2 Norm in radius direction, Total: %g" % data.sum())
        plt.pcolormesh(self.data.longitude.data, self.data.latitude.data,
                       data, cmap=matplotlib.cm.viridis)
        plt.colorbar()
        plt.xlabel("Longitude")
        plt.ylabel("Latitude")

        plt.suptitle("File %s" % self._filename, fontsize=20)

        plt.tight_layout(rect=(0, 0, 1, 0.95))

        # save image if wanted
        if save_under is None:
            plt.show()
        else:
            plt.savefig(save_under, dpi=150)
            plt.close()
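The automatic colour limits above centre the scale on the mean and pad it for essentially constant fields. The same logic as a small standalone helper (pure NumPy; the sample values are illustrative):

import numpy as np

def symmetric_limits(values, rel_delta=0.01):
    """Colour limits centred on the mean, padded for near-constant fields."""
    values = np.ma.masked_invalid(values)
    mean = values.mean()
    max_diff = max(abs(mean - values.min()), abs(values.max() - mean))
    vmin, vmax = mean - max_diff, mean + max_diff
    min_delta = rel_delta * abs(vmax)
    if (vmax - vmin) < min_delta:
        vmin -= min_delta
        vmax += min_delta
    return vmin, vmax

print(symmetric_limits(np.array([3000.0, 3000.0, 3000.0])))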
Example n. 23
0
    def plot_depth_slice(self, component, depth_in_km):
        """
        Plots a depth slice.
        """
        lat_bounds = [
            rotations.colat2lat(_i)
            for _i in self.setup["physical_boundaries_x"][::-1]
        ]
        lng_bounds = self.setup["physical_boundaries_y"]
        depth_bounds = [
            6371 - _i / 1000 for _i in self.setup["physical_boundaries_z"]
        ]

        data = self.parsed_components[component]

        available_depths = np.linspace(*depth_bounds, num=data.shape[2])[::-1]
        depth_index = np.argmin(np.abs(available_depths - depth_in_km))

        lon, lat = np.meshgrid(np.linspace(*lng_bounds, num=data.shape[1]),
                               np.linspace(*lat_bounds, num=data.shape[0]))
        if self.rotation_axis and self.rotation_angle_in_degree:
            lon_shape = lon.shape
            lat_shape = lat.shape
            lon.shape = lon.size
            lat.shape = lat.size
            lat, lon = rotations.rotate_lat_lon(lat, lon, self.rotation_axis,
                                                self.rotation_angle_in_degree)
            lon.shape = lon_shape
            lat.shape = lat_shape

        # Get the center of the map.
        lon_0 = lon.min() + lon.ptp() / 2.0
        lat_0 = lat.min() + lat.ptp() / 2.0

        plt.figure(0)

        # Attempt to zoom into the region of interest.
        max_extend = max(lon.ptp(), lat.ptp())
        extend_used = max_extend / 180.0
        if extend_used < 0.5:
            x_buffer = 0.2 * lon.ptp()
            y_buffer = 0.2 * lat.ptp()

            m = Basemap(
                projection='merc',
                resolution="l",
                #lat_0=lat_0, lon_0=lon_0,
                llcrnrlon=lon.min() - x_buffer,
                urcrnrlon=lon.max() + x_buffer,
                llcrnrlat=lat.min() - y_buffer,
                urcrnrlat=lat.max() + y_buffer)
        else:
            m = Basemap(projection='ortho',
                        lon_0=lon_0,
                        lat_0=lat_0,
                        resolution="c")

        m.drawcoastlines()
        m.fillcontinents("0.9", zorder=0)
        m.drawmapboundary(fill_color="white")
        m.drawparallels(np.arange(-80.0, 80.0, 10.0), labels=[1, 0, 0, 0])
        m.drawmeridians(np.arange(-170.0, 170.0, 10.0), labels=[0, 0, 0, 1])
        m.drawcountries()

        x, y = m(lon, lat)
        im = m.pcolormesh(x, y, data[::-1, :, depth_index], cmap=tomo_colormap)

        # Add colorbar and potentially unit.
        cm = m.colorbar(im, "right", size="3%", pad='2%')
        if component in UNIT_DICT:
            cm.set_label(UNIT_DICT[component], fontsize="x-large", rotation=0)

        plt.suptitle("Depth slice of %s at %i km" %
                     (component, int(depth_in_km)),
                     size="large")

        def _on_button_press(event):
            if event.button != 1 or not event.inaxes:
                return
            lon, lat = m(event.xdata, event.ydata, inverse=True)
            # Convert to colat to ease indexing.
            colat = rotations.lat2colat(lat)

            x_range = (self.setup["physical_boundaries_x"][1] -
                       self.setup["physical_boundaries_x"][0])
            x_frac = (colat - self.setup["physical_boundaries_x"][0]) / x_range
            x_index = int(((self.setup["boundaries_x"][1] -
                            self.setup["boundaries_x"][0]) * x_frac) +
                          self.setup["boundaries_x"][0])
            y_range = (self.setup["physical_boundaries_y"][1] -
                       self.setup["physical_boundaries_y"][0])
            y_frac = (lon - self.setup["physical_boundaries_y"][0]) / y_range
            y_index = int(((self.setup["boundaries_y"][1] -
                            self.setup["boundaries_y"][0]) * y_frac) +
                          self.setup["boundaries_y"][0])

            plt.figure(1, figsize=(3, 8))
            depths = available_depths
            values = data[x_index, y_index, :]
            plt.plot(values, depths)
            plt.grid()
            plt.ylim(depths[-1], depths[0])
            plt.show()
            plt.close()
            plt.figure(0)

        plt.gcf().canvas.mpl_connect('button_press_event', _on_button_press)

        plt.show()
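The click handler above maps a geographic coordinate onto an array index by linear interpolation between the physical and the index boundaries. The same mapping as a standalone function (the boundary values are made up):

def coordinate_to_index(value, physical_bounds, index_bounds):
    """Linearly map a physical coordinate onto an integer grid index."""
    frac = (value - physical_bounds[0]) / \
        (physical_bounds[1] - physical_bounds[0])
    return int((index_bounds[1] - index_bounds[0]) * frac + index_bounds[0])

# Example: colatitude 55 deg in a 40-70 deg domain sampled by 120 points.
print(coordinate_to_index(55.0, (40.0, 70.0), (0, 119)))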
Example n. 24
0
def par2quakeml(Par_filename, QuakeML_filename, rotation_axis=[0.0, 1.0, 0.0],
                rotation_angle=-57.5, origin_time="2000-01-01 00:00:00.0",
                event_type="other event"):
    # initialise event
    ev = Event()

    # open and read Par file
    fid = open(Par_filename, 'r')

    fid.readline()
    fid.readline()
    fid.readline()
    fid.readline()

    lat_old = 90.0 - float(fid.readline().strip().split()[0])
    lon_old = float(fid.readline().strip().split()[0])
    depth = float(fid.readline().strip().split()[0])

    fid.readline()

    Mtt_old = float(fid.readline().strip().split()[0])
    Mpp_old = float(fid.readline().strip().split()[0])
    Mrr_old = float(fid.readline().strip().split()[0])
    Mtp_old = float(fid.readline().strip().split()[0])
    Mtr_old = float(fid.readline().strip().split()[0])
    Mpr_old = float(fid.readline().strip().split()[0])

    # rotate event into physical domain

    lat, lon = rot.rotate_lat_lon(lat_old, lon_old, rotation_axis,
                                  rotation_angle)
    Mrr, Mtt, Mpp, Mtr, Mpr, Mtp = rot.rotate_moment_tensor(
        Mrr_old, Mtt_old, Mpp_old, Mtr_old, Mpr_old, Mtp_old, lat_old, lon_old,
        rotation_axis, rotation_angle)

    # populate event origin data
    ev.event_type = event_type

    ev_origin = Origin()
    ev_origin.time = UTCDateTime(origin_time)
    ev_origin.latitude = lat
    ev_origin.longitude = lon
    ev_origin.depth = depth
    ev.origins.append(ev_origin)

    # populate event moment tensor

    ev_tensor = Tensor()
    ev_tensor.m_rr = Mrr
    ev_tensor.m_tt = Mtt
    ev_tensor.m_pp = Mpp
    ev_tensor.m_rt = Mtr
    ev_tensor.m_rp = Mpr
    ev_tensor.m_tp = Mtp

    ev_momenttensor = MomentTensor()
    ev_momenttensor.tensor = ev_tensor
    ev_momenttensor.scalar_moment = np.sqrt(Mrr ** 2 + Mtt ** 2 + Mpp ** 2 +
                                            Mtr ** 2 + Mpr ** 2 + Mtp ** 2)

    ev_focalmechanism = FocalMechanism()
    ev_focalmechanism.moment_tensor = ev_momenttensor
    ev_focalmechanism.nodal_planes = NodalPlanes().setdefault(0, 0)

    ev.focal_mechanisms.append(ev_focalmechanism)

    # populate event magnitude
    ev_magnitude = Magnitude()
    ev_magnitude.mag = 0.667 * (np.log10(ev_momenttensor.scalar_moment) - 9.1)
    ev_magnitude.magnitude_type = 'Mw'
    ev.magnitudes.append(ev_magnitude)

    # write QuakeML file
    cat = Catalog()
    cat.append(ev)
    cat.write(QuakeML_filename, format="quakeml")

    # clean up
    fid.close()
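The magnitude written above follows the moment-magnitude relation Mw = (2/3) * (log10(M0) - 9.1) with M0 in N m, where M0 is approximated from the six tensor elements as the root of the sum of their squares. A worked example with arbitrary tensor values:

import numpy as np

# Arbitrary moment tensor elements in N m (illustration only).
Mrr, Mtt, Mpp, Mtr, Mpr, Mtp = 1.2e17, -0.8e17, -0.4e17, 3.0e16, 1.0e16, 2.0e16

M0 = np.sqrt(Mrr ** 2 + Mtt ** 2 + Mpp ** 2 + Mtr ** 2 + Mpr ** 2 + Mtp ** 2)
Mw = 0.667 * (np.log10(M0) - 9.1)
print(M0, Mw)  # roughly 1.5e17 N m and Mw of about 5.4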
Example n. 25
0
def plot_raydensity(map_object, station_events, domain):
    """
    Create a ray-density plot for all events and all stations.

    This function is potentially expensive and will use all CPUs available.
    Does require geographiclib to be installed.
    """
    import ctypes as C
    from lasif import rotations
    from lasif.domain import RectangularSphericalSection
    from lasif.tools.great_circle_binner import GreatCircleBinner
    from lasif.utils import Point
    import multiprocessing
    import progressbar
    from scipy.stats import scoreatpercentile

    if not isinstance(domain, RectangularSphericalSection):
        raise NotImplementedError(
            "Raydensity currently only implemented for rectangular domains. "
            "Should be easy to implement for other domains. Let me know.")

    # Merge everything so that a list with coordinate pairs is created. This
    # list is then distributed among all processors.
    station_event_list = []
    for event, stations in station_events:
        if domain.rotation_angle_in_degree:
            # Rotate point to the non-rotated domain.
            e_point = Point(*rotations.rotate_lat_lon(
                event["latitude"], event["longitude"], domain.rotation_axis,
                -1.0 * domain.rotation_angle_in_degree))
        else:
            e_point = Point(event["latitude"], event["longitude"])
        for station in stations.itervalues():
            # Rotate point to the non-rotated domain if necessary.
            if domain.rotation_angle_in_degree:
                p = Point(*rotations.rotate_lat_lon(
                    station["latitude"], station["longitude"],
                    domain.rotation_axis,
                    -1.0 * domain.rotation_angle_in_degree))
            else:
                p = Point(station["latitude"], station["longitude"])
            station_event_list.append((e_point, p))

    circle_count = len(station_event_list)

    # The granularity of the latitude/longitude discretization for the
    # raypaths. Attempt to get a somewhat meaningful result in any case.
    lat_lng_count = 1000
    if circle_count < 1000:
        lat_lng_count = 1000
    elif circle_count < 10000:
        lat_lng_count = 2000
    else:
        lat_lng_count = 3000

    cpu_count = multiprocessing.cpu_count()

    def to_numpy(raw_array, dtype, shape):
        data = np.frombuffer(raw_array.get_obj())
        data.dtype = dtype
        return data.reshape(shape)

    print "\nLaunching %i greatcircle calculations on %i CPUs..." % \
        (circle_count, cpu_count)

    widgets = ["Progress: ", progressbar.Percentage(),
               progressbar.Bar(), "", progressbar.ETA()]
    pbar = progressbar.ProgressBar(widgets=widgets,
                                   maxval=circle_count).start()

    def great_circle_binning(sta_evs, bin_data_buffer, bin_data_shape,
                             lock, counter):
        new_bins = GreatCircleBinner(
            domain.min_latitude, domain.max_latitude,
            lat_lng_count, domain.min_longitude,
            domain.max_longitude, lat_lng_count)
        for event, station in sta_evs:
            with lock:
                counter.value += 1
            if not counter.value % 25:
                pbar.update(counter.value)
            new_bins.add_greatcircle(event, station)

        bin_data = to_numpy(bin_data_buffer, np.uint32, bin_data_shape)
        with bin_data_buffer.get_lock():
            bin_data += new_bins.bins

    # Split the data in cpu_count parts.
    def chunk(seq, num):
        avg = len(seq) / float(num)
        out = []
        last = 0.0
        while last < len(seq):
            out.append(seq[int(last):int(last + avg)])
            last += avg
        return out
    chunks = chunk(station_event_list, cpu_count)

    # One instance that collects everything.
    collected_bins = GreatCircleBinner(
        domain.min_latitude, domain.max_latitude,
        lat_lng_count, domain.min_longitude,
        domain.max_longitude, lat_lng_count)

    # Use a multiprocessing shared memory array and map it to a numpy view.
    collected_bins_data = multiprocessing.Array(C.c_uint32,
                                                collected_bins.bins.size)
    collected_bins.bins = to_numpy(collected_bins_data, np.uint32,
                                   collected_bins.bins.shape)

    # Create, launch and join one process per CPU. Use a shared value as a
    # counter and a lock to avoid race conditions.
    processes = []
    lock = multiprocessing.Lock()
    counter = multiprocessing.Value("i", 0)
    for _i in xrange(cpu_count):
        processes.append(multiprocessing.Process(
            target=great_circle_binning, args=(chunks[_i], collected_bins_data,
                                               collected_bins.bins.shape, lock,
                                               counter)))
    for process in processes:
        process.start()
    for process in processes:
        process.join()

    pbar.finish()

    stations = chain.from_iterable((
        _i[1].values() for _i in station_events if _i[1]))
    # Remove duplicates
    stations = [(_i["latitude"], _i["longitude"]) for _i in stations]
    stations = set(stations)
    title = "%i Events, %i unique raypaths, "\
            "%i unique stations" % (len(station_events), circle_count,
                                    len(stations))
    plt.title(title, size="xx-large")

    data = collected_bins.bins.transpose()

    if data.max() >= 10:
        data = np.log10(np.clip(data, a_min=0.5, a_max=data.max()))
        data[data >= 0.0] += 0.1
        data[data < 0.0] = 0.0
        max_val = scoreatpercentile(data.ravel(), 99)
    else:
        max_val = data.max()

    cmap = cm.get_cmap("gist_heat")
    cmap._init()
    cmap._lut[:120, -1] = np.linspace(0, 1.0, 120) ** 2

    # Slightly change the appearance of the map so it suits the rays.
    map_object.fillcontinents(color='#dddddd', lake_color='#dddddd', zorder=2)

    lngs, lats = collected_bins.coordinates
    # Rotate back if necessary!
    if domain.rotation_angle_in_degree:
        for lat, lng in zip(lats, lngs):
            lat[:], lng[:] = rotations.rotate_lat_lon(
                lat, lng, domain.rotation_axis,
                domain.rotation_angle_in_degree)
    ln, la = map_object(lngs, lats)
    map_object.pcolormesh(ln, la, data, cmap=cmap, vmin=0, vmax=max_val,
                          zorder=10)
    # Draw the coastlines so they appear over the rays. Otherwise things are
    # sometimes hard to see.
    map_object.drawcoastlines(zorder=3)
    map_object.drawcountries(linewidth=0.2, zorder=3)
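Two small building blocks of the routine above are the even splitting of the work list across CPUs and the mapping of a multiprocessing shared array onto a NumPy view; both are shown in isolation below (no worker processes are spawned, the sizes are illustrative):

import ctypes as C
import multiprocessing

import numpy as np

def chunk(seq, num):
    """Split a sequence into `num` roughly equal parts."""
    avg = len(seq) / float(num)
    out, last = [], 0.0
    while last < len(seq):
        out.append(seq[int(last):int(last + avg)])
        last += avg
    return out

print(chunk(list(range(10)), 3))

# A shared-memory buffer viewed as a (4, 5) uint32 array.
shape = (4, 5)
shared = multiprocessing.Array(C.c_uint32, shape[0] * shape[1])
bins = np.frombuffer(shared.get_obj(), dtype=np.uint32).reshape(shape)
bins += 1  # every worker could accumulate into this view under a lock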
Example n. 26
0
def Iter2snapshot(snapD, evlo, evla, component, vmin, vmax, stations, fprx, projection, outdir, dpi, \
        lat_min, lat_max, lon_min, lon_max, lat_centre, lon_centre, res, zoomin, geopolygons, dt):
    """Plot snapshot, used by plot_snapshots_mp
    """
    print 'Plotting Snapshot for:',snapD.iteration,' step!'
    # - Set up the map. ------------------------------------------------------------------------
    if projection=='global':
        m=Basemap(projection='ortho', lon_0=lon_centre, lat_0=lat_centre, resolution=res)
        m.drawparallels(np.arange(-80.0,80.0,10.0),labels=[1,0,0,1])
        m.drawmeridians(np.arange(-170.0,170.0,10.0),labels=[1,0,0,1])	
    elif projection=='regional_ortho':
        m1 = Basemap(projection='ortho', lon_0=lon_min, lat_0=lat_min, resolution='l')
        m = Basemap(projection='ortho',lon_0=lon_min,lat_0=lat_min, resolution=res,\
            llcrnrx=0., llcrnry=0., urcrnrx=m1.urcrnrx/zoomin, urcrnry=m1.urcrnry/3.5)
    elif projection=='regional_merc':
        m=Basemap(projection='merc',llcrnrlat=lat_min,urcrnrlat=lat_max,llcrnrlon=lon_min,urcrnrlon=lon_max,lat_ts=20,resolution=res)
        m.drawparallels(np.arange(np.round(lat_min),np.round(lat_max),d_lat),labels=[1,0,0,1])
        m.drawmeridians(np.arange(np.round(lon_min),np.round(lon_max),d_lon),labels=[1,0,0,1])
    elif projection=='lambert':
        distEW, az, baz=obspy.geodetics.gps2dist_azimuth(lat_min, lon_min,
                            lat_min, lon_max) # distance is in m
        distNS, az, baz=obspy.geodetics.gps2dist_azimuth(lat_min, lon_min,
                            lat_max+2., lon_min) # distance is in m
        m = Basemap(width=distEW, height=distNS, rsphere=(6378137.00,6356752.3142), resolution='l', projection='lcc',\
            lat_1=lat_min, lat_2=lat_max, lon_0=lon_centre, lat_0=lat_centre+1.)
        m.drawparallels(np.arange(-80.0,80.0,10.0), linewidth=1, dashes=[2,2], labels=[1,0,0,0], fontsize=15)
        m.drawmeridians(np.arange(-170.0,170.0,10.0), linewidth=1, dashes=[2,2], labels=[0,0,1,1], fontsize=15)
    m.drawcoastlines()
    m.fillcontinents(lake_color='#99ffff',zorder=0.2)
    m.drawmapboundary(fill_color="white")
    m.drawcountries()
    try:
        evx, evy=m(evlo, evla)
        m.plot(evx, evy, 'yo', markersize=2)
    except: pass
    for procD in snapD:
        field   = procD.field
        lats    = procD.lat
        lons    = procD.lon
        lon, lat = np.meshgrid(lons, lats)
        if snapD.rotangle != 0.0:
            lat_rot = np.zeros(np.shape(lon),dtype=float)
            lon_rot = np.zeros(np.shape(lat),dtype=float)
            for idlon in np.arange(len(lons)):
                for idlat in np.arange(len(lats)):
                    lat_rot[idlat,idlon], lon_rot[idlat,idlon]  = rotations.rotate_lat_lon(lat[idlat,idlon], lon[idlat,idlon],  snapD.n, -snapD.rotangle)
                    lat_rot[idlat,idlon] = 90.0-lat_rot[idlat,idlon]
            lon = lon_rot
            lat = lat_rot
        # - colourmap. ---------------------------------------------------------
        cmap = colors.get_colormap('tomo_80_perc_linear_lightness')
        x, y = m(lon, lat)
        im = m.pcolormesh(x, y, field, shading='gouraud', cmap=cmap, vmin=vmin, vmax=vmax) 
    # - Add colorbar and title. -----------------------------------------------------------------
    cb = m.colorbar(im, "right", size="3%", pad='2%')
    if component in UNIT_DICT:
        cb.set_label(UNIT_DICT[component], fontsize="x-large", rotation=0)
    try:
        geopolygons.PlotPolygon(inbasemap=m)
    except:
        pass
    outfname=outdir+'/'+fprx+'_%06d.png' %(int(snapD.iteration))
    savefig(outfname, format='png', dpi=dpi)
    del m, lon, lat
    return
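The per-point double loop above can also be written as one vectorized call, the way Example n. 22 rotates an entire meshgrid at once; a sketch assuming rotations.rotate_lat_lon accepts flattened arrays (grid and rotation values are illustrative):

import numpy as np
from lasif import rotations

lons = np.linspace(0.0, 20.0, 5)
lats = np.linspace(40.0, 50.0, 4)
lon, lat = np.meshgrid(lons, lats)

rot_axis, rot_angle = [0.0, 1.0, 0.0], 57.5  # hypothetical rotation setup
lat_r, lon_r = rotations.rotate_lat_lon(lat.ravel(), lon.ravel(),
                                        rot_axis, -rot_angle)
lat_r = np.asarray(lat_r).reshape(lat.shape)
lon_r = np.asarray(lon_r).reshape(lon.shape)
colat_r = 90.0 - lat_r  # same colatitude conversion as in the loop above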
Example n. 27
0
 def plot_snapshots_mp(self, component, vmin, vmax, outdir, fprx='wavefield',iter0=100, iterf=17100, diter=200,
         stations=False, res="i", projection='lambert', dpi=300, zoomin=2, geopolygons=None, evlo=None, evla=None, dt=None ):
     """Multiprocessing version of plot_snapshots
     ================================================================================================
     Input parameters:
     component       - component for plotting
                         The currently available "components" are:
                             Material parameters: A, B, C, mu, lambda, rhoinv, vp, vsh, vsv, rho
                             Velocity field snapshots: vx, vy, vz
                             Sensitivity kernels: Q_mu, Q_kappa, alpha_mu, alpha_kappa
     depth           - depth for plot (km)
     vmin, vmax      - minimum/maximum value for plotting
     outdir          - output directory
     fprx            - output file name prefix
     iter0, iterf    - initial/final iterations for plotting
     diter           - iteration interval
     stations        - plot stations or not
     res             - resolution of the coastline (c, l, i, h, f)
     projection      - projection type (global, regional_ortho, regional_merc, lambert)
     dpi             - dots per inch (figure resolution parameter)
     zoomin          - zoom in factor for proj = regional_ortho
     geopolygons     - geological polygons( basins etc. ) for plot
     evlo, evla      - event location for plotting 
     =================================================================================================
     """
     if not os.path.isdir(outdir):
         os.makedirs(outdir)
     iterArr=np.arange(iter0 ,iterf+diter, diter, dtype=int)
     use_default_iter=False
     for iteration in iterArr:
         if not str(iteration) in self[component].keys():
             warnings.warn('Velocity Snapshot:'+str(iteration)+' does not exist!', UserWarning, stacklevel=1)
             use_default_iter=True
     if use_default_iter: iterArr = self[component].keys()
     lat_min = self.attrs['lat_min']; lat_max = self.attrs['lat_max']
     lon_min = self.attrs['lon_min']; lon_max = self.attrs['lon_max']
     self.n = self.attrs['rotation_axis']; self.rotangle = self.attrs['rotation_angle']
     lat_centre = (lat_max+lat_min)/2.0; lon_centre = (lon_max+lon_min)/2.0
     lat_centre, lon_centre = rotations.rotate_lat_lon(lat_centre, lon_centre, self.n, -self.rotangle)
     print '================================= Start preparing generating snapshots =================================='
     dataLst=[]
     for iteration in iterArr:
         subgroup=self[component+'/'+str(iteration)]
         snapD=snap_data()
         for key in subgroup.keys():
             subdset = subgroup[key]
             field   = subdset[...]
             theta   = subdset.attrs['theta']
             phi     = subdset.attrs['phi']
             lats    = 90.0 - theta * 180.0 / np.pi
             lons    = phi * 180.0 / np.pi
             snapD.append(proc_data(field=field, lon=lons, lat=lats))
         snapD.n=self.n; snapD.rotangle=self.rotangle; snapD.iteration=iteration
         dataLst.append(snapD)
     self.close()
     print '============================= Start multiprocessing generating snapshots ==============================='
     PLOTSNAP = partial(Iter2snapshot, evlo=evlo, evla=evla, component=component, \
         vmin=vmin, vmax=vmax, stations=stations, fprx=fprx, projection=projection, \
         outdir=outdir, dpi=dpi, lat_min=lat_min, lat_max=lat_max, lon_min=lon_min, lon_max=lon_max,\
         lat_centre=lat_centre, lon_centre=lon_centre, res=res, \
         zoomin=zoomin, geopolygons=geopolygons, dt=dt)
     pool=multiprocessing.Pool()
     pool.map(PLOTSNAP, dataLst) #make our results with a map call
     pool.close() #we are not adding any more processes
     pool.join() #tell it to wait until all threads are done before going on
     print '============================== End multiprocessing generating snapshots ================================='
     return
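The multiprocessing pattern used above (bind the fixed keyword arguments with functools.partial, then map the per-snapshot objects over a Pool) in a minimal standalone form; the worker and its arguments below are stand-ins, not part of the original code:

import multiprocessing
from functools import partial

def render(item, scale=1.0, label=""):
    """Stand-in worker; the real one would draw and save one snapshot."""
    return "%s%g" % (label, item * scale)

if __name__ == "__main__":
    job = partial(render, scale=2.0, label="snapshot_")
    pool = multiprocessing.Pool()
    results = pool.map(job, [1, 2, 3])
    pool.close()
    pool.join()
    print(results)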
Example n. 28
0
 def plot_depth_slice(self, component, vmin, vmax, iteration=0,
         res="l", projection='lambert', zoomin=2, geopolygons=None, evlo=None, evla=None):
     """
     Plot depth slices of field component at given depth ranging between "valmin" and "valmax"
     ================================================================================================
     Input parameters:
     component       - component for plotting
                         The currently available "components" are:
                             Material parameters: A, B, C, mu, lambda, rhoinv, vp, vsh, vsv, rho
                             Velocity field snapshots: vx, vy, vz
                             Sensitivity kernels: Q_mu, Q_kappa, alpha_mu, alpha_kappa
     vmin, vmax      - minimum/maximum value for plotting
     iteration       - iteration step for snapshot
     res             - resolution of the coastline (c, l, i, h, f)
      projection      - projection type (global, regional_ortho, regional_merc, lambert)
     zoomin          - zoom in factor for proj = regional_ortho
     geopolygons     - geological polygons( basins etc. ) for plot
     =================================================================================================
     """
     # - Some initialisations. ------------------------------------------------------------------
     fig=plt.figure()
     self.minlat = self.attrs['lat_min']; self.maxlat = self.attrs['lat_max']
     self.minlon = self.attrs['lon_min']; self.maxlon = self.attrs['lon_max']
     self.n = self.attrs['rotation_axis']; self.rotangle = self.attrs['rotation_angle']
     lat_centre = (self.maxlat+self.minlat)/2.0; lon_centre = (self.maxlon+self.minlon)/2.0
     self.lat_centre, self.lon_centre = rotations.rotate_lat_lon(lat_centre, lon_centre, self.n, -self.rotangle)
     # - Set up the map. ------------------------------------------------------------------------
     m=self._get_basemap(projection=projection)
     try: geopolygons.PlotPolygon(inbasemap=m)
     except: pass
     try:
         evx, evy=m(evlo, evla)
         m.plot(evx, evy, 'yo', markersize=2)
     except: pass
     group=self[component]
     subgroup=group[str(iteration)]
     for key in subgroup.keys():
         subdset = subgroup[key]
         field   = subdset[...]
         theta   = subdset.attrs['theta']
         phi     = subdset.attrs['phi']
         lats    = 90.0 - theta * 180.0 / np.pi
         lons    = phi * 180.0 / np.pi
         lon, lat = np.meshgrid(lons, lats)
         if self.rotangle != 0.0:
             lat_rot = np.zeros(np.shape(lon),dtype=float)
             lon_rot = np.zeros(np.shape(lat),dtype=float)
             for idlon in np.arange(len(lons)):
                 for idlat in np.arange(len(lats)):
                     lat_rot[idlat,idlon],lon_rot[idlat,idlon]  = rotations.rotate_lat_lon(lat[idlat,idlon], lon[idlat,idlon],  self.n, -self.rotangle)
                     lat_rot[idlat,idlon] = 90.0-lat_rot[idlat,idlon]
             lon = lon_rot
             lat = lat_rot
         # - colourmap. ---------------------------------------------------------
         cmap = colors.get_colormap('tomo_80_perc_linear_lightness')
         x, y = m(lon, lat)
         im = m.pcolormesh(x, y, field, shading='gouraud', cmap=cmap, vmin=vmin, vmax=vmax) 
      # - Add colorbar and title. -----------------------------------------------------------------
     cb = m.colorbar(im, "right", size="3%", pad='2%')
     if component in UNIT_DICT:
         cb.set_label(UNIT_DICT[component], fontsize="x-large", rotation=0)
     # plt.suptitle("Depth slice of %s at %i km" % (component, r_effective), fontsize=20)
     # - Plot stations if available. ------------------------------------------------------------
     # if self.stations and stations:
     #     x,y = m(self.stlons,self.stlats)
     #     for n in range(self.n_stations):
     #         plt.text(x[n],y[n],self.stnames[n][:4])
     #         plt.plot(x[n],y[n],'ro')
     plt.show()
     print "minimum value: "+str(vmin)+", maximum value: "+str(vmax)
     return
Example n. 29
0
    def get_waveforms_synthetic(self, event_name, station_id,
                                long_iteration_name):
        """
        Gets the synthetic waveforms for the given event and station as a
        :class:`~obspy.core.stream.Stream` object.

        :param event_name: The name of the event.
        :param station_id: The id of the station in the form ``NET.STA``.
        :param long_iteration_name: The long form of an iteration name.
        """
        from lasif import rotations
        import lasif.domain

        iteration = self.comm.iterations.get(long_iteration_name)

        st = self._get_waveforms(event_name,
                                 station_id,
                                 data_type="synthetic",
                                 tag_or_iteration=iteration.long_name)
        network, station = station_id.split(".")

        formats = list(set([tr.stats._format for tr in st]))
        if len(formats) != 1:
            raise ValueError(
                "The synthetics for one Earthquake must all have the same "
                "data format under the assumption that they all originate "
                "from the same solver. Found formats: %s" % (str(formats)))
        format = formats[0].lower()

        # In the case of data coming from SES3D the components must be
        # mapped to ZNE as it works in XYZ.
        if format == "ses3d":
            # This maps the synthetic channels to ZNE.
            synthetic_coordinates_mapping = {"X": "N", "Y": "E", "Z": "Z"}

            for tr in st:
                tr.stats.network = network
                tr.stats.station = station
                # SES3D X points south. Reverse it to arrive at ZNE.
                if tr.stats.channel in ["X"]:
                    tr.data *= -1.0
                # SES3D files have no starttime. Set to the event time.
                tr.stats.starttime = \
                    self.comm.events.get(event_name)["origin_time"]
                tr.stats.channel = \
                    synthetic_coordinates_mapping[tr.stats.channel]

            # Rotate if needed. Again only SES3D synthetics need to be rotated.
            domain = self.comm.project.domain
            if isinstance(domain, lasif.domain.RectangularSphericalSection) \
                    and domain.rotation_angle_in_degree and \
                    "ses3d" in iteration.solver_settings["solver"].lower():
                # Coordinates are required for the rotation.
                coordinates = self.comm.query.get_coordinates_for_station(
                    event_name, station_id)

                # First rotate the station back to see, where it was
                # recorded.
                lat, lng = rotations.rotate_lat_lon(
                    lat=coordinates["latitude"],
                    lon=coordinates["longitude"],
                    rotation_axis=domain.rotation_axis,
                    angle=-domain.rotation_angle_in_degree)
                # Rotate the synthetics.
                n, e, z = rotations.rotate_data(
                    st.select(channel="N")[0].data,
                    st.select(channel="E")[0].data,
                    st.select(channel="Z")[0].data, lat, lng,
                    domain.rotation_axis, domain.rotation_angle_in_degree)
                st.select(channel="N")[0].data = n
                st.select(channel="E")[0].data = e
                st.select(channel="Z")[0].data = z

        st.sort()

        # Apply the project function that modifies synthetics on the fly.
        fct = self.comm.project.get_project_function("process_synthetics")
        return fct(st,
                   iteration=iteration,
                   event=self.comm.events.get(event_name))
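The SES3D-specific branch above maps the solver's X/Y/Z channels to N/E/Z and flips the sign of X, which points south in SES3D. A minimal sketch of that mapping on a toy ObsPy stream (the channel data and origin time are made up):

import numpy as np
from obspy import Stream, Trace, UTCDateTime

synthetic_coordinates_mapping = {"X": "N", "Y": "E", "Z": "Z"}
origin_time = UTCDateTime(2010, 1, 1)

st = Stream([Trace(data=np.random.randn(10), header={"channel": c})
             for c in "XYZ"])

for tr in st:
    if tr.stats.channel == "X":
        tr.data *= -1.0  # SES3D X points south; flip to obtain North.
    tr.stats.starttime = origin_time
    tr.stats.channel = synthetic_coordinates_mapping[tr.stats.channel]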
Example n. 30
0
    def plot_depth_slice(self,
                         component,
                         depth_in_km,
                         m,
                         absolute_values=True):
        """
        Plots a depth slice.

        :param component: The component to plot.
        :type component: basestring
        :param depth_in_km: The depth in km to plot. If the exact depth does
             not exist, the nearest neighbour will be plotted.
        :type depth_in_km: integer or float
        :param m: Basemap instance.
        """
        depth_index = self.get_closest_gll_index("depth", depth_in_km)

        # No need to do anything if the currently plotted slice is already
        # plotted. This is useful for interactive use when the desired depth
        # is changed but the closest GLL collocation point is still the same.
        if hasattr(m, "_plotted_depth_slice"):
            # Use a tuple of relevant parameters.
            if m._plotted_depth_slice == (self.directory, depth_index,
                                          component, absolute_values):
                return None

        data = self.parsed_components[component]

        depth = self.collocation_points_depth[depth_index]
        lngs = self.collocation_points_lngs
        lats = self.collocation_points_lats

        # Rotate data if needed.
        lon, lat = np.meshgrid(lngs, lats)
        if hasattr(self.domain, "rotation_axis") and \
                self.domain.rotation_axis and \
                self.domain.rotation_angle_in_degree:
            lon_shape = lon.shape
            lat_shape = lat.shape
            lon.shape = lon.size
            lat.shape = lat.size
            lat, lon = rotations.rotate_lat_lon(
                lat, lon, self.domain.rotation_axis,
                self.domain.rotation_angle_in_degree)
            lon.shape = lon_shape
            lat.shape = lat_shape

        x, y = m(lon, lat)
        depth_data = data[::-1, :, depth_index]

        # Plot values relative to AK135.
        if not absolute_values:
            cmp_map = {"rho": "density", "vp": "vp", "vsh": "vs", "vsv": "vs"}

            factor = {
                "rho": 1000.0,
                "vp": 1.0,
                "vsh": 1.0,
                "vsv": 1.0,
            }

            if component not in cmp_map:
                vmin, vmax = depth_data.min(), depth_data.max()
                vmedian = np.median(depth_data)
                offset = max(abs(vmax - vmedian), abs(vmedian - vmin))

                if vmax - vmin == 0:
                    offset = 0.01

                vmin = vmedian - offset
                vmax = vmedian + offset
            else:
                reference_value = self.one_d_model.get_value(
                    cmp_map[component], depth) * factor[component]

                depth_data = (depth_data - reference_value) / reference_value
                depth_data *= 100.0
                offset = np.abs(depth_data)
                try:
                    offset = offset[offset < 50].max()
                except BaseException:
                    offset = offset.max()
                vmin = -offset
                vmax = offset
        else:
            vmin, vmax = depth_data.min(), depth_data.max()
            vmedian = np.median(depth_data)
            offset = max(abs(vmax - vmedian), abs(vmedian - vmin))

            min_delta = abs(vmax * 0.005)
            if (vmax - vmin) < min_delta:
                offset = min_delta

            vmin = vmedian - offset
            vmax = vmedian + offset

        # Remove an existing pcolormesh if it exists. This does not hurt in
        # any case but is useful for interactive use.
        if hasattr(m, "_depth_slice"):
            m._depth_slice.remove()
            del m._depth_slice

        im = m.pcolormesh(x,
                          y,
                          depth_data,
                          cmap=tomo_colormap,
                          vmin=vmin,
                          vmax=vmax)
        m._depth_slice = im

        # Store what is currently plotted.
        m._plotted_depth_slice = (self.directory, depth_index, component,
                                  absolute_values)

        return {"depth": depth, "mesh": im, "data": depth_data}
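The relative mode above expresses the slice as a percent perturbation around a 1-D reference value and, when picking the colour range, ignores perturbations above 50 % so single outliers do not wash out the scale. The same arithmetic in isolation (reference value and samples are made up):

import numpy as np

depth_data = np.array([3.45, 3.52, 3.61, 3.39, 5.90])  # e.g. vsv in km/s
reference_value = 3.50                                  # hypothetical 1-D value

depth_data = (depth_data - reference_value) / reference_value * 100.0

offset = np.abs(depth_data)
try:
    offset = offset[offset < 50].max()
except ValueError:  # every perturbation exceeds 50 %
    offset = offset.max()
vmin, vmax = -offset, offset
print(vmin, vmax)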
Example n. 31
0
def _plot_hdf5_model_horizontal(f,
                                component,
                                output_filename,
                                vmin=None,
                                vmax=None):
    import matplotlib.cm
    import matplotlib.pylab as plt

    data = xarray.DataArray(f["data"][component][:],
                            [("latitude", 90.0 - f["coordinate_0"][:]),
                             ("longitude", f["coordinate_1"][:]),
                             ("radius", f["coordinate_2"][:] / 1000.0)])

    plt.style.use('seaborn-pastel')

    from lasif.domain import RectangularSphericalSection
    domain = RectangularSphericalSection(**dict(f["_meta"]["domain"].attrs))

    plt.figure(figsize=(32, 18))

    depth_position_map = {
        50: (0, 0),
        100: (0, 1),
        150: (1, 0),
        250: (1, 1),
        400: (2, 0),
        600: (2, 1)
    }

    for depth, location in depth_position_map.items():
        ax = plt.subplot2grid((3, 5), location)
        radius = 6371.0 - depth

        # set up a map and colourmap
        m = domain.plot(ax=ax, resolution="c", skip_map_features=True)

        import lasif.colors
        my_colormap = lasif.colors.get_colormap(
            "tomo_full_scale_linear_lightness")

        from lasif import rotations

        x, y = np.meshgrid(data.longitude, data.latitude)

        x_shape = x.shape
        y_shape = y.shape

        lat_r, lon_r = rotations.rotate_lat_lon(
            y.ravel(), x.ravel(), domain.rotation_axis,
            domain.rotation_angle_in_degree)

        x, y = m(lon_r, lat_r)

        x.shape = x_shape
        y.shape = y_shape

        plot_data = data.sel(radius=radius, method="nearest")
        plot_data = np.ma.masked_invalid(plot_data.data)

        # Overwrite colormap things if given.
        if vmin is not None and vmax is not None:
            min_val_plot = vmin
            max_val_plot = vmax
        else:
            mean = plot_data.mean()
            max_diff = max(abs(mean - plot_data.min()),
                           abs(plot_data.max() - mean))
            min_val_plot = mean - max_diff
            max_val_plot = mean + max_diff
            # Plotting essentially constant models.
            min_delta = 0.001 * abs(max_val_plot)
            if (max_val_plot - min_val_plot) < min_delta:
                max_val_plot = max_val_plot + min_delta
                min_val_plot = min_val_plot - min_delta

        # Plot.
        im = m.pcolormesh(x,
                          y,
                          plot_data,
                          cmap=my_colormap,
                          vmin=min_val_plot,
                          vmax=max_val_plot,
                          shading="gouraud")

        # make a colorbar and title
        m.colorbar(im, "right", size="3%", pad='2%')
        plt.title(str(depth) + ' km')

    # Depth based statistics.
    plt.subplot2grid((3, 5), (0, 4), rowspan=3)
    plt.title("Depth statistics")
    mean = data.mean(axis=(0, 1))
    std = data.std(axis=(0, 1))
    _min = data.min(axis=(0, 1))
    _max = data.max(axis=(0, 1))

    plt.fill_betweenx(data.radius,
                      mean - std,
                      mean + std,
                      label="std",
                      color="#FF3C83")
    plt.plot(mean, data.radius, label="mean", color="k", lw=2)
    plt.plot(_min, data.radius, color="grey", label="min")
    plt.plot(_max, data.radius, color="grey", label="max")
    plt.legend(loc="best")
    plt.xlabel("Value")
    plt.ylabel("Radius")

    plt.hlines(data.radius,
               plt.xlim()[0],
               plt.xlim()[1],
               color="0.8",
               zorder=-10,
               linewidth=0.5)

    # Roughness plots.
    plt.subplot2grid((3, 5), (0, 2))
    _d = np.abs(data.diff("latitude", n=1)).sum("latitude").data
    plt.title("Roughness in latitude direction, Total: %g" % _d.sum())
    plt.pcolormesh(data.longitude.data,
                   data.radius.data,
                   _d.T,
                   cmap=matplotlib.cm.viridis)
    try:
        plt.colorbar()
    except:
        pass
    plt.xlabel("Longitude")
    plt.ylabel("Radius")

    plt.subplot2grid((3, 5), (1, 2))
    _d = np.abs(data.diff("longitude", n=1)).sum("longitude").data
    plt.title("Roughness in longitude direction. Total: %g" % data.sum())
    plt.pcolormesh(data.latitude.data,
                   data.radius.data,
                   _d.T,
                   cmap=matplotlib.cm.viridis)
    try:
        plt.colorbar()
    except:
        pass
    plt.xlabel("Latitude")
    plt.ylabel("Radius")

    plt.subplot2grid((3, 5), (2, 2))
    _d = np.abs(data.diff("radius", n=1)).sum("radius").data
    plt.title("Roughness in radius direction. Total: %g" % _d.sum())
    plt.pcolormesh(data.longitude.data,
                   data.latitude.data,
                   _d,
                   cmap=matplotlib.cm.viridis)
    try:
        plt.colorbar()
    except:
        pass
    plt.xlabel("Longitude")
    plt.ylabel("Latitude")

    # L2
    plt.subplot2grid((3, 5), (0, 3))
    _d = (data**2).sum("latitude").data
    plt.title("L2 Norm in latitude direction, Total: %g" % _d.sum())
    plt.pcolormesh(data.longitude.data,
                   data.radius.data,
                   _d.T,
                   cmap=matplotlib.cm.viridis)
    try:
        plt.colorbar()
    except:
        pass
    plt.xlabel("Longitude")
    plt.ylabel("Radius")

    plt.subplot2grid((3, 5), (1, 3))
    _d = (data**2).sum("longitude").data
    plt.title("L2 Norm in longitude direction, Total: %g" % _d.sum())
    plt.pcolormesh(data.latitude.data,
                   data.radius.data,
                   _d.T,
                   cmap=matplotlib.cm.viridis)
    try:
        plt.colorbar()
    except:
        pass
    plt.xlabel("Latitude")
    plt.ylabel("Radius")

    plt.subplot2grid((3, 5), (2, 3))
    _d = (data**2).sum("radius").data
    plt.title("L2 Norm in radius direction, Total: %g" % _d.sum())
    plt.pcolormesh(data.longitude.data,
                   data.latitude.data,
                   _d,
                   cmap=matplotlib.cm.viridis)
    try:
        plt.colorbar()
    except:
        pass
    plt.xlabel("Longitude")
    plt.ylabel("Latitude")

    plt.suptitle("Component %s - File %s" % (component, output_filename),
                 fontsize=20)

    plt.tight_layout(rect=(0, 0, 1, 0.95))

    plt.savefig(output_filename, dpi=150)
    plt.close()
Example n. 32
0
    def finalize_adjoint_sources(self, iteration_name, event_name):
        """
        Finalizes the adjoint sources.
        """

        import numpy as np
        from lasif import rotations

        window_manager = self.comm.windows.get(event_name, iteration_name)
        event = self.comm.events.get(event_name)
        iteration = self.comm.iterations.get(iteration_name)
        iteration_event_def = iteration.events[event["event_name"]]
        iteration_stations = iteration_event_def["stations"]

        # For now assume that the adjoint sources have the same
        # sampling rate as the synthetics which in LASIF's workflow
        # actually has to be true.
        dt = iteration.get_process_params()["dt"]

        # Current domain and solver.
        domain = self.comm.project.domain
        solver = iteration.solver_settings["solver"].lower()

        adjoint_source_stations = set()

        if "ses3d" in solver:
            ses3d_all_coordinates = []

        event_weight = iteration_event_def["event_weight"]

        output_folder = self.comm.project.get_output_folder(
            type="adjoint_sources",
            tag="ITERATION_%s__%s" % (iteration_name, event_name))

        l = sorted(window_manager.list())
        for station, windows in itertools.groupby(
                l, key=lambda x: ".".join(x.split(".")[:2])):
            if station not in iteration_stations:
                continue
            print(".", end=' ')
            station_weight = iteration_stations[station]["station_weight"]
            channels = {}
            try:
                for w in windows:
                    w = window_manager.get(w)
                    channel_weight = 0
                    srcs = []
                    for window in w:
                        ad_src = window.adjoint_source
                        if not ad_src["adjoint_source"].ptp():
                            continue
                        srcs.append(ad_src["adjoint_source"] * window.weight)
                        channel_weight += window.weight
                    if not srcs:
                        continue
                    # Final adjoint source for that channel and apply all
                    # weights.
                    adjoint_source = np.sum(srcs, axis=0) / channel_weight * \
                        event_weight * station_weight
                    channels[w.channel_id[-1]] = adjoint_source
            except LASIFError as e:
                print(("Could not calculate adjoint source for iteration %s "
                       "and station %s. Repick windows? Reason: %s" %
                       (iteration.name, station, str(e))))
                continue
            if not channels:
                continue
            # Now all adjoint sources of a window should have the same length.
            length = set(len(v) for v in list(channels.values()))
            assert len(length) == 1
            length = length.pop()
            # All missing channels will be replaced with a zero array.
            for c in ["Z", "N", "E"]:
                if c in channels:
                    continue
                channels[c] = np.zeros(length)

            # Get the station coordinates
            coords = self.comm.query.get_coordinates_for_station(
                event_name, station)

            # Rotate if needed.
            rec_lat = coords["latitude"]
            rec_lng = coords["longitude"]

            # The adjoint sources depend on the solver.
            if "ses3d" in solver:
                # Rotate if needed.
                if domain.rotation_angle_in_degree:
                    # Rotate the adjoint source location.
                    r_rec_lat, r_rec_lng = rotations.rotate_lat_lon(
                        rec_lat, rec_lng, domain.rotation_axis,
                        -domain.rotation_angle_in_degree)
                    # Rotate the adjoint sources.
                    channels["N"], channels["E"], channels["Z"] = \
                        rotations.rotate_data(
                            channels["N"], channels["E"],
                            channels["Z"], rec_lat, rec_lng,
                            domain.rotation_axis,
                            -domain.rotation_angle_in_degree)
                else:
                    r_rec_lat = rec_lat
                    r_rec_lng = rec_lng
                r_rec_depth = 0.0
                r_rec_colat = rotations.lat2colat(r_rec_lat)

                # Now once again map from ZNE to the XYZ of SES3D.
                CHANNEL_MAPPING = {"X": "N", "Y": "E", "Z": "Z"}
                adjoint_source_stations.add(station)
                adjoint_src_filename = os.path.join(
                    output_folder, "ad_src_%i" % len(adjoint_source_stations))
                ses3d_all_coordinates.append(
                    (r_rec_colat, r_rec_lng, r_rec_depth))

                # Actually write the adjoint source file in SES3D specific
                # format.
                with open(adjoint_src_filename, "wt") as open_file:
                    open_file.write("-- adjoint source ------------------\n")
                    open_file.write(
                        "-- source coordinates (colat,lon,depth)\n")
                    open_file.write("%f %f %f\n" %
                                    (r_rec_colat, r_rec_lng, r_rec_depth))
                    open_file.write("-- source time function (x, y, z) --\n")
                    # Revert the X component as it has to point south in SES3D.
                    for x, y, z in zip(-1.0 * channels[CHANNEL_MAPPING["X"]],
                                       channels[CHANNEL_MAPPING["Y"]],
                                       channels[CHANNEL_MAPPING["Z"]]):
                        open_file.write("%e %e %e\n" % (x, y, z))
                    open_file.write("\n")
            elif "specfem" in solver:
                s_set = iteration.solver_settings["solver_settings"]
                if "adjoint_source_time_shift" not in s_set:
                    warnings.warn(
                        "No <adjoint_source_time_shift> tag in the "
                        "iteration XML file. No time shift for the "
                        "adjoint sources will be applied.", LASIFWarning)
                    src_time_shift = 0
                else:
                    src_time_shift = float(s_set["adjoint_source_time_shift"])
                adjoint_source_stations.add(station)
                # Write all components. The adjoint sources right now are
                # not time shifted.
                for component in ["Z", "N", "E"]:
                    # XXX: M band code could be different.
                    adjoint_src_filename = os.path.join(
                        output_folder, "%s.MX%s.adj" % (station, component))
                    adj_src = channels[component]
                    l = len(adj_src)
                    to_write = np.empty((l, 2))
                    to_write[:, 0] = \
                        np.linspace(0, (l - 1) * dt, l) + src_time_shift

                    # SPECFEM expects non-time reversed adjoint sources and
                    # the sign is different for some reason.
                    to_write[:, 1] = -1.0 * adj_src[::-1]

                    np.savetxt(adjoint_src_filename, to_write)
            else:
                raise NotImplementedError(
                    "Adjoint source writing for solver '%s' not yet "
                    "implemented." % iteration.solver_settings["solver"])

        if not adjoint_source_stations:
            print("Could not create a single adjoint source.")
            return

        if "ses3d" in solver:
            with open(os.path.join(output_folder, "ad_srcfile"), "wt") as fh:
                fh.write("%i\n" % len(adjoint_source_stations))
                for line in ses3d_all_coordinates:
                    fh.write("%.6f %.6f %.6f\n" % (line[0], line[1], line[2]))
                fh.write("\n")
        elif "specfem" in solver:
            adjoint_source_stations = sorted(list(adjoint_source_stations))
            with open(os.path.join(output_folder, "STATIONS_ADJOINT"),
                      "wt") as fh:
                for station in adjoint_source_stations:
                    coords = self.comm.query.get_coordinates_for_station(
                        event_name, station)
                    fh.write("{sta} {net} {lat} {lng} {ele} {dep}\n".format(
                        sta=station.split(".")[1],
                        net=station.split(".")[0],
                        lat=coords["latitude"],
                        lng=coords["longitude"],
                        ele=coords["elevation_in_m"],
                        dep=coords["local_depth_in_m"]))

        print("Wrote adjoint sources for %i station(s) to %s." %
              (len(adjoint_source_stations), os.path.relpath(output_folder)))
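For the SPECFEM branch, every adjoint source ends up as a plain two-column text file (time, amplitude), with the trace reversed in time and its sign flipped before writing. A standalone sketch of that writing step with dummy values (the file name follows the NET.STA.MX<comp>.adj pattern used above but is hypothetical here):

import numpy as np

dt = 0.1                         # sample interval of the synthetics
src_time_shift = -10.0           # hypothetical <adjoint_source_time_shift>
adj_src = np.random.randn(200)   # dummy adjoint source samples

n = len(adj_src)
to_write = np.empty((n, 2))
to_write[:, 0] = np.linspace(0, (n - 1) * dt, n) + src_time_shift
# SPECFEM expects the non-time-reversed source with opposite sign.
to_write[:, 1] = -1.0 * adj_src[::-1]

np.savetxt("NET.STA.MXZ.adj", to_write)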
Example n. 33
0
            def get_value(self):
                station_id, coordinates = self.items[self.current_index]

                data = Stream()
                # Now get the actual waveform files. Also find the
                # corresponding station file and check the coordinates.
                this_waveforms = {_i["channel_id"]: _i for _i in waveforms
                    if _i["channel_id"].startswith(station_id + ".")}
                marked_for_deletion = []
                for key, value in this_waveforms.iteritems():
                    value["trace"] = read(value["filename"])[0]
                    data += value["trace"]
                    value["station_file"] = \
                        station_cache.get_station_filename(
                            value["channel_id"],
                            UTCDateTime(value["starttime_timestamp"]))
                    if value["station_file"] is None:
                        marked_for_deletion.append(key)
                        msg = ("Warning: Data and station information for '%s'"
                               " is available, but the station information "
                               "only for the wrong timestamp. You should try "
                               "and retrieve the correct station file.")
                        warnings.warn(msg % value["channel_id"])
                        continue
                    data[-1].stats.station_file = value["station_file"]
                for key in marked_for_deletion:
                    del this_waveforms[key]
                if not this_waveforms:
                    msg = "Could not retrieve data for station '%s'." % \
                        station_id
                    warnings.warn(msg)
                    return None
                # Now attempt to get the synthetics.
                synthetics_filenames = []
                for name, path in synthetic_files.iteritems():
                    if (station_id + ".") in name:
                        synthetics_filenames.append(path)

                if len(synthetics_filenames) != 3:
                    msg = "Found %i not 3 synthetics for station '%s'." % (
                        len(synthetics_filenames), station_id)
                    warnings.warn(msg)
                    return None

                synthetics = Stream()
                # Read all synthetics.
                for filename in synthetics_filenames:
                    synthetics += read(filename)
                for synth in synthetics:
                    if synth.stats.channel in ["X", "Z"]:
                        synth.data *= -1.0
                    synth.stats.channel = SYNTH_MAPPING[synth.stats.channel]
                    synth.stats.starttime = event_info["origin_time"]

                # Process the data.
                len_synth = synthetics[0].stats.endtime - \
                    synthetics[0].stats.starttime
                data.trim(synthetics[0].stats.starttime - len_synth * 0.05,
                    synthetics[0].stats.endtime + len_synth * 0.05)
                if data:
                    max_length = max([tr.stats.npts for tr in data])
                else:
                    max_length = 0
                if max_length == 0:
                    msg = ("Warning: After trimming the waveform data to "
                        "the time window of the synthetics, no more data is "
                        "left. The reference time is the one given in the "
                        "QuakeML file. Make sure it is correct and that "
                        "the waveform data actually contains data in that "
                        "time span.")
                    warnings.warn(msg)
                data.detrend("linear")
                data.taper()

                new_time_array = np.linspace(
                    synthetics[0].stats.starttime.timestamp,
                    synthetics[0].stats.endtime.timestamp,
                    synthetics[0].stats.npts)

                # Simulate the traces.
                for trace in data:
                    # Decimate in case there is a large difference between
                    # synthetic sampling rate and sampling_rate of the data.
                    # XXX: Ugly filter, change!
                    if trace.stats.sampling_rate > (6 *
                            synth.stats.sampling_rate):
                        new_nyquist = trace.stats.sampling_rate / 2.0 / 5.0
                        trace.filter("lowpass", freq=new_nyquist, corners=4,
                            zerophase=True)
                        trace.decimate(factor=5, no_filter=True)

                    station_file = trace.stats.station_file
                    if "/SEED/" in station_file:
                        paz = Parser(station_file).getPAZ(trace.id,
                            trace.stats.starttime)
                        trace.simulate(paz_remove=paz)
                    elif "/RESP/" in station_file:
                        trace.simulate(seedresp={"filename": station_file,
                            "units": "VEL", "date": trace.stats.starttime})
                    else:
                        raise NotImplementedError

                    # Make sure that the data array is at least as long as the
                    # synthetics array. Also add some buffer samples for the
                    # spline interpolation to work in any case.
                    buf = synth.stats.delta * 5
                    if synth.stats.starttime < (trace.stats.starttime + buf):
                        trace.trim(starttime=synth.stats.starttime - buf,
                            pad=True, fill_value=0.0)
                    if synth.stats.endtime > (trace.stats.endtime - buf):
                        trace.trim(endtime=synth.stats.endtime + buf, pad=True,
                            fill_value=0.0)

                    old_time_array = np.linspace(
                        trace.stats.starttime.timestamp,
                        trace.stats.endtime.timestamp,
                        trace.stats.npts)

                    # Interpolation.
                    trace.data = interp1d(old_time_array, trace.data,
                        kind=1)(new_time_array)
                    trace.stats.starttime = synthetics[0].stats.starttime
                    trace.stats.sampling_rate = \
                        synthetics[0].stats.sampling_rate

                data.filter("bandpass", freqmin=lowpass, freqmax=highpass)
                synthetics.filter("bandpass", freqmin=lowpass,
                    freqmax=highpass)

                # Rotate the synthetics if necessary.
                if self.rot_angle:
                    # First rotate the station back to see, where it was
                    # recorded.
                    lat, lng = rotations.rotate_lat_lon(
                        coordinates["latitude"], coordinates["longitude"],
                        self.rot_axis, -self.rot_angle)
                    # Rotate the data.
                    n_trace = synthetics.select(component="N")[0]
                    e_trace = synthetics.select(component="E")[0]
                    z_trace = synthetics.select(component="Z")[0]
                    n, e, z = rotations.rotate_data(n_trace.data, e_trace.data,
                        z_trace.data, lat, lng, self.rot_axis, self.rot_angle)
                    n_trace.data = n
                    e_trace.data = e
                    z_trace.data = z

                return {"data": data, "synthetics": synthetics,
                    "coordinates": coordinates}
Esempio n. 34
0
def plot_raydensity(map_object, station_events, domain):
    """
    Create a ray-density plot for all events and all stations.

    This function is potentially expensive and will use all CPUs available.
    It requires geographiclib to be installed.
    """
    import ctypes as C
    from lasif import rotations
    from lasif.domain import RectangularSphericalSection
    from lasif.tools.great_circle_binner import GreatCircleBinner
    from lasif.utils import Point
    from itertools import chain
    import multiprocessing
    import progressbar
    from scipy.stats import scoreatpercentile

    if not isinstance(domain, RectangularSphericalSection):
        raise NotImplementedError(
            "Raydensity currently only implemented for rectangular domains. "
            "Should be easy to implement for other domains. Let me know.")

    # Merge everything so that a list with coordinate pairs is created. This
    # list is then distributed among all processors.
    station_event_list = []
    for event, stations in station_events:
        if domain.rotation_angle_in_degree:
            # Rotate point to the non-rotated domain.
            e_point = Point(*rotations.rotate_lat_lon(
                event["latitude"], event["longitude"], domain.rotation_axis,
                -1.0 * domain.rotation_angle_in_degree))
        else:
            e_point = Point(event["latitude"], event["longitude"])
        for station in stations.itervalues():
            # Rotate point to the non-rotated domain if necessary.
            if domain.rotation_angle_in_degree:
                p = Point(*rotations.rotate_lat_lon(
                    station["latitude"], station["longitude"],
                    domain.rotation_axis, -1.0 *
                    domain.rotation_angle_in_degree))
            else:
                p = Point(station["latitude"], station["longitude"])
            station_event_list.append((e_point, p))

    circle_count = len(station_event_list)

    # The granularity of the latitude/longitude discretization for the
    # raypaths. Attempt to get a somewhat meaningful result in any case.
    if circle_count < 1000:
        lat_lng_count = 1000
    elif circle_count < 10000:
        lat_lng_count = 2000
    else:
        lat_lng_count = 3000

    cpu_count = multiprocessing.cpu_count()

    def to_numpy(raw_array, dtype, shape):
        data = np.frombuffer(raw_array.get_obj())
        data.dtype = dtype
        return data.reshape(shape)

    print "\nLaunching %i greatcircle calculations on %i CPUs..." % \
        (circle_count, cpu_count)

    widgets = [
        "Progress: ",
        progressbar.Percentage(),
        progressbar.Bar(), "",
        progressbar.ETA()
    ]
    pbar = progressbar.ProgressBar(widgets=widgets,
                                   maxval=circle_count).start()

    def great_circle_binning(sta_evs, bin_data_buffer, bin_data_shape, lock,
                             counter):
        new_bins = GreatCircleBinner(domain.min_latitude, domain.max_latitude,
                                     lat_lng_count, domain.min_longitude,
                                     domain.max_longitude, lat_lng_count)
        for event, station in sta_evs:
            with lock:
                counter.value += 1
            if not counter.value % 25:
                pbar.update(counter.value)
            new_bins.add_greatcircle(event, station)

        bin_data = to_numpy(bin_data_buffer, np.uint32, bin_data_shape)
        with bin_data_buffer.get_lock():
            bin_data += new_bins.bins

    # Split the data in cpu_count parts.
    def chunk(seq, num):
        avg = len(seq) / float(num)
        out = []
        last = 0.0
        while last < len(seq):
            out.append(seq[int(last):int(last + avg)])
            last += avg
        return out

    chunks = chunk(station_event_list, cpu_count)

    # One instance that collects everything.
    collected_bins = GreatCircleBinner(domain.min_latitude,
                                       domain.max_latitude, lat_lng_count,
                                       domain.min_longitude,
                                       domain.max_longitude, lat_lng_count)

    # Use a multiprocessing shared memory array and map it to a numpy view.
    collected_bins_data = multiprocessing.Array(C.c_uint32,
                                                collected_bins.bins.size)
    collected_bins.bins = to_numpy(collected_bins_data, np.uint32,
                                   collected_bins.bins.shape)

    # Create, launch and join one process per CPU. Use a shared value as a
    # counter and a lock to avoid race conditions.
    processes = []
    lock = multiprocessing.Lock()
    counter = multiprocessing.Value("i", 0)
    for _i in xrange(cpu_count):
        processes.append(
            multiprocessing.Process(target=great_circle_binning,
                                    args=(chunks[_i], collected_bins_data,
                                          collected_bins.bins.shape, lock,
                                          counter)))
    for process in processes:
        process.start()
    for process in processes:
        process.join()

    pbar.finish()

    stations = chain.from_iterable(
        (_i[1].values() for _i in station_events if _i[1]))
    # Remove duplicates
    stations = [(_i["latitude"], _i["longitude"]) for _i in stations]
    stations = set(stations)
    title = "%i Events, %i unique raypaths, "\
            "%i unique stations" % (len(station_events), circle_count,
                                    len(stations))
    plt.title(title, size="xx-large")

    data = collected_bins.bins.transpose()

    if data.max() >= 10:
        data = np.log10(np.clip(data, a_min=0.5, a_max=data.max()))
        data[data >= 0.0] += 0.1
        data[data < 0.0] = 0.0
        max_val = scoreatpercentile(data.ravel(), 99)
    else:
        max_val = data.max()

    cmap = cm.get_cmap("gist_heat")
    cmap._init()
    cmap._lut[:120, -1] = np.linspace(0, 1.0, 120)**2

    # Slightly change the appearance of the map so it suits the rays.
    map_object.fillcontinents(color='#dddddd', lake_color='#dddddd', zorder=0)

    lngs, lats = collected_bins.coordinates
    # Rotate back if necessary!
    if domain.rotation_angle_in_degree:
        for lat, lng in zip(lats, lngs):
            lat[:], lng[:] = rotations.rotate_lat_lon(
                lat, lng, domain.rotation_axis,
                domain.rotation_angle_in_degree)
    ln, la = map_object(lngs, lats)
    map_object.pcolormesh(ln, la, data, cmap=cmap, vmin=0, vmax=max_val)
    # Draw the coastlines so they appear over the rays. Otherwise things are
    # sometimes hard to see.
    map_object.drawcoastlines()
    map_object.drawcountries(linewidth=0.2)
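
A self-contained sketch of the shared-memory pattern used in plot_raydensity: a multiprocessing.Array is reinterpreted as a NumPy view (the same trick as the to_numpy helper above) so that several processes can accumulate into one buffer under a lock. Everything apart from that pattern is illustrative.

import ctypes as C
import multiprocessing

import numpy as np


def to_numpy(raw_array, dtype, shape):
    # Reinterpret the shared ctypes buffer as a NumPy array without copying.
    data = np.frombuffer(raw_array.get_obj())
    data.dtype = dtype
    return data.reshape(shape)


def worker(shared, shape, index):
    bins = to_numpy(shared, np.uint32, shape)
    with shared.get_lock():
        bins[index] += 1


if __name__ == "__main__":
    shape = (4,)
    shared = multiprocessing.Array(C.c_uint32, int(np.prod(shape)))
    processes = [
        multiprocessing.Process(target=worker, args=(shared, shape, i))
        for i in range(shape[0])]
    for p in processes:
        p.start()
    for p in processes:
        p.join()
    print(to_numpy(shared, np.uint32, shape))  # [1 1 1 1]
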
Esempio n. 35
0
def _plot_hdf5_model_horizontal(f, component, output_filename,
                                vmin=None, vmax=None):
    import matplotlib.cm
    import matplotlib.pylab as plt

    data = xarray.DataArray(
        f["data"][component][:], [
            ("latitude", 90.0 - f["coordinate_0"][:]),
            ("longitude", f["coordinate_1"][:]),
            ("radius", f["coordinate_2"][:] / 1000.0)])

    plt.style.use('seaborn-pastel')

    from lasif.domain import RectangularSphericalSection
    domain = RectangularSphericalSection(**dict(f["_meta"]["domain"].attrs))

    plt.figure(figsize=(32, 18))

    depth_position_map = {
        50: (0, 0),
        100: (0, 1),
        150: (1, 0),
        250: (1, 1),
        400: (2, 0),
        600: (2, 1)
    }

    for depth, location in depth_position_map.items():
        ax = plt.subplot2grid((3, 5), location)
        radius = 6371.0 - depth

        # set up a map and colourmap
        m = domain.plot(ax=ax, resolution="c", skip_map_features=True)

        import lasif.colors
        my_colormap = lasif.colors.get_colormap(
                "tomo_full_scale_linear_lightness")

        from lasif import rotations

        x, y = np.meshgrid(data.longitude, data.latitude)

        x_shape = x.shape
        y_shape = y.shape

        lat_r, lon_r = rotations.rotate_lat_lon(
                y.ravel(), x.ravel(),
                domain.rotation_axis,
                domain.rotation_angle_in_degree)

        x, y = m(lon_r, lat_r)

        x.shape = x_shape
        y.shape = y_shape

        plot_data = data.sel(radius=radius, method="nearest")
        plot_data = np.ma.masked_invalid(plot_data.data)

        # Overwrite colormap things if given.
        if vmin is not None and vmax is not None:
            min_val_plot = vmin
            max_val_plot = vmax
        else:
            mean = plot_data.mean()
            max_diff = max(abs(mean - plot_data.min()),
                           abs(plot_data.max() - mean))
            min_val_plot = mean - max_diff
            max_val_plot = mean + max_diff
            # Plotting essentially constant models.
            min_delta = 0.001 * abs(max_val_plot)
            if (max_val_plot - min_val_plot) < min_delta:
                max_val_plot = max_val_plot + min_delta
                min_val_plot = min_val_plot - min_delta

        # Plot.
        im = m.pcolormesh(
                x, y, plot_data,
                cmap=my_colormap, vmin=min_val_plot, vmax=max_val_plot,
                shading="gouraud")

        # make a colorbar and title
        m.colorbar(im, "right", size="3%", pad='2%')
        plt.title(str(depth) + ' km')


    # Depth based statistics.
    plt.subplot2grid((3, 5), (0, 4), rowspan=3)
    plt.title("Depth statistics")
    mean = data.mean(axis=(0, 1))
    std = data.std(axis=(0, 1))
    _min = data.min(axis=(0, 1))
    _max = data.max(axis=(0, 1))

    plt.fill_betweenx(data.radius, mean - std, mean + std,
                      label="std", color="#FF3C83")
    plt.plot(mean, data.radius, label="mean", color="k", lw=2)
    plt.plot(_min, data.radius, color="grey", label="min")
    plt.plot(_max, data.radius, color="grey", label="max")
    plt.legend(loc="best")
    plt.xlabel("Value")
    plt.ylabel("Radius")

    plt.hlines(data.radius, plt.xlim()[0], plt.xlim()[1], color="0.8",
               zorder=-10, linewidth=0.5)

    # Roughness plots.
    plt.subplot2grid((3, 5), (0, 2))
    _d = np.abs(data.diff("latitude", n=1)).sum("latitude").data
    plt.title("Roughness in latitude direction, Total: %g" % _d.sum())
    plt.pcolormesh(data.longitude.data, data.radius.data,
                   _d.T, cmap=matplotlib.cm.viridis)
    try:
        plt.colorbar()
    except:
        pass
    plt.xlabel("Longitude")
    plt.ylabel("Radius")

    plt.subplot2grid((3, 5), (1, 2))
    _d = np.abs(data.diff("longitude", n=1)).sum("longitude").data
    plt.title("Roughness in longitude direction. Total: %g" % data.sum())
    plt.pcolormesh(data.latitude.data, data.radius.data, _d.T,
                   cmap=matplotlib.cm.viridis)
    try:
        plt.colorbar()
    except:
        pass
    plt.xlabel("Latitude")
    plt.ylabel("Radius")

    plt.subplot2grid((3, 5), (2, 2))
    _d = np.abs(data.diff("radius", n=1)).sum("radius").data
    plt.title("Roughness in radius direction. Total: %g" % _d.sum())
    plt.pcolormesh(data.longitude.data, data.latitude.data,
                   _d, cmap=matplotlib.cm.viridis)
    try:
        plt.colorbar()
    except:
        pass
    plt.xlabel("Longitude")
    plt.ylabel("Latitude")

    # L2
    plt.subplot2grid((3, 5), (0, 3))
    _d = (data ** 2).sum("latitude").data
    plt.title("L2 Norm in latitude direction, Total: %g" % _d.sum())
    plt.pcolormesh(data.longitude.data, data.radius.data,
                   _d.T, cmap=matplotlib.cm.viridis)
    try:
        plt.colorbar()
    except:
        pass
    plt.xlabel("Longitude")
    plt.ylabel("Radius")

    plt.subplot2grid((3, 5), (1, 3))
    _d = (data ** 2).sum("longitude").data
    plt.title("L2 Norm in longitude direction, Total: %g" % _d.sum())
    plt.pcolormesh(data.latitude.data, data.radius.data, _d.T,
                   cmap=matplotlib.cm.viridis)
    try:
        plt.colorbar()
    except:
        pass
    plt.xlabel("Latitude")
    plt.ylabel("Radius")

    plt.subplot2grid((3, 5), (2, 3))
    _d = (data ** 2).sum("radius").data
    plt.title("L2 Norm in radius direction, Total: %g" % _d.sum())
    plt.pcolormesh(data.longitude.data, data.latitude.data,
                   _d, cmap=matplotlib.cm.viridis)
    try:
        plt.colorbar()
    except:
        pass
    plt.xlabel("Longitude")
    plt.ylabel("Latitude")

    plt.suptitle("Component %s - File %s" % (component, output_filename),
                 fontsize=20)

    plt.tight_layout(rect=(0, 0, 1, 0.95))

    plt.savefig(output_filename, dpi=150)
    plt.close()
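
A small sketch of the xarray pattern used in the plotting routine above: the model values are wrapped in a DataArray with named latitude/longitude/radius coordinates, and a depth slice is pulled out with nearest-neighbour selection along the radius dimension. The grid and values are synthetic.

import numpy as np
import xarray

lats = np.linspace(30.0, 60.0, 31)
lons = np.linspace(-10.0, 20.0, 31)
radii = 6371.0 - np.array([600.0, 400.0, 250.0, 150.0, 100.0, 50.0])

values = np.random.rand(lats.size, lons.size, radii.size)
data = xarray.DataArray(
    values, [("latitude", lats), ("longitude", lons), ("radius", radii)])

# Nearest-neighbour selection along the named "radius" dimension.
slice_100km = data.sel(radius=6371.0 - 100.0, method="nearest")
print(slice_100km.shape)  # (31, 31)
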
Esempio n. 36
0
    def get_waveforms_synthetic(self, event_name, station_id,
                                long_iteration_name):
        """
        Gets the synthetic waveforms for the given event and station as a
        :class:`~obspy.core.stream.Stream` object.

        :param event_name: The name of the event.
        :param station_id: The id of the station in the form ``NET.STA``.
        :param long_iteration_name: The long form of an iteration name.
        """
        from lasif import rotations
        import lasif.domain

        iteration = self.comm.iterations.get(long_iteration_name)

        st = self._get_waveforms(event_name, station_id,
                                 data_type="synthetic",
                                 tag_or_iteration=iteration.long_name)
        network, station = station_id.split(".")

        formats = list(set([tr.stats._format for tr in st]))
        if len(formats) != 1:
            raise ValueError(
                "The synthetics for one Earthquake must all have the same "
                "data format under the assumption that they all originate "
                "from the same solver. Found formats: %s" % (str(formats)))
        format = formats[0].lower()

        # In the case of data coming from SES3D the components must be
        # mapped to ZNE as it works in XYZ.
        if format == "ses3d":
            # This maps the synthetic channels to ZNE.
            synthetic_coordinates_mapping = {"X": "N", "Y": "E", "Z": "Z"}

            for tr in st:
                tr.stats.network = network
                tr.stats.station = station
                # SES3D X points south. Reverse it to arrive at ZNE.
                if tr.stats.channel in ["X"]:
                    tr.data *= -1.0
                # SES3D files have no starttime. Set to the event time.
                tr.stats.starttime = \
                    self.comm.events.get(event_name)["origin_time"]
                tr.stats.channel = \
                    synthetic_coordinates_mapping[tr.stats.channel]

            # Rotate if needed. Again only SES3D synthetics need to be rotated.
            domain = self.comm.project.domain
            if isinstance(domain, lasif.domain.RectangularSphericalSection) \
                    and domain.rotation_angle_in_degree and \
                    "ses3d" in iteration.solver_settings["solver"].lower():
                # Coordinates are required for the rotation.
                coordinates = self.comm.query.get_coordinates_for_station(
                    event_name, station_id)

                # First rotate the station back to see, where it was
                # recorded.
                lat, lng = rotations.rotate_lat_lon(
                    lat=coordinates["latitude"], lon=coordinates["longitude"],
                    rotation_axis=domain.rotation_axis,
                    angle=-domain.rotation_angle_in_degree)
                # Rotate the synthetics.
                n, e, z = rotations.rotate_data(
                    st.select(channel="N")[0].data,
                    st.select(channel="E")[0].data,
                    st.select(channel="Z")[0].data,
                    lat, lng,
                    domain.rotation_axis,
                    domain.rotation_angle_in_degree)
                st.select(channel="N")[0].data = n
                st.select(channel="E")[0].data = e
                st.select(channel="Z")[0].data = z

        st.sort()

        # Apply the project function that modifies synthetics on the fly.
        fct = self.comm.project.get_project_function("process_synthetics")
        return fct(st, iteration=iteration,
                   event=self.comm.events.get(event_name))
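
A hedged, standalone sketch of the SES3D component handling in get_waveforms_synthetic: the X channel (which points south) is sign-flipped, the XYZ channels are renamed to NEZ, and the missing start time is set to the event origin time. The traces and origin time here are throwaway dummies.

import numpy as np
from obspy import Stream, Trace, UTCDateTime

synthetic_coordinates_mapping = {"X": "N", "Y": "E", "Z": "Z"}
origin_time = UTCDateTime(2012, 1, 1)  # illustrative event origin time

st = Stream()
for channel in ["X", "Y", "Z"]:
    tr = Trace(data=np.arange(5, dtype=np.float64))
    tr.stats.channel = channel
    st += tr

for tr in st:
    # SES3D X points south, so flip it to arrive at ZNE.
    if tr.stats.channel == "X":
        tr.data *= -1.0
    # SES3D files carry no start time; use the event origin time.
    tr.stats.starttime = origin_time
    tr.stats.channel = synthetic_coordinates_mapping[tr.stats.channel]

print([tr.stats.channel for tr in st])  # ['N', 'E', 'Z']
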
Esempio n. 37
0
def par2quakeml(Par_filename,
                QuakeML_filename,
                rotation_axis=[0.0, 1.0, 0.0],
                rotation_angle=-57.5,
                origin_time="2000-01-01 00:00:00.0",
                event_type="other event"):
    # initialise event
    ev = Event()

    # open and read Par file
    fid = open(Par_filename, 'r')

    fid.readline()
    fid.readline()
    fid.readline()
    fid.readline()

    lat_old = 90.0 - float(fid.readline().strip().split()[0])
    lon_old = float(fid.readline().strip().split()[0])
    depth = float(fid.readline().strip().split()[0])

    fid.readline()

    Mtt_old = float(fid.readline().strip().split()[0])
    Mpp_old = float(fid.readline().strip().split()[0])
    Mrr_old = float(fid.readline().strip().split()[0])
    Mtp_old = float(fid.readline().strip().split()[0])
    Mtr_old = float(fid.readline().strip().split()[0])
    Mpr_old = float(fid.readline().strip().split()[0])

    # rotate event into physical domain

    lat, lon = rot.rotate_lat_lon(lat_old, lon_old, rotation_axis,
                                  rotation_angle)
    Mrr, Mtt, Mpp, Mtr, Mpr, Mtp = rot.rotate_moment_tensor(
        Mrr_old, Mtt_old, Mpp_old, Mtr_old, Mpr_old, Mtp_old, lat_old, lon_old,
        rotation_axis, rotation_angle)

    # populate event origin data
    ev.event_type = event_type

    ev_origin = Origin()
    ev_origin.time = UTCDateTime(origin_time)
    ev_origin.latitude = lat
    ev_origin.longitude = lon
    ev_origin.depth = depth
    ev.origins.append(ev_origin)

    # populate event moment tensor

    ev_tensor = Tensor()
    ev_tensor.m_rr = Mrr
    ev_tensor.m_tt = Mtt
    ev_tensor.m_pp = Mpp
    ev_tensor.m_rt = Mtr
    ev_tensor.m_rp = Mpr
    ev_tensor.m_tp = Mtp

    ev_momenttensor = MomentTensor()
    ev_momenttensor.tensor = ev_tensor
    ev_momenttensor.scalar_moment = np.sqrt(Mrr**2 + Mtt**2 + Mpp**2 + Mtr**2 +
                                            Mpr**2 + Mtp**2)

    ev_focalmechanism = FocalMechanism()
    ev_focalmechanism.moment_tensor = ev_momenttensor
    ev_focalmechanism.nodal_planes = NodalPlanes().setdefault(0, 0)

    ev.focal_mechanisms.append(ev_focalmechanism)

    # populate event magnitude
    ev_magnitude = Magnitude()
    ev_magnitude.mag = 0.667 * (np.log10(ev_momenttensor.scalar_moment) - 9.1)
    ev_magnitude.magnitude_type = 'Mw'
    ev.magnitudes.append(ev_magnitude)

    # write QuakeML file
    cat = Catalog()
    cat.append(ev)
    cat.write(QuakeML_filename, format="quakeml")

    # clean up
    fid.close()
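
The magnitude conversion used in par2quakeml, isolated as a tiny sketch: the scalar moment is taken as the root of the sum of squared tensor components (as in the snippet, without a 1/sqrt(2) convention factor) and converted via Mw = 2/3 * (log10(M0) - 9.1) with M0 in N m. The tensor values are illustrative.

import numpy as np

# Illustrative moment tensor components in N m.
Mrr, Mtt, Mpp, Mtr, Mpr, Mtp = 1.0e18, -0.5e18, -0.5e18, 0.2e18, 0.1e18, 0.0

scalar_moment = np.sqrt(Mrr**2 + Mtt**2 + Mpp**2 + Mtr**2 + Mpr**2 + Mtp**2)
Mw = 0.667 * (np.log10(scalar_moment) - 9.1)
print("M0 = %.3e N m, Mw = %.2f" % (scalar_moment, Mw))
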
Esempio n. 38
0
    def finalize_adjoint_sources(self, iteration_name, event_name):
        """
        Finalizes the adjoint sources.
        """
        from itertools import izip
        import numpy as np
        from lasif import rotations

        window_manager = self.comm.windows.get(event_name, iteration_name)
        event = self.comm.events.get(event_name)
        iteration = self.comm.iterations.get(iteration_name)
        iteration_event_def = iteration.events[event["event_name"]]
        iteration_stations = iteration_event_def["stations"]

        # For now assume that the adjoint sources have the same
        # sampling rate as the synthetics which in LASIF's workflow
        # actually has to be true.
        dt = iteration.get_process_params()["dt"]

        # Current domain and solver.
        domain = self.comm.project.domain
        solver = iteration.solver_settings["solver"].lower()

        adjoint_source_stations = set()

        if "ses3d" in solver:
            ses3d_all_coordinates = []

        event_weight = iteration_event_def["event_weight"]

        output_folder = self.comm.project.get_output_folder(
            type="adjoint_sources",
            tag="ITERATION_%s__%s" % (iteration_name, event_name))

        l = sorted(window_manager.list())
        for station, windows in itertools.groupby(
                l, key=lambda x: ".".join(x.split(".")[:2])):
            if station not in iteration_stations:
                continue
            print ".",
            station_weight = iteration_stations[station]["station_weight"]
            channels = {}
            try:
                for w in windows:
                    w = window_manager.get(w)
                    channel_weight = 0
                    srcs = []
                    for window in w:
                        ad_src = window.adjoint_source
                        if not ad_src["adjoint_source"].ptp():
                            continue
                        srcs.append(ad_src["adjoint_source"] * window.weight)
                        channel_weight += window.weight
                    if not srcs:
                        continue
                    # Final adjoint source for that channel and apply all
                    # weights.
                    adjoint_source = np.sum(srcs, axis=0) / channel_weight * \
                        event_weight * station_weight
                    channels[w.channel_id[-1]] = adjoint_source
            except LASIFError as e:
                print("Could not calculate adjoint source for iteration %s "
                      "and station %s. Repick windows? Reason: %s" % (
                          iteration.name, station, str(e)))
                continue
            if not channels:
                continue
            # Now all adjoint sources of a window should have the same length.
            length = set(len(v) for v in channels.values())
            assert len(length) == 1
            length = length.pop()
            # All missing channels will be replaced with a zero array.
            for c in ["Z", "N", "E"]:
                if c in channels:
                    continue
                channels[c] = np.zeros(length)

            # Get the station coordinates
            coords = self.comm.query.get_coordinates_for_station(event_name,
                                                                 station)

            # Rotate if needed.
            rec_lat = coords["latitude"]
            rec_lng = coords["longitude"]

            # The adjoint sources depend on the solver.
            if "ses3d" in solver:
                # Rotate if needed.
                if domain.rotation_angle_in_degree:
                    # Rotate the adjoint source location.
                    r_rec_lat, r_rec_lng = rotations.rotate_lat_lon(
                        rec_lat, rec_lng, domain.rotation_axis,
                        -domain.rotation_angle_in_degree)
                    # Rotate the adjoint sources.
                    channels["N"], channels["E"], channels["Z"] = \
                        rotations.rotate_data(
                            channels["N"], channels["E"],
                            channels["Z"], rec_lat, rec_lng,
                            domain.rotation_axis,
                            -domain.rotation_angle_in_degree)
                else:
                    r_rec_lat = rec_lat
                    r_rec_lng = rec_lng
                r_rec_depth = 0.0
                r_rec_colat = rotations.lat2colat(r_rec_lat)

                # Now once again map from ZNE to the XYZ of SES3D.
                CHANNEL_MAPPING = {"X": "N", "Y": "E", "Z": "Z"}
                adjoint_source_stations.add(station)
                adjoint_src_filename = os.path.join(
                    output_folder, "ad_src_%i" % len(adjoint_source_stations))
                ses3d_all_coordinates.append(
                    (r_rec_colat, r_rec_lng, r_rec_depth))

                # Actually write the adjoint source file in SES3D specific
                # format.
                with open(adjoint_src_filename, "wt") as open_file:
                    open_file.write("-- adjoint source ------------------\n")
                    open_file.write(
                        "-- source coordinates (colat,lon,depth)\n")
                    open_file.write("%f %f %f\n" % (r_rec_colat, r_rec_lng,
                                                    r_rec_depth))
                    open_file.write("-- source time function (x, y, z) --\n")
                    # Revert the X component as it has to point south in SES3D.
                    for x, y, z in izip(-1.0 * channels[CHANNEL_MAPPING["X"]],
                                        channels[CHANNEL_MAPPING["Y"]],
                                        channels[CHANNEL_MAPPING["Z"]]):
                        open_file.write("%e %e %e\n" % (x, y, z))
                    open_file.write("\n")
            elif "specfem" in solver:
                s_set = iteration.solver_settings["solver_settings"]
                if "adjoint_source_time_shift" not in s_set:
                    warnings.warn("No <adjoint_source_time_shift> tag in the "
                                  "iteration XML file. No time shift for the "
                                  "adjoint sources will be applied.",
                                  LASIFWarning)
                    src_time_shift = 0
                else:
                    src_time_shift = float(s_set["adjoint_source_time_shift"])
                adjoint_source_stations.add(station)
                # Write all components. The adjoint sources right now are
                # not time shifted.
                for component in ["Z", "N", "E"]:
                    # XXX: M band code could be different.
                    adjoint_src_filename = os.path.join(
                        output_folder, "%s.MX%s.adj" % (station, component))
                    adj_src = channels[component]
                    l = len(adj_src)
                    to_write = np.empty((l, 2))
                    to_write[:, 0] = \
                        np.linspace(0, (l - 1) * dt, l) + src_time_shift

                    # SPECFEM expects non-time reversed adjoint sources and
                    # the sign is different for some reason.
                    to_write[:, 1] = -1.0 * adj_src[::-1]

                    np.savetxt(adjoint_src_filename, to_write)
            else:
                raise NotImplementedError(
                    "Adjoint source writing for solver '%s' not yet "
                    "implemented." % iteration.solver_settings["solver"])

        if not adjoint_source_stations:
            print("Could not create a single adjoint source.")
            return

        if "ses3d" in solver:
            with open(os.path.join(output_folder, "ad_srcfile"), "wt") as fh:
                fh.write("%i\n" % len(adjoint_source_stations))
                for line in ses3d_all_coordinates:
                    fh.write("%.6f %.6f %.6f\n" % (line[0], line[1], line[2]))
                fh.write("\n")
        elif "specfem" in solver:
            adjoint_source_stations = sorted(list(adjoint_source_stations))
            with open(os.path.join(output_folder, "STATIONS_ADJOINT"),
                      "wt") as fh:
                for station in adjoint_source_stations:
                    coords = self.comm.query.get_coordinates_for_station(
                        event_name, station)
                    fh.write("{sta} {net} {lat} {lng} {ele} {dep}\n".format(
                        sta=station.split(".")[1],
                        net=station.split(".")[0],
                        lat=coords["latitude"],
                        lng=coords["longitude"],
                        ele=coords["elevation_in_m"],
                        dep=coords["local_depth_in_m"]))

        print "Wrote adjoint sources for %i station(s) to %s." % (
            len(adjoint_source_stations), os.path.relpath(output_folder))
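
A standalone sketch of the SPECFEM adjoint-source file written above: two columns (time, amplitude), with the source time-reversed and sign-flipped before writing. The file name, time step, and source itself are made up for illustration.

import numpy as np

dt = 0.1                 # time increment of the synthetics
src_time_shift = 0.0     # optional shift from the iteration settings
adj_src = np.sin(np.linspace(0.0, 2.0 * np.pi, 200))  # dummy adjoint source

n = len(adj_src)
to_write = np.empty((n, 2))
to_write[:, 0] = np.linspace(0, (n - 1) * dt, n) + src_time_shift
# SPECFEM expects the non-time-reversed source with a flipped sign.
to_write[:, 1] = -1.0 * adj_src[::-1]

np.savetxt("XX.ABC.MXZ.adj", to_write)
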
Esempio n. 39
0
    def plot_depth_slice(self, component, depth_in_km):
        """
        Plots a depth slice.

        :param component: The component to plot.
        :type component: basestring
        :param depth_in_km: The depth in km to plot. If the exact depth does
             not exist, the nearest neighbour will be plotted.
        :type depth_in_km: integer or float
        """
        lat_bounds = [rotations.colat2lat(_i)
                      for _i in self.setup["physical_boundaries_x"][::-1]]
        lng_bounds = self.setup["physical_boundaries_y"]
        depth_bounds = [6371 - _i / 1000.0
                        for _i in self.setup["physical_boundaries_z"]]

        data = self.parsed_components[component]

        available_depths = np.linspace(*depth_bounds, num=data.shape[2])
        depth_index = np.argmin(np.abs(available_depths - depth_in_km))
        actual_depth = available_depths[depth_index]

        lngs = self._get_collocation_points_along_axis(
            lng_bounds[0], lng_bounds[1], data.shape[1])
        lats = self._get_collocation_points_along_axis(
            lat_bounds[0], lat_bounds[1], data.shape[0])

        lon, lat = np.meshgrid(lngs, lats)
        if self.rotation_axis and self.rotation_angle_in_degree:
            lon_shape = lon.shape
            lat_shape = lat.shape
            lon.shape = lon.size
            lat.shape = lat.size
            lat, lon = rotations.rotate_lat_lon(lat, lon, self.rotation_axis,
                                                self.rotation_angle_in_degree)
            lon.shape = lon_shape
            lat.shape = lat_shape

        # Get the center of the map.
        lon_0 = lon.min() + lon.ptp() / 2.0
        lat_0 = lat.min() + lat.ptp() / 2.0

        plt.figure(0)

        # Attempt to zoom into the region of interest.
        max_extend = max(lon.ptp(), lat.ptp())
        extend_used = max_extend / 180.0
        if extend_used < 0.5:
            # Calculate approximate width and height in meters.
            width = lon.ptp()
            height = lat.ptp()
            width *= 110000 * 1.1
            height *= 110000 * 1.1
            # Lambert azimuthal equal area projection. Equal area projections
            # are useful for interpreting features and this particular one also
            # does not distort features a lot on regional scales.
            m = Basemap(projection='laea', resolution="l",
                        width=width, height=height,
                        lat_0=lat_0, lon_0=lon_0)
        else:
            m = Basemap(projection='ortho', lon_0=lon_0, lat_0=lat_0,
                        resolution="c")

        m.drawcoastlines()
        m.fillcontinents("0.9", zorder=0)
        m.drawmapboundary(fill_color="white")
        m.drawparallels(np.arange(-80.0, 80.0, 10.0), labels=[1, 0, 0, 0])
        m.drawmeridians(np.arange(-170.0, 170.0, 10.0), labels=[0, 0, 0, 1])
        m.drawcountries()

        x, y = m(lon, lat)
        depth_data = data[::-1, :, depth_index]
        vmin, vmax = depth_data.min(), depth_data.max()
        vmedian = np.median(depth_data)
        offset = max(abs(vmax - vmedian), abs(vmedian - vmin))

        if vmax - vmin == 0:
            offset = 0.01

        vmin = vmedian - offset
        vmax = vmedian + offset

        im = m.pcolormesh(x, y, depth_data, cmap=tomo_colormap, vmin=vmin,
                          vmax=vmax)

        # Add colorbar and potentially unit.
        cm = m.colorbar(im, "right", size="3%", pad='2%')
        if component in UNIT_DICT:
            cm.set_label(UNIT_DICT[component], fontsize="x-large", rotation=0)

        plt.suptitle("Depth slice of %s at %i km" % (
            component, int(round(actual_depth))), size="large")

        border = rotations.get_border_latlng_list(
            rotations.colat2lat(self.setup["physical_boundaries_x"][0]),
            rotations.colat2lat(self.setup["physical_boundaries_x"][1]),
            self.setup["physical_boundaries_y"][0],
            self.setup["physical_boundaries_y"][1],
            rotation_axis=self.rotation_axis,
            rotation_angle_in_degree=self.rotation_angle_in_degree)
        border = np.array(border)
        lats = border[:, 0]
        lngs = border[:, 1]
        lngs, lats = m(lngs, lats)
        m.plot(lngs, lats, color="black", lw=2, path_effects=[
            PathEffects.withStroke(linewidth=4, foreground="white")])

        def _on_button_press(event):
            if event.button != 1 or not event.inaxes:
                return
            lon, lat = m(event.xdata, event.ydata, inverse=True)
            # Convert to colat to ease indexing.
            colat = rotations.lat2colat(lat)

            x_range = (self.setup["physical_boundaries_x"][1] -
                       self.setup["physical_boundaries_x"][0])
            x_frac = (colat - self.setup["physical_boundaries_x"][0]) / x_range
            x_index = int(((self.setup["boundaries_x"][1] -
                            self.setup["boundaries_x"][0]) * x_frac) +
                          self.setup["boundaries_x"][0])
            y_range = (self.setup["physical_boundaries_y"][1] -
                       self.setup["physical_boundaries_y"][0])
            y_frac = (lon - self.setup["physical_boundaries_y"][0]) / y_range
            y_index = int(((self.setup["boundaries_y"][1] -
                            self.setup["boundaries_y"][0]) * y_frac) +
                          self.setup["boundaries_y"][0])

            plt.figure(1, figsize=(3, 8))
            depths = available_depths
            values = data[x_index, y_index, :]
            plt.plot(values, depths)
            plt.grid()
            plt.ylim(depths[-1], depths[0])
            plt.show()
            plt.close()
            plt.figure(0)

        plt.gcf().canvas.mpl_connect('button_press_event', _on_button_press)

        plt.show()
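
The nearest-depth lookup from the slice plot above, reduced to a standalone sketch with made-up depth bounds.

import numpy as np

depth_bounds = [0.0, 660.0]   # hypothetical model depth extent in km
num_layers = 34
depth_in_km = 137.0

available_depths = np.linspace(depth_bounds[0], depth_bounds[1], num_layers)
depth_index = np.argmin(np.abs(available_depths - depth_in_km))
actual_depth = available_depths[depth_index]
print("index %i -> %.1f km" % (depth_index, actual_depth))
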
Esempio n. 40
0
    def plot_depth_slice(self, component, depth_in_km, m,
                         absolute_values=True):
        """
        Plots a depth slice.

        :param component: The component to plot.
        :type component: basestring
        :param depth_in_km: The depth in km to plot. If the exact depth does
             not exist, the nearest neighbour will be plotted.
        :type depth_in_km: integer or float
        :param m: Basemap instance.
        """
        depth_index = self.get_closest_gll_index("depth", depth_in_km)

        # No need to do anything if the requested slice is already plotted.
        # This is useful for interactive use when the desired depth is
        # changed but the closest GLL collocation point is still the same.
        if hasattr(m, "_plotted_depth_slice"):
            # Use a tuple of relevant parameters.
            if m._plotted_depth_slice == (self.directory, depth_index,
                                          component, absolute_values):
                return None

        data = self.parsed_components[component]

        depth = self.collocation_points_depth[depth_index]
        lngs = self.collocation_points_lngs
        lats = self.collocation_points_lats

        # Rotate data if needed.
        lon, lat = np.meshgrid(lngs, lats)
        if hasattr(self.domain, "rotation_axis") and \
                self.domain.rotation_axis and \
                self.domain.rotation_angle_in_degree:
            lon_shape = lon.shape
            lat_shape = lat.shape
            lon.shape = lon.size
            lat.shape = lat.size
            lat, lon = rotations.rotate_lat_lon(
                lat, lon, self.domain.rotation_axis,
                self.domain.rotation_angle_in_degree)
            lon.shape = lon_shape
            lat.shape = lat_shape

        x, y = m(lon, lat)
        depth_data = data[::-1, :, depth_index]

        # Plot values relative to AK135.
        if not absolute_values:
            cmp_map = {
                "rho": "density",
                "vp": "vp",
                "vsh": "vs",
                "vsv": "vs"
            }

            factor = {
                "rho": 1000.0,
                "vp": 1.0,
                "vsh": 1.0,
                "vsv": 1.0,
            }

            if component not in cmp_map:
                vmin, vmax = depth_data.min(), depth_data.max()
                vmedian = np.median(depth_data)
                offset = max(abs(vmax - vmedian), abs(vmedian - vmin))

                if vmax - vmin == 0:
                    offset = 0.01

                vmin = vmedian - offset
                vmax = vmedian + offset
            else:
                reference_value = self.one_d_model.get_value(
                    cmp_map[component], depth) * factor[component]

                depth_data = (depth_data - reference_value) / reference_value
                depth_data *= 100.0
                offset = np.abs(depth_data)
                try:
                    offset = offset[offset < 50].max()
                except:
                    offset = offset.max()
                vmin = -offset
                vmax = offset
        else:
            vmin, vmax = depth_data.min(), depth_data.max()
            vmedian = np.median(depth_data)
            offset = max(abs(vmax - vmedian), abs(vmedian - vmin))

            min_delta = abs(vmax * 0.005)
            if (vmax - vmin) < min_delta:
                offset = min_delta

            vmin = vmedian - offset
            vmax = vmedian + offset

        # Remove an existing pcolormesh if it exists. This does not hurt in
        # any case but is useful for interactive use.
        if hasattr(m, "_depth_slice"):
            m._depth_slice.remove()
            del m._depth_slice

        im = m.pcolormesh(x, y, depth_data, cmap=tomo_colormap, vmin=vmin,
                          vmax=vmax)
        m._depth_slice = im

        # Store what is currently plotted.
        m._plotted_depth_slice = (self.directory, depth_index, component,
                                  absolute_values)

        return {
            "depth": depth,
            "mesh": im,
            "data": depth_data
        }
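
The colour-scaling logic shared by both depth-slice plots, isolated: the limits are centred on the median with a symmetric offset and widened slightly when the slice is essentially constant. The slice values are random.

import numpy as np

depth_data = np.random.randn(50, 60) * 0.1 + 4.5  # dummy depth slice

vmin, vmax = depth_data.min(), depth_data.max()
vmedian = np.median(depth_data)
offset = max(abs(vmax - vmedian), abs(vmedian - vmin))

# Guard against an essentially constant slice.
min_delta = abs(vmax * 0.005)
if (vmax - vmin) < min_delta:
    offset = min_delta

vmin = vmedian - offset
vmax = vmedian + offset
print("vmin=%.3f vmax=%.3f" % (vmin, vmax))
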
Esempio n. 41
0
    def generate_input_files(self, iteration_name, event_name,
                             simulation_type):
        """
        Generate the input files for one event.

        :param iteration_name: The name of the iteration.
        :param event_name: The name of the event for which to generate the
            input files.
        :param simulation_type: The type of simulation to perform. Possible
            values are: 'normal simulation', 'adjoint forward', and
            'adjoint reverse'.
        """
        from wfs_input_generator import InputFileGenerator

        # =====================================================================
        # read iteration xml file, get event and list of stations
        # =====================================================================

        iteration = self.comm.iterations.get(iteration_name)

        # Check that the event is part of the iterations.
        if event_name not in iteration.events:
            msg = ("Event '%s' not part of iteration '%s'.\nEvents available "
                   "in iteration:\n\t%s" %
                   (event_name, iteration_name, "\n\t".join(
                       sorted(iteration.events.keys()))))
            raise ValueError(msg)

        event = self.comm.events.get(event_name)
        stations_for_event = iteration.events[event_name]["stations"].keys()

        # Get all stations and create a dictionary for the input file
        # generator.
        stations = self.comm.query.get_all_stations_for_event(event_name)
        stations = [{"id": key, "latitude": value["latitude"],
                     "longitude": value["longitude"],
                     "elevation_in_m": value["elevation_in_m"],
                     "local_depth_in_m": value["local_depth_in_m"]}
                    for key, value in stations.iteritems()
                    if key in stations_for_event]

        # =====================================================================
        # set solver options
        # =====================================================================

        solver = iteration.solver_settings

        # Check that the solver format is one of the supported ones.
        solver_format = solver["solver"].lower()
        if solver_format not in ["ses3d 4.1", "ses3d 2.0",
                                 "specfem3d cartesian", "specfem3d globe cem"]:
            msg = ("Currently only SES3D 4.1, SES3D 2.0, SPECFEM3D "
                   "CARTESIAN, and SPECFEM3D GLOBE CEM are supported.")
            raise ValueError(msg)
        solver_format = solver_format.replace(' ', '_')
        solver_format = solver_format.replace('.', '_')

        solver = solver["solver_settings"]

        # =====================================================================
        # create the input file generator, add event and stations,
        # populate the configuration items
        # =====================================================================

        # Add the event and the stations to the input file generator.
        gen = InputFileGenerator()
        gen.add_events(event["filename"])
        gen.add_stations(stations)

        if solver_format in ["ses3d_4_1", "ses3d_2_0"]:
            # event tag
            gen.config.event_tag = event_name

            # Time configuration.
            npts = solver["simulation_parameters"]["number_of_time_steps"]
            delta = solver["simulation_parameters"]["time_increment"]
            gen.config.number_of_time_steps = npts
            gen.config.time_increment_in_s = delta

            # SES3D specific configuration
            gen.config.output_folder = solver["output_directory"].replace(
                "{{EVENT_NAME}}", event_name.replace(" ", "_"))
            gen.config.simulate_type = simulation_type

            gen.config.adjoint_forward_wavefield_output_folder = \
                solver["adjoint_output_parameters"][
                    "forward_field_output_directory"].replace(
                    "{{EVENT_NAME}}", event_name.replace(" ", "_"))
            gen.config.adjoint_forward_sampling_rate = \
                solver["adjoint_output_parameters"][
                    "sampling_rate_of_forward_field"]

            # Visco-elastic dissipation
            diss = solver["simulation_parameters"]["is_dissipative"]
            gen.config.is_dissipative = diss

            # Only SES3D 4.1 has the relaxation parameters.
            if solver_format == "ses3d_4_1":
                gen.config.Q_model_relaxation_times = \
                    solver["relaxation_parameter_list"]["tau"]
                gen.config.Q_model_weights_of_relaxation_mechanisms = \
                    solver["relaxation_parameter_list"]["w"]

            # Discretization
            disc = solver["computational_setup"]
            gen.config.nx_global = disc["nx_global"]
            gen.config.ny_global = disc["ny_global"]
            gen.config.nz_global = disc["nz_global"]
            gen.config.px = disc["px_processors_in_theta_direction"]
            gen.config.py = disc["py_processors_in_phi_direction"]
            gen.config.pz = disc["pz_processors_in_r_direction"]
            gen.config.lagrange_polynomial_degree = \
                disc["lagrange_polynomial_degree"]

            # Configure the mesh.
            domain = self.comm.project.domain
            gen.config.mesh_min_latitude = \
                domain["bounds"]["minimum_latitude"]
            gen.config.mesh_max_latitude = \
                domain["bounds"]["maximum_latitude"]
            gen.config.mesh_min_longitude = \
                domain["bounds"]["minimum_longitude"]
            gen.config.mesh_max_longitude = \
                domain["bounds"]["maximum_longitude"]
            gen.config.mesh_min_depth_in_km = \
                domain["bounds"]["minimum_depth_in_km"]
            gen.config.mesh_max_depth_in_km = \
                domain["bounds"]["maximum_depth_in_km"]

            # Set the rotation parameters.
            gen.config.rotation_angle_in_degree = domain["rotation_angle"]
            gen.config.rotation_axis = domain["rotation_axis"]

            # Make source time function
            gen.config.source_time_function = \
                iteration.get_source_time_function()["data"]
        elif solver_format == "specfem3d_cartesian":
            gen.config.NSTEP = \
                solver["simulation_parameters"]["number_of_time_steps"]
            gen.config.DT = \
                solver["simulation_parameters"]["time_increment"]
            gen.config.NPROC = \
                solver["computational_setup"]["number_of_processors"]
            if simulation_type == "normal simulation":
                msg = ("'normal_simulate' not supported for SPECFEM3D "
                       "Cartesian. Please choose either 'adjoint_forward' or "
                       "'adjoint_reverse'.")
                raise NotImplementedError(msg)
            elif simulation_type == "adjoint forward":
                gen.config.SIMULATION_TYPE = 1
            elif simulation_type == "adjoint reverse":
                gen.config.SIMULATION_TYPE = 2
            else:
                raise NotImplementedError
            solver_format = solver_format.upper()

        elif solver_format == "specfem3d_globe_cem":
            cs = solver["computational_setup"]
            gen.config.NPROC_XI = cs["number_of_processors_xi"]
            gen.config.NPROC_ETA = cs["number_of_processors_eta"]
            gen.config.NCHUNKS = cs["number_of_chunks"]
            gen.config.NEX_XI = cs["elements_per_chunk_xi"]
            gen.config.NEX_ETA = cs["elements_per_chunk_eta"]
            gen.config.OCEANS = cs["simulate_oceans"]
            gen.config.ELLIPTICITY = cs["simulate_ellipticity"]
            gen.config.TOPOGRAPHY = cs["simulate_topography"]
            gen.config.GRAVITY = cs["simulate_gravity"]
            gen.config.ROTATION = cs["simulate_rotation"]
            gen.config.ATTENUATION = cs["simulate_attenuation"]
            gen.config.ABSORBING_CONDITIONS = True
            if cs["fast_undo_attenuation"]:
                gen.config.PARTIAL_PHYS_DISPERSION_ONLY = True
                gen.config.UNDO_ATTENUATION = False
            else:
                gen.config.PARTIAL_PHYS_DISPERSION_ONLY = False
                gen.config.UNDO_ATTENUATION = True
            gen.config.GPU_MODE = cs["use_gpu"]
            gen.config.SOURCE_TIME_FUNCTION = \
                iteration.get_source_time_function()["data"]

            if simulation_type == "normal simulation":
                gen.config.SIMULATION_TYPE = 1
                gen.config.SAVE_FORWARD = False
            elif simulation_type == "adjoint forward":
                gen.config.SIMULATION_TYPE = 1
                gen.config.SAVE_FORWARD = True
            elif simulation_type == "adjoint reverse":
                gen.config.SIMULATION_TYPE = 2
                gen.config.SAVE_FORWARD = True
            else:
                raise NotImplementedError

            # Use the current domain setting to derive the bounds in the way
            # SPECFEM specifies them.
            domain = self.comm.project.domain

            lat_range = domain["bounds"]["maximum_latitude"] - \
                domain["bounds"]["minimum_latitude"]
            lng_range = domain["bounds"]["maximum_longitude"] - \
                        domain["bounds"]["minimum_longitude"]

            c_lat = \
                domain["bounds"]["minimum_latitude"] + lat_range / 2.0
            c_lng = \
                domain["bounds"]["minimum_longitude"] + lng_range / 2.0

            # Rotate the point.
            c_lat_1, c_lng_1 = rotations.rotate_lat_lon(
                c_lat, c_lng, domain["rotation_axis"],
                domain["rotation_angle"])

            # SES3D rotation.
            A = rotations._get_rotation_matrix(
                domain["rotation_axis"], domain["rotation_angle"])

            latitude_rotation = -(c_lat_1 - c_lat)
            longitude_rotation = c_lng_1 - c_lng

            # Rotate the latitude. The rotation axis is latitude 0 and
            # the center longitude + 90 degree
            B = rotations._get_rotation_matrix(
                rotations.lat_lon_radius_to_xyz(0.0, c_lng + 90, 1.0),
                latitude_rotation)
            # Rotate around the North pole.
            C = rotations._get_rotation_matrix(
                [0.0, 0.0, 1.0], longitude_rotation)

            D = A * np.linalg.inv(C * B)

            axis, angle = rotations._get_axis_and_angle_from_rotation_matrix(D)
            rotated_axis = rotations.xyz_to_lat_lon_radius(*axis)

            # Consistency check
            if abs(rotated_axis[0] - c_lat_1) >= 0.01 or \
                    abs(rotated_axis[1] - c_lng_1) >= 0.01:
                axis *= -1.0
                angle *= -1.0
                rotated_axis = rotations.xyz_to_lat_lon_radius(*axis)

            if abs(rotated_axis[0] - c_lat_1) >= 0.01 or \
                    abs(rotated_axis[1] - c_lng_1) >= 0.01:
                msg = "Failed to describe the domain in terms that SPECFEM " \
                      "understands"
                raise LASIFError(msg)

            gen.config.ANGULAR_WIDTH_XI_IN_DEGREES = lng_range
            gen.config.ANGULAR_WIDTH_ETA_IN_DEGREES = lat_range
            gen.config.CENTER_LATITUDE_IN_DEGREES = c_lat_1
            gen.config.CENTER_LONGITUDE_IN_DEGREES = c_lng_1
            gen.config.GAMMA_ROTATION_AZIMUTH = angle

            gen.config.MODEL = cs["model"]

            pp = iteration.get_process_params()
            gen.config.RECORD_LENGTH_IN_MINUTES = \
                (pp["npts"] * pp["dt"]) / 60.0
            solver_format = solver_format.upper()

        else:
            msg = "Unknown solver '%s'." % solver_format
            raise NotImplementedError(msg)

        # =================================================================
        # output
        # =================================================================
        output_dir = self.comm.project.get_output_folder(
            "input_files___ITERATION_%s__%s__EVENT_%s" % (
                iteration_name, simulation_type.replace(" ", "_"),
                event_name))

        gen.write(format=solver_format, output_dir=output_dir)
        print "Written files to '%s'." % output_dir
Esempio n. 42
0
def lasif_generate_dummy_data(args):
    """
    Usage: lasif generate_dummy_data

    Generates some random example event and waveforms. Useful for debugging,
    testing, and following the tutorial.
    """
    import inspect
    from lasif import rotations
    from lasif.adjoint_sources.utils import get_dispersed_wavetrain
    import numpy as np
    import obspy

    if len(args):
        msg = "No arguments allowed."
        raise LASIFCommandLineException(msg)

    proj = _find_project_root(".")

    # Use a seed to make it somewhat predictable.
    random.seed(34235234)
    # Create 8 random events.
    d = proj.domain["bounds"]
    b = d["boundary_width_in_degree"] * 1.5
    event_count = 8
    for _i in xrange(event_count):
        lat = random.uniform(d["minimum_latitude"] + b,
                             d["maximum_latitude"] - b)
        lon = random.uniform(d["minimum_longitude"] + b,
                             d["maximum_longitude"] - b)
        depth_in_m = random.uniform(d["minimum_depth_in_km"],
                                    d["maximum_depth_in_km"]) * 1000.0
        # Rotate the coordinates.
        lat, lon = rotations.rotate_lat_lon(lat, lon,
                                            proj.domain["rotation_axis"],
                                            proj.domain["rotation_angle"])
        time = obspy.UTCDateTime(
            random.uniform(
                obspy.UTCDateTime(2008, 1, 1).timestamp,
                obspy.UTCDateTime(2013, 1, 1).timestamp))

        # The moment tensor. XXX: Make sensible values!
        values = [
            -3.3e+18, 1.43e+18, 1.87e+18, -1.43e+18, -2.69e+17, -1.77e+18
        ]
        random.shuffle(values)

        mrr = values[0]
        mtt = values[1]
        mpp = values[2]
        mrt = values[3]
        mrp = values[4]
        mtp = values[5]
        mag = random.uniform(5, 7)
        scalar_moment = 3.661e+25

        event_name = os.path.join(proj.paths["events"],
                                  "dummy_event_%i.xml" % (_i + 1))

        cat = obspy.core.event.Catalog(events=[
            obspy.core.event.Event(
                event_type="earthquake",
                origins=[
                    obspy.core.event.Origin(latitude=lat,
                                            longitude=lon,
                                            depth=depth_in_m,
                                            time=time)
                ],
                magnitudes=[
                    obspy.core.event.Magnitude(mag=mag, magnitude_type="Mw")
                ],
                focal_mechanisms=[
                    obspy.core.event.FocalMechanism(
                        moment_tensor=obspy.core.event.MomentTensor(
                            scalar_moment=scalar_moment,
                            tensor=obspy.core.event.Tensor(m_rr=mrr,
                                                           m_tt=mtt,
                                                           m_pp=mpp,
                                                           m_rt=mrt,
                                                           m_rp=mrp,
                                                           m_tp=mtp)))
                ])
        ])
        cat.write(event_name, format="quakeml", validate=False)
    print "Generated %i random events." % event_count

    # Update the folder structure.
    proj.update_folder_structure()

    names_taken = []

    def _get_random_name(length):
        while True:
            ret = ""
            for i in xrange(length):
                ret += chr(int(random.uniform(ord("A"), ord("Z"))))
            if ret in names_taken:
                continue
            names_taken.append(ret)
            break
        return ret

    # Now generate 30 station coordinates. Use a land-sea mask included in
    # basemap and loop until thirty stations on land are found.
    from mpl_toolkits.basemap import _readlsmask
    from obspy.core.util.geodetics import gps2DistAzimuth
    ls_lon, ls_lat, ls_mask = _readlsmask()
    stations = []
    # Do not loop forever; the chosen region might contain no land at all.
    for i in xrange(10000):
        if len(stations) >= 30:
            break
        lat = random.uniform(d["minimum_latitude"] + b,
                             d["maximum_latitude"] - b)
        lon = random.uniform(d["minimum_longitude"] + b,
                             d["maximum_longitude"] - b)
        # Rotate the coordinates.
        lat, lon = rotations.rotate_lat_lon(lat, lon,
                                            proj.domain["rotation_axis"],
                                            proj.domain["rotation_angle"])
        if not ls_mask[np.abs(ls_lat - lat).argmin()][np.abs(ls_lon -
                                                             lon).argmin()]:
            continue
        stations.append({
            "latitude": lat,
            "longitude": lon,
            "network": "XX",
            "station": _get_random_name(3)
        })

    if not len(stations):
        msg = "Could not create stations. Pure ocean region?"
        raise ValueError(msg)

    # Create a RESP file for every channel.
    resp_file_temp = os.path.join(
        os.path.dirname(
            os.path.abspath(inspect.getfile(inspect.currentframe()))),
        os.path.pardir, "tools", "RESP.template_file")
    with open(resp_file_temp, "rt") as open_file:
        resp_file_template = open_file.read()

    for station in stations:
        for component in ["E", "N", "Z"]:
            filename = os.path.join(
                proj.paths["resp"], "RESP.%s.%s.%s.BE%s" %
                (station["network"], station["station"], "", component))
            with open(filename, "wt") as open_file:
                open_file.write(
                    resp_file_template.format(station=station["station"],
                                              network=station["network"],
                                              channel="BH%s" % component))

    print "Generated %i RESP files." % (30 * 3)

    def _empty_sac_trace():
        """
        Helper function to create an empty SAC header.
        """
        sac_dict = {}
        # Floats: -12345.0
        floats = [
            "a", "mag", "az", "baz", "cmpaz", "cmpinc", "b", "depmax",
            "depmen", "depmin", "dist", "e", "evdp", "evla", "evlo", "f",
            "gcarc", "o", "odelta", "stdp", "stel", "stla", "stlo", "t0", "t1",
            "t2", "t3", "t4", "t5", "t6", "t7", "t8", "t9", "unused10",
            "unused11", "unused12", "unused6", "unused7", "unused8", "unused9",
            "user0", "user1", "user2", "user3", "user4", "user5", "user6",
            "user7", "user8", "user9", "xmaximum", "xminimum", "ymaximum",
            "yminimum"
        ]
        sac_dict.update({key: -12345.0 for key in floats})
        # Integers: -12345
        integers = [
            "idep", "ievreg", "ievtype", "iftype", "iinst", "imagsrc",
            "imagtyp", "iqual", "istreg", "isynth", "iztype", "lcalda",
            "lovrok", "nevid", "norid", "nwfid"
        ]
        sac_dict.update({key: -12345 for key in integers})
        # Strings: "-12345  "
        strings = [
            "ka", "kdatrd", "kevnm", "kf", "kinst", "ko", "kt0", "kt1", "kt2",
            "kt3", "kt4", "kt5", "kt6", "kt7", "kt8", "kt9", "kuser0",
            "kuser1", "kuser2"
        ]

        sac_dict.update({key: "-12345  " for key in strings})

        # Header version
        sac_dict["nvhdr"] = 6
        # Data is evenly spaced
        sac_dict["leven"] = 1
        # And a positive polarity.
        sac_dict["lpspol"] = 1

        tr = obspy.Trace()
        tr.stats.sac = obspy.core.AttribDict(sac_dict)
        return tr

    events = proj.get_all_events()
    # Now loop over all events and create SAC file for them.
    for _i, event in enumerate(events):
        lat, lng = event.origins[0].latitude, event.origins[0].longitude
        # Get the distance from the event to each station.
        for station in stations:
            # Add some perturbations.
            distance_in_km = gps2DistAzimuth(lat, lng, station["latitude"],
                                             station["longitude"])[0] / 1000.0
            a = random.uniform(3.9, 4.1)
            b = random.uniform(0.9, 1.1)
            c = random.uniform(0.9, 1.1)
            body_wave_factor = random.uniform(0.095, 0.015)
            body_wave_freq_scale = random.uniform(0.45, 0.55)
            distance_in_km = random.uniform(0.99 * distance_in_km,
                                            1.01 * distance_in_km)
            _, u = get_dispersed_wavetrain(
                dw=0.001,
                distance=distance_in_km,
                t_min=0,
                t_max=900,
                a=a,
                b=b,
                c=c,
                body_wave_factor=body_wave_factor,
                body_wave_freq_scale=body_wave_freq_scale)
            for component in ["E", "N", "Z"]:
                tr = _empty_sac_trace()
                tr.data = u
                tr.stats.network = station["network"]
                tr.stats.station = station["station"]
                tr.stats.location = ""
                tr.stats.channel = "BH%s" % component
                tr.stats.sac.stla = station["latitude"]
                tr.stats.sac.stlo = station["longitude"]
                tr.stats.sac.stdp = 0.0
                tr.stats.sac.stel = 0.0
                path = os.path.join(proj.paths["data"],
                                    "dummy_event_%i" % (_i + 1), "raw")
                if not os.path.exists(path):
                    os.makedirs(path)
                tr.write(os.path.join(
                    path, "%s.%s..BH%s.sac" %
                    (station["network"], station["station"], component)),
                         format="sac")
    print "Generated %i waveform files." % (30 * 3 * len(events))
Example no. 43
    def finalize_adjoint_sources(self, iteration_name, event_name):
        """
        Finalizes the adjoint sources.
        """
        import itertools
        from itertools import izip
        import numpy as np

        from lasif import rotations

        all_coordinates = []
        _i = 0

        window_manager = self.comm.windows.get(event_name, iteration_name)
        event = self.comm.events.get(event_name)
        iteration = self.comm.iterations.get(iteration_name)
        iteration_event_def = iteration.events[event["event_name"]]
        iteration_stations = iteration_event_def["stations"]

        event_weight = iteration_event_def["event_weight"]

        output_folder = self.comm.project.get_output_folder(
            "adjoint_sources__ITERATION_%s__%s" % (iteration_name, event_name))

        l = sorted(window_manager.list())
        for station, windows in itertools.groupby(
                l, key=lambda x: ".".join(x.split(".")[:2])):
            if station not in iteration_stations:
                continue
            station_weight = iteration_stations[station]["station_weight"]
            channels = {}
            for w in windows:
                w = window_manager.get(w)
                channel_weight = 0
                srcs = []
                for window in w:
                    ad_src = window.adjoint_source
                    if not ad_src["adjoint_source"].ptp():
                        continue
                    srcs.append(ad_src["adjoint_source"] * window.weight)
                    channel_weight += window.weight
                if not srcs:
                    continue
                # Final adjoint source for that channel and apply all weights.
                adjoint_source = np.sum(srcs, axis=0) / channel_weight * \
                    event_weight * station_weight
                channels[w.channel_id[-1]] = adjoint_source
            if not channels:
                continue
            # All adjoint sources of one station should have the same length.
            length = set(len(v) for v in channels.values())
            assert len(length) == 1
            length = length.pop()
            # All missing channels will be replaced with a zero array.
            for c in ["Z", "N", "E"]:
                if c in channels:
                    continue
                channels[c] = np.zeros(length)

            # Get the station coordinates
            coords = self.comm.query.get_coordinates_for_station(event_name,
                                                                 station)

            # Rotate if needed.
            rec_lat = coords["latitude"]
            rec_lng = coords["longitude"]
            domain = self.comm.project.domain

            if domain["rotation_angle"]:
                # Rotate the adjoint source location.
                r_rec_lat, r_rec_lng = rotations.rotate_lat_lon(
                    rec_lat, rec_lng, domain["rotation_axis"],
                    -domain["rotation_angle"])
                # Rotate the adjoint sources.
                channels["N"], channels["E"], channels["Z"] = \
                    rotations.rotate_data(
                        channels["N"], channels["E"],
                        channels["Z"], rec_lat, rec_lng,
                        domain["rotation_axis"],
                        -domain["rotation_angle"])
            else:
                r_rec_lat = rec_lat
                r_rec_lng = rec_lng
            r_rec_depth = 0.0
            r_rec_colat = rotations.lat2colat(r_rec_lat)

            CHANNEL_MAPPING = {"X": "N", "Y": "E", "Z": "Z"}

            _i += 1

            adjoint_src_filename = os.path.join(output_folder,
                                                "ad_src_%i" % _i)

            all_coordinates.append((r_rec_colat, r_rec_lng, r_rec_depth))

            # Actually write the adjoint source file in SES3D specific format.
            with open(adjoint_src_filename, "wt") as open_file:
                open_file.write("-- adjoint source ------------------\n")
                open_file.write("-- source coordinates (colat,lon,depth)\n")
                open_file.write("%f %f %f\n" % (r_rec_colat, r_rec_lng,
                                                r_rec_depth))
                open_file.write("-- source time function (x, y, z) --\n")
                for x, y, z in izip(-1.0 * channels[CHANNEL_MAPPING["X"]],
                                    channels[CHANNEL_MAPPING["Y"]],
                                    channels[CHANNEL_MAPPING["Z"]]):
                    open_file.write("%e %e %e\n" % (x, y, z))
                open_file.write("\n")

        # Write the final file.
        with open(os.path.join(output_folder, "ad_srcfile"), "wt") as fh:
            fh.write("%i\n" % _i)
            for line in all_coordinates:
                fh.write("%.6f %.6f %.6f\n" % (line[0], line[1], line[2]))
            fh.write("\n")

        print "Wrote %i adjoint sources to %s." % (
            _i, os.path.relpath(output_folder))
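
Each ad_src_N file written above is plain text: two comment lines, one line with the receiver colatitude, longitude, and depth, another comment line, and then one x/y/z sample triplet per line; ad_srcfile lists the receiver coordinates of all sources. A minimal reader sketch that simply mirrors this layout (the function name is made up for illustration):

import numpy as np

def read_ses3d_adjoint_source(filename):
    # Mirror the writer above: skip the comment lines, parse the
    # coordinate line, then collect the x/y/z sample triplets.
    with open(filename, "rt") as fh:
        lines = [line.strip() for line in fh if line.strip()]
    colat, lon, depth = map(float, lines[2].split())
    samples = np.array([[float(v) for v in line.split()]
                        for line in lines[4:]])
    return (colat, lon, depth), samples
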
Example no. 44
    def generate_input_files(self, iteration_name, event_name,
                             simulation_type):
        """
        Generate the input files for one event.

        :param iteration_name: The name of the iteration.
        :param event_name: The name of the event for which to generate the
            input files.
        :param simulation_type: The type of simulation to perform. Possible
            values are: 'normal simulation', 'adjoint forward', 'adjoint
            reverse'
        """
        from wfs_input_generator import InputFileGenerator

        # =====================================================================
        # read iteration xml file, get event and list of stations
        # =====================================================================

        iteration = self.comm.iterations.get(iteration_name)

        # Check that the event is part of the iteration.
        if event_name not in iteration.events:
            msg = ("Event '%s' not part of iteration '%s'.\nEvents available "
                   "in iteration:\n\t%s" %
                   (event_name, iteration_name, "\n\t".join(
                       sorted(iteration.events.keys()))))
            raise ValueError(msg)

        event = self.comm.events.get(event_name)
        stations_for_event = list(
            iteration.events[event_name]["stations"].keys())

        # Get all stations and create a dictionary for the input file
        # generator.
        stations = self.comm.query.get_all_stations_for_event(event_name)
        stations = [{
            "id": key,
            "latitude": value["latitude"],
            "longitude": value["longitude"],
            "elevation_in_m": value["elevation_in_m"],
            "local_depth_in_m": value["local_depth_in_m"]
        } for key, value in stations.items() if key in stations_for_event]

        # =====================================================================
        # set solver options
        # =====================================================================

        solver = iteration.solver_settings

        # Check that the solver is one of the supported formats.
        solver_format = solver["solver"].lower()
        if solver_format not in [
                "ses3d 4.1", "ses3d 2.0", "specfem3d cartesian",
                "specfem3d globe cem"
        ]:
            msg = ("Currently only SES3D 4.1, SES3D 2.0, SPECFEM3D "
                   "CARTESIAN, and SPECFEM3D GLOBE CEM are supported.")
            raise ValueError(msg)
        solver_format = solver_format.replace(' ', '_')
        solver_format = solver_format.replace('.', '_')

        solver = solver["solver_settings"]

        # =====================================================================
        # create the input file generator, add event and stations,
        # populate the configuration items
        # =====================================================================

        # Add the event and the stations to the input file generator.
        gen = InputFileGenerator()
        gen.add_events(event["filename"])
        gen.add_stations(stations)

        if solver_format in ["ses3d_4_1", "ses3d_2_0"]:
            # event tag
            gen.config.event_tag = event_name

            # Time configuration.
            npts = solver["simulation_parameters"]["number_of_time_steps"]
            delta = solver["simulation_parameters"]["time_increment"]
            gen.config.number_of_time_steps = npts
            gen.config.time_increment_in_s = delta

            # SES3D specific configuration
            gen.config.output_folder = solver["output_directory"].replace(
                "{{EVENT_NAME}}", event_name.replace(" ", "_"))
            gen.config.simulation_type = simulation_type

            gen.config.adjoint_forward_wavefield_output_folder = \
                solver["adjoint_output_parameters"][
                    "forward_field_output_directory"].replace(
                    "{{EVENT_NAME}}", event_name.replace(" ", "_"))
            gen.config.adjoint_forward_sampling_rate = \
                solver["adjoint_output_parameters"][
                    "sampling_rate_of_forward_field"]

            # Visco-elastic dissipation
            diss = solver["simulation_parameters"]["is_dissipative"]
            gen.config.is_dissipative = diss

            # Only SES3D 4.1 has the relaxation parameters.
            if solver_format == "ses3d_4_1":
                gen.config.Q_model_relaxation_times = \
                    solver["relaxation_parameter_list"]["tau"]
                gen.config.Q_model_weights_of_relaxation_mechanisms = \
                    solver["relaxation_parameter_list"]["w"]

            # Discretization
            disc = solver["computational_setup"]
            gen.config.nx_global = disc["nx_global"]
            gen.config.ny_global = disc["ny_global"]
            gen.config.nz_global = disc["nz_global"]
            gen.config.px = disc["px_processors_in_theta_direction"]
            gen.config.py = disc["py_processors_in_phi_direction"]
            gen.config.pz = disc["pz_processors_in_r_direction"]
            gen.config.lagrange_polynomial_degree = \
                disc["lagrange_polynomial_degree"]

            # Configure the mesh.
            domain = self.comm.project.domain
            gen.config.mesh_min_latitude = domain.min_latitude
            gen.config.mesh_max_latitude = domain.max_latitude
            gen.config.mesh_min_longitude = domain.min_longitude
            gen.config.mesh_max_longitude = domain.max_longitude
            gen.config.mesh_min_depth_in_km = domain.min_depth_in_km
            gen.config.mesh_max_depth_in_km = domain.max_depth_in_km

            # Set the rotation parameters.
            gen.config.rotation_angle_in_degree = \
                domain.rotation_angle_in_degree
            gen.config.rotation_axis = domain.rotation_axis

            # Make source time function
            gen.config.source_time_function = \
                iteration.get_source_time_function()["data"]
        elif solver_format == "specfem3d_cartesian":
            gen.config.NSTEP = \
                solver["simulation_parameters"]["number_of_time_steps"]
            gen.config.DT = \
                solver["simulation_parameters"]["time_increment"]
            gen.config.NPROC = \
                solver["computational_setup"]["number_of_processors"]
            if simulation_type == "normal simulation":
                msg = ("'normal_simulate' not supported for SPECFEM3D "
                       "Cartesian. Please choose either 'adjoint_forward' or "
                       "'adjoint_reverse'.")
                raise NotImplementedError(msg)
            elif simulation_type == "adjoint forward":
                gen.config.SIMULATION_TYPE = 1
            elif simulation_type == "adjoint reverse":
                gen.config.SIMULATION_TYPE = 2
            else:
                raise NotImplementedError
            solver_format = solver_format.upper()

        elif solver_format == "specfem3d_globe_cem":
            cs = solver["computational_setup"]
            gen.config.NPROC_XI = cs["number_of_processors_xi"]
            gen.config.NPROC_ETA = cs["number_of_processors_eta"]
            gen.config.NCHUNKS = cs["number_of_chunks"]
            gen.config.NEX_XI = cs["elements_per_chunk_xi"]
            gen.config.NEX_ETA = cs["elements_per_chunk_eta"]
            gen.config.OCEANS = cs["simulate_oceans"]
            gen.config.ELLIPTICITY = cs["simulate_ellipticity"]
            gen.config.TOPOGRAPHY = cs["simulate_topography"]
            gen.config.GRAVITY = cs["simulate_gravity"]
            gen.config.ROTATION = cs["simulate_rotation"]
            gen.config.ATTENUATION = cs["simulate_attenuation"]
            gen.config.ABSORBING_CONDITIONS = True
            if cs["fast_undo_attenuation"]:
                gen.config.PARTIAL_PHYS_DISPERSION_ONLY = True
                gen.config.UNDO_ATTENUATION = False
            else:
                gen.config.PARTIAL_PHYS_DISPERSION_ONLY = False
                gen.config.UNDO_ATTENUATION = True
            gen.config.GPU_MODE = cs["use_gpu"]
            gen.config.SOURCE_TIME_FUNCTION = \
                iteration.get_source_time_function()["data"]

            if simulation_type == "normal simulation":
                gen.config.SIMULATION_TYPE = 1
                gen.config.SAVE_FORWARD = False
            elif simulation_type == "adjoint forward":
                gen.config.SIMULATION_TYPE = 1
                gen.config.SAVE_FORWARD = True
            elif simulation_type == "adjoint reverse":
                gen.config.SIMULATION_TYPE = 2
                gen.config.SAVE_FORWARD = True
            else:
                raise NotImplementedError

            # Use the current domain setting to derive the bounds in the way
            # SPECFEM specifies them.
            domain = self.comm.project.domain

            lat_range = domain.max_latitude - domain.min_latitude
            lng_range = domain.max_longitude - domain.min_longitude

            c_lat = domain.min_latitude + lat_range / 2.0
            c_lng = domain.min_longitude + lng_range / 2.0

            # Rotate the point.
            c_lat_1, c_lng_1 = rotations.rotate_lat_lon(
                c_lat, c_lng, domain.rotation_axis,
                domain.rotation_angle_in_degree)

            # SES3D rotation.
            A = rotations._get_rotation_matrix(domain.rotation_axis,
                                               domain.rotation_angle_in_degree)

            latitude_rotation = -(c_lat_1 - c_lat)
            longitude_rotation = c_lng_1 - c_lng

            # Rotate the latitude. The rotation axis is at latitude 0 and
            # the center longitude + 90 degrees.
            B = rotations._get_rotation_matrix(
                rotations.lat_lon_radius_to_xyz(0.0, c_lng + 90, 1.0),
                latitude_rotation)
            # Rotate around the North pole.
            C = rotations._get_rotation_matrix([0.0, 0.0, 1.0],
                                               longitude_rotation)

            # Residual rotation; its angle is the SPECFEM gamma azimuth.
            D = A * np.linalg.inv(C * B)

            axis, angle = rotations._get_axis_and_angle_from_rotation_matrix(D)
            rotated_axis = rotations.xyz_to_lat_lon_radius(*axis)

            # Consistency check
            if abs(rotated_axis[0] - c_lat_1) >= 0.01 or \
                    abs(rotated_axis[1] - c_lng_1) >= 0.01:
                axis *= -1.0
                angle *= -1.0
                rotated_axis = rotations.xyz_to_lat_lon_radius(*axis)

            if abs(rotated_axis[0] - c_lat_1) >= 0.01 or \
                    abs(rotated_axis[1] - c_lng_1) >= 0.01:
                msg = "Failed to describe the domain in terms that SPECFEM " \
                      "understands. The domain definition in the output " \
                      "files will NOT BE CORRECT!"
                warnings.warn(msg, LASIFWarning)

            gen.config.ANGULAR_WIDTH_XI_IN_DEGREES = lng_range
            gen.config.ANGULAR_WIDTH_ETA_IN_DEGREES = lat_range
            gen.config.CENTER_LATITUDE_IN_DEGREES = c_lat_1
            gen.config.CENTER_LONGITUDE_IN_DEGREES = c_lng_1
            gen.config.GAMMA_ROTATION_AZIMUTH = angle

            gen.config.MODEL = cs["model"]

            pp = iteration.get_process_params()
            gen.config.RECORD_LENGTH_IN_MINUTES = \
                (pp["npts"] * pp["dt"]) / 60.0
            solver_format = solver_format.upper()

        else:
            msg = "Unknown solver '%s'." % solver_format
            raise NotImplementedError(msg)

        # =================================================================
        # output
        # =================================================================
        output_dir = self.comm.project.get_output_folder(
            type="input_files",
            tag="ITERATION_%s__%s__EVENT_%s" %
            (iteration_name, simulation_type.replace(" ", "_"), event_name))

        gen.write(format=solver_format, output_dir=output_dir)
        print("Written files to '%s'." % output_dir)