Example #1
    def test_adjoint_time_frequency_phase_misfit_source(self):
        """
        Tests the adjoint source calculation for the time frequency phase
        misfit after Fichtner et al. (2008).
        """
        # Load the matlab output.
        ad_src_matlab = os.path.join(
            self.data_dir,
            "matlab_tf_phase_misfit_adjoint_source_reference_solution.mat")
        ad_src_matlab = loadmat(ad_src_matlab)["ad_src"].transpose()[0]

        # Generate some data.
        t, u = utils.get_dispersed_wavetrain()
        _, u0 = utils.get_dispersed_wavetrain(a=3.91,
                                              b=0.87,
                                              c=0.8,
                                              body_wave_factor=0.015,
                                              body_wave_freq_scale=1.0 / 2.2)

        adjoint_src = ad_src_tf_phase_misfit.adsrc_tf_phase_misfit(
            t, u, u0, 2, 10, 0.0)
        ad_src = adjoint_src["adjoint_source"]
        # Assert the misfit.
        self.assertAlmostEqual(adjoint_src["misfit"], 0.271417, 5)

        # Some testing tolerance is needed mainly due to the phase being hard
        # to define for small amplitudes.
        tolerance = np.abs(ad_src).max() * 1.2E-3
        np.testing.assert_allclose(ad_src, ad_src_matlab, 1E-7, tolerance)
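The comparison pattern used here is worth spelling out: a tight relative tolerance is combined with an absolute tolerance scaled to the peak amplitude of the adjoint source, so that near-zero samples (where the phase is poorly defined) cannot fail the comparison on their own. A minimal, self-contained sketch of the same idea with made-up numbers, not LASIF data:

import numpy as np

# Two hypothetical adjoint sources that agree everywhere except in a sample
# whose amplitude is negligible compared to the peak.
actual = np.array([1.0, 1.0e-9, -0.5])
desired = np.array([1.0, -1.0e-9, -0.5])

# Absolute tolerance tied to the peak amplitude, as in the test above.
atol = np.abs(actual).max() * 1.2E-3
np.testing.assert_allclose(actual, desired, rtol=1E-7, atol=atol)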
Example #2
def test_adjoint_time_frequency_phase_misfit_source():
    """
    Tests the adjoint source calculation for the time frequency phase
    misfit after Fichtner et al. (2008).

    XXX: Adjust the test.
    """
    # Load the matlab output.
    ad_src_matlab = os.path.join(
        data_dir,
        "matlab_tf_phase_misfit_adjoint_source_reference_solution.mat")
    ad_src_matlab = loadmat(ad_src_matlab)["ad_src"].transpose()[0]

    # Generate some data.
    t, u = utils.get_dispersed_wavetrain()
    _, u0 = utils.get_dispersed_wavetrain(
        a=3.91, b=0.87, c=0.8, body_wave_factor=0.015,
        body_wave_freq_scale=1.0 / 2.2)

    adjoint_src = ad_src_tf_phase_misfit.adsrc_tf_phase_misfit(
        t, u, u0, 2, 10, 0.0)
    ad_src = adjoint_src["adjoint_source"]
    # Assert the misfit.
    np.testing.assert_almost_equal(adjoint_src["misfit"], 0.271417, 5)

    # Some testing tolerance is needed mainly due to the phase being hard
    # to define for small amplitudes.
    tolerance = np.abs(ad_src).max() * 1.2E-3
    np.testing.assert_allclose(ad_src, ad_src_matlab, 1E-7, tolerance)
Example #3
def test_cross_correlation():
    """
    Tests the cross correlation function and compares it to a reference
    solution calculated in Matlab.
    """
    # Load the matlab file.
    matlab_file = os.path.join(data_dir, "matlab_cross_correlation_reference_solution.mat")
    cc_matlab = loadmat(matlab_file)["cc"][0]

    # Calculate two test signals.
    _, u = utils.get_dispersed_wavetrain()
    _, u0 = utils.get_dispersed_wavetrain(a=3.91, b=0.87, c=0.8, body_wave_factor=0.015, body_wave_freq_scale=1.0 / 2.2)

    cc = utils.cross_correlation(u, u0)
    np.testing.assert_allclose(cc, cc_matlab)
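For orientation, a full cross correlation of two equal-length signals can be computed with plain NumPy as sketched below. This is an assumption for illustration only; the normalization and output length of utils.cross_correlation (and of the Matlab reference) may differ.

import numpy as np

def cross_correlation_sketch(u, u0):
    # Full discrete cross correlation: the result has length 2 * len(u) - 1,
    # with zero lag in the middle sample.
    return np.correlate(u, u0, mode="full")

# Example with two short dummy signals.
u = np.array([0.0, 1.0, 0.0, -1.0])
u0 = np.array([1.0, 0.0, -1.0, 0.0])
print(cross_correlation_sketch(u, u0))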
Example #4
def test_time_frequency_transform():
    """
    Tests the basic time frequency transformation.
    """
    t, u = utils.get_dispersed_wavetrain(dt=2.0)
    tau, nu, tfs = time_frequency.time_frequency_transform(t=t,
                                                           s=u,
                                                           width=10.0)

    # Load the matlab output.
    matlab = os.path.join(data_dir, "matlab_tfa_output_reference_solution.mat")
    matlab = loadmat(matlab)
    tfs_matlab = matlab["tfs"]

    # Cut away some frequencies - the matlab version performs internal
    # interpolation resulting in aliasing. The rest of the values are a very
    # good fit.
    tfs = tfs[200:, :]
    tfs_matlab = tfs_matlab[200:, :]

    # Some tolerance is needed due to numeric differences.
    tolerance = 1E-5
    min_value = np.abs(tfs).max() * tolerance
    tfs[np.abs(tfs) < min_value] = 0 + 0j
    tfs_matlab[np.abs(tfs_matlab) < min_value] = 0 + 0j

    np.testing.assert_allclose(np.abs(tfs), np.abs(tfs_matlab))
    np.testing.assert_allclose(np.angle(tfs), np.angle(tfs_matlab))
Example #5
def test_time_frequency_transform():
    """
    Tests the basic time frequency transformation.
    """
    t, u = utils.get_dispersed_wavetrain(dt=2.0)
    tau, nu, tfs = time_frequency.time_frequency_transform(t=t, s=u, width=10.0)

    # Load the matlab output.
    matlab = os.path.join(data_dir, "matlab_tfa_output_reference_solution.mat")
    matlab = loadmat(matlab)
    tfs_matlab = matlab["tfs"]

    # Cut away some frequencies - the matlab version performs internal
    # interpolation resulting in aliasing. The rest of the values are a very
    # good fit.
    tfs = tfs[200:, :]
    tfs_matlab = tfs_matlab[200:, :]

    # Some tolerance is needed due to numeric differences.
    tolerance = 1e-5
    min_value = np.abs(tfs).max() * tolerance
    tfs[np.abs(tfs) < min_value] = 0 + 0j
    tfs_matlab[np.abs(tfs_matlab) < min_value] = 0 + 0j

    np.testing.assert_allclose(np.abs(tfs), np.abs(tfs_matlab))
    np.testing.assert_allclose(np.angle(tfs), np.angle(tfs_matlab))
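The quantity under test is a Gaussian-windowed (Gabor) time-frequency representation of the signal. The sketch below shows the general idea with plain NumPy; the window shape, normalization, and the sampling of tau and nu are assumptions for illustration and may differ from the actual time_frequency.time_frequency_transform.

import numpy as np

def gabor_transform_sketch(t, s, width):
    # Slide a Gaussian window of the given width along the signal and take
    # the Fourier transform of every windowed segment.
    dt = t[1] - t[0]
    tau = t                                # window center times
    nu = np.fft.rfftfreq(len(t), d=dt)     # non-negative frequencies
    tfs = np.empty((len(tau), len(nu)), dtype=complex)
    for i, t0 in enumerate(tau):
        window = np.exp(-((t - t0) / width) ** 2)
        tfs[i, :] = np.fft.rfft(s * window) * dt
    return tau, nu, tfs

# Example with a simple chirp-like test signal.
t = np.linspace(0.0, 100.0, 501)
s = np.sin(2.0 * np.pi * (0.05 + 0.001 * t) * t)
tau, nu, tfs = gabor_transform_sketch(t, s, width=10.0)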
Example #6
def test_time_frequency_transform():
    """
    Tests the basic time frequency transformation.

    XXX: Adjust the test.
    """
    t, u = utils.get_dispersed_wavetrain()
    tau, nu, tfs = time_frequency.time_frequency_transform(t, u, 2, 10, 0.0)

    # Load the matlab output.
    matlab = os.path.join(
        data_dir, "matlab_tfa_output_reference_solution.mat")
    matlab = loadmat(matlab)
    # tau_matlab = matlab["TAU"]
    # nu_matlab = matlab["NU"]
    tfs_matlab = matlab["tfs"]

    # Some tolerance is needed due to numeric differences.
    tolerance = 1E-5
    min_value = np.abs(tfs).max() * tolerance
    tfs[np.abs(tfs) < min_value] = 0 + 0j
    tfs_matlab[np.abs(tfs_matlab) < min_value] = 0 + 0j

    np.testing.assert_allclose(np.abs(tfs), np.abs(tfs_matlab))
    np.testing.assert_allclose(np.angle(tfs), np.angle(tfs_matlab))
Example #7
def test_dispersive_wavetrain():
    """
    Tests the dispersive wavetrain calculation by comparing it to a
    reference solution implemented in Matlab.
    """
    # Load the matlab file.
    matlab_file = os.path.join(data_dir, "matlab_dispersive_wavetrain_reference_solution.mat")
    matlab_file = loadmat(matlab_file)
    u_matlab = matlab_file["u"][0]
    u0_matlab = matlab_file["u0"][0]
    t, u = utils.get_dispersed_wavetrain()
    np.testing.assert_allclose(u, u_matlab)
    np.testing.assert_allclose(t, np.arange(901))
    t0, u0 = utils.get_dispersed_wavetrain(
        a=3.91, b=0.87, c=0.8, body_wave_factor=0.015, body_wave_freq_scale=1.0 / 2.2
    )
    np.testing.assert_allclose(u0, u0_matlab)
    np.testing.assert_allclose(t0, np.arange(901))
Example #8
    def test_cross_correlation(self):
        """
        Tests the cross correlation function and compares it to a reference
        solution calculated in Matlab.
        """
        # Load the matlab file.
        matlab_file = os.path.join(
            self.data_dir, "matlab_cross_correlation_reference_solution.mat")
        cc_matlab = loadmat(matlab_file)["cc"][0]

        # Calculate two test signals.
        _, u = utils.get_dispersed_wavetrain()
        _, u0 = utils.get_dispersed_wavetrain(a=3.91,
                                              b=0.87,
                                              c=0.8,
                                              body_wave_factor=0.015,
                                              body_wave_freq_scale=1.0 / 2.2)

        cc = utils.cross_correlation(u, u0)
        np.testing.assert_allclose(cc, cc_matlab)
Example #9
def test_dispersive_wavetrain():
    """
    Tests the dispersive wavetrain calculation by comparing it to a
    reference solution implemented in Matlab.
    """
    # Load the matlab file.
    matlab_file = os.path.join(
        data_dir, "matlab_dispersive_wavetrain_reference_solution.mat")
    matlab_file = loadmat(matlab_file)
    u_matlab = matlab_file["u"][0]
    u0_matlab = matlab_file["u0"][0]
    t, u = utils.get_dispersed_wavetrain()
    np.testing.assert_allclose(u, u_matlab)
    np.testing.assert_allclose(t, np.arange(901))
    t0, u0 = utils.get_dispersed_wavetrain(a=3.91,
                                           b=0.87,
                                           c=0.8,
                                           body_wave_factor=0.015,
                                           body_wave_freq_scale=1.0 / 2.2)
    np.testing.assert_allclose(u0, u0_matlab)
    np.testing.assert_allclose(t0, np.arange(901))
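Outside the test suite the same helper is handy for eyeballing the synthetic "data" and "synthetics" that the reference solutions are built from. A quick-look sketch; the plotting part is an assumption for illustration, only the call signature is taken from the examples above.

import matplotlib.pyplot as plt
from lasif.adjoint_sources import utils

# "Observed" and perturbed "synthetic" dispersed wavetrains, as in the tests.
t, u = utils.get_dispersed_wavetrain()
_, u0 = utils.get_dispersed_wavetrain(a=3.91, b=0.87, c=0.8,
                                      body_wave_factor=0.015,
                                      body_wave_freq_scale=1.0 / 2.2)

plt.plot(t, u, label="data")
plt.plot(t, u0, label="synthetics")
plt.legend()
plt.show()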
Example #10
    def test_time_frequency_transform(self):
        """
        Tests the basic time frequency transformation.
        """
        t, u = utils.get_dispersed_wavetrain()
        tau, nu, tfs = time_frequency.time_frequency_transform(
            t, u, 2, 10, 0.0)

        # Load the matlab output.
        matlab = os.path.join(self.data_dir,
                              "matlab_tfa_output_reference_solution.mat")
        matlab = loadmat(matlab)
        # tau_matlab = matlab["TAU"]
        # nu_matlab = matlab["NU"]
        tfs_matlab = matlab["tfs"]

        # Some tolerance is needed due to numeric differences.
        tolerance = 1E-5
        min_value = np.abs(tfs).max() * tolerance
        tfs[np.abs(tfs) < min_value] = 0 + 0j
        tfs_matlab[np.abs(tfs_matlab) < min_value] = 0 + 0j

        np.testing.assert_allclose(np.abs(tfs), np.abs(tfs_matlab))
        np.testing.assert_allclose(np.angle(tfs), np.angle(tfs_matlab))
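The zeroing of small values before the comparison is needed because the phase of a complex number with near-vanishing magnitude is numerically meaningless: two implementations can agree perfectly on the amplitudes and still report completely different angles for those samples. A tiny illustration with made-up numbers, not LASIF output:

import numpy as np

a = np.array([1.0e-12 + 1.0e-12j])
b = np.array([1.0e-12 - 1.0e-12j])

print(np.allclose(np.abs(a), np.abs(b)))      # True  -- amplitudes agree
print(np.allclose(np.angle(a), np.angle(b)))  # False -- phases differ by pi/2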
Example #11
def lasif_generate_dummy_data(args):
    """
    Usage: lasif generate_dummy_data

    Generates some random example events and waveforms. Useful for debugging,
    testing, and following the tutorial.
    """
    import inspect
    from lasif import rotations
    from lasif.adjoint_sources.utils import get_dispersed_wavetrain
    import numpy as np
    import obspy

    if len(args):
        msg = "No arguments allowed."
        raise LASIFCommandLineException(msg)

    proj = _find_project_root(".")

    # Use a seed to make it somewhat predictable.
    random.seed(34235234)
    # Create 8 events.
    d = proj.domain["bounds"]
    b = d["boundary_width_in_degree"] * 1.5
    event_count = 8
    for _i in xrange(8):
        lat = random.uniform(d["minimum_latitude"] + b,
            d["maximum_latitude"] - b)
        lon = random.uniform(d["minimum_longitude"] + b,
            d["maximum_longitude"] - b)
        depth_in_m = random.uniform(d["minimum_depth_in_km"],
            d["maximum_depth_in_km"]) * 1000.0
        # Rotate the coordinates.
        lat, lon = rotations.rotate_lat_lon(lat, lon,
            proj.domain["rotation_axis"], proj.domain["rotation_angle"])
        time = obspy.UTCDateTime(random.uniform(
            obspy.UTCDateTime(2008, 1, 1).timestamp,
            obspy.UTCDateTime(2013, 1, 1).timestamp))

        # The moment tensor. XXX: Make sensible values!
        values = [-3.3e+18, 1.43e+18, 1.87e+18, -1.43e+18, -2.69e+17,
            -1.77e+18]
        random.shuffle(values)

        mrr = values[0]
        mtt = values[1]
        mpp = values[2]
        mrt = values[3]
        mrp = values[4]
        mtp = values[5]
        mag = random.uniform(5, 7)
        scalar_moment = 3.661e+25

        event_name = os.path.join(proj.paths["events"],
            "dummy_event_%i.xml" % (_i + 1))

        cat = obspy.core.event.Catalog(events=[
            obspy.core.event.Event(
                event_type="earthquake",
                origins=[obspy.core.event.Origin(
                    latitude=lat, longitude=lon, depth=depth_in_m, time=time)],
                magnitudes=[obspy.core.event.Magnitude(
                    mag=mag, magnitude_type="Mw")],
                focal_mechanisms=[obspy.core.event.FocalMechanism(
                    moment_tensor=obspy.core.event.MomentTensor(
                        scalar_moment=scalar_moment,
                        tensor=obspy.core.event.Tensor(m_rr=mrr, m_tt=mtt,
                            m_pp=mpp, m_rt=mrt, m_rp=mrp, m_tp=mtp)))])])
        cat.write(event_name, format="quakeml", validate=False)
    print "Generated %i random events." % event_count

    # Update the folder structure.
    proj.update_folder_structure()

    names_taken = []

    def _get_random_name(length):
        while True:
            ret = ""
            for i in xrange(length):
                ret += chr(int(random.uniform(ord("A"), ord("Z"))))
            if ret in names_taken:
                continue
            names_taken.append(ret)
            break
        return ret

    # Now generate 30 station coordinates. Use a land-sea mask included in
    # basemap and loop until thirty stations on land are found.
    from mpl_toolkits.basemap import _readlsmask
    from obspy.core.util.geodetics import gps2DistAzimuth
    ls_lon, ls_lat, ls_mask = _readlsmask()
    stations = []
    # Do not use an infinite loop. One could choose a region with no land.
    for i in xrange(10000):
        if len(stations) >= 30:
            break
        lat = random.uniform(d["minimum_latitude"] + b,
            d["maximum_latitude"] - b)
        lon = random.uniform(d["minimum_longitude"] + b,
            d["maximum_longitude"] - b)
        # Rotate the coordinates.
        lat, lon = rotations.rotate_lat_lon(lat, lon,
            proj.domain["rotation_axis"], proj.domain["rotation_angle"])
        if not ls_mask[np.abs(ls_lat - lat).argmin()][
                np.abs(ls_lon - lon).argmin()]:
            continue
        stations.append({"latitude": lat, "longitude": lon,
            "network": "XX", "station": _get_random_name(3)})

    if not len(stations):
        msg = "Could not create stations. Pure ocean region?"
        raise ValueError(msg)

    # Create a RESP file for every channel.
    resp_file_temp = os.path.join(os.path.dirname(os.path.abspath(
        inspect.getfile(inspect.currentframe()))), os.path.pardir, "tools",
        "RESP.template_file")
    with open(resp_file_temp, "rt") as open_file:
        resp_file_template = open_file.read()

    for station in stations:
        for component in ["E", "N", "Z"]:
            filename = os.path.join(proj.paths["resp"], "RESP.%s.%s.%s.BE%s" %
                (station["network"], station["station"], "", component))
            with open(filename, "wt") as open_file:
                open_file.write(resp_file_template.format(
                    station=station["station"], network=station["network"],
                    channel="BH%s" % component))

    print "Generated %i RESP files." % (30 * 3)

    def _empty_sac_trace():
        """
        Helper function to create an empty SAC header.
        """
        sac_dict = {}
        # Floats: -12345.0
        floats = ["a", "mag", "az", "baz", "cmpaz", "cmpinc", "b", "depmax",
            "depmen", "depmin", "dist", "e", "evdp", "evla", "evlo", "f",
            "gcarc", "o", "odelta", "stdp", "stel", "stla", "stlo", "t0", "t1",
            "t2", "t3", "t4", "t5", "t6", "t7", "t8", "t9", "unused10",
            "unused11", "unused12", "unused6", "unused7", "unused8", "unused9",
            "user0", "user1", "user2", "user3", "user4", "user5", "user6",
            "user7", "user8", "user9", "xmaximum", "xminimum", "ymaximum",
            "yminimum"]
        sac_dict.update({key: -12345.0 for key in floats})
        # Integers: -12345
        integers = ["idep", "ievreg", "ievtype", "iftype", "iinst", "imagsrc",
            "imagtyp", "iqual", "istreg", "isynth", "iztype", "lcalda",
            "lovrok", "nevid", "norid", "nwfid"]
        sac_dict.update({key: -12345 for key in integers})
        # Strings: "-12345  "
        strings = ["ka", "kdatrd", "kevnm", "kf", "kinst", "ko", "kt0", "kt1",
            "kt2", "kt3", "kt4", "kt5", "kt6", "kt7", "kt8", "kt9",
            "kuser0", "kuser1", "kuser2"]

        sac_dict.update({key: "-12345  " for key in strings})

        # Header version
        sac_dict["nvhdr"] = 6
        # Data is evenly spaced
        sac_dict["leven"] = 1
        # And a positive polarity.
        sac_dict["lpspol"] = 1

        tr = obspy.Trace()
        tr.stats.sac = obspy.core.AttribDict(sac_dict)
        return tr

    events = proj.get_all_events()
    # Now loop over all events and create SAC files for them.
    for _i, event in enumerate(events):
        lat, lng = event.origins[0].latitude, event.origins[0].longitude
        # Get the distance from the event to each station.
        for station in stations:
            # Add some perturbations.
            distance_in_km = gps2DistAzimuth(lat, lng, station["latitude"],
                station["longitude"])[0] / 1000.0
            a = random.uniform(3.9, 4.1)
            b = random.uniform(0.9, 1.1)
            c = random.uniform(0.9, 1.1)
            body_wave_factor = random.uniform(0.095, 0.015)
            body_wave_freq_scale = random.uniform(0.45, 0.55)
            distance_in_km = random.uniform(0.99 * distance_in_km, 1.01 *
                distance_in_km)
            _, u = get_dispersed_wavetrain(dw=0.001,
                distance=distance_in_km, t_min=0, t_max=900, a=a, b=b, c=c,
                body_wave_factor=body_wave_factor,
                body_wave_freq_scale=body_wave_freq_scale)
            for component in ["E", "N", "Z"]:
                tr = _empty_sac_trace()
                tr.data = u
                tr.stats.network = station["network"]
                tr.stats.station = station["station"]
                tr.stats.location = ""
                tr.stats.channel = "BH%s" % component
                tr.stats.sac.stla = station["latitude"]
                tr.stats.sac.stlo = station["longitude"]
                tr.stats.sac.stdp = 0.0
                tr.stats.sac.stel = 0.0
                path = os.path.join(proj.paths["data"],
                    "dummy_event_%i" % (_i + 1), "raw")
                if not os.path.exists(path):
                    os.makedirs(path)
                tr.write(os.path.join(path, "%s.%s..BH%s.sac" %
                    (station["network"], station["station"], component)),
                    format="sac")
    print "Generated %i waveform files." % (30 * 3 * len(events))
Example #12
def lasif_generate_dummy_data(args):
    """
    Usage: lasif generate_dummy_data

    Generates some random example events and waveforms. Useful for debugging,
    testing, and following the tutorial.
    """
    import inspect
    from lasif import rotations
    from lasif.adjoint_sources.utils import get_dispersed_wavetrain
    import numpy as np
    import obspy

    if len(args):
        msg = "No arguments allowed."
        raise LASIFCommandLineException(msg)

    proj = _find_project_root(".")

    # Use a seed to make it somewhat predictable.
    random.seed(34235234)
    # Create 8 events.
    d = proj.domain["bounds"]
    b = d["boundary_width_in_degree"] * 1.5
    event_count = 8
    for _i in xrange(8):
        lat = random.uniform(d["minimum_latitude"] + b,
                             d["maximum_latitude"] - b)
        lon = random.uniform(d["minimum_longitude"] + b,
                             d["maximum_longitude"] - b)
        depth_in_m = random.uniform(d["minimum_depth_in_km"],
                                    d["maximum_depth_in_km"]) * 1000.0
        # Rotate the coordinates.
        lat, lon = rotations.rotate_lat_lon(lat, lon,
                                            proj.domain["rotation_axis"],
                                            proj.domain["rotation_angle"])
        time = obspy.UTCDateTime(
            random.uniform(
                obspy.UTCDateTime(2008, 1, 1).timestamp,
                obspy.UTCDateTime(2013, 1, 1).timestamp))

        # The moment tensor. XXX: Make sensible values!
        values = [
            -3.3e+18, 1.43e+18, 1.87e+18, -1.43e+18, -2.69e+17, -1.77e+18
        ]
        random.shuffle(values)

        mrr = values[0]
        mtt = values[1]
        mpp = values[2]
        mrt = values[3]
        mrp = values[4]
        mtp = values[5]
        mag = random.uniform(5, 7)
        scalar_moment = 3.661e+25

        event_name = os.path.join(proj.paths["events"],
                                  "dummy_event_%i.xml" % (_i + 1))

        cat = obspy.core.event.Catalog(events=[
            obspy.core.event.Event(
                event_type="earthquake",
                origins=[
                    obspy.core.event.Origin(latitude=lat,
                                            longitude=lon,
                                            depth=depth_in_m,
                                            time=time)
                ],
                magnitudes=[
                    obspy.core.event.Magnitude(mag=mag, magnitude_type="Mw")
                ],
                focal_mechanisms=[
                    obspy.core.event.FocalMechanism(
                        moment_tensor=obspy.core.event.MomentTensor(
                            scalar_moment=scalar_moment,
                            tensor=obspy.core.event.Tensor(m_rr=mrr,
                                                           m_tt=mtt,
                                                           m_pp=mpp,
                                                           m_rt=mrt,
                                                           m_rp=mrp,
                                                           m_tp=mtp)))
                ])
        ])
        cat.write(event_name, format="quakeml", validate=False)
    print "Generated %i random events." % event_count

    # Update the folder structure.
    proj.update_folder_structure()

    names_taken = []

    def _get_random_name(length):
        while True:
            ret = ""
            for i in xrange(length):
                ret += chr(int(random.uniform(ord("A"), ord("Z"))))
            if ret in names_taken:
                continue
            names_taken.append(ret)
            break
        return ret

    # Now generate 30 station coordinates. Use a land-sea mask included in
    # basemap and loop until thirty stations on land are found.
    from mpl_toolkits.basemap import _readlsmask
    from obspy.core.util.geodetics import gps2DistAzimuth
    ls_lon, ls_lat, ls_mask = _readlsmask()
    stations = []
    # Do not use an infinite loop. One could choose a region with no land.
    for i in xrange(10000):
        if len(stations) >= 30:
            break
        lat = random.uniform(d["minimum_latitude"] + b,
                             d["maximum_latitude"] - b)
        lon = random.uniform(d["minimum_longitude"] + b,
                             d["maximum_longitude"] - b)
        # Rotate the coordinates.
        lat, lon = rotations.rotate_lat_lon(lat, lon,
                                            proj.domain["rotation_axis"],
                                            proj.domain["rotation_angle"])
        if not ls_mask[np.abs(ls_lat - lat).argmin()][np.abs(ls_lon -
                                                             lon).argmin()]:
            continue
        stations.append({
            "latitude": lat,
            "longitude": lon,
            "network": "XX",
            "station": _get_random_name(3)
        })

    if not len(stations):
        msg = "Could not create stations. Pure ocean region?"
        raise ValueError(msg)

    # Create a RESP file for every channel.
    resp_file_temp = os.path.join(
        os.path.dirname(
            os.path.abspath(inspect.getfile(inspect.currentframe()))),
        os.path.pardir, "tools", "RESP.template_file")
    with open(resp_file_temp, "rt") as open_file:
        resp_file_template = open_file.read()

    for station in stations:
        for component in ["E", "N", "Z"]:
            filename = os.path.join(
                proj.paths["resp"], "RESP.%s.%s.%s.BE%s" %
                (station["network"], station["station"], "", component))
            with open(filename, "wt") as open_file:
                open_file.write(
                    resp_file_template.format(station=station["station"],
                                              network=station["network"],
                                              channel="BH%s" % component))

    print "Generated %i RESP files." % (30 * 3)

    def _empty_sac_trace():
        """
        Helper function to create an empty SAC header.
        """
        sac_dict = {}
        # Floats: -12345.0
        floats = [
            "a", "mag", "az", "baz", "cmpaz", "cmpinc", "b", "depmax",
            "depmen", "depmin", "dist", "e", "evdp", "evla", "evlo", "f",
            "gcarc", "o", "odelta", "stdp", "stel", "stla", "stlo", "t0", "t1",
            "t2", "t3", "t4", "t5", "t6", "t7", "t8", "t9", "unused10",
            "unused11", "unused12", "unused6", "unused7", "unused8", "unused9",
            "user0", "user1", "user2", "user3", "user4", "user5", "user6",
            "user7", "user8", "user9", "xmaximum", "xminimum", "ymaximum",
            "yminimum"
        ]
        sac_dict.update({key: -12345.0 for key in floats})
        # Integers: -12345
        integers = [
            "idep", "ievreg", "ievtype", "iftype", "iinst", "imagsrc",
            "imagtyp", "iqual", "istreg", "isynth", "iztype", "lcalda",
            "lovrok", "nevid", "norid", "nwfid"
        ]
        sac_dict.update({key: -12345 for key in integers})
        # Strings: "-12345  "
        strings = [
            "ka", "kdatrd", "kevnm", "kf", "kinst", "ko", "kt0", "kt1", "kt2",
            "kt3", "kt4", "kt5", "kt6", "kt7", "kt8", "kt9", "kuser0",
            "kuser1", "kuser2"
        ]

        sac_dict.update({key: "-12345  " for key in strings})

        # Header version
        sac_dict["nvhdr"] = 6
        # Data is evenly spaced
        sac_dict["leven"] = 1
        # And a positive polarity.
        sac_dict["lpspol"] = 1

        tr = obspy.Trace()
        tr.stats.sac = obspy.core.AttribDict(sac_dict)
        return tr

    events = proj.get_all_events()
    # Now loop over all events and create SAC files for them.
    for _i, event in enumerate(events):
        lat, lng = event.origins[0].latitude, event.origins[0].longitude
        # Get the distance from the event to each station.
        for station in stations:
            # Add some perturbations.
            distance_in_km = gps2DistAzimuth(lat, lng, station["latitude"],
                                             station["longitude"])[0] / 1000.0
            a = random.uniform(3.9, 4.1)
            b = random.uniform(0.9, 1.1)
            c = random.uniform(0.9, 1.1)
            body_wave_factor = random.uniform(0.095, 0.015)
            body_wave_freq_scale = random.uniform(0.45, 0.55)
            distance_in_km = random.uniform(0.99 * distance_in_km,
                                            1.01 * distance_in_km)
            _, u = get_dispersed_wavetrain(
                dw=0.001,
                distance=distance_in_km,
                t_min=0,
                t_max=900,
                a=a,
                b=b,
                c=c,
                body_wave_factor=body_wave_factor,
                body_wave_freq_scale=body_wave_freq_scale)
            for component in ["E", "N", "Z"]:
                tr = _empty_sac_trace()
                tr.data = u
                tr.stats.network = station["network"]
                tr.stats.station = station["station"]
                tr.stats.location = ""
                tr.stats.channel = "BH%s" % component
                tr.stats.sac.stla = station["latitude"]
                tr.stats.sac.stlo = station["longitude"]
                tr.stats.sac.stdp = 0.0
                tr.stats.sac.stel = 0.0
                path = os.path.join(proj.paths["data"],
                                    "dummy_event_%i" % (_i + 1), "raw")
                if not os.path.exists(path):
                    os.makedirs(path)
                tr.write(os.path.join(
                    path, "%s.%s..BH%s.sac" %
                    (station["network"], station["station"], component)),
                         format="sac")
    print "Generated %i waveform files." % (30 * 3 * len(events))