def test_long_year_range(self):
        """
        Tests reading and writing years 1900 to 2100.
        """
        tr = Trace(np.arange(5, dtype=np.float32))

        # Year 2056 is non-deterministic for days 1, 256 and 257. These three
        # dates are simply not supported right now. See the libmseed
        # documentation for more details.
        # Use every 5th year. Otherwise the test takes too long. Use 1901 as
        # start to get year 2056.
        years = range(1901, 2101, 5)
        for year in years:
            for byteorder in ["<", ">"]:
                memfile = io.BytesIO()
                # Get some random time with the year and the byte order as the
                # seed.
                random.seed(year + ord(byteorder))
                tr.stats.starttime = UTCDateTime(
                    year,
                    julday=random.randrange(1, 365),
                    hour=random.randrange(0, 24),
                    minute=random.randrange(0, 60),
                    second=random.randrange(0, 60))
                if year == 2056:
                    tr.stats.starttime = UTCDateTime(2056, 2, 1)
                tr.write(memfile, format="mseed")
                st2 = read(memfile)
                self.assertEqual(len(st2), 1)
                tr2 = st2[0]
                # Remove the mseed specific header fields. These are obviously
                # not equal.
                del tr2.stats.mseed
                del tr2.stats._format
                self.assertEqual(tr, tr2)
    def test_microsecond_accuracy_reading_and_writing_before_1970(self):
        """
        Tests that reading and writing data with microsecond accuracy and
        start times before 1970 works as expected.
        """
        # Test a couple of timestamps. Positive and negative ones.
        timestamps = [123456.789123, -123456.789123, 1.123400, 1.123412,
                      1.123449, 1.123450, 1.123499, -1.123400, -1.123412,
                      -1.123449, -1.123450, -1.123451, -1.123499]

        for timestamp in timestamps:
            starttime = UTCDateTime(timestamp)
            self.assertEqual(starttime.timestamp, timestamp)

            tr = Trace(data=np.linspace(0, 100, 101))
            tr.stats.starttime = starttime

            with io.BytesIO() as fh:
                tr.write(fh, format="mseed")
                fh.seek(0, 0)
                tr2 = read(fh)[0]

            del tr2.stats.mseed
            del tr2.stats._format

            self.assertEqual(tr2.stats.starttime, starttime)
            self.assertEqual(tr2, tr)
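
Both tests above lean on the same in-memory round trip: serialize the Trace to MiniSEED in a BytesIO buffer, rewind, read it back, and strip the format-specific headers before comparing. A minimal, self-contained sketch of that pattern (the station code is an arbitrary placeholder):

import io

import numpy as np
from obspy import Trace, read

# Build a tiny trace; int32 is safely supported by MiniSEED.
tr = Trace(data=np.arange(10, dtype=np.int32))
tr.stats.station = "TEST"  # placeholder

buf = io.BytesIO()
tr.write(buf, format="MSEED")  # serialize into the in-memory buffer
buf.seek(0, 0)                 # rewind before reading back
tr2 = read(buf)[0]

# The reader attaches format-specific headers; drop them before comparing.
del tr2.stats.mseed
del tr2.stats._format
assert tr == tr2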
Example #3
 def test_issue_193(self):
     """
     Test for issue #193: if non-contiguous array is written correctly.
     """
     warnings.filterwarnings("ignore", "Detected non contiguous data")
     # test all plugins with both read and write method
     formats_write = \
         set(_get_default_eps('obspy.plugin.waveform', 'writeFormat'))
     formats_read = \
         set(_get_default_eps('obspy.plugin.waveform', 'readFormat'))
     formats = set.intersection(formats_write, formats_read)
     # MiniSEED will raise an exception for int64 data, thus use int32 only
     data = np.arange(10, dtype=np.int32)
     # make array non-contiguous
     data = data[::2]
     tr = Trace(data=data)
     for format in formats:
         # XXX: skip SEGY and SU formats for now as they need some special
         # headers.
         if format in ['SEGY', 'SU', 'SEG2']:
             continue
         with NamedTemporaryFile() as tf:
             tempfile = tf.name
             tr.write(tempfile, format)
             if format == "Q":
                 tempfile = tempfile + ".QHD"
             tr_test = read(tempfile, format)[0]
             if format == 'Q':
                 os.remove(tempfile[:-4] + '.QBN')
                 os.remove(tempfile[:-4] + '.QHD')
         np.testing.assert_array_equal(tr.data, tr_test.data)
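
The non-contiguous array exercised here is just a strided view produced by the [::2] slice; plugins that need contiguous memory can be handed a contiguous copy instead. A small NumPy-only sketch of the distinction:

import numpy as np

data = np.arange(10, dtype=np.int32)
view = data[::2]                    # strided view, not C-contiguous
print(view.flags['C_CONTIGUOUS'])   # False

fixed = np.ascontiguousarray(view)  # contiguous copy, same values
print(fixed.flags['C_CONTIGUOUS'])  # True
np.testing.assert_array_equal(view, fixed)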
Example #5
def conv2sac(root, component, datatype, conv_format):

    fpattern = 'seismo.' + component + '.*.' + 'sd' + datatype

    chan_conv = dict(X='1', Y='2', Z='3', R='R', T='T')

    files = glob.glob1(root, fpattern)

    for fn in files:
        data = np.loadtxt(os.path.join(root, fn))
        chan = fn.split('.')[1].strip().upper()
        station = fn.split('.')[2].strip()

        time = data[:, 0]
        stime = UTCDateTime(time[0])
        delta = time[1] - time[0]
        freq = 1 / delta
        data = data[:, 1]
        tr = Trace()
        tr.data = data
        tr.stats.station = station
        tr.stats.channel = chan_conv[chan]
        tr.stats.starttime = stime
        tr.stats.delta = delta
        tr.stats.sampling_rate = freq

        outfile = '{0}.{1}.{2}'.format(station, tr.stats.channel,
                                       conv_format.lower())
        tr.write(outfile, format=conv_format)
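
A hypothetical invocation of the converter above; the directory, component, datatype and output format are illustrative values only, assuming the current directory holds files named like seismo.Z.STA01.sdu:

# Convert every matching vertical-component file to SAC; one output file
# per input trace, named <station>.<channel>.sac.
conv2sac(root='.', component='Z', datatype='u', conv_format='SAC')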
Example #8
 def test_write_and_read_correct_network(self):
     """
     Tests that writing and reading the STA2 line works (otherwise the
     network code of the data is missing), even if some details like e.g.
     latitude are not present.
     """
     tr = Trace(np.arange(5, dtype=np.int32))
     tr.stats.network = "BW"
     with NamedTemporaryFile() as tf:
         tmpfile = tf.name
         tr.write(tmpfile, format='GSE2')
         tr = read(tmpfile)[0]
     self.assertEqual(tr.stats.network, "BW")
Example #9
 def test_write_sac_xy_with_minimum_stats(self):
     """
     Write SACXY with minimal stats header, not inherited from a SAC file.
     """
     tr = Trace()
     tr.stats.delta = 0.01
     tr.data = np.arange(0, 3000)
     with NamedTemporaryFile() as tf:
         sac_file = tf.name
         tr.write(sac_file, 'SACXY')
         st = read(sac_file)
     self.assertEqual(st[0].stats.delta, 0.01)
     self.assertEqual(st[0].stats.sampling_rate, 100.0)
Example #11
 def test_writeSACXYWithMinimumStats(self):
     """
     Write SACXY with minimal stats header, not inherited from a SAC file.
     """
     tr = Trace()
     tr.stats.delta = 0.01
     tr.data = np.arange(0, 3000)
     sac_file = NamedTemporaryFile().name
     tr.write(sac_file, 'SACXY')
     st = read(sac_file)
     os.remove(sac_file)
     self.assertEqual(st[0].stats.delta, 0.01)
     self.assertEqual(st[0].stats.sampling_rate, 100.0)
Example #13
 def test_writeSmallTrace(self):
     """
     Tests writing Traces containing 0 to 3 samples only.
     """
     for format in ["SLIST", "TSPAIR"]:
         for num in range(0, 4):
             tr = Trace(data=np.arange(num))
             tempfile = NamedTemporaryFile().name
             tr.write(tempfile, format=format)
             # test results
             st = read(tempfile, format=format)
             self.assertEqual(len(st), 1)
             self.assertEqual(len(st[0]), num)
             os.remove(tempfile)
Example #14
 def test_write_small_trace(self):
     """
     Tests writing Traces containing 0, 1, 2, 3, 4 samples only.
     """
     for format in ['SAC', 'SACXY']:
         for num in range(5):
             tr = Trace(data=np.arange(num))
             with NamedTemporaryFile() as tf:
                 tempfile = tf.name
                 tr.write(tempfile, format=format)
                 # test results
                 st = read(tempfile, format=format)
             self.assertEqual(len(st), 1)
             np.testing.assert_array_equal(tr.data, st[0].data)
Example #15
    def test_valid_sac_from_minimal_existing_sac_header(self):
        """
        An incomplete manually-produced SAC header should still produce a
        valid SAC file, including values from the ObsPy header.  Issue 1204.
        """
        tr = Trace(np.arange(100))
        t = UTCDateTime()
        tr.stats.starttime = t
        tr.stats.station = 'AAA'
        tr.stats.network = 'XX'
        tr.stats.channel = 'BHZ'
        tr.stats.location = '00'

        tr.stats.sac = AttribDict()
        tr.stats.sac.iztype = 9
        tr.stats.sac.nvhdr = 6
        tr.stats.sac.leven = 1
        tr.stats.sac.lovrok = 1
        tr.stats.sac.iftype = 1
        tr.stats.sac.stla = 1.
        tr.stats.sac.stlo = 2.

        with NamedTemporaryFile() as tf:
            tempfile = tf.name
            with warnings.catch_warnings(record=True) as w:
                warnings.simplefilter('always')
                tr.write(tempfile, format='SAC')
                self.assertEqual(len(w), 1)
                self.assertIn('reftime', str(w[-1].message))
            tr1 = read(tempfile)[0]

        # starttime made its way to SAC file
        nztimes, microsecond = utcdatetime_to_sac_nztimes(t)
        self.assertEqual(tr1.stats.sac.nzyear, nztimes['nzyear'])
        self.assertEqual(tr1.stats.sac.nzjday, nztimes['nzjday'])
        self.assertEqual(tr1.stats.sac.nzhour, nztimes['nzhour'])
        self.assertEqual(tr1.stats.sac.nzmin, nztimes['nzmin'])
        self.assertEqual(tr1.stats.sac.nzsec, nztimes['nzsec'])
        self.assertEqual(tr1.stats.sac.nzmsec, nztimes['nzmsec'])
        self.assertEqual(tr1.stats.sac.kstnm, 'AAA')
        self.assertEqual(tr1.stats.sac.knetwk, 'XX')
        self.assertEqual(tr1.stats.sac.kcmpnm, 'BHZ')
        self.assertEqual(tr1.stats.sac.khole, '00')
        self.assertEqual(tr1.stats.sac.iztype, 9)
        self.assertEqual(tr1.stats.sac.nvhdr, 6)
        self.assertEqual(tr1.stats.sac.leven, 1)
        self.assertEqual(tr1.stats.sac.lovrok, 1)
        self.assertEqual(tr1.stats.sac.iftype, 1)
        self.assertEqual(tr1.stats.sac.stla, 1.0)
        self.assertEqual(tr1.stats.sac.stlo, 2.0)
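
The nz* assertions rely on utcdatetime_to_sac_nztimes splitting a UTCDateTime into SAC reference-time fields. A quick sketch of that helper, assuming it lives in obspy.io.sac.util as in recent ObsPy versions (the timestamp is arbitrary):

from obspy import UTCDateTime
from obspy.io.sac.util import utcdatetime_to_sac_nztimes

t = UTCDateTime(2015, 3, 2, 12, 30, 45, 123456)
nztimes, microsecond = utcdatetime_to_sac_nztimes(t)
# nztimes carries nzyear, nzjday, nzhour, nzmin, nzsec and nzmsec
# (milliseconds); the sub-millisecond remainder is returned separately.
print(nztimes['nzmsec'], microsecond)  # 123 plus the leftover microseconds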
Example #16
 def test_write_small_trace(self):
     """
     Tests writing Traces containing 0 to 3 samples only.
     """
     for format in ['SLIST', 'TSPAIR']:
         for num in range(0, 4):
             tr = Trace(data=np.arange(num))
             with NamedTemporaryFile() as tf:
                 tempfile = tf.name
                 tr.write(tempfile, format=format)
                 # test results
                 st = read(tempfile, format=format)
             self.assertEqual(len(st), 1)
             self.assertEqual(len(st[0]), num)
Example #19
 def test_sac_file_from_new_header(self):
     """
     Writing a new Trace object to disk shouldn't ignore custom header
     fields if an arrival time is set. See ObsPy issue #1519.
     """
     tr = Trace(np.zeros(1000))
     tr.stats.delta = 0.01
     tr.stats.station = 'XXX'
     tr.stats.sac = {'stla': 10., 'stlo': -5., 'a': 12.34}
     with io.BytesIO() as tf:
         tr.write(tf, format='SAC')
         tf.seek(0)
         tr1 = read(tf)[0]
     self.assertAlmostEqual(tr1.stats.sac.stla, 10., places=4)
     self.assertAlmostEqual(tr1.stats.sac.stlo, -5., places=4)
     self.assertAlmostEqual(tr1.stats.sac.a, 12.34, places=5)
Example #21
 def write_stack(self):
     if self.stack is not None and len(self.stack) > 0:
         root_path = os.path.dirname(os.path.abspath(__file__))
         if "darwin" == platform:
             dir_path = pw.QFileDialog.getExistingDirectory(
                 self, 'Select Directory', root_path)
         else:
             dir_path = pw.QFileDialog.getExistingDirectory(
                 self, 'Select Directory', root_path,
                 pw.QFileDialog.DontUseNativeDialog)
         if dir_path:
             tr = Trace(data=self.stack, header=self.stats)
             file = os.path.join(dir_path, tr.id)
             tr.write(file, format="MSEED")
     else:
         md = MessageDialog(self)
         md.set_info_message("Nothing to write")
Example #22
    def test_merge_sac_obspy_headers(self):
        """
        Test that manually setting a set of SAC headers not related
        to validity or reference time on Trace.stats.sac is properly merged
        with the Trace.stats header. Issue 1285.
        """
        tr = Trace(data=np.arange(30))
        o = 10.0
        tr.stats.sac = {'o': o}

        with NamedTemporaryFile() as tf:
            tempfile = tf.name
            tr.write(tempfile, format='SAC')
            tr1 = read(tempfile)[0]

        self.assertEqual(tr1.stats.starttime, tr.stats.starttime)
        self.assertEqual(tr1.stats.sac.o, o)
Example #24
 def test_float_sampling_rates_write_and_read(self):
     """
     Tests writing and reading Traces with floating-point sampling rates,
     including rates below 1 Hz.
     """
     tr = Trace(np.arange(10))
     check_sampling_rates = (0.000000001, 1.000000001, 100.000000001,
                             99.999999999, 1.5, 1.666666, 10000.0001)
     for format in ['SLIST', 'TSPAIR']:
         for sps in check_sampling_rates:
             tr.stats.sampling_rate = sps
             with NamedTemporaryFile() as tf:
                 tempfile = tf.name
                 tr.write(tempfile, format=format)
                 # test results
                 got = read(tempfile, format=format)[0]
             self.assertEqual(tr.stats.sampling_rate,
                              got.stats.sampling_rate)
 def test_issue376(self):
     """
     Tests writing Traces containing 1 or 2 samples only.
     """
     # one sample
     tr = Trace(data=np.ones(1))
     tempfile = NamedTemporaryFile().name
     tr.write(tempfile, format="MSEED")
     st = read(tempfile)
     self.assertEqual(len(st), 1)
     self.assertEqual(len(st[0]), 1)
     os.remove(tempfile)
     # two samples
     tr = Trace(data=np.ones(2))
     with NamedTemporaryFile() as tf:
         tempfile = tf.name
         tr.write(tempfile, format="MSEED")
         st = read(tempfile)
     self.assertEqual(len(st), 1)
     self.assertEqual(len(st[0]), 2)
Example #26
    def test_merge_sac_obspy_headers(self):
        """
        Test that manually setting a set of SAC headers not related
        to validity or reference time on Trace.stats.sac is properly merged
        with the Trace.stats header. Issue 1285.
        """
        tr = Trace(data=np.arange(30))
        o = 10.0
        tr.stats.sac = {"o": o}

        with NamedTemporaryFile() as tf:
            tempfile = tf.name
            with warnings.catch_warnings(record=True) as w:
                warnings.simplefilter("always")
                tr.write(tempfile, format="SAC")
                self.assertEqual(len(w), 1)
            tr1 = read(tempfile)[0]

        self.assertEqual(tr1.stats.starttime, tr.stats.starttime)
        self.assertEqual(tr1.stats.sac.o, o)
    def test_enforcing_reading_byteorder(self):
        """
        Tests if setting the byte order of the header for reading is passed to
        the C functions.

        Quite simple. It just checks if reading with the correct byte order
        works and reading with the wrong byte order fails.
        """
        tr = Trace(data=np.arange(10, dtype=np.int32))

        # Test with little endian.
        memfile = io.BytesIO()
        tr.write(memfile, format="mseed", byteorder="<")
        memfile.seek(0, 0)
        # Reading little endian should work just fine.
        tr2 = read(memfile, header_byteorder="<")[0]
        memfile.seek(0, 0)
        self.assertEqual(tr2.stats.mseed.byteorder, "<")
        # Remove the mseed specific header fields. These are obviously not
        # equal.
        del tr2.stats.mseed
        del tr2.stats._format
        self.assertEqual(tr, tr2)
        # Wrong byte order raises.
        self.assertRaises(ValueError, read, memfile, header_byteorder=">")

        # Same test with big endian
        memfile = io.BytesIO()
        tr.write(memfile, format="mseed", byteorder=">")
        memfile.seek(0, 0)
        # Reading big endian should work just fine.
        tr2 = read(memfile, header_byteorder=">")[0]
        memfile.seek(0, 0)
        self.assertEqual(tr2.stats.mseed.byteorder, ">")
        # Remove the mseed specific header fields. These are obviously not
        # equal.
        del tr2.stats.mseed
        del tr2.stats._format
        self.assertEqual(tr, tr2)
        # Wrong byte order raises.
        self.assertRaises(ValueError, read, memfile, header_byteorder="<")
Example #29
def add_metadata_and_write(correlation, sta1, sta2, output_file, Fs):
    # save output
    trace = Trace()
    trace.stats.sampling_rate = Fs
    trace.data = correlation
    # try to add some meta data
    try:
        trace.stats.station = sta1.split('.')[1]
        trace.stats.network = sta1.split('.')[0]
        trace.stats.location = sta1.split('.')[2]
        trace.stats.channel = sta1.split('.')[3]
        trace.stats.sac = {}
        trace.stats.sac['kuser0'] = sta2.split('.')[1]
        trace.stats.sac['kuser1'] = sta2.split('.')[0]
        trace.stats.sac['kuser2'] = sta2.split('.')[2]
        trace.stats.sac['kevnm'] = sta2.split('.')[3]
    except (KeyError, IndexError):
        pass

    trace.write(filename=output_file, format='SAC')
    return
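
A hypothetical call to the function above; the SEED ids and file name are placeholders, and the correlation array is random noise just to make the sketch runnable:

import numpy as np

corr = np.random.randn(1001)  # stand-in for a computed cross-correlation
add_metadata_and_write(corr, sta1='XX.STA1..BHZ', sta2='XX.STA2..BHZ',
                       output_file='STA1-STA2.sac', Fs=1.0)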
Example #30
 def test_always_sac_reftime(self):
     """
     Writing a SAC file from a .stats.sac with no reference time should
     still write a SAC file with a reference time.
     """
     reftime = UTCDateTime('2010001')
     a = 12.34
     b = 0.0
     tr = Trace(np.zeros(1000))
     tr.stats.delta = 0.01
     tr.stats.station = 'XXX'
     tr.stats.starttime = reftime
     tr.stats.sac = {}
     tr.stats.sac['a'] = a
     tr.stats.sac['b'] = b
     with io.BytesIO() as tf:
         tr.write(tf, format='SAC')
         tf.seek(0)
         tr1 = read(tf)[0]
     self.assertEqual(tr1.stats.starttime, reftime)
     self.assertAlmostEqual(tr1.stats.sac.a, a, places=5)
     self.assertEqual(tr1.stats.sac.b, b)
Example #31
 def test_write_small_trace(self):
     """
     Tests writing Traces containing 0 to 3 samples only.
     """
     for format in ['SH_ASC', 'Q']:
         for num in range(0, 4):
             tr = Trace(data=np.arange(num))
             with NamedTemporaryFile() as tf:
                 tempfile = tf.name
                 if format == 'Q':
                     tempfile += '.QHD'
                 tr.write(tempfile, format=format)
                 # test results
                 with warnings.catch_warnings() as _:  # NOQA
                     warnings.simplefilter("ignore")
                     st = read(tempfile, format=format)
                 self.assertEqual(len(st), 1)
                 self.assertEqual(len(st[0]), num)
                 # Q files consist of two files - deleting additional file
                 if format == 'Q':
                     os.remove(tempfile[:-4] + '.QBN')
                     os.remove(tempfile[:-4] + '.QHD')
Example #34
 def test_issue_156(self):
     """
     Test case for issue #156.
     """
     # 1
     tr = Trace()
     tr.stats.delta = 0.01
     tr.data = np.arange(0, 3000)
     with NamedTemporaryFile() as tf:
         sac_file = tf.name
         tr.write(sac_file, 'SAC')
         st = read(sac_file)
     self.assertEqual(st[0].stats.delta, 0.01)
     self.assertEqual(st[0].stats.sampling_rate, 100.0)
     # 2
     tr = Trace()
     tr.stats.delta = 0.005
     tr.data = np.arange(0, 2000)
     with NamedTemporaryFile() as tf:
         sac_file = tf.name
         tr.write(sac_file, 'SAC')
         st = read(sac_file)
     self.assertEqual(st[0].stats.delta, 0.005)
     self.assertEqual(st[0].stats.sampling_rate, 200.0)
Example #36
 def _write_trace(self):
     trace = Trace(self.data.get(), self.header.stats)
     trace.write(self.absname, format='MSEED')
Example #37
    def test_readThreadSafe(self):
        """
        Tests for race conditions. Reads the same waveform file n_threads
        (currently 30) times in parallel and compares the results, which must
        all be the same.
        """
        data = np.arange(0, 500)
        start = UTCDateTime(2009, 1, 13, 12, 1, 2, 999000)
        formats = _getEntryPoints('obspy.plugin.waveform', 'writeFormat')
        for format in formats:
            # XXX: skip SEGY and SU formats for now as they need some special
            # headers.
            if format in ['SEGY', 'SU', 'SEG2']:
                continue

            dt = np.dtype("int")
            if format in ('MSEED', 'GSE2'):
                dt = "int32"
            tr = Trace(data=data.astype(dt))
            tr.stats.network = "BW"
            tr.stats.station = "MANZ1"
            tr.stats.location = "00"
            tr.stats.channel = "EHE"
            tr.stats.calib = 0.999999
            tr.stats.delta = 0.005
            tr.stats.starttime = start
            # create waveform file with given format and byte order
            with NamedTemporaryFile() as tf:
                outfile = tf.name
                tr.write(outfile, format=format)
                if format == 'Q':
                    outfile += '.QHD'
                n_threads = 30
                streams = []

                def testFunction(streams):
                    st = read(outfile, format=format)
                    streams.append(st)

                # Read the file in all threads at once and collect the
                # results in the list created above.
                for _i in range(n_threads):
                    thread = threading.Thread(target=testFunction,
                                              args=(streams, ))
                    thread.start()
                # Loop until all threads are finished.
                start = time.time()
                while True:
                    if threading.activeCount() == 1:
                        break
                    # Avoid infinite loop and leave after 120 seconds
                    # such a long time is needed for debugging with valgrind
                    elif time.time() - start >= 120:  # pragma: no cover
                        msg = 'Not all threads finished!'
                        raise Warning(msg)
                # Compare all values which should be identical and clean up
                # files
                for st in streams:
                    np.testing.assert_array_equal(st[0].data, tr.data)
                if format == 'Q':
                    os.remove(outfile[:-4] + '.QBN')
                    os.remove(outfile[:-4] + '.QHD')
Example #38
    def test_read_thread_safe(self):
        """
        Tests for race conditions. Reads the same waveform file n_threads
        (currently 30) times in parallel and compares the results, which must
        all be the same.
        """
        data = np.arange(0, 500)
        start = UTCDateTime(2009, 1, 13, 12, 1, 2, 999000)
        formats = _get_default_eps('obspy.plugin.waveform', 'writeFormat')
        for format in formats:
            # XXX: skip SEGY and SU formats for now as they need some special
            # headers.
            if format in ['SEGY', 'SU', 'SEG2']:
                continue

            dt = np.int_
            if format in ('MSEED', 'GSE2'):
                dt = np.int32
            tr = Trace(data=data.astype(dt))
            tr.stats.network = "BW"
            tr.stats.station = "MANZ1"
            tr.stats.location = "00"
            tr.stats.channel = "EHE"
            tr.stats.calib = 0.999999
            tr.stats.delta = 0.005
            tr.stats.starttime = start
            # create waveform file with given format and byte order
            with NamedTemporaryFile() as tf:
                outfile = tf.name
                tr.write(outfile, format=format)
                if format == 'Q':
                    outfile += '.QHD'
                n_threads = 30
                streams = []
                timeout = 120
                if 'TRAVIS' in os.environ:
                    timeout = 570  # 30 seconds under Travis' limit
                cond = threading.Condition()

                def test_functions(streams, cond):
                    st = read(outfile, format=format)
                    streams.append(st)
                    with cond:
                        cond.notify()
                # Read the file in all threads at once and collect the
                # results in the list created above.
                our_threads = []
                for _i in range(n_threads):
                    thread = threading.Thread(target=test_functions,
                                              args=(streams, cond))
                    thread.start()
                    our_threads.append(thread)
                our_threads = set(our_threads)
                # Loop until all threads are finished.
                start = time.time()
                while True:
                    with cond:
                        cond.wait(1)
                    remaining_threads = set(threading.enumerate())
                    if len(remaining_threads & our_threads) == 0:
                        break
                    # Avoid infinite loop and leave after some time; such a
                    # long time is needed for debugging with valgrind or Travis
                    elif time.time() - start >= timeout:  # pragma: no cover
                        msg = 'Not all threads finished after %d seconds!' % (
                            timeout)
                        raise Warning(msg)
                # Compare all values which should be identical and clean up
                # files
                for st in streams:
                    np.testing.assert_array_equal(st[0].data, tr.data)
                if format == 'Q':
                    os.remove(outfile[:-4] + '.QBN')
                    os.remove(outfile[:-4] + '.QHD')
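
The Condition-plus-polling loop above bounds the total wait; the same effect can be had more directly with Thread.join and a shared deadline. A generic sketch of that alternative (the worker body is a stand-in for the read call):

import threading
import time

def worker(results):
    results.append(42)  # stand-in for read(outfile, format=format)

results = []
threads = [threading.Thread(target=worker, args=(results,))
           for _ in range(30)]
for t in threads:
    t.start()
deadline = time.time() + 120
for t in threads:
    # join with a per-thread timeout so the total wait stays bounded
    t.join(timeout=max(0.0, deadline - time.time()))
if any(t.is_alive() for t in threads):
    raise RuntimeError('Not all threads finished in time!')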
Example #39
def g1g2_corr(wf1,wf2,corr_file,src,source_conf,insta):
    """
    Compute noise cross-correlations from two .h5 'wavefield' files.
    Noise source distribution and spectrum is given by starting_model.h5
    It is assumed that noise sources are delta-correlated in space.
    """

    # ToDo: check whether to include autocorrs from user (now hardcoded off)
    # ToDo: parallel loop(s)
    # ToDo: tests

    # Metainformation: Include the reference station names for both stations
    # from wavefield files, if possible. Do not include geographic information
    # from .csv file as this might be error-prone. Just add the geographic 
    # info later if needed.

    with NoiseSource(src) as nsrc:

        ntime, n, n_corr, Fs = get_ns(wf1,source_conf,insta)

        # use a one-sided taper: the seismogram probably has a non-zero end,
        # being cut off wherever the solver stopped running.
        taper = cosine_taper(ntime, p=0.01)
        taper[0:ntime//2] = 1.0
        ntraces = nsrc.src_loc[0].shape[0]
        print(taper.shape)
        correlation = np.zeros(n_corr)

        if insta:
            # open database
            dbpath = json.load(open(os.path.join(source_conf['project_path'],
                'config.json')))['wavefield_path']
            # open and determine Fs, nt
            db = instaseis.open_db(dbpath)
            # get receiver locations
            lat1 = geograph_to_geocent(float(wf1[2]))
            lon1 = float(wf1[3])
            rec1 = instaseis.Receiver(latitude=lat1,longitude=lon1)
            lat2 = geograph_to_geocent(float(wf2[2]))
            lon2 = float(wf2[3])
            rec2 = instaseis.Receiver(latitude=lat2,longitude=lon2)

        else:
            wf1 = WaveField(wf1)
            wf2 = WaveField(wf2)

            
        # Loop over source locations
        for i in range(ntraces):

            # noise source spectrum at this location
            S = nsrc.get_spect(i)
            

            if S.sum() == 0.:
                # If the amplitude is 0, continue. (The spectrum has zero
                # phase anyway.)
                continue

           
            if insta:
                # get source locations
                lat_src = geograph_to_geocent(nsrc.src_loc[1,i])
                lon_src = nsrc.src_loc[0,i]
                fsrc = instaseis.ForceSource(latitude=lat_src,
                    longitude=lon_src,f_r=1.e12)
                
                s1 = np.ascontiguousarray(db.get_seismograms(source=fsrc,
                    receiver=rec1,
                    dt=1./source_conf['sampling_rate'])[0].data*taper)
                s2 = np.ascontiguousarray(db.get_seismograms(source=fsrc,
                    receiver=rec2,
                    dt=1./source_conf['sampling_rate'])[0].data*taper)
                

            else:
                # read Green's functions
                s1 = np.ascontiguousarray(wf1.data[i,:]*taper)
                s2 = np.ascontiguousarray(wf2.data[i,:]*taper)
            
            
            # Fourier transform for greater ease of convolution
            spec1 = np.fft.rfft(s1,n)
            spec2 = np.fft.rfft(s2,n)
            
            # convolve G1G2
            g1g2_tr = np.multiply(np.conjugate(spec1),spec2)
            
            # convolve noise source
            c = np.multiply(g1g2_tr,S)
            
            # transform back    
            correlation += my_centered(np.fft.ifftshift(np.fft.irfft(c,n)),
                n_corr) * nsrc.surf_area[i]
            
            # occasional info
            if i%50000 == 0:
                print("Finished {} source locations.".format(i))
###################### end of loop over all source locations ###################

        if not insta:
            wf1.file.close()
            wf2.file.close()

        # save output
        trace = Trace()
        trace.stats.sampling_rate = Fs
        trace.data = correlation
        # try to add some meta data
        try:
            sta1 = wf1.stats['reference_station']
            sta2 = wf2.stats['reference_station']
            trace.stats.station = sta1.split('.')[1]
            trace.stats.network = sta1.split('.')[0]
            trace.stats.location = sta1.split('.')[2]
            trace.stats.channel = sta1.split('.')[3]
            trace.stats.sac = {}
            trace.stats.sac['kuser0'] = sta2.split('.')[1]
            trace.stats.sac['kuser1'] = sta2.split('.')[0]
            trace.stats.sac['kuser2'] = sta2.split('.')[2]
            trace.stats.sac['kevnm'] = sta2.split('.')[3]
        except (KeyError, IndexError):
            pass

        trace.write(filename=corr_file,format='SAC')
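
Stripped of the I/O and instaseis branches, the numerical core of the loop above is a frequency-domain cross-correlation weighted by the source spectrum. A self-contained NumPy sketch of that single step (the lengths and the flat spectrum are made-up stand-ins):

import numpy as np

n = 1024                     # FFT length (get_ns provides this above)
s1 = np.random.randn(512)    # stand-ins for the two tapered Green's functions
s2 = np.random.randn(512)
S = np.ones(n // 2 + 1)      # stand-in for the noise source spectrum

spec1 = np.fft.rfft(s1, n)
spec2 = np.fft.rfft(s2, n)
g1g2 = np.conjugate(spec1) * spec2           # cross-spectrum conj(G1) * G2
c = g1g2 * S                                 # weight by the source spectrum
corr = np.fft.ifftshift(np.fft.irfft(c, n))  # back to the time domain
# the function above then trims this to n_corr samples with my_centered()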
Example #40
class CorrTrace(object):

	"""
	Object holds correlation data along with metainformation (station id, geographic location).
	"""

	def __init__(self,cha1,cha2,sampling_rate,corr_type='ccc',
		t0=None,t1=None,stck_int=None,prepstring=None,
		window_length=None,overlap=None,corr_params=None):

		
		self.stack = Trace()  # These traces get 1970-01-01 as start date; an arbitrary choice of start time.
		self.pstak = None
		self.maxlag = None # maxlag will be set the first time a correlation is added.

		# Parameters that must be set 
		self.cnt_tot = 0
		self.cnt_int = 0
		self.id1   = cha1
		self.id2   = cha2

		if self.id1[-1] == 'E':
			cha = self.id1.split('.')[-1]
			cha = cha[0] + cha [1] + 'T'
			inf = self.id1.split('.')
			self.id1 = '{}.{}.{}.{}'.format(*(inf[0:3]+[cha]))

		if self.id1[-1] == 'N':
			cha = self.id1.split('.')[-1]
			cha = cha[0] + cha [1] + 'R'
			inf = self.id1.split('.')
			self.id1 = '{}.{}.{}.{}'.format(*(inf[0:3]+[cha]))

		if self.id2[-1] == 'E':
			cha = self.id2.split('.')[-1]
			cha = cha[0] + cha [1] + 'T'
			inf = self.id2.split('.')
			self.id2 = '{}.{}.{}.{}'.format(*(inf[0:3]+[cha]))

		if self.id2[-1] == 'N':
			cha = self.id2.split('.')[-1]
			cha = cha[0] + cha [1] + 'R'
			inf = self.id2.split('.')
			self.id2 = '{}.{}.{}.{}'.format(*(inf[0:3]+[cha]))


		self.id    = self.id1 + '--' + self.id2
		self.corr_type = corr_type

		self.stack.stats.sampling_rate = sampling_rate
		self.sampling_rate = sampling_rate
		(self.stack.stats.network, 
			self.stack.stats.station, 
			self.stack.stats.location, 
			self.stack.stats.channel) = cha1.split('.')

		
		
		geo_inf = get_geoinf(cha1,cha2)
		
		self.lat1 = geo_inf[0]
		self.lat2 = geo_inf[2]
		self.lon1 = geo_inf[1]
		self.lon2 = geo_inf[3]
		self.az   = geo_inf[5]
		self.baz  = geo_inf[6]
		self.dist = geo_inf[4]


		# Parameters that are optional and will be ignored if they are set to None
		self.stck_int = stck_int
		self.params = corr_params
		self.begin = t0
		self.end   = t1
		self.window_length = window_length
		self.overlap = overlap
		self.prepstring = prepstring

		# open the file to dump intermediate stack results
		if self.stck_int is not None and self.stck_int > 0:
			int_file = '{}.{}.windows.h5'.format(self.id,self.corr_type)
			int_file = os.path.join('data','correlations',int_file)
			int_file = h5py.File(int_file,'a')

			# Save some basic information
			int_stats = int_file.create_dataset('stats',data=(0,))
			int_stats.attrs['sampling_rate'] 	= self.sampling_rate
			int_stats.attrs['channel1'] 		= self.id1
			int_stats.attrs['channel2'] 		= self.id2
			int_stats.attrs['distance']			= self.dist

			# Prepare a group for writing the data window
			self.int_file = int_file
			self.interm_data = int_file.create_group("corr_windows")
		else:
			self.int_file = None
			self.interm_data = None

	def _add_corr(self,corr,t):

		"""
		Add one correlation window to the stack
		"""

		
		if self.stack.stats.npts == 0:
			self.stack.data = corr 
			# set the lag
			self.nlag = self.stack.stats.npts
			self.maxlag = (self.nlag - 1) / 2 / self.sampling_rate
		else:
			self.stack.data += corr # This will cause an error if the correlations have different length.

		
		self.cnt_tot += 1
		if self.stck_int is not None:

			if self.pstak is not None: 
				self.pstak += corr # This will cause an error if the correlations have different length.
			else:
				self.pstak = corr.copy()

			if self.cnt_int == self.stck_int and self.stck_int > 0:
				# write intermediate result
				self.write_int(t)
				self.cnt_int = 0
				self.pstak = None

			self.cnt_int += 1

		del corr


	def write_stack(self,output_format):

		
		# SAC format

		if output_format.upper() == 'SAC':
			filename = os.path.join('data','correlations','{}.SAC'.format(self.id))

			#- open file and write correlation function
			if self.cnt_tot > 0:
				#- Add metadata
				self.add_sacmeta()
				self.stack.write(filename,format='SAC')
			else:
				print('** Correlation stack contains no windows. Nothing written.')
				print(filename)

		#- ASDF format

		if output_format.upper() == 'ASDF':
			filename = os.path.join('data','correlations','correlations.h5')

			if self.cnt_tot > 0:
				with pyasdf.ASDFDataSet(filename) as ds:
					info = self.add_asdfmeta()
					ds.add_auxiliary_data(self.stack.data,
				                          data_type="CrossCorrelation",
				                          path="%s/%s" % (info["trace_id_a"].replace(".", "_"), 
				                          	              info["trace_id_b"].replace(".", "_")),
				                          parameters=info)

		if self.int_file is not None:
			self.int_file.file.close()


	def write_int(self,t):

		tstr = t.strftime("%Y.%j.%H.%M.%S")
		
		self.interm_data.create_dataset(tstr,data=self.pstak)		
	def add_sacmeta(self):

		self.stack.stats.sac={}
		#==============================================================================
		#- Essential metadata  
		#==============================================================================



		self.stack.stats.sac['kt8']		=	self.corr_type
		self.stack.stats.sac['user0']	=	self.cnt_tot

		self.stack.stats.sac['b']		=	-self.maxlag
		self.stack.stats.sac['e']		=	self.maxlag


		self.stack.stats.sac['stla']	=	self.lat1
		self.stack.stats.sac['stlo']	=	self.lon1
		self.stack.stats.sac['evla']	=	self.lat2
		self.stack.stats.sac['evlo']	=	self.lon2
		self.stack.stats.sac['dist']	=	self.dist
		self.stack.stats.sac['az']		=	self.az
		self.stack.stats.sac['baz']		=	self.baz

		self.stack.stats.sac['kuser0']	=	self.id2.split('.')[0]
		self.stack.stats.sac['kuser1']	=	self.id2.split('.')[2]
		self.stack.stats.sac['kuser2']	=	self.id2.split('.')[3]
		self.stack.stats.sac['kevnm']	=	self.id2.split('.')[1]


		#==============================================================================
		#- Optional metadata, can be None  
		#==============================================================================


		self.stack.stats.sac['kt2']		=	self.prepstring
		self.stack.stats.sac['user1']	=	self.window_length
		self.stack.stats.sac['user2']	=	self.overlap

		if self.begin is not None:
			self.stack.stats.sac['kt0']	=	self.begin.strftime('%Y%j')

		if self.end is not None:
			self.stack.stats.sac['kt1']	=	self.end.strftime('%Y%j')

		if self.params is not None:  # ToDo: this
			self.stack.stats.sac['user3'] = self.params[0]
			self.stack.stats.sac['user4'] = self.params[1]
			self.stack.stats.sac['user5'] = self.params[2]
			self.stack.stats.sac['user6'] = self.params[3]
			self.stack.stats.sac['user7'] = self.params[4]
			self.stack.stats.sac['user8'] = self.params[5]
	def add_asdfmeta(self):

		info = {}
		info["trace_id_a"] = self.id.split("--")[0]
		info["trace_id_b"] = self.id.split("--")[1]
		info["trace_id_a"] = info["trace_id_a"].replace(" ", "")
		info["trace_id_b"] = info["trace_id_b"].replace(" ", "")
		info["dt"] = round(self.stack.stats.sampling_rate,6)
		info["correlation_type"] = self.corr_type
		info["max_lag"] = self.maxlag
		info["lat_a"] = self.lat1
		info["lat_b"] = self.lat2
		info["lon_a"] = self.lon1
		info["lon_b"] = self.lon2
		info["dist"] = self.dist
		info["az"] = self.az
		info["baz"] = self.baz
		info["preprocess_steps"] = self.prepstring
		info["window_length"] = self.window_length
		info["window_overlap"] = self.overlap

		try:
			info["noisedata_first"] = self.begin.strftime('%Y-%m-%dT%H:%M:%S')
			info["noisedata_last"] = self.end.strftime('%Y-%m-%dT%H:%M:%S')
		except AttributeError:
			pass

		print(info)

		return info
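
The four renaming branches in __init__ above all perform the same substitution: replace a trailing E with T or a trailing N with R in the channel code of a NET.STA.LOC.CHA id. A compact helper with the same effect, as a sketch:

def rotate_id(seed_id):
    # 'XX.STA..BHE' -> 'XX.STA..BHT'; 'XX.STA..BHN' -> 'XX.STA..BHR'
    net, sta, loc, cha = seed_id.split('.')
    cha = cha[:-1] + {'E': 'T', 'N': 'R'}.get(cha[-1], cha[-1])
    return '.'.join((net, sta, loc, cha))

assert rotate_id('XX.STA..BHE') == 'XX.STA..BHT'
assert rotate_id('XX.STA..BHZ') == 'XX.STA..BHZ'  # other codes unchanged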
Example #41
 st = read(s)
 # few parameters are stored because they will be used more than once
 sta_name = st[0].stats.station
 # load the mask
 os.chdir(path_data_mask)
 msk = read(lst_msk[ista])
 if m_or_c == 'M':
     tr = np.multiply(st[0].data, norm1(msk[0].data))
 elif m_or_c == 'C':
     tr = np.multiply(st[0].data, 1 - np.asarray(norm1(msk[0].data)))
 else:
     print('Issue between mask and complementary')
 tr[-1] = (st[0].data).max()
 os.chdir(path_rslt_tr)
 tr = Trace(np.asarray(tr), st[0].stats)
 tr.write(sta_name + '.sac', format='SAC')
 st = read(sta_name + '.sac')
 # the maximum of the envelope is set to 1
 env_norm = norm1(st[0].data)
 # x-axis corresponding to the trace
 t = np.arange(st[0].stats.npts) / st[0].stats.sampling_rate
 # interpolate the trace so we can assess a value even between two bins
 f = interpolate.interp1d(t, env_norm)
 # initialise 3D np.array which will contain back projection values for
 # one station
 bp1sta = []
 print('Processing of the station {}'.format(sta_name),
       '{} / {}'.format(ista + 1, len(lst_sta)),
       end=' ')
 os.chdir(path_trvt)
 with open(event + '_' + sta_name + '_absolute_delays', 'rb') as mfch:
Example #42
    os.system('cp {} temp.h5'.format(source_file))
    n = h5py.File('temp.h5', 'r+')

    n['model'][:, 0] += 10.**step * d_q_0
    n['model'][:, 1] += 10.**step * d_q_1
    n.flush()
    n.close()
    with NoiseSource('temp.h5') as nsrc:
        correlation = compute_correlation(input_files, all_config, nsrc,
                                          all_ns, taper)

    # evaluate misfit and add to list.
    syntest = Trace(data=correlation[0])
    syntest.stats.sac = {}
    syntest.stats.sac['dist'] = obs.stats.sac['dist']
    syntest.write('temp.sac', format='SAC')
    syntest = read('temp.sac')[0]
    msr_sh = m_func(syntest, **m_a_options)

    # plt.plot(correlation[0])
    # plt.plot(syn.data, '--')
    # plt.title(str(step))
    # plt.show()
    if mtype in ['ln_energy_ratio']:
        jh = 0.5 * (msr_sh - msr_o)**2
    elif mtype in ['full_waveform']:
        jh = 0.5 * np.sum(np.power((msr_sh - msr_o), 2))
    djdqh = (jh - j) / (10.**step)
    print(djdqh, grad_dq)
    dcheck.append(abs(grad_dq - djdqh) / abs(grad_dq))
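
The final lines are a standard finite-difference gradient check: the analytic directional derivative grad_dq is compared against (J(q + h d) - J(q)) / h with h = 10**step. A generic, self-contained sketch of the same test on a toy objective:

import numpy as np

def misfit(q):
    return 0.5 * np.sum(q ** 2)   # toy objective; its gradient is q

q = np.array([1.0, -2.0, 0.5])
d = np.array([0.3, 0.1, -0.2])    # perturbation direction
analytic = np.dot(q, d)           # analytic directional derivative
h = 10.0 ** -4                    # step size, like 10**step above
fd = (misfit(q + h * d) - misfit(q)) / h
rel_err = abs(analytic - fd) / abs(analytic)
print(rel_err)  # shrinks roughly linearly in h for smooth objectives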
Example #43
def run_parallel_hfsims(home,project_name,rupture_name,N,M0,sta,sta_lon,sta_lat,component,model_name,
                        rise_time_depths0,rise_time_depths1,moho_depth_in_km,total_duration,
                        hf_dt,stress_parameter,kappa,Qexp,Pwave,Swave,high_stress_depth,
                        Qmethod,scattering,Qc_exp,baseline_Qc,rank,size): 
    '''
    Run stochastic HF sims
    
    stress parameter is in bars
    '''
    
    from numpy import genfromtxt,pi,logspace,log10,mean,where,exp,arange,zeros,argmin,rad2deg,arctan2,real,savetxt,c_
    from pyproj import Geod
    from obspy.geodetics import kilometer2degrees
    from obspy.taup import TauPyModel
    from mudpy.forward import get_mu, read_fakequakes_hypo_time
    from mudpy import hfsims
    from obspy import Stream,Trace
    from sys import stdout
    from os import path,makedirs
    from mudpy.hfsims import is_subfault_in_smga
    import warnings

    rank=int(rank)
    
    if rank==0 and component=='N':
        #print out what's going on:
        out='''Running with input parameters:
        home = %s
        project_name = %s
        rupture_name = %s
        N = %s
        M0 (N-m) = %s
        sta = %s
        sta_lon = %s
        sta_lat = %s
        model_name = %s
        rise_time_depths = %s
        moho_depth_in_km = %s
        total_duration = %s
        hf_dt = %s
        stress_parameter = %s
        kappa = %s
        Qexp = %s
        component = %s
        Pwave = %s
        Swave = %s
        high_stress_depth = %s
        Qmethod = %s
        scattering = %s
        Qc_exp = %s
        baseline_Qc = %s
        '''%(home,project_name,rupture_name,str(N),str(M0/1e7),sta,str(sta_lon),str(sta_lat),model_name,str([rise_time_depths0,rise_time_depths1]),
        str(moho_depth_in_km),str(total_duration),str(hf_dt),str(stress_parameter),str(kappa),str(Qexp),str(component),str(Pwave),str(Swave),
        str(high_stress_depth),str(Qmethod),str(scattering),str(Qc_exp),str(baseline_Qc))
        print(out)

    if rank==0:
        out='''
        Rupture_Name = %s
        Station = %s
        Component (N,E,Z) = %s
        Sample rate = %sHz
        Duration = %ss
        '''%(rupture_name,sta,component,str(1/hf_dt),str(total_duration))
        print(out)
        
    #print 'stress is '+str(stress_parameter)

    #I don't condone it but this cleans up the warnings
    warnings.filterwarnings("ignore")
    
    
    #Fix input formats:
    rise_time_depths=[rise_time_depths0,rise_time_depths1]
    #Load the source
    mpi_rupt=home+project_name+'/output/ruptures/mpi_rupt.'+str(rank)+'.'+rupture_name
    fault=genfromtxt(mpi_rupt)  
    
    #Onset times for each subfault
    onset_times=fault[:,12]
    
    #load velocity structure
    structure=genfromtxt(home+project_name+'/structure/'+model_name)
    
    #Frequencies vector
    f=logspace(log10(1/total_duration),log10(1/(2*hf_dt))+0.01,100)
    omega=2*pi*f
    
    #Output time vector (0 is origin time)
    t=arange(0,total_duration,hf_dt)
    
    #Projection object for distance calculations
    g=Geod(ellps='WGS84')
    
    #Create taup velocity model object, paste on top of iaspei91
    #taup_create.build_taup_model(home+project_name+'/structure/bbp_norcal.tvel',output_folder=home+project_name+'/structure/')
#    velmod=TauPyModel(model=home+project_name+'/structure/iquique',verbose=True)
    velmod = TauPyModel(model=home+project_name+'/structure/'+model_name.split('.')[0]+'.npz')
    
    #Get epicentral time
    epicenter,time_epi=read_fakequakes_hypo_time(home,project_name,rupture_name)
    
    #Moments
    slip=(fault[:,8]**2+fault[:,9]**2)**0.5
    subfault_M0=slip*fault[:,10]*fault[:,11]*fault[:,13]
    subfault_M0=subfault_M0*1e7 #to dyne-cm
    relative_subfault_M0=subfault_M0/M0
    Mw=(2./3)*(log10(M0*1e-7)-9.1)
    
    #Corner frequency scaling
    i=where(slip>0)[0] #Non-zero faults
    dl=mean((fault[:,10]+fault[:,11])/2) #predominant length scale
    dl=dl/1000 # to km
    
    #Tau=p perturbation
    tau_perturb=0.1
    
    #Deep faults receive a higher stress
    stress_multiplier=1

    # initialize output seismogram
    tr=Trace()
    tr.stats.station=sta
    tr.stats.delta=hf_dt
    tr.stats.starttime=time_epi
    #info for sac header (added at the end)
    az,backaz,dist_m=g.inv(epicenter[0],epicenter[1],sta_lon,sta_lat)
    dist_in_km=dist_m/1000.    
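    #Geod.inv returns forward azimuth, back azimuth and distance in meters
    #between (lon1,lat1) and (lon2,lat2) on the WGS84 ellipsoid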
    
    hf=zeros(len(t))
    
    #Loop over subfaults
    for kfault in range(len(fault)):
        if rank==0:
            #Print status to screen            
            if kfault % 25 == 0:
                if kfault==0:
                    stdout.write('      [.')
                    stdout.flush()
                stdout.write('.')
                stdout.flush()
            if kfault==len(fault)-1:
                stdout.write('.]\n')
                stdout.flush()                
        
        #Include only subfaults with non-zero slip
        if subfault_M0[kfault]>0:
            
            #Get subfault to station distance
            lon_source=fault[kfault,1]
            lat_source=fault[kfault,2]
            azimuth,baz,dist=g.inv(lon_source,lat_source,sta_lon,sta_lat)
            dist_in_degs=kilometer2degrees(dist/1000.)
            
            #Source depth?
            z_source=fault[kfault,3]
            
            #No change
            stress=stress_parameter
            
            #Is subfault in an SMGA?
            #SMGA1
#            radius_in_km=15.0
#            smga_center_lon=-71.501
#            smga_center_lat=-30.918
            
            
            #SMGA2
#            radius_in_km=15.0
#            smga_center_lon=-71.863
#            smga_center_lat=-30.759
            
            #smga3
#            radius_in_km=7.5  
#            smga_center_lon=-72.3923
#            smga_center_lat=-30.58
            
            
            #smga4
            # radius_in_km=7.5  
            # smga_center_lon=-72.3923
            # smga_center_lat=-30.61
            
            # in_smga=is_subfault_in_smga(lon_source,lat_source,smga_center_lon,smga_center_lat,radius_in_km)
            
            # ###Apply multiplier?
            # if in_smga==True:
            #     stress=stress_parameter*stress_multiplier
            #     print("%.4f,%.4f is in SMGA, stress is %d" % (lon_source,lat_source,stress))
            # else:
            #     stress=stress_parameter
            
            #Apply multiplier?
            #if slip[kfault]>7.5:
            #    stress=stress_parameter*stress_multiplier
            ##elif lon_source>-72.057 and lon_source<-71.2 and lat_source>-30.28:
            ##    stress=stress_parameter*stress_multiplier
            #else:
            #    stress=stress_parameter
                
            #Apply multiplier?
            #if z_source>high_stress_depth:
            #    stress=stress_parameter*stress_multiplier
            #else:
            #    stress=stress_parameter
            
            # Frankel (1995) scaling of corner frequency; verified this looks the same as in GP
            # Right now this applies the same factor to all faults
            fc_scale=(M0)/(N*stress*dl**3*1e21) #Frankel scaling
            small_event_M0 = stress*dl**3*1e21
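            #i.e. fc_scale = M0/(N*small_event_M0): how much larger the target
            #moment is than N copies of the small reference event; the 1e21
            #factor makes stress [bars] * dl**3 [km**3] come out in dyne-cm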
            
        

            
            #Get rho, alpha, beta at subfault depth
            zs=fault[kfault,3]
            mu,alpha,beta=get_mu(structure,zs,return_speeds=True)
            rho=mu/beta**2
            
            #Get radiation scale factor
            Spartition=1/2**0.5
            if component=='N' :
                component_angle=0
            elif component=='E':
                component_angle=90
            
            rho=rho/1000 #to g/cm**3
            beta=(beta/1000)*1e5 #to cm/s
            alpha=(alpha/1000)*1e5
            
            # print('rho = '+str(rho))
            # print('beta = '+str(beta))
            # print('alpha = '+str(alpha))
            
            #Verified this produces same value as in GP
            CS=(2*Spartition)/(4*pi*(rho)*(beta**3))
            CP=2/(4*pi*(rho)*(alpha**3))
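            #Brune-type far-field constants: the leading 2 is presumably the
            #free-surface factor and Spartition=1/sqrt(2) splits S energy onto
            #one horizontal component; radiation pattern and 1/R spreading are
            #applied separately below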

            
            #Get local subfault rupture speed
            beta=beta/100 #to m/s
            vr=hfsims.get_local_rupture_speed(zs,beta,rise_time_depths)
            vr=vr/1000 #to km/s
            dip_factor=hfsims.get_dip_factor(fault[kfault,5],fault[kfault,8],fault[kfault,9])
            
            #Subfault corner frequency
            c0=2.0 #GP2015 value
            fc_subfault=(c0*vr)/(dip_factor*pi*dl)
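            #e.g. vr=2.8 km/s, dl=10 km and dip_factor~1 give
            #fc_subfault=(2*2.8)/(pi*10)~0.18 Hz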
            
            #get subfault source spectrum
            #S=((relative_subfault_M0[kfault]*M0/N)*f**2)/(1+fc_scale*(f/fc_subfault)**2)
            S=small_event_M0*(omega**2/(1+(f/fc_subfault)**2))
            frankel_conv_operator= fc_scale*((fc_subfault**2+f**2)/(fc_subfault**2+fc_scale*f**2))
            S=S*frankel_conv_operator
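            #The Frankel operator tends to fc_scale as f->0 and to 1 as f->inf,
            #so the summed subevents match the target moment at low frequency
            #without inflating the high-frequency spectral level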
            
            #get high frequency decay
            P=exp(-pi*kappa*f)
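            #Site kappa high-frequency rolloff (Anderson & Hough, 1984 style):
            #e.g. kappa=0.04 s cuts the amplitude at f=10 Hz by exp(-pi*0.4)~0.28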
            
            
            #Get other geometric parameters necessary for radiation pattern
            strike=fault[kfault,4]
            dip=fault[kfault,5]
            ss=fault[kfault,8]
            ds=fault[kfault,9]
            rake=rad2deg(arctan2(ds,ss))
            
            #Get ray paths for all direct P arrivals
            Ppaths=velmod.get_ray_paths(zs,dist_in_degs,phase_list=['P','p'])
            
            #Get ray paths for all direct S arrivals
            try:
                Spaths=velmod.get_ray_paths(zs,dist_in_degs,phase_list=['S','s'])
            except:
                Spaths=velmod.get_ray_paths(zs+tau_perturb,dist_in_degs,phase_list=['S','s'])
            
            #sometimes there's no S, weird I know. Retry with small source
            #depth perturbations until a direct S shows up.
            depth_perturbations=[1,5,-5,-10,10,-50,50,-75,75]
            for pert in depth_perturbations:
                if len(Spaths)>0:
                    break
                Spaths=velmod.get_ray_paths(zs+pert*tau_perturb,dist_in_degs,phase_list=['S','s'])
            if len(Spaths)==0:
                print('ERROR: I give up, no direct S in spite of multiple attempts at subfault '+str(kfault))

            #Which ray should I keep? 
            
            #This is the fastest arriving P
            directP=Ppaths[0]
            
            #Get moho depth from velmod
            moho_depth  = velmod.model.moho_depth
            
            # Selection rules for this method: for S, do not allow Moho-turning
            # rays; keep the fastest non-Moho-turning ray. If only Moho-turning
            # rays are available, keep the one that turns shallowest.
            if Qmethod == 'no_moho':
            
                #get turning depths and arrival times of S rays
                turning_depths = zeros(len(Spaths))
                S_ray_times = zeros(len(Spaths))
                
                for kray in range(len(Spaths)):
                    turning_depths[kray] = Spaths[kray].path['depth'].max()
                    S_ray_times[kray] = Spaths[kray].path['time'].max()
                    
                #Keep only rays that turn above Moho
                i=where(turning_depths < moho_depth)[0]
                
                if len(i) == 0: #all rays turn below Moho, keep shallowest turning
                    i_min_depth = argmin(turning_depths)
                    directS = Spaths[i_min_depth]
                
                else:  #Keep fastest arriving ray that turns above Moho
                    Spaths = [Spaths[j] for j in i]  #Rays turning above Moho, NOTE: I hate list comprehension
                    S_ray_times = S_ray_times[i]
                    i_min_time = argmin(S_ray_times)
                    directS = Spaths[i_min_time]
                    
            elif Qmethod =='shallowest':
                                
                #get turning depths and arrival times of S rays
                turning_depths = zeros(len(Spaths))
                
                for kray in range(len(Spaths)):
                    turning_depths[kray] = Spaths[kray].path['depth'].max()

                i_min_depth = argmin(turning_depths)
                directS = Spaths[i_min_depth]
                
            elif Qmethod == 'fastest' or Qmethod=='direct':   #Pick first arriving S wave
                
                directS = Spaths[0]
                
                
            
            #directS=Spaths[0]  #this is the old way, kept fastest S
            mohoS=None
            
            # #print len(Spaths)
            # if len(Spaths)==1: #only direct S
            #     pass
            # else:
            #     #turn_depth=zeros(len(Spaths)-1) #turning depth of other non-direct rays
            #     #for k in range(1,len(Spaths)):
            #     #    turn_depth[k-1]=Spaths[k].path['depth'].max()
            #     ##If there's a ray that turns within 2km of Moho, call that guy the Moho reflection
            #     #deltaz=abs(turn_depth-moho_depth_in_km)
            #     #i=argmin(deltaz)
            #     #if deltaz[i]<2: #Yes, this is a moho reflection
            #     #    mohoS=Spaths[i+1]
            #     #else:
            #     #    mohoS=None
            #     mohoS=Spaths[-1]
                

                 
 
            #######         Build Direct P ray           ######
            if Pwave:
                take_off_angle_P=directP.takeoff_angle
                
                # #Get attenuation due to geometrical spreading (from the path length)
                # path_length_P=hfsims.get_path_length(directP,zs,dist_in_degs)
                # path_length_P=path_length_P*100 #to cm
                
                # #Get effect of intrinsic attenuation for that ray (path integrated)
                # #Q_P=hfsims.get_attenuation(f,structure,directP,Qexp,Qtype='P')   <- This causes problems and I don't know why underlying assumptions might be bad
                # Q_P=hfsims.get_attenuation(f,structure,directS,Qexp,Qtype='S')
                
                # #get quarter wavelength amplification factors
                # # pass rho in kg/m^3 (this units nightmare is what I get for following Graves' code)
                # I_P=hfsims.get_amplification_factors(f,structure,zs,alpha,rho*1000)
                
                # #Build the entire path term
                # G_P=(I_P*Q_P)/path_length_P
                
                #Get attenuation due to geometrical spreading (from the path length)
                path_length_S=hfsims.get_path_length(directS,zs,dist_in_degs)
                path_length_S=path_length_S*100 #to cm
                
                #Get effect of intrinsic attenuation for that ray (path integrated)
                Q_S=hfsims.get_attenuation(f,structure,directS,Qexp)
                
                #get quarter wavelength amplification factors
                # pass rho in kg/m^3 (this units nightmare is what I get for following Graves' code)
                I_S=hfsims.get_amplification_factors(f,structure,zs,beta,rho*1000)
                
                #Build the entire path term
                # G_S=(I_S*Q_S)/path_length_S
                G_S=(1*Q_S)/path_length_S
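                #NOTE: I_S is computed above but not applied in the path term
                #here (factor 1 instead), apparently a deliberate choice given
                #the commented-out alternative on the line above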
                
                

                #Get conically averaged radiation pattern terms
                RP=hfsims.conically_avg_P_radiation_pattern(strike,dip,rake,azimuth,take_off_angle_P)
                RP=abs(RP)
                   
                #Get partition of Pwave into Z and N,E components 
                incidence_angle=directP.incident_angle
                Npartition,Epartition,Zpartition=hfsims.get_P_wave_partition(incidence_angle,azimuth)
                if component=='Z':
                   Ppartition=Zpartition 
                elif component=='N':
                    Ppartition=Npartition
                else:
                    Ppartition=Epartition
                    
                #And finally multiply everything together to get the subfault amplitude spectrum
                AP=CP*S*G_S*P*RP*Ppartition           

                #Generate windowed time series
                duration=1./fc_subfault+0.09*(dist/1000)
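                #Subevent duration = source term (1/fc) plus a distance-
                #dependent scattering term (0.09 s per km for P here,
                #0.063 s per km for S below), in the style of Graves & Pitarka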
                w=hfsims.windowed_gaussian(duration,hf_dt,window_type='saragoni_hart')
                
                #Go to frequency domain, apply amplitude spectrum and ifft for final time series
                hf_seis_P=hfsims.apply_spectrum(w,AP,f,hf_dt)
                
                #save things to check
                # if sta=='AL2H':
                #     path_out = '/Users/dmelgarm/FakeQuakes/ONC_debug/analysis/frequency/Pwave/'
                #     path_out = path_out+str(kfault)
                #     # savetxt(path_out+'.all',c_[f,AP])
                #     # savetxt(path_out+'.source',c_[f,CP*S])
                #     # savetxt(path_out+'.path',c_[f,G_P])
                #     # savetxt(path_out+'.site',c_[f,P])
                
                
                #What time after OT should this time series start at?
                time_insert=directP.path['time'][-1]+onset_times[kfault]
                i=argmin(abs(t-time_insert))
                j=i+len(hf_seis_P)
                
                #Check seismogram doesn't go past last sample
                if i<len(hf)-1: #the seismogram starts before the end of the record
                    if j>len(hf): #seismogram goes past total_duration length, trim it
                        len_paste=len(hf)-i
                        j=len(hf)
                        #Add seismogram
                        hf[i:j]=hf[i:j]+real(hf_seis_P[0:len_paste])
                    else: #Lengths are fine
                        hf[i:j]=hf[i:j]+real(hf_seis_P)      
                else: #Seismogram starts after end of available space
                    pass   
                
                                           
                                                                  
                                                                                                                
                          
            #######         Build Direct S ray           ######

            if Swave:
                take_off_angle_S=directS.takeoff_angle
                
                #Get attenuation due to geometrical spreading (from the path length)
                path_length_S=hfsims.get_path_length(directS,zs,dist_in_degs)
                path_length_S=path_length_S*100 #to cm
                
                #Get effect of intrinsic attenuation for that ray (path integrated)
                if Qmethod == 'direct': #No ray tracing, use bulk attenuation along the path
                    Q_S = hfsims.get_attenuation_linear(f,structure,zs,dist,Qexp,Qtype='S')
                else: #Use ray tracing
                    Q_S=hfsims.get_attenuation(f,structure,directS,Qexp,scattering=scattering,
                                               Qc_exp=Qc_exp,baseline_Qc=baseline_Qc)
                
                #get quarter wavelength amplification factors
                # pass rho in kg/m^3 (this units nightmare is what I get for following Graves' code)
                I_S=hfsims.get_amplification_factors(f,structure,zs,beta,rho*1000)
                
                #Build the entire path term
                G_S=(I_S*Q_S)/path_length_S
                # G_S=(1*Q_S)/path_length_S
    
                #Get conically averaged radiation pattern terms
                if component=='Z':
                    RP_vert=hfsims.conically_avg_vert_radiation_pattern(strike,dip,rake,azimuth,take_off_angle_S)
                    #And finally multiply everything together to get the subfault amplitude spectrum
                    AS=CS*S*G_S*P*RP_vert   
                    # print('... RP_vert = '+str(RP_vert))
                else:
                    RP=hfsims.conically_avg_radiation_pattern(strike,dip,rake,azimuth,take_off_angle_S,component_angle)
                    RP=abs(RP)
                    # print('... RP_horiz = '+str(RP))
                    #And finally multiply everything together to get the subfault amplitude spectrum
                    AS=CS*S*G_S*P*RP                
    
                #Generate windowed time series
                duration=1./fc_subfault+0.063*(dist/1000)
                w=hfsims.windowed_gaussian(duration,hf_dt,window_type='saragoni_hart')
                #w=windowed_gaussian(3*duration,hf_dt,window_type='cua',ptime=Ppaths[0].path['time'][-1],stime=Spaths[0].path['time'][-1])
                
                #Go to frequency domain, apply amplitude spectrum and ifft for final time series
                hf_seis_S=hfsims.apply_spectrum(w,AS,f,hf_dt)
                
                #save things to check
                # if sta=='AL2H':
                #     path_out = '/Users/dmelgarm/FakeQuakes/ONC_debug/analysis/frequency/Swave/'
                #     path_out = path_out+str(kfault)
                #     # savetxt(path_out+'.soverp',c_[f,(CS*S)/(CP*S)])
                #     savetxt(path_out+'.all',c_[f,AS])
                #     savetxt(path_out+'.source',c_[f,CS*S])
                #     savetxt(path_out+'.path',c_[f,G_S])
                #     savetxt(path_out+'.site',c_[f,P])
                
                #What time after OT should this time series start at?
                time_insert=directS.path['time'][-1]+onset_times[kfault]
                #print 'ts = '+str(time_insert)+' , Td = '+str(duration)
                #time_insert=Ppaths[0].path['time'][-1]
                i=argmin(abs(t-time_insert))
                j=i+len(hf_seis_S)
                
                
                #Check seismogram doesn't go past last sample
                if i<len(hf)-1: #the seismogram starts before the end of the record
                    if j>len(hf): #seismogram goes past total_duration length, trim it
                        len_paste=len(hf)-i
                        j=len(hf)
                        #Add seismogram
                        hf[i:j]=hf[i:j]+real(hf_seis_S[0:len_paste])
                    else: #Lengths are fine
                        hf[i:j]=hf[i:j]+real(hf_seis_S)
                else: #Beginning of seismogram is past end of available space
                    pass

            
            
            #######         Build Moho reflected S ray           ######
#            if mohoS==None:
#                pass
#            else:
#                if kfault%100==0:
#                    print '... ... building Moho reflected S wave'
#                take_off_angle_mS=mohoS.takeoff_angle
#                
#                #Get attenuation due to geometrical spreading (from the path length)
#                path_length_mS=get_path_length(mohoS,zs,dist_in_degs)
#                path_length_mS=path_length_mS*100 #to cm
#                
#                #Get effect of intrinsic aptimeenuation for that ray (path integrated)
#                Q_mS=get_attenuation(f,structure,mohoS,Qexp)
#                
#                #Build the entire path term
#                G_mS=(I*Q_mS)/path_length_mS
#
#                #Get conically averaged radiation pattern terms
#                if component=='Z':
#                    RP_vert=conically_avg_vert_radiation_pattern(strike,dip,rake,azimuth,take_off_angle_mS)
#                    #And finally multiply everything together to get the subfault amplitude spectrum
#                    A=C*S*G_mS*P*RP_vert   
#                else:
#                    RP=conically_avg_radiation_pattern(strike,dip,rake,azimuth,take_off_angle_mS,component_angle)
#                    RP=abs(RP)
#                    #And finally multiply everything together to get the subfault amplitude spectrum
#                    A=C*S*G_mS*P*RP                
#
#                #Generate windowed time series
#                duration=1./fc_subfault+0.063*(dist/1000)
#                w=windowed_gaussian(duration,hf_dt,window_type='saragoni_hart')
#                #w=windowed_gaussian(3*duration,hf_dt,window_type='cua',ptime=Ppaths[0].path['time'][-1],stime=Spaths[0].path['time'][-1])
#                
#                #Go to frequency domain, apply amplitude spectrum and ifft for final time series
#                hf_seis=apply_spectrum(w,A,f,hf_dt)
#                
#                #What time after OT should this time series start at?
#                time_insert=mohoS.path['time'][-1]+onset_times[kfault]
#                #print 'ts = '+str(time_insert)+' , Td = '+str(duration)
#                #time_insert=Ppaths[0].path['time'][-1]
#                i=argmin(abs(t-time_insert))
#                j=i+len(hf_seis)
#                
#                #Add seismogram
#                hf[i:j]=hf[i:j]+hf_seis
#                
#                #Done, reset
#                mohoS=None        
           
    #Done
    tr.data=hf/100 #hf is in CGS (cm/s**2); divide by 100 to get m/s**2
    #Add station location, event location, and first P-wave arrival time to SAC header
    tr.stats.update({'sac':{'stlo':sta_lon,'stla':sta_lat,'evlo':epicenter[0],'evla':epicenter[1],'evdp':epicenter[2],'dist':dist_in_km,'az':az,'baz':backaz,'mag':Mw}}) #,'idep':"ACC (m/s^2)" not sure why idep won't work
    
    #Write out to file
    #Strip the trailing extension from the rupture name
    rupture=rupture_name.rsplit('.',1)[0]
    if not path.exists(home+project_name+'/output/waveforms/'+rupture+'/'):
        makedirs(home+project_name+'/output/waveforms/'+rupture+'/')
    if rank < 10:
        tr.write(home+project_name+'/output/waveforms/'+rupture+'/'+sta+'.HN'+component+'.00'+str(rank)+'.sac',format='SAC')
    elif rank < 100:
        tr.write(home+project_name+'/output/waveforms/'+rupture+'/'+sta+'.HN'+component+'.0'+str(rank)+'.sac',format='SAC')
    else:
        tr.write(home+project_name+'/output/waveforms/'+rupture+'/'+sta+'.HN'+component+'.'+str(rank)+'.sac',format='SAC')
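
A minimal, self-contained sketch of the paste-with-trim bookkeeping used above when each subevent time series is added into the master record (the function and variable names here are illustrative, not MudPy's):

import numpy as np

def paste_subevent(hf, seis, t, time_insert):
    """Add `seis` into `hf` starting at the sample nearest `time_insert`,
    trimming whatever would run past the end of `hf`."""
    i = np.argmin(np.abs(t - time_insert))
    if i >= len(hf) - 1:  # subevent starts after the record ends, skip it
        return hf
    j = min(i + len(seis), len(hf))
    hf[i:j] += np.real(seis[:j - i])
    return hf

# usage sketch: a 100-sample record at dt=0.01 s, subevent inserted at t=0.85 s
hf = np.zeros(100)
t = np.arange(100) * 0.01
hf = paste_subevent(hf, np.ones(30), t, time_insert=0.85)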
Example #44
 def test_read_and_write(self):
     """
     Tests read and write methods for all waveform plug-ins.
     """
     data = np.arange(0, 2000)
     start = UTCDateTime(2009, 1, 13, 12, 1, 2, 999000)
     formats = _get_default_eps('obspy.plugin.waveform', 'writeFormat')
     for format in formats:
         # XXX: skip SEGY and SU formats for now as they need some special
         # headers.
         if format in ['SEGY', 'SU', 'SEG2']:
             continue
         for native_byteorder in ['<', '>']:
             for byteorder in (['<', '>', '='] if format in
                               WAVEFORM_ACCEPT_BYTEORDER else [None]):
                 if format == 'SAC' and byteorder == '=':
                     # SAC file format enforces '<' or '>'
                     # byteorder on writing
                     continue
                 # new trace object in native byte order
                 dt = np.dtype(np.int_).newbyteorder(native_byteorder)
                 if format in ('MSEED', 'GSE2'):
                     # MiniSEED and GSE2 cannot write int64, enforce type
                     dt = np.int32
                 tr = Trace(data=data.astype(dt))
                 tr.stats.network = "BW"
                 tr.stats.station = "MANZ1"
                 tr.stats.location = "00"
                 tr.stats.channel = "EHE"
                 tr.stats.calib = 0.199999
                 tr.stats.delta = 0.005
                 tr.stats.starttime = start
                 # create waveform file with given format and byte order
                 with NamedTemporaryFile() as tf:
                     outfile = tf.name
                     if byteorder is None:
                         tr.write(outfile, format=format)
                     else:
                         tr.write(outfile, format=format,
                                  byteorder=byteorder)
                     if format == 'Q':
                         outfile += '.QHD'
                     # read in again using auto detection
                     st = read(outfile)
                     self.assertEqual(len(st), 1)
                     self.assertEqual(st[0].stats._format, format)
                     # read in using format argument
                     st = read(outfile, format=format)
                     self.assertEqual(len(st), 1)
                     self.assertEqual(st[0].stats._format, format)
                     # read in using a BytesIO instances, skip Q files as
                     # it needs multiple files
                     if format not in ['Q']:
                         # file handler without format
                         with open(outfile, 'rb') as fp:
                             st = read(fp)
                         self.assertEqual(len(st), 1)
                         self.assertEqual(st[0].stats._format, format)
                         # file handler with format
                         with open(outfile, 'rb') as fp:
                             st = read(fp, format=format)
                         self.assertEqual(len(st), 1)
                         self.assertEqual(st[0].stats._format, format)
                         # BytesIO without format
                         with open(outfile, 'rb') as fp:
                             temp = io.BytesIO(fp.read())
                         st = read(temp)
                         self.assertEqual(len(st), 1)
                         self.assertEqual(st[0].stats._format, format)
                         # BytesIO with format
                         with open(outfile, 'rb') as fp:
                             temp = io.BytesIO(fp.read())
                         st = read(temp, format=format)
                         self.assertEqual(len(st), 1)
                         self.assertEqual(st[0].stats._format, format)
                     # Q files consist of two files - deleting additional
                     # file
                     if format == 'Q':
                         os.remove(outfile[:-4] + '.QBN')
                         os.remove(outfile[:-4] + '.QHD')
                 # check byte order
                 if format == 'SAC':
                     # SAC format preserves byteorder on writing
                     self.assertTrue(st[0].data.dtype.byteorder
                                     in ('=', byteorder))
                 else:
                     self.assertEqual(st[0].data.dtype.byteorder, '=')
                 # check meta data
                 # some formats do not contain a calibration factor
                 if format not in ['MSEED', 'WAV', 'TSPAIR', 'SLIST']:
                     self.assertAlmostEqual(st[0].stats.calib, 0.199999, 5)
                 else:
                     self.assertEqual(st[0].stats.calib, 1.0)
                 if format not in ['WAV']:
                     self.assertEqual(st[0].stats.starttime, start)
                     self.assertEqual(st[0].stats.endtime, start + 9.995)
                     self.assertEqual(st[0].stats.delta, 0.005)
                     self.assertEqual(st[0].stats.sampling_rate, 200.0)
                 # network/station/location/channel codes
                 if format in ['Q', 'SH_ASC', 'GSE2']:
                     # no network or location code in Q, SH_ASC, GSE2
                     self.assertEqual(st[0].id, ".MANZ1..EHE")
                 elif format not in ['WAV']:
                     self.assertEqual(st[0].id, "BW.MANZ1.00.EHE")
Example #45
        stx.detrend(type='constant')
        sty.detrend(type='constant')
        stz.detrend(type='constant')
        # tapering
        tr_x = stx[0]
        tr_y = sty[0]
        tr_z = stz[0]
        # creation of the different velocity waveforms (3cpn, hori and vert)
        tr3 = [math.sqrt(a**2 + b**2 + c**2)
               for a, b, c in zip(tr_x, tr_y, tr_z)]
        trh = [math.sqrt(a**2 + b**2) for a, b in zip(tr_x, tr_y)]
        trv = [math.sqrt(a**2) for a in tr_z]
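        # note: sqrt(a**2) is just abs(a); with NumPy arrays these three
        # combinations could be vectorized, e.g.
        # tr3 = np.sqrt(tr_x.data**2 + tr_y.data**2 + tr_z.data**2)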
        # preparation for SAC format
        tr3 = Trace(np.asarray(tr3), stz[0].stats)
        trh = Trace(np.asarray(trh), stz[0].stats)
        trv = Trace(np.asarray(trv), stz[0].stats)
        # save the files
        os.chdir(path_rslt[0])
        tr3.write(sx[:7] + cpnt[0] + sx[9:], format='SAC')
        os.chdir(path_rslt[1])
        trh.write(sx[:7] + cpnt[1] + sx[9:], format='SAC')
        os.chdir(path_rslt[2])
        trv.write(sx[:7] + cpnt[2] + sx[9:], format='SAC')
        print('The different component combinations',
                'of the station {}'.format(sx[:6]),
                'have been successfully created')
    else:
        print('The three velocity waveforms',
                '{}, {} and {}'.format(sx[:6], sy[:6], sz[:6]),
                'do not correspond and the combination cannot be done')
Example #46
    def test_read_thread_safe(self):
        """
        Tests for race conditions. Reading n_threads (currently 30) times
        the same waveform file in parallel and compare the results which must
        be all the same.
        """
        data = np.arange(0, 500)
        start = UTCDateTime(2009, 1, 13, 12, 1, 2, 999000)
        formats = _get_default_eps('obspy.plugin.waveform', 'writeFormat')
        for format in formats:
            # XXX: skip SEGY and SU formats for now as they need some special
            # headers.
            if format in ['SEGY', 'SU', 'SEG2']:
                continue

            dt = np.int_
            if format in ('MSEED', 'GSE2'):
                dt = np.int32
            tr = Trace(data=data.astype(dt))
            tr.stats.network = "BW"
            tr.stats.station = "MANZ1"
            tr.stats.location = "00"
            tr.stats.channel = "EHE"
            tr.stats.calib = 0.999999
            tr.stats.delta = 0.005
            tr.stats.starttime = start
            # create waveform file with given format and byte order
            with NamedTemporaryFile() as tf:
                outfile = tf.name
                tr.write(outfile, format=format)
                if format == 'Q':
                    outfile += '.QHD'
                n_threads = 30
                streams = []
                timeout = 120
                if 'TRAVIS' in os.environ:
                    timeout = 570  # 30 seconds under Travis' limit
                cond = threading.Condition()
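                # Each reader thread notifies this condition when it finishes,
                # waking the main loop below so it can re-check which of our
                # threads are still alive.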

                def test_functions(streams, cond):
                    st = read(outfile, format=format)
                    streams.append(st)
                    with cond:
                        cond.notify()
                # Read the same file n_threads times in parallel and collect
                # the resulting streams.
                our_threads = []
                for _i in range(n_threads):
                    thread = threading.Thread(target=test_functions,
                                              args=(streams, cond))
                    thread.start()
                    our_threads.append(thread)
                our_threads = set(our_threads)
                # Loop until all threads are finished.
                start = time.time()
                while True:
                    with cond:
                        cond.wait(1)
                    remaining_threads = set(threading.enumerate())
                    if len(remaining_threads & our_threads) == 0:
                        break
                    # Avoid infinite loop and leave after some time; such a
                    # long time is needed for debugging with valgrind or Travis
                    elif time.time() - start >= timeout:  # pragma: no cover
                        msg = 'Not all threads finished after %d seconds!' % (
                            timeout)
                        raise Warning(msg)
                # Compare all values which should be identical and clean up
                # files
                for st in streams:
                    np.testing.assert_array_equal(st[0].data, tr.data)
                if format == 'Q':
                    os.remove(outfile[:-4] + '.QBN')
                    os.remove(outfile[:-4] + '.QHD')
Example #47
    dirname = 'sac'

    if not os.path.exists(dirname):
        os.mkdir(dirname)

    suffix = 'BH' + fname.split('_')[2][0].upper()

    for i, sta_name in enumerate(header[1:len(header)]):

        trace = Trace()
        # trace.id = sta_name.replace('_','.')+'.'+suffix
        print('Processing ' + sta_name)
        trace.data = array[:, i + 1]
        trace.stats.sampling_rate = 100
        trace.stats.delta = 1.0 / trace.stats.sampling_rate
        trace.stats.network = sta_name.split('_')[0]
        trace.stats.station = sta_name.split('_')[1]
        trace.stats.location = '00'  # TODO
        trace.stats.channel = suffix
        trace.id = trace.stats.network + '.' + trace.stats.station + '.' + trace.stats.location + '.' + trace.stats.channel
        trace.stats.starttime = UTCDateTime(array[0, 0])
        trace.stats._format = 'SAC'
        trace.write(dirname + '/' + trace.id + '.SAC', format='SAC')

        st = read(dirname + '/' + trace.id + '.SAC')
        st[0].stats.sac.b = array[0, 0]
        st[0].stats.sac.e = array[len(stdata) - 1, 0]
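        # Re-reading the freshly written file exposes the stats.sac header, so
        # the SAC begin/end fields (b, e) can be set before saving again.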

        st.write(dirname + '/' + trace.id + '.SAC', format='SAC')
Example #48
            )  # + lst_dist[lst_st.index(st)]/2.5*st[0].stats.sampling_rate)
            for itr in tr[int(
                    lst_dist[lst_st.index(st)] / 3.4 *
                    st[0].stats.sampling_rate
            ):]:  # + lst_dist[lst_st.index(st)]/2.5*st[0].stats.sampling_rate):]:
                tr[tr.index(itr)] = itr + st[0].data[tr.index(itr) - i0]
    else:
        tr = [
            a for a in lst_st[sta.index(sta[0] + lst_EQ[0][2:-2] +
                                        '.vel_4_10Hz_hori.sac') - 1][0].data
        ]
        for st in lst_st[1:]:
            i0 = int(
                lst_dist[lst_st.index(st)] / 3.4 * st[0].stats.sampling_rate
            )  # + lst_dist[lst_st.index(st)]/2.5*st[0].stats.sampling_rate)
            for itr in tr[int(
                    lst_dist[lst_st.index(st)] / 3.4 *
                    st[0].stats.sampling_rate
            ):]:  # + lst_dist[lst_st.index(st)]/2.5*st[0].stats.sampling_rate):]:
                tr[tr.index(itr)] = itr + st[0].data[tr.index(itr) - i0]
    tr = Trace(np.asarray(tr), lst_st[lst_dist.index(0)][0].stats)
    os.chdir(path_results)
    tr.write(sta[1][:6] + sta[1][16:], format='SAC')

to_register = [dict_vel_used, dict_delai]

os.chdir(path + 'syn')
with open('ligne_veldata', 'wb') as my_fch:
    my_pk = pickle.Pickler(my_fch)
    my_pk.dump(to_register)
Example #49
def run_parallel_hfsims(home,project_name,rupture_name,N,M0,sta,sta_lon,sta_lat,component,model_name,
                        rise_time_depths0,rise_time_depths1,moho_depth_in_km,total_duration,
                        hf_dt,stress_parameter,kappa,Qexp,Pwave,high_stress_depth,rank,size): 
    '''
    Run stochastic HF sims
    
    stress parameter is in bars
    '''
    
    from numpy import genfromtxt,pi,logspace,log10,mean,where,exp,arange,zeros,argmin,rad2deg,arctan2,real
    from pyproj import Geod
    from obspy.geodetics import kilometer2degrees
    from obspy.taup import TauPyModel
    from mudpy.forward import get_mu, write_fakequakes_hf_waveforms_one_by_one,read_fakequakes_hypo_time
    from mudpy import hfsims
    from obspy import Stream,Trace
    from sys import stdout
    from os import path,makedirs
    import warnings

    rank=int(rank)
    
#    if rank==0:
#        #print out what's going on:
#        out='''Running with input parameters:
#        home = %s
#        project_name = %s
#        rupture_name = %s
#        N = %s
#        M0 = %s
#        sta = %s
#        sta_lon = %s
#        sta_lat = %s
#        model_name = %s
#        rise_time_depths = %s
#        moho_depth_in_km = %s
#        total_duration = %s
#        hf_dt = %s
#        stress_parameter = %s
#        kappa = %s
#        Qexp = %s
#        component = %s
#        Pwave = %s
#        high_stress_depth = %s
#        '''%(home,project_name,rupture_name,str(N),str(M0),sta,str(sta_lon),str(sta_lat),model_name,str([rise_time_depths0,rise_time_depths1]),
#        str(moho_depth_in_km),str(total_duration),str(hf_dt),str(stress_parameter),
#        str(kappa),str(Qexp),str(component),str(Pwave),str(high_stress_depth))
#        print out

    if rank==0:
        out='''
        Rupture_Name = %s
        Station = %s
        Component (N,E,Z) = %s
        '''%(rupture_name,sta,component)
        print(out)
        
    #print 'stress is '+str(stress_parameter)

    #I don't condone it but this cleans up the warnings
    warnings.filterwarnings("ignore")
    
    #Fix input formats:
    rise_time_depths=[rise_time_depths0,rise_time_depths1]
    #Load the source
    mpi_rupt=home+project_name+'/output/ruptures/mpi_rupt.'+str(rank)+'.'+rupture_name
    fault=genfromtxt(mpi_rupt)  
    
    #Onset times for each subfault
    onset_times=fault[:,12]
    
    #load velocity structure
    structure=genfromtxt(home+project_name+'/structure/'+model_name)
    
    #Frequencies vector
    f=logspace(log10(hf_dt),log10(1/(2*hf_dt))+0.01,50)
    omega=2*pi*f
    
    #Output time vector (0 is origin time)
    t=arange(0,total_duration,hf_dt)
    
    #Projection object for distance calculations
    g=Geod(ellps='WGS84')
    
    #Create taup velocity model object, paste on top of iaspei91
    #taup_create.build_taup_model(home+project_name+'/structure/bbp_norcal.tvel',output_folder=home+project_name+'/structure/')
    velmod=TauPyModel(model=home+project_name+'/structure/maule',verbose=True)
    #Get epicentral time
    epicenter,time_epi=read_fakequakes_hypo_time(home,project_name,rupture_name)
    
    #Moments
    slip=(fault[:,8]**2+fault[:,9]**2)**0.5
    subfault_M0=slip*fault[:,10]*fault[:,11]*fault[:,13]
    subfault_M0=subfault_M0*1e7 #to dyne-cm
    relative_subfault_M0=subfault_M0/M0
    Mw=(2./3)*(log10(M0*1e-7)-9.1)
    
    #Corner frequency scaling
    i=where(slip>0)[0] #Non-zero faults
    dl=mean((fault[:,10]+fault[:,11])/2) #predominant length scale
    dl=dl/1000 # to km
    
    #Tau-p perturbation
    tau_perturb=0.1
    
    #Deep faults receive a higher stress
    stress_multiplier=3

    #initialize output seismogram
    tr=Trace()
    tr.stats.station=sta
    tr.stats.delta=hf_dt
    tr.stats.starttime=time_epi
    #info for sac header (added at the end)
    az,backaz,dist_m=g.inv(epicenter[0],epicenter[1],sta_lon,sta_lat)
    dist_in_km=dist_m/1000.    
    
    hf=zeros(len(t))
    
    #Loop over subfaults
    for kfault in range(len(fault)):
        if rank==0:
            #Print status to screen            
            if kfault % 150 == 0:
                if kfault==0:
                    stdout.write('      [')
                    stdout.flush()
                stdout.write('.')
                stdout.flush()
            if kfault==len(fault)-1:
                stdout.write(']\n')
                stdout.flush()                
        
        #Include only subfaults with non-zero slip
        if subfault_M0[kfault]>0:
            
            #Get subfault to station distance
            lon_source=fault[kfault,1]
            lat_source=fault[kfault,2]
            azimuth,baz,dist=g.inv(lon_source,lat_source,sta_lon,sta_lat)
            dist_in_degs=kilometer2degrees(dist/1000.)
            
            #Source depth?
            z_source=fault[kfault,3]
            
            #No change
            stress=stress_parameter
            
            #Is subfault in an SMGA?
            #radius_in_km=15.0
            #smga_center_lon=-69.709200
            #smga_center_lat=-19.683600
            #in_smga=is_subfault_in_smga(lon_source,lat_source,smga_center_lon,smga_center_lat,radius_in_km)
            #
            ###Apply multiplier?
            #if in_smga==True:
            #    stress=stress_parameter*stress_multiplier
            #    print "%.4f,%.4f is in SMGA, stress is %d" % (lon_source,lat_source,stress)
            #else:
            #    stress=stress_parameter
            
            #Apply multiplier?
            #if slip[kfault]>7.5:
            #    stress=stress_parameter*stress_multiplier
            ##elif lon_source>-72.057 and lon_source<-71.2 and lat_source>-30.28:
            ##    stress=stress_parameter*stress_multiplier
            #else:
            #    stress=stress_parameter
                
            #Apply multiplier?
            #if z_source>high_stress_depth:
            #    stress=stress_parameter*stress_multiplier
            #else:
            #    stress=stress_parameter
            
            # Frankel (1995) scaling of corner frequency; verified this looks the same as in GP
            # Right now this applies the same factor to all faults
            fc_scale=(M0)/(N*stress*dl**3*1e21) #Frankel scaling
            small_event_M0 = stress*dl**3*1e21
            
        

            
            #Get rho, alpha, beta at subfault depth
            zs=fault[kfault,3]
            mu,alpha,beta=get_mu(structure,zs,return_speeds=True)
            rho=mu/beta**2
            
            #Get radiation scale factor
            Spartition=1/2**0.5
            if component=='N' :
                component_angle=0
            elif component=='E':
                component_angle=90
            
            rho=rho/1000 #to g/cm**3
            beta=(beta/1000)*1e5 #to cm/s
            alpha=(alpha/1000)*1e5
            
            #Verified this produces same value as in GP
            CS=(2*Spartition)/(4*pi*(rho)*(beta**3))
            CP=2/(4*pi*(rho)*(alpha**3))
            
            #Get local subfault rupture speed
            beta=beta/100 #to m/s
            vr=hfsims.get_local_rupture_speed(zs,beta,rise_time_depths)
            vr=vr/1000 #to km/s
            dip_factor=hfsims.get_dip_factor(fault[kfault,5],fault[kfault,8],fault[kfault,9])
            
            #Subfault corner frequency
            c0=2.0 #GP2015 value
            fc_subfault=(c0*vr)/(dip_factor*pi*dl)
            
            #get subfault source spectrum
            #S=((relative_subfault_M0[kfault]*M0/N)*f**2)/(1+fc_scale*(f/fc_subfault)**2)
            S=small_event_M0*(omega**2/(1+(f/fc_subfault)**2))
            frankel_conv_operator= fc_scale*((fc_subfault**2+f**2)/(fc_subfault**2+fc_scale*f**2))
            S=S*frankel_conv_operator
            
            #get high frequency decay
            P=exp(-pi*kappa*f)
            
            #get quarter wavelength amplification factors
            # pass rho in kg/m^3 (this units nightmare is what I get for following Graves' code)
            I=hfsims.get_amplification_factors(f,structure,zs,beta,rho*1000)
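            #Quarter-wavelength (Boore & Joyner style) site amplification:
            #roughly the impedance ratio sqrt of rho*beta at the source over
            #the average rho*beta in the top quarter wavelength at each frequency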
            
            #Get other geometric parameters necessary for radiation pattern
            strike=fault[kfault,4]
            dip=fault[kfault,5]
            ss=fault[kfault,8]
            ds=fault[kfault,9]
            rake=rad2deg(arctan2(ds,ss))
            
            #Get ray paths for all direct P arrivals
            Ppaths=velmod.get_ray_paths(zs,dist_in_degs,phase_list=['P','p'])
            
            #Get ray paths for all direct S arrivals
            try:
                Spaths=velmod.get_ray_paths(zs,dist_in_degs,phase_list=['S','s'])
            except:
                Spaths=velmod.get_ray_paths(zs+tau_perturb,dist_in_degs,phase_list=['S','s'])
            
            #sometimes there's no S, weird I know. Retry with small source
            #depth perturbations until a direct S shows up.
            depth_perturbations=[1,5,-5,-10,10,-50,50,-75,75]
            for pert in depth_perturbations:
                if len(Spaths)>0:
                    break
                Spaths=velmod.get_ray_paths(zs+pert*tau_perturb,dist_in_degs,phase_list=['S','s'])
            if len(Spaths)==0:
                print('ERROR: I give up, no direct S in spite of multiple attempts at subfault '+str(kfault))

            #Get direct s path and moho reflection
            mohoS=None
            directS=Spaths[0]
            directP=Ppaths[0]
            #print len(Spaths)
            if len(Spaths)==1: #only direct S
                pass
            else:
                #turn_depth=zeros(len(Spaths)-1) #turning depth of other non-direct rays
                #for k in range(1,len(Spaths)):
                #    turn_depth[k-1]=Spaths[k].path['depth'].max()
                ##If there's a ray that turns within 2km of Moho, call that guy the Moho reflection
                #deltaz=abs(turn_depth-moho_depth_in_km)
                #i=argmin(deltaz)
                #if deltaz[i]<2: #Yes, this is a moho reflection
                #    mohoS=Spaths[i+1]
                #else:
                #    mohoS=None
                mohoS=Spaths[-1]
                 
 
            #######         Build Direct P ray           ######
            if Pwave:
                take_off_angle_P=directP.takeoff_angle
                
                #Get attenuation due to geometrical spreading (from the path length)
                path_length_P=hfsims.get_path_length(directP,zs,dist_in_degs)
                path_length_P=path_length_P*100 #to cm
                
                #Get effect of intrinsic attenuation for that ray (path integrated)
                Q_P=hfsims.get_attenuation(f,structure,directS,Qexp,Qtype='P')
                
                #Build the entire path term
                G_P=(I*Q_P)/path_length_P

                #Get conically averaged radiation pattern terms
                RP=hfsims.conically_avg_P_radiation_pattern(strike,dip,rake,azimuth,take_off_angle_P)
                RP=abs(RP)
                   
                #Get partition of Pwave into Z and N,E components 
                incidence_angle=directP.incident_angle
                Npartition,Epartition,Zpartition=hfsims.get_P_wave_partition(incidence_angle,azimuth)
                if component=='Z':
                   Ppartition=Zpartition 
                elif component=='N':
                    Ppartition=Npartition
                else:
                    Ppartition=Epartition
                    
                #And finally multiply everything together to get the subfault amplitude spectrum
                AP=CP*S*G_P*P*RP*Ppartition           

                #Generate windowed time series
                duration=1./fc_subfault+0.09*(dist/1000)
                w=hfsims.windowed_gaussian(duration,hf_dt,window_type='saragoni_hart')
                
                #Go to frequency domain, apply amplitude spectrum and ifft for final time series
                hf_seis_P=hfsims.apply_spectrum(w,AP,f,hf_dt)
                
                #What time after OT should this time series start at?
                time_insert=directP.path['time'][-1]+onset_times[kfault]
                i=argmin(abs(t-time_insert))
                j=i+len(hf_seis_P)
                
                #Check seismogram doesn't go past last sample
                if i<len(hf)-1: #the seismogram starts before the end of the record
                    if j>len(hf): #seismogram goes past total_duration length, trim it
                        len_paste=len(hf)-i
                        j=len(hf)
                        #Add seismogram
                        hf[i:j]=hf[i:j]+real(hf_seis_P[0:len_paste])
                    else: #Lengths are fine
                        hf[i:j]=hf[i:j]+real(hf_seis_P)      
                else: #Seismogram starts after end of available space
                    pass   
                
                                           
                                                                  
                                                                                                                
                          
            #######         Build Direct S ray           ######
            take_off_angle_S=directS.takeoff_angle
            
            #Get attenuation due to geometrical spreading (from the path length)
            path_length_S=hfsims.get_path_length(directS,zs,dist_in_degs)
            path_length_S=path_length_S*100 #to cm
            
            #Get effect of intrinsic attenuation for that ray (path integrated)
            Q_S=hfsims.get_attenuation(f,structure,directS,Qexp)
            
            #Build the entire path term
            G_S=(I*Q_S)/path_length_S
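            #Full path term: site amplification I times path-integrated
            #attenuation Q_S, divided by path length so geometric spreading
            #falls off as 1/R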

            #Get conically averaged radiation pattern terms
            if component=='Z':
                RP_vert=hfsims.conically_avg_vert_radiation_pattern(strike,dip,rake,azimuth,take_off_angle_S)
                #And finally multiply everything together to get the subfault amplitude spectrum
                AS=CS*S*G_S*P*RP_vert   
            else:
                RP=hfsims.conically_avg_radiation_pattern(strike,dip,rake,azimuth,take_off_angle_S,component_angle)
                RP=abs(RP)
                #And finally multiply everything together to get the subfault amplitude spectrum
                AS=CS*S*G_S*P*RP                

            #Generate windowed time series
            duration=1./fc_subfault+0.063*(dist/1000)
            w=hfsims.windowed_gaussian(duration,hf_dt,window_type='saragoni_hart')
            #w=windowed_gaussian(3*duration,hf_dt,window_type='cua',ptime=Ppaths[0].path['time'][-1],stime=Spaths[0].path['time'][-1])
            
            #Go to frequency domain, apply amplitude spectrum and ifft for final time series
            hf_seis_S=hfsims.apply_spectrum(w,AS,f,hf_dt)
            
            #What time after OT should this time series start at?
            time_insert=directS.path['time'][-1]+onset_times[kfault]
            #print 'ts = '+str(time_insert)+' , Td = '+str(duration)
            #time_insert=Ppaths[0].path['time'][-1]
            i=argmin(abs(t-time_insert))
            j=i+len(hf_seis_S)
            
            
            #Check seismogram doesn't go past last sample
            if i<len(hf)-1: #the seismogram starts before the end of the record
                if j>len(hf): #seismogram goes past total_duration length, trim it
                    len_paste=len(hf)-i
                    j=len(hf)
                    #Add seismogram
                    hf[i:j]=hf[i:j]+real(hf_seis_S[0:len_paste])
                else: #Lengths are fine
                    hf[i:j]=hf[i:j]+real(hf_seis_S)
            else: #Beginning of seismogram is past end of available space
                pass
            
            
            #######         Build Moho reflected S ray           ######
#            if mohoS==None:
#                pass
#            else:
#                if kfault%100==0:
#                    print '... ... building Moho reflected S wave'
#                take_off_angle_mS=mohoS.takeoff_angle
#                
#                #Get attenuation due to geometrical spreading (from the path length)
#                path_length_mS=get_path_length(mohoS,zs,dist_in_degs)
#                path_length_mS=path_length_mS*100 #to cm
#                
#                #Get effect of intrinsic aptimeenuation for that ray (path integrated)
#                Q_mS=get_attenuation(f,structure,mohoS,Qexp)
#                
#                #Build the entire path term
#                G_mS=(I*Q_mS)/path_length_mS
#
#                #Get conically averaged radiation pattern terms
#                if component=='Z':
#                    RP_vert=conically_avg_vert_radiation_pattern(strike,dip,rake,azimuth,take_off_angle_mS)
#                    #And finally multiply everything together to get the subfault amplitude spectrum
#                    A=C*S*G_mS*P*RP_vert   
#                else:
#                    RP=conically_avg_radiation_pattern(strike,dip,rake,azimuth,take_off_angle_mS,component_angle)
#                    RP=abs(RP)
#                    #And finally multiply everything together to get the subfault amplitude spectrum
#                    A=C*S*G_mS*P*RP                
#
#                #Generate windowed time series
#                duration=1./fc_subfault+0.063*(dist/1000)
#                w=windowed_gaussian(duration,hf_dt,window_type='saragoni_hart')
#                #w=windowed_gaussian(3*duration,hf_dt,window_type='cua',ptime=Ppaths[0].path['time'][-1],stime=Spaths[0].path['time'][-1])
#                
#                #Go to frequency domain, apply amplitude spectrum and ifft for final time series
#                hf_seis=apply_spectrum(w,A,f,hf_dt)
#                
#                #What time after OT should this time series start at?
#                time_insert=mohoS.path['time'][-1]+onset_times[kfault]
#                #print 'ts = '+str(time_insert)+' , Td = '+str(duration)
#                #time_insert=Ppaths[0].path['time'][-1]
#                i=argmin(abs(t-time_insert))
#                j=i+len(hf_seis)
#                
#                #Add seismogram
#                hf[i:j]=hf[i:j]+hf_seis
#                
#                #Done, reset
#                mohoS=None        
           
    #Done
    tr.data=hf/100 #hf is in CGS (cm/s**2); divide by 100 to get m/s**2
    #Add station location, event location, and first P-wave arrival time to SAC header
    tr.stats.update({'sac':{'stlo':sta_lon,'stla':sta_lat,'evlo':epicenter[0],'evla':epicenter[1],'evdp':epicenter[2],'dist':dist_in_km,'az':az,'baz':backaz,'mag':Mw}}) #,'idep':"ACC (m/s^2)" not sure why idep won't work
    
    #Write out to file 
    rupture=rupture_name.split('.')[0]+'.'+rupture_name.split('.')[1]
    if not path.exists(home+project_name+'/output/waveforms/'+rupture+'/'):
        makedirs(home+project_name+'/output/waveforms/'+rupture+'/')
    if rank < 10:
        tr.write(home+project_name+'/output/waveforms/'+rupture+'/'+sta+'.HN'+component+'.00'+str(rank)+'.sac',format='SAC')
    elif rank < 100:
        tr.write(home+project_name+'/output/waveforms/'+rupture+'/'+sta+'.HN'+component+'.0'+str(rank)+'.sac',format='SAC')
    else:
        tr.write(home+project_name+'/output/waveforms/'+rupture+'/'+sta+'.HN'+component+'.'+str(rank)+'.sac',format='SAC')
Example #50
 def _write_trace(self):
     trace = Trace(np.array(self.data, dtype='int32'), self.header.stats)
     trace.write(self.filename, format='MSEED')
Example #51
dist = 150

freq, xcorr = noise.noisecorr(tr1, tr2, window_length=30., overlap=0.3)

smoothed = noise.velocity_filter(freq,
                                 xcorr,
                                 dist / 1000.,
                                 cmin=.1,
                                 cmax=1.0,
                                 return_all=False)

cc = np.real(np.fft.fftshift(np.fft.ifft(smoothed)))
tr = Trace(data=cc)
tr.stats.sampling_rate = 50
tr.write('test.mseed', format='MSEED')


crossings,phase_vel = noise.extract_phase_velocity(freq,smoothed,dist/1000.,ref_curve,\
                       freqmin=1,freqmax=20, min_vel=.01, max_vel=2.0,min_amp=0.0,\
                       horizontal_polarization=False, smooth_spectrum=False,plotting=False)#True)

plt.figure(figsize=(16, 10))
plt.subplot(2, 2, 1)
plt.plot(freq, np.real(xcorr), label='original')
plt.plot(freq, np.real(smoothed), 'r', lw=2, label='smoothed')
plt.title("Cross-correlation spectrum")
plt.xlabel("Frequency")
#plt.legend(numpoints=1)
#plt.subplot(2,2,2)
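# Aside: a tiny self-contained sketch (pure NumPy, illustrative data only) of
# the spectrum -> correlation step used above: the ifft of a conjugate-
# symmetric spectrum is real, and fftshift moves zero lag to the centre.
import numpy as np

sig = np.random.randn(1024)
spec = np.fft.fft(sig) * np.conj(np.fft.fft(sig))  # autocorrelation spectrum
cc = np.real(np.fft.fftshift(np.fft.ifft(spec)))   # zero lag now at the centre
assert np.argmax(cc) == len(cc) // 2               # autocorrelation peaks at zero lag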
    def test_readThreadSafe(self):
        """
        Tests for race conditions: reads the same waveform file n_threads
        (currently 30) times in parallel and compares the results, which
        must all be identical.
        """
        data = np.arange(0, 500)
        start = UTCDateTime(2009, 1, 13, 12, 1, 2, 999000)
        formats = _getEntryPoints('obspy.plugin.waveform', 'writeFormat')
        for format in formats:
            # XXX: skip SEGY and SU formats for now as they need some special
            # headers.
            if format in ['SEGY', 'SU', 'SEG2']:
                continue

            dt = np.dtype("int")
            if format in ('MSEED', 'GSE2'):
                dt = "int32"
            tr = Trace(data=data.astype(dt))
            tr.stats.network = "BW"
            tr.stats.station = "MANZ1"
            tr.stats.location = "00"
            tr.stats.channel = "EHE"
            tr.stats.calib = 0.999999
            tr.stats.delta = 0.005
            tr.stats.starttime = start
            # create waveform file with given format and byte order
            outfile = NamedTemporaryFile().name
            tr.write(outfile, format=format)
            if format == 'Q':
                outfile += '.QHD'
            n_threads = 30
            streams = []

            def testFunction(streams):
                st = read(outfile, format=format)
                streams.append(st)
            # Read the file n_threads times in parallel and collect the
            # resulting streams.
            for _i in xrange(n_threads):
                thread = threading.Thread(target=testFunction,
                                          args=(streams,))
                thread.start()
            # Loop until all threads are finished.
            # use a separate name so the UTCDateTime 'start' is not clobbered
            wait_start = time.time()
            while True:
                if threading.activeCount() == 1:
                    break
                # Avoid an infinite loop: give up after 120 seconds (such a
                # long timeout is needed when debugging with valgrind).
                elif time.time() - wait_start >= 120:
                    raise Warning('Not all threads finished!')
                else:
                    continue
            # Compare all streams, which must be identical, and clean up files.
            for st in streams:
                np.testing.assert_array_equal(st[0].data, streams[0][0].data)
            os.remove(outfile)
            if format == 'Q':
                os.remove(outfile[:-4] + '.QBN')
                os.remove(outfile[:-4])
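# Note: instead of polling threading.activeCount(), the wait above could be
# written with join() and a timeout -- a sketch, not the test's actual code
# (it assumes the Thread objects were collected in a list `threads`):
#
#     for thread in threads:
#         thread.join(timeout=120)
#     if any(t.is_alive() for t in threads):
#         raise Warning('Not all threads finished!')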
 def test_readAndWrite(self):
     """
     Tests read and write methods for all waveform plug-ins.
     """
     data = np.arange(0, 2000)
     start = UTCDateTime(2009, 1, 13, 12, 1, 2, 999000)
     formats = _getEntryPoints('obspy.plugin.waveform', 'writeFormat')
     for format in formats:
         # XXX: skip SEGY and SU formats for now as they need some special
         # headers.
         if format in ['SEGY', 'SU', 'SEG2']:
             continue
         for native_byteorder in ['<', '>']:
             for byteorder in ['<', '>', '=']:
                 # new trace object in native byte order
                 dt = np.dtype("int").newbyteorder(native_byteorder)
                 if format in ('MSEED', 'GSE2'):
                     # MiniSEED and GSE2 cannot write int64, enforce type
                     dt = "int32"
                 tr = Trace(data=data.astype(dt))
                 tr.stats.network = "BW"
                 tr.stats.station = "MANZ1"
                 tr.stats.location = "00"
                 tr.stats.channel = "EHE"
                 tr.stats.calib = 0.199999
                 tr.stats.delta = 0.005
                 tr.stats.starttime = start
                 # create waveform file with given format and byte order
                 outfile = NamedTemporaryFile().name
                 tr.write(outfile, format=format, byteorder=byteorder)
                 if format == 'Q':
                     outfile += '.QHD'
                 # read in again using auto detection
                 st = read(outfile)
                 self.assertEquals(len(st), 1)
                 self.assertEquals(st[0].stats._format, format)
                 # read in using format argument
                 st = read(outfile, format=format)
                 self.assertEquals(len(st), 1)
                 self.assertEquals(st[0].stats._format, format)
                      # read in using StringIO instances; skip Q files as the
                      # format needs multiple files
                 if format not in ['Q']:
                     # file handler without format
                     temp = open(outfile, 'rb')
                     st = read(temp)
                     self.assertEquals(len(st), 1)
                     self.assertEquals(st[0].stats._format, format)
                     # file handler with format
                     temp = open(outfile, 'rb')
                     st = read(temp, format=format)
                     self.assertEquals(len(st), 1)
                     self.assertEquals(st[0].stats._format, format)
                     # StringIO without format
                     temp = StringIO.StringIO(open(outfile, 'rb').read())
                     st = read(temp)
                     self.assertEquals(len(st), 1)
                     self.assertEquals(st[0].stats._format, format)
                     # StringIO with format
                     temp = StringIO.StringIO(open(outfile, 'rb').read())
                     st = read(temp, format=format)
                     self.assertEquals(len(st), 1)
                     self.assertEquals(st[0].stats._format, format)
                     # cStringIO without format
                     temp = cStringIO.StringIO(open(outfile, 'rb').read())
                     st = read(temp)
                     self.assertEquals(len(st), 1)
                     self.assertEquals(st[0].stats._format, format)
                     # cStringIO with format
                     temp = cStringIO.StringIO(open(outfile, 'rb').read())
                     st = read(temp, format=format)
                     self.assertEquals(len(st), 1)
                     self.assertEquals(st[0].stats._format, format)
                 # check byte order
                 self.assertEquals(st[0].data.dtype.byteorder, '=')
                 # check meta data
                 # some formats do not contain a calibration factor
                 if format not in ['MSEED', 'WAV', 'TSPAIR', 'SLIST']:
                     self.assertAlmostEquals(st[0].stats.calib, 0.199999, 5)
                 else:
                     self.assertEquals(st[0].stats.calib, 1.0)
                 if format not in ['WAV']:
                     self.assertEquals(st[0].stats.starttime, start)
                     self.assertEquals(st[0].stats.endtime, start + 9.995)
                     self.assertEquals(st[0].stats.delta, 0.005)
                     self.assertEquals(st[0].stats.sampling_rate, 200.0)
                 # network/station/location/channel codes
                 if format in ['Q', 'SH_ASC', 'GSE2']:
                     # no network or location code in Q, SH_ASC, GSE2
                     self.assertEquals(st[0].id, ".MANZ1..EHE")
                 elif format not in ['WAV']:
                     self.assertEquals(st[0].id, "BW.MANZ1.00.EHE")
                 # remove temporary files
                 os.remove(outfile)
                 # Q files consist of two files - deleting additional file
                 if format == 'Q':
                     os.remove(outfile[:-4] + '.QBN')
                     os.remove(outfile[:-4])
 def _write_trace(self):
     trace = Trace(self.data.get(), self.header.stats)
     trace.write(self.absname, format='MSEED')
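# (a variant of the helper in Example #50: here self.data is presumably a
# buffer or queue whose get() returns the sample array, and self.absname an
# absolute output path)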
Example #55
    def test_read_and_write(self):
        """
        Tests read and write methods for all waveform plug-ins.
        """
        data = np.arange(0, 2000)
        start = UTCDateTime(2009, 1, 13, 12, 1, 2, 999000)
        formats = _get_default_eps('obspy.plugin.waveform', 'writeFormat')
        for format in formats:
            # XXX: skip SEGY and SU formats for now as they need some special
            # headers.
            if format in ['SEGY', 'SU', 'SEG2']:
                continue
            for native_byteorder in ['<', '>']:
                for byteorder in (['<', '>', '='] if format in
                                  WAVEFORM_ACCEPT_BYTEORDER else [None]):
                    if format == 'SAC' and byteorder == '=':
                        # SAC file format enforces '<' or '>'
                        # byteorder on writing
                        continue
                    # new trace object in native byte order
                    dt = np.dtype(np.int_).newbyteorder(native_byteorder)
                    if format in ('MSEED', 'GSE2'):
                        # MiniSEED and GSE2 cannot write int64, enforce type
                        dt = np.int32
                    tr = Trace(data=data.astype(dt))
                    tr.stats.network = "BW"
                    tr.stats.station = "MANZ1"
                    tr.stats.location = "00"
                    tr.stats.channel = "EHE"
                    tr.stats.calib = 0.199999
                    tr.stats.delta = 0.25
                    tr.stats.starttime = start
                    # create waveform file with given format and byte order
                    with NamedTemporaryFile() as tf:
                        outfile = tf.name
                        if byteorder is None:
                            tr.write(outfile, format=format)
                        else:
                            tr.write(outfile, format=format,
                                     byteorder=byteorder)
                        if format == 'Q':
                            outfile += '.QHD'
                        # read in again using auto detection
                        st = read(outfile)
                        self.assertEqual(len(st), 1)
                        self.assertEqual(st[0].stats._format, format)
                        # read in using format argument
                        st = read(outfile, format=format)
                        self.assertEqual(len(st), 1)
                        self.assertEqual(st[0].stats._format, format)
                        # read in using BytesIO instances; skip Q files as
                        # the format needs multiple files
                        if format not in ['Q']:
                            # file handler without format
                            with open(outfile, 'rb') as fp:
                                st = read(fp)
                            self.assertEqual(len(st), 1)
                            self.assertEqual(st[0].stats._format, format)
                            # file handler with format
                            with open(outfile, 'rb') as fp:
                                st = read(fp, format=format)
                            self.assertEqual(len(st), 1)
                            self.assertEqual(st[0].stats._format, format)
                            # BytesIO without format
                            with open(outfile, 'rb') as fp:
                                temp = io.BytesIO(fp.read())
                            st = read(temp)
                            self.assertEqual(len(st), 1)
                            self.assertEqual(st[0].stats._format, format)
                            # BytesIO with format
                            with open(outfile, 'rb') as fp:
                                temp = io.BytesIO(fp.read())
                            st = read(temp, format=format)
                            self.assertEqual(len(st), 1)
                            self.assertEqual(st[0].stats._format, format)
                        # Q files consist of two files - deleting additional
                        # file
                        if format == 'Q':
                            os.remove(outfile[:-4] + '.QBN')
                            os.remove(outfile[:-4] + '.QHD')
                    # check byte order
                    if format == 'SAC':
                        # SAC format preserves byteorder on writing
                        self.assertTrue(st[0].data.dtype.byteorder
                                        in ('=', byteorder))
                    else:
                        self.assertEqual(st[0].data.dtype.byteorder, '=')
                    # check meta data
                    # some formats do not contain a calibration factor
                    if format not in ['MSEED', 'WAV', 'TSPAIR', 'SLIST', 'AH']:
                        self.assertAlmostEqual(st[0].stats.calib, 0.199999, 5)
                    else:
                        self.assertEqual(st[0].stats.calib, 1.0)
                    if format not in ['WAV']:
                        self.assertEqual(st[0].stats.starttime, start)
                        self.assertEqual(st[0].stats.delta, 0.25)
                        self.assertEqual(st[0].stats.endtime, start + 499.75)
                        self.assertEqual(st[0].stats.sampling_rate, 4.0)

                    # network/station/location/channel codes
                    if format in ['Q', 'SH_ASC', 'AH']:
                        # no network or location code in Q, SH_ASC, AH
                        self.assertEqual(st[0].id, ".MANZ1..EHE")
                    elif format == "GSE2":
                        # no location code in GSE2
                        self.assertEqual(st[0].id, "BW.MANZ1..EHE")
                    elif format not in ['WAV']:
                        self.assertEqual(st[0].id, "BW.MANZ1.00.EHE")