Example no. 1
 def test_component(self):
     """
     Test setting and getting of component.
     """
     stats = Stats()
     # Channel with 3 characters
     stats.channel = 'HHZ'
     assert stats.component == 'Z'
     stats.component = 'L'
     assert stats.component == 'L'
     assert stats.channel == 'HHL'
     stats['component'] = 'Q'
     assert stats['component'] == 'Q'
     assert stats.channel == 'HHQ'
     # Channel with 1 character as component
     stats.channel = 'N'
     stats.component = 'E'
     assert stats.channel == 'E'
     assert stats.component == 'E'
     # Channel with 0 characters
     stats.channel = ''
     assert stats.component == ''
     stats.component = 'Z'
     assert stats.channel == 'Z'
     # Components must be single character
     stats.channel = 'HHZ'
     with pytest.raises(ValueError):
         stats.component = ''
     assert stats.channel == 'HHZ'
     with pytest.raises(ValueError):
         stats.component = 'ZZ'
     assert stats.channel == 'HHZ'
Example no. 2
 def test_component(self):
     """
     Test setting and getting of component.
     """
     stats = Stats()
     # Channel with 3 characters
     stats.channel = 'HHZ'
     self.assertEqual(stats.component, 'Z')
     stats.component = 'L'
     self.assertEqual(stats.component, 'L')
     self.assertEqual(stats.channel, 'HHL')
     stats['component'] = 'Q'
     self.assertEqual(stats['component'], 'Q')
     self.assertEqual(stats.channel, 'HHQ')
     # Channel with 1 character as component
     stats.channel = 'N'
     stats.component = 'E'
     self.assertEqual(stats.channel, 'E')
     self.assertEqual(stats.component, 'E')
     # Channel with 0 characters
     stats.channel = ''
     self.assertEqual(stats.component, '')
     stats.component = 'Z'
     self.assertEqual(stats.channel, 'Z')
     # Components must be single character
     stats.channel = 'HHZ'
     with self.assertRaises(ValueError):
         stats.component = ''
     self.assertEqual(stats.channel, 'HHZ')
     with self.assertRaises(ValueError):
         stats.component = 'ZZ'
     self.assertEqual(stats.channel, 'HHZ')
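Both variants above exercise the same behaviour: component is a derived attribute that maps onto the last character of channel. A minimal interactive sketch of that mapping (assuming a recent ObsPy):

from obspy.core import Stats

stats = Stats()
stats.channel = 'HHZ'
print(stats.component)   # 'Z' -- the last character of the channel code
stats.component = 'N'    # setting the component rewrites the channel
print(stats.channel)     # 'HHN'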
Example no. 3
def ascii(path, filename):
    """
    Reads SPECFEM3D-style ASCII data

    :type path: str
    :param path: path to datasets
    :type filename: str
    :param filename: file to read
    """
    import os
    from numpy import loadtxt
    from obspy.core import Stream, Stats, Trace

    st = Stream()
    stats = Stats()

    time, data = loadtxt(os.path.join(path, filename)).T

    stats.filename = filename
    stats.starttime = time[0]
    stats.delta = time[1] - time[0]
    stats.npts = len(data)

    try:
        parts = filename.split(".")
        stats.network = parts[0]
        stats.station = parts[1]
        stats.channel = parts[2]
    except IndexError:
        # filename does not follow the NET.STA.CHA.* convention
        pass

    st.append(Trace(data=data, header=stats))

    return st
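A self-contained sketch of how this reader might be driven; the directory and the file name AA.S0001.BXZ.semd are hypothetical, and the two whitespace-separated columns (time, amplitude) are what loadtxt(...).T above expects:

import os
import numpy as np

path = 'demo_output'
fname = 'AA.S0001.BXZ.semd'   # hypothetical NET.STA.CHA.suffix name
os.makedirs(path, exist_ok=True)
t = np.linspace(0.0, 1.0, 101)
np.savetxt(os.path.join(path, fname), np.column_stack((t, np.sin(2 * np.pi * t))))

st = ascii(path, fname)       # the reader defined above
print(st[0].id, st[0].stats.delta, st[0].stats.npts)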
Example no. 4
def read_ascii(path, NR, nt):
    import os
    import numpy as np
    from obspy.core import Stream, Stats, Trace
    dat_type = 'semd'
    comp1 = 'FXX'
    comp2 = 'FXY'
    stream = Stream()
    for rec_x in range(0, NR):
        file_name_in1 = path + 'P.R' + str(int(rec_x + 1)) + '.' + comp1 + '.' + dat_type
        file_name_in2 = path + 'P.R' + str(int(rec_x + 1)) + '.' + comp2 + '.' + dat_type
        xz1 = np.genfromtxt(file_name_in1)
        xz2 = np.genfromtxt(file_name_in2)
        deg = 0.0
        alpha = np.arctan(xz2[:nt, 1] / (1.0e-40 + xz1[:nt, 1]))  # angle of projection
        direction = np.sign(np.cos(deg * np.pi / 180.0) * xz1[:nt, 1] * np.cos(alpha) +
                            np.sin(deg * np.pi / 180.0) * xz2[:nt, 1] * np.cos(alpha))
        data = direction * np.sqrt(xz1[:nt, 1]**2 + xz2[:nt, 1]**2) * np.cos(alpha)  # scalar radial component

        stats = Stats()
        stats.filename = path + 'P.R' + str(int(rec_x + 1))
        stats.starttime = xz1[0, 0]
        stats.delta = xz1[1, 0] - xz1[0, 0]
        stats.npts = len(xz1[:nt, 0])

        try:
            parts = os.path.basename(file_name_in1).split('.')
            stats.network = parts[0]
            stats.station = parts[1]
            stats.channel = parts[2]
        except IndexError:
            pass

        stream.append(Trace(data=data[:], header=stats))

    return stream
Example no. 5
    def test_casted_stats_nscl_writes_to_mseed(self):
        """
        Ensure a Stream object that has had its nslc types cast to str can
        still be written.
        """
        st = Stream(traces=read()[0])

        # Get a new stats object with just the basic items in it
        stats_items = set(Stats())
        new_stats = Stats()
        new_stats.__dict__.update({x: st[0].stats[x] for x in stats_items})
        new_stats.network = 1
        new_stats.station = 1.1
        new_stats.channel = 'Non'
        st[0].stats = new_stats
        # try writing stream to bytes buffer
        bio = io.BytesIO()
        st.write(bio, 'mseed')
        bio.seek(0)
        # read bytes and compare
        stt = read(bio)
        # remove _mseed so streams can compare equal
        stt[0].stats.pop('mseed')
        del stt[0].stats._format  # format gets added upon writing
        self.assertEqual(st, stt)
Example no. 6
def ascii(path, filenames):
    """ Reads SPECFEM3D-style ascii data
    """
    from numpy import loadtxt
    from obspy.core import Stream, Stats, Trace

    stream = Stream()
    for filename in filenames:
        stats = Stats()
        data = loadtxt(path + '/' + filename)

        stats.filename = filename
        stats.starttime = data[0, 0]
        stats.delta = data[1, 0] - data[0, 0]  # sample spacing from the time column
        stats.npts = len(data[:, 0])

        try:
            parts = filename.split('.')
            stats.network = parts[0]
            stats.station = parts[1]
            stats.channel = parts[2]
        except IndexError:
            pass

        stream.append(Trace(data=data[:, 1], header=stats))

    return stream
Example no. 7
def ascii(path, filenames):
    from numpy import loadtxt
    from obspy.core import Stream, Stats, Trace

    stream = Stream()
    for filename in filenames:
        stats = Stats()
        data = loadtxt(path + '/' + filename)

        stats.filename = filename
        stats.starttime = data[0, 0]
        stats.delta = data[1, 0] - data[0, 0]  # sample spacing from the time column
        stats.npts = len(data[:, 0])

        try:
            parts = filename.split('.')
            stats.network = parts[0]
            stats.station = parts[1]
            stats.channel = parts[2]
        except IndexError:
            pass

        stream.append(Trace(data=data[:, 1], header=stats))

    return stream
Example no. 8
    def test_casted_stats_nscl_writes_to_mseed(self):
        """
        Ensure a Stream object that has had its nslc types cast to str can
        still be written.
        """
        st = Stream(traces=read()[0])

        # Get a new stats object with just the basic items in it
        stats_items = set(Stats())
        new_stats = Stats()
        new_stats.__dict__.update({x: st[0].stats[x] for x in stats_items})
        with warnings.catch_warnings(record=True):
            new_stats.network = 1
            new_stats.station = 1.1
        new_stats.channel = 'Non'
        st[0].stats = new_stats
        # try writing stream to bytes buffer
        bio = io.BytesIO()
        st.write(bio, 'mseed')
        bio.seek(0)
        # read bytes and compare
        stt = read(bio)
        # remove _mseed so streams can compare equal
        stt[0].stats.pop('mseed')
        del stt[0].stats._format  # format gets added upon writing
        self.assertEqual(st, stt)
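The in-memory round trip both tests rely on can be sketched on ObsPy's bundled example data (a minimal sketch, not the test itself):

import io
from obspy import read

st = read()                    # ObsPy's bundled example stream
bio = io.BytesIO()
st.write(bio, format='MSEED')  # serialise to an in-memory MiniSEED buffer
bio.seek(0)
st2 = read(bio)
print(len(st2), st2[0].stats.npts)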
Example no. 9
def __create_trace(
    data,
    network="NT",
    station="BOU",
    channel="H",
    location="R0",
    data_interval="second",
    data_type="interval",
):
    """
    Utility to create a trace containing the given numpy array.

    Parameters
    ----------
    data: array
        The array to be inserted into the trace.

    Returns
    -------
    obspy.core.Trace
        Trace containing the given data.
    """
    stats = Stats()
    stats.starttime = UTCDateTime("2019-12-01")
    stats.delta = TimeseriesUtility.get_delta_from_interval(data_interval)
    stats.channel = channel
    stats.station = station
    stats.npts = len(data)
    stats.data_interval = data_interval
    stats.data_type = data_type
    numpy_data = numpy.array(data, dtype=numpy.float64)
    return Trace(numpy_data, stats)
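Stats is an AttribDict, so project-specific keys such as data_interval and data_type are stored next to the standard header fields. A stripped-down sketch without the geomag-specific helpers (the one-second delta is assumed rather than derived):

import numpy
from obspy.core import Stats, Trace, UTCDateTime

stats = Stats()
stats.starttime = UTCDateTime('2019-12-01')
stats.delta = 1.0                     # assumed one-second data
stats.station = 'BOU'
stats.channel = 'H'
stats.data_interval = 'second'        # custom keys are allowed on Stats
stats.data_type = 'interval'
tr = Trace(numpy.array([1.0, 2.0, 3.0], dtype=numpy.float64), stats)
print(tr.stats.data_type, tr.stats.npts)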
Example no. 10
 def process(self, stream):
     """
     Run algorithm for a stream.
     Processes all traces in the stream.
     Parameters
     ----------
     stream : obspy.core.Stream
         stream of data to process
     Returns
     -------
     out : obspy.core.Stream
         stream containing 1 trace per original trace.
     """
     out = Stream()
     for trace in stream:
         dbdt = np.around(np.diff(trace.data), decimals=6)
         stats = Stats(trace.stats)
         stats.channel = "{}_DT".format(stats.channel)
         trace_out = create_empty_trace(
             starttime=stats.starttime + stats.delta,
             endtime=stats.endtime,
             observatory=stats.station,
             type=stats.location,
             interval=get_interval_from_delta(stats.delta),
             channel=stats.channel,
             network=stats.network,
             station=stats.station,
             location=stats.location,
         )
         trace_out.data = dbdt
         out += trace_out
     return out
Example no. 11
def _create_trace(data, channel, starttime, delta=60.):
    stats = Stats()
    stats.channel = channel
    stats.delta = delta
    stats.starttime = starttime
    stats.npts = len(data)
    data = numpy.array(data, dtype=numpy.float64)
    return Trace(data, stats)
Example no. 12
def _create_trace(data, channel, starttime, delta=60.0):
    stats = Stats()
    stats.channel = channel
    stats.delta = delta
    stats.starttime = starttime
    stats.npts = len(data)
    data = numpy.array(data, dtype=numpy.float64)
    return Trace(data, stats)
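A short usage sketch for the helper above, assuming numpy and the obspy.core Stats/Trace imports used elsewhere on this page; the channel name and start time are arbitrary:

from obspy import UTCDateTime

tr = _create_trace([1.0, 2.0, 3.0], 'X', UTCDateTime('2020-01-01'))
print(tr.stats.channel, tr.stats.delta, tr.stats.npts)   # X 60.0 3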
Example no. 13
 def get_obspy_trace(self):
     """
     Return class contents as obspy.Trace object
     """
     stat = Stats()
     stat.network = self.net.split(b'\x00')[0].decode()
     stat.station = self.sta.split(b'\x00')[0].decode()
     location = self.loc.split(b'\x00')[0].decode()
     if location == '--':
         stat.location = ''
     else:
         stat.location = location
     stat.channel = self.chan.split(b'\x00')[0].decode()
     stat.starttime = UTCDateTime(self.start)
     stat.sampling_rate = self.rate
     stat.npts = len(self.data)
     return Trace(data=self.data, header=stat)
Example no. 14
 def get_obspy_trace(self):
     """
     Return class contents as obspy.Trace object
     """
     stat = Stats()
     stat.network = self.net.split(b'\x00')[0].decode()
     stat.station = self.sta.split(b'\x00')[0].decode()
     location = self.loc.split(b'\x00')[0].decode()
     if location == '--':
         stat.location = ''
     else:
         stat.location = location
     stat.channel = self.chan.split(b'\x00')[0].decode()
     stat.starttime = UTCDateTime(self.start)
     stat.sampling_rate = self.rate
     stat.npts = len(self.data)
     return Trace(data=self.data, header=stat)
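The header fields here come from fixed-width, null-padded byte strings, so the padding is stripped before decoding; a tiny sketch of that idiom with made-up bytes:

net = b'IU\x00\x00\x00'                 # hypothetical padded network field
print(net.split(b'\x00')[0].decode())   # -> 'IU'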
Example no. 15
def read_specfem_seismogram(output_files, network, station, band):
    st = Stream()
    for component in 'ZNE':
        channel = '%sX%s' % (band, component)
        filename = os.path.join(
            output_files, '%s.%s.%s.sem.ascii' % (network, station, channel))
        tmp = np.genfromtxt(filename)

        stats = Stats()
        stats.network = network
        stats.station = station
        stats.channel = channel
        stats.delta = tmp[1, 0] - tmp[0, 0]
        stats.npts = tmp.shape[0]
        stats.starttime = tmp[0, 0]

        tr = Trace(tmp[:, 1], stats)
        st += tr

    return st
Example no. 16
def read_specfem_seismogram(output_files, network, station, band):
    st = Stream()
    for component in 'ZNE':
        channel = '%sX%s' % (band, component)
        filename = os.path.join(output_files,
                                '%s.%s.%s.sem.ascii' % (network, station,
                                                        channel))
        tmp = np.genfromtxt(filename)

        stats = Stats()
        stats.network = network
        stats.station = station
        stats.channel = channel
        stats.delta = tmp[1, 0] - tmp[0, 0]
        stats.npts = tmp.shape[0]
        stats.starttime = tmp[0, 0]

        tr = Trace(tmp[:, 1], stats)
        st += tr

    return st
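A hedged usage sketch for the reader above: the directory and station names are hypothetical, dummy files are generated to match the '<net>.<sta>.<band>X<comp>.sem.ascii' naming the function expects, and numpy/os plus the obspy Stream/Stats/Trace imports are assumed to be in scope:

import os
import numpy as np

outdir = 'specfem_demo'
os.makedirs(outdir, exist_ok=True)
t = np.linspace(0.0, 10.0, 501)
for comp in 'ZNE':
    fname = os.path.join(outdir, 'AA.S0001.BX%s.sem.ascii' % comp)
    np.savetxt(fname, np.column_stack((t, np.zeros_like(t))))

st = read_specfem_seismogram(outdir, 'AA', 'S0001', 'B')
print(st)   # three traces: AA.S0001..BX{Z,N,E}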
Example no. 17
def create_empty_trace(trace, channel):
    """
    Utility to create an empty trace, similar to another trace.

    Parameters
    ----------
    trace: obspy.core.Trace
        Trace that is source of most metadata, including array length.
    channel: String
        Channel name for created Trace.

    Returns
    -------
    obspy.core.Trace
        a Trace object, filled with numpy.nan.
    """
    stats = Stats(trace.stats)
    stats.channel = channel
    count = len(trace.data)
    numpy_data = numpy.full((count), numpy.nan)
    return Trace(numpy_data, stats)
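A quick sketch of the helper on ObsPy's bundled example data; Stats and numpy are assumed to be imported as above:

import numpy
from obspy import read

tr = read()[0]                        # any existing trace as the template
empty = create_empty_trace(tr, 'XXZ')
print(empty.stats.channel, numpy.isnan(empty.data).all())   # XXZ True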
Example no. 18
def readSLIST(filename, headonly=False, **kwargs):  # @UnusedVariable
    """
    Reads an ASCII SLIST file and returns an ObsPy Stream object.

    .. warning::
        This function should NOT be called directly, it registers via the
        ObsPy :func:`~obspy.core.stream.read` function, call this instead.

    :type filename: str
    :param filename: ASCII file to be read.
    :type headonly: bool, optional
    :param headonly: If set to True, read only the headers. This is most useful
        for scanning available data in huge (temporary) data sets.
    :rtype: :class:`~obspy.core.stream.Stream`
    :return: An ObsPy Stream object.

    .. rubric:: Example

    >>> from obspy import read
    >>> st = read('/path/to/slist.ascii')
    """
    with open(filename, 'rt') as fh:
        # read file and split text into channels
        buf = []
        key = False
        for line in fh:
            if line.isspace():
                # blank line
                continue
            elif line.startswith('TIMESERIES'):
                # new header line
                key = True
                buf.append((line, StringIO()))
            elif headonly:
                # skip data for option headonly
                continue
            elif key:
                # data entry - may be written in multiple columns
                buf[-1][1].write(line.strip() + ' ')
    # create ObsPy stream object
    stream = Stream()
    for header, data in buf:
        # create Stats
        stats = Stats()
        parts = header.replace(',', '').split()
        temp = parts[1].split('_')
        stats.network = temp[0]
        stats.station = temp[1]
        stats.location = temp[2]
        stats.channel = temp[3]
        stats.sampling_rate = parts[4]
        # quality only used in MSEED
        stats.mseed = AttribDict({'dataquality': temp[4]})
        stats.ascii = AttribDict({'unit': parts[-1]})
        stats.starttime = UTCDateTime(parts[6])
        stats.npts = parts[2]
        if headonly:
            # skip data
            stream.append(Trace(header=stats))
        else:
            data = _parse_data(data, parts[8])
            stream.append(Trace(data=data, header=stats))
    return stream
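The index arithmetic on parts and temp is easier to follow against a concrete TIMESERIES header line (this example line follows the ObsPy SLIST documentation):

header = ('TIMESERIES NL_HGN_00_BHZ_R, 12 samples, 40 sps, '
          '2003-05-29T02:13:22.043400, SLIST, INTEGER, Counts')
parts = header.replace(',', '').split()
temp = parts[1].split('_')
print(temp)        # ['NL', 'HGN', '00', 'BHZ', 'R'] -> net, sta, loc, chan, quality
print(parts[2])    # '12'      -> npts
print(parts[4])    # '40'      -> sampling_rate
print(parts[6])    # '2003-05-29T02:13:22.043400' -> starttime
print(parts[8])    # 'INTEGER' -> data type used by _parse_data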
Example no. 19
def readSLIST(filename, headonly=False, **kwargs):  # @UnusedVariable
    """
    Reads an ASCII SLIST file and returns an ObsPy Stream object.

    .. warning::
        This function should NOT be called directly, it registers via the
        ObsPy :func:`~obspy.core.stream.read` function, call this instead.

    :type filename: str
    :param filename: ASCII file to be read.
    :type headonly: bool, optional
    :param headonly: If set to True, read only the headers. This is most useful
        for scanning available data in huge (temporary) data sets.
    :rtype: :class:`~obspy.core.stream.Stream`
    :return: An ObsPy Stream object.

    .. rubric:: Example

    >>> from obspy.core import read
    >>> st = read('/path/to/slist.ascii')
    """
    fh = open(filename, 'rt')
    # read file and split text into channels
    headers = {}
    key = None
    for line in fh:
        if line.isspace():
            # blank line
            continue
        elif line.startswith('TIMESERIES'):
            # new header line
            key = line
            headers[key] = StringIO()
        elif headonly:
            # skip data for option headonly
            continue
        elif key:
            # data entry - may be written in multiple columns
            headers[key].write(line.strip() + ' ')
    fh.close()
    # create ObsPy stream object
    stream = Stream()
    for header, data in headers.items():
        # create Stats
        stats = Stats()
        parts = header.replace(',', '').split()
        temp = parts[1].split('_')
        stats.network = temp[0]
        stats.station = temp[1]
        stats.location = temp[2]
        stats.channel = temp[3]
        stats.sampling_rate = parts[4]
        # quality only used in MSEED
        stats.mseed = AttribDict({'dataquality': temp[4]})
        stats.ascii = AttribDict({'unit': parts[-1]})
        stats.starttime = UTCDateTime(parts[6])
        stats.npts = parts[2]
        if headonly:
            # skip data
            stream.append(Trace(header=stats))
        else:
            # parse data
            data.seek(0)
            if parts[8] == 'INTEGER':
                data = loadtxt(data, dtype='int', ndmin=1)
            elif parts[8] == 'FLOAT':
                data = loadtxt(data, dtype='float32', ndmin=1)
            else:
                raise NotImplementedError
            stream.append(Trace(data=data, header=stats))
    return stream
Example no. 20
def rf_test(phase,
            dip,
            rfloc='output/waveforms/RF',
            geom_file='3D.geom',
            decon_meth='it'):
    """
    Creates synthetic PRFs from Raysum data.

    Parameters
    ----------
    phase : string
        "P" or "S".
    dip : int
        Dip of the LAB in degrees; determines which files to use.
    rfloc : str, optional
        Parent directory in which the RFs are saved.
    geom_file : str, optional
        Filename of the geometry file.

    Returns
    -------
    rfs: list
        List of RFTrace objects. Will in addition be saved in SAC format.

    """
    # Determine filenames
    PSS_file = []
    for i in range(16):
        PSS_file.append('3D_' + str(dip) + '_' + str(i) + '.tr')

    # Read geometry
    baz, q, dN, dE = read_geom(geom_file, phase)

    # statlat = dN/(DEG2KM*1000)
    d = np.sqrt(np.square(dN) + np.square(dE))
    az = np.rad2deg(np.arccos(dN / d))
    i = np.where(dE < 0)
    az[i] = az[i] + 180
    statlat = []
    statlon = []
    for azimuth, delta in zip(az, d):
        if delta == 0:
            statlat.append(0)
            statlon.append(0)
            continue
        coords = Geodesic.WGS84.Direct(0, 0, azimuth, delta)
        statlat.append(coords["lat2"])
        statlon.append(coords["lon2"])
    # for n, longitude in enumerate(lon):
    #     y, _, _ = gps2dist_azimuth(latitude, 0, latitude, longitude)
    # statlon = dE/(DEG2KM*1000)
    rayp = q * DEG2KM * 1000

    # Read traces
    stream = []

    for f in PSS_file:
        PSS, dt, _, N, shift = read_raysum(phase, PSS_file=f)
        stream.append(PSS)

    streams = np.vstack(stream)
    del stream

    M = len(baz)

    if M != streams.shape[0]:
        raise ValueError(
            "Number of traces %d does not equal the number of backazimuths "
            "in the geom file (%d)" % (streams.shape[0], M))

    rfs = []
    odir = os.path.join(rfloc, phase, 'raysum', str(dip))
    ch = ['BHP', 'BHV', 'BHH']  # Channel names

    os.makedirs(odir, exist_ok=True)

    # Create RF objects
    for i, st in enumerate(streams):
        s = Stream()
        for j, tr in enumerate(st):
            stats = Stats()
            stats.npts = N
            stats.delta = dt
            stats.channel = ch[j]
            stats.network = 'RS'
            stats.station = str(dip)
            s.append(Trace(data=tr, header=stats))

        # Create info dictionary for rf creation
        info = {
            'onset': [UTCDateTime(0) + shift],
            'starttime': [UTCDateTime(0)],
            'statlat': statlat[i],
            'statlon': statlon[i],
            'statel': 0,
            'rayp_s_deg': [rayp[i]],
            'rbaz': [baz[i]],
            'rdelta': [np.nan],
            'ot_ret': [0],
            'magnitude': [np.nan],
            'evt_depth': [np.nan],
            'evtlon': [np.nan],
            'evtlat': [np.nan]
        }

        rf = createRF(s, phase=phase, method=decon_meth, info=info)

        # Write RF
        rf.write(os.path.join(odir, str(i) + '.sac'), format='SAC')
        rfs.append(rf)

    return rfs, statlat, statlon
Example no. 21
def _read_tspair(filename, headonly=False, **kwargs):  # @UnusedVariable
    """
    Reads an ASCII TSPAIR file and returns an ObsPy Stream object.

    .. warning::
        This function should NOT be called directly, it registers via the
        ObsPy :func:`~obspy.core.stream.read` function, call this instead.

    :type filename: str
    :param filename: ASCII file to be read.
    :type headonly: bool, optional
    :param headonly: If set to True, read only the headers. This is most useful
        for scanning available data in huge (temporary) data sets.
    :rtype: :class:`~obspy.core.stream.Stream`
    :return: An ObsPy Stream object.

    .. rubric:: Example

    >>> from obspy import read
    >>> st = read('/path/to/tspair.ascii')
    """
    with open(filename, 'rt') as fh:
        # read file and split text into channels
        buf = []
        key = False
        for line in fh:
            if line.isspace():
                # blank line
                continue
            elif line.startswith('TIMESERIES'):
                # new header line
                key = True
                buf.append((line, io.StringIO()))
            elif headonly:
                # skip data for option headonly
                continue
            elif key:
                # data entry - may be written in multiple columns
                buf[-1][1].write(line.strip().split()[-1] + ' ')
    # create ObsPy stream object
    stream = Stream()
    for header, data in buf:
        # create Stats
        stats = Stats()
        parts = header.replace(',', '').split()
        temp = parts[1].split('_')
        stats.network = temp[0]
        stats.station = temp[1]
        stats.location = temp[2]
        stats.channel = temp[3]
        stats.sampling_rate = parts[4]
        # quality only used in MSEED
        # don't put blank quality code into 'mseed' dictionary
        # (quality code is mentioned as optional by format specs anyway)
        if temp[4]:
            stats.mseed = AttribDict({'dataquality': temp[4]})
        stats.ascii = AttribDict({'unit': parts[-1]})
        stats.starttime = UTCDateTime(parts[6])
        stats.npts = parts[2]
        if headonly:
            # skip data
            stream.append(Trace(header=stats))
        else:
            data = _parse_data(data, parts[8])
            stream.append(Trace(data=data, header=stats))
    return stream
Example no. 22
def readTSPAIR(filename, headonly=False, **kwargs):  # @UnusedVariable
    """
    Reads an ASCII TSPAIR file and returns an ObsPy Stream object.

    .. warning::
        This function should NOT be called directly, it registers via the
        ObsPy :func:`~obspy.core.stream.read` function, call this instead.

    :type filename: str
    :param filename: ASCII file to be read.
    :type headonly: bool, optional
    :param headonly: If set to True, read only the headers. This is most useful
        for scanning available data in huge (temporary) data sets.
    :rtype: :class:`~obspy.core.stream.Stream`
    :return: An ObsPy Stream object.

    .. rubric:: Example

    >>> from obspy import read
    >>> st = read('/path/to/tspair.ascii')
    """
    fh = open(filename, "rt")
    # read file and split text into channels
    headers = {}
    key = None
    for line in fh:
        if line.isspace():
            # blank line
            continue
        elif line.startswith("TIMESERIES"):
            # new header line
            key = line
            headers[key] = StringIO()
        elif headonly:
            # skip data for option headonly
            continue
        elif key:
            # data entry - may be written in multiple columns
            headers[key].write(line.strip().split()[-1] + " ")
    fh.close()
    # create ObsPy stream object
    stream = Stream()
    for header, data in headers.items():
        # create Stats
        stats = Stats()
        parts = header.replace(",", "").split()
        temp = parts[1].split("_")
        stats.network = temp[0]
        stats.station = temp[1]
        stats.location = temp[2]
        stats.channel = temp[3]
        stats.sampling_rate = parts[4]
        # quality only used in MSEED
        stats.mseed = AttribDict({"dataquality": temp[4]})
        stats.ascii = AttribDict({"unit": parts[-1]})
        stats.starttime = UTCDateTime(parts[6])
        stats.npts = parts[2]
        if headonly:
            # skip data
            stream.append(Trace(header=stats))
        else:
            data = _parse_data(data, parts[8])
            stream.append(Trace(data=data, header=stats))
    return stream
Example no. 23
    def save_wave(self):

        # Fetch a wave from Ring 0
        wave = self.ring2buff.get_wave(0)

        # if wave is empty return
        if wave == {}:
            return

        # Lets try to buffer with python dictionaries and obspy
        name = wave["station"] + '.' + wave["channel"] + '.' + wave[
            "network"] + '.' + wave["location"]

        if name in self.wave_buffer:

            # Determine max samples for buffer
            max_samp = wave["samprate"] * 60 * self.minutes

            # Create a header:
            wavestats = Stats()
            wavestats.station = wave["station"]
            wavestats.network = wave["network"]
            wavestats.channel = wave["channel"]
            wavestats.location = wave["location"]
            wavestats.sampling_rate = wave["samprate"]
            wavestats.starttime = UTCDateTime(wave['startt'])

            # Create a trace
            wavetrace = Trace(header=wavestats)
            wavetrace.data = wave["data"]

            # Try to append data to the buffer; shut down if a gap is detected.
            try:
                self.wave_buffer[name].append(wavetrace,
                                              gap_overlap_check=True)
            except TypeError as err:
                logger.warning(err)
                self.runs = False
            except Exception:
                self.runs = False
                raise

            # Debug data
            if self.debug:
                logger.info("Station Channel combo is in buffer:")
                logger.info(name)
                logger.info("Size:")
                logger.info(self.wave_buffer[name].count())
                logger.debug("Data:")
                logger.debug(self.wave_buffer[name])

        else:
            # First instance of data in buffer, create a header:
            wavestats = Stats()
            wavestats.station = wave["station"]
            wavestats.network = wave["network"]
            wavestats.channel = wave["channel"]
            wavestats.location = wave["location"]
            wavestats.sampling_rate = wave["samprate"]
            wavestats.starttime = UTCDateTime(wave['startt'])

            # Create a trace
            wavetrace = Trace(header=wavestats)
            wavetrace.data = wave["data"]

            # Create a RTTrace
            rttrace = RtTrace(int(self.minutes * 60))
            self.wave_buffer[name] = rttrace

            # Append data
            self.wave_buffer[name].append(wavetrace, gap_overlap_check=True)

            # Debug data
            if self.debug:
                logger.info("First instance of station/channel:")
                logger.info(name)
                logger.info("Size:")
                logger.info(self.wave_buffer[name].count())
                logger.debug("Data:")
                logger.debug(self.wave_buffer[name])
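The buffering itself relies on obspy.realtime.RtTrace, which keeps a rolling window of at most max_length seconds and accepts ordinary Trace objects via append(). A minimal sketch on the bundled example data:

from obspy import read
from obspy.realtime import RtTrace

rt = RtTrace(max_length=60)              # keep at most 60 s of data
tr = read()[0]
rt.append(tr, gap_overlap_check=True)
print(rt.count())                        # samples currently in the buffer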
Example no. 24
    stats.starttime = sttime
    stats.station = station
    stats.network = 'NT'
    stats.location = 'R0'
    stats.data_interval = '256Hz'
    stats.delta = .00390625
    stats.data_type = 'variation'

    # Create list of arrays and channel names and initialize counter k
    arrays = [Hx, Hy, Ex, Ey]
    k = 0

    # Loop over channels to create an obspy stream of the data
    for ar in arrays:
        stats.npts = len(ar)
        stats.channel = channels[k]
        ar = np.asarray(ar)
        trace = Trace(ar, stats)
        stream += trace
        trace = None
        k += 1

    # Create a copy of the stream and resample the copied stream to
    # 10 Hz using the default options of the obspy function resample
    finStream = stream.copy()
    finStream.resample(10.0)

    # Create numpy arrays of the resampled data
    Hx_fin = finStream.select(channel='Hx')[0].data
    Hy_fin = finStream.select(channel='Hy')[0].data
    Ex_fin = finStream.select(channel='Ex')[0].data
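The resampling step at the end can be reproduced on ObsPy's bundled example data (a minimal sketch; the 10 Hz target matches the fragment above):

from obspy import read

st = read()                  # bundled 100 Hz example stream
low = st.copy()
low.resample(10.0)           # default options of Stream.resample
print(low.select(channel='EHZ')[0].stats.sampling_rate)   # 10.0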