Example #1
def process_traces(config, st):
    """Remove mean, deconvolve and ignore unwanted components."""
    out_st = Stream()
    for id in sorted(set(tr.id for tr in st)):
        # We still use a stream, since the trace can have
        # gaps or overlaps
        st_sel = st.select(id=id)
        network, station, location, channel = id.split('.')
        # build a list of all possible ids, from station only
        # to full net.sta.loc.chan
        ss = [
            station,
        ]
        ss.append('.'.join((network, station)))
        ss.append('.'.join((network, station, location)))
        ss.append('.'.join((network, station, location, channel)))
        if config.use_stations is not None:
            combined = ("(" + ")|(".join(config.use_stations) + ")").replace(
                '.', '\.')
            if not any(re.match(combined, s) for s in ss):
                logger.warning('%s: ignored from config file' % id)
                continue
        if config.ignore_stations is not None:
            combined = ("(" + ")|(".join(config.ignore_stations) +
                        ")").replace('.', '\.')
            if any(re.match(combined, s) for s in ss):
                logger.warning('%s: ignored from config file' % id)
                continue
        try:
            _add_hypo_dist_and_arrivals(config, st_sel)
            trace = _merge_stream(config, st_sel)
            trace.stats.ignore = False
            trace_process = _process_trace(config, trace)
            out_st.append(trace_process)
        except (ValueError, RuntimeError):
            continue

    if len(out_st) == 0:
        logger.error('No traces left! Exiting.')
        ssp_exit()

    # Rotate traces, if SH or SV is requested
    if config.wave_type in ['SH', 'SV']:
        for id in sorted(set(tr.id[:-1] for tr in out_st)):
            net, sta, loc, chan = id.split('.')
            st_sel = out_st.select(network=net,
                                   station=sta,
                                   location=loc,
                                   channel=chan + '?')
            t0 = max(tr.stats.starttime for tr in st_sel)
            t1 = min(tr.stats.endtime for tr in st_sel)
            st_sel.trim(t0, t1)
            st_sel.rotate('NE->RT')

    return out_st
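A minimal sketch of the include/ignore matching used above, with hypothetical pattern lists; the use_stations entries are joined into one alternation, with dots escaped so they match literally:

import re

# Hypothetical config values, following the convention used in process_traces.
use_stations = ['IV.MRLC', 'STN5']
# Candidate ids built from station only up to full net.sta.loc.chan.
ss = ['MRLC', 'IV.MRLC', 'IV.MRLC.00', 'IV.MRLC.00.HHZ']

combined = ("(" + ")|(".join(use_stations) + ")").replace('.', r'\.')
print(any(re.match(combined, s) for s in ss))  # True -> trace is kept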
Example #2
File: core.py Project: znamy/obspy
def _read_pdas(filename, **kwargs):
    """
    Reads a PDAS file and returns an ObsPy Stream object.

    .. warning::
        This function should NOT be called directly, it registers via the
        ObsPy :func:`~obspy.core.stream.read` function, call this instead.

    :type filename: str
    :param filename: PDAS file to be read.
    :rtype: :class:`~obspy.core.stream.Stream`
    :return: An ObsPy Stream object.

    .. rubric:: Example

    >>> from obspy import read
    >>> st = read("/path/to/p1246001.108")
    >>> st  # doctest: +ELLIPSIS
    <obspy.core.stream.Stream object at 0x...>
    >>> print(st)  # doctest: +ELLIPSIS
    1 Trace(s) in Stream:
    ... | 1994-04-18T00:00:00.000000Z - ... | 200.0 Hz, 500 samples
    """
    extra_headers = {}
    with open(filename, "rb") as fh:
        items = [fh.readline().split() for i_ in range(11)]
        data = fh.read()
    for i_ in (0, 1, 2, 3, 7, 8, 9):
        extra_headers[items[i_][0].decode()] = items[i_][1].decode()
    month, day, year = items[4][1].decode().split("-")
    if UTCDateTime().year > 2050:
        raise NotImplementedError()
    if len(year) == 2:
        if int(year) < 50:
            year = "20" + year
        else:
            year = "19" + year
    time = items[5][1].decode()
    t = UTCDateTime("%s-%s-%sT%s" % (year, month, day, time))
    sampling_rate = 1.0 / float(items[6][1].decode())
    dtype = items[1][1].decode()
    if dtype.upper() == "LONG":
        data = from_buffer(data, dtype=np.int16)
    elif dtype.upper() == "SHORT":
        data = from_buffer(data, dtype=np.int8)
    else:
        raise NotImplementedError()

    tr = Trace(data=data)
    tr.stats.starttime = t
    tr.stats.sampling_rate = sampling_rate
    tr.stats._format = "PDAS"
    tr.stats.pdas = extra_headers
    st = Stream(traces=[tr])
    return st
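The two-digit year expansion above pivots at 50; the same rule as a standalone sketch:

def expand_year(year):
    # Pivot-at-50 rule, as in _read_pdas: 00-49 -> 20xx, 50-99 -> 19xx.
    if len(year) == 2:
        year = "20" + year if int(year) < 50 else "19" + year
    return year

assert expand_year("94") == "1994"
assert expand_year("08") == "2008"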
Example #3
def QC_streams(start, end, st):

    # Check start times
    if not np.all([tr.stats.starttime == start for tr in st]):
        print("* Start times are not all close to true start: ")
        [print("*   "+tr.stats.channel+" " +
               str(tr.stats.starttime)+" " +
               str(tr.stats.endtime)) for tr in st]
        print("*   True start: "+str(start))
        print("* -> Shifting traces to true start")
        delay = [tr.stats.starttime - start for tr in st]
        st_shifted = Stream(
            traces=[traceshift(tr, dt) for tr, dt in zip(st, delay)])
        st = st_shifted.copy()

    # # Check sampling rate
    # sr = st[0].stats.sampling_rate
    # sr_round = float(floor_decimal(sr, 0))
    # if not sr == sr_round:
    #     print("* Sampling rate is not an integer value: ", sr)
    #     print("* -> Resampling")
    #     st.resample(sr_round, no_filter=False)

    # Try trimming
    dt = st[0].stats.delta
    try:
        st.trim(start, end-dt, fill_value=0., pad=True)
    except Exception:
        print("* Unable to trim")
        print("* -> Skipping")
        print("**************************************************")
        return False, None

    # Check final lengths - they should all be equal if start times
    # and sampling rates are all equal and traces have been trimmed
    sr = st[0].stats.sampling_rate
    if not np.allclose([tr.stats.npts for tr in st[1:]], st[0].stats.npts):
        print("* Lengths are incompatible: ")
        [print("*     "+str(tr.stats.npts)) for tr in st]
        print("* -> Skipping")
        print("**************************************************")

        return False, None

    elif not np.allclose([st[0].stats.npts], int((end - start)*sr), atol=1):
        print("* Length is too short: ")
        print("*    "+str(st[0].stats.npts) +
              " ~= "+str(int((end - start)*sr)))
        print("* -> Skipping")
        print("**************************************************")

        return False, None

    else:
        return True, st
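traceshift is an external helper not shown here; a minimal stand-in, assuming a metadata-only shift (the real helper may interpolate to achieve sub-sample shifts):

def traceshift(tr, dt):
    # Minimal sketch: return a copy of tr with its start time moved back
    # by dt seconds, so a trace starting late lines up with the true start.
    tr_shifted = tr.copy()
    tr_shifted.stats.starttime -= dt
    return tr_shifted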
Example #4
def find_longest_equal(d, p):
    """
    Find the longest time and return the new Stream
    :param d,p: a Stream object has several time slice
    :return d_slice,p_slice: Stream object (For remove response)
    :exception pressure date not coherent with displacement
                data length too short
    """
    d = d.traces
    index = 0
    tempT = d[0]

    for i in range(len(d)):
        if len(d[i]) > len(tempT):
            index = i
            tempT = d[i]

    d_slice = d[index]

    #  the admittance of each day is based on 9 to 30 2000 s long time series
    # 9*2000 = 18000
    if d_slice.meta.endtime - d_slice.meta.starttime < 18000:
        print(d_slice.meta.endtime - d_slice.meta.starttime)
        print("SHORT")
        raise AttributeError('data length too short')

    p_slice = p.slice(d_slice.meta.starttime, d_slice.meta.endtime)
    # Must be a single trace
    if len(p_slice.traces) != 1:
        print("MUCH")
        raise AttributeError('pressure data not coherent with displacement')

    p_slice = p_slice.traces[0]

    sta = max(p_slice.meta.starttime, d_slice.meta.starttime)
    end = min(p_slice.meta.endtime, d_slice.meta.endtime)

    if end - sta < 18000:
        print "SHORT"
        raise AttributeError

    return (Stream().append(d_slice).slice(sta, end),
            Stream().append(p_slice).slice(sta, end))
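A quick synthetic usage sketch (with the Python 3 form above); both inputs cover the same 30000 s window, comfortably above the 18000 s minimum:

import numpy as np
from obspy import Trace, Stream, UTCDateTime

header = {'sampling_rate': 1.0, 'starttime': UTCDateTime(0)}
d = Stream([Trace(data=np.random.randn(30000), header=header)])
p = Stream([Trace(data=np.random.randn(30000), header=header)])
d_slice, p_slice = find_longest_equal(d, p)  # two single-trace Streams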
Example #5
def test_Porter2011():
    import matplotlib
    matplotlib.use('Agg')
    import numpy as np
    from obspy.core import Stream
    from telewavesim import utils as ut
    from telewavesim import wiggle as wg
    import tempfile
    from os.path import join
    from pkg_resources import resource_filename

    modfile = resource_filename('telewavesim',
                                'examples/models/model_Porter2011.txt')
    wvtype = 'P'
    npts = 3000  # Number of samples
    dt = 0.01  # Sample distance in seconds
    slow = 0.06  # Horizontal slowness (or ray parameter) in s/km
    baz = np.arange(0., 360., 10.)
    model = ut.read_model(modfile)
    trR = Stream()
    trT = Stream()
    # Loop over back-azimuth values
    for bb in baz:
        # Calculate the plane waves seismograms
        trxyz = ut.run_plane(model, slow, npts, dt, bb, wvtype=wvtype,
                             obs=False)
        # Then the transfer functions in Z-R-T coordinate system
        tfs = ut.tf_from_xyz(trxyz, pvh=False)
        # Append to streams
        trR.append(tfs[0])
        trT.append(tfs[1])
    # Set frequency corners in Hz
    f1 = 0.01
    f2 = 1.0
    # Filter to get wave-like traces
    trR.filter('bandpass', freqmin=f1, freqmax=f2, corners=2, zerophase=True)
    trT.filter('bandpass', freqmin=f1, freqmax=f2, corners=2, zerophase=True)
    # Stack over all traces
    trR_stack, trT_stack = ut.stack_all(trR, trT, pws=True)
    # Plot as wiggles
    with tempfile.TemporaryDirectory() as tempdir:
        wg.rf_wiggles_baz(trR, trT, trR_stack, trT_stack, 'test', btyp='baz',
                          scale=1.e3, tmin=-5., tmax=8., save=True,
                          ftitle=join(tempdir, 'porter2011.png'),
                          wvtype='P')
Example #6
    def get_waveforms(self,
                      network,
                      station,
                      location,
                      channel,
                      starttime,
                      endtime,
                      automerge=False,
                      trace_count_threshold=200):

        starttime = UTCDateTime(starttime).timestamp
        endtime = UTCDateTime(endtime).timestamp

        query = "select * from wdb where net='%s' and sta='%s' and loc='%s' and cha='%s' " \
                %(network, station, location, channel) + \
                "and et>=%f and st<=%f" \
                 % (starttime, endtime)

        rows = self.conn.execute(query).fetchall()
        s = Stream()

        if len(rows) > trace_count_threshold:
            return s


        for row in rows:
            ds_id, net, sta, loc, cha, st, et, tag = row
            station_data = self.asdf_datasets[ds_id].waveforms['%s.%s' %
                                                               (net, sta)]
            try:
                s += station_data[tag]
            except Exception:
                pass
            # end try
        # end for

        if automerge:
            try:
                s.merge(method=-1)
            except Exception:
                pass
            # end try
        # end if

        # Trim traces
        for t in s:
            t.trim(starttime=UTCDateTime(starttime),
                   endtime=UTCDateTime(endtime))
        # end for

        return s
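The query above is built with % interpolation; a safer sketch of the same lookup with DB-API placeholders, shown against a throwaway sqlite3 connection (table layout and values assumed from the row unpacking above):

import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute("create table wdb (ds_id, net, sta, loc, cha, st, et, tag)")
query = ("select * from wdb where net=? and sta=? and loc=? and cha=? "
         "and et>=? and st<=?")
rows = conn.execute(query,
                    ('AU', 'ARMA', '', 'BHZ', 0.0, 3600.0)).fetchall()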
Example #7
    def test_setVersion(self):
        """
        Tests if SAC version is set when writing
        """
        tempfile = NamedTemporaryFile().name
        np.random.seed(815)
        st = Stream([Trace(data=np.random.randn(1000))])
        st.write(tempfile, format="SAC")
        st2 = read(tempfile, format="SAC")
        os.remove(tempfile)
        self.assertEqual(st2[0].stats['sac'].nvhdr, 6)
Example #8
def test__set_metadata():
    """edge_test.EdgeFactory_test.test__set_metadata()
    """
    # Call _set_metadata with 2 traces,  and make certain the stats get
    # set for both traces.
    trace1 = Trace()
    trace2 = Trace()
    stream = Stream(traces=[trace1, trace2])
    EdgeFactory()._set_metadata(stream, "BOU", "H", "variation", "minute")
    assert_equal(stream[0].stats["channel"], "H")
    assert_equal(stream[1].stats["channel"], "H")
Example #10
def tracebufs2obspyStream(tbuflist):
    """
    Returns obspy.Stream object from input list of tracebuf2 objects
    """
    if not tbuflist:
        return None
    tlist = []
    for tb in tbuflist:
        tlist.append(tb.getObspyTrace())
    strm = Stream(tlist)
    return strm
Example #11
def plot_stack(trace,
               times,
               description,
               offset_before=0.5,
               offset_after=1.5,
               plot_span=False,
               filebase=None,
               show=True):
    """
    Plot a stack of time series from one trace

    trace is an obspy Trace object
    title is the title to put on the graph
    outfile is the filename to write the plot out to (None = print to screen)
    times is a list of UTCDateTimes
    plot_span: whether to plot a time series spanning the first to last time
    """
    title = description + f', orientation_code="{trace.stats.channel[-1]}"'
    assert offset_before >= 0,\
        f'plot_stack "{description}": offset_before < 0 ({offset_before:g})'
    assert offset_after > 0,\
        f'plot_stack "{description}": offset_after <= 0 ({offset_after:g})'
    if plot_span:
        _plot_span(times, Stream(traces=[trace]))
    print(f'Plotting stack "{title}"')
    colors = plt.cm.rainbow(np.linspace(0, 1, len(times)))
    offset_vertical = 0
    # time_zero = UTCDateTime(times[0])
    ax = plt.axes()
    max_val = 0
    # Set up y axis range
    for time in times:
        temp = trace.slice(time - offset_before, time + offset_after)
        if abs(temp.max()) > max_val:
            max_val = abs(temp.max())
    # Plot the subtraces
    for time, c in zip(times, colors):
        offset_vertical += max_val
        t = trace.slice(time - offset_before, time + offset_after)
        ax.plot(t.times("utcdatetime") - time,
                t.data - offset_vertical,
                color=c,
                label=time.strftime('%H:%M:%S') +
                '.{:02d}'.format(int(time.microsecond / 1e4)))
        offset_vertical += max_val
    ax.set_title(title)
    ax.grid()
    ax.legend()
    if filebase:
        plt.savefig(filebase + '_' + get_valid_filename(description) +
                    '_stack.png')
    if show:
        plt.show()
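A hypothetical call with a synthetic one-hour trace and three event times:

import numpy as np
from obspy import Trace, UTCDateTime

tr = Trace(data=np.random.randn(36000),
           header={'sampling_rate': 10.0, 'starttime': UTCDateTime(0),
                   'channel': 'BHZ'})
times = [UTCDateTime(600), UTCDateTime(1200), UTCDateTime(1800)]
plot_stack(tr, times, 'synthetic bursts', show=False)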
Example #12
def create_random_trace(sampling_rate=40.0, duration=5.0, **header_kwargs):
    header = {
        'sampling_rate': sampling_rate,
        'starttime': UTCDateTime(0),
        'channel': 'Z',
        'station': 'test'
    }
    header = {**header, **header_kwargs}
    data = np.random.uniform(-1, 1, (int(duration * sampling_rate)))
    trace = Trace(data=data, header=header)
    trace.stats.data_type = 'test'
    return Stream(traces=[trace])
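Usage sketch: the **header_kwargs pass-through lets a caller override any default Stats field:

st = create_random_trace(sampling_rate=100.0, duration=2.0,
                         channel='HHZ', station='STA1')
assert st[0].stats.npts == 200
assert st[0].stats.channel == 'HHZ'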
Example #13
    def _writeData(self, traceData, stats, timeObj):
        streamObj = Stream([Trace(data=traceData, header=stats)])

        filename = self._prepareFilename(timeObj)
        offset = int(np.mean(streamObj.traces[0].data))
        streamObj.traces[0].data = np.array(
            [x - offset for x in streamObj.traces[0].data])

        self.logger.debug("[" + strftime('%X') +
                          "] Saving %d samples (corrected by %d) to %s..." %
                          (len(traceData), offset, filename))
        streamObj.write(filename, format='MSEED')
Example #14
def read_sds(event,
             sds_root,
             phase="P",
             component="Z",
             trace_length=30,
             sample_rate=100,
             random_time=0):
    stream = Stream()
    client = Client(sds_root=sds_root)
    for pick in event.picks:
        if not pick.phase_hint == phase:
            print("Skip " + pick.phase_hint + " phase pick")
            continue

        if not pick.waveform_id.channel_code[-1] == component:
            print(pick.waveform_id.channel_code)
            continue

        t = event.origins[0].time

        if pick.time > t + trace_length:
            t = pick.time - trace_length + 5
            print("origin: " + t.isoformat() + " pick: " +
                  pick.time.isoformat())

        if random_time:
            t = t - random_time + np.random.random_sample() * random_time * 2

        net = "*"
        sta = pick.waveform_id.station_code
        loc = "*"
        chan = "??" + component

        st = client.get_waveforms(net, sta, loc, chan, t, t + trace_length + 1)

        if st.traces:
            trace = st.traces[0]
            trace = signal_preprocessing(trace)

            points = trace_length * sample_rate + 1
            try:
                trim_trace(trace, points)

            except Exception as err:
                print(err)
                continue

            stream += trace

        else:
            print("No trace in ", t.isoformat(), net, sta, loc, chan)

    return stream
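signal_preprocessing and trim_trace are helpers defined elsewhere; a minimal trim_trace sketch, assuming it pads or cuts to an exact sample count:

def trim_trace(trace, points):
    # Minimal sketch: zero-pad/cut so the trace holds exactly `points` samples.
    start = trace.stats.starttime
    end = start + (points - 1) * trace.stats.delta
    trace.trim(start, end, pad=True, fill_value=0)
    if trace.stats.npts != points:
        raise ValueError('could not trim to %d points' % points)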
Example #15
def _read_SES3D(fh, headonly=False):
    """
    Internal SES3D parsing routine.
    """
    # Import here to avoid circular imports.
    from obspy.core import AttribDict, Trace, Stream

    # Read the header.
    component = fh.readline().split()[0].lower()
    npts = int(fh.readline().split()[-1])
    delta = float(fh.readline().split()[-1])
    # Skip receiver location line.
    fh.readline()
    rec_loc = fh.readline().split()
    rec_x, rec_y, rec_z = list(map(float,
                                   [rec_loc[1], rec_loc[3], rec_loc[5]]))
    # Skip the source location line.
    fh.readline()
    src_loc = fh.readline().split()
    src_x, src_y, src_z = list(map(float,
                                   [src_loc[1], src_loc[3], src_loc[5]]))

    # Read the data.
    if headonly is False:
        data = np.array(list(map(float, fh.readlines())), dtype="float32")
    else:
        data = np.array([])

    ses3d = AttribDict()
    ses3d.receiver_latitude = rotations.colat2lat(rec_x)
    ses3d.receiver_longitude = rec_y
    ses3d.receiver_depth_in_m = rec_z
    ses3d.source_latitude = rotations.colat2lat(src_x)
    ses3d.source_longitude = src_y
    ses3d.source_depth_in_m = src_z

    header = {
        "delta": delta,
        "channel": COMPONENT_MAP[component],
        "ses3d": ses3d,
        "npts": npts
    }

    # Set up ObsPy Stream/Trace structure.
    tr = Trace(data=data, header=header)

    # Small check.
    if headonly is False and npts != tr.stats.npts:
        msg = "The sample count specified in the header does not match " + \
            "the actual data count."
        warnings.warn(msg)
    return Stream(traces=[tr])
Example #16
def stream_seishub_read(host="localhost", port=8080, timeout=100,
                 start_time="2010-01-01 00:20:03", time_interval=30,
                 network_id="PF", station_id="", location_id="",
                 channel_id="HLE", get_paz=False, remove_mean=False,
                 remove_trend=False):
    """ Seishub server client.

    For a detailed description of how it works refer to ObsPy website
    (obspy.org)

    """

    client = Client_seis(base_url="http://" + host + ':' + str(port),
                         timeout=timeout)
    t = UTCDateTime(start_time)

    st = Stream()

    if station_id == "":
        st = client.waveform.getWaveform(network_id, str(station_id),
                                         location_id,
                                         channel_id, t, t + time_interval)
    else:
        for station in station_id:
            try:
                st += client.waveform.getWaveform(network_id, str(station),
                                                  location_id,
                                                  channel_id,
                                                  t,
                                                  t + time_interval)
            except Exception:
                pass

    if len(st) > 0:
        if remove_trend:
            st = stream_detrend(st)

        if remove_mean:
            st = stream_demean(st)

        st.merge(method=1, fill_value=0, interpolation_samples=1)
        n_trace = len(st)
    else:
        n_trace = 0

    if get_paz:
        paz = client.station.getPAZ(network_id, station_id, t)
        return st, paz, n_trace
    else:
        return st, n_trace

Example #17
    def __init__(self, filename=None, headonly=False, dummy=False):
        if dummy:
            self.stream = self.set_dummy()
        else:
            if not os.path.isfile(filename):
                raise Vol12Error("Can't find file %s." % filename)
            self.fn = filename
            self.f = open(self.fn, 'r')
            self.stream = Stream()
            self.headonly = headonly
            for i in range(3):
                self.stream += self.read_comp(self.f)
            self.f.close()
Example #18
    def test_bugfix_setStats(self):
        """
        Test related to issue #4.
        """
        st = Stream([Trace()])
        st += st
        # change stats attributes
        st[0].stats.station = 'AAA'
        st[1].stats['station'] = 'BBB'
        self.assertEqual(st[0].stats.station, 'BBB')
        self.assertEqual(st[0].stats['station'], 'BBB')
        self.assertEqual(st[1].stats['station'], 'BBB')
        self.assertEqual(st[1].stats.station, 'BBB')
Example #19
def _build_weight_st(config, spec_st, specnoise_st):
    """Build the weight spectrum."""
    weight_st = Stream()
    spec_ids = set(sp.id[:-1] for sp in spec_st if not sp.stats.ignore)
    for specid in spec_ids:
        try:
            spec_h = _select_spectra(spec_st, specid + 'H')[0]
            specnoise_h = _select_spectra(specnoise_st, specid + 'H')[0]
        except Exception:
            continue
        weight = _build_weight(config, spec_h, specnoise_h)
        weight_st.append(weight)
    return weight_st
Example #20
def linear_ramp_trend(sampling_rate=40.0, duration=5.0, height=1.0, mean=0.5):
    header = {
        'sampling_rate': sampling_rate,
        'starttime': UTCDateTime(0),
        'channel': 'Z',
        'station': 'test'
    }
    data = np.random.uniform(-1, 1, (int(duration * sampling_rate)))
    data += np.linspace(0, height, num=int(duration * sampling_rate))
    data += mean * np.ones(int(duration * sampling_rate))
    trace = Trace(data=data, header=header)
    trace.stats.data_type = 'test'
    return Stream(traces=[trace])
Example #21
def create_triangle_trace(sampling_rate=40.0, duration=5.0, **header_kwargs):
    header = {
        'sampling_rate': sampling_rate,
        'starttime': UTCDateTime(0),
        'channel': 'Z',
        'station': 'test'
    }
    header = {**header, **header_kwargs}
    data = signal.triang(int(duration * sampling_rate))
    trace = Trace(data=data, header=header)
    trace.stats.data_type = 'test'
    return Stream(traces=[trace])
Example #22
def create_impulse_stream(sampling_rate=40.0, duration=5.0, **header_kwargs):
    header = {
        'sampling_rate': sampling_rate,
        'starttime': UTCDateTime(0),
        'channel': 'Z',
        'station': 'test'
    }
    header = {**header, **header_kwargs}
    data = np.zeros(int(duration * sampling_rate))
    data[0] = 1
    trace = Trace(data=data, header=header)
    trace.stats.data_type = 'test'
    return Stream(traces=[trace])
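Usage sketch for the synthetic factories above: filtering the impulse stream yields the filter's impulse response:

st = create_impulse_stream(sampling_rate=40.0, duration=5.0, channel='HHZ')
st.filter('lowpass', freq=5.0, corners=4)  # data is now the impulse response
assert st[0].stats.npts == 200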
Example #23
    def _read_data(self):
        # Read data
        data = [None for _ in self.station.name]
        for i in range(self.station.nsta):
            st = Stream()
            for c in self.header.component:
                st.append(
                    read('%s/%s.%c.dat' %
                         (self.header.datadir, self.station.name[i], c),
                         format='SAC')[0])
            data[i] = st

        return data
Example #24
def add_arrival_to_obspy_trace(obspy_trace, reformat_p_file):
    from obspy.core import Stream
    station = obspy_trace.stats.station
    arrival = list(filter(lambda sta: sta['station'] == station, reformat_p_file))
    obspy_trace.stats.o = reformat_p_file[0]["o"]
    obspy_trace.stats.evla = reformat_p_file[0]["evla"]
    obspy_trace.stats.evlo = reformat_p_file[0]["evlo"]
    obspy_trace.stats.evdp = reformat_p_file[0]["evdp"]
    obspy_trace.stats.mag = reformat_p_file[0]["mag"]
    if arrival:
        obspy_trace.stats.t1 = arrival[0]["t1"]
        obspy_trace.stats.t2 = arrival[0]["t2"]
    return Stream(traces=[obspy_trace])
Example #25
def axisem2mseed(path):
    """
    change .dat files into MSEED format
    """
    global test_param

    if not os.path.isdir(os.path.join(path, 'MSEED')):
        os.mkdir(os.path.join(path, 'MSEED'))
    else:
        print('Following directory already exists:')
        print(os.path.join(path, 'MSEED'))
        sys.exit()

    t = UTCDateTime(0)
    traces = []

    for file in glob.iglob(os.path.join(path, '*.dat')):
        stationID = file.split('/')[-1].split('_')[0]
        networkID = file.split('/')[-1].split('_')[1]
        chan = file.split('/')[-1].split('_')[-1].split('.')[0]
        if chan == 'E': chan = 'BHE'
        elif chan == 'N': chan = 'BHN'
        elif chan == 'Z': chan = 'BHZ'
        try:
            dat = np.loadtxt(file)
            npts = len(dat[:,0])
            stats = {'network': networkID,
                     'station': stationID,
                     'location': '',
                     'channel': chan,
                     'npts': npts,
                     'sampling_rate': (npts - 1.)/(dat[-1,0] - dat[0,0]),
                     'starttime': t + dat[0,0],
                     'mseed' : {'dataquality': 'D'}}
            st = Stream(Trace(data=dat[:,1], header=stats))
            if test_param['convSTF'] == 'Y':
                sigma =  test_param['halfduration'] / np.sqrt(2.) / 3.5
                convSTF(st, sigma=sigma)
            if test_param['filter'] == 'Y':
                st.filter('lowpass', freq=test_param['fmax'], corners=2)
                st.filter('lowpass', freq=test_param['fmax'], corners=2)
                st.filter('lowpass', freq=test_param['fmax'], corners=2)
                st.filter('lowpass', freq=test_param['fmax'], corners=2)
                st.filter('highpass', freq=test_param['fmin'], corners=2)
                st.filter('highpass', freq=test_param['fmin'], corners=2)
            fname =  os.path.join(path, 'MSEED', 'dis.' + stationID + '..' + chan)
            st.write(fname, format='MSEED')
        except Exception as e:
            print(e)
            print(networkID + '.' + stationID + '.' + chan + '.mseed')
            print('-------------------------------------------------')
Example #26
def write_seisan(filename, args):
    """ writes seisan file from baikal one """
    bf = BaikalFile(filename)
    if not bf.valid:
        print("Invalid file {}".format(filename))
        return
    header = bf.MainHeader
    # datetime
    date = datetime.datetime(header["year"], header["month"], header["day"])
    delta = datetime.timedelta(seconds=header["to"])
    dt = date + delta
    _time = dt.time()  # time
    # make utc datetime
    utcdatetime = UTCDateTime(date.year,
                              date.month,
                              date.day,
                              _time.hour,
                              _time.minute,
                              _time.second,
                              _time.microsecond,
                              precision=3)
    bf.traces = bf.traces.astype(np.int32)
    bf.traces = bf.traces[:3]
    traces = []
    for channel, data in zip(CHANNELS, bf.traces):
        stats = DEFAULT_STATS.copy()
        stats.update({
            "station": header['station'].upper()[:3],
            'channel': channel,
            'sampling_rate': int(1. / header["dt"]),
            "delta": header["dt"],
            "npts": data.size,  #shape[0]
            'starttime': utcdatetime,
        })
        # save coordinates
        stats['gse2']["lat"] = header['latitude']
        stats['gse2']["lon"] = header["longitude"]
        trace = Trace(data=data, header=stats)
        traces.append(trace)
    # create Stream
    stream = Stream(traces)
    #== write seisan
    # date
    name = "{year:04}-{month:02}-{day:02}".format(**header)
    # time
    name += "-{t.hour:02}-{t.minute:02}".format(t=stats['starttime'])
    # + station name + Day_of_Year
    name += "{0}__{1:03}".format(stats["station"],
                                 stats['starttime'].timetuple().tm_yday)
    print('Writing GSE2 file %s.' % name)
    writeGSE2(stream, os.path.join(args.outdir, name))
Example #27
    def get_waveforms(self, **kwargs):
        stream = self.bank.get_waveforms(**kwargs)
        traces = []
        for trace in stream:
            data = trace.data[:-1]
            header = {
                'delta': trace.stats.delta,
                'station': trace.stats.station,
                'starttime': trace.stats.starttime,
                'channel': trace.stats.channel,
                'network': trace.stats.network
            }
            traces.append(Trace(data, header=header))
        return Stream(traces=traces)
Example #28
def custom_get_waveforms(network, station, location, channel, starttime,
                         endtime, quality=None, minimumlength=None,
                         longestonly=None, filename=None, attach_response=False,
                         **kwargs):
    with pyasdf.ASDFDataSet('/g/data/ha3/Passive/_ANU/7X(2009-2011)/ASDF/7X(2009-2011).h5', mode='r') as asdfDataSet:
        st = Stream()
        # ignoring channel for now as all the 7D network waveforms have only BH? channels
        filteredList = [i for i in asdfDataSet.waveforms[network + '.' + station].list() if
                        'raw_recording' in i and
                        UTC(i.split("__")[1]) < starttime and
                        UTC(i.split("__")[2]) > endtime]
        for t in filteredList:
            st += asdfDataSet.waveforms[network + '.' + station][t]
        return st
Example #29
    def add(self, stream, verbose=True):
        """
        Process all traces with compatible information and add their spectral
        estimates to the histogram containing the probabilistic psd.
        Also ensures that no piece of data is inserted twice.
        """
        # return later if any changes were applied to the ppsd statistics
        changed = False
        # prepare the list of traces to go through
        if isinstance(stream, Trace):
            stream = Stream([stream])
        # select appropriate traces
        stream = stream.select(id=self.id,
                               sampling_rate=self.sampling_rate)
        # save information on available data and gaps
        self.__insert_data_times(stream)
        self.__insert_gap_times(stream)
        # merge depending on skip_on_gaps set during __init__
        stream.merge(self.merge_method, fill_value=0)

        for tr in stream:
            # the following check should not be necessary due to the select()..
            if not self.__sanity_check(tr):
                msg = "Skipping incompatible trace."
                warnings.warn(msg)
                continue
            t1 = tr.stats.starttime
            t2 = tr.stats.endtime
            while t1 + PPSD_LENGTH <= t2:
                if self.__check_time_present(t1):
                    msg = "Already computed time spans detected (e.g. %s), " + \
                          "skipping these slices."
                    msg = msg % t1
                    print(msg)
                else:
                    # throw warnings if trace length is different than one
                    # hour..!?!
                    tr_slice = tr.slice(t1, t1 + PPSD_LENGTH)
                    success = self.__process(tr_slice)
                    if success:
                        self.__insert_used_time(t1)
                        if verbose:
                            stdout.write("\r adding %s" % t1)
                            stdout.flush()
                        changed = True
                t1 += PPSD_STRIDE  # advance half an hour
        if verbose:
            stdout.write("\r")
            stdout.flush()
        return changed
Example #30
    def query_sql_db(self, query, sql_filename, sta):
        # Create a new Stream object
        st = Stream()

        # Initialize (open/create) the sqlalchemy sqlite engine
        engine = create_engine('sqlite:///' + sql_filename)
        Session = sessionmaker()
        Session.configure(bind=engine)
        session = Session()

        for matched_waveform in session.query(Waveforms).filter(query):
            st += self.ds.waveforms[sta][matched_waveform.full_id]

        return st