Example #1
    def process(self, stream):
        """Run algorithm for a stream.
        Processes all traces in the stream.
        Parameters
        ----------
        stream : obspy.core.Stream
            stream of data to process
        Returns
        -------
        out : obspy.core.Stream
            stream containing 1 trace per original trace.
        """

        out = Stream()

        for trace in stream:
            data = trace.data
            step = self.decimation

            filtered = self.firfilter(data, self.window, step)

            stats = Stats(trace.stats)
            # stats.channel = trace_chan_dict2[stats.channel]
            stats.starttime = trace.stats.starttime + \
                    self.numtaps * self.sample_period // 2
            stats.delta = stats.delta * step
            # stats.processing.append('[Gaussian Filter]')
            stats.npts = filtered.shape[0]
            trace_out = self.create_trace(stats.channel, stats, filtered)

            out += trace_out

        return out
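The firfilter helper called above is not shown in this example. Below is a minimal sketch of what such a filter-and-decimate routine might look like; the name and argument order follow the call above, while the use of numpy.convolve and everything else is an assumption.

import numpy as np

def firfilter(data, window, step):
    # sketch only: convolve with normalized taps, then keep every `step`-th sample
    window = np.asarray(window, dtype=float)
    window = window / window.sum()
    # 'valid' keeps only positions where the full window overlaps the data
    filtered = np.convolve(data, window, mode="valid")
    return filtered[::step]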
Example #2
def ascii(path, filename):
    """
    Reads SPECFEM3D-style ASCII data

    :type path: str
    :param path: path to datasets
    :type filename: str
    :param filename: file to read
    """
    st = Stream()
    stats = Stats()

    time, data = loadtxt(os.path.join(path, filename)).T

    stats.filename = filename
    stats.starttime = time[0]
    stats.delta = time[1] - time[0]
    stats.npts = len(data)

    # filenames are expected to follow the NET.STA.CHA.* convention
    try:
        parts = filename.split(".")
        stats.network = parts[0]
        stats.station = parts[1]
        stats.channel = parts[2]
    except IndexError:
        pass

    st.append(Trace(data=data, header=stats))

    return st
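A usage sketch for the reader above, assuming the imports it needs (os, numpy.loadtxt, and obspy's Stream, Stats, Trace) are already in scope. The file written here is synthetic and only illustrates the expected two-column (time, amplitude) layout and NET.STA.CHA.* naming.

import os
import numpy as np

os.makedirs("demo_sem", exist_ok=True)
t = np.arange(0.0, 1.0, 0.01)
np.savetxt(os.path.join("demo_sem", "AA.S0001.BXZ.semd"),
           np.column_stack((t, np.sin(2.0 * np.pi * 5.0 * t))))

st = ascii("demo_sem", "AA.S0001.BXZ.semd")
print(st[0].stats.network, st[0].stats.station, st[0].stats.channel)
print(st[0].stats.delta, st[0].stats.npts)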
Example #3
def __create_trace(
    data,
    network="NT",
    station="BOU",
    channel="H",
    location="R0",
    data_interval="second",
    data_type="interval",
):
    """
    Utility to create a trace containing the given numpy array.

    Parameters
    ----------
    data: array
        The array to be inserted into the trace.

    Returns
    -------
    obspy.core.trace.Trace
        Trace containing the data with the given channel metadata.
    """
    stats = Stats()
    stats.starttime = UTCDateTime("2019-12-01")
    stats.delta = TimeseriesUtility.get_delta_from_interval(data_interval)
    stats.network = network
    stats.station = station
    stats.channel = channel
    stats.location = location
    stats.npts = len(data)
    stats.data_interval = data_interval
    stats.data_type = data_type
    numpy_data = numpy.array(data, dtype=numpy.float64)
    return Trace(numpy_data, stats)
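A usage sketch; the delta assumed here is what geomag-algorithms' TimeseriesUtility.get_delta_from_interval is expected to return for its interval names ('second' maps to 1, 'minute' to 60), so the array below stands in for one hour of one-second data.

import numpy

samples = numpy.full(3600, 20800.0)  # one hour of one-second H values (illustrative)
trace = __create_trace(samples, channel="H", data_interval="second")
print(trace.stats.channel, trace.stats.delta, trace.stats.npts, trace.stats.starttime)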
Example #4
def read_ascii(path, NR, nt):
    import os
    import numpy as np
    from obspy.core import Stream, Stats, Trace
    dat_type = 'semd'
    comp1 = 'FXX'
    comp2 = 'FXY'
    stream = Stream()
    for rec_x in range(0,NR):
        file_name_in1 = path + 'P.R' + str(int(rec_x+1)) + '.' + comp1 + '.' + dat_type
        file_name_in2 = path + 'P.R' + str(int(rec_x+1)) + '.' + comp2 + '.' + dat_type
        xz1 = np.genfromtxt(file_name_in1)
        xz2 = np.genfromtxt(file_name_in2)
        deg = 0.0
        alpha = np.arctan(xz2[:nt,1]/(1.0e-40 + xz1[:nt,1])) # angle of projection
        direction = np.sign(np.cos(deg*np.pi/180.0)*xz1[:nt,1]*np.cos(alpha) + np.sin(deg*np.pi/180.0)*xz2[:nt,1]*np.cos(alpha))    
        data = direction*np.sqrt(xz1[:nt,1]**2 + xz2[:nt,1]**2)*np.cos(alpha) # scalar radial component

        stats = Stats()
        stats.filename = path + 'P.R' + str(int(rec_x+1))
        stats.starttime = xz1[0,0]
        stats.delta = xz1[1,0] - xz1[0,0]
        stats.npts = len(xz1[:nt,0])

        # derive ids from the file's base name, e.g. "P.R1.FXX.semd"
        try:
            parts = os.path.basename(file_name_in1).split('.')
            stats.network = parts[0]
            stats.station = parts[1]
            stats.channel = parts[2]
        except IndexError:
            pass

        stream.append(Trace(data=data[:], header=stats))

    return stream
Example #5
    def process_step(self, step, stream):
        """Filters stream for one step.
        Filters all traces in stream.
        Parameters
        ----------
        step : array element
            step holding variables for one filtering operation
        stream : obspy.core.Stream
            stream of data to filter
        Returns
        -------
        out : obspy.core.Stream
            stream containing 1 trace per original trace.
        """
        # gather variables from step
        input_sample_period = step["input_sample_period"]
        output_sample_period = step["output_sample_period"]
        window = np.array(step["window"])
        decimation = int(output_sample_period / input_sample_period)
        numtaps = len(window)
        window = window / sum(window)

        out = Stream()
        for trace in stream:
            filtered = self.firfilter(trace.data, window, decimation)
            stats = Stats(trace.stats)
            stats.starttime = stats.starttime + input_sample_period * (numtaps // 2)
            stats.delta = output_sample_period
            stats.npts = len(filtered)
            trace_out = self.create_trace(stats.channel, stats, filtered)
            out += trace_out
        return out
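For reference, here is a sketch of what the step dictionary consumed above might contain; the key names follow the code, while the values (a ten-point boxcar that turns one-second data into ten-second data) are made up for illustration.

import numpy as np

step = {
    "input_sample_period": 1.0,    # seconds between input samples
    "output_sample_period": 10.0,  # seconds between output samples
    "window": np.ones(10),         # filter taps; normalized inside process_step
}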
Example #6
def _create_trace(data, channel, starttime, delta=60.):
    stats = Stats()
    stats.channel = channel
    stats.delta = delta
    stats.starttime = starttime
    stats.npts = len(data)
    data = numpy.array(data, dtype=numpy.float64)
    return Trace(data, stats)
Example #7
def _create_trace(data, channel, starttime, delta=60.0):
    stats = Stats()
    stats.channel = channel
    stats.delta = delta
    stats.starttime = starttime
    stats.npts = len(data)
    data = numpy.array(data, dtype=numpy.float64)
    return Trace(data, stats)
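A usage sketch for the _create_trace helper above (it assumes numpy, Stats and Trace are imported where it is defined); the sample values are illustrative one-minute geomagnetic readings.

from obspy import Stream, UTCDateTime

starttime = UTCDateTime("2020-01-01T00:00:00Z")
h = _create_trace([20800.0, 20801.5, 20802.0], "H", starttime)  # delta defaults to 60 s
e = _create_trace([-120.0, -119.5, -119.0], "E", starttime)
stream = Stream(traces=[h, e])
print(stream)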
Example #8
 def process_step(self, step, stream):
     """Filters stream for one step.
     Filters all traces in stream.
     Parameters
     ----------
     step : array element
         step holding variables for one filtering operation
     stream : obspy.core.Stream
         stream of data to filter
     Returns
     -------
     out : obspy.core.Stream
         stream containing 1 trace per original trace.
     """
     # gather variables from step
     input_sample_period = step["input_sample_period"]
     output_sample_period = step["output_sample_period"]
     window = np.array(step["window"])
     decimation = int(output_sample_period / input_sample_period)
     numtaps = len(window)
     window = window / sum(window)
     # first output timestamp is in the center of the filter window
     filter_time_shift = input_sample_period * (numtaps // 2)
     out = Stream()
     for trace in stream:
         # data to filter
         data = trace.data
         starttime = trace.stats.starttime + filter_time_shift
         # align with the output period
         misalignment = starttime.timestamp % output_sample_period
         if misalignment != 0:
             # skip incomplete input
             starttime = (starttime - misalignment) + output_sample_period
             input_starttime = starttime - filter_time_shift
             offset = int(1e-6 + (input_starttime - trace.stats.starttime) /
                          input_sample_period)
             print(f"Skipping {offset} input samples to align output",
                   file=sys.stderr)
             data = data[offset:]
             # check there is still enough data for filter
             if len(data) < numtaps:
                 continue
         filtered = self.firfilter(data, window, decimation)
         stats = Stats(trace.stats)
         stats.starttime = starttime
         stats.delta = output_sample_period
         stats.npts = len(filtered)
         trace_out = self.create_trace(stats.channel, stats, filtered)
         out += trace_out
     return out
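The alignment arithmetic above is easiest to follow with concrete numbers. A worked sketch, assuming one-second input decimated to one-minute output with a 91-tap filter (all values invented for illustration):

from obspy import UTCDateTime

input_sample_period = 1.0
output_sample_period = 60.0
numtaps = 91
filter_time_shift = input_sample_period * (numtaps // 2)        # 45 s

trace_starttime = UTCDateTime("2020-01-01T00:00:10Z")
starttime = trace_starttime + filter_time_shift                  # 00:00:55
misalignment = starttime.timestamp % output_sample_period        # 55 s, not on a minute boundary
if misalignment != 0:
    starttime = (starttime - misalignment) + output_sample_period    # rounded up to 00:01:00
    input_starttime = starttime - filter_time_shift                  # 00:00:15
    offset = int(1e-6 + (input_starttime - trace_starttime) / input_sample_period)
    print(offset)  # 5 input samples are skipped so the first output lands on the minute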
Example #9
def read_specfem_seismogram(output_files, network, station, band):
    st = Stream()
    for component in 'ZNE':
        channel = '%sX%s' % (band, component)
        filename = os.path.join(
            output_files, '%s.%s.%s.sem.ascii' % (network, station, channel))
        tmp = np.genfromtxt(filename)

        stats = Stats()
        stats.network = network
        stats.station = station
        stats.channel = channel
        stats.delta = tmp[1, 0] - tmp[0, 0]
        stats.npts = tmp.shape[0]
        stats.starttime = tmp[0, 0]

        tr = Trace(tmp[:, 1], stats)
        st += tr

    return st
Example #10
def read_specfem_seismogram(output_files, network, station, band):
    st = Stream()
    for component in 'ZNE':
        channel = '%sX%s' % (band, component)
        filename = os.path.join(output_files,
                                '%s.%s.%s.sem.ascii' % (network, station,
                                                        channel))
        tmp = np.genfromtxt(filename)

        stats = Stats()
        stats.network = network
        stats.station = station
        stats.channel = channel
        stats.delta = tmp[1, 0] - tmp[0, 0]
        stats.npts = tmp.shape[0]
        stats.starttime = tmp[0, 0]

        tr = Trace(tmp[:, 1], stats)
        st += tr

    return st
Example #11
def load_axisem3d_data(file, station_code, component):
    
    axisem3d_data = Dataset(file)
    times_a = axisem3d_data.variables['time_points']
    axisem3d_stats = Stats()
    axisem3d_stats.delta = (times_a[1] - times_a[0])
    axisem3d_stats.starttime = UTCDateTime(times_a[0])
    axisem3d_stats.npts = times_a.size
    station_code = station_code + '.KO.RTZ'

    if component == 'z':
        c_a = axisem3d_data.variables[station_code][:, 2]
    elif component == 't':
        c_a = axisem3d_data.variables[station_code][:, 1]
    elif component == 'r':
        c_a = axisem3d_data.variables[station_code][:, 0]
    else:
        raise Exception('No component with this name')
    
    axisem3d_trace = Trace(c_a, header=axisem3d_stats)
    axisem3d_data.close()
    return axisem3d_trace
Example #12
 def process_step(self, step, stream):
     """Filters stream for one step.
     Filters all traces in stream.
     Parameters
     ----------
     step : array element
         step holding variables for one filtering operation
     stream : obspy.core.Stream
         stream of data to filter
     Returns
     -------
     out : obspy.core.Stream
         stream containing 1 trace per original trace.
     """
     # gather variables from step
     input_sample_period = step["input_sample_period"]
     output_sample_period = step["output_sample_period"]
     window = np.array(step["window"])
     decimation = int(output_sample_period / input_sample_period)
     numtaps = len(window)
     window = window / sum(window)
     out = Stream()
     for trace in stream:
         starttime, data = self.align_trace(step, trace)
         # check that there is still enough data to filter
         if len(data) < numtaps:
             continue
         filtered = self.firfilter(data, window, decimation)
         stats = Stats(trace.stats)
         stats.delta = output_sample_period
         stats.data_interval = step["data_interval"]
         stats.data_interval_type = step["data_interval_type"]
         stats.filter_comments = step["filter_comments"]
         stats.starttime = starttime
         stats.npts = len(filtered)
         trace_out = self.create_trace(stats.channel, stats, filtered)
         out += trace_out
     return out
Example #13
#****************************************************************************
# data description
#****************************************************************************
NR = 50  # number of receivers
LEN = 10000  #8000# data length
comp1 = 'Uz'

# read Heidimode data
dz_src = path_in + '/arbzseis'
dz_data = np.fromfile(dz_src, dtype='>f')  # big endian float (4 bytes)
dz_data = dz_data.reshape((NR, LEN), order="F")
dz_data = np.float32(dz_data)

# write SU-format binary
dz_dest = path_out + '/' + comp1 + '_file_single.su'
stats = Stats()
stats.filename = dz_dest
stats.starttime = 0.0
stats.delta = 1.0e-4  # ObsPy does not work with very small timesteps such as 10.0e-8
stats.npts = LEN
stream = Stream()
for i in range(NR):
    stream.append(Trace(data=dz_data[i, :], header=stats))
    #stream.append(Trace(data=dz_data[i,:]))
    print('max of trace %i is %f' % (i, np.max(dz_data[i, :])))
print(stream)
#stream[0].plot()

stream.write(dz_dest, format='su')
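To check the result, the SU file can be read back with obspy; a short sketch (it assumes the write above succeeded and dz_dest still points at the file):

from obspy import read

su_stream = read(dz_dest, format='SU')
print(su_stream)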
Example #14
def moveout_test(PSS_file, q, phase):
    """
    Creates synthetic PRFs and stacks them after depth migration.

    Parameters
    ----------
    PSS_file : str
        Filename of raysum file containing P-Sv-Sh traces.
    q : float
        Slowness [s/m].
    phase : str
        Either "P" for Ps or "S" for Sp.

    Returns
    -------
    z : np.array
        Depth vector.
    stack : np.array
        Receiver function stack.
    RF_mo : np.array
        Matrix containing all depth migrated RFs.
    RF : np.array
        Matrix containing all RFs.
    dt : float
        Sampling interval.
    PSS : np.array
        Matrix containing all traces in P-Sv-Sh.

    """
    rayp = q * 1.111949e5
    PSS, dt, M, N, shift = read_raysum(phase, PSS_file=PSS_file)

    # Create receiver functions
    RF = []
    RF_mo = []
    stats = Stats()
    stats.npts = N
    stats.delta = dt
    stats.starttime = UTCDateTime(0)

    for i in range(M):
        if phase == "P":
            data, _, IR = it(PSS[i, 0, :],
                             PSS[i, 1, :],
                             dt,
                             shift=shift,
                             width=4)
        elif phase == "S":
            data, _, _ = it(PSS[i, 1, :],
                            PSS[i, 0, :],
                            dt,
                            shift=shift,
                            width=4)
        RF.append(data)
        z, rfc = moveout(data,
                         stats,
                         UTCDateTime(shift),
                         rayp[i],
                         phase,
                         fname="raysum.dat")
        RF_mo.append(rfc)
    stack = np.average(RF_mo, axis=0)
    plt.close('all')
    plt.figure()
    for mo in RF_mo:
        plt.plot(z, mo)
    return z, stack, RF_mo, RF, dt, PSS
Example #15
def rf_test(phase,
            dip,
            rfloc='output/waveforms/RF',
            geom_file='3D.geom',
            decon_meth='it'):
    """
    Creates synthetic PRFs from Raysum data.

    Parameters
    ----------
    phase : str
        "P" or "S".
    dip : int
        Dip of the LAB in degrees; determines which files to use.
    rfloc : str, optional
        Parent directory in which the RFs are saved.
    geom_file : str, optional
        Filename of the geometry file.

    Returns
    -------
    rfs: list
        List of RFTrace objects. Will in addition be saved in SAC format.

    """
    # Determine filenames
    PSS_file = []
    for i in range(16):
        PSS_file.append('3D_' + str(dip) + '_' + str(i) + '.tr')

    # Read geometry
    baz, q, dN, dE = read_geom(geom_file, phase)

    # statlat = dN/(DEG2KM*1000)
    d = np.sqrt(np.square(dN) + np.square(dE))
    az = np.rad2deg(np.arccos(dN / d))
    i = np.where(dE < 0)
    az[i] = az[i] + 180
    statlat = []
    statlon = []
    for azimuth, delta in zip(az, d):
        if delta == 0:
            statlat.append(0)
            statlon.append(0)
            continue
        coords = Geodesic.WGS84.Direct(0, 0, azimuth, delta)
        statlat.append(coords["lat2"])
        statlon.append(coords["lon2"])
    #         for n, longitude in enumerate(lon):
#             y, _, _ = gps2dist_azimuth(latitude, 0, latitude, longitude)
# statlon = dE/(DEG2KM*1000)
    rayp = q * DEG2KM * 1000

    # Read traces
    stream = []

    for f in PSS_file:
        PSS, dt, _, N, shift = read_raysum(phase, PSS_file=f)
        stream.append(PSS)

    streams = np.vstack(stream)
    del stream

    M = len(baz)

    if M != streams.shape[0]:
        raise ValueError([
            "Number of traces", streams.shape[0], """does not
             equal the number of backazimuths in the geom file""", M
        ])

    rfs = []
    odir = os.path.join(rfloc, phase, 'raysum', str(dip))
    ch = ['BHP', 'BHV', 'BHH']  # Channel names

    os.makedirs(odir, exist_ok=True)

    # Create RF objects
    for i, st in enumerate(streams):
        s = Stream()
        for j, tr in enumerate(st):
            stats = Stats()
            stats.npts = N
            stats.delta = dt
            stats.starttime = UTCDateTime(0)
            stats.channel = ch[j]
            stats.network = 'RS'
            stats.station = str(dip)
            s.append(Trace(data=tr, header=stats))

        # Create info dictionary for rf creation
        info = {
            'onset': [UTCDateTime(0) + shift],
            'starttime': [UTCDateTime(0)],
            'statlat': statlat[i],
            'statlon': statlon[i],
            'statel': 0,
            'rayp_s_deg': [rayp[i]],
            'rbaz': [baz[i]],
            'rdelta': [np.nan],
            'ot_ret': [0],
            'magnitude': [np.nan],
            'evt_depth': [np.nan],
            'evtlon': [np.nan],
            'evtlat': [np.nan]
        }

        rf = createRF(s, phase=phase, method=decon_meth, info=info)

        # Write RF
        rf.write(os.path.join(odir, str(i) + '.sac'), format='SAC')
        rfs.append(rf)

    return rfs, statlat, statlon
Example #16
def raw_import(gzip_filename):
    """
    Makes a 'raw' stream file from the gzipped csv file.
    The csv file has been downloaded from the JAXA website.
    The method makes a raw stream which does not yet have the frames
    reconstructed.

    :type gzip_filename: str
    :param gzip_filename: gzipped filename of the CSV file to be read.
    :rtype: :class:`~obspy.core.stream.Stream`
    :return: A ObsPy Stream object.

    """

    # read the gzipped csv file
    with gzip.open(gzip_filename, 'rt') as fh:
        # read file
        buf = []
        header = next(fh).split(',')

        # read the header
        # it should contain either 1 channel or 3
        if len(header) == 8:
            # the RESP files use either 'MH1', 'MH2', 'MHZ'
            # the JAXA files use 'LPX', 'LPY', 'LPZ'
            # X should point north, Y east, but this is not always the case
            # so we rename LPX to MH1, and LPY to MH2
            channels = ['MH1', 'MH2', 'MHZ']
            raw_channels = ['_M1', '_M2', '_MZ']
            for line in fh:
                temp = line.split(',')

                try:
                    temp[4] = UTCDateTime(temp[4])
                except ValueError as e:
                    # this is a specific error which is found in the csv file
                    if temp[4] == '1975-49-11 19:13:04.232000':
                        temp[4] = UTCDateTime('1975-09-11 19:13:04.232000')
                    else:
                        raise

                try:
                    temp[0] = int(temp[0])
                except ValueError as e:
                    # this is a specific error which is found in the csv file
                    if temp[4] == UTCDateTime(
                            '1975-09-15 12:53:36.849000') and temp[0] == '<3':
                        temp[0] = 83
                    else:
                        raise

                buf.append(
                    (temp[1], temp[2], temp[4], int(temp[0]), int(temp[3]),
                     int(temp[5]), int(temp[6]), int(temp[7])))

        elif len(header) == 6:
            channels = ['SPZ']
            raw_channels = ['_SZ']
            for line in fh:
                # check the manual list of points which have been removed
                if line in remove_manually:
                    continue

                temp = line.split(',')
                # the original order:
                # frame_count, ap_station, ground_station, nc, time, spz
                # make a tuple (in a new order so that it can be sorted):
                # ap_station, ground_station, time, frame_count, nc, spz
                buf.append(
                    (temp[1], temp[2], UTCDateTime(temp[4]), int(temp[0]),
                     int(temp[3]), int(temp[5])))

    # sort by ap_station, ground_station and time (and also everything else,
    # but that won't matter)
    buf.sort()

    stream = Stream()
    data_x = []
    data_y = []
    data_z = []
    data_sz = []
    abs_times = []
    frame_count_ncs = []
    corr_frame_count_ncs = []

    stats = Stats()
    stats.delta = DELTA
    network = 'XA'
    last_id = None

    for data in buf:

        # read in the data from the buffer
        station = data[0].rjust(3, 'S')
        ground_station = data[1].rjust(2, '0')
        time = data[2]

        frame_count = data[3]
        nc = data[4]
        # create a combination of frame count and nc - from 0.0 to 89.75
        frame_count_nc = float(frame_count) + (float(nc) - 1.) * 0.25

        id = "{0:s}.{1:s}.{2:s}.{3:s}".format(network, station, ground_station,
                                              channels[0])

        # check whether we are adding to an existing one, or creating a new one
        if (last_id is None or last_id != id):
            # before creating the new one, add previous trace(s) to the stream
            if len(abs_times) > 0:
                _make_traces(stream=stream,
                             stats=stats,
                             header=header,
                             channels=raw_channels,
                             data_x=data_x,
                             data_y=data_y,
                             data_z=data_z,
                             data_sz=data_sz,
                             abs_times=abs_times,
                             frame_count_ncs=frame_count_ncs)

            data_x = []
            data_y = []
            data_z = []
            data_sz = []
            abs_times = []
            frame_count_ncs = []

            stats = Stats()
            stats.delta = DELTA
            stats.starttime = time
            stats.network = network
            stats.station = station
            stats.location = ground_station

        # add the data from any line
        if len(header) == 8:
            data_x.append(data[5])
            data_y.append(data[6])
            data_z.append(data[7])
        else:
            data_sz.append(data[5])
        abs_times.append(time.timestamp)
        frame_count_ncs.append(frame_count_nc)

        last_id = id

    # add the last one
    if len(abs_times) > 0:
        _make_traces(stream=stream,
                     stats=stats,
                     header=header,
                     channels=raw_channels,
                     data_x=data_x,
                     data_y=data_y,
                     data_z=data_z,
                     data_sz=data_sz,
                     abs_times=abs_times,
                     frame_count_ncs=frame_count_ncs)

    return stream
Example #17
 def test_delta_zero(self):
     """
     Make sure you can set delta = 0. for #1989
     """
     stat = Stats()
     stat.delta = 0
Example #18
 def test_delta_zero(self):
     """
     Make sure you can set delta = 0. for #1989
     """
     stat = Stats()
     stat.delta = 0
Example #19
    endtime = UTCDateTime(Time_stamps[-1])
    sttime._set_year(2017)
    endtime._set_year(2017)
    sttime._set_month(8)
    endtime._set_month(8)
    sttime._set_day(13 + UTCDateTime(Time_stamps[0]).day)
    endtime._set_day(13 + UTCDateTime(Time_stamps[-1]).day)

    # Define stats
    stats = Stats()
    stats.starttime = sttime
    stats.station = station
    stats.network = 'NT'
    stats.location = 'R0'
    stats.data_interval = '256Hz'
    stats.delta = .00390625
    stats.data_type = 'variation'

    # Create list of arrays and channel names and initialize counter k
    arrays = [Hx, Hy, Ex, Ey]
    k = 0

    # Loop over channels to create an obspy stream of the data
    for ar in arrays:
        stats.npts = len(ar)
        stats.channel = channels[k]
        ar = np.asarray(ar)
        trace = Trace(ar, stats)
        stream += trace
        trace = None
        k += 1