Code Example #1
File: qssp2sac.py Project: shenjianzhang/QSSP2SAC
def main():
    print('QSSP input file is', sys.argv[1])
    print('QSSP output file is', sys.argv[2])
    qsspinput = sys.argv[1]
    qsspoutput = sys.argv[2]
    # read header info from the QSSP input file
    deltat, spara, prfx, nr, rdepth, rpara, rnames = readinput(qsspinput)
    # read seismograms from QSSP output files
    seis = readoutput(qsspoutput)
    # read channel code and observable types
    chan, caz, cin, datatype = datatype_channel(prfx, qsspoutput)
    # write SAC files for each receiver/station
    for i in range(nr):
        # name for SAC file
        sacname = '{}.{}.{}.{}.SAC'.format(
            prfx.upper(), rnames[i].upper(), datatype, chan)
        header = {'iztype': 'io', 'o': 0, 'b': rpara[i, 2], 'delta': deltat,
                  'kevnm': prfx, 'evla': spara[0], 'evlo': spara[1],
                  'evdp': spara[2], 'kstnm': rnames[i], 'kcmpnm': chan,
                  'cmpaz': caz, 'cmpinc': cin, 'stla': rpara[i, 0],
                  'stlo': rpara[i, 1], 'stdp': rdepth * 1e3, 'lcalda': True}
        tr = SACTrace(data=seis[:, i], **header)
        tr.write(sacname)
        print('Create SAC file:', sacname)
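The pattern above, building a plain header dict and unpacking it into the SACTrace constructor along with a NumPy array, recurs throughout these examples. A minimal self-contained sketch (station and event values are made up):

import numpy as np
from obspy.io.sac import SACTrace

data = np.zeros(100, dtype=np.float32)
header = {'kstnm': 'STA', 'kcmpnm': 'BHZ', 'delta': 0.01, 'b': 0.0,
          'evla': 10.0, 'evlo': 20.0, 'stla': 11.0, 'stlo': 21.0}
tr = SACTrace(data=data, **header)
tr.write('STA.BHZ.SAC')  # writes a binary SAC file to the working directory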
Code Example #2
File: test_core.py Project: Keita1/obspy
 def test_undefined_b(self):
     """
     Test that an undefined B value (-12345.0) is not messing up the
     starttime
     """
     # read in the test file and see that the SAC reference time and
     # starttime of the seismogram are correct
     tr = read(self.file)[0]
     self.assertEqual(tr.stats.starttime.timestamp, 269596810.0)
     self.assertEqual(tr.stats.sac.b, 10.0)
     with open(self.file, 'rb') as fh:
         sac_ref_time = SACTrace.read(fh).reftime
     self.assertEqual(sac_ref_time.timestamp, 269596800.0)
     # change b to undefined and write (same case as if b == 0.0)
     # now sac reference time and reftime of seismogram must be the
     # same
     tr.stats.sac.b = -12345.0
     with NamedTemporaryFile() as tf:
         tmpfile = tf.name
         tr.write(tmpfile, format="SAC")
         tr2 = read(tmpfile)[0]
         self.assertEqual(tr2.stats.starttime.timestamp, 269596810.0)
         self.assertEqual(tr2.stats.sac.b, 10.0)
         with open(tmpfile, "rb") as fh:
             sac_ref_time2 = SACTrace.read(fh).reftime
     self.assertEqual(sac_ref_time2.timestamp, 269596800.0)
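The invariant this test exercises is that a trace's absolute start time equals the SAC reference time plus the B header. A quick check of that relationship on any SAC file (the path here is hypothetical):

from obspy import read
from obspy.io.sac import SACTrace

tr = read('/path/to/file.sac')[0]
sac = SACTrace.read('/path/to/file.sac')
# starttime == reftime + b, to within one sample interval
assert abs((sac.reftime + sac.b) - tr.stats.starttime) < sac.delta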
Code Example #3
def main(data_dir, output_dir):

    # Get borehole geode info
    stats = pd.read_csv("walkaround_borehole_orientation_stats.csv")

    # Loop over stations/geodes
    for sta in ['BH001', 'BH002', 'BH004', 'BH005', 'BH006', 'BH008', 'BH011', 'BH012', 'BH013', 'BH014', 'BH015', 'BH016', 'BH017', 'BH018']:  # geodes.Geode:

        print("Processing station %s" % sta)
        # Rotation matrix for correction
        inc_corr = - stats.loc[stats['geode'] == sta, 'inclination'].values[0]
        baz_corr = - stats.loc[stats['geode'] == sta, 'mean'].values[0]

        # Read data
        flist1 = glob(os.path.join(data_dir, "8O.%s..DP1*" % sta))
        flist2 = glob(os.path.join(data_dir, "8O.%s..DP2*" % sta))
        flist3 = glob(os.path.join(data_dir, "8O.%s..DP3*" % sta))
        if not flist1 or not flist2 or not flist3:
            continue
            
        # Look for suffix
        file = os.path.split(flist1[0])[1]
        if file.find("unit") != -1:
            idx = file.find("unit")
            suffix = "_" + file[idx:].split(".")[0]
        else:
            suffix = ""
        
        st = Stream()
        st += read(os.path.join(data_dir, "8O.%s..DP1*" % sta), format="SAC")[0]
        st += read(os.path.join(data_dir, "8O.%s..DP2*" % sta), format="SAC")[0]
        st += read(os.path.join(data_dir, "8O.%s..DP3*" % sta), format="SAC")[0]
        
        if len(st) == 0:
            continue
            
        st.resample(500.0)
        starttime = st[0].stats.starttime
        endtime = st[0].stats.endtime

        # Save rotated components
        st3 = st.copy()
        compN, compE, compZ = rotate_to_zne(st3[0].data, st3[1].data, st3[2].data, inc_corr, baz_corr)
        trN = SACTrace.from_obspy_trace(st3[0])
        trN.data = compN
        trN.kcmpnm = "DPN"
        trN.write(os.path.join(output_dir,
                               "8O.%s..DPN.%s_%s%s.sac" % (sta, starttime.strftime("%Y%m%d%H%M%S"), endtime.strftime("%Y%m%d%H%M%S"), suffix)))
        trE = SACTrace.from_obspy_trace(st3[1])
        trE.data = compE
        trE.kcmpnm = "DPE"
        trE.write(os.path.join(output_dir,
                               "8O.%s..DPE.%s_%s%s.sac" % (sta, starttime.strftime("%Y%m%d%H%M%S"), endtime.strftime("%Y%m%d%H%M%S"), suffix)))
        trZ = SACTrace.from_obspy_trace(st3[2])
        trZ.data = compZ
        trZ.kcmpnm = "DPZ"
        trZ.write(os.path.join(output_dir,
                               "8O.%s..DPZ.%s_%s%s.sac" % (sta, starttime.strftime("%Y%m%d%H%M%S"), endtime.strftime("%Y%m%d%H%M%S"), suffix)))
Code Example #4
def cgps_traces(files, tensor_info, data_prop):
    """Write json dictionary with specified properties for cGPS data
    
    :param files: list of waveform files in sac format
    :param tensor_info: dictionary with moment tensor information
    :param data_prop: dictionary with waveform properties
    :type files: list
    :type tensor_info: dict
    :type data_prop: dict
    
    .. warning::
        
        Make sure the filters of cGPS data agree with the values in
        sampling_filter.json!
    """
    if len(files) == 0:
        return
    event_lat = tensor_info['lat']
    event_lon = tensor_info['lon']
    depth = tensor_info['depth']
    origin_time = tensor_info['date_origin']
    headers = [SACTrace.read(file) for file in files]
    dt_cgps = headers[0].delta
    dt_cgps = round(dt_cgps, 1)
    values = [mng._distazbaz(header.stla, header.stlo, event_lat, event_lon)\
        for header in headers]
    distances = [value[0] for value in values]
    zipped = zip(distances, headers)
    arrivals = [np.sqrt(dist**2 + depth**2) / 5 + header.b for dist, header in zipped]
    duration = duration_strong_motion(distances, arrivals, tensor_info, dt_cgps)
    filter0 = data_prop['strong_filter']
    n0, n1 = data_prop['wavelet_scales']
    wavelet_weight = wavelets_strong_motion(
            duration, filter0, dt_cgps, n0, n1, cgps=True)
    info_traces = []
    vertical = ['LXZ', 'LHZ', 'LYZ']
    headers = [SACTrace.read(file) for file in files]
    streams = [read(file) for file in files]
    channels = [header.kcmpnm for header in headers]
    weights = [1.2 if channel not in vertical else 0.6 for channel in channels]

    for file, header, stream, weight in zip(files, headers, streams, weights):
        start = origin_time - stream[0].stats.starttime
        distance, azimuth, back_azimuth = mng._distazbaz(
                header.stla, header.stlo, event_lat, event_lon)
        info = _dict_trace(
                file, header.kstnm, header.kcmpnm, azimuth, distance / 111.11,
                dt_cgps, duration, int(start // dt_cgps), weight,
                wavelet_weight, [], location=[header.stla, header.stlo])
        info_traces.append(info)
    with open('cgps_waves.json','w') as f:
         json.dump(
                 info_traces, f, sort_keys=True, indent=4,
                 separators=(',', ': '), ensure_ascii=False)
    return info_traces
Code Example #5
File: eq.py Project: Ji-Cong/seispy
    def saverf(self, path, evtstr=None, phase='P', shift=0, evla=-12345., evlo=-12345., evdp=-12345., mag=-12345.,
               gauss=0, baz=-12345., gcarc=-12345., only_r=False, **kwargs):
        if phase == 'P':
            if only_r:
                loop_lst = [1]
            else:
                loop_lst = [0, 1]
            rayp = seispy.geo.srad2skm(self.PArrival.ray_param)
        elif phase == 'S':
            loop_lst = [2]
            rayp = seispy.geo.srad2skm(self.SArrival.ray_param)
        else:
            raise ValueError("Phase must be 'P' or 'S'")

        if evtstr is None:
            filename = join(path, self.datestr)
        else:
            filename = join(path, evtstr)
        for i in loop_lst:
            header = {'evla': evla, 'evlo': evlo, 'evdp': evdp, 'mag': mag, 'baz': baz,
                      'gcarc': gcarc, 'user0': rayp, 'kuser0': 'Ray Para', 'user1': gauss, 'kuser1': 'G factor'}
            for key in kwargs:
                header[key] = kwargs[key]
            for key, value in header.items():
                self.rf[i].stats['sac'][key] = value
            tr = SACTrace.from_obspy_trace(self.rf[i])
            tr.b = -shift
            tr.o = 0
            tr.write(filename + '_{0}_{1}.sac'.format(phase, tr.kcmpnm[-1]))
Code Example #6
def theoretical_arrival(tr, modelname="prem", phase_list=["P"]):
    """Get predicted phase arrival based the SAC trace 
    
    Parameter
    =========
    tr : obspy.trace
        obspy trace read from SAC file
    modelname : str
        model name
    phase_list : list
        phase list to get arrivals
    """
    # -------------------------------------------------------------------------
    # construct the origin time
    # -------------------------------------------------------------------------
    sactr = SACTrace.from_obspy_trace(tr)
    evdp, gcarc = sactr.evdp, sactr.gcarc

    # -------------------------------------------------------------------------
    # get waveforms of P and S wave based on given 1D model and time shift
    # -------------------------------------------------------------------------
    model = TauPyModel(model=modelname)
    # evdp is assumed to be stored in meters, hence the conversion to km
    arr = model.get_travel_times(source_depth_in_km=evdp / 1000,
                                 distance_in_degree=gcarc,
                                 phase_list=phase_list)
    return sactr.reftime, arr
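A usage sketch for the function above, assuming a SAC file (filename hypothetical) whose evdp and gcarc headers are populated and whose reference time is the event origin time:

from obspy import read

tr = read('example.sac')[0]
origin, arrivals = theoretical_arrival(tr, modelname='prem', phase_list=['P', 'S'])
for arr in arrivals:
    print(arr.name, 'predicted at', origin + arr.time)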
Code Example #7
def Bandpass(datapath):
    # for j in tqdm(range(10),desc = "prosessing"):
    filepath = datapath + "/predict/syn/Z/"
    overpath = datapath + "/predict/syn/Z/"
    os.chdir(filepath)

    for i in range(300):
        st = obspy.read(filepath + str(i) + '.sac')
        tr = st[0]
        tr.filter('bandpass', freqmin=8, freqmax=15, corners=4, zerophase=True)
        # tr.filter('highpass', freq=8, corners=4, zerophase=True)
        # normalize the filtered trace to unit peak amplitude
        data = tr.data / np.max(np.abs(tr.data))
        sacfile = Trace()
        sacfile.data = data

        sac = SACTrace.from_obspy_trace(sacfile)
        sac.stla = 35
        sac.stlo = 110 + (80 + 72 * 500 + i * 25) / 111000

        sac.delta = 0.0006
        sac.evla = 35
        sac.evlo = 110 + (6568 + 500 * 72) / 111000
        sac.evdp = 0.05
        sac.write(overpath + str(72 * 520 + i) + ".sac")
Code Example #8
def tele_body_traces(files, tensor_info, data_prop):
    """Write json dictionary with specified properties for teleseismic data
    
    :param files: list of waveform files in sac format
    :param tensor_info: dictionary with moment tensor information
    :param data_prop: dictionary with waveform properties
    :type files: list
    :type tensor_info: dict
    :type data_prop: dict
    
    .. warning::
        
        Make sure the filters of teleseismic data agree with the values in
        sampling_filter.json! 
    """
    if len(files) == 0:
        return
    origin_time = tensor_info['date_origin']
    headers = [SACTrace.read(file) for file in files]
    streams = [read(file) for file in files]
    dt = headers[0].delta
    dt = round(dt, 1)
    n0, n1 = data_prop['wavelet_scales']
    filter0 = data_prop['tele_filter']
    duration = duration_tele_waves(tensor_info, dt)
    wavelet_weight0, wavelet_weight1 = wavelets_body_waves(
            duration, filter0, dt, n0, n1)
    info_traces = []
    event_lat = tensor_info['lat']
    event_lon = tensor_info['lon']
    depth = tensor_info['depth']
    model = TauPyModel(model="ak135f_no_mud")
    
    for file, header, stream in zip(files, headers, streams):
        __failsafe(filter0, header)
        distance, azimuth, back_azimuth = mng._distazbaz(
                header.stla, header.stlo, event_lat, event_lon)
        arrivals = mng.theoretic_arrivals(model, distance / 111.11, depth)
        # assumes kcmpnm is either 'BHZ' or 'SH'; any other channel would
        # leave arrival/weight/wavelet_weight unset
        if header.kcmpnm == 'BHZ':
            arrival = arrivals['p_arrival'][0].time
            weight = 1.0
            wavelet_weight = wavelet_weight0
        elif header.kcmpnm == 'SH':
            arrival = arrivals['s_arrival'][0].time
            weight = 0.5
            wavelet_weight = wavelet_weight1
        starttime = stream[0].stats.starttime
        begin = starttime - origin_time
        n_start_obs = int((arrival - begin) / dt + 0.5)
        info = _dict_trace(
                file, header.kstnm, header.kcmpnm, azimuth, distance / 111.11,
                dt, duration, n_start_obs, weight, wavelet_weight, [],
                location=[header.stla, header.stlo], derivative=False)
        info_traces.append(info)
    with open('tele_waves.json','w') as f:
         json.dump(
                 info_traces, f, sort_keys=True, indent=4,
                 separators=(',', ': '), ensure_ascii=False)
    return info_traces
Code Example #9
 def saverf(self,
            path,
            evtstr=None,
            shift=0,
            evla=-12345.,
            evlo=-12345.,
            evdp=-12345.,
            mag=-12345.,
            gauss=0,
            baz=-12345.,
            gcarc=-12345.,
            only_r=False,
            **kwargs):
     if self.phase[-1] == 'P':
         if self.comp == 'lqt':
             svcomp = 'Q'
         else:
             svcomp = 'R'
         if only_r:
             loop_lst = [svcomp]
         else:
             loop_lst = [svcomp, 'T']
         rayp = srad2skm(self.rayp)
     elif self.phase[-1] == 'S':
         if self.comp == 'lqt':
             loop_lst = ['L']
         else:
             loop_lst = ['Z']
         rayp = srad2skm(self.rayp)
     else:
         raise ValueError("Phase must end with 'P' or 'S'")
     if evtstr is None:
         filename = join(path, self.datestr)
     else:
         filename = join(path, evtstr)
     for comp in loop_lst:
         trrf = self.rf.select(channel='*' + comp)[0]
         header = {
             'evla': evla,
             'evlo': evlo,
             'evdp': evdp,
             'mag': mag,
             'baz': baz,
             'gcarc': gcarc,
             'user0': rayp,
             'kuser0': 'Ray Para',
             'user1': gauss,
             'kuser1': 'G factor'
         }
         for key in kwargs:
             header[key] = kwargs[key]
         for key, value in header.items():
             trrf.stats['sac'][key] = value
         tr = SACTrace.from_obspy_trace(trrf)
         tr.b = -shift
         tr.a = 0
         tr.ka = self.phase
         tr.write(filename +
                  '_{0}_{1}.sac'.format(self.phase, tr.kcmpnm[-1]))
Code Example #10
File: rf.py Project: Ji-Cong/seispy
def load_station_info(pathname, ref_comp, suffix):
    try:
        ex_sac = glob.glob(join(pathname, '*{0}*{1}'.format(ref_comp,
                                                            suffix)))[0]
    except Exception:
        raise FileNotFoundError('no such SAC file in {0}'.format(pathname))
    ex_tr = SACTrace.read(ex_sac, headonly=True)
    return ex_tr.knetwk, ex_tr.kstnm, ex_tr.stla, ex_tr.stlo, ex_tr.stel
Code Example #11
 def test_sac_booleans_from_trace(self):
     """
     SAC booleans "lcalda" and "lpspol" should be "False" and "True",
     respectively, by default when converting from a "Trace".
     """
     tr = Trace()
     sac = SACTrace.from_obspy_trace(tr)
     self.assertFalse(sac.lcalda)
     self.assertTrue(sac.lpspol)
Code Example #12
File: test_case01.py Project: xumi1993/seispy
def gen_list(para):
    with open(os.path.join(para.rfpath, "CB.NJ2finallist.dat"), 'w+') as fid:
        files = sorted(glob.glob(join(para.rfpath, '*R.sac')))
        for fname in files:
            sac = SACTrace.read(fname)
            evname = basename(fname).split('_')[0]
            fid.write('%s %s %6.3f %6.3f %6.3f %6.3f %6.3f %8.7f %6.3f %6.3f\n' % (
                evname, 'P', sac.evla, sac.evlo, sac.evdp, sac.gcarc, sac.baz, sac.user0, sac.mag, sac.user1
            ))
Code Example #13
File: test_core.py Project: Keita1/obspy
 def test_iztype11(self):
     # test that iztype 11 is read correctly
     sod_file = os.path.join(self.path, 'data', 'dis.G.SCZ.__.BHE_short')
     tr = read(sod_file)[0]
     with open(sod_file, "rb") as fh:
         sac = SACTrace.read(fh)
     t1 = tr.stats.starttime - float(tr.stats.sac.b)
     t2 = sac.reftime
     self.assertAlmostEqual(t1.timestamp, t2.timestamp, 5)
     # see that iztype is written correctly
     with NamedTemporaryFile() as tf:
         tempfile = tf.name
         tr.write(tempfile, format="SAC")
         with open(tempfile, "rb") as fh:
             sac2 = SACTrace.read(fh)
     self.assertEqual(sac2._header['iztype'], 11)
     self.assertAlmostEqual(tr.stats.sac.b, sac2.b)
     self.assertAlmostEqual(t2.timestamp, sac2.reftime.timestamp, 5)
Code Example #14
def load_station_info(pathname, ref_comp, suffix):
    try:
        ex_sac = glob.glob(join(pathname, '*{0}*{1}'.format(ref_comp,
                                                            suffix)))[0]
    except Exception:
        raise FileNotFoundError('no such SAC file in {0}'.format(pathname))
    ex_tr = SACTrace.read(ex_sac, headonly=True)
    if (ex_tr.stla is None or ex_tr.stlo is None):
        raise ValueError('The stlo and stla are not in the SACHeader')
    return ex_tr.knetwk, ex_tr.kstnm, ex_tr.stla, ex_tr.stlo, ex_tr.stel
Code Example #15
File: Pycwb2sac_lib.py Project: zjzzqs/Pycwb2sac
def make_sac_trace(data_block, sac_header, data_length):
    from obspy.io.sac import SACTrace
    flatten_data = np.concatenate(data_block[..., 15::])
    sac_header.npts = flatten_data.size
    new_trace = Trace(data=flatten_data, header=sac_header)
    sac_trace = SACTrace.from_obspy_trace(new_trace)
    sac_trace.stla = sac_header.stla
    sac_trace.stlo = sac_header.stlo
    sac_trace.stel = sac_header.stel
    if int(new_trace.stats.npts % data_length) != 0:
        raise AttributeError("data format error!")
    else:
        return sac_trace
Code Example #16
File: data_management.py Project: gferragu/WASP
def tele_surf_traces(files, tensor_info, data_prop):
    """Write json dictionary with specified properties for surface wave data
    
    :param files: list of waveform files in sac format
    :param tensor_info: dictionary with moment tensor information
    :param data_prop: dictionary with waveform properties
    :type files: list
    :type tensor_info: dict
    :type data_prop: dict
    """
    if len(files) == 0:
        return
    n0, n1 = data_prop['wavelet_scales']
    wavelet_weight = wavelets_surf_tele(n0, n1)
    info_traces = []
    headers = [SACTrace.read(file) for file in files]
    event_lat = tensor_info['lat']
    event_lon = tensor_info['lon']
    for file, header in zip(files, headers):
        npts = header.npts
        n_start = int(-header.b / 4.0)
        if npts < n_start:
            continue
        if npts + n_start < 0:
            continue
        length = 900
        if 900 >= (npts - n_start):
            length = npts - n_start if n_start > 0 else npts
        distance, azimuth, back_azimuth = mng._distazbaz(
            header.stla, header.stlo, event_lat, event_lon)
        info = _dict_trace(file,
                           header.kstnm,
                           header.kcmpnm,
                           azimuth,
                           distance / 111.11,
                           4.0,
                           length,
                           n_start,
                           1.0,
                           wavelet_weight, [],
                           location=[header.stla, header.stlo])
        info_traces.append(info)
    with open('surf_waves.json', 'w') as f:
        json.dump(info_traces,
                  f,
                  sort_keys=True,
                  indent=4,
                  separators=(',', ': '),
                  ensure_ascii=False)
    return info_traces
Code Example #17
def initpara():
    h = np.arange(40, 80, 0.1)
    kappa = np.arange(1.6, 1.9, 0.01)
    path = join(dirname(__file__), 'benchmark')
    npts, dt, shift = readpara(path)
    baz, rayp = np.loadtxt(join(path, 'sample.geom'),
                           usecols=(0, 1),
                           unpack=True)
    rayp *= 1000
    ev_num = baz.shape[0]
    seis = np.empty([ev_num, npts])
    for _i, b in enumerate(baz):
        sac = SACTrace.read(
            join(path, 'tr_{:d}_{:.3f}.r'.format(int(b), rayp[0])))
        seis[_i] = sac.data
    return h, kappa, baz, rayp, npts, dt, shift, ev_num, seis
Code Example #18
 def update_traces(self):
     # AXITRA convention is x=north and y=east
     cmp = {'X': 'N', 'Y': 'E', 'Z': 'Z'}
     instr = {'velocity': 'H', 'displacement': 'H', 'acceleration': 'N'}
     idep = {'velocity': 7, 'displacement': 6, 'acceleration': 8}
     for n, station in enumerate(self.stations):
         stid = station.code.split('.')
         if len(stid) == 3:
             net, sta, loc = stid
         elif len(stid) == 2:
             net, sta = stid
             loc = None
         else:
             sta = '_'.join(stid)
             net = None
             loc = None
         filenames = 'axi{:03d}.?.sac'.format(n + 1)
         filenames = os.path.join(self.run_name, filenames)
         for fname in glob(filenames):
             tr = read(fname)[0]
             tr.stats.network = net
             tr.stats.station = sta
             tr.stats.location = loc
             sr = tr.stats.sampling_rate
             if sr < 10:
                 band = 'L'
             elif 10 <= sr < 80:
                 band = 'B'
             elif sr >= 80:
                 band = 'H'
             channel = band + instr[self.output] + cmp[tr.stats.channel]
             tr.stats.channel = channel
             # Set SAC data type. Note that SAC data amplitude is in nm,
             # nm/s or nm/s/s, so we need to multiply by 1e9
             # https://ds.iris.edu/files/sac-manual/manual/file_format.html
             tr.data *= 1e9
             tr.stats.sac.idep = idep[self.output]
             tr.stats.sac.stla = station.lat
             tr.stats.sac.stlo = station.lon
             tr.stats.sac.stel = -station.z
             sac = SACTrace.from_obspy_trace(tr)
             sac.reftime = self.min_origin_time + self.trace_start_offset
             sac.b = 0
             outfile = tr.id + '.sac'
             outfile = os.path.join(self.run_name, outfile)
             sac.write(outfile)
             os.remove(fname)
Code Example #19
 def update_traces(self):
     # AXITRA convention is x=north and y=east
     cmp = {'X': 'N', 'Y': 'E', 'Z': 'Z'}
     instr = {'velocity': 'H', 'displacement': 'H', 'acceleration': 'N'}
     for n, station in enumerate(self.stations):
         stid = station.code.split('.')
         if len(stid) == 3:
             net, sta, loc = stid
         elif len(stid) == 2:
             net, sta = stid
             loc = None
         else:
             sta = '_'.join(stid)
             net = None
             loc = None
         filenames = 'axi{:03d}.?.sac'.format(n+1)
         filenames = os.path.join(self.run_name, filenames)
         for fname in glob(filenames):
             tr = read(fname)[0]
             tr.stats.network = net
             tr.stats.station = sta
             tr.stats.location = loc
             sr = tr.stats.sampling_rate
             if sr < 10:
                 band = 'L'
             elif 10 <= sr < 80:
                 band = 'B'
             elif sr >= 80:
                 band = 'H'
             channel = band + instr[self.output] + cmp[tr.stats.channel]
             tr.stats.channel = channel
             tr.stats.sac.stla = station.lat
             tr.stats.sac.stlo = station.lon
             tr.stats.sac.stel = -station.z
             sac = SACTrace.from_obspy_trace(tr)
             sac.reftime = self.min_origin_time+self.trace_start_offset
             sac.b = 0
             outfile = tr.id + '.sac'
             outfile = os.path.join(self.run_name, outfile)
             sac.write(outfile)
             os.remove(fname)
Code Example #20
File: mseed2sac.py Project: sislzu/CGRM-DMC
    def _writesac(self, stream, event, station, outdir):
        """
        Write data with SAC format with event and station information.
        """
        for trace in stream:  # loop over 3-component traces
            # transfer obspy trace to sac trace
            sac_trace = SACTrace.from_obspy_trace(trace=trace)

            # set station related headers
            sac_trace.stla = station["stla"]
            sac_trace.stlo = station["stlo"]
            sac_trace.stel = station["stel"]

            if trace.stats.channel[-1] == "E":
                sac_trace.cmpaz = 90
                sac_trace.cmpinc = 90
            elif trace.stats.channel[-1] == "N":
                sac_trace.cmpaz = 0
                sac_trace.cmpinc = 90
            elif trace.stats.channel[-1] == "Z":
                sac_trace.cmpaz = 0
                sac_trace.cmpinc = 0
            else:
                logger.warning("Not E|N|Z component")

            # set event related headers
            sac_trace.evla = event["latitude"]
            sac_trace.evlo = event["longitude"]
            sac_trace.evdp = event["depth"]
            sac_trace.mag = event["magnitude"]

            # 1. SACTrace.from_obspy_trace automatically set Trace starttime
            #    as the reference time of SACTrace, when converting Trace to
            #    SACTrace. Thus in SACTrace, b = 0.0.
            # 2. Set SACTrace.o as the time difference in seconds between
            #    event origin time and reference time (a.k.a. starttime).
            # 3. Set SACTrace.iztype to 'io' change the reference time to
            #    event origin time (determined by SACTrace.o) and also
            #    automatically change other time-related headers
            #    (e.g. SACTrace.b).

            # 1.from_obspy_trace
            #   o
            #   |
            #   b----------------------e
            #   |=>   shift  <=|
            # reftime          |
            #               origin time
            #
            # 2.sac_trace.o = shift
            #   o:reset to be zero
            #   |
            #   b---------------------e
            #   |            |
            #   | refer(origin) time
            # -shift
            sac_trace.o = event["origin"] - sac_trace.reftime
            sac_trace.iztype = 'io'
            sac_trace.lcalda = True

            # SAC file location
            sac_flnm = ".".join([event["origin"].strftime("%Y.%j.%H.%M.%S"),
                                 "0000", trace.id, "M", "SAC"])
            sac_fullname = os.path.join(outdir, sac_flnm)
            sac_trace.write(sac_fullname)
        return
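The o/iztype mechanics spelled out in the comments above can be reproduced in isolation with a toy trace (all times are made up):

import numpy as np
from obspy import Trace, UTCDateTime
from obspy.io.sac import SACTrace

tr = Trace(data=np.zeros(10, dtype=np.float32))
tr.stats.starttime = UTCDateTime('2020-01-01T00:01:00')
sac = SACTrace.from_obspy_trace(tr)  # reftime == starttime, so b == 0.0
sac.o = UTCDateTime('2020-01-01T00:00:00') - sac.reftime  # o == -60.0
sac.iztype = 'io'  # reference shifts to origin time: o -> 0.0, b -> 60.0
print(sac.reftime, sac.o, sac.b)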
Code Example #21
File: Obspy2SAC.py Project: zjzzqs/Pycwb2sac
import sys

from obspy import read, UTCDateTime
from obspy.io.sac import SACTrace

# ---- 01. Ask user for inputs ---------------------------
if len(sys.argv) == 2:
    filename = sys.argv[1]
else:
    print(
        "Please enter one variable i.e. Obspy2SAC 1991-01-01T01:19:46.930000Z_1991-01-01T01:26:02.760000Z.pk"
    )
    sys.exit()

# ---- 02. read in data
Waveforms = read(filename)
for wf in Waveforms:
    sac_trace = SACTrace.from_obspy_trace(wf)
    sac_reftime = sac_trace.reftime
    if "o" in wf.stats and wf.stats.o != UTCDateTime(-12345):
        sac_trace.o = wf.stats.o - sac_reftime
    if "t1" in wf.stats and wf.stats.t1 != UTCDateTime(-12345):
        sac_trace.t1 = wf.stats.t1 - sac_reftime
    if "t2" in wf.stats and wf.stats.t2 != UTCDateTime(-12345):
        sac_trace.t2 = wf.stats.t2 - sac_reftime
    if "evla" in wf.stats:
        sac_trace.evla = wf.stats.evla
    if "evlo" in wf.stats:
        sac_trace.evlo = wf.stats.evlo
    if "evdp" in wf.stats:
        sac_trace.evdp = wf.stats.evdp
    if "stla" in wf.stats:
        sac_trace.stla = wf.stats.stla
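The excerpt is cut off before the converted trace is saved; the loop presumably ends with a write call along these lines (the output name is a guess):

    sac_trace.write(wf.id + '.sac')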
Code Example #22
File: data_management.py Project: gferragu/WASP
def tele_body_traces(files, tensor_info, data_prop):
    """Write json dictionary with specified properties for teleseismic data
    
    :param files: list of waveform files in sac format
    :param tensor_info: dictionary with moment tensor information
    :param data_prop: dictionary with waveform properties
    :type files: list
    :type tensor_info: dict
    :type data_prop: dict
    
    .. warning::
        
        Make sure the filters of teleseismic data agree with the values in
        sampling_filter.json! 
    """
    if len(files) == 0:
        return
    headers = [SACTrace.read(file) for file in files]
    dt = headers[0].delta
    dt = round(dt, 1)
    n0, n1 = data_prop['wavelet_scales']
    print('Wavelet: ', n0, n1)
    filter0 = data_prop['tele_filter']
    duration = duration_tele_waves(tensor_info, dt)
    wavelet_weight0, wavelet_weight1 = wavelets_body_waves(
        duration, filter0, dt, n0, n1)
    info_traces = []
    event_lat = tensor_info['lat']
    event_lon = tensor_info['lon']

    for file, header in zip(files, headers):
        # assumes kcmpnm is either 'BHZ' or 'SH'; any other channel would
        # leave n_start_obs/weight/wavelet_weight unset
        if header.kcmpnm == 'BHZ':
            n_start_obs = int((header.t1 - header.b) / dt + 0.5)
            weight = 1.0
            wavelet_weight = wavelet_weight0
        elif header.kcmpnm == 'SH':
            n_start_obs = int((header.t5 - header.b) / dt + 0.5)
            weight = 0.5
            wavelet_weight = wavelet_weight1
        __failsafe(filter0, header)
        distance, azimuth, back_azimuth = mng._distazbaz(
            header.stla, header.stlo, event_lat, event_lon)
        info = _dict_trace(file,
                           header.kstnm,
                           header.kcmpnm,
                           azimuth,
                           distance / 111.11,
                           dt,
                           duration,
                           n_start_obs,
                           weight,
                           wavelet_weight, [],
                           location=[header.stla, header.stlo])
        info_traces.append(info)
    with open('tele_waves.json', 'w') as f:
        json.dump(info_traces,
                  f,
                  sort_keys=True,
                  indent=4,
                  separators=(',', ': '),
                  ensure_ascii=False)
    return info_traces
Code Example #23
File: data_management.py Project: gferragu/WASP
def strong_motion_traces(files, tensor_info, data_prop):
    """Write json dictionary with specified properties for strong motion data
    
    :param files: list of waveform files in sac format
    :param tensor_info: dictionary with moment tensor information
    :param data_prop: dictionary with waveform properties
    :type files: list
    :type tensor_info: dict
    :type data_prop: dict
    
    .. warning::
        
        Make sure the filters of strong motion data agree with the values in
        sampling_filter.json!
    """
    if len(files) == 0:
        return
    event_lat = tensor_info['lat']
    event_lon = tensor_info['lon']
    headers = [SACTrace.read(file) for file in files]
    dt_strong = headers[0].delta
    dt_strong = round(dt_strong, 1)
    starts = [header.o if header.o else 0 for header in headers]
    values = [mng._distazbaz(header.stla, header.stlo, event_lat, event_lon)\
        for header in headers]
    distances = [value[0] for value in values]
    arrivals = [header.t1 if header.t1 else 0 for header in headers]
    filter0 = data_prop['strong_filter']
    seismic_moment = tensor_info['moment_mag']
    #    outliers = strong_outliers(files, tensor_info)
    if seismic_moment < 2 * 10**26:
        outliers = strong_outliers2(files, distances, tensor_info)
    else:
        outliers = strong_outliers(files, tensor_info)
    duration = duration_strong_motion(distances, arrivals, tensor_info,
                                      dt_strong)
    n0, n1 = data_prop['wavelet_scales']
    wavelet_weight = wavelets_strong_motion(duration, filter0, dt_strong, n0,
                                            n1)
    black_list = {'PB02': ['HNE', 'HNN', 'HLE', 'HLN'], 'PX02': ['HNE', 'HLE']}

    info_traces = []
    outlier_traces = []
    headers = [SACTrace.read(file) for file in files]
    streams = [read(file) for file in files]
    weights = [1.0 for _ in files]
    weights = [0 if header.kstnm in black_list\
        and header.kcmpnm in black_list[header.kstnm] else weight\
        for weight, header in zip(weights, headers)]
    #    weights = [0 if file in outliers else weight\
    #               for file, weight in zip(files, weights)]

    for file, header, start, weight, stream in zip(files, headers, starts,
                                                   weights, streams):
        # __failsafe(filter0, header)
        distance, azimuth, back_azimuth = mng._distazbaz(
            header.stla, header.stlo, event_lat, event_lon)
        info = _dict_trace(file,
                           header.kstnm,
                           header.kcmpnm,
                           azimuth,
                           distance / 111.11,
                           dt_strong,
                           duration,
                           int(start / dt_strong),
                           weight,
                           wavelet_weight, [],
                           location=[header.stla, header.stlo])
        info_traces.append(info)
    with open('strong_motion_waves.json', 'w') as f:
        json.dump(info_traces,
                  f,
                  sort_keys=True,
                  indent=4,
                  separators=(',', ': '),
                  ensure_ascii=False)
    with open('outlier_strong_motion_waves.json', 'w') as f:
        json.dump(outlier_traces,
                  f,
                  sort_keys=True,
                  indent=4,
                  separators=(',', ': '),
                  ensure_ascii=False)
    return info_traces
Code Example #24
File: dat2sac_mseed.py Project: tychoaussie/MDTCal
def csv2sac(infile,cconstant):
#
    Channel = ["","","",""]
    units = ['Counts  ','Counts  ','Counts  ','Counts  ']
    comment = ['Velocity','Velocity','Velocity','Velocity']
    (header,stack) = load(infile)

    first_sample,medsps = timingvalidate(stack)

    if first_sample != 0:
        print("\n\nA timing error exists in the 1st sample of the 1st record in this DAT series.\n")
        print("Remove the first DAT file in the folder and try again.")
        sys.exit()

   # print "The first sample shows instantaneous sample rate of {} S/second.".format(medsps)

    # datetime = stack[0][13]+","+stack[0][14]
    # Frac_second = float(stack[0][17])

    datetime = stack[0][15]+","+stack[0][16]  # New cs4 format the fields are offset by two more columns
    Frac_second = float(stack[0][17])

    St_time = time.strptime(datetime,"%Y/%m/%d,%H:%M:%S")
    stdate = str(St_time.tm_year)+"_"+str(St_time.tm_mon)+"_"+str(St_time.tm_mday)
    stmin = str(St_time.tm_hour)+"_"+str(St_time.tm_min)+"_"+str(St_time.tm_sec)+"_"+str(int(Frac_second*1000))
    Filetime = stdate+"_"+stmin+"_"
    Station = cconstant[0]
    Network = cconstant[15]
    #     File naming convention: YYYY.DDD.HH.MM.SS.SSS.NN.STATION.CHANNEL
    
    seedfil = infile[0:infile.rfind('\\')] + "/" + Filetime + Network + "_" + Station
    sacfil = infile[0:infile.rfind('.')]

    for i in range(0,4):
        Channel[i]=cconstant[(2*i)+1]

    Samplecount = len(stack)
    print "Sample count stands at {} samples.".format(Samplecount)

    Delta = 1.0/float(getsps(stack))

#    Delta = 1/medsps

    print "Delta = {0:.8f}, Sample rate = {1:.8f}".format(Delta,1/Delta)  

        #
        # stack[1] = channel 1 time history
        # .
        #
        # stack[4] = channel 4 time history
        #


    for i in range(0,4): # Build each channel
       
        b = np.arange(len(stack),dtype=np.float32)   #   Establishes the size of the datastream
        for n in range(len(stack)):         #   Load the array with time-history data
            b[n] = np.float32(stack[n][i+1])  #   Copy the raw sample as float32

        t = SACTrace(data = b)         
                    #                     set the SAC header values
        t.scale=1.0     # Set the scale for each channel for use with DIMAS software
        t.delta=Delta
        t.nzyear=St_time.tm_year
        t.nzjday=St_time.tm_yday
        t.nzhour=St_time.tm_hour
        t.nzmin=St_time.tm_min
        t.nzsec=St_time.tm_sec
        t.nzmsec=int((Frac_second)*1000)
        t.kstnm=Station
        t.kcmpnm=Channel[i]
        t.idep = 4            # 4 = units of velocity (in Volts)
                              # Dependent variable choices: (1)unknown, (2)displacement(nm),
                              # (3)velocity(nm/sec), (4)velocity(volts),
                              # (5)nm/sec/sec
        t.kinst = comment[i]  # Instrument type (comment[i-1] would mis-index channel 0)
        t.knetwk = Network    # Network designator
        t.kuser0 = units[i]   # Place the system of units into user text field 0

        out = sacfil+"_{}.sac".format(Channel[i])
        seed = seedfil+"_{}.mseed".format(Channel[i])

        if Channel[i] !="UNK":   # We do not write streams in which the channel name is UNK
            with open(out, 'wb') as sacfile:
                t.write(sacfile)
            print(" File successfully written: {0}".format(out))
            st = read(out)
            st.write(seed, format="mseed")
            print(" File successfully written: {0}".format(seed))

#            subprocess.call(["del",f],shell=True) 

#    for i in range(0,1): # Build special channel for timing

    b = np.arange(len(stack),dtype=np.float32)   #   Establishes the size of the datastream
    for n in range(len(stack)):        #   Load the array with time-history data
        b[n] = np.float32(stack[n][13]) #   Get the timing value.
    t = SACTrace(data = b)

             #                     set the SAC header values
    t.scale=1.0 # Set the scale for each channel. This one is important to declare.
    t.delta=Delta
    t.nzyear=St_time.tm_year
    t.nzjday=St_time.tm_yday
    t.nzhour=St_time.tm_hour
    t.nzmin=St_time.tm_min
    t.nzsec= St_time.tm_sec
    t.nzmsec= int((Frac_second)*1000)
    t.kstnm=Station
    t.kcmpnm='GPS' # This is a GPS timing signal.
    t.idep = 1            # 1 = unknown dependent variable type
                          # Dependent variable choices: (1)unknown, (2)displacement(nm),
                          # (3)velocity(nm/sec), (4)velocity(volts),
                          # (5)nm/sec/sec
    t.kinst = 'GPS'       # Instrument type
    t.knetwk = Network    # Network designator
    t.kuser0 = 'digital'  # Place the system of units into user text field 0

    out = sacfil+"_{}.sac".format('GPS')
    with open(out,'wb') as sacfile:
        t.write(sacfile)

#    print " File successfully written: {}.sac".format(out)
#    print "Published sample rate in sac file = {}".format(t.stats.sampling_rate)       
    sacfile.close()
Code Example #25
File: PNE2SAC.py Project: tychoaussie/Converters
def main():
#           MAIN PROGRAM BODY
#  Parse the command line switches
    optioncount = len(sys.argv)
    SAC = False
    outputfile_defined = False
    filelist = []
    dir=""
    extension = '.txt'
    if optioncount > 1:

        if optioncount == 4:
            if sys.argv[3] == '-s':
                SAC = True
#               print "SAC set to true."
            outfile = sys.argv[2]
            infile = sys.argv[1]
            filelist.append(infile)

        elif optioncount == 3:
            if "." in sys.argv[1]:
                
                infile = sys.argv[1]
                filelist.append(infile)
                outputfile_defined = True
                outfile = sys.argv[2]
            else:
                if len(sys.argv[2])==4 and "." in sys.argv[2]: # set a different extension
                    extension = sys.argv[2]
                filelist = os.listdir(sys.argv[1])
                dir=sys.argv[1]
            
        elif optioncount == 2:
            if "." in sys.argv[1]:
                infile = sys.argv[1]
                filelist.append(infile)
            else:
                dir = sys.argv[1]
                filelist = os.listdir(sys.argv[1])

        
        for n in range(len(filelist)):
            if extension in filelist[n]:
                if len(filelist)>1:
                    infile = dir+"/"+filelist[n]

                if infile.find('.') > 0:
                    outfile = infile[:infile.find('.')] + '.sac'
                    seedfile = infile[:infile.find('.')] + '.mseed'
                else:
                    outfile = infile + '.sac'
                    seedfile = infile + '.mseed'

                PNE       = load(infile)

#                        PNE[0] is the header where:
#                        PNE[0][0][1] = The comment descriptor of the data
#                        PNE[0][1][1] = The station name
#                        PNE[0][2][1] = Component axis(Z,N, or E)
#                        PNE[0][4][1] = Start time
#                        PNE[0][5][1] = Calibration factor
#                        PNE[0][6][1] = Time correction in seconds

                Comment   = PNE[0][0][1]
                Stname    = PNE[0][1][1][:7]
                Component = PNE[0][2][1][:3]
                St_time   = time.strptime(PNE[0][4][1][:-4],"%d_%b_%Y_%H:%M:%S")
                Frac_sec  = int(PNE[0][4][1][21:])
                CF        = np.float32(PNE[0][5][1])

                TC        = PNE[0][6][1] # time correction

                Offset    = float(PNE[1][0][0])
#
#                       Delta is calculated from the offset time of last sample 
#                       minus offset of first sample / total number of samples.

            
                Delta     = (float(PNE[1][len(PNE[1])-1][0])-Offset)/(len(PNE[1])-1)

#                       Load Data array
#                       Samples in file are multiplied by 10,000 to convert from
#                       measurements of centimeters to microns, then it's divided by
#                       the Amplification (conversion) factor, known as CF

                Data     = []
                for n in range (len(PNE[1])-1):
                    Datum = np.float32(np.float32(PNE[1][n][1])*10000.0/CF)
                    Data.append(Datum)
                
                b        = np.arange(len(Data),dtype=np.float32)
                for n in range(len(Data)): #   Load the array with time-history data
                    b[n] = Data[n]

                t        = SACTrace(data = b)         
                                             # set the SAC header values
                t.scale  = 1.0               # Set the scale for use with DIMAS software
                t.delta  = Delta
                t.nzyear = St_time.tm_year
                t.nzjday = St_time.tm_yday
                t.nzhour = St_time.tm_hour
                t.nzmin  = St_time.tm_min
                t.nzsec  = St_time.tm_sec
                t.nzmsec = Frac_sec          # int((Frac_second)*1000)
                t.kstnm  = Stname[:7]
                t.kcmpnm = Component
                t.idep   = 4                 # 4 = units of velocity (in Volts)
                                             # Dependent variable choices: 
                                             # (1)unknown, 
                                             # (2)displacement(nm), 
                                             # (3)velocity(nm/sec), 
                                             # (4)velocity(volts), 
                                             # (5)nm/sec/sec
                t.kinst  = "Velocity"        # Instrument type
                t.knetwk = "LM"              # Network designator
                t.kuser0 = "Nanometr"        # Place the system of units into the user text field 0

#                t.WriteSacBinary(outfile)
                with open(outfile, 'wb') as sacfile:
                    t.write(sacfile)
                print(" File successfully written: {0}".format(outfile))
                st = read(outfile)
                st.write(seedfile, format="mseed")
                print(" File successfully written: {0}".format(seedfile))
        
    else:
        print "Useage: PNE2SAC infile.txt (outfile.asc)"
        print "Or, PNE2SAC target_directory target_extension(like .txt)"
        print "No infile or directory specified."
        print len(sys.argv)
Code Example #26
            #                     st.remove_response(output="DISP", zero_mean=True, taper=True, taper_fraction=0.05, pre_filt=[0.001, 0.005, sr/3, sr/2], water_level=60)

            # Downsample
            st.detrend(type='demean')
            st.detrend(type='linear')
            st.filter('lowpass', freq=0.4 * sr_new,
                      zerophase=True)  # anti-alias filter
            st.filter('highpass', freq=1 / 60 / 60,
                      zerophase=True)  # Remove daily oscillations
            st.decimate(factor=int(sr / sr_new), no_filter=True)  # downsample
            st.taper(type="cosine", max_percentage=0.05)
            st.detrend(type='demean')
            st.detrend(type='linear')

            # convert to SAC and fill out station/event header info
            sac = SACTrace.from_obspy_trace(st[0])
            sac.stel = staz[ista]
            sac.stla = stala[ista]
            sac.stlo = stalo[ista]
            kcmpnm = comp
            sac.kcmpnm = kcmpnm
            yr = str(st[0].stats.starttime.year)
            jday = '%03i' % (st[0].stats.starttime.julday)
            hr = '%02i' % (st[0].stats.starttime.hour)
            mn = '%02i' % (st[0].stats.starttime.minute)
            sec = '%02i' % (st[0].stats.starttime.second)
            sac_out = path2sac + sta + '/' + sta + '.' + yr + '.' + jday + '.' + hr + '.' + mn + '.' + sec + '.' + kcmpnm + '.sac'
            sac.write(sac_out)

Code Example #27
        st_bhr.stats.channel = Rcomp
        st_bhr.data = r

        # Remove existing file
        if os.path.exists(evdir + event+'.'+network+'.'+str(sta)+'.'+Ncomp+'.sac'):
            os.remove(evdir + event+'.'+network+'.'+str(sta)+'.'+Ncomp+'.sac')
        if os.path.exists(evdir + event+'.'+network+'.'+str(sta)+'.'+Ecomp+'.sac'):
            os.remove(evdir + event+'.'+network+'.'+str(sta)+'.'+Ecomp+'.sac')
        if os.path.exists(evdir + event+'.'+network+'.'+str(sta)+'.'+Tcomp+'.sac'):
            os.remove(evdir + event+'.'+network+'.'+str(sta)+'.'+Tcomp+'.sac')
        if os.path.exists(evdir + event+'.'+network+'.'+str(sta)+'.'+Rcomp+'.sac'):
            os.remove(evdir + event+'.'+network+'.'+str(sta)+'.'+Rcomp+'.sac')
        
        network = st_bhr.stats.network
        # Save BHN, BHE, BHR, and BHT
        sac_n = SACTrace.from_obspy_trace(st_bhn)
        sac_n.write(evdir + event+'.'+network+'.'+str(sta)+'.'+Ncomp+'.sac')
        sac_e = SACTrace.from_obspy_trace(st_bhe)
        sac_e.write(evdir + event+'.'+network+'.'+str(sta)+'.'+Ecomp+'.sac')
        sac_t = SACTrace.from_obspy_trace(st_bht)
        sac_t.write(evdir + event+'.'+network+'.'+str(sta)+'.'+Tcomp+'.sac')
        sac_r = SACTrace.from_obspy_trace(st_bhr)
        sac_r.write(evdir + event+'.'+network+'.'+str(sta)+'.'+Rcomp+'.sac')
        
#         fmin = 1/100
#         fmax = 1/20
#         st_bht.filter("bandpass", freqmin=fmin, freqmax=fmax, corners=2, zerophase=True)
#         st_bhr.filter("bandpass", freqmin=fmin, freqmax=fmax, corners=2, zerophase=True)
#         plt.figure(figsize=(10,5))
#         plt.plot(np.arange(0,len(r)), st_bht.data, color="red")
#         plt.plot(np.arange(0,len(t)), st_bhr.data, color="black")
Code Example #28
def process_seg2(flist, cha_list, outdir, day):
    """ Given a list of 30-s long SEG2 files, merge into one file per channel, decimate to 100 Hz and write as SAC"""
    
    # Create output directory with name of date for start time
    day_dir = os.path.join(outdir, day)
    if not os.path.exists(day_dir):
        os.mkdir(day_dir)

    # Loop over channels
    for channel in cha_list:

        sta, comp, dep = get_station_component(channel)
        print("Assembling data for channel %d: geode %s, component %s, depth %f m" % (channel, sta, comp, dep))
#        if glob(os.path.join(day_dir, "8O.%s..%s.*.sac" % (sta, comp))):
#            print("Station %s already processed." % sta)
#            continue
                                                
        # Initialize a Stream object
        st = Stream()

        # Loop over 30-s long files and append data
        for file in sorted(flist):
            tmp = read(file, format="SEG2")
            if not tmp:
                raise ValueError("Data file read is empty: %s" % file)
            trace = []
            for tr in tmp:
                if int(tr.stats.seg2.CHANNEL_NUMBER) == channel:
                    
                    # Check for NaN
                    if np.isnan(tr.data).any():
                        print("There are NaN values in this file: %s" % file)
                        idx_nan = np.argwhere(np.isnan(tr.data))
                        # Replace by 0
                        tr.data[idx_nan] = 0
                        print("Replaced %d samples by 0." % len(idx_nan))
                        # note: this continue still skips the trace even
                        # though the NaN samples were just zeroed out
                        continue
                        
                    trace = tr
                    # correct start_time for SEG-2 acquisition delay
                    trace.stats.starttime = tr.stats.starttime + float(tr.stats.seg2.DELAY)
                    # correct amplitude for amplifier-gain
                    amp_gain = float(tr.stats.seg2.FIXED_GAIN.split(" ")[0])
                    instrument_correction_scalar = SENSITIVITY * (10.**(amp_gain/20))  # See 7.7 from Eaton 2018
                    trace.data = trace.data * float(tr.stats.seg2.DESCALING_FACTOR)/float(tr.stats.seg2.STACK) * 1e-3  # convert to Volt
                    trace.data = trace.data / instrument_correction_scalar
                    break
            if not trace:
                print("Could not find data for this channel in file %s" % file)
                continue
            st.append(trace)
            st.merge(method=1, interpolation_samples=-1, fill_value=0)

        # Check stream has only one merged trace
        if len(st) == 0:
            raise ValueError("Something went wrong: no data found for channel %d" % channel)
        elif len(st) > 1:
            print(st)
            raise ValueError("Something went wrong: there should be only one trace in the stream.")
        
        # Resample to 500Hz
        #st.resample(500.0)
        
        # Check data for gaps
        if st.get_gaps():
            st.print_gaps()
            warnings.warn("There are gaps in the data.")

        # Decimate data by factor of 5: from 500Hz to 100Hz
        #st.decimate(factor=5)

        # Cut day
        ts = UTCDateTime(int(day[0:4]),int(day[4:6]),int(day[6:]),0,0,0)
        te = ts + 3600*24
        st.trim(starttime=ts,endtime=te)
        print(st)

        # Add header info
        sac = SACTrace.from_obspy_trace(st[0])
        sac.kcmpnm = comp
        sac.kstnm = sta
        sac.stel = 779.60  # Ground elevation in m
        sac.stdp = dep
        sac.stla = 50.45031  # coordinates of geophysics well
        sac.stlo = -112.12087  # coordinates of geophysics well
        sac.knetwk = "8O"  # give network code BH for "borehole"

        # Define output file name
        sacname = "8O.%s..%s.%s_%s_%dHz_unitmps.sac" % (sta, comp,
                                                 st[0].stats.starttime.strftime("%Y%m%d%H%M%S"),
                                                 st[0].stats.endtime.strftime("%Y%m%d%H%M%S"), int(st[0].stats.sampling_rate))

        sac.write(os.path.join(day_dir, sacname))
Code Example #29
def writesac(velfile,site,stalat,stalon,doy,year,samprate,event):
    a = numpy.loadtxt(velfile)
    tind = a[:,0]
    gtime = a[:,1]
    leapsec = gpsleapsec(gtime[0])
    
    #Get the start time of the file in UTC
    date = datetime.datetime(int(year), 1, 1) + datetime.timedelta(int(doy) - 1)
    gpstime = (numpy.datetime64(date) - numpy.datetime64('1980-01-06T00:00:00'))/ numpy.timedelta64(1, 's')
    stime = (gtime[0]-leapsec)*numpy.timedelta64(1, 's')+ numpy.datetime64('1980-01-06T00:00:00')
    sitem = stime.item()
    print(sitem)
    styr = sitem.year
    stdy = sitem.day
    stmon = sitem.month
    sthr = sitem.hour
    stmin = sitem.minute
    stsec = sitem.second

    
    nv = a[:,2]
    ev = a[:,3]
    uv = a[:,4]
    print('Writing SAC file ' + 'output/' + site.upper() + '.vel.n')
    headN = {'kstnm': site, 'kcmpnm': 'LXN', 'stla': float(stalat),'stlo': float(stalon),
             'nzyear': int(year), 'nzjday': int(doy), 'nzhour': int(sthr), 'nzmin': int(stmin),
             'nzsec': int(stsec), 'nzmsec': int(0), 'delta': float(samprate)}

    sacn = SACTrace(data=nv, **headN)
    sacn.write('output/' + site.upper() + '.vel.n')
    print('Writing SAC file ' + 'output/' + site.upper() + '.vel.e')

    headE = {'kstnm': site, 'kcmpnm': 'LXE', 'stla': float(stalat),'stlo': float(stalon),
         'nzyear': int(year), 'nzjday': int(doy), 'nzhour': int(sthr), 'nzmin': int(stmin),
         'nzsec': int(stsec), 'nzmsec': int(0), 'delta': float(samprate)}
    sace = SACTrace(data=ev, **headE)
    sace.write('output/' + site.upper() + '.vel.e')
    print('Writing SAC file ' + 'output/' + site.upper() + '.vel.u')

    headZ = {'kstnm': site, 'kcmpnm': 'LXZ', 'stla': float(stalat),'stlo': float(stalon),
             'nzyear': int(year), 'nzjday': int(doy), 'nzhour': int(sthr), 'nzmin': int(stmin),
             'nzsec': int(stsec), 'nzmsec': int(0), 'delta': float(samprate)}
    sacu = SACTrace(data=uv, **headZ)
    sacu.write('output/' + site.upper() + '.vel.u')
Code Example #30
def test_sac2asdf_script(tmpdir, capsys):
    tmpdir = tmpdir.strpath

    # Create some test data
    data_1 = np.arange(10, dtype=np.float32)
    header = {
        "kstnm": "ANMO",
        "knetwk": "IU",
        "kcmpnm": "BHZ",
        "stla": 40.5,
        "stlo": -108.23,
        "stel": 100.0,
        "stdp": 3.4,
        "evla": -15.123,
        "evlo": 123,
        "evdp": 50,
        "nzyear": 2012,
        "nzjday": 123,
        "nzhour": 13,
        "nzmin": 43,
        "nzsec": 17,
        "nzmsec": 100,
        "delta": 1.0 / 40,
        "o": -10.0,
    }
    sac = SACTrace(data=data_1, **header)
    sac.write(os.path.join(tmpdir, "a.sac"))

    data_2 = 2.0 * np.arange(10, dtype=np.float32)
    header = {
        "kstnm": "BBBB",
        "knetwk": "AA",
        "kcmpnm": "CCC",
        "stla": 40.5,
        "stlo": -108.23,
        "stel": 200.0,
        "stdp": 2.4,
        "evla": -14.123,
        "evlo": 125,
        "evdp": 30,
        "nzyear": 2013,
        "nzjday": 123,
        "nzhour": 13,
        "nzmin": 43,
        "nzsec": 17,
        "nzmsec": 100,
        "delta": 1.0 / 40,
        "o": 10.0,
    }
    sac = SACTrace(data=data_2, **header)
    sac.write(os.path.join(tmpdir, "b.sac"))

    output_file = os.path.join(tmpdir, "out.h5")
    assert not os.path.exists(output_file)

    sys_argv_backup = copy.copy(sys.argv)
    try:
        sys.argv = sys.argv[:1]
        sys.argv.append(tmpdir)
        sys.argv.append(output_file)
        sys.argv.append("random")
        sac2asdf.__main__()
    finally:
        # Restore to not mess with any of pytests logic.
        sys.argv = sys_argv_backup

    non_verbose_out, non_verbose_err = capsys.readouterr()
    assert not non_verbose_err

    assert os.path.exists(output_file)
    with pyasdf.ASDFDataSet(output_file, mode="r") as ds:
        # 2 Events.
        assert len(ds.events) == 2
        # 2 Stations.
        assert len(ds.waveforms) == 2

        events = ds.events  # NOQA

        # Data should actually be fully identical
        np.testing.assert_equal(data_1, ds.waveforms.IU_ANMO.random[0].data)
        np.testing.assert_equal(data_2, ds.waveforms.AA_BBBB.random[0].data)

        assert ds.waveforms.IU_ANMO.random[0].id == "IU.ANMO..BHZ"
        assert ds.waveforms.AA_BBBB.random[0].id == "AA.BBBB..CCC"

        c = ds.waveforms.IU_ANMO.coordinates
        np.testing.assert_allclose(
            [c["latitude"], c["longitude"], c["elevation_in_m"]],
            [40.5, -108.23, 100.0],
        )
        c = ds.waveforms.AA_BBBB.coordinates
        np.testing.assert_allclose(
            [c["latitude"], c["longitude"], c["elevation_in_m"]],
            [40.5, -108.23, 200.0],
        )

        c = ds.waveforms.IU_ANMO.channel_coordinates["IU.ANMO..BHZ"][0]
        np.testing.assert_allclose(
            [
                c["latitude"],
                c["longitude"],
                c["elevation_in_m"],
                c["local_depth_in_m"],
            ],
            [40.5, -108.23, 100.0, 3.4],
        )
        c = ds.waveforms.AA_BBBB.channel_coordinates["AA.BBBB..CCC"][0]
        np.testing.assert_allclose(
            [
                c["latitude"],
                c["longitude"],
                c["elevation_in_m"],
                c["local_depth_in_m"],
            ],
            [40.5, -108.23, 200.0, 2.4],
        )

        # Events
        origin = (ds.waveforms.IU_ANMO.random[0].stats.asdf.event_ids[0].
                  get_referred_object().origins[0])
        np.testing.assert_allclose(
            [origin.latitude, origin.longitude, origin.depth],
            [-15.123, 123.0, 50.0],
        )
        assert (origin.time == obspy.UTCDateTime(
            year=2012,
            julday=123,
            hour=13,
            minute=43,
            second=17,
            microsecond=100000,
        ) - 10.0)

        origin = (ds.waveforms.AA_BBBB.random[0].stats.asdf.event_ids[0].
                  get_referred_object().origins[0])
        np.testing.assert_allclose(
            [origin.latitude, origin.longitude, origin.depth],
            [-14.123, 125.0, 30.0],
        )
        assert (origin.time == obspy.UTCDateTime(
            year=2013,
            julday=123,
            hour=13,
            minute=43,
            second=17,
            microsecond=100000,
        ) + 10.0)

    # Run once again in verbose mode but just test that the output is
    # actually more.
    os.remove(output_file)
    sys_argv_backup = copy.copy(sys.argv)
    try:
        sys.argv = sys.argv[:1]
        sys.argv.append("--verbose")
        sys.argv.append(tmpdir)
        sys.argv.append(output_file)
        sys.argv.append("random")
        sac2asdf.__main__()
    finally:
        # Restore to not mess with any of pytests logic.
        sys.argv = sys_argv_backup
    verbose_out, verbose_err = capsys.readouterr()
    assert not verbose_err
    assert len(verbose_out) > len(non_verbose_out)
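
The test above drives the converter by rewriting sys.argv before calling sac2asdf.__main__(). A small wrapper makes the argument order the test exercises explicit (input directory, output ASDF file, waveform tag, optional --verbose); the wrapper name is illustrative, and sac2asdf is assumed to be importable as in the test module:

import copy
import sys

def run_sac2asdf(input_dir, output_file, tag, verbose=False):
    sys_argv_backup = copy.copy(sys.argv)
    try:
        sys.argv = sys.argv[:1]
        if verbose:
            sys.argv.append("--verbose")
        sys.argv.extend([input_dir, output_file, tag])
        sac2asdf.__main__()
    finally:
        # Restore to not mess with any of pytest's logic.
        sys.argv = sys_argv_backup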
Code example #34
def main():
    args = get_args()

    if args.nickname[-3:] == 'ph5':
        ph5file = os.path.join(args.ph5path, args.nickname)
    else:
        ph5file = os.path.join(args.ph5path, args.nickname + '.ph5')
        args.nickname += '.ph5'

    if not os.path.exists(ph5file):
        LOGGER.error("{0} not found.\n".format(ph5file))
        sys.exit(-1)

    ph5API_object = ph5api.PH5(path=args.ph5path, nickname=args.nickname)

    if args.array:
        args.array = args.array.split(',')
    if args.sta_id_list:
        args.sta_id_list = args.sta_id_list.split(',')
    if args.sta_list:
        args.sta_list = args.sta_list.split(',')
    if args.shotline:
        args.shotline = args.shotline.split(',')
    if args.eventnumbers:
        args.eventnumbers = args.eventnumbers.split(',')
    if args.sample_rate:
        args.sample_rate = args.sample_rate.split(',')
    if args.component:
        args.component = args.component.split(',')
    if args.channel:
        args.channel = args.channel.split(',')

    args.reqtype = args.reqtype.upper()
    args.format = args.format.upper()

    try:
        if args.reqtype != "SHOT" and args.reqtype != "FDSN" and \
                args.reqtype != "RECEIVER":
            raise PH5toMSAPIError("Error - Invalid request type {0}. "
                                  "Choose from FDSN, SHOT, or RECEIVER."
                                  .format(args.reqtype))

        if args.format != "MSEED" and args.format != "SAC":
            raise PH5toMSAPIError("Error - Invalid data format {0}. "
                                  "Choose from MSEED or SAC."
                                  .format(args.format))

        ph5ms = PH5toMSeed(ph5API_object, out_dir=args.out_dir,
                           reqtype=args.reqtype, netcode=args.network,
                           station=args.sta_list, station_id=args.sta_id_list,
                           channel=args.channel, component=args.component,
                           array=args.array, shotline=args.shotline,
                           eventnumbers=args.eventnumbers, length=args.length,
                           starttime=args.start_time, stoptime=args.stop_time,
                           offset=args.offset, das_sn=args.das_sn,
                           use_deploy_pickup=args.deploy_pickup,
                           decimation=args.decimation,
                           sample_rate_keep=args.sample_rate,
                           doy_keep=args.doy_keep, stream=args.stream,
                           reduction_velocity=args.red_vel,
                           notimecorrect=args.notimecorrect,
                           format=args.format)

        for stream in ph5ms.process_all():
            if args.format.upper() == "MSEED":
                if not args.non_standard:
                    stream.write(ph5ms.filenamemseed_gen(stream),
                                 format='MSEED', reclen=4096)
                else:
                    stream.write(ph5ms.filenamemseed_nongen(stream),
                                 format='MSEED', reclen=4096)
            elif args.format.upper() == "SAC":
                for trace in stream:
                    sac = SACTrace.from_obspy_trace(trace)
                    if not args.non_standard:
                        sac.write(ph5ms.filenamesac_gen(trace))
                    else:
                        sac.write(ph5ms.filenamesac_nongen(trace))

    except PH5toMSAPIError as err:
        LOGGER.error("{0}".format(err.message))
        sys.exit(-1)

    ph5API_object.close()
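
The eight near-identical split(',') blocks near the top of main() could be collapsed into one loop. A sketch (the helper name is hypothetical, not part of the PH5 API):

def split_csv_args(args, names):
    # Replace each comma-separated option string with a list, in place.
    for name in names:
        value = getattr(args, name)
        if value:
            setattr(args, name, value.split(','))

split_csv_args(args, ['array', 'sta_id_list', 'sta_list', 'shotline',
                      'eventnumbers', 'sample_rate', 'component', 'channel'])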
Code example #35
def csv2sac(infile, cconstant):
    #
    Channel = ["", "", "", ""]
    units = ['Counts  ', 'Counts  ', 'Counts  ', 'Counts  ']
    comment = ['Velocity', 'Velocity', 'Velocity', 'Velocity']
    (header, stack) = load(infile)

    first_sample, medsps = timingvalidate(stack)

    if first_sample != 0:
        print("\n\nA timing error exists in the 1st sample of the 1st record in this DAT series.\n")
        print("Remove the first DAT file in the folder and try again.")
        sys.exit()

# print "The first sample shows instantaneous sample rate of {} S/second.".format(medsps)

# datetime = stack[0][13]+","+stack[0][14]
# Frac_second = float(stack[0][17])

    # In the new cs4 format these fields are offset by two more columns.
    datetime = stack[0][15] + "," + stack[0][16]
    Frac_second = float(stack[0][17])

    St_time = time.strptime(datetime, "%Y/%m/%d,%H:%M:%S")
    stdate = "%d_%d_%d" % (St_time.tm_year, St_time.tm_mon, St_time.tm_mday)
    stmin = "%d_%d_%d_%d" % (St_time.tm_hour, St_time.tm_min, St_time.tm_sec,
                             int(Frac_second * 1000))
    Filetime = stdate + "_" + stmin + "_"
    Station = cconstant[0]
    Network = cconstant[15]
    #     File naming convention: YYYY.DDD.HH.MM.SS.SSS.NN.STATION.CHANNEL

    seedfil = infile[0:infile.rfind('\\')] + "/" + Filetime + Network + "_" + Station
    sacfil = infile[0:infile.rfind('.')]

    for i in range(0, 4):
        Channel[i] = cconstant[(2 * i) + 1]

    Samplecount = len(stack)
    print "Sample count stands at {} samples.".format(Samplecount)

    Delta = 1.0 / float(getsps(stack))

    #    Delta = 1/medsps

    print "Delta = {0:.8f}, Sample rate = {1:.8f}".format(Delta, 1 / Delta)

    #
    # stack[1] = channel 1 time history
    # .
    #
    # stack[4] = channel 4 time history
    #

    for i in range(0, 4):  # Build each channel

        # Allocate the data array and fill it with this channel's
        # time-history samples (column i + 1 of each record).
        b = np.arange(len(stack), dtype=np.float32)
        for n in range(len(stack)):
            b[n] = np.float32(stack[n][i + 1])

        t = SACTrace(data=b)
        #                     set the SAC header values
        t.scale = 1.0  # Set the scale for each channel for use with DIMAS software
        t.delta = Delta
        t.nzyear = St_time.tm_year
        t.nzjday = St_time.tm_yday
        t.nzhour = St_time.tm_hour
        t.nzmin = St_time.tm_min
        t.nzsec = St_time.tm_sec
        t.nzmsec = int((Frac_second) * 1000)
        t.kstnm = Station
        t.kcmpnm = Channel[i]
        t.idep = 'ivolts'  # dependent variable: velocity in volts
        # SACTrace expects the enumerated names 'iunkn' (unknown),
        # 'idisp' (nm), 'ivel' (nm/sec), 'ivolts' (volts),
        # 'iacc' (nm/sec/sec) for the idep header.
        t.kinst = comment[i]  # Instrument type
        t.knetwk = Network  # Network designator
        t.kuser0 = units[i]  # Place the system of units into user text field 0

        out = sacfil + "_{}.sac".format(Channel[i])
        seed = seedfil + "_{}.mseed".format(Channel[i])

        if Channel[i] != "UNK":  # Do not write streams whose channel name is UNK
            with open(out, 'wb') as sacfile:
                t.write(sacfile)
            print(" File successfully written: {0}".format(out))
            st = read(out)
            st.write(seed, format="mseed")
            print(" File successfully written: {0}".format(seed))

#            subprocess.call(["del",f],shell=True)

#    for i in range(0,1): # Build special channel for timing

    # Build the special timing channel: size the array and load it with
    # the GPS timing values (column 13 of each record).
    b = np.arange(len(stack), dtype=np.float32)
    for n in range(len(stack)):
        b[n] = np.float32(stack[n][13])
    t = SACTrace(data=b)

    #                     set the SAC header values
    t.scale = 1.0  # Set the scale for each channel. This one is important to declare.
    t.delta = Delta
    t.nzyear = St_time.tm_year
    t.nzjday = St_time.tm_yday
    t.nzhour = St_time.tm_hour
    t.nzmin = St_time.tm_min
    t.nzsec = St_time.tm_sec
    t.nzmsec = int((Frac_second) * 1000)
    t.kstnm = Station
    t.kcmpnm = 'GPS'  # This is a GPS timing signal.
    t.idep = 'iunkn'  # dependent variable type is unknown for this timing trace
    t.kinst = 'GPS'  # Instrument type
    t.knetwk = Network  # Network designator
    t.kuser0 = 'digital'  # Place the system of units into the user text field 0

    out = sacfil + "_{}.sac".format('GPS')
    with open(out, 'wb') as sacfile:
        t.write(sacfile)


#    print " File successfully written: {}.sac".format(out)
#    print "Published sample rate in sac file = {}".format(t.stats.sampling_rate)
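
csv2sac() relies on load(), timingvalidate(), and getsps() helpers that are not shown. A sketch of what getsps() might do, assuming (as csv2sac() itself does for Frac_second) that column 17 of each record holds the fractional second of the sample; this is an illustration, not the original helper:

def getsps(stack):
    # Estimate samples per second from successive fractional-second stamps.
    frac = np.array([float(row[17]) for row in stack[:1000]])
    dt = np.diff(frac) % 1.0   # wrap across whole-second boundaries
    dt = dt[dt > 0]            # drop repeated or zero-gap stamps
    return 1.0 / np.median(dt)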
Code example #36
File: test_scripts.py Project: SeismicData/pyasdf
def test_sac2asdf_script(tmpdir, capsys):
    tmpdir = tmpdir.strpath

    # Create some test data
    data_1 = np.arange(10, dtype=np.float32)
    header = {'kstnm': 'ANMO',
              'knetwk': 'IU',
              'kcmpnm': 'BHZ',
              'stla': 40.5,
              'stlo': -108.23,
              'stel': 100.0,
              'stdp': 3.4,
              'evla': -15.123,
              'evlo': 123,
              'evdp': 50,
              'nzyear': 2012,
              'nzjday': 123,
              'nzhour': 13,
              'nzmin': 43,
              'nzsec': 17,
              'nzmsec': 100,
              'delta': 1.0 / 40,
              'o': -10.0}
    sac = SACTrace(data=data_1, **header)
    sac.write(os.path.join(tmpdir, "a.sac"))

    data_2 = 2.0 * np.arange(10, dtype=np.float32)
    header = {'kstnm': 'BBBB',
              'knetwk': 'AA',
              'kcmpnm': 'CCC',
              'stla': 40.5,
              'stlo': -108.23,
              'stel': 200.0,
              'stdp': 2.4,
              'evla': -14.123,
              'evlo': 125,
              'evdp': 30,
              'nzyear': 2013,
              'nzjday': 123,
              'nzhour': 13,
              'nzmin': 43,
              'nzsec': 17,
              'nzmsec': 100,
              'delta': 1.0 / 40,
              'o': 10.0}
    sac = SACTrace(data=data_2, **header)
    sac.write(os.path.join(tmpdir, "b.sac"))

    output_file = os.path.join(tmpdir, "out.h5")
    assert not os.path.exists(output_file)

    sys_argv_backup = copy.copy(sys.argv)
    try:
        sys.argv = sys.argv[:1]
        sys.argv.append(tmpdir)
        sys.argv.append(output_file)
        sys.argv.append("random")
        sac2asdf.__main__()
    finally:
        # Restore to not mess with any of pytests logic.
        sys.argv = sys_argv_backup

    non_verbose_out, non_verbose_err = capsys.readouterr()
    assert not non_verbose_err

    assert os.path.exists(output_file)
    with pyasdf.ASDFDataSet(output_file, mode="r") as ds:
        # 2 Events.
        assert len(ds.events) == 2
        # 2 Stations.
        assert len(ds.waveforms) == 2

        events = ds.events  # flake8: noqa

        # Data should actually be fully identical
        np.testing.assert_equal(
            data_1, ds.waveforms.IU_ANMO.random[0].data)
        np.testing.assert_equal(
            data_2, ds.waveforms.AA_BBBB.random[0].data)

        assert ds.waveforms.IU_ANMO.random[0].id == "IU.ANMO..BHZ"
        assert ds.waveforms.AA_BBBB.random[0].id == "AA.BBBB..CCC"

        c = ds.waveforms.IU_ANMO.coordinates
        np.testing.assert_allclose(
            [c["latitude"], c["longitude"], c["elevation_in_m"]],
            [40.5, -108.23, 100.0])
        c = ds.waveforms.AA_BBBB.coordinates
        np.testing.assert_allclose(
            [c["latitude"], c["longitude"], c["elevation_in_m"]],
            [40.5, -108.23, 200.0])

        c = ds.waveforms.IU_ANMO.channel_coordinates["IU.ANMO..BHZ"][0]
        np.testing.assert_allclose(
            [c["latitude"], c["longitude"], c["elevation_in_m"],
             c["local_depth_in_m"]],
            [40.5, -108.23, 100.0, 3.4])
        c = ds.waveforms.AA_BBBB.channel_coordinates["AA.BBBB..CCC"][0]
        np.testing.assert_allclose(
            [c["latitude"], c["longitude"], c["elevation_in_m"],
             c["local_depth_in_m"]],
            [40.5, -108.23, 200.0, 2.4])

        # Events
        origin = ds.waveforms.IU_ANMO.random[
            0].stats.asdf.event_ids[0].get_referred_object().origins[0]
        np.testing.assert_allclose(
            [origin.latitude, origin.longitude, origin.depth],
            [-15.123, 123.0, 50.0])
        assert origin.time == obspy.UTCDateTime(
            year=2012, julday=123, hour=13, minute=43, second=17,
            microsecond=100000) - 10.0

        origin = ds.waveforms.AA_BBBB.random[
            0].stats.asdf.event_ids[0].get_referred_object().origins[0]
        np.testing.assert_allclose(
            [origin.latitude, origin.longitude, origin.depth],
            [-14.123, 125.0, 30.0])
        assert origin.time == obspy.UTCDateTime(
            year=2013, julday=123, hour=13, minute=43, second=17,
            microsecond=100000) + 10.0

    # Run once again in verbose mode but just test that the output is
    # actually more.
    os.remove(output_file)
    sys_argv_backup = copy.copy(sys.argv)
    try:
        sys.argv = sys.argv[:1]
        sys.argv.append("--verbose")
        sys.argv.append(tmpdir)
        sys.argv.append(output_file)
        sys.argv.append("random")
        sac2asdf.__main__()
    finally:
        # Restore to not mess with any of pytests logic.
        sys.argv = sys_argv_backup
    verbose_out, verbose_err = capsys.readouterr()
    assert not verbose_err
    assert len(verbose_out) > len(non_verbose_out)