Example #1
def test_mseed_float32(_data, inv_output):
    mseed = _data['mseed']
    # inv_obj = _data['inventory']
    mseed2 = _data['mseed_' + inv_output]
    sio = BytesIO()

    # Apparently mseed2 still has the encoding of the original mseed,
    # which is "STEIM2" for the current mseed (mapped to np.int32, see
    # obspy.io.mseed.headers.ENCODINGS).
    # If we set a custom encoding, IT MUST MATCH the dtype of the trace data.

    # THIS RAISES A WARNING (commented out):
    # mseed2.write(sio, format="MSEED")

    # THIS IS FINE (encoding 5 = FLOAT64):
    mseed2.write(sio, format="MSEED", encoding=5)

    # NOW LET'S CONVERT:
    mseed2_32 = Stream([Trace(trace.data.astype(np.float32), trace.stats)
                        for trace in mseed2])
    sio32 = BytesIO()
    mseed2_32.write(sio32, format='MSEED', encoding=4)  # encoding 4 = FLOAT32

    # sio32 HAS APPROX. HALF THE LENGTH OF sio (uncomment below and inspect
    # with a breakpoint or print):
    # size32 = len(sio32.getvalue())
    # size64 = len(sio.getvalue())

    # Let's see how this affects the data. What is the relative error?
    sio.seek(0)
    sio32.seek(0)
    mseed64 = obspy_read(sio)
    mseed32 = obspy_read(sio32)

    print("")
    print(inv_output)
    for i, (t64, t32) in enumerate(zip(mseed64, mseed32)):
        tmin = np.nanmin(t64.data)
        tmax = np.nanmax(t64.data)
        trange = np.abs(tmax - tmin)
        diff = np.abs(t64.data - t32.data) / trange
        meanerr = np.nanmean(diff)
        maxerr = np.nanmax(diff)
        minerr = np.nanmin(diff)
        print("Trace#%d" % (i + 1))
        print("min %.2E" % tmin)
        print("max %.2E" % tmax)
        print("min err ([0,1]): %.2E" % minerr)
        print("mean err ([0,1]): %.2E" % meanerr)
        print("max err ([0,1]): %.2E" % maxerr)

    print("")
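The two facts the comments above rely on can be checked in isolation: ObsPy's MiniSEED writer requires the encoding to match the trace dtype, and FLOAT32 output is roughly half the size of FLOAT64. A minimal, self-contained sketch with synthetic data (not the fixture above):

import numpy as np
from io import BytesIO
from obspy import Trace, Stream

tr64 = Trace(data=np.random.randn(5000))           # numpy default dtype: float64
buf64, buf32 = BytesIO(), BytesIO()
Stream([tr64]).write(buf64, format='MSEED', encoding='FLOAT64')  # same as encoding=5
tr32 = Trace(data=tr64.data.astype(np.float32), header=tr64.stats)
Stream([tr32]).write(buf32, format='MSEED', encoding='FLOAT32')  # same as encoding=4
print(len(buf64.getvalue()), len(buf32.getvalue()))  # roughly a 2:1 size ratio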
Example #2
def test_pick_amplitude_assocs(miniseed_conf, algorithm, mseed):
    picker = pickermaps[algorithm]()
    st = obspy_read(mseed)
    st2 = Stream(st[0:1])
    event = picker.event(st2, config=miniseed_conf)
    assert len(event.picks) == len(event.amplitudes)
    for p, a in zip(event.picks, event.amplitudes):
        assert a.pick_id == p.resource_id
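The invariant asserted here, each Amplitude pointing back at its Pick via pick_id, can be reproduced with bare ObsPy event objects; a minimal sketch independent of any picker:

from obspy.core.event import Pick, Amplitude

p = Pick()                              # resource_id is auto-generated
a = Amplitude(pick_id=p.resource_id)    # link the amplitude to its pick
assert a.pick_id == p.resource_id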
Example #3
def _read_waveforms(self, database_filename, format='NORDIC'):
    if format == 'NORDIC':
        full_wavefile = self._get_nordic_wavefile_name(database_filename)
    else:
        raise NameError(f'format {format} not implemented')
    stream = obspy_read(full_wavefile, 'MSEED')
    # get rid of the bad last samples in some streams, and detrend
    for tr in stream:
        tr.data = tr.data[:-10]
        tr.detrend(type='demean')
    return stream, full_wavefile
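Here _get_nordic_wavefile_name is project code, but ObsPy itself can list the waveform files referenced by a Nordic s-file via obspy.io.nordic.core.readwavename; a sketch of an equivalent lookup (the s-file name is a placeholder):

from obspy import read
from obspy.io.nordic.core import readwavename

wavefiles = readwavename('01-0411-15L.S201309')   # placeholder s-file name
stream = read(wavefiles[0], format='MSEED')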
Example #4
def _data():
    """returns a dict with fields 'mseed', 'mseed_ACC', 'mseed_VEL', 'mseed_DISP' (all Streams.
    The latter three after removing the response)
    'inventory' (an inventory object) and two strings: 'mseed_path' and 'inventory_path'"""
    folder = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', 'data')
    mseed_path = os.path.join(folder, 'trace_GE.APE.mseed')
    mseed = obspy_read(mseed_path)
    inv_path = os.path.join(folder, 'inventory_GE.APE.xml')
    s = BytesIO()
    with open(inv_path, 'rb') as _opn:
        s.write(_opn.read())
    s.seek(0)
    inv_obj = read_inventory(s)
    ret = {'mseed': mseed, 'inventory': inv_obj, 'mseed_path': mseed_path,
           'data_path': folder, 
           'inventory_path': inv_path}
    for inv_output in ['ACC', 'VEL', 'DISP']:
        mseed2 = remove_response(mseed, inv_obj, output=inv_output)
        ret['mseed_'+inv_output] = mseed2
    return ret
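The remove_response called here is a project helper; ObsPy's built-in Stream.remove_response performs the same deconvolution directly. A sketch using the fixture's paths:

from obspy import read, read_inventory

st = read(mseed_path)               # 'trace_GE.APE.mseed'
inv = read_inventory(inv_path)      # 'inventory_GE.APE.xml'
st_vel = st.copy().remove_response(inventory=inv, output='VEL')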
Example #5
def pick(config_file):
    """
    :param config_file: user supplied config file for picking
    :return: tba
    """
    log.info('Reading config file...')
    cf = config.Config(config_file)

    log.info('Preparing time series')
    st = Stream()

    if cf.seeds:
        for f in cf.miniseeds:
            st += obspy_read(f)
        log.info('Miniseeds accumulated')
    else:
        raise NotImplementedError
    log.info('Applying picking algorithm')
    picker = pickermaps[cf.picker['algorithm']](**cf.picker['params'])

    event = picker.event(st, config=cf)
    event.write(filename='test.xml', format='SC3ML')
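The closing event.write(..., format='SC3ML') relies on ObsPy's SeisComP export, which is also available on Catalog objects; a minimal sketch (assuming an ObsPy version with SC3ML write support):

from obspy.core.event import Catalog, Event

cat = Catalog(events=[Event()])
cat.write('test.xml', format='SC3ML')   # Event.write wraps the event in a Catalog internally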
Example #6
def test_1dassociater(random_filename):
    db_assoc = random_filename(ext='.db')
    db_tt_test = random_filename(ext='.db')

    # Our SQLite databases are:
    db_assoc = 'sqlite:///' + db_assoc
    # Traveltime database: work on a copy of db_tt
    shutil.copy(db_tt, db_tt_test)
    db_tt_test = 'sqlite:///' + db_tt_test
    # Connect to our databases
    engine_assoc = create_engine(db_assoc, echo=False)
    # Create the tables required to run the 1D associator
    tables1D.Base.metadata.create_all(engine_assoc)
    Session = sessionmaker(bind=engine_assoc)
    session = Session()
    # Find all waveform data in the data directory
    file_list = glob.glob(os.path.join(EX_DATA, '*.msd'))

    # Define our picker instance
    picker = fbpicker.FBPicker(t_long=5,
                               freqmin=0.5,
                               mode='rms',
                               t_ma=20,
                               nsigma=3,
                               t_up=0.78,
                               nr_len=2,
                               nr_coeff=2,
                               pol_len=10,
                               pol_coeff=10,
                               uncert_coeff=3)

    st = Stream()

    for f in file_list:
        st += obspy_read(f)

    # Pick the waveforms
    for s in st:
        # st.merge()  # merge will cause issues if there is a data gap
        s.detrend('linear')
        scnl, picks, polarity, snr, uncert = picker.picks(s)
        t_create = datetime.utcnow()  # Record the time we made the picks
        # Add each pick to the database
        for i in range(len(picks)):
            log.debug('st = {} Pick = {} {} {} scnl = {}'.format(
                s, i, picks[i], picks[i].datetime, scnl))

            new_pick = tables1D.Pick(scnl, picks[i].datetime, polarity[i],
                                     snr[i], uncert[i], t_create)
            session.add(new_pick)  # Add pick i to the database
        session.commit()  # Commit the pick to the database
        log.debug('Wrote picks')

    # Define the associator
    assocOK = assoc1D.LocalAssociator(db_assoc,
                                      db_tt_test,
                                      max_km=350,
                                      aggregation=1,
                                      aggr_norm='L2',
                                      cutoff_outlier=30,
                                      assoc_ot_uncert=7,
                                      nsta_declare=3,
                                      loc_uncert_thresh=0.2)
    # Identify candidate events (Pick Aggregation)
    assocOK.id_candidate_events()
    # Associate events
    assocOK.associate_candidates()

    print("Unit Testing for 1Dassociator ...............")

    # +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
    #   Function Testing rms sort list
    # +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

    radius = [
        ('SIO', -137.26, 68.992, 83.5, 0.7514, 0),
        ('U32A', -137.26, 68.992, 203.0, 1.8268, 1),
        ('W35A', -137.26, 68.992, 42.5, 0.3825, 2),
        ('OKCFA', -137.26, 68.992, 33.0, 0.297, 3),
        ('X34A', -137.26, 68.992, 122.0, 1.0979, 4),
        ('FNO', -137.26, 68.992, 36.5, 0.3285, 5),
    ]
    lon = -137.26
    lat = 68.992
    st_declare = 3  # number of stations required to declare an event
    rms_sort = []
    rms_sort, cb = assocOK._LocalAssociator__accumulate_rms_sort(
        radius, lon, lat, st_declare)

    print("")
    print('rms                   = {}'.format(rms_sort))
    print('Combinations Stations = {}'.format(cb))
    print("")
    assert len(rms_sort) > 0

    # +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
    #   Function Testing for radius parameters
    # +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
    #    candis_arr = [Candidate Event <2013-06-16T15:38:50.150000 U32A 203.00 1.83 18 19>]
    #    radius=[]
    #    radius, lon, lat = assocOK.accumulate_radius(candis_arr)
    #    print ("")
    #    print ("Radius    = {}".format(radius))
    #    print ("Lon       = {}".format(lon))
    #    print ("Lat       = {}".format(lat))

    # Add singles stations to events
    assocOK.single_phase()

    events = assocOK.assoc_db.query(Associated).all()
    assert len(events) == 1
    event = events[0]
    assert event.nsta == 3
    print('event.longitude = ', event.longitude)
    print('event.latitude = ', event.latitude)
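Reading the stored rows back uses the same SQLAlchemy session pattern as the setup above; a short sketch (tables1D.Pick and Associated are the models already imported by this test):

from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

engine = create_engine(db_assoc, echo=False)      # re-open the association DB
session2 = sessionmaker(bind=engine)()
print(session2.query(tables1D.Pick).count(), 'picks stored')
print(session2.query(Associated).count(), 'events associated')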
Example #7
def get_stream_with_gaps(_data):
    mseed_dir = _data['data_path']
    return obspy_read(os.path.join(mseed_dir, "IA.BAKI..BHZ.D.2016.004.head"))
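For a stream "with gaps", ObsPy can both report and close the gaps; a sketch of what a caller might do with the returned stream (using the _data fixture dict from Example #4):

st = get_stream_with_gaps(_data)
print(st.get_gaps())      # one row per gap: net, sta, loc, chan, start, end, delta, samples
st.merge(fill_value=0)    # or fill_value=None to keep gaps as masked samples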