Example #1
    def setUp(self):
        self.path = os.path.dirname(__file__)
        # Parkfield
        self.pkd = ParkfieldTest(os.path.join(self.path, 'data'),
                                 os.path.join(self.path, 'temp'))
        self.t1_pkd = UTC('1997-09-03')
        self.t2_pkd = UTC('1997-09-03')
        self.ipoc = IPOCTest(os.path.join(self.path, 'data'),
                             os.path.join(self.path, 'temp'))
        self.ipoc2 = IPOCTest(os.path.join(self.path, 'data'),
                              os.path.join(self.path, 'temp/test2'))

        # ## Copy test files from location of raw files
        # print os.path.isfile(self.pkd.getDay('PKD', self.t1_pkd) + '.QHD')
        try:
            self.pkd.getRawStream(self.t1_pkd, 'PKD', 'Z', checkfile=True)
        except ValueError:
            print('Copy missing raw files for Parkfield...')
            self.pkd.copyTestDataFiles(self.t1_pkd, self.t2_pkd, component='Z')
        self.t1_ipoc = UTC('2010-05-04')
        self.t1b_ipoc = UTC('2010-05-05')
        self.t2_ipoc = UTC('2010-05-06')
        try:
            self.ipoc.getRawStream(self.t1_ipoc, 'PB01', 'Z', checkfile=True)
        except ValueError:
            print('Copy missing raw files for IPOC...')
            self.ipoc.copyTestDataFiles(self.t1_ipoc,
                                        self.t2_ipoc,
                                        component='Z')
Example #2
def mean_hour_file():
    fname = data_path + 'climate_hour_IQ'
    #fname = data_path + 'climate_CHO'
    sens = False
    data = get_dict2(np.load(fname + '.npz'))
    dates = data['date']
    temp = data['temp' + '_sens' * sens]

    preselect = ((UTC('2007-01-01').toordinal() <= dates) *
                 (dates <= UTC('2012-01-01').toordinal()))
    dates = dates[preselect]
    temp = temp[preselect]
    date0 = int(dates[0] + 1)
    temps = []
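    # bin the temperatures day by day; a complete day of hourly data yields
    # 25 samples because both bounding midnights are included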
    for i in range(int(dates[-1] - date0)):
        select = (date0 + i <= dates) * (dates <= date0 + i + 1)
        if i == 0 or len(plotdates) != 25:
            plotdates = dates[select]
        temps.append(temp[select])
        if i % 1000 == 0:
            print(i)
    print(len(temps))
    print([len(t) for t in temps])

    temps = [t for t in temps if len(t) == 25]
    print(len(temps))
    temps = np.ma.mean(temps, axis=0)
    np.savez(fname + '_sens' * sens + '_mean', date=plotdates, temp=np.array(temps))
Example #3
def get_events(individual=True):
    endtime = UTC('2015-09-01')
    kw = {
        'starttime': UTC('2005-04-01'),
        'endtime': endtime,
        'minmagnitude': 1.5,
        'maxmagnitude': 3.5,
        'minlatitude': 25,
        'maxlatitude': 49.5,
        'minlongitude': -124,
        'maxlongitude': -67,
        'maxdepth': 40,
        'includearrivals': individual,
        'catalog': 'ANF'
    }
    client = FSDNClient()
    if individual:
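        # request the events one year at a time and write each event
        # (with its arrivals/picks) to a separate QuakeML file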
        for t in range(6, 17):
            kw['endtime'] = UTC('20%02d-01-01' % t) if t < 16 else endtime
            events = client.get_events(**kw)
            for ev in events:
                id_ = _extract_eventid(ev)
                ev.resource_id = ResourceIdentifier(id_)
                ev.write(EVENT_PICKS_FNAME + id_ + '.xml', 'QUAKEML')
            print('fetched events of year %d' % kw['starttime'].year)
            kw['starttime'] = kw['endtime']
    else:
        events = client.get_events(**kw)
        events.plot(projection='local', outfile=EVENT_FNAME + '.png')
        for ev in events:
            id_ = _extract_eventid(ev)
            ev.resource_id = ResourceIdentifier(id_)
        events.write(EVENT_FNAME + '.xml', 'QUAKEML')
        return events
Example #4
 def set_dates(self):
     try:
         self.startdate = self.date = UTC(UTC(str(self.ui.t1.text())).date)
         self.enddate = UTC(UTC(str(self.ui.t2.text())).date)
         self.daystream = None
     except Exception:
         log.exception('')
Example #5
def main():
    #stations = 'PB01 PB02 PB03 PB04 PB05 PB06 PB07 PB08 PB09 PB10 PB11 PB12 PB13 PB14 PB15 PB16 HMBCX MNMCX PATCX PSGCX LVC TAIQ'
    stations = 'PB01 PB02 PB03 PB04 PB05'
    stations2 = None

    components = 'Z'
    # Tocopilla earthquake:
    #t_Toco=UTC('2007-11-14 15:14:00')
    t1 = UTC('2006-01-01')
    #t2 = UTC('2011-09-01')
    #t1 = UTC('2007-01-01')
    #t2 = UTC('2009-01-01')
    t2 = UTC('2012-01-01')

    shift = 500
    correlations = get_correlations(stations, components, stations2, only_cross=True)

#    method = 'FINAL_filter0.005-5_1bit_whitening_2011+2012'
#    method = 'filter0.01-1_1bit_whitening0.01'
#    method = 'filter0.005_rm20'
#    method = 'filter0.005_1bit'
    method = 'filter0.01-1_water_env2_whitening_1bit_fft'


    data = IPOC(xcorr_append='/Tocopilla/' + method, use_local_LVC=False)
    data.setXLogger('_' + method)

#    pool = Pool()
#    prepare(data, stations.split(), t1, t2, component=components,
#            filter=(0.005, 5, 2, True), downsample=20,
##            filter=(1, 10), downsample=None,
##            eventremoval=None, #'waterlevel_env2', param_removal=(10, 0),
#            whitening=True,
#            use_this_filter_after_whitening=(0.005, 5, 2),
#            normalize='1bit', param_norm=None,
#            pool=pool)
#    noisexcorrf(data, correlations, t1, t2, shift, pool=pool)
#    pool.close()
#    pool.join()
#
#    stack(data, correlations, dt=10 * 24 * 3600, shift=5 * 24 * 3600)
#    stack(data, correlations, dt= -1)

    t1p, t2p = t1, t2
#    t1p, t2p = None, None

    filters = None
    filters = getFilters((0.025, 0.05, 0.1, 0.25, 0.5, 1))

    plotXcorrs(data, correlations, t1=t1p, t2=t2p, start=None, end=None,
               plot_overview=True, plot_years=False, use_dlognorm=False,
               plot_stack=True, plot_psd=False, add_to_title='',
               downsample=None, filters=filters, filter_now=False)

    plotXcorrs(data, correlations, t1=t1p, t2=t2p, start=None, end=None,
               plot_overview=True, plot_years=False, use_dlognorm=False,
               plot_stack=True, plot_psd=False, add_to_title='',
               downsample=None, stack=('10days', 'day'), filters=filters,
               filter_now=False)

    plotXcorrs(data, correlations, t1=t1p, t2=t2p, start=None, end=None,
               plot_overview=True, plot_years=False, use_dlognorm=False,
               plot_stack=True, plot_psd=False, add_to_title='',
               downsample=None, stack=('50days', '5days'), filters=filters,
               filter_now=False)
Example #6
def main():

    stations = 'PB01 PB02 PB03 PB04 PB05 PB06'
    stations2 = 'PB01 PB02 PB03 PB04 PB05 PB06 PB07 PB08 HMBCX PATCX'

    components = 'Z'
    # Tocopilla earthquake: 2007-11-14 15:14
    t1 = UTC('2007-09-01')
    t2 = UTC('2008-01-31')

    shift = 200
    correlations = get_correlations(stations, components, stations2)

    #    method = 'filter0.01-1_1bit'
    #    method = 'filter0.01-1_1bit_whitening0.01'
    #    method = 'filter2-20_1bit'
    #    method = 'filter0.005_1bit'
    #    period = 'day'
    #
    #    data = IPOC(xcorr_append='/Tocopilla/' + method, use_local_LVC=True)
    #    data.setXLogger('_' + method)
    #    prepare(data, stations.split(), t1, t2, filter=(0.005, 1.), downsample=5, whitening=True,
    #            component=components, normalize='1bit', norm_param=None)
    #    noisexcorr(data, correlations, t1, t2, shift_sec=shift, period=period)

    #    correlations = (('PB03Z', 'PB04Z'),)
    #    data.x_plot_day = data.x_res + '/plots2/%s_day_%s'
    #    plotXcorrs(data, correlations, t1, t2, start=9, end=15, plot_overview=True, filter=(2, None, 2, True), stack_lim=(-0.01, 0.01), downsample=None, plot_years=False,
    #                      plot_stack=True, plot_psd=True, add_to_title=method + '_filter2_9-15', add_to_file='_filter2_9-15.png', show=False)

    method = 'filter4-6_1bit'
    period = 'day'
    data = IPOC(xcorr_append='/Tocopilla/' + method, use_local_LVC=True)
    data.setXLogger('_' + method)

    #    prepare(data, stations.split(), t1, t2, filter=(4, 6), downsample=None, whitening=None,
    #            component=components, normalize='1bit', norm_param=None)
    #    noisexcorr(data, correlations, t1, t2, shift_sec=shift, period=period)

    plotXcorrs(data,
               correlations,
               t1,
               t2,
               start=-50,
               end=50,
               plot_overview=True,
               filter=None,
               stack_lim=(-0.1, 0.1),
               plot_years=False,
               plot_stack=True,
               plot_psd=False,
               add_to_title=method + '_wodlognorm_50s',
               add_to_file='_wodlognorm_50s.png',
               show=True,
               landscape=True,
               use_dlognorm=False)
Example #7
def synrf(mod, p=6.4, wave='P', gauss=2., nsamp=1024, fsamp=20., \
          tshft=10., nsv=(3.5, 0.25)):
    """
    Computes a synthetic teleseismic P or SV receiver function.

    Method from seispy.

    Parameters:

    mod             velocity model

    Optional parameters:

    p               horizontal slowness of the r.f.; this value *must*
                    be the angular slowness in seconds/degree and will
                    be stored in the extra[10] entry of the trace header
    wave            "P" or "SV".
    gauss           Gauss lowpass parameter used to compute r.f.
    nsamp           number of samples to be used in synthetic computation
    fsamp           sampling frequency in Hz
    tshft           time shift used in the computation of the r.f., i.e.
                    zero delay time corresponds to the sample at the
                    index tshft*fsamp. This value is stored in the 'sec'
                    field of the trace starting time, e.g. a time of
                    0/0/0 0:0:-10 corresponds to tshft of +10 sec.
    nsv             tuple containing the near-surface S velocity and
                    Poisson's ratio
    """
    import seis._rf
    import sito
    from obspy.core import UTCDateTime as UTC

    if isinstance(nsv, tuple):
        nsvs, nspr = nsv
    else:
        nsvs, nspr = nsv, 0.25

    if wave not in ("P", "SV"):
        raise ValueError("wave type must be either 'P' or 'SV'")
    c = "RFQ" if wave == "P" else "RFL"

    # Response of L component, Response of Q component, RF of Q component
    fzz, frr, frf = seis._rf.synrf(mod.z, mod.vp, mod.vs, mod.rh, mod.qp, mod.qs, \
                           p, gauss, nsamp, fsamp, tshft, nsvs, nspr, wave)
    stream = sito.Stream(traces=[
        sito.Trace(data=fzz),
        sito.Trace(data=frr),
        sito.Trace(data=frf)
    ])
    for i, tr in enumerate(stream):
        tr.stats.starttime = UTC('2000-01-01') - tshft
        tr.stats.ponset = UTC('2000-01-01')
        tr.stats.sampling_rate = fsamp
        tr.stats.channel = ('LRSP', 'QRSP', c)[i]
        tr.stats.slowness = p
    return stream
Example #8
def main():
    stations = 'PB01 PB02 PB03 PB04 PB05 PB06 PB07 PB08 HMBCX MNMCX PATCX PSGCX LVC'
    stations = 'PB09 PB10 PB11 PB12 PB13 PB14 PB15 PB16'
    stations = 'PB02 PB03 PB04 PB05 HMBCX MNMCX PSGCX'
    stations = 'PATCX'
    stations2 = None


    components = 'Z'
    # Tocopilla earthquake: 2007-11-14 15:14
    t1 = UTC('2007-01-01')
    #t1 = UTC('2007-12-01')
    #t2 = UTC('2008-12-31')
    #t2 = UTC('2012-10-01')
    t2 = UTC('2011-12-31')
    #t2 = UTC('2007-02-03')
#    t1 = UTC('2009-05-01')
#    t2 = UTC('2009-05-03')

    shift = 100
    shift = 60
    correlations = get_correlations(stations, components, stations2, only_auto=True)
    #correlations = get_correlations(stations, components, stations2)
    print(correlations)

    method = 'FINAL_filter4-6_1bit_auto'
    method = 'FINAL_filter4-6_1bit_auto_3C'
    method = 'FINAL_filter4-6_1bit_auto_hour2'

    data = IPOC(xcorr_append='/' + method, use_local_LVC=False)
    data.setXLogger('_' + method)

#    pool = Pool()
    pool = None
    prepare(data, stations.split(), t1, t2, component=components,
            filter=(4, 6, 2, True), downsample=50,
            eventremoval='waterlevel_env2', param_removal=(10, 0),
            whitening=False,
            normalize='1bit', param_norm=None,
            pool=pool, discard=0.1 * 24 * 3600, freq_domain=False, trim='day')
    noisexcorrf(data, correlations, t1, t2, shift, period=3600, pool=pool, overlap=1800)

#    noisexcorrf(data, correlations, t1, t2, shift, period=5 * 60, pool=pool,
#                max_preload=1000)
#    pool.close()
#    pool.join()

#    plotXcorrs(data, correlations, t1, t2, start=None, end=None, plot_overview=True, plot_years=False, use_dlognorm=False,
#                      plot_stack=True, plot_psd=False, add_to_title='', downsample=None)

    plotXcorrs(data, correlations, t1, t2, start=-20, end=20,
               plot_overview=True, plot_years=True, use_dlognorm=False,
               plot_stack=True, plot_psd=False, add_to_title='',
               downsample=None, ext='_hg.png', vmax=0.1, period=3600)
Example #9
def main():
    stations = 'PB01 PB02 PB03 PB04 PB05 PB06 PB07 PB08 HMBCX MNMCX PATCX PSGCX LVC'
    #stations = 'PB09 PB10 PB11 PB12 PB13 PB14 PB15 PB16'
    stations2 = None

    components = 'Z'
    # Tocopilla earthquake: 2007-11-14 15:14
    t1 = UTC('2006-02-01')
    t2 = UTC('2012-10-01')

    shift = 100
    correlations = get_correlations(stations,
                                    components,
                                    stations2,
                                    only_auto=True)

    method = 'FINAL_filter1-3_1bit_auto'

    data = IPOC(xcorr_append='/' + method, use_local_LVC=False)
    data.setXLogger('_' + method)

    #    pool = Pool()
    #    prepare(data, stations.split(), t1, t2, component=components,
    #            filter=(1, 3, 2, True), downsample=20,
    #            eventremoval='waterlevel_env2', param_removal=(10, 0),
    #            whitening=False,
    #            normalize='1bit', param_norm=None,
    #            pool=pool)
    #    noisexcorrf(data, correlations, t1, t2, shift, pool=pool)
    #    pool.close()
    #    pool.join()

    #    plotXcorrs(data, correlations, t1, t2, start=None, end=None, plot_overview=True, plot_years=False, use_dlognorm=False,
    #                      plot_stack=True, plot_psd=False, add_to_title='', downsample=None)

    plt.rc('font', size=16)
    plotXcorrs(data,
               correlations,
               t1,
               t2,
               start=0,
               end=20,
               plot_overview=True,
               plot_years=False,
               use_dlognorm=False,
               plot_stack=True,
               plot_psd=False,
               downsample=None,
               ext='_hg_dis.pdf',
               vmax=0.1,
               ylabel=None,
               add_to_title='1-3Hz')
Example #10
def main():
    stations = 'PB01 PB02 PB03 PB04 PB05 PB06 PB07 PB08 HMBCX MNMCX PATCX PSGCX'

    components = 'Z'
    # Tocopilla earthquake: 2007-11-14 15:14
    t1 = UTC('2006-07-01')
    t2 = UTC('2008-12-31')

    shift = 500
    correlations = get_correlations(stations, components)

    method = 'FINAL_filter0.005-10_1bit_Tocopilla'

    data = IPOC(xcorr_append='/' + method, use_local_LVC=False)
    data.setXLogger('_' + method)
    pool = Pool()
    prepare(data,
            stations.split(),
            t1,
            t2,
            component=components,
            filter=(0.005, 10, 2, True),
            downsample=20,
            whitening=False,
            normalize='1bit',
            param_norm=None,
            pool=pool)
    noisexcorrf(data, correlations, t1, t2, shift, pool=pool)
    pool.close()
    pool.join()
    stack(data, correlations, dt=10 * 24 * 3600, shift=5 * 24 * 3600)
    stack(data, correlations, dt=-1)

    filters = None
    #filters = getFilters((0.005, 0.01, 0.1, 1, 5, 10), zerophase=True, corners=2)
    #    plotXcorrs(data, correlations, t1, t2, start=None, end=None, filters=filters, plot_overview=True, plot_years=False, use_dlognorm=False,
    #                      plot_stack=True, plot_psd=True, add_to_title='', downsample=None)
    plotXcorrs(data,
               correlations,
               t1=None,
               t2=None,
               start=None,
               end=None,
               filters=filters,
               plot_overview=True,
               plot_years=False,
               use_dlognorm=False,
               plot_stack=True,
               plot_psd=True,
               add_to_title='',
               downsample=None,
               stack=('10days', '5days'))
Example #11
    def pick(self,
             latitude=None,
             longitude=None,
             minval=0,
             maxval=180,
             indegree=True,
             after='1900-01-01',
             before='3000-01-01',
             bigger=0.,
             smaller=10.,
             replace=True):
        """
        Pick events fulfilling the given conditions.

        :param latitude, longitude: coordinates for distance condition
        :param minval, maxval: distance of event has to be between these values
        :param indegree: True if minval and maxval in deg, False if in km
        :param after, before: UTCDateTime objects or strings with time range
        :param bigger, smaller: magnitude range
        :param replace: if True the data in the event list is overwritten
        :return: picked Events instance
        """
        if indegree:
            degorkm = 'deg'
        else:
            degorkm = 'km'
        newdata = []
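        # dummy distance used when no coordinates are given; it always
        # passes the default 0-180 deg distance range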
        dist = 50
        for event in self[::-1]:
            if latitude is not None and longitude is not None:
                if not indegree:
                    dist = gps2DistAzimuth(event.latitude, event.longitude,
                                           latitude, longitude)[0] / 1000.
                else:
                    dist = gps2DistDegree(event.latitude, event.longitude,
                                          latitude, longitude)
            if bigger <= event.magnitude and smaller >= event.magnitude and \
                dist >= minval and dist <= maxval and \
                UTC(after) <= event.datetime and UTC(before) >= event.datetime:
                newdata.append(event)
            elif replace:
                self.remove(event)
        if latitude is None:
            latitude = 0
        if longitude is None:
            longitude = 0
        log.info(
            'Pick %d events with distance between %d%s and %d%s from coordinates lat:%5.2f lon:%5.2f; between the dates %s and %s and between the magnitudes %3.1f and %3.1f'
            % (len(newdata), minval, degorkm, maxval, degorkm, latitude,
               longitude, after, before, bigger, smaller))
        return self.__class__(newdata[::-1])
Example #12
def main():
    stations = 'PB01 PB02 PB03 PB04 PB05 PB06 PB07 PB08 HMBCX MNMCX PATCX PSGCX LVC'
    stations = 'PB09 PB10 PB11 PB12 PB13 PB14 PB15 PB16'
    stations = 'PATCX'
    stations2 = None


    components = 'Z'
    # Tocopilla earthquake: 2007-11-14 15:14
    t1 = UTC('2007-10-01')
    t2 = UTC('2007-11-30')
    #t2 = UTC('2012-10-01')
    #t2 = UTC('2011-12-31')
#    t1 = UTC('2009-05-01')
#    t2 = UTC('2009-05-03')

    shift = 100
    shift = 60
    #correlations = get_correlations(stations, components, stations2, only_auto=True)
    correlations = get_correlations(stations, components, stations2)
    print(correlations)

    method = 'zerotest_nozero'
    #method = 'FINAL_filter4-6_1bit_auto_3C'
    #method = 'FINAL_filter3-5'

    data = IPOC(xcorr_append='/' + method, use_local_LVC=False)
    data.setXLogger('_' + method)

    pool = Pool()
    prepare(data, stations.split(), t1, t2, component=components,
            filter=(4, 6, 2, True), downsample=50,
            #eventremoval='waterlevel_env2', param_removal=(10, 0),
            eventremoval=None, param_removal=None,
            whitening=False,
            normalize='1bit', param_norm=None,
            pool=pool)
    noisexcorrf(data, correlations, t1, t2, shift, period=24 * 3600, pool=pool)

#    noisexcorrf(data, correlations, t1, t2, shift, period=5 * 60, pool=pool,
#                max_preload=1000)
    pool.close()
    pool.join()

#    plotXcorrs(data, correlations, t1, t2, start=None, end=None, plot_overview=True, plot_years=False, use_dlognorm=False,
#                      plot_stack=True, plot_psd=False, add_to_title='', downsample=None)
    #plt.rc('font', size=16)
    plotXcorrs(data, correlations, t1, t2, start=-20, end=20,
               plot_overview=True, plot_years=False, use_dlognorm=False,
               plot_stack=True, plot_psd=False, downsample=None,
               ext='_hg0.02_dis.pdf', vmax=0.02, add_to_title='4-6Hz',
               ylabel=None)
Example #13
def main():
    stations = 'PB01 PB02 PB03 PB04 PB05'

    component = 'Z'
    t1 = UTC('2009-06-01')
    t2 = UTC('2009-07-01')

    data = IPOC('test', use_local_LVC=False)
    data.setXLogger('_test')
    period = 24 * 3600
    ax = None
    plt.ion()
    for station in stations.split():
        pxxs = []
        freqs_old = None
        i = 0
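        # i counts the discarded noisy segments (shown in the plot label)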
        for t in timegen(t1, t2, period):
            st = data.getRawStreamFromClient(t, t + period, station, component)
            st.merge(method=1,
                     interpolation_samples=10,
                     fill_value='interpolate')
            print(st)
            pxx, freqs = st.plotPSD(just_calculate=True)
            assert freqs_old is None or np.all(freqs == freqs_old)
            freqs_old = freqs
            if max(pxx[4:]) > 1e7:
                print('discard')
                i += 1
                continue
            pxxs.append(pxx)
        pxx = sum(pxxs) / len(pxxs)
        del pxxs
        tr = Trace(data=pxx,
                   header=dict(is_fft=True,
                               sampling_rate=2 * max(freqs),
                               freq_min=min(freqs),
                               freq_max=max(freqs)))
        ax = tr.plotPSD(ax=ax,
                        label='%s-%d' % (st[0].stats.station, i),
                        figtitle=None)
        plt.draw()
        # embed()
    ax.legend()
    fig = ax.get_figure()
    fig.suptitle('%s  %s  %s to %s' %
                 (stations, component, t1.strftime('%Y-%m-%d'),
                  t2.strftime('%Y-%m-%d')))
    plt.ioff()
    plt.show()
Example #14
def get_stations(level='station'):
    fname = STATION_FNAME + '_bh' * (level == 'channel') + '.xml'
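    # channel-level inventories get the suffix '_bh' in the file name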
    kw = {
        'network': '_US-REF',
        'location': '',
        'channel': 'BH?',
        'starttime': UTC('2005-04-01'),
        'endtime': UTC('2015-09-01'),
        'minlatitude': 20,
        'maxlatitude': 50,
        'minlongitude': -130,
        'maxlongitude': -65,
        'level': level
    }
    client = FSDNClient()
    stations = client.get_stations(**kw)
    print('num stations:', len(stations.get_contents()['stations']))
    kw['network'] = '_US-TA'
    stations2 = client.get_stations(**kw)
    merge_stations(stations, stations2)
    print('num stations:', len(stations.get_contents()['stations']))
    # use HH channels only if BH channels are not available
    kw['network'] = '_US-REF'
    kw['channel'] = 'HH?'
    stations2 = client.get_stations(**kw)
    merge_stations(stations, stations2)
    print('num stations:', len(stations.get_contents()['stations']))
    kw['network'] = '_US-TA'
    stations2 = client.get_stations(**kw)
    merge_stations(stations, stations2)
    print('num stations:', len(stations.get_contents()['stations']))
    # add stations with location code 00 if the station is not already included
    kw['network'] = '_US-REF'
    kw['channel'] = 'BH?'
    kw['location'] = '00'
    stations2 = client.get_stations(**kw)
    merge_stations(stations, stations2)
    print('num stations:', len(stations.get_contents()['stations']))
    kw['network'] = '_US-TA'
    stations2 = client.get_stations(**kw)
    merge_stations(stations, stations2)
    print('final num stations:', len(stations.get_contents()['stations']))
    stations.write(fname, 'STATIONXML')
    if level == 'station':
        stations.plot(projection='local',
                      size=1,
                      outfile=STATION_FNAME + '.pdf')
    return stations
Example #15
def plot_some_events():
    from obspy.core.event import Catalog, Event, Origin, Magnitude
    from obspy.core import UTCDateTime as UTC

    eqs = """2008-09-10T16:12:03    6.0    -20.40    -69.40     40
    2008-03-24T20:39:06    5.9    -20.10    -69.20     85
    2008-03-01T19:51:59    5.7    -20.10    -69.60     15
    2008-02-15T16:54:04    5.5    -23.00    -70.20     32
    2008-02-04T17:01:30    6.6    -20.20    -70.00     36
    2007-12-16T08:09:16    7.1    -22.80    -70.00     14
    2007-11-14T15:40:51    7.8    -22.34    -70.06     37"""  #GEOFON:-22.30    -69.80
    events = []
    for eq in eqs.split('\n'):
        time, mag, lat, lon, depth = eq.split()
        ev = Event(event_type='earthquake', creation_info='GEOFON',
                    origins=[Origin(time=UTC(time), latitude=float(lat),
                                    longitude=float(lon), depth=float(depth))],
                    magnitudes=[Magnitude(mag=float(mag), magnitude_type='M')])
        events.append(ev)
    cat = Catalog(events[::-1])
    #print cat
    #cat.plot(projection='local')
    lons = [ev.origins[0].longitude for ev in cat]
    lats = [ev.origins[0].latitude for ev in cat]
    dates = [ev.origins[0].time for ev in cat]
    mags = [ev.magnitudes[0].mag for ev in cat]
Example #16
def extract_cdo_file():
    fname = data_path + 'CDO_CAL.txt'  #AF, IQ

    regex = """
    ^
    (?P<stn>\d+)\s+
    (?P<wban>\d+)\s+
    (?P<date>\d+)\s+
    (?P<temp>[-\d.]+)\s+
    (?P<temp_count>\d+)\s+
    (?P<dewp>[-\d.]+)\s+
    (?P<dewp_count>\d+)\s+
    (?P<slp>[\d.]+)\s+
    (?P<slp_count>\d+)\s+
    (?P<stp>[\d.]+)\s+
    (?P<stp_count>\d+)\s+
    (?P<visib>[\d.]+)\s+
    (?P<visib_count>\d+)\s+
    (?P<wdsp>[\d.]+)\s+
    (?P<wdsp_count>\d+)\s+
    (?P<mxspd>[\d.]+)\s+
    (?P<gust>[\d.]+)\s+
    (?P<max>[-\d.]+)\*?\s+
    (?P<min>[-\d.]+)\*?\s+
    (?P<prcp>[\d.]+)
    (?P<prcp_flag>.)\s+
    (?P<sndp>[\d.]+)\s+
    (?P<frshtt>\d+)
    """

    with open(fname, 'r') as f:
        filedata = f.read()
    matches = re.finditer(regex, filedata, re.VERBOSE + re.MULTILINE)
    data_list = [i.groupdict() for i in matches]
    data = {}
    for i, dp in enumerate(data_list):  # convert numbers to float and int types
        for key, item in dp.items():
            if key == 'date':
                dp[key] = UTC(item).toordinal()
            elif item is not None and key != 'frshtt':
                if isint(item):
                    dp[key] = int(item)
                elif isfloat(item):
                    dp[key] = float(item)
        if dp['stn'] == 854420:
            dp['station'] = 'Antofagasta'
        elif dp['stn'] == 854320:
            dp['station'] = 'Calama'
        elif dp['stn'] == 854180:
            dp['station'] = 'Iquique'
        else:
            dp['station'] = 'other'
        if i == 0:
            for key, item in dp.items():
                data[key] = []
        for key, item in dp.items():
            #ipshell()
            data[key].append(item)

    np.savez(data_path + 'climate_CAL', **data)  #AF, IQ
Example #17
def main2():
    # dis
    fwd = 155 / 25.4
    mpl.rcParams.update({
        'font.size': 9,
        'lines.linewidth': 1,
        'xtick.major.width': 1.
    })

    pathav = '/home/richter/Results/IPOC/avail'
    args = [
        '--nox', '-l', pathav + '/avail_IPOC_2012_03.npz', '-o',
        pathav + '/avail_IPOC_2012_09.png'
    ]
    fig = main(args)
    fig.set_size_inches(fwd, 0.9 * fwd / 1.61, forward=True)

    ax = fig.axes[0]
    ax.set_xticklabels([])
    ax.set_ylim([-0.6, ax.get_ylim()[1] + 0.1])
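    # put a year label below the axis at the middle of each year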
    for y in range(2006, 2012):
        ax.annotate(str(y),
                    xy=(date2num(UTC(year=y, month=7, day=1)), -0.6),
                    xycoords='data',
                    xytext=(0, -5),
                    textcoords='offset points',
                    ha='center',
                    va='top',
                    clip_on=False)
    plt.tight_layout()
    plt.subplots_adjust(bottom=0.08)
    #from IPython import embed
    #embed()

    fig.savefig(pathav + '/avail_IPOC_2012_09.png', dpi=300)
Example #18
def _load_data(seedid, day, data, data_format, key='data', **prep_kw):
    """Load preprocessed or raw data"""

    from obspy import UTCDateTime as UTC
    from yam.util import _seedid2meta
    from yam.correlate import get_data, preprocess
    smeta = _seedid2meta(seedid)
    day = UTC(day)
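    # key 'data' returns the raw stream trimmed and merged for the day;
    # any other key returns the stream preprocessed with 60 s edges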
    if key == 'data':
        obj = get_data(smeta,
                       data,
                       data_format,
                       day,
                       overlap=0,
                       edge=0,
                       trim_and_merge=True)
        return obj
    stream = get_data(smeta,
                      data,
                      data_format,
                      day,
                      overlap=0,
                      edge=60,
                      trim_and_merge=False)
    preprocess(stream, day, **prep_kw)
    return stream
Example #19
def main():
    stations = 'PB03 PB04'

    stations2 = None

    components = 'Z'
    # Tocopilla earthquake: 2007-11-14 15:14
    t1 = UTC('2006-01-01')
    t1 = UTC('2007-11-10')
    t2 = UTC()
    t2 = UTC('2007-11-20')
    shift = 500
    correlations = get_correlations(stations,
                                    components,
                                    stations2,
                                    only_cross=True)

    method = 'filter4-6_water_env2_whitening_1bit'
    #    method = 'filter0.01-1_1bit_whitening0.01'
    #    method = 'filter0.005_rm20'
    #    method = 'filter0.005_1bit'

    data = IPOC(xcorr_append='/Tocopilla/tests/' + method, use_local_LVC=True)
    data.setXLogger('_' + method)
    #    prepare(data, stations.split(), t1, t2, component=components,
    #            filter=(4, 6), downsample=None,
    #            eventremoval='waterlevel_env2', param_removal=(10, 0),
    #            #whitening=True,
    #            normalize='1bit', param_norm=None)
    correlations = get_correlations(stations,
                                    components,
                                    stations2,
                                    only_auto=True)
    #    noisexcorr(data, correlations, t1, t2, shift)
    plotXcorrs(data,
               correlations,
               t1,
               t2,
               start=-150,
               end=150,
               plot_overview=True,
               plot_years=False,
               use_dlognorm=True,
               plot_stack=True,
               plot_psd=True,
               add_to_title=method,
               show=True)
Example #20
def main():
    data = IPOC(xcorr_append='/tests/1bit_filter0.1-1', use_local_LVC=True)
    data.setXLogger('_1bit')
    stations = 'PB01 PB03'
    stations2 = 'PB03'

    components = 'Z'
    t1 = UTC('2010-01-01')
    t2 = UTC('2010-12-31')
    shift = 500

    prepare(data, stations.split(), t1, t2, filter=(0.1, 1.), downsample=10,
            component=components, normalize='1bit', param_norm=None,
            use_floating_stream=True)
    correlations = get_correlations(stations, components, stations2)
    xcorr_day(data, correlations, t1, t2, shift, use_floating_stream=True)
    plotXcorrs(data, correlations, t1, t2)
Example #21
def main():
    stations = 'PB01 PB02 PB03 PB04 PB05 PB06 PB07 PB08 PB09 PB10 PB11 PB12 PB13 PB14 PB15 PB16 HMBCX MNMCX PATCX PSGCX LVC'
    stations2 = None


    components = 'Z'
    # Tocopilla earthquake: 2007-11-14 15:14
    t1 = UTC('2006-02-01')
    t2 = UTC('2012-10-01')

    shift = 500
    correlations = get_correlations(stations, components, stations2, only_auto=True)

    method = 'FINAL_filter0.01-0.5_1bit_auto'

    data = IPOC(xcorr_append='/' + method, use_local_LVC=False)
    data.setXLogger('_' + method)

#    pool = Pool()
#    prepare(data, stations.split(), t1, t2, component=components,
#            filter=(0.01, 0.5, 2, True), downsample=5,
#            eventremoval='waterlevel_env2', param_removal=(10, 0),
#            whitening=False,
#            normalize='1bit', param_norm=None,
#            pool=pool)
#    noisexcorrf(data, correlations, t1, t2, shift, pool=pool)
#    pool.close()
#    pool.join()

#    plotXcorrs(data, correlations, t1, t2, start=None, end=None, plot_overview=True, plot_years=False, use_dlognorm=False,
#                      plot_stack=True, plot_psd=False, add_to_title='', downsample=None)
    plotXcorrs(data, correlations, t1, t2, start=0, end=200,
               plot_overview=True, plot_years=False, use_dlognorm=False,
               plot_stack=True, plot_psd=False, add_to_title='',
               downsample=None, ext='_hg.png', vmax=0.1)

#    stack(data, correlations, dt= -1)

#    stack(data, correlations, dt=10 * 24 * 3600, shift=2 * 24 * 3600)
#    plotXcorrs(data, correlations, t1=None, t2=None, start=None, end=None, plot_overview=True, plot_years=False, use_dlognorm=False,
#               plot_stack=True, plot_psd=False, add_to_title='', downsample=None,
#               stack=('10days', '2days'))
    plotXcorrs(data, correlations, t1=None, t2=None, start=0, end=200,
               plot_overview=True, plot_years=False, use_dlognorm=False,
               plot_stack=True, plot_psd=False, add_to_title='',
               downsample=None, stack=('10days', '2days'), ext='_hg.png',
               vmax=0.1)
Example #22
def _get_data_glob(data):
    """
    Construct a glob expression from the data expression
    """
    from obspy import UTCDateTime as UTC
    kw = dict(network='*', station='*', location='*', channel='*',
              t=UTC('2211-11-11 11:11:11'))
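    # the placeholder time consists only of the digits 2 and 1, so every
    # date-derived part of the formatted expression can be wildcarded below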
    dataglob = data.format(**kw)
    dataglob = dataglob.replace('22', '*').replace('11', '*')
    return dataglob
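For illustration (not part of the original snippet): with a hypothetical data
expression such as

    expr = 'data/{network}.{station}.{location}.{channel}__{t.year}-{t.month:02d}-{t.day:02d}.mseed'

_get_data_glob(expr) yields 'data/*.*.*.*__**-*-*.mseed', a glob pattern that
matches every file written from that template.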
Example #23
def psd(station, parser):
    data = IPOC()
    ppsd_length = 6 * 3600
    overlap = 0.5
    dt = 3 * 24 * 3600
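    # process the data in 3-day chunks, extended by half a PPSD window
    # so that consecutive chunks overlap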
    t1 = UTC('2006-01-01')
    t2 = UTC('2013-11-01')
    ppsd = None
    print(t1, t2)
    while t1 < t2:
        try:
            if station != 'LVC':
                stream = data.client.getWaveform(
                    'CX', station, '', 'HHZ', t1,
                    t1 + dt + overlap * ppsd_length)
            else:
                stream = data.client.getWaveform(
                    'GE', 'LVC', '00', 'BHZ', t1,
                    t1 + dt + overlap * ppsd_length)

        except Exception:
            t1 += dt
            continue
        if ppsd is None:
            ppsd = PPSD(stream[0].stats,
                        parser=parser,
                        skip_on_gaps=True,
                        db_bins=(-200, -50, 0.5),
                        ppsd_length=ppsd_length,
                        overlap=overlap)
        print(t1)
        ppsd.add(stream)
        t1 += dt
    if ppsd is not None:
        print('station %s: %d segments' % (station, len(ppsd.times)))
        ppsd.save("/home/richter/Results/IPOC/PPSD/ppsd_%s_6h.pkl.bz2" %
                  station,
                  compress=True)
        return True
    else:
        return False
Example #24
def mean_hour_file2():
    #fname = data_path + 'climate_hour_IQ'
    fname = data_path + 'climate_CHO'
    sens = False
    data = get_dict2(np.load(fname + '.npz'))
    dates = data['date']
    temp = data['temp' + '_sens' * sens]

    preselect = ((UTC('2007-01-01').toordinal() <= dates) *
                 (dates <= UTC('2012-01-01').toordinal()))
    dates = dates[preselect]
    temp = temp[preselect]
    date0 = int(dates[0] + 1)
    temps = []
    dates2 = []
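    # a complete day on the reference grid has 1441 samples; days with at
    # least 144 samples are kept and later interpolated onto that grid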
    for i in range(int(dates[-1] - date0)):
        select = (date0 + i <= dates) * (dates <= date0 + i + 1)
        if i == 0 or len(plotdates) != 1441:
            plotdates = dates[select]
        if len(temp[select]) >= 144:
            temps.append(temp[select])
            dates2.append(dates[select])
        if i % 1000 == 0:
            print(i)
    print(len(temps))
    print([len(t) for t in temps])
    for i in range(len(temps)):
        #print i
        #print plotdates
        #print dates2[i] - dates2[i][0] + plotdates[0]
        #print temps[i]
        temps[i] = np.interp(plotdates, dates2[i] - dates2[i][0] + plotdates[0], temps[i])
    temps = [t for t in temps if len(t) == 1441]
    print(len(temps))
    temps_mean = np.ma.mean(temps, axis=0)
    temps2 = np.array(temps) - np.tile(np.ma.mean(temps, axis=1)[:, np.newaxis], len(temps[0]))
    #temps_mean = np.ma.mean(temps2, axis=0) + np.ma.mean(temps2)
    #from IPython import embed
    #embed()
    temps_std = np.ma.std(temps2, axis=0)
    np.savez(fname + '_sens' * sens + '_hui_mean', date=plotdates,
             temp=np.array(temps_mean), temp_std=np.array(temps_std))
Example #25
def extract_hour_file():
    fname = data_path + 'HOUR_ALL.txt'
    data = defaultdict(lambda: defaultdict(list))
    with open(fname, 'r') as f:
        for line in f:
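            # station name is in the first column, date and hour in
            # columns 4 and 5, temperature in column 21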
            if line[:2] in ('AN', 'IQ'):
                ls = line.split()
                st = ls[0]
                time = date2num(UTC(ls[3] + ls[4]))
                data[st]['date'].append(time)
                data[st]['temp'].append(float(ls[20]))
    np.savez(data_path + 'climate_hour_IQ', **data['IQUIQUE'])
    np.savez(data_path + 'climate_hour_AF', **data['ANTOFAGASTA'])
Example #26
def main2():
    data = IPOC(xcorr_append='/1bit', use_local_LVC=True)
    t1 = UTC('2010-01-01')
    stream0_1 = data.getRawStream(t1, 'PB01', component='Z')
    stream0_2 = data.getRawStream(t1, 'PB02', component='Z')

    stream2_1 = data.getStream(t1, 'PB01', component='Z')
    stream2_2 = data.getStream(t1, 'PB02', component='Z')

    plot_streams([stream0_1, stream0_2, stream2_1, stream2_2], [None, None, 0.1, 0.1])
    plotPSD([stream0_1, stream0_2, stream2_1, stream2_2], 4096)

    ipshell()
Example #27
def main():
    data = IPOC(xcorr_append='/tests/1bit_filter0.01', use_local_LVC=True)
    data.setXLogger('_1bit0.01Hz')
    stations = 'PB01 PB03'
    stations2 = 'PB03'

    components = 'Z'
    t1 = UTC('2010-01-01')
    t2 = UTC('2010-01-02')
    shift = 500

    #    prepare(data, stations.split(), t1, t2, filter=(0.01, None), downsample=None,
    #            component=components, normalize='1bit', norm_param=None,
    #            use_floating_stream=True)
    correlations = get_correlations(stations, components, stations2)
    #    xcorr_day(data, correlations, t1, t2, shift, use_floating_stream=True)
    plotXcorrs(data,
               correlations,
               t1,
               t2,
               plot_overview=False,
               plot_stack=True,
               plot_psd=True)
Example #28
 def setUp(self):
     self.stream = read().sort()
     # add processing info
     self.stream.decimate(2)
     self.stream.differentiate()
     self.stream[0].stats.onset = UTC()
     self.stream[0].stats.header = 42
     self.stream[0].stats.header2 = 'Test entry'
     self.stream[0].stats.header3 = u'Test entry unicode'
     stack = dict(group='all', count=5, type=['pw', 2])
     self.stream[0].stats.stack = stack
     for tr in self.stream:
         if 'response' in tr.stats:
             del tr.stats.response
Example #29
 def produceHist(self, start=None, end=None, period=24 * 3600):
     """
     Produce histogram information and return (hist_list, mag_list)
     """
     self.sort()
     if len(self) == 0:
         return
     if start is None:
         start = UTC(self[0].datetime.date)
     if end is None:
         end = UTC(self[-1].datetime)
     entries = int((end - start) / period) + 1
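     # one bin per period; count the events and track the maximum
     # magnitude in each bin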
     hist_list = np.zeros(entries)
     mag_list = np.zeros(entries)
     for event in self:
         #date = UTC(event.datetime.date)
         entry = int((event.datetime - start) / period)
         print(entry)
         try:
             hist_list[entry] += 1
             mag_list[entry] = max(event.magnitude, mag_list[entry])
         except IndexError:
             pass
     return hist_list, mag_list
Example #30
def main():
    stations = 'PB03 PB04'

    component = 'Z'
    t1 = UTC('2006-01-01')
    t2 = UTC()
    #    t1 = UTC('2007-01-01')
    #    t2 = UTC('2007-01-03')

    method1 = 'filter0.01-1_water_env2_whitening_1bit'
    method2 = 'filter0.01-1_water_env2_whitening_1bit_fft'

    data1 = IPOC(xcorr_append='/Tocopilla/' + method1, use_local_LVC=True)
    data2 = IPOC(xcorr_append='/Tocopilla/' + method2, use_local_LVC=True)

    for station in stations.split():
        for day in daygen(t1, t2):
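            # read the day stream computed with method1, discard days with
            # gaps or less than 50% coverage, store its FFT under method2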
            try:
                stream = data1.getStream(day, station, component)
            except Exception:
                log.warning('Could not read stream for day %s station %s' %
                            (day, station))
            else:
                if len(stream) != 1:
                    log.warning(
                        'Stream for day %s station %s has wrong length %d' %
                        (day, station, len(stream)))
                elif (stream[0].stats.npts / stream[0].stats.sampling_rate
                      < 24 * 3600 * 0.5):
                    log.warning(
                        'Stream for day %s station %s has only a coverage of %f  -> discard'
                        % (day, station, 1. * stream[0].stats.npts /
                           stream[0].stats.sampling_rate / 24 / 3600))
                else:
                    stream.fft()
                    stream.write(data2.getDay(station, day), 'Q')