Example #1
def main():
    stations = 'PB03 PB04'

    stations2 = None

    components = 'Z'
    # TOcopilla earthquake: 2007-11-14 15:14
    t1 = UTC('2006-01-01')
    t1 = UTC('2007-11-10')
    t2 = UTC()
    t2 = UTC('2007-11-20')
    shift = 500
    correlations = get_correlations(stations, components, stations2, only_cross=True)


    method = 'filter4-6_water_env2_whitening_1bit'
#    method = 'filter0.01-1_1bit_whitening0.01'
#    method = 'filter0.005_rm20'
#    method = 'filter0.005_1bit'

    data = IPOC(xcorr_append='/Tocopilla/tests/' + method, use_local_LVC=True)
    data.setXLogger('_' + method)
#    prepare(data, stations.split(), t1, t2, component=components,
#            filter=(4, 6), downsample=None,
#            eventremoval='waterlevel_env2', param_removal=(10, 0),
#            #whitening=True,
#            normalize='1bit', param_norm=None)
    correlations = get_correlations(stations, components, stations2, only_auto=True)
#    noisexcorr(data, correlations, t1, t2, shift)
    plotXcorrs(data, correlations, t1, t2, start=-150, end=150, plot_overview=True, plot_years=False, use_dlognorm=True,
                      plot_stack=True, plot_psd=True, add_to_title=method, show=True)
Example #2
def main():
    stations = 'PB01 PB02 PB03 PB04 PB05 PB06 PB07 PB08 HMBCX MNMCX PATCX PSGCX'

    components = 'Z'
    # TOcopilla earthquake: 2007-11-14 15:14
    t1 = UTC('2006-07-01')
    t2 = UTC('2008-12-31')

    shift = 500
    correlations = get_correlations(stations, components)

    method = 'FINAL_filter0.005-10_1bit_Tocopilla'

    data = IPOC(xcorr_append='/' + method, use_local_LVC=False)
    data.setXLogger('_' + method)
    pool = Pool()
    prepare(data, stations.split(), t1, t2, component=components,
            filter=(0.005, 10, 2, True), downsample=20,
            whitening=False,
            normalize='1bit', param_norm=None,
            pool=pool)
    noisexcorrf(data, correlations, t1, t2, shift, pool=pool)
    pool.close()
    pool.join()
    stack(data, correlations, dt=10 * 24 * 3600, shift=5 * 24 * 3600)
    stack(data, correlations, dt=-1)

    filters = None
    #filters = getFilters((0.005, 0.01, 0.1, 1, 5, 10), zerophase=True, corners=2)
#    plotXcorrs(data, correlations, t1, t2, start=None, end=None, filters=filters, plot_overview=True, plot_years=False, use_dlognorm=False,
#                      plot_stack=True, plot_psd=True, add_to_title='', downsample=None)
    plotXcorrs(data, correlations, t1=None, t2=None, start=None, end=None, filters=filters, plot_overview=True, plot_years=False, use_dlognorm=False,
                      plot_stack=True, plot_psd=True, add_to_title='', downsample=None, stack=('10days', '5days'))
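
The two `stack` calls above build 10-day moving stacks advancing in 5-day steps (`dt=10 * 24 * 3600`, `shift=5 * 24 * 3600`) followed by one total stack (`dt=-1`). A minimal numpy sketch of that windowing, assuming the daily correlation functions are stored row-wise in an `(n_days, n_lags)` array (names are illustrative, not the sito API):

import numpy as np

def moving_stack(ccfs, window=10, step=5):
    # Average rows in windows of `window` days, advancing by `step`
    # days, mirroring stack(..., dt=10 days, shift=5 days) above.
    return np.array([ccfs[i:i + window].mean(axis=0)
                     for i in range(0, len(ccfs) - window + 1, step)])

daily = np.random.randn(100, 501)      # 100 days of CCFs, 501 lag samples
ten_day = moving_stack(daily, 10, 5)   # the '10days'/'5days' stacks
total = daily.mean(axis=0)             # dt=-1: stack over everything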
Example #3
def main():
    stations = 'PB03 PB04'

    component = 'Z'
    t1 = UTC('2006-01-01')
    t2 = UTC()
#    t1 = UTC('2007-01-01')
#    t2 = UTC('2007-01-03')

    method1 = 'filter0.01-1_water_env2_whitening_1bit'
    method2 = 'filter0.01-1_water_env2_whitening_1bit_fft'

    data1 = IPOC(xcorr_append='/Tocopilla/' + method1, use_local_LVC=True)
    data2 = IPOC(xcorr_append='/Tocopilla/' + method2, use_local_LVC=True)

    for station in stations.split():
        for day in daygen(t1, t2):
            try:
                stream = data1.getStream(day, station, component)
            except Exception:
                log.warning('Could not read stream for day %s station %s' % (day, station))
            else:
                if len(stream) != 1:
                    log.warning('Stream for day %s station %s has wrong length %d' % (day, station, len(stream)))
                elif stream[0].stats.npts / stream[0].stats.sampling_rate < 24 * 3600 * 0.5:
                    log.warning('Stream for day %s station %s has only a coverage of %f  -> discard' % (day, station, 1. * stream[0].stats.npts / stream[0].stats.sampling_rate / 24 / 3600))
                else:
                    stream.fft()
                    stream.write(data2.getDay(station, day), 'Q')
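
The loop above Fourier-transforms each day-long stream once (`stream.fft()`) and stores the result, so later correlation runs can operate on precomputed spectra. A plain-numpy sketch of the frequency-domain cross-correlation this speeds up (the on-disk `_fft` format itself is sito-internal):

import numpy as np

def xcorr_from_spectra(spec1, spec2, nfft):
    # Conjugate multiplication + inverse FFT = circular
    # cross-correlation; fftshift puts zero lag in the middle.
    return np.fft.fftshift(np.fft.irfft(np.conj(spec1) * spec2, nfft))

n = 24 * 3600 * 5                        # one day at 5 Hz (illustrative)
a, b = np.random.randn(2, n)
sa, sb = np.fft.rfft(a), np.fft.rfft(b)  # computed and stored once
cc = xcorr_from_spectra(sa, sb, n)       # reused for every pair and day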
Example #4
def main():
    #stations = 'PB01 PB02 PB03 PB04 PB05 PB06 PB07 PB08 PB09 PB10 PB11 PB12 PB13 PB14 PB15 PB16 HMBCX MNMCX PATCX PSGCX LVC TAIQ'
    stations = 'PB01 PB02 PB03 PB04 PB05'
    stations2 = None

    components = 'Z'
    # TOcopilla earthquake:
    #t_Toco=UTC('2007-11-14 15:14:00')
    t1 = UTC('2006-01-01')
    #t2 = UTC('2011-09-01')
    #t1 = UTC('2007-01-01')
    #t2 = UTC('2009-01-01')
    t2 = UTC('2012-01-01')

    shift = 500
    correlations = get_correlations(stations, components, stations2, only_cross=True)

#    method = 'FINAL_filter0.005-5_1bit_whitening_2011+2012'
#    method = 'filter0.01-1_1bit_whitening0.01'
#    method = 'filter0.005_rm20'
#    method = 'filter0.005_1bit'
    method = 'filter0.01-1_water_env2_whitening_1bit_fft'


    data = IPOC(xcorr_append='/Tocopilla/' + method, use_local_LVC=False)
    data.setXLogger('_' + method)

#    pool = Pool()
#    prepare(data, stations.split(), t1, t2, component=components,
#            filter=(0.005, 5, 2, True), downsample=20,
##            filter=(1, 10), downsample=None,
##            eventremoval=None, #'waterlevel_env2', param_removal=(10, 0),
#            whitening=True,
#            use_this_filter_after_whitening=(0.005, 5, 2),
#            normalize='1bit', param_norm=None,
#            pool=pool)
#    noisexcorrf(data, correlations, t1, t2, shift, pool=pool)
#    pool.close()
#    pool.join()
#
#    stack(data, correlations, dt=10 * 24 * 3600, shift=5 * 24 * 3600)
#    stack(data, correlations, dt= -1)

    t1p, t2p = t1, t2
#    t1p, t2p = None, None

    filters = None
    filters = getFilters((0.025, 0.05, 0.1, 0.25, 0.5, 1))

    plotXcorrs(data, correlations, t1=t1p, t2=t2p, start=None, end=None, plot_overview=True, plot_years=False, use_dlognorm=False,
                      plot_stack=True, plot_psd=False, add_to_title='', downsample=None, filters=filters, filter_now=False)

    plotXcorrs(data, correlations, t1=t1p, t2=t2p, start=None, end=None, plot_overview=True, plot_years=False, use_dlognorm=False,
                      plot_stack=True, plot_psd=False, add_to_title='', downsample=None, stack=('10days', 'day'), filters=filters, filter_now=False)

    plotXcorrs(data, correlations, t1=t1p, t2=t2p, start=None, end=None, plot_overview=True, plot_years=False, use_dlognorm=False,
                      plot_stack=True, plot_psd=False, add_to_title='', downsample=None, stack=('50days', '5days'), filters=filters, filter_now=False)
Example #5
def main():

    stations = 'PB01 PB02 PB03 PB04 PB04 PB05 PB06'
    stations2 = 'PB01 PB02 PB03 PB04 PB04 PB05 PB06 PB07 PB08 HMBCX PATCX'

    components = 'Z'
    # TOcopilla earthquake: 2007-11-14 15:14
    t1 = UTC('2007-09-01')
    t2 = UTC('2008-01-31')

    shift = 200
    correlations = get_correlations(stations, components, stations2)

    #    method = 'filter0.01-1_1bit'
    #    method = 'filter0.01-1_1bit_whitening0.01'
    #    method = 'filter2-20_1bit'
    #    method = 'filter0.005_1bit'
    #    period = 'day'
    #
    #    data = IPOC(xcorr_append='/Tocopilla/' + method, use_local_LVC=True)
    #    data.setXLogger('_' + method)
    #    prepare(data, stations.split(), t1, t2, filter=(0.005, 1.), downsample=5, whitening=True,
    #            component=components, normalize='1bit', norm_param=None)
    #    noisexcorr(data, correlations, t1, t2, shift_sec=shift, period=period)

    #    correlations = (('PB03Z', 'PB04Z'),)
    #    data.x_plot_day = data.x_res + '/plots2/%s_day_%s'
    #    plotXcorrs(data, correlations, t1, t2, start=9, end=15, plot_overview=True, filter=(2, None, 2, True), stack_lim=(-0.01, 0.01), downsample=None, plot_years=False,
    #                      plot_stack=True, plot_psd=True, add_to_title=method + '_filter2_9-15', add_to_file='_filter2_9-15.png', show=False)

    method = 'filter4-6_1bit'
    period = 'day'
    data = IPOC(xcorr_append='/Tocopilla/' + method, use_local_LVC=True)
    data.setXLogger('_' + method)

    #    prepare(data, stations.split(), t1, t2, filter=(4, 6), downsample=None, whitening=None,
    #            component=components, normalize='1bit', norm_param=None)
    #    noisexcorr(data, correlations, t1, t2, shift_sec=shift, period=period)

    plotXcorrs(data,
               correlations,
               t1,
               t2,
               start=-50,
               end=50,
               plot_overview=True,
               filter=None,
               stack_lim=(-0.1, 0.1),
               plot_years=False,
               plot_stack=True,
               plot_psd=False,
               add_to_title=method + '_wodlognorm_50s',
               add_to_file='_wodlognorm_50s.png',
               show=True,
               landscape=True,
               use_dlognorm=False)
Example #6
def main():
    stations = 'PB01 PB02 PB03 PB04 PB05 PB06 PB07 PB08 HMBCX MNMCX PATCX PSGCX LVC'
    stations = 'PB09 PB10 PB11 PB12 PB13 PB14 PB15 PB16'
    stations = 'PB02 PB03 PB04 PB05 HMBCX MNMCX PSGCX'
    stations = 'PATCX'
    stations2 = None


    components = 'Z'
    # TOcopilla earthquake: 2007-11-14 15:14
    t1 = UTC('2007-01-01')
    #t1 = UTC('2007-12-01')
    #t2 = UTC('2008-12-31')
    #t2 = UTC('2012-10-01')
    t2 = UTC('2011-12-31')
    #t2 = UTC('2007-02-03')
#    t1 = UTC('2009-05-01')
#    t2 = UTC('2009-05-03')

    shift = 100
    shift = 60
    correlations = get_correlations(stations, components, stations2, only_auto=True)
    #correlations = get_correlations(stations, components, stations2)
    print correlations

    method = 'FINAL_filter4-6_1bit_auto'
    method = 'FINAL_filter4-6_1bit_auto_3C'
    method = 'FINAL_filter4-6_1bit_auto_hour2'

    data = IPOC(xcorr_append='/' + method, use_local_LVC=False)
    data.setXLogger('_' + method)

#    pool = Pool()
    pool = None
    prepare(data, stations.split(), t1, t2, component=components,
            filter=(4, 6, 2, True), downsample=50,
            eventremoval='waterlevel_env2', param_removal=(10, 0),
            whitening=False,
            normalize='1bit', param_norm=None,
            pool=pool, discard=0.1 * 24 * 3600, freq_domain=False, trim='day')
    noisexcorrf(data, correlations, t1, t2, shift, period=3600, pool=pool, overlap=1800)

#    noisexcorrf(data, correlations, t1, t2, shift, period=5 * 60, pool=pool,
#                max_preload=1000)
#    pool.close()
#    pool.join()

#    plotXcorrs(data, correlations, t1, t2, start=None, end=None, plot_overview=True, plot_years=False, use_dlognorm=False,
#                      plot_stack=True, plot_psd=False, add_to_title='', downsample=None)

    plotXcorrs(data, correlations, t1, t2, start=-20, end=20, plot_overview=True, plot_years=True, use_dlognorm=False,
                      plot_stack=True, plot_psd=False, add_to_title='', downsample=None, ext='_hg.png', vmax=0.1,
                      period=3600)
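
`noisexcorrf(..., period=3600, overlap=1800)` above correlates hour-long windows overlapping by half. A toy generator showing that window layout (plain float seconds; sito itself works with `UTC` objects):

def hour_windows(t0, t1, period=3600, overlap=1800):
    # Yield (start, end) pairs: one-hour windows every 30 minutes.
    t = t0
    while t + period <= t1:
        yield t, t + period
        t += period - overlap

wins = list(hour_windows(0.0, 6 * 3600.0))  # 11 half-overlapping windows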
Example #7
def analyze():
    #stations = 'PB01 PB02 PB03 PB04 PB05 PB06 PB07 PB08 PB09 PB10 PB11 PB12 PB13 PB14 PB15 PB16 HMBCX MNMCX PATCX PSGCX LVC TAIQ'
    stations = 'PB01 PB02 PATCX'
    stations = 'PATCX'
    t1 = UTC('2009-01-01')
    t2 = UTC('2010-01-01')
    data = IPOC()
    for station in stations.split():
        hours = [[] for i in range(24)]
        times = []
        levels = []
        for t_day in daygen(t1, t2):
            try:
                stream = data.getRawStreamFromClient(t_day, t_day + 24 * 3600, station, component='Z')
            except ValueError:
                continue
            for tr in stream:
                tr.stats.filter = ''
            stream.demean()
            stream.detrend()
            stream.filter2(4, 6)
            stream.downsample2(5)
            stream.merge()
            tr = stream[0]
            startt = tr.stats.starttime
            endt = tr.stats.endtime
            if endt - startt < 12 * 3600:
                continue
            tr.data = obspy.signal.cpxtrace.envelope(tr.data)[1][:len(tr.data)]
            for hour in range(24):
                tr2 = tr.slice(t_day + hour * 3600, t_day + (hour + 1) * 3600)
                if tr2.stats.endtime - tr2.stats.starttime < 1800:
                    continue
                num_stds = 60  # =^ every minute
                len_parts = len(tr2.data) // 60  # =^ 1min
                len_stds = len_parts // 6  # =^ 10s
                stds = np.array([np.std(tr2.data[i:i + len_stds]) for i in np.arange(num_stds) * len_parts])
                stds = stds[stds != 0.]
                num_stds = len(stds)
                if num_stds < 50:
                    continue
                stds = np.sort(stds)[num_stds // 5:-num_stds // 5]
                stds = stds[stds < np.min(stds) * 2.]
                val = np.mean(stds)
                levels.append(val)
                times.append(date2num(t_day + (0.5 + hour) * 3600))
                hours[hour].append(val)
        errors = np.array([np.std(hours[i], ddof=1) / len(hours[i]) ** 0.5 for i in range(24)])
        hours = np.array([np.mean(hours[i]) for i in range(24)])
        times = np.array(times)
        levels = np.array(levels)
        np.savez('/home/richter/Results/IPOC/xcorr/noise_apmlitudes_%s_4-6Hz.npz' % station,
                 hours=hours, errors=errors, times=times, levels=levels)
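
The per-hour loop above estimates a robust noise amplitude: one standard deviation of a 10 s chunk per minute, zeros dropped, the 20% tails trimmed, values above twice the minimum discarded, and the remainder averaged. The same logic as a stand-alone helper, run on a synthetic envelope (a sketch, not a sito function):

import numpy as np

def robust_level(env, num_stds=60):
    len_parts = len(env) // num_stds          # ~1 min of samples
    len_stds = len_parts // 6                 # ~10 s of samples
    stds = np.array([np.std(env[i:i + len_stds])
                     for i in np.arange(num_stds) * len_parts])
    stds = stds[stds != 0.]                   # drop gaps
    n = len(stds)
    if n < 50:
        return None                           # too little data
    stds = np.sort(stds)[n // 5:-n // 5]      # trim the 20% tails
    stds = stds[stds < np.min(stds) * 2.]     # discard outliers
    return np.mean(stds)

print(robust_level(np.abs(np.random.randn(18000))))  # 1 h at 5 Hz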
Example #8
def main():
    stations = 'PB01 PB02 PB03 PB04 PB05 PB06 PB07 PB08 HMBCX MNMCX PATCX PSGCX'

    components = 'Z'
    # TOcopilla earthquake: 2007-11-14 15:14
    t1 = UTC('2006-07-01')
    t2 = UTC('2008-12-31')

    shift = 500
    correlations = get_correlations(stations, components)

    method = 'FINAL_filter0.005-10_1bit_Tocopilla'

    data = IPOC(xcorr_append='/' + method, use_local_LVC=False)
    data.setXLogger('_' + method)
    pool = Pool()
    prepare(data,
            stations.split(),
            t1,
            t2,
            component=components,
            filter=(0.005, 10, 2, True),
            downsample=20,
            whitening=False,
            normalize='1bit',
            param_norm=None,
            pool=pool)
    noisexcorrf(data, correlations, t1, t2, shift, pool=pool)
    pool.close()
    pool.join()
    stack(data, correlations, dt=10 * 24 * 3600, shift=5 * 24 * 3600)
    stack(data, correlations, dt=-1)

    filters = None
    #filters = getFilters((0.005, 0.01, 0.1, 1, 5, 10), zerophase=True, corners=2)
    #    plotXcorrs(data, correlations, t1, t2, start=None, end=None, filters=filters, plot_overview=True, plot_years=False, use_dlognorm=False,
    #                      plot_stack=True, plot_psd=True, add_to_title='', downsample=None)
    plotXcorrs(data,
               correlations,
               t1=None,
               t2=None,
               start=None,
               end=None,
               filters=filters,
               plot_overview=True,
               plot_years=False,
               use_dlognorm=False,
               plot_stack=True,
               plot_psd=True,
               add_to_title='',
               downsample=None,
               stack=('10days', '5days'))
Example #9
def main():
    stations = 'PB01 PB02 PB03 PB04 PB05 PB06 PB07 PB08 HMBCX MNMCX PATCX PSGCX LVC'
    #stations = 'PB09 PB10 PB11 PB12 PB13 PB14 PB15 PB16'
    stations2 = None

    components = 'Z'
    # TOcopilla earthquake: 2007-11-14 15:14
    t1 = UTC('2006-02-01')
    t2 = UTC('2012-10-01')

    shift = 100
    correlations = get_correlations(stations,
                                    components,
                                    stations2,
                                    only_auto=True)

    method = 'FINAL_filter1-3_1bit_auto'

    data = IPOC(xcorr_append='/' + method, use_local_LVC=False)
    data.setXLogger('_' + method)

    #    pool = Pool()
    #    prepare(data, stations.split(), t1, t2, component=components,
    #            filter=(1, 3, 2, True), downsample=20,
    #            eventremoval='waterlevel_env2', param_removal=(10, 0),
    #            whitening=False,
    #            normalize='1bit', param_norm=None,
    #            pool=pool)
    #    noisexcorrf(data, correlations, t1, t2, shift, pool=pool)
    #    pool.close()
    #    pool.join()

    #    plotXcorrs(data, correlations, t1, t2, start=None, end=None, plot_overview=True, plot_years=False, use_dlognorm=False,
    #                      plot_stack=True, plot_psd=False, add_to_title='', downsample=None)

    plt.rc('font', size=16)
    plotXcorrs(data,
               correlations,
               t1,
               t2,
               start=0,
               end=20,
               plot_overview=True,
               plot_years=False,
               use_dlognorm=False,
               plot_stack=True,
               plot_psd=False,
               downsample=None,
               ext='_hg_dis.pdf',
               vmax=0.1,
               ylabel=None,
               add_to_title='1-3Hz')
Example #10
def main2():
    data = IPOC(xcorr_append='/1bit', use_local_LVC=True)
    t1 = UTC('2010-01-01')
    stream0_1 = data.getRawStream(t1, 'PB01', component='Z')
    stream0_2 = data.getRawStream(t1, 'PB02', component='Z')

    stream2_1 = data.getStream(t1, 'PB01', component='Z')
    stream2_2 = data.getStream(t1, 'PB02', component='Z')

    plot_streams([stream0_1, stream0_2, stream2_1, stream2_2], [None, None, 0.1, 0.1])
    plotPSD([stream0_1, stream0_2, stream2_1, stream2_2], 4096)

    ipshell()
Example #11
def main():
    stations = 'PB01 PB02 PB03 PB04 PB05 PB06 PB07 PB08 HMBCX MNMCX PATCX PSGCX LVC'
    stations = 'PB09 PB10 PB11 PB12 PB13 PB14 PB15 PB16'
    stations = 'PATCX'
    stations2 = None


    components = 'Z'
    # TOcopilla earthquake: 2007-11-14 15:14
    t1 = UTC('2007-10-01')
    t2 = UTC('2007-11-30')
    #t2 = UTC('2012-10-01')
    #t2 = UTC('2011-12-31')
#    t1 = UTC('2009-05-01')
#    t2 = UTC('2009-05-03')

    shift = 100
    shift = 60
    #correlations = get_correlations(stations, components, stations2, only_auto=True)
    correlations = get_correlations(stations, components, stations2)
    print correlations

    method = 'zerotest_nozero'
    #method = 'FINAL_filter4-6_1bit_auto_3C'
    #method = 'FINAL_filter3-5'

    data = IPOC(xcorr_append='/' + method, use_local_LVC=False)
    data.setXLogger('_' + method)

    pool = Pool()
    prepare(data, stations.split(), t1, t2, component=components,
            filter=(4, 6, 2, True), downsample=50,
            #eventremoval='waterlevel_env2', param_removal=(10, 0),
            eventremoval=None, param_removal=None,
            whitening=False,
            normalize='1bit', param_norm=None,
            pool=pool)
    noisexcorrf(data, correlations, t1, t2, shift, period=24 * 3600, pool=pool)

#    noisexcorrf(data, correlations, t1, t2, shift, period=5 * 60, pool=pool,
#                max_preload=1000)
    pool.close()
    pool.join()

#    plotXcorrs(data, correlations, t1, t2, start=None, end=None, plot_overview=True, plot_years=False, use_dlognorm=False,
#                      plot_stack=True, plot_psd=False, add_to_title='', downsample=None)
    #plt.rc('font', size=16)
    plotXcorrs(data, correlations, t1, t2, start=-20, end=20, plot_overview=True, plot_years=False, use_dlognorm=False,
                      plot_stack=True, plot_psd=False, downsample=None, ext='_hg0.02_dis.pdf', vmax=0.02,
                      add_to_title='4-6Hz', ylabel=None)
Example #12
def main():
    stations = 'PB01 PB02 PB03 PB04 PB05'

    component = 'Z'
    t1 = UTC('2009-06-01')
    t2 = UTC('2009-07-01')

    data = IPOC('test', use_local_LVC=False)
    data.setXLogger('_test')
    period = 24 * 3600
    ax = None
    plt.ion()
    for station in stations.split():
        pxxs = []
        freqs_old = None
        i = 0
        for t in timegen(t1, t2, period):
            st = data.getRawStreamFromClient(t, t + period, station, component)
            st.merge(method=1,
                     interpolation_samples=10,
                     fill_value='interpolate')
            print st
            pxx, freqs = st.plotPSD(just_calculate=True)
            assert freqs_old is None or np.all(freqs == freqs_old)
            freqs_old = freqs
            if max(pxx[4:]) > 1e7:
                print 'discard'
                i += 1
                continue
            pxxs.append(pxx)
        pxx = sum(pxxs) / len(pxxs)
        del pxxs
        tr = Trace(data=pxx,
                   header=dict(is_fft=True,
                               sampling_rate=2 * max(freqs),
                               freq_min=min(freqs),
                               freq_max=max(freqs)))
        ax = tr.plotPSD(ax=ax,
                        label='%s-%d' % (st[0].stats.station, i),
                        figtitle=None)
        plt.draw()
        # embed()
    ax.legend()
    fig = ax.get_figure()
    fig.suptitle('%s  %s  %s to %s' %
                 (stations, component, t1.strftime('%Y-%m-%d'),
                  t2.strftime('%Y-%m-%d')))
    plt.ioff()
    plt.show()
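
`plotPSD(just_calculate=True)` is sito-specific, but the average-with-rejection pattern above is easy to reproduce with `scipy.signal.welch`. A sketch in which `reject` plays the role of the `max(pxx[4:]) > 1e7` discard rule (threshold and segment length are illustrative):

import numpy as np
from scipy.signal import welch

def mean_psd(day_traces, fs, nperseg=4096, reject=1e7):
    pxxs = []
    for x in day_traces:
        freqs, pxx = welch(x, fs=fs, nperseg=nperseg)
        if pxx[4:].max() > reject:    # skip noisy/broken days
            continue
        pxxs.append(pxx)
    return freqs, np.mean(pxxs, axis=0)

days = np.random.randn(5, 86400)      # five fake day-long traces
freqs, pxx = mean_psd(days, fs=1.0)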
Example #13
def main():
    stations = 'PB03 PB04'

    stations2 = None

    components = 'Z'
    # TOcopilla earthquake: 2007-11-14 15:14
    t1 = UTC('2006-01-01')
    t1 = UTC('2007-11-10')
    t2 = UTC()
    t2 = UTC('2007-11-20')
    shift = 500
    correlations = get_correlations(stations,
                                    components,
                                    stations2,
                                    only_cross=True)

    method = 'filter4-6_water_env2_whitening_1bit'
    #    method = 'filter0.01-1_1bit_whitening0.01'
    #    method = 'filter0.005_rm20'
    #    method = 'filter0.005_1bit'

    data = IPOC(xcorr_append='/Tocopilla/tests/' + method, use_local_LVC=True)
    data.setXLogger('_' + method)
    #    prepare(data, stations.split(), t1, t2, component=components,
    #            filter=(4, 6), downsample=None,
    #            eventremoval='waterlevel_env2', param_removal=(10, 0),
    #            #whitening=True,
    #            normalize='1bit', param_norm=None)
    correlations = get_correlations(stations,
                                    components,
                                    stations2,
                                    only_auto=True)
    #    noisexcorr(data, correlations, t1, t2, shift)
    plotXcorrs(data,
               correlations,
               t1,
               t2,
               start=-150,
               end=150,
               plot_overview=True,
               plot_years=False,
               use_dlognorm=True,
               plot_stack=True,
               plot_psd=True,
               add_to_title=method,
               show=True)
Example #14
def main():

    stations = 'PB01 PB02 PB03 PB04 PB04 PB05 PB06'
    stations2 = 'PB01 PB02 PB03 PB04 PB04 PB05 PB06 PB07 PB08 HMBCX PATCX'

    components = 'Z'
    # TOcopilla earthquake: 2007-11-14 15:14
    t1 = UTC('2007-09-01')
    t2 = UTC('2008-01-31')

    shift = 200
    correlations = get_correlations(stations, components, stations2)


#    method = 'filter0.01-1_1bit'
#    method = 'filter0.01-1_1bit_whitening0.01'
#    method = 'filter2-20_1bit'
#    method = 'filter0.005_1bit'
#    period = 'day'
#
#    data = IPOC(xcorr_append='/Tocopilla/' + method, use_local_LVC=True)
#    data.setXLogger('_' + method)
#    prepare(data, stations.split(), t1, t2, filter=(0.005, 1.), downsample=5, whitening=True,
#            component=components, normalize='1bit', norm_param=None)
#    noisexcorr(data, correlations, t1, t2, shift_sec=shift, period=period)

#    correlations = (('PB03Z', 'PB04Z'),)
#    data.x_plot_day = data.x_res + '/plots2/%s_day_%s'
#    plotXcorrs(data, correlations, t1, t2, start=9, end=15, plot_overview=True, filter=(2, None, 2, True), stack_lim=(-0.01, 0.01), downsample=None, plot_years=False,
#                      plot_stack=True, plot_psd=True, add_to_title=method + '_filter2_9-15', add_to_file='_filter2_9-15.png', show=False)




    method = 'filter4-6_1bit'
    period = 'day'
    data = IPOC(xcorr_append='/Tocopilla/' + method, use_local_LVC=True)
    data.setXLogger('_' + method)

#    prepare(data, stations.split(), t1, t2, filter=(4, 6), downsample=None, whitening=None,
#            component=components, normalize='1bit', norm_param=None)
#    noisexcorr(data, correlations, t1, t2, shift_sec=shift, period=period)


    plotXcorrs(data, correlations, t1, t2, start=-50, end=50, plot_overview=True, filter=None, stack_lim=(-0.1, 0.1), plot_years=False,
               plot_stack=True, plot_psd=False, add_to_title=method + '_wodlognorm_50s', add_to_file='_wodlognorm_50s.png', show=True, landscape=True, use_dlognorm=False)
Example #15
def main():
    data = IPOC(xcorr_append='/tests/1bit_filter0.01', use_local_LVC=True)
    data.setXLogger('_1bit0.01Hz')
    stations = 'PB01 PB03'
    stations2 = 'PB03'

    components = 'Z'
    t1 = UTC('2010-01-01')
    t2 = UTC('2010-01-02')
    shift = 500

#    prepare(data, stations.split(), t1, t2, filter=(0.01, None), downsample=None,
#            component=components, normalize='1bit', norm_param=None,
#            use_floating_stream=True)
    correlations = get_correlations(stations, components, stations2)
#    xcorr_day(data, correlations, t1, t2, shift, use_floating_stream=True)
    plotXcorrs(data, correlations, t1, t2, plot_overview=False, plot_stack=True, plot_psd=True)
Example #16
def main():
    data = IPOC(xcorr_append='/tests/1bit_filter0.1-1', use_local_LVC=True)
    data.setXLogger('_1bit')
    stations = 'PB01 PB03'
    stations2 = 'PB03'

    components = 'Z'
    t1 = UTC('2010-01-01')
    t2 = UTC('2010-12-31')
    shift = 500

    prepare(data, stations.split(), t1, t2, filter=(0.1, 1.), downsample=10,
            component=components, normalize='1bit', param_norm=None,
            use_floating_stream=True)
    correlations = get_correlations(stations, components, stations2)
    xcorr_day(data, correlations, t1, t2, shift, use_floating_stream=True)
    plotXcorrs(data, correlations, t1, t2)
Example #17
def main():
    stations = 'PB01 PB02 PB03 PB04 PB05 PB06 PB07 PB08 PB09 PB10 PB11 PB12 PB13 PB14 PB15 PB16 HMBCX MNMCX PATCX PSGCX LVC'
    stations2 = None


    components = 'Z'
    # TOcopilla earthquake: 2007-11-14 15:14
    t1 = UTC('2006-02-01')
    t2 = UTC('2012-10-01')

    shift = 500
    correlations = get_correlations(stations, components, stations2, only_auto=True)

    method = 'FINAL_filter0.01-0.5_1bit_auto'

    data = IPOC(xcorr_append='/' + method, use_local_LVC=False)
    data.setXLogger('_' + method)

#    pool = Pool()
#    prepare(data, stations.split(), t1, t2, component=components,
#            filter=(0.01, 0.5, 2, True), downsample=5,
#            eventremoval='waterlevel_env2', param_removal=(10, 0),
#            whitening=False,
#            normalize='1bit', param_norm=None,
#            pool=pool)
#    noisexcorrf(data, correlations, t1, t2, shift, pool=pool)
#    pool.close()
#    pool.join()

#    plotXcorrs(data, correlations, t1, t2, start=None, end=None, plot_overview=True, plot_years=False, use_dlognorm=False,
#                      plot_stack=True, plot_psd=False, add_to_title='', downsample=None)
    plotXcorrs(data, correlations, t1, t2, start=0, end=200, plot_overview=True, plot_years=False, use_dlognorm=False,
                      plot_stack=True, plot_psd=False, add_to_title='', downsample=None, ext='_hg.png', vmax=0.1)

#    stack(data, correlations, dt= -1)

#    stack(data, correlations, dt=10 * 24 * 3600, shift=2 * 24 * 3600)
#    plotXcorrs(data, correlations, t1=None, t2=None, start=None, end=None, plot_overview=True, plot_years=False, use_dlognorm=False,
#               plot_stack=True, plot_psd=False, add_to_title='', downsample=None,
#               stack=('10days', '2days'))
    plotXcorrs(data, correlations, t1=None, t2=None, start=0, end=200, plot_overview=True, plot_years=False, use_dlognorm=False,
               plot_stack=True, plot_psd=False, add_to_title='', downsample=None,
               stack=('10days', '2days'), ext='_hg.png', vmax=0.1)
Example #18
def main():
    stations = 'PB01 PB02 PB03 PB04 PB05'

    component = 'Z'
    t1 = UTC('2009-06-01')
    t2 = UTC('2009-07-01')

    data = IPOC('test', use_local_LVC=False)
    data.setXLogger('_test')
    period = 24 * 3600
    ax = None
    plt.ion()
    for station in stations.split():
        pxxs = []
        freqs_old = None
        i = 0
        for t in timegen(t1, t2, period):
            st = data.getRawStreamFromClient(t, t + period, station, component)
            st.merge(method=1, interpolation_samples=10, fill_value='interpolate')
            print st
            pxx, freqs = st.plotPSD(just_calculate=True)
            assert freqs_old is None or np.all(freqs == freqs_old)
            freqs_old = freqs
            if max(pxx[4:]) > 1e7:
                print 'discard'
                i += 1
                continue
            pxxs.append(pxx)
        pxx = sum(pxxs) / len(pxxs)
        del pxxs
        tr = Trace(data=pxx,
                   header=dict(is_fft=True, sampling_rate=2 * max(freqs),
                               freq_min=min(freqs), freq_max=max(freqs)))
        ax = tr.plotPSD(ax=ax, label='%s-%d' % (st[0].stats.station, i), figtitle=None)
        plt.draw()
        # embed()
    ax.legend()
    fig = ax.get_figure()
    fig.suptitle('%s  %s  %s to %s' % (stations, component, t1.strftime('%Y-%m-%d'),
                                       t2.strftime('%Y-%m-%d')))
    plt.ioff()
    plt.show()
Example #19
def calc_temp():
    station = "PB12"
    channel = "WKI"
    from sito.data import IPOC

    ipoc = IPOC()
    stream = ipoc.getChannelFromClient("2006-01-01", "2012-01-01", station=station, channel=channel)
    stream2 = stream.copy()
    day = stream[0].stats.starttime
    day2 = UTC(day.year, day.month, day.day)
    if day2 < day:
        day = day2 + 24 * 3600
    stream2.trim(day, day + 24 * 3600)
    stream2.merge()
    data = []
    while day < stream[-1].stats.endtime:
        st = stream.slice(day, day + 24 * 3600)
        st.merge()
        if len(st) == 0 or len(st[0].data) < 8640:
            print "skip %s" % day.date
            day = day + 24 * 3600
            continue
        a = st[0].data[:8640]
        if channel == "WKI":
            a = a / 100.0
            a = np.ma.masked_outside(a, 0, 50)
        elif channel == "WDI":
            a = a / 10000.0
            a = np.ma.masked_outside(a, 0, 5)
        else:
            a = np.ma.masked_outside(a, 0, 100)
        a = np.ma.masked_invalid(a)
        data.append(a)
        day = day + 24 * 3600
    # from IPython import embed
    # embed()
    stream2[0].data = np.ma.mean(data, axis=0)
    stream2.write(path + "/climate/%s_%s" % (station, channel), "Q")
    stream2.plot(method="full")
    plt.show()
Example #20
def calc_strong_motion_Toco():
    t = UTC('2007-11-14')
    data = IPOC()
    channel = 'BHZ'
    acs = {}
    vels = {}

    for station in 'PB01 PB02 PB03 PB04 PB05 PB06 PB07 PB08 PATCX HMBCX PSGCX MNMCX'.split():
        for channel in ('BLZ', 'BHZ'):
            try:
                ms = data.getChannelFromClient(
                    t - 60,
                    t + 24 * 3600 + 60,
                    network='GE' if station == 'LVC' else 'CX',
                    location='10' if station == 'LVC' else '',
                    station=station,
                    channel=channel)
            except Exception as ex:
                print station, channel, 'sucks'
                continue
            for tr in ms:
                if tr.stats.endtime - tr.stats.starttime < 1000:
                    ms.remove(tr)
                    continue
                tr.trim(tr.stats.starttime + 10, tr.stats.endtime - 10)
                tr.stats.filter = ''
                tr.detrend()
                tr.filter('highpass', freq=0.2)
                tr.trim(tr.stats.starttime + 10, tr.stats.endtime - 10)
            ms.merge(fill_value=0)
            if len(ms) == 0:
                continue
            maxi = float(np.max(np.abs(ms[0].data)))
            if 'BH' in channel:
                vels[station] = maxi / 629145000.0 * 100
            elif 'BL' in channel:
                acs[station] = maxi / 427566.942
    with open(GM_TOCO, 'w') as f:
        yaml.dump({'vels': vels, 'acs': acs}, f, default_flow_style=False)
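
The two constants convert raw counts to ground motion. Read off the code, 629145000.0 looks like an overall BH* sensitivity in counts per m/s (hence `* 100` for cm/s) and 427566.942 a BL* sensitivity in counts per m/s²; both interpretations are assumptions, not documented values. A helper making the assumed conversion explicit:

def peak_ground_motion(peak_counts, channel):
    # Assumed sensitivities taken from the constants above.
    if 'BH' in channel:
        return peak_counts / 629145000.0 * 100  # PGV in cm/s (assumed)
    if 'BL' in channel:
        return peak_counts / 427566.942         # PGA in m/s^2 (assumed)
    raise ValueError('unexpected channel %s' % channel)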
Example #21
def main():
    data = IPOC(xcorr_append='/tests/1bit_filter0.01', use_local_LVC=True)
    data.setXLogger('_1bit0.01Hz')
    stations = 'PB01 PB03'
    stations2 = 'PB03'

    components = 'Z'
    t1 = UTC('2010-01-01')
    t2 = UTC('2010-01-02')
    shift = 500

    #    prepare(data, stations.split(), t1, t2, filter=(0.01, None), downsample=None,
    #            component=components, normalize='1bit', norm_param=None,
    #            use_floating_stream=True)
    correlations = get_correlations(stations, components, stations2)
    #    xcorr_day(data, correlations, t1, t2, shift, use_floating_stream=True)
    plotXcorrs(data,
               correlations,
               t1,
               t2,
               plot_overview=False,
               plot_stack=True,
               plot_psd=True)
Example #22
def main():
    stations = 'PB01 PB02 PB03 PB04 PB05 PB06 PB07 PB08 HMBCX MNMCX PATCX PSGCX LVC'
    #stations = 'PB09 PB10 PB11 PB12 PB13 PB14 PB15 PB16'
    stations2 = None


    components = 'Z'
    # TOcopilla earthquake: 2007-11-14 15:14
    t1 = UTC('2006-02-01')
    t2 = UTC('2012-10-01')

    shift = 100
    correlations = get_correlations(stations, components, stations2, only_auto=True)

    method = 'FINAL_filter1-3_1bit_auto'

    data = IPOC(xcorr_append='/' + method, use_local_LVC=False)
    data.setXLogger('_' + method)

#    pool = Pool()
#    prepare(data, stations.split(), t1, t2, component=components,
#            filter=(1, 3, 2, True), downsample=20,
#            eventremoval='waterlevel_env2', param_removal=(10, 0),
#            whitening=False,
#            normalize='1bit', param_norm=None,
#            pool=pool)
#    noisexcorrf(data, correlations, t1, t2, shift, pool=pool)
#    pool.close()
#    pool.join()

#    plotXcorrs(data, correlations, t1, t2, start=None, end=None, plot_overview=True, plot_years=False, use_dlognorm=False,
#                      plot_stack=True, plot_psd=False, add_to_title='', downsample=None)

    plt.rc('font', size=16)
    plotXcorrs(data, correlations, t1, t2, start=0, end=20, plot_overview=True, plot_years=False, use_dlognorm=False,
                      plot_stack=True, plot_psd=False, downsample=None, ext='_hg_dis.pdf', vmax=0.1, ylabel=None,
                      add_to_title='1-3Hz')
Example #23
def calc_strong_motion_Toco():
    t = UTC('2007-11-14')
    data = IPOC()
    channel = 'BHZ'
    acs = {}
    vels = {}

    for station in 'PB01 PB02 PB03 PB04 PB05 PB06 PB07 PB08 PATCX HMBCX PSGCX MNMCX'.split():
        for channel in ('BLZ', 'BHZ'):
            try:
                ms = data.getChannelFromClient(t - 60, t + 24 * 3600 + 60,
                                               network='GE' if station == 'LVC' else 'CX',
                                               location='10' if station == 'LVC' else '',
                                               station=station, channel=channel)
            except Exception as ex:
                print station, channel, 'sucks'
                continue
            for tr in ms:
                if tr.stats.endtime - tr.stats.starttime < 1000:
                    ms.remove(tr)
                    continue
                tr.trim(tr.stats.starttime + 10, tr.stats.endtime - 10)
                tr.stats.filter = ''
                tr.detrend()
                tr.filter('highpass', freq=0.2)
                tr.trim(tr.stats.starttime + 10, tr.stats.endtime - 10)
            ms.merge(fill_value=0)
            if len(ms) == 0:
                continue
            maxi = float(np.max(np.abs(ms[0].data)))
            if 'BH' in channel:
                vels[station] = maxi / 629145000.0 * 100
            elif 'BL' in channel:
                acs[station] = maxi / 427566.942
    with open(GM_TOCO, 'w') as f:
        yaml.dump({'vels': vels, 'acs':acs}, f, default_flow_style=False)
Example #24
def main():
    stations = 'PB01 PB03'
    stations2 = 'PB03'
    components = 'Z'
    t1 = UTC('2010-01-01')
    t2 = UTC('2010-12-31')
    shift = 500
    correlations = get_correlations(stations, components, stations2)


    method = 'filter0.1-1_1bit_whitening0.01'
    data = IPOC(xcorr_append='/tests/' + method, use_local_LVC=True)
    data.setXLogger('_' + method)
    prepare(data, stations.split(), t1, t2, filter=(0.1, 1), downsample=10, whitening=0.01,
            component=components, normalize='1bit', param_norm=None,
            use_floating_stream=True)
    xcorr_day(data, correlations, t1, t2, shift, use_floating_stream=True)
    plotXcorrs(data, correlations, t1, t2, plot_overview=False, plot_stack=True, plot_psd=True, add_to_title=method)


    method = 'filter0.1-1_1bit_whitening0.001'
    data = IPOC(xcorr_append='/tests/' + method, use_local_LVC=True)
    data.setXLogger('_' + method)
    prepare(data, stations.split(), t1, t2, filter=(0.1, 1), downsample=10, whitening=0.001,
            component=components, normalize='1bit', param_norm=None,
            use_floating_stream=True)
    xcorr_day(data, correlations, t1, t2, shift, use_floating_stream=True)
    plotXcorrs(data, correlations, t1, t2, plot_overview=False, plot_stack=True, plot_psd=True, add_to_title=method)

    method = 'filter0.1-1_1bit_whitening0.1'
    data = IPOC(xcorr_append='/tests/' + method, use_local_LVC=True)
    data.setXLogger('_' + method)
    prepare(data, stations.split(), t1, t2, filter=(0.1, 1), downsample=10, whitening=0.1,
            component=components, normalize='1bit', param_norm=None,
            use_floating_stream=True)
    xcorr_day(data, correlations, t1, t2, shift, use_floating_stream=True)
    plotXcorrs(data, correlations, t1, t2, plot_overview=False, plot_stack=True, plot_psd=True, add_to_title=method)
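
The three runs differ only in the `whitening` parameter, which in sito presumably sets a spectral smoothing width in Hz. As a stand-in, here is a minimal water-level spectral whitening sketch (flatten the amplitude spectrum, keep the phase); it is not the sito implementation:

import numpy as np

def whiten(x, water_level=1e-8):
    spec = np.fft.rfft(x)
    spec /= np.abs(spec) + water_level   # unit amplitude, phase kept
    return np.fft.irfft(spec, len(x))

xw = whiten(np.random.randn(5000))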
Example #25
def plot_salar_map():
    map_dic = dict(show=False, ll=(-21.4, -70.3), ur=(-20.7, -69.8),
                   figsize=(fw, 1.61 * fw * 0.7),
                   margin=(0.05, 0.05, 0.9, 0.9), lw=2,
                   station_markersize=4,
                   grid=0.2, grid_labels=True, grid_lw=0.2, slip=None, earthquake=None,
                   countries=None, coastlines=None,
                   elevation_args=(1592 * 2, None, False), elevation_offset=1000,
                   shaded=True, shaded_args=(90, 45, 0.7),
                   colormap=cm.binary,
                   elevation='/home/richter/Data/map/salar_90m.tif',
                   elev_oceans=False,
                   stations=None,
                   spines_lw=2,
                   loffset=1000)
    from sito.data import IPOC
    m = map.createIPOCMap(**map_dic)
    chos = [('CHO1', -21.094050, -70.102000, 653),
            ('CHO2', -21.105933, -70.096900, 620),
            ('CHO3', -21.106233, -70.097517, 625)]
    kw = dict(bbox=dict(boxstyle="round", fc="w", alpha=0.5, ec='none'))
    IPOC().stations.plot(m, mfc='w', ms=4, zorder=10, lsize='small', kwargs_an=kw)
    #ASTER GDEM is a product of METI and NASA.
    for station, lat, lon, height in chos[1:2]:
        x, y = m(lon, lat)
        m.plot((x,), (y,), marker='o', mfc='w', ms=4, zorder=10)
        plt.annotate(station, (x, y), xytext=(3, 3), textcoords='offset points', size='small', **kw)
    #plt.annotate('ASTER GDEM is a product of METI and NASA.', (1, 0), xycoords='axes fraction', ha='right', va='bottom', size='xx-small')
    plt.annotate('Salar Grande', (0.62, 0.56), rotation=-80, xycoords='axes fraction',
                 ha='center', va='center', size='small', color='k')
    path = svg2path('/home/richter/Documents/pics/maps/ipoc/salar/salar.svg')[0]
    lat21, lon70, px2deg = 862 - 15, 557 + 15, (1.177425 - 0.869511) / 1055
    for i in range(len(path)):
        x, y = path.vertices[i, :]
        lon = -70 + (x - lon70) * px2deg / cos(21. / 180 * pi)
        lat = -21 - (y - lat21) * px2deg
        path.vertices[i, :] = m(lon, lat)
    patch = patches.PathPatch(path, facecolor='none', lw=0.8, ec='r', alpha=0.5, zorder=50)
    plt.gca().add_patch(patch)
    mpl.rcParams.update({'lines.linewidth':1.})
    m.drawmapscale(-70.2, -21.35, -70, -21, 10, fontsize=7, yoffset=0.005 * (m.ymax - m.ymin))
    plt.gcf().savefig('/home/richter/Documents/pics/maps/ipoc/salar_map.pdf', dpi=1200)
Example #26
File: psd.py  Project: wangwu1991/sito
def psd(station, parser):
    data = IPOC()
    ppsd_length = 6 * 3600
    overlap = 0.5
    dt = 3 * 24 * 3600
    t1 = UTC('2006-01-01')
    t2 = UTC('2013-11-01')
    ppsd = None
    print t1, t2
    while t1 < t2:
        try:
            if station != 'LVC':
                stream = data.client.getWaveform(
                    'CX', station, '', 'HHZ', t1,
                    t1 + dt + overlap * ppsd_length)
            else:
                stream = data.client.getWaveform(
                    'GE', 'LVC', '00', 'BHZ', t1,
                    t1 + dt + overlap * ppsd_length)

        except Exception:
            t1 += dt
            continue
        if ppsd is None:
            ppsd = PPSD(stream[0].stats,
                        parser=parser,
                        skip_on_gaps=True,
                        db_bins=(-200, -50, 0.5),
                        ppsd_length=ppsd_length,
                        overlap=overlap)
        print t1
        ppsd.add(stream)
        t1 += dt
    if ppsd is not None:
        print 'station %s: %d segments' % (station, len(ppsd.times))
        ppsd.save("/home/richter/Results/IPOC/PPSD/ppsd_%s_6h.pkl.bz2" %
                  station,
                  compress=True)
        return True
    else:
        return False
Example #27
def main():
    stations = 'PB03 PB04'

    component = 'Z'
    t1 = UTC('2006-01-01')
    t2 = UTC()
    #    t1 = UTC('2007-01-01')
    #    t2 = UTC('2007-01-03')

    method1 = 'filter0.01-1_water_env2_whitening_1bit'
    method2 = 'filter0.01-1_water_env2_whitening_1bit_fft'

    data1 = IPOC(xcorr_append='/Tocopilla/' + method1, use_local_LVC=True)
    data2 = IPOC(xcorr_append='/Tocopilla/' + method2, use_local_LVC=True)

    for station in stations.split():
        for day in daygen(t1, t2):
            try:
                stream = data1.getStream(day, station, component)
            except Exception:
                log.warning('Could not read stream for day %s station %s' %
                            (day, station))
            else:
                if len(stream) != 1:
                    log.warning(
                        'Stream for day %s station %s has wrong length %d' %
                        (day, station, len(stream)))
                elif stream[0].stats.npts / stream[0].stats.sampling_rate < 24 * 3600 * 0.5:
                    log.warning(
                        'Stream for day %s station %s has only a coverage of %f  -> discard'
                        % (day, station, 1. * stream[0].stats.npts /
                           stream[0].stats.sampling_rate / 24 / 3600))
                else:
                    stream.fft()
                    stream.write(data2.getDay(station, day), 'Q')
Example #28
def analyze():
    #stations = 'PB01 PB02 PB03 PB04 PB05 PB06 PB07 PB08 PB09 PB10 PB11 PB12 PB13 PB14 PB15 PB16 HMBCX MNMCX PATCX PSGCX LVC TAIQ'
    stations = 'PB01 PB02 PATCX'
    stations = 'PATCX'
    t1 = UTC('2009-01-01')
    t2 = UTC('2010-01-01')
    data = IPOC()
    for station in stations.split():
        hours = [[] for i in range(24)]
        times = []
        levels = []
        for t_day in daygen(t1, t2):
            try:
                stream = data.getRawStreamFromClient(t_day,
                                                     t_day + 24 * 3600,
                                                     station,
                                                     component='Z')
            except ValueError:
                continue
            for tr in stream:
                tr.stats.filter = ''
            stream.demean()
            stream.detrend()
            stream.filter2(4, 6)
            stream.downsample2(5)
            stream.merge()
            tr = stream[0]
            startt = tr.stats.starttime
            endt = tr.stats.endtime
            if endt - startt < 12 * 3600:
                continue
            tr.data = obspy.signal.cpxtrace.envelope(tr.data)[1][:len(tr.data)]
            for hour in range(24):
                tr2 = tr.slice(t_day + hour * 3600, t_day + (hour + 1) * 3600)
                if tr2.stats.endtime - tr2.stats.starttime < 1800:
                    continue
                num_stds = 60  # =^ every minute
                len_parts = len(tr2.data) // 60  # =^ 1min
                len_stds = len_parts // 6  # =^ 10s
                stds = np.array([
                    np.std(tr2.data[i:i + len_stds])
                    for i in np.arange(num_stds) * len_parts
                ])
                stds = stds[stds != 0.]
                num_stds = len(stds)
                if num_stds < 50:
                    continue
                stds = np.sort(stds)[num_stds // 5:-num_stds // 5]
                stds = stds[stds < np.min(stds) * 2.]
                val = np.mean(stds)
                levels.append(val)
                times.append(date2num(t_day + (0.5 + hour) * 3600))
                hours[hour].append(val)
        errors = np.array(
            [np.std(hours[i], ddof=1) / len(hours[i])**0.5 for i in range(24)])
        hours = np.array([np.mean(hours[i]) for i in range(24)])
        times = np.array(times)
        levels = np.array(levels)
        np.savez(
            '/home/richter/Results/IPOC/xcorr/noise_apmlitudes_%s_4-6Hz.npz' %
            station,
            hours=hours,
            errors=errors,
            times=times,
            levels=levels)
Example #29
                   scale=args.relative_scale,
                   downsample=args.downsample,
                   save=args.save, show=args.save is None))
print kwargs
if args.date is None:
    from sito import read
    if '.' not in args.file_station:
        args.file_station = args.file_station + '.QHD'
    stream = read(args.file_station)
    if args.absolute_scale is None and args.relative_scale is None:
        kwargs['scale'] = 1.
else:
    station = args.file_station
    if station.startswith('PB') or station == 'LVC' or station.endswith('CX'):
        from sito.data import IPOC
        data = IPOC(xcorr_append=args.xcorr_append)
    elif station == 'PKD':
        from sito.data import Parkfield
        data = Parkfield(xcorr_append=args.xcorr_append)
    else:
        raise argparse.ArgumentTypeError('Not a valid station name')
    day = UTC(args.date)
    if args.xcorr_append is None:
        #stream = data.getRawStream(day, station, component=args.component)
        stream = data.getRawStreamFromClient(day, day + 24 * 3600, station, component=args.component, channel=args.channel)
    else:
        stream = data.getStream(day, station, component=args.component)
    if args.absolute_scale is None and args.relative_scale is None:
        kwargs['absolutescale'] = 0.0005

tr = stream[0]
Example #30
                   scale=args.relative_scale,
                   downsample=args.downsample,
                   save=args.save, show=args.save is None))
print kwargs
if args.date is None:
    from sito import read
    from sito.imaging import plotTrace
    stream = read(args.file_station)
    plotTrace(stream, **kwargs)

else:
    from sito.imaging import plotTrace2
    station = args.file_station
    if station.startswith('PB') or station == 'LVC':
        from sito.data import IPOC
        data = IPOC(xcorr_append=args.xcorr_append)
    elif station == 'PKD':
        from sito.data import Parkfield
        data = Parkfield(xcorr_append=args.xcorr_append)
    else:
        raise argparse.ArgumentTypeError('Not a valid station name')

    day = UTCDateTime(args.date)
    if args.xcorr_append is None:
        stream = data.getRawStream(day, station, component=args.component)
    else:
        stream = data.getStream(day, station, component=args.component)
    if stream[0].stats.is_fft:
        stream.ifft()
    plotTrace(stream, component=args.component, **kwargs)
Example #31
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# by TR

from sito.data import IPOC
from sito.noisexcorr import removeBad
import glob
from sito import read
import os.path

method = 'filter0.01-1_water_env2_whitening_1bit_fft'
data = IPOC(xcorr_append='/Tocopilla/' + method, use_local_LVC=True)
data.setXLogger('_' + method)
#path = '/home/richter/Results/IPOC/xcorr/Tocopilla/filter4-6_water_env2_1bit/stretch_t/'
#path = '/home/richter/Results/IPOC/xcorr/Tocopilla/filter0.01-1_water_env2_whitening_1bit/stretch2/'
path = '/home/richter/Results/IPOC/xcorr/Tocopilla/filter0.01-1_water_env2_whitening_1bit_fft/xcorr/'

for file in glob.glob(path + '*.QHD'): #@ReservedAssignment
    if 'filter' not in os.path.basename(file):
        print file
        ms = read(file)
        ms.normalize()
        removeBad(ms, 0.8)
        ms.write(os.path.splitext(file)[0], 'Q')
Example #32
#                     min_longitude=lon_Toc - 1., max_longitude=lon_Toc + 1,
#                     max_results=1000000,
#                     min_magnitude=None, max_magnitude=None)
#events.write('/home/richter/Data/events/events_Tocopilla.txt')
events = Events.read('/home/richter/Data/events/events_Tocopilla.txt')
events.pick(latitude=lat_Toc,
            longitude=lon_Toc,
            minval=0,
            maxval=100.,
            indegree=False)
#events.plot(lat_Toc, lon_Toc, circles=(1,))

method = 'filter2-20_1bit'
#method = 'filter0.005_1bit'

data = IPOC(xcorr_append='/Tocopilla/' + method, use_local_LVC=True)

t1 = UTC('2007-11-01')
t2 = UTC('2007-12-01')

period = 1800
correlation = ('PB03Z', 'PB03Z')
stream = data.readX(correlation, t1, t2, period=period)
#stream.filter2(2, 20)
stream.setHIForHist(events, period=period)
figsize = (8.267, 11.693)[::-1]
add_to_title = '_againfilter_zoom1'
#save = data.getPlotXCorr(correlation, 'all') + '_againfilter_zoom1 + events.png'
save = False

stream.plotXcorr(
Example #33
def main():
    stations = (
        "PB01 PB02 PB03 PB04 PB05 PB06 PB07 PB08 PB09 PB10 PB11 PB12 PB13 PB14 PB15 PB16 HMBCX MNMCX PATCX PSGCX LVC"
    )
    # TAIQ
    stations2 = None

    components = "Z"
    # TOcopilla earthquake: 2007-11-14 15:14
    t1 = UTC("2006-01-01")
    # t2 = UTC('2011-09-01')
    # t1 = UTC('2007-01-01')
    # t2 = UTC('2009-01-01')
    t2 = UTC("2012-09-01")

    shift = 500
    correlations = get_correlations(stations, components, stations2)

    method = "FINAL_filter0.01-1_1bit"

    data = IPOC(xcorr_append="/" + method, use_local_LVC=False)
    data.setXLogger("_" + method)

    pool = Pool()
    prepare(
        data,
        stations.split(),
        t1,
        t2,
        component=components,
        filter=(0.01, 1, 2, True),
        downsample=10,
        eventremoval="waterlevel_env2",
        param_removal=(10, 0),
        whitening=False,
        normalize="1bit",
        param_norm=None,
        pool=pool,
    )
    noisexcorrf(data, correlations, t1, t2, shift, pool=pool)
    pool.close()
    pool.join()

    stack(data, correlations, dt=10 * 24 * 3600, shift=5 * 24 * 3600)
    stack(data, correlations, dt=50 * 24 * 3600, shift=5 * 24 * 3600)
    stack(data, correlations, dt=-1)

    plotXcorrs(
        data,
        correlations,
        t1,
        t2,
        start=None,
        end=None,
        plot_overview=True,
        plot_years=False,
        use_dlognorm=False,
        plot_stack=True,
        plot_psd=False,
        add_to_title="",
        downsample=None,
    )

    plotXcorrs(
        data,
        correlations,
        t1=None,
        t2=None,
        start=None,
        end=None,
        plot_overview=True,
        plot_years=False,
        use_dlognorm=False,
        plot_stack=True,
        plot_psd=False,
        add_to_title="",
        downsample=None,
        stack=("10days", "5days"),
    )

    plotXcorrs(
        data,
        correlations,
        t1=None,
        t2=None,
        start=None,
        end=None,
        plot_overview=True,
        plot_years=False,
        use_dlognorm=False,
        plot_stack=True,
        plot_psd=False,
        add_to_title="",
        downsample=None,
        stack=("50days", "5days"),
    )
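
`prepare(...)` with `filter=(0.01, 1, 2, True)` and `normalize='1bit'` amounts to a two-corner zero-phase Butterworth band-pass followed by sign-bit normalization. A scipy/numpy sketch under those assumptions (not sito's `prepare`):

import numpy as np
from scipy.signal import butter, sosfiltfilt

def prepare_trace(x, fs, fmin=0.01, fmax=1.0, corners=2):
    sos = butter(corners, [fmin, fmax], btype='bandpass', fs=fs,
                 output='sos')
    x = sosfiltfilt(sos, x)   # zero-phase, i.e. the True in the tuple
    return np.sign(x)         # 1-bit normalization

x = prepare_trace(np.random.randn(864000), fs=10.0)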
Example #34
def main():
    stations = 'PB01 PB02 PB03 PB04 PB05 PB06 PB07 PB08 PB09 PB10 PB11 PB12 PB13 PB14 PB15 PB16 HMBCX MNMCX PATCX PSGCX LVC'
    # TAIQ
    stations2 = None

    components = 'Z'
    # TOcopilla earthquake: 2007-11-14 15:14
    t1 = UTC('2006-01-01')
    #t2 = UTC('2011-09-01')
    #t1 = UTC('2007-01-01')
    #t2 = UTC('2009-01-01')
    t2 = UTC('2012-09-01')

    shift = 500
    correlations = get_correlations(stations, components, stations2)

    method = 'FINAL_filter0.01-1_1bit'

    data = IPOC(xcorr_append='/' + method, use_local_LVC=False)
    data.setXLogger('_' + method)

    pool = Pool()
    prepare(data,
            stations.split(),
            t1,
            t2,
            component=components,
            filter=(0.01, 1, 2, True),
            downsample=10,
            eventremoval='waterlevel_env2',
            param_removal=(10, 0),
            whitening=False,
            normalize='1bit',
            param_norm=None,
            pool=pool)
    noisexcorrf(data, correlations, t1, t2, shift, pool=pool)
    pool.close()
    pool.join()

    stack(data, correlations, dt=10 * 24 * 3600, shift=5 * 24 * 3600)
    stack(data, correlations, dt=50 * 24 * 3600, shift=5 * 24 * 3600)
    stack(data, correlations, dt=-1)

    plotXcorrs(data,
               correlations,
               t1,
               t2,
               start=None,
               end=None,
               plot_overview=True,
               plot_years=False,
               use_dlognorm=False,
               plot_stack=True,
               plot_psd=False,
               add_to_title='',
               downsample=None)

    plotXcorrs(data,
               correlations,
               t1=None,
               t2=None,
               start=None,
               end=None,
               plot_overview=True,
               plot_years=False,
               use_dlognorm=False,
               plot_stack=True,
               plot_psd=False,
               add_to_title='',
               downsample=None,
               stack=('10days', '5days'))

    plotXcorrs(data,
               correlations,
               t1=None,
               t2=None,
               start=None,
               end=None,
               plot_overview=True,
               plot_years=False,
               use_dlognorm=False,
               plot_stack=True,
               plot_psd=False,
               add_to_title='',
               downsample=None,
               stack=('50days', '5days'))
Example #35
from sito.data import IPOC
import numpy as np
from sito.util.main import streamdaygen
import pylab as plt

stations = 'PB01'  # PB04 PB06 PB07 PB08 PB09 PB10 PB11 PB12 PB16'
channels = 'WKI WDI WII'
#channels = 'WDI_10'
datafile = '/home/richter/Data/climate/2006-2012_%s_%s.npz'
output = '/home/richter/Results/IPOC/climate/%s.pdf'
calculate = False
show = False

if calculate:
    ipoc = IPOC()
    for station in stations.split():
        for channel in channels.split():
            stream = ipoc.getChannelFromClient('2006-01-01', '2013-01-01',
                                               station=station, channel=channel)
            data = []
            dates = []
            for day in streamdaygen(stream):
                day.merge()
                data.append(np.mean(day[0].data))
                st = day[0].stats.starttime
                et = day[0].stats.endtime
                dates.append(st + (et - st) / 2.)
            np.savez(datafile % (station, channel), dates=dates, data=data)
else:
    # http://stackoverflow.com/questions/7733693/matplotlib-overlay-plots-with-different-scales
    pass
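
The empty plotting branch above cites a StackOverflow thread on overlaying series with different scales; the usual approach is a second y-axis via `twinx`. A minimal sketch with hypothetical data (a real script would load the `.npz` files saved by the `calculate` branch):

import numpy as np
import matplotlib.pyplot as plt

t = np.arange(365.)
wki = 20 + 5 * np.sin(2 * np.pi * t / 365)     # hypothetical WKI series
wdi = 1000 + 50 * np.cos(2 * np.pi * t / 365)  # hypothetical WDI series
fig, ax1 = plt.subplots()
ax1.plot(t, wki, 'r-')
ax1.set_ylabel('WKI', color='r')
ax2 = ax1.twinx()             # second y-axis sharing the same x-axis
ax2.plot(t, wdi, 'b-')
ax2.set_ylabel('WDI', color='b')
plt.show()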
Example #36
from sito.data import IPOC
import numpy as np
from sito.util.main import streamdaygen
import pylab as plt

stations = 'PB01'  # PB04 PB06 PB07 PB08 PB09 PB10 PB11 PB12 PB16'
channels = 'WKI WDI WII'
#channels = 'WDI_10'
datafile = '/home/richter/Data/climate/2006-2012_%s_%s.npz'
output = '/home/richter/Results/IPOC/climate/%s.pdf'
calculate = False
show = False

if calculate:
    ipoc = IPOC()
    for station in stations.split():
        for channel in channels.split():
            stream = ipoc.getChannelFromClient('2006-01-01',
                                               '2013-01-01',
                                               station=station,
                                               channel=channel)
            data = []
            dates = []
            for day in streamdaygen(stream):
                day.merge()
                data.append(np.mean(day[0].data))
                st = day[0].stats.starttime
                et = day[0].stats.endtime
                dates.append(st + (et - st) / 2.)
            np.savez(datafile % (station, channel), dates=dates, data=data)
Example #37
0
#!/usr/bin/env python
# by TR

from sito.data import IPOC
from obspy.core import UTCDateTime as UTC
import matplotlib.pyplot as plt
from sito.util.main import streamtimegen
from sito.stream import Stream
from progressbar import ProgressBar

data = IPOC()
t_day = UTC('2008-01-01')
station = 'PB01'

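# fetch one day of raw vertical-component data, remove the mean, bandpass it
# (filter2(0.5, 5) presumably means a 0.5-5 Hz bandpass) and keep the first
# 5 hours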
stream = data.getRawStreamFromClient(
    t_day, t_day + 24 * 3600, station, component='Z')
stream.setHI('filter', '')
stream.demean()
stream.filter2(0.5, 5)
stream.trim2(0, 5 * 3600)

auto = Stream()
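# autocorrelate 60 s windows shifted by 30 s; addZeros presumably pads each
# window with zeros so the correlation can run out to the full +-60 s lag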
for st in streamtimegen(stream, dt=60, start=None, shift=30, use_slice=True):
    tr = st[0].copy()
    tr.addZeros(60)
    tr.acorr(60)
    auto.append(tr)

print auto
auto.plotXcorr()
stream.plot(type='dayplot')
Example #38
0
    (2012, ) * 7)
# imports needed by the rest of this snippet (the `days = ...` assignment
# ending above is truncated in the source)
from sito.data import IPOC
from progressbar import ProgressBar
import matplotlib.pyplot as plt
stations = 'PB01 PB02 PB03 PB04 PB05 PB06 PB07 PB08 HMBCX MNMCX PATCX PSGCX LVC'
# 20 stations,  10 days, 2008-2012
# days = '2008-01-01 2008-07-01 2009-01-01 2009-07-01 2010-01-01 2010-07-01 2011-01-01 2011-07-01 2012-01-01 2012-07-01'
# stations = 'PB01 PB02 PB03 PB04 PB05 PB06 PB07 PB08 PB09 PB10 PB11 PB12 PB13 PB14 PB15 HMBCX MNMCX PATCX PSGCX LVC'
pic_dir = '/home/richter/Data/IPOC/raw_pictures/'
output = '/home/richter/Results/IPOC/raw_pics/test.pdf'
output = '/home/richter/Results/IPOC/raw_pics/13stations_7days_2012.pdf'
scale_range = 2000
scale_LVC = 20000

days = days.split()
stations = stations.split()
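# grid layout: one column per day, one row per station, in figure-fraction
# coordinates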
dx = 0.98 / len(days)
dy = 0.98 / len(stations)
data = IPOC()
figsize = (11.69, 16.53)  # A3 (A4 would be 8.27, 11.69)
fig = plt.figure(figsize=figsize)
for i, station in ProgressBar(len(stations))(enumerate(stations)):
    for j, day in enumerate(days):
        x0 = 0.01 + dx * (j + 0.02)
        y0 = 0.99 - dy * (1 + i)
        if i == 0:
            fig.text(x0 + 0.98 * dx / 2, 0.98, day, ha='center', va='center')
        if j == 0:
            fig.text(0.02,
                     y0 + (0.98 / 2) * dy,
                     station,
                     va='center',
                     ha='center',
                     rotation=90)
Example #39
0
from sito.data import IPOC

#method = 'filter4-6_water_env2_1bit'
#method = 'filter4-6_water_env2_1bit_fft'
#method = 'filter0.01-1_water_env2_whitening_1bit_fft'

#method = '/Tocopilla/filter0.01-1_water_env2_whitening_1bit_fft'
method = 'FINAL_filter0.01-0.5_1bit_whitening'
method = 'FINAL_filter0.01-0.5_1bit_auto'
method = 'FINAL_filter1-3_1bit_auto'
method = 'FINAL_filter4-6_1bit_auto'
method = 'FINAL_filter4-6_1bit_auto_3C'
method = 'FINAL_filter3-5'
method = 'PAT_filter9-11'
method = 'zerotest_nozero'

data = IPOC(xcorr_append='/' + method, use_local_LVC=False)
#path = '/home/richter/Results/IPOC/xcorr/Tocopilla/filter4-6_water_env2_1bit/stretch_t/'
#path = '/home/richter/Results/IPOC/xcorr/Tocopilla/filter4-6_water_env2_1bit_fft/stretch/'
#path = '/home/richter/Results/IPOC/xcorr/Tocopilla/filter0.01-1_water_env2_whitening_1bit/stretch2/'
#path = '/home/richter/Results/IPOC/xcorr/Tocopilla/filter0.01-1_water_env2_whitening_1bit_fft/stretch_Toco/'
path = '/home/richter/Results/IPOC/xcorr/FINAL_filter0.01-0.5_1bit_whitening/stretch_Toco/swcoda/'
path = '/home/richter/Results/IPOC/xcorr/FINAL_filter0.01-0.5_1bit_auto/stretch_Toco/'
path = '/home/richter/Results/IPOC/xcorr/FINAL_filter1-3_1bit_auto/stretch/'
path = '/home/richter/Results/IPOC/xcorr/FINAL_filter4-6_1bit_auto/stretch/'
path = '/home/richter/Results/IPOC/xcorr/FINAL_filter4-6_1bit_auto_3C/stretch3_10s/'
path = '/home/richter/Results/IPOC/xcorr/FINAL_filter3-5/stretch/'
path = '/home/richter/Results/IPOC/xcorr/PAT_filter9-11/stretch3/'
path = '/home/richter/Results/IPOC/xcorr/zerotest_nozero/stretch/'
#path = '/home/richter/Results/IPOC/xcorr/FINAL_filter4-6_1bit_auto/stretch2/'
#path = '/home/richter/Results/IPOC/xcorr/FINAL_filter4-6_1bit_auto/stretch_Toco_PATCX/'
#path = '/home/richter/Results/IPOC/xcorr/Tocopilla/filter4-6_water_env2_1bit_fft/stretch/'
Example #40
0
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# by TR

from sito.data import IPOC
from sito.noisexcorr import removeBad
import glob
from sito import read
import os.path

method = 'filter0.01-1_water_env2_whitening_1bit_fft'
data = IPOC(xcorr_append='/Tocopilla/' + method, use_local_LVC=True)
data.setXLogger('_' + method)
#path = '/home/richter/Results/IPOC/xcorr/Tocopilla/filter4-6_water_env2_1bit/stretch_t/'
#path = '/home/richter/Results/IPOC/xcorr/Tocopilla/filter0.01-1_water_env2_whitening_1bit/stretch2/'
path = '/home/richter/Results/IPOC/xcorr/Tocopilla/filter0.01-1_water_env2_whitening_1bit_fft/xcorr/'

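# normalize each stored cross-correlation file and remove bad traces
# (removeBad threshold 0.8; the exact criterion is a sito internal)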
for file in glob.glob(path + '*.QHD'):  #@ReservedAssignment
    if 'filter' not in os.path.basename(file):
        print file
        ms = read(file)
        ms.normalize()
        removeBad(ms, 0.8)
        ms.write(os.path.splitext(file)[0], 'Q')
Example #41
0
def main():
    stations = 'PB01 PB03'
    stations2 = 'PB03'
    components = 'Z'
    t1 = UTC('2010-01-01')
    t2 = UTC('2010-12-31')
    shift = 500
    correlations = get_correlations(stations, components, stations2)

    # run identical processing for three spectral whitening parameters
    for whitening in (0.01, 0.001, 0.1):
        method = 'filter0.1-1_1bit_whitening%s' % whitening
        data = IPOC(xcorr_append='/tests/' + method, use_local_LVC=True)
        data.setXLogger('_' + method)
        prepare(data,
                stations.split(),
                t1,
                t2,
                filter=(0.1, 1),
                downsample=10,
                whitening=whitening,
                component=components,
                normalize='1bit',
                param_norm=None,
                use_floating_stream=True)
        xcorr_day(data, correlations, t1, t2, shift, use_floating_stream=True)
        plotXcorrs(data,
                   correlations,
                   t1,
                   t2,
                   plot_overview=False,
                   plot_stack=True,
                   plot_psd=True,
                   add_to_title=method)
Example #42
0
         downsample=args.downsample,
         save=args.save,
         show=args.save is None))
print kwargs
if args.date is None:
    from sito import read
    from sito.imaging import plotTrace
    stream = read(args.file_station)
    plotTrace(stream, **kwargs)

else:
    from sito.imaging import plotTrace2
    station = args.file_station
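    # pick the data access object from the station name: IPOC stations start
    # with 'PB' (or are LVC); PKD belongs to the Parkfield network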
    if station.startswith('PB') or station == 'LVC':
        from sito.data import IPOC
        data = IPOC(xcorr_append=args.xcorr_append)
    elif station == 'PKD':
        from sito.data import Parkfield
        data = Parkfield(xcorr_append=args.xcorr_append)
    else:
        raise ValueError('Not a valid station name')

    day = UTCDateTime(args.date)
    if args.xcorr_append is None:
        stream = data.getRawStream(day, station, component=args.component)
    else:
        stream = data.getStream(day, station, component=args.component)
    if stream[0].stats.is_fft:
        stream.ifft()
    plotTrace2(stream, component=args.component, **kwargs)
Example #43
0
#import numpy as np
#import os.path
#import warnings
#from obspy.core.util.decorator import deprecated
#import itertools
import logging
from sito import Stream, read
from obspy.core.event import readEvents
from sito.data import IPOC
from obspy.core.util.attribdict import AttribDict
import numpy as np
import os.path
import glob
from progressbar import ProgressBar
log = logging.getLogger(__name__)
data = IPOC()


def get_event_id(expr):
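    # reduce a QuakeML resource id to a bare event id by dropping the
    # authority prefix and the NLL/Origin markers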
    if '/' in expr:
        expr = expr.split('/', 1)[1]
    expr = expr.replace('NLL.', '').replace('Origin#', '')
    return expr


def cut_events(in_, out):
    print 'read events...'
    catalog = readEvents(in_, 'QUAKEML')
    print 'cut events...'
    for event in ProgressBar()(catalog):
        oid = get_event_id(event.origins[0].resource_id.getQuakeMLURI())
Example #44
0
lon_Toc = -69.971

#events = Events.load(min_datetime="2007-01-01", max_datetime="2008-12-31",
#                     min_latitude=lat_Toc - 1, max_latitude=lat_Toc + 1,
#                     min_longitude=lon_Toc - 1., max_longitude=lon_Toc + 1,
#                     max_results=1000000,
#                     min_magnitude=None, max_magnitude=None)
#events.write('/home/richter/Data/events/events_Tocopilla.txt')
events = Events.read('/home/richter/Data/events/events_Tocopilla.txt')
events.pick(latitude=lat_Toc, longitude=lon_Toc, minval=0, maxval=100., indegree=False)
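# keep only events within 100 km of the epicenter (indegree=False, so the
# min/max values are presumably kilometers)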
#events.plot(lat_Toc, lon_Toc, circles=(1,))

method = 'filter2-20_1bit'
#method = 'filter0.005_1bit'

data = IPOC(xcorr_append='/Tocopilla/' + method, use_local_LVC=True)

t1 = UTC('2007-11-01')
t2 = UTC('2007-12-01')

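# read the stored PB03 autocorrelations in 30 min (1800 s) windows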
period = 1800
correlation = ('PB03Z', 'PB03Z')
stream = data.readX(correlation, t1, t2, period=period)
#stream.filter2(2, 20)
stream.setHIForHist(events, period=period)
figsize = (8.267, 11.693)[::-1]
add_to_title = '_againfilter_zoom1'
#save = data.getPlotXCorr(correlation, 'all') + '_againfilter_zoom1 + events.png'
save = False

stream.plotXcorr(0, 50, imshow=True, use_dlognorm=True, filter=(2, 20),