# Report the estimated lag-1 autocorrelation factor used for the red-noise
# significance test.  Single-argument print() is valid under both Python 2
# and Python 3, unlike the bare Python 2 print statement it replaces.
print('acorr lagf is %s' % lagf)

    # determine significance levels
    # NOTE(review): x, dt, scales, power, t, variance, wa and wave_sig are all
    # defined earlier in the enclosing function (outside this fragment) —
    # verify against the full file.
    (signif, fft_theory) = wave_sig.wave_signif(x, dt, scales, lag1=lagf)
    # Tile the per-scale significance levels across every time step so the
    # power/significance ratio can be formed elementwise.
    sig95 = np.ones_like(power) * np.array([signif] * len(t)).transpose()
    sig95 = power / sig95  # where ratio > 1, power is significant

    # Global wavelet spectrum & significance levels:
    # Sum over scales (axis=0), normalised by series length and rescaled by
    # the series variance.
    global_int = variance * (np.sum(
        power, axis=0)) / x.shape[0]  # time-average over all times
    # Sum over time (axis=1) gives the global spectrum per scale.
    gs = ((np.sum(power, axis=1)) / x.shape[0]) / variance  #assume var=1
    gswa = wa.global_wavelet_spectrum
    # Global wavelet significance
    (signif_g, fft_theory_g) = wave_sig.global_wave_signif(x,
                                                           dt,
                                                           scales,
                                                           lag1=lagf,
                                                           sigtest=1,
                                                           dof=len(x))
    """----------------------------- plot setup ------------------------------------------"""
    # 2-D grids of time vs. scale for contour plotting.
    T, S = np.meshgrid(t, scales)
    """----------- plotting WaveTransform Power with confidence interval contour ----------"""

    plt, fig = wavelet_analy_plot.plot_wavetransf(wa,
                                                  T,
                                                  S,
                                                  sig95,
                                                  time_base,
                                                  plot_percentile=True)

    # NOTE(review): this savefig call is truncated in this fragment — its
    # argument list is never closed (compare the complete call later in this
    # file, which passes bbox_inches='tight', dpi=100).  Restore the missing
    # arguments and closing parenthesis before running.
    plt.savefig((fig_name_base + '_wave_' +
                 str(depth[level]).replace('.0', 'm') + '.png'),
        # Upper bound of the scale band to analyse/plot.
        # NOTE(review): scalemin is assumed to be assigned earlier in the
        # enclosing scope — confirm against the full file.
        scalemax = 32
        # Boolean mask selecting only scales inside [scalemin, scalemax].
        scale_ind = ((wa1c.scales >= scalemin) & (wa1c.scales <= scalemax))


        # determine significance levels for stream 1
        (signif_1, fft_theory_1) = wave_sig.wave_signif(data_1c['anom'],wa1c.dt,wa1c.scales[scale_ind],lag1=lagf_1)
        # Tile per-scale significance across all times, then form the
        # power/significance ratio on the band-limited power array.
        sig95_1 = np.ones_like(wa1c.wavelet_power[scale_ind,:]) * np.array([signif_1] * len(wa1c.time)).transpose()
        sig95_1 = wa1c.wavelet_power[scale_ind,:] / sig95_1         # where ratio > 1, power is significant


        # Global wavelet spectrum & significance levels stream 1:
        # Time-averaged (global) spectrum, variance-rescaled; note the full
        # (unmasked) power array is used here, not the band-limited one.
        global_int_1 = data_1['variance']*(np.sum(wa1c.wavelet_power, axis=0) ) / data_1c['anom'].shape[0]   # time-average over all times
        gs_1 = ((np.sum(wa1c.wavelet_power, axis=1) ) / data_1c['anom'].shape[0]) / data_1['variance'] #assume var=1
        gswa_1 = wa1c.global_wavelet_spectrum
        # Global wavelet significance
        (signif_g_1, fft_theory_g_1) = wave_sig.global_wave_signif(data_1c['anom'],wa1c.dt,wa1c.scales,lag1=lagf_1,sigtest=1, dof=len(data_1c['anom']))


        """----------------------------- plot setup ------------------------------------------"""


        """----------- plotting WaveTransform Power with confidence interval contour ----------"""


        # Output-file stem built from the basenames of the two input files,
        # e.g. 'images/<file1stem>_<file2 tokens 1:3>_'.
        fig_name_base = 'images/' + file1.split('/')[-1].split('.')[0] + '_' + "_".join(file2.split('/')[-1].split('_')[1:3]).split('.')[0] + '_'


        """----------------- zoom in to specified scales ----------"""
        # Meshgrid restricted to the selected scale band for the zoomed plot.
        T1, S1 = np.meshgrid(data_1c['time'], wa1c.scales[scale_ind])
        plt, fig = wavelet_analy_plot_goa.plot_wavetransf_time_zoom(data_1c['anom'], wa1c, T1, S1, sig95_1,\
         gs_1, signif_g_1, data_1['time_base'], data_ind=scale_ind, scalemin=scalemin, scalemax=scalemax, ylabel=par_1[1], plot_percentile=True)
# ---- Example #3 (scraped-example boundary marker; the stray "0" was a vote-count artifact) ----
            # determine significance levels for stream 1
            (signif_1, fft_theory_1) = wave_sig.wave_signif(data_1c['anom'],wa1c.dt,wa1c.scales,lag1=lagf_1)
            # Tile per-scale significance across all times; ratio > 1 marks
            # significant power.
            sig95_1 = np.ones_like(wa1c.wavelet_power) * np.array([signif_1] * len(wa1c.time)).transpose()
            sig95_1 = wa1c.wavelet_power / sig95_1         # where ratio > 1, power is significant

            # determine significance levels for stream 2
            # (comment in original said "stream 1" — this is the second series)
            (signif_2, fft_theory_2) = wave_sig.wave_signif(data_2c['anom'],wa2c.dt,wa2c.scales,lag1=lagf_2)
            sig95_2 = np.ones_like(wa2c.wavelet_power) * np.array([signif_2] * len(wa2c.time)).transpose()
            sig95_2 = wa2c.wavelet_power / sig95_2         # where ratio > 1, power is significant

            # Global wavelet spectrum & significance levels stream 1:
            global_int_1 = data_1['variance']*(np.sum(wa1c.wavelet_power, axis=0) ) / data_1c['anom'].shape[0]   # time-average over all times
            gs_1 = ((np.sum(wa1c.wavelet_power, axis=1) ) / data_1c['anom'].shape[0]) / data_1['variance'] #assume var=1
            gswa_1 = wa1c.global_wavelet_spectrum
            # Global wavelet significance
            (signif_g_1, fft_theory_g_1) = wave_sig.global_wave_signif(data_1c['anom'],wa1c.dt,wa1c.scales,lag1=lagf_1,sigtest=1, dof=len(data_1c['anom']))

            # Global wavelet spectrum & significance levels stream 2:
            global_int_2 = data_2['variance']*(np.sum(wa2c.wavelet_power, axis=0) ) / data_2c['anom'].shape[0]   # time-average over all times
            gs_2 = ((np.sum(wa2c.wavelet_power, axis=1) ) / data_2c['anom'].shape[0]) / data_2['variance'] #assume var=1
            gswa_2 = wa2c.global_wavelet_spectrum
            # Global wavelet significance
            (signif_g_2, fft_theory_g_2) = wave_sig.global_wave_signif(data_2c['anom'],wa2c.dt,wa2c.scales,lag1=lagf_2,sigtest=1, dof=len(data_2c['anom']))

            """----------------------------- plot setup ------------------------------------------"""
            # Time/scale grids for each stream's contour plot.
            T1, S1 = np.meshgrid(data_1c['time'], wa1c.scales)
            T2, S2 = np.meshgrid(data_2c['time'], wa2c.scales)


            """----------- plotting WaveTransform Power with confidence interval contour ----------"""
    # determine acor factor for red noise
    # acf() presumably returns the sample autocorrelation sequence of x —
    # confirm against its definition elsewhere in the project.
    acorr = acf(x)
    # Lag-1 red-noise factor estimated from the first two autocorrelation
    # lags (mean of acorr[1] and sqrt(acorr[2])).
    lagf = (acorr[1] + np.sqrt(acorr[2])) / 2
    # print() call form is valid under both Python 2 and Python 3,
    # unlike the original bare print statement.
    print('acorr lagf is %s' % lagf)

    # determine significance levels
    (signif, fft_theory) = wave_sig.wave_signif(x, dt, scales, lag1=lagf)
    # Tile the per-scale significance levels across every time step, then
    # form the power/significance ratio elementwise.
    sig95 = np.ones_like(power) * np.array([signif] * len(t)).transpose()
    sig95 = power / sig95         # where ratio > 1, power is significant

    # Global wavelet spectrum & significance levels:
    global_int = variance * (np.sum(power, axis=0)) / x.shape[0]   # time-average over all times
    gs = ((np.sum(power, axis=1)) / x.shape[0]) / variance  # assume var=1
    gswa = wa.global_wavelet_spectrum
    # Global wavelet significance
    (signif_g, fft_theory_g) = wave_sig.global_wave_signif(x, dt, scales, lag1=lagf, sigtest=1, dof=len(x))




    """----------------------------- plot setup ------------------------------------------"""
    # 2-D time/scale grids for contour plotting.
    T, S = np.meshgrid(t, scales)


    """----------- plotting WaveTransform Power with confidence interval contour ----------"""

    plt, fig = wavelet_analy_plot.plot_wavetransf(wa, T, S, sig95, time_base, plot_percentile=True)

    # dpi passed as a plain int (the original wrapped it in redundant parens).
    plt.savefig((fig_name_base + '_wave' + str(depth[level]).replace('.0', 'm') + '.png'), bbox_inches='tight', dpi=100)
    plt.close()
        # determine acor factor for red noise
        # acf() presumably returns the sample autocorrelation sequence of the
        # anomaly series — confirm against its definition in the project.
        acorr = acf(data_1['anom'])
        # Lag-1 red-noise factor from the first two autocorrelation lags.
        lagf = (acorr[1] + np.sqrt(acorr[2])) / 2
        # print() call form is valid under both Python 2 and Python 3,
        # unlike the original bare print statement.
        print('acorr lagf is %s' % lagf)

        # determine significance levels
        (signif, fft_theory) = wave_sig.wave_signif(data_1['anom'], data_1['dt'], scales, lag1=lagf)
        # Tile the per-scale significance levels across every time step, then
        # form the power/significance ratio elementwise.
        sig95 = np.ones_like(power) * np.array([signif] * len(t)).transpose()
        sig95 = power / sig95         # where ratio > 1, power is significant

        # Global wavelet spectrum & significance levels:
        global_int = data_1['variance'] * (np.sum(power, axis=0)) / data_1['anom'].shape[0]   # time-average over all times
        gs = ((np.sum(power, axis=1)) / data_1['anom'].shape[0]) / data_1['variance']  # assume var=1
        gswa = wa.global_wavelet_spectrum
        # Global wavelet significance
        (signif_g, fft_theory_g) = wave_sig.global_wave_signif(data_1['anom'], data_1['dt'], scales, lag1=lagf, sigtest=1, dof=len(data_1['anom']))




        """----------------------------- plot setup ------------------------------------------"""
        # 2-D time/scale grids for contour plotting.
        T, S = np.meshgrid(t, scales)


        """----------- plotting WaveTransform Power with confidence interval contour ----------"""



        """----------------- plotting contours w/global and timeseries ----------"""
        """----------------- zoom in to specified scales ----------"""