    # scales
    scales = wa.scales

    # associated time vector
    t = wa.time / 24.

    # reconstruction of the original data
    rx = wa.reconstruction()

    # determine autocorrelation factor for red noise
    acorr = acf(x)
    lagf = (acorr[1] + np.sqrt(acorr[2])) / 2
    print('acorr lagf is %s' % lagf)

    # determine significance levels
    (signif, fft_theory) = wave_sig.wave_signif(x, dt, scales, lag1=lagf)
    sig95 = np.ones_like(power) * np.array([signif] * len(t)).transpose()
    sig95 = power / sig95  # where ratio > 1, power is significant

    # Global wavelet spectrum & significance levels:
    global_int = variance * (np.sum(
        power, axis=0)) / x.shape[0]  # power summed over all scales
    gs = ((np.sum(power, axis=1)) / x.shape[0]) / variance  # time-averaged spectrum; assumes unit variance
    gswa = wa.global_wavelet_spectrum
    # Global wavelet significance
    (signif_g, fft_theory_g) = wave_sig.global_wave_signif(x,
                                                           dt,
                                                           scales,
                                                           lag1=lagf,
                                                           sigtest=1,
                                                           dof=len(x))
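
A note on the lag factor used throughout these examples: it feeds the red-noise (AR1) background of wave_signif by averaging the lag-1 autocorrelation with the square root of the lag-2 autocorrelation to get an effective lag-1 coefficient. A minimal self-contained sketch of that estimate, using a plain NumPy autocorrelation in place of the acf helper the snippets import:

import numpy as np

def lag1_factor(x):
    # Normalized autocorrelation at lags 0..2 (assumes a red-noise-like
    # series, so the lag-2 coefficient is positive and sqrt is defined).
    x = np.asarray(x, dtype=float) - np.mean(x)
    denom = np.sum(x * x)
    r = [np.sum(x[:x.size - k] * x[k:]) / denom for k in range(3)]
    # Effective lag-1 coefficient, matching (acorr[1] + sqrt(acorr[2])) / 2.
    return (r[1] + np.sqrt(r[2])) / 2
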
        # determine autocorrelation factor for red noise stream 1
        # uses entire data period for correlation

        # TODO: currently uses whatever preprocessing the ingest module applied (detrended, standardized, etc.)
        acorr_1 = acf(data_1['anom'])
        lagf_1 = (acorr_1[1] + np.sqrt(acorr_1[2])) / 2
        print('acorr lagf for datastream 1 is %s' % lagf_1)

        scalemin = 1
        scalemax = 32
        scale_ind = ((wa1c.scales >= scalemin) & (wa1c.scales <= scalemax))


        # determine significance levels for stream 1
        (signif_1, fft_theory_1) = wave_sig.wave_signif(data_1c['anom'],
                                                        wa1c.dt,
                                                        wa1c.scales[scale_ind],
                                                        lag1=lagf_1)
        sig95_1 = np.ones_like(wa1c.wavelet_power[scale_ind, :]) * np.array(
            [signif_1] * len(wa1c.time)).transpose()
        sig95_1 = wa1c.wavelet_power[
            scale_ind, :] / sig95_1  # where ratio > 1, power is significant


        # Global wavelet spectrum & significance levels stream 1:
        global_int_1 = data_1['variance'] * (np.sum(
            wa1c.wavelet_power,
            axis=0)) / data_1c['anom'].shape[0]  # power summed over all scales
        gs_1 = ((np.sum(wa1c.wavelet_power, axis=1)) /
                data_1c['anom'].shape[0]) / data_1['variance']  # time-averaged spectrum; assumes unit variance
        gswa_1 = wa1c.global_wavelet_spectrum
        # Global wavelet significance
        (signif_g_1, fft_theory_g_1) = wave_sig.global_wave_signif(
            data_1c['anom'], wa1c.dt, wa1c.scales,
            lag1=lagf_1, sigtest=1, dof=len(data_1c['anom']))


        """----------------------------- plot setup ------------------------------------------"""

    # scales 
    scales = wa.scales

    # associated time vector
    t = wa.time / 24.

    # reconstruction of the original data
    rx = wa.reconstruction()

    # determine autocorrelation factor for red noise
    acorr = acf(x)
    lagf = (acorr[1] + np.sqrt(acorr[2])) / 2
    print('acorr lagf is %s' % lagf)

    # determine significance levels
    (signif, fft_theory) = wave_sig.wave_signif(x, dt, scales, lag1=lagf)
    sig95 = np.ones_like(power) * np.array([signif] * len(t)).transpose()
    sig95 = power / sig95         # where ratio > 1, power is significant

    # Global wavelet spectrum & significance levels:
    global_int = variance * (np.sum(power, axis=0)) / x.shape[0]  # power summed over all scales
    gs = ((np.sum(power, axis=1)) / x.shape[0]) / variance  # time-averaged spectrum; assumes unit variance
    gswa = wa.global_wavelet_spectrum
    # Global wavelet significance
    (signif_g, fft_theory_g) = wave_sig.global_wave_signif(x, dt, scales, lag1=lagf, sigtest=1, dof=len(x))




    """----------------------------- plot setup ------------------------------------------"""
    T, S = np.meshgrid(t, scales)
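
The sig95 idiom repeated in these examples tiles the per-scale 95% significance levels across the time axis and divides the power by them, so any value above 1 marks significant power. Broadcasting against a column vector is an equivalent, shorter spelling (shapes here are illustrative):

import numpy as np

n_scales, n_times = 24, 480
power = np.random.rand(n_scales, n_times)   # placeholder (scale, time) power
signif = np.linspace(0.5, 2.0, n_scales)    # placeholder per-scale 95% levels

sig95 = power / signif[:, np.newaxis]       # ratio > 1 => significant power
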
Example #4
            # determine autocorrelation factor for red noise stream 1
            # uses entire data period for correlation

            # TODO: currently uses whatever preprocessing the ingest module applied (detrended, standardized, etc.)
            acorr_1 = acf(data_1['anom'])
            lagf_1 = (acorr_1[1] + np.sqrt(acorr_1[2])) / 2
            print('acorr lagf for datastream 1 is %s' % lagf_1)

            # determine autocorrelation factor for red noise stream 2
            # uses entire data period for correlation
            acorr_2 = acf(data_2['anom'])
            lagf_2 = (acorr_2[1] + np.sqrt(acorr_2[2])) / 2
            print('acorr lagf for datastream 2 is %s' % lagf_2)

            # determine significance levels for stream 1
            (signif_1, fft_theory_1) = wave_sig.wave_signif(
                data_1c['anom'], wa1c.dt, wa1c.scales, lag1=lagf_1)
            sig95_1 = np.ones_like(wa1c.wavelet_power) * np.array(
                [signif_1] * len(wa1c.time)).transpose()
            sig95_1 = wa1c.wavelet_power / sig95_1  # where ratio > 1, power is significant

            # determine significance levels for stream 2
            (signif_2, fft_theory_2) = wave_sig.wave_signif(
                data_2c['anom'], wa2c.dt, wa2c.scales, lag1=lagf_2)
            sig95_2 = np.ones_like(wa2c.wavelet_power) * np.array(
                [signif_2] * len(wa2c.time)).transpose()
            sig95_2 = wa2c.wavelet_power / sig95_2  # where ratio > 1, power is significant

            # Global wavelet spectrum & significance levels stream 1:
            global_int_1 = data_1['variance'] * (np.sum(
                wa1c.wavelet_power, axis=0)) / data_1c['anom'].shape[0]  # power summed over all scales
            gs_1 = ((np.sum(wa1c.wavelet_power, axis=1)) /
                    data_1c['anom'].shape[0]) / data_1['variance']  # time-averaged spectrum; assumes unit variance
            gswa_1 = wa1c.global_wavelet_spectrum
            # Global wavelet significance
            (signif_g_1, fft_theory_g_1) = wave_sig.global_wave_signif(
                data_1c['anom'], wa1c.dt, wa1c.scales,
                lag1=lagf_1, sigtest=1, dof=len(data_1c['anom']))
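
The gs_1 line above is the global wavelet spectrum: power averaged over the time axis and rescaled by the series variance, so a standardized series leaves it unchanged. A hedged stand-alone version of that reduction (the function name and arguments are mine, not part of the examples):

import numpy as np

def global_spectrum(power, n_times, variance=1.0):
    # Time-average the (scale, time) power, then normalize by variance.
    return (np.sum(power, axis=1) / n_times) / variance

# e.g. gs = global_spectrum(power, x.shape[0], variance) reproduces the gs line.
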
Example #5
        # determine autocorrelation factor for red noise stream 1
        # uses entire data period for correlation

        # TODO: currently uses whatever preprocessing the ingest module applied (detrended, standardized, etc.)
        acorr_1 = acf(data_1['anom'])
        lagf_1 = (acorr_1[1] + np.sqrt(acorr_1[2])) / 2
        print('acorr lagf for datastream 1 is %s' % lagf_1)

        scalemin = 1
        scalemax = 32
        scale_ind = ((wa1c.scales >= scalemin) & (wa1c.scales <= scalemax))

        # determine significance levels for stream 1
        (signif_1, fft_theory_1) = wave_sig.wave_signif(data_1c['anom'],
                                                        wa1c.dt,
                                                        wa1c.scales[scale_ind],
                                                        lag1=lagf_1)
        sig95_1 = np.ones_like(wa1c.wavelet_power[scale_ind, :]) * np.array(
            [signif_1] * len(wa1c.time)).transpose()
        sig95_1 = wa1c.wavelet_power[
            scale_ind, :] / sig95_1  # where ratio > 1, power is significant

        # Global wavelet spectrum & significance levels stream 1:
        global_int_1 = data_1['variance'] * (np.sum(
            wa1c.wavelet_power,
            axis=0)) / data_1c['anom'].shape[0]  # power summed over all scales
        gs_1 = ((np.sum(wa1c.wavelet_power, axis=1)) /
                data_1c['anom'].shape[0]) / data_1['variance']  # time-averaged spectrum; assumes unit variance
        gswa_1 = wa1c.global_wavelet_spectrum
        # Global wavelet significance
        (signif_g_1, fft_theory_g_1) = wave_sig.global_wave_signif(
            data_1c['anom'], wa1c.dt, wa1c.scales,
            lag1=lagf_1, sigtest=1, dof=len(data_1c['anom']))
Example #6
        # scales 
        scales = wa.scales

        # associated time vector
        t = wa.time

        # reconstruction of the original data
        rx = wa.reconstruction()

        # determine autocorrelation factor for red noise
        acorr = acf(data_1['anom'])
        lagf = (acorr[1] + np.sqrt(acorr[2])) / 2
        print('acorr lagf is %s' % lagf)

        # determine significance levels
        (signif, fft_theory) = wave_sig.wave_signif(data_1['anom'], data_1['dt'], scales, lag1=lagf)
        sig95 = np.ones_like(power) * np.array([signif] * len(t)).transpose()
        sig95 = power / sig95         # where ratio > 1, power is significant

        # Global wavelet spectrum & significance levels:
        global_int = data_1['variance'] * (np.sum(power, axis=0)) / data_1['anom'].shape[0]  # power summed over all scales
        gs = ((np.sum(power, axis=1)) / data_1['anom'].shape[0]) / data_1['variance']  # time-averaged spectrum; assumes unit variance
        gswa = wa.global_wavelet_spectrum
        # Global wavelet significance
        (signif_g, fft_theory_g) = wave_sig.global_wave_signif(
            data_1['anom'], data_1['dt'], scales,
            lag1=lagf, sigtest=1, dof=len(data_1['anom']))




        """----------------------------- plot setup ------------------------------------------"""
        T, S = np.meshgrid(t, scales)
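
The meshgrid built for plot setup is the usual input to a filled contour of the (scale, time) power, with the 95% significance ratio overlaid as a single contour at 1. A minimal matplotlib sketch under those assumptions (all arrays are placeholders; the log2 scale axis and inverted y follow common wavelet-plot convention rather than anything shown above):

import numpy as np
import matplotlib.pyplot as plt

t = np.arange(480) / 24.0                     # placeholder time vector (days)
scales = 2.0 ** np.arange(0, 6, 0.25)         # placeholder scale set
power = np.random.rand(scales.size, t.size)   # placeholder (scale, time) power
sig95 = np.random.rand(scales.size, t.size)   # placeholder significance ratio

T, S = np.meshgrid(t, scales)
fig, ax = plt.subplots()
ax.contourf(T, S, power, 32)                  # filled power contours
ax.contour(T, S, sig95, levels=[1.0], colors='k')  # 95% significance outline
ax.set_yscale('log', base=2)
ax.invert_yaxis()                             # smallest scales at the top
plt.show()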