Example #1
import numpy as np

from pycwt.helpers import find


def boxpdf(x):
    """
    Forces the probability density function of the input data to have
    a boxed distribution.

    PARAMETERS
        x (array like) :
            Input data

    RETURNS
        bX (array like) :
            Boxed data varying between zero and one.
        X, Y (array like) :
            Data lookup table.

    """
    x = np.asarray(x)
    n = x.size

    # Kind of 'unique'
    i = np.argsort(x)
    d = (np.diff(x[i]) != 0)
    I = find(np.concatenate([d, [True]]))
    X = x[i][I]

    I = np.concatenate([[0], I + 1])
    Y = 0.5 * (I[0:-1] + I[1:]) / n
    bX = np.interp(x, X, Y)

    return bX, X, Y
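A short usage sketch (illustrative values): boxing a skewed sample spreads it uniformly over [0, 1).

data = np.random.lognormal(size=1000)  # skewed input (illustrative)
bX, X, Y = boxpdf(data)
# bX is the boxed version of data; (X, Y) is the lookup table that maps
# the unique input values to their boxed counterparts via interpolation.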
Example #3
    def get_averaged_scale(self, period_filter):
        energy = self.feature.amplitude**2
        selector = find(period_filter(self.get_periods()))
        interval = self.feature.dj * self.feature.dt
        cdelta = self.feature.mother.cdelta

        return pip(
            lambda scales: scales.transpose(),
            lambda scales: self.get_power() / scales,
            lambda scales: scales[selector, :].sum(axis=0),
            lambda global_scale: energy * interval * global_scale / cdelta)(
                self.get_scales() * np.ones((self.feature.N, 1)))
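`pip` is not defined in the snippet; from its usage it reads as a left-to-right function-composition ("pipe") helper. A minimal sketch under that assumption:

from functools import reduce

def pip(*funcs):
    # Compose left to right: pip(f, g)(x) == g(f(x)).
    return lambda x: reduce(lambda acc, f: f(acc), funcs, x)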
Example #4
def noise_estimate(indata, f_h, f_l):
    nt = indata.size
    tt = np.arange(0, nt) / Fs
    pf = np.polyfit(tt, indata, 1)
    indata_norm = indata - np.polyval(pf, tt)
    i_wave, i_scales, i_freqs, i_coi, i_fft, i_fftfreqs = wavelet.cwt(indata_norm, 1 / Fs, dj, s0, J, mother)
    i_power = (np.abs(i_wave)) ** 2
    i_period = 1 / i_freqs
    i_sel = find((i_period >= 1 / f_h) & (i_period < 1 / f_l))  # select frequency band for averaging
    i_Cdelta = mother.cdelta
    i_scale_avg = (i_scales * np.ones((nt, 1))).transpose()
    i_scale_avg = i_power / i_scale_avg  # As in Torrence and Compo (1998) equation 24
    i_scale_avg = dj / Fs * i_Cdelta * i_scale_avg[i_sel, :].sum(axis=0)
    i_max = max(i_scale_avg)
    return i_max
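A hedged usage sketch. `Fs`, `dj`, `s0`, `J` and `mother` are read as module-level globals by `noise_estimate`, so illustrative values are bound here:

import numpy as np
import pycwt as wavelet
from pycwt.helpers import find

Fs = 250.0                  # sampling frequency in Hz (assumed)
dj = 1 / 12                 # twelve sub-octaves per octave (assumed)
s0 = 2 / Fs                 # starting scale (assumed)
J = 7 / dj                  # seven powers of two (assumed)
mother = wavelet.Morlet(6)  # mother wavelet (assumed)

peak = noise_estimate(np.random.randn(2048), f_h=30.0, f_l=1.0)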
Example #5
    def get_averaged_scale_significance(self, period_filter, alpha, threshold):
        """
        Performs a scale-average test (equations 25 to 28).
        In this case dof should be set to a two-element vector [s1, s2],
            which gives the scale range that was averaged together.
        If, for example, the average between scales 2 and 8 was taken, then dof=[2, 8].
        """
        selector = find(period_filter(self.get_periods()))

        scales = self.get_scales()
        dof = [scales[selector[0]], scales[selector[-1]]]
        signif, _ = self.__get_significance(alpha,
                                            threshold,
                                            self.feature.amplitude**2,
                                            dof,
                                            test_type="scale-average")
        return signif
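A hypothetical call, assuming an analysis object exposing this method; the filter and numeric values below are illustrative only:

# Significance of power averaged over the 2--8 period band (sketch).
signif = analysis.get_averaged_scale_significance(
    lambda periods: (periods >= 2) & (periods < 8),  # period_filter
    alpha=0.72,       # lag-1 autocorrelation (assumed)
    threshold=0.95)   # significance level (assumed)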
Example #6
signif, fft_theor = wavelet.significance(1.0, dt, scales, 0, alpha,
                                         significance_level=0.95,
                                         wavelet=mother)
sig95 = numpy.ones([1, N]) * signif[:, None]
sig95 = power / sig95

glbl_power = power.mean(axis=1)
dof = N - scales  # Correction for padding at edges
glbl_signif, tmp = wavelet.significance(var,
                                        dt,
                                        scales,
                                        1,
                                        alpha,
                                        significance_level=0.95,
                                        dof=dof,
                                        wavelet=mother)

sel = find((period >= 2) & (period < 8))
Cdelta = mother.cdelta
scale_avg = (scales * numpy.ones((N, 1))).transpose()
scale_avg = power / scale_avg  # As in Torrence and Compo (1998) equation 24
scale_avg = var * dj * dt / Cdelta * scale_avg[sel, :].sum(axis=0)
scale_avg_signif, tmp = wavelet.significance(
    var,
    dt,
    scales,
    2,
    alpha,
    significance_level=0.95,
    dof=[scales[sel[0]], scales[sel[-1]]],
    wavelet=mother)
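For reference, the recurring "equation 24" comment points to the scale-averaged wavelet power of Torrence and Compo (1998), which in their notation reads

    \bar{W}_n^2 = \frac{\delta j\,\delta t}{C_\delta} \sum_{j=j_1}^{j_2} \frac{|W_n(s_j)|^2}{s_j}

and is exactly what the three scale_avg assignments above compute.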

Example #7
sig95 = numpy.ones([1, N]) * signif[:, None]
sig95 = power / sig95

# Power rectification as of Liu et al. (2007). TODO: confirm if significance
# test ratio should be calculated first.
# power /= scales[:, None]

# Calculates the global wavelet spectrum and determines its significance level.
glbl_power = power.mean(axis=1)
dof = N - scales                     # Correction for padding at edges
glbl_signif, tmp = wavelet.significance(std2, ds.dt, scales, 1, alpha,
                                        significance_level=slevel, dof=dof,
                                        wavelet=mother)

# Scale average between avg1 and avg2 periods and significance level
sel = find((period >= avg1) & (period < avg2))
Cdelta = mother.cdelta
scale_avg = (scales * numpy.ones((N, 1))).transpose()
# As in Torrence and Compo (1998) equation 24
scale_avg = power / scale_avg
scale_avg = std2 * dj * ds.dt / Cdelta * scale_avg[sel, :].sum(axis=0)
scale_avg_signif, tmp = wavelet.significance(std2, ds.dt, scales, 2, alpha,
                                             significance_level=slevel,
                                             dof=[scales[sel[0]],
                                                  scales[sel[-1]]],
                                             wavelet=mother)

# The following routines plot the results in four different subplots containing
# the original series anomaly, the wavelet power spectrum, the global wavelet
# and Fourier spectra and finally the range averaged wavelet spectrum. In all
# sub-plots the significance levels are either included as dotted lines or as
# filled contour lines.
Example #8
    p = np.polyfit(t, mydata0, 1)
    dat_norm = mydata0 - np.polyval(p, t)

    #####################Perform CWT####################################
    wave, scales, freqs, coi, fft, fftfreqs = wavelet.cwt(
        dat_norm, 1 / Fs, dj, s0, J, mother)
    power = (np.abs(wave))**2

    ############Plot 2D wavelet spectrum of the entire data chunk######################
    # plt.pcolormesh(t, freqs, power)
    # plt.ylabel('Frequency [Hz]')
    # plt.xlabel('Time [sec]')

    #######################Calculate spectrum scale average and significance#############
    period = 1 / freqs
    sel = find((period >= 1 / f_high) &
               (period < 1 / f_low))  # select frequency band for averaging
    Cdelta = mother.cdelta
    scale_avg = (scales * np.ones((N, 1))).transpose()
    scale_avg = power / scale_avg  # As in Torrence and Compo (1998) equation 24
    scale_avg = dj / Fs * Cdelta * scale_avg[sel, :].sum(axis=0)
    # scale_avg_signif, tmp = wavelet.significance(var, 1/Fs, scales, 2, alpha,significance_level=0.95,dof=[scales[sel[0]],scales[sel[-1]]],wavelet=mother)
    # plt.plot(t, scale_avg, 'k-', linewidth=1.5)
    # plt.show()

    #####################Locate peaks in the scale-averaged spectrum############################################

    indexes = peakutils.indexes(scale_avg,
                                thres=threshold / max(scale_avg),
                                min_dist=5000)
    np.savetxt(logf, [(indexes + I * chunksize)],
               fmt='%d',
Example #9
def main():
    # Then, we load the dataset and define some data related parameters. In this
    # case, the first 19 lines of the data file contain meta-data that we ignore,
    # since we set them manually (*i.e.* title, units).
    url = 'http://paos.colorado.edu/research/wavelets/wave_idl/nino3sst.txt'
    dat = numpy.genfromtxt(url, skip_header=19)
    title = 'NINO3 Sea Surface Temperature'
    label = 'NINO3 SST'
    units = 'degC'
    t0 = 1871.0
    dt = 0.25  # In years

#%%
    # We also create a time array in years.
    N = dat.size
    t = numpy.arange(0, N) * dt + t0
#%%
    # We write the following code to detrend and normalize the input data by its
    # standard deviation. Sometimes detrending is not necessary and simply
    # removing the mean value is good enough. However, if your dataset has a well
    # defined trend, such as the Mauna Loa CO\ :sub:`2` dataset available in the
    # above mentioned website, it is strongly advised to perform detrending.
    # Here, we fit a one-degree polynomial function and then subtract it from the
    # original data.
    p = numpy.polyfit(t - t0, dat, 1)
    dat_notrend = dat - numpy.polyval(p, t - t0)
    std = dat_notrend.std()  # Standard deviation
    var = std ** 2  # Variance
    dat_norm = dat_notrend / std  # Normalized dataset
#%%
    # The next step is to define some parameters of our wavelet analysis. We
    # select the mother wavelet, in this case the Morlet wavelet with
    # :math:`\omega_0=6`.
    mother = wavelet.Morlet(6)
    s0 = 2 * dt  # Starting scale, in this case 2 * 0.25 years = 6 months
    dj = 1 / 12  # Twelve sub-octaves per octaves
    J = 7 / dj  # Seven powers of two with dj sub-octaves
    alpha, _, _ = wavelet.ar1(dat)  # Lag-1 autocorrelation for red noise

#%%
    # The following routines perform the wavelet transform and inverse wavelet
    # transform using the parameters defined above. Since we have normalized our
    # input time-series, we multiply the inverse transform by the standard
    # deviation.
    wave, scales, freqs, coi, fft, fftfreqs = wavelet.cwt(dat_norm, dt, dj, s0,
                                                          J, mother)
    iwave = wavelet.icwt(wave, scales, dt, dj, mother) * std

#%%
    # We calculate the normalized wavelet and Fourier power spectra, as well as
    # the Fourier equivalent periods for each wavelet scale.
    power = (numpy.abs(wave)) ** 2
    fft_power = numpy.abs(fft) ** 2
    period = 1 / freqs
    
#%%
    # We could stop at this point and plot our results. However, we are also
    # interested in the power spectra significance test. The power is significant
    # where the ratio ``power / sig95 > 1``.
    signif, fft_theor = wavelet.significance(1.0, dt, scales, 0, alpha,
                                             significance_level=0.95,
                                             wavelet=mother)
    sig95 = numpy.ones([1, N]) * signif[:, None]
    sig95 = power / sig95

#%%
    # Then, we calculate the global wavelet spectrum and determine its
    # significance level.
    glbl_power = power.mean(axis=1)
    dof = N - scales  # Correction for padding at edges
    glbl_signif, tmp = wavelet.significance(var, dt, scales, 1, alpha,
                                            significance_level=0.95, dof=dof,
                                            wavelet=mother)
    
#%%
    # We also calculate the scale average between 2 years and 8 years, and its
    # significance level.
    sel = find((period >= 2) & (period < 8))
    Cdelta = mother.cdelta
    scale_avg = (scales * numpy.ones((N, 1))).transpose()
    scale_avg = power / scale_avg  # As in Torrence and Compo (1998) equation 24
    scale_avg = var * dj * dt / Cdelta * scale_avg[sel, :].sum(axis=0)
    scale_avg_signif, tmp = wavelet.significance(var, dt, scales, 2, alpha,
                                                 significance_level=0.95,
                                                 dof=[scales[sel[0]],
                                                      scales[sel[-1]]],
                                                 wavelet=mother)

#%%
    # Finally, we plot our results in four different subplots containing the
    # (i) original series anomaly and the inverse wavelet transform; (ii) the
    # wavelet power spectrum; (iii) the global wavelet and Fourier spectra; and
    # (iv) the range-averaged wavelet spectrum. In all sub-plots the significance
    # levels are either included as dotted lines or as filled contour lines.

    # Prepare the figure
    pyplot.close('all')
    pyplot.ioff()
    figprops = dict(figsize=(11, 8), dpi=72)
    fig = pyplot.figure(**figprops)
    
#%%
    # First sub-plot, the original time series anomaly and inverse wavelet
    # transform.
    ax = pyplot.axes([0.1, 0.75, 0.65, 0.2])
    ax.plot(t, iwave, '-', linewidth=1, color=[0.5, 0.5, 0.5])
    ax.plot(t, dat, 'k', linewidth=1.5)
    ax.set_title('a) {}'.format(title))
    ax.set_ylabel(r'{} [{}]'.format(label, units))
Example #10
def takewav_makefig(dd, moornum):

        if moornum == 8:
            dt = 1
            dat = pd.read_csv(dd, header=12, sep=r'\s+')
            date = unique([datetime.datetime(int(dat.iloc[ii, 0]),
                                             int(dat.iloc[ii, 1]),
                                             int(dat.iloc[ii, 2]),
                                             int(dat.iloc[ii, 3])) for ii in range(len(dat))])
            utest = array(dat.iloc[:, 6] / 100)
            vtest = array(dat.iloc[:, 7] / 100)
            nomd = int(nanmean(array(dat.iloc[:, 5])))
            dat = utest**2 + vtest**2
            savetit = 'M1-' + str(nomd) + 'm'

        else:
            dataset = xr.open_dataset(dd)
            date = dataset['TIME']
            ke = dataset['UCUR']**2 + dataset['VCUR']**2
            dat = ke.values.flatten()
            dt = 0.5
            nomd = int(dataset.geospatial_vertical_min)
            savetit = dataset.platform_code[-3:] + '-' + str(nomd) + 'm'

        dat[isnan(dat)] = nanmean(dat)
        alpha, _, _ = wavelet.ar1(dat)  # Lag-1 autocorrelation for red noise

        N = len(dat)

        # Time array, in hours
        t = numpy.arange(0, N) * dt

        std = dat.std()
        var = std**2
        dat_norm = dat / std


        # The following routines perform the wavelet transform and inverse wavelet transform using the parameters defined above. Since we have normalized our input time-series, we multiply the inverse transform by the standard deviation.
        wave, scales, freqs, coi, fft, fftfreqs = wavelet.cwt(dat_norm, dt, dj, s0, J, mother)

        iwave = wavelet.icwt(wave, scales, dt, dj, mother) * std

        # We calculate the normalized wavelet and Fourier power spectra, as well as the Fourier equivalent periods for each wavelet scale.

        power = (numpy.abs(wave)) ** 2
        fft_power = numpy.abs(fft) ** 2
        period = 1 / freqs

        # Optionally, we could also rectify the power spectrum according to the suggestions proposed by Liu et al. (2007)[2]

        power /= scales[:, None]

        # We could stop at this point and plot our results. However we are also interested in the power spectra significance test. The power is significant where the ratio power / sig95 > 1.


        signif, fft_theor = wavelet.significance(1.0, dt, scales, 0, alpha,
                                                 significance_level=0.95,
                                                 wavelet=mother)


        sig95 = numpy.ones([1, N]) * signif[:, None]
        sig95 = power / sig95

        # Then, we calculate the global wavelet spectrum and determine its significance level.

        glbl_power = power.mean(axis=1)
        dof = N - scales  # Correction for padding at edges
        glbl_signif, tmp = wavelet.significance(var, dt, scales, 1, alpha,
                                                significance_level=0.95, dof=dof,
                                                wavelet=mother)

        # We also calculate the scale average between pmin and pmax, and its significance level.
        f, dx = pyplot.subplots(6, 1, figsize=(12, 12), sharex=True)
        bands = [1, 2, 8, 16, 48, 128, 512]
        for ii in range(len(bands)-1):
            pmin = bands[ii]
            pmax = bands[ii + 1]
            sel = find((period >= pmin) & (period < pmax))
            Cdelta = mother.cdelta
            scale_avg = (scales * numpy.ones((N, 1))).transpose()
            scale_avg = power / scale_avg  # As in Torrence and Compo (1998) equation 24
            scale_avg = var * dj * dt / Cdelta * scale_avg[sel, :].sum(axis=0)
            scale_avg_signif, tmp = wavelet.significance(var, dt, scales, 2, alpha,
                                                         significance_level=0.95,
                                                         dof=[scales[sel[0]],
                                                              scales[sel[-1]]],
                                                         wavelet=mother)

            dx[ii].axhline(scale_avg_signif, color='C' + str(ii), linestyle='--', linewidth=1.)
            dx[ii].plot(date, scale_avg, '-', color='C' + str(ii), linewidth=1.5,
                        label='{}--{} hour band'.format(pmin, pmax))
            [dx[ii].axvline(dd, color=clist[jj], linewidth=3) for jj, dd in enumerate(dlist)]
            dx[ii].legend()

        dx[0].set_title('Scale-averaged power: ' + savetit)
        dx[3].set_ylabel(r'Average variance [{}]'.format(units))
        if moornum == 8:
            dx[0].set_xlim(date[0], date[-1])
        else:
            dx[0].set_xlim(date[0].values, date[-1].values)
        savefig(figdir + 'ScaleSep_' + savetit + '.png', bbox_inches='tight')

        pmin = 2
        pmax = 24

        sel = find((period >= pmin) & (period < pmax))
        Cdelta = mother.cdelta
        scale_avg = (scales * numpy.ones((N, 1))).transpose()
        scale_avg = power / scale_avg  # As in Torrence and Compo (1998) equation 24
        scale_avg = var * dj * dt / Cdelta * scale_avg[sel, :].sum(axis=0)
        scale_avg_signif, tmp = wavelet.significance(var, dt, scales, 2, alpha,
                                                     significance_level=0.95,
                                                     dof=[scales[sel[0]],
                                                          scales[sel[-1]]],
                                                     wavelet=mother)



        figprops = dict(figsize=(11, 8), dpi=72)
        fig = pyplot.figure(**figprops)

        # First sub-plot, the original time series anomaly and inverse wavelet
        # transform.
        ax = pyplot.axes([0.1, 0.75, 0.65, 0.2])
        ax.plot(date, dat, linewidth=1.5, color=[0.5, 0.5, 0.5])
        ax.plot(date, iwave, 'k-', linewidth=1, zorder=100)
        if moornum == 8:
            ax.set_xlim(date[0], date[-1])
        else:
            ax.set_xlim(date[0].values, date[-1].values)
        # ax.set_title('a) {}'.format(title))
        ax.set_ylabel(r'{} [{}]'.format(label, units))
        # Second sub-plot, the normalized wavelet power spectrum and significance
        # level contour lines and cone of influence hatched area. Note that period
        # scale is logarithmic.

        bx = pyplot.axes([0.1, 0.37, 0.65, 0.28])
        levels = [0.0625, 0.125, 0.25, 0.5, 1, 2, 4, 8, 16]
        bx.contourf(t, numpy.log2(period), numpy.log2(power), numpy.log2(levels),
                    extend='both', cmap=pyplot.cm.viridis)
        extent = [t.min(), t.max(), 0, max(period)]
        bx.contour(t, numpy.log2(period), sig95, [-99, 1], colors='k', linewidths=2,
                   extent=extent)
        bx.fill(numpy.concatenate([t, t[-1:] + dt, t[-1:] + dt,
                                   t[:1] - dt, t[:1] - dt]),
                numpy.concatenate([numpy.log2(coi), [1e-9], numpy.log2(period[-1:]),
                                   numpy.log2(period[-1:]), [1e-9]]),
                'k', alpha=0.3, hatch='x')
        bx.set_title('{} Wavelet Power Spectrum ({})'.format(label, mother.name))
        bx.set_ylabel('Period (hours)')
        #
        Yticks = 2 ** numpy.arange(numpy.ceil(numpy.log2(period.min())),
                                   numpy.ceil(numpy.log2(period.max())))
        bx.set_yticks(numpy.log2(Yticks))
        bx.set_yticklabels(Yticks)
        bx.set_xticklabels('')
        bx.set_xlim(t.min(), t.max())


        # Third sub-plot, the global wavelet and Fourier power spectra and theoretical
        # noise spectra. Note that period scale is logarithmic.
        cx = pyplot.axes([0.77, 0.37, 0.2, 0.28], sharey=bx)
        cx.plot(glbl_signif, numpy.log2(period), 'k--')
        cx.plot(var * fft_theor, numpy.log2(period), '--', color='#cccccc')
        cx.plot(var * fft_power, numpy.log2(1./fftfreqs), '-', color='#cccccc',
                linewidth=1.)
        cx.plot(var * glbl_power, numpy.log2(period), 'k-', linewidth=1.5)
        cx.set_title('Global Wavelet Spectrum')
        cx.set_xlabel(r'Power [({})^2]'.format(units))
        cx.set_xlim([0, glbl_power.max() + var])
        cx.set_ylim(numpy.log2([period.min(), period.max()]))
        cx.set_yticks(numpy.log2(Yticks))
        cx.set_yticklabels(Yticks)
        pyplot.setp(cx.get_yticklabels(), visible=False)

        spowdic = {}
        spowdic['sig'] = scale_avg_signif
        if moornum == 8:
            spowdic['date'] = date
        else:
            spowdic['date'] = date.values
        spowdic['spow'] = scale_avg

        # Fourth sub-plot, the scale averaged wavelet spectrum.
        dx = pyplot.axes([0.1, 0.07, 0.65, 0.2], sharex=ax)
        dx.axhline(scale_avg_signif, color='k', linestyle='--', linewidth=1.)
        dx.plot(date, scale_avg, 'k-', linewidth=1.5)
        dx.set_title('{}--{} hour scale-averaged power'.format(pmin, pmax))
        # [dx.axvline(dd,color=clist[ii],linewidth=3) for ii,dd in enumerate(dlist)]
        # dx.set_xlabel('Time (hours)')
        dx.set_ylabel(r'Average variance [{}]'.format(units))
        if moornum == 8:
            dx.set_xlim(date[0], date[-1])
        else:
            dx.set_xlim(date[0].values, date[-1].values)

        fig.suptitle(savetit)
        savefig(figdir + 'Wavelet_' + savetit + '.png', bbox_inches='tight')

        return nomd, spowdic
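A hypothetical driver, assuming a list `moorfiles` of input paths and the module-level globals (dj, s0, J, mother, units, label, figdir, clist, dlist) that the function reads:

spow_by_depth = {}
for moornum, dd in enumerate(moorfiles):
    nomd, spowdic = takewav_makefig(dd, moornum)
    spow_by_depth[nomd] = spowdic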
Example #11
def parse_frames(image_file, sig=0.95):
    """
    
    """
    cap = cv2.VideoCapture(image_file)
    if verbose: print("Video successfully loaded")
    FRAME_COUNT = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
    FPS = cap.get(cv2.CAP_PROP_FPS)
    if verbose > 1:
        FRAME_HEIGHT = cap.get(cv2.CAP_PROP_FRAME_HEIGHT)
        FRAME_WIDTH = cap.get(cv2.CAP_PROP_FRAME_WIDTH)
        print(
            "INFO: \n Frame count: ",
            FRAME_COUNT,
            "\n",
            "FPS: ",
            FPS,
            " \n",
            "FRAME_HEIGHT: ",
            FRAME_HEIGHT,
            " \n",
            "FRAME_WIDTH: ",
            FRAME_WIDTH,
            " \n",
        )

    directory = os.getcwd(
    ) + '\\analysis\\{}_{}_{}_{}({})_{}_{}_scaled\\'.format(
        date, trial_type, name, wavelet, order, per_min, per_max)
    if not os.path.exists(directory):
        os.makedirs(directory)
    made = False
    frame_idx = 0
    idx = 0
    dropped = 0
    skip = True
    thresh = None

    df_wav = pd.DataFrame()
    df_auc = pd.DataFrame()
    df_for = pd.DataFrame()
    df_pow = pd.DataFrame()

    for i in range(FRAME_COUNT):
        a, img = cap.read()
        if a:
            frame_idx += 1

            if not made:
                # First we need to manually determine the boundaries and angle
                res = bg.manual_format(img)
                #print(res)
                x, y, w, h, angle = res
                horizon_begin = x
                horizon_end = x + w
                vert_begin = y
                vert_end = y + h
                #scale_array = np.zeros((FRAME_COUNT, abs(horizon_begin - horizon_end)))
                #area_time = np.zeros((FRAME_COUNT))
                print("Now Select the Red dot")
                red_res = bg.manual_format(img, stop_sign=True)
                red_x, red_y, red_w, red_h = red_res
                box_h_begin = red_x
                box_h_end = red_x + red_w
                box_v_begin = red_y
                box_v_end = red_y + red_h
                made = True
                #dims = (vert_begin, vert_end, horizon_begin, horizon_end)

            real_time = i / FPS
            rows, cols, chs = img.shape
            M = cv2.getRotationMatrix2D((cols / 2, rows / 2), angle, 1)
            rot_img = cv2.warpAffine(img, M, (cols, rows))
            roi = rot_img[vert_begin:vert_end, horizon_begin:horizon_end, :]

            red_box = img[box_v_begin:box_v_end, box_h_begin:box_h_end, 2]
            if thresh is None:
                thresh = np.mean(red_box)
            #print(np.mean(red_box))
            percent_drop = 1 - (np.mean(red_box) / thresh)
            print(percent_drop)
            if percent_drop >= 0.18:
                #cv2.imshow("Red Image", red_box)
                #cv2.waitKey(0)
                skip = False

            if skip:
                if verbose >= 1:
                    print('Frame is skipped {} / {}'.format(
                        frame_idx, FRAME_COUNT))
                continue

            if verbose >= 1:
                print('Processing frame {} / {}'.format(
                    frame_idx, FRAME_COUNT))

            idx += 1
            begin_code, data_line = extract_frame(roi)

            #We need to detrend the data before sending it away
            N = len(data_line)
            dt = su / N
            t = np.arange(0, N) * dt
            t = t - np.mean(t)

            var, std, dat_norm = detrend(data_line)
            ###################################################################
            if wavelet == 'DOG':
                mother = cwt.DOG(order)
            elif wavelet == 'Paul':
                mother = cwt.Paul(order)
            elif wavelet == 'Morlet':
                mother = cwt.Morlet(order)
            elif wavelet == 'MexicanHat':
                mother = cwt.MexicanHat(order)
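            # Note (sketch): the if/elif chain above could equally be built from
            # a lookup table, e.g. mother = {'DOG': cwt.DOG, 'Paul': cwt.Paul,
            # 'Morlet': cwt.Morlet, 'MexicanHat': cwt.MexicanHat}[wavelet](order)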

            s0 = 4 * dt
            try:
                alpha, _, _ = cwt.ar1(dat_norm)
            except Exception:  # AR(1) estimation can fail on degenerate input
                alpha = 0.95

            wave, scales, freqs, coi, fft, fftfreqs = cwt.cwt(
                dat_norm, dt, dj, s0, J, mother)

            iwave = cwt.icwt(
                wave, scales, dt, dj,
                mother) * std  #This is a reconstruction of the wave

            power = (np.abs(wave))**2  # Wavelet power spectrum
            fft_power = np.abs(fft)**2  # Fourier power spectrum
            period = 1 / freqs  # Periods of the wavelet analysis, in cm
            power /= scales[:, None]  # Rectification suggested by Liu et al. (2007)

            # Next we calculate the significance of the power spectra. The power
            # is significant where the ratio power / sig95 > 1.
            signif, fft_theor = cwt.significance(1.0,
                                                 dt,
                                                 scales,
                                                 0,
                                                 alpha,
                                                 significance_level=0.95,
                                                 wavelet=mother)
            sig95 = np.ones([1, N]) * signif[:, None]
            sig95 = power / sig95

            # Significance of the global wavelet spectrum
            glbl_power = power.mean(axis=1)
            dof = N - scales  # Correction for padding at edges
            glbl_signif, tmp = cwt.significance(var,
                                                dt,
                                                scales,
                                                1,
                                                alpha,
                                                significance_level=0.95,
                                                dof=dof,
                                                wavelet=mother)

            sel = find((period >= per_min) & (period < per_max))
            Cdelta = mother.cdelta
            scale_avg = (scales * np.ones((N, 1))).transpose()
            scale_avg = power / scale_avg  # As in Torrence and Compo (1998) equation 24
            #scale_avg = var * dj * dt / Cdelta * scale_avg[sel, :].sum(axis=0)

            #scale_array[i,:] = scale_array[i,:]/np.max(scale_array[i,:])
            #data_array[i,:] = data_array[i,:]/np.max(data_array[i,:])

            scale_avg = var * dj * dt / Cdelta * scale_avg[sel, :].sum(axis=0)
            scale_avg_signif, tmp = cwt.significance(
                var,
                dt,
                scales,
                2,
                alpha,
                significance_level=0.95,
                dof=[scales[sel[0]], scales[sel[-1]]],
                wavelet=mother)
            Yticks = 2**np.arange(np.ceil(np.log2(period.min())),
                                  np.ceil(np.log2(period.max())))

            plt.close('all')
            plt.ioff()
            figprops = dict(figsize=(11, 8), dpi=72)
            fig = plt.figure(**figprops)

            wx = plt.axes([0.77, 0.75, 0.2, 0.2])
            imz = 0
            for idxy in range(0, len(period), 10):
                wx.plot(t, mother.psi(t / period[idxy]) + imz, linewidth=1.5)
                imz += 1
            wx.xaxis.set_ticklabels([])
            #wx.set_ylim([-10,10])
            # First sub-plot, the original time series anomaly and inverse wavelet
            # transform.
            ax = plt.axes([0.1, 0.75, 0.65, 0.2])
            ax.plot(t,
                    data_line - np.mean(data_line),
                    'k',
                    label="Original Data")
            ax.plot(t,
                    iwave,
                    '-',
                    linewidth=1,
                    color=[0.5, 0.5, 0.5],
                    label="Reconstructed wave")
            ax.plot(t,
                    dat_norm,
                    '--k',
                    linewidth=1.5,
                    color=[0.5, 0.5, 0.5],
                    label="Denoised Wave")
            ax.set_title(
                'a) {:10.2f} from beginning of trial.'.format(real_time))
            ax.set_ylabel(r'{} [{}]'.format("Amplitude", unit))
            ax.legend(loc=1)
            ax.set_ylim([-200, 200])
            #If the non-serrated section, bounds are 200 -
            # Second sub-plot, the normalized wavelet power spectrum and significance
            # level contour lines and cone of influece hatched area. Note that period
            # scale is logarithmic.
            bx = plt.axes([0.1, 0.37, 0.65, 0.28], sharex=ax)
            levels = [0.0625, 0.125, 0.25, 0.5, 1, 2, 4, 8, 16]
            cont = bx.contourf(t,
                               np.log2(period),
                               np.log2(power),
                               np.log2(levels),
                               extend='both',
                               cmap=plt.cm.viridis)
            extent = [t.min(), t.max(), 0, max(period)]
            bx.contour(t,
                       np.log2(period),
                       sig95, [-99, 1],
                       colors='k',
                       linewidths=2,
                       extent=extent)
            bx.fill(np.concatenate(
                [t, t[-1:] + dt, t[-1:] + dt, t[:1] - dt, t[:1] - dt]),
                    np.concatenate([
                        np.log2(coi), [1e-9],
                        np.log2(period[-1:]),
                        np.log2(period[-1:]), [1e-9]
                    ]),
                    'k',
                    alpha=0.3,
                    hatch='x')
            bx.set_title(
                'b) {} Octaves Wavelet Power Spectrum [{}({})]'.format(
                    octaves, mother.name, order))
            bx.set_ylabel('Period (cm)')
            #
            Yticks = 2**np.arange(np.ceil(np.log2(period.min())),
                                  np.ceil(np.log2(period.max())))
            bx.set_yticks(np.log2(Yticks))
            bx.set_yticklabels(Yticks)
            cbar = fig.colorbar(cont, ax=bx)
            # Third sub-plot, the global wavelet and Fourier power spectra and theoretical
            # noise spectra. Note that period scale is logarithmic.
            cx = plt.axes([0.77, 0.37, 0.2, 0.28], sharey=bx)
            cx.plot(glbl_signif, np.log2(period), 'k--')
            cx.plot(var * fft_theor, np.log2(period), '--', color='#cccccc')
            cx.plot(var * fft_power,
                    np.log2(1. / fftfreqs),
                    '-',
                    color='#cccccc',
                    linewidth=1.)
            cx.plot(var * glbl_power, np.log2(period), 'k-', linewidth=1.5)
            cx.set_title('c) Global Wavelet Spectrum')
            cx.set_xlabel(r'Power [({})^2]'.format(unit))
            #cx.set_xlim([0, (var*fft_theor).max()])
            plt.xscale('log')
            cx.set_ylim(np.log2([period.min(), period.max()]))
            cx.set_yticks(np.log2(Yticks))
            cx.set_yticklabels(Yticks)

            #if sig_array == []:
            yvals = np.linspace(Yticks.min(), Yticks.max(), len(period))

            plt.xscale('linear')
            plt.setp(cx.get_yticklabels(), visible=False)

            # Fourth sub-plot, the scale averaged wavelet spectrum.
            dx = plt.axes([0.1, 0.07, 0.65, 0.2], sharex=ax)
            dx.axhline(scale_avg_signif,
                       color='k',
                       linestyle='--',
                       linewidth=1.)
            dx.plot(t, scale_avg, 'k-', linewidth=1.5)
            dx.set_title('d) {}-{}cm scale-averaged power'.format(
                per_min, per_max))
            dx.set_xlabel('Distance from center(cm)')
            dx.set_ylabel(r'Average variance [{}]'.format(unit))
            #dx.set_ylim([0,500])
            ax.set_xlim([t.min(), t.max()])

            #plt.savefig(directory+'{}_analysis_frame-{}.png'.format(name, idx), bbox = 'tight')
            if verbose >= 2:
                print('*' * int((i / FRAME_COUNT) * 100))

            df_wav[real_time] = (pd.Series(dat_norm, index=t))
            df_pow[real_time] = (pd.Series(var * glbl_power,
                                           index=np.log2(period)))
            df_for[real_time] = (pd.Series(var * fft_power,
                                           index=np.log2(1. / fftfreqs)))
            df_auc[real_time] = [np.trapz(data_line)]

        else:
            print("Frame #{} has dropped".format(i))
            dropped += 1

    if verbose >= 1: print('All images saved')
    if verbose >= 1:
        print("{:10.2f} % of the frames have dropped".format(
            (dropped / FRAME_COUNT) * 100))

    # Plotting and saving the figures

    row, cols = df_pow.shape
    time = np.arange(0, cols) / FPS

    plt.close('all')
    plt.ioff()
    plt.contourf(time, df_pow.index.tolist(), df_pow)
    plt.contour(time, df_pow.index.tolist(), df_pow)
    plt.title("Global Power over Time")
    plt.ylabel("Period[cm]")
    plt.xlabel("Time")
    cax = plt.gca()
    #plt.xscale('log')
    cax.set_ylim(np.log2([period.min(), period.max()]))
    cax.set_yticks(np.log2(Yticks))
    cax.set_yticklabels(Yticks)

    plt.savefig(directory + '{}_global_power-{}.png'.format(name, idx),
                bbox='tight')

    row, cols = df_for.shape
    time = np.arange(0, cols) / FPS
    plt.close('all')
    plt.ioff()
    plt.contourf(time, df_for.index.tolist(), df_for)
    plt.contour(time, df_for.index.tolist(), df_for)
    plt.title("Fourier Power over Time")
    plt.ylabel("Period[cm]")
    plt.xlabel("Time")
    cax = plt.gca()
    #plt.xscale('log')
    cax.set_ylim(np.log2([period.min(), period.max()]))
    cax.set_yticks(np.log2(Yticks))
    cax.set_yticklabels(Yticks)
    plt.savefig(directory + '{}_fourier_power-{}.png'.format(name, idx),
                bbox='tight')

    plt.close('all')
    plt.ioff()
    rows, cols = df_auc.shape
    time = np.arange(0, cols) / FPS
    plt.plot(time, df_auc.T)
    plt.xlabel("Time")
    plt.ylabel("Area under the curve in cm")
    plt.title("Area under the curve over time")
    plt.savefig(directory + '{}_area_under_curve-{}.png'.format(name, idx),
                bbox='tight')

    df_wav['Mean'] = df_wav.mean(axis=1)
    df_pow['Mean'] = df_pow.mean(axis=1)
    df_for['Mean'] = df_for.mean(axis=1)
    df_auc['Mean'] = df_auc.mean(axis=1)

    df_wav['Standard Deviation'] = df_wav.std(axis=1)
    df_pow['Standard Deviation'] = df_pow.std(axis=1)
    df_for['Standard Deviation'] = df_for.std(axis=1)
    df_auc['Standard Deviation'] = df_auc.std(axis=1)

    ##[Writing analysis to excel]##############################################

    print("Writing files")
    writer = pd.ExcelWriter(directory + "analysis{}.xlsx".format(trial_name))
    df_wav.to_excel(writer, "Raw Waveforms")
    df_auc.to_excel(writer, "Area Under the Curve")
    df_for.to_excel(writer, "Fourier Spectra")
    df_pow.to_excel(writer, "Global Power Spectra")
    writer.save()

    ##[Writing means to a single file]#########################################

    #filename = 'C:\\pyscripts\\wavelet_analysis\\Overall_Analysis.xlsx'
    #append_data(filename, df_pow['Mean'].values,  str(trial_name), Yticks)
    ##[Plotting mean power and Fourier]########################################
    plt.close('all')
    plt.ioff()
    plt.plot(df_pow['Mean'], df_pow.index.tolist(), label="Global Power")
    plt.plot(df_for['Mean'], df_for.index.tolist(), label="Fourier Power")
    plt.title("Global Power averaged over Time")
    plt.ylabel("Period[cm]")
    plt.xlabel("Power[cm^2]")
    cax = plt.gca()
    #plt.xscale('log')
    cax.set_ylim(np.log2([period.min(), period.max()]))
    cax.set_yticks(np.log2(Yticks))
    cax.set_yticklabels(Yticks)
    plt.legend()
    plt.savefig(directory + '{}_both_{}.png'.format(name, idx), bbox='tight')

    plt.close('all')
    plt.ioff()
    plt.plot(df_pow['Mean'], df_pow.index.tolist(), label="Global Power")
    plt.title("Global Power averaged over Time")
    plt.ylabel("Period[cm]")
    plt.xlabel("Power[cm^2]")
    cax = plt.gca()
    #plt.xscale('log')
    cax.set_ylim(np.log2([period.min(), period.max()]))
    cax.set_yticks(np.log2(Yticks))
    cax.set_yticklabels(Yticks)
    plt.legend()
    plt.savefig(directory + '{}_global_power_{}.png'.format(name, idx),
                bbox='tight')

    plt.close('all')
    plt.ioff()
    plt.plot(df_for['Mean'], df_for.index.tolist(), label="Fourier Power")
    plt.title("Fourier averaged over Time")
    plt.ylabel("Period[cm]")
    plt.xlabel("Power[cm^2]")
    cax = plt.gca()
    #plt.xscale('log')
    cax.set_ylim(np.log2([period.min(), period.max()]))
    cax.set_yticks(np.log2(Yticks))
    cax.set_yticklabels(Yticks)
    plt.legend()
    plt.savefig(directory + '{}_fourier_{}.png'.format(name, idx),
                bbox='tight')

    cap.release()
    return directory
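A hypothetical call; the path is illustrative and the module-level globals the function reads (verbose, date, trial_type, name, wavelet, order, per_min, per_max, su, dj, J, unit, trial_name) are assumed to be set beforehand:

out_dir = parse_frames('trial_01.avi', sig=0.95)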
Example #12
    def cwt(signal, t, obspy=None):
        # from __future__ import division
        import numpy
        from matplotlib import pyplot

        import pycwt as wavelet
        from pycwt.helpers import find
        signal = signal[10000:11000]
        t = t[10000:11000]
        url = 'http://paos.colorado.edu/research/wavelets/wave_idl/nino3sst.txt'
        dat = numpy.genfromtxt(url, skip_header=19)
        title = 'DICARDIA'
        label = 'DICARDIA SST'
        units = 'degC'
        t0 = 1871.0
        dt = 0.25  # In years

        N = signal.shape[0]
        print(N)
        p = numpy.polyfit(t, signal, 1)
        dat_notrend = signal - numpy.polyval(p, t)
        std = dat_notrend.std()  # Standard deviation
        var = std**2  # Variance
        dat_norm = dat_notrend / std  # Normalized dataset

        mother = wavelet.Morlet(6)
        s0 = 2 * dt  # Starting scale, in this case 2 * 0.25 years = 6 months
        dj = 1 / 12  # Twelve sub-octaves per octaves
        J = 7 / dj  # Seven powers of two with dj sub-octaves
        alpha, _, _ = wavelet.ar1(signal)  # Lag-1 autocorrelation for red noise

        wave, scales, freqs, coi, fft, fftfreqs = wavelet.cwt(
            dat_norm, dt, dj, s0, J, mother)
        iwave = wavelet.icwt(wave, scales, dt, dj, mother) * std

        power = (numpy.abs(wave))**2
        fft_power = numpy.abs(fft)**2
        period = 1 / freqs

        power /= scales[:, None]

        signif, fft_theor = wavelet.significance(1.0,
                                                 dt,
                                                 scales,
                                                 0,
                                                 alpha,
                                                 significance_level=0.95,
                                                 wavelet=mother)
        sig95 = numpy.ones([1, N]) * signif[:, None]
        sig95 = power / sig95

        glbl_power = power.mean(axis=1)
        dof = N - scales  # Correction for padding at edges
        glbl_signif, tmp = wavelet.significance(var,
                                                dt,
                                                scales,
                                                1,
                                                alpha,
                                                significance_level=0.95,
                                                dof=dof,
                                                wavelet=mother)
        sel = find((period >= 2) & (period < 8))
        Cdelta = mother.cdelta
        scale_avg = (scales * numpy.ones((N, 1))).transpose()
        scale_avg = power / scale_avg  # As in Torrence and Compo (1998) equation 24
        scale_avg = var * dj * dt / Cdelta * scale_avg[sel, :].sum(axis=0)
        scale_avg_signif, tmp = wavelet.significance(
            var,
            dt,
            scales,
            2,
            alpha,
            significance_level=0.95,
            dof=[scales[sel[0]], scales[sel[-1]]],
            wavelet=mother)
        # Prepare the figure
        pyplot.close('all')
        pyplot.ioff()
        figprops = dict(figsize=(11, 8), dpi=72)
        fig = pyplot.figure(**figprops)

        # First sub-plot, the original time series anomaly and inverse wavelet
        # transform.
        ax = pyplot.axes([0.1, 0.75, 0.65, 0.2])
        ax.plot(t, iwave, '-', linewidth=1, color=[0.5, 0.5, 0.5])
        ax.plot(t, signal, 'k', linewidth=1.5)
        ax.set_title('a) {}'.format(title))
        ax.set_ylabel(r'{} [{}]'.format(label, units))

        # Second sub-plot, the normalized wavelet power spectrum and significance
        # level contour lines and cone of influence hatched area. Note that period
        # scale is logarithmic.
        bx = pyplot.axes([0.1, 0.37, 0.65, 0.28], sharex=ax)
        levels = [0.0625, 0.125, 0.25, 0.5, 1, 2, 4, 8, 16]
        bx.contourf(t,
                    numpy.log2(period),
                    numpy.log2(power),
                    numpy.log2(levels),
                    extend='both',
                    cmap=pyplot.cm.viridis)
        extent = [t.min(), t.max(), 0, max(period)]
        bx.contour(t,
                   numpy.log2(period),
                   sig95, [-99, 1],
                   colors='k',
                   linewidths=2,
                   extent=extent)
        bx.fill(numpy.concatenate(
            [t, t[-1:] + dt, t[-1:] + dt, t[:1] - dt, t[:1] - dt]),
                numpy.concatenate([
                    numpy.log2(coi), [1e-9],
                    numpy.log2(period[-1:]),
                    numpy.log2(period[-1:]), [1e-9]
                ]),
                'k',
                alpha=0.3,
                hatch='x')
        bx.set_title('b) {} Wavelet Power Spectrum ({})'.format(
            label, mother.name))
        bx.set_ylabel('Period (years)')
        #
        Yticks = 2**numpy.arange(numpy.ceil(numpy.log2(period.min())),
                                 numpy.ceil(numpy.log2(period.max())))
        bx.set_yticks(numpy.log2(Yticks))
        bx.set_yticklabels(Yticks)

        # Third sub-plot, the global wavelet and Fourier power spectra and theoretical
        # noise spectra. Note that period scale is logarithmic.
        cx = pyplot.axes([0.77, 0.37, 0.2, 0.28], sharey=bx)
        cx.plot(glbl_signif, numpy.log2(period), 'k--')
        cx.plot(var * fft_theor, numpy.log2(period), '--', color='#cccccc')
        cx.plot(var * fft_power,
                numpy.log2(1. / fftfreqs),
                '-',
                color='#cccccc',
                linewidth=1.)
        cx.plot(var * glbl_power, numpy.log2(period), 'k-', linewidth=1.5)
        cx.set_title('c) Global Wavelet Spectrum')
        cx.set_xlabel(r'Power [({})^2]'.format(units))
        cx.set_xlim([0, glbl_power.max() + var])
        cx.set_ylim(numpy.log2([period.min(), period.max()]))
        cx.set_yticks(numpy.log2(Yticks))
        cx.set_yticklabels(Yticks)
        pyplot.setp(cx.get_yticklabels(), visible=False)

        # Fourth sub-plot, the scale averaged wavelet spectrum.
        dx = pyplot.axes([0.1, 0.07, 0.65, 0.2], sharex=ax)
        dx.axhline(scale_avg_signif, color='k', linestyle='--', linewidth=1.)
        dx.plot(t, scale_avg, 'k-', linewidth=1.5)
        dx.set_title('d) {}--{} year scale-averaged power'.format(2, 8))
        dx.set_xlabel('Time (year)')
        dx.set_ylabel(r'Average variance [{}]'.format(units))
        ax.set_xlim([t.min(), t.max()])

        pyplot.show()
Example #13
signif, fft_theor = wavelet.significance(1.0, dt, scales, 0, alpha,
                                         significance_level=0.95,
                                         wavelet=mother)
sig95 = numpy.ones([1, N]) * signif[:, None]
sig95 = power / sig95

# Then, we calculate the global wavelet spectrum and determine its
# significance level.
glbl_power = power.mean(axis=1)
dof = N - scales  # Correction for padding at edges
glbl_signif, tmp = wavelet.significance(var, dt, scales, 1, alpha,
                                        significance_level=0.95, dof=dof,
                                        wavelet=mother)

# We also calculate the scale average between 2 years and 8 years, and its
# significance level.
sel = find((period >= 2) & (period < 8))
Cdelta = mother.cdelta
scale_avg = (scales * numpy.ones((N, 1))).transpose()
scale_avg = power / scale_avg  # As in Torrence and Compo (1998) equation 24
scale_avg = var * dj * dt / Cdelta * scale_avg[sel, :].sum(axis=0)
scale_avg_signif, tmp = wavelet.significance(var, dt, scales, 2, alpha,
                                             significance_level=0.95,
                                             dof=[scales[sel[0]],
                                                  scales[sel[-1]]],
                                             wavelet=mother)

# Finally, we plot our results in four different subplots containing the
# (i) original series anomaly and the inverse wavelet transform; (ii) the
# wavelet power spectrum (iii) the global wavelet and Fourier spectra ; and
# (iv) the range averaged wavelet spectrum. In all sub-plots the significance
# levels are either included as dotted lines or as filled contour lines.
Example #14
def plot_wavelet(t,
                 dat,
                 dt,
                 pl,
                 pr,
                 period_pltlim=None,
                 ax=None,
                 ax2=None,
                 stscale=2,
                 siglev=0.95,
                 cmap='viridis',
                 title='',
                 levels=None,
                 label='',
                 units='',
                 tunits='',
                 sav_img=False):
    import pycwt as wavelet
    from pycwt.helpers import find
    import numpy as np
    import matplotlib.pyplot as plt
    from copy import copy
    import numpy.ma as ma

    t_ = copy(t)
    t0 = t[0]
    # print(Time(t[-1:], format='plot_date').iso)
    # We also create a time array in years.
    N = dat.size
    t = np.arange(0, N) * dt + t0
    # print(Time(t[-1:], format='plot_date').iso)
    # We write the following code to detrend and normalize the input data by its
    # standard deviation. Sometimes detrending is not necessary and simply
    # removing the mean value is good enough. However, if your dataset has a well
    # defined trend, such as the Mauna Loa CO\ :sub:`2` dataset available in the
    # above mentioned website, it is strongly advised to perform detrending.
    # Here, we fit a one-degree polynomial function and then subtract it from the
    # original data.
    p = np.polyfit(t - t0, dat, 1)
    dat_notrend = dat - np.polyval(p, t - t0)
    std = dat_notrend.std()  # Standard deviation
    var = std**2  # Variance
    dat_norm = dat_notrend / std  # Normalized dataset

    # The next step is to define some parameters of our wavelet analysis. We
    # select the mother wavelet, in this case the Morlet wavelet with
    # :math:`\omega_0=6`.
    mother = wavelet.Morlet(6)
    s0 = stscale * dt  # Starting scale: stscale times the sampling interval
    dj = 1 / 12  # Twelve sub-octaves per octaves
    J = -1  # Let pycwt choose the number of scales (alternatively 7 / dj)
    alpha, _, _ = wavelet.ar1(dat)  # Lag-1 autocorrelation for red noise

    # The following routines perform the wavelet transform and inverse wavelet
    # transform using the parameters defined above. Since we have normalized our
    # input time-series, we multiply the inverse transform by the standard
    # deviation.
    wave, scales, freqs, coi, fft, fftfreqs = wavelet.cwt(
        dat_norm, dt, dj, s0, J, mother)
    iwave = wavelet.icwt(wave, scales, dt, dj, mother) * std

    # We calculate the normalized wavelet and Fourier power spectra, as well as
    # the Fourier equivalent periods for each wavelet scale.
    power = (np.abs(wave))**2
    fft_power = np.abs(fft)**2
    period = 1 / freqs

    # We could stop at this point and plot our results. However we are also
    # interested in the power spectra significance test. The power is significant
    # where the ratio ``power / sig95 > 1``.
    signif, fft_theor = wavelet.significance(1.0,
                                             dt,
                                             scales,
                                             0,
                                             alpha,
                                             significance_level=siglev,
                                             wavelet=mother)
    sig95 = np.ones([1, N]) * signif[:, None]
    sig95 = power / sig95

    # Then, we calculate the global wavelet spectrum and determine its
    # significance level.
    glbl_power = power.mean(axis=1)
    dof = N - scales  # Correction for padding at edges
    glbl_signif, tmp = wavelet.significance(var,
                                            dt,
                                            scales,
                                            1,
                                            alpha,
                                            significance_level=siglev,
                                            dof=dof,
                                            wavelet=mother)

    # We also calculate the scale average between periods pl and pr, and its
    # significance level.
    sel = find((period >= pl) & (period < pr))
    Cdelta = mother.cdelta
    scale_avg = (scales * np.ones((N, 1))).transpose()
    scale_avg = power / scale_avg  # As in Torrence and Compo (1998) equation 24
    scale_avg = var * dj * dt / Cdelta * scale_avg[sel, :].sum(axis=0)
    scale_avg_signif, tmp = wavelet.significance(
        var,
        dt,
        scales,
        2,
        alpha,
        significance_level=siglev,
        dof=[scales[sel[0]], scales[sel[-1]]],
        wavelet=mother)

    # levels = [0.25, 0.5, 1, 2, 4, 8, 16,32]
    if levels is None:
        levels = np.linspace(0.0, 128., 256)
    # ax.contourf(t, np.log2(period), np.log2(power), np.log2(levels), extend='both', cmap=plt.cm.viridis)
    im = ax.contourf(t_,
                     np.array(period) * 24 * 60,
                     power,
                     levels,
                     extend='both',
                     cmap=cmap,
                     zorder=-20)
    # for pathcoll in im.collections:
    #     pathcoll.set_rasterized(True)
    ax.set_rasterization_zorder(-10)
    # im = ax.pcolormesh(t_, np.array(period) * 24 * 60, power,vmax=32.,vmin=0, cmap=cmap)
    # im = ax.contourf(t, np.array(period)*24*60, np.log2(power), np.log2(levels), extend='both', cmap=cmap)
    extent = [t_.min(), t_.max(), 0, max(period) * 24 * 60]
    # ax.contour(t, np.log2(period), sig95, [-99, 1], colors='k', linewidths=1, extent=extent)
    CS = ax.contour(t_,
                    np.array(period) * 24 * 60,
                    sig95 * siglev, [-99, 1.0 * siglev],
                    colors='k',
                    linewidths=1,
                    extent=extent)
    ax.clabel(CS, inline=1, fmt='%1.3f')
    ax.fill(np.concatenate(
        [t_, t_[-1:] + dt, t_[-1:] + dt, t_[:1] - dt, t_[:1] - dt]),
            np.concatenate([
                np.array(coi), [2**(1e-9)],
                np.array(period[-1:]),
                np.array(period[-1:]), [2**(1e-9)]
            ]) * 24 * 60,
            color='k',
            alpha=0.75,
            edgecolor='None',
            facecolor='k',
            hatch='x')
    # ### not Matplotlib does not display hatching when rendering to pdf. Here is a workaround.
    # ax.fill(np.concatenate([t_, t_[-1:] + dt, t_[-1:] + dt, t_[:1] - dt, t_[:1] - dt]),
    #         np.concatenate(
    #             [np.array(coi), [2 ** (1e-9)], np.array(period[-1:]), np.array(period[-1:]),
    #              [2 ** (1e-9)]]) * 24 * 60,
    #         color='None', alpha=1.0, edgecolor='k', hatch='x')
    # ax.set_title('b) {} Wavelet Power Spectrum ({})'.format(label, mother.name))
    #
    # ax.set_rasterization_zorder(20)
    # Yticks = np.arange(np.ceil(np.array(period.min()*24*60)), np.ceil(np.array(period.max()*24*60)))
    # ax.set_yticks(np.array(Yticks))
    # ax.set_yticklabels(Yticks)

    ax2.plot(glbl_signif, np.array(period) * 24 * 60, 'k--')
    # ax2.plot(var * fft_theor, np.array(period) * 24 * 60, '--', color='#cccccc')
    # ax2.plot(var * fft_power, np.array(1. / fftfreqs) * 24 * 60, '-', color='#cccccc',
    #          linewidth=1.)
    ax2.plot(var * glbl_power, np.array(period) * 24 * 60, 'k-', linewidth=1)
    mperiod = ma.masked_outside(np.array(period), period_pltlim[0],
                                period_pltlim[1])
    mpower = ma.masked_array(var * glbl_power, mask=mperiod.mask)
    # ax2.set_title('c) Global Wavelet Spectrum')
    ax2.set_xlabel(r'Power [{}]'.format(units))
    ax2.set_xlim([0, mpower.compressed().max() + var])
    # print(glbl_power)
    # ax2.set_ylim(np.array([period.min(), period.max()]))
    # ax2.set_yticks(np.array(Yticks))
    # ax2.set_yticklabels(Yticks)
    plt.setp(ax2.get_yticklabels(), visible=False)

    if period_pltlim:
        ax.set_ylim(np.array(period_pltlim) * 24 * 60)
    else:
        ax.set_ylim(np.array([period.min(), period.max()]) * 24 * 60)

    return im
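A hedged usage sketch for plot_wavelet. Periods are converted to minutes via `* 24 * 60`, so dt and the pl/pr band edges below are expressed in days (all values illustrative):

import numpy as np
import matplotlib.pyplot as plt

t = np.linspace(0, 0.5, 720)  # half a day at one-minute cadence
dat = np.sin(2 * np.pi * t * 24 * 60 / 15) + 0.1 * np.random.randn(t.size)
fig, (ax, ax2) = plt.subplots(1, 2, sharey=True)
im = plot_wavelet(t, dat, dt=1 / (24 * 60), pl=5 / (24 * 60), pr=30 / (24 * 60),
                  period_pltlim=[2 / (24 * 60), 60 / (24 * 60)], ax=ax, ax2=ax2)
plt.show()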
Example #15
def do_wavelet_transform(dat, dt):

    t0 = 0
    # dt = 0.25  # In years

    # We also create a time array in years.
    N = dat.size
    t = np.arange(0, N) * dt + t0
    '''
    We write the following code to detrend and normalize the input data by its
    standard deviation. Sometimes detrending is not necessary and simply
    removing the mean value is good enough. However, if your dataset has a well
    defined trend, such as the Mauna Loa CO\ :sub:`2` dataset available in the
    above mentioned website, it is strongly advised to perform detrending.
    Here, we fit a one-degree polynomial function and then subtract it from the
    original data.
    '''
    p = np.polyfit(t - t0, dat, 1)
    dat_notrend = dat - np.polyval(p, t - t0)
    std = dat_notrend.std()  # Standard deviation
    var = std**2  # Variance
    dat_norm = dat_notrend / std  # Normalized dataset

    # The next step is to define some parameters of our wavelet analysis. We
    # select the mother wavelet, in this case the Morlet wavelet with
    # :math:`\omega_0=6`.
    mother = wavelet.Morlet(6)
    s0 = 2 * dt  # Starting scale, in this case 2 * 0.25 years = 6 months
    dj = 1 / 12  # Twelve sub-octaves per octaves
    J = 7 / dj  # Seven powers of two with dj sub-octaves
    sr = pd.Series(dat)
    alpha = sr.autocorr(lag=1)  # Lag-1 autocorrelation for red noise
    '''
    The following routines perform the wavelet transform and inverse wavelet
    transform using the parameters defined above. Since we have normalized our
    input time-series, we multiply the inverse transform by the standard
    deviation.
    '''
    wave, scales, freqs, coi, fft, fftfreqs = wavelet.cwt(
        dat_norm, dt, dj, s0, J, mother)
    iwave = wavelet.icwt(wave, scales, dt, dj, mother) * std

    # We calculate the normalized wavelet and Fourier power spectra, as well as
    # the Fourier equivalent periods for each wavelet scale.
    power = (np.abs(wave))**2
    fft_power = np.abs(fft)**2
    period = 1 / freqs
    '''
    We could stop at this point and plot our results. However we are also
    interested in the power spectra significance test. The power is significant
    where the ratio ``power / sig95 > 1``.
    '''
    signif, fft_theor = wavelet.significance(1.0,
                                             dt,
                                             scales,
                                             0,
                                             alpha,
                                             significance_level=0.95,
                                             wavelet=mother)
    sig95 = np.ones([1, N]) * signif[:, None]
    sig95 = power / sig95
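    # sig95 now holds the ratio power / significance; values greater than one
    # mark regions significant at the 95% level, which the plotting code in
    # the other examples draws as contour lines at levels [-99, 1].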

    # Then, we calculate the global wavelet spectrum and determine its
    # significance level.
    glbl_power = power.mean(axis=1)
    dof = N - scales  # Correction for padding at edges
    glbl_signif, tmp = wavelet.significance(var,
                                            dt,
                                            scales,
                                            1,
                                            alpha,
                                            significance_level=0.95,
                                            dof=dof,
                                            wavelet=mother)

    # We also calculate the scale average between 2 years and 8 years, and its
    # significance level.
    sel = find((period >= 2) & (period < 8))
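    # `find` is the old matplotlib.mlab.find (removed in matplotlib 3.1);
    # np.nonzero((period >= 2) & (period < 8))[0] is an equivalent replacement.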
    Cdelta = mother.cdelta
    scale_avg = (scales * np.ones((N, 1))).transpose()
    # As in Torrence and Compo (1998) equation 24
    scale_avg = power / scale_avg
    scale_avg = var * dj * dt / Cdelta * scale_avg[sel, :].sum(axis=0)
    scale_avg_signif, tmp = wavelet.significance(
        var,
        dt,
        scales,
        2,
        alpha,
        significance_level=0.95,
        dof=[scales[sel[0]], scales[sel[-1]]],
        wavelet=mother)

    return dat, t, \
        period, power, coi, wave, \
        scales, dt, dj, mother, sig95, \
        glbl_power, glbl_signif, \
        scale_avg_signif, scale_avg, \
        std, iwave, var, \
        fft_theor, fft_power, fftfreqs
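
The scale-averaged power computed above follows equation 24 of Torrence and
Compo (1998), which the code comment cites. Written out (a reconstruction from
the code, with W_n(s_j) the wavelet transform at time index n and scale s_j):

$$\bar{W}_n^2 = \frac{\delta j\,\delta t}{C_\delta}
\sum_{j=j_1}^{j_2} \frac{|W_n(s_j)|^2}{s_j}$$

Here \delta j is `dj`, \delta t is `dt`, C_\delta is `mother.cdelta`, and the
sum runs over the scales selected by `sel`; the code additionally multiplies by
the series variance `var` because the transform was taken on normalized data.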
Example #16
def get_graph_from_file(in_filepath, out_folder, out_filename):
    # Get data
    # TODO: there are different file formats
    # TODO: implement different parsers selected by function parameters
    p1 = numpy.genfromtxt(in_filepath)

    # TODO: clean up this intermediate assignment
    dat = p1

    title = 'NINO3 Sea Surface Temperature'
    label = 'NINO3 SST'
    units = 'degC'

    # Values for calculations
    # TODO: make these configurable via function arguments
    t0 = 12.0  # start time
    dt = 0.5  # sampling interval, in minutes

    N = dat.size
    t = numpy.arange(0, N) * dt + t0

    p = numpy.polyfit(t - t0, dat, 1)
    dat_notrend = dat - numpy.polyval(p, t - t0)
    std = dat_notrend.std()  # Standard deviation
    var = std**2  # Variance
    dat_norm = dat_notrend / std  # Normalized dataset

    mother = wavelet.Morlet(6)
    s0 = 2 * dt  # Starting scale, in this case 2 * 0.5 minutes = 1 minute
    dj = 1 / 12  # Twelve sub-octaves per octave
    J = 7 / dj  # Seven powers of two with dj sub-octaves
    alpha, _, _ = wavelet.ar1(dat)  # Lag-1 autocorrelation for red noise

    wave, scales, freqs, coi, fft, fftfreqs = wavelet.cwt(
        dat_norm, dt, dj, s0, J, mother)
    iwave = wavelet.icwt(wave, scales, dt, dj, mother) * std

    power = (numpy.abs(wave))**2
    fft_power = numpy.abs(fft)**2
    period = 1 / freqs

    power /= scales[:, None]
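    # Divide the power by the scales, the rectification attributed to
    # Liu et al. in a later example in this document; it compensates the
    # bias toward large scales in the raw wavelet power spectrum.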

    signif, fft_theor = wavelet.significance(1.0,
                                             dt,
                                             scales,
                                             0,
                                             alpha,
                                             significance_level=0.95,
                                             wavelet=mother)
    sig95 = numpy.ones([1, N]) * signif[:, None]
    sig95 = power / sig95

    glbl_power = power.mean(axis=1)
    dof = N - scales  # Correction for padding at edges
    glbl_signif, tmp = wavelet.significance(var,
                                            dt,
                                            scales,
                                            1,
                                            alpha,
                                            significance_level=0.95,
                                            dof=dof,
                                            wavelet=mother)

    sel = find((period >= 2) & (period < 8))
    Cdelta = mother.cdelta
    scale_avg = (scales * numpy.ones((N, 1))).transpose()
    scale_avg = power / scale_avg  # As in Torrence and Compo (1998) equation 24
    scale_avg = var * dj * dt / Cdelta * scale_avg[sel, :].sum(axis=0)
    scale_avg_signif, tmp = wavelet.significance(
        var,
        dt,
        scales,
        2,
        alpha,
        significance_level=0.95,
        dof=[scales[sel[0]], scales[sel[-1]]],
        wavelet=mother)

    # Prepare the figure
    pyplot.close('all')
    #pyplot.ioff()
    figprops = dict(dpi=144)
    fig = pyplot.figure(**figprops)

    # The normalized wavelet power spectrum and significance level contour
    # lines. Unlike the other examples, the period axis here is linear and no
    # cone-of-influence hatching is drawn.
    bx = pyplot.axes([0.1, 0.37, 0.65, 0.28])
    levels = [0.0625, 0.125, 0.25, 0.5, 1, 2, 4, 8, 16]
    bx.contourf(t,
                period,
                numpy.log2(power),
                numpy.log2(levels),
                extend='both',
                cmap=pyplot.cm.viridis)
    extent = [t.min(), t.max(), 0, max(period)]
    bx.contour(t,
               period,
               sig95, [-99, 1],
               colors='k',
               linewidths=2,
               extent=extent)
    bx.set_title('{} Wavelet Power Spectrum ({})'.format(label, mother.name))
    bx.set_ylabel('Period (minutes)')
    #
    #Yticks = 2 ** numpy.arange(numpy.ceil(numpy.log2(period.min())),
    #                        numpy.ceil(numpy.log2(period.max())))
    #bx.set_yticks(numpy.log2(Yticks))
    #bx.set_yticklabels(Yticks)
    bx.set_ylim([2, 20])

    # Save graph to file
    # TODO implement
    #pyplot.savefig('{}/{}.png'.format(out_folder, out_filename))
    # ----------------------------------------------
    # or show the graph
    pyplot.show()
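A hypothetical usage sketch for get_graph_from_file, assuming a plain ASCII
input file with one sample per line (the paths are placeholders, not part of
the original example):

get_graph_from_file('data/series.txt', 'output', 'series_wavelet')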
def graph_wavelet(data_xs, title, lims, font=11, params=default_params):
    a_lims, b_lims, d_lims = lims
    plt.rcParams.update({'font.size': font})
    return_data = {}
    
    N = len(data_xs)
    dt = (2 * params['per_pixel']) / N  # how many cm each pixel corresponds to
    t = np.arange(0, N) * dt
    t = t - np.mean(t)
    t0 = 0
    per_min = params['min_per']
    per_max = params['max_per']
    units = params['units']
    sx = params['sx']
    octaves = params['octaves']
    dj = 1 / params['suboctaves']  # sub-octaves per octave
    order = params['order']
    
    var, std, dat_norm = detrend(data_xs)
    mother = cwt.DOG(order)  # The mother wavelet, a derivative-of-Gaussian of the given order
    s0 = sx * dt  # Starting scale, in our case two pixels, i.e. 0.04 cm (40 um)
    J = octaves / dj  # `octaves` powers of two with dj sub-octaves
    
    return_data['var'] = var
    return_data['std'] = std
    
    try:
        # Lag-1 autocorrelation for red noise
        alpha, _, _ = cwt.ar1(dat_norm)
    except Exception:
        # Fall back to a fixed red-noise estimate if the AR1 fit fails
        alpha = 0.95
            
    wave, scales, freqs, coi, fft, fftfreqs = cwt.cwt(dat_norm, dt, dj, s0, J,
                                                              mother)
    return_data['scales'] = scales
    return_data['freqs'] = freqs
    return_data['fft'] = fft
    iwave = cwt.icwt(wave, scales, dt, dj, mother) * std
        
    power = (np.abs(wave)) ** 2
    fft_power = np.abs(fft) ** 2
    period = 1 / freqs
    power /= scales[:, None]  # Bias rectification suggested by Liu et al. (2007)
    

    # Next we calculate the significance of the power spectra. Power is
    # significant where power / sig95 > 1.
    signif, fft_theor = cwt.significance(1.0, dt, scales, 0, alpha,
                                             significance_level=0.95,
                                             wavelet=mother)
    sig95 = np.ones([1, N]) * signif[:, None]
    sig95 = power / sig95
    
    glbl_power = power.mean(axis=1)
    dof = N - scales  # Correction for padding at edges
    glbl_signif, tmp = cwt.significance(var, dt, scales, 1, alpha,
                                            significance_level=0.95, dof=dof,
                                            wavelet=mother)
    
    sel = find((period >= per_min) & (period < per_max))
    Cdelta = mother.cdelta
    scale_avg = (scales * np.ones((N, 1))).transpose()
    scale_avg = power / scale_avg  # As in Torrence and Compo (1998) equation 24
    scale_avg = var * dj * dt / Cdelta * scale_avg[sel, :].sum(axis=0)
    scale_avg_signif, tmp = cwt.significance(var, dt, scales, 2, alpha,
                                                 significance_level=0.95,
                                                 dof=[scales[sel[0]],
                                                      scales[sel[-1]]],
                                                 wavelet=mother)
    
    
    # Prepare the figure
    plt.close('all')
    plt.ioff()
    figprops = dict(figsize=(11, 11), dpi=72)
    fig = plt.figure(**figprops)
    
    wx = plt.axes([0.77, 0.75, 0.2, 0.2])
    # Plot the mother wavelet, stacked vertically, at every tenth period.
    for imz, idxy in enumerate(range(0, len(period), 10)):
        wx.plot(t, mother.psi(t / period[idxy]) + imz, linewidth=1.5)
    wx.xaxis.set_ticklabels([])
    
    ax = plt.axes([0.1, 0.75, 0.65, 0.2])
    ax.plot(t, data_xs, 'k', linewidth=1.5)
    ax.plot(t, iwave, '-', linewidth=1, color=[0.5, 0.5, 0.5])
    ax.plot(t, dat_norm, '--', linewidth=1.5, color=[0.5, 0.5, 0.5])
    if a_lims is not None:
        ax.set_ylim([-a_lims, a_lims])
    ax.set_title('a) {}'.format(title))
    ax.set_ylabel(r'Displacement [{}]'.format(units))
    #ax.set_ylim([-20,20])

    bx = plt.axes([0.1, 0.37, 0.65, 0.28], sharex=ax)
    levels = [0.0625, 0.125, 0.25, 0.5, 1, 2, 4, 8, 16]
    bx.contourf(t, np.log2(period), np.log2(power), np.log2(levels),
                extend='both', cmap=plt.cm.viridis)
    extent = [t.min(), t.max(), 0, max(period)]
    bx.contour(t, np.log2(period), sig95, [-99, 1], colors='k', linewidths=2,
               extent=extent)
    bx.fill(np.concatenate([t, t[-1:] + dt, t[-1:] + dt,
                               t[:1] - dt, t[:1] - dt]),
            np.concatenate([np.log2(coi), [1e-9], np.log2(period[-1:]),
                               np.log2(period[-1:]), [1e-9]]),
            'k', alpha=0.3, hatch='x')
    bx.set_title('b) {} Octaves Wavelet Power Spectrum [{}({})]'.format(octaves, mother.name, order))
    bx.set_ylabel('Period (cm)')
    #
    Yticks = 2 ** np.arange(np.ceil(np.log2(period.min())),
                               np.ceil(np.log2(period.max())))
    bx.set_yticks(np.log2(Yticks))
    bx.set_yticklabels(Yticks)
    
    # Third sub-plot, the global wavelet and Fourier power spectra and theoretical
    # noise spectra. Note that period scale is logarithmic.
    cx = plt.axes([0.77, 0.37, 0.2, 0.28], sharey=bx)
    cx.plot(glbl_signif, np.log2(period), 'k--')
    cx.plot(var * fft_theor, np.log2(period), '--', color='#cccccc')
    cx.plot(var * fft_power, np.log2(1./fftfreqs), '-', color='#cccccc',
            linewidth=1.)
    
    return_data['global_power'] = var * glbl_power
    return_data['fourier_spectra'] = var * fft_power
    return_data['per'] = np.log2(period)
    return_data['amp'] = np.log2(1./fftfreqs)
    
    cx.plot(var * glbl_power, np.log2(period), 'k-', linewidth=1.5)
    cx.set_title('c) Power Spectrum')
    cx.set_xlabel(r'Power [({})^2]'.format(units))
    if b_lims is not None:
        cx.set_xlim([0, b_lims])
    #cx.set_xlim([0,max(glbl_power.max(), var*fft_power.max())])
    #print(max(glbl_power.max(), var*fft_power.max()))
    cx.set_ylim(np.log2([period.min(), period.max()]))
    cx.set_yticks(np.log2(Yticks))
    cx.set_yticklabels(Yticks)
    return_data['yticks'] = Yticks
    
    plt.setp(cx.get_yticklabels(), visible=False)
    
    # Fourth sub-plot, the scale averaged wavelet spectrum.
    dx = plt.axes([0.1, 0.07, 0.65, 0.2], sharex=ax)
    dx.axhline(scale_avg_signif, color='k', linestyle='--', linewidth=1.)
    dx.plot(t, scale_avg, 'k-', linewidth=1.5)
    dx.set_title('d) {}--{} cm scale-averaged power'.format(per_min, per_max))
    dx.set_xlabel('Displacement (cm)')
    dx.set_ylabel(r'Average variance [{}]'.format(units))
    ax.set_xlim([t.min(), t.max()])
    if d_lims is not None:
        dx.set_ylim([0, d_lims])
    plt.savefig("C:\pyscripts\wavelet_analysis\Calibrated Images\{}".format(title))
    return fig, return_data
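
graph_wavelet relies on a detrend helper that this example does not define. A
minimal sketch, consistent with the detrending code in the other examples (fit
a first-degree polynomial, subtract it, normalize by the standard deviation)
and with the unpacking `var, std, dat_norm = detrend(data_xs)` used above:

import numpy as np

def detrend(data_xs):
    # Remove a linear trend and normalize by the standard deviation.
    data_xs = np.asarray(data_xs, dtype=float)
    t = np.arange(data_xs.size)
    p = np.polyfit(t, data_xs, 1)
    notrend = data_xs - np.polyval(p, t)
    std = notrend.std()  # standard deviation
    var = std ** 2       # variance
    return var, std, notrend / std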
Example #18
def simple_sample(sls):
    # Then, we would load the dataset and define some data-related parameters.
    # In this case, the first 19 lines of the data file contain metadata, which
    # we ignore, since we set the relevant values manually (i.e. title, units).
    # url = 'http://paos.colorado.edu/research/wavelets/wave_idl/nino3sst.txt'
    # dat = numpy.genfromtxt(url, skip_header=19)

    title = 'Sentence Length'
    label = 'Zhufu Sentence Length'
    units = 'Characters'
    t0 = 1
    dt = 1  # In years
    dat = numpy.array(sls)
    # We also create a time array in years.
    N = dat.size
    t = numpy.arange(0, N) * dt + t0

    # We write the following code to detrend and normalize the input data by its
    # standard deviation. Sometimes detrending is not necessary and simply
    # removing the mean value is good enough. However, if your dataset has a well
    # defined trend, such as the Mauna Loa CO2 dataset available in the
    # above mentioned website, it is strongly advised to perform detrending.
    # Here, we fit a one-degree polynomial function and then subtract it from the
    # original data.
    p = numpy.polyfit(t - t0, dat, 1)
    dat_notrend = dat - numpy.polyval(p, t - t0)
    std = dat_notrend.std()  # Standard deviation
    var = std**2  # Variance
    dat_norm = dat_notrend / std  # Normalized dataset

    # The next step is to define some parameters of our wavelet analysis. We
    # select the mother wavelet, in this case the Morlet wavelet with
    # :math:`\omega_0=6`.
    mother = wavelet.Morlet(6)
    s0 = 2 * dt  # Starting scale, in this case 2 * 1 year = 2 years
    dj = 1 / 12  # Twelve sub-octaves per octave
    J = 7 / dj  # Seven powers of two with dj sub-octaves
    alpha, _, _ = wavelet.ar1(dat)  # Lag-1 autocorrelation for red noise

    # The following routines perform the wavelet transform and inverse wavelet
    # transform using the parameters defined above. Since we have normalized our
    # input time-series, we multiply the inverse transform by the standard
    # deviation.
    wave, scales, freqs, coi, fft, fftfreqs = wavelet.cwt(
        dat_norm, dt, dj, s0, J, mother)
    iwave = wavelet.icwt(wave, scales, dt, dj, mother) * std

    # We calculate the normalized wavelet and Fourier power spectra, as well as
    # the Fourier equivalent periods for each wavelet scale.
    power = (numpy.abs(wave))**2
    fft_power = numpy.abs(fft)**2
    period = 1 / freqs

    # We could stop at this point and plot our results. However we are also
    # interested in the power spectra significance test. The power is significant
    # where the ratio ``power / sig95 > 1``.
    signif, fft_theor = wavelet.significance(1.0,
                                             dt,
                                             scales,
                                             0,
                                             alpha,
                                             significance_level=0.95,
                                             wavelet=mother)
    sig95 = numpy.ones([1, N]) * signif[:, None]
    sig95 = power / sig95

    # Then, we calculate the global wavelet spectrum and determine its
    # significance level.
    glbl_power = power.mean(axis=1)
    dof = N - scales  # Correction for padding at edges
    glbl_signif, tmp = wavelet.significance(var,
                                            dt,
                                            scales,
                                            1,
                                            alpha,
                                            significance_level=0.95,
                                            dof=dof,
                                            wavelet=mother)

    # We also calculate the scale average between 2 years and 8 years, and its
    # significance level.
    sel = find((period >= 2) & (period < 8))
    Cdelta = mother.cdelta
    scale_avg = (scales * numpy.ones((N, 1))).transpose()
    scale_avg = power / scale_avg  # As in Torrence and Compo (1998) equation 24
    scale_avg = var * dj * dt / Cdelta * scale_avg[sel, :].sum(axis=0)
    scale_avg_signif, tmp = wavelet.significance(
        var,
        dt,
        scales,
        2,
        alpha,
        significance_level=0.95,
        dof=[scales[sel[0]], scales[sel[-1]]],
        wavelet=mother)

    # Finally, we plot our results in four different subplots containing:
    # (i) the original series anomaly and the inverse wavelet transform; (ii)
    # the wavelet power spectrum; (iii) the global wavelet and Fourier spectra;
    # and (iv) the Fourier power spectrum. In all sub-plots the significance
    # levels are either included as dotted lines or as filled contour lines.

    # Prepare the figure
    pyplot.close('all')
    pyplot.ioff()
    figprops = dict(figsize=(11, 8), dpi=72)
    fig = pyplot.figure(**figprops)

    # First sub-plot, the original time series anomaly and inverse wavelet
    # transform.
    ax = pyplot.axes([0.1, 0.75, 0.65, 0.2])
    ax.plot(t, iwave, '-', linewidth=1, color=[0.5, 0.5, 0.5])
    ax.plot(t, dat, 'k', linewidth=1.5)
    ax.set_title('a) {}'.format(title))
    ax.set_ylabel(r'{} [{}]'.format(label, units))

    # Second sub-plot, the normalized wavelet power spectrum and significance
    # level contour lines and cone of influence hatched area. Note that period
    # scale is logarithmic.
    bx = pyplot.axes([0.1, 0.37, 0.65, 0.28], sharex=ax)
    levels = [0.0625, 0.125, 0.25, 0.5, 1, 2, 4, 8, 16]
    bx.contourf(t,
                numpy.log2(period),
                numpy.log2(power),
                numpy.log2(levels),
                extend='both',
                cmap=pyplot.cm.viridis)
    extent = [t.min(), t.max(), 0, max(period)]
    bx.contour(t,
               numpy.log2(period),
               sig95, [-99, 1],
               colors='k',
               linewidths=2,
               extent=extent)
    bx.fill(numpy.concatenate(
        [t, t[-1:] + dt, t[-1:] + dt, t[:1] - dt, t[:1] - dt]),
            numpy.concatenate([
                numpy.log2(coi), [1e-9],
                numpy.log2(period[-1:]),
                numpy.log2(period[-1:]), [1e-9]
            ]),
            'k',
            alpha=0.3,
            hatch='x')
    bx.set_title('b) {} Wavelet Power Spectrum ({})'.format(
        label, mother.name))
    bx.set_ylabel('Period (years)')
    #
    Yticks = 2**numpy.arange(numpy.ceil(numpy.log2(period.min())),
                             numpy.ceil(numpy.log2(period.max())))
    bx.set_yticks(numpy.log2(Yticks))
    bx.set_yticklabels(Yticks)

    # Third sub-plot, the global wavelet and Fourier power spectra and theoretical
    # noise spectra. Note that period scale is logarithmic.
    cx = pyplot.axes([0.77, 0.37, 0.2, 0.28], sharey=bx)
    cx.plot(glbl_signif, numpy.log2(period), 'k--')
    cx.plot(var * fft_theor, numpy.log2(period), '--', color='#cccccc')
    cx.plot(var * fft_power,
            numpy.log2(1. / fftfreqs),
            '-',
            color='#cccccc',
            linewidth=1.)
    cx.plot(var * glbl_power, numpy.log2(period), 'k-', linewidth=1.5)
    cx.set_title('c) Global Wavelet Spectrum')
    cx.set_xlabel(r'Power [({})^2]'.format(units))
    cx.set_xlim([0, glbl_power.max() + var])
    cx.set_ylim(numpy.log2([period.min(), period.max()]))
    cx.set_yticks(numpy.log2(Yticks))
    cx.set_yticklabels(Yticks)
    pyplot.setp(cx.get_yticklabels(), visible=False)

    # Fourth sub-plot, the Fourier power spectrum.
    dx = pyplot.axes([0.1, 0.07, 0.65, 0.2])
    dx.plot(numpy.log2(fftfreqs), numpy.log2(fft_power), 'k')
    dx.plot(numpy.log2(freqs), var * fft_theor, '--', color='#cccccc')
    dx.plot(numpy.log2(1. / fftfreqs),
            var * fft_power,
            '-',
            color='#cccccc',
            linewidth=1.)
    dx.plot(fftfreqs, fft_power, 'k-', linewidth=1.5)
    dx.set_title('d) Fourier Power Spectrum')
    dx.set_ylabel(r'Power [({})^2]'.format(units))
    dx.set_xlim([0, 2 * fftfreqs.max()])

    Yticks = 2**numpy.arange(numpy.ceil(numpy.log2(fft_power.min())),
                             numpy.ceil(numpy.log2(fft_power.max())))
    dx.set_ylim(numpy.log2([fft_power.min(), fft_power.max()]))
    dx.set_yticks(numpy.log2(Yticks))
    dx.set_yticklabels(Yticks)
    pyplot.setp(dx.get_yticklabels(), visible=False)

    pyplot.show()
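
A hypothetical usage sketch for simple_sample: the function expects a sequence
of per-sentence lengths in characters (the `sentences` list here is a
placeholder, not from the original example):

sentences = ['first sentence', 'a somewhat longer second sentence']
simple_sample([len(s) for s in sentences])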