def getAlpha(self):
    """ Lag-1 autocorrelation for red noise """
    self.alpha, var, mu2 = wavelet.ar1(self.signal)
    if self.verbose:
        print(f"Variance of signal: {self.feature.amplitude**2}")
        print("Variance of red noise: ", var * self.feature.amplitude**2)
        print("Mean square of red noise: ", mu2 * self.feature.amplitude**2)
    return self.alpha
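# A minimal sketch of what getAlpha computes, assuming only numpy and
# pycwt (the synthetic AR(1) series and its coefficient are illustrative
# assumptions, not part of the original class):
import numpy as np
import pycwt as wavelet

rng = np.random.default_rng(42)
true_alpha = 0.7
signal = np.zeros(1000)
for i in range(1, signal.size):
    # AR(1) recursion: x[i] = alpha * x[i-1] + white noise
    signal[i] = true_alpha * signal[i - 1] + rng.standard_normal()

alpha, var, mu2 = wavelet.ar1(signal)  # estimated lag-1 autocorrelation
print(f"estimated alpha: {alpha:.2f} (true value: {true_alpha})")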
N = dat.size
t = numpy.arange(0, N) * dt + t0

p = numpy.polyfit(t - t0, dat, 1)
dat_notrend = dat - numpy.polyval(p, t - t0)
std = dat_notrend.std()  # Standard deviation
var = std**2  # Variance
dat_norm = dat_notrend / std  # Normalized dataset

# Define wavelet parameters
mother = wavelet.Morlet(6)
s0 = 2 * dt  # Starting scale, in this case 2 * 0.25 years = 6 months
dj = 1 / 12  # Twelve sub-octaves per octave
J = 7 / dj  # Seven powers of two with dj sub-octaves
alpha, _, _ = wavelet.ar1(dat)  # Lag-1 autocorrelation for red noise

wave, scales, freqs, coi, fft, fftfreqs = wavelet.cwt(dat_norm, dt, dj, s0,
                                                      J, mother)
iwave = wavelet.icwt(wave, scales, dt, dj, mother) * std

power = (numpy.abs(wave))**2
fft_power = numpy.abs(fft)**2
period = 1 / freqs
power /= scales[:, None]

signif, fft_theor = wavelet.significance(1.0, dt, scales, 0, alpha,
                                         significance_level=0.95,
                                         wavelet=mother)
import numpy as np
import pycwt as wavelet


def wavelet_transform(dat, mother, s0, dj, J, dt, lims=[20, 120], t0=0):
    """Plot the continuous wavelet transform for a given signal.

    Make sure to detrend and normalize the data before calling this
    function. This is a function wrapper around the pycwt simple_sample
    example with some modifications.

    ----------
    Args:
        dat (Mandatory [array like]): input signal data.

        mother (Mandatory [pycwt wavelet or str]): the mother wavelet.

        s0 (Mandatory [float]): starting scale.

        dj (Mandatory [float]): number of sub-octaves per octave.

        J (Mandatory [float]): powers of two with dj sub-octaves.

        dt (Mandatory [float]): sampling interval in the same unit as the
        input signal.

        lims (Mandatory [list]): period interval over which to integrate
        the local power spectrum.

        t0 (Optional [float]): start time of the series.
    ----------
    Return:
        t, dt, power, period, coi, sig95, iwave, igwave
    """
    # also create a time array.
    N = dat.size
    t = np.arange(0, N) * dt + t0

    # detrend and normalize the input data by its standard deviation.
    # Sometimes detrending is not necessary and simply removing the mean
    # value is good enough. However, if your dataset has a well defined
    # trend, such as the Mauna Loa CO\ :sub:`2` dataset available in the
    # above mentioned website, it is strongly advised to perform
    # detrending. Here, we fit a one-degree polynomial function and then
    # subtract it from the original data.
    p = np.polyfit(t - t0, dat, 1)
    dat_notrend = dat - np.polyval(p, t - t0)
    std = dat_notrend.std()  # Standard deviation
    var = std**2  # Variance
    dat_norm = dat_notrend / std  # Normalized dataset

    alpha, _, _ = wavelet.ar1(dat)  # Lag-1 autocorrelation for red noise

    # the following routines perform the wavelet transform and inverse
    # wavelet transform using the parameters defined above. Since we have
    # normalized our input time-series, we multiply the inverse transform
    # by the standard deviation.
    wave, scales, freqs, coi, fft, fftfreqs = wavelet.cwt(
        dat_norm, dt, dj, s0, J, mother)
    iwave = wavelet.icwt(wave, scales, dt, dj, mother) * std

    # calculate the normalized wavelet and Fourier power spectra, as well
    # as the Fourier equivalent periods for each wavelet scale.
    power = (np.abs(wave))**2
    fft_power = np.abs(fft)**2
    period = 1 / freqs

    # inverse transform, but only considering the periods inside lims
    idx1 = np.argmin(np.abs(period - lims[0]))
    idx2 = np.argmin(np.abs(period - lims[1]))
    _wave = wave.copy()
    _wave[0:idx1, :] = 0
    _wave[idx2:, :] = 0
    igwave = wavelet.icwt(_wave, scales, dt, dj, mother) * std

    # could stop at this point and plot our results. However, we are also
    # interested in the power spectra significance test. The power is
    # significant where the ratio ``power / sig95 > 1``.
    signif, fft_theor = wavelet.significance(1.0, dt, scales, 0, alpha,
                                             significance_level=0.95,
                                             wavelet=mother)
    sig95 = np.ones([1, N]) * signif[:, None]
    sig95 = power / sig95

    # calculate the global wavelet spectrum and determine its
    # significance level.
    glbl_power = power.mean(axis=1)
    dof = N - scales  # Correction for padding at edges
    glbl_signif, tmp = wavelet.significance(var, dt, scales, 1, alpha,
                                            significance_level=0.95, dof=dof,
                                            wavelet=mother)

    return t, dt, power, period, coi, sig95, iwave, igwave
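# A hedged usage sketch for the wrapper above; the synthetic signal and
# all parameter values are assumptions chosen for illustration only.
import numpy as np
import pycwt as wavelet

dt = 1.0                                    # sampling interval
t = np.arange(1024) * dt
dat = np.sin(2 * np.pi * t / 64) + 0.5 * np.random.randn(t.size)

mother = wavelet.Morlet(6)
dj = 1 / 12                                 # twelve sub-octaves per octave
s0 = 2 * dt                                 # starting scale
J = 7 / dj                                  # seven powers of two

(t_out, dt_out, power, period, coi,
 sig95, iwave, igwave) = wavelet_transform(dat, mother, s0, dj, J, dt,
                                           lims=[20, 120])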
avg1, avg2 = (2, 8)  # Range of periods to average
slevel = 0.95  # Significance level

std = dat.std()  # Standard deviation
std2 = std ** 2  # Variance
dat = (dat - dat.mean()) / std  # Calculating anomaly and normalizing

N = dat.size  # Number of measurements
time = numpy.arange(0, N) * ds.dt + ds.t0  # Time array in years

dj = 1 / 12  # Twelve sub-octaves per octave
s0 = -1  # 2 * dt # Starting scale, here 6 months
J = -1  # 7 / dj # Seven powers of two with dj sub-octaves
# alpha = 0.0 # Lag-1 autocorrelation for white noise
try:
    alpha, _, _ = wavelet.ar1(dat)  # Lag-1 autocorrelation for red noise
except Warning:
    # When the dataset is too short, or there is a strong trend, ar1 raises
    # a warning. In this case, we assume a white noise background spectrum.
    alpha = 0.0

mother = wavelet.Morlet(6)  # Morlet mother wavelet with m=6

# The following routines perform the wavelet transform and significance
# analysis for the chosen data set.
wave, scales, freqs, coi, fft, fftfreqs = wavelet.cwt(dat, ds.dt, dj, s0, J,
                                                      mother)
iwave = wavelet.icwt(wave, scales, ds.dt, dj, mother)

# Normalized wavelet and Fourier power spectra
power = (numpy.abs(wave)) ** 2
# I. Continuous wavelet transform
# ===============================

# Calculate the CWT of both normalized time series. The function
# wavelet.cwt returns a list containing [wave, scales, freqs, coi, fft,
# fftfreqs] variables.
mother = wavelet.Morlet(6)  # Morlet mother wavelet with m=6
slevel = 0.95  # Significance level
dj = 1/12  # Twelve sub-octaves per octave
s0 = 2 * dt  # Starting scale, here 6 months
J = 6 / dj  # Six powers of two with dj sub-octaves
if True:
    alpha1, _, _ = wavelet.ar1(s1)  # Lag-1 autocorrelation for red noise
    alpha2, _, _ = wavelet.ar1(s2)  # Lag-1 autocorrelation for red noise
else:
    alpha1 = alpha2 = 0.0  # Lag-1 autocorrelation for white noise

# The following routines perform the wavelet transform and significance
# analysis for two data sets.
W1, scales1, freqs1, coi1, _, _ = wavelet.cwt(s1/std1, dt, dj, s0, J, mother)
signif1, fft_theor1 = wavelet.significance(1.0, dt, scales1, 0, alpha1,
                                           significance_level=slevel,
                                           wavelet=mother)
def wct_modified(y1, y2, dt, dj=1/12, s0=-1, J=-1, sig=True,
                 significance_level=0.95, wavelet='morlet', normalize=True,
                 **kwargs):
    """Wavelet coherence transform (WCT).

    The WCT finds regions in time frequency space where the two time
    series co-vary, but do not necessarily have high power.

    Parameters
    ----------
    y1, y2 : numpy.ndarray, list
        Input signals.
    dt : float
        Sample spacing.
    dj : float, optional
        Spacing between discrete scales. Default value is 1/12.
        Smaller values will result in better scale resolution, but
        slower calculation and plot.
    s0 : float, optional
        Smallest scale of the wavelet. Default value is 2*dt.
    J : float, optional
        Number of scales less one. Scales range from s0 up to
        s0 * 2**(J * dj), which gives a total of (J + 1) scales.
        Default is J = (log2(N * dt / s0)) / dj.
    significance_level : float, optional
        Significance level to use. Default is 0.95.
    normalize : boolean, optional
        If set to true, normalizes CWT by the standard deviation of
        the signals.

    Returns
    -------
    WXS, WXA, WCT, aWCT, coi, freq, sig
        Cross-wavelet spectrum, its smoothed amplitude, the wavelet
        coherence, the coherence phase angle, the cone of influence,
        the frequencies and the coherence significance levels.

    See also
    --------
    cwt, xwt

    """
    wavelet = pycwt.wavelet._check_parameter_wavelet(wavelet)

    # Makes sure input signals are numpy arrays.
    y1 = np.asarray(y1)
    y2 = np.asarray(y2)

    # Checking some input parameters
    if s0 == -1:
        # Smallest resolvable scale
        s0 = 2 * dt / wavelet.flambda()
    if J == -1:
        # Number of scales
        J = int(np.round(np.log2(y1.size * dt / s0) / dj))

    # Calculates the standard deviation of both input signals.
    std1 = y1.std()
    std2 = y2.std()
    # Normalizes both signals, if appropriate.
    if normalize:
        y1_normal = (y1 - y1.mean()) / std1
        y2_normal = (y2 - y2.mean()) / std2
    else:
        y1_normal = y1
        y2_normal = y2

    # Calculates the CWT of the time-series making sure the same parameters
    # are used in both calculations.
    _kwargs = dict(dj=dj, s0=s0, J=J, wavelet=wavelet)
    W1, sj, freq, coi, _, _ = pycwt.cwt(y1_normal, dt, **_kwargs)
    W2, sj, freq, coi, _, _ = pycwt.cwt(y2_normal, dt, **_kwargs)

    scales1 = np.ones([1, y1.size]) * sj[:, None]
    scales2 = np.ones([1, y2.size]) * sj[:, None]

    # Smooth the wavelet spectra before truncating.
    S1 = wavelet.smooth(np.abs(W1)**2 / scales1, dt, dj, sj)
    S2 = wavelet.smooth(np.abs(W2)**2 / scales2, dt, dj, sj)

    # Now the wavelet transform coherence
    W12 = W1 * W2.conj()
    scales = np.ones([1, y1.size]) * sj[:, None]
    S12 = wavelet.smooth(W12 / scales, dt, dj, sj)
    WCT = np.abs(S12)**2 / (S1 * S2)
    aWCT = np.angle(W12)

    # Calculate cross spectrum & its amplitude
    WXS, WXA = W12, np.abs(S12)

    # Calculates the significance using Monte Carlo simulations with 95%
    # confidence as a function of scale.
    if sig:
        a1, b1, c1 = pycwt.ar1(y1)
        a2, b2, c2 = pycwt.ar1(y2)
        sig = pycwt.wct_significance(a1, a2, dt=dt, dj=dj, s0=s0, J=J,
                                     significance_level=significance_level,
                                     wavelet=wavelet, **kwargs)
    else:
        sig = np.asarray([0])

    return WXS, WXA, WCT, aWCT, coi, freq, sig
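# Hedged usage sketch for wct_modified: two correlated synthetic series
# (the noise model, lag and dt are illustrative assumptions). sig=False
# skips the slow Monte Carlo significance step.
import numpy as np

rng = np.random.default_rng(0)
n, dt = 512, 1.0
common = np.sin(2 * np.pi * np.arange(n) / 32)
y1 = common + 0.5 * rng.standard_normal(n)
y2 = np.roll(common, 4) + 0.5 * rng.standard_normal(n)

WXS, WXA, WCT, aWCT, coi, freq, sig = wct_modified(y1, y2, dt, sig=False)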
def main():
    # Then, we load the dataset and define some data related parameters. In
    # this case, the first 19 lines of the data file contain meta-data, that
    # we ignore, since we set them manually (*i.e.* title, units).
    url = 'http://paos.colorado.edu/research/wavelets/wave_idl/nino3sst.txt'
    dat = numpy.genfromtxt(url, skip_header=19)
    title = 'NINO3 Sea Surface Temperature'
    label = 'NINO3 SST'
    units = 'degC'
    t0 = 1871.0
    dt = 0.25  # In years

    #%%
    # We also create a time array in years.
    N = dat.size
    t = numpy.arange(0, N) * dt + t0

    #%%
    # We write the following code to detrend and normalize the input data by
    # its standard deviation. Sometimes detrending is not necessary and
    # simply removing the mean value is good enough. However, if your dataset
    # has a well defined trend, such as the Mauna Loa CO\ :sub:`2` dataset
    # available in the above mentioned website, it is strongly advised to
    # perform detrending. Here, we fit a one-degree polynomial function and
    # then subtract it from the original data.
    p = numpy.polyfit(t - t0, dat, 1)
    dat_notrend = dat - numpy.polyval(p, t - t0)
    std = dat_notrend.std()  # Standard deviation
    var = std ** 2  # Variance
    dat_norm = dat_notrend / std  # Normalized dataset

    #%%
    # The next step is to define some parameters of our wavelet analysis. We
    # select the mother wavelet, in this case the Morlet wavelet with
    # :math:`\omega_0=6`.
    mother = wavelet.Morlet(6)
    s0 = 2 * dt  # Starting scale, in this case 2 * 0.25 years = 6 months
    dj = 1 / 12  # Twelve sub-octaves per octave
    J = 7 / dj  # Seven powers of two with dj sub-octaves
    alpha, _, _ = wavelet.ar1(dat)  # Lag-1 autocorrelation for red noise

    #%%
    # The following routines perform the wavelet transform and inverse
    # wavelet transform using the parameters defined above. Since we have
    # normalized our input time-series, we multiply the inverse transform by
    # the standard deviation.
    wave, scales, freqs, coi, fft, fftfreqs = wavelet.cwt(dat_norm, dt, dj,
                                                          s0, J, mother)
    iwave = wavelet.icwt(wave, scales, dt, dj, mother) * std

    #%%
    # We calculate the normalized wavelet and Fourier power spectra, as well
    # as the Fourier equivalent periods for each wavelet scale.
    power = (numpy.abs(wave)) ** 2
    fft_power = numpy.abs(fft) ** 2
    period = 1 / freqs

    #%%
    # We could stop at this point and plot our results. However we are also
    # interested in the power spectra significance test. The power is
    # significant where the ratio ``power / sig95 > 1``.
    signif, fft_theor = wavelet.significance(1.0, dt, scales, 0, alpha,
                                             significance_level=0.95,
                                             wavelet=mother)
    sig95 = numpy.ones([1, N]) * signif[:, None]
    sig95 = power / sig95

    #%%
    # Then, we calculate the global wavelet spectrum and determine its
    # significance level.
    glbl_power = power.mean(axis=1)
    dof = N - scales  # Correction for padding at edges
    glbl_signif, tmp = wavelet.significance(var, dt, scales, 1, alpha,
                                            significance_level=0.95, dof=dof,
                                            wavelet=mother)

    #%%
    # We also calculate the scale average between 2 years and 8 years, and
    # its significance level.
    sel = find((period >= 2) & (period < 8))
    Cdelta = mother.cdelta
    scale_avg = (scales * numpy.ones((N, 1))).transpose()
    scale_avg = power / scale_avg  # As in Torrence and Compo (1998) eq. 24
    scale_avg = var * dj * dt / Cdelta * scale_avg[sel, :].sum(axis=0)
    scale_avg_signif, tmp = wavelet.significance(
        var, dt, scales, 2, alpha, significance_level=0.95,
        dof=[scales[sel[0]], scales[sel[-1]]], wavelet=mother)

    #%%
    # Finally, we plot our results in four different subplots containing the
    # (i) original series anomaly and the inverse wavelet transform; (ii) the
    # wavelet power spectrum; (iii) the global wavelet and Fourier spectra;
    # and (iv) the range averaged wavelet spectrum. In all sub-plots the
    # significance levels are either included as dotted lines or as filled
    # contour lines.

    # Prepare the figure
    pyplot.close('all')
    pyplot.ioff()
    figprops = dict(figsize=(11, 8), dpi=72)
    fig = pyplot.figure(**figprops)

    #%%
    # First sub-plot, the original time series anomaly and inverse wavelet
    # transform.
    ax = pyplot.axes([0.1, 0.75, 0.65, 0.2])
    ax.plot(t, iwave, '-', linewidth=1, color=[0.5, 0.5, 0.5])
    ax.plot(t, dat, 'k', linewidth=1.5)
    ax.set_title('a) {}'.format(title))
    ax.set_ylabel(r'{} [{}]'.format(label, units))
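# For reference, the scale-average computed above (and repeated in
# several snippets below) implements equation 24 of Torrence and Compo
# (1998),
#
# .. math:: \overline{W}_n^2 = \frac{\delta j \, \delta t}{C_\delta}
#           \sum_{j=j_1}^{j_2} \frac{|W_n(s_j)|^2}{s_j}
#
# where :math:`C_\delta` is the reconstruction factor of the mother
# wavelet (``mother.cdelta`` in the code); the extra ``var`` factor
# restores the variance removed by the earlier normalization.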
def takewav_makefig(dd, moornum):
    if moornum == 8:
        dt = 1
        dat = pd.read_csv(dd, header=12, sep=r'\s*')
        date = unique([datetime.datetime(int(dat.iloc[ii, 0]),
                                         int(dat.iloc[ii, 1]),
                                         int(dat.iloc[ii, 2]),
                                         int(dat.iloc[ii, 3]))
                       for ii in range(len(dat))])
        utest = array(dat.iloc[:, 6] / 100)
        vtest = array(dat.iloc[:, 7] / 100)
        nomd = int(nanmean(array(dat.iloc[:, 5])))
        dat = utest**2 + vtest**2
        savetit = 'M1-' + str(nomd) + 'm'
    else:
        dataset = xr.open_dataset(dd)
        date = dataset['TIME']
        ke = dataset['UCUR']**2 + dataset['VCUR']**2
        dat = ke.values.flatten()
        dt = 0.5
        nomd = int(dataset.geospatial_vertical_min)
        savetit = dataset.platform_code[-3:] + '-' + str(nomd) + 'm'

    dat[isnan(dat)] = nanmean(dat)

    alpha, _, _ = wavelet.ar1(dat)  # Lag-1 autocorrelation for red noise

    N = len(dat)  # in hours
    t = numpy.arange(0, N) * dt
    std = dat.std()
    var = std**2
    dat_norm = dat / std

    # The following routines perform the wavelet transform and inverse
    # wavelet transform using the parameters defined above. Since we have
    # normalized our input time-series, we multiply the inverse transform by
    # the standard deviation.
    wave, scales, freqs, coi, fft, fftfreqs = wavelet.cwt(dat_norm, dt, dj,
                                                          s0, J, mother)
    iwave = wavelet.icwt(wave, scales, dt, dj, mother) * std

    # We calculate the normalized wavelet and Fourier power spectra, as well
    # as the Fourier equivalent periods for each wavelet scale.
    power = (numpy.abs(wave)) ** 2
    fft_power = numpy.abs(fft) ** 2
    period = 1 / freqs

    # Optionally, we could also rectify the power spectrum according to the
    # suggestions proposed by Liu et al. (2007) [2]
    power /= scales[:, None]

    # We could stop at this point and plot our results. However we are also
    # interested in the power spectra significance test. The power is
    # significant where the ratio power / sig95 > 1.
    signif, fft_theor = wavelet.significance(1.0, dt, scales, 0, alpha,
                                             significance_level=0.95,
                                             wavelet=mother)
    sig95 = numpy.ones([1, N]) * signif[:, None]
    sig95 = power / sig95

    # Then, we calculate the global wavelet spectrum and determine its
    # significance level.
    glbl_power = power.mean(axis=1)
    dof = N - scales  # Correction for padding at edges
    glbl_signif, tmp = wavelet.significance(var, dt, scales, 1, alpha,
                                            significance_level=0.95, dof=dof,
                                            wavelet=mother)

    # We also calculate the scale average between pmin and pmax, and its
    # significance level.
    f, dx = pyplot.subplots(6, 1, figsize=(12, 12), sharex=True)
    bands = [1, 2, 8, 16, 48, 128, 512]
    for ii in range(len(bands) - 1):
        pmin = bands[ii]
        pmax = bands[ii + 1]
        sel = find((period >= pmin) & (period < pmax))
        Cdelta = mother.cdelta
        scale_avg = (scales * numpy.ones((N, 1))).transpose()
        # As in Torrence and Compo (1998) equation 24
        scale_avg = power / scale_avg
        scale_avg = var * dj * dt / Cdelta * scale_avg[sel, :].sum(axis=0)
        scale_avg_signif, tmp = wavelet.significance(
            var, dt, scales, 2, alpha, significance_level=0.95,
            dof=[scales[sel[0]], scales[sel[-1]]], wavelet=mother)
        dx[ii].axhline(scale_avg_signif, color='C' + str(ii),
                       linestyle='--', linewidth=1.)
        dx[ii].plot(date, scale_avg, '-', color='C' + str(ii), linewidth=1.5,
                    label='{}--{} hour band'.format(pmin, pmax))
        for jj, dv in enumerate(dlist):
            dx[ii].axvline(dv, color=clist[jj], linewidth=3)
        dx[ii].legend()

    dx[0].set_title('Scale-averaged power: ' + savetit)
    dx[3].set_ylabel(r'Average variance [{}]'.format(units))
    if moornum == 8:
        dx[0].set_xlim(date[0], date[-1])
    else:
        dx[0].set_xlim(date[0].values, date[-1].values)
    savefig(figdir + 'ScaleSep_' + savetit + '.png', bbox_inches='tight')

    pmin = 2
    pmax = 24
    sel = find((period >= pmin) & (period < pmax))
    Cdelta = mother.cdelta
    scale_avg = (scales * numpy.ones((N, 1))).transpose()
    # As in Torrence and Compo (1998) equation 24
    scale_avg = power / scale_avg
    scale_avg = var * dj * dt / Cdelta * scale_avg[sel, :].sum(axis=0)
    scale_avg_signif, tmp = wavelet.significance(
        var, dt, scales, 2, alpha, significance_level=0.95,
        dof=[scales[sel[0]], scales[sel[-1]]], wavelet=mother)

    figprops = dict(figsize=(11, 8), dpi=72)
    fig = pyplot.figure(**figprops)

    # First sub-plot, the original time series anomaly and inverse wavelet
    # transform.
    ax = pyplot.axes([0.1, 0.75, 0.65, 0.2])
    ax.plot(date, dat, linewidth=1.5, color=[0.5, 0.5, 0.5])
    ax.plot(date, iwave, 'k-', linewidth=1, zorder=100)
    if moornum == 8:
        ax.set_xlim(date[0], date[-1])
    else:
        ax.set_xlim(date[0].values, date[-1].values)
    # ax.set_title('a) {}'.format(title))
    ax.set_ylabel(r'{} [{}]'.format(label, units))

    # Second sub-plot, the normalized wavelet power spectrum and significance
    # level contour lines and cone of influence hatched area. Note that
    # period scale is logarithmic.
    bx = pyplot.axes([0.1, 0.37, 0.65, 0.28])
    levels = [0.0625, 0.125, 0.25, 0.5, 1, 2, 4, 8, 16]
    bx.contourf(t, numpy.log2(period), numpy.log2(power), numpy.log2(levels),
                extend='both', cmap=pyplot.cm.viridis)
    extent = [t.min(), t.max(), 0, max(period)]
    bx.contour(t, numpy.log2(period), sig95, [-99, 1], colors='k',
               linewidths=2, extent=extent)
    bx.fill(numpy.concatenate([t, t[-1:] + dt, t[-1:] + dt, t[:1] - dt,
                               t[:1] - dt]),
            numpy.concatenate([numpy.log2(coi), [1e-9],
                               numpy.log2(period[-1:]),
                               numpy.log2(period[-1:]), [1e-9]]),
            'k', alpha=0.3, hatch='x')
    bx.set_title('{} Wavelet Power Spectrum ({})'.format(label, mother.name))
    bx.set_ylabel('Period (hours)')
    #
    Yticks = 2 ** numpy.arange(numpy.ceil(numpy.log2(period.min())),
                               numpy.ceil(numpy.log2(period.max())))
    bx.set_yticks(numpy.log2(Yticks))
    bx.set_yticklabels(Yticks)
    bx.set_xticklabels('')
    bx.set_xlim(t.min(), t.max())

    # Third sub-plot, the global wavelet and Fourier power spectra and
    # theoretical noise spectra. Note that period scale is logarithmic.
    cx = pyplot.axes([0.77, 0.37, 0.2, 0.28], sharey=bx)
    cx.plot(glbl_signif, numpy.log2(period), 'k--')
    cx.plot(var * fft_theor, numpy.log2(period), '--', color='#cccccc')
    cx.plot(var * fft_power, numpy.log2(1. / fftfreqs), '-', color='#cccccc',
            linewidth=1.)
    cx.plot(var * glbl_power, numpy.log2(period), 'k-', linewidth=1.5)
    cx.set_title('Global Wavelet Spectrum')
    cx.set_xlabel(r'Power [({})^2]'.format(units))
    cx.set_xlim([0, glbl_power.max() + var])
    cx.set_ylim(numpy.log2([period.min(), period.max()]))
    cx.set_yticks(numpy.log2(Yticks))
    cx.set_yticklabels(Yticks)
    pyplot.setp(cx.get_yticklabels(), visible=False)

    spowdic = {}
    spowdic['sig'] = scale_avg_signif
    if moornum == 8:
        spowdic['date'] = date
    else:
        spowdic['date'] = date.values
    spowdic['spow'] = scale_avg

    # Fourth sub-plot, the scale averaged wavelet spectrum.
    dx = pyplot.axes([0.1, 0.07, 0.65, 0.2], sharex=ax)
    dx.axhline(scale_avg_signif, color='k', linestyle='--', linewidth=1.)
    dx.plot(date, scale_avg, 'k-', linewidth=1.5)
    dx.set_title('{}--{} hour scale-averaged power'.format(pmin, pmax))
    # [dx.axvline(dd, color=clist[ii], linewidth=3)
    #  for ii, dd in enumerate(dlist)]
    # dx.set_xlabel('Time (hours)')
    dx.set_ylabel(r'Average variance [{}]'.format(units))
    if moornum == 8:
        dx.set_xlim(date[0], date[-1])
    else:
        dx.set_xlim(date[0].values, date[-1].values)
    fig.suptitle(savetit)
    savefig(figdir + 'Wavelet_' + savetit + '.png', bbox_inches='tight')

    return nomd, spowdic
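# takewav_makefig relies on module-level wavelet and plotting parameters
# that are defined elsewhere (it also assumes pylab-style star imports
# for unique, array, nanmean, isnan and savefig, plus pandas, xarray and
# datetime). A minimal sketch of the assumed globals; every value below
# is a guess consistent with the hourly bands used in the function:
import pycwt as wavelet

dj = 1 / 12              # twelve sub-octaves per octave
s0 = 2                   # smallest scale, in hours
J = 9 / dj               # nine powers of two (bands extend to 512 h)
mother = wavelet.Morlet(6)
units = 'm^2/s^2'        # kinetic-energy-like quantity (u^2 + v^2)
label = 'KE'
figdir = './figures/'    # output directory for savefig
clist = ['C0', 'C1']     # colors for event markers
dlist = []               # event dates to mark with axvline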
def cwt(signal, t, obspy=None):
    # from __future__ import division
    import numpy
    from matplotlib import pyplot
    import pycwt as wavelet
    from pycwt.helpers import find

    signal = signal[10000:11000]
    t = t[10000:11000]

    title = 'DICARDIA'
    label = 'DICARDIA SST'
    units = 'degC'
    dt = 0.25  # In years

    N = signal.shape[0]
    print(N)
    p = numpy.polyfit(t, signal, 1)
    dat_notrend = signal - numpy.polyval(p, t)
    std = dat_notrend.std()  # Standard deviation
    var = std**2  # Variance
    dat_norm = dat_notrend / std  # Normalized dataset

    mother = wavelet.Morlet(6)
    s0 = 2 * dt  # Starting scale, in this case 2 * 0.25 years = 6 months
    dj = 1 / 12  # Twelve sub-octaves per octave
    J = 7 / dj  # Seven powers of two with dj sub-octaves
    alpha, _, _ = wavelet.ar1(signal)  # Lag-1 autocorrelation for red noise

    wave, scales, freqs, coi, fft, fftfreqs = wavelet.cwt(
        dat_norm, dt, dj, s0, J, mother)
    iwave = wavelet.icwt(wave, scales, dt, dj, mother) * std

    power = (numpy.abs(wave))**2
    fft_power = numpy.abs(fft)**2
    period = 1 / freqs
    power /= scales[:, None]

    signif, fft_theor = wavelet.significance(1.0, dt, scales, 0, alpha,
                                             significance_level=0.95,
                                             wavelet=mother)
    sig95 = numpy.ones([1, N]) * signif[:, None]
    sig95 = power / sig95

    glbl_power = power.mean(axis=1)
    dof = N - scales  # Correction for padding at edges
    glbl_signif, tmp = wavelet.significance(var, dt, scales, 1, alpha,
                                            significance_level=0.95, dof=dof,
                                            wavelet=mother)

    sel = find((period >= 2) & (period < 8))
    Cdelta = mother.cdelta
    scale_avg = (scales * numpy.ones((N, 1))).transpose()
    scale_avg = power / scale_avg  # As in Torrence and Compo (1998) eq. 24
    scale_avg = var * dj * dt / Cdelta * scale_avg[sel, :].sum(axis=0)
    scale_avg_signif, tmp = wavelet.significance(
        var, dt, scales, 2, alpha, significance_level=0.95,
        dof=[scales[sel[0]], scales[sel[-1]]], wavelet=mother)

    # Prepare the figure
    pyplot.close('all')
    pyplot.ioff()
    figprops = dict(figsize=(11, 8), dpi=72)
    fig = pyplot.figure(**figprops)

    # First sub-plot, the original time series anomaly and inverse wavelet
    # transform.
    ax = pyplot.axes([0.1, 0.75, 0.65, 0.2])
    ax.plot(t, iwave, '-', linewidth=1, color=[0.5, 0.5, 0.5])
    ax.plot(t, signal, 'k', linewidth=1.5)
    ax.set_title('a) {}'.format(title))
    ax.set_ylabel(r'{} [{}]'.format(label, units))

    # Second sub-plot, the normalized wavelet power spectrum and significance
    # level contour lines and cone of influence hatched area. Note that
    # period scale is logarithmic.
    bx = pyplot.axes([0.1, 0.37, 0.65, 0.28], sharex=ax)
    levels = [0.0625, 0.125, 0.25, 0.5, 1, 2, 4, 8, 16]
    bx.contourf(t, numpy.log2(period), numpy.log2(power), numpy.log2(levels),
                extend='both', cmap=pyplot.cm.viridis)
    extent = [t.min(), t.max(), 0, max(period)]
    bx.contour(t, numpy.log2(period), sig95, [-99, 1], colors='k',
               linewidths=2, extent=extent)
    bx.fill(numpy.concatenate([t, t[-1:] + dt, t[-1:] + dt, t[:1] - dt,
                               t[:1] - dt]),
            numpy.concatenate([numpy.log2(coi), [1e-9],
                               numpy.log2(period[-1:]),
                               numpy.log2(period[-1:]), [1e-9]]),
            'k', alpha=0.3, hatch='x')
    bx.set_title('b) {} Wavelet Power Spectrum ({})'.format(label,
                                                            mother.name))
    bx.set_ylabel('Period (years)')
    #
    Yticks = 2**numpy.arange(numpy.ceil(numpy.log2(period.min())),
                             numpy.ceil(numpy.log2(period.max())))
    bx.set_yticks(numpy.log2(Yticks))
    bx.set_yticklabels(Yticks)

    # Third sub-plot, the global wavelet and Fourier power spectra and
    # theoretical noise spectra. Note that period scale is logarithmic.
    cx = pyplot.axes([0.77, 0.37, 0.2, 0.28], sharey=bx)
    cx.plot(glbl_signif, numpy.log2(period), 'k--')
    cx.plot(var * fft_theor, numpy.log2(period), '--', color='#cccccc')
    cx.plot(var * fft_power, numpy.log2(1. / fftfreqs), '-', color='#cccccc',
            linewidth=1.)
    cx.plot(var * glbl_power, numpy.log2(period), 'k-', linewidth=1.5)
    cx.set_title('c) Global Wavelet Spectrum')
    cx.set_xlabel(r'Power [({})^2]'.format(units))
    cx.set_xlim([0, glbl_power.max() + var])
    cx.set_ylim(numpy.log2([period.min(), period.max()]))
    cx.set_yticks(numpy.log2(Yticks))
    cx.set_yticklabels(Yticks)
    pyplot.setp(cx.get_yticklabels(), visible=False)

    # Fourth sub-plot, the scale averaged wavelet spectrum.
    dx = pyplot.axes([0.1, 0.07, 0.65, 0.2], sharex=ax)
    dx.axhline(scale_avg_signif, color='k', linestyle='--', linewidth=1.)
    dx.plot(t, scale_avg, 'k-', linewidth=1.5)
    dx.set_title('d) {}--{} year scale-averaged power'.format(2, 8))
    dx.set_xlabel('Time (year)')
    dx.set_ylabel(r'Average variance [{}]'.format(units))
    ax.set_xlim([t.min(), t.max()])

    pyplot.show()
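# Hedged usage sketch for the cwt() plotting function above: it slices
# samples [10000:11000] internally, so both inputs must be longer than
# 11000 points (the synthetic series below is an assumption).
import numpy as np

t_full = np.arange(20000) * 0.25
signal_full = np.sin(2 * np.pi * t_full / 4) + np.random.randn(t_full.size)
cwt(signal_full, t_full)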
def parse_frames(image_file, sig=0.95):
    """ """
    cap = cv2.VideoCapture(image_file)
    if verbose:
        print("Video successfully loaded")
    FRAME_COUNT = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
    FPS = cap.get(cv2.CAP_PROP_FPS)
    if verbose > 1:
        FRAME_HEIGHT = cap.get(cv2.CAP_PROP_FRAME_HEIGHT)
        FRAME_WIDTH = cap.get(cv2.CAP_PROP_FRAME_WIDTH)
        print("INFO: \n Frame count: ", FRAME_COUNT, "\n",
              "FPS: ", FPS, " \n",
              "FRAME_HEIGHT: ", FRAME_HEIGHT, " \n",
              "FRAME_WIDTH: ", FRAME_WIDTH, " \n")

    directory = os.getcwd() + \
        '\\analysis\\{}_{}_{}_{}({})_{}_{}_scaled\\'.format(
            date, trial_type, name, wavelet, order, per_min, per_max)
    if not os.path.exists(directory):
        os.makedirs(directory)

    made = False
    frame_idx = 0
    idx = 0
    dropped = 0
    skip = True
    thresh = None

    df_wav = pd.DataFrame()
    df_auc = pd.DataFrame()
    df_for = pd.DataFrame()
    df_pow = pd.DataFrame()

    for i in range(FRAME_COUNT):
        a, img = cap.read()
        if a:
            frame_idx += 1

            if not made:
                # first we need to manually determine the boundaries and
                # angle
                res = bg.manual_format(img)
                # print(res)
                x, y, w, h, angle = res
                horizon_begin = x
                horizon_end = x + w
                vert_begin = y
                vert_end = y + h
                # scale_array = np.zeros((FRAME_COUNT,
                #                         abs(horizon_begin - horizon_end)))
                # area_time = np.zeros((FRAME_COUNT))
                print("Now Select the Red dot")
                red_res = bg.manual_format(img, stop_sign=True)
                red_x, red_y, red_w, red_h = red_res
                box_h_begin = red_x
                box_h_end = red_x + red_w
                box_v_begin = red_y
                box_v_end = red_y + red_h
                made = True
                # dims = (vert_begin, vert_end, horizon_begin, horizon_end)

            real_time = i / FPS
            rows, cols, chs = img.shape
            M = cv2.getRotationMatrix2D((cols / 2, rows / 2), angle, 1)
            rot_img = cv2.warpAffine(img, M, (cols, rows))
            roi = rot_img[vert_begin:vert_end, horizon_begin:horizon_end, :]
            red_box = img[box_v_begin:box_v_end, box_h_begin:box_h_end, 2]

            if thresh is None:
                thresh = np.mean(red_box)
            # print(np.mean(red_box))
            percent_drop = 1 - (np.mean(red_box) / thresh)
            print(percent_drop)
            if percent_drop >= 0.18:
                # cv2.imshow("Red Image", red_box)
                # cv2.waitKey(0)
                skip = False
            if skip:
                if verbose >= 1:
                    print('Frame is skipped {} / {}'.format(frame_idx,
                                                            FRAME_COUNT))
                continue

            if verbose >= 1:
                print('Processing frame {} / {}'.format(frame_idx,
                                                        FRAME_COUNT))
            idx += 1
            begin_code, data_line = extract_frame(roi)

            # We need to detrend the data before sending it away
            N = len(data_line)
            dt = su / N
            t = np.arange(0, N) * dt
            t = t - np.mean(t)
            var, std, dat_norm = detrend(data_line)

            ###################################################################
            if wavelet == 'DOG':
                mother = cwt.DOG(order)
            elif wavelet == 'Paul':
                mother = cwt.Paul(order)
            elif wavelet == 'Morlet':
                mother = cwt.Morlet(order)
            elif wavelet == 'MexicanHat':
                mother = cwt.MexicanHat(order)

            s0 = 4 * dt
            try:
                alpha, _, _ = cwt.ar1(dat_norm)
            except Exception:
                alpha = 0.95

            wave, scales, freqs, coi, fft, fftfreqs = cwt.cwt(
                dat_norm, dt, dj, s0, J, mother)
            # This is a reconstruction of the wave
            iwave = cwt.icwt(wave, scales, dt, dj, mother) * std
            power = (np.abs(wave))**2  # This is the power spectra
            fft_power = np.abs(fft)**2  # This is the fourier power
            period = 1 / freqs  # Periods of the wavelet analysis, in cm
            power /= scales[:, None]  # Option suggested by Liu et al.

            # Next we calculate the significance of the power spectra:
            # significant where power / sig95 > 1.
            signif, fft_theor = cwt.significance(1.0, dt, scales, 0, alpha,
                                                 significance_level=0.95,
                                                 wavelet=mother)
            sig95 = np.ones([1, N]) * signif[:, None]
            sig95 = power / sig95

            # This is the significance of the global wave
            glbl_power = power.mean(axis=1)
            dof = N - scales  # Correction for padding at edges
            glbl_signif, tmp = cwt.significance(var, dt, scales, 1, alpha,
                                                significance_level=0.95,
                                                dof=dof, wavelet=mother)

            sel = find((period >= per_min) & (period < per_max))
            Cdelta = mother.cdelta
            scale_avg = (scales * np.ones((N, 1))).transpose()
            # As in Torrence and Compo (1998) equation 24
            scale_avg = power / scale_avg
            # scale_array[i, :] = scale_array[i, :]/np.max(scale_array[i, :])
            # data_array[i, :] = data_array[i, :]/np.max(data_array[i, :])
            scale_avg = var * dj * dt / Cdelta * scale_avg[sel, :].sum(axis=0)
            scale_avg_signif, tmp = cwt.significance(
                var, dt, scales, 2, alpha, significance_level=0.95,
                dof=[scales[sel[0]], scales[sel[-1]]], wavelet=mother)

            Yticks = 2**np.arange(np.ceil(np.log2(period.min())),
                                  np.ceil(np.log2(period.max())))

            plt.close('all')
            plt.ioff()
            figprops = dict(figsize=(11, 8), dpi=72)
            fig = plt.figure(**figprops)

            wx = plt.axes([0.77, 0.75, 0.2, 0.2])
            imz = 0
            for idxy in range(0, len(period), 10):
                wx.plot(t, mother.psi(t / period[idxy]) + imz, linewidth=1.5)
                imz += 1
            wx.xaxis.set_ticklabels([])
            # wx.set_ylim([-10, 10])

            # First sub-plot, the original time series anomaly and inverse
            # wavelet transform.
            ax = plt.axes([0.1, 0.75, 0.65, 0.2])
            ax.plot(t, data_line - np.mean(data_line), 'k',
                    label="Original Data")
            ax.plot(t, iwave, '-', linewidth=1, color=[0.5, 0.5, 0.5],
                    label="Reconstructed wave")
            ax.plot(t, dat_norm, '--k', linewidth=1.5, color=[0.5, 0.5, 0.5],
                    label="Denoised Wave")
            ax.set_title('a) {:10.2f} from beginning of trial.'.format(
                real_time))
            ax.set_ylabel(r'{} [{}]'.format("Amplitude", unit))
            ax.legend(loc=1)
            # If the non-serrated section, bounds are 200 -
            ax.set_ylim([-200, 200])

            # Second sub-plot, the normalized wavelet power spectrum and
            # significance level contour lines and cone of influence hatched
            # area. Note that period scale is logarithmic.
            bx = plt.axes([0.1, 0.37, 0.65, 0.28], sharex=ax)
            levels = [0.0625, 0.125, 0.25, 0.5, 1, 2, 4, 8, 16]
            cont = bx.contourf(t, np.log2(period), np.log2(power),
                               np.log2(levels), extend='both',
                               cmap=plt.cm.viridis)
            extent = [t.min(), t.max(), 0, max(period)]
            bx.contour(t, np.log2(period), sig95, [-99, 1], colors='k',
                       linewidths=2, extent=extent)
            bx.fill(np.concatenate([t, t[-1:] + dt, t[-1:] + dt, t[:1] - dt,
                                    t[:1] - dt]),
                    np.concatenate([np.log2(coi), [1e-9],
                                    np.log2(period[-1:]),
                                    np.log2(period[-1:]), [1e-9]]),
                    'k', alpha=0.3, hatch='x')
            bx.set_title('b) {} Octaves Wavelet Power Spectrum [{}({})]'
                         .format(octaves, mother.name, order))
            bx.set_ylabel('Period (cm)')
            #
            Yticks = 2**np.arange(np.ceil(np.log2(period.min())),
                                  np.ceil(np.log2(period.max())))
            bx.set_yticks(np.log2(Yticks))
            bx.set_yticklabels(Yticks)
            cbar = fig.colorbar(cont, ax=bx)

            # Third sub-plot, the global wavelet and Fourier power spectra
            # and theoretical noise spectra. Note that period scale is
            # logarithmic.
            cx = plt.axes([0.77, 0.37, 0.2, 0.28], sharey=bx)
            cx.plot(glbl_signif, np.log2(period), 'k--')
            cx.plot(var * fft_theor, np.log2(period), '--', color='#cccccc')
            cx.plot(var * fft_power, np.log2(1. / fftfreqs), '-',
                    color='#cccccc', linewidth=1.)
            cx.plot(var * glbl_power, np.log2(period), 'k-', linewidth=1.5)
            cx.set_title('c) Global Wavelet Spectrum')
            cx.set_xlabel(r'Power [({})^2]'.format(unit))
            # cx.set_xlim([0, (var * fft_theor).max()])
            plt.xscale('log')
            cx.set_ylim(np.log2([period.min(), period.max()]))
            cx.set_yticks(np.log2(Yticks))
            cx.set_yticklabels(Yticks)
            # if sig_array == []:
            yvals = np.linspace(Yticks.min(), Yticks.max(), len(period))
            plt.xscale('linear')
            plt.setp(cx.get_yticklabels(), visible=False)

            # Fourth sub-plot, the scale averaged wavelet spectrum.
            dx = plt.axes([0.1, 0.07, 0.65, 0.2], sharex=ax)
            dx.axhline(scale_avg_signif, color='k', linestyle='--',
                       linewidth=1.)
            dx.plot(t, scale_avg, 'k-', linewidth=1.5)
            dx.set_title('d) {}-{}cm scale-averaged power'.format(per_min,
                                                                  per_max))
            dx.set_xlabel('Distance from center (cm)')
            dx.set_ylabel(r'Average variance [{}]'.format(unit))
            # dx.set_ylim([0, 500])
            ax.set_xlim([t.min(), t.max()])
            # plt.savefig(directory + '{}_analysis_frame-{}.png'.format(
            #     name, idx), bbox='tight')

            if verbose >= 2:
                print('*' * int((i / FRAME_COUNT) * 100))

            df_wav[real_time] = pd.Series(dat_norm, index=t)
            df_pow[real_time] = pd.Series(var * glbl_power,
                                          index=np.log2(period))
            df_for[real_time] = pd.Series(var * fft_power,
                                          index=np.log2(1. / fftfreqs))
            df_auc[real_time] = [np.trapz(data_line)]
        else:
            print("Frame #{} has dropped".format(i))
            dropped += 1

    if verbose >= 1:
        print('All images saved')
    if verbose >= 1:
        print("{:10.2f} % of the frames have dropped".format(
            (dropped / FRAME_COUNT) * 100))

    # Plotting and saving the global power over time
    row, cols = df_pow.shape
    time = np.arange(0, cols) / FPS
    plt.close('all')
    plt.ioff()
    plt.contourf(time, df_pow.index.tolist(), df_pow)
    plt.contour(time, df_pow.index.tolist(), df_pow)
    plt.title("Global Power over Time")
    plt.ylabel("Period [cm]")
    plt.xlabel("Time")
    cax = plt.gca()
    # plt.xscale('log')
    cax.set_ylim(np.log2([period.min(), period.max()]))
    cax.set_yticks(np.log2(Yticks))
    cax.set_yticklabels(Yticks)
    plt.savefig(directory + '{}_global_power-{}.png'.format(name, idx),
                bbox='tight')

    row, cols = df_for.shape
    time = np.arange(0, cols) / FPS
    plt.close('all')
    plt.ioff()
    plt.contourf(time, df_for.index.tolist(), df_for)
    plt.contour(time, df_for.index.tolist(), df_for)
    plt.title("Fourier Power over Time")
    plt.ylabel("Period [cm]")
    plt.xlabel("Time")
    cax = plt.gca()
    # plt.xscale('log')
    cax.set_ylim(np.log2([period.min(), period.max()]))
    cax.set_yticks(np.log2(Yticks))
    cax.set_yticklabels(Yticks)
    plt.savefig(directory + '{}_fourier_power-{}.png'.format(name, idx),
                bbox='tight')

    plt.close('all')
    plt.ioff()
    rows, cols = df_auc.shape
    time = np.arange(0, cols) / FPS
    plt.plot(time, df_auc.T)
    plt.xlabel("Time")
    plt.ylabel("Area under the curve in cm")
    plt.title("Area under the curve over time")
    plt.savefig(directory + '{}_area_under_curve-{}.png'.format(name, idx),
                bbox='tight')

    df_wav['Mean'] = df_wav.mean(axis=1)
    df_pow['Mean'] = df_pow.mean(axis=1)
    df_for['Mean'] = df_for.mean(axis=1)
    df_auc['Mean'] = df_auc.mean(axis=1)

    df_wav['Standard Deviation'] = df_wav.std(axis=1)
    df_pow['Standard Deviation'] = df_pow.std(axis=1)
    df_for['Standard Deviation'] = df_for.std(axis=1)
    df_auc['Standard Deviation'] = df_auc.std(axis=1)

    ##[Writing analysis to excel]#############################################
    print("Writing files")
    writer = pd.ExcelWriter(directory + "analysis{}.xlsx".format(trial_name))
    df_wav.to_excel(writer, "Raw Waveforms")
    df_auc.to_excel(writer, "Area Under the Curve")
    df_for.to_excel(writer, "Fourier Spectra")
    df_pow.to_excel(writer, "Global Power Spectra")
    writer.save()
    ##[Writing means to a single file]########################################
    # filename = 'C:\\pyscripts\\wavelet_analysis\\Overall_Analysis.xlsx'
    # append_data(filename, df_pow['Mean'].values, str(trial_name), Yticks)

    ##[Plotting mean power and fourier]#######################################
    plt.close('all')
    plt.ioff()
    plt.plot(df_pow['Mean'], df_pow.index.tolist(), label="Global Power")
    plt.plot(df_for['Mean'], df_for.index.tolist(), label="Fourier Power")
    plt.title("Global Power averaged over Time")
    plt.ylabel("Period [cm]")
    plt.xlabel("Power [cm^2]")
    cax = plt.gca()
    # plt.xscale('log')
    cax.set_ylim(np.log2([period.min(), period.max()]))
    cax.set_yticks(np.log2(Yticks))
    cax.set_yticklabels(Yticks)
    plt.legend()
    plt.savefig(directory + '{}_both_{}.png'.format(name, idx), bbox='tight')

    plt.close('all')
    plt.ioff()
    plt.plot(df_pow['Mean'], df_pow.index.tolist(), label="Global Power")
    plt.title("Global Power averaged over Time")
    plt.ylabel("Period [cm]")
    plt.xlabel("Power [cm^2]")
    cax = plt.gca()
    # plt.xscale('log')
    cax.set_ylim(np.log2([period.min(), period.max()]))
    cax.set_yticks(np.log2(Yticks))
    cax.set_yticklabels(Yticks)
    plt.legend()
    plt.savefig(directory + '{}_global_power_{}.png'.format(name, idx),
                bbox='tight')

    plt.close('all')
    plt.ioff()
    plt.plot(df_for['Mean'], df_for.index.tolist(), label="Fourier Power")
    plt.title("Fourier averaged over Time")
    plt.ylabel("Period [cm]")
    plt.xlabel("Power [cm^2]")
    cax = plt.gca()
    # plt.xscale('log')
    cax.set_ylim(np.log2([period.min(), period.max()]))
    cax.set_yticks(np.log2(Yticks))
    cax.set_yticklabels(Yticks)
    plt.legend()
    plt.savefig(directory + '{}_fourier_{}.png'.format(name, idx),
                bbox='tight')

    cap.release()
    return directory
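# parse_frames (and graph_wavelet below) call an external detrend()
# helper that is not shown in this section; a minimal compatible sketch,
# assuming a linear detrend followed by normalization by the standard
# deviation, matching the (var, std, dat_norm) unpacking at the call
# sites:
import numpy as np

def detrend(data):
    x = np.arange(len(data))
    p = np.polyfit(x, data, 1)          # one-degree polynomial fit
    notrend = data - np.polyval(p, x)   # remove the linear trend
    std = notrend.std()
    return std**2, std, notrend / std   # var, std, dat_norm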
std1 = s1.std()
std2 = s2.std()

# I. Continuous wavelet transform
# ===============================

# Calculate the CWT of both normalized time series. The function
# wavelet.cwt returns a list containing [wave, scales, freqs, coi, fft,
# fftfreqs] variables.
mother = wavelet.Morlet(6)  # Morlet mother wavelet with m=6
slevel = 0.95  # Significance level
dj = 1/12  # Twelve sub-octaves per octave
s0 = -1  # 2 * dt # Starting scale, here 6 months
J = -1  # 7 / dj # Seven powers of two with dj sub-octaves
if True:
    alpha1, _, _ = wavelet.ar1(s1)  # Lag-1 autocorrelation for red noise
    alpha2, _, _ = wavelet.ar1(s2)  # Lag-1 autocorrelation for red noise
else:
    alpha1 = alpha2 = 0.0  # Lag-1 autocorrelation for white noise

# The following routines perform the wavelet transform and significance
# analysis for two data sets.
W1, scales1, freqs1, coi1, _, _ = wavelet.cwt(s1/std1, dt, dj, s0, J, mother)
signif1, fft_theor1 = wavelet.significance(1.0, dt, scales1, 0, alpha1,
                                           significance_level=slevel,
                                           wavelet=mother)
W2, scales2, freqs2, coi2, _, _ = wavelet.cwt(s2/std2, dt, dj, s0, J, mother)
signif2, fft_theor2 = wavelet.significance(1.0, dt, scales2, 0, alpha2,
                                           significance_level=slevel,
                                           wavelet=mother)
# HW8-2
"""
for ii in i:
    eta[ii] = np.sin(2 * np.pi * ii / 20) + np.sin(2 * np.pi * ii / 10) / 2
"""
fig, sub = plt.subplots(figsize=(10, 4))
sub.plot(i, eta, ls="-", c="k", lw=1)
sub.set_xlim(min(i), max(i))
sub.set_xticks(np.arange(0, 200 + 20, 20))
sub.set_xlabel("i", fontdict={"weight": "bold"})
sub.set_ylabel("Eta", fontdict={"weight": "bold"})
sub.set_title("Data", fontdict={"weight": "bold"})

mother = wavelet.Morlet(f0=6)
alpha, _, _ = wavelet.ar1(eta)  # Lag-1 autocorrelation for red noise
dj = 0.25
s0 = 2 * dt
J = 7 / dj

wave, scales, freqs, coi, fft, fftfreqs = wavelet.cwt(signal=eta, dt=dt,
                                                      dj=dj, s0=s0, J=J,
                                                      wavelet=mother)
power = np.abs(wave)**2
fft_power = np.abs(fft)**2
period = 1 / freqs

signif, fft_theor = wavelet.significance(signal=1.0, dt=dt, scales=scales,
                                         alpha=alpha,
                                         significance_level=0.95,
                                         wavelet=mother)
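# The commented docstring above defines eta; a vectorized equivalent of
# that loop, assuming i runs over 0..200 and dt = 1 (both assumptions):
import numpy as np

dt = 1
i = np.arange(0, 201)
eta = np.sin(2 * np.pi * i / 20) + np.sin(2 * np.pi * i / 10) / 2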
def plot_wavelet(t, dat, dt, pl, pr, period_pltlim=None, ax=None, ax2=None,
                 stscale=2, siglev=0.95, cmap='viridis', title='',
                 levels=None, label='', units='', tunits='', sav_img=False):
    import pycwt as wavelet
    from pycwt.helpers import find
    import numpy as np
    import matplotlib.pyplot as plt
    from copy import copy
    import numpy.ma as ma

    t_ = copy(t)
    t0 = t[0]
    # print(Time(t[-1:], format='plot_date').iso)

    # We also create a time array in years.
    N = dat.size
    t = np.arange(0, N) * dt + t0
    # print(Time(t[-1:], format='plot_date').iso)

    # We write the following code to detrend and normalize the input data by
    # its standard deviation. Sometimes detrending is not necessary and
    # simply removing the mean value is good enough. However, if your dataset
    # has a well defined trend, such as the Mauna Loa CO\ :sub:`2` dataset
    # available in the above mentioned website, it is strongly advised to
    # perform detrending. Here, we fit a one-degree polynomial function and
    # then subtract it from the original data.
    p = np.polyfit(t - t0, dat, 1)
    dat_notrend = dat - np.polyval(p, t - t0)
    std = dat_notrend.std()  # Standard deviation
    var = std**2  # Variance
    dat_norm = dat_notrend / std  # Normalized dataset

    # The next step is to define some parameters of our wavelet analysis. We
    # select the mother wavelet, in this case the Morlet wavelet with
    # :math:`\omega_0=6`.
    mother = wavelet.Morlet(6)
    s0 = stscale * dt  # Starting scale: stscale * dt
    dj = 1 / 12  # Twelve sub-octaves per octave
    J = -1  # 7 / dj # Seven powers of two with dj sub-octaves
    alpha, _, _ = wavelet.ar1(dat)  # Lag-1 autocorrelation for red noise

    # The following routines perform the wavelet transform and inverse
    # wavelet transform using the parameters defined above. Since we have
    # normalized our input time-series, we multiply the inverse transform by
    # the standard deviation.
    wave, scales, freqs, coi, fft, fftfreqs = wavelet.cwt(
        dat_norm, dt, dj, s0, J, mother)
    iwave = wavelet.icwt(wave, scales, dt, dj, mother) * std

    # We calculate the normalized wavelet and Fourier power spectra, as well
    # as the Fourier equivalent periods for each wavelet scale.
    power = (np.abs(wave))**2
    fft_power = np.abs(fft)**2
    period = 1 / freqs

    # We could stop at this point and plot our results. However we are also
    # interested in the power spectra significance test. The power is
    # significant where the ratio ``power / sig95 > 1``.
    signif, fft_theor = wavelet.significance(1.0, dt, scales, 0, alpha,
                                             significance_level=siglev,
                                             wavelet=mother)
    sig95 = np.ones([1, N]) * signif[:, None]
    sig95 = power / sig95

    # Then, we calculate the global wavelet spectrum and determine its
    # significance level.
    glbl_power = power.mean(axis=1)
    dof = N - scales  # Correction for padding at edges
    glbl_signif, tmp = wavelet.significance(var, dt, scales, 1, alpha,
                                            significance_level=siglev,
                                            dof=dof, wavelet=mother)

    # We also calculate the scale average between pl and pr, and its
    # significance level.
    sel = find((period >= pl) & (period < pr))
    Cdelta = mother.cdelta
    scale_avg = (scales * np.ones((N, 1))).transpose()
    scale_avg = power / scale_avg  # As in Torrence and Compo (1998) eq. 24
    scale_avg = var * dj * dt / Cdelta * scale_avg[sel, :].sum(axis=0)
    scale_avg_signif, tmp = wavelet.significance(
        var, dt, scales, 2, alpha, significance_level=siglev,
        dof=[scales[sel[0]], scales[sel[-1]]], wavelet=mother)

    # levels = [0.25, 0.5, 1, 2, 4, 8, 16, 32]
    if levels is None:
        levels = np.linspace(0.0, 128., 256)
    # ax.contourf(t, np.log2(period), np.log2(power), np.log2(levels),
    #             extend='both', cmap=plt.cm.viridis)
    im = ax.contourf(t_, np.array(period) * 24 * 60, power, levels,
                     extend='both', cmap=cmap, zorder=-20)
    # for pathcoll in im.collections:
    #     pathcoll.set_rasterized(True)
    ax.set_rasterization_zorder(-10)
    # im = ax.pcolormesh(t_, np.array(period) * 24 * 60, power, vmax=32.,
    #                    vmin=0, cmap=cmap)
    # im = ax.contourf(t, np.array(period) * 24 * 60, np.log2(power),
    #                  np.log2(levels), extend='both', cmap=cmap)
    extent = [t_.min(), t_.max(), 0, max(period) * 24 * 60]
    # ax.contour(t, np.log2(period), sig95, [-99, 1], colors='k',
    #            linewidths=1, extent=extent)
    CS = ax.contour(t_, np.array(period) * 24 * 60, sig95 * siglev,
                    [-99, 1.0 * siglev], colors='k', linewidths=1,
                    extent=extent)
    ax.clabel(CS, inline=1, fmt='%1.3f')
    ax.fill(np.concatenate([t_, t_[-1:] + dt, t_[-1:] + dt, t_[:1] - dt,
                            t_[:1] - dt]),
            np.concatenate([np.array(coi), [2**(1e-9)],
                            np.array(period[-1:]), np.array(period[-1:]),
                            [2**(1e-9)]]) * 24 * 60,
            color='k', alpha=0.75, edgecolor='None', facecolor='k',
            hatch='x')
    # ### Note: Matplotlib does not display hatching when rendering to pdf.
    # ### Here is a workaround.
    # ax.fill(np.concatenate([t_, t_[-1:] + dt, t_[-1:] + dt, t_[:1] - dt,
    #                         t_[:1] - dt]),
    #         np.concatenate([np.array(coi), [2**(1e-9)],
    #                         np.array(period[-1:]), np.array(period[-1:]),
    #                         [2**(1e-9)]]) * 24 * 60,
    #         color='None', alpha=1.0, edgecolor='k', hatch='x')
    # ax.set_title('b) {} Wavelet Power Spectrum ({})'.format(label,
    #                                                         mother.name))
    # ax.set_rasterization_zorder(20)
    # Yticks = np.arange(np.ceil(np.array(period.min() * 24 * 60)),
    #                    np.ceil(np.array(period.max() * 24 * 60)))
    # ax.set_yticks(np.array(Yticks))
    # ax.set_yticklabels(Yticks)

    ax2.plot(glbl_signif, np.array(period) * 24 * 60, 'k--')
    # ax2.plot(var * fft_theor, np.array(period) * 24 * 60, '--',
    #          color='#cccccc')
    # ax2.plot(var * fft_power, np.array(1. / fftfreqs) * 24 * 60, '-',
    #          color='#cccccc', linewidth=1.)
    ax2.plot(var * glbl_power, np.array(period) * 24 * 60, 'k-', linewidth=1)
    if period_pltlim:
        mperiod = ma.masked_outside(np.array(period), period_pltlim[0],
                                    period_pltlim[1])
        mpower = ma.masked_array(var * glbl_power, mask=mperiod.mask)
    else:
        mpower = ma.masked_array(var * glbl_power)
    # ax2.set_title('c) Global Wavelet Spectrum')
    ax2.set_xlabel(r'Power [{}]'.format(units))
    ax2.set_xlim([0, mpower.compressed().max() + var])
    # print(glbl_power)
    # ax2.set_ylim(np.array([period.min(), period.max()]))
    # ax2.set_yticks(np.array(Yticks))
    # ax2.set_yticklabels(Yticks)
    plt.setp(ax2.get_yticklabels(), visible=False)

    if period_pltlim:
        ax.set_ylim(np.array(period_pltlim) * 24 * 60)
    else:
        ax.set_ylim(np.array([period.min(), period.max()]) * 24 * 60)

    return im
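# Hedged usage sketch for plot_wavelet: t is a matplotlib date-like axis
# in days and the period axis is drawn in minutes (period * 24 * 60);
# the cadence, band limits and date offset below are illustrative
# assumptions.
import numpy as np
import matplotlib.pyplot as plt

dt = 1 / 24 / 60                         # one-minute cadence, in days
t = 737000.0 + np.arange(2048) * dt
dat = np.sin(2 * np.pi * np.arange(2048) / 5) + np.random.randn(2048)

fig, (ax, ax2) = plt.subplots(1, 2, sharey=True,
                              gridspec_kw={'width_ratios': [3, 1]})
im = plot_wavelet(t, dat, dt, pl=3 / 24 / 60, pr=10 / 24 / 60,
                  period_pltlim=[2 / 24 / 60, 30 / 24 / 60],
                  ax=ax, ax2=ax2)
plt.show()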
def get_graph_from_file(in_filepath, out_folder, out_filename):
    # Get data
    # TODO there are different formats of file
    # TODO implement different parsers by parameters of function
    p1 = numpy.genfromtxt(in_filepath)

    # TODO replace these placeholder metadata with the real series metadata
    dat = p1
    title = 'NINO3 Sea Surface Temperature'
    label = 'NINO3 SST'
    units = 'degC'

    # Values for calculations
    # TODO spike about args
    t0 = 12.0  # start time
    dt = 0.5  # step of differentiation - in minutes

    N = dat.size
    t = numpy.arange(0, N) * dt + t0

    p = numpy.polyfit(t - t0, dat, 1)
    dat_notrend = dat - numpy.polyval(p, t - t0)
    std = dat_notrend.std()  # Standard deviation
    var = std**2  # Variance
    dat_norm = dat_notrend / std  # Normalized dataset

    mother = wavelet.Morlet(6)
    s0 = 2 * dt  # Starting scale, in this case 2 * 0.5 minutes = 1 minute
    dj = 1 / 12  # Twelve sub-octaves per octave
    J = 7 / dj  # Seven powers of two with dj sub-octaves
    alpha, _, _ = wavelet.ar1(dat)  # Lag-1 autocorrelation for red noise

    wave, scales, freqs, coi, fft, fftfreqs = wavelet.cwt(
        dat_norm, dt, dj, s0, J, mother)
    iwave = wavelet.icwt(wave, scales, dt, dj, mother) * std

    power = (numpy.abs(wave))**2
    fft_power = numpy.abs(fft)**2
    period = 1 / freqs
    power /= scales[:, None]

    signif, fft_theor = wavelet.significance(1.0, dt, scales, 0, alpha,
                                             significance_level=0.95,
                                             wavelet=mother)
    sig95 = numpy.ones([1, N]) * signif[:, None]
    sig95 = power / sig95

    glbl_power = power.mean(axis=1)
    dof = N - scales  # Correction for padding at edges
    glbl_signif, tmp = wavelet.significance(var, dt, scales, 1, alpha,
                                            significance_level=0.95, dof=dof,
                                            wavelet=mother)

    sel = find((period >= 2) & (period < 8))
    Cdelta = mother.cdelta
    scale_avg = (scales * numpy.ones((N, 1))).transpose()
    scale_avg = power / scale_avg  # As in Torrence and Compo (1998) eq. 24
    scale_avg = var * dj * dt / Cdelta * scale_avg[sel, :].sum(axis=0)
    scale_avg_signif, tmp = wavelet.significance(
        var, dt, scales, 2, alpha, significance_level=0.95,
        dof=[scales[sel[0]], scales[sel[-1]]], wavelet=mother)

    # Prepare the figure
    pyplot.close('all')
    # pyplot.ioff()
    figprops = dict(dpi=144)
    fig = pyplot.figure(**figprops)

    # Second sub-plot, the normalized wavelet power spectrum and significance
    # level contour lines and cone of influence hatched area. Note that
    # period scale is logarithmic.
    bx = pyplot.axes([0.1, 0.37, 0.65, 0.28])
    levels = [0.0625, 0.125, 0.25, 0.5, 1, 2, 4, 8, 16]
    bx.contourf(t, period, numpy.log2(power), numpy.log2(levels),
                extend='both', cmap=pyplot.cm.viridis)
    extent = [t.min(), t.max(), 0, max(period)]
    bx.contour(t, period, sig95, [-99, 1], colors='k', linewidths=2,
               extent=extent)
    bx.set_title('{} Wavelet Power Spectrum ({})'.format(label, mother.name))
    bx.set_ylabel('Period (minutes)')
    #
    # Yticks = 2 ** numpy.arange(numpy.ceil(numpy.log2(period.min())),
    #                            numpy.ceil(numpy.log2(period.max())))
    # bx.set_yticks(numpy.log2(Yticks))
    # bx.set_yticklabels(Yticks)
    bx.set_ylim([2, 20])

    # Save graph to file
    # TODO implement
    # pyplot.savefig('{}/{}.png'.format(out_folder, out_filename))
    # ----------------------------------------------
    # or show the graph
    pyplot.show()
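# The ``power /= scales[:, None]`` line above (and in several other
# snippets in this section) applies the bias rectification suggested by
# Liu et al. (2007), dividing the local spectrum by its scale,
#
# .. math:: \widetilde{W}_n^2(s_j) = \frac{|W_n(s_j)|^2}{s_j}
#
# so that spectral peaks at different scales become directly comparable.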
dat = mei
N = dat.size
t = np.arange(0, N) * dt + t0

p = np.polyfit(t - t0, dat, 1)
dat_notrend = dat - np.polyval(p, t - t0)
std = dat_notrend.std()
var = std**2
dat_norm = dat_notrend / std

mother = wavelet.Morlet(6)
s0 = 2 * dt
dj = 1 / 12
J = 7 / dj
alpha, _, _ = wavelet.ar1(dat)

wave, scales, freqs, coi, fft, fftfreqs = wavelet.cwt(dat_norm, dt, dj, s0,
                                                      J, mother)
iwave = wavelet.icwt(wave, scales, dt, dj, mother) * std

power = (np.abs(wave))**2
fft_power = np.abs(fft)**2
period = 1 / freqs
power /= scales[:, None]

signif, fft_theor = wavelet.significance(1.0, dt, scales, 0, alpha,
                                         significance_level=0.95,
                                         wavelet=mother)
def graph_wavelet(data_xs, title, lims, font=11, params=default_params):
    a_lims, b_lims, d_lims = lims
    plt.rcParams.update({'font.size': font})
    return_data = {}

    N = len(data_xs)
    dt = (2 * params['per_pixel']) / N  # How much cm each pixel equals
    t = np.arange(0, N) * dt
    t = t - np.mean(t)
    t0 = 0

    per_min = params['min_per']
    per_max = params['max_per']
    units = params['units']
    sx = params['sx']
    octaves = params['octaves']
    dj = 1 / params['suboctaves']  # sub-octaves
    order = params['order']

    var, std, dat_norm = detrend(data_xs)

    mother = cwt.DOG(order)  # This is the mother wavelet
    # This is the starting scale, which in our case is two pixels or
    # 0.04 cm / 40 um
    s0 = sx * dt
    J = octaves / dj  # This is powers of two with dj sub-octaves
    return_data['var'] = var
    return_data['std'] = std

    try:
        # This calculates the lag-1 autocorrelation for red noise
        alpha, _, _ = cwt.ar1(dat_norm)
    except Exception:
        alpha = 0.95

    wave, scales, freqs, coi, fft, fftfreqs = cwt.cwt(dat_norm, dt, dj, s0,
                                                      J, mother)
    return_data['scales'] = scales
    return_data['freqs'] = freqs
    return_data['fft'] = fft
    iwave = cwt.icwt(wave, scales, dt, dj, mother) * std

    power = (np.abs(wave)) ** 2
    fft_power = np.abs(fft) ** 2
    period = 1 / freqs
    power /= scales[:, None]  # This is an option suggested by Liu et al.

    # Next we calculate the significance of the power spectra: significant
    # where power / sig95 > 1
    signif, fft_theor = cwt.significance(1.0, dt, scales, 0, alpha,
                                         significance_level=0.95,
                                         wavelet=mother)
    sig95 = np.ones([1, N]) * signif[:, None]
    sig95 = power / sig95

    glbl_power = power.mean(axis=1)
    dof = N - scales  # Correction for padding at edges
    glbl_signif, tmp = cwt.significance(var, dt, scales, 1, alpha,
                                        significance_level=0.95, dof=dof,
                                        wavelet=mother)

    sel = find((period >= per_min) & (period < per_max))
    Cdelta = mother.cdelta
    scale_avg = (scales * np.ones((N, 1))).transpose()
    scale_avg = power / scale_avg  # As in Torrence and Compo (1998) eq. 24
    scale_avg = var * dj * dt / Cdelta * scale_avg[sel, :].sum(axis=0)
    scale_avg_signif, tmp = cwt.significance(
        var, dt, scales, 2, alpha, significance_level=0.95,
        dof=[scales[sel[0]], scales[sel[-1]]], wavelet=mother)

    # Prepare the figure
    plt.close('all')
    plt.ioff()
    figprops = dict(figsize=(11, 11), dpi=72)
    fig = plt.figure(**figprops)

    wx = plt.axes([0.77, 0.75, 0.2, 0.2])
    imz = 0
    for idxy in range(0, len(period), 10):
        wx.plot(t, mother.psi(t / period[idxy]) + imz, linewidth=1.5)
        imz += 1
    wx.xaxis.set_ticklabels([])

    # First sub-plot, the original time series anomaly and inverse wavelet
    # transform.
    ax = plt.axes([0.1, 0.75, 0.65, 0.2])
    ax.plot(t, data_xs, 'k', linewidth=1.5)
    ax.plot(t, iwave, '-', linewidth=1, color=[0.5, 0.5, 0.5])
    ax.plot(t, dat_norm, '--', linewidth=1.5, color=[0.5, 0.5, 0.5])
    if a_lims is not None:
        ax.set_ylim([-a_lims, a_lims])
    ax.set_title('a) {}'.format(title))
    ax.set_ylabel(r'Displacement [{}]'.format(units))
    # ax.set_ylim([-20, 20])

    # Second sub-plot, the normalized wavelet power spectrum and significance
    # level contour lines and cone of influence hatched area. Note that
    # period scale is logarithmic.
    bx = plt.axes([0.1, 0.37, 0.65, 0.28], sharex=ax)
    levels = [0.0625, 0.125, 0.25, 0.5, 1, 2, 4, 8, 16]
    bx.contourf(t, np.log2(period), np.log2(power), np.log2(levels),
                extend='both', cmap=plt.cm.viridis)
    extent = [t.min(), t.max(), 0, max(period)]
    bx.contour(t, np.log2(period), sig95, [-99, 1], colors='k', linewidths=2,
               extent=extent)
    bx.fill(np.concatenate([t, t[-1:] + dt, t[-1:] + dt, t[:1] - dt,
                            t[:1] - dt]),
            np.concatenate([np.log2(coi), [1e-9], np.log2(period[-1:]),
                            np.log2(period[-1:]), [1e-9]]),
            'k', alpha=0.3, hatch='x')
    bx.set_title('b) {} Octaves Wavelet Power Spectrum [{}({})]'.format(
        octaves, mother.name, order))
    bx.set_ylabel('Period (cm)')
    #
    Yticks = 2 ** np.arange(np.ceil(np.log2(period.min())),
                            np.ceil(np.log2(period.max())))
    bx.set_yticks(np.log2(Yticks))
    bx.set_yticklabels(Yticks)

    # Third sub-plot, the global wavelet and Fourier power spectra and
    # theoretical noise spectra. Note that period scale is logarithmic.
    cx = plt.axes([0.77, 0.37, 0.2, 0.28], sharey=bx)
    cx.plot(glbl_signif, np.log2(period), 'k--')
    cx.plot(var * fft_theor, np.log2(period), '--', color='#cccccc')
    cx.plot(var * fft_power, np.log2(1. / fftfreqs), '-', color='#cccccc',
            linewidth=1.)
    return_data['global_power'] = var * glbl_power
    return_data['fourier_spectra'] = var * fft_power
    return_data['per'] = np.log2(period)
    return_data['amp'] = np.log2(1. / fftfreqs)
    cx.plot(var * glbl_power, np.log2(period), 'k-', linewidth=1.5)
    cx.set_title('c) Power Spectrum')
    cx.set_xlabel(r'Power [({})^2]'.format(units))
    if b_lims is not None:
        cx.set_xlim([0, b_lims])
    # cx.set_xlim([0, max(glbl_power.max(), var * fft_power.max())])
    # print(max(glbl_power.max(), var * fft_power.max()))
    cx.set_ylim(np.log2([period.min(), period.max()]))
    cx.set_yticks(np.log2(Yticks))
    cx.set_yticklabels(Yticks)
    return_data['yticks'] = Yticks
    plt.setp(cx.get_yticklabels(), visible=False)

    # Fourth sub-plot, the scale averaged wavelet spectrum.
    dx = plt.axes([0.1, 0.07, 0.65, 0.2], sharex=ax)
    dx.axhline(scale_avg_signif, color='k', linestyle='--', linewidth=1.)
    dx.plot(t, scale_avg, 'k-', linewidth=1.5)
    dx.set_title('d) {}--{} cm scale-averaged power'.format(per_min,
                                                            per_max))
    dx.set_xlabel('Displacement (cm)')
    dx.set_ylabel(r'Average variance [{}]'.format(units))
    ax.set_xlim([t.min(), t.max()])
    if d_lims is not None:
        dx.set_ylim([0, d_lims])

    plt.savefig(r"C:\pyscripts\wavelet_analysis\Calibrated Images\{}".format(
        title))
    return fig, return_data
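# Hedged usage sketch for graph_wavelet: default_params is defined
# elsewhere, as are the module-level ``import pycwt as cwt`` and the
# detrend() helper the function relies on. The keys below are the ones
# the function actually reads; every value is an illustrative
# assumption.
import numpy as np

default_params = {
    'per_pixel': 10.0,    # half-length of the profile in cm (assumed)
    'min_per': 0.5,       # scale-average band lower bound, cm
    'max_per': 4.0,       # scale-average band upper bound, cm
    'units': 'cm',
    'sx': 2,              # starting scale, in samples
    'octaves': 7,
    'suboctaves': 12,
    'order': 2,           # DOG mother wavelet order
}

profile = np.sin(np.linspace(0, 20 * np.pi, 1024))
fig, results = graph_wavelet(profile, 'demo', lims=(None, None, None),
                             params=default_params)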
def simple_sample(sls):
    # Then, we load the dataset and define some data related parameters. In
    # this case, the first 19 lines of the data file contain meta-data, that
    # we ignore, since we set them manually (*i.e.* title, units).
    # url = 'http://paos.colorado.edu/research/wavelets/wave_idl/nino3sst.txt'
    # dat = numpy.genfromtxt(url, skip_header=19)
    title = 'Sentence Length'
    label = 'Zhufu Sentence Length'
    units = 'Characters'
    t0 = 1
    dt = 1  # In sentences
    dat = numpy.array(sls)

    # We also create a time array.
    N = dat.size
    t = numpy.arange(0, N) * dt + t0

    # We write the following code to detrend and normalize the input data by
    # its standard deviation. Sometimes detrending is not necessary and
    # simply removing the mean value is good enough. However, if your dataset
    # has a well defined trend, such as the Mauna Loa CO\ :sub:`2` dataset
    # available in the above mentioned website, it is strongly advised to
    # perform detrending. Here, we fit a one-degree polynomial function and
    # then subtract it from the original data.
    p = numpy.polyfit(t - t0, dat, 1)
    dat_notrend = dat - numpy.polyval(p, t - t0)
    std = dat_notrend.std()  # Standard deviation
    var = std**2  # Variance
    dat_norm = dat_notrend / std  # Normalized dataset

    # The next step is to define some parameters of our wavelet analysis. We
    # select the mother wavelet, in this case the Morlet wavelet with
    # :math:`\omega_0=6`.
    mother = wavelet.Morlet(6)
    s0 = 2 * dt  # Starting scale, in this case 2 * dt = 2 sentences
    dj = 1 / 12  # Twelve sub-octaves per octave
    J = 7 / dj  # Seven powers of two with dj sub-octaves
    alpha, _, _ = wavelet.ar1(dat)  # Lag-1 autocorrelation for red noise

    # The following routines perform the wavelet transform and inverse
    # wavelet transform using the parameters defined above. Since we have
    # normalized our input time-series, we multiply the inverse transform by
    # the standard deviation.
    wave, scales, freqs, coi, fft, fftfreqs = wavelet.cwt(
        dat_norm, dt, dj, s0, J, mother)
    iwave = wavelet.icwt(wave, scales, dt, dj, mother) * std

    # We calculate the normalized wavelet and Fourier power spectra, as well
    # as the Fourier equivalent periods for each wavelet scale.
    power = (numpy.abs(wave))**2
    fft_power = numpy.abs(fft)**2
    period = 1 / freqs

    # We could stop at this point and plot our results. However we are also
    # interested in the power spectra significance test. The power is
    # significant where the ratio ``power / sig95 > 1``.
    signif, fft_theor = wavelet.significance(1.0, dt, scales, 0, alpha,
                                             significance_level=0.95,
                                             wavelet=mother)
    sig95 = numpy.ones([1, N]) * signif[:, None]
    sig95 = power / sig95

    # Then, we calculate the global wavelet spectrum and determine its
    # significance level.
    glbl_power = power.mean(axis=1)
    dof = N - scales  # Correction for padding at edges
    glbl_signif, tmp = wavelet.significance(var, dt, scales, 1, alpha,
                                            significance_level=0.95, dof=dof,
                                            wavelet=mother)

    # We also calculate the scale average between 2 and 8 sentences, and its
    # significance level.
    sel = find((period >= 2) & (period < 8))
    Cdelta = mother.cdelta
    scale_avg = (scales * numpy.ones((N, 1))).transpose()
    scale_avg = power / scale_avg  # As in Torrence and Compo (1998) eq. 24
    scale_avg = var * dj * dt / Cdelta * scale_avg[sel, :].sum(axis=0)
    scale_avg_signif, tmp = wavelet.significance(
        var, dt, scales, 2, alpha, significance_level=0.95,
        dof=[scales[sel[0]], scales[sel[-1]]], wavelet=mother)

    # Finally, we plot our results in four different subplots containing the
    # (i) original series anomaly and the inverse wavelet transform; (ii) the
    # wavelet power spectrum; (iii) the global wavelet and Fourier spectra;
    # and (iv) the Fourier power spectrum. In all sub-plots the significance
    # levels are either included as dotted lines or as filled contour lines.

    # Prepare the figure
    pyplot.close('all')
    pyplot.ioff()
    figprops = dict(figsize=(11, 8), dpi=72)
    fig = pyplot.figure(**figprops)

    # First sub-plot, the original time series anomaly and inverse wavelet
    # transform.
    ax = pyplot.axes([0.1, 0.75, 0.65, 0.2])
    ax.plot(t, iwave, '-', linewidth=1, color=[0.5, 0.5, 0.5])
    ax.plot(t, dat, 'k', linewidth=1.5)
    ax.set_title('a) {}'.format(title))
    ax.set_ylabel(r'{} [{}]'.format(label, units))

    # Second sub-plot, the normalized wavelet power spectrum and significance
    # level contour lines and cone of influence hatched area. Note that
    # period scale is logarithmic.
    bx = pyplot.axes([0.1, 0.37, 0.65, 0.28], sharex=ax)
    levels = [0.0625, 0.125, 0.25, 0.5, 1, 2, 4, 8, 16]
    bx.contourf(t, numpy.log2(period), numpy.log2(power), numpy.log2(levels),
                extend='both', cmap=pyplot.cm.viridis)
    extent = [t.min(), t.max(), 0, max(period)]
    bx.contour(t, numpy.log2(period), sig95, [-99, 1], colors='k',
               linewidths=2, extent=extent)
    bx.fill(numpy.concatenate([t, t[-1:] + dt, t[-1:] + dt, t[:1] - dt,
                               t[:1] - dt]),
            numpy.concatenate([numpy.log2(coi), [1e-9],
                               numpy.log2(period[-1:]),
                               numpy.log2(period[-1:]), [1e-9]]),
            'k', alpha=0.3, hatch='x')
    bx.set_title('b) {} Wavelet Power Spectrum ({})'.format(label,
                                                            mother.name))
    bx.set_ylabel('Period (sentences)')
    #
    Yticks = 2**numpy.arange(numpy.ceil(numpy.log2(period.min())),
                             numpy.ceil(numpy.log2(period.max())))
    bx.set_yticks(numpy.log2(Yticks))
    bx.set_yticklabels(Yticks)

    # Third sub-plot, the global wavelet and Fourier power spectra and
    # theoretical noise spectra. Note that period scale is logarithmic.
    cx = pyplot.axes([0.77, 0.37, 0.2, 0.28], sharey=bx)
    cx.plot(glbl_signif, numpy.log2(period), 'k--')
    cx.plot(var * fft_theor, numpy.log2(period), '--', color='#cccccc')
    cx.plot(var * fft_power, numpy.log2(1. / fftfreqs), '-', color='#cccccc',
            linewidth=1.)
    cx.plot(var * glbl_power, numpy.log2(period), 'k-', linewidth=1.5)
    cx.set_title('c) Global Wavelet Spectrum')
    cx.set_xlabel(r'Power [({})^2]'.format(units))
    cx.set_xlim([0, glbl_power.max() + var])
    cx.set_ylim(numpy.log2([period.min(), period.max()]))
    cx.set_yticks(numpy.log2(Yticks))
    cx.set_yticklabels(Yticks)
    pyplot.setp(cx.get_yticklabels(), visible=False)

    # Fourth sub-plot, the Fourier power spectrum against frequency, with
    # the theoretical noise spectrum for reference. Note that the power
    # scale is logarithmic.
    dx = pyplot.axes([0.1, 0.07, 0.65, 0.2])
    dx.plot(freqs, numpy.log2(var * fft_theor), '--', color='#cccccc')
    dx.plot(fftfreqs, numpy.log2(fft_power), 'k-', linewidth=1.5)
    dx.set_title('d) Fourier Power Spectrum')
    dx.set_ylabel(r'Power [({})^2]'.format(units))
    dx.set_xlim([0, 2 * fftfreqs.max()])
    Yticks = 2**numpy.arange(numpy.ceil(numpy.log2(fft_power.min())),
                             numpy.ceil(numpy.log2(fft_power.max())))
    dx.set_ylim(numpy.log2([fft_power.min(), fft_power.max()]))
    dx.set_yticks(numpy.log2(Yticks))
    dx.set_yticklabels(Yticks)

    pyplot.show()
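# Hedged usage sketch: simple_sample expects a list of sentence lengths
# in characters; the numbers below are made up for illustration.
sentence_lengths = [12, 35, 8, 22, 41, 17, 29, 33, 9, 26] * 20
simple_sample(sentence_lengths)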