Example #1
dj = 0.25  # Four sub-octaves per octave
s0 = -1  # 2 * dt  # Starting scale; -1 selects the default (2 * dt)
J = -1  # 7 / dj  # Number of scales; -1 selects the default (7 / dj would give seven powers of two)
alpha = 0.0  # Lag-1 autocorrelation for white noise
#alpha = numpy.correlate(var, var, 'same')
#alpha /= alpha.max()
#alpha = 0.5 * (alpha[N / 2 + 1] + alpha[N / 2 + 2] ** 0.5)
#
#
mother = wavelet.Morlet(6.)  # Morlet mother wavelet with wavenumber=6
#mother = wavelet.Mexican_hat()       # Mexican hat wavelet, or DOG with m=2
#mother = wavelet.Paul(4)             # Paul wavelet with order m=4

# The following routines perform the wavelet transform and significance
# analysis for the chosen data set.
wave, scales, freqs, coi, fft, fftfreqs = wavelet.cwt(var, dt, dj, s0, J,
                                                      mother)
iwave = wavelet.icwt(wave, scales, dt, dj, mother)
power = (abs(wave))**2  # Normalized wavelet power spectrum
fft_power = std2 * abs(fft)**2  # FFT power spectrum
period = 1. / freqs

signif, fft_theor = wavelet.significance(1.0,
                                         dt,
                                         scales,
                                         0,
                                         alpha,
                                         significance_level=slevel,
                                         wavelet=mother)
sig95 = (signif * numpy.ones((N, 1))).transpose()
sig95 = power / sig95  # Where ratio > 1, power is significant
Example #2
def plot_mesaclipped(t,
                     x,
                     fs,
                     mother1,
                     mother2,
                     freqs,
                     sync_sqz,
                     fig_filename,
                     snr=np.inf,
                     noise_gamma=0,
                     num_scales=2000,
                     split=False):

    print(f'plotting {fig_filename}')

    # add noise
    noise = colorednoise.powerlaw_psd_gaussian(noise_gamma, len(t))
    x = x + np.sqrt(np.var(x) / snr) * noise

    dt = t[1] - t[0]

    min_cycles = 8

    if sync_sqz:
        syncsqz_freqs = freqs
    else:
        syncsqz_freqs = None

    # compute wavelet transforms
    amps = []
    mus = []
    vmax = -np.inf
    xr = []  # for reconstructed signals
    cois = []
    titles = []
    # for i, (mother, c) in enumerate(zip([mother1, mother1, mother2, mother2], [0, 1, 0, 1])):
    for i, (mother, c, title) in enumerate(
            zip([mother1, mother2], [0, 1], ['Conventional', 'MesaClip'])):

        if sync_sqz:
            scales = np.geomspace(0.5 * dt, t[-1] - t[0], num_scales)
        else:
            scales = mother.convert_freq_scale(freqs)

        if split:
            x_pos = x.copy()
            x_neg = (-x).copy()
            x_pos[x_pos < 0] = 0
            x_neg[x_neg < 0] = 0
            w_pos, _, coi_time_idxs, coi_freq_idxs = wavelet.cwt(
                x_pos,
                dt,
                scales,
                mother,
                syncsqz_freqs=syncsqz_freqs,
                min_cycles=c * min_cycles)
            w_neg, _, coi_time_idxs, coi_freq_idxs = wavelet.cwt(
                x_neg,
                dt,
                scales,
                mother,
                syncsqz_freqs=syncsqz_freqs,
                min_cycles=c * min_cycles)
            phi_pos = np.unwrap(np.angle(w_pos))
            phi_neg = np.unwrap(np.angle(w_neg))
            w = (np.abs(w_pos) + np.abs(w_neg)) * np.exp(0.5j *
                                                         (phi_pos + phi_neg))
        else:
            w, _, coi_time_idxs, coi_freq_idxs = wavelet.cwt(
                x,
                dt,
                scales,
                mother,
                syncsqz_freqs=syncsqz_freqs,
                min_cycles=c * min_cycles)

        coi_times = t[coi_time_idxs] - t[0]
        coi_freqs = freqs[coi_freq_idxs]
        cois.append((coi_times, coi_freqs))

        amp = np.abs(w)
        mus.append(np.sqrt(np.mean(amp**2, axis=1)))

        if sync_sqz:
            # smooth slightly so that sharp features remain visible in the
            # high-resolution synchrosqueezed map
            amp = scipy.ndimage.gaussian_filter(amp, [2, 2])

        amps.append(amp)
        vmax = max(vmax, np.max(amp))
        xr.append(wavelet.reconstruct(w, mother, scales))

        titles.append(title)

    nw = len(amps)

    # create figure and axes
    fig = plt.figure(figsize=(5, 4))
    gs = gridspec.GridSpec(1 + nw,
                           2,
                           width_ratios=[1, 0.2],
                           height_ratios=[
                               1,
                           ] + [
                               2,
                           ] * nw)
    ax = np.zeros((1 + nw, 2), dtype=object)  # plain object dtype; np.object is removed in newer NumPy
    ax[0, 0] = fig.add_subplot(gs[0, 0])  # signal
    ax_s = None
    for i in range(1, 1 + nw):  # wavelet transform results
        ax[i, 0] = fig.add_subplot(gs[i, 0])
        ax[i, 1] = fig.add_subplot(gs[i, 1], sharex=ax_s)
        ax_s = ax[i, 1]

    # convenience axes variables
    ax_sig = ax[0, 0]
    ax_w = ax[1:, 0]
    ax_s = ax[1:, 1]

    # plot signal
    ax_sig.plot(t, x, c='k', lw=0.5)
    ax_sig.tick_params(which='both', direction='out')
    ax_sig.set_xlim(t[0], t[-1])
    ax_sig.set_xticklabels([])
    ax_sig.set_yticks([])
    ax_sig.xaxis.set_ticks_position('bottom')
    ax_sig.yaxis.set_ticks_position('left')
    ax_sig.axis('off')
    ax_sig.set_title('Signal', loc='left')

    # pcolormesh grid coordinates
    t_edges = utils.make_edges(t)
    f_edges = utils.make_edges(freqs, log=True)
    t_grid, f_grid = np.meshgrid(t_edges, f_edges)

    # cmap = 'gray_r'
    # cmap = 'binary'
    # cmap = 'bone_r'
    cmap = 'Blues'

    # iterate over each amplitude type
    for pi, (mu, amp, (coi_times, coi_freqs),
             title) in enumerate(zip(mus, amps, cois, titles)):

        # plot amplitudes
        vmax = np.max(amp)
        # vmin = -0.05 * vmax  # when using a white to color colormap, make background slightly off-white
        vmin = 0
        ax_w[pi].pcolormesh(t_grid,
                            f_grid,
                            amp,
                            cmap=cmap,
                            rasterized=True,
                            vmin=vmin,
                            vmax=vmax)
        ax_w[pi].set_xlim(t[0], t[-1])

        ax_w[pi].set_title(title, loc='left')

        # # plot cone-of-influence
        # coi_kwargs = dict(c='k', ls='--', lw=0.5, alpha=0.5)
        # ax_w[pi].plot(t[ 0] + coi_times, coi_freqs, **coi_kwargs)
        # ax_w[pi].plot(t[-1] - coi_times, coi_freqs, **coi_kwargs)

        # plot time-averages
        ax_s[pi].fill(np.r_[0, mu, 0],
                      np.r_[freqs[0], freqs, freqs[-1]],
                      c='k',
                      lw=0,
                      zorder=2,
                      alpha=0.2)
        ax_s[pi].plot(mu, freqs, c='k', lw=1, zorder=3)

        # setup axes ranges and ticks
        for ax_ws in [ax_w[pi], ax_s[pi]]:
            ax_ws.tick_params(which='both', direction='out')
            ax_ws.xaxis.set_ticks_position('bottom')
            ax_ws.yaxis.set_ticks_position('left')
            ax_ws.set_yscale('log')
            ax_ws.set_ylim(freqs[0], freqs[-1])

        for f in fs:
            ax_s[pi].axhline(f, lw=1, ls=':', color='r', alpha=1.0, zorder=5)

        ax_w[pi].set_ylabel(r'Freq (Hz)')
        ax_w[pi].set_xlim(ax[0, 0].get_xlim())

        # clean up signal axes
        ax_s[pi].set_yticklabels([])
        ax_s[pi].set_xticks([])
        for side in ['top', 'right', 'bottom']:
            ax_s[pi].spines[side].set_visible(False)

    for i in range(len(ax_w) - 1):
        ax_w[i].set_xticklabels([])

    ax[-1, 0].set_xlabel(r'Time (s)')

    fig.tight_layout(h_pad=0.2, w_pad=0.5, rect=[-0.025, -0.03, 1.025, 0.98])
    fig.savefig(fig_filename, dpi=300)
Example #3
def main():

    beta_opt = calc_optimum_beta()
    gamma = 3

    n = 2**12
    freq = np.arange(0, n)
    omega = freq * 2 * np.pi

    nb = 2**10
    betas = np.geomspace(0.5, 16, nb)
    amps = np.zeros(nb)

    for i, beta in enumerate(betas):

        mother = wavelet.Morse(beta=beta, gam=gamma)
        s = mother.convert_freq_scale(2)
        psi_fspace = mother.freq_domain(s * omega)

        # wavelet transform of Dirac comb
        x = np.fft.ifft(n * np.sqrt(s) * psi_fspace)

        # amplitude halfway between two Diracs, used as the ratio of the 1st harmonic to the fundamental
        a = abs(x[len(x) // 2])

        amps[i] = a

    betas_plot = [0.5, beta_opt, 4, 12]

    fig = plt.figure(figsize=(8, 4))
    gs = gridspec.GridSpec(2, len(betas_plot), height_ratios=[1.5, 1])

    # plot amplitudes at varying betas
    ax = fig.add_subplot(gs[0, :])
    ax.plot(betas, amps, c='k', lw=2)
    ax.set_xlabel('$\\beta$')
    ax.set_ylabel('Amplitude')
    ax.set_title(
        'Amplitude at 1st harmonic halfway between two Dirac delta functions')
    ax.set_xscale('log')
    ax.set_yscale('log')
    ax.set_xticks([0.5, 1, beta_opt, 2, 4, 8, 16])
    ax.set_xticklabels(['0.5', '1', '$\\beta*$', '2', '4', '8', '16'])
    ax.set_xticks([], minor=True)

    t = np.linspace(-10, 10, 2**10)
    dt = t[1] - t[0]
    x = np.zeros_like(t)
    x[len(x) // 2] = 1

    # plot wavelets for a few selected betas
    for i, beta in enumerate(betas_plot):
        ax = fig.add_subplot(gs[1, i], facecolor='0.9')

        if np.abs(beta - beta_opt) < 1e-4:
            ax.set_title(f'$\\beta = \\beta* = {beta:.6g}$')
        else:
            ax.set_title(f'$\\beta = {beta:.6g}$')

        mother = wavelet.Morse(beta=beta, gam=gamma)
        w = wavelet.cwt(x, dt, np.array([1]), mother)[0][0]

        ax.axhline(0, c='k', lw=1, ls='--')
        ax.fill_between(t, -np.abs(w), np.abs(w), color='w', lw=1.5)
        ax.plot(t, np.real(w), lw=1.5)
        ax.plot(t, np.imag(w), lw=1.5)
        ax.set_xlim(t[0], t[-1])

        for spine in ax.spines:
            ax.spines[spine].set_visible(False)
        ax.set_xticks([])
        ax.set_yticks([])

    fig.tight_layout()
    # fig.savefig('../output/beta_opt.pdf', dpi=300)
    fig.savefig('../output/beta_opt.png', dpi=300)
    plt.close(fig)
Example #4
def get_results():

    cache_filename = '../output/real_data_cache.pkl'

    if os.path.exists(cache_filename):
        with open(cache_filename, 'rb') as f:
            result = pickle.load(f)

    else:

        xs = np.loadtxt('../data/real_emgs.txt')
        t = np.arange(xs.shape[1]) / 100  # data sampled at 100 Hz
        n = xs.shape[0]

        freqs = np.geomspace(1 / 8, 8, 100)
        log2_freqs = np.log2(freqs)
        log2_freq_edges = utils.make_edges(log2_freqs)

        dt = t[1] - t[0]
        scales = np.geomspace(2 * dt, 0.5 * (t[-1] - t[0]), 1000)
        mother = wavelet.Morse(beta=1.58174, gam=3)
        min_cycles = [2, 4, 8]

        threshes = np.linspace(0, 1, 50)
        peak_hists = np.zeros((n, len(threshes), len(freqs)))
        amps = np.zeros((n, len(min_cycles), len(freqs)))

        for i in range(n):

            print(f'signal {i + 1}/{n}')

            x = xs[i]
            x = x - scipy.ndimage.gaussian_filter1d(
                x, 2 / dt)  # subtract baseline
            x = x - np.mean(x)
            x = x / np.max(x)
            xs[i, :] = x

            # peak detect on x
            peak_hist = np.zeros((len(threshes), len(freqs)))
            for j, thresh in enumerate(threshes):
                spikes = utils.spike_detect(x, t, thresh)
                if len(spikes) > 1:
                    f = np.log2(1 / np.diff(spikes))
                    h = np.histogram(f, bins=log2_freq_edges)[0].astype(float)

                    if np.max(h) > 0:
                        h = scipy.ndimage.gaussian_filter1d(h, 2)
                        h = h / np.max(h)
                        peak_hist[j, :] = h

            peak_hists[i, ...] = peak_hist

            # wavelet
            for j, k in enumerate(min_cycles):
                w = wavelet.cwt(x,
                                dt,
                                scales,
                                mother,
                                syncsqz_freqs=freqs,
                                min_cycles=k)[0]
                amp = np.mean(np.abs(w)**2, axis=1)
                amp = scipy.ndimage.gaussian_filter1d(amp, 2)
                amps[i, j, :] = np.sqrt(amp)

        result = (xs, t, freqs, threshes, peak_hists, amps)
        with open(cache_filename, 'wb') as f:
            pickle.dump(result, f, protocol=2)

    return result
Example #5
def wavelet_analysis(
    z,
    tm,
    lon=None,
    lat=None,
    mother="Morlet",
    alpha=0.0,
    siglvl=0.95,
    loc=None,
    onlyloc=False,
    periods=None,
    sel_periods=[],
    show=False,
    save="",
    dsave="",
    prefix="",
    labels=dict(),
    title=None,
    name=None,
    fpath="",
    fpattern="",
    std=dict(),
    crange=None,
    levels=None,
    cmap=cm.GMT_no_green,
    debug=False,
):
    """Continuous wavelet transform and significance analysis.

    The analysis is made using the methodology and statistical approach
    suggested by Torrence and Compo (1998).

    Depending on the dimensions of the input array, three different
    kinds of approaches are taken. If the input array is one-dimensional
    then only a simple analysis is performed. If the array is
    two- or three-dimensional then spectral Hovmoller diagrams are drawn
    for each Fourier period given within a range of +/-25%.

    PARAMETERS
        z (array like) :
            Input data. The data array should have one of these forms,
            z[tm], z[tm, lat] or z[tm, lat, lon].
        tm (array like) :
            Time axis. It should contain values in matplotlib date
            format (i.e. number of days since 0001-01-01 UTC).
        lon (array like, optional) :
            Longitude.
        lat (array like, optional) :
            Latitude.
        mother (string, optional) :
            Gives the name of the mother wavelet to be used. Possible
            values are 'Morlet' (default), 'Paul' or 'Mexican hat'.
        alpha (float or dictionary, optional) :
            Lag-1 autocorrelation for background noise.  Default value
            is 0.0 (white noise). If different autocorrelation
            coefficients should be used for different locations, then
            the input should be a dictionary with 'lon', 'lat' and
            'val' keys, as for the std parameter.
        siglvl (float, optional) :
            Significance level. Default value is 0.95.
        loc (array like, optional) :
            Special locations of interest. If the input array has
            higher dimensions, the simple wavelet analysis is also
            performed for each of these locations. The list should
            contain (lon, lat) pairs for each location of interest.
        onlyloc (boolean, optional) :
            If set to true then only the specified locations are
            analysed. The default is false.
        periods (array like, optional) :
            Special Fourier periods of interest in case of analysis of
            higher dimensions (in years).
        sel_periods (array like, optional) :
            Selects the Fourier periods over which the spectral power is averaged.
        show (boolean, optional) :
            If set to true then the resulting maps are shown on screen.
        save (string, optional) :
            The path in which the resulting plots are to be saved. If
            not set, then no images will be saved.
        dsave (string, optional) :
            If set, saves the scale averaged power spectrum series to
            this path. This is especially useful if memory is an issue.
        prefix (string, optional) :
            Prefix to retain naming conventions such as basin.
        labels (dictionary, optional) :
            Sets the labels for the plot axis.
        title (string, array like, optional) :
            Title of each of the selected periods.
        name (string, array like, optional) :
            Name of each of the selected periods. Used when saving the 
            results to files.
        fpath (string, optional) :
            Path for the source files to be loaded when memory issues
            are a concern.
        fpattern (string, optional) :
            Regular expression pattern to match file names.
        std (dictionary, optional) :
            A dictionary containing a map of the standard deviation of
            the analysed time series. To set the longitude and latitude
            coordinates of the map, they should be included as
            separate 'lon' and 'lat' key items. If they are omitted,
            then the regular input parameters are assumed. Accepted
            standard deviation error is set in key 'err' (default value
            is 1e-2).
        crange (array like, optional) :
            Array of power levels to be used in average Hovmoller colour bar.
        levels (array like, optional) :
            Array of power levels to be used in spectrogram colour bar.
        cmap (colormap, optional) :
            Sets the colour map to be used in the plots. The default is
            the Generic Mapping Tools (GMT) no green.
        debug (boolean, optional) :
            If set to True then warnings are shown.

    OUTPUT
        If show or save are set, plots are shown on screen and/or saved to file
        according to the specified parameters.

        If dsave parameter is set, also saves the scale averaged power
        series to files.

    RETURNS
        wave (dictionary) :
            Dictionary containing the resulting calculations from the
            wavelet analysis according to the input parameters. The
            output items might be:
                scale --
                    Wavelet scales.
                period --
                    Equivalent Fourier periods (in days).
                power_spectrum --
                    Wavelet power spectrum (in units**2).
                power_significance --
                    Relative significance of the power spectrum.
                global_power --
                    Global wavelet power spectrum (in units**2).
                scale_spectrum  --
                    Scale averaged wavelet spectra (in units**2)
                    according to selected periods.
                scale_significance --
                    Relative significance of the scale averaged wavelet
                    spectra.
                fft --
                    Fourier spectrum.
                fft_first --
                    Fourier spectrum of the first half of the 
                    time-series.
                fft_second --
                    Fourier spectrum of the second half of the 
                    time-series.
                fft_period --
                    Fourier periods (in days).
                trend --
                    Signal trend (in units/yr).
                wavelet_trend --
                    Wavelet spectrum trends (in units**2/yr).

    """
    t1 = time()
    result = {}

    # Resetting unit labels for Hovmoller plots
    hlabels = dict(labels)
    hlabels["units"] = ""

    # Setting some titles and paths
    if name == None:
        name = title

    # Working with the std parameter and setting its properties:
    if "val" in std.keys():
        if "lon" not in std.keys():
            std["lon"] = lon
        std["lon180"] = common.lon180(std["lon"])
        if "lat" not in std.keys():
            std["lat"] = lat
        if "err" not in std.keys():
            std["err"] = 1e-2
        std["map"] = True
    else:
        std["map"] = False

    # Lag-1 autocorrelation parameter
    if type(alpha).__name__ == "dict":
        if "lon" not in alpha.keys():
            alpha["lon"] = lon
        alpha["lon180"] = common.lon180(alpha["lon"])
        if "lat" not in alpha.keys():
            alpha["lat"] = lat
        alpha["mean"] = alpha["val"].mean()
        alpha["map"] = True
        alpha["calc"] = False
    else:
        if alpha == -1:
            alpha = {"mean": -1, "calc": True}
        else:
            alpha = {"val": alpha, "mean": alpha, "map": False, "calc": False}

    # Shows some of the options on screen.
    print ("Average Lag-1 autocorrelation for background noise: %.2f" % (alpha["mean"]))
    if save:
        print "Saving result figures in '%s'." % (save)
    if dsave:
        print "Saving result data in '%s'." % (dsave)

    if fpath:
        # Gets the list of files to be loaded individually extracts all the
        # latitudes and loads the first file to get the main parameters.
        flist = os.listdir(fpath)
        flist, match = common.reglist(flist, fpattern)
        if len(flist) == 0:
            raise Warning, "No files matched search pattern."
        flist = numpy.asarray(flist)
        lst_lat = []
        for item in match:
            y = float(item[-2])
            if item[-1].upper() == "S":
                y *= -1
            lst_lat.append(y)
        # Detect file type from file name
        ftype = fm.detect_ftype(flist[0])
        x, y, tm, z = fm.load_map("%s/%s" % (fpath, flist[0]), ftype=ftype, masked=True)
        if lon == None:
            lon = x
        lat = numpy.unique(lst_lat)
        dim = 2
    else:
        # Transforms input arrays in numpy arrays and numpy masked arrays.
        tm = numpy.asarray(tm)
        z = numpy.ma.asarray(z)
        z.mask = numpy.isnan(z)

        # Determines the number of dimensions of the variable to be plotted and
        # the sizes of each dimension.
        a = b = c = None
        dim = len(z.shape)
        if dim == 3:
            c, b, a = z.shape
        elif dim == 2:
            c, a = z.shape
            b = 1
            z = z.reshape(c, b, a)
        else:
            c = z.shape[0]
            a = b = 1
            z = z.reshape(c, b, a)
        if tm.size != c:
            raise Warning, "Time and data lengths do not match."

    # Transforms coordinate arrays into numpy arrays
    s = type(lat).__name__
    if s in ["int", "float", "float64"]:
        lat = numpy.asarray([lat])
    elif s != "NoneType":
        lat = numpy.asarray(lat)
    s = type(lon).__name__
    if s in ["int", "float", "float64"]:
        lon = numpy.asarray([lon])
    elif s != "NoneType":
        lon = numpy.asarray(lon)

    # Starts the mother wavelet class instance and determines important
    # analysis parameters
    mother = mother.lower()
    if mother == "morlet":
        mother = wavelet.Morlet()
    elif mother == "paul":
        mother = wavelet.Paul()
    elif mother in ["mexican hat", "mexicanhat", "mexican_hat"]:
        mother = wavelet.Mexican_hat()
    else:
        raise Warning, "Mother wavelet unknown."

    t = tm / common.daysinyear  # Time array in years
    dt = tm[1] - tm[0]  # Temporal sampling interval
    try:  # Zonal sampling interval
        dx = lon[1] - lon[0]
    except:
        dx = 1
    try:  # Meridional sampling interval
        dy = lat[1] - lat[0]
    except:
        dy = dx
    if numpy.isnan(dt):
        dt = 1
    if numpy.isnan(dx):
        dx = 1
    if numpy.isnan(dy):
        dy = dx
    dj = 0.25  # Four sub-octaves per octave
    s0 = 2 * dt  # Smallest scale
    J = int(7 / dj) - 1  # Seven powers of two with dj sub-octaves
    scales = period = None

    if type(crange).__name__ == "NoneType":
        crange = numpy.arange(0, 1.1, 0.1)
    if type(levels).__name__ == "NoneType":
        levels = 2.0 ** numpy.arange(-3, 6)

    if fpath:
        N = lat.size
        # TODO: refactoring # lon = numpy.arange(-81. - dx / 2., 290. + dx / 2, dx)
        # TODO: refactoring # lat = numpy.unique(numpy.asarray(lst_lat))
        c, b, a = tm.size, lat.size, lon.size
    else:
        N = a * b

    # Making sure that the longitudes range from -180 to 180 degrees and
    # setting the squared search radius R2.
    try:
        lon180 = common.lon180(lon)
    except:
        lon180 = None
    R2 = dx ** 2 + dy ** 2
    if numpy.isnan(R2):
        R2 = 65535.0
    if loc != None:
        loc = numpy.asarray([[common.lon180(item[0]), item[1]] for item in loc])

    # Initializes important result variables such as the global wavelet power
    # spectrum map, scale averaged spectrum time-series and their significance,
    # wavelet power trend map.
    global_power = numpy.ma.empty([J + 1, b, a]) * numpy.nan
    try:
        C = len(periods) + 1
        dT = numpy.diff(periods)
        pmin = numpy.concatenate([[periods[0] - dT[0] / 2], 0.5 * (periods[:-1] + periods[1:])])
        pmax = numpy.concatenate([0.5 * (periods[:-1] + periods[1:]), [periods[-1] + dT[-1] / 2]])
    except:
        # Sets the lowest period to null and the highest to half the time
        # series length.
        C = 1
        pmin = numpy.array([0])
        pmax = numpy.array([(tm[-1] - tm[0]) / 2])
    if type(sel_periods).__name__ in ["int", "float"]:
        sel_periods = [sel_periods]
    elif len(sel_periods) == 0:
        sel_periods = [-1.0]
    try:
        if fpath:
            raise Warning, "Process files individually"
        avg_spectrum = numpy.ma.empty([C, c, b, a]) * numpy.nan
        mem_error = False
    except:
        avg_spectrum = numpy.ma.empty([C, c, a]) * numpy.nan
        mem_error = True
    avg_spectrum_signif = numpy.ma.empty([C, b, a]) * numpy.nan
    trend = numpy.ma.empty([b, a]) * numpy.nan
    wavelet_trend = numpy.ma.empty([C, b, a]) * numpy.nan
    fft_trend = numpy.ma.empty([C, b, a]) * numpy.nan
    std_map = numpy.ma.empty([b, a]) * numpy.nan
    zero = numpy.ma.empty([c, a])
    fft_spectrum = None
    fft_spectrum1 = None
    fft_spectrum2 = None

    # Walks through each latitude and then through each longitude to perform
    # the temporal wavelet analysis.
    if N == 1:
        plural = ""
    else:
        plural = "s"
    s = "Spectral analysis of %d location%s... " % (N, plural)
    stdout.write(s)
    stdout.flush()
    for j in range(b):
        t2 = time()
        isloc = False  # Resets 'is special location' flag
        hloc = []  # Cleans location list for Hovmoller plots
        zero *= numpy.nan
        if mem_error:
            # Clears average spectrum for next step.
            avg_spectrum *= numpy.nan
            avg_spectrum.mask = False
        if fpath:
            findex = pylab.find(lst_lat == lat[j])
            if len(findex) == 0:
                continue
            ftype = fm.detect_ftype(flist[findex[0]])
            try:
                x, y, tm, z = fm.load_dataset(
                    fpath, flist=flist[findex], ftype=ftype, masked=True, lon=lon, lat=lat[j : j + 1], verbose=True
                )
            except:
                continue
            z = z[:, 0, :]
            x180 = common.lon180(x)

        # Determines the first and second halves of the time-series and some
        # constants for the FFT
        fft_ta = numpy.ceil(t.min())
        fft_tb = numpy.floor(t.max())
        fft_tc = numpy.round(fft_ta + fft_tb) / 2
        fft_ia = pylab.find((t >= fft_ta) & (t <= fft_tc))
        fft_ib = pylab.find((t >= fft_tc) & (t <= fft_tb))
        fft_N = int(2 ** numpy.ceil(numpy.log2(max([len(fft_ia), len(fft_ib)]))))
        fft_N2 = fft_N // 2 - 1
        fft_dt = t[fft_ib].mean() - t[fft_ia].mean()

        for i in range(a):
            # Some string output.
            try:
                Y, X = common.num2latlon(lon[i], lat[j], mode="each", padding=False)
            except:
                Y = X = "?"

            # Extracts individual time-series from the whole dataset and
            # sets or calculates its standard deviation, squared standard
            # deviation and finally the normalized time-series.
            if fpath:
                try:
                    ilon = pylab.find(x == lon[i])[0]
                    fz = z[:, ilon]
                except:
                    continue
            else:
                fz = z[:, j, i]
            if fz.mask.all():
                continue
            if std["map"]:
                try:
                    u = pylab.find(std["lon180"] == lon180[i])[0]
                    v = pylab.find(std["lat"] == lat[j])[0]
                except:
                    if debug:
                        warnings.warn("Unable to locate standard deviation " "for (%s, %s)" % (X, Y), Warning)
                    continue
                fstd = std["val"][v, u]
                estd = fstd - fz.std()
                if (estd < 0) & (abs(estd) > std["err"]):
                    if debug:
                        warnings.warn(
                            "Discrepant input standard deviation "
                            "(%f) location (%.3f, %.3f) will be "
                            "disregarded." % (estd, lon180[i], lat[j])
                        )
                    continue
            else:
                fstd = fz.std()
            fstd2 = fstd ** 2
            std_map[j, i] = fstd
            zero[:, i] = fz
            fz = (fz - fz.mean()) / fstd

            # Calculates the distance of the current point to any special
            # location set in the 'loc' parameter. If only special locations
            # are to be analysed, then skips all other ones. If the input
            # array is one dimensional, then do the analysis anyway.
            if dim == 1:
                dist = numpy.asarray([0.0])
            else:
                try:
                    dist = numpy.asarray([((item[0] - (lon180[i])) ** 2 + (item[1] - lat[j]) ** 2) for item in loc])
                except:
                    dist = []
            if (dist > R2).all() & (loc != "all") & onlyloc:
                continue

            # Determines the lag-1 autocorrelation coefficient to be used in
            # the significance test from the input parameter
            if alpha["calc"]:
                ac = acorr(fz)
                alpha_ij = (ac[c + 1] + ac[c + 2] ** 0.5) / 2
            elif alpha["map"]:
                try:
                    u = pylab.find(alpha["lon180"] == lon180[i])[0]
                    v = pylab.find(alpha["lat"] == lat[j])[0]
                    alpha_ij = alpha["val"][v, u]
                except:
                    if debug:
                        warnings.warn(
                            "Unable to locate standard deviation " "for (%s, %s) using mean value instead" % (X, Y),
                            Warning,
                        )
                    alpha_ij = alpha["mean"]
            else:
                alpha_ij = alpha["mean"]

            # Calculates the continuous wavelet transform using the wavelet
            # Python module. Calculates the wavelet and Fourier power spectrum
            # and the periods in days. Also calculates the Fourier power
            # spectrum for the first and second halves of the timeseries.
            wave, scales, freqs, coi, fft, fftfreqs = wavelet.cwt(fz, dt, dj, s0, J, mother)
            power = abs(wave * wave.conj())
            fft_power = abs(fft * fft.conj())
            period = 1.0 / freqs
            fftperiod = 1.0 / fftfreqs
            psel = pylab.find(period <= pmax.max())

            # Calculates the Fourier transform for the first and the second
            # halves of the time-series for later trend analysis.
            fft_1 = numpy.fft.fft(fz[fft_ia], fft_N)[1 : fft_N // 2] / fft_N ** 0.5
            fft_2 = numpy.fft.fft(fz[fft_ib], fft_N)[1 : fft_N // 2] / fft_N ** 0.5
            fft_p1 = abs(fft_1 * fft_1.conj())
            fft_p2 = abs(fft_2 * fft_2.conj())

            # Creates FFT return array and stores the spectrum accordingly
            try:
                fft_spectrum[:, j, i] = fft_power * fstd2
                fft_spectrum1[:, j, i] = fft_p1 * fstd2
                fft_spectrum2[:, j, i] = fft_p2 * fstd2
            except:
                fft_spectrum = numpy.ma.empty([len(fft_power), b, a]) * numpy.nan
                fft_spectrum1 = numpy.ma.empty([fft_N2, b, a]) * numpy.nan
                fft_spectrum2 = numpy.ma.empty([fft_N2, b, a]) * numpy.nan
                #
                fft_spectrum[:, j, i] = fft_power * fstd2
                fft_spectrum1[:, j, i] = fft_p1 * fstd2
                fft_spectrum2[:, j, i] = fft_p2 * fstd2

            # Performs the significance test according to the article by
            # Torrence and Compo (1998). The wavelet power is significant
            # if the ratio power/sig95 is > 1.
            signif, fft_theor = wavelet.significance(
                1.0, dt, scales, 0, alpha_ij, significance_level=siglvl, wavelet=mother
            )
            sig95 = (signif * numpy.ones((c, 1))).transpose()
            sig95 = power / sig95

            # Calculates the global wavelet power spectrum and its
            # significance. The global wavelet spectrum is the average of the
            # wavelet power spectrum over time. The degrees of freedom (dof)
            # have to be corrected for padding at the edges.
            glbl_power = power.mean(axis=1)
            dof = c - scales
            glbl_signif, tmp = wavelet.significance(
                1.0, dt, scales, 1, alpha_ij, significance_level=siglvl, dof=dof, wavelet=mother
            )
            global_power[:, j, i] = glbl_power * fstd2

            # Calculates the average wavelet spectrum along the scales and its
            # significance according to Torrence and Compo (1998) eq. 24. The
            # scale_avg_full variable is used multiple times according to the
            # selected periods range.
            #
            # Also calculates the average Fourier power spectrum.
            Cdelta = mother.cdelta
            scale_avg_full = (scales * numpy.ones((c, 1))).transpose()
            scale_avg_full = power / scale_avg_full
            for k in range(C):
                if k == 0:
                    sel = pylab.find((period >= pmin[0]) & (period <= pmax[-1]))
                    pminmax = [period[sel[0]], period[sel[-1]]]
                    les = pylab.find((fftperiod >= pmin[0]) & (fftperiod <= pmax[-1]))
                    fminmax = [fftperiod[les[0]], fftperiod[les[-1]]]
                else:
                    sel = pylab.find((period >= pmin[k - 1]) & (period < pmax[k - 1]))
                    pminmax = [pmin[k - 1], pmax[k - 1]]
                    les = pylab.find((fftperiod >= pmin[k - 1]) & (fftperiod <= pmax[k - 1]))
                    fminmax = [fftperiod[les[0]], fftperiod[les[-1]]]

                scale_avg = numpy.ma.array((dj * dt / Cdelta * scale_avg_full[sel, :].sum(axis=0)))
                scale_avg_signif, tmp = wavelet.significance(
                    1.0,
                    dt,
                    scales,
                    2,
                    alpha_ij,
                    significance_level=siglvl,
                    dof=[scales[sel[0]], scales[sel[-1]]],
                    wavelet=mother,
                )
                scale_avg.mask = scale_avg < scale_avg_signif
                if mem_error:
                    avg_spectrum[k, :, i] = scale_avg
                else:
                    avg_spectrum[k, :, j, i] = scale_avg
                avg_spectrum_signif[k, j, i] = scale_avg_signif

                # Trend analysis using least square polynomial fit of one
                # degree of the original input data and scale averaged
                # wavelet power. The wavelet power trend is calculated only
                # where the cone of influence spans the highest analyzed
                # period. In the end, the returned value for the trend is in
                # units**2.
                #
                # Also calculates the trends in the Fourier power spectrum.
                # Note that the FFT power spectrum is already multiplied by
                # the signal's standard deviation.
                incoi = pylab.find(coi >= pmax[-1])
                if len(incoi) == 0:
                    incoi = numpy.arange(c)
                polyw = numpy.polyfit(t[incoi], scale_avg[incoi].data, 1)
                wavelet_trend[k, j, i] = polyw[0] * fstd2
                fft_trend[k, j, i] = (fft_spectrum2[les, j, i] - fft_spectrum1[les, j, i]).mean() / fft_dt
                if k == 0:
                    polyz = numpy.polyfit(t, fz * fstd, 1)
                    trend[j, i] = polyz[0]

                # Plots the wavelet analysis results for the individual
                # series. The plot is only generated if the dimension of the
                # input variable z is one, if a special location is within a
                # range of the search radius R and if the show or save
                # parameters are set.
                if (show | (save != "")) & ((k in sel_periods)):
                    if (dist < R2).any() | (loc == "all") | (dim == 1):
                        # There is an interesting spot within the search
                        # radius of location (%s, %s).' % (Y, X)
                        isloc = True
                        if (dist < R2).any():
                            try:
                                hloc.append(loc[(dist < R2)][0, 0])
                            except:
                                pass
                        if save:
                            try:
                                sv = "%s/tz_%s_%s_%d" % (save, prefix, common.num2latlon(lon[i], lat[j]), k)
                            except:
                                sv = "%s" % (save)
                        else:
                            sv = ""
                        graphics.wavelet_plot(
                            tm,
                            period[psel],
                            fz,
                            power[psel, :],
                            coi,
                            glbl_power[psel],
                            scale_avg.data,
                            fft=fft,
                            fft_period=fftperiod,
                            power_signif=sig95[psel, :],
                            glbl_signif=glbl_signif[psel],
                            scale_signif=scale_avg_signif,
                            pminmax=pminmax,
                            labels=labels,
                            normalized=True,
                            std=fstd,
                            ztrend=polyz,
                            wtrend=polyw,
                            show=show,
                            save=sv,
                            levels=levels,
                            cmap=cmap,
                        )

        # Saves and/or plots the intermediate results as zonal temporal
        # diagrams.
        if dsave:
            for k in range(C):
                if k == 0:
                    sv = "%s/%s/%s_%s.xt.gz" % (
                        dsave,
                        "global",
                        prefix,
                        common.num2latlon(lon[i], lat[j], mode="each")[0],
                    )
                else:
                    sv = "%s/%s/%s_%s.xt.gz" % (
                        dsave,
                        name[k - 1].lower(),
                        prefix,
                        common.num2latlon(lon[i], lat[j], mode="each")[0],
                    )
                if mem_error:
                    fm.save_map(lon, tm, avg_spectrum[k, :, :].data, sv, lat[j])
                else:
                    fm.save_map(lon, tm, avg_spectrum[k, :, j, :].data, sv, lat[j])

        if (dim > 1) and (show or (save != "")) and (not onlyloc) and (len(hloc) > 0):
            hloc = common.lon360(numpy.unique(hloc))
            if save:
                sv = "%s/xt_%s_%s" % (save, prefix, common.num2latlon(lon[i], lat[j], mode="each")[0])
            else:
                sv = ""
            if mem_error:
                # To include overlapping original signal, use zz=zero
                mapping.hovmoller(
                    lon,
                    tm,
                    avg_spectrum[1:, :, :],
                    zo=avg_spectrum_signif[1:, j, :],
                    title=title,
                    crange=crange,
                    show=show,
                    save=sv,
                    labels=hlabels,
                    loc=hloc,
                    cmap=cmap,
                    bottom="avg",
                    right="avg",
                    std=std_map[j, :],
                )
            else:
                mapping.hovmoller(
                    lon,
                    tm,
                    avg_spectrum[1:, :, j, :],
                    zo=avg_spectrum_signif[1:, j, :],
                    title=title,
                    crange=crange,
                    show=show,
                    save=sv,
                    labels=hlabels,
                    loc=hloc,
                    cmap=cmap,
                    bottom="avg",
                    right="avg",
                    std=std_map[j, :],
                )

        # Flushing profiling text.
        stdout.write(len(s) * "\b")
        s = "Spectral analysis of %d location%s (%s)... %s " % (N, plural, Y, common.profiler(b, j + 1, 0, t1, t2))
        stdout.write(s)
        stdout.flush()

    stdout.write("\n")

    result["scale"] = scales
    result["period"] = period
    if dim == 1:
        result["power_spectrum"] = power * fstd2
        result["power_significance"] = sig95
    result["global_power"] = global_power
    result["scale_spectrum"] = avg_spectrum
    if fpath:
        result["lon"] = lon
        result["lat"] = lat
    result["scale_significance"] = avg_spectrum_signif
    result["trend"] = trend
    result["wavelet_trend"] = wavelet_trend
    result["fft_power"] = fft_spectrum
    result["fft_first"] = fft_spectrum1
    result["fft_second"] = fft_spectrum2
    result["fft_period"] = fftperiod
    result["fft_trend"] = fft_trend
    return result
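
# Minimal usage sketch (an added illustration, not part of the original
# script): a synthetic one-dimensional daily series analysed with the default
# Morlet mother wavelet. The names tm_demo and z_demo are placeholders chosen
# only to show the call signature described in the docstring above.
if __name__ == "__main__":
    import numpy
    tm_demo = numpy.arange(0., 3650.)                    # ten years of daily samples
    z_demo = numpy.sin(2 * numpy.pi * tm_demo / 365.25)  # annual-cycle test signal
    res = wavelet_analysis(z_demo, tm_demo, mother="Morlet", alpha=0.0,
                           siglvl=0.95, show=False)
    print("Fourier periods (days):", res["period"])
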
Example #6
# Change the probability density function (PDF) of the data. The time series
# of Baltic Sea ice extent is highly bi-modal and we therefore transform the
# time series into a series of percentiles. The transformed series probably
# reacts 'more linearly' to climate.
s2, _, _ = boxpdf(s2)
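
# Illustrative aside (an added sketch; this is not how boxpdf is implemented,
# merely one way to express the percentile transform described above). The
# names _demo and _demo_percentiles are placeholders and are not used further.
import numpy
from scipy.stats import rankdata
_demo = numpy.array([3.0, -1.0, 7.0, 0.5])
_demo_percentiles = (rankdata(_demo) - 0.5) / _demo.size  # ranks mapped into (0, 1)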

# Calculates the standard deviation of each time series for later
# normalization.
std1 = s1.std()
std2 = s2.std()

# Calculate the CWT of both normalized time series. The function wavelet.cwt
# returns a list containing the [wave, scales, freqs, coi, fft, fftfreqs]
# variables.
cwt1 = wavelet.cwt(s1 / std1, dt, wavelet=mother)
sig1 = wavelet.significance(1.0, dt, cwt1[1], 0, data1['alpha'], 
    wavelet=mother)
cwt2 = wavelet.cwt(s2 / std2, dt, wavelet=mother)
sig2 = wavelet.significance(1.0, dt, cwt2[1], 0, data1['alpha'], 
    wavelet=mother)

# Calculate the cross wavelet transform (XWT). The XWT finds regions in time
# frequency space where the time series show high common power. Torrence and
# Compo (1998) state that the percent point function -- PPF (inverse of the
# cumulative distribution function) of a chi-square distribution at 95%
# confidence and two degrees of freedom is Z2(95%)=3.999. However, calculating
# the PPF using chi2.ppf gives Z2(95%)=5.991. To ensure similar significance 
# intervals as in Grinsted et al. (2004), one has to use a confidence level of 86.46%.
xwt = wavelet.xwt(t1, s1, t2, s2, significance_level=0.8646, normalize=True)
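
# Quick numerical check of the confidence levels quoted above (an added,
# self-contained aside; it is not required by the analysis). For a chi-square
# distribution with two degrees of freedom, the 95% point is about 5.991,
# while 3.999 sits at roughly the 86.46% point, hence significance_level=0.8646.
from scipy.stats import chi2
print(chi2.ppf(0.95, df=2))    # ~5.991
print(chi2.ppf(0.8646, df=2))  # ~3.999
print(chi2.cdf(3.999, df=2))   # ~0.8646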
Example #7
def get_results(ntrains=1000):
    # cache file is just over 1GB for 1000 ntrains
    cache_filename = f'../output/snr_vs_peak_detect_cache_{ntrains}.pkl'
    if os.path.exists(cache_filename):
        with open(cache_filename, 'rb') as f:
            results = pickle.load(f)
    else:

        progress_start = datetime.datetime.now()
        progress_tick = progress_start

        np.random.seed(2002121548)

        alphas = np.array([0.5, 1, 2, 4, 8, 16])
        threshes = np.array([0.3, 0.5, 0.7])
        min_cycles = np.array([2, 4, 8])
        snrs = np.r_[0.0,
                     np.geomspace(0.1, 10,
                                  31)]  # include SNR=0 for pure noise effect
        freqs = np.geomspace(1, 64, 101)
        t = np.linspace(0, 2, 2000)
        base_frequency = 8

        dt = t[1] - t[0]

        mother = wavelet.Morse(beta=1.58174, gam=3)
        scales = np.geomspace(2 * dt, 0.5 * (t[-1] - t[0]), 200)
        wavelet_kwargs = dict(dt=dt,
                              scales=scales,
                              mother=mother,
                              syncsqz_freqs=freqs,
                              apply_coi=True)
        freq_edges = utils.make_edges(freqs, log=True)
        log2_freq_edges = np.log2(freq_edges)
        t_edges = utils.make_edges(t)

        # to store examples of input data
        example_trains = np.zeros((len(alphas), 100, len(t)))
        example_signals = np.zeros(
            (len(alphas), len(snrs),
             len(t)))  # closest one to base freq in first second
        train_freq_hists = np.zeros((len(alphas), len(freqs)))

        # to store examples of wavelet and peak_hist frequency distribution
        waves = np.zeros(
            (len(alphas), len(snrs), ntrains, len(min_cycles), len(freqs)))
        peaks = np.zeros(
            (len(alphas), len(snrs), ntrains, len(threshes), len(freqs)))

        total_trains = len(alphas) * len(snrs) * ntrains
        elapsed_trains = 0

        for alpha_idx, alpha in enumerate(alphas):

            # isi_distribution = scipy.stats.gamma(a=regularity, scale=1/(base_frequency * regularity))

            for snr_idx, snr in enumerate(snrs):

                # used for overwriting an example signal when the spike count is closer to the expected 1/base_freq
                example_signal_dist = np.inf

                # generate spike trains
                num_resamples = 0
                for i in range(ntrains):

                    # -------------------------------------------------------------------------------------------------
                    #   generate spike train

                    needs_resample = True
                    num_resamples -= 1  # subtract the first sample because it is not a resampling but the initial sample
                    train = np.zeros_like(t, dtype=bool)
                    while needs_resample:
                        num_resamples += 1

                        num_gen = int(
                            2 * (t[-1] - t[0]) * base_frequency
                        )  # generate more spikes than are expected to be needed
                        inv_isi = 2**(np.random.randn(num_gen) / alpha +
                                      np.log2(base_frequency))
                        spikes = np.cumsum(1 / inv_isi) - 0.5 / 8

                        # check if we have made a mistake somewhere sampling the spike train
                        if num_resamples > 0 and num_resamples % 10000 == 0:
                            print(
                                f'    spike train {i+1} resample {num_resamples}, alpha={alpha}'
                            )
                            print(f'      spikes: {spikes}')

                        # if we got unlucky and did not generate enough to go past the end, then just try again
                        if spikes[-1] < t[-1]:
                            continue

                        # clip to recording length
                        spikes = spikes[(spikes >= t[0]) & (spikes <= t[-1])]

                        if len(spikes) > 0:
                            f = 1 / np.diff(spikes)
                            raster = np.histogram(spikes, bins=t_edges)[0]
                            if len(f) > 0 and np.all(raster < 2) and np.all(
                                    f >= freq_edges[0]) and np.all(
                                        f <= freq_edges[-1]):
                                needs_resample = False
                                train = raster > 0
                                train_freq_hists[alpha_idx, :] += np.histogram(
                                    f, bins=freq_edges)[0]

                    # store train for an example (doesn't depend on SNR, so just store from the first SNR only)
                    if snr_idx == 0 and i < example_trains.shape[1]:
                        example_trains[alpha_idx, i, :] = train

                    # -------------------------------------------------------------------------------------------------
                    #   generate noise and signal

                    signal = train.astype(float)

                    # membrane potential bumps
                    mempot = 10 * scipy.ndimage.gaussian_filter1d(
                        signal, 0.01 / dt)

                    # make spike heights noisy
                    signal *= 1 / (1 + np.exp(
                        colorednoise.powerlaw_psd_gaussian(0, len(t))))

                    # add mempot bumps
                    signal += mempot

                    # generate additive noise
                    noise = colorednoise.powerlaw_psd_gaussian(
                        1, len(signal))  # use 1/f (pink) noise

                    # scale to attain desired snr
                    if snr_idx == 0:
                        # rather than making the noise infinite, it makes more sense to set the signal to 0
                        signal *= 0
                    else:
                        # noise is scaled rather than scaling signal so that plotting signal examples looks better
                        noise *= np.sqrt(
                            np.mean(signal**2) /
                            snr)  # scale noise to attain desired SNR
                    signal += noise

                    # check if we should keep this signal as the example
                    num_spikes_train = np.sum(
                        train[:len(train) //
                              2])  # only the first 0..1s that will be plotted
                    spike_train_dist = np.abs(num_spikes_train -
                                              base_frequency)
                    if spike_train_dist < example_signal_dist:
                        # store signal as an example
                        example_signals[alpha_idx, snr_idx, :] = signal
                        example_signal_dist = spike_train_dist

                    # -------------------------------------------------------------------------------------------------
                    #   compute wavelet on both noise and signal

                    for j, k in enumerate(min_cycles):
                        w = wavelet.cwt(signal, min_cycles=k,
                                        **wavelet_kwargs)[0]
                        waves[alpha_idx, snr_idx, i,
                              j, :] = np.mean(np.abs(w)**2, axis=1)

                    # -------------------------------------------------------------------------------------------------
                    #   compute peak detection on both noise and signal

                    for j in range(len(threshes)):

                        # compute on signal
                        x = signal
                        x = x - np.mean(x)
                        x = x / np.max(x)
                        spikes = utils.spike_detect(x, t, threshes[j])
                        if len(spikes) > 1:
                            f = np.log2(1 / np.diff(spikes))
                            h = np.histogram(
                                f, bins=log2_freq_edges)[0].astype(float)
                        else:
                            h = np.zeros_like(freqs)
                        peaks[alpha_idx, snr_idx, i, j, :] = h

                    # show progress
                    now = datetime.datetime.now()
                    elapsed_trains += 1
                    if (now - progress_tick).total_seconds() > 10:
                        progress_tick = now
                        proportion_done = elapsed_trains / total_trains
                        alpha_str = f'alpha {alpha} ({alpha_idx+1}/{len(alphas)})'
                        snr_str = f'snr ({snr_idx + 1}/{len(snrs)})'
                        train_str = f'train ({i+1}/{ntrains})'
                        print(
                            f'{alpha_str}, {snr_str}, {train_str}, {progress_str(progress_start, proportion_done)}'
                        )

        results = (alphas, threshes, min_cycles, snrs, freqs, t,
                   base_frequency, train_freq_hists, example_trains,
                   example_signals, waves, peaks)

        with open(cache_filename, 'wb') as f:
            pickle.dump(results, f, protocol=2)

        print(f'elapsed time: {datetime.datetime.now() - progress_start}')

    return results
Example #8
dj = 0.25                            # Four sub-octaves per octave
s0 = -1 #2 * dt                      # Starting scale; -1 selects the default (2 * dt)
J = -1 # 7 / dj                      # Number of scales; -1 selects the default
alpha = 0.0                          # Lag-1 autocorrelation for white noise
#alpha = numpy.correlate(var, var, 'same')
#alpha /= alpha.max()
#alpha = 0.5 * (alpha[N / 2 + 1] + alpha[N / 2 + 2] ** 0.5)
#
#
mother = wavelet.Morlet(6.)          # Morlet mother wavelet with wavenumber=6
#mother = wavelet.Mexican_hat()       # Mexican hat wavelet, or DOG with m=2
#mother = wavelet.Paul(4)             # Paul wavelet with order m=4

# The following routines perform the wavelet transform and significance
# analysis for the chosen data set.
wave, scales, freqs, coi, fft, fftfreqs = wavelet.cwt(var, dt, dj, s0, J,
                                                      mother)
iwave = wavelet.icwt(wave, scales, dt, dj, mother)
power = (abs(wave)) ** 2             # Normalized wavelet power spectrum
fft_power = std2 * abs(fft) ** 2     # FFT power spectrum
period = 1. / freqs

signif, fft_theor = wavelet.significance(1.0, dt, scales, 0, alpha,
                        significance_level=slevel, wavelet=mother)
sig95 = (signif * numpy.ones((N, 1))).transpose()
sig95 = power / sig95                # Where ratio > 1, power is significant

# Calculates the global wavelet spectrum and determines its significance level.
glbl_power = std2 * power.mean(axis=1)
dof = N - scales                     # Correction for padding at edges
glbl_signif, tmp = wavelet.significance(std2, dt, scales, 1, alpha,
                       significance_level=slevel, dof=dof, wavelet=mother)