Example #1
def g2(cube, moves, uv):
    if cube == full_cube:
        return cube, moves
    if check_g2_ud(cube) is False:
        result, cube = algo(cube, check_g2_ud, g2_1_moves, apply_move)
        moves += result
    moves, cube = match_corners_pairs(cube, moves)
    moves += aligned_corner(cube)
    edges = make_edges(cube)
    bad_edges = check_g2_edges(edges)
    if bad_edges == 0:
        return cube, moves
    if bad_edges != 4:
        result, edges = algo(edges, check_config_edges_g2, g2_3_moves,
                             apply_move_e)
        mv.use_move(result, cube)
        moves += result
        mv.use_move([["U", ""], ["L", "2"], ["R", "2"], ["D", "'"]], cube)
        moves += [["U", ""], ["L", "2"], ["R", "2"], ["D", "'"]]
    edges = make_edges(cube)
    if check_config(edges) is False:
        result, edges = algo(edges, check_config, g2_4_moves, apply_move_e)
        mv.use_move(result, cube)
        moves += result
    mv.use_move([["U", ""], ["L", "2"], ["R", "2"], ["D", "'"]], cube)
    moves += [["U", ""], ["L", "2"], ["R", "2"], ["D", "'"]]
    return cube, moves
Example #2
def g0(cube, moves):
    edges = make_edges(cube)
    if check_g0(edges) is False:
        result, edges = algo(edges, check_g0, g0_moves, apply_move_e)
        mv.use_move(result, cube)
        moves += result
    return edges, cube, moves
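The g0 and g2 functions above look like phases of a group-reduction (Thistlethwaite-style) cube solver. A minimal, hypothetical driver sketch showing how such phases might be chained (the intermediate phases and the real call sites are not shown in these excerpts, and the None argument is a placeholder):

def solve(cube):
    # hypothetical driver; assumes the cube/move representation used above
    moves = []
    edges, cube, moves = g0(cube, moves)  # fix edge orientation
    cube, moves = g2(cube, moves, None)   # uv is unused in the excerpt above
    return cube, moves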
Example #3
def plot_mesaclipped(t,
                     x,
                     fs,
                     mother1,
                     mother2,
                     freqs,
                     sync_sqz,
                     fig_filename,
                     snr=np.inf,
                     noise_gamma=0,
                     num_scales=2000,
                     split=False):

    print(f'plotting {fig_filename}')

    # add noise
    noise = colorednoise.powerlaw_psd_gaussian(noise_gamma, len(t))
    x = x + np.sqrt(np.var(x) / snr) * noise

    dt = t[1] - t[0]

    min_cycles = 8

    if sync_sqz:
        syncsqz_freqs = freqs
    else:
        syncsqz_freqs = None

    # compute wavelet transforms
    amps = []
    mus = []
    vmax = -np.inf
    xr = []  # for reconstructed signals
    cois = []
    titles = []
    # for i, (mother, c) in enumerate(zip([mother1, mother1, mother2, mother2], [0, 1, 0, 1])):
    for i, (mother, c, title) in enumerate(
            zip([mother1, mother2], [0, 1], ['Conventional', 'MesaClip'])):

        if sync_sqz:
            scales = np.geomspace(0.5 * dt, t[-1] - t[0], num_scales)
        else:
            scales = mother.convert_freq_scale(freqs)

        if split:
            x_pos = x.copy()
            x_neg = (-x).copy()
            x_pos[x_pos < 0] = 0
            x_neg[x_neg < 0] = 0
            w_pos, _, coi_time_idxs, coi_freq_idxs = wavelet.cwt(
                x_pos,
                dt,
                scales,
                mother,
                syncsqz_freqs=syncsqz_freqs,
                min_cycles=c * min_cycles)
            w_neg, _, coi_time_idxs, coi_freq_idxs = wavelet.cwt(
                x_neg,
                dt,
                scales,
                mother,
                syncsqz_freqs=syncsqz_freqs,
                min_cycles=c * min_cycles)
            phi_pos = np.unwrap(np.angle(w_pos))
            phi_neg = np.unwrap(np.angle(w_neg))
            w = (np.abs(w_pos) + np.abs(w_neg)) * np.exp(0.5j *
                                                         (phi_pos + phi_neg))
        else:
            w, _, coi_time_idxs, coi_freq_idxs = wavelet.cwt(
                x,
                dt,
                scales,
                mother,
                syncsqz_freqs=syncsqz_freqs,
                min_cycles=c * min_cycles)

        coi_times = t[coi_time_idxs] - t[0]
        coi_freqs = freqs[coi_freq_idxs]
        cois.append((coi_times, coi_freqs))

        amp = np.abs(w)
        mus.append(np.sqrt(np.mean(amp**2, axis=1)))

        if sync_sqz:
            # smooth a bit so that sharp bits show up in the high-res output
            amp = scipy.ndimage.gaussian_filter(amp, [2, 2])

        amps.append(amp)
        vmax = max(vmax, np.max(amp))
        xr.append(wavelet.reconstruct(w, mother, scales))

        titles.append(title)

    nw = len(amps)

    # create figure and axes
    fig = plt.figure(figsize=(5, 4))
    gs = gridspec.GridSpec(1 + nw,
                           2,
                           width_ratios=[1, 0.2],
                           height_ratios=[1] + [2] * nw)
    ax = np.zeros((1 + nw, 2), dtype=object)
    ax[0, 0] = fig.add_subplot(gs[0, 0])  # signal
    ax_s = None
    for i in range(1, 1 + nw):  # wavelet transform results
        ax[i, 0] = fig.add_subplot(gs[i, 0])
        ax[i, 1] = fig.add_subplot(gs[i, 1], sharex=ax_s)
        ax_s = ax[i, 1]

    # convenience axes variables
    ax_sig = ax[0, 0]
    ax_w = ax[1:, 0]
    ax_s = ax[1:, 1]

    # plot signal
    ax_sig.plot(t, x, c='k', lw=0.5)
    ax_sig.tick_params(which='both', direction='out')
    ax_sig.set_xlim(t[0], t[-1])
    ax_sig.set_xticklabels([])
    ax_sig.set_yticks([])
    ax_sig.xaxis.set_ticks_position('bottom')
    ax_sig.yaxis.set_ticks_position('left')
    ax_sig.axis('off')
    ax_sig.set_title('Signal', loc='left')

    # pcolormesh grid coordinates
    t_edges = utils.make_edges(t)
    f_edges = utils.make_edges(freqs, log=True)
    t_grid, f_grid = np.meshgrid(t_edges, f_edges)

    # cmap = 'gray_r'
    # cmap = 'binary'
    # cmap = 'bone_r'
    cmap = 'Blues'

    # iterate over each amplitude type
    for pi, (mu, amp, (coi_times, coi_freqs),
             title) in enumerate(zip(mus, amps, cois, titles)):

        # plot amplitudes
        vmax = np.max(amp)
        # vmin = -0.05 * vmax  # when using a white-to-color colormap, make the background slightly off-white
        vmin = 0
        ax_w[pi].pcolormesh(t_grid,
                            f_grid,
                            amp,
                            cmap=cmap,
                            rasterized=True,
                            vmin=vmin,
                            vmax=vmax)
        ax_w[pi].set_xlim(t[0], t[-1])

        ax_w[pi].set_title(title, loc='left')

        # # plot cone-of-influence
        # coi_kwargs = dict(c='k', ls='--', lw=0.5, alpha=0.5)
        # ax_w[pi].plot(t[ 0] + coi_times, coi_freqs, **coi_kwargs)
        # ax_w[pi].plot(t[-1] - coi_times, coi_freqs, **coi_kwargs)

        # plot time-averages
        ax_s[pi].fill(np.r_[0, mu, 0],
                      np.r_[freqs[0], freqs, freqs[-1]],
                      c='k',
                      lw=0,
                      zorder=2,
                      alpha=0.2)
        ax_s[pi].plot(mu, freqs, c='k', lw=1, zorder=3)

        # setup axes ranges and ticks
        for ax_ws in [ax_w[pi], ax_s[pi]]:
            ax_ws.tick_params(which='both', direction='out')
            ax_ws.xaxis.set_ticks_position('bottom')
            ax_ws.yaxis.set_ticks_position('left')
            ax_ws.set_yscale('log')
            ax_ws.set_ylim(freqs[0], freqs[-1])

        for f in fs:
            ax_s[pi].axhline(f, lw=1, ls=':', color='r', alpha=1.0, zorder=5)

        ax_w[pi].set_ylabel(r'Freq (Hz)')
        ax_w[pi].set_xlim(ax[0, 0].get_xlim())

        # clean up signal axes
        ax_s[pi].set_yticklabels([])
        ax_s[pi].set_xticks([])
        for side in ['top', 'right', 'bottom']:
            ax_s[pi].spines[side].set_visible(False)

    for i in range(len(ax_w) - 1):
        ax_w[i].set_xticklabels([])

    ax[-1, 0].set_xlabel(r'Time (s)')

    fig.tight_layout(h_pad=0.2, w_pad=0.5, rect=[-0.025, -0.03, 1.025, 0.98])
    fig.savefig(fig_filename, dpi=300)
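A minimal call sketch with toy inputs (hypothetical values; the Morse mother-wavelet parameters are borrowed from the later examples, and both mothers are set to the same object for brevity):

t = np.linspace(0, 10, 2000)
x = np.sin(2 * np.pi * 2.0 * t)  # toy 2 Hz signal
freqs = np.geomspace(0.25, 8, 200)
mother = wavelet.Morse(beta=1.58174, gam=3)
plot_mesaclipped(t, x, fs=[2.0],
                 mother1=mother, mother2=mother,
                 freqs=freqs, sync_sqz=True,
                 fig_filename='example.png',
                 snr=5, noise_gamma=1)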
Example #4
def main():

    xs, t, freqs, threshes, peak_hists, amps = get_results()

    # subset results for clearer/larger view (using original a..u labels)
    idxs = [
        ord(c) - ord('a')
        for c in ['a', 'b', 'c', 'e', 'k', 'm', 'o', 'q', 'u']
    ]
    xs = xs[idxs]
    peak_hists = peak_hists[idxs]
    amps = amps[idxs]

    print(f'delta logHz = {np.log2(freqs[1]) - np.log2(freqs[0]):.3f}')

    n = xs.shape[0]
    nrows = n // 2 + 1

    # setup axis properties
    log2_freqs = np.log2(freqs)
    log2_freq_edges = utils.make_edges(log2_freqs)
    freq_ticks = np.log2([1 / 8, 1 / 4, 1 / 2, 1, 2, 4, 8])
    freq_ticklabels = [
        '$^1$/$_8$', '$^1$/$_4$', '$^1$/$_2$', '1', '2', '4', '8'
    ]
    thresh_edges = utils.make_edges(threshes)
    gridx, gridy = np.meshgrid(log2_freq_edges, thresh_edges)

    # axes widths and x-positions
    left_margin = 0.25
    right_margin = 0.1
    column_margin = 0.3
    width_ratios = [[1.5], [1]]
    width_ratios = [reduce((lambda x, y: x + [0.1] + y), width_ratios)] * 2
    width_ratios = [left_margin] + reduce(
        (lambda x, y: x + [column_margin] + y), width_ratios) + [right_margin]
    ax_xs, ax_ws = ratios_to_pos_and_size(width_ratios)

    # axes heights and y-positions
    bottom_margin = 0.5
    top_margin = 0.1
    row_margin = 0.3
    height_ratios = [[0.5], [1]]
    height_ratios = [reduce(
        (lambda x, y: x + [0.1] + y), height_ratios)] * nrows
    height_ratios = [bottom_margin] + reduce(
        (lambda x, y: x + [row_margin] + y), height_ratios) + [top_margin]
    ax_ys, ax_hs = ratios_to_pos_and_size(height_ratios)

    # flip vertically since we will refer to rows from the top
    ax_ys = ax_ys[::-1]
    ax_hs = ax_hs[::-1]

    fig = plt.figure(figsize=(8.27, 10))

    spine_color = '0.8'

    # draw legend axes
    ax_x = fig.add_subplot(position=[
        ax_xs[0], ax_ys[1], ax_ws[0], ax_hs[0] + ax_ys[0] - ax_ys[1]
    ])
    ax_p = fig.add_subplot(position=[ax_xs[1], ax_ys[0], ax_ws[1], ax_hs[0]])
    ax_w = fig.add_subplot(position=[ax_xs[1], ax_ys[1], ax_ws[1], ax_hs[1]])
    for ax in [ax_x, ax_w, ax_p]:
        ax.set_xticks([])
        ax.set_yticks([])
        ax.set_xlim([0, 1])
        ax.set_ylim([0, 1])
        for spine in ax.spines:
            ax.spines[spine].set_color(spine_color)
    ax_x.text(0.5, 0.5, 'EMG Signal', ha='center', va='center', fontsize=14)
    ax_w.text(0.5,
              0.5,
              'Mesaclip\n(our method)',
              ha='center',
              va='center',
              fontsize=10)
    ax_p.text(0.5,
              0.5,
              'Inverse ISI\nvia peak-detection',
              ha='center',
              va='center',
              fontsize=10)

    threshes_hline = [0.3, 0.5, 0.7]
    threshes_colors = plt.rcParams['axes.prop_cycle'].by_key()['color'][0:3]
    thresh_kwargs = [
        dict(y=threshes_hline[ci], c=c, lw=2, zorder=50, alpha=0.5)
        for ci, c in enumerate(threshes_colors)
    ]

    for i in range(n):

        row = (i + 1) % nrows
        col_offset = (i + 1) // nrows

        # create axes
        r = 2 * row
        c = 2 * col_offset
        ax_x = fig.add_subplot(position=[
            ax_xs[c + 0], ax_ys[r + 1], ax_ws[c + 0], ax_hs[r + 0] +
            ax_ys[r + 0] - ax_ys[r + 1]
        ])
        ax_w = fig.add_subplot(
            position=[ax_xs[c + 1], ax_ys[r + 1], ax_ws[c + 1], ax_hs[r + 1]])
        ax_p = fig.add_subplot(
            position=[ax_xs[c + 1], ax_ys[r + 0], ax_ws[c + 1], ax_hs[r + 0]])
        axs = [ax_x, ax_p, ax_w]
        ax_fs = [ax_p, ax_w]

        # plot signals
        ax_x.plot(t, xs[i], lw=1, c='k', zorder=10)
        ax_x.set_ylim(
            -0.62,
            thresh_edges[-1])  # hacky manual y-alignment of ax_x and ax_p

        # plot peak frequency estimate
        ax_p.pcolormesh(gridx,
                        gridy,
                        peak_hists[i],
                        cmap='Greys',
                        vmin=-0.2,
                        zorder=10)

        # annotate thresh lines
        for kwargs in thresh_kwargs:
            ax_x.axhline(**kwargs)
            ax_p.axhline(**kwargs)

        # plot wavelet
        ax_w.fill_between(log2_freqs,
                          amps[i, 0]**2,
                          facecolor='k',
                          edgecolor='none')
        # for j in range(amps.shape[1]):
        #     amp = amps[i, j]**2
        #     amp = amp / np.sum(amp)
        #     ax_w.plot(log2_freqs, amp, alpha=0.7)

        # axes refinement
        ax_x.set_ylabel(f'({chr(ord("a") + i)})',
                        position=(0, 0.94),
                        rotation='horizontal',
                        va='center',
                        fontsize=12,
                        labelpad=12)
        for ax in axs:
            ax.set_yticks([])
            for spine in ax.spines:
                ax.spines[spine].set_color(spine_color)
        if row == nrows - 1:
            ax_x.set_xlabel('Time (s)')
            ax_w.set_xlabel('Frequency (Hz)')
            ax_x.set_xticks(range(0, 11, 2))
            ax_w.set_xticklabels(freq_ticklabels)
            for ax in axs:
                ax.spines['bottom'].set_visible(True)
        else:
            ax_x.set_xticks([])
            ax_w.set_xticklabels([])
        ax_p.set_xticklabels([])
        for ax in ax_fs:
            ax.spines['bottom'].set_visible(True)
            ax.set_xlim([log2_freq_edges[0], log2_freq_edges[-1]])
            ax.tick_params(axis='x', which='major', length=2)
            ax.set_xticks(freq_ticks)

    fig.savefig('../output/real_data.png', dpi=600)
    plt.close(fig)
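These plotting routines rely on utils.make_edges to turn sample centres into pcolormesh bin edges. A hedged sketch of what such a helper could look like (hypothetical; the project's actual implementation is not shown in these examples):

def make_edges(x, log=False):
    # midpoints between neighbouring samples, extrapolated at both ends;
    # with log=True the midpoints become geometric means (log-space midpoints)
    x = np.asarray(x, dtype=float)
    if log:
        return np.exp(make_edges(np.log(x)))
    mid = 0.5 * (x[:-1] + x[1:])
    return np.r_[x[0] - (mid[0] - x[0]), mid, x[-1] + (x[-1] - mid[-1])]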
Example #5
def get_results():

    cache_filename = '../output/real_data_cache.pkl'

    if os.path.exists(cache_filename):
        with open(cache_filename, 'rb') as f:
            result = pickle.load(f)

    else:

        xs = np.loadtxt('../data/real_emgs.txt')
        t = np.arange(xs.shape[1]) / 100  # data sampled at 100 Hz
        n = xs.shape[0]

        freqs = np.geomspace(1 / 8, 8, 100)
        log2_freqs = np.log2(freqs)
        log2_freq_edges = utils.make_edges(log2_freqs)

        dt = t[1] - t[0]
        scales = np.geomspace(2 * dt, 0.5 * (t[-1] - t[0]), 1000)
        mother = wavelet.Morse(beta=1.58174, gam=3)
        min_cycles = [2, 4, 8]

        threshes = np.linspace(0, 1, 50)
        peak_hists = np.zeros((n, len(threshes), len(freqs)))
        amps = np.zeros((n, len(min_cycles), len(freqs)))

        for i in range(n):

            print(f'signal {i + 1}/{n}')

            x = xs[i]
            x = x - scipy.ndimage.gaussian_filter1d(
                x, 2 / dt)  # subtract baseline
            x = x - np.mean(x)
            x = x / np.max(x)
            xs[i, :] = x

            # peak detect on x
            peak_hist = np.zeros((len(threshes), len(freqs)))
            for j, thresh in enumerate(threshes):
                spikes = utils.spike_detect(x, t, thresh)
                if len(spikes) > 1:
                    f = np.log2(1 / np.diff(spikes))
                    h = np.histogram(f, bins=log2_freq_edges)[0].astype(float)

                    if np.max(h) > 0:
                        h = scipy.ndimage.gaussian_filter1d(h, 2)
                        h = h / np.max(h)
                        peak_hist[j, :] = h

            peak_hists[i, ...] = peak_hist

            # wavelet
            for j, k in enumerate(min_cycles):
                w = wavelet.cwt(x,
                                dt,
                                scales,
                                mother,
                                syncsqz_freqs=freqs,
                                min_cycles=k)[0]
                amp = np.mean(np.abs(w)**2, axis=1)
                amp = scipy.ndimage.gaussian_filter1d(amp, 2)
                amps[i, j, :] = np.sqrt(amp)

        result = (xs, t, freqs, threshes, peak_hists, amps)
        with open(cache_filename, 'wb') as f:
            pickle.dump(result, f, protocol=2)

    return result
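The peak-detection baseline calls utils.spike_detect(x, t, thresh), which evidently returns spike times given a normalised signal and a threshold. A hypothetical sketch of such a detector (not the project's actual implementation):

def spike_detect(x, t, thresh):
    # hypothetical: return the times of rising threshold crossings
    above = x >= thresh
    rising = np.where(above[1:] & ~above[:-1])[0] + 1
    return t[rising]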
Example #6
def main_examples(ntrains):

    alphas, threshes, ks, snrs, freqs, t, base_frequency, \
      train_freq_hists, example_trains, example_signals, waves, peaks = get_results(ntrains)

    # remove the alpha=0.5 case; it doesn't really add anything
    alphas = alphas[1:]
    waves = waves[1:, ...]
    peaks = peaks[1:, ...]
    example_trains = example_trains[1:, ...]
    example_signals = example_signals[1:, ...]
    train_freq_hists = train_freq_hists[1:, ...]

    nalpha, nsnr, ntrains, nthresh, nfreq = peaks.shape
    nks = len(ks)
    log2_freqs = np.log2(freqs)

    np.random.seed(2002171330)
    bootci_kwargs = dict(statfunc=lambda _x: np.nanmean(_x, axis=0),
                         alpha=0.05,
                         n_samples=1000)

    snr_example_idxs = [0]
    for s in [0.1, 0.3, 1, 3]:
        snr_example_idxs.append(np.argmin(np.abs(snrs - s)))

    log2_freq_edges = utils.make_edges(log2_freqs)
    freq_ticks = [1, 2, 4, 8, 16, 32, 64]
    freq_ticklabels = freq_ticks

    wave_kwargs = [
        dict(facecolor=c, edgecolor=c, alpha=0.5, zorder=100 - ci)
        for ci, c in enumerate(['0', '0.3', '0.5'])
    ]
    peak_kwargs = [
        dict(facecolor=c, edgecolor=c, alpha=0.5, zorder=90 - ci) for ci, c in
        enumerate(plt.rcParams['axes.prop_cycle'].by_key()['color'][:nthresh])
    ]

    # axes widths and x-positions (indexed from left)
    left_margin = 0.5
    right_margin = 0.1
    column_margin = 0.15
    width_ratios = [left_margin] + reduce(
        (lambda a, b: a + [column_margin] + b),
        [[1]] * nalpha) + [right_margin]
    xs, ws = ratios_to_pos_and_size(width_ratios)

    # axes heights and y-positions (indexed from bottom)
    top_margin = 0.8
    bottom_margin = 0.6
    height_ratios = [[1.0]] * len(snr_example_idxs) + [[1.5], [0.5], [0.5]]
    height_ratios = [bottom_margin] + reduce(
        (lambda a, b: a + [0.1] + b), height_ratios) + [top_margin]
    height_ratios[-3] = 0.8
    height_ratios[-5] = 0.6
    height_ratios[-7] = 1.1
    ys, hs = ratios_to_pos_and_size(height_ratios)

    fig = plt.figure(figsize=(9, 12))
    fig.text(0.5,
             0.98,
             'Exploration of noise and irregularity',
             fontsize=16,
             ha='center',
             va='center')
    for ai, alpha in enumerate(alphas):

        alpha_value_label = f'{alpha}'.rstrip('0').rstrip('.')
        if alpha > 1:
            sigma_value_label = f'1/{alpha_value_label}'
        else:
            sigma_value_label = f'{1/alpha}'.rstrip('0').rstrip('.')
        print(f'alpha = {alpha_value_label} ({ai+1}/{nalpha})')

        # -----------------------------------------------------------------
        #   1/ISI histogram

        print('  histogram')

        # plot
        ax_isi = fig.add_subplot(position=[xs[ai], ys[-1], ws[ai], hs[-1]])
        ax_isi.bar(log2_freq_edges[:-1],
                   train_freq_hists[ai],
                   width=np.diff(log2_freq_edges),
                   align='edge',
                   color='k')

        # configure axes
        ax_isi.set_xticks([], minor=True)
        ax_isi.set_xticks(np.log2(freq_ticks))
        ax_isi.set_xticklabels(freq_ticklabels)
        ax_isi.set_title(f'$\\sigma$ = {sigma_value_label}')
        for spine in ['left', 'top', 'right']:
            ax_isi.spines[spine].set_visible(False)
        ax_isi.set_yticks([])
        if ai == 0:
            ax_isi.set_ylabel('True\ndistribution', labelpad=10)
        ax_isi.set_xlabel('Frequency (ISI$^{-1}$)')
        ax_isi.set_xlim([log2_freq_edges[0], log2_freq_edges[-1]])

        # -----------------------------------------------------------------
        #   Raster and signal examples

        print('  raster and signal examples')

        # plot raster
        ax_r = fig.add_subplot(position=[xs[ai], ys[-2], ws[ai], hs[-2]])
        for i in range(example_trains.shape[1]):
            train = example_trains[ai, i]
            spikes = t[np.where(train > 0)[0]]
            ax_r.scatter(spikes, [i] * len(spikes), marker='.', color='k', s=1)

        # plot noisy signal
        ax_s = fig.add_subplot(position=[xs[ai], ys[-3], ws[ai], hs[-3]])
        for y, snri in enumerate(snr_example_idxs):
            signal = example_signals[ai, snri]
            signal = (signal - signal.mean()) / (
                0.3 + signal.std())  # weird scaling for visual aesthetics
            ax_s.plot(t, signal + 4 * y, c='k', lw=1)

        # configure axes
        if ai == 0:
            ax_r.set_ylabel('True\nraster', labelpad=10)
            ax_s.set_ylabel('SNR examples')
        for ax in [ax_r, ax_s]:
            ax.set_xlim([0, 1])
            for spine in ['left', 'top', 'right']:
                ax.spines[spine].set_visible(False)
            ax.set_yticks([])
            ax.set_xlabel('Time')
            ax.set_xticks([0, 0.25, 0.5, 0.75, 1.0])
            ax.set_xticklabels([f'{tick:g}' for tick in ax.get_xticks()])
        if ai == 0:
            ax_s.set_yticks(4 * np.arange(len(snr_example_idxs)))
            ax_s.set_yticklabels([f'{s:.2g}' for s in snrs[snr_example_idxs]])
            ax_s.yaxis.set_tick_params(length=0)

        # -----------------------------------------------------------------
        #   Wavelet and peak examples

        print('  wavelet and peak examples')

        for i, snr_idx in enumerate(snr_example_idxs):
            ax = fig.add_subplot(position=[xs[ai], ys[i], ws[ai], hs[i]])

            snr_label = f'{snrs[snr_idx]:.2g}'
            print(
                f'    example snr {snr_label} ({i+1}/{len(snr_example_idxs)})')

            # nalpha, nsnr, ntrains, nthresh, nfreq = peaks.shape
            # nalpha, nsnr, ntrains, nks    , nfreq = waves.shape

            # plot wavelet
            for j in range(nks):
                y = waves[ai, snr_idx, :, j]
                y = y / np.sum(y, axis=-1, keepdims=True)
                ci = np.sqrt(bootci_pi(y, **bootci_kwargs))
                ax.fill_between(log2_freqs,
                                ci[0],
                                ci[1],
                                label=f'Mesaclip ($k$={ks[j]})',
                                **wave_kwargs[j])

            # plot peak
            for j in range(nthresh):
                y = peaks[ai, snr_idx, :, j]
                y = y / np.sum(y, axis=-1, keepdims=True)
                ci = np.sqrt(bootci_pi(y, **bootci_kwargs))
                ax.fill_between(log2_freqs,
                                ci[0],
                                ci[1],
                                label=f'Peak ($\\theta$={threshes[j]})',
                                **peak_kwargs[j])

            # configure axes
            ax.set_yticks([])
            ax.set_xticks([])
            ax.set_xlim(log2_freq_edges[0], log2_freq_edges[-1])
            if ai == 0:
                ax.set_ylabel(f'SNR\n{snr_label}', fontsize=10, labelpad=10)
            if i == 0:
                ax.set_xticks(np.log2(freq_ticks))
                ax.set_xticklabels(freq_ticklabels)
                ax.set_xlabel('Frequency')

        if ai == 0:
            handles, labels = ax.get_legend_handles_labels()
            handles = list(np.array(handles).reshape(2, -1).T.flatten())
            labels = list(np.array(labels).reshape(2, -1).T.flatten())
            ax.legend(handles,
                      labels,
                      loc='upper left',
                      ncol=3,
                      bbox_to_anchor=(-0.05, 1.6))

    fig.savefig('../output/snr_vs_peak_detect_examples.png', dpi=600)
    plt.close(fig)
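bootci_pi is called with a statfunc, alpha and n_samples, and its output is used as a lower/upper confidence band over the trains axis, so it presumably computes a bootstrap percentile interval. A hypothetical sketch under those assumptions (not the project's actual implementation):

def bootci_pi(data, statfunc, alpha=0.05, n_samples=1000):
    # hypothetical: bootstrap percentile interval of statfunc over axis 0
    n = data.shape[0]
    stats = np.array([
        statfunc(data[np.random.randint(0, n, n)]) for _ in range(n_samples)
    ])
    lo = np.percentile(stats, 100 * alpha / 2, axis=0)
    hi = np.percentile(stats, 100 * (1 - alpha / 2), axis=0)
    return np.array([lo, hi])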
Example #7
def get_results(ntrains=1000):
    # cache file is just over 1GB for 1000 ntrains
    cache_filename = f'../output/snr_vs_peak_detect_cache_{ntrains}.pkl'
    if os.path.exists(cache_filename):
        with open(cache_filename, 'rb') as f:
            results = pickle.load(f)
    else:

        progress_start = datetime.datetime.now()
        progress_tick = progress_start

        np.random.seed(2002121548)

        alphas = np.array([0.5, 1, 2, 4, 8, 16])
        threshes = np.array([0.3, 0.5, 0.7])
        min_cycles = np.array([2, 4, 8])
        snrs = np.r_[0.0, np.geomspace(0.1, 10, 31)]  # include SNR=0 for pure noise effect
        freqs = np.geomspace(1, 64, 101)
        t = np.linspace(0, 2, 2000)
        base_frequency = 8

        dt = t[1] - t[0]

        mother = wavelet.Morse(beta=1.58174, gam=3)
        scales = np.geomspace(2 * dt, 0.5 * (t[-1] - t[0]), 200)
        wavelet_kwargs = dict(dt=dt,
                              scales=scales,
                              mother=mother,
                              syncsqz_freqs=freqs,
                              apply_coi=True)
        freq_edges = utils.make_edges(freqs, log=True)
        log2_freq_edges = np.log2(freq_edges)
        t_edges = utils.make_edges(t)

        # to store examples of the input data
        example_trains = np.zeros((len(alphas), 100, len(t)))
        # for each (alpha, snr) pair, keep the signal whose first-second spike count is closest to base_frequency
        example_signals = np.zeros((len(alphas), len(snrs), len(t)))
        train_freq_hists = np.zeros((len(alphas), len(freqs)))

        # to store examples of the wavelet and peak_hist frequency distributions
        waves = np.zeros(
            (len(alphas), len(snrs), ntrains, len(min_cycles), len(freqs)))
        peaks = np.zeros(
            (len(alphas), len(snrs), ntrains, len(threshes), len(freqs)))

        total_trains = len(alphas) * len(snrs) * ntrains
        elapsed_trains = 0

        for alpha_idx, alpha in enumerate(alphas):

            # isi_distribution = scipy.stats.gamma(a=regularity, scale=1/(base_frequency * regularity))

            for snr_idx, snr in enumerate(snrs):

                # used for overwriting the example signal when a train's first-second spike count is closer to the expected base_frequency
                example_signal_dist = np.inf

                # generate spike trains
                num_resamples = 0
                for i in range(ntrains):

                    # -------------------------------------------------------------------------------------------------
                    #   generate spike train

                    needs_resample = True
                    num_resamples -= 1  # subtract one because the first pass is the initial sample, not a resample
                    train = np.zeros_like(t, dtype=bool)
                    while needs_resample:
                        num_resamples += 1

                        # generate more spikes than should be needed
                        num_gen = int(2 * (t[-1] - t[0]) * base_frequency)
                        inv_isi = 2**(np.random.randn(num_gen) / alpha +
                                      np.log2(base_frequency))
                        spikes = np.cumsum(1 / inv_isi) - 0.5 / 8

                        # check if we have made a mistake somewhere sampling the spike train
                        if num_resamples > 0 and num_resamples % 10000 == 0:
                            print(
                                f'    spike train {i+1} resample {num_resamples}, alpha={alpha}'
                            )
                            print(f'      spikes: {spikes}')

                        # if we got unlucky and did not generate enough to go past the end, then just try again
                        if spikes[-1] < t[-1]:
                            continue

                        # clip to recording length
                        spikes = spikes[(spikes >= t[0]) & (spikes <= t[-1])]

                        if len(spikes) > 0:
                            f = 1 / np.diff(spikes)
                            raster = np.histogram(spikes, bins=t_edges)[0]
                            if len(f) > 0 and np.all(raster < 2) and np.all(
                                    f >= freq_edges[0]) and np.all(
                                        f <= freq_edges[-1]):
                                needs_resample = False
                                train = raster > 0
                                train_freq_hists[alpha_idx, :] += np.histogram(
                                    f, bins=freq_edges)[0]

                    # store train for an example (doesn't depend on SNR, so only store it for the first SNR)
                    if snr_idx == 0 and i < example_trains.shape[1]:
                        example_trains[alpha_idx, i, :] = train

                    # -------------------------------------------------------------------------------------------------
                    #   generate noise and signal

                    signal = train.astype(float)

                    # membrane potential bumps
                    mempot = 10 * scipy.ndimage.gaussian_filter1d(
                        signal, 0.01 / dt)

                    # make spike heights noisy
                    signal *= 1 / (1 + np.exp(
                        colorednoise.powerlaw_psd_gaussian(0, len(t))))

                    # add mempot bumps
                    signal += mempot

                    # generate additive noise
                    noise = colorednoise.powerlaw_psd_gaussian(
                        1, len(signal))  # use 1/f (pink) noise

                    # scale to attain desired snr
                    if snr_idx == 0:
                        # rather than making the noise infinite, it makes more sense to set the signal to 0
                        signal *= 0
                    else:
                        # the noise is scaled (rather than the signal) so that plotted signal examples look better
                        noise *= np.sqrt(
                            np.mean(signal**2) /
                            snr)  # scale noise to attain desired SNR
                    signal += noise

                    # check if we should keep this signal as the example
                    # count spikes in only the first 0..1 s, which is the part that gets plotted
                    num_spikes_train = np.sum(train[:len(train) // 2])
                    spike_train_dist = np.abs(num_spikes_train -
                                              base_frequency)
                    if spike_train_dist < example_signal_dist:
                        # store signal as an example
                        example_signals[alpha_idx, snr_idx, :] = signal
                        example_signal_dist = spike_train_dist

                    # -------------------------------------------------------------------------------------------------
                    #   compute wavelet on both noise and signal

                    for j, k in enumerate(min_cycles):
                        w = wavelet.cwt(signal, min_cycles=k,
                                        **wavelet_kwargs)[0]
                        waves[alpha_idx, snr_idx, i,
                              j, :] = np.mean(np.abs(w)**2, axis=1)

                    # -------------------------------------------------------------------------------------------------
                    #   compute peak detection on both noise and signal

                    for j in range(len(threshes)):

                        # compute on signal
                        x = signal
                        x = x - np.mean(x)
                        x = x / np.max(x)
                        spikes = utils.spike_detect(x, t, threshes[j])
                        if len(spikes) > 1:
                            f = np.log2(1 / np.diff(spikes))
                            h = np.histogram(
                                f, bins=log2_freq_edges)[0].astype(float)
                        else:
                            h = np.zeros_like(freqs)
                        peaks[alpha_idx, snr_idx, i, j, :] = h

                    # show progress
                    now = datetime.datetime.now()
                    elapsed_trains += 1
                    if (now - progress_tick).total_seconds() > 10:
                        progress_tick = now
                        proportion_done = elapsed_trains / total_trains
                        alpha_str = f'alpha {alpha} ({alpha_idx+1}/{len(alphas)})'
                        snr_str = f'snr ({snr_idx + 1}/{len(snrs)})'
                        train_str = f'train ({i+1}/{ntrains})'
                        print(
                            f'{alpha_str}, {snr_str}, {train_str}, {progress_str(progress_start, proportion_done)}'
                        )

        results = (alphas, threshes, min_cycles, snrs, freqs, t,
                   base_frequency, train_freq_hists, example_trains,
                   example_signals, waves, peaks)

        with open(cache_filename, 'wb') as f:
            pickle.dump(results, f, protocol=2)

        print(f'elapsed time: {datetime.datetime.now() - progress_start}')

    return results
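The progress reporting relies on a small progress_str(start_time, proportion_done) helper. A hypothetical sketch, assumed to report elapsed time and a naive linear estimate of time remaining:

def progress_str(start_time, proportion_done):
    # hypothetical: elapsed time plus a linear estimate of time remaining
    elapsed = datetime.datetime.now() - start_time
    if proportion_done > 0:
        eta = f'eta {elapsed * (1 - proportion_done) / proportion_done}'
    else:
        eta = 'eta unknown'
    return f'{100 * proportion_done:.1f}% done, elapsed {elapsed}, {eta}'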