Example #1
import numpy as np
import matplotlib.pyplot as plt
from sklearn.decomposition import PCA

import utils as ut  # project-local helper module used throughout these examples


def computePCA(colors=ut.list_of_data_sets):
    f, ax = plt.subplots(1, 1)
    for c in colors:
        print("Processing {}".format(c))
        block = ut.load_dataset(c)
        block = ut.sort_spiketrains(block)
        R = ut.rasterize_data(block, sf=100)
        R = ut.calculate_single_trial_PSTH(R, fs=100)
        n_units = R.shape[1]
        R = R.transpose(1, 0, 2).reshape(n_units, -1)
        pca = PCA(n_components=n_units)
        pca.fit(R.T)

        var_explained = 100 * np.cumsum(pca.explained_variance_) / np.sum(
            pca.explained_variance_)
        # plot the cumulative fraction of variance explained
        ax.plot(var_explained, '.-', color=c)

    ax.set_xlabel("Number of PCs")
    ax.set_ylabel("% Variance explained")
    ax.legend(colors)
    asp = np.diff(ax.get_xlim())[0] / np.diff(ax.get_ylim())[0]
    ax.set_aspect(asp)

    plt.show()
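For reference, sklearn's PCA also exposes explained_variance_ratio_, which already sums to one, so the cumulative curve above can be built without manual normalization. A minimal self-contained sketch, with random data standing in for the (units x samples) matrix R:

import numpy as np
from sklearn.decomposition import PCA

rng = np.random.default_rng(0)
R = rng.normal(size=(20, 1000))              # toy stand-in: 20 units x 1000 samples

pca = PCA(n_components=R.shape[0]).fit(R.T)  # samples as rows, as in computePCA
var_explained = 100 * np.cumsum(pca.explained_variance_ratio_)
print(var_explained[:5])                     # cumulative % variance of the first PCs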
Example #2
import numpy as np
import matplotlib.pyplot as plt

import utils as ut


def plotCorrelationByDistance(colors=ut.COLORS, **kwargs):
    # compute pairwise correlations and bin them by electrode distance;
    # fs sets the bin size and win_size an optional smoothing window
    fs = kwargs.get('fs', 50)
    win_size = kwargs.get('win_size', None)
    sig = kwargs.get('sig', False)

    f, ax = plt.subplots(1, 1)
    corr_mat = {}
    for i, c in enumerate(colors):
        print("preprocessing: {}".format(c))
        block = ut.load_dataset(c)
        r = ut.rasterize_data(block, sf=fs)
        if win_size != 0:
            r = ut.calculate_single_trial_PSTH(r, fs=fs, win_size=win_size)
        n_units = r.shape[1]
        r = r.transpose(1, 0, 2).reshape(n_units, -1)
        print("Calculating...")
        # optionally keep only significantly correlated pairs
        if sig:
            sig_mat = ut.get_sig_corr_mask(r, r.shape[0], 142)
            sig_mask = sig_mat < 0.05
            print("% significant: {0} for c: {1}".format(
                sig_mask.sum() / sig_mask.shape[0], c))
        cc = np.corrcoef(r)
        corr_mat[c] = cc.copy()
        cc = cc[~np.eye(cc.shape[0], dtype=bool)]
        if sig:
            sig_mask = sig_mask[~np.eye(sig_mask.shape[0], dtype=bool)]
            cc = cc[sig_mask]

        _, d_mat = ut.array_locations(block)
        d_mat = d_mat[~np.eye(d_mat.shape[0], dtype=bool)]
        if sig:
            d_mat = d_mat[sig_mask]

        cc_bin = []
        cc_sem = []
        distance = []
        # bin the pairwise correlations by electrode distance
        for dist in np.unique(d_mat):
            mask = (d_mat == dist)
            distance.append(dist)
            cc_bin.append(np.mean(cc[mask]))
            cc_sem.append(np.std(cc[mask]) / np.sqrt(len(cc[mask])))
        ax.errorbar(distance, cc_bin, cc_sem, fmt='o-', color=c)
        ax.set_xlabel("Electrode distance")
        ax.set_ylabel("Pairwise correlation")

    f.tight_layout()
    f.suptitle("bin size: {} ms".format(1000 / fs))

    return corr_mat
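The heart of this function is generic: take the off-diagonal entries of np.corrcoef and average them within distance bins. A self-contained toy version, with a synthetic distance matrix standing in for ut.array_locations:

import numpy as np

rng = np.random.default_rng(1)
r = rng.normal(size=(10, 500))                  # 10 units x 500 time bins
cc = np.corrcoef(r)
off_diag = ~np.eye(cc.shape[0], dtype=bool)

# hypothetical pairwise distances between 10 electrodes on a line
d = np.abs(np.arange(10)[:, None] - np.arange(10)[None, :])

for dist in np.unique(d[off_diag]):
    mask = (d == dist) & off_diag
    print(dist, cc[mask].mean())                # mean correlation per distance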
Example #3
import numpy as np
import matplotlib.pyplot as plt
import scipy.cluster.hierarchy as sch

import utils as ut


def plotCorrelationMatrix(colors=ut.COLORS, **kwargs):
    fs = kwargs.get('fs', 50)
    win_size = kwargs.get('win_size', None)

    corr_mat = {}
    for i, c in enumerate(colors):
        print("preprocessing: {}".format(c))
        block = ut.load_dataset(c)
        r = ut.rasterize_data(block, sf=fs)
        if win_size != 0:
            r = ut.calculate_single_trial_PSTH(r, fs=fs, win_size=win_size)
        n_units = r.shape[1]
        r = r.transpose(1, 0, 2).reshape(n_units, -1)
        print("Calculating...")

        corr_mat[c] = np.corrcoef(r)

    axes = [(0, 0), (0, 1), (0, 2), (1, 0), (1, 1), (1, 2)]
    colors = ut.COLORS[::-1]  # panel order (assumes the default color list)

    # global color limits across all datasets, excluding the diagonal
    vmin = 0
    vmax = 0
    for c in colors:
        off_diag = ~np.eye(corr_mat[c].shape[0], dtype=bool)
        cmin = np.min(corr_mat[c][off_diag])
        cmax = np.max(corr_mat[c][off_diag])

        if cmin < vmin:
            vmin = cmin
        if cmax > vmax:
            vmax = cmax

    f1, ax1 = plt.subplots(2, 3)
    for i, a in enumerate(axes):
        cm = corr_mat[colors[i]]
        Y = sch.linkage(cm, method='centroid')
        Z = sch.dendrogram(Y, orientation='right', no_plot=True)
        index = Z['leaves']
        cm = cm[:, index][index, :]
        # fill diagonal with 0s
        cm[np.diag_indices(cm.shape[0])] = 0
        im = ax1[a].imshow(cm,
                           aspect='auto',
                           cmap='seismic',
                           vmin=vmin,
                           vmax=vmax)
        ax1[a].set_title(colors[i])
    f1.subplots_adjust(right=0.8)
    cbar_ax = f1.add_axes([0.85, 0.15, 0.05, 0.7])
    f1.colorbar(im, cax=cbar_ax)
    f1.suptitle("bin size: {} ms".format(1000 / fs))

    return corr_mat
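The reordering step above is worth isolating: hierarchically cluster the correlation matrix, then re-index rows and columns by the dendrogram leaf order so correlated units end up adjacent. A minimal sketch (SciPy treats the rows of cm as observations here, exactly as in the function, and may emit a ClusterWarning that the square matrix looks like a distance matrix):

import numpy as np
import scipy.cluster.hierarchy as sch

cm = np.corrcoef(np.random.randn(8, 200))      # toy 8x8 correlation matrix
Y = sch.linkage(cm, method='centroid')
index = sch.dendrogram(Y, orientation='right', no_plot=True)['leaves']
cm_sorted = cm[:, index][index, :]             # rows/cols in cluster order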
Example #4
"""
Plot the population PSTH for a couple of different bin sizes to differentiate
between the dithered data and real data
"""

import utils as ut
import numpy as np
import matplotlib.pyplot as plt

colors = ['red', 'green']

data = ut.load_dataset(colors)

for fs in [50, 5]:
    R_red = ut.rasterize_data(data['red'], sf=fs)
    R_red = R_red.mean(0).mean(0)

    R_green = ut.rasterize_data(data['green'], sf=fs)
    R_green = R_green.mean(0).mean(0)
    time = np.linspace(0, 5, R_red.shape[0])

    s = int(np.ceil(fs * (10 / 1000)))
    e = 4 * fs  # keep samples up to ~4 s
    R_red = R_red[s:e]
    R_green = R_green[s:e]
    time = time[s:e]

    f, ax = plt.subplots(1, 1)

    ax.plot(time, R_red * fs, color='red')
    ax.plot(time, R_green * fs, color='green')
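One might finish each figure with labels; since multiplying the binned counts by fs above converts them to rates, the y-axis is in spikes/s:

    ax.set_xlabel("Time (s)")
    ax.set_ylabel("Firing rate (spikes/s)")
    ax.set_title("bin size: {} ms".format(1000 / fs))

plt.show()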
Example #5
import copy

import numpy as np
import matplotlib.pyplot as plt
import scipy.ndimage as sf  # gaussian_filter1d lives here; scipy.ndimage.filters is deprecated

import utils as ut

list_of_data_sets = ['blue', 'green', 'grey', 'orange', 'purple', 'red']
trial = 0
fs = 500
event = 'GO-ON'
f, ax = plt.subplots(2, 3)
axes = [(0, 0), (0, 1), (0, 2), (1, 0), (1, 1), (1, 2)]
for i, c in enumerate(list_of_data_sets):
    print("loading / analyzing {}".format(c))
    block = ut.load_dataset(c)
    block = ut.sort_spiketrains(block)

    # get binned spikes
    R = ut.rasterize_data(block, sf=fs)
    R_raster = copy.deepcopy(R)
    # convert to firing rates
    R = ut.calculate_single_trial_PSTH(R, fs=fs, win_size=100)
    n_units = R.shape[1]
    R_flat = R.transpose(1, 0, 2).reshape(n_units, -1)

    # get event array and smooth
    ev = ut.get_event_dict(block, fs=fs)[event]
    ev_time = np.where(ev[trial, :])[0][0]
    ev_flat = sf.gaussian_filter1d(ev.reshape(1, -1), 10)
    cc = np.zeros(R_flat.shape[0])
    for u in range(n_units):
        cc[u] = np.corrcoef(R_flat[u, :], ev_flat[0, :])[0, 1]

    order = np.argsort(cc)  # unit order, least to most event-correlated
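The correlation step relies on smoothing the binary event train first: gaussian_filter1d spreads each event marker in time, so np.corrcoef can register units whose rates rise near, not exactly at, the event sample. A tiny self-contained illustration:

import numpy as np
from scipy.ndimage import gaussian_filter1d

ev = np.zeros(200)
ev[80] = 1.0                               # one event marker in a binary train
ev_smooth = gaussian_filter1d(ev, 10)      # sigma = 10 bins, as in the loop above
rate = np.roll(ev_smooth, 5)               # toy "unit" that lags the event
print(np.corrcoef(rate, ev_smooth)[0, 1])  # high correlation despite the lag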
Example #6
import numpy as np
import matplotlib.pyplot as plt
from dPCA import dPCA  # machenslab dPCA package

import utils as ut


def compute_dPCA(colors=ut.list_of_data_sets, fs=100, win_size=200):
    print("Loading datasets")
    data = ut.load_dataset(colors)
    for color in colors:
        print("analyzing dataset {}".format(color))
        win = win_size
        block = data[color]
        #block = ut.sort_spiketrains(block)
        R = ut.rasterize_data(block, sf=fs)

        nunits = R.shape[1]
        ntrials = R.shape[0]
        trial_type = [
            block.segments[i].annotations['belongs_to_trialtype']
            for i in range(0, ntrials)
        ]
        un_types = np.unique(trial_type)

        time = np.linspace(0, 5, R.shape[-1])
        if win_size != 0:
            R = ut.calculate_single_trial_PSTH(R, fs=fs, win_size=win)
            win = int(np.ceil(fs * (win_size / 1000)))
            s = int(np.ceil((win / 2) * (1 / fs)))
            e = int(R.shape[-1] - fs) - s  # trim the final second of bins
        else:
            s = 0
            e = int(R.shape[-1] - fs)  # trim the final second of bins

        time = time[s:e]

        for i, t in enumerate(un_types):
            trials = np.argwhere(np.array(trial_type) == t)
            r_temp = np.mean(R[trials, :, s:e], 0)
            if i == 0:
                R_psth = r_temp
            else:
                R_psth = np.concatenate((R_psth, r_temp), axis=0)

        R_psth = R_psth.transpose(1, 0, 2)

        dpca = dPCA.dPCA(labels='st',
                         join={'st': ['s', 'st']},
                         n_components=nunits - 50,
                         regularizer=None)

        # fit and project the mean PSTHs back out
        R_transform = dpca.fit_transform(R_psth)

        # get event times
        ev_times = ut.get_event_dict(block, fs=fs)
        events = {}
        for ev in ut.trial_events:
            events[ev] = np.argwhere(ev_times[ev][0, :] == 1)[0][0] / fs

        for i, comp in enumerate(R_transform.keys()):
            # plot the first four components of each marginalization
            f, ax = plt.subplots(4, 1)
            for j in range(0, 4):
                ax[j].set_title(comp + ", var explained: " + str(
                    round(dpca.explained_variance_ratio_[comp][j] * 100, 2)))
                ax[j].plot(time, R_transform[comp][j, :, :].T)
                ax[j].legend(un_types)
                for ev in ut.trial_events:
                    ax[j].axvline(events[ev], lw=2, color='k')
            f.suptitle(color)
            f.tight_layout()

        f, ax = plt.subplots(1, 1)
        c = ['black', 'gold']
        for i, k in enumerate(R_transform.keys()):
            ax.plot(np.cumsum(
                np.array(dpca.explained_variance_ratio_[k]) * 100),
                    'o-',
                    color=c[i])
        ax.legend(["Condition independent", "Condition dependent"])
        ax.set_xlabel("PC")
        ax.set_ylabel("% variance explained")
        ax.set_ylim((0, 100))
        ax.axhline(0, linestyle='--', color='k')
        ax.set_title(color)
        f.tight_layout()

    return dpca
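For orientation, dPCA is fit here on trial-averaged data of shape (units, conditions, time) and returns a dict of projections per marginalization. A toy fit on random data (assuming the machenslab dPCA package imported above; shapes and keys chosen purely for illustration):

import numpy as np
from dPCA import dPCA

X = np.random.randn(30, 4, 100)           # (n_units, n_conditions, n_timepoints)
dpca = dPCA.dPCA(labels='st', join={'st': ['s', 'st']}, n_components=5)
Z = dpca.fit_transform(X)                 # dict keyed by marginalization
print({k: v.shape for k, v in Z.items()})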
Example #7
import os
import pickle

import utils as ut

# all_data: per-color results dict, assumed initialized earlier in the
# original script (ut.load_dataset() creates an entry per color)

for color in ut.list_of_data_sets:
    data_path = ut.DUMP_DIR + "/" + color + ".pickle"
    if os.path.exists(data_path):
        print("Loading dumped data from", data_path)
        with open(data_path, 'rb') as f:
            all_data[color] = pickle.load(f)
    else:
        print('Processing ' + str(color) + ' data set')
        # read in the raw data
        data_block = ut.load_dataset(color, path=None)
        # ut.load_dataset() creates an entry in all_data for the color;
        # assigning neo_block here is redundant but left for readability
        all_data[color]['neo_block'] = data_block  # the original neo Block
        all_data[color]['spike_raster'] = ut.rasterize_data(
            data_block, sf=1.e3)  # spikes as a 0/1 raster
        all_data[color]['spike_trains'] = ut.make_lists_of_spike_trains(
            data_block)  # spike times (in seconds?)
        all_data[color]['isi'] = ut.make_lists_of_isi(data_block)

        print("Dumping data to disk for future quick loading times")
        with open(data_path, 'wb') as f:
            pickle.dump(all_data[color], f)
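
# the loop above implements a simple on-disk cache: each color's results are
# computed once, pickled into ut.DUMP_DIR, then reloaded on later runs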

fns = [
    (compute_isi_fits, [all_data], {}),
    (plotCorrelationMatrix, [ut.list_of_data_sets], {
        'fs': 50,
        'win_size': 0
    }),
    (plotCorrelationMatrix, [ut.list_of_data_sets], {