Example 1
# Assumed imports for these snippets. Repo-local names (helpers, speedFilterFR,
# preprocess, _fast_occ, _fast_bin, CircularRegression, lm) and module-level constants
# (track_end, posx_edges, dt, SMOOTHNESS, every_nth_time_bin, REGULARIZATION) are
# defined elsewhere in the original repository and are not redefined here.
import os
import shutil

import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import umap
from sklearn.decomposition import PCA
from scipy.ndimage import gaussian_filter1d


def plotSummary(summary_this,data,ds_factor):
    # one row of panels per entry of summary_this: trial, position, gain, cross-correlation
    fig,axs = plt.subplots(len(summary_this),4,figsize=(12,12))
    speed = helpers.calcSpeed(data['posx'])
    speed_ds,_ = speedFilterFR(speed,speed,speed_threshold = 2)
    for ii,(X_um,cluID,Xpc,xcorr) in enumerate(summary_this):
        if len(summary_this)>1:
            ax = axs[ii]
        else:
            ax = axs #for when there is only 1 cluster
        try:
            tri,_=speedFilterFR(data['trial'],speed,speed_threshold=2)
            ax[0].scatter(X_um[:,0][::2],X_um[:,1][::2],c=tri[::ds_factor][::2],s=3,marker='.')
            ax[0].set_title('trial')
        except Exception:
            pass
        try:
            pos,_=speedFilterFR(data['posx'],speed,speed_threshold=2)

            ax[1].scatter(X_um[:,0][::2],X_um[:,1][::2],c=pos[::ds_factor][::2],s=3,marker='.')
            ax[1].set_title('pos')
        except Exception:
            pass

        try:
            gain,_=speedFilterFR(data['trial_gain'][data['trial']-1],speed,speed_threshold = 2)

            sc=ax[2].scatter(X_um[:,0][::2],X_um[:,1][::2],c=gain[::ds_factor][::2],s=3,cmap='Set1',marker='.')
            ax[2].set_title('gain')
            handles,labels = sc.legend_elements(prop='colors',alpha = 0.6)
            ax[2].legend(handles,labels,loc='best')
        except Exception:
            pass
        try:
            ax[3].imshow(xcorr,aspect='auto')
        except Exception:
            pass
        for ax_this in ax:
            ax_this.set_xticks([])
            ax_this.set_yticks([])
    return fig
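

# Minimal usage sketch (an assumption, not part of the original code): summary_this is
# taken to be a list of (X_um, cluID, Xpc, xcorr) tuples, matching how plotSummary
# unpacks it above; the output filename default is hypothetical.
def savePlotSummary(summary_this, data, ds_factor, out_path='summary.png'):
    fig = plotSummary(summary_this, data, ds_factor)
    fig.savefig(out_path, dpi=150, bbox_inches='tight')
    plt.close(fig)
    return out_path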
Example 2
def runUMAPForCluster(good_cells,data,ds_factor=5):
    spikes,X,t_edges = helpers.calculateFiringRate(data,good_cells = good_cells,t_edges = data['post'])
    speed = helpers.calcSpeed(data['posx'])
    FR = spikes.mean(axis=0)
    fr_idx = FR>0.1  # keep only units with mean rate above 0.1
    (X,speed_ds)=preprocess(spikes[:,fr_idx],speed,ds_factor=ds_factor,gauss_win=10,speed_threshold = 2)
    pca = PCA(n_components=6)
    X[np.logical_not(np.isfinite(X))]=0  # zero out NaN/inf rates before PCA
    X_new=pca.fit_transform(X)
    #reducer = umap.UMAP(n_components=3,metric='cosine',init='spectral',min_dist=0.8)
    reducer = umap.UMAP(n_components=2)
    Xu = reducer.fit_transform(X_new)
    return Xu,X_new[:,0]
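

# Hedged usage sketch (assumed, not from the original repo): embed one session with
# runUMAPForCluster and plot the 2-D embedding coloured by the first principal
# component that the function returns alongside it.
def plotUMAPEmbedding(good_cells, data, ds_factor=5):
    Xu, pc1 = runUMAPForCluster(good_cells, data, ds_factor=ds_factor)
    fig, ax = plt.subplots(figsize=(5, 5))
    sc = ax.scatter(Xu[:, 0], Xu[:, 1], c=pc1, s=3, marker='.')
    fig.colorbar(sc, ax=ax, label='PC 1')
    ax.set_xticks([])
    ax.set_yticks([])
    return fig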
def run_for_file_gain(data, TRIALS):
    try:
        anatomy = data['anatomy']
    except KeyError:
        print('no anatomy')
        return None

    if 'parent_shifted' in anatomy:
        group = anatomy['parent_shifted']
    else:
        group = anatomy['cluster_parent']
    # regions = ('MEC', 'VISp', 'RS')
    regions = ('MEC',)  # one-element tuple; str.startswith accepts a tuple of prefixes
    idx = np.array([str(ss).startswith(regions) for ss in group])
    posx = np.mod(data['posx'], track_end)
    post = data['post']
    trial = data['trial']
    sp = data['sp']
    good_cells = sp['cids'][np.logical_and(idx, sp['cgs'] == 2)]
    if len(good_cells) < 5:
        return None

    # posx categories for position decoding (binned)
    posx_bin = np.digitize(posx, posx_edges)
    validSpikes = np.in1d(data['sp']['clu'], good_cells)
    spike_clu = data['sp']['clu'][validSpikes]
    _, spike_idx = np.unique(spike_clu, return_inverse=True)
    spiketimes = np.digitize(data['sp']['st'][validSpikes], data['post'])
    spikelocations = posx_bin[spiketimes] - 1
    trial_idx = data['trial'][spiketimes] - 1

    occ2 = np.zeros((len(posx_edges) - 1, TRIALS.max()), dtype=float)
    _fast_occ(occ2, data['trial'] - 1, posx_bin - 1)
    n_cells = len(good_cells)
    shape = (n_cells, len(posx_edges) - 1, TRIALS.max())
    counts = np.zeros(shape, dtype=float)
    _fast_bin(counts, trial_idx, spikelocations, spike_idx)
    stab = np.zeros(n_cells)  # per-cell spatial stability over the first six trials in TRIALS
    for iC in range(n_cells):
        tmp = np.divide(counts[iC, :, :], occ2)
        df = pd.DataFrame(tmp)
        df.interpolate(method='pchip', axis=0, limit=None, inplace=True)
        tmp = df.values
        tmp_f = gaussian_filter1d(tmp, 3, axis=0, mode='wrap')
        # trial-by-trial correlation of the smoothed maps for the first six trials in TRIALS
        cc = np.corrcoef(np.transpose(tmp_f[:, TRIALS[0:6] - 1]))
        # stability = mean pairwise correlation (upper triangle, diagonal excluded)
        stab[iC] = np.nanmean(cc[np.triu(np.full((6, 6), True), 1)])

    # count spikes in each time bin for each cell
    spikecount = np.empty((len(good_cells), len(post) - 1))
    for cell_idx in range(len(good_cells)):
        spike_t = sp['st'][sp['clu'] == good_cells[cell_idx]]
        spikecount[cell_idx, :] = np.histogram(spike_t, bins=post)[0]

    fr = spikecount.sum(axis=1) / post.max()
    valid_idx = np.logical_and(stab > .3, fr >= 1)  # keep stable (stab > 0.3) cells with mean rate >= 1
    if valid_idx.sum() < 5:
        return None
    # pad with one empty bin so the time axis has the same length as post
    spikecount = np.hstack((spikecount, np.zeros((spikecount.shape[0], 1))))
    spikerate = spikecount / dt
    spikes = np.transpose(spikerate)[:, valid_idx]  # time x cells, valid cells only
    spikes[np.logical_not(np.isfinite(spikes))] = 0
    X = gaussian_filter1d(spikes, SMOOTHNESS, axis=0)  # smooth rates over time

    speed = helpers.calcSpeed(posx)
    position = np.mod(posx, track_end)
    n_units = spikes.shape[1]
    speed_idx = speed > 0
    trial_idx = np.in1d(trial, TRIALS)
    # only take data from trials of interest and above the speed threshold
    valid_idx = np.logical_and(speed_idx, trial_idx)
    speed_r = speed[valid_idx]
    speed_r = speed_r[0::every_nth_time_bin]
    position_r = position[valid_idx]
    position_r = np.mod(position_r + 200, 400)
    position_r = position_r[0::every_nth_time_bin]
    position_r[np.logical_not(np.isfinite(position_r))] = 0
    trial_r = trial[valid_idx]
    trial_r = trial_r[0::every_nth_time_bin]
    posbin_r = posx_bin[valid_idx]
    posbin_r = posbin_r[0::every_nth_time_bin]

    X_r = X[valid_idx, :]
    #X_r = scipy.stats.zscore(X_r,axis=0)
    X_r = X_r[0::every_nth_time_bin, :]
    # map track position to an angle in [-pi, pi) for the circular regression below
    theta = (position_r / track_end) * 2 * np.pi - np.pi

    model = CircularRegression(alpha=REGULARIZATION)
    pos_bin = posbin_r - 1  # zero-based position bin

    train_TRIALS = TRIALS[0:6]

    train_idx = np.in1d(trial_r, train_TRIALS)

    # tuning curves: mean rate of each unit in each of the 200 position bins, training trials only
    tc = np.full((X_r.shape[1], 200), np.nan)
    for ii in range(200):
        tc[:, ii] = X_r[np.logical_and(pos_bin == ii, train_idx), :].mean(axis=0)

    df = pd.DataFrame(tc)
    df.interpolate(method='pchip', axis=1, limit=None, inplace=True)
    tc = df.values
    # template decoding: for each time bin, pick the position bin whose tuning-curve
    # template has the largest dot product with the population activity
    ff = np.dot(tc.transpose(), np.transpose(X_r))
    g = np.argmax(ff, axis=0)

    # fit the circular decoder on the training trials, then predict every time bin
    model.fit(X_r[train_idx, :], theta[train_idx])
    predicted_theta = model.predict(X_r)
    predicted_bin = g + 1  # 1-based template-matching decode from above

    output = dict()
    output['cluID'] = good_cells
    output['pos_bin'] = pos_bin
    output['true_pos'] = position_r
    output['true_theta'] = theta
    output['predicted_theta'] = predicted_theta
    output['predicted_bin'] = predicted_bin
    output['region'] = group[np.logical_and(idx, sp['cgs'] == 2)]
    output['trial'] = trial_r
    return output
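

# Hedged evaluation sketch (not in the original source): summarise the circular
# decoding error from the dict returned by run_for_file_gain. The default track
# length of 400 position units is an assumption matching the
# np.mod(position_r + 200, 400) wrap used above.
def circularDecodingError(output, track_length=400.0):
    # wrap the angular prediction error into [-pi, pi)
    d = np.mod(output['predicted_theta'] - output['true_theta'] + np.pi, 2 * np.pi) - np.pi
    err = np.abs(d) / (2 * np.pi) * track_length
    return np.median(err)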
Example 4
            # _,_,stabPre=helpers.calculateFiringRateMap(data,good_cells=good_cells,trials2extract = trial_range[0:6],ops=opt)
            # good_cells = good_cells[stabPre>0.5]
            X_um, trial_idx = runUMAPForCluster(
                good_cells,
                data,
                trial_range,
                ds_factor=ds_factor,
                speed_threshold=speed_threshold)

            counts, spMapN, stab = helpers.calculateFiringRateMap(
                data,
                good_cells=good_cells,
                trials2extract=trial_range,
                ops=opt)

            speed = helpers.calcSpeed(data['posx'])
            twoD = True

            if twoD:
                fig, ax = plt.subplots(1, 4, figsize=(15, 5))
            else:
                fig = plt.figure(figsize=(15, 5))
                ax = [
                    plt.subplot(1, 4, i + 1, projection='3d') for i in range(3)
                ]
                ax.append(plt.subplot(1, 4, 4))

            try:
                tri, _ = speedFilterFR(data['trial'][trial_idx],
Example 5
    shutil.copy2(os.path.abspath(__file__), im_path)  # copy this script into the image output folder

    for fi in files:
        try:
            data = lm.loadmat(fi)
            _, sn = os.path.split(fi)

            opt = helpers.options()
            good_cells = data['sp']['cids'][data['sp']['cgs'] == 2]
            counts, spMapN, stab = helpers.calculateFiringRateMap(
                data, good_cells=good_cells, trials2extract=None, ops=opt)

            spikes_gain, _, _ = helpers.calculateFiringRate(
                data, good_cells=good_cells, t_edges=data['post'])
            speed_gain = helpers.calcSpeed(data['posx'])

            FR = spikes_gain.mean(axis=0)
            fr_idx = FR > 0.1
            spMapN = spMapN[fr_idx]
            (X_gain, speed_ds) = preprocess(spikes_gain[:, fr_idx],
                                            speed_gain,
                                            speed_threshold=2)
            pca = PCA(n_components=6)

            X_new = pca.fit_transform(X_gain)

            # the first PCs are used directly as the embedding here (UMAP step skipped)
            # reducer = umap.UMAP(n_components=2)
            # X_um = reducer.fit_transform(X_new)
            X_um = X_new
            fig, ax = plt.subplots(1, 4, figsize=(15, 5))
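            # Hedged sketch of one way the panels could be filled in, mirroring
            # plotSummary in Example 1 (illustration only; speed_ds is assumed to be
            # aligned with the rows of X_um returned by preprocess, and the output
            # filename is hypothetical):
            # sc = ax[0].scatter(X_um[:, 0], X_um[:, 1], c=speed_ds, s=3, marker='.')
            # ax[0].set_title('speed')
            # fig.colorbar(sc, ax=ax[0])
            # fig.savefig(os.path.join(im_path, sn + '_embedding.png'))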