Example #1
def batch_onset_tunings(p_load, p_save):
    DF_ALL = pd.DataFrame()
    DF_DIRECTION = pd.DataFrame()
    DF = pd.DataFrame()
    for f in glob.glob(os.path.join(p_load, '*.h5')):
        print('Working on {}'.format(os.path.basename(f)))
        blk = neoUtils.get_blk(f)
        num_units = len(blk.channel_indexes[-1].units)
        for unit_num in range(num_units):
            df_all, df_direction, df = onset_tuning(blk, unit_num=unit_num)
            if isinstance(df_all, int):  # onset_tuning returns -1 on failure
                continue
            DF = pd.concat([DF, df])
            DF_ALL = pd.concat([DF_ALL, df_all])
            DF_DIRECTION = pd.concat([DF_DIRECTION, df_direction])

    # get_stim_responsive columns
    stim_responsive_file = os.path.join(p_save, 'cell_id_stim_responsive.csv')
    if os.path.isfile(stim_responsive_file):
        is_stim = pd.read_csv(stim_responsive_file)
        DF = DF.merge(is_stim, on='id')
        DF_ALL = DF_ALL.merge(is_stim, on='id')
        DF_DIRECTION = DF_DIRECTION.merge(is_stim, on='id')

    DF.to_csv(os.path.join(p_save, 'onset_data.csv'), index=False)
    DF_ALL.to_csv(os.path.join(p_save, 'onset_tuning_by_cell.csv'),
                  index=False)
    DF_DIRECTION.to_csv(os.path.join(p_save,
                                     'onset_tuning_by_cell_and_direction.csv'),
                        index=False)
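
A minimal driver sketch for the batch function above; the paths are hypothetical placeholders, not paths from the original pipeline:

if __name__ == '__main__':
    # hypothetical locations of the neo h5 files and the results directory
    p_load = r'D:\VG3D\_deflection_trials\_NEO'
    p_save = r'D:\VG3D\_deflection_trials\_NEO\results'
    batch_onset_tunings(p_load, p_save)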
Example #2
def batch_anova(p_load, p_save):
    '''
    Calculate the ANOVA tables and data by deflection direction and arclength
    :param p_load: path to the directory of neo h5 files
    :param p_save: path to the directory where the output csv files are saved
    :return: None. Writes two csv files to p_save
    '''
    aov = pd.DataFrame()
    df = pd.DataFrame()
    for f in glob.glob(os.path.join(p_load, '*.h5')):
        blk = neoUtils.get_blk(f)
        print('Working on {}'.format(os.path.basename(f)))
        num_units = len(blk.channel_indexes[-1].units)

        for ii in range(num_units):
            try:
                df_temp, aov_temp = anova_analysis(blk, unit_num=ii)
                if isinstance(df_temp, int):  # anova_analysis returns -1 on failure
                    continue
                df = pd.concat([df, df_temp])
                aov = pd.concat([aov, aov_temp])

                # plot_anova(df_temp,save_loc=p_save)
            except Exception as ex:
                print('Problem with {}c{}: {}'.format(os.path.basename(f), ii, ex))

        plt.close('all')
    df.to_csv(os.path.join(p_save, 'direction_arclength_FR_group_data.csv'))
    aov.to_csv(os.path.join(p_save, 'direction_arclength_FR_group_anova.csv'))
Example #3
def batch_calc_MSE():
    p_load = os.path.join(os.environ['BOX_PATH'],
                          r'__VG3D\_deflection_trials\_NEO')
    p_save = os.path.join(os.environ['BOX_PATH'],
                          r'__VG3D\_deflection_trials\_NEO\results')
    p_smooth = r'E:\VG3D\_rerun_with_pad\_deflection_trials\_NEO\smooth'

    DF = pd.DataFrame()
    for ii, f in enumerate(glob.glob(os.path.join(p_load, '*.h5'))):
        if ii == 0:
            continue
        blk = neoUtils.get_blk(f)
        num_units = len(blk.channel_indexes[-1].units)
        for unit_num in range(num_units):
            id = neoUtils.get_root(blk, unit_num)
            print('Working on {}'.format(id))
            try:
                mse = calc_MSE(f, p_smooth, unit_num)
                df = pd.DataFrame()
                for jj, var in enumerate(
                    ['Mx', 'My', 'Mz', 'Fx', 'Fy', 'Fz', 'TH', 'PHI']):
                    df[var] = mse[jj]
                df['id'] = id
                df['smoothing'] = np.arange(5, 100, 10)
                DF = pd.concat([DF, df])
            except Exception as ex:
                print('Problem on {}: {}'.format(id, ex))

    DF.to_csv(os.path.join(p_save, 'MSE_by_smoothing.csv'), index=False)
Example #4
def get_adaptation_df(p_load, max_t=20):
    '''
    Returns a dataframe that has the firing rate for the
    first N (default=20) ms for each cell and direction. Should be useful for
    calculating an 'adaptation' parameter.

    :param p_load: path to where all the neo files exist
    :param max_t: maximum time in milliseconds to grab the firing rate
    :return:
    '''
    df_all = pd.DataFrame()
    for f in glob.glob(os.path.join(p_load, '*.h5')):
        blk = neoUtils.get_blk(f)
        num_units = len(blk.channel_indexes[-1].units)
        for unit_num in range(num_units):
            df = pd.DataFrame()
            id = neoUtils.get_root(blk, unit_num)
            print('Working on {}'.format(id))
            PSTH, edges, _, med_angle = get_PSTH_by_dir(blk,
                                                        unit_num,
                                                        norm_dur=False,
                                                        binsize=1 * pq.ms)
            if isinstance(PSTH, int):  # get_PSTH_by_dir returns -1 on failure
                continue
            for ii in range(len(PSTH)):
                df_dir = pd.DataFrame()
                df_dir['dir_idx'] = np.repeat(ii, max_t)
                df_dir['time'] = edges[ii][:max_t]
                df_dir['rate'] = PSTH[ii][:max_t]
                df_dir['med_angle'] = med_angle[ii]
                df = pd.concat([df, df_dir])
            df['id'] = id
            df_all = pd.concat([df_all, df])
    return (df_all)
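
The returned frame lends itself to a per-cell, per-direction adaptation measure; a sketch of one such summary (hypothetical, not part of the original pipeline), assuming 'rate' holds the 1 ms binned PSTH values:

def adaptation_index(df_all, early=5):
    '''Hypothetical summary: late-to-early firing-rate ratio per cell and
    deflection direction, computed from the output of get_adaptation_df.'''
    grouped = df_all.groupby(['id', 'dir_idx'])
    early_fr = grouped.apply(lambda g: g.loc[g['time'] < early, 'rate'].mean())
    late_fr = grouped.apply(lambda g: g.loc[g['time'] >= early, 'rate'].mean())
    return late_fr / early_fr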
Example #5
def get_threshold_index(p_load):
    '''
    Return a dataframe with a binary flag indicating whether each contact
    elicited a spike, for every cell.
    :param p_load: path to the neo files
    :return: a pandas dataframe with all the contacts for all cells. Cannot be
        reshaped, since every whisker has a different number of contacts.
    '''
    df_all = pd.DataFrame()
    for f in glob.glob(os.path.join(p_load, '*.h5')):
        blk = neoUtils.get_blk(f)
        num_units = len(blk.channel_indexes[-1].units)
        for unit_num in range(num_units):
            df = pd.DataFrame()
            id = neoUtils.get_root(blk, unit_num)
            print('working on {}'.format(id))
            trains = spikeAnalysis.get_contact_sliced_trains(blk, unit_num)[-1]
            dir_idx, med_dir = worldGeometry.get_contact_direction(
                blk, plot_tgl=False)
            if isinstance(dir_idx, int):  # get_contact_direction returns -1 on failure
                continue
            dir_map = dict(enumerate(med_dir))
            df['id'] = [id for x in range(len(trains))]
            df['did_spike'] = [len(x) > 0 for x in trains]
            df['dir_idx'] = dir_idx
            df['med_dir'] = df['dir_idx'].map(dir_map)
            df_all = pd.concat([df_all, df])
    return (df_all)
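
Since 'did_spike' is boolean, its group mean is a spike probability; a hedged usage sketch (not part of the original pipeline):

def spike_prob_by_direction(df_all):
    # probability that a contact elicits at least one spike,
    # per cell and per (median) deflection direction
    return (df_all.groupby(['id', 'med_dir'])['did_spike']
            .mean()
            .rename('p_spike')
            .reset_index())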
Example #6
def get_Xc_yc(fname,p_smooth,unit_num,binsize):
    varlist = ['M', 'F', 'TH', 'PHIE']
    blk = neoUtils.get_blk(fname)
    blk_smooth = GLM.get_blk_smooth(fname,p_smooth)

    cbool = neoUtils.get_Cbool(blk)
    X = GLM.create_design_matrix(blk,varlist)
    Xdot = GLM.get_deriv(blk,blk_smooth,varlist,[0,5,9])[0] #maybe only want one derivative?

    X = np.concatenate([X,Xdot],axis=1)
    X = neoUtils.replace_NaNs(X,'pchip')
    X = neoUtils.replace_NaNs(X,'interp')

    Xbin = GLM.bin_design_matrix(X,binsize=binsize)
    scaler = sklearn.preprocessing.StandardScaler(with_mean=False)
    Xbin = scaler.fit_transform(Xbin)
    cbool_bin= GLM.bin_design_matrix(cbool[:,np.newaxis],binsize=binsize).ravel()

    y = neoUtils.concatenate_sp(blk)['cell_{}'.format(unit_num)]
    ybin = elephant.conversion.BinnedSpikeTrain(y,binsize=binsize*pq.ms).to_array().T.astype('f8')
    Xbin = Xbin[:ybin.shape[0],:]
    cbool_bin = cbool_bin[:ybin.shape[0]]
    yhat = np.zeros(ybin.shape[0])

    Xc = Xbin[cbool_bin,:]
    yc = ybin[cbool_bin,:]
    return(Xc,yc,cbool_bin,yhat)
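
A sketch of how the contact-masked design matrix and binned spike counts might feed a GLM fit; PoissonRegressor (scikit-learn >= 0.23) stands in for the pipeline's own GLM code, and the paths are hypothetical:

import sklearn.linear_model

fname = r'D:\VG3D\_NEO\rat_example_NEO.h5'  # hypothetical file
p_smooth = r'D:\VG3D\_NEO\smooth'           # hypothetical smoothing directory
Xc, yc, cbool_bin, yhat = get_Xc_yc(fname, p_smooth, unit_num=0, binsize=10)
glm = sklearn.linear_model.PoissonRegressor(alpha=1.0)  # L2-penalized Poisson GLM
glm.fit(Xc, yc.ravel())
yhat[cbool_bin] = glm.predict(Xc)  # predicted rate in contact bins only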
Example #7
def batch_pc(p_load,p_save):
    ID = []
    COV=[]
    EXP_VAR=[]
    for f in glob.glob(os.path.join(p_load,'*.h5')):
        blk = neoUtils.get_blk(f)
        id = neoUtils.get_root(blk,0)[:-2]
        print('Working on {}'.format(id))
        pc = get_pc(blk)
        cov = pc.get_covariance()
        exp_var = pc.explained_variance_ratio_
        COV.append(cov)
        EXP_VAR.append(exp_var)
        ID.append(id)
    ID = np.array(ID)
    COV = np.array(COV)
    EXP_VAR = np.array(EXP_VAR)
    COV = np.moveaxis(COV,[0,1,2],[2,0,1])
    EXP_VAR = EXP_VAR.T
    var_labels = ['Mx','My','Mz','Fx','Fy','Fz','TH','PHI']

    np.savez(os.path.join(p_save,'cov_exp_var.npz'),
             cov=COV,
             exp_var=EXP_VAR,
             id=ID,
             var_labels=var_labels)

    print('Saved PCA descriptions!')
    return None
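
Reading the saved PCA summary back is straightforward; a usage sketch with a hypothetical save directory:

import os
import numpy as np

p_save = r'D:\VG3D\results'  # hypothetical; the directory passed to batch_pc
dat = np.load(os.path.join(p_save, 'cov_exp_var.npz'))
print(dat['var_labels'])     # ['Mx' 'My' 'Mz' 'Fx' 'Fy' 'Fz' 'TH' 'PHI']
print(dat['exp_var'][:, 0])  # explained variance ratios for the first cell
print(dat['cov'][:, :, 0])   # 8x8 covariance matrix for the first cell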
Example #8
def batch_get_first_spike_val(p_load, p_save, p_smooth):
    DF = pd.DataFrame()
    for f in glob.glob(os.path.join(p_load, '*.h5')):
        blk = neoUtils.get_blk(f)
        num_units = len(blk.channel_indexes[-1].units)
        for unit_num in range(num_units):
            df = get_first_spike_vals(f, p_smooth, unit_num)
            DF = pd.concat([DF, df])
    DF.to_csv(os.path.join(p_save, 'first_spike_data.csv'), index=False)
Example #9
def main(p_raw,p_smooth,p_save):
    print('p_raw: {}\np_smooth:{}\np_save:{}'.format(p_raw,p_smooth,p_save))
    for f in glob.glob(os.path.join(p_raw,'*.h5')):
        f_smooth = find_smooth_match(f, p_smooth)
        if len(f_smooth) == 0:
            print('Smooth counterpart of {} not found'.format(f))
            continue
        else:
            f_smooth = f_smooth[0] + '.h5'
            print('Working on {}'.format(os.path.basename(f)))
        blk = neoUtils.get_blk(f)
        blk_smooth = neoUtils.get_blk(os.path.join(p_smooth, f_smooth))
        num_units = len(blk.channel_indexes[-1].units)
        for unit_num in range(num_units):
            try:
                plot_smooth_hists(blk, blk_smooth, unit_num, p_save)
            except Exception as ex:
                print('Failed at {}: {}'.format(f, ex))
Example #10
def run_model(fname, p_smooth, unit_num, savepath, param_dict):
    X, y, cbool = get_X_y(fname, p_smooth, unit_num)
    blk = neoUtils.get_blk(fname)
    root = neoUtils.get_root(blk, unit_num)
    savefile = os.path.join(savepath, '{}_tensorflow.ckpt'.format(root))
    X[np.invert(cbool), :] = 0
    y[np.invert(cbool), :] = 0
    Xb = X_to_pillow(X[:, :8])
    print(param_dict)
    build_GLM_model(Xb, y, savefile, **param_dict)
Example #11
def get_blk_smooth(fname,p_smooth):
    root = os.path.splitext(os.path.basename(fname))[0]
    smooth_file = glob.glob(os.path.join(p_smooth,root+'*smooth*.h5'))
    if len(smooth_file)>1:
        raise ValueError('More than one smooth file found')
    elif len(smooth_file)==0:
        raise ValueError('No smooth file found')

    blk = neoUtils.get_blk(smooth_file[0])

    return(blk)
Example #12
def batch_ISI_by_deflection(p_load):
    DF = pd.DataFrame()
    for f in glob.glob(os.path.join(p_load, '*.h5')):
        blk = neoUtils.get_blk(f)
        print('Working on {}'.format(os.path.basename(f)))
        num_units = len(blk.channel_indexes[-1].units)
        # _,med_dir = worldGeometry.get_contact_direction(blk,plot_tgl=False)
        for unit_num in range(num_units):
            df = ISI_by_deflection(blk, unit_num)
            DF = pd.concat([DF, df])
    return (DF)
Example #13
def batch_thresh_response(p_load,p_save):
    DF = pd.DataFrame()
    for f in glob.glob(os.path.join(p_load,'*.h5')):
        blk = neoUtils.get_blk(f)
        print('Working on {}'.format(os.path.basename(f)))
        num_units = len(blk.channel_indexes[-1].units)
        for unit_num in range(num_units):
            df = create_threshold_DF(blk, unit_num, time_win=10, max_spikes=10)
            if not isinstance(df, int):  # create_threshold_DF returns -1 on failure
                DF = pd.concat([DF, df])
    DF.to_csv(os.path.join(p_save, 'threshold_variable_response.csv'),
              index=False)
    return (0)
Example #14
def plot_joint_spaces(p_load,p_save):
    for f in glob.glob(os.path.join(p_load,'rat*.h5')):
        print(os.path.basename(f))
        blk = neoUtils.get_blk(f)
        for unit in blk.channel_indexes[-1].units:
            unit_num = int(unit.name[-1])
            try:
                mymz_space(blk, unit_num, p_save=p_save, save_tgl=True, im_ext='png', dpi_res=300)
                phase_plots(blk, unit_num, p_save=p_save, save_tgl=True, im_ext='png', dpi_res=300)
            except Exception as ex:
                print('File {} did not create jointplots: {}'.format(
                    neoUtils.get_root(blk, unit_num), ex))
Example #15
def main(fname,p_smooth,nfilts=3,pca_tgl=False):
    print('loading in {}'.format(fname))
    blk = neoUtils.get_blk(fname)
    save_dir = os.path.split(fname)[0]
    num_units = len(blk.channel_indexes[-1].units)
    for unit_num in range(num_units):
        id = neoUtils.get_root(blk,unit_num)
        save_file = os.path.join(save_dir,'{}_IF_model.npz'.format(id))
        if os.path.isfile(save_file):
            print('File already found. Aborting...')
            break
        X,y,cbool = get_X_y(fname,p_smooth,unit_num=unit_num)
        if pca_tgl:
            npcs=8
            X_pca = np.zeros([X.shape[0],npcs])
            pca = sklearn.decomposition.PCA()
            X_pca[cbool,:] = pca.fit_transform(X[cbool,:])[:,:npcs]
            X = X_pca.copy()

        free_params = init_free_params(X,nfilts)
        free_params = convert_free_params(free_params,X,nfilts,out_type='list')
        free_params = np.array(free_params)
        const_params = init_constrained_params()
        #algo = nlopt.LN_SBPLX
        algo = nlopt.LN_COBYLA
        #algo = nlopt.LN_NELDERMEAD

        opt = nlopt.opt(algo,free_params.shape[0])
        opt.set_min_objective(lambda free_params,grad:
                              optim_func(free_params,X,y,nfilts,const_params,cbool))
        lb,ub = nlopt_bounds(free_params)
        #opt.set_lower_bounds(lb)
        #opt.set_upper_bounds(ub)
        dx = np.abs(free_params)/50.
        opt.set_initial_step(dx)

        xopt = opt.optimize(free_params)

        np.savez(save_file,
                 X=X,
                 y=y,
                 cbool=cbool,
                 opt=opt,
                 xopt=xopt,
                 free_params=free_params,
                 const_params=const_params,
                 nfilts=nfilts)
Example #16
def get_X_y(fname, unit_num=0):
    varlist = ['M', 'FX', 'FY', 'TH']
    blk = neoUtils.get_blk(fname)
    cbool = neoUtils.get_Cbool(blk)
    X = GLM.create_design_matrix(blk, varlist)
    Xdot, Xsmooth = GLM.get_deriv(blk, blk, varlist, [0, 5, 9])  # no smoothed block here, so the raw block stands in for it

    X = np.concatenate([X, Xdot], axis=1)
    X = neoUtils.replace_NaNs(X, 'pchip')
    X = neoUtils.replace_NaNs(X, 'interp')
    scaler = sklearn.preprocessing.StandardScaler(with_mean=False)
    X = scaler.fit_transform(X)
    y = neoUtils.get_rate_b(blk, unit_num)[1][:, np.newaxis]
    return (X, y, cbool)
Example #17
def get_onset_and_duration_spikes(p_load, dur=10 * pq.ms):
    """
    loops through all the data we have and gets the
    number of spikes during an onset duration,
    the total number of spikes during the contact duration,
    and the length of the contact. This allows us to calculate how much of
    the spiking occurs in the onset interval.

    :param p_load: directory where the h5 files live
    :param dur: a python quantity to determine the 'onset' epoch

    :return: a dataframe with a summary of the relevant data
    """
    df_all = pd.DataFrame()
    for f in glob.glob(os.path.join(p_load, '*.h5')):
        blk = neoUtils.get_blk(f)
        num_units = len(blk.channel_indexes[-1].units)
        for unit_num in range(num_units):
            id = neoUtils.get_root(blk, unit_num)
            print('Working on {}'.format(id))
            _, _, trains = spikeAnalysis.get_contact_sliced_trains(
                blk, unit_num)

            dir_idx, med_angle = worldGeometry.get_contact_direction(
                blk, plot_tgl=False)

            dir_list = []
            full = []
            contact_duration = []
            onset = []
            for train, direction in zip(trains, dir_idx):
                onset.append(
                    len(train.time_slice(train.t_start, train.t_start + dur)))
                full.append(len(train))
                dir_list.append(direction)
                contact_duration.append(float(train.t_stop - train.t_start))

            df_dir = pd.DataFrame()
            df_dir['dir_idx'] = dir_list
            df_dir['time'] = contact_duration
            df_dir['total_spikes'] = full
            df_dir['onset_spikes'] = onset
            df_dir['med_angle'] = [med_angle[x] for x in df_dir.dir_idx]
            df_dir['id'] = id
            df_all = pd.concat([df_all, df_dir])
    df_all['onset_period'] = dur
    return (df_all)
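
From this summary frame, the fraction of spiking that falls in the onset window drops out directly; a hedged sketch (not part of the original pipeline):

def onset_fraction(df_all):
    # proportion of each contact's spikes that occur in the onset epoch;
    # contacts with zero spikes come out as NaN and are skipped by the mean
    df = df_all.copy()
    df['frac_onset'] = df['onset_spikes'] / df['total_spikes']
    return df.groupby(['id', 'dir_idx'])['frac_onset'].mean()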
Example #18
def calc_MSE(fname, p_smooth, unit_num):
    blk = neoUtils.get_blk(fname)
    blk_smooth = GLM.get_blk_smooth(fname, p_smooth)
    varlist = ['M', 'F', 'TH', 'PHIE']
    root = neoUtils.get_root(blk, unit_num)
    print('Working on {}'.format(root))
    Xdot = GLM.get_deriv(blk, blk_smooth, varlist)[0]
    Xdot = np.reshape(Xdot, [-1, 8, 10])

    sp = neoUtils.concatenate_sp(blk)['cell_{}'.format(unit_num)]
    cbool = neoUtils.get_Cbool(blk)
    mse = []
    for ii in range(Xdot.shape[1]):
        var_in = Xdot[:, ii, :].copy()
        mse.append(tuning_curve_MSE(var_in, sp, cbool, bins=50))
    return (mse)
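
tuning_curve_MSE is defined elsewhere; below is a minimal sketch of the idea it appears to implement, under the assumption that it scores how well a one-dimensional tuning curve on each smoothed derivative predicts the observed firing rate (the name and details are guesses, not the original code):

import numpy as np
import quantities as pq
import elephant
from scipy.stats import binned_statistic

def tuning_curve_MSE_sketch(var_in, sp, cbool, bins=50):
    # observed rate at 1 ms resolution, assumed to align with the design rows
    kernel = elephant.kernels.GaussianKernel(5 * pq.ms)
    rate = elephant.statistics.instantaneous_rate(
        sp, pq.ms, kernel=kernel).magnitude.ravel()
    mse = []
    for col in var_in.T:  # one smoothing window per column
        idx = np.logical_and(cbool, np.isfinite(col))
        # the mean rate in each stimulus bin is the tuning-curve prediction
        pred, _, which = binned_statistic(
            col[idx], rate[idx], statistic='mean', bins=bins)
        mse.append(np.nanmean((rate[idx] - pred[which - 1]) ** 2))
    return mse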
Example #19
def smoothed_best():
    df = pd.read_csv(min_entropy, index_col='id')
    smooth_vals = np.arange(5, 100, 10).tolist()
    best_smooth = df.mode(axis=1)[0]
    best_idx = [smooth_vals.index(x) for x in best_smooth]
    best_idx = pd.DataFrame({'idx': best_idx}, index=best_smooth.index)

    for f in glob.glob(os.path.join(p_load, '*NEO.h5')):
        try:
            blk = neoUtils.get_blk(f)
            blk_smooth = GLM.get_blk_smooth(f, p_smooth)
            num_units = len(blk.channel_indexes[-1].units)
            for unit_num in range(num_units):
                varlist = ['M', 'F', 'TH', 'PHIE']
                root = neoUtils.get_root(blk, unit_num)
                print('Working on {}'.format(root))
                if root not in best_idx.index:
                    print('{} not found in best smoothing derivative data'.
                          format(root))
                    continue
                outname = os.path.join(
                    p_save,
                    'best_smoothing_deriv\\{}_best_smooth_pillowX.mat'.format(
                        root))
                X = GLM.create_design_matrix(blk, varlist)
                smoothing_to_use = best_idx.loc[root, 'idx']

                Xdot = GLM.get_deriv(blk,
                                     blk_smooth,
                                     varlist,
                                     smoothing=[smoothing_to_use])[0]
                X = np.concatenate([X, Xdot], axis=1)
                y = neoUtils.get_rate_b(blk, unit_num)[1]
                cbool = neoUtils.get_Cbool(blk)
                arclengths = get_arclength_bool(blk, unit_num)

                sio.savemat(
                    outname, {
                        'X': X,
                        'y': y,
                        'cbool': cbool,
                        'smooth': best_smooth.loc[root],
                        'arclengths': arclengths
                    })
        except Exception as ex:
            print('Problem with {}:{}'.format(os.path.basename(f), ex))
Example #20
def get_X_y(fname, p_smooth, unit_num, pca_tgl=False, n_pcs=3):
    varlist = ['M', 'F', 'TH', 'PHIE']
    blk = neoUtils.get_blk(fname)
    blk_smooth = get_blk_smooth(fname, p_smooth)

    cbool = neoUtils.get_Cbool(blk)
    X = GLM.create_design_matrix(blk, varlist)
    Xdot, Xsmooth = GLM.get_deriv(blk, blk_smooth, varlist, [0, 5, 9])
    # if using the PCA decomposition of the inputs:
    if pca_tgl:

        X = neoUtils.replace_NaNs(X, 'pchip')
        X = neoUtils.replace_NaNs(X, 'interp')

        Xsmooth = neoUtils.replace_NaNs(Xsmooth, 'pchip')
        Xsmooth = neoUtils.replace_NaNs(Xsmooth, 'interp')

        scaler = sklearn.preprocessing.StandardScaler(with_mean=False)
        X = scaler.fit_transform(X)

        scaler = sklearn.preprocessing.StandardScaler(with_mean=False)
        Xsmooth = scaler.fit_transform(Xsmooth)

        pca = sklearn.decomposition.PCA()
        X_pc = pca.fit_transform(X)[:, :n_pcs]
        pca = sklearn.decomposition.PCA()
        Xs_pc = pca.fit_transform(Xsmooth)[:, :n_pcs]
        zero_pad = np.zeros([1, n_pcs])
        Xd_pc = np.diff(np.concatenate([zero_pad, Xs_pc], axis=0), axis=0)
        X = np.concatenate([X_pc, Xd_pc], axis=1)

        scaler = sklearn.preprocessing.StandardScaler(with_mean=False)
        X = scaler.fit_transform(X)
    else:
        X = np.concatenate([X, Xdot], axis=1)
        X = neoUtils.replace_NaNs(X, 'pchip')
        X = neoUtils.replace_NaNs(X, 'interp')
        scaler = sklearn.preprocessing.StandardScaler(with_mean=False)
        X = scaler.fit_transform(X)

    y = neoUtils.get_rate_b(blk, unit_num)[1][:, np.newaxis]
    # Xc = X[cbool,:]
    # yc = y[cbool]
    return (X, y, cbool)
Example #21
def get_X_y(fname,p_smooth,unit_num=0):
    varlist = ['M', 'F', 'TH', 'PHIE']
    blk = neoUtils.get_blk(fname)
    blk_smooth = GLM.get_blk_smooth(fname,p_smooth)

    cbool = neoUtils.get_Cbool(blk)
    X = GLM.create_design_matrix(blk,varlist)
    Xdot,Xsmooth = GLM.get_deriv(blk,blk_smooth,varlist,[9])

    X = np.concatenate([X,Xdot],axis=1)
    X = neoUtils.replace_NaNs(X,'pchip')
    X = neoUtils.replace_NaNs(X,'interp')
    scaler = sklearn.preprocessing.StandardScaler(with_mean=False)
    X = scaler.fit_transform(X)

    y = neoUtils.get_rate_b(blk,unit_num)[1][:,np.newaxis]
    y[np.invert(cbool)]=0
    return(X,y,cbool)
Example #22
def get_first_spike_vals(fname, p_smooth, unit_num):
    """
    Return a dataframe with length Ncontacts and the value of
    relevant stimulus features at that time

    :param fname:       path to a neo h5 file
    :param p_smooth:    path to the directory of smoothed h5 files
    :param unit_num:    int
    :return: pandas dataframe
    """
    # get the blocks
    blk = neoUtils.get_blk(fname)
    blk_smooth = GLM.get_blk_smooth(fname, p_smooth)
    # get the trains and times of first spikes
    _, _, trains = spikeAnalysis.get_contact_sliced_trains(blk, unit_num)
    t_idx = [
        train[0].magnitude if len(train) > 0 else np.nan for train in trains
    ]
    t_idx = np.array(t_idx)
    t_idx = t_idx[np.isfinite(t_idx)].astype('int')
    # get the stimuli
    varlist = ['M', 'F', 'TH', 'PHIE']
    X = GLM.create_design_matrix(blk, varlist)
    Xsmooth = GLM.get_deriv(blk, blk_smooth, varlist, smoothing=[9])[1]
    MB = np.sqrt(X[:, 1]**2 + X[:, 2]**2)[:, np.newaxis]
    FB = np.sqrt(X[:, 4]**2 + X[:, 5]**2)[:, np.newaxis]
    RB = np.sqrt(X[:, 6]**2 + X[:, 7]**2)[:, np.newaxis]
    # use smooth to calculate derivative
    MBsmooth = np.sqrt(Xsmooth[:, 1]**2 + Xsmooth[:, 2]**2)[:, np.newaxis]
    FBsmooth = np.sqrt(Xsmooth[:, 4]**2 + Xsmooth[:, 5]**2)[:, np.newaxis]
    RBsmooth = np.sqrt(Xsmooth[:, 6]**2 + Xsmooth[:, 7]**2)[:, np.newaxis]

    X = np.concatenate([MB, FB, RB], axis=1)
    Xsmooth = np.concatenate([MBsmooth, FBsmooth, RBsmooth], axis=1)
    Xdot = np.diff(np.concatenate([np.zeros([1, 3]), Xsmooth]), axis=0)
    X = np.concatenate([X, Xdot], axis=1)

    #extract stimulus at time of first spike and output to a dataframe
    vals = X[t_idx]
    vallist = ['MB', 'FB', 'RB', 'MBdot', 'FBdot', 'RBdot']
    df = pd.DataFrame()
    for ii in range(len(vallist)):
        df[vallist[ii]] = vals[:, ii]
    df['id'] = neoUtils.get_root(blk, unit_num)
    return (df)
Example #23
def smoothed_mechanics():
    """
    use this function to grab the data from the smoothed mechanics and the
    derivative of the same
    """

    f_arclength = '/projects/p30144/_VG3D/deflections/direction_arclength_FR_group_data.csv'
    f_list = glob.glob(os.path.join(p_load, '*NEO.h5'))
    f_list.sort()

    for f in f_list:
        try:
            blk = neoUtils.get_blk(f)
            blk_smooth = GLM.get_blk_smooth(f, p_smooth)
            num_units = len(blk.channel_indexes[-1].units)
            for unit_num in range(num_units):
                varlist = ['M', 'F', 'TH', 'PHIE']
                root = neoUtils.get_root(blk, unit_num)
                print('Working on {}'.format(root))
                outname = os.path.join(p_save,
                                       '{}_smooth_mechanicsX.mat'.format(root))

                Xdot, X = GLM.get_deriv(blk,
                                        blk_smooth,
                                        varlist,
                                        smoothing=[5])
                X = np.concatenate([X, Xdot], axis=1)
                y = neoUtils.get_rate_b(blk, unit_num)[1]
                cbool = neoUtils.get_Cbool(blk)
                arclengths = get_arclength_bool(blk,
                                                unit_num,
                                                fname=f_arclength)

                sio.savemat(
                    outname, {
                        'X': X,
                        'y': y,
                        'cbool': cbool,
                        'smooth': 55,
                        'arclengths': arclengths
                    })
        except Exception as ex:
            print('Problem with {}:{}'.format(os.path.basename(f), ex))
Example #24
def main(fname, p_smooth, p_save):
    """
    Run the multi-filter GLM on a given file
    :param fname:
    :param p_smooth:
    :param p_save:
    :return:  Saves a numpy file to p_save
    """

    param_dict = {
        'family': 'p',
        'hist': True,
        'nfilts': 3,
        'learning_rate': 3e-4,
        'batch_size': 4096,
        'epochs': 5000,
        'min_delta': 0.01,
        'patience': 8
    }
    nsims = 100
    blk = neoUtils.get_blk(fname)
    num_units = len(blk.channel_indexes[-1].units)
    for unit_num in range(num_units):
        X, y, cbool = get_X_y(fname, p_smooth, unit_num)
        root = neoUtils.get_root(blk, unit_num)
        model_fname = os.path.join(p_save, '{}_tensorflow.ckpt'.format(root))
        X[np.invert(cbool), :] = 0
        y[np.invert(cbool), :] = 0
        # Train
        build_GLM_model(X, y, model_fname, **param_dict)
        #Simulate
        output = simulate(X, y, model_fname, cbool, nsims)
        print('Saving...')
        np.savez(os.path.join(p_save, '{}_multi_filter.npz'.format(root)),
                 X=X,
                 y=y,
                 cbool=cbool,
                 model_out=output,
                 param_dict=param_dict)
        print('Saved')
Example #25
def batch_peak_PSTH_time(p_load, p_save):
    df = pd.DataFrame()
    for f in glob.glob(os.path.join(p_load, '*.h5')):
        blk = neoUtils.get_blk(f)
        print('Working on {}'.format(os.path.basename(f)))
        num_units = len(blk.channel_indexes[-1].units)
        # _,med_dir = worldGeometry.get_contact_direction(blk,plot_tgl=False)
        for unit_num in range(num_units):
            id = neoUtils.get_root(blk, unit_num)
            PSTH, t_edges, max_fr, med_dir = get_PSTH_by_dir(blk, unit_num)
            if isinstance(PSTH, int):  # get_PSTH_by_dir returns -1 on failure
                continue
            peak_time = [
                t_edges[x][np.nanargmax(PSTH[x])] for x in range(len(PSTH))
            ]
            df_temp = pd.DataFrame()
            df_temp['id'] = [id for x in range(len(med_dir))]
            df_temp['med_dir'] = med_dir
            df_temp['peak_time'] = peak_time
            df = pd.concat([df, df_temp])
    df.to_csv(os.path.join(p_save, 'peak_PSTH_time.csv'))
    print('done')
Example #26
def get_components(fname,p_smooth=None,smooth_idx=9):
    '''Get the PCA components given a filename.'''
    varlist = ['M', 'F', 'TH', 'PHIE']
    blk = neoUtils.get_blk(fname)
    cbool = neoUtils.get_Cbool(blk)
    root = neoUtils.get_root(blk,0)[:-2]
    X = GLM.create_design_matrix(blk,varlist)
    if p_smooth is not None:
        blk_smooth = GLM.get_blk_smooth(fname,p_smooth)
        Xdot = GLM.get_deriv(blk,blk_smooth,varlist,smoothing=[smooth_idx])[0]
        X = np.concatenate([X,Xdot],axis=1)
    X[np.invert(cbool),:]=0
    X = neoUtils.replace_NaNs(X,'pchip')
    X = neoUtils.replace_NaNs(X,'interp')

    scaler = sklearn.preprocessing.StandardScaler(with_mean=False)
    X[cbool,:] = scaler.fit_transform(X[cbool,:])

    pca = sklearn.decomposition.PCA()
    pca.fit(X[cbool,:])

    return(pca,root)
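
The returned pca object can then be inspected in the usual scikit-learn way; a usage sketch with a hypothetical file name:

import numpy as np
import matplotlib.pyplot as plt

pca, root = get_components(r'D:\VG3D\_NEO\rat_example_NEO.h5')  # hypothetical file
cum_var = np.cumsum(pca.explained_variance_ratio_)
plt.plot(np.arange(1, len(cum_var) + 1), cum_var, 'o-')
plt.xlabel('Number of components')
plt.ylabel('Cumulative explained variance')
plt.title(root)
plt.show()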
Example #27
def get_corr_with_FR():
    p_load = os.path.join(os.environ['BOX_PATH'],
                          r'__VG3D\_deflection_trials\_NEO')
    p_save = os.path.join(os.environ['BOX_PATH'],
                          r'__VG3D\_deflection_trials\_NEO\results')
    p_smooth = r'K:\VG3D\_rerun_with_pad\_deflection_trials\_NEO\smooth'

    DF = pd.DataFrame()
    for ii, f in enumerate(glob.glob(os.path.join(p_load, '*.h5'))):
        if ii == 0:
            continue
        blk = neoUtils.get_blk(f)
        num_units = len(blk.channel_indexes[-1].units)
        for unit_num in range(num_units):
            id = neoUtils.get_root(blk, unit_num)
            print('Working on {}'.format(id))
            try:
                df = calc_corr(f, p_smooth, unit_num)
                df['id'] = id
                DF = pd.concat([DF, df])
            except Exception as ex:
                print('Problem on {}: {}'.format(id, ex))
    DF.to_csv(os.path.join(p_save, 'derivative_corr_by_smoothing.csv'),
              index=True)
Example #28
def calc_corr(fname, p_smooth, unit_num):
    blk = neoUtils.get_blk(fname)
    blk_smooth = GLM.get_blk_smooth(fname, p_smooth)
    varlist = ['M', 'F', 'TH', 'PHIE']
    component_list = [
        '{}_dot'.format(x)
        for x in ['Mx', 'My', 'Mz', 'Fx', 'Fy', 'Fz', 'TH', 'PHI']
    ]
    root = neoUtils.get_root(blk, unit_num)
    Xdot = GLM.get_deriv(blk, blk_smooth, varlist)[0]
    Xdot = np.reshape(Xdot, [-1, 8, 10])
    windows = np.arange(5, 100, 10)

    sp = neoUtils.concatenate_sp(blk)['cell_{}'.format(unit_num)]
    cbool = neoUtils.get_Cbool(blk)
    R = []
    # loop over variables
    for ii in range(Xdot.shape[1]):
        var_in = Xdot[:, ii, :].copy()
        # loop over smoothing
        r = []
        for jj in range(var_in.shape[1]):
            kernel = elephant.kernels.GaussianKernel(pq.ms * windows[jj])
            FR = elephant.statistics.instantaneous_rate(sp,
                                                        pq.ms,
                                                        kernel=kernel)
            idx = np.isfinite(var_in[:, jj])
            r.append(
                np.corrcoef(var_in[:, jj].ravel()[idx],
                            FR.magnitude.ravel()[idx])[0, 1])
        R.append(r)
    R = np.array(R)
    df = pd.DataFrame(data=R, columns=['{}ms'.format(x) for x in windows])
    df.index = component_list
    return (df)
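
Given the returned table (derivative components as rows, smoothing windows as columns), the window that best correlates with the firing rate falls out of idxmax; a hedged sketch, assuming fname and p_smooth are defined as in get_corr_with_FR above:

df = calc_corr(fname, p_smooth, unit_num=0)
best_window = df.idxmax(axis=1)  # e.g. '55ms' for each derivative component
print(best_window)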
Example #29
def smoothed(smooth_idx=9):
    smooth_vals = np.arange(5, 100, 10)
    sub_p_save = os.path.join(
        p_save, '{}ms_smoothing_deriv'.format(smooth_vals[smooth_idx]))
    if not os.path.isdir(sub_p_save):
        os.mkdir(sub_p_save)
    for f in glob.glob(os.path.join(p_load, '*NEO.h5')):
        try:
            blk = neoUtils.get_blk(f)
            blk_smooth = GLM.get_blk_smooth(f, p_smooth)
            num_units = len(blk.channel_indexes[-1].units)
            for unit_num in range(num_units):
                varlist = ['M', 'F', 'TH', 'PHIE']
                root = neoUtils.get_root(blk, unit_num)
                print('Working on {}'.format(root))
                outname = os.path.join(
                    sub_p_save,
                    '{}ms_{}_pillowX.mat'.format(smooth_vals[smooth_idx],
                                                 root))

                X = GLM.create_design_matrix(blk, varlist)
                Xdot = GLM.get_deriv(blk, blk_smooth, varlist, [smooth_idx])[0]
                X = np.concatenate([X, Xdot], axis=1)
                y = neoUtils.get_rate_b(blk, unit_num)[1]
                cbool = neoUtils.get_Cbool(blk)
                arclengths = get_arclength_bool(blk, unit_num)

                sio.savemat(outname, {
                    'X': X,
                    'y': y,
                    'cbool': cbool,
                    'arclengths': arclengths
                })
        except Exception as ex:
            print('Problem with {}:{}'.format(os.path.basename(f), ex))
Example #30
def calc_world_geom_hist(p_load,p_save,n_bins=100):
    """
     Since calculation takes so long on getting the histograms (mostly loading of data)
    we want to calculate them once and save the data.

    This calculates the Geometry.

    :param p_load: Location where all the neo h5 files live
    :param p_save: Location to save the output data files
    :param n_bins: Number of bins in with which to split the data
    :return None: Saves a 'world_geom_hists.npz' file.
    """
    # init
    ID = []
    all_S_bayes = []
    all_TH_bayes = []
    all_PHIE_bayes = []
    all_ZETA_bayes = []

    all_S_edges = []
    all_TH_edges = []
    all_PHIE_edges = []
    all_ZETA_edges = []

    # loop files
    for f in glob.glob(os.path.join(p_load,'rat*.h5')):
        # load in
        print(os.path.basename(f))
        blk = neoUtils.get_blk(f)

        # get contact
        Cbool = neoUtils.get_Cbool(blk)
        use_flags = neoUtils.concatenate_epochs(blk)

        # get vars
        S = neoUtils.get_var(blk, 'S').magnitude

        TH = neoUtils.get_var(blk, 'TH').magnitude
        neoUtils.center_var(TH, use_flags)

        PHIE = neoUtils.get_var(blk, 'PHIE').magnitude
        neoUtils.center_var(PHIE, use_flags)

        ZETA = neoUtils.get_var(blk, 'ZETA').magnitude
        neoUtils.center_var(ZETA, use_flags)

        # loop units
        for unit in blk.channel_indexes[-1].units:
            # get unit info
            unit_num = int(unit.name[-1])
            r, b = neoUtils.get_rate_b(blk, unit_num, sigma=5 * pq.ms)
            root = neoUtils.get_root(blk,unit_num)
            ID.append(root)

            # Create hists
            S_bayes, S_edges = varTuning.stim_response_hist(S.ravel(), r, Cbool, nbins=n_bins, min_obs=5)
            TH_bayes, TH_edges = varTuning.stim_response_hist(TH.ravel(), r, Cbool, nbins=n_bins, min_obs=5)
            PHIE_bayes, PHIE_edges = varTuning.stim_response_hist(PHIE.ravel(), r, Cbool, nbins=n_bins,min_obs=5)
            ZETA_bayes, ZETA_edges = varTuning.stim_response_hist(ZETA.ravel(), r, Cbool, nbins=n_bins,min_obs=5)

            # append outputs
            plt.close('all')
            all_S_bayes.append(S_bayes)
            all_TH_bayes.append(TH_bayes)
            all_PHIE_bayes.append(PHIE_bayes)
            all_ZETA_bayes.append(ZETA_bayes)

            all_S_edges.append(S_edges)
            all_TH_edges.append(TH_edges)
            all_PHIE_edges.append(PHIE_edges)
            all_ZETA_edges.append(ZETA_edges)


    np.savez(os.path.join(p_save, 'world_geom_hists.npz'),
             all_S_bayes=all_S_bayes,
             all_TH_bayes=all_TH_bayes,
             all_PHIE_bayes=all_PHIE_bayes,
             all_ZETA_bayes=all_ZETA_bayes,
             all_S_edges=all_S_edges,
             all_TH_edges=all_TH_edges,
             all_PHIE_edges=all_PHIE_edges,
             all_ZETA_edges=all_ZETA_edges,
             ID=ID
             )
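
Reloading the saved histograms for a quick look; a usage sketch with a hypothetical save directory, assuming each histogram has one more bin edge than it has values:

import os
import numpy as np
import matplotlib.pyplot as plt

p_save = r'D:\VG3D\results'  # hypothetical
dat = np.load(os.path.join(p_save, 'world_geom_hists.npz'), allow_pickle=True)
cell = 0  # first unit saved in the file
plt.plot(dat['all_TH_edges'][cell][:-1], dat['all_TH_bayes'][cell])
plt.xlabel('TH (centered)')
plt.ylabel('Firing rate')
plt.title(dat['ID'][cell])
plt.show()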