Ejemplo n.º 1
0
def gen_ret_vars(retfile,stimfile):
    """Assemble a dict of retinotopy variables from a ret file and a stim file.

    Returns a dict with keys 'pval_ret', 'ret', 'position', and
    'paramdict_normal' (the latter converted from a matfile struct to a
    plain dict).
    """
    var_names = ['pval_ret','ret']
    var_vals = ut.loadmat(retfile,var_names)
    ret_vars = dict(zip(var_names,var_vals))

    # stimulus positions live in the 'result' struct of the stim file
    stim_result = ut.loadmat(stimfile,'result')[()]
    ret_vars['position'] = stim_result['position']

    # gaussian-fit parameters, converted to a plain python dict
    raw_paramdict = ut.loadmat(retfile,'paramdict_normal')
    ret_vars['paramdict_normal'] = ut.matfile_to_dict(raw_paramdict)

    return ret_vars
Ejemplo n.º 2
0
def gen_frm_on(frame_file, rg=(1, 0)):
    """Return the concatenated frame indices during which stimuli were on.

    rg gives (start, end) offsets applied to the raw trigger-frame array
    before pairing triggers into (onset, offset) intervals.
    """
    info = ut.loadmat(frame_file, 'info')[()]
    raw_frames = info['frame']
    # trim per rg, then divide by 4 (presumably the plane count — confirm)
    trimmed = raw_frames[rg[0]:raw_frames.size + rg[1]]
    plane_frames = np.floor(trimmed / 4).astype('int')
    # alternate entries are onsets and offsets; expand each pair to a range
    onsets = plane_frames[0::2]
    offsets = plane_frames[1::2]
    return np.concatenate([np.arange(on, off) for on, off in zip(onsets, offsets)])
Ejemplo n.º 3
0
def get_nbydepth(datafiles):
    """Return an array with the number of ROIs in each datafile (one per plane)."""
    roi_counts = np.zeros((len(datafiles),))
    for plane_idx, fname in enumerate(datafiles):
        # row count of the 'corrected' trace matrix = number of ROIs
        traces = ut.loadmat(fname, 'corrected')[()]
        roi_counts[plane_idx] = traces.shape[0]
    return roi_counts
Ejemplo n.º 4
0
def fix_up_directions(locy, locx, stimfile, datafile):
    """Convert stimulus grid indices (locy, locx) to degrees of visual angle.

    Positive x direction is temporal; positive y direction is UP.
    The experiment date is parsed from the 4th-from-last '/' component of
    datafile's path and drives two historical sign corrections.

    Returns (locydeg, locxdeg).
    """
    result = ut.matfile_to_dict(ut.loadmat(stimfile, 'result'))  # [()]

    if 'range' in list(result.keys()):
        gridsize = 5
        ctr = np.array(
            (result['range'][0:2].mean(), result['range'][2:].mean())
        )  # ctr: x center of range, y center of range # fixed 18/10/30; for expts. before 18/10/30, this will have to be switched!
    else:
        gridsize = 10
        ctr = np.array((0, 0))

    Ny = locy.max() + 1
    Nx = locx.max() + 1

    xrg = np.arange(-(Nx - 1) * gridsize / 2, (Nx + 1) * gridsize / 2,
                    gridsize)  # positive direction is temporal
    yrg = -np.arange(-(Ny - 1) * gridsize / 2, (Ny + 1) * gridsize / 2,
                     gridsize)  # positive direction is UP!!

    # experiment date encoded as YYMMDD in the path
    expt_date = int(datafile.split('/')[-4])

    # flipping for expts. before 18/10/30
    toflip = expt_date < 181030
    if toflip:
        ctr = ctr * np.array((1, -1))

    # inverting for expts. between 18/10/30 and 18/12/09
    notquitefixed = expt_date < 181209
    # BUGFIX: was `if ~toflip and ...` — `~` is bitwise NOT, and on a bool it
    # yields -2 or -1, both truthy, so the guard never excluded toflip expts.
    # `not toflip` is the intended logical negation.
    if not toflip and notquitefixed:
        yrg = -yrg

    xrg = xrg + ctr[0]
    yrg = yrg + ctr[1]

    locydeg = yrg[locy.astype('int')]
    locxdeg = xrg[locx.astype('int')]

    return locydeg, locxdeg
        Wstar_dict = np.load(weights_files[iwt], allow_pickle=True)[()]
        losses[iwt] = Wstar_dict['loss']
    weights_files = [
        wf for wf, l in zip(weights_files, losses)
        if l < np.nanpercentile(losses, lcutoff)
    ]
    target_file = calnet_base + 'dynamics/vip_halo_l4_opto_tavg_connection_deletion_%s.npy' % fit_lbl
    build_models_and_simulate_opto_effects(weights_files,
                                           target_file,
                                           sim_options=sim_options,
                                           pool_size=pool_size)


if __name__ == '__main__':

    # Load raw/fit responses from the VIP-halo L4 matfile, normalize the
    # light-off condition, and fit a line characterizing the opto effect.
    # NOTE(review): vip_halo_l4_matfile, opto_utils, compute_opto_line, and
    # run are assumed defined earlier in this file (not visible in this
    # chunk); rfs_orig_layer is not used in this block.
    rrs_orig_layer, rfs_orig_layer = ut.loadmat(vip_halo_l4_matfile,
                                                ['rrs', 'rfs'])
    scall_halo_l4 = opto_utils.norm_to_mean_light_off(rrs_orig_layer[0, 0])
    opto_slope, opto_intercept = compute_opto_line(scall_halo_l4)

    # CLI: argv[1] = fit label (required); argv[2] = pool size (optional);
    # argv[3] = calnet base directory (optional).
    fit_lbl = sys.argv[1]
    if len(sys.argv) == 3:
        pool_size = int(sys.argv[2])
        run(fit_lbl, pool_size=pool_size)
    elif len(sys.argv) > 3:
        pool_size = int(sys.argv[2])
        calnet_base = sys.argv[3]
        run(fit_lbl, pool_size=pool_size, calnet_base=calnet_base)
    else:
        run(fit_lbl)
Ejemplo n.º 6
0
def analyze_precise_retinotopy(datafiles,
                               stimfile,
                               retfile,
                               criterion=lambda x: x > 100,
                               rg=(2, -10),
                               nbefore=nbefore,
                               nafter=nafter,
                               gridsize=10):
    """Compute trial-averaged retinotopic maps and per-ROI response p-values.

    Parameters
    ----------
    datafiles : list of per-plane data filenames; datafiles[1] may hold
        'dxdt' running data, and the experiment date (YYMMDD) is parsed
        from the 4th-from-last '/' component of datafiles[0].
    stimfile : stimulus .mat file containing the 'result' struct.
    retfile : currently unused (its load is commented out below).
    criterion : predicate on |mean running speed| selecting "running" trials.
    rg : (start, end) offsets applied to the trigger-frame array.
    nbefore, nafter : trial padding in frames (module-level defaults).
    gridsize : default grid spacing in degrees; overwritten below depending
        on whether result has a 'range' field.

    Returns
    -------
    (ret, paramdict, pval_ret, trialrun, has_inverse, locinds)
    """
    # ROI counts per plane (computed but not used further in this function)
    nbydepth = np.zeros((len(datafiles), ))
    for i, datafile in enumerate(datafiles):
        corrected = ut.loadmat(datafile, 'corrected')
        nbydepth[i] = corrected.shape[0]


#         with h5py.File(datafile,mode='r') as f:
#             nbydepth[i] = (f['corrected'][:].T.shape[0])
    trialwise, ctrialwise, strialwise, dfof, straces, dtrialwise, trialwise_t_offset = ut.gen_precise_trialwise(
        datafiles, rg=rg, nbefore=nbefore, nafter=nafter)
    # z-score each ROI over all its trials and frames
    zstrialwise = sst.zscore(strialwise.reshape(
        (strialwise.shape[0], -1)).T).T.reshape(strialwise.shape)

    result = sio.loadmat(stimfile, squeeze_me=True)['result'][()]

    # original scope .mat file: strip the 12-char suffix of datafiles[0]
    infofile = sio.loadmat(datafiles[0][:-12] + '.mat', squeeze_me=True)
    #retfile = sio.loadmat(retfile,squeeze_me=True)

    # (row, col) grid location of each trial, 1-based
    locinds = result['locinds']  #retfile['locinds']

    # older stim files have no 'inverted' field; presumably it marks trials
    # shown with inverted polarity — confirm. Hence the try/except probe.
    has_inverse = False
    try:
        #        inverted = result['inverted'][()]
        inverted = np.tile(result['inverted'], (result['repetitions'], ))
        has_inverse = True
    except:
        has_inverse = False

    frame = infofile['info'][()]['frame'][()]
    frame = np.unique(
        frame[rg[0]:frame.size + rg[1]]
    )  # this format for all the retinotopic mapping through 12/12

    # stimulus-period frames only (strip the before/after padding)
    data = strialwise[:, :, nbefore:strialwise.shape[-1] - nafter]

    Ny = locinds[:, 0].max()
    Nx = locinds[:, 1].max()

    # running speed: try .mat then hdf5 layout; fall back to "all running"
    try:
        try:
            dxdt = sio.loadmat(datafiles[1], squeeze_me=True)['dxdt']
        except:
            with h5py.File(datafiles[1], mode='r') as f:
                dxdt = f['dxdt'][:].T
    except:
        print('no running data saved; assuming all running')
        dxdt = 101 * np.ones((frame.max(), ))

    # mean running speed within each (onset, offset) trigger pair
    trialrun = np.zeros(frame[0::2].shape)
    for i in range(len(trialrun)):
        trialrun[i] = dxdt[frame[0::2][i]:frame[1::2][i]].mean()
    runtrial = criterion(np.abs(trialrun))

    if has_inverse:
        # average stim-period traces per grid cell, separately for regular
        # (channel 0) and inverted (channel 1) trials
        ret = np.zeros((data.shape[0], Ny, Nx, 2))
        for j in range(Ny):
            for k in range(Nx):
                lkat = np.logical_and(
                    np.logical_and(
                        np.logical_and(locinds[:, 0] == j + 1,
                                       locinds[:, 1] == k + 1), runtrial),
                    np.nanmax(np.nanmax(data, 0), -1))
                lkat_reg = np.logical_and(lkat, np.logical_not(inverted))
                lkat_inv = np.logical_and(lkat, inverted)
                n_reg = lkat_reg.sum()
                n_inv = lkat_inv.sum()
                print((n_reg, n_inv))
                for idx in np.where(lkat_reg)[0]:
                    ret[:, j, k,
                        0] = ret[:, j, k, 0] + data[:, idx].mean(1) / n_reg
                for idx in np.where(lkat_inv)[0]:
                    ret[:, j, k,
                        1] = ret[:, j, k, 1] + data[:, idx].mean(1) / n_inv
                assert (~np.isnan(np.nanmax(ret[:, j, k])))
    else:
        ret = np.zeros((data.shape[0], Ny, Nx))
        for j in range(Ny):
            for k in range(Nx):
                lkat_reg = np.logical_and(
                    np.logical_and(locinds[:, 0] == j + 1,
                                   locinds[:, 1] == k + 1), runtrial)
                n_reg = lkat_reg.sum()
                print((n_reg, ))  #n_inv))
                for idx in np.where(lkat_reg)[0]:
                    ret[:, j, k] = ret[:, j, k] + data[:, idx].mean(1) / n_reg
                assert (~np.isnan(np.nanmax(ret[:, j, k])))

    if 'range' in result.dtype.names:
        gridsize = 5
        ctr = np.array(
            (result['range'][0:2].mean(), -result['range'][2:].mean())
        )  # ctr: x center of range, y center of range # fixed 18/10/30; for expts. after 18/10/30, this will have to be switched!
    else:
        gridsize = 10
        ctr = np.array((0, 0))

    # flipping for expts. after 18/10/30
    # NOTE(review): the date-flip conditions here use '>' where similar code
    # elsewhere in this file uses '<' — confirm which convention is correct.
    toflip = int(datafiles[0].split('/')[-4]) > 181030
    if toflip:
        ctr = ctr * np.array((1, -1))

    xrg = np.arange(-(Nx - 1) * gridsize / 2, (Nx + 1) * gridsize / 2,
                    gridsize)
    yrg = np.arange(-(Ny - 1) * gridsize / 2, (Ny + 1) * gridsize / 2,
                    gridsize)

    # inverting for expts. before 18/12/09
    notquitefixed = int(datafiles[0].split('/')[-4]) < 181209
    if toflip and notquitefixed:
        yrg = -yrg

    # 2D gaussian fit of the grid maps (one fit per polarity if present)
    if has_inverse:
        paramdict = [
            ut.fit_2d_gaussian((xrg, yrg), ret[:, :, :, 0]),
            ut.fit_2d_gaussian((xrg, yrg), ret[:, :, :, 1])
        ]
        paramdict[0]['gridsize'] = gridsize
        paramdict[1]['gridsize'] = gridsize
        paramdict[0]['ctr'] = ctr
        paramdict[1]['ctr'] = ctr
    else:
        paramdict = ut.fit_2d_gaussian(
            (xrg, yrg), ret)  #,ut.fit_2d_gaussian((xrg,yrg),ret[:,:,:,1])]
        paramdict['gridsize'] = gridsize
        paramdict['ctr'] = ctr

    # paired t-test: frame just before vs. just after stim onset, per ROI
    pval_ret = np.zeros(strialwise.shape[0])
    for i in range(strialwise.shape[0]):
        _, pval_ret[i] = sst.ttest_rel(strialwise[i, :, nbefore - 1],
                                       strialwise[i, :, nbefore + 1])

    return ret, paramdict, pval_ret, trialrun, has_inverse, locinds
Ejemplo n.º 7
0
    (16, 1, 4, 8, 2))[np.newaxis]).sum(1),
                    kind='stable')
# NOTE(review): order1 is assigned just above this chunk (its statement is cut
# off at the top of the visible source); it is an argsort over nub patterns.
order2 = np.argsort(utils.nubs_active[order1][::-1].sum(1),
                    kind='stable')[::-1]
# order of stimuli for tuning curve display purposes
evan_order_actual = order1[order2]
# order of stimuli for stimulus identity display purposes
evan_order_apparent = np.argsort(utils.nubs_active[::-1].sum(1),
                                 kind='stable')[::-1]
# number of active nubs per stimulus, in display order
nub_no = utils.nubs_active[evan_order_actual].sum(1)
#parula = ListedColormap(ut.loadmat('/Users/dan/Documents/code/adesnal/matlab_parula_colormap.mat','cmap'))

# similar to parula colormap, ported to python
# NOTE(review): hard-coded absolute path — will only resolve on the
# original author's machine.
parula_path = '/Users/dan/Documents/code/adesnal/'
parula_filename = parula_path + 'matlab_parula_colormap.mat'
parula = ListedColormap(ut.loadmat(parula_filename, 'cmap'))


def compute_tuning(dsname,
                   keylist,
                   datafield,
                   run_fn,
                   dilation_fn=dilation_fn,
                   gen_nub_selector=utils.gen_nub_selector_v1,
                   trialwise_dfof=False):
    # extract pandas dataframe from hdf5 file
    df, roi_info, trial_info = ut.compute_tavg_dataframe(
        dsname,
        'nub_0',
        datafield=datafield,
        keylist=keylist,
Ejemplo n.º 8
0
def analyze(datafiles,stimfile,frame_adjust=None,rg=(1,0),nbefore=nbefore,nafter=nafter,stim_params=None):
    """Trialize fluorescence, running, and pupil data for one experiment.

    Parameters
    ----------
    datafiles : list of per-plane processed-data filenames; the original
        scope .mat file is found by stripping the last 12 characters of
        datafiles[0] and appending '.mat'.
    stimfile : stimulus matfile containing the 'result' struct.
    frame_adjust : optional callable applied to the trigger-frame array.
    rg : (start, end) offsets applied to the trigger-frame array; if None,
        trigger frames are instead selected where event_id == 1.
    nbefore, nafter : frames kept before/after each trial (module defaults).
    stim_params : optional iterable of (name, function) tuples; each
        function maps the stim 'result' struct to a value stored in
        proc[name].

    Returns
    -------
    proc : dict of trialized data, ROI info, and metadata.
    """
    # find number of ROIs in each plane
    nbydepth = get_nbydepth(datafiles)

    nplanes = len(nbydepth)

    # get trialized fluorescence data
    trialwise,ctrialwise,strialwise,dfof,straces,dtrialwise,proc1 = ut.gen_precise_trialwise(datafiles,rg=rg,frame_adjust=frame_adjust,nbefore=nbefore,nafter=nafter,blcutoff=blcutoff) # , trialwise_t_offset

    # load stimulus data
    result = ut.loadmat(stimfile,'result')[()]

    # correct stim trigger frames if necessary
    info = ut.loadmat(datafiles[0][:-12]+'.mat','info')[()] # original .mat file
    frame = info['frame'][()].astype(np.int64)
    if rg is not None:
        frame = frame[rg[0]:frame.size+rg[1]]
    else:
        # no explicit range: keep only frames flagged as stimulus events
        event_id = info['event_id'][()].astype(np.int64)
        frame = frame[event_id==1]
    if frame_adjust:
        frame = frame_adjust(frame)

    # frame counter is 16-bit: wherever it decreases, undo the wraparound
    while np.min(np.diff(frame)) < 0:
        brk = np.argmin(np.diff(frame))+1
        frame[brk:] = frame[brk:] + 65536

    # load running and pupil data
    dxdt = ut.loadmat(datafiles[0],'dxdt').flatten()
    try:
        # first entry of pupil_ctr is x, second entry is y
        pupil_ctr,pupil_area,pupil_frac_ctr,pupil_frac_area = ut.loadmat(datafiles[0],['pupil_ctr','pupil_area','pupil_frac_ctr','pupil_frac_area'])
        pupil_area = pupil_area.flatten()
        pupil_frac_area = pupil_frac_area.flatten()
    except Exception:  # eye-tracking variables absent for this session
        print('no eye tracking data for ' + stimfile)
        pupil_ctr = None
        pupil_frac_ctr = None
        pupil_area = None
        pupil_frac_area = None

    nplanes = len(datafiles)

    # ROI masks/centers/depths and mean channel images, stacked across planes.
    # (A dead per-plane mask-stacking loop duplicating load_roi_info's work
    # was removed here; its results were never used.)
    roi_proc = load_roi_info(datafiles)

    # rescale trigger frames for the pupil sampling rate
    # (factor 2/nplanes — presumably two scope frames per plane cycle; confirm)
    frame_div = np.floor(2*frame/nplanes).astype(np.int64)
    trialrun = ut.trialize(dxdt.T,frame,nbefore=nbefore,nafter=nafter)
    trialctr = ut.trialize(pupil_ctr,frame_div,nbefore=nbefore,nafter=nafter)
    trialfracctr = ut.trialize(pupil_frac_ctr,frame_div,nbefore=nbefore,nafter=nafter)
    trialarea = ut.trialize(pupil_area,frame_div,nbefore=nbefore,nafter=nafter)
    trialfracarea = ut.trialize(pupil_frac_area,frame_div,nbefore=nbefore,nafter=nafter)

    proc = {}
    proc['trialrun'] = trialrun
    proc['trialctr'] = trialctr
    proc['trialarea'] = trialarea
    proc['trialfracctr'] = trialfracctr
    proc['trialfracarea'] = trialfracarea
    proc['trialwise'] = trialwise
    proc['strialwise'] = strialwise
    proc['nbydepth'] = nbydepth
    proc['dtrialwise'] = dtrialwise
    proc['dfof'] = dfof
    proc['trialwise_t_offset'] = proc1['trialwise_t_offset']
    proc['raw_trialwise'] = proc1['raw_trialwise']
    proc['neuropil_trialwise'] = proc1['neuropil_trialwise']
    if roi_proc:
        for key in roi_proc:
            proc[key] = roi_proc[key]
    else:
        print('could not compute roi info')
    proc['nbefore'] = nbefore
    proc['nafter'] = nafter

    # define extra parameters based on 'result' variable
    # BUGFIX: stim_params defaults to None, and the original unconditionally
    # iterated it, raising TypeError whenever the default was used.
    if stim_params is not None:
        for param in stim_params:
            name,function = param
            proc[name] = function(result)

    return proc
Ejemplo n.º 9
0
def load_msk_ctr(filename):
    """Load ROI masks and centers from a matfile.

    Returns (msk, ctr): msk as a boolean array transposed so the ROI axis
    comes first, and ctr transposed to shape (n_roi, 2).
    """
    msk, ctr = ut.loadmat(filename, ['msk', 'ctr'])
    msk_bool = msk.astype('bool')
    return msk_bool.transpose((2, 0, 1)), ctr.T
Ejemplo n.º 10
0
def load_roi_info(datafiles):
    """Gather ROI masks, centers, depths, and mean-channel images across planes.

    datafiles is a list of per-plane matfiles. Returns a dict with keys
    mean_red_channel, mean_red_channel_corrected, mean_green_channel,
    mean_green_channel_enhanced, cell_depth, cell_center, cell_mask.
    Mean-image entries are None when neither of the two supported variable
    naming conventions is present in the files.
    """
    nplanes = len(datafiles)
    # probe the first file to learn mask shape, then stack all planes
    msk,ctr = load_msk_ctr(datafiles[0])
    cell_mask = np.zeros((0,)+msk.shape[1:],dtype='bool')
    cell_center = np.zeros((0,2))
    cell_depth = np.zeros((0,))
    for iplane in range(nplanes):
        msk,ctr = load_msk_ctr(datafiles[iplane])
        cell_mask = np.concatenate((cell_mask,msk),axis=0)
        cell_center = np.concatenate((cell_center,ctr),axis=0)
        # each ROI's depth is just its plane index
        cell_depth = np.concatenate((cell_depth,iplane*np.ones((msk.shape[0],))))

    # two possible naming conventions for the mean images: the older
    # green_mean/red_mean pair, or the meanImg* family; probe for both
    # (ut.loadmat presumably returns None for a missing variable — confirm)
    varnames1 = ['green_mean','red_mean']
    varnames2= ['meanImg','meanImgE','meanImg_chan2','meanImg_chan2_corrected']
    outputs1 = ut.loadmat(datafiles[0],varnames1)
    outputs2 = ut.loadmat(datafiles[0],varnames2)
    use_first,use_second = [not outputs[0] is None for outputs in [outputs1,outputs2]]

    if use_first:
        # load plane 0 once to learn the image shape, then fill all planes
        iplane = 0
        mean_image_green,mean_image_red = ut.loadmat(datafiles[iplane],varnames1)
        shp = mean_image_green.shape
        mean_red_channel = np.zeros((nplanes,)+shp)
        mean_green_channel = np.zeros((nplanes,)+shp)
        for iplane in range(nplanes):
            mean_image_green,mean_image_red = ut.loadmat(datafiles[iplane],varnames1)
            mean_green_channel[iplane] = mean_image_green
            mean_red_channel[iplane] = mean_image_red
        # corrected/enhanced variants only exist in the second convention
        mean_red_channel_corrected = None
        mean_green_channel_enhanced = None
    elif use_second:
        iplane = 0
        mean_image_green,mean_image_green_enhanced,mean_image_red,mean_image_red_corrected = ut.loadmat(datafiles[iplane],varnames2)
        shp = mean_image_green.shape
        mean_green_channel = np.zeros((nplanes,)+shp)
        mean_green_channel_enhanced = np.zeros((nplanes,)+shp)
        mean_red_channel = np.zeros((nplanes,)+shp)
        mean_red_channel_corrected = np.zeros((nplanes,)+shp)
        for iplane in range(nplanes):
            mean_image_green,mean_image_green_enhanced,mean_image_red,mean_image_red_corrected = ut.loadmat(datafiles[iplane],varnames2)
            #mean_image_green,mean_image_red = ut.loadmat(datafiles[iplane],['meanImg','meanImg_chan2_corrected'])
            mean_red_channel[iplane] = mean_image_red
            mean_red_channel_corrected[iplane] = mean_image_red_corrected
            mean_green_channel[iplane] = mean_image_green
            mean_green_channel_enhanced[iplane] = mean_image_green_enhanced
    else:
        print('no mean image data for ' + datafiles[0])
        mean_red_channel = None
        mean_red_channel_corrected = None
        mean_green_channel = None
        mean_green_channel_enhanced = None

    proc = {}

    proc['mean_red_channel'] = mean_red_channel
    proc['mean_red_channel_corrected'] = mean_red_channel_corrected
    proc['mean_green_channel'] = mean_green_channel
    proc['mean_green_channel_enhanced'] = mean_green_channel_enhanced
    proc['cell_depth'] = cell_depth
    proc['cell_center'] = cell_center
    proc['cell_mask'] = cell_mask

    return proc