Example #1
def analyze_luminance(datafiles,stimfile,retfile=None,frame_adjust=None,rg=(1,0),nbefore=nbefore,nafter=nafter,criterion=None,criterion_cutoff=None):
    if criterion is None:
        criterion = lambda x: np.abs(x)>100
    nbydepth = get_nbydepth(datafiles)
#     trialwise,ctrialwise,strialwise = gen_trialwise(datafiles,frame_adjust=frame_adjust)
    trialwise,ctrialwise,strialwise,dfof,straces,dtrialwise,trialwise_t_offset = ut.gen_precise_trialwise(datafiles,rg=rg,frame_adjust=frame_adjust,nbefore=nbefore,nafter=nafter,blcutoff=blcutoff)
    zstrialwise = sst.zscore(strialwise.reshape((strialwise.shape[0],-1)).T).T.reshape(strialwise.shape)
    
    result = sio.loadmat(stimfile,squeeze_me=True)['result'][()]
    
    infofile = sio.loadmat(datafiles[0][:-12]+'.mat',squeeze_me=True) # original .mat file
    frame = infofile['info'][()]['frame'][()]
    frame = frame[rg[0]:frame.size+rg[1]]
    #assert(True==False)
    if frame_adjust:
        frame = frame_adjust(frame)
    
    data = strialwise #[:,:,nbefore:-nafter]
    data_dfof = trialwise #[:,:,nbefore:-nafter]
    print(data.shape)
    
    try:
        dxdt = sio.loadmat(datafiles[0],squeeze_me=True)['dxdt']
    except Exception:  # fall back to h5py for MATLAB v7.3 files
        with h5py.File(datafiles[0],mode='r') as f:
            dxdt = f['dxdt'][:].T
            
    trialrun = np.zeros(frame[0::2].shape)
    for i in range(len(trialrun)):
        trialrun[i] = np.abs(dxdt[frame[0::2][i]:frame[1::2][i]]).mean()
    runtrial = criterion(trialrun)
    print(runtrial.sum()/runtrial.size)
    if criterion_cutoff:
        if runtrial.sum()/runtrial.size < criterion_cutoff:
            #return Savg,Smean,lb,ub,pval,spont,Smean_stat,proc
            return [np.array(())]*8
             
    stimparams = result['stimParams']
    gratingInfo = result['gratingInfo']
        
    indexlut,stimp = np.unique(stimparams,axis=0,return_inverse=True)
    
    intensity = gratingInfo['Intensity'][()]
    
    uintensity = np.unique(intensity)
    nintensity = len(uintensity)
 
#    Smean = np.zeros((data.shape[0],nangle180,nsize,ncontrast,data.shape[2]))
#    Fmean = np.zeros((data.shape[0],nangle180,nsize,ncontrast,data.shape[2]))
#    Smean_stat = np.zeros((data.shape[0],nangle180,nsize,ncontrast,data.shape[2]))
#    Stavg = np.zeros((data.shape[0],nangle180,nsize,ncontrast,int(data.shape[1]/nangle/nsize/ncontrast)))
#    
#    Strials = {}
#    Sspont = {}
#    for i in range(nangle180):
#        for j in range(nsize):
#            for k in range(ncontrast):
#                lkat = np.logical_and(runtrial,np.logical_and(angle180==uangle180[i],np.logical_and(size==usize[j],contrast==ucontrast[k])))
#                Smean[:,i,j,k,:] = np.nanmean(data[:,lkat,:],1)
#                Fmean[:,i,j,k,:] = np.nanmean(data_dfof[:,lkat,:],1)
#                Strials[(i,j,k)] = np.nanmean(data[:,lkat,nbefore:-nafter],2)
#                Sspont[(i,j,k)] = np.nanmean(data[:,lkat,:nbefore],2)
#                stat = np.logical_and(np.logical_not(runtrial),np.logical_and(angle180==uangle180[i],np.logical_and(size==usize[j],contrast==ucontrast[k])))
#                Smean_stat[:,i,j,k] = np.nanmean(data[:,stat],1)
# 
#    lb = np.zeros((strialwise.shape[0],nangle180,nsize,ncontrast))
#    ub = np.zeros((strialwise.shape[0],nangle180,nsize,ncontrast))
#    
#    for i in range(nangle180):
#        print(i)
#        for j in range(nsize):
#            for k in range(ncontrast):
#                if Strials[(i,j,k)].size:
#                    lb[:,i,j,k],ub[:,i,j,k] = ut.bootstrap(Strials[(i,j,k)],np.mean,axis=1,pct=(16,84))
#                else:
#                    lb[:,i,j,k] = np.nan
#                    ub[:,i,j,k] = np.nan
#    
#    pval = np.zeros((strialwise.shape[0],nangle180))
#    for j,theta in enumerate(uangle180):
#        print(theta)
#        _,pval[:,j] = sst.ttest_ind(Strials[(j,0,0)],Strials[(j,0,ncontrast-1)],axis=1)
#    Savg = np.nanmean(np.nanmean(Smean[:,:,:,:,nbefore:-nafter],axis=-1),axis=1)
#    Favg = np.nanmean(np.nanmean(Fmean[:,:,:,:,nbefore:-nafter],axis=-1),axis=1)
#    
#    spont = np.zeros((Savg.shape[0],))
#    keylist = list(Sspont.keys())
#    nkeys = len(keylist)
#    for key in Sspont.keys():
#        spont = spont + Sspont[key].mean(1)/nkeys

    proc = {}
    proc['runtrial'] = runtrial
    proc['trialrun'] = trialrun
    proc['intensity'] = intensity
    proc['trialwise'] = trialwise
    proc['strialwise'] = strialwise
    #proc['ctrialwise'] = ctrialwise
    proc['dtrialwise'] = dtrialwise
    proc['dfof'] = dfof
    proc['trialwise_t_offset'] = trialwise_t_offset
    #proc['straces'] = straces
    #proc['oriavg_dfof'] = Favg
    
    return proc
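A minimal call sketch for analyze_luminance, assuming the module-level globals it relies on (nbefore, nafter, blcutoff, and the np/sio/sst/h5py/ut imports) are already defined; the file paths below are placeholders following the '*_ot_NNN.rois' naming the function expects, not paths from the source:

datafiles = ['/data/2P/181215/M0001/ot/M0001_000_003_ot_000.rois',
             '/data/2P/181215/M0001/ot/M0001_000_003_ot_001.rois']
stimfile = '/data/visual_stim/181215/M0001/M0001_000_003.mat'

# stricter running criterion: a trial counts as running above 150 (a.u.)
proc = analyze_luminance(datafiles, stimfile,
                         criterion=lambda x: np.abs(x) > 150)
print(proc['runtrial'].mean())  # fraction of trials classified as running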
Example #2
def do_process(thisfold,
               thisfile,
               rg=(2, -10),
               nbefore=8,
               nafter=8,
               criterion=lambda x: x > 100,
               datafoldbase='/home/mossing/scratch/2Pdata/',
               stimfoldbase='/home/mossing/scratch/visual_stim/'):

    #datafoldbase = '/home/mossing/scratch/2Pdata/'
    datafold = datafoldbase + thisfold + 'ot/'
    datafiles = [
        thisfile + '_ot_' + number + '.rois'
        for number in ['000', '001', '002', '003']
    ]

    #stimfoldbase = '/home/mossing/scratch/visual_stim/'
    stimfold = stimfoldbase + thisfold
    stimfile = thisfile + '.mat'

    datafiles = [datafold + file for file in datafiles]
    datafiles = [x for x in datafiles if os.path.exists(x)]
    stimfile = stimfold + stimfile
    retfile = datafoldbase + thisfold + 'retinotopy_' + thisfile[-3:] + '.mat'

    #     nbefore = 4
    #     nafter = 4

    ret, paramdict, pval, trialrun, has_inverse, locinds = analyze_precise_retinotopy(
        datafiles,
        stimfile,
        retfile,
        criterion=criterion,
        rg=rg,
        nbefore=nbefore,
        nafter=nafter)
    nbydepth = get_nbydepth(datafiles)
    trialwise, ctrialwise, strialwise, dfof, _, dtrialwise, trialwise_t_offset = ut.gen_precise_trialwise(
        datafiles, rg=rg, nbefore=nbefore, nafter=nafter)
    #     traces,ctraces,straces,dfof,baseline = rt.gen_traces(datafiles)
    spont = strialwise[:, trialrun > 100, :nbefore].mean(-1).mean(-1)
    proc = {}
    proc['trialwise'] = trialwise
    proc['dtrialwise'] = dtrialwise
    proc['strialwise'] = strialwise
    proc['trialwise_t_offset'] = trialwise_t_offset
    proc['spont'] = spont
    proc['locinds'] = locinds
    proc['dfof'] = dfof

    try:
        retfile_load = sio.loadmat(retfile)
    except Exception:  # e.g. the retinotopy file is missing or unreadable
        print('retinotopy file not accessible')
        retfile_load = {}

    if has_inverse:
        retfile_load['paramdict_normal'] = paramdict[0]
        retfile_load['paramdict_inv'] = paramdict[1]
    else:
        retfile_load['paramdict_normal'] = paramdict
    retfile_load['pval_ret'] = pval
    retfile_load['has_inverse'] = has_inverse
    retfile_load['ret'] = ret
    sio.savemat(retfile, retfile_load)
    print('saving here ' + retfile)
    return ret, paramdict, pval, trialrun, has_inverse, nbydepth, proc
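A hedged usage sketch for do_process, assuming the directory convention the function encodes (datafoldbase/thisfold/ot/ holding thisfile_ot_NNN.rois files, with thisfold starting with a yymmdd folder, which the date checks inside analyze_precise_retinotopy rely on); the session identifiers are placeholders:

ret, paramdict, pval, trialrun, has_inverse, nbydepth, proc = do_process(
    '181215/M0001/', 'M0001_000_003',
    rg=(2, -10), nbefore=8, nafter=8,
    datafoldbase='/home/mossing/scratch/2Pdata/',
    stimfoldbase='/home/mossing/scratch/visual_stim/')
print(proc['spont'].shape)  # per-cell spontaneous (pre-stimulus) activity on running trials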
Example #3
def analyze_precise_retinotopy(datafiles,
                               stimfile,
                               retfile,
                               criterion=lambda x: x > 100,
                               rg=(2, -10),
                               nbefore=nbefore,
                               nafter=nafter,
                               gridsize=10):
    nbydepth = np.zeros((len(datafiles), ))
    for i, datafile in enumerate(datafiles):
        corrected = ut.loadmat(datafile, 'corrected')
        nbydepth[i] = corrected.shape[0]


#         with h5py.File(datafile,mode='r') as f:
#             nbydepth[i] = (f['corrected'][:].T.shape[0])
    trialwise, ctrialwise, strialwise, dfof, straces, dtrialwise, trialwise_t_offset = ut.gen_precise_trialwise(
        datafiles, rg=rg, nbefore=nbefore, nafter=nafter)
    zstrialwise = sst.zscore(strialwise.reshape(
        (strialwise.shape[0], -1)).T).T.reshape(strialwise.shape)

    result = sio.loadmat(stimfile, squeeze_me=True)['result'][()]

    infofile = sio.loadmat(datafiles[0][:-12] + '.mat', squeeze_me=True)
    #retfile = sio.loadmat(retfile,squeeze_me=True)

    locinds = result['locinds']  #retfile['locinds']

    has_inverse = False
    try:
        #        inverted = result['inverted'][()]
        inverted = np.tile(result['inverted'], (result['repetitions'], ))
        has_inverse = True
    except Exception:  # older stim files have no 'inverted' field
        has_inverse = False

    frame = infofile['info'][()]['frame'][()]
    frame = np.unique(
        frame[rg[0]:frame.size + rg[1]]
    )  # this format for all the retinotopic mapping through 12/12

    data = strialwise[:, :, nbefore:strialwise.shape[-1] - nafter]

    Ny = locinds[:, 0].max()
    Nx = locinds[:, 1].max()

    try:
        try:
            dxdt = sio.loadmat(datafiles[1], squeeze_me=True)['dxdt']
        except Exception:  # MATLAB v7.3 file; read it via h5py
            with h5py.File(datafiles[1], mode='r') as f:
                dxdt = f['dxdt'][:].T
    except Exception:
        print('no running data saved; assuming all running')
        dxdt = 101 * np.ones((frame.max(), ))

    trialrun = np.zeros(frame[0::2].shape)
    for i in range(len(trialrun)):
        trialrun[i] = dxdt[frame[0::2][i]:frame[1::2][i]].mean()
    runtrial = criterion(np.abs(trialrun))

    if has_inverse:
        ret = np.zeros((data.shape[0], Ny, Nx, 2))
        for j in range(Ny):
            for k in range(Nx):
                lkat = np.logical_and(
                    np.logical_and(
                        np.logical_and(locinds[:, 0] == j + 1,
                                       locinds[:, 1] == k + 1), runtrial),
                    np.nanmax(np.nanmax(data, 0), -1))
                lkat_reg = np.logical_and(lkat, np.logical_not(inverted))
                lkat_inv = np.logical_and(lkat, inverted)
                n_reg = lkat_reg.sum()
                n_inv = lkat_inv.sum()
                print((n_reg, n_inv))
                for idx in np.where(lkat_reg)[0]:
                    ret[:, j, k,
                        0] = ret[:, j, k, 0] + data[:, idx].mean(1) / n_reg
                for idx in np.where(lkat_inv)[0]:
                    ret[:, j, k,
                        1] = ret[:, j, k, 1] + data[:, idx].mean(1) / n_inv
                assert (~np.isnan(np.nanmax(ret[:, j, k])))
    else:
        ret = np.zeros((data.shape[0], Ny, Nx))
        for j in range(Ny):
            for k in range(Nx):
                lkat_reg = np.logical_and(
                    np.logical_and(locinds[:, 0] == j + 1,
                                   locinds[:, 1] == k + 1), runtrial)
                n_reg = lkat_reg.sum()
                print((n_reg, ))  #n_inv))
                for idx in np.where(lkat_reg)[0]:
                    ret[:, j, k] = ret[:, j, k] + data[:, idx].mean(1) / n_reg
                assert (~np.isnan(np.nanmax(ret[:, j, k])))

    if 'range' in result.dtype.names:
        gridsize = 5
        ctr = np.array(
            (result['range'][0:2].mean(), -result['range'][2:].mean())
        )  # ctr: x center of range, y center of range # fixed 18/10/30; for expts. after 18/10/30, this will have to be switched!
    else:
        gridsize = 10
        ctr = np.array((0, 0))

    # flipping for expts. after 18/10/30
    toflip = int(datafiles[0].split('/')[-4]) > 181030
    if toflip:
        ctr = ctr * np.array((1, -1))

    xrg = np.arange(-(Nx - 1) * gridsize / 2, (Nx + 1) * gridsize / 2,
                    gridsize)
    yrg = np.arange(-(Ny - 1) * gridsize / 2, (Ny + 1) * gridsize / 2,
                    gridsize)

    # inverting for expts. before 18/12/09
    notquitefixed = int(datafiles[0].split('/')[-4]) < 181209
    if toflip and notquitefixed:
        yrg = -yrg

    if has_inverse:
        paramdict = [
            ut.fit_2d_gaussian((xrg, yrg), ret[:, :, :, 0]),
            ut.fit_2d_gaussian((xrg, yrg), ret[:, :, :, 1])
        ]
        paramdict[0]['gridsize'] = gridsize
        paramdict[1]['gridsize'] = gridsize
        paramdict[0]['ctr'] = ctr
        paramdict[1]['ctr'] = ctr
    else:
        paramdict = ut.fit_2d_gaussian(
            (xrg, yrg), ret)  #,ut.fit_2d_gaussian((xrg,yrg),ret[:,:,:,1])]
        paramdict['gridsize'] = gridsize
        paramdict['ctr'] = ctr

    pval_ret = np.zeros(strialwise.shape[0])
    for i in range(strialwise.shape[0]):
        _, pval_ret[i] = sst.ttest_rel(strialwise[i, :, nbefore - 1],
                                       strialwise[i, :, nbefore + 1])

    return ret, paramdict, pval_ret, trialrun, has_inverse, locinds
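A short inspection sketch, under the assumption that datafiles/stimfile/retfile are set up as in the other examples and that matplotlib is available; the cell index is arbitrary, and the map orientation follows the Ny-by-Nx layout of ret above:

import matplotlib.pyplot as plt

ret, paramdict, pval_ret, trialrun, has_inverse, locinds = analyze_precise_retinotopy(
    datafiles, stimfile, retfile, rg=(2, -10), nbefore=8, nafter=8)

icell = 0  # arbitrary cell index
# ret is (n_cells, Ny, Nx) without an inverse condition, (n_cells, Ny, Nx, 2) with one
retmap = ret[icell, :, :, 0] if has_inverse else ret[icell]
plt.imshow(retmap)
plt.title('cell %d, p = %.3g' % (icell, pval_ret[icell]))
plt.colorbar()
plt.show()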
Example #4
def analyze_figure_ground(datafiles,
                          stimfile,
                          retfile=None,
                          frame_adjust=None,
                          rg=None,
                          nbefore=4,
                          nafter=4):
    nbydepth = get_nbydepth(datafiles)
    #trialwise,ctrialwise,strialwise,dfof,straces = ut.gen_precise_trialwise(datafiles,frame_adjust=frame_adjust)
    trialwise, ctrialwise, strialwise, dfof, straces, dtrialwise, proc1 = ut.gen_precise_trialwise(
        datafiles,
        rg=rg,
        frame_adjust=frame_adjust,
        nbefore=nbefore,
        nafter=nafter)
    trialwise_t_offset = proc1['trialwise_t_offset']
    raw_trialwise = proc1['raw_trialwise']
    neuropil_trialwise = proc1['neuropil_trialwise']
    print(strialwise.shape)
    zstrialwise = sst.zscore(strialwise.reshape(
        (strialwise.shape[0], -1)).T).T.reshape(strialwise.shape)

    result = sio.loadmat(stimfile, squeeze_me=True)['result'][()]

    infofile = sio.loadmat(datafiles[0][:-12] + '.mat', squeeze_me=True)
    frame = infofile['info'][()]['frame'][()]
    if frame_adjust:
        print('adjusted')
        frame = frame_adjust(frame)
    if np.remainder(frame.shape[0], 2):
        print('deleted one')
        frame = frame[:-1]

    data = strialwise  #[:,:,nbefore:-nafter]

    try:
        dxdt = sio.loadmat(datafiles[1], squeeze_me=True)['dxdt']
    except Exception:  # MATLAB v7.3 file; read it via h5py
        with h5py.File(datafiles[1], mode='r') as f:
            dxdt = f['dxdt'][:].T

    trialrun = np.zeros(frame[0::2].shape)
    for i in range(len(trialrun)):
        trialrun[i] = dxdt[frame[0::2][i]:frame[1::2][i]].mean()
    runtrial = trialrun > 100

    pval = np.zeros(strialwise.shape[0])
    for i in range(strialwise.shape[0]):
        _, pval[i] = sst.ttest_rel(strialwise[i, :, nbefore - 1],
                                   strialwise[i, :, nbefore + 1])

    stimparams = result['stimParams']

    order = ['ctrl', 'fig', 'grnd', 'iso', 'cross']
    norder = len(order)
    ori = stimparams[0]
    sz = stimparams[1]
    figContrast = stimparams[-2]
    grndContrast = stimparams[-1]

    paramdict = {}
    paramdict['ctrl'] = np.logical_and(figContrast == 0, grndContrast == 0)
    paramdict['fig'] = np.logical_and(figContrast == 1, grndContrast == 0)
    paramdict['grnd'] = np.logical_and(
        np.logical_and(figContrast == 0, grndContrast == 1), sz > 0)
    paramdict['iso'] = sz == 0
    paramdict['cross'] = np.logical_and(figContrast == 1, grndContrast == 1)

    indexlut, stimp = np.unique(stimparams, axis=1, return_inverse=True)

    angle = stimparams[0]
    size = stimparams[1]
    contrast = stimparams[4]

    ucontrast = np.unique(contrast)
    uangle = np.unique(angle)
    usize = np.unique(size)
    ncontrast = len(ucontrast)
    nangle = len(uangle)
    nsize = len(usize)

    angle180 = np.remainder(angle, 180)
    uangle180 = np.unique(angle180)
    nangle180 = len(uangle180)

    Smean = np.zeros(
        (strialwise.shape[0], norder, nangle180, strialwise.shape[2]))
    Stavg = np.zeros((strialwise.shape[0], norder, nangle180,
                      int(strialwise.shape[1] / nangle / norder)))

    Strials = {}
    Sspont = {}
    #print(runtrial.shape)
    #for i,name in enumerate(order):
    #    for j,theta in enumerate(uangle180):
    #        lkat = np.logical_and(runtrial,np.logical_and(angle180==theta,paramdict[name]))
    #        if lkat.sum()==1:
    #            print('problem')
    #        Smean[:,i,j,:] = data[:,lkat,:].mean(1)
    #        Strials[(i,j)] = data[:,lkat,nbefore:-nafter].mean(2)
    #        Sspont[(i,j)] = data[:,lkat,:nbefore].mean(2)

    lb = np.zeros((strialwise.shape[0], norder, nangle180))
    ub = np.zeros((strialwise.shape[0], norder, nangle180))

    #for i in range(norder):
    #    print(i)
    #    for j in range(nangle180):
    #        lb[:,i,j],ub[:,i,j] = ut.bootstrap(Strials[(i,j)],np.mean,axis=1,pct=(16,84))
    # mn[:,i,j,k] = np.nanmean(Strials[(i,j,k)],axis=1)

    pval_fig = np.zeros((strialwise.shape[0], nangle180))
    #for j,theta in enumerate(uangle180):
    #    print(theta)
    #    figind = int(np.where(np.array([x=='fig' for x in order]))[0])
    #    _,pval_fig[:,j] = sst.ttest_rel(Sspont[(figind,j)],Strials[(figind,j)],axis=1)
    #
    pval_grnd = np.zeros((strialwise.shape[0], nangle180))
    #for j,theta in enumerate(uangle180):
    #    print(theta)
    #    grndind = int(np.where(np.array([x=='grnd' for x in order]))[0])
    #    _,pval_grnd[:,j] = sst.ttest_rel(Sspont[(grndind,j)],Strials[(grndind,j)],axis=1)

    Savg = np.nanmean(np.nanmean(Smean[:, :, :, nbefore:-nafter], axis=-1),
                      axis=2)

    Storiavg = Stavg.mean(1)
    # _,pval = sst.ttest_ind(Storiavg[:,0,-1].T,Storiavg[:,0,0].T)

    #suppressed = np.logical_and(pval<0.05,Savg[:,0,-1]<Savg[:,0,0])
    #facilitated = np.logical_and(pval<0.05,Savg[:,0,-1]>Savg[:,0,0])
    proc = {}
    proc['Smean'] = Smean
    proc['lb'] = lb
    proc['ub'] = ub
    proc['pval_fig'] = pval_fig
    proc['pval_grnd'] = pval_grnd
    proc['trialrun'] = trialrun
    proc['strialwise'] = strialwise
    proc['dtrialwise'] = dtrialwise
    proc['trialwise'] = trialwise
    proc['dfof'] = dfof
    proc['trialwise_t_offset'] = trialwise_t_offset
    proc['raw_trialwise'] = raw_trialwise
    proc['neuropil_trialwise'] = neuropil_trialwise
    proc['order'] = order
    proc['angle'] = angle
    proc['paramdict'] = paramdict
    proc['Sspont'] = Sspont

    #return Savg,Smean,lb,ub,pval_fig,pval_grnd,trialrun
    return Savg, proc
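A hedged post-processing sketch using the outputs of analyze_figure_ground. Note that Smean (and hence Savg) stays all-zero in this version because the averaging loops are commented out, so per-condition means are recomputed here from proc['strialwise'] and the boolean trial masks in proc['paramdict']; nbefore = nafter = 4 as in the defaults, and the modulation index is just a simple illustration, not part of the original code:

Savg, proc = analyze_figure_ground(datafiles, stimfile, rg=None, nbefore=4, nafter=4)

# per-trial response in the stimulus window, shape (n_cells, n_trials)
resp = proc['strialwise'][:, :, 4:-4].mean(-1)
fig_resp = resp[:, proc['paramdict']['fig']].mean(1)    # figure-only trials
grnd_resp = resp[:, proc['paramdict']['grnd']].mean(1)  # ground-only trials
fmi = (fig_resp - grnd_resp) / (fig_resp + grnd_resp)   # a simple figure-ground modulation index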
Example #5
def analyze(datafiles,stimfile,frame_adjust=None,rg=(1,0),nbefore=nbefore,nafter=nafter,stim_params=None):
    # stim_params: list (or similar) of tuples, where the first element is a string corresponding to a field of the
    # output hdf5 file proc, and the second element is a function taking result as an input and yielding the correct data

    # find number of ROIs in each plane
    nbydepth = get_nbydepth(datafiles)

    nplanes = len(nbydepth)

    # get trialized fluorescence data
    trialwise,ctrialwise,strialwise,dfof,straces,dtrialwise,proc1 = ut.gen_precise_trialwise(datafiles,rg=rg,frame_adjust=frame_adjust,nbefore=nbefore,nafter=nafter,blcutoff=blcutoff) # , trialwise_t_offset

    # load stimulus data
    #result = sio.loadmat(stimfile,squeeze_me=True)['result'][()]
    result = ut.loadmat(stimfile,'result')[()]
    #result = result[()]
    
    # correct stim trigger frames if necessary
    #infofile = sio.loadmat(datafiles[0][:-12]+'.mat',squeeze_me=True) # original .mat file
    info = ut.loadmat(datafiles[0][:-12]+'.mat','info')[()] # original .mat file
    #frame = infofile['info'][()]['frame'][()]
    frame = info['frame'][()].astype(np.int64)
    if rg is not None:
        frame = frame[rg[0]:frame.size+rg[1]]
    else:
        event_id = info['event_id'][()].astype(np.int64)
        frame = frame[event_id==1]
    if frame_adjust:
        frame = frame_adjust(frame)

    # unwrap 16-bit frame-counter overflow (the counter wraps back to 0 at 65536)
    while np.min(np.diff(frame)) < 0:
        brk = np.argmin(np.diff(frame))+1
        frame[brk:] = frame[brk:] + 65536
         
    # load running and pupil data
    dxdt = ut.loadmat(datafiles[0],'dxdt').flatten()
    try:
        # first entry of pupil_ctr is x, second entry is y
        pupil_ctr,pupil_area,pupil_frac_ctr,pupil_frac_area = ut.loadmat(datafiles[0],['pupil_ctr','pupil_area','pupil_frac_ctr','pupil_frac_area'])
        pupil_area = pupil_area.flatten()
        pupil_frac_area = pupil_frac_area.flatten()
    except Exception:
        print('no eye tracking data for ' + stimfile)
        pupil_ctr = None
        pupil_frac_ctr = None
        pupil_area = None
        pupil_frac_area = None

    nplanes = len(datafiles)

    msk,ctr = load_msk_ctr(datafiles[0])
    cell_mask = np.zeros((0,)+msk.shape[1:],dtype='bool')
    cell_center = np.zeros((0,2))
    cell_depth = np.zeros((0,))
    for iplane in range(nplanes):
        msk,ctr = load_msk_ctr(datafiles[iplane])
        cell_mask = np.concatenate((cell_mask,msk),axis=0)
        cell_center = np.concatenate((cell_center,ctr),axis=0)
        cell_depth = np.concatenate((cell_depth,iplane*np.ones((msk.shape[0],))))

#    try:
#        try:
#        #if True:
#            #mean_image_red,mean_image_green = ut.loadmat(datafiles[0],['red_mean','green_mean'])
#            #mean_red_channel = np.zeros((len(datafiles),)+mean_image_red.shape)
#            #mean_green_channel = np.zeros((len(datafiles),)+mean_image_green.shape)
#            mean_red_channel = np.zeros((nplanes,)+cell_mask.shape[1:])
#            mean_green_channel = np.zeros((nplanes,)+cell_mask.shape[1:])
#            for iplane in range(nplanes):
#                mean_image_red,mean_image_green = ut.loadmat(datafiles[iplane],['red_mean','green_mean'])
#                mean_red_channel[iplane] = mean_image_red
#                mean_green_channel[iplane] = mean_image_green
#        except:
#            mean_red_channel = np.zeros((nplanes,)+cell_mask.shape[1:])
#            mean_red_channel_corrected = np.zeros((nplanes,)+cell_mask.shape[1:])
#            mean_green_channel = np.zeros((nplanes,)+cell_mask.shape[1:])
#            mean_green_channel_enhanced = np.zeros((nplanes,)+cell_mask.shape[1:])
#            for iplane in range(nplanes):
#                mean_image_green,mean_image_green_enhanced,mean_image_red,mean_image_red_corrected = ut.loadmat(datafiles[iplane],['meanImg','meanImgE','meanImg_chan2','meanImg_chan2_corrected'])
#                #mean_image_green,mean_image_red = ut.loadmat(datafiles[iplane],['meanImg','meanImg_chan2_corrected'])
#                mean_red_channel[iplane] = mean_image_red
#                mean_red_channel_corrected[iplane] = mean_image_red_corrected
#                mean_green_channel[iplane] = mean_image_green
#                mean_green_channel_enhanced[iplane] = mean_image_green_enhanced
#    except:
#        print('no mean image data for ' + stimfile)
#        mean_red_channel = None
#        mean_red_channel_corrected = None
#        mean_green_channel = None
#        mean_green_channel_enhanced = None

    #varnames1 = ['green_mean','red_mean']
    #varnames2= ['meanImg','meanImgE','meanImg_chan2','meanImg_chan2_corrected']
    #outputs1 = ut.loadmat(datafiles[0],varnames1)
    #outputs2 = ut.loadmat(datafiles[0],varnames2)
    #use_first,use_second = [not outputs[0] is None for outputs in [outputs1,outputs2]]

    #if use_first:
    #    mean_red_channel = np.zeros((nplanes,)+cell_mask.shape[1:])
    #    mean_green_channel = np.zeros((nplanes,)+cell_mask.shape[1:])
    #    for iplane in range(nplanes):
    #        mean_image_green,mean_image_red = ut.loadmat(datafiles[iplane],varnames1)
    #        mean_green_channel[iplane] = mean_image_green
    #        mean_red_channel[iplane] = mean_image_red
    #    mean_red_channel_corrected = None
    #    mean_green_channel_enhanced = None
    #elif use_second:
    #    mean_green_channel = np.zeros((nplanes,)+cell_mask.shape[1:])
    #    mean_green_channel_enhanced = np.zeros((nplanes,)+cell_mask.shape[1:])
    #    mean_red_channel = np.zeros((nplanes,)+cell_mask.shape[1:])
    #    mean_red_channel_corrected = np.zeros((nplanes,)+cell_mask.shape[1:])
    #    for iplane in range(nplanes):
    #        mean_image_green,mean_image_green_enhanced,mean_image_red,mean_image_red_corrected = ut.loadmat(datafiles[iplane],['meanImg','meanImgE','meanImg_chan2','meanImg_chan2_corrected'])
    #        #mean_image_green,mean_image_red = ut.loadmat(datafiles[iplane],['meanImg','meanImg_chan2_corrected'])
    #        mean_red_channel[iplane] = mean_image_red
    #        mean_red_channel_corrected[iplane] = mean_image_red_corrected
    #        mean_green_channel[iplane] = mean_image_green
    #        mean_green_channel_enhanced[iplane] = mean_image_green_enhanced
    #else:
    #    print('no mean image data for ' + stimfile)
    #    mean_red_channel = None
    #    mean_red_channel_corrected = None
    #    mean_green_channel = None
    #    mean_green_channel_enhanced = None
    # trialize running and pupil data
    #try:
    roi_proc = load_roi_info(datafiles)
    #except:
    #    roi_proc = None
    frame_div = np.floor(2*frame/nplanes).astype(np.int64)
    trialrun = ut.trialize(dxdt.T,frame,nbefore=nbefore,nafter=nafter)
    trialctr = ut.trialize(pupil_ctr,frame_div,nbefore=nbefore,nafter=nafter)
    trialfracctr = ut.trialize(pupil_frac_ctr,frame_div,nbefore=nbefore,nafter=nafter)
    trialarea = ut.trialize(pupil_area,frame_div,nbefore=nbefore,nafter=nafter)
    trialfracarea = ut.trialize(pupil_frac_area,frame_div,nbefore=nbefore,nafter=nafter)

    proc = {}
    proc['trialrun'] = trialrun
    proc['trialctr'] = trialctr
    proc['trialarea'] = trialarea
    proc['trialfracctr'] = trialfracctr
    proc['trialfracarea'] = trialfracarea
    proc['trialwise'] = trialwise
    proc['strialwise'] = strialwise
    proc['nbydepth'] = nbydepth
    proc['dtrialwise'] = dtrialwise
    proc['dfof'] = dfof
    proc['trialwise_t_offset'] = proc1['trialwise_t_offset']
    proc['raw_trialwise'] = proc1['raw_trialwise']
    proc['neuropil_trialwise'] = proc1['neuropil_trialwise']
    if roi_proc:
        for key in roi_proc:
            proc[key] = roi_proc[key]
    else:
        print('could not compute roi info')
    #proc['mean_red_channel'] = mean_red_channel
    #proc['mean_red_channel_corrected'] = mean_red_channel_corrected
    #proc['mean_green_channel'] = mean_green_channel
    #proc['mean_green_channel_enhanced'] = mean_green_channel_enhanced
    #proc['cell_depth'] = cell_depth
    #proc['cell_center'] = cell_center
    #proc['cell_mask'] = cell_mask
    proc['nbefore'] = nbefore
    proc['nafter'] = nafter
              
    # define extra parameters based on 'result' variable
    if stim_params is not None:
        for param in stim_params:
            name,function = param
            proc[name] = function(result)
    
    return proc
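A minimal call sketch illustrating the stim_params format described in the comments at the top of analyze; the gratingInfo field names are assumptions carried over from the other examples and may differ between experiments:

stim_params = [
    ('angle',    lambda result: result['gratingInfo']['Orientation'][()]),  # assumed field names
    ('size',     lambda result: result['gratingInfo']['Size'][()]),
    ('contrast', lambda result: result['gratingInfo']['Contrast'][()]),
]
proc = analyze(datafiles, stimfile, rg=(1, 0), stim_params=stim_params)
print(proc['contrast'].shape)  # one entry per trial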
Example #6
def analyze_size_contrast(datafiles,
                          stimfile,
                          retfile=None,
                          frame_adjust=None,
                          rg=(1, 0),
                          nbefore=nbefore,
                          nafter=nafter,
                          criterion=None,
                          criterion_cutoff=None):
    if criterion is None:
        criterion = lambda x: np.abs(x) > 100
    nbydepth = get_nbydepth(datafiles)
    #     trialwise,ctrialwise,strialwise = gen_trialwise(datafiles,frame_adjust=frame_adjust)
    trialwise, ctrialwise, strialwise, dfof, straces, dtrialwise, trialwise_t_offset = ut.gen_precise_trialwise(
        datafiles,
        rg=rg,
        frame_adjust=frame_adjust,
        nbefore=nbefore,
        nafter=nafter)
    zstrialwise = sst.zscore(strialwise.reshape(
        (strialwise.shape[0], -1)).T).T.reshape(strialwise.shape)

    result = sio.loadmat(stimfile, squeeze_me=True)['result'][()]

    infofile = sio.loadmat(datafiles[0][:-12] + '.mat',
                           squeeze_me=True)  # original .mat file
    frame = infofile['info'][()]['frame'][()]
    frame = frame[rg[0]:frame.size + rg[1]]
    if frame_adjust:
        frame = frame_adjust(frame)

    data = strialwise  #[:,:,nbefore:-nafter]

    try:
        dxdt = sio.loadmat(datafiles[0], squeeze_me=True)['dxdt']
    except Exception:  # MATLAB v7.3 file; read it via h5py
        with h5py.File(datafiles[0], mode='r') as f:
            dxdt = f['dxdt'][:].T

    trialrun = np.zeros(frame[0::2].shape)
    for i in range(len(trialrun)):
        trialrun[i] = np.abs(dxdt[frame[0::2][i]:frame[1::2][i]]).mean()
    runtrial = criterion(trialrun)
    print(runtrial.sum() / runtrial.size)
    if criterion_cutoff:
        if runtrial.sum() / runtrial.size < criterion_cutoff:
            #return Savg,Smean,lb,ub,pval,spont,Smean_stat,proc
            return [np.array(())] * 8

    #pval = np.zeros(strialwise.shape[0])
    #for i in range(strialwise.shape[0]):
    #    _,pval[i] = sst.ttest_rel(data[i,:,nbefore-1],data[i,:,nbefore+1])

    stimparams = result['stimParams']
    gratingInfo = result['gratingInfo']

    indexlut, stimp = np.unique(stimparams, axis=1, return_inverse=True)

    #angle = stimparams[0]
    #size = stimparams[1]
    #contrast = stimparams[4]
    #light = stimparams[5]

    angle = gratingInfo['Orientation'][()]
    size = gratingInfo['Size'][()]
    contrast = gratingInfo['Contrast'][()]
    light = gratingInfo['lightsOn'][()]

    ucontrast = np.unique(contrast)
    uangle = np.unique(angle[~np.isnan(angle)])
    usize = np.unique(size)
    ulight = np.unique(light)
    ncontrast = len(ucontrast)
    nangle = len(uangle)
    nsize = len(usize)
    nlight = len(ulight)
    print(nlight)

    angle180 = np.remainder(angle, 180)
    uangle180 = np.unique(angle180[~np.isnan(angle180)])
    nangle180 = len(uangle180)

    Smean = np.zeros(
        (data.shape[0], nangle180, nsize, ncontrast, nlight, data.shape[2]))
    Smean_stat = np.zeros(
        (data.shape[0], nangle180, nsize, ncontrast, nlight, data.shape[2]))
    Stavg = np.zeros(
        (data.shape[0], nangle180, nsize, ncontrast, nlight,
         int(data.shape[1] / nangle / nsize / ncontrast / nlight)))

    Strials = {}
    Sspont = {}
    for i in range(nangle180):
        for j in range(nsize):
            for k in range(ncontrast):
                for ii in range(nlight):
                    lkat = np.logical_and(
                        np.logical_and(
                            runtrial,
                            np.logical_and(
                                angle180 == uangle180[i],
                                np.logical_and(size == usize[j],
                                               contrast == ucontrast[k]))),
                        light == ulight[ii])
                    Smean[:, i, j, k, ii, :] = data[:, lkat, :].mean(1)
                    Strials[(i, j, k, ii)] = data[:, lkat,
                                                  nbefore:-nafter].mean(2)
                    Sspont[(i, j, k, ii)] = data[:, lkat, :nbefore].mean(2)
                    stat = np.logical_and(
                        np.logical_and(
                            np.logical_not(runtrial),
                            np.logical_and(
                                angle180 == uangle180[i],
                                np.logical_and(size == usize[j],
                                               contrast == ucontrast[k]))),
                        light == ulight[ii])
                    Smean_stat[:, i, j, k, ii] = data[:, stat].mean(1)

    lb = np.zeros((strialwise.shape[0], nangle180, nsize, ncontrast, nlight))
    ub = np.zeros((strialwise.shape[0], nangle180, nsize, ncontrast, nlight))
    # mn = np.zeros((strialwise.shape[0],nangle180,nsize,ncontrast))

    for i in range(nangle180):
        for j in range(nsize):
            for k in range(ncontrast):
                for ii in range(nlight):
                    if Strials[(i, j, k, ii)].size:
                        lb[:, i, j, k,
                           ii], ub[:, i, j, k,
                                   ii] = ut.bootstrap(Strials[(i, j, k, ii)],
                                                      np.mean,
                                                      axis=1,
                                                      pct=(16, 84))
                    else:
                        lb[:, i, j, k, ii] = np.nan
                        ub[:, i, j, k, ii] = np.nan
                # mn[:,i,j,k] = np.nanmean(Strials[(i,j,k)],axis=1)

    pval = np.zeros((strialwise.shape[0], nangle180))
    #     for i in range(pval.shape[0]):
    #         print(i)
    for j, theta in enumerate(uangle180):
        _, pval[:, j] = sst.ttest_rel(Sspont[(j, 0, ncontrast - 1, 0)],
                                      Strials[(j, 0, ncontrast - 1, 0)],
                                      axis=1)

    Savg = np.nanmean(np.nanmean(Smean[:, :, :, :, :, nbefore:-nafter],
                                 axis=-1),
                      axis=1)

    Storiavg = Stavg.mean(1)
    # _,pval = sst.ttest_ind(Storiavg[:,0,-1].T,Storiavg[:,0,0].T)

    #suppressed = np.logical_and(pval<0.05,Savg[:,0,-1]<Savg[:,0,0])
    #facilitated = np.logical_and(pval<0.05,Savg[:,0,-1]>Savg[:,0,0])
    spont = np.zeros((Savg.shape[0], ))
    keylist = list(Sspont.keys())
    nkeys = len(keylist)
    for key in Sspont.keys():
        spont = spont + Sspont[key].mean(1) / nkeys

    proc = {}
    proc['runtrial'] = runtrial
    proc['trialrun'] = trialrun
    proc['angle'] = angle
    proc['size'] = size
    proc['contrast'] = contrast
    proc['light'] = light
    proc['trialwise'] = trialwise
    proc['strialwise'] = strialwise
    #proc['ctrialwise'] = ctrialwise
    proc['dtrialwise'] = dtrialwise
    proc['trialwise_t_offset'] = trialwise_t_offset
    proc['dfof'] = dfof
    proc['frame'] = frame

    return Savg, Smean, lb, ub, pval, spont, Smean_stat, proc
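A hedged usage sketch for analyze_size_contrast, assuming the module-level nbefore/nafter it defaults to equal 8; the axis interpretation of Savg follows the Smean construction above (orientation is averaged out, leaving cells x size x contrast x light):

out = analyze_size_contrast(datafiles, stimfile, rg=(1, 0), nbefore=8, nafter=8)
Savg, Smean, lb, ub, pval, spont, Smean_stat, proc = out

icell = 0  # arbitrary cell index
# contrast tuning at the largest size, first light condition, baseline-subtracted
tuning = Savg[icell, -1, :, 0] - spont[icell]
print(tuning)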