Example #1
def evaluate_modulation_each_cell(cellDB, responseRange, baseRange, rangeStart):
    '''
    Evaluate response modulation for each cell in cellDB.
    NOTE: baseRange and rangeStart were undefined in the original, so here they
    are assumed to be parameters; responseRange is accepted but unused in this
    unfinished draft.
    '''
    # FIXME: finish this (see spikesanalysis.evaluate_modulation())
    nCells = len(cellDB)
    timeRange = np.array([baseRange[0], baseRange[-1]+np.diff(baseRange)[-1]])
    zStatsEachCell = None  # Allocated on the first iteration, once nCond is known
    for indcell, onecell in enumerate(cellDB):
        print('[%d/%d] %s' % (indcell+1, nCells, onecell))
        (behavData, trialEvents, dataTT, spikeInds) = sessionanalysis.load_cell_reversal(onecell)
        (eventOfInterest, xLabelStr) = sessionanalysis.align_to_event(behavData, 1)
        (trialsEachCond, condInfo) = sessionanalysis.trials_by_condition(behavData, 1, outcome='correct')
        trialsOfInterest = np.hstack(trialsEachCond)
        # Spike times relative to each event, plus per-trial index limits into that array
        (spikeTimesFromEventOnset, trialIndexForEachSpike, indexLimitsEachTrial) = \
            spikesanalysis.eventlocked_spiketimes(dataTT.timestamps[spikeInds],
                                                  eventOfInterest[trialsOfInterest],
                                                  timeRange)
        nCond = len(trialsEachCond)
        if zStatsEachCell is None:
            zStatsEachCell = np.empty((len(rangeStart), nCond, nCells))
        zStats = np.empty((len(rangeStart), nCond))
        for indCond in range(nCond):
            toUse = np.arange(condInfo['firstTrialEachCond'][indCond],
                              condInfo['lastTrialEachCond'][indCond])
            (zStats[:, indCond], pValues) = spikesanalysis.evaluate_responsiveness(
                spikeTimesFromEventOnset, indexLimitsEachTrial[:, toUse],
                baseRange, rangeStart)
        zStatsEachCell[:, :, indcell] = zStats
    return zStatsEachCell  # Moved outside the loop; returning inside it stopped after the first cell
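
A minimal usage sketch (all inputs here are hypothetical; cellDB is assumed to be an iterable of cell records, and baseRange/rangeStart are the parameters added above):

import numpy as np
baseRange = np.array([-0.2, -0.1])       # baseline window (sec); assumed example values
rangeStart = np.arange(0, 0.2, 0.05)     # candidate response-window onsets (sec); assumed
zStatsEachCell = evaluate_modulation_each_cell(cellDB, responseRange=np.array([0.0, 0.1]),
                                               baseRange=baseRange, rangeStart=rangeStart)
print(zStatsEachCell.shape)              # (len(rangeStart), nCond, nCells)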
Example #2
def save_data_each_cell(cellDB, outputDir, timeRange=np.array([-0.3,0.9]), lockTo=1):
    allPostfix = {1:'SoundOn', 2:'Cout', 3:'SideIn', 4:'Cin'}  # FIXME: hardcoded
    for indcell, onecell in enumerate(cellDB):
        cellStr = str(onecell).replace(' ', '_')
        try:
            (behavData, trialEvents, dataTT, spikeInds) = load_cell_reversal(onecell)
        except IOError:
            print('WARNING: File not found for cell %s' % cellStr)
            continue
        (eventOfInterest, xLabelStr) = align_to_event(behavData, lockTo)
        # -- Ignore trialsToExclude --
        eventOfInterest[onecell.trialsToExclude] = np.nan
        (spikeTimesFromEventOnset, trialIndexForEachSpike, indexLimitsEachTrial) = \
            spikesanalysis.eventlocked_spiketimes(dataTT.timestamps[spikeInds],
                                                  eventOfInterest, timeRange)
        filePostfix = allPostfix[lockTo]
        if not os.path.exists(outputDir):
            os.makedirs(outputDir)
        fileName = os.path.join(outputDir, cellStr+'_'+filePostfix+'.npz')
        print(fileName)
        np.savez(fileName, spikeTimesFromEventOnset=spikeTimesFromEventOnset,
                 trialIndexForEachSpike=trialIndexForEachSpike,
                 indexLimitsEachTrial=indexLimitsEachTrial, xLabelStr=xLabelStr,
                 timeRange=timeRange, animalName=onecell.animalName,
                 behavSession=onecell.behavSession)
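
Each cell is saved as a compressed NumPy archive, so reading one back is straightforward. A sketch, assuming a hypothetical output path:

import numpy as np
data = np.load('/tmp/output/somecell_SoundOn.npz')  # hypothetical file name
spikeTimes = data['spikeTimesFromEventOnset']
limits = data['indexLimitsEachTrial']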
Example #3
def save_data_each_mu(muDB, outputDir, timeRange=np.array([-0.3,0.9]), lockTo=1):
    # filePostfix was undefined in the original; define it as in save_data_each_cell()
    allPostfix = {1:'SoundOn', 2:'Cout', 3:'SideIn', 4:'Cin'}  # FIXME: hardcoded
    filePostfix = allPostfix[lockTo]
    for indmu, onemu in enumerate(muDB):
        muStr = '%s_%s_T%dmu' % (onemu.animalName, onemu.ephysSession,
                                 onemu.tetrode)
        (behavData, trialEvents, dataTT, spikeInds) = load_mu_reversal(onemu)
        (eventOfInterest, xLabelStr) = align_to_event(behavData, lockTo)
        (spikeTimesFromEventOnset, trialIndexForEachSpike, indexLimitsEachTrial) = \
            spikesanalysis.eventlocked_spiketimes(dataTT.timestamps[spikeInds],
                                                  eventOfInterest, timeRange)
        fileName = os.path.join(outputDir, muStr+'_'+filePostfix+'.npz')
        print(fileName)
        np.savez(fileName, spikeTimesFromEventOnset=spikeTimesFromEventOnset,
                 trialIndexForEachSpike=trialIndexForEachSpike,
                 indexLimitsEachTrial=indexLimitsEachTrial,
                 timeRange=timeRange, animalName=onemu.animalName,
                 behavSession=onemu.behavSession)
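
Usage mirrors save_data_each_cell(); a sketch with hypothetical arguments:

save_data_each_mu(muDB, '/tmp/mu_output', lockTo=2)  # muDB: iterable of multiunit records (assumed); lockTo=2 locks to 'Cout'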
Example #4
    if CASE == 1:
        import pylab as plt
        animalName = 'saja099'
        ephysSession = '2011-04-18_20-44-49'
        behavSession = '20110418a'
        tetrode = 1
        cluster = 3
        onecell = celldatabase.CellInfo(animalName,ephysSession,behavSession,tetrode,cluster)
        print('Loading %s ...' % onecell)
        (behavData,trialEvents,dataTT,spikeInds) = load_cell_reversal(onecell)
        timeRange = np.array([-0.3,0.9])
        trialsOfInterest = range(300)
        (eventOfInterest,xLabelStr) = align_to_event(behavData,1)
        (spikeTimesFromEventOnset,trialIndexForEachSpike,indexLimitsEachTrial) = \
            spikesanalysis.eventlocked_spiketimes(dataTT.timestamps[spikeInds],
                                                  eventOfInterest,timeRange)
        pR, = plt.plot(1e3*spikeTimesFromEventOnset,trialIndexForEachSpike,'.k')
        pR.set_markersize(2)
        plt.draw()
        plt.show()
    elif CASE==2:
        animalName   = 'saja100'
        ephysSession = '2011-10-26_11-57-31'
        behavSession = '20111026a'
        tetrode = 1
        clusters = [2,3,4,5,6,7,9,10,11]
        oneSite = celldatabase.MultiUnitInfo(animalName = animalName,
                                             ephysSession = ephysSession,
                                             behavSession = behavSession,
                                             tetrode = tetrode,
                                             clusters = clusters)
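
The snippet ends here in the original. A plausible continuation (purely an assumption, mirroring Example #3) would feed the site into the multiunit pipeline:

save_data_each_mu([oneSite], '/tmp/output', lockTo=1)  # hypothetical continuation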
Example #5
def align_data(onemu):
    rasterDeltaT = 0.1e-3            # sec
    timeRange = np.array([-0.2,0.6])
    animalName = onemu.animalName
    behavSession = onemu.behavSession
    ephysSession = onemu.ephysSession
    tetrode = onemu.tetrode
    clusters = onemu.clusters
    
    # -- Load events from Neuralynx --
    dataDir = os.path.join(settings.EPHYS_PATH,'%s/%s/'%(animalName,ephysSession))
    clustersDir = os.path.join(settings.EPHYS_PATH,'%s/%s_kk/'%(animalName,ephysSession))
    eventsFile = os.path.join(dataDir,'Events.nev')
    events = loadneuralynx.DataEvents(eventsFile)
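    # The TTL value packs several flags; testing bit b with
    # (valueTTL & (1<<b)) != 0 selects events where that bit is set.
    # NOTE: bitTRIALIND and bitTARGETIND are assumed to be module-level constants.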
    trialEvents = (events.valueTTL & (1<<bitTRIALIND)) != 0
    trialStartTimeNL = 1e-6*events.timestamps[trialEvents]
    targetEvents = (events.valueTTL & (1<<bitTARGETIND)) != 0
    targetTimeNL = 1e-6*events.timestamps[targetEvents]
    #targetFreqInd = events.valueTTL[trialEvents]>>8

    # -- Load events from behavior --
    behavDataDir = os.path.join(settings.BEHAVIOR_PATH,'%s/'%(animalName))
    behavFileName = 'data_saja_tuningcurve_santiago_%s_%s.h5'%(animalName,behavSession)
    behavFile = os.path.join(behavDataDir,behavFileName)
    behavData = loadbehavior.TuningBehaviorData(behavFile)
    behavData.extract_event_times()
    behavData.align_to_ephys(trialStartTimeNL)

    # FIXME: add a check that the number of trials in ephys and behavior is consistent
    
    # -- Check that the ephys/behavior alignment is correct --
    #behavData.check_clock_drift()
    #waitforbuttonpress()

    # -- Remove first empty trial from data --
    behavData['nTrials'] = behavData['nTrials']-1
    nTrials = behavData['nTrials']
    behavData.trialStartTime = behavData.trialStartTime[1:]
    behavData.targetOnsetTime = behavData.targetOnsetTime[1:]
    behavData['SoundFreq'] = behavData['SoundFreq'][1:]
    # -- Remove incomplete trial from ephys data --
    #targetTimeNL = targetTimeNL[:nTrials]
    behavData.trialStartTimeEphys = behavData.trialStartTimeEphys[1:] # Remove first empty trial

    trialsOfInterest = np.argsort(behavData['SoundFreq'])

    eventOfInterest = behavData.targetOnsetTime[trialsOfInterest] - \
                      behavData.trialStartTime[trialsOfInterest] + \
                      behavData.trialStartTimeEphys[trialsOfInterest]
    timeVec = np.arange(timeRange[0],timeRange[-1]+rasterDeltaT,rasterDeltaT)

    freqEachTrial = behavData['SoundFreq'][trialsOfInterest]

    # -- Load spikes --
    tetrodeFile = os.path.join(dataDir,'TT%d.ntt'%tetrode)
    dataTT = loadneuralynx.DataTetrode(tetrodeFile)
    dataTT.timestamps = dataTT.timestamps.astype(np.float64)*1e-6  # in sec

    # -- Load clusters if required --
    if len(clusters)>0:
        clustersFile = os.path.join(clustersDir,'TT%d.clu.1'%tetrode)
        dataTT.set_clusters(clustersFile)
        # BUG FIX: the original indexed clustersEachTetrode[tetrode], which was
        # never defined; the clusters list from onemu is used instead.
        spikeInds = extrafuncs.ismember(dataTT.clusters,clusters)
        (spikeTimesFromEventOnset,trialIndexForEachSpike,indexLimitsEachTrial) = \
          spikesanalysis.eventlocked_spiketimes(dataTT.timestamps[spikeInds],eventOfInterest,timeRange)
    else:
        (spikeTimesFromEventOnset,trialIndexForEachSpike,indexLimitsEachTrial) = \
          spikesanalysis.eventlocked_spiketimes(dataTT.timestamps,eventOfInterest,timeRange)
    
    return (spikeTimesFromEventOnset,trialIndexForEachSpike,indexLimitsEachTrial,freqEachTrial,timeRange)
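
A sketch of consuming the output to plot a frequency-sorted raster (pylab and onemu are assumed, as in Example #4):

import pylab as plt
(spikeTimes, trialInds, limits, freqEachTrial, tRange) = align_data(onemu)
pR, = plt.plot(1e3*spikeTimes, trialInds, '.k')  # trials already sorted by frequency
pR.set_markersize(2)
plt.xlabel('Time from target onset (ms)')
plt.show()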