Example #1
def get_plot(mice,date,plo_typ):
    '''
    Args:
        mice: A list of subject (mouse) names
        date: A list of session date strings
        plo_typ: A list of plot types to generate
    Returns:
        all_file_name: A list of all the file names referenced in this execution
    '''
    subject_list = mice
    session_list = date
	
    all_file_name = []
    for subject in subject_list:
        for session in session_list:
            behavData = None
            try:
                behavFile = loadbehavior.path_to_behavior_data(subject,EXPERIMENTER,paradigm,session)
                behavData = loadbehavior.FlexCategBehaviorData(behavFile,readmode='full')
            except Exception:  # behavior data for this session could not be loaded
                for plot_type in plo_typ:
                    out_dict = form_out_put(sub=subject,typ='summary',data=None,sess=session)
                    all_file_name.append(out_dict['filename'])
                continue
            for plot_type in plo_typ:
                out_dict = form_out_put(sub=subject,typ=plot_type,data=behavData,sess=session)
                all_file_name.append(out_dict['filename'])
                if not check_exist(fil_nam=out_dict['filename']):
                    #non_exsi_file.append(out_dict['filename'])
                    #test_list=[]
                    #test_list.append(out_dict)
                    pg.generate(plotInfo=out_dict)
    return all_file_name
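
A minimal usage sketch for get_plot above, assuming the module-level names it relies on (loadbehavior, EXPERIMENTER, paradigm, form_out_put, check_exist, pg) are defined elsewhere; the subjects, sessions, and plot types below are hypothetical placeholders.

# Hypothetical inputs; subjects, sessions (YYYYMMDD plus a suffix letter), and plot types are placeholders.
mice = ['adap021', 'adap022']
dates = ['20160310a', '20160311a']
plotTypes = ['psychometric', 'summary', 'dynamics']

allFiles = get_plot(mice, dates, plotTypes)
for fileName in allFiles:
    print(fileName)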
Example #2
def get_plot(mice, date, plo_typ):

    EXPERIMENTER = settings.DEFAULT_EXPERIMENTER
    paradigm = '2afc'

    subject_list = mice
    session_list = date

    all_file_name = []
    for subject in subject_list:
        for session in session_list:
            behavFile = loadbehavior.path_to_behavior_data(
                subject, EXPERIMENTER, paradigm, session)
            behavData = loadbehavior.FlexCategBehaviorData(behavFile,
                                                           readmode='full')
            for plot_type in plo_typ:
                out_dict = form_out_put(sub=subject,
                                        typ=plot_type,
                                        data=behavData,
                                        sess=session)
                all_file_name.append(out_dict['filename'])
                if not check_exsit(fil_nam=out_dict['filename']):
                    #non_exsi_file.append(out_dict['filename'])
                    test_list = []
                    test_list.append(out_dict)
                    pg.Generate(plotList=test_list)
                    #test_plot(out_dic=out_dict)
                    #print

    return all_file_name
Example #3
def load_behavior_flexcat(animal, behavSession):
    '''Load behavior data using the FlexCategBehaviorData class of the loadbehavior module.
    :param animal: String containing the animal name.
    :param behavSession: Name of the behavior session; this is the full filename in '{animal}_{paradigm}_{date}{behavsuffix}.h5' format.
    :return: bData object (as defined in loadbehavior).
    '''
    behavFullPath = os.path.join(BEHAVIOR_PATH, animal, behavSession) 
    bData = loadbehavior.FlexCategBehaviorData(behavFullPath)
    return bData
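
A short usage sketch for load_behavior_flexcat, assuming BEHAVIOR_PATH and the jaratoolbox loadbehavior module are available as above; the animal name and session filename are hypothetical but follow the format described in the docstring.

# Hypothetical animal and session filename, following '{animal}_{paradigm}_{date}{behavsuffix}.h5'.
animal = 'adap021'
behavSession = 'adap021_2afc_20160310a.h5'
bData = load_behavior_flexcat(animal, behavSession)
print(len(bData['choice']))  # number of trials (assumes the 2afc data contains a 'choice' field)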
Example #4
def load_remote_2afc_behav(oneCell,
                           behavDir=BEHAVDIR_MOUNTED,
                           ephysDir=EPHYSDIR_MOUNTED):
    '''
    Given a CellInfo object and remote behavior and ephys directories, this function loads the associated 2afc behav data from the mounted jarastore drive. Returns bData object.
    '''
    ### Get behavior data associated with 2afc session ###
    behavFileName = '{0}_{1}_{2}.h5'.format(oneCell.animalName, '2afc',
                                            oneCell.behavSession)
    behavFile = os.path.join(behavDir, oneCell.animalName, behavFileName)
    bData = loadbehavior.FlexCategBehaviorData(behavFile, readmode='full')
    return bData
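
A sketch of how load_remote_2afc_behav might be called; since the CellInfo class is not shown here, a namedtuple providing just the two attributes the function reads (animalName and behavSession) stands in for it, and the values are placeholders.

from collections import namedtuple

# Minimal stand-in for a CellInfo object (only the attributes used above).
FakeCell = namedtuple('FakeCell', ['animalName', 'behavSession'])
oneCell = FakeCell(animalName='adap021', behavSession='20160310a')

bData = load_remote_2afc_behav(oneCell)  # uses the default mounted behavior directory
print(len(bData['valid']))               # e.g. count trials (assumes a 'valid' field)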
Example #5
def Test():
    #Test function: Used to test code from plotgenerator
    # NOTE: this test function is a mini version of what the backend will be doing each time
    #code from "test032_example_read_bdata.py" by Santiago Jaramillo
    EXPERIMENTER = settings.DEFAULT_EXPERIMENTER
    paradigm = '2afc'
    subject = 'adap021'
    session = '20160310a'  # This is the date formatted as YYYYMMDD and one more character (usually 'a')

    # -- Find the data filename and load the data into a data object (similar to a Python dict) --
    behavFile = loadbehavior.path_to_behavior_data(subject, EXPERIMENTER,
                                                   paradigm, session)
    behavData = loadbehavior.FlexCategBehaviorData(behavFile, readmode='full')
    #end Santiago's code
    #make each dictionary (for this test, one per graph type) and append it to the list
    graphList = []
    '''
    #Test 1: Simple one of each graph test
    
    graphDict1 = {'type' : "psychometric", 'filename' : "adap021_20160310_psychometric.svg", 'data' : behavData}
    graphList.append(graphDict1)
    graphDict2 = {'type' : "summary", 'filename' : "adap021_20160310_summary.svg", 'data' : behavData}
    graphList.append(graphDict2)
    graphDict3 = {'type' : "dynamics", 'filename' : "adap021_20160310_dynamics.svg", 'data' : behavData}
    graphList.append(graphDict3)
    '''
    #'''
    #Test 2: stress test. 20 psychometric, 20 dynamics
    graphDictPsy = {
        'type': "psychometric",
        'filename': "adap021_20160310_psychometric.svg",
        'data': behavData
    }
    graphDictDyn = {
        'type': "dynamics",
        'filename': "adap021_20160310_dynamics.svg",
        'data': behavData
    }
    for i in range(20):
        graphList.append(graphDictPsy)
        graphList.append(graphDictDyn)
    #'''
    #time.time() returns the current system time in seconds. By taking the time before and after running the Generate function and computing the difference,
    #we can see how many seconds the module took to run.
    t0 = time.time()
    plotgenerator.Generate(graphList)
    t1 = time.time()
    print(t1 - t0)
Example #6
def load_remote_2afc_data(oneCell,
                          behavDir=BEHAVDIR_MOUNTED,
                          ephysDir=EPHYSDIR_MOUNTED):
    '''
    Given a CellInfo object and remote behavior and ephys directories, this function loads the associated 2afc ephys and 2afc behav data from the mounted jarastore drive. Returns eventOnsetTimes, spikeTimestamps, and bData objects.
    '''

    ### Get behavior data associated with 2afc session ###
    behavFileName = '{0}_{1}_{2}.h5'.format(oneCell.animalName, '2afc',
                                            oneCell.behavSession)
    behavFile = os.path.join(behavDir, oneCell.animalName, behavFileName)
    bData = loadbehavior.FlexCategBehaviorData(behavFile, readmode='full')

    ### Get events data ###
    fullEventFilename = os.path.join(ephysDir, oneCell.animalName,
                                     oneCell.ephysSession,
                                     'all_channels.events')
    eventData = loadopenephys.Events(fullEventFilename)

    ### Get event onset times ###
    eventData.timestamps = np.array(
        eventData.timestamps
    ) / EPHYS_SAMPLING_RATE  #hard-coded ephys sampling rate!!
    #evID=np.array(eventData.eventID)
    #eventOnsetTimes=eventTimestamps[(evID==1)]

    ### Get spike data of just this cluster ###
    spikeFilename = os.path.join(ephysDir, oneCell.animalName,
                                 oneCell.ephysSession,
                                 'Tetrode{}.spikes'.format(oneCell.tetrode))
    spikeData = loadopenephys.DataSpikes(spikeFilename)
    spikeData.timestamps = spikeData.timestamps / EPHYS_SAMPLING_RATE
    clustersDir = os.path.join(ephysDir, oneCell.animalName,
                               oneCell.ephysSession) + '_kk'
    clusterFilename = os.path.join(clustersDir,
                                   'Tetrode{}.clu.1'.format(oneCell.tetrode))
    clusters = np.fromfile(clusterFilename, dtype='int32', sep=' ')[1:]
    spikeData.timestamps = spikeData.timestamps[clusters == oneCell.cluster]
    spikeData.samples = spikeData.samples[clusters == oneCell.cluster, :, :]
    spikeData.samples = spikeData.samples.astype(
        float) - 2**15  # FIXME: this is specific to OpenEphys
    # FIXME: This assumes the gain is the same for all channels and records
    spikeData.samples = (1000.0 / spikeData.gain[0, 0]) * spikeData.samples
    #spikeData = ephyscore.CellData(oneCell) #This defaults to settings ephys path

    return (eventData, spikeData, bData)
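
A usage sketch for load_remote_2afc_data, assuming oneCell is a CellInfo-like object exposing the attributes the function reads (animalName, behavSession, ephysSession, tetrode, cluster); the sound trigger channel below is a placeholder.

eventData, spikeData, bData = load_remote_2afc_data(oneCell)

soundTriggerChannel = 0  # placeholder; depends on the rig configuration
soundOnsetEvents = (np.array(eventData.eventID) == 1) & \
                   (np.array(eventData.eventChannel) == soundTriggerChannel)
eventOnsetTimes = eventData.timestamps[soundOnsetEvents]  # already converted to seconds above
print('{} sound onsets, {} spikes in this cluster'.format(
    len(eventOnsetTimes), len(spikeData.timestamps)))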
Example #7
 #celldbPath = os.path.join(settings.DATABASE_PATH,'reward_change_{}.h5'.format(label))
 celldbPath = os.path.join(settings.DATABASE_PATH,
                           '{}_database.h5'.format(animal))
 celldb = pd.read_hdf(celldbPath, key='reward_change')
 if 'level_0' in list(celldb):
     celldb.drop('level_0', inplace=True, axis=1)
 else:
     celldb = celldb.reset_index()
     celldb.drop('level_0', inplace=True, axis=1)
 allRightwardBias = []
 for date in np.unique(celldb.date):
     cellsThisSession = celldb.query('date=="{}"'.format(date))
     rcInd = cellsThisSession['sessiontype'].iloc[0].index('behavior')
     rcBehavior = cellsThisSession['behavior'].iloc[0][rcInd]
     behavFile = os.path.join(BEHAVIOR_PATH, animal, rcBehavior)
     bdata = loadbehavior.FlexCategBehaviorData(behavFile, readmode='full')
     possibleFreqs = np.unique(bdata['targetFrequency'])
     currentBlock = bdata['currentBlock']
     blockTypes = [
         bdata.labels['currentBlock']['same_reward'],
         bdata.labels['currentBlock']['more_left'],
         bdata.labels['currentBlock']['more_right']
     ]
     trialsEachType = behavioranalysis.find_trials_each_type(
         currentBlock, blockTypes)
     rightwardBiasByReward = []  #np.zeros(len(possibleFreqs))
     for indf, freq in enumerate(possibleFreqs):
         oneFreqTrials = (bdata['targetFrequency']
                          == freq) & bdata['valid'].astype('bool') & (
                              bdata['choice'] !=
                              bdata.labels['choice']['none'])
Example #8
    #cellID = allcells.cellDB.findcell('test017','20150315a',2,7) #
    cellID = allcells.cellDB.findcell('test017', '20150301a', 2, 3)  #
elif CASE == 2:
    pass

oneCell = allcells.cellDB[cellID]

subject = oneCell.animalName
behavSession = oneCell.behavSession
ephysSession = oneCell.ephysSession
ephysRoot = os.path.join(ephysRootDir, subject)

# -- Load Behavior Data --
behaviorFilename = loadbehavior.path_to_behavior_data(subject, experimenter,
                                                      paradigm, behavSession)
bdata = loadbehavior.FlexCategBehaviorData(behaviorFilename)
bdata.find_trials_each_block()

# -- Load event data and convert event timestamps to seconds --
ephysDir = os.path.join(ephysRoot, ephysSession)
eventFilename = os.path.join(ephysDir, 'all_channels.events')
events = loadopenephys.Events(eventFilename)  # Load events data
eventTimes = np.array(events.timestamps) / SAMPLING_RATE

soundOnsetEvents = (events.eventID == 1) & (events.eventChannel
                                            == soundTriggerChannel)

# -- Load Spike Data From Certain Cluster --
spkData = ephyscore.CellData(oneCell)
spkTimeStamps = spkData.spikes.timestamps
Example #9
def main():
    global behavSession
    global subject
    global tetrode
    global cluster
    global tuningBehavior  #behavior file name of tuning curve
    global tuningEphys  #ephys session name of tuning curve
    global bdata
    global eventOnsetTimes
    global spikeTimesFromEventOnset
    global indexLimitsEachTrial
    global spikeTimesFromMovementOnset
    global indexLimitsEachMovementTrial
    global titleText

    print("switch_tuning_block_allfreq_report")
    for cellID in range(0, numOfCells):
        oneCell = allcells.cellDB[cellID]
        try:
            if (behavSession != oneCell.behavSession):

                subject = oneCell.animalName
                behavSession = oneCell.behavSession
                ephysSession = oneCell.ephysSession
                tuningSession = oneCell.tuningSession
                ephysRoot = os.path.join(ephysRootDir, subject)
                tuningBehavior = oneCell.tuningBehavior
                tuningEphys = oneCell.tuningSession

                print(behavSession)

                # -- Load Behavior Data --
                behaviorFilename = loadbehavior.path_to_behavior_data(
                    subject=subject,
                    paradigm=paradigm,
                    sessionstr=behavSession)
                bdata = loadbehavior.FlexCategBehaviorData(behaviorFilename)
                #bdata = loadbehavior.BehaviorData(behaviorFilename)
                numberOfTrials = len(bdata['choice'])

                # -- Load event data and convert event timestamps to seconds --
                ephysDir = os.path.join(ephysRoot, ephysSession)
                eventFilename = os.path.join(ephysDir, 'all_channels.events')
                events = loadopenephys.Events(
                    eventFilename)  # Load events data
                # Get the array of event timestamps and convert to seconds by dividing by the sampling rate (Hz)
                eventTimes = np.array(events.timestamps) / SAMPLING_RATE

                soundOnsetEvents = (events.eventID == 1) & (
                    events.eventChannel == soundTriggerChannel)

                eventOnsetTimes = eventTimes[soundOnsetEvents]
                soundOnsetTimeBehav = bdata['timeTarget']

                # Find missing trials
                missingTrials = behavioranalysis.find_missing_trials(
                    eventOnsetTimes, soundOnsetTimeBehav)
                # Remove missing trials
                bdata.remove_trials(missingTrials)
                bdata.find_trials_each_block()

                ###############################################################################################
                # Times when the mouse exits the center port
                centerOutTimes = bdata['timeCenterOut']
                # Sound onset time for each trial, in seconds from the start of the behavior paradigm
                soundStartTimes = bdata['timeTarget']
                timeDiff = centerOutTimes - soundStartTimes
                if (len(eventOnsetTimes) < len(timeDiff)):
                    timeDiff = timeDiff[:-1]
                    eventOnsetTimesCenter = eventOnsetTimes + timeDiff
                elif (len(eventOnsetTimes) > len(timeDiff)):
                    eventOnsetTimesCenter = eventOnsetTimes[:-1] + timeDiff
                else:
                    eventOnsetTimesCenter = eventOnsetTimes + timeDiff
                ###############################################################################################

            tetrode = oneCell.tetrode
            cluster = oneCell.cluster

            # -- Load Spike Data From Certain Cluster --
            spkData = ephyscore.CellData(oneCell)
            spkTimeStamps = spkData.spikes.timestamps

            (spikeTimesFromEventOnset,trialIndexForEachSpike,indexLimitsEachTrial) = \
                spikesanalysis.eventlocked_spiketimes(spkTimeStamps,eventOnsetTimes,timeRange)

            (spikeTimesFromMovementOnset,movementTrialIndexForEachSpike,indexLimitsEachMovementTrial) = \
                spikesanalysis.eventlocked_spiketimes(spkTimeStamps,eventOnsetTimesCenter,timeRange)

            plt.clf()
            if (len(spkTimeStamps) > 0):
                ax1 = plt.subplot2grid((numRows, numCols),
                                       ((numRows - sizeClusterPlot), 0),
                                       colspan=(numCols / 3))
                spikesorting.plot_isi_loghist(spkData.spikes.timestamps)
                ax3 = plt.subplot2grid(
                    (numRows, numCols),
                    ((numRows - sizeClusterPlot), (numCols / 3) * 2),
                    colspan=(numCols / 3))
                spikesorting.plot_events_in_time(spkTimeStamps)
                samples = spkData.spikes.samples.astype(float) - 2**15
                samples = (1000.0 / spkData.spikes.gain[0, 0]) * samples
                ax2 = plt.subplot2grid(
                    (numRows, numCols),
                    ((numRows - sizeClusterPlot), (numCols / 3)),
                    colspan=(numCols / 3))
                spikesorting.plot_waveforms(samples)

            ###############################################################################
            ax4 = plt.subplot2grid((numRows, numCols), (0, 0),
                                   colspan=(numCols / 2),
                                   rowspan=3 * sizeRasters)
            #plt.setp(ax4.get_xticklabels(), visible=False)
            #fig.axes.get_xaxis().set_visible(False)
            raster_tuning(ax4)
            axvline(x=0, ymin=0, ymax=1, color='r')
            plt.gca().set_xlim(tuning_timeRange)

            ax6 = plt.subplot2grid((numRows, numCols), (0, (numCols / 2)),
                                   colspan=(numCols / 2),
                                   rowspan=sizeRasters)
            plt.setp(ax6.get_xticklabels(), visible=False)
            plt.setp(ax6.get_yticklabels(), visible=False)
            raster_sound_block_switching()
            plt.title(
                'sound aligned, Top: middle freq in blocks, Bottom: all freqs')

            ax7 = plt.subplot2grid((numRows, numCols),
                                   (sizeRasters, (numCols / 2)),
                                   colspan=(numCols / 2),
                                   rowspan=sizeHists,
                                   sharex=ax6)
            hist_sound_block_switching(ax7)
            #plt.setp(ax7.get_yticklabels(), visible=False)
            ax7.yaxis.tick_right()
            ax7.yaxis.set_ticks_position('both')
            plt.setp(ax7.get_xticklabels(), visible=False)
            plt.gca().set_xlim(timeRange)

            ax10 = plt.subplot2grid((numRows, numCols),
                                    ((sizeRasters + sizeHists), (numCols / 2)),
                                    colspan=(numCols / 2),
                                    rowspan=sizeRasters)
            plt.setp(ax10.get_xticklabels(), visible=False)
            plt.setp(ax10.get_yticklabels(), visible=False)
            raster_sound_allFreq_switching()

            ax11 = plt.subplot2grid(
                (numRows, numCols),
                ((2 * sizeRasters + sizeHists), (numCols / 2)),
                colspan=(numCols / 2),
                rowspan=sizeHists,
                sharex=ax10)
            hist_sound_allFreq_switching(ax11)
            ax11.yaxis.tick_right()
            ax11.yaxis.set_ticks_position('both')
            ax11.set_xlabel('Time (sec)')
            #plt.setp(ax11.get_yticklabels(), visible=False)
            plt.gca().set_xlim(timeRange)

            ###############################################################################
            #plt.tight_layout()
            modulation_index_switching()
            plt.suptitle(titleText)

            tetrodeClusterName = 'T' + str(oneCell.tetrode) + 'c' + str(
                oneCell.cluster)
            plt.gcf().set_size_inches((8.5, 11))
            figformat = 'png'  #'png' #'pdf' #'svg'
            filename = reportname + '_%s_%s_%s.%s' % (
                subject, behavSession, tetrodeClusterName, figformat)
            fulloutputDir = outputDir + subject + '/'
            fullFileName = os.path.join(fulloutputDir, filename)

            directory = os.path.dirname(fulloutputDir)
            if not os.path.exists(directory):  #makes sure output folder exists
                os.makedirs(directory)
            #print 'saving figure to %s'%fullFileName
            plt.gcf().savefig(fullFileName, format=figformat)

        except Exception:
            if (oneCell.behavSession not in badSessionList):
                badSessionList.append(oneCell.behavSession)

    print('error with sessions: ')
    for badSes in badSessionList:
        print(badSes)
Example #10
        '20151213a',
        '20151214a',
        '20151222a',
        '20160111a',
        '20160112a',
        '20160113a',
        '20160114a',
        '20160115a',
        '20160116a',
        '20160118a',
        '20160120a',
        '20160122a',
        '20160123a',
        '20160124a',
        '20160125a',
        '20160127a',
        '20160128a',
        '20160130a',
        '20160202a',
    ]
}

# -- Here is how you load bdata for each behavior session -- #
paradigm = '2afc'
for animal in sessionsDict.keys():
    behavSessThisAnimal = sessionsDict[animal]
    for session in behavSessThisAnimal:
        behavFileName = loadbehavior.path_to_behavior_data(
            animal, paradigm, session)
        bData = loadbehavior.FlexCategBehaviorData(behavFileName)
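
A small, hypothetical extension of the loop above: instead of discarding each bData, collect the loaded objects into a nested dict keyed by animal and session (bdataByAnimal is an illustrative name).

# Collect loaded behavior data per animal and session (hypothetical extension of the loop above).
bdataByAnimal = {}
for animal in sessionsDict.keys():
    bdataByAnimal[animal] = {}
    for session in sessionsDict[animal]:
        behavFileName = loadbehavior.path_to_behavior_data(animal, paradigm, session)
        bdataByAnimal[animal][session] = loadbehavior.FlexCategBehaviorData(behavFileName)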
Example #11
def switch_report(mouseName, behavSession, tetrode, cluster):
    #global behavSession
    #global subject
    global bdata
    global eventOnsetTimes
    global spikeTimesFromEventOnset
    global indexLimitsEachTrial
    global spikeTimesFromMovementOnset
    global indexLimitsEachMovementTrial

    allcellsFileName = 'allcells_' + mouseName
    sys.path.append(settings.ALLCELLS_PATH)
    allcells = importlib.import_module(allcellsFileName)

    cellID = allcells.cellDB.findcell(mouseName, behavSession, tetrode,
                                      cluster)
    oneCell = allcells.cellDB[cellID]

    subject = oneCell.animalName
    behavSession = oneCell.behavSession
    ephysSession = oneCell.ephysSession
    ephysRoot = os.path.join(ephysRootDir, subject)

    # -- Load Behavior Data --
    behaviorFilename = loadbehavior.path_to_behavior_data(
        subject, experimenter, paradigm, behavSession)
    bdata = loadbehavior.FlexCategBehaviorData(behaviorFilename)
    #bdata = loadbehavior.BehaviorData(behaviorFilename)
    bdata.find_trials_each_block()
    numberOfTrials = len(bdata['choice'])

    # -- Load event data and convert event timestamps to seconds --
    ephysDir = os.path.join(ephysRoot, ephysSession)
    eventFilename = os.path.join(ephysDir, 'all_channels.events')
    events = loadopenephys.Events(eventFilename)  # Load events data
    # Get the array of event timestamps and convert to seconds by dividing by the sampling rate (Hz)
    eventTimes = np.array(events.timestamps) / SAMPLING_RATE

    soundOnsetEvents = (events.eventID == 1) & (events.eventChannel
                                                == soundTriggerChannel)

    eventOnsetTimes = eventTimes[soundOnsetEvents]

    #################################################################################################
    # Times when the mouse exits the center port
    centerOutTimes = bdata['timeCenterOut']
    # Sound onset time for each trial, in seconds from the start of the behavior paradigm
    soundStartTimes = bdata['timeTarget']
    timeDiff = centerOutTimes - soundStartTimes
    if (len(eventOnsetTimes) < len(timeDiff)):
        timeDiff = timeDiff[:-1]
    eventOnsetTimesCenter = eventOnsetTimes + timeDiff
    #################################################################################################

    # -- Load Spike Data From Certain Cluster --
    spkData = ephyscore.CellData(oneCell)
    spkTimeStamps = spkData.spikes.timestamps

    (spikeTimesFromEventOnset,trialIndexForEachSpike,indexLimitsEachTrial) = \
        spikesanalysis.eventlocked_spiketimes(spkTimeStamps,eventOnsetTimes,timeRange)

    (spikeTimesFromMovementOnset,movementTrialIndexForEachSpike,indexLimitsEachMovementTrial) = \
        spikesanalysis.eventlocked_spiketimes(spkTimeStamps,eventOnsetTimesCenter,timeRange)

    plt.clf()
    if (len(spkTimeStamps) > 0):
        ax1 = plt.subplot2grid((numRows, numCols),
                               ((numRows - sizeClusterPlot), 0),
                               colspan=(numCols / 3))
        spikesorting.plot_isi_loghist(spkData.spikes.timestamps)
        ax3 = plt.subplot2grid(
            (numRows, numCols),
            ((numRows - sizeClusterPlot), (numCols / 3) * 2),
            colspan=(numCols / 3))
        spikesorting.plot_events_in_time(spkTimeStamps)
        samples = spkData.spikes.samples.astype(float) - 2**15
        samples = (1000.0 / spkData.spikes.gain[0, 0]) * samples
        ax2 = plt.subplot2grid((numRows, numCols),
                               ((numRows - sizeClusterPlot), (numCols / 3)),
                               colspan=(numCols / 3))
        spikesorting.plot_waveforms(samples)

    ###############################################################################
    ax4 = plt.subplot2grid((numRows, numCols), (0, 0),
                           colspan=(numCols / 2),
                           rowspan=sizeRasters)
    raster_sound_block_switching()
    ax5 = plt.subplot2grid((numRows, numCols), (sizeRasters, 0),
                           colspan=(numCols / 2),
                           rowspan=sizeHists)
    hist_sound_block_switching()
    ax6 = plt.subplot2grid((numRows, numCols), (0, (numCols / 2)),
                           colspan=(numCols / 2),
                           rowspan=sizeRasters)
    raster_movement_block_switching()
    ax7 = plt.subplot2grid((numRows, numCols), (sizeRasters, (numCols / 2)),
                           colspan=(numCols / 2),
                           rowspan=sizeHists)
    hist_movement_block_switching()

    ax8 = plt.subplot2grid((numRows, numCols), ((sizeRasters + sizeHists), 0),
                           colspan=(numCols / 2),
                           rowspan=sizeRasters)
    raster_sound_allFreq_switching()
    ax9 = plt.subplot2grid((numRows, numCols),
                           ((2 * sizeRasters + sizeHists), 0),
                           colspan=(numCols / 2),
                           rowspan=sizeHists)
    hist_sound_allFreq_switching()
    ax10 = plt.subplot2grid((numRows, numCols),
                            ((sizeRasters + sizeHists), (numCols / 2)),
                            colspan=(numCols / 2),
                            rowspan=sizeRasters)
    raster_sound_switching()
    ax11 = plt.subplot2grid((numRows, numCols),
                            ((2 * sizeRasters + sizeHists), (numCols / 2)),
                            colspan=(numCols / 2),
                            rowspan=sizeHists)
    hist_sound_switching()
    ###############################################################################
    #plt.tight_layout()

    tetrodeClusterName = 'T' + str(oneCell.tetrode) + 'c' + str(
        oneCell.cluster)
    plt.suptitle(mouseName + ' ' + behavSession + ' ' + tetrodeClusterName)
    plt.gcf().set_size_inches((8.5, 11))
    #figformat = 'png' #'png' #'pdf' #'svg'
    #filename = 'report_%s_%s_%s.%s'%(subject,behavSession,tetrodeClusterName,figformat)
    #fulloutputDir = outputDir+subject +'/'
    #fullFileName = os.path.join(fulloutputDir,filename)

    #directory = os.path.dirname(fulloutputDir)
    #if not os.path.exists(directory): #makes sure output folder exists
    #os.makedirs(directory)
    #print 'saving figure to %s'%fullFileName
    #plt.gcf().savefig(fullFileName,format=figformat)

    plt.show()
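
A minimal call sketch for switch_report; the arguments mirror the findcell example earlier in this listing and are placeholders.

# Hypothetical call: (mouseName, behavSession, tetrode, cluster).
switch_report('test017', '20150301a', 2, 3)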
Example #12
def rasterBlock(oneCell):
    subject = oneCell.animalName
    behavSession = oneCell.behavSession
    ephysSession = oneCell.ephysSession
    ephysRoot = os.path.join(ephysRootDir, subject)

    # -- Load Behavior Data --
    behaviorFilename = loadbehavior.path_to_behavior_data(
        subject, experimenter, paradigm, behavSession)
    bdata = loadbehavior.FlexCategBehaviorData(behaviorFilename)
    bdata.find_trials_each_block()

    # -- Load event data and convert event timestamps to seconds --
    ephysDir = os.path.join(ephysRoot, ephysSession)
    eventFilename = os.path.join(ephysDir, 'all_channels.events')
    events = loadopenephys.Events(eventFilename)  # Load events data
    eventTimes = np.array(events.timestamps) / SAMPLING_RATE

    soundOnsetEvents = (events.eventID == 1) & (events.eventChannel
                                                == soundTriggerChannel)

    # -- Load Spike Data From Certain Cluster --
    spkData = ephyscore.CellData(oneCell)
    spkTimeStamps = spkData.spikes.timestamps

    eventOnsetTimes = eventTimes[soundOnsetEvents]

    correct = bdata['outcome'] == bdata.labels['outcome']['correct']

    possibleFreq = np.unique(bdata['targetFrequency'])
    oneFreq = bdata['targetFrequency'] == possibleFreq[middleFreq]

    correctOneFreq = oneFreq & correct
    correctTrialsEachBlock = bdata.blocks[
        'trialsEachBlock'] & correctOneFreq[:, np.newaxis]

    #trialsEachCond = np.c_[invalid,leftward,rightward]; colorEachCond = ['0.75','g','r']
    #trialsEachCond = np.c_[leftward,rightward]; colorEachCond = ['0.5','0.7','0']
    trialsEachCond = correctTrialsEachBlock

    if bdata['currentBlock'][0] == bdata.labels['currentBlock'][
            'low_boundary']:
        colorEachBlock = 3 * ['g', 'r']
    else:
        colorEachBlock = 3 * ['r', 'g']


    (spikeTimesFromEventOnset,trialIndexForEachSpike,indexLimitsEachTrial) = \
        spikesanalysis.eventlocked_spiketimes(spkTimeStamps,eventOnsetTimes,timeRange)

    #plot(spikeTimesFromEventOnset,trialIndexForEachSpike,'.')

    plt.clf()
    ax1 = plt.subplot2grid((3, 1), (0, 0), rowspan=2)
    extraplots.raster_plot(spikeTimesFromEventOnset,
                           indexLimitsEachTrial,
                           timeRange,
                           trialsEachCond=correctTrialsEachBlock,
                           colorEachCond=colorEachBlock,
                           fillWidth=None,
                           labels=None)
    #plt.yticks([0,trialsEachCond.sum()])
    #ax1.set_xticklabels([])
    plt.ylabel('Trials')

    timeVec = np.arange(timeRange[0], timeRange[-1], binWidth)
    spikeCountMat = spikesanalysis.spiketimes_to_spikecounts(
        spikeTimesFromEventOnset, indexLimitsEachTrial, timeVec)

    smoothWinSize = 3
    ax2 = plt.subplot2grid((3, 1), (2, 0), sharex=ax1)

    extraplots.plot_psth(spikeCountMat / binWidth,
                         smoothWinSize,
                         timeVec,
                         trialsEachCond=correctTrialsEachBlock,
                         colorEachCond=colorEachBlock,
                         linestyle=None,
                         linewidth=3,
                         downsamplefactor=1)

    plt.xlabel('Time from sound onset (s)')
    plt.ylabel('Firing rate (spk/sec)')

    #plt.show()

    nameFreq = str(possibleFreq[middleFreq])
    tetrodeClusterName = 'T' + str(oneCell.tetrode) + 'c' + str(
        oneCell.cluster)
    plt.gcf().set_size_inches((8.5, 11))
    figformat = 'png'  #'png' #'pdf' #'svg'
    filename = 'block_%s_%s_%s_%s.%s' % (
        subject, behavSession, tetrodeClusterName, nameFreq, figformat)
    fulloutputDir = outputDir + subject + '/'
    fullFileName = os.path.join(fulloutputDir, filename)

    directory = os.path.dirname(fulloutputDir)
    if not os.path.exists(directory):
        os.makedirs(directory)
    print('saving figure to %s' % fullFileName)
    plt.gcf().savefig(fullFileName, format=figformat)
Example #13
figure()
plot_dynamics_sound_type(bdata, 'chords', soundfreq=[5000, 16000])
title('amod004, chords')

figure()
plot_dynamics_sound_type(bdata, 'amp_mod', soundfreq=[8, 64])
title('amod004, amp_mod')




#DEBUGGING

fn = '/var/tmp/data/santiago/nick/nick_2afc_20160330a.h5' #Not psycurve
bdata = loadbehavior.FlexCategBehaviorData(fn)

soundType = 'amp_mod'
behavData=bdata

trialsSoundType = behavData['soundType']==behavData.labels['soundType'][soundType]
choice = behavData['choice']

#FIXME: I am tempted to ignore invalid trials for now...
valid = behavData['valid']
validSoundType = valid[trialsSoundType]

choiceRight = choice==bdata.labels['choice']['right']
choiceRightSoundType = choiceRight[trialsSoundType]
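
To round out the debugging fragment, a sketch of the per-sound-type rightward fraction it appears to be building toward; np is assumed to be numpy, and the variables continue from the lines above.

# Fraction of valid trials of this sound type where the animal chose rightward.
validSoundTypeBool = validSoundType.astype(bool)
nValid = np.sum(validSoundTypeBool)
nRightward = np.sum(choiceRightSoundType & validSoundTypeBool)
if nValid > 0:
    print('Fraction rightward ({}): {:.3f}'.format(soundType, nRightward / float(nValid)))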