# Code example #1 (score: 0)
# File: example_plots.py  Project: aivai/AIMODELCOMP
import lims_utils
from allensdk.core.nwb_data_set import NwbDataSet
from allensdk.ephys.ephys_extractor import EphysSweepFeatureExtractor

from allensdk.ephys.feature_extractor import EphysFeatureExtractor

from allensdk.core.cell_types_cache import CellTypesCache  #Following jupyter notebook here
import pprint
pp = pprint.PrettyPrinter(indent=2)

ctc = CellTypesCache(manifest_file='cell_types/cell_types_manifest.json')

from allensdk.api.queries.cell_types_api import CellTypesApi
#%%

# Pre-computed electrophysiology features for all cells in the database.
ephys_features = ctc.get_ephys_features()

# Download the NWB recording for one particular specimen
# (later find one that has all models, follows trend in plots).
data_set = ctc.get_ephys_data(464212183)

#ct = CellTypesApi()
#cells = ct.list_cells(require_reconstruction=False)
#ct.save_ephys_data(cells[0]['476218657'], 'example.nwb')

# Gather the numbers of all sweeps recorded with the 'Noise 1' stimulus.
exp_spike_times = []
exp_sweeps = [
    sweep_num
    for sweep_num in data_set.get_sweep_numbers()
    if data_set.get_sweep_metadata(sweep_num)['aibs_stimulus_name'] == 'Noise 1'
]
# Code example #2 (score: 0)
def CreateDB(specimenList, databaseName, resetDB, manifestFile, 
             host, user, password, verbose):
    
    if verbose:
        print "CreateDB importing..."
        
    import sys
    from allensdk.ephys.extract_cell_features import extract_cell_features
    from allensdk.core.cell_types_cache import CellTypesCache
    from collections import defaultdict
    
    import mysql.connector
    
    import numpy as np
    from numpyconversion import NumpyMySQLConverter
    
    from CellSurveyTableOps import dropTable, createDonorsTable
    from CellSurveyTableOps import createSpecimensTable, createSpecimenFXsTable
    from CellSurveyTableOps import createExperimentsTable, createExperimentFXsTable
    from CellSurveyTableOps import addSpecimen, addExperiment, addDonor
    from CellSurveyTableOps import addExpFX,addSpecFX
    from ABISweepFX import getABIAnalysisPoints, ExtractSweepFeatures
    
    #### Create the database from scratch if required
    if verbose:
        print "Connecting to the database"; 
    
    try: 
        cnx = mysql.connector.connect(user=user, password=password,
                                      host=host, database=databaseName,
                                      converter_class=NumpyMySQLConverter)
        if verbose:
            print "Connection complete"
            
        cursobj = cnx.cursor()
    except:
        cnx = mysql.connector.connect(user=user, password=password, host=host,
                                      converter_class=NumpyMySQLConverter)
        if verbose:
            print cnx
        cursobj = cnx.cursor()
        mycmd = 'create database ' + databaseName
        cursobj.execute(mycmd)
        if verbose:
            print "Database created"
        mycmd = 'use ' + databaseName
        cursobj.execute(mycmd)
        if verbose:
            print "Using database " + databaseName
    
    if resetDB:
        if verbose:
            print "Dropping all tables"
            
        tablenames = ['specimenFXs', 'experimentFXs', 'experiments', 
                      'specimens', 'donors']
        for tablename in tablenames:
            result = dropTable(cnx, tablename)
            if verbose:
                if result:
                    print tablename + " table dropped"
                else:
                    print " There was a problem dropping table " + tablename
    
        # -----
        if verbose:
            print "Creating tables"
       
        result = createDonorsTable(cnx)
        if verbose:
            if result:
                print "Donors Table created"
            else:
                print "There was a problem creating the Donors Table"
    
        result = createSpecimensTable(cnx)
        if verbose:
            if result:
                print "Specimens Table created"
            else:
                print "There was a problem creating the Specimens Table"
    
        result = createExperimentsTable(cnx)
        if verbose:
            if result:
                print "Experiments Table created"
            else:
                print "There was a problem creating the Experiments Table"
    
        result = createSpecimenFXsTable(cnx)
        if verbose:
            if result:
                print "SpecimenFXs Table created"
            else:
                print "There was a problem creating the SpecimenFXs Table"
    
        result = createExperimentFXsTable(cnx)
        if verbose:
            if result:
                print "ExperimentFXs Table created"
            else:
                print "There was a problem creating the ExperimentFXs Table"
    
        
    # ====================================================================
    # Install the ABI Datasets
    if verbose:
        print "Installing the ABI Datasets into the database"; sys.stdout.flush()
        
    # Instantiate the CellTypesCache instance.  
    ctc = CellTypesCache(manifest_file=manifestFile)
    
    # Get metadata on all cells
    cells = ctc.get_cells()
    
    ####### ALL DONORS #######
    # Populate the donors table with all donors of all cells
    if verbose:
        print "Populating donors table"
    
    for cell in cells:
        addDonor(cnx, cell['donor_id'], cell['donor']['sex'], cell['donor']['name'])

        
    ####### ALL EPHYS FEATURES #######
    try:
        # for all cells
        allEphysFeatures = ctc.get_ephys_features()  
    except:
        # If no ephys features, we cannot do anything
        print "No ephys features available; aborting program."
        sys.exit()
            
            
    ####### SPECIMENS #######
    # Get relevant info for each specimen in input list
    if verbose:
        print "Processing each specimen in turn"; sys.stdout.flush()
        
    for specimen in specimenList:
#        if verbose:
        print '@@@@@ Processing specimen:', specimen
        
        try:
            specEphysData = ctc.get_ephys_data(specimen)
        except:
            # If no ephys data, we do not want to bother with it
            print "No ephys data for specimen ", specimen, "; ignoring it."
            continue
    
        ###### SPECIMEN >>> METADATA ######
        # Paw through the cells to find the metadata for the current specimen
        # The cell is a dictionary that has most of the "other" non-sweep stuff
        # we need such as cell averages, rheobase info, transgenic line, hemisphere, 
        # age, sex, graph order, dendrite type, area, has_burst,...
        # May be able to improve this search Pythonically 
        for cell in cells:
            datasets = cell['data_sets']
            for dataset in datasets:
                dsspec = dataset['specimen_id']
                if dsspec == specimen:
                    specCell = cell
                    break
                
        # Add the specimen to the database
        donorID = specCell['donor_id']
        specimenTableIDX = addSpecimen(cnx, donorID, specimen)
    
        ####### SPECIMEN >>> SWEEPS/EXPERIMENTS #######
        # Change these to true if show in any sweep 
        cellHasBursts = False
        cellHasDelays = False
        cellHasPauses = False
        
        # Process each sweep in turn
        sweeps = ctc.get_ephys_sweeps(specimen)
        for sweep in sweeps:
            sweepNum = sweep['sweep_number']
            
#             if verbose:
            msg = ("  Processing sweep_number: " + str(sweepNum) + 
                   "  stimulus: " + str(sweep['stimulus_name']) + 
                   "  num_spikes = " + str(sweep['num_spikes']))
            print msg
    
            # Screen out some sweep types because they are not suitable for our 
            #      simulations or because the stimulus type is not successful 
            #      in use of process_spikes() (which we use for simulations)
            databaseList = ['Long Square', 'Short Square', 'Noise 1', 'Noise 2', 
                            'Square - 2s Suprathreshold', 'Square - 0.5ms Subthreshold',
                            'Short Square - Triple', 'Ramp', 'Ramp to Rheobase']
            if sweep['stimulus_name'] not in databaseList:
                print "    Stimulus type", sweep['stimulus_name'], "not supported."
                continue
    
            # sweepData holds index range, response data vector, sampling_rate, and stimulus vector 
            sweepData = specEphysData.get_sweep(sweepNum)
    
            # sweep_metadata holds aibs_stimulus_amplitude_pa, aibs_stimulus_name,
            #  gain, initial_access_resistance, and seal
            sweep_metadata = specEphysData.get_sweep_metadata(sweepNum)
            samplingRate = sweepData["sampling_rate"] # in Hz
            
            # Need to check if this sweep is actually an experiment
            # [not implemented]
            
            # Add the experiment to the database
            experimentIDX = (#
                addExperiment(cnx, specimenTableIDX, 
                              sweepNum, samplingRate,
                              sweep_metadata['aibs_stimulus_name'],
                              float(sweep_metadata['aibs_stimulus_amplitude_pa'])))

            # Only Long Square is suitable for our simulations
            fxOKList = ['Long Square']
            if sweep['stimulus_name'] not in fxOKList:
                print "    Stimulus type", sweep['stimulus_name'], "entered into database but not supported for feature extractions."
                continue

            ## Create the experiment feature extraction data ## 
            # This approach seen at   
            # http://alleninstitute.github.io/AllenSDK/_static/examples/nb/
            #      cell_types.html#Computing-Electrophysiology-Features
            # index_range[0] is the "experiment" start index. 0 is the "sweep" start index
            indexRange = sweepData["index_range"]
            # For our purposes, we grab the data from the beginning of the sweep 
            #  instead of the beginning of the experiment
            # i = sweepData["stimulus"][indexRange[0]:indexRange[1]+1] # in A
            # v = sweepData["response"][indexRange[0]:indexRange[1]+1] # in V
            i = sweepData["stimulus"][0:indexRange[1]+1] # in A
            v = sweepData["response"][0:indexRange[1]+1] # in V
            i *= 1e12 # to pA
            v *= 1e3 # to mV
            t = np.arange(0, len(v)) * (1.0 / samplingRate) # in seconds
         
            ###### Do the sweep's feature extraction #######
            # Determine the position and length of the analysis window with respect
            # to the beginning of the sweep 
            stimType = sweep_metadata['aibs_stimulus_name']
            analysisPoints = getABIAnalysisPoints(stimType)
            analysis_start = analysisPoints['analysisStart']
            stimulus_start = analysisPoints['stimulusStart']
            analysis_duration = analysisPoints['analysisDuration']
    
            if verbose:
                print ('analysis_start', analysis_start, 'stimulus_start ', 
                       stimulus_start, 'analysis_duration', analysis_duration)
    
            # Trim the analysis to end of experiment if necessary
            if (analysis_start + analysis_duration) * samplingRate >= indexRange[1]:
                end_time = (indexRange[1]-1)/samplingRate
                analysis_duration = end_time - analysis_start
    
            if verbose:
                print ('analysis_start', analysis_start, 'stimulus_start ', 
                       stimulus_start, 'analysis_duration', analysis_duration)
    
            # Now we extract the sweep features from that analysis window
            swFXs = ExtractSweepFeatures(t, v, i, analysis_start, 
                            analysis_duration, stimulus_start, verbose)
            if len(swFXs) == 0:
                print "Skipping experiment: ", specimen, '/', sweepNum, " and continuing..."
                continue
            
            if swFXs['hasBursts']: cellHasBursts = True
            if swFXs['hasPauses']: cellHasPauses = True
            if swFXs['hasDelay']: cellHasDelays = True

            ## Add the feature extraction to the database ##
            expFXs = dict(swFXs)
            # individual spike data not going into the database directly
            if 'spikeData' in expFXs:
                del expFXs['spikeData']
                   
            addExpFX(cnx, experimentIDX, expFXs)
        # end of:  for sweep in sweeps:
    
        ## Assemble the specimen feature extraction data ##
        specimenEphysFeaturesList = [f for f in allEphysFeatures if f['specimen_id'] == specimen]
        specimenEphysFeatures = specimenEphysFeaturesList[0]
         
        data_set = ctc.get_ephys_data(specCell['id'])
        sweeps = ctc.get_ephys_sweeps(specimen)
        sweep_numbers = defaultdict(list)
        for sweep in sweeps:
            sweep_numbers[sweep['stimulus_name']].append(sweep['sweep_number'])
    
        cell_features = (extract_cell_features(data_set, sweep_numbers['Ramp'], 
                    sweep_numbers['Short Square'], sweep_numbers['Long Square']))
        spFXs = {}
        spFXs['hasSpikes']                   = cell_features['long_squares']['spiking_sweeps'] != []
        spFXs['hero_sweep_id']               = cell_features['long_squares']['hero_sweep']['id']
        spFXs['hero_sweep_avg_firing_rate']  = cell_features['long_squares']['hero_sweep']['avg_rate']
        spFXs['hero_sweep_adaptation']       = cell_features['long_squares']['hero_sweep']['adapt']
        spFXs['hero_sweep_first_isi']        = cell_features['long_squares']['hero_sweep']['first_isi']
        spFXs['hero_sweep_mean_isi']         = cell_features['long_squares']['hero_sweep']['mean_isi']
        spFXs['hero_sweep_median_isi']       = cell_features['long_squares']['hero_sweep']['median_isi']
        spFXs['hero_sweep_isi_cv']           = cell_features['long_squares']['hero_sweep']['isi_cv']
        spFXs['hero_sweep_latency']          = cell_features['long_squares']['hero_sweep']['latency']
        spFXs['hero_sweep_stim_amp']         = cell_features['long_squares']['hero_sweep']['stim_amp']
        spFXs['hero_sweep_v_baseline']       = cell_features['long_squares']['hero_sweep']['v_baseline']
        spFXs['dendrite_type']               = specCell['dendrite_type']
        spFXs['electrode_0_pa']              = specimenEphysFeatures['electrode_0_pa']
        spFXs['f_i_curve_slope']             = specimenEphysFeatures['f_i_curve_slope']
        spFXs['fast_trough_t_long_square']   = specimenEphysFeatures['fast_trough_t_long_square']     
        spFXs['fast_trough_t_ramp']          = specimenEphysFeatures['fast_trough_t_ramp']    
        spFXs['fast_trough_t_short_square']  = specimenEphysFeatures['fast_trough_t_short_square']  
        spFXs['fast_trough_v_long_square']   = specimenEphysFeatures['fast_trough_v_long_square']
        spFXs['fast_trough_v_ramp']          = specimenEphysFeatures['fast_trough_v_ramp']    
        spFXs['fast_trough_v_short_square']  = specimenEphysFeatures['fast_trough_v_short_square']
        spFXs['has_bursts']                  = cellHasBursts
        spFXs['has_delays']                  = cellHasDelays    
        spFXs['has_pauses']                  = cellHasPauses
        spFXs['hemisphere']                  = specCell['hemisphere'] 
        spFXs['input_resistance_mohm']       = specimenEphysFeatures['input_resistance_mohm']
        spFXs['peak_t_long_square']          = specimenEphysFeatures['peak_t_long_square']
        spFXs['peak_t_ramp']                 = specimenEphysFeatures['peak_t_ramp']    
        spFXs['peak_t_short_square']         = specimenEphysFeatures['peak_t_short_square']
        spFXs['peak_v_long_square']          = specimenEphysFeatures['peak_v_long_square'] 
        spFXs['peak_v_ramp']                 = specimenEphysFeatures['peak_v_ramp']    
        spFXs['peak_v_short_square']         = specimenEphysFeatures['peak_v_short_square']
        spFXs['reporter_status']             = specCell['reporter_status']
        spFXs['rheobase_current']            = cell_features['long_squares']['rheobase_i'] 
        spFXs['ri']                          = specimenEphysFeatures['ri']
        spFXs['sagFraction']                 = specimenEphysFeatures['sag']
        spFXs['seal_gohm']                   = specimenEphysFeatures['seal_gohm']
        spFXs['slow_trough_t_long_square']   = specimenEphysFeatures['slow_trough_t_long_square']
        spFXs['slow_trough_t_ramp']          = specimenEphysFeatures['slow_trough_t_ramp']           
        spFXs['slow_trough_t_short_square']  = specimenEphysFeatures['slow_trough_t_short_square']
        spFXs['slow_trough_v_long_square']   = specimenEphysFeatures['slow_trough_v_long_square']  
        spFXs['slow_trough_v_ramp']          = specimenEphysFeatures['slow_trough_v_ramp']                
        spFXs['slow_trough_v_short_square']  = specimenEphysFeatures['slow_trough_v_short_square']
        spFXs['structure_acronym']           = specCell['structure']['acronym']  
        spFXs['structure_name']              = specCell['structure']['name']
        spFXs['tau']                         = specimenEphysFeatures['tau']
        spFXs['threshold_i_long_square']     = specimenEphysFeatures['threshold_i_long_square']
        spFXs['threshold_i_ramp']            = specimenEphysFeatures['threshold_i_ramp']              
        spFXs['threshold_i_short_square']    = specimenEphysFeatures['threshold_i_short_square']
        spFXs['threshold_t_long_square']     = specimenEphysFeatures['threshold_t_long_square']  
        spFXs['threshold_t_ramp']            = specimenEphysFeatures['threshold_t_ramp']              
        spFXs['threshold_t_short_square']    = specimenEphysFeatures['threshold_t_short_square']
        spFXs['threshold_v_long_square']     = specimenEphysFeatures['threshold_v_long_square']  
        spFXs['threshold_v_ramp']            = specimenEphysFeatures['threshold_v_ramp']              
        spFXs['threshold_v_short_square']    = specimenEphysFeatures['threshold_v_short_square']
        spFXs['transgenic_line']             = specCell['transgenic_line']
        spFXs['trough_t_long_square']        = specimenEphysFeatures['trough_t_long_square']        
        spFXs['trough_t_ramp']               = specimenEphysFeatures['trough_t_ramp']                 
        spFXs['trough_t_short_square']       = specimenEphysFeatures['trough_t_short_square'] 
        spFXs['trough_v_long_square']        = specimenEphysFeatures['trough_v_long_square']   
        spFXs['trough_v_ramp']               = specimenEphysFeatures['trough_v_ramp']                 
        spFXs['trough_v_short_square']       = specimenEphysFeatures['trough_v_short_square'] 
        spFXs['upstroke_downstroke_ratio_long_square'] \
                                = specimenEphysFeatures['upstroke_downstroke_ratio_long_square']  
        spFXs['upstroke_downstroke_ratio_ramp'] \
                                = specimenEphysFeatures['upstroke_downstroke_ratio_ramp']        
        spFXs['upstroke_downstroke_ratio_short_square'] \
                                = specimenEphysFeatures['upstroke_downstroke_ratio_short_square'] 
        spFXs['v_rest']                      = specimenEphysFeatures['vrest']
        spFXs['vm_for_sag']                  = specimenEphysFeatures['vm_for_sag']

        ## Add the specimen feature extraction data to the database ##
        addSpecFX(cnx, specimenTableIDX, spFXs)
    # end of:  for specimen in specimenList
    
    cnx.close()
# Code example #3 (score: 0)
#

# In[9]:

# NOTE(review): `single_cell_morphology` comes from an earlier notebook cell
# not shown here; `.soma` presumably holds the soma record of the
# reconstruction — confirm against the preceding cells.
single_cell_morphology.soma

# ## Plotting a single sweep of data

# At this point, you might have realized that this dataframe doesn't contain any data about the electrophysiology. In order to get information about the electrophysiological properties of these cells, we need to use the `get_ephys_features()` method on our instance of the cell types cache.

# Just as we did before, we will assign the output of `get_ephys_features()` to a variable and then convert it into a pandas dataframe.

# In[10]:

# Index the pre-computed ephys features by specimen id for easy joining.
ephys_features = pd.DataFrame(
    ctc.get_ephys_features()).set_index('specimen_id')
ephys_features.head()

# Again, we can combine our dataframe that contains the metadata of our cells with our electrophysiology dataframe to create one single dataframe.

# In[11]:

# NOTE(review): `all_cells_df` is defined in an earlier cell; its index is
# presumably the specimen id so the join lines up — verify upstream.
all_ephys_features = all_cells_df.join(ephys_features)
all_ephys_features.head()

# In[12]:

print(len(all_ephys_features))

# The `get_ephys_data()` method can download electrophysiology traces for a single cell in the database. This method returns a  class instance with helper methods for retrieving stimulus and response traces out of an NWB file. In order to use this method, you must specify the id of the cell specimen whose electrophysiology you would like to download.
# Code example #4 (score: 0)
# Import all the necessary packages and initialize an instance of the cache.
import pandas as pd
from allensdk.core.cell_types_cache import CellTypesCache
from allensdk.api.queries.cell_types_api import CellTypesApi
import matplotlib.pyplot as plt

# The cache stores (and re-uses) downloaded Cell Types data under 'cell_types/'.
ctc = CellTypesCache(manifest_file='cell_types/manifest.json')

# BUGFIX: corrected the misspelling "succesfully" in the status message.
print('Packages successfully downloaded.')

# Below we have created a pandas dataframe from the electrophysiology data and metadata of our mouse cells and set the row indices to be the `id` column.

# In[2]:

# Join the per-cell metadata (indexed by cell id) with the pre-computed
# ephys features (indexed by specimen id); for these data the two ids match.
mouse_df = pd.DataFrame(
    ctc.get_cells(species=[CellTypesApi.MOUSE])).set_index('id')
ephys_df = pd.DataFrame(ctc.get_ephys_features()).set_index('specimen_id')
mouse_ephys_df = mouse_df.join(ephys_df)
mouse_ephys_df.head()

# The Allen has many pre-computed features that you might consider comparing across cells. Some of these features include input resistance ('input_resistance_mohm'), Adaptation ratio ('adaptation'), Average ISI ('avg_isi'), and many others (you can find a complete glossary <a href = "https://docs.google.com/document/d/1YGLwkMTebwrXd_1E817LFbztMjSTCWh83Mlp3_3ZMEo/edit#heading=h.t0p3wngfkxc1"> here </a>).

# We must first select 2 or more cell types that we would like to compare. We can subset our electrophysiology dataframe to compare across transgenic lines, structure layer, and many more columns. We created two dataframes to compare spiny dendrite types to aspiny dendrite types.

# In[3]:

# Define your cell type variables below
cell_type1 = 'spiny'
cell_type2 = 'aspiny'

# Create our dataframes from our cell types
mouse_spiny_df = mouse_ephys_df[mouse_ephys_df['dendrite_type'] == cell_type1]
# Code example #5 (score: 0)
# In[2]:

# One dataframe holding both the ephys and the morphology features.
all_features = ctc.get_all_features(dataframe=True)
all_features.head()

# Since we set the `dataframe` parameter to `True`, the method returns our data as a nice and neat pandas dataframe. If set to `False`, the method returns a dictionary. We can also check to make sure all the columns are there by executing the `get_ephys_features` and `get_morphology_features` separately and comparing the columns.

# In[3]:

all_features_columns = all_features.columns
all_features_columns

# In[4]:

# Store all ephys columns in a variable
ephys = pd.DataFrame(ctc.get_ephys_features())
ephys_columns = ephys.columns

# Store all morphology columns in a variable
morphology = pd.DataFrame(ctc.get_morphology_features())
morphology_columns = morphology.columns

# Combine the two into one list
ephys_and_morphology = list(morphology_columns) + list(ephys_columns)

# Sort and compare the columns to make sure they are all there.
# BUGFIX: the original used `list(...).sort() == ephys_and_morphology.sort()`.
# list.sort() sorts in place and returns None, so that expression was always
# `None == None` -> True regardless of the contents. sorted() returns the
# sorted lists so the comparison is meaningful.
print(sorted(all_features_columns) == sorted(ephys_and_morphology))

# By default, `get_all_features()` only returns ephys and morphology features for cells that have reconstructions. To access all cells regardless of reconstruction, set the parameter `require_reconstruction` to `False`.

# The `get_ephys_data()` method can download electrophysiology traces for a single cell in the database. This method returns a  class instance with helper methods for retrieving stimulus and response traces out of an NWB file. In order to use this method, you must specify the id of the cell specimen whose electrophysiology you would like to download.