예제 #1
0
def expt_ap_dim(specimen_id):
    """Return (AP width in ms, AP peak-to-trough height in mV) at rheobase.

    Looks up the passed coarse long-square sweeps for `specimen_id`, finds
    the lowest-amplitude sweep with a nonzero spike count (rheobase), and
    measures the first spike of that sweep.

    Returns (nan, nan) when no sweep spikes, or when the extractor finds no
    spike inside the analysis window of the rheobase sweep.
    """
    data_set = expt_data_set(specimen_id)
    long_square_sweeps = lims_utils.get_sweeps_of_type("C1LSCOARSE",
                                                       specimen_id,
                                                       passed_only=True)
    # Compute (amplitude, spike count) once per sweep and reuse it for both
    # the f-I data and the amplitude -> sweep lookup (the original computed
    # it twice per sweep).
    amp_counts = [amp_and_spike_count(data_set, sweep)
                  for sweep in long_square_sweeps]
    fi_curve_data = dict(amp_counts)
    sweeps_by_amp = {amp: sweep
                     for (amp, _), sweep in zip(amp_counts,
                                                long_square_sweeps)}
    fi_arr = np.array([(amp, fi_curve_data[amp])
                       for amp in sorted(fi_curve_data.keys())])

    spiking_sweeps = np.flatnonzero(fi_arr[:, 1])
    if len(spiking_sweeps) == 0:
        return np.nan, np.nan
    rheo_sweep = sweeps_by_amp[fi_arr[spiking_sweeps[0], 0]]
    v, i, t = lims_utils.get_sweep_v_i_t_from_set(data_set, rheo_sweep)
    # Analysis window 1.02-2.02 s (same epoch as ve_ap_dim).
    swp_ext = EphysSweepFeatureExtractor(t, v, start=1.02, end=2.02)
    swp_ext.process_spikes()
    widths = swp_ext.spike_feature("width")
    # Guard against the extractor finding no spikes in the window even
    # though the f-I spike count was nonzero (mirrors ve_ap_dim).
    if len(widths) == 0:
        return np.nan, np.nan
    return (widths[0] * 1e3,
            swp_ext.spike_feature("peak_v")[0] -
            swp_ext.spike_feature("trough_v")[0])
예제 #2
0
    def calculate_features(self, t_ms, v, i, feature_names):
        """Compute the requested features for one simulated trace.

        Sweep-level features are returned directly; spike-level features
        are averaged over all detected spikes; unrecognized names yield
        NaN. Returns a numpy array aligned with `feature_names`.
        """
        stim_start = self.stim.delay * 1e-3  # stim times are in ms
        stim_dur = self.stim.dur * 1e-3
        t_s = t_ms * 1e-3

        extractor = EphysSweepFeatureExtractor(t_s,
                                               v,
                                               i,
                                               start=stim_start,
                                               end=stim_start + stim_dur)
        extractor.process_spikes()
        extractor.sweep_feature("v_baseline")  # Pre-compute the baseline
        extractor.process_new_spike_feature("slow_trough_norm_t",
                                            sf.slow_trough_norm_t,
                                            affected_by_clipping=True)
        extractor.process_new_spike_feature("slow_trough_delta_v",
                                            sf.slow_trough_delta_voltage_feature,
                                            affected_by_clipping=True)
        sweep_level = set(extractor.sweep_feature_keys())
        spike_level = set(extractor.spike_feature_keys())

        def _value_for(name):
            # Sweep-level features take precedence over spike-level ones.
            if name in sweep_level:
                return extractor.sweep_feature(name)
            if name in spike_level:
                return extractor.spike_feature(name).mean()
            return np.nan

        return np.array([_value_for(name) for name in feature_names])
예제 #3
0
def load_sweep(ds, sweep_num):
    """Load one sweep from a data set and resample it to 25 kHz.

    Converts the response to mV and the stimulus to pA, runs spike
    detection, and forwards the traces plus the threshold/peak/trough
    sample indices to resample_timeseries.
    """
    sweep_data = ds.get_sweep(sweep_num)
    sampling_rate = sweep_data['sampling_rate']

    voltage = sweep_data['response'] * 1e3  # to mV
    current = sweep_data['stimulus'] * 1e12  # to pA
    time = np.arange(0, len(voltage)) * (1.0 / sampling_rate)

    extractor = EphysSweepFeatureExtractor(t=time, v=voltage, i=current)
    extractor.process_spikes()

    return resample_timeseries(voltage, current, time,
                               extractor.spike_feature("threshold_index"),
                               extractor.spike_feature("peak_index"),
                               extractor.spike_feature("trough_index"),
                               sampling_rate, 25000.)
예제 #4
0
def ve_ap_dim(specimen_id, ve_path):
    """Return (AP width in ms, AP peak-to-trough height in mV) at rheobase
    for a virtual-experiment NWB file.

    Spike counts and the rheobase sweep come from the virtual data set at
    `ve_path`; the experimental data set is used only to identify the
    coarse long-square sweeps. Returns (nan, nan) when no sweep spikes or
    when the rheobase sweep yields no detectable spike in the window.
    """
    data_set = NwbDataSet(ve_path)
    expt_set = expt_data_set(specimen_id)
    long_square_sweeps = lims_utils.get_sweeps_of_type("C1LSCOARSE",
                                                       specimen_id,
                                                       passed_only=True)
    # Compute (amplitude, spike count) once per sweep and reuse it for both
    # the f-I data and the amplitude -> sweep lookup (the original computed
    # it twice per sweep).
    amp_counts = [amp_and_spike_count(data_set, sweep, expt_set)
                  for sweep in long_square_sweeps]
    fi_curve_data = dict(amp_counts)
    sweeps_by_amp = {amp: sweep
                     for (amp, _), sweep in zip(amp_counts,
                                                long_square_sweeps)}
    fi_arr = np.array([(amp, fi_curve_data[amp])
                       for amp in sorted(fi_curve_data.keys())])

    spiking_sweeps = np.flatnonzero(fi_arr[:, 1])
    if len(spiking_sweeps) == 0:
        return np.nan, np.nan
    rheo_sweep = sweeps_by_amp[fi_arr[spiking_sweeps[0], 0]]

    v, i, t = lims_utils.get_sweep_v_i_t_from_set(data_set, rheo_sweep)
    # filter=None disables the extractor's default filtering for these
    # virtual traces; analysis window is 1.02-2.02 s as in expt_ap_dim.
    swp_ext = EphysSweepFeatureExtractor(t,
                                         v,
                                         start=1.02,
                                         end=2.02,
                                         filter=None)
    swp_ext.process_spikes()
    if len(swp_ext.spike_feature("width")) == 0:
        # Python 3 print calls (originals were Python 2 print statements,
        # which are syntax errors under Python 3).
        print("NO SPIKES FOR {:d} ON SWEEP {:d}".format(
            specimen_id, rheo_sweep))
        print(fi_arr)
        print(sweeps_by_amp)
        return np.nan, np.nan
    return_vals = (swp_ext.spike_feature("width")[0] * 1e3,
                   swp_ext.spike_feature("peak_v")[0] -
                   swp_ext.spike_feature("trough_v")[0])
    return return_vals
        tf = trace.t_stop
        #print(tf)
        ti = trace.t_start
        #print(ti)
        t = np.linspace(0,float(tf - ti), len(v))
        #print(t)
        #i = np.zeros(len(v))
        #print(np.isnan(v)[0])
        #print('##########')
        print(trace.sampling_rate)
        plt.plot(t,v,label = (iteration_number))
        try:
            Trace_with_features = EphysSweepFeatureExtractor(t=t, v=v, filter = float(trace.sampling_rate)/2500,min_peak=-30.0, dv_cutoff=20.0, max_interval=0.005, min_height=2.0, thresh_frac=0.05, baseline_interval=0.1, baseline_detect_thresh=0.3, id=None)
            Trace_with_features.process_spikes()
            print(Trace_with_features.filter)        
            plt.plot(Trace_with_features.spike_feature("peak_t"),Trace_with_features.spike_feature("peak_v"),'k+')
        except:
            pass
        plt.title('recording type: '+str(trace.name).split("-")[0]+' '+str(len(analog_signals))+' '+'traces'+' '+'_compiled')
        plt.ylabel('Amplitude of signal: '+str(trace[0]).split()[1])
        plt.xlabel('time (mS)')
        plt.legend()
        plt.show()
        
        
        
file_to_read = nio.StimfitIO('/mnt/5D4B-FA71/Data/190822/trace_alone.dat')
segments = file_to_read.read_block().segments
analog_signal = segment.analogsignals
trace_1 = segment.tr
         label='trace numer = ' + str(iteration_number))
try:
    Trace_with_features = EphysSweepFeatureExtractor(
        t=t,
        v=v,
        filter=float(trace.sampling_rate) / 2500,
        min_peak=-20.0,
        dv_cutoff=20.0,
        max_interval=0.005,
        min_height=2.0,
        thresh_frac=0.05,
        baseline_interval=0.1,
        baseline_detect_thresh=0.3,
        id=None)
    Trace_with_features.process_spikes()
    neuron_threshold_v = Trace_with_features.spike_feature(
        "threshold_v")
    #                        print(threshold_state)
    if threshold_state == 0 and len(
            neuron_threshold_v) >= 1:
        neuron_threshold_v = Trace_with_features.spike_feature(
            "threshold_v")[0]
        neuron_threshold_t = Trace_with_features.spike_feature(
            "threshold_t")[0]
        plt.plot(neuron_threshold_t,
                 neuron_threshold_v,
                 'o',
                 color='k',
                 label='threshold voltage')
        #                            print(neuron_threshold_t,neuron_threshold_v)
        plt.figtext(
            1, 0.50, "Neuron's threshold potential = " +
예제 #7
0
import os
from allensdk.ephys.ephys_extractor import EphysSweepFeatureExtractor
from allensdk.core.cell_types_cache import CellTypesCache

# Analyze one current-step segment of an ABF recording and extract spike
# features with the AllenSDK sweep extractor.
# NOTE(review): `AxonIO` and `np` are used below but not imported in this
# snippet -- presumably `from neo.io import AxonIO` and numpy; confirm.
filename = 'Cell 6 of 171117.abf'
seg_no = 17  #0 is -100pA, 4 is 0pA, 20 is 400pA. Now extrapolate
stim_start = 81.4e-3  #in s
stim_stop = 581.4e-3  #in s

# Absolute stimulus window: each segment is offset by seg_no * 2 s
# (assumes 2 s per segment -- TODO confirm against the protocol).
Actualstim_start = seg_no * 2 + stim_start
Actualstim_stop = seg_no * 2 + stim_stop
# Step protocol: 25 pA per segment, starting at -100 pA (see comment above).
Inputcurr = seg_no * 25 - 100  #in pA

reader = AxonIO(filename='Deepanjali data/WT step input cells/' + filename)
Vtrace = reader.read_block().segments[seg_no].analogsignals[0]

# Reconstruct the stimulus current array: zero everywhere except the
# injection window, where it equals Inputcurr (pA).
i = np.zeros(int((Vtrace.t_stop - Vtrace.t_start) * Vtrace.sampling_rate))
i[int(stim_start * Vtrace.sampling_rate):int(stim_stop *
                                             Vtrace.sampling_rate)] = Inputcurr
i = np.array(i)
# Voltage samples (floats) and a matching uniform time base in seconds.
v = np.array([float(V) for V in Vtrace])
t = np.linspace(0, float(Vtrace.t_stop - Vtrace.t_start),
                int((Vtrace.t_stop - Vtrace.t_start) * Vtrace.sampling_rate))
t = np.array(t)

sweep_ext = EphysSweepFeatureExtractor(t=t, v=v, i=i, filter=5)
sweep_ext.process_spikes()

sweep_ext.spike_feature_keys()  #Lists all the features that can be extracted
sweep_ext.spike_feature("width")  #Extracts AP width
예제 #8
0
 try:
     Trace_with_features = EphysSweepFeatureExtractor(
         t=t,
         v=v,
         filter=float(trace.sampling_rate) / 2500,
         min_peak=-10.0,
         dv_cutoff=10.0,
         max_interval=0.005,
         min_height=2.0,
         thresh_frac=0.05,
         baseline_interval=0.1,
         baseline_detect_thresh=0.3,
         id=None)
     Trace_with_features.process_spikes()
     #                        print(Trace_with_features.filter)
     plt.plot(Trace_with_features.spike_feature("peak_t"),
              Trace_with_features.spike_feature("peak_v"),
              'r+',
              label='action potentials')
     plt.plot(
         [], [],
         ' ',
         label='number of action petentials = ' + str(
             len(Trace_with_features.spike_feature(
                 "peak_v"))),
         color='red')
 except:
     pass
 channel_index = str(trace.annotations['channel_index'])
 plt.title('recording type: ' +
           str(trace.name).split("-")[0] +
예제 #9
0
    def calculate_feature_errors(self, t_ms, v, i, feature_names, targets):
        """Score a simulated trace against target feature statistics.

        Returns one error per entry in `feature_names`:
        |model - target_mean| / target_stdev when the feature is available,
        `missing_penalty_value` when it is missing or NaN, and a flat
        variance-dependent penalty for every feature when the trace fails
        the sanity checks (doesn't return to rest, spikes before the
        stimulus, too few spikes, or bimodal spike peaks).
        """
        # Special case checks and penalty values
        minimum_num_spikes = 2
        missing_penalty_value = 20.0
        max_fail_penalty = 250.0
        min_fail_penalty = 75.0
        overkill_reduction = 0.75  # NOTE(review): unused in this method
        variance_factor = 0.1

        fail_trace = False

        # Stimulus timing is stored in ms on self.stim; convert to seconds.
        delay = self.stim.delay * 1e-3
        duration = self.stim.dur * 1e-3
        t = t_ms * 1e-3

        # penalize for failing to return to rest
        start_index = np.flatnonzero(t >= delay)[0]
        if np.abs(v[-1] - v[:start_index].mean()) > 2.0:
            fail_trace = True
        else:
            # Also fail the trace if it spikes before the stimulus begins.
            pre_swp = EphysSweepFeatureExtractor(t, v, i, start=0, end=delay)
            pre_swp.process_spikes()
            if pre_swp.sweep_feature("avg_rate") > 0:
                fail_trace = True

        # Index the target list by feature name for O(1) lookups below.
        target_features_dict = {
            f["name"]: {
                "mean": f["mean"],
                "stdev": f["stdev"]
            }
            for f in targets
        }

        if not fail_trace:
            swp = EphysSweepFeatureExtractor(t,
                                             v,
                                             i,
                                             start=delay,
                                             end=delay + duration)
            swp.process_spikes()
            swp.sweep_feature("v_baseline")  # Pre-compute the baseline
            swp.process_new_spike_feature("slow_trough_norm_t",
                                          sf.slow_trough_norm_t,
                                          affected_by_clipping=True)
            swp.process_new_spike_feature("slow_trough_delta_v",
                                          sf.slow_trough_delta_voltage_feature,
                                          affected_by_clipping=True)
            # Note: these keys are only bound on the non-fail path; the
            # else-branch below relies on that.
            sweep_keys = swp.sweep_feature_keys()
            spike_keys = swp.spike_feature_keys()

            if len(swp.spike_feature(
                    "threshold_t")) < minimum_num_spikes:  # Enough spikes?
                fail_trace = True
            else:
                avg_per_spike_peak_error = np.mean([
                    abs(peak_v - target_features_dict["peak_v"]["mean"])
                    for peak_v in swp.spike_feature("peak_v")
                ])
                avg_overall_error = abs(
                    target_features_dict["peak_v"]["mean"] -
                    swp.spike_feature("peak_v").mean())
                if avg_per_spike_peak_error > 3. * avg_overall_error:  # Weird bi-modality of spikes; 3.0 is arbitrary
                    fail_trace = True

        if fail_trace:
            # Failed traces get one flat penalty for every feature; the
            # penalty shrinks with trace variance around stimulus onset,
            # clamped to [min_fail_penalty, max_fail_penalty].
            variance_start = np.flatnonzero(t >= delay - 0.1)[0]
            variance_end = np.flatnonzero(t >= (delay + duration) / 2.0)[0]
            trace_variance = v[variance_start:variance_end].var()
            error_value = max(
                max_fail_penalty - trace_variance * variance_factor,
                min_fail_penalty)
            errs = np.ones(len(feature_names)) * error_value
        else:
            errs = []
            for k in feature_names:
                if k in sweep_keys:
                    model_mean = swp.sweep_feature(k)
                elif k in spike_keys:
                    model_mean = swp.spike_feature(k).mean()
                else:
                    _log.debug("Could not find feature %s", k)
                    errs.append(missing_penalty_value)
                    continue
                if np.isnan(model_mean):
                    errs.append(missing_penalty_value)
                else:
                    # Error is the distance from the target mean measured
                    # in target standard deviations.
                    target_mean = target_features_dict[k]['mean']
                    target_stdev = target_features_dict[k]['stdev']
                    errs.append(
                        np.abs((model_mean - target_mean) / target_stdev))
            errs = np.array(errs)
        return errs
예제 #10
0
def singlefeature(seg_nol,reader,filename,stim_start,stim_end):
    '''Extract spike and passive-membrane features from a neo.io.axonio.AxonIO object.
    seg_nol takes in a list of segment numbers. reader is the AxonIO object. filename is the name of the neuron. stim_start and stim_end are when the current injection was started and ended (in s).
    Returns a dict of features; per-amplitude keys are suffixed with the injected current in pA.'''
    features = {}
    for seg_no in seg_nol:
        Vtrace = reader.read_block().segments[seg_no].analogsignals[0]
        #Extra
        # Bookkeeping entries; overwritten on every loop iteration.
        features['Cell name'] = filename
        features['Sampling rate'] = Vtrace.sampling_rate
        features['stim_start'] = stim_start
        features['stim_end'] = stim_end
        # Step protocol: 25 pA per segment, starting at -100 pA.
        Inputcurr = seg_no*25e-12 - 100e-12 #in A

        # Reconstruct the stimulus current array (pA) over the step window.
        i = np.zeros(int((Vtrace.t_stop - Vtrace.t_start)*Vtrace.sampling_rate))
        i[int(stim_start*Vtrace.sampling_rate):int(stim_end*Vtrace.sampling_rate)] = Inputcurr*1e12
        i = np.array(i) #in pA
        v = np.array([float(V) for V in Vtrace]) #in mV
        t = np.linspace(0,float(Vtrace.t_stop - Vtrace.t_start), int((Vtrace.t_stop - Vtrace.t_start)*Vtrace.sampling_rate))
        t = np.array(t) # in s

        # plt.plot(t,v, label = f'(unknown)')
        # plt.legend()
        # plt.show()

        sweep_ext = EphysSweepFeatureExtractor(t=t, v=v, i=i, filter=float(Vtrace.sampling_rate)/2500)
        sweep_ext.process_spikes()


        # E_rest
        features[f'E_rest_{Inputcurr*1e12}'] = np.nanmean(v[t<=stim_start])*1e-3

        # Input resistance #steady-state V is average of last 100ms of the current clamp duration
        # By the Inputcurr formula, segment 5 is the +25 pA step and
        # segment 3 the -25 pA step.
        Vtracep25 = reader.read_block().segments[5].analogsignals[0]
        Vtracep25 = np.array([float(V) for V in Vtracep25]) #in mV
        Vtracen25 = reader.read_block().segments[3].analogsignals[0]
        Vtracen25 = np.array([float(V) for V in Vtracen25]) #in mV
        str_ind = (np.abs(t-stim_end+100e-3)).argmin()
        stp_ind = (np.abs(t-stim_end)).argmin()
        # NOTE(review): 'Rinput' and 'Cm' keys are not amplitude-specific,
        # so each segment in seg_nol overwrites them.
        features[f'Rinput'] = (-1*np.nanmean(Vtracen25[str_ind:stp_ind]) + np.nanmean(Vtracep25[str_ind:stp_ind]))*1e-3/2/25e-12

        # Total capacitance
        Vtracep25_choppped = Vtracep25[:stp_ind]
        Vtracen25_choppped = Vtracen25[:stp_ind]
        vp63 = (np.nanmean(Vtracep25[str_ind:stp_ind]) - features[f'E_rest_{Inputcurr*1e12}'])*0.63 + features[f'E_rest_{Inputcurr*1e12}']
        vn63 = (np.nanmean(Vtracen25[str_ind:stp_ind]) - features[f'E_rest_{Inputcurr*1e12}'])*0.63 + features[f'E_rest_{Inputcurr*1e12}']
        # NOTE(review): both terms of `tau` use Vtracep25_choppped/vp63,
        # and vn63/Vtracen25_choppped are never used -- one term was
        # likely meant to use the -25 pA trace. Confirm before trusting Cm.
        tau = (t[(np.abs(Vtracep25_choppped-vp63)).argmin()] - stim_start + t[(np.abs(Vtracep25_choppped-vp63)).argmin()] - stim_start)/2
        tauinv = (t[len(Vtracep25_choppped)-(np.abs(Vtracep25_choppped[stp_ind::-1]-vp63)).argmin()] - stim_start + t[len(Vtracep25_choppped)-(np.abs(Vtracep25_choppped[::-1]-vp63)).argmin()] - stim_start)/2
        features[f'Cm'] = (tau+tauinv)/features[f'Rinput']/2

        # NOTE(review): the [0]/[1]/[-1]/[-2] spike indexing below assumes
        # at least two detected spikes; fewer spikes raises IndexError.
        # AP1_amp
        features[f'AP1_amp_{Inputcurr*1e12}'] = sweep_ext.spike_feature("peak_v")[0]*1e-3 - features[f'E_rest_{Inputcurr*1e12}']

        #APp_amp
        features[f'APp_amp_{Inputcurr*1e12}'] = sweep_ext.spike_feature("peak_v")[-2]*1e-3 - features[f'E_rest_{Inputcurr*1e12}']

        #AP1_width
        features[f'AP1_width_{Inputcurr*1e12}'] = sweep_ext.spike_feature("width")[0]

        #APp_width
        features[f'APp_width_{Inputcurr*1e12}'] = sweep_ext.spike_feature("width")[-2]

        #AP1_thresh
        features[f'AP1_thresh_{Inputcurr*1e12}'] = sweep_ext.spike_feature("threshold_v")[0]*1e-3

        #APp_thresh
        features[f'APp_thresh_{Inputcurr*1e12}'] = sweep_ext.spike_feature("threshold_v")[-2]*1e-3

        #AP1_lat
        features[f'AP1_lat_{Inputcurr*1e12}'] = sweep_ext.spike_feature("threshold_t")[0] - stim_start

        #ISI1
        features[f'ISI1_{Inputcurr*1e12}'] = sweep_ext.spike_feature("peak_t")[1] - sweep_ext.spike_feature("peak_t")[0]

        #ISIl
        features[f'ISIl_{Inputcurr*1e12}'] = sweep_ext.spike_feature("peak_t")[-1] - sweep_ext.spike_feature("peak_t")[-2]

        #ISIavg
        pt = sweep_ext.spike_feature("peak_t")
        features[f'ISIavg_{Inputcurr*1e12}'] = np.nanmean([s-f for s,f in zip(pt[1:],pt[:-1])])

        #freq
        features[f'freq_{Inputcurr*1e12}'] = len(sweep_ext.spike_feature("peak_t"))/(stim_end - stim_start)

        #Adptn_id = 1-ISI1/ISIl
        features[f'Adptn_id_{Inputcurr*1e12}'] = 1 - features[f'ISI1_{Inputcurr*1e12}']/features[f'ISIl_{Inputcurr*1e12}']

        #fAHP_AP1_amp
        features[f'fAHP_AP1_amp_{Inputcurr*1e12}'] = sweep_ext.spike_feature("fast_trough_v")[0]*1e-3 - features[f'E_rest_{Inputcurr*1e12}']

        #fAHP_APp_amp
        features[f'fAHP_APp_amp_{Inputcurr*1e12}'] = sweep_ext.spike_feature("fast_trough_v")[-2]*1e-3 - features[f'E_rest_{Inputcurr*1e12}']

        #mAHP_AP1_amp
        features[f'mAHP_AP1_amp_{Inputcurr*1e12}'] = sweep_ext.spike_feature("slow_trough_v")[0]*1e-3 - features[f'E_rest_{Inputcurr*1e12}']

        #mAHP_APp_amp
        features[f'mAHP_APp_amp_{Inputcurr*1e12}'] = sweep_ext.spike_feature("slow_trough_v")[-2]*1e-3 - features[f'E_rest_{Inputcurr*1e12}']

        #mAHP_AP1_dur
        features[f'mAHP_AP1_dur_{Inputcurr*1e12}'] = (sweep_ext.spike_feature("slow_trough_t")[0] - sweep_ext.spike_feature("peak_t")[0])/features[f'ISI1_{Inputcurr*1e12}']

        #mAHP_APp_dur = mAHP of second last spike (penultimate)
        features[f'mAHP_APp_dur_{Inputcurr*1e12}'] = (sweep_ext.spike_feature("slow_trough_t")[-2] - sweep_ext.spike_feature("peak_t")[-2])/features[f'ISIl_{Inputcurr*1e12}']

        #ADP_AP1_amp
        features[f'ADP_AP1_amp_{Inputcurr*1e12}'] = sweep_ext.spike_feature("adp_v")[0]*1e-3 - features[f'E_rest_{Inputcurr*1e12}']

        #ADP_APp_amp
        features[f'ADP_APp_amp_{Inputcurr*1e12}'] = sweep_ext.spike_feature("adp_v")[-2]*1e-3 - features[f'E_rest_{Inputcurr*1e12}']

        #mAHP_stimend_amp = within 50ms
        features[f'mAHP_stimend_amp_{Inputcurr*1e12}'] = np.min(v[int((stim_end)*Vtrace.sampling_rate):int((stim_end + 50e-3)*Vtrace.sampling_rate)])*1e-3 - features[f'E_rest_{Inputcurr*1e12}']

        #sAHP_stimend_amp = within 200ms
        features[f'sAHP_stimend_amp_{Inputcurr*1e12}'] = np.min(v[int((stim_end)*Vtrace.sampling_rate):int((stim_end + 200e-3)*Vtrace.sampling_rate)])*1e-3 - features[f'E_rest_{Inputcurr*1e12}']

    return features
예제 #11
0
def features(Vtrace,
             stim_start=preStimTime,
             stim_end=preStimTime + injectTime,
             truntime=runtime,
             Inputcurr=Injectcurr):
    """Extract features from a voltage trace into a copy of
    feature_range_df, then rescale each raw value against its Min/Max
    range and convert out-of-range values into weighted costs.

    Returns the dataframe with 'raw', 'rescaled', and 'cost' columns
    filled in. Defaults come from module-level simulation settings.
    """
    features_df = feature_range_df.copy()

    v = np.array(Vtrace)  #in mV
    t = np.linspace(0, truntime, len(Vtrace))  #in s
    start_idx = (np.abs(t - stim_start)).argmin()
    end_idx = (np.abs(t - stim_end)).argmin()
    i = np.zeros(len(t))
    i[start_idx:end_idx] = Inputcurr * 1e12  #in pA

    # Try the higher filter cutoff first and fall back to the lower one if
    # the extractor raises (deliberate best-effort fallback).
    # NOTE(review): the bare `except:` catches everything, including
    # KeyboardInterrupt -- consider narrowing to the specific exception.
    try:
        sweep_ext = EphysSweepFeatureExtractor(t=t,
                                               v=v,
                                               i=i,
                                               start=stim_start,
                                               end=stim_end,
                                               filter=9.9)
        sweep_ext.process_spikes()
    except:
        sweep_ext = EphysSweepFeatureExtractor(t=t,
                                               v=v,
                                               i=i,
                                               start=stim_start,
                                               end=stim_end,
                                               filter=4.9)
        sweep_ext.process_spikes()

    features_df['raw'] = ''
    # E_rest
    # Resting potential: mean voltage from 0.5 s up to stimulus onset, or
    # the final 0.5 s of the trace when the stimulus starts too early.
    if stim_start > 0.5:
        features_df.loc[f'E_rest_{Inputcurr*1e12}', 'raw'] = np.nanmean(
            v[int(0.5 * len(v) / truntime):int(stim_start * len(v) /
                                               truntime)])
    else:
        features_df.loc[f'E_rest_{Inputcurr*1e12}',
                        'raw'] = np.nanmean(v[-int(0.5 * len(v) / truntime):])
    # NOTE(review): the [0]/[1]/[-1]/[-2] spike indexing below assumes at
    # least two detected spikes; fewer spikes raises IndexError.
    # AP1_amp
    features_df.loc[f'AP1_amp_{Inputcurr*1e12}',
                    'raw'] = sweep_ext.spike_feature("peak_v")[
                        0] - features_df.loc[f'E_rest_{Inputcurr*1e12}', 'raw']
    #APp_amp
    features_df.loc[
        f'APp_amp_{Inputcurr*1e12}', 'raw'] = sweep_ext.spike_feature(
            "peak_v")[-2] - features_df.loc[f'E_rest_{Inputcurr*1e12}', 'raw']
    #AP1_width
    features_df.loc[f'AP1_width_{Inputcurr*1e12}',
                    'raw'] = sweep_ext.spike_feature("width")[0]
    #APp_width
    features_df.loc[f'APp_width_{Inputcurr*1e12}',
                    'raw'] = sweep_ext.spike_feature("width")[-2]
    #AP1_thresh
    features_df.loc[f'AP1_thresh_{Inputcurr*1e12}',
                    'raw'] = sweep_ext.spike_feature("threshold_v")[0]
    #APp_thresh
    features_df.loc[f'APp_thresh_{Inputcurr*1e12}',
                    'raw'] = sweep_ext.spike_feature("threshold_v")[-2]
    #AP1_lat
    features_df.loc[
        f'AP1_lat_{Inputcurr*1e12}',
        'raw'] = sweep_ext.spike_feature("threshold_t")[0] - stim_start
    #ISI1
    features_df.loc[f'ISI1_{Inputcurr*1e12}', 'raw'] = sweep_ext.spike_feature(
        "peak_t")[1] - sweep_ext.spike_feature("peak_t")[0]
    #ISIl
    features_df.loc[f'ISIl_{Inputcurr*1e12}', 'raw'] = sweep_ext.spike_feature(
        "peak_t")[-1] - sweep_ext.spike_feature("peak_t")[-2]
    # #ISIavg
    pt = sweep_ext.spike_feature("peak_t")
    features_df.loc[f'ISIavg_{Inputcurr*1e12}', 'raw'] = np.nanmean(
        [s - f for s, f in zip(pt[1:], pt[:-1])])
    # features_df.loc[f'ISIavg_{Inputcurr*1e12}','raw'] = 'skip'
    #freq
    features_df.loc[f'freq_{Inputcurr*1e12}', 'raw'] = len(
        sweep_ext.spike_feature("peak_t")) / (stim_end - stim_start)
    #Adptn_id = 1-ISI1/ISIl
    features_df.loc[f'Adptn_id_{Inputcurr*1e12}', 'raw'] = 1 - features_df.loc[
        f'ISI1_{Inputcurr*1e12}',
        'raw'] / features_df.loc[f'ISIl_{Inputcurr*1e12}', 'raw']
    #fAHP_AP1_amp
    features_df.loc[f'fAHP_AP1_amp_{Inputcurr*1e12}',
                    'raw'] = sweep_ext.spike_feature("fast_trough_v")[
                        0] - features_df.loc[f'E_rest_{Inputcurr*1e12}', 'raw']
    #fAHP_APp_amp
    features_df.loc[f'fAHP_APp_amp_{Inputcurr*1e12}',
                    'raw'] = sweep_ext.spike_feature("fast_trough_v")[
                        -2] - features_df.loc[f'E_rest_{Inputcurr*1e12}',
                                              'raw']
    # #mAHP_AP1_amp
    # features_df.loc[f'mAHP_AP1_amp_{Inputcurr*1e12}','raw'] = sweep_ext.spike_feature("slow_trough_v")[0] - features_df.loc[f'E_rest_{Inputcurr*1e12}','raw']
    features_df.loc[f'mAHP_AP1_amp_{Inputcurr*1e12}', 'raw'] = 'skip'
    #mAHP_APp_amp
    features_df.loc[f'mAHP_APp_amp_{Inputcurr*1e12}',
                    'raw'] = sweep_ext.spike_feature("slow_trough_v")[
                        -2] - features_df.loc[f'E_rest_{Inputcurr*1e12}',
                                              'raw']
    # #mAHP_AP1_dur
    # features_df.loc[f'mAHP_AP1_dur_{Inputcurr*1e12}','raw'] = (sweep_ext.spike_feature("slow_trough_t")[0] - sweep_ext.spike_feature("peak_t")[0])/features_df.loc[f'ISI1_{Inputcurr*1e12}','raw']
    features_df.loc[f'mAHP_AP1_dur_{Inputcurr*1e12}', 'raw'] = 'skip'
    #mAHP_APp_dur = mAHP of second last spike (penultimate)
    features_df.loc[f'mAHP_APp_dur_{Inputcurr*1e12}',
                    'raw'] = (sweep_ext.spike_feature("slow_trough_t")[-2] -
                              sweep_ext.spike_feature("peak_t")[-2]
                              ) / features_df.loc[f'ISIl_{Inputcurr*1e12}',
                                                  'raw']
    # #ADP_AP1_amp
    # features_df.loc[f'ADP_AP1_amp_{Inputcurr*1e12}','raw'] = sweep_ext.spike_feature("adp_v")[0] - features_df.loc[f'E_rest_{Inputcurr*1e12}','raw']
    features_df.loc[f'ADP_AP1_amp_{Inputcurr*1e12}', 'raw'] = 'skip'
    # #ADP_APp_amp
    # features_df.loc[f'ADP_APp_amp_{Inputcurr*1e12}','raw'] = sweep_ext.spike_feature("adp_v")[-2] - features_df.loc[f'E_rest_{Inputcurr*1e12}','raw']
    features_df.loc[f'ADP_APp_amp_{Inputcurr*1e12}', 'raw'] = 'skip'
    #mAHP_stimend_amp = within 50ms
    end50_idx = (np.abs(t - stim_end - 50e-3)).argmin()
    features_df.loc[f'mAHP_stimend_amp_{Inputcurr*1e12}', 'raw'] = np.min(
        v[end_idx:end50_idx]) - features_df.loc[f'E_rest_{Inputcurr*1e12}',
                                                'raw']
    #sAHP_stimend_amp = within 200ms
    end200_idx = (np.abs(t - stim_end - 200e-3)).argmin()
    features_df.loc[f'sAHP_stimend_amp_{Inputcurr*1e12}', 'raw'] = np.min(
        v[end_idx:end200_idx]) - features_df.loc[f'E_rest_{Inputcurr*1e12}',
                                                 'raw']

    features_df = features_df.replace('', np.nan)

    features_df['rescaled'] = ''
    # Rescale each raw value linearly onto [-1, 1] using its Min/Max range;
    # 'skip' rows get 0 and missing (NaN) features get a flat penalty.
    # NOTE(review): the loop variable `i` shadows the stimulus current
    # array built earlier (which is no longer needed at this point).
    for i in features_df.index:
        if features_df.loc[i, 'raw'] == 'skip':
            features_df.loc[i, 'rescaled'] = 0
        elif np.isnan(features_df.loc[i, 'raw']):
            features_df.loc[
                i,
                'rescaled'] = 10  #Penalty for not having the feature altogether
        else:
            Max = features_df.loc[i, 'Max']
            Min = features_df.loc[i, 'Min']
            features_df.loc[i, 'rescaled'] = 2 / (Max - Min) * (
                features_df.loc[i, 'raw'] - Min) - 1
    features_df = features_df.replace('', np.nan)

    features_df['cost'] = ''
    # Cost = |rescaled| * weight, applied only to features that fall
    # outside the acceptable [-1, 1] band; in-band features cost 0.
    for i in features_df.index:
        if features_df.loc[i, 'rescaled'] != '' and ~np.isnan(
                features_df.loc[i, 'rescaled']):
            if features_df.loc[i, 'rescaled'] > 1 or features_df.loc[
                    i, 'rescaled'] < -1:
                features_df.loc[i, 'cost'] = np.abs(
                    features_df.loc[i, 'rescaled']) * features_df.loc[i,
                                                                      'weight']
            else:
                features_df.loc[i, 'cost'] = 0
    features_df = features_df.replace('', np.nan)

    return features_df
예제 #12
0
def get_features(filename, nameit):
    """Run a NEURON hoc simulation and extract spike/AHP features.

    Opens *filename* (a hoc file that already defines recording vectors
    ``recv`` (mV) and ``rect`` (ms)) and computes electrophysiological
    features for a fixed 150 pA, 500 ms current step starting at 1 s.

    Parameters
    ----------
    filename : path to the hoc file of the simulation, with vectors
        recv and rect already present
    nameit : label stored under the 'Cell name' key of the result

    Returns
    -------
    [features, t, v] : feature dict, time base in s, voltage trace in mV

    NOTE(review): most features index ``spike_feature(...)[0]`` / ``[-2]``
    and will raise IndexError for traces with fewer than two spikes —
    confirm callers guarantee a multi-spike response.
    """
    neuron.xopen(filename)
    stim_start = 1000e-3  # s
    stim_end = 1500e-3  # s
    features = {}
    # Extra bookkeeping entries
    features['Cell name'] = nameit
    features['stim_start'] = stim_start
    features['stim_end'] = stim_end
    Inputcurr = 150e-12  # stimulus amplitude in A

    v = np.array(h.recv)  # membrane potential in mV
    t = np.array(h.rect) * 1e-3  # time: ms -> s
    start_idx = (np.abs(t - stim_start)).argmin()
    end_idx = (np.abs(t - stim_end)).argmin()
    # Reconstruct the injected-current trace (pA) for the extractor.
    i = np.zeros(len(t))
    i[start_idx:end_idx] = Inputcurr * 1e12  # in pA

    sweep_ext = EphysSweepFeatureExtractor(t=t, v=v, i=i)
    sweep_ext.process_spikes()

    # E_rest: mean voltage wherever no current is injected
    features[f'E_rest_{Inputcurr*1e12}'] = np.nanmean(v[i == 0])

    # AP1_amp: first spike peak relative to rest
    features[f'AP1_amp_{Inputcurr*1e12}'] = sweep_ext.spike_feature("peak_v")[0] - features[f'E_rest_{Inputcurr*1e12}']

    # APp_amp: penultimate spike peak relative to rest
    features[f'APp_amp_{Inputcurr*1e12}'] = sweep_ext.spike_feature("peak_v")[-2] - features[f'E_rest_{Inputcurr*1e12}']

    # AP1_width
    features[f'AP1_width_{Inputcurr*1e12}'] = sweep_ext.spike_feature("width")[0]

    # APp_width
    features[f'APp_width_{Inputcurr*1e12}'] = sweep_ext.spike_feature("width")[-2]

    # AP1_thresh
    features[f'AP1_thresh_{Inputcurr*1e12}'] = sweep_ext.spike_feature("threshold_v")[0]

    # APp_thresh
    features[f'APp_thresh_{Inputcurr*1e12}'] = sweep_ext.spike_feature("threshold_v")[-2]

    # AP1_lat: latency from stimulus onset to first threshold crossing
    features[f'AP1_lat_{Inputcurr*1e12}'] = sweep_ext.spike_feature("threshold_t")[0] - stim_start

    # ISI1: first inter-spike interval
    features[f'ISI1_{Inputcurr*1e12}'] = sweep_ext.spike_feature("peak_t")[1] - sweep_ext.spike_feature("peak_t")[0]

    # ISIl: last inter-spike interval
    features[f'ISIl_{Inputcurr*1e12}'] = sweep_ext.spike_feature("peak_t")[-1] - sweep_ext.spike_feature("peak_t")[-2]

    # ISIavg: mean of consecutive peak-time differences
    pt = sweep_ext.spike_feature("peak_t")
    features[f'ISIavg_{Inputcurr*1e12}'] = np.nanmean([s - f for s, f in zip(pt[1:], pt[:-1])])

    # freq: spike count over the stimulus window
    features[f'freq_{Inputcurr*1e12}'] = len(sweep_ext.spike_feature("peak_t")) / (stim_end - stim_start)

    # Adptn_id = 1 - ISI1/ISIl (spike-frequency adaptation index)
    features[f'Adptn_id_{Inputcurr*1e12}'] = 1 - features[f'ISI1_{Inputcurr*1e12}'] / features[f'ISIl_{Inputcurr*1e12}']

    # fAHP_AP1_amp: fast after-hyperpolarization of the first spike
    features[f'fAHP_AP1_amp_{Inputcurr*1e12}'] = sweep_ext.spike_feature("fast_trough_v")[0] - features[f'E_rest_{Inputcurr*1e12}']

    # fAHP_APp_amp
    features[f'fAHP_APp_amp_{Inputcurr*1e12}'] = sweep_ext.spike_feature("fast_trough_v")[-2] - features[f'E_rest_{Inputcurr*1e12}']

    # mAHP_AP1_amp: slow (medium) after-hyperpolarization of the first spike
    features[f'mAHP_AP1_amp_{Inputcurr*1e12}'] = sweep_ext.spike_feature("slow_trough_v")[0] - features[f'E_rest_{Inputcurr*1e12}']

    # mAHP_APp_amp
    features[f'mAHP_APp_amp_{Inputcurr*1e12}'] = sweep_ext.spike_feature("slow_trough_v")[-2] - features[f'E_rest_{Inputcurr*1e12}']

    # mAHP_AP1_dur: AHP duration of the first spike, normalized by ISI1
    features[f'mAHP_AP1_dur_{Inputcurr*1e12}'] = (sweep_ext.spike_feature("slow_trough_t")[0] - sweep_ext.spike_feature("peak_t")[0]) / features[f'ISI1_{Inputcurr*1e12}']

    # mAHP_APp_dur = mAHP of second last spike (penultimate)
    features[f'mAHP_APp_dur_{Inputcurr*1e12}'] = (sweep_ext.spike_feature("slow_trough_t")[-2] - sweep_ext.spike_feature("peak_t")[-2]) / features[f'ISIl_{Inputcurr*1e12}']

    # ADP_AP1_amp: after-depolarization of the first spike
    features[f'ADP_AP1_amp_{Inputcurr*1e12}'] = sweep_ext.spike_feature("adp_v")[0] - features[f'E_rest_{Inputcurr*1e12}']

    # ADP_APp_amp
    features[f'ADP_APp_amp_{Inputcurr*1e12}'] = sweep_ext.spike_feature("adp_v")[-2] - features[f'E_rest_{Inputcurr*1e12}']

    # mAHP_stimend_amp = minimum within 50 ms after stimulus end
    end50_idx = (np.abs(t - stim_end - 50e-3)).argmin()
    features[f'mAHP_stimend_amp_{Inputcurr*1e12}'] = np.min(v[end_idx:end50_idx]) - features[f'E_rest_{Inputcurr*1e12}']

    # sAHP_stimend_amp = minimum within 200 ms after stimulus end
    end200_idx = (np.abs(t - stim_end - 200e-3)).argmin()
    features[f'sAHP_stimend_amp_{Inputcurr*1e12}'] = np.min(v[end_idx:end200_idx]) - features[f'E_rest_{Inputcurr*1e12}']

    return [features, t, v]
예제 #13
0
    def calculate_feature_errors(self, t_ms, v, i):
        """Score a simulated trace against the configured target features.

        Returns one absolute z-score error per name in
        ``self.description.data['features']``.  Traces that fail sanity
        checks (do not return to rest, spike before the stimulus, have too
        few spikes, or show strongly bimodal spike peaks) instead receive a
        uniform variance-scaled penalty for every feature.

        Parameters
        ----------
        t_ms : time base of the trace, in milliseconds
        v : membrane voltage trace
        i : stimulus current trace

        Returns
        -------
        numpy array of error values, one per target feature name
        """
        # Special case checks and penalty values
        minimum_num_spikes = 2
        missing_penalty_value = 20.0
        max_fail_penalty = 250.0
        min_fail_penalty = 75.0
        overkill_reduction = 0.75
        variance_factor = 0.1

        fail_trace = False

        # Stimulus timing comes from self.stim in ms; convert everything to s.
        delay = self.stim.delay * 1e-3
        duration = self.stim.dur * 1e-3
        t = t_ms * 1e-3
        feature_names = self.description.data['features']

        # penalize for failing to return to rest
        start_index = np.flatnonzero(t >= delay)[0]
        if np.abs(v[-1] - v[:start_index].mean()) > 2.0:
            fail_trace = True
        else:
            # Also fail the trace if it spikes spontaneously before stimulus onset.
            swp = EphysSweepFeatureExtractor(t,
                                             v,
                                             i,
                                             start=0,
                                             end=delay,
                                             filter=None)
            swp.process_spikes()
            if swp.sweep_feature("avg_rate") > 0:
                fail_trace = True

        # Reindex targets by name for O(1) mean/stdev lookup below.
        target_features = self.description.data['target_features']
        target_features_dict = {
            f["name"]: {
                "mean": f["mean"],
                "stdev": f["stdev"]
            }
            for f in target_features
        }

        if not fail_trace:
            # Extract features over the stimulus window itself.
            swp = EphysSweepFeatureExtractor(t,
                                             v,
                                             i,
                                             start=delay,
                                             end=(delay + duration),
                                             filter=None)
            swp.process_spikes()
            if len(swp.spikes()) < minimum_num_spikes:  # Enough spikes?
                fail_trace = True
            else:
                # Compare per-spike peak error against the error of the mean
                # peak to detect traces whose spikes split into two modes.
                avg_per_spike_peak_error = np.mean([
                    abs(spk["peak_v"] - target_features_dict["peak_v"]["mean"])
                    for spk in swp.spikes()
                ])
                avg_overall_error = abs(
                    target_features_dict["peak_v"]["mean"] -
                    swp.spike_feature("peak_v").mean())
                if avg_per_spike_peak_error > 3.0 * avg_overall_error:  # Weird bi-modality of spikes; 3.0 is arbitrary
                    fail_trace = True

        if fail_trace:
            # Failed traces get one uniform penalty for every feature; the
            # penalty shrinks with the trace's variance over the first half
            # of the stimulus (plus 100 ms of pre-stimulus baseline), so
            # "livelier" failures are punished less.
            variance_start = np.flatnonzero(t >= delay - 0.1)[0]
            variance_end = np.flatnonzero(t >= (delay + duration) / 2.0)[0]
            trace_variance = v[variance_start:variance_end].var()
            error_value = max(
                max_fail_penalty - trace_variance * variance_factor,
                min_fail_penalty)
            errs = np.ones(len(feature_names)) * error_value
        else:
            errs = []

            # Calculate additional features not done by swp.process_spikes()
            baseline_v = swp.sweep_feature("v_baseline")
            other_features = {}
            threshold_t = swp.spike_feature("threshold_t")
            fast_trough_t = swp.spike_feature("fast_trough_t")
            slow_trough_t = swp.spike_feature("slow_trough_t")

            # Fast-to-slow trough delay, normalized by the following ISI;
            # NaNs (spikes with no slow trough) count as zero delay.
            delta_t = slow_trough_t - fast_trough_t
            delta_t[np.isnan(delta_t)] = 0.
            other_features["slow_trough_delta_time"] = np.mean(
                delta_t[:-1] / np.diff(threshold_t))

            fast_trough_v = swp.spike_feature("fast_trough_v")
            slow_trough_v = swp.spike_feature("slow_trough_v")
            delta_v = fast_trough_v - slow_trough_v
            delta_v[np.isnan(delta_v)] = 0.
            other_features["slow_trough_delta_v"] = delta_v.mean()

            for f in feature_names:
                target_mean = target_features_dict[f]['mean']
                target_stdev = target_features_dict[f]['stdev']

                if target_stdev == 0:
                    print("Feature with 0 stdev: ", f)

                # Look the model value up as a spike feature first, then a
                # sweep feature, then one of the extras computed above.
                if f in swp.spike_feature_keys():
                    model_mean = swp.spike_feature(f).mean()
                elif f in swp.sweep_feature_keys():
                    model_mean = swp.sweep_feature(f)
                elif f in other_features:
                    model_mean = other_features[f]
                else:
                    model_mean = np.nan

                if np.isnan(model_mean):
                    errs.append(missing_penalty_value)
                else:
                    # Absolute z-score of the model mean vs the target.
                    errs.append(
                        np.abs((model_mean - target_mean) / target_stdev))

            errs = np.array(errs)
        return errs
 plt.plot(t, v, label='trace numer = ' + str(iteration_number))
 try:
     Trace_with_features = EphysSweepFeatureExtractor(
         t=t,
         v=v,
         filter=float(trace.sampling_rate) / 2500,
         min_peak=-20.0,
         dv_cutoff=20.0,
         max_interval=0.005,
         min_height=2.0,
         thresh_frac=0.05,
         baseline_interval=0.1,
         baseline_detect_thresh=0.3,
         id=None)
     Trace_with_features.process_spikes()
     neuron_threshold_v = Trace_with_features.spike_feature(
         "threshold_v")
     #                        print(threshold_state)
     if threshold_state == 0 and len(neuron_threshold_v) >= 1:
         neuron_threshold_v = Trace_with_features.spike_feature(
             "threshold_v")[0]
         neuron_threshold_t = Trace_with_features.spike_feature(
             "threshold_t")[0]
         trough_v = Trace_with_features.spike_feature("trough_v")[0]
         trough_t = Trace_with_features.spike_feature("trough_t")[0]
         plt.plot(neuron_threshold_t,
                  neuron_threshold_v,
                  'o',
                  color='k',
                  label='threshold voltage')
         plt.plot(trough_t,
                  trough_v,
예제 #15
0
# stimulus is a numpy array in amps
stimulus = sweep_data['stimulus']

# response is a numpy array in volts
response = sweep_data['response']  # fixed typo: was 'reponse'

# sampling rate is in Hz
sampling_rate = sweep_data['sampling_rate']

# start/stop indices that exclude the experimental test pulse (if applicable)
index_range = sweep_data['index_range']

sweep_number = 35
sweep_data = data_set.get_sweep(sweep_number)

# Slice off everything after the sweep's valid index range.
index_range = sweep_data["index_range"]
i = sweep_data["stimulus"][0:index_range[1] + 1]  # in A
v = sweep_data["response"][0:index_range[1] + 1]  # in V
i *= 1e12  # to pA
v *= 1e3  # to mV

# Rebuild the time base from the sampling rate.
sampling_rate = sweep_data["sampling_rate"]  # in Hz
t = np.arange(0, len(v)) * (1.0 / sampling_rate)

# Analyze only the 1 s stimulus window (1.02 s - 2.02 s).
sweep_ext = EphysSweepFeatureExtractor(t=t, v=v, i=i, start=1.02, end=2.02)
sweep_ext.process_spikes()

print("Avg spike threshold: %.01f mV" %
      sweep_ext.spike_feature("threshold_v").mean())
print("Avg spike width: %.02f ms" %
      (1e3 * np.nanmean(sweep_ext.spike_feature("width"))))
예제 #16
0
                traces = 'traces compiled'
            else:
                 traces = 'trace_alone'
#                    print (Trace_with_features.spike_feature_keys())
            protocol_type = 'Current_clamp'
            v = v.magnitude
            Vm_trace = np.mean(v[len(v)-300:len(v)])
            tf = trace.t_stop
            ti = trace.t_start
            t = np.linspace(0,float(tf - ti), len(v))                    
#                    print(trace.sampling_rate)
            plt.plot(t,v,label = 'trace numer = '+str(iteration_number))
            try:
                Trace_with_features = EphysSweepFeatureExtractor(t=t, v=v, filter = float(trace.sampling_rate)/2500,min_peak=-20.0, dv_cutoff=20.0, max_interval=0.005, min_height=2.0, thresh_frac=0.05, baseline_interval=0.1, baseline_detect_thresh=0.3, id=None)
                Trace_with_features.process_spikes()
                neuron_threshold_v = Trace_with_features.spike_feature("threshold_v")
#                        print(threshold_state)
                if threshold_state == 0 and len(neuron_threshold_v) >=1:
                    neuron_threshold_v = Trace_with_features.spike_feature("threshold_v")[0]
                    neuron_threshold_t = Trace_with_features.spike_feature("threshold_t")[0]
                    trough_v = Trace_with_features.spike_feature("trough_v")[0]
                    trough_t = Trace_with_features.spike_feature("trough_t")[0]
                    plt.plot(neuron_threshold_t,neuron_threshold_v,'o', color ='k',label = 'threshold voltage')
                    plt.plot(trough_t,trough_v,'o', color ='r',label = 'trough_v')
#                            print(neuron_threshold_t,neuron_threshold_v)
                    plt.figtext(1, 0.20, "trough_v = "+ str(np.around(trough_v, decimals = 2))+"mV")
                    plt.figtext(1, 0.15, "trough_t = "+ str(np.around(trough_t, decimals = 2))+'s')
                    threshold_state = 1
#                        print(Trace_with_features.filter)
#                        plt.plot(Trace_with_features.spike_feature("peak_t"),Trace_with_features.spike_feature("peak_v"),'r+', label = 'action potentials')
#                        plt.plot([],[],' ',label ='number of peaks = '+ str(len(Trace_with_features.spike_feature("peak_v"))),color = 'red')