Example No. 1
def test_extractor_on_zero_voltage():
    t = np.arange(0, 4000) * 5e-6
    v = np.zeros_like(t)
    i = np.zeros_like(t)

    ext = EphysSweepSetFeatureExtractor([t], [v], [i])
    ext.process_spikes()
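
Since both channels are flat zeros, process_spikes() should detect nothing; a follow-up assertion along these lines (an assumption about the extractor's behavior, not part of the original test) would make that explicit:

    assert len(ext.sweeps()[0].spikes()) == 0  # a flat trace yields no spikes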
Example No. 2
def allen_format(volts, times, key):
    '''
    Synopsis:
        At its most fundamental level, the AllenSDK still calls a single trace a sweep.
        In other words there are no single traces, only sweeps of size 1.
        This is why unitary objects get wrapped in iterable containers like [times].

    Inputs:
        np.arrays of time series: a time recording vector and a membrane potential
        recording, as floats, probably with units stripped away.
    Outputs:
        a tuple: the value of the requested feature key, plus a dict of all Allen
        features for the sweep. To keep the digest manageable, per-spike features
        are averaged across the spike train. A usage sketch follows the function.
    '''
    ext = EphysSweepSetFeatureExtractor([times], [volts])
    ext.process_spikes()
    swp = ext.sweeps()[0]
    spikes = swp.spikes()
    if len(spikes) == 0:
        return (None, None)
    meaned_features_1 = {}
    for sk in spikes[0].keys():
        if 'isi_type' not in sk:
            # isi_type is a string label; average only the numeric values across spikes.
            meaned_features_1[sk] = np.mean(
                [spike[sk] for spike in spikes if not isinstance(spike[sk], str)])
    allen_features = {}
    for s in swp.sweep_feature_keys():
        if 'isi_type' not in s:
            allen_features[s] = swp.sweep_feature(s)

    allen_features.update(meaned_features_1)
    return allen_features[key], allen_features
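
A minimal usage sketch for the helper above (a sketch, assuming numpy is imported as np, EphysSweepSetFeatureExtractor comes from allensdk.ephys.ephys_extractor, and 'peak_v' is a valid per-spike feature key); a flat trace has no spikes, so the call returns (None, None):

    times = np.arange(0, 4000) * 5e-6   # synthetic time base in seconds
    volts = np.zeros_like(times)        # flat membrane potential: no spikes
    value, features = allen_format(volts, times, 'peak_v')
    print(value, features)              # expected: None None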
Example No. 3
def test_extractor_on_zero_voltage():
    t = np.arange(0, 4000) * 5e-5
    v = np.zeros_like(t)
    i = np.zeros_like(t)

    ext = EphysSweepSetFeatureExtractor([t], [v], [i])
    ext.process_spikes()
Example No. 4
def test_extractor_on_sample_data_with_i():
    data = np.loadtxt(os.path.join(path, "data/spike_test_pair.txt"))
    t = data[:, 0]
    v = data[:, 1]
    i = np.zeros_like(v)

    ext = EphysSweepSetFeatureExtractor([t], [v], [i])
    ext.process_spikes()
Example No. 5
def test_extractor_with_high_init_dvdt():
    data = np.loadtxt(os.path.join(path, "data/spike_test_high_init_dvdt.txt"))
    t = data[:, 0]
    v = data[:, 1]

    ext = EphysSweepSetFeatureExtractor([t], [v])
    ext.process_spikes()
    expected_thresh_ind = np.array([11222, 16258, 24060])
    sweep = ext.sweeps()[0]
    assert np.allclose(sweep.spike_feature("threshold_index"), expected_thresh_ind)
Example No. 6
def test_extractor_on_variable_time_step():
    data = np.loadtxt(os.path.join(path, "data/spike_test_var_dt.txt"))
    t = data[:, 0]
    v = data[:, 1]

    ext = EphysSweepSetFeatureExtractor([t], [v])
    ext.process_spikes()
    expected_thresh_ind = np.array([73, 183, 314, 463, 616, 770])
    sweep = ext.sweeps()[0]
    assert np.allclose(sweep.spike_feature("threshold_index"), expected_thresh_ind)
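
Note that the extractor accepts a non-uniform time base; threshold_index reports positions into the raw t/v arrays rather than times, so the expected values stay well defined even though the sampling interval varies.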
Example No. 7
def test_extractor_with_high_init_dvdt():
    data = np.loadtxt(os.path.join(path, "data/spike_test_high_init_dvdt.txt"))
    t = data[:, 0]
    v = data[:, 1]

    ext = EphysSweepSetFeatureExtractor([t], [v])
    ext.process_spikes()
    expected_thresh_ind = np.array([11222, 16256, 24058])
    sweep = ext.sweeps()[0]
    assert np.allclose(sweep.spike_feature("threshold_index"),
                       expected_thresh_ind)
Example No. 8
def test_extractor_input_resistance():
    t = np.arange(0, 1.0, 5e-6)
    v1 = np.ones_like(t) * -5.
    v2 = np.ones_like(t) * -10.
    i1 = np.ones_like(t) * -50.
    i2 = np.ones_like(t) * -100.

    ext = EphysSweepSetFeatureExtractor([t, t], [v1, v2], [i1, i2])
    ri = input_resistance(ext)
    assert np.allclose(ri, 100.)
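
The expected value follows from Ohm's law: assuming v is in mV and i in pA, the slope between the two steady-state sweeps is ΔV/ΔI = (-5 - (-10)) mV / (-50 - (-100)) pA = 5/50 mV/pA = 0.1 GΩ = 100 MΩ, which matches the asserted 100.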
Example No. 9
def test_extractor_wrong_inputs():
    data = np.loadtxt(os.path.join(path, "data/spike_test_pair.txt"))
    t = data[:, 0]
    v = data[:, 1]
    i = np.zeros_like(v)

    with pytest.raises(ValueError):
        ext = EphysSweepSetFeatureExtractor(t, v, i)

    with pytest.raises(ValueError):
        ext = EphysSweepSetFeatureExtractor([t], v, i)

    with pytest.raises(ValueError):
        ext = EphysSweepSetFeatureExtractor([t], [v], i)

    with pytest.raises(ValueError):
        ext = EphysSweepSetFeatureExtractor([t, t], [v], [i])

    with pytest.raises(ValueError):
        ext = EphysSweepSetFeatureExtractor([t, t], [v, v], [i])
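
Each failing case violates the constructor's contract: t, v, and i must be parallel lists of per-sweep arrays of equal list length, as in Example No. 8. A valid two-sweep call, by contrast, would be:

    ext = EphysSweepSetFeatureExtractor([t, t], [v, v], [i, i])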
Example No. 10
def test_extractor_on_sample_data():
    data = np.loadtxt(os.path.join(path, "data/spike_test_pair.txt"))
    t = data[:, 0]
    v = data[:, 1]

    ext = EphysSweepSetFeatureExtractor([t], [v])
    ext.process_spikes()
    swp = ext.sweeps()[0]
    spikes = swp.spikes()

    keys = swp.spike_feature_keys()
    swp_keys = swp.sweep_feature_keys()
    result = swp.spike_feature(keys[0])
    result = swp.sweep_feature("first_isi")
    result = ext.sweep_features("first_isi")
    result = ext.spike_feature_averages(keys[0])

    with pytest.raises(KeyError):
        result = swp.spike_feature("nonexistent_key")

    with pytest.raises(KeyError):
        result = swp.sweep_feature("nonexistent_key")
Example No. 11
def prepare_stage_1(description, passive_fit_data):
    output_directory = description.manifest.get_path('WORKDIR')
    neuronal_model_data = ju.read(description.manifest.get_path('neuronal_model_data'))
    specimen_data = neuronal_model_data['specimen']
    specimen_id = neuronal_model_data['specimen_id']
    is_spiny = not any(t['name'] == 'dendrite type - aspiny' for t in specimen_data['specimen_tags'])
    all_sweeps = specimen_data['ephys_sweeps']
    data_set = NwbDataSet(description.manifest.get_path('stimulus_path'))
    swc_path = description.manifest.get_path('MORPHOLOGY')
    
    if not os.path.exists(output_directory):
        os.makedirs(output_directory)

    ra = passive_fit_data['ra']
    cm1 = passive_fit_data['cm1']
    cm2 = passive_fit_data['cm2']

    # Check for fi curve shift to decide to use core1 or core2
    fi_shift, n_core2 = check_fi_shift.estimate_fi_shift(data_set, all_sweeps)
    fi_shift_threshold = 30.0
    sweeps_to_fit = []
    if abs(fi_shift) > fi_shift_threshold:
        _fit_stage_1_log.info("FI curve shifted; using Core 1")
        sweeps_to_fit = find_core1_trace(data_set, all_sweeps)
    else:
        sweeps_to_fit = find_core2_trace(data_set, all_sweeps)

        if sweeps_to_fit == []:
            _fit_stage_1_log.info("Not enough good Core 2 traces; using Core 1")
            sweeps_to_fit = find_core1_trace(data_set, all_sweeps)

    _fit_stage_1_log.debug("will use sweeps: " + str(sweeps_to_fit))

    jxn = -14.0  # junction potential correction (mV); recorded in target.json below

    t_set = []
    v_set = []
    i_set = []
    for s in sweeps_to_fit:
        v, i, t = ephys_utils.get_sweep_v_i_t_from_set(data_set, s)
        v += jxn
        # The step characteristics of the last sweep are reused below for the
        # extractor window and for the stimulus entry in target.json.
        stim_start, stim_dur, stim_amp, start_idx, end_idx = \
            ephys_utils.get_step_stim_characteristics(i, t)
        t_set.append(t)
        v_set.append(v)
        i_set.append(i)
    ext = EphysSweepSetFeatureExtractor(t_set, v_set, i_set, start=stim_start,
                                        end=(stim_start + stim_dur))
    ext.process_spikes()

    ft = {}
    blacklist = ["isi_type"]
    for k in ext.sweeps()[0].spike_feature_keys():
        if k in blacklist:
            continue
        pair = {}
        pair["mean"] = float(ext.spike_feature_averages(k).mean())
        pair["stdev"] = float(ext.spike_feature_averages(k).std())
        ft[k] = pair

    # "Delta" features
    sweep_avg_slow_trough_delta_time = []
    sweep_avg_slow_trough_delta_v = []
    sweep_avg_peak_trough_delta_time = []
    for swp in ext.sweeps():
        threshold_t = swp.spike_feature("threshold_t")
        fast_trough_t = swp.spike_feature("fast_trough_t")
        slow_trough_t = swp.spike_feature("slow_trough_t")

        delta_t = slow_trough_t - fast_trough_t
        delta_t[np.isnan(delta_t)] = 0.
        # Normalize each slow-trough delay by the following interspike interval;
        # the last spike has no following ISI, hence delta_t[:-1].
        sweep_avg_slow_trough_delta_time.append(np.mean(delta_t[:-1] / np.diff(threshold_t)))

        fast_trough_v = swp.spike_feature("fast_trough_v")
        slow_trough_v = swp.spike_feature("slow_trough_v")
        delta_v = fast_trough_v - slow_trough_v
        delta_v[np.isnan(delta_v)] = 0.
        sweep_avg_slow_trough_delta_v.append(delta_v.mean())

    ft["slow_trough_delta_time"] = {"mean": float(np.mean(sweep_avg_slow_trough_delta_time)),
                                    "stdev": float(np.std(sweep_avg_slow_trough_delta_time))}
    ft["slow_trough_delta_v"] = {"mean": float(np.mean(sweep_avg_slow_trough_delta_v)),
                                 "stdev": float(np.std(sweep_avg_slow_trough_delta_v))}

    baseline_v = float(ext.sweep_features("v_baseline").mean())
    passive_fit_data["e_pas"] = baseline_v
    for k in ext.sweeps()[0].sweep_feature_keys():
        pair = {}
        pair["mean"] = float(ext.sweep_features(k).mean())
        pair["stdev"] = float(ext.sweep_features(k).std())
        ft[k] = pair

    # Determine highest step to check for depolarization block
    noise_1_sweeps, _, _ = ephys_utils.get_sweeps_of_type("C1NSSEED_1", all_sweeps)
    noise_2_sweeps, _, _ = ephys_utils.get_sweeps_of_type("C1NSSEED_2", all_sweeps)
    step_sweeps, _, _ = ephys_utils.get_sweeps_of_type("C1LSCOARSE", all_sweeps)
    all_sweeps = noise_1_sweeps + noise_2_sweeps + step_sweeps
    max_i = 0
    for s in all_sweeps:
        try:
            v, i, t = ephys_utils.get_sweep_v_i_t_from_set(data_set, s['sweep_number'])
        except Exception:
            continue  # skip unreadable sweeps rather than reusing stale v, i, t
        if np.max(i) > max_i:
            max_i = np.max(i)
    max_i += 10   # add 10 pA
    max_i *= 1e-3 # convert to nA

    # ----------- Generate output and submit jobs ---------------

    # Decide which fit(s) we are doing: spininess and spike width normally agree
    # (spiny/wide -> f6, aspiny/narrow -> f12); when they conflict, run both.
    if (is_spiny and ft["width"]["mean"] < 0.8) or (not is_spiny and ft["width"]["mean"] > 0.8):
        fit_types = ["f6", "f12"]
    elif is_spiny:
        fit_types = ["f6"]
    else:
        fit_types = ["f12"]

    # Set up directories
    for fit_type in fit_types:
        fit_type_dir = os.path.join(output_directory, fit_type)
        if not os.path.exists(fit_type_dir):
            os.makedirs(fit_type_dir)
        for seed in SEEDS:
            seed_dir = "{:s}/s{:d}".format(fit_type_dir, seed)
            if not os.path.exists(seed_dir):
                os.makedirs(seed_dir)

    # Collect and save data for target.json file
    target_dict = {}
    target_dict["passive"] = [{
        "ra": ra,
        "cm": { "soma": cm1, "axon": cm1, "dend": cm2 },
        "e_pas": baseline_v
    }]

    # SWC files are whitespace-delimited; column 1 holds the compartment type.
    swc_data = pd.read_csv(swc_path, sep=r'\s+', comment='#', header=None)
    has_apic = False
    if APICAL_DENDRITE_TYPE in pd.unique(swc_data[1]):
        has_apic = True
        _fit_stage_1_log.info("Has apical dendrite")
    else:
        _fit_stage_1_log.info("Does not have apical dendrite")

    if has_apic:
        target_dict["passive"][0]["cm"]["apic"] = cm2

    target_dict["fitting"] = [{
        "junction_potential": jxn,
        "sweeps": sweeps_to_fit,
        "passive_fit_info": passive_fit_data,
        "max_stim_test_na": max_i,        
    }]

    target_dict["stimulus"] = [{
        "amplitude": 1e-3 * stim_amp,
        "delay": 1000.0,
        "duration": 1e3 * stim_dur
    }]

    target_dict["manifest"] = []
    target_dict["manifest"].append({"type": "file", "spec": swc_path, "key": "MORPHOLOGY"})

    target_dict["target_features"] = collect_target_features(ft)

    target_file = os.path.join(output_directory, 'target.json')
    ju.write(target_file, target_dict)

    # Create config.json for each fit type
    config_base_data = ju.read(os.path.join(FIT_BASE_DIR, 'config_base.json'))

    jobs = []
    for fit_type in fit_types:
        config = config_base_data.copy()
        fit_type_dir = os.path.join(output_directory, fit_type)
        config_path = os.path.join(fit_type_dir, "config.json")

        config["biophys"][0]["model_file"] = [ target_file, config_path]
        if has_apic:
            fit_style_file = os.path.join(FIT_BASE_DIR, 'fit_styles', '%s_fit_style.json' % (fit_type))
        else:
            fit_style_file = os.path.join(FIT_BASE_DIR, "fit_styles", "%s_noapic_fit_style.json" % (fit_type))

        config["biophys"][0]["model_file"].append(fit_style_file)
        config["manifest"].append({"type": "dir", "spec": fit_type_dir, "key": "FITDIR"})
        ju.write(config_path, config)

        for seed in SEEDS:
            logfile = os.path.join(output_directory, fit_type, 's%d' % seed, 'stage_1.log')
            jobs.append({
                'config_path': os.path.abspath(config_path),
                'fit_type': fit_type,
                'log': os.path.abspath(logfile),
                'seed': seed,
                'num_processes': DEFAULT_NUM_PROCESSES
            })
    return jobs
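
The return value is a flat list of job descriptors, one per (fit_type, seed) pair. A sketch of how a caller might dispatch them, assuming a hypothetical run_stage_1 worker function:

    for job in jobs:
        # Each descriptor carries the config path, a log destination, the seed,
        # and a process count for the worker (run_stage_1 is illustrative only).
        run_stage_1(job['config_path'], seed=job['seed'],
                    log_path=job['log'], num_processes=job['num_processes'])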
Example No. 12
from collections.abc import Iterable  # used by allen_format's isinstance check


def test_extractor_no_values():
    ext = EphysSweepSetFeatureExtractor()


def allen_format(volts, times):
    '''
    Synopsis:
        At its most fundamental level, the AllenSDK still calls a single trace a sweep.
        In other words there are no single traces, only sweeps of size 1.
        This is why unitary objects get wrapped in iterable containers like [times].

    Inputs:
        np.arrays of time series: a time recording vector and a membrane potential
        recording, as floats, probably with units stripped away.
    Outputs:
        a single-column data frame of Allen features. To keep the digest manageable,
        per-spike features are averaged across the spike train. A usage sketch
        follows the function.
    '''
    ext = EphysSweepSetFeatureExtractor([times], [volts])
    ext.process_spikes()
    swp = ext.sweeps()[0]
    spikes = swp.spikes()  # assumes at least one spike was detected

    meaned_features_1 = {}
    for sk in spikes[0].keys():
        if 'isi_type' not in sk:
            # isi_type is a string label; average only the numeric values across spikes.
            meaned_features_1[sk] = np.mean(
                [spike[sk] for spike in spikes if not isinstance(spike[sk], str)])

    meaned_features_overspikes = {}
    for s in swp.sweep_feature_keys():
        if 'isi_type' not in s:
            try:
                feature = swp.sweep_feature(s)
                if isinstance(feature, Iterable):
                    # Average array-valued sweep features, skipping string entries.
                    meaned_features_overspikes[s] = np.mean(
                        [f for f in feature if not isinstance(f, str)])
                else:
                    meaned_features_overspikes[s] = feature
            except Exception:
                # Record features that cannot be computed for this sweep as None.
                meaned_features_overspikes[s] = None

    # Stack the per-spike (shape) and per-sweep (dynamics) features into one
    # single-column frame; DataFrame.append was removed in pandas 2.0, so use
    # pd.concat instead.
    frame_shape = pd.Series(meaned_features_1).to_frame()
    frame_dynamics = pd.Series(meaned_features_overspikes).to_frame()
    final = pd.concat([frame_shape, frame_dynamics])
    return final
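
A usage sketch for this variant (a sketch, assuming numpy as np, pandas as pd, and the sample file path used by the earlier tests); the result is a single-column DataFrame indexed by feature name:

    data = np.loadtxt('data/spike_test_pair.txt')  # time in column 0, voltage in column 1
    features = allen_format(data[:, 1], data[:, 0])
    print(features.head())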