def test_extractor_on_sample_data():
    """Smoke-test the sweep-set feature extractor on the bundled sample trace."""
    sample = np.loadtxt(os.path.join(path, "data/spike_test_pair.txt"))
    time_col = sample[:, 0]
    voltage_col = sample[:, 1]

    extractor = EphysSweepSetFeatureExtractor([time_col], [voltage_col])
    extractor.process_spikes()
    sweep = extractor.sweeps()[0]

    # Exercise the accessors; returned values are not checked here,
    # only that the calls succeed on processed data.
    sweep.spikes()
    feature_keys = sweep.spike_feature_keys()
    sweep.sweep_feature_keys()
    sweep.spike_feature(feature_keys[0])
    sweep.sweep_feature("first_isi")
    extractor.sweep_features("first_isi")
    extractor.spike_feature_averages(feature_keys[0])

    # Unknown feature names must raise KeyError.
    with pytest.raises(KeyError):
        sweep.spike_feature("nonexistent_key")
    with pytest.raises(KeyError):
        sweep.sweep_feature("nonexistent_key")
def prepare_stage_1(description, passive_fit_data):
    """Assemble target features and per-seed optimization jobs for fit stage 1.

    Loads the specimen's sweeps from the NWB file named in the manifest,
    extracts spike and sweep features, writes ``target.json`` plus a
    per-fit-type ``config.json`` under the working directory, and returns
    the list of jobs to submit.

    Parameters
    ----------
    description : object
        Has a ``manifest`` used to resolve paths (``WORKDIR``,
        ``neuronal_model_data``, ``stimulus_path``, ``MORPHOLOGY``).
    passive_fit_data : dict
        Passive-fit results with keys ``ra``, ``cm1``, ``cm2``.  This
        function additionally stores ``e_pas`` (baseline voltage, mV)
        into it as a side effect.

    Returns
    -------
    list of dict
        One entry per (fit type, seed): config path, fit type, log path,
        seed, and process count.
    """
    output_directory = description.manifest.get_path('WORKDIR')
    neuronal_model_data = ju.read(description.manifest.get_path('neuronal_model_data'))
    specimen_data = neuronal_model_data['specimen']
    # Spiny unless the specimen is explicitly tagged aspiny.
    is_spiny = not any(t['name'] == u'dendrite type - aspiny'
                       for t in specimen_data['specimen_tags'])
    all_sweeps = specimen_data['ephys_sweeps']
    data_set = NwbDataSet(description.manifest.get_path('stimulus_path'))
    swc_path = description.manifest.get_path('MORPHOLOGY')

    if not os.path.exists(output_directory):
        os.makedirs(output_directory)

    ra = passive_fit_data['ra']
    cm1 = passive_fit_data['cm1']
    cm2 = passive_fit_data['cm2']

    # Check for fi curve shift to decide to use core1 or core2
    fi_shift, n_core2 = check_fi_shift.estimate_fi_shift(data_set, all_sweeps)
    fi_shift_threshold = 30.0
    sweeps_to_fit = []
    if abs(fi_shift) > fi_shift_threshold:
        _fit_stage_1_log.info("FI curve shifted; using Core 1")
        sweeps_to_fit = find_core1_trace(data_set, all_sweeps)
    else:
        sweeps_to_fit = find_core2_trace(data_set, all_sweeps)
        if sweeps_to_fit == []:
            _fit_stage_1_log.info("Not enough good Core 2 traces; using Core 1")
            sweeps_to_fit = find_core1_trace(data_set, all_sweeps)
    _fit_stage_1_log.debug("will use sweeps: " + str(sweeps_to_fit))

    jxn = -14.0  # junction potential correction (mV), added to recorded voltage

    t_set = []
    v_set = []
    i_set = []
    for s in sweeps_to_fit:
        v, i, t = ephys_utils.get_sweep_v_i_t_from_set(data_set, s)
        v += jxn
        # NOTE: stim_start/stim_dur/stim_amp from the last sweep are reused
        # below for the extractor window and the stimulus block, matching
        # the original behavior (assumes identical step timing per sweep).
        stim_start, stim_dur, stim_amp, start_idx, end_idx = \
            ephys_utils.get_step_stim_characteristics(i, t)
        t_set.append(t)
        v_set.append(v)
        i_set.append(i)

    ext = EphysSweepSetFeatureExtractor(t_set, v_set, i_set,
                                        start=stim_start,
                                        end=(stim_start + stim_dur))
    ext.process_spikes()

    # Per-spike features: mean/stdev of the per-sweep averages.
    ft = {}
    blacklist = ["isi_type"]
    for k in ext.sweeps()[0].spike_feature_keys():
        if k in blacklist:
            continue
        pair = {}
        pair["mean"] = float(ext.spike_feature_averages(k).mean())
        pair["stdev"] = float(ext.spike_feature_averages(k).std())
        ft[k] = pair

    # "Delta" features
    sweep_avg_slow_trough_delta_time = []
    sweep_avg_slow_trough_delta_v = []
    for swp in ext.sweeps():
        threshold_t = swp.spike_feature("threshold_t")
        fast_trough_t = swp.spike_feature("fast_trough_t")
        slow_trough_t = swp.spike_feature("slow_trough_t")
        delta_t = slow_trough_t - fast_trough_t
        delta_t[np.isnan(delta_t)] = 0.
        # Fraction of each ISI spent between fast and slow trough
        # (last spike has no following ISI, hence [:-1]).
        sweep_avg_slow_trough_delta_time.append(
            np.mean(delta_t[:-1] / np.diff(threshold_t)))
        fast_trough_v = swp.spike_feature("fast_trough_v")
        slow_trough_v = swp.spike_feature("slow_trough_v")
        delta_v = fast_trough_v - slow_trough_v
        delta_v[np.isnan(delta_v)] = 0.
        sweep_avg_slow_trough_delta_v.append(delta_v.mean())

    ft["slow_trough_delta_time"] = {
        "mean": float(np.mean(sweep_avg_slow_trough_delta_time)),
        "stdev": float(np.std(sweep_avg_slow_trough_delta_time))}
    ft["slow_trough_delta_v"] = {
        "mean": float(np.mean(sweep_avg_slow_trough_delta_v)),
        "stdev": float(np.std(sweep_avg_slow_trough_delta_v))}

    baseline_v = float(ext.sweep_features("v_baseline").mean())
    passive_fit_data["e_pas"] = baseline_v  # side effect: record resting potential

    # Per-sweep features: mean/stdev across sweeps.
    for k in ext.sweeps()[0].sweep_feature_keys():
        pair = {}
        pair["mean"] = float(ext.sweep_features(k).mean())
        pair["stdev"] = float(ext.sweep_features(k).std())
        ft[k] = pair

    # Determine highest step to check for depolarization block
    noise_1_sweeps, _, _ = ephys_utils.get_sweeps_of_type("C1NSSEED_1", all_sweeps)
    noise_2_sweeps, _, _ = ephys_utils.get_sweeps_of_type("C1NSSEED_2", all_sweeps)
    step_sweeps, _, _ = ephys_utils.get_sweeps_of_type("C1LSCOARSE", all_sweeps)
    all_sweeps = noise_1_sweeps + noise_2_sweeps + step_sweeps
    max_i = 0
    for s in all_sweeps:
        try:
            v, i, t = ephys_utils.get_sweep_v_i_t_from_set(
                data_set, s['sweep_number'])
        except Exception:
            # Bug fix: the original bare "except: pass" fell through and
            # re-used `i` from the previous iteration (or raised NameError
            # on the first one); skip unreadable sweeps instead.
            _fit_stage_1_log.warning("Could not read sweep %s; skipping",
                                     s['sweep_number'])
            continue
        peak_i = np.max(i)
        if peak_i > max_i:
            max_i = peak_i
    max_i += 10  # add 10 pA
    max_i *= 1e-3  # convert to nA

    # ----------- Generate output and submit jobs ---------------

    # Decide which fit(s) we are doing: ambiguous spike widths get both
    # fit styles, otherwise spiny -> f6, aspiny -> f12.
    if (is_spiny and ft["width"]["mean"] < 0.8) or \
            (not is_spiny and ft["width"]["mean"] > 0.8):
        fit_types = ["f6", "f12"]
    elif is_spiny:
        fit_types = ["f6"]
    else:
        fit_types = ["f12"]

    # Set up directories
    for fit_type in fit_types:
        fit_type_dir = os.path.join(output_directory, fit_type)
        if not os.path.exists(fit_type_dir):
            os.makedirs(fit_type_dir)
        for seed in SEEDS:
            seed_dir = "{:s}/s{:d}".format(fit_type_dir, seed)
            if not os.path.exists(seed_dir):
                os.makedirs(seed_dir)

    # Collect and save data for target.json file
    target_dict = {}
    target_dict["passive"] = [{
        "ra": ra,
        "cm": {"soma": cm1, "axon": cm1, "dend": cm2},
        "e_pas": baseline_v
    }]

    # Raw string fixes the invalid '\s' escape; column 1 of an SWC file is
    # the compartment type.
    swc_data = pd.read_table(swc_path, sep=r'\s', comment='#', header=None)
    has_apic = False
    if APICAL_DENDRITE_TYPE in pd.unique(swc_data[1]):
        has_apic = True
        _fit_stage_1_log.info("Has apical dendrite")
    else:
        _fit_stage_1_log.info("Does not have apical dendrite")

    if has_apic:
        target_dict["passive"][0]["cm"]["apic"] = cm2

    target_dict["fitting"] = [{
        "junction_potential": jxn,
        "sweeps": sweeps_to_fit,
        "passive_fit_info": passive_fit_data,
        "max_stim_test_na": max_i,
    }]
    target_dict["stimulus"] = [{
        "amplitude": 1e-3 * stim_amp,  # pA -> nA
        "delay": 1000.0,
        "duration": 1e3 * stim_dur  # s -> ms
    }]
    target_dict["manifest"] = []
    target_dict["manifest"].append({"type": "file", "spec": swc_path,
                                    "key": "MORPHOLOGY"})
    target_dict["target_features"] = collect_target_features(ft)

    target_file = os.path.join(output_directory, 'target.json')
    ju.write(target_file, target_dict)

    # Create config.json for each fit type
    config_base_data = ju.read(os.path.join(FIT_BASE_DIR, 'config_base.json'))
    jobs = []
    for fit_type in fit_types:
        config = config_base_data.copy()
        fit_type_dir = os.path.join(output_directory, fit_type)
        config_path = os.path.join(fit_type_dir, "config.json")
        config["biophys"][0]["model_file"] = [target_file, config_path]
        if has_apic:
            fit_style_file = os.path.join(
                FIT_BASE_DIR, 'fit_styles', '%s_fit_style.json' % (fit_type))
        else:
            fit_style_file = os.path.join(
                FIT_BASE_DIR, "fit_styles",
                "%s_noapic_fit_style.json" % (fit_type))
        config["biophys"][0]["model_file"].append(fit_style_file)
        config["manifest"].append({"type": "dir", "spec": fit_type_dir,
                                   "key": "FITDIR"})
        ju.write(config_path, config)
        for seed in SEEDS:
            logfile = os.path.join(output_directory, fit_type,
                                   's%d' % seed, 'stage_1.log')
            jobs.append({
                'config_path': os.path.abspath(config_path),
                'fit_type': fit_type,
                'log': os.path.abspath(logfile),
                'seed': seed,
                'num_processes': DEFAULT_NUM_PROCESSES
            })
    return jobs