def feature_vector_input():
    """Load the fixture NWB recording and run long-square analysis on it.

    Downloads the test NWB file into the local ``data`` directory when it is
    not already present, selects the current-clamp long-square sweeps from
    the data set's sweep table, determines the stimulus window from the
    first sweep, and runs ``LongSquareAnalysis`` over that window.

    Returns:
        tuple: ``(lsq_sweeps, lsq_features, lsq_start, lsq_end)`` — the
        sweep set, the analysis result, and the stimulus start/end times.
    """
    test_data_dir = os.path.join(os.path.dirname(__file__), 'data')
    nwb_file_name = "Pvalb-IRES-Cre;Ai14-415796.02.01.01.nwb"
    nwb_path = os.path.join(test_data_dir, nwb_file_name)
    if not os.path.exists(nwb_path):
        # Fetch the fixture only when it is not already cached locally.
        download_file(nwb_file_name, nwb_path)

    data_set = AibsDataSet(nwb_file=nwb_path, ontology=ontology)

    # Current-clamp long-square sweeps, ordered by sweep number.
    sweep_numbers = (
        data_set.filtered_sweep_table(
            clamp_mode=data_set.CURRENT_CLAMP,
            stimuli=ontology.long_square_names,
        )
        .sweep_number.sort_values()
        .values
    )
    lsq_sweeps = data_set.sweep_set(sweep_numbers)

    # Stimulus window measured from the first sweep's current trace.
    first_sweep = lsq_sweeps.sweeps[0]
    lsq_start, lsq_dur, _, _, _ = stf.get_stim_characteristics(
        first_sweep.i, first_sweep.t)
    lsq_end = lsq_start + lsq_dur

    detection = dsf.detection_parameters(data_set.LONG_SQUARE)
    lsq_spx, lsq_spfx = dsf.extractors_for_sweeps(
        lsq_sweeps, start=lsq_start, end=lsq_end, **detection)
    lsq_an = spa.LongSquareAnalysis(lsq_spx, lsq_spfx,
                                    subthresh_min_amp=-100.)
    lsq_features = lsq_an.analyze(lsq_sweeps)

    return lsq_sweeps, lsq_features, lsq_start, lsq_end
def feature_vector_input():
    """Load the fixture NWB recording and prepare long-square analysis.

    Uses a fixed list of known long-square sweep numbers for this fixture
    file rather than querying the sweep table, trims each sweep to its
    "recording" epoch, aligns time zero to the start of the "experiment"
    epoch, then runs ``LongSquareAnalysis`` over the stimulus window.

    Returns:
        tuple: ``(lsq_sweeps, lsq_features, lsq_start, lsq_end)`` — the
        sweep set, the analysis result, and the stimulus start/end times.
    """
    data_dir = os.path.join(os.path.dirname(__file__), 'data')
    nwb_name = "Pvalb-IRES-Cre;Ai14-415796.02.01.01.nwb"
    nwb_path = os.path.join(data_dir, nwb_name)
    if not os.path.exists(nwb_path):
        # Fetch the fixture only when it is not already cached locally.
        download_file(nwb_name, nwb_path)

    data_set = AibsDataSet(nwb_file=nwb_path, ontology=ontology)

    # Known long-square sweep numbers for this particular fixture file.
    lsq_sweeps = data_set.sweep_set([4, 5, 6, 16, 17, 18, 19, 20, 21])
    lsq_sweeps.select_epoch("recording")
    lsq_sweeps.align_to_start_of_epoch("experiment")

    # Stimulus window measured from the first sweep's current trace.
    first_sweep = lsq_sweeps.sweeps[0]
    lsq_start, lsq_dur, _, _, _ = stf.get_stim_characteristics(
        first_sweep.i, first_sweep.t)
    lsq_end = lsq_start + lsq_dur

    detection = dsf.detection_parameters(data_set.LONG_SQUARE)
    lsq_spx, lsq_spfx = dsf.extractors_for_sweeps(
        lsq_sweeps, start=lsq_start, end=lsq_end, **detection)
    lsq_an = spa.LongSquareAnalysis(lsq_spx, lsq_spfx,
                                    subthresh_min_amp=-100.)
    lsq_features = lsq_an.analyze(lsq_sweeps)

    return lsq_sweeps, lsq_features, lsq_start, lsq_end
import ipfx.ephys_features as ft # download a specific experiment NWB file via AllenSDK ct = CellTypesApi() specimen_id = 595570553 nwb_file = "%d.nwb" % specimen_id if not os.path.exists(nwb_file): ct.save_ephys_data(specimen_id, nwb_file) sweep_info = ct.get_ephys_sweeps(specimen_id) # build a data set and find the short squares data_set = AibsDataSet(sweep_info=sweep_info, nwb_file=nwb_file) shsq_table = data_set.filtered_sweep_table( stimuli=data_set.ontology.short_square_names) shsq_sweep_set = data_set.sweep_set(shsq_table.sweep_number) # Estimate the dv cutoff and threshold fraction (we know stimulus starts at 0.27s) dv_cutoff, thresh_frac = ft.estimate_adjusted_detection_parameters( shsq_sweep_set.v, shsq_sweep_set.t, 0.27, 0.271) # Build the extractors start = 0.27 spx = SpikeExtractor(start=start, dv_cutoff=dv_cutoff, thresh_frac=thresh_frac) sptrx = SpikeTrainFeatureExtractor(start=start, end=None) # Run the analysis shsq_analysis = ShortSquareAnalysis(spx, sptrx) results = shsq_analysis.analyze(shsq_sweep_set) # Plot the sweeps at the lowest amplitude that evoked the most spikes for i, swp in enumerate(shsq_sweep_set.sweeps):
from ipfx.stimulus_protocol_analysis import RampAnalysis # download a specific experiment NWB file via AllenSDK ct = CellTypesApi() specimen_id = 595570553 nwb_file = "%d.nwb" % specimen_id if not os.path.exists(nwb_file): ct.save_ephys_data(specimen_id, nwb_file) sweep_info = ct.get_ephys_sweeps(specimen_id) # Build the data set and find the ramp sweeps data_set = AibsDataSet(sweep_info=sweep_info, nwb_file=nwb_file) ramp_table = data_set.filtered_sweep_table( stimuli=data_set.ontology.ramp_names) ramp_sweep_set = data_set.sweep_set(ramp_table.sweep_number) # Build the extractors (we know stimulus starts at 0.27 s) start = 0.27 spx = SpikeExtractor(start=start, end=None) sptrx = SpikeTrainFeatureExtractor(start=start, end=None) # Run the analysis ramp_analysis = RampAnalysis(spx, sptrx) results = ramp_analysis.analyze(ramp_sweep_set) # Plot the sweeps and the latency to the first spike of each sns.set_style("white") for swp in ramp_sweep_set.sweeps: plt.plot(swp.t, swp.v, linewidth=0.5) sns.rugplot(results["spiking_sweeps"]["latency"].values + start)