예제 #1
0
def run_feature_extraction(input_nwb_file, stimulus_ontology_file,
                           output_nwb_file, qc_fig_dir, sweep_info, cell_info):
    """Extract cell- and sweep-level ephys features from an NWB file.

    Drops failed sweeps in place, builds the data set, runs feature
    extraction, optionally embeds spike times into the output NWB and
    renders QC figures. Returns a feature-data dict; on extraction
    failure the dict contains only a failed ``cell_state``.
    """
    lu.log_pretty_header("Extract ephys features", level=1)

    # Failed sweeps cannot be analyzed; remove them before anything else.
    sp.drop_failed_sweeps(sweep_info)
    if len(sweep_info) == 0:
        raise er.FeatureError(
            "There are no QC-passed sweeps available to analyze")

    # Fall back to the packaged default ontology when none was supplied.
    if not stimulus_ontology_file:
        stimulus_ontology_file = StimulusOntology.DEFAULT_STIMULUS_ONTOLOGY_FILE
        logging.info(
            f"Ontology is not provided, using default {StimulusOntology.DEFAULT_STIMULUS_ONTOLOGY_FILE}"
        )
    ontology = StimulusOntology(ju.read(stimulus_ontology_file))

    data_set = create_data_set(
        sweep_info=sweep_info,
        nwb_file=input_nwb_file,
        ontology=ontology,
        api_sweeps=False,
    )

    try:
        (cell_features, sweep_features,
         cell_record, sweep_records) = dsft.extract_data_set_features(data_set)
    except (er.FeatureError, IndexError) as error:
        logging.warning(error)
        cell_state = {"failed_fx": True, "fail_fx_message": str(error)}
        feature_data = {'cell_state': cell_state}
    else:
        if cell_info:
            cell_record.update(cell_info)

        cell_state = {"failed_fx": False, "fail_fx_message": None}
        feature_data = {
            'cell_features': cell_features,
            'sweep_features': sweep_features,
            'cell_record': cell_record,
            'sweep_records': sweep_records,
            'cell_state': cell_state,
        }

    if not cell_state["failed_fx"]:
        embed_spike_times(input_nwb_file, output_nwb_file,
                          collect_spike_times(sweep_features))

        if qc_fig_dir is None:
            logging.info("qc_fig_dir is not provided, will not save figures")
        else:
            plotqc.display_features(qc_fig_dir, data_set, feature_data)

        # On Windows, json.dump cannot convert int64 sweep-number keys to
        # str when serializing, so convert them explicitly here.
        feature_data["sweep_features"] = {
            str(sweep_number): features
            for sweep_number, features in feature_data["sweep_features"].items()
        }

    return feature_data
예제 #2
0
    def run_feature_extraction(self):
        """Compute ephys features for the loaded sweeps and cache the result.

        Emits progress via ``self.status_message``, stores the extracted
        data in ``self.feature_data`` and calls ``self.new_state()`` on
        success. On failure a dialog is shown instead of raising.
        """
        self.status_message.emit("Computing features, please wait.")
        # Failed sweeps cannot be analyzed; remove them in place.
        drop_failed_sweeps(self.sweep_info)
        data_set = create_ephys_data_set(sweep_info=self.sweep_info,
                                         nwb_file=self.input_nwb_file,
                                         ontology=self.ontology)
        try:
            cell_features, sweep_features, cell_record, sweep_records, \
                cell_state, feature_states = extract_data_set_features(data_set)

            self.feature_data = {'cell_features': cell_features,
                                 'sweep_features': sweep_features,
                                 'cell_record': cell_record,
                                 'sweep_records': sweep_records,
                                 'cell_state': cell_state,
                                 'feature_states': feature_states
                                 }

            self.new_state()
            self.status_message.emit("Done computing features!")

        except (FeatureError, IndexError) as ferr:
            # NOTE(review): self.feature_data stays unset on failure —
            # downstream consumers must tolerate that. Dropped the
            # extraneous f-prefix from the placeholder-free string (F541).
            exception_message("Feature extraction error",
                              "failed feature extraction",
                              ferr
                              )
예제 #3
0
"""
Run all analyses on NWB file
"""
from __future__ import print_function

import os
# NOTE(review): plt, np and sns are not used in this visible snippet —
# presumably used by plotting code elsewhere in the full example; verify.
import matplotlib.pyplot as plt
import numpy as np
import seaborn as sns
from allensdk.api.queries.cell_types_api import CellTypesApi
from ipfx.aibs_data_set import AibsDataSet
from ipfx.data_set_features import extract_data_set_features

# download a specific experiment NWB file via AllenSDK
ct = CellTypesApi()

specimen_id = 595570553
nwb_file = "%d.nwb" % specimen_id
# Skip the download when a cached copy already exists on disk.
if not os.path.exists(nwb_file):
    ct.save_ephys_data(specimen_id, nwb_file)
# Per-sweep metadata for the specimen, fetched from the Allen API.
sweep_info = ct.get_ephys_sweeps(specimen_id)

data_set = AibsDataSet(
    sweep_info=sweep_info,
    nwb_file=nwb_file)  # Download and access the experimental data

# Run the full feature-extraction pipeline; returns cell-level and
# sweep-level features plus flat record dicts.
# NOTE(review): subthresh_min_amp appears to bound which subthreshold
# sweep amplitudes are analyzed (in pA) — confirm against ipfx docs.
cell_features, sweep_features, cell_record, sweep_records = \
    extract_data_set_features(data_set, subthresh_min_amp=-100.0)

print(cell_record)
예제 #4
0
def run_feature_extraction(input_nwb_file,
                           stimulus_ontology_file,
                           output_nwb_file,
                           qc_fig_dir,
                           sweep_info,
                           cell_info,
                           write_spikes=True):
    """Extract cell- and sweep-level ephys features from an NWB file.

    Parameters
    ----------
    input_nwb_file : path of the NWB file to analyze
    stimulus_ontology_file : stimulus ontology JSON path; falls back to
        the package default when falsy
    output_nwb_file : destination NWB path for appended spike times
    qc_fig_dir : directory for QC figures, or None to skip plotting
    sweep_info : list of sweep records; failed sweeps are dropped in place
    cell_info : optional dict merged into the cell record
    write_spikes : when True, append detected spike times to the output NWB

    Returns
    -------
    dict of feature data; only ``{'cell_state': ...}`` when extraction
    failed.

    Raises
    ------
    er.FeatureError
        If no QC-passed sweeps remain after filtering.
    """
    lu.log_pretty_header("Extract ephys features", level=1)

    # Failed sweeps cannot be analyzed; remove them before anything else.
    sp.drop_failed_sweeps(sweep_info)
    if len(sweep_info) == 0:
        raise er.FeatureError(
            "There are no QC-passed sweeps available to analyze")

    if not stimulus_ontology_file:
        stimulus_ontology_file = StimulusOntology.DEFAULT_STIMULUS_ONTOLOGY_FILE
        logging.info(
            f"Ontology is not provided, using default {StimulusOntology.DEFAULT_STIMULUS_ONTOLOGY_FILE}"
        )
    ont = StimulusOntology(ju.read(stimulus_ontology_file))

    data_set = create_ephys_data_set(sweep_info=sweep_info,
                                     nwb_file=input_nwb_file,
                                     ontology=ont)

    (cell_features, sweep_features, cell_record, sweep_records, cell_state,
     feature_states) = dsft.extract_data_set_features(data_set)

    if cell_state['failed_fx']:
        # Extraction failed: report only the failure state.
        feature_data = {'cell_state': cell_state}
    else:
        if cell_info:
            cell_record.update(cell_info)

        feature_data = {
            'cell_features': cell_features,
            'sweep_features': sweep_features,
            'cell_record': cell_record,
            'sweep_records': sweep_records,
            'cell_state': cell_state,
            'feature_states': feature_states
        }

        if write_spikes:
            if not feature_states['sweep_features_state']['failed_fx']:
                sweep_spike_times = collect_spike_times(sweep_features)
                append_spike_times(input_nwb_file,
                                   sweep_spike_times,
                                   output_nwb_path=output_nwb_file)
            else:
                # FIX: logging.warn is a deprecated alias of warning().
                logging.warning("extract_sweep_features failed, "
                                "unable to write spikes")

        if qc_fig_dir is None:
            logging.info("qc_fig_dir is not provided, will not save figures")
        else:
            plotqc.display_features(qc_fig_dir, data_set, feature_data)

        # On Windows int64 keys of sweep numbers cannot be converted to str by json.dump when serializing.
        # Thus, we are converting them here:
        feature_data["sweep_features"] = {
            str(k): v
            for k, v in feature_data["sweep_features"].items()
        }

    return feature_data