Example #1
0
def main():
    """
    Usage:
    > python generate_qc_input.py --specimen_id SPECIMEN_ID --cell_dir CELL_DIR
    > python generate_qc_input.py --input_nwb_file input_nwb_file --cell_dir CELL_DIR

    """
    args = parse_args()
    se_input = generate_se_input(**args)

    # Make sure the output directory exists before the logger writes into it.
    cell_dir = args['cell_dir']
    if not os.path.exists(cell_dir):
        os.makedirs(cell_dir)

    lu.configure_logger(cell_dir)

    # Persist the sweep-extraction input alongside the cell's other artifacts.
    ju.write(os.path.join(cell_dir, 'se_input.json'), se_input)

    se_output = run_sweep_extraction(
        se_input["input_nwb_file"],
        se_input.get("input_h5_file"),
        se_input.get("stimulus_ontology_file"),
    )
    ju.write(os.path.join(cell_dir, 'se_output.json'), se_output)

    # Sweeps tagged with failure messages are excluded before building QC input.
    sp.drop_tagged_sweeps(se_output["sweep_features"])

    qc_input = generate_qc_input(se_input, se_output)
    ju.write(os.path.join(cell_dir, 'qc_input.json'), qc_input)
Example #2
0
def drop_failed_sweeps(
        dataset: EphysDataSet,
        stimulus_ontology: Optional[StimulusOntology] = None,
        qc_criteria: Optional[Dict] = None
) -> List[Dict]:
    """A convenience which extracts and QCs sweeps in preparation for dataset
    feature extraction. This function:
    1. extracts sweep qc features
    2. removes sweeps tagged with failure messages
    3. sets sweep states based on qc results

    Parameters
    ----------
    dataset : dataset from which to draw sweeps
    stimulus_ontology : ontology used to classify sweep stimuli; the package
        default is used when None
    qc_criteria : auto-QC thresholds; the package defaults are used when None

    Returns
    -------
    sweep_features : a list of dictionaries, each describing a sweep
    """
    if stimulus_ontology is None:
        stimulus_ontology = StimulusOntology.default()
    if qc_criteria is None:
        qc_criteria = qcp.load_default_qc_criteria()

    sweep_features = sweep_qc_features(dataset)
    sweep_props.drop_tagged_sweeps(sweep_features)
    sweep_props.remove_sweep_feature("tags", sweep_features)
    sweep_states = qcp.qc_sweeps(
        stimulus_ontology, sweep_features, qc_criteria
    )
    sweep_props.assign_sweep_states(sweep_states, sweep_features)

    dataset.sweep_info = sweep_features
    # BUG FIX: the annotation and docstring promise the sweep features, but
    # the original implementation fell off the end and returned None.
    return sweep_features
def extract_qc_features(data_set):
    """Compute cell- and sweep-level QC features for *data_set*.

    Sweeps tagged with failure messages are dropped before returning.
    Returns a (cell_features, cell_tags, sweep_features) tuple.
    """
    cell_features, cell_tags = cell_qc_features(
        data_set,
        # manual_values=cell_qc_manual_values
    )

    sweeps = sweep_qc_features(data_set)
    drop_tagged_sweeps(sweeps)

    return cell_features, cell_tags, sweeps
Example #4
0
def main():
    """
    Usage:
    > python generate_fx_input.py --specimen_id SPECIMEN_ID --cell_dir CELL_DIR
    > python generate_fx_input.py --input_nwb_file INPUT_NWB_FILE --cell_dir CELL_DIR

    """

    kwargs = parse_args()
    se_input = generate_se_input(**kwargs)
    cell_dir = kwargs["cell_dir"]

    # BUG FIX: create the output directory before configuring the logger,
    # which writes its log file into cell_dir (matches the order used by the
    # generate_qc_input script).
    if not os.path.exists(cell_dir):
        os.makedirs(cell_dir)

    lu.configure_logger(cell_dir)

    ju.write(os.path.join(cell_dir, 'se_input.json'), se_input)

    se_output = run_sweep_extraction(
        se_input["input_nwb_file"], se_input.get("input_h5_file", None),
        se_input.get("stimulus_ontology_file", None))

    ju.write(os.path.join(cell_dir, 'se_output.json'), se_output)

    # Sweeps tagged with failure messages are excluded from QC.
    sp.drop_tagged_sweeps(se_output["sweep_features"])

    qc_input = generate_qc_input(se_input, se_output)
    ju.write(os.path.join(cell_dir, 'qc_input.json'), qc_input)

    qc_output = run_qc(qc_input.get("stimulus_ontology_file",
                                    None), qc_input["cell_features"],
                       qc_input["sweep_features"], qc_input["qc_criteria"])
    ju.write(os.path.join(cell_dir, 'qc_output.json'), qc_output)

    # BUG FIX: default to "no manual states" so the name is always bound;
    # the original if/elif left manual_sweep_states undefined (NameError)
    # when neither specimen_id nor input_nwb_file was supplied.
    manual_sweep_states = []
    if kwargs["specimen_id"]:
        manual_sweep_states = lq.get_sweep_states(kwargs["specimen_id"])

    # Manual curation overrides the automatic QC verdicts.
    sp.override_auto_sweep_states(manual_sweep_states,
                                  qc_output["sweep_states"])
    sp.assign_sweep_states(qc_output["sweep_states"],
                           se_output["sweep_features"])

    fx_input = generate_fx_input(se_input,
                                 se_output,
                                 cell_dir,
                                 plot_figures=True)

    ju.write(os.path.join(cell_dir, 'fx_input.json'), fx_input)
Example #5
0
def run_pipeline(input_nwb_file,
                 output_nwb_file,
                 stimulus_ontology_file,
                 qc_fig_dir,
                 qc_criteria,
                 manual_sweep_states,
                 write_spikes=True,
                 update_ontology=True):
    """Run sweep extraction, auto-QC, and feature extraction end to end.

    Returns a dict with each stage's output under the keys
    "sweep_extraction", "qc", and "feature_extraction".
    """
    se_output = run_sweep_extraction(input_nwb_file,
                                     stimulus_ontology_file,
                                     update_ontology=update_ontology)

    # Remove sweeps that failed extraction, then strip the bookkeeping tags.
    sweep_features = se_output["sweep_features"]
    sweep_props.drop_tagged_sweeps(sweep_features)
    sweep_props.remove_sweep_feature("tags", sweep_features)

    qc_output = run_qc(
        stimulus_ontology_file,
        se_output["cell_features"],
        sweep_features,
        qc_criteria,
    )

    # Manual curation wins over the automatic QC verdicts.
    sweep_states = qc_output["sweep_states"]
    sweep_props.override_auto_sweep_states(manual_sweep_states, sweep_states)
    sweep_props.assign_sweep_states(sweep_states, sweep_features)

    fx_output = run_feature_extraction(
        input_nwb_file,
        stimulus_ontology_file,
        output_nwb_file,
        qc_fig_dir,
        se_output['sweep_features'],
        se_output['cell_features'],
        write_spikes,
    )

    log_pretty_header("Analysis completed!", level=1)

    return {
        "sweep_extraction": se_output,
        "qc": qc_output,
        "feature_extraction": fx_output
    }
    def run_extraction_and_auto_qc(self, nwb_path, stimulus_ontology, qc_criteria, commit=True):
        """Extract QC features from *nwb_path* and run automatic QC.

        When commit is True the computed state is stored on this object,
        bracketed by the begin/end commit signals; data_changed is emitted
        afterwards in either case.
        """
        data_set = create_ephys_data_set(
            sweep_info=None,
            nwb_file=nwb_path,
            ontology=stimulus_ontology
        )

        cell_features, cell_tags, sweep_features = extract_qc_features(data_set)

        sweep_props.drop_tagged_sweeps(sweep_features)
        cell_state, cell_features, sweep_states, sweep_features = run_qc(
            stimulus_ontology, cell_features, sweep_features, qc_criteria
        )

        if commit:
            self.begin_commit_calculated.emit()

            # Record the inputs used for this run ...
            self.stimulus_ontology = stimulus_ontology
            self.qc_criteria = qc_criteria
            self.nwb_path = nwb_path
            self.data_set = data_set

            # ... and the computed results.
            self.cell_features = cell_features
            self.cell_tags = cell_tags
            self.cell_state = cell_state
            self.sweep_features = sweep_features
            self.sweep_states = sweep_states

            # Every sweep starts out in the "default" manual QC state.
            self.manual_qc_states = {
                sweep["sweep_number"]: "default"
                for sweep in self.sweep_features
            }

            self.end_commit_calculated.emit(
                self.sweep_features, self.sweep_states,
                self.manual_qc_states, self.data_set
            )

        self.data_changed.emit(self.nwb_path,
                               self.stimulus_ontology,
                               self.sweep_features,
                               self.cell_features)
Example #7
0
def run_pipeline(input_nwb_file, input_h5_file, output_nwb_file,
                 stimulus_ontology_file, qc_fig_dir, qc_criteria,
                 manual_sweep_states):
    """Run the full extraction -> QC -> feature-extraction pipeline.

    Feature extraction is skipped (and its key omitted from the result)
    when the cell fails QC.
    """
    se_output = run_sweep_extraction(
        input_nwb_file, input_h5_file, stimulus_ontology_file)

    # Remove sweeps that failed extraction, then strip the bookkeeping tags.
    sweep_features = se_output["sweep_features"]
    sp.drop_tagged_sweeps(sweep_features)
    sp.remove_sweep_feature("tags", sweep_features)

    qc_output = run_qc(
        stimulus_ontology_file,
        se_output["cell_features"],
        sweep_features,
        qc_criteria,
    )

    results = dict(sweep_extraction=se_output, qc=qc_output)

    if qc_output["cell_state"]["failed_qc"]:
        # A failed cell yields no features; return the partial results early.
        logging.warning("Failed QC. No ephys features extracted.")
        return results

    # Manual curation wins over the automatic QC verdicts.
    sweep_states = qc_output["sweep_states"]
    sp.override_auto_sweep_states(manual_sweep_states, sweep_states)
    sp.assign_sweep_states(sweep_states, sweep_features)

    results["feature_extraction"] = run_feature_extraction(
        input_nwb_file,
        stimulus_ontology_file,
        output_nwb_file,
        qc_fig_dir,
        se_output['sweep_features'],
        se_output['cell_features'],
    )

    return results
Example #8
0
# BUG FIX: `os` is used below (os.path.join / os.getcwd / os.path.dirname)
# but was never imported.
import os

import pandas as pd

from ipfx.dataset.create import create_ephys_data_set
from ipfx.qc_feature_extractor import sweep_qc_features
import ipfx.sweep_props as sweep_props
import ipfx.qc_feature_evaluator as qcp
from ipfx.stimulus import StimulusOntology

# Download and access the experimental data from DANDI archive per instructions in the documentation
# Example below will use an nwb file provided with the package

nwb_file = os.path.join(os.path.dirname(os.getcwd()), "data",
                        "nwb2_H17.03.008.11.03.05.nwb")
data_set = create_ephys_data_set(nwb_file=nwb_file)

# Compute sweep QC features
sweep_features = sweep_qc_features(data_set)

# Drop sweeps that failed to compute QC criteria
sweep_props.drop_tagged_sweeps(sweep_features)
sweep_props.remove_sweep_feature("tags", sweep_features)

stimulus_ontology = StimulusOntology.default()
qc_criteria = qcp.load_default_qc_criteria()

sweep_states = qcp.qc_sweeps(stimulus_ontology, sweep_features, qc_criteria)

# print a few sweeps and states (compute the head frame once rather than twice)
head = pd.DataFrame(sweep_features).head()
print(head)
print(sweep_states[0:len(head)])