Example 1
def main():
    """
    Usage:
    > python generate_qc_input.py --specimen_id SPECIMEN_ID --cell_dir CELL_DIR
    > python generate_qc_input.py --input_nwb_file INPUT_NWB_FILE --cell_dir CELL_DIR

    """

    args = parse_args()
    se_input = generate_se_input(**args)

    cell_dir = args['cell_dir']
    if not os.path.exists(cell_dir):
        os.makedirs(cell_dir)

    lu.configure_logger(cell_dir)

    ju.write(os.path.join(cell_dir, 'se_input.json'), se_input)

    # Extract sweep- and cell-level features from the NWB file
    se_output = run_sweep_extraction(se_input["input_nwb_file"],
                                     se_input.get("input_h5_file", None),
                                     se_input.get("stimulus_ontology_file", None))

    ju.write(os.path.join(cell_dir, 'se_output.json'), se_output)

    # Discard sweeps that were tagged as problematic during extraction
    sp.drop_tagged_sweeps(se_output["sweep_features"])

    qc_input = generate_qc_input(se_input, se_output)

    ju.write(os.path.join(cell_dir,'qc_input.json'), qc_input)
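
The parse_args helper is not shown in this example. Judging only from the usage string in the docstring, it presumably wraps an argparse parser over --specimen_id, --input_nwb_file and --cell_dir and returns the parsed values as a dict, since main() unpacks it with **args. A minimal sketch under those assumptions (the argument types and help texts are guesses, not the original implementation):

import argparse


def parse_args():
    """Parse the CLI flags from the usage string and return them as a dict."""
    parser = argparse.ArgumentParser(
        description="Generate the QC input JSON for a single cell")
    parser.add_argument("--specimen_id", type=int, default=None,
                        help="specimen id used to look up the recording")
    parser.add_argument("--input_nwb_file", type=str, default=None,
                        help="path to an NWB file, used instead of --specimen_id")
    parser.add_argument("--cell_dir", type=str, required=True,
                        help="directory where the *_input.json / *_output.json files are written")
    return vars(parser.parse_args())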
Example 2
def main():
    """
    Usage:
    > python generate_fx_input.py --specimen_id SPECIMEN_ID --cell_dir CELL_DIR
    > python generate_fx_input.py --input_nwb_file INPUT_NWB_FILE --cell_dir CELL_DIR

    """

    kwargs = parse_args()
    se_input = generate_se_input(**kwargs)
    cell_dir = kwargs["cell_dir"]

    # Make sure the output directory exists before configuring the file logger
    if not os.path.exists(cell_dir):
        os.makedirs(cell_dir)

    lu.configure_logger(cell_dir)

    ju.write(os.path.join(cell_dir, 'se_input.json'), se_input)

    se_output = run_sweep_extraction(
        se_input["input_nwb_file"], se_input.get("input_h5_file", None),
        se_input.get("stimulus_ontology_file", None))

    ju.write(os.path.join(cell_dir, 'se_output.json'), se_output)

    sp.drop_tagged_sweeps(se_output["sweep_features"])

    qc_input = generate_qc_input(se_input, se_output)
    ju.write(os.path.join(cell_dir, 'qc_input.json'), qc_input)

    # Run automatic QC on the cell- and sweep-level features
    qc_output = run_qc(qc_input.get("stimulus_ontology_file", None),
                       qc_input["cell_features"],
                       qc_input["sweep_features"],
                       qc_input["qc_criteria"])
    ju.write(os.path.join(cell_dir, 'qc_output.json'), qc_output)

    # Manual sweep states can only be looked up when a specimen id is given;
    # otherwise fall back to an empty list so the override below is a no-op.
    if kwargs["specimen_id"]:
        manual_sweep_states = lq.get_sweep_states(kwargs["specimen_id"])
    else:
        manual_sweep_states = []

    sp.override_auto_sweep_states(manual_sweep_states,
                                  qc_output["sweep_states"])
    sp.assign_sweep_states(qc_output["sweep_states"],
                           se_output["sweep_features"])

    fx_input = generate_fx_input(se_input,
                                 se_output,
                                 cell_dir,
                                 plot_figures=True)

    ju.write(os.path.join(cell_dir, 'fx_input.json'), fx_input)
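
Each stage of this script leaves a JSON artifact in cell_dir (se_input.json through fx_input.json), which makes it easy to inspect intermediate results after a run. A small helper for that, using the standard json module rather than assuming a read counterpart to ju.write:

import json
import os


def load_stage_outputs(cell_dir):
    """Load whichever of the stage JSON files written by main() exist in cell_dir."""
    stages = ["se_input", "se_output", "qc_input", "qc_output", "fx_input"]
    outputs = {}
    for stage in stages:
        path = os.path.join(cell_dir, stage + ".json")
        if os.path.exists(path):
            with open(path) as f:
                outputs[stage] = json.load(f)
    return outputs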
Example 3
def run_pipeline(input_nwb_file,
                 output_nwb_file,
                 stimulus_ontology_file,
                 qc_fig_dir,
                 qc_criteria,
                 manual_sweep_states,
                 write_spikes=True,
                 update_ontology=True):

    se_output = run_sweep_extraction(input_nwb_file,
                                     stimulus_ontology_file,
                                     update_ontology=update_ontology)

    # Discard tagged sweeps and strip the "tags" field before QC
    sweep_props.drop_tagged_sweeps(se_output["sweep_features"])
    sweep_props.remove_sweep_feature("tags", se_output["sweep_features"])

    # Automatic QC of cell and sweep features against the supplied criteria
    qc_output = run_qc(stimulus_ontology_file, se_output["cell_features"],
                       se_output["sweep_features"], qc_criteria)

    # Apply any manual overrides, then attach final pass/fail states to the sweeps
    sweep_props.override_auto_sweep_states(manual_sweep_states,
                                           qc_output["sweep_states"])
    sweep_props.assign_sweep_states(qc_output["sweep_states"],
                                    se_output["sweep_features"])

    fx_output = run_feature_extraction(
        input_nwb_file,
        stimulus_ontology_file,
        output_nwb_file,
        qc_fig_dir,
        se_output['sweep_features'],
        se_output['cell_features'],
        write_spikes,
    )

    log_pretty_header("Analysis completed!", level=1)

    return {
        "sweep_extraction": se_output,
        "qc": qc_output,
        "feature_extraction": fx_output
    }
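
A call to this run_pipeline variant might look like the following sketch; the file paths and the way qc_criteria is loaded are placeholders for illustration, not part of the original example:

import json

# Placeholder paths; qc_criteria is whatever structure run_qc expects,
# loaded here from a hypothetical JSON file purely for illustration.
with open("qc_criteria.json") as f:
    qc_criteria = json.load(f)

results = run_pipeline(
    input_nwb_file="specimen.nwb",
    output_nwb_file="specimen_processed.nwb",
    stimulus_ontology_file="stimulus_ontology.json",
    qc_fig_dir="qc_figs",
    qc_criteria=qc_criteria,
    manual_sweep_states=[],      # no manual overrides in this sketch
    write_spikes=True,
    update_ontology=True,
)

# The returned dict carries the three stage outputs shown above.
print(sorted(results))           # ['feature_extraction', 'qc', 'sweep_extraction']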
Example 4
def run_pipeline(input_nwb_file, input_h5_file, output_nwb_file,
                 stimulus_ontology_file, qc_fig_dir, qc_criteria,
                 manual_sweep_states):

    se_output = run_sweep_extraction(input_nwb_file, input_h5_file,
                                     stimulus_ontology_file)

    sp.drop_tagged_sweeps(se_output["sweep_features"])
    sp.remove_sweep_feature("tags", se_output["sweep_features"])

    qc_output = run_qc(stimulus_ontology_file, se_output["cell_features"],
                       se_output["sweep_features"], qc_criteria)

    # Stop early if the cell failed QC; no feature extraction is attempted
    if qc_output["cell_state"]["failed_qc"]:
        logging.warning("Failed QC. No ephys features extracted.")

        return dict(
            sweep_extraction=se_output,
            qc=qc_output,
        )

    sp.override_auto_sweep_states(manual_sweep_states,
                                  qc_output["sweep_states"])
    sp.assign_sweep_states(qc_output["sweep_states"],
                           se_output["sweep_features"])

    fx_output = run_feature_extraction(
        input_nwb_file,
        stimulus_ontology_file,
        output_nwb_file,
        qc_fig_dir,
        se_output['sweep_features'],
        se_output['cell_features'],
    )

    return dict(sweep_extraction=se_output,
                qc=qc_output,
                feature_extraction=fx_output)
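
Because this variant returns early when the cell fails QC, the "feature_extraction" key is only present on success, so callers can branch on it. A brief sketch, with the paths and qc_criteria assumed to be defined as in the previous example:

output = run_pipeline(input_nwb_file="specimen.nwb",
                      input_h5_file=None,
                      output_nwb_file="specimen_processed.nwb",
                      stimulus_ontology_file="stimulus_ontology.json",
                      qc_fig_dir="qc_figs",
                      qc_criteria=qc_criteria,
                      manual_sweep_states=[])

if "feature_extraction" not in output:
    # run_pipeline returned early: the cell failed QC and no features were extracted
    print("QC failed:", output["qc"]["cell_state"])
else:
    print("Sweeps analyzed:", len(output["sweep_extraction"]["sweep_features"]))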