Example #1
0
def main():
    """
    Generate and write the input JSON for the sweep-QC step of the pipeline.

    Runs sweep extraction on the input NWB file, writes the intermediate
    se_input/se_output JSONs, drops tagged sweeps, and writes qc_input.json
    into CELL_DIR.

    Usage:
    > python generate_qc_input.py --specimen_id SPECIMEN_ID --cell_dir CELL_DIR
    > python generate_qc_input.py --input_nwb_file input_nwb_file --cell_dir CELL_DIR

    """

    # parse_args() returns a dict of CLI options; expanded as keyword args below
    args = parse_args()
    se_input = generate_se_input(**args)

    cell_dir = args['cell_dir']
    # exist_ok avoids the check-then-create race of os.path.exists + makedirs
    os.makedirs(cell_dir, exist_ok=True)

    lu.configure_logger(cell_dir)

    ju.write(os.path.join(cell_dir, 'se_input.json'), se_input)

    # Extract per-sweep features from the recording
    se_output = run_sweep_extraction(se_input["input_nwb_file"],
                                     se_input.get("input_h5_file", None),
                                     se_input.get("stimulus_ontology_file", None))

    ju.write(os.path.join(cell_dir, 'se_output.json'), se_output)

    # Remove sweeps flagged with failure tags before building the QC input
    sp.drop_tagged_sweeps(se_output["sweep_features"])

    qc_input = generate_qc_input(se_input, se_output)

    ju.write(os.path.join(cell_dir, 'qc_input.json'), qc_input)
Example #2
0
def main():
    """
    Generate and write the input JSON for the feature-extraction (fx) step.

    Runs sweep extraction and QC, applies manual sweep-state overrides, and
    writes all intermediate JSONs plus fx_input.json into CELL_DIR.

    Usage:
    > python generate_fx_input.py --specimen_id SPECIMEN_ID --cell_dir CELL_DIR
    > python generate_fx_input.py --input_nwb_file INPUT_NWB_FILE --cell_dir CELL_DIR

    """

    kwargs = parse_args()
    se_input = generate_se_input(**kwargs)
    cell_dir = kwargs["cell_dir"]

    # BUGFIX: the directory must exist before the logger writes into it;
    # exist_ok also avoids the check-then-create race.
    os.makedirs(cell_dir, exist_ok=True)
    lu.configure_logger(cell_dir)

    ju.write(os.path.join(cell_dir, 'se_input.json'), se_input)

    se_output = run_sweep_extraction(
        se_input["input_nwb_file"], se_input.get("input_h5_file", None),
        se_input.get("stimulus_ontology_file", None))

    ju.write(os.path.join(cell_dir, 'se_output.json'), se_output)

    # Remove sweeps flagged with failure tags before QC
    sp.drop_tagged_sweeps(se_output["sweep_features"])

    qc_input = generate_qc_input(se_input, se_output)
    ju.write(os.path.join(cell_dir, 'qc_input.json'), qc_input)

    qc_output = run_qc(qc_input.get("stimulus_ontology_file",
                                    None), qc_input["cell_features"],
                       qc_input["sweep_features"], qc_input["qc_criteria"])
    ju.write(os.path.join(cell_dir, 'qc_output.json'), qc_output)

    # Manual sweep states come from LIMS when a specimen_id is given;
    # BUGFIX: default to [] so the name is always bound even if neither
    # specimen_id nor input_nwb_file is provided.
    manual_sweep_states = []
    if kwargs["specimen_id"]:
        manual_sweep_states = lq.get_sweep_states(kwargs["specimen_id"])

    sp.override_auto_sweep_states(manual_sweep_states,
                                  qc_output["sweep_states"])
    sp.assign_sweep_states(qc_output["sweep_states"],
                           se_output["sweep_features"])

    fx_input = generate_fx_input(se_input,
                                 se_output,
                                 cell_dir,
                                 plot_figures=True)

    ju.write(os.path.join(cell_dir, 'fx_input.json'), fx_input)
Example #3
0
def main():
    """
    Runs pipeline from the nwb file.

    The output directory is OUTPUT_DIR/<cell_name>, where cell_name is the
    extensionless basename of the input NWB file.

    Usage:
    python pipeline_from_nwb_file.py INPUT_NWB_FILE

    User must specify the OUTPUT_DIR

    """

    input_nwb_file = sys.argv[1]
    input_nwb_file_basename = os.path.basename(input_nwb_file)
    cell_name = os.path.splitext(input_nwb_file_basename)[0]

    cell_dir = os.path.join(OUTPUT_DIR, cell_name)

    # exist_ok avoids the check-then-create race of os.path.exists + makedirs
    os.makedirs(cell_dir, exist_ok=True)

    lu.configure_logger(cell_dir)

    pipe_input = gpi.generate_pipeline_input(cell_dir,
                                             input_nwb_file=input_nwb_file)

    input_json = os.path.join(cell_dir, INPUT_JSON)
    ju.write(input_json, pipe_input)

    # Round-trip through disk so the pipeline consumes exactly what was written
    pipe_input = ju.read(input_json)
    pipe_output = run_pipeline(pipe_input["input_nwb_file"],
                               pipe_input.get("input_h5_file", None),
                               pipe_input["output_nwb_file"],
                               pipe_input.get("stimulus_ontology_file", None),
                               pipe_input.get("qc_fig_dir", None),
                               pipe_input["qc_criteria"],
                               pipe_input["manual_sweep_states"])

    ju.write(os.path.join(cell_dir, OUTPUT_JSON), pipe_output)
Example #4
0
def main():
    """
    Runs pipeline from the specimen_id.

    The output directory is OUTPUT_DIR/<specimen_id>.

    Usage:
    python pipeline_from_specimen_id.py SPECIMEN_ID

    User must specify the OUTPUT_DIR

    """

    specimen_id = sys.argv[1]
    cell_name = specimen_id

    cell_dir = os.path.join(OUTPUT_DIR, cell_name)

    # exist_ok avoids the check-then-create race of os.path.exists + makedirs
    os.makedirs(cell_dir, exist_ok=True)

    lu.configure_logger(cell_dir)

    # generate_pipeline_input expects an integer specimen id
    pipe_input = gpi.generate_pipeline_input(cell_dir,
                                             specimen_id=int(specimen_id))

    input_json = os.path.join(cell_dir, INPUT_JSON)
    ju.write(input_json, pipe_input)

    # Round-trip through disk so the pipeline consumes exactly what was written
    pipe_input = ju.read(input_json)
    pipe_output = run_pipeline(pipe_input["input_nwb_file"],
                               pipe_input.get("input_h5_file", None),
                               pipe_input["output_nwb_file"],
                               pipe_input.get("stimulus_ontology_file", None),
                               pipe_input.get("qc_fig_dir", None),
                               pipe_input["qc_criteria"],
                               pipe_input["manual_sweep_states"])

    ju.write(os.path.join(cell_dir, OUTPUT_JSON), pipe_output)
def main():
    """
    Convenience script for running ephys pipeline from a given nwb file.
    It generates the pipeline input and then calls the run_pipeline executable.

    Usage:
    python run_pipeline_from_nwb_file.py <input_nwb_file> <output_dir>
    """

    def _parse_bool(value):
        # BUGFIX helper: argparse `type=bool` treats any non-empty string
        # (including "False") as True; parse the text explicitly instead.
        lowered = value.lower()
        if lowered in ("true", "1", "yes"):
            return True
        if lowered in ("false", "0", "no"):
            return False
        raise argparse.ArgumentTypeError("expected a boolean, got %r" % value)

    parser = argparse.ArgumentParser(
        description="Process an nwb file through the ephys pipeline"
    )
    parser.add_argument(
        "input_nwb_file", type=str, help="process this NWB2 file"
    )
    parser.add_argument(
        "output_dir", type=str, help="outputs will be written here"
    )
    # nargs="?" with const=True keeps backward compatibility: both bare
    # `--write_spikes` and `--write_spikes True/False` work correctly.
    parser.add_argument(
        "--write_spikes", type=_parse_bool, nargs="?", const=True,
        default=False,
        help="If true will attempt to append spike times to the nwb file",
    )
    parser.add_argument(
        "--input_json", type=str, default="input.json",
        help=(
            "write pipeline input json file here (relative to "
            "OUTPUT_DIR/cell_name, where cell_name is the extensionless "
            "basename of the input NWB file)"
        )
    )
    parser.add_argument(
        "--output_json", type=str, default="output.json",
        help=(
            "write output json file here (relative to OUTPUT_DIR/cell_name, "
            "where cell_name is the extensionless basename of the input NWB "
            "file)"
        )
    )
    parser.add_argument(
        "--qc_fig_dir", type=str, default=None, const="qc_figs", nargs="?",
        help=(
            "Generate qc figures and store them here (relative to "
            "OUTPUT_DIR/cell_name, where cell_name is the extensionless "
            "basename of the input nwb file). If you supply --qc_fig_dir with "
            "no arguments, the path will be OUTPUT_DIR/cell_name/qc_figs. If "
            "this argument is not supplied, no figures will be generated."
        )
    )

    args = vars(parser.parse_args())
    output_dir = args["output_dir"]
    input_nwb_file = args["input_nwb_file"]
    input_json = args["input_json"]
    output_json = args["output_json"]

    input_nwb_file_basename = os.path.basename(input_nwb_file)

    cell_name = os.path.splitext(input_nwb_file_basename)[0]
    cell_dir = os.path.join(output_dir, cell_name)
    os.makedirs(cell_dir, exist_ok=True)

    lu.configure_logger(cell_dir)

    pipeline_input = generate_pipeline_input(
        cell_dir=cell_dir,
        input_nwb_file=input_nwb_file,
        plot_figures=args["qc_fig_dir"] is not None,
        qc_fig_dirname=args["qc_fig_dir"]
    )

    input_json = os.path.join(cell_dir, input_json)
    ju.write(input_json, pipeline_input)

    # Round-trip through disk so the pipeline consumes exactly what was written
    pipeline_input = ju.read(input_json)
    pipeline_output = run_pipeline(pipeline_input["input_nwb_file"],
                                   pipeline_input["output_nwb_file"],
                                   pipeline_input.get("stimulus_ontology_file", None),
                                   pipeline_input.get("qc_fig_dir", None),
                                   pipeline_input["qc_criteria"],
                                   pipeline_input["manual_sweep_states"],
                                   args["write_spikes"])

    ju.write(os.path.join(cell_dir, output_json), pipeline_output)