# Example #1
def test_create_full_segment_pnh_subpipes_no_args():
    """Build the full PNH segmentation workflow with empty sub-pipe
    parameters (all defaults) and check that the graph file is written."""

    wf_name = "test_create_full_segment_pnh_subpipes_no_args"

    # Empty dicts: every sub-pipeline falls back to its default settings.
    params = {
        "short_preparation_pipe": {"bet_crop": {}},
        "brain_extraction_pipe": {
            "correct_bias_pipe": {},
            "extract_pipe": {},
        },
        "brain_segment_pipe": {
            "masked_correct_bias_pipe": {},
            "register_NMT_pipe": {},
            "segment_atropos_pipe": {},
        },
    }

    # Fetch the template data and format it for the pipeline.
    template_name = 'NMT_v1.2'
    template_dir = load_test_data(template_name, data_path)
    params_template = format_template(template_dir, template_name)

    # Instantiate the workflow and write its colored graph.
    segment_pnh = create_full_segment_pnh_subpipes(
        params=params,
        params_template=params_template,
        name=wf_name)
    segment_pnh.base_dir = data_path
    segment_pnh.write_graph(graph2use="colored")

    assert op.exists(op.join(data_path, wf_name, "graph.png"))
# Example #2
def test_create_full_segment_pnh_subpipes():
    """Build the full PNH segmentation workflow with fully specified
    sub-pipe parameters and check that the graph file is written."""

    wf_name = "test_create_full_segment_pnh_subpipes"

    # --- parameter fragments, composed into `params` below -------------
    bet_crop_params = {"m": True, "aT2": True, "c": 10, "n": 2}

    # Bias-correction settings; the same values are used for both the
    # extraction-stage and segmentation-stage bias pipes, so copies are
    # taken below to keep the nested dicts independent objects.
    smooth_params = {"args": "-bin -s 2"}
    norm_smooth_params = {"op_string": "-s 2 -div %s"}
    smooth_bias_params = {"sigma": 2}

    atlas_brex_params = {
        "f": 0.5,
        "reg": 1,
        "wrp": "10,10,10",
        "msk": "a,0,0",
        "dil": 2,
        "nrm": 1,
    }

    norm_intensity_params = {
        "dimension": 3,
        "bspline_fitting_distance": 200,
        "n_iterations": [50, 50, 40, 30],
        "convergence_threshold": 0.00000001,
        "shrink_factor": 2,
        "args": "-r 0 --verbose 1",
    }

    atropos_pipe_params = {
        "Atropos": {"dimension": 3, "numberOfClasses": 3},
        "threshold_gm": {"thresh": 0.5},
        "threshold_wm": {"thresh": 0.5},
        "threshold_csf": {"thresh": 0.5},
    }

    params = {
        "short_preparation_pipe": {"bet_crop": bet_crop_params},
        "brain_extraction_pipe": {
            "correct_bias_pipe": {
                "smooth": dict(smooth_params),
                "norm_smooth": dict(norm_smooth_params),
                "smooth_bias": dict(smooth_bias_params),
            },
            "extract_pipe": {"atlas_brex": atlas_brex_params},
        },
        "brain_segment_pipe": {
            "masked_correct_bias_pipe": {
                "smooth": dict(smooth_params),
                "norm_smooth": dict(norm_smooth_params),
                "smooth_bias": dict(smooth_bias_params),
            },
            "register_NMT_pipe": {"norm_intensity": norm_intensity_params},
            "segment_atropos_pipe": atropos_pipe_params,
        },
    }

    # Fetch the template data and format it for the pipeline.
    template_name = 'NMT_v1.2'
    template_dir = load_test_data(template_name, data_path)
    params_template = format_template(template_dir, template_name)

    # Instantiate the workflow and write its colored graph.
    segment_pnh = create_full_segment_pnh_subpipes(
        params=params,
        params_template=params_template,
        name=wf_name)
    segment_pnh.base_dir = data_path
    segment_pnh.write_graph(graph2use="colored")

    assert op.exists(op.join(data_path, wf_name, "graph.png"))
# Fetch the template data and format it for the pipeline.
template_dir = load_test_data(template_name)
params_template = format_template(template_dir, template_name)
print(params_template)

# local paths
data_path = "/home/INT/meunier.d/Data/Baboon/data/sub-Babar/ses-test/anat"
main_path = "/home/INT/meunier.d/data_macapype/"

# on the frioul cluster
#data_path = "/hpc/meca/users/loh.k/baboon_proc/data/sub-Odor/ses-T1/anat"
#main_path = "/hpc/crise/meunier.d/Data"

# input anatomical files
T1_file = op.join(data_path, "sub-Odor_ses-T1_T1w.nii.gz")
T2_file = op.join(data_path, "sub-Odor_ses-T1_T2w.nii.gz")

# build the workflow
segment_pnh = create_full_segment_pnh_subpipes(
    params=params,
    params_template=params_template,
    name="example_segment_baboon_ants_based_Odor")
segment_pnh.base_dir = main_path

# wire the inputs (single-subject run with per-subject parameters)
segment_pnh.inputs.inputnode.list_T1 = [T1_file]
segment_pnh.inputs.inputnode.list_T2 = [T2_file]
segment_pnh.inputs.inputnode.indiv_params = indiv_params["sub-Odor"]["ses-T1"]

# write the graph, keep all intermediate outputs, and run
segment_pnh.write_graph(graph2use="colored")
segment_pnh.config['execution'] = {'remove_unnecessary_outputs': 'false'}
segment_pnh.run()
# Example #4
    template_name = 'NMT_v1.2'

if "general" in params.keys() and "my_path" in params["general"].keys():
    my_path = params["general"]["my_path"]
else:
    my_path = ''

template_dir = load_test_data(template_name)
params_template = format_template(template_dir, template_name)
print(params_template)

data_path = load_test_data("data_test_macaque", my_path)

# data file
T1_file = op.join(data_path, "non_cropped", "sub-Apache_ses-01_T1w.nii")
T2_file = op.join(data_path, "non_cropped", "sub-Apache_ses-01_T2w.nii")

# running workflow
segment_pnh = create_full_segment_pnh_subpipes(
    params=params,
    params_template=params_template,
    name="example_segment_macaque_ants_based")
segment_pnh.base_dir = data_path

segment_pnh.inputs.inputnode.list_T1 = [T1_file]
segment_pnh.inputs.inputnode.list_T2 = [T2_file]

segment_pnh.write_graph(graph2use="colored")
segment_pnh.config['execution'] = {'remove_unnecessary_outputs': 'false'}
segment_pnh.run()
# Example #5
def create_main_workflow(data_dir,
                         process_dir,
                         soft,
                         subjects,
                         sessions,
                         acquisitions,
                         params_file,
                         indiv_params_file,
                         wf_name="test_pipeline_single"):
    """ Set up the segmentation pipeline based on ANTS

    Arguments
    ---------
    data_dir: pathlike str
        Path to the BIDS directory that contains anatomical images

    process_dir: pathlike str
        Path to the output directory (will be created if not already
        existing). Previous outputs may be overwritten.

    soft: str
        Indicate which analysis should be launched; so far, only spm and ants
        are accepted; can be extended

    subjects: list of str (optional)
        Subject's IDs to match to BIDS specification (sub-[SUB1], sub-[SUB2]...)

    sessions: list of str (optional)
        Session's IDs to match to BIDS specification (ses-[SES1], ses-[SES2]...)

    acquisitions: list of str (optional)
        Acquisition name to match to BIDS specification (acq-[ACQ1]...)

    params_file: path to a JSON file
        JSON file that specify some parameters of the pipeline.

    indiv_params_file: path to a JSON file
        JSON file that specify some parameters of the pipeline,
        unique for the subjects/sessions.


    Returns
    -------
    workflow: nipype.pipeline.engine.Workflow


    """

    # formating args
    data_dir = op.abspath(data_dir)

    # exist_ok avoids the check-then-create race of isdir()+makedirs()
    os.makedirs(process_dir, exist_ok=True)

    # params
    params = {}
    if params_file is not None:

        print("Params:", params_file)

        assert os.path.exists(params_file), "Error with file {}".format(
            params_file)

        # context manager so the file handle is closed deterministically
        with open(params_file) as pf:
            params = json.load(pf)

    pprint.pprint(params)

    # indiv_params
    indiv_params = {}
    if indiv_params_file is not None:

        print("Multi Params:", indiv_params_file)

        assert os.path.exists(indiv_params_file), "Error with file {}".format(
            indiv_params_file)

        with open(indiv_params_file) as ipf:
            indiv_params = json.load(ipf)

        wf_name += "_indiv_params"

    pprint.pprint(indiv_params)

    # params_template: a general/template_name entry is mandatory
    assert ("general" in params.keys() and \
        "template_name" in params["general"].keys()), \
            "Error, the params.json should contains a general/template_name"

    template_name = params["general"]["template_name"]

    if "general" in params.keys() and "my_path" in params["general"].keys():
        my_path = params["general"]["my_path"]
    else:
        my_path = ""

    nmt_dir = load_test_data(template_name, path_to=my_path)
    params_template = format_template(nmt_dir, template_name)
    print(params_template)

    # soft: normalise and validate the requested analysis backend
    soft = soft.lower()
    assert soft in ["spm12", "spm", "ants", "spm_t1"], \
        "error with {}, should be among [spm12, spm, ants]".format(soft)

    wf_name += "_{}".format(soft)

    # main_workflow
    main_workflow = pe.Workflow(name=wf_name)
    main_workflow.base_dir = process_dir

    # pick the segmentation sub-pipeline matching the requested backend
    if soft in ["spm", "spm12"]:
        segment_pnh = create_full_T1xT2_segment_pnh_subpipes(
            params_template=params_template, params=params)
    elif soft == "ants":
        segment_pnh = create_full_segment_pnh_subpipes(
            params_template=params_template, params=params)
    elif soft == "spm_t1":
        segment_pnh = create_full_spm_subpipes(params_template=params_template,
                                               params=params)

    # datasource: with indiv_params the datasource also feeds the
    # per-subject/session parameters into the segmentation inputnode
    if indiv_params:
        datasource = create_datasource_indiv_params(data_dir, indiv_params,
                                                    subjects, sessions)

        main_workflow.connect(datasource, "indiv_params", segment_pnh,
                              'inputnode.indiv_params')
    else:
        datasource = create_datasource(data_dir, subjects, sessions,
                                       acquisitions)

    main_workflow.connect(datasource, 'T1', segment_pnh, 'inputnode.list_T1')
    # the T1-only SPM pipeline has no list_T2 input
    if soft != "spm_t1":
        main_workflow.connect(datasource, 'T2', segment_pnh,
                              'inputnode.list_T2')

    return main_workflow