Example #1
    def _run_interface(self, runtime):
        specs = self.inputs.template_spec
        if isdefined(self.inputs.resolution):
            specs['resolution'] = self.inputs.resolution
        if isdefined(self.inputs.atlas):
            specs['atlas'] = self.inputs.atlas
        if isdefined(self.inputs.cohort):
            specs['cohort'] = self.inputs.cohort

        name = self.inputs.template.strip(":").split(":", 1)
        if len(name) > 1:
            specs.update({
                k: v
                for modifier in name[1].split(":")
                for k, v in [tuple(modifier.split("-"))] if k not in specs
            })

        self._results['t1w_file'] = tf.get(name[0],
                                           desc=None,
                                           suffix='T1w',
                                           **specs)

        self._results['brain_mask'] = tf.get(name[0],
                                             desc='brain',
                                             suffix='mask',
                                             **specs)
        return runtime
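
For context: the template input above may carry colon-separated modifiers (e.g. "MNI152NLin2009cAsym:res-2"). A standalone sketch of that parsing step, using an illustrative template string:

specs = {}
template = "MNI152NLin2009cAsym:res-2:cohort-1"
name = template.strip(":").split(":", 1)
if len(name) > 1:
    # each "key-value" modifier fills a spec entry unless already present
    specs.update({
        k: v
        for modifier in name[1].split(":")
        for k, v in [tuple(modifier.split("-"))]
        if k not in specs
    })
print(name[0], specs)  # MNI152NLin2009cAsym {'res': '2', 'cohort': '1'}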
Example #2
def main():

    args = parse_args()
    print("\n### Running fmriprep-slurm\n")
    print(vars(args))

    print("\n# Loading pyBIDS database (it might take few hours for a big dataset)...\n")
    sing_bids_path = os.path.join(
        SINGULARITY_DATA_PATH, os.path.basename(args.bids_path))
    layout = bids.BIDSLayout(
        sing_bids_path,
        reset_database=args.force_reindex,
        ignore=(
            "code",
            "stimuli",
            "sourcedata",
            "models",
            re.compile(r"^\."),
        )
        + load_bidsignore(sing_bids_path),
    )
    job_path = os.path.join(SINGULARITY_OUTPUT_PATH, SLURM_JOB_DIR)
    if not os.path.exists(job_path):
        os.mkdir(job_path)
    
    print("\n# Prefectch templateflow templates ...\n")
    # prefectch templateflow templates
    os.environ["TEMPLATEFLOW_HOME"] = TEMPLATEFLOW_HOME
    tf_api.get(args.output_spaces + ["OASIS30ANTs", "fsLR", "fsaverage"])

    print("\n# Processing slurm files into {}\n".format(
        os.path.join(args.output_path, SLURM_JOB_DIR)))
    for job_file in run_fmriprep(layout, args):
        if args.submit:
            submit_slurm_job(job_file)
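
As this example (and Examples 14 and 22) shows, templateflow.api.get also accepts a list of template identifiers and fetches every file of each one; a minimal sketch (full templates can be large to download):

import templateflow.api as tf_api

tf_api.get(["OASIS30ANTs", "fsaverage"])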
Example #3
    def _run_interface(self, runtime):
        specs = self.inputs.template_spec
        if isdefined(self.inputs.resolution):
            specs["resolution"] = self.inputs.resolution
        if isdefined(self.inputs.atlas):
            specs["atlas"] = self.inputs.atlas
        if isdefined(self.inputs.cohort):
            specs["cohort"] = self.inputs.cohort

        name = self.inputs.template.strip(":").split(":", 1)
        if len(name) > 1:
            specs.update(
                {
                    k: v
                    for modifier in name[1].split(":")
                    for k, v in [tuple(modifier.split("-"))]
                    if k not in specs
                }
            )

        self._results["t1w_file"] = tf.get(name[0], desc=None, suffix="T1w", **specs)

        self._results["brain_mask"] = (
            tf.get(name[0], desc="brain", suffix="mask", **specs)
            or tf.get(name[0], label="brain", suffix="mask", **specs)
        )
        return runtime
Example #4
def _fetch_fslr_templates(hemi, den):
    """Fetch the necessary templates for fsaverage to fsLR transform"""
    import templateflow.api as tf

    fsaverage_sphere = str(
        tf.get('fsLR',
               space='fsaverage',
               suffix='sphere',
               hemi=hemi,
               density='164k'))
    fslr_sphere = str(
        tf.get('fsLR', space=None, suffix='sphere', hemi=hemi, density=den))
    fsaverage_midthick = str(
        tf.get('fsLR',
               space='fsaverage',
               suffix='midthickness',
               hemi=hemi,
               density='164k'))
    fslr_midthick = str(
        tf.get('fsLR',
               space=None,
               suffix='midthickness',
               hemi=hemi,
               density=den))
    return fsaverage_sphere, fslr_sphere, fsaverage_midthick, fslr_midthick
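
A usage sketch for the helper above (needs network access on first call; files are cached under TEMPLATEFLOW_HOME):

# left hemisphere at the 32k fsLR density
fsavg_sphere, fslr_sphere, fsavg_mid, fslr_mid = _fetch_fslr_templates('L', '32k')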
Example #5
def _get_cifti_data(surface, volume, subjects_dir=None, density=None):
    """
    Fetch surface and volumetric label files for CIFTI creation.

    Parameters
    ----------
    surface : str
        Target surface space
    volume : str
        Target volume space
    subjects_dir : str, optional
        Path to FreeSurfer subjects directory (required for `fsaverage5`/`fsaverage6` surfaces)
    density : str, optional
        Surface density (required for `fsLR` surfaces)

    Returns
    -------
    annotation_files : list
        Surface annotation files to allow removal of medial wall
    label_file : str
        Volumetric label file of subcortical structures

    Examples
    --------
    >>> annots, label = _get_cifti_data('fsLR', 'MNI152NLin6Asym', density='32k')
    >>> annots  # doctest: +ELLIPSIS
    ['.../tpl-fsLR_hemi-L_den-32k_desc-nomedialwall_dparc.label.gii', \
     '.../tpl-fsLR_hemi-R_den-32k_desc-nomedialwall_dparc.label.gii']
    >>> label  # doctest: +ELLIPSIS
    '.../tpl-MNI152NLin6Asym_res-02_atlas-HCP_dseg.nii.gz'

    """
    if surface not in CIFTI_SURFACES or volume not in CIFTI_VOLUMES:
        raise NotImplementedError(
            "Variant (surface: {0}, volume: {1}) is not supported".format(surface, volume)
        )

    tpl_kwargs = {'suffix': 'dseg'}
    # fMRIPrep grayordinates
    if volume == "MNI152NLin2009cAsym":
        tpl_kwargs.update({'resolution': '2', 'desc': 'DKT31'})
        annotation_files = sorted(glob(os.path.join(
            subjects_dir,
            surface,
            'label',
            '*h.aparc.annot'
        )))
    # HCP grayordinates
    elif volume == 'MNI152NLin6Asym':
        # templateflow specific resolutions (2mm, 1.6mm)
        res = {'32k': '2', '59k': '6'}[density]
        tpl_kwargs.update({'atlas': 'HCP', 'resolution': res})
        annotation_files = [
            str(f) for f in tf.get('fsLR', density=density, desc='nomedialwall', suffix='dparc')
        ]

    if len(annotation_files) != 2:
        raise IOError("Invalid number of surface annotation files")
    label_file = str(tf.get(volume, **tpl_kwargs))
    return annotation_files, label_file
Example #6
def make_parcellation(data_path, atlas, template='MNI152NLin2009cAsym', atlas_desc=None, resolution=2, parc_params=None, return_meta=False):
    """
    Performs a parcellation, reducing voxel-level brain data to regions of interest.

    Parameters
    ----------

    data_path : str
        Path to .nii image.
    atlas : str
        Specify which atlas you want to use (see github.com/templateflow/)
    template : str
        What space your data are in. If preprocessed with fMRIPrep, leave as MNI152NLin2009cAsym.
    atlas_desc : str
        Specify which description (desc) of the atlas to use.
    resolution : int
        Resolution of atlas. Can be 1 or 2.
    parc_params : dict
        **kwargs for nilearn functions.
    return_meta : bool
        If True, tries to return any meta-information that exists about the parcellation.

    Returns
    -------

    data : array
        Data after the parcellation.

    NOTE
    ----
    These functions make use of nilearn. Please cite templateflow and nilearn if used in a publication.
    """

    if not parc_params:
        parc_params = {}

    tf_get_params = {
        'template': template,
        'resolution': resolution,
        'atlas': atlas
    }
    if atlas_desc is not None:
        tf_get_params['desc'] = atlas_desc
    file = tf.get(**tf_get_params, extensions='nii.gz')

    if isinstance(file, list):
        raise ValueError('More than one template file found. Specify the type of file you need (often atlas_desc). Run: templateflow.api.TF_LAYOUT.get_descs(atlas=' +
                         atlas + ') to see available desc for atlas')

    region = NiftiLabelsMasker(str(file), **parc_params)
    data = region.fit_transform(data_path)

    if return_meta:
        meta_info = tf.get(template=template, atlas=atlas,
                           desc=atlas_desc, extensions='tsv')
        meta_info = load_tabular_file(str(meta_info))
        return data, meta_info
    else:
        return data
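
A usage sketch with illustrative inputs (the BOLD path and the Schaefer atlas spec are assumptions for demonstration):

data = make_parcellation(
    'sub-01_task-rest_space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz',
    atlas='Schaefer2018',
    atlas_desc='400Parcels7Networks',
)
# data has shape (n_timepoints, n_regions)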
Example #7
def fetch_fsaverage():
    """
    Expected templates:

    tpl-fsaverage/tpl-fsaverage_hemi-L_den-164k_desc-std_sphere.surf.gii
    tpl-fsaverage/tpl-fsaverage_hemi-R_den-164k_desc-std_sphere.surf.gii
    tpl-fsaverage/tpl-fsaverage_hemi-L_den-164k_desc-vaavg_midthickness.shape.gii
    tpl-fsaverage/tpl-fsaverage_hemi-R_den-164k_desc-vaavg_midthickness.shape.gii
    """
    template = 'fsaverage'

    tf.get(template, density='164k', desc='std', suffix='sphere')
    tf.get(template, density='164k', desc='vaavg', suffix='midthickness')
Example #8
def fetch_fsLR():
    """
    Expected templates:

    tpl-fsLR/tpl-fsLR_hemi-L_den-32k_desc-nomedialwall_dparc.label.gii
    tpl-fsLR/tpl-fsLR_hemi-L_den-32k_desc-vaavg_midthickness.shape.gii
    tpl-fsLR/tpl-fsLR_hemi-L_den-32k_sphere.surf.gii
    tpl-fsLR/tpl-fsLR_hemi-R_den-32k_desc-nomedialwall_dparc.label.gii
    tpl-fsLR/tpl-fsLR_hemi-R_den-32k_desc-vaavg_midthickness.shape.gii
    tpl-fsLR/tpl-fsLR_hemi-R_den-32k_sphere.surf.gii
    tpl-fsLR/tpl-fsLR_space-fsaverage_hemi-L_den-32k_sphere.surf.gii
    tpl-fsLR/tpl-fsLR_space-fsaverage_hemi-R_den-32k_sphere.surf.gii
    """
    tf.get('fsLR', density='32k')
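
These fetchers honor TEMPLATEFLOW_HOME, which must be set before templateflow is first imported; a sketch with an illustrative cache path:

import os
os.environ['TEMPLATEFLOW_HOME'] = '/scratch/templateflow'  # illustrative; set before the import below
import templateflow.api as tf

fetch_fsLR()  # populates /scratch/templateflow/tpl-fsLR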
Example #9
def test_transformer_nii(tmp_path):
    os.chdir(str(tmp_path))

    api_args = dict(template="MNI152NLin2009cAsym", resolution=2)
    ref_mask_file = api.get(**api_args, desc="brain", suffix="mask")
    ref_mask_img = nib.load(ref_mask_file)

    ref_mask = ref_mask_img.get_fdata() > 0
    n_voxels = np.count_nonzero(ref_mask)
    n_volumes = 10

    test_array = np.random.rand(n_voxels, n_volumes)
    test_img_data = np.zeros((*ref_mask_img.shape, n_volumes), dtype=float)
    test_img_data[ref_mask, :] = test_array

    img = new_img_like(ref_mask_img, test_img_data, copy_header=True)
    img.header.set_data_dtype(np.float64)
    test_file = "img.nii.gz"
    nib.save(img, test_file)

    tf = Transformer()
    tf.inputs.mask = ref_mask_file

    array = tf._load(test_file)

    out_file = tf._dump(array)

    img_data = nib.load(out_file).get_fdata()

    assert np.allclose(test_img_data, img_data)
Example #10
    def _run_interface(self, runtime):
        specs = self.inputs.template_spec
        if isdefined(self.inputs.resolution):
            specs['resolution'] = self.inputs.resolution
        if isdefined(self.inputs.atlas):
            specs['atlas'] = self.inputs.atlas
        self._results['t1w_file'] = tf.get(self.inputs.template,
                                           desc=None,
                                           suffix='T1w',
                                           **specs)

        self._results['brain_mask'] = tf.get(self.inputs.template,
                                             desc='brain',
                                             suffix='mask',
                                             **specs)
        return runtime
Example #11
def get_surface_meshes(density, surface_type):
    import templateflow.api as tf
    lh, rh = tf.get("fsLR",
                    density=density,
                    suffix=surface_type,
                    extension=[".surf.gii"])
    return str(lh), str(rh)
Example #12
def test_nifti_input():
    nifti = nib.load(
        tflow.get("MNI152Lin", resolution="02", desc="brain", suffix="mask"))
    edg = mesh_edges(nifti)

    assert edg.shape[1] == 2
    assert np.amax(edg) <= nifti.get_fdata().sum() - 1
Example #13
def get_atlas_path(info_dict, args):
    info_dict['atlas_desc'] = f'{args.nparcels}Parcels{args.nnetworks}Networks'
    info_dict['atlas_path'] = str(
        tflow.get(info_dict['space'],
                  desc=info_dict['atlas_desc'],
                  resolution=args.atlas_resolution,
                  atlas=args.atlas))
    return info_dict
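
A usage sketch for get_atlas_path (the Namespace fields are illustrative values matching what the helper reads):

from argparse import Namespace

args = Namespace(nparcels=400, nnetworks=7, atlas='Schaefer2018', atlas_resolution=2)
info = get_atlas_path({'space': 'MNI152NLin2009cAsym'}, args)
print(info['atlas_path'])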
Example #14
def _templateflow_setup(config_file=None, debug=False):
    if not debug:
        sys.excepthook = exception_handler
        logging.basicConfig(level=logging.INFO)
    else:
        logging.basicConfig(level=logging.DEBUG)

    config = ClpipeConfigParser()
    config.config_updater(config_file)

    templateflow_path = config.config["FMRIPrepOptions"]["TemplateFlowPath"]
    logging.info("Setting TemplateFlow storage path to " + templateflow_path)
    os.system("export TEMPLATEFLOW_HOME=" + templateflow_path)
    logging.info(
        "Downloading requested templates " +
        " ".join(config.config['FMRIPrepOptions']["TemplateFlowTemplates"]))
    api.get(config.config['FMRIPrepOptions']["TemplateFlowTemplates"])
Example #15
def fetch_MNI2009():
    """
    Expected templates:

    tpl-MNI152NLin2009cAsym/tpl-MNI152NLin2009cAsym_res-01_T1w.nii.gz
    tpl-MNI152NLin2009cAsym/tpl-MNI152NLin2009cAsym_res-02_T1w.nii.gz
    tpl-MNI152NLin2009cAsym/tpl-MNI152NLin2009cAsym_res-01_desc-brain_mask.nii.gz
    tpl-MNI152NLin2009cAsym/tpl-MNI152NLin2009cAsym_res-02_desc-brain_mask.nii.gz
    tpl-MNI152NLin2009cAsym/tpl-MNI152NLin2009cAsym_res-01_desc-carpet_dseg.nii.gz
    tpl-MNI152NLin2009cAsym/tpl-MNI152NLin2009cAsym_res-02_desc-fMRIPrep_boldref.nii.gz
    tpl-MNI152NLin2009cAsym/tpl-MNI152NLin2009cAsym_res-01_label-brain_probseg.nii.gz
    """
    template = 'MNI152NLin2009cAsym'

    tf.get(template, resolution=(1, 2), desc=None, suffix='T1w')
    tf.get(template, resolution=(1, 2), desc='brain', suffix='mask')
    tf.get(template, resolution=1, atlas=None, desc='carpet', suffix='dseg')
    tf.get(template, resolution=2, desc='fMRIPrep', suffix='boldref')
    tf.get(template, resolution=1, label="brain", suffix="probseg")
Example #16
    def from_templateflow(self, in_prefix, **kwargs):
        in_atlas_path = api.get(
            self.template, resolution=self.resolution, **kwargs
        )
        in_atlas_img = nib.load(in_atlas_path)
        in_atlas = np.asanyarray(in_atlas_img.dataobj, dtype=np.uint16)

        in_labels_path = str(first(
            filter(
                lambda f: f.suffix == ".tsv",
                api.get(self.template, **kwargs)
            )
        ))
        in_labels_df = pd.read_table(in_labels_path, sep=r"\s+", index_col=0)

        assert isinstance(in_labels_df, pd.DataFrame)

        in_labels = in_labels_df["name"]

        self.accumulate(in_prefix, in_labels, in_atlas)
Example #17
    def __init__(self, template="MNI152NLin2009cAsym", resolution=2):
        self.template = template
        self.resolution = resolution

        self.fixed_img_path = api.get(
            template, resolution=resolution, suffix="T1w", desc="brain"
        )
        self.fixed_img = nib.load(self.fixed_img_path)

        self.masks = pd.Series([], dtype=object)
        self.labels = pd.Series([], dtype=object)
Example #18
def test_volumetric_input():
    mask_image = nib.load(
        tflow.get("MNI152Lin", resolution="02", desc="brain", suffix="mask"))
    n_voxels = (mask_image.get_fdata() != 0).sum()
    n_subjects = 3
    data = np.random.rand(n_subjects, n_voxels)
    model = FixedEffect(1)
    contrast = np.ones(3)

    slm = SLM(model, contrast, surf=mask_image)
    slm.fit(data)
Example #19
def read_mni_template(resolution=1, mask=True):
    """

    Reads the MNI T2w template

    Parameters
    ----------
    resolution : int, optional
        Either 1 or 2, the resolution in mm of the voxels. Default: 1.

    mask : bool, optional
        Whether to mask the data with a brain-mask before returning the image.
        Default : True

    Returns
    -------
    nib.Nifti1Image class instance containing masked or unmasked T2 template.

    """
    template_img = nib.load(
        str(
            tflow.get('MNI152NLin2009cAsym',
                      desc=None,
                      resolution=resolution,
                      suffix='T2w',
                      extension='nii.gz')))
    if not mask:
        return template_img
    else:
        mask_img = nib.load(
            str(
                tflow.get('MNI152NLin2009cAsym',
                          resolution=resolution,
                          desc='brain',
                          suffix='mask')))

        template_data = template_img.get_fdata()
        mask_data = mask_img.get_fdata()
        out_data = template_data * mask_data
        return nib.Nifti1Image(out_data, template_img.affine)
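
A usage sketch (downloads the template on first call):

img = read_mni_template(resolution=2, mask=True)
print(img.shape)  # (97, 115, 97) on the 2 mm MNI152NLin2009cAsym grid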
Example #20
def fetch_OASIS():
    """
    Expected templates:

    tpl-OASIS30ANTs/tpl-OASIS30ANTs_res-01_T1w.nii.gz
    tpl-OASIS30ANTs/tpl-OASIS30ANTs_res-01_label-brain_probseg.nii.gz
    tpl-OASIS30ANTs/tpl-OASIS30ANTs_res-01_desc-brain_mask.nii.gz
    tpl-OASIS30ANTs/tpl-OASIS30ANTs_res-01_desc-BrainCerebellumExtraction_mask.nii.gz
    """
    template = "OASIS30ANTs"

    tf.get(template, resolution=1, desc=None, label=None, suffix='T1w')
    tf.get(template, resolution=1, label='brain', suffix='probseg')
    tf.get(template, resolution=1, label='brain', suffix='mask')
    tf.get(template, resolution=1, desc='BrainCerebellumExtraction', suffix='mask')
Example #21
def _plot_template(ax,
                   style='filled',
                   template='MNI152NLin2009cAsym',
                   templatecolor='lightgray',
                   alpha=0.2,
                   voxsize=None,
                   azim=0,
                   elev=0,
                   surface_detection=None,
                   surface_resolution=2,
                   edgethreshold=0.8,
                   hemisphere='both'):
    if isinstance(template, str):
        if not os.path.exists(template):
            if surface_detection is None and style == 'surface':
                surface_detection = _get_surface_level_for_template(template)
            tf_kwargs = {}
            # Add kwargs to specify specific templates
            if 'MNI152' in template or 'OASIS' in template:
                tf_kwargs = {
                    'suffix': 'T1w',
                    'resolution': 1,
                }
            if 'WHS' in template:
                tf_kwargs = {
                    'resolution': 1,
                }
            template = tf.get(template=template,
                              desc='brain',
                              extension='.nii.gz',
                              **tf_kwargs)
            # If multiple templates still remain, take the first.
            # This may lead to suboptimal performance for some templates.
            if isinstance(template, list):
                template = template[0]
        img = nib.load(template)
    elif isinstance(template, (nib.Nifti1Image, nib.Nifti2Image)):
        img = template
    if voxsize is not None:
        img = resample_to_output(img, [voxsize] * 3)
    data = img.get_fdata()
    data = _select_single_hemisphere_template(data, hemisphere)
    if style == 'filled':
        _plot_template_style_filled(ax, data, alpha, templatecolor)
    elif style == 'cloudy':
        _plot_template_style_cloudy(ax, data, azim, elev, edgethreshold,
                                    templatecolor, alpha)
    elif style == 'surface':
        _plot_template_style_surface(ax, data, alpha, template, templatecolor,
                                     surface_resolution, surface_detection)
    return img.affine
Example #22
def main():

    args = parse_args()

    pybids_cache_path = os.path.join(args.bids_path, PYBIDS_CACHE_PATH)

    layout = bids.BIDSLayout(
        args.bids_path,
        database_path=pybids_cache_path,
        reset_database=args.force_reindex,
        ignore=(
            "code",
            "stimuli",
            "sourcedata",
            "models",
            re.compile(r"^\."),
        ) + load_bidsignore(args.bids_path),
    )

    job_path = os.path.join(layout.root, SLURM_JOB_DIR)
    if not os.path.exists(job_path):
        os.mkdir(job_path)
        # add .slurm to .gitignore
        with open(os.path.join(layout.root, ".gitignore"), "a+") as f:
            f.seek(0)
            if not any([SLURM_JOB_DIR in l for l in f.readlines()]):
                f.write(f"{SLURM_JOB_DIR}\n")

    # prefetch templateflow templates
    os.environ["TEMPLATEFLOW_HOME"] = TEMPLATEFLOW_HOME
    import templateflow.api as tf_api

    tf_api.get(OUTPUT_TEMPLATES + ["OASIS30ANTs", "fsLR", "fsaverage"])

    for job_file in run_fmriprep(layout, args, args.preproc):
        if not args.no_submit:
            submit_slurm_job(job_file)
Example #23
def get_imgs():
    root = tk.Tk()
    root.withdraw()
    bg_img = load_img(
        str(tflow.get('MNI152NLin6Asym', desc=None, resolution=1,
                      suffix='T1w')))
    stats_img = load_img(
        fd.askopenfilename(title='select stats image',
                           filetypes=[('nifti files', '*.nii *.gz')]))
    seed_img = None
    if args.seed:
        seed_img = load_img(
            fd.askopenfilename(title='select seed image',
                               filetypes=[('nifti files', '*.nii *.gz')]))
    output_path = fd.asksaveasfilename(title='name output file')
    return bg_img, stats_img, seed_img, output_path
Example #24
def fetch_UNCInfant():
    """
    Expected templates:

    tpl-UNCInfant/cohort-1/tpl-UNCInfant_cohort-1_T1w.nii.gz
    tpl-UNCInfant/cohort-1/tpl-UNCInfant_cohort-1_label-brain_probseg.nii.gz
    tpl-UNCInfant/cohort-1/tpl-UNCInfant_cohort-1_label-brain_mask.nii.gz
    tpl-UNCInfant/cohort-1/tpl-UNCInfant_cohort-1_label-BrainCerebellumExtraction_mask.nii.gz
    """
    template = "UNCInfant"

    tf.get(template, cohort=1, desc=None, suffix='T1w')
    tf.get(template, cohort=1, label='brain', suffix='probseg')
    tf.get(template, cohort=1, label='brain', suffix='mask')
    tf.get(template,
           cohort=1,
           label='BrainCerebellumExtraction',
           suffix='mask')
Example #25
def fetch_MNI6():
    """
    Expected templates:

    tpl-MNI152NLin6Asym/tpl-MNI152NLin6Asym_res-01_T1w.nii.gz
    tpl-MNI152NLin6Asym/tpl-MNI152NLin6Asym_res-02_T1w.nii.gz
    tpl-MNI152NLin6Asym/tpl-MNI152NLin6Asym_res-01_desc-brain_mask.nii.gz
    tpl-MNI152NLin6Asym/tpl-MNI152NLin6Asym_res-02_desc-brain_mask.nii.gz
    tpl-MNI152NLin6Asym/tpl-MNI152NLin6Asym_res-02_atlas-HCP_dseg.nii.gz
    """
    template = 'MNI152NLin6Asym'

    tf.get(template, resolution=(1, 2), desc=None, suffix='T1w')
    tf.get(template, resolution=(1, 2), desc='brain', suffix='mask')
    # CIFTI
    tf.get(template, resolution=2, atlas='HCP', suffix='dseg')
Example #26
def medial_wall_to_nan(in_file, subjects_dir, den=None, newpath=None):
    """Convert values on medial wall to NaNs."""
    import os
    import nibabel as nb
    import numpy as np
    import templateflow.api as tf

    fn = os.path.basename(in_file)
    target_subject = in_file.split(".")[1]
    if not target_subject.startswith('fs'):
        return in_file

    func = nb.load(in_file)
    if target_subject.startswith('fsaverage'):
        cortex = nb.freesurfer.read_label(
            os.path.join(subjects_dir, target_subject, 'label',
                         '{}.cortex.label'.format(fn[:2])))
        medial = np.delete(np.arange(len(func.darrays[0].data)), cortex)
    elif target_subject == 'fslr' and den is not None:
        hemi = fn[0].upper()
        label_file = str(
            tf.get('fsLR',
                   hemi=hemi,
                   desc='nomedialwall',
                   density=den,
                   suffix='dparc'))
        label = nb.load(label_file)
        medial = np.invert(label.darrays[0].data.astype(bool))
    else:
        return in_file

    for darray in func.darrays:
        darray.data[medial] = np.nan

    out_file = os.path.join(newpath or os.getcwd(), fn)
    func.to_filename(out_file)
    return out_file
Example #27
def test_zscore(tmp_path):
    os.chdir(str(tmp_path))

    api_args = dict(template="MNI152NLin2009cAsym", resolution=2)
    ref_mask_file = api.get(**api_args, desc="brain", suffix="mask")
    ref_mask_img = nib.load(ref_mask_file)

    ref_mask = ref_mask_img.get_fdata() > 0
    n_voxels = np.count_nonzero(ref_mask)

    test_data = np.random.rand(n_voxels)
    assert not isclose(np.mean(test_data), 0, abs_tol=abs_tol)
    assert not isclose(np.std(test_data), 1, abs_tol=abs_tol)

    test_img_data = np.zeros(ref_mask_img.shape, dtype=float)
    test_img_data[ref_mask] = test_data

    img = new_img_like(ref_mask_img, test_img_data, copy_header=True)
    assert isinstance(img.header, nib.Nifti1Header)
    img.header.set_data_dtype(np.float64)
    test_file = "img.nii.gz"
    nib.save(img, test_file)

    instance = ZScore()
    instance.inputs.in_file = test_file
    instance.inputs.mask = ref_mask_file

    result = instance.run()
    assert result.outputs is not None

    out_img = nib.load(result.outputs.out_file)
    out_img_data = out_img.get_fdata()
    out_data = out_img_data[ref_mask]

    assert isclose(np.mean(out_data), 0, abs_tol=abs_tol)
    assert isclose(np.std(out_data), 1, abs_tol=abs_tol)
Example #28
def main():
    """Entry point"""
    from nipype import logging as nlogging
    from multiprocessing import set_start_method, Process, Manager
    from ..utils.bids import write_derivative_description, validate_input_dir
    set_start_method('forkserver')
    warnings.showwarning = _warn_redirect
    opts = get_parser().parse_args()

    exec_env = os.name

    # special variable set in the container
    if os.getenv('IS_DOCKER_8395080871'):
        exec_env = 'singularity'
        cgroup = Path('/proc/1/cgroup')
        if cgroup.exists() and 'docker' in cgroup.read_text():
            exec_env = 'docker'
            if os.getenv('DOCKER_VERSION_8395080871'):
                exec_env = 'fmriprep-docker'

    sentry_sdk = None
    if not opts.notrack:
        import sentry_sdk
        from ..utils.sentry import sentry_setup
        sentry_setup(opts, exec_env)

    if opts.debug:
        print('WARNING: Option --debug is deprecated and has no effect',
              file=sys.stderr)

    # Validate inputs
    if not opts.skip_bids_validation:
        print(
            "Making sure the input data is BIDS compliant (warnings can be ignored in most "
            "cases).")
        validate_input_dir(exec_env, opts.bids_dir, opts.participant_label)

    # FreeSurfer license
    default_license = str(Path(os.getenv('FREESURFER_HOME')) / 'license.txt')
    # Precedence: --fs-license-file, $FS_LICENSE, default_license
    license_file = opts.fs_license_file or Path(
        os.getenv('FS_LICENSE', default_license))
    if not license_file.exists():
        raise RuntimeError("""\
ERROR: a valid license file is required for FreeSurfer to run. fMRIPrep looked for an existing \
license file at several paths, in this order: 1) command line argument ``--fs-license-file``; \
2) ``$FS_LICENSE`` environment variable; and 3) the ``$FREESURFER_HOME/license.txt`` path. Get it \
(for free) by registering at https://surfer.nmr.mgh.harvard.edu/registration.html"""
                           )
    os.environ['FS_LICENSE'] = str(license_file.resolve())

    # Retrieve logging level
    log_level = int(max(25 - 5 * opts.verbose_count, logging.DEBUG))
    # Set logging
    logger.setLevel(log_level)
    nlogging.getLogger('nipype.workflow').setLevel(log_level)
    nlogging.getLogger('nipype.interface').setLevel(log_level)
    nlogging.getLogger('nipype.utils').setLevel(log_level)

    # Call build_workflow(opts, retval)
    with Manager() as mgr:
        retval = mgr.dict()
        p = Process(target=build_workflow, args=(opts, retval))
        p.start()
        p.join()

        retcode = p.exitcode or retval.get('return_code', 0)

        bids_dir = Path(retval.get('bids_dir'))
        output_dir = Path(retval.get('output_dir'))
        work_dir = Path(retval.get('work_dir'))
        plugin_settings = retval.get('plugin_settings', None)
        subject_list = retval.get('subject_list', None)
        fmriprep_wf = retval.get('workflow', None)
        run_uuid = retval.get('run_uuid', None)

    if opts.reports_only:
        sys.exit(int(retcode > 0))

    if opts.boilerplate:
        sys.exit(int(retcode > 0))

    if fmriprep_wf and opts.write_graph:
        fmriprep_wf.write_graph(graph2use="colored",
                                format='svg',
                                simple_form=True)

    retcode = retcode or int(fmriprep_wf is None)
    if retcode != 0:
        sys.exit(retcode)

    # Check workflow for missing commands
    missing = check_deps(fmriprep_wf)
    if missing:
        print("Cannot run fMRIPrep. Missing dependencies:", file=sys.stderr)
        for iface, cmd in missing:
            print("\t{} (Interface: {})".format(cmd, iface))
        sys.exit(2)
    # Clean up master process before running workflow, which may create forks
    gc.collect()

    # Sentry tracking
    if not opts.notrack:
        from ..utils.sentry import start_ping
        start_ping(run_uuid, len(subject_list))

    errno = 1  # Default is error exit unless otherwise set
    try:
        fmriprep_wf.run(**plugin_settings)
    except Exception as e:
        if not opts.notrack:
            from ..utils.sentry import process_crashfile
            crashfolders = [
                output_dir / 'fmriprep' / 'sub-{}'.format(s) / 'log' / run_uuid
                for s in subject_list
            ]
            for crashfolder in crashfolders:
                for crashfile in crashfolder.glob('crash*.*'):
                    process_crashfile(crashfile)

            if "Workflow did not execute cleanly" not in str(e):
                sentry_sdk.capture_exception(e)
        logger.critical('fMRIPrep failed: %s', e)
        raise
    else:
        if opts.run_reconall:
            from templateflow import api
            from niworkflows.utils.misc import _copy_any
            dseg_tsv = str(
                api.get('fsaverage', suffix='dseg', extension=['.tsv']))
            _copy_any(dseg_tsv,
                      str(output_dir / 'fmriprep' / 'desc-aseg_dseg.tsv'))
            _copy_any(dseg_tsv,
                      str(output_dir / 'fmriprep' / 'desc-aparcaseg_dseg.tsv'))
        errno = 0
        logger.log(25, 'fMRIPrep finished without errors')
        if not opts.notrack:
            sentry_sdk.capture_message('fMRIPrep finished without errors',
                                       level='info')
    finally:
        from niworkflows.reports import generate_reports
        from subprocess import check_call, CalledProcessError, TimeoutExpired
        from pkg_resources import resource_filename as pkgrf
        from shutil import copyfile

        citation_files = {
            ext: output_dir / 'fmriprep' / 'logs' / ('CITATION.%s' % ext)
            for ext in ('bib', 'tex', 'md', 'html')
        }

        if citation_files['md'].exists():
            # Generate HTML file resolving citations
            cmd = [
                'pandoc', '-s', '--bibliography',
                pkgrf('fmriprep',
                      'data/boilerplate.bib'), '--filter', 'pandoc-citeproc',
                '--metadata', 'pagetitle="fMRIPrep citation boilerplate"',
                str(citation_files['md']), '-o',
                str(citation_files['html'])
            ]

            logger.info(
                'Generating an HTML version of the citation boilerplate...')
            try:
                check_call(cmd, timeout=10)
            except (FileNotFoundError, CalledProcessError, TimeoutExpired):
                logger.warning('Could not generate CITATION.html file:\n%s',
                               ' '.join(cmd))

            # Generate LaTex file resolving citations
            cmd = [
                'pandoc', '-s', '--bibliography',
                pkgrf('fmriprep', 'data/boilerplate.bib'), '--natbib',
                str(citation_files['md']), '-o',
                str(citation_files['tex'])
            ]
            logger.info(
                'Generating a LaTeX version of the citation boilerplate...')
            try:
                check_call(cmd, timeout=10)
            except (FileNotFoundError, CalledProcessError, TimeoutExpired):
                logger.warning('Could not generate CITATION.tex file:\n%s',
                               ' '.join(cmd))
            else:
                copyfile(pkgrf('fmriprep', 'data/boilerplate.bib'),
                         citation_files['bib'])
        else:
            logger.warning(
                'fMRIPrep could not find the markdown version of '
                'the citation boilerplate (%s). HTML and LaTeX versions'
                ' of it will not be available', citation_files['md'])

        # Generate reports phase
        failed_reports = generate_reports(subject_list,
                                          output_dir,
                                          work_dir,
                                          run_uuid,
                                          packagename='fmriprep')
        write_derivative_description(bids_dir, output_dir / 'fmriprep')

        if failed_reports and not opts.notrack:
            sentry_sdk.capture_message(
                'Report generation failed for %d subjects' % failed_reports,
                level='error')
        sys.exit(int((errno + failed_reports) > 0))
Example #29
def main():
    """Entry point."""
    import os
    import sys
    import gc
    from multiprocessing import Process, Manager
    from .parser import parse_args
    from ..utils.bids import write_derivative_description

    parse_args()

    popylar = None
    if not config.execution.notrack:
        import popylar
        from ..__about__ import __ga_id__
        config.loggers.cli.info(
            "Your usage of dmriprep is being recorded using popylar (https://popylar.github.io/). "  # noqa
            "For details, see https://nipreps.github.io/dmriprep/usage.html. "
            "To opt out, call dmriprep with a `--notrack` flag")
        popylar.track_event(__ga_id__, 'run', 'cli_run')

    # CRITICAL Save the config to a file. This is necessary because the execution graph
    # is built as a separate process to keep the memory footprint low. The most
    # straightforward way to communicate with the child process is via the filesystem.
    config_file = config.execution.work_dir / '.dmriprep.toml'
    config.to_filename(config_file)

    # CRITICAL Call build_workflow(config_file, retval) in a subprocess.
    # Because Python on Linux does not ever free virtual memory (VM), running the
    # workflow construction jailed within a process preempts excessive VM buildup.
    with Manager() as mgr:
        from .workflow import build_workflow
        retval = mgr.dict()
        p = Process(target=build_workflow, args=(str(config_file), retval))
        p.start()
        p.join()

        retcode = p.exitcode or retval.get('return_code', 0)
        dmriprep_wf = retval.get('workflow', None)

    # CRITICAL Load the config from the file. This is necessary because the ``build_workflow``
    # function executed constrained in a process may change the config (and thus the global
    # state of dMRIPrep).
    config.load(config_file)

    if config.execution.reports_only:
        sys.exit(int(retcode > 0))

    if dmriprep_wf and config.execution.write_graph:
        dmriprep_wf.write_graph(graph2use="colored",
                                format='svg',
                                simple_form=True)

    retcode = retcode or (dmriprep_wf is None) * os.EX_SOFTWARE
    if retcode != 0:
        sys.exit(retcode)

    # Generate boilerplate
    with Manager() as mgr:
        from .workflow import build_boilerplate
        p = Process(target=build_boilerplate,
                    args=(str(config_file), dmriprep_wf))
        p.start()
        p.join()

    if config.execution.boilerplate_only:
        sys.exit(int(retcode > 0))

    # Clean up master process before running workflow, which may create forks
    gc.collect()

    if popylar is not None:
        popylar.track_event(__ga_id__, 'run', 'started')

    config.loggers.workflow.log(
        15, '\n'.join(['dMRIPrep config:'] +
                      ['\t\t%s' % s for s in config.dumps().splitlines()]))
    config.loggers.workflow.log(25, 'dMRIPrep started!')
    errno = 1  # Default is error exit unless otherwise set
    try:
        dmriprep_wf.run(**config.nipype.get_plugin())
    except Exception as e:
        if not config.execution.notrack:
            popylar.track_event(__ga_id__, 'run', 'error')
        config.loggers.workflow.critical('dMRIPrep failed: %s', e)
        raise
    else:
        config.loggers.workflow.log(25, 'dMRIPrep finished successfully!')

        # Bother users with the boilerplate only iff the workflow went okay.
        if (config.execution.output_dir / 'dmriprep' / 'logs' /
                'CITATION.md').exists():
            config.loggers.workflow.log(
                25, 'Works derived from this dMRIPrep execution should '
                'include the following boilerplate:\n\n%s',
                (config.execution.output_dir / 'dmriprep' / 'logs' /
                 'CITATION.md').read_text())

        if config.workflow.run_reconall:
            from templateflow import api
            from niworkflows.utils.misc import _copy_any
            dseg_tsv = str(
                api.get('fsaverage', suffix='dseg', extension=['.tsv']))
            _copy_any(
                dseg_tsv,
                str(config.execution.output_dir / 'dmriprep' /
                    'desc-aseg_dseg.tsv'))
            _copy_any(
                dseg_tsv,
                str(config.execution.output_dir / 'dmriprep' /
                    'desc-aparcaseg_dseg.tsv'))
        errno = 0
    finally:
        from niworkflows.reports import generate_reports
        from pkg_resources import resource_filename as pkgrf

        # Generate reports phase
        failed_reports = generate_reports(config.execution.participant_label,
                                          config.execution.output_dir,
                                          config.execution.work_dir,
                                          config.execution.run_uuid,
                                          config=pkgrf(
                                              'dmriprep',
                                              'config/reports-spec.yml'),
                                          packagename='dmriprep')
        write_derivative_description(config.execution.bids_dir,
                                     config.execution.output_dir / 'dmriprep')

        if failed_reports and not config.execution.notrack:
            popylar.track_event(__ga_id__, 'run', 'reporting_error')
        sys.exit(int((errno + failed_reports) > 0))
Example #30
def build_opts(opts):
    """Trigger a new process that builds the workflow graph, based on the input options."""
    import os
    from pathlib import Path
    import logging
    import sys
    import gc
    import warnings
    from multiprocessing import set_start_method, Process, Manager
    from nipype import logging as nlogging
    from niworkflows.utils.misc import check_valid_fs_license

    set_start_method('forkserver')

    logging.addLevelName(
        25, 'IMPORTANT')  # Add a new level between INFO and WARNING
    logging.addLevelName(15,
                         'VERBOSE')  # Add a new level between INFO and DEBUG
    logger = logging.getLogger('cli')

    def _warn_redirect(message,
                       category,
                       filename,
                       lineno,
                       file=None,
                       line=None):
        logger.warning('Captured warning (%s): %s', category, message)

    warnings.showwarning = _warn_redirect

    # Precedence: --fs-license-file, $FS_LICENSE, default_license
    if opts.fs_license_file is not None:
        os.environ["FS_LICENSE"] = os.path.abspath(opts.fs_license_file)

    if not check_valid_fs_license():
        raise RuntimeError(
            'ERROR: a valid license file is required for FreeSurfer to run. '
            'sMRIPrep looked for an existing license file at several paths, in this '
            'order: 1) command line argument ``--fs-license-file``; 2) ``$FS_LICENSE`` '
            'environment variable; and 3) the ``$FREESURFER_HOME/license.txt`` path. '
            'Get it (for free) by registering at https://'
            'surfer.nmr.mgh.harvard.edu/registration.html')

    # Retrieve logging level
    log_level = int(max(25 - 5 * opts.verbose_count, logging.DEBUG))
    # Set logging
    logger.setLevel(log_level)
    nlogging.getLogger('nipype.workflow').setLevel(log_level)
    nlogging.getLogger('nipype.interface').setLevel(log_level)
    nlogging.getLogger('nipype.utils').setLevel(log_level)

    errno = 0

    # Call build_workflow(opts, retval)
    with Manager() as mgr:
        retval = mgr.dict()
        p = Process(target=build_workflow, args=(opts, retval))
        p.start()
        p.join()

        if p.exitcode != 0:
            sys.exit(p.exitcode)

        smriprep_wf = retval['workflow']
        plugin_settings = retval['plugin_settings']
        bids_dir = retval['bids_dir']
        output_dir = retval['output_dir']
        subject_list = retval['subject_list']
        run_uuid = retval['run_uuid']
        retcode = retval['return_code']

    if smriprep_wf is None:
        sys.exit(1)

    if opts.write_graph:
        smriprep_wf.write_graph(graph2use="colored",
                                format='svg',
                                simple_form=True)

    if opts.reports_only:
        sys.exit(int(retcode > 0))

    if opts.boilerplate:
        sys.exit(int(retcode > 0))

    # Check workflow for missing commands
    missing = check_deps(smriprep_wf)
    if missing:
        print("Cannot run sMRIPrep. Missing dependencies:")
        for iface, cmd in missing:
            print("\t{} (Interface: {})".format(cmd, iface))
        sys.exit(2)

    # Clean up master process before running workflow, which may create forks
    gc.collect()
    try:
        smriprep_wf.run(**plugin_settings)
    except RuntimeError:
        errno = 1
    else:
        if opts.run_reconall:
            from templateflow import api
            from niworkflows.utils.misc import _copy_any
            dseg_tsv = str(
                api.get('fsaverage', suffix='dseg', extension=['.tsv']))
            _copy_any(
                dseg_tsv,
                str(Path(output_dir) / 'smriprep' / 'desc-aseg_dseg.tsv'))
            _copy_any(
                dseg_tsv,
                str(Path(output_dir) / 'smriprep' / 'desc-aparcaseg_dseg.tsv'))
        logger.log(25, 'sMRIPrep finished without errors')
    finally:
        from niworkflows.reports import generate_reports
        from ..utils.bids import write_derivative_description

        logger.log(25, 'Writing reports for participants: %s',
                   ', '.join(subject_list))
        # Generate reports phase
        errno += generate_reports(subject_list,
                                  output_dir,
                                  run_uuid,
                                  packagename='smriprep')
        write_derivative_description(bids_dir,
                                     str(Path(output_dir) / 'smriprep'))
    sys.exit(int(errno > 0))