Example #1
def _set_volume_masker(roi_file, as_voxels=False, **kwargs):
    """Check and see if multiple ROIs exist in atlas file"""

    if not isinstance(roi_file, str):
        raise ValueError('roi_file must be a file name string')

    if roi_file.endswith('.csv') or roi_file.endswith('.tsv'):
        roi = _read_coords(roi_file)
        n_rois = len(roi)
        print('  {} region(s) detected from coordinates'.format(n_rois))

        if kwargs.get('radius') is None:
            warnings.warn('No radius specified for coordinates; setting '
                          'to nilearn.input_data.NiftiSpheresMasker default '
                          'of extracting from a single voxel')
        masker = NiftiSpheresMasker(roi, **kwargs)
    
    elif roi_file.endswith('.nii.gz'):
        # remove args that only apply to NiftiSpheresMasker
        if 'radius' in kwargs:
            kwargs.pop('radius')
        if 'allow_overlap' in kwargs:
            kwargs.pop('allow_overlap')
    
        roi_img = image.load_img(roi_file)
        if len(roi_img.shape) == 4:
            n_rois = roi_img.shape[-1]
            print('  {} region(s) detected from {}'.format(n_rois,
                                                           roi_img.get_filename()))
            masker = NiftiMapsMasker(roi_img, allow_overlap=True, **kwargs)
        else:
            n_rois = len(np.unique(roi_img.get_fdata())) - 1
            print('  {} region(s) detected from {}'.format(n_rois,
                                                           roi_img.get_filename()))
            if n_rois > 1:
                masker = NiftiLabelsMasker(roi_img, **kwargs)
            elif n_rois == 1:
                # binary mask for single ROI 
                if as_voxels:
                    if 'mask_img' in kwargs:
                        kwargs.pop('mask_img')
                    masker = NiftiMasker(roi_img, **kwargs)
                else:
                    # more computationally efficient if only wanting the mean
                    masker = NiftiLabelsMasker(roi_img, **kwargs)
            else:
                raise ValueError('No ROI detected; check ROI file')
    
    else:
        raise ValueError('Invalid file type for roi_file. Must be one of: '
                         '.nii.gz, .csv, .tsv')
    
    return masker, n_rois
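A minimal usage sketch for the function above; 'coords.tsv' and 'func.nii.gz' are hypothetical file names and the 6 mm radius is an arbitrary choice:

# Hypothetical usage: spheres around the coordinates listed in coords.tsv
masker, n_rois = _set_volume_masker('coords.tsv', radius=6, standardize=True)
timeseries = masker.fit_transform('func.nii.gz')  # (n_timepoints, n_rois)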
Example #2
def make_masker(scheme):
    '''
    Parameters
    ----------
    scheme : str
        The type of parcellation wanted.

    Returns
    -------
    masker: nilearn.input_data.NiftiLabelsMasker
        Masker of the chosen scheme.
    labels: list
        Labels of all the regions in parcellation.
    '''
    if scheme.lower() == "harvox":  # 48 regions
        dataset = datasets.fetch_atlas_harvard_oxford('cort-maxprob-thr25-2mm')
        atlas_filename = dataset.maps
        labels = dataset.labels[1:]  # trim off "background" label
        masker = NiftiLabelsMasker(labels_img=atlas_filename,
                                   standardize=True,
                                   high_variance_confounds=True,
                                   verbose=1)
    elif scheme.lower() == "yeo":  # 17 regions
        dataset = datasets.fetch_atlas_yeo_2011()
        masker = NiftiLabelsMasker(labels_img=dataset['thick_17'],
                                   standardize=True,
                                   high_variance_confounds=True,
                                   verbose=1)
        labels = [
            "Visual A", "Visual B", "Somatomotor A", "Somatomotor B",
            "Dorsal Attention A", "Dorsal Attention B",
            "Salience/Ventral Attention A", "Salience/Ventral Attention B",
            "Limbic A", "Limbic B", "Control C", "Control A", "Control B",
            "Temporal Parietal", "Default C", "Default A", "Default B"
        ]  # list from valerie-jzr
    elif scheme.lower() == "aal":  # 116 regions
        dataset = datasets.fetch_atlas_aal(version='SPM12')
        labels = dataset['labels']
        masker = NiftiLabelsMasker(labels_img=dataset['maps'],
                                   standardize=True,
                                   high_variance_confounds=True,
                                   verbose=1)
    elif scheme.lower() == "schaefer":
        dataset = datasets.fetch_atlas_schaefer_2018(n_rois=100,
                                                     yeo_networks=17)
        labels = dataset['labels']
        masker = NiftiLabelsMasker(labels_img=dataset['maps'],
                                   standardize=True,
                                   high_variance_confounds=True,
                                   verbose=1)
    return masker, labels
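A usage sketch, assuming a hypothetical preprocessed 4D image 'func.nii.gz' in MNI space:

# Hypothetical usage: one column of time series per AAL region
masker, labels = make_masker("aal")
time_series = masker.fit_transform("func.nii.gz")
assert time_series.shape[1] == len(labels)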
Example #3
    def apply_mask(self, atlas="AAL"):

        if atlas == "AAL":
            # load atlas
            atlas_filename = datasets.fetch_atlas_aal(version="SPM12",
                                                      verbose=0).maps
        elif atlas == "multiscale":
            raise NotImplementedError()
        else:
            raise ValueError("Altas should be 'AAL' or 'multiscale'")

        # set mask
        masker = NiftiLabelsMasker(labels_img=atlas_filename,
                                   standardize=True,
                                   detrend=True,
                                   low_pass=0.08,
                                   high_pass=0.01,
                                   t_r=3.7,
                                   memory="nilearn_cache",
                                   verbose=0)

        # apply mask to data
        confounds = high_variance_confounds(self.fmri_filename,
                                            n_confounds=1,
                                            detrend=True)
        ts_hvar = masker.fit_transform(self.fmri_filename, confounds=confounds)

        return ts_hvar
Example #4
def compute_connectivity_subjects(func_list, atlas, mask, conn, n_jobs=-1):
    """ Returns connectivities for all subjects
    tril matrix n_subjects * n_rois_tril
    """
    if len(nib.load(atlas).shape) == 4:
        masker = NiftiMapsMasker(maps_img=atlas,
                                 mask_img=mask,
                                 detrend=True,
                                 low_pass=.1,
                                 high_pass=.01,
                                 t_r=3.,
                                 resampling_target='data',
                                 smoothing_fwhm=6,
                                 memory=CACHE_DIR,
                                 memory_level=2)
    else:
        masker = NiftiLabelsMasker(labels_img=atlas,
                                   mask_img=mask,
                                   t_r=3.,
                                   detrend=True,
                                   low_pass=.1,
                                   high_pass=.01,
                                   resampling_target='data',
                                   smoothing_fwhm=6,
                                   memory=CACHE_DIR,
                                   memory_level=2)

    p = Parallel(n_jobs=n_jobs, verbose=5)(delayed(
                 compute_connectivity_subject)(conn, func, masker)\
                 for func in func_list)
    return np.asarray(p)
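The per-subject worker compute_connectivity_subject is not shown here; a plausible sketch, assuming conn is a nilearn ConnectivityMeasure-style estimator:

import numpy as np

def compute_connectivity_subject(conn, func, masker):
    # Hypothetical worker matching the Parallel call above: extract regional
    # time series, then keep the strict lower triangle of the subject's
    # connectivity matrix to match the "tril" output described above.
    ts = masker.fit_transform(func)
    mat = conn.fit_transform([ts])[0]
    return mat[np.tril_indices_from(mat, k=-1)]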
Example #5
    def from_fmri_data(
        cls,
        datafile: str,
        atlas: Optional[str] = None,
        confounds: Optional[str] = None,
        **kwargs,
    ):
        """Take a 4D dataset and generate signals from the atlas parcels.
        """
        if atlas is None:
            atlas = MIST_ATLAS_444
            kind = "atlas"
        else:
            kind = "atlas_custom"

        # Resampling target should be the image with lowest resolution.
        # Assuming that the data resolution is isotropic for now.
        atlas_res = nib.load(atlas).header["pixdim"][1]
        data_res = nib.load(datafile).header["pixdim"][1]
        resampling_target = "data" if data_res > atlas_res else "labels"

        masker = NiftiLabelsMasker(labels_img=atlas,
                                   standardize=True,
                                   resampling_target=resampling_target)
        signals = masker.fit_transform(datafile)
        atlasrois = atlas_roitovol(atlas, nrois=signals.shape[-1])
        return cls(timeseries=signals,
                   maps=atlasrois,
                   kind=kind,
                   confounds=confounds,
                   **kwargs)
Example #6
def timeSeries(func_files, confound_files, atlas_filename):
    # This function receives a list of functional files and a list of matching
    # confound files and outputs an array of per-subject time series
    from nilearn.input_data import NiftiLabelsMasker
    # define masker here
    masker = NiftiLabelsMasker(
        labels_img=atlas_filename,
        standardize=True,
        smoothing_fwhm=6,
        memory="nilearn_cashe",
        t_r=1,
        verbose=5,
        high_pass=.01,
        low_pass=.1
    )  # band-pass filter at high_pass=.01, low_pass=.1 (consider omitting for task-based designs)
    total_subjects = [
    ]  # creating an empty array that will hold all subjects matrix
    # This function needs a masker object that will be defined outside the function
    for func_file, confound_file in zip(func_files, confound_files):
        print(f"proccessing file {func_file}")  # print file name
        confoundClean = removeVars(confound_file)
        confoundArray = confoundClean  #confoundClean.values
        time_series = masker.fit_transform(func_file, confounds=confoundArray)
        #time_series = extractor.fit_transform(func_file, confounds=confoundArray)
        #masker.fit_transform(func_file, confoundArray)
        total_subjects.append(time_series)
    return total_subjects
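removeVars is not defined in this snippet; a plausible sketch that keeps six motion parameters from an fMRIPrep-style confounds file (the column names are an assumption):

import pandas as pd

def removeVars(confound_file):
    # Hypothetical helper: select motion regressors and fill initial NaNs
    cols = ['trans_x', 'trans_y', 'trans_z', 'rot_x', 'rot_y', 'rot_z']
    return pd.read_csv(confound_file, sep='\t')[cols].fillna(0).values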
Example #7
    def _run_interface(self, runtime):
        fname = self.inputs.fmri_denoised
        entities = parse_file_entities(fname)
        bold_img = nb.load(fname)
        parcellation_file = get_parcellation_file_path(entities['space'])
        masker = NiftiLabelsMasker(labels_img=parcellation_file,
                                   standardize=True)
        time_series = masker.fit_transform(bold_img, confounds=None)

        corr_measure = ConnectivityMeasure(kind='correlation')
        corr_mat = corr_measure.fit_transform([time_series])[0]
        entities['pipeline'] = extract_pipeline_from_path(fname)
        conn_file = join(self.inputs.output_dir,
                         build_path(entities, self.conn_file_pattern, False))
        carpet_plot_file = join(
            self.inputs.output_dir,
            build_path(entities, self.carpet_plot_pattern, False))
        matrix_plot_file = join(
            self.inputs.output_dir,
            build_path(entities, self.matrix_plot_pattern, False))

        make_carpetplot(time_series, carpet_plot_file)
        mplot = plot_matrix(corr_mat, vmin=-1, vmax=1)
        mplot.figure.savefig(matrix_plot_file)

        np.save(conn_file, corr_mat)

        self._results['corr_mat'] = conn_file
        self._results['carpet_plot'] = carpet_plot_file
        self._results['matrix_plot'] = matrix_plot_file

        return runtime
Example #8
 def __init__(self, data_config, mode="train"):
     super(TransoformerDataset, self).__init__(data_config, mode)
     self.masker = NiftiLabelsMasker(
         labels_img=
         f"{data_config.workdir}/input/Schaefer2018_400Parcels_7Networks_order_FSLMNI152_1mm.nii.gz",
         standardize=True,
     )
Example #9
def series_times_ROI(Maps, func, typeF):
    from nilearn.input_data import NiftiLabelsMasker, NiftiMapsMasker
    from nilearn import plotting
    import scipy.io as sio
    import numpy as np
    import os
    ##################################
    Resul = os.getcwd()  #+'-Results'
    n_map = Maps[Maps.rfind('/') + 1:][:Maps[Maps.rfind('/') + 1:].find('.')]
    n_plot = 'empty_plot'
    #os.system('mkdir '+Resul)
    ##################################
    if typeF == 'Labels':
        masker = NiftiLabelsMasker(labels_img=Maps, standardize=True)
        plot_atlas = plotting.plot_roi(Maps)
        n_plot = Resul + '/Atlas_' + n_map + '_' + typeF + '.svg'
        plot_atlas.savefig(n_plot)
    elif typeF == 'Maps':
        masker = NiftiMapsMasker(maps_img=Maps,
                                 standardize=True,
                                 memory='nilearn_cache',
                                 verbose=5)
    else:
        raise ValueError("typeF must be 'Labels' or 'Maps'")

    time_series = masker.fit_transform(func)
    print('Time series shape: ', np.shape(time_series))
    out_mat = Resul + '/Time_series_' + n_map + '_' + typeF + '.mat'
    sio.savemat(out_mat, {'time_series': time_series})

    return out_mat, n_plot
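A usage sketch with hypothetical file names; 'Labels' selects the NiftiLabelsMasker branch and also saves an atlas plot:

# Hypothetical call: saves per-region time series to a .mat file
out_mat, out_plot = series_times_ROI('aal_labels.nii.gz', 'func.nii.gz', 'Labels')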
Example #10
def connectivity_data():
    """Fixture for connectivity tests."""

    base_dir = os.path.abspath(pkg_resources.resource_filename(
        "pynets", "../data/examples"))
    func_file = f"{base_dir}/BIDS/sub-25659/ses-1/func/" \
                f"sub-25659_ses-1_task-rest_space-T1w_desc-preproc_bold.nii.gz"
    mask_file = f"{base_dir}/BIDS/sub-25659/ses-1/func/" \
                f"sub-25659_ses-1_task-rest_space-T1w_desc-preproc_" \
                f"bold_mask.nii.gz"
    parcellation = pkg_resources.resource_filename(
        "pynets", "templates/atlases/DesikanKlein2012.nii.gz"
    )

    masker = NiftiLabelsMasker(
        labels_img=nib.load(parcellation), background_label=0,
        resampling_target="labels", dtype="auto",
        mask_img=nib.load(mask_file), standardize=True)

    time_series = masker.fit_transform(func_file)
    conn_measure = ConnectivityMeasure(
        kind="correlation")
    conn_matrix = conn_measure.fit_transform([time_series])[0]
    [coords, _, _, label_intensities] = \
        get_names_and_coords_of_parcels(parcellation)

    labels = ['ROI_' + str(idx) for idx, val in enumerate(label_intensities)]

    yield {'time_series': time_series, 'conn_matrix': conn_matrix,
           'labels': labels, 'coords': coords, 'indices': label_intensities}
Example #11
    def _run_interface(self, runtime):
        fname = self.inputs.fmri_denoised
        bold_img = nb.load(fname)
        masker = NiftiLabelsMasker(labels_img=self.inputs.parcellation,
                                   standardize=True)
        time_series = masker.fit_transform(bold_img, confounds=None)

        corr_measure = ConnectivityMeasure(kind='correlation')
        corr_mat = corr_measure.fit_transform([time_series])[0]
        _, base, _ = split_filename(fname)

        conn_file = f'{self.inputs.output_dir}/{base}_conn_mat.npy'

        carpet_plot_file = join(self.inputs.output_dir,
                                f'{base}_carpet_plot.png')
        matrix_plot_file = join(self.inputs.output_dir,
                                f'{base}_matrix_plot.png')

        create_carpetplot(time_series, carpet_plot_file)
        mplot = plot_matrix(corr_mat, vmin=-1, vmax=1)
        mplot.figure.savefig(matrix_plot_file)

        np.save(conn_file, corr_mat)

        self._results['corr_mat'] = conn_file
        self._results['carpet_plot'] = carpet_plot_file
        self._results['matrix_plot'] = matrix_plot_file

        return runtime
Example #12
def debiasing(data_file, mask, mtx, idx_u, idx_v, tr, out_dir, history_str):
    """
    Perform debiasing based on denoised edge-time matrix.
    """
    print("Performing debiasing based on denoised edge-time matrix...")
    masker = NiftiLabelsMasker(
        labels_img=mask,
        standardize=False,
        memory="nilearn_cache",
        strategy="mean",
    )

    # Read data
    data = masker.fit_transform(data_file)

    # Generate mask of significant edge-time connections
    ets_mask = np.zeros(data.shape)
    idxs = np.where(mtx != 0)
    time_idxs = idxs[0]
    edge_idxs = idxs[1]

    print("Generating mask of significant edge-time connections...")
    for idx, time_idx in enumerate(time_idxs):
        ets_mask[time_idx, idx_u[edge_idxs[idx]]] = 1
        ets_mask[time_idx, idx_v[edge_idxs[idx]]] = 1

    # Create HRF matrix
    hrf = HRFMatrix(
        TR=tr,
        TE=[0],
        nscans=data.shape[0],
        r2only=True,
        has_integrator=False,
        is_afni=True,
    )
    hrf.generate_hrf()

    # Perform debiasing
    deb_output = debiasing_spike(hrf, data, ets_mask)
    beta = deb_output["beta"]
    fitt = deb_output["betafitts"]

    # Transform results back to 4D
    beta_4D = masker.inverse_transform(beta)
    beta_file = join(out_dir, f"{basename(data_file[:-7])}_beta_ETS.nii.gz")
    beta_4D.to_filename(beta_file)
    atlas_mod.inverse_transform(beta_file, data_file)
    subprocess.run(f"3dNotes {join(out_dir, beta_file)} -h {history_str}",
                   shell=True)

    fitt_4D = masker.inverse_transform(fitt)
    fitt_file = join(out_dir, f"{basename(data_file[:-7])}_fitt_ETS.nii.gz")
    fitt_4D.to_filename(fitt_file)
    subprocess.run(f"3dNotes {join(out_dir, fitt_file)} -h {history_str}",
                   shell=True)
    atlas_mod.inverse_transform(fitt_file, data_file)

    print("Debiasing finished and files saved.")

    return beta, fitt
Example #13
    def _run_interface(self, runtime):

        from nilearn import datasets
        from nilearn.input_data import NiftiLabelsMasker
        import numpy as np

        dataset = datasets.fetch_atlas_harvard_oxford(
            self.inputs.atlas_identifier)
        atlas_filename = dataset.maps

        masker = NiftiLabelsMasker(labels_img=atlas_filename,
                                   standardize=True,
                                   detrend=True,
                                   low_pass=0.1,
                                   high_pass=0.01,
                                   t_r=self.inputs.tr,
                                   memory='nilearn_cache',
                                   verbose=0)

        #file_labels = open('/home/brainlab/Desktop/Rudas/Data/Parcellation/AAL from Freesourfer/fs_default.txt', 'r')
        #labels = []
        #for line in file_labels.readlines():
        #labels.append(line)
        #file_labels.close()

        time_series = masker.fit_transform(
            self.inputs.in_file, confounds=self.inputs.confounds_file)

        np.savetxt(self.inputs.time_series_out_file,
                   time_series,
                   fmt='%10.2f',
                   delimiter=',')

        if self.inputs.plot:
            from nilearn import plotting
            from nilearn.connectome import ConnectivityMeasure
            import matplotlib
            import matplotlib.pyplot as plt
            fig, ax = plt.subplots()

            font = {'family': 'normal', 'size': 5}

            matplotlib.rc('font', **font)

            correlation_measure = ConnectivityMeasure(kind='correlation')
            correlation_matrix = correlation_measure.fit_transform(
                [time_series])[0]

            # Mask the main diagonal for visualization:
            np.fill_diagonal(correlation_matrix, 0)
            plotting.plot_matrix(correlation_matrix,
                                 figure=fig,
                                 labels=dataset.labels[1:],
                                 vmax=0.8,
                                 vmin=-0.8,
                                 reorder=True)

            fig.savefig(self.inputs.correlation_matrix_out_file, dpi=1200)

        return runtime
Example #14
def make_parcellation(data_path, atlas, template='MNI152NLin2009cAsym', atlas_desc=None, resolution=2, parc_params=None, return_meta=False):
    """
    Performs a parcellation which reduces voxel space to regions of interest (brain data).

    Parameters
    ----------

    data_path : str
        Path to .nii image.
    atlas : str
        Specify which atlas you want to use (see github.com/templateflow/)
    template : str
        What space your data is in. If using fMRIPrep output, leave as MNI152NLin2009cAsym.
    atlas_desc : str
        Specify which description of atlas.
    resolution : int
        Resolution of atlas. Can be 1 or 2.
    parc_params : dict
        **kwargs for nilearn functions.
    return_meta : bool
        If True, also returns any meta-information that exists about the parcellation.

    Returns
    -------

    data : array
        Data after the parcellation.

    NOTE
    ----
    These functions make use of nilearn. Please cite templateflow and nilearn if used in a publication.
    """

    if not parc_params:
        parc_params = {}

    tf_get_params = {
        'template': template,
        'resolution': resolution,
        'atlas': atlas
    }
    if atlas_desc is not None:
        tf_get_params['desc'] = atlas_desc
    file = tf.get(**tf_get_params, extensions='nii.gz')

    if isinstance(file, list):
        raise ValueError('More than one template file found. Specify the type of file you need (often atlas_desc). Run: templateflow.api.TF_LAYOUT.get_descs(atlas=' +
                         atlas + ') to see available desc for atlas')

    region = NiftiLabelsMasker(str(file), **parc_params)
    data = region.fit_transform(data_path)

    if return_meta:
        meta_info = tf.get(template=template, atlas=atlas,
                           desc=atlas_desc, extensions='tsv')
        meta_info = load_tabular_file(str(meta_info))
        return data, meta_info
    else:
        return data
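A usage sketch, assuming TemplateFlow can fetch the Schaefer 2018 atlas and that 'func.nii.gz' is a hypothetical image already in MNI152NLin2009cAsym space:

# Hypothetical call: 100-parcel Schaefer atlas, standardized region series
data = make_parcellation('func.nii.gz',
                         atlas='Schaefer2018',
                         atlas_desc='100Parcels7Networks',
                         parc_params={'standardize': True})
print(data.shape)  # (n_timepoints, 100)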
Example #15
def correlation_matrix(ts,atlas,
	confounds=None,
	mask=None,
	loud=False,
	structure_names=[],
	save_as='',
	low_pass=0.25,
	high_pass=0.004,
	smoothing_fwhm=.3,
	):
	"""Return a CSV file containing correlations between ROIs.

	Parameters
	----------
	ts : str
		Path to the 4D NIfTI timeseries file on which to perform the connectivity analysis.
	atlas : str
		Path to a 3D NIfTI-like integer label file designating ROIs.
	confounds : 2D array OR path to CSV file, optional
		Array/CSV file containing confounding time-series to be regressed out before FC analysis.
	structure_names : list, optional
		Ordered list of all structure names in atlas (length N).
	save_as : str, optional
		Path under which to save the Pandas DataFrame containing the NxN correlation matrix.
	"""
	ts = path.abspath(path.expanduser(ts))
	if isinstance(atlas,str):
		atlas = path.abspath(path.expanduser(atlas))
	if mask:
		mask = path.abspath(path.expanduser(mask))
	tr = nib.load(ts).header['pixdim'][4]  # in NIfTI, pixdim[4] holds the TR
	labels_masker = NiftiLabelsMasker(
		labels_img=atlas,
		mask_img=mask,
		standardize=True,
		memory='nilearn_cache',
		verbose=5,
		low_pass=low_pass,
		high_pass = high_pass,
		smoothing_fwhm=smoothing_fwhm,
		t_r=tr,
		)
	#TODO: test confounds with physiological signals
	if confounds:
		confounds = path.abspath(path.expanduser(confounds))
		timeseries = labels_masker.fit_transform(ts, confounds=confounds)
	else:
		timeseries = labels_masker.fit_transform(ts)
	correlation_measure = ConnectivityMeasure(kind='correlation')
	correlation_matrix = correlation_measure.fit_transform([timeseries])[0]
	if structure_names:
		df = pd.DataFrame(columns=structure_names, index=structure_names, data=correlation_matrix)
	else:
		df = pd.DataFrame(data=correlation_matrix)
	if save_as:
		save_dir = path.dirname(save_as)
		if save_dir and not path.exists(save_dir):
			makedirs(save_dir)
		df.to_csv(save_as)
	return df
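A usage sketch with hypothetical paths; confounds and mask are optional:

df = correlation_matrix('func.nii.gz', 'atlas_labels.nii.gz',
                        confounds='confounds.csv',
                        save_as='results/fc_matrix.csv')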
Example #16
def extract_ts(path, atlasPath):
    img = image.load_img(path)
    template = nilearn.datasets.load_mni152_template()
    img_re = image.resample_to_img(img, template, interpolation='linear')
    masker = NiftiLabelsMasker(labels_img=atlasPath, standardize=True,
                               memory='nilearn_cache', verbose=5)
    ts = masker.fit_transform(img_re)
    return ts
Example #17
def test_ibma_with_custom_masker(testdata_ibma):
    """ Ensure voxel-to-ROI reduction works. """
    atlas = op.join(get_test_data_path(), "test_pain_dataset", "atlas.nii.gz")
    masker = NiftiLabelsMasker(atlas)
    meta = ibma.Fishers(mask=masker)
    meta.fit(testdata_ibma)
    assert isinstance(meta.results, nimare.results.MetaResult)
    assert meta.results.maps["z"].shape == (5, )
Example #18
def extract_timeseries(filename, atlas_filename, confounds=None):
    """
    Wrapper around nilearn masker and fit_transform.
    """
    masker = NiftiLabelsMasker(labels_img=atlas_filename, standardize=True)

    time_series = masker.fit_transform(filename, confounds=confounds)
    return time_series
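A short follow-up showing how the extracted series are typically turned into a connectivity matrix (file names hypothetical):

from nilearn.connectome import ConnectivityMeasure

ts = extract_timeseries('func.nii.gz', 'atlas_labels.nii.gz')
corr = ConnectivityMeasure(kind='correlation').fit_transform([ts])[0]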
Example #19
    def _run_interface(self, runtime):
        from nilearn.input_data import NiftiLabelsMasker
        from nilearn.connectome import ConnectivityMeasure
        from sklearn.covariance import EmpiricalCovariance
        import numpy as np
        import pandas as pd
        import os
        import matplotlib.pyplot as plt
        from mne.viz import plot_connectivity_circle
        import re

        plt.switch_backend('Agg')

        # extract timeseries from every label
        masker = NiftiLabelsMasker(labels_img=self.inputs.atlas_file,
                                   standardize=True, verbose=1)
        timeseries = masker.fit_transform(self.inputs.timeseries_file)
        # create correlation matrix
        correlation_measure = ConnectivityMeasure(cov_estimator=EmpiricalCovariance(),
                                                  kind="correlation")
        correlation_matrix = correlation_measure.fit_transform([timeseries])[0]
        np.fill_diagonal(correlation_matrix, np.nan)

        # add the atlas labels to the matrix
        atlas_lut_df = pd.read_csv(self.inputs.atlas_lut, sep='\t')
        regions = atlas_lut_df['regions'].values
        correlation_matrix_df = pd.DataFrame(correlation_matrix, index=regions, columns=regions)

        # do a fisher's r -> z transform
        fisher_z_matrix_df = correlation_matrix_df.apply(lambda x: (np.log(1 + x) - np.log(1 - x)) * 0.5)

        # write out the file.
        out_file = os.path.join(runtime.cwd, 'fisher_z_correlation.tsv')
        fisher_z_matrix_df.to_csv(out_file, sep='\t', na_rep='n/a')

        # save the filename in the outputs
        self._results['correlation_matrix'] = out_file

        # visualizations with mne
        connmat = fisher_z_matrix_df.values
        labels = list(fisher_z_matrix_df.index)

        # define title and outfile names:
        trial_regex = re.compile(r'.*trialtype-(?P<trial>[A-Za-z0-9]+)')
        title = re.search(trial_regex, self.inputs.timeseries_file).groupdict()['trial']
        outfile = os.path.join(runtime.cwd, ".".join([title, "svg"]))

        n_lines = int(np.sum(connmat > 0) / 2)
        fig = plt.figure(figsize=(5, 5))

        plot_connectivity_circle(connmat, labels, n_lines=n_lines, fig=fig, title=title, fontsize_title=10,
                                 facecolor='white', textcolor='black', colormap='jet', colorbar=1,
                                 node_colors=['black'], node_edgecolor=['white'], show=False, interactive=False)

        fig.savefig(outfile, dpi=300)
        self._results['correlation_fig'] = outfile

        return runtime
Example #20
def prepare_data(data_dir, output_dir, pipeline = "cpac", quality_checked = True):
    # get dataset
    print("Loading dataset...")
    abide = datasets.fetch_abide_pcp(data_dir = data_dir,
                                     pipeline = pipeline,
                                     quality_checked = quality_checked)
    # make list of filenames
    fmri_filenames = abide.func_preproc

    # load atlas
    multiscale = datasets.fetch_atlas_basc_multiscale_2015()
    atlas_filename = multiscale.scale064

    # initialize masker object
    masker = NiftiLabelsMasker(labels_img=atlas_filename,
                               standardize=True,
                               memory='nilearn_cache',
                               verbose=0)

    # initialize correlation measure
    correlation_measure = ConnectivityMeasure(kind='correlation', vectorize=True,
                                             discard_diagonal=True)

    try: # check if feature file already exists
        # load features
        feat_file = os.path.join(output_dir, 'ABIDE_BASC064_features.npz')
        X_features = np.load(feat_file)['a']
        print("Feature file found.")

    except FileNotFoundError:  # if not, extract features
        X_features = [] # To contain upper half of matrix as 1d array
        print("No feature file found. Extracting features...")

        for i,sub in enumerate(fmri_filenames):
            # extract the timeseries from the ROIs in the atlas
            time_series = masker.fit_transform(sub)
            # create a region x region correlation matrix
            correlation_matrix = correlation_measure.fit_transform([time_series])[0]
            # add to our container
            X_features.append(correlation_matrix)
            # keep track of status
            print('finished extracting %s of %s'%(i+1,len(fmri_filenames)))
        # Save features
        np.savez_compressed(os.path.join(output_dir, 'ABIDE_BASC064_features'),
                                         a = X_features)

    # Dimensionality reduction of features with PCA
    print("Running PCA...")
    pca = PCA(0.99).fit(X_features) # keeping 99% of variance
    X_features_pca = pca.transform(X_features)

    # Transform phenotypic data into dataframe
    abide_pheno = pd.DataFrame(abide.phenotypic)

    # Get the target vector
    y_target = abide_pheno['DX_GROUP']

    return(X_features_pca, y_target)
Example #21
def _set_masker(roi_file, as_voxels=False, **kwargs):
    """Check and see if multiple ROIs exist in atlas file"""

    if isinstance(roi_file, str) and roi_file.endswith('.tsv'):
        roi = _read_coords(roi_file)
        n_rois = len(roi)
        is_coords = True
        print('  {} region(s) detected from coordinates'.format(n_rois))
    else:
        roi = load_img(roi_file)
        n_rois = len(np.unique(roi.get_fdata())) - 1

        is_coords = False
        print('  {} region(s) detected from {}'.format(n_rois,
                                                       roi.get_filename()))
    
    if is_coords:
        if kwargs.get('radius') is None:
            warnings.warn('No radius specified for coordinates; setting '
                          'to nilearn.input_data.NiftiSpheresMasker default '
                          'of extracting from a single voxel')
        masker = NiftiSpheresMasker(roi, **kwargs)
    else:

        if 'radius' in kwargs:
            kwargs.pop('radius')
        
        if 'allow_overlap' in kwargs:
            kwargs.pop('allow_overlap')
        
        if n_rois > 1:
            masker = NiftiLabelsMasker(roi, **kwargs)
        elif n_rois == 1:
            # single binary ROI mask 
            if as_voxels:
                if 'mask_img' in kwargs:
                    kwargs.pop('mask_img')
                masker = NiftiMasker(roi, **kwargs)
            else:
                # more computationally efficient if only wanting the mean of ROI
                masker = NiftiLabelsMasker(roi, **kwargs)
        else:
            raise ValueError('No ROI detected; check ROI file')
    
    return masker
Example #22
def test_ibma_with_custom_masker():
    """ Ensure voxel-to-ROI reduction works. """
    atlas = op.join(get_resource_path(), 'atlases',
                    'HarvardOxford-cort-maxprob-thr25-2mm.nii.gz')
    masker = NiftiLabelsMasker(atlas)
    meta = ibma.Fishers(mask=masker)
    meta.fit(pytest.dset_z)
    assert isinstance(meta.results, nimare.base.MetaResult)
    assert meta.results.maps['z'].shape == (48, )
Example #23
def main(args):

    # Set analysis directories
    root = '/net/synapse/nt/users/bmacintosh_lab/nluciw/'
    # Root directory of data.
    data_dir = root + 'data/EnF/sourcedata/'
    # Output directory.
    output_dir = root + 'outputs/perf_covar/' + args.output_dir
    # Create output if does not exist.
    if not os.path.exists(os.path.dirname(output_dir)):
        os.makedirs(os.path.dirname(output_dir))

    # Save command line
    with open(output_dir + 'commandline_args.txt', 'w') as f:
        f.write('\n'.join(sys.argv[1:]))

    # Load 4d nifti objects for both groups
    bd_data = fetch_data(data_dir,
                         args.nifti_name,
                         metadata=args.metadata,
                         subject_group=('BD', ))
    hc_data = fetch_data(data_dir,
                         args.nifti_name,
                         metadata=args.metadata,
                         subject_group=('HC', ))

    hc_vols = concat_imgs(hc_data.imgs)
    bd_vols = concat_imgs(bd_data.imgs)

    # Construct or load parcellator object using the atlas we specify on
    # the command line.
    parcellator = NiftiLabelsMasker(labels_img=args.atlas,
                                    mask_img=data_dir +
                                    'masks/cbf_80p_aal_merge_mni.nii.gz',
                                    standardize=False,
                                    strategy='mean')
    parcellator.fit()

    # Do the parcellation and correlation for both groups.
    hc_covar, bd_covar = \
        covariance.parcellate_and_correlate(
            [hc_vols, bd_vols],
            output_dir,
            parcellator,
            prefix=args.output_prefix,
            detrend=False,
            # pve_gm_imgs=[concat_imgs(hc_data.struc_imgs),
            #              concat_imgs(bd_data.struc_imgs)]
        )

    print(len(bd_data.imgs), len(hc_data.imgs))
    difference = statistics.compute_difference(bd_covar[0], hc_covar[0],
                                               len(bd_data.imgs),
                                               len(hc_data.imgs))

    cors = np.stack((bd_covar[0], hc_covar[0], difference))

    np.save(output_dir + args.output_prefix + 'cors', cors)
Example #24
def extract_parcellation_time_series(in_data, parcellation_name,
                                     parcellations_dict, bp_freqs, tr):
    '''
    Depending on parcellation['is_probabilistic'], this function chooses either NiftiLabelsMasker or NiftiMapsMasker
    to extract the time series of each parcel.
    If bp_freqs is (hp, lp) the data are band-pass filtered; (None, None) applies no filter, (None, .1) only a low-pass, etc.
    tr is in ms (e.g. from FreeSurfer ImageInfo()).
    Returns an np.array with the parcellation time series, saves this array to parcellation_time_series_file, and
    returns the path to the pickled masker object.
    '''
    from nilearn.input_data import NiftiLabelsMasker, NiftiMapsMasker, NiftiSpheresMasker
    import os, pickle
    import numpy as np

    if parcellations_dict[parcellation_name][
            'is_probabilistic'] == True:  # use probab. nilearn
        masker = NiftiMapsMasker(
            maps_img=parcellations_dict[parcellation_name]['nii_path'],
            standardize=True)

    elif parcellations_dict[parcellation_name]['is_probabilistic'] == 'sphere':
        atlas = pickle.load(
            open(parcellations_dict[parcellation_name]['nii_path'], 'rb'))
        coords = atlas.rois
        masker = NiftiSpheresMasker(coords,
                                    radius=5,
                                    allow_overlap=True,
                                    standardize=True)

    else:  # 0/1 labels
        masker = NiftiLabelsMasker(
            labels_img=parcellations_dict[parcellation_name]['nii_path'],
            standardize=True)

    # add band-pass filter (only applied if the corresponding freq is not None)
    hp, lp = bp_freqs
    masker.low_pass = lp
    masker.high_pass = hp
    masker.t_r = tr

    masker.standardize = True

    masker_file = os.path.join(os.getcwd(), 'masker.pkl')
    with open(masker_file, 'wb') as f:
        pickle.dump(masker, f)

    parcellation_time_series = masker.fit_transform(in_data)

    parcellation_time_series_file = os.path.join(
        os.getcwd(), 'parcellation_time_series.npy')
    np.save(parcellation_time_series_file, parcellation_time_series)

    return parcellation_time_series, parcellation_time_series_file, masker_file
Example #25
 def extract(fname, masker_fname, sid, labels, roi_numbers, save_csv=True):
     if os.path.isfile(fname) and os.path.isfile(masker_fname):
         masker_obj = NiftiLabelsMasker(labels_img=masker_fname,
                                        standardize=True,
                                        memory='nilearn_cache',
                                        verbose=5)
         anImg = nib.load(fname)
         try:
             time_series = masker_obj.fit_transform(anImg)
             cormat = connectivity_obj.fit_transform([time_series])[0]
         except Exception as e:
             print("Problem extracting: ")
             print(e)
             time_series = None
             cormat = None
         if time_series is None or cormat is None:
             print(
                 'Could not compute time series for this file, skipping: {}'
                 .format(fname))
         elif save_csv and outpath:
             try:
                 #print(masker_obj.labels_)
                 masker_labels = np.asarray(masker_obj.labels_)
                 #print(masker_labels)
                 #print(roi_numbers)
                 labels_extracted = np.array(labels)[np.isin(
                     roi_numbers, masker_labels)]
                 #print(labels_extracted)
                 if not (cormat.shape[0] == labels_extracted.shape[0] and
                         time_series.shape[1] == labels_extracted.shape[0]):
                     raise Exception(
                         "Shape of extracted data and implied labels do not match. Labels will be set to 999."
                     )
             except Exception as e:
                 print("Problem setting labels: ")
                 print(e)
                 masker_labels = np.arange(999, 999 + time_series.shape[1],
                                           1)
                 labels_extracted = masker_labels
             if not os.path.isdir(outpath):
                 raise Exception(
                     "Cannot find output dir {}".format(outpath))
             else:
                 save_one(fname,
                          time_series,
                          cormat,
                          sid,
                          labels_extracted,
                          masker_labels,
                          outpath,
                          use_number_labels_only=True)
     else:
         warnings.warn('Cannot find file(s) {}, {}'.format(
             fname, masker_fname))
         time_series = []
         cormat = []
Example #26
def staticFC(seedIMG,seedNAME,funcdata,output):
    '''
    For example:
    seedIMG='Thalamus_atlas_regions.nii'
    seedNAME="Thalamus_atlas_regions.txt"
    '''

    masker = NiftiLabelsMasker(labels_img=seedIMG, standardize=True)
    my_file = pd.read_table(seedNAME,header=None)
    seedname= list(my_file[0])
    seed_time_series = masker.fit_transform(funcdata)

    # remember to check the TR in this code. 
    brain_masker = input_data.NiftiMasker(
        smoothing_fwhm=6,
        detrend=True, standardize=True,
        low_pass=0.1, high_pass=0.01, t_r=2,
        memory='nilearn_cache', memory_level=1, verbose=2)
    
    brain_time_series = brain_masker.fit_transform(funcdata)

    #print("Seed time series shape: (%s, %s)" % seed_time_series.shape)
    #print("Brain time series shape: (%s, %s)" % brain_time_series.shape)


    seed_to_voxel_correlations = (np.dot(brain_time_series.T, seed_time_series) /
                                  seed_time_series.shape[0]
                                  )

    # print("Seed-to-voxel correlation shape: (%s, %s)" %
    #       seed_to_voxel_correlations.shape)
    # print("Seed-to-voxel correlation: min = %.3f; max = %.3f" % (
    #     seed_to_voxel_correlations.min(), seed_to_voxel_correlations.max()))

    seed_to_voxel_correlations_img = brain_masker.inverse_transform(
        seed_to_voxel_correlations.T)

    seed_to_voxel_correlations_fisher_z = np.arctanh(seed_to_voxel_correlations)

    # Finally, we can transform the correlation array back to a Nifti image
    # object, that we can save.
    seed_to_voxel_correlations_fisher_z_img = brain_masker.inverse_transform(
        seed_to_voxel_correlations_fisher_z.T)

    #seed_to_voxel_correlations_fisher_z_img.to_filename('seed_correlation_z.nii.gz')

    IM=funcs.four_to_three(seed_to_voxel_correlations_fisher_z_img)
    for i,M in enumerate(IM):
        outdir=f'{output}/{seedname[i]}/seedFC'
        if not os.path.exists(outdir):
            os.makedirs(outdir)
        subname=os.path.basename(funcdata)[:-4]
        if not os.path.exists(f'{outdir}/{subname}_SFC_Z.nii.gz'):
            M.to_filename(f'{outdir}/{subname}_SFC_Z.nii.gz')
        else:
            print('Existed')
Example #27
def comp_timeseries(item):
    print("Computing time-series for:", item)

    # Get filepaths
    bold_fp = WORKDIR + ("sub-{0:0>3}/ses-{1}/func/" \
                         "sub-{0:0>3}_ses-{1}_task-{2}_run-{3}_space-MNI152NL"
                         "in2009cAsym_desc-preproc_bold.nii.gz") \
                         .format(item[0], item[1].lower(), item[2], item[3])

    conf_fp = WORKDIR + ("sub-{0:0>3}/ses-{1}/func/" \
                         "sub-{0:0>3}_ses-{1}_task-{2}_run-{3}_desc-confounds_"
                         "regressors.tsv") \
                         .format(item[0], item[1].lower(), item[2], item[3])

    # Load the image and drop first n frames
    func_img = image.index_img(image.load_img(bold_fp), slice(CUTOFF, None))

    # Load confounds
    confounds = pd.read_csv(conf_fp, sep='\t') \
                    .loc[CUTOFF:, [
                               "a_comp_cor_00",
                               "a_comp_cor_01",
                               "a_comp_cor_02",
                               "a_comp_cor_03",
                               "a_comp_cor_04",
                               "a_comp_cor_05",
                               "global_signal",
                               "white_matter",
                               "csf",
                               "trans_x",
                               "trans_y",
                               "trans_z",
                               'rot_x',
                               'rot_y',
                               'rot_z']]

    # Create parcellation object with additional pre-processing parameters
    willard_mask = NiftiLabelsMasker(willard_img,
                                     detrend=True,
                                     t_r=0.802,
                                     low_pass=0.1,
                                     high_pass=0.01,
                                     standardize=True,
                                     memory=HOMEDIR + 'cache',
                                     memory_level=1)

    # Process and perform parcellation
    roi_time_series = willard_mask.fit_transform(func_img,
                                                 confounds=confounds.values)

    # Write into csv
    csv_data = pd.DataFrame(roi_time_series)
    csv_data.to_csv(OUTDIR + "sub-{0:0>3}_ses-{1}_task-{2}_run-" \
                    "{3}.csv".format(item[0], item[1].lower(), item[2], item[3]),
                               header=False, index=False)
Example #28
def post_processed_data(atlas_data, regressors):
    """A post-processed version of the atlas_data, which is generated directly
    by nilearn rather than niimasker. The results from niimasker should
    directly match what is produced by nilearn.
    """
    labels_img = _get_atlas()['maps']
    masker = NiftiLabelsMasker(labels_img, standardize=True, smoothing_fwhm=5,
                               detrend=True, low_pass=.1, high_pass=.01, t_r=2)

    confounds = regressors.values
    return masker.fit_transform(atlas_data, confounds=confounds)
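The _get_atlas helper is not shown; a plausible sketch returning a dict with a 'maps' key, here using nilearn's BASC multiscale fetcher as a stand-in:

from nilearn import datasets

def _get_atlas():
    # Hypothetical helper consistent with the fixture above: any object
    # exposing a 'maps' atlas image would work.
    atlas = datasets.fetch_atlas_basc_multiscale_2015()
    return {'maps': atlas.scale064}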
Example #29
def cal_connectome(fmri_ff,
                   confound_ff,
                   atlas_ff,
                   outputjpg_ff,
                   metric='correlation',
                   labelrange=None,
                   label_or_map=0):
    if label_or_map == 0:
        # 'correlation', 'partial correlation', 'tangent', 'covariance', 'precision'
        masker = NiftiLabelsMasker(labels_img=atlas_ff,
                                   standardize=True,
                                   verbose=0)
    else:
        masker = NiftiMapsMasker(maps_img=atlas_ff,
                                 standardize=True,
                                 verbose=0)

    time_series_0 = masker.fit_transform(fmri_ff, confounds=confound_ff)
    if labelrange is None:
        labelrange = np.arange(time_series_0.shape[1])
    time_series = time_series_0[:, labelrange]
    if metric == 'sparse inverse covariance':
        try:
            estimator = GraphLassoCV()
            estimator.fit(time_series)
            correlation_matrix = -estimator.precision_
        except Exception:
            correlation_matrix = np.zeros(
                (time_series.shape[1], time_series.shape[1]))
    else:
        correlation_measure = ConnectivityMeasure(kind=metric)
        correlation_matrix = correlation_measure.fit_transform([time_series
                                                                ])[0]

    # Plot the correlation matrix

    fig = plt.figure(figsize=(6, 5), dpi=100)
    plt.clf()
    # Mask the main diagonal for visualization:
    np.fill_diagonal(correlation_matrix, 0)

    plt.imshow(correlation_matrix,
               interpolation="nearest",
               cmap="RdBu_r",
               vmax=0.8,
               vmin=-0.8)
    plt.gca().yaxis.tick_right()
    plt.axis('off')
    plt.colorbar()
    plt.title(metric.title(), fontsize=12)
    plt.tight_layout()
    fig.savefig(outputjpg_ff, bbox_inches='tight')
    plt.close()
    return correlation_matrix
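GraphLassoCV was renamed GraphicalLassoCV in scikit-learn 0.20 and removed in 0.22; a compatibility import such as the following (an assumption about the surrounding module, which is not shown here) keeps this example running on newer versions:

# Hypothetical compatibility shim for the estimator used above
try:
    from sklearn.covariance import GraphicalLassoCV as GraphLassoCV  # scikit-learn >= 0.20
except ImportError:
    from sklearn.covariance import GraphLassoCV  # scikit-learn < 0.20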
Example #30
def _vectorize_nii(in_data_file, mask_file, parcellation_path, fwhm):
    from nilearn.input_data import NiftiMasker, NiftiLabelsMasker
    import nibabel as nib

    if parcellation_path is None:
        masker = NiftiMasker(mask_img=mask_file, smoothing_fwhm=fwhm)
    else:
        masker = NiftiLabelsMasker(labels_img=parcellation_path,
                                   smoothing_fwhm=fwhm)

    vectorized_data = masker.fit_transform(in_data_file)
    return vectorized_data, masker
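A usage sketch with hypothetical paths; because the fitted masker is returned, ROI values can be mapped back into image space:

vec, masker = _vectorize_nii('func.nii.gz', 'brain_mask.nii.gz',
                             'atlas_labels.nii.gz', fwhm=6)
roi_img = masker.inverse_transform(vec)  # ROI values back in voxel space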