Example #1
def __init__(self, data_config, mode="train"):
     super().__init__(data_config, mode)  # Python 3 super() avoids repeating the class name
     self.masker = NiftiLabelsMasker(
         labels_img=f"{data_config.workdir}/input/"
         "Schaefer2018_400Parcels_7Networks_order_FSLMNI152_1mm.nii.gz",
         standardize=True,
     )
Example #2
    def apply_mask(self, atlas="AAL"):

        if atlas == "AAL":
            # load atlas
            atlas_filename = datasets.fetch_atlas_aal(version="SPM12",
                                                      verbose=0).maps
        elif atlas == "multiscale":
            raise NotImplementedError()
        else:
            raise ValueError("Altas should be 'AAL' or 'multiscale'")

        # set mask
        masker = NiftiLabelsMasker(labels_img=atlas_filename,
                                   standardize=True,
                                   detrend=True,
                                   low_pass=0.08,
                                   high_pass=0.01,
                                   t_r=3.7,
                                   memory="nilearn_cache",
                                   verbose=0)

        # apply mask to data
        confounds = high_variance_confounds(self.fmri_filename,
                                            n_confounds=1,
                                            detrend=True)
        ts_hvar = masker.fit_transform(self.fmri_filename, confounds=confounds)

        return ts_hvar
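    # A hedged usage sketch (not from the original source): assumes `ds` is an
    # instance of the surrounding dataset class with a valid 4D NIfTI path in
    # self.fmri_filename.
    # ts_hvar = ds.apply_mask(atlas="AAL")  # array of shape (n_timepoints, n_AAL_regions)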
Example #3
def series_times_ROI(Maps, func, typeF):
    from nilearn.input_data import NiftiLabelsMasker, NiftiMapsMasker
    from nilearn import plotting
    import scipy.io as sio
    import numpy as np
    import os
    ##################################
    Resul = os.getcwd()  #+'-Results'
    n_map = Maps[Maps.rfind('/') + 1:][:Maps[Maps.rfind('/') + 1:].find('.')]
    n_plot = 'empty_plot'
    #os.system('mkdir '+Resul)
    ##################################
    if typeF == 'Labels':
        masker = NiftiLabelsMasker(labels_img=Maps, standardize=True)
        plot_atlas = plotting.plot_roi(Maps)
        n_plot = Resul + '/Atlas_' + n_map + '_' + typeF + '.svg'
        plot_atlas.savefig(n_plot)
    elif typeF == 'Maps':
        masker = NiftiMapsMasker(maps_img=Maps,
                                 standardize=True,
                                 memory='nilearn_cache',
                                 verbose=5)

    time_series = masker.fit_transform(func)
    print('Shape of serial times ', np.shape(time_series))
    out_mat = Resul + '/Time_series_' + n_map + '_' + typeF + '.mat'
    sio.savemat(out_mat, {'time_series': time_series})

    return out_mat, n_plot
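A minimal usage sketch for the function above; the atlas and functional paths are placeholders, and typeF must be 'Labels' or 'Maps':

out_mat, n_plot = series_times_ROI('atlases/AAL.nii.gz',
                                   'sub-01_bold.nii.gz', 'Labels')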
Example #4
def connectivity_data():
    """Fixture for connectivity tests."""

    base_dir = os.path.abspath(pkg_resources.resource_filename(
        "pynets", "../data/examples"))
    func_file = f"{base_dir}/BIDS/sub-25659/ses-1/func/" \
                f"sub-25659_ses-1_task-rest_space-T1w_desc-preproc_bold.nii.gz"
    mask_file = f"{base_dir}/BIDS/sub-25659/ses-1/func/" \
                f"sub-25659_ses-1_task-rest_space-T1w_desc-preproc_" \
                f"bold_mask.nii.gz"
    parcellation = pkg_resources.resource_filename(
        "pynets", "templates/atlases/DesikanKlein2012.nii.gz"
    )

    masker = NiftiLabelsMasker(
        labels_img=nib.load(parcellation), background_label=0,
        resampling_target="labels", dtype="auto",
        mask_img=nib.load(mask_file), standardize=True)

    time_series = masker.fit_transform(func_file)
    conn_measure = ConnectivityMeasure(
        kind="correlation")
    conn_matrix = conn_measure.fit_transform([time_series])[0]
    [coords, _, _, label_intensities] = \
        get_names_and_coords_of_parcels(parcellation)

    labels = ['ROI_' + str(idx) for idx, val in enumerate(label_intensities)]

    yield {'time_series': time_series, 'conn_matrix': conn_matrix,
           'labels': labels, 'coords': coords, 'indices': label_intensities}
Example #5
    def _run_interface(self, runtime):

        from nilearn import datasets
        from nilearn.input_data import NiftiLabelsMasker
        import numpy as np

        dataset = datasets.fetch_atlas_harvard_oxford(
            self.inputs.atlas_identifier)
        atlas_filename = dataset.maps

        masker = NiftiLabelsMasker(labels_img=atlas_filename,
                                   standardize=True,
                                   detrend=True,
                                   low_pass=0.1,
                                   high_pass=0.01,
                                   t_r=self.inputs.tr,
                                   memory='nilearn_cache',
                                   verbose=0)

        #file_labels = open('/home/brainlab/Desktop/Rudas/Data/Parcellation/AAL from Freesourfer/fs_default.txt', 'r')
        #labels = []
        #for line in file_labels.readlines():
        #labels.append(line)
        #file_labels.close()

        time_series = masker.fit_transform(
            self.inputs.in_file, confounds=self.inputs.confounds_file)

        np.savetxt(self.inputs.time_series_out_file,
                   time_series,
                   fmt='%10.2f',
                   delimiter=',')

        if self.inputs.plot:
            from nilearn import plotting
            from nilearn.connectome import ConnectivityMeasure
            import matplotlib
            import matplotlib.pyplot as plt
            fig, ax = plt.subplots()

            font = {'family': 'normal', 'size': 5}

            matplotlib.rc('font', **font)

            correlation_measure = ConnectivityMeasure(kind='correlation')
            correlation_matrix = correlation_measure.fit_transform(
                [time_series])[0]

            # Mask the main diagonal for visualization:
            np.fill_diagonal(correlation_matrix, 0)
            plotting.plot_matrix(correlation_matrix,
                                 figure=fig,
                                 labels=dataset.labels[1:],
                                 vmax=0.8,
                                 vmin=-0.8,
                                 reorder=True)

            fig.savefig(self.inputs.correlation_matrix_out_file, dpi=1200)

        return runtime
Example #6
    def from_fmri_data(
        cls,
        datafile: str,
        atlas: Optional[str] = None,
        confounds: Optional[str] = None,
        **kwargs,
    ):
        """Take a 4D dataset and generate signals from the atlas parcels.
        """
        if atlas is None:
            atlas = MIST_ATLAS_444
            kind = "atlas"
        else:
            kind = "atlas_custom"

        # Resampling target should be the image with lowest resolution.
        # Assuming that the data resolution is isotropic for now.
        atlas_res = nib.load(atlas).header["pixdim"][1]
        data_res = nib.load(datafile).header["pixdim"][1]
        resampling_target = "data" if data_res > atlas_res else "labels"

        masker = NiftiLabelsMasker(labels_img=atlas,
                                   standardize=True,
                                   resampling_target=resampling_target)
        signals = masker.fit_transform(datafile)
        atlasrois = atlas_roitovol(atlas, nrois=signals.shape[-1])
        return cls(timeseries=signals,
                   maps=atlasrois,
                   kind=kind,
                   confounds=confounds,
                   **kwargs)
Example #7
	def _run_interface(self, runtime):
		from nilearn.input_data import NiftiMasker, NiftiLabelsMasker
		from nipype.utils.filemanip import split_filename
		import nibabel as nib
		import os

		functional_filename = self.inputs.in_file
		atlas_filename = self.inputs.atlas_filename
		mask_filename = self.inputs.mask_filename

		# Extracting the ROI signals
		masker = NiftiLabelsMasker(labels_img=atlas_filename,
                           background_label=0,
                           standardize=True,
                           detrend=True,
                           verbose=1
                           )
		time_series = masker.fit_transform(functional_filename)

		# Removing the ROI signal from the time series
		nifti_masker = NiftiMasker(mask_img=mask_filename)
		masked_data = nifti_masker.fit_transform(functional_filename, confounds=time_series[...,0])
		masked_img = nifti_masker.inverse_transform(masked_data)

		# Saving the result to disk
		outputs = self._outputs().get()
		fname = self.inputs.in_file
		_, base, _ = split_filename(fname)
		nib.save(masked_img, os.path.abspath(base + '_regressed.nii.gz'))
		return runtime
Example #8
def timeSeries(func_files, confound_files, atlas_filename):
    # This function receives a list of functional files and a list of matching
    # confound files, and outputs a list of per-subject time-series arrays
    from nilearn.input_data import NiftiLabelsMasker
    # define masker here
    masker = NiftiLabelsMasker(
        labels_img=atlas_filename,
        standardize=True,
        smoothing_fwhm=6,
        memory="nilearn_cashe",
        t_r=1,
        verbose=5,
        high_pass=.01,
        low_pass=.1
    )  # note: band-pass settings (high_pass=.01, low_pass=.1) are applied above even though the data is task-based
    total_subjects = [
    ]  # creating an empty array that will hold all subjects matrix
    # This function needs a masker object that will be defined outside the function
    for func_file, confound_file in zip(func_files, confound_files):
        print(f"proccessing file {func_file}")  # print file name
        confoundClean = removeVars(confound_file)
        confoundArray = confoundClean  #confoundClean.values
        time_series = masker.fit_transform(func_file, confounds=confoundArray)
        #time_series = extractor.fit_transform(func_file, confounds=confoundArray)
        #masker.fit_transform(func_file, confoundArray)
        total_subjects.append(time_series)
    return total_subjects
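A hedged usage sketch: func_files and confound_files are assumed to be matched lists of paths, and removeVars must be defined elsewhere as referenced above:

all_ts = timeSeries(func_files, confound_files, atlas_filename)
print(all_ts[0].shape)  # (n_timepoints, n_regions) for the first subject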
Example #9
    def _run_interface(self, runtime):
        fname = self.inputs.fmri_denoised
        entities = parse_file_entities(fname)
        bold_img = nb.load(fname)
        parcellation_file = get_parcellation_file_path(entities['space'])
        masker = NiftiLabelsMasker(labels_img=parcellation_file,
                                   standardize=True)
        time_series = masker.fit_transform(bold_img, confounds=None)

        corr_measure = ConnectivityMeasure(kind='correlation')
        corr_mat = corr_measure.fit_transform([time_series])[0]
        entities['pipeline'] = extract_pipeline_from_path(fname)
        conn_file = join(self.inputs.output_dir,
                         build_path(entities, self.conn_file_pattern, False))
        carpet_plot_file = join(
            self.inputs.output_dir,
            build_path(entities, self.carpet_plot_pattern, False))
        matrix_plot_file = join(
            self.inputs.output_dir,
            build_path(entities, self.matrix_plot_pattern, False))

        make_carpetplot(time_series, carpet_plot_file)
        mplot = plot_matrix(corr_mat, vmin=-1, vmax=1)
        mplot.figure.savefig(matrix_plot_file)

        np.save(conn_file, corr_mat)

        self._results['corr_mat'] = conn_file
        self._results['carpet_plot'] = carpet_plot_file
        self._results['matrix_plot'] = matrix_plot_file

        return runtime
Example #10
def debiasing(data_file, mask, mtx, idx_u, idx_v, tr, out_dir, history_str):
    """
    Perform debiasing based on denoised edge-time matrix.
    """
    print("Performing debiasing based on denoised edge-time matrix...")
    masker = NiftiLabelsMasker(
        labels_img=mask,
        standardize=False,
        memory="nilearn_cache",
        strategy="mean",
    )

    # Read data
    data = masker.fit_transform(data_file)

    # Generate mask of significant edge-time connections
    ets_mask = np.zeros(data.shape)
    idxs = np.where(mtx != 0)
    time_idxs = idxs[0]
    edge_idxs = idxs[1]

    print("Generating mask of significant edge-time connections...")
    for idx, time_idx in enumerate(time_idxs):
        ets_mask[time_idx, idx_u[edge_idxs[idx]]] = 1
        ets_mask[time_idx, idx_v[edge_idxs[idx]]] = 1

    # Create HRF matrix
    hrf = HRFMatrix(
        TR=tr,
        TE=[0],
        nscans=data.shape[0],
        r2only=True,
        has_integrator=False,
        is_afni=True,
    )
    hrf.generate_hrf()

    # Perform debiasing
    deb_output = debiasing_spike(hrf, data, ets_mask)
    beta = deb_output["beta"]
    fitt = deb_output["betafitts"]

    # Transform results back to 4D
    beta_4D = masker.inverse_transform(beta)
    beta_file = join(out_dir, f"{basename(data_file[:-7])}_beta_ETS.nii.gz")
    beta_4D.to_filename(beta_file)
    atlas_mod.inverse_transform(beta_file, data_file)
    subprocess.run(f"3dNotes {join(out_dir, beta_file)} -h {history_str}",
                   shell=True)

    fitt_4D = masker.inverse_transform(fitt)
    fitt_file = join(out_dir, f"{basename(data_file[:-7])}_fitt_ETS.nii.gz")
    fitt_4D.to_filename(fitt_file)
    subprocess.run(f"3dNotes {join(out_dir, fitt_file)} -h {history_str}",
                   shell=True)
    atlas_mod.inverse_transform(fitt_file, data_file)

    print("Debiasing finished and files saved.")

    return beta, fitt
Example #11
    def _run_interface(self, runtime):
        fname = self.inputs.fmri_denoised
        bold_img = nb.load(fname)
        masker = NiftiLabelsMasker(labels_img=self.inputs.parcellation,
                                   standardize=True)
        time_series = masker.fit_transform(bold_img, confounds=None)

        corr_measure = ConnectivityMeasure(kind='correlation')
        corr_mat = corr_measure.fit_transform([time_series])[0]
        _, base, _ = split_filename(fname)

        conn_file = f'{self.inputs.output_dir}/{base}_conn_mat.npy'

        carpet_plot_file = join(self.inputs.output_dir,
                                f'{base}_carpet_plot.png')
        matrix_plot_file = join(self.inputs.output_dir,
                                f'{base}_matrix_plot.png')

        create_carpetplot(time_series, carpet_plot_file)
        mplot = plot_matrix(corr_mat, vmin=-1, vmax=1)
        mplot.figure.savefig(matrix_plot_file)

        np.save(conn_file, corr_mat)

        self._results['corr_mat'] = conn_file
        self._results['carpet_plot'] = carpet_plot_file
        self._results['matrix_plot'] = matrix_plot_file

        return runtime
Example #12
def correlation_matrix(ts,atlas,
	confounds=None,
	mask=None,
	loud=False,
	structure_names=[],
	save_as='',
	low_pass=0.25,
	high_pass=0.004,
	smoothing_fwhm=.3,
	):
	"""Return a CSV file containing correlations between ROIs.

	Parameters
	----------
	ts : str
		Path to the 4D NIfTI timeseries file on which to perform the connectivity analysis.
	confounds : 2D array OR path to CSV file
		Array/CSV file containing confounding time-series to be regressed out before FC analysis.
	atlas : str
		Path to a 3D NIfTI-like label file designating ROIs.
	structure_names : list, optional
		Ordered list of all structure names in atlas (length N).
	save_as : str
		Path under which to save the Pandas DataFrame containing the NxN correlation matrix.
	"""
	ts = path.abspath(path.expanduser(ts))
	if isinstance(atlas,str):
		atlas = path.abspath(path.expanduser(atlas))
	if mask:
		mask = path.abspath(path.expanduser(mask))
	tr = nib.load(ts).header['pixdim'][4]  # repetition time is stored in pixdim[4] (pixdim[0] is qfac)
	labels_masker = NiftiLabelsMasker(
		labels_img=atlas,
		mask_img=mask,
		standardize=True,
		memory='nilearn_cache',
		verbose=5,
		low_pass=low_pass,
		high_pass = high_pass,
		smoothing_fwhm=smoothing_fwhm,
		t_r=tr,
		)
	#TODO: test confounds with physiological signals
	if confounds:
		confounds = path.abspath(path.expanduser(confounds))
		timeseries = labels_masker.fit_transform(ts, confounds=confounds)
	else:
		timeseries = labels_masker.fit_transform(ts)
	correlation_measure = ConnectivityMeasure(kind='correlation')
	correlation_matrix = correlation_measure.fit_transform([timeseries])[0]
	if structure_names:
		df = pd.DataFrame(columns=structure_names, index=structure_names, data=correlation_matrix)
	else:
		df = pd.DataFrame(data=correlation_matrix)
	if save_as:
		save_dir = path.dirname(save_as)
		if save_dir and not path.exists(save_dir):
			makedirs(save_dir)
		df.to_csv(save_as)
	return df
Example #13
def make_parcellation(data_path, atlas, template='MNI152NLin2009cAsym', atlas_desc=None, resolution=2, parc_params=None, return_meta=False):
    """
    Performs a parcellation which reduces voxel space to regions of interest (brain data).

    Parameters
    ----------

    data_path : str
        Path to .nii image.
    atlas : str
        Specify which atlas you want to use (see github.com/templateflow/)
    template : str
        What space your data is in. If fmriprep, leave as MNI152NLin2009cAsym.
    atlas_desc : str
        Specify which description (desc) of the atlas to use.
    resolution : int
        Resolution of atlas. Can be 1 or 2.
    parc_params : dict
        **kwargs for nilearn functions.
    return_meta : bool
        If true, tries to return any meta-information that exists about parcellation.

    Returns
    -------

    data : array
        Data after the parcellation.

    NOTE
    ----
    These functions make use of nilearn. Please cite templateflow and nilearn if used in a publication.
    """

    if not parc_params:
        parc_params = {}

    tf_get_params = {
        'template': template,
        'resolution': resolution,
        'atlas': atlas
    }
    if atlas_desc is not None:
        tf_get_params['desc'] = atlas_desc
    file = tf.get(**tf_get_params, extensions='nii.gz')

    if isinstance(file, list):
        raise ValueError('More than one template file found. Specify the type of file you need (often atlas_desc). Run: templateflow.api.TF_LAYOUT.get_descs(atlas=' +
                         atlas + ') to see available desc for atlas')

    region = NiftiLabelsMasker(str(file), **parc_params)
    data = region.fit_transform(data_path)

    if return_meta:
        meta_info = tf.get(template=template, atlas=atlas,
                           desc=atlas_desc, extensions='tsv')
        meta_info = load_tabular_file(str(meta_info))
        return data, meta_info
    else:
        return data
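A hedged usage sketch; the TemplateFlow atlas name and desc below are illustrative assumptions, not taken from the original source:

data = make_parcellation('sub-01_bold.nii.gz', 'Schaefer2018',
                         atlas_desc='400Parcels7Networks', resolution=2)
print(data.shape)  # (n_timepoints, n_parcels)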
Example #14
def extract_timeseries(filename, atlas_filename, confounds=None):
    """
    Wrapper around nilearn masker and fit_transform.
    """
    masker = NiftiLabelsMasker(labels_img=atlas_filename, standardize=True)

    time_series = masker.fit_transform(filename, confounds=confounds)
    return time_series
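A hedged usage sketch pairing the wrapper with a stock nilearn atlas; the BOLD path is a placeholder:

from nilearn import datasets
atlas = datasets.fetch_atlas_harvard_oxford('cort-maxprob-thr25-2mm').maps
ts = extract_timeseries('sub-01_bold.nii.gz', atlas)  # (n_timepoints, n_regions)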
Example #15
def extract_ts(path, atlasPath):
    img = image.load_img(path)
    template = nilearn.datasets.load_mni152_template()
    img_re = image.resample_to_img(img, template, interpolation='linear')
    masker = NiftiLabelsMasker(labels_img=atlasPath, standardize=True,
                               memory='nilearn_cache', verbose=5)
    ts = masker.fit_transform(img_re)
    return ts
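A hedged usage sketch with placeholder paths; the functional image is resampled to the MNI152 template before parcel time series are extracted:

ts = extract_ts('sub-01_bold.nii.gz', 'aal_atlas.nii.gz')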
Example #16
def prepare_data(data_dir, output_dir, pipeline = "cpac", quality_checked = True):
    # get dataset
    print("Loading dataset...")
    abide = datasets.fetch_abide_pcp(data_dir = data_dir,
                                     pipeline = pipeline,
                                     quality_checked = quality_checked)
    # make list of filenames
    fmri_filenames = abide.func_preproc

    # load atlas
    multiscale = datasets.fetch_atlas_basc_multiscale_2015()
    atlas_filename = multiscale.scale064

    # initialize masker object
    masker = NiftiLabelsMasker(labels_img=atlas_filename,
                               standardize=True,
                               memory='nilearn_cache',
                               verbose=0)

    # initialize correlation measure
    correlation_measure = ConnectivityMeasure(kind='correlation', vectorize=True,
                                             discard_diagonal=True)

    try: # check if feature file already exists
        # load features
        feat_file = os.path.join(output_dir, 'ABIDE_BASC064_features.npz')
        X_features = np.load(feat_file)['a']
        print("Feature file found.")

    except FileNotFoundError:  # if not, extract features
        X_features = [] # To contain upper half of matrix as 1d array
        print("No feature file found. Extracting features...")

        for i,sub in enumerate(fmri_filenames):
            # extract the timeseries from the ROIs in the atlas
            time_series = masker.fit_transform(sub)
            # create a region x region correlation matrix
            correlation_matrix = correlation_measure.fit_transform([time_series])[0]
            # add to our container
            X_features.append(correlation_matrix)
            # keep track of status
            print('finished extracting %s of %s'%(i+1,len(fmri_filenames)))
        # Save features
        np.savez_compressed(os.path.join(output_dir, 'ABIDE_BASC064_features'),
                                         a = X_features)

    # Dimensionality reduction of features with PCA
    print("Running PCA...")
    pca = PCA(0.99).fit(X_features) # keeping 99% of variance
    X_features_pca = pca.transform(X_features)

    # Transform phenotypic data into dataframe
    abide_pheno = pd.DataFrame(abide.phenotypic)

    # Get the target vector
    y_target = abide_pheno['DX_GROUP']

    return X_features_pca, y_target
Example #17
    def _run_interface(self, runtime):
        from nilearn.input_data import NiftiLabelsMasker
        from nilearn.connectome import ConnectivityMeasure
        from sklearn.covariance import EmpiricalCovariance
        import numpy as np
        import pandas as pd
        import os
        import matplotlib.pyplot as plt
        from mne.viz import plot_connectivity_circle
        import re

        plt.switch_backend('Agg')

        # extract timeseries from every label
        masker = NiftiLabelsMasker(labels_img=self.inputs.atlas_file,
                                   standardize=True, verbose=1)
        timeseries = masker.fit_transform(self.inputs.timeseries_file)
        # create correlation matrix
        correlation_measure = ConnectivityMeasure(cov_estimator=EmpiricalCovariance(),
                                                  kind="correlation")
        correlation_matrix = correlation_measure.fit_transform([timeseries])[0]
        np.fill_diagonal(correlation_matrix, np.NaN)

        # add the atlas labels to the matrix
        atlas_lut_df = pd.read_csv(self.inputs.atlas_lut, sep='\t')
        regions = atlas_lut_df['regions'].values
        correlation_matrix_df = pd.DataFrame(correlation_matrix, index=regions, columns=regions)

        # do a fisher's r -> z transform
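        # (0.5 * (log(1 + x) - log(1 - x)) is algebraically np.arctanh(x))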
        fisher_z_matrix_df = correlation_matrix_df.apply(lambda x: (np.log(1 + x) - np.log(1 - x)) * 0.5)

        # write out the file.
        out_file = os.path.join(runtime.cwd, 'fisher_z_correlation.tsv')
        fisher_z_matrix_df.to_csv(out_file, sep='\t', na_rep='n/a')

        # save the filename in the outputs
        self._results['correlation_matrix'] = out_file

        # visualizations with mne
        connmat = fisher_z_matrix_df.values
        labels = list(fisher_z_matrix_df.index)

        # define title and outfile names:
        trial_regex = re.compile(r'.*trialtype-(?P<trial>[A-Za-z0-9]+)')
        title = re.search(trial_regex, self.inputs.timeseries_file).groupdict()['trial']
        outfile = os.path.join(runtime.cwd, ".".join([title, "svg"]))

        n_lines = int(np.sum(connmat > 0) / 2)
        fig = plt.figure(figsize=(5, 5))

        plot_connectivity_circle(connmat, labels, n_lines=n_lines, fig=fig, title=title, fontsize_title=10,
                                 facecolor='white', textcolor='black', colormap='jet', colorbar=1,
                                 node_colors=['black'], node_edgecolor=['white'], show=False, interactive=False)

        fig.savefig(outfile, dpi=300)
        self._results['correlation_fig'] = outfile

        return runtime
Example #18
def main(args):

    # Set analysis directories
    root = '/net/synapse/nt/users/bmacintosh_lab/nluciw/'
    # Root directory of data.
    data_dir = root + 'data/EnF/sourcedata/'
    # Output directory.
    output_dir = root + 'outputs/perf_covar/' + args.output_dir
    # Create output if does not exist.
    if not os.path.exists(os.path.dirname(output_dir)):
        os.makedirs(os.path.dirname(output_dir))

    # Save command line
    with open(output_dir + 'commandline_args.txt', 'w') as f:
        f.write('\n'.join(sys.argv[1:]))

    # Load 4d nifti objects for both groups
    bd_data = fetch_data(data_dir,
                         args.nifti_name,
                         metadata=args.metadata,
                         subject_group=('BD', ))
    hc_data = fetch_data(data_dir,
                         args.nifti_name,
                         metadata=args.metadata,
                         subject_group=('HC', ))

    hc_vols = concat_imgs(hc_data.imgs)
    bd_vols = concat_imgs(bd_data.imgs)

    # Construct or load parcellator object using the atlas we specify on
    # the command line.
    parcellator = NiftiLabelsMasker(labels_img=args.atlas,
                                    mask_img=data_dir +
                                    'masks/cbf_80p_aal_merge_mni.nii.gz',
                                    standardize=False,
                                    strategy='mean')
    parcellator.fit()

    # Do the parcellation and correlation for both groups.
    hc_covar, bd_covar =\
        covariance.parcellate_and_correlate([hc_vols,bd_vols],
                                            output_dir,
                                            parcellator,
                                            prefix = args.output_prefix,
                                            detrend=False
                                            # pve_gm_imgs=[concat_imgs(hc_data.struc_imgs),
                                            #              concat_imgs(bd_data.struc_imgs)]
                                            )

    print(len(bd_data.imgs), len(hc_data.imgs))
    difference = statistics.compute_difference(bd_covar[0], hc_covar[0],
                                               len(bd_data.imgs),
                                               len(hc_data.imgs))

    cors = np.stack((bd_covar[0], hc_covar[0], difference))

    np.save(output_dir + args.output_prefix + 'cors', cors)
Example #19
 def extract(fname, masker_fname, sid, labels, roi_numbers, save_csv=True):
     if os.path.isfile(fname) and os.path.isfile(masker_fname):
         masker_obj = NiftiLabelsMasker(labels_img=masker_fname,
                                        standardize=True,
                                        memory='nilearn_cache',
                                        verbose=5)
         anImg = nib.load(fname)
         try:
             time_series = masker_obj.fit_transform(anImg)
             cormat = connectivity_obj.fit_transform([time_series])[0]
         except Exception as e:
             print("Problem extracting: ")
             print(e)
             time_series = None
             cormat = None
         if time_series is None or cormat is None:
             print(
                 'Could not compute time series for this file, skipping: {}'
                 .format(fname))
         elif save_csv and outpath:
             try:
                 #print(masker_obj.labels_)
                 masker_labels = np.asarray(masker_obj.labels_)
                 #print(masker_labels)
                 #print(roi_numbers)
                 labels_extracted = np.array(labels)[np.isin(
                     roi_numbers, masker_labels)]
                 #print(labels_extracted)
                 if not (cormat.shape[0] == labels_extracted.shape[0] and
                         time_series.shape[1] == labels_extracted.shape[0]):
                     raise Exception(
                         "Shape of extracted data and implied labels do not match. Labels will be set to 999."
                     )
             except Exception as e:
                 print("Problem setting labels: ")
                 print(e)
                 masker_labels = np.arange(999, 999 + time_series.shape[1],
                                           1)
                 labels_extracted = masker_labels
             if not os.path.isdir(outpath):
                 raise Exception(
                     "Cannot find output dir {}".format(outpath))
             else:
                 save_one(fname,
                          time_series,
                          cormat,
                          sid,
                          labels_extracted,
                          masker_labels,
                          outpath,
                          use_number_labels_only=True)
     else:
         warnings.warn('Cannot find file(s) {}, {}'.format(
             fname, masker_fname))
         time_series = []
         cormat = []
Example #20
def staticFC(seedIMG,seedNAME,funcdata,output):
    '''
    For example:
    seedIMG='Thalamus_atlas_regions.nii'
    seedNAME="Thalamus_atlas_regions.txt"
    '''

    masker = NiftiLabelsMasker(labels_img=seedIMG, standardize=True)
    my_file = pd.read_table(seedNAME,header=None)
    seedname= list(my_file[0])
    seed_time_series = masker.fit_transform(funcdata)

    # remember to check the TR in this code. 
    brain_masker = input_data.NiftiMasker(
        smoothing_fwhm=6,
        detrend=True, standardize=True,
        low_pass=0.1, high_pass=0.01, t_r=2,
        memory='nilearn_cache', memory_level=1, verbose=2)
    
    brain_time_series = brain_masker.fit_transform(funcdata)

    #print("Seed time series shape: (%s, %s)" % seed_time_series.shape)
    #print("Brain time series shape: (%s, %s)" % brain_time_series.shape)


    seed_to_voxel_correlations = (np.dot(brain_time_series.T, seed_time_series) /
                                  seed_time_series.shape[0]
                                  )

    # print("Seed-to-voxel correlation shape: (%s, %s)" %
    #       seed_to_voxel_correlations.shape)
    # print("Seed-to-voxel correlation: min = %.3f; max = %.3f" % (
    #     seed_to_voxel_correlations.min(), seed_to_voxel_correlations.max()))

    seed_to_voxel_correlations_img = brain_masker.inverse_transform(
        seed_to_voxel_correlations.T)

    seed_to_voxel_correlations_fisher_z = np.arctanh(seed_to_voxel_correlations)

    # Finally, we can transform the correlation array back to a Nifti image
    # object that we can save.
    seed_to_voxel_correlations_fisher_z_img = brain_masker.inverse_transform(
        seed_to_voxel_correlations_fisher_z.T)

    #seed_to_voxel_correlations_fisher_z_img.to_filename('seed_correlation_z.nii.gz')

    IM=funcs.four_to_three(seed_to_voxel_correlations_fisher_z_img)
    for i,M in enumerate(IM):
        outdir=f'{output}/{seedname[i]}/seedFC'
        if not os.path.exists(outdir):
            os.makedirs(outdir)
        subname=os.path.basename(funcdata)[:-4]
        if not os.path.exists(f'{outdir}/{subname}_SFC_Z.nii.gz'):
            M.to_filename(f'{outdir}/{subname}_SFC_Z.nii.gz')
        else:
            print('File already exists; skipping')
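A hedged usage sketch reusing the docstring's example seed files; the functional path and output directory are placeholders:

staticFC('Thalamus_atlas_regions.nii', 'Thalamus_atlas_regions.txt',
         'sub-01_rest.nii', '/path/to/output')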
Example #21
def comp_timeseries(item):
    print("Computing time-series for:", item)

    # Get filepaths
    bold_fp = WORKDIR + ("sub-{0:0>3}/ses-{1}/func/" \
                         "sub-{0:0>3}_ses-{1}_task-{2}_run-{3}_space-MNI152NL"
                         "in2009cAsym_desc-preproc_bold.nii.gz") \
                         .format(item[0], item[1].lower(), item[2], item[3])

    conf_fp = WORKDIR + ("sub-{0:0>3}/ses-{1}/func/" \
                         "sub-{0:0>3}_ses-{1}_task-{2}_run-{3}_desc-confounds_"
                         "regressors.tsv") \
                         .format(item[0], item[1].lower(), item[2], item[3])

    # Load the image and drop first n frames
    func_img = image.index_img(image.load_img(bold_fp), slice(CUTOFF, None))

    # Load confounds
    confounds = pd.read_csv(conf_fp, sep='\t') \
                    .loc[CUTOFF:, [
                               "a_comp_cor_00",
                               "a_comp_cor_01",
                               "a_comp_cor_02",
                               "a_comp_cor_03",
                               "a_comp_cor_04",
                               "a_comp_cor_05",
                               "global_signal",
                               "white_matter",
                               "csf",
                               "trans_x",
                               "trans_y",
                               "trans_z",
                               'rot_x',
                               'rot_y',
                               'rot_z']]

    # Create parcellation object with additional pre-processing parameters
    willard_mask = NiftiLabelsMasker(willard_img,
                                     detrend=True,
                                     t_r=0.802,
                                     low_pass=0.1,
                                     high_pass=0.01,
                                     standardize=True,
                                     memory=HOMEDIR + 'cache',
                                     memory_level=1)

    # Process and perform parcellation
    roi_time_series = willard_mask.fit_transform(func_img,
                                                 confounds=confounds.values)

    # Write into csv
    csv_data = pd.DataFrame(roi_time_series)
    csv_data.to_csv(OUTDIR + "sub-{0:0>3}_ses-{1}_task-{2}_run-" \
                    "{3}.csv".format(item[0], item[1].lower(), item[2], item[3]),
                               header=False, index=False)
Example #22
def post_processed_data(atlas_data, regressors):
    """A post-processed version of the atlas_data, which is generated directly
    by nilearn rather than niimasker. The results from niimasker should
    directly match what is produced by nilearn.
    """
    labels_img = _get_atlas()['maps']
    masker = NiftiLabelsMasker(labels_img, standardize=True, smoothing_fwhm=5,
                               detrend=True, low_pass=.1, high_pass=.01, t_r=2)

    confounds = regressors.values
    return masker.fit_transform(atlas_data, confounds=confounds)
Example #23
def cal_connectome(fmri_ff,
                   confound_ff,
                   atlas_ff,
                   outputjpg_ff,
                   metric='correlation',
                   labelrange=None,
                   label_or_map=0):
    if label_or_map == 0:
        # 'correlation', 'partial correlation', 'tangent', 'covariance', 'precision'
        masker = NiftiLabelsMasker(labels_img=atlas_ff,
                                   standardize=True,
                                   verbose=0)
    else:
        masker = NiftiMapsMasker(maps_img=atlas_ff,
                                 standardize=True,
                                 verbose=0)

    time_series_0 = masker.fit_transform(fmri_ff, confounds=confound_ff)
    if labelrange is None:
        labelrange = np.arange(time_series_0.shape[1])
    time_series = time_series_0[:, labelrange]
    if metric == 'sparse inverse covariance':
        try:
            estimator = GraphLassoCV()
            estimator.fit(time_series)
            correlation_matrix = -estimator.precision_
        except Exception:
            correlation_matrix = np.zeros(
                (time_series.shape[1], time_series.shape[1]))
    else:
        correlation_measure = ConnectivityMeasure(kind=metric)
        correlation_matrix = correlation_measure.fit_transform([time_series])[0]

    # Plot the correlation matrix

    fig = plt.figure(figsize=(6, 5), dpi=100)
    plt.clf()
    # Mask the main diagonal for visualization:
    np.fill_diagonal(correlation_matrix, 0)

    plt.imshow(correlation_matrix,
               interpolation="nearest",
               cmap="RdBu_r",
               vmax=0.8,
               vmin=-0.8)
    plt.gca().yaxis.tick_right()
    plt.axis('off')
    plt.colorbar()
    plt.title(metric.title(), fontsize=12)
    plt.tight_layout()
    fig.savefig(outputjpg_ff, bbox_inches='tight')
    plt.close()
    return correlation_matrix
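A hedged usage sketch with placeholder paths; label_or_map=0 selects the NiftiLabelsMasker branch for a label atlas:

corr = cal_connectome('sub-01_bold.nii.gz', 'sub-01_confounds.csv',
                      'aal_atlas.nii.gz', 'sub-01_conn.jpg',
                      metric='correlation', label_or_map=0)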
Example #24
def _set_volume_masker(roi_file, as_voxels=False, **kwargs):
    """Check and see if multiple ROIs exist in atlas file"""

    if not isinstance(roi_file, str):
        raise ValueError('roi_file must be a file name string')

    if roi_file.endswith('.csv') or roi_file.endswith('.tsv'):
        roi = _read_coords(roi_file)
        n_rois = len(roi)
        print('  {} region(s) detected from coordinates'.format(n_rois))

        if kwargs.get('radius') is None:
            warnings.warn('No radius specified for coordinates; setting '
                          'to nilearn.input_data.NiftiSpheresMasker default '
                          'of extracting from a single voxel')
        masker = NiftiSpheresMasker(roi, **kwargs)
    
    elif roi_file.endswith('.nii.gz'):
        # remove args for NiftiSpheresMasker 
        if 'radius' in kwargs:
            kwargs.pop('radius')
        if 'allow_overlap' in kwargs:
            kwargs.pop('allow_overlap')
    
        roi_img = image.load_img(roi_file)
        if len(roi_img.shape) == 4:
            n_rois = roi_img.shape[-1]
            print('  {} region(s) detected from {}'.format(n_rois,
                                                        roi_img.get_filename()))
            masker = NiftiMapsMasker(roi_img, allow_overlap=True, **kwargs)
        else:
            n_rois = len(np.unique(roi_img.get_fdata())) - 1
            print('  {} region(s) detected from {}'.format(n_rois,
                                                        roi_img.get_filename()))
            if n_rois > 1:
                masker = NiftiLabelsMasker(roi_img, **kwargs)
            elif n_rois == 1:
                # binary mask for single ROI 
                if as_voxels:
                    if 'mask_img' in kwargs:
                        kwargs.pop('mask_img')
                    masker = NiftiMasker(roi_img, **kwargs)
                else:
                    # more computationally efficient if only wanting the mean
                    masker = NiftiLabelsMasker(roi_img, **kwargs)
            else:
                raise ValueError('No ROI detected; check ROI file')
    
    else:
        raise ValueError('Invalid file type for roi_file. Must be one of: '
                         '.nii.gz, .csv, .tsv')
    
    return masker, n_rois
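A hedged usage sketch with placeholder file names; remaining kwargs are forwarded to whichever nilearn masker is chosen:

masker, n_rois = _set_volume_masker('atlas.nii.gz', standardize=True)
masker, n_rois = _set_volume_masker('coords.tsv', radius=6)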
Example #25
def load_masked_rest(sub, mask):

    # helper to load in a single run of DFR data and mask it
    nifti_masker = NiftiLabelsMasker(labels_img=mask)
    data_path = base_path + '/subjects/ID' + sub + '/analysis/restfMRI/swrRestingState.nii'

    rest_in = os.path.join(data_path)
    rest_data = nib.load(rest_in)
    print("Loading rest from %s" % rest_in)

    rest_masked = nifti_masker.fit_transform(rest_data)
    return rest_masked
Example #26
 def apply_atlas(self, images):
     """
     Brain parcellation from an image using an atlas and a feature reduction method
     :param images: iterable with list of files or with np.arrays
     :return: np.array of dimensions len(images) x number or atlas regions
     """
     atlas_filename = self.cfg['paths']['atlas']
     masker = NiftiLabelsMasker(labels_img=atlas_filename,
                                strategy=self.atlas_strategy)
     if isinstance(images[0], np.ndarray):
         images = np.array(
             [Nifti1Image(x, affine=np.eye(4)) for x in images])
     return masker.fit_transform(imgs=images)
Example #27
def make_masker(scheme):
    '''
    Parameters
    ----------
    scheme : String
        The type of parcellation wanted.

    Returns
    -------
    masker: nilearn.input_data.NiftiLabelsMasker
        Masker of the chosen scheme.
    labels: list
        Labels of all the regions in parcellation.
    '''
    if scheme.lower() == "harvox":  # 48 regions
        dataset = datasets.fetch_atlas_harvard_oxford('cort-maxprob-thr25-2mm')
        atlas_filename = dataset.maps
        labels = dataset.labels[1:]  # trim off "background" label
        masker = NiftiLabelsMasker(labels_img=atlas_filename,
                                   standardize=True,
                                   high_variance_confounds=True,
                                   verbose=1)
    elif scheme.lower() == "yeo":  # 17 regions
        dataset = datasets.fetch_atlas_yeo_2011()
        masker = NiftiLabelsMasker(labels_img=dataset['thick_17'],
                                   standardize=True,
                                   high_variance_confounds=True,
                                   verbose=1)
        labels = [
            "Visual A", "Visual B", "Somatomotor A", "Somatomotor B",
            "Dorsal Attention A", "Dorsal Attention B",
            "Salience/Ventral Attention A", "Salience/Ventral Attention B",
            "Limbic A", "Limbic B", "Control C", "Control A", "Control B",
            "Temporal Parietal", "Default C", "Default A", "Default B"
        ]  # list from valerie-jzr
    elif scheme.lower() == "aal":  # 116 regions
        dataset = datasets.fetch_atlas_aal(version='SPM12')
        labels = dataset['labels']
        masker = NiftiLabelsMasker(labels_img=dataset['maps'],
                                   standardize=True,
                                   high_variance_confounds=True,
                                   verbose=1)
    elif scheme.lower() == "schaefer":
        dataset = datasets.fetch_atlas_schaefer_2018(n_rois=100,
                                                     yeo_networks=17)
        labels = dataset['labels']
        masker = NiftiLabelsMasker(labels_img=dataset['maps'],
                                   standardize=True,
                                   high_variance_confounds=True,
                                   verbose=1)
    else:
        raise ValueError("scheme must be one of 'harvox', 'yeo', 'aal', 'schaefer'")
    return masker, labels
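A hedged usage sketch: build the AAL masker, then extract one subject's time series ('func.nii.gz' is a placeholder path):

masker, labels = make_masker('aal')
ts = masker.fit_transform('func.nii.gz')  # ts.shape[1] == len(labels)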
Example #28
    def _run_interface(self, runtime):
        fname = self.inputs.fmri_denoised
        bold_img = nb.load(fname)
        masker = NiftiLabelsMasker(labels_img=self.inputs.parcellation, standardize=True)
        time_series = masker.fit_transform(bold_img, confounds=None)
        corr_measure = ConnectivityMeasure(kind='correlation')
        corr_mat = corr_measure.fit_transform([time_series])[0]

        _, base, _ = split_filename(fname)
        conn_file = f'{self.inputs.output_dir}/{base}_conn_mat.npy'
        np.save(conn_file, corr_mat)
        self._results['corr_mat'] = conn_file

        return runtime
Example #29
def extract_correlation_matrix(data_filename,
                               confounds_filename,
                               atlas_name="destrieux_2009",
                               correlation_type='correlation'):

    atlas_filename = get_atlas(atlas_name)
    #labels = atlas['labels']

    masker = NiftiLabelsMasker(labels_img=atlas_filename, standardize=True)
    time_series = masker.fit_transform(data_filename,
                                       confounds=confounds_filename)
    correlation_measure = ConnectivityMeasure(kind=correlation_type)
    correlation_matrix = correlation_measure.fit_transform([time_series])[0]

    return correlation_matrix
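A hedged usage sketch with placeholder filenames; get_atlas must resolve the atlas name as in the function above:

conn = extract_correlation_matrix('sub-01_bold.nii.gz', 'sub-01_confounds.csv',
                                  atlas_name='destrieux_2009')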
Example #30
 def _run_interface(self, runtime):
     print("\nExtracting signal from FreeSurfer's RoI...\n")
     #Getting the confounds
     if isinstance(self.inputs.confounds, np.ndarray):
         confounds = self.inputs.confounds
         confName = self.inputs.confoundsName
     else:
         confounds = None
         confName = "NoConfounds"
     if not self.inputs.lutFile:
         self.inputs.lutFile = os.path.join(os.environ['FREESURFER_HOME'],
                                            "FreeSurferColorLUT.txt")
     #Mask to extract time_series/RoI
     masker = NiftiLabelsMasker(self.inputs.roi_file)
     #Time series extracted/RoI, dimensions (timestamps x nb RoI)
     roiTimeSeries = masker.fit_transform(self.inputs.fmri_file,
                                          confounds=confounds)
     #Getting LUT Table
     lutTable = np.loadtxt(self.inputs.lutFile, dtype=str)[1:]
     #RoIs present in the roi_file
     rois_Present = np.unique(
         nilearn.image.load_img(self.inputs.roi_file).get_fdata())
     #Names of the RoIs
     rois_Present_Names = [
         region[1] for region in lutTable if int(region[0]) in rois_Present
     ]
     #DataFrame containing time_series/RoI
     time_seriesDF = pd.DataFrame(roiTimeSeries, columns=rois_Present_Names)
     #Name of the OutPutFile
     directory = os.path.join(self.inputs.output_dir, "time_series")
     if not os.path.exists(directory):
         try:
             os.makedirs(directory)
         except Exception as e:
             print("Could not create directory {}: {}".format(directory, e))
     outFile = os.path.join(
         directory, self.inputs.prefix + self.inputs.confoundsName +
         "TimeSeriesRoI.tsv")
     #Output value
     self._results["time_series"] = time_seriesDF.values
     self._results["roiLabels"] = list(time_seriesDF.columns)
     self._results["confName"] = confName
     print(
         "Time series successfully computed.\nSaving data in {}\n\n".format(
             outFile))
     #Saving as a tsv file
     time_seriesDF.to_csv(outFile, sep="\t", index=False)
     return runtime
Example #31
def load_masked_DFR_data(sub, run, mask):

    # helper to load in a single run of DFR data and mask it
    nifti_masker = NiftiLabelsMasker(labels_img=mask)

    data_path = base_path + '/subjects/ID' + sub + '/analysis/fMRI/SPM'

    # Load MRI file (in Nifti format) of one localizer run
    DFR_in = os.path.join(data_path, "swrDFR_run%d.nii" % run)
    #DFR_in = os.path.join(base_path, "swrDFR_%s_run%d.nii" % (sub, run))

    DFR_data = nib.load(DFR_in)
    print("Loading data from %s" % DFR_in)
    DFR_masked_data = nifti_masker.fit_transform(DFR_data)
    DFR_masked_data = np.transpose(DFR_masked_data)
    return DFR_masked_data
Example #32
def get_data(subject_dir, labels_img='labels_level_3.nii.gz',
             mask_img='gm_mask.nii', smoothing_fwhm=6):
    if not os.path.exists(mask_img):
        os.system('python compute_gm_mask.py')
    files = sorted(glob.glob(os.path.join(
        subject_dir, 'rfMRI_REST?_??/rfMRI_REST?_??.nii.gz')))
    confounds = get_confounds(subject_dir, files, mask_img=mask_img)
    masker = NiftiLabelsMasker(labels_img=labels_img, mask_img=mask_img,
                               smoothing_fwhm=smoothing_fwhm,
                               standardize=True, resampling_target='labels',
                               detrend=True, low_pass=.1, t_r=.7)

    subj_data = []
    for (f_ix, f) in enumerate(files):
        base_name = os.path.basename(f)
        subj_data.append(
            {'data': masker.fit_transform(f,
                                          confounds=confounds[f_ix]),
             'session': int(base_name[10]),
             'scan': base_name[12:14]})
    return subj_data
Example #33
os.chdir(path+stud)  # set path of data
for name in sorted(glob.glob('*.gz')):  # use glob to find the recently downloaded filenames
    subid = name.split('_00')  # separates the keywords to extract the ID number
    subid = subid[1][:5]  # extract ID

    inF = gzip.open(name, 'rb')  # opens .gz file
    outF = open('{0}.nii'.format(subid), 'wb')   # creates a new file using fileID as the name
    outF.write(inF.read())  # extract and write the .nii file
    inF.close()
    outF.close()
    os.remove(name)  # deletes the .nii.gz file

os.chdir(path+mask)
for msk in sorted(glob.glob('*')):
    mask_name = msk[:-4]
    masker = NiftiLabelsMasker(labels_img=msk, standardize=True)  # sets the atlas used

    os.chdir(path+stud)
    for name in sorted(glob.glob('*[0-9].nii')):
        subid = name[:5]
        # extract time series data
        ts = masker.fit_transform(name)  # masks must be in same directory as data
        norm = np.corrcoef(ts.T)

        # find DX by matching the rows
        cors = [subid]
        d = idlab.index([subid])
        cors.append(dxlab[d][0])
        # flatten the correlation matrix
        for i in range(1, np.size(norm, axis=0)):
            for j in range(i+1, np.size(norm, axis=0)):
                cors.append(norm[i, j])
Example #34
    crad = ds.fetch_atlas_craddock_2012()
    # atlas_nii = index_img(crad['scorr_mean'], 19)  # Craddock 200 region atlas
    atlas_nii = index_img(crad['scorr_mean'], 9)  # Craddock 100 region atlas

    r_atlas_nii = resample_img(
        img=atlas_nii,
        target_affine=mask_file.affine,
        target_shape=mask_file.shape,
        interpolation='nearest'
    )
    r_atlas_nii.to_filename('debug_ratlas.nii.gz')

    from nilearn.input_data import NiftiLabelsMasker
    nlm = NiftiLabelsMasker(
        labels_img=r_atlas_nii, mask_img=mask_file,
        standardize=True, detrend=True)

    nlm.fit()
    FS_regpool = nlm.transform(all_sub_rs_maps)
    np.save('%i_regs_timeseries' % sub_id, FS_regpool)

    # compute network sparse inverse covariance
    from sklearn.covariance import GraphLassoCV
    from nilearn.image import index_img
    from nilearn import plotting

    try:
        gsc_nets = GraphLassoCV(verbose=2, alphas=20)
        gsc_nets.fit(FS_regpool)
Example #35
from nilearn import datasets

# Retrieve our atlas
dataset = datasets.fetch_atlas_harvard_oxford('cort-maxprob-thr25-2mm')
atlas_filename, labels = dataset.maps, dataset.labels

print('Atlas ROIs are located in nifti image (4D) at: %s' %
      atlas_filename)  # 4D data

# And one subject of resting-state data
data = datasets.fetch_adhd(n_subjects=1)

# To extract signals on a parcellation defined by labels, we use the
# NiftiLabelsMasker
from nilearn.input_data import NiftiLabelsMasker
masker = NiftiLabelsMasker(labels_img=atlas_filename, standardize=True,
                           memory='nilearn_cache', verbose=5)


# Here we go from nifti files to the signal time series in a numpy
# array. Note how we give confounds to be regressed out during signal
# extraction
time_series = masker.fit_transform(data.func[0], confounds=data.confounds)

import numpy as np
correlation_matrix = np.corrcoef(time_series.T)

# Plot the correlation matrix
from matplotlib import pyplot as plt
plt.figure(figsize=(10, 10))
plt.imshow(correlation_matrix, interpolation="nearest")
Example #36
"""
Run this when you already have your data downloaded and unzipped.

Date: 19 October 2015
"""
__author__ = '2d Lt Kyle Palko'
__version__ = 'v0.1.0'

import glob
import os
import csv
import numpy as np
from nilearn.input_data import NiftiLabelsMasker

masker = NiftiLabelsMasker(labels_img='/media/kap/8e22f6f8-c4df-4d97-a388-0adcae3ec1fb/Python/Thesis/TT/tt_mask_pad.nii',
                           standardize=True)  # sets the atlas used
atlas = 'TT'  # label which atlas to use
stud = 'Olin'
os.chdir('/media/kap/8e22f6f8-c4df-4d97-a388-0adcae3ec1fb/Python/Thesis/Data/{0}'.format(stud))
for n in sorted(glob.glob('*[0-9].nii')):
    str_id = n[:5]  # sets the current image ID
#    masked_data = apply_mask(n, str_id+'_mask.nii')
    ts = masker.fit_transform('{0}.nii'.format(str_id))

    norm = np.corrcoef(ts.T)

    # flatten the correlation matrix
    cors = [str_id]
    for i in range(1, np.size(norm, axis=0)):
        for j in range(i+1, np.size(norm, axis=0)):
            cors.append(norm[i, j])
Example #37
##########################################################################
# Extract coordinates on Yeo atlas - parcellations
# ------------------------------------------------
from nilearn.input_data import NiftiLabelsMasker
from nilearn.connectome import ConnectivityMeasure

# ConnectivityMeasure from Nilearn uses simple 'correlation' to compute
# connectivity matrices for all subjects in a list
connectome_measure = ConnectivityMeasure(kind='correlation')

# useful for plotting connectivity interactions on glass brain
from nilearn import plotting

# create masker to extract functional data within atlas parcels
masker = NiftiLabelsMasker(labels_img=yeo['thick_17'], standardize=True,
                           memory='nilearn_cache')

# extract time series from all subjects and concatenate them
time_series = []
for func, confounds in zip(data.func, data.confounds):
    time_series.append(masker.fit_transform(func, confounds=confounds))

# calculate correlation matrices across subjects and display
correlation_matrices = connectome_measure.fit_transform(time_series)

# Mean correlation matrix across 10 subjects can be grabbed like this,
# using connectome measure object
mean_correlation_matrix = connectome_measure.mean_

# grab center coordinates for atlas labels
coordinates = plotting.find_parcellation_cut_coords(labels_img=yeo['thick_17'])
Example #38
import glob, os, sys, warnings
import numpy as np
from nilearn.image import resample_img, index_img
import nibabel as nib

os.chdir(WD)

rs_niis = sorted(glob.glob(DATA_DIR + os.sep + '*.nii.gz'))  # participants
analysis_name = scca_rs_weights.split(os.sep)[-1].split('.')[0]
label_atlas_nii = nib.load(roiLabel)
rest_loading = np.load(expanduser(scca_rs_weights))
loading_labels = np.load(expanduser(scca_rs_weights_keys))
idx = np.triu_indices(n_ROIs, 1)

from nilearn.input_data import NiftiLabelsMasker
masker = NiftiLabelsMasker(labels_img=label_atlas_nii, standardize=True,
                           memory='nilearn_cache', verbose=0)
masker.fit()

# average loadings across all components
def mean_nonzero(data, a):
    with warnings.catch_warnings():
        warnings.simplefilter("ignore", category=RuntimeWarning)
        temp_sum = np.sum(data!=0, axis=a)
        # temp_sum [temp_sum==0] = 1 
        output = np.sum(data, axis=a)/temp_sum
        output[np.isnan(output)] = 0
    return output

ave_loadings_flat = mean_nonzero(rest_loading,1)
ave_loadings_mat = np.zeros((14,14))
ave_loadings_mat[idx] = ave_loadings_flat
Example #39
       'vs':'VANWATERLOO',
       }
#names='ap','as','bh','bi','cmp','cas','cs','cb','gm','gn','gbn','mv','ms','pm','pc','ph','pa','pv','pom','rdc','ti','vs'
smt='ss'
fold_g = 'F:/IRM_Marche/'
blocks_i=np.loadtxt(fold_g+'block_main.txt','int')
label_i=np.loadtxt(fold_g+'label_main.txt','S12')


coords=get_masker_coord('BASC')
from nilearn.datasets import load_mni152_brain_mask
basc = datasets.fetch_atlas_basc_multiscale_2015(version='asym')['scale444']
brainmask = load_mni152_brain_mask()
masker = NiftiLabelsMasker(labels_img = basc, mask_img = brainmask, 
                           memory_level=1, verbose=0,
                           detrend=True, standardize=False,  
                           high_pass=0.01,t_r=2.28,
                           resampling_target='labels'
                           )
masker.fit()

scaler = preprocessing.StandardScaler()
svm = SVC(C=1., kernel="linear")


## INDIVIDUAL ANALYSIS
#index=[]
#for x in range(label.shape[0]):
#    if label[x,0]!=label[x-1,0]:
#        index.append(x)
#    elif label[x,0]!=label[x-2,0]:
#        index.append(x)
Example #40
    label_atlas[cur_roi_data > 0.1] = i_roi + 1


#create labels
label_atlas_nii = nib.Nifti1Image(
    label_atlas,
    affine=tmp_nii.affine,
    header=tmp_nii.header
)

#save for future usage
label_atlas_nii.to_filename(roiLabel)
#load saved labels
# label_atlas_nii= nib.load(roiLabel)

masker = NiftiLabelsMasker(labels_img=label_atlas_nii, standardize=True,
                           memory='nilearn_cache', verbose=0)
masker.fit()

corr_mat_vect_list = []
ind_list = []


for i_rs_img, rs_img in enumerate(rs_niis):
    print('%i/%i: %s' % (i_rs_img + 1, len(rs_niis), rs_img))
    rs_reg_ts = masker.transform(rs_img)
    corr_mat = np.corrcoef(rs_reg_ts.T)
    triu_inds = np.triu_indices(corr_mat.shape[0], 1)
    corr_mat_vect = corr_mat[triu_inds]
    # save for later
    corr_mat_vect_list.append(corr_mat_vect)
corr_mat_vect_array = np.array(corr_mat_vect_list)
Example #41
##############################################################################
# Use the new ROIs, to extract data maps in both ROIs

# We extract data from ROIs using nilearn's NiftiLabelsMasker
from nilearn.input_data import NiftiLabelsMasker

# Before data extraction, we convert the labels array to a Nifti-like image. All
# inputs to NiftiLabelsMasker must be Nifti-like images or filenames of Nifti
# images. We use the same reference image as used above in previous sections
labels_img = new_img_like(fmri_img, labels)
# First, initialize masker with parameters suited for data extraction using
# labels as input image, resampling_target is None as affine, shape/size is same
# for all the data used here, time series signal processing parameters
# standardize and detrend are set to False
masker = NiftiLabelsMasker(labels_img, resampling_target=None,
                           standardize=False, detrend=False)
# After initializing the masker object, we call fit() to prepare the
# labels_img data according to the given parameters.
masker.fit()
# Prepare for data extraction: set the number of conditions, sizes, etc.
# from the Haxby dataset.
condition_names = haxby_labels.unique()
n_cond_img = fmri_data[..., haxby_labels == 'house'].shape[-1]
n_conds = len(condition_names)

X1, X2 = np.zeros((n_cond_img, n_conds)), np.zeros((n_cond_img, n_conds))
# Gather the data for each condition, then call the masker's transform() on
# each condition map. The transformer extracts data from the target regions
# specified by the labels image.
for i, cond in enumerate(condition_names):
    # the source was cut off mid-call here; the remainder of the loop body
    # follows the corresponding nilearn ROI-extraction example
    cond_maps = new_img_like(
        fmri_img, fmri_data[..., haxby_labels == cond])
    mask_data = masker.transform(cond_maps)
    X1[:, i], X2[:, i] = mask_data[:, 0], mask_data[:, 1]
# (the atlas fetch itself was truncated in the source; `dataset` is the
# return value of a nilearn datasets.fetch_atlas_* call)
atlas_filename = dataset.maps
labels = dataset.labels

print('Atlas ROIs are located in nifti image (4D) at: %s' %
      atlas_filename)  # 4D data

# One subject of resting-state data
data = datasets.fetch_adhd(n_subjects=1)
fmri_filenames = data.func[0]

##############################################################################
# Extract signals on a parcellation defined by labels
# -----------------------------------------------------
# Using the NiftiLabelsMasker
from nilearn.input_data import NiftiLabelsMasker
masker = NiftiLabelsMasker(labels_img=atlas_filename, standardize=True,
                           memory='nilearn_cache', verbose=5)

# Here we go from nifti files to the signal time series in a numpy
# array. Note how we give confounds to be regressed out during signal
# extraction
time_series = masker.fit_transform(fmri_filenames, confounds=data.confounds)


##############################################################################
# Compute and display a correlation matrix
# -----------------------------------------
from nilearn.connectome import ConnectivityMeasure
correlation_measure = ConnectivityMeasure(kind='correlation')
correlation_matrix = correlation_measure.fit_transform([time_series])[0]

# Plot the correlation matrix
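# The plotting code was cut off in the source; a minimal matplotlib sketch,
# consistent with the imshow-based plots used elsewhere in these examples:
import matplotlib.pyplot as plt
plt.figure(figsize=(10, 10))
plt.imshow(correlation_matrix, interpolation='nearest', cmap='RdBu_r',
           vmin=-0.8, vmax=0.8)
plt.colorbar()
plt.show()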
Example #43
0
ab_img_two = datasets.fetch_abide_pcp(data_dir=path, n_subjects=1, pipeline='cpac', band_pass_filtering=True,
                                      derivatives=['func_preproc'], SUB_ID=[50004])

ab_mask_one = datasets.fetch_abide_pcp(data_dir=path, n_subjects=1, pipeline='cpac', band_pass_filtering=True,
                                       derivatives=['func_mask'], SUB_ID=[50003])
ab_mask_two = datasets.fetch_abide_pcp(data_dir=path, n_subjects=1, pipeline='cpac', band_pass_filtering=True,
                                       derivatives=['func_mask'], SUB_ID=[50004])  # mask for subject 50004, matching ab_img_two

my_data = ['pitt3.nii', 'pitt4.nii']  # have to rename the two files that were downloaded

# Apply the mask to the fMRI images. The mask selects the regions of the image that will be extracted for use.
from nilearn.masking import apply_mask
masked_data = apply_mask(my_data[0], 'pitt3mask.nii')  # just mask the first image (fMRI, mask)
print('Completed Masking')

# Use the atlas to extract image data from the masked fMRI images
from nilearn.input_data import NiftiLabelsMasker
# masker = NiftiLabelsMasker(labels_img='tt_mask_pad.nii', standardize=True)
# masker = NiftiLabelsMasker(labels_img='aal_mask_pad.nii', standardize=True)
masker = NiftiLabelsMasker(labels_img='CC400.nii', standardize=True)  # set the atlas used for signal extraction
time_series = masker.fit_transform(my_data[0])  # extract ROI time series from the fMRI image

# Use the time series data to find correlations between the ROIs and plot matrix
import numpy as np
import matplotlib.pyplot as plt

correlation_matrix = np.corrcoef(time_series.T)
plt.figure(figsize=(10, 10))
plt.imshow(correlation_matrix, interpolation='nearest')

plt.show()
print('Completed Program')
def make_functional_derivatives(population, workspace_dir, freesurfer_dir, derivatives_dir):

    print('========================================================================================')
    print('')
    print('                Tourettome - 008. CREATING FUNCTIONAL FEATURES                          ')
    print('')
    print('========================================================================================')

    # global IO
    sca_dir = mkdir_path(os.path.join(derivatives_dir, 'func_seed_correlation'))
    gm_group_mask = os.path.join(derivatives_dir, 'func_centrality/GROUP_GM_FUNC_3mm.nii')

    count = 0
    for subject in population:
        count += 1
        print('###################################################################')
        print('Extracting functional derivatives for subject %s' % subject)

        # subject I/0
        subject_dir = os.path.join(workspace_dir, subject)

        for denoise_type in ['compcor', 'gsr', 'censor', 'gsr_censor']:
            print('Calculating derivatives for denoise type: %s' % denoise_type)
            sca_dir = mkdir_path(os.path.join(derivatives_dir, 'func_seed_correlation', denoise_type))

            func_denoised = os.path.join(subject_dir, 'DENOISE', 'residuals_%s' % denoise_type, 'residual.nii.gz')

            if os.path.isfile(func_denoised):

                ################################################################################################################
                ### 1- Seed-Based Correlation
                ################################################################################################################

                print('1. Calculating SCA')

                for seed_name in seeds:
                    if not os.path.isfile(os.path.join(sca_dir, seed_name, '%s_sca_z.nii.gz' % subject)):
                        print(seed_name)
                        seed_dir = mkdir_path(os.path.join(sca_dir, seed_name))

                        TR = nb.load(func_denoised).header['pixdim'][4]
                        # Extract seed timeseries
                        seed = seeds[seed_name]
                        masker_seed = NiftiLabelsMasker(labels_img=seed,
                                                        smoothing_fwhm=6, detrend=False, standardize=True,
                                                        low_pass=0.1, high_pass=0.01, t_r=TR,
                                                        memory='nilearn_cache', verbose=0)

                        timeseries_seed = masker_seed.fit_transform(func_denoised)
                        print('seed timeseries shape:', timeseries_seed.shape)

                        # Extract brain timeseries
                        masker_brain = NiftiMasker(smoothing_fwhm=6, detrend=False, standardize=True,
                                                   low_pass=0.1, high_pass=0.01, t_r=TR,
                                                   memory='nilearn_cache', memory_level=1, verbose=0)
                        timeseries_brain = masker_brain.fit_transform(func_denoised)
                        print('brain timeseries shape:', timeseries_brain.shape)

                        # Seed Based Correlation
                        # see Nilearn http://nilearn.github.io/auto_examples/03_connectivity/plot_seed_to_voxel_correlation.html#sphx-glr-auto-examples-03-connectivity-plot-seed-to-voxel-correlation-py
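                        # both maskers standardize their timeseries, so this dot
                        # product divided by n_timepoints is the Pearson
                        # correlation; np.arctanh below applies the Fisher
                        # r-to-z transform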
                        sca = np.dot(timeseries_brain.T, timeseries_seed) / timeseries_seed.shape[0]
                        sca_rz = np.arctanh(sca)
                        print("seed-based correlation R: min = %.3f; max = %.3f" % (sca.min(), sca.max()))
                        print("seed-based correlation R-to-Z : min = %.3f; max = %.3f" % (sca_rz.min(), sca_rz.max()))

                        # Save the seed-to-brain correlation as a Nifti image
                        sca_img = masker_brain.inverse_transform(sca.T)
                        sca_img.to_filename(os.path.join(seed_dir, '%s_sca_z.nii.gz'%subject))

                        ######### SKIP since already smoothed with nilearn
                        #smooth
                        # Smoothing kernel
                        # FWHM = 6
                        # sigma = FWHM / 2.35482004503
                        # os.chdir(seed_dir)
                        # os.system('fslmaths %s_sca_z -s %s %s_sca_z_fwhm6.nii.gz'%(subject, sigma, subject))

                        # skip the nilearn approach..... do it with freesurfer...
                        # Map seed-to-voxel onto surface
                        # sca_lh = surface.vol_to_surf(sca_img, fsaverage5['pial_left']).ravel()
                        # sca_rh = surface.vol_to_surf(sca_img, fsaverage5['pial_right']).ravel()
                        #
                        # # Save seed-to-vertex correlation as a txt file
                        # np.save(os.path.join(seed_dir, '%s_sca_z_fwhm6_lh.npy'%subject), sca_lh)
                        # np.save(os.path.join(seed_dir, '%s_sca_z_fwhm6_rh.npy'%subject), sca_rh)
                        ####################

                    seed_dir = os.path.join(sca_dir, seed_name)
                    if not os.path.isfile(os.path.join(seed_dir, '%s_sca_z_fsaverage5_fwhm10_rh.mgh' % subject)):
                        os.chdir(seed_dir)
                        for hemi in ['lh', 'rh']:
                            # vol2surf
                            os.system('mri_vol2surf '
                                      '--mov %s_sca_z.nii.gz '
                                      '--regheader %s '
                                      '--projfrac-avg 0.2 0.8 0.1 '
                                      '--interp nearest '
                                      '--hemi %s '
                                      '--out %s_sca_z_%s.mgh'
                                      %(subject, subject, hemi, subject, hemi))
                            #surf2surf
                            os.system('mri_surf2surf '
                                      '--s %s '
                                      '--sval  %s_sca_z_%s.mgh '
                                      '--hemi %s '
                                      '--trgsubject fsaverage5 '
                                      '--tval %s_sca_z_fsaverage5_fwhm00_%s.mgh'
                                      % (subject, subject, hemi, hemi, subject, hemi))

                            os.system('mri_surf2surf '
                                      '--s %s '
                                      '--sval  %s_sca_z_%s.mgh '
                                      '--hemi %s '
                                      '--trgsubject fsaverage5 '
                                      '--fwhm-src 10 '
                                      '--tval %s_sca_z_fsaverage5_fwhm10_%s.mgh'
                                      %(subject, subject, hemi, hemi, subject, hemi))

                        os.system('rm -rf %s_sca_z_lh.mgh %s_sca_z_rh.mgh' %(subject,subject))


                ################################################################################################################
                ### 2- Connectome
                ################################################################################################################
                print('2. Calculating Power-264 connectome')

                connectome_dir = mkdir_path(os.path.join(derivatives_dir, 'func_connectome', denoise_type))
                if not os.path.isfile(os.path.join(connectome_dir, '%s_power264_tangent.npy'%subject)):

                    # get power-264 coordinates
                    atlas = datasets.fetch_coords_power_2011()
                    coords = np.vstack((atlas.rois['x'], atlas.rois['y'], atlas.rois['z'])).T

                    # Extract signals
                    spheres_masker = input_data.NiftiSpheresMasker(seeds=coords,
                                                                   smoothing_fwhm=6,
                                                                   radius=5.,
                                                                   detrend=False,
                                                                   standardize=True,
                                                                   low_pass=0.1,
                                                                   high_pass=0.01,
                                                                   t_r=2.)
                    timeseries = spheres_masker.fit_transform(func_denoised)
                    print('timeseries shape:', timeseries.shape)


                    for cor_type in ['correlation', 'tangent']:
                        if not os.path.isfile(os.path.join(connectome_dir, '%s_power264_%s.npy' % (subject, cor_type))):
                            correlation_measure = connectome.ConnectivityMeasure(kind=cor_type)
                            cmat = correlation_measure.fit_transform([timeseries])[0]
                            np.save(os.path.join(connectome_dir, '%s_power264_%s.npy'%(subject,cor_type)), cmat)
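                            # note: kind='tangent' embeds each matrix relative to
                            # a group mean; fit on a single subject's matrix (as
                            # here) the tangent vector is degenerate, so tangent
                            # connectomes are normally estimated across the whole
                            # group at once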

            else:
                print('Need denoising first')
#result_cv_tr_foot,permutation_scores_tr_foot, p_foot=permutation_score(pipeline,roi_foot_all,roi_hand_all,y_foot_all,groups,logo,n_p) 
#print('Train FOOT - IMAG VS STIM',np.array(result_cv_tr_foot).mean(),p_foot)
#result_cv_tr_hand,permutation_scores_tr_hand, p_hand=permutation_score(pipeline,roi_hand_all,roi_foot_all,y_hand_all,groups,logo,n_p)
#print('Train HAND - IMAG VS STIM',np.array(result_cv_tr_hand).mean(),p_hand)
#result_cv_tr_imag,permutation_scores_tr_imag, p_imag=permutation_score(pipeline,roi_imag_all,roi_stim_all,y_imag_all,groups,logo,n_p)   
#print('Train IMAG - HAND VS FOOT',np.array(result_cv_tr_imag).mean(),p_imag)
#result_cv_tr_stim,permutation_scores_tr_stim, p_stim=permutation_score(pipeline,roi_stim_all,roi_imag_all,y_stim_all,groups,logo,n_p)
#print('Train STIM - HAND VS FOOT',np.array(result_cv_tr_stim).mean(),p_stim)


# Prepare plotting
basc = datasets.fetch_atlas_basc_multiscale_2015(version='asym')['scale444']
brainmask = load_mni152_brain_mask()
masker = NiftiLabelsMasker(labels_img=basc, mask_img=brainmask,
                           memory_level=1, verbose=0,
                           detrend=True, standardize=False,
                           high_pass=0.01, t_r=2.28,
                           resampling_target='labels')
masker.fit()

pipeline.fit(roi_foot_all, y_foot_all)
coef_foot = pipeline.named_steps['svm'].coef_
weight_f = masker.inverse_transform(coef_foot)
plot_stat_map(weight_f, title='Train Foot', display_mode='z', cmap='bwr', threshold=0.4)


pipeline.fit(roi_hand_all, y_hand_all)
coef_hand = pipeline.named_steps['svm'].coef_
weight_h = masker.inverse_transform(coef_hand)
plot_stat_map(weight_h, title='Train Hand', display_mode='z', cmap='bwr', threshold=0.1)
Example #46
0
def make_parcellation(data_path, parcellation, parc_type=None, parc_params=None):
    """
    Performs a parcellation which reduces voxel space to regions of interest (brain data).

    Parameters
    ----------

    data_path : str
        Path to .nii image.
    parcellation : str
        Specify which parcellation you would like to use. For MNI: 'gordon2014_333', 'power2012_264'. For TAL: 'shen2013_278'.
        It is possible to add the Harvard-Oxford subcortical atlas and/or the SUIT cerebellar atlas on top of a cortical
        atlas (e.g. Gordon) by appending '+OH' (Harvard-Oxford subcortical) and/or '+SUIT' (SUIT cerebellar),
        e.g. 'gordon2014_333+OH+SUIT'.
    parc_type : str
        Can be 'sphere' or 'region'. If nothing is specified, the default for that parcellation will be used.
    parc_params : dict
        **kwargs for nilearn functions

    Returns
    -------

    data : array
        Data after the parcellation.

    NOTE
    ----
    These functions make use of nilearn. Please cite nilearn if used in a publication.
    """

    # default the add-on flags so they are defined even when parcellation is
    # not passed as a string (avoids a NameError further down)
    subcortical = None
    cerebellar = None
    if isinstance(parcellation, str):
        parcin = ''
        if '+' in parcellation:
            parcin = parcellation
            parcellation = parcellation.split('+')[0]
        subcortical = True if '+OH' in parcin else None
        cerebellar = True if '+SUIT' in parcin else None

        if not parc_type or not parc_params:
            path = tenetopath[0] + '/data/parcellation_defaults/defaults.json'
            with open(path) as data_file:
                defaults = json.load(data_file)
        if not parc_type:
            parc_type = defaults[parcellation]['type']
            print('Using default parcellation type')
        if not parc_params:
            parc_params = defaults[parcellation]['params']
            print('Using default parameters')

    if parc_type == 'sphere':
        parcellation = load_parcellation_coords(parcellation)
        seed = NiftiSpheresMasker(np.array(parcellation), **parc_params)
        data = seed.fit_transform(data_path)
    elif parc_type == 'region':
        path = tenetopath[0] + '/data/parcellation/' + parcellation + '.nii.gz'
        region = NiftiLabelsMasker(path, **parc_params)
        data = region.fit_transform(data_path)
    else:
        raise ValueError('Unknown parc_type specified')

    if subcortical:
        subatlas = fetch_atlas_harvard_oxford('sub-maxprob-thr0-2mm')['maps']
        region = NiftiLabelsMasker(subatlas, **parc_params)
        data_sub = region.fit_transform(data_path)
        data = np.hstack([data, data_sub])

    if cerebellar:
        path = tenetopath[0] + '/data/parcellation/Cerebellum-SUIT_space-MNI152NLin2009cAsym.nii.gz'
        region = NiftiLabelsMasker(path, **parc_params)
        data_cerebellar = region.fit_transform(data_path)
        data = np.hstack([data, data_cerebellar])

    return data
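
# A hypothetical usage sketch ('func_mni.nii.gz' is a placeholder path and
# must be a preprocessed image in the parcellation's template space):
data = make_parcellation('func_mni.nii.gz', 'gordon2014_333+OH')
print(data.shape)  # (n_timepoints, n_cortical + n_subcortical regions)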