Example #1
    def fallback_covariance(time_series):
        import numpy as np
        from sklearn.ensemble import IsolationForest
        from sklearn import covariance

        print('Matrix estimation failed with Lasso and shrinkage due to '
              'ill conditions. Removing potential anomalies from the '
              'time-series using IsolationForest...')

        # Remove gross outliers flagged by IsolationForest (-1 marks outliers)
        model = IsolationForest(contamination=0.02)
        model.fit(time_series)
        outlier_mask = model.predict(time_series)
        outlier_mask[outlier_mask == -1] = 0
        time_series = time_series[outlier_mask.astype('bool')]

        # Fall back to the Ledoit-Wolf estimator
        try:
            print("Trying Ledoit-Wolf Estimator...")
            conn_measure = ConnectivityMeasure(
                cov_estimator=covariance.LedoitWolf(store_precision=True,
                                                    assume_centered=True),
                kind=kind)
            conn_matrix = conn_measure.fit_transform([time_series])[0]
        except (np.linalg.LinAlgError, FloatingPointError):
            print("Trying Oracle Approximating Shrinkage Estimator...")
            conn_measure = ConnectivityMeasure(
                cov_estimator=covariance.OAS(assume_centered=True), kind=kind)
            try:
                conn_matrix = conn_measure.fit_transform([time_series])[0]
            except (np.linalg.LinAlgError, FloatingPointError):
                raise ValueError('All covariance estimators failed to '
                                 'converge...')

        return conn_matrix
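
The fragment above assumes `ConnectivityMeasure`, `np`, and `kind` come from an enclosing scope. A minimal sketch of that context, assuming the function is dedented to module level (toy data, illustrative only):

import numpy as np
from nilearn.connectome import ConnectivityMeasure

kind = 'correlation'  # the connectivity kind referenced inside fallback_covariance

rng = np.random.default_rng(0)
ts = rng.standard_normal((200, 10))  # (time points, regions), toy data
conn_matrix = fallback_covariance(ts)
print(conn_matrix.shape)  # (10, 10)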
Example #2
def precompute_flattened_fcm(subject_id=None):
    """ Derives the correlation matrices for the parcellated timeseries data.

    :param subject_id: subject ID if only one connectivity matrix needs to be precomputed
    :return: the flattened lower triangle of the correlation matrices for the parcellated timeseries data.
    Saved as a binary numpy array with the name of patient ID in the preprocessed timeseries directory.
    """

    conn_measure = ConnectivityMeasure(kind='correlation',
                                       vectorize=True,
                                       discard_diagonal=True)

    suffix = '_ts_raw.txt'

    if subject_id is not None:
        print("Processing %s" % subject_id)
        ts = np.loadtxt(os.path.join(data_timeseries, subject_id + suffix),
                        delimiter=',')
        np.save(os.path.join(data_computed_fcms, subject_id),
                conn_measure.fit_transform([np.transpose(ts)])[0])

    else:
        # Precompute all timeseries.
        ts_filenames = os.listdir(data_timeseries)
        suffix_length = len(suffix)

        for ts_file in ts_filenames:
            print("Processing %s" % ts_file)
            ts = np.loadtxt(os.path.join(data_timeseries, ts_file),
                            delimiter=',')
            np.save(os.path.join(data_computed_fcms, ts_file[:-suffix_length]),
                    conn_measure.fit_transform([np.transpose(ts)])[0])
Example #3
def get_conn_matrix(time_series, conn_model, NETWORK, ID, dir_path, thr):
    if conn_model == 'corr':
        conn_measure = ConnectivityMeasure(kind='correlation')
        conn_matrix = conn_measure.fit_transform([time_series])[0]
        est_path = dir_path + '/' + ID + '_est_corr' + '_' + str(thr) + '.txt'
    elif conn_model == 'corr_fast':
        try:
            conn_matrix = compute_correlation(time_series, time_series)
            est_path = dir_path + '/' + ID + '_est_corr_fast' + '_' + str(thr) + '.txt'
        except RuntimeError:
            print('Cannot run accelerated correlation computation due to a missing dependency. You need brainiak installed!')
            raise
    elif conn_model == 'partcorr':
        conn_measure = ConnectivityMeasure(kind='partial correlation')
        conn_matrix = conn_measure.fit_transform([time_series])[0]
        est_path = dir_path + '/' + ID + '_est_part_corr' + '_' + str(thr) + '.txt'
    elif conn_model == 'cov' or conn_model == 'sps':
        # Fit estimator to the data to get a sparse (inverse covariance) matrix
        estimator = GraphLassoCV()  # GraphicalLassoCV in current scikit-learn
        try:
            print("Fitting Lasso estimator...")
            est = estimator.fit(time_series)
        except RuntimeError:
            print('Unstable Lasso estimation -- attempting to re-run by first applying shrinkage...')
            # GraphLasso is GraphicalLasso in current scikit-learn
            from sklearn.covariance import GraphLasso, empirical_covariance, shrunk_covariance
            estimator_shrunk = None
            emp_cov = empirical_covariance(time_series)
            for i in np.arange(0.8, 0.99, 0.01):
                shrunk_cov = shrunk_covariance(emp_cov, shrinkage=i)
                alphaRange = 10.0 ** np.arange(-8, 0)
                for alpha in alphaRange:
                    try:
                        estimator_shrunk = GraphLasso(alpha)
                        est = estimator_shrunk.fit(shrunk_cov)
                        print("Calculated graph-lasso covariance matrix for alpha=%s" % alpha)
                        break
                    except FloatingPointError:
                        print("Failed at alpha=%s" % alpha)
                        estimator_shrunk = None
                if estimator_shrunk is not None:
                    break
            if estimator_shrunk is None:
                print('Unstable Lasso estimation. Try again!')
                sys.exit()

        if NETWORK is not None:
            est_path = dir_path + '/' + ID + '_' + NETWORK + '_est%s' % ('_sps_inv' if conn_model == 'sps' else '_cov') + '_' + str(thr) + '.txt'
        else:
            est_path = dir_path + '/' + ID + '_est%s' % ('_sps_inv' if conn_model == 'sps' else '_cov') + '_' + str(thr) + '.txt'
        if conn_model == 'sps':
            try:
                conn_matrix = -estimator.precision_
            except AttributeError:
                conn_matrix = -estimator_shrunk.precision_
        elif conn_model == 'cov':
            try:
                conn_matrix = estimator.covariance_
            except AttributeError:
                conn_matrix = estimator_shrunk.covariance_
    np.savetxt(est_path, conn_matrix, delimiter='\t')
    return conn_matrix, est_path
Example #4
def plot_connectome_mixture(data=None, index=None, metric="correlation", save_as=None, show=False, **kwargs):
   
    # stack the indexed frames from data into a single time series
    mm = data.X[index, :].copy()
    time_series = [np.vstack(mm)]

    # calculate correlation matrices across indexed frames in data 
    connectome_measure = ConnectivityMeasure(kind=metric)
    connectome_measure.fit_transform(time_series)
    connectivity = connectome_measure.mean_
    np.fill_diagonal(connectivity, 0)
    #connectivity[np.abs(connectivity) < 0.2] = 0.0 


    # grab center coordinates for atlas labels
    atlas = kwargs.pop('atlas', data.atlas)
    coords = plotting.find_parcellation_cut_coords(labels_img=atlas)

    # assign node colors
    cmap = kwargs.pop('cmap', 'jet')
    node_cmap = plt.get_cmap('bone')
    node_norm = mpl.colors.Normalize(vmin=-0.8, vmax=1.2)
    node_colors = np.ravel([c[-1] for c in coords])  # color nodes by z-coordinate
    node_colors = [c / np.max(node_colors) for c in node_colors]
    node_colors = node_cmap(node_norm(node_colors))

    # plot connectome matrix
    fig = plt.figure(figsize=(12,5))
    ax = plt.subplot2grid((1, 2), (0, 1),  rowspan=1, colspan=1) 
    display = plotting.plot_matrix(
        connectivity,
        vmin=-.5, vmax=.5, colorbar=True, cmap=cmap,
        axes=ax, #title='{} Matrix'.format(metric.title()),
        )

    # plot connectome with 99.9% edge strength threshold on the connectivity
    ax = plt.subplot2grid((1, 2), (0, 0), rowspan=1, colspan=1)
    display = plotting.plot_connectome(
        connectivity, coords,
        edge_threshold="99.9%", display_mode='z',
        node_color=node_colors, node_size=20, edge_kwargs=dict(lw=4),
        edge_vmin=-.8, edge_vmax=.8, edge_cmap=cmap,
        colorbar=False, black_bg=False, alpha=0.5,
        annotate=False,
        axes=ax,
        )
    plt.subplots_adjust(left=0.05, right=0.95, bottom=0.05, top=0.95)
    if show:
        plt.show()
    if save_as:
        fig.savefig(save_as, transparent=True)#, facecolor='slategray', edgecolor='white')
    plt.close(fig)
    return display
Example #5
    def _run_interface(self, runtime):
        fname = self.inputs.fmri_denoised
        entities = parse_file_entities(fname)
        bold_img = nb.load(fname)
        parcellation_file = get_parcellation_file_path(entities['space'])
        masker = NiftiLabelsMasker(labels_img=parcellation_file,
                                   standardize=True)
        time_series = masker.fit_transform(bold_img, confounds=None)

        corr_measure = ConnectivityMeasure(kind='correlation')
        corr_mat = corr_measure.fit_transform([time_series])[0]
        entities['pipeline'] = extract_pipeline_from_path(fname)
        conn_file = join(self.inputs.output_dir,
                         build_path(entities, self.conn_file_pattern, False))
        carpet_plot_file = join(
            self.inputs.output_dir,
            build_path(entities, self.carpet_plot_pattern, False))
        matrix_plot_file = join(
            self.inputs.output_dir,
            build_path(entities, self.matrix_plot_pattern, False))

        make_carpetplot(time_series, carpet_plot_file)
        mplot = plot_matrix(corr_mat, vmin=-1, vmax=1)
        mplot.figure.savefig(matrix_plot_file)

        np.save(conn_file, corr_mat)

        self._results['corr_mat'] = conn_file
        self._results['carpet_plot'] = carpet_plot_file
        self._results['matrix_plot'] = matrix_plot_file

        return runtime
Example #6
def make_correlation_matrix(line, site, filename, bad_lst, good_lst):
    seq_len = param.WINDOW_SIZE
    time_series = []
    good = bad = 0
    n = len(line) - seq_len + 1
    correlation_measure = ConnectivityMeasure(kind='correlation')

    for j in range(n):
        lst = []
        for i in line[j:j + seq_len]:
            lst.append(np.array(list(map(float, i.split()))))
        time_series.append(np.array(lst))
        correlation_matrix = correlation_measure.fit_transform(
            [time_series[j]])[0]
        fisher = np.arctanh(correlation_matrix)
        np.fill_diagonal(fisher, 0)
        # dd.io.save(folder + '/{}_correlation_matrix//{}_{}.h5'.format(site, filename, j), fisher)

        # check whether any region's row is all zeros for this subject
        for i in range(num_region):
            # column = correlation_matrix[i]
            # tmp = column[i]
            # np.delete(column, i)
            # if np.all(column == 0) and tmp == 1:
            if np.all(fisher[i] == 0):
                bad += 1
                bad_lst.append("{}".format(filename))
                break
        else:
            # no break: every region carried signal
            good += 1
            good_lst.append("{}".format(filename))
Example #7
def make_corr_matrix(ts_matrix):
    """
    Make a symmetric Pearson's r-to-z transformed correlation matrix.

    Parameters
    ----------
    ts_matrix : numpy.ndarray
        2D numpy array with each column representing an atlas region
        and each row representing a volume (time point)

    Returns
    -------
    zcorr_matrix : numpy.ndarray
        2D symmetric matrix measuring region-region correlations;
        the main diagonal is all zeros
    """
    from nilearn.connectome import ConnectivityMeasure
    import numpy as np

    def fisher_r_to_z(r):
        import math
        # map the diagonal (r == 1) to 0 so the matrix stays finite
        if r == 1.:
            return 0.
        else:
            return math.log((1. + r) / (1. - r)) / 2.

    correlation_measure = ConnectivityMeasure(kind='correlation')
    corr_matrix = correlation_measure.fit_transform([ts_matrix])[0]
    vfisher_r_to_z = np.vectorize(fisher_r_to_z)
    # fisher's r to z
    zcorr_matrix = vfisher_r_to_z(corr_matrix)
    return zcorr_matrix
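
Away from r = 1, the element-wise helper above matches NumPy's built-in arctanh, so the vectorized call could also be written with np.arctanh; a quick sanity check (illustrative only):

import numpy as np

r = np.array([-0.5, 0.0, 0.3, 0.9])
# Fisher z = 0.5 * ln((1 + r) / (1 - r)) is exactly arctanh(r)
assert np.allclose(np.arctanh(r), 0.5 * np.log((1 + r) / (1 - r)))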
Example #8
def postProcessing(nifti_file, subject_key, spheres_masker):
    """Perform post processing
	param nifti_file: string. path to the nifty file
    param subject_key: string. subject's key
	return: dictionary raw. 
		key: subject's key . 
		value: {"time_series" : matrix of time series (time_points,rois), "covariance" : covariance matrix of atlas rois (rois, rois),
			"correlation" : correlation matrix of atlas rois (rois, rois)}
    """
    try:
        print("subject_key: " + subject_key)
        print("Extract timeseries")
        # Extract the time series
        print(nifti_file)
        timeseries = spheres_masker.fit_transform(nifti_file, confounds=None)
        print("Extract covariance matrix")
        cov_measure = ConnectivityMeasure(cov_estimator=LedoitWolf(
            assume_centered=False, block_size=1000, store_precision=False),
                                          kind='covariance')
        cov = cov_measure.fit_transform([timeseries])[0, :, :]
        print("Extract correlation matrix")
        cor = nilearn.connectome.cov_to_corr(cov)
    except Exception:
        raise Exception("subject_key: %s \n" % subject_key +
                        traceback.format_exc())
    return (subject_key, {
        "time_series": timeseries,
        "covariance": cov,
        "correlation": cor
    })
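
postProcessing expects a pre-built spheres_masker; a hedged sketch of constructing one (the seed coordinates and radius here are placeholder assumptions, not from the source):

from nilearn.maskers import NiftiSpheresMasker  # nilearn.input_data in older releases

# Placeholder MNI seed coordinates; real seeds would come from a study atlas.
seeds = [(0, -52, 18), (-46, -68, 32), (46, -68, 32)]
spheres_masker = NiftiSpheresMasker(seeds=seeds, radius=8.0, standardize=True)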
Example #9
def CrossValidation(model, measure, y, skf, atlases):

    from sklearn.metrics import make_scorer
    from sklearn.metrics import accuracy_score, recall_score
    import numpy as np

    scoring = {
        'accuracy': make_scorer(accuracy_score),
        'sensitivity': make_scorer(recall_score),
        'specificity': make_scorer(recall_score, pos_label=0)
    }

    from nilearn.connectome import ConnectivityMeasure
    from nilearn.connectome import sym_matrix_to_vec

    conn_est = ConnectivityMeasure(kind=measure)
    conn_matrices = conn_est.fit_transform(atlases)
    X = sym_matrix_to_vec(conn_matrices)

    from sklearn.model_selection import cross_validate

    scores = cross_validate(model,
                            X,
                            y,
                            cv=skf,
                            scoring=scoring,
                            return_train_score=True)
    return [X, scores]
Example #10
    def _run_interface(self, runtime):

        from nilearn import datasets
        from nilearn.input_data import NiftiLabelsMasker
        import numpy as np

        dataset = datasets.fetch_atlas_harvard_oxford(
            self.inputs.atlas_identifier)
        atlas_filename = dataset.maps

        masker = NiftiLabelsMasker(labels_img=atlas_filename,
                                   standardize=True,
                                   detrend=True,
                                   low_pass=0.1,
                                   high_pass=0.01,
                                   t_r=self.inputs.tr,
                                   memory='nilearn_cache',
                                   verbose=0)

        # file_labels = open('/home/brainlab/Desktop/Rudas/Data/Parcellation/AAL from Freesourfer/fs_default.txt', 'r')
        # labels = []
        # for line in file_labels.readlines():
        #     labels.append(line)
        # file_labels.close()

        time_series = masker.fit_transform(
            self.inputs.in_file, confounds=self.inputs.confounds_file)

        np.savetxt(self.inputs.time_series_out_file,
                   time_series,
                   fmt='%10.2f',
                   delimiter=',')

        if self.inputs.plot:
            from nilearn import plotting
            from nilearn.connectome import ConnectivityMeasure
            import matplotlib
            import matplotlib.pyplot as plt
            fig, ax = plt.subplots()

            font = {'family': 'sans-serif', 'size': 5}

            matplotlib.rc('font', **font)

            correlation_measure = ConnectivityMeasure(kind='correlation')
            correlation_matrix = correlation_measure.fit_transform(
                [time_series])[0]

            # Mask the main diagonal for visualization:
            np.fill_diagonal(correlation_matrix, 0)
            plotting.plot_matrix(correlation_matrix,
                                 figure=fig,
                                 labels=dataset.labels[1:],
                                 vmax=0.8,
                                 vmin=-0.8,
                                 reorder=True)

            fig.savefig(self.inputs.correlation_matrix_out_file, dpi=1200)

        return runtime
Example #11
def connectivity_data():
    """Fixture for connectivity tests."""

    base_dir = os.path.abspath(pkg_resources.resource_filename(
        "pynets", "../data/examples"))
    func_file = f"{base_dir}/BIDS/sub-25659/ses-1/func/" \
                f"sub-25659_ses-1_task-rest_space-T1w_desc-preproc_bold.nii.gz"
    mask_file = f"{base_dir}/BIDS/sub-25659/ses-1/func/" \
                f"sub-25659_ses-1_task-rest_space-T1w_desc-preproc_" \
                f"bold_mask.nii.gz"
    parcellation = pkg_resources.resource_filename(
        "pynets", "templates/atlases/DesikanKlein2012.nii.gz"
    )

    masker = NiftiLabelsMasker(
        labels_img=nib.load(parcellation), background_label=0,
        resampling_target="labels", dtype="auto",
        mask_img=nib.load(mask_file), standardize=True)

    time_series = masker.fit_transform(func_file)
    conn_measure = ConnectivityMeasure(
        kind="correlation")
    conn_matrix = conn_measure.fit_transform([time_series])[0]
    [coords, _, _, label_intensities] = \
        get_names_and_coords_of_parcels(parcellation)

    labels = ['ROI_' + str(idx) for idx, val in enumerate(label_intensities)]

    yield {'time_series': time_series, 'conn_matrix': conn_matrix,
           'labels': labels, 'coords': coords, 'indices': label_intensities}
Example #12
def connectomes_from_time_windows(time_windows_dict, kind, roi_names):
    print("\n\n===Compute Connectomes from Time Windows===")
    print("Connectivity Type: %s" % kind)
    # Initialize an empty dictionary to store connectome 3D Arrays
    connectomes_dict = {}

    # If Coherence, set the sampling interval, lower, and upper bounds of frequencies we are interested in.
    # Then for each set of timewindows, compute coherence.
    if kind == "coherence":
        TR = .720
        f_lb = 0.02
        f_ub = 0.15

        for timeseries_name, time_windows in time_windows_dict.items():
            connectomes_dict[timeseries_name] = compute_coherence(
                time_windows, TR, f_lb, f_ub, roi_names)

    # If kind is not coherence, do regular Connectivity (i.e. correlation)
    else:
        connectivity = ConnectivityMeasure(kind=kind)

        # For each scan's 3D array of time windows, compute their connectomes
        for timeseries_name, time_windows in time_windows_dict.items():
            connectomes_dict[timeseries_name] = connectivity.fit_transform(
                time_windows)

    return connectomes_dict
Example #13
    def _run_interface(self, runtime):
        fname = self.inputs.fmri_denoised
        bold_img = nb.load(fname)
        masker = NiftiLabelsMasker(labels_img=self.inputs.parcellation,
                                   standardize=True)
        time_series = masker.fit_transform(bold_img, confounds=None)

        corr_measure = ConnectivityMeasure(kind='correlation')
        corr_mat = corr_measure.fit_transform([time_series])[0]
        _, base, _ = split_filename(fname)

        conn_file = f'{self.inputs.output_dir}/{base}_conn_mat.npy'

        carpet_plot_file = join(self.inputs.output_dir,
                                f'{base}_carpet_plot.png')
        matrix_plot_file = join(self.inputs.output_dir,
                                f'{base}_matrix_plot.png')

        create_carpetplot(time_series, carpet_plot_file)
        mplot = plot_matrix(corr_mat, vmin=-1, vmax=1)
        mplot.figure.savefig(matrix_plot_file)

        np.save(conn_file, corr_mat)

        self._results['corr_mat'] = conn_file
        self._results['carpet_plot'] = carpet_plot_file
        self._results['matrix_plot'] = matrix_plot_file

        return runtime
Example #14
def plot_sliding_window(series, width, step):
    '''
    Plot a sliding-window correlation graph.

    Parameters
    ----------
    series : array
        Time-series array to be cut into windows.
    width : int
        Window width, in samples.
    step : int
        Window step size, in samples.
    '''

    from nilearn import plotting
    import numpy as np
    from nilearn.connectome import sym_matrix_to_vec
    from nilearn.connectome import ConnectivityMeasure

    cut = sliding_window_2d(series, width, step)
    cut_matrix = np.zeros((cut.shape[0], cut.shape[2], cut.shape[2]))
    correlation_measure = ConnectivityMeasure(kind='correlation')

    for i in range(cut.shape[0]):
        matrix = correlation_measure.fit_transform([cut[i]])[0]
        cut_matrix[i, :, :] = matrix

    vectors = np.zeros(
        (cut_matrix.shape[0], sym_matrix_to_vec(cut_matrix[1]).shape[0]))

    for i in range(cut_matrix.shape[0]):
        vec = sym_matrix_to_vec(cut_matrix[i])
        vectors[i, :] = vec

    ax = np.corrcoef(vectors)
    plotting.plot_matrix(ax, title="width={} step={}".format(width, step))
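
sliding_window_2d is not defined in this snippet; from its usage it must return an array of shape (n_windows, width, n_regions). A hypothetical sketch consistent with that usage (an assumption, not the original helper):

import numpy as np

def sliding_window_2d(series, width, step):
    # Hypothetical helper: cut a (time points, regions) array into
    # overlapping windows of length `width`, advancing by `step` samples.
    n_windows = (series.shape[0] - width) // step + 1
    return np.stack([series[i * step:i * step + width]
                     for i in range(n_windows)])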
Example #15
def compute_correlations(time_series, kind='partial correlation'):
    correlation_measure = ConnectivityMeasure(kind=kind)
    print("Fit-transforming time series...")
    timer("tic")
    correlation_matrices = correlation_measure.fit_transform(time_series)
    timer("toc", name="fitting all time series")
    return correlation_measure, correlation_matrices
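
timer is an external helper not shown here; a hypothetical tic/toc stand-in consistent with the calls above (an assumption):

import time

_t0 = None

def timer(action, name="block"):
    # Hypothetical stand-in for the timing helper used above.
    global _t0
    if action == "tic":
        _t0 = time.perf_counter()
    elif action == "toc" and _t0 is not None:
        print("%s took %.2fs" % (name, time.perf_counter() - _t0))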
Example #16
def get_nilearn_adhd_data(n_subjects, nilearn_download_dir):

    # Load the functional datasets
    datasets.get_data_dirs(data_dir=nilearn_download_dir)
    adhd_data = datasets.fetch_adhd(n_subjects=n_subjects,
                                    data_dir=nilearn_download_dir)
    msdl_data = datasets.fetch_atlas_msdl(data_dir=nilearn_download_dir)
    masker = input_data.NiftiMapsMasker(msdl_data.maps,
                                        resampling_target="data",
                                        t_r=2.5,
                                        detrend=True,
                                        low_pass=.1,
                                        high_pass=.01,
                                        memory='nilearn_cache',
                                        memory_level=1)

    pooled_subjects = []
    adhd_labels = []  # 1 if ADHD, 0 if control
    age = []
    for func_file, confound_file, phenotypic in zip(adhd_data.func,
                                                    adhd_data.confounds,
                                                    adhd_data.phenotypic):
        time_series = masker.fit_transform(func_file, confounds=confound_file)
        pooled_subjects.append(time_series)
        adhd_labels.append(phenotypic['adhd'])
        age.append(phenotypic['age'])
    correlation_measure = ConnectivityMeasure(kind='correlation')
    corr_mat = correlation_measure.fit_transform(pooled_subjects)
    print('Correlations are stacked in an array of shape {0}'.format(
        corr_mat.shape))
    beh = np.zeros((n_subjects, 2))
    beh[:, 0] = adhd_labels
    beh[:, 1] = age

    return corr_mat, beh
Example #17
def plot_corr(time_series, labels):
    '''
    Parameters
    ----------
    time_series : array
        Time series of the signal in each region.
    labels : list
        Labels of all the regions in parcellation.

    Returns
    -------
    None.
    '''
    correlation_measure = ConnectivityMeasure(kind='correlation')
    correlation_matrix = correlation_measure.fit_transform([time_series])[0]

    # Mask the main diagonal for visualization:
    np.fill_diagonal(correlation_matrix, 0)
    # The labels we have start with the background (0), hence we skip the first label
    plotting.plot_matrix(correlation_matrix,
                         figure=(10, 8),
                         labels=labels[1:],
                         vmax=0.8,
                         vmin=-0.8,
                         reorder=True)
Example #18
def connectivity_matrix(timeseries, kind='partial correlation'):
    # timeseries: as output by load_timeseries
    correlation_measure = ConnectivityMeasure(kind=kind,
                                              vectorize=True,
                                              discard_diagonal=True)
    correlation_matrix = correlation_measure.fit_transform(timeseries)
    return correlation_matrix, correlation_measure
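
A usage sketch showing the resulting shapes (random toy data; with vectorize=True and discard_diagonal=True each subject yields the n * (n - 1) / 2 unique off-diagonal entries):

import numpy as np

rng = np.random.default_rng(42)
# two subjects, 120 time points, 10 regions each (toy data)
subjects = [rng.standard_normal((120, 10)) for _ in range(2)]

vecs, measure = connectivity_matrix(subjects)
print(vecs.shape)  # (2, 45) since 10 * 9 / 2 = 45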
Example #19
def correlation_matrix(ts,atlas,
	confounds=None,
	mask=None,
	loud=False,
	structure_names=[],
	save_as='',
	low_pass=0.25,
	high_pass=0.004,
	smoothing_fwhm=.3,
	):
	"""Return a CSV file containing correlations between ROIs.

	Parameters
	----------
	ts : str
		Path to the 4D NIfTI timeseries file on which to perform the connectivity analysis.
	atlas : str
		Path to a 3D NIfTI-like binary label file designating ROIs.
	confounds : 2D array OR path to CSV file, optional
		Array/CSV file containing confounding time-series to be regressed out before FC analysis.
	structure_names : list, optional
		Ordered list of all structure names in atlas (length N).
	save_as : str
		Path under which to save the Pandas DataFrame containing the NxN correlation matrix.
	"""
	ts = path.abspath(path.expanduser(ts))
	if isinstance(atlas,str):
		atlas = path.abspath(path.expanduser(atlas))
	if mask:
		mask = path.abspath(path.expanduser(mask))
	tr = nib.load(ts).header['pixdim'][4]  # the repetition time lives in pixdim[4]
	labels_masker = NiftiLabelsMasker(
		labels_img=atlas,
		mask_img=mask,
		standardize=True,
		memory='nilearn_cache',
		verbose=5,
		low_pass=low_pass,
		high_pass = high_pass,
		smoothing_fwhm=smoothing_fwhm,
		t_r=tr,
		)
	#TODO: test confounds with physiological signals
	if confounds:
		confounds = path.abspath(path.expanduser(confounds))
		timeseries = labels_masker.fit_transform(ts, confounds=confounds)
	else:
		timeseries = labels_masker.fit_transform(ts)
	correlation_measure = ConnectivityMeasure(kind='correlation')
	correlation_matrix = correlation_measure.fit_transform([timeseries])[0]
	if structure_names:
		df = pd.DataFrame(columns=structure_names, index=structure_names, data=correlation_matrix)
	else:
		df = pd.DataFrame(data=correlation_matrix)
	if save_as:
		save_dir = path.dirname(save_as)
		if not path.exists(save_dir):
			makedirs(save_dir)
		df.to_csv(save_as)
Example #20
def generate_connectivity_matrix(time_series, kind='correlation'):
    """
    Generate a connectivity matrix from a collection of time series.
    param :kind: Any kind accepted by nilearn ConnectivityMeasure
    """
    connectivity_measure = ConnectivityMeasure(kind=kind)
    connectivity_matrix = connectivity_measure.fit_transform([time_series])[0]
    return connectivity_matrix
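
A short usage sketch with toy data (the array shapes are the point):

import numpy as np

rng = np.random.default_rng(0)
ts = rng.standard_normal((150, 8))       # (time points, regions)
mat = generate_connectivity_matrix(ts)   # defaults to kind='correlation'
print(mat.shape)                         # (8, 8)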
Example #21
def prepare_data(data_dir, output_dir, pipeline="cpac", quality_checked=True):
    # get dataset
    print("Loading dataset...")
    abide = datasets.fetch_abide_pcp(data_dir=data_dir,
                                     pipeline=pipeline,
                                     quality_checked=quality_checked)
    # make list of filenames
    fmri_filenames = abide.func_preproc

    # load atlas
    multiscale = datasets.fetch_atlas_basc_multiscale_2015()
    atlas_filename = multiscale.scale064

    # initialize masker object
    masker = NiftiLabelsMasker(labels_img=atlas_filename,
                               standardize=True,
                               memory='nilearn_cache',
                               verbose=0)

    # initialize correlation measure
    correlation_measure = ConnectivityMeasure(kind='correlation', vectorize=True,
                                             discard_diagonal=True)

    try:  # check if the feature file already exists
        # load features
        feat_file = os.path.join(output_dir, 'ABIDE_BASC064_features.npz')
        X_features = np.load(feat_file)['a']
        print("Feature file found.")

    except FileNotFoundError:  # if not, extract features
        X_features = []  # To contain upper half of matrix as 1d array
        print("No feature file found. Extracting features...")

        for i,sub in enumerate(fmri_filenames):
            # extract the timeseries from the ROIs in the atlas
            time_series = masker.fit_transform(sub)
            # create a region x region correlation matrix
            correlation_matrix = correlation_measure.fit_transform([time_series])[0]
            # add to our container
            X_features.append(correlation_matrix)
            # keep track of status
            print('finished extracting %s of %s' % (i + 1, len(fmri_filenames)))
        # Save features
        np.savez_compressed(os.path.join(output_dir, 'ABIDE_BASC064_features'),
                            a=X_features)

    # Dimensionality reduction of features with PCA
    print("Running PCA...")
    pca = PCA(0.99).fit(X_features) # keeping 99% of variance
    X_features_pca = pca.transform(X_features)

    # Transform phenotypic data into dataframe
    abide_pheno = pd.DataFrame(abide.phenotypic)

    # Get the target vector
    y_target = abide_pheno['DX_GROUP']

    return X_features_pca, y_target
Example #22
    def _run_interface(self, runtime):
        from nilearn.input_data import NiftiLabelsMasker
        from nilearn.connectome import ConnectivityMeasure
        from sklearn.covariance import EmpiricalCovariance
        import numpy as np
        import pandas as pd
        import os
        import matplotlib.pyplot as plt
        from mne.viz import plot_connectivity_circle
        import re

        plt.switch_backend('Agg')

        # extract timeseries from every label
        masker = NiftiLabelsMasker(labels_img=self.inputs.atlas_file,
                                   standardize=True, verbose=1)
        timeseries = masker.fit_transform(self.inputs.timeseries_file)
        # create correlation matrix
        correlation_measure = ConnectivityMeasure(cov_estimator=EmpiricalCovariance(),
                                                  kind="correlation")
        correlation_matrix = correlation_measure.fit_transform([timeseries])[0]
        np.fill_diagonal(correlation_matrix, np.nan)

        # add the atlas labels to the matrix
        atlas_lut_df = pd.read_csv(self.inputs.atlas_lut, sep='\t')
        regions = atlas_lut_df['regions'].values
        correlation_matrix_df = pd.DataFrame(correlation_matrix, index=regions, columns=regions)

        # do a fisher's r -> z transform
        fisher_z_matrix_df = correlation_matrix_df.apply(lambda x: (np.log(1 + x) - np.log(1 - x)) * 0.5)

        # write out the file.
        out_file = os.path.join(runtime.cwd, 'fisher_z_correlation.tsv')
        fisher_z_matrix_df.to_csv(out_file, sep='\t', na_rep='n/a')

        # save the filename in the outputs
        self._results['correlation_matrix'] = out_file

        # visualizations with mne
        connmat = fisher_z_matrix_df.values
        labels = list(fisher_z_matrix_df.index)

        # define title and outfile names:
        trial_regex = re.compile(r'.*trialtype-(?P<trial>[A-Za-z0-9]+)')
        title = re.search(trial_regex, self.inputs.timeseries_file).groupdict()['trial']
        outfile = os.path.join(runtime.cwd, ".".join([title, "svg"]))

        n_lines = int(np.sum(connmat > 0) / 2)
        fig = plt.figure(figsize=(5, 5))

        plot_connectivity_circle(connmat, labels, n_lines=n_lines, fig=fig, title=title, fontsize_title=10,
                                 facecolor='white', textcolor='black', colormap='jet', colorbar=1,
                                 node_colors=['black'], node_edgecolor=['white'], show=False, interactive=False)

        fig.savefig(outfile, dpi=300)
        self._results['correlation_fig'] = outfile

        return runtime
Example #23
def estimate_connectivity(time_series, measure_type="correlation"):
    """
    Main function to estimate connectivity from atlas regions
    """

    correlation_measure = ConnectivityMeasure(kind=measure_type)
    correlation_matrix = correlation_measure.fit_transform([time_series])[0]

    return correlation_measure, correlation_matrix
Example #24
def _compute_correlation_matrix(df_nc, df_sz):
    corr_kind = 'correlation'

    # NC correlation
    nc_measure = ConnectivityMeasure(kind=corr_kind)
    nc_correlation_array = nc_measure.fit_transform(df_nc['ts_array'].values)
    for i in range(nc_correlation_array.shape[0]):
        np.fill_diagonal(nc_correlation_array[i], 0)
    df_nc = df_nc.assign(corr_matrix=[nc_correlation_array[i] for i in range(nc_correlation_array.shape[0])])

    # SZ correlation
    sz_measure = ConnectivityMeasure(kind=corr_kind)
    sz_correlation_array = sz_measure.fit_transform(df_sz['ts_array'].values)
    for i in range(sz_correlation_array.shape[0]):
        np.fill_diagonal(sz_correlation_array[i], 0)
    df_sz = df_sz.assign(corr_matrix=[sz_correlation_array[i] for i in range(sz_correlation_array.shape[0])])

    return df_nc, df_sz
Example #25
    def process(self):
        # Read data into huge `Data` list.
        data_list: list[Data] = []

        filtered_people = np.load(UKB_IDS_PATH)
        main_covars = pd.read_csv(UKB_PHENOTYPE_PATH).set_index('ID')

        conn_measure = ConnectivityMeasure(kind='correlation', vectorize=False)

        for person in filtered_people:
            if person in [1663368, 3443644]:
                # No information in Covars file
                continue
            if self.target_var == 'bmi' and person in UKB_WITHOUT_BMI:
                continue

            if self.connectivity_type == ConnType.FMRI:
                ts = np.loadtxt(
                    f'{UKB_TIMESERIES_PATH}/UKB{person}_ts_raw.txt',
                    delimiter=',')
                if ts.shape[0] < 84:
                    continue
                elif ts.shape[1] == 523:
                    ts = ts[:, :490]
                assert ts.shape == (84, 490)

                # Getting only the last 68 cortical regions
                ts = ts[-68:, :]
                # For normalisation part and connectivity
                ts = ts.T

                corr_arr = conn_measure.fit_transform([ts])
                assert corr_arr.shape == (1, 68, 68)
                corr_arr = corr_arr[0]

                G = create_thresholded_graph(corr_arr,
                                             threshold=self.threshold,
                                             num_nodes=self.num_nodes)
                edge_index = torch.tensor(np.array(G.edges()),
                                          dtype=torch.long).t().contiguous()
                if self.include_edge_weights:
                    edge_attr = torch.tensor(list(
                        nx.get_edge_attributes(G, 'weight').values()),
                                             dtype=torch.float).unsqueeze(1)
                else:
                    edge_attr = None

                data = self.__create_data_object(person=person,
                                                 ts=ts,
                                                 edge_index=edge_index,
                                                 edge_attr=edge_attr,
                                                 covars=main_covars)
                data_list.append(data)

        data, slices = self.collate(data_list)
        torch.save((data, slices), self.processed_paths[0])
Example #26
def createCorMat(time_series):
    from nilearn.connectome import ConnectivityMeasure
    correlation_measure = ConnectivityMeasure(
        kind='correlation')  # can choose partial - it might be better
    # create correlation matrix for each subject
    fullMatrix = []
    for time_s in time_series:
        correlation_matrix = correlation_measure.fit_transform([time_s])[0]
        fullMatrix.append(correlation_matrix)
    return fullMatrix
Example #27
def make_connectivity_biomarkers(kind, labels, adhd200, pooled_subjects):
    """
    This function takes the masked fMRI volumes and the corresponding phenotypic information (age, gender and dexterity)
    and turns them into a 2D array for doing ML classification. If there is no phenotypic information available,
    we exclude it from the dataset.

    :param kind: (str) The type of functional connectivity we extract
    :param labels: (list) The truth values for the ADHD200 dataset
    :param adhd200: (ADHD200) The ADHD200 object
    :param pooled_subjects: (list) The masked fMRI volumes
    :return: (list) features, (list) labels
    """

    new_labels = []  # Initialize a new list containing only labels for fMRI volumes
    # that have corresponding phenotypic information
    temp_features = []  # Initialize a new list containing only features for fMRI volumes
    # that have corresponding phenotypic information

    conn_measure = ConnectivityMeasure(kind=kind, vectorize=True, discard_diagonal=True)  # Generate the functional
    # connectivity using the biomarker specified
    connectivity = conn_measure.fit_transform(pooled_subjects)  # Apply it to all of the masked fMRI scans

    bar = ProgressBar(max_value=len(adhd200.func))  # Instantiate a new progressbar
    ops = 0  # Set the default value of the bar to 0

    for index in range(len(adhd200.func)):
        phenotypic_information = Helpers.get_params(adhd200, adhd200.func[
            index])  # Retrieve the corresponding phenotypic information for each fMRI
        ops += 1  # Increment the bar by one
        bar.update(ops)  # Update the progressbar to the value of the variable "ops"
        if phenotypic_information is not None:  # If we found phenotypic information for that fMRI
            new_labels.append(labels[index])  # Add it to the "approved" labels list
            generated_features = np.array(
                [Helpers.conform_1d(phenotypic_information, connectivity[index].shape[0]), connectivity[index]])
            # Add the phenotypic information and the functional connectivity as a matrix. We have to
            # surround the phenotypic information by 0s to make it the same shape as the connectivity (conform 1d)
            temp_features.append(generated_features)  # add it to the temp features
        else:
            continue  # Skip that fMRI scan from the dataset

    d3_dataset = np.array(temp_features)  # Convert the 3D temp_features array to a numpy array
    nsamples, nx, ny = d3_dataset.shape  # Extract the dimensionality of the data
    d2_functional_connectivity = d3_dataset.reshape((nsamples, nx * ny))  # Convert it to 2 dimensions

    with open('pickles/features.pkl',
              'wb') as features_file:  # Cache the features so that we don't have to run this
        # function again
        dump(d2_functional_connectivity, features_file)  # Dump them to the pickle file

    with open('pickles/adhd_labels.pkl', 'wb') as labels_file:  # Cache the biomarkers so that we don't have to run this
        # function again
        dump(new_labels, labels_file)  # Dump them to the pickle file

    return d2_functional_connectivity, new_labels  # Return them
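
Helpers.conform_1d is external; per the comments above it zero-pads the phenotypic vector to the connectivity vector's length. A hypothetical stand-in (an assumption, not the original helper):

import numpy as np

def conform_1d(values, target_len):
    # Hypothetical: zero-pad a 1D phenotypic vector out to target_len.
    out = np.zeros(target_len)
    values = np.asarray(values, dtype=float).ravel()
    out[:values.size] = values
    return out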
Example #28
def cal_connectome(fmri_ff,
                   confound_ff,
                   atlas_ff,
                   outputjpg_ff,
                   metric='correlation',
                   labelrange=None,
                   label_or_map=0):
    if label_or_map == 0:
        # 'correlation', 'partial correlation', 'tangent', 'covariance', 'precision'
        masker = NiftiLabelsMasker(labels_img=atlas_ff,
                                   standardize=True,
                                   verbose=0)
    else:
        masker = NiftiMapsMasker(maps_img=atlas_ff,
                                 standardize=True,
                                 verbose=0)

    time_series_0 = masker.fit_transform(fmri_ff, confounds=confound_ff)
    if labelrange is None:
        labelrange = np.arange(time_series_0.shape[1])
    time_series = time_series_0[:, labelrange]
    if metric == 'sparse inverse covariance':
        try:
            estimator = GraphLassoCV()  # GraphicalLassoCV in current scikit-learn
            estimator.fit(time_series)
            correlation_matrix = -estimator.precision_
        except Exception:
            correlation_matrix = np.zeros(
                (time_series.shape[1], time_series.shape[1]))
    else:
        correlation_measure = ConnectivityMeasure(kind=metric)
        correlation_matrix = correlation_measure.fit_transform([time_series
                                                                ])[0]

    # Plot the correlation matrix

    fig = plt.figure(figsize=(6, 5), dpi=100)
    plt.clf()
    # Mask the main diagonal for visualization:
    np.fill_diagonal(correlation_matrix, 0)

    plt.imshow(correlation_matrix,
               interpolation="nearest",
               cmap="RdBu_r",
               vmax=0.8,
               vmin=-0.8)
    plt.gca().yaxis.tick_right()
    plt.axis('off')
    plt.colorbar()
    plt.title(metric.title(), fontsize=12)
    plt.tight_layout()
    fig.savefig(outputjpg_ff, bbox_inches='tight')
    plt.close()
    return correlation_matrix
Example #29
    def correlation(self, estimator="maximum_likelihood", assume_centered=False):

        if estimator == "maximum_likelihood":
            correlation_measure = ConnectivityMeasure(
                kind="correlation",
                cov_estimator=EmpiricalCovariance(assume_centered=assume_centered))
        elif estimator == "ledoit_wolf":
            correlation_measure = ConnectivityMeasure(
                kind="correlation",
                cov_estimator=LedoitWolf(assume_centered=assume_centered))
        else:
            raise ValueError("Estimator should be 'maximum_likelihood' or 'ledoit_wolf'")

        R = np.nan_to_num(correlation_measure.fit_transform(self.ts))

        return R
Example #30
def get_correlation_matrix(region_timeseries_list):
    """
    Compute correlation matrix for a list of timeseries
    """
    correlation_matrices = []
    correlation_measure = ConnectivityMeasure(kind='correlation')

    for item in region_timeseries_list:
        matrix = correlation_measure.fit_transform([item])[0]
        correlation_matrices.append(matrix)

    return np.array(correlation_matrices)
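
Since ConnectivityMeasure.fit_transform accepts a list of subjects directly, the loop above can be collapsed into a single call that returns the same stacked array; a sketch:

import numpy as np
from nilearn.connectome import ConnectivityMeasure

def get_correlation_matrix_batched(region_timeseries_list):
    # Equivalent to the loop above: nilearn stacks per-subject matrices
    # into one (n_subjects, n_regions, n_regions) array.
    measure = ConnectivityMeasure(kind='correlation')
    return measure.fit_transform(region_timeseries_list)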
Example #31
def compute_correlation(time_series, method):

    data = np.genfromtxt(time_series).T

    if method == 'PearsonCorr':
        method = 'correlation'
    elif method == 'PartialCorr':
        method = 'partial correlation'

    connectivity_measure = ConnectivityMeasure(kind=method)
    connectome = connectivity_measure.fit_transform([data])[0]

    file = os.path.abspath('./%s_connectome.npy' % (method.replace(" ", "-")))
    np.save(file, connectome)
    return file
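
Hypothetical usage (the path is a placeholder, not from the source): the input text file is expected to hold one region per row, since the function transposes after loading.

# 'sub-01_timeseries.txt' is a placeholder path.
out_file = compute_correlation('sub-01_timeseries.txt', 'PearsonCorr')
print(out_file)  # .../correlation_connectome.npy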
Example #32
connectome_measure = ConnectivityMeasure(kind='correlation')

# useful for plotting connectivity interactions on glass brain
from nilearn import plotting

# create masker to extract functional data within atlas parcels
masker = NiftiLabelsMasker(labels_img=yeo['thick_17'], standardize=True,
                           memory='nilearn_cache')

# extract time series from all subjects and concatenate them
time_series = []
for func, confounds in zip(data.func, data.confounds):
    time_series.append(masker.fit_transform(func, confounds=confounds))

# calculate correlation matrices across subjects and display
correlation_matrices = connectome_measure.fit_transform(time_series)

# The mean correlation matrix across the 10 subjects can be grabbed
# from the connectome measure object

# grab center coordinates for atlas labels
coordinates = plotting.find_parcellation_cut_coords(labels_img=yeo['thick_17'])

# plot connectome with 80% edge strength in the connectivity
plotting.plot_connectome(mean_correlation_matrix, coordinates,
                         edge_threshold="80%",
                         title='Yeo Atlas 17 thick (func)')

##########################################################################
# Load probabilistic atlases - extracting coordinates on brain maps
# First we need to extract subject timeseries signals and then estimate
# correlation matrices from those signals.
# To extract timeseries signals, we call transform() from the RegionExtractor
# object on each subject's functional data stored in func_filenames.
# To estimate correlation matrices we import connectome utilities from nilearn.
from nilearn.connectome import ConnectivityMeasure

correlations = []
# Initializing ConnectivityMeasure object with kind='correlation'
connectome_measure = ConnectivityMeasure(kind='correlation')
for filename, confound in zip(func_filenames, confounds):
    # call transform from RegionExtractor object to extract timeseries signals
    timeseries_each_subject = extractor.transform(filename, confounds=confound)
    # call fit_transform from ConnectivityMeasure object
    correlation = connectome_measure.fit_transform([timeseries_each_subject])
    # saving each subject correlation to correlations
    correlations.append(correlation)

# Mean of all correlations
import numpy as np
mean_correlations = np.mean(correlations, axis=0).reshape(n_regions_extracted,
                                                          n_regions_extracted)

###############################################################################
# Plot resulting connectomes
# ----------------------------

title = 'Correlation between %d regions' % n_regions_extracted

# First plot the matrix
masker = NiftiMapsMasker(maps_img=atlas_filename, standardize=True,
                         memory='nilearn_cache', verbose=5)

time_series = masker.fit_transform(data.func[0],
                                   confounds=data.confounds)

############################################################################
# `time_series` is now a 2D matrix, of shape (number of time points x
# number of regions)
print(time_series.shape)

############################################################################
# Build and display a correlation matrix
from nilearn.connectome import ConnectivityMeasure
correlation_measure = ConnectivityMeasure(kind='correlation')
correlation_matrix = correlation_measure.fit_transform([time_series])[0]

# Display the correlation matrix
import numpy as np
from matplotlib import pyplot as plt
plt.figure(figsize=(10, 10))
# Mask out the major diagonal
np.fill_diagonal(correlation_matrix, 0)
plt.imshow(correlation_matrix, interpolation="nearest", cmap="RdBu_r",
           vmax=0.8, vmin=-0.8)
plt.colorbar()
# And display the labels
x_ticks = plt.xticks(range(len(labels)), labels, rotation=90)
y_ticks = plt.yticks(range(len(labels)), labels)

############################################################################
Example #35
plt.title('Default Mode Network Time Series')
plt.xlabel('Scan number')
plt.ylabel('Normalized signal')
plt.legend()
plt.tight_layout()


##########################################################################
# Compute partial correlation matrix
# -----------------------------------
# Using the :class:`nilearn.connectome.ConnectivityMeasure` object: its
# default covariance estimator is Ledoit-Wolf, which allows us to obtain
# accurate partial correlations.
from nilearn.connectome import ConnectivityMeasure
connectivity_measure = ConnectivityMeasure(kind='partial correlation')
partial_correlation_matrix = connectivity_measure.fit_transform(
    [time_series])[0]
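
The default shown above is roughly equivalent to passing the Ledoit-Wolf estimator explicitly (a sketch; nilearn's exact default arguments may differ):

from sklearn.covariance import LedoitWolf
from nilearn.connectome import ConnectivityMeasure

connectivity_measure = ConnectivityMeasure(
    cov_estimator=LedoitWolf(), kind='partial correlation')
partial_correlation_matrix = connectivity_measure.fit_transform(
    [time_series])[0]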

##########################################################################
# Display connectome
# -------------------
from nilearn import plotting

plotting.plot_connectome(partial_correlation_matrix, dmn_coords,
                         title="Default Mode Network Connectivity")

##########################################################################
# Display connectome with hemispheric projections.
# Notice (0, -52, 18) is included in both hemispheres since x == 0.
plotting.plot_connectome(partial_correlation_matrix, dmn_coords,
                         title="Connectivity projected on hemispheres",
                         display_mode='lyrz')

# Compute connectivity metrics

individual_connectivity_matrices = {}
mean_connectivity_matrix = {}
p_ind = 0.
for func_type in func_type_list:
    subjects_connectivity = {}
    mean_connectivity = {}
    for kind in kinds:
        try:
            conn_measure = ConnectivityMeasure(cov_estimator=estimator, kind=kind)
            t_s = np.asarray(all_time_series_r[func_type])
            subjects_connectivity[kind] = conn_measure.fit_transform(t_s)
            if kind == 'tangent':
                # the tangent-space mean is computed by the measure itself
                mean_connectivity[kind] = conn_measure.mean_
            else:
                mean_connectivity[kind] = subjects_connectivity[kind].mean(axis=0)
        except Exception:
            print('estimation failed for ' + kind + ' in group ' + func_type)
        p_ind += 1.
        progress = str(100 * p_ind / (len(kinds) * len(func_type_list)))

        print(progress + '% done in computing metrics (' + kind + ' ' + func_type + ')')
        
    individual_connectivity_matrices[func_type] = subjects_connectivity
    mean_connectivity_matrix[func_type] = mean_connectivity
print('Data has {0} ADHD subjects.'.format(len(adhd_subjects)))

###############################################################################
# ROI-to-ROI correlations of ADHD patients
# ----------------------------------------
# The simpler and most commonly used kind of connectivity is correlation. It
# models the full (marginal) connectivity between pairwise ROIs. We can
# estimate it using :class:`nilearn.connectome.ConnectivityMeasure`.
from nilearn.connectome import ConnectivityMeasure

correlation_measure = ConnectivityMeasure(kind='correlation')

###############################################################################
# From the list of ROIs time-series for ADHD subjects, the
# `correlation_measure` computes individual correlation matrices.
correlation_matrices = correlation_measure.fit_transform(adhd_subjects)

# All individual coefficients are stacked in a unique 2D matrix.
print('Correlations of ADHD patients are stacked in an array of shape {0}'
      .format(correlation_matrices.shape))

###############################################################################
# as well as the average correlation across all fitted subjects.
mean_correlation_matrix = correlation_measure.mean_
print('Mean correlation has shape {0}.'.format(mean_correlation_matrix.shape))

###############################################################################
# We display the connectomes of the first 3 ADHD subjects and the mean
# correlation matrix over all ADHD patients.
from nilearn import plotting