def subject_connectivity(timeseries,
                         subjects,
                         atlas_name,
                         kind,
                         iter_no='',
                         seed=1234,
                         validation_ext='10CV',
                         n_subjects='',
                         save=True,
                         save_path=data_folder):
    """
        timeseries   : timeseries table for subject (timepoints x regions)
        subjects     : subject IDs
        atlas_name   : name of the parcellation atlas used
        kind         : the kind of connectivity to be used, e.g. lasso, partial correlation, correlation
        iter_no      : tangent connectivity iteration number for cross validation evaluation
        save         : save the connectivity matrix to a file
        save_path    : specify path to save the matrix if different from subject folder

    returns:
        connectivity : connectivity matrix (regions x regions)
    """

    if kind in ['TPE', 'TE', 'correlation']:
        if kind not in ['TPE', 'TE']:
            conn_measure = connectome.ConnectivityMeasure(kind=kind)
            connectivity = conn_measure.fit_transform(timeseries)
        else:
            if kind == 'TPE':
                conn_measure = connectome.ConnectivityMeasure(
                    kind='correlation')
                conn_mat = conn_measure.fit_transform(timeseries)
                conn_measure = connectome.ConnectivityMeasure(kind='tangent')
                connectivity_fit = conn_measure.fit(conn_mat)
                connectivity = connectivity_fit.transform(conn_mat)
            else:
                conn_measure = connectome.ConnectivityMeasure(kind='tangent')
                connectivity_fit = conn_measure.fit(timeseries)
                connectivity = connectivity_fit.transform(timeseries)

    if save:
        if kind not in ['TPE', 'TE']:
            for i, subj_id in enumerate(subjects):
                subject_file = os.path.join(
                    save_path, subj_id, subj_id + '_' + atlas_name + '_' +
                    kind.replace(' ', '_') + '.mat')
                sio.savemat(subject_file, {'connectivity': connectivity[i]})
            return connectivity
        else:
            for i, subj_id in enumerate(subjects):
                subject_file = os.path.join(
                    save_path, subj_id,
                    subj_id + '_' + atlas_name + '_' + kind.replace(' ', '_') +
                    '_' + str(iter_no) + '_' + str(seed) + '_' +
                    validation_ext + str(n_subjects) + '.mat')
                sio.savemat(subject_file, {'connectivity': connectivity[i]})
            return connectivity_fit
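A minimal, hedged usage sketch of the tangent-space ('TE'/'TPE') branch above, on synthetic data: the measure is fit on a training group and the same fitted object is reused to transform held-out subjects (the subject IDs, atlas names and save paths used by the real function are dataset-specific and omitted here).

import numpy as np
from nilearn import connectome

rng = np.random.default_rng(0)
train_ts = [rng.standard_normal((120, 10)) for _ in range(5)]  # (timepoints x regions) per subject
test_ts = [rng.standard_normal((120, 10)) for _ in range(2)]

tangent = connectome.ConnectivityMeasure(kind='tangent')
train_conn = tangent.fit_transform(train_ts)  # fit the group reference and transform the training set
test_conn = tangent.transform(test_ts)        # project new subjects into the same tangent space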
Example #2
def signals_to_net(signal):
    estimator = connectome.ConnectivityMeasure(kind='correlation')
    fmri_net = estimator.fit_transform([signal])[0]
    # fmri_net = np.abs(fmri_net)
    fmri_net = normalize_mat(fmri_net)
    # fmri_net[fmri_net<0]=0
    return fmri_net
Example #3
def subject_connectivity(timeseries,
                         subject,
                         atlas_name,
                         kind,
                         save=True,
                         save_path=data_folder):
    """
        timeseries   : timeseries table for subject (timepoints x regions)
        subject      : the subject ID
        atlas_name   : name of the parcellation atlas used
        kind         : the kind of connectivity to be used: 'tangent', 'partial correlation' or 'correlation'
        save         : save the connectivity matrix to a file
        save_path    : specify path to save the matrix if different from subject folder

    returns:
        connectivity : connectivity matrix (regions x regions)
    """

    print("Estimating %s matrix for subject %s" % (kind, subject))

    if kind in ['tangent', 'partial correlation', 'correlation']:
        conn_measure = connectome.ConnectivityMeasure(kind=kind)
        connectivity = conn_measure.fit_transform([timeseries])[0]

    if save:
        subject_file = os.path.join(
            save_path, subject,
            subject + '_' + atlas_name + '_' + kind.replace(' ', '_') + '.mat')
        sio.savemat(subject_file, {'connectivity': connectivity})

    return connectivity
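A hedged usage sketch of the function above; it assumes the module-level imports used by the snippet (os, scipy.io as sio, nilearn.connectome) are available, and the subject and atlas names are made up for illustration.

import numpy as np

ts = np.random.default_rng(0).standard_normal((200, 110))  # timepoints x regions
conn = subject_connectivity(ts, 'sub-0001', 'ho', 'correlation', save=False)
print(conn.shape)  # (110, 110)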
Example #4
def get_functional_connectivity(total_time_series):
    '''

    Get the functional connectivity matrix (diagonal elements are 0) from the roi_signals

    parameters
    ----------
    total_time_series: the roi_signals, a list of length L.
                       The elements are np.array with shape (T, N),
                       where T is the number of timepoints and N is the number of ROIs.
                       N must be the same for every element; T may differ.

    return
    ------
    connectivity: the functional connectivity matrices from the roi_signals, np.array
                  with shape (L, N, N). Diagonals are set to 0 and the remaining
                  values are Fisher z-transformed (arctanh).

    '''
    conn_measure = connectome.ConnectivityMeasure(kind='correlation')
    connectivity = conn_measure.fit_transform(total_time_series)
    diagonal = np.diag_indices(connectivity.shape[1])
    for i in range(connectivity.shape[0]):
        connectivity[i][diagonal] = 0
    connectivity = np.arctanh(connectivity)
    return connectivity
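A hedged usage sketch, assuming numpy and nilearn.connectome are imported as in the snippet above: two synthetic subjects with different numbers of timepoints but the same number of ROIs.

import numpy as np

rng = np.random.default_rng(0)
roi_signals = [rng.standard_normal((100, 6)), rng.standard_normal((150, 6))]
fc = get_functional_connectivity(roi_signals)
print(fc.shape)  # (2, 6, 6): zero diagonals, Fisher z-transformed off-diagonals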
Example #5
def extract_mat(rsimg, maskimg, labelimg, conntype='correlation', space='labels', savets=False, nomat=False):

    masker = input_data.NiftiLabelsMasker(labelimg,
                                          background_label=0,
                                          smoothing_fwhm=None,
                                          standardize=False, detrend=False,
                                          mask_img=maskimg,
                                          resampling_target=space,
                                          verbose=0)

    # get the unique labels list, other than 0, which will be first
    reginparc = np.unique(labelimg.get_data())[1:].astype(int)
    reglabs = list(reginparc.astype(str))

    # Extract time series
    time_series = masker.fit_transform(rsimg)

    if nomat:
        connmat = None
        conndf = None
    else:
        connobj = connectome.ConnectivityMeasure(kind=conntype)
        connmat = connobj.fit_transform([time_series])[0]
        conndf = get_con_df(connmat, reglabs)


    # if not saving time series, don't pass anything substantial, save mem
    if not savets:
        time_series = 42

    return conndf, connmat, time_series, reginparc
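get_con_df is not defined in this snippet; a minimal, hypothetical stand-in (the real helper in the source repository may differ) would wrap the square matrix in a labelled pandas DataFrame:

import pandas as pd

def get_con_df_sketch(connmat, reglabs):
    # label rows and columns of the (regions x regions) matrix with region IDs
    return pd.DataFrame(connmat, index=reglabs, columns=reglabs)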
Example #6
def subject_connectivity(timeseries, subject, atlas_name, kind, save=True, save_path=root_folder):
    """
        timeseries   : timeseries table for subject (timepoints x regions)
        subject      : the subject short ID
        atlas_name   : name of the atlas used
        kind         : the kind of connectivity to be used, e.g. lasso, partial correlation, correlation
        save         : save the connectivity matrix to a file
        save_path    : specify path to save the matrix if different from subject folder

    returns:
        connectivity : connectivity matrix (regions x regions)
    """

    print("Estimating %s matrix for subject %s" % (kind, subject))

    if kind == 'lasso':
        # Graph Lasso estimator (GraphLassoCV was renamed GraphicalLassoCV in newer scikit-learn releases)
        covariance_estimator = GraphLassoCV(verbose=1)
        covariance_estimator.fit(timeseries)
        connectivity = covariance_estimator.covariance_
        print('Covariance matrix has shape {0}.'.format(connectivity.shape))

    elif kind in ['tangent', 'partial correlation', 'correlation']:
        conn_measure = connectome.ConnectivityMeasure(kind=kind)
        connectivity = conn_measure.fit_transform([timeseries])[0]

    if save:
        subject_file = os.path.join(save_path, subject,
                                    subject + '_' + atlas_name + '_' + kind.replace(' ', '_') + '.mat')
        sio.savemat(subject_file, {'connectivity': connectivity})

    return connectivity
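A hedged sketch of the sparse inverse covariance ('lasso') branch above on synthetic data, using GraphicalLassoCV (the current name of GraphLassoCV in scikit-learn):

import numpy as np
from sklearn.covariance import GraphicalLassoCV

rng = np.random.default_rng(0)
ts = rng.standard_normal((200, 8))  # timepoints x regions
estimator = GraphicalLassoCV()
estimator.fit(ts)
covariance = estimator.covariance_  # (regions x regions) covariance
precision = estimator.precision_    # sparse inverse covariance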
Example #7
def extract_full_connectivity_matrix(fmri_file, mask_file, confounds_file, tr):
    confounds = get_confounds(confounds_file)

    masker_pars = {
        "mask_img": str(mask_file),
        "detrend": True,
        "standardize": True,
        "low_pass": 0.1,
        "high_pass": 0.01,
        "t_r": tr,
        "smoothing_fwhm": 6,
        "radius": 5,
        "allow_overlap": True
    }

    atlas = datasets.fetch_coords_seitzman_2018()
    network_rois_ind = atlas.networks != "unassigned"

    coords = np.vstack((atlas.rois['x'], atlas.rois['y'], atlas.rois['z'])).T
    coords = coords[network_rois_ind]
    time_series, excluded_rois = extract_time_series(masker_pars, fmri_file,
                                                     confounds.values, coords)

    correlation_measure = connectome.ConnectivityMeasure(kind='correlation')
    connectivity_matrix = correlation_measure.fit_transform([time_series])[0]
    connectivity_matrix = nan_empty_rois_in_conmat(connectivity_matrix,
                                                   excluded_rois.keys())

    return connectivity_matrix, atlas.networks[network_rois_ind], excluded_rois
Example #8
 def get_connectivity(self, parcels):
     # Initialize empty array to store average timeseries for each ROI
     average_timeseries = np.zeros((0, 
                                    np.size(self.timeseries['L'], 
                                            axis=1)))
                                            
     for hem in ['L', 'R']:
         # For each hemisphere
         unique_labels = np.unique(parcels[hem]).tolist()
     
         for l in unique_labels:
             # Retrieve indices of timeseries corresponding to the label l
             binary = (parcels[hem] == l).flatten()
             average_timeseries = np.vstack((average_timeseries, np.mean(
                 self.timeseries[hem][binary, :], axis=0)))
                    
     if self.edge_type == 'full':
         corr = np.corrcoef(average_timeseries)
         np.fill_diagonal(corr, 0)
         # Fisher's z-transform
         self.connectivity = np.arctanh(corr)
     elif self.edge_type in conn_kinds:
         conn_measure = connectome.ConnectivityMeasure(kind=self.edge_type)
         self.connectivity = np.squeeze(
         conn_measure.fit_transform([average_timeseries.T]))
     else:
         raise ValueError('Unknown edge type')
             
     return self.connectivity
Example #9
def correlation(data, kind='pearson'):
    ''' Compute the correlation matrix of a series of voxels.

    Args:
        data    numpy array. Variables to be correlated are given by the rows.
        kind    'pearson' (default), 'spearman', or 'ledoit' (Ledoit-Wolf-regularized
                correlation via nilearn's ConnectivityMeasure).

    Returns:
        cor_matrix  numpy array of size (data.shape[0], data.shape[0])
        p_vals      two-sided p-values of the correlations
    '''

    print('Computing sample correlation with %s estimator'%kind)

    # Compute correlation matrix of rows of 'data'.
    if kind == 'spearman':
        cor_matrix, p_vals = stats.spearmanr(data.T)
    elif kind == 'ledoit':
        cor_matrix = connectome.ConnectivityMeasure(kind='correlation').fit_transform([data.T])[0]
    elif kind == 'pearson':
        cor_matrix = np.corrcoef(data)
        cor_matrix[np.isnan(cor_matrix)] = 0.

    if kind != 'spearman':
        # Compute p-values of the correlations using a two-sided t-test with
        # data.shape[1] - 2 degrees of freedom.
        cor_matrix[cor_matrix > 0.999] = 0.999999
        cor_matrix[cor_matrix < -0.999] = -0.999999
        t_stats = cor_matrix * np.sqrt(data.shape[1] - 2.)\
                  / np.sqrt(1. - cor_matrix**2.)
        p_vals = stats.t.sf(np.abs(t_stats), data.shape[1]-2.)*2.



    return cor_matrix, p_vals
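A worked numeric check of the r -> t -> p conversion above: with n = data.shape[1] observations, t = r * sqrt(n - 2) / sqrt(1 - r^2) and the two-sided p-value is 2 * sf(|t|, df = n - 2).

import numpy as np
from scipy import stats

r, n = 0.5, 30
t = r * np.sqrt(n - 2.) / np.sqrt(1. - r**2.)
p = stats.t.sf(abs(t), n - 2.) * 2.
print(t, p)  # t is roughly 3.06, p roughly 0.005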
Example #10
def extract_mat(rsimg,
                maskimg,
                labelimg,
                regnames=None,
                conntype='correlation',
                space='labels'):

    masker = input_data.NiftiLabelsMasker(labelimg,
                                          background_label=0,
                                          smoothing_fwhm=None,
                                          standardize=False,
                                          detrend=False,
                                          mask_img=maskimg,
                                          resampling_target=space,
                                          verbose=0)

    # Extract time series
    time_series = masker.fit_transform(rsimg)

    connobj = connectome.ConnectivityMeasure(kind=conntype)
    connmat = connobj.fit_transform([time_series])[0]

    if regnames is not None:
        reglabs = open(regnames).read().splitlines()
    else:
        # get the unique labels list, other than 0, which will be first
        reglabs = list(
            np.unique(labelimg.get_data())[1:].astype(int).astype(str))

    conndf = get_con_df(connmat, reglabs)

    return conndf, connmat
Example #11
def connnect_creation(df_int, kind='correlation'):
    """Create connectivity."""
    order = df_int[1]
    session_nb = str(df_int[2])
    filename_id = df_int[3][:-9]
    ts_dirty = load(filename_id + 'basc064' + '/' + 'rfMRI_REST' + session_nb +
                    '_' + order + '_raw')

    ts_ortho = np.loadtxt(filename_id + 'confounds' + '/' + 'rfMRI_REST' +
                          session_nb + '_' + order +
                          '_Movement_Regressors.txt')
    ts = signal.clean(ts_dirty,
                      detrend=True,
                      standardize=True,
                      confounds=ts_ortho,
                      low_pass=None,
                      high_pass=None,
                      t_r=0.72,
                      ensure_finite=False)

    conn_measure = connectome.ConnectivityMeasure(kind=kind)
    indiv_connect_mat[kind] = conn_measure.fit_transform([ts])
    mean_connect_mat[kind] = indiv_connect_mat[kind].mean(axis=0)
    connectivity_coefs = connectome.sym_to_vec(indiv_connect_mat[kind],
                                               discard_diagonal=True)
    return connectivity_coefs, ts
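connectome.sym_to_vec has been replaced by connectome.sym_matrix_to_vec in recent nilearn versions; a hedged, self-contained sketch of the vectorization step above:

import numpy as np
from nilearn import connectome as nl_connectome

rng = np.random.default_rng(0)
ts_list = [rng.standard_normal((100, 12)) for _ in range(3)]
measure = nl_connectome.ConnectivityMeasure(kind='correlation')
mats = measure.fit_transform(ts_list)  # (3, 12, 12)
vecs = nl_connectome.sym_matrix_to_vec(mats, discard_diagonal=True)
print(vecs.shape)  # (3, 66): 12 * 11 / 2 lower-triangular values per subject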
Example #12
    def calculate_network(self, method, parcel_centers=False):
        """Calculate connectivity matrix based on functional parcellation
        
        :param method:     Method used for the functional parcellation
        :param hemisphere: Hemisphere to which the functional parcellation 
                           corresponds
        """

        if self.parcellation is None:
            raise RuntimeError('Parcellation needs to be specified.')
        
        self.method = method
        
        # Specify number of parcels
        num_parcels = self.parcellation.nodes

        # Initialize empty array to store average timeseries for each ROI
        average_timeseries = []
        
        for l in range(num_parcels):
            members = self.parcellation.members[l, 0][0]
            hemisphere = self.parcellation.hemisphere[l, 0]
            # Retrieve indices of timeseries corresponding to the label l
            average_timeseries.append(np.mean(
            self.timeseries[hemisphere][members, :], axis=0))
            
        average_timeseries = np.vstack(average_timeseries)
                       
        if self.edge_type == 'full':
            corr = np.corrcoef(average_timeseries)
            np.fill_diagonal(corr, 0)
            # Fisher's z-transform
            self.connectivity = np.arctanh(corr)
        elif self.edge_type in conn_kinds:
            conn_measure = connectome.ConnectivityMeasure(kind=self.edge_type)
            self.connectivity = np.squeeze(
            conn_measure.fit_transform([average_timeseries.T]))
        else:
            raise ValueError('Unknown edge type')
            
        if parcel_centers:  
            # Initialize empty array to store labels for each ROI
            node_labels = np.zeros((self.parcellation.midth_coords))
        
            for l in range(num_parcels):
                # Get parcel center coordinates
                parcel_center = self.get_parcel_center(
                self.timeseries[hemisphere][members, :],
                self.vertices[hemisphere][members, :])
                node_labels = np.vstack((node_labels, parcel_center))
            
            self.labels = node_labels
Example #13
def create_all_features(data, parc):
    """
    This function gets the concatented data and produces a connectiviry matrix according to the chosen parcellation for each subject.
    input: data = a list of paths for the concatenated data (a poth for each subject), parc = chosen parcellation method
    output: all_features = a list which contains all the connectivity matrices
    """
    all_features = []  # here is where we will put the data (a container)

    #Create the wanted connectivity matrix
    #kind{“correlation”, “partial correlation”, “tangent”, “covariance”, “precision”}
    correlation_measure = connectome.ConnectivityMeasure(kind='correlation')

    medial_mask = []
    parc_shaf = []
    if parc == "Schaefer":
        #Add medial mask to HCP data (to equalize dimensions of HCP data and Schaefer parcellation file)
        file = conf.MEDIAL_MASK_PATH
        mat_contents = sio.loadmat(file)
        medial_mask = mat_contents['medial_mask']
        print('The shape of the medial mask file is {}'.format(
            medial_mask.shape))
        print("It should be (64984, 1).")
        #Schaefer parcellation
        path_parc = conf.SCHAEFER_PARC_DIR + conf.SCHAEFER_PARC_FILE
        img = nib.load(path_parc)
        parc_shaf = img.get_fdata()
        print('The shape of the parc file is {}'.format(parc_shaf.shape))

    for i, sub in enumerate(data):
        #load data
        load_data = np.load(data[i])['a']
        parcellated_data = []
        if parc == "Schaefer":
            print("Shafer")
            # add medial mask to the cortical vertices of the HCP data
            data_forparc = add_medial_mask(load_data, medial_mask)
            # parcell the data according to the schaefer parc
            parcellated_data = schaefer_parc(data_forparc, parc_shaf)
        else:
            print("Not Shafer")
            # parcell the data according to a specific atlas
            parcellated_data = hcp.parcellate(load_data, parc)
        # create a region x region correlation matrix
        correlation_matrix = correlation_measure.fit_transform(
            [parcellated_data])[0]
        # add to our container
        #np.savez_compressed(saved_dir_path + i, a = correlation_matrix)
        all_features.append(correlation_matrix)
        # keep track of status
        print('finished %s of %s' % (i + 1, len(data)))

    return all_features
Example #14
def process_test_data(timeseries, transformer, ids, params, k, seed, validation_ext):
    conn_measure = connectome.ConnectivityMeasure(kind='correlation')
    test_data = conn_measure.fit_transform(timeseries)

    if params['connectivity'] == 'TE':
        connectivity = transformer.transform(timeseries)
    else:
        connectivity = transformer.transform(test_data)

    save_path = data_folder
    atlas_name = params['atlas']
    kind = params['connectivity']

    for i, subj_id in enumerate(ids):
        subject_file = os.path.join(
            save_path, subj_id,
            subj_id + '_' + atlas_name + '_' + kind.replace(' ', '_') + '_' +
            str(k) + '_' + str(seed) + '_' + validation_ext +
            str(params['n_subjects']) + '.mat')
        sio.savemat(subject_file, {'connectivity': connectivity[i]})  
Example #15
def group_connectivity(timeseries,
                       subject_list,
                       atlas_name,
                       kind,
                       save=True,
                       save_path=root_folder):
    """
        timeseries   : list of timeseries tables for subjects (timepoints x regions)
        subject_list : the subject short IDs list
        atlas_name   : name of the atlas used
        kind         : the kind of connectivity to be used, e.g. lasso, partial correlation, correlation
        save         : save the connectivity matrix to a file
        save_path    : specify path to save the matrix if different from subject folder

    returns:
        connectivity_matrices : list/array of connectivity matrices (regions x regions), one per subject
    """

    if kind == 'lasso':
        # Graph Lasso estimator
        covariance_estimator = GraphLassoCV(verbose=1)
        connectivity_matrices = []

        for i, ts in enumerate(timeseries):
            covariance_estimator.fit(ts)
            connectivity = covariance_estimator.covariance_
            connectivity_matrices.append(connectivity)
            print('Covariance matrix has shape {0}.'.format(
                connectivity.shape))

    elif kind in ['tangent', 'partial correlation', 'correlation']:
        conn_measure = connectome.ConnectivityMeasure(kind=kind)
        connectivity_matrices = conn_measure.fit_transform(timeseries)

    if save:
        for i, subject in enumerate(subject_list):
            subject_file = os.path.join(
                save_path, subject_list[i], subject_list[i] + '_' +
                atlas_name + '_' + kind.replace(' ', '_') + '.mat')
            sio.savemat(subject_file,
                        {'connectivity': connectivity_matrices[i]})
            print("Saving connectivity matrix to %s" % subject_file)

    return connectivity_matrices
Example #16
File: conmats.py  Project: fliem/sea_zrh_rs
def extract_mat(rs_file, brainmask_file, roi_file, confounds_file, conf, roi_type, tr, spikereg_threshold=None):
    """
    36 P
    """

    # Masker
    masker_pars = {"mask_img": brainmask_file, "detrend": True, "standardize": True, "low_pass": 0.1, "high_pass": \
        0.01, "t_r": tr}

    if roi_type == "maps":
        # for msdl type probablistic rois
        masker = input_data.NiftiMapsMasker(roi_file, **masker_pars)
    elif roi_type == "labels":
        # for binary rois
        masker = input_data.NiftiLabelsMasker(roi_file, **masker_pars)
    else:
        raise Exception("roi type not known {}".format(roi_type))

    # Extract time series
    confounds, outlier_stats = get_confounds(confounds_file, kind=conf, spikereg_threshold=spikereg_threshold)
    time_series = masker.fit_transform(rs_file, confounds=confounds.values)

    con_measure = connectome.ConnectivityMeasure(kind='correlation')
    conmat = con_measure.fit_transform([time_series])[0]

    report_str = "rs_file\t{}\n".format(rs_file)
    report_str += "roi_file\t{}\n".format(roi_file)

    keys = list(masker_pars.keys())
    keys.sort()
    for k in keys:
        report_str += "{}\t{}\n".format(k, masker_pars[k])

    report_str += "spike regression \t {}".format(spikereg_threshold)
    report_str += "\n\n"
    report_str += "confounds\t{}".format(", ".join(confounds.columns))
    report_str += "\n\n"
    report_str += confounds.to_string()

    return conmat, report_str, outlier_stats
Example #17
def extract_net(in_timeseries, discard_frames=0, conntype='correlation'):

    hf = h5py.File(in_timeseries, 'r')

    reglabs = np.array(hf.get('regionids')).astype(str)

    ts = np.array(hf.get('timeseries'))
    print('shape of input data: {}'.format(ts.shape))
    # possibly trim
    if discard_frames > 0:
        ts = ts[discard_frames:, :]
        print('new shape of input data: {}'.format(ts.shape))

    if conntype == 'partialcorrelation':
        conntype = 'partial correlation'

    connobj = connectome.ConnectivityMeasure(kind=conntype)
    connmat = connobj.fit_transform([ts])[0]
    conndf = get_con_df(connmat, reglabs)

    hf.close()

    return conndf
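A hedged sketch of the HDF5 layout the function above expects ('regionids' and 'timeseries' datasets), written here with synthetic data; the filename is made up.

import h5py
import numpy as np

with h5py.File('example_timeseries.h5', 'w') as hf:
    hf.create_dataset('regionids', data=np.arange(1, 13))
    hf.create_dataset('timeseries',
                      data=np.random.default_rng(0).standard_normal((200, 12)))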
Example #18
def build_graph_adj_mat_newJune(pathout,
                                mmp_atlas,
                                atlas_name,
                                adjacent_mat_file,
                                graph_type='surf',
                                coarsening_levels=6,
                                noise_level=0.01,
                                Nneighbours=8):
    #####generate brain graphs

    graph_perm_file = os.path.join(
        pathout, '_'.join([
            atlas_name, graph_type, 'brain_graph_layer' +
            str(coarsening_levels) + '_Nei' + str(Nneighbours) + '.pkl'
        ]))
    print(graph_perm_file)

    if not os.path.isfile(graph_perm_file):

        if graph_type == 'surf':
            print(
                '\n\nLoading adjacency matrix based on counting connected vertices between parcels:',
                adjacent_mat_file)
            adj_mat = nib.load(adjacent_mat_file).get_data()
            adj_mat = sparse.csr_matrix(adj_mat)

        elif graph_type == 'SC':
            print(
                '\n\nCalculating adjacency graph based on structural covariance of corrThickness across subjects:',
                adjacent_mat_file)
            conn_matrix = nib.load(adjacent_mat_file).get_data()
            Subject_Num, Node_Num = conn_matrix.shape

            atlas_roi = nib.load(mmp_atlas).get_data()
            RegionLabels = [i for i in np.unique(atlas_roi) if i > 0]
            Region_Num = len(RegionLabels)

            tc_matrix_df = pd.DataFrame(data=conn_matrix.ravel(),
                                        columns=['tc_signal'])
            tc_matrix_df['roi_label'] = np.repeat(atlas_roi.astype('int'),
                                                  Subject_Num,
                                                  axis=0).ravel()
            tc_matrix_df['subj'] = np.repeat(np.arange(Subject_Num).reshape(
                (Subject_Num, 1)),
                                             Node_Num,
                                             axis=1).ravel()
            # df = pd.DataFrame(values, index=index)

            tc_roi = tc_matrix_df.groupby(['subj', 'roi_label'])
            tc_roi_matrix = tc_roi.mean().values.reshape(
                Subject_Num, Region_Num)
            #################
            corr_kind = 'partial correlation'  #'correlation' ##
            connectome_measure = connectome.ConnectivityMeasure(kind=corr_kind)
            # connectome_measure = connectome.GroupSparseCovarianceCV()
            # corr_matrix = connectome_measure.fit_transform(np.transpose(subjects_tc_matrix))
            corr_matrix = connectome_measure.fit_transform(
                np.expand_dims(tc_roi_matrix, axis=0))
            corr_matrix_z = np.tanh(
                connectome_measure.mean_)  ##convert to z-score
            sig = 0.25
            corr_matrix_z = np.exp(
                corr_matrix_z / sig)  ##a Gaussian kernel, defined in Shen 2010

            # k-NN graph.
            idx = np.argsort(-corr_matrix_z)[:, 1:Nneighbours + 1]
            dist = np.array([
                corr_matrix_z[i, idx[i]] for i in range(corr_matrix_z.shape[0])
            ])
            dist[dist < 1] = 0
            adj_mat = graph.adjacency(dist, idx)

        elif graph_type == 'RSFC':
            from utils import load_rsfmri_data_matrix
            subjects_tc_matrix, subname_coding = load_rsfmri_data_matrix(
                adjacent_mat_file)

            if not os.path.isfile(pathout + atlas_name +
                                  '_avg_RSFC_matrix.pkl'):
                corr_kind = 'tangent'
                print('using %s for connectivity measure...' % corr_kind)
                connectome_measure = connectome.ConnectivityMeasure(
                    kind=corr_kind)
                corr_matrix = connectome_measure.fit_transform(
                    np.transpose(subjects_tc_matrix, (0, 2, 1)))
                corr_matrix_z = np.tanh(
                    connectome_measure.mean_
                )  ##np.mean(np.arctanh(corr_matrix),axis=0)
                with open(pathout + atlas_name + '_avg_RSFC_matrix.pkl',
                          'wb') as f:  # Python 3: open(..., 'wb')
                    pickle.dump([corr_matrix_z], f)
            else:
                with open(pathout + atlas_name + '_avg_RSFC_matrix.pkl',
                          'rb') as f:  # Python 3: open(..., 'rb')
                    corr_matrix_z = pickle.load(f)
                corr_matrix_z = corr_matrix_z[0]

            ##sig = 0.01
            sig = np.mean(corr_matrix_z)
            corr_matrix_z = np.exp(
                corr_matrix_z / sig)  ##a Gaussian kernel, defined in Shen 2010
            print(
                sig,
                np.histogram(corr_matrix_z, bins=np.arange(10), density=True))

            # k-NN graph.
            idx = np.argsort(-corr_matrix_z)[:, 1:Nneighbours + 1]
            dist = np.array([
                corr_matrix_z[i, idx[i]] for i in range(corr_matrix_z.shape[0])
            ])
            dist[dist < 1] = 0
            adj_mat = graph.adjacency(dist, idx)

        A = graph.replace_random_edges(adj_mat, noise_level)
        ###build multi-level graph using coarsen (div by 2 at each level)
        graphs, perm = coarsening.coarsen(A,
                                          levels=coarsening_levels,
                                          self_connections=False)
        L = [graph.laplacian(A, normalized=True) for A in graphs]
        with open(graph_perm_file, 'wb') as f:  # Python 3: open(..., 'wb')
            pickle.dump([A, perm, L], f)
    else:
        # Getting back the objects:
        with open(graph_perm_file, 'rb') as f:  # Python 3: open(..., 'rb')
            A, perm, L = pickle.load(f)

    return A, perm, L
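A hedged, self-contained sketch of the k-NN graph step above using plain numpy/scipy instead of the external graph.adjacency helper: keep the k strongest neighbours of each region and symmetrize.

import numpy as np
from scipy import sparse

rng = np.random.default_rng(0)
sim = np.corrcoef(rng.standard_normal((20, 100)))  # stand-in (region x region) similarity matrix
k = 8

idx = np.argsort(-sim)[:, 1:k + 1]                 # k strongest neighbours per row, skipping self
rows = np.repeat(np.arange(sim.shape[0]), k)
vals = sim[rows, idx.ravel()]
adj = sparse.coo_matrix((vals, (rows, idx.ravel())), shape=sim.shape)
adj = adj.maximum(adj.T)                           # symmetrize the adjacency matrix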
Example #19
    'verbose': 1
}

masker = NiftiMapsMasker(maps_img=maps_img, **mask_params)
subjects_timeseries = []
dx_groups = []
for label, func_img in zip(phenotypes['DX_GROUP'], func_imgs):
    confounds = extract_confounds(func_img, mask_img=gm_mask, n_confounds=10)
    signals = masker.fit_transform(func_img, confounds=confounds)
    subjects_timeseries.append(signals)
    dx_groups.append(label)

##############################################################################
# Functional Connectomes
# ----------------------
connectome_measure = connectome.ConnectivityMeasure(
    cov_estimator=LedoitWolf(assume_centered=True),
    kind='tangent',
    vectorize=True)

# Vectorized connectomes across subject-specific timeseries
vec = connectome_measure.fit_transform(subjects_timeseries)

##############################################################################
# Linear model
# -------------
# Logistic Regression 'l2'
estimator = LogisticRegression(penalty='l2', random_state=0)
cv = StratifiedShuffleSplit(n_splits=20, test_size=0.25, random_state=0)
scores = cross_val_score(estimator, vec, dx_groups, scoring='roc_auc', cv=cv)
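A hedged, self-contained version of the same pipeline on synthetic data (the real maps_img, phenotypes and functional images above come from the dataset and are not reproduced here):

import numpy as np
from nilearn import connectome
from sklearn.covariance import LedoitWolf
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import StratifiedShuffleSplit, cross_val_score

rng = np.random.default_rng(0)
subjects_ts = [rng.standard_normal((120, 10)) for _ in range(20)]  # one (timepoints x regions) array per subject
labels = [0] * 10 + [1] * 10

measure = connectome.ConnectivityMeasure(
    cov_estimator=LedoitWolf(assume_centered=True), kind='tangent', vectorize=True)
features = measure.fit_transform(subjects_ts)

clf = LogisticRegression(penalty='l2', random_state=0)
cv = StratifiedShuffleSplit(n_splits=5, test_size=0.25, random_state=0)
scores = cross_val_score(clf, features, labels, scoring='roc_auc', cv=cv)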
Example #20
File: utils.py  Project: ku-milab/EC_GCN
def get_networks(subject_list, variable, isDynamic, isEffective):
    """
        subject_list : list of subject IDs
        kind         : the kind of connectivity to be used, e.g. lasso, partial correlation, correlation
        atlas_name   : name of the parcellation atlas used
        variable     : variable name in the .mat file that has been used to save the precomputed networks


    return:
        matrix      : feature matrix of connectivity networks (num_subjects x network_size)
    """

    with open('name.data', 'rb') as f:
        name = pickle.load(f)

    with open('alff.data', 'rb') as f:
        alff = pickle.load(f)

    with open('reho.data', 'rb') as f:
        reho = pickle.load(f)

        dynamicset = []
        all_networks = []
        timeseries_set = []

    if isEffective == True:
        fc = sio.loadmat(
            os.path.join('./EffectiveFC/Before_Dropout/0.1.mat'))['sparse_f']
        # fc = sio.loadmat(os.path.join('./EffectiveFC/Before_Dropout/0.01.mat'))['sparse_f']
        # fc = sio.loadmat(os.path.join('./EffectiveFC/Before_Dropout/0.001.mat'))['sparse_f']
        # fc = sio.loadmat(os.path.join('./EffectiveFC/Before_Dropout/0.2.mat'))['sparse_f']
        # fc = sio.loadmat(os.path.join('./EffectiveFC/Before_Dropout/0.5.mat'))['sparse_f']
        # fc = sio.loadmat(os.path.join('./EffectiveFC/Before_Dropout/0.05.mat'))['sparse_f']
        # fc = sio.loadmat(os.path.join('./EffectiveFC/Before_Dropout/0.15.mat'))['sparse_f']
        # fc = sio.loadmat(os.path.join('./EffectiveFC/After_Dropout/0.001.mat'))['sparse_f']
        # fc = sio.loadmat(os.path.join('./EffectiveFC/After_Dropout/0.01.mat'))['sparse_f']
        # fc = sio.loadmat(os.path.join('./EffectiveFC/After_Dropout/0.05.mat'))['sparse_f']
        # fc = sio.loadmat(os.path.join('./EffectiveFC/After_Dropout/0.1.mat'))['sparse_f']
        # fc = sio.loadmat(os.path.join('./EffectiveFC/After_Dropout/0.15.mat'))['sparse_f']
        # fc = sio.loadmat(os.path.join('./EffectiveFC/After_Dropout/0.2.mat'))['sparse_f']
        # fc = sio.loadmat(os.path.join('./EffectiveFC/After_Dropout/0.5.mat'))['sparse_f']
        for i in range(len(subject_list)):
            all_networks.append(fc[:, :, i])

        # all_networks_set = np.dstack(all_networks)
        # matrix = np.transpose(all_networks_set, (2,0,1))

        vec_networks = [np.reshape(mat, [1, -1]) for mat in all_networks]
        matrix = np.vstack(vec_networks)

    else:
        for subject in subject_list:
            flname = [
                i for i in os.listdir(data_folder) if
                os.path.isfile(os.path.join(data_folder, i)) and subject in i
            ]
            fl = os.path.join(data_folder, flname[0])

            # Estimate connectivity matrix
            timeseries = sio.loadmat(fl)['ROI']

            if variable == 'correlation':
                conn_measure = connectome.ConnectivityMeasure(kind=variable)
                # conn_measure = connectome.ConnectivityMeasure(kind=variable).fit_transform([timeseries])[0]
                # conn_measure_2nd = np.matmul(conn_measure, conn_measure)
                # conn_measure_3rd = np.matmul(conn_measure, conn_measure_2nd)
                # connectivity = conn_measure + conn_measure_2nd + conn_measure_3rd
                ft = conn_measure.fit_transform([timeseries])[0]
            elif variable == 'graph_measure':
                conn_measure = connectome.ConnectivityMeasure(
                    kind='correlation')
                connectivity = conn_measure.fit_transform([timeseries])[0]
                ft = bct.clustering_coef_wu(connectivity)

            timeseries_set.append(timeseries)
            all_networks.append(ft)
            dynamicset.append(
                np.concatenate(
                    (alff[name.index(subject)], reho[name.index(subject)])))

            # all_networks=np.array(all_networks)
            if variable == 'correlation':
                idx = np.triu_indices_from(all_networks[0], 1)
                norm_networks = [np.arctanh(mat) for mat in all_networks]
                vec_networks = [mat[idx] for mat in norm_networks]
                # vec_networks = [mat[idx] for mat in all_networks]
                matrix = np.vstack(vec_networks)
            elif variable == 'graph_measure':
                matrix = np.vstack(all_networks)

            # all_networks_set = np.dstack(all_networks)
            # matrix = np.transpose(all_networks_set, (2,0,1))

    # if isDynamic == True:
    #     dynamicset = np.vstack(dynamicset)
    #     matrix = np.concatenate((matrix, dynamicset), axis=1)
    #
    #
    # with open('./train_data.pkl', 'wb') as filehandle:
    #     pickle.dump(timeseries_set, filehandle)

    return matrix
Example #21
def compute_single_connectivity(timeseries):
    correlation_measure = connectome.ConnectivityMeasure(kind='correlation')
    correlation_matrix = correlation_measure.fit_transform([timeseries])[0]
    np.fill_diagonal(correlation_matrix, 0)

    return correlation_matrix
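A hedged usage sketch, assuming numpy and nilearn.connectome are imported as above:

import numpy as np

ts = np.random.default_rng(0).standard_normal((150, 90))  # timepoints x regions
mat = compute_single_connectivity(ts)
print(mat.shape)  # (90, 90), with a zeroed diagonal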
Example #22
        ts = np.load(output_dir + '/sub-' + subject + '_ses-' + ses + '.npy',
                     allow_pickle=True)
        event = event_template.format(sub=subject, ses=ses)
        events = pd.read_csv(event, sep='\t')
        onset = int(
            events.onset[events.trial_type_30 ==
                         'trauma1_0'])  # take onset of trauma first script
        ts_script = ts[onset:onset + duration, :]
        sub_ts.append(ts_script)
    return sub_ts


# %%
from nilearn import connectome

conn_measure = connectome.ConnectivityMeasure(kind='correlation',
                                              vectorize=False)

mat_ses1 = conn_measure.fit_transform(pooledTS(subject_list, '1'))

# %% plot mean matrix
nilearn.plotting.plot_matrix(conn_measure.mean_,
                             colorbar=True,
                             labels=range(256),
                             reorder='average')

# %% lets run ses 2
mat_ses2 = conn_measure.fit_transform(pooledTS(subject_list, '2'))

nilearn.plotting.plot_matrix(conn_measure.mean_,
                             colorbar=True,
                             labels=range(256),
Example #23
def testNilearn(workDir, dataDir, saveIntermediate):
    """
    :param workDir: the directory where the result files are stored
    :param dataDir: the directory where data files can be found
    :param saveIntermediate: determine if the intermediate files will be saved
    :return: None
    """

    filenames = os.listdir(dataDir + '/fMRI/')
    filenames.sort()
    img_names = [
        dataDir + '/fMRI/' + filename + '/' + filename + '.nii'
        for filename in filenames
    ]
    resultDir = workDir + '/connectivity_matrices/'
    os.makedirs(resultDir)

    connectivity_matrices = {}
    for idx, img_name in enumerate(img_names):
        logging.info('Computing image of subject %s. (Progress: %s/%s)' %
                     (filenames[idx], str(idx + 1), str(len(img_names))))
        img = nilearn.image.load_img(img_name)

        logging.info('Preprocessing image ...step1')
        # confounds = nilearn.image.high_variance_confounds(img, n_confounds=5, percentile=2.0)
        # img = nilearn.image.clean_img(img, detrend=True, standardize=True, confounds=confounds)
        img = nilearn.image.smooth_img(img, 4)
        coords_table = pd.read_csv(dataDir + '/atlas/AAL90.csv',
                                   encoding='gbk')
        coords = pd.concat(
            [coords_table.MNIX, coords_table.MNIY, coords_table.MNIZ], axis=1)
        masker = input_data.NiftiLabelsMasker(labels_img=dataDir +
                                              '/atlas/AAL_Contract_90_2MM.nii',
                                              standardize=True,
                                              memory='nilearn_cache')
        time_series = masker.fit_transform(img)

        if saveIntermediate != 'n':
            intermDir = workDir + '/intermediate_files/'
            if not os.path.exists(intermDir):
                os.makedirs(intermDir)
            plotting.plot_img(img.slicer[:, :, :, 100])
            plt.savefig(intermDir + filenames[idx] + '_preprocessed_step1.png')

        logging.info('Computing connectome ...step2')
        connectivity_measure = connectome.ConnectivityMeasure(
            kind='correlation')
        connectivity_matrix = connectivity_measure.fit_transform([time_series
                                                                  ])[0]
        connectivity_matrices[filenames[idx]] = connectivity_matrix

        if saveIntermediate != 'n':
            plotting.plot_matrix(connectivity_matrix,
                                 colorbar=True,
                                 vmax=0.8,
                                 vmin=-0.8)
            plt.savefig(intermDir + filenames[idx] + '_matrix_step2.png')
            plotting.plot_connectome(connectivity_matrix,
                                     coords,
                                     edge_threshold="97%",
                                     colorbar=True)
            plt.savefig(intermDir + filenames[idx] + '_connectome_step2.png')

        with codecs.open(resultDir + '/' + filenames[idx] + '.csv',
                         'w+',
                         encoding='gbk') as result_csv:
            writer = csv.writer(result_csv, delimiter=',')
            for row in connectivity_matrix:
                writer.writerow(row)

    with open(
            workDir + '/' + os.environ['USERNAME'] + '_' +
            time.strftime('%y%m%d') + '_' + str(len(filenames)) + '_bcn.pkl',
            'wb') as pkl_file:
        pickle.dump(connectivity_matrices, pkl_file)
subs_fact = 8

for i, kind in enumerate(kinds):
    for j, session_nb in enumerate(whoistrain):
        for k, order in enumerate(orders):

            first_all_sess_msk = np.array((df['session_nb'] == session_nb)
                                          & (df['order'] == order))
            secnd_all_sess_msk = np.array((df['session_nb'] != session_nb)
                                          & (df['order'] == order))

            ts_all_array = np.array(ts_all, dtype=object)
            ts_fst = list(ts_all_array[first_all_sess_msk][::subs_fact])
            ts_secnd = list(ts_all_array[secnd_all_sess_msk][::subs_fact])

            conn_measure = connectome.ConnectivityMeasure(kind=kind)
            indiv_connect_mat = conn_measure.fit_transform(ts_fst)
            connectivity_fst = connectome.sym_to_vec(indiv_connect_mat,
                                                     discard_diagonal=True)

            indiv_connect_mat = conn_measure.transform(ts_secnd)
            connectivity_secnd = connectome.sym_to_vec(indiv_connect_mat,
                                                       discard_diagonal=True)

            X_train_all = connectivity_fst
            y_train = y[first_all_sess_msk][::subs_fact]

            X_test_all = connectivity_secnd
            y_test = y[secnd_all_sess_msk][::subs_fact]

            clf = KNeighborsClassifier(n_neighbors=1)
Example #25
def connectivity(layout,
                 subject,
                 session,
                 task,
                 atlas,
                 connectivity_metric='correlation',
                 confounds=None,
                 out_dir=None):
    """
    Makes connectivity matrices per subject per session per task per condition.
    Parameters
    ----------
    layout : str
        BIDS layout with derivatives indexed from pyBIDS
    subject : str
        Subject ID for which the networks will be calculated.
    session : str, optional
        Session of data collection. If there's only one session, we'll find it.
    connectivity_metric : {"correlation", "partial correlation", "tangent",\
                           "covariance", "precision"}, optional
        The matrix kind. Passed to Nilearn's `ConnectivityMeasure`.
    space : str
        'native' if analyses will be performed in subjects' functional native space (atlas(es) should be transformed)
        'mni152-2mm' if analyses will be performed in MNI125 2mm isotropic space (fMRI data should already be transformed)
    atlas : str
        Name of atlas for parcellating voxels into nodes, must be in the same `space` given above.
    confounds : list-like
        Names of confounds (should be columns in fmriprep output confounds.tsv).
    Returns
    -------
    adjacency_matrix
    """
    try:
        version = idconn.__version__
    except:
        version = 'test'
    if '.nii' in atlas:
        assert exists(atlas), f'Mask file does not exist at {atlas}'

    if not out_dir:
        deriv_dir = join(layout.root, 'derivatives', f'idconn-{version}')
    else:
        deriv_dir = out_dir
    atlas_name = basename(atlas).rsplit('.', 2)[0]
    # use pybids here to grab # of runs and preproc bold filenames
    connectivity_measure = connectome.ConnectivityMeasure(
        kind=connectivity_metric)
    bold_files = layout.get(
        scope='derivatives',
        return_type='file',
        suffix='bold',
        task=task,
        space='MNI152NLin2009cAsym',
        subject=subject,
        session=session,
        extension='nii.gz'
    )  # should be preprocessed BOLD file from fmriprep, grabbed with pybids
    print(f'BOLD files found at {bold_files}')
    confounds_files = layout.get(scope='derivatives',
                                 return_type='file',
                                 desc='confounds',
                                 subject=subject,
                                 session=session,
                                 task=task)

    runs = []
    if len(bold_files) > 1:
        for i in range(0, len(bold_files)):
            assert exists(
                bold_files[i]
            ), "Preprocessed bold file(s) does not exist at {0}".format(
                bold_files)
            runs.append(layout.parse_file_entities(bold_files[i])['run'])
    else:
        runs = None
    print(f'Found runs: {runs}')

    out = join(deriv_dir, f'sub-{subject}', f'ses-{session}', 'func')
    if not exists(out):
        makedirs(out)

    #event_files = layout.get(return_type='filename', suffix='events', task=task, subject=subject)
    #timing = pd.read_csv(event_files[0], header=0, index_col=0, sep='\t')
    #conditions = timing['trial_type'].unique()

    if runs:
        corrmats = {}
        for run in runs:
            print('run = ', run)
            # read in events file for this subject, task, and run

            confounds_file = layout.get(scope='derivatives',
                                        return_type='file',
                                        desc='confounds',
                                        subject=subject,
                                        session=session,
                                        task=task,
                                        run=run,
                                        extension='tsv')
            print(f'Confounds file located at: {confounds_file}')
            confounds_df = pd.read_csv(confounds_file[0], header=0, sep='\t')
            confounds_df = confounds_df[confounds].fillna(0)
            confounds_fname = join(
                deriv_dir, f'sub-{subject}', f'ses-{session}', 'func',
                f'sub-{subject}_ses-{session}_task-{task}_run-{run}_desc-confounds_timeseries.tsv'
            )
            confounds_df.to_csv(confounds_fname, sep='\t')

            bold_file = layout.get(scope='derivatives',
                                   return_type='file',
                                   suffix='bold',
                                   task=task,
                                   space='MNI152NLin2009cAsym',
                                   subject=subject,
                                   session=session,
                                   extension='nii.gz',
                                   run=run)
            assert len(
                bold_file
            ) == 1, f'BOLD file improperly specified, more than one .nii.gz file with {subject}, {session}, {task}, {run}: {bold_file}'
            tr = layout.get_tr(bold_file)
            masker = input_data.NiftiLabelsMasker(atlas,
                                                  standardize=True,
                                                  t_r=tr,
                                                  verbose=2)

            ex_bold = image.index_img(bold_file[0], 2)
            display = plotting.plot_epi(ex_bold)
            display.add_contours(atlas)
            display.savefig(
                join(
                    deriv_dir, f'sub-{subject}', f'ses-{session}', 'func',
                    f'sub-{subject}_ses-{session}_task-{task}_run-{run}_desc-atlas_overlay.png'
                ))

            print(f'BOLD file located at {bold_file}\nTR = {tr}s')
            try:
                #for each parcellation, extract BOLD timeseries
                print(
                    f'Extracting bold signal for sub-{subject}, ses-{session}, run-{run}...'
                )
                timeseries = masker.fit_transform(bold_file[0],
                                                  confounds_fname)
            except Exception as e:
                print('ERROR: Trying to extract BOLD signals, but', e)
            try:
                print(
                    f'Making correlation matrix for for sub-{subject}, ses-{session}, task-{task}, run-{run}...'
                )
                corrmats[run] = connectivity_measure.fit_transform(
                    [timeseries])[0]
            except Exception as e:
                print('ERROR: Trying to make corrmat, but', e)
        data = list(corrmats.values())
        stacked_corrmats = np.array(data)
        print('Stacked corrmats have dimensions', stacked_corrmats.shape)
        avg_corrmat = np.mean(stacked_corrmats, axis=0)
    else:
        confounds_file = layout.get(scope='derivatives',
                                    return_type='file',
                                    desc='confounds',
                                    subject=subject,
                                    session=session,
                                    task=task,
                                    extension='tsv')
        print(f'Confounds file located at: {confounds_file}')
        confounds_df = pd.read_csv(confounds_file[0], header=0, sep='\t')
        confounds_df = confounds_df[confounds].fillna(0)
        confounds_fname = join(
            deriv_dir, f'sub-{subject}', f'ses-{session}', 'func',
            f'sub-{subject}_ses-{session}_task-{task}_desc-confounds_timeseries.tsv'
        )
        confounds_df.to_csv(confounds_fname, sep='\t')

        bold_file = layout.get(scope='derivatives',
                               return_type='file',
                               suffix='bold',
                               task=task,
                               space='MNI152NLin2009cAsym',
                               subject=subject,
                               session=session,
                               extension='nii.gz')
        assert len(
            bold_file
        ) == 1, f'BOLD file improperly specified, more than one .nii.gz file with {subject}, {session}, {task}: {bold_file}'
        tr = layout.get_tr(bold_file)
        masker = input_data.NiftiLabelsMasker(atlas,
                                              standardize=True,
                                              t_r=tr,
                                              verbose=2)

        ex_bold = image.index_img(bold_file[0], 2)
        display = plotting.plot_epi(ex_bold)
        display.add_contours(atlas)
        display.savefig(
            join(
                deriv_dir, f'sub-{subject}', f'ses-{session}', 'func',
                f'sub-{subject}_ses-{session}_task-{task}_desc-atlas_overlay.png'
            ))

        print(f'BOLD file located at {bold_file}\nTR = {tr}s')
        try:
            #for each parcellation, extract BOLD timeseries
            print(
                f'Extracting bold signal for sub-{subject}, ses-{session}...')
            timeseries = masker.fit_transform(bold_file[0], confounds_fname)
        except Exception as e:
            print('ERROR: Trying to extract BOLD signals, but', e)
        try:
            print(
                f'Making correlation matrix for for sub-{subject}, ses-{session}...'
            )
            avg_corrmat = connectivity_measure.fit_transform([timeseries])[0]
        except Exception as e:
            print('ERROR: Trying to make corrmat, but', e)

    print('Correlation matrix created, dimensions:', avg_corrmat.shape)
    try:
        corrmat_df = pd.DataFrame(index=np.arange(1, avg_corrmat.shape[0] + 1),
                                  columns=np.arange(1,
                                                    avg_corrmat.shape[0] + 1),
                                  data=avg_corrmat)
        corrmat_file = join(
            deriv_dir, f'sub-{subject}', f'ses-{session}', 'func',
            f'sub-{subject}_ses-{session}_task-{task}_space-MNI152NLin2009cAsym_atlas-{atlas_name}_desc-corrmat_bold.tsv'
        )
        corrmat_df.to_csv(corrmat_file, sep='\t')
    except Exception as e:
        print('ERROR saving corrmat...', e)
    return corrmat_df, corrmat_file
Example #26
def task_connectivity(layout,
                      subject,
                      session,
                      task,
                      atlas,
                      confounds,
                      connectivity_metric='correlation',
                      out_dir=None):
    """
    Makes connectivity matrices per subject per session per task per condition.
    Parameters
    ----------
    layout : str
        BIDS layout with derivatives indexed from pyBIDS (a 'derivatives/' directory will be made under its root if one does not exist)
    subject : str
        Subject ID for which the networks will be calculated.
    session : str, optional
        Session of data collection. If there's only one session, we'll find it.
    task : str
        Name of task fMRI scan from which networks will be calculated.
    connectivity_metric : {"correlation", "partial correlation", "tangent",\
                           "covariance", "precision"}, optional
        The matrix kind. Passed to Nilearn's `ConnectivityMeasure`.
    atlas : str
        If you want to grab an atlas using Nilearn, this is the name of the atlas and
        must match the corresponding function `fetch_atlas_[name]` in `nilearn.datasets`.
        If you have your own atlas, this is the path to that nifti file.
    confounds : list-like
        Names of confounds (should be columns in fmriprep output confounds.tsv).
    out_dir : str, optional
        Output directory; defaults to the `derivatives/idconn-<version>` directory under the BIDS root.
    Returns
    -------
    confounds_file : str
        Filename of merged confounds .tsv file
    """
    #version = '0.1.1'
    try:
        version = idconn.__version__
    except:
        version = 'test'
    if '.nii' in atlas:
        assert exists(atlas), f'Mask file does not exist at {atlas}'

    if not out_dir:
        deriv_dir = join(layout.root, 'derivatives', f'idconn-{version}')
    else:
        deriv_dir = out_dir
    space = 'MNI152NLin2009cAsym'
    atlas_name = basename(atlas).rsplit('.', 2)[0]
    # use pybids here to grab # of runs and preproc bold filenames
    connectivity_measure = connectome.ConnectivityMeasure(
        kind=connectivity_metric)
    bold_files = layout.get(
        scope='derivatives',
        return_type='file',
        suffix='bold',
        task=task,
        space=space,
        subject=subject,
        session=session,
        extension='nii.gz'
    )  # should be preprocessed BOLD file from fmriprep, grabbed with pybids
    print(f'BOLD files found at {bold_files}')

    runs = []
    if len(bold_files) > 1:
        for i in range(0, len(bold_files)):
            assert exists(
                bold_files[i]
            ), "Preprocessed bold file(s) does not exist at {0}".format(
                bold_files)
            runs.append(layout.parse_file_entities(bold_files[i])['run'])
    else:
        runs = None
    print(f'Found runs: {runs}')

    out = join(deriv_dir, f'sub-{subject}', f'ses-{session}', 'func')
    if not exists(out):
        makedirs(out)

    event_files = layout.get(return_type='filename',
                             suffix='events',
                             task=task,
                             subject=subject)
    timing = pd.read_csv(event_files[0], header=0, index_col=0, sep='\t')
    conditions = timing['trial_type'].unique()

    run_cond = {}
    corrmats = {}
    for run in runs:
        bold_file = layout.get(scope='derivatives',
                               return_type='file',
                               suffix='bold',
                               task=task,
                               space='MNI152NLin2009cAsym',
                               subject=subject,
                               session=session,
                               extension='nii.gz',
                               run=run)
        assert len(
            bold_file
        ) == 1, f'BOLD file improperly specified, more than one .nii.gz file with {subject}, {session}, {task}, {run}: {bold_file}'
        tr = layout.get_tr(bold_file)

        #load timing file
        #update to use pyBIDS + layout
        event_file = layout.get(return_type='filename',
                                suffix='events',
                                task=task,
                                subject=subject,
                                run=run,
                                session=session)
        print('# of event files =', len(event_file), '\nfilename = ',
              event_file[0])
        the_file = str(event_file[0])
        assert exists(the_file), 'file really does not exist'
        timing = pd.read_csv(the_file, header=0, index_col=0, sep='\t')
        timing = timing.sort_values('onset')

        confounds_file = layout.get(scope='derivatives',
                                    return_type='file',
                                    desc='confounds',
                                    subject=subject,
                                    session=session,
                                    task=task,
                                    run=run,
                                    extension='tsv')
        print(f'Confounds file located at: {confounds_file}')
        confounds_df = pd.read_csv(confounds_file[0], header=0, sep='\t')
        confounds_df = confounds_df[confounds].fillna(0)
        confounds_fname = join(
            deriv_dir, f'sub-{subject}', f'ses-{session}', 'func',
            f'sub-{subject}_ses-{session}_task-{task}_run-{run}_desc-confounds_timeseries.tsv'
        )
        # write without the pandas index so it isn't picked up as an extra regressor
        confounds_df.to_csv(confounds_fname, sep='\t', index=False)

        ex_bold = image.index_img(bold_file[0], 2)
        display = plotting.plot_epi(ex_bold)
        display.add_contours(atlas)
        display.savefig(
            join(
                deriv_dir, f'sub-{subject}', f'ses-{session}', 'func',
                f'sub-{subject}_ses-{session}_task-{task}_run-{run}_desc-{atlas_name}_overlay.png'
            ))

        print(f'BOLD file located at {bold_file}\nTR = {tr}s')

        masker = input_data.NiftiLabelsMasker(atlas,
                                              standardize=True,
                                              t_r=tr,
                                              verbose=1)
        timeseries = masker.fit_transform(bold_file[0],
                                          confounds=confounds_fname)
        try:
            # slice the timeseries into task-condition blocks
            for condition in conditions:
                run_cond.setdefault(condition, {})
                corrmats.setdefault(condition, {})
                cond_timing = timing[timing['trial_type'] == condition]
                blocks = []
                for i in cond_timing.index:
                    start = cond_timing.loc[i]['onset'] / tr
                    stop = ((cond_timing.loc[i]['onset'] +
                             cond_timing.loc[i]['duration']) / tr) + 1
                    blocks.append((start, stop))
                if not blocks:
                    continue
                # concatenate the timepoints from every block of this condition
                run_cond[condition][run] = np.vstack([
                    timeseries[int(start):int(stop), :]
                    for start, stop in blocks
                ])
                print(f'Making correlation matrix for {run}, {condition}.')
                corrmats[condition][run] = connectivity_measure.fit_transform(
                    [run_cond[condition][run]])[0]
                print('And that correlation matrix is',
                      corrmats[condition][run].shape)
        except Exception as e:
            print('Error while slicing timeseries into conditions:', e)
    # average the correlation matrices across runs for each condition
    files = []
    avg_corrmats = {}
    print('Corrmats per run per condition have been made!')
    for condition in conditions:
        print(f'Merging corrmats for {task}-{condition}...')
        data = list(corrmats[condition].values())
        stacked_corrmats = np.array(data)
        print('Stacked corrmats have dimensions', stacked_corrmats.shape)
        avg_corrmat = np.mean(stacked_corrmats, axis=0)
        corrmat_df = pd.DataFrame(index=np.arange(1, avg_corrmat.shape[0] + 1),
                                  columns=np.arange(1,
                                                    avg_corrmat.shape[0] + 1),
                                  data=avg_corrmat)
        avg_corrmats[condition] = corrmat_df
        corrmat_file = join(
            deriv_dir, f'sub-{subject}', f'ses-{session}', 'func',
            f'sub-{subject}_ses-{session}_task-{task}_condition-{condition}_desc-{atlas_name}_corrmat.tsv'
        )
        try:
            corrmat_df.to_csv(corrmat_file, sep='\t')
            files.append(corrmat_file)
        except Exception as e:
            print(f'Error saving corrmat to {corrmat_file}:', e)
    return files, avg_corrmats
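# Example usage (a minimal sketch): each condition's averaged correlation matrix is
# written as a tab-separated file with ROI labels 1..N as index and header, so it can
# be read straight back into a DataFrame. All path components below are hypothetical
# placeholders, not values from the code above.
import pandas as pd
from os.path import join

deriv_dir = '/path/to/bids/derivatives/idconn-test'   # assumed output directory
subject, session, task = '01', '01', 'memory'         # placeholder BIDS entities
condition, atlas_name = 'encoding', 'myatlas'         # placeholder condition/atlas names

corrmat_file = join(
    deriv_dir, f'sub-{subject}', f'ses-{session}', 'func',
    f'sub-{subject}_ses-{session}_task-{task}_condition-{condition}_desc-{atlas_name}_corrmat.tsv')
corrmat = pd.read_csv(corrmat_file, sep='\t', index_col=0)
print(corrmat.shape)  # (n_regions, n_regions)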
# extract and store timeseries of ICA components
masker = input_data.NiftiMapsMasker(maps_img=icares,
                                    standardize=True,
                                    memory='nilearn_cache',
                                    verbose=1)
ts = masker.fit_transform(img)
fnm = out_pref + 'ts.csv'
pandas.DataFrame(ts).to_csv(fnm, index=False)

# Remove the same ICs that iStaging analysis removes
ics = [x for x in range(100) if x not in ics_to_remove]
ts = ts[:, ics]

# Get and store derivatives
# full correlation
correlation_measure = connectome.ConnectivityMeasure(kind='correlation')
cmat = correlation_measure.fit_transform([ts])[0]
np.fill_diagonal(cmat, 0)
fnm = out_pref + 'fullcorr.csv'
pandas.DataFrame(cmat).to_csv(fnm, index=False)

# partial correlation
correlation_measure = connectome.ConnectivityMeasure(
    kind="partial correlation")
cmat = correlation_measure.fit_transform([ts])[0]
np.fill_diagonal(cmat, 0)
fnm = out_pref + 'partcorr.csv'
pandas.DataFrame(cmat).to_csv(fnm, index=False)

# nodal amplitude
namp = np.std(ts, axis=0)
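# The snippet stops before writing the nodal amplitudes; a plausible save step,
# mirroring the 'fullcorr.csv' / 'partcorr.csv' pattern above (the filename is assumed):
fnm = out_pref + 'namp.csv'
pandas.DataFrame(namp).to_csv(fnm, index=False)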
Example #28
def extract_mat(rsimg, maskimg, labelimg, conntype='correlation', space='labels', 
                savets=False, nomat=False, dtr=False, stdz=False):

    masker = input_data.NiftiLabelsMasker(labelimg,
                                          background_label=0,
                                          smoothing_fwhm=None,
                                          standardize=stdz, 
                                          detrend=dtr,
                                          mask_img=maskimg,
                                          resampling_target=space,
                                          verbose=1)

    # mask the label image so that there are no regions that don't have data
    from nilearn.image import resample_to_img
    if space == 'data':
        # resample_to_image(source, target)
        # assume here that the mask is also fmri space
        resamplabs = resample_to_img(labelimg, maskimg, interpolation='nearest')
        resampmask = resample_to_img(maskimg, maskimg, interpolation='nearest')
    else:
        resamplabs = resample_to_img(labelimg, labelimg, interpolation='nearest')
        resampmask = resample_to_img(maskimg, labelimg, interpolation='nearest')

    # apply the brain mask to the resampled label image
    from nilearn.masking import apply_mask
    resamplabsmasked = apply_mask(resamplabs, resampmask)

    # get the unique labels list, other than 0, which will be first
    reginparc = np.unique(resamplabsmasked)[1:].astype(int)
    reglabs = list(reginparc.astype(str))

    reginorigparc = np.unique(labelimg.get_fdata())[1:].astype(int)
    if len(reginparc) != len(reginorigparc):
        print('\n !!!WARNING!!! during resampling of the label image, some of '
              'the ROIs (likely very small ones) were interpolated out. Please '
              'take care to note which ROIs are present in the output data.\n')
        print('ALTERNATIVELY, your parcellation may not be in the same space '
              'as the BOLD data.\n')
        if abs(len(reginparc) - len(reginorigparc)) > 9:
            print('\nBASED ON A QUICK HEURISTIC, your parcellation is probably '
                  'not in the right space. Please check that the two images '
                  'are aligned properly.')

    # Extract time series
    time_series = masker.fit_transform(rsimg)

    if nomat:
        connmat = None
        conndf = None
    else:
        connobj = connectome.ConnectivityMeasure(kind=conntype)
        connmat = connobj.fit_transform([time_series])[0]
        conndf = get_con_df(connmat, reglabs)


    # if not saving time series, don't pass anything substantial, save mem
    if not savets:
        time_series = 42

    return conndf, connmat, time_series, reginparc
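# Minimal usage sketch for extract_mat(): the three images are assumed to be NIfTI
# files on disk (paths are placeholders), and get_con_df() is assumed to be importable
# from the same module that defines extract_mat().
import nibabel as nib

rsimg = nib.load('sub-01_task-rest_desc-preproc_bold.nii.gz')   # 4D resting-state BOLD
maskimg = nib.load('sub-01_desc-brain_mask.nii.gz')             # binary brain mask
labelimg = nib.load('my_parcellation.nii.gz')                   # integer-labelled atlas

conndf, connmat, ts, labels = extract_mat(
    rsimg, maskimg, labelimg,
    conntype='correlation',   # any kind accepted by connectome.ConnectivityMeasure
    space='labels',           # resample the BOLD/mask to the label image's grid
    savets=True,              # keep the regional timeseries in the return value
    dtr=True, stdz=True)      # detrend and standardize before extraction
print(conndf.shape, connmat.shape, ts.shape, len(labels))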
Example #29
#subject_list = ['KPE1223']
subject_ts = []
ses = '2'

for sub in subject_list:
    print(f' Analysing subject {sub}')
    subject = sub.split('KPE')[1]
    func = func_template.format(sub=subject, session=ses)
    confound = confound_template.format(sub=subject, session=ses)
    signals = masker.fit_transform(imgs=func, confounds=removeVars(confound))
    subject_ts.append(signals)

#%% generate connectivity matrix
from sklearn.covariance import LedoitWolf
connectome_measure = connectome.ConnectivityMeasure(
    cov_estimator=LedoitWolf(assume_centered=True),
    kind='partial correlation', vectorize=True)

# Vectorized connectomes across subject-specific timeseries
vec = connectome_measure.fit_transform(subject_ts)




 
# %% XGBoost

from xgboost import XGBClassifier
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score
from sklearn.model_selection import StratifiedKFold
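# Minimal sketch of feeding the vectorized connectomes to XGBoost; `labels` below is a
# placeholder array (one entry per subject) and would be replaced by real group labels.
import numpy as np

rng = np.random.default_rng(0)
labels = rng.integers(0, 2, size=vec.shape[0])  # placeholder binary labels

X_train, X_test, y_train, y_test = train_test_split(
    vec, labels, test_size=0.25, random_state=42)
clf = XGBClassifier(n_estimators=100, max_depth=3)
clf.fit(X_train, y_train)
print('test accuracy:', accuracy_score(y_test, clf.predict(X_test)))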
Example #30
dosenbach_timeseries = dosenbach_masker.fit_transform(
    adhd.func[0], confounds=adhd.confounds[0])

###############################################################################
# Extract and plot correlation matrix

for atlas in ['Power', 'Dosenbach']:

    if atlas == 'Power':
        timeseries = power_timeseries
        coords = power_coords
    else:
        timeseries = dosenbach_timeseries
        coords = dosenbach_coords

    connectivity = connectome.ConnectivityMeasure(kind='correlation')
    corr_matrix = connectivity.fit_transform([timeseries])[0]
    np.fill_diagonal(corr_matrix, 0)

    plt.figure()
    vmax = np.max(np.abs(corr_matrix))
    plt.imshow(corr_matrix,
               vmin=-vmax,
               vmax=vmax,
               cmap='RdBu_r',
               interpolation='nearest')
    plt.colorbar()
    plt.title(atlas + ' correlation matrix')

    # Plot the connectome
    plotting.plot_connectome(corr_matrix,