def _connectome_converter(self, catalog=None, confounds=None):
    """Estimate functional connectivity matrices.

    For every parcellation model fitted earlier, the per-subject
    timeseries stored in ``self.subjects_timeseries_`` are converted to
    connectivity coefficients. ``transform()`` (or ``fit_transform()``)
    must have been called beforehand so that
    ``self.subjects_timeseries_`` exists.

    Parameters
    ----------
    catalog : list of str, optional
        Connectivity kinds to estimate (passed as ``kind=`` to
        ``ConnectivityMeasure``). If None, defaults to
        ``['correlation']`` with a warning.
    confounds : csv file, numpy array like, optional
        Confounds to regress out the effect such as gender, age, etc
        from group level connectivity coefficients.

    Returns
    -------
    self : object
        With a new attribute ``connectomes_`` mapping each model name
        to a dict of ``{measure: coefficients}``.

    Raises
    ------
    ValueError
        If ``subjects_timeseries_`` is missing, or if
        ``self.covariance_estimator`` is not a recognized choice.
    """
    if not hasattr(self, 'subjects_timeseries_'):
        raise ValueError("Could not find attribute 'subjects_timeseries_'. "
                         "Make sure to call transform() for subjects "
                         "timeseries signals to connectome matrices.")

    if catalog is None:
        warnings.warn("Catalog for connectivity measure is None. Taking "
                      "kind='correlation'", stacklevel=2)
        catalog = ['correlation']

    self.connectome_measures_ = catalog

    connectomes = dict()
    if len(self.subjects_timeseries_) == len(self.models_):
        for model in self.models_:
            coefs = dict()
            if self.verbose > 0:
                print("[Timeseries signals] Loading data of model '{0}' "
                      .format(model))
            subjects_timeseries = self.subjects_timeseries_[model]
            for measure in catalog:
                if self.verbose > 0:
                    print("[Connectivity Measure] kind='{0}'".format(measure))
                if self.covariance_estimator == 'LedoitWolf':
                    # Ledoit-Wolf shrinkage covariance estimator
                    connections = ConnectivityMeasure(
                        cov_estimator=LedoitWolf(assume_centered=True),
                        kind=measure)
                elif self.covariance_estimator == 'GraphLassoCV':
                    # Sparse inverse covariance via cross-validated
                    # graphical lasso
                    connections = ConnectivityMeasure(
                        cov_estimator=GraphLassoCV(verbose=2),
                        kind=measure)
                else:
                    # BUG FIX: previously an unrecognized estimator left
                    # 'connections' undefined (or stale from a previous
                    # iteration), producing a confusing NameError below.
                    # Fail fast with a clear message instead.
                    raise ValueError(
                        "Invalid covariance estimator: {0!r}. Choose "
                        "'LedoitWolf' or 'GraphLassoCV'."
                        .format(self.covariance_estimator))
                # By default vectorize is True
                if self.connectome_confounds is not None and self.verbose > 0:
                    print("[Connectivity Coefficients] Regression")
                conn_coefs = connections.fit_transform(subjects_timeseries,
                                                       confounds=confounds)
                coefs[measure] = conn_coefs
            # BUG FIX: 'coefs' is a dict and can never be None, so the
            # original 'if coefs is not None' was always true and the
            # empty-coefs warning was dead code. Test emptiness instead.
            if coefs:
                connectomes[model] = coefs
            else:
                warnings.warn("Conn coefs are found empty for model {0}"
                              .format(model))
    self.connectomes_ = connectomes
    return self
# NOTE(review): script fragment relying on names defined elsewhere in the
# file (atlases, subject_ids, hcp_behavioral, timeseries_dir, cv, measures,
# sklearn_classifiers, results, _get_paths). For each atlas, it binarizes
# subjects into high/low groups, then cross-validates several classifiers
# on connectivity coefficients.
for atlas in atlases:
    print("Running predictions: with atlas: {0}".format(atlas))
    # Load per-subject timeseries and the behavioural scores for this atlas.
    timeseries, groups = _get_paths(subject_ids['subject_id'],
                                    hcp_behavioral, atlas, timeseries_dir)
    timeseries = np.array(timeseries)
    groups = np.array(groups)
    # Subjects with score > 20.0 form the positive ("high") group; classes
    # is the binary label vector (0/1) from membership in that group.
    high_iq = groups[np.where(groups > 20.0)]
    _, classes = np.unique(np.isin(groups, high_iq), return_inverse=True)
    # Pre-computed CV splitter 'cv' defined elsewhere in the file.
    iter_for_prediction = cv.split(timeseries, classes)
    for index, (train_index, test_index) in enumerate(iter_for_prediction):
        print("[Cross-validation] Running fold: {0}".format(index))
        for measure in measures:
            print("[Connectivity measure] kind='{0}'".format(measure))
            # Ledoit-Wolf shrinkage covariance for the connectivity kind.
            connections = ConnectivityMeasure(
                cov_estimator=LedoitWolf(assume_centered=True),
                kind=measure)
            conn_coefs = connections.fit_transform(timeseries)
            for est_key in sklearn_classifiers.keys():
                print(
                    'Supervised learning: classification {0}'.format(est_key))
                estimator = sklearn_classifiers[est_key]
                # ROC-AUC on exactly this pre-drawn train/test split
                # (cv=[(train_index, test_index)] evaluates one fold).
                score = cross_val_score(estimator, conn_coefs, classes,
                                        scoring='roc_auc',
                                        cv=[(train_index, test_index)])
                results['atlas'].append(atlas)
                results['iter_shuffle_split'].append(index)
                results['measure'].append(measure)
                results['classifier'].append(est_key)
                # NOTE(review): 'score' is computed but never appended within
                # this visible span — presumably results['scores'].append(score)
                # follows just past the end of this chunk; confirm.
# cv = StratifiedShuffleSplit(n_splits=10, test_size=0.25, random_state=1) cv = StratifiedKFold(n_splits=nfold, shuffle=True, random_state=1) # atlas = atlases[0]#取第一个脑模板 timeseries, diagnosis, _ = _get_paths(phenotypic, atlas, timeseries_dir) _, classes = np.unique(diagnosis, return_inverse=True) iter_for_prediction = cv.split(timeseries, classes) print('Loading cross-valid data') train_index, test_index = next(iter_for_prediction) #由上一句替代#for index, (train_index, test_index) in enumerate(iter_for_prediction): # measure = measures[2] connections = ConnectivityMeasure( cov_estimator=LedoitWolf(assume_centered=True), kind=measure) conn_coefs = connections.fit_transform( timeseries, vectorize=False) #vectorize=True取矩阵上三角, False=全矩阵 def calculate_edge(conn_coefs): meanA = np.mean(conn_coefs, axis=0) edge_index0 = [] edge_index1 = np.repeat(np.arange(0, np.shape(meanA)[0]), knn_k) for vi in range(np.shape(meanA)[0]): vec = meanA[vi, :] minx = np.argsort(vec) chose_minx = minx[-knn_k - 1:-1] edge_index0.extend(chose_minx)