import warnings

import numpy as np
import timecorr as tc
from scipy.spatial.distance import cdist


def generate_data(templates):
    """Draw synthetic data from a hierarchy of correlation templates.

    Works backward from the highest-order template: at each level, correlation
    matrices are sampled using the next-lower-order template as the mean, and
    the observed timeseries is drawn from the lowest-level correlations.
    """
    order = len(templates) + 1
    # generate adjusted templates in reverse order
    adjusted_templates = [templates[-1]]
    next_corrmats = adjusted_templates[-1]
    for n in range(order - 1, 1, -1):
        corrmats = tc.vec2mat(next_corrmats)
        K = corrmats.shape[0]
        sK = int(np.sqrt(K))
        T = corrmats.shape[2]
        draws = np.zeros([sK, sK, T])
        means = tc.vec2mat(templates[n - 2])
        for t in range(T):
            # sample an sK x sK matrix for timepoint t
            draws[:, :, t] = np.reshape(
                np.random.multivariate_normal(means[:, :, t].ravel(),
                                              corrmats[:, :, t]),
                [sK, sK])
        next_corrmats = tc.mat2vec(draws)
        adjusted_templates.append(next_corrmats)

    # draw the observed timeseries from the lowest-order correlation matrices
    corrmats = tc.vec2mat(next_corrmats)
    K = int(corrmats.shape[0])
    T = corrmats.shape[2]
    data = np.zeros([T, K])
    for t in range(T):
        data[t, :] = np.random.multivariate_normal(np.zeros([K]),
                                                   corrmats[:, :, t])

    adjusted_templates.reverse()
    return data, adjusted_templates
def generate_templates_refactor(order=1, cov_list=None, **kwargs):
    """Generate correlation templates up to the given order.

    Each higher-order template is the timepoint-wise Kronecker product of the
    previous template with a freshly simulated one.
    """
    kwargs['return_corrs'] = True
    T = kwargs['T']
    templates = []
    for n in range(order):
        if cov_list:
            kwargs['datagen'] = cov_list[n]
        _, next_template = tc.simulate_data(**kwargs)
        if n >= 1:
            expanded_corrmats_last = tc.vec2mat(templates[n - 1])
            expanded_corrmats_next = tc.vec2mat(next_template)
            # np.kron of an (m x m) and a (k x k) matrix is (m*k x m*k)
            K2 = (expanded_corrmats_last.shape[0]
                  * expanded_corrmats_next.shape[0])
            next_template = np.zeros([K2, K2, T])
            for t in range(T):
                x_last = expanded_corrmats_last[:, :, t]
                x_next = expanded_corrmats_next[:, :, t]
                next_template[:, :, t] = np.kron(x_last, x_next)
            next_template = tc.mat2vec(next_template)
        templates.append(next_template)
    return templates
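# A minimal usage sketch tying the two functions above together (the kwarg
# values are illustrative only; T, K, and datagen are forwarded to
# tc.simulate_data).
def _demo_generate_data():
    templates = generate_templates_refactor(order=2, T=100, K=5,
                                            datagen='block')
    # higher-order templates cover K**2, K**4, ... features
    for i, template in enumerate(templates):
        print(f'order {i + 1} template shape: {template.shape}')
    data, adjusted = generate_data(templates)
    print(data.shape)  # (100, 5): T timepoints by K features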
def ramping_dataset(K, T, *args):
    """Simulate data whose correlations ramp linearly from one random
    correlation matrix toward a distant one."""
    warnings.simplefilter('ignore')

    def dist(a, b):
        return cdist(np.atleast_2d(a), np.atleast_2d(b), 'correlation')

    # random_corrmat (assumed defined elsewhere in this module) returns a
    # random K x K correlation matrix
    a = tc.mat2vec(random_corrmat(K))
    b = tc.mat2vec(random_corrmat(K))
    max_dist = dist(a, b)

    # search for an endpoint that is far from a
    max_iter = 100
    for i in np.arange(max_iter):
        next_b = tc.mat2vec(random_corrmat(K))
        next_dist = dist(a, next_b)
        if next_dist > max_dist:
            b = next_b
            max_dist = next_dist

    # interpolate between the two endpoints over time
    mu = np.linspace(1, 0, T)
    corrs = np.zeros([T, int((K ** 2 - K) / 2 + K)])
    Y = np.zeros([T, K])
    for t in np.arange(T):
        corrs[t, :] = mu[t] * a + (1 - mu[t]) * b
        Y[t, :] = np.random.multivariate_normal(mean=np.zeros([K]) + .1,
                                                cov=tc.vec2mat(corrs[t, :]))
    return Y, corrs
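# Usage sketch (illustrative values): with K = 10 features, the vectorized
# correlations have (K**2 - K) / 2 + K = 55 columns.
def _demo_ramping_dataset():
    Y, corrs = ramping_dataset(10, 100)
    print(Y.shape)      # (100, 10)
    print(corrs.shape)  # (100, 55)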
def random_dataset(K, T, *args):
    warnings.simplefilter('ignore')
    corrs = np.zeros([T, int((K ** 2 - K) / 2 + K)])
    Y = np.zeros([T, K])
    for t in np.arange(T):
        corrs[t, :] = tc.mat2vec(random_corrmat(K))
        Y[t, :] = np.random.multivariate_normal(mean=np.zeros([K]),
                                                cov=tc.vec2mat(corrs[t, :]))
    return Y, corrs
def block_dataset(K, T, B=5):
    """Simulate data whose correlations are constant within each of B blocks."""
    warnings.simplefilter('ignore')
    block_len = int(np.ceil(T / B))  # np.repeat requires an integer count
    corrs = np.zeros([B, int((K ** 2 - K) / 2 + K)])
    Y = np.zeros([T, K])
    for b in np.arange(B):
        corrs[b, :] = tc.mat2vec(random_corrmat(K))
    corrs = np.repeat(corrs, block_len, axis=0)
    corrs = corrs[:T, :]
    for t in np.arange(T):
        Y[t, :] = np.random.multivariate_normal(mean=np.zeros([K]) + .1,
                                                cov=tc.vec2mat(corrs[t, :]))
    return Y, corrs
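# Usage sketch (illustrative values): with T = 100 and B = 5, the
# correlations are constant within each 20-timepoint block.
def _demo_block_dataset():
    Y, corrs = block_dataset(10, 100, B=5)
    print(np.allclose(corrs[0], corrs[19]))   # True: same block
    print(np.allclose(corrs[19], corrs[20]))  # False (almost surely): new block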
def generate_templates(order=1, **kwargs):
    """Generate correlation templates up to the given order by repeatedly
    taking outer products of the vectorized lower-order correlations."""
    kwargs['return_corrs'] = True
    _, next_template = tc.simulate_data(**kwargs)
    T = kwargs['T']
    templates = []
    for n in range(order - 1):
        templates.append(next_template)
        expanded_corrmats = tc.vec2mat(next_template)
        K2 = expanded_corrmats.shape[0] ** 2
        next_template = np.zeros([K2, K2, T])
        for t in range(T):
            # outer product of the flattened correlation matrix with itself
            x = np.atleast_2d(expanded_corrmats[:, :, t].ravel())
            next_template[:, :, t] = x * x.T
        next_template = tc.mat2vec(next_template)
    templates.append(next_template)
    return templates
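# Sketch of how template dimensionality grows with order: vector lengths
# follow (K'**2 - K') / 2 + K' with K' = K, K**2, K**4, ... (the kwarg
# values are illustrative only).
def _demo_generate_templates():
    templates = generate_templates(order=2, T=100, K=5, datagen='block')
    for i, template in enumerate(templates):
        print(f'order {i + 1}: {template.shape}')
    # order 1: (100, 15)  -> K' = 5
    # order 2: (100, 325) -> K' = 25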
def expanded_vec2mat(v):
    """Expand T vectorized correlation matrices into a T x K**2 array of
    fully flattened matrices."""
    m = tc.vec2mat(v)
    x = np.zeros([v.shape[0], m.shape[0] ** 2])
    for t in range(m.shape[2]):
        x[t, :] = m[:, :, t].ravel()
    return x
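# Sketch: for T vectorized K x K correlation matrices, expanded_vec2mat
# returns the T x K**2 array of fully flattened matrices (here T = 10, K = 5,
# using the same random_corrmat helper assumed above).
def _demo_expanded_vec2mat():
    v = tc.mat2vec(np.stack([random_corrmat(5) for _ in range(10)], axis=2))
    x = expanded_vec2mat(v)
    print(v.shape, '->', x.shape)  # (10, 15) -> (10, 25)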
""" ============================= Simulate subject data ============================= In this example, we simulate data """ # Code source: Lucy Owen # License: MIT # load timecorr import timecorr as tc import seaborn as sns # simulate some data data, corrs = tc.simulate_data(datagen='block', return_corrs=True, set_random_seed=True, S=1, T=100, K=10, B=5) # calculate correlations - returned squareformed tc_vec_data = tc.timecorr(tc.simulate_data(), weights_function=tc.gaussian_weights, weights_params={'var': 5}, combine=tc.helpers.corrmean_combine) # convert from vector to matrix format tc_mat_data = tc.vec2mat(tc_vec_data) # plot the 3 correlation matrices different timepoints sns.heatmap(tc_mat_data[:, :, 48]) sns.heatmap(tc_mat_data[:, :, 50]) sns.heatmap(tc_mat_data[:, :, 52])
We'll use a [chord diagram](http://python-graph-gallery.com/chord-diagram/) generated by the [Bokeh](https://docs.bokeh.org/en/latest/index.html) backend of [HoloViews](http://holoviews.org) to visualize the brain connectivity patterns. We'll need to re-format the correlation matrices into DataFrames that describe the set of connections using four columns (there will be a total of $(K^2 - K)/2$ rows in this DataFrame):

- *source*: origin of the connection
- *target*: destination of the connection
- *value*: the strength of the connection
- *sign*: whether the connection is positive (+1) or negative (-1)

```python
def mat2chord(vec, t=0, cthresh=0.25):
    def mat2links(x, ids):
        links = []
        for i in range(x.shape[0]):
            for j in range(i):
                links.append({'source': ids[i],
                              'target': ids[j],
                              'value': np.abs(x[i, j]),
                              'sign': np.sign(x[i, j])})
        return pd.DataFrame(links)

    links = mat2links(tc.vec2mat(vec)[:, :, t], rois['ID'])
    chord = hv.Chord((links, hv.Dataset(rois, 'ID'))).select(value=(cthresh, None))
    chord.opts(
        opts.Chord(cmap='Category20', edge_cmap='Category20',
                   edge_color=dim('source').str(), labels='Region',
                   node_color=dim('ID').str()))
    return chord
```

Here's the chord diagram for the first timepoint:

```python
hmap = mat2chord(isfc, t=0)

# render the chord plot in jupyter-book
html_repr = file_html(pn.Column(hmap).get_root(), CDN)
IPython.display.HTML(html_repr)
```

Now let's create an interactive figure to display the dynamic network patterns, with a slider for controlling the timepoint:
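One approach (a sketch, assuming the `mat2chord` helper above and that `isfc` holds the vectorized dynamic correlations) is to build one chord diagram per timepoint and collect them in an `hv.HoloMap`, which HoloViews/Panel render with a slider over the key dimension. Note that embedding a live slider in static jupyter-book HTML may additionally require Panel's widget-embedding machinery:

```python
# one chord diagram per timepoint, keyed on a 'timepoint' dimension;
# subsampling timepoints keeps the figure responsive
timepoints = range(0, isfc.shape[0], 10)
hmap = hv.HoloMap({t: mat2chord(isfc, t=t) for t in timepoints},
                  kdims='timepoint')

# render with the Bokeh backend, as above
html_repr = file_html(pn.Column(hmap).get_root(), CDN)
IPython.display.HTML(html_repr)
```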