def ramping_dataset(K, T, *args):
    """Simulate a dataset whose correlation structure ramps linearly.

    Two random K-by-K correlation matrices are drawn; the second is
    re-sampled up to 100 times to maximize its correlation distance from
    the first.  The per-timepoint correlation is a linear blend from the
    first matrix (t = 0) to the second (t = T - 1), and each observation
    is drawn from a multivariate normal with that covariance.

    Parameters
    ----------
    K : int
        Number of features.
    T : int
        Number of timepoints.
    *args
        Ignored (kept for signature compatibility with sibling datasets).

    Returns
    -------
    Y : np.ndarray, shape (T, K)
        Simulated observations.
    corrs : np.ndarray, shape (T, (K**2 - K) / 2 + K)
        Vectorized per-timepoint correlation matrices.
    """
    warnings.simplefilter('ignore')

    def dist(u, v):
        # correlation distance between two vectorized corrmats
        return cdist(np.atleast_2d(u), np.atleast_2d(v), 'correlation')

    a = tc.mat2vec(random_corrmat(K))
    b = tc.mat2vec(random_corrmat(K))
    max_dist = dist(a, b)

    # Re-sample b up to 100 times, keeping the farthest candidate from a.
    for _ in range(100):
        candidate = tc.mat2vec(random_corrmat(K))
        candidate_dist = dist(a, candidate)
        if candidate_dist > max_dist:
            b = candidate
            max_dist = candidate_dist

    mu = np.linspace(1, 0, T)  # blend weight: 1 -> pure a, 0 -> pure b
    n_vec = int((K ** 2 - K) / 2 + K)
    corrs = np.zeros([T, n_vec])
    Y = np.zeros([T, K])
    for t in range(T):
        corrs[t, :] = mu[t] * a + (1 - mu[t]) * b
        Y[t, :] = np.random.multivariate_normal(
            mean=np.zeros([K]) + .1, cov=tc.vec2mat(corrs[t, :]))
    return Y, corrs
def generate_templates_refactor(order=1, cov_list=None, **kwargs):
    """Generate a hierarchy of correlation templates of increasing order.

    The order-0 template is drawn directly from ``tc.simulate_data``.  Each
    higher-order template is built, timepoint by timepoint, as the Kronecker
    product of the previous template's correlation matrix with a freshly
    simulated one, then re-vectorized.

    Parameters
    ----------
    order : int
        Number of templates to generate (hierarchy depth).
    cov_list : list or None
        Optional per-level data generators; ``cov_list[n]`` is passed to
        ``tc.simulate_data`` as ``datagen`` for level ``n``.
    **kwargs
        Forwarded to ``tc.simulate_data``; must include ``'T'`` (number of
        timepoints).  ``'return_corrs'`` is forced to True.

    Returns
    -------
    templates : list of np.ndarray
        One vectorized (T x n_vec) template per level.
    """
    kwargs['return_corrs'] = True
    T = kwargs['T']
    templates = []
    for n in range(order):
        print(n)  # progress trace (kept from original)
        if cov_list:
            kwargs['datagen'] = cov_list[n]
        _, next_template = tc.simulate_data(**kwargs)
        if n >= 1:
            expanded_corrmats_last = tc.vec2mat(templates[n - 1])
            expanded_corrmats_next = tc.vec2mat(next_template)
            # BUG FIX: np.kron of a (P x P) and a (Q x Q) matrix is
            # (P*Q x P*Q).  The original sized the output as P**2, which
            # only happens to be correct when P == Q (i.e. order <= 2) and
            # raises a broadcast error for deeper hierarchies.
            K2 = (expanded_corrmats_last.shape[0]
                  * expanded_corrmats_next.shape[0])
            next_template = np.zeros([K2, K2, T])
            for t in range(T):
                x_last = expanded_corrmats_last[:, :, t]
                x_next = expanded_corrmats_next[:, :, t]
                next_template[:, :, t] = np.kron(x_last, x_next)
            next_template = tc.mat2vec(next_template)
        templates.append(next_template)
    return templates
def generate_data(templates):
    """Draw a dataset from a hierarchy of correlation templates.

    Walks the template hierarchy from the highest order down, sampling each
    lower-order correlation structure from a multivariate normal whose mean
    is the corresponding template and whose covariance is the level above.
    Finally draws (T x K) observations from the lowest-level structure.

    NOTE(review): this function is redefined later in the file; the later
    definition shadows this one at import time — confirm which is intended.

    Parameters
    ----------
    templates : list of np.ndarray
        Vectorized templates, lowest order first (as produced by the
        template generators in this module).

    Returns
    -------
    data : np.ndarray, shape (T, K)
        Simulated observations.
    adjusted_templates : list of np.ndarray
        Sampled per-level correlation structures, lowest order first.
    """
    order = len(templates) + 1
    # Build adjusted templates in reverse (highest order first); the list
    # is flipped back to ascending order before returning.
    adjusted_templates = [templates[-1]]
    next_corrmats = adjusted_templates[-1]
    for n in range(order - 1, 1, -1):
        print(n)  # progress trace (kept from original)
        corrmats = tc.vec2mat(next_corrmats)
        K = corrmats.shape[0]
        sK = int(np.sqrt(K))  # side length of the next-lower-order matrices
        T = corrmats.shape[2]
        draws = np.zeros([sK, sK, T])
        means = tc.vec2mat(templates[n - 2])
        for t in range(T):
            sample = np.random.multivariate_normal(
                means[:, :, t].ravel(), corrmats[:, :, t])
            draws[:, :, t] = sample.reshape(sK, sK)
        next_corrmats = tc.mat2vec(draws)
        adjusted_templates.append(next_corrmats)
    corrmats = tc.vec2mat(next_corrmats)
    K = int(corrmats.shape[0])
    T = corrmats.shape[2]
    data = np.zeros([T, K])
    for t in range(T):
        data[t, :] = np.random.multivariate_normal(
            np.zeros([K]), corrmats[:, :, t])
    adjusted_templates.reverse()
    return data, adjusted_templates
def constant_dataset(K, T, *args):
    """Simulate a dataset with a single, time-invariant correlation matrix.

    Parameters
    ----------
    K : int
        Number of features.
    T : int
        Number of timepoints.
    *args
        Ignored (kept for signature compatibility with sibling datasets).

    Returns
    -------
    Y : np.ndarray, shape (T, K)
        Observations drawn i.i.d. from one multivariate normal.
    corrs : np.ndarray, shape (T, (K**2 - K) / 2 + K)
        The same vectorized correlation matrix tiled across all timepoints.
    """
    warnings.simplefilter('ignore')
    C = random_corrmat(K)
    corrs = np.tile(tc.mat2vec(C), [T, 1])
    mean = np.zeros([K]) + .1
    Y = np.random.multivariate_normal(mean=mean, cov=C, size=T)
    return Y, corrs
def random_dataset(K, T, *args):
    """Simulate a dataset with an independent correlation matrix per timepoint.

    Parameters
    ----------
    K : int
        Number of features.
    T : int
        Number of timepoints.
    *args
        Ignored (kept for signature compatibility with sibling datasets).

    Returns
    -------
    Y : np.ndarray, shape (T, K)
        One zero-mean multivariate-normal draw per timepoint.
    corrs : np.ndarray, shape (T, (K**2 - K) / 2 + K)
        Vectorized per-timepoint correlation matrices.
    """
    warnings.simplefilter('ignore')
    n_vec = int((K ** 2 - K) / 2 + K)
    corrs = np.zeros([T, n_vec])
    Y = np.zeros([T, K])
    for t in range(T):
        corrs[t, :] = tc.mat2vec(random_corrmat(K))
        Y[t, :] = np.random.multivariate_normal(
            mean=np.zeros([K]), cov=tc.vec2mat(corrs[t, :]))
    return Y, corrs
def block_dataset(K, T, B=5):
    """Simulate a dataset whose correlation structure changes in B blocks.

    B random correlation matrices are drawn; each is held constant for
    ceil(T / B) consecutive timepoints (the final block is truncated so the
    total is exactly T).  One multivariate-normal observation is drawn per
    timepoint.

    Parameters
    ----------
    K : int
        Number of features.
    T : int
        Number of timepoints.
    B : int, optional
        Number of blocks (default 5).

    Returns
    -------
    Y : np.ndarray, shape (T, K)
        Simulated observations.
    corrs : np.ndarray, shape (T, (K**2 - K) / 2 + K)
        Vectorized per-timepoint correlation matrices.
    """
    warnings.simplefilter('ignore')
    # BUG FIX: np.ceil returns a float, but np.repeat requires an integer
    # repeat count (a float raises TypeError on modern NumPy).
    block_len = int(np.ceil(T / B))
    corrs = np.zeros([B, int((K ** 2 - K) / 2 + K)])
    Y = np.zeros([T, K])
    for b in np.arange(B):
        corrs[b, :] = tc.mat2vec(random_corrmat(K))
    corrs = np.repeat(corrs, block_len, axis=0)
    corrs = corrs[:T, :]  # trim to exactly T timepoints
    for t in np.arange(T):
        Y[t, :] = np.random.multivariate_normal(
            mean=np.zeros([K]) + .1, cov=tc.vec2mat(corrs[t, :]))
    return Y, corrs
def generate_templates(order=1, **kwargs):
    """Generate a hierarchy of templates via per-timepoint outer products.

    The base template comes from ``tc.simulate_data``.  Each higher-order
    template is built by flattening every timepoint's correlation matrix
    and taking its outer product with itself (a rank-1 K**2 x K**2 matrix),
    then re-vectorizing.

    Parameters
    ----------
    order : int
        Number of templates to generate (hierarchy depth).
    **kwargs
        Forwarded to ``tc.simulate_data``; must include ``'T'`` (number of
        timepoints).  ``'return_corrs'`` is forced to True.

    Returns
    -------
    templates : list of np.ndarray
        One vectorized (T x n_vec) template per level.
    """
    kwargs['return_corrs'] = True
    _, next_template = tc.simulate_data(**kwargs)
    T = kwargs['T']
    templates = []
    for _ in range(order - 1):
        templates.append(next_template)
        expanded = tc.vec2mat(next_template)
        K2 = expanded.shape[0] ** 2
        lifted = np.zeros([K2, K2, T])
        for t in range(T):
            flat = np.atleast_2d(expanded[:, :, t].ravel())
            lifted[:, :, t] = flat * flat.T  # rank-1 outer product
        next_template = tc.mat2vec(lifted)
    templates.append(next_template)
    return templates
def generate_data(templates):
    """Draw a dataset from a hierarchy of correlation templates.

    Walks the template hierarchy from the highest order down, sampling each
    lower-order correlation structure from a multivariate normal whose mean
    is the corresponding template and whose covariance is the level above.
    Finally draws (T x K) observations from the lowest-level structure.

    NOTE(review): this redefines ``generate_data`` and shadows the earlier
    definition in this file — confirm which copy is intended and remove the
    other.  A large block of commented-out Cholesky/heatmap scratch code
    was deleted here; recover it from version control if still needed.

    Parameters
    ----------
    templates : list of np.ndarray
        Vectorized templates, lowest order first (as produced by the
        template generators in this module).

    Returns
    -------
    data : np.ndarray, shape (T, K)
        Simulated observations.
    adjusted_templates : list of np.ndarray
        Sampled per-level correlation structures, lowest order first.
    """
    order = len(templates) + 1
    # Build adjusted templates in reverse (highest order first); the list
    # is flipped back to ascending order before returning.
    adjusted_templates = [templates[-1]]
    next_corrmats = adjusted_templates[-1]
    for n in range(order - 1, 1, -1):
        print(n)  # progress trace (kept from original)
        corrmats = tc.vec2mat(next_corrmats)
        K = corrmats.shape[0]
        sK = int(np.sqrt(K))  # side length of the next-lower-order matrices
        T = corrmats.shape[2]
        draws = np.zeros([sK, sK, T])
        means = tc.vec2mat(templates[n - 2])
        for t in range(T):
            draws[:, :, t] = np.reshape(
                np.random.multivariate_normal(means[:, :, t].ravel(),
                                              corrmats[:, :, t]),
                [sK, sK])
        next_corrmats = tc.mat2vec(draws)
        adjusted_templates.append(next_corrmats)
    corrmats = tc.vec2mat(next_corrmats)
    K = int(corrmats.shape[0])
    T = corrmats.shape[2]
    data = np.zeros([T, K])
    for t in range(T):
        data[t, :] = np.random.multivariate_normal(np.zeros([K]),
                                                   corrmats[:, :, t])
    adjusted_templates.reverse()
    return data, adjusted_templates