import os
from time import time

import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import scipy.stats
from scipy import stats

# make_data_1d, synchronize_1d, synchronize_and_match_1d, em_1d and
# relative_error_1d are assumed to be defined elsewhere in this repo.


def ppm_experiment_1d(seed, sigma, L, b, Ncopy, use_signal_prior):
    """Baseline: estimate the signal by plain averaging after PPM synchronization."""
    np.random.seed(seed)

    # Generate MRA measurements.
    y, s, n, x = make_data_1d(L, Ncopy, sigma, b)

    # Synchronize the measurements and average the aligned copies.
    # (Do not shadow the true shifts s with the estimates.)
    t = time()
    y_s, s_est = synchronize_1d(y, method='ppm')
    x_init = np.expand_dims(np.mean(y_s, axis=1), axis=-1)
    t_ppm = time() - t
    err_ppm = relative_error_1d(x_init, x)

    # DataFrame.append was removed in pandas 2.0; build the frame directly.
    results = pd.DataFrame([{
        'use_signal_prior': use_signal_prior,
        'L': L,
        'b': b,
        'sigma': sigma,
        'seed': seed,
        'N': Ncopy,
        'err': err_ppm,
        'num_iter': 0,
        't': t_ppm,
    }])
    return results
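# Usage sketch (an assumption, not part of the original experiments): sweep
# random seeds for the baseline PPM experiment and stack the per-seed
# single-row result frames. The helper name and parameter values are
# illustrative only.
def run_ppm_sweep(n_seeds=10, sigma=1.0, L=21, b=0, Ncopy=1000):
    frames = [ppm_experiment_1d(seed, sigma, L, b, Ncopy, use_signal_prior=False)
              for seed in range(n_seeds)]
    return pd.concat(frames, ignore_index=True)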
def ppm_synch_em_experiment(seed, sigma, L, b, Ncopy, use_signal_prior):
    """Run EM refinement starting from the PPM-synchronized average."""
    np.random.seed(seed)
    n_iter = 1000
    tol = 1e-7

    # Generate MRA measurements.
    y, s, n, x = make_data_1d(L, Ncopy, sigma, b)
    b_prior = b if use_signal_prior else None

    # Synchronize, then initialize EM from the mean of the aligned copies.
    y_s, s_est = synchronize_1d(y, method='ppm')
    x_init = np.expand_dims(np.mean(y_s, axis=1), axis=-1)

    print('start em processing')
    t = time()
    x_est_em, rho_est_em, num_iter_em = em_1d(
        y_s, sigma, n_iter, tol, x_init,
        b=b_prior, rho_prior=None, uniform=False)
    t_em = time() - t
    err_em = relative_error_1d(x_est_em, x)

    # Empirical distribution of the residual synchronization errors
    # (diagnostic only; not used further in this experiment).
    h, bin_edges = np.histogram((s_est - s) % L, bins=np.arange(-0.5, L + 0.5))
    measured_rho = h / np.sum(h)

    results = pd.DataFrame([{
        'use_signal_prior': use_signal_prior,
        'L': L,
        'b': b,
        'sigma': sigma,
        'seed': seed,
        'N': Ncopy,
        'err': err_em,
        'num_iter': num_iter_em,
        't': t_em,
    }])
    return results
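# The histogram above estimates the distribution of residual shift errors
# (s_est - s) mod L that remain after synchronization. A self-contained
# sketch of that step, with a hypothetical helper name:
def estimate_shift_error_distribution(s_est, s_true, L):
    """Empirical probability of each residual shift error in {0, ..., L-1}."""
    # L bins centered on the integers 0, 1, ..., L-1.
    h, _ = np.histogram((np.asarray(s_est) - np.asarray(s_true)) % L,
                        bins=np.arange(-0.5, L + 0.5))
    return h / h.sum()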
def synchronize_and_match_em_experiment(seed, sigma, L, b, Ncopy, use_signal_prior, P):
    """Run EM refinement starting from the synchronize-and-match average."""
    np.random.seed(seed)
    n_iter = 1000
    tol = 1e-7

    # Generate MRA measurements.
    y, s, n, x = make_data_1d(L, Ncopy, sigma, b)
    b_prior = b if use_signal_prior else None

    # Align with synchronize-and-match (do not shadow the true shifts s).
    y_s, s_est, _, _ = synchronize_and_match_1d(y, P)
    x_init = np.expand_dims(np.mean(y_s, axis=1), axis=-1)

    print('start em processing')
    t = time()
    x_est_em, rho_est_em, num_iter_em = em_1d(
        y_s, sigma, n_iter, tol, x_init,
        b=b_prior, rho_prior=None, uniform=False)
    t_em = time() - t
    err_em = relative_error_1d(x_est_em, x)

    results = pd.DataFrame([{
        'use_signal_prior': use_signal_prior,
        'L': L,
        'b': b,
        'sigma': sigma,
        'seed': seed,
        'N': Ncopy,
        'err': err_em,
        'num_iter': num_iter_em,
        't': t_em,
    }])
    return results
def synch_em_1d_experiment(seed, sigma, L, b, Ncopy, use_signal_prior, P, gamma):
    """Run EM with an empirical shift-error distribution as the rho prior."""
    np.random.seed(seed)
    n_iter = 1000
    tol = 1e-7

    # Generate MRA measurements.
    y, s, n, x = make_data_1d(L, Ncopy, sigma, b)
    b_prior = b if use_signal_prior else None

    t = time()
    y_s, s_est, _, _ = synchronize_and_match_1d(y, P)
    # y_s, s_est = synchronize_1d(y, method='ppm')
    t_synch = time() - t
    x_init = np.expand_dims(np.mean(y_s, axis=1), axis=-1)

    # Empirical distribution of the residual synchronization errors,
    # passed to EM as the shift prior (weighted by gamma).
    h, bin_edges = np.histogram((s_est - s) % L, bins=np.arange(-0.5, L + 0.5))
    measured_rho = h / np.sum(h)

    print('start synch em 1d processing')
    t = time()
    x_est_em, rho_est_em, num_iter_em = em_1d(
        y_s, sigma, n_iter, tol, x_init,
        b=b_prior, rho_prior=(measured_rho, gamma), uniform=False)
    t_em = time() - t
    err_em = relative_error_1d(x_est_em, x)

    # Save a plot of the estimated shift distribution.
    os.makedirs('est_dist', exist_ok=True)
    plt.plot(measured_rho)
    plt.xlabel('shift')
    plt.ylabel('Probability')
    plt.savefig('est_dist/rho_est_L_%d_sigma_%.2f_N_%d_b_%d_seed_%d_SNR_%.2f.png'
                % (L, sigma, Ncopy, b, seed, 1 / sigma**2))
    plt.clf()

    results = pd.DataFrame([{
        'use_signal_prior': use_signal_prior,
        'L': L,
        'b': b,
        'sigma': sigma,
        'seed': seed,
        'N': Ncopy,
        'err': err_em,
        'num_iter': num_iter_em,
        't': t_synch + t_em,
    }])
    return results
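# A driver sketch (assumed, not from the original code) that runs the four
# experiment variants above at a single operating point and concatenates
# their single-row result frames for side-by-side comparison. The helper
# name and parameter values are illustrative only.
def compare_methods_1d(seed=0, sigma=1.0, L=21, b=0, Ncopy=1000, P=100, gamma=0.5):
    frames = [
        ppm_experiment_1d(seed, sigma, L, b, Ncopy, use_signal_prior=False),
        ppm_synch_em_experiment(seed, sigma, L, b, Ncopy, use_signal_prior=False),
        synchronize_and_match_em_experiment(seed, sigma, L, b, Ncopy,
                                            use_signal_prior=False, P=P),
        synch_em_1d_experiment(seed, sigma, L, b, Ncopy,
                               use_signal_prior=False, P=P, gamma=gamma),
    ]
    return pd.concat(frames, ignore_index=True)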
def pearson_test():
    """Histogram the Pearson correlations between signal and noise entries
    before and after synchronization, to visualize the signal-noise
    correlations that alignment induces."""
    np.random.seed(1)
    R = 10          # number of repetitions
    sigma = 2
    L = 21
    N = 1000
    P = 100

    r_vec, pval_vec = [], []
    r_vec2, pval_vec2 = [], []
    r_vec3, pval_vec3 = [], []

    # Under the null hypothesis, Pearson's r for N samples follows a
    # Beta(N/2 - 1, N/2 - 1) distribution rescaled to [-1, 1]; scan a grid
    # for the two-sided critical value at level p_critical.
    dist = scipy.stats.beta(N / 2 - 1, N / 2 - 1, loc=-1, scale=2)
    r_grid = np.arange(0, 1, 0.001)
    p = 2 * dist.cdf(-np.abs(r_grid))
    p_critical = 0.05
    r_critical = r_grid[p <= p_critical][0]

    for q in range(R):
        y, s, n, x = make_data_1d(L, N, sigma, 0)
        y_s_new, s_est_new = synchronize_1d(y, method='ppm')
        y_s_new2, s_est_new2, y_s_old, s_est_old = synchronize_and_match_1d(y, P=P)
        s_est_new = np.asarray(s_est_new, dtype=int)
        s_est_new2 = np.asarray(s_est_new2, dtype=int)

        # Undo the estimated shifts on the noise and apply the residual
        # shift error to the signal, so entries can be correlated directly.
        n_shifted = np.zeros((L, N))
        n_shifted2 = np.zeros((L, N))
        x_shifted = np.zeros((L, N))
        x_shifted2 = np.zeros((L, N))
        x_only = np.zeros((L, N))
        for i in range(N):
            n_shifted[:, i] = np.roll(n[:, i], -s_est_new[i])
            n_shifted2[:, i] = np.roll(n[:, i], -s_est_new2[i])
            x_shifted[:, i] = np.roll(x, s[i] - s_est_new[i])
            x_shifted2[:, i] = np.roll(x, s[i] - s_est_new2[i])
            x_only[:, i] = np.roll(x, s[i])

        # Correlate every signal row with every noise row.
        for i in range(L):
            for j in range(L):
                r, pval = stats.pearsonr(x_only[i], n[j])
                r_vec.append(r)
                pval_vec.append(pval)
                r, pval = stats.pearsonr(x_shifted[i], n_shifted[j])
                r_vec2.append(r)
                pval_vec2.append(pval)
                r, pval = stats.pearsonr(x_shifted2[i], n_shifted2[j])
                r_vec3.append(r)
                pval_vec3.append(pval)

    rmin = -0.25
    dr = 0.01
    rmax = np.abs(rmin) + dr
    weights = np.ones_like(r_vec) / len(r_vec)
    weights2 = np.ones_like(r_vec2) / len(r_vec2)
    weights3 = np.ones_like(r_vec3) / len(r_vec3)
    alpha = 1
    plt.hist(r_vec2, bins=np.arange(rmin, rmax, dr), alpha=alpha, weights=weights2)
    plt.hist(r_vec3, bins=np.arange(rmin, rmax, dr), alpha=alpha, weights=weights3)
    plt.hist(r_vec, bins=np.arange(rmin, rmax, dr), alpha=alpha, weights=weights)
    plt.xlabel('Pearson correlation coefficient')
    plt.ylabel('Probability')
    plt.legend(['After Synchronization', 'After Synchronize and Match',
                'Before Synchronization'], fontsize='small')
    plt.axvline(r_critical, color='k', linestyle='dashed', linewidth=1)
    plt.axvline(-r_critical, color='k', linestyle='dashed', linewidth=1)

    os.makedirs('1d_figures', exist_ok=True)
    plt.savefig('1d_figures/pearson_test_1d_N_%d_R_%d_sigma_%.2f_L_%d_pval_%.4f_.png'
                % (N, R, sigma, L, p_critical))
    plt.savefig('1d_figures/pearson_test_1d_N_%d_R_%d_sigma_%.2f_L_%d_pval_%.4f.eps'
                % (N, R, sigma, L, p_critical))
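# Sketch of the critical-value computation used above (a hypothetical helper,
# not from the original code): since Pearson's r under the null is
# Beta(N/2 - 1, N/2 - 1) rescaled to [-1, 1], the two-sided critical value at
# level p_critical can be read off the inverse CDF directly, with no grid scan.
def pearson_r_critical(N, p_critical=0.05):
    dist = scipy.stats.beta(N / 2 - 1, N / 2 - 1, loc=-1, scale=2)
    # Two-sided test: p_critical / 2 probability mass in each tail.
    return -dist.ppf(p_critical / 2)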