# Sweep over sparsity level (s), number of measurements (m) and noise level
# (sigma); for each cell run `num_trials` random recovery experiments with
# GRASP and record the reconstruction RMSE in `results[i, j, k, l]`.
for i, s in enumerate(s_arr):
    for j, m in enumerate(m_arr):
        print(j)
        for k, sigma in enumerate(var_arr):
            for l in range(num_trials):
                # Draw an s-sparse ground-truth vector and scale it to unit
                # Euclidean norm.
                indices = np.random.choice(n, s, replace=False)
                theta = np.zeros((n, 1))
                theta[indices, :] = np.random.randn(s, 1)
                # BUG FIX: original code divided by np.linalg.norm(2) — the
                # norm of the scalar 2 (always 2.0) — instead of the norm of
                # theta, so theta was never actually normalized.
                theta = theta / np.linalg.norm(theta)
                # Random sensing matrix and noisy linear measurements.
                A = np.random.rand(m, n)
                y = A @ theta + sigma * np.random.randn(m, 1)
                # Stopping tolerance scaled by noise level and signal energy.
                abs_tol = math.sqrt(4 * n) * sigma * np.linalg.norm(theta)
                theta_recon = GRASP(A, y, s, abs_tol)
                results[i, j, k, l] = rmse(theta, theta_recon)

# Aggregate over trials and draw one (m x sigma) heatmap per sparsity level.
results_median = np.median(results, axis=-1)
for i, s in enumerate(s_arr):
    plt.figure()
    plt.title("Sparsity level: " + str(s))
    im = plt.imshow(results_median[i, :, :])
    ax = plt.gca()
    ax.set_xticks(np.arange(len(var_arr)))
    ax.set_yticks(np.arange(len(m_arr)))
    # ... and label them with the respective list entries.
    ax.set_xticklabels(var_arr * 100)
    ax.set_yticklabels(m_arr)
    ax.set_xlabel("Relative Intensity of Noise")
    ax.set_ylabel("Number of measurements")
    for i2 in range(len(m_arr)):
        # NOTE(review): loop body is truncated at this chunk boundary —
        # presumably it annotates the heatmap cells (ax.text per cell).
        # Restore the original body from the rest of the file; `pass` is a
        # placeholder so this chunk stays syntactically valid in isolation.
        pass
# Image-reconstruction demo: recover each column of the image's 1D-DCT
# coefficient matrix from m noisy random measurements via GRASP, then invert
# the transform and display the result next to the original.
data_path = "./data/"
sigma = 0.01

# Load the test image as grayscale (flag 0) and show it.
img_array = cv2.imread(data_path + "clown.bmp", 0)
plt.figure()
plt.title("Original Image")
plt.imshow(img_array, cmap='gray')
plt.show()

# The DCT basis construction below assumes a square image.
assert img_array.shape[0] == img_array.shape[1]
n = img_array.shape[0]
Phi = getDCTBasis(n)
m = 300

theta = Phi.T @ img_array  # Id DCT of columns
theta_recon = np.zeros((n, n))
for col_idx in range(n):
    print(col_idx)
    # Current coefficient column as an (n, 1) vector.
    col = np.expand_dims(theta[:, col_idx], axis=1)
    # Sense in the pixel domain: random matrix composed with the DCT basis.
    A = np.random.rand(m, n) @ Phi
    noise = sigma * np.linalg.norm(col) * np.random.randn(m, 1)
    y = A @ col + noise
    # Stopping tolerance scaled by the noise level and column energy.
    abs_tol = math.sqrt(4 * n) * sigma * np.linalg.norm(col)
    col_recon = GRASP(A, y, 100, abs_tol)
    theta_recon[:, col_idx] = col_recon.squeeze()

img_recon = Phi @ theta_recon  # 1D IDCT of cols
plt.figure()
plt.title("Reconstructed Image")
plt.imshow(img_recon, cmap='gray')
plt.show()
print("Image reconstruction error: ", rmse(theta, theta_recon))