def run_exp(gr0, nr_sampled_supports, nr_gr_in_family=5):
    """Run the l1-recovery experiment on a family of graphs obtained from gr0 by
    repeatedly subdividing the edge (nr_vertices - 1, 0); the mean recovery
    probabilities of every family member are saved to the 'output' folder."""
    gr = gr0
    gr.calc_incidence_matrix()
    inc_matr = gr.get_incidence_matr()
    all_rec_probs = []

    for count in range(nr_gr_in_family):
        sparsity_range = np.arange(1, 1 + gr.nr_edges, 1)
        exp = l1_rec_exp(matr=inc_matr,
                         sparsity_range=sparsity_range,
                         nr_sampled_supports=nr_sampled_supports)
        exp.run()
        rec_probs = exp.get_mean_rec_probs()
        all_rec_probs.append(rec_probs)

        ## Change graph ##
        for _ in range(2):
            gr.subdivide_edge(edge=(gr.nr_vertices - 1, 0))
        gr.calc_incidence_matrix()
        inc_matr = gr.get_incidence_matr()

    ## Save results ##
    utils.save_nparray_with_date(
        data=all_rec_probs,
        file_prefix='TrIT_paper_Inc_Matr_Exp_1_NrS{}'.format(nr_sampled_supports),
        subfolder_name='output')
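
# Minimal usage sketch for run_exp above. gr0 is assumed to be a graph object from
# this package exposing calc_incidence_matrix(), get_incidence_matr(), subdivide_edge(),
# nr_edges and nr_vertices (e.g. a small cycle graph); the constructor name and the
# parameter values below are illustrative placeholders, not prescribed by the code above.
#
#     gr0 = some_graph_class(nr_vertices=10)   # hypothetical constructor
#     run_exp(gr0=gr0, nr_sampled_supports=200, nr_gr_in_family=5)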
def run_exp(par_dict, output_dir='output', save_results=True):
    all_rec_probs = {
        p: {s: [] for s in par_dict['sparsity_range']}
        for p in par_dict['edge_probs_list']
    }
    edge_probs_list = par_dict['edge_probs_list']
    nr_vertices = par_dict['nr_vertices']
    nr_gr_samples = par_dict['nr_gr_samples']
    sparsity_range = par_dict['sparsity_range']
    nr_sampled_supports = par_dict['nr_sampled_supports']

    for edge_prob in edge_probs_list:
        rand_gr = erdos_renyi_graph(nr_vertices=nr_vertices,
                                    edge_probability=edge_prob)
        for _ in range(nr_gr_samples):
            gr = rand_gr.sample()
            inc_matr = gr.get_incidence_matr()
            exp = l1_rec_exp(matr=inc_matr,
                             sparsity_range=sparsity_range,
                             nr_sampled_supports=nr_sampled_supports)
            exp.run()
            rec_probs = exp.get_mean_rec_probs()
            for s in rec_probs:
                all_rec_probs[edge_prob][s].append(rec_probs[s])

    if save_results:
        utils.save_nparray_with_date(
            data=all_rec_probs,
            file_prefix='TrIT_paper_Inc_Matr_Exp_2_NrV{}_NrS{}'.format(
                nr_vertices, nr_sampled_supports),
            subfolder_name=output_dir)
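
# Minimal usage sketch for run_exp above. The par_dict keys are exactly the ones the
# function reads; the values below are illustrative placeholders, not taken from the paper.
if __name__ == '__main__':
    example_pars = {
        'edge_probs_list': [0.3, 0.5, 0.7],
        'nr_vertices': 20,
        'nr_gr_samples': 10,
        'sparsity_range': list(range(1, 11)),
        'nr_sampled_supports': 100,
    }
    run_exp(par_dict=example_pars, output_dir='output', save_results=True)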
def run_experiment(p, m_bar, nr_sampled_supp_sets, sparsity_interval=(1, 1),
                   rec_sensitivity=1e-6):
    """Partial-DFT l1-recovery experiment: for every sparsity level in
    sparsity_interval, sample nr_sampled_supp_sets random supports, solve the
    l1 minimization and record success (1.0) or failure (0.0); the results are
    saved to the 'output' folder."""
    meas_indices = list(range(m_bar + 1)) + list(range(p - m_bar, p))
    sparsity_range = list(range(sparsity_interval[0], sparsity_interval[1] + 1))
    pdft = pDFT(N=p, meas_ind=meas_indices)
    pdft_matr = np.asarray(pdft.get_pdft_matrix())

    results_dict = {}
    results_dict['l1'] = {s: [] for s in sparsity_range}
    results_dict['pdft_matr'] = pdft_matr
    results_dict['params'] = {
        'p': p,
        'm_bar': m_bar,
        'nr_sampled_supp_sets': nr_sampled_supp_sets,
        'sparsity_interval': sparsity_interval
    }

    for s in sparsity_range:
        for _ in range(nr_sampled_supp_sets):
            S = list(np.random.choice(range(p), s, replace=False))

            ## Solve optimization problem ##
            # a0 = datetime.datetime.now()
            x_bar = np.zeros(shape=[p, 1])
            for k in S:
                x_bar[k] = np.random.normal(0.0, 1.0)
            x_bar /= np.linalg.norm(x_bar)

            meas_matr = complex_to_real(pdft_matr)[:, :p]
            obj_func = l1(meas_matr, solver_opts={'maxiter': 1000, 'rr': False})
            b = np.matmul(meas_matr, x_bar)
            x_hat_dict = obj_func.solve(b=b)
            x_hat = x_hat_dict['x']

            if np.linalg.norm(x_hat - x_bar) <= rec_sensitivity:
                results_dict['l1'][s].append(1.0)
            else:
                results_dict['l1'][s].append(0.0)
            # a1 = datetime.datetime.now()
            # print('Solving l1 minimization takes {} microseconds'.format((a1 - a0).microseconds))

    utils.save_nparray_with_date(
        data=results_dict,
        file_prefix='TrIT_paper_pDFT_Exp_1_p{}_mbar{}_nrsamples{}'.format(
            p, m_bar, nr_sampled_supp_sets),
        subfolder_name='output')
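
# Minimal usage sketch for run_experiment above. The values are illustrative
# placeholders (any p, m_bar with 2 * m_bar + 1 <= p keeps the two index ranges in
# meas_indices disjoint); they are not taken from the paper.
if __name__ == '__main__':
    run_experiment(p=64, m_bar=8, nr_sampled_supp_sets=200,
                   sparsity_interval=(1, 10), rec_sensitivity=1e-6)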
def run_par_exp(par_dict, output_dir='output', save_results=True):
    edge_probs_list = par_dict['edge_probs_list']
    results = Parallel(n_jobs=-1)(delayed(the_exp)(edge_prob, par_dict)
                                  for edge_prob in edge_probs_list)

    all_rec_probs = {}
    for idx, edge_prob in enumerate(edge_probs_list):
        all_rec_probs[edge_prob] = results[idx][edge_prob]

    nr_vertices = par_dict['nr_vertices']
    nr_sampled_supports = par_dict['nr_sampled_supports']
    if save_results:
        utils.save_nparray_with_date(
            data=all_rec_probs,
            file_prefix='TrIT_paper_Inc_Matr_Exp_2_NrV{}_NrS{}'.format(
                nr_vertices, nr_sampled_supports),
            subfolder_name=output_dir)
def run_exp(p, nr_exps=2000, recovery_sensitivity=1e-6, calc_l1_exp_bound=False,
            calc_ext_pts_bound=False, save_results=True):
    """For a decreasing range of measurement-set sizes card_omega, record the bound
    p / (2 * (p - card_omega)) and, optionally, an extreme-points bound estimated via
    chordal products and an experimental l1-recovery bound; results are saved and
    passed to plot_results."""
    if p // 4 < 2:
        raise Exception('TrIT_paper_pDFT_Exp_2 :: run_exp : p = {} is too small.'.format(p))

    # card_omega_range = set([p - (p // (2 * k)) for k in range(2, 1 + p // 4)])
    down_step = 2 * max(p // 100, 1)
    card_omega_range = list(range(p - 2, p // 4 - down_step, (-1) * down_step))
    results_dict = {c: {} for c in card_omega_range}
    prev_l1_max_sparsity = p

    for card_omega in card_omega_range:
        m_bar = card_omega // 2
        meas_ind = list(range(m_bar + 1)) + list(range(p - m_bar, p))
        pdft = pDFT(N=p, meas_ind=meas_ind)
        pdft_matr = np.asarray(pdft.get_pdft_matrix())
        meas_matr = complex_to_real(pdft_matr)[:, :p]

        ## Our bound ##
        results_dict[card_omega]['coh_bound'] = p / (2 * (p - card_omega))

        # ## Mutual coherence bound ##
        # mc_val = utils.cal_mutual_coherence(meas_matr)
        # results_dict[card_omega]['mc_bound'] = (1.0 + 1.0 / mc_val) / 2.0
        # results_dict[card_omega]['mc_our_bound'] = (1.0 + np.pi / (p * np.sin(np.pi * card_omega / p))) / 2.0

        ## Extreme-points bound, using chordal products ##
        if calc_ext_pts_bound:
            N = p
            nr_zeros = N - card_omega - 1
            all_max_sparsity = []
            for _ in range(nr_exps):
                zero_indices = list(np.random.choice(range(N), nr_zeros, replace=False))
                ch_p = chordal_prod(N=N)
                ch_p.set_polyn(zero_indices=zero_indices)
                vals = ch_p.get_polyn_vals()
                norms = [np.linalg.norm(z) for z in vals]
                one_norm = sum(norms)
                norms = [k / one_norm for k in norms]
                # Count how many of the largest normalized values are needed to reach mass 0.5.
                sorted_norms = sorted(norms, reverse=True)
                the_sum = sorted_norms[0]
                s = 0
                while the_sum < 0.5:
                    s += 1
                    the_sum += sorted_norms[s]
                all_max_sparsity.append(s)
            results_dict[card_omega]['ext_pts_bound'] = min(all_max_sparsity)

        ## Experimental bound ##
        if calc_l1_exp_bound:
            sparsity_range = range(min(p // 2, prev_l1_max_sparsity + 5), 1, -1)
            results_dict[card_omega]['l1_exp_bound'] = 1
            prev_l1_max_sparsity = 1
            for s in sparsity_range:
                sp = s_sparse(sig_dim=p, s=s)
                fail_flag = False
                for exp_count in range(nr_exps):
                    xbar, _ = sp.random_sample()
                    # xbar = np.abs(xbar)  # Attention here!! This is needed.
                    b = np.matmul(meas_matr, xbar)
                    obj_func = l1(A=meas_matr)
                    try:
                        soln_dict = obj_func.solve(b=b)
                    except Exception:
                        continue
                    xhat = soln_dict['x']
                    xbar = np.reshape(xbar, newshape=xhat.shape)
                    diff_norm = np.linalg.norm(xbar - xhat)
                    if diff_norm > recovery_sensitivity:
                        fail_flag = True
                        break
                if not fail_flag:
                    results_dict[card_omega]['l1_exp_bound'] = s
                    prev_l1_max_sparsity = s
                    print('l1 experiment bound for |\u03A9| = {} is {}.'.format(card_omega, s))
                    break

    if save_results:
        utils.save_nparray_with_date(
            data=results_dict,
            file_prefix='TrIT_paper_pDFT_Exp_2_p{}_NrExp{}'.format(p, nr_exps),
            subfolder_name='output')
    plot_results(res_file=results_dict)
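
# Minimal usage sketch for run_exp above. The only hard requirement in the code is
# p // 4 >= 2; p and nr_exps below are illustrative placeholders (small, so both
# optional bounds finish quickly), not values used in the paper.
if __name__ == '__main__':
    run_exp(p=64, nr_exps=100, calc_l1_exp_bound=True, calc_ext_pts_bound=True)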
def run_exp(config_file_path=''):
    ###################
    ## Configuration ##
    ###################
    config = configparser.ConfigParser()
    config.read(config_file_path)

    mat_is_rand = config['meas_matrix'].getboolean('random')
    A = None
    matr_density = None
    if mat_is_rand:
        m = config['meas_matrix'].getint('nr_rows')
        n = config['meas_matrix'].getint('nr_cols')
        nr_matrices = config['meas_matrix'].getint('nr_matrices')
        matr_density = config['meas_matrix'].getfloat('density')  # Should crash here!
    else:
        A = config['meas_matrix']['A']
        A = utils.config_str_to_np_array(A)
        m = A.shape[0]
        n = A.shape[1]
        nr_matrices = 1

    max_s = config['sparsity'].getint('max_sparsity')
    nr_trials = config['experiment'].getint('nr_trials')
    obj_funcs = [x.strip() for x in config['optimization']['obj_funcs'].split(",")]
    sparsity_type = config['sparsity']['type']

    ####################
    ## The Experiment ##
    ####################
    test_sp_range = range(1, max_s + 1)
    # obj_func_res = {obj_f: [[] for i in test_sp_range] for obj_f in obj_funcs}
    res_dict = {}
    for mat_nr in range(nr_matrices):
        if mat_is_rand:
            A = sp_random(m=m, n=n, density=matr_density).A
            A = np.asarray(A)
        matr_str = 'meas_matr_{}'.format(mat_nr)
        res_dict[matr_str] = [A, {}]
        for sparsity in test_sp_range:
            print('Matrix nr: {:>2}. {}: {:>2}.'.format(mat_nr, sparsity_type, sparsity))
            sp_str = sparsity_type + '={}'.format(sparsity)
            res_dict[matr_str][1][sp_str] = {}
            sp = sp_factory(obj_str=sparsity_type, sig_dim=n, config=config,
                            s=sparsity).run()
            obj_func_classes = {
                obj_f: obj_func_factory(obj_str=obj_f, A=A, groups=sp.groups()).run()
                for obj_f in obj_funcs
            }
            # sp_res = {obj_f: [] for obj_f in obj_funcs}
            for trial in range(nr_trials):
                print('.', sep=' ', end='', flush=True)
                [x_bar, _] = sp.random_sample()
                b = np.matmul(A, x_bar)
                x_hat_dict = {
                    obj_f: obj_func_classes[obj_f].solve(b=b)
                    for obj_f in obj_funcs
                }
                res_dict[matr_str][1][sp_str][tuple(x_bar)] = x_hat_dict
            # for obj_f in obj_funcs:
            #     obj_func_res[obj_f][sparsity - 1] += sp_res[obj_f]
            print('\n')

    utils.save_nparray_with_date(data=res_dict,
                                 file_prefix='Norm_comp_exp',
                                 subfolder_name='output')
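
# Minimal configuration sketch for run_exp above, listing only the sections and keys
# the function reads directly; the valid values for 'type' and 'obj_funcs' depend on
# sp_factory and obj_func_factory, and the sparsity/objective factories may expect
# additional keys. All values and the file name below are illustrative placeholders.
#
#     [meas_matrix]
#     random = yes
#     nr_rows = 20
#     nr_cols = 50
#     nr_matrices = 3
#     density = 0.2
#
#     [sparsity]
#     type = <sparsity model name expected by sp_factory>
#     max_sparsity = 10
#
#     [experiment]
#     nr_trials = 50
#
#     [optimization]
#     obj_funcs = <comma-separated names expected by obj_func_factory>
#
#     run_exp(config_file_path='norm_comp_config.ini')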