def take_samples(G, C, M, k, p, num_samples):
    """Draw ``num_samples`` random-angle QAOA evaluations, normalized by the
    brute-force optimum.

    The first ``p`` angles (gammas) are sampled uniformly from [0, 2*pi),
    the last ``p`` (betas) from [0, pi/2).  Every 100 samples a progress
    line with the running mean / std / CI half-width is printed.

    Returns the list of ``value / best_sol`` ratios.
    """
    low_g, up_g = 0, 2 * pi        # gamma sampling range
    low_b, up_b = 0, pi / 2        # beta sampling range
    best_sol = common.brute_force(G, k)   # exact optimum used for normalization
    samples = []
    for i in range(num_samples):
        angles = [
            random.uniform(low_g, up_g) if j < p else random.uniform(low_b, up_b)
            for j in range(2 * p)
        ]
        # qaoa returns a negated expectation (minimizer convention); flip it back.
        value = -qaoa(angles, G, C, M, k, p)
        samples.append(value / best_sol)
        if (i + 1) % 100 == 0:
            # Hoist the statistics: the original recomputed np.std(samples)
            # three times (and np.average once) inside a single print call.
            avg = np.average(samples)
            std = np.std(samples)
            err = 1.96 * std / np.sqrt(i + 1)
            # NOTE(review): the std-error term uses num_samples (not i + 1),
            # exactly as the original did — confirm this is intentional.
            std_err = np.sqrt(1.96 * np.sqrt(2 / num_samples) * std ** 2)
            print('\ti: ' + str(i + 1) + '\tavg: ' + str(avg)
                  + '\tstd: ' + str(std)
                  + '\terr: ' + str(err)
                  + '\tstd_err: ' + str(std_err))
    return samples
return best if __name__ == '__main__': #indices = np.linspace(rank*s_per_rank, (rank+1)*s_per_rank-1, s_per_rank) gi = 91 s_per_rank = 100 z = 2.576 # z* for 99% confidence interval max_p = 6 max_exp, max_std, error = [], [], [] for p in range(1, max_p + 1): # do work over indices rank_exp = work(gi, p, s_per_rank) # gather best exp data = None if rank == 0: data = np.empty(size, dtype='d') comm.Gather(rank_exp, data, root=0) if rank == 0: best = common.brute_force(gi) data = [x / best for x in data] max_exp.append(np.average(data)) max_std.append(np.std(data)) error.append(z * np.std(data) / np.sqrt(size)) pickle.dump([ gi, [i + 1 for i in range(p)], max_exp, max_std, error, s_per_rank, size ], open('data/' + str(gi) + '.complete', 'wb'))
disp=False, niter_success=10) if -optimal.fun > best_exp: best_exp = -optimal.fun best_angles = optimal.x return best_exp, best_angles if __name__ == '__main__': random.seed(1) seed = random.randint(1, 1000) + rank random.seed(seed) num_nodes = 6 k = int(num_nodes / 2) G = nx.fast_gnp_random_graph(num_nodes, 0.5) while not nx.is_connected(G): G = nx.fast_gnp_random_graph(num_nodes, 0.5) C = common.create_C(G, k) M = common.create_complete_M(num_nodes, k) best_sol = common.brute_force(G, k) all_exps, all_angles = [], [] for p in range(3, 9): rank_exp, rank_angles = get_angles(G, C, M, k, p) all_exps.append(rank_exp / best_sol) all_angles.append(rank_angles) pickle.dump([all_exps, all_angles], open('interpolation/complete-6/' + str(seed) + '.seed', 'wb'))
disp=False) data.append(-optimal.fun) return data if __name__ == '__main__': random.seed(10 + rank) gi = random.randint(163, 955) print('aquired gi: ' + str(gi) + ' from: ' + str(rank)) s_per_rank = 2 max_p = 2 z = 2.576 # z* for 99% confidence interval max_exp, max_std, error = [], [], [] for p in range(1, max_p + 1): # do work over indices best_exps = work(gi, p, s_per_rank) print(best_exps) brute = common.brute_force(gi) best = np.average(best_exps) std = np.std(best_exps) approx = best / brute max_exp.append(approx) error.append(z * np.std(best_exps) / np.sqrt(s_per_rank)) print('gi: ' + str(gi) + ', max_exp: ' + str(max_exp) + ', error: ' + str(error)) pickle.dump([max_exp, error], open('init-complete/' + str(gi) + '.mpi', 'wb')) #pickle.dump([gi, [i+1 for i in range(p)], max_exp, max_std, error, s_per_rank, size], open('data/' + str(gi) + '.mpi-k', 'wb'))