def make_histogram(parameters, version=2):
    """Plot and save a histogram of run fidelities.

    Locates the data file described by `parameters`, parses it with the
    given format `version`, and writes a 200-bin fidelity histogram to
    histogram/hist.pdf.
    """
    utils = UTILS()
    data_file = utils.make_file_name(parameters, root="../data/")
    results = parse_data(data_file, v=version)
    plt.hist(results['F'], bins=200)
    plt.savefig('histogram/hist.pdf')
def main():
    """Inspect a single stored run (L=2, one protocol length) and plot
    the mean protocol along with basic fidelity statistics."""
    util = UTILS()
    params = util.read_parameter_file(file="../para.dat")
    params['L'] = 2

    step_grid = np.arange(4, 200, 4, dtype=int)
    step_grid = [step_grid[18]]  # restrict to a single length (n = 76)
    step_dt = 0.02
    params['dt'] = step_dt

    avg_fid, spread_fid = [], []
    for n_steps in step_grid:
        params['T'] = n_steps * step_dt
        params['n_step'] = n_steps
        run_file = util.make_file_name(params, root="../data/")
        result = parse_data(run_file, v=2)  # results stored here ...

        avg_fid.append(np.mean(result['F']))
        spread_fid.append(np.std(result['F']))

        proto_len = len(result['protocol'][0])
        plotting.protocol(np.arange(0, proto_len) * step_dt,
                          np.mean(result['protocol'], axis=0),
                          title='$T=%.3f$' % (step_dt * proto_len))
    plt.show()
def main():
    ### READING ES data and finding optimal state and the gap to excited states.
    ### Looping over parameters and storing data in a dictionary, then
    ### pickling it to optimal.pkl keyed by (round(T, 2), n_step).
    utils = UTILS()
    parameters = utils.read_parameter_file()

    res = {}
    for T in [3.6]:
        for n_step in [24]:
            parameters['T'] = T
            parameters['n_step'] = n_step
            parameters['dt'] = T / n_step

            file = utils.make_file_name(parameters, root='data/')
            with open(file, 'rb') as f:
                data = pickle.load(f)

            # BUG FIX: a leftover debug probe
            #     print(np.sort(data[:, 0])[-8:]); exit()
            # aborted the program at this point, so the loop never
            # finished and optimal.pkl was never written (other code
            # in this project reads optimal.pkl).

            # Optimal protocol = row with maximal fidelity (column 0).
            pos_min = np.argmax(data[:, 0])
            optimal_fid = data[pos_min, 0]

            # Knock the winner out, then the new argmax is the runner-up;
            # the difference is the fidelity gap.
            data[pos_min, 0] -= 10.
            pos_min_2 = np.argmax(data[:, 0])
            gap = optimal_fid - data[pos_min_2, 0]

            # Decode the winning index into a +/-1 bang-bang sequence.
            hx = (b2_array(pos_min, n_step) - 0.5) * 2.0
            print(T, '\t', n_step, '\t', optimal_fid, '\t', gap, '\t',
                  gamma(hx), '\t', np.sum(hx))
            res[(round(T, 2), n_step)] = [optimal_fid, gap, pos_min]

    with open('optimal.pkl', 'wb') as f:
        pickle.dump(res, f)
def main():
    """Estimate the expected number of fidelity evaluations needed to hit
    the ground-state fidelity, for each ramp time T and protocol length,
    from stored stochastic-descent data and the ES optima in optimal.pkl."""
    with open('optimal.pkl', 'rb') as fin:
        res = pickle.load(fin)

    ### READING ES data and finding optimal state and the gap to excited states.
    ### Looping over parameters and storing data in a dictionary.
    utils = UTILS()
    parameters = utils.read_parameter_file()

    step_list = [4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28]
    prob_vs_T = {}
    for raw_T in np.arange(0.025, 4.001, 0.025):
        T = round(raw_T, 2)
        prob_vs_T[T] = np.zeros((13, 2), dtype=np.float64)

        for ii, n_step in enumerate(step_list):
            parameters['T'] = T
            parameters['n_step'] = n_step
            parameters['dt'] = T / n_step

            gs_fid = res[(T, n_step)][0]  # best (ground-state) fidelity from ES

            file = utils.make_file_name(parameters, root='data/')
            with open(file, 'rb') as fin:
                _, data = pickle.load(fin)

            n_elem = len(data)
            # careful here, precision is important ---> !!!
            v = np.zeros(n_elem, dtype=np.float64)
            n_eval = np.zeros(n_elem, dtype=np.float64)
            for i, elem in enumerate(data):
                v[i] = elem[1]
                n_eval[i] = elem[0]

            # Fraction of runs that reached the ground-state fidelity.
            count = v[np.abs(v - gs_fid) < 1e-14].shape[0]
            prob = count / n_elem
            mean = np.mean(n_eval)
            if prob > 1e-14:
                prob = prob ** -1 * mean  # expected evaluations until success
            else:
                prob = 2 ** n_step  # worst case ... need to search the whole space ...
            if prob > 2 ** n_step:
                prob = 2 ** n_step  # worst case ...

            prob_vs_T[T][ii, 0] = n_step
            prob_vs_T[T][ii, 1] = prob

            mean = np.mean(n_eval)
            std = np.std(n_eval)
            out_str = "{0:<6.2f}{1:<6}{2:<20.3f}{3:<10.4f}{4:<10.4f}{5:<8}".format(T, n_step, prob, std / mean, mean, count)
            print(out_str)
def main():
    # Scan stored L=1 runs over a grid of protocol lengths, collect
    # best-fidelity / evaluation-count statistics, and plot them.
    # NOTE(review): the exit() below deliberately stops after the first
    # plot; everything past it is currently dead code.
    ut = UTILS()
    parameters = ut.read_parameter_file(file="../para.dat")
    parameters['L'] = 1
    n_step = 400
    nrange = np.arange(10, 800, 10, dtype=int)
    Trange = np.arange(0.05, 4.01, 0.05)
    dt = 0.005  # fixed time step; ramp time is T = n * dt
    parameters['dt'] = dt
    mean_fid = []   # best fidelity per n (np.max below, despite the name)
    std_fid = []    # fidelity spread per n
    n_fid = []      # mean number of fidelity evaluations per n
    ed1 = []
    ed2 = []
    for n in nrange:
        #for n in nrange:
        #model = ut.quick_setup(argv=['T=%.3f'%T,'n_step=%i'%n_step],file='../para.dat')
        parameters['T'] = n * dt
        #parameters['T'] = T
        parameters['n_step'] = n
        #parameters['n_step']= n_step
        #parameters['dt'] = parameters['T']/parameters['n_step']
        file_name = ut.make_file_name(parameters, root="../data/")
        res = parse_data(file_name)  # results stored here ...
        print(n, '\t', len(res['F']))
        mean_fid.append(np.max(res['F']))
        n_fid.append(np.mean(res['n_fid']))
        std_fid.append(np.std(res['F']))
        # Map {0,1} protocol bits to the +/-4 field values before the
        # order-parameter computation -- assumes hx amplitude 4; TODO confirm.
        tmp = 8 * (res['protocol'] - 0.5)
        ed1.append(Ed_Ad_OP(tmp))
        #ed2.append(Ed_Ad_OP_2(res['protocol'],min_h=0, max_h=1))
    plt.plot(nrange * dt, n_fid, label='ed1')
    #plt.plot(nrange*dt, ed2,label='ed2')
    plt.legend(loc='best')
    plt.show()
    exit()  # debug stop -- code below is unreachable
    n_step = len(res['protocol'][0])
    plotting.protocol(Trange, np.mean(res['protocol'], axis=0), title='$T=%.3f$' % (dt * n_step))
    #plotting.protocol(np.arange(0,n_step)*dt,np.mean(res['protocol'],axis=0),title='$T=%.3f$'%(dt*n_step))
    #plt.scatter(nrange*dt,std_fid)
    #plt.plot(nrange*dt,std_fid)
    plt.show()
def main():
    ### READING ES data and finding optimal state and the gap to excited states.
    ### For each ramp time T, keep the 201 highest-fidelity protocols and
    ### pickle the collected spectra.
    utils = UTILS()
    parameters = utils.read_parameter_file()

    Ts = np.arange(1.5, 2.81, 0.05)
    n_step = 28

    # Row i_ <-> Ts[i_]; 201 best entries per ramp time.
    # NOTE(review): the row count reuses n_step (28) while len(Ts) == 27;
    # it happens to be large enough, but the coincidence looks accidental.
    fidelities = np.zeros((n_step, 201), np.float64)
    energies = np.zeros_like(fidelities)
    protocols = np.zeros_like(fidelities)

    for i_, T in enumerate(Ts):
        parameters['T'] = T
        parameters['n_step'] = n_step
        parameters['dt'] = T / n_step
        # FIX: removed an unused `b2_array` lambda (its dtype=np.int alias
        # was removed in NumPy 1.24 and would raise AttributeError) and an
        # unused `res = {}` local.

        file = utils.make_file_name(parameters, root='data/data_ES/')
        with open(file, 'rb') as f:
            data = pickle.load(f)

        # Indices of the 201 largest fidelities (column 0), best first.
        h_index = np.argsort(-data[:, 0])[:201]
        fidelities[i_, :] = data[h_index, 0]
        energies[i_, :] = data[h_index, 1]
        protocols[i_, :] = h_index  # integer protocol labels (stored as float)

        print("done with %i/%i iterations." % (i_ + 1, len(Ts)))

    # NOTE(review): despite the .txt suffix this is a pickle file.
    save_name = 'processed_data/low-fid_spectrum_L=2_J=1_hz=1.txt'
    # FIX: use a context manager so the output file is flushed and closed.
    with open(save_name, "wb") as f:
        pickle.dump([fidelities, energies, protocols], f)
def main():
    ### READING ES data and finding optimal state and the gap to excited states.
    ### Looping over parameters; prints the optimum, its gap to the
    ### runner-up, and the decoded optimal protocol.
    utils = UTILS()
    parameters = utils.read_parameter_file()

    for T in np.arange(1.5, 2.81, 0.05):  #[1.8,2.15]:
        for n_step in [28]:
            parameters['T'] = T
            parameters['n_step'] = n_step
            parameters['dt'] = T / n_step

            # Decode an integer protocol label into its n_step-bit 0/1 array.
            # FIX: dtype=np.int raises AttributeError on NumPy >= 1.24
            # (the alias was removed); the builtin int is what it aliased.
            b2_array = lambda n10: np.array(
                list(np.binary_repr(n10, width=n_step)), dtype=int)

            file = utils.make_file_name(parameters, root='data/data_ES/')
            res = {}
            with open(file, 'rb') as f:
                data = pickle.load(f)

            # Best protocol = row with maximal fidelity (column 0).
            pos_min = np.argmax(data[:, 0])
            optimal_fid = data[pos_min, 0]
            data[pos_min, 0] -= 10.            # knock out the winner ...
            pos_min_2 = np.argmax(data[:, 0])  # ... to locate the runner-up
            gap = optimal_fid - data[pos_min_2, 0]

            print("T = %0.3f: F_optimal = %0.5f; gap = %0.10f" % (T, optimal_fid, gap))
            res[(T, n_step)] = [optimal_fid, gap]
            #exit()
            print(pos_min)
            optimal_h = b2_array(pos_min)
            # Map bits {0, 1} -> {-1, 1} (entries equal to 0 become -1).
            optimal_h[np.abs(optimal_h) < 1E-12] = -1
            print(optimal_h)
            # Sum with the time-reversed sequence: a quick symmetry probe.
            print(optimal_h + optimal_h[::-1])
            print()
def main():
    """Load one stored run (T=3.2, 100 steps) and select the protocols
    whose final fidelity falls inside a target window."""
    utils = UTILS()
    model = utils.quick_setup(argv=['T=3.3'], file="../para.dat")
    parameters = utils.read_parameter_file(file="../para.dat")

    Trange = np.arange(0.1, 4.01, 0.1)
    # maybe trick is to bin stuff up --> ?!
    interval = [0.85, 1.0]

    #for t in Trange:
    parameters['T'] = 3.2
    parameters['n_step'] = 100
    parameters['n_quench'] = 1000
    parameters['dt'] = parameters['T'] / parameters['n_step']

    run_file = utils.make_file_name(parameters, root="../data/")
    data = parse_data(run_file, v=3)

    fid_series = data['fid_series']
    protocols = data['protocol']
    fid = data['F']

    # protocols within an interval
    in_window = (fid < interval[1]) & (fid > interval[0])
    protocols = protocols[in_window]
def main():
    # Density-map study of the exhaustive-search (ES) fidelity/energy
    # landscape for L=6, n_step=20, over a grid of ramp times T.
    # KDE bandwidths are cached in a pickle so reruns can skip the
    # expensive bandwidth search.
    import os
    utils = UTILS()
    parameters = utils.read_parameter_file(file="../para.dat")

    # Bandwidth cache on disk: {round(T, 3): bandwidth}.
    file_bw = 'bandwidth_L=6_nStep=20.pkl'
    if os.path.isfile(file_bw):
        with open(file_bw, 'rb') as f:
            bandwidths = pickle.load(f)
    else:
        bandwidths = {}

    for T in np.arange(0.3, 4.01, 0.1):
        print("density map for\t", T)
        parameters['task'] = 'ES'
        parameters['L'] = 6
        parameters['n_step'] = 20
        parameters['T'] = T
        parameters['dt'] = parameters['T'] / parameters['n_step']
        parameters['n_quench'] = 1000

        file = utils.make_file_name(parameters, root="../data/")
        with open(file, 'rb') as f:
            data = pickle.load(f)

        n_sample = data.shape[0]
        # Per-sample excitation info derived from the fidelities; the
        # exact contents come from the project-level excitations() helper.
        exc_dict = excitations(data[:, 0], parameters)

        X = np.column_stack((data[:, 0], data[:, 1]))  # fidelity vs energy
        Emin = np.min(X[:, 1])
        arg_Fmax = np.argmax(X[:, 0])
        Fmax = X[arg_Fmax, 0]
        print("--> density estimate on : ", X.shape)

        # transformations, so we can visualize the high-fidelity regions
        X[:, 0] = -np.log(X[:, 0]) / parameters['L']
        X[:, 1] = (X[:, 1] - Emin) / parameters['L']

        # Reuse a cached bandwidth when one exists for this T, otherwise
        # let the KDE search for it (the slow path).
        if round(T, 3) in bandwidths.keys():
            kde = KDE(extreme_dist=True, bandwidth=bandwidths[round(T, 3)])
        else:
            kde = KDE(extreme_dist=True)
        kde.fit(X)  # determining bandwidth + building model
        bandwidths[round(T, 3)] = kde.bandwidth
        #with open('kde_tmp.pkl','wb') as f:
        #    pickle.dump(kde, f)

        # Just make some prelim plots, fidelity, etc. Let's see what we get for now.
        # -------------->
        print('plotting')
        plt2.density_map(X, kde, xlabel='$-\log F/L$', ylabel='$(E-E_0)/L$', show=False, n_mesh=400)
        print('excitations')
        plot_excitations(X, exc_dict, a_fmax=arg_Fmax)
        #plt.title('$T=%.2f, N=%i $'%(parameters['T'], parameters['n_step']))
        plt.tight_layout(pad=0.2)
        plt.legend(loc='best')
        plt.savefig('ES_L=%i_T=%.2f_nStep=%i.pdf' % (parameters['L'], parameters['T'], parameters['n_step']))

    # Persist the (possibly updated) bandwidth cache for future runs.
    with open('bandwidth_L=%i_nStep=%i.pkl' % (parameters['L'], parameters['n_step']), 'wb') as f:
        pickle.dump(bandwidths, f)
        f.close()  # redundant: the with-statement already closes f
    exit()
def main():
    # Compare stochastic-search statistics (fidelity, evaluation counts,
    # visited states) across ramp times T and protocol lengths n_step.
    # NOTE(review): the exit() midway deliberately stops after the first
    # fidelity-vs-T plot; everything below it is currently dead code.

    # Utility object for reading, writing parameters, etc.
    utils = UTILS()
    # Reading parameters from para.dat file
    parameters = utils.read_parameter_file()
    # Command line specified parameters overide parameter file values
    utils.read_command_line_arg(parameters, sys.argv)
    # Printing parameters for user
    utils.print_parameters(parameters)
    # Defining Hamiltonian
    H = HAMILTONIAN(**parameters)
    # Defines the model, and precomputes evolution matrices given set of states
    model = MODEL(H, parameters)

    #n_step = parameters['n_step']
    #X,y=sample_m0(10000,n_step,model)
    #print(y[0:10])
    #plt.hist(y,bins=20)
    #plt.show()

    # Accumulators keyed by (n_step, hash(T)).
    rob_vs_T = {}
    n_eval = {}
    fid = {}
    res = {}
    visit = {}
    T_list = np.arange(0.1, 4.01, 0.1)
    n_step = 100
    fid_list = []
    for T in T_list:
        parameters['T'] = T
        parameters['n_step'] = n_step
        parameters['dt'] = T / n_step
        file = utils.make_file_name(parameters, root='data/')
        res = parse_data(file, v=3)
        fid_list.append(np.mean(res['F']))
        #n_eval[(n_step,hash(T))]=res['n_fid']
        #fid[(n_step,hash(T))]=res['F']
        #visit[(n_step,hash(T))] = res['n_visit']
    plt.plot(T_list, fid_list)
    plt.xlabel('T')
    plt.ylabel('Fidelity')
    plt.show()
    exit()  # debug stop -- code below is unreachable

    n_step_list = [40, 50, 60, 70, 80, 90, 100, 110]
    for T in T_list:
        for n_step in n_step_list:  #[40,50,60,70,80,90,100,110,120]:
            ##for T in np.arange(0.025,10.001,0.025):
            # for n_step in [100,200,400] :
            parameters['T'] = T
            parameters['n_step'] = n_step
            parameters['dt'] = T / n_step
            file = utils.make_file_name(parameters, root='data/')
            res = parse_data(file)
            # hash(T) is used as a float-safe dictionary key for T.
            n_eval[(n_step, hash(T))] = res['n_fid']
            fid[(n_step, hash(T))] = res['F']
            visit[(n_step, hash(T))] = res['n_visit']
            # The triple-quoted block below is dead code kept as a string.
            '''
            with open(file,'rb') as f:
                _, data = pickle.load(f)
            n_elem = len(data)
            n_eval[(n_step,hash(T))]=[]
            n_fid[(n_step,hash(T))]=[]
            for elem in data:
                n_eval[(n_step,hash(T))].append(elem[0])
                n_fid[(n_step,hash(T))].append(elem[1])'''
    #print(n_eval)
    #exit()

    # Per-n_step curves over T, normalized by the search-space size.
    n_eval_mean = {}
    fid_mean = {}
    visit_mean = {}
    #print(visit[(40,115292150460684704)])
    #exit()
    for n_step in n_step_list:
        n_eval_mean[n_step] = []
        fid_mean[n_step] = []
        visit_mean[n_step] = []
        for T in T_list:
            hT = hash(T)
            n_eval_mean[n_step].append(
                [T, np.mean(n_eval[(n_step, hT)]) / (n_step * n_step)])
            fid_mean[n_step].append([T, np.mean(fid[(n_step, hT)])])
            visit_mean[n_step].append(
                [T, np.mean(visit[(n_step, hT)]) / (n_step)])

    # One color per n_step curve.
    c_list = [
        '#d53e4f', '#f46d43', '#fdae61', '#fee08b', '#e6f598', '#abdda4',
        '#66c2a5', '#3288bd'
    ]
    for i, n_step in enumerate(n_step_list):
        x = np.array(n_eval_mean[n_step])
        plt.plot(x[:, 0], x[:, 1], c='black', zorder=0)
        plt.scatter(x[:, 0], x[:, 1], c=c_list[i], marker='o', s=5,
                    label='$N=%i$' % n_step, zorder=1)
    plt.title('Number of fidelity evaluations vs. ramp time \n for 2 flip')
    plt.ylabel('$N_{eval}/N^2$')
    plt.xlabel('$T$')
    plt.legend(loc='best')
    plt.tight_layout()
    plt.show()

    for i, n_step in enumerate(n_step_list):
        x = np.array(visit_mean[n_step])
        plt.plot(x[:, 0], x[:, 1], c='black', zorder=0)
        plt.scatter(x[:, 0], x[:, 1], c=c_list[i], marker='o', s=5,
                    label='$N=%i$' % n_step, zorder=1)
    plt.title('Number of visited states vs. ramp time \n for 2 flip')
    plt.ylabel('$N_{visit}/N$')
    plt.xlabel('$T$')
    plt.legend(loc='best')
    plt.tight_layout()
    plt.show()
    # NOTE(review): stray unterminated triple-quote below -- likely a
    # leftover from commenting code out; verify against the full file.
    '''
def main():
    # Cluster analysis of stochastic-descent (SD) protocols: for each ramp
    # time T, measure how many distinct protocols were found, the size of
    # the largest cluster, and the Shannon entropy of the mean protocol,
    # alongside fidelity / evaluation statistics over the best n_best runs.
    ut = UTILS()
    parameters = ut.read_parameter_file(file="../para.dat")
    n_step = 100
    S_shannon = []   # Shannon entropy of the mean protocol, per T
    n_cluster = []   # number of distinct protocols / sample count, per T
    slarge = []      # fraction of samples in the largest cluster, per T
    parameters['task'] = 'SD'
    Trange = np.arange(0.05, 1.0, 0.01)
    n_fid = []
    n_visit = []
    fid = []
    std_fid = []
    energy = []
    #Trange = [3.6]
    n_best = 50000  # statistics restricted to the n_best highest-fidelity runs
    for T in Trange:
        #model = ut.quick_setup(argv=['T=%.3f'%T,'n_step=%i'%n_step],file='../para.dat')
        dt = T / n_step
        parameters['dt'] = dt
        parameters['T'] = T
        parameters['n_step'] = n_step
        file_name = ut.make_file_name(parameters, root="../data/")
        res = parse_data(file_name, v=2)  # results stored here ...
        prot = res['protocol']
        asortF = np.argsort(res['F'])  # ascending; take the tail for the best runs
        fid.append(np.mean(res['F'][asortF[-n_best:]]))
        std_fid.append(np.std(res['F']))
        n_visit.append(np.mean(res['n_visit'][asortF[-n_best:]]))
        n_fid.append(np.mean(res['n_fid'][asortF[-n_best:]]))
        energy.append(np.mean(res['E'][asortF[-n_best:]]))
        n_sample = prot.shape[0]
        #np.sum(-mag*np.log(mag))
        # Distinct protocols and their multiplicities (clusters).
        xunique, count = np.unique(prot, return_counts=True, axis=0)
        n_unique = len(xunique)
        #print(count)
        t = T
        print(T, '\t', n_unique, '\t', n_sample)
        # The triple-quoted block below is dead debug code kept as a string.
        '''v=scidist.squareform(scidist.pdist(xunique,mydist))
        print(scidist.squareform(scidist.pdist(xunique,mydist)))
        print([model.compute_fidelity(protocol = xi) for xi in xunique])
        for xi in xunique:
            plotting.protocol(range(150),xi)
            exit()
        exit()'''
        #[mydist(xunique[i],xunique[j]) for i in range(nunique) for j in range(nu
        #print(count)
        #print(len(count))
        #print(xunique)
        #plotting.protocol(range(150),xunique[0])
        #print(count)
        #_sample)
        #print(res['F'][0])
        #print('E0 :\t',model.compute_fidelity(protocol=xunique[0]))
        #print('E1 :\t',model.compute_fidelity(protocol=xunique[1]))
        #xtest = np.copy(xunique[1])
        #xtest[14]^=1
        #xtest[15]^=1
        #print(xtest)
        #print(xunique[0])
        #print("Einter :\t",model.compute_fidelity(protocol=xtest))
        #print(xunique)
        #exit()
        #plotting.protocol(range(50),xunique[1],title='F=%.10f'%model.compute_fidelity(protocol=xunique[1]))
        #print(count)
        #exit()
        #print(count)
        #exit()
        prob = (count * 1.0) / np.sum(count)  # cluster occupation probabilities
        mag = np.mean(prot, axis=0)  # per-time-bin mean of the {0,1} protocols
        nzero = (mag > 1e-10)  # guard against log(0)
        #mag[nzero]*np.log(mag[nzero])
        S_shannon.append(-np.sum(mag[nzero] * np.log(mag[nzero])))
        #plist2.append(prob)
        n_cluster.append(1.0 * len(count) / n_sample)
        if len(count) == 1:
            slarge.append(1.0)  # single cluster holds everything
        else:
            asort = np.argsort(count)
            fraction = (1.0 * count) / n_sample
            total_f = np.sum(fraction[fraction > 0.1])
            #largest_size = count[asort[-1]]
            slarge.append(np.max(count) / n_sample * 1.0)
            #slarge.append(1.0*np.sum(count[asort[-5:]])/n_sample)
        #plist2.append(-np.sum(prob*np.log(prob))/np.log(n_sample))
        #print(T,'\t',-np.sum(prob*np.log(prob)))
    plt.scatter(Trange, S_shannon)
    plt.show()
    # Dead debug code kept as a string.
    '''sorted_f = np.sort(res['F'])
    #print(np.std(sorted_f[-45000:]))
    #print(np.std(sorted_f))
    #exit()
    n_red = 41500
    plt.hist(np.sort(res['F'])[-n_red:], bins=100)
    n_sample = len(res['F'])
    plt.title("2-SF, T=%.2f, fidelity distribution ($N=%i$) \n considering the best %.1f %% of data"%(t,n_red,n_red/n_sample*100.))
    plt.show()
    exit()'''
    # One diagnostic scatter plot per collected quantity.
    plt.scatter(Trange, n_fid)
    plt.title('nfid')
    plt.show()
    plt.scatter(Trange, n_visit)
    plt.title('nvisit')
    plt.show()
    plt.scatter(Trange, fid)
    plt.title('F')
    plt.show()
    plt.scatter(Trange, std_fid)
    plt.title('std F')
    plt.show()
    plt.scatter(Trange, energy)
    plt.title('E')
    plt.show()
    plt.scatter(Trange, slarge)
    plt.scatter(Trange, n_cluster)
    plt.title('Cluster size fraction and # of clusters')
    plt.show()
    plt.scatter(Trange, S_shannon)
    plt.title('Shannon entropy')
    plt.show()
# Standalone script: load the stored spectrum for one run (T=0.4, 20 steps),
# decode the 10000 highest-fidelity protocols into bit arrays, and set up a
# 2-D t-SNE embedding of them.
import sys
sys.path.append("..")
import pickle

# FIX: numpy was used below (np.*) but never imported -> NameError.
import numpy as np
from matplotlib import pyplot as plt

from utils import UTILS

utils = UTILS()
parameters = utils.read_parameter_file(file="../para.dat")

n_step = 20
parameters['T'] = round(0.4, 2)
parameters['n_step'] = n_step
parameters['dt'] = parameters['T'] / parameters['n_step']

file = utils.make_file_name(parameters, root='../data/')

n_state = 10000
# Decode an integer protocol label into its n_step-bit {0., 1.} array.
# FIX: dtype=np.float was removed in NumPy 1.24; builtin float is the
# same dtype.
b2_array = lambda n10: np.array(list(np.binary_repr(n10, width=n_step)),
                                dtype=float)

# FIX: use a context manager instead of a bare open() that was never closed.
with open(file, 'rb') as pipe:
    data = pickle.load(pipe)

fid, ene = data[:, 0], data[:, 1]
# Row indices of the n_state highest fidelities (argsort is ascending).
f_state = np.argsort(fid)[-n_state:]

X = np.zeros((n_state, 20), dtype=float)
for i in range(X.shape[0]):
    X[i] = b2_array(f_state[i])

# NOTE(review): TSNE is not imported in this chunk -- it needs
# `from sklearn.manifold import TSNE`, presumably present elsewhere
# in the original file; confirm before running standalone.
tsne = TSNE(n_components=2)