def plot_effective_mass(data_file,settings_file="input.xml",label="",param_file="effective_mass_t.dat",factor=1,fit_lines=True):
    """Plot the effective-mass estimator W(t)/t with error bars.

    Reads the winding-number measurement skip and the imaginary-time step
    from settings_file, rescales columns 1 and 2 of data_file by *factor*,
    and draws data[1]/t with errors data[2]/t, where
    t = data[0]*time_step*(skip+1).  If param_file (written by
    anal_effective_mass) exists and fit_lines is True, the fitted
    ``hyperbole`` curve and a +/- one-sigma band are overlaid on the fit
    window stored in that file.

    Returns the matplotlib errorbar container.
    """
    root=ET.parse(settings_file).getroot()
    # how many MC steps separate two winding-number measurements
    skip=int(root.find("measures").find("winding_number").attrib["skip"])
    time_step=float(root.find("method").find("delta_tau").text)
    fit_mean=[]
    fit_low=[]
    fit_up=[]
    data=tools.read_matrix_from_file(data_file)
    data[1]=data[1]*factor
    data[2]=data[2]*factor
    #plt.ylim(0,1)
    if (os.path.isfile(param_file) and fit_lines==True):
        with open(param_file,"r") as f:
            # file layout: low index, high index, parameters, errors
            cut_low=int(f.readline())
            cut_heigh=int(f.readline())
            params=f.readline().split();
            errors=f.readline().split();
        # conversion factor from array index to imaginary time
        a=(time_step*(skip+1))
        for x in (data[0][cut_low:cut_heigh]*a):
            #values.append(effective_mass_exp_curve(float(x),float(params[0]),float(params[1]),float(params[2])))
            fit_mean.append(hyperbole(float(x),float(params[0]),float(params[1])*a))
            # +/- one-sigma curves used for the shaded confidence band
            fit_low.append(hyperbole(float(x),float(params[0])-float(errors[0]),float(params[1])*a - float(errors[1])*a))
            fit_up.append(hyperbole(float(x),float(params[0])+float(errors[0]),float(params[1])*a + float(errors[1])*a))
        plt.plot(data[0][cut_low:cut_heigh]*a,fit_mean,linewidth=4)
        plt.fill_between(data[0][cut_low:cut_heigh]*a,fit_low,fit_up,alpha=0.4)
    # NOTE(review): divides by data[0]*time_step*(skip+1); a zero entry in
    # data[0] (e.g. a t=0 first sample) would divide by zero -- confirm the
    # time column starts at 1.
    return plt.errorbar(data[0]*time_step*(skip+1),data[1]/(data[0]*time_step*(skip+1)), yerr=data[2]/(data[0]*time_step*(skip+1)),label=label,fmt='o',ms=1)
def winding_number(filename,window,bins=0):
    """Mean-square winding-number displacement versus lag (Python 2).

    Splits the series in column 1 of *filename* into consecutive windows of
    length *window* and accumulates (W(j) - W(j - i*step))**2 for *bins*
    lag values i*step, then averages each lag over its contributions.
    Scatter-plots and returns [times, values] with times[i] = i*step.
    """
    data=tools.read_matrix_from_file(filename);
    print "file read."
    print len(data[1])
    # default: one lag bin per window sample
    if (bins ==0):
        bins=window
    times=np.zeros(bins)
    values=np.zeros(bins)
    ns=np.zeros(bins)
    # lag spacing (integer division under Python 2)
    step=window/bins
    for i in range(0,bins):
        times[i]=i*step
    # accumulate squared displacements, window by window
    for k in range(0,len(data[1])-window,window):
        for j in range(k,window+k,step):
            for i in range(0,bins):
                # NOTE(review): when j < i*step the index j - i*step is
                # negative and wraps to the END of the array, mixing
                # unrelated samples into this lag -- confirm intended.
                values[i]=values[i]+(data[1][j]-data[1][j - i*step])**2
                ns[i]=ns[i]+1
    # normalise each lag by its number of contributions
    for i in range(0,bins):
        if (ns[i] != 0):
            values[i]=values[i]/ns[i]
        else:
            values[i]=0
    plt.scatter(times,values)
    return [times,values]
def get_array(self,filename):
    """Load a data file from this run's directory into an (n, 3) array.

    Column 0 is a sequential sample index, column 1 the values (row 1 of
    the file) and column 2 the errors (row 2 of the file).

    Returns the assembled numpy array.
    """
    values=tools.read_matrix_from_file(self.dir_path+"/"+filename)
    n=len(values[0])
    tab=np.zeros((n,3))
    tab[:,0]=np.arange(n)
    tab[:,1]=values[1]
    # BUG FIX: the original wrote tab[:,3], which is out of bounds for a
    # 3-column array (valid indices 0..2) and raised IndexError; the
    # errors belong in column 2.
    tab[:,2]=values[2]
    return tab
def get_array(self, filename):
    """Load a data file from this run's directory into an (n, 3) array.

    Column 0 is a sequential sample index, column 1 the values (row 1 of
    the file) and column 2 the errors (row 2 of the file).

    Returns the assembled numpy array.
    """
    values = tools.read_matrix_from_file(self.dir_path + "/" + filename)
    n = len(values[0])
    tab = np.zeros((n, 3))
    tab[:, 0] = np.arange(n)
    tab[:, 1] = values[1]
    # BUG FIX: the original wrote tab[:, 3], which is out of bounds for a
    # 3-column array (valid indices 0..2) and raised IndexError; the
    # errors belong in column 2.
    tab[:, 2] = values[2]
    return tab
def __init__(self, frames_map, raw=True):
    """Initialise the sprite's renderer state.

    When *raw* is true every value of *frames_map* is taken to be a file
    path and is replaced, in place in the caller's dict, by the frame
    matrix loaded from that path.
    """
    if raw:
        # Swap each path for its loaded frame matrix; this deliberately
        # mutates the dict the caller handed in.
        for tag in list(frames_map):
            frames_map[tag] = read_matrix_from_file(frames_map[tag])
    self.frames_map = frames_map
    self.renderer_object = RendererObject()
    self.center_hor = False
    self.parent_screen_object = None
def is_stationary(filename,bins=10,eps=0.01):
    """Chunked augmented Dickey-Fuller stationarity test.

    Reads the value column (row 1) of *filename*, splits it into *bins*
    consecutive chunks and runs an ADF test (constant-only regression) on
    each; returns True if ANY chunk rejects the unit-root null at the 5%
    level, False otherwise.

    *eps* is kept for interface compatibility; it is not used here.
    """
    # Read the file ONCE (the original parsed the same file twice, once
    # for the step column it never used and once for the values).
    values=np.array(tools.read_matrix_from_file(filename)[1])
    converged=False
    # chunk length; '//' makes the Python 2 integer division explicit
    step=(len(values)-1)//bins
    for i in range(0,bins):
        low=i*step
        heigh=(i+1)*step
        # adfuller: index 0 is the test statistic, index 4 the dict of
        # critical values keyed by confidence level.
        tsa_r=tsa.adfuller(values[low:heigh],regression="c")
        if (tsa_r[0] < tsa_r[4]["5%"]):
            converged=True
    return converged
def anal_pair_correlation(dirname=".",jumps=0,bins=10,filename="g.dat"):
    """Reblocking analysis of the pair-correlation histogram.

    Each row j of dirname/filename is treated as the time series of one
    histogram bin: the first *jumps* samples are discarded, the series is
    reblocked with *bins* blocks and the per-row estimate is saved under
    dirname/pair_correlation/<j>.  The per-row means and errors are then
    written to dirname/pair_correlation/g.dat.
    """
    matrix=tools.read_matrix_from_file(dirname+"/"+filename)
    # one (mean, error) pair per histogram row
    values=np.zeros((matrix.shape[0],2))
    for j in range(0,matrix.shape[0]):
        e=observable()
        # drop the first *jumps* equilibration samples
        e.values=matrix[j,jumps::]
        #e.reblock().autocorrelate().plot_autocorrelate()
        r=e.reblock()
        r.bins=bins
        r.estimate().save(dirname+"/pair_correlation/"+str(j))
        values[j,0]=r.mean
        # NOTE(review): takes the LAST entry of the reblocking error
        # series -- presumably the largest block size; confirm against
        # the observable.reblock implementation.
        values[j,1]=r.errors[-1]
    tools.write_matrix_in_file(values,dirname+"/pair_correlation/g.dat")
    return None
def plot_file(filename,error=True,label="",reset_index=False,jumps=0,linear_increment=False):
    """Plot column 1 of *filename* against column 0.

    With *error* true (and a third column present) an errorbar plot is
    drawn; otherwise a plain point plot.  *reset_index* shifts the x axis
    to start at zero, *linear_increment* replaces it with 1..n, and
    *jumps* drops that many leading samples (applied last).

    Returns the matplotlib artist.
    """
    mat = np.array(tools.read_matrix_from_file(filename))
    # No third column means no error data to draw.
    if len(mat) < 3:
        error = False
    if reset_index:
        mat[0] = mat[0] - mat[0][0]
    if linear_increment:
        mat[0] = range(1, len(mat[0]) + 1)
    # Drop the leading samples AFTER the index transformations, as before.
    mat = mat[:, jumps:]
    if error:
        return plt.errorbar(mat[0], mat[1], yerr=mat[2], label=label, marker='o', linestyle='none')
    return plt.plot(mat[0], mat[1], label=label, marker='o')[0]
def anal_g(dirname="."):
    """Reblocking analysis of every row of dirname/g.dat (Python 2).

    Builds an observable from each row, reblocks it, collects the mean and
    error, prints progress after each row, and writes [values, errors] to
    "g.out".

    NOTE(review): the output path "g.out" ignores *dirname* while the
    input is read from dirname/g.dat -- confirm this asymmetry is intended.
    """
    data=tools.read_matrix_from_file(dirname+"/g.dat")
    values=[]
    errors=[]
    n=data.shape[0]
    i=0
    # NOTE(review): despite the name, this iterates the ROWS of data
    # (its first axis), not its columns.
    for column in data:
        e=observable()
        e.import_data(column)
        r=e.reblock()
        values.append(r.get_mean())
        errors.append(r.get_error())
        i=i+1
        print "completed " + str(i) +" of " +str(n)
    tools.write_matrix_in_file([values,errors],"g.out")
def getMean(filename, method="blocking", jumps=0, makePlot=False):
    """Mean and blocking-error estimates for the value column of *filename*.

    The first *jumps* samples are discarded before the analysis.  *method*
    is kept for interface compatibility (only blocking is used here).

    Returns [mean, err0, err1, err2] where the three error entries come
    from estimateErrorBlocking.
    """
    # Value column with the equilibration samples dropped.
    samples = np.array(tools.read_matrix_from_file(filename)[1])[jumps:]
    blocking = estimateErrorBlocking(samples, minBlocks=10, makePlot=makePlot)
    return [np.mean(samples), blocking[0], blocking[1], blocking[2]]
time.sleep(2.5) print("One more time...") time.sleep(2.5) def draw_line(self): print("#", end="") for drawer in range(self.screen_x): print("-", end="") print("#") def update_screen(self): self.screen_matrix.clear() if __name__ == "__main__": frames_matrix_go_right = read_matrix_from_file("models/example_right.model") frames_matrix_go_left = read_matrix_from_file("models/example_left.model") renderer = Renderer() demo_object = RendererObject(0, 0) renderer.renderer_objects.append(demo_object) go_right = True while True: # Walk logic if demo_object.pos_x == 0: go_right = True demo_object.set_model_matrix_frames(frames_matrix_go_right) if demo_object.pos_x == renderer.screen_x - 19: go_right = False demo_object.set_model_matrix_frames(frames_matrix_go_left)
def load_frames(path_to_file):
    """Load an animation frame matrix from *path_to_file*.

    Thin convenience wrapper around read_matrix_from_file.
    """
    frames = read_matrix_from_file(path_to_file)
    return frames
def plot_double(filename):
    """Point plot of column 1 of *filename* against column 0."""
    cols = tools.read_matrix_from_file(filename)
    x, y = cols[0], cols[1]
    plt.plot(x, y, marker='o', linestyle='none')
def plot_pair_correlation(filename):
    """Point plot of the first row of *filename* against its sample index."""
    cols = tools.read_matrix_from_file(filename)
    indexes = list(range(len(cols[0])))
    plt.plot(indexes, cols[0], marker='o', linestyle='none')
def anal_effective_mass(data_file,settings_file="input.xml",label="",out_file="effective_mass_fit.dat",cut_low=500,cut_heigh=None,n_cuts=10,factor=1):
    """Piecewise linear fit of the effective-mass data with error estimate.

    The fit window [cut_low, cut_heigh] (given in imaginary time, converted
    to array indices using the time step and winding-number skip from
    settings_file) is split into n_cuts sub-windows.  A weighted linear fit
    is run on each; the spread of the fitted parameters across sub-windows
    plus the averaged fit covariances gives the quoted errors.  Results are
    written to out_file in the layout read back by plot_effective_mass:
    low index, high index, mean parameters, parameter errors.

    Returns (params, covs) of the LAST sub-window fit, as before.

    Raises bound_error when the converted bounds fall outside the data and
    not_enough_data when a sub-window holds fewer than 3 points.
    """
    root=ET.parse(settings_file).getroot()
    skip=int(root.find("measures").find("winding_number").attrib["skip"])
    time_step=float(root.find("method").find("delta_tau").text)
    data=tools.read_matrix_from_file(data_file)
    if (factor != 1):
        # rescale the measured column in place
        for i in range(len(data[1])):
            data[1][i]=factor*data[1][i]
    # convert time bounds to array indexes
    cut_low=int(cut_low/(time_step*(skip+1)))
    if cut_heigh==None:
        cut_heigh=len(data[0])-1
    else:
        cut_heigh=int(cut_heigh/(time_step*(skip+1)))
    # check bounds
    if cut_low<0 or cut_heigh<0:
        raise bound_error()
    if (cut_low >= len(data[0]) or cut_heigh >= len(data[1])):
        raise bound_error()
    # size of each sub-window ('//' keeps the Python 2 integer division)
    size_of_cut=(cut_heigh - cut_low)//n_cuts
    # accumulators: parameter means, means of squares, fit covariances
    means=np.zeros(3)
    means2=np.zeros(3)
    errors=np.zeros(3)
    for i in range(0,n_cuts):
        cut_low_current=cut_low + i*size_of_cut
        cut_heigh_current=cut_low + (i+1)*size_of_cut
        if (cut_heigh_current - cut_low_current <= 2):
            raise not_enough_data
        params,covs=curve_fit(linear,
                              data[0][cut_low_current:cut_heigh_current],
                              data[1][cut_low_current:cut_heigh_current]/(time_step*(skip+1)),
                              sigma=data[2][cut_low_current:cut_heigh_current]/(time_step*(skip+1)),
                              maxfev=100000)
        # accumulate the mean and the square of each parameter
        means[0]=means[0] + params[0]
        means[1]=means[1] + params[1]
        means2[0]=means2[0] + params[0]**2
        means2[1]=means2[1] + params[1]**2
        # accumulate the fit covariances
        errors[0]=errors[0] + covs[0][0]
        # BUG FIX: the original read 'errors[1]=errors[0] + covs[1][1]',
        # which overwrote errors[1] with errors[0]'s running total and
        # discarded all previous covs[1][1] contributions.
        errors[1]=errors[1] + covs[1][1]
    means=means/n_cuts
    means2=means2/n_cuts
    # spread across sub-windows plus the average fit covariance
    errors=np.sqrt(abs((means2-means**2) + errors/n_cuts))
    with open(out_file,"w") as f:
        f.write(str(cut_low)+"\n")
        f.write(str(cut_heigh)+"\n")
        f.write(str(means[0])+" "+ str(means[1]) + " " + "0"+"\n")
        f.write(str(errors[0]) + " " + str(errors[1]) + " "+ "0" +"\n")
    return (params,covs)
def anal_energy(dirname=".",jumps=0,nMax=None,bins=None,filename="e.dat",sub_directory="energy",e_min=None,e_max=None,makePlot=False):
    """Analyse the energy time series stored in dirname/filename.

    Loads at most nMax rows, drops the first *jumps* samples of the value
    column and delegates the statistical work to anal_energy_values.
    The e_min/e_max arguments are accepted for interface compatibility.
    """
    series = tools.read_matrix_from_file(dirname + "/" + filename, nMax=nMax)
    samples = np.array(series[1])[jumps:]
    return anal_energy_values(samples, bins=bins, dirname=dirname,
                              sub_directory=sub_directory, makePlot=makePlot)
def getMean(filename,method="blocking",jumps=0,makePlot=False):
    """Return [mean, err0, err1, err2] for the value column of *filename*.

    Discards the first *jumps* samples, then runs the blocking-error
    analysis.  *method* is accepted for interface compatibility; only
    blocking is implemented here.
    """
    data = tools.read_matrix_from_file(filename)
    y = np.array(data[1])[jumps:]
    # Three error estimates produced by the blocking analysis.
    e = estimateErrorBlocking(y, minBlocks=10, makePlot=makePlot)
    return [np.mean(y), e[0], e[1], e[2]]