import numpy as np
import matplotlib.pyplot as plt
from os.path import isfile
from mpi4py import MPI

# mf (map generation) and par (surface parameters) are project-local modules,
# assumed to be importable under these names as used throughout the code below.


def partNum(z, pxlen, R_mu, R_sigma):
    # scanned area: the pixel grid spans (Npx - 1) steps of length pxlen per side
    A = (np.shape(z)[1] - 1) * (np.shape(z)[0] - 1) * pxlen**2
    V = par.V(z, pxlen)
    # mean particle volume for log-normally distributed radii:
    # E[4/3 pi R^3] = 4/3 pi exp(3 R_mu + 3^2 R_sigma^2 / 2)
    V_mean = 4/3 * np.pi * np.exp(3*R_mu + 3**2 * R_sigma**2 / 2)
    N_particles = V / V_mean
    eff_cov = N_particles / A
    return N_particles, eff_cov
def partNum(z, pxlen, R_mu, R_sigma):
    # variant of partNum that uses the full Npx^2 pixel area
    A = len(z)**2 * pxlen**2
    V = par.V(z, pxlen)
    V_mean = 4/3 * np.pi * np.exp(3*R_mu + 3**2 * R_sigma**2 / 2)
    N_part = V / V_mean
    eff_cov = N_part / A
    return N_part, eff_cov
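# Minimal usage sketch for partNum (illustrative addition, not part of the original
# scripts): build a flat map, deposit log-normal spheres with mf.genLogNormSolidSph,
# then estimate particle number and effective coverage. The helper name
# _example_partNum and all numeric values below are arbitrary example choices.
def _example_partNum():
    Npx, pxlen = 256, 1.0                 # example resolution and pixel length
    R_mean, R_std = 5.0, 1.0              # example linear mean and std of the radii
    R_mu = np.log(R_mean / np.sqrt(1 + R_std**2 / R_mean**2))
    R_sigma = np.sqrt(np.log(1 + (R_std / R_mean)**2))
    z = mf.genFlat(Npx)
    z, R_real = mf.genLogNormSolidSph(z, pxlen, 100, R_mean, R_std)
    N_est, eff_cov = partNum(z, pxlen, R_mu, R_sigma)
    print('estimated particles:', N_est, ' effective coverage:', eff_cov)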
def plotVfracDep(Npx, pxlen, N_part_max, N_part_step, R_mean, R_std, R_mu, R_sigma):
    z = mf.genFlat(Npx)
    V_real = 0
    V_frac_list = []
    N_part = np.arange(0, N_part_max + 1, N_part_step)

    for N in N_part:
        z, R_part_real = mf.genLogNormSolidSph(z, pxlen, N_part_step, R_mean, R_std)
        V_real += np.sum(4/3 * np.pi * R_part_real**3)
        V_frac_list.append(par.V(z, pxlen) / V_real)
        print(N)

    plt.figure()
    plt.plot(N_part, V_frac_list, color='r', marker='o')
    plt.xlabel(r'$N_{part,real}$')
    plt.ylabel(r'$V_{tot,est} / V_{tot,real}$')
    plt.title(r'$\mu_R = $' + str(R_mu) + r'$nm, \sigma_R = $' + str(R_sigma) + r'$nm$'
              + r'$, R_{mean} = $' + str(R_mean) + r'$nm, R_{std} = $' + str(R_std) + r'$nm$')
    plt.grid()
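# Illustrative call of plotVfracDep (the helper name and the values are example
# choices, not taken from the original scripts); plt.show() displays the figure
# when running as a plain script.
def _example_plotVfracDep():
    R_mean, R_std = 5.0, 1.0
    R_mu = np.log(R_mean / np.sqrt(1 + R_std**2 / R_mean**2))
    R_sigma = np.sqrt(np.log(1 + (R_std / R_mean)**2))
    plotVfracDep(256, 1.0, N_part_max=200, N_part_step=20,
                 R_mean=R_mean, R_std=R_std, R_mu=R_mu, R_sigma=R_sigma)
    plt.show()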
def partDep(Npx, pxlen, step_sim, N_part_min, N_part_max, N_part_step,
            R_mu, R_sigma, firstmap='', usefile=False, savefile=False):
    # logarithmically spaced particle numbers (N_part_step points between min and max)
    N_part = np.linspace(np.log10(N_part_min), np.log10(N_part_max), N_part_step)
    N_part = np.round(10**N_part).astype(int)

    # N_est = np.array([])  # 2+1D only
    V_est = np.array([])
    rms_est = np.array([])
    h_est = np.array([])
    h_top = np.array([])

    C_true = []
    G_true = []
    C2_true = []
    G2_true = []
    C_list = []
    G_list = []

    for i in range(step_sim):
        if firstmap == '':  # initialize map
            # z = np.zeros(Npx)  # 1+1D
            z = mf.genFlat(Npx)
            N_prec = 0
            V_real = 0
        else:
            z = np.loadtxt(firstmap)
            # recover the particle number from the filename (digits before the extension)
            dotpos = firstmap.find('.')
            start = dotpos - 1
            for _ in range(dotpos - 1):
                if firstmap[start] == '_':
                    break
                start -= 1
            N_prec = int(firstmap[start + 1:dotpos])
            # recover the deposited volume from the file header
            out = open(firstmap)
            head = out.readline()
            out.close()
            start = head.find('V=') + 2
            V_real = float(head[start:head.find(';', start)])

        for N in N_part:
            print('Sim.', i + 1, '; N=', str(N), end=' ')
            fname = 'maps/lognorm_' + str(Npx) + '_' + str(pxlen) + '_' + str(N) + '.dat'

            if usefile and isfile(fname):
                print('map from file ...', end=' ')
                z = np.loadtxt(fname)
                out = open(fname)
                head = out.readline()
                out.close()
                start = head.find('V=') + 2
                V_real = float(head[start:head.find(';', start)])
            else:
                # print('generating map ...', end=' ')
                # z, R_part_real = mf.genLogNormSolidSph(z, pxlen, int(N - N_prec), R_mu, R_sigma)
                # z = mf.genLattice1d(z, pxlen, int(N - N_prec))  # 1+1D
                # V_real += (N - N_prec) * pxlen**2  # 1+1D
                z = mf.genLattice2d(z, pxlen, int(N - N_prec))
                V_real += (N - N_prec) * pxlen**3
                # V_real += 4/3 * np.pi * np.sum(R_part_real**3)
                if savefile:
                    np.savetxt(fname, z,
                               header='V=' + str(V_real) + '; Npx,pxlen,Npart in filename')
            # if savefile: np.savetxt('matrice.mat', z)

            N_prec = N

            # print('computing parameters ...')
            # N_est = np.append(N_est, partNum(z, pxlen, R_mu, R_sigma)[0] / N)
            V_est = np.append(V_est, par.V(z, pxlen) / V_real)
            rms_est = np.append(rms_est, np.std(z))
            h_est = np.append(h_est, np.mean(z))
            h_top = np.append(h_top, np.amax(z))

            # print('computing correlations ...')
            l, C = par.C_profile(z, pxlen, 1)
            C_list.append(C)
            # C_list.append(par.C_1d(z))  # 1+1D
            l, C = par.G_profile(z, pxlen, 1)
            G_list.append(C)
            # G_list.append(par.G_1d(z))  # 1+1D
            print('', end='\r')
        # print()

        # accumulate sums and sums of squares of the correlation profiles over simulations
        if i == 0:
            for el in C_list:
                C_true.append(el)
                C2_true.append(el**2)
            for el in G_list:
                G_true.append(el)
                G2_true.append(el**2)
        else:
            for k in range(len(C_true)):
                C_true[k] = C_true[k] + C_list[k]
                C2_true[k] = C2_true[k] + C_list[k]**2
            for k in range(len(G_true)):
                G_true[k] = G_true[k] + G_list[k]
                G2_true[k] = G2_true[k] + G_list[k]**2
        C_list.clear()
        G_list.clear()

    filename = ['V_relVsN.dat', 'rmsVsN.dat', 'hVsN.dat', 'maxhVsN.dat']
    est = [V_est, rms_est, h_est, h_top]

    # average each estimator over the simulations and save it together with its spread
    for j in range(len(est)):
        err = np.array([])
        for i in range(len(N_part)):
            if step_sim == 1:
                err = np.append(err, 0)
            else:
                err = np.append(err, np.std(est[j][i::len(N_part)]))
            est[j][i] = np.mean(est[j][i::len(N_part)])

        np.savetxt(filename[j], np.array([est[j][:len(N_part)], N_part, err]),
                   header=str(Npx) + ' ' + str(pxlen) + '\n' + r'$N_{px}$ $L_{px}$')
        # header=str(R_mu) + ' ' + str(R_sigma) + ' ' + str(Npx) + ' ' + str(pxlen) + '\n' +
        #        r'$\mu_R$ $\sigma_R$ $N_{px}$ $L_{px}$')
        print('data saved in ' + filename[j] + ' and in folder maps/')

    # mean and variance of the correlation profiles over the simulations
    C_true = np.array(C_true) / step_sim
    G_true = np.array(G_true) / step_sim
    C2_true = np.array(C2_true) / step_sim - C_true**2
    G2_true = np.array(G2_true) / step_sim - G_true**2

    # np.savetxt('x.dat', l)
    np.savetxt('C.dat', C_true)
    np.savetxt('G.dat', G_true)
    np.savetxt('C2.dat', C2_true)
    np.savetxt('G2.dat', G2_true)
    print('correlations saved in files C,G,C2,G2')
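# Example driver for the partDep above (a sketch with arbitrary parameters, not a
# run from the original project). It writes V_relVsN.dat, rmsVsN.dat, hVsN.dat,
# maxhVsN.dat and C.dat, G.dat, C2.dat, G2.dat to the working directory; saving
# the maps themselves (savefile=True) expects an existing maps/ folder.
def _example_partDep():
    partDep(Npx=256, pxlen=1.0, step_sim=2,
            N_part_min=10, N_part_max=1000, N_part_step=5,
            R_mu=1.5, R_sigma=0.3,
            usefile=False, savefile=False)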
def partDep_mpi(Npx, pxlen, step_sim, N_part_min, N_part_max, N_part_step,
                R_mu, R_sigma, firstmap='', usefile=False, savefile=False):
    # logarithmically spaced particle numbers
    N_part = np.linspace(np.log10(N_part_min), np.log10(N_part_max), N_part_step)
    N_part = np.round(10**N_part).astype(int)

    N_est = np.array([])
    V_est = np.array([])
    rms_est = np.array([])
    h_est = np.array([])
    h_top = np.array([])
    # L_corr_est = np.array([])
    # L_corr_err = np.array([])
    # alfa_est = np.array([])
    # alfa_err = np.array([])

    C_true = []
    G_true = []
    C2_true = []
    G2_true = []
    C_list = []
    G_list = []

    comm = MPI.COMM_WORLD
    size_mpi = comm.Get_size()
    rank = comm.Get_rank()

    # each rank runs step_sim // size_mpi independent simulations
    for i in range(step_sim // size_mpi):
        if firstmap == '':  # initialize map
            z = mf.genFlat(Npx)
            N_prec = 0
            V_real = 0
        else:
            z = np.loadtxt(firstmap)
            dotpos = firstmap.find('.')
            start = dotpos - 1
            for _ in range(dotpos - 1):
                if firstmap[start] == '_':
                    break
                start -= 1
            N_prec = int(firstmap[start + 1:dotpos])
            out = open(firstmap)
            head = out.readline()
            out.close()
            start = head.find('V=') + 2
            V_real = float(head[start:head.find(';', start)])

        for N in N_part:
            print('Sim.', i + 1, '; N=', str(N), end=' ')
            fname = 'maps/lognorm_' + str(Npx) + '_' + str(pxlen) + '_' + str(N) + '.dat'

            if usefile and isfile(fname):
                print('map from file ...', end=' ')
                z = np.loadtxt(fname)
                out = open(fname)
                head = out.readline()
                out.close()
                start = head.find('V=') + 2
                V_real = float(head[start:head.find(';', start)])
            else:
                print('generating map ...', end=' ')
                z, R_part_real = mf.genLogNormSolidSph(z, pxlen, int(N - N_prec), R_mu, R_sigma)
                mf.plotfalsecol(z, pxlen)
                V_real += np.sum(4/3 * np.pi * R_part_real**3)
                if savefile:
                    np.savetxt(fname, z,
                               header='V=' + str(V_real) + '; Npx,pxlen,Npart in filename')

            N_prec = N

            print('computing parameters ...')
            N_est = np.append(N_est, partNum(z, pxlen, R_mu, R_sigma)[0] / N)
            V_est = np.append(V_est, par.V(z, pxlen) / V_real)
            rms_est = np.append(rms_est, np.std(z))
            h_est = np.append(h_est, np.mean(z))
            h_top = np.append(h_top, np.amax(z))

            # print('computing correlations ...')
            l, C = par.C_profile(z, 2, 800)
            C_list.append(C)
            l, C = par.G_profile(z, 2, 800)
            G_list.append(C)

        # accumulate sums and sums of squares of the correlation profiles
        if i == 0:
            for el in C_list:
                C_true.append(el)
                C2_true.append(el**2)
            for el in G_list:
                G_true.append(el)
                G2_true.append(el**2)
        else:
            for k in range(len(C_true)):
                C_true[k] = C_true[k] + C_list[k]
                C2_true[k] = C2_true[k] + C_list[k]**2
            for k in range(len(G_true)):
                G_true[k] = G_true[k] + G_list[k]
                G2_true[k] = G2_true[k] + G_list[k]**2
        C_list.clear()
        G_list.clear()

    # gather the estimators on rank 0 (lowercase send/recv: pickled Python objects)
    est = [N_est, V_est, rms_est, h_est, h_top]
    if rank != 0:
        comm.send(est, dest=0)
    else:
        for src in range(1, size_mpi):
            rec = comm.recv(source=src)
            for k in range(len(est)):
                est[k] = np.append(est[k], rec[k])

    # per-rank means of C, C^2, G, G^2 over the local simulations
    n_local = step_sim // size_mpi
    C_true = np.array(C_true) / n_local
    G_true = np.array(G_true) / n_local
    C2_true = np.array(C2_true) / n_local
    G2_true = np.array(G2_true) / n_local

    corr = [C_true, G_true, C2_true, G2_true]
    if rank != 0:
        comm.send(corr, dest=0)
    else:
        for src in range(1, size_mpi):
            rec = comm.recv(source=src)
            for k in range(4):
                corr[k] = corr[k] + rec[k]
        # global means, then variances as <x^2> - <x>^2
        corr[0] = corr[0] / size_mpi
        corr[1] = corr[1] / size_mpi
        corr[2] = corr[2] / size_mpi - corr[0]**2
        corr[3] = corr[3] / size_mpi - corr[1]**2

    # only rank 0 holds the merged results and writes the output files
    if rank == 0:
        filename = ['N_relVsN.dat', 'V_relVsN.dat', 'rmsVsN.dat', 'hVsN.dat', 'maxhVsN.dat']
        for j in range(len(est)):
            err = np.array([])
            for i in range(len(N_part)):
                if step_sim == 1:
                    err = np.append(err, 0)
                else:
                    err = np.append(err, np.std(est[j][i::len(N_part)]))
                est[j][i] = np.mean(est[j][i::len(N_part)])

            np.savetxt(filename[j], np.array([est[j][:len(N_part)], N_part, err]),
                       header=str(R_mu) + ' ' + str(R_sigma) + ' ' + str(Npx) + ' ' + str(pxlen)
                              + '\n' + r'$\mu_R$ $\sigma_R$ $N_{px}$ $L_{px}$')
            print('data saved in ' + filename[j] + ' and in folder maps/')

        np.savetxt('correlations.dat',
                   np.array([l * pxlen, corr[0], corr[1], corr[2], corr[3]]), fmt='%s')
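# partDep_mpi distributes the step_sim independent simulations over the MPI ranks,
# so the script has to be launched through the MPI runtime, e.g. (command and
# script name are illustrative):
#
#   mpirun -n 4 python deposition_script.py
#
# A sketch of a per-rank entry point, assuming step_sim is a multiple of the
# number of ranks; all numeric values are example choices.
def _example_partDep_mpi():
    partDep_mpi(Npx=256, pxlen=1.0, step_sim=4,
                N_part_min=10, N_part_max=1000, N_part_step=5,
                R_mu=1.5, R_sigma=0.3,
                usefile=False, savefile=False)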
pxlen = L / Npx
thres = 0  # threshold for object identification
# -----------------------------------------------
R = q * L
htip = 2 * R * 1.02
R_tip = np.linspace(R/5, 3*R, 8)

V_red_dil = []
height = []
profiles = []
posmax = []

# reference sphere at the highest resolution (no tip dilation)
z = mf.genFlat(int(Npx[-1]))
z = mf.genSphere(z, pxlen[-1], np.array([L/2, L/2]), np.array([R]))
obj = mf.identObj(z, thres)[0]
V_red_calc = par.V(obj, pxlen[-1]) / volsphere(R)

# column-wise height profile of the identified object, normalised to R
maxh = 0
posmax.append(0)
for x in range(np.shape(obj)[0]):
    height.append(np.amax(obj[0:, x]) / R)
    if maxh < height[-1]:
        maxh = height[-1]
        posmax[-1] = x
profiles.append(np.array(height))
height.clear()

for rtip in R_tip:
    for i in range(len(Npx)):  # iterate over resolution
        print('R_tip=', rtip, ' Npx=', int(Npx[i]))
        z = mf.genFlat(int(Npx[i]))
def partDep(Npx, pxlen, step_sim, N_part_min, N_part_max, N_part_step,
            R_mean, R_std, param, firstmap='', usefile=False, savefile=False):
    # NOTE: the estimator selector was renamed from 'par' to 'param' so that it
    # does not shadow the par module used below.
    N_part = np.linspace(np.log10(N_part_min), np.log10(N_part_max), N_part_step)
    N_part = np.round(10**N_part).astype(int)

    # log-normal parameters (mu, sigma of log R) recalculated from the linear mean and std
    R_mu = np.log(R_mean / np.sqrt(1 + R_std**2 / R_mean**2))
    R_sigma = np.sqrt(np.log(1 + (R_std / R_mean)**2))

    est_list = []
    wl_list = []

    for i in range(step_sim):
        if firstmap == '':  # initialize map
            z = mf.genFlat(Npx)
            N_prec = 0
            V_real = 0
        else:
            z = np.loadtxt(firstmap)
            dotpos = firstmap.find('.')
            start = dotpos - 1
            for _ in range(dotpos - 1):
                if firstmap[start] == '_':
                    break
                start -= 1
            N_prec = int(firstmap[start + 1:dotpos])
            out = open(firstmap)
            head = out.readline()
            out.close()
            start = head.find('V=') + 2
            V_real = float(head[start:head.find(';', start)])

        for N in N_part:
            print('Sim.', i + 1, 'N=', str(N), end=' ')
            fname = 'maps/lognorm_' + str(Npx) + '_' + str(pxlen) + '_' + str(N) + '.dat'

            if usefile and isfile(fname):
                print('map from file ...')
                z = np.loadtxt(fname)
                out = open(fname)
                head = out.readline()
                out.close()
                start = head.find('V=') + 2
                V_real = float(head[start:head.find(';', start)])
            else:
                print('generating map ...')
                z, R_part_real = mf.genLogNormSolidSph(z, pxlen, int(N - N_prec), R_mean, R_std)
                V_real += np.sum(4/3 * np.pi * R_part_real**3)
                if savefile:
                    np.savetxt(fname, z,
                               header='V=' + str(V_real) + '; Npx,pxlen,Npart in filename')

            N_prec = N

            if param in ('N', 'N_part', 'Npart'):
                est_list.append(partNum(z, pxlen, R_mu, R_sigma)[0])
            if param in ('V', 'V_rel', 'V_frac'):
                est_list.append(par.V(z, pxlen) / V_real)
            if param in ('rms', 'RMS', 'std'):
                est_list.append(np.std(z))
            wl_list.append(par.wavelength(z, pxlen, 'x'))

    if param in ('N', 'N_part', 'Npart'):
        filename = 'N_relVsN_real.dat'
    if param in ('V', 'V_rel', 'V_frac'):
        filename = 'V_relVsV_real.dat'
    if param in ('rms', 'RMS', 'std'):
        filename = 'rmsVsN_real.dat'

    err = []
    for i in range(len(N_part)):
        if step_sim == 1:
            err.append(0)
        else:
            err.append(np.std(np.array(est_list[i::len(N_part)])))
        est_list[i] = np.mean(np.array(est_list[i::len(N_part)]))
        wl_list[i] = np.mean(np.array(wl_list[i::len(N_part)]))

    np.savetxt(filename,
               np.array([est_list[:len(N_part)], N_part, err, wl_list[:len(N_part)]]),
               header=str(R_mu) + ' ' + str(R_sigma) + ' ' + str(R_mean) + ' ' + str(R_std) + ' '
                      + str(Npx) + ' ' + str(pxlen) + '\n'
                      + r'$\mu_R$ $\sigma_R$ $R_{mean}$ $R_{std}$ $N_{px}$ $L_{px}$'
                      + '\n wavelength on last line')
    print('data saved in ' + filename + ' and in folder maps/')
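# Example call of this second partDep variant (a sketch; the helper name and all
# values are example choices). The param string selects which estimator is
# accumulated ('N'/'N_part'/'Npart', 'V'/'V_rel'/'V_frac', or 'rms'/'RMS'/'std'),
# and the surface wavelength is saved alongside it in the output file.
def _example_partDep_param():
    partDep(Npx=256, pxlen=1.0, step_sim=3,
            N_part_min=10, N_part_max=1000, N_part_step=5,
            R_mean=5.0, R_std=1.0, param='V',
            usefile=False, savefile=False)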