import numpy as np
import matplotlib.pyplot as plt
import scipy.optimize
import scipy.optimize as opt

# Project-local modules used throughout this file (assumed importable from the package):
# graphes, basics, cdata, access, vgradient, check, Fourier, corr, decay,
# fitting, statP, rw_data, strain_tensor, tangle, Smath


def initial_shape(offset=0):
    zmin = -500 + offset
    zmax = 500 + offset
    # Dz = 45
    dz = 1
    z = np.arange(zmin, zmax, dz)

    Dz = 200
    U0 = 2.5 * 10**3
    delta = 10.
    U1 = U0 * np.exp(-Dz / delta)
    # delta = Dz / np.log(U0/U1)

    z0 = 0 + offset
    z1 = Dz + offset

    U = np.zeros(len(z))
    for i, zi in enumerate(z):
        if zi < z0:
            U[i] = U0
        if zi >= z0 and zi < z1:
            U[i] = U0 * np.exp((z0 - zi) / delta)
        if zi >= z1:
            U[i] = U0 * np.exp(-Dz / delta)

    graphes.semilogy(z, U, label='r')
    graphes.legende('z (mm)', 'U (mm/s)', '')
    graphes.set_axis(-50, 150, 10**0, 5 * 10**3)

    return z, U
def velocity_profile(M, xlines, ylines, display=True, start=0, end=10000, label='k^'):
    nx, ny, nt = M.shape()
    nt = min(nt, end)
    U = np.sqrt(M.Ux[:, :, start:nt]**2 + M.Uy[:, :, start:nt]**2)

    label = ['k^', 'rx', 'bo']
    Dt = 10
    t = M.t[start + Dt:nt - Dt]

    Ut = []
    for i in ylines:
        for j in xlines:
            Ut.append(basics.smooth(U[i, j], Dt))
            # [np.mean(S.Uy[i,j,k-Dt:k+Dt]) for k in range(Dt,nt-Dt)]
            # std_U=[np.std(U[i,j,k-Dt:k+Dt]) for k in range(Dt,nt-Dt)]
            if display:
                graphes.graph(t, Ut[-1])
                graphes.legende('t (ms)', 'V (m/s)', '')

    # return a list of time series, one for each element in xlines and ylines
    return t, Ut
def velocity_profile_xy(S, xlines, ylines, display=False, label='k^'):
    nx, ny, nt = S.shape()
    label = ['k^', 'rx', 'bo']
    t = S.t
    Dt = 5

    Uxt = []
    Uyt = []
    for i in ylines:
        for j in xlines:
            # std_Ux=[np.std(S.Ux[i,j,k-Dt:k+Dt]) for k in range(Dt,nt-Dt)]
            # std_Uy=[np.std(S.Uy[i,j,k-Dt:k+Dt]) for k in range(Dt,nt-Dt)]
            Uxt.append(basics.smooth(S.Ux[i, j], Dt))
            # (-1)**i*(-1)**j*[(-1)**i*(-1)**j*np.mean(S.Ux[i,j,k-Dt:k+Dt]) for k in range(Dt,nt-Dt)]
            Uyt.append(basics.smooth(S.Uy[i, j], Dt))
            # [np.mean(S.Uy[i,j,k-Dt:k+Dt]) for k in range(Dt,nt-Dt)]

            if display:
                # plt.subplot(211)
                graphes.graph(t[Dt:-Dt], Uxt[-1])  # ,std_Ux)
                graphes.legende('t (ms)', 'V (m/s)', 'Ux')

                # plt.subplot(212)
                graphes.graph(t[Dt:-Dt], Uyt[-1])  # ,std_Uy)
                graphes.legende('t (ms)', 'V (m/s)', 'Uy')

    return t, Uxt, Uyt
def velocity_distribution(M, start, end, display=False):
    # compute the distribution of velocity for Ux, Uy and U for all the
    # individual measurements between start and end
    # subtract the mean flow at each point
    M = cdata.rm_nan(M, 'Ux')
    M = cdata.rm_nan(M, 'Uy')

    (nx, ny, n) = M.shape()
    nt = end - start

    Ux = np.reshape(M.Ux[:, :, start:end], (nx * ny * nt,))
    Uy = np.reshape(M.Uy[:, :, start:end], (nx * ny * nt,))
    Ux_rms = np.std(Ux)
    Uy_rms = np.std(Uy)

    Ux_moy = np.reshape(np.mean(M.Ux[:, :, start:end], axis=2), (nx, ny, 1))
    Uy_moy = np.reshape(np.mean(M.Uy[:, :, start:end], axis=2), (nx, ny, 1))
    Ux_m = np.reshape(np.dot(Ux_moy, np.ones((1, 1, nt))), (nx, ny, nt))
    Uy_m = np.reshape(np.dot(Uy_moy, np.ones((1, 1, nt))), (nx, ny, nt))

    # Ux=np.reshape(M.Ux[:,:,start:end]-Ux_m,(nx*ny*nt,))
    # Uy=np.reshape(M.Uy[:,:,start:end]-Uy_m,(nx*ny*nt,))
    Ux = np.reshape(M.Ux[:, :, start:end], (nx * ny * nt,))
    Uy = np.reshape(M.Uy[:, :, start:end], (nx * ny * nt,))

    # U_s=np.zeros(len(Ux)+len(Uy))
    U_s = np.concatenate((Ux, Uy))
    # U=np.sqrt(Ux**2+Uy**2)

    # normalized by the RMS velocity :
    Uxt_rms = np.std(Ux)
    Uyt_rms = np.std(Uy)
    U_rms = np.std(U_s)
    print('RMS velocity : ' + str(U_rms) + ' m/s')

    mid = (start + end) // 2

    # Normalisation by the temporal decay function
    Nvec = (M.t[mid] / 100)**(-1)
    Nvec = 1
    if display:
        print(max(U_s))
        print(min(U_s))
        print(U_s.shape)
        print(Nvec)
        # graphes.hist(Ux,Nvec,0,100,'o')
        # graphes.hist(Uy,Nvec,0,100,'s')
        graphes.hist(U_s, Nvec, fignum=1, num=10**4, label='o')

        title = ''
        # title='Z= '+str(M.param.Zplane)+' mm, t='+str(M.t[mid])+' ms'+', Dt = '+str(nt*M.ft)+' ms'
        graphes.legende('$U_{x,y} (m/s)$', '$pdf(U)$', title)
        # fields={'Z':'Zplane','t',}
        # graphes.set_title(M,fields)

    return Ux_rms, Uy_rms, Uxt_rms, Uyt_rms
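
# Minimal, self-contained sketch of the same idea on synthetic data (not the Mdata
# pipeline; array shapes are illustrative): pool both velocity components, compute
# the RMS velocity, and histogram the normalized fluctuations.
def _example_velocity_pdf():
    rng = np.random.default_rng(0)
    ux = rng.normal(0., 1., size=(32, 32, 100))   # stand-in for M.Ux[:, :, start:end]
    uy = rng.normal(0., 1., size=(32, 32, 100))   # stand-in for M.Uy[:, :, start:end]
    u_s = np.concatenate((ux.ravel(), uy.ravel()))
    u_rms = np.std(u_s)
    counts, edges = np.histogram(u_s / u_rms, bins=100, density=True)
    centers = 0.5 * (edges[:-1] + edges[1:])
    plt.semilogy(centers, counts, 'o')
    plt.xlabel('$U_{x,y}/U_{rms}$')
    plt.ylabel('pdf')
    plt.show()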
def vertical_profile(S, xlines, Dt, start=0):
    nx, ny, nt = S.shape()
    y = S.y[:, 0]
    for i in range(start, nt, Dt):
        Ux = np.mean(np.mean(S.Ux[:, xlines, i:i + Dt], axis=1), axis=1)
        Uy = np.mean(np.mean(S.Uy[:, xlines, i:i + Dt], axis=1), axis=1)

        # standard deviation computation (broadcast the averaged profile over x-lines and time)
        std_Ux = np.sqrt(np.mean(np.mean(abs(S.Ux[:, xlines, i:i + Dt] - Ux[:, None, None])**2, axis=1), axis=1))
        std_Uy = np.sqrt(np.mean(np.mean(abs(S.Uy[:, xlines, i:i + Dt] - Uy[:, None, None])**2, axis=1), axis=1))
        print(std_Ux)

        plt.subplot(121)
        graphes.graph(y, Ux, std_Ux)
        graphes.legende('z (m)', 'V (m/s)', 'Ux')

        plt.subplot(122)
        graphes.graph(y, Uy, std_Uy)
        graphes.legende('z (m)', 'V (m/s)', 'Uy')

        plt.draw()
        input()
def shear_limit_M(M, W, Dt, type=1, **kwargs):
    """
    Test the shear criterion : dU/W < 0.1
    """
    M, field = vgradient.compute(M, 'strain', step=1, filter=False, Dt=1,
                                 rescale=False, type=type, compute=False)
    values = getattr(M, field)  # /W

    dUmin, dUmax = check.shear_limit_M(M, W)

    xbin, n = graphes.hist(values, normalize=False, num=200, range=(-0.5, 0.5), **kwargs)
    # xfactor = Dt
    maxn = max(n) * 1.2

    graphes.graph([dUmin, dUmin], [0, maxn], label='r-', **kwargs)
    graphes.graph([dUmax, dUmax], [0, maxn], label='r-', **kwargs)
    graphes.legende('', '', '')
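
# Hedged sketch of the shear criterion named in the docstring, on synthetic data
# (the actual bounds come from check.shear_limit_M, whose internals are not shown
# here): the velocity difference dU across one interrogation window of width W
# (in pixels) should stay below roughly 0.1 * W.
def _example_shear_criterion(W=32, threshold=0.1):
    rng = np.random.default_rng(1)
    u = rng.normal(0., 1., size=(64, 64))     # displacement field in pixels (synthetic)
    dU = np.abs(np.diff(u, axis=0))           # velocity difference between neighbouring vectors
    ok = np.mean(dU / W < threshold)          # fraction of points satisfying dU/W < 0.1
    print('fraction within shear limit: %.2f' % ok)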
def fit_core_size(x, y, Z, fignum=1, display=False):
    """ Find the half width of a gaussian bump

    INPUT
    -----
    x : 2d np array
        spatial coordinates (columns)
    y : 2d np array
        spatial coordinates (lines)
    Z : 2d np array
        data to be fitted (typically a vorticity field)
    fignum : int
        figure number for the output. Default 1
    display : bool

    OUTPUT
    -----
    a : float
        parameter of the gaussian bump
    center : 2 element np array
        center coordinates
    """
    ny, nx = Z.shape
    X, Y, data, center, factor = normalize(x, y, Z)
    R, theta = Smath.cart2pol(X, Y)

    a0 = 1
    fun = gaussian
    res = opt.minimize(distance_fit, a0, args=(fun, R, data))

    cond = ((center[0] > 5) and (center[0] < nx - 5) and (center[1] > 5)
            and (center[1] < ny - 5))
    if cond:
        a = np.sqrt(res.x)
    else:
        a = None

    if display:
        figs = {}
        graphes.graph(R, factor * data, fignum=3, label='ko')
        graphes.graph(R, factor * gaussian(res.x, R), label='r.', fignum=fignum + 2)
        graphes.set_axis(0, 20, 0, factor * 1.1)
        figs.update(graphes.legende('r (mm)', 'Vorticity (s^{-1})', ''))

        graphes.cla(fignum=fignum)
        graphes.color_plot(X, Y, factor * data, fignum=fignum + 3)
        graphes.colorbar()
        figs.update(graphes.legende('X (mm)', 'Y (mm)', 'Vorticity', display=False, cplot=True))
        return a, center, figs
    else:
        return a, center
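
# Hedged, self-contained sketch of the same fitting idea (the module's own
# `gaussian`, `distance_fit` and `normalize` helpers are not reproduced here):
# fit a radially symmetric gaussian exp(-r**2 / a**2) to noisy data and read the
# core size from the fitted parameter.
def _example_fit_core_size():
    rng = np.random.default_rng(2)
    r = np.linspace(0., 20., 200)                  # radial coordinate (mm)
    a_true = 4.0
    data = np.exp(-r**2 / a_true**2) + 0.02 * rng.normal(size=r.shape)

    def cost(p):
        # least-squares distance between the gaussian model and the data
        return np.sum((np.exp(-r**2 / p[0]**2) - data)**2)

    res = opt.minimize(cost, x0=[1.0])
    print('fitted core size a = %.2f mm' % res.x[0])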
def display_profile(x, V, label='k', axe=2, fignum=0):
    x = np.asarray(x)
    z = x[:, axe]
    labels = ['Ux', 'Uy', 'Uz']
    for i in range(3):
        graphes.graph(z, V[:, i], fignum=-i + 3 + fignum, label=label)
        graphes.legende(labels[i][1] + ' (au)', 'V ', labels[i])
def plot(p, tmin, tmax, label='', c=True, fignum=0):
    """ Plot the position of the vortex as a function of time.

    p is a dictionary obtained from track.position
    tmin : minimum index
    tmax : maximum index
    """
    figs = {}
    keys = ['Xmax', 'Xmin', 'Ymin', 'Ymax']
    subplot = {'Xmax': 121, 'Xmin': 121, 'Ymax': 122, 'Ymin': 122}

    fig1 = graphes.set_fig(fignum + 1)
    fig1.set_size_inches(10, 4)

    accurate = {key: None for key in keys}
    for key in keys:
        # print(p[key][tmin:tmax])
        if c:
            p[key][tmin:tmax], accurate[key] = correct(p[key][tmin:tmax], a=5.)
        else:
            accurate[key] = True

        if 'Y' in key:
            # print('invert !')
            if np.nanmean(p[key]) < 0:
                p[key] = -np.asarray(p[key])  # X axis is inverted !

        if accurate[key]:
            graphes.set_fig(fignum + 1, subplot[key])
            graphes.graph(p['t'], p[key], fignum=fignum + 1, label=label)
            figs.update(graphes.legende('Time (s)', key[0] + ' position (mm)', ''))
            if 'Y' in key:
                graphes.set_axis(0.05, p['t'][tmax], 0, 100)
            else:
                graphes.set_axis(0.05, p['t'][tmax], -50, 50)

    p['d'] = np.sqrt((np.asarray(p['Xmin']) - np.asarray(p['Xmax']))**2
                     + (np.asarray(p['Ymin']) - np.asarray(p['Ymax']))**2)
    graphes.graph(p['t'], p['d'][tmin:tmax], fignum=fignum + 2, label=label)
    graphes.set_axis(0, p['t'][tmax], 0, 50)
    figs.update(graphes.legende('Time (s)', 'Distance (mm)', ''))

    if accurate['Xmin'] and accurate['Ymin']:
        graphes.graph(p['Ymin'][tmin:tmax], p['Xmin'][tmin:tmax], fignum=fignum + 3, label=label)
        figs.update(graphes.legende('X position (mm)', 'Y position (mm)', ''))
        graphes.set_axis(0, 60, -50, 50)
    if accurate['Xmax'] and accurate['Ymax']:
        graphes.graph(p['Ymax'][tmin:tmax], p['Xmax'][tmin:tmax], fignum=fignum + 3, label=label)

    graphes.graph(p['t'], p['Gammamax'], fignum=fignum + 4, label=label)
    figs.update(graphes.legende('Time (s)', 'Circulation (mm^2/s)', ''))
    graphes.set_axis(p['t'][tmin], p['t'][tmax], 0, 5 * 10**4)

    return figs, accurate
def plots(eigen, omega, cosine, step):
    """ Make plots of geometrical quantities associated with the strain tensor
    (eigenvalues, vorticity and stretching vector)

    INPUT
    -----
    eigen : dictionary containing the eigenvalues Lambda and eigenvectors lambda
    omega : dictionary containing the components of the vorticity field
    cosine : orientation angle between lambda and omega
    step : average number of data points per bin

    OUTPUT
    -----
    figs : dict
        dictionary of output figures; each key corresponds to the figure number and the
        associated value is a title in string format (root name for an eventual saving process)
    """
    figs = {}
    # print('Epsilon : ')
    graphes.hist(eigen['epsilon'], label='k', step=step, fignum=1)
    figs.update(graphes.legende('$\epsilon$', 'PDF', '', display=False))

    label = ['k', 'b', 'r']
    if True:
        # for i,key in enumerate(eigen.keys()):
        #     k='Lambda_'
        #     if key.find(k)>=0:
        #         j=int(key[len(k)])
        #         hist(eigen_t[key],label=label[j],step=step,fignum=2+j)
        #         plt.title(key)
        enstrophy = norm(omega, axis=3)
        graphes.hist(enstrophy, label='r', step=step, fignum=2)
        figs.update(graphes.legende('$\omega$', 'PDF', '', display=False))

    # if False:
    for i, key in enumerate(cosine.keys()):
        # print(key)
        keys = ['lambda_omega_', 'lambda_W_']
        for z, k in enumerate(keys):
            if key.find(k) >= 0:
                j = int(key[len(k)])
                # print(j)
                graphes.hist(cosine[key], label=label[j], step=step, fignum=5 + 3 * z + j)
                if z == 0:
                    figs.update(graphes.legende('cos($\lambda_' + str(3 - j) + ',\omega$)', 'PDF', '', display=False))
                if z == 1:
                    figs.update(graphes.legende('cos($\lambda_' + str(3 - j) + ',W$)', 'PDF', '', display=False))

        if key.find('W_omega') >= 0:
            # print(step)
            graphes.hist(cosine[key], label='k', step=step, fignum=15)
            figs.update(graphes.legende('cos($\omega,W$)', 'PDF', '', display=False))

    # print(figs)
    return figs
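
# Hedged sketch of the alignment statistic histogrammed above, on a single synthetic
# velocity gradient tensor (the module's own strain-tensor pipeline is not shown):
# the rate-of-strain tensor is S = (A + A.T)/2, the vorticity vector comes from the
# antisymmetric part of A, and the quantity of interest is the cosine of the angle
# between omega and each eigenvector of S.
def _example_alignment_cosines():
    rng = np.random.default_rng(3)
    A = rng.normal(size=(3, 3))                       # velocity gradient tensor dU_i/dx_j
    S = 0.5 * (A + A.T)                               # rate-of-strain tensor
    omega = np.array([A[2, 1] - A[1, 2],
                      A[0, 2] - A[2, 0],
                      A[1, 0] - A[0, 1]])             # vorticity vector (curl components)
    eigval, eigvec = np.linalg.eigh(S)                # eigenvalues in ascending order
    cosines = eigvec.T @ omega / np.linalg.norm(omega)
    print('eigenvalues:', eigval)
    print('cos(lambda_i, omega):', cosines)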
def compute_Ct(M, tlist=[], axes=['Ux', 'Ux'], p=1, display=False, label='ko', fignum=1):
    display_part = False
    if tlist == []:
        t0 = 20
        Dt = 50
        dimensions = M.shape()
        tlist = range(t0, dimensions[2] - t0, Dt)

    tau = np.zeros(len(tlist))
    tf = np.zeros(len(tlist))
    for i, t in enumerate(tlist):
        X, Y, Yerr = stat_corr_t(M, t, axes=axes, p=1, display=False)
        try:
            popt, pcurv = scipy.optimize.curve_fit(fitting.exp, np.abs(X), Y)
        except ValueError:
            print("NaN values encountered, fit skipped")
            X, Y, Yerr = stat_corr_t(M, t, axe='E', p=1, display=True)
            # input()
            pcurv = []
            popt = [1]
        except RuntimeError:
            print("Fitting did not converge, arbitrarily set to the previous value")
            pcurv = []
            if i == 0:
                popt = [1]
            else:
                popt = [tau[i - 1]]

        tf[i] = M.t[t]
        tau[i] = -1 / popt[0]
        # print(str(M.t[t]) + ' : ' + str(tau[i]))

        if display_part:
            texp = np.abs(X)
            graphes.set_fig(1)
            graphes.errorbar(texp, Y, texp * 0, Yerr, fignum=0, label='ko')
            graphes.graph(texp, fitting.exp(texp, -1 / tau[i]), fignum=1, label='r')
            graphes.legende('$t/u^{2m}$', '$C_t$', '$m=1/2$')

    if display:
        graphes.graphloglog(tf, tau, fignum=fignum, label=label)
        graphes.legende('t (s)', 't_c', graphes.title(M))

    return tf, tau
def multi_plane_measurements(Mlist, Dt, N):
    ti = 500
    n = 13
    # Dtlist=[2,4,6,8,10,20,30,50,70,100,150,200,500]
    m = len(Mlist)

    t = np.zeros((n, m))
    Ux_rms = np.zeros((n, m))
    Uy_rms = np.zeros((n, m))
    Uxt_rms = np.zeros((n, m))
    Uyt_rms = np.zeros((n, m))

    Zlist = [M.param.Zplane for M in Mlist]
    Dtlist = [Dt]

    for M in Mlist:
        j = Mlist.index(M)
        print(j)
        for i in range(n):
            t0 = (i + 1) * N + ti
            mid = t0 + Dt // 2
            t[i] = M.t[mid]
            # Sdata_measure.velocity_distribution(M_1000fps,t0,t0+Dt)
            if i == 0:
                fig = j * 2 + 1
                plt.figure(fig)
            # Ux_rms[i,j],Uy_rms[i,j],Uxt_rms[i,j],Uyt_rms[i,j]=Sdata_measure.velocity_distribution(M,t0,t0+Dt)

        Dir = M.fileDir + 'Velocity_Distribution' + '/'
        file = graphes.set_title(M, 'Dt=' + str(Dt / 10) + ' ms' + 'pdf_U')
        filename = Dir + file
        print(Dir)
        print(filename)
        # graphes.save_fig(fig,filename,Dir)

    U_rms = (Ux_rms + Uy_rms) / 2
    Ut_rms = (Uxt_rms + Uyt_rms) / 2

    graphes.graph(Zlist, U_rms[0, :], -1, 'o')
    graphes.graph(Zlist, Ut_rms[0, :], 0, '^')
    graphes.graph(Zlist, U_rms[0, :], -1, '+--')
    for i in range(1, n):
        graphes.graph(Zlist, (U_rms[i, :] * t[i] / t[0]), 0, '+--')

    graphes.set_axes(-110, 50, 0, 1)
    graphes.legende('$Z (mm)$', '$(t/t_0) <Urms_{xi}>_{Dt,x,y}$', '')
    file = graphes.set_title(M, 'U_rms_vs_t')
    filename = Dir + file
def make_plot_lin(Mlist, Range=None, color='k', label=None, field=[['Ux', 'Uy'], ['omega']],
                  Dirbase=None, Dt=1, example=False, total=True, fignum=1, save=True):
    M = Mlist[0]
    if Dirbase is None:
        Dirbase = '/Users/stephane/Documents/Experiences_local/Accelerated_grid/PIV_data/Test6/'  # local saving
        Dirbase = './Stat_avg/Panel/' + M.Id.date

    axes = panel_graphs(M, subplot=[2, 3], fignum=fignum)
    frames = select_range(M, Range)

    figs = {}
    if hasattr(M, 'Id'):
        Dirname = Dirbase + '/' + M.Id.get_id() + '/' + graphes.remove_special_chars(str(field)) + '/'
    else:
        Dirname = Dirbase + '/JHTD_Data/' + graphes.remove_special_chars(str(field)) + '/'
    print(Dirname)

    if Dt > 1:
        print('Smoothed data')
        Dirname = Dirname + 'Smooth_Dt_' + str(int(Dt)) + '/'

    figs.update(plot_scales(Mlist, axes, fignum=fignum, color=color, label=label))

    plt.sca(axes[2])
    frame = 1500
    Dt = 1400
    if label is None:
        labels = ['m^', 'b>', 'ko']
    else:
        labels = [label, label, label]

    for i, f in enumerate(field[0]):  # should contain either one or two fields
        figs.update(graphes.pdf_ensemble(Mlist, f, frame, Dt=Dt, fignum=fignum, label=labels[i], norm=False))
        figs.update(graphes.legende(f, 'pdf of ' + f, ''))

    plt.sca(axes[3])
    for f in field[1]:
        figs.update(graphes.pdf_ensemble(Mlist, f, frame, Dt=Dt, fignum=fignum, label=labels[2], norm=False))
        figs.update(graphes.legende(f, 'pdf of ' + f, ''))

    plt.sca(axes[4])
    corr.corr_v_t(Mlist, frame, axes=['Ux', 'Ux'], N=200, p=1, display=True, save=False, label=labels[0], fignum=fignum)
    corr.corr_v_t(Mlist, frame, axes=['Uy', 'Uy'], N=200, p=1, display=True, save=False, label=labels[1], fignum=fignum)

    plt.sca(axes[5])
    corr.corr_v_t(Mlist, frame, axes=['omega', 'omega'], N=200, p=1, display=True, save=False, label=labels[2], fignum=fignum)

    if save:
        graphes.save_figs(figs, savedir=Dirname, prefix='General', suffix='_vs_t', dpi=300, frmt='png', display=True)
    else:
        return figs, Dirname
def display_fft(m, i, tag):
    # to be replaced by m.z
    if hasattr(m.Sdata.param, 'Zplane'):
        Z = m.Sdata.param.Zplane / 10
    else:
        Z = -10
    title = '$Z$ = ' + str(Z) + ' cm, $t$ = ' + str(m.t[i]) + ' ms'
    Dir = m.fileDir + 'FFT_vs_t_part_' + tag + '_' + m.id.get_id() + '/'

    if tag == '1d':
        graphes.legende('$k$ (mm$^{-1}$)', '$E_k$ (a.u.)', title)
    if tag == '2d':
        graphes.legende('$k_x$ (mm$^{-1}$)', '$k_y$ (mm$^{-1}$)', title)
def from_circulation_2(M, fignum=1, display=True):
    # R_list,Gamma,center,factor = compute_circulation_2(M,fignum=fignum)
    lc, G0, center = fit_core_circulation(M, fignum=fignum, display=True)

    nx, ny, nt = M.shape()
    for i in range(nt):
        R_list, Gamma, center, factor = circulation_2(M, i)

        graphes.graph(R_list, Gamma * factor, fignum=fignum, label='k^')
        graphes.legende('r (mm)', 'Circulation (mm^2/s)', '')
        graphes.set_axis(0, 12., -7000, 500)

    return None
def distribution(sigma, n, display=False):
    n_p = 1
    theta, N = theta_axis(n, N=None)
    r0 = 1
    base = np.asarray([[r0 * np.cos(k), r0 * np.sin(k), 0] for k in theta])

    paths = []
    for p in range(n_p):
        # print(p)
        t = noise(base, sigma, n)
        paths.append(t.paths[0])
        # h = helicity(t)
        # graphes.hist(h,fignum=2)
        if p < 3:
            savename = './Random_path/Tests/Examples/sigma_' + str(round(sigma * 1000)) + 'm_n' + str(n) + '_' + str(p + 1)
            save(t, prefix=savename)

    t_tot = tangle.Tangle(paths)
    if display:
        figs = radial_density(t_tot)
        figs.update(graphes.legende('R', 'PDF(R)', ''))
        # graphes.save_figs(figs,prefix='Random_path/Tests/R_Distributions/',suffix='_sigma_'+str(round(sigma*1000))+'m',dpi=300,display=True,frmt='png')

    return t
def test_bound(dataList, W, Dt, **kwargs):
    maxn = 0
    Umin, Umax = bounds_pix(W)

    ratio = []
    for data in dataList:
        # values = np.asarray(data['u'])**2 + np.asarray(data['v'])**2
        values = np.sqrt(np.asarray(data['u'])**2 + np.asarray(data['v'])**2)
        r = len(np.where(np.logical_and(values > Umin, values < Umax))[0]) * 100. / len(data['u'])
        ratio.append(r)

        xbin, n = graphes.hist(values, normalize=False, num=200, range=(0., 2 * Umax), **kwargs)
        # xfactor = Dt
        maxn = max([maxn, max(n) * 1.2])

    ratio = np.nanmean(np.asarray(ratio))

    graphes.graph([Umin, Umin], [0, maxn], label='r-', **kwargs)
    graphes.graph([Umax, Umax], [0, maxn], label='r-', **kwargs)
    graphes.set_axis(0, Umax * 1.2, 0, maxn)
    title = 'Dt = ' + str(Dt) + ', W = ' + str(W) + 'pix'
    fig = graphes.legende('U (pix)', 'Histogram of U', title)
    # graphes.set_axis(0,1.5,0,maxn)

    return ratio, fig
def spatial_corr(data, N=1024, Dt=10):
    Cxx = np.zeros((N // 2, Dt))
    d = np.arange(N // 2)
    figs = {}
    for p in range(3):
        for k in range(Dt):
            key = list(data.keys())[k]
            Z = np.asarray(data[key])
            Ex = np.nanmean(np.power(Z[:N // 2, 0, 0, p], 2))
            Cxx[:, k] = np.nanmean(np.asarray([[Z[i, 0, 0, p] * Z[i + j, 0, 0, p] / Ex
                                                for i in range(N // 2)]
                                               for j in range(N // 2)]), axis=1)
            # print(Cxx[0,:])

        C = np.nanmean(Cxx, axis=1)
        graphes.graph(d, C, fignum=1)
        graphes.set_axis(0, N / 4, -1, 1.5)
        figs.update(graphes.legende('d', 'C', ''))

    graphes.save_figs(figs, savedir='./Corr_functions/', suffix='', prefix='', frmt='pdf', dpi=300)
def spectrum_2d(M, indices=None):
    Fourier.compute_spectrum_2d(M, Dt=3)  # smooth over 3 time steps
    S_E = np.nanmean(M.S_E[..., indices], axis=2)

    graphes.color_plot(M.kx, M.ky, S_E, log=True, fignum=1)
    graphes.colorbar(label='$E_k$')
    figs = graphes.legende('$k_x$ (mm)', '$k_y$ (mm)', 'Energy Spectrum (log)')
    return figs
def circulation_2(M, i, fignum=1, display=False):
    Omega = access.get(M, 'omega', i)
    x, y = space_axis_vorticity(M)

    X, Y, data, center, factor = normalize(x, y, Omega[..., 0])
    dx = M.x[0, 1] - M.x[0, 0]
    # print(dx)

    U, d = vgradient.make_Nvec(M, i)  # Z : d+1 dimension np array

    nx, ny = X.shape
    R_list = np.arange(1., 15., 0.5)
    Gamma = []
    divergence = []
    for b in R_list:
        # print(b)
        tau = strain_tensor.strain_tensor_loc(U, center[0], center[1], d=2, b=b)
        omega, enstrophy = strain_tensor.vorticity(tau, d=2, norm=False)
        div = strain_tensor.divergence_2d(tau, d=2)
        G = (omega[0, 0] - div[0, 0]) * np.pi * b**2 * dx**2
        Gamma.append(G)
        divergence.append(div[0, 0] / np.abs(omega[0, 0]))

    R_list = np.asarray(R_list) * dx

    if display:
        graphes.graph(R_list, Gamma, fignum=fignum, label='bo')
        graphes.legende('r (mm)', 'Circulation (mm^2/s)', '')

        graphes.graph(R_list, divergence, fignum=fignum + 1, label='ko')
        graphes.graph(R_list, np.zeros(len(R_list)), fignum=fignum + 1, label='r--')
        graphes.legende('r (mm)', 'Relative 2d divergence', '')
        graphes.set_axis(0, 30 * dx, -0.3, 0.3)

    return R_list, Gamma, center, factor
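
# Hedged numerical check of the relation used above, on a synthetic field: the
# circulation on a disc of radius b equals the area integral of the vorticity,
# Gamma(b) = int int omega dA, here verified against the analytic result for a
# gaussian (Lamb-Oseen-like) vorticity patch.
def _example_circulation_from_vorticity():
    dx = 0.5                                           # grid spacing (mm)
    x = np.arange(-20., 20., dx)
    X, Y = np.meshgrid(x, x)
    a, Gamma0 = 3.0, 1000.0                            # core size (mm), total circulation (mm^2/s)
    omega = Gamma0 / (np.pi * a**2) * np.exp(-(X**2 + Y**2) / a**2)   # gaussian vorticity patch
    for b in (2., 5., 10.):
        mask = X**2 + Y**2 < b**2
        Gamma_b = np.sum(omega[mask]) * dx**2          # discrete area integral of omega
        print('b = %4.1f mm : Gamma = %8.1f (expected %8.1f)'
              % (b, Gamma_b, Gamma0 * (1 - np.exp(-b**2 / a**2))))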
def isotropy(M, label='k^--', display=True, fignum=1):
    step = 1
    tl = M.t[0:None:step]

    N = 50
    display_part = False

    Anisotropy = np.zeros(len(tl))
    Meanflow = np.zeros(len(tl))

    for i, t in enumerate(tl):
        print(i * 100 / len(tl))
        rho, Phi = angles(M, i)
        theta, U_moy, U_rms = angular_distribution(M, i)
        # t,U_moy,U_rms = time_window_distribution(M,i,Dt=40)

        if display_part:
            graphes.hist(Phi, fignum=1, num=N)
            graphes.legende('Phi', 'PDF', '')

            graphes.graph(theta, U_moy, fignum=3, label='k^')
            graphes.legende(r'$\theta$', '$U^p$', 'Angular fluctuation distribution')

            graphes.graph(theta, U_rms, fignum=4, label='ro')
            graphes.legende(r'$\theta$', '$U^p$', 'Angular average flow')

        Anisotropy[i] = np.std(U_rms) / np.nanmean(U_rms)
        Meanflow[i] = np.std(U_moy) / np.nanmean(U_rms)

    graphes.semilogx(tl, Anisotropy, label='ro', fignum=fignum, subplot=(1, 2, 1))
    graphes.legende('Time (s)', 'I', 'Anisotropy' + graphes.set_title(M))
    graphes.set_axes(10**-2, 10**4, 0, 2)

    graphes.semilogx(tl, Meanflow, label='k^', fignum=fignum, subplot=(1, 2, 2))
    graphes.legende('Time (s)', '<U>', 'Average flow')
    graphes.set_axes(10**-2, 10**4, 0, 4)
def v_increment(M, start, end, d, p=1, ort='all', fignum=1, normalize=False):
    """ Compute the distribution of velocity increments, either longitudinal,
    transverse, or all

    INPUT
    -----
    M : Mdata object
        with attributes : Ux, Uy, Uz
        with method : shape()
    start : int
        start index
    end : int
        end index
    d : numpy 1d array
        vector d for computing increments
    p : int
        order of the increments du_p = (u(r+d) - u(r))^p
    ort : string
        orientation. can be either 'all', 'trans' or 'long'
    """
    # compute the distribution of velocity increments for Ux, Uy and Uz for all the
    # individual measurements between start and end
    (nx, ny, n) = M.shape()
    nt = end - start

    Ux = M.Ux[..., start:end]
    Uy = M.Uy[..., start:end]
    Uz = M.Uz[..., start:end]

    dim = len(M.shape())
    if dim == 3:
        if d[0] > 0 and d[1] > 0:
            # longitudinal component
            dU_x = (Ux[d[0]:, d[1]:, :] - Ux[:-d[0], :-d[1], :])**p  # **(1./p)
            # transverse components
            dU_y = (Uy[d[0]:, d[1]:, :] - Uy[:-d[0], :-d[1], :])**p  # **(1./p)
            dU_z = (Uz[d[0]:, d[1]:, :] - Uz[:-d[0], :-d[1], :])**p  # **(1./p)
        else:
            dU_x = (Ux[d[0]:, ...] - Ux[:-d[0], ...])**p  # **(1./p)
            dU_y = (Uy[d[0]:, ...] - Uy[:-d[0], ...])**p  # **(1./p)
            dU_z = (Uz[d[0]:, ...] - Uz[:-d[0], ...])**p  # **(1./p)
    else:
        print('not implemented')

    # U=np.sqrt(Ux**2+Uy**2)
    # graphes.hist(U,1,100,'k^')
    graphes.hist(dU_x, fignum=fignum, num=10**3, label='ro', log=True)
    graphes.hist(dU_y, fignum=fignum, num=10**3, label='bs', log=True)
    graphes.hist(dU_z, fignum=fignum, num=10**3, label='m^', log=True)

    mid = (start + end) // 2
    # title='Z= '+str(M.param.Zplane)+' mm, t='+str(M.t[mid])+' ms'+', Dt = '+str(nt)
    figs = {}
    figs.update(graphes.legende('$dU_{x,y}$', 'rho(U)', 'D = ' + str(d[0])))
    return figs
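
# Hedged sketch of longitudinal vs transverse increments on a synthetic 2d field
# (names are illustrative): for a separation d along x, the longitudinal increment
# uses the velocity component parallel to d (Ux) and the transverse one the
# perpendicular component (Uy).
def _example_velocity_increments(p=2, d=3):
    rng = np.random.default_rng(4)
    Ux = rng.normal(size=(64, 64))
    Uy = rng.normal(size=(64, 64))
    dU_long = (Ux[d:, :] - Ux[:-d, :])**p              # longitudinal increment along x
    dU_trans = (Uy[d:, :] - Uy[:-d, :])**p             # transverse increment along x
    print('<dU_long^%d>  = %.3f' % (p, np.mean(dU_long)))
    print('<dU_trans^%d> = %.3f' % (p, np.mean(dU_trans)))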
def spatial_average(M, indices=None):
    figs = {}
    fields, names, vmin, vmax, labels, units = std_fields()
    for j, field in enumerate(fields):
        Y_moy = np.nanmean(getattr(M, field), axis=(0, 1))
        graphes.graph(M.t, Y_moy, label=labels[j], fignum=j + 1)
        # graphes.set_axis(0,5,0,18000)
        figs.update(graphes.legende('Time (s)', names[j] + ' (' + units[j] + ')', ''))
    return figs
def mean_profile(S, i, j, direction='v', label='k^', display=False):
    # mean profile over the whole field : average in one direction only!
    # (and a small window in the other direction ?)
    nx, ny, nt = S.shape()
    Ux = S.m.Ux
    Uy = S.m.Uy
    # remove the data out of the PIV bounds
    # Ux,Uy=fix_PIV(S)

    U = np.sqrt(Ux**2 + Uy**2)
    # V=np.reshape(U,(nx*ny,nt))
    # the median is not much affected by peak values, but the standard deviation definitely is!
    # histogram between vmin and vmax, and remove values out of bounds (set to NaN)

    U_moy = []
    U_std = []
    t = S.m.t
    Dt = 2

    if direction == 'v':
        # average along the horizontal direction
        U_moy = [np.mean(np.mean(U[j - Dt:j + Dt, :, k], axis=0), axis=0) for k in range(nt)]
        print('horizontal average')
    else:
        # average along the vertical direction
        U_moy = [np.mean(np.mean(U[:, i - Dt:i + Dt, k], axis=0), axis=0) for k in range(nt)]
        print('vertical average')
    print(np.shape(U_moy))

    if display:
        # U_moy=np.mean(V[np.invert(np.isnan(V))],axis=0)
        print('Number of frames : ' + str(len(S.m.t)))
        graphes.graph(t, U_moy, label)
        graphes.legende('t (ms)', '<V>_{x,y} (m/s)', '')

    return U_moy, U_std
def time_correlation(Mlist, indices=None, display=False):
    """ Compute the spatially averaged correlation time of the velocity field.

    Velocity autocorrelation functions in time are fitted by a decaying exponential;
    the typical time tc of the fit gives the correlation time.

    INPUT
    -----
    Mlist : list of Mdata
    indices : list of int
        indices of the Mlist elements to process. Default value (None) processes all the elements
    display : bool
        default value False

    OUTPUT
    -----
    None
    """
    if indices is None:
        indices = range(len(Mlist))

    labels = ['k^', 'ro', 'bp', 'c8', 'g*']
    for i, indice in enumerate(indices):
        label = labels[i]
        M = Mlist[indice]

        tf, tau = compute_Ct(M, display=False, label='ko', fignum=1)
        graphes.graphloglog(tf, tau, fignum=9, label=label)
        graphes.legende('$t (s)$', r'$\tau (s)$', '')

        # compute from the energy decay
        # t_d,E = decay.decay(M,label=label)
        t_d, E = Fourier.display_fft_vs_t(M, '1d', Dt=50, label=label)

        Ef = np.zeros(len(tf))
        for i, t in enumerate(tf):
            j = np.argmin(abs(t_d - t))
            # print(str(j)+ ' : '+str(E[j]) + ", " + str(tau[i]))
            Ef[i] = E[j]

        graphes.graphloglog(Ef, tau, fignum=10, label=label)
        graphes.legende('$E (m^2/s^2)$', r'$\tau (s)$', '')
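
# Hedged sketch of the fit described in the docstring, on synthetic data
# (illustrative names): an exponentially decaying autocorrelation C(t) = exp(-t/tc)
# is fitted with scipy.optimize.curve_fit and the correlation time tc is read
# from the fitted parameter.
def _example_correlation_time():
    rng = np.random.default_rng(5)
    t = np.linspace(0., 1., 200)
    tc_true = 0.15
    C = np.exp(-t / tc_true) + 0.02 * rng.normal(size=t.shape)

    def model(t, tc):
        return np.exp(-t / tc)

    popt, pcov = scipy.optimize.curve_fit(model, t, C, p0=[0.1])
    print('correlation time tc = %.3f s' % popt[0])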
def stat_corr_t(M, t, Dt=20, axes=['Ux', 'Ux'], p=1, display=False, label='k^', fignum=0):
    t0 = M.t[t]
    tlist = range(t - Dt // 2, t + Dt // 2)

    curves = []
    for t in tlist:
        curves.append(corr_v_t([M], t, N=20, axes=axes, p=p, display=False))

    X, Y, Yerr = statP.box_average(curves, 50)
    X = X[~np.isnan(X)]
    Y = Y[~np.isnan(Y)]
    Yerr = Yerr[~np.isnan(Yerr)]

    if display:
        # graphes.set_fig(1)
        graphes.errorbar(np.abs(X) / t0, Y, X * 0, Yerr, fignum=fignum, label=label)
        graphes.legende('$t/u^{2m}$', '$C_t$', '$m=1/2$')

    name = 'Corr_' + axes[0] + '_' + axes[1] + '_' + str(t)
    filename = './Corr_functions/' + M.id.date + '/' + M.id.get_id() + '/' + name + '.txt'
    keys = ['t', name]
    List_info = [np.ndarray.tolist(X), np.ndarray.tolist(Y)]

    rw_data.write_dictionnary(filename, keys, List_info, delimiter='\t')
    # print(X)
    # print(Y)
    return X, Y, Yerr
def radial_density(t, fignum=1, label=''):
    figs = {}
    nt = len(t.paths)
    R_tot = []
    for j in range(nt):
        R = np.sum([t.paths[j][..., i]**2 for i in range(3)], axis=0)
        R_tot = R_tot + np.ndarray.tolist(R)

    graphes.hist(R_tot, log=True, fignum=fignum, label=label)
    figs.update(graphes.legende('R', 'PDF(R)', ''))
    return figs
def horizontal_profile(S, ylines, Dt, start=0):
    nx, ny, nt = S.shape()
    x = S.x[0, :]
    for i in range(start, nt, Dt):
        Ux = np.mean(np.mean(S.Ux[ylines, :, i:i + Dt], axis=0), axis=1)
        Uy = np.mean(np.mean(S.Uy[ylines, :, i:i + Dt], axis=0), axis=1)

        std_Ux = np.std(np.std(S.Ux[ylines, :, i:i + Dt], axis=0), axis=1)
        std_Uy = np.std(np.std(S.Uy[ylines, :, i:i + Dt], axis=0), axis=1)

        plt.subplot(121)
        graphes.graph(x, Ux, 0, std_Ux)
        graphes.legende('x (m)', 'V (m/s)', 'Ux')

        plt.subplot(122)
        graphes.graph(x, Uy, 0, std_Uy)
        graphes.legende('x (m)', 'V (m/s)', 'Uy')

        plt.draw()
        input()
def tangent_test():
    N = 100
    path = generate_vortex(1, N)
    dV = tangent(path) * N / (2 * np.pi)

    print(np.mean(norm(dV)))
    print(np.std(norm(dV)))

    indices = np.arange(0, 100, 10)
    for i in indices:
        print(dV[i, :], path[i, :])

    # graphes.graph(path[:,0],path[:,1],label='r')
    graphes.graph(np.arange(N), path[:, 0])
    graphes.graph(np.arange(N), np.sum(dV * path, axis=1))
    # vfield.plot(path[:,0],path[:,1],dV)
    # graphes.set_axis(-1.1,1.1,-1.5,1.5)
    graphes.legende('x', 'y', '')
def display_corr_vs_t(M, dlist, indices, step=100, Dt=1, label='-', display=False, fignum=1):
    tref, d, Cxx, Cyy, Cxy, CEE = correlation_functions(M, dlist, indices, Dt=Dt)

    # Display successive correlation functions
    times = range(0, len(tref) - 3 * Dt, step)
    times = range(0, len(tref), step)

    if display:
        for t in times:
            graphes.graph(d, Cxx[:, t] / Cxx[0, t], fignum=fignum)
            graphes.set_axis(0, max(d), -1, 1.5)
            graphes.legende('d (mm)', 'C_{xx}', '')

            graphes.graph(d, Cyy[:, t] / Cyy[0, t], fignum=fignum + 1)
            graphes.set_axis(0, max(d), -1, 1.5)
            graphes.legende('d (mm)', 'C_{yy}', '')

            graphes.graph(d, CEE[:, t] / CEE[0, t], fignum=fignum + 2)
            graphes.set_axis(0, max(d), -1, 1.5)
            graphes.legende('d (mm)', 'C_{E}', '')

    return tref, d, Cxx, Cyy, Cxy, CEE