def loadData(fname='Unstra.out2.00008.athdf'):
    """load 3d bfield and calc the current density"""
    # Read one component per call to keep peak memory low.
    time, data = ath.athdf(fname, quantities=['Bcc1'])
    bx = data['Bcc1']
    time, data = ath.athdf(fname, quantities=['Bcc2'])
    by = data['Bcc2']
    time, data = ath.athdf(fname, quantities=['Bcc3'])
    bz = data['Bcc3']
    x = data['x1f']
    y = data['x2f']
    z = data['x3f']

    def curl(fx, fy, fz, hx, hy, hz):
        """Curl of (fx, fy, fz) on a uniform grid with spacings hx, hy, hz.

        np.gradient differentiates along (axis0, axis1, axis2), which the
        original code treats as (z, y, x) — presumably the athdf layout;
        intermediates are released by normal garbage collection on return.
        """
        d3fx, d2fx, d1fx = np.gradient(fx)
        d3fy, d2fy, d1fy = np.gradient(fy)
        d3fz, d2fz, d1fz = np.gradient(fz)
        return (d2fz / hy - d3fy / hz,
                d3fx / hz - d1fz / hx,
                d1fy / hx - d2fx / hy)

    # Uniform spacing; cells are cubic in x and z.
    dx = dz = x[1] - x[0]
    dy = y[1] - y[0]
    jx, jy, jz = curl(bx, by, bz, dx, dy, dz)
    j2 = jx**2 + jy**2 + jz**2
    return bx, by, bz, j2
def loadBonly(fname='Unstra.out2.00008.athdf'):
    """load 3d bfield only"""
    # One athdf read per component, mirroring the memory-frugal pattern
    # used elsewhere in this file.
    field = {}
    for quantity in ('Bcc1', 'Bcc2', 'Bcc3'):
        time, data = ath.athdf(fname, quantities=[quantity])
        field[quantity] = data[quantity]
    # Face coordinates from the last read (identical across reads).
    x = data['x1f']
    y = data['x2f']
    z = data['x3f']
    return field['Bcc1'], field['Bcc2'], field['Bcc3']
def loadData(fname='Unstra.out2.00008.athdf'):
    """Load the 3D B-field from an athdf dump and return j^2 = |curl B|^2.

    NOTE(review): this file defines several functions named ``loadData``;
    in a single module the last definition wins — confirm which variant
    callers actually expect.

    Cleanup: removed the large commented-out "refinement" block and the
    constant ``rfac = 1.0`` it fed into (dividing the spacings by 1.0 was
    a no-op).
    """
    time, data = ath.athdf(fname, quantities=['Bcc1'])
    bx = data['Bcc1']
    time, data = ath.athdf(fname, quantities=['Bcc2'])
    by = data['Bcc2']
    time, data = ath.athdf(fname, quantities=['Bcc3'])
    bz = data['Bcc3']
    x = data['x1f']
    y = data['x2f']
    z = data['x3f']

    def curl(vx, vy, vz, dx, dy, dz):
        """Curl on a uniform grid; axes of np.gradient are (z, y, x) here."""
        dzvx, dyvx, dxvx = np.gradient(vx)
        dzvy, dyvy, dxvy = np.gradient(vy)
        dzvz, dyvz, dxvz = np.gradient(vz)
        cx = dyvz / dy - dzvy / dz
        cy = dzvx / dz - dxvz / dx
        cz = dxvy / dx - dyvx / dy
        # Intermediates are freed by normal garbage collection on return.
        return cx, cy, cz

    # Uniform spacing; cells are cubic in x and z.
    dx = dz = x[1] - x[0]
    dy = y[1] - y[0]
    jx, jy, jz = curl(bx, by, bz, dx, dy, dz)
    j2 = jx**2 + jy**2 + jz**2
    return j2
def density_avg(name, name2):
    """Azimuthally averaged surface density per radius (module-level
    rmin/rmax select the radial window)."""
    frame = athdf(name)
    frame2 = athdf(name2)
    time = frame['Time']
    Radius = frame['x1v']
    # Average user_out_var24 over azimuth for each radial index.
    averages = [np.sum(frame2['user_out_var24'][0, :, idx]) / (2 * PI)
                for idx in range(rmin, rmax)]
    return [Radius, np.array(averages)]
def alphaterm2_avg(name, name2):
    """Azimuthal sum of user_out_var23 per radius (window set by global
    rmin/rmax)."""
    frame = athdf(name)
    frame2 = athdf(name2)
    time = frame['Time']
    Radius = frame['x1v']
    sums = [np.sum(frame2['user_out_var23'][0, :, idx])
            for idx in range(rmin, rmax)]
    return [Radius, np.array(sums)]
def loadData(fname='Unstra.out2.00008.athdf'):
    """Load 3D velocity and density; return density-weighted vorticity
    squared and a viscous-dissipation-like term.

    Returns
    -------
    w2 : ndarray
        rho * |curl v|^2
    disp : ndarray
        rho * (2 dv:dv cross terms - (2/3)(div v)^2), as constructed below.

    NOTE(review): several functions in this file share the name
    ``loadData``; the last definition wins at import time.
    """
    time, data = ath.athdf(fname, quantities=['vel1'])
    vx = data['vel1']
    time, data = ath.athdf(fname, quantities=['vel2'])
    vy = data['vel2']
    time, data = ath.athdf(fname, quantities=['vel3'])
    vz = data['vel3']
    x = data['x1f']
    y = data['x2f']
    z = data['x3f']
    time, data = ath.athdf(fname, quantities=['rho'])
    d = data['rho']

    def curl(vx, vy, vz, dx, dy, dz):
        # np.gradient differentiates along (axis0, axis1, axis2),
        # treated here as (z, y, x).
        [dzvx, dyvx, dxvx] = np.gradient(vx)
        [dzvy, dyvy, dxvy] = np.gradient(vy)
        [dzvz, dyvz, dxvz] = np.gradient(vz)
        # 1) rho * w^2
        w2 = (dyvz / dy - dzvy / dz)**2
        w2 += (dzvx / dz - dxvz / dx)**2
        w2 += (dxvy / dx - dyvx / dy)**2
        w2 *= d
        # 2) 2*delv:delv
        # BUG FIX: the original wrote (dzvz)**2, omitting the 1/dz scaling
        # that the other diagonal terms carry; corrected to (dzvz/dz)**2.
        disp = 2.0 * ((dxvx / dx)**2 + (dyvy / dy)**2 + (dzvz / dz)**2)
        disp += 4.0 * (dxvy * dyvx / dx / dy + dxvz * dzvx / dx / dz +
                       dyvz * dzvy / dy / dz)
        # 3) -2/3 (div v)^2
        disp -= (2.0 / 3.0) * (dxvx / dx + dyvy / dy + dzvz / dz)**2
        disp *= d
        # Intermediates are released by normal garbage collection.
        return w2, disp

    # 1) get vorticity; uniform spacing, cubic cells in x/z
    dx = dz = x[1] - x[0]
    dy = y[1] - y[0]
    w2, disp2 = curl(vx, vy, vz, dx, dy, dz)
    return w2, disp2
def get_fc(file, lev):
    """Read an athdf file and return its coordinate system, the three
    face-coordinate arrays, and the density array's shape."""
    data = ar.athdf(file, quantities=['dens'], level=lev)
    coords = data['Coordinates']
    faces = (data['x1f'], data['x2f'], data['x3f'])
    return (coords,) + faces + (data['dens'].shape,)
def read_bz_hdf5(fname, level=0, subsample=False):
    """Read the Bcc3 slab and cell-centered coordinate arrays from an
    athdf dump."""
    time, data = ath.athdf(fname, level=level, subsample=subsample,
                           quantities=['Bcc3'])
    bz = data['Bcc3'][0]
    # Shift face coordinates by half a (uniform) cell and drop the last
    # face to obtain cell centers.
    centers = []
    for key in ('x1f', 'x2f', 'x3f'):
        faces = data[key]
        centers.append((faces + 0.5 * (faces[1] - faces[0]))[:-1])
    x, y, z = centers
    return time, x, y, z, bz
def alphaeff_avg(name, name2):
    """Azimuthal sum of the effective-alpha diagnostic (user_out_var22)
    per radius; window set by global rmin/rmax."""
    frame = athdf(name)
    frame2 = athdf(name2)
    time = frame['Time']
    Radius = frame['x1v']
    sums = [np.sum(frame2['user_out_var22'][0, :, idx])
            for idx in range(rmin, rmax)]
    return [Radius, np.array(sums)]
def __init__(self, fn):
    """Load one athdf snapshot and build CGS-converted meshes.

    Reads coordinates and primitive variables, converts everything to CGS
    (code units appear to be 10^4 AU / Myr / M_sun — TODO confirm against
    the simulation input file), and precomputes gravity components from
    the potential.
    """
    print('reading ' + fn, flush=True)
    full_data = athdf(fn)
    self.hydro_time = full_data['Time']
    # setup location meshes and find data location
    # x1f/x2f/x3f are cell-face coordinates (r, theta, phi).
    self.r_list, self.theta_list, phi_list = full_data['x1f'], full_data[
        'x2f'], full_data['x3f']
    # r in code units * 10^4 AU -> cm (assuming AU is in cm — verify).
    self.cgs_r_list = np.array([r * 10**4 * AU for r in self.r_list])
    # Cell centers: arithmetic mean of adjacent faces.
    dataloc_r_list = [(self.r_list[i] + self.r_list[i + 1]) / 2.0
                      for i in range(0, len(self.r_list) - 1, 1)]
    cgs_dataloc_r_list = [x * 10**4 * AU for x in dataloc_r_list]
    dataloc_theta_list = [
        (self.theta_list[i] + self.theta_list[i + 1]) / 2.0
        for i in range(0, len(self.theta_list) - 1, 1)
    ]
    # [0] drops the phi axis: only the first phi slice is kept.
    rho_mesh = full_data['rho'][0]
    pres_mesh = full_data['press'][0]
    Phi_mesh = full_data['Phi'][
        0]  # I guess the Phi is gravitational potential
    vr_mesh = full_data['vel1'][0]
    vtheta_mesh = full_data['vel2'][0]
    vphi_mesh = full_data['vel3'][0]
    # Unit conversions; the magic numbers 1.33687e-11 and 4743.78 are
    # presumably pressure and velocity code->CGS factors — TODO confirm.
    self.cgs_rho_mesh = np.array([[x * m_sun / (10**4 * AU)**3 for x in y]
                                  for y in rho_mesh])
    self.cgs_pres_mesh = np.array([[x * 1.33687 * 10**(-11) for x in y]
                                   for y in pres_mesh])
    self.cgs_Phi_mesh = np.array(
        [[x * (10**4 * AU)**2 / (10**6 * year)**2 for x in Phi_list]
         for Phi_list in Phi_mesh])
    self.cgs_vr_mesh = np.array([[x * 4743.78 for x in y] for y in vr_mesh])
    self.cgs_vtheta_mesh = np.array([[x * 4743.78 for x in y]
                                     for y in vtheta_mesh])
    self.cgs_vphi_mesh = np.array([[x * 4743.78 for x in y]
                                   for y in vphi_mesh])
    # Temperature from the private helper, point by point.
    self.cgs_temp_mesh = np.array([[
        self.__temperature(rho, pres)
        for rho, pres in zip(rho_list, pres_list)
    ] for rho_list, pres_list in zip(self.cgs_rho_mesh, self.cgs_pres_mesh)
    ])
    # -dPhi/dtheta along each radial column (transpose to iterate columns).
    self.phi_theta_gradient_mesh = np.transpose([
        -np.gradient(phitheta, dataloc_theta_list)
        for phitheta in np.transpose(self.cgs_Phi_mesh)
    ])
    # g_r = -dPhi/dr per theta row.
    self.grav_r_mesh = np.array([
        -np.gradient(phir, cgs_dataloc_r_list) for phir in self.cgs_Phi_mesh
    ])
    # g_theta = (1/r) * (-dPhi/dtheta).
    self.grav_theta_mesh = np.array(
        [[dtheta * 1.0 / r for dtheta, r in zip(dt_l, cgs_dataloc_r_list)]
         for dt_l in self.phi_theta_gradient_mesh])
    # Axisymmetric setup: no azimuthal gravity component.
    self.grav_phi_mesh = np.array([[0.0 for r in
                                    cgs_dataloc_r_list] for t in dataloc_theta_list])
def load_data(output_dir, n, prob=DEFAULT_PROB, do_path_format=1, method='matt'):
    '''Loads data from .athdf files output from Athena++.

    Parameters
    ----------
    output_dir : string
        path to directory containing .athdf file(s)
    n : int
        integer describing which snapshot to choose
    prob : string, optional
        the Athena++ problem generator name, by default DEFAULT_PROB

    Returns
    -------
    dict
        a dictionary containing information on all data in the simulation.

    Using the `athena_read.py` script in `/athena/vis/python/`.
    '''

    def change_coords(data, a, method):
        # Primed -> unprimed transformation from Matt Kunz's notes.
        stretch = np.array([1, a, a])   # diagonal of Λ
        det = a**2                      # determinant of Λ = a^2
        use_matt = method == 'matt'
        if use_matt:
            data['rho'] /= det
        for axis in range(1, 4):
            vel_key = 'vel' + str(axis)
            b_key = 'Bcc' + str(axis)
            factor = stretch[axis - 1]
            data[vel_key] *= factor
            # Matt's equations rescale B by Λ/λ; Jono's by Λ alone.
            data[b_key] *= factor / det if use_matt else factor

    max_n = get_maxn(output_dir, do_path_format=do_path_format)
    assert n in range(0, max_n), 'n must be between 0 and ' + str(max_n)

    # Input: '$folder.out$output_id.xxxxx.athdf'
    folder = format_path(output_dir, do_path_format) + prob
    output_id = '2'  # Output ID (set in input file)
    filename = folder + '.out' + output_id + '.%05d' % n + '.athdf'
    data = athdf(filename)

    if method != 'nothing':
        # Rescale perpendicular components automatically:
        # u_⟂ = a*u'_⟂ (and similarly B_⟂), per the chosen method.
        current_a = data['a_exp']
        print('current a = ' + str(current_a))
        change_coords(data, current_a, method)
    return data
def get_Omega_env_dist(fn,dv=0.05,G=1,rho_thresh=1.e-2,level=2):
    """ Get the mass-average Omega within r<1

    Builds grid-shaped copies of the spherical coordinates and cell
    widths, computes cell volumes/masses, selects cells above the density
    threshold, and returns (mass-weighted histogram of Omega, the
    mass-weighted mean Omega).  Arrays are indexed (phi, theta, r):
    axis 2 is r, axis 1 is theta, axis 0 is phi.
    """
    d = ar.athdf(fn,level=level)
    # MAKE grid based coordinates
    d['gx1v'] = np.zeros_like(d['rho'])
    for i in range((d['rho'].shape)[2]):
        d['gx1v'][:,:,i] = d['x1v'][i]
    d['gx2v'] = np.zeros_like(d['rho'])
    for j in range((d['rho'].shape)[1]):
        d['gx2v'][:,j,:] = d['x2v'][j]
    d['gx3v'] = np.zeros_like(d['rho'])
    for k in range((d['rho'].shape)[0]):
        d['gx3v'][k,:,:] = d['x3v'][k]
    ## dr, dth, dph
    d['d1'] = d['x1f'][1:] - d['x1f'][:-1]
    d['d2'] = d['x2f'][1:] - d['x2f'][:-1]
    d['d3'] = d['x3f'][1:] - d['x3f'][:-1]
    # grid based versions
    d['gd1'] = np.zeros_like(d['rho'])
    for i in range((d['rho'].shape)[2]):
        d['gd1'][:,:,i] = d['d1'][i]
    d['gd2'] = np.zeros_like(d['rho'])
    for j in range((d['rho'].shape)[1]):
        d['gd2'][:,j,:] = d['d2'][j]
    d['gd3'] = np.zeros_like(d['rho'])
    for k in range((d['rho'].shape)[0]):
        d['gd3'][k,:,:] = d['d3'][k]
    # VOLUME (spherical: r^2 sin(theta) dr dtheta dphi)
    d['dvol'] = d['gx1v']**2 * np.sin(d['gx2v']) * d['gd1']* d['gd2']* d['gd3']
    # cell masses
    d['dm'] = d['rho']*d['dvol']
    # NOTE(review): despite the docstring's "r<1", the selection is by
    # density threshold only; the radial cut is commented out.
    select = (d['rho']>rho_thresh) #(d['gx1v']<1.0)
    # Omega = vphi/(r sin th)
    vpf = (d['vel3'][select] / (d['gx1v'][select]* np.sin(d['gx2v'][select])) ).flatten()
    #vpf = d['vel3'][select].flatten()
    dmf = d['dm'][select].flatten()
    # Fixed histogram window [-0.1, 0.1) in Omega with bin width dv.
    mybins = np.arange(-0.1,0.1,dv)
    mydist = np.histogram(vpf,weights=dmf,bins=mybins)
    GMtot=G*np.sum(mydist[0])
    print ("Total mass GM = ",G*np.sum(dmf))
    print ("Total mass GM (distribution) = ", GMtot)
    return mydist, np.average(vpf,weights=dmf)
def get_plot_array_vertical(quantity,phislice,
                            myfile,profile_file,orb,m1,m2,
                            G=1,rsoft2=0.1,level=0,
                            x1_max=None):
    """Assemble a vertical (meridional) slice of `quantity` through the
    azimuth +/- phislice.

    Reads the slice at phi = -phislice and the opposite slice (shifted by
    pi), then stitches them into one (x, z, value) plane spanning both
    sides of the rotation axis.  Relies on the module-level ``read_data``
    helper for the heavy lifting (its exact output schema is assumed to
    include 'gx1v', 'gx2v', 'z' and `quantity` — defined elsewhere).
    """
    # Read only metadata/coordinates to locate the nearest phi slice.
    dblank=ar.athdf(myfile,level=level,quantities=[],subsample=True)
    get_cartesian=True
    get_torque=False
    get_energy=False
    # Torque/energy fields are only computed when requested by name.
    if quantity in ['torque_dens_1_z','torque_dens_2_z']:
        get_torque=True
    if quantity in ['ek','ei','etot','epot','epotg','epotp','h','bern']:
        get_energy=True
    # Closest grid azimuth to -phislice.
    x3slicevalue = dblank['x3v'][np.argmin(np.abs(dblank['x3v']+phislice))]
    d=read_data(myfile,orb,m1,m2,G=G,rsoft2=rsoft2,level=level,
                get_cartesian=get_cartesian,
                get_torque=get_torque,
                get_energy=get_energy,
                x1_max=x1_max,x3_min=x3slicevalue,x3_max=x3slicevalue,
                profile_file=profile_file)
    # Cylindrical radius and height on the first side.
    x1 = d['gx1v'][0,:,:]*np.sin(d['gx2v'][0,:,:])
    z1 = d['z'][0,:,:]
    val1 = d[quantity][0,:,:]
    # Opposite azimuth (shift by pi, keeping the value in range).
    if(x3slicevalue<0):
        x3slicevalue += np.pi
    else:
        x3slicevalue -= np.pi
    d=read_data(myfile,orb,m1,m2,G=G,rsoft2=rsoft2,level=level,
                get_cartesian=get_cartesian,
                get_torque=get_torque,
                get_energy=get_energy,
                x1_max=x1_max,x3_min=x3slicevalue,x3_max=x3slicevalue,
                profile_file=profile_file)
    # Negative radius so this side plots left of the axis.
    x2 = -d['gx1v'][0,:,:]*np.sin(d['gx2v'][0,:,:])
    z2 = d['z'][0,:,:]
    val2 = d[quantity][0,:,:]
    # Combine the arrays (flip one side so rows meet at the axis),
    # then append the first row again to close the mesh.
    x = np.concatenate((x2,np.flipud(x1)))
    z = np.concatenate((z2,np.flipud(z1)))
    val = np.concatenate((val2,np.flipud(val1)))
    x = np.concatenate((x,x2[0:1]) ,axis=0)
    z = np.concatenate((z,z2[0:1]) ,axis=0)
    val = np.concatenate((val,val2[0:1]) ,axis=0)
    return x,z,val
def aread(filename):
    """Dispatch to the athena_read loader that matches the filename
    extension; print an error and return None for unsupported types."""
    extension = filename.split('.')[-1]
    if extension == 'vtk':
        return ar.vtk(filename)
    if extension == 'tab':
        return ar.tab(filename)
    if extension == 'athdf':
        return ar.athdf(filename, subsample=True)
    print('Error: filetype not supported')
    return
def analyze(): analyze_status = True # check density max and Vz and Bz components in tab slice slice_data = athena_read.tab( filename='bin/TestOutputs.block0.out2.00010.tab', raw=True, dimensions=1) if max(slice_data[1, :]) < 0.25: analyze_status = False if max(slice_data[5, :]) != 0.0: analyze_status = False if max(slice_data[8, :]) != 0.0: analyze_status = False # check density max and Vz and Bz components in tab sum sum_data = athena_read.tab( filename='bin/TestOutputs.block0.out3.00010.tab', raw=True, dimensions=1) if max(sum_data[1, :]) < 15.0 and max(sum_data[:, 1]) > 20.0: analyze_status = False if max(sum_data[5, :]) != 0.0: analyze_status = False if max(sum_data[8, :]) != 0.0: analyze_status = False # assuming domain is 64x64 w/o ghost zones output, slice near central interface x2=0.0 # check density max and Vz and Bz components in VTK dump xf, yf, _, vtk_data = athena_read.vtk( filename='bin/TestOutputs.block0.out4.00010.vtk') if max(vtk_data['rho'][0, 32, :]) < 0.25: analyze_status = False if max(vtk_data['vel'][0, 32, :, 2]) != 0.0: analyze_status = False if max(vtk_data['Bcc'][0, 32, :, 2]) != 0.0: analyze_status = False # if max(xf) != 0.5 and min(xf) != -0.5: # analyze_status = False # if max(yf) != 0.5 and min(yf) != -0.5: # analyze_status = False # logger.debug(str(vtk_data['rho'].shape)) hdf5_data = athena_read.athdf('bin/TestOutputs.out5.00010.athdf', dtype=np.float32) if max(hdf5_data['rho'][0, 32, :]) < 0.25: analyze_status = False if max(hdf5_data['vel3'][0, 32, :]) != 0.0: analyze_status = False if max(hdf5_data['Bcc3'][0, 32, :]) != 0.0: analyze_status = False return analyze_status
def am_avg(name, name2):
    """Azimuthally averaged angular momentum per radius, plus the
    Keplerian reference value (window set by global rmin/rmax)."""
    frame = athdf(name)
    frame2 = athdf(name2)
    time = frame['Time']
    Radius = frame['x1v']
    avg_vals = []
    kep_vals = []
    for idx in range(rmin, rmax):
        rad = frame['x1v'][idx]
        sigma = frame['rho'][0, :, idx]
        dphi = np.gradient(frame['x2v'])
        # Measured AM diagnostic, averaged over azimuth.
        avg_vals.append(np.sum(frame2['user_out_var25'][0, :, idx]) / (2 * PI))
        # Keplerian comparison: sqrt(GM1 r) * sigma integrated in phi.
        kep_vals.append(np.sum(np.sqrt(GM1 * rad) * sigma * dphi) / (2 * PI))
    return [Radius, np.array(avg_vals), np.array(kep_vals)]
def bdstream_names(i):
    """Zero-padded (out1, out2) BDstream athdf filenames for snapshot i.

    Replaces the old i<10 / i>=100 / else branching, which produced the
    same 5-digit padding but silently broke for i >= 1000.
    """
    return ('BDstream.out1.%05d.athdf' % i,
            'BDstream.out2.%05d.athdf' % i)


def intammdot(i):
    """Azimuthally summed AM-Mdot diagnostic (user_out_var16) for
    snapshot i."""
    name, name2 = bdstream_names(i)
    frame = athdf(name)
    frame2 = athdf(name2)
    time = frame['Time']
    Radius = frame['x1v']
    dR = np.gradient(Radius)
    AMMdot = np.sum(frame2['user_out_var16'][0, :, :], axis=0)
    return [Radius, AMMdot]
def plotspiral(name, ax):
    """Plot density in (log10 R, phi) coordinates on the given axes;
    radial window comes from global rmin/rmax."""
    frame = athdf(name)
    dens = frame['rho'][0, :, rmin:rmax]
    rr, pp = np.meshgrid(frame['x1f'][rmin:rmax], frame['x2f'])
    logr = np.log10(rr)
    ax.pcolormesh(logr, pp, dens,
                  norm=colors.LogNorm(vmin=0.001, vmax=3.0))
    ax.set_xlabel(r"$\rm Log~R$")
    ax.set_ylabel(r"$\phi$")
    ax.set_xlim(np.min(logr), np.max(logr))
    ax.set_ylim(np.min(pp), np.max(pp))
def bdstream_names(i):
    """Zero-padded (out1, out2) BDstream athdf filenames for snapshot i.

    Replaces the old i<10 / i>=100 / else branching, which produced the
    same 5-digit padding but silently broke for i >= 1000.
    """
    return ('BDstream.out1.%05d.athdf' % i,
            'BDstream.out2.%05d.athdf' % i)


def readam(i):
    """Azimuthally summed angular-momentum diagnostics for snapshot i.

    Returns [Radius, AM, time, AM_net, AM_nc] built from user output
    variables 6, 11 and 34.
    """
    name, name2 = bdstream_names(i)
    frame = athdf(name)
    frame2 = athdf(name2)
    time = frame['Time']
    Radius = frame['x1v']
    AM = np.sum(frame2['user_out_var6'][0, :, :], axis=0)
    AM_net = np.sum(frame2['user_out_var11'][0, :, :], axis=0)
    AM_nc = np.sum(frame2['user_out_var34'][0, :, :], axis=0)
    return [Radius, AM, time, AM_net, AM_nc]
def bdstream_names(i):
    """Zero-padded (out1, out2) BDstream athdf filenames for snapshot i.

    Replaces the old i<10 / i>=100 / else branching, which produced the
    same 5-digit padding but silently broke for i >= 1000.
    """
    return ('BDstream.out1.%05d.athdf' % i,
            'BDstream.out2.%05d.athdf' % i)


def intamfh(i):
    """Azimuthally summed AM flux diagnostics for snapshot i.

    Returns [Radius, amfh, amth_net, amth_nc] from user output variables
    17, 18 and 37.
    """
    name, name2 = bdstream_names(i)
    frame = athdf(name)
    frame2 = athdf(name2)
    time = frame['Time']
    Radius = frame['x1v']
    dR = np.gradient(Radius)
    amfh = np.sum(frame2['user_out_var17'][0, :, :], axis=0)
    amth_net = np.sum(frame2['user_out_var18'][0, :, :], axis=0)
    amth_nc = np.sum(frame2['user_out_var37'][0, :, :], axis=0)
    return [Radius, amfh, amth_net, amth_nc]
def animated(i):
    """Animation callback: draw frame i's density map plus the tracked
    secondary's trajectory (globals: fig, ax0, axins, pmtrackfile, X, Y,
    rmin, rmax)."""
    ax0.clear()
    axins.clear()
    # track file time: trajectory up to the current frame (10 track rows
    # per hdf5 dump).
    secondary_x = pmtrackfile.x.iloc[0:(i * 10 + 1)]
    secondary_y = pmtrackfile.y.iloc[0:(i * 10 + 1)]
    # Zero-padded snapshot name; replaces the old i<10 / i>=100 / else
    # branching (same result for i < 1000, and correct beyond it).
    name = 'BDstream.out1.%05d.athdf' % i
    frame = athdf(name)
    print("plotting " + name + "...")
    print("t_frame=", frame['Time'], "t_pmtrack=", pmtrackfile.time[i * 10])
    im = ax0.pcolormesh(Y, X, frame['rho'][0, :, rmin:rmax],
                        norm=colors.LogNorm(vmin=0.001, vmax=0.5),
                        cmap="magma")
    # Past trajectory (small black dots) and current position (red X).
    ax0.scatter(secondary_x, secondary_y, c='k', s=1.0)
    ax0.scatter(secondary_x.iloc[-1], secondary_y.iloc[-1],
                marker='x', c='r', s=32.0)
    ax0.set_aspect('equal')
    ax0.set_title("time=" + str(frame['Time']))
    ax0.set_xlabel(r"$r\cos\phi$")
    ax0.set_ylabel(r"$r\sin\phi$")
    ax0.set_ylim(np.min(Y), np.max(Y))
    ax0.set_xlim(np.min(X), 1.3)
    fig.colorbar(im, cax=axins)
    axins.set_xlabel(r"$\rm log(\rho/\rho_{0})$")
    return [im]
def plotdensity(name, ax):
    """Plot density in Cartesian (r cos(phi), r sin(phi)) coordinates on
    the given axes; radial window from global rmin/rmax."""
    frame = athdf(name)
    dens = frame['rho'][0, :, rmin:rmax]
    rr, pp = np.meshgrid(frame['x1f'][rmin:rmax], frame['x2f'])
    xx = rr * np.sin(pp)
    yy = rr * np.cos(pp)
    im = ax.pcolormesh(yy, xx, dens,
                       norm=colors.LogNorm(vmin=0.001, vmax=5.0))
    ax.set_xlabel(r"$r\sin\phi$")
    ax.set_aspect('auto')
    # Kept as in the original: x limits come from the r*sin(phi) array
    # even though that array is plotted on the vertical axis.
    ax.set_xlim(np.min(xx), np.max(xx))
    ax.set_ylim(np.min(yy), np.max(yy))
    ax.set_aspect('equal')
    return im
def read_all_hdf5(fname, level=0, subsample=False):
    """Read all MHD primitives from an athdf dump in a single call and
    return time, cell-centered coords, rho, T, B and v components."""
    time, data = ath.athdf(fname, level=level, subsample=subsample)
    bx, by, bz = data['Bcc1'][0], data['Bcc2'][0], data['Bcc3'][0]
    vx, vy, vz = data['vel1'][0], data['vel2'][0], data['vel3'][0]
    rho = data['rho'][0]
    T = data['press'][0] / rho
    # Face coords -> cell centers: shift by half a (uniform) cell and
    # drop the trailing face.
    centers = []
    for key in ('x1f', 'x2f', 'x3f'):
        faces = data[key]
        centers.append((faces + 0.5 * (faces[1] - faces[0]))[:-1])
    x, y, z = centers
    return time, x, y, z, rho, T, bx, by, bz, vx, vy, vz
def get_quant(file, quant, lev, derived=False, myquant='None'):
    """Read quantities from an hdf5 file and optionally derive a quantity.

    Parameters
    ----------
    file : path to the athdf file
    quant : list of dataset names to read (order matters for derived
        quantities)
    lev : refinement level; level=0 restricts all data to the coarsest level
    derived : when True, combine the read datasets according to `myquant`
    myquant : name of the derived quantity

    Returns
    -------
    (time, x1v, x2v, x3v, qdat)

    NOTE(review): if `derived` is True but `myquant` matches no branch,
    `qdat` is never bound and the return raises NameError — preserved
    from the original; confirm callers never hit that path.
    """
    data = ar.athdf(file, quantities=quant, level=lev)
    if derived:
        if myquant == 'v1' or myquant == 'v2' or myquant == 'v3':
            # momentum / density
            qdat = data[quant[0]] / data[quant[1]]
        elif myquant == 'eint':
            # BUG FIX: the third momentum component was raised to the 3rd
            # power (**3.) instead of squared — the kinetic-energy term is
            # 0.5*(m1^2 + m2^2 + m3^2)/rho, matching the 'ediff' branch.
            qdat = data[quant[0]] - 0.5 * (data[quant[1]]**2. +
                                           data[quant[2]]**2. +
                                           data[quant[3]]**2.) / data[quant[4]]
        elif myquant == 'ediff':
            eint = data[quant[1]] - 0.5 * (data[quant[2]]**2. +
                                           data[quant[3]]**2. +
                                           data[quant[4]]**2.) / data[quant[5]]
            ie = data[quant[0]]
            qdat = np.abs(eint - ie)
            rms = np.sqrt(np.sum((eint - ie)**2.) / eint.size)
            print('[get_quant]: time = %1.3f ediff_RMS = %13.5e' %
                  (data['Time'], rms))
        # Cless derived quants
        elif myquant == 'vcl1' or myquant == 'vcl2' or myquant == 'vcl3':
            qdat = data[quant[0]] / data[quant[1]]
        elif myquant == 'P11' or myquant == 'P22' or myquant == 'P33':
            qdat = data[quant[0]] - (data[quant[1]] * data[quant[1]]) / data[quant[2]]
        elif myquant == 'P12' or myquant == 'P13' or myquant == 'P23':
            qdat = data[quant[0]] - (data[quant[1]] * data[quant[2]]) / data[quant[3]]
    else:
        qdat = data[quant[0]]
    return data['Time'], data['x1v'], data['x2v'], data['x3v'], qdat
def main(**kwargs):
    """Plot a 2D slice/average of an Athena++ athdf dump, optionally
    overlaying streamlines of a vector field.

    FIX: the coordinate-label chain had an unreachable branch
    ``elif coordinates == 'cartesian'`` (already consumed by the first
    branch) that assigned cylindrical labels; it now tests 'cylindrical'.
    """
    # Load Python plotting modules
    if kwargs['output_file'] != 'show':
        import matplotlib
        matplotlib.use('agg')
    import matplotlib.pyplot as plt
    import matplotlib.colors as colors

    # Verify user inputs: a slice location is meaningless if everything
    # plotted is averaged or summed.
    slice_erroneously_specified = False
    if kwargs['slice_location'] is not None:
        if kwargs['stream'] is not None:
            if (kwargs['average'] or kwargs['sum']) and kwargs['stream_average']:
                slice_erroneously_specified = True
        else:
            if kwargs['average'] or kwargs['sum']:
                slice_erroneously_specified = True
    if slice_erroneously_specified:
        raise RuntimeError(
            'Slice location specified but all quantities are to be '
            'averaged or summed')

    # Set default slice location (even if averaging or summing)
    if kwargs['slice_location'] is None:
        kwargs['slice_location'] = 0.0

    # Determine refinement level to use
    if kwargs['level'] is not None:
        level = kwargs['level']
    else:
        level = None

    # Determine if vector quantities should be read: the two in-plane
    # stream components for the chosen direction.
    quantities = [kwargs['quantity']]
    if kwargs['stream'] is not None:
        if kwargs['direction'] == 1:
            quantities.append(kwargs['stream'] + '2')
            quantities.append(kwargs['stream'] + '3')
        elif kwargs['direction'] == 2:
            quantities.append(kwargs['stream'] + '1')
            quantities.append(kwargs['stream'] + '3')
        else:
            quantities.append(kwargs['stream'] + '1')
            quantities.append(kwargs['stream'] + '2')

    # Read data ('Levels' is synthesized by the reader, not stored)
    if quantities[0] == 'Levels':
        data = athena_read.athdf(kwargs['data_file'],
                                 quantities=quantities[1:],
                                 level=level, return_levels=True)
    else:
        data = athena_read.athdf(kwargs['data_file'],
                                 quantities=quantities,
                                 level=level)

    # Check that coordinates work with user choices
    coordinates = data['Coordinates']
    ave_or_sum = kwargs['average'] or kwargs['sum'] or kwargs['stream_average']
    warn_projection = False
    warn_vector = False
    if coordinates in ('cartesian', 'minkowski'):
        pass
    elif coordinates == 'cylindrical':
        if ave_or_sum and kwargs['direction'] == 1:
            warn_projection = True
        if kwargs['stream'] and kwargs['direction'] in (1, 3):
            warn_vector = True
    elif coordinates in ('spherical_polar', 'schwarzschild', 'kerr-schild'):
        if ave_or_sum and kwargs['direction'] in (1, 2):
            warn_projection = True
        if kwargs['stream']:
            warn_vector = True
    else:
        warnings.warn('Coordinates not recognized; results may be misleading')
    if warn_projection:
        warnings.warn('Sums/slices are not computed with correct volumes')
    if warn_vector:
        warnings.warn('Vector plot may be misleading')

    # Name coordinates
    if coordinates in ('cartesian', 'minkowski'):
        coord_labels = (r'$x$', r'$y$', r'$z$')
    elif coordinates == 'cylindrical':
        # BUG FIX: was "== 'cartesian'", unreachable after the branch above.
        coord_labels = (r'$R$', r'$\phi$', r'$z$')
    elif coordinates in ('spherical_polar', 'schwarzschild', 'kerr-schild'):
        coord_labels = (r'$r$', r'$\theta$', r'$\phi$')
    else:
        coord_labels = (r'$x^1$', r'$x^2$', r'$x^3$')

    # Extract basic coordinate information: (xf, yf) span the plotted
    # plane; zf is the sliced/averaged direction.
    if kwargs['direction'] == 1:
        xf = data['x2f']
        xv = data['x2v']
        yf = data['x3f']
        yv = data['x3v']
        zf = data['x1f']
        x_label = coord_labels[1]
        y_label = coord_labels[2]
    elif kwargs['direction'] == 2:
        xf = data['x1f']
        xv = data['x1v']
        yf = data['x3f']
        yv = data['x3v']
        zf = data['x2f']
        x_label = coord_labels[0]
        y_label = coord_labels[2]
    if kwargs['direction'] == 3:
        xf = data['x1f']
        xv = data['x1v']
        yf = data['x2f']
        yv = data['x2v']
        zf = data['x3f']
        x_label = coord_labels[0]
        y_label = coord_labels[1]

    # Create grids
    x_grid, y_grid = np.meshgrid(xf, yf)
    x_stream, y_stream = np.meshgrid(xv, yv)

    # Extract scalar data: average (mean), sum (mean times extent), or slice
    vals = data[kwargs['quantity']]
    if kwargs['average'] or kwargs['sum']:
        vals = np.sum(vals, axis=3 - kwargs['direction'])
        if kwargs['sum']:
            vals *= zf[-1] - zf[0]
        vals /= len(zf) - 1
    else:
        if kwargs['slice_location'] < zf[0]:
            index = 0
        elif kwargs['slice_location'] >= zf[-1]:
            index = -1
        else:
            index = np.where(zf <= kwargs['slice_location'])[0][-1]
        if kwargs['direction'] == 1:
            vals = vals[:, :, index]
        elif kwargs['direction'] == 2:
            vals = vals[:, index, :]
        else:
            vals = vals[index, :, :]

    # Extract vector data with the same average-or-slice logic
    if kwargs['stream'] is not None:
        if kwargs['direction'] == 1:
            vals_x = data[kwargs['stream'] + '2']
            vals_y = data[kwargs['stream'] + '3']
        elif kwargs['direction'] == 2:
            vals_x = data[kwargs['stream'] + '1']
            vals_y = data[kwargs['stream'] + '3']
        else:
            vals_x = data[kwargs['stream'] + '1']
            vals_y = data[kwargs['stream'] + '2']
        if kwargs['stream_average']:
            vals_x = np.sum(vals_x, axis=3 - kwargs['direction'])
            vals_y = np.sum(vals_y, axis=3 - kwargs['direction'])
            vals_x /= len(zf) - 1
            vals_y /= len(zf) - 1
        else:
            if kwargs['slice_location'] < zf[0]:
                index = 0
            elif kwargs['slice_location'] >= zf[-1]:
                index = -1
            else:
                index = np.where(zf <= kwargs['slice_location'])[0][-1]
            if kwargs['direction'] == 1:
                vals_x = vals_x[:, :, index]
                vals_y = vals_y[:, :, index]
            elif kwargs['direction'] == 2:
                vals_x = vals_x[:, index, :]
                vals_y = vals_y[:, index, :]
            else:
                vals_x = vals_x[index, :, :]
                vals_y = vals_y[index, :, :]

    # Determine colormap norm
    if kwargs['logc']:
        norm = colors.LogNorm()
    else:
        norm = colors.Normalize()

    # Determine plot limits
    x_min = kwargs['x_min'] if kwargs['x_min'] is not None else xf[0]
    x_max = kwargs['x_max'] if kwargs['x_max'] is not None else xf[-1]
    y_min = kwargs['y_min'] if kwargs['y_min'] is not None else yf[0]
    y_max = kwargs['y_max'] if kwargs['y_max'] is not None else yf[-1]

    # Make plot
    plt.figure()
    im = plt.pcolormesh(x_grid, y_grid, vals, cmap=kwargs['colormap'],
                        vmin=kwargs['vmin'], vmax=kwargs['vmax'], norm=norm)
    if kwargs['stream'] is not None:
        with warnings.catch_warnings():
            warnings.filterwarnings(
                'ignore',
                'invalid value encountered in greater_equal',
                RuntimeWarning,
                'numpy')
            plt.streamplot(x_stream, y_stream, vals_x, vals_y,
                           density=kwargs['stream_density'], color='k')
    plt.xlim((x_min, x_max))
    plt.ylim((y_min, y_max))
    if not kwargs['fill']:
        plt.gca().set_aspect('equal')
    plt.xlabel(x_label)
    plt.ylabel(y_label)
    plt.colorbar(im)
    if kwargs['output_file'] == 'show':
        plt.show()
    else:
        plt.savefig(kwargs['output_file'], bbox_inches='tight')
def main(**kwargs):
    """Plot a midplane or meridional view of a spherical-coordinate
    Athena++ dump, optionally with streamlines of a vector field.

    FIX: integer indices were built with true division (``nx2 / 2`` etc.),
    which yields floats under Python 3 and raises TypeError when used as
    array indices; all index computations now use floor division ``//``.
    Pixel-coordinate arithmetic for map_coordinates is left as float math.
    """
    # Load function for transforming coordinates
    if kwargs['stream'] is not None:
        from scipy.ndimage import map_coordinates

    # Determine refinement level to use
    if kwargs['level'] is not None:
        level = kwargs['level']
    else:
        level = None

    # Determine if vector quantities should be read
    quantities = [kwargs['quantity']]
    if kwargs['stream'] is not None:
        quantities.append(kwargs['stream'] + '1')
        if kwargs['midplane']:
            quantities.append(kwargs['stream'] + '3')
        else:
            quantities.append(kwargs['stream'] + '2')

    # Define grid compression in theta-direction
    h = kwargs['theta_compression'] if kwargs[
        'theta_compression'] is not None else 1.0

    def theta_func(xmin, xmax, _, nf):
        # Compressed theta grid: identity when h == 1.
        x2_vals = np.linspace(xmin, xmax, nf)
        theta_vals = x2_vals + (1.0 - h) / 2.0 * np.sin(2.0 * x2_vals)
        return theta_vals

    # Read data
    data = athena_read.athdf(kwargs['data_file'], quantities=quantities,
                             level=level, face_func_2=theta_func)

    # Extract basic coordinate information
    with h5py.File(kwargs['data_file'], 'r') as f:
        coordinates = f.attrs['Coordinates']
    r = data['x1v']
    theta = data['x2v']
    phi = data['x3v']
    nx1 = len(r)
    nx2 = len(theta)
    nx3 = len(phi)

    # Set radial extent
    if kwargs['r_max'] is not None:
        r_max = kwargs['r_max']
    else:
        r_max = data['x1f'][-1]

    # Account for logarithmic radial coordinate
    if kwargs['logr']:
        r = np.log10(r)
        r_max = np.log10(r_max)

    # Create scalar grid (wrap one cell in phi / mirror in theta so the
    # plotted plane closes on itself)
    if kwargs['midplane']:
        phi_extended = \
            np.concatenate((phi[-1:]-2.0*np.pi, phi, phi[:1]+2.0*np.pi))
        phi_extended -= 0.5 * 2.0 * np.pi / nx3
        r_grid, phi_grid = np.meshgrid(r, phi_extended)
        x_grid = r_grid * np.cos(phi_grid)
        y_grid = r_grid * np.sin(phi_grid)
    else:
        theta_extended = np.concatenate((-theta[0:1], theta, 2.0*np.pi-theta[::-1],
                                         2.0*np.pi+theta[0:1]))
        theta_extended_corrected = theta_extended - 0.5 * (theta[1] - theta[0])
        r_grid, theta_grid = np.meshgrid(r, theta_extended_corrected)
        x_grid = r_grid * np.sin(theta_grid)
        y_grid = r_grid * np.cos(theta_grid)

    # Create streamline grid: Cartesian sample points mapped back to
    # fractional (pixel) coordinates in the (r, angle) arrays
    if kwargs['stream'] is not None:
        x_stream = np.linspace(-r_max, r_max, kwargs['stream_samples'])
        if kwargs['midplane']:
            y_stream = np.linspace(-r_max, r_max, kwargs['stream_samples'])
            x_grid_stream, y_grid_stream = np.meshgrid(x_stream, y_stream)
            r_grid_stream_coord = (x_grid_stream.T**2 + y_grid_stream.T**2)**0.5
            phi_grid_stream_coord = np.pi + np.arctan2(-y_grid_stream.T,
                                                       -x_grid_stream.T)
            phi_grid_stream_pix = \
                (phi_grid_stream_coord+phi[0]) / (2.0*np.pi+2.0*phi[0]) * (nx3+1)
        else:
            z_stream = np.linspace(-r_max, r_max, kwargs['stream_samples'])
            x_grid_stream, z_grid_stream = np.meshgrid(x_stream, z_stream)
            r_grid_stream_coord = (x_grid_stream.T**2 + z_grid_stream.T**2)**0.5
            theta_grid_stream_coord = np.pi - np.arctan2(
                x_grid_stream.T, -z_grid_stream.T)
            if kwargs['theta_compression'] is None:
                theta_grid_stream_pix = \
                    (theta_grid_stream_coord+theta[0]) \
                    / (2.0*np.pi+2.0*theta[0]) * (2*nx2+1)
            else:
                # Nonuniform theta: invert the compressed grid pointwise.
                theta_grid_stream_pix = np.empty_like(theta_grid_stream_coord)
                for (i, j), theta_val in np.ndenumerate(theta_grid_stream_coord):
                    index = sum(theta_extended[1:-1] < theta_val) - 1
                    if index < 0:
                        theta_grid_stream_pix[i, j] = -1
                    elif index < 2 * nx2 - 1:
                        theta_grid_stream_pix[i, j] = index \
                            + (theta_val-theta_extended[index]) \
                            / (theta_extended[index+1]-theta_extended[index])
                    else:
                        theta_grid_stream_pix[i, j] = 2 * nx2 + 2
        r_grid_stream_pix = np.empty_like(r_grid_stream_coord)
        for (i, j), r_val in np.ndenumerate(r_grid_stream_coord):
            index = sum(r < r_val) - 1
            if index < 0:
                r_grid_stream_pix[i, j] = -1
            elif index < nx1 - 1:
                r_grid_stream_pix[
                    i, j] = index + (r_val - r[index]) / (r[index + 1] - r[index])
            else:
                r_grid_stream_pix[i, j] = nx1

    # Perform slicing/averaging of scalar data
    # (indices use // — floor division — so they stay ints on Python 3)
    if kwargs['midplane']:
        if nx2 % 2 == 0:
            vals = np.mean(data[kwargs['quantity']][:, nx2 // 2 - 1:nx2 // 2 + 1, :],
                           axis=1)
        else:
            vals = data[kwargs['quantity']][:, nx2 // 2, :]
        if kwargs['average']:
            vals = np.repeat(np.mean(vals, axis=0, keepdims=True), nx3, axis=0)
    else:
        if kwargs['average']:
            vals_right = np.mean(data[kwargs['quantity']], axis=0)
            vals_left = vals_right
        else:
            vals_right = \
                0.5 * (data[kwargs['quantity']][-1, :, :]
                       + data[kwargs['quantity']][0, :, :])
            vals_left = 0.5 \
                * (data[kwargs['quantity']][nx3 // 2 - 1, :, :]
                   + data[kwargs['quantity']][nx3 // 2, :, :])

    # Join scalar data through boundaries
    if kwargs['midplane']:
        vals = np.vstack((vals[-1:, :], vals, vals[:1, :]))
    else:
        vals = np.vstack((vals_left[:1, :], vals_right, vals_left[::-1, :],
                          vals_right[:1, :]))

    # Perform slicing/averaging of vector data
    if kwargs['stream'] is not None:
        if kwargs['midplane']:
            if nx2 % 2 == 0:
                vals_r = np.mean(data[kwargs['stream'] + '1']
                                 [:, nx2 // 2 - 1:nx2 // 2 + 1, :], axis=1).T
                vals_phi = np.mean(data[kwargs['stream'] + '3']
                                   [:, nx2 // 2 - 1:nx2 // 2 + 1, :], axis=1).T
            else:
                vals_r = data[kwargs['stream'] + '1'][:, nx2 // 2, :].T
                vals_phi = data[kwargs['stream'] + '3'][:, nx2 // 2, :].T
            if kwargs['stream_average']:
                vals_r = np.tile(np.reshape(np.mean(vals_r, axis=1), (nx1, 1)), nx3)
                vals_phi = np.tile(
                    np.reshape(np.mean(vals_phi, axis=1), (nx1, 1)), nx3)
        else:
            if kwargs['stream_average']:
                vals_r_right = np.mean(data[kwargs['stream'] + '1'], axis=0).T
                vals_r_left = vals_r_right
                vals_theta_right = np.mean(data[kwargs['stream'] + '2'], axis=0).T
                vals_theta_left = -vals_theta_right
            else:
                vals_r_right = data[kwargs['stream'] + '1'][0, :, :].T
                vals_r_left = data[kwargs['stream'] + '1'][nx3 // 2, :, :].T
                vals_theta_right = data[kwargs['stream'] + '2'][0, :, :].T
                vals_theta_left = -data[kwargs['stream'] + '2'][nx3 // 2, :, :].T

    # Join vector data through boundaries and interpolate onto the
    # Cartesian streamline grid
    if kwargs['stream'] is not None:
        if kwargs['midplane']:
            vals_r = np.hstack((vals_r[:, -1:], vals_r, vals_r[:, :1]))
            vals_r = map_coordinates(vals_r, (r_grid_stream_pix, phi_grid_stream_pix),
                                     order=1, cval=np.nan)
            vals_phi = np.hstack((vals_phi[:, -1:], vals_phi, vals_phi[:, :1]))
            vals_phi = map_coordinates(vals_phi,
                                       (r_grid_stream_pix, phi_grid_stream_pix),
                                       order=1, cval=np.nan)
        else:
            vals_r = np.hstack((vals_r_left[:, :1], vals_r_right,
                                vals_r_left[:, ::-1], vals_r_right[:, :1]))
            vals_r = map_coordinates(vals_r,
                                     (r_grid_stream_pix, theta_grid_stream_pix),
                                     order=1, cval=np.nan)
            vals_theta = np.hstack((vals_theta_left[:, :1], vals_theta_right,
                                    vals_theta_left[:, ::-1], vals_theta_right[:, :1]))
            vals_theta = map_coordinates(vals_theta,
                                         (r_grid_stream_pix, theta_grid_stream_pix),
                                         order=1, cval=np.nan)

    # Transform vector data to Cartesian components
    if kwargs['stream'] is not None:
        if kwargs['logr']:
            r_vals = 10.0**r_grid_stream_coord
            logr_vals = r_grid_stream_coord
        else:
            r_vals = r_grid_stream_coord
        if kwargs['midplane']:
            sin_phi = np.sin(phi_grid_stream_coord)
            cos_phi = np.cos(phi_grid_stream_coord)
            if kwargs['logr']:
                dx_dr = 1.0 / (np.log(10.0) * r_vals) * cos_phi
                dy_dr = 1.0 / (np.log(10.0) * r_vals) * sin_phi
                dx_dphi = -logr_vals * sin_phi
                dy_dphi = logr_vals * cos_phi
            else:
                dx_dr = cos_phi
                dy_dr = sin_phi
                dx_dphi = -r_vals * sin_phi
                dy_dphi = r_vals * cos_phi
            if not (coordinates == 'schwarzschild'
                    or coordinates == 'kerr-schild'):
                dx_dphi /= r_vals
                dy_dphi /= r_vals
            vals_x = dx_dr * vals_r + dx_dphi * vals_phi
            vals_y = dy_dr * vals_r + dy_dphi * vals_phi
        else:
            sin_theta = np.sin(theta_grid_stream_coord)
            cos_theta = np.cos(theta_grid_stream_coord)
            if kwargs['logr']:
                dx_dr = 1.0 / (np.log(10.0) * r_vals) * sin_theta
                dz_dr = 1.0 / (np.log(10.0) * r_vals) * cos_theta
                dx_dtheta = logr_vals * cos_theta
                dz_dtheta = -logr_vals * sin_theta
            else:
                dx_dr = sin_theta
                dz_dr = cos_theta
                dx_dtheta = r_vals * cos_theta
                dz_dtheta = -r_vals * sin_theta
            if not (coordinates == 'schwarzschild'
                    or coordinates == 'kerr-schild'):
                dx_dtheta /= r_vals
                dz_dtheta /= r_vals
            vals_x = dx_dr * vals_r + dx_dtheta * vals_theta
            vals_z = dz_dr * vals_r + dz_dtheta * vals_theta

    # Determine colormapping properties
    cmap = plt.get_cmap(kwargs['colormap'])
    vmin = kwargs['vmin']
    vmax = kwargs['vmax']
    if kwargs['logc']:
        norm = colors.LogNorm()
    else:
        norm = colors.Normalize()

    # Make plot
    plt.figure()
    im = plt.pcolormesh(x_grid, y_grid, vals, cmap=cmap, vmin=vmin,
                        vmax=vmax, norm=norm)
    if kwargs['stream'] is not None:
        with warnings.catch_warnings():
            warnings.filterwarnings('ignore',
                                    'invalid value encountered in greater_equal',
                                    RuntimeWarning, 'numpy')
            if kwargs['midplane']:
                plt.streamplot(x_stream, y_stream, vals_x.T, vals_y.T,
                               density=kwargs['stream_density'], color='k')
            else:
                plt.streamplot(x_stream, z_stream, vals_x.T, vals_z.T,
                               density=kwargs['stream_density'], color='k')
    plt.gca().set_aspect('equal')
    plt.xlim((-r_max, r_max))
    plt.ylim((-r_max, r_max))
    r_string = r'\log_{10}(r)\ ' if kwargs['logr'] else r'r\ '
    angle_string_x = r'\cos(\phi)' if kwargs['midplane'] else r'\sin(\theta)'
    angle_string_y = r'\sin(\phi)' if kwargs['midplane'] else r'\cos(\theta)'
    plt.xlabel('$' + r_string + angle_string_x + '$')
    plt.ylabel('$' + r_string + angle_string_y + '$')
    plt.colorbar(im)
    plt.savefig(kwargs['output_file'], bbox_inches='tight')
def torque(filename, setup):
    """Compute radial profiles of surface density and of the T/R/N
    components of the perturber's gravitational force on the disc.

    Reads ``athdf/<filename>``, integrates over theta and phi, and writes
    tab-separated ``(radius, value, radial cell width)`` tables into the
    ``sigma/``, ``Tforce/``, ``Rforce/``, ``RforcePert/``, ``Nforce/``
    and ``NforcePert/`` directories.

    Parameters
    ----------
    filename : str
        Name of the .athdf dump inside the ``athdf/`` directory.
    setup : object
        Simulation setup; attributes read here: ``GM``, ``GM1``, ``R0``,
        ``inclination``, ``Rsoft``, ``number``, ``time``, ``hor``,
        ``zeta``, ``rho_0``.
    """
    # 1. Read data from .athdf file
    array = athdf("athdf/" + filename)
    x1 = array['x1f']
    x2 = array['x2f']
    x3 = array['x3f']
    data = array['dens']
    n1 = len(x1)
    n2 = len(x2)
    n3 = len(x3)

    # 2. Transform to cell-centered coordinates (vectorized).
    # Radial center is the volume centroid 0.75*(b^4-a^4)/(b^3-a^3),
    # written in an algebraically equivalent cancellation-free form.
    xl, xr = x1[:-1], x1[1:]
    y1 = 0.75 * (xr + xl**3 / (xl * xl + xl * xr + xr * xr))
    d1 = xr - xl
    # Polar center is the sin(theta)-weighted centroid of the cell.
    tl, tr = x2[:-1], x2[1:]
    y2 = ((tl * np.cos(tl) - tr * np.cos(tr) - np.sin(tl) + np.sin(tr))
          / (np.cos(tl) - np.cos(tr)))
    d2 = tr - tl
    # Azimuthal center is the arithmetic mean.
    y3 = 0.5 * (x3[:-1] + x3[1:])
    d3 = x3[1:] - x3[:-1]

    # 3. Reshape for broadcasting over the (phi, theta, r) data cube.
    a1 = y1.reshape(1, 1, n1 - 1)
    a2 = y2.reshape(1, n2 - 1, 1)
    a3 = y3.reshape(n3 - 1, 1, 1)
    b2 = d2.reshape(1, n2 - 1, 1)
    b3 = d3.reshape(n3 - 1, 1, 1)

    # 4. Simulation setup
    GM0 = setup.GM
    GM1 = setup.GM1
    R0 = setup.R0
    pl_inc = setup.inclination
    Rsoft = setup.Rsoft
    time = setup.number * setup.time
    # Orbital phase of the companion at this output time.
    phase = math.pow((GM0 + GM1) / R0 / R0 / R0, 0.5) * time
    h = setup.hor
    zeta = setup.zeta
    rho0 = setup.rho_0

    # Volume weight r^2 sin(theta) dtheta dphi shared by every integral
    # below (broadcasting replaces the original np.ones(...) factors).
    dvol = np.sin(a2) * a1 * a1 * b2 * b3

    def _write_profile(dirname, suffix, profile):
        """Write one (radius, value, radial width) tab-separated table."""
        output = out_name(filename, suffix)
        with open(dirname + "/" + output, 'w+') as out:
            for ir in range(0, n1 - 1):
                out.write(str(y1[ir]) + "\t" + str(profile[ir]) + "\t"
                          + str(d1[ir]) + "\n")

    # 5. Calculate surface density
    sigma = (data * dvol).sum(axis=0).sum(axis=0)
    _write_profile("sigma", "sigma.dat", sigma)
    del sigma

    # Cartesian offsets from the companion, which sits at radius R0,
    # inclination pl_inc, azimuth `phase`.
    e1 = a1 * np.sin(a2) * np.cos(a3) - R0 * np.cos(pl_inc) * np.cos(phase)
    e2 = a1 * np.sin(a2) * np.sin(a3) - R0 * np.sin(phase)
    e3 = a1 * np.cos(a2) - R0 * np.sin(pl_inc) * np.cos(phase)
    dist2 = e1 * e1 + e2 * e2 + e3 * e3

    # Unperturbed (initial) density profile, used for the "Pert" outputs.
    data0 = rho0 * np.power(a1 * np.sin(a2), -zeta) * np.exp(
        (1.0 - 1.0 / np.sin(a2)) / h / h / a1)

    # Softened-gravity kernel shared by all three force components:
    # (d^4 + 3.5 d^2 s^2 + 4.375 s^4) / (d^2 + s^2)^3.5 with s = Rsoft.
    soft2 = Rsoft * Rsoft
    dsoft = dist2 + soft2
    kernel = ((dist2 * dist2 + 3.5 * dist2 * soft2 + 4.375 * soft2 * soft2)
              / (dsoft * dsoft * dsoft * np.sqrt(dsoft)))

    cosp, sinp = math.cos(phase), math.sin(phase)
    cosi, sini = math.cos(pl_inc), math.sin(pl_inc)

    # 6. Calculate T-force (projection along the companion's motion).
    arrayT = kernel * (-e1 * cosi * sinp + e2 * cosp - e3 * sini * sinp)
    _write_profile("Tforce", "Tforce.dat",
                   (arrayT * data * dvol).sum(axis=0).sum(axis=0))
    del arrayT

    # 7. Calculate R-force (projection along the radial direction).
    arrayR = kernel * (e1 * cosi * cosp + e2 * sinp + e3 * sini * cosp)
    _write_profile("Rforce", "Rforce.dat",
                   (arrayR * data * dvol).sum(axis=0).sum(axis=0))
    _write_profile("RforcePert", "RforcePert.dat",
                   (arrayR * (data - data0) * dvol).sum(axis=0).sum(axis=0))
    del arrayR

    # 8. Calculate N-force (projection normal to the orbital plane).
    arrayN = kernel * (-e1 * sini + e3 * cosi)
    _write_profile("Nforce", "Nforce.dat",
                   (arrayN * data * dvol).sum(axis=0).sum(axis=0))
    _write_profile("NforcePert", "NforcePert.dat",
                   (arrayN * (data - data0) * dvol).sum(axis=0).sum(axis=0))
    del arrayN
def angles(filename):
    """Compute radial profiles of the disc's inclination and precession
    angles from summed angular-momentum components, writing tab-separated
    ``(radius, angle, radial cell width)`` tables to ``inc_angle/`` and
    ``prec_angle/``.

    Parameters
    ----------
    filename : str
        Name of the .athdf dump inside the ``athdf/`` directory.
    """
    # Read the file
    array = athdf("athdf/" + filename)
    x1 = array['x1f']
    x2 = array['x2f']
    x3 = array['x3f']
    v2 = array['mom2']
    v3 = array['mom3']
    n1 = len(x1)
    n2 = len(x2)
    n3 = len(x3)
    output1 = out_name(filename, "inc_angle.dat")
    output2 = out_name(filename, "prec_angle.dat")

    # Transform to cell-centered coordinates (vectorized).
    # Radial center: volume centroid 0.75*(b^4-a^4)/(b^3-a^3) written in
    # a cancellation-free form.
    xl, xr = x1[:-1], x1[1:]
    y1 = 0.75 * (xr + xl**3 / (xl * xl + xl * xr + xr * xr))
    d1 = xr - xl
    # Polar center: sin(theta)-weighted centroid of the cell.
    tl, tr = x2[:-1], x2[1:]
    y2 = ((tl * np.cos(tl) - tr * np.cos(tr) - np.sin(tl) + np.sin(tr))
          / (np.cos(tl) - np.cos(tr)))
    # Azimuthal center: arithmetic mean.
    y3 = 0.5 * (x3[:-1] + x3[1:])

    # Reshape for broadcasting over the (phi, theta, r) data cube.
    a2 = y2.reshape(1, n2 - 1, 1)
    a3 = y3.reshape(n3 - 1, 1, 1)

    # Angular-momentum components built from the theta and phi momenta.
    # NOTE(review): 'dens' and 'mom1' are not used here and the sums below
    # carry no explicit cell-volume weight -- confirm this matches the
    # intended definition of the disc angular momentum.
    Lz = np.sin(a2) * np.sin(a2) * v3
    Ly = (np.cos(a3) * np.sin(a2) * v2
          - np.cos(a2) * np.sin(a2) * np.sin(a3) * v3)
    Lx = (-np.sin(a3) * np.sin(a2) * v2
          - np.cos(a2) * np.cos(a3) * np.sin(a2) * v3)
    resLz = Lz.sum(axis=0).sum(axis=0)
    resLy = Ly.sum(axis=0).sum(axis=0)
    resLx = Lx.sum(axis=0).sum(axis=0)
    L = np.sqrt(resLx * resLx + resLy * resLy + resLz * resLz)
    # Inclination: angle between L and the z axis.
    inc = np.arccos(resLz / L)
    # Precession: orientation of L projected into the x-y plane.
    prec = np.arccos(-resLy / np.sqrt(resLy * resLy + resLx * resLx))

    # Write output
    with open("inc_angle/" + output1, 'w+') as out1:
        for ir in range(0, n1 - 1):
            out1.write(str(y1[ir]) + "\t" + str(inc[ir]) + "\t"
                       + str(d1[ir]) + "\n")
    with open("prec_angle/" + output2, 'w+') as out2:
        for ir in range(0, n1 - 1):
            out2.write(str(y1[ir]) + "\t" + str(prec[ir]) + "\t"
                       + str(d1[ir]) + "\n")
def get_column(filename, outfile, tstar, dim, lz):
    """Sum the gas density along one fixed row of the density cube,
    counting only cells above a threshold, and append ``time column``
    to *outfile*.

    Parameters
    ----------
    filename : str
        Path to the .athdf dump.
    outfile : file object
        Open text file the summary row is appended to.
    tstar, dim, lz
        Accepted for interface compatibility but currently overwritten
        below (see NOTE).
    """
    # Python modules
    import h5py
    # Athena++ modules
    import athena_read

    # NOTE(review): the tstar/dim/lz arguments are immediately overwritten
    # with hard-coded values here (unlike get_mass, which honours them) --
    # confirm whether the parameters should be respected instead.
    tstar = 1000.0
    dim = 2
    lz = 3000

    # Maximum refinement level to read at; coarser dumps are kept as-is.
    leveln = 2
    quantities = [
        'rho', 'vel1', 'vel2', 'vel3', 'pgas', 'Er', 'Fr1', 'Fr2', 'Fr3',
        'Pr11', 'Pr22', 'Pr33', 'Pr12', 'Pr13', 'Pr23', 'Er0', 'Fr01',
        'Fr02', 'Fr03'
    ]

    # Read the attributes we need; the context manager closes the file
    # even if an exception is raised.
    with h5py.File(filename, 'r') as f:
        level = f.attrs['MaxLevel']
        time = f.attrs['Time']

    # Subsample down to `leveln` when the dump is more refined than that.
    subsample = False
    if leveln is not None and level > leveln:
        subsample = True
        level = leveln

    data = athena_read.athdf(filename, quantities=quantities, level=level,
                             subsample=subsample)
    dens_tot = data['rho']

    # Column density along x in the single row (k=0, j=199), counting only
    # cells above the 1.99e-4 threshold; the first 1000 cells are scanned,
    # matching the original hard-coded loop bound.
    row = dens_tot[0, 199, :1000]
    selected = row[row > 1.99e-4]
    # Keep the integer 0 when nothing qualifies so the written text is
    # identical to the original accumulator's output.
    column = selected.sum() if selected.size else 0

    table = [time, column]
    print(table)
    outfile.write(format(time) + ' ')
    outfile.write(format(column) + ' ')
    outfile.write('\n')
    return
def get_mass(filename, outfile, tstar, dim, lz):
    """Compute the density-weighted mean velocity and velocity dispersion
    of gas above a density threshold and append one summary row
    ``time mass vx0 vy0 sigma0 sigma1 sigma`` to *outfile*.

    Parameters
    ----------
    filename : str
        Path to the .athdf dump.
    outfile : file object
        Open text file the summary row is appended to.
    tstar, lz
        Accepted for interface compatibility; not used by the
        calculation below.
    dim : int
        2 for a two-component dispersion, anything else adds the
        z component as well.
    """
    # Python modules
    import h5py
    import numpy as np
    # Athena++ modules
    import athena_read

    # Maximum refinement level to read at; coarser dumps are kept as-is.
    leveln = 2
    quantities = [
        'rho', 'vel1', 'vel2', 'vel3', 'pgas', 'Er', 'Fr1', 'Fr2', 'Fr3',
        'Pr11', 'Pr22', 'Pr33', 'Pr12', 'Pr13', 'Pr23', 'Er0', 'Fr01',
        'Fr02', 'Fr03'
    ]

    # Read the attributes we need; the context manager closes the file
    # even if an exception is raised.
    with h5py.File(filename, 'r') as f:
        level = f.attrs['MaxLevel']
        time = f.attrs['Time']

    # Subsample down to `leveln` when the dump is more refined than that.
    subsample = False
    if leveln is not None and level > leveln:
        subsample = True
        level = leveln

    data = athena_read.athdf(filename, quantities=quantities, level=level,
                             subsample=subsample)
    dens_tot = data['rho']

    # Select cells above the density threshold once and reuse the mask
    # (the original recomputed np.where(...) for every field).
    critical = 2.0e-4
    mask = dens_tot > critical
    dens = dens_tot[mask]
    vx = data['vel1'][mask]
    vy = data['vel2'][mask]
    vz = data['vel3'][mask]

    mass = np.mean(dens)
    # Density-weighted mean velocities and dispersions; the x/y pieces are
    # identical for both branches, so compute them once.
    vx0 = np.mean(vx * dens) / mass
    vy0 = np.mean(vy * dens) / mass
    sigma0 = np.sqrt(np.mean(dens * ((vx - vx0)**2.0)) / mass)
    sigma1 = np.sqrt(np.mean(dens * ((vy - vy0)**2.0)) / mass)
    if (dim == 2):
        sigma = np.sqrt(sigma0**2.0 + sigma1**2.0)
    else:
        vz0 = np.mean(vz * dens) / mass
        sigma2 = np.sqrt(np.mean(dens * ((vz - vz0)**2.0)) / mass)
        # NOTE(review): vz0 and sigma2 enter sigma but are not written to
        # the output row, mirroring the original -- confirm this is
        # intended.
        sigma = np.sqrt(sigma0**2.0 + sigma1**2.0 + sigma2**2.0)

    table = [time, mass, vx0, vy0, sigma0, sigma1, sigma]
    print(table)
    outfile.write(format(time) + ' ')
    outfile.write(format(mass) + ' ')
    outfile.write(format(vx0) + ' ')
    outfile.write(format(vy0) + ' ')
    outfile.write(format(sigma0) + ' ')
    outfile.write(format(sigma1) + ' ')
    outfile.write(format(sigma) + ' ')
    outfile.write('\n')
    return