def diff_files(fn1, fn2, field):
    pf1 = load(fn1)
    data1 = pf1.h.all_data()[field]
    pf2 = load(fn2)
    data2 = pf2.h.all_data()[field]
    diff = data1 - data2
    norm = np.sqrt((diff ** 2).sum() / data1.sum())
    print("Calculating difference in %s between files %s and %s"
          % (field, pf1, pf2))
    print("L2 error norm = %12.6f, min and max error = %15.6f %15.6f"
          % (norm, diff.min(), diff.max()))
    del data1, data2, diff, pf1, pf2
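# Hypothetical usage sketch for diff_files (not part of the original
# script): compare a few fields between two outputs of the same problem.
# The filenames and field names below are placeholders, and the function
# assumes `load` (from yt.mods) and `np` (numpy) are already imported.
if __name__ == '__main__':
    for fld in ['dend', 'vlx', 'vly']:
        diff_files('run_ref_0001.h5', 'run_new_0001.h5', fld)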
def io_nodes(fn, n_io, n_work, func, *args, **kwargs):
    from yt.mods import load
    pool, wg = ProcessorPool.from_sizes([(n_io, "io"), (n_work, "work")])
    rv = None
    if wg.name == "work":
        ds = load(fn)
        with remote_io(ds, wg, pool):
            rv = func(ds, *args, **kwargs)
    elif wg.name == "io":
        ds = load(fn)
        io = IOCommunicator(ds, wg, pool)
        io.wait()
    # We should broadcast the result
    rv = pool.comm.mpi_bcast(rv, root=pool['work'].ranks[0])
    pool.free_all()
    mylog.debug("Return value: %s", rv)
    return rv
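# A minimal sketch of how io_nodes might be driven, assuming the script
# is launched under MPI (e.g. `mpirun -np 6 python script.py`) so that
# ProcessorPool.from_sizes can split the ranks into an "io" group and a
# "work" group. The callback and dataset name below are hypothetical.
def total_mass(ds):
    # Runs on the "work" ranks; grid reads are serviced by the io ranks.
    return ds.h.all_data().quantities['TotalQuantity']('CellMassMsun')

# mass = io_nodes('DD0040/DD0040', n_io=2, n_work=4, func=total_mass)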
def visualize(files):
    output = []
    for fn in parallel_objects(files, njobs=-1):
        pf = load(fn)
        for field in FIELDS:
            slc = SlicePlot(pf, 'z', field)
            output.append(slc.save(fn.replace('.h5', '_%s.png' % field))[0])
    return output
def visualize(files):
    output = []
    for fn in files:
        pf = load(fn)
        for field in FIELDS:
            slc = SlicePlot(pf, 'z', field)
            if field == "prei":
                slc.set_cmap(field, 'gist_stern')
            output.append(slc.save(fn.replace('.h5', '_%s.png' % field))[0])
    return output
def calculate_norm(fn):
    pf = load(fn)
    data = pf.h.all_data()
    diff = data['inid'].v - data['denn'].v
    norm = np.sqrt((diff ** 2).sum() / data['inid'].sum())
    print("Calculating difference between numerical and analytical solution")
    print("L2 error norm = %12.6f, min and max error = %15.6f %15.6f"
          % (norm, diff.min(), diff.max()))
    del data, diff, pf
def visualize(files):
    output = []
    for fn in parallel_objects(files, njobs=-1):
        pf = load(fn)
        for field in FIELDS:
            slc = SlicePlot(pf, 'z', field)
            if field == 'curz':
                slc.set_cmap(field, 'bwr')
                maxabs = abs(slc._frb[field]).max()
                slc.set_log(field, False)
                slc.set_zlim(field, -maxabs, maxabs)
            output.append(slc.save(fn.replace('.h5', '_%s.png' % field))[0])
    return output
def yt_data(path):
    """Use yt to load a gridded dataset

    This function will extract all particle and field datasets
    (excluding derived datasets) from a file. Currently, you cannot
    make images from this data. The resulting Field dataset refers
    to the highest-resolution subgrids.

    Parameters
    ----------
    path : str
        Path to the file to load. This is what gets passed to
        yt.mods.load()

    Returns
    -------
    One or two Glue data objects
    """
    ds = load(path)
    dd = ds.h.all_data()
    particles = [f for f in ds.h.field_list
                 if ds.field_info[f].particle_type]
    fields = [f for f in ds.h.field_list
              if not ds.field_info[f].particle_type]
    lbl = data_label(path)
    result = []
    if len(particles) > 0:
        d1 = Data(label=lbl + "_particle")
        shp = dd[particles[0]].shape
        for p in particles:
            d1.add_component(YtComponent(ds, p, shp), p)
        result.append(d1)
    if len(fields) > 0:
        d2 = Data(label=lbl + "_field")
        shp = dd[fields[0]].shape
        for f in fields:
            d2.add_component(YtComponent(ds, f, shp), f)
        result.append(d2)
    return result
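# A minimal sketch of how yt_data plugs into Glue, assuming glue-core is
# installed; the helper name and dataset path are hypothetical.
from glue.core import DataCollection

def load_into_glue(path):
    # Wrap the one or two Data objects returned by yt_data in a
    # DataCollection, ready to attach to a GlueApplication.
    return DataCollection(yt_data(path))

# dc = load_into_glue('DD0010/moving7_0010')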
def test_write_gdf():
    """Main test suite for write_gdf"""
    tmpdir = tempfile.mkdtemp()
    tmpfile = os.path.join(tmpdir, "test_gdf.h5")
    try:
        test_ds = fake_random_ds(64)
        write_to_gdf(
            test_ds, tmpfile, data_author=TEST_AUTHOR, data_comment=TEST_COMMENT
        )
        del test_ds
        assert isinstance(load(tmpfile), GDFDataset)
        h5f = h5.File(tmpfile, "r")
        gdf = h5f["gridded_data_format"].attrs
        assert_equal(gdf["data_author"], TEST_AUTHOR)
        assert_equal(gdf["data_comment"], TEST_COMMENT)
        h5f.close()
    finally:
        shutil.rmtree(tmpdir)
def setup(self):
    if yt.__version__.startswith('3'):
        self.ds = yt.load(self.dsname)
        self.ad = self.ds.all_data()
        self.field_name = "density"
    else:
        self.ds = load(self.dsname)
        self.ad = self.ds.h.all_data()
        self.field_name = "Density"
    # Warmup hdd
    self.ad[self.field_name]
    if yt.__version__.startswith('3'):
        mi, ma = self.ad.quantities['Extrema'](self.field_name)
        self.tf = yt.ColorTransferFunction(
            (np.log10(mi) + 1, np.log10(ma)))
    else:
        mi, ma = self.ad.quantities['Extrema'](self.field_name)[0]
        self.tf = ColorTransferFunction((np.log10(mi) + 1, np.log10(ma)))
    self.tf.add_layers(5, w=0.02, colormap="spectral")
    self.c = [0.5, 0.5, 0.5]
    self.L = [0.5, 0.2, 0.7]
    self.W = 1.0
    self.Npixels = 512
except ImportError:
    mpi = False
    rank = 0

arguments = docopt.docopt(__doc__, version='Surface Analysis 13/11/13')


def glob_files(tube_r, search):
    files = glob.glob(os.path.join(cfg.data_dir, tube_r, search))
    files.sort()
    return files


def path_join(filename):
    return os.path.join(os.path.join(cfg.data_dir, '%s/' % tube_r), filename)

# Read Wave Flux HDF5 files in using yt
timeseries = ytm.load(os.path.join(cfg.gdf_dir,
                                   "*{}_fwave_0*.gdf".format(cfg.str_exp_fac)))
ds = timeseries[0]

#==============================================================================
# Define some crap
#==============================================================================
top_cut = -5
cube_slice = np.s_[:, :, :top_cut]
x_slice = np.s_[:, :, :, :top_cut]

cg = ds.h.grids[0]

# n_lines is the number of fieldlines used in the surface
n_lines = 100
# line_n is the fieldline to use as "the line"
line_n = 25
#==============================================================================
""" import numpy as np import yt.mods as ytm from tvtk.api import tvtk from mayavi import mlab from tvtk.util.ctf import PiecewiseFunction from tvtk.util.ctf import ColorTransferFunction from astropy.io import fits # pysac imports # These files normally live in pysac import yt_fields import mayavi_plotting_functions as mpf ds = ytm.load('./data/Slog_p30-0_A20r2_B005_00400.gdf') cg = ds.h.grids[0] cube_slice = np.s_[:,:,:-5] r = tvtk.XMLPolyDataReader(file_name='./data/Fieldline_surface_Slog_p30-0_A20r2_r60__B005_00400.vtp') r.update() surf_poly = r.output fig = mlab.figure() # Create a bfield tvtk field, in mT bfield = mlab.pipeline.vector_field(cg['mag_field_x'][cube_slice] * 1e3, cg['mag_field_y'][cube_slice] * 1e3, cg['mag_field_z'][cube_slice] * 1e3, name="Magnetic Field",figure=fig) # Create a scalar field of the magntiude of the vector field
#!/usr/bin/env python
from yt.mods import load
import sys
from matplotlib.pylab import imshow, savefig

for fn in sys.argv[1:]:
    fields = ['dend']
    pf = load(fn)
    c = 0.5 * (pf.domain_left_edge + pf.domain_right_edge)
    S = pf.domain_right_edge - pf.domain_left_edge
    n_d = pf.domain_dimensions
    slc = pf.h.slice(2, c[2], fields=fields)
    frb = slc.to_frb(S[0], (n_d[1], n_d[0]), height=S[1], center=c)
    imshow(frb['dend'])
    savefig('%s.png' % pf)
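# Usage sketch (script name and filenames hypothetical): render a
# z-midplane image of the 'dend' field for every file on the command
# line, writing one PNG per dataset, e.g.
#     python plot_dend_slices.py run_0001.h5 run_0002.h5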
# -*- coding: utf-8 -*-
"""
Created on Tue Apr 8 15:12:13 2014

@author: stuart
"""
import os
import glob

import numpy as np
import yt.mods as ytm

from sacconfig import SACConfig
cfg = SACConfig()

gdf_files = glob.glob(os.path.join(cfg.gdf_dir, cfg.get_identifier() + "_0*.gdf"))
gdf_files.sort()

ts = ytm.load(gdf_files)

np.save(os.path.join(cfg.data_dir, "Times_{}.npy".format(cfg.get_identifier())),
        [ds.current_time for ds in ts])
import pysac.io.yt_fields
import pysac.analysis.tube3D.tvtk_tube_functions as ttf
import pysac.plot.tube3D.mayavi_plotting_functions as mpf

# Import this repo's config
sys.path.append("../")
from scripts.sacconfig import SACConfig
cfg = SACConfig()


def glob_files(tube_r, search):
    files = glob.glob(os.path.join(cfg.data_dir, tube_r, search))
    files.sort()
    return files

n = 400

timeseries = ytm.load(os.path.join(cfg.gdf_dir, "*5_0*.gdf"))
ds = timeseries[n]
cg = ds.h.grids[0]
cube_slice = np.s_[:, :, :-5]

# Define the size of the domain
linesurf = glob_files('r60', 'Fieldline_surface*')
surf_poly = ttf.read_step(linesurf[n])

mlab.options.offscreen = True
fig = mlab.figure()

# Create a bfield tvtk field, in mT
bfield = mlab.pipeline.vector_field(cg['mag_field_x'][cube_slice] * 1e3,
                                    cg['mag_field_y'][cube_slice] * 1e3,
                                    cg['mag_field_z'][cube_slice] * 1e3,
                                    name="Magnetic Field", figure=fig)
def octree_zoom_bbox_filter(fname, pf, bbox0, field_add):

    ds0 = pf
    ds0.index
    ad = ds0.all_data()

    print('\n\n')
    print('----------------------------')
    print("[octree zoom_bbox_filter:] Calculating Center of Mass")

    gas_com_x = np.sum(ad["gasdensity"] * ad["gascoordinates"][:, 0]) / np.sum(ad["gasdensity"])
    gas_com_y = np.sum(ad["gasdensity"] * ad["gascoordinates"][:, 1]) / np.sum(ad["gasdensity"])
    gas_com_z = np.sum(ad["gasdensity"] * ad["gascoordinates"][:, 2]) / np.sum(ad["gasdensity"])

    com = [gas_com_x, gas_com_y, gas_com_z]

    print("[octree zoom_bbox_filter:] Center of Mass is at coordinates (kpc): ", com)

    center = [cfg.model.x_cent, cfg.model.y_cent, cfg.model.z_cent]
    print('[octree zoom_bbox_filter:] using center: ', center)

    box_len = cfg.par.zoom_box_len
    # Now begin the process of converting box_len to physical units in
    # case we're in a cosmological simulation. We'll first give it
    # units of proper kpc, then convert to code length (which for
    # gadget is kpc/h) for the bbox calculation (dropping the units of
    # course). Then when we re-convert to proper units, the box_len as
    # input in parameters_master will be in proper units. If a
    # simulation isn't cosmological, then the only difference here will
    # be a 1/h.
    box_len = ds0.quan(box_len, 'kpc')
    box_len = box_len.convert_to_units('code_length').value
    bbox_lim = box_len

    bbox1 = [[center[0] - bbox_lim, center[0] + bbox_lim],
             [center[1] - bbox_lim, center[1] + bbox_lim],
             [center[2] - bbox_lim, center[2] + bbox_lim]]
    print('[octree zoom] new zoomed bbox (comoving/h) in code units = ', bbox1)

    try:  # particle
        ds1 = load(fname, bounding_box=bbox1, n_ref=cfg.par.n_ref,
                   over_refine_factor=cfg.par.oref)
    except:  # amr
        ds1 = load(fname, n_ref=cfg.par.n_ref,
                   over_refine_factor=cfg.par.oref)
        bbox1 = None

    ds1.periodicity = (False, False, False)

    # Re-add the new powderday convention fields; this time we need to
    # make sure to do the ages calculation since it hasn't been done
    # before.
    ds1 = field_add(None, bounding_box=bbox1, ds=ds1, starages=True)

    return ds1
zc = list(map(np.float64, args.z))

my_cmap = matplotlib.colors.LinearSegmentedColormap('my_colormap', ct.p05)

h = 0.146484375
phi = 0.5
c1 = np.array([xc[0], phi, zc[0]])
c2 = np.array([xc[1], phi, zc[1]])
patch1 = [c1[0] - h, c1[0] + h, c1[2] - h, c1[2] + h]
patch2 = [c2[0] - h, c2[0] + h, c2[2] - h, c2[2] + h]

vmin = 0.0
vmax = 0.1 * 0.2

first_pass = True

for fn in parallel_objects(args.files, njobs=-1):
    pf = load(fn)
    field = "dend"
    le = pf.domain_left_edge * pf['au']
    re = pf.domain_right_edge * pf['au']
    s = pf.h.slice(1, phi, fields=["dend"])  # = pf.h.proj(1, 'dend')
    # fac = pf['au'] / (2.0 * pf['dend'] * np.pi)
    fac = 1. / pf['dend']
    if first_pass:
        c1 /= pf.units['au']
        c2 /= pf.units['au']
        ext = [le[0], re[0], le[2], re[2]]
        fig = plt.figure(0, figsize=(14, 10))
    fig.clf()
    ax1 = plt.subplot2grid((4, 8), (0, 0), colspan=8)
#!/usr/bin/env python
'''Stupid h5 content comparison script'''
import sys
from yt.mods import load

THRESHOLD = 1e-9

if len(sys.argv) != 3:
    print("Wrong number of arguments!")
    sys.exit(-1)

PF1 = load(sys.argv[1])
PF2 = load(sys.argv[2])
DATA1 = PF1.h.all_data()
DATA2 = PF2.h.all_data()

if not PF1.h.field_list == PF2.h.field_list:
    print("Fields in files differ!")
    sys.exit(-1)

for field in PF1.h.field_list:
    if abs(DATA1[field] - DATA2[field]).max() >= THRESHOLD:
        print("Field %s differs" % field)
        sys.exit(-1)
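# Usage sketch (script name and filenames hypothetical): the script exits
# with a nonzero status on the first mismatch, so it can gate a test
# pipeline, e.g.
#     ./compare_h5.py reference.h5 current.h5 && echo "files agree"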
def density_profile_1D_evolution(files, outdir):
    import matplotlib.pyplot as plt
    import yt.mods as ytm
    from numpy import linspace
    from toolbox import select_scale

    print('Producing a time-evolution plot of the radial density profile.')

    pc = 3.08568025e18
    AU = 1.49598e13
    Rsun = 6.955e10
    scale = select_scale(6.17e18)

    #ts = ytm.TimeSeriesData.from_filenames(files)

    fig = plt.figure()
    ax = plt.subplot(1, 1, 1)
    ax.set_xlabel(
        r'Radius [{0}]'.format(r'$R_{\odot}$' if scale == 'Rsun' else scale))
    ax.set_ylabel(r'Density [g/cm$^3$]')
    ax.grid(True)

    numfiles = len(files)
    greys = linspace(0.8, 0, numfiles)
    colors = [[greys[i], greys[i], greys[i]] for i in range(numfiles)]

    for i, fname in enumerate(files):
        print('Processing file {0} of {1}: {2}'.format(i + 1, len(files), fname))
        pf = ytm.load(fname)
        a = ytm.PlotCollection(
            pf, center=[0.5, 0.5, 0.5] / pf["unitary"]).add_profile_sphere(
                0.5, "unitary", ["Radius", "Density"], weight="CellMassMsun")
        radii, densities = a.data['Radius'] * pf[scale], a.data['Density']
        if i == numfiles - 1:
            ax.semilogy(radii, densities, color=colors[i],
                        label="Radial average density profile")
            ax.legend()
        else:
            ax.semilogy(radii, densities, color=colors[i])
        if i == 1:
            ax.set_xlim((0, 0.5 * pf[scale] / pf["unitary"]))
        plt.savefig(outdir + '/' + 'temp_radial_density_profile.png')

    # for sto, pf in ts.piter(storage=storage):
    #     print('Processing file {0} of {1}: {2}'.format(i+1, len(files), pf.basename))
    #     a = ytm.PlotCollection(pf, center=[0.5,0.5,0.5]/pf["unitary"]).add_profile_sphere(
    #         0.5, "unitary", ["Radius", "Density"], weight="CellMassMsun")
    #     sto.result = a.data
    #     i += 1
    #
    # for i in storage:
    #     scale = select_scale(6.17e18)
    #     radii = storage[i]['Radius']*pf[scale]
    #     densities = storage[i]['Density']
    #     if i == len(storage) - 1:
    #         ax.semilogy(radii, densities, color=colors[i], label="Radial density profile")
    #     else:
    #         ax.semilogy(radii, densities, color=colors[i])

    formats = ['png', 'eps', 'pdf']
    for fmt in formats:
        plt.savefig(outdir + '/' + 'radial_density_profile.' + fmt, format=fmt)
# Add the '_' to exp_fac
if cfg.exp_fac:
    exp_fac = '_' + cfg.str_exp_fac
else:
    exp_fac = ''

# Make the full paths
data_dir = cfg.data_dir
identifier = cfg.get_identifier()
gdf_path = cfg.gdf_dir

gdf_files = glob.glob(os.path.join(gdf_path, identifier + '_0*.gdf'))
gdf_files.sort()

timeseries = ytm.load(gdf_files)

if rank == 0:
    print("Configuration:")
    print('driver:', driver)
    print('post_amp:', post_amp)
    print('period:', period)
    print('exp_fac:', exp_fac)
    print('data_dir:', data_dir)
    print('gdf_dir:', gdf_path)

if rank == 0:
    # Prevents race condition where one process creates the dir
    if not os.path.exists(data_dir):
        os.mkdir(data_dir)

# Define a var to limit iterations, no limit = len(ts)