def file_import(name):
    f = DataFile(name)   # Open file
    varlist = f.list()   # Get list of all variables in file
    data = {}            # Create empty dictionary
    for v in varlist:
        data[v] = f.read(v)
    f.close()
    return data
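# Usage sketch: read an entire grid file into a dictionary in one call.
# The file name "grid.nc" is an illustrative assumption, not part of the
# original snippet.
#
#     grid = file_import("grid.nc")
#     print(sorted(grid.keys()))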
def run(self):
    # self.display.string += 'File' + '\n'
    f = DataFile()
    f.open(self.path + self.filename)
    varlist = f.list()
    self.display.string += 'Variables in file :' + '\t' + 'No. of Dims.' + '\n'
    for i in varlist:
        self.display.string += str(i) + '\t' + str(f.ndims(i)) + '\n'
    f.close()
def save2nc(file_name, **var):
    """save2nc(file_name, varName=var, ...)

    Save variables to a netCDF file.

    'file_name' is the output file name; the suffix ".nc" is appended
    automatically if it is missing. Each keyword name ('varName') is the name
    used in the exported file, and its value ('var') is the variable saved.
    The keyword names do not need quotation marks.
    E.g. save2nc('file', t=t, x=psi, P0=P0_origin) saves t as t, psi as x and
    P0_origin as P0 into a netCDF file named 'file.nc'.
    """
    if file_name[-3:] != '.nc':
        file_name += '.nc'
    f = DataFile(file_name, write=True, create=True)
    for v in var:
        try:
            varg = var[v]
            f.write(v, varg)
        except Exception:
            print('check the source code "save2nc.py" for more information')
    f.close()
# Imports needed by this function (the 'from boututils import DataFile' form
# follows the usage elsewhere in this collection)
import numpy as np
from past.utils import old_div
from boututils import DataFile


def pol_slice(var3d, gridfile, n=1, zangle=0.0):
    """ data2d = pol_slice(data3d, 'gridfile', n=1, zangle=0.0) """
    n = int(n)
    zangle = float(zangle)

    s = np.shape(var3d)
    if len(s) != 3:
        print("ERROR: pol_slice expects a 3D variable")
        return None
    nx, ny, nz = s

    dz = 2. * np.pi / float(n * (nz - 1))

    try:
        # Open the grid file
        gf = DataFile(gridfile)

        # Check the grid size is correct
        if gf.read("nx") != nx:
            print("ERROR: Grid X size is different to the variable")
            return None
        if gf.read("ny") != ny:
            print("ERROR: Grid Y size is different to the variable")
            return None

        # Get the toroidal shift
        zShift = gf.read("qinty")

        if zShift is not None:
            print("Using qinty as toroidal shift angle")
        else:
            zShift = gf.read("zShift")
            if zShift is not None:
                print("Using zShift as toroidal shift angle")
            else:
                print("ERROR: Neither qinty nor zShift found")
                return None

        gf.close()
    except Exception:
        print("ERROR: pol_slice couldn't read grid file")
        return None

    var2d = np.zeros([nx, ny])

    ######################################
    # Perform 2D slice
    zind = old_div((zangle - zShift), dz)
    z0f = np.floor(zind)
    z0 = z0f.astype(int)
    p = zind - z0f

    # Make z0 between 0 and (nz-2)
    z0 = ((z0 % (nz - 1)) + (nz - 1)) % (nz - 1)

    # Get z+ and z-
    zp = (z0 + 1) % (nz - 1)
    zm = (z0 - 1 + (nz - 1)) % (nz - 1)

    # There may be some more cunning way to do this indexing
    for x in np.arange(nx):
        for y in np.arange(ny):
            var2d[x, y] = 0.5 * p[x, y] * (p[x, y] - 1.0) * var3d[x, y, zm[x, y]] + \
                          (1.0 - p[x, y] * p[x, y]) * var3d[x, y, z0[x, y]] + \
                          0.5 * p[x, y] * (p[x, y] + 1.0) * var3d[x, y, zp[x, y]]

    return var2d
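# Usage sketch: take a poloidal cross-section of a 3D field at a fixed
# toroidal angle. The variable name "P", the paths and n=5 are illustrative
# assumptions; collect() is defined later in this collection.
#
#     p3d = collect("P", path="data")                  # [t, x, y, z]
#     p2d = pol_slice(p3d[-1], "grid.nc", n=5, zangle=0.0)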
of.write("Bxy", Bxy) of.write("hthe", hthe) # Topology for general configurations of.write("yup_xsplit", yup_xsplit) of.write("ydown_xsplit", ydown_xsplit) of.write("yup_xin", yup_xin) of.write("ydown_xin", ydown_xin) of.write("ydown_xout", ydown_xout) of.write("nrad", nrad) of.write("npol", npol) # plasma profiles of.write("pressure", pressure) of.write("Jpar0", Jpar0) of.write("Ni0", Ni0) of.write("Te0", Te0) of.write("Ti0", Ti0) of.write("Ni_x", Ni) of.write("Te_x", Ti) of.write("Ti_x", Ti) of.write("bmag", Bt0) of.write("rmag", Rmaj) # Curvature of.write("logB", logB) of.close() print("Done")
from past.utils import old_div
from boututils import DataFile  # Wrapper around NetCDF4 libraries
from math import pow
from sys import argv

length = 80.  # Length of the domain in m

nx = 5  # Minimum is 5: 2 boundary, one evolved
if len(argv) > 1:
    ny = int(argv[1])  # Minimum 5. Should be divisible by number of processors (so powers of 2 nice)
else:
    ny = 256           # Minimum 5. Should be divisible by number of processors (so powers of 2 nice)

# dy = [[1.]*ny]*nx  # distance between points in y, in m/g22/lengthunit
g22 = [[pow(old_div(float(ny - 1), length), 2)] * ny] * nx
g_22 = [[pow(old_div(length, float(ny - 1)), 2)] * ny] * nx

ixseps1 = -1
ixseps2 = 0

f = DataFile()
f.open("conduct_grid.nc", create=True)

f.write("nx", nx)
f.write("ny", ny)
# f.write("dy", dy)
f.write("g22", g22)
f.write("g_22", g_22)
f.write("ixseps1", ixseps1)
f.write("ixseps2", ixseps2)

f.close()
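# Quick check (sketch): re-open the file written above and confirm its
# contents, using the file_import helper defined earlier in this collection.
#
#     grid = file_import("conduct_grid.nc")
#     print(grid["nx"], grid["ny"])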
# Imports needed by this function
import glob
import os
from math import sqrt
import numpy
from boututils import DataFile


def redistribute(npes, path="data", nxpe=None, output=".", informat=None,
                 outformat=None, mxg=2, myg=2):
    """Resize restart files across NPES processors.

    Does not check if the new processor arrangement is compatible with the
    branch cuts. In this respect restart.split is safer. However, BOUT++
    checks the topology during initialisation anyway so this is not too
    serious.

    Parameters
    ----------
    npes : int
        Number of processors for the new restart files
    path : string, optional
        Location of old restart files
    nxpe : int, optional
        Number of processors to use in the x-direction (determines split:
        npes = nxpe * nype). Default is None, which uses the same algorithm
        as BoutMesh (but without topology information) to determine a
        suitable value for nxpe.
    output : string, optional
        Location to save new restart files
    informat : string, optional
        Specify file format of old restart files (must be a suffix understood
        by DataFile, e.g. 'nc'). Default uses the format of the first
        'BOUT.restart.*' file listed by glob.glob.
    outformat : string, optional
        Specify file format of new restart files (must be a suffix understood
        by DataFile, e.g. 'nc'). Default is to use the same as informat.

    Returns
    -------
    True on success
    """
    if npes <= 0:
        print("ERROR: Negative or zero number of processors")
        return False

    if path == output:
        print("ERROR: Can't overwrite restart files")
        return False

    if informat is None:
        file_list = glob.glob(os.path.join(path, "BOUT.restart.*"))
    else:
        file_list = glob.glob(os.path.join(path, "BOUT.restart.*." + informat))

    nfiles = len(file_list)

    # Read old processor layout
    f = DataFile(file_list[0])

    # Get list of variables
    var_list = f.list()
    if len(var_list) == 0:
        print("ERROR: No data found")
        return False

    old_npes = f.read('NPES')
    old_nxpe = f.read('NXPE')
    old_nype = int(old_npes / old_nxpe)

    if nfiles != old_npes:
        print("WARNING: Number of restart files inconsistent with NPES")
        print("Setting nfiles = " + str(old_npes))
        nfiles = old_npes

    if nfiles == 0:
        print("ERROR: No restart files found")
        return False

    informat = file_list[0].split(".")[-1]
    if outformat is None:
        outformat = informat

    old_mxsub = 0
    old_mysub = 0
    mz = 0

    for v in var_list:
        if f.ndims(v) == 3:
            s = f.size(v)
            old_mxsub = s[0] - 2 * mxg
            if old_mxsub < 0:
                if s[0] == 1:
                    old_mxsub = 1
                    mxg = 0
                elif s[0] == 3:
                    old_mxsub = 1
                    mxg = 1
                else:
                    print("Number of x points is wrong?")
                    return False

            old_mysub = s[1] - 2 * myg
            if old_mysub < 0:
                if s[1] == 1:
                    old_mysub = 1
                    myg = 0
                elif s[1] == 3:
                    old_mysub = 1
                    myg = 1
                else:
                    print("Number of y points is wrong?")
                    return False

            mz = s[2]
            break

    # Calculate total size of the grid
    nx = old_mxsub * old_nxpe
    ny = old_mysub * old_nype
    print("Grid sizes: ", nx, ny, mz)

    if nxpe is None:
        # Copy algorithm from BoutMesh for selecting nxpe
        ideal = sqrt(float(nx) * float(npes) / float(ny))  # Results in square domain
        for i in range(1, npes + 1):
            if npes % i == 0 and nx % i == 0 and int(nx / i) >= mxg and ny % (npes / i) == 0:
                # Found an acceptable value
                # Warning: does not check branch cuts!
                if nxpe is None or abs(ideal - i) < abs(ideal - nxpe):
                    nxpe = i  # Keep value nearest to the ideal

        if nxpe is None:
            print("ERROR: could not find a valid value for nxpe")
            return False

    nype = int(npes / nxpe)

    outfile_list = []
    for i in range(npes):
        outpath = os.path.join(output, "BOUT.restart." + str(i) + "." + outformat)
        outfile_list.append(DataFile(outpath, write=True, create=True))

    infile_list = []
    for i in range(old_npes):
        inpath = os.path.join(path, "BOUT.restart." + str(i) + "." + informat)
        infile_list.append(DataFile(inpath))

    old_mxsub = int(nx / old_nxpe)
    old_mysub = int(ny / old_nype)
    mxsub = int(nx / nxpe)
    mysub = int(ny / nype)

    for v in var_list:
        ndims = f.ndims(v)

        # Collect data
        if ndims == 0:
            # Scalar
            data = f.read(v)
        elif ndims == 2:
            data = numpy.zeros((nx + 2 * mxg, ny + 2 * myg))
            for i in range(old_npes):
                ix = i % old_nxpe
                iy = int(i / old_nxpe)
                ixstart = mxg
                if ix == 0:
                    ixstart = 0
                ixend = -mxg
                if ix == old_nxpe - 1:
                    ixend = 0
                iystart = myg
                if iy == 0:
                    iystart = 0
                iyend = -myg
                if iy == old_nype - 1:
                    iyend = 0
                data[ix * old_mxsub + ixstart:(ix + 1) * old_mxsub + 2 * mxg + ixend,
                     iy * old_mysub + iystart:(iy + 1) * old_mysub + 2 * myg + iyend] = \
                    infile_list[i].read(v)[ixstart:old_mxsub + 2 * mxg + ixend,
                                           iystart:old_mysub + 2 * myg + iyend]
        elif ndims == 3:
            data = numpy.zeros((nx + 2 * mxg, ny + 2 * myg, mz))
            for i in range(old_npes):
                ix = i % old_nxpe
                iy = int(i / old_nxpe)
                ixstart = mxg
                if ix == 0:
                    ixstart = 0
                ixend = -mxg
                if ix == old_nxpe - 1:
                    ixend = 0
                iystart = myg
                if iy == 0:
                    iystart = 0
                iyend = -myg
                if iy == old_nype - 1:
                    iyend = 0
                data[ix * old_mxsub + ixstart:(ix + 1) * old_mxsub + 2 * mxg + ixend,
                     iy * old_mysub + iystart:(iy + 1) * old_mysub + 2 * myg + iyend, :] = \
                    infile_list[i].read(v)[ixstart:old_mxsub + 2 * mxg + ixend,
                                           iystart:old_mysub + 2 * myg + iyend, :]
        else:
            print("ERROR: variable found with unexpected number of dimensions,", ndims, v)
            return False

        # Write data
        for i in range(npes):
            ix = i % nxpe
            iy = int(i / nxpe)
            outfile = outfile_list[i]
            if v == "NPES":
                outfile.write(v, npes)
            elif v == "NXPE":
                outfile.write(v, nxpe)
            elif ndims == 0:
                # Scalar
                outfile.write(v, data)
            elif ndims == 2:
                # Field2D
                outfile.write(v, data[ix * mxsub:(ix + 1) * mxsub + 2 * mxg,
                                      iy * mysub:(iy + 1) * mysub + 2 * myg])
            elif ndims == 3:
                # Field3D
                outfile.write(v, data[ix * mxsub:(ix + 1) * mxsub + 2 * mxg,
                                      iy * mysub:(iy + 1) * mysub + 2 * myg, :])
            else:
                print("ERROR: variable found with unexpected number of dimensions,", f.ndims(v))

    f.close()

    for infile in infile_list:
        infile.close()
    for outfile in outfile_list:
        outfile.close()

    return True
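# Usage sketch: spread an existing set of restart files over 16 processors,
# letting the BoutMesh-style algorithm pick nxpe. The directory names are
# illustrative assumptions.
#
#     redistribute(16, path="data", output="data_16proc")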
# Imports needed by this function
import glob
import os
import sys
import numpy as np
from boututils import DataFile


class CollectError(Exception):
    # Assumed definition: the original excerpt raises CollectError but does
    # not show where it is defined.
    pass


def collect(varname, xind=None, yind=None, zind=None, tind=None,
            path=".", yguards=False, info=True, prefix="BOUT.dmp"):
    """Collect a variable from a set of BOUT++ outputs.

    data = collect(name)

    name   Name of the variable (string)

    Optional arguments:

    xind = [min,max]    Range of X indices to collect
    yind = [min,max]    Range of Y indices to collect
    zind = [min,max]    Range of Z indices to collect
    tind = [min,max]    Range of T indices to collect

    path    = "."          Path to data files
    prefix  = "BOUT.dmp"   File prefix
    yguards = False        Collect Y boundary guard cells?
    info    = True         Print information about collect?
    """

    # Search for BOUT++ dump files in NetCDF format
    file_list = glob.glob(os.path.join(path, prefix + ".nc"))
    if file_list != []:
        print("Single (parallel) data file")
        f = DataFile(file_list[0])  # Open the file
        data = f.read(varname)
        f.close()
        return data

    file_list = glob.glob(os.path.join(path, prefix + "*.nc"))
    file_list.sort()
    if file_list == []:
        print("ERROR: No data files found")
        return None
    nfiles = len(file_list)
    # print("Number of files: " + str(nfiles))

    # Read data from the first file
    f = DataFile(file_list[0])

    # print("File format    : " + f.file_format)

    try:
        dimens = f.dimensions(varname)
        ndims = len(dimens)
    except KeyError:
        print("ERROR: Variable '" + varname + "' not found")
        return None

    if ndims < 2:
        # Just read from file
        data = f.read(varname)
        f.close()
        return data

    if ndims > 4:
        print("ERROR: Too many dimensions")
        raise CollectError

    mxsub = f.read("MXSUB")
    mysub = f.read("MYSUB")
    mz = f.read("MZ")
    myg = f.read("MYG")
    t_array = f.read("t_array")
    nt = len(t_array)

    if info:
        print("mxsub = %d mysub = %d mz = %d\n" % (mxsub, mysub, mz))

    # Get the version of BOUT++ (should be > 0.6 for NetCDF anyway)
    try:
        v = f.read("BOUT_VERSION")

        # 2D decomposition
        nxpe = f.read("NXPE")
        mxg = f.read("MXG")
        nype = f.read("NYPE")
        npe = nxpe * nype

        if info:
            print("nxpe = %d, nype = %d, npe = %d\n" % (nxpe, nype, npe))
            if npe < nfiles:
                print("WARNING: More files than expected (" + str(npe) + ")")
            elif npe > nfiles:
                print("WARNING: Some files missing. Expected " + str(npe))

        nx = nxpe * mxsub + 2 * mxg
    except KeyError:
        print("BOUT++ version : Pre-0.2")
        # Assume number of files is correct
        # No decomposition in X
        nx = mxsub
        mxg = 0
        nxpe = 1
        nype = nfiles

    if yguards:
        ny = mysub * nype + 2 * myg
    else:
        ny = mysub * nype

    f.close()

    # Check ranges
    def check_range(r, low, up, name="range"):
        r2 = r
        if r is not None:
            try:
                n = len(r2)
            except TypeError:
                # No len attribute, so probably a single number
                r2 = [r2, r2]
            if (len(r2) < 1) or (len(r2) > 2):
                print("WARNING: " + name + " must be [min, max]")
                r2 = None
            else:
                if len(r2) == 1:
                    r2 = [r2, r2]
                if r2[0] < low:
                    r2[0] = low
                if r2[0] > up:
                    r2[0] = up
                if r2[1] < 0:
                    r2[1] = 0
                if r2[1] > up:
                    r2[1] = up
                if r2[0] > r2[1]:
                    tmp = r2[0]
                    r2[0] = r2[1]
                    r2[1] = tmp
        else:
            r2 = [low, up]
        return r2

    xind = check_range(xind, 0, nx - 1, "xind")
    yind = check_range(yind, 0, ny - 1, "yind")
    zind = check_range(zind, 0, mz - 2, "zind")
    tind = check_range(tind, 0, nt - 1, "tind")

    xsize = xind[1] - xind[0] + 1
    ysize = yind[1] - yind[0] + 1
    zsize = zind[1] - zind[0] + 1
    tsize = tind[1] - tind[0] + 1

    # Map between dimension names and output size
    sizes = {'x': xsize, 'y': ysize, 'z': zsize, 't': tsize}

    # Create a list with the size of each dimension
    ddims = [sizes[d] for d in dimens]

    # Create the data array
    data = np.zeros(ddims)

    for i in range(npe):
        # Get X and Y processor indices
        pe_yind = int(i / nxpe)
        pe_xind = i % nxpe

        # Get local ranges
        if yguards:
            ymin = yind[0] - pe_yind * mysub
            ymax = yind[1] - pe_yind * mysub
        else:
            ymin = yind[0] - pe_yind * mysub + myg
            ymax = yind[1] - pe_yind * mysub + myg

        xmin = xind[0] - pe_xind * mxsub
        xmax = xind[1] - pe_xind * mxsub

        inrange = True

        if yguards:
            # Check lower y boundary
            if pe_yind == 0:
                # Keeping inner boundary
                if ymax < 0:
                    inrange = False
                if ymin < 0:
                    ymin = 0
            else:
                if ymax < myg:
                    inrange = False
                if ymin < myg:
                    ymin = myg

            # Upper y boundary
            if pe_yind == (nype - 1):
                # Keeping outer boundary
                if ymin >= (mysub + 2 * myg):
                    inrange = False
                if ymax > (mysub + 2 * myg - 1):
                    ymax = (mysub + 2 * myg - 1)
            else:
                if ymin >= (mysub + myg):
                    inrange = False
                if ymax >= (mysub + myg):
                    ymax = (mysub + myg - 1)
        else:
            if (ymin >= (mysub + myg)) or (ymax < myg):
                inrange = False  # Y out of range

            if ymin < myg:
                ymin = myg
            if ymax >= mysub + myg:
                ymax = myg + mysub - 1

        # Check lower x boundary
        if pe_xind == 0:
            # Keeping inner boundary
            if xmax < 0:
                inrange = False
            if xmin < 0:
                xmin = 0
        else:
            if xmax < mxg:
                inrange = False
            if xmin < mxg:
                xmin = mxg

        # Upper x boundary
        if pe_xind == (nxpe - 1):
            # Keeping outer boundary
            if xmin >= (mxsub + 2 * mxg):
                inrange = False
            if xmax > (mxsub + 2 * mxg - 1):
                xmax = (mxsub + 2 * mxg - 1)
        else:
            if xmin >= (mxsub + mxg):
                inrange = False
            if xmax >= (mxsub + mxg):
                xmax = (mxsub + mxg - 1)

        # Number of local values
        nx_loc = xmax - xmin + 1
        ny_loc = ymax - ymin + 1

        # Calculate global indices
        xgmin = xmin + pe_xind * mxsub
        xgmax = xmax + pe_xind * mxsub

        if yguards:
            ygmin = ymin + pe_yind * mysub
            ygmax = ymax + pe_yind * mysub
        else:
            ygmin = ymin + pe_yind * mysub - myg
            ygmax = ymax + pe_yind * mysub - myg

        if not inrange:
            continue  # Don't need this file

        filename = os.path.join(path, prefix + "." + str(i) + ".nc")
        if info:
            sys.stdout.write("\rReading from " + filename + ": [" +
                             str(xmin) + "-" + str(xmax) + "][" +
                             str(ymin) + "-" + str(ymax) + "] -> [" +
                             str(xgmin) + "-" + str(xgmax) + "][" +
                             str(ygmin) + "-" + str(ygmax) + "]")

        f = DataFile(filename)

        if ndims == 4:
            d = f.read(varname, ranges=[tind[0], tind[1] + 1,
                                        xmin, xmax + 1,
                                        ymin, ymax + 1,
                                        zind[0], zind[1] + 1])
            data[:,
                 (xgmin - xind[0]):(xgmin - xind[0] + nx_loc),
                 (ygmin - yind[0]):(ygmin - yind[0] + ny_loc),
                 :] = d
        elif ndims == 3:
            # Could be xyz or txy
            if dimens[2] == 'z':
                # xyz
                d = f.read(varname, ranges=[xmin, xmax + 1,
                                            ymin, ymax + 1,
                                            zind[0], zind[1] + 1])
                data[(xgmin - xind[0]):(xgmin - xind[0] + nx_loc),
                     (ygmin - yind[0]):(ygmin - yind[0] + ny_loc),
                     :] = d
            else:
                # txy
                d = f.read(varname, ranges=[tind[0], tind[1] + 1,
                                            xmin, xmax + 1,
                                            ymin, ymax + 1])
                data[:,
                     (xgmin - xind[0]):(xgmin - xind[0] + nx_loc),
                     (ygmin - yind[0]):(ygmin - yind[0] + ny_loc)] = d
        elif ndims == 2:
            # xy
            d = f.read(varname, ranges=[xmin, xmax + 1,
                                        ymin, ymax + 1])
            data[(xgmin - xind[0]):(xgmin - xind[0] + nx_loc),
                 (ygmin - yind[0]):(ygmin - yind[0] + ny_loc)] = d

        f.close()

    # Finished looping over all files
    if info:
        sys.stdout.write("\n")
    return data
# Imports needed by this function
import glob
import os
from numpy import arange, zeros
from scipy.interpolate import interp1d
from boututils import DataFile


def resizeY(newy, path="data", output=".", informat="nc", outformat=None, myg=2):
    """Resize all the restart files in Y"""

    if outformat is None:
        outformat = informat

    file_list = glob.glob(os.path.join(path, "BOUT.restart.*." + informat))

    nfiles = len(file_list)

    if nfiles == 0:
        print("ERROR: No restart files found")
        return False

    for i in range(nfiles):
        # Open each data file
        infname = os.path.join(path, "BOUT.restart." + str(i) + "." + informat)
        outfname = os.path.join(output, "BOUT.restart." + str(i) + "." + outformat)
        print("Processing %s -> %s" % (infname, outfname))

        infile = DataFile(infname)
        outfile = DataFile(outfname, create=True)

        # Copy basic information
        for var in ["hist_hi", "NPES", "NXPE", "tt"]:
            data = infile.read(var)
            try:
                # Convert to scalar if necessary
                data = data[0]
            except (IndexError, TypeError):
                pass
            outfile.write(var, data)

        # Get a list of variables
        varnames = infile.list()

        for var in varnames:
            if infile.ndims(var) == 3:
                # Could be an evolving variable [x,y,z]
                print(" -> " + var)

                # Read variable from input
                indata = infile.read(var)

                nx, ny, nz = indata.shape

                # y coordinate in input and output data
                iny = (arange(ny) - myg + 0.5) / (ny - 2 * myg)
                outy = (arange(newy) - myg + 0.5) / (newy - 2 * myg)

                outdata = zeros([nx, newy, nz])

                for x in range(nx):
                    for z in range(nz):
                        f = interp1d(iny, indata[x, :, z],
                                     bounds_error=False, fill_value=0.0)
                        outdata[x, :, z] = f(outy)

                outfile.write(var, outdata)

        infile.close()
        outfile.close()
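# Usage sketch: interpolate every 3D variable in a set of restart files onto
# 128 points in y (including guard cells). Directory names are illustrative
# assumptions.
#
#     resizeY(128, path="data", output="data_resized")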
from boututils import DataFile


def slice(infile, outfile, region=None, xind=None, yind=None):
    """
    xind, yind - index ranges. Range includes first point, but not last point
    """

    # Open input and output files
    indf = DataFile(infile)
    outdf = DataFile(outfile, create=True)

    nx = indf["nx"][0]
    ny = indf["ny"][0]

    if region is not None:
        # Select a region of the mesh

        xind = [0, nx]
        if region == 0:
            # Lower inner leg
            yind = [0, indf["jyseps1_1"][0] + 1]
        elif region == 1:
            # Inner core
            yind = [indf["jyseps1_1"][0] + 1, indf["jyseps2_1"][0] + 1]
        elif region == 2:
            # Upper inner leg
            yind = [indf["jyseps2_1"][0] + 1, indf["ny_inner"][0]]
        elif region == 3:
            # Upper outer leg
            yind = [indf["ny_inner"][0], indf["jyseps1_2"][0] + 1]
        elif region == 4:
            # Outer core
            yind = [indf["jyseps1_2"][0] + 1, indf["jyseps2_2"][0] + 1]
        else:
            # Lower outer leg
            yind = [indf["jyseps2_2"][0] + 1, ny]
    else:
        # Use indices
        if not xind:
            xind = [0, nx]
        if not yind:
            yind = [0, ny]

    print("Indices: [%d:%d, %d:%d]" % (xind[0], xind[1], yind[0], yind[1]))

    # List of variables requiring special handling
    special = ["nx", "ny", "ny_inner",
               "ixseps1", "ixseps2",
               "jyseps1_1", "jyseps1_2", "jyseps2_1", "jyseps2_2",
               "ShiftAngle"]

    outdf["nx"] = xind[1] - xind[0]
    outdf["ny"] = yind[1] - yind[0]
    outdf["ny_inner"] = indf["ny_inner"][0] - yind[0]

    outdf["ixseps1"] = indf["ixseps1"][0]
    outdf["ixseps2"] = indf["ixseps2"][0]

    outdf["jyseps1_1"] = indf["jyseps1_1"][0] - yind[0]
    outdf["jyseps2_1"] = indf["jyseps2_1"][0] - yind[0]
    outdf["jyseps1_2"] = indf["jyseps1_2"][0] - yind[0]
    outdf["jyseps2_2"] = indf["jyseps2_2"][0] - yind[0]

    outdf["ShiftAngle"] = indf["ShiftAngle"][xind[0]:xind[1]]

    # Loop over all variables
    for v in list(indf.keys()):
        if v in special:
            continue  # Skip these variables

        ndims = indf.ndims(v)
        if ndims == 0:
            # Copy scalars
            print("Copying variable: " + v)
            outdf[v] = indf[v][0]
        elif ndims == 2:
            # Assume [x,y]
            print("Slicing variable: " + v)
            outdf[v] = indf[v][xind[0]:xind[1], yind[0]:yind[1]]
        else:
            # Skip
            print("Skipping variable: " + v)

    indf.close()
    outdf.close()
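# Usage sketch: extract the inner core region (region 1) from a full tokamak
# grid file. The file names are illustrative assumptions.
#
#     slice("full_grid.nc", "core_only.nc", region=1)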
# Imports needed by this function
from numpy import linspace, ndarray, zeros, pi, cos, sin, sqrt
from boututils import DataFile


def generate(nx, ny,
             R=2.0, r=0.2,   # Major & minor radius
             dr=0.05,        # Radial width of domain
             Bt=1.0,         # Toroidal magnetic field
             q=5.0,          # Safety factor
             mxg=2,
             file="circle.nc"):
    # q = rBt / RBp
    Bp = r * Bt / (R * q)

    # Minor radius as function of x. Choose so boundary
    # is half-way between grid points
    h = dr / (nx - 2. * mxg)  # Grid spacing in r
    rminor = linspace(r - 0.5 * dr - (mxg - 0.5) * h,
                      r + 0.5 * dr + (mxg - 0.5) * h,
                      nx)

    # Mesh spacing in x and y
    dx = ndarray([nx, ny])
    dx[:, :] = r * Bt * h  # NOTE: dx is toroidal flux

    dy = ndarray([nx, ny])
    dy[:, :] = 2. * pi / ny

    # LogB = log(1/(1+r/R cos(theta))) =(approx) -(r/R)*cos(theta)
    logB = zeros([nx, ny, 3])  # (constant, n=1 real, n=1 imag)

    # At y = 0, Rmaj = R + r*cos(theta)
    logB[:, 0, 1] = -(rminor / R)

    # Moving in y, phase shift by (toroidal angle) / q
    for y in range(1, ny):
        dtheta = y * 2. * pi / ny / q  # Change in poloidal angle

        logB[:, y, 1] = -(rminor / R) * cos(dtheta)
        logB[:, y, 2] = -(rminor / R) * sin(dtheta)

    # Shift angle from one end of y to the other
    ShiftAngle = ndarray([nx])
    ShiftAngle[:] = 2. * pi / q

    Rxy = ndarray([nx, ny])
    Rxy[:, :] = r  # NOTE: opposite to standard BOUT convention

    Btxy = ndarray([nx, ny])
    Btxy[:, :] = Bp

    Bpxy = ndarray([nx, ny])
    Bpxy[:, :] = Bt

    Bxy = ndarray([nx, ny])
    Bxy[:, :] = sqrt(Bt**2 + Bp**2)

    hthe = ndarray([nx, ny])
    hthe[:, :] = R

    print("Writing to file '" + file + "'")

    f = DataFile()
    f.open(file, create=True)

    # Mesh size
    f.write("nx", nx)
    f.write("ny", ny)

    # Mesh spacing
    f.write("dx", dx)
    f.write("dy", dy)

    # Metric components
    f.write("Rxy", Rxy)
    f.write("Btxy", Btxy)
    f.write("Bpxy", Bpxy)
    f.write("Bxy", Bxy)
    f.write("hthe", hthe)

    # Shift
    f.write("ShiftAngle", ShiftAngle)

    # Curvature
    f.write("logB", logB)

    # Input parameters
    f.write("R", R)
    f.write("r", r)
    f.write("dr", dr)
    f.write("Bt", Bt)
    f.write("q", q)
    f.write("mxg", mxg)

    f.close()
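# Usage sketch: write a small circular flux-surface grid with the default
# parameters. The grid size 68 x 32 is an illustrative choice.
#
#     generate(68, 32, file="circle.nc")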
of.write("Bxy", Bxy) of.write("hthe", hthe) # Topology for general configurations of.write("yup_xsplit", yup_xsplit) of.write("ydown_xsplit", ydown_xsplit) of.write("yup_xin", yup_xin) of.write("ydown_xin", ydown_xin) of.write("ydown_xout", ydown_xout) of.write("nrad", nrad) of.write("npol", npol) # plasma profiles of.write("pressure", pressure) of.write("Jpar0", Jpar0) of.write("Ni0", Ni0) of.write("Te0", Te0) of.write("Ti0", Ti0) of.write("Ni_x", Ni) of.write("Te_x", Ti) of.write("Ti_x", Ti) of.write("bmag", Bt0) of.write("rmag", Rmaj) # Curvature of.write("logB", logB) of.close() print "Done"
# Imports needed by this function
import glob
import os
from numpy import mean
from boututils import DataFile


def create(averagelast=1, final=-1, path="data", output="./",
           informat="nc", outformat=None):
    """
    Create restart files from data (dmp) files.

    Inputs
    ======

    averagelast   Number of time points to average over.
                  Default is 1 i.e. just take last time-point

    final         The last time point to use. Default is last (-1)

    path          Path to the input data files

    output        Path where the output restart files should go

    informat      Format of the input data files

    outformat     Format of the output restart files
    """

    if outformat is None:
        outformat = informat

    file_list = glob.glob(os.path.join(path, "BOUT.dmp.*." + informat))
    nfiles = len(file_list)

    print("Number of data files: ", nfiles)

    for i in range(nfiles):
        # Open each data file
        infname = os.path.join(path, "BOUT.dmp." + str(i) + "." + informat)
        outfname = os.path.join(output, "BOUT.restart." + str(i) + "." + outformat)
        print(infname, " -> ", outfname)

        infile = DataFile(infname)
        outfile = DataFile(outfname, create=True)

        # Get the data always needed in restart files
        hist_hi = infile.read("iteration")
        print("hist_hi = ", hist_hi)
        outfile.write("hist_hi", hist_hi)

        t_array = infile.read("t_array")
        tt = t_array[final]
        print("tt = ", tt)
        outfile.write("tt", tt)

        NXPE = infile.read("NXPE")
        NYPE = infile.read("NYPE")
        NPES = NXPE * NYPE
        print("NPES = ", NPES, " NXPE = ", NXPE)
        outfile.write("NPES", NPES)
        outfile.write("NXPE", NXPE)

        # Get a list of variables
        varnames = infile.list()

        for var in varnames:
            if infile.ndims(var) == 4:
                # Could be an evolving variable
                print(" -> ", var)

                data = infile.read(var)

                if averagelast == 1:
                    slice = data[final, :, :, :]
                else:
                    slice = mean(data[(final - averagelast):final, :, :, :], axis=0)

                print(slice.shape)

                outfile.write(var, slice)

        infile.close()
        outfile.close()
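# Usage sketch: turn the final time point of a run into restart files in a
# separate directory. Directory names are illustrative assumptions.
#
#     create(path="data", output="restarts")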
# Imports needed by this function
import glob
import os
from boututils import DataFile


def split(nxpe, nype, path="data", output="./", informat="nc", outformat=None):
    """Split restart files across NXPE x NYPE processors.

    Returns True on success
    """

    if outformat is None:
        outformat = informat

    mxg = 2
    myg = 2

    npes = nxpe * nype

    if npes <= 0:
        print("ERROR: Negative or zero number of processors")
        return False

    if path == output:
        print("ERROR: Can't overwrite restart files")
        return False

    file_list = glob.glob(os.path.join(path, "BOUT.restart.*." + informat))
    nfiles = len(file_list)

    if nfiles == 0:
        print("ERROR: No restart files found")
        return False

    # Read old processor layout
    f = DataFile(file_list[0])

    # Get list of variables
    var_list = f.list()
    if len(var_list) == 0:
        print("ERROR: No data found")
        return False

    old_npes = f.read('NPES')
    old_nxpe = f.read('NXPE')

    if nfiles != old_npes:
        print("WARNING: Number of restart files inconsistent with NPES")
        print("Setting nfiles = " + str(old_npes))
        nfiles = old_npes

    if old_npes % old_nxpe != 0:
        print("ERROR: Old NPES is not a multiple of old NXPE")
        return False

    old_nype = int(old_npes / old_nxpe)

    if nype % old_nype != 0:
        print("SORRY: New nype must be a multiple of old nype")
        return False

    if nxpe % old_nxpe != 0:
        print("SORRY: New nxpe must be a multiple of old nxpe")
        return False

    # Get dimension sizes
    old_mxsub = 0
    old_mysub = 0
    mz = 0

    for v in var_list:
        if f.ndims(v) == 3:
            s = f.size(v)
            old_mxsub = s[0] - 2 * mxg
            old_mysub = s[1] - 2 * myg
            mz = s[2]
            break

    f.close()

    # Calculate total size of the grid
    nx = old_mxsub * old_nxpe
    ny = old_mysub * old_nype
    print("Grid sizes: ", nx, ny, mz)

    # NOTE: The following four definitions are assumed; the original excerpt
    # uses xs, ys, mxsub and mysub below without showing where they are set.
    xs = nxpe // old_nxpe    # New X domains per old X domain
    ys = nype // old_nype    # New Y domains per old Y domain
    mxsub = nx // nxpe       # New X sub-domain size
    mysub = ny // nype       # New Y sub-domain size

    # Create the new restart files
    for mype in range(npes):
        # Calculate X and Y processor numbers
        pex = mype % nxpe
        pey = int(mype / nxpe)

        old_pex = int(pex / xs)
        old_pey = int(pey / ys)

        old_x = pex % xs
        old_y = pey % ys

        # Old restart file number
        old_mype = old_nxpe * old_pey + old_pex

        # Calculate indices in old restart file
        xmin = old_x * mxsub
        xmax = xmin + mxsub - 1 + 2 * mxg
        ymin = old_y * mysub
        ymax = ymin + mysub - 1 + 2 * myg

        print("New: " + str(mype) + " (" + str(pex) + ", " + str(pey) + ")")
        print(" => " + str(old_mype) + " (" + str(old_pex) + ", " + str(old_pey) +
              ") : (" + str(old_x) + ", " + str(old_y) + ")")
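# Usage sketch: produce restart files for a 4 x 4 processor layout from an
# existing set. Directory names are illustrative assumptions.
#
#     split(4, 4, path="data", output="data_16proc")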
# Fragment of a grid-generation / convergence-scan script: shape, J0, P0,
# bxcvz, MU0 and nxlist, along with the helpers isfile, shell and DataFile,
# are defined earlier in the original script and are not shown here.
Lbar = 1.
Bbar = 1.

J0 = -J0 * shape.Bxy / (MU0 * Lbar)  # Turn into A/m^2
P0 = P0 * Bbar**2 / (2.0 * MU0)      # Pascals

shape.add(P0, "pressure")
shape.add(J0, "Jpar0")
shape.add(bxcvz, "bxcvz")

for nx in nxlist:
    # Generate a new mesh file
    filename = "grid%d.nc" % nx

    if isfile(filename):
        print("Grid file '%s' already exists" % filename)
    else:
        print("Creating grid file '%s'" % filename)
        f = DataFile(filename, create=True)
        shape.write(nx, nx, f)
        f.close()

    # Generate BOUT.inp file
    directory = "grid%d" % nx
    shell("mkdir " + directory)
    shell("cp data/BOUT.inp " + directory)
    shell("sed -i 's/MZ = 17/MZ = %d/g' %s/BOUT.inp" % (nx, directory))
    shell("sed -i 's/grid = \"grid16.nc\"/grid = \"%s\"/g' %s/BOUT.inp" % (filename, directory))