def new_to_old(filename):
    """Copy a metrics file, renaming new-style metric component names to old-style.

    Creates ``<filename-without-ext>.BOUT_metrics.nc`` containing every variable
    of the input file, with keys such as ``gxx``/``g_xx`` rewritten to the older
    ``g11``/``g_11`` convention. All other variables are copied unchanged.

    Parameters
    ----------
    filename : str
        Path of the input data file.
    """
    f = DataFile(filename)
    newfile = DataFile(os.path.splitext(filename)[0] + ".BOUT_metrics.nc",
                       create=True)

    # Map from new-style metric names to the old-style names expected by BOUT++
    name_changes = {
        "g_yy": "g_22",
        "gyy": "g22",
        "gxx": "g11",
        "gxz": "g13",
        "gzz": "g33",
        "g_xx": "g_11",
        "g_xz": "g_13",
        "g_zz": "g_33",
    }

    for key in f.keys():
        name = name_changes.get(key, key)
        newfile.write(name, np.asarray(f.read(key)))

    f.close()
    # List the contents BEFORE closing: the original called list() after
    # close(), operating on a closed file handle.
    newfile.list()
    newfile.close()
def file_import(name):
    """Read every variable in a data file and return them as a dictionary."""
    datafile = DataFile(name)
    # Map each variable name in the file to its data
    contents = {varname: datafile.read(varname) for varname in datafile.list()}
    datafile.close()
    return contents
def calc_curvilinear_curvature(fname, field, grid):
    """Calculate curvature terms (bxcvx, bxcvy, bxcvz) for a curvilinear grid.

    Reads the field strength ``B`` from *fname*, smooths the cylindrical field
    components with a Savitzky-Golay filter, takes finite-difference
    derivatives, and writes the resulting curvature components back to the file.

    Parameters
    ----------
    fname : str
        Path of the grid file (opened for writing; bxcv* are appended).
    field : object
        Field object providing ``Bxfunc``/``Byfunc``/``Bzfunc``.
    grid : object
        Grid object providing ``metric()`` and ``getPoloidalGrid(y)``.
    """
    from scipy.signal import savgol_filter

    f = DataFile(str(fname), write=True)
    B = f.read("B")
    dBydz = np.zeros(np.shape(B))
    dBydx = np.zeros(np.shape(B))
    dBxdz = np.zeros(np.shape(B))
    dBzdx = np.zeros(np.shape(B))
    dx = grid.metric()["dx"]
    dz = grid.metric()["dz"]
    g_11 = grid.metric()["g_xx"]
    g_22 = grid.metric()["g_yy"]
    g_33 = grid.metric()["g_zz"]
    g_12 = 0.0
    g_13 = grid.metric()["g_xz"]
    g_23 = 0.0
    # Jacobian from the metric determinant.
    # NOTE(review): the last term reads g_13*(g_12*g_23 - g_22*g_23); the
    # standard determinant expansion would end with g_22*g_13 — harmless here
    # since g_12 = g_23 = 0, but confirm before generalizing.
    J = np.sqrt(g_11 * (g_22 * g_33 - g_23 * g_23)
                + g_12 * (g_13 * g_23 - g_12 * g_33)
                + g_13 * (g_12 * g_23 - g_22 * g_23))

    def _odd_window(npts, divisor):
        # savgol_filter requires an odd window length; round up to odd.
        # Uses builtin int: np.int was deprecated in NumPy 1.20 and removed
        # in 1.24, so the original np.int(...) raises AttributeError there.
        return int(np.ceil(npts / divisor) // 2 * 2 + 1)

    Bx_smooth = np.zeros(B.shape)
    By_smooth = np.zeros(B.shape)
    Bz_smooth = np.zeros(B.shape)

    for y in np.arange(0, B.shape[1]):
        pol, _ = grid.getPoloidalGrid(y)
        R = pol.R
        Z = pol.Z
        for x in np.arange(0, B.shape[0]):
            # Smooth along z, then difference to get z-derivatives
            Bx_smooth[x, y, :] = savgol_filter(
                field.Bxfunc(R[x, :], y, Z[x, :]),
                _odd_window(B.shape[-1], 21), 5)
            By_smooth[x, y, :] = savgol_filter(
                field.Byfunc(R[x, :], y, Z[x, :]),
                _odd_window(B.shape[-1], 21), 5)
            dBydz[x, y, :] = calc.deriv(By_smooth[x, y, :]) / dz[x, y, :]
            dBxdz[x, y, :] = calc.deriv(Bx_smooth[x, y, :]) / dz[x, y, :]
        for z in np.arange(0, B.shape[-1]):
            # Smooth along x, then difference to get x-derivatives
            Bz_smooth[:, y, z] = savgol_filter(
                field.Bzfunc(R[:, z], y, Z[:, z]),
                _odd_window(B.shape[0], 7), 5)
            dBzdx[:, y, z] = calc.deriv(Bz_smooth[:, y, z]) / dx[:, y, z]
            dBydx[:, y, z] = calc.deriv(By_smooth[:, y, z]) / dx[:, y, z]

    # Curvature components in field-aligned coordinates
    bxcvx = (-1 / J) * (dBydz / B**2.)
    bxcvy = (1 / J) * ((dBxdz - dBzdx) / B**2.)
    bxcvz = (1 / J) * (dBydx / B**2.)

    f.write('bxcvz', bxcvz)
    f.write('bxcvx', bxcvx)
    f.write('bxcvy', bxcvy)
    f.close()
def change_variable(filename, variable, new_value):
    """Copy a data file, replacing one variable's data with a new value.

    Creates a new file named ``<filename-without-ext><variable>.<new_value>``
    containing every variable of the input file; the selected *variable* is
    written with *new_value* instead of its original data.

    Parameters
    ----------
    filename : str
        Path of the input data file.
    variable : str
        Name of the variable to replace.
    new_value
        Replacement data for *variable* (converted with np.asarray).
    """
    f = DataFile(filename)
    newfile = DataFile(os.path.splitext(filename)[0] + str(variable)
                       + "." + str(new_value), create=True)

    for key in f.keys():
        if key == str(variable):
            # The original looked the key up in an undefined `name_changes`
            # dict (NameError) and never used new_value; write the new data.
            newfile.write(key, np.asarray(new_value))
        else:
            newfile.write(key, np.asarray(f.read(key)))

    f.close()
    # List the contents before closing (original called list() after close())
    newfile.list()
    newfile.close()
def file_import(name):
    """Read all variables from file into a dictionary

    Parameters
    ----------
    name : str
        Name of file to read

    Returns
    -------
    dict
        Dictionary containing all the variables in the file
    """
    handle = DataFile(name)       # Open the data file
    variables = handle.list()     # Names of every variable it contains
    result = {}
    for key in variables:
        result[key] = handle.read(key)
    handle.close()
    return result
def redistribute(npes, path="data", nxpe=None, output=".", informat=None, outformat=None, mxg=2, myg=2):
    """Resize restart files across NPES processors.

    Does not check if new processor arrangement is compatible with the
    branch cuts. In this respect restart.split is safer. However, BOUT++
    checks the topology during initialisation anyway so this is not too
    serious.

    Parameters
    ----------
    npes : int
        number of processors for the new restart files
    path : string, optional
        location of old restart files
    nxpe : int, optional
        number of processors to use in the x-direction (determines split:
        npes = nxpe * nype). Default is None which uses the same algorithm
        as BoutMesh (but without topology information) to determine a
        suitable value for nxpe.
    output : string, optional
        location to save new restart files
    informat : string, optional
        specify file format of old restart files (must be a suffix
        understood by DataFile, e.g. 'nc'). Default uses the format of the
        first 'BOUT.restart.*' file listed by glob.glob.
    outformat : string, optional
        specify file format of new restart files (must be a suffix
        understood by DataFile, e.g. 'nc'). Default is to use the same as
        informat.

    Returns
    -------
    True on success
    """
    if npes <= 0:
        print("ERROR: Negative or zero number of processors")
        return False

    if path == output:
        print("ERROR: Can't overwrite restart files")
        return False

    if informat is None:
        file_list = glob.glob(os.path.join(path, "BOUT.restart.*"))
    else:
        file_list = glob.glob(os.path.join(path, "BOUT.restart.*." + informat))

    nfiles = len(file_list)

    # Read old processor layout from the first restart file
    f = DataFile(file_list[0])

    # Get list of variables
    var_list = f.list()
    if len(var_list) == 0:
        print("ERROR: No data found")
        return False

    old_npes = f.read("NPES")
    old_nxpe = f.read("NXPE")
    old_nype = int(old_npes / old_nxpe)

    if nfiles != old_npes:
        print("WARNING: Number of restart files inconsistent with NPES")
        print("Setting nfiles = " + str(old_npes))
        nfiles = old_npes

    if nfiles == 0:
        print("ERROR: No restart files found")
        return False

    # Input format is whatever the first file actually uses
    informat = file_list[0].split(".")[-1]
    if outformat is None:
        outformat = informat

    # Work out subdomain sizes (and correct guard-cell counts for
    # degenerate 1-point dimensions) from the first 3D variable found
    old_mxsub = 0
    old_mysub = 0
    mz = 0
    for v in var_list:
        if f.ndims(v) == 3:
            s = f.size(v)
            old_mxsub = s[0] - 2 * mxg
            if old_mxsub < 0:
                if s[0] == 1:
                    old_mxsub = 1
                    mxg = 0
                elif s[0] == 3:
                    old_mxsub = 1
                    mxg = 1
                else:
                    print("Number of x points is wrong?")
                    return False
            old_mysub = s[1] - 2 * myg
            if old_mysub < 0:
                if s[1] == 1:
                    old_mysub = 1
                    myg = 0
                elif s[1] == 3:
                    old_mysub = 1
                    myg = 1
                else:
                    print("Number of y points is wrong?")
                    return False
            mz = s[2]
            break

    # Calculate total size of the grid
    nx = old_mxsub * old_nxpe
    ny = old_mysub * old_nype
    print("Grid sizes: ", nx, ny, mz)

    if nxpe is None:
        # Copy algorithm from BoutMesh for selecting nxpe
        ideal = sqrt(float(nx) * float(npes) / float(ny))  # Results in square domain
        for i in range(1, npes + 1):
            if npes % i == 0 and nx % i == 0 and int(nx / i) >= mxg and ny % (npes / i) == 0:
                # Found an acceptable value
                # Warning: does not check branch cuts!
                if nxpe is None or abs(ideal - i) < abs(ideal - nxpe):
                    nxpe = i  # Keep value nearest to the ideal
        if nxpe is None:
            print("ERROR: could not find a valid value for nxpe")
            return False

    nype = int(npes / nxpe)

    outfile_list = []
    for i in range(npes):
        outpath = os.path.join(output, "BOUT.restart." + str(i) + "." + outformat)
        outfile_list.append(DataFile(outpath, write=True, create=True))

    infile_list = []
    for i in range(old_npes):
        # BUG FIX: the old restart files are in `informat`; the original
        # opened them with `outformat`, which fails whenever the two differ.
        inpath = os.path.join(path, "BOUT.restart." + str(i) + "." + informat)
        infile_list.append(DataFile(inpath))

    old_mxsub = int(nx / old_nxpe)
    old_mysub = int(ny / old_nype)
    mxsub = int(nx / nxpe)
    mysub = int(ny / nype)

    for v in var_list:
        ndims = f.ndims(v)

        # Collect the variable onto the global grid
        if ndims == 0:
            # Scalar
            data = f.read(v)
        elif ndims == 2:
            data = np.zeros((nx + 2 * mxg, ny + 2 * myg))
            for i in range(old_npes):
                ix = i % old_nxpe
                iy = int(i / old_nxpe)
                # Interior processors skip their guard cells; boundary
                # processors keep them so the global array has boundaries
                ixstart = mxg
                if ix == 0:
                    ixstart = 0
                ixend = -mxg
                if ix == old_nxpe - 1:
                    ixend = 0
                iystart = myg
                if iy == 0:
                    iystart = 0
                iyend = -myg
                if iy == old_nype - 1:
                    iyend = 0
                data[
                    ix * old_mxsub + ixstart : (ix + 1) * old_mxsub + 2 * mxg + ixend,
                    iy * old_mysub + iystart : (iy + 1) * old_mysub + 2 * myg + iyend,
                ] = infile_list[i].read(v)[ixstart : old_mxsub + 2 * mxg + ixend,
                                           iystart : old_mysub + 2 * myg + iyend]
        elif ndims == 3:
            data = np.zeros((nx + 2 * mxg, ny + 2 * myg, mz))
            for i in range(old_npes):
                ix = i % old_nxpe
                iy = int(i / old_nxpe)
                ixstart = mxg
                if ix == 0:
                    ixstart = 0
                ixend = -mxg
                if ix == old_nxpe - 1:
                    ixend = 0
                iystart = myg
                if iy == 0:
                    iystart = 0
                iyend = -myg
                if iy == old_nype - 1:
                    iyend = 0
                data[
                    ix * old_mxsub + ixstart : (ix + 1) * old_mxsub + 2 * mxg + ixend,
                    iy * old_mysub + iystart : (iy + 1) * old_mysub + 2 * myg + iyend,
                    :,
                ] = infile_list[i].read(v)[
                    ixstart : old_mxsub + 2 * mxg + ixend,
                    iystart : old_mysub + 2 * myg + iyend,
                    :
                ]
        else:
            print("ERROR: variable found with unexpected number of dimensions,", ndims, v)
            return False

        # Write the variable out, split across the new processor layout
        for i in range(npes):
            ix = i % nxpe
            iy = int(i / nxpe)
            outfile = outfile_list[i]
            if v == "NPES":
                outfile.write(v, npes)
            elif v == "NXPE":
                outfile.write(v, nxpe)
            elif ndims == 0:
                # Scalar
                outfile.write(v, data)
            elif ndims == 2:
                # Field2D
                outfile.write(v, data[ix * mxsub : (ix + 1) * mxsub + 2 * mxg,
                                      iy * mysub : (iy + 1) * mysub + 2 * myg])
            elif ndims == 3:
                # Field3D
                outfile.write(
                    v, data[ix * mxsub : (ix + 1) * mxsub + 2 * mxg,
                            iy * mysub : (iy + 1) * mysub + 2 * myg, :]
                )
            else:
                # Defensive: unreachable, the gather above already returned
                print("ERROR: variable found with unexpected number of dimensions,", f.ndims(v))

    f.close()
    for infile in infile_list:
        infile.close()
    for outfile in outfile_list:
        outfile.close()
    return True
def resizeY(newy, path="data", output=".", informat="nc", outformat=None, myg=2):
    """Increase the number of Y points in restart files

    NOTE:
      * Can't overwrite

    Parameters
    ----------
    newy : int
        ny for the new file
    path : str, optional
        Path to original restart files (default: "data")
    output : str, optional
        Path to write new restart files (default: current directory)
    informat : str, optional
        File extension of original files (default: "nc")
    outformat : str, optional
        File extension of new files (default: use the same as `informat`)
    myg : int, optional
        Number of ghost points in y (default: 2)

    Returns
    -------
    True on success, else False

    TODO
    ----
    - Replace printing errors with raising `ValueError`
    - Make informat work like `redistribute`
    """
    if outformat is None:
        outformat = informat

    file_list = glob.glob(os.path.join(path, "BOUT.restart.*."+informat))
    nfiles = len(file_list)
    if nfiles == 0:
        print("ERROR: No restart files found")
        return False

    def normalised_y(npts):
        # Normalised y coordinate of each cell centre (guard cells fall
        # outside [0, 1])
        return (arange(npts) - myg + 0.5) / (npts - 2*myg)

    target_y = normalised_y(newy)

    for rank in range(nfiles):
        # Open each data file
        infname = os.path.join(path, "BOUT.restart."+str(rank)+"."+informat)
        outfname = os.path.join(output, "BOUT.restart."+str(rank)+"."+outformat)
        print("Processing %s -> %s" % (infname, outfname))

        infile = DataFile(infname)
        outfile = DataFile(outfname, create=True)

        # Copy basic information, converting length-1 arrays to scalars
        for var in ["hist_hi", "NXPE", "NYPE", "tt"]:
            data = infile.read(var)
            try:
                data = data[0]
            except:
                pass
            outfile.write(var, data)

        # Resize or copy every remaining variable
        for var in infile.list():
            ndims = infile.ndims(var)
            if ndims == 3:
                # Could be an evolving variable [x,y,z]
                print(" -> Resizing " + var)
                indata = infile.read(var)
                nx, ny, nz = indata.shape
                source_y = normalised_y(ny)
                outdata = zeros([nx, newy, nz])
                for x in range(nx):
                    for z in range(nz):
                        interp = interp1d(source_y, indata[x, :, z],
                                          bounds_error=False, fill_value=0.0)
                        outdata[x, :, z] = interp(target_y)
                outfile.write(var, outdata)
            elif ndims == 2:
                # Assume evolving variable [x,y]
                print(" -> Resizing " + var)
                indata = infile.read(var)
                nx, ny = indata.shape
                source_y = normalised_y(ny)
                outdata = zeros([nx, newy])
                for x in range(nx):
                    interp = interp1d(source_y, indata[x, :],
                                      bounds_error=False, fill_value=0.0)
                    outdata[x, :] = interp(target_y)
                outfile.write(var, outdata)
            else:
                # Copy variable unchanged (scalar conversion as above)
                print(" -> Copying " + var)
                data = infile.read(var)
                try:
                    data = data[0]
                except:
                    pass
                outfile.write(var, data)

        infile.close()
        outfile.close()
def slice(infile, outfile, region=None, xind=None, yind=None):
    """Copy a slice of a grid file into a new file.

    xind, yind - index ranges. Range includes first point, but not last point

    Parameters
    ----------
    infile, outfile : str
        Input and output file names
    region : int, optional
        Mesh region to select (0: lower inner leg, 1: inner core,
        2: upper inner leg, 3: upper outer leg, 4: outer core,
        other: lower outer leg). Overrides xind/yind.
    xind, yind : [start, end], optional
        Explicit index ranges, used when region is None.
    """
    # Open input and output files
    indf = DataFile(infile)
    outdf = DataFile(outfile, create=True)

    nx = indf["nx"][0]
    ny = indf["ny"][0]

    # BUG FIX: the original tested `if region:` which is False for
    # region == 0, so the "Lower inner leg" region could never be selected.
    if region is not None:
        # Select a region of the mesh
        xind = [0, nx]
        if region == 0:
            # Lower inner leg
            yind = [0, indf["jyseps1_1"][0] + 1]
        elif region == 1:
            # Inner core
            yind = [indf["jyseps1_1"][0] + 1, indf["jyseps2_1"][0] + 1]
        elif region == 2:
            # Upper inner leg
            yind = [indf["jyseps2_1"][0] + 1, indf["ny_inner"][0]]
        elif region == 3:
            # Upper outer leg
            yind = [indf["ny_inner"][0], indf["jyseps1_2"][0] + 1]
        elif region == 4:
            # Outer core
            yind = [indf["jyseps1_2"][0] + 1, indf["jyseps2_2"][0] + 1]
        else:
            # Lower outer leg
            yind = [indf["jyseps2_2"][0] + 1, ny]
    else:
        # Use indices; `is None` so that explicit [0, ...] ranges are kept
        if xind is None:
            xind = [0, nx]
        if yind is None:
            yind = [0, ny]

    print("Indices: [%d:%d, %d:%d]" % (xind[0], xind[1], yind[0], yind[1]))

    # List of variables requiring special handling
    special = [
        "nx", "ny", "ny_inner",
        "ixseps1", "ixseps2",
        "jyseps1_1", "jyseps1_2", "jyseps2_1", "jyseps2_2",
        "ShiftAngle"
    ]

    # Topology quantities, shifted into the sliced index space
    outdf["nx"] = xind[1] - xind[0]
    outdf["ny"] = yind[1] - yind[0]
    outdf["ny_inner"] = indf["ny_inner"][0] - yind[0]
    outdf["ixseps1"] = indf["ixseps1"][0]
    outdf["ixseps2"] = indf["ixseps2"][0]
    outdf["jyseps1_1"] = indf["jyseps1_1"][0] - yind[0]
    outdf["jyseps2_1"] = indf["jyseps2_1"][0] - yind[0]
    outdf["jyseps1_2"] = indf["jyseps1_2"][0] - yind[0]
    outdf["jyseps2_2"] = indf["jyseps2_2"][0] - yind[0]
    outdf["ShiftAngle"] = indf["ShiftAngle"][xind[0]:xind[1]]

    # Loop over all variables
    for v in list(indf.keys()):
        if v in special:
            continue  # Skip these variables

        ndims = indf.ndims(v)
        if ndims == 0:
            # Copy scalars
            print("Copying variable: " + v)
            outdf[v] = indf[v][0]
        elif ndims == 2:
            # Assume [x,y]
            print("Slicing variable: " + v)
            outdf[v] = indf[v][xind[0]:xind[1], yind[0]:yind[1]]
        else:
            # Skip
            print("Skipping variable: " + v)

    indf.close()
    outdf.close()
# --- Write the remaining grid quantities to the output file `of` ---
of.write("Bxy", Bxy)
of.write("hthe", hthe)

# Topology for general configurations
of.write("yup_xsplit", yup_xsplit)
of.write("ydown_xsplit", ydown_xsplit)
of.write("yup_xin", yup_xin)
of.write("ydown_xin", ydown_xin)
# NOTE(review): "ydown_xout" is written but there is no matching
# "yup_xout" write here — confirm whether it is written elsewhere.
of.write("ydown_xout", ydown_xout)
of.write("nrad", nrad)
of.write("npol", npol)

# plasma profiles
of.write("pressure", pressure)
of.write("Jpar0", Jpar0)
of.write("Ni0", Ni0)
of.write("Te0", Te0)
of.write("Ti0", Ti0)
of.write("Ni_x", Ni)
# NOTE(review): "Te_x" is written with the value of Ti, not Te — this looks
# like a copy/paste slip; confirm the intended value before changing.
of.write("Te_x", Ti)
of.write("Ti_x", Ti)
of.write("bmag", Bt0)
of.write("rmag", Rmaj)

# Curvature
of.write("logB", logB)

of.close()
print("Done")
def create(file_list, path, averagelast=1, final=-1, output="./", informat="nc", outformat=None):
    """Create restart files from data (dmp) files.

    Parameters
    ----------
    file_list : list
        The dmp files to process (one per processor); only its length is used
        to determine how many files to convert.
    path : str
        Path to the directory containing the original dmp files
    averagelast : int, optional
        Number of time points (counting from `final`, inclusive) to average
        over (default is 1 i.e. just take last time-point)
    final : int, optional
        The last time point to use (default is last, -1)
    output : str, optional
        Path to write new restart files (default: current directory)
    informat : str, optional
        File extension of original files (default: "nc")
    outformat : str, optional
        File extension of new files (default: use the same as `informat`)
    """
    if outformat is None:
        outformat = informat

    nfiles = len(file_list)
    print(("Number of data files: ", nfiles))

    for rank in range(nfiles):
        # Open each data file
        infname = os.path.join(path, "BOUT.dmp." + str(rank) + "." + informat)
        outfname = os.path.join(output, "BOUT.restart." + str(rank) + "." + outformat)
        print((infname, " -> ", outfname))

        infile = DataFile(infname)
        outfile = DataFile(outfname, create=True)

        # Scalars every restart file needs
        hist_hi = infile.read("iteration")
        print(("hist_hi = ", hist_hi))
        outfile.write("hist_hi", hist_hi)

        t_array = infile.read("t_array")
        tt = t_array[final]
        print(("tt = ", tt))
        outfile.write("tt", tt)

        # Absolute time index of the chosen snapshot
        tind = final if final >= 0.0 else len(t_array) + final

        NXPE = infile.read("NXPE")
        NYPE = infile.read("NYPE")
        print(("NXPE = ", NXPE, " NYPE = ", NYPE))
        outfile.write("NXPE", NXPE)
        outfile.write("NYPE", NYPE)

        # Copy 4D (time-evolving) variables, taking the last snapshot or an
        # average over the final `averagelast` snapshots
        for var in infile.list():
            if infile.ndims(var) == 4:
                print((" -> ", var))
                data = infile.read(var)
                if averagelast == 1:
                    snapshot = data[final, :, :, :]
                else:
                    snapshot = mean(data[(final - averagelast):final, :, :, :], axis=0)
                print(snapshot.shape)
                outfile.write(var, snapshot)

        infile.close()
        outfile.close()
def resizeY(newy, path="data", output=".", informat="nc", outformat=None, myg=2):
    """Increase the number of Y points in restart files

    Interpolates every 2D [x,y] and 3D [x,y,z] variable onto `newy` points
    using linear interpolation; all other variables are copied unchanged.

    NOTE:
      * Can't overwrite

    Parameters
    ----------
    newy : int
        ny for the new file
    path : str, optional
        Path to original restart files (default: "data")
    output : str, optional
        Path to write new restart files (default: current directory)
    informat : str, optional
        File extension of original files (default: "nc")
    outformat : str, optional
        File extension of new files (default: use the same as `informat`)
    myg : int, optional
        Number of ghost points in y (default: 2)

    Returns
    -------
    True on success, else False

    TODO
    ----
    - Replace printing errors with raising `ValueError`
    - Make informat work like `redistribute`
    """
    if outformat is None:
        outformat = informat

    file_list = glob.glob(os.path.join(path, "BOUT.restart.*." + informat))
    nfiles = len(file_list)
    if nfiles == 0:
        print("ERROR: No restart files found")
        return False

    for i in range(nfiles):
        # Open each data file
        infname = os.path.join(path, "BOUT.restart." + str(i) + "." + informat)
        outfname = os.path.join(output, "BOUT.restart." + str(i) + "." + outformat)
        print("Processing %s -> %s" % (infname, outfname))

        infile = DataFile(infname)
        outfile = DataFile(outfname, create=True)

        # Copy basic information (converting length-1 arrays to scalars)
        for var in ["hist_hi", "NXPE", "NYPE", "tt"]:
            data = infile.read(var)
            try:
                # Convert to scalar if necessary
                data = data[0]
            except:
                pass
            outfile.write(var, data)

        # Get a list of variables
        varnames = infile.list()

        for var in varnames:
            if infile.ndims(var) == 3:
                # Could be an evolving variable [x,y,z]
                print(" -> Resizing " + var)

                # Read variable from input
                indata = infile.read(var)
                nx, ny, nz = indata.shape

                # Normalised y coordinate of cell centres in input and output
                # data; guard cells fall outside [0, 1]
                iny = (arange(ny) - myg + 0.5) / (ny - 2 * myg)
                outy = (arange(newy) - myg + 0.5) / (newy - 2 * myg)

                outdata = zeros([nx, newy, nz])
                # Interpolate each (x, z) pencil separately along y;
                # points outside the input range are filled with 0.0
                for x in range(nx):
                    for z in range(nz):
                        f = interp1d(iny, indata[x, :, z],
                                     bounds_error=False, fill_value=0.0)
                        outdata[x, :, z] = f(outy)

                outfile.write(var, outdata)
            elif infile.ndims(var) == 2:
                # Assume evolving variable [x,y]
                print(" -> Resizing " + var)

                # Read variable from input
                indata = infile.read(var)
                nx, ny = indata.shape

                # y coordinate in input and output data
                iny = (arange(ny) - myg + 0.5) / (ny - 2 * myg)
                outy = (arange(newy) - myg + 0.5) / (newy - 2 * myg)

                outdata = zeros([nx, newy])
                for x in range(nx):
                    f = interp1d(iny, indata[x, :],
                                 bounds_error=False, fill_value=0.0)
                    outdata[x, :] = f(outy)

                outfile.write(var, outdata)
            else:
                # Copy variable unchanged
                print(" -> Copying " + var)

                # Read variable from input
                data = infile.read(var)
                try:
                    # Convert to scalar if necessary
                    data = data[0]
                except:
                    pass
                outfile.write(var, data)

        infile.close()
        outfile.close()
def split(nxpe, nype, path="data", output="./", informat="nc", outformat=None, mxg=2, myg=2):
    """Split restart files across NXPE x NYPE processors.

    Returns True on success

    WARNING: this function is incomplete/broken — it references several names
    that are never defined (see TODO "Fix undefined variables!" below):
    `old_npes`, `mz`, `xs`, `ys`, `mxsub`, `mysub`, and `old_layout.mype`.
    Calling it will raise NameError.

    Parameters
    ----------
    nxpe, nype : int
        The number of processors in x and y
    path : str, optional
        Path to original restart files (default: "data")
    output : str, optional
        Path to write new restart files (default: current directory)
    informat : str, optional
        File extension of original files (default: "nc")
    outformat : str, optional
        File extension of new files (default: use the same as `informat`)
    mxg, myg : int, optional
        The number of guard cells in x and y

    TODO
    ----
    - Replace printing errors with raising `ValueError`
    - Fix undefined variables!
    - Make informat work like `redistribute`
    """
    if outformat is None:
        outformat = informat

    npes = nxpe * nype
    if npes <= 0:
        print("ERROR: Negative or zero number of processors")
        return False

    if path == output:
        print("ERROR: Can't overwrite restart files")
        return False

    file_list = glob.glob(os.path.join(path, "BOUT.restart.*." + informat))
    nfiles = len(file_list)
    if nfiles == 0:
        print("ERROR: No restart files found")
        return False

    # Read old processor layout from the first restart file
    f = DataFile(os.path.join(path, file_list[0]))
    old_layout = get_processor_layout(f, False)
    f.close()

    if nfiles != old_layout.npes:
        print("WARNING: Number of restart files inconsistent with NPES")
        # NOTE(review): `old_npes` is undefined here (should presumably be
        # old_layout.npes) — raises NameError if this branch is taken
        print("Setting nfiles = " + str(old_npes))
        nfiles = old_layout.npes

    if old_layout.npes % old_layout.nxpe != 0:
        print("ERROR: Old NPES is not a multiple of old NXPE")
        return False

    if nype % old_layout.nype != 0:
        print("SORRY: New nype must be a multiple of old nype")
        return False

    if nxpe % old_layout.nxpe != 0:
        print("SORRY: New nxpe must be a multiple of old nxpe")
        return False

    # Calculate total size of the grid
    nx = old_layout.mxsub * old_layout.nxpe
    ny = old_layout.mysub * old_layout.nype
    # NOTE(review): `mz` is never defined in this function — NameError
    print(("Grid sizes: ", nx, ny, mz))

    # Create the new restart files
    for mype in range(npes):
        # Calculate X and Y processor numbers
        pex = mype % nxpe
        pey = int(mype / nxpe)

        # NOTE(review): `xs` and `ys` (processor split ratios) are never
        # defined — NameError
        old_pex = int(pex / xs)
        old_pey = int(pey / ys)

        old_x = pex % xs
        old_y = pey % ys

        # Old restart file number
        old_mype = old_layout.nxpe * old_pey + old_pex

        # Calculate indices in old restart file
        # NOTE(review): `mxsub` / `mysub` (new subdomain sizes) are never
        # defined — NameError
        xmin = old_x * mxsub
        xmax = xmin + mxsub - 1 + 2 * mxg
        ymin = old_y * mysub
        ymax = ymin + mysub - 1 + 2 * myg

        print("New: " + str(mype) + " (" + str(pex) + ", " + str(pey) + ")")
        # NOTE(review): old_layout.mype looks wrong here — presumably
        # old_mype (computed above) was intended
        print(" => " + str(old_layout.mype) + " (" + str(old_pex) + ", " + str(old_pey)
              + ") : (" + str(old_x) + ", " + str(old_y) + ")")
def smooth_metric(fname, write_to_file=False, return_values=False, smooth_metric=True, order=7):
    """Smooth the curvature (bxcv*) and Jacobian — and optionally the metric
    components — of a grid file with a Savitzky-Golay filter.

    Parameters
    ----------
    fname : str
        Path of the grid file (opened for writing when write_to_file is True).
    write_to_file : bool, optional
        Write the smoothed J (and metric components) back to the file.
    return_values : bool, optional
        Return the smoothed and original curvature components.
    smooth_metric : bool, optional
        Also smooth g11/g13/g33 and their covariant counterparts.
    order : int, optional
        Polynomial order of the Savitzky-Golay filter.

    Returns
    -------
    (bxcvx_smooth, bxcvy_smooth, bxcvz_smooth, bxcvx, bxcvy, bxcvz)
    when return_values is True, otherwise None.
    """
    from scipy.signal import savgol_filter

    f = DataFile(str(fname), write=True)
    bxcvx = f.read('bxcvx')
    bxcvz = f.read('bxcvz')
    bxcvy = f.read('bxcvy')
    J = f.read('J')

    def _odd_window(npts):
        # savgol_filter requires an odd window length; round up to odd.
        # Uses builtin int: np.int was removed in NumPy 1.24, so the
        # original np.int(...) raises AttributeError on current NumPy.
        return int(np.ceil(npts / 2) // 2 * 2 + 1)

    bxcvx_smooth = np.zeros(bxcvx.shape)
    bxcvy_smooth = np.zeros(bxcvy.shape)
    bxcvz_smooth = np.zeros(bxcvz.shape)
    J_smooth = np.zeros(J.shape)

    if smooth_metric:
        g13 = f.read('g13')
        g_13 = f.read('g_13')
        g11 = f.read('g11')
        g_11 = f.read('g_11')
        g33 = f.read('g33')
        g_33 = f.read('g_33')
        g13_smooth = np.zeros(g13.shape)
        g_13_smooth = np.zeros(g_13.shape)
        g11_smooth = np.zeros(g11.shape)
        g_11_smooth = np.zeros(g_11.shape)
        g33_smooth = np.zeros(g33.shape)
        g_33_smooth = np.zeros(g_33.shape)

    # Smooth each (x, y) pencil along z
    for y in np.arange(0, bxcvx.shape[1]):
        for x in np.arange(0, bxcvx.shape[0]):
            bxcvx_smooth[x, y, :] = savgol_filter(
                bxcvx[x, y, :], _odd_window(bxcvx.shape[-1]), order)
            bxcvz_smooth[x, y, :] = savgol_filter(
                bxcvz[x, y, :], _odd_window(bxcvz.shape[-1]), order)
            bxcvy_smooth[x, y, :] = savgol_filter(
                bxcvy[x, y, :], _odd_window(bxcvy.shape[-1]), order)
            J_smooth[x, y, :] = savgol_filter(
                J[x, y, :], _odd_window(J.shape[-1]), order)
            if smooth_metric:
                g11_smooth[x, y, :] = savgol_filter(
                    g11[x, y, :], _odd_window(g11.shape[-1]), order)
                g_11_smooth[x, y, :] = savgol_filter(
                    g_11[x, y, :], _odd_window(g_11.shape[-1]), order)
                g13_smooth[x, y, :] = savgol_filter(
                    g13[x, y, :], _odd_window(g13.shape[-1]), order)
                g_13_smooth[x, y, :] = savgol_filter(
                    g_13[x, y, :], _odd_window(g_13.shape[-1]), order)
                g33_smooth[x, y, :] = savgol_filter(
                    g33[x, y, :], _odd_window(g33.shape[-1]), order)
                g_33_smooth[x, y, :] = savgol_filter(
                    g_33[x, y, :], _odd_window(g_33.shape[-1]), order)

    if write_to_file:
        # NOTE: the smoothed curvature writes were deliberately disabled in
        # the original; kept that way here.
        # f.write('bxcvx',bxcvx_smooth)
        # f.write('bxcvy',bxcvy_smooth)
        # f.write('bxcvz',bxcvz_smooth)
        f.write('J', J_smooth)
        if smooth_metric:
            f.write('g11', g11_smooth)
            f.write('g_11', g_11_smooth)
            f.write('g13', g13_smooth)
            f.write('g_13', g_13_smooth)
            f.write('g33', g33_smooth)
            f.write('g_33', g_33_smooth)

    f.close()

    if return_values:
        return bxcvx_smooth, bxcvy_smooth, bxcvz_smooth, bxcvx, bxcvy, bxcvz
def rotating_ellipse(nx=68, ny=16, nz=128, xcentre=5.5, I_coil=0.01, curvilinear=True, rectangular=False,
                     fname='rotating-ellipse.fci.nc', a=0.4, curvilinear_inner_aligned=True,
                     curvilinear_outer_aligned=True, npoints=421, Btor=2.5, show_maps=False,
                     calc_curvature=True, smooth_curvature=False, return_iota=True, write_iota=False):
    """Build an FCI grid file for a rotating-ellipse stellarator field.

    Constructs a rectangular or curvilinear (optionally flux-surface-aligned)
    poloidal grid, writes the FCI maps to `fname`, and optionally calculates
    curvature, smooths it, and computes/stores the rotational transform.

    Parameters (selection)
    ----------------------
    nx, ny, nz : int
        Grid sizes in x, y (toroidal) and z.
    xcentre, a, I_coil, Btor : float
        Geometry and field parameters passed to zb.field.RotatingEllipse.
    fname : str
        Output grid file name.
    """
    # Field has a 5-fold toroidal periodicity
    yperiod = 2 * np.pi / 5.
    field = zb.field.RotatingEllipse(xcentre=xcentre, I_coil=I_coil, radius=2 * a,
                                     yperiod=yperiod, Btor=Btor)

    # Define the y locations
    ycoords = np.linspace(0.0, yperiod, ny, endpoint=False)
    start_r = xcentre + a / 2.
    start_z = 0.

    if rectangular:
        print("Making rectangular poloidal grid")
        poloidal_grid = zb.poloidal_grid.RectangularPoloidalGrid(nx, nz, 1.0, 1.0,
                                                                 Rcentre=xcentre)
    elif curvilinear:
        print("Making curvilinear poloidal grid")
        # Circular inner/outer boundary lines; only used when the
        # corresponding *_aligned flag is False
        inner = zb.rzline.shaped_line(R0=xcentre, a=a / 2., elong=0, triang=0.0,
                                      indent=0, n=npoints)
        outer = zb.rzline.shaped_line(R0=xcentre, a=a, elong=0, triang=0.0,
                                      indent=0, n=npoints)

        if curvilinear_inner_aligned:
            print("Aligning to inner flux surface...")
            inner_lines = get_lines(field, start_r, start_z, ycoords,
                                    yperiod=yperiod, npoints=npoints)
        if curvilinear_outer_aligned:
            print("Aligning to outer flux surface...")
            outer_lines = get_lines(field, xcentre + a, start_z, ycoords,
                                    yperiod=yperiod, npoints=npoints)

        print("creating grid...")
        if curvilinear_inner_aligned:
            if curvilinear_outer_aligned:
                # One 2D grid per y-slice, between the traced flux surfaces
                poloidal_grid = [
                    zb.poloidal_grid.grid_elliptic(inner, outer, nx, nz, show=show_maps)
                    for inner, outer in zip(inner_lines, outer_lines)
                ]
            else:
                # NOTE(review): `outer` here is the fixed shaped line; the
                # comprehension rebinds `inner` from inner_lines
                poloidal_grid = [
                    zb.poloidal_grid.grid_elliptic(inner, outer, nx, nz, show=show_maps)
                    for inner in inner_lines
                ]
        else:
            poloidal_grid = zb.poloidal_grid.grid_elliptic(inner, outer, nx, nz)

    # Create the 3D grid by putting together 2D poloidal grids
    grid = zb.grid.Grid(poloidal_grid, ycoords, yperiod, yperiodic=True)
    maps = zb.make_maps(grid, field)
    zb.write_maps(grid, field, maps, str(fname), metric2d=False)

    if (curvilinear and calc_curvature):
        print("calculating curvature...")
        calc_curvilinear_curvature(fname, field, grid, maps)

    if (calc_curvature and smooth_curvature):
        smooth_metric(fname, write_to_file=True, return_values=False, smooth_metric=True)

    if (return_iota or write_iota):
        # NOTE(review): rindices/zindices are computed but never used —
        # possibly leftover from an older calc_iota signature
        rindices = np.linspace(start_r, xcentre + a, nx)
        zindices = np.zeros((nx))
        iota_bar = calc_iota(field, start_r, start_z)
        if (write_iota):
            f = DataFile(str(fname), write=True)
            f.write('iota_bar', iota_bar)
            f.close()
        else:
            print("Iota_bar = ", iota_bar)
def calc_curvilinear_curvature(fname, field, grid, maps):
    """Calculate curvature (bxcv*) and Jacobian for a curvilinear FCI grid.

    Builds the normalised field vector G = B/B^2 in cylindrical coordinates,
    transforms it to the curvilinear coordinate system using finite-difference
    derivatives of the grid coordinates, and writes the curl-based curvature
    components and the Jacobian back into `fname`.

    Parameters
    ----------
    fname : str
        Path of the grid file (opened for writing).
    field : object
        Field object providing Bxfunc/Byfunc/Bzfunc.
    grid : object
        Grid object providing metric() and getPoloidalGrid(y).
    maps : object
        FCI maps; currently unused here (see the note below about y
        derivatives).
    """
    from scipy.signal import savgol_filter
    f = DataFile(str(fname), write=True)
    B = f.read("B")
    dx = grid.metric()["dx"]
    dz = grid.metric()["dz"]
    g_11 = grid.metric()["g_xx"]
    g_22 = grid.metric()["g_yy"]
    g_33 = grid.metric()["g_zz"]
    g_12 = 0.0
    g_13 = grid.metric()["g_xz"]
    g_23 = 0.0

    GR = np.zeros(B.shape)
    GZ = np.zeros(B.shape)
    Gphi = np.zeros(B.shape)
    dRdz = np.zeros(B.shape)
    dZdz = np.zeros(B.shape)
    dRdx = np.zeros(B.shape)
    dZdx = np.zeros(B.shape)

    for y in np.arange(0, B.shape[1]):
        pol, _ = grid.getPoloidalGrid(y)
        R = pol.R
        Z = pol.Z
        # G = \vec{B}/B, here in cylindrical coordinates
        GR[:, y, :] = field.Bxfunc(R, y, Z) / ((B[:, y, :])**2)
        GZ[:, y, :] = field.Bzfunc(R, y, Z) / ((B[:, y, :])**2)
        Gphi[:, y, :] = field.Byfunc(R, y, Z) / ((B[:, y, :])**2)
        # Finite-difference derivatives of the grid coordinates R, Z
        # with respect to the curvilinear x and z indices
        for x in np.arange(0, B.shape[0]):
            dRdz[x, y, :] = calc.deriv(R[x, :]) / dz[x, y, :]
            dZdz[x, y, :] = calc.deriv(Z[x, :]) / dz[x, y, :]
        for z in np.arange(0, B.shape[-1]):
            dRdx[:, y, z] = calc.deriv(R[:, z]) / dx[:, y, z]
            dZdx[:, y, z] = calc.deriv(Z[:, z]) / dx[:, y, z]

    # Full 3D R, Z and dy arrays from the grid file
    R = f.read("R")
    Z = f.read("Z")
    dy = f.read("dy")

    ## calculate Jacobian and contravariant terms in curvilinear coordinates
    J = R * (dZdz * dRdx - dZdx * dRdz)
    Gx = (GR * dZdz - GZ * dRdz) * (R / J)
    Gz = (GZ * dRdx - GR * dZdx) * (R / J)

    # Lower the index: covariant components of G (g_12 = g_23 = 0 here)
    G_x = Gx * g_11 + Gphi * g_12 + Gz * g_13
    G_y = Gx * g_12 + Gphi * g_22 + Gz * g_23
    G_z = Gx * g_13 + Gphi * g_23 + Gz * g_33

    dG_zdy = np.zeros(B.shape)
    dG_ydz = np.zeros(B.shape)
    dG_xdz = np.zeros(B.shape)
    dG_zdx = np.zeros(B.shape)
    dG_ydx = np.zeros(B.shape)
    dG_xdy = np.zeros(B.shape)

    for y in np.arange(0, B.shape[1]):
        for x in np.arange(0, B.shape[0]):
            dG_ydz[x, y, :] = calc.deriv(G_y[x, y, :]) / dz[x, y, :]
            dG_xdz[x, y, :] = calc.deriv(G_x[x, y, :]) / dz[x, y, :]
        for z in np.arange(0, B.shape[-1]):
            dG_ydx[:, y, z] = calc.deriv(G_y[:, y, z]) / dx[:, y, z]
            dG_zdx[:, y, z] = calc.deriv(G_z[:, y, z]) / dx[:, y, z]

    # this should really use the maps...
    # (y derivatives taken along the grid index, not along the field line)
    for x in np.arange(0, B.shape[0]):
        for z in np.arange(0, B.shape[-1]):
            dG_zdy[x, :, z] = calc.deriv(G_z[x, :, z]) / dy[x, :, z]
            dG_xdy[x, :, z] = calc.deriv(G_x[x, :, z]) / dy[x, :, z]

    # Curvature = curl(G) / J, contravariant components
    bxcvx = (dG_zdy - dG_ydz) / J
    bxcvy = (dG_xdz - dG_zdx) / J
    bxcvz = (dG_ydx - dG_xdy) / J

    # NOTE(review): `bxcv` (the squared magnitude) is computed but never
    # written or returned — confirm whether it was meant to be saved
    bxcv = g_11 * (bxcvx**2) + g_22 * (bxcvy**2) + g_33 * (bxcvz**2) + 2 * (
        bxcvz * bxcvx * g_13)

    f.write('bxcvx', bxcvx)
    f.write('bxcvy', bxcvy)
    f.write('bxcvz', bxcvz)
    f.write('J', J)
    f.close()
def split(nxpe, nype, path="data", output="./", informat="nc", outformat=None, mxg=2, myg=2):
    """Split restart files across NXPE x NYPE processors.

    Returns True on success

    WARNING: this function is incomplete/broken — it references several names
    that are never defined (see TODO "Fix undefined variables!" below):
    `old_npes`, `mz`, `xs`, `ys`, `mxsub`, `mysub`, and `old_layout.mype`.
    Calling it will raise NameError.

    Parameters
    ----------
    nxpe, nype : int
        The number of processors in x and y
    path : str, optional
        Path to original restart files (default: "data")
    output : str, optional
        Path to write new restart files (default: current directory)
    informat : str, optional
        File extension of original files (default: "nc")
    outformat : str, optional
        File extension of new files (default: use the same as `informat`)
    mxg, myg : int, optional
        The number of guard cells in x and y

    TODO
    ----
    - Replace printing errors with raising `ValueError`
    - Fix undefined variables!
    - Make informat work like `redistribute`
    """
    if outformat is None:
        outformat = informat

    npes = nxpe * nype
    if npes <= 0:
        print("ERROR: Negative or zero number of processors")
        return False

    if path == output:
        print("ERROR: Can't overwrite restart files")
        return False

    file_list = glob.glob(os.path.join(path, "BOUT.restart.*."+informat))
    nfiles = len(file_list)
    if nfiles == 0:
        print("ERROR: No restart files found")
        return False

    # Read old processor layout from the first restart file
    f = DataFile(os.path.join(path, file_list[0]))
    old_layout = get_processor_layout(f, False)
    f.close()

    if nfiles != old_layout.npes:
        print("WARNING: Number of restart files inconsistent with NPES")
        # NOTE(review): `old_npes` is undefined (should presumably be
        # old_layout.npes) — raises NameError if this branch is taken
        print("Setting nfiles = " + str(old_npes))
        nfiles = old_layout.npes

    if old_layout.npes % old_layout.nxpe != 0:
        print("ERROR: Old NPES is not a multiple of old NXPE")
        return False

    if nype % old_layout.nype != 0:
        print("SORRY: New nype must be a multiple of old nype")
        return False

    if nxpe % old_layout.nxpe != 0:
        print("SORRY: New nxpe must be a multiple of old nxpe")
        return False

    # Calculate total size of the grid
    nx = old_layout.mxsub * old_layout.nxpe
    ny = old_layout.mysub * old_layout.nype
    # NOTE(review): `mz` is never defined in this function — NameError
    print(("Grid sizes: ", nx, ny, mz))

    # Create the new restart files
    for mype in range(npes):
        # Calculate X and Y processor numbers
        pex = mype % nxpe
        pey = int(mype / nxpe)

        # NOTE(review): `xs` and `ys` (processor split ratios) are never
        # defined — NameError
        old_pex = int(pex / xs)
        old_pey = int(pey / ys)

        old_x = pex % xs
        old_y = pey % ys

        # Old restart file number
        old_mype = old_layout.nxpe * old_pey + old_pex

        # Calculate indices in old restart file
        # NOTE(review): `mxsub` / `mysub` are never defined — NameError
        xmin = old_x*mxsub
        xmax = xmin + mxsub - 1 + 2*mxg
        ymin = old_y*mysub
        ymax = ymin + mysub - 1 + 2*myg

        print("New: "+str(mype)+" ("+str(pex)+", "+str(pey)+")")
        # NOTE(review): old_layout.mype looks wrong — presumably old_mype
        # (computed above) was intended
        print(" => "+str(old_layout.mype)+" ("+str(old_pex)+", "
              + str(old_pey)+") : ("+str(old_x)+", "+str(old_y)+")")
def create(averagelast=1, final=-1, path="data", output="./", informat="nc", outformat=None):
    """Create restart files from data (dmp) files.

    Parameters
    ----------
    averagelast : int, optional
        Number of time points (counting from `final`, inclusive) to average
        over (default is 1 i.e. just take last time-point)
    final : int, optional
        The last time point to use (default is last, -1)
    path : str, optional
        Path to original restart files (default: "data")
    output : str, optional
        Path to write new restart files (default: current directory)
    informat : str, optional
        File extension of original files (default: "nc")
    outformat : str, optional
        File extension of new files (default: use the same as `informat`)
    """
    if outformat is None:
        outformat = informat

    file_list = glob.glob(os.path.join(path, "BOUT.dmp.*."+informat))
    nfiles = len(file_list)

    print(("Number of data files: ", nfiles))

    for i in range(nfiles):
        # Open each data file
        infname = os.path.join(path, "BOUT.dmp."+str(i)+"."+informat)
        outfname = os.path.join(output, "BOUT.restart."+str(i)+"."+outformat)
        print((infname, " -> ", outfname))

        infile = DataFile(infname)
        outfile = DataFile(outfname, create=True)

        # Get the data always needed in restart files
        hist_hi = infile.read("iteration")
        print(("hist_hi = ", hist_hi))
        outfile.write("hist_hi", hist_hi)

        t_array = infile.read("t_array")
        tt = t_array[final]
        print(("tt = ", tt))
        outfile.write("tt", tt)

        # Absolute time index of the chosen snapshot
        # NOTE(review): `tind` is computed but never used below — the
        # slicing uses `final` directly (equivalent for negative indices)
        tind = final
        if tind < 0.0:
            tind = len(t_array) + final

        NXPE = infile.read("NXPE")
        NYPE = infile.read("NYPE")
        print(("NXPE = ", NXPE, " NYPE = ", NYPE))
        outfile.write("NXPE", NXPE)
        outfile.write("NYPE", NYPE)

        # Get a list of variables
        varnames = infile.list()
        for var in varnames:
            if infile.ndims(var) == 4:
                # Could be an evolving variable
                print((" -> ", var))

                data = infile.read(var)

                # Take the final snapshot, or a time-average over the last
                # `averagelast` snapshots.
                # NOTE: `slice` shadows the builtin of the same name within
                # this loop body.
                if averagelast == 1:
                    slice = data[final, :, :, :]
                else:
                    slice = mean(data[(final - averagelast) :final, :, :, :], axis=0)

                print(slice.shape)

                outfile.write(var, slice)

        infile.close()
        outfile.close()
def resizeY(newy, path="data", output=".", informat="nc", outformat=None,
            myg=2):
    """Resize all the restart files in Y.

    Interpolates each 3D [x, y, z] variable onto a new Y grid of `newy`
    points (including `myg` guard cells at each end); scalar bookkeeping
    variables are copied through unchanged.

    Parameters
    ----------
    newy : int
        New number of Y points, including guard cells
    path : str, optional
        Path to original restart files (default: "data")
    output : str, optional
        Path to write new restart files (default: current directory)
    informat : str, optional
        File extension of original files (default: "nc")
    outformat : str, optional
        File extension of new files (default: same as `informat`)
    myg : int, optional
        Number of Y guard cells (default: 2)

    Returns
    -------
    False if no restart files are found, otherwise None.
    """
    if outformat is None:
        outformat = informat

    file_list = glob.glob(os.path.join(path, "BOUT.restart.*." + informat))

    nfiles = len(file_list)

    if nfiles == 0:
        print("ERROR: No restart files found")
        return False

    for i in range(nfiles):
        # Open each data file
        infname = os.path.join(path, "BOUT.restart." + str(i) + "." + informat)
        outfname = os.path.join(output, "BOUT.restart." + str(i) + "." + outformat)

        # The original passed the format string and arguments as separate
        # print() arguments, so the %s placeholders were never substituted
        print("Processing %s -> %s" % (infname, outfname))

        infile = DataFile(infname)
        outfile = DataFile(outfname, create=True)

        # Copy basic information
        for var in ["hist_hi", "NPES", "NXPE", "tt"]:
            data = infile.read(var)
            try:
                # Convert to scalar if necessary
                data = data[0]
            except (IndexError, TypeError):
                # Already a scalar
                pass
            outfile.write(var, data)

        # Get a list of variables
        varnames = infile.list()

        for var in varnames:
            if infile.ndims(var) == 3:
                # Could be an evolving variable [x,y,z]
                print(" -> " + var)

                # Read variable from input
                indata = infile.read(var)

                nx, ny, nz = indata.shape

                # Cell-centre y coordinate in input and output data,
                # normalised so the non-guard region spans [0, 1]
                iny = (arange(ny) - myg + 0.5) / (ny - 2 * myg)
                outy = (arange(newy) - myg + 0.5) / (newy - 2 * myg)

                outdata = zeros([nx, newy, nz])

                for x in range(nx):
                    for z in range(nz):
                        # Points outside the original range are zero-filled
                        f = interp1d(iny, indata[x, :, z],
                                     bounds_error=False, fill_value=0.0)
                        outdata[x, :, z] = f(outy)

                outfile.write(var, outdata)

        infile.close()
        outfile.close()
def split(nxpe, nype, path="data", output="./", informat="nc", outformat=None,
          mxg=2, myg=2):
    """Split restart files across NXPE x NYPE processors.

    The new processor counts must be multiples of the old ones.

    Returns
    -------
    True on success, False on error.
    """
    if outformat is None:
        outformat = informat

    npes = nxpe * nype

    if npes <= 0:
        print("ERROR: Negative or zero number of processors")
        return False

    if path == output:
        print("ERROR: Can't overwrite restart files")
        return False

    file_list = glob.glob(os.path.join(path, "BOUT.restart.*." + informat))
    nfiles = len(file_list)

    if nfiles == 0:
        print("ERROR: No restart files found")
        return False

    # Read old processor layout. glob already returned full paths, so do
    # not join `path` on again (the original double-joined it).
    f = DataFile(file_list[0])

    # Get list of variables
    var_list = f.list()
    if len(var_list) == 0:
        print("ERROR: No data found")
        return False

    old_npes = f.read('NPES')
    old_nxpe = f.read('NXPE')

    if nfiles != old_npes:
        print("WARNING: Number of restart files inconsistent with NPES")
        print("Setting nfiles = " + str(old_npes))
        nfiles = old_npes

    if old_npes % old_nxpe != 0:
        print("ERROR: Old NPES is not a multiple of old NXPE")
        return False

    old_nype = int(old_npes / old_nxpe)

    if nype % old_nype != 0:
        print("SORRY: New nype must be a multiple of old nype")
        return False

    if nxpe % old_nxpe != 0:
        print("SORRY: New nxpe must be a multiple of old nxpe")
        return False

    # Get dimension sizes
    old_mxsub = 0
    old_mysub = 0
    mz = 0

    for v in var_list:
        if f.ndims(v) == 3:
            s = f.size(v)
            old_mxsub = s[0] - 2 * mxg
            old_mysub = s[1] - 2 * myg
            mz = s[2]
            break

    f.close()

    # Calculate total size of the grid
    nx = old_mxsub * old_nxpe
    ny = old_mysub * old_nype
    print("Grid sizes: " + str(nx) + ", " + str(ny) + ", " + str(mz))

    # Number of new processors per old processor in each direction, and the
    # new subdomain sizes. These were referenced but never defined in the
    # original, giving a NameError on the first iteration.
    xs = nxpe // old_nxpe
    ys = nype // old_nype
    mxsub = nx // nxpe
    mysub = ny // nype

    # Create the new restart files
    for mype in range(npes):
        # Calculate X and Y processor numbers
        pex = mype % nxpe
        pey = int(mype / nxpe)

        old_pex = int(pex / xs)
        old_pey = int(pey / ys)

        old_x = pex % xs
        old_y = pey % ys

        # Old restart file number
        old_mype = old_nxpe * old_pey + old_pex

        # Calculate indices in old restart file
        xmin = old_x * mxsub
        xmax = xmin + mxsub - 1 + 2 * mxg
        ymin = old_y * mysub
        ymax = ymin + mysub - 1 + 2 * myg

        print("New: " + str(mype) + " (" + str(pex) + ", " + str(pey) + ")")
        print(" => " + str(old_mype) + " (" + str(old_pex) + ", "
              + str(old_pey) + ") : (" + str(old_x) + ", " + str(old_y) + ")")

    # NOTE(review): in the visible code no data is actually copied into new
    # files; xmin/xmax/ymin/ymax are computed but unused. TODO: implement
    # the copy, or use `redistribute` instead.
def slice(infile, outfile, region=None, xind=None, yind=None):
    """Copy an X-Y slice from one DataFile to another

    Parameters
    ----------
    infile : str
        Name of DataFile to read slice from
    outfile : str
        Name of DataFile to write slice to. File will be created, and
        will be overwritten if it already exists
    region : {0, 1, 2, 3, 4, 5, None}, optional
        Copy a whole region. The available regions are:
            - 0: Lower inner leg
            - 1: Inner core
            - 2: Upper inner leg
            - 3: Upper outer leg
            - 4: Outer core
            - 5: Lower outer leg
    xind, yind : (int, int), optional
        Index ranges for x and y. Range includes first point, but not
        last point

    TODO
    ----
    - Rename to not clobber builtin `slice`
    - Better regions?

    """
    # Open input and output files
    indf = DataFile(infile)
    outdf = DataFile(outfile, create=True)

    nx = indf["nx"][0]
    ny = indf["ny"][0]

    if region is not None:
        # Select a region of the mesh. Must compare against None
        # explicitly: region 0 (lower inner leg) is falsy, and with a
        # plain `if region:` it fell through to the index branch.
        xind = [0, nx]
        if region == 0:
            # Lower inner leg
            yind = [0, indf["jyseps1_1"][0] + 1]
        elif region == 1:
            # Inner core
            yind = [indf["jyseps1_1"][0] + 1, indf["jyseps2_1"][0] + 1]
        elif region == 2:
            # Upper inner leg
            yind = [indf["jyseps2_1"][0] + 1, indf["ny_inner"][0]]
        elif region == 3:
            # Upper outer leg
            yind = [indf["ny_inner"][0], indf["jyseps1_2"][0] + 1]
        elif region == 4:
            # Outer core
            yind = [indf["jyseps1_2"][0] + 1, indf["jyseps2_2"][0] + 1]
        else:
            # Lower outer leg
            yind = [indf["jyseps2_2"][0] + 1, ny]
    else:
        # Use indices
        if not xind:
            xind = [0, nx]
        if not yind:
            yind = [0, ny]

    print("Indices: [%d:%d, %d:%d]" % (xind[0], xind[1], yind[0], yind[1]))

    # List of variables requiring special handling
    special = [
        "nx", "ny", "ny_inner",
        "ixseps1", "ixseps2",
        "jyseps1_1", "jyseps1_2", "jyseps2_1", "jyseps2_2",
        "ShiftAngle",
    ]

    outdf["nx"] = xind[1] - xind[0]
    outdf["ny"] = yind[1] - yind[0]
    outdf["ny_inner"] = indf["ny_inner"][0] - yind[0]

    outdf["ixseps1"] = indf["ixseps1"][0]
    outdf["ixseps2"] = indf["ixseps2"][0]

    # Shift branch-cut indices into the sliced y range
    outdf["jyseps1_1"] = indf["jyseps1_1"][0] - yind[0]
    outdf["jyseps2_1"] = indf["jyseps2_1"][0] - yind[0]
    outdf["jyseps1_2"] = indf["jyseps1_2"][0] - yind[0]
    outdf["jyseps2_2"] = indf["jyseps2_2"][0] - yind[0]

    outdf["ShiftAngle"] = indf["ShiftAngle"][xind[0]:xind[1]]

    # Loop over all variables
    for v in list(indf.keys()):
        if v in special:
            continue  # Skip these variables

        ndims = indf.ndims(v)
        if ndims == 0:
            # Copy scalars
            print("Copying variable: " + v)
            outdf[v] = indf[v][0]
        elif ndims == 2:
            # Assume [x,y]
            print("Slicing variable: " + v)
            outdf[v] = indf[v][xind[0]:xind[1], yind[0]:yind[1]]
        else:
            # Skip
            print("Skipping variable: " + v)

    indf.close()
    outdf.close()
def create(averagelast=1, final=-1, path="data", output="./", informat="nc",
           outformat=None):
    """Create restart files from data (dmp) files.

    Parameters
    ----------
    averagelast : int, optional
        Number of time points to average over (default is 1 i.e. just
        take last time-point)
    final : int, optional
        The last time point to use (default is last, -1)
    path : str, optional
        Path to the input data files (default: "data")
    output : str, optional
        Path where the output restart files should go
    informat : str, optional
        Format of the input data files (default: "nc")
    outformat : str, optional
        Format of the output restart files (default: same as `informat`)
    """
    if outformat is None:
        outformat = informat

    file_list = glob.glob(os.path.join(path, "BOUT.dmp.*." + informat))
    nfiles = len(file_list)

    print("Number of data files: " + str(nfiles))

    for i in range(nfiles):
        # Open each data file
        infname = os.path.join(path, "BOUT.dmp." + str(i) + "." + informat)
        outfname = os.path.join(output, "BOUT.restart." + str(i) + "." + outformat)
        print(infname + " -> " + outfname)

        infile = DataFile(infname)
        outfile = DataFile(outfname, create=True)

        # Get the data always needed in restart files
        hist_hi = infile.read("iteration")
        print("hist_hi = " + str(hist_hi))
        outfile.write("hist_hi", hist_hi)

        t_array = infile.read("t_array")
        tt = t_array[final]
        print("tt = " + str(tt))
        outfile.write("tt", tt)

        # Normalise the time index to a non-negative value so slicing
        # below is well-defined (previously computed but never used)
        tind = final
        if tind < 0:
            tind = len(t_array) + final

        NXPE = infile.read("NXPE")
        NYPE = infile.read("NYPE")
        NPES = NXPE * NYPE
        print("NPES = " + str(NPES) + " NXPE = " + str(NXPE))
        outfile.write("NPES", NPES)
        outfile.write("NXPE", NXPE)

        # Get a list of variables
        varnames = infile.list()

        for var in varnames:
            if infile.ndims(var) == 4:
                # Could be an evolving variable
                print(" -> " + var)

                data = infile.read(var)

                if averagelast == 1:
                    # Single time point; renamed from 'slice' to avoid
                    # shadowing the builtin
                    time_slice = data[tind, :, :, :]
                else:
                    time_slice = mean(data[(tind - averagelast):tind, :, :, :],
                                      axis=0)

                print(time_slice.shape)

                outfile.write(var, time_slice)

        infile.close()
        outfile.close()
def redistribute(npes, path="data", nxpe=None, output=".", informat=None,
                 outformat=None, mxg=2, myg=2):
    """Resize restart files across NPES processors.

    Does not check if new processor arrangement is compatible with the
    branch cuts. In this respect :py:func:`restart.split` is safer. However,
    BOUT++ checks the topology during initialisation anyway so this is not
    too serious.

    Parameters
    ----------
    npes : int
        Number of processors for the new restart files
    path : str, optional
        Path to original restart files (default: "data")
    nxpe : int, optional
        Number of processors to use in the x-direction (determines
        split: npes = nxpe * nype). Default is None which uses the
        same algorithm as BoutMesh (but without topology information)
        to determine a suitable value for nxpe.
    output : str, optional
        Location to save new restart files (default: current directory)
    informat : str, optional
        Specify file format of old restart files (must be a suffix
        understood by DataFile, e.g. 'nc'). Default uses the format of
        the first 'BOUT.restart.*' file listed by glob.glob.
    outformat : str, optional
        Specify file format of new restart files (must be a suffix
        understood by DataFile, e.g. 'nc'). Default is to use the same
        as informat.

    Returns
    -------
    True on success, False on error.

    TODO
    ----
    - Replace printing errors with raising `ValueError`
    """
    if npes <= 0:
        print("ERROR: Negative or zero number of processors")
        return False

    if path == output:
        print("ERROR: Can't overwrite restart files")
        return False

    if informat is None:
        file_list = glob.glob(os.path.join(path, "BOUT.restart.*"))
    else:
        file_list = glob.glob(os.path.join(path, "BOUT.restart.*." + informat))

    nfiles = len(file_list)

    # Check this before indexing file_list[0]; the original opened the
    # first file before checking, raising IndexError when there were none
    if nfiles == 0:
        print("ERROR: No restart files found")
        return False

    # Read old processor layout
    f = DataFile(file_list[0])

    # Get list of variables
    var_list = f.list()
    if len(var_list) == 0:
        print("ERROR: No data found")
        return False

    old_processor_layout = get_processor_layout(f, has_t_dimension=False)
    print("Grid sizes: ", old_processor_layout.nx,
          old_processor_layout.ny, old_processor_layout.mz)

    if nfiles != old_processor_layout.npes:
        print("WARNING: Number of restart files inconsistent with NPES")
        print("Setting nfiles = " + str(old_processor_layout.npes))
        nfiles = old_processor_layout.npes

    # Take the file format from the files actually found
    informat = file_list[0].split(".")[-1]
    if outformat is None:
        outformat = informat

    try:
        new_processor_layout = create_processor_layout(
            old_processor_layout, npes, nxpe=nxpe)
    except ValueError as e:
        # Python exceptions have no .what() (that is C++), and we must
        # bail out rather than continue with an undefined layout
        print("Could not find valid processor split. " + str(e))
        return False

    nx = old_processor_layout.nx
    ny = old_processor_layout.ny
    mz = old_processor_layout.mz
    mxg = old_processor_layout.mxg
    myg = old_processor_layout.myg
    old_npes = old_processor_layout.npes
    old_nxpe = old_processor_layout.nxpe
    old_nype = old_processor_layout.nype
    old_mxsub = old_processor_layout.mxsub
    old_mysub = old_processor_layout.mysub

    nxpe = new_processor_layout.nxpe
    nype = new_processor_layout.nype
    mxsub = new_processor_layout.mxsub
    mysub = new_processor_layout.mysub

    outfile_list = []
    for i in range(npes):
        outpath = os.path.join(output,
                               "BOUT.restart." + str(i) + "." + outformat)
        outfile_list.append(DataFile(outpath, write=True, create=True))

    infile_list = []
    for i in range(old_npes):
        # Input files carry the *input* format suffix; the original opened
        # them with outformat, which fails when converting between formats
        inpath = os.path.join(path, "BOUT.restart." + str(i) + "." + informat)
        infile_list.append(DataFile(inpath))

    for v in var_list:
        ndims = f.ndims(v)

        # Collect the global data, stripping duplicated guard cells at
        # internal processor boundaries
        if ndims == 0:
            # Scalar
            data = f.read(v)
        elif ndims == 2:
            data = np.zeros((nx + 2 * mxg, ny + 2 * myg))
            for i in range(old_npes):
                ix = i % old_nxpe
                iy = int(i / old_nxpe)
                # Keep boundary guard cells only at the domain edges
                ixstart = mxg
                if ix == 0:
                    ixstart = 0
                ixend = -mxg
                if ix == old_nxpe - 1:
                    ixend = 0
                iystart = myg
                if iy == 0:
                    iystart = 0
                iyend = -myg
                if iy == old_nype - 1:
                    iyend = 0
                data[ix * old_mxsub + ixstart:(ix + 1) * old_mxsub + 2 * mxg + ixend,
                     iy * old_mysub + iystart:(iy + 1) * old_mysub + 2 * myg + iyend] = \
                    infile_list[i].read(v)[
                        ixstart:old_mxsub + 2 * mxg + ixend,
                        iystart:old_mysub + 2 * myg + iyend]
            data = BoutArray(data, attributes=infile_list[0].attributes(v))
        elif ndims == 3:
            data = np.zeros((nx + 2 * mxg, ny + 2 * myg, mz))
            for i in range(old_npes):
                ix = i % old_nxpe
                iy = int(i / old_nxpe)
                ixstart = mxg
                if ix == 0:
                    ixstart = 0
                ixend = -mxg
                if ix == old_nxpe - 1:
                    ixend = 0
                iystart = myg
                if iy == 0:
                    iystart = 0
                iyend = -myg
                if iy == old_nype - 1:
                    iyend = 0
                data[ix * old_mxsub + ixstart:(ix + 1) * old_mxsub + 2 * mxg + ixend,
                     iy * old_mysub + iystart:(iy + 1) * old_mysub + 2 * myg + iyend, :] = \
                    infile_list[i].read(v)[
                        ixstart:old_mxsub + 2 * mxg + ixend,
                        iystart:old_mysub + 2 * myg + iyend, :]
            data = BoutArray(data, attributes=infile_list[0].attributes(v))
        else:
            print("ERROR: variable found with unexpected number of dimensions,",
                  ndims, v)
            return False

        # Write data
        for i in range(npes):
            ix = i % nxpe
            iy = int(i / nxpe)
            outfile = outfile_list[i]
            if v == "NPES":
                outfile.write(v, npes)
            elif v == "NXPE":
                outfile.write(v, nxpe)
            elif v == "NYPE":
                outfile.write(v, nype)
            elif ndims == 0:
                # Scalar
                outfile.write(v, data)
            elif ndims == 2:
                # Field2D
                outfile.write(
                    v, data[ix * mxsub:(ix + 1) * mxsub + 2 * mxg,
                            iy * mysub:(iy + 1) * mysub + 2 * myg])
            elif ndims == 3:
                # Field3D
                outfile.write(
                    v, data[ix * mxsub:(ix + 1) * mxsub + 2 * mxg,
                            iy * mysub:(iy + 1) * mysub + 2 * myg, :])
            else:
                print("ERROR: variable found with unexpected number of dimensions,",
                      f.ndims(v))

    f.close()
    for infile in infile_list:
        infile.close()
    for outfile in outfile_list:
        outfile.close()

    return True
from past.utils import old_div
from boututils.datafile import DataFile  # Wrapper around NetCDF4 libraries
from math import pow
from sys import argv

# Length of the domain in m
length = 80.

# Radial grid: minimum is 5 (2 boundary, one evolved)
nx = 5

# Parallel grid size: taken from the command line if supplied. Minimum 5.
# Should be divisible by number of processors (so powers of 2 nice).
ny = int(argv[1]) if len(argv) > 1 else 256

#dy = [[1.]*ny]*nx # distance between points in y, in m/g22/lengthunit

# Uniform metric components g22 and g_22 (inverses of each other)
g22 = [[pow(old_div(float(ny - 1), length), 2)] * ny] * nx
g_22 = [[pow(old_div(length, float(ny - 1)), 2)] * ny] * nx

# Separatrix indices
ixseps1 = -1
ixseps2 = 0

# Write everything to the grid file
grid = DataFile()
grid.open("conduct_grid.nc", create=True)

grid.write("nx", nx)
grid.write("ny", ny)

#grid.write("dy", dy)
grid.write("g22", g22)
grid.write("g_22", g_22)

grid.write("ixseps1", ixseps1)
grid.write("ixseps2", ixseps2)

grid.close()
def pol_slice(var3d, gridfile, n=1, zangle=0.0):
    """Takes a 3D variable and returns a 2D poloidal slice at fixed
    toroidal angle.

    data2d = pol_slice(data3d, 'gridfile', n=1, zangle=0.0)

    Parameters
    ----------
    var3d : array_like
        3D [x, y, z] variable to slice
    gridfile : str
        Name of the grid file containing the toroidal shift angle
        ("qinty" or "zShift")
    n : int, optional
        Number of toroidal periods in the simulation (default 1)
    zangle : float, optional
        Toroidal angle of the slice (default 0.0)

    Returns
    -------
    2D [x, y] array, or None on error.
    """
    n = int(n)
    zangle = float(zangle)

    s = np.shape(var3d)
    if len(s) != 3:
        print("ERROR: pol_slice expects a 3D variable")
        return None

    nx, ny, nz = s

    dz = 2. * np.pi / float(n * (nz - 1))

    try:
        # Open the grid file
        gf = DataFile(gridfile)

        # Check the grid size is correct
        if gf.read("nx") != nx:
            print("ERROR: Grid X size is different to the variable")
            return None
        if gf.read("ny") != ny:
            print("ERROR: Grid Y size is different to the variable")
            return None

        # Get the toroidal shift. Compare against None with `is not`:
        # these are numpy arrays, so `!= None` compares elementwise and
        # made the `if` raise, which the except below then misreported
        # as a read failure.
        zShift = gf.read("qinty")

        if zShift is not None:
            print("Using qinty as toroidal shift angle")
        else:
            zShift = gf.read("zShift")
            if zShift is not None:
                print("Using zShift as toroidal shift angle")
            else:
                print("ERROR: Neither qinty nor zShift found")
                return None

        gf.close()
    except Exception:
        print("ERROR: pol_slice couldn't read grid file")
        return None

    var2d = np.zeros([nx, ny])

    ######################################
    # Perform 2D slice

    zind = (zangle - zShift) / dz
    z0f = np.floor(zind)
    z0 = z0f.astype(int)
    p = zind - z0f

    # Make z0 between 0 and (nz-2)
    z0 = ((z0 % (nz - 1)) + (nz - 1)) % (nz - 1)

    # Get z+ and z-
    zp = (z0 + 1) % (nz - 1)
    zm = (z0 - 1 + (nz - 1)) % (nz - 1)

    # Quadratic (3-point) interpolation in z at each (x, y).
    # There may be some more cunning way to do this indexing
    for x in np.arange(nx):
        for y in np.arange(ny):
            var2d[x, y] = 0.5 * p[x, y] * (p[x, y] - 1.0) * var3d[x, y, zm[x, y]] + \
                (1.0 - p[x, y] * p[x, y]) * var3d[x, y, z0[x, y]] + \
                0.5 * p[x, y] * (p[x, y] + 1.0) * var3d[x, y, zp[x, y]]

    return var2d
def pol_slice(var3d, gridfile, n=1, zangle=0.0):
    """Takes a 3D variable and returns a 2D poloidal slice at fixed
    toroidal angle.

    data2d = pol_slice(data3d, 'gridfile', n=1, zangle=0.0)

    Parameters
    ----------
    var3d : array_like
        3D [x, y, z] variable to slice
    gridfile : str
        Name of the grid file containing the toroidal shift angle
        ("qinty" or "zShift")
    n : int, optional
        Number of toroidal periods in the simulation (default 1)
    zangle : float, optional
        Toroidal angle of the slice (default 0.0)

    Returns
    -------
    2D [x, y] array, or None on error.
    """
    n = int(n)
    zangle = float(zangle)

    s = np.shape(var3d)
    if len(s) != 3:
        print("ERROR: pol_slice expects a 3D variable")
        return None

    nx, ny, nz = s

    dz = 2. * np.pi / float(n * (nz - 1))

    try:
        # Open the grid file
        gf = DataFile(gridfile)

        # Check the grid size is correct
        if gf.read("nx") != nx:
            print("ERROR: Grid X size is different to the variable")
            return None
        if gf.read("ny") != ny:
            print("ERROR: Grid Y size is different to the variable")
            return None

        # Get the toroidal shift. Compare against None with `is not`:
        # these are numpy arrays, so `!= None` compares elementwise and
        # made the `if` raise, which the except below then misreported
        # as a read failure.
        zShift = gf.read("qinty")

        if zShift is not None:
            print("Using qinty as toroidal shift angle")
        else:
            zShift = gf.read("zShift")
            if zShift is not None:
                print("Using zShift as toroidal shift angle")
            else:
                print("ERROR: Neither qinty nor zShift found")
                return None

        gf.close()
    except Exception:
        print("ERROR: pol_slice couldn't read grid file")
        return None

    var2d = np.zeros([nx, ny])

    ######################################
    # Perform 2D slice

    zind = (zangle - zShift) / dz
    z0f = np.floor(zind)
    z0 = z0f.astype(int)
    p = zind - z0f

    # Make z0 between 0 and (nz-2)
    z0 = ((z0 % (nz - 1)) + (nz - 1)) % (nz - 1)

    # Get z+ and z-
    zp = (z0 + 1) % (nz - 1)
    zm = (z0 - 1 + (nz - 1)) % (nz - 1)

    # Quadratic (3-point) interpolation in z at each (x, y).
    # There may be some more cunning way to do this indexing
    for x in np.arange(nx):
        for y in np.arange(ny):
            var2d[x, y] = 0.5 * p[x, y] * (p[x, y] - 1.0) * var3d[x, y, zm[x, y]] + \
                (1.0 - p[x, y] * p[x, y]) * var3d[x, y, z0[x, y]] + \
                0.5 * p[x, y] * (p[x, y] + 1.0) * var3d[x, y, zp[x, y]]

    return var2d
def collect(varname, xind=None, yind=None, zind=None, tind=None, path=".",
            yguards=False, xguards=True, info=True, prefix="BOUT.dmp",
            strict=False):
    """Collect a variable from a set of BOUT++ outputs.

    data = collect(name)

    name   Name of the variable (string)

    Optional arguments:

    xind = [min,max]    Range of X indices to collect
    yind = [min,max]    Range of Y indices to collect
    zind = [min,max]    Range of Z indices to collect
    tind = [min,max]    Range of T indices to collect

    path    = "."          Path to data files
    prefix  = "BOUT.dmp"   File prefix
    yguards = False        Collect Y boundary guard cells?
    xguards = True         Collect X boundary guard cells?
                           (Set to True to be consistent with the
                           definition of nx)
    info    = True         Print information about collect?
    strict  = False        Fail if the exact variable name is not found?
    """
    # Search for a single (parallel) BOUT++ dump file first
    file_list_nc = glob.glob(os.path.join(path, prefix + ".nc"))
    file_list_h5 = glob.glob(os.path.join(path, prefix + ".hdf5"))
    if file_list_nc != [] and file_list_h5 != []:
        raise IOError("Error: Both NetCDF and HDF5 files are present: do not know which to read.")
    elif file_list_h5 != []:
        suffix = ".hdf5"
        file_list = file_list_h5
    else:
        suffix = ".nc"
        file_list = file_list_nc

    if file_list != []:
        print("Single (parallel) data file")
        f = DataFile(file_list[0])  # Open the file
        data = f.read(varname)
        f.close()  # Close before returning (the original leaked the handle)
        return data

    # One file per processor
    file_list_nc = glob.glob(os.path.join(path, prefix + ".*nc"))
    file_list_h5 = glob.glob(os.path.join(path, prefix + ".*hdf5"))
    if file_list_nc != [] and file_list_h5 != []:
        raise IOError("Error: Both NetCDF and HDF5 files are present: do not know which to read.")
    elif file_list_h5 != []:
        suffix = ".hdf5"
        file_list = file_list_h5
    else:
        suffix = ".nc"
        file_list = file_list_nc

    file_list.sort()
    if file_list == []:
        raise IOError("ERROR: No data files found")

    nfiles = len(file_list)

    # Read data from the first file
    f = DataFile(file_list[0])

    try:
        dimens = f.dimensions(varname)
        ndims = f.ndims(varname)
    except Exception:
        if strict:
            raise
        else:
            # Find the closest matching variable name
            varname = findVar(varname, f.list())

            dimens = f.dimensions(varname)
            ndims = f.ndims(varname)

    if ndims < 2:
        # Just read from file
        data = f.read(varname)
        f.close()
        return data

    if ndims > 4:
        raise ValueError("ERROR: Too many dimensions")

    mxsub = f.read("MXSUB")
    if mxsub is None:
        raise ValueError("Missing MXSUB variable")
    mysub = f.read("MYSUB")
    mz = f.read("MZ")
    myg = f.read("MYG")
    t_array = f.read("t_array")
    if t_array is None:
        nt = 1
        t_array = np.zeros(1)
    else:
        nt = len(t_array)

    if info:
        print("mxsub = %d mysub = %d mz = %d\n" % (mxsub, mysub, mz))

    # Get the version of BOUT++ (should be > 0.6 for NetCDF anyway)
    try:
        v = f.read("BOUT_VERSION")

        # 2D decomposition
        nxpe = f.read("NXPE")
        mxg = f.read("MXG")
        nype = f.read("NYPE")
        npe = nxpe * nype

        if info:
            print("nxpe = %d, nype = %d, npe = %d\n" % (nxpe, nype, npe))

        if npe < nfiles:
            print("WARNING: More files than expected (" + str(npe) + ")")
        elif npe > nfiles:
            print("WARNING: Some files missing. Expected " + str(npe))

        if xguards:
            nx = nxpe * mxsub + 2 * mxg
        else:
            nx = nxpe * mxsub
    except KeyError:
        print("BOUT++ version : Pre-0.2")
        # Assume number of files is correct
        # No decomposition in X
        nx = mxsub
        mxg = 0
        nxpe = 1
        nype = nfiles

    if yguards:
        ny = mysub * nype + 2 * myg
    else:
        ny = mysub * nype

    f.close()

    # Clamp a requested index range to [low, up]; accepts a single number,
    # a 1- or 2-element sequence, or None (meaning the whole range)
    def check_range(r, low, up, name="range"):
        r2 = r
        if r is not None:
            try:
                n = len(r2)
            except TypeError:
                # No len attribute, so probably a single number
                r2 = [r2, r2]
            if (len(r2) < 1) or (len(r2) > 2):
                print("WARNING: " + name + " must be [min, max]")
                r2 = None
            else:
                if len(r2) == 1:
                    # r2 is a sequence here: duplicate the element, not
                    # the list (the original built [[x],[x]])
                    r2 = [r2[0], r2[0]]
                if r2[0] < low:
                    r2[0] = low
                if r2[0] > up:
                    r2[0] = up
                if r2[1] < low:
                    r2[1] = low
                if r2[1] > up:
                    r2[1] = up
                if r2[0] > r2[1]:
                    tmp = r2[0]
                    r2[0] = r2[1]
                    r2[1] = tmp
        else:
            r2 = [low, up]
        return r2

    xind = check_range(xind, 0, nx - 1, "xind")
    yind = check_range(yind, 0, ny - 1, "yind")
    zind = check_range(zind, 0, mz - 2, "zind")
    tind = check_range(tind, 0, nt - 1, "tind")

    xsize = xind[1] - xind[0] + 1
    ysize = yind[1] - yind[0] + 1
    zsize = zind[1] - zind[0] + 1
    tsize = tind[1] - tind[0] + 1

    # Map between dimension names and output size
    sizes = {'x': xsize, 'y': ysize, 'z': zsize, 't': tsize}

    # Create a list with size of each dimension
    ddims = [sizes[d] for d in dimens]

    # Create the data array
    data = np.zeros(ddims)

    for i in range(npe):
        # Get X and Y processor indices
        pe_yind = int(i / nxpe)
        pe_xind = i % nxpe

        inrange = True

        if yguards:
            # Get local ranges
            ymin = yind[0] - pe_yind * mysub
            ymax = yind[1] - pe_yind * mysub

            # Check lower y boundary
            if pe_yind == 0:
                # Keeping inner boundary
                if ymax < 0:
                    inrange = False
                if ymin < 0:
                    ymin = 0
            else:
                if ymax < myg:
                    inrange = False
                if ymin < myg:
                    ymin = myg

            # Upper y boundary
            if pe_yind == (nype - 1):
                # Keeping outer boundary
                if ymin >= (mysub + 2 * myg):
                    inrange = False
                if ymax > (mysub + 2 * myg - 1):
                    ymax = (mysub + 2 * myg - 1)
            else:
                if ymin >= (mysub + myg):
                    inrange = False
                if ymax >= (mysub + myg):
                    ymax = (mysub + myg - 1)

            # Calculate global indices
            ygmin = ymin + pe_yind * mysub
            ygmax = ymax + pe_yind * mysub
        else:
            # Get local ranges
            ymin = yind[0] - pe_yind * mysub + myg
            ymax = yind[1] - pe_yind * mysub + myg

            if (ymin >= (mysub + myg)) or (ymax < myg):
                inrange = False  # Y out of range

            if ymin < myg:
                ymin = myg
            if ymax >= mysub + myg:
                ymax = myg + mysub - 1

            # Calculate global indices
            ygmin = ymin + pe_yind * mysub - myg
            ygmax = ymax + pe_yind * mysub - myg

        if xguards:
            # Get local ranges
            xmin = xind[0] - pe_xind * mxsub
            xmax = xind[1] - pe_xind * mxsub

            # Check lower x boundary
            if pe_xind == 0:
                # Keeping inner boundary
                if xmax < 0:
                    inrange = False
                if xmin < 0:
                    xmin = 0
            else:
                if xmax < mxg:
                    inrange = False
                if xmin < mxg:
                    xmin = mxg

            # Upper x boundary
            if pe_xind == (nxpe - 1):
                # Keeping outer boundary
                if xmin >= (mxsub + 2 * mxg):
                    inrange = False
                if xmax > (mxsub + 2 * mxg - 1):
                    xmax = (mxsub + 2 * mxg - 1)
            else:
                if xmin >= (mxsub + mxg):
                    inrange = False
                if xmax >= (mxsub + mxg):
                    xmax = (mxsub + mxg - 1)

            # Calculate global indices
            xgmin = xmin + pe_xind * mxsub
            xgmax = xmax + pe_xind * mxsub
        else:
            # Get local ranges
            xmin = xind[0] - pe_xind * mxsub + mxg
            xmax = xind[1] - pe_xind * mxsub + mxg

            if (xmin >= (mxsub + mxg)) or (xmax < mxg):
                inrange = False  # X out of range

            if xmin < mxg:
                xmin = mxg
            if xmax >= mxsub + mxg:
                xmax = mxg + mxsub - 1

            # Calculate global indices
            xgmin = xmin + pe_xind * mxsub - mxg
            xgmax = xmax + pe_xind * mxsub - mxg

        # Number of local values
        nx_loc = xmax - xmin + 1
        ny_loc = ymax - ymin + 1

        if not inrange:
            continue  # Don't need this file

        filename = os.path.join(path, prefix + "." + str(i) + suffix)
        if info:
            sys.stdout.write("\rReading from " + filename + ": [" +
                             str(xmin) + "-" + str(xmax) + "][" +
                             str(ymin) + "-" + str(ymax) + "] -> [" +
                             str(xgmin) + "-" + str(xgmax) + "][" +
                             str(ygmin) + "-" + str(ygmax) + "]")

        f = DataFile(filename)

        if ndims == 4:
            d = f.read(varname, ranges=[tind[0], tind[1] + 1,
                                        xmin, xmax + 1,
                                        ymin, ymax + 1,
                                        zind[0], zind[1] + 1])
            data[:, (xgmin - xind[0]):(xgmin - xind[0] + nx_loc),
                 (ygmin - yind[0]):(ygmin - yind[0] + ny_loc), :] = d
        elif ndims == 3:
            # Could be xyz or txy
            if dimens[2] == 'z':  # xyz
                d = f.read(varname, ranges=[xmin, xmax + 1,
                                            ymin, ymax + 1,
                                            zind[0], zind[1] + 1])
                data[(xgmin - xind[0]):(xgmin - xind[0] + nx_loc),
                     (ygmin - yind[0]):(ygmin - yind[0] + ny_loc), :] = d
            else:  # txy
                d = f.read(varname, ranges=[tind[0], tind[1] + 1,
                                            xmin, xmax + 1,
                                            ymin, ymax + 1])
                data[:, (xgmin - xind[0]):(xgmin - xind[0] + nx_loc),
                     (ygmin - yind[0]):(ygmin - yind[0] + ny_loc)] = d
        elif ndims == 2:
            # xy
            d = f.read(varname, ranges=[xmin, xmax + 1,
                                        ymin, ymax + 1])
            data[(xgmin - xind[0]):(xgmin - xind[0] + nx_loc),
                 (ygmin - yind[0]):(ygmin - yind[0] + ny_loc)] = d
        elif ndims == 1:
            if dimens[0] == 't':
                # t
                d = f.read(varname, ranges=[tind[0], tind[1] + 1])
                data[:] = d

        f.close()

    # Force the precision of arrays of dimension>1
    if ndims > 1:
        try:
            data = data.astype(t_array.dtype, copy=False)
        except TypeError:
            data = data.astype(t_array.dtype)

    # Finished looping over all files
    if info:
        sys.stdout.write("\n")
    return data
def collect(varname, xind=None, yind=None, zind=None, tind=None, path=".",
            yguards=False, xguards=True, info=True, prefix="BOUT.dmp",
            strict=False):
    """Collect a variable from a set of BOUT++ outputs.

    data = collect(name)

    name   Name of the variable (string)

    Optional arguments:

    xind = [min,max]    Range of X indices to collect
    yind = [min,max]    Range of Y indices to collect
    zind = [min,max]    Range of Z indices to collect
    tind = [min,max]    Range of T indices to collect

    path    = "."          Path to data files
    prefix  = "BOUT.dmp"   File prefix
    yguards = False        Collect Y boundary guard cells?
    xguards = True         Collect X boundary guard cells?
                           (Set to True to be consistent with the
                           definition of nx)
    info    = True         Print information about collect?
    strict  = False        Fail if the exact variable name is not found?
    """
    # Search for a single (parallel) BOUT++ dump file first
    file_list_nc = glob.glob(os.path.join(path, prefix + ".nc"))
    file_list_h5 = glob.glob(os.path.join(path, prefix + ".hdf5"))
    if file_list_nc != [] and file_list_h5 != []:
        raise IOError(
            "Error: Both NetCDF and HDF5 files are present: do not know which to read."
        )
    elif file_list_h5 != []:
        suffix = ".hdf5"
        file_list = file_list_h5
    else:
        suffix = ".nc"
        file_list = file_list_nc

    if file_list != []:
        print("Single (parallel) data file")
        f = DataFile(file_list[0])  # Open the file
        data = f.read(varname)
        f.close()  # Close before returning (the original leaked the handle)
        return data

    # One file per processor
    file_list_nc = glob.glob(os.path.join(path, prefix + ".*nc"))
    file_list_h5 = glob.glob(os.path.join(path, prefix + ".*hdf5"))
    if file_list_nc != [] and file_list_h5 != []:
        raise IOError(
            "Error: Both NetCDF and HDF5 files are present: do not know which to read."
        )
    elif file_list_h5 != []:
        suffix = ".hdf5"
        file_list = file_list_h5
    else:
        suffix = ".nc"
        file_list = file_list_nc

    file_list.sort()
    if file_list == []:
        raise IOError("ERROR: No data files found")

    nfiles = len(file_list)

    # Read data from the first file
    f = DataFile(file_list[0])

    try:
        dimens = f.dimensions(varname)
        ndims = f.ndims(varname)
    except Exception:
        if strict:
            raise
        else:
            # Find the closest matching variable name
            varname = findVar(varname, f.list())

            dimens = f.dimensions(varname)
            ndims = f.ndims(varname)

    # ndims is 0 for reals, and 1 for f.ex. t_array
    if ndims < 2:
        # Just read from file
        if varname != 't_array':
            data = f.read(varname)
        elif (varname == 't_array') and (tind is None):
            data = f.read(varname)
        elif (varname == 't_array') and (tind is not None):
            data = f.read(varname, ranges=[tind[0], tind[1] + 1])
        f.close()
        return data

    if ndims > 4:
        raise ValueError("ERROR: Too many dimensions")

    mxsub = f.read("MXSUB")
    if mxsub is None:
        raise ValueError("Missing MXSUB variable")
    mysub = f.read("MYSUB")
    mz = f.read("MZ")
    myg = f.read("MYG")
    t_array = f.read("t_array")
    if t_array is None:
        nt = 1
        t_array = np.zeros(1)
    else:
        nt = len(t_array)

    if info:
        print("mxsub = %d mysub = %d mz = %d\n" % (mxsub, mysub, mz))

    # Get the version of BOUT++ (should be > 0.6 for NetCDF anyway)
    try:
        version = f["BOUT_VERSION"]
    except KeyError:
        print("BOUT++ version : Pre-0.2")
        version = 0

    if version < 3.5:
        # Remove extra point
        nz = mz - 1
    else:
        nz = mz

    # Fallback to sensible (?) defaults
    try:
        nxpe = f["NXPE"]
    except KeyError:
        nxpe = 1
        print("NXPE not found, setting to {}".format(nxpe))
    try:
        mxg = f["MXG"]
    except KeyError:
        mxg = 0
        print("MXG not found, setting to {}".format(mxg))
    try:
        nype = f["NYPE"]
    except KeyError:
        nype = nfiles
        print("NYPE not found, setting to {}".format(nype))

    npe = nxpe * nype
    if info:
        print("nxpe = %d, nype = %d, npe = %d\n" % (nxpe, nype, npe))
        if npe < nfiles:
            print("WARNING: More files than expected (" + str(npe) + ")")
        elif npe > nfiles:
            print("WARNING: Some files missing. Expected " + str(npe))

    if xguards:
        nx = nxpe * mxsub + 2 * mxg
    else:
        nx = nxpe * mxsub

    if yguards:
        ny = mysub * nype + 2 * myg
    else:
        ny = mysub * nype

    f.close()

    # Clamp a requested index range to [low, up], wrapping negative
    # indices; accepts a single number, a 1- or 2-element sequence, or
    # None (meaning the whole range)
    def check_range(r, low, up, name="range"):
        r2 = r
        if r is not None:
            try:
                n = len(r2)
            except TypeError:
                # No len attribute, so probably a single number
                r2 = [r2, r2]
            if (len(r2) < 1) or (len(r2) > 2):
                print("WARNING: " + name + " must be [min, max]")
                r2 = None
            else:
                if len(r2) == 1:
                    # r2 is a sequence here: duplicate the element, not
                    # the list (the original built [[x],[x]])
                    r2 = [r2[0], r2[0]]
                if r2[0] < 0 and low >= 0:
                    r2[0] += (up - low + 1)
                if r2[1] < 0 and low >= 0:
                    r2[1] += (up - low + 1)
                if r2[0] < low:
                    r2[0] = low
                if r2[0] > up:
                    r2[0] = up
                if r2[1] < low:
                    r2[1] = low
                if r2[1] > up:
                    r2[1] = up
                if r2[0] > r2[1]:
                    tmp = r2[0]
                    r2[0] = r2[1]
                    r2[1] = tmp
        else:
            r2 = [low, up]
        return r2

    xind = check_range(xind, 0, nx - 1, "xind")
    yind = check_range(yind, 0, ny - 1, "yind")
    zind = check_range(zind, 0, nz - 1, "zind")
    tind = check_range(tind, 0, nt - 1, "tind")

    xsize = xind[1] - xind[0] + 1
    ysize = yind[1] - yind[0] + 1
    zsize = zind[1] - zind[0] + 1
    tsize = tind[1] - tind[0] + 1

    # Map between dimension names and output size
    sizes = {'x': xsize, 'y': ysize, 'z': zsize, 't': tsize}

    # Create a list with size of each dimension
    ddims = [sizes[d] for d in dimens]

    # Create the data array
    data = np.zeros(ddims)

    for i in range(npe):
        # Get X and Y processor indices
        pe_yind = int(i / nxpe)
        pe_xind = i % nxpe

        inrange = True

        if yguards:
            # Get local ranges
            ymin = yind[0] - pe_yind * mysub
            ymax = yind[1] - pe_yind * mysub

            # Check lower y boundary
            if pe_yind == 0:
                # Keeping inner boundary
                if ymax < 0:
                    inrange = False
                if ymin < 0:
                    ymin = 0
            else:
                if ymax < myg:
                    inrange = False
                if ymin < myg:
                    ymin = myg

            # Upper y boundary
            if pe_yind == (nype - 1):
                # Keeping outer boundary
                if ymin >= (mysub + 2 * myg):
                    inrange = False
                if ymax > (mysub + 2 * myg - 1):
                    ymax = (mysub + 2 * myg - 1)
            else:
                if ymin >= (mysub + myg):
                    inrange = False
                if ymax >= (mysub + myg):
                    ymax = (mysub + myg - 1)

            # Calculate global indices
            ygmin = ymin + pe_yind * mysub
            ygmax = ymax + pe_yind * mysub
        else:
            # Get local ranges
            ymin = yind[0] - pe_yind * mysub + myg
            ymax = yind[1] - pe_yind * mysub + myg

            if (ymin >= (mysub + myg)) or (ymax < myg):
                inrange = False  # Y out of range

            if ymin < myg:
                ymin = myg
            if ymax >= mysub + myg:
                ymax = myg + mysub - 1

            # Calculate global indices
            ygmin = ymin + pe_yind * mysub - myg
            ygmax = ymax + pe_yind * mysub - myg

        if xguards:
            # Get local ranges
            xmin = xind[0] - pe_xind * mxsub
            xmax = xind[1] - pe_xind * mxsub

            # Check lower x boundary
            if pe_xind == 0:
                # Keeping inner boundary
                if xmax < 0:
                    inrange = False
                if xmin < 0:
                    xmin = 0
            else:
                if xmax < mxg:
                    inrange = False
                if xmin < mxg:
                    xmin = mxg

            # Upper x boundary
            if pe_xind == (nxpe - 1):
                # Keeping outer boundary
                if xmin >= (mxsub + 2 * mxg):
                    inrange = False
                if xmax > (mxsub + 2 * mxg - 1):
                    xmax = (mxsub + 2 * mxg - 1)
            else:
                if xmin >= (mxsub + mxg):
                    inrange = False
                if xmax >= (mxsub + mxg):
                    xmax = (mxsub + mxg - 1)

            # Calculate global indices
            xgmin = xmin + pe_xind * mxsub
            xgmax = xmax + pe_xind * mxsub
        else:
            # Get local ranges
            xmin = xind[0] - pe_xind * mxsub + mxg
            xmax = xind[1] - pe_xind * mxsub + mxg

            if (xmin >= (mxsub + mxg)) or (xmax < mxg):
                inrange = False  # X out of range

            if xmin < mxg:
                xmin = mxg
            if xmax >= mxsub + mxg:
                xmax = mxg + mxsub - 1

            # Calculate global indices
            xgmin = xmin + pe_xind * mxsub - mxg
            xgmax = xmax + pe_xind * mxsub - mxg

        # Number of local values
        nx_loc = xmax - xmin + 1
        ny_loc = ymax - ymin + 1

        if not inrange:
            continue  # Don't need this file

        filename = os.path.join(path, prefix + "." + str(i) + suffix)
        if info:
            sys.stdout.write("\rReading from " + filename + ": [" +
                             str(xmin) + "-" + str(xmax) + "][" +
                             str(ymin) + "-" + str(ymax) + "] -> [" +
                             str(xgmin) + "-" + str(xgmax) + "][" +
                             str(ygmin) + "-" + str(ygmax) + "]")

        f = DataFile(filename)

        if ndims == 4:
            d = f.read(varname, ranges=[tind[0], tind[1] + 1,
                                        xmin, xmax + 1,
                                        ymin, ymax + 1,
                                        zind[0], zind[1] + 1])
            data[:, (xgmin - xind[0]):(xgmin - xind[0] + nx_loc),
                 (ygmin - yind[0]):(ygmin - yind[0] + ny_loc), :] = d
        elif ndims == 3:
            # Could be xyz or txy
            if dimens[2] == 'z':  # xyz
                d = f.read(varname, ranges=[xmin, xmax + 1,
                                            ymin, ymax + 1,
                                            zind[0], zind[1] + 1])
                data[(xgmin - xind[0]):(xgmin - xind[0] + nx_loc),
                     (ygmin - yind[0]):(ygmin - yind[0] + ny_loc), :] = d
            else:  # txy
                d = f.read(varname, ranges=[tind[0], tind[1] + 1,
                                            xmin, xmax + 1,
                                            ymin, ymax + 1])
                data[:, (xgmin - xind[0]):(xgmin - xind[0] + nx_loc),
                     (ygmin - yind[0]):(ygmin - yind[0] + ny_loc)] = d
        elif ndims == 2:
            # xy
            d = f.read(varname, ranges=[xmin, xmax + 1,
                                        ymin, ymax + 1])
            data[(xgmin - xind[0]):(xgmin - xind[0] + nx_loc),
                 (ygmin - yind[0]):(ygmin - yind[0] + ny_loc)] = d

        f.close()

    # Force the precision of arrays of dimension>1
    if ndims > 1:
        try:
            data = data.astype(t_array.dtype, copy=False)
        except TypeError:
            data = data.astype(t_array.dtype)

    # Finished looping over all files
    if info:
        sys.stdout.write("\n")
    return data
def generate(
        nx,
        ny,
        R=2.0,
        r=0.2,  # Major & minor radius
        dr=0.05,  # Radial width of domain
        Bt=1.0,  # Toroidal magnetic field
        q=5.0,  # Safety factor
        mxg=2,
        file="circle.nc",
):
    """Write a concentric-circle (large aspect ratio) grid file.

    Produces mesh spacing, field components and the curvature term ``logB``
    for a circular cross-section tokamak, and saves them to *file*.
    """
    # Poloidal field strength from q = r*Bt / (R*Bp)
    Bp = r * Bt / (R * q)

    # Radial grid spacing, chosen so the domain boundary lies half-way
    # between grid points (mxg guard cells on each side)
    h = dr / (nx - 2.0 * mxg)

    rminor = linspace(r - 0.5 * dr - (mxg - 0.5) * h,
                      r + 0.5 * dr + (mxg - 0.5) * h,
                      nx)

    # Mesh spacing: dx is toroidal flux, dy is poloidal angle
    dx = zeros([nx, ny])
    dx[:, :] = r * Bt * h

    dy = zeros([nx, ny])
    dy[:, :] = 2.0 * pi / ny

    # LogB = log(1/(1+r/R cos(theta))) =(approx) -(r/R)*cos(theta)
    # stored as (constant, n=1 real, n=1 imag) Fourier components
    logB = zeros([nx, ny, 3])

    # At y = 0, Rmaj = R + r*cos(theta)
    logB[:, 0, 1] = -(rminor / R)

    # Moving in y, phase shift by (toroidal angle) / q
    for j in range(1, ny):
        dtheta = j * 2.0 * pi / ny / q  # Change in poloidal angle
        logB[:, j, 1] = -(rminor / R) * cos(dtheta)
        logB[:, j, 2] = -(rminor / R) * sin(dtheta)

    # Shift angle from one end of y to the other
    ShiftAngle = zeros([nx])
    ShiftAngle[:] = 2.0 * pi / q

    Rxy = zeros([nx, ny])
    Rxy[:, :] = r  # NOTE : opposite to standard BOUT convention

    Btxy = zeros([nx, ny])
    Btxy[:, :] = Bp

    Bpxy = zeros([nx, ny])
    Bpxy[:, :] = Bt

    Bxy = zeros([nx, ny])
    Bxy[:, :] = sqrt(Bt ** 2 + Bp ** 2)

    hthe = zeros([nx, ny])
    hthe[:, :] = R

    print("Writing to file '" + file + "'")

    f = DataFile()
    f.open(file, create=True)

    # Mesh size
    f.write("nx", nx)
    f.write("ny", ny)

    # Mesh spacing
    f.write("dx", dx)
    f.write("dy", dy)

    # Metric components
    f.write("Rxy", Rxy)
    f.write("Btxy", Btxy)
    f.write("Bpxy", Bpxy)
    f.write("Bxy", Bxy)
    f.write("hthe", hthe)

    # Shift
    f.write("ShiftAngle", ShiftAngle)

    # Curvature
    f.write("logB", logB)

    # Input parameters
    f.write("R", R)
    f.write("r", r)
    f.write("dr", dr)
    f.write("Bt", Bt)
    f.write("q", q)
    f.write("mxg", mxg)

    f.close()
def redistribute(npes, path="data", nxpe=None, output=".", informat=None,
                 outformat=None, mxg=2, myg=2):
    """Resize restart files across NPES processors.

    Does not check if new processor arrangement is compatible with the
    branch cuts. In this respect :py:func:`restart.split` is safer. However,
    BOUT++ checks the topology during initialisation anyway so this is not
    too serious.

    Parameters
    ----------
    npes : int
        Number of processors for the new restart files
    path : str, optional
        Path to original restart files (default: "data")
    nxpe : int, optional
        Number of processors to use in the x-direction (determines split:
        npes = nxpe * nype). Default is None which uses the same algorithm
        as BoutMesh (but without topology information) to determine a
        suitable value for nxpe.
    output : str, optional
        Location to save new restart files (default: current directory)
    informat : str, optional
        Specify file format of old restart files (must be a suffix
        understood by DataFile, e.g. 'nc'). Default uses the format of the
        first 'BOUT.restart.*' file listed by glob.glob.
    outformat : str, optional
        Specify file format of new restart files (must be a suffix
        understood by DataFile, e.g. 'nc'). Default is to use the same as
        informat.

    Returns
    -------
    True on success, False on error

    TODO
    ----
    - Replace printing errors with raising `ValueError`
    """
    if npes <= 0:
        print("ERROR: Negative or zero number of processors")
        return False

    if path == output:
        print("ERROR: Can't overwrite restart files")
        return False

    if informat is None:
        file_list = glob.glob(os.path.join(path, "BOUT.restart.*"))
    else:
        file_list = glob.glob(os.path.join(path, "BOUT.restart.*."
                                           + informat))

    nfiles = len(file_list)
    if nfiles == 0:
        # Must check before indexing file_list below (was checked too late,
        # after DataFile(file_list[0]) had already raised IndexError)
        print("ERROR: No restart files found")
        return False

    # Read old processor layout
    f = DataFile(file_list[0])

    # Get list of variables
    var_list = f.list()
    if len(var_list) == 0:
        print("ERROR: No data found")
        return False

    old_processor_layout = get_processor_layout(f, has_t_dimension=False)
    print("Grid sizes: ", old_processor_layout.nx, old_processor_layout.ny,
          old_processor_layout.mz)

    if nfiles != old_processor_layout.npes:
        print("WARNING: Number of restart files inconsistent with NPES")
        print("Setting nfiles = " + str(old_processor_layout.npes))
        nfiles = old_processor_layout.npes

    if nfiles == 0:
        print("ERROR: No restart files found")
        return False

    informat = file_list[0].split(".")[-1]
    if outformat is None:
        outformat = informat

    try:
        new_processor_layout = create_processor_layout(old_processor_layout,
                                                       npes, nxpe=nxpe)
    except ValueError as e:
        # BUG FIX: Python's ValueError has no .what() method (that is the
        # C++ std::exception API) -- use str(e). Also must stop here:
        # continuing would hit an unbound new_processor_layout below.
        print("Could not find valid processor split. " + str(e))
        return False

    # Old (source) decomposition
    nx = old_processor_layout.nx
    ny = old_processor_layout.ny
    mz = old_processor_layout.mz
    mxg = old_processor_layout.mxg
    myg = old_processor_layout.myg
    old_npes = old_processor_layout.npes
    old_nxpe = old_processor_layout.nxpe
    old_nype = old_processor_layout.nype
    old_mxsub = old_processor_layout.mxsub
    old_mysub = old_processor_layout.mysub

    # New (target) decomposition
    nxpe = new_processor_layout.nxpe
    nype = new_processor_layout.nype
    mxsub = new_processor_layout.mxsub
    mysub = new_processor_layout.mysub
    mzsub = new_processor_layout.mz

    outfile_list = []
    for i in range(npes):
        outpath = os.path.join(output, "BOUT.restart." + str(i) + "."
                               + outformat)
        outfile_list.append(DataFile(outpath, write=True, create=True))

    DataFileCache = create_cache(path, "BOUT.restart")

    for v in var_list:
        dimensions = f.dimensions(v)
        ndims = len(dimensions)

        # collect the global array for this variable (guard cells included)
        data = collect(v, xguards=True, yguards=True, info=False,
                       datafile_cache=DataFileCache)

        # write data, slicing out each new processor's sub-domain
        for i in range(npes):
            ix = i % nxpe
            iy = int(i / nxpe)
            outfile = outfile_list[i]

            if v == "NPES":
                outfile.write(v, npes)
            elif v == "NXPE":
                outfile.write(v, nxpe)
            elif v == "NYPE":
                outfile.write(v, nype)
            elif v == "MXSUB":
                outfile.write(v, mxsub)
            elif v == "MYSUB":
                outfile.write(v, mysub)
            elif v == "MZSUB":
                outfile.write(v, mzsub)
            elif dimensions == ():
                # scalar
                outfile.write(v, data)
            elif dimensions == ('x', 'y'):
                # Field2D
                outfile.write(
                    v, data[ix * mxsub:(ix + 1) * mxsub + 2 * mxg,
                            iy * mysub:(iy + 1) * mysub + 2 * myg])
            elif dimensions == ('x', 'z'):
                # FieldPerp: only written to the processor whose y-range
                # contains its global y-index; others get a null marker
                yindex_global = data.attributes['yindex_global']
                if yindex_global + myg >= iy * mysub and yindex_global + myg < (
                        iy + 1) * mysub + 2 * myg:
                    outfile.write(
                        v, data[ix * mxsub:(ix + 1) * mxsub + 2 * mxg, :])
                else:
                    nullarray = BoutArray(np.zeros(
                        [mxsub + 2 * mxg, mysub + 2 * myg]),
                        attributes={
                            "bout_type": "FieldPerp",
                            "yindex_global": -myg - 1
                        })
                    outfile.write(v, nullarray)
            elif dimensions == ('x', 'y', 'z'):
                # Field3D
                outfile.write(
                    v, data[ix * mxsub:(ix + 1) * mxsub + 2 * mxg,
                            iy * mysub:(iy + 1) * mysub + 2 * myg, :])
            else:
                print("ERROR: variable found with unexpected dimensions,",
                      dimensions, v)

    f.close()
    for outfile in outfile_list:
        outfile.close()

    return True
def redistribute(npes, path="data", nxpe=None, output=".", informat=None,
                 outformat=None, mxg=2, myg=2):
    """Resize restart files across NPES processors.

    Does not check if new processor arrangement is compatible with the
    branch cuts. In this respect restart.split is safer. However, BOUT++
    checks the topology during initialisation anyway so this is not too
    serious.

    Parameters
    ----------
    npes : int
        number of processors for the new restart files
    path : string, optional
        location of old restart files
    nxpe : int, optional
        number of processors to use in the x-direction (determines split:
        npes = nxpe * nype). Default is None which uses the same algorithm
        as BoutMesh (but without topology information) to determine a
        suitable value for nxpe.
    output : string, optional
        location to save new restart files
    informat : string, optional
        specify file format of old restart files (must be a suffix
        understood by DataFile, e.g. 'nc'). Default uses the format of the
        first 'BOUT.restart.*' file listed by glob.glob.
    outformat : string, optional
        specify file format of new restart files (must be a suffix
        understood by DataFile, e.g. 'nc'). Default is to use the same as
        informat.

    Returns
    -------
    True on success
    """
    if npes <= 0:
        print("ERROR: Negative or zero number of processors")
        return False

    if path == output:
        print("ERROR: Can't overwrite restart files")
        return False

    if informat is None:
        file_list = glob.glob(os.path.join(path, "BOUT.restart.*"))
    else:
        file_list = glob.glob(os.path.join(path, "BOUT.restart.*."
                                           + informat))

    nfiles = len(file_list)

    # Read old processor layout
    # NOTE(review): if no restart files were found this raises IndexError
    # here -- the `nfiles == 0` check further down comes too late
    f = DataFile(file_list[0])

    # Get list of variables
    var_list = f.list()
    if len(var_list) == 0:
        print("ERROR: No data found")
        return False

    old_npes = f.read('NPES')
    old_nxpe = f.read('NXPE')
    old_nype = int(old_npes / old_nxpe)

    if nfiles != old_npes:
        print("WARNING: Number of restart files inconsistent with NPES")
        print("Setting nfiles = " + str(old_npes))
        nfiles = old_npes

    if nfiles == 0:
        print("ERROR: No restart files found")
        return False

    informat = file_list[0].split(".")[-1]
    if outformat is None:
        outformat = informat

    # Determine the per-processor grid size of the old layout by inspecting
    # the first 3D [x,y,z] variable found
    old_mxsub = 0
    old_mysub = 0
    mz = 0

    for v in var_list:
        if f.ndims(v) == 3:
            s = f.size(v)
            old_mxsub = s[0] - 2 * mxg
            if old_mxsub < 0:
                # Fewer x points than guard cells imply: assume a grid with
                # reduced (or no) x guard cells
                if s[0] == 1:
                    old_mxsub = 1
                    mxg = 0
                elif s[0] == 3:
                    old_mxsub = 1
                    mxg = 1
                else:
                    print("Number of x points is wrong?")
                    return False

            old_mysub = s[1] - 2 * myg
            if old_mysub < 0:
                # Same fallback logic for y guard cells
                if s[1] == 1:
                    old_mysub = 1
                    myg = 0
                elif s[1] == 3:
                    old_mysub = 1
                    myg = 1
                else:
                    print("Number of y points is wrong?")
                    return False

            mz = s[2]
            break

    # Calculate total size of the grid
    nx = old_mxsub * old_nxpe
    ny = old_mysub * old_nype
    print("Grid sizes: ", nx, ny, mz)

    if nxpe is None:
        # Copy algorithm from BoutMesh for selecting nxpe
        ideal = sqrt(float(nx) * float(npes) /
                     float(ny))  # Results in square domain

        for i in range(1, npes + 1):
            if npes % i == 0 and nx % i == 0 and int(
                    nx / i) >= mxg and ny % (npes / i) == 0:
                # Found an acceptable value
                # Warning: does not check branch cuts!

                if nxpe is None or abs(ideal - i) < abs(ideal - nxpe):
                    nxpe = i  # Keep value nearest to the ideal

        if nxpe is None:
            print("ERROR: could not find a valid value for nxpe")
            return False

    nype = int(npes / nxpe)

    outfile_list = []
    for i in range(npes):
        outpath = os.path.join(output, "BOUT.restart." + str(i) + "."
                               + outformat)
        outfile_list.append(DataFile(outpath, write=True, create=True))

    infile_list = []
    for i in range(old_npes):
        # NOTE(review): the *input* files are opened using `outformat`; if
        # outformat was passed explicitly and differs from informat these
        # paths will not match the old files -- verify
        inpath = os.path.join(path, "BOUT.restart." + str(i) + "."
                              + outformat)
        infile_list.append(DataFile(inpath))

    # Per-processor sub-domain sizes (old and new splits)
    old_mxsub = int(nx / old_nxpe)
    old_mysub = int(ny / old_nype)

    mxsub = int(nx / nxpe)
    mysub = int(ny / nype)

    for v in var_list:
        ndims = f.ndims(v)

        #collect data
        if ndims == 0:
            #scalar
            data = f.read(v)
        elif ndims == 2:
            # Field2D: assemble global (x,y) array from all input files,
            # keeping guard cells only at the outer domain edges
            data = np.zeros((nx + 2 * mxg, ny + 2 * myg))
            for i in range(old_npes):
                ix = i % old_nxpe
                iy = int(i / old_nxpe)
                # Skip interior guard cells; keep them at domain boundaries
                ixstart = mxg
                if ix == 0:
                    ixstart = 0
                ixend = -mxg
                if ix == old_nxpe - 1:
                    ixend = 0
                iystart = myg
                if iy == 0:
                    iystart = 0
                iyend = -myg
                if iy == old_nype - 1:
                    iyend = 0
                data[ix * old_mxsub + ixstart:(ix + 1) * old_mxsub + 2 * mxg
                     + ixend,
                     iy * old_mysub + iystart:(iy + 1) * old_mysub + 2 * myg
                     + iyend] = infile_list[i].read(v)[
                         ixstart:old_mxsub + 2 * mxg + ixend,
                         iystart:old_mysub + 2 * myg + iyend]
        elif ndims == 3:
            # Field3D: as above, with the full z extent copied through
            data = np.zeros((nx + 2 * mxg, ny + 2 * myg, mz))
            for i in range(old_npes):
                ix = i % old_nxpe
                iy = int(i / old_nxpe)
                ixstart = mxg
                if ix == 0:
                    ixstart = 0
                ixend = -mxg
                if ix == old_nxpe - 1:
                    ixend = 0
                iystart = myg
                if iy == 0:
                    iystart = 0
                iyend = -myg
                if iy == old_nype - 1:
                    iyend = 0
                data[ix * old_mxsub + ixstart:(ix + 1) * old_mxsub + 2 * mxg
                     + ixend,
                     iy * old_mysub + iystart:(iy + 1) * old_mysub + 2 * myg
                     + iyend, :] = infile_list[i].read(v)[
                         ixstart:old_mxsub + 2 * mxg + ixend,
                         iystart:old_mysub + 2 * myg + iyend, :]
        else:
            print(
                "ERROR: variable found with unexpected number of dimensions,",
                ndims, v)
            return False

        # write data
        # NOTE(review): NPES and NXPE are rewritten for the new split, but
        # NYPE (if present) is copied through unchanged as a scalar --
        # looks inconsistent; confirm against a newer version of this
        # function
        for i in range(npes):
            ix = i % nxpe
            iy = int(i / nxpe)
            outfile = outfile_list[i]
            if v == "NPES":
                outfile.write(v, npes)
            elif v == "NXPE":
                outfile.write(v, nxpe)
            elif ndims == 0:
                # scalar
                outfile.write(v, data)
            elif ndims == 2:
                # Field2D
                outfile.write(
                    v, data[ix * mxsub:(ix + 1) * mxsub + 2 * mxg,
                            iy * mysub:(iy + 1) * mysub + 2 * myg])
            elif ndims == 3:
                # Field3D
                outfile.write(
                    v, data[ix * mxsub:(ix + 1) * mxsub + 2 * mxg,
                            iy * mysub:(iy + 1) * mysub + 2 * myg, :])
            else:
                # NOTE(review): unreachable in practice (caught above), and
                # does not return/continue if it ever fires
                print(
                    "ERROR: variable found with unexpected number of dimensions,",
                    f.ndims(v))

    f.close()
    for infile in infile_list:
        infile.close()
    for outfile in outfile_list:
        outfile.close()

    return True
def redistribute(npes, path="data", nxpe=None, output=".", informat=None,
                 outformat=None, mxg=2, myg=2):
    """Resize restart files across NPES processors.

    Does not check if new processor arrangement is compatible with the
    branch cuts. In this respect :py:func:`restart.split` is safer. However,
    BOUT++ checks the topology during initialisation anyway so this is not
    too serious.

    Parameters
    ----------
    npes : int
        Number of processors for the new restart files
    path : str, optional
        Path to original restart files (default: "data")
    nxpe : int, optional
        Number of processors to use in the x-direction (determines split:
        npes = nxpe * nype). Default is None which uses the same algorithm
        as BoutMesh (but without topology information) to determine a
        suitable value for nxpe.
    output : str, optional
        Location to save new restart files (default: current directory)
    informat : str, optional
        Specify file format of old restart files (must be a suffix
        understood by DataFile, e.g. 'nc'). Default uses the format of the
        first 'BOUT.restart.*' file listed by glob.glob.
    outformat : str, optional
        Specify file format of new restart files (must be a suffix
        understood by DataFile, e.g. 'nc'). Default is to use the same as
        informat.

    Returns
    -------
    True on success, False on error

    TODO
    ----
    - Replace printing errors with raising `ValueError`
    """
    if npes <= 0:
        print("ERROR: Negative or zero number of processors")
        return False

    if path == output:
        print("ERROR: Can't overwrite restart files")
        return False

    if informat is None:
        file_list = glob.glob(os.path.join(path, "BOUT.restart.*"))
    else:
        file_list = glob.glob(os.path.join(path, "BOUT.restart.*."+informat))

    nfiles = len(file_list)
    if nfiles == 0:
        # Must check before indexing file_list below
        print("ERROR: No restart files found")
        return False

    # Read old processor layout
    f = DataFile(file_list[0])

    # Get list of variables
    var_list = f.list()
    if len(var_list) == 0:
        print("ERROR: No data found")
        return False

    old_processor_layout = get_processor_layout(f, has_t_dimension=False)
    print("Grid sizes: ", old_processor_layout.nx,
          old_processor_layout.ny, old_processor_layout.mz)

    if nfiles != old_processor_layout.npes:
        print("WARNING: Number of restart files inconsistent with NPES")
        print("Setting nfiles = " + str(old_processor_layout.npes))
        nfiles = old_processor_layout.npes

    if nfiles == 0:
        print("ERROR: No restart files found")
        return False

    informat = file_list[0].split(".")[-1]
    if outformat is None:
        outformat = informat

    try:
        new_processor_layout = create_processor_layout(
            old_processor_layout, npes, nxpe=nxpe)
    except ValueError as e:
        # BUG FIX: Python's ValueError has no .what() method (that is the
        # C++ std::exception API) -- use str(e). Also must stop here:
        # continuing would hit an unbound new_processor_layout below.
        print("Could not find valid processor split. " + str(e))
        return False

    # Old (source) decomposition
    nx = old_processor_layout.nx
    ny = old_processor_layout.ny
    mz = old_processor_layout.mz
    mxg = old_processor_layout.mxg
    myg = old_processor_layout.myg
    old_npes = old_processor_layout.npes
    old_nxpe = old_processor_layout.nxpe
    old_nype = old_processor_layout.nype
    old_mxsub = old_processor_layout.mxsub
    old_mysub = old_processor_layout.mysub

    # New (target) decomposition
    nxpe = new_processor_layout.nxpe
    nype = new_processor_layout.nype
    mxsub = new_processor_layout.mxsub
    mysub = new_processor_layout.mysub

    outfile_list = []
    for i in range(npes):
        outpath = os.path.join(output, "BOUT.restart."+str(i)+"."+outformat)
        outfile_list.append(DataFile(outpath, write=True, create=True))

    infile_list = []
    for i in range(old_npes):
        # BUG FIX: the old restart files must be opened with `informat`;
        # using `outformat` breaks when the caller requests a different
        # output format
        inpath = os.path.join(path, "BOUT.restart."+str(i)+"."+informat)
        infile_list.append(DataFile(inpath))

    for v in var_list:
        ndims = f.ndims(v)

        # collect data
        if ndims == 0:
            # scalar
            data = f.read(v)
        elif ndims == 2:
            # Field2D: assemble the global (x,y) array from all input
            # files, keeping guard cells only at the outer domain edges
            data = np.zeros((nx+2*mxg, ny+2*myg))
            for i in range(old_npes):
                ix = i % old_nxpe
                iy = int(i/old_nxpe)
                # Skip interior guard cells; keep them at domain boundaries
                ixstart = mxg
                if ix == 0:
                    ixstart = 0
                ixend = -mxg
                if ix == old_nxpe-1:
                    ixend = 0
                iystart = myg
                if iy == 0:
                    iystart = 0
                iyend = -myg
                if iy == old_nype-1:
                    iyend = 0
                data[ix*old_mxsub+ixstart:(ix+1)*old_mxsub+2*mxg+ixend,
                     iy*old_mysub+iystart:(iy+1)*old_mysub+2*myg+iyend] = \
                    infile_list[i].read(v)[
                        ixstart:old_mxsub+2*mxg+ixend,
                        iystart:old_mysub+2*myg+iyend]
            # Preserve BOUT++ metadata (bout_type etc.) on the output
            data = BoutArray(data, attributes=infile_list[0].attributes(v))
        elif ndims == 3:
            # Field3D: as above, with the full z extent copied through
            data = np.zeros((nx+2*mxg, ny+2*myg, mz))
            for i in range(old_npes):
                ix = i % old_nxpe
                iy = int(i/old_nxpe)
                ixstart = mxg
                if ix == 0:
                    ixstart = 0
                ixend = -mxg
                if ix == old_nxpe-1:
                    ixend = 0
                iystart = myg
                if iy == 0:
                    iystart = 0
                iyend = -myg
                if iy == old_nype-1:
                    iyend = 0
                data[ix*old_mxsub+ixstart:(ix+1)*old_mxsub+2*mxg+ixend,
                     iy*old_mysub+iystart:(iy+1)*old_mysub+2*myg+iyend, :] = \
                    infile_list[i].read(v)[
                        ixstart:old_mxsub+2*mxg+ixend,
                        iystart:old_mysub+2*myg+iyend, :]
            data = BoutArray(data, attributes=infile_list[0].attributes(v))
        else:
            print("ERROR: variable found with unexpected number of dimensions,",
                  ndims, v)
            return False

        # write data, slicing out each new processor's sub-domain
        for i in range(npes):
            ix = i % nxpe
            iy = int(i/nxpe)
            outfile = outfile_list[i]
            if v == "NPES":
                outfile.write(v, npes)
            elif v == "NXPE":
                outfile.write(v, nxpe)
            elif v == "NYPE":
                outfile.write(v, nype)
            elif ndims == 0:
                # scalar
                outfile.write(v, data)
            elif ndims == 2:
                # Field2D
                outfile.write(
                    v,
                    data[ix*mxsub:(ix+1)*mxsub+2*mxg,
                         iy*mysub:(iy+1)*mysub+2*myg])
            elif ndims == 3:
                # Field3D
                outfile.write(
                    v,
                    data[ix*mxsub:(ix+1)*mxsub+2*mxg,
                         iy*mysub:(iy+1)*mysub+2*myg, :])
            else:
                print(
                    "ERROR: variable found with unexpected number of dimensions,",
                    f.ndims(v))

    f.close()
    for infile in infile_list:
        infile.close()
    for outfile in outfile_list:
        outfile.close()

    return True
def resizeY(newy, path="data", output=".", informat="nc", outformat=None,
            myg=2):
    """Resize all the restart files in Y.

    Evolving 2D/3D variables are linearly interpolated onto the new y grid;
    everything else is copied through unchanged.

    Parameters
    ----------
    newy : int
        New number of y points (including guard cells)
    path : str, optional
        Location of the input restart files
    output : str, optional
        Location to save the resized restart files
    informat : str, optional
        File format suffix of the input restart files
    outformat : str, optional
        File format suffix of the output files. Default: same as informat
    myg : int, optional
        Number of y guard cells

    Returns
    -------
    False if no restart files were found, otherwise None
    """
    if outformat is None:
        outformat = informat

    file_list = glob.glob(os.path.join(path, "BOUT.restart.*." + informat))

    nfiles = len(file_list)

    if nfiles == 0:
        print("ERROR: No restart files found")
        return False

    for i in range(nfiles):
        # Open each data file
        infname = os.path.join(path,
                               "BOUT.restart." + str(i) + "." + informat)
        outfname = os.path.join(output,
                                "BOUT.restart." + str(i) + "." + outformat)

        print("Processing %s -> %s" % (infname, outfname))

        infile = DataFile(infname)
        outfile = DataFile(outfname, create=True)

        # Copy basic information
        for var in ["hist_hi", "NPES", "NXPE", "tt"]:
            data = infile.read(var)
            try:
                # Convert 1-element arrays to scalars
                data = data[0]
            except (IndexError, TypeError):
                # BUG FIX: was a bare `except:`, which would also swallow
                # KeyboardInterrupt etc. Already a scalar -- keep as-is.
                pass
            outfile.write(var, data)

        # Get a list of variables
        varnames = infile.list()

        for var in varnames:
            if infile.ndims(var) == 3:
                # Could be an evolving variable [x,y,z]
                print(" -> Resizing " + var)

                # Read variable from input
                indata = infile.read(var)

                nx, ny, nz = indata.shape

                # Normalised y coordinate (cell centres) in input and output
                iny = (arange(ny) - myg + 0.5) / (ny - 2 * myg)
                outy = (arange(newy) - myg + 0.5) / (newy - 2 * myg)

                outdata = zeros([nx, newy, nz])

                for x in range(nx):
                    for z in range(nz):
                        f = interp1d(iny,
                                     indata[x, :, z],
                                     bounds_error=False,
                                     fill_value=0.0)
                        outdata[x, :, z] = f(outy)

                outfile.write(var, outdata)
            elif infile.ndims(var) == 2:
                # Assume evolving variable [x,y]
                print(" -> Resizing " + var)

                # Read variable from input
                indata = infile.read(var)

                nx, ny = indata.shape

                # Normalised y coordinate (cell centres) in input and output
                iny = (arange(ny) - myg + 0.5) / (ny - 2 * myg)
                outy = (arange(newy) - myg + 0.5) / (newy - 2 * myg)

                outdata = zeros([nx, newy])

                for x in range(nx):
                    f = interp1d(iny,
                                 indata[x, :],
                                 bounds_error=False,
                                 fill_value=0.0)
                    outdata[x, :] = f(outy)

                outfile.write(var, outdata)
            else:
                # Copy variable
                print(" -> Copying " + var)

                # Read variable from input
                data = infile.read(var)
                try:
                    # Convert 1-element arrays to scalars
                    data = data[0]
                except (IndexError, TypeError):
                    # BUG FIX: was a bare `except:` -- narrow it
                    pass
                outfile.write(var, data)

        infile.close()
        outfile.close()
def split(nxpe, nype, path="data", output="./", informat="nc",
          outformat=None):
    """Split restart files across NXPE x NYPE processors.

    Returns True on success

    NOTE(review): this function appears truncated/broken as-is: it
    references ``xs``, ``ys``, ``mxsub`` and ``mysub`` which are never
    defined (NameError at runtime), and it stops after printing the
    processor mapping without actually copying any data.
    """
    if outformat is None:
        outformat = informat

    mxg = 2
    myg = 2

    npes = nxpe * nype

    if npes <= 0:
        print("ERROR: Negative or zero number of processors")
        return False

    if path == output:
        print("ERROR: Can't overwrite restart files")
        return False

    file_list = glob.glob(os.path.join(path, "BOUT.restart.*." + informat))
    nfiles = len(file_list)

    if nfiles == 0:
        print("ERROR: No restart files found")
        return False

    # Read old processor layout
    # NOTE(review): glob already returns paths prefixed with `path`, so this
    # os.path.join prepends the directory a second time -- verify on a real
    # dataset
    f = DataFile(os.path.join(path, file_list[0]))

    # Get list of variables
    var_list = f.list()
    if len(var_list) == 0:
        print("ERROR: No data found")
        return False

    old_npes = f.read("NPES")
    old_nxpe = f.read("NXPE")

    if nfiles != old_npes:
        print("WARNING: Number of restart files inconsistent with NPES")
        print("Setting nfiles = " + str(old_npes))
        nfiles = old_npes

    if old_npes % old_nxpe != 0:
        print("ERROR: Old NPES is not a multiple of old NXPE")
        return False

    old_nype = int(old_npes / old_nxpe)

    # The new split must be a refinement of the old one in each direction
    if nype % old_nype != 0:
        print("SORRY: New nype must be a multiple of old nype")
        return False

    if nxpe % old_nxpe != 0:
        print("SORRY: New nxpe must be a multiple of old nxpe")
        return False

    # Get dimension sizes from the first 3D [x,y,z] variable found
    old_mxsub = 0
    old_mysub = 0
    mz = 0

    for v in var_list:
        if f.ndims(v) == 3:
            s = f.size(v)
            old_mxsub = s[0] - 2 * mxg
            old_mysub = s[1] - 2 * myg
            mz = s[2]
            break

    f.close()

    # Calculate total size of the grid
    nx = old_mxsub * old_nxpe
    ny = old_mysub * old_nype
    print(("Grid sizes: ", nx, ny, mz))

    # Create the new restart files
    for mype in range(npes):
        # Calculate X and Y processor numbers
        pex = mype % nxpe
        pey = int(mype / nxpe)

        # NOTE(review): `xs` and `ys` are undefined in this scope; they look
        # like the intended per-direction split factors (nxpe/old_nxpe and
        # nype/old_nype) -- confirm before use
        old_pex = int(pex / xs)
        old_pey = int(pey / ys)

        old_x = pex % xs
        old_y = pey % ys

        # Old restart file number
        old_mype = old_nxpe * old_pey + old_pex

        # Calculate indices in old restart file
        # NOTE(review): `mxsub` and `mysub` are also undefined here
        xmin = old_x * mxsub
        xmax = xmin + mxsub - 1 + 2 * mxg
        ymin = old_y * mysub
        ymax = ymin + mysub - 1 + 2 * myg

        print("New: " + str(mype) + " (" + str(pex) + ", " + str(pey) + ")")
        print(
            " => " + str(old_mype) + " (" + str(old_pex) + ", " +
            str(old_pey) + ") : (" + str(old_x) + ", " + str(old_y) + ")"
        )
def slice(infile, outfile, region=None, xind=None, yind=None):
    """Copy an X-Y slice from one grid file to another.

    Parameters
    ----------
    infile, outfile : str
        Names of the input and output DataFiles. The output is created
        (overwritten if it exists).
    region : int, optional
        Copy a whole topological region (0-5); see the branch comments
        below for the mapping.
    xind, yind : [int, int], optional
        Index ranges. Range includes first point, but not last point.

    TODO: rename -- this shadows the builtin `slice`.
    """
    # Open input and output files
    indf = DataFile(infile)
    outdf = DataFile(outfile, create=True)

    nx = indf["nx"][0]
    ny = indf["ny"][0]

    # BUG FIX: was `if region:`, which treated region 0 (lower inner leg)
    # as False and silently fell through to the index branch
    if region is not None:
        # Select a region of the mesh
        xind = [0, nx]
        if region == 0:
            # Lower inner leg
            yind = [0, indf["jyseps1_1"][0]+1]
        elif region == 1:
            # Inner core
            yind = [indf["jyseps1_1"][0]+1, indf["jyseps2_1"][0]+1]
        elif region == 2:
            # Upper inner leg
            yind = [indf["jyseps2_1"][0]+1, indf["ny_inner"][0]]
        elif region == 3:
            # Upper outer leg
            yind = [indf["ny_inner"][0], indf["jyseps1_2"][0]+1]
        elif region == 4:
            # Outer core
            yind = [indf["jyseps1_2"][0]+1, indf["jyseps2_2"][0]+1]
        else:
            # Lower outer leg
            yind = [indf["jyseps2_2"][0]+1, ny]
    else:
        # Use indices
        if not xind:
            xind = [0, nx]
        if not yind:
            yind = [0, ny]

    print("Indices: [%d:%d, %d:%d]" % (xind[0], xind[1], yind[0], yind[1]))

    # List of variables requiring special handling
    special = ["nx", "ny", "ny_inner",
               "ixseps1", "ixseps2",
               "jyseps1_1", "jyseps1_2", "jyseps2_1", "jyseps2_2",
               "ShiftAngle"]

    outdf["nx"] = xind[1] - xind[0]
    outdf["ny"] = yind[1] - yind[0]
    outdf["ny_inner"] = indf["ny_inner"][0] - yind[0]

    outdf["ixseps1"] = indf["ixseps1"][0]
    outdf["ixseps2"] = indf["ixseps2"][0]

    # Branch-cut indices shift with the start of the y window
    outdf["jyseps1_1"] = indf["jyseps1_1"][0] - yind[0]
    outdf["jyseps2_1"] = indf["jyseps2_1"][0] - yind[0]
    outdf["jyseps1_2"] = indf["jyseps1_2"][0] - yind[0]
    outdf["jyseps2_2"] = indf["jyseps2_2"][0] - yind[0]

    outdf["ShiftAngle"] = indf["ShiftAngle"][xind[0]:xind[1]]

    # Loop over all variables
    for v in list(indf.keys()):
        if v in special:
            continue  # Skip these variables

        ndims = indf.ndims(v)
        if ndims == 0:
            # Copy scalars
            print("Copying variable: " + v)
            outdf[v] = indf[v][0]
        elif ndims == 2:
            # Assume [x,y]
            print("Slicing variable: " + v)
            outdf[v] = indf[v][xind[0]:xind[1], yind[0]:yind[1]]
        else:
            # Skip
            print("Skipping variable: " + v)

    indf.close()
    outdf.close()
def create(averagelast=1, final=-1, path="data", output="./", informat="nc",
           outformat=None):
    """Create restart files from data (dmp) files.

    Parameters
    ----------
    averagelast : int, optional
        Number of time points (counting backwards from `final`) to average
        over. Default is 1 i.e. just take the last time-point.
    final : int, optional
        The last time point to use. Default is the last one (-1).
    path : str, optional
        Path to the input data files
    output : str, optional
        Path where the output restart files should go
    informat : str, optional
        Format of the input data files
    outformat : str, optional
        Format of the output restart files. Default: same as informat
    """
    if outformat is None:
        outformat = informat

    file_list = glob.glob(os.path.join(path, "BOUT.dmp.*." + informat))
    nfiles = len(file_list)

    print(("Number of data files: ", nfiles))

    for i in range(nfiles):
        # Open each data file
        infname = os.path.join(path, "BOUT.dmp." + str(i) + "." + informat)
        outfname = os.path.join(output,
                                "BOUT.restart." + str(i) + "." + outformat)

        print((infname, " -> ", outfname))

        infile = DataFile(infname)
        outfile = DataFile(outfname, create=True)

        # Get the data always needed in restart files
        hist_hi = infile.read("iteration")
        print(("hist_hi = ", hist_hi))
        outfile.write("hist_hi", hist_hi)

        t_array = infile.read("t_array")
        tt = t_array[final]
        print(("tt = ", tt))
        outfile.write("tt", tt)

        # Normalise `final` to a non-negative time index.
        # BUG FIX: `tind` was computed but never used (and compared against
        # the float literal 0.0); it is now used for the time slicing below,
        # which is index-for-index equivalent to using `final` directly.
        tind = final
        if tind < 0:
            tind = len(t_array) + final

        NXPE = infile.read("NXPE")
        NYPE = infile.read("NYPE")
        NPES = NXPE * NYPE
        print(("NPES = ", NPES, " NXPE = ", NXPE))
        outfile.write("NPES", NPES)
        outfile.write("NXPE", NXPE)

        # Get a list of variables
        varnames = infile.list()

        for var in varnames:
            if infile.ndims(var) == 4:
                # Could be an evolving variable [t,x,y,z]
                print((" -> ", var))

                data = infile.read(var)

                # Renamed from `slice`, which shadowed the builtin
                if averagelast == 1:
                    final_data = data[tind, :, :, :]
                else:
                    final_data = mean(
                        data[(tind - averagelast):tind, :, :, :], axis=0)

                print(final_data.shape)

                outfile.write(var, final_data)

        infile.close()
        outfile.close()
def generate(
        nx,
        ny,
        R=2.0,
        r=0.2,  # Major & minor radius
        dr=0.05,  # Radial width of domain
        Bt=1.0,  # Toroidal magnetic field
        q=5.0,  # Safety factor
        mxg=2,
        file="circle.nc"):
    """Write a concentric-circle (large aspect ratio) grid file.

    Produces mesh spacing, field components and the curvature term ``logB``
    for a circular cross-section tokamak, and saves them to *file*.
    """
    # Poloidal field from q = rBt / RBp
    Bp = r * Bt / (R * q)

    # Minor radius as function of x. Choose so boundary
    # is half-way between grid points
    h = dr / (nx - 2. * mxg)  # Grid spacing in r
    rminor = linspace(r - 0.5 * dr - (mxg - 0.5) * h,
                      r + 0.5 * dr + (mxg - 0.5) * h, nx)

    # mesh spacing in x and y
    dx = ndarray([nx, ny])
    dx[:, :] = r * Bt * h  # NOTE: dx is toroidal flux

    dy = ndarray([nx, ny])
    dy[:, :] = 2. * pi / ny

    # LogB = log(1/(1+r/R cos(theta))) =(approx) -(r/R)*cos(theta)
    # stored as (constant, n=1 real, n=1 imag) Fourier components
    logB = zeros([nx, ny, 3])

    # At y = 0, Rmaj = R + r*cos(theta)
    logB[:, 0, 1] = -(rminor / R)

    # Moving in y, phase shift by (toroidal angle) / q
    for y in range(1, ny):
        dtheta = y * 2. * pi / ny / q  # Change in poloidal angle

        logB[:, y, 1] = -(rminor / R) * cos(dtheta)
        logB[:, y, 2] = -(rminor / R) * sin(dtheta)

    # Shift angle from one end of y to the other
    ShiftAngle = ndarray([nx])
    ShiftAngle[:] = 2. * pi / q

    Rxy = ndarray([nx, ny])
    Rxy[:, :] = r  # NOTE : opposite to standard BOUT convention

    Btxy = ndarray([nx, ny])
    Btxy[:, :] = Bp

    Bpxy = ndarray([nx, ny])
    Bpxy[:, :] = Bt

    Bxy = ndarray([nx, ny])
    Bxy[:, :] = sqrt(Bt**2 + Bp**2)

    hthe = ndarray([nx, ny])
    hthe[:, :] = R

    print("Writing to file '" + file + "'")

    f = DataFile()
    f.open(file, create=True)

    # Mesh size
    f.write("nx", nx)
    f.write("ny", ny)

    # Mesh spacing
    f.write("dx", dx)
    f.write("dy", dy)

    # Metric components
    f.write("Rxy", Rxy)
    f.write("Btxy", Btxy)
    f.write("Bpxy", Bpxy)
    f.write("Bxy", Bxy)
    f.write("hthe", hthe)

    # Shift
    f.write("ShiftAngle", ShiftAngle)

    # Curvature
    f.write("logB", logB)

    # Input parameters
    f.write("R", R)
    f.write("r", r)
    f.write("dr", dr)
    f.write("Bt", Bt)
    f.write("q", q)
    f.write("mxg", mxg)

    f.close()
def slice(infile, outfile, region=None, xind=None, yind=None):
    """Copy an X-Y slice from one DataFile to another

    Parameters
    ----------
    infile : str
        Name of DataFile to read slice from
    outfile : str
        Name of DataFile to write slice to. File will be created, and
        will be overwritten if it already exists
    region : {0, 1, 2, 3, 4, 5, None}, optional
        Copy a whole region. The available regions are:
            - 0: Lower inner leg
            - 1: Inner core
            - 2: Upper inner leg
            - 3: Upper outer leg
            - 4: Outer core
            - 5: Lower outer leg
    xind, yind : (int, int), optional
        Index ranges for x and y. Range includes first point, but not
        last point

    TODO
    ----
    - Rename to not clobber builtin `slice`
    - Better regions?

    """
    # Open input and output files
    indf = DataFile(infile)
    outdf = DataFile(outfile, create=True)

    nx = indf["nx"][0]
    ny = indf["ny"][0]

    # BUG FIX: was `if region:`, which treated region 0 (lower inner leg)
    # as False and silently fell through to the index branch
    if region is not None:
        # Select a region of the mesh
        xind = [0, nx]
        if region == 0:
            # Lower inner leg
            yind = [0, indf["jyseps1_1"][0]+1]
        elif region == 1:
            # Inner core
            yind = [indf["jyseps1_1"][0]+1, indf["jyseps2_1"][0]+1]
        elif region == 2:
            # Upper inner leg
            yind = [indf["jyseps2_1"][0]+1, indf["ny_inner"][0]]
        elif region == 3:
            # Upper outer leg
            yind = [indf["ny_inner"][0], indf["jyseps1_2"][0]+1]
        elif region == 4:
            # Outer core
            yind = [indf["jyseps1_2"][0]+1, indf["jyseps2_2"][0]+1]
        else:
            # Lower outer leg
            yind = [indf["jyseps2_2"][0]+1, ny]
    else:
        # Use indices
        if not xind:
            xind = [0, nx]
        if not yind:
            yind = [0, ny]

    print("Indices: [%d:%d, %d:%d]" % (xind[0], xind[1], yind[0], yind[1]))

    # List of variables requiring special handling
    special = ["nx", "ny", "ny_inner",
               "ixseps1", "ixseps2",
               "jyseps1_1", "jyseps1_2", "jyseps2_1", "jyseps2_2",
               "ShiftAngle"]

    outdf["nx"] = xind[1] - xind[0]
    outdf["ny"] = yind[1] - yind[0]
    outdf["ny_inner"] = indf["ny_inner"][0] - yind[0]

    outdf["ixseps1"] = indf["ixseps1"][0]
    outdf["ixseps2"] = indf["ixseps2"][0]

    # Branch-cut indices shift with the start of the y window
    outdf["jyseps1_1"] = indf["jyseps1_1"][0] - yind[0]
    outdf["jyseps2_1"] = indf["jyseps2_1"][0] - yind[0]
    outdf["jyseps1_2"] = indf["jyseps1_2"][0] - yind[0]
    outdf["jyseps2_2"] = indf["jyseps2_2"][0] - yind[0]

    outdf["ShiftAngle"] = indf["ShiftAngle"][xind[0]:xind[1]]

    # Loop over all variables
    for v in list(indf.keys()):
        if v in special:
            continue  # Skip these variables

        ndims = indf.ndims(v)
        if ndims == 0:
            # Copy scalars
            print("Copying variable: " + v)
            outdf[v] = indf[v][0]
        elif ndims == 2:
            # Assume [x,y]
            print("Slicing variable: " + v)
            outdf[v] = indf[v][xind[0]:xind[1], yind[0]:yind[1]]
        else:
            # Skip
            print("Skipping variable: " + v)

    indf.close()
    outdf.close()