def new_to_old(filename):
    f = DataFile(filename)
    newfile = DataFile(os.path.splitext(filename)[0] + ".BOUT_metrics.nc",
                       create=True)

    # Map metric names from the letter convention (e.g. "gyy") to the
    # numbered convention (e.g. "g22")
    name_changes = {
        "g_yy": "g_22",
        "gyy": "g22",
        "gxx": "g11",
        "gxz": "g13",
        "gzz": "g33",
        "g_xx": "g_11",
        "g_xz": "g_13",
        "g_zz": "g_33",
    }

    for key in f.keys():
        name = key
        if name in name_changes:
            name = name_changes[name]
        newfile.write(name, np.asarray(f.read(key)))

    f.close()
    newfile.close()

    newfile.list()
def scalevar(var, factor, path="."):
    """
    Scales a variable by a given factor, modifying restart files in place

    Inputs
    ------
    var     Name of the variable (string)
    factor  Factor to multiply (float)
    path    Path to the restart files

    Returns
    -------
    None
    """

    file_list = glob.glob(os.path.join(path, "BOUT.restart.*"))
    nfiles = len(file_list)
    print("Number of restart files: %d" % (nfiles, ))
    for file in file_list:
        print(file)
        with DataFile(file, write=True) as d:
            d[var] = d[var] * factor
def checkForDifferentLengths(dmpFiles):
    #{{{docstring
    """
    Checks that the lengths of the variables are the same.

    Parameters
    ----------
    dmpFiles : iterable
        Iterable containing the paths to the dump files.

    Returns
    -------
    maxDiff : int
        Maximum difference between the time indices
    shortestCommonLen : int
        Shortest common time indices
    """
    #}}}

    print("\nChecking if the output has the same number of outputs")
    curMax = 0
    curMin = float("inf")
    for d in dmpFiles:
        print("\nChecking {}".format(d))
        with DataFile(d) as dmp:
            tLen = len(dmp.read("t_array"))
            curMax = curMax if curMax > tLen else tLen
            curMin = curMin if curMin < tLen else tLen

    maxDiff = curMax - curMin
    shortestCommonLen = curMin

    return maxDiff, shortestCommonLen
def create_cache(path, prefix):
    """Create a list of DataFile objects to be passed repeatedly to
    collect.

    Parameters
    ----------
    path : str
        Path to data files
    prefix : str
        File prefix

    Returns
    -------
    namedtuple : (list of str, bool, str, list of :py:obj:`~boututils.datafile.DataFile`)
        The cache of DataFiles in a namedtuple along with the file_list,
        and parallel and suffix attributes

    """

    # define namedtuple to return as the result
    from collections import namedtuple
    datafile_cache_tuple = namedtuple(
        "datafile_cache", ["file_list", "parallel", "suffix", "datafile_list"])

    file_list, parallel, suffix = findFiles(path, prefix)

    cache = []
    for f in file_list:
        cache.append(DataFile(f))

    return datafile_cache_tuple(file_list=file_list, parallel=parallel,
                                suffix=suffix, datafile_list=cache)
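# Usage sketch (not part of the original source): the cache returned by
# create_cache() is intended to be handed to boutdata's collect() via its
# `datafile_cache` keyword so that repeated collect() calls do not reopen
# the files.  The variable names and path below are placeholders.
def _example_collect_with_cache(path="data", prefix="BOUT.dmp"):
    from boutdata.collect import collect

    cache = create_cache(path, prefix)
    ne = collect("Ne", path=path, prefix=prefix, datafile_cache=cache)
    te = collect("Te", path=path, prefix=prefix, datafile_cache=cache)
    return ne, te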
def scalevar(var, factor, path="."):
    """Scales a variable by a given factor, modifying restart files in place

    .. warning:: Modifies restart files in place! This is in contrast
                 to most of the functions in this module!

    Parameters
    ----------
    var : str
        Name of the variable
    factor : float
        Factor to multiply
    path : str, optional
        Path to the restart files (default: current directory)

    """
    file_list = glob.glob(os.path.join(path, "BOUT.restart.*"))
    nfiles = len(file_list)
    print("Number of restart files: %d" % (nfiles, ))
    for file in file_list:
        print(file)
        with DataFile(file, write=True) as d:
            d[var] = d[var] * factor
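# Minimal usage sketch (illustrative only): double the density "Ne" in all
# BOUT.restart.* files found under ./data.
def _example_scalevar():
    scalevar("Ne", 2.0, path="data")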
def checkForCorruption(restartFiles):
    #{{{docstring
    """
    Check for corruption by checking the field mean.

    WARNING: This is not watertight. Corruption could in theory still have
    occurred, so use with care.

    Parameters
    ----------
    restartFiles : iterable
        Iterable containing the paths to the restart files.
    """
    #}}}

    print("\nChecking for corrupted restart files by checking the field mean")
    for r in restartFiles:
        print("\nChecking {}".format(r))
        with DataFile(r) as restart:
            for var in restart.list():
                if restart.ndims(var) == 3:
                    mean = restart.read(var).mean()
                    if np.isclose(mean, 0):
                        message = ("{} has a zero mean. "
                                   "File could be corrupted.".format(var))
                        raise RuntimeError(message)
                    else:
                        print("{} PASSED with a mean of {}.".format(var, mean))
def getUniformSpacing(path, coordinate, xguards=False, yguards=False):
    #{{{docstring
    """
    Fastest way to obtain the grid spacing, assuming an equidistant grid

    Parameters
    ----------
    path : str
        Path to read from
    coordinate : str
        Coordinate to return the spacing of
    xguards : bool
        Whether the ghost points in x should be included
    yguards : bool
        Whether the ghost points in y should be included

    Returns
    -------
    spacing : array-like
        The grid spacing
    """
    #}}}

    with DataFile(os.path.join(path, "BOUT.dmp.0.nc")) as f:
        if coordinate == "x" or coordinate == "y":
            if coordinate == "x":
                # dx
                spacing = f.read("dx")
            elif coordinate == "y":
                # dy
                spacing = f.read("dy")

            shape = spacing.shape
            xSize = shape[0] - 2 * int(f.read("MXG"))
            ySize = shape[1] - 2 * int(f.read("MYG"))

            if not xguards and not yguards:
                spacingEmpty = np.empty((xSize * int(f.read("NXPE")),
                                         ySize * int(f.read("NYPE"))))
            elif xguards and not yguards:
                spacingEmpty = np.empty((xSize * int(f.read("NXPE"))
                                         + 2 * int(f.read("MXG")),
                                         ySize * int(f.read("NYPE"))))
            elif not xguards and yguards:
                spacingEmpty = np.empty((xSize * int(f.read("NXPE")),
                                         ySize * int(f.read("NYPE"))
                                         + 2 * int(f.read("MYG"))))
            elif xguards and yguards:
                spacingEmpty = np.empty((xSize * int(f.read("NXPE"))
                                         + 2 * int(f.read("MXG")),
                                         ySize * int(f.read("NYPE"))
                                         + 2 * int(f.read("MYG"))))

            # The grid is equidistant, so broadcast the first spacing value
            spacingEmpty.fill(spacing[0, 0])
            spacing = spacingEmpty
        elif coordinate == "z":
            # dz
            spacing = f.read("dz")
        else:
            raise ValueError("Unknown coordinate {}".format(coordinate))

    return spacing
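# Usage sketch (path and the choice of guard-cell flags are illustrative
# assumptions): read the equidistant grid spacings from a run directory.
def _example_get_spacing(path="data"):
    dx = getUniformSpacing(path, "x")
    dy = getUniformSpacing(path, "y", xguards=True, yguards=True)
    dz = getUniformSpacing(path, "z")
    return dx, dy, dz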
def __init__(self, out_dir):
    self.out_dir = out_dir
    self.pickle_dir = '{}/pickles'.format(out_dir)
    self.fig_dir = '{}/figures'.format(out_dir)
    os.chdir(out_dir)
    os.system('mkdir -p {}'.format(self.pickle_dir))
    os.system('mkdir -p {}'.format(self.fig_dir))

    self.dat = DataFile('BOUT.dmp.0.nc')
def listKeys(self, simIndex=0, simType='1-base'):
    if simType == '1-base':
        os.chdir('{}/{}'.format(self.outDir, simIndex))
    else:
        os.chdir('{}/{}/{}'.format(self.outDir, simIndex, simType))

    datFile = DataFile('BOUT.dmp.0.nc')
    self.datFile = datFile

    return datFile.keys()
def file_import(name):
    f = DataFile(name)    # Open file
    varlist = f.list()    # Get list of all variables in file
    data = {}             # Create empty dictionary
    for v in varlist:
        data[v] = f.read(v)
    f.close()
    return data
def rmSpuriousTime(newFiles, dmpFiles, shortestCommonLen):
    #{{{docstring
    """
    Will remove the spurious times in the dump files.

    Parameters
    ----------
    newFiles : iterable
        Iterable containing the paths to the new files to be created.
    dmpFiles : iterable
        Iterable containing the paths to the dump files.
    shortestCommonLen : int
        Shortest common time indices
    """
    #}}}

    for n, d in zip(newFiles, dmpFiles):
        print("\nRemoving spurious time {}".format(d))
        # Open the dump file in read mode and create the new file
        with DataFile(n, write=True, create=True) as newF,\
             DataFile(d) as dmp:
            # Loop over the variables in the dmp file
            # Put a 4d variable in the front
            theList = dmp.list()
            ind = theList.index("lnN")
            theList[0], theList[ind] = theList[ind], theList[0]
            for var in theList:
                # Read the data
                dData = dmp.read(var)

                # Find 4D variables and time traces
                if dmp.ndims(var) == 4 or dmp.ndims(var) == 1:
                    print("    Using first {} time points in {}".
                          format(shortestCommonLen, var))
                    # Keep only the common part of the time dimension
                    newData = dData[:shortestCommonLen, ...]
                elif var == "iteration":
                    print("    Fixing 'iteration'")
                    newData = shortestCommonLen - 2
                else:
                    print("    Nothing to be done for {}".format(var))
                    newData = dData.copy()

                newF.write(var, newData, info=True)

        print("{} written".format(n))
def getDataFile(i):
    """Get the DataFile from the cache, if present, otherwise open the
    DataFile
    """
    if datafile_cache is not None:
        return datafile_cache.datafile_list[i]
    else:
        return DataFile(file_list[i])
def rmLastTime(newFiles, dmpFiles):
    #{{{docstring
    """
    Will remove the last time index in the dump files.

    Parameters
    ----------
    newFiles : iterable
        Iterable containing the paths to the new files to be created.
    dmpFiles : iterable
        Iterable containing the paths to the dump files.
    """
    #}}}

    for n, d in zip(newFiles, dmpFiles):
        print("\nRemoving last time point in {}".format(d))
        # Open the dump file in read mode and create the new file
        with DataFile(n, write=True, create=True) as newF,\
             DataFile(d) as dmp:
            # Loop over the variables in the dmp file
            # Put a 4d variable in the front
            theList = dmp.list()
            ind = theList.index("lnN")
            theList[0], theList[ind] = theList[ind], theList[0]
            for var in theList:
                # Read the data
                dData = dmp.read(var)

                # Find 4D variables and time traces
                if dmp.ndims(var) == 4 or dmp.ndims(var) == 1:
                    print("    Removing last time in " + var)
                    # Drop the last time point
                    newData = dData[:-1, ...]
                elif var == "iteration":
                    print("    Fixing 'iteration'")
                    newData = dData - 1
                else:
                    print("    Nothing to be done for {}".format(var))
                    newData = dData.copy()

                newF.write(var, newData, info=True)

        print("{} written".format(n))
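# Sketch of how the helpers above could be chained (an assumed workflow, not
# taken from the original source): check the dump files for differing time
# lengths, then either trim everything to the shortest common length or just
# drop the last, possibly half-written, time point.  The file lists are
# placeholders supplied by the caller.
def _example_fix_dump_files(newFiles, dmpFiles):
    maxDiff, shortestCommonLen = checkForDifferentLengths(dmpFiles)
    if maxDiff > 1:
        rmSpuriousTime(newFiles, dmpFiles, shortestCommonLen)
    else:
        rmLastTime(newFiles, dmpFiles)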
def check_test():
    print("Checking output")

    numFailures = 0
    numTests = 0

    try:
        run = BoutOutputs(runOutputDir, info=False, yguards=True)
    except TypeError:
        # Option not implemented in boutdata.data
        run = BoutOutputs(runOutputDir, yguards=True)
    runExpected = DataFile(runExpectedOutput)

    # Get number of guard cells
    m_guards = (run["MXG"], run["MYG"])
    m_guards_expected = (runExpected["MXG"], runExpected["MYG"])

    # Get names of evolving variables, which we will test
    try:
        evolvingVariables = run.evolvingVariables()
    except AttributeError:
        # This part should be deleted once the BOUT++ repo is updated so that
        # the above works everywhere
        print("Warning: Updated boutdata.data not found")
        from get_evolving_fields import get_evolving_fields
        evolvingVariables = get_evolving_fields(run)

    # Test output
    for name in evolvingVariables:
        if len(run[name].shape) == 1:
            # scalars are diagnostic outputs like wall-time, so not useful
            # to test
            continue
        # exclude second x guard cells as they are not used and may not
        # always be set consistently
        data = testfield_slice(run[name], m_guards)
        expectedData = testfield_slice(runExpected.read(name),
                                       m_guards_expected)
        diff_max, norm_max = testfield_max(data, expectedData)
        numTests = numTests + 1
        if diff_max / norm_max > tolerance and diff_max > abs_tolerance:
            numFailures = numFailures + 1
            print("FAILURE: Test of max error " + str(numTests) + " (" + name
                  + ") failed, with diff_max/norm_max="
                  + str(diff_max / norm_max)
                  + " and diff_max=" + str(diff_max))
        else:
            print("Test of max error " + str(numTests) + " (" + name
                  + ") passed, with diff_max/norm_max="
                  + str(diff_max / norm_max)
                  + " and diff_max=" + str(diff_max))
        diff_mean, norm_mean = testfield_mean(data, expectedData)
        print("Test of mean error " + str(numTests) + " (" + name
              + ") diff_mean/norm_mean=" + str(diff_mean / norm_mean)
              + " and diff_mean=" + str(diff_mean))

    print(str(numTests - numFailures) + "/" + str(numTests)
          + " tests passed in " + testname)

    return numFailures, numTests
def createNewProfile(baseGrid, newProfile, offset, pedestal):
    os.system("cp {} {}".format(baseGrid, newProfile))

    profile(newProfile, "Te0", 5, 100, hwid=0.1, alpha=0.1)
    profile(newProfile, "Ti0", 5, 100, hwid=0.1, alpha=0.1)
    profile(newProfile, "Ne0", offset, pedestal, hwid=0.1, alpha=0.1)

    with DataFile(newProfile, write=True) as d:
        d["Ni0"] = d["Ne0"]

    print("generated {}".format(newProfile))
def change_variable(filename, variable, new_value):
    """Copy a data file, writing `new_value` in place of the named variable.

    NOTE: replacing the variable's data wholesale with `new_value` is an
    assumption based on the function name and signature; every other
    variable is copied unchanged to the new file.
    """
    f = DataFile(filename)
    newfile = DataFile(os.path.splitext(filename)[0] + str(variable) + "."
                       + str(new_value), create=True)

    for key in f.keys():
        if key == str(variable):
            # Write the new value for the requested variable
            newfile.write(key, np.asarray(new_value))
        else:
            # Copy all other variables unchanged
            newfile.write(key, np.asarray(f.read(key)))

    f.close()
    newfile.close()
def calc_curvilinear_curvature(fname, field, grid):
    from scipy.signal import savgol_filter

    f = DataFile(str(fname), write=True)
    B = f.read("B")

    dBydz = np.zeros(np.shape(B))
    dBydx = np.zeros(np.shape(B))
    dBxdz = np.zeros(np.shape(B))
    dBzdx = np.zeros(np.shape(B))
    dx = grid.metric()["dx"]
    dz = grid.metric()["dz"]
    g_11 = grid.metric()["g_xx"]
    g_22 = grid.metric()["g_yy"]
    g_33 = grid.metric()["g_zz"]
    g_12 = 0.0
    g_13 = grid.metric()["g_xz"]
    g_23 = 0.0
    # Jacobian J = sqrt(det(g)) of the symmetric covariant metric tensor
    J = np.sqrt(g_11 * (g_22 * g_33 - g_23 * g_23)
                + g_12 * (g_13 * g_23 - g_12 * g_33)
                + g_13 * (g_12 * g_23 - g_22 * g_13))

    Bx_smooth = np.zeros(B.shape)
    By_smooth = np.zeros(B.shape)
    Bz_smooth = np.zeros(B.shape)

    # savgol_filter needs odd integer window lengths
    window_z = int(np.ceil(B.shape[-1] / 21) // 2 * 2 + 1)
    window_x = int(np.ceil(B.shape[0] / 7) // 2 * 2 + 1)

    for y in np.arange(0, B.shape[1]):
        pol, _ = grid.getPoloidalGrid(y)
        R = pol.R
        Z = pol.Z
        for x in np.arange(0, B.shape[0]):
            # Smooth the field components along z, then differentiate
            Bx_smooth[x, y, :] = savgol_filter(
                field.Bxfunc(R[x, :], y, Z[x, :]), window_z, 5)
            By_smooth[x, y, :] = savgol_filter(
                field.Byfunc(R[x, :], y, Z[x, :]), window_z, 5)

            dBydz[x, y, :] = calc.deriv(By_smooth[x, y, :]) / dz[x, y, :]
            dBxdz[x, y, :] = calc.deriv(Bx_smooth[x, y, :]) / dz[x, y, :]
        for z in np.arange(0, B.shape[-1]):
            # Smooth along x, then differentiate
            Bz_smooth[:, y, z] = savgol_filter(
                field.Bzfunc(R[:, z], y, Z[:, z]), window_x, 5)
            dBzdx[:, y, z] = calc.deriv(Bz_smooth[:, y, z]) / dx[:, y, z]
            dBydx[:, y, z] = calc.deriv(By_smooth[:, y, z]) / dx[:, y, z]

    # Curvature components
    bxcvx = (-1 / J) * (dBydz / B**2.)
    bxcvy = (1 / J) * ((dBxdz - dBzdx) / B**2.)
    bxcvz = (1 / J) * (dBydx / B**2.)

    f.write('bxcvz', bxcvz)
    f.write('bxcvx', bxcvx)
    f.write('bxcvy', bxcvy)
    f.close()
def profile(filename, name, offset, pedestal, hwid=0.1, alpha=0.1):
    """
    Calculate a radial profile, and add to file
    """
    with DataFile(filename, write=True) as d:
        nx = d["nx"]
        ny = d["ny"]
        x = np.arange(nx)
        ix = d["ixseps1"]

        prof = mtanh_profile(x, ix, hwid * nx, offset, pedestal, alpha)
        prof2d = np.zeros([nx, ny])
        for y in range(ny):
            prof2d[:, y] = prof

        # Handle X-points

        # Lower inner PF region
        j11 = d["jyseps1_1"]
        if j11 >= 0:
            # Reflect around separatrix
            ix = d["ixseps1"]
            for x in range(0, ix):
                prof2d[x, 0:(j11 + 1)] = prof2d[np.clip(2 * ix - x, 0, nx - 1),
                                                0:(j11 + 1)]

        # Lower outer PF region
        j22 = d["jyseps2_2"]
        if j22 < ny - 1:
            ix = d["ixseps1"]
            for x in range(0, ix):
                prof2d[x, (j22 + 1):] = prof2d[np.clip(2 * ix - x, 0, nx - 1),
                                               (j22 + 1):]

        # Upper PF region
        j21 = d["jyseps2_1"]
        j12 = d["jyseps1_2"]
        if j21 != j12:
            ix = d["ixseps2"]
            for x in range(0, ix):
                prof2d[x, (j21 + 1):(j12 + 1)] = prof2d[
                    np.clip(2 * ix - x, 0, nx - 1), (j21 + 1):(j12 + 1)]

        d.write(name, prof2d)
def addvar(var, value, path="."):
    """Adds a variable with constant value to all restart files.

    .. warning:: Modifies restart files in place! This is in contrast
                 to most of the functions in this module!

    This is useful for restarting simulations whilst turning on new
    equations. By default BOUT++ throws an error if an evolving variable is
    not in the restart file. By setting an option the variable can be set
    to zero. This allows it to start with a non-zero value.

    Parameters
    ----------
    var : str
        The name of the variable to add
    value : float
        Constant value for the variable
    path : str, optional
        Input path to data files (default: current directory)

    """
    file_list = glob.glob(os.path.join(path, "BOUT.restart.*"))
    nfiles = len(file_list)

    print("Number of restart files: %d" % (nfiles, ))
    # Loop through all the restart files
    for filename in file_list:
        print(filename)
        # Open the restart file for writing (modification)
        with DataFile(filename, write=True) as df:
            size = None
            # Find a 3D variable and get its size
            for varname in df.list():
                size = df.size(varname)
                if len(size) == 3:
                    break
            if size is None:
                raise Exception("no 3D variables found")

            # Create a new 3D array with input value
            data = np.zeros(size) + value

            # Set the variable in the NetCDF file
            df.write(var, data)
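# Usage sketch (variable name and path are examples): add a new evolving
# variable "Vort", initialised to zero, to every restart file under ./data
# before restarting with an extra equation switched on.
def _example_addvar():
    addvar("Vort", 0.0, path="data")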
def getMYG(path):
    #{{{docstring
    """
    Fastest way to obtain MYG

    Parameters
    ----------
    path : str
        Path to read from

    Returns
    -------
    MYG : int
        Number of ghost points in y
    """
    #}}}

    with DataFile(os.path.join(path, "BOUT.dmp.0.nc")) as f:
        return f.read("MYG")
def collectData(self, quant, simIndex=0, simType='1-base'):
    try:
        quant2 = np.squeeze(self.unPickle(quant, simIndex, simType))
    except FileNotFoundError:
        print('{} has not been pickled'.format(quant))
        if simType == '1-base':
            os.chdir('{}/{}'.format(self.outDir, simIndex))
        else:
            os.chdir('{}/{}/{}'.format(self.outDir, simIndex, simType))
        try:
            quant2 = np.squeeze(collect(quant))
        except ValueError:
            print('quant in gridFile')
            self.gridFile = fnmatch.filter(next(os.walk('./'))[2],
                                           '*profile*')[0]
            grid_dat = DataFile(self.gridFile)
            quant2 = grid_dat[quant]

    return quant2
def getGridSizes(path, coordinate, varName="lnN", includeGhost=False):
    #{{{docstring
    """
    Fastest way to obtain coordinate sizes.

    Parameters
    ----------
    path : str
        Path to read from
    coordinate : ["x"|"y"|"z"|"t"]
        Coordinate to return size of
    varName : str
        Field to get the size of
    includeGhost : bool
        If the ghost points should be included

    Returns
    -------
    coordinateSize : int
        Size of the desired coordinate
    """
    #}}}

    with DataFile(os.path.join(path, "BOUT.dmp.0.nc")) as f:
        if coordinate == "x":
            # nx
            coordinateSize =\
                (f.size(varName)[1] - 2*int(f.read("MXG")))*f.read("NXPE")
            if includeGhost:
                coordinateSize += 2 * int(f.read("MXG"))
        elif coordinate == "y":
            # ny
            coordinateSize =\
                (f.size(varName)[2] - 2*int(f.read("MYG")))*f.read("NYPE")
            if includeGhost:
                coordinateSize += 2 * int(f.read("MYG"))
        elif coordinate == "z":
            # nz
            coordinateSize = f.size(varName)[3]
        elif coordinate == "t":
            # nt
            coordinateSize = f.size(varName)[0]
        else:
            raise ValueError("Unknown coordinate {}".format(coordinate))

    return coordinateSize
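# Usage sketch (assumes the default varName="lnN" exists as a 4D (t, x, y, z)
# field in BOUT.dmp.0.nc; pass another field name otherwise):
def _example_grid_sizes(path="data"):
    nx = getGridSizes(path, "x")
    ny = getGridSizes(path, "y", includeGhost=True)
    nz = getGridSizes(path, "z")
    nt = getGridSizes(path, "t")
    return nx, ny, nz, nt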
def calc_qPar(self, simIndex=0, simType='3-addC'):
    if simType == '1-base':
        os.chdir('{}/{}'.format(self.outDir, simIndex))
    else:
        os.chdir('{}/{}/{}'.format(self.outDir, simIndex, simType))

    datFile = DataFile('BOUT.dmp.0.nc')
    Tnorm = float(datFile['Tnorm'])
    Nnorm = float(datFile['Nnorm'])
    gamma_e = 4
    gamma_i = 2.5
    mi = 3.34524e-27  # 2*Mp - deuterium
    e = 1.6e-19

    Te = self.collectData('Telim', simIndex, simType)[-1, :, -1]*Tnorm
    Ti = self.collectData('Tilim', simIndex, simType)[-1, :, -1]*Tnorm
    n = self.collectData('Ne', simIndex, simType)[-1, :, -1]*Nnorm

    Cs = np.sqrt(Te + (5/3)*Ti)*np.sqrt(e/mi)

    q_e = gamma_e * n * e * Te * Cs
    q_i = gamma_i * n * e * Ti * Cs

    return q_e + q_i
def file_import(name):
    """Read all variables from file into a dictionary

    Parameters
    ----------
    name : str
        Name of file to read

    Returns
    -------
    dict
        Dictionary containing all the variables in the file
    """
    f = DataFile(name)    # Open file
    varlist = f.list()    # Get list of all variables in file
    data = {}             # Create empty dictionary
    for v in varlist:
        data[v] = f.read(v)
    f.close()
    return data
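# Usage sketch (the grid file name and variable are examples): read a whole
# grid file into a dictionary and look up a quantity by name.
def _example_file_import():
    grid = file_import("bout.grd.nc")
    return grid["Rxy"]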
def dimensions(varname, path=".", prefix="BOUT.dmp"):
    """Return the names of dimensions of a variable in an output file

    Parameters
    ----------
    varname : str
        Name of the variable
    path : str, optional
        Path to data files (default: ".")
    prefix : str, optional
        File prefix (default: "BOUT.dmp")

    Returns
    -------
    tuple of strs
        The elements of the tuple give the names of corresponding variable
        dimensions

    """
    file_list, _, _ = findFiles(path, prefix)
    return DataFile(file_list[0]).dimensions(varname)
def __init__(self, path=".", prefix="BOUT.dmp"):
    """
    Initialise BoutOutputs object
    """
    self._path = path
    self._prefix = prefix

    # Label for this data
    self.label = path

    # Check that the path contains some data
    file_list = glob.glob(os.path.join(path, prefix + "*.nc"))
    if len(file_list) == 0:
        raise ValueError("ERROR: No data files found")

    # Available variables
    self.varNames = []

    with DataFile(file_list[0]) as f:
        # Get variable names
        self.varNames = f.keys()
def collectTime(paths, tInd=None):
    #{{{docstring
    """
    Collects the time

    Parameters
    ----------
    paths : iterable of strings
        The paths to use when collecting the variable. Must be in ascending
        temporal order, as the time arrays will be concatenated.
    tInd : [None|tuple]
        Start and end of the time if not None

    Returns
    -------
    time : 1d-array
        Array of the time at the different time indices
    """
    #}}}

    # Initialize
    time = None

    for path in paths:
        with DataFile(os.path.join(path, "BOUT.dmp.0.nc")) as f:
            if time is None:
                time = f.read("t_array")
            else:
                # Remove the first time point of the current file, as it is
                # the same as the last point of the previous file
                time = np.concatenate((time, f.read("t_array")[1:]), axis=0)

    if tInd is not None:
        # NOTE: +1 since the collect range is INCLUSIVE, i.e. not working
        #       like a python slice
        lastT = tInd[1] + 1 if tInd[1] is not None else None
        time = time[tInd[0]:lastT]

    return time
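# Usage sketch (paths are placeholders): build a single time axis spanning an
# initial run and a restarted run, keeping only the first 100 outputs (note
# that tInd is inclusive).
def _example_collect_time():
    paths = ("run1", "run1_restart")
    return collectTime(paths, tInd=(0, 99))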
def gridData(self, simIndex=0):
    os.chdir('{}/{}'.format(self.outDir, simIndex))
    self.gridFile = fnmatch.filter(next(os.walk('./'))[2], '*profile*')[0]
    grid_dat = DataFile(self.gridFile)
    self.grid_dat = grid_dat

    self.j11 = int(grid_dat["jyseps1_1"])
    self.j12 = int(grid_dat["jyseps1_2"])
    self.j21 = int(grid_dat["jyseps2_1"])
    self.j22 = int(grid_dat["jyseps2_2"])
    self.ix1 = int(grid_dat["ixseps1"])
    self.ix2 = int(grid_dat["ixseps2"])
    try:
        self.nin = int(grid_dat["ny_inner"])
    except KeyError:
        self.nin = self.j12
    self.nx = int(grid_dat["nx"])
    self.ny = int(grid_dat["ny"])
    self.R = grid_dat['Rxy']
    self.Z = grid_dat['Zxy']

    R2 = self.R[:, self.j12:self.j22]
    self.outMid_idx = self.j12 + np.where(R2 == np.amax(R2))[1][0]
def checkProfiles(gridFiles=[], densities=[]):
    if len(densities) < 1:
        densities = np.zeros(len(gridFiles))

    grids = []
    ne = []
    te = []
    for i in gridFiles:
        grd = DataFile(i)
        grids.append(grd)
        ne.append(grd["Ne0"] * 1e20)
        te.append(grd["Te0"])

    ix1 = grids[0]["ixseps1"]
    j11 = grids[0]["jyseps1_1"]
    j12 = grids[0]["jyseps1_2"]
    j22 = grids[0]["jyseps2_2"]
    j21 = grids[0]["jyseps2_1"]
    mid = int((j12 + j22) / 2)

    colors = getDistinctColors(len(gridFiles))

    plt.figure(1)
    plt.axvline(x=ix1, color="black", linestyle="--")
    for i in range(len(ne)):
        plt.plot(ne[i][:, mid], color=colors[i], label=gridFiles[i])
        print(densities[i])
        # Target densities are given in units of 1e19
        plt.axhline(y=float(densities[i]) * 1e19, color=colors[i],
                    linestyle="--")

    plt.figure(2)
    plt.axvline(x=ix1, color="black", linestyle="--")
    for i in range(len(te)):
        plt.plot(te[i][:, mid], color=colors[i], label=gridFiles[i])
        plt.axhline(y=te[i][ix1, mid], color=colors[i], linestyle="--")

    plt.legend()
    plt.show()
def addnoise(path=".", var=None, scale=1e-5):
    """Add random noise to restart files

    .. warning:: Modifies restart files in place! This is in contrast
                 to most of the functions in this module!

    Parameters
    ----------
    path : str, optional
        Path to restart files (default: current directory)
    var : str, optional
        The variable to modify. By default all 3D variables are modified
    scale : float
        Amplitude of the noise. Gaussian noise is used, with zero mean
        and this parameter as the standard deviation

    """
    file_list = glob.glob(os.path.join(path, "BOUT.restart.*"))
    nfiles = len(file_list)
    print("Number of restart files: %d" % (nfiles, ))

    for file in file_list:
        print(file)
        with DataFile(file, write=True) as d:
            if var is None:
                for v in d.list():
                    if d.ndims(v) == 3:
                        print(" -> " + v)
                        data = d.read(v, asBoutArray=True)
                        data += normal(scale=scale, size=data.shape)
                        d.write(v, data)
            else:
                # Modify a single variable
                print(" -> " + var)
                data = d.read(var)
                data += normal(scale=scale, size=data.shape)
                d.write(var, data)
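# Usage sketch (path and variable are examples): perturb only the density
# field in the restart files with a small Gaussian amplitude.
def _example_addnoise():
    addnoise(path="data", var="Ne", scale=1e-5)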