def file_import(name):
    """Read every variable from a data file into a dictionary.

    Parameters
    ----------
    name : str
        Name of the file, opened via DataFile.

    Returns
    -------
    dict
        Maps each variable name in the file to the data read for it.
    """
    f = DataFile(name)  # Open file
    try:
        # Read all variables listed in the file
        data = {v: f.read(v) for v in f.list()}
    finally:
        # BUG FIX: close the file even if a read raises, so the handle
        # is not leaked on error.
        f.close()
    return data
def split(nxpe, nype, path="data", output="./", informat="nc", outformat=None):
    """Split restart files across NXPE x NYPE processors.

    Returns True on success
    """
    if outformat is None:
        outformat = informat

    # Number of guard cells in each direction
    mxg = 2
    myg = 2

    npes = nxpe * nype
    if npes <= 0:
        print("ERROR: Negative or zero number of processors")
        return False

    if path == output:
        print("ERROR: Can't overwrite restart files")
        return False

    file_list = glob.glob(os.path.join(path, "BOUT.restart.*." + informat))
    nfiles = len(file_list)
    if nfiles == 0:
        print("ERROR: No restart files found")
        return False

    # Read old processor layout
    # BUG FIX: glob already returns paths joined with 'path'; joining again
    # produced "path/path/BOUT.restart..." (redistribute() opens file_list[0]
    # directly, which is the correct pattern).
    f = DataFile(file_list[0])

    # Get list of variables
    var_list = f.list()
    if len(var_list) == 0:
        print("ERROR: No data found")
        return False

    old_npes = f.read('NPES')
    old_nxpe = f.read('NXPE')

    if nfiles != old_npes:
        print("WARNING: Number of restart files inconsistent with NPES")
        print("Setting nfiles = " + str(old_npes))
        nfiles = old_npes

    if old_npes % old_nxpe != 0:
        print("ERROR: Old NPES is not a multiple of old NXPE")
        return False

    # BUG FIX: use integer division (plain '/' yields a float on Python 3)
    old_nype = old_npes // old_nxpe

    if nype % old_nype != 0:
        print("SORRY: New nype must be a multiple of old nype")
        return False
    if nxpe % old_nxpe != 0:
        print("SORRY: New nxpe must be a multiple of old nxpe")
        return False

    # Get dimension sizes from the first 3D (evolving) variable
    old_mxsub = 0
    old_mysub = 0
    mz = 0
    for v in var_list:
        if f.ndims(v) == 3:
            s = f.size(v)
            old_mxsub = s[0] - 2 * mxg
            old_mysub = s[1] - 2 * myg
            mz = s[2]
            break
    f.close()

    # Calculate total size of the grid
    nx = old_mxsub * old_nxpe
    ny = old_mysub * old_nype
    print("Grid sizes: ", nx, ny, mz)

    # BUG FIX: xs, ys, mxsub and mysub were used below but never defined.
    # Each old file is split into xs x ys new files; the new subdomain sizes
    # are the old sizes divided by those factors (divisibility guaranteed by
    # the multiple-of checks above).
    xs = nxpe // old_nxpe
    ys = nype // old_nype
    mxsub = old_mxsub // xs
    mysub = old_mysub // ys

    # Create the new restart files
    for mype in range(npes):
        # Calculate X and Y processor numbers
        pex = mype % nxpe
        pey = mype // nxpe

        old_pex = pex // xs
        old_pey = pey // ys

        old_x = pex % xs
        old_y = pey % ys

        # Old restart file number
        old_mype = old_nxpe * old_pey + old_pex

        # Calculate indices in old restart file
        xmin = old_x * mxsub
        xmax = xmin + mxsub - 1 + 2 * mxg
        ymin = old_y * mysub
        ymax = ymin + mysub - 1 + 2 * myg

        print("New: " + str(mype) + " (" + str(pex) + ", " + str(pey) + ")")
        print(" => " + str(old_mype) + " (" + str(old_pex) + ", "
              + str(old_pey) + ") : (" + str(old_x) + ", " + str(old_y) + ")")
    # NOTE(review): the remainder of the original implementation (copying the
    # selected index ranges into the new restart files and returning True) is
    # not present in this chunk of the file.
from boututils import DataFile
from boutdata import collect

# Load in the datafile as a BOUT DataFile object
# path links to the directory containing the data
# file1 is the first file in this subdirectory (NB requires / at start)
path = '/hwdisks/home/nrw504/BOUT/blobsims/3DMASTmodel/RUNS/RUN13'
file1 = '/BOUT.dmp.0.nc'

f = DataFile()
print 'Opening file: ' + path + file1
# NOTE(review): calls DataFile.open as an unbound method on instance f —
# presumably equivalent to f.open(path + file1); confirm against boututils.
DataFile.open(f, path + file1)

# Calculates the ranges of time, x, y and z in the dataset
# See BOUT manual p.27
# NOTE(review): 'np' is not imported in this chunk — assumed to be numpy,
# imported elsewhere in the file.
t = [int(np.min(f.read('t_array'))), int(np.max(f.read('t_array')))]
# Global grid sizes reconstructed from the per-processor decomposition
nx = f.read('NXPE') * f.read('MXSUB') + 2*f.read('MXG')
ny = f.read('NYPE') * f.read('MYSUB') + 2*f.read('MYG')
nz = f.read('MZ')

# Function query_yes_no released under MIT license
# http://code.activestate.com/recipes/577058-query-yesno/ Accessed 6/8/2012
# Returns 'yes' if user responds 'y', 'no' if 'n', &c.
def query_yes_no(question, default=None):
    """Ask a yes/no question via raw_input() and return their answer.

    "question" is a string that is presented to the user.
    "default" is the presumed answer if the user just hits <Enter>.
    It must be "yes" (the default), "no" or None (meaning
    an answer is required of the user).
    """
    # NOTE(review): the implementation of this function is truncated in this
    # chunk of the file; only the docstring is visible here.
def resizeY(newy, path="data", output=".", informat="nc", outformat=None, myg=2):
    """Resize all the restart files in Y.

    Parameters
    ----------
    newy : int
        New total number of Y points (including guard cells) per file.
    path : str, optional
        Directory containing the input restart files.
    output : str, optional
        Directory where resized restart files are written.
    informat : str, optional
        File suffix of the input restart files.
    outformat : str, optional
        File suffix of the output files; defaults to ``informat``.
    myg : int, optional
        Number of Y guard cells.

    Returns
    -------
    bool or None
        False if no restart files are found, otherwise None.
    """
    if outformat is None:
        outformat = informat

    file_list = glob.glob(os.path.join(path, "BOUT.restart.*." + informat))

    nfiles = len(file_list)
    if nfiles == 0:
        print("ERROR: No restart files found")
        return False

    for i in range(nfiles):
        # Open each data file
        infname = os.path.join(path, "BOUT.restart." + str(i) + "." + informat)
        outfname = os.path.join(output, "BOUT.restart." + str(i) + "." + outformat)
        # BUG FIX: printf-style arguments were passed straight to print(),
        # which does not interpolate them; format the message explicitly.
        print("Processing %s -> %s" % (infname, outfname))

        infile = DataFile(infname)
        outfile = DataFile(outfname, create=True)

        # Copy basic information
        for var in ["hist_hi", "NPES", "NXPE", "tt"]:
            data = infile.read(var)
            try:
                data = data[0]  # Convert length-1 array to scalar
            except (TypeError, IndexError):
                pass  # Already a scalar — narrowed from a bare except
            outfile.write(var, data)

        # Interpolate each evolving (3D) variable onto the new y grid
        for var in infile.list():
            if infile.ndims(var) == 3:
                # Could be an evolving variable [x,y,z]
                print(" -> " + var)

                indata = infile.read(var)
                nx, ny, nz = indata.shape

                # Normalised y coordinate (cell centres) in input and output
                iny = (arange(ny) - myg + 0.5) / (ny - 2 * myg)
                outy = (arange(newy) - myg + 0.5) / (newy - 2 * myg)

                outdata = zeros([nx, newy, nz])
                for x in range(nx):
                    for z in range(nz):
                        # Linear interpolation; points outside the old range
                        # are filled with zero
                        f = interp1d(iny, indata[x, :, z],
                                     bounds_error=False, fill_value=0.0)
                        outdata[x, :, z] = f(outy)

                outfile.write(var, outdata)

        infile.close()
        outfile.close()
def redistribute(npes, path="data", nxpe=None, output=".", informat=None,
                 outformat=None, mxg=2, myg=2):
    """Resize restart files across NPES processors.

    Does not check if new processor arrangement is compatible with the
    branch cuts. In this respect restart.split is safer. However, BOUT++
    checks the topology during initialisation anyway so this is not too
    serious.

    Parameters
    ----------
    npes : int
        number of processors for the new restart files
    path : string, optional
        location of old restart files
    nxpe : int, optional
        number of processors to use in the x-direction (determines split:
        npes = nxpe * nype). Default is None which uses the same algorithm
        as BoutMesh (but without topology information) to determine a
        suitable value for nxpe.
    output : string, optional
        location to save new restart files
    informat : string, optional
        specify file format of old restart files (must be a suffix
        understood by DataFile, e.g. 'nc'). Default uses the format of the
        first 'BOUT.restart.*' file listed by glob.glob.
    outformat : string, optional
        specify file format of new restart files (must be a suffix
        understood by DataFile, e.g. 'nc'). Default is to use the same as
        informat.

    Returns
    -------
    True on success
    """
    if npes <= 0:
        print("ERROR: Negative or zero number of processors")
        return False

    if path == output:
        print("ERROR: Can't overwrite restart files")
        return False

    if informat is None:
        file_list = glob.glob(os.path.join(path, "BOUT.restart.*"))
    else:
        file_list = glob.glob(os.path.join(path, "BOUT.restart.*." + informat))

    nfiles = len(file_list)
    # BUG FIX: this check originally came *after* opening file_list[0],
    # which raised IndexError when no files were found.
    if nfiles == 0:
        print("ERROR: No restart files found")
        return False

    # Read old processor layout
    f = DataFile(file_list[0])

    # Get list of variables
    var_list = f.list()
    if len(var_list) == 0:
        print("ERROR: No data found")
        return False

    old_npes = f.read('NPES')
    old_nxpe = f.read('NXPE')
    old_nype = int(old_npes / old_nxpe)

    if nfiles != old_npes:
        print("WARNING: Number of restart files inconsistent with NPES")
        print("Setting nfiles = " + str(old_npes))
        nfiles = old_npes

    # Take the file format from the first file found
    informat = file_list[0].split(".")[-1]
    if outformat is None:
        outformat = informat

    # Get dimension sizes from the first 3D (evolving) variable
    old_mxsub = 0
    old_mysub = 0
    mz = 0

    for v in var_list:
        if f.ndims(v) == 3:
            s = f.size(v)
            old_mxsub = s[0] - 2 * mxg
            if old_mxsub < 0:
                # Fewer x points than assumed guard cells: deduce mxg
                if s[0] == 1:
                    old_mxsub = 1
                    mxg = 0
                elif s[0] == 3:
                    old_mxsub = 1
                    mxg = 1
                else:
                    print("Number of x points is wrong?")
                    return False

            old_mysub = s[1] - 2 * myg
            if old_mysub < 0:
                if s[1] == 1:
                    old_mysub = 1
                    myg = 0
                elif s[1] == 3:
                    old_mysub = 1
                    myg = 1
                else:
                    print("Number of y points is wrong?")
                    return False

            mz = s[2]
            break

    # Calculate total size of the grid
    nx = old_mxsub * old_nxpe
    ny = old_mysub * old_nype
    print("Grid sizes: ", nx, ny, mz)

    if nxpe is None:
        # Copy algorithm from BoutMesh for selecting nxpe
        ideal = sqrt(float(nx) * float(npes) / float(ny))  # square domains
        for i in range(1, npes + 1):
            if (npes % i == 0 and nx % i == 0 and int(nx / i) >= mxg
                    and ny % (npes // i) == 0):
                # Found an acceptable value
                # Warning: does not check branch cuts!
                if nxpe is None or abs(ideal - i) < abs(ideal - nxpe):
                    nxpe = i  # Keep value nearest to the ideal
        if nxpe is None:
            print("ERROR: could not find a valid value for nxpe")
            return False

    nype = int(npes / nxpe)

    outfile_list = []
    for i in range(npes):
        outpath = os.path.join(output, "BOUT.restart." + str(i) + "." + outformat)
        outfile_list.append(DataFile(outpath, write=True, create=True))

    infile_list = []
    for i in range(old_npes):
        # BUG FIX: the input files were opened with the *outformat* suffix;
        # they must be read with the input format.
        inpath = os.path.join(path, "BOUT.restart." + str(i) + "." + informat)
        infile_list.append(DataFile(inpath))

    old_mxsub = int(nx / old_nxpe)
    old_mysub = int(ny / old_nype)
    mxsub = int(nx / nxpe)
    mysub = int(ny / nype)

    for v in var_list:
        ndims = f.ndims(v)

        # collect data
        if ndims == 0:
            # scalar
            data = f.read(v)
        elif ndims in (2, 3):
            # Assemble the global field from each old processor's subdomain,
            # keeping boundary cells only at the edges of the domain.
            # BUG FIX: the 2D branch originally allocated with the undefined
            # name 'nyg' (NameError); it should be myg.
            if ndims == 2:
                data = numpy.zeros((nx + 2 * mxg, ny + 2 * myg))
            else:
                data = numpy.zeros((nx + 2 * mxg, ny + 2 * myg, mz))
            for i in range(old_npes):
                ix = i % old_nxpe
                iy = int(i / old_nxpe)
                # Keep guard/boundary cells only at the domain edges
                ixstart = 0 if ix == 0 else mxg
                ixend = 0 if ix == old_nxpe - 1 else -mxg
                iystart = 0 if iy == 0 else myg
                iyend = 0 if iy == old_nype - 1 else -myg
                local = infile_list[i].read(v)[
                    ixstart:old_mxsub + 2 * mxg + ixend,
                    iystart:old_mysub + 2 * myg + iyend]
                data[ix * old_mxsub + ixstart:(ix + 1) * old_mxsub + 2 * mxg + ixend,
                     iy * old_mysub + iystart:(iy + 1) * old_mysub + 2 * myg + iyend] = local
        else:
            print("ERROR: variable found with unexpected number of dimensions,",
                  ndims, v)
            return False

        # write data
        for i in range(npes):
            ix = i % nxpe
            iy = int(i / nxpe)
            outfile = outfile_list[i]
            if v == "NPES":
                # Processor layout variables take the *new* values
                outfile.write(v, npes)
            elif v == "NXPE":
                outfile.write(v, nxpe)
            elif ndims == 0:
                # scalar
                outfile.write(v, data)
            elif ndims == 2:
                # Field2D
                outfile.write(v,
                              data[ix * mxsub:(ix + 1) * mxsub + 2 * mxg,
                                   iy * mysub:(iy + 1) * mysub + 2 * myg])
            elif ndims == 3:
                # Field3D
                outfile.write(v,
                              data[ix * mxsub:(ix + 1) * mxsub + 2 * mxg,
                                   iy * mysub:(iy + 1) * mysub + 2 * myg, :])
            else:
                print("ERROR: variable found with unexpected number of dimensions,",
                      f.ndims(v))

    f.close()

    for infile in infile_list:
        infile.close()
    for outfile in outfile_list:
        outfile.close()

    return True
def create(averagelast=1, final=-1, path="data", output="./", informat="nc",
           outformat=None):
    """Create restart files from data (dmp) files.

    Inputs
    ======
    averagelast   Number of time points to average over.
                  Default is 1 i.e. just take last time-point
    final         The last time point to use. Default is last (-1)
    path          Path to the input data files
    output        Path where the output restart files should go
    informat      Format of the input data files
    outformat     Format of the output restart files
    """
    if outformat is None:
        outformat = informat

    file_list = glob.glob(os.path.join(path, "BOUT.dmp.*." + informat))
    nfiles = len(file_list)

    # BUG FIX: print((...)) is a 2to3 artifact that prints a tuple repr
    # such as ('Number of data files: ', 4); format the messages instead.
    print("Number of data files: " + str(nfiles))

    for i in range(nfiles):
        # Open each data file
        infname = os.path.join(path, "BOUT.dmp." + str(i) + "." + informat)
        outfname = os.path.join(output, "BOUT.restart." + str(i) + "." + outformat)
        print(infname + " -> " + outfname)

        infile = DataFile(infname)
        outfile = DataFile(outfname, create=True)

        # Get the data always needed in restart files
        hist_hi = infile.read("iteration")
        print("hist_hi = " + str(hist_hi))
        outfile.write("hist_hi", hist_hi)

        t_array = infile.read("t_array")
        tt = t_array[final]
        print("tt = " + str(tt))
        outfile.write("tt", tt)

        NXPE = infile.read("NXPE")
        NYPE = infile.read("NYPE")
        NPES = NXPE * NYPE
        print("NPES = " + str(NPES) + " NXPE = " + str(NXPE))
        outfile.write("NPES", NPES)
        outfile.write("NXPE", NXPE)

        # Get a list of variables
        varnames = infile.list()
        for var in varnames:
            if infile.ndims(var) == 4:
                # Could be an evolving variable
                print(" -> " + var)

                data = infile.read(var)
                if averagelast == 1:
                    # Take a single time point
                    # (renamed from 'slice', which shadowed the builtin)
                    tslice = data[final, :, :, :]
                else:
                    # Average over the last 'averagelast' time points
                    tslice = mean(data[(final - averagelast):final, :, :, :],
                                  axis=0)
                print(tslice.shape)
                outfile.write(var, tslice)

        infile.close()
        outfile.close()
def pol_slice(var3d, gridfile, n=1, zangle=0.0):
    """data2d = pol_slice(data3d, 'gridfile', n=1, zangle=0.0)

    Take a slice through a 3D [x,y,z] variable at fixed toroidal angle,
    using the toroidal shift (qinty or zShift) from the grid file.
    Returns a 2D [x,y] array, or None on error.
    """
    n = int(n)
    zangle = float(zangle)

    s = np.shape(var3d)
    if len(s) != 3:
        print("ERROR: pol_slice expects a 3D variable")
        return None
    nx, ny, nz = s

    dz = 2. * np.pi / float(n * (nz - 1))

    try:
        # Open the grid file
        gf = DataFile(gridfile)

        # Check the grid size is correct
        if gf.read("nx") != nx:
            print("ERROR: Grid X size is different to the variable")
            return None
        if gf.read("ny") != ny:
            print("ERROR: Grid Y size is different to the variable")
            return None

        # Get the toroidal shift
        zShift = gf.read("qinty")
        # BUG FIX: 'zShift != None' on a numpy array does an elementwise
        # comparison whose truth value raises ValueError (which the old
        # bare except then swallowed); use an identity check instead.
        if zShift is not None:
            print("Using qinty as toroidal shift angle")
        else:
            zShift = gf.read("zShift")
            if zShift is not None:
                print("Using zShift as toroidal shift angle")
            else:
                print("ERROR: Neither qinty nor zShift found")
                return None

        gf.close()
    except Exception:
        # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
        # still propagate
        print("ERROR: pol_slice couldn't read grid file")
        return None

    var2d = np.zeros([nx, ny])

    ######################################
    # Perform 2D slice: 3-point (quadratic Lagrange) interpolation in z

    zind = old_div((zangle - zShift), dz)
    z0f = np.floor(zind)
    z0 = z0f.astype(int)
    p = zind - z0f  # fractional part of the index

    # Make z0 between 0 and (nz-2)
    z0 = ((z0 % (nz - 1)) + (nz - 1)) % (nz - 1)

    # Get z+ and z-
    zp = (z0 + 1) % (nz - 1)
    zm = (z0 - 1 + (nz - 1)) % (nz - 1)

    # There may be some more cunning way to do this indexing
    for x in np.arange(nx):
        for y in np.arange(ny):
            var2d[x, y] = 0.5*p[x, y]*(p[x, y]-1.0) * var3d[x, y, zm[x, y]] + \
                          (1.0 - p[x, y]*p[x, y]) * var3d[x, y, z0[x, y]] + \
                          0.5*p[x, y]*(p[x, y]+1.0) * var3d[x, y, zp[x, y]]

    return var2d
def collect(varname, xind=None, yind=None, zind=None, tind=None, path=".",
            yguards=False, info=True, prefix="BOUT.dmp"):
    """Collect a variable from a set of BOUT++ outputs.

    data = collect(name)

    name   Name of the variable (string)

    Optional arguments:

      xind = [min,max]   Range of X indices to collect
      yind = [min,max]   Range of Y indices to collect
      zind = [min,max]   Range of Z indices to collect
      tind = [min,max]   Range of T indices to collect

      path    = "."          Path to data files
      prefix  = "BOUT.dmp"   File prefix
      yguards = False        Collect Y boundary guard cells?
      info    = True         Print information about collect?
    """
    # Search for BOUT++ dump files in NetCDF format
    file_list = glob.glob(os.path.join(path, prefix + ".*.nc"))
    if file_list == []:
        print("ERROR: No data files found")
        return None
    nfiles = len(file_list)

    # Read data from the first file
    f = DataFile(file_list[0])

    try:
        dimens = f.dimensions(varname)
        ndims = len(dimens)
    except KeyError:
        print("ERROR: Variable '" + varname + "' not found")
        return None

    if ndims < 2:
        # Just read from file
        data = f.read(varname)
        f.close()
        return data

    if ndims > 4:
        print("ERROR: Too many dimensions")
        raise CollectError

    mxsub = f.read("MXSUB")
    mysub = f.read("MYSUB")
    mz = f.read("MZ")
    myg = f.read("MYG")
    t_array = f.read("t_array")
    nt = len(t_array)

    if info:
        print("mxsub = %d mysub = %d mz = %d\n" % (mxsub, mysub, mz))

    # Get the version of BOUT++ (should be > 0.6 for NetCDF anyway)
    try:
        v = f.read("BOUT_VERSION")  # raises KeyError for pre-0.2 files

        # 2D decomposition
        nxpe = f.read("NXPE")
        mxg = f.read("MXG")
        nype = f.read("NYPE")
        npe = nxpe * nype
        if info:
            print("nxpe = %d, nype = %d, npe = %d\n" % (nxpe, nype, npe))
        if npe < nfiles:
            print("WARNING: More files than expected (" + str(npe) + ")")
        elif npe > nfiles:
            print("WARNING: Some files missing. Expected " + str(npe))

        nx = nxpe * mxsub + 2 * mxg
    except KeyError:
        print("BOUT++ version : Pre-0.2")
        # Assume number of files is correct
        # No decomposition in X
        nx = mxsub
        mxg = 0
        nxpe = 1
        nype = nfiles
        # BUG FIX: npe was never set on this path, causing a NameError
        # in the read loop below
        npe = nfiles

    if yguards:
        ny = mysub * nype + 2 * myg
    else:
        ny = mysub * nype

    f.close()

    # Check ranges
    def check_range(r, low, up, name="range"):
        """Normalise r to a clamped [min, max] pair within [low, up]."""
        r2 = r
        if r is not None:
            try:
                n = len(r2)
            except TypeError:
                # No len attribute, so probably a single number
                r2 = [r2, r2]
            if (len(r2) < 1) or (len(r2) > 2):
                print("WARNING: " + name + " must be [min, max]")
                r2 = None
            else:
                if len(r2) == 1:
                    # BUG FIX: was 'r2 = [r2, r2]', which nested the list
                    r2 = [r2[0], r2[0]]
                if r2[0] < low:
                    r2[0] = low
                if r2[0] > up:
                    r2[0] = up
                # BUG FIX: the lower clamp of the max was hard-coded 0
                # instead of 'low'
                if r2[1] < low:
                    r2[1] = low
                if r2[1] > up:
                    r2[1] = up
                if r2[0] > r2[1]:
                    r2[0], r2[1] = r2[1], r2[0]
        else:
            r2 = [low, up]
        return r2

    xind = check_range(xind, 0, nx - 1, "xind")
    yind = check_range(yind, 0, ny - 1, "yind")
    zind = check_range(zind, 0, mz - 2, "zind")
    tind = check_range(tind, 0, nt - 1, "tind")

    xsize = xind[1] - xind[0] + 1
    ysize = yind[1] - yind[0] + 1
    zsize = zind[1] - zind[0] + 1
    tsize = tind[1] - tind[0] + 1

    # Map between dimension names and output size
    sizes = {'x': xsize, 'y': ysize, 'z': zsize, 't': tsize}

    # Create a list with size of each dimension
    # BUG FIX: was map(...), which is a lazy iterator on Python 3 and
    # cannot be passed to np.zeros
    ddims = [sizes[d] for d in dimens]

    # Create the data array
    data = np.zeros(ddims)

    for i in range(npe):
        # Get X and Y processor indices
        pe_yind = int(i / nxpe)
        pe_xind = i % nxpe

        # Get local ranges
        if yguards:
            ymin = yind[0] - pe_yind * mysub
            ymax = yind[1] - pe_yind * mysub
        else:
            ymin = yind[0] - pe_yind * mysub + myg
            ymax = yind[1] - pe_yind * mysub + myg

        xmin = xind[0] - pe_xind * mxsub
        xmax = xind[1] - pe_xind * mxsub

        inrange = True

        if yguards:
            # Check lower y boundary
            if pe_yind == 0:
                # Keeping inner boundary
                if ymax < 0:
                    inrange = False
                if ymin < 0:
                    ymin = 0
            else:
                if ymax < myg:
                    inrange = False
                if ymin < myg:
                    ymin = myg

            # Upper y boundary
            if pe_yind == (nype - 1):
                # Keeping outer boundary
                if ymin >= (mysub + 2 * myg):
                    inrange = False
                if ymax > (mysub + 2 * myg - 1):
                    ymax = (mysub + 2 * myg - 1)
            else:
                if ymin >= (mysub + myg):
                    inrange = False
                if ymax >= (mysub + myg):
                    ymax = (mysub + myg - 1)
        else:
            if (ymin >= (mysub + myg)) or (ymax < myg):
                inrange = False  # Y out of range
            if ymin < myg:
                ymin = myg
            if ymax >= mysub + myg:
                ymax = myg + mysub - 1

        # Check lower x boundary
        if pe_xind == 0:
            # Keeping inner boundary
            if xmax < 0:
                inrange = False
            if xmin < 0:
                xmin = 0
        else:
            if xmax < mxg:
                inrange = False
            if xmin < mxg:
                xmin = mxg

        # Upper x boundary
        if pe_xind == (nxpe - 1):
            # Keeping outer boundary
            if xmin >= (mxsub + 2 * mxg):
                inrange = False
            if xmax > (mxsub + 2 * mxg - 1):
                xmax = (mxsub + 2 * mxg - 1)
        else:
            if xmin >= (mxsub + mxg):
                inrange = False
            if xmax >= (mxsub + mxg):
                xmax = (mxsub + mxg - 1)

        # Number of local values
        nx_loc = xmax - xmin + 1
        ny_loc = ymax - ymin + 1

        # Calculate global indices
        xgmin = xmin + pe_xind * mxsub
        xgmax = xmax + pe_xind * mxsub

        if yguards:
            ygmin = ymin + pe_yind * mysub
            ygmax = ymax + pe_yind * mysub
        else:
            ygmin = ymin + pe_yind * mysub - myg
            ygmax = ymax + pe_yind * mysub - myg

        if not inrange:
            continue  # Don't need this file

        filename = os.path.join(path, prefix + "." + str(i) + ".nc")
        if info:
            sys.stdout.write("\rReading from " + filename + ": [" +
                             str(xmin) + "-" + str(xmax) + "][" +
                             str(ymin) + "-" + str(ymax) + "] -> [" +
                             str(xgmin) + "-" + str(xgmax) + "][" +
                             str(ygmin) + "-" + str(ygmax) + "]")

        f = DataFile(filename)

        if ndims == 4:
            d = f.read(varname, ranges=[tind[0], tind[1] + 1,
                                        xmin, xmax + 1,
                                        ymin, ymax + 1,
                                        zind[0], zind[1] + 1])
            data[:, (xgmin - xind[0]):(xgmin - xind[0] + nx_loc),
                 (ygmin - yind[0]):(ygmin - yind[0] + ny_loc), :] = d
        elif ndims == 3:
            # Could be xyz or txy
            if dimens[2] == 'z':
                # xyz
                d = f.read(varname, ranges=[xmin, xmax + 1,
                                            ymin, ymax + 1,
                                            zind[0], zind[1] + 1])
                data[(xgmin - xind[0]):(xgmin - xind[0] + nx_loc),
                     (ygmin - yind[0]):(ygmin - yind[0] + ny_loc), :] = d
            else:
                # txy
                d = f.read(varname, ranges=[tind[0], tind[1] + 1,
                                            xmin, xmax + 1,
                                            ymin, ymax + 1])
                data[:, (xgmin - xind[0]):(xgmin - xind[0] + nx_loc),
                     (ygmin - yind[0]):(ygmin - yind[0] + ny_loc)] = d
        elif ndims == 2:
            # xy
            d = f.read(varname, ranges=[xmin, xmax + 1, ymin, ymax + 1])
            data[(xgmin - xind[0]):(xgmin - xind[0] + nx_loc),
                 (ygmin - yind[0]):(ygmin - yind[0] + ny_loc)] = d

        f.close()

    # Finished looping over all files
    if info:
        sys.stdout.write("\n")
    return data
def create(averagelast=1, final=-1, path="data", output="./", informat="nc",
           outformat=None):
    """Create restart files from data (dmp) files.

    NOTE(review): this is a near-duplicate of an earlier create() in this
    file; kept with the same interface.

    Inputs
    ======
    averagelast   Number of time points to average over.
                  Default is 1 i.e. just take last time-point
    final         The last time point to use. Default is last (-1)
    path          Path to the input data files
    output        Path where the output restart files should go
    informat      Format of the input data files
    outformat     Format of the output restart files
    """
    if outformat is None:
        outformat = informat

    file_list = glob.glob(os.path.join(path, "BOUT.dmp.*." + informat))
    nfiles = len(file_list)

    # Converted from Python-2 print statements for consistency with the
    # rest of the file
    print("Number of data files: " + str(nfiles))

    for i in range(nfiles):
        # Open each data file
        infname = os.path.join(path, "BOUT.dmp." + str(i) + "." + informat)
        outfname = os.path.join(output, "BOUT.restart." + str(i) + "." + outformat)
        print(infname + " -> " + outfname)

        infile = DataFile(infname)
        outfile = DataFile(outfname, create=True)

        # Get the data always needed in restart files
        hist_hi = infile.read("iteration")
        print("hist_hi = " + str(hist_hi))
        outfile.write("hist_hi", hist_hi)

        t_array = infile.read("t_array")
        tt = t_array[final]
        print("tt = " + str(tt))
        outfile.write("tt", tt)

        NXPE = infile.read("NXPE")
        NYPE = infile.read("NYPE")
        NPES = NXPE * NYPE
        print("NPES = " + str(NPES) + " NXPE = " + str(NXPE))
        outfile.write("NPES", NPES)
        outfile.write("NXPE", NXPE)

        # Get a list of variables
        varnames = infile.list()
        for var in varnames:
            if infile.ndims(var) == 4:
                # Could be an evolving variable
                print(" -> " + var)

                data = infile.read(var)
                if averagelast == 1:
                    # Take a single time point
                    # (renamed from 'slice', which shadowed the builtin)
                    tslice = data[final, :, :, :]
                else:
                    # Average over the last 'averagelast' time points
                    tslice = mean(data[(final - averagelast):final, :, :, :],
                                  axis=0)
                print(tslice.shape)
                outfile.write(var, tslice)

        infile.close()
        outfile.close()
def collect(varname, xind=None, yind=None, zind=None, tind=None, path=".",
            yguards=False, info=True, prefix="BOUT.dmp"):
    """Collect a variable from a set of BOUT++ outputs.

    data = collect(name)

    name   Name of the variable (string)

    Optional arguments:

      xind = [min,max]   Range of X indices to collect
      yind = [min,max]   Range of Y indices to collect
      zind = [min,max]   Range of Z indices to collect
      tind = [min,max]   Range of T indices to collect

      path    = "."          Path to data files
      prefix  = "BOUT.dmp"   File prefix
      yguards = False        Collect Y boundary guard cells?
      info    = True         Print information about collect?
    """
    # A single file "<prefix>.nc" means the run wrote one parallel file
    file_list = glob.glob(os.path.join(path, prefix + ".nc"))
    if file_list != []:
        print("Single (parallel) data file")
        f = DataFile(file_list[0])  # Open the file
        data = f.read(varname)
        # BUG FIX: the file handle was never closed on this path
        f.close()
        return data

    # Search for per-processor BOUT++ dump files in NetCDF format
    file_list = glob.glob(os.path.join(path, prefix + "*.nc"))
    file_list.sort()
    if file_list == []:
        print("ERROR: No data files found")
        return None
    nfiles = len(file_list)

    # Read data from the first file
    f = DataFile(file_list[0])

    try:
        dimens = f.dimensions(varname)
        ndims = len(dimens)
    except KeyError:
        print("ERROR: Variable '" + varname + "' not found")
        return None

    if ndims < 2:
        # Just read from file
        data = f.read(varname)
        f.close()
        return data

    if ndims > 4:
        print("ERROR: Too many dimensions")
        raise CollectError

    mxsub = f.read("MXSUB")
    mysub = f.read("MYSUB")
    mz = f.read("MZ")
    myg = f.read("MYG")
    t_array = f.read("t_array")
    nt = len(t_array)

    if info:
        print("mxsub = %d mysub = %d mz = %d\n" % (mxsub, mysub, mz))

    # Get the version of BOUT++ (should be > 0.6 for NetCDF anyway)
    try:
        v = f.read("BOUT_VERSION")  # raises KeyError for pre-0.2 files

        # 2D decomposition
        nxpe = f.read("NXPE")
        mxg = f.read("MXG")
        nype = f.read("NYPE")
        npe = nxpe * nype
        if info:
            print("nxpe = %d, nype = %d, npe = %d\n" % (nxpe, nype, npe))
        if npe < nfiles:
            print("WARNING: More files than expected (" + str(npe) + ")")
        elif npe > nfiles:
            print("WARNING: Some files missing. Expected " + str(npe))

        nx = nxpe * mxsub + 2 * mxg
    except KeyError:
        print("BOUT++ version : Pre-0.2")
        # Assume number of files is correct
        # No decomposition in X
        nx = mxsub
        mxg = 0
        nxpe = 1
        nype = nfiles
        # BUG FIX: npe was never set on this path, causing a NameError
        # in the read loop below
        npe = nfiles

    if yguards:
        ny = mysub * nype + 2 * myg
    else:
        ny = mysub * nype

    f.close()

    # Check ranges
    def check_range(r, low, up, name="range"):
        """Normalise r to a clamped [min, max] pair within [low, up]."""
        r2 = r
        if r is not None:
            try:
                n = len(r2)
            except TypeError:
                # No len attribute, so probably a single number
                r2 = [r2, r2]
            if (len(r2) < 1) or (len(r2) > 2):
                print("WARNING: " + name + " must be [min, max]")
                r2 = None
            else:
                if len(r2) == 1:
                    # BUG FIX: was 'r2 = [r2, r2]', which nested the list
                    r2 = [r2[0], r2[0]]
                if r2[0] < low:
                    r2[0] = low
                if r2[0] > up:
                    r2[0] = up
                # BUG FIX: the lower clamp of the max was hard-coded 0
                # instead of 'low'
                if r2[1] < low:
                    r2[1] = low
                if r2[1] > up:
                    r2[1] = up
                if r2[0] > r2[1]:
                    r2[0], r2[1] = r2[1], r2[0]
        else:
            r2 = [low, up]
        return r2

    xind = check_range(xind, 0, nx - 1, "xind")
    yind = check_range(yind, 0, ny - 1, "yind")
    zind = check_range(zind, 0, mz - 2, "zind")
    tind = check_range(tind, 0, nt - 1, "tind")

    xsize = xind[1] - xind[0] + 1
    ysize = yind[1] - yind[0] + 1
    zsize = zind[1] - zind[0] + 1
    tsize = tind[1] - tind[0] + 1

    # Map between dimension names and output size
    sizes = {'x': xsize, 'y': ysize, 'z': zsize, 't': tsize}

    # Create a list with size of each dimension
    # BUG FIX: was map(...), which is a lazy iterator on Python 3 and
    # cannot be passed to np.zeros
    ddims = [sizes[d] for d in dimens]

    # Create the data array
    data = np.zeros(ddims)

    for i in range(npe):
        # Get X and Y processor indices
        pe_yind = int(i / nxpe)
        pe_xind = i % nxpe

        # Get local ranges
        if yguards:
            ymin = yind[0] - pe_yind * mysub
            ymax = yind[1] - pe_yind * mysub
        else:
            ymin = yind[0] - pe_yind * mysub + myg
            ymax = yind[1] - pe_yind * mysub + myg

        xmin = xind[0] - pe_xind * mxsub
        xmax = xind[1] - pe_xind * mxsub

        inrange = True

        if yguards:
            # Check lower y boundary
            if pe_yind == 0:
                # Keeping inner boundary
                if ymax < 0:
                    inrange = False
                if ymin < 0:
                    ymin = 0
            else:
                if ymax < myg:
                    inrange = False
                if ymin < myg:
                    ymin = myg

            # Upper y boundary
            if pe_yind == (nype - 1):
                # Keeping outer boundary
                if ymin >= (mysub + 2 * myg):
                    inrange = False
                if ymax > (mysub + 2 * myg - 1):
                    ymax = (mysub + 2 * myg - 1)
            else:
                if ymin >= (mysub + myg):
                    inrange = False
                if ymax >= (mysub + myg):
                    ymax = (mysub + myg - 1)
        else:
            if (ymin >= (mysub + myg)) or (ymax < myg):
                inrange = False  # Y out of range
            if ymin < myg:
                ymin = myg
            if ymax >= mysub + myg:
                ymax = myg + mysub - 1

        # Check lower x boundary
        if pe_xind == 0:
            # Keeping inner boundary
            if xmax < 0:
                inrange = False
            if xmin < 0:
                xmin = 0
        else:
            if xmax < mxg:
                inrange = False
            if xmin < mxg:
                xmin = mxg

        # Upper x boundary
        if pe_xind == (nxpe - 1):
            # Keeping outer boundary
            if xmin >= (mxsub + 2 * mxg):
                inrange = False
            if xmax > (mxsub + 2 * mxg - 1):
                xmax = (mxsub + 2 * mxg - 1)
        else:
            if xmin >= (mxsub + mxg):
                inrange = False
            if xmax >= (mxsub + mxg):
                xmax = (mxsub + mxg - 1)

        # Number of local values
        nx_loc = xmax - xmin + 1
        ny_loc = ymax - ymin + 1

        # Calculate global indices
        xgmin = xmin + pe_xind * mxsub
        xgmax = xmax + pe_xind * mxsub

        if yguards:
            ygmin = ymin + pe_yind * mysub
            ygmax = ymax + pe_yind * mysub
        else:
            ygmin = ymin + pe_yind * mysub - myg
            ygmax = ymax + pe_yind * mysub - myg

        if not inrange:
            continue  # Don't need this file

        filename = os.path.join(path, prefix + "." + str(i) + ".nc")
        if info:
            sys.stdout.write("\rReading from " + filename + ": [" +
                             str(xmin) + "-" + str(xmax) + "][" +
                             str(ymin) + "-" + str(ymax) + "] -> [" +
                             str(xgmin) + "-" + str(xgmax) + "][" +
                             str(ygmin) + "-" + str(ygmax) + "]")

        f = DataFile(filename)

        if ndims == 4:
            d = f.read(varname, ranges=[tind[0], tind[1] + 1,
                                        xmin, xmax + 1,
                                        ymin, ymax + 1,
                                        zind[0], zind[1] + 1])
            data[:, (xgmin - xind[0]):(xgmin - xind[0] + nx_loc),
                 (ygmin - yind[0]):(ygmin - yind[0] + ny_loc), :] = d
        elif ndims == 3:
            # Could be xyz or txy
            if dimens[2] == 'z':
                # xyz
                d = f.read(varname, ranges=[xmin, xmax + 1,
                                            ymin, ymax + 1,
                                            zind[0], zind[1] + 1])
                data[(xgmin - xind[0]):(xgmin - xind[0] + nx_loc),
                     (ygmin - yind[0]):(ygmin - yind[0] + ny_loc), :] = d
            else:
                # txy
                d = f.read(varname, ranges=[tind[0], tind[1] + 1,
                                            xmin, xmax + 1,
                                            ymin, ymax + 1])
                data[:, (xgmin - xind[0]):(xgmin - xind[0] + nx_loc),
                     (ygmin - yind[0]):(ygmin - yind[0] + ny_loc)] = d
        elif ndims == 2:
            # xy
            d = f.read(varname, ranges=[xmin, xmax + 1, ymin, ymax + 1])
            data[(xgmin - xind[0]):(xgmin - xind[0] + nx_loc),
                 (ygmin - yind[0]):(ygmin - yind[0] + ny_loc)] = d

        f.close()

    # Finished looping over all files
    if info:
        sys.stdout.write("\n")
    return data
def pol_slice(var3d, gridfile, n=1, zangle=0.0):
    """data2d = pol_slice(data3d, 'gridfile', n=1, zangle=0.0)

    Take a slice through a 3D [x,y,z] variable at fixed toroidal angle,
    using the toroidal shift (qinty or zShift) from the grid file.
    Returns a 2D [x,y] array, or None on error.

    NOTE(review): this is a duplicate of an earlier pol_slice() in this
    file; kept with the same interface.
    """
    n = int(n)
    zangle = float(zangle)

    s = np.shape(var3d)
    if len(s) != 3:
        print("ERROR: pol_slice expects a 3D variable")
        return None
    nx, ny, nz = s

    dz = 2.*np.pi / float(n * (nz-1))

    try:
        # Open the grid file
        gf = DataFile(gridfile)

        # Check the grid size is correct
        if gf.read("nx") != nx:
            print("ERROR: Grid X size is different to the variable")
            return None
        if gf.read("ny") != ny:
            print("ERROR: Grid Y size is different to the variable")
            return None

        # Get the toroidal shift
        zShift = gf.read("qinty")
        # BUG FIX: 'zShift != None' on a numpy array does an elementwise
        # comparison whose truth value raises ValueError (which the old
        # bare except then swallowed); use an identity check instead.
        if zShift is not None:
            print("Using qinty as toroidal shift angle")
        else:
            zShift = gf.read("zShift")
            if zShift is not None:
                print("Using zShift as toroidal shift angle")
            else:
                print("ERROR: Neither qinty nor zShift found")
                return None

        gf.close()
    except Exception:
        # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
        # still propagate
        print("ERROR: pol_slice couldn't read grid file")
        return None

    var2d = np.zeros([nx, ny])

    ######################################
    # Perform 2D slice: 3-point (quadratic Lagrange) interpolation in z

    zind = old_div((zangle - zShift), dz)
    z0f = np.floor(zind)
    z0 = z0f.astype(int)
    p = zind - z0f  # fractional part of the index

    # Make z0 between 0 and (nz-2)
    z0 = ((z0 % (nz-1)) + (nz-1)) % (nz-1)

    # Get z+ and z-
    zp = (z0 + 1) % (nz-1)
    zm = (z0 - 1 + (nz-1)) % (nz-1)

    # There may be some more cunning way to do this indexing
    for x in np.arange(nx):
        for y in np.arange(ny):
            var2d[x,y] = 0.5*p[x,y]*(p[x,y]-1.0) * var3d[x,y,zm[x,y]] + \
                         (1.0 - p[x,y]*p[x,y]) * var3d[x,y,z0[x,y]] + \
                         0.5*p[x,y]*(p[x,y]+1.0) * var3d[x,y,zp[x,y]]

    return var2d