def __init__(self, filename, hsize=5, nstep=None):
    '''Class constructor:
    filename : NetCDF output file name
    hsize    : output image width in inches
    nstep    : the number of frames to process (default: all frames in the file)'''
    self.filename = filename
    f = NetCDF.NetCDFFile(filename, 'r')
    g = NetCDF.NetCDFFile('wrfout_times', 'r')
    self.nstep = nstep
    if nstep is None:
        # in case nstep was not specified, read the total number of time slices from the file
        self.nstep = g.variables['Times'].shape[0]
def read_output(field, suffix, array_type='numpy', path=read_output_default_path):
    """Read instantaneous and mean output data.

    Returns a 2-element tuple of the instantaneous and mean output data,
    as arrays of array_type, for files "[qi,qm]_"+suffix+".nc" in directory
    path+"/"+suffix.  The default of path is the utilities module variable
    read_output_default_path, which unless overloaded equals
    os.path.join(os.getcwd(), 'rundir').

    Positional Input:
    * field:  QTCM field name, as given in the netCDF file.  String.
    * suffix: Descriptor.  String.

    Keyword Input:
    * array_type: Type of array: 'Numeric', 'numarray', or 'numpy'.
    * path: Path to the directory containing the suffix directories that
      hold run output.

    Output:
    * 2-element tuple where the first element is the instantaneous data
      and the second element is the mean data.
    """
    if array_type == "Numeric":
        import Numeric as N
    elif array_type == "numarray":
        import numarray as N
    elif array_type == "numpy":
        import numpy as N
    else:
        raise ValueError, "No array package is defined"
    fn_inst = os.path.join(path, suffix, "qi_" + suffix + ".nc")
    fn_mean = os.path.join(path, suffix, "qm_" + suffix + ".nc")
    file_inst = S.NetCDFFile(fn_inst, mode='r')
    file_mean = S.NetCDFFile(fn_mean, mode='r')
    output = (N.array(file_inst.variables[field].getValue()),
              N.array(file_mean.variables[field].getValue()))
    file_inst.close()
    file_mean.close()
    return output
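# A minimal usage sketch for read_output.  The field name 'Ts' and run suffix
# 'ctrl' are hypothetical placeholders, assuming rundir/ctrl/qi_ctrl.nc and
# rundir/ctrl/qm_ctrl.nc were produced by a previous run.
# inst, mean = read_output('Ts', 'ctrl', array_type='numpy')
# print inst.shape, mean.shape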
def __init__(self, filename, hsize=5):
    '''Class constructor:
    filename : string, NetCDF file to read
    hsize    : optional, width of output images in inches'''
    self.f = NetCDF.NetCDFFile(filename, 'r')
    self.hsize = hsize
def read_nc(fileName, fieldName='rho'):
    """Read NetCDF data from a RAMSES-GPU simulation run using the
    Scientific.IO.NetCDF module.

    Possible values for fieldName: rho, E, rho_vx, rho_vy, rho_vz, Bx, By, Bz.

    Return a numpy array of the corresponding field.
    """
    f = nc.NetCDFFile(fileName, 'r')
    # TODO: test whether fieldName is in f.variables
    data = f.variables[fieldName].getValue()

    # get total time
    totalTime = getattr(f, 'total time')

    f.close()

    # print '%s read; returning field %s (%d,%d,%d)' % (fileName, fieldName,
    #                                                   data.shape[0],
    #                                                   data.shape[1],
    #                                                   data.shape[2])
    # return data, totalTime
    return data
def write_ncdf(self):
    # flags = asarray(ncfile.variables['flags'])
    # bolo_params = asarray(ncfile.variables['bolo_params'])
    # scans_info = asarray(ncfile.variables['scans_info'])
    flags = asarray(self.ncflags)
    scans_info = asarray(self.ncscans)
    bolo_params = asarray(self.ncbolo_params)
    nbolos = self.nbolos
    scanlen = self.scanlen
    nscans = self.nscans
    # self.ngoodbolos = bolo_params[:,0].sum()
    bolo_indices = (self.bolo_indices[newaxis, :]
                    + zeros([self.whscan.shape[0], 1])).astype('int')
    whscan = (self.whscan[:, newaxis]
              + zeros([1, self.ngoodbolos])).astype('int')
    # fs = reshape(self.flags, [nscans*scanlen, ngoodbolos])
    # fs2 = zeros([nscans*scanlen, nbolos])
    # fs2[:, self.bolo_indices] = fs
    # flags[self.whscan, :] = fs2
    flags[whscan, bolo_indices] = reshape(self.flags,
                                          [nscans * scanlen, self.ngoodbolos])
    if flags.min() < 0:
        flags[flags < 0] = 0
    self.ncfile.close()
    ncfile = NetCDF.NetCDFFile(self.ncfilename, 'a')
    ncfile.variables['flags'].assignValue(flags)
    ncfile.history += "\n Flagged on " + time.ctime()
    ncfile.flush()
    # print ncfile.variables['flags'].max()
    # import pdb; pdb.set_trace()
    ncfile.close()
def chg_attribute(ncpath, attname, attval, verbose):
    """Call to change a netcdf attribute."""
    if not os.path.exists(ncpath):
        print 'File not found: {}'.format(ncpath)
        raise SystemExit
    try:
        if verbose:
            print 'Opening {}'.format(ncpath)
        cdf_fh = NetCDF.NetCDFFile(ncpath, 'a')
    except IOError:
        print 'Error accessing {}'.format(ncpath)
        raise SystemExit
    except OSError:
        print 'Error accessing {}'.format(ncpath)
        raise SystemExit
    try:
        setattr(cdf_fh, attname, attval)
    except IOError:
        print 'Error accessing {}'.format(ncpath)
        raise SystemExit
    except OSError:
        print 'Error accessing {}'.format(ncpath)
        raise SystemExit
    print 'Attribute {} changed to {}'.format(attname, attval)
    cdf_fh.close()
    return
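# Hypothetical call to chg_attribute; the path and attribute values below are
# placeholders only, not names defined by this script.
# chg_attribute('/data/sample_mosaic.nc', 'channel', 'visible', verbose=True)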
def test_file(srcpath):
    """ Count and report the nonzero image pixels in the source file
    (debug check). """
    #
    if not os.path.exists(srcpath):
        print "File not found: {}".format(srcpath)
        raise SystemExit
    try:
        fh_src = NetCDF.NetCDFFile(srcpath, "r")
    except IOError:
        print 'Error accessing {}'.format(srcpath)
        raise SystemExit
    except OSError:
        print 'Error accessing {}'.format(srcpath)
        raise SystemExit
    #
    #
    print "TESTING: {}".format(srcpath)
    vidsrc = fh_src.variables['image']
    pixsrc = vidsrc.getValue()
    #
    # ######## for Debug #########
    pixcnt = numpy.sum(pixsrc != 0)
    print "FILE HAS: {} Nonzero pixels".format(pixcnt)
    #
    fh_src.close()
def openDestinationFile(destpath, chnlname):
    """ Open the destination netcdf file, change attributes, and return
    the file handle. """
    #
    if not os.path.exists(destpath):
        print "File not found: {}".format(destpath)
        raise SystemExit
    try:
        fh_dest = NetCDF.NetCDFFile(destpath, "a")
    except IOError:
        print 'Error accessing {}'.format(destpath)
        raise SystemExit
    except OSError:
        print 'Error accessing {}'.format(destpath)
        raise SystemExit
    #
    print "Opening file: {}".format(destpath)
    setattr(fh_dest, "satelliteName", "MOSAIC")
    #
    if "mosaicdelta" in destpath:
        # time delta
        attstr = "{0} timedelta".format(chnlname)
    else:
        attstr = "{0}".format(chnlname)
    setattr(fh_dest, "channel", attstr)
    return fh_dest
def lastdelta(srcpath):
    """ Get the most recent time delta in the new mosaic file. """
    #
    if not os.path.exists(srcpath):
        print "File not found: {}".format(srcpath)
        raise SystemExit
    try:
        fh_src = NetCDF.NetCDFFile(srcpath, "r")
    except IOError:
        print 'Error accessing {}'.format(srcpath)
        raise SystemExit
    except OSError:
        print 'Error accessing {}'.format(srcpath)
        raise SystemExit
    #
    #
    vidsrc = fh_src.variables['image']
    pixdata = vidsrc.getValue()
    pixcnt = numpy.sum(pixdata != 0)
    if pixcnt > 0:
        pixmin = numpy.min(pixdata[numpy.nonzero(pixdata)])
        delsecs = pixel2timedif(pixmin)
    else:
        delsecs = 0
    fh_src.close()
    return delsecs
def merge_delta_files(fh_dest, srcpath, destpath, ageval):
    """ Copy the image data from the source file over the image data of
    the destination file. """
    #
    if not os.path.exists(srcpath):
        print "File not found: {}".format(srcpath)
        raise SystemExit
    try:
        fh_src = NetCDF.NetCDFFile(srcpath, "r")
    except IOError:
        print 'Error accessing {}'.format(srcpath)
        raise SystemExit
    except OSError:
        print 'Error accessing {}'.format(srcpath)
        raise SystemExit
    #
    #
    #print "Adding time of file: {} to destination: {}".format(srcpath, destpath)
    vidsrc = fh_src.variables['image']
    viddest = fh_dest.variables['image']
    pixsrc = vidsrc.getValue()
    pixdest = viddest.getValue()
    pixdest = numpy.where(pixsrc != 0, ageval, pixdest)
    pixdest = pixdest.astype(int8)
    #
    rtn = viddest.assignValue(pixdest)
    #
    fh_src.close()
def getImages(filename, vname):
    file = NetCDF.NetCDFFile(filename, 'r')
    vdata = file.variables[vname]
    vsize = vdata.shape[0]
    # create empty files subdirectory for output images
    try:
        shutil.rmtree('colorbarImages')
    except OSError:
        pass
    os.makedirs('colorbarImages')
    # go through the whole dataset and generate a color bar image for each step
    for i in range(vsize):
        varray = vdata[i, :, :]
        data = pylab.flipud(varray)
        pylab.imshow(data, norm=LogNorm())
        imgNum = 'TimeStep_' + str(i)
        if len(data[data > 0]) > 0:
            # make a new figure that contains the colorbar
            fig = pylab.figure(figsize=(2, 5))
            ax1 = fig.add_axes([0.35, 0.03, 0.1, 0.9])
            vmin = data[data > 0].min()
            vmax = data.max()
            norm = LogNorm(vmin, vmax)
            # make the colorbar in log scale
            logFormatter = LogFormatter(10, labelOnlyBase=False)
            cb1 = ColorbarBase(ax1, norm=norm, format=logFormatter,
                               spacing='proportional', orientation='vertical')
            imgName = 'colorbarImages/%s.png' % imgNum
            fig.savefig(imgName, bbox_inches='tight')
def update_timedelta(fh_dest, srcpath, destpath, pixdel, pixmax):
    """ Update the time delta in the new mosaic file. """
    #
    if not os.path.exists(srcpath):
        print "File not found: {}".format(srcpath)
        raise SystemExit
    try:
        fh_src = NetCDF.NetCDFFile(srcpath, "a")
    except IOError:
        print 'Error accessing {}'.format(srcpath)
        raise SystemExit
    except OSError:
        print 'Error accessing {}'.format(srcpath)
        raise SystemExit
    #
    #
    print "Updating time delta: ", destpath
    vidsrc = fh_src.variables['image']
    viddest = fh_dest.variables['image']
    pixsrc_c = vidsrc.getValue()
    pixsrc = pixsrc_c.astype(uint8)
    #
    pixmax -= pixdel
    #
    pixsrc = numpy.where(pixsrc > pixmax, 0, pixsrc)
    pixsrc = numpy.where(pixsrc != 0, pixsrc + pixdel, pixsrc)
    #
    pixsrc_c = pixsrc.astype(int8)
    rtn = viddest.assignValue(pixsrc_c)
    fh_src.close()
def openNetCDFFile(fn, mode='all'):
    ''' Open a netCDF file and return its contents as numpy arrays.
    Either get a dict with "variable", "lon", and "lat", or only the variable. '''
    f = S.NetCDFFile(fn, mode='r')
    try:
        lon = f.variables['lon'].getValue()
        lat = f.variables['lat'].getValue()
    except KeyError:
        try:
            lon = f.variables['X'].getValue()
            lat = f.variables['Y'].getValue()
        except KeyError:
            print 'Can\'t find lon/lat variables.'
    varName = cdo.showname(input=fn)[0]  # TODO: better way to get the variable name
    mVar = f.variables[varName].getValue()
    mVar = mVar.squeeze()
    if len(np.shape(mVar)) == 3:
        mVar = mVar[0, :, :]
    elif len(np.shape(mVar)) == 2:
        mVar = mVar[:, :]
    f.close()
    if mode == 'all':
        return {'variable': mVar, 'lon': lon, 'lat': lat}
    else:
        return mVar
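# A usage sketch for openNetCDFFile; 'precip.nc' is a placeholder file name and
# assumes the file carries lon/lat (or X/Y) coordinates plus one data variable.
# fields = openNetCDFFile('precip.nc', mode='all')
# print fields['lon'].shape, fields['lat'].shape, fields['variable'].shape
# data_only = openNetCDFFile('precip.nc', mode='var')  # any mode != 'all' returns just the array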
def main():
    """Call to run script."""
    args = _process_command_line()
    if not os.path.exists(args.filepath):
        print 'File not found: {}'.format(args.filepath)
        raise SystemExit
    try:
        if args.verbose:
            print 'Opening {}'.format(args.filepath)
        cdf_fh = NetCDF.NetCDFFile(args.filepath, 'a')
    except IOError:
        print 'Error accessing {}'.format(args.filepath)
        raise SystemExit
    except OSError:
        print 'Error accessing {}'.format(args.filepath)
        raise SystemExit
    tvar = cdf_fh.variables['validTime']
    if args.seconds > 0:
        tvar.assignValue(args.seconds)
        print 'validTime changed to {}'.format(args.seconds)
    else:
        filesecs = tvar.getValue()
        print 'File validTime is {}'.format(filesecs)
    cdf_fh.close()
    return
def test_scientificpython_bug():
    import sys
    from pytest import importorskip
    importorskip('Scientific')
    import numpy as np
    msg = "\n'TypeError: array cannot be safely cast to required type'\n"
    msg += "means you are probably using a broken ScientificPython,\n"
    msg += "see: https://bugs.launchpad.net/ubuntu/+source/python-scientific/+bug/1041302\n"
    import Scientific.IO.NetCDF as netcdf
    import Scientific
    version = Scientific.__version__.split(".")
    print('Found ScientificPython version: ', Scientific.__version__)
    if list(map(int, version)) < [2, 8]:
        print('ScientificPython 2.8 or greater required for numpy support in NetCDF')
        raise RuntimeError('ScientificPython version 2.8 or greater is required')
    handle = netcdf.NetCDFFile("test.nc", "w")
    try:
        handle.test = np.array([1.0])
    except TypeError:
        print(msg, file=sys.stderr)
        raise
    handle.close()
def main():
    """ Counts valid pixels in a satellite netcdf file. """
    if len(sys.argv) <= 1:
        print "No file path! Syntax: ", sys.argv[0], " {file path}"
        raise SystemExit
    else:
        filepath = sys.argv[1]
    #
    if not os.path.exists(filepath):
        print "File not found: ", filepath
        raise SystemExit
    try:
        cdf_fh = NetCDF.NetCDFFile(filepath, "r")
    except IOError:
        print 'Error opening {}'.format(filepath)
        raise SystemExit
    except OSError:
        print 'Error accessing {}'.format(filepath)
        raise SystemExit
    varid = cdf_fh.variables['image']
    pixdata = varid.getValue()
    #
    pixcnt = numpy.sum(pixdata != 0)
    print "{} pixels".format(pixcnt)
    #
    cdf_fh.close()
    return
def ReadEPHNCFile(filename):
    """ Reads a NetCDF file that describes the dynamical matrix and self-energies. """
    class eph:
        pass

    file = nc.NetCDFFile(filename, 'r')
    print 'Reading from %s' % filename

    # General attributes
    eph.filename = filename
    eph.wl = N.array(file.variables['Wlist'])
    eph.hw = N.array(file.variables['hw'])
    eph.U = N.array(file.variables['U'])
    eph.DynMat = N.array(file.variables['DynMat'])
    eph.SigL = N.array(file.variables['ReSigL']) + 1j * N.array(file.variables['ImSigL'])
    eph.SigR = N.array(file.variables['ReSigR']) + 1j * N.array(file.variables['ImSigR'])
    eph.efric = N.array(file.variables['Friction'])
    eph.xim = N.array(file.variables['NC'])
    eph.xip = N.array(file.variables['NCP'])
    file.close()
    return eph
def ncget(self, fname, variable):
    # Return the flattened data for `variable`, or None if it is not present.
    # The file is always closed before returning.
    fh = nf.NetCDFFile(fname, 'r')
    data = None
    if variable in fh.variables:
        data = fh.variables[variable][:].ravel()
    fh.close()
    return data
def process_stdin(runfunc, gzip=False, prepend=False, outputdir=None,
                  simpleServer=None, connectstring=None, dataProviders=None,
                  reported=None, write_reported=None):
    # whole_file = sys.stdin.read()
    if gzip:
        f = NetCDF.NetCDFFile(
            StringIO(zlib.decompress(sys.stdin.read(), 16 + zlib.MAX_WBITS)), 'r')
    else:
        f = NetCDF.NetCDFFile(StringIO(sys.stdin.read()), 'r')
    runfunc(f, outputdir, simpleServer, prepend, connectstring,
            dataProviders, reported, write_reported)
def getvar(fname, varname):
    usescipy = False
    try:
        import Scientific.IO.NetCDF as netcdf
    except ImportError:
        import scipy
        from scipy.io import netcdf
        usescipy = True
    if usescipy:
        nffile = netcdf.netcdf_file(fname, "r", mmap=False)
        var = nffile.variables[varname]
        varvals = var[:].copy()    # works for vector only?
        nffile.close()
    else:
        nffile = netcdf.NetCDFFile(fname, "r")
        var = nffile.variables[varname]
        varvals = var.getValue()
        nffile.close()
    return varvals
def getField(netcdf_file):
    # obs
    file = NetCDF.NetCDFFile(netcdf_file, 'r')
    global lon, lat, psi
    lon = file.variables['lon'][:]
    lat = file.variables['lat'][:]
    psi = file.variables['z'][:, :]
    lon = np.array(lon)
    lat = np.array(lat)
    psi = np.array(psi)
    file.close()
    return lon, lat, psi
def putvar(fname, varname, varvals):
    usescipy = False
    try:
        import Scientific.IO.NetCDF as netcdf
    except ImportError:
        import scipy
        from scipy.io import netcdf
        usescipy = True
    if usescipy:
        nffile = netcdf.netcdf_file(fname, "a", mmap=False)
        var = nffile.variables[varname]
        var[:] = varvals
        nffile.close()
    else:
        nffile = netcdf.NetCDFFile(fname, "a")
        var = nffile.variables[varname]
        var.assignValue(varvals)
        nffile.close()
    ierr = 0
    return ierr
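# A hypothetical round trip with the getvar/putvar helpers above; the file name
# 'surfdata.nc' and variable name 'PCT_SAND' are placeholders, assuming an
# existing netCDF file that already defines that variable.
# vals = getvar('surfdata.nc', 'PCT_SAND')
# vals = vals * 0.5                          # illustrative in-place edit
# ierr = putvar('surfdata.nc', 'PCT_SAND', vals)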
def process_netcdf_meta_file(filepath, outputdir=None, simpleServer=None,
                             prepend=False, connectstring=None, dataProviders=None,
                             reported=None, write_reported=None):
    f = NetCDF.NetCDFFile(filepath, 'r')
    process_netcdf_meta(f, outputdir, simpleServer, prepend, connectstring,
                        dataProviders, reported, write_reported)
def write_grid_as_netcdf(grid_data, outpath, options={}, progress=None):
    from Scientific.IO import NetCDF
    f = NetCDF.NetCDFFile(outpath, 'w')
    if progress:
        progress.close_on_cancel(f)

    # createDimension() cannot handle long integer size values
    xsize, ysize, zsize = [int(s) for s in grid_data.size]
    f.createDimension('x', xsize)
    f.createDimension('y', ysize)
    f.createDimension('z', zsize)

    f.xyz_origin = grid_data.origin
    f.xyz_step = grid_data.step
    if grid_data.cell_angles != (90, 90, 90):
        f.cell_angles = grid_data.cell_angles
    if grid_data.rotation != ((1, 0, 0), (0, 1, 0), (0, 0, 1)):
        from chimerax.geometry import matrix
        axis, angle = matrix.rotation_axis_angle(grid_data.rotation)
        f.rotation_axis = axis
        f.rotation_angle = angle

    name = 'data'
    typecode = grid_data.value_type.char
    v = f.createVariable(name, typecode, ('z', 'y', 'x'))
    v.rgba = grid_data.rgba
    v.component_number = 1
    save_unsigned_typecode(v, typecode)
    sarrays = subsample_arrays(grid_data, name, f)
    for k in range(zsize):
        if progress:
            progress.plane(k)
        values = grid_data.matrix((0, 0, k), (xsize, ysize, 1))
        v[k, :, :] = values.view(v.typecode())[0, :, :]
        for cell_size, ssv in sarrays:
            kstep = cell_size[2]
            if k % kstep == 0:
                ssd = grid_data.available_subsamplings[cell_size]
                xs, ys, zs = ssd.size
                ssk = k // kstep
                if ssk < zs:
                    values = ssd.matrix((0, 0, ssk), (xs, ys, 1))
                    ssv[ssk, :, :] = values.view(ssv.typecode())[0, :, :]

    # Subsample arrays may have an extra plane.
    for cell_size, ssv in sarrays:
        ssd = grid_data.available_subsamplings[cell_size]
        xs, ys, zs = ssd.size
        for ssk in range(zsize // cell_size[2], zs):
            values = ssd.matrix((0, 0, ssk), (xs, ys, 1))
            ssv[ssk, :, :] = values.view(ssv.typecode())[0, :, :]

    f.close()
def h5tonc(self, filename, packshort=False, scale_factor=None, add_offset=None):
    """Convert to a true netcdf file (filename).  Requires the
    Scientific.IO.NetCDF module.

    If packshort=True, variables are packed as short integers using the
    dictionaries scale_factor and add_offset.  The dictionary keys are the
    variable names in the hdf5 file to be packed as short integers.

    Each variable's unlimited dimension must be the slowest varying (the
    first dimension for C/Python, the last for Fortran)."""
    if not ScientificIONetCDF_imported or not Numeric_imported:
        print 'Scientific.IO.NetCDF and Numeric must be installed to convert to NetCDF'
        return
    ncfile = RealNetCDF.NetCDFFile(filename, 'w')
    # create dimensions.
    for dimname, size in self.dimensions.iteritems():
        ncfile.createDimension(dimname, size)
    # create global attributes.
    for key in self.ncattrs():
        setattr(ncfile, key, getattr(self, key))
    # create variables.
    for varname, varin in self.variables.iteritems():
        packvar = False
        dims = varin.dimensions
        dimsizes = [self.dimensions[dim] for dim in dims]
        if None in dimsizes:
            if dimsizes.index(None) != 0:
                raise ValueError, 'unlimited or enlargeable dimension must be most significant (slowest changing, or first) one in order to convert to a true netCDF file'
        if packshort and scale_factor.has_key(varname) and add_offset.has_key(varname):
            print 'packing %s as short integers ...' % varname
            datatype = 's'
            packvar = True
        else:
            datatype = varin.typecode()
        if not _netcdftype_dict[datatype]:
            raise ValueError, 'datatype not supported in netCDF, cannot convert to a true netCDF file'
        varout = ncfile.createVariable(varname, datatype, dims)
        for key in varin.ncattrs():
            setattr(varout, key, getattr(varin, key))
        if packvar:
            setattr(varout, 'scale_factor', scale_factor[varname])
            setattr(varout, 'add_offset', add_offset[varname])
        for n in range(varin.shape[0]):
            if packvar:
                varout[n] = ((1. / scale_factor[varname]) * (varin[n] - add_offset[varname])).astype('s')
            else:
                if datatype == 'c':
                    tmp = Numeric.array(varin[n].flatten(), 'c')
                    varout[n] = Numeric.reshape(tmp, varin.shape[1:])
                else:
                    varout[n] = varin[n]
    # close file.
    ncfile.close()
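# Hypothetical call to h5tonc with short-integer packing.  'temperature' is a
# placeholder variable name, the scale/offset values are illustrative only, and
# h5f is assumed to be an already-open instance of the class defining h5tonc.
# scale = {'temperature': 0.01}
# offset = {'temperature': 273.15}
# h5f.h5tonc('converted.nc', packshort=True, scale_factor=scale, add_offset=offset)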
def main(argv):
    import argparse
    import os

    styles = {
        "quickly": InsertChunksWithoutCheckingForExistingReadings,
        # "safely": InsertRowsIfNoConflict
    }

    parser = argparse.ArgumentParser(
        description="Imports climate data from NetCDF file.",
        prog=argv[0],
        usage="""
%(prog)s --NetCDF_file path/to/file.nc --parameter_name <parameter> --style <import style> --field_name <field name>

e.g. python ./run.py %(prog)s --field_name rr --style quickly --parameter_name "Gridded Rainfall mm" --NetCDF_file gridded_rainfall_mm.nc
""")
    parser.add_argument("--NetCDF_file", required=True,
                        help="NetCDF file to import.")
    parser.add_argument("--parameter_name", required=True,
                        choices=climate_sample_tables.keys(),
                        help="Parameter name, which corresponds to an added table.")
    parser.add_argument("--clear_existing_data", type=bool, default=False,
                        help="Truncate database tables first.")
    parser.add_argument("--style", required=True,
                        choices=styles.keys(),
                        default="safely",
                        help="""
quickly: just insert readings into the database
safely: check that data is not overwritten
""")
    parser.add_argument("--field_name", required=True,
                        help="""name of the netCDF field that holds the data value,
e.g. "tt" or "rr".  Type "?" to discover options.""")

    args = parser.parse_args(argv[1:])

    sample_table = ClimateDataPortal.SampleTable.with_name(args.parameter_name)
    sample_table.clear()
    db.commit()

    import_climate_readings(
        netcdf_file=NetCDF.NetCDFFile(args.NetCDF_file),
        field_name=args.field_name,
        add_reading=styles[args.style](sample_table),
        converter=ClimateDataPortal.units_in_out[units]["in"]
    )
def __init__(self, filename, attrs='r'):
    """
    @param filename the name of the exodus file
    @param attrs the file type attributes:
        'r' - read only (filename must exist)
        'a' - append/new file
        'w' - new file (erases any existing contents)
    """
    self.filename = filename
    self.cdf = netcdf.NetCDFFile(filename, attrs)

    self.dims = self.cdf.dimensions
    self.vars = self.cdf.variables
def __init__(self, filename, latitudeName="latitude", longitudeName="longitude",
             timeName="time"):
    self._file = netcdf.netcdf_file(filename, "r")
    self._latitudes = self.Values(latitudeName)
    self._longitudes = self.Values(longitudeName)
    self._times = self.Values(timeName)
    # Longitude wrap-around
    self._longitudes.append(self._longitudes[0] + 360.0)
    debug.dprint(self)
    return
def returnField(outputFileName):
    # obs
    output_file = '%s.nc' % str(outputFileName)
    global outLon, outLat, outField, outsize1, outsize2
    f = NetCDF.NetCDFFile(output_file, 'w')
    f.createDimension('dim1', outsize1)
    f.createDimension('dim2', outsize2)
    f.createVariable('lon', 'd', ('dim1',))
    f.createVariable('lat', 'd', ('dim2',))
    f.createVariable('z', 'd', ('dim1', 'dim2'))
    f.variables['lon'][:] = outLon
    f.variables['lat'][:] = outLat
    f.variables['z'][:] = outField
    f.close()
def ReadMatlabPHNCFile(filename):
    """ Reads a NetCDF file that describes the dynamical matrix and self-energies. """
    class eph:
        pass

    file = nc.NetCDFFile(filename, 'r')
    print 'Reading from %s' % filename

    # General attributes
    eph.filename = filename
    eph.T = float(file.variables['T'][:, 0])          # np.array(file.variables['T'][:,0])
    eph.dT = float(file.variables['dT'][:, 0])        # np.array(file.variables['dT'][:,0])
    eph.hwcut = float(file.variables['hwcut'][:, 0])  # np.array(file.variables['hwcut'][:,0])
    eph.dt = float(file.variables['dt'][:, 0])        # np.array(file.variables['dt'][:,0])
    eph.nw = int(file.variables['nw'][:, 0])          # np.array(file.variables['nw'][:,0],int)
    eph.nmd = int(file.variables['nmd'][:, 0])        # np.array(file.variables['nmd'][:,0],int)
    eph.nmemL = int(file.variables['nmemL'][:, 0])    # np.array(file.variables['nmemL'][:,0],int)
    eph.nmemR = int(file.variables['nmemR'][:, 0])    # np.array(file.variables['nmemR'][:,0],int)
    eph.NA_L = np.array(file.variables['NA_L'][:, 0], int)
    eph.NA_R = np.array(file.variables['NA_R'][:, 0], int)
    eph.idL = np.array(file.variables['idLn'][:, 0], int)
    eph.idR = np.array(file.variables['idRn'][:, 0], int)
    eph.E = np.array(file.variables['E'][:, 0])
    eph.SigL = np.array(file.variables['SigL_Re'][:]) + 1j * np.array(file.variables['SigL_Im'][:])
    eph.SigR = np.array(file.variables['SigR_Re'][:]) + 1j * np.array(file.variables['SigR_Im'][:])
    # for iw in range(eph.nw):
    #     eph.SigL[iw,1,1] = eph.SigL2[1,1,iw]
    #     eph.SigR[iw,1,1] = eph.SigR2[1,1,iw]
    # N = 3*eph.NA_L
    # eph.SigL = np.zeros((N,N,eph.nw), np.complex)
    # for iw in range(eph.nw):
    #     for id1 in range(N):
    #         for id2 in range(N):
    #             eph.SigL[iw,:,:] = np.array(file.variables['SigL_Re'][:]) + 1j*np.array(file.variables['SigL_Im'][:])
    #             eph.SigR[iw,:,:] = np.array(file.variables['SigR_Re'][:]) + 1j*np.array(file.variables['SigR_Im'][:])
    # print "was here"
    file.close()
    return eph
def read_lonlat_netcdf(netcdf):
    """ Read the lon/lat coordinate variables from a netcdf file in which
    lon is 2D: lon = (lon, lat). """
    # open input file
    infile = IO.NetCDFFile(netcdf, 'r')
    # read the variables
    lon = infile.variables['lon'][:].copy()
    lat = infile.variables['lat'][:].copy()
    infile.close()
    return lon, lat
def getvar(fname, varname):
    usescipy = False
    try:
        import Scientific.IO.NetCDF as netcdf
    except ImportError:
        import scipy
        from scipy.io import netcdf
        usescipy = True
    if usescipy:
        nffile = netcdf.netcdf_file(fname, "r")
        var = nffile.variables[varname]
        varvals = var[:].copy()    # works for vector only?
        nffile.close()
    else:
        nffile = netcdf.NetCDFFile(fname, "r")
        var = nffile.variables[varname]
        varvals = var.getValue()
        nffile.close()
    return varvals
def putvar(fname, varname, varvals):
    usescipy = False
    try:
        import Scientific.IO.NetCDF as netcdf
    except ImportError:
        import scipy
        from scipy.io import netcdf
        usescipy = True
    if usescipy:
        nffile = netcdf.netcdf_file(fname, "a")
        var = nffile.variables[varname]
        var[:] = varvals[:]
        nffile.close()
    else:
        nffile = netcdf.NetCDFFile(fname, "a")
        var = nffile.variables[varname]
        var.assignValue(varvals)
        nffile.close()
    ierr = 0
    return ierr