def open_ncfile(nc_filename):
    """ Opens nc_filename, unzipping if necessary. """
    if nc_filename.find('.gz') >= 0:
        gztemp = gzip.open(nc_filename)
        gztemp.closed = False  # pupynere needs this attribute, but gzip objects don't have it.
        ncdf = netcdf.netcdf_file(gztemp, 'r')
    else:
        ncdf = netcdf.netcdf_file(nc_filename, 'r')
    return ncdf
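# A minimal usage sketch for open_ncfile() above, with hypothetical filenames.
# It assumes the module-level imports the function relies on: gzip, and
# pupynere bound to the name `netcdf`.
import gzip
import pupynere as netcdf

f = open_ncfile('data.nc.gz')   # gzipped file is read through a gzip object
print(list(f.variables.keys()))
f.close()
f = open_ncfile('data.nc')      # a plain NetCDF file is opened directly
f.close()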
def runTest(self):
    """Should not be able to create a non-first unlimited dimension"""
    f = pupynere.netcdf_file(None, 'w')
    f.createDimension('n1', n1dim)
    with self.assertRaises(ValueError):
        f.createDimension('n2', None)
    self.assertEqual(f.dimensions, {'n1': n1dim})
def csv_to_nc(argv):
    """Transform csv file into netcdf format.

    Args:
     - argv: filename, example: data.csv
    Returns:
     - netcdf file, example: data.nc
    Usage:
     $ ./csv_to_nc.py data.csv
    """
    with open(argv, 'r') as f:
        ll = [l.strip().split(',') for l in f.readlines() if not l.startswith('#')]
    vv = zip(*[map(float, l) for l in ll])
    transformed_file = update_name(argv)
    nc = pupynere.netcdf_file(transformed_file, 'w')
    nc.createDimension('dim', None)
    for i, item in enumerate(vv):
        nc.createVariable('var_%02d' % i, 'd', ('dim',))[:] = item
    return 0
def load_energy_score_data(fname, dtype=float, comments="#", delimiter="\t",
                           converters=None, skiprows=1, usecols=None,
                           unpack=False, ndmin=0):
    """
    Reads a data file and returns a Numpy Array containing the data.

    Two input file types are allowed, tab-delimited data and NetCDF files.
    For tab-delimited data, the function parameters are passed along to
    numpy.loadtxt. For NetCDF data, the function arguments, except for fname,
    are ignored. Data is assumed to be in the "ranks" variable. Loading
    NetCDF data requires the pupynere module.

    @param fname: Input file name
    @type fname: str

    @return: Numpy Array
    """
    use_netcdf = False
    with open(fname, "r") as infile:
        magic_number = infile.read(3)
        if magic_number == "CDF":
            use_netcdf = True

    if use_netcdf:
        import pupynere as nc
        f = nc.netcdf_file(fname, "r")
        return f.variables['ranks'].data
    else:
        from numpy import loadtxt
        return loadtxt(fname, dtype, comments, delimiter, converters,
                       skiprows, usecols, unpack, ndmin)
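# A brief usage sketch (hypothetical filenames): load_energy_score_data()
# dispatches on the 3-byte "CDF" magic number that starts every classic
# NetCDF file, so the caller never has to say which format it is passing.
ranks = load_energy_score_data('scores.txt')   # tab-delimited, via numpy.loadtxt
ranks = load_energy_score_data('scores.nc')    # classic NetCDF, via pupynere
print(ranks.shape)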
def test_readtf(self):
    ncf = btf2nc(self.dummyfile, badctf=self.data)
    x = [n for n in ncf.variables]
    ncf.close()  # will write a netcdf version out!
    n2 = netcdf_file(self.dummyfile)
    y = [n for n in n2.variables]
    self.assertEqual(x, y)
def runTest(self):
    """Should not be able to create a variable where there are multiple unlimited dimensions"""
    f = pupynere.netcdf_file(None, 'w')
    f.createDimension('n1', None)
    f.createDimension('n2', n2dim)
    with self.assertRaises(ValueError):
        f.createVariable('data1', None, ('n2', 'n1'))
    f.close()
def setUp(self):
    self.file = FILE_NAME
    f = pupynere.netcdf_file(self.file, 'w')
    # scalar variable.
    temp = f.createVariable(VAR_NAME, VAR_TYPE)
    temp.assignValue(VAR_VAL)
    f.close()
def runTest(self):
    """Testing a basic read case"""
    f = pupynere.netcdf_file(FILE_NAME, "r")
    self.assertEqual(f.history, "Created for a test")
    self.assertEqual(f.location, u"北京")
    time = f.variables["time"]
    self.assertEqual(time.units, u"µs since 2008-01-01")
    self.assertEqual(time.shape, (10,))
    self.assertEqual(time[-1], 9)
    f.close()
def setUp(self):
    self.file = FILE_NAME
    f = pupynere.netcdf_file(self.file, "w")
    f.history = "Created for a test"
    f.location = u"北京"
    f.createDimension("time", 10)
    time = f.createVariable("time", "i", ("time",))
    time[:] = range(10)
    time.units = u"µs since 2008-01-01"
    f.close()
def runTest(self):
    """Testing the creation of an unlimited dimension"""
    f = pupynere.netcdf_file(self.file, 'r')
    foo = f.variables['data1']
    # check shape.
    self.assertEqual(foo.shape, (2*n1dim, n2dim, n3dim))
    # check data.
    self.assertTrue((foo[0:n1dim, :, :] == ranarr).all())
    self.assertTrue((foo[n1dim:3*n1dim, :, :] == 2.*ranarr).all())
    f.close()
def readproptmser(exptitle, expnr, prop, username='******'):
    expsdir = '/home/%s/Les/Experiments' % (username)
    expdir = expsdir + '/%s/%s' % (exptitle, expnr)
    f = pu.netcdf_file(expdir + '/tmser.%s.nc' % (expnr))
    time = f.variables['time'][:]
    prop = f.variables[prop][:]
    return {'time': time, 'prop': prop}
def setUp(self):
    self.file = tempfile.mktemp(".nc")
    f = pupynere.netcdf_file(self.file, 'w')
    # foo has a single unlimited dimension
    f.createDimension('n1', None)
    f.createDimension('n2', n2dim)
    f.createDimension('n3', n3dim)
    foo = f.createVariable('data1', ranarr.dtype.str[1:], ('n1', 'n2', 'n3'))
    # write some data to it.
    foo[:] = ranarr
    foo[n1dim:, :, :] = 2.*ranarr
    f.close()
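# A standalone sketch of the record-dimension behavior the setUp above (and
# its companion runTest) exercises: assigning past the current end of the
# unlimited dimension grows it. Filename and sizes here are illustrative.
import numpy as np
import pupynere

f = pupynere.netcdf_file('unlimited_demo.nc', 'w')
f.createDimension('t', None)                # the record (unlimited) dimension
v = f.createVariable('data', 'd', ('t',))
v[:] = np.arange(5.0)                       # records 0..4
v[5:10] = 2.0 * np.arange(5.0)              # writing past the end grows 't' to 10
f.close()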
def runTest(self):
    """Testing scalar variables"""
    # check dimensions in root group.
    f = pupynere.netcdf_file(self.file, 'r')
    v = f.variables[VAR_NAME]
    # dimensions and shape should be empty tuples
    self.assertTrue(v.dimensions == ())
    self.assertTrue(v.shape == ())
    # check result of getValue and slice
    assert_almost_equal(v.getValue(), VAR_VAL, decimal=6)
    assert_almost_equal(v[:], VAR_VAL, decimal=6)
    f.close()
def read_netcdf(self, fname, frame=0):
    from pupynere import netcdf_file
    nc = netcdf_file(fname)
    self.n = nc.dimensions['atom']
    self.lattice = make_lattice(nc.variables['cell_lengths'][frame],
                                nc.variables['cell_angles'][frame])
    self.g = numpy.linalg.inv(self.lattice)
    self.params = OrderedDict()
    self.properties = OrderedDict()
    self.real = numpy.zeros((self.n, 0), dtype=float)
    self.int = numpy.zeros((self.n, 0), dtype=int)
    self.str = numpy.zeros((self.n, 0), dtype='S10')
    self.logical = numpy.zeros((self.n, 0), dtype=bool)

    vars = nc.variables.keys()
    vars = filter(lambda v: not v in ('cell_angles', 'cell_lengths'), vars)

    # ensure first var is species and second positions
    sp = vars.index('species')
    if sp != 0:
        vars[sp], vars[0] = vars[0], vars[sp]
    pos = vars.index('coordinates')
    if pos != 1:
        vars[pos], vars[1] = vars[1], vars[pos]

    for v in vars:
        d = nc.variables[v].dimensions
        if d[0] != 'frame':
            continue

        value = nc.variables[v][frame]
        if value.dtype == numpy.dtype('|S1'):
            value = [''.join(x).strip() for x in value]

        if len(d) == 1 or (len(d) == 2 and d[1] in ('label', 'string')):
            if len(d) == 2 and d[1] in ('label', 'string'):
                value = ''.join(value)
            self.params[v] = value
        else:
            # Name mangling
            if v == 'coordinates':
                p = 'pos'
            elif v == 'velocities':
                p = 'velo'
            else:
                p = v
            self.add_property(p, value)
def runTest(self):
    """Testing whether we can read a file with no variables"""
    f = pupynere.netcdf_file(self.file, 'r')
    # check attributes in root group.
    # global attributes.
    # check __dict__ method for accessing all netCDF attributes.
    for key, val in ATTDICT.items():
        assert f.__dict__[key] == val
    # check accessing individual attributes.
    assert f.attr0 == ATTR0
    assert f.attr1 == ATTR1
    f.close()
def readtime(exptitle, casetitle, expnr, livedata=False, username='******'):
    expsdir = '/home/%s/Les/Experiments' % (username)
    expdir = expsdir + '/%s/%s' % (exptitle, expnr)
    fddatadir = expdir

    print 'Extracting time array'

    f = pu.netcdf_file(fddatadir + '/fielddump.000.000.%s.nc' % (expnr))
    t = f.variables['time'][:]
    tsteps = len(t)

    return {'tsteps': tsteps, 't': t}
def write_nc(self):
    esec_list = []
    ex = ['col1', 'col2', 'col3', 'latitude', 'longitude']   # Column labels
    # ex = csv.reader(open('info.txt'))
    reader = csv.DictReader(open('test.txt'))   # open the file and save the data in reader

    # Create a list for each column label
    for v in ex:
        exec('%s_list = []' % (v))

    # Fill each list with its column's data
    for r in reader:
        gmtDTString = r['Time']   # read the time and convert to the CF-1.0 netCDF convention
        tt = time.strptime(gmtDTString, '%Y-%m-%d %H:%M:%S')
        diff = datetime.datetime(*tt[:6]) - datetime.datetime(1970, 1, 1, 0, 0, 0)
        esec_list.append(diff.days * 86400 + diff.seconds)
        for v in ex:
            exec("%s_list.append(r['%s'])" % (v, v,))

    # ======================================================================
    # Create the NetCDF file
    outFile = 'foo1.nc'
    self.ncFile = netcdf_file(outFile, 'w')
    self.outFile = outFile

    # Trajectory dataset, time is the only netCDF dimension
    self.ncFile.createDimension('TIME', len(esec_list))
    self.time = self.ncFile.createVariable('TIME', 'float64', ('TIME',))
    self.time.units = 'seconds since 1970-01-01'
    self.time[:] = esec_list

    # Write variables
    for v in ex:
        ncVar = v.replace(' ', '_', 42)
        # Only Latitude, Longitude, Depth, and Time variables are upper case to match other Glider data
        print(v)
        if v == 'Latitude' or v == 'Longitude':
            exec("self.%s = self.ncFile.createVariable('%s', 'float64', ('TIME',))" % (ncVar.lower(), ncVar.upper(), ))
        else:
            exec("self.%s = self.ncFile.createVariable('%s', 'float64', ('TIME',))" % (ncVar.lower(), ncVar, ))
        exec("self.%s.long_name = '%s'" % (ncVar.lower(), v, ))
        exec("self.%s[:] = %s_list" % (ncVar.lower(), ncVar, ))

    # Fudge up a depth variable with a value of zero
    self.depth = self.ncFile.createVariable('DEPTH', 'float64', ('TIME',))
    self.depth.long_name = 'Depth'
    self.depth.standard_name = 'depth'
    self.depth.units = 'm'
    self.depth[:] = np.zeros(len(self.time[:]))

    self.add_global_metadata()
    self.ncFile.close()
def __getitem__(self, index):
    index = fix_slice(index, self.shape)

    # create a new axis
    if self.axis is None:
        # get the slice along the aggregation axis, and leave the rest for
        # the variable itself
        slice_, index = index[0], index[1:]

        data = []
        for file, n in self.count[slice_]:
            f = netcdf_file(file)
            data.append(f.variables[self.name][index])
            f.close()
        return np.array(data).astype(self.dtype)

    # concatenate along an existing axis
    else:
        # convert index to list so we can change it
        index = list(index)

        # get the slice along the aggregation axis and store it in a
        # boolean array that we'll map to the files
        slice_ = index[self.axis]
        indexes = np.zeros(self.shape[self.axis], bool)
        indexes[slice_] = 1

        offset = 0
        data = []
        for file, n in self.count:
            selected_here = indexes[offset:offset+n]
            if any(selected_here):
                index[self.axis] = selected_here
                f = netcdf_file(file)
                data.append(f.variables[self.name][tuple(index)])
                f.close()
            offset += n
        return np.concatenate(data, axis=self.axis).astype(self.dtype)
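# The __getitem__ above stitches one logical variable together from several
# NetCDF files. A simplified standalone sketch of the same concatenation
# pattern (reading each part whole rather than per-slice), with a
# hypothetical (filename, record count) list standing in for self.count:
import numpy as np
from pupynere import netcdf_file

count = [('part0.nc', 100), ('part1.nc', 100)]   # hypothetical part files

def read_aggregated(name, axis=0):
    """Read variable `name` from every part file and join along `axis`."""
    data = []
    for fname, n in count:
        f = netcdf_file(fname)
        data.append(f.variables[name][:])
        f.close()
    return np.concatenate(data, axis=axis)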
def transform(fl):
    """Transforms csv file into netcdf format.

    :Args:
     - fl, csv data file, example: data.csv

    :Returns:
     - netcdf file, example: data.nc
    """
    nc = pupynere.netcdf_file(new_name(fl), 'w')
    nc.createDimension('dim', None)
    for i, item in enumerate(make_float(fl)):
        nc.createVariable('var_%02d' % i, 'd', ('dim',))[:] = item
def readprop(exptitle, expnr, prop, username='******'):
    expsdir = '/home/%s/Les/Experiments' % (username)
    expdir = expsdir + '/%s/%s' % (exptitle, expnr)
    f = pu.netcdf_file(expdir + '/profiles.%s.nc' % (expnr))

    # columns of data
    zt = f.variables['zt'][:]  # full levels
    zm = f.variables['zm'][:]  # half levels

    # rows of data
    time = f.variables['time'][:]

    p = f.variables[prop][:, :]

    return {'zt': zt, 'zm': zm, 'time': time, prop: p}
def __init__(self, cfg):
    """ """
    self.cfg = cfg

    self.set_logger()
    self.logger.info("Initializing AVISO_fetch class")
    self.logger.debug("cfg: %s" % cfg)

    if ('username' not in self.cfg) or ('password' not in self.cfg):
        self.logger.error("Aviso DAP server requires a registered username and password. I'll abort.")
        return

    if 'urlbase' not in self.cfg:
        self.cfg['urlbase'] = \
            "http://%s:%[email protected]/thredds/dodsC" % \
            (self.cfg['username'], self.cfg['password'])
    self.logger.debug("urlbase: %s" % self.cfg['urlbase'])

    #if type(self.cfg['map']) == str:
    #    self.cfg['map'] = [self.cfg['map']]

    if 'force_download' not in self.cfg:
        self.cfg['force_download'] = False

    if (self.cfg['datadir'] != '') and \
            (not os.path.isdir(self.cfg['datadir'])):
        print "There is no data directory: %s" % self.cfg['datadir']
        return

    self.file = os.path.join(self.cfg['datadir'], self.cfg['filename'])
    try:
        self.nc = netCDF4.Dataset(self.file, 'w', format='NETCDF4')
    except:
        self.nc = pupynere.netcdf_file(self.file, 'w')
    # ----------
    self.nc.created_datetime = datetime.now().isoformat()
    self.nc.metadata_map = self.cfg['map']
    self.nc.metadata_type = self.cfg['type']
    self.nc.metadata_urlbase = self.cfg['urlbase']
    self.nc.metadata_force_download = str(self.cfg['force_download'])
    # ----------
    self.download_data()
    self.nc.close()
def write_netcdf(self, out_file, in_url):
    # Check parent directory and create if needed
    dirName = os.path.dirname(out_file)
    try:
        os.makedirs(dirName)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise

    # Create the NetCDF file
    self.logger.debug("Creating netCDF file %s", out_file)
    self.ncFile = netcdf_file(out_file, 'w')

    # If specified on command line override the default generic title with what is specified
    self.ncFile.title = 'LRAUV interpolated data'

    # Combine any summary text specified on command line with the generic summary stating the original source file
    self.ncFile.summary = 'Observational oceanographic data translated with modification from original data file %s' % in_url

    # add in time dimensions first
    ts_key = []
    for key in self.all_sub_ts.keys():
        if key.find('time') != -1:
            ts = self.all_sub_ts[key]
            if not ts.empty:
                self.logger.debug("Adding in record variable %s", key)
                v = self.initRecordVariable(key)
                v[:] = self.all_sub_ts[key]
        else:
            ts_key.append(key)

    # add in other remaining time series
    for key in ts_key:
        ts = self.all_sub_ts[key]
        if not ts.empty:
            try:
                logging.debug("Adding in record variable %s", key)
                v = self.initRecordVariable(key)
                v[:] = self.all_sub_ts[key]
            except Exception, e:
                self.logger.error(e)
                continue
def __init__(self, filename, append=True, verbose=False, allowrecord=True):
    (head, tail) = os.path.split(filename)
    tmpfile = os.path.join(head, "0-%d-%s" % (os.getpid(), tail))
    netcdf.netcdf_file.__init__(self, tmpfile, "w")
    self.__dict__['_targetfile'] = filename  # Setattr writes also to self._attributes
    self.__dict__['_verbose'] = verbose      # We do not want it...
    self.__dict__['_records'] = allowrecord  # No record dimension

    oldF = None
    if append:
        try:
            oldF = netcdf.netcdf_file(filename, "r")
        except IOError as e:
            if verbose:
                print "Previous file can't be updated..."

    # Copy from the oldfile
    if oldF:
        # Copy all stuff from the old file
        for k, v in oldF._attributes.items():
            if hasattr(self, k):
                # Overwriting targetfile is harmful
                # it should not appear in files anyhow
                continue
            self.__setattr__(k, v)
        for dim in oldF._dims:
            if (allowrecord or oldF.dimensions[dim] > 0):
                value = oldF.dimensions[dim]
            else:
                value = oldF.variables[dim].shape[0]
            if value > -1:
                self.createDimension(dim, int(value))
            else:
                self.createDimension(dim, None)
        for key, var in oldF.variables.items():
            ovar = self.createVariable(key, var.typecode(), var.dimensions)
            for k, v in var._attributes.items():
                ovar.__setattr__(k, v)
            if len(var.data.shape):
                ovar[:] = var.data.copy()
            else:
                ovar = var.data.copy()
        oldF.close()
    else:
        self.history = ""
def writeNcFile(data, fileName=None, oldStyle=False):
    if not ncOk:
        raise Exception('module pupynere not found, please make sure it is installed, writeNcFile() failed!')
    if not fileName:
        fileName = data['name'] + '.nc'
    f = netcdf_file(fileName, 'w')
    f.createDimension('time', data['time'].shape[0])
    f.file_format = file_format
    if oldStyle:
        f.createDimension('scalar', 1)
    if 'comment' in data:
        f.comment = data['comment']
    else:
        f.comment = 'created by MeteonormFile.py (v%s)' % version
    if 'source_file' in data:
        f.source_file = str(data['source_file'])
    for vn in ('latitude', 'longitude', 'height'):
        setattr(f, vn, data[vn])
        if oldStyle:
            v = f.createVariable(vn, 'd', ('scalar',))
            v[:] = [data[vn]]
    setattr(f, 'longitude_0', 15.0 * data['timezone'])
    if oldStyle:
        v = f.createVariable('longitude_0', 'd', ('scalar',))
        v[:] = [15.0 * data['timezone']]
    for vn in variables.keys():
        t = variables[vn][1]
        v = f.createVariable(vn, t, ('time',))
        v[:] = data[vn].astype(t)
        oname = variables[vn][0]
        if oname.startswith('<'):
            oname = oname[1:]
        if oname.endswith('>'):
            oname = oname[:-1]
        v.original_name = oname
        v.unit = variables[vn][2]
        if vn == 'time':
            v.extrapolation = 'periodic'
    f.sync()
    f.close()
def writeNcFile(data, fileName=None, oldStyle=False):
    if not ncOk:
        raise Exception('module pupynere not found, please make sure it is installed, writeNcFile() failed!')
    if not fileName:
        fileName = data['name'] + '_weather.nc'
    f = netcdf_file(fileName, 'w')
    f.createDimension('time', data['time'].shape[0])
    f.file_format = file_format
    if oldStyle:
        f.createDimension('scalar', 1)
    if 'comment' in data:
        f.comment = data['comment']
    else:
        f.comment = 'created by MeteonormFile.py (v%s)' % version
    if 'source_file' in data:
        f.source_file = str(data['source_file'])
    for vn in ('latitude', 'longitude', 'height'):
        setattr(f, vn, data[vn])
        if oldStyle:
            v = f.createVariable(vn, 'd', ('scalar',))
            v[:] = [data[vn]]
    setattr(f, 'longitude_0', 15.0 * data['timezone'])
    if oldStyle:
        v = f.createVariable('longitude_0', 'd', ('scalar',))
        v[:] = [15.0 * data['timezone']]
    for vn in variables.keys():
        t = variables[vn][1]
        v = f.createVariable(vn, t, ('time',))
        v[:] = data[vn].astype(t)
        oname = variables[vn][0]
        if oname.startswith('<'):
            oname = oname[1:]
        if oname.endswith('>'):
            oname = oname[:-1]
        v.original_name = oname
        v.unit = variables[vn][2]
        if vn == 'time':
            v.extrapolation = 'periodic'
    f.sync()
    f.close()
def write_pctd(self, inFile):
    '''
    Write lists out as NetCDF using the base name of the file for the .nc file
    that this creates.
    '''
    outFile = '.'.join(inFile.split('.')[:-1]) + '.nc'

    # Create the NetCDF file
    self.ncFile = netcdf_file(outFile, 'w')

    # If specified on command line override the default generic title with what is specified
    self.ncFile.title = 'Profile CTD cast data'
    if self.args.title:
        self.ncFile.title = self.args.title

    # Combine any summary text specified on command line with the generic summary stating the original source file
    self.ncFile.summary = 'Observational oceanographic data translated with no modification from original data file %s' % inFile
    if self.args.summary:
        self.ncFile.summary = self.args.summary
        if not self.args.summary.endswith('.'):
            self.ncFile.summary += '.'
        self.ncFile.summary += ' Translated with no modification from original data file %s' % inFile

    # If specified on command line override the default generic license with what is specified
    if self.args.license:
        self.ncFile.license = self.args.license

    # Trajectory dataset, time is the only netCDF dimension
    self.ncFile.createDimension('time', len(self.esec_list))
    self.time = self.ncFile.createVariable('time', 'float64', ('time',))
    self.time.standard_name = 'time'
    self.time.units = 'seconds since 1970-01-01'
    self.time[:] = self.esec_list

    # Record Variables - coordinates for trajectory - save in the instance and use for metadata generation
    self.latitude = self.ncFile.createVariable('latitude', 'float64', ('time',))
    self.latitude.long_name = 'LATITUDE'
    self.latitude.standard_name = 'latitude'
    self.latitude.units = 'degree_north'
    self.latitude[:] = self.lat_list

    self.longitude = self.ncFile.createVariable('longitude', 'float64', ('time',))
    self.longitude.long_name = 'LONGITUDE'
    self.longitude.standard_name = 'longitude'
    self.longitude.units = 'degree_east'
    self.longitude[:] = self.lon_list

    self.depth = self.ncFile.createVariable('depth', 'float64', ('time',))
    self.depth.long_name = 'DEPTH'
    self.depth.standard_name = 'depth'
    self.depth.units = 'm'
    self.depth[:] = csiro.depth(self.pr_list, self.lat_list)  # Convert pressure to depth

    # Record Variables - Profile CTD Data
    temp = self.ncFile.createVariable('TEMP', 'float64', ('time',))
    temp.long_name = 'Temperature, [ITS-90, deg C]'
    temp.standard_name = 'sea_water_temperature'
    temp.coordinates = 'time depth latitude longitude'
    temp.units = 'Celsius'
    temp[:] = self.t_list

    sal = self.ncFile.createVariable('PSAL', 'float64', ('time',))
    sal.long_name = 'Salinity, Practical [PSU]'
    sal.standard_name = 'sea_water_salinity'
    sal.coordinates = 'time depth latitude longitude'
    sal[:] = self.sal_list

    xmiss = self.ncFile.createVariable('xmiss', 'float64', ('time',))
    xmiss.long_name = 'Beam Transmission, Chelsea/Seatech'
    xmiss.coordinates = 'time depth latitude longitude'
    xmiss.missing_value = self.missing_value
    xmiss._FillValue = self._FillValue
    xmiss.units = '%'
    xmiss[:] = self.xmiss_list

    if self.ecofl_list:
        ecofl = self.ncFile.createVariable('ecofl', 'float64', ('time',))
        ecofl.long_name = 'Fluorescence, WET Labs ECO-AFL/FL'
        ecofl.coordinates = 'time depth latitude longitude'
        ecofl.units = 'mg/m^3'
        ecofl[:] = self.ecofl_list

    if self.wetstar_list:
        wetstar = self.ncFile.createVariable('wetstar', 'float64', ('time',))
        wetstar.long_name = 'Fluorescence, WET Labs WETstar'
        wetstar.coordinates = 'time depth latitude longitude'
        wetstar.units = 'mg/m^3'
        wetstar[:] = self.wetstar_list

    if self.oxygen_list:
        oxygen = self.ncFile.createVariable('oxygen', 'float64', ('time',))
        oxygen.long_name = 'Oxygen, SBE 43'
        oxygen.coordinates = 'time depth latitude longitude'
        oxygen.units = 'ml/l'
        oxygen[:] = self.oxygen_list

    if self.args.analog:
        if self.analog_list:
            analog = self.ncFile.createVariable(self.an_var, 'float64', ('time',))
            analog.coordinates = 'time depth latitude longitude'
            analog.units = self.an_units
            analog[:] = self.analog_list

    self.add_global_metadata()
    self.ncFile.close()
    print 'Wrote ' + outFile
wind_capacity = capacity['wind']
solar_capacity = capacity['solar']

output = best_results['output']
wind_out = output['wind']
solar_out = output['solar']
hydro_out = output['hydro']
gas_out = output['fossil']

nstations_w = len(wind_capacity)
nstations_s = len(solar_capacity)
nsteps = len(wind_out)

# Write to netcdf file:
o = nc.netcdf_file(file + '.nc', 'w')
o.createDimension('nstations_wind', nstations_w)
o.createDimension('nstations_solar', nstations_s)
o.createDimension('nsteps', nsteps)

wind_output = o.createVariable("ts_wind", 'f', ('nsteps',))
solar_output = o.createVariable("ts_solar", 'f', ('nsteps',))
hydro_output = o.createVariable("ts_hydro", 'f', ('nsteps',))
gas_output = o.createVariable("ts_gas", 'f', ('nsteps',))
demand_output = o.createVariable("ts_demand", 'f', ('nsteps',))
wind_cap = o.createVariable("wind_cap", 'f', ('nstations_wind',))
solar_cap = o.createVariable("solar_cap", 'f', ('nstations_solar',))

wind_output[:] = wind_out
def save_emission_old(self):
    print "debug: Emission save start"
    self.progress("Saving Result")
    for n in range(self.maxdom):
        domain = self.domains[n]
        filename_1 = opj("{0}/wrfchemi_00z_d{1:0>2}".format(self.save_dir, n + 1))
        filename_2 = opj("{0}/wrfchemi_12z_d{1:0>2}".format(self.save_dir, n + 1))

        # file1
        cdf_file = pupynere.netcdf_file(filename_1, 'w')
        cdf_file.createDimension('Time', 12)
        cdf_file.createDimension('south_north', domain.h)
        cdf_file.createDimension('emissions_zdim', 2)
        cdf_file.createDimension('DateStrLen', 19)
        cdf_file.createDimension('west_east', domain.w)
        cdf_vars = {}
        for plt in self.pollutant_list:
            cdf_vars[plt] = cdf_file.createVariable(plt, 'f', ('Time', 'emissions_zdim', 'south_north', 'west_east'))
            cdf_vars[plt].units = 'mol km^-2 hr^-1'
        for plt in self.additional_pollutan_list:
            cdf_vars[plt] = cdf_file.createVariable(plt, 'f', ('Time', 'emissions_zdim', 'south_north', 'west_east'))
            cdf_vars[plt].units = 'mol km^-2 hr^-1'
        cdf_vars['times'] = cdf_file.createVariable('Times', 'c', ('Time', 'DateStrLen'))
        for i in range(12):      # time
            for j in range(1):   # z level
                for y in range(domain.h):
                    for x in range(domain.w):
                        for p, plt in enumerate(self.pollutant_str):
                            cdf_vars[plt][i][j][y][x] = self.domain_emiss[n][y][x][p]
        cdf_file.close()

        # file2
        cdf_file = pupynere.netcdf_file(filename_2, 'w')
        cdf_file.createDimension('Time', 12)
        cdf_file.createDimension('south_north', domain.h)
        cdf_file.createDimension('emissions_zdim', 2)
        cdf_file.createDimension('DateStrLen', 19)
        cdf_file.createDimension('west_east', domain.w)
        cdf_vars = {}
        for plt in self.pollutant_list:
            cdf_vars[plt] = cdf_file.createVariable(plt, 'f', ('Time', 'emissions_zdim', 'south_north', 'west_east'))
            cdf_vars[plt].units = 'mol km^-2 hr^-1'
        for plt in self.additional_pollutan_list:
            cdf_vars[plt] = cdf_file.createVariable(plt, 'f', ('Time', 'emissions_zdim', 'south_north', 'west_east'))
            cdf_vars[plt].units = 'mol km^-2 hr^-1'
        cdf_vars['times'] = cdf_file.createVariable('Times', 'c', ('Time', 'DateStrLen'))
        for i in range(12):      # time
            for j in range(1):   # z level
                for y in range(domain.h):
                    for x in range(domain.w):
                        for p, plt in enumerate(self.pollutant_str):
                            cdf_vars[plt][i][j][y][x] = self.domain_emiss[n][y][x][p]
        cdf_file.close()
    print "debug: Emission save end"
    print "------------------------------"
def write_pctd(self, inFile):
    '''
    Write lists out as NetCDF using the base name of the file for the .nc file
    that this creates.
    '''
    outFile = '.'.join(inFile.split('.')[:-1]) + '.nc'

    # Create the NetCDF file
    self.ncFile = netcdf_file(outFile, 'w')
    self.outFile = outFile

    # Trajectory dataset, time is the only netCDF dimension
    self.ncFile.createDimension('time', len(self.esec_list))
    self.time = self.ncFile.createVariable('time', 'float64', ('time',))
    self.time.standard_name = 'time'
    self.time.units = 'seconds since 1970-01-01'
    self.time[:] = self.esec_list

    # Record Variables - coordinates for trajectory - save in the instance and use for metadata generation
    self.latitude = self.ncFile.createVariable('latitude', 'float64', ('time',))
    self.latitude.long_name = 'LATITUDE'
    self.latitude.standard_name = 'latitude'
    self.latitude.units = 'degree_north'
    self.latitude[:] = self.lat_list

    self.longitude = self.ncFile.createVariable('longitude', 'float64', ('time',))
    self.longitude.long_name = 'LONGITUDE'
    self.longitude.standard_name = 'longitude'
    self.longitude.units = 'degree_east'
    self.longitude[:] = self.lon_list

    self.depth = self.ncFile.createVariable('depth', 'float64', ('time',))
    self.depth.long_name = 'DEPTH'
    self.depth.standard_name = 'depth'
    self.depth.units = 'm'
    self.depth[:] = csiro.depth(self.pr_list, self.lat_list)  # Convert pressure to depth

    # Record Variables - Profile CTD Data
    temp = self.ncFile.createVariable('TEMP', 'float64', ('time',))
    temp.long_name = 'Temperature, [ITS-90, deg C]'
    temp.standard_name = 'sea_water_temperature'
    temp.coordinates = 'time depth latitude longitude'
    temp.units = 'Celsius'
    temp[:] = self.t_list

    sal = self.ncFile.createVariable('PSAL', 'float64', ('time',))
    sal.long_name = 'Salinity, Practical [PSU]'
    sal.standard_name = 'sea_water_salinity'
    sal.coordinates = 'time depth latitude longitude'
    sal[:] = self.sal_list

    xmiss = self.ncFile.createVariable('xmiss', 'float64', ('time',))
    xmiss.long_name = 'Beam Transmission, Chelsea/Seatech'
    xmiss.coordinates = 'time depth latitude longitude'
    xmiss.units = '%'
    xmiss[:] = self.xmiss_list

    if self.ecofl_list:
        ecofl = self.ncFile.createVariable('ecofl', 'float64', ('time',))
        ecofl.long_name = 'Fluorescence, WET Labs ECO-AFL/FL'
        ecofl.coordinates = 'time depth latitude longitude'
        ecofl.units = 'mg/m^3'
        ecofl[:] = self.ecofl_list

    if self.wetstar_list:
        wetstar = self.ncFile.createVariable('wetstar', 'float64', ('time',))
        wetstar.long_name = 'Fluorescence, WET Labs WETstar'
        wetstar.coordinates = 'time depth latitude longitude'
        wetstar.units = 'mg/m^3'
        wetstar[:] = self.wetstar_list

    if self.oxygen_list:
        oxygen = self.ncFile.createVariable('oxygen', 'float64', ('time',))
        oxygen.long_name = 'Oxygen, SBE 43'
        oxygen.coordinates = 'time depth latitude longitude'
        oxygen.units = 'ml/l'
        oxygen[:] = self.oxygen_list

    if self.oxyps_list:
        oxygen = self.ncFile.createVariable('oxygen_ps', 'float64', ('time',))
        oxygen.long_name = 'Oxygen, SBE 43'
        oxygen.coordinates = 'time depth latitude longitude'
        oxygen.units = '%'
        oxygen[:] = self.oxyps_list

    self.add_global_metadata()
    self.ncFile.close()
def write_gpctd(self, inFile='waveglider_gpctd_WG.txt', outFile='waveglider_gpctd_WG.nc'):
    '''
    Read in records from one of the waveglider data files and write out as
    NetCDF. The records look like:

    GPCTD Timestamp, Latitude, Longitude, Pressure(decibars), Temperature(degrees C), Salinity(PSU), Conductivity(S/m), Dissolved Oxygen(frequency), Dissolved Oxygen(mL/L)
    2012-05-21 20:10:00, 36.7989, -121.8609, 0.280, 12.169, 33.764, 3.889, 4390.700, 5.374
    2012-05-21 20:10:10, 36.7989, -121.8609, 0.330, 12.148, 33.779, 3.888, 4397.800, 5.387
    '''
    # Initialize lists for the data to be parsed and written
    esec_list = []
    lat_list = []
    lon_list = []
    dep_list = []
    tem_list = []
    sal_list = []
    do_list = []

    # Read data in from the input file
    reader = csv.DictReader(open(os.path.join(self.parentDir, inFile)))
    last_esec = 0
    for r in reader:
        gmtDTString = r['GPCTD Timestamp']
        tt = time.strptime(gmtDTString, '%Y-%m-%d %H:%M:%S')
        diff = datetime.datetime(*tt[:6]) - datetime.datetime(1970, 1, 1, 0, 0, 0)
        esec = diff.days * 86400 + diff.seconds
        if esec > last_esec:
            esec_list.append(esec)
            lat_list.append(r[' Latitude'])
            lon_list.append(r[' Longitude'])
            dep_list.append(r[' Pressure(decibars)'])  # decibars is darn close to meters at the surface
            tem_list.append(r[' Temperature(degrees C)'])
            sal_list.append(r[' Salinity(PSU)'])
            do_list.append(r[' Dissolved Oxygen(mL/L)'])
            last_esec = esec
        else:
            print(('Skipping esec = %d' % esec))

    # Create the NetCDF file
    self.ncFile = netcdf_file(outFile, 'w')
    self.outFile = outFile

    # Trajectory dataset, time is the only netCDF dimension
    self.ncFile.createDimension('TIME', len(esec_list))
    self.time = self.ncFile.createVariable('TIME', 'float64', ('TIME',))
    self.time.units = 'seconds since 1970-01-01'
    self.time.standard_name = 'time'
    self.time[:] = esec_list

    # Record Variables - coordinates for trajectory - save in the instance and use for metadata generation
    self.latitude = self.ncFile.createVariable('latitude', 'float64', ('TIME',))
    self.latitude.long_name = 'LATITUDE'
    self.latitude.standard_name = 'latitude'
    self.latitude.units = 'degree_north'
    self.latitude[:] = lat_list

    self.longitude = self.ncFile.createVariable('longitude', 'float64', ('TIME',))
    self.longitude.long_name = 'LONGITUDE'
    self.longitude.standard_name = 'longitude'
    self.longitude.units = 'degree_east'
    self.longitude[:] = lon_list

    self.depth = self.ncFile.createVariable('depth', 'float64', ('TIME',))
    self.depth.long_name = 'DEPTH'
    self.depth.standard_name = 'depth'
    self.depth.units = 'm'
    self.depth[:] = dep_list

    # Record Variables - CTD Data
    temp = self.ncFile.createVariable('TEMP', 'float64', ('TIME',))
    temp.long_name = 'Sea Water Temperature in-situ ITS-90 or IPTS-68 scale'
    temp.standard_name = 'sea_water_temperature'
    temp.units = 'Celsius'
    temp.coordinates = 'TIME latitude longitude depth'
    temp[:] = tem_list

    sal = self.ncFile.createVariable('PSAL', 'float64', ('TIME',))
    sal.long_name = 'Sea Water Salinity in-situ PSS 1978 scale'
    sal.standard_name = 'sea_water_salinity'
    sal.coordinates = 'TIME latitude longitude depth'
    sal[:] = sal_list

    do = self.ncFile.createVariable('oxygen', 'float64', ('TIME',))
    do.long_name = 'Dissolved Oxygen'
    do.units = 'ml/l'
    do.coordinates = 'TIME latitude longitude depth'
    do[:] = do_list

    self.add_global_metadata()
    self.ncFile.close()
class InterpolatorWriter(BaseWriter):

    logger = logging.getLogger('lrauvNc4ToNetcdf')
    fh = logging.StreamHandler()
    f = logging.Formatter("%(levelname)s %(asctime)sZ %(filename)s %(funcName)s():%(lineno)d %(message)s")
    fh.setFormatter(f)
    logger.addHandler(fh)
    logger.setLevel(logging.DEBUG)

    def write_netcdf(self, outFile, inUrl):
        # Check parent directory and create if needed
        dirName = os.path.dirname(outFile)
        try:
            os.makedirs(dirName)
        except OSError, e:
            if e.errno != errno.EEXIST:
                raise

        # Create the NetCDF file
        self.ncFile = netcdf_file(outFile, 'w')

        # If specified on command line override the default generic title with what is specified
        self.ncFile.title = 'LRAUV interpolated data'

        # Combine any summary text specified on command line with the generic summary stating the original source file
        self.ncFile.summary = 'Observational oceanographic data translated with modification from original data file %s' % inUrl

        # If specified on command line override the default generic license with what is specified

        # Trajectory dataset, time is the only netCDF dimension
        self.ncFile.createDimension('time', len(self.esec_list))
        self.time = self.ncFile.createVariable('time', 'float64', ('time',))
        self.time.standard_name = 'time'
        self.time.units = 'seconds since 1970-01-01'
        self.time[:] = self.esec_list

        # Record Variables - coordinates for trajectory - save in the instance and use for metadata generation
        self.latitude = self.ncFile.createVariable('latitude', 'float64', ('time',))
        self.latitude.long_name = 'LATITUDE'
        self.latitude.standard_name = 'latitude'
        self.latitude.units = 'degree_north'
        i = self.parms.index('latitude')
        self.latitude[:] = self.parm_sub_ts[i]

        self.longitude = self.ncFile.createVariable('longitude', 'float64', ('time',))
        self.longitude.long_name = 'LONGITUDE'
        self.longitude.standard_name = 'longitude'
        self.longitude.units = 'degree_east'
        i = self.parms.index('longitude')
        self.longitude[:] = self.parm_sub_ts[i]

        self.depth = self.ncFile.createVariable('depth', 'float64', ('time',))
        self.depth.long_name = 'DEPTH'
        self.depth.standard_name = 'depth'
        self.depth.units = 'm'
        i = self.parms.index('depth')
        self.depth[:] = self.parm_sub_ts[i]

        # add in parameters
        for i in range(len(self.parms)):
            ts = self.parm_sub_ts[i]
            # don't record empty variables
            if not ts.empty:
                parm = self.parms[i]
                v = self.initRecordVariable(parm)
                v[:] = self.parm_sub_ts[i]

        self.add_global_metadata()
        self.ncFile.close()
def serialize(dataset):
    buf = StringIO()
    f = netcdf_file(buf, 'w', version=2)

    # Global attributes.
    nc_global = dataset.attributes.pop('NC_GLOBAL', {})
    for k, v in nc_global.items():
        if not isinstance(v, dict):
            setattr(f, k, v)
    for k, v in dataset.attributes.items():
        if not isinstance(v, dict):
            setattr(f, k, v)

    # Gridded data.
    for grid in walk(dataset, GridType):
        # Add dimensions.
        for dim, map_ in grid.maps.items():
            if dim not in f.dimensions:
                # check if this is a record dimension
                if ('DODS_EXTRA' in dataset.attributes and
                        dataset.attributes['DODS_EXTRA']['Unlimited_Dimension'] == dim):
                    length = None
                else:
                    length = map_.shape[0]
                f.createDimension(dim, length)
                var = f.createVariable(dim, map_.type.typecode, (dim,))
                var[:] = numpy.asarray(map_)
                for k, v in map_.attributes.items():
                    if not isinstance(v, dict):
                        setattr(var, k, v)

        # Add the var.
        var = f.createVariable(grid.name, grid.type.typecode, grid.dimensions)
        var[:] = numpy.asarray(grid.array)
        for k, v in grid.attributes.items():
            if not isinstance(v, dict):
                setattr(var, k, v)

    # Sequences.
    for seq in walk(dataset, SequenceType):
        n = len(seq.data)
        dim, i = 'axis_0', 0
        while dim in f.dimensions:
            i += 1
            dim = 'axis_%d' % i
        f.createDimension(dim, None)
        var = f.createVariable(dim, 'i', (dim,))
        var[:] = numpy.arange(n)
        var.indexOf = seq.name

        # Add vars.
        for child in seq.walk():
            if child.type.typecode == 'S':
                data = map(str, child.data)
                n = max(map(len, data))
                dim2, i = 'string_0', 0
                while dim2 in f.dimensions:
                    i += 1
                    dim2 = 'string_%d' % i
                f.createDimension(dim2, n)
                data = numpy.array(map(list, data))
                var = f.createVariable(child.name, child.type.typecode, (dim, dim2))
                var[:] = numpy.array(data, child.type.typecode)
            else:
                var = f.createVariable(child.name, child.type.typecode, (dim,))
                var[:] = numpy.fromiter(child.data, child.type.typecode)
            for k, v in child.attributes.items():
                if not isinstance(v, dict):
                    setattr(var, k, v)

    f.flush()
    return [buf.getvalue()]
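# serialize() above works because netcdf_file accepts a file-like object, so
# the NetCDF bytes never touch disk. A minimal sketch of just that trick,
# using the same Python 2 StringIO as the function above:
from StringIO import StringIO
from pupynere import netcdf_file

buf = StringIO()
f = netcdf_file(buf, 'w', version=2)   # version=2 selects the 64-bit offset format
f.createDimension('x', 3)
v = f.createVariable('x', 'i', ('x',))
v[:] = [1, 2, 3]
f.flush()
nc_bytes = buf.getvalue()              # the complete NetCDF file as a string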
def __init__(self, fileName):
    self.__fileName = fileName
    self.__file = netcdf.netcdf_file(self.__fileName, "r")
    self.__vars = self.__file.variables
    self.__inventory = None
    self.__globalAttributes = None
def __init__(self, dataset):
    BaseResponse.__init__(self, dataset)
    self.nc = netcdf_file(None)
    if 'NC_GLOBAL' in self.dataset.attributes:
        self.nc._attributes.update(self.dataset.attributes['NC_GLOBAL'])

    dimensions = [var.dimensions for var in walk(self.dataset) if isinstance(var, BaseType)]
    dimensions = set(reduce(lambda x, y: x + y, dimensions))
    try:
        unlim_dim = self.dataset.attributes['DODS_EXTRA']['Unlimited_Dimension']
    except:
        unlim_dim = None

    # GridType
    for grid in walk(dataset, GridType):
        # add dimensions
        for dim, map_ in grid.maps.items():
            if dim in self.nc.dimensions:
                continue
            n = None if dim == unlim_dim else grid[dim].data.shape[0]
            self.nc.createDimension(dim, n)
            if not n:
                self.nc.set_numrecs(grid[dim].data.shape[0])
            var = grid[dim]
            # and add dimension variable
            self.nc.createVariable(dim, var.dtype.char, (dim,), attributes=var.attributes)

        # finally add the grid variable itself
        base_var = grid[grid.name]
        var = self.nc.createVariable(base_var.name, base_var.dtype.char,
                                     base_var.dimensions, attributes=base_var.attributes)

    # Sequence types!
    for seq in walk(dataset, SequenceType):
        self.nc.createDimension(seq.name, None)
        try:
            n = len(seq)
        except TypeError:
            # FIXME: materializing and iterating through a sequence to find the length
            # could have performance problems and could potentially consume the iterable
            # Do lots of testing here and determine the result of not calling set_numrecs()
            n = len([x for x in seq[seq.keys()[0]]])
        self.nc.set_numrecs(n)

        dim = seq.name,
        for child in seq.children():
            dtype = child.dtype
            # netcdf does not have a date type, so remap to float
            if dtype == np.dtype('datetime64'):
                dtype = np.dtype('float32')
            elif dtype == np.dtype('object'):
                raise TypeError("Don't know how to handle numpy type {0}".format(dtype))
            var = self.nc.createVariable(child.name, dtype.char, dim, attributes=child.attributes)

    self.headers.extend([('Content-type', 'application/x-netcdf')])
    # Optionally set the filesize header if possible
    try:
        self.headers.extend([('Content-length', self.nc.filesize)])
    except ValueError:
        pass
def download(self):
    """ Migrate it to use np.lib.arrayterator.Arrayterator
    """
    url_h = "%s/%s-h-daily" % (self.metadata['urlbase'], self.metadata['source_filename'])
    dataset_h = open_url(url_h)
    url_uv = "%s/%s-uv-daily" % (self.metadata['urlbase'], self.metadata['source_filename'])
    dataset_uv = open_url(url_uv)
    # ----
    if 't_ini' not in self.metadata['limits']:
        self.metadata['limits']['t_ini'] = 0
    if 't_fin' not in self.metadata['limits']:
        self.metadata['limits']['t_fin'] = dataset_h['time'].shape[0]
    if 't_step' not in self.metadata['limits']:
        self.metadata['limits']['t_step'] = 0
    else:
        print "Attention!! t_step set to: %s" % self.metadata['limits']['t_step']
    t_ini = self.metadata['limits']['t_ini']
    t_fin = self.metadata['limits']['t_fin']
    t_step = self.metadata['limits']['t_step']
    # ----
    data = {}

    #from coards import from_udunits
    t0 = datetime(1950, 1, 1)
    #if (re.match('^hours since \d{4}-\d{2}-\d{2}$', dataset_h['time'].attributes['units'])):
    if (re.match('^hours since 1950-01-01', dataset_h['time'].attributes['units'])):
        data['datetime'] = numpy.array([t0 + timedelta(hours=h)
                                        for h in dataset_h['time'][t_ini:t_fin:t_step].tolist()])
    else:
        print "Problems interpreting the time"
        return

    #time = self.nc.createVariable('time', 'i', ('time',))
    #time[:] = dataset_h['time'][t_ini:t_fin:t_step]
    #time.units = dataset_h['time'].attributes['units']
    #data['time'] = time

    limits = self.metadata['limits']
    Lat = dataset_h['NbLatitudes']
    Lon = dataset_h['NbLongitudes']
    Latlimits = numpy.arange(Lat.shape[0])[(Lat[:] >= limits["LatIni"]) & (Lat[:] <= limits["LatFin"])]
    Latlimits = [Latlimits[0], Latlimits[-1]]
    Lonlimits = numpy.arange(Lon.shape[0])[(Lon[:] >= limits["LonIni"]) & (Lon[:] <= limits["LonFin"])]
    Lonlimits = [Lonlimits[0], Lonlimits[-1]]

    data['Lon'], data['Lat'] = numpy.meshgrid((Lon[Lonlimits[0]:Lonlimits[-1]]),
                                              (Lat[Latlimits[0]:Latlimits[-1]]))
    # ------
    self.data = data

    #Arrayterator = numpy.lib.arrayterator.Arrayterator
    #dataset = dataset_h['Grid_0001']['Grid_0001']
    #ssh = Arrayterator(dataset)[t_ini:t_fin:t_step]
    #blocks = 1e4

    file = os.path.join(self.metadata['datadir'], self.metadata['source_filename'] + ".nc")
    nc = pupynere.netcdf_file(file, 'w')
    nc.createDimension('time', len(range(t_ini, t_fin, t_step)))
    nc.createDimension('lon', (Lonlimits[-1] - Lonlimits[0]))
    nc.createDimension('lat', (Latlimits[-1] - Latlimits[0]))

    dblocks = max(1, int(1e5 / ((Lonlimits[-1] - Lonlimits[0]) * (Latlimits[-1] - Latlimits[0]))))
    ti = numpy.arange(t_ini, t_fin, t_step)
    blocks = ti[::dblocks]
    if ti[-1] not in blocks:
        blocks = numpy.append(blocks, t_fin)
    ntries = 40
    # ------
    for v, dataset, missing_value in zip(
            ['h', 'u', 'v'],
            [dataset_h['Grid_0001']['Grid_0001'],
             dataset_uv['Grid_0001']['Grid_0001'],
             dataset_uv['Grid_0002']['Grid_0002']],
            [dataset_h['Grid_0001']._FillValue,
             dataset_uv['Grid_0001']._FillValue,
             dataset_uv['Grid_0002']._FillValue]):
        print "Getting %s" % v
        #data['h'] = ma.masked_all((len(ti), Lonlimits[-1]-Lonlimits[0], Latlimits[-1]-Latlimits[0]), dtype=numpy.float64)
        self.data[v] = nc.createVariable(v, 'f4', ('time', 'lat', 'lon'))
        self.data[v].missing_value = missing_value
        for b1, b2 in zip(blocks[:-1], blocks[1:]):
            print "From %s to %s of %s" % (b1, b2, blocks[-1])
            ind = numpy.nonzero((ti >= b1) & (ti < b2))
            for i in range(ntries):
                print "Try n: %s" % i
                try:
                    self.data[v][ind] = dataset[b1:b2:t_step,
                                                Lonlimits[0]:Lonlimits[-1],
                                                Latlimits[0]:Latlimits[-1]].swapaxes(1, 2).astype('f')
                    break
                except:
                    waitingtime = 30 + i * 20
                    print "Failed to download. I'll try again in %ss" % waitingtime
                    time.sleep(waitingtime)
def write_pco2(self, inFile='waveglider_pco2_WG.txt', outFile='waveglider_pco2_WG.nc'):
    '''
    Read in records from one of the waveglider data files and write out as
    NetCDF. The records are really long and ugly - the header for the file is
    expressed in the pco2_var list. This method builds the NetCDF variables
    dynamically using the Python 'exec' method.
    '''
    esec_list = []
    pco2_vars = ['Latitude', 'Longitude',
                 'EquilPumpOn pco2', 'EquilPumpOn Temp', 'EquilPumpOn Pressure',
                 'EquilPumpOff pco2', 'EquilPumpOff Temp', 'EquilPumpOff Pressure', 'EquilPumpOff Humidity',
                 'ZeroPumpOn pco2', 'ZeroPumpOn Temp', 'ZeroPumpOn Pressure',
                 'ZeroPumpOff pco2', 'ZeroPumpOff Temp', 'ZeroPumpOff Pressure',
                 'AirPumpOn pco2', 'AirPumpOn Temp', 'AirPumpOn Pressure',
                 'AirPumpOff pco2', 'AirPumpOff Temp', 'AirPumpOff Pressure', 'AirPumpOff Humidity',
                 'StandardFlowOn Pressure', 'StandardFlowOff pco2', 'StandardFlowOff Temp',
                 'StandardFlowOff Pressure', 'StandardFlowOff pco2 Humidity',
                 'Durafet pH 1', 'Durafet pH 2', 'Durafet pH 3', 'Durafet pH 4', 'Durafet pH 5', 'Durafet pH 6',
                 'Can Humidity']

    reader = csv.DictReader(open(os.path.join(self.parentDir, inFile)))
    last_esec = 0
    for r in reader:
        gmtDTString = r['PCO2 Timestamp']
        tt = time.strptime(gmtDTString, '%Y-%m-%d %H:%M:%S')
        diff = datetime.datetime(*tt[:6]) - datetime.datetime(1970, 1, 1, 0, 0, 0)
        esec = diff.days * 86400 + diff.seconds
        if esec > last_esec:
            esec_list.append(esec)
            for v in pco2_vars:
                ncVar = v.replace(' ', '_', 42)
                try:
                    exec("%s_list.append(r[' %s'])" % (ncVar, v, ))
                except NameError:
                    exec('%s_list = []' % ncVar)
                    exec("%s_list.append(r[' %s'])" % (ncVar, v, ))
            last_esec = esec
        else:
            print(('Skipping esec = %d' % esec))

    # Create the NetCDF file
    self.ncFile = netcdf_file(outFile, 'w')
    self.outFile = outFile

    # Trajectory dataset, time is the only netCDF dimension
    self.ncFile.createDimension('TIME', len(esec_list))
    self.time = self.ncFile.createVariable('TIME', 'float64', ('TIME',))
    self.time.units = 'seconds since 1970-01-01'
    self.time.long_name = 'Time GMT'
    self.time.standard_name = 'time'
    self.time[:] = esec_list

    # PCO2 variables
    for v in pco2_vars:
        ncVar = v.replace(' ', '_', 42)
        # Only Latitude, Longitude, Depth, and Time variables are upper case to match other Glider data
        if v in ('Latitude', 'Longitude'):
            # Name the coordinate variable all upper case
            exec("self.%s = self.ncFile.createVariable('%s', 'float64', ('TIME',))" % (v.lower(), v.upper(), ))
            exec("self.%s.long_name = '%s'" % (v.lower(), v.lower(), ))
            exec("self.%s.standard_name = '%s'" % (v.lower(), v.lower(), ))
            if v == 'Latitude':
                exec("self.%s.units = 'degrees_north'" % v.lower())
            if v == 'Longitude':
                exec("self.%s.units = 'degrees_east'" % v.lower())
        else:
            exec("self.%s = self.ncFile.createVariable('%s', 'float64', ('TIME',))" % (ncVar.lower(), ncVar, ))
            exec("self.%s.coordinates = 'TIME LATITUDE LONGITUDE DEPTH'" % ncVar.lower())
            exec("self.%s.long_name = '%s'" % (ncVar.lower(), v, ))
        exec("self.%s[:] = %s_list" % (ncVar.lower(), ncVar, ))

    # Fudge up a depth variable with a value of zero
    self.depth = self.ncFile.createVariable('DEPTH', 'float64', ('TIME',))
    self.depth.long_name = 'depth below sea level'
    self.depth.standard_name = 'depth'
    self.depth.units = 'm'
    self.depth[:] = np.zeros(len(self.time[:]))

    self.add_global_metadata()
    self.ncFile.close()
#!/usr/bin/python
import sys, pupynere

ll = [l.strip().split(',') for l in open(sys.argv[1]) if not l.startswith('#')]
vv = zip(*[map(float, l) for l in ll])
nc = pupynere.netcdf_file(sys.argv[1] + '.nc', 'w')
nc.createDimension('dim', None)
for i in range(len(vv)):
    nc.createVariable('var_%02d' % i, 'd', ('dim',))[:] = vv[i]

## my alternative
#import sys, pupynere
#ll = [l.strip().split(',') for l in open(sys.argv[1]) if not l.startswith('#')]
#vv = zip(*[map(float, l) for l in ll])
#nc = pupynere.netcdf_file(sys.argv[1]+'.nc', 'w')
#dims = ()
#for i in range(len(vv)):
#    dim_name = 'dim_%d' % i
#    if i == 1:
#        nc.createDimension(dim_name, None)
#    else:
#        nc.createDimension(dim_name, sys.argv[2])
#    dims = dims + (dim_name,)
#print dims
#nc.createVariable('point', 'd', ('dim_1',))[:] = ll[:]
def write_ctd(self, inFile='ESP_ctd.csv', outFile='ESP_ctd.nc'):
    '''
    Read in records from one of the ESP drifter files and write out as
    NetCDF. The records look like (time is local):

    year,month,day,hour,minute,second,temp,sal,chl (calibrated),chl (ini)
    2012, 9, 11, 15, 32, 38,15.24,33.34,0.68,2.54
    2012, 9, 11, 15, 37, 39,15.29,33.25,0.66,2.44
    '''
    # Initialize lists for the data to be parsed and written
    esec_list = []
    lat_list = []
    lon_list = []
    dep_list = []
    tem_list = []
    sal_list = []
    chl_cal_list = []
    chl_ini_list = []

    # Read data in from the input file
    reader = csv.DictReader(open(os.path.join(self.parentDir, inFile)))
    for r in reader:
        localDT = datetime.datetime(int(r['year']), int(r['month']), int(r['day']),
                                    int(r['hour']), int(r['minute']), int(r['second']))
        ##print str(localDT)
        es = time.mktime(localDT.timetuple())
        esec_list.append(es)
        lat_list.append(self.gps_lat[es])
        lon_list.append(self.gps_lon[es])
        dep_list.append(10.0)  # For September 2012 ESP deployment the nominal depth is 10m
        tem_list.append(r['temp'])
        sal_list.append(r['sal'])
        chl_cal_list.append(r['chl (calibrated)'])
        chl_ini_list.append(r['chl (ini)'])

    # Create the NetCDF file
    self.ncFile = netcdf_file(outFile, 'w')
    self.outFile = outFile

    # Trajectory dataset, time is the only netCDF dimension
    self.ncFile.createDimension('time', len(esec_list))
    self.time = self.ncFile.createVariable('time', 'float64', ('time',))
    self.time.standard_name = 'time'
    self.time.units = 'seconds since 1970-01-01'
    self.time[:] = esec_list

    # Record Variables - coordinates for trajectory - save in the instance and use for metadata generation
    self.latitude = self.ncFile.createVariable('latitude', 'float64', ('time',))
    self.latitude.long_name = 'LATITUDE'
    self.latitude.standard_name = 'latitude'
    self.latitude.units = 'degree_north'
    self.latitude[:] = lat_list

    self.longitude = self.ncFile.createVariable('longitude', 'float64', ('time',))
    self.longitude.long_name = 'LONGITUDE'
    self.longitude.standard_name = 'longitude'
    self.longitude.units = 'degree_east'
    self.longitude[:] = lon_list

    self.depth = self.ncFile.createVariable('depth', 'float64', ('time',))
    self.depth.long_name = 'DEPTH'
    self.depth.standard_name = 'depth'
    self.depth.units = 'm'
    self.depth[:] = dep_list

    # Record Variables - CTD Data
    temp = self.ncFile.createVariable('TEMP', 'float64', ('time',))
    temp.long_name = 'Sea Water Temperature in-situ ITS-90 or IPTS-68 scale'
    temp.standard_name = 'sea_water_temperature'
    temp.coordinates = 'time depth latitude longitude'
    temp.units = 'Celsius'
    temp[:] = tem_list

    sal = self.ncFile.createVariable('PSAL', 'float64', ('time',))
    sal.long_name = 'Sea Water Salinity in-situ PSS 1978 scale'
    sal.standard_name = 'sea_water_salinity'
    sal.coordinates = 'time depth latitude longitude'
    sal[:] = sal_list

    chlcal = self.ncFile.createVariable('chl', 'float64', ('time',))
    chlcal.long_name = 'Chlorophyll'
    chlcal.coordinates = 'time depth latitude longitude'
    chlcal.units = '?'
    chlcal[:] = chl_cal_list

    chlini = self.ncFile.createVariable('chl_ini', 'float64', ('time',))
    chlini.long_name = 'Raw Chlorophyll'
    chlini.coordinates = 'time depth latitude longitude'
    chlini.units = '?'
    chlini[:] = chl_ini_list

    self.add_global_metadata()
    self.ncFile.close()
dx = namopt['dx']
dz = namopt['dz']
xsize = namopt['xsize']
ysize = namopt['ysize']
zsize = namopt['zsize']
turhx = namopt['turhx']
turhy = namopt['turhy']
turhz = namopt['turhz']

if prop == 'RELH':
    pres = readprop(exptitle, expnr, 'presh')['presh'][-1, :]
    exnr = ((pres / p0) ** (Rd / cp))

    filename = '%s_%s_thl_%s_%s.nc' % (exptitle, expnr, trange[0], trange[1])
    datapath = datadir + '/%s' % (filename)
    f = pu.netcdf_file(datapath)
    thl = np.mean(f.variables['thl'][:, turhz/dz, :, :] * 1e-2 + 300, axis=0)
    x = f.variables['xt'][:]
    y = f.variables['yt'][:]
    z = f.variables['zt'][:]

    filename = '%s_%s_qt_%s_%s.nc' % (exptitle, expnr, trange[0], trange[1])
    datapath = datadir + '/%s' % (filename)
    f = pu.netcdf_file(datapath)
    qt = np.mean(f.variables['qt'][:, turhz/dz, :, :] * 1e-5, axis=0)

    qsat = np.zeros(np.shape(thl))
    for i, u in enumerate(x):
        for j, v in enumerate(y):
            #for k, w in enumerate(z):
            tmp = thl[j, i] * exnr[turhz/dz]
# db = couch['euporias']

output = csv.DictWriter(open("../csv/globalstats.csv", "w"),
                        ["cellID", "lat", "lon", "rpss", "meanPrediction",
                         "meanHistoric", "power", "lonSlice", "ocean"],
                        delimiter="\t")
output.writeheader()

outputPredictions = csv.DictWriter(open("../csv/predictions.csv", "w"),
                                   ["cellID", "memberID", "windSpeed"],
                                   delimiter="\t")
outputPredictions.writeheader()

outputHistoric = csv.DictWriter(open("../csv/historic.csv", "w"),
                                ["cellID", "year", "windSpeed"],
                                delimiter="\t")
outputHistoric.writeheader()

windfarms = csv.DictReader(open("../csv/windfarms.csv", "r"), delimiter="\t")

skillsFile = netcdf.netcdf_file(IN_FOLDER + 'WindMod1DJF1leadGlobalSkill.nc', 'r')
print "skills file"
print skillsFile.variables

oceanMaskFile = netcdf.netcdf_file(IN_FOLDER + 'land_sea_mask_512x256.nc', 'r')
print "oceanMaskFile"
print oceanMaskFile.variables
def write_isus(self, inFile='ESP_isus.csv', outFile='ESP_isus.nc'):
    '''
    Read in records from .csv file and write out as NetCDF. Merge with GPS
    data from MBARI Tracking. This method builds the NetCDF variables
    dynamically using the Python 'exec' method.
    '''
    esec_list = []
    lat_list = []
    lon_list = []
    dep_list = []
    isus_vars = ['no3']

    lastEs = 0
    reader = csv.DictReader(open(os.path.join(self.parentDir, inFile)))
    for r in reader:
        localDT = datetime.datetime(int(r['year']), int(r['month']), int(r['day']),
                                    int(r['hour']), int(r['minute']), int(r['second']))
        ##print str(localDT)
        es = time.mktime(localDT.timetuple())
        if es <= lastEs:
            continue  # Must have monotonically increasing time
        esec_list.append(es)
        lat_list.append(self.gps_lat[es])
        lon_list.append(self.gps_lon[es])
        dep_list.append(10.0)  # For September 2012 ESP deployment the nominal depth is 10m
        # This is kind of ridiculous for just one variable
        for v in isus_vars:
            ncVar = v.replace(' ', '_', 42)
            try:
                exec "%s_list.append(r['%s'])" % (ncVar, v, )
            except NameError:
                exec '%s_list = []' % ncVar
                exec "%s_list.append(r['%s'])" % (ncVar, v, )
        lastEs = es

    # Create the NetCDF file
    self.ncFile = netcdf_file(outFile, 'w')
    self.outFile = outFile

    # Trajectory dataset, time is the only netCDF dimension
    self.ncFile.createDimension('time', len(esec_list))
    self.time = self.ncFile.createVariable('time', 'float64', ('time',))
    self.time.standard_name = 'time'
    self.time.units = 'seconds since 1970-01-01'
    self.time[:] = esec_list

    # Record Variables - coordinates for trajectory - save in the instance and use for metadata generation
    self.latitude = self.ncFile.createVariable('latitude', 'float64', ('time',))
    self.latitude.long_name = 'LATITUDE'
    self.latitude.standard_name = 'latitude'
    self.latitude.units = 'degree_north'
    self.latitude[:] = lat_list

    self.longitude = self.ncFile.createVariable('longitude', 'float64', ('time',))
    self.longitude.long_name = 'LONGITUDE'
    self.longitude.standard_name = 'longitude'
    self.longitude.units = 'degree_east'
    self.longitude[:] = lon_list

    self.depth = self.ncFile.createVariable('depth', 'float64', ('time',))
    self.depth.long_name = 'DEPTH'
    self.depth.standard_name = 'depth'
    self.depth.units = 'm'
    self.depth[:] = dep_list

    # isus variables
    for v in isus_vars:
        ncVar = v.replace(' ', '_', 42)
        # Only Latitude, Longitude, Depth, and Time variables are upper case to match other Glider data
        if v == 'Latitude' or v == 'Longitude':
            exec "self.%s = self.ncFile.createVariable('%s', 'float64', ('time',))" % (ncVar.lower(), ncVar.upper(), )
        else:
            exec "self.%s = self.ncFile.createVariable('%s', 'float64', ('time',))" % (ncVar.lower(), ncVar, )
        exec "self.%s.coordinates = 'time depth latitude longitude'" % ncVar.lower()
        exec "self.%s.long_name = '%s'" % (ncVar.lower(), v, )
        exec "self.%s[:] = %s_list" % (ncVar.lower(), ncVar, )

    self.add_global_metadata()
    self.ncFile.close()
import pupynere

if __name__ == '__main__':
    print 'write netcdf file'
    path = 'res/test_cdf'
    cdf_file = pupynere.netcdf_file(path, 'w')

    cdf_file.createDimension('Time', 12)
    cdf_file.createDimension('south_north', 90)
    cdf_file.createDimension('emissions_zdim', 2)
    cdf_file.createDimension('DateStrLen', 19)
    cdf_file.createDimension('west_east', 90)

    dims = ('Time', 'emissions_zdim', 'south_north', 'west_east')

    so2 = cdf_file.createVariable('E_SO2', 'f', dims)
    so2.units = 'mol km^-2 hr^-1'
    olt = cdf_file.createVariable('E_OLT', 'f', dims)
    olt.units = 'mol km^-2 hr^-1'
    ora2 = cdf_file.createVariable('E_ORA2', 'f', dims)
    ora2.units = 'mol km^-2 hr^-1'
    co = cdf_file.createVariable('E_CO', 'f', dims)
    co.units = 'mol km^-2 hr^-1'
    eci = cdf_file.createVariable('E_ECI', 'f', dims)
    eci.units = 'mol km^-2 hr^-1'
    ecj = cdf_file.createVariable('E_ECJ', 'f', dims)
    ecj.units = 'mol km^-2 hr^-1'
    pm10 = cdf_file.createVariable('E_PM_10', 'f', dims)
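As excerpted, the script above never assigns data or closes cdf_file; a minimal completion sketch, with zero-filled placeholders standing in for real emissions data and an assumed unit string for E_PM_10:

import numpy as np

# Hypothetical completion: shape follows the dimensions declared above,
# (Time, emissions_zdim, south_north, west_east) = (12, 2, 90, 90).
fill = np.zeros((12, 2, 90, 90), dtype='float32')
for var in (so2, olt, ora2, co, eci, ecj, pm10):
    var[:] = fill
pm10.units = 'ug m^-2 s^-1'   # assumed; the excerpt does not give E_PM_10 units
cdf_file.close()              # flush the header and data to disk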
def write_ctd(self, inFile, ranges=None):
    '''
    Write lists out as NetCDF.
    '''
    # Create the NetCDF file
    outFile = '.'.join(inFile.split('.')[:-1]) + '.nc'
    self.ncFile = netcdf_file(outFile, 'w')

    # If specified on command line, override the default generic title
    self.ncFile.title = 'Underway CTD data'
    if self.args.title:
        self.ncFile.title = self.args.title

    # Combine any summary text specified on command line with the generic summary stating the original source file
    self.ncFile.summary = 'Observational oceanographic data translated with no modification from original data file %s' % inFile
    if self.args.summary:
        self.ncFile.summary = self.args.summary
        if not self.args.summary.endswith('.'):
            self.ncFile.summary += '.'
        self.ncFile.summary += ' Translated with no modification from original data file %s' % inFile

    # Add range-checking QC parameters to the summary
    if ranges:
        if not self.ncFile.summary.endswith('.'):
            self.ncFile.summary += '.'
        self.ncFile.summary += ' Range checking QC performed on the following variables with values outside of associated ranges discarded: %s' % ranges

    # If specified on command line, override the default generic license
    if self.args.license:
        self.ncFile.license = self.args.license

    # Trajectory dataset, time is the only netCDF dimension
    self.ncFile.createDimension('time', len(self.esec_list))
    self.time = self.ncFile.createVariable('time', 'float64', ('time',))
    self.time.standard_name = 'time'
    self.time.units = 'seconds since 1970-01-01'
    self.time[:] = self.esec_list

    # Record Variables - coordinates for trajectory - save in the instance and use for metadata generation
    self.latitude = self.ncFile.createVariable('latitude', 'float64', ('time',))
    self.latitude.long_name = 'LATITUDE'
    self.latitude.standard_name = 'latitude'
    self.latitude.units = 'degree_north'
    self.latitude[:] = self.lat_list

    self.longitude = self.ncFile.createVariable('longitude', 'float64', ('time',))
    self.longitude.long_name = 'LONGITUDE'
    self.longitude.standard_name = 'longitude'
    self.longitude.units = 'degree_east'
    self.longitude[:] = self.lon_list

    self.depth = self.ncFile.createVariable('depth', 'float64', ('time',))
    self.depth.long_name = 'DEPTH'
    self.depth.standard_name = 'depth'
    self.depth.units = 'm'
    self.depth[:] = self.dep_list

    # Record Variables - Underway CTD Data
    temp = self.ncFile.createVariable('TEMP', 'float64', ('time',))
    temp.long_name = 'Temperature, 2 [ITS-90, deg C]'
    temp.standard_name = 'sea_water_temperature'
    temp.coordinates = 'time depth latitude longitude'
    temp.units = 'Celsius'
    temp._FillValue = self._FillValue
    temp.missing_value = self.missing_value
    temp[:] = self.t1_list

    sal = self.ncFile.createVariable('PSAL', 'float64', ('time',))
    sal.long_name = 'Salinity, Practical [PSU]'
    sal.standard_name = 'sea_water_salinity'
    sal.coordinates = 'time depth latitude longitude'
    sal._FillValue = self._FillValue
    sal.missing_value = self.missing_value
    sal[:] = self.sal_list

    if self.xmiss_list:
        xmiss = self.ncFile.createVariable('xmiss', 'float64', ('time',))
        xmiss.long_name = 'Beam Transmission, Chelsea/Seatech'
        xmiss.coordinates = 'time depth latitude longitude'
        xmiss.units = '%'
        xmiss._FillValue = self._FillValue
        xmiss.missing_value = self.missing_value
        xmiss[:] = self.xmiss_list

    if self.wetstar_list:
        wetstar = self.ncFile.createVariable('wetstar', 'float64', ('time',))
        wetstar.long_name = 'Fluorescence, WET Labs WETstar'
        wetstar.coordinates = 'time depth latitude longitude'
        wetstar.units = 'mg/m^3'
        wetstar._FillValue = self._FillValue
        wetstar.missing_value = self.missing_value
        wetstar[:] = self.wetstar_list

    if self.turb_scufa_list:
        turb_scufa = self.ncFile.createVariable('turb_scufa', 'float64', ('time',))
        turb_scufa.long_name = 'Turbidity_Scufa'
        turb_scufa.coordinates = 'time depth latitude longitude'
        turb_scufa.units = 'NTU'
        turb_scufa._FillValue = self._FillValue
        turb_scufa.missing_value = self.missing_value
        turb_scufa[:] = self.turb_scufa_list

    if self.fl_scufa_list:
        fl_scufa = self.ncFile.createVariable('fl_scufa', 'float64', ('time',))
        fl_scufa.long_name = 'Raw_Fluorescence_Volts_Scufa'
        fl_scufa.coordinates = 'time depth latitude longitude'
        fl_scufa.units = 'volts'
        fl_scufa._FillValue = self._FillValue
        fl_scufa.missing_value = self.missing_value
        fl_scufa[:] = self.fl_scufa_list

    self.add_global_metadata()
    self.ncFile.close()
    print "Wrote %s" % outFile
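Each variable block in write_ctd() repeats the same create/attribute/assign steps; a sketch of a hypothetical helper (not part of the original class) that would factor them out:

def _create_var(self, name, long_name, data, units=None, standard_name=None):
    '''Create a time-series variable with the common trajectory attributes.'''
    v = self.ncFile.createVariable(name, 'float64', ('time',))
    v.long_name = long_name
    v.coordinates = 'time depth latitude longitude'
    if units is not None:
        v.units = units
    if standard_name is not None:
        v.standard_name = standard_name
    v._FillValue = self._FillValue
    v.missing_value = self.missing_value
    v[:] = data
    return v

# e.g. temp = self._create_var('TEMP', 'Temperature, 2 [ITS-90, deg C]',
#                              self.t1_list, units='Celsius',
#                              standard_name='sea_water_temperature')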
import pupynere

if __name__ == '__main__':
    path = 'res/test_cdf'
    # path = 'res/wrfchemi_00z_d01'
    # path = 'res/wrfout_d01'
    cdf_file = pupynere.netcdf_file(path)

    print "Variables"
    v = []
    for key in cdf_file.variables:
        # print key, cdf_file.variables[key]
        v.append(key)
    v.sort()
    print v

    print 'Dimensions'
    print cdf_file.dimensions

    print 'so2'
    so2 = cdf_file.variables['E_SO2']
    print so2.units
    print so2.dimensions
    # print so2.getValue()
    print so2.shape
    print so2[0][1][89][89]     # indices are (Time, z, y, x)
    print so2.typecode()
import pupynere as netcdf   # this import must be live; netcdf.netcdf_file is used below
import numpy
import sys
import os


def convert(i):
    # Map a byte value (0-255) to a normalized "0.0".."1.0" string
    return str((i & 0xFF) / 255.0)


ct = sys.argv
numct = len(ct)
for k in range(1, numct):
    print 'Converting: ' + ct[k]
    nc = netcdf.netcdf_file(ct[k], "r")
    colors = nc.variables['tableColors'][:][0]
    f = open('/tmp/' + os.path.basename(ct[k]).replace('.COLORTABLE', '.cmap'), 'w')
    f.write('<colorMap>\n')
    aVal = 1.0
    for i in range(numpy.shape(colors)[1]):
        f.write("    <color ")
        f.write('r = "' + convert(colors[0, i]) + '" ')
        f.write('g = "' + convert(colors[1, i]) + '" ')
        f.write('b = "' + convert(colors[2, i]) + '" ')
        f.write('a = "' + str(aVal) + '" ')
        f.write('/>\n')
    f.write('</colorMap>\n')
    f.close()
def getFlowArrays(nameDict):
    '''Retrieves vectors from current data

    nameDict: csv file dictionary of user specifications/flags

    returns: list of information relevant for interpolating the current
    field at each mesh node.
        flowArrays[0] = list of current vector longitude coordinates
        flowArrays[1] = list of current vector latitude coordinates
        flowArrays[2] = list of current vector u component magnitudes
        flowArrays[3] = list of current vector v component magnitudes
        flowArrays[4] = list of vector magnitudes (modulus of the u and v
                        components) at each coordinate
    '''
    fU = netcdf_file(nameDict['current_db'][0], "r")
    iLon = fU.variables["lon"][:, :]    # longitude
    iLat = fU.variables["lat"][:, :]    # latitude

    # In the future, inspect the database to find the number of timesteps
    # and extract one timestep at a time, appending each to a list whose
    # index corresponds to the timestep.
    nTsteps = 1
    # iU_tot = []
    # iV_tot = []
    # iModule_tot = []
    for tstep in range(nTsteps):
        iU = fU.variables["sozocrtx"][:, :, :]    # u
        iV = fU.variables["somecrty"][:, :, :]    # v
        iU_ = np.copy(iU.squeeze())
        iV_ = np.copy(iV.squeeze())
        iU_[np.where(iU_ > 100)] = np.nan    # mask fill values
        iV_[np.where(iV_ > 100)] = np.nan
        # Vector magnitude; 1.94384 converts m/s to knots
        # (TODO: put the conversion factor into a named constant)
        iModule = np.sqrt(iU_**2 + iV_**2) * 1.94384
        # iU_tot.append(iU_)
        # iV_tot.append(iV_)
        # iModule_tot.append(iModule)
    fU.close()    # close after the loop so later timesteps remain readable

    flowArrays = [iLon, iLat, iU_, iV_, iModule]
    # (with multiple timesteps, return iU_tot/iV_tot/iModule_tot instead)
    return flowArrays
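The docstring says the returned arrays feed a per-node interpolation; a sketch of one way that could look, assuming scipy is available and that nodeLon/nodeLat (hypothetical names) hold the mesh node coordinates:

import numpy as np
from scipy.interpolate import griddata

iLon, iLat, iU_, iV_, iModule = getFlowArrays(nameDict)

# Flatten the curvilinear grid into (lon, lat) sample points and
# interpolate each velocity component onto the mesh nodes.
points = np.column_stack((iLon.ravel(), iLat.ravel()))
u_nodes = griddata(points, iU_.ravel(), (nodeLon, nodeLat), method='linear')
v_nodes = griddata(points, iV_.ravel(), (nodeLon, nodeLat), method='linear')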
def btf2nc(ncfilename, source_filename=None, badctf=None):
    '''
    Convert a badc text file object to a netcdf file object.
    Call by providing an output filename for the netcdf file, and one of
    a source filename or a BADCtf instance.
    '''
    if source_filename is None and badctf is None:
        raise ValueError('Arguments must include one of a source file or BADCtf instance')
    elif source_filename is not None and badctf is not None:
        raise ValueError('Arguments must include only ONE of a source file or BADCtf instance')

    if source_filename is not None:
        tf = BADCtf('r', source_filename)
    else:
        tf = badctf

    ncf = netcdf_file(ncfilename, 'w')

    # file global attributes
    for a in tf._metadata.globalRecords:
        if len(a) == 2:
            try:
                t = getattr(ncf, a[0])
                t = ';'.join([t, a[1][0]])
                setattr(ncf, a[0], t)
            except AttributeError:
                setattr(ncf, a[0], a[1][0])
        else:
            setattr(ncf, a[0], a[1:])

    # first get attributes into a useful dictionary
    fvars = {}
    for v in tf.colnames():
        adict = {}
        for a in tf._metadata[('*', v)]:
            adict[a[0]] = a[1]
        fvars[v] = adict

    # now load up the coordinate variables first; we do this loop first
    # for a future with multiple coordinate variables
    index = -1
    dimensions = []
    for v in tf.colnames():
        index += 1
        if 'coordinate_variable' in fvars[v]:
            data = tf[index]
            dlen = len(data)
            dim = ncf.createDimension(v, dlen)
            dimdata = ncf.createVariable(v, fvars[v]['type'][0], (v,))
            dimdata[:] = data
            dimensions.append(v)

    # The assumption with a badc text file is that there is only one
    # coordinate variable.  Not necessarily true for trajectory files,
    # but one thing at a time ...
    assert len(dimensions) == 1, "Code doesn't support multiple coordinate variables"

    index = -1
    for v in tf.colnames():
        index += 1
        if 'coordinate_variable' not in fvars[v]:
            data = tf[index]
            fdata = ncf.createVariable(v, fvars[v]['type'][0], tuple(dimensions))
            fdata[:] = data

    return ncf
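A usage sketch, assuming a BADC text file named mydata.tf exists on disk (hypothetical filename); note that the netCDF bytes are written out when the returned file object is closed:

ncf = btf2nc('mydata.nc', source_filename='mydata.tf')
print ncf.variables.keys()
ncf.close()    # closing flushes and writes mydata.nc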