def applymask(arg):
    # load data
    data_in = Dataset(arg.file_in)
    data_out = Dataset(arg.file_out, 'r+')

    # dimensions and time series
    data_out.variables['x'][:] = data_in.variables['x'][160:400]
    data_out.variables['y'][:] = data_in.variables['y'][:]
    data_out.variables['z'][:] = data_in.variables['z'][:]
    data_out.variables['time'][:] = data_in.variables['time'][:]
    #data_out.variables['time'][1:] = data_in.variables['time'][1::] - data_in.variables['time'][1] + 2678400
    data_out.variables['meanMeltRate'][:] = data_in.variables['meanMeltRate'][:]
    data_out.variables['totalMeltFlux'][:] = data_in.variables['totalMeltFlux'][:]
    data_out.variables['totalOceanVolume'][:] = data_in.variables['totalOceanVolume'][:]
    data_out.variables['meanTemperature'][:] = data_in.variables['meanTemperature'][:]
    data_out.variables['meanSalinity'][:] = data_in.variables['meanSalinity'][:]

    # list of variables to be corrected
    variables = ['temperatureYZ', 'salinityYZ']

    for v in data_in.variables:
        if len(data_in.variables[v][:].shape) > 1:
            print('Processing variable ', v)
            if v in variables:
                tmp = data_in.variables[v][:]
                data_out.variables[v][:] = tmp
            else:
                tmp = data_in.variables[v][:]
                new_tmp = tmp[:, :, 160:400]
                data_out.variables[v][:] = new_tmp
        else:
            print('Skipping.. \n')

    # mask bathymetry
    print('Masking bathymetry...\n')
    bat = data_in.variables['bathymetry'][:, :, 160:400]
    bottomSalinity = data_in.variables['bottomSalinity'][:, :, 160:400]
    tm, jm, im = bat.shape
    for t in range(tm):
        bat[t, :] = np.ma.masked_where(bottomSalinity[t, :].mask == True, bat[t, :])
    data_out.variables['bathymetry'][:, :, :] = bat[:, :, :]

    # mask zero values
    print('Masking iceDraft...\n')
    iceDraft = data_in.variables['iceDraft'][:, :, 160:400]
    for t in range(tm):
        iceDraft[t, :] = np.ma.masked_where(bottomSalinity[t, :].mask == True, iceDraft[t, :])
    data_out.variables['iceDraft'][:, :, :] = iceDraft[:, :, :]

    # change attributes
    data_out.Author = 'Gustavo Marques ([email protected])'
    data_out.Created = datetime.now().isoformat()

    data_out.sync()
    data_out.close()
    data_in.close()
    print('Done!')
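# --- Usage sketch (not from the original source) ---------------------------
# applymask() only relies on `arg.file_in` and `arg.file_out`, so a minimal
# argparse driver like the one below should be enough to run it; the help
# strings and description here are hypothetical, and the output file must
# already exist with the expected dimensions since it is opened in 'r+' mode.
if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser(description='Subset and mask ocean output')
    parser.add_argument('file_in', help='source netCDF file (read only)')
    parser.add_argument('file_out', help='pre-created netCDF file, opened in r+ mode')
    applymask(parser.parse_args())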
def writeNC(self, outfile):
    """
    Export the grid variables to a netcdf file
    """
    from netCDF4 import Dataset
    from soda.dataio.suntans.suntans_ugrid import ugrid

    nc = Dataset(outfile, 'w', format='NETCDF4_CLASSIC')
    nc.Description = 'Unstructured grid file'
    nc.Author = ''
    #nc.Created = datetime.now().isoformat()

    nc.createDimension('Nc', self.Nc)
    nc.createDimension('Np', self.Np)
    try:
        nc.createDimension('Ne', self.Ne)
    except:
        print('No dimension: Ne')
    nc.createDimension('Nk', self.Nkmax)
    nc.createDimension('Nkw', self.Nkmax + 1)
    nc.createDimension('numsides', self.MAXFACES)
    nc.createDimension('two', 2)
    nc.createDimension('time', 0)  # Unlimited

    # Write the grid variables
    def write_nc_var(var, name, dimensions, attdict, dtype='f8'):
        tmp = nc.createVariable(name, dtype, dimensions)
        for aa in list(attdict.keys()):
            tmp.setncattr(aa, attdict[aa])
        nc.variables[name][:] = var

    gridvars = ['suntans_mesh', 'cells', 'face', 'nfaces',
                'edges', 'neigh', 'grad', 'xp', 'yp', 'xv', 'yv', 'xe', 'ye',
                'normal', 'n1', 'n2', 'df', 'dg', 'def',
                'Ac', 'dv', 'dz', 'z_r', 'z_w', 'Nk', 'Nke', 'mark']

    self.Nk += 1  # Set to one-base in the file (reset to zero-base after)
    self.suntans_mesh = [0]
    for vv in gridvars:
        if vv in self.__dict__:
            if self.VERBOSE:
                print('Writing variables: %s' % vv)
            write_nc_var(self[vv], vv,
                         ugrid[vv]['dimensions'],
                         ugrid[vv]['attributes'],
                         dtype=ugrid[vv]['dtype'])

        # Special treatment for "def"
        if vv == 'def' and 'DEF' in self.__dict__:
            if self.VERBOSE:
                print('Writing variables: %s' % vv)
            write_nc_var(self['DEF'], vv, ugrid[vv]['dimensions'],
                         ugrid[vv]['attributes'],
                         dtype=ugrid[vv]['dtype'])

    nc.close()
    self.Nk -= 1  # set back to zero base
def writeNC(self, outfile):
    """
    Export the grid variables to a netcdf file
    """
    from netCDF4 import Dataset
    from soda.dataio.suntans.suntans_ugrid import ugrid

    nc = Dataset(outfile, 'w', format='NETCDF4_CLASSIC')
    nc.Description = 'Unstructured grid file'
    nc.Author = ''
    #nc.Created = datetime.now().isoformat()

    nc.createDimension('Nc', self.Nc)
    nc.createDimension('Np', self.Np)
    try:
        nc.createDimension('Ne', self.Ne)
    except:
        print 'No dimension: Ne'
    nc.createDimension('Nk', self.Nkmax)
    nc.createDimension('Nkw', self.Nkmax + 1)
    nc.createDimension('numsides', self.MAXFACES)
    nc.createDimension('two', 2)
    nc.createDimension('time', 0)  # Unlimited

    # Write the grid variables
    def write_nc_var(var, name, dimensions, attdict, dtype='f8'):
        tmp = nc.createVariable(name, dtype, dimensions)
        for aa in attdict.keys():
            tmp.setncattr(aa, attdict[aa])
        nc.variables[name][:] = var

    gridvars = ['suntans_mesh', 'cells', 'face', 'nfaces',
                'edges', 'neigh', 'grad', 'xp', 'yp', 'xv', 'yv', 'xe', 'ye',
                'normal', 'n1', 'n2', 'df', 'dg', 'def',
                'Ac', 'dv', 'dz', 'z_r', 'z_w', 'Nk', 'Nke', 'mark']

    self.Nk += 1  # Set to one-base in the file (reset to zero-base after)
    self.suntans_mesh = [0]
    for vv in gridvars:
        if self.__dict__.has_key(vv):
            if self.VERBOSE:
                print 'Writing variables: %s' % vv
            write_nc_var(self[vv], vv,
                         ugrid[vv]['dimensions'],
                         ugrid[vv]['attributes'],
                         dtype=ugrid[vv]['dtype'])

        # Special treatment for "def"
        if vv == 'def' and self.__dict__.has_key('DEF'):
            if self.VERBOSE:
                print 'Writing variables: %s' % vv
            write_nc_var(self['DEF'], vv, ugrid[vv]['dimensions'],
                         ugrid[vv]['attributes'],
                         dtype=ugrid[vv]['dtype'])

    nc.close()
    self.Nk -= 1  # set back to zero base
def create_nc(self, ncfile):
    """
    Create the particle netcdf file

    NetCDF variable and dimension names are consistent with partrac data.
    """
    if self.verbose:
        print '\nInitialising particle netcdf file: %s...\n' % ncfile

    # Global Attributes
    nc = Dataset(ncfile, 'w', format='NETCDF4_CLASSIC')
    nc.Description = 'Particle trajectory file'
    nc.Author = os.getenv('USER')
    nc.Created = datetime.now().isoformat()

    # Dimensions
    nc.createDimension('ntrac', self.N)
    nc.createDimension('nt', 0)  # Unlimited

    # Create variables
    def create_nc_var(name, dimensions, attdict, dtype='f8'):
        tmp = nc.createVariable(name, dtype, dimensions)
        for aa in attdict.keys():
            tmp.setncattr(aa, attdict[aa])

    basetimestr = 'seconds since %s' % (datetime.strftime(self.basetime,
                                                          '%Y-%m-%d %H:%M:%S'))
    create_nc_var('tp', ('nt'),
                  {'units': basetimestr,
                   'long_name': "time at drifter locations"}, dtype='f8')
    create_nc_var('xp', ('ntrac', 'nt'),
                  {'units': 'm',
                   'long_name': "Easting coordinate of drifter",
                   'time': 'tp'}, dtype='f8')
    create_nc_var('yp', ('ntrac', 'nt'),
                  {'units': 'm',
                   'long_name': "Northing coordinate of drifter",
                   'time': 'tp'}, dtype='f8')
    create_nc_var('zp', ('ntrac', 'nt'),
                  {'units': 'm',
                   'long_name': "vertical position of drifter (negative is downward from surface)",
                   'time': 'tp'}, dtype='f8')

    if self.has_age:
        create_nc_var('age', ('ntrac', 'nt'),
                      {'units': 'seconds',
                       'long_name': "Particle age",
                       'time': 'tp'}, dtype='f8')
        create_nc_var('agemax', ('ntrac', 'nt'),
                      {'units': 'seconds',
                       'long_name': "Maximum particle age",
                       'time': 'tp'}, dtype='f8')

    # Keep the pointer to the open file as an attribute
    self._nc = nc
def create_nc(self, ncfile):
    """
    Create the particle netcdf file

    NetCDF variable and dimension names are consistent with partrac data.
    """
    if self.verbose:
        print '\nInitialising particle netcdf file: %s...\n' % ncfile

    # Global Attributes
    nc = Dataset(ncfile, 'w', format='NETCDF4_CLASSIC')
    nc.Description = 'Particle trajectory file'
    nc.Author = os.getenv('USER')
    nc.Created = datetime.now().isoformat()

    # Dimensions
    nc.createDimension('ntrac', self.N)
    nc.createDimension('nt', 0)  # Unlimited

    # Create variables
    def create_nc_var(name, dimensions, attdict, dtype='f8'):
        tmp = nc.createVariable(name, dtype, dimensions)
        for aa in attdict.keys():
            tmp.setncattr(aa, attdict[aa])

    basetimestr = 'seconds since %s' % (datetime.strftime(self.basetime,
                                                          '%Y-%m-%d %H:%M:%S'))
    create_nc_var('tp', ('nt'),
                  {'units': basetimestr,
                   'long_name': "time at drifter locations"}, dtype='f8')
    create_nc_var('xp', ('ntrac', 'nt'),
                  {'units': 'm',
                   'long_name': "Easting coordinate of drifter",
                   'time': 'tp'}, dtype='f8')
    create_nc_var('yp', ('ntrac', 'nt'),
                  {'units': 'm',
                   'long_name': "Northing coordinate of drifter",
                   'time': 'tp'}, dtype='f8')
    create_nc_var('zp', ('ntrac', 'nt'),
                  {'units': 'm',
                   'long_name': "vertical position of drifter (negative is downward from surface)",
                   'time': 'tp'}, dtype='f8')

    if self.has_age:
        create_nc_var('age', ('ntrac', 'nt'),
                      {'units': 'seconds',
                       'long_name': "Particle age",
                       'time': 'tp'}, dtype='f8')
        create_nc_var('agemax', ('ntrac', 'nt'),
                      {'units': 'seconds',
                       'long_name': "Maximum particle age",
                       'time': 'tp'}, dtype='f8')

    # Keep the pointer to the open file as an attribute
    self._nc = nc
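# --- Usage sketch (not from the original source) ---------------------------
# create_nc() only reads self.verbose, self.N, self.basetime and self.has_age
# before stashing the open Dataset on self._nc, so a small stand-in object is
# enough to exercise it when the method above is treated as a plain function;
# the class and file name below are hypothetical.
class _DrifterDemo(object):
    verbose = False
    N = 100
    basetime = datetime(1990, 1, 1)
    has_age = False

demo = _DrifterDemo()
create_nc(demo, 'drifters_demo.nc')
demo._nc.close()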
def initParticleNC(self, outfile, Np, age=False):
    """
    Initialise the particle trajectory netcdf file
    """
    import os

    if self.verbose:
        print '\nInitialising particle netcdf file: %s...\n' % outfile

    # Global Attributes
    nc = Dataset(outfile, 'w', format='NETCDF4_CLASSIC')
    nc.Description = 'Particle trajectory file'
    nc.Author = os.getenv('USER')
    nc.Created = datetime.now().isoformat()
    #tseas = self.time_sec[1] - self.time_sec[0]
    #nsteps = np.floor(tseas/self.dt)
    #nc.nsteps = '%f (number of linear interpolation steps in time between model outputs)'%nsteps
    #nc.tseas = '%d (Time step (seconds) between model outputs'%tseas
    #nc.dt = '%f (Particle model time steps [seconds])'%self.dt
    nc.dataset_location = '%s' % self.ncfile

    # Dimensions
    nc.createDimension('ntrac', Np)
    nc.createDimension('nt', 0)  # Unlimited

    # Create variables
    def create_nc_var(name, dimensions, attdict, dtype='f8'):
        tmp = nc.createVariable(name, dtype, dimensions)
        for aa in attdict.keys():
            tmp.setncattr(aa, attdict[aa])

    create_nc_var('tp', ('nt'),
                  {'units': 'seconds since 1990-01-01 00:00:00',
                   'long_name': "time at drifter locations"}, dtype='f8')
    create_nc_var('xp', ('ntrac', 'nt'),
                  {'units': 'm',
                   'long_name': "Easting coordinate of drifter",
                   'time': 'tp'}, dtype='f8')
    create_nc_var('yp', ('ntrac', 'nt'),
                  {'units': 'm',
                   'long_name': "Northing coordinate of drifter",
                   'time': 'tp'}, dtype='f8')
    create_nc_var('zp', ('ntrac', 'nt'),
                  {'units': 'm',
                   'long_name': "vertical position of drifter (negative is downward from surface)",
                   'time': 'tp'}, dtype='f8')

    if age:
        create_nc_var('age', ('ntrac', 'nt'),
                      {'units': 'seconds',
                       'long_name': "Particle age",
                       'time': 'tp'}, dtype='f8')
        create_nc_var('agemax', ('ntrac', 'nt'),
                      {'units': 'seconds',
                       'long_name': "Maximum particle age",
                       'time': 'tp'}, dtype='f8')

    nc.close()
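# --- Usage sketch (not from the original source) ---------------------------
# As in the previous sketch, initParticleNC() only needs `verbose` and
# `ncfile` from `self` when treated as a plain function; the stand-in object
# and file names are hypothetical.
class _PTMDemo(object):
    verbose = False
    ncfile = 'suntans_output.nc'

initParticleNC(_PTMDemo(), 'particles_demo.nc', 500, age=True)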
id_lat[:, :] = nav_lat[:, :]
if nb_dim == 4:
    id_dpt[:] = deptht[:]

if nb_dim == 4:
    id_sg0 = f_out.createVariable('sigma0', 'f4', ('time_counter', 'deptht', 'y', 'x',))
if nb_dim == 3:
    id_sg0 = f_out.createVariable('sigma0', 'f4', ('time_counter', 'y', 'x',))
id_sg0.long_name = 'SIGMA0 density computed from ' + cv_T + ' and ' + cv_S + ' with TEOS8 / Jackett and McDougall (1994)'

#f_out.About = 'Bla bla'
f_out.Author = 'barakuda [density_from_T_and_S.py] (https://github.com/brodeau/barakuda)'

id_tim[jt] = vtime[jt]
if nb_dim == 4:
    id_sg0[jt, :, :, :] = xsg0[:, :, :]
if nb_dim == 3:
    id_sg0[jt, :, :] = xsg0[:, :]

f_out.close()
f_nemo_T.close()

print '\n' + cf_out + ' successfully created!\n'
def writeNC(self, outfile, tt, x, y, Uwind, Vwind, Tair, Cloud, RH, Pair, Rain):
    """
    SUNTANS required wind file, this function creates the netcdf file
    """
    Nstation = x.shape[0]
    Nt = len(tt)

    nc = Dataset(outfile, 'w', format='NETCDF4_CLASSIC')
    nc.Description = 'SUNTANS History file'
    nc.Author = ''
    nc.Created = datetime.now().isoformat()

    #### Create dimensions ####
    nc.createDimension('NVwind', Nstation)
    nc.createDimension('NTair', Nstation)
    nc.createDimension('Nrain', Nstation)
    nc.createDimension('NUwind', Nstation)
    nc.createDimension('NPair', Nstation)
    nc.createDimension('NRH', Nstation)
    nc.createDimension('Ncloud', Nstation)
    nc.createDimension('nt', Nt)
    nc.close()

    def create_nc_var(outfile, name, dimensions, attdict, dtype='f8',
                      zlib=False, complevel=0, fill_value=None):
        nc = Dataset(outfile, 'a')
        tmp = nc.createVariable(name, dtype, dimensions, zlib=zlib,
                                complevel=complevel, fill_value=fill_value)
        for aa in attdict.keys():
            tmp.setncattr(aa, attdict[aa])
        #nc.variables[name][:] = var
        nc.close()

    #### adding variables ####
    create_nc_var(outfile, 'x_Vwind', ('NVwind'),
                  {'long_name': 'Longitude at Vwind', 'units': 'degrees_north'})
    create_nc_var(outfile, 'y_Vwind', ('NVwind'),
                  {'long_name': 'Latitude at Vwind', 'units': 'degrees_east'})
    create_nc_var(outfile, 'z_Vwind', ('NVwind'),
                  {'long_name': 'Elevation at Vwind', 'units': 'm'})
    create_nc_var(outfile, 'x_Tair', ('NTair'),
                  {'long_name': 'Longitude at Tair', 'units': 'degrees_north'})
    create_nc_var(outfile, 'y_Tair', ('NTair'),
                  {'long_name': 'Latitude at Tair', 'units': 'degrees_east'})
    create_nc_var(outfile, 'z_Tair', ('NTair'),
                  {'long_name': 'Elevation at Tair', 'units': 'm'})
    create_nc_var(outfile, 'x_rain', ('Nrain'),
                  {'long_name': 'Longitude at rain', 'units': 'degrees_north'})
    create_nc_var(outfile, 'y_rain', ('Nrain'),
                  {'long_name': 'Latitude at rain', 'units': 'degrees_east'})
    create_nc_var(outfile, 'z_rain', ('Nrain'),
                  {'long_name': 'Elevation at rain', 'units': 'm'})
    create_nc_var(outfile, 'x_Uwind', ('NUwind'),
                  {'long_name': 'Longitude at Uwind', 'units': 'degrees_north'})
    create_nc_var(outfile, 'y_Uwind', ('NUwind'),
                  {'long_name': 'Latitude at Uwind', 'units': 'degrees_east'})
    create_nc_var(outfile, 'z_Uwind', ('NUwind'),
                  {'long_name': 'Elevation at Uwind', 'units': 'm'})
    create_nc_var(outfile, 'x_Pair', ('NPair'),
                  {'long_name': 'Longitude at Pair', 'units': 'degrees_north'})
    create_nc_var(outfile, 'y_Pair', ('NPair'),
                  {'long_name': 'Latitude at Pair', 'units': 'degrees_east'})
    create_nc_var(outfile, 'z_Pair', ('NPair'),
                  {'long_name': 'Elevation at Pair', 'units': 'm'})
    create_nc_var(outfile, 'x_RH', ('NRH'),
                  {'long_name': 'Longitude at RH', 'units': 'degrees_north'})
    create_nc_var(outfile, 'y_RH', ('NRH'),
                  {'long_name': 'Latitude at RH', 'units': 'degrees_east'})
    create_nc_var(outfile, 'z_RH', ('NRH'),
                  {'long_name': 'Elevation at RH', 'units': 'm'})
    create_nc_var(outfile, 'x_cloud', ('Ncloud'),
                  {'long_name': 'Longitude at cloud', 'units': 'degrees_north'})
    create_nc_var(outfile, 'y_cloud', ('Ncloud'),
                  {'long_name': 'Latitude at cloud', 'units': 'degrees_east'})
    create_nc_var(outfile, 'z_cloud', ('Ncloud'),
                  {'long_name': 'Elevation at cloud', 'units': 'm'})
    create_nc_var(outfile, 'Time', ('nt'),
                  {'units': 'seconds since 1990-01-01 00:00:00', 'long_name': 'time'})
    create_nc_var(outfile, 'Vwind', ('nt', 'NVwind'),
                  {'units': 'm s-1', 'long_name': 'Northward wind velocity component',
                   'coordinates': 'x_Vwind,y_Vwind'})
    create_nc_var(outfile, 'Tair', ('nt', 'NTair'),
                  {'units': 'Celsius', 'long_name': 'Air Temperature',
                   'coordinates': 'x_Tair,y_Tair'})
    create_nc_var(outfile, 'rain', ('nt', 'Nrain'),
                  {'units': 'kg m2 s-1', 'long_name': 'rain fall rate',
                   'coordinates': 'x_rain,y_rain'})
    create_nc_var(outfile, 'Uwind', ('nt', 'NUwind'),
                  {'long_name': 'Eastward wind velocity component',
                   'coordinates': 'x_Uwind,y_Uwind', 'units': 'm s-1'})
    create_nc_var(outfile, 'Pair', ('nt', 'NPair'),
                  {'units': 'hPa', 'long_name': 'Air Pressure',
                   'coordinates': 'x_Pair,y_Pair'})
    create_nc_var(outfile, 'RH', ('nt', 'NRH'),
                  {'units': 'percent', 'long_name': 'Relative Humidity',
                   'coordinates': 'x_RH,y_RH'})
    create_nc_var(outfile, 'cloud', ('nt', 'Ncloud'),
                  {'units': 'dimensionless', 'long_name': 'Cloud cover fraction',
                   'coordinates': 'x_cloud,y_cloud'})

    z = np.ones([Nstation]) * 2

    ## change time units
    time_new = SecondsSince(tt)

    # ##Tair, rain, Pair, RH, cloud are set to be constant due to a lack of information
    # Tair = np.ones([Nt, Nstation])*30.0
    # rain = np.ones([Nt, Nstation])*0.0
    # Pair = np.ones([Nt, Nstation])*1010.0
    # RH = np.ones([Nt, Nstation])*50.0
    # cloud = np.ones([Nt, Nstation])*0.0

    ###### Now writing the variables ######
    nc = Dataset(outfile, 'a')
    nc.variables['x_Vwind'][:] = x
    nc.variables['y_Vwind'][:] = y
    nc.variables['z_Vwind'][:] = z
    nc.variables['x_Tair'][:] = x
    nc.variables['y_Tair'][:] = y
    nc.variables['z_Tair'][:] = z
    nc.variables['x_rain'][:] = x
    nc.variables['y_rain'][:] = y
    nc.variables['z_rain'][:] = z
    nc.variables['x_Uwind'][:] = x
    nc.variables['y_Uwind'][:] = y
    nc.variables['z_Uwind'][:] = z
    nc.variables['x_Pair'][:] = x
    nc.variables['y_Pair'][:] = y
    nc.variables['z_Pair'][:] = z
    nc.variables['x_RH'][:] = x
    nc.variables['y_RH'][:] = y
    nc.variables['z_RH'][:] = z
    nc.variables['x_cloud'][:] = x
    nc.variables['y_cloud'][:] = y
    nc.variables['z_cloud'][:] = z

    nc.variables['Time'][:] = time_new
    nc.variables['Vwind'][:] = Vwind
    nc.variables['Tair'][:] = Tair
    nc.variables['rain'][:] = Rain
    nc.variables['Uwind'][:] = Uwind
    nc.variables['Pair'][:] = Pair
    nc.variables['RH'][:] = RH
    nc.variables['cloud'][:] = Cloud

    print "Finished writing variables into netcdf file!"
    nc.close()
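# --- Usage sketch (not from the original source) ---------------------------
# A synthetic single-station, 24-hour driver for the met-file writer above.
# `self` is not used inside the method, so None is passed for it here when it
# is treated as a plain function; SecondsSince() must be importable from the
# surrounding module, and all station values below are made up.
import numpy as np
from datetime import datetime, timedelta

tt = [datetime(2018, 1, 1) + timedelta(hours=h) for h in range(24)]
x = np.array([151.0])   # station x coordinate
y = np.array([-33.0])   # station y coordinate
ones = np.ones((len(tt), x.shape[0]))
writeNC(None, 'suntans_met_demo.nc', tt, x, y,
        Uwind=5.0 * ones, Vwind=0.0 * ones, Tair=30.0 * ones,
        Cloud=0.0 * ones, RH=50.0 * ones, Pair=1010.0 * ones, Rain=0.0 * ones)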
def writeNC(self, outfile):
    """
    This function is used to create the netcdf file
    """
    print 'under development'

    #### create netcdf file ####
    nc = Dataset(outfile, 'w', format='NETCDF4_CLASSIC')
    nc.Description = 'SUNTANS History file'
    nc.Author = ''
    nc.Created = datetime.now().isoformat()

    #### Create dimensions ####
    nc.createDimension('NVwind', self.Nstation)
    nc.createDimension('NTair', self.Nstation)
    nc.createDimension('Nrain', self.Nstation)
    nc.createDimension('NUwind', self.Nstation)
    nc.createDimension('NPair', self.Nstation)
    nc.createDimension('NRH', self.Nstation)
    nc.createDimension('Ncloud', self.Nstation)
    nc.createDimension('nt', self.Nt)
    nc.close()

    #### adding variables ####
    self.create_nc_var(outfile, 'x_Vwind', ('NVwind'),
                       {'long_name': 'Longitude at Vwind', 'units': 'degrees_north'})
    self.create_nc_var(outfile, 'y_Vwind', ('NVwind'),
                       {'long_name': 'Latitude at Vwind', 'units': 'degrees_east'})
    self.create_nc_var(outfile, 'z_Vwind', ('NVwind'),
                       {'long_name': 'Elevation at Vwind', 'units': 'm'})
    self.create_nc_var(outfile, 'x_Tair', ('NTair'),
                       {'long_name': 'Longitude at Tair', 'units': 'degrees_north'})
    self.create_nc_var(outfile, 'y_Tair', ('NTair'),
                       {'long_name': 'Latitude at Tair', 'units': 'degrees_east'})
    self.create_nc_var(outfile, 'z_Tair', ('NTair'),
                       {'long_name': 'Elevation at Tair', 'units': 'm'})
    self.create_nc_var(outfile, 'x_rain', ('Nrain'),
                       {'long_name': 'Longitude at rain', 'units': 'degrees_north'})
    self.create_nc_var(outfile, 'y_rain', ('Nrain'),
                       {'long_name': 'Latitude at rain', 'units': 'degrees_east'})
    self.create_nc_var(outfile, 'z_rain', ('Nrain'),
                       {'long_name': 'Elevation at rain', 'units': 'm'})
    self.create_nc_var(outfile, 'x_Uwind', ('NUwind'),
                       {'long_name': 'Longitude at Uwind', 'units': 'degrees_north'})
    self.create_nc_var(outfile, 'y_Uwind', ('NUwind'),
                       {'long_name': 'Latitude at Uwind', 'units': 'degrees_east'})
    self.create_nc_var(outfile, 'z_Uwind', ('NUwind'),
                       {'long_name': 'Elevation at Uwind', 'units': 'm'})
    self.create_nc_var(outfile, 'x_Pair', ('NPair'),
                       {'long_name': 'Longitude at Pair', 'units': 'degrees_north'})
    self.create_nc_var(outfile, 'y_Pair', ('NPair'),
                       {'long_name': 'Latitude at Pair', 'units': 'degrees_east'})
    self.create_nc_var(outfile, 'z_Pair', ('NPair'),
                       {'long_name': 'Elevation at Pair', 'units': 'm'})
    self.create_nc_var(outfile, 'x_RH', ('NRH'),
                       {'long_name': 'Longitude at RH', 'units': 'degrees_north'})
    self.create_nc_var(outfile, 'y_RH', ('NRH'),
                       {'long_name': 'Latitude at RH', 'units': 'degrees_east'})
    self.create_nc_var(outfile, 'z_RH', ('NRH'),
                       {'long_name': 'Elevation at RH', 'units': 'm'})
    self.create_nc_var(outfile, 'x_cloud', ('Ncloud'),
                       {'long_name': 'Longitude at cloud', 'units': 'degrees_north'})
    self.create_nc_var(outfile, 'y_cloud', ('Ncloud'),
                       {'long_name': 'Latitude at cloud', 'units': 'degrees_east'})
    self.create_nc_var(outfile, 'z_cloud', ('Ncloud'),
                       {'long_name': 'Elevation at cloud', 'units': 'm'})
    self.create_nc_var(outfile, 'Time', ('nt'),
                       {'units': 'seconds since 1990-01-01 00:00:00', 'long_name': 'time'})
    self.create_nc_var(outfile, 'Vwind', ('nt', 'NVwind'),
                       {'units': 'm s-1', 'long_name': 'Northward wind velocity component',
                        'coordinates': 'x_Vwind,y_Vwind'})
    self.create_nc_var(outfile, 'Tair', ('nt', 'NTair'),
                       {'units': 'Celsius', 'long_name': 'Air Temperature',
                        'coordinates': 'x_Tair,y_Tair'})
    self.create_nc_var(outfile, 'rain', ('nt', 'Nrain'),
                       {'units': 'kg m2 s-1', 'long_name': 'rain fall rate',
                        'coordinates': 'x_rain,y_rain'})
    self.create_nc_var(outfile, 'Uwind', ('nt', 'NUwind'),
                       {'long_name': 'Eastward wind velocity component',
                        'coordinates': 'x_Uwind,y_Uwind', 'units': 'm s-1'})
    self.create_nc_var(outfile, 'Pair', ('nt', 'NPair'),
                       {'units': 'hPa', 'long_name': 'Air Pressure',
                        'coordinates': 'x_Pair,y_Pair'})
    self.create_nc_var(outfile, 'RH', ('nt', 'NRH'),
                       {'units': 'percent', 'long_name': 'Relative Humidity',
                        'coordinates': 'x_RH,y_RH'})
    self.create_nc_var(outfile, 'cloud', ('nt', 'Ncloud'),
                       {'units': 'dimensionless', 'long_name': 'Cloud cover fraction',
                        'coordinates': 'x_cloud,y_cloud'})

    ###### Now writing the variables ######
    nc = Dataset(outfile, 'a')
    nc.variables['x_Vwind'][:] = self.lat
    nc.variables['y_Vwind'][:] = self.lon
    nc.variables['z_Vwind'][:] = self.z
    nc.variables['x_Tair'][:] = self.lat
    nc.variables['y_Tair'][:] = self.lon
    nc.variables['z_Tair'][:] = self.z
    nc.variables['x_rain'][:] = self.lat
    nc.variables['y_rain'][:] = self.lon
    nc.variables['z_rain'][:] = self.z
    nc.variables['x_Uwind'][:] = self.lat
    nc.variables['y_Uwind'][:] = self.lon
    nc.variables['z_Uwind'][:] = self.z
    nc.variables['x_Pair'][:] = self.lat
    nc.variables['y_Pair'][:] = self.lon
    nc.variables['z_Pair'][:] = self.z
    nc.variables['x_RH'][:] = self.lat
    nc.variables['y_RH'][:] = self.lon
    nc.variables['z_RH'][:] = self.z
    nc.variables['x_cloud'][:] = self.lat
    nc.variables['y_cloud'][:] = self.lon
    nc.variables['z_cloud'][:] = self.z

    nc.variables['Time'][:] = self.time
    nc.variables['Vwind'][:] = self.Vwind
    nc.variables['Tair'][:] = self.Tair
    nc.variables['rain'][:] = self.rain
    nc.variables['Uwind'][:] = self.Uwind
    nc.variables['Pair'][:] = self.Pair
    nc.variables['RH'][:] = self.RH
    nc.variables['cloud'][:] = self.cloud

    print "Finished writing variables into netcdf file!"
    nc.close()
def make_remap_grid_file(grd):
    # create remap file
    remap_filename = 'remap_grid_' + grd.name + '_t.nc'
    nc = Dataset(remap_filename, 'w', format='NETCDF3_64BIT')
    nc.Description = 'remap grid file for Mercator'
    nc.Author = 'Jheka'
    nc.Created = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    nc.title = grd.name

    print "4"

    lon_corner = grd.lon_vert
    lat_corner = grd.lat_vert
    grid_center_lon = grd.lon_t.flatten()
    grid_center_lat = grd.lat_t.flatten()
    Mp, Lp = grd.lon_t.shape
    grid_imask = grd.mask_t[0, :].flatten()
    grid_size = Lp * Mp

    grid_corner_lon = np.zeros((grid_size, 4))
    grid_corner_lat = np.zeros((grid_size, 4))
    k = 0
    for j in range(Mp - 2):
        for i in range(Lp - 2):
            #print i,j
            grid_corner_lon[k, 0] = lon_corner[j, i]
            grid_corner_lat[k, 0] = lat_corner[j, i]
            grid_corner_lon[k, 1] = lon_corner[j, i + 1]
            grid_corner_lat[k, 1] = lat_corner[j, i + 1]
            grid_corner_lon[k, 2] = lon_corner[j + 1, i + 1]
            grid_corner_lat[k, 2] = lat_corner[j + 1, i + 1]
            grid_corner_lon[k, 3] = lon_corner[j + 1, i]
            grid_corner_lat[k, 3] = lat_corner[j + 1, i]
            k = k + 1

    # Write netcdf file
    nc.createDimension('grid_size', grid_size)
    nc.createDimension('grid_corners', 4)
    nc.createDimension('grid_rank', 2)

    nc.createVariable('grid_dims', 'i4', ('grid_rank'))
    nc.variables['grid_dims'].long_name = 'grid size along x and y axis'
    nc.variables['grid_dims'].units = 'None'
    nc.variables['grid_dims'][:] = [(Lp, Mp)]

    nc.createVariable('grid_center_lon', 'f8', ('grid_size'))
    nc.variables['grid_center_lon'].long_name = 'longitude of cell center'
    nc.variables['grid_center_lon'].units = 'degrees'
    nc.variables['grid_center_lon'][:] = grid_center_lon

    nc.createVariable('grid_center_lat', 'f8', ('grid_size'))
    nc.variables['grid_center_lat'].long_name = 'latitude of cell center'
    nc.variables['grid_center_lat'].units = 'degrees'
    nc.variables['grid_center_lat'][:] = grid_center_lat

    nc.createVariable('grid_imask', 'i4', ('grid_size'))
    nc.variables['grid_imask'].long_name = 'mask'
    nc.variables['grid_imask'].units = 'None'
    nc.variables['grid_imask'][:] = np.ones((Lp * Mp))

    nc.createVariable('grid_corner_lon', 'f8', ('grid_size', 'grid_corners'))
    nc.variables['grid_corner_lon'].long_name = 'longitude of cell corner'
    nc.variables['grid_corner_lon'].units = 'degrees'
    nc.variables['grid_corner_lon'][:] = grid_corner_lon

    nc.createVariable('grid_corner_lat', 'f8', ('grid_size', 'grid_corners'))
    nc.variables['grid_corner_lat'].long_name = 'latitude of cell corner'
    nc.variables['grid_corner_lat'].units = 'degrees'
    nc.variables['grid_corner_lat'][:] = grid_corner_lat

    nc.close()
id_lat = f_out.createVariable('nav_lat', 'f4', ('y', 'x',))
id_lon[:, :] = nav_lon[:, :]
id_lat[:, :] = nav_lat[:, :]

jb = 0
jbt = 0
for cb in vbasins:
    if vtreat[jb]:
        #id_bas = f_out.createVariable(crout+cb,'i1',('y','x',))
        id_bas = f_out.createVariable(crout + cb, 'f4', ('y', 'x',))
        id_bas.long_name = vbnames[jb] + ' ' + vocesea[jb] + ' basin'
        id_bas[:, :] = XBASINS[jbt, :, :] * mask[:, :]
        jbt = jbt + 1
    jb = jb + 1

f_out.About = 'ORCA025, masks for main ocean basins, created with orca_mesh_mask_to_bitmap.py, Gimp, and tiff_to_orca_mask.py, ' + cdate + '.'
f_out.Author = 'L. Brodeau (https://github.com/brodeau/barakuda)'
f_out.close()

print cf_bm + ' created!!!'
# BMB
var = fout.createVariable('tendlibmassbf', 'f', ('time',))
var.units = "kg s^{-1}"
var.standard_name = "tendency_of_land_ice_mass_due_to_basal_mass_balance"
var[:] = annualAverage('totalBasalMassBal') / secInYr

# calving
var = fout.createVariable('tendlicalvf', 'f', ('time',))
var.units = "kg s^{-1}"
var.standard_name = "tendency_of_land_ice_mass_due_to_calving"
var[:] = -1.0 * annualAverage('totalCalvingFlux') / secInYr

# GL flux
var = fout.createVariable('tendligroundf', 'f', ('time',))
var.units = "kg s^{-1}"
var.standard_name = "tendency_of_grounded_ice_mass"
var[:] = -1.0 * annualAverage('groundingLineFlux') / secInYr

fout.Author = "Matthew Hoffman ([email protected])"
fout.Model = "MALI (MPAS-Albany Land Ice)"
fout.Variables = "Scalar variables"
fout.Notes = "Experiments performed at Los Alamos National Laboratory using the Edison supercomputer at National Energy Research Scientific Computing Center at Lawrence Berkeley National Laboratory. Experiments performed by Matthew Hoffman, Tong Zhang, Stephen Price, and Mauro Perego."
fout.Date = "28-Aug-2018"

fout.close()
inputData.close()
spinupData.close()

print("Complete.")
def wrt_1d_series(vt, vd, cvar, cinfo,
                  cu_t='unknown', cu_d='unknown', cln_d='unknown', nsmooth=0,
                  vd2=[], vd3=[], vd4=[], vd5=[],
                  cvar2='', cvar3='', cvar4='', cvar5='',
                  cln_d2='', cln_d3='', cln_d4='', cln_d5=''):

    cf_o = cvar+'_'+cinfo+'.nc'

    lsmooth = False
    if nsmooth > 0:
        import barakuda_stat as bs
        lsmooth = True
        if nsmooth == 11:
            vd_sm = bs.running_mean_11(vd, l_fill_bounds=False)
        elif nsmooth == 5:
            vd_sm = bs.running_mean_5(vd, l_fill_bounds=False)
        else:
            print 'ERROR: wrt_1d_series.barakuda_ncio => smoothing with nsmooth='+str(nsmooth)+' not supported!'
            sys.exit(0)

    f_o = Dataset(cf_o, 'w', format='NETCDF3_CLASSIC')

    nt = len(vt)
    if len(vd) != nt:
        print 'ERROR: wrt_1d_series.barakuda_ncio => data & time have different lengths!'
        sys.exit(0)

    l_do_v2=False ; l_do_v3=False ; l_do_v4=False ; l_do_v5=False
    if len(vd2) == nt: l_do_v2=True
    if len(vd3) == nt: l_do_v3=True
    if len(vd4) == nt: l_do_v4=True
    if len(vd5) == nt: l_do_v5=True

    f_o.createDimension('time', None)
    id_t = f_o.createVariable('time','f4',('time',)) ; id_t.units = cu_t

    id_d = f_o.createVariable(cvar,'f4',('time',))
    id_d.units = cu_d ; id_d.long_name = cln_d

    if l_do_v2:
        id_d2 = f_o.createVariable(cvar2,'f4',('time',)); id_d2.units = cu_d; id_d2.long_name = cln_d2
    if l_do_v3:
        id_d3 = f_o.createVariable(cvar3,'f4',('time',)); id_d3.units = cu_d; id_d3.long_name = cln_d3
    if l_do_v4:
        id_d4 = f_o.createVariable(cvar4,'f4',('time',)); id_d4.units = cu_d; id_d4.long_name = cln_d4
    if l_do_v5:
        id_d5 = f_o.createVariable(cvar5,'f4',('time',)); id_d5.units = cu_d; id_d5.long_name = cln_d5

    if lsmooth:
        id_sm = f_o.createVariable(cvar+'_'+str(nsmooth)+'yrm','f4',('time',))
        id_sm.units = cu_d ; id_sm.long_name = str(nsmooth)+'-year running mean of '+cln_d

    for jt in range(nt):
        id_t[jt] = vt[jt]
        id_d[jt] = vd[jt]
        if lsmooth: id_sm[jt] = vd_sm[jt]
        if l_do_v2: id_d2[jt] = vd2[jt]
        if l_do_v3: id_d3[jt] = vd3[jt]
        if l_do_v4: id_d4[jt] = vd4[jt]
        if l_do_v5: id_d5[jt] = vd5[jt]

    f_o.Author = 'L. Brodeau (barakuda_ncio.py of Barakuda)'
    f_o.close()

    print ' * wrt_1d_series => '+cf_o+' written!\n'

    return 0
def wrt_1d_series(vt, vd, cvar, cinfo,
                  cu_t='unknown', cu_d='unknown', cln_d='unknown', nsmooth=0,
                  vd2=[], vd3=[], vd4=[], vd5=[],
                  cvar2='', cvar3='', cvar4='', cvar5='',
                  cln_d2='', cln_d3='', cln_d4='', cln_d5=''):

    cf_o = cvar+'_'+cinfo+'.nc'

    lsmooth = False
    if nsmooth > 0:
        import barakuda_stat as bs
        lsmooth = True
        if nsmooth == 11:
            vd_sm = bs.running_mean_11(vd, l_fill_bounds=False)
        elif nsmooth == 5:
            vd_sm = bs.running_mean_5(vd, l_fill_bounds=False)
        else:
            print 'ERROR: wrt_1d_series.barakuda_ncio => smoothing with nsmooth='+str(nsmooth)+' not supported!'
            sys.exit(0)

    f_o = Dataset(cf_o, 'w', format='NETCDF3_CLASSIC')

    nt = len(vt)
    if len(vd) != nt:
        print 'ERROR: wrt_1d_series.barakuda_ncio => data & time have different lengths!'
        sys.exit(0)

    l_do_v2=False ; l_do_v3=False ; l_do_v4=False ; l_do_v5=False
    if len(vd2) == nt: l_do_v2=True
    if len(vd3) == nt: l_do_v3=True
    if len(vd4) == nt: l_do_v4=True
    if len(vd5) == nt: l_do_v5=True

    f_o.createDimension('time', None)
    id_t = f_o.createVariable('time','f4',('time',)) ; id_t.units = cu_t

    id_d = f_o.createVariable(cvar,'f4',('time',))
    id_d.units = cu_d ; id_d.long_name = cln_d

    if l_do_v2:
        id_d2 = f_o.createVariable(cvar2,'f4',('time',)); id_d2.units = cu_d; id_d2.long_name = cln_d2
    if l_do_v3:
        id_d3 = f_o.createVariable(cvar3,'f4',('time',)); id_d3.units = cu_d; id_d3.long_name = cln_d3
    if l_do_v4:
        id_d4 = f_o.createVariable(cvar4,'f4',('time',)); id_d4.units = cu_d; id_d4.long_name = cln_d4
    if l_do_v5:
        id_d5 = f_o.createVariable(cvar5,'f4',('time',)); id_d5.units = cu_d; id_d5.long_name = cln_d5

    if lsmooth:
        id_sm = f_o.createVariable(cvar+'_'+str(nsmooth)+'yrm','f4',('time',))
        id_sm.units = cu_d ; id_sm.long_name = str(nsmooth)+'-year running mean of '+cln_d

    for jt in range(nt):
        id_t[jt] = vt[jt]
        id_d[jt] = vd[jt]
        if lsmooth: id_sm[jt] = vd_sm[jt]
        if l_do_v2: id_d2[jt] = vd2[jt]
        if l_do_v3: id_d3[jt] = vd3[jt]
        if l_do_v4: id_d4[jt] = vd4[jt]
        if l_do_v5: id_d5[jt] = vd5[jt]

    f_o.Author = 'L. Brodeau (barakuda_ncio.py of Barakuda)'
    f_o.close()

    print ' * wrt_1d_series => '+cf_o+' written!\n'

    return 0
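# --- Usage sketch (not from the original source) ---------------------------
# Minimal call of wrt_1d_series() with a synthetic 10-year time series; it
# writes 'sst_global_mean.nc' in the current directory. The variable names,
# units and values below are made up for illustration.
import numpy as np

vt = np.arange(1990.5, 2000.5, 1.0)          # decimal years
vd = 15.0 + 0.02 * (vt - vt[0])              # synthetic global-mean SST
wrt_1d_series(vt, vd, 'sst', 'global_mean',
              cu_t='years', cu_d='deg C',
              cln_d='Globally-averaged sea surface temperature')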
    for jt in range(nt):
        id_t[jt] = vtime[jt]
        id_v01[jt] = rmean_sst[jt, jb]
        id_v02[jt] = rmean_sss[jt, jb]
        Xbox[:, :] = XSST[jt, j1:j2, i1:i2]
        Xbox[idx_msk] = -9999.
        id_x01[jt, :, :] = Xbox[:, :]
        Xbox[:, :] = XSSS[jt, j1:j2, i1:i2]
        Xbox[idx_msk] = -9999.
        id_x02[jt, :, :] = Xbox[:, :]

    f_out.Author = 'L. Brodeau (ssx_boxes.py of Barakuda)'

else:
    vt = f_out.variables['time']
    jrec2write = len(vt)
    v01 = f_out.variables[cv_sst+'_sa']
    x01 = f_out.variables[cv_sst]
    v02 = f_out.variables[cv_sss+'_sa']
    x02 = f_out.variables[cv_sss]

    for jt in range(nt):
        vt[jrec2write+jt] = vtime[jt]
        v01[jrec2write+jt] = rmean_sst[jt, jb]
        v02[jrec2write+jt] = rmean_sss[jt, jb]
        Xbox[:, :] = XSST[jt, j1:j2, i1:i2] ; Xbox[idx_msk] = -9999.
        x01[jrec2write+jt, :, :] = Xbox[:, :]
f_bathy.close()

(Nj, Ni) = nmp.shape(xbathy)
xnew = nmp.zeros((Nj, Ni))

print '\n'

# Opening the Netcdf file:
f_new = Dataset(cf_new, 'r+')  # r+ => can read and write in the file...
print 'File ', cf_new, 'is open...\n'

# Extracting the runoff-depth field at surface level:
xtemp = f_new.variables[cv_rnf_dept][:, :, :]
xnew[:, :] = xtemp[0, :, :]

idx = nmp.where((xbathy[:, :] >= rmin_depth) & (xnew[:, :] < rmin_depth))
#print idx
xnew[idx] = rmin_depth

# Updating:
f_new.variables[cv_rnf_dept][0, :, :] = xnew[:, :]

f_new.Author = 'L. Brodeau (orca_correct_runoff_depth.py of Barakuda)'

f_new.close()

print cf_new + ' successfully created!'
    jrec2write = 0

    # Creating Dimensions:
    f_out.createDimension('time', None)
    f_out.createDimension('deptht', nk)

    # Creating variables:
    id_t = f_out.createVariable('time','f4',('time',)) ; id_t.units = 'year'
    id_z = f_out.createVariable('deptht','f4',('deptht',)) ; id_z.units = 'm'
    id_v01 = f_out.createVariable(cvar, 'f4', ('time','deptht',))
    id_v01.long_name = 'Horizontally-averaged '+cvar+': '+colnm

    # Writing depth vector
    id_z[:] = vdepth[:]

    id_t[jrec2write] = float(jyear)+0.5
    id_v01[jrec2write,:] = Vf[:]

    f_out.Author = 'L. Brodeau ('+cnexec+' of Barakuda)'

else:
    vt = f_out.variables['time']
    jrec2write = len(vt)
    v01 = f_out.variables[cvar]
    vt[jrec2write] = float(jyear)+0.5
    v01[jrec2write,:] = Vf[:]

f_out.close()
print cf_out+' written!'
if NbDim == 3:
    id_msk = f_out.createVariable('mask', 'i1', (cdim_z, cdim_y, cdim_x,),
                                  zlib=True, complevel=8)
    id_msk[:, :, :] = mask[:, :, :]
else:
    id_msk = f_out.createVariable('mask', 'i1', (cdim_y, cdim_x,),
                                  zlib=True, complevel=8)
    id_msk[:, :] = mask[:, :]

id_msk.long_name = 'Land-Sea mask'

f_out.About = 'Land-sea mask built out of variable `' + cv_nc + '` of file `' + path.basename(cf_nc) + '` !'
f_out.Author = 'Generated with `' + path.basename(sys.argv[0]) + '` of `climporn` (https://github.com/brodeau/climporn)'
f_out.close()

print(cf_msk + ' created!!!')
    jrec2write = 0

    # Creating Dimensions:
    f_out.createDimension('time', None)
    f_out.createDimension('deptht', nk)

    # Creating variables:
    id_t = f_out.createVariable('time','f4',('time',)) ; id_t.units = 'year'
    id_z = f_out.createVariable('deptht','f4',('deptht',)) ; id_z.units = 'm'
    id_v01 = f_out.createVariable(cvar, 'f4', ('time','deptht',))
    id_v01.long_name = 'Horizontally-averaged '+cvar+': '+cocean

    # Writing depth vector
    id_z[:] = vdepth[:]

    id_t[jrec2write] = float(jyear)+0.5
    id_v01[jrec2write,:] = Vf[:]

    f_out.Author = 'L. Brodeau ('+cnexec+' of Barakuda)'

else:
    vt = f_out.variables['time']
    jrec2write = len(vt)
    v01 = f_out.variables[cvar]
    vt[jrec2write] = float(jyear)+0.5
    v01[jrec2write,:] = Vf[:]

f_out.close()
print cf_out+' written!'
    id_v03 = f_out.createVariable('S', 'f4', ('time', 'deptht',))
    id_v03.unit = 'PSU'
    id_v03.long_name = 'salinity on box ' + cbox

    id_z[:] = Vdepth[:]

    for jm in range(Nt):
        id_t[jrec2write + jm] = Vtime[jm]
        id_v01[jrec2write + jm, :] = Zm1[jm, :]
        id_v02[jrec2write + jm, :] = Tm1[jm, :]
        id_v03[jrec2write + jm, :] = Sm1[jm, :]

    f_out.box_coordinates = cbox + ' => ' + str(i1) + ',' + str(j1) + ' -> ' + str(i2 - 1) + ',' + str(j2 - 1)
    f_out.box_file = FILE_DEF_BOXES
    f_out.Author = 'L. Brodeau (' + cname_script + ' of Barakuda)'

else:
    vt = f_out.variables['time']
    jrec2write = len(vt)
    v01 = f_out.variables['sigma0']
    v02 = f_out.variables['theta']
    v03 = f_out.variables['S']

    for jm in range(Nt):
        vt[jrec2write + jm] = Vtime[jm]
        v01[jrec2write + jm, :] = Zm1[jm, :]
        v02[jrec2write + jm, :] = Tm1[jm, :]
        v03[jrec2write + jm, :] = Sm1[jm, :]

f_out.close()
    for jt in range(nt):
        id_t[jt] = vtime[jt]
        id_v01[jt] = rmean_sst[jt, jb]
        id_v02[jt] = rmean_sss[jt, jb]
        Xbox[:, :] = XSST[jt, j1:j2, i1:i2]
        Xbox[idx_msk] = -9999.
        id_x01[jt, :, :] = Xbox[:, :]
        Xbox[:, :] = XSSS[jt, j1:j2, i1:i2]
        Xbox[idx_msk] = -9999.
        id_x02[jt, :, :] = Xbox[:, :]

    f_out.Author = 'Generated with "ssx_boxes.py" of BaraKuda (https://github.com/brodeau/barakuda)'

else:
    vt = f_out.variables['time']
    jrec2write = len(vt)
    v01 = f_out.variables[cv_sst+'_sa']
    x01 = f_out.variables[cv_sst]
    v02 = f_out.variables[cv_sss+'_sa']
    x02 = f_out.variables[cv_sss]

    for jt in range(nt):
        vt[jrec2write+jt] = vtime[jt]
        v01[jrec2write+jt] = rmean_sst[jt, jb]
        v02[jrec2write+jt] = rmean_sss[jt, jb]
        Xbox[:, :] = XSST[jt, j1:j2, i1:i2] ; Xbox[idx_msk] = -9999.
        x01[jrec2write+jt, :, :] = Xbox[:, :]
    id_blue = f_out.createVariable('blue', 'f4', (cdim_y, cdim_x,))
    id_blue.long_name = 'Blue (of RGB)'

    id_red[:, :] = nmp.flipud(xpic[:, :, 0])
    id_green[:, :] = nmp.flipud(xpic[:, :, 1])
    id_blue[:, :] = nmp.flipud(xpic[:, :, 2])

else:
    #if l_nemo_like:
    #    id_bw = f_out.createVariable('bw','i1',('t',cdim_y,cdim_x,))
    #    id_bw.long_name = 'Grey scale'
    #    #id_bw[0,:,:] = nmp.flipud(xpic[:,:]) / idiv
    #    id_bw[0,:,:] = 1 - (nmp.flipud(xpic[:,:]) + 1)/idiv
    #else:
    id_bw = f_out.createVariable('bw', 'i1', (cdim_y, cdim_x,))
    id_bw.long_name = 'Grey scale'
    id_bw[:, :] = 1 - (nmp.flipud(xpic[:, :]) + 1) / idiv

f_out.About = 'Image ' + cf_im + ' converted to netcdf.'
f_out.Author = 'Generated with image_to_netcdf.py of BARAKUDA (https://github.com/brodeau/barakuda)'
f_out.close()

print(cf_nc + ' created!!!')
    id_blue = f_out.createVariable('blue', 'f4', (cdim_y, cdim_x,))
    id_blue.long_name = 'Blue (of RGB)'

    id_red[:, :] = nmp.flipud(xpic[:, :, 0])
    id_green[:, :] = nmp.flipud(xpic[:, :, 1])
    id_blue[:, :] = nmp.flipud(xpic[:, :, 2])

else:
    #if l_nemo_like:
    #    id_bw = f_out.createVariable('bw','i1',('t',cdim_y,cdim_x,))
    #    id_bw.long_name = 'Grey scale'
    #    #id_bw[0,:,:] = nmp.flipud(xpic[:,:]) / idiv
    #    id_bw[0,:,:] = 1 - (nmp.flipud(xpic[:,:]) + 1)/idiv
    #else:
    id_bw = f_out.createVariable('bw', 'i1', (cdim_y, cdim_x,))
    id_bw.long_name = 'Grey scale'
    id_bw[:, :] = 1 - (nmp.flipud(xpic[:, :]) + 1) / idiv

f_out.About = 'Image ' + cf_im + ' converted to netcdf.'
f_out.Author = 'Generated with `imageBW_to_NetCDF.py` of `climporn` (https://github.com/brodeau/climporn)'
f_out.close()

print(cf_nc + ' created!!!\n')
    id_t[jrec2write] = float(jy)
    id_zw[:] = zgdepw[:]
    id_zt[:] = zgdept[:]
    id_v01[jrec2write, :] = rmean_sss0_deep_jfm[:]
    id_v02[jrec2write, :] = rmean_sss0_deep_m03[:]
    id_v03[jrec2write, :] = nbp_deeper_zcrit[:]
    id_v04[jrec2write, :] = vprof_sig0_ann[:]
    id_v05[jrec2write, :] = vprof_sig0_jfm[:]
    id_v06[jrec2write, :] = vprof_sig0_m03[:]

    f_out.box_coordinates = cbox+' => '+str(i1)+','+str(j1)+' -> '+str(i2-1)+','+str(j2-1)
    f_out.box_file = FILE_DEF_BOXES
    f_out.Author = 'L. Brodeau ('+cname_script+' of Barakuda)'

else:
    vt = f_out.variables['time']
    jrec2write = len(vt)
    v01 = f_out.variables['SSsig0_jfm']
    v02 = f_out.variables['SSsig0_m03']
    v03 = f_out.variables['Nbp_w_deep']
    v04 = f_out.variables['sig0_ann']
    v05 = f_out.variables['sig0_jfm']
    v06 = f_out.variables['sig0_m03']

    vt[jrec2write] = float(jy)
    v01[jrec2write, :] = rmean_sss0_deep_jfm[:]
    v02[jrec2write, :] = rmean_sss0_deep_m03[:]
    v03[jrec2write, :] = nbp_deeper_zcrit[:]
def writeNC(logfile, ncname, site):
    # get today's date:
    dtod = dt.datetime.today()

    # read .log-file into dictionary:
    data_nc = readASCII(logfile, site)  # data not quality filtered
    data = data_with_nans(data_nc)      # replace bad data with nans.

    # get number of lines in file, i.e. length of data columns
    filelen = len(data['unixtime'])

    # open .nc outfile.
    ncout = Dataset(ncname, 'w', format='NETCDF4')

    # define dimensions:
    time = ncout.createDimension('time', filelen)  # filelen, set='none' if unlimited dimension
    vclasses = ncout.createDimension('vclasses', 32)  # sorting into velocity classes = bins
    dclasses = ncout.createDimension('dclasses', 32)  # sorting into diameter classes = bins

    # define global attributes:
    ncout.Title = "Parsivel disdrometer data"
    ncout.Institution = 'University of Cologne (IGMK)'
    ncout.Contact_person = 'Bernhard Pospichal ([email protected])'
    ncout.Source = 'OTT Parsivel 2.10.1: 70.210.001.3.0, serial number: %s' % (data['serial_no'][0])
    ncout.History = 'Data processed with parsivel_log_nc_convert_samdconform.py'
    ncout.Dependencies = 'external'
    ncout.Conventions = "CF-1.6 where applicable"
    ncout.Processing_date = dt.datetime.today().strftime('%Y-%m-%d,%H:%m:%S')
    ncout.Author = 'Sabrina Schnitt, [email protected]'
    ncout.Comments = 'none'
    ncout.Licence = 'For non-commercial use only. This data is subject to the SAMD data policy to be found at www.icdc.cen.uni-hamburg.de/projekte/samd.html and in the SAMD Observation Data Product standard.'
    #ncout.Measurement_site = 'JOYCE Juelich Observatory for Cloud Evolution'

    # create variables:
    time = ncout.createVariable('time', 'i', ('time',))  # time in double-precision...
    time.units = 'seconds since 1970-01-01 00:00:00 UTC'
    time.long_name = 'time'
    time.fill_value = -9999
    time[:] = data['unixtime']

    rr_si = data['rr'] * 0.001 / 3600.  # convert from mm/h to m/s
    rain_rate = ncout.createVariable('rr', 'f', ('time',))
    rain_rate.units = 'm s-1'
    rain_rate.long_name = 'rainfall_rate'
    rain_rate.fill_value = np.nan
    rain_rate[:] = rr_si

    rain_accum = ncout.createVariable('precipitation_amount', 'f', ('time',))
    rain_accum.units = 'kg m-2'
    rain_accum[:] = data['r_accum'] - data['r_accum'][0]
    rain_accum.long_name = 'precipitation amount'
    rain_accum.fill_value = np.nan
    rain_accum.comment = 'accumulated precipitation amount (32 bit) since start of day'

    wawa = ncout.createVariable('wawa', 'f', ('time',))
    wawa.long_name = 'weather code according to WMO SYNOP 4680'
    wawa.units = '1'
    wawa.fill_value = np.nan
    wawa.comment = 'WMO Code Table 4680: 00: No Precip., 51-53: Drizzle, 57-58: Drizzle and Rain, 61-63: Rain, 67-68: Rain and Snow, 71-73: Snow, 77: Snow Grains, 87-88: Graupel, 89: Hail; Increasing Intensity in one category indicated by increasing numbers'
    #wawa.missing_value = np.Nan
    wawa[:] = data['wawa']

    zeff = ncout.createVariable('Ze', 'f', ('time',))
    zeff.fill_value = np.nan
    zeff.long_name = 'equivalent_reflectivity_factor; identical to the 6th moment of the drop size distribution'
    zeff.units = 'dBZ'
    zeff[:] = data['z']

    vis = ncout.createVariable('vis', 'f', ('time',))
    vis.fill_value = np.nan
    vis.long_name = 'visibility_in_air'
    vis.units = 'm'
    vis[:] = data['vis']

    '''
    interval = ncout.createVariable('sample_interval','f',('time',))
    interval.long_name = 'time interval for each sample'
    interval.units = 's'
    interval[:] = data['interval']
    '''
    '''
    ampli = ncout.createVariable('signal_amplitude','f',('time',))
    ampli.units = ''
    ampli[:] = data['amp']
    '''

    n_part = ncout.createVariable('n_particles', 'f', ('time',))
    n_part.fill_value = np.nan
    n_part.units = '1'
    n_part.long_name = 'number of detected particles'
    n_part[:] = data['nmb']

    '''
    temp_sens = ncout.createVariable('T_sensor','f',('time',))
    temp_sens.fill_value = np.nan
    temp_sens.long_name = 'temperature_of_sensor'
    temp_sens.units = 'K'
    temp_sens[:] = data['T_sensor']+273.15
    '''
    '''
    serial_no = ncout.createVariable('serial_no','S6',('stri',))
    serial_no[:] = data['serial_no']
    '''
    '''
    version = ncout.createVariable('version','S5',('stri',))
    version.description = 'IOP firmware version'
    version[:] = data['version']
    '''
    '''
    curr_heating = ncout.createVariable('I_heating','f',('time',))
    curr_heating.fill_value = np.nan
    curr_heating.units = 'A'
    curr_heating.long_name = 'Current of heating system'
    curr_heating[:] = data['curr_heating']

    volt_sensor = ncout.createVariable('volt_sensor','f',('time',))
    volt_sensor.fill_value = np.nan
    volt_sensor.units = 'V'
    volt_sensor.long_name = 'Power supply voltage of the sensor'
    volt_sensor[:] = data['volt_sensor']
    '''

    status_sensor = ncout.createVariable('status_sensor', 'i', ('time',))
    status_sensor.fill_value = -9999
    status_sensor.units = '1'
    status_sensor.long_name = 'Status of the Sensor'
    status_sensor.comments = '0: everything OK, 1: Laser protective glass is dirty, but measurements are still possible, 2: Laser protective glass is dirty, partially covered. No further usable measurements are possible.'
    status_sensor[:] = data['status_sensor']

    '''
    station_name = ncout.createVariable('station_name','S5',('stri',))
    station_name[:] = data['station_name']
    '''
    '''
    rain_am = ncout.createVariable('precipitation_amount2','f',('time',))
    rain_am.units = 'mm'
    rain_am.fill_value = np.nan
    rain_am.long_name = 'absolute precipitation_amount'
    rain_am[:] = data['r_amount']
    '''
    '''
    error_code = ncout.createVariable('error_code','S3',('stri',))
    error_code[:] = data['error_code']
    '''

    d = ncout.createVariable('dmean', 'f', ('dclasses', 'time'))
    d.fill_value = np.nan
    d.units = 'log10(m-3 mm-1)'
    #d.long_name = 'mean volume equivalent diameter per class'
    d.long_name = 'number of particles per diameter class'
    d[:, :] = data['n']

    v = ncout.createVariable('vmean', 'f', ('vclasses', 'time'))
    v.fill_value = np.nan
    v.units = 'm s-1'
    v.long_name = 'mean falling velocity per diameter class'
    v[:, :] = data['v']

    vclass = ncout.createVariable('vclasses', 'f', ('vclasses'))
    vclass.units = 'm s-1'
    vclass.long_name = 'velocity class center'
    vclass[:] = np.asarray([0.05, 0.15, 0.25, 0.35, 0.45, 0.55, 0.65, 0.75, 0.85, 0.95,
                            1.1, 1.3, 1.5, 1.7, 1.9, 2.2, 2.6, 3., 3.4, 3.8,
                            4.4, 5.2, 6., 6.8, 7.6, 8.8, 10.4, 12., 13.6, 15.2,
                            17.6, 20.8])

    vclassw = ncout.createVariable('vwidth', 'f', ('vclasses'))
    vclassw.units = 'm s-1'
    vclassw.long_name = 'velocity class width'
    vclassw[:] = np.asarray([0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1,
                             0.2, 0.2, 0.2, 0.2, 0.2, 0.4, 0.4, 0.4, 0.4, 0.4,
                             0.8, 0.8, 0.8, 0.8, 0.8, 1.6, 1.6, 1.6, 1.6, 1.6,
                             3.2, 3.2])

    dclass = ncout.createVariable('dclasses', 'f', ('dclasses'))
    dclass.units = 'mm'
    dclass.long_name = 'volume equivalent diameter class center'
    dclass[:] = np.asarray([0.062, 0.187, 0.312, 0.437, 0.562, 0.687, 0.812, 0.937, 1.062, 1.187,
                            1.375, 1.625, 1.875, 2.125, 2.375, 2.750, 3.250, 3.75, 4.25, 4.75,
                            5.5, 6.5, 7.5, 8.5, 9.5, 11., 13., 15., 17., 19.,
                            21.5, 24.5])

    dclassw = ncout.createVariable('dwidth', 'f', ('dclasses'))
    dclassw.units = 'mm'
    dclassw.long_name = 'volume equivalent diameter class width'
    dclassw[:] = np.asarray([0.125, 0.125, 0.125, 0.125, 0.125, 0.125, 0.125, 0.125, 0.125, 0.125,
                             0.250, 0.250, 0.250, 0.250, 0.250, 0.5, 0.5, 0.5, 0.5, 0.5,
                             1., 1., 1., 1., 1., 2., 2., 2., 2., 2.,
                             3., 3.])

    M = ncout.createVariable('M', 'f', ('dclasses', 'vclasses', 'time'))
    M.fill_value = np.nan
    M.units = '1'
    M.long_name = 'number of particles per volume equivalent diameter class and fall velocity class'
    M[:, :, :] = data['M']

    # additional variables needed by the hdcp standards:
    lat = ncout.createVariable('lat', 'f')
    lat.standard_name = 'latitude'
    lat.comments = 'Latitude of instrument location'
    lat.units = 'degrees_north'
    lat[:] = 50.908547

    lon = ncout.createVariable('lon', 'f')
    lon.standard_name = 'longitude'
    lon.comments = 'Longitude of instrument location'
    lon.units = 'degrees_east'
    lon[:] = 6.413536

    zsl = ncout.createVariable('zsl', 'f')
    zsl.standard_name = 'altitude'
    zsl.comments = 'Altitude of instrument above mean sea level'
    zsl.units = 'm'
    zsl[:] = 111.

    # close .nc-file:
    ncout.close()

    return
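# --- Usage sketch (not from the original source) ---------------------------
# Hypothetical call of the Parsivel writer above; the log-file name, output
# name and site label are placeholders, and readASCII()/data_with_nans() must
# be available in the surrounding module.
if __name__ == '__main__':
    writeNC('parsivel_20180828.log', 'parsivel_20180828.nc', 'jue')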
import matplotlib.pyplot as plt
import numpy as np
from datetime import datetime
from netCDF4 import Dataset
from shutil import copyfile

N = 15
Nr = 4

nc = Dataset('Rivers.nc', 'w', format='NETCDF3_64BIT')
nc.Description = 'Discharges of major rivers'
nc.Author = 'Evgeny Ivanov'
nc.Created = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
nc.title = 'Discharges of major rivers'

nc.createDimension('river_time', 365 * 10)  ###
####nc.createDimension('river_time', 365)
nc.createDimension('xi_rho', 82)
nc.createDimension('xi_u', 81)
nc.createDimension('xi_v', 82)
nc.createDimension('eta_rho', 112)
nc.createDimension('eta_u', 112)
nc.createDimension('eta_v', 111)
nc.createDimension('s_rho', N)
nc.createDimension('river', Nr)

YY = []
MM = []
DD = []
QQ = []

for line in open('Maas_Dordrecht.txt', 'r').readlines():
id_v03 = f_out.createVariable("S", "f4", ("time", "deptht")) id_v03.unit = "PSU" id_v03.long_name = "salinity on box " + cbox id_z[:] = Vdepth[:] for jm in range(Nt): id_t[jrec2write + jm] = Vtime[jm] id_v01[jrec2write + jm, :] = Zm1[jm, :] id_v02[jrec2write + jm, :] = Tm1[jm, :] id_v03[jrec2write + jm, :] = Sm1[jm, :] f_out.box_coordinates = cbox + " => " + str(i1) + "," + str(j1) + " -> " + str(i2 - 1) + "," + str(j2 - 1) f_out.box_file = FILE_DEF_BOXES f_out.Author = "L. Brodeau (" + cname_script + " of Barakuda)" else: vt = f_out.variables["time"] jrec2write = len(vt) v01 = f_out.variables["sigma0"] v02 = f_out.variables["theta"] v03 = f_out.variables["S"] for jm in range(Nt): vt[jrec2write + jm] = Vtime[jm] v01[jrec2write + jm, :] = Zm1[jm, :] v02[jrec2write + jm, :] = Tm1[jm, :] v03[jrec2write + jm, :] = Sm1[jm, :] f_out.close()
id_atl = f_out.createVariable('tmaskatl', 'f4', ('y', 'x',)) ; id_atl.long_name = 'Atlantic Basin'
id_pac = f_out.createVariable('tmaskpac', 'f4', ('y', 'x',)) ; id_pac.long_name = 'Pacific Basin'
id_ind = f_out.createVariable('tmaskind', 'f4', ('y', 'x',)) ; id_ind.long_name = 'Indian Basin'
id_soc = f_out.createVariable('tmasksoc', 'f4', ('y', 'x',)) ; id_soc.long_name = 'Southern Basin'
id_inp = f_out.createVariable('tmaskinp', 'f4', ('y', 'x',)) ; id_inp.long_name = 'Indo-Pacific Basin'

# Filling variables:
id_lat[:, :] = xlat[:, :]
id_lon[:, :] = xlon[:, :]
id_atl[:, :] = mask_atl[:, :]
id_pac[:, :] = mask_pac[:, :]
id_ind[:, :] = mask_ind[:, :]
id_soc[:, :] = mask_soc[:, :]
id_inp[:, :] = mask_inp[:, :]

f_out.About = 'ORCA1 main oceans basin land-sea mask created from ' + cf_mm
f_out.Author = 'L. Brodeau (lb_nemo_create_basin_mask.py of PYLB)'
f_out.close()

print cf_out + ' successfully created!'
id_ind = f_out.createVariable('tmaskind', 'f4', ('y', 'x',))
id_ind.long_name = 'Indian Basin'
id_soc = f_out.createVariable('tmasksoc', 'f4', ('y', 'x',))
id_soc.long_name = 'Southern Basin'
id_inp = f_out.createVariable('tmaskinp', 'f4', ('y', 'x',))
id_inp.long_name = 'Indo-Pacific Basin'

# Filling variables:
id_lat[:, :] = xlat[:, :]
id_lon[:, :] = xlon[:, :]
id_atl[:, :] = mask_atl[:, :]
id_pac[:, :] = mask_pac[:, :]
id_ind[:, :] = mask_ind[:, :]
id_soc[:, :] = mask_soc[:, :]
id_inp[:, :] = mask_inp[:, :]

f_out.About = 'ORCA1 main oceans basin land-sea mask created from ' + cf_mm
f_out.Author = 'L. Brodeau (lb_nemo_create_basin_mask.py of PYLB)'
f_out.close()

print cf_out + ' successfully created!'
id_lon = f_out.createVariable('nav_lon', 'f4', ('y', 'x',))
id_lat = f_out.createVariable('nav_lat', 'f4', ('y', 'x',))
id_atl = f_out.createVariable('tmaskatl', 'f4', ('y', 'x',)) ; id_atl.long_name = 'Atlantic Basin'
id_pac = f_out.createVariable('tmaskpac', 'f4', ('y', 'x',)) ; id_pac.long_name = 'Pacific Basin'
id_ind = f_out.createVariable('tmaskind', 'f4', ('y', 'x',)) ; id_ind.long_name = 'Indian Basin'
id_soc = f_out.createVariable('tmasksoc', 'f4', ('y', 'x',)) ; id_soc.long_name = 'Southern Basin'
id_inp = f_out.createVariable('tmaskinp', 'f4', ('y', 'x',)) ; id_inp.long_name = 'Indo-Pacific Basin'

# Filling variables:
id_lat[:, :] = xlat[:, :]
id_lon[:, :] = xlon[:, :]
id_atl[:, :] = mask_atl[:, :]
id_pac[:, :] = mask_pac[:, :]
id_ind[:, :] = mask_ind[:, :]
id_soc[:, :] = mask_soc[:, :]
id_inp[:, :] = mask_inp[:, :]

f_out.About = 'ORCA1 main oceanic basin land-sea mask created from ' + cf_mm
f_out.Author = ' Generated with "orca025_create_basin_mask_from_meshmask.py" of BaraKuda (https://github.com/brodeau/barakuda)'
f_out.close()

print cf_out + ' successfully created!'