def test_modis():
    # Smoke test: read a MODIS MYD06 1 km cloud granule via the FEMODIS
    # front-end and dump time/lat/lon/cloud-water-path to a flat netCDF file.
    modis_file = "MYD06_L2.A2010100.0755.051.2010108054555.hdf"
    print "****** Reading MODIS data from file: ", modis_file
    modis = FEMODIS.front_end_modis_cloud_1km_dev(modis_file)
    tim = modis.get_time()
    lat = modis.get_latitude()
    lon = modis.get_longitude()
    dat = modis.get_data()
    print dat.keys()
    cwp = dat['Cloud_Water_Path']
    # print lat, lon, lwp
    ncfile = Dataset('modis_1km.nc', 'w')
    # every output variable is a float32 1-D array on the 'time' dimension
    ndim = len(lat)
    ncfile.createDimension('time', ndim)
    time = ncfile.createVariable('time', dtype('float32').char, ('time', ))
    lats = ncfile.createVariable('latitude', dtype('float32').char, ('time', ))
    lons = ncfile.createVariable('longitude', dtype('float32').char, ('time', ))
    cwps = ncfile.createVariable('cloud_water_path', dtype('float32').char, ('time', ))
    time[:] = N.cast['float32'](tim)
    lats[:] = N.cast['float32'](lat)
    lons[:] = N.cast['float32'](lon)
    # cwp[1]: second element of the Cloud_Water_Path entry — presumably the
    # data array as opposed to metadata; TODO confirm against FEMODIS
    cwps[:] = N.cast['float32'](cwp[1])
    ncfile.close()
def OPENPIV2D2C(filename, ux_out, uy_out, x_out, y_out, flag1, flag2, flag3): """Storage in NetCDF format: 2D2C PIV datas with 3 flags used in OPENPIV""" # open a new netCDF file for writing. ncfile = Dataset(filename, 'w') # create the x and y dimensions. nx, ny = ux_out.shape ncfile.createDimension('x', nx) ncfile.createDimension('y', ny) # create the variable (4 byte integer in this case) # first argument is name of variable, second is datatype, third is # a tuple with the names of dimensions. #data = ncfile.createVariable('data',np.dtype('int32').char,('x','y')) xvar = ncfile.createVariable('xvar', 'd', ('x', 'y')) yvar = ncfile.createVariable('yvar', 'd', ('x', 'y')) ux = ncfile.createVariable('ux', 'd', ('x', 'y')) uy = ncfile.createVariable('uy', 'd', ('x', 'y')) Flags1 = ncfile.createVariable('flag1', 'd', ('x', 'y')) Flags2 = ncfile.createVariable('flag2', 'd', ('x', 'y')) Flags3 = ncfile.createVariable('flag3', 'd', ('x', 'y')) # write data to variable. xvar[:] = x_out yvar[:] = y_out ux[:] = ux_out uy[:] = uy_out Flags1[:] = flag1 Flags2[:] = flag2 Flags3[:] = flag3 # close the file. ncfile.close() print '*** SUCCESS writing:', filename
def gen(self):
    # Emit a minimal netCDF file (grid centers plus one physical variable)
    # intended for NCL plotting, stamped with provenance attributes.
    ncfile = Dataset(self.fname, 'w')
    tm = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
    # set dimension info
    ncfile.createDimension('grid_size', self.obj.grid_size)
    # set variable info: all three are double-precision 1-D arrays
    grid_center_lat_var = ncfile.createVariable('grid_center_lat', dtype('d').char, ('grid_size', ))
    grid_center_lon_var = ncfile.createVariable('grid_center_lon', dtype('d').char, ('grid_size', ))
    physical_variable = ncfile.createVariable('physical_variable', dtype('d').char, ('grid_size', ))
    grid_center_lat_var[:] = np.array(self.obj.grid_center_lat)
    grid_center_lon_var[:] = np.array(self.obj.grid_center_lon)
    physical_variable[:] = np.array(self.obj.physical_variable)
    # global attributes recording how and when this file was produced
    setattr(ncfile, 'title', 'Threp ' + self.fname)
    setattr(ncfile, 'createdate', tm)
    setattr(ncfile, 'map_method', self.method)
    setattr(ncfile, 'conventions', 'Threp')
    setattr(ncfile, 'src_grid', self.obj.src_grid_name)
    setattr(ncfile, 'dst_grid', self.obj.dst_grid_name)
    ncfile.close()
    print '*** Successfully generated netcdf file for ncl usage. ***'
def modify_filter(gridfilename, ttlname, indflag=1):
    """Retitle *gridfilename* in place and stamp a modification date.

    When *indflag* is truthy, also tag the four center/corner coordinate
    variables with 'degrees' units.  Stale global attributes left over from
    remap generation ('grid_name', 'map_method') are removed if present.
    """
    stamp = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
    ncfile = Dataset(gridfilename, 'a')
    if indflag:
        for coord_name in ('grid_center_lat', 'grid_center_lon',
                           'grid_corner_lat', 'grid_corner_lon'):
            setattr(ncfile.variables[coord_name], 'units', 'degrees')
    setattr(ncfile, 'title', ttlname)
    setattr(ncfile, 'modifydate', stamp)
    for stale_attr in ('grid_name', 'map_method'):
        if hasattr(ncfile, stale_attr):
            delattr(ncfile, stale_attr)
    ncfile.sync()
    ncfile.close()
def loadsrcoords(self):
    # Load the source-grid center coordinates from self.srcfile and cache
    # them on the instance as plain Python lists.
    # NOTE(review): the handle is kept open on self.ncfile and not closed
    # here — confirm another method closes it.
    self.ncfile = Dataset(self.srcfile, 'r')
    variable_name = 'grid_center_lat'
    __grid_center_lat = self.ncfile.variables[variable_name][:]
    variable_name = 'grid_center_lon'
    __grid_center_lon = self.ncfile.variables[variable_name][:]
    # the self.__* attributes are name-mangled, i.e. private to this class
    self.__grid_center_lat = __grid_center_lat.tolist()
    self.__grid_center_lon = __grid_center_lon.tolist()
def load_rmpwfile(fname):
    """Read a remap-weights netCDF file and return its arrays as lists.

    Returns a 7-tuple: (src_lat, src_lon, dst_lat, dst_lon,
    remap_src_indx, remap_dst_indx, remap_matrix_compact).
    """
    ncfile = Dataset(fname, 'r')
    # pull each variable in file order, converting to plain Python lists
    loaded = []
    for nc_name in ('src_grid_center_lat', 'src_grid_center_lon',
                    'dst_grid_center_lat', 'dst_grid_center_lon',
                    'remap_src_indx', 'remap_dst_indx', 'remap_matrix'):
        loaded.append(ncfile.variables[nc_name][:].tolist())
    ncfile.close()
    return tuple(loaded)
def transfercoord(self):
    # Create the new SCRIP-style grid file and copy the cached center
    # coordinates plus the integer grid mask into it.
    self.resncfile = Dataset(self.newfile, 'w')
    tm = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
    # set dimension info
    self.resncfile.createDimension('grid_size', self.grid_size)
    self.resncfile.createDimension('grid_rank', self.grid_rank)
    self.resncfile.createDimension('grid_corners', self.grid_corners)
    # set variable info
    grid_dims_var = self.resncfile.createVariable('grid_dims', dtype('int32').char, ('grid_rank', ))
    grid_center_lat_var = self.resncfile.createVariable(
        'grid_center_lat', dtype('d').char, ('grid_size', ))
    grid_center_lat_var.units = 'degrees'
    grid_center_lon_var = self.resncfile.createVariable(
        'grid_center_lon', dtype('d').char, ('grid_size', ))
    grid_center_lon_var.units = 'degrees'
    grid_imask_var = self.resncfile.createVariable('grid_imask', dtype('i').char, ('grid_size', ))
    grid_imask_var.units = 'unitless'
    grid_dims_var[:] = self.grid_dims
    # self.__grid_center_lat/lon are name-mangled private coordinate lists
    grid_center_lat_var[:] = np.array(self.__grid_center_lat)
    grid_center_lon_var[:] = np.array(self.__grid_center_lon)
    # explicit int32 coercion before assignment to the integer variable
    buffer1 = [np.int32(i) for i in self.grid_imask]
    grid_imask_var[:] = np.array(buffer1)
    # global provenance attributes
    setattr(self.resncfile, 'title', 'Threp ' + self.newfile)
    setattr(self.resncfile, 'createdate', tm)
    setattr(self.resncfile, 'conventions', 'Threp')
    setattr(self.resncfile, 'grid', self.newfile)
    # NOTE(review): self.resncfile is left open — confirm it is closed or
    # sync'd elsewhere before the process exits.
def write_output(self):
    # Write the collocated target data (time/lat/lon, optional level arrays,
    # and 1- to 4-D data variables) to self.file.  Only the 'netCDF' format
    # is handled; any other self.format is silently ignored.
    local_dimension_directory = {}
    if self.format == 'netCDF':
        # open a new netCDF file for writing.
        ncfile = Dataset(self.file, 'w')
        ndim = len(self.target_time)
        ncfile.createDimension('time', ndim)
        # create variables
        # first argument is name of variable, second is datatype, third is
        # a tuple with the names of dimensions.
        time = ncfile.createVariable('time', dtype('float64').char, ('time', ))
        lats = ncfile.createVariable('latitude', dtype('float64').char, ('time', ))
        lons = ncfile.createVariable('longitude', dtype('float64').char, ('time', ))
        time.units = 'second (since midnight of 1/1/1970)'
        lats.units = 'degree'
        lons.units = 'degree'
        # create variables for levels: 1-D arrays, each on its own dimension
        # unless the attribute dict names a shared one via 'dimension1'
        lkeys = self.target_levels.keys()
        if len(lkeys) > 0:
            self.lvars = [0] * len(lkeys)
            kk = 0
            for k in lkeys:
                # netCDF names must not contain spaces
                kname = k.replace(' ', '_')
                atuple = self.target_levels[k]
                attribute = atuple[0]
                local_level = atuple[1]
                lc = 'lc-' + str(kk)
                if attribute.has_key('dimension1'):
                    lc = attribute['dimension1']
                    # a shared dimension is created only the first time
                    if (local_dimension_directory.has_key(lc) == False):
                        ncfile.createDimension(lc, len(local_level))
                        local_dimension_directory[lc] = len(local_level)
                else:
                    ncfile.createDimension(lc, len(local_level))
                self.lvars[kk] = ncfile.createVariable(kname, dtype('float64').char, (lc, ))
                if attribute.has_key('units'):
                    self.lvars[kk].units = attribute['units']
                if attribute.has_key('long_name'):
                    self.lvars[kk].long_name = attribute['long_name']
                kk += 1
            # end of for k loop
            # write data to variables for levels
            for kk in range(len(lkeys)):
                # force an explicit 1-D shape before writing
                self.target_levels[lkeys[kk]][1].shape = (len(self.target_levels[lkeys[kk]][1]), )
                self.lvars[kk][:] = self.target_levels[lkeys[kk]][1]
            # end of for kk loop
        # create variables for data
        # first argument is name of variable, second is datatype, third is
        # a tuple with the names of dimensions.
        keys = self.target_data.keys()
        self.vars = [0] * len(keys)
        kk = 0
        for k in keys:
            kname = k.replace(' ', '_')
            s = self.target_data[k][1].shape
            d2 = len(s)
            cc = 'cc-' + str(kk)
            if d2 == 1:
                # 1D data: time series only
                self.vars[kk] = ncfile.createVariable(kname, dtype('float64').char, ('time', ))
            elif d2 == 2:
                # 2D data: time x (named or auto-generated) trailing dimension
                if self.target_data[k][0].has_key('dimension1'):
                    local_dimension = self.target_data[k][0]['dimension1']
                    cc = local_dimension
                    if (local_dimension_directory.has_key(local_dimension) == False):
                        ncfile.createDimension(local_dimension, s[1])
                        local_dimension_directory[local_dimension] = s[1]
                else:
                    ncfile.createDimension(cc, s[1])
                self.vars[kk] = ncfile.createVariable(kname, dtype('float64').char, ('time', cc))
            elif d2 == 3:
                # 3D data: per-variable auto-generated trailing dimensions
                cc1 = cc + '1'
                cc2 = cc + '2'
                ncfile.createDimension(cc1, s[1])
                ncfile.createDimension(cc2, s[2])
                self.vars[kk] = ncfile.createVariable(kname, dtype('float64').char, ('time', cc1, cc2))
            elif d2 == 4:
                # 4D data
                cc1 = cc + '1'
                cc2 = cc + '2'
                cc3 = cc + '3'
                ncfile.createDimension(cc1, s[1])
                ncfile.createDimension(cc2, s[2])
                ncfile.createDimension(cc3, s[3])
                self.vars[kk] = ncfile.createVariable(kname, dtype('float64').char, ('time', cc1, cc2, cc3))
            if self.target_data[k][0].has_key('units'):
                self.vars[kk].units = self.target_data[k][0]['units']
            if self.target_data[k][0].has_key('long_name'):
                self.vars[kk].long_name = self.target_data[k][0]['long_name']
            # add missing_value in the variable attribute
            self.vars[kk].missing_value = UT.NAN
            # add invalid_data in the variable attribute from collocation
            self.vars[kk].collocation_invalid_value = self.invalid_data
            kk += 1
        # end of for k loop
        # write time, lat, lon to variables
        self.target_time.shape = (ndim, )
        self.target_lat.shape = (ndim, )
        self.target_lon.shape = (ndim, )
        time[:] = self.target_time
        lats[:] = self.target_lat
        lons[:] = self.target_lon
        # write data to variables for data, normalising the leading axis
        # to the 'time' length first
        for kk in range(len(keys)):
            s3 = self.target_data[keys[kk]][1].shape
            d3 = len(s3)
            if d3 == 1:
                self.target_data[keys[kk]][1].shape = (ndim, )
            elif d3 == 2:
                self.target_data[keys[kk]][1].shape = (ndim, s3[1])
            elif d3 == 3:
                self.target_data[keys[kk]][1].shape = (ndim, s3[1], s3[2])
            elif d3 == 4:
                self.target_data[keys[kk]][1].shape = (ndim, s3[1], s3[2], s3[3])
            self.vars[kk][:] = self.target_data[keys[kk]][1]
        # end of for kk loop
        ncfile.close()
def write(self):
    # Write the remap-weights file: both grids' dims/centers/masks plus the
    # compact sparse remap matrix (src index, dst index, weight per entry).
    ncfile = Dataset(self.fname, 'w')
    tm = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
    # set dimension info
    ncfile.createDimension('src_grid_size', self.obj.src_grid_size)
    ncfile.createDimension('dst_grid_size', self.obj.dst_grid_size)
    ncfile.createDimension('n_wgt', self.n_wgt)
    ncfile.createDimension('src_grid_rank', self.obj.src_grid_rank)
    ncfile.createDimension('dst_grid_rank', self.obj.dst_grid_rank)
    ncfile.createDimension('num_wgts', 1)
    ncfile.createDimension('src_grid_corners', self.obj.src_grid_corners)
    ncfile.createDimension('dst_grid_corners', self.obj.dst_grid_corners)
    # set variable info
    src_grid_dims_var = ncfile.createVariable('src_grid_dims', dtype('int32').char, ('src_grid_rank', ))
    dst_grid_dims_var = ncfile.createVariable('dst_grid_dims', dtype('int32').char, ('dst_grid_rank', ))
    src_grid_center_lat_var = ncfile.createVariable(
        'src_grid_center_lat', dtype('d').char, ('src_grid_size', ))
    src_grid_center_lon_var = ncfile.createVariable(
        'src_grid_center_lon', dtype('d').char, ('src_grid_size', ))
    dst_grid_center_lat_var = ncfile.createVariable(
        'dst_grid_center_lat', dtype('d').char, ('dst_grid_size', ))
    dst_grid_center_lon_var = ncfile.createVariable(
        'dst_grid_center_lon', dtype('d').char, ('dst_grid_size', ))
    src_grid_imask_var = ncfile.createVariable('src_grid_imask', dtype('i').char, ('src_grid_size', ))
    dst_grid_imask_var = ncfile.createVariable('dst_grid_imask', dtype('i').char, ('dst_grid_size', ))
    remap_src_indx_var = ncfile.createVariable('remap_src_indx', dtype('i').char, ('n_wgt', ))
    remap_dst_indx_var = ncfile.createVariable('remap_dst_indx', dtype('i').char, ('n_wgt', ))
    remap_matrix_var = ncfile.createVariable('remap_matrix', dtype('d').char, ('n_wgt', ))
    src_grid_dims_var[:] = self.obj.src_grid_dims
    dst_grid_dims_var[:] = self.obj.dst_grid_dims
    src_grid_center_lat_var[:] = np.array(
        self.obj.original_src_grid_center_lat)
    src_grid_center_lon_var[:] = np.array(
        self.obj.original_src_grid_center_lon)
    dst_grid_center_lat_var[:] = np.array(self.obj.dst_grid_center_lat)
    dst_grid_center_lon_var[:] = np.array(self.obj.dst_grid_center_lon)
    #src_grid_imask_var[:] = np.array(self.obj.original_src_grid_imask)
    # the explicit int32 casts below replace the commented-out direct
    # assignments — presumably the integer variables rejected the raw
    # values; TODO confirm
    buffer1 = [np.int32(i) for i in self.obj.original_src_grid_imask]
    src_grid_imask_var[:] = np.array(buffer1)
    buffer2 = [np.int32(i) for i in self.obj.dst_grid_imask]
    dst_grid_imask_var[:] = np.array(buffer2)
    #dst_grid_imask_var[:] = np.array(self.obj.dst_grid_imask)
    buffer3 = [np.int32(i) for i in self.obj.remap_src_indx]
    remap_src_indx_var[:] = np.array(buffer3)
    #remap_src_indx_var[:] = np.array(self.obj.remap_src_indx)
    buffer4 = [np.int32(i) for i in self.obj.remap_dst_indx]
    remap_dst_indx_var[:] = np.array(buffer4)
    #remap_dst_indx_var[:] = np.array(self.obj.remap_dst_indx)
    remap_matrix_var[:] = np.array(self.obj.remap_matrix_compact)
    # global provenance attributes
    setattr(ncfile, 'title', 'Threp ' + self.fname)
    setattr(ncfile, 'createdate', tm)
    setattr(ncfile, 'map_method', self.method)
    setattr(ncfile, 'conventions', 'Threp')
    setattr(ncfile, 'src_grid', self.obj.src_grid_name)
    setattr(ncfile, 'dst_grid', self.obj.dst_grid_name)
    ncfile.close()
    print '*** Successfully generate remap matrix file. ***'
def __init__(self, file_name):
    ''' Initialize grid file object. Open a netCDF file for reading.'''
    # keep both the path and the open read-only handle on the instance
    self.filename = file_name
    self.ncfile = Dataset(file_name, 'r')
temperatures. The data file read by this program is produced companion program sfc_pres_temp_wr.py. This example demonstrates the netCDF Python API. It will work either with the Scientific Python NetCDF version 3 interface (http://dirac.cnrs-orleans.fr/ScientificPython/) of the 'classic' version of the netCDF4 interface. (http://netcdf4-python.googlecode.com/svn/trunk/docs/netCDF4_classic-module.html) To switch from one to another, just comment/uncomment the appropriate import statements at the beginning of this file. Jeff Whitaker <*****@*****.**> 20070202 """ nlats = 6; nlons = 12 # open netCDF file for reading ncfile = Dataset('sfc_pres_temp.nc','r') # expected latitudes and longitudes of grid lats_check = -25.0 + 5.0*arange(nlats,dtype='float32') lons_check = -125.0 + 5.0*arange(nlons,dtype='float32') # expected data. press_check = 900. + arange(nlats*nlons,dtype='float32') # 1d array press_check.shape = (nlats,nlons) # reshape to 2d array temp_check = 9. + 0.25*arange(nlats*nlons,dtype='float32') # 1d array temp_check.shape = (nlats,nlons) # reshape to 2d array # get pressure and temperature variables. temp = ncfile.variables['temperature'] press = ncfile.variables['pressure'] # check units attributes. try: assert(temp.units == 'celsius') except:
#Run the script to get the data #execfile('open_L2_C6_MODIS_run.py') from open_L2_C6_MODIS_file_func import * #Jesus' MODIS file for SO #path='/group_workspaces/jasmin/asci/dgrosv/MODIS/Jesus/' path = '/nfs/a201/eejvt/CASIM/SO_KALLI/SATELLITE/modis/' file_hdf = 'MYD06_L2.A2014343.1325.006.2014344210847.hdf' #Get the data MODL2_C6_outputs = open_modis_L2(path, file_hdf) #Will just write out N37 Nd_37 = MODL2_C6_outputs.get('N37') nx = Nd_37.shape[0] ny = Nd_37.shape[1] #write the file ncfile = Dataset(path + 'Nd_' + file_hdf + '.nc', 'w') ncfile.createDimension('x', nx) ncfile.createDimension('y', ny) data = ncfile.createVariable('CDNC_37_MODIS', np.dtype('float64').char, ('x', 'y')) data[:] = Nd_37 ncfile.close() #%%
#! /usr/bin/python # Filename: gen_gx1v1.py # Notice: no corner info be contained. import time import scipy import copy from numpy import dtype from Scientific.IO.NetCDF import NetCDFFile as Dataset ncfile = Dataset('../../grid/CPLDATA/X02.cpl6.ha.0007-07.nc', 'r') nc = Dataset('T42_Gaussian_mask.nc', 'w') tm = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time())) nx = 128 ny = 64 grid_size = ncfile.dimensions['n_a'] grid_rank = ncfile.dimensions['d2'] # load data #dims = ncfile.variables['grid_dims'][:] dims = scipy.array([nx, ny]) lat = ncfile.variables['domain_a_lat'][:, :] tmp = [] for i in range(ny): for j in range(nx): tmp.append(lat[i][j]) lat = scipy.array(tmp) lon = ncfile.variables['domain_a_lon'][:, :] tmp = [] for i in range(ny):
def __init__(self, file_name):
    # Remember the path and open the netCDF file read-only.
    self.filename = file_name
    self.ncfile = Dataset(file_name, 'r')
#! /usr/bin/python # Filename: gen_gx1v1.py # Notice: no corner info be contained. import time import scipy import copy from numpy import dtype from Scientific.IO.NetCDF import NetCDFFile as Dataset ncfile = Dataset('../../grid/CPLDATA/X02.cpl6.ha.0007-07.nc', 'r') nc = Dataset('gx1v1.nc', 'w') tm = time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(time.time())) nx = 360; ny = 200 grid_size = ncfile.dimensions['n_o'] grid_rank = ncfile.dimensions['d2'] # load data #dims = ncfile.variables['grid_dims'][:] dims = scipy.array([nx, ny]) lat = ncfile.variables['domain_o_lat'][:, :] tmp = [] for i in range(ny): for j in range(nx): tmp.append(lat[i][j]) lat = scipy.array(tmp) lon = ncfile.variables['domain_o_lon'][:, :] tmp = [] for i in range(ny): for j in range(nx):
def classificaGELO(caminhonc, caminhosaidatsmmsg, caminhosaidaantartica):
    # Classify OSI-SAF sea-ice concentration into closed/open/no-ice classes
    # and render it on a south-polar stereographic Basemap, saved as PNG
    # under caminhosaidaantartica.  (caminhosaidatsmmsg is unused here.)
    # >> Building the file list --------------------------------------------
    dirList = os.listdir(caminhonc)
    print dirList
    controleNome = dirList[0]
    # Extract date/hour fields from the first file name, anchored on "CON"
    posicA = controleNome.find("CON")
    anoi = posicA + 4    # year start
    anof = posicA + 8    # year end
    mesi = posicA + 8    # month start
    mesf = posicA + 10   # month end
    diai = posicA + 10   # day start
    diaf = posicA + 12   # day end
    horai = posicA + 12  # hour start
    horaf = posicA + 14  # hour end
    ano = controleNome[anoi:anof]
    mes = controleNome[mesi:mesf]
    dia = controleNome[diai:diaf]
    horaTIT = controleNome[horai:horaf]
    #imLista = len(dirList)  # original behaviour: whole directory listing
    imLista = 01  # redefined to treat the data on a 24-hour basis
    print ano, mes, dia, horaTIT
    print "################### TRATANDO OS DADOS *composição* ###################"
    # >> Reading the files and composing the matrix ----------------------
    entradas = ''
    for hora in range(0, imLista):
        # build the input path; nomeMSG / FMT_nc are presumably module-level
        # constants — TODO confirm
        entradas = caminhonc + nomeMSG + ano + mes + dia + str(horaTIT).zfill(2) + FMT_nc
        print entradas
        fileCon = Dataset(entradas, 'r')
    #------------------------------------------------------------------
    IC0 = fileCon.variables['icec'][:]
    IC0 = numpy.copy(IC0)
    dims = IC0.shape
    ny = dims[1]
    nx = dims[2]
    IC0 = IC0[0, :, :]
    IC02 = np.zeros((ny, nx))
    #print min(IC0)
    #print max(IC0)
    #exit()
    # Classify concentration into plot classes:
    # 3 = 0 or (0, 9.9]; 2 = (9.9, 39.9]; 1 = (39.9, 100]; 0 = anything else
    for j in range(0, nx):
        for i in range(0, ny):
            if IC0[i, j] > 0. and IC0[i, j] <= 9.9:
                IC02[i, j] = 3
            elif IC0[i, j] > 9.9 and IC0[i, j] <= 39.9:
                IC02[i, j] = 2
            elif IC0[i, j] > 39.9 and IC0[i, j] <= 100.0:
                IC02[i, j] = 1
            elif IC0[i, j] == 0.:
                IC02[i, j] = 3
            else:
                IC02[i, j] = 0
    #fileCon.close
    IC02 = IC02
    #my_cmap = colores.ListedColormap([(.3, .3, .3), (1., 0., 0.), (1., 0.4901960784, 0.0274509804), (1., 1., 0), (0.5490196078, 1., 0.6274509627), (0.5882352941, 0.7843137255, 1.), (0., 0.3921568627, 1.)])
    my_cmap = colors.ListedColormap(['gray', 'red', 'yellow', 'green'])
    # companion file holding the lon/lat of the OSI-SAF grid
    filename2 = '/home/geonetcast/gelo/d-eumetcast/LonLatGelo_OSI-SAF.nc'
    print filename2
    file1 = Dataset(filename2, 'r')
    lon0 = file1.variables['longitude'][:]
    lat0 = file1.variables['latitude'][:]
    fig = plt.figure(figsize=(9.5, 9))
    #print "to aqui"
    # south-polar stereographic projection (adjust here to switch poles)
    Dmap = Basemap(
        projection='spstere',
        lat_0=-90,
        lon_0=0.,
        boundinglat=-55,
        lat_ts=-70,
        resolution='h',
        rsphere=(6378273., 6356889.44891))
    #Dmap.fillcontinents(color='gray')
    cor = 'black'
    x0, y0 = Dmap(lon0, lat0)
    cmap = cm.Blues
    # NOTE(review): the raw concentration IC0 is plotted, not the classified
    # IC02 computed above — confirm this is intended.
    IC0 = np.flipud(IC0[:, :])
    col = Dmap.pcolor(x0, y0, IC0, shading='interp', vmin=0, vmax=3, cmap=my_cmap)
    # define parallels and meridians to draw.
    parallels = np.arange(-90., -55, 10.)
    meridians = np.arange(0., 360., 20.)
    Dmap.drawcoastlines(linewidth=.5, color=cor)  # coastline
    Dmap.drawcountries(linewidth=0.8, color='k', antialiased=1, ax=None, zorder=None)
    Dmap.drawstates(linewidth=0.5, color='k', antialiased=1, ax=None, zorder=None)
    Dmap.drawparallels(parallels, labels=[1, 0, 0, 0], color=cor, dashes=[1, 0], linewidth=0.2)
    Dmap.drawmeridians(meridians, labels=[0, 0, 0, 1], color=cor, dashes=[1, 0], linewidth=0.2)
    # colour palette
    #bounds=[421.5,422.5,423.5,424.5,425.5,426.5,427.5]
    bounds = [0.5, 1.5, 2.5, 3.5]
    # NOTE(review): this function uses both 'colores' and 'colors' as module
    # aliases — confirm both are imported at module level.
    norm = colores.BoundaryNorm(bounds, my_cmap.N)
    #l_label = '0-Mascara/1-Sem Gelo/2-Gelo Aberto/3-Gelo Fechado'
    #cbar = fig.colorbar(col, cmap=cm.Blues, norm=norm, boundaries=bounds, ticks=[422,423,424,425,426,427], orientation='horizontal', shrink=0.8,pad=0.045)
    #cbar.set_ticklabels(['9-10 Thents', '7-8 Thents', '4-6 Thents', '1-3 Thents', '< 1 Thents', 'Ice Free'])
    cbar = fig.colorbar(col, cmap=cmap, norm=norm, boundaries=bounds, ticks=[0, 1, 2, 3], orientation='horizontal', shrink=0.8, pad=0.045)
    cbar.set_ticklabels(['Gelo Fechado', 'Gelo Aberto', 'Sem Gelo'])
    #------------------------------------------------------------------
    # saving and presenting the image
    d1 = datetime.date(int(ano), int(mes), int(dia))
    # weekday abbreviation (Portuguese)
    diasem = d1.strftime("%A")
    if diasem == 'Monday':
        diasem = 'SEG'
    elif diasem == 'Tuesday':
        diasem = 'TER'
    elif diasem == 'Wednesday':
        diasem = 'QUA'
    elif diasem == 'Thursday':
        diasem = 'QUI'
    elif diasem == 'Friday':
        diasem = 'SEX'
    elif diasem == 'Saturday':
        diasem = 'SAB'
    elif diasem == 'Sunday':
        diasem = 'DOM'
    # month abbreviation (Portuguese)
    nomemes = d1.strftime("%B")
    if nomemes == 'January':
        nomemes = 'JAN'
    elif nomemes == 'February':
        nomemes = 'FEV'
    elif nomemes == 'March':
        nomemes = 'MAR'
    elif nomemes == 'April':
        nomemes = 'ABR'
    elif nomemes == 'May':
        nomemes = 'MAI'
    elif nomemes == 'June':
        nomemes = 'JUN'
    elif nomemes == 'July':
        nomemes = 'JUL'
    elif nomemes == 'August':
        nomemes = 'AGO'
    elif nomemes == 'September':
        nomemes = 'SET'
    elif nomemes == 'October':
        nomemes = 'OUT'
    elif nomemes == 'November':
        nomemes = 'NOV'
    elif nomemes == 'December':
        nomemes = 'DEZ'
    title('CHM-REMO Limite de Gelo Marinho ' + str(dia).zfill(2) + nomemes + ano + '(' + diasem + ') - EUMETCAST/O&SI-SAF', fontsize=8., fontweight='bold')
    dirsaida = caminhosaidaantartica
    FMT_png = ".png"
    nome1 = "LIM_GEL_"
    fname = dirsaida + nome1 + str(ano) + str(mes).zfill(2) + str(dia).zfill(2) + 'analise' + FMT_png
    #savefig(fname, bbox_inches='tight')
    savefig(fname, dpi=700, bbox_inches='tight')
def write_output(self, afl):
    # Write collocated target data to self.file, honouring per-variable
    # source metadata (units, fill values, MODIS swath attributes) and the
    # missing-value policy supplied by *afl* (the XML input parameters).
    # Only the 'netCDF' format is handled.
    print afl
    local_dimension_directory = {}
    if self.format == 'netCDF':
        # open a new netCDF file for writing.
        ncfile = Dataset(self.file, 'w')
        ndim = len(self.target_time)
        ncfile.createDimension('time', ndim)
        # create variables
        # first argument is name of variable, second is datatype, third is
        # a tuple with the names of dimensions.
        time = ncfile.createVariable('time', dtype('float64').char, ('time', ))
        lats = ncfile.createVariable('latitude', dtype('float32').char, ('time', ))
        lons = ncfile.createVariable('longitude', dtype('float32').char, ('time', ))
        time.units = 'second (since midnight of 1/1/1970)'
        lats.units = 'degree'
        lons.units = 'degree'
        # write time, lat, lon to variables
        self.target_time.shape = (ndim, )
        self.target_lat.shape = (ndim, )
        self.target_lon.shape = (ndim, )
        time[:] = N.cast['float64'](self.target_time)
        lats[:] = N.cast['float32'](self.target_lat)
        lons[:] = N.cast['float32'](self.target_lon)
        # create variables for levels
        # first argument is name of variable, second is datatype, third is
        # a tuple with the names of dimensions.
        lkeys = self.target_levels.keys()
        print 'lkeys: ', lkeys
        if len(lkeys) > 0:
            self.lvars = [0] * len(lkeys)
            kk = 0
            for k in lkeys:
                print 'k: ', k
                # netCDF names must not contain spaces
                kname = k.replace(' ', '_')
                atuple = self.target_levels[k]
                attribute = atuple[0]
                local_level = atuple[1]
                lc = 'lc-' + str(kk)
                print 'lc: ', lc
                if attribute.has_key('dimension1'):
                    # caller-specified (possibly shared) dimension name
                    lc = attribute['dimension1']
                    if (local_dimension_directory.has_key(lc) == False):
                        ncfile.createDimension(lc, len(local_level))
                        local_dimension_directory[lc] = len(local_level)
                elif kname != 'P0':
                    ncfile.createDimension(lc, len(local_level))
                else:  # 'P0' — presumably a scalar; gets a length-1 dimension
                    ncfile.createDimension(lc, 1)
                self.lvars[kk] = ncfile.createVariable(kname, dtype('float32').char, (lc, ))
                if attribute.has_key('units'):
                    self.lvars[kk].units = attribute['units']
                if attribute.has_key('long_name'):
                    self.lvars[kk].long_name = attribute['long_name']
                kk += 1
            # end of for k loop
            # write data to variables for levels
            for kk in range(len(lkeys)):
                print 'kk: ', kk
                if (lkeys[kk] != 'P0'):
                    self.target_levels[lkeys[kk]][1].shape = (len(self.target_levels[lkeys[kk]][1]), )
                    self.lvars[kk][:] = N.cast['float32'](self.target_levels[lkeys[kk]][1])
                else:
                    # wrap scalar P0 in a list so it writes as a 1-element array
                    self.lvars[kk][:] = N.cast['float32']([self.target_levels[lkeys[kk]][1]])
            # end of for kk loop
        # create variables for data
        # first argument is name of variable, second is datatype, third is
        # a tuple with the names of dimensions.
        keys = self.target_data.keys()
        self.vars = [0] * len(keys)
        kk = 0
        for k in keys:
            kname = k.replace(' ', '_')
            # check whether the data type should be integer or float
            data_type = self.check_data_type(kname)
            # check whether 2D array can be collapsed to 1D array
            s = self.target_data[k][1].shape
            d2 = len(s)
            if d2 == 2 and s[1] == 1:
                tmp = N.reshape(self.target_data[k][1], s[0])
                attribute = self.target_data[k][0]
                self.target_data[k] = (attribute, tmp)
            s = self.target_data[k][1].shape
            d2 = len(s)
            cc = 'cc-' + str(kk)
            if d2 == 1:
                # 1D data
                self.vars[kk] = ncfile.createVariable(kname, data_type, ('time', ))
            elif d2 == 2:
                # 2D data: time x (named or auto-generated) trailing dimension
                if self.target_data[k][0].has_key('dimension1'):
                    local_dimension = self.target_data[k][0]['dimension1']
                    cc = local_dimension
                    if (local_dimension_directory.has_key(local_dimension) == False):
                        ncfile.createDimension(local_dimension, s[1])
                        local_dimension_directory[local_dimension] = s[1]
                else:
                    ncfile.createDimension(cc, s[1])
                self.vars[kk] = ncfile.createVariable(kname, data_type, ('time', cc))
            elif d2 == 3:
                # 3D data
                cc1 = cc + '1'
                cc2 = cc + '2'
                ncfile.createDimension(cc1, s[1])
                ncfile.createDimension(cc2, s[2])
                self.vars[kk] = ncfile.createVariable(kname, data_type, ('time', cc1, cc2))
            elif d2 == 4:
                # 4D data
                cc1 = cc + '1'
                cc2 = cc + '2'
                cc3 = cc + '3'
                ncfile.createDimension(cc1, s[1])
                ncfile.createDimension(cc2, s[2])
                ncfile.createDimension(cc3, s[3])
                self.vars[kk] = ncfile.createVariable(kname, data_type, ('time', cc1, cc2, cc3))
            # copy recognised source attributes through to the output variable
            if self.target_data[k][0].has_key('units'):
                self.vars[kk].units = self.target_data[k][0]['units']
            if self.target_data[k][0].has_key('long_name'):
                self.vars[kk].long_name = self.target_data[k][0]['long_name']
            if self.target_data[k][0].has_key('_FillValue'):
                self.vars[kk].FillValue = self.target_data[k][0]['_FillValue']
            if self.target_data[k][0].has_key('missing_value'):
                self.vars[kk].missing_value = self.target_data[k][0]['missing_value']
            if self.target_data[k][0].has_key('scale_factor'):
                self.vars[kk].scale_factor = self.target_data[k][0]['scale_factor']
            if self.target_data[k][0].has_key('add_offset'):
                self.vars[kk].add_offset = self.target_data[k][0]['add_offset']
            if self.target_data[k][0].has_key('valid_range'):
                self.vars[kk].valid_range = self.target_data[k][0]['valid_range']
            if self.target_data[k][0].has_key('Parameter_Type'):
                self.vars[kk].Parameter_Type = self.target_data[k][0]['Parameter_Type']
            if self.target_data[k][0].has_key('Cell_Along_Swath_Sampling'):
                self.vars[kk].Cell_Along_Swath_Sampling = self.target_data[k][0]['Cell_Along_Swath_Sampling']
            if self.target_data[k][0].has_key('Cell_Across_Swath_Sampling'):
                self.vars[kk].Cell_Across_Swath_Sampling = self.target_data[k][0]['Cell_Across_Swath_Sampling']
            if self.target_data[k][0].has_key('Geolocation_Pointer'):
                self.vars[kk].Geolocation_Pointer = self.target_data[k][0]['Geolocation_Pointer']
            # add missing_value in the variable attribute if given by the XML
            # input parameter and not already present in the source metadata
            if (afl.missing_value != 'None' and self.target_data[k][0].has_key('missing_value') == False and self.target_data[k][0].has_key('_FillValue') == False):
                self.vars[kk].missing_value = afl.missing_value
            # add invalid_data in the variable attribute from collocation
            self.vars[kk].collocation_invalid_value = self.invalid_data
            kk += 1
        # end of for k loop
        # write data to variables for data, normalising the leading axis
        # to the 'time' length first
        for kk in range(len(keys)):
            # check whether the data type should be integer or float
            data_type = self.check_data_type(keys[kk])
            s3 = self.target_data[keys[kk]][1].shape
            d3 = len(s3)
            if d3 == 1:
                self.target_data[keys[kk]][1].shape = (ndim, )
            elif d3 == 2:
                self.target_data[keys[kk]][1].shape = (ndim, s3[1])
            elif d3 == 3:
                self.target_data[keys[kk]][1].shape = (ndim, s3[1], s3[2])
            elif d3 == 4:
                self.target_data[keys[kk]][1].shape = (ndim, s3[1], s3[2], s3[3])
            if data_type == 'i':
                self.vars[kk][:] = N.cast['int32'](self.target_data[keys[kk]][1])
            else:
                self.vars[kk][:] = self.target_data[keys[kk]][1]  # float32
        # end of for kk loop
        ncfile.close()
#!/usr/bin/env python # Create a 2D netCDF file. from Scientific.IO.NetCDF import NetCDFFile as Dataset #from netCDF4_classic import Dataset from numpy import arange, dtype # array module from http://numpy.scipy.org # the output array to write will be nx x ny nx = 6 ny = 12 # open a new netCDF file for writing. ncfile = Dataset('simple_xy.nc', 'w') # create the output data. data_out = arange(nx * ny) # 1d array data_out.shape = (nx, ny) # reshape to 2d array # create the x and y dimensions. ncfile.createDimension('x', nx) ncfile.createDimension('y', ny) # create the variable (4 byte integer in this case) # first argument is name of variable, second is datatype, third is # a tuple with the names of dimensions. data = ncfile.createVariable('data', dtype('int32').char, ('x', 'y')) # write data to variable. data[:] = data_out # close the file. ncfile.close() print '*** SUCCESS writing example file simple_xy.nc!'
#! /usr/bin/python # Filename: rgen_triplepole.py __author__ = ['Wu Hong<*****@*****.**>'] import time import scipy from numpy import dtype from Scientific.IO.NetCDF import NetCDFFile as Dataset ncfile = Dataset('../../grid/More/Ocean_p50_for_SCRIP.nc', 'r') nc = Dataset('Ocean_p50_triplepole.nc', 'w') tm = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time())) nx = 720; ny = 410 grid_size = ncfile.dimensions['grid_size'] grid_rank = ncfile.dimensions['grid_rank'] # load data dims = scipy.array([nx, ny]) lat = ncfile.variables['grid_center_lat'][:] tmp = [] for i in xrange(nx * ny): if lat[i] < 0: lat[i] += 360 tmp.append(lat[i]) lon_adjust = scipy.array(tmp) lon = ncfile.variables['grid_center_lon'][:] tmp = [] for i in xrange(nx * ny):