Example #1
def ncep2fall3d(ncep_filename, fall3d_ncep_filename, verbose=True):
    """Convert standard NCEP file to fall3d NCEP format
    """
    
    # Copy standard NCEP file to fall3d NCEP file
    s = 'cp %s %s' % (ncep_filename, fall3d_ncep_filename)
    os.system(s)
    
    # Open files
    infile = NetCDFFile(ncep_filename)
    outfile = NetCDFFile(fall3d_ncep_filename, 'a')
    
    # Establish special global attributes for fall3d NCEP format

    if verbose:
        print 'Found dimensions:', infile.dimensions.keys()
        print 'Found variables:', infile.variables.keys()
    
    lon = infile.variables['lon'][:]
    lonmin = min(lon)
    lonmax = max(lon)    
    
    lat = infile.variables['lat'][:]
    latmin = min(lat)
    latmax = max(lat)    
    
    nx = infile.dimensions['lon']
    ny = infile.dimensions['lat']        
    np = infile.dimensions['pres']                
    nt = infile.dimensions['time']            
    if verbose:
        print nx, ny, np, nt
    

    infile.close()
    outfile.close()
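
A minimal usage sketch for the converter above, assuming `os` and `NetCDFFile` (from Scientific.IO.NetCDF) are imported in the surrounding module; the file names are hypothetical.

# Hypothetical input/output names; ncep2fall3d is the function defined above.
ncep2fall3d('ncep_standard.nc', 'ncep_fall3d.nc', verbose=True)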
Example #2
 def gen(self):
   ncfile = Dataset(self.fname, 'w')
   tm = time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(time.time()))
   
   # set dimension info 
   ncfile.createDimension('grid_size', self.obj.grid_size)
   
   # set variable info
   grid_center_lat_var = ncfile.createVariable('grid_center_lat', dtype('d').char, ('grid_size',))
   grid_center_lon_var = ncfile.createVariable('grid_center_lon', dtype('d').char, ('grid_size',))
   physical_variable = ncfile.createVariable('physical_variable', dtype('d').char, ('grid_size',))
   
   grid_center_lat_var[:] = np.array(self.obj.grid_center_lat)
   grid_center_lon_var[:] = np.array(self.obj.grid_center_lon)
   physical_variable[:] = np.array(self.obj.physical_variable)
    
   setattr(ncfile, 'title', 'Threp ' + self.fname)
   setattr(ncfile, 'createdate', tm)
   setattr(ncfile, 'map_method', self.method)
   setattr(ncfile, 'conventions', 'Threp')
   setattr(ncfile, 'src_grid', self.obj.src_grid_name)
   setattr(ncfile, 'dst_grid', self.obj.dst_grid_name)
   
   ncfile.close() 
   print '*** Successfully generated netcdf file for ncl usage. ***'
Example #3
def test_modis():
        
    modis_file      = "MYD06_L2.A2010100.0755.051.2010108054555.hdf"
    print "****** Reading MODIS data from file: ", modis_file
    modis           = FEMODIS.front_end_modis_cloud_1km_dev(modis_file)
    tim=modis.get_time()
    lat=modis.get_latitude()
    lon=modis.get_longitude() 
    dat=modis.get_data()
    print dat.keys()
    cwp=dat['Cloud_Water_Path']
 
    # print lat, lon, lwp
    ncfile = Dataset('modis_1km.nc','w')
    ndim = len(lat)
    ncfile.createDimension('time',ndim)
    time = ncfile.createVariable('time',dtype('float32').char,('time', ))
    lats = ncfile.createVariable('latitude',dtype('float32').char,('time', ))
    lons = ncfile.createVariable('longitude',dtype('float32').char,('time', ))
    cwps = ncfile.createVariable('cloud_water_path',dtype('float32').char,('time', ))
    time[:] = N.cast['float32'](tim)
    lats[:] = N.cast['float32'](lat)
    lons[:] = N.cast['float32'](lon)
    cwps[:] = N.cast['float32'](cwp[1])
    ncfile.close()    
Example #4
def OPENPIV2D2C(filename,ux_out,uy_out,x_out,y_out,flag1,flag2,flag3):
    """Storage in NetCDF format: 2D2C PIV datas with 3 flags used in OPENPIV"""
    # open a new netCDF file for writing.
    ncfile = Dataset(filename,'w') 
    # create the x and y dimensions.
    nx,ny=ux_out.shape
    ncfile.createDimension('x',nx)
    ncfile.createDimension('y',ny)
    # create the variable (4 byte integer in this case)
    # first argument is name of variable, second is datatype, third is
    # a tuple with the names of dimensions.
    #data = ncfile.createVariable('data',np.dtype('int32').char,('x','y'))
    xvar = ncfile.createVariable('xvar','d',('x','y'))
    yvar = ncfile.createVariable('yvar','d',('x','y'))
    ux = ncfile.createVariable('ux','d',('x','y'))
    uy = ncfile.createVariable('uy','d',('x','y'))
    Flags1 = ncfile.createVariable('flag1','d',('x','y'))
    Flags2 = ncfile.createVariable('flag2','d',('x','y'))
    Flags3 = ncfile.createVariable('flag3','d',('x','y'))
    # write data to variable.
    xvar[:] = x_out
    yvar[:] = y_out
    ux[:] = ux_out
    uy[:] = uy_out
    Flags1[:] = flag1
    Flags2[:] = flag2
    Flags3[:] = flag3
    # close the file.
    ncfile.close()
    print '*** SUCCESS writing:',filename
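
A hedged usage sketch for the writer above: it builds a small synthetic 2D2C field with numpy (all arrays must share `ux_out.shape`); the file name is hypothetical.

import numpy as np

nx, ny = 32, 24
y, x = np.indices((nx, ny), dtype=float)   # grid coordinates, both shaped (nx, ny)
ux = np.random.rand(nx, ny)                # synthetic velocity components
uy = np.random.rand(nx, ny)
flags = np.zeros((nx, ny))                 # no vectors flagged
OPENPIV2D2C('piv_field.nc', ux, uy, x, y, flags, flags, flags)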
Example #5
def modify_filter(gridfilename, ttlname, indflag = 1):

  tm = time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(time.time()))

  ncfile = Dataset(gridfilename, 'a')

  if indflag:
    grid_center_lat_var = ncfile.variables['grid_center_lat']
    setattr(grid_center_lat_var, 'units', 'degrees')
    grid_center_lon_var = ncfile.variables['grid_center_lon']
    setattr(grid_center_lon_var, 'units', 'degrees')
    grid_corner_lat_var = ncfile.variables['grid_corner_lat']
    setattr(grid_corner_lat_var, 'units', 'degrees')
    grid_corner_lon_var = ncfile.variables['grid_corner_lon']
    setattr(grid_corner_lon_var, 'units', 'degrees')
  
  setattr(ncfile, 'title', ttlname)
  setattr(ncfile, 'modifydate', tm)

  if hasattr(ncfile, 'grid_name'):
    delattr(ncfile, 'grid_name')

  if hasattr(ncfile, 'map_method'):
    delattr(ncfile, 'map_method')

  ncfile.sync()
  ncfile.close()
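
A usage sketch, assuming the grid file already contains the `grid_center_*` and `grid_corner_*` variables the function rewrites; the file name and title are hypothetical.

modify_filter('remap_grid_T42.nc', 'T42 Gaussian grid', indflag=1)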
Example #6
 def write(self):
   ncfile = Dataset(self.fname, 'w')
   tm = time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(time.time()))
   
   # set dimension info 
   ncfile.createDimension('src_grid_size', self.obj.src_grid_size)
   ncfile.createDimension('dst_grid_size', self.obj.dst_grid_size)
   ncfile.createDimension('n_wgt', self.n_wgt)
   ncfile.createDimension('src_grid_rank', self.obj.src_grid_rank)
   ncfile.createDimension('dst_grid_rank', self.obj.dst_grid_rank)
   ncfile.createDimension('num_wgts', 1)
   ncfile.createDimension('src_grid_corners', self.obj.src_grid_corners)
   ncfile.createDimension('dst_grid_corners', self.obj.dst_grid_corners)
   
   # set variable info
   src_grid_dims_var = ncfile.createVariable('src_grid_dims', dtype('int32').char, ('src_grid_rank',))
   dst_grid_dims_var = ncfile.createVariable('dst_grid_dims', dtype('int32').char, ('dst_grid_rank',))
   src_grid_center_lat_var = ncfile.createVariable('src_grid_center_lat', dtype('d').char, ('src_grid_size',))
   src_grid_center_lon_var = ncfile.createVariable('src_grid_center_lon', dtype('d').char, ('src_grid_size',))
   dst_grid_center_lat_var = ncfile.createVariable('dst_grid_center_lat', dtype('d').char, ('dst_grid_size',))
   dst_grid_center_lon_var = ncfile.createVariable('dst_grid_center_lon', dtype('d').char, ('dst_grid_size',))
   src_grid_imask_var = ncfile.createVariable('src_grid_imask', dtype('i').char, ('src_grid_size',))
   dst_grid_imask_var = ncfile.createVariable('dst_grid_imask', dtype('i').char, ('dst_grid_size',))
   remap_src_indx_var = ncfile.createVariable('remap_src_indx', dtype('i').char, ('n_wgt',))
   remap_dst_indx_var = ncfile.createVariable('remap_dst_indx', dtype('i').char, ('n_wgt',))
   remap_matrix_var = ncfile.createVariable('remap_matrix', dtype('d').char, ('n_wgt',))
   
   src_grid_dims_var[:] = self.obj.src_grid_dims
   dst_grid_dims_var[:] = self.obj.dst_grid_dims
   src_grid_center_lat_var[:] = np.array(self.obj.original_src_grid_center_lat)
   src_grid_center_lon_var[:] = np.array(self.obj.original_src_grid_center_lon)
   dst_grid_center_lat_var[:] = np.array(self.obj.dst_grid_center_lat)
   dst_grid_center_lon_var[:] = np.array(self.obj.dst_grid_center_lon)
   #src_grid_imask_var[:] = np.array(self.obj.original_src_grid_imask)
   buffer1 = [np.int32(i) for i in self.obj.original_src_grid_imask]
   src_grid_imask_var[:] = np.array(buffer1)
   buffer2 = [np.int32(i) for i in self.obj.dst_grid_imask]
   dst_grid_imask_var[:] = np.array(buffer2)
   #dst_grid_imask_var[:] = np.array(self.obj.dst_grid_imask)
   buffer3 = [np.int32(i) for i in self.obj.remap_src_indx]
   remap_src_indx_var[:] = np.array(buffer3)
   #remap_src_indx_var[:] = np.array(self.obj.remap_src_indx)
   buffer4 = [np.int32(i) for i in self.obj.remap_dst_indx]
   remap_dst_indx_var[:] = np.array(buffer4)
   #remap_dst_indx_var[:] = np.array(self.obj.remap_dst_indx)
   remap_matrix_var[:] = np.array(self.obj.remap_matrix_compact)
   
   setattr(ncfile, 'title', 'Threp ' + self.fname)
   setattr(ncfile, 'createdate', tm)
   setattr(ncfile, 'map_method', self.method)
   setattr(ncfile, 'conventions', 'Threp')
   setattr(ncfile, 'src_grid', self.obj.src_grid_name)
   setattr(ncfile, 'dst_grid', self.obj.dst_grid_name)
   
   ncfile.close() 
   print '*** Successfully generated remap matrix file. ***'
Example #7
 def select_file(self, filename):
     self.currentFilename = filename
     f = NetCDFFile(filename,"r")
     variables = f.variables
     
     if not variables.has_key('molecular_trace'):
         raise DensitySuperpositionError('Trace file format not compatible with Plugin')
     
     self.dim.SetValue(str(variables['molecular_trace'].getValue().shape))
     f.close()
Example #8
def get_single_model_data(dataset,start_dates,var,lat,lon,plev=None,models='all',n_ens=None):
    
    data_dir_main=get_data_dir_main()
    
    fname_coords=get_fname_coords(lat,lon,plev=plev)
    
    fname='get_seasfc_data_'+dataset+'_'+var+fname_coords
    
    nc_file=NetCDFFile(data_dir_main+fname+'.nc')
    
    #Convert intuitive variable name to name used inside files
    var_data_name=get_var_data_name(var)  
    
    if models=='all':
        if dataset=='ENSEMBLES':
            models=['ECMWF', 'INGV', 'Kiel', 'MetFr', 'MO']

    data_fc_sm={}  #dict to hold single model forecasts
    for start_date in start_dates:
        data_fc_sm[start_date]={}
        
        for model in models:
            data_name=model.lower()+'_'+start_date.lower()+'_'+var_data_name
            data_fc_sm[start_date][model]={}
            if n_ens:
                data_fc_sm[start_date][model]['data']=nc_file.variables[data_name][...,:n_ens]  #reads in data with dimensions [forecast month, year of forecast start, ensemble member]
            else:
                data_fc_sm[start_date][model]['data']=nc_file.variables[data_name][:]
                
            
            #Get the array containing the corresponding dates of the verification dataset in YYYYMM format.
            #This array has dimension [forecast month, year of forecast start]
            ver_month_arr_name_pos=getattr(nc_file.variables[data_name],'coordinates').find('verifying_month')
            ver_month_arr_name=getattr(nc_file.variables[data_name],'coordinates')[ver_month_arr_name_pos:ver_month_arr_name_pos+17]
            data_fc_sm[start_date][model]['verifying_month']=nc_file.variables[ver_month_arr_name][:]
            
            #Get lead times
            if len(nc_file.variables['forecast_lead_month'])==data_fc_sm[start_date][model]['data'].shape[0]:
                lead_time_dim_name='forecast_lead_month'
            elif 'forecast_lead_month_0' in nc_file.variables and len(nc_file.variables['forecast_lead_month_0'])==data_fc_sm[start_date][model]['data'].shape[0]:
                lead_time_dim_name='forecast_lead_month_0'
            else:
                print 'Forecast lead time dimension not known', dataset, start_date, model
                
            data_fc_sm[start_date][model]['lead times']=nc_file.variables[lead_time_dim_name][:]
            
            if dataset=='ENSEMBLES':
                lead_time_units='months'
            
            data_fc_sm[start_date][model]['lead time units']=lead_time_units


    nc_file.close()
    
    return data_fc_sm
Example #9
def load_rmpwfile(fname):
  ncfile = Dataset(fname, 'r')
  src_coord_lat = ncfile.variables['src_grid_center_lat'][:].tolist()
  src_coord_lon = ncfile.variables['src_grid_center_lon'][:].tolist()
  dst_coord_lat = ncfile.variables['dst_grid_center_lat'][:].tolist()
  dst_coord_lon = ncfile.variables['dst_grid_center_lon'][:].tolist()
  remap_src_indx = ncfile.variables['remap_src_indx'][:].tolist()
  remap_dst_indx = ncfile.variables['remap_dst_indx'][:].tolist()
  remap_matrix_compact = ncfile.variables['remap_matrix'][:].tolist()
  ncfile.close()
  return src_coord_lat, src_coord_lon, dst_coord_lat, dst_coord_lon, remap_src_indx, remap_dst_indx, remap_matrix_compact
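
A hedged sketch of applying the returned arrays, assuming a SCRIP-style remap file in which `remap_src_indx` and `remap_dst_indx` are 1-based cell indices; the file name and source field are made up.

(src_lat, src_lon, dst_lat, dst_lon,
 src_indx, dst_indx, weights) = load_rmpwfile('remap_matrix.nc')

# First-order remapping: accumulate weighted source values per target cell
src_field = [1.0] * len(src_lat)               # placeholder source data
dst_field = [0.0] * len(dst_lat)
for i, j, w in zip(src_indx, dst_indx, weights):
    dst_field[j - 1] += w * src_field[i - 1]   # convert 1-based to 0-based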
Example #10
def load_geoinfo(SrcFilename, DstFilename, GridSize, LatName, LonName):
    # get lat/long values from 400/640 level files
    ncfile = front_end_NetCDF_helper()

    ncfile.open(SrcFilename)

    # read lat array
    SrcLatData = ncfile.read_data(LatName)
    # print SrcLatData

    # read lon array
    SrcLonData = ncfile.read_data(LonName)
    # print SrcLonData

    ncfile.close()

    # create down-sampled long array
    DstLonData = N.zeros(GridSize)

    #
    for DstLonNo in range(0, GridSize):
        SrcLonNo = DstLonNo * 2

        # down-sampling strategy 1, use avarage
        # DstLonData[DstLonNo] = (SrcLonData[SrcLonNo] + SrcLonData[SrcLonNo+1])/2.0
        # print DstLonNo, SrcLonNo, DstLonData[DstLonNo], SrcLonData[SrcLonNo], SrcLonData[SrcLonNo+1]

        # start with 0 and skip alternate points
        DstLonData[DstLonNo] = SrcLonData[SrcLonNo]
        # print DstLonNo, SrcLonNo, DstLonData[DstLonNo], SrcLonData[SrcLonNo]

    # write NetCDF file
    # open output file
    dst_ncfile = NetCDFFile(DstFilename, "w")

    # define dimensions
    dst_lat_dim = dst_ncfile.createDimension(LatName, GridSize)
    dst_lon_dim = dst_ncfile.createDimension(LonName, GridSize)

    # define variables
    dst_lat_var = dst_ncfile.createVariable(LatName, "d", (LatName,))
    # dst_lat_var.setattr (
    # NetCDFFile.setattr (dst_lat_var, 'attrname', attr_val)

    dst_lon_var = dst_ncfile.createVariable(LonName, "d", (LonName,))

    # write lat data
    dst_lat_var.assignValue(SrcLatData)

    # write lon data
    dst_lon_var.assignValue(DstLonData)

    # close output file
    dst_ncfile.close()
Example #11
def load_file(file_name = file_name, time_start = time_start, 
		      time_end = time_end, lat_start = lat_start, lat_end = lat_end,
	              lon_start = lon_start, lon_end = lon_end, masked_value = masked_value):
		      
	nc = NetCDFFile(file_name, 'r')
	new_array = nc.variables['Cflx'][time_start:time_end, lat_start:lat_end, 
                                     lon_start:lon_end]
	nc.close()
	new_array = ma.masked_values(new_array, masked_value)
	new_array = new_array*1e08
	return new_array    
Example #12
def write_to_file(CS,nmax_coarse, nmax_fine, nblocks ,hw_ph,filename):
    """
    Writes the results of a computation to a netcdf file.
    Takes a Compute_Loop_Function object as input; it is assumed that 
    this object has already computed what we wish to write!
    """

    #--------------------------------------------
    # Write to netcdf file 
    #--------------------------------------------
    ncfile   = Dataset(filename,'w')

    # --- set various attributes, identifying the parameters of the computation ----
    setattr(ncfile,'mu',CS.mu) 
    setattr(ncfile,'beta',CS.beta) 
    setattr(ncfile,'acell',acell) 
    setattr(ncfile,'Area',Area) 
    setattr(ncfile,'nmax_coarse',nmax_coarse) 
    setattr(ncfile,'nmax_fine',nmax_fine) 
    setattr(ncfile,'n_blocks_coarse_to_fine',nblocks) 
    setattr(ncfile,'Gamma_width',CS.Gamma) 
    setattr(ncfile,'phonon_frequency',hw_ph) 


    # --- Create dimensions ----
    ncfile.createDimension("number_of_frequencies",CS.list_hw.shape[0])
    ncfile.createDimension("xy",2)
    ncfile.createDimension("gamma_i",2)
    ncfile.createDimension("uv",2)
    ncfile.createDimension("phonon_alpha_kappa",6)


    # --- Write data ----
    Q      = ncfile.createVariable("q_phonon",'d',('xy',))
    REPH   = ncfile.createVariable("Re_E_phonon",'d',('phonon_alpha_kappa',))
    IEPH   = ncfile.createVariable("Im_E_phonon",'d',('phonon_alpha_kappa',))
    HW     = ncfile.createVariable("list_hw",'d',('number_of_frequencies',))

    RH     = ncfile.createVariable("Re_H",'d',('xy','gamma_i','uv','number_of_frequencies'))
    IH     = ncfile.createVariable("Im_H",'d',('xy','gamma_i','uv','number_of_frequencies'))


    Q[:]    = CS.q
    REPH[:] = N.real(CS.E_ph)
    IEPH[:] = N.imag(CS.E_ph)
    HW[:]   = N.real(CS.list_hw)

    RH[:,:,:,:] = N.real(CS.Hq)
    IH[:,:,:,:] = N.imag(CS.Hq)



    ncfile.close()
Example #13
def write_to_file(CS, nmax_coarse, nmax_fine, nblocks, hw_ph, filename):

    """
    Writes the results of a computation to a netcdf file.
    Takes a Compute_Loop_Function object as input; it is assumed that 
    this object has already computed what we wish to write!
    """

    # --------------------------------------------
    # Write to netcdf file
    # --------------------------------------------
    ncfile = Dataset(filename, "w")

    # --- set various attributes, identifying the parameters of the computation ----
    setattr(ncfile, "mu", CS.mu)
    setattr(ncfile, "beta", CS.beta)
    setattr(ncfile, "acell", acell)
    setattr(ncfile, "Area", Area)
    setattr(ncfile, "nmax_coarse", nmax_coarse)
    setattr(ncfile, "nmax_fine", nmax_fine)
    setattr(ncfile, "n_blocks_coarse_to_fine", nblocks)
    setattr(ncfile, "Green_Gamma_width", CS.Green_Gamma_width)
    setattr(ncfile, "kernel_Gamma_width", CS.kernel_Gamma_width)
    setattr(ncfile, "phonon_frequency", hw_ph)

    # --- Create dimensions ----
    ncfile.createDimension("xy", 2)
    ncfile.createDimension("L_AB", 2)
    ncfile.createDimension("phonon_alpha_kappa", 6)

    # --- Write data ----
    Q = ncfile.createVariable("q_phonon", "d", ("xy",))
    REPH = ncfile.createVariable("Re_E_phonon", "d", ("phonon_alpha_kappa",))
    IEPH = ncfile.createVariable("Im_E_phonon", "d", ("phonon_alpha_kappa",))

    Re_R = ncfile.createVariable("Re_R", "d", ("xy", "L_AB"))
    Im_R = ncfile.createVariable("Im_R", "d", ("xy", "L_AB"))
    Re_I = ncfile.createVariable("Re_I", "d", ("xy", "L_AB"))
    Im_I = ncfile.createVariable("Im_I", "d", ("xy", "L_AB"))

    Q[:] = CS.q
    REPH[:] = N.real(CS.E_ph)
    IEPH[:] = N.imag(CS.E_ph)

    Re_R[:, :] = N.real(CS.Rq)
    Im_R[:, :] = N.imag(CS.Rq)
    Re_I[:, :] = N.real(CS.Iq)
    Im_I[:, :] = N.imag(CS.Iq)

    ncfile.close()

    return
Example #14
class CoordTransfer(Exception):
  def __init__(self, srcfile, dstfile, newfile):
    self.srcfile = srcfile
    self.dstfile = dstfile
    self.newfile = newfile
    
  def loadsrcoords(self):
    self.ncfile = Dataset(self.srcfile, 'r')
    variable_name = 'grid_center_lat'
    __grid_center_lat = self.ncfile.variables[variable_name][:]
    variable_name = 'grid_center_lon'
    __grid_center_lon = self.ncfile.variables[variable_name][:]
    self.__grid_center_lat = __grid_center_lat.tolist()
    self.__grid_center_lon = __grid_center_lon.tolist()
    
  def loadstinfo(self):
    self.nc_obj = Loadnc(self.dstfile)
    self.grid_size, self.grid_corners, self.grid_rank, self.grid_dims, ach1, ach2, self.grid_imask = self.nc_obj.load()
   
  def transfercoord(self):
    self.resncfile = Dataset(self.newfile, 'w')
    tm = time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(time.time()))
    
    # set dimension info 
    self.resncfile.createDimension('grid_size', self.grid_size)
    self.resncfile.createDimension('grid_rank', self.grid_rank)
    self.resncfile.createDimension('grid_corners', self.grid_corners)

    # set variable info
    grid_dims_var = self.resncfile.createVariable('grid_dims', dtype('int32').char, ('grid_rank',))
    grid_center_lat_var = self.resncfile.createVariable('grid_center_lat', dtype('d').char, ('grid_size',))
    grid_center_lat_var.units = 'degrees'
    grid_center_lon_var = self.resncfile.createVariable('grid_center_lon', dtype('d').char, ('grid_size',))
    grid_center_lon_var.units = 'degrees'
    grid_imask_var = self.resncfile.createVariable('grid_imask', dtype('i').char, ('grid_size',))
    grid_imask_var.units = 'unitless'

    grid_dims_var[:] = self.grid_dims
    grid_center_lat_var[:] = np.array(self.__grid_center_lat)
    grid_center_lon_var[:] = np.array(self.__grid_center_lon)
    buffer1 = [np.int32(i) for i in self.grid_imask]
    grid_imask_var[:] = np.array(buffer1)

    setattr(self.resncfile, 'title', 'Threp ' + self.newfile)
    setattr(self.resncfile, 'createdate', tm)
    setattr(self.resncfile, 'conventions', 'Threp')
    setattr(self.resncfile, 'grid', self.newfile)

  def finish(self):
    self.resncfile.close()
    self.nc_obj.closenc()
Example #15
def filter_netcdf(filename1, filename2, first=0, last=None, step=1):
    """Filter data file, selecting timesteps first:step:last.
    
    Read netcdf filename1, pick timesteps first:step:last and save to
    netCDF file filename2
    """

    from Scientific.IO.NetCDF import NetCDFFile

    # Get NetCDF
    infile = NetCDFFile(filename1, netcdf_mode_r)  #Open existing file for read
    outfile = NetCDFFile(filename2, netcdf_mode_w)  #Open new file

    # Copy dimensions
    for d in infile.dimensions:
        outfile.createDimension(d, infile.dimensions[d])

    # Copy variable definitions
    for name in infile.variables:
        var = infile.variables[name]
        outfile.createVariable(name, var.dtype.char, var.dimensions)

    # Copy the static variables
    for name in infile.variables:
        if name == 'time' or name == 'stage':
            pass
        else:
            outfile.variables[name][:] = infile.variables[name][:]

    # Copy selected timesteps
    time = infile.variables['time']
    stage = infile.variables['stage']

    newtime = outfile.variables['time']
    newstage = outfile.variables['stage']

    if last is None:
        last = len(time)

    selection = range(first, last, step)
    for i, j in enumerate(selection):
        log.critical('Copying timestep %d of %d (%f)'
                     % (j, last-first, time[j]))
        newtime[i] = time[j]
        newstage[i,:] = stage[j,:]

    # Close
    infile.close()
    outfile.close()
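
A usage sketch, assuming `netcdf_mode_r`, `netcdf_mode_w` and `log` are provided by the surrounding module, as in the function body; the file names are hypothetical.

# Keep every 5th timestep of the first 100
filter_netcdf('full_run.sww', 'thinned_run.sww', first=0, last=100, step=5)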
Example #16
class Loadreal(Exception):
  
  def __init__(self, file_name):
    self.filename = file_name
    self.ncfile = Dataset(file_name, 'r')
  
  def closenc(self):
    self.ncfile.close()
  
  def load(self):
    dimension_name = 'grid_size'
    grid_size = self.ncfile.dimensions[dimension_name]
    variable_name = 'data'
    data = self.ncfile.variables[variable_name][:]
    return grid_size, data
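
A minimal usage sketch; the (hypothetical) file must contain a `grid_size` dimension and a `data` variable, which is all the loader reads.

loader = Loadreal('T42_field.nc')
grid_size, data = loader.load()
loader.closenc()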
Example #17
    def write(cls, filename, data, header=""):
        '''
        Write a set of output variables into a NetCDF file.
                
        :param filename: the path to the output NetCDF file.
        :type filename: str
        :param data: the data to be written out.
        :type data: dict of Framework.OutputVariables.IOutputVariable
        :param header: the header to add to the output file.
        :type header: str
        '''
                
        filename = os.path.splitext(filename)[0]

        filename = "%s%s" % (filename,cls.extensions[0])
       
        # The NetCDF output file is opened for writing.
        outputFile = NetCDFFile(filename, 'w')
        
        if header:
            outputFile.header = header
        
        # Loop over the OutputVariable instances to write.
        
        for var in data.values():
                                    
            varName = str(var.name).strip().encode('string-escape').replace('/', '|')
            
            # The NetCDF dimensions are created for all the dimensions of the OutputVariable instance.
            dimensions = []
            for i,v in enumerate(var.shape):
                name = str("%s_%d" % (varName,i))
                dimensions.append(name)
                outputFile.createDimension(name, int(v))

            # A NetCDF variable instance is created for the running OutputVariable instance.        
            NETCDFVAR = outputFile.createVariable(varName, numpy.dtype(var.dtype).char, tuple(dimensions))

            # The array stored in the OutputVariable instance is written to the NetCDF file.
            NETCDFVAR.assignValue(var)  

            # All the attributes stored in the OutputVariable instance are written to the NetCDF file.
            for k, v in vars(var).items():
                setattr(NETCDFVAR,str(k),str(v))
        
        # The NetCDF file is closed.
        outputFile.close()
Example #18
 def on_superpose(self, event):
     self.on_clear()
     rendtype = self.rendlist.GetValue()
     opacity = float(self.opacity.GetValue())
     filename = self.get_file()
     
     f = NetCDFFile(filename,"r")
     variables = f.variables
     
     data = variables['molecular_trace'].getValue()
     origin = variables['origin'].getValue()
     spacing = variables['spacing'].getValue()
     
     f.close()
     
     mi, ma = self.draw_isosurface(data, rendtype, opacity, origin, spacing)
     self.isov_slider.SetRange(mi, ma)
     self.isov_slider.Enable()
Example #19
def gen_realdata(path, filename, var):
  ncfile = Dataset(path + filename, 'r')
  filename = './realdata/T42_' + var + '-' + filename.split('.')[-2] + '.nc'
  nc = Dataset(filename, 'w')
  tm = time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(time.time()))
  nx = 128
  ny = 64
  grid_size = ncfile.dimensions['n_a']

  # load data
  data = ncfile.variables[var][:, :, :]
  long_name = ncfile.variables[var].long_name
  units = ncfile.variables[var].units
  missing_value = ncfile.variables[var].missing_value
  _FillValue = ncfile.variables[var]._FillValue
  cell_method = ncfile.variables[var].cell_method
  tmp = []
  for i in range(1):
    for j in range(ny):
      for k in range(nx):
        tmp.append(data[i][j][k])
  data = scipy.array(tmp)

  # create dimensions
  nc.createDimension('grid_size', nx * ny)

  # create variables
  data_var = nc.createVariable('data', dtype('d').char, ('grid_size',))
  
  data_var[:] = data
  data_var.long_name = long_name
  data_var.units = units
  data_var.missing_value = missing_value
  data_var._FillValue = _FillValue
  data_var.cell_method = cell_method
 
  string = 'Threp ' + var + ' data'
  setattr(nc, 'title', string)
  setattr(nc, 'createdate', tm)

  nc.close()
  ncfile.close()
  print '*** Successfully generated real data file. ***'
Example #20
    def writeNetCDF(self, filename):
        from Scientific.IO.NetCDF import NetCDFFile

        ncfile = NetCDFFile(filename, "w")

        for dim, i in (("x", 0), ("y", 1), ("z", 2)):
            ncfile.createDimension(dim, self.voxels[i])
            ncfile.createVariable(dim, "d", (dim,))[:] = arange(self.voxels[i]) * self.vector[i][i]

        ncfile.createVariable("data", "d", ("x", "y", "z"))
        ncfile.variables["data"][:] = self.data

        ncfile.Natom = self.Natom
        ncfile.origin = self.origin
        for n in range(self.Natom):
            setattr(ncfile, "atom%i.Type" % n, self.atomType[n])
            setattr(ncfile, "atom%i.Pos" % n, self.atomPos[n])

        ncfile.close()
Example #21
def write_elevation_nc(file_out, lon, lat, depth_vector):
    """Write an nc elevation file."""

    # NetCDF file definition
    outfile = NetCDFFile(file_out, netcdf_mode_w)

    #Create new file
    nc_lon_lat_header(outfile, lon, lat)

    # ELEVATION
    zname = 'ELEVATION'
    outfile.createVariable(zname, precision, (lat_name, lon_name))
    outfile.variables[zname].units = 'CENTIMETERS'
    outfile.variables[zname].missing_value = -1.e+034

    outfile.variables[lon_name][:] = ensure_numeric(lon)
    outfile.variables[lat_name][:] = ensure_numeric(lat)

    depth = num.reshape(depth_vector, (len(lat), len(lon)))
    outfile.variables[zname][:] = depth

    outfile.close()
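
A hedged sketch of a call, assuming the module-level helpers used inside (`nc_lon_lat_header`, `ensure_numeric`, `precision`, `lat_name`, `lon_name`, `netcdf_mode_w`, `num`) are in scope; the coordinates and depths are made up.

lon = [150.0 + 0.1 * i for i in range(11)]     # 11 longitudes
lat = [-34.0 + 0.1 * j for j in range(11)]     # 11 latitudes
depth = [0.0] * (len(lat) * len(lon))          # flat seabed, in centimeters
write_elevation_nc('elevation.nc', lon, lat, depth)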
Example #22
def regrid_array(data=data_cflux):
	'''
	#Could be put with plotting tools???
	# Regrid array to be used with Basemap
	# Only works if the same latitudes and longitudes are selected from the netcdf file and grid
	# Uses the ORCA netcdf file
	### transform the longitude of ORCA onto something that basemap can read
	### The ORCA grid starts at 80 and goes to 440
	### What we want: starts at 80, goes to 180, then switches to -180 and goes back to 80
	'''
	from Scientific.IO.NetCDF import NetCDFFile
	#nc_grid_file = choose_netcdf_file()
	#~ indir = raw_input('Where is the ORCA netcdf file located? \n')
	nc_grid = NetCDFFile(NC_PATH+ 'ORCA2.0_grid.nc','r')
	lon = nc_grid.variables['lon'][0:40,:]
	lat = nc_grid.variables['lat'][0:40,:]
	area = nc_grid.variables['area'][0:40,:]
	mask = nc_grid.variables['mask'][0,0:40,:]
	nc_grid.close()
	
	lon_min = lon.copy()
	i,j = np.where(lon_min >= 180.) # elements of lon_min that are over 180
	lon_min[i,j] = lon_min[i,j] - 360. # takes those elements and subtracts 360 from them

	### ==============================================================================================================
	### get rid of the funny extra lon and do the same for the lat array ! 
	iw = np.where(lon_min[0,:] >= lon_min[0][0])[0] # are the elements that are greater or equal to the first element ie. 78.000038
	ie = np.where(lon_min[0,:] < lon_min[0][0])[0] # are the elements less than 78.000038

	### puts the lon in order from -180 to 180 and removes the extra 80 at the end
	lon = np.concatenate((np.take(lon_min,ie,axis=1),np.take(lon_min,iw,axis=1)),axis=1)[:,:-1]
	lat = np.concatenate((np.take(lat,ie,axis=1),np.take(lat,iw,axis=1)),axis=1)[:,:-1]

	# The data that is to be plotted needs to be regridded
	bm_array = [ma.concatenate((ma.take(data[i, :, :],ie,axis=1),ma.take(data[i, :, :],iw,axis=1)),axis=1)[:,:-1] for i in range(3650)]
	bm_array = ma.array(bm_array)
	return bm_array
Example #23
def get_veri_data(veri_datasets,var,lat,lon,plev=None,time=None):
    
    data_dir_main=get_data_dir_main()    
    
    if var=='sst': var_veri='sea_surface_temperature'  #the name of the variable as used inside the NetCDF file
    else: var_veri=get_var_data_name(var)
    
    data_veri={}
    for veri in veri_datasets:
        fname='get_seasfc_data_'+veri+'_'+var
        if time is not None:  fname=fname+'_'+'{:02d}'.format(time)+'00'
        fname_coords=get_fname_coords(lat,lon,plev=plev)
        fname=fname+fname_coords
        nc_file_veri=NetCDFFile(data_dir_main+fname+'.nc')
        data_name=veri.lower()+'_'+var_veri
        
        data_veri[veri]={}
        data_veri[veri]['data']=nc_file_veri.variables[data_name][:]  #values of monthly means of the variable. The first dimension should correspond to time.
        data_veri[veri]['date']=nc_file_veri.variables['date'][:]  #the months in YYYYMM format (assumed to be a 1D array)
    
        nc_file_veri.close()

    return data_veri
Example #24
class NetCDFInputData(InputFileData):
    
    type = "netcdf_data"
    
    extension = "nc"
    
    def load(self):
        
        try:
            self._netcdf = NetCDFFile(self._filename,"r")
            
        except IOError:
            raise InputDataError("The data stored in %r filename could not be loaded property." % self._filename)

        else:
            self._data = collections.OrderedDict()
            variables = self._netcdf.variables
            for k in variables:
                self._data[k]={}
                try:
                    if variables[k].axis:
                        self._data[k]['axis'] = variables[k].axis.split('|')
                    else:
                        self._data[k]['axis'] = []
                except AttributeError:
                    self._data[k]['axis'] = []
                self._data[k]['data'] = variables[k].getValue()
                self._data[k]['units'] = getattr(variables[k], 'units', 'au')

    def close(self):
        self._netcdf.close()
        
    @property
    def netcdf(self):
        
        return self._netcdf
Example #25
def writeNcFile(data, fileName=None, oldStyle=1):
    if not ncOk:
        raise Exception('module Scientific.IO.NetCDF not found, writeNcFile() failed!')
    if not fileName:
        fileName = data['name']+'_weather.nc'
    f = NetCDFFile(fileName, 'w')
    f.createDimension('time', data['time'].shape[0])
    f.file_format = file_format
    if oldStyle:
        f.createDimension('scalar', 1)
    if data.has_key('comment'):
        f.comment = data['comment']
    else:
        f.comment = 'created by MeteonormFile.py (v%s)' % version
    if data.has_key('source_file'):
        f.source_file = str(data['source_file'])
    for vn in ('latitude', 'longitude', 'height'):
        setattr(f, vn, data[vn])
        if oldStyle:
            v = f.createVariable(vn, 'd', ('scalar', ))
            v[:] = [data[vn]]
    setattr(f, 'longitude_0', 15.0*data['timezone'])
    if oldStyle:
        v = f.createVariable('longitude_0', 'd', ('scalar', ))
        v[:] = [15.0 * data['timezone']]
    for vn in variables.keys():
        t = variables[vn][1]
        v = f.createVariable(vn, t, ('time',))
        v[:] = data[vn].astype(t)
        oname = variables[vn][0]
        if oname.startswith('<'): oname = oname[1:]
        if oname.endswith('>'): oname = oname[:-1]
        v.original_name = oname
        v.unit = variables[vn][2]
    f.sync()
    f.close()
Example #26
 def test(self):
     # Create file
     FileName = '%s_out.nc' % self.Case
     print 'creating %s ...' % FileName
     File = NetCDFFile(FileName,'w')
     File.createDimension('time',None)
     var = File.createVariable('time','f',('time',))
     var.long_name = 'time'
     var.units = ' '
     File.createDimension('lat',len(self.data.lat))
     var = File.createVariable('lat','f',('lat',))
     var.long_name = 'latitude'
     var.units = 'degrees_north'
     var[:] = self.data.lat.astype('f')
     File.createDimension('lon',len(self.data.lon))
     var = File.createVariable('lon','f',('lon',))
     var.long_name = 'longitude'
     var.units = 'degrees_east'
     var[:] = self.data.lon.astype('f')
     for Field in ['SwToa','LwToa','SwToaCf','LwToaCf']:
         var = File.createVariable(Field,'f',('time','lat','lon'))
         var.long_name = ''
         var.units = 'W m-2'
     lmax = 3
     for l in range(lmax):
         print 'doing %s of %s' % (l+1,lmax)
         # get data
         Data = self.getFields(l)[0]
         Data.update(self.Fixed)
         # compute
         self.r(**Data)
         File.variables['SwToa'][l] = self.r['SwToa'].astype('f')
         File.variables['LwToa'][l] = -self.r['LwToa'].astype('f')
         File.variables['SwToaCf'][l] = self.r['SwToaCf'].astype('f')
         File.variables['LwToaCf'][l] = self.r['LwToaCf'].astype('f')
     File.close()
Example #27
## Zero out the second variable (outer loop restored; the snippet began mid-loop)
for i in arange(var2.shape[0]):
    for j in arange(var2.shape[1]):
        var2[i, j] = 0.0

## Get the data from the variables
data = var1.getValue()
data2 = var2.getValue()

## Print out the data
print data
print data2

## Get the dimension names of var1
dimNames = var1.dimensions
print "Dimension names of var1:", dimNames

## Create some attributes for var1
setattr(var1, 'units', 'Degrees C')
setattr(var1, 'precision', 2)
setattr(var1, 'maxValue', 19.999)

## Read the variable attributes we just created
att1 = getattr(var1, 'units')
att2 = getattr(var1, 'precision')
att3 = getattr(var1, 'maxValue')

## Print the value of these variable attributes
print "units =", att1, 'precision = ', att2, 'maxValue = ', att3

## Close the netCDF file
file.close()
Example #28
def create(missingdata=False,
           missingdimension=False,
           missingvariable=False,
           incorrectdimension=False,
           incorrectvariable=False):

    if (missingdata):
        filename = 'missingdata.nc'
        description = ', with missing data.'
    elif (missingdimension):
        filename = 'missingdimension.nc'
        description = ', with a missing dimension.'
    elif (missingvariable):
        filename = 'missingvariable.nc'
        description = ', with a missing variable.'
    elif (incorrectdimension):
        filename = 'incorrectdimension.nc'
        description = ', with an incorrect dimension label.'
    elif (incorrectvariable):
        filename = 'incorrectvariable.nc'
        description = ', with an incorrect variable label.'
    else:
        filename = 'valid.nc'
        description = '.'

    print "Creating " + filename + description

    f = NetCDFFile(filename, 'w')
    f.description = 'Example free surface height' + description

    if (missingdata):
        offset = -0.4
    else:
        offset = 0.0

    x = arange(-1.2 + offset, 1.21 + offset, 0.2)
    y = arange(-1.2, 1.21, 0.2)
    h = zeros((len(x), len(y)))

    for i in range(len(x)):
        for j in range(len(y)):
            # Nice ordering netCDF API - y,x !
            h[j, i] = x[i] * y[j]

    if (not incorrectdimension):
        xdimlabel = 'x'
    else:
        xdimlabel = 'lat'
    ydimlabel = 'y'
    if (not incorrectvariable):
        zdimlabel = 'z'
    else:
        zdimlabel = 'height'

    print xdimlabel, ydimlabel, zdimlabel

    # dimensions
    f.createDimension(xdimlabel, len(x))
    if (not missingdimension):
        f.createDimension(ydimlabel, len(y))

    # variables
    fx = f.createVariable(xdimlabel, 'd', (xdimlabel, ))
    fx[:] = x
    if (not missingdimension):
        fy = f.createVariable(ydimlabel, 'd', (ydimlabel, ))
        fy[:] = y
        if (not missingvariable):
            fh = f.createVariable(zdimlabel, 'd', (
                xdimlabel,
                ydimlabel,
            ))
            fh[:] = h
    else:
        fh = f.createVariable(zdimlabel, 'd', (xdimlabel, ))
        fh[:] = x

    f.close()
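
The factory above writes one test fixture per call, with the file name fixed by the flags; a quick driver sketch:

create()                         # valid.nc
create(missingdata=True)         # missingdata.nc
create(missingdimension=True)    # missingdimension.nc
create(missingvariable=True)     # missingvariable.nc
create(incorrectdimension=True)  # incorrectdimension.nc
create(incorrectvariable=True)   # incorrectvariable.nc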
Example #29
for varName in serialOutput.variables.keys():
    if serialOutput.variables[varName].typecode() != 'c':

        # The corresponding values for the serial run.
        serialValues = serialOutput.variables[varName].getValue()

        # The corresponding values for the parallel run.
        parallelValues = parallelOutput.variables[varName].getValue()

        def testvar(varName, serialValues, parallelValues):
            return lambda self: self.check_var(varName, serialValues,
                                               parallelValues)

        setattr(Test_Serial_vs_Parallel, "test_%s" % varName,
                testvar(varName, serialValues, parallelValues))

serialOutput.close()

parallelOutput.close()


def suite():
    loader = unittest.TestLoader()
    s = unittest.TestSuite()
    s.addTest(loader.loadTestsFromTestCase(Test_Serial_vs_Parallel))

    return s


if __name__ == '__main__':
    unittest.TextTestRunner(verbosity=2).run(suite())
Example #30
class Loadnc(Exception):
    def __init__(self, file_name):
        ''' Initialize grid file object.
        Open a netCDF file for reading.'''
        self.filename = file_name
        self.ncfile = Dataset(file_name, 'r')

    def closenc(self):
        ''' Close the file.'''
        self.ncfile.close()

    def load(self):
        '''load all dimensions and variables info.'''
        # load dimension info.
        grid_size = self.__get_grid_size()
        grid_corners = self.__get_grid_corners()
        grid_rank = self.__get_grid_rank()

        # load variable info.
        grid_dims = self.__get_grid_dims()
        [grid_center_lat, grid_center_lon] = self.__get_grid_center_coords()
        grid_imask = self.__get_grid_imask()

        # check dimension info.
        cond = (grid_size > 0 and grid_corners > 0 and grid_rank > 0
                and grid_size == grid_dims[0] * grid_dims[1])
        if cond:
            print '***Successfully read dimension info from netCDF file %s.***' % self.filename
        else:
            print '***Dimension info is invalid.***'
            sys.exit()
        # check dimension info.
        cond = (len(grid_center_lat) == grid_size
                and len(grid_center_lon) == grid_size
                and len(grid_imask) == grid_size)
        if cond:
            print '***Successfully read variable info from netCDF file %s.***' % self.filename
        else:
            print '***Variable info is invalid.***'
        return grid_size, grid_corners, grid_rank, grid_dims, grid_center_lat, grid_center_lon, grid_imask

    def __get_grid_size(self):
        dimension_name = 'grid_size'
        __grid_size = self.ncfile.dimensions[dimension_name]
        return __grid_size

    def __get_grid_corners(self):
        dimension_name = 'grid_corners'
        __grid_corners = self.ncfile.dimensions[dimension_name]
        return __grid_corners

    def __get_grid_rank(self):
        dimension_name = 'grid_rank'
        __grid_rank = self.ncfile.dimensions[dimension_name]
        return __grid_rank

    def __get_grid_dims(self):
        variable_name = 'grid_dims'
        __grid_dims = self.ncfile.variables[variable_name][:]
        return __grid_dims

    def __get_grid_center_coords(self):
        variable_name = 'grid_center_lat'
        __grid_center_lat = self.ncfile.variables[variable_name][:]
        variable_name = 'grid_center_lon'
        __grid_center_lon = self.ncfile.variables[variable_name][:]
        __grid_center_lat = __grid_center_lat.tolist()
        __grid_center_lon = __grid_center_lon.tolist()

        # transform radians to degrees
        if re.findall('r?R?ad', self.ncfile.variables[variable_name].units):
            __grid_center_lat = [
                item * 180 / math.pi for item in __grid_center_lat
            ]
            __grid_center_lon = [
                item * 180 / math.pi for item in __grid_center_lon
            ]

        return __grid_center_lat, __grid_center_lon

    def __get_grid_imask(self):
        variable_name = 'grid_imask'
        __grid_imask = self.ncfile.variables[variable_name][:]
        __grid_imask = __grid_imask.tolist()

        oc_num = 0
        ld_num = 0
        #for mask in __grid_imask:
        #  if mask == 0:
        #    oc_num += 1
        #  else:
        #    ld_num += 1
        #print 'oc_num'
        #print oc_num
        #print 'ld_num'
        #print ld_num
        return __grid_imask

    def __get_grid_corner_coords(self):
        pass

    def __get_grid_frac(self):
        pass
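
A usage sketch for the loader above, assuming a SCRIP-format grid file with the standard `grid_size`, `grid_corners` and `grid_rank` dimensions; the file name is hypothetical.

nc_obj = Loadnc('remap_grid_T42.nc')
(grid_size, grid_corners, grid_rank, grid_dims,
 center_lat, center_lon, imask) = nc_obj.load()
nc_obj.closenc()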
Example #31
class Write_nc:
    """Write an nc file.

    Note, this should be checked to meet cdc netcdf conventions for gridded
    data. http://www.cdc.noaa.gov/cdc/conventions/cdc_netcdf_standard.shtml
    """

    def __init__(self,
                 quantity_name,
                 file_name,
                 time_step_count,
                 time_step,
                 lon,
                 lat):
        """Instantiate a Write_nc instance (NetCDF file writer).

        time_step_count is the number of time steps.
        time_step is the time step size

        pre-condition: quantity_name must be 'HA', 'UA' or 'VA'.
        """

        self.quantity_name = quantity_name
        quantity_units = {'HA':'CENTIMETERS',
                          'UA':'CENTIMETERS/SECOND',
                          'VA':'CENTIMETERS/SECOND'}

        multiplier_dic = {'HA':100.0,   # To convert from m to cm
                          'UA':100.0,   #             and m/s to cm/sec
                          'VA':-100.0}  # MUX files have positive x in the
                                        # Southern direction.  This corrects
                                        # for it, when writing nc files.

        self.quantity_multiplier =  multiplier_dic[self.quantity_name]

        #self.file_name = file_name
        self.time_step_count = time_step_count
        self.time_step = time_step

        # NetCDF file definition
        self.outfile = NetCDFFile(file_name, netcdf_mode_w)
        outfile = self.outfile

        #Create new file
        nc_lon_lat_header(outfile, lon, lat)

        # TIME
        outfile.createDimension(time_name, None)
        outfile.createVariable(time_name, precision, (time_name,))

        #QUANTITY
        outfile.createVariable(self.quantity_name, precision,
                               (time_name, lat_name, lon_name))
        outfile.variables[self.quantity_name].missing_value = -1.e+034
        outfile.variables[self.quantity_name].units = \
                                 quantity_units[self.quantity_name]
        outfile.variables[lon_name][:]= ensure_numeric(lon)
        outfile.variables[lat_name][:]= ensure_numeric(lat)

        #Assume no one will be wanting to read this, while we are writing
        #outfile.close()

    def store_timestep(self, quantity_slice):
        """Write a time slice of quantity info

        quantity_slice is the data to be stored at this time step
        """

        # Get the variables
        time = self.outfile.variables[time_name]
        quantity = self.outfile.variables[self.quantity_name]

        # get index of slice to write
        i = len(time)

        #Store time
        time[i] = i * self.time_step    #self.domain.time
        quantity[i,:] = quantity_slice * self.quantity_multiplier

    def close(self):
        """ Close file underlying the class instance. """
        self.outfile.close()
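
A hedged sketch of the write loop, assuming the module-level names used inside (`nc_lon_lat_header`, `ensure_numeric`, `precision`, `time_name`, `lat_name`, `lon_name`, `netcdf_mode_w`) are in scope and that `num` is numpy, as elsewhere in this module; the grid and the all-zero slices are made up.

import numpy as num

lon = num.arange(10, dtype=float)
lat = num.arange(10, dtype=float)
writer = Write_nc('HA', 'ha.nc', time_step_count=3, time_step=0.5,
                  lon=lon, lat=lat)
for _ in range(3):
    writer.store_timestep(num.zeros((len(lat), len(lon))))  # meters, converted to cm
writer.close()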
Example #32
    def read_data(self):

        # open a new netCDF file for reading.
        ncfile = NetCDFFile(self.file, 'r')

        dimNames = ncfile.dimensions.keys()
        variableNames = ncfile.variables.keys()

        #print dimNames
        #print variableNames

        # Get the geolocation data
        intime = ncfile.variables['initial_time0_hours']
        inlatitude = ncfile.variables['g0_lat_2']
        inlongitude = ncfile.variables['g0_lon_3']

        self.intime_size = N.size(intime)
        self.inlat_size = N.size(inlatitude)
        self.inlon_size = N.size(inlongitude)

        #print 'original time size = ', self.intime_size
        #print 'original latitude size = ', self.inlat_size
        #print 'original longitude size =', self.inlon_size

        # Here we have to 'expand out' lat, lon and time so that the
        # middle end is permitted to function as it does for irregular
        # grids.  This involves quite a bit of repetitive values and
        # increased memory consumption.

        new_var_time = N.array([])
        new_var_lat = N.array([])
        new_var_lon = N.array([])

        coloc_param_size = self.intime_size * self.inlat_size * self.inlon_size

        new_var_time.resize(coloc_param_size)
        new_var_lat.resize(coloc_param_size)
        new_var_lon.resize(coloc_param_size)

        #print 'new time size = ', new_var_time.size
        #print 'new latitude size = ', new_var_lat.size
        #print 'new longitude size =', new_var_lon.size

        # Now that we have both the original representation of the geolocation params
        # and a properly-sized location to put their expanded representations - do the
        # expansion
        i = j = k = m = 0
        for i in range(0, self.intime_size):
            # Convert hours since 01/01/1900 to unix time
            tk = UT.hours_since_19th_century_to_unix_time(intime[i])
            for j in range(0, self.inlat_size):
                lat_tk = inlatitude[j]
                for k in range(0, self.inlon_size):
                    lon_tk = inlongitude[k]

                    new_var_time[m] = tk
                    new_var_lat[m] = lat_tk
                    new_var_lon[m] = lon_tk

                    #print m, ": ", tk, ", ", lat_tk, ", ", lon_tk

                    # Show leading edge of expanded colocation params
                    #if(m < 242):
                    #print m, ": ", tk, ", ", lat_tk, ", ", lon_tk

                    m += 1

        self.time = new_var_time
        self.latitude = new_var_lat
        ### convert to (-180,180) so it's consistent with CloudSat
        ### self.longitude  = new_var_lon
        self.longitude = N.where(new_var_lon > 180.0, new_var_lon - 360.0,
                                 new_var_lon)

        # End of colocation parameter expansion for regular grids

        level_name = ['lv_ISBL1']
        for i in range(len(level_name)):
            local_level = ncfile.variables[level_name[i]]
            level_data = local_level.getValue()
            attList = dir(local_level)
            attribute = {}
            for j in attList:
                if (j != 'assignValue' and j != 'getValue'
                        and j != 'typecode'):
                    attValue = getattr(local_level, j)
                    attribute[j] = attValue
            # add dimension name to the attribute
            attribute['dimension1'] = 'isobaric_level'
            # Store (attribute, data) in the level dictionary
            self.levels[level_name[i]] = (attribute, level_data)

        #print 'new time size = ', N.size(self.time)
        #print 'new latitude size = ', N.size(self.latitude)
        #print 'new longitude size =', N.size(self.longitude)

        for i in range(len(variableNames)):
            if (variableNames[i] != 'initial_time0_encoded'
                    and variableNames[i] != 'initial_time0_hours'
                    and variableNames[i] != 'lv_ISBL1'
                    and variableNames[i] != 'g0_lat_2'
                    and variableNames[i] != 'g0_lon_3'):
                local_var = ncfile.variables[variableNames[i]]
                # get data of the variable
                local_data = N.array(local_var.getValue())
                # get attributes of the variable
                attList = dir(local_var)
                attribute = {}
                for j in attList:
                    if (j != 'assignValue' and j != 'getValue'
                            and j != 'typecode'):
                        attValue = getattr(local_var, j)
                        attribute[j] = attValue

                # add dimension 1 name to attribute
                attribute['dimension1'] = 'isobaric_level'

                # Fill in UT.NAN where data is the value defined by attribute '_FillValue'
                if (N.size(local_data.shape) == 3):
                    fill_value = N.where(local_data == attribute['_FillValue'])
                    local_data[fill_value] = UT.NAN

                    # remove unnecessary attribute since we already applied it
                    del attribute['_FillValue']

                    # Now we have to reshape to 1D for the middle end
                    oneD_data = N.reshape(
                        local_data,
                        (local_data.shape[0] * local_data.shape[1] *
                         local_data.shape[2]))

                    # store (attribute, data) in the data dictionary
                    self.data[variableNames[i]] = (attribute, oneD_data)

                    #print "attribute         = ", attribute
                    #print "data              = ", oneD_data

                # Fill in UT.NAN where data is the value defined by attribute '_FillValue'
                elif (N.size(local_data.shape) == 4):
                    fill_value = N.where(local_data == attribute['_FillValue'])
                    local_data[fill_value] = UT.NAN

                    # remove unnecessary attribute since we already applied it
                    del attribute['_FillValue']

                    # Now we have to swap the axis of array to make the p-level to be the last axis
                    local_data = N.swapaxes(
                        local_data, 1,
                        2)  # swap the axes of p-level and latitude
                    local_data = N.swapaxes(
                        local_data, 2,
                        3)  # swap the axes of p-level and longitude
                    # Now the array has the axis in this order (time, lat, lon, p-level)
                    # Now we have to reshape the four-dimensional array to 2-dimensional array for middle end
                    print local_data.shape
                    twoD_data = N.reshape(
                        local_data,
                        (local_data.shape[0] * local_data.shape[1] *
                         local_data.shape[2], local_data.shape[3]))

                    # store (attribute, data) in the data dictionary
                    self.data[variableNames[i]] = (attribute, twoD_data)

                    #print "attribute         = ", attribute
                    #print "data              = ", oneD_data
                else:
                    print 'Warning: Unsupported shape detected for ', variableNames[
                        i], ' ... skipping'

        ncfile.close()
Example #33
class ETSFWriter:
    def __init__(self, filename='gpaw', title='gpaw'):
        if not filename.endswith('-etsf.nc'):
            if filename.endswith('.nc'):
                filename = filename[:-3] + '-etsf.nc'
            else:
                filename = filename + '-etsf.nc'
            
        self.nc = NetCDFFile(filename, 'w')

        self.nc.file_format = 'ETSF Nanoquanta'
        self.nc.file_format_version = np.array([3.3], dtype=np.float32)
        self.nc.Conventions = 'http://www.etsf.eu/fileformats/'
        self.nc.history = 'File generated by GPAW'
        self.nc.title = title

    def write(self, calc, spacegroup=1):

        #sg = Spacegroup(spacegroup)
        #print sg
        
        wfs = calc.wfs
        setups = wfs.setups
        bd = wfs.bd
        kd = wfs.kd
        
        atoms = calc.atoms
        natoms = len(atoms)
        
        if wfs.kd.symmetry is None:
            op_scc = np.eye(3, dtype=int).reshape((1, 3, 3))
        else:
            op_scc = wfs.kd.symmetry.op_scc

        specie_a = np.empty(natoms, np.int32)
        nspecies = 0
        species = {}
        names = []
        symbols = []
        numbers = []
        charges = []
        for a, id in enumerate(setups.id_a):
            if id not in species:
                species[id] = nspecies
                nspecies += 1
                names.append(setups[a].symbol)
                symbols.append(setups[a].symbol)
                numbers.append(setups[a].Z)
                charges.append(setups[a].Nv)
            specie_a[a] = species[id]
            
        dimensions = [
            ('character_string_length', 80),
            ('max_number_of_states', bd.nbands),
            ('number_of_atoms', len(atoms)),
            ('number_of_atom_species', nspecies),
            ('number_of_cartesian_directions', 3),
            ('number_of_components', 1),
            ('number_of_kpoints', kd.nibzkpts),
            ('number_of_reduced_dimensions', 3),
            ('number_of_spinor_components', 1),
            ('number_of_spins', wfs.nspins),
            ('number_of_symmetry_operations', len(op_scc)),
            ('number_of_vectors', 3),
            ('real_or_complex_coefficients', 2),
            ('symbol_length', 2)]

        for name, size in dimensions:
            print(('%-34s %d' % (name, size)))
            self.nc.createDimension(name, size)

        var = self.add_variable
        
        var('space_group', (), np.array(spacegroup, dtype=int))
        var('primitive_vectors',
            ('number_of_vectors', 'number_of_cartesian_directions'),
            wfs.gd.cell_cv, units='atomic units')
        var('reduced_symmetry_matrices',
            ('number_of_symmetry_operations',
             'number_of_reduced_dimensions', 'number_of_reduced_dimensions'),
            op_scc.astype(np.int32), symmorphic='yes')
        var('reduced_symmetry_translations',
            ('number_of_symmetry_operations', 'number_of_reduced_dimensions'),
            np.zeros((len(op_scc), 3), dtype=np.int32))
        var('atom_species', ('number_of_atoms',), specie_a + 1)
        var('reduced_atom_positions',
            ('number_of_atoms', 'number_of_reduced_dimensions'),
            calc.spos_ac)
        var('atomic_numbers', ('number_of_atom_species',),
            np.array(numbers, dtype=float))
        var('valence_charges', ('number_of_atom_species',),
            np.array(charges, dtype=float))
        var('atom_species_names',
            ('number_of_atom_species', 'character_string_length'), names)
        var('chemical_symbols', ('number_of_atom_species', 'symbol_length'),
            symbols)
        var('pseudopotential_types',
            ('number_of_atom_species', 'character_string_length'),
            ['HGH'] * nspecies)
        var('fermi_energy', (), calc.occupations.fermilevel,
            units='atomic units')
        var('smearing_scheme', ('character_string_length',), 'fermi-dirac')
        var('smearing_width', (), calc.occupations.width, units='atomic units')
        var('number_of_states', ('number_of_spins', 'number_of_kpoints'),
            np.zeros((wfs.nspins, kd.nibzkpts), np.int32) + bd.nbands,
            k_dependent='no')
        var('eigenvalues',
            ('number_of_spins', 'number_of_kpoints', 'max_number_of_states'),
            np.array([[calc.get_eigenvalues(k, s) / Hartree
                       for k in range(kd.nibzkpts)]
                      for s in range(wfs.nspins)]), units='atomic units')
        var('occupations',
            ('number_of_spins', 'number_of_kpoints', 'max_number_of_states'),
            np.array([[calc.get_occupation_numbers(k, s) / kd.weight_k[k]
                       for k in range(kd.nibzkpts)]
                      for s in range(wfs.nspins)]))
        var('reduced_coordinates_of_kpoints',
            ('number_of_kpoints', 'number_of_reduced_dimensions'), kd.ibzk_kc)
        var('kpoint_weights', ('number_of_kpoints',), kd.weight_k)
        var('basis_set', ('character_string_length',), 'plane_waves')
        var('number_of_electrons', (), np.array(wfs.nvalence, dtype=np.int32))
        self.nc.close()

    def add_variable(self, name, dims, data=None, **kwargs):
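        # Choose a netCDF typecode from the data: arrays keep their own dtype
        # character, Python floats map to 'd', ints to 'i', and anything else
        # (in practice strings) to 'c'; no data defaults to double ('d').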
        if data is None:
            char = 'd'
        else:
            if isinstance(data, np.ndarray):
                char = data.dtype.char
            elif isinstance(data, float):
                char = 'd'
            elif isinstance(data, int):
                char = 'i'
            else:
                char = 'c'
        print('%-34s %s%s' % (
            name, char,
            tuple([self.nc.dimensions[dim] for dim in dims])))
        var = self.nc.createVariable(name, char, dims)
        for attr, value in kwargs.items():
            setattr(var, attr, value)
        if data is not None:
            if len(dims) == 0:
                var.assignValue(data)
            else:
                if char == 'c':
                    if len(dims) == 1:
                        var[:len(data)] = data
                    else:
                        for i, x in enumerate(data):
                            var[i, :len(x)] = x
                else:
                    var[:] = data
        return var
Exemplo n.º 34
0
    job.initialize()

    obsinfo = job.observations
    outputvars = [oi['outputvariable'] for oi in obsinfo]

    # Run and retrieve results.
    ncpath = job.controller.run((), showoutput=False, returnncpath=True)
    if ncpath is None:
        print 'GOTM run failed - exiting.'
        sys.exit(1)
    nc = NetCDFFile(ncpath, 'r')
    res = acpy.run.job.controller.getNetCDFVariables(nc,
                                                     outputvars,
                                                     addcoordinates=True)
    nc.close()

    # Shortcuts to coordinates
    tim_cent, z_cent, z1_cent = (res['time_center'], res['z_center'],
                                 res['z1_center'])
    tim_stag, z_stag, z1_stag = (res['time_staggered'], res['z_staggered'],
                                 res['z1_staggered'])

    # Find the depth index from where we start
    ifirstz = z_cent.searchsorted(-300)
    viewdepth = 300

    hres = matplotlib.pylab.figure()
    herr = matplotlib.pylab.figure()
    for i, oi in enumerate(obsinfo):
        modeldata = res[oi['outputvariable']]
Exemplo n.º 35
0
print 'X units: ', d.units, 'X size: ', eden_ncol
#print eden_x
d = f.variables['y']
eden_nrow = d.shape[0]
eden_y = np.copy( d )
eden_y = eden_y[::-1]
print 'Y units: ', d.units, 'Y size: ', eden_nrow
#print eden_y
eden_dx = eden_dy = 400. #m
stage = f.variables['stage']
s = np.copy( stage[0,:,:] )
s = np.flipud( s )
s = np.ma.masked_invalid( s )
eden_mask = np.ma.getmask( s )
#print eden_mask
f.close()
#--load eden topo data
eden_topo_ref   = '..\\Topography\\ref\\eden_topo.ref'
eden_topo = au.loadArrayFromFile(eden_nrow,eden_ncol,eden_topo_ref)
eden_topo = np.ma.masked_where(eden_topo==-9999.,eden_topo)

#--find the netcdf cell nw of modflow cells
eden2mf_col = np.zeros( (ncol), np.int )
eden2mf_row = np.zeros( (nrow), np.int )
for icol in range(0,ncol):
    x = xcell[icol]
    ix = 0
    for xe in eden_x:
        if xe > x:
            break
        ix += 1
Exemplo n.º 36
0
class _ParNetCDFFile(ParBase):

    """Distributed netCDF file

    Constructor: ParNetCDFFile(|filename|, |split_dimension|, |mode|='r',
                               |local_access| = 0)

    Arguments:

    |filename| -- the name of the netCDF file

    |split_dimension| -- the name of the dimension along which the data
                         is distributed over the processors

    |mode| -- read ('r'), write ('w'), or append ('a'). Default is 'r'.

    |local_access| -- if 0 (default), processor 0 is the only one to access
                      the file, all others communicate with processor 0. If
                      1 (only for reading), each processor accesses the
                      file directly. In the latter case, the file must be
                      accessible on all processors under the same name.
                      A third mode is 'auto', which uses some heuristics
                      to decide if the file is accessible everywhere:
                      it checks for existence of the file, then compares
                      the size on all processors, and finally verifies
                      that the same variables exist everywhere, with
                      identical names, types, and sizes.

    A ParNetCDFFile object acts as much as possible like a NetCDFFile object.
    Variables become ParNetCDFVariable objects, which behave like
    distributed sequences. Variables that use the dimension named by
    |split_dimension| are automatically distributed among the processors
    such that each treats only one slice of the whole file.
    """

    def __parinit__(self, pid, nprocs, filename, split_dimension,
                    mode = 'r', local_access = 0):
        if mode != 'r': local_access = 0
        self.pid = pid
        self.nprocs = nprocs
        self.filename = filename
        self.split = split_dimension
        self.local_access = local_access
        self.read_only = mode == 'r'
        if local_access or pid == 0:
            self.file = NetCDFFile(filename, mode)
            try:
                length = self.file.dimensions[split_dimension]
                if length is None:
                    length = -1
            except KeyError:
                length = None
            vars = {}
            for name, var in self.file.variables.items():
                vars[name] = (name, var.dimensions)
                if length < 0 and split_dimension in var.dimensions:
                    index = list(var.dimensions).index(split_dimension)
                    length = var.shape[index]
        else:
            self.file = None
            self.split = split_dimension
            length = None
            vars = None
        if not local_access:
            length = self.broadcast(length)
            vars = self.broadcast(vars)
        if length is not None:
            self._divideData(length)
        self.variables = {}
        for name, var in vars.items():
            self.variables[name] = _ParNetCDFVariable(self, var[0], var[1],
                                                      split_dimension)

    def __repr__(self):
        return repr(self.filename)

    def close(self):
        if self.local_access or self.pid == 0:
            self.file.close()

    def createDimension(self, name, length):
        if name == self.split:
            if length is None:
                raise ValueError, "Split dimension cannot be unlimited"
            self._divideData(length)
        if self.pid == 0:
            self.file.createDimension(name, length)

    def createVariable(self, name, typecode, dimensions):
        if self.pid == 0:
            var = self.file.createVariable(name, typecode, dimensions)
            dim = var.dimensions
        else:
            dim = 0
        name, dim = self.broadcast((name, dim))
        self.variables[name] = _ParNetCDFVariable(self, name, dim, self.split)
        return self.variables[name]

    def _divideData(self, length):
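        # Split 'length' items into contiguous chunks of ceil(length/nprocs)
        # items each; trailing chunks may be shorter (or empty) when length
        # is not divisible by nprocs.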
        chunk = (length+self.nprocs-1)/self.nprocs
        self.first = min(self.pid*chunk, length)
        self.last = min(self.first+chunk, length)
        if (not self.local_access) and self.pid == 0:
            self.parts = []
            for pid in range(self.nprocs):
                first = pid*chunk
                last = min(first+chunk, length)
                self.parts.append((first, last))

    def sync(self):
        if self.pid == 0:
            self.file.sync()
    flush = sync
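

# Editor's sketch (not part of the original example): typical use of the
# distributed file, assuming an MPI-style runtime that constructs the object
# via __parinit__ with this process's pid and the total process count:
#
#     f = _ParNetCDFFile('data.nc', split_dimension='step')
#     v = f.variables['step']      # a _ParNetCDFVariable
#     local_slice = v[:]           # only this processor's part of the data
#     f.close()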
Exemplo n.º 37
0
    def finalize(self):
        """Finalizes the calculations (e.g. averaging the total term, output files creations ...).
        """

        if self.architecture == 'monoprocessor':
            t = self.trajectory
        else:
            # Load the whole trajectory set.
            t = Trajectory(None, self.trajectoryFilename, 'r')

        orderedAtoms = sorted(t.universe.atomList(),
                              key=operator.attrgetter('index'))
        groups = [
            Collection([orderedAtoms[ind] for ind in g]) for g in self.group
        ]

        # 'frequencies' = 1D Numeric array of the frequencies at which the DOS was computed.
        frequencies = N.arange(self.nFrames) / (2.0 * self.nFrames * self.dt)

        # The NetCDF output file is opened for writing.
        outputFile = NetCDFFile(self.output, 'w')
        outputFile.title = self.__class__.__name__
        outputFile.jobinfo = (self.information +
                              '\nOutput file written on: %s\n\n' % asctime())

        # Dictionary whose keys are of the form Gi, where i is the group number,
        # and whose entries are the lists of indices of the atoms making up each group.
        comp = 1
        for g in self.group:
            outputFile.jobinfo += 'Group %d: %s\n' % (comp,
                                                      [index for index in g])
            comp += 1

        # Some dimensions are created.
        outputFile.createDimension('NFRAMES', self.nFrames)

        # Creation of the NetCDF output variables.
        # The time.
        TIMES = outputFile.createVariable('time', N.Float, ('NFRAMES', ))
        TIMES[:] = self.times[:]
        TIMES.units = 'ps'

        # The resolution function.
        RESOLUTIONFUNCTION = outputFile.createVariable('resolution_function',
                                                       N.Float, ('NFRAMES', ))
        RESOLUTIONFUNCTION[:] = self.resolutionFunction[:]
        RESOLUTIONFUNCTION.units = 'unitless'

        # Creation of the NetCDF output variables.
        # The frequencies.
        FREQUENCIES = outputFile.createVariable('frequency', N.Float,
                                                ('NFRAMES', ))
        FREQUENCIES[:] = frequencies[:]
        FREQUENCIES.units = 'THz'

        OMEGAS = outputFile.createVariable('angular_frequency', N.Float,
                                           ('NFRAMES', ))
        OMEGAS[:] = 2.0 * N.pi * frequencies[:]
        OMEGAS.units = 'rad ps-1'

        avacfTotal = N.zeros((self.nFrames), typecode=N.Float)
        adosTotal = N.zeros((self.nFrames), typecode=N.Float)

        comp = 1
        totalMass = 0.0
        for g in groups:

            AVACF = outputFile.createVariable('avacf-group%s' % comp, N.Float,
                                              ('NFRAMES', ))
            AVACF[:] = self.AVACF[comp][:]
            AVACF.units = 'rad^2*ps^-2'

            N.add(avacfTotal, self.AVACF[comp], avacfTotal)

            ADOS = outputFile.createVariable('ados-group%s' % comp, N.Float,
                                             ('NFRAMES', ))
            ADOS[:] = self.ADOS[comp][:]
            ADOS.units = 'rad^2*ps^-1'

            N.add(adosTotal, g.mass() * self.ADOS[comp], adosTotal)

            comp += 1
            totalMass += g.mass()

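        # Average the mass-weighted sum over all groups; the 0.5*dt factor
        # appears to be the spectral normalisation of the discrete Fourier
        # transform used to obtain the DOS.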
        adosTotal *= 0.5 * self.dt / (self.nGroups * totalMass)

        AVACF = outputFile.createVariable('avacf-total', N.Float,
                                          ('NFRAMES', ))
        AVACF[:] = avacfTotal
        AVACF.units = 'rad^2*ps^-2'

        ADOS = outputFile.createVariable('ados-total', N.Float, ('NFRAMES', ))
        ADOS[:] = adosTotal
        ADOS.units = 'rad^2*ps^-1'

        asciiVar = sorted(outputFile.variables.keys())

        outputFile.close()

        self.toPlot = {
            'netcdf': self.output,
            'xVar': 'angular_frequency',
            'yVar': 'ados-total'
        }

        # Create an ASCII version of the NetCDF output file.
        convertNetCDFToASCII(inputFile = self.output,\
                             outputFile = os.path.splitext(self.output)[0] + '.cdl',\
                             variables = asciiVar)
Exemplo n.º 38
0
    def read_data(self):

        # open a new netCDF file for reading.
        ncfile = NetCDFFile(self.file,'r')
        
        dimNames = ncfile.dimensions.keys()
        variableNames = ncfile.variables.keys()

        #print dimNames
        #print variableNames
        
        # Get the geolocation data
        intime         = ncfile.variables['initial_time0_hours']
        inlatitude     = ncfile.variables['g0_lat_1']
        inlongitude    = ncfile.variables['g0_lon_2']
                 
        self.intime_size = N.size(intime)    
        self.inlat_size  = N.size(inlatitude)
        self.inlon_size  = N.size(inlongitude)
 
        #print 'original time size = ', self.intime_size
        #print 'original latitude size = ', self.inlat_size
        #print 'original longitude size =', self.inlon_size
        
        # Here we have to 'expand out' lat, lon and time so that the
        # middle end is permitted to function as it does for irregular
        # grids.  This involves quite a few repeated values and
        # increased memory consumption.
        
        new_var_time = N.array([])
        new_var_lat  = N.array([])
        new_var_lon  = N.array([])
        
        coloc_param_size = self.intime_size * self.inlat_size * self.inlon_size
        
        new_var_time.resize(coloc_param_size)
        new_var_lat.resize(coloc_param_size)
        new_var_lon.resize(coloc_param_size)
        
        #print 'new time size = ', new_time_size
        #print 'new latitude size = ', new_lat_size
        #print 'new longitude size =', new_lon_size
                                
        # Now that we have both the original representation of the geolocation params
        # and a properly-sized location to put their expanded representations - do the
        # expansion
        i = j = k = m = 0
        for i in range(0, self.intime_size):
            # Convert hours since 01/01/1900 to unix time
            tk = UT.hours_since_19th_century_to_unix_time(intime[i])
            for j in range(0, self.inlat_size):
                lat_tk = inlatitude[j]
                for k in range(0, self.inlon_size):
                    lon_tk = inlongitude[k]
                    
                    new_var_time[m] = tk
                    new_var_lat[m]  = lat_tk
                    new_var_lon[m]  = lon_tk
                    
                    #print m, ": ", tk, ", ", lat_tk, ", ", lon_tk                   
                    
                    # Show leading edge of expanded colocation params
                    #if(m < 242):
                        #print m, ": ", tk, ", ", lat_tk, ", ", lon_tk

                    m += 1
        
        self.time       = new_var_time
        self.latitude   = new_var_lat
        ### convert to (-180,180) so it's consistent with CloudSat
        ### self.longitude  = new_var_lon
        self.longitude = N.where(new_var_lon > 180.0, new_var_lon - 360.0, new_var_lon)
                
        # End of colocation parameter expansion for regular grids
        
        #print 'new time size = ', N.size(self.time)
        #print 'new latitude size = ', N.size(self.latitude)
        #print 'new longitude size =', N.size(self.longitude)        
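
        # Editor's note: the triple loop above is equivalent to a vectorised
        # expansion, sketched here assuming N is modern numpy (repeat/tile)
        # and that the conversion helper is applied per element:
        #
        #   t = N.array([UT.hours_since_19th_century_to_unix_time(h)
        #                for h in intime[:]])
        #   new_var_time = N.repeat(t, self.inlat_size * self.inlon_size)
        #   new_var_lat  = N.tile(N.repeat(inlatitude[:], self.inlon_size),
        #                         self.intime_size)
        #   new_var_lon  = N.tile(inlongitude[:],
        #                         self.intime_size * self.inlat_size)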

        for i in range(len(variableNames)):
          if(variableNames[i]!='initial_time0_encoded'
                and variableNames[i]!='initial_time0_hours'
                and variableNames[i]!='g0_lat_1' 
                and variableNames[i]!='g0_lon_2'):
              local_var = ncfile.variables[variableNames[i]]  
              # get data of the variable
              local_data = N.array(local_var.getValue())
              # get attributes of the variable
              attList = dir(local_var)
              attribute = {}
              for j in attList:
                  if(j!='assignValue' and j!='getValue' and j!='typecode'):
                    attValue = getattr(local_var, j)
                    attribute[j]=attValue
                    #print j, attValue
                   
              #print variableNames[i], "attribute = ", attribute

              #print 'local data dimension', local_data.shape
            
              # Fill in UT.NAN where data is the value defined by attribute '_FillValue'
              fill_value = N.where(local_data == attribute['_FillValue'])
              local_data[fill_value] = UT.NAN

              # remove unnecessary attribute since we already applied it
              del attribute['_FillValue']
              
              # Now we have to reshape to 1D for the middle end
              oneD_data = N.reshape(local_data, local_data.shape[0]*local_data.shape[1]*local_data.shape[2])
              
              # store (attribute, data) in the data dictionary
              self.data[variableNames[i]]=(attribute, oneD_data)
              
              #print "attribute         = ", attribute
              #print "data              = ", oneD_data
        
        ncfile.close()
Exemplo n.º 39
0
class ETSFWriter:
    def __init__(self, filename='gpaw', title='gpaw'):
        if not filename.endswith('-etsf.nc'):
            if filename.endswith('.nc'):
                filename = filename[:-3] + '-etsf.nc'
            else:
                filename = filename + '-etsf.nc'

        self.nc = NetCDFFile(filename, 'w')

        self.nc.file_format = 'ETSF Nanoquanta'
        self.nc.file_format_version = np.array([3.3], dtype=np.float32)
        self.nc.Conventions = 'http://www.etsf.eu/fileformats/'
        self.nc.history = 'File generated by GPAW'
        self.nc.title = title

    def write(self, calc, ecut=40 * Hartree, spacegroup=1):

        #sg = Spacegroup(spacegroup)
        #print sg

        wfs = calc.wfs
        setups = wfs.setups
        bd = wfs.bd
        kd = wfs.kd

        atoms = calc.atoms
        natoms = len(atoms)

        if wfs.symmetry is None:
            op_scc = np.eye(3, dtype=int).reshape((1, 3, 3))
        else:
            op_scc = wfs.symmetry.op_scc

        pwd = PWDescriptor(ecut / Hartree, wfs.gd, kd.ibzk_kc)
        N_c = pwd.gd.N_c
        i_Qc = np.indices(N_c, np.int32).transpose((1, 2, 3, 0))
        i_Qc += N_c // 2
        i_Qc %= N_c
        i_Qc -= N_c // 2
        i_Qc.shape = (-1, 3)
        i_Gc = i_Qc[pwd.Q_G]

        B_cv = 2.0 * np.pi * wfs.gd.icell_cv
        G_Qv = np.dot(i_Gc, B_cv).reshape((-1, 3))
        G2_Q = (G_Qv**2).sum(axis=1)

        specie_a = np.empty(natoms, np.int32)
        nspecies = 0
        species = {}
        names = []
        symbols = []
        numbers = []
        charges = []
        for a, id in enumerate(setups.id_a):
            if id not in species:
                species[id] = nspecies
                nspecies += 1
                names.append(setups[a].symbol)
                symbols.append(setups[a].symbol)
                numbers.append(setups[a].Z)
                charges.append(setups[a].Nv)
            specie_a[a] = species[id]

        dimensions = [('character_string_length', 80),
                      ('max_number_of_coefficients', len(i_Gc)),
                      ('max_number_of_states', bd.nbands),
                      ('number_of_atoms', len(atoms)),
                      ('number_of_atom_species', nspecies),
                      ('number_of_cartesian_directions', 3),
                      ('number_of_components', 1),
                      ('number_of_grid_points_vector1', N_c[0]),
                      ('number_of_grid_points_vector2', N_c[1]),
                      ('number_of_grid_points_vector3', N_c[2]),
                      ('number_of_kpoints', kd.nibzkpts),
                      ('number_of_reduced_dimensions', 3),
                      ('number_of_spinor_components', 1),
                      ('number_of_spins', wfs.nspins),
                      ('number_of_symmetry_operations', len(op_scc)),
                      ('number_of_vectors', 3),
                      ('real_or_complex_coefficients', 2),
                      ('symbol_length', 2)]

        for name, size in dimensions:
            print('%-34s %d' % (name, size))
            self.nc.createDimension(name, size)

        var = self.add_variable

        var('space_group', (), np.array(spacegroup, dtype=int))
        var('primitive_vectors',
            ('number_of_vectors', 'number_of_cartesian_directions'),
            wfs.gd.cell_cv,
            units='atomic units')
        var('reduced_symmetry_matrices',
            ('number_of_symmetry_operations', 'number_of_reduced_dimensions',
             'number_of_reduced_dimensions'),
            op_scc.astype(np.int32),
            symmorphic='yes')
        var('reduced_symmetry_translations',
            ('number_of_symmetry_operations', 'number_of_reduced_dimensions'),
            np.zeros((len(op_scc), 3), dtype=np.int32))
        var('atom_species', ('number_of_atoms', ), specie_a + 1)
        var('reduced_atom_positions',
            ('number_of_atoms', 'number_of_reduced_dimensions'),
            atoms.get_scaled_positions())
        var('atomic_numbers', ('number_of_atom_species', ),
            np.array(numbers, dtype=float))
        var('valence_charges', ('number_of_atom_species', ),
            np.array(charges, dtype=float))
        var('atom_species_names',
            ('number_of_atom_species', 'character_string_length'), names)
        var('chemical_symbols', ('number_of_atom_species', 'symbol_length'),
            symbols)
        var('pseudopotential_types',
            ('number_of_atom_species', 'character_string_length'),
            ['HGH'] * nspecies)
        var('fermi_energy', (),
            calc.occupations.fermilevel,
            units='atomic units')
        var('smearing_scheme', ('character_string_length', ), 'fermi-dirac')
        var('smearing_width', (), calc.occupations.width, units='atomic units')
        var('number_of_states', ('number_of_spins', 'number_of_kpoints'),
            np.zeros((wfs.nspins, kd.nibzkpts), np.int32) + bd.nbands,
            k_dependent='no')
        var('eigenvalues',
            ('number_of_spins', 'number_of_kpoints', 'max_number_of_states'),
            np.array([[
                calc.get_eigenvalues(k, s) / Hartree
                for k in range(kd.nibzkpts)
            ] for s in range(wfs.nspins)]),
            units='atomic units')
        var(
            'occupations',
            ('number_of_spins', 'number_of_kpoints', 'max_number_of_states'),
            np.array([[
                calc.get_occupation_numbers(k, s) / kd.weight_k[k]
                for k in range(kd.nibzkpts)
            ] for s in range(wfs.nspins)]))
        var('reduced_coordinates_of_kpoints',
            ('number_of_kpoints', 'number_of_reduced_dimensions'), kd.ibzk_kc)
        var('kpoint_weights', ('number_of_kpoints', ), kd.weight_k)
        var('basis_set', ('character_string_length', ), 'plane_waves')
        var('kinetic_energy_cutoff', (), 1.0 * ecut, units='atomic units')
        var('number_of_coefficients', ('number_of_kpoints', ),
            np.zeros(kd.nibzkpts, np.int32) + len(i_Gc),
            k_dependent='no')
        var('reduced_coordinates_of_plane_waves',
            ('max_number_of_coefficients', 'number_of_reduced_dimensions'),
            i_Gc[np.argsort(G2_Q)],
            k_dependent='no')
        var('number_of_electrons', (), np.array(wfs.nvalence, dtype=np.int32))

        #var('exchange_functional', ('character_string_length',),
        #    calc.hamiltonian.xc.name)
        #var('correlation_functional', ('character_string_length',),
        #    calc.hamiltonian.xc.name)

        psit_skn1G2 = var(
            'coefficients_of_wavefunctions',
            ('number_of_spins', 'number_of_kpoints', 'max_number_of_states',
             'number_of_spinor_components', 'max_number_of_coefficients',
             'real_or_complex_coefficients'))

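        # Editor's note: sqrt(V)/N appears to convert GPAW's volume-normalised
        # real-space pseudo wave functions into plane-wave coefficients with
        # the normalisation ETSF expects (compensating the unscaled FFT sum).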
        x = atoms.get_volume()**0.5 / N_c.prod()
        psit_Gx = np.empty((len(i_Gc), 2))
        for s in range(wfs.nspins):
            for k in range(kd.nibzkpts):
                for n in range(bd.nbands):
                    psit_G = pwd.fft(calc.get_pseudo_wave_function(
                        n, k, s))[np.argsort(G2_Q)]
                    psit_G *= x
                    psit_Gx[:, 0] = psit_G.real
                    psit_Gx[:, 1] = psit_G.imag
                    psit_skn1G2[s, k, n, 0] = psit_Gx

        self.nc.close()

    def add_variable(self, name, dims, data=None, **kwargs):
        if data is None:
            char = 'd'
        else:
            if isinstance(data, np.ndarray):
                char = data.dtype.char
            elif isinstance(data, float):
                char = 'd'
            elif isinstance(data, int):
                char = 'i'
            else:
                char = 'c'
        print('%-34s %s%s' %
              (name, char, tuple([self.nc.dimensions[dim] for dim in dims])))
        var = self.nc.createVariable(name, char, dims)
        for attr, value in kwargs.items():
            setattr(var, attr, value)
        if data is not None:
            if len(dims) == 0:
                var.assignValue(data)
            else:
                if char == 'c':
                    if len(dims) == 1:
                        var[:len(data)] = data
                    else:
                        for i, x in enumerate(data):
                            var[i, :len(x)] = x
                else:
                    var[:] = data
        return var
Exemplo n.º 40
0
    Gamma = ncfile.kernel_Gamma_width
    delta = ncfile.delta_width
    beta  = ncfile.beta

    knmax_coarse_smooth = ncfile.nmax_coarse_smooth
    knmax_fine_smooth   = ncfile.nmax_fine_smooth
    knmax_block_smooth  = ncfile.n_blocks_coarse_to_fine_smooth

    knmax_coarse_singular = ncfile.nmax_coarse_singular
    knmax_fine_singular   = ncfile.nmax_fine_singular
    knmax_block_singular  = ncfile.n_blocks_coarse_to_fine_singular

    ncfile.close()

    combined_filename = dic_job['combined_filename']

    #--------------------------------------------
    # Write to netcdf file 
    #--------------------------------------------
    Cncfile   = Dataset(combined_filename,'w')

    # --- set various attributes, identifying the parameters of the computation ----
    setattr(Cncfile,'mu',mu) 
    setattr(Cncfile,'beta',beta) 
    setattr(Cncfile,'acell',acell) 
    setattr(Cncfile,'Area',Area) 
Exemplo n.º 41
0
 def __ReadGridData__(self, GridVar): 
     NCdata = Dataset(self.NCFileGrid,'r') 
     result = NCdata.variables[GridVar][:]
     NCdata.close() 
     return result
Exemplo n.º 42
0
    def write_output(self, afl):
        print afl
        local_dimension_directory={}
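        # Editor's note: only the netCDF branch is implemented below; for any
        # other value of self.format this method silently writes nothing.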
        if self.format == 'netCDF':
            # open a new netCDF file for writing.
            ncfile = Dataset(self.file, 'w')

            ndim = len(self.target_time)
            #--print 'ndim: ', ndim
            ncfile.createDimension('time', ndim)

            # create variables
            # first argument is name of variable, second is datatype, third is
            # a tuple with the names of dimensions.
            time = ncfile.createVariable('time', dtype('float64').char, ('time', ))
            lats = ncfile.createVariable('latitude', dtype('float32').char, ('time', ))
            lons = ncfile.createVariable('longitude', dtype('float32').char, ('time', ))

            time.units = 'second (since midnight of 1/1/1970)'
            lats.units = 'degree'
            lons.units = 'degree'

            # write time, lat, lon to variables
            self.target_time.shape = (ndim, )
            self.target_lat.shape = (ndim, )
            self.target_lon.shape = (ndim, )

            time[:] = N.cast['float64'](self.target_time)
            lats[:] = N.cast['float32'](self.target_lat)
            lons[:] = N.cast['float32'](self.target_lon)

            # create variables for levels
            # first argument is name of variable, second is datatype, third is
            # a tuple with the names of dimensions.
            lkeys = self.target_levels.keys()
            print 'lkeys: ', lkeys
            if len(lkeys) > 0:
                self.lvars = [0] * len(lkeys)
            kk = 0
            for k in lkeys:
                print 'k: ', k
                kname = k.replace(' ', '_')
                #---print 'kk: ', kk, ', kname: ', kname

                atuple = self.target_levels[k]
                attribute = atuple[0]
                #---print 'attribute: ', attribute
                local_level = atuple[1]
                #--print 'local_level: ', local_level
                #--print 'local_level.shape: ', local_level.shape

                lc = 'lc-' + str(kk)
                print 'lc: ', lc

                if attribute.has_key('dimension1'):
                    lc = attribute['dimension1']
                    if not local_dimension_directory.has_key(lc):
                        ncfile.createDimension(lc, len(local_level))
                        local_dimension_directory[lc] = len(local_level)
                elif kname != 'P0':
                    ncfile.createDimension(lc, len(local_level))
                else:  # 'P0'
                    ncfile.createDimension(lc, 1)

                self.lvars[kk] = ncfile.createVariable(kname, dtype('float32').char, (lc, ))

                if attribute.has_key('units'):
                    self.lvars[kk].units = attribute['units']
                #else:
                #    self.lvars[kk].units = ''

                if attribute.has_key('long_name'):
                    self.lvars[kk].long_name = attribute['long_name']
                kk += 1

            # end of for k loop

            # write data to variables for levels
            for kk in range(len(lkeys)):
                print 'kk: ', kk
                if lkeys[kk] != 'P0':
                    #---print 'self.target_levels[lkeys[kk]][1].shape: ', self.target_levels[lkeys[kk]][1].shape
                    #---print 'len(self.target_levels[lkeys[kk]][1]): ', len(self.target_levels[lkeys[kk]][1])
                    self.target_levels[lkeys[kk]][1].shape = (len(self.target_levels[lkeys[kk]][1]), )
                    self.lvars[kk][:] = N.cast['float32'](self.target_levels[lkeys[kk]][1])
                else:
                    self.lvars[kk][:] = N.cast['float32']([self.target_levels[lkeys[kk]][1]])
            # end of for kk loop

            # create variables for data
            # first argument is name of variable, second is datatype, third is
            # a tuple with the names of dimensions.
            keys = self.target_data.keys()
            #--print 'keys: ', keys
            self.vars = [0] * len(keys)
            kk = 0
            for k in keys:
                #--print 'k: ', k
                kname = k.replace(' ', '_')
                #--print 'k: ', k, ', kname: ', kname

                # check whether the data type should be integer or float
                data_type = self.check_data_type(kname)

                # check whether 2D array can be collapsed to 1D array
                s = self.target_data[k][1].shape
                d2 = len(s)
                if d2 == 2 and s[1] == 1:
                    tmp = N.reshape(self.target_data[k][1], s[0])
                    attribute = self.target_data[k][0]
                    self.target_data[k] = (attribute, tmp)

                #--print 'attribute keys: ', self.target_data[k][0].keys()
                s = self.target_data[k][1].shape
                d2 = len(s)
                cc = 'cc-' + str(kk)
                #--print 'cc: ', cc
                if d2 == 1:
                    #--print '--- 1D data'
                    self.vars[kk] = ncfile.createVariable(kname, data_type, ('time', ))
                elif d2 == 2:
                    #--print '--- 2D data'
                    if self.target_data[k][0].has_key('dimension1'):
                        local_dimension = self.target_data[k][0]['dimension1']
                        cc = local_dimension
                        if not local_dimension_directory.has_key(local_dimension):
                            #print 'local dimension =', local_dimension
                            ncfile.createDimension(local_dimension, s[1])
                            local_dimension_directory[local_dimension] = s[1]
                    else:
                        ncfile.createDimension(cc, s[1])
                    self.vars[kk] = ncfile.createVariable(kname, data_type, ('time', cc))
                elif d2 == 3:
                    #--print '--- 3D data'
                    cc1 = cc + '1'
                    cc2 = cc + '2'
                    ncfile.createDimension(cc1, s[1])
                    ncfile.createDimension(cc2, s[2])
                    self.vars[kk] = ncfile.createVariable(kname, data_type, ('time', cc1, cc2))
                elif d2 == 4:
                    #--print '--- 4D data'
                    cc1 = cc + '1'
                    cc2 = cc + '2'
                    cc3 = cc + '3'
                    ncfile.createDimension(cc1, s[1])
                    ncfile.createDimension(cc2, s[2])
                    ncfile.createDimension(cc3, s[3])
                    self.vars[kk] = ncfile.createVariable(kname, data_type, ('time', cc1, cc2, cc3))

                if self.target_data[k][0].has_key('units'):
                    self.vars[kk].units = self.target_data[k][0]['units']

                if self.target_data[k][0].has_key('long_name'):
                    self.vars[kk].long_name = self.target_data[k][0]['long_name']

                if self.target_data[k][0].has_key('_FillValue'):
                    self.vars[kk].FillValue = self.target_data[k][0]['_FillValue']

                if self.target_data[k][0].has_key('missing_value'):
                    self.vars[kk].missing_value = self.target_data[k][0]['missing_value']

                if self.target_data[k][0].has_key('scale_factor'):
                    self.vars[kk].scale_factor = self.target_data[k][0]['scale_factor']

                if self.target_data[k][0].has_key('add_offset'):
                    self.vars[kk].add_offset = self.target_data[k][0]['add_offset']

                if self.target_data[k][0].has_key('valid_range'):
                    self.vars[kk].valid_range = self.target_data[k][0]['valid_range']

                if self.target_data[k][0].has_key('Parameter_Type'):
                    self.vars[kk].Parameter_Type = self.target_data[k][0]['Parameter_Type']

                if self.target_data[k][0].has_key('Cell_Along_Swath_Sampling'):
                    self.vars[kk].Cell_Along_Swath_Sampling = self.target_data[k][0]['Cell_Along_Swath_Sampling']

                if self.target_data[k][0].has_key('Cell_Across_Swath_Sampling'):
                    self.vars[kk].Cell_Across_Swath_Sampling = self.target_data[k][0]['Cell_Across_Swath_Sampling']

                if self.target_data[k][0].has_key('Geolocation_Pointer'):
                    self.vars[kk].Geolocation_Pointer = self.target_data[k][0]['Geolocation_Pointer']

                # add missing_value to the variable attributes if given by the XML input parameter
                if (afl.missing_value != 'None'
                        and not self.target_data[k][0].has_key('missing_value')
                        and not self.target_data[k][0].has_key('_FillValue')):
                    self.vars[kk].missing_value = afl.missing_value

                # add invalid_data in the variable attribute from collocation
                self.vars[kk].collocation_invalid_value = self.invalid_data

                kk += 1

            # end of for k loop

            #--print 'in backend: self.target_data[keys[0]][1]: ', self.target_data[keys[0]][1]

            # write data to variables for data
            for kk in range(len(keys)):
                # check whether the data type should be integer or float
                data_type = self.check_data_type(keys[kk])
                #--print 'kk: ', kk
                #--print 'self.target_data[keys[kk]][1].shape: ', self.target_data[keys[kk]][1].shape
                s3 = self.target_data[keys[kk]][1].shape
                d3 = len(s3)
                if d3 == 1:
                    self.target_data[keys[kk]][1].shape = (ndim, )
                elif d3 == 2:
                    self.target_data[keys[kk]][1].shape = (ndim, s3[1])
                elif d3 == 3:
                    self.target_data[keys[kk]][1].shape = (ndim, s3[1], s3[2])
                elif d3 == 4:
                    self.target_data[keys[kk]][1].shape = (ndim, s3[1], s3[2], s3[3])

                #--print 'self.target_data[keys[kk]][1].shape: ', self.target_data[keys[kk]][1].shape
                #--print 'self.target_data[keys[kk]][1] data type: ', type(self.target_data[keys[kk]][1])

                if data_type == 'i':
                    self.vars[kk][:] = N.cast['int32'](self.target_data[keys[kk]][1])
                #elif data_type=='f':
                #    self.vars[kk][:] = N.cast['float32'](self.target_data[keys[kk]][1])
                else:
                    self.vars[kk][:] = self.target_data[keys[kk]][1]  # float32
            # end of for kk loop

            ncfile.close()
Exemplo n.º 43
0
    def process_file(self):
        print 'using NCAR daily surface front end - file = ' + self.filename
        ncfile = NetCDFFile(self.filename, 'r')

        NameLen = len(self.filename)
        DateStrt = NameLen - 13
        FileDate = self.filename[DateStrt:DateStrt + 10]
        #print '**** date - ', FileDate

        FileYr = int(FileDate[0:4])
        FileMo = int(FileDate[4:6])
        FileDa = int(FileDate[6:8])
        FileHr = int(FileDate[8:10])

        ctime = calendar.timegm((FileYr, FileMo, FileDa, FileHr, 0, 0))
        print 'date: ', FileYr, '/', FileMo, '/', FileDa, '/', FileHr, ', unix time: ', ctime

        # single time step
        self.intime_size = 1

        # Get the geolocation data
        CdfVarLat = ncfile.variables['g4_lat_0']
        CdfVarLon = ncfile.variables['g4_lon_1']

        CdfLatData = N.array(CdfVarLat.getValue())
        CdfLonData = N.array(CdfVarLon.getValue())

        self.CdfLatSize = N.size(CdfVarLat)
        self.CdfLonSize = N.size(CdfVarLon)

        # Here we have to 'expand out' lat, lon and time so that the
        # middle end is permitted to function as it does for irregular
        # grids.  This involves quite a few repeated values and
        # increased memory consumption.

        # calculate size of single dimension for (time,lat,lon)
        #self.grid_size = self.intime_size * self.CdfLatSize * self.CdfLonSize
        self.grid_size = self.CdfLatSize * self.CdfLonSize
        print "Number of time steps = ", self.intime_size
        print "Lat Size = ", self.CdfLatSize
        print "Lon Size = ", self.CdfLonSize
        print "grid size = ", self.grid_size

        mid_time = N.zeros(self.grid_size)
        mid_lat = N.zeros(self.grid_size)
        mid_lon = N.zeros(self.grid_size)

        # fill in the lat long values for each grid point (lon varies fastest in mid data)
        grid_pt = 0
        for j in range(0, self.CdfLatSize):
            lat_tk = CdfVarLat[j]
            for k in range(0, self.CdfLonSize):
                lon_tk = CdfVarLon[k]
                #if lon_tk>180:
                #	lon_tk = lon_tk-360.0 # make sure longitude is in (-180,180).

                mid_time[grid_pt] = ctime
                mid_lat[grid_pt] = lat_tk
                mid_lon[grid_pt] = lon_tk
                grid_pt += 1
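
        # With this loop order (lon varying fastest), grid point (j, k) ends
        # up at flat index j * self.CdfLonSize + k.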

        # convert long range to -180 to 180
        self.longitude = N.where(mid_lon > 180.0, mid_lon - 360.0, mid_lon)

        self.utime = mid_time
        self.latitude = mid_lat

        ##############################################
        # process data variables
        #
        # get info about dimensions and variables
        print '***********************'
        print 'processing data'
        CdfVarNameList = ncfile.variables.keys()

        #print 'types - ', dir(types)

        for CdfVarName in CdfVarNameList:
            # skip geoinfo vars
            if ((CdfVarName != 'g4_lon_1') and (CdfVarName != 'g4_lat_0')):
                # debug if (CdfVarName == self.LfracName):
                #print CdfVarName

                CdfVar = ncfile.variables[CdfVarName]

                # process attributes
                MidAttr = {}

                AttrNameList = dir(CdfVar)
                for AttrName in AttrNameList:
                    # remove extra attrs
                    if ((AttrName != 'assignValue') & (AttrName != 'getValue')
                            & (AttrName != 'typecode')):
                        #print 'attr name ', AttrName
                        CdfAttr = getattr(CdfVar, AttrName)
                        AttrType = type(CdfAttr)

                        #print 'attribute - ', CdfVarName, ':', AttrName, ', type ', AttrType

                        # type is string or array
                        if (AttrType == types.StringType):
                            MidAttr[AttrName] = CdfAttr
                        else:
                            #print 'array', CdfVarName, ':', AttrName, ', type '#, AttrName.typecode
                            MidAttr[AttrName] = N.array(CdfAttr)

                # get CDF data (shape [long,lat])
                CdfData = N.array(CdfVar.getValue())

                # create flat destination array
                MidData = N.array([])
                MidData.resize(self.grid_size)

                # copy to flat array
                # loop iteration must match geo info setup
                MidIndx = 0
                for LatIndx in range(0, self.CdfLatSize):
                    for LonIndx in range(0, self.CdfLonSize):
                        Val = CdfData[LatIndx, LonIndx]

                        MidData[MidIndx] = Val
                        MidIndx += 1

                # print 'shape ', CdfVarName, MidData.shape, self.grid_size

                # insert into dictionary
                self.data[CdfVarName] = (MidAttr, MidData)

        ncfile.close()
Exemplo n.º 44
0
    def read_data(self):

        # open a new netCDF file for reading.
        ncfile = NetCDFFile(self.file, 'r')

        dimNames = ncfile.dimensions.keys()
        variableNames = ncfile.variables.keys()

        #print dimNames
        #print variableNames

        # Get the geolocation data
        intime = ncfile.variables['time']
        inlatitude = ncfile.variables['latitude']
        inlongitude = ncfile.variables['longitude']

        print 'time =', N.array(intime)

        self.intime_size = N.size(intime)
        self.inlat_size = N.size(inlatitude)
        self.inlon_size = N.size(inlongitude)

        #print 'original time size = ', self.intime_size
        #print 'original latitude size = ', self.inlat_size
        #print 'original longitude size =', self.inlon_size

        # Here we have to 'expand out' lat, lon and time so that the
        # middle end is permitted to function as it does for irregular
        # grids.  This involves quite a few repeated values and
        # increased memory consumption.

        new_var_time = N.array([])
        new_var_lat = N.array([])
        new_var_lon = N.array([])

        coloc_param_size = self.intime_size * self.inlat_size * self.inlon_size

        new_var_time.resize(coloc_param_size)
        new_var_lat.resize(coloc_param_size)
        new_var_lon.resize(coloc_param_size)

        #print 'new time size = ', new_time_size
        #print 'new latitude size = ', new_lat_size
        #print 'new longitude size =', new_lon_size

        # Now that we have both the original representation of the geolocation params
        # and a properly-sized location to put their expanded representations - do the
        # expansion
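        # With these nested loops the flat index works out to
        # m = i * inlat_size * inlon_size + j * inlon_size + k.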
        i = j = k = m = 0
        for i in range(0, self.intime_size):
            # Convert hours since 01/01/1900 to unix time
            tk = UT.hours_since_20th_century_to_unix_time(intime[i])
            for j in range(0, self.inlat_size):
                lat_tk = inlatitude[j]
                for k in range(0, self.inlon_size):
                    lon_tk = inlongitude[k]

                    new_var_time[m] = tk
                    new_var_lat[m] = lat_tk
                    new_var_lon[m] = lon_tk

                    # Show leading edge of expanded colocation params
                    #if(m < 242):
                    #print m, ": ", tk, ", ", lat_tk, ", ", lon_tk

                    m += 1

        self.time = new_var_time
        self.latitude = new_var_lat
        ### convert to (-180,180) so it's consistent with CloudSat
        ### self.longitude  = new_var_lon
        self.longitude = N.where(new_var_lon > 180.0, new_var_lon - 360.0,
                                 new_var_lon)

        # End of colocation parameter expansion for regular grids

        #print 'new time size = ', N.size(self.time)
        #print 'new latitude size = ', N.size(self.latitude)
        #print 'new longitude size =', N.size(self.longitude)

        for i in range(len(variableNames)):
            if (variableNames[i] != 'time' and variableNames[i] != 'latitude'
                    and variableNames[i] != 'longitude'):
                local_var = ncfile.variables[variableNames[i]]
                # get data of the variable
                local_data = N.array(local_var.getValue())
                # get attributes of the variable
                attList = dir(local_var)
                attribute = {}
                for j in attList:
                    if (j != 'assignValue' and j != 'getValue'
                            and j != 'typecode'):
                        attValue = getattr(local_var, j)
                        attribute[j] = attValue
                        #print j, attValue

                #print variableNames[i], "attribute = ", attribute

                # collect indices of data with missing values or filled value
                N1 = local_var.shape[0]
                N2 = local_var.shape[1]
                N3 = local_var.shape[2]
                #print 'n1,n2,n3 = ', N1, N2, N3

                #print 'local data dimension', local_data.shape

                if (attribute['_FillValue'] != attribute['missing_value']):
                    print "fill value differ from missing value"
                    print attribute['_FillValue']
                    print attribute['missing_value']
                    print attribute['_FillValue'].shape

                # find all the indices of the local data whose values are invalid/missing
                missing_value1 = N.where(local_data == attribute['_FillValue'])
                missing_value2 = N.where(
                    local_data == attribute['missing_value'])
                print 'missing_value1=', missing_value1
                # update data with scale_factor and add_offset
                local_data = local_data * attribute[
                    'scale_factor'] + attribute['add_offset']

                # update missing data value
                local_data[missing_value1] = UT.NAN
                local_data[missing_value2] = UT.NAN

                # remove unnecessary attributes since we already applied scale_factor, add_offset, missing_value, and fillvalue.
                del attribute['_FillValue']
                del attribute['scale_factor']
                del attribute['add_offset']
                del attribute['missing_value']

                # Now we have to reshape to 1D for the middle end
                oneD_data = N.reshape(
                    local_data, local_data.shape[0] * local_data.shape[1] *
                    local_data.shape[2])

                # store (attribute, data) in the data dictionary
                self.data[variableNames[i]] = (attribute, oneD_data)

                #print "attribute         = ", attribute
                #print "data              = ", oneD_data

                # Dynamically spot-check our flattening of the 3D array so
                # that it is straightforwardly indexed using the time, lat
                # and lon fields (after expansion).
                if 0:
                    testx = 8  # Valid in range 0 to size of time in original data
                    testy = 42  # Valid in range 0 to size of lons in original data
                    testz = 56  # Valid in range 0 to size of lats in original data
                    if (local_data[testx][testy][testz] !=
                            oneD_data[testx * 240 * 121 + testy * 240 +
                                      testz]):
                        print 'Flattening of 3D array failed'
                        print 'sample point data3d[x][y][z] = ', local_data[
                            testx][testy][testz]
                        print 'sample point data1d[x*240*121 + y*240 + z] = ', oneD_data[
                            testx * 240 * 121 + testy * 240 + testz]
                        sys.exit(-1)

        ncfile.close()
Exemplo n.º 45
0
#Run the script to get the data
#execfile('open_L2_C6_MODIS_run.py')

from open_L2_C6_MODIS_file_func import *

#Jesus' MODIS file for SO
#path='/group_workspaces/jasmin/asci/dgrosv/MODIS/Jesus/'
path = '/nfs/a201/eejvt/CASIM/SO_KALLI/SATELLITE/modis/'
file_hdf = 'MYD06_L2.A2014343.1325.006.2014344210847.hdf'

#Get the data
MODL2_C6_outputs = open_modis_L2(path, file_hdf)

#Will just write out N37
Nd_37 = MODL2_C6_outputs.get('N37')

nx = Nd_37.shape[0]
ny = Nd_37.shape[1]

#write the file
ncfile = Dataset(path + 'Nd_' + file_hdf + '.nc', 'w')
ncfile.createDimension('x', nx)
ncfile.createDimension('y', ny)

data = ncfile.createVariable('CDNC_37_MODIS',
                             np.dtype('float64').char, ('x', 'y'))
data[:] = Nd_37
ncfile.close()

Exemplo n.º 46
0
class TrajectoryInspector:

    def __init__(self, filename):
        self.filename = filename
        self.file = NetCDFFile(self.filename, 'r')
        try:
            self.block_size = self.file.dimensions['minor_step_number']
        except KeyError:
            self.block_size = 1
        self._countSteps()

    def close(self):
        self.file.close()

    def reopen(self):
        self.file.close()
        self.file = NetCDFFile(self.filename, 'r')
        self._countSteps()

    def _countSteps(self):
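        # Block-structured trajectories pad the final block with the netCDF
        # default integer fill value (-2147483647); count and subtract those
        # padded entries to get the true number of steps.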
        if self.block_size == 1:
            self.nsteps = self.file.variables['step'].shape[0]
        else:
            blocks = self.file.variables['step'].shape[0]
            last_block = self.file.variables['step'][blocks-1]
            unused = Numeric.sum(Numeric.equal(last_block, -2147483647))
            self.nsteps = blocks*self.block_size-unused

    def comment(self):
        try:
            return self.file.comment
        except AttributeError:
            return ''

    def history(self):
        try:
            return self.file.history
        except AttributeError:
            return ''

    def description(self):
        return self.file.variables['description'][:].tostring()

    def numberOfAtoms(self):
        return self.file.dimensions['atom_number']

    def numberOfSteps(self):
        return self.nsteps

    def variableNames(self):
        return self.file.variables.keys()

    def readScalarVariable(self, name, first=0, last=None, step=1):
        if last is None:
            last = self.nsteps
        variable = self.file.variables[name]
        if self.block_size > 1:
            variable = Numeric.ravel(variable[:, :])
        return variable[first:last:step]

    def readConfiguration(self, index):
        if self.block_size == 1:
            try:
                cell = self.file.variables['box_size'][index]
            except KeyError:
                cell = None
            return cell, self.file.variables['configuration'][index]
        else:
            i1 = index / self.block_size
            i2 = index % self.block_size
            try:
                cell = self.file.variables['box_size'][i1, :, i2]
            except KeyError:
                cell = None
            return cell, self.file.variables['configuration'][i1, :, :, i2]
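

# Editor's sketch (not part of the original example): typical read-only use,
# with 'dynamics.nc' as a placeholder trajectory file name.
#
#     ti = TrajectoryInspector('dynamics.nc')
#     print ti.numberOfAtoms(), ti.numberOfSteps()
#     cell, conf = ti.readConfiguration(0)
#     ti.close()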
Exemplo n.º 47
0
def nc2asc(ncfilename,
           subdataset,
           projection=None,
           verbose=False):
    """Extract given subdataset from ncfile name and create one ASCII file for each band.

    This function is reading the NetCDF file using the Python Library Scientific.IO.NetCDF

    Time is assumed to be in whole hours.
    """


    basename, _ = os.path.splitext(ncfilename) # Get rid of .nc
    basename, _ = os.path.splitext(basename)   # Get rid of .res

    if verbose:
        print 'Converting layer %s in file %s to ASCII files' % (subdataset,
                                                                 ncfilename)

    infile = NetCDFFile(ncfilename)

    layers = infile.variables.keys()

    msg = 'Subdataset %s was not found in file %s. Options are %s.' % (
        subdataset, ncfilename, layers)
    assert subdataset in layers, msg

    A = infile.variables[subdataset].getValue()
    msg = 'Data must have 3 dimensions: Time, X and Y. I got shape: %s' % str(A.shape)
    assert len(A.shape) == 3, msg

    if 'time' in infile.variables:
        units = infile.variables['time'].units
        msg = 'Time units must be "h". I got %s' % units
        assert units == 'h', msg

        times = infile.variables['time'].getValue()
        assert A.shape[0] == len(times)

    cols = infile.dimensions['x']
    rows = infile.dimensions['y']

    assert A.shape[1] == rows
    assert A.shape[2] == cols

    # Header information
    xmin = float(infile.XMIN)
    xmax = float(infile.XMAX)
    ymin = float(infile.YMIN)
    ymax = float(infile.YMAX)

    # Check that cells are square
    cellsize = (xmax-xmin)/cols
    assert numpy.allclose(cellsize, (ymax-ymin)/rows)

    header = 'ncols %i\n' % cols
    header += 'nrows %i\n' % rows
    header += 'xllcorner %.1f\n' % xmin
    header += 'yllcorner %.1f\n' % ymin
    header += 'cellsize %.1f\n' % cellsize
    header += 'NODATA_value -9999\n'

    if 'time' in infile.variables:
        # Loop through time slices and name files by hour.
        for k, t in enumerate(times):
            hour = str(int(t)).zfill(2) + 'h'

            asciifilename = basename + '.' + hour + '.' + subdataset.lower() + '.asc'
            _write_ascii(header, A[k,:,:], asciifilename, projection)
    else:
        # Write the one ASCII file
        asciifilename = basename + '.' + subdataset.lower() + '.asc'
        _write_ascii(header, A[0,:,:], asciifilename, projection)

    infile.close()
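

# Editor's note: _write_ascii is called above but not included in this
# snippet. A minimal, hypothetical sketch of such a helper (it reuses the
# module's numpy/os imports; the original's projection handling is unknown,
# so it is written to a side-car .prj file here):
def _write_ascii_sketch(header, grid, asciifilename, projection=None):
    """Write one 2D slice as an ESRI ASCII grid, non-finite values as NODATA."""
    A = numpy.where(numpy.isfinite(grid), grid, -9999)
    fid = open(asciifilename, 'w')
    fid.write(header)
    for row in A:  # row order assumed to match the source layout
        fid.write(' '.join(['%f' % v for v in row]) + '\n')
    fid.close()
    if projection is not None:
        prj = open(os.path.splitext(asciifilename)[0] + '.prj', 'w')
        prj.write(projection)
        prj.close()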
Exemplo n.º 48
0
def AsapFileToTrajectory(oldfile, newfile, firstframe=None, lastframe=None):
    # Check if input file is a filename or a NetCDF file
    if isinstance(oldfile, types.StringTypes):
        oldfile = NetCDFFile(oldfile)

    pos = oldfile.variables['cartesianPositions']  # Must be present
    (nframes, natoms, three) = pos.shape
    print natoms, three, nframes
    firstframe = normalize(firstframe, nframes, 0)
    lastframe = normalize(lastframe, nframes, -1)
    if lastframe < firstframe:
        raise ValueError, "No frames to copy, giving up."

    print "Preparing to copy frames", firstframe, "to", lastframe
    # Now open the output file, and define the variables.
    if isinstance(newfile, types.StringTypes):
        newfile = NetCDFFile(newfile, "w")
    oncevars = []
    manyvars = []
    for v in oldfile.variables.keys():
        try:
            newname = old_names[v]
        except KeyError:
            print "WARNING: Skipping data named", v
            continue
        if new_names[newname][2]:
            shape = new_names[newname][0]
            oncevars.append((v, newname))
        else:
            shape = ("unlim", ) + new_names[newname][0]
            manyvars.append((v, newname))
        shape2 = []
        for d in shape:
            if isinstance(d, types.IntType):
                n = d
                d = str(d)
            elif d == 'natoms':
                n = natoms
            elif d == 'unlim':
                n = None
            else:
                raise RuntimeError, "Unknown dimension " + str(d)
            if not newfile.dimensions.has_key(d):
                newfile.createDimension(d, n)
            shape2.append(d)
        print v, "-->", newname, " shape", shape2
        var = newfile.createVariable(newname, oldfile.variables[v].typecode(),
                                     tuple(shape2))
        var.once = new_names[newname][2]
        var.units = new_names[newname][3]

    # Now copy the data
    print "Copying global data"
    newfile.history = 'ASE trajectory'
    newfile.version = '0.1'
    newfile.lengthunit = 'Ang'
    newfile.energyunit = 'eV'
    for oldname, newname in oncevars:
        newfile.variables[newname][:] = oldfile.variables[oldname][:]

    for n in range(firstframe, lastframe + 1):
        print "Copying frame", n
        for oldname, newname in manyvars:
            newfile.variables[newname][n] = oldfile.variables[oldname][n]
    newfile.close()
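
# Editor's note: old_names, new_names and normalize() are module-level helpers
# not included in this snippet; old_names/new_names map Asap variable names to
# ASE trajectory names plus shape/once/units metadata, and normalize()
# presumably resolves None and negative frame indices against nframes.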
Exemplo n.º 49
0
#!/usr/bin/env python

import numpy as np
from Scientific.IO.NetCDF import NetCDFFile

tdata = np.loadtxt('az_training.txt', delimiter=',')
mags = np.loadtxt('m.txt')
dists = np.loadtxt('r.txt')

fout = 'az_training.nc'

nid = NetCDFFile(fout, 'w')
nid.createDimension('time', 1)
nid.createDimension('traces', tdata.shape[0])
nid.createDimension('filter', tdata.shape[1])

t = nid.createVariable('time', np.dtype(float).char, ('time', ))
t.units = 's'
f = nid.createVariable('filter', np.dtype(float).char, ('filter', ))
f.units = 'Hz'
m = nid.createVariable('magnitude', np.dtype(float).char, ('traces', ))
m[:] = mags
ed = nid.createVariable('epicdist', np.dtype(float).char, ('traces', ))
ed.units = 'km'
ed[:] = dists
z = nid.createVariable('z', np.dtype(float).char, ('time', 'traces', 'filter'))
z[0, :, :] = np.log10(tdata)
h = nid.createVariable('h', np.dtype(float).char, ('time', 'traces', 'filter'))
h[0, :, :] = np.log10(tdata)  # note: 'h' is filled with the same data as 'z'
nid.close()
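A quick read-back check (a sketch using the same Scientific.IO.NetCDF API as above) confirms the layout the script writes:

# Sketch: reopen the file written above and verify its layout.
nid = NetCDFFile('az_training.nc', 'r')
print 'z shape (time, traces, filter):', nid.variables['z'][:].shape
print 'first magnitudes:', nid.variables['magnitude'][:5]
nid.close()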
Exemplo n.º 50
0
    # Setup the directory for the first run
    os.system('mkdir ' + dirname)
    os.system('cp ' + modelfiles + ' ' + dirname)
    os.chdir(dirname)
    if options.run:
        os.system('ln -s ../land_ice_model.exe .')
    print 'Working in ' + os.getcwd()

    ########### Calculate a guess for beta ########
    if i == 0:
        print 'Iteration 0: using beta=1e8 as the initial guess.'
        # Make an initial guess for beta
        fi = NetCDFFile(infile, 'r+')
        fi.variables['betaTimeSeries'][:] = 1.0e8
        fi.close()

    else:
        ########## Get Taub ###############
        # Read the previous input, output files
        fo_old = NetCDFFile('../iter{0:3d}'.format(i - 1) + '/' + outfile, 'r')
        fi_old = NetCDFFile('../iter{0:3d}'.format(i - 1) + '/' + infile, 'r')
        # Get ub and beta
        # Just use the first time level from the input file since beta isn't
        # in the output (the time level chosen shouldn't matter).
        beta_old = fi_old.variables['betaTimeSeries'][:, 0]
        #cellMask=fi.variables['cellMask'][0,:]  # need to treat floating ice separately?  i guess not since it gets handled independently...
        # Use the reconstructed velocities because beta is on cell centers;
        # use the first time level.
        ux = fo_old.variables['uReconstructX'][0, :, :]
        uy = fo_old.variables['uReconstructY'][0, :, :]
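The example is truncated before the update step. For orientation, a minimal sketch of how such a fixed-point basal-friction inversion typically proceeds (the update rule below is illustrative, not taken from the original script):

import numpy as np

def update_beta(beta_old, ux_old, uy_old, ux_new, uy_new, eps=1.0e-10):
    """Illustrative fixed-point step for a beta inversion: keep the basal
    shear stress tau_b = beta * |u| implied by the previous iterate and
    solve for the beta that reproduces it at the newly modeled speed."""
    speed_old = np.sqrt(ux_old**2 + uy_old**2)
    speed_new = np.sqrt(ux_new**2 + uy_new**2)
    taub = beta_old * speed_old
    return taub / np.maximum(speed_new, eps)  # guard against divide-by-zero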
Exemplo n.º 51
0
class radarout(object):
    def __init__(self, xi, yi, filename, radius):
        self.filename = filename
        self.x0 = xi
        self.y0 = yi
        self.radius = radius  # Search radius - may eventually limit search to within wedges

    def setup(self):
        self.ncfile = NetCDFFile(self.filename)
        self.reflectivity = self.ncfile.variables['Reflectivity'].getValue(
        )[:, :360, :]

    def coordinates(self):
        # Raw inputs
        self.azimuth = self.ncfile.variables['azimuthR'].getValue()[:, :360]
        self.r = self.ncfile.variables['distanceR'].getValue()
        # We will reorder everything to this set of azimuths and shift all
        # of the reflectivities to match it
        self.theta = np.arange(0.5, 360.)
        self.thetarad = self.theta * np.pi / 180.
        # Grid - get x,y at every r,theta
        self.thetarad2d, trash = np.meshgrid(self.thetarad,
                                             np.ones(len(self.r)))
        self.thetarad2d = self.thetarad2d.transpose()
        self.y = self.r * np.cos(self.thetarad2d)
        self.x = self.r * np.sin(self.thetarad2d)

    def makeComposite(self):
        # Now do all angles and combine
        # They try to hit the half-degree, so start by assuming that they all stack
        theta_unsorted = np.round(self.azimuth * 2) / 2.
        # Sort and stack - checked this and I transformed everything correctly
        self.composite = np.zeros(self.reflectivity[0, :, :].shape)
        for i in range(self.reflectivity.shape[0]):
            # (use range(3, 4) instead to temporarily look at just one elevation)
            index0_5 = (theta_unsorted[i, :] == .5).nonzero()[0][0]
            ref = np.zeros(self.reflectivity[0, :, :].shape)
            ref[:len(self.theta) - index0_5, :] = self.reflectivity[i, index0_5:, :]  # Beginning
            ref[len(self.theta) - index0_5:, :] = self.reflectivity[i, :index0_5, :]  # End
            self.composite += ref

    def findNearestPoints(self):
        # Find cell that is closest to the weather radar point
        # Radius in meters but dist in km
        dist = np.sqrt((self.x - self.x0)**2 + (self.y - self.y0)**2)
        self.points_selected = dist < self.radius

    def reflectivityAtLoc(self):
        refAtLoc = self.composite[self.points_selected]
        # Doesn't quite work - probably because precip and clear air (?) modes not comparable
        # Mean in x,y, then divide by the number of sweeps for the mean in z
        self.meanCompRef = np.mean(refAtLoc) / self.reflectivity.shape[0]
        return self.meanCompRef

    def close(self):
        # Forgot this earlier: was crashing with too many files open
        self.ncfile.close()

    def run(self):
        try:
            self.setup()
            self.coordinates()
            self.makeComposite()
            self.findNearestPoints()
            ralv = self.reflectivityAtLoc()
            self.close()
            return ralv
        except:
            print "     Maybe problem in input file? Printing variable list. If empty, problem!"
            print self.ncfile.variables
            return 'error'

    def plot(self):
        # Local variables only here
        thetarad2d, trash = np.meshgrid(self.thetarad, np.ones(len(self.r)))
        thetarad2d = thetarad2d.transpose()
        # Match the convention in coordinates(): y is north (cos), x is east (sin)
        y = self.r * np.cos(thetarad2d) / 1000.
        x = self.r * np.sin(thetarad2d) / 1000.
        # Sample Z-R relationship: Z = 300 * R**1.4, inverted for rain rate
        rain = (self.composite / 300.) ** (1. / 1.4)
        # Plot (assumes matplotlib's pylab interface)
        from pylab import figure, contourf, show
        figure(1)
        contourf(x, y, rain, 50, colors=None, cmap=None)
        show()
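A usage sketch for the class (the filename and radar-relative coordinates below are placeholders, not values from the original):

# Hypothetical usage; run() chains setup, compositing, the nearest-point
# search, and the local reflectivity average, then closes the file.
out = radarout(xi=12000.0, yi=-3500.0,
               filename='KTLX_20100510_2200.nc', radius=2000.0)
mean_ref = out.run()  # mean composite reflectivity near (x0, y0), or 'error'
if mean_ref != 'error':
    print 'Mean composite reflectivity near site:', mean_ref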
Exemplo n.º 52
0
class Reader:
    """Reads configurations from a NetCDF file (ASAP 1.x format).

Read configuration number "N" from a NetCDF file:    
>>> atoms = Reader("stuff.nc").Read(N)

Read last configuration from a NetCDF file:
>>> atoms = Reader("stuff.nc").Read()

Read several configurations:
>>> r = Reader("things,.nc")
>>> atoms1 = r.Read(0)
>>> atoms2 = r.Read(1)
"""
    def __init__(self, filename, whatToRead=None):
        """Construct a reader from a filename."""
        self.whatToRead = whatToRead
        try:
            self.nc = NetCDFFile(filename, 'r')
        except IOError:
            self.nc = NetCDFFile(filename + ".nc", 'r')

    def __del__(self):
        try:
            self.nc.close()
        except IOError:
            pass

    def Close(self):
        """Close the associated NetCDF file."""
        self.nc.close()

    def GetNetCDFFile(self):
        """Get the associated NetCDF file handle."""
        return self.nc

    def Read(self, frame=-1):
        """Reads a frame, returning a ListOfAtoms.

        By default it reads the last frame, but other frames can be
        specified.
        """
        vars = self.nc.variables
        positions = vars["cartesianPositions"][frame]
        cell = vars["basisVectors"][frame]
        peri = vars["periodic"][:]
        atoms = _Asap.ListOfAtoms(positions=positions,
                                  cell=cell,
                                  periodic=peri)
        if self.whatToRead is None:
            self.whatToRead = []
            for stuff in ["CartesianMomenta", "Classes", "AtomicNumbers"]:
                if vars.has_key(stuff[0].lower() + stuff[1:]):
                    self.whatToRead.append(stuff)
        for stuff in self.whatToRead:
            if stuff == "CartesianMomenta":
                atoms.SetCartesianMomenta(
                    vars["cartesianMomenta"][frame].astype("d"))
            elif stuff == "Classes":
                atoms.SetTags(vars["classes"][frame])
            elif stuff == "AtomicNumbers":
                if len(vars["atomicNumbers"]) == 1:
                    atoms.SetAtomicNumbers(vars["atomicNumbers"][0] +
                                           Numeric.zeros(len(atoms)))
                else:
                    atoms.SetAtomicNumbers(vars["atomicNumbers"][:])
            else:
                raise RuntimeError, "Don't know how to read " + stuff
        return atoms
Exemplo n.º 53
0
else:
    # halfar dome
    thickness_field[r < r0] = h0 * (1.0 - (r[r < r0] / r0)**(4.0 / 3.0))**(3.0 / 7.0)
thickness[0, :] = thickness_field

# zero velocity everywhere
normalVelocity[:] = 0.0
# flat bed at sea level
bedTopography[:] = 0.0
# constant, arbitrary temperature, degrees C
temperature[:] = 273.15
# Setup layerThicknessFractions
layerThicknessFractions[:] = 1.0 / nVertLevels

# boundary conditions
# Sample values to use, or comment these out for them to be 0.
SMB[:] = 0.0
#beta[:] = 50000.
#SMB[:] = 2.0/1000.0 * (thickness[:] + bedTopography[:]) - 1.0  # units: m/yr, lapse rate of 1 m/yr with 0 at 500 m
# Convert from units of m/yr to kg/m2/s using an assumed ice density
SMB[:] = SMB[:] * 910.0 / (3600.0 * 24.0 * 365.0)

#Tsfc[:,0] = -5.0/1000.0 * (thickness[0,:] + bedTopography[0,:]) # lapse rate of 5 deg / km
#G = 0.01
#BMB[:] = -20.0  # units: m/yr

gridfile.close()

print 'Successfully added dome initial conditions to: ', options.filename
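The SMB conversion above is mass flux = (meters of ice per year) * ice density / seconds per year; a standalone check of the arithmetic:

# Worked check of the SMB unit conversion used above.
rho_ice = 910.0                       # kg m-3, as assumed above
sec_per_year = 3600.0 * 24.0 * 365.0  # s
smb_m_per_yr = 1.0                    # example: 1 m of ice per year
print smb_m_per_yr * rho_ice / sec_per_year  # ~2.886e-05 kg m-2 s-1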
Exemplo n.º 54
0
    def read_data(self):

        # open a new netCDF file for reading.
        ncfile = NetCDFFile(self.file,'r')
        
        dimNames = ncfile.dimensions.keys()
        variableNames = ncfile.variables.keys()

        #print dimNames
        #print variableNames
        
        # Get the geolocation data
        intime         = ncfile.variables['time']
        inlatitude     = ncfile.variables['latitude']
        inlongitude    = ncfile.variables['longitude']

        print 'time = ', N.array(intime)
                 
        self.intime_size = N.size(intime)    
        self.inlat_size  = N.size(inlatitude)
        self.inlon_size  = N.size(inlongitude)
 
        # print 'original time size = ', self.intime_size
        # print 'original latitude size = ', self.inlat_size
        # print 'original longitude size =', self.inlon_size
        
        # Here we have to 'expand out' lat, lon and time so that the
        # middle end is permitted to function as it does for irregular
        # grids.  This involves quite a bit of repetitive values and
        # increased memory consumption.
        
        new_var_time = N.array([])
        new_var_lat  = N.array([])
        new_var_lon  = N.array([])
        
        coloc_param_size = self.intime_size * self.inlat_size * self.inlon_size
        
        new_var_time.resize(coloc_param_size)
        new_var_lat.resize(coloc_param_size)
        new_var_lon.resize(coloc_param_size)
        
        #print 'new time size = ', new_time_size
        #print 'new latitude size = ', new_lat_size
        #print 'new longitude size =', new_lon_size
                                
        # Now that we have both the original representation of the geolocation params
        # and a properly-sized location to put their expanded representations - do the
        # expansion
        i = j = k = m = 0
        for i in range(0, self.intime_size):
            # Convert hours since 01/01/1900 to unix time
            tk = UT.hours_since_20th_century_to_unix_time(intime[i])
            for j in range(0, self.inlat_size):
                lat_tk = inlatitude[j]
                for k in range(0, self.inlon_size):
                    lon_tk = inlongitude[k]
                    
                    new_var_time[m] = tk
                    new_var_lat[m]  = lat_tk
                    new_var_lon[m]  = lon_tk
                    
                    # Show leading edge of expanded colocation params
                    #if(m < 242):
                        #print m, ": ", tk, ", ", lat_tk, ", ", lon_tk

                    m += 1
        
        self.time       = new_var_time
        self.latitude   = new_var_lat
        ### convert to (-180,180) so it's consistent with CloudSat
        ### self.longitude  = new_var_lon
        self.longitude = N.where(new_var_lon > 180.0, new_var_lon - 360.0, new_var_lon)
        
        # End of colocation parameter expansion for regular grids
        
        level_name = ['levelist']
        for i in range(len(level_name)):
            local_level     = ncfile.variables[level_name[i]]
            level_data      = local_level.getValue()
            attList         = dir(local_level)
            attribute       = {}
            for j in attList:
                if(j!='assignValue' and j!='getValue' and j!='typecode'):
                    attValue     = getattr(local_level, j)
                    attribute[j] = attValue
            # add dimension1 name in the attribute list
            attribute['dimension1']='pressure_level' 
            # Store (attribute, data) in the level dictionary
            self.levels[level_name[i]]=(attribute, level_data)

        #print 'new time size = ', N.size(self.time)
        #print 'new latitude size = ', N.size(self.latitude)
        #print 'new longitude size =', N.size(self.longitude)        

        for i in range(len(variableNames)):
          if(variableNames[i]!='time'
             and variableNames[i]!='latitude'
             and variableNames[i]!='longitude'
             and variableNames[i]!='levelist'):
              local_var = ncfile.variables[variableNames[i]]  
              # get data of the variable
              local_data = N.array(local_var.getValue())
              # get attributes of the variable
              attList = dir(local_var)
              attribute = {}
              for j in attList:
                  if(j!='assignValue' and j!='getValue' and j!='typecode'):
                    attValue = getattr(local_var, j)
                    attribute[j]=attValue
                    #print j, attValue
                   
              #print variableNames[i], "attribute = ", attribute

              # collect indices of data with missing values or filled value
              N1 =  local_var.shape[0]
              N2 =  local_var.shape[1]
              N3 =  local_var.shape[2]
              N4 =  local_var.shape[3]
              #print 'n1,n2,n3,n4 = ', N1, N2, N3, N4

              #print 'local data dimension', local_data.shape
            
              if(attribute['_FillValue']!= attribute['missing_value']):
                  print "fill value differ from missing value"
                  print attribute['_FillValue']
                  print attribute['missing_value']
                  print attribute['_FillValue'].shape
          
              # find all the indices of the local data whose values are invalid/missing 
              missing_value1 = N.where(local_data == attribute['_FillValue']) 
              missing_value2 = N.where(local_data == attribute['missing_value']) 

              # update data with scale_factor and add_offset
              local_data = local_data*attribute['scale_factor']+attribute['add_offset'] 
              
              # update missing data value
              local_data[missing_value1] = UT.NAN
              local_data[missing_value2] = UT.NAN

              # Remove these attributes since scale_factor, add_offset, missing_value, and _FillValue have already been applied.
              del attribute['_FillValue']
              del attribute['scale_factor']
              del attribute['add_offset']
              del attribute['missing_value']
             

              # add attribute for dimension name
              attribute['dimension1']='pressure_level'
 
              # Now we have to swap the axis of array to make the p-level to be the last axis 
              local_data = N.swapaxes(local_data, 1, 2) # swap the axes of p-level and latitude 
              local_data = N.swapaxes(local_data, 2, 3) # swap the axes of p-level and longitude 
              # Now the array has the axis in this order (time, lat, lon, p-level) 
              # Now we have to reshape the four-dimensional array to a 2-dimensional array for the middle end
              print local_data.shape
              twoD_data = N.reshape(local_data, (local_data.shape[0]*local_data.shape[1]*local_data.shape[2],local_data.shape[3]))

              # store (attribute, data) in the data dictionary
              self.data[variableNames[i]]=(attribute, twoD_data)
              
              #print "attribute         = ", attribute
              #print "data              = ", twoD_data
	    
        ncfile.close()
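The triple loop above performs ntime*nlat*nlon Python iterations; a sketch of an equivalent vectorized expansion (same ordering: time varies slowest, longitude fastest, reusing the names from read_data):

# Vectorized equivalent of the expansion loop (sketch; N is numpy).
nt, nlat, nlon = self.intime_size, self.inlat_size, self.inlon_size
times = N.array([UT.hours_since_20th_century_to_unix_time(intime[i])
                 for i in range(nt)])
self.time      = N.repeat(times, nlat * nlon)
self.latitude  = N.tile(N.repeat(N.array(inlatitude), nlon), nt)
self.longitude = N.tile(N.array(inlongitude), nt * nlat)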
Exemplo n.º 55
0
def doSetNoDataInSeries(infile, nodata, variable, outfile):
    fileH = NetCDFFile(infile, mode="r")

    if fileH is None:
        exitMessage('Could not open file {0}. Exit(1).'.format(infile), 1)

    data = fileH.variables[variable][:]
    if len(data.shape)!=3:
        exitMessage('3d data needed for {0}. Exit(2).'.format(variable), 2)
    # if nodata are found on the first image, return
    wnodata = (data[0,:,:] == nodata)
    if wnodata.any():
        print 'Nodata already set. Return(0)'
        return(0)

    common = numpy.ravel(numpy.ones((data.shape[1], data.shape[2]), dtype=numpy.bool))

    for iband in range(1, data.shape[0]):
        # 'wnequal' = where consecutive bands differ; such series are not constant
        wnequal = data[iband-1,:,:].ravel() != data[iband,:,:].ravel()
        if wnequal.any():
            common[wnequal]=False

        gdal.TermProgress_nocb(iband/float(data.shape[0]))
    common = numpy.reshape(common, (data.shape[1], data.shape[2]))
    data[:, common] = nodata  # set the constant time series to nodata in every band

    # save result
    outFileH = NetCDFFile(outfile, mode='w')
    # build a list of variables without the processed variable
    listOfVariables = list( itertools.ifilter( lambda x: x != variable, fileH.variables.keys() ) )
    # The loop body was missing in the original; an assumed, minimal
    # completion: mirror every dimension, then copy the variables through.
    for dim, size in fileH.dimensions.items():
        outFileH.createDimension(dim, size)
    for ivar in listOfVariables:
        srcVar = fileH.variables[ivar]
        outFileH.createVariable(ivar, srcVar.typecode(), srcVar.dimensions)[:] = srcVar[:]

    varToWrite = outFileH.createVariable('new_{0}'.format(variable), 'f', fileH.variables[variable].dimensions )
    varToWrite[:] = data

    fileH.close()
    outFileH.close()

    gdal.TermProgress_nocb(1)

## Assume NETCDF
def doSetNoDataInSeriesOld(infile, nodata, outfile, outformat, options):
    fileH = gdal.Open(infile, GA_ReadOnly)
    if fileH is None:
        exitMessage('Could not open file {0}. Exit(1).'.format(infile), 1)
    
    # is nodata already present?
    data = numpy.ravel( fileH.GetRasterBand(1).ReadAsArray())
    wnodata = (data==nodata)
    if wnodata.any():
        print 'No data already set. Return(0)'
        return(0)

    common = numpy.ones(data.shape, dtype=numpy.bool)
    for iband in range(1, fileH.RasterCount):
        newdata = numpy.ravel(fileH.GetRasterBand(iband + 1).ReadAsArray())
        wnequal = data!=newdata
        common[wnequal] = False
        gdal.TermProgress_nocb( (iband+1)/float( 2*fileH.RasterCount ) )

    # is there any constant time series?
    if common.any():
        outDrv = gdal.GetDriverByName(outformat)
        outDS = outDrv.Create(outfile, fileH.RasterXSize, fileH.RasterYSize, fileH.RasterCount, fileH.GetRasterBand(1).DataType, options)
        outDS.SetProjection( fileH.GetProjection() )
        outDS.SetGeoTransform( fileH.GetGeoTransform() )
        #then set these time series to nodata
        for iband in range(fileH.RasterCount):
            data = numpy.ravel(fileH.GetRasterBand(iband + 1).ReadAsArray(0, 0, fileH.RasterXSize, fileH.RasterYSize))
            data[common] = nodata
            outDS.GetRasterBand( iband + 1 ).WriteArray( data.reshape(fileH.RasterYSize, fileH.RasterXSize), 0, 0)
            gdal.TermProgress_nocb( (iband+1+fileH.RasterCount) / float( 2*fileH.RasterCount ) )

    gdal.TermProgress_nocb(1)

##
if __name__=="__main__":

    infile = None
    variable = None
    nodata = 1.e20
    outfile = None
    outformat='hfa'
    options=[]
    
    ii = 1
    while ii < len(sys.argv):
        arg = sys.argv[ii]
    
        if arg == '-v':
            ii = ii + 1
            variable = sys.argv[ii]
        elif arg=='-o':
            ii = ii +1
            outfile = sys.argv[ii]
        elif arg == '-nodata':
            ii = ii + 1
            nodata = float(sys.argv[ii])
            
        else:
            infile=sys.argv[ii]
        ii = ii + 1

        
    if infile is None:
        exitMessage('Input file not defined. Exit(10).', 10)

    if variable is None:
        exitMessage('netcdf variable not defined. Exit(11).', 11)

    if outfile is None:
        exitMessage('Missing an output file name. Exit(12).', 12)

    doSetNoDataInSeries(infile, nodata, variable, outfile)
Exemplo n.º 56
0
class ViewEffectiveModeDialog(PortableToplevel):
    """Sets up a dialog used to visualize the effective modes resulting from a QHA analysis.
    """

    def __init__(self, parent, title = None):
        """The constructor.
        
        @param parent: the parent widget.
        
        @param title: a string specifying the title of the dialog.
        @type title: string
        """

        PortableToplevel.__init__(self, parent)        
        self.transient(parent)
        
        if title:
            self.title(title)

        self.parent = parent       
                        
        body = Frame(self)
        self.initial_focus = self.body(body)
        body.grid(row = 0, column = 0, sticky = EW)  

        self.buttonbox()        
        
        self.grab_set()

        if not self.initial_focus:
            self.initial_focus = self

        self.protocol("WM_DELETE_WINDOW", self.cancel)

        self.resizable(width = NO, height = NO)

        self.geometry("+%d+%d" % (parent.winfo_rootx()+50, parent.winfo_rooty()+50))

        self.initial_focus.focus_set()

        self.wait_window(self)            

    def body(self, master):
        """
        Create dialog body. Return widget that should have initial focus.
        """

        settingsFrame = LabelFrame(master, text = 'Settings', bd = 2, relief = GROOVE)
        settingsFrame.grid(row = 0, column = 0, sticky = EW, padx = 3, pady = 3)
        settingsFrame.grid_columnconfigure(0, weight = 1)

        # The combo widget for the file browser.
        self.fileBrowser = ComboFileBrowser(settingsFrame,\
                                            frameLabel = "QHA input file",\
                                            tagName = 'view_effective_modes_qha_input_file',\
                                            contents = '',\
                                            save = False,\
                                            command = self.openNetCDFFile,\
                                            filetypes = [("NetCDF file", ".nc"),])
        self.fileBrowser.grid(row = 0, column = 0, sticky = EW, padx = 2, pady = 2)
        self.fileBrowser.grid_columnconfigure(0, weight = 1)
        self.fileBrowser.entry.bind('<Return>', self.openNetCDFFile)

        # The combo listbox that will contain the X variables.
        self.selectedModeLb = ComboListbox(settingsFrame,\
                                           frameLabel = 'Quasi-Harmonic mode',\
                                           tagName = 'quasi_harmonic_mode',\
                                           contents = [])
        self.selectedModeLb.lb.config({'exportselection' : 0, 'width' : 22, 'height' : 8, 'selectmode' : MULTIPLE})
        self.selectedModeLb.grid(row = 1, column = 0, sticky = EW, padx = 2, pady = 2)
        self.selectedModeLb.grid_columnconfigure(0, weight = 1)
        
        # The combo widget to set the number of frames for the animation.
        self.nFramesEntry = ComboIntegerEntry(settingsFrame,\
                                              frameLabel = 'Number of frames',\
                                              tagName = 'view_effective_modes_number_of_frames')
        self.nFramesEntry.grid(row = 2, column = 0, sticky = EW, padx = 2, pady = 2)
        self.nFramesEntry.grid_columnconfigure(0, weight = 1)

        # The combo widget to set the amplitude of the effective mode to view.
        self.amplitudeEntry = ComboFloatEntry(settingsFrame,\
                                              frameLabel = 'Amplitude (in nm)',\
                                              tagName = 'view_effective_modes_amplitude')
        self.amplitudeEntry.grid(row = 3, column = 0, sticky = EW, padx = 2, pady = 2)
        self.amplitudeEntry.grid_columnconfigure(0, weight = 1)
        
        return None        

    def buttonbox(self):
        """
        Add standard button box.
        """

        # The frame that contains the 'Cancel' and 'OK' buttons.
        box = LabelFrame(self, text = 'Actions', bd = 2, relief = GROOVE)
        box.grid(row = 1, column = 0, sticky = EW, padx = 3, pady = 3)
        box.grid_columnconfigure(0, weight = 1)

        w = Button(box, text = "Cancel", width=10, command = self.cancel)
        w.grid(row = 0, column = 0, sticky = E)
        w = Button(box, text = "OK", width=10, command = self.ok, default=ACTIVE)
        w.grid(row = 0, column = 1, sticky = E)
        
        self.bind("<Return>", self.ok)
        self.bind("<Escape>", self.cancel)

    # Standard button semantics.
    def ok(self, event = None):

        if not self.validate():
            self.initial_focus.focus_set()
            return

        self.update_idletasks()

        self.apply()
        
    def cancel(self, event=None):

        # Put focus back to the parent window
        self.parent.focus_set()
        self.destroy()

    # Command hooks
    def validate(self):
        
        try:
                        
            if not self.selectedModeLb.lb.curselection():
                LogMessage('warning','Please select a vibration mode.',['gui'])
                raise

            self.selectedMode = [int(v) - 1 for v in self.selectedModeLb.lb.curselection()]

            self.amplitude = self.amplitudeEntry.getValue()
            self.nFrames = self.nFramesEntry.getValue()
            
            if self.amplitude <= 0.0:
                raise
            
            if self.nFrames < 0:
                raise
                        
        except:
            LogMessage('warning','Bad input. Please try again.',['gui'])
            return False
            
        return True
    
    def apply(self):

        try:
            if os.path.exists(PREFERENCES['vmd_path']):
                definePDBViewer('vmd', PREFERENCES['vmd_path'])
            
            else:
                raise
            
        except:
            raise Error('Error when defining the PDB viewer from %s path.' % PREFERENCES['vmd_path'])
            
        try:
        
            local = {}
            skeleton = eval(self.description, vars(Skeleton), local)
            universe = skeleton.make({}, self.avgStruct)
            universe.setCellParameters(self.cell)

            avg = Configuration(universe, self.avgStruct)
            pseudoTraj = [avg]
            
            for comp in range(self.nFrames):
                vibr = copy.copy(avg)
                for selMode in self.selectedMode:
                    dx = copy.copy(self.dx[selMode])
                    dx.shape = (universe.numberOfAtoms(), 3)
                    d = ParticleVector(universe, dx)
                    vibr += self.amplitude*Num.sin(2.0*Num.pi*float(comp)/self.nFrames)*d
                    
                pseudoTraj.append(vibr)

            viewSequenceVMD(universe, pseudoTraj, periodic = 1)
            
        except:
            raise Error('Error when animating the selected mode(s).')
        
    def openNetCDFFile(self, event = None):
        """
        This method opens the NetCDF file that contains the effective modes.
        Arguments:
            - event: Tkinter event.
        """

        # Case where the user enters a file name directly in the entry widget without using the browser.
        if event is not None:
            if event.widget == self.fileBrowser.entry:
                filename = self.fileBrowser.getValue()
            else:
                return
            
        else:
            # The name of the NetCDF file to load.
            filename = askopenfilename(parent = self,\
                                       filetypes = [('NetCDF file','*.nc')],\
                                       initialdir = PREFERENCES['trajfile_path'])

        # The file must exist.
        if filename:
            try:
                self.netcdf = NetCDFFile(filename, 'r')
                
            except IOError:
                LogMessage('warning','Problem when reading the NetCDF file.',['gui'])
                self.fileBrowser.setValue('')
                self.selectedModeLb.lb.delete(0, END)
                self.nFramesEntry.setValue('')
                self.amplitudeEntry.setValue('')
                return 'break'
                
            try:
                self.description = self.netcdf.variables['description'][:].tostring()

                # The frequencies values.
                self.omega = self.netcdf.variables['omega'].getValue()
                
                # The displacements.
                self.dx = self.netcdf.variables['dx'].getValue()
                
                # The average structure.
                self.avgStruct = self.netcdf.variables['avgstruct'].getValue()
                                
            except KeyError:
                LogMessage('warning','The NetCDF file %s is missing some QHA analysis keywords.' % filename,['gui'])
                
            self.fileBrowser.setValue(filename)

            self.selectedModeLb.lb.delete(0, END)

            for i in range(len(self.omega)):
                ome = self.omega[i]
                self.selectedModeLb.lb.insert(END, 'Mode %s (%s cm-1)' % (i+1, ome))

            self.nFramesEntry.setValue(10)
            self.amplitudeEntry.setValue(0.1)

            try:
                self.cell = self.netcdf.variables['box_size'][:]
            except KeyError:
                self.cell = None
                        
            self.netcdf.close()

        return 'break'
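Stripped of the GUI, the pseudo-trajectory built in apply() is just a sinusoidal displacement of the average structure along the selected modes. A standalone numpy sketch of that kernel (function and array names are hypothetical):

import numpy as np

def mode_animation_frames(avg, modes, amplitude, nframes):
    """Frame k is the average structure plus A*sin(2*pi*k/N) times the
    summed mode displacements; avg is (natoms, 3), modes is a list of
    (natoms, 3) displacement arrays."""
    frames = [avg]
    for k in range(nframes):
        disp = sum(modes) * amplitude * np.sin(2.0 * np.pi * k / nframes)
        frames.append(avg + disp)
    return frames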
Exemplo n.º 57
0
vel[1][1:nx+2,1:ny+2] = nf.Cy

# dimension-independence logic (contd.)
advop_X = Adv(advop,psi,vel[0])
advop_Y = Adv(advop,psi_sw,vel_sw[1])

# integration loop
for t in range(1,nt+1):
    n = 0 # for human readability :)

    # filling the halos
    advop_X.fill_halos(n)
    advop_Y.fill_halos(n)

    # advecting in each dimension
    psi[n+1][:] = psi[n]
    advop_X.advect(advop,n)
    advop_Y.advect(advop,n)

    # outputting to the netCDF
    if t%no == 0:
        nf.variables['psi'][t/no,:] = (
          psi[n+1][advop.halo:nx+advop.halo, advop.halo:ny+advop.halo].astype('f')
        )

    # cycling the pointers
    psi[n][:] = psi[n+1]

# closing the netCDF file
nf.close()
Exemplo n.º 58
0
                  dest="zero",
                  default=0,
                  help="zero value")
parser.add_option("-o",
                  "--one",
                  action="store",
                  type="float",
                  dest="one",
                  default=1,
                  help="one value")

#parse command line options
(options, args) = parser.parse_args()
print options
if (len(args) != 1):
    parser.error("incorrect number of arguments")
filename = args[0]
print 'filename', filename
infile = NetCDFFile(filename, 'a')
invar = infile.variables['inputs']
inputs = invar.getValue()
print inputs.shape
for i in range(len(inputs)):
    for j in range(len(inputs[0])):
        if inputs[i][j] > 0:
            inputs[i][j] = options.one
        else:
            inputs[i][j] = options.zero
invar.assignValue(inputs)
infile.close()
Exemplo n.º 59
0
class _ParNetCDFFile(ParBase):

    """
    Distributed netCDF file

    A ParNetCDFFile object acts as much as possible like a NetCDFFile object.
    Variables become ParNetCDFVariable objects, which behave like
    distributed sequences. Variables that use the dimension named by
    |split_dimension| are automatically distributed among the processors
    such that each treats only one slice of the whole file.
    """

    def __parinit__(self, pid, nprocs, filename, split_dimension,
                    mode = 'r', local_access = False):
        """
        @param filename: the name of the netCDF file
        @type filename: C{str}
        @param split_dimension: the name of the dimension along which the data
                                is distributed over the processors
        @type split_dimension: C{str}
        @param mode: read ('r'), write ('w'), or append ('a')
        @type mode: C{str}
        @param local_access: if C{False}, processor 0 is the only one to
                             access the file, all others communicate with
                             processor 0. If C{True} (only for reading), each
                             processor accesses the file directly. In the
                             latter case, the file must be accessible on all
                             processors under the same name. A third mode is
                             'auto', which uses some heuristics to decide
                             if the file is accessible everywhere: it checks
                             for existence of the file, then compares
                             the size on all processors, and finally verifies
                             that the same variables exist everywhere, with
                             identical names, types, and sizes.
        @type local_access: C{bool} or C{str}
        """
        if mode != 'r':
            local_access = 0
        self.pid = pid
        self.nprocs = nprocs
        self.filename = filename
        self.split = split_dimension
        self.local_access = local_access
        self.read_only = mode == 'r'
        if local_access or pid == 0:
            self.file = NetCDFFile(filename, mode)
            try:
                length = self.file.dimensions[split_dimension]
                if length is None:
                    length = -1
            except KeyError:
                length = None
            variables = {}
            for name, var in self.file.variables.items():
                variables[name] = (name, var.dimensions)
                if length < 0 and split_dimension in var.dimensions:
                    index = list(var.dimensions).index(split_dimension)
                    length = var.shape[index]
        else:
            self.file = None
            self.split = split_dimension
            length = None
            variables = None
        if not local_access:
            length = self.broadcast(length)
            variables = self.broadcast(variables)
        if length is not None:
            self._divideData(length)
        self.variables = {}
        for name, var in variables.items():
            self.variables[name] = _ParNetCDFVariable(self, var[0], var[1],
                                                      split_dimension)

    def __repr__(self):
        return repr(self.filename)

    def close(self):
        if self.local_access or self.pid == 0:
            self.file.close()

    def createDimension(self, name, length):
        if name == self.split:
            if length is None:
                raise ValueError("Split dimension cannot be unlimited")
            self._divideData(length)
        if self.pid == 0:
            self.file.createDimension(name, length)

    def createVariable(self, name, typecode, dimensions):
        if self.pid == 0:
            var = self.file.createVariable(name, typecode, dimensions)
            dim = var.dimensions
        else:
            dim = 0
        name, dim = self.broadcast((name, dim))
        self.variables[name] = _ParNetCDFVariable(self, name, dim, self.split)
        return self.variables[name]

    def _divideData(self, length):
        chunk = (length+self.nprocs-1)/self.nprocs
        self.first = min(self.pid*chunk, length)
        self.last = min(self.first+chunk, length)
        if (not self.local_access) and self.pid == 0:
            self.parts = []
            for pid in range(self.nprocs):
                first = pid*chunk
                last = min(first+chunk, length)
                self.parts.append((first, last))

    def sync(self):
        if self.pid == 0:
            self.file.sync()
    flush = sync
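_divideData uses ceiling division so every processor gets an equal-size slice except possibly the last; a standalone check of that arithmetic:

# Worked example of the slicing done in _divideData (Python 2 division).
length, nprocs = 10, 4
chunk = (length + nprocs - 1) / nprocs  # ceiling division -> 3
parts = [(min(pid * chunk, length), min(pid * chunk + chunk, length))
         for pid in range(nprocs)]
print parts  # [(0, 3), (3, 6), (6, 9), (9, 10)]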
Exemplo n.º 60
0
class ETSFWriter:
    def __init__(self, filename):
        from Scientific.IO.NetCDF import NetCDFFile
        self.nc = NetCDFFile(filename, 'w')

        self.nc.file_format = 'ETSF Nanoquanta'
        self.nc.file_format_version = np.array([3.3], dtype=np.float32)
        self.nc.Conventions = 'http://www.etsf.eu/fileformats/'
        self.nc.history = 'File generated by ASE'

    def write_atoms(self, atoms):
        specie_a = np.empty(len(atoms), np.int32)
        nspecies = 0
        species = {}
        numbers = []
        for a, Z in enumerate(atoms.get_atomic_numbers()):
            if Z not in species:
                species[Z] = nspecies
                nspecies += 1
                numbers.append(Z)
            specie_a[a] = species[Z]
            
        dimensions = [
            ('character_string_length', 80),
            ('number_of_atoms', len(atoms)),
            ('number_of_atom_species', nspecies),
            ('number_of_cartesian_directions', 3),
            ('number_of_reduced_dimensions', 3),
            ('number_of_vectors', 3)]

        for name, size in dimensions:
            self.nc.createDimension(name, size)

        var = self.add_variable
        
        var('primitive_vectors',
            ('number_of_vectors', 'number_of_cartesian_directions'),
            atoms.cell / Bohr, units='atomic units')
        var('atom_species', ('number_of_atoms',), specie_a + 1)
        var('reduced_atom_positions',
            ('number_of_atoms', 'number_of_reduced_dimensions'),
            atoms.get_scaled_positions())
        var('atomic_numbers', ('number_of_atom_species',),
            np.array(numbers, dtype=float))

    def close(self):
        self.nc.close()
    
    def add_variable(self, name, dims, data=None, **kwargs):
        if data is None:
            char = 'd'
        else:
            if isinstance(data, np.ndarray):
                char = data.dtype.char
            elif isinstance(data, float):
                char = 'd'
            elif isinstance(data, int):
                char = 'i'
            else:
                char = 'c'

        var = self.nc.createVariable(name, char, dims)
        for attr, value in kwargs.items():
            setattr(var, attr, value)
        if data is not None:
            if len(dims) == 0:
                var.assignValue(data)
            else:
                if char == 'c':
                    if len(dims) == 1:
                        var[:len(data)] = data
                    else:
                        for i, x in enumerate(data):
                            var[i, :len(x)] = x
                else:
                    var[:] = data
        return var
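A usage sketch for the writer (it assumes numpy as np and Bohr from ase.units are in scope, as the class itself does; the structure below is illustrative):

# Hypothetical usage of ETSFWriter with a toy structure.
from ase.build import bulk

atoms = bulk('Si', 'diamond', a=5.43)  # example crystal, not from the source
w = ETSFWriter('si.etsf.nc')
w.write_atoms(atoms)
w.close()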