Exemple #1
0
 def openOutputFile(self):
     """Open (or create) the per-case NetCDF output file for feedback results.

     Returns (NsnapsDone, File): the number of snapshots already written
     (0 for a fresh file) and the open NetCDF handle.  Exits the process
     when all snapshots have already been done.
     """
     # Create file (directory creation is a no-op if it already exists)
     os.system('mkdir -p results/%s-%s' % (self.Case1,self.Case2))
     FileName = 'results/%s-%s/Cam3Feedbacks.%s-%s.%03i.nc' % \
                (self.Case1,self.Case2,self.Case1,self.Case2,self.FileNumber)
     if not os.path.exists(FileName):
         print 'creating %s ...' % FileName
         File = NetCDFFile(FileName,'w')
         # Define lat/lon coordinate dimensions and variables
         File.createDimension('lat',len(self.data.lat))
         var = File.createVariable('lat','f',('lat',))
         var.long_name = 'latitude'
         var.units = 'degrees_north'
         var[:] = self.data.lat.astype('f')
         File.createDimension('lon',len(self.data.lon))
         var = File.createVariable('lon','f',('lon',))
         var.long_name = 'longitude'
         var.units = 'degrees_east'
         var[:] = self.data.lon.astype('f')
         # create variables: one 2-D (lat,lon) field per radiative feedback term
         for Field in ['dR_Ts','dR_lapse','dR_q','dR_cld_sw','dR_cld_lw','dR_alb','dR_co2']:
             var = File.createVariable(Field,'f',('lat','lon'))
             var.long_name = 'TOA radiative perturbation'
             var.units = 'W m-2'
             var[:,:] = 0.
         # Track progress as a global attribute so a restart can resume
         File.NsnapsDone = 0
         return 0, File
     else:
         # File exists: append mode, resuming from the recorded snapshot count
         File = NetCDFFile(FileName,'a')
         NsnapsDone = int(File.NsnapsDone[0])
         if NsnapsDone < len(self.data.time):
             return NsnapsDone, File
         else:
             print 'No more snaps to be done'
             sys.exit(0)
    def init(self,agent,env,**kw):
        """Initialise the RL interface and create the NetCDF logging files.

        Creates one file for per-episode data and, if per-step variables
        are configured, a second file for per-step data.  Any existing
        files are removed or renamed first.
        """
        super(LoggingRLI,self).init(agent,env,**kw)

        self.step_count = self.ep_count = 0

        # Don't silently clobber an existing episode file
        if os.access(self.episode_filename,os.F_OK):
            self.remove_or_rename(self.episode_filename)

        self.episode_data = ed = NetCDFFile(self.episode_filename,'w')
        # 'index' is the unlimited record dimension; scalars use 'value' (length 1)
        ed.createDimension('index',None)
        ed.createDimension('value',1)
        ed.createVariable('start','d',('index','value'))
        ed.createVariable('length','d',('index','value'))
        ed.createVariable('reward','f',('index','value'))

        # One variable (with its own size dimension) per configured episode var
        for name,(fn,type,size) in self.ep_vars.items():
            ed.createDimension(name+'_dim',size)
            ed.createVariable(name,type,('index',name+'_dim'))

        if self.step_vars:
            if os.access(self.step_filename,os.F_OK):
                self.remove_or_rename(self.step_filename)

            # NOTE(review): opened in 'a' mode unlike the episode file's 'w' --
            # confirm this is intentional given the file was just removed/renamed.
            self.step_data = sd = NetCDFFile(self.step_filename,'a')
            sd.createDimension('index',None)
            for name,(fn,type,size) in self.step_vars.items():
                sd.createDimension(name+'_dim',size)
                sd.createVariable(name,type,('index',name+'_dim'))

        self.last_ckpt_step = 0
        self.last_ckpt_episode = 0
Exemple #3
0
def modify_filter(gridfilename, ttlname, indflag=1):
    """Update units/title metadata on a grid file in place.

    When indflag is truthy, a 'degrees' units attribute is stamped onto
    the four coordinate variables; stale global attributes are removed.
    """
    timestamp = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))

    ncfile = Dataset(gridfilename, 'a')

    if indflag:
        # Tag every coordinate variable with a 'degrees' units attribute.
        for varname in ('grid_center_lat', 'grid_center_lon',
                        'grid_corner_lat', 'grid_corner_lon'):
            setattr(ncfile.variables[varname], 'units', 'degrees')

    setattr(ncfile, 'title', ttlname)
    setattr(ncfile, 'modifydate', timestamp)

    # Drop global attributes that no longer apply, if present.
    for attrname in ('grid_name', 'map_method'):
        if hasattr(ncfile, attrname):
            delattr(ncfile, attrname)

    ncfile.sync()
    ncfile.close()
Exemple #4
0
    def open(self, filename):
        """Open *filename* read-only and index its variable names."""
        self.ncfile = NetCDFFile(filename, 'r')

        # Map every variable name to True for quick membership tests.
        self.VarNameList = dict.fromkeys(self.ncfile.variables.keys(), True)
Exemple #5
0
class CoordTransfer(Exception):
    """Copy grid-center coordinates from a source grid into a new grid file.

    NOTE: inherits from Exception as in the original design, although it is
    used as a plain object rather than raised.
    """

    def __init__(self, srcfile, dstfile, newfile):
        # srcfile supplies center coords; dstfile supplies grid structure;
        # newfile is the combined output path.
        self.srcfile = srcfile
        self.dstfile = dstfile
        self.newfile = newfile

    def loadsrcoords(self):
        """Read grid_center_lat/lon from the source file into private lists."""
        self.ncfile = Dataset(self.srcfile, 'r')
        lat_values = self.ncfile.variables['grid_center_lat'][:]
        lon_values = self.ncfile.variables['grid_center_lon'][:]
        self.__grid_center_lat = lat_values.tolist()
        self.__grid_center_lon = lon_values.tolist()

    def loadstinfo(self):
        """Load grid structure (size, corners, rank, dims, mask) from dstfile."""
        self.nc_obj = Loadnc(self.dstfile)
        (self.grid_size, self.grid_corners, self.grid_rank, self.grid_dims,
         ach1, ach2, self.grid_imask) = self.nc_obj.load()

    def transfercoord(self):
        """Write the new grid file: dest structure + source center coords."""
        self.resncfile = Dataset(self.newfile, 'w')
        timestamp = time.strftime('%Y-%m-%d %H:%M:%S',
                                  time.localtime(time.time()))

        # Dimensions copied from the destination grid.
        for dimname, dimsize in (('grid_size', self.grid_size),
                                 ('grid_rank', self.grid_rank),
                                 ('grid_corners', self.grid_corners)):
            self.resncfile.createDimension(dimname, dimsize)

        # Variables: integer dims, double centers (degrees), integer mask.
        grid_dims_var = self.resncfile.createVariable(
            'grid_dims', dtype('int32').char, ('grid_rank',))
        center_lat = self.resncfile.createVariable(
            'grid_center_lat', dtype('d').char, ('grid_size',))
        center_lat.units = 'degrees'
        center_lon = self.resncfile.createVariable(
            'grid_center_lon', dtype('d').char, ('grid_size',))
        center_lon.units = 'degrees'
        imask = self.resncfile.createVariable(
            'grid_imask', dtype('i').char, ('grid_size',))
        imask.units = 'unitless'

        grid_dims_var[:] = self.grid_dims
        center_lat[:] = np.array(self.__grid_center_lat)
        center_lon[:] = np.array(self.__grid_center_lon)
        imask[:] = np.array([np.int32(v) for v in self.grid_imask])

        # Global provenance attributes.
        setattr(self.resncfile, 'title', 'Threp ' + self.newfile)
        setattr(self.resncfile, 'createdate', timestamp)
        setattr(self.resncfile, 'conventions', 'Threp')
        setattr(self.resncfile, 'grid', self.newfile)

    def finish(self):
        """Close the output file and the destination loader."""
        self.resncfile.close()
        self.nc_obj.closenc()
Exemple #6
0
 def __init__(self, filename, whatToRead=None):
     """Construct a reader from a filename, appending '.nc' if required."""
     self.whatToRead = whatToRead
     try:
         nc = NetCDFFile(filename, 'r')
     except IOError:
         # The bare name failed; retry with an explicit extension.
         nc = NetCDFFile(filename + ".nc", 'r')
     self.nc = nc
Exemple #7
0
 def __init__(self, filename, whatToRead = None):
     """Construct a reader from a filename."""
     self.whatToRead = whatToRead
     try:
         self.nc = NetCDFFile(filename, 'r')
     except IOError:
         # Fall back to the name with a '.nc' suffix appended;
         # a second IOError propagates to the caller.
         self.nc = NetCDFFile("%s.nc" % filename, 'r')
Exemple #8
0
def modify_filter(gridfilename, ttlname, indflag = 1):
    """Stamp units/title metadata onto a grid file, removing stale attrs."""
    now = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))

    ncfile = Dataset(gridfilename, 'a')

    if indflag:
        # Mark all four coordinate variables as degree-valued.
        variables = ncfile.variables
        variables['grid_center_lat'].units = 'degrees'
        variables['grid_center_lon'].units = 'degrees'
        variables['grid_corner_lat'].units = 'degrees'
        variables['grid_corner_lon'].units = 'degrees'

    ncfile.title = ttlname
    ncfile.modifydate = now

    # Remove global attributes that no longer apply.
    if hasattr(ncfile, 'grid_name'):
        del ncfile.grid_name

    if hasattr(ncfile, 'map_method'):
        del ncfile.map_method

    ncfile.sync()
    ncfile.close()
 def __init__(self, filename):
     """Open the trajectory file and determine its block size."""
     self.filename = filename
     self.file = NetCDFFile(self.filename, 'r')
     # Files without a 'minor_step_number' dimension use unit-sized blocks.
     dims = self.file.dimensions
     if 'minor_step_number' in dims:
         self.block_size = dims['minor_step_number']
     else:
         self.block_size = 1
     self._countSteps()
Exemple #10
0
    def __init__(self, filename):
        """Create an ETSF (Nanoquanta) NetCDF file and stamp global metadata."""
        from Scientific.IO.NetCDF import NetCDFFile

        nc = NetCDFFile(filename, 'w')
        nc.file_format = 'ETSF Nanoquanta'
        nc.file_format_version = np.array([3.3], dtype=np.float32)
        nc.Conventions = 'http://www.etsf.eu/fileformats/'
        nc.history = 'File generated by ASE'
        self.nc = nc
 def select_file(self, filename):
     """Record *filename* and show the molecular_trace array shape in the UI.

     Raises DensitySuperpositionError when the file lacks a
     'molecular_trace' variable.
     """
     self.currentFilename = filename
     traj = NetCDFFile(filename,"r")
     variables = traj.variables

     if not variables.has_key('molecular_trace'):
         raise DensitySuperpositionError('Trace file format not compatible with Plugin')

     # Display the shape of the trace grid in the dialog field.
     self.dim.SetValue(str(variables['molecular_trace'].getValue().shape))
     traj.close()
Exemple #12
0
def OPENPIV2D2C(filename, ux_out, uy_out, x_out, y_out, flag1, flag2, flag3):
    """Storage in NetCDF format: 2D2C PIV datas with 3 flags used in OPENPIV"""
    # open a new netCDF file for writing.
    ncfile = Dataset(filename, 'w')
    # create the x and y dimensions.
    nx, ny = ux_out.shape
    ncfile.createDimension('x', nx)
    ncfile.createDimension('y', ny)
    # create the variable (4 byte integer in this case)
    # first argument is name of variable, second is datatype, third is
    # a tuple with the names of dimensions.
    #data = ncfile.createVariable('data',np.dtype('int32').char,('x','y'))
    xvar = ncfile.createVariable('xvar', 'd', ('x', 'y'))
    yvar = ncfile.createVariable('yvar', 'd', ('x', 'y'))
    ux = ncfile.createVariable('ux', 'd', ('x', 'y'))
    uy = ncfile.createVariable('uy', 'd', ('x', 'y'))
    Flags1 = ncfile.createVariable('flag1', 'd', ('x', 'y'))
    Flags2 = ncfile.createVariable('flag2', 'd', ('x', 'y'))
    Flags3 = ncfile.createVariable('flag3', 'd', ('x', 'y'))
    # write data to variable.
    xvar[:] = x_out
    yvar[:] = y_out
    ux[:] = ux_out
    uy[:] = uy_out
    Flags1[:] = flag1
    Flags2[:] = flag2
    Flags3[:] = flag3
    # close the file.
    ncfile.close()
    print '*** SUCCESS writing:', filename
def get_single_model_data(dataset,start_dates,var,lat,lon,plev=None,models='all',n_ens=None):
    """Read single-model seasonal-forecast data for one variable/location.

    Arguments:
      dataset     -- archive name, e.g. 'ENSEMBLES'
      start_dates -- iterable of forecast start dates
      var         -- intuitive variable name (translated via get_var_data_name)
      lat, lon    -- coordinates used to build the file name
      plev        -- optional pressure level, included in the file name
      models      -- 'all' (expanded for ENSEMBLES) or explicit list
      n_ens       -- optionally restrict to the first n_ens ensemble members

    Returns a dict keyed by start date then model, holding data,
    verification months, lead times and lead-time units.
    Raises ValueError when the lead-time dimension cannot be identified.
    """
    data_dir_main=get_data_dir_main()

    fname_coords=get_fname_coords(lat,lon,plev=plev)

    fname='get_seasfc_data_'+dataset+'_'+var+fname_coords

    nc_file=NetCDFFile(data_dir_main+fname+'.nc')

    #Convert intuitive variable name to name used inside files
    var_data_name=get_var_data_name(var)

    if models=='all':
        if dataset=='ENSEMBLES':
            models=['ECMWF', 'INGV', 'Kiel', 'MetFr', 'MO']

    data_fc_sm={}  #dict to hold single model forecasts
    for start_date in start_dates:
        data_fc_sm[start_date]={}

        for model in models:
            data_name=model.lower()+'_'+start_date.lower()+'_'+var_data_name
            data_fc_sm[start_date][model]={}
            if n_ens:
                #data has dimensions [forecast month, year of forecast start, ensemble member]
                data_fc_sm[start_date][model]['data']=nc_file.variables[data_name][...,:n_ens]
            else:
                data_fc_sm[start_date][model]['data']=nc_file.variables[data_name][:]

            #Get the array containing the corresponding dates of the verification
            #dataset in YYYYMM format ([forecast month, year of forecast start]).
            #Its name is embedded in the variable's 'coordinates' attribute.
            coords_attr=getattr(nc_file.variables[data_name],'coordinates')
            ver_month_arr_name_pos=coords_attr.find('verifying_month')
            ver_month_arr_name=coords_attr[ver_month_arr_name_pos:ver_month_arr_name_pos+17]
            data_fc_sm[start_date][model]['verifying_month']=nc_file.variables[ver_month_arr_name][:]

            #Identify which lead-time dimension matches the data's first axis
            n_lead=data_fc_sm[start_date][model]['data'].shape[0]
            if len(nc_file.variables['forecast_lead_month'])==n_lead:
                lead_time_dim_name='forecast_lead_month'
            elif 'forecast_lead_month_0' in nc_file.variables and len(nc_file.variables['forecast_lead_month_0'])==n_lead:
                lead_time_dim_name='forecast_lead_month_0'
            else:
                #BUGFIX: previously only printed a warning and then crashed
                #with a NameError on the undefined dimension name.
                raise ValueError('Forecast lead time dimension not known: %s %s %s'
                                 % (dataset, start_date, model))

            data_fc_sm[start_date][model]['lead times']=nc_file.variables[lead_time_dim_name][:]

            #BUGFIX: lead_time_units was previously undefined (NameError) for
            #datasets other than ENSEMBLES; default to None instead.
            lead_time_units='months' if dataset=='ENSEMBLES' else None

            data_fc_sm[start_date][model]['lead time units']=lead_time_units

    nc_file.close()

    return data_fc_sm
def test_modis():
    """Read a MODIS L2 cloud granule and write time/lat/lon/cloud-water-path
    arrays to modis_1km.nc as float32 along a single 'time' dimension."""
    modis_file      = "MYD06_L2.A2010100.0755.051.2010108054555.hdf"
    print "****** Reading MODIS data from file: ", modis_file
    modis           = FEMODIS.front_end_modis_cloud_1km_dev(modis_file)
    # Pull per-pixel track arrays from the MODIS front end
    tim=modis.get_time()
    lat=modis.get_latitude()
    lon=modis.get_longitude() 
    dat=modis.get_data()
    print dat.keys()
    cwp=dat['Cloud_Water_Path']
 
    # print lat, lon, lwp
    ncfile = Dataset('modis_1km.nc','w')
    ndim = len(lat)
    ncfile.createDimension('time',ndim)
    time = ncfile.createVariable('time',dtype('float32').char,('time', ))
    lats = ncfile.createVariable('latitude',dtype('float32').char,('time', ))
    lons = ncfile.createVariable('longitude',dtype('float32').char,('time', ))
    cwps = ncfile.createVariable('cloud_water_path',dtype('float32').char,('time', ))
    time[:] = N.cast['float32'](tim)
    lats[:] = N.cast['float32'](lat)
    lons[:] = N.cast['float32'](lon)
    # NOTE(review): only element [1] of cwp is written while the other
    # variables write full arrays -- confirm the intended slice.
    cwps[:] = N.cast['float32'](cwp[1])
    ncfile.close()    
Exemple #15
0
def OPENPIV2D2C(filename,ux_out,uy_out,x_out,y_out,flag1,flag2,flag3):
    """Storage in NetCDF format: 2D2C PIV datas with 3 flags used in OPENPIV"""
    # open a new netCDF file for writing.
    ncfile = Dataset(filename,'w') 
    # create the x and y dimensions.
    nx,ny=ux_out.shape
    ncfile.createDimension('x',nx)
    ncfile.createDimension('y',ny)
    # create the variable (4 byte integer in this case)
    # first argument is name of variable, second is datatype, third is
    # a tuple with the names of dimensions.
    #data = ncfile.createVariable('data',np.dtype('int32').char,('x','y'))
    xvar = ncfile.createVariable('xvar','d',('x','y'))
    yvar = ncfile.createVariable('yvar','d',('x','y'))
    ux = ncfile.createVariable('ux','d',('x','y'))
    uy = ncfile.createVariable('uy','d',('x','y'))
    Flags1 = ncfile.createVariable('flag1','d',('x','y'))
    Flags2 = ncfile.createVariable('flag2','d',('x','y'))
    Flags3 = ncfile.createVariable('flag3','d',('x','y'))
    # write data to variable.
    xvar[:] = x_out
    yvar[:] = y_out
    ux[:] = ux_out
    uy[:] = uy_out
    Flags1[:] = flag1
    Flags2[:] = flag2
    Flags3[:] = flag3
    # close the file.
    ncfile.close()
    print '*** SUCCESS writing:',filename
Exemple #16
0
def load_rmpwfile(fname):
    """Load remap-weight file contents as plain Python lists.

    Returns (src_lat, src_lon, dst_lat, dst_lon, src_indx, dst_indx, matrix).
    """
    ncfile = Dataset(fname, 'r')
    names = ('src_grid_center_lat', 'src_grid_center_lon',
             'dst_grid_center_lat', 'dst_grid_center_lon',
             'remap_src_indx', 'remap_dst_indx', 'remap_matrix')
    # Read each variable in full and convert to a list, preserving order.
    values = [ncfile.variables[name][:].tolist() for name in names]
    ncfile.close()
    return tuple(values)
Exemple #17
0
def open_rl_data(filespec):
    """Open an episode-data file (and its paired step file, if present).

    A list argument is handled recursively, returning one result per entry.
    Returns (episode_data, step_data) where step_data is None when no
    matching '-steps.cdf' file exists.
    """
    if isinstance(filespec, list):
        return map(open_rl_data, filespec)
    episode_data = NetCDFFile(filespec, 'r')
    # The step file lives next to the episode file with a parallel name.
    step_file_name = filespec.split('-episodes.cdf')[0] + '-steps.cdf'
    if os.access(step_file_name, os.F_OK):
        step_data = NetCDFFile(step_file_name, 'r')
    else:
        step_data = None
    return episode_data, step_data
def load_file(file_name = file_name, time_start = time_start, 
		      time_end = time_end, lat_start = lat_start, lat_end = lat_end,
	              lon_start = lon_start, lon_end = lon_end, masked_value = masked_value):
	"""Load a Cflx subvolume from a NetCDF file, mask fill values, and scale.

	NOTE(review): all defaults reference module-level globals of the same
	names, evaluated at definition time -- confirm they are defined before
	this function in the enclosing module.
	"""
	nc = NetCDFFile(file_name, 'r')
	new_array = nc.variables['Cflx'][time_start:time_end, lat_start:lat_end, 
                                     lon_start:lon_end]
	nc.close()
	# Mask the sentinel fill value, then rescale by 1e8
	new_array = ma.masked_values(new_array, masked_value)
	new_array = new_array*1e08
	return new_array    
Exemple #19
0
class CoordTransfer(Exception):
  """Copy grid-center coordinates from a source grid into a new grid file.

  NOTE(review): inherits from Exception but is used as a plain object.
  """

  def __init__(self, srcfile, dstfile, newfile):
    # srcfile supplies center coords; dstfile supplies grid structure;
    # newfile is the combined output path.
    self.srcfile = srcfile
    self.dstfile = dstfile
    self.newfile = newfile
    
  def loadsrcoords(self):
    """Read grid_center_lat/lon from the source file into private lists."""
    self.ncfile = Dataset(self.srcfile, 'r')
    variable_name = 'grid_center_lat'
    __grid_center_lat = self.ncfile.variables[variable_name][:]
    variable_name = 'grid_center_lon'
    __grid_center_lon = self.ncfile.variables[variable_name][:]
    # Stored as name-mangled private attributes, read by transfercoord().
    self.__grid_center_lat = __grid_center_lat.tolist()
    self.__grid_center_lon = __grid_center_lon.tolist()
    
  def loadstinfo(self):
   """Load grid structure (size, corners, rank, dims, mask) from dstfile."""
   self.nc_obj = Loadnc(self.dstfile)
   self.grid_size, self.grid_corners, self.grid_rank, self.grid_dims, ach1, ach2, self.grid_imask = self.nc_obj.load()
   
  def transfercoord(self):
    """Write the new grid file: dest structure + source center coords."""
    self.resncfile = Dataset(self.newfile, 'w')
    tm = time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(time.time()))
    
    # set dimension info 
    self.resncfile.createDimension('grid_size', self.grid_size)
    self.resncfile.createDimension('grid_rank', self.grid_rank)
    self.resncfile.createDimension('grid_corners', self.grid_corners)

    # set variable info
    grid_dims_var = self.resncfile.createVariable('grid_dims', dtype('int32').char, ('grid_rank',))
    grid_center_lat_var = self.resncfile.createVariable('grid_center_lat', dtype('d').char, ('grid_size',))
    grid_center_lat_var.units = 'degrees'
    grid_center_lon_var = self.resncfile.createVariable('grid_center_lon', dtype('d').char, ('grid_size',))
    grid_center_lon_var.units = 'degrees'
    grid_imask_var = self.resncfile.createVariable('grid_imask', dtype('i').char, ('grid_size',))
    grid_imask_var.units = 'unitless'

    grid_dims_var[:] = self.grid_dims
    grid_center_lat_var[:] = np.array(self.__grid_center_lat)
    grid_center_lon_var[:] = np.array(self.__grid_center_lon)
    # Cast the mask to int32 before writing.
    buffer1 = [np.int32(i) for i in self.grid_imask]
    grid_imask_var[:] = np.array(buffer1)

    # Global provenance attributes.
    setattr(self.resncfile, 'title', 'Threp ' + self.newfile)
    setattr(self.resncfile, 'createdate', tm)
    setattr(self.resncfile, 'conventions', 'Threp')
    setattr(self.resncfile, 'grid', self.newfile)

  def finish(self):
    """Close the output file and the destination loader."""
    self.resncfile.close()
    self.nc_obj.closenc()
Exemple #20
0
    def __init__(self,
                 quantity_name,
                 file_name,
                 time_step_count,
                 time_step,
                 lon,
                 lat):
        """Instantiate a Write_nc instance (NetCDF file writer).

        time_step_count is the number of time steps.
        time_step is the time step size

        pre-condition: quantity_name must be 'HA', 'UA'or 'VA'.
        """

        self.quantity_name = quantity_name
        # Output units per supported quantity (cm, cm/s)
        quantity_units = {'HA':'CENTIMETERS',
                          'UA':'CENTIMETERS/SECOND',
                          'VA':'CENTIMETERS/SECOND'}

        multiplier_dic = {'HA':100.0,   # To convert from m to cm
                          'UA':100.0,   #             and m/s to cm/sec
                          'VA':-100.0}  # MUX files have positive x in the
                                        # Southern direction.  This corrects
                                        # for it, when writing nc files.

        # A KeyError here enforces the pre-condition on quantity_name
        self.quantity_multiplier =  multiplier_dic[self.quantity_name]

        #self.file_name = file_name
        self.time_step_count = time_step_count
        self.time_step = time_step

        # NetCDF file definition
        self.outfile = NetCDFFile(file_name, netcdf_mode_w)
        outfile = self.outfile

        #Create new file
        nc_lon_lat_header(outfile, lon, lat)

        # TIME: unlimited record dimension
        outfile.createDimension(time_name, None)
        outfile.createVariable(time_name, precision, (time_name,))

        #QUANTITY: one value per (time, lat, lon)
        outfile.createVariable(self.quantity_name, precision,
                               (time_name, lat_name, lon_name))
        outfile.variables[self.quantity_name].missing_value = -1.e+034
        outfile.variables[self.quantity_name].units = \
                                 quantity_units[self.quantity_name]
        outfile.variables[lon_name][:]= ensure_numeric(lon)
        outfile.variables[lat_name][:]= ensure_numeric(lat)
def check_results(outfile, vobsfile, vobsvariable, balancevel):
    """Compare modelled velocity against observations over grounded ice.

    Prints the RMSE between model velocity (balance or surface, selected
    by *balancevel*) and the observed velocity variable, restricted to
    grounded-ice cells.
    """
    ###### check the results
    fo = NetCDFFile(outfile, 'r')
    thickness = fo.variables['thickness'][0, :]
    ux = fo.variables['uReconstructX'][0, :, :]  # use the first time level
    uy = fo.variables['uReconstructY'][0, :, :]  # use the first time level
    if balancevel:
        # NOTE(review): ux[:, 0] looks 1-D here, so .mean(1) would fail;
        # possibly (ux**2 + uy**2)**0.5 over all layers was intended --
        # confirm against the data layout.
        vModel = ((ux[:, 0]**2 + uy[:, 0]**2)**0.5).mean(
            1)  # get vertically averaged velocity
    else:
        vModel = (ux[:, 0]**2 +
                  uy[:, 0]**2)**0.5  # surface velocity (top vertical layer)

    cellMask = fo.variables['cellMask'][0, :]
    ind = (
        (cellMask & 4) == 4
    )  # 4 bit = grounded ice  # this gets the indices that are grounded ice.

    fv = NetCDFFile('../' + vobsfile, 'r')
    vOb = fv.variables[vobsvariable][0, :]

    rmse = (((vModel[ind] - vOb[ind])**2).sum() /
            ind.sum())**0.5  # TODO  check if this is calculating right
    print "RMSE=" + str(rmse)
    fo.close()
    fv.close()
Exemple #22
0
class Loadreal(Exception):
    """Thin reader for a NetCDF file holding a 'data' variable on 'grid_size'.

    NOTE: subclasses Exception as in the original design, though it is used
    as an ordinary object.
    """

    def __init__(self, file_name):
        # Keep the path and an open read-only handle.
        self.filename = file_name
        self.ncfile = Dataset(file_name, 'r')

    def closenc(self):
        """Close the underlying NetCDF file."""
        self.ncfile.close()

    def load(self):
        """Return (grid_size dimension, full 'data' array)."""
        grid_size = self.ncfile.dimensions['grid_size']
        data = self.ncfile.variables['data'][:]
        return grid_size, data
Exemple #23
0
    def gen(self):
        ncfile = Dataset(self.fname, 'w')
        tm = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))

        # set dimension info
        ncfile.createDimension('grid_size', self.obj.grid_size)

        # set variable info
        grid_center_lat_var = ncfile.createVariable('grid_center_lat',
                                                    dtype('d').char,
                                                    ('grid_size', ))
        grid_center_lon_var = ncfile.createVariable('grid_center_lon',
                                                    dtype('d').char,
                                                    ('grid_size', ))
        physical_variable = ncfile.createVariable('physical_variable',
                                                  dtype('d').char,
                                                  ('grid_size', ))

        grid_center_lat_var[:] = np.array(self.obj.grid_center_lat)
        grid_center_lon_var[:] = np.array(self.obj.grid_center_lon)
        physical_variable[:] = np.array(self.obj.physical_variable)

        setattr(ncfile, 'title', 'Threp ' + self.fname)
        setattr(ncfile, 'createdate', tm)
        setattr(ncfile, 'map_method', self.method)
        setattr(ncfile, 'conventions', 'Threp')
        setattr(ncfile, 'src_grid', self.obj.src_grid_name)
        setattr(ncfile, 'dst_grid', self.obj.dst_grid_name)

        ncfile.close()
        print '*** Successfully generated netcdf file for ncl usage. ***'
Exemple #24
0
    def __init__(self, filename='gpaw', title='gpaw'):
        """Open *filename* for writing as an ETSF file and set global attrs.

        The filename is normalised to end in '-etsf.nc'.
        """
        if not filename.endswith('-etsf.nc'):
            base = filename[:-3] if filename.endswith('.nc') else filename
            filename = base + '-etsf.nc'

        self.nc = NetCDFFile(filename, 'w')

        self.nc.file_format = 'ETSF Nanoquanta'
        self.nc.file_format_version = np.array([3.3], dtype=np.float32)
        self.nc.Conventions = 'http://www.etsf.eu/fileformats/'
        self.nc.history = 'File generated by GPAW'
        self.nc.title = title
Exemple #25
0
def ncep2fall3d(ncep_filename, fall3d_ncep_filename, verbose=True):
    """Convert standard NCEP file to fall3d NCEP format
    """

    # Copy standard NCEP file to fall3d NCEP file
    s = 'cp %s %s' % (ncep_filename, fall3d_ncep_filename)
    os.system(s)

    # Open files
    infile = NetCDFFile(ncep_filename)
    outfile = NetCDFFile(ncep_filename, 'a')

    # Establish special global attributes for fall3 NCEP format

    print 'Found dimensions:', infile.dimensions.keys()
    print 'Found variables:', infile.variables.keys()

    lon = infile.variables['lon'][:]
    lonmin = min(lon)
    lonmax = max(lon)

    lat = infile.variables['lat'][:]
    latmin = min(lat)
    latmax = max(lat)

    nx = infile.dimensions['lon']
    ny = infile.dimensions['lat']
    np = infile.dimensions['pres']
    nt = infile.dimensions['time']
    print nx, ny, np, nt

    infile.close()
    outfile.close()
Exemple #26
0
class Loadreal(Exception):
  """Reader for a NetCDF file exposing a 'data' variable over 'grid_size'.

  NOTE(review): subclasses Exception but is used as a plain object.
  """

  def __init__(self, file_name):
    # Keep the path and an open read-only handle.
    self.filename = file_name
    self.ncfile = Dataset(file_name, 'r')
  
  def closenc(self):
    """Close the underlying file."""
    self.ncfile.close()
  
  def load(self):
    """Return (grid_size dimension, full 'data' array)."""
    dimension_name = 'grid_size'
    grid_size = self.ncfile.dimensions[dimension_name]
    variable_name = 'data'
    data = self.ncfile.variables[variable_name][:]
    return grid_size, data
Exemple #27
0
    def open (self, filename):
        """Open *filename* read-only and record its variable names."""
        self.ncfile = NetCDFFile(filename,'r')

        # Build the name lookup table (name -> True).
        name_list = {}
        for variable_name in self.ncfile.variables.keys():
            name_list[variable_name] = True
        self.VarNameList = name_list
Exemple #28
0
  def transfercoord(self):
    """Write the new grid file: dest structure plus source center coords."""
    self.resncfile = Dataset(self.newfile, 'w')
    tm = time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(time.time()))
    
    # set dimension info 
    self.resncfile.createDimension('grid_size', self.grid_size)
    self.resncfile.createDimension('grid_rank', self.grid_rank)
    self.resncfile.createDimension('grid_corners', self.grid_corners)

    # set variable info
    grid_dims_var = self.resncfile.createVariable('grid_dims', dtype('int32').char, ('grid_rank',))
    grid_center_lat_var = self.resncfile.createVariable('grid_center_lat', dtype('d').char, ('grid_size',))
    grid_center_lat_var.units = 'degrees'
    grid_center_lon_var = self.resncfile.createVariable('grid_center_lon', dtype('d').char, ('grid_size',))
    grid_center_lon_var.units = 'degrees'
    grid_imask_var = self.resncfile.createVariable('grid_imask', dtype('i').char, ('grid_size',))
    grid_imask_var.units = 'unitless'

    # self.__grid_center_lat/lon are name-mangled private attributes,
    # presumably set by a sibling loader method in the enclosing class.
    grid_dims_var[:] = self.grid_dims
    grid_center_lat_var[:] = np.array(self.__grid_center_lat)
    grid_center_lon_var[:] = np.array(self.__grid_center_lon)
    # Cast the mask to int32 before writing.
    buffer1 = [np.int32(i) for i in self.grid_imask]
    grid_imask_var[:] = np.array(buffer1)

    # Global provenance attributes.
    setattr(self.resncfile, 'title', 'Threp ' + self.newfile)
    setattr(self.resncfile, 'createdate', tm)
    setattr(self.resncfile, 'conventions', 'Threp')
    setattr(self.resncfile, 'grid', self.newfile)
Exemple #29
0
 def __parinit__(self, pid, nprocs, filename, split_dimension,
                 mode = 'r', local_access = False):
     """
     @param filename: the name of the netCDF file
     @type filename: C{str}
     @param split_dimension: the name of the dimension along which the data
                             is distributed over the processors
     @type split_dimension: C{str}
     @param mode: read ('r'), write ('w'), or append ('a')
     @type mode: C{str}
     @param local_access: if C{False}, processor 0 is the only one to
                          access the file, all others communicate with
                          processor 0. If C{True} (only for reading), each
                          processor accesses the file directly. In the
                          latter case, the file must be accessible on all
                          processors under the same name. A third mode is
                          'auto', which uses some heuristics to decide
                          if the file is accessible everywhere: it checks
                          for existence of the file, then compares
                          the size on all processors, and finally verifies
                          that the same variables exist everywhere, with
                          identical names, types, and sizes.
     @type local_access: C{bool} or C{str}
     """
     # Writing always funnels through processor 0
     if mode != 'r':
         local_access = 0
     self.pid = pid
     self.nprocs = nprocs
     self.filename = filename
     self.split = split_dimension
     self.local_access = local_access
     self.read_only = mode == 'r'
     if local_access or pid == 0:
         # This process opens the file itself
         self.file = NetCDFFile(filename, mode)
         try:
             length = self.file.dimensions[split_dimension]
             # None marks the unlimited dimension; use -1 as a sentinel so
             # the actual length can be recovered from a variable below
             if length is None:
                 length = -1
         except KeyError:
             length = None
         variables = {}
         for name, var in self.file.variables.items():
             variables[name] = (name, var.dimensions)
             # Recover the unlimited dimension's current length from the
             # first variable that uses it
             if length < 0 and split_dimension in var.dimensions:
                 index = list(var.dimensions).index(split_dimension)
                 length = var.shape[index]
     else:
         # Non-root processes receive metadata via broadcast below
         self.file = None
         self.split = split_dimension
         length = None
         variables = None
     if not local_access:
         length = self.broadcast(length)
         variables = self.broadcast(variables)
     if length is not None:
         # Partition the split dimension across processors
         self._divideData(length)
     # Wrap every variable in a parallel proxy
     self.variables = {}
     for name, var in variables.items():
         self.variables[name] = _ParNetCDFVariable(self, var[0], var[1],
                                                   split_dimension)
    def ckpt_restore_state(self,filename):
        """Restore run state from a checkpoint pickle and reopen data files.

        Returns the checkpoint dict so the caller can resume from it.
        """
        from plastk import pkl
        ckpt = pkl.load(filename)

        self.verbose("Restoring checkpoint state")
        # Restore each tracked attribute from the checkpoint dict
        for a in self.ckpt_attribs:
            self.verbose(a,' = ', ckpt[a])
            setattr(self,a,ckpt[a])
            
        # Re-seed the RNG from the saved state
        rand.seed(*ckpt['rand_seed'])

        # Re-link the simulation references (not stored in the pickle)
        self.env.sim = self.agent.sim = self
        
        # Reopen the NetCDF logs in append mode to continue recording
        self.episode_data = NetCDFFile(self.episode_filename,'a')
        if self.step_vars:
            self.step_data = NetCDFFile(self.step_filename,'a')
        return ckpt
 def __init__(self, filename):
     """Open *filename* and derive the minor-step block size."""
     self.filename = filename
     self.file = NetCDFFile(self.filename, 'r')
     try:
         block = self.file.dimensions['minor_step_number']
     except KeyError:
         # No minor steps recorded: treat each record as one block.
         block = 1
     self.block_size = block
     self._countSteps()
Exemple #32
0
    def __init__(self,
                 Case1='uh0',
                 Case2='uh1',
                 FileNumber=0):
        """Pair up CAM h1 history files from two case directories and open one.

        Case1/Case2 name run directories under $HOME/cam/runs; FileNumber
        selects which file of the paired, sorted lists to open.  Hybrid
        coordinate coefficients and lat/lon/lev/time axes are read from
        the first case's file.
        """
        # Identify files
        # NOTE: assumes directories contain full years of data !!
        Home = os.getenv('HOME')
        Dir = '%s/cam/runs' % Home
        Dir1 = '%s/%s' % (Dir,Case1)
        Dir2 = '%s/%s' % (Dir,Case2)
        FileNames1 = glob.glob('%s/*cam2*h1*.nc' % Dir1)
        FileNames2 = glob.glob('%s/*cam2*h1*.nc' % Dir2)
        FileNames1.sort()
        FileNames2.sort()
        # Truncate the longer list so both cases cover the same period
        N1 = len(FileNames1)
        N2 = len(FileNames2)
        N = min(N1,N2)
        if N1 > N:
            for i in range(N1-N): FileNames1.pop()
        if N2 > N:
            for i in range(N2-N): FileNames2.pop()
        #self.Files1 = [NetCDFFile(File,'r') for File in FileNames1]
        #self.Files2 = [NetCDFFile(File,'r') for File in FileNames2]
        self.N = N
        self.FileName1 = FileNames1[FileNumber]
        self.FileName2 = FileNames2[FileNumber]
        self.File1 = NetCDFFile(self.FileName1,'r')
        self.File2 = NetCDFFile(self.FileName2,'r')

        print 'Using input files:'
        print self.FileName1
        print self.FileName2

        #  Extract hybrid coord coefficients and ref press
        File = self.File1
        self.hyam = File.variables['hyam'][:]
        self.hybm = File.variables['hybm'][:]
        self.hyai = File.variables['hyai'][:]
        self.hybi = File.variables['hybi'][:]
        self.p0   = File.variables['P0'].getValue()
        # Extract lat, lon, lev, time
        self.lat = File.variables['lat'][:]
        self.lon = File.variables['lon'][:]
        self.lev = File.variables['lev'][:]
        self.time = File.variables['time'][:]
Exemple #33
0
 def gen(self):
   ncfile = Dataset(self.fname, 'w')
   tm = time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(time.time()))
   
   # set dimension info 
   ncfile.createDimension('grid_size', self.obj.grid_size)
   
   # set variable info
   grid_center_lat_var = ncfile.createVariable('grid_center_lat', dtype('d').char, ('grid_size',))
   grid_center_lon_var = ncfile.createVariable('grid_center_lon', dtype('d').char, ('grid_size',))
   physical_variable = ncfile.createVariable('physical_variable', dtype('d').char, ('grid_size',))
   
   grid_center_lat_var[:] = np.array(self.obj.grid_center_lat)
   grid_center_lon_var[:] = np.array(self.obj.grid_center_lon)
   physical_variable[:] = np.array(self.obj.physical_variable)
    
   setattr(ncfile, 'title', 'Threp ' + self.fname)
   setattr(ncfile, 'createdate', tm)
   setattr(ncfile, 'map_method', self.method)
   setattr(ncfile, 'conventions', 'Threp')
   setattr(ncfile, 'src_grid', self.obj.src_grid_name)
   setattr(ncfile, 'dst_grid', self.obj.dst_grid_name)
   
   ncfile.close() 
   print '*** Successfully generated netcdf file for ncl usage. ***'
 def __init__(self, filename, axesnames, variablename, default=None):
     """Interpolating function backed by a netCDF file.

     :param filename: path of the netCDF file to read
     :param axesnames: names of the netCDF variables holding the axes
     :param variablename: name of the netCDF variable holding the values
     :param default: value of the function outside the grid; None means
         evaluation outside the grid raises an exception
     """
     from Scientific.IO.NetCDF import NetCDFFile
     self.file = NetCDFFile(filename, 'r')
     # List comprehension instead of map + default-arg lambda: same
     # result, clearer, and returns a list under Python 3 as well.
     self.axes = [self.file.variables[n] for n in axesnames]
     self.values = self.file.variables[variablename]
     self.default = default
     # Combined shape: concatenation of each axis's shape tuple.
     self.shape = ()
     for axis in self.axes:
         self.shape = self.shape + axis.shape
 def on_superpose(self, event):
     """Redraw the molecular-trace isosurface from the currently
     selected file, then enable the isosurface-level slider."""
     self.on_clear()

     # Rendering options chosen in the GUI.
     rendtype = self.rendlist.GetValue()
     opacity = float(self.opacity.GetValue())
     filename = self.get_file()

     # Pull the trace volume and its geometry out of the netCDF file.
     trace_file = NetCDFFile(filename,"r")
     contents = trace_file.variables

     data = contents['molecular_trace'].getValue()
     origin = contents['origin'].getValue()
     spacing = contents['spacing'].getValue()

     trace_file.close()

     mi, ma = self.draw_isosurface(data, rendtype, opacity, origin, spacing)
     self.isov_slider.SetRange(mi, ma)
     self.isov_slider.Enable()
Exemple #36
0
    def runTest(self):
        """Run the reference and current analysis scripts, then compare
        every non-character variable of the two output NetCDF files.

        Fails (assertAlmostEqual to 6 places) when any shared variable
        differs; per-variable error maxima are stored in self.errors.
        Both output files are removed afterwards on a best-effort basis.
        """

        # Some information about the test to run are displayed in the console and file loggers.
        LogMessage('info',
                   'ANALYSIS: %s --- TEST No: %s' % (self.longName, self.id),
                   ['console', 'file'])

        # Generate the reference output ...
        subprocess.call([
            sys.executable, GVAR['pmoldyn_path'], self.pMoldynArg, "--input",
            os.path.join(self.testPath,
                         self.shortName + "%s_Reference.py" % self.id)
        ])
        # ... and the current output.
        subprocess.call([
            sys.executable,
            os.path.join(self.testPath,
                         self.shortName + "%d_Current.py" % self.id)
        ])

        refFileName = os.path.join(TEMP_DIR, self.shortName + "_Reference.nc")
        curFileName = os.path.join(TEMP_DIR, self.shortName + "_Current.nc")

        refFile = NetCDFFile(refFileName, 'r')
        curFile = NetCDFFile(curFileName, 'r')

        for key, val in refFile.variables.items():

            # Character (string) variables are not compared numerically.
            if val.typecode() != 'c':

                # FIX: `in` instead of the deprecated dict.has_key().
                if key not in curFile.variables:
                    continue

                refValue = val.getValue()
                curValue = curFile.variables[key].getValue()

                # Their difference is computed.
                errorMax = max(abs(N.ravel(refValue - curValue)))

                self.errors[key] = errorMax

                # Throws an assertion error if the difference is bigger than 1.0E-6.
                self.assertAlmostEqual(errorMax, 0.0, 6)

        curFile.close()

        refFile.close()

        # Best-effort cleanup; FIX: only swallow filesystem errors
        # instead of a bare except hiding everything.
        try:
            os.remove(refFileName)
            os.remove(curFileName)
        except OSError:
            pass
def ncep2fall3d(ncep_filename, fall3d_ncep_filename, verbose=True):
    """Convert standard NCEP file to fall3d NCEP format
    """
    
    # Copy standard NCEP file to fall3d NCEP file
    s = 'cp %s %s' % (ncep_filename, fall3d_ncep_filename)
    os.system(s)
    
    # Open files
    infile = NetCDFFile(ncep_filename)
    outfile = NetCDFFile(ncep_filename, 'a')
    
    # Establish special global attributes for fall3 NCEP format     
    
    print 'Found dimensions:', infile.dimensions.keys()    
    print 'Found variables:', infile.variables.keys()
    
    lon = infile.variables['lon'][:]
    lonmin = min(lon)
    lonmax = max(lon)    
    
    lat = infile.variables['lat'][:]
    latmin = min(lat)
    latmax = max(lat)    
    
    nx = infile.dimensions['lon']
    ny = infile.dimensions['lat']        
    np = infile.dimensions['pres']                
    nt = infile.dimensions['time']            
    print nx, ny, np, nt
    

    infile.close()
    outfile.close()
def load_geoinfo (SrcFilename, DstFilename, GridSize, LatName, LonName):
    """Down-sample lat/lon axes from SrcFilename into DstFilename.

    Latitude values are copied as-is; longitudes are decimated by
    keeping every second source point (GridSize destination points).
    """
    # Read the source coordinate arrays via the front-end helper.
    src = front_end_NetCDF_helper()
    src.open (SrcFilename)
    SrcLatData = src.read_data (LatName)
    SrcLonData = src.read_data (LonName)
    src.close ()

    # Down-sample longitude: start with 0 and skip alternate points.
    # (Averaging adjacent source points was considered as an
    # alternative strategy.)
    DstLonData = N.zeros (GridSize)
    for i in range(GridSize):
        DstLonData[i] = SrcLonData[i * 2]

    # Write the destination NetCDF file: one dimension and one
    # variable per axis.
    dst_ncfile = NetCDFFile (DstFilename, 'w')
    dst_ncfile.createDimension (LatName, GridSize)
    dst_ncfile.createDimension (LonName, GridSize)

    dst_lat_var = dst_ncfile.createVariable (LatName, 'd', (LatName,))
    dst_lon_var = dst_ncfile.createVariable (LonName, 'd', (LonName,))

    dst_lat_var.assignValue(SrcLatData)
    dst_lon_var.assignValue(DstLonData)

    dst_ncfile.close ()
Exemple #39
0
    def __init__(self,
                 quantity_name,
                 file_name,
                 time_step_count,
                 time_step,
                 lon,
                 lat):
        """Instantiate a Write_nc instance (NetCDF file writer).

        time_step_count is the number of time steps and time_step the
        time step size.

        pre-condition: quantity_name must be 'HA', 'UA' or 'VA'.
        """

        self.quantity_name = quantity_name

        # Units and m->cm multipliers, keyed by quantity.  VA is negated
        # because MUX files have positive x in the Southern direction;
        # the sign flip corrects for that when writing nc files.
        quantity_units = {'HA':'CENTIMETERS',
                          'UA':'CENTIMETERS/SECOND',
                          'VA':'CENTIMETERS/SECOND'}
        multiplier_dic = {'HA':100.0,
                          'UA':100.0,
                          'VA':-100.0}
        self.quantity_multiplier = multiplier_dic[self.quantity_name]

        self.time_step_count = time_step_count
        self.time_step = time_step

        # NetCDF file definition
        outfile = NetCDFFile(file_name, netcdf_mode_w)
        self.outfile = outfile

        # Lon/lat dimensions, variables and values.
        nc_lon_lat_header(outfile, lon, lat)

        # Unlimited TIME dimension.
        outfile.createDimension(time_name, None)
        outfile.createVariable(time_name, precision, (time_name,))

        # The quantity itself, dimensioned (time, lat, lon).
        quantity = outfile.createVariable(self.quantity_name, precision,
                                          (time_name, lat_name, lon_name))
        quantity.missing_value = -1.e+034
        quantity.units = quantity_units[self.quantity_name]

        outfile.variables[lon_name][:]= ensure_numeric(lon)
        outfile.variables[lat_name][:]= ensure_numeric(lat)
def regrid_array(data=data_cflux):
	'''
	Regrid an array for use with Basemap, using the ORCA grid file.

	Only works if the same latitudes and longitudes are selected from
	the netcdf file and grid.  The ORCA longitudes run from 80 to 440;
	they are remapped to start at 80, run to 180, switch to -180 and
	return to 80, and the duplicated final column is dropped.  Every
	time slice of *data* is reordered the same way.
	'''
	from Scientific.IO.NetCDF import NetCDFFile
	nc_grid = NetCDFFile(NC_PATH+ 'ORCA2.0_grid.nc','r')
	lon = nc_grid.variables['lon'][0:40,:]
	lat = nc_grid.variables['lat'][0:40,:]
	area = nc_grid.variables['area'][0:40,:]
	mask = nc_grid.variables['mask'][0,0:40,:]
	nc_grid.close()

	# Wrap longitudes >= 180 into [-180, 180).
	lon_min = lon.copy()
	i,j = np.where(lon_min >= 180.) # elements of lon_min that are over 180
	lon_min[i,j] = lon_min[i,j] - 360. # takes those elements and subtracts 360 from them

	# Column indices at/after (iw) and before (ie) the grid seam.
	iw = np.where(lon_min[0,:] >= lon_min[0][0])[0]
	ie = np.where(lon_min[0,:] < lon_min[0][0])[0]

	# Reorder columns to -180..180 and drop the duplicated last column.
	lon = np.concatenate((np.take(lon_min,ie,axis=1),np.take(lon_min,iw,axis=1)),axis=1)[:,:-1]
	lat = np.concatenate((np.take(lat,ie,axis=1),np.take(lat,iw,axis=1)),axis=1)[:,:-1]

	# Reorder every time slice of the data the same way.
	# FIX: derive the number of time slices from the data itself instead
	# of the hard-coded 3650, so inputs of any length work.
	ntime = data.shape[0]
	bm_array = [ma.concatenate((ma.take(data[i, :, :],ie,axis=1),ma.take(data[i, :, :],iw,axis=1)),axis=1)[:,:-1] for i in range(ntime)]
	bm_array = ma.array(bm_array)
	return bm_array
Exemple #41
0
def gen_realdata(path, filename, var):
  ncfile = Dataset(path + filename, 'r')
  filename = './realdata/T42_' + var + '-' + filename.split('.')[-2] + '.nc'
  nc = Dataset(filename, 'w')
  tm = time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(time.time()))
  nx = 128
  ny = 64
  grid_size = ncfile.dimensions['n_a']

  # load data
  data = ncfile.variables[var][:, :, :]
  long_name = ncfile.variables[var].long_name
  units = ncfile.variables[var].units
  missing_value = ncfile.variables[var].missing_value
  _FillValue = ncfile.variables[var]._FillValue
  cell_method = ncfile.variables[var].cell_method
  tmp = []
  for i in range(1):
    for j in range(ny):
      for k in range(nx):
        tmp.append(data[i][j][k])
  data = scipy.array(tmp)

  # create variables
  nc.createDimension('grid_size', nx * ny)

  # create varibels
  data_var = nc.createVariable('data', dtype('d').char, ('grid_size',))
  
  data_var[:] = data
  data_var.long_name = long_name
  data_var.units = units
  data_var.missing_value = missing_value
  data_var._FillValue = _FillValue
  data_var.cell_method = cell_method
 
  string = 'Threp' + var + ' data'
  setattr(nc, 'title', string)
  setattr(nc, 'createdata', tm)

  nc.close()
  ncfile.close()
  print '*** Successfully generating real data file. ***'
def get_veri_data(veri_datasets,var,lat,lon,plev=None,time=None):
    """Load monthly-mean verification data for *var* at one location.

    :param veri_datasets: sequence of verification dataset names
    :param var: short variable name (e.g. 'sst')
    :param lat, lon: coordinates used to build the file name
    :param plev: optional pressure level used in the file name
    :param time: optional hour-of-day (int), appended as HH00
    :returns: dict mapping each dataset name to {'data': monthly means,
        'date': months in YYYYMM format}
    """

    data_dir_main=get_data_dir_main()

    if var=='sst': var_veri='sea_surface_temperature'  #the name of the variable as used inside the NetCDF file
    else: var_veri=get_var_data_name(var)

    data_veri={}
    for veri in veri_datasets:
        fname='get_seasfc_data_'+veri+'_'+var
        if time is not None:  fname=fname+'_'+'{:02d}'.format(time)+'00'
        fname_coords=get_fname_coords(lat,lon,plev=plev)
        fname=fname+fname_coords
        nc_file_veri=NetCDFFile(data_dir_main+fname+'.nc')
        data_name=veri.lower()+'_'+var_veri

        data_veri[veri]={}
        data_veri[veri]['data']=nc_file_veri.variables[data_name][:]  #values of monthly means of the variable. The first dimension should correspond to time.
        data_veri[veri]['date']=nc_file_veri.variables['date'][:]  #the months in YYYYMM format (assumed to be a 1D array)

        nc_file_veri.close()

    # BUG FIX: return *after* the loop so every requested dataset is
    # loaded; previously the function returned inside the loop, after
    # only the first dataset.
    return data_veri
    def __init__(self, filename='gpaw', title='gpaw'):
        """Create an ETSF-format netCDF output file.

        The file name is normalised to end in '-etsf.nc' (an existing
        '.nc' suffix is replaced, otherwise the suffix is appended), and
        the standard ETSF/Nanoquanta global attributes are written.
        """
        suffix = '-etsf.nc'
        if not filename.endswith(suffix):
            if filename.endswith('.nc'):
                filename = filename[:-len('.nc')] + suffix
            else:
                filename = filename + suffix

        self.nc = NetCDFFile(filename, 'w')

        # Mandatory ETSF global attributes.
        self.nc.file_format = 'ETSF Nanoquanta'
        self.nc.file_format_version = np.array([3.3], dtype=np.float32)
        self.nc.Conventions = 'http://www.etsf.eu/fileformats/'
        self.nc.history = 'File generated by GPAW'
        self.nc.title = title
 def __parinit__(self, pid, nprocs, filename, split_dimension,
                 mode = 'r', local_access = 0):
     """Open a distributed netCDF file.

     Processor 0 (or every processor, when local_access is set) opens
     the file, determines the length of the split dimension and the
     variable layout, and broadcasts both so that every processor can
     build its _ParNetCDFVariable wrappers.
     """
     # Writing always routes through processor 0.
     if mode != 'r': local_access = 0
     self.pid = pid
     self.nprocs = nprocs
     self.filename = filename
     self.split = split_dimension
     self.local_access = local_access
     self.read_only = mode == 'r'
     if local_access or pid == 0:
         self.file = NetCDFFile(filename, mode)
         try:
             length = self.file.dimensions[split_dimension]
             # An unlimited dimension is reported as None; mark it with
             # -1 so its real length can be recovered from a variable.
             if length is None:
                 length = -1
         except KeyError:
             length = None
         # FIX: renamed local `vars` -> `variables` (shadowed builtin).
         variables = {}
         for name, var in self.file.variables.items():
             variables[name] = (name, var.dimensions)
             if length < 0 and split_dimension in var.dimensions:
                 index = list(var.dimensions).index(split_dimension)
                 length = var.shape[index]
     else:
         self.file = None
         self.split = split_dimension
         length = None
         variables = None
     if not local_access:
         length = self.broadcast(length)
         variables = self.broadcast(variables)
     if length is not None:
         self._divideData(length)
     self.variables = {}
     for name, var in variables.items():
         self.variables[name] = _ParNetCDFVariable(self, var[0], var[1],
                                                   split_dimension)
Exemple #45
0
    def __init__(self,
                 filename,
                 axesnames,
                 variablename,
                 default=None,
                 period=None):
        """
        @param filename: the name of the netCDF file
        @type filename: C{str}

        @param axesnames: the names of the netCDF variables containing
            the axes information
        @type axesnames: sequence of C{str}

        @param variablename: the name of the netCDF variable containing
            the data values
        @type variablename: C{str}

        @param default: the value of the function outside the grid.
            C{None} means the function is undefined outside the grid
            and any attempt to evaluate it there raises an exception.
        @type default: number or C{None}

        @param period: the period for each of the variables, or C{None}
            for variables in which the function is not periodic.
        @type period: sequence of numbers or C{None}
        """
        from Scientific.IO.NetCDF import NetCDFFile
        self.file = NetCDFFile(filename, 'r')

        # Each axis must be a 1-D netCDF variable.
        self.axes = [self.file.variables[name] for name in axesnames]
        for axis in self.axes:
            if len(axis.dimensions) != 1:
                raise ValueError("axes must be 1d arrays")

        # The value variable must be dimensioned by exactly those axes.
        self.values = self.file.variables[variablename]
        axis_dims = tuple(v.dimensions[0] for v in self.axes)
        if axis_dims != self.values.dimensions:
            raise ValueError("axes and values have incompatible dimensions")

        self.default = default

        # Combined shape: concatenation of the axes' shape tuples.
        shape = ()
        for axis in self.axes:
            shape = shape + axis.shape
        self.shape = shape

        # One period entry per axis; None marks a non-periodic axis.
        if period is None:
            period = len(self.axes) * [None]
        self.period = period
        if len(self.period) != len(self.axes):
            raise ValueError('Inconsistent arguments')
        for axis, p in zip(self.axes, self.period):
            if p is not None and axis[0] + p <= axis[-1]:
                raise ValueError('Period too short')
class NetCDFInputData(InputFileData):
    """Input-file wrapper for NetCDF (.nc) data files.

    load() reads every variable of the file into an ordered dict of
    {'axis': [...], 'data': value, 'units': str} entries.
    """

    type = "netcdf_data"

    extension = "nc"

    def load(self):
        """Open the underlying file and cache all of its variables."""

        try:
            self._netcdf = NetCDFFile(self._filename,"r")

        except IOError:
            raise InputDataError("The data stored in %r filename could not be loaded property." % self._filename)

        else:
            self._data = collections.OrderedDict()
            variables = self._netcdf.variables
            for k in variables:
                self._data[k]={}
                # BUG FIX: this used to read `vars[k].axis` -- indexing
                # the *builtin* vars function -- which always raised and
                # was silently swallowed, leaving axis empty.  Read the
                # optional attribute from the variable instead.
                axis = getattr(variables[k], 'axis', None)
                if axis:
                    self._data[k]['axis'] = axis.split('|')
                else:
                    self._data[k]['axis'] = []
                self._data[k]['data'] = variables[k].getValue()
                self._data[k]['units'] = getattr(variables[k], 'units', 'au')

    def close(self):
        """Close the underlying NetCDF file."""
        self._netcdf.close()

    @property
    def netcdf(self):
        # The underlying NetCDFFile object.
        return self._netcdf
Exemple #47
0
def load_geoinfo(SrcFilename, DstFilename, GridSize, LatName, LonName):
    """Read lat/lon from SrcFilename, halve the longitude resolution by
    taking every second point, and write both axes to DstFilename."""
    # Source coordinates via the front-end helper.
    helper = front_end_NetCDF_helper()
    helper.open(SrcFilename)
    SrcLatData = helper.read_data(LatName)
    SrcLonData = helper.read_data(LonName)
    helper.close()

    # Down-sample longitudes: keep points 0, 2, 4, ... (GridSize of
    # them).  An alternative strategy would average adjacent points.
    DstLonData = N.zeros(GridSize)
    for idx in range(GridSize):
        DstLonData[idx] = SrcLonData[2 * idx]

    # Emit the destination file: one dimension and one variable per axis.
    out = NetCDFFile(DstFilename, "w")
    out.createDimension(LatName, GridSize)
    out.createDimension(LonName, GridSize)

    lat_var = out.createVariable(LatName, "d", (LatName,))
    lon_var = out.createVariable(LonName, "d", (LonName,))

    lat_var.assignValue(SrcLatData)
    lon_var.assignValue(DstLonData)

    out.close()
Exemple #48
0
def write_elevation_nc(file_out, lon, lat, depth_vector):
    """Write an nc elevation file.

    depth_vector is reshaped to (len(lat), len(lon)) and stored as the
    ELEVATION variable, in centimeters.
    """

    zname = 'ELEVATION'

    # NetCDF file definition.
    outfile = NetCDFFile(file_out, netcdf_mode_w)

    # Standard lon/lat dimensions, variables and values.
    nc_lon_lat_header(outfile, lon, lat)

    # The elevation variable, dimensioned (lat, lon).
    elevation = outfile.createVariable(zname, precision, (lat_name, lon_name))
    elevation.units = 'CENTIMETERS'
    elevation.missing_value = -1.e+034

    outfile.variables[lon_name][:] = ensure_numeric(lon)
    outfile.variables[lat_name][:] = ensure_numeric(lat)
    elevation[:] = num.reshape(depth_vector, (len(lat), len(lon)))

    outfile.close()
Exemple #49
0
def read_dem(filename):
    ncf = NetCDFFile(filename, 'r')
    # Set ANUGA file - UTM grid params from netcdf header
    if hasattr(ncf, 'cellsize'):
        cellsz = ncf.cellsize[0]
        nrows = ncf.nrows[0]
        ncols = ncf.ncols[0]
        xll = ncf.xllcorner[0]  # Easting of lower left corner
        yll = ncf.yllcorner[0]  # Northing of lower left corner
        xur = xll + (ncols - 1) * cellsz
        yur = yll + (nrows - 1) * cellsz
        x = np.linspace(xll, xur, ncols)
        y = np.linspace(yll, yur, ncols)
        zone = ncf.zone[0]
        zone = 51
        zdat = np.flipud(ncf.variables['elevation'][:])
    # Made from GMT?
    elif 'x_range' in ncf.variables:
        xrng = ncf.variables['x_range']
        yrng = ncf.variables['y_range']
        zrng = ncf.variables['z_range']
        dxdy = ncf.variables['spacing']
        lnll = xrng[0]
        lnur = xrng[1]
        ltll = yrng[0]
        ltur = yrng[1]
        # Pixel registration
        if ncf.variables['z'].node_offset[0] == 1:
            lnll += 0.5 * dxdy[0]
            lnur -= 0.5 * dxdy[0]
            ltll += 0.5 * dxdy[1]
            ltur -= 0.5 * dxdy[1]
        nx = 1 + int(0.1 + (lnur - lnll) / dxdy[0])
        ny = 1 + int(0.1 + (ltur - ltll) / dxdy[1])
        zdat = ncf.variables['z'][:].reshape(ny, nx)
        x = np.linspace(lnll, lnur, nx)
        y = np.linspace(ltur, ltll, ny)
    elif ncf.Conventions == 'COARDS/CF-1.0':
        x = ncf.variables['x'][:]
        y = ncf.variables['y'][:]
        zdat = ncf.variables['z'][:]
    else:
        print 'Not GDAL/GMT netcdf file: %s' % filename
        return (-1)
    return (x, y, zdat)
    def write(cls, filename, data, header=""):
        '''
        Write a set of output variables into a NetCDF file.

        :param filename: the path to the output NetCDF file.
        :type filename: str
        :param data: the data to be written out.
        :type data: dict of Framework.OutputVariables.IOutputVariable
        :param header: the header to add to the output file.
        :type header: str
        '''

        # Force the extension registered for this writer.
        root = os.path.splitext(filename)[0]
        filename = "%s%s" % (root, cls.extensions[0])

        # The NetCDF output file is opened for writing.
        outputFile = NetCDFFile(filename, 'w')

        if header:
            outputFile.header = header

        # Loop over the OutputVariable instances to write.
        for var in data.values():

            # Escape the variable name into a legal NetCDF identifier.
            varName = str(var.name).strip().encode('string-escape').replace('/', '|')

            # One NetCDF dimension per axis of the OutputVariable.
            dimensions = []
            for axisNo, extent in enumerate(var.shape):
                dimName = str("%s_%d" % (varName, axisNo))
                dimensions.append(dimName)
                outputFile.createDimension(dimName, int(extent))

            # Create the NetCDF variable and store the array.
            NETCDFVAR = outputFile.createVariable(varName, numpy.dtype(var.dtype).char, tuple(dimensions))
            NETCDFVAR.assignValue(var)

            # Mirror every attribute of the OutputVariable instance as a
            # NetCDF attribute (stringified).
            for attrName, attrValue in vars(var).items():
                setattr(NETCDFVAR, str(attrName), str(attrValue))

        # The NetCDF file is closed.
        outputFile.close()
Exemple #51
0
def write_elevation_nc(file_out, lon, lat, depth_vector):
    """Write an nc elevation file (ELEVATION variable, in centimeters)."""

    handle = NetCDFFile(file_out, netcdf_mode_w)

    # Common lon/lat header: dimensions, variables and attributes.
    nc_lon_lat_header(handle, lon, lat)

    # ELEVATION, dimensioned (lat, lon).
    zname = 'ELEVATION'
    handle.createVariable(zname, precision, (lat_name, lon_name))
    zvar = handle.variables[zname]
    zvar.units = 'CENTIMETERS'
    zvar.missing_value = -1.e+034

    handle.variables[lon_name][:] = ensure_numeric(lon)
    handle.variables[lat_name][:] = ensure_numeric(lat)

    # The flat depth vector becomes a (lat, lon) grid.
    zvar[:] = num.reshape(depth_vector, (len(lat), len(lon)))

    handle.close()
Exemple #52
0
    def __init__(self, master, filename):
        """Build the S(q,t) browser.

        Reads q, time and sf from *filename* (the first q row is
        skipped), creates two plot canvases and the range/skip entry
        widgets, wires <Return> on every entry to redraw, then draws
        the initial curves.
        """
        Frame.__init__(self, master)

        # FIX: renamed local `file` (shadowed the builtin).
        nc = NetCDFFile(filename)
        self.q = nc.variables['q'][1:]
        self.t = nc.variables['time'][:]
        self.fn = nc.variables['sf'][1:, :]

        Label(self, text='S(t) for various q').pack(side=TOP, fill=X)
        self.plot1 = PlotCanvas(self, 600, 250, zoom=1)
        self.plot1.pack(side=TOP, fill=BOTH, expand=YES)
        Label(self, text='S(q) for various t').pack(side=TOP, fill=X)
        self.plot2 = PlotCanvas(self, 600, 250, zoom=1)
        self.plot2.pack(side=TOP, fill=BOTH, expand=YES)

        # Entry widgets selecting the q and t ranges to plot.
        frame = Frame(self)
        frame.pack(side=TOP, fill=X)
        self.first_q = IntEntry(frame, "q range:  from ", 0, 0, len(self.q))
        self.first_q.grid(row=0, column=0)
        self.last_q = IntEntry(frame, " to ", len(self.q), 0, len(self.q))
        self.last_q.grid(row=0, column=1)
        self.skip_q = IntEntry(frame, " skip ", (len(self.q) + 10) / 10, 1,
                               len(self.q))
        self.skip_q.grid(row=0, column=2)
        self.first_t = IntEntry(frame, "t range:  from ", 0, 0, len(self.t))
        self.first_t.grid(row=1, column=0)
        self.last_t = IntEntry(frame, " to ", len(self.t), 0, len(self.t))
        self.last_t.grid(row=1, column=1)
        self.skip_t = IntEntry(frame, " skip ", (len(self.t) + 10) / 10, 1,
                               len(self.t))
        self.skip_t.grid(row=1, column=2)

        # Redraw whenever any range entry is confirmed.
        # FIX: removed a redundant earlier bind of first_q that was
        # identical to (and overwritten by) the one below.
        self.first_q.bind('<Return>', self.draw)
        self.last_q.bind('<Return>', self.draw)
        self.skip_q.bind('<Return>', self.draw)
        self.first_t.bind('<Return>', self.draw)
        self.last_t.bind('<Return>', self.draw)
        self.skip_t.bind('<Return>', self.draw)

        self.draw()
    def openNetCDFFile(self, event=None):
        """
        Open a NetCDF file and update the dialog with the data read
        from that file.
        Arguments:
            -event: Tkinter event (None when invoked programmatically).
        """

        # Clear the variables-info panel before loading anything.
        self.variablesInfo.text.config(state=NORMAL)
        self.variablesInfo.text.delete('2.0', END)
        self.variablesInfo.text.config(state=DISABLED)

        if event is None:
            # No event: ask the user for the NetCDF file to load.
            filename = askopenfilename(parent = self,\
                                       filetypes = [('NetCDF file','*.nc')],\
                                       initialdir = PREFERENCES['outputfile_path'])
        elif event.widget == self.inputFileBrowser.entry:
            # Name entered directly in the entry widget, without the browser.
            filename = self.inputFileBrowser.getValue()
        else:
            return

        # The file must exist otherwise do nothing.
        if filename:
            self.netcdf = NetCDFFile(filename, 'r')

            self.displayNetCDFContents()

            # The filebrowser entry is updated with the loaded filename.
            self.inputFileBrowser.setValue(filename)

            # A default filename for the output ASCII file is built.
            self.outputFileBrowser.setValue(
                os.path.splitext(filename)[0] + '.cdl')

        return 'break'
Exemple #54
0
def NetCDFFile(file_name, netcdf_mode=netcdf_mode_r):
    """Wrapper to isolate changes of the netcdf libray.

    In theory we should be able to change over to NetCDF4 via this
    wrapper, by ensuring the interface to the NetCDF library is the same
    as the old Scientific.IO.NetCDF library.

    There is a difference between extracting dimensions. We have used the
    following to cover netcdf4 and scientific python

    try: # works with netcdf4
        number_of_timesteps = len(fid.dimensions['number_of_timesteps'])
        number_of_points = len(fid.dimensions['number_of_points'])
    except: # works with Scientific.IO.NetCDF
        number_of_timesteps = fid.dimensions['number_of_timesteps']
        number_of_points = fid.dimensions['number_of_points']

    """

    using_scientific = using_netcdf4 = False

    # Prefer netCDF4 when it is installed; fall back to Scientific.
    try:
        from netCDF4 import Dataset
        using_netcdf4 = True
    # FIX: catch only the import failure, not every exception.
    except ImportError:
        from Scientific.IO.NetCDF import NetCDFFile
        using_scientific = True

    assert using_scientific or using_netcdf4

    if using_scientific:
        return NetCDFFile(file_name, netcdf_mode)

    if using_netcdf4:
        # 'wl' ("write large") forces the NETCDF3_64BIT on-disk format.
        if netcdf_mode == 'wl' :
            return Dataset(file_name, 'w', format='NETCDF3_64BIT')
        else:
            return Dataset(file_name, netcdf_mode, format='NETCDF3_64BIT')
Exemple #55
0
    def transfercoord(self):
        """Write this object's grid description (dims, centre coords and
        mask) to a new SCRIP-style netCDF file kept in self.resncfile."""
        self.resncfile = Dataset(self.newfile, 'w')
        tm = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))

        out = self.resncfile

        # Dimensions of a SCRIP grid file.
        out.createDimension('grid_size', self.grid_size)
        out.createDimension('grid_rank', self.grid_rank)
        out.createDimension('grid_corners', self.grid_corners)

        # Variables and their units.
        dims_var = out.createVariable('grid_dims', dtype('int32').char,
                                      ('grid_rank', ))
        center_lat_var = out.createVariable('grid_center_lat',
                                            dtype('d').char, ('grid_size', ))
        center_lat_var.units = 'degrees'
        center_lon_var = out.createVariable('grid_center_lon',
                                            dtype('d').char, ('grid_size', ))
        center_lon_var.units = 'degrees'
        imask_var = out.createVariable('grid_imask', dtype('i').char,
                                       ('grid_size', ))
        imask_var.units = 'unitless'

        # Values.
        dims_var[:] = self.grid_dims
        center_lat_var[:] = np.array(self.__grid_center_lat)
        center_lon_var[:] = np.array(self.__grid_center_lon)
        imask_var[:] = np.array([np.int32(i) for i in self.grid_imask])

        # Global attributes describing provenance.
        setattr(out, 'title', 'Threp ' + self.newfile)
        setattr(out, 'createdate', tm)
        setattr(out, 'conventions', 'Threp')
        setattr(out, 'grid', self.newfile)
Exemple #56
0
class _ParNetCDFFile(ParBase):

    """
    Distributed netCDF file

    A ParNetCDFFile object acts as much as possible like a NetCDFFile object.
    Variables become ParNetCDFVariable objects, which behave like
    distributed sequences. Variables that use the dimension named by
    |split_dimension| are automatically distributed among the processors
    such that each treats only one slice of the whole file.
    """

    def __parinit__(self, pid, nprocs, filename, split_dimension,
                    mode = 'r', local_access = False):
        """
        @param filename: the name of the netCDF file
        @type filename: C{str}
        @param split_dimension: the name of the dimension along which the data
                                is distributed over the processors
        @type split_dimension: C{str}
        @param mode: read ('r'), write ('w'), or append ('a')
        @type mode: C{str}
        @param local_access: if C{False}, processor 0 is the only one to
                             access the file, all others communicate with
                             processor 0. If C{True} (only for reading), each
                             processor accesses the file directly. In the
                             latter case, the file must be accessible on all
                             processors under the same name. A third mode is
                             'auto', which uses some heuristics to decide
                             if the file is accessible everywhere: it checks
                             for existence of the file, then compares
                             the size on all processors, and finally verifies
                             that the same variables exist everywhere, with
                             identical names, types, and sizes.
        @type local_access: C{bool} or C{str}
        """
        # Writing must be funneled through processor 0; direct local access
        # is only valid for read-only use.
        if mode != 'r':
            local_access = 0
        self.pid = pid
        self.nprocs = nprocs
        self.filename = filename
        self.split = split_dimension
        self.local_access = local_access
        self.read_only = mode == 'r'
        if local_access or pid == 0:
            # This processor opens the file itself.
            self.file = NetCDFFile(filename, mode)
            try:
                # Dimension length; None marks the unlimited dimension,
                # whose current size must be recovered from a variable.
                length = self.file.dimensions[split_dimension]
                if length is None:
                    length = -1
            except KeyError:
                length = None
            variables = {}
            for name, var in self.file.variables.items():
                variables[name] = (name, var.dimensions)
                if length < 0 and split_dimension in var.dimensions:
                    # Unlimited split dimension: read its current extent
                    # from the first variable that uses it.
                    index = list(var.dimensions).index(split_dimension)
                    length = var.shape[index]
        else:
            # Non-root processor without local access: metadata arrives
            # via broadcast below.
            self.file = None
            length = None
            variables = None
        if not local_access:
            # Distribute the metadata gathered by processor 0.
            length = self.broadcast(length)
            variables = self.broadcast(variables)
        if length is not None:
            self._divideData(length)
        self.variables = {}
        for name, var in variables.items():
            self.variables[name] = _ParNetCDFVariable(self, var[0], var[1],
                                                      split_dimension)

    def __repr__(self):
        return repr(self.filename)

    def close(self):
        # Only processors that actually hold an open file close it.
        if self.local_access or self.pid == 0:
            self.file.close()

    def createDimension(self, name, length):
        """Create dimension |name|; the split dimension must be finite."""
        if name == self.split:
            if length is None:
                raise ValueError("Split dimension cannot be unlimited")
            self._divideData(length)
        if self.pid == 0:
            self.file.createDimension(name, length)

    def createVariable(self, name, typecode, dimensions):
        """Create a variable on processor 0 and mirror it everywhere."""
        if self.pid == 0:
            var = self.file.createVariable(name, typecode, dimensions)
            dim = var.dimensions
        else:
            dim = 0
        # Broadcast so every processor registers the same variable metadata.
        name, dim = self.broadcast((name, dim))
        self.variables[name] = _ParNetCDFVariable(self, name, dim, self.split)
        return self.variables[name]

    def _divideData(self, length):
        """Assign each processor a contiguous [first, last) slice of the
        split dimension of total size |length|."""
        # Ceiling division; '//' keeps the chunk an int under Python 3 as
        # well (plain '/' would yield a float and break slicing).
        chunk = (length+self.nprocs-1)//self.nprocs
        self.first = min(self.pid*chunk, length)
        self.last = min(self.first+chunk, length)
        if (not self.local_access) and self.pid == 0:
            # Processor 0 keeps everybody's slice boundaries so it can
            # route remote accesses.
            self.parts = []
            for pid in range(self.nprocs):
                first = pid*chunk
                last = min(first+chunk, length)
                self.parts.append((first, last))

    def sync(self):
        if self.pid == 0:
            self.file.sync()
    flush = sync