def test_modis():
        
    modis_file      = "MYD06_L2.A2010100.0755.051.2010108054555.hdf"
    print "****** Reading MODIS data from file: ", modis_file
    modis           = FEMODIS.front_end_modis_cloud_1km_dev(modis_file)
    tim=modis.get_time()
    lat=modis.get_latitude()
    lon=modis.get_longitude() 
    dat=modis.get_data()
    print dat.keys()
    cwp=dat['Cloud_Water_Path']
 
    # print lat, lon, lwp
    ncfile = Dataset('modis_1km.nc','w')
    ndim = len(lat)
    ncfile.createDimension('time',ndim)
    time = ncfile.createVariable('time',dtype('float32').char,('time', ))
    lats = ncfile.createVariable('latitude',dtype('float32').char,('time', ))
    lons = ncfile.createVariable('longitude',dtype('float32').char,('time', ))
    cwps = ncfile.createVariable('cloud_water_path',dtype('float32').char,('time', ))
    time[:] = N.cast['float32'](tim)
    lats[:] = N.cast['float32'](lat)
    lons[:] = N.cast['float32'](lon)
    cwps[:] = N.cast['float32'](cwp[1])
    ncfile.close()    
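
# A minimal read-back check for the file written above; a sketch that assumes
# Dataset comes from the netCDF4 package, as the snippet's usage suggests.
from netCDF4 import Dataset
nc = Dataset('modis_1km.nc', 'r')
print nc.variables['cloud_water_path'][:5]
nc.close()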
Example #2
    def gen(self):
        ncfile = Dataset(self.fname, 'w')
        tm = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))

        # set dimension info
        ncfile.createDimension('grid_size', self.obj.grid_size)

        # set variable info
        grid_center_lat_var = ncfile.createVariable('grid_center_lat',
                                                    dtype('d').char,
                                                    ('grid_size', ))
        grid_center_lon_var = ncfile.createVariable('grid_center_lon',
                                                    dtype('d').char,
                                                    ('grid_size', ))
        physical_variable = ncfile.createVariable('physical_variable',
                                                  dtype('d').char,
                                                  ('grid_size', ))

        grid_center_lat_var[:] = np.array(self.obj.grid_center_lat)
        grid_center_lon_var[:] = np.array(self.obj.grid_center_lon)
        physical_variable[:] = np.array(self.obj.physical_variable)

        setattr(ncfile, 'title', 'Threp ' + self.fname)
        setattr(ncfile, 'createdate', tm)
        setattr(ncfile, 'map_method', self.method)
        setattr(ncfile, 'conventions', 'Threp')
        setattr(ncfile, 'src_grid', self.obj.src_grid_name)
        setattr(ncfile, 'dst_grid', self.obj.dst_grid_name)

        ncfile.close()
        print '*** Successfully generated netcdf file for ncl usage. ***'
Example #3
def OPENPIV2D2C(filename,ux_out,uy_out,x_out,y_out,flag1,flag2,flag3):
    """Storage in NetCDF format: 2D2C PIV datas with 3 flags used in OPENPIV"""
    # open a new netCDF file for writing.
    ncfile = Dataset(filename,'w') 
    # create the x and y dimensions.
    nx,ny=ux_out.shape
    ncfile.createDimension('x',nx)
    ncfile.createDimension('y',ny)
    # create the variables (double precision here); the first argument is the
    # variable name, the second is the datatype, the third is a tuple with the
    # names of the dimensions.
    #data = ncfile.createVariable('data',np.dtype('int32').char,('x','y'))
    xvar = ncfile.createVariable('xvar','d',('x','y'))
    yvar = ncfile.createVariable('yvar','d',('x','y'))
    ux = ncfile.createVariable('ux','d',('x','y'))
    uy = ncfile.createVariable('uy','d',('x','y'))
    Flags1 = ncfile.createVariable('flag1','d',('x','y'))
    Flags2 = ncfile.createVariable('flag2','d',('x','y'))
    Flags3 = ncfile.createVariable('flag3','d',('x','y'))
    # write data to variable.
    xvar[:] = x_out
    yvar[:] = y_out
    ux[:] = ux_out
    uy[:] = uy_out
    Flags1[:] = flag1
    Flags2[:] = flag2
    Flags3[:] = flag3
    # close the file.
    ncfile.close()
    print '*** SUCCESS writing:',filename
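
# A hedged usage sketch for OPENPIV2D2C with synthetic PIV fields; the grid
# size and output name are illustrative, and Dataset is assumed to come from
# the netCDF4 package as in the snippet above.
import numpy as np
shape = (32, 48)                      # rows, cols of the PIV grid
x, y = np.meshgrid(np.arange(shape[1]), np.arange(shape[0]))
u = np.random.rand(*shape)            # horizontal velocity component
v = np.random.rand(*shape)            # vertical velocity component
ok = np.ones(shape)                   # all vectors flagged as valid
OPENPIV2D2C('piv_field.nc', u, v, x, y, ok, ok, ok)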
Example #4
class CoordTransfer(Exception):
    def __init__(self, srcfile, dstfile, newfile):
        self.srcfile = srcfile
        self.dstfile = dstfile
        self.newfile = newfile

    def loadsrcoords(self):
        self.ncfile = Dataset(self.srcfile, 'r')
        variable_name = 'grid_center_lat'
        __grid_center_lat = self.ncfile.variables[variable_name][:]
        variable_name = 'grid_center_lon'
        __grid_center_lon = self.ncfile.variables[variable_name][:]
        self.__grid_center_lat = __grid_center_lat.tolist()
        self.__grid_center_lon = __grid_center_lon.tolist()

    def loadstinfo(self):
        self.nc_obj = Loadnc(self.dstfile)
        self.grid_size, self.grid_corners, self.grid_rank, self.grid_dims, ach1, ach2, self.grid_imask = self.nc_obj.load(
        )

    def transfercoord(self):
        self.resncfile = Dataset(self.newfile, 'w')
        tm = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))

        # set dimension info
        self.resncfile.createDimension('grid_size', self.grid_size)
        self.resncfile.createDimension('grid_rank', self.grid_rank)
        self.resncfile.createDimension('grid_corners', self.grid_corners)

        # set variable info
        grid_dims_var = self.resncfile.createVariable('grid_dims',
                                                      dtype('int32').char,
                                                      ('grid_rank', ))
        grid_center_lat_var = self.resncfile.createVariable(
            'grid_center_lat',
            dtype('d').char, ('grid_size', ))
        grid_center_lat_var.units = 'degrees'
        grid_center_lon_var = self.resncfile.createVariable(
            'grid_center_lon',
            dtype('d').char, ('grid_size', ))
        grid_center_lon_var.units = 'degrees'
        grid_imask_var = self.resncfile.createVariable('grid_imask',
                                                       dtype('i').char,
                                                       ('grid_size', ))
        grid_imask_var.units = 'unitless'

        grid_dims_var[:] = self.grid_dims
        grid_center_lat_var[:] = np.array(self.__grid_center_lat)
        grid_center_lon_var[:] = np.array(self.__grid_center_lon)
        buffer1 = [np.int32(i) for i in self.grid_imask]
        grid_imask_var[:] = np.array(buffer1)

        setattr(self.resncfile, 'title', 'Threp ' + self.newfile)
        setattr(self.resncfile, 'createdate', tm)
        setattr(self.resncfile, 'conventions', 'Threp')
        setattr(self.resncfile, 'grid', self.newfile)

    def finish(self):
        self.resncfile.close()
        self.nc_obj.closenc()
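
# A hedged sketch of the intended call order for CoordTransfer; the file names
# are illustrative and Loadnc is assumed to be importable as in the snippet.
ct = CoordTransfer('src_grid.nc', 'dst_grid.nc', 'new_grid.nc')
ct.loadsrcoords()    # read grid_center_lat/lon from the source grid
ct.loadstinfo()      # read size, rank, corners and mask from the destination grid
ct.transfercoord()   # write the combined grid description to new_grid.nc
ct.finish()          # close both files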
Example #5
 def openOutputFile(self):
     # Create file
     os.system('mkdir -p results/%s-%s' % (self.Case1,self.Case2))
     FileName = 'results/%s-%s/Cam3Feedbacks.%s-%s.%03i.nc' % \
                (self.Case1,self.Case2,self.Case1,self.Case2,self.FileNumber)
     if not os.path.exists(FileName):
         print 'creating %s ...' % FileName
         File = NetCDFFile(FileName,'w')
         File.createDimension('lat',len(self.data.lat))
         var = File.createVariable('lat','f',('lat',))
         var.long_name = 'latitude'
         var.units = 'degrees_north'
         var[:] = self.data.lat.astype('f')
         File.createDimension('lon',len(self.data.lon))
         var = File.createVariable('lon','f',('lon',))
         var.long_name = 'longitude'
         var.units = 'degrees_east'
         var[:] = self.data.lon.astype('f')
         # create variables
         for Field in ['dR_Ts','dR_lapse','dR_q','dR_cld_sw','dR_cld_lw','dR_alb','dR_co2']:
             var = File.createVariable(Field,'f',('lat','lon'))
             var.long_name = 'TOA radiative perturbation'
             var.units = 'W m-2'
             var[:,:] = 0.
         File.NsnapsDone = 0
         return 0, File
     else:
         File = NetCDFFile(FileName,'a')
         NsnapsDone = int(File.NsnapsDone[0])
         if NsnapsDone < len(self.data.time):
             return NsnapsDone, File
         else:
             print 'No more snaps to be done'
             sys.exit(0)
Example #6
 def gen(self):
   ncfile = Dataset(self.fname, 'w')
   tm = time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(time.time()))
   
   # set dimension info 
   ncfile.createDimension('grid_size', self.obj.grid_size)
   
   # set variable info
   grid_center_lat_var = ncfile.createVariable('grid_center_lat', dtype('d').char, ('grid_size',))
   grid_center_lon_var = ncfile.createVariable('grid_center_lon', dtype('d').char, ('grid_size',))
   physical_variable = ncfile.createVariable('physical_variable', dtype('d').char, ('grid_size',))
   
   grid_center_lat_var[:] = np.array(self.obj.grid_center_lat)
   grid_center_lon_var[:] = np.array(self.obj.grid_center_lon)
   physical_variable[:] = np.array(self.obj.physical_variable)
    
   setattr(ncfile, 'title', 'Threp ' + self.fname)
   setattr(ncfile, 'createdate', tm)
   setattr(ncfile, 'map_method', self.method)
   setattr(ncfile, 'conventions', 'Threp')
   setattr(ncfile, 'src_grid', self.obj.src_grid_name)
   setattr(ncfile, 'dst_grid', self.obj.dst_grid_name)
   
   ncfile.close() 
   print '*** Successfully generated netcdf file for ncl usage. ***'
Example #7
def OPENPIV2D2C(filename, ux_out, uy_out, x_out, y_out, flag1, flag2, flag3):
    """Storage in NetCDF format: 2D2C PIV datas with 3 flags used in OPENPIV"""
    # open a new netCDF file for writing.
    ncfile = Dataset(filename, 'w')
    # create the x and y dimensions.
    nx, ny = ux_out.shape
    ncfile.createDimension('x', nx)
    ncfile.createDimension('y', ny)
    # create the variables (double precision here); the first argument is the
    # variable name, the second is the datatype, the third is a tuple with the
    # names of the dimensions.
    #data = ncfile.createVariable('data',np.dtype('int32').char,('x','y'))
    xvar = ncfile.createVariable('xvar', 'd', ('x', 'y'))
    yvar = ncfile.createVariable('yvar', 'd', ('x', 'y'))
    ux = ncfile.createVariable('ux', 'd', ('x', 'y'))
    uy = ncfile.createVariable('uy', 'd', ('x', 'y'))
    Flags1 = ncfile.createVariable('flag1', 'd', ('x', 'y'))
    Flags2 = ncfile.createVariable('flag2', 'd', ('x', 'y'))
    Flags3 = ncfile.createVariable('flag3', 'd', ('x', 'y'))
    # write data to variable.
    xvar[:] = x_out
    yvar[:] = y_out
    ux[:] = ux_out
    uy[:] = uy_out
    Flags1[:] = flag1
    Flags2[:] = flag2
    Flags3[:] = flag3
    # close the file.
    ncfile.close()
    print '*** SUCCESS writing:', filename
Example #8
    def finalize(self):
        """Finalizes the calculations (e.g. averaging the total term, output files creations ...).
        """

        # The NetCDF output file is opened for writing.
        outputFile = NetCDFFile(self.output, 'w')
        outputFile.title = self.__class__.__name__
        outputFile.jobinfo = self.information + '\nOutput file written on: %s\n\n' % asctime(
        )

        # Dictionary whose keys are of the form Gi, where i is the group number,
        # and whose entries are the lists of indices of the atoms in each group.
        comp = 1
        for g in self.group:
            outputFile.jobinfo += 'Group %d: %s\n' % (comp,
                                                      [index for index in g])
            comp += 1

        # Some dimensions are created.
        outputFile.createDimension('NFRAMES', self.nFrames)

        # Creation of the NetCDF output variables.
        # The time.
        TIMES = outputFile.createVariable('time', N.Float, ('NFRAMES', ))
        TIMES[:] = self.times[:]
        TIMES.units = 'ps'

        avacfTotal = N.zeros((self.nFrames), typecode=N.Float)

        for k in self.AVACF.keys():

            AVACF = outputFile.createVariable('avacf-group%s' % k, N.Float,
                                              ('NFRAMES', ))
            AVACF[:] = self.AVACF[k][:]
            AVACF.units = 'rad^2*ps^-2'

            N.add(avacfTotal, self.AVACF[k], avacfTotal)

        avacfTotal /= self.nGroups

        AVACF = outputFile.createVariable('avacf-total', N.Float,
                                          ('NFRAMES', ))
        AVACF[:] = avacfTotal[:]
        AVACF.units = 'rad^2*ps^-2'

        asciiVar = sorted(outputFile.variables.keys())

        outputFile.close()

        self.toPlot = {
            'netcdf': self.output,
            'xVar': 'time',
            'yVar': 'avacf-total'
        }

        # Create an ASCII version of the NetCDF output file.
        convertNetCDFToASCII(inputFile = self.output,\
                             outputFile = os.path.splitext(self.output)[0] + '.cdl',\
                             variables = asciiVar)
Example #9
 def write(self):
   ncfile = Dataset(self.fname, 'w')
   tm = time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(time.time()))
   
   # set dimension info 
   ncfile.createDimension('src_grid_size', self.obj.src_grid_size)
   ncfile.createDimension('dst_grid_size', self.obj.dst_grid_size)
   ncfile.createDimension('n_wgt', self.n_wgt)
   ncfile.createDimension('src_grid_rank', self.obj.src_grid_rank)
   ncfile.createDimension('dst_grid_rank', self.obj.dst_grid_rank)
   ncfile.createDimension('num_wgts', 1)
   ncfile.createDimension('src_grid_corners', self.obj.src_grid_corners)
   ncfile.createDimension('dst_grid_corners', self.obj.dst_grid_corners)
   
   # set variable info
   src_grid_dims_var = ncfile.createVariable('src_grid_dims', dtype('int32').char, ('src_grid_rank',))
   dst_grid_dims_var = ncfile.createVariable('dst_grid_dims', dtype('int32').char, ('dst_grid_rank',))
   src_grid_center_lat_var = ncfile.createVariable('src_grid_center_lat', dtype('d').char, ('src_grid_size',))
   src_grid_center_lon_var = ncfile.createVariable('src_grid_center_lon', dtype('d').char, ('src_grid_size',))
   dst_grid_center_lat_var = ncfile.createVariable('dst_grid_center_lat', dtype('d').char, ('dst_grid_size',))
   dst_grid_center_lon_var = ncfile.createVariable('dst_grid_center_lon', dtype('d').char, ('dst_grid_size',))
   src_grid_imask_var = ncfile.createVariable('src_grid_imask', dtype('i').char, ('src_grid_size',))
   dst_grid_imask_var = ncfile.createVariable('dst_grid_imask', dtype('i').char, ('dst_grid_size',))
   remap_src_indx_var = ncfile.createVariable('remap_src_indx', dtype('i').char, ('n_wgt',))
   remap_dst_indx_var = ncfile.createVariable('remap_dst_indx', dtype('i').char, ('n_wgt',))
   remap_matrix_var = ncfile.createVariable('remap_matrix', dtype('d').char, ('n_wgt',))
   
   src_grid_dims_var[:] = self.obj.src_grid_dims
   dst_grid_dims_var[:] = self.obj.dst_grid_dims
   src_grid_center_lat_var[:] = np.array(self.obj.original_src_grid_center_lat)
   src_grid_center_lon_var[:] = np.array(self.obj.original_src_grid_center_lon)
   dst_grid_center_lat_var[:] = np.array(self.obj.dst_grid_center_lat)
   dst_grid_center_lon_var[:] = np.array(self.obj.dst_grid_center_lon)
   #src_grid_imask_var[:] = np.array(self.obj.original_src_grid_imask)
   buffer1 = [np.int32(i) for i in self.obj.original_src_grid_imask]
   src_grid_imask_var[:] = np.array(buffer1)
   buffer2 = [np.int32(i) for i in self.obj.dst_grid_imask]
   dst_grid_imask_var[:] = np.array(buffer2)
   #dst_grid_imask_var[:] = np.array(self.obj.dst_grid_imask)
   buffer3 = [np.int32(i) for i in self.obj.remap_src_indx]
   remap_src_indx_var[:] = np.array(buffer3)
   #remap_src_indx_var[:] = np.array(self.obj.remap_src_indx)
   buffer4 = [np.int32(i) for i in self.obj.remap_dst_indx]
   remap_dst_indx_var[:] = np.array(buffer4)
   #remap_dst_indx_var[:] = np.array(self.obj.remap_dst_indx)
   remap_matrix_var[:] = np.array(self.obj.remap_matrix_compact)
   
   setattr(ncfile, 'title', 'Threp ' + self.fname)
   setattr(ncfile, 'createdate', tm)
   setattr(ncfile, 'map_method', self.method)
   setattr(ncfile, 'conventions', 'Threp')
   setattr(ncfile, 'src_grid', self.obj.src_grid_name)
   setattr(ncfile, 'dst_grid', self.obj.dst_grid_name)
   
   ncfile.close() 
   print '*** Successfully generated remap matrix file. ***'
Example #10
def write_to_file(CS,nmax_coarse, nmax_fine, nblocks ,hw_ph,filename):
    """
    Writes the results of a computation to a netcdf file.
    Takes a Compute_Loop_Function object as input; it is assumed that 
    this object has already computed what we wish to write!
    """

    #--------------------------------------------
    # Write to netcdf file 
    #--------------------------------------------
    ncfile   = Dataset(filename,'w')

    # --- set various attributes, identifying the parameters of the computation ----
    setattr(ncfile,'mu',CS.mu) 
    setattr(ncfile,'beta',CS.beta) 
    setattr(ncfile,'acell',acell) 
    setattr(ncfile,'Area',Area) 
    setattr(ncfile,'nmax_coarse',nmax_coarse) 
    setattr(ncfile,'nmax_fine',nmax_fine) 
    setattr(ncfile,'n_blocks_coarse_to_fine',nblocks) 
    setattr(ncfile,'Gamma_width',CS.Gamma) 
    setattr(ncfile,'phonon_frequency',hw_ph) 


    # --- Create dimensions ----
    ncfile.createDimension("number_of_frequencies",CS.list_hw.shape[0])
    ncfile.createDimension("xy",2)
    ncfile.createDimension("gamma_i",2)
    ncfile.createDimension("uv",2)
    ncfile.createDimension("phonon_alpha_kappa",6)


    # --- Write data ----
    Q      = ncfile.createVariable("q_phonon",'d',('xy',))
    REPH   = ncfile.createVariable("Re_E_phonon",'d',('phonon_alpha_kappa',))
    IEPH   = ncfile.createVariable("Im_E_phonon",'d',('phonon_alpha_kappa',))
    HW     = ncfile.createVariable("list_hw",'d',('number_of_frequencies',))

    RH     = ncfile.createVariable("Re_H",'d',('xy','gamma_i','uv','number_of_frequencies'))
    IH     = ncfile.createVariable("Im_H",'d',('xy','gamma_i','uv','number_of_frequencies'))


    Q[:]    = CS.q
    REPH[:] = N.real(CS.E_ph)
    IEPH[:] = N.imag(CS.E_ph)
    HW[:]   = N.real(CS.list_hw)

    RH[:,:,:,:] = N.real(CS.Hq)
    IH[:,:,:,:] = N.imag(CS.Hq)



    ncfile.close()
Example #11
class CoordTransfer(Exception):
  def __init__(self, srcfile, dstfile, newfile):
    self.srcfile = srcfile
    self.dstfile = dstfile
    self.newfile = newfile
    
  def loadsrcoords(self):
    self.ncfile = Dataset(self.srcfile, 'r')
    variable_name = 'grid_center_lat'
    __grid_center_lat = self.ncfile.variables[variable_name][:]
    variable_name = 'grid_center_lon'
    __grid_center_lon = self.ncfile.variables[variable_name][:]
    self.__grid_center_lat = __grid_center_lat.tolist()
    self.__grid_center_lon = __grid_center_lon.tolist()
    
  def loadstinfo(self):
   self.nc_obj = Loadnc(self.dstfile)
   self.grid_size, self.grid_corners, self.grid_rank, self.grid_dims, ach1, ach2, self.grid_imask = self.nc_obj.load()
   
  def transfercoord(self):
    self.resncfile = Dataset(self.newfile, 'w')
    tm = time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(time.time()))
    
    # set dimension info 
    self.resncfile.createDimension('grid_size', self.grid_size)
    self.resncfile.createDimension('grid_rank', self.grid_rank)
    self.resncfile.createDimension('grid_corners', self.grid_corners)

    # set variable info
    grid_dims_var = self.resncfile.createVariable('grid_dims', dtype('int32').char, ('grid_rank',))
    grid_center_lat_var = self.resncfile.createVariable('grid_center_lat', dtype('d').char, ('grid_size',))
    grid_center_lat_var.units = 'degrees'
    grid_center_lon_var = self.resncfile.createVariable('grid_center_lon', dtype('d').char, ('grid_size',))
    grid_center_lon_var.units = 'degrees'
    grid_imask_var = self.resncfile.createVariable('grid_imask', dtype('i').char, ('grid_size',))
    grid_imask_var.units = 'unitless'

    grid_dims_var[:] = self.grid_dims
    grid_center_lat_var[:] = np.array(self.__grid_center_lat)
    grid_center_lon_var[:] = np.array(self.__grid_center_lon)
    buffer1 = [np.int32(i) for i in self.grid_imask]
    grid_imask_var[:] = np.array(buffer1)

    setattr(self.resncfile, 'title', 'Threp ' + self.newfile)
    setattr(self.resncfile, 'createdate', tm)
    setattr(self.resncfile, 'conventions', 'Threp')
    setattr(self.resncfile, 'grid', self.newfile)

  def finish(self):
    self.resncfile.close()
    self.nc_obj.closenc()
Example #12
def filter_netcdf(filename1, filename2, first=0, last=None, step=1):
    """Filter data file, selecting timesteps first:step:last.
    
    Read netcdf filename1, pick timesteps first:step:last and save to
    nettcdf file filename2
    """

    from Scientific.IO.NetCDF import NetCDFFile

    # Get NetCDF
    infile = NetCDFFile(filename1, netcdf_mode_r)  #Open existing file for read
    outfile = NetCDFFile(filename2, netcdf_mode_w)  #Open new file

    # Copy dimensions
    for d in infile.dimensions:
        outfile.createDimension(d, infile.dimensions[d])

    # Copy variable definitions
    for name in infile.variables:
        var = infile.variables[name]
        outfile.createVariable(name, var.dtype.char, var.dimensions)

    # Copy the static variables
    for name in infile.variables:
        if name == 'time' or name == 'stage':
            pass
        else:
            outfile.variables[name][:] = infile.variables[name][:]

    # Copy selected timesteps
    time = infile.variables['time']
    stage = infile.variables['stage']

    newtime = outfile.variables['time']
    newstage = outfile.variables['stage']

    if last is None:
        last = len(time)

    selection = range(first, last, step)
    for i, j in enumerate(selection):
        log.critical('Copying timestep %d of %d (%f)'
                     % (j, last-first, time[j]))
        newtime[i] = time[j]
        newstage[i,:] = stage[j,:]

    # Close
    infile.close()
    outfile.close()
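
# A hedged usage sketch: keep every 5th of the first 100 timesteps from an
# existing file. File names are illustrative; netcdf_mode_r, netcdf_mode_w and
# log are assumed to be provided by the surrounding module, as in the snippet.
filter_netcdf('source.sww', 'thinned.sww', first=0, last=100, step=5)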
Example #13
def filter_netcdf(filename1, filename2, first=0, last=None, step=1):
    """Filter data file, selecting timesteps first:step:last.
    
    Read netcdf filename1, pick timesteps first:step:last and save to
    nettcdf file filename2
    """

    from Scientific.IO.NetCDF import NetCDFFile

    # Get NetCDF
    infile = NetCDFFile(filename1, netcdf_mode_r)  #Open existing file for read
    outfile = NetCDFFile(filename2, netcdf_mode_w)  #Open new file

    # Copy dimensions
    for d in infile.dimensions:
        outfile.createDimension(d, infile.dimensions[d])

    # Copy variable definitions
    for name in infile.variables:
        var = infile.variables[name]
        outfile.createVariable(name, var.dtype.char, var.dimensions)

    # Copy the static variables
    for name in infile.variables:
        if name == 'time' or name == 'stage':
            pass
        else:
            outfile.variables[name][:] = infile.variables[name][:]

    # Copy selected timesteps
    time = infile.variables['time']
    stage = infile.variables['stage']

    newtime = outfile.variables['time']
    newstage = outfile.variables['stage']

    if last is None:
        last = len(time)

    selection = range(first, last, step)
    for i, j in enumerate(selection):
        log.critical('Copying timestep %d of %d (%f)'
                     % (j, last-first, time[j]))
        newtime[i] = time[j]
        newstage[i,:] = stage[j,:]

    # Close
    infile.close()
    outfile.close()
Example #14
    def finalize(self):
        """Finalizes the calculations (e.g. averaging the total term, output files creations ...).
        """

        outputFile = NetCDFFile(self.output, 'w')
        outputFile.title = self.__class__.__name__
        outputFile.jobinfo = self.information + '\nOutput file written on: %s\n\n' % asctime(
        )

        outputFile.createDimension('NGROUPS', self.nGroups)
        outputFile.createDimension('NFRAMES', self.nFrames)

        TIMES = outputFile.createVariable('time', N.Float, ('NFRAMES', ))
        TIMES[:] = self.times
        TIMES.units = 'ps'

        GROUPNUMBER = outputFile.createVariable('group_number', N.Int32,
                                                ('NGROUPS', ))
        P2 = outputFile.createVariable('p2', N.Float, ('NGROUPS', 'NFRAMES'))
        P2AVG = outputFile.createVariable('p2-groupavg', N.Float,
                                          ('NFRAMES', ))
        S2 = outputFile.createVariable('s2', N.Float, ('NGROUPS', ))

        p2Avg = N.zeros((self.nFrames), typecode=N.Float)

        comp = 0
        for bKey in sorted(self.bondNames.keys()):

            bName = self.bondNames[bKey]

            GROUPNUMBER[comp] = bName
            S2[comp] = self.S2[bName]
            P2[comp, :] = self.P2[bName]
            N.add(p2Avg, self.P2[bName], p2Avg)
            comp += 1

        P2AVG[:] = p2Avg / float(self.nGroups)

        asciiVar = sorted(outputFile.variables.keys())

        outputFile.close()

        self.toPlot = {'netcdf': self.output, 'xVar': 'pair', 'yVar': 'S2'}

        # Creates an ASCII version of the NetCDF output file.
        convertNetCDFToASCII(inputFile = self.output,\
                             outputFile = os.path.splitext(self.output)[0] + '.cdl',\
                             variables = asciiVar)
Example #15
    def write(cls, filename, data, header=""):
        '''
        Write a set of output variables into a NetCDF file.
                
        :param filename: the path to the output NetCDF file.
        :type filename: str
        :param data: the data to be written out.
        :type data: dict of Framework.OutputVariables.IOutputVariable
        :param header: the header to add to the output file.
        :type header: str
        '''
                
        filename = os.path.splitext(filename)[0]

        filename = "%s%s" % (filename,cls.extensions[0])
       
        # The NetCDF output file is opened for writing.
        outputFile = NetCDFFile(filename, 'w')
        
        if header:
            outputFile.header = header
        
        # Loop over the OutputVariable instances to write.
        
        for var in data.values():
                                    
            varName = str(var.name).strip().encode('string-escape').replace('/', '|')
            
            # The NetCDF dimensions are created for all the dimensions of the OutputVariable instance.
            dimensions = []
            for i,v in enumerate(var.shape):
                name = str("%s_%d" % (varName,i))
                dimensions.append(name)
                outputFile.createDimension(name, int(v))

            # A NetCDF variable instance is created for the running OutputVariable instance.        
            NETCDFVAR = outputFile.createVariable(varName, numpy.dtype(var.dtype).char, tuple(dimensions))

            # The array stored in the OutputVariable instance is written to the NetCDF file.
            NETCDFVAR.assignValue(var)  

            # All the attributes stored in the OutputVariable instance are written to the NetCDF file.
            for k, v in vars(var).items():
                setattr(NETCDFVAR,str(k),str(v))
        
        # The NetCDF file is closed.
        outputFile.close()
Example #16
def saveNetCDF(data, filename, title):

    file = NetCDFFile(filename, 'w', 'Created ')
    file.title = title
    file.createDimension('TIME', len(data[2]))
    file.createDimension('LENGTH', len(data[1]))
    SF = file.createVariable('SF', N.Float, ('LENGTH', 'TIME'))
    time = file.createVariable('time', N.Float, ('TIME', ))
    qlenght = file.createVariable('qlenght', N.Float, ('LENGTH', ))

    for i in range(len(data[0])):
        for j in range(len(data[1])):
            SF[j, i] = data[2][i][j]
    for i in range(len(data[0])):
        time[i] = data[0][i]
    for i in range(len(data[1])):
        qlenght[i] = data[1][i]
    file.close()
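
# A hedged usage sketch for saveNetCDF; judging from the loops above, data is a
# (times, q-lengths, SF) triple with SF indexed as [time][q]. The values below
# are illustrative, and NetCDFFile/N are assumed to be imported as in the snippet.
times = [0.0, 1.0, 2.0]
qlens = [0.1, 0.2]
sf = [[1.0, 0.9], [0.8, 0.7], [0.6, 0.5]]
saveNetCDF((times, qlens, sf), 'sf.nc', 'structure factor')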
Example #17
def gen_realdata(path, filename, var):
  ncfile = Dataset(path + filename, 'r')
  filename = './realdata/T42_' + var + '-' + filename.split('.')[-2] + '.nc'
  nc = Dataset(filename, 'w')
  tm = time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(time.time()))
  nx = 128
  ny = 64
  grid_size = ncfile.dimensions['n_a']

  # load data
  data = ncfile.variables[var][:, :, :]
  long_name = ncfile.variables[var].long_name
  units = ncfile.variables[var].units
  missing_value = ncfile.variables[var].missing_value
  _FillValue = ncfile.variables[var]._FillValue
  cell_method = ncfile.variables[var].cell_method
  tmp = []
  for i in range(1):
    for j in range(ny):
      for k in range(nx):
        tmp.append(data[i][j][k])
  data = scipy.array(tmp)

  # create dimensions
  nc.createDimension('grid_size', nx * ny)

  # create variables
  data_var = nc.createVariable('data', dtype('d').char, ('grid_size',))
  
  data_var[:] = data
  data_var.long_name = long_name
  data_var.units = units
  data_var.missing_value = missing_value
  data_var._FillValue = _FillValue
  data_var.cell_method = cell_method
 
  string = 'Threp' + var + ' data'
  setattr(nc, 'title', string)
  setattr(nc, 'createdata', tm)

  nc.close()
  ncfile.close()
  print '*** Successfully generated real data file. ***'
Example #18
    def writeNetCDF(self, filename):
        from Scientific.IO.NetCDF import NetCDFFile

        ncfile = NetCDFFile(filename, "w")

        for dim, i in (("x", 0), ("y", 1), ("z", 2)):
            ncfile.createDimension(dim, self.voxels[i])
            ncfile.createVariable(dim, "d", (dim,))[:] = arange(self.voxels[i]) * self.vector[i][i]

        ncfile.createVariable("data", "d", ("x", "y", "z"))
        ncfile.variables["data"][:] = self.data

        ncfile.Natom = self.Natom
        ncfile.origin = self.origin
        for n in range(self.Natom):
            setattr(ncfile, "atom%i.Type" % n, self.atomType[n])
            setattr(ncfile, "atom%i.Pos" % n, self.atomPos[n])

        ncfile.close()
Example #19
    def finalize(self):
        """Finalizes the calculations (e.g. averaging the total term, output files creations ...).
        """

        # The NetCDF output file is opened for writing.
        outputFile = NetCDFFile(self.output, 'w')
        outputFile.title = self.__class__.__name__
        outputFile.jobinfo = self.information + '\nOutput file written on: %s\n\n' % asctime(
        )

        outputFile.createDimension('NFRAMES', self.nFrames)
        TIMES = outputFile.createVariable('time', N.Float, ('NFRAMES', ))
        TIMES[:] = self.times[:]
        TIMES.units = 'ps'

        for oName, seq in self.sequence.items():
            outputFile.createDimension('SEQ%s' % oName, len(seq))

            SEQUENCE = outputFile.createVariable('%s_sequence' % oName,
                                                 N.Int32, ('SEQ%s' % oName, ))
            SEQUENCE[:] = N.array(seq)
            SEQUENCE.units = 'unitless'

            S2 = outputFile.createVariable('%s_s2' % oName, N.Float,
                                           ('SEQ%s' % oName, 'NFRAMES'))
            S2[:] = self.S2[oName][:, :]
            S2.units = 'unitless'

            S2AVG = outputFile.createVariable('%s_s2_timeavg' % oName, N.Float,
                                              ('SEQ%s' % oName, ))
            S2AVG[:] = self.S2[oName][:, :].sum(1) / float(self.nFrames)
            S2AVG.units = 'unitless'

        asciiVar = sorted(outputFile.variables.keys())

        outputFile.close()

        self.toPlot = None

        # Creates an ASCII version of the NetCDF output file.
        convertNetCDFToASCII(inputFile = self.output,\
                             outputFile = os.path.splitext(self.output)[0] + '.cdl',\
                             variables = asciiVar)
Example #20
def load_geoinfo (SrcFilename, DstFilename, GridSize, LatName, LonName):
    # get lat/long values from 400/640 level files
    ncfile = front_end_NetCDF_helper()

    ncfile.open (SrcFilename)

    # read lat array
    SrcLatData = ncfile.read_data (LatName)
    #print SrcLatData

    # read lon array
    SrcLonData = ncfile.read_data (LonName)
    #print SrcLonData

    ncfile.close ()

    # create down-sampled long array    
    DstLonData = N.zeros (GridSize)

    # 
    for DstLonNo in range(0, GridSize):
        SrcLonNo = DstLonNo * 2

        # down-sampling strategy 1: use the average
        #DstLonData[DstLonNo] = (SrcLonData[SrcLonNo] + SrcLonData[SrcLonNo+1])/2.0
        #print DstLonNo, SrcLonNo, DstLonData[DstLonNo], SrcLonData[SrcLonNo], SrcLonData[SrcLonNo+1]

        # start with 0 and skip alternate points
        DstLonData[DstLonNo] = SrcLonData[SrcLonNo]
        #print DstLonNo, SrcLonNo, DstLonData[DstLonNo], SrcLonData[SrcLonNo]

    # write NetCDF file
    # open output file
    dst_ncfile = NetCDFFile (DstFilename, 'w')

    # define dimensions
    dst_lat_dim = dst_ncfile.createDimension (LatName, GridSize)
    dst_lon_dim = dst_ncfile.createDimension (LonName, GridSize)

    # define variables
    dst_lat_var = dst_ncfile.createVariable (LatName, 'd', (LatName,))
    #dst_lat_var.setattr (
    # NetCDFFile.setattr (dst_lat_var, 'attrname', attr_val)
    
    dst_lon_var = dst_ncfile.createVariable (LonName, 'd', (LonName,))

    # write lat data
    dst_lat_var.assignValue(SrcLatData)

    # write lon data
    dst_lon_var.assignValue(DstLonData)

    # close output file
    dst_ncfile.close ()
Example #21
def load_geoinfo(SrcFilename, DstFilename, GridSize, LatName, LonName):
    # get lat/long values from 400/640 level files
    ncfile = front_end_NetCDF_helper()

    ncfile.open(SrcFilename)

    # read lat array
    SrcLatData = ncfile.read_data(LatName)
    # print SrcLatData

    # read lon array
    SrcLonData = ncfile.read_data(LonName)
    # print SrcLonData

    ncfile.close()

    # create down-sampled long array
    DstLonData = N.zeros(GridSize)

    #
    for DstLonNo in range(0, GridSize):
        SrcLonNo = DstLonNo * 2

        # down-sampling strategy 1: use the average
        # DstLonData[DstLonNo] = (SrcLonData[SrcLonNo] + SrcLonData[SrcLonNo+1])/2.0
        # print DstLonNo, SrcLonNo, DstLonData[DstLonNo], SrcLonData[SrcLonNo], SrcLonData[SrcLonNo+1]

        # start with 0 and skip alternate points
        DstLonData[DstLonNo] = SrcLonData[SrcLonNo]
        # print DstLonNo, SrcLonNo, DstLonData[DstLonNo], SrcLonData[SrcLonNo]

    # write NetCDF file
    # open output file
    dst_ncfile = NetCDFFile(DstFilename, "w")

    # define dimensions
    dst_lat_dim = dst_ncfile.createDimension(LatName, GridSize)
    dst_lon_dim = dst_ncfile.createDimension(LonName, GridSize)

    # define variables
    dst_lat_var = dst_ncfile.createVariable(LatName, "d", (LatName,))
    # dst_lat_var.setattr (
    # NetCDFFile.setattr (dst_lat_var, 'attrname', attr_val)

    dst_lon_var = dst_ncfile.createVariable(LonName, "d", (LonName,))

    # write lat data
    dst_lat_var.assignValue(SrcLatData)

    # write lon data
    dst_lon_var.assignValue(DstLonData)

    # close output file
    dst_ncfile.close()
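
# A hedged usage sketch: only the longitude array is down-sampled (every other
# source point); the latitude array is written unchanged, so it is expected to
# already have GridSize entries. Names below are illustrative, and
# front_end_NetCDF_helper / NetCDFFile are assumed to be imported as in the snippet.
load_geoinfo('src_levels.nc', 'dst_levels.nc', 320, 'latitude', 'longitude')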
Example #22
    def write_netcdf(self,filename,rlon,rlat,z,title=None):
	#
	from Scientific.IO.NetCDF import NetCDFFile
	#nc = Dataset(filename,'w',format='NETCDF3_CLASSIC')
	print 'netcdf filename: ', filename
	print rlon[0], rlon[-1], rlat[0], rlat[-1], z.min(), z.max()
	print len(rlon), len(rlat), z.shape
	#nc = netcdf.netcdf_file(filename,'w')
	nc = NetCDFFile(filename,'w')
	if title is None:
	    title=''
	nc.title = title
	nc.source = ''
	nc.createDimension('side',2)
	nc.createDimension('xysize',len(rlon)*len(rlat))
	y_range = nc.createVariable('y_range','d', ('side',))
	y_range.units = 'y'
	y_range[:] = [rlat[0],rlat[-1]]
	x_range = nc.createVariable('x_range','d', ('side',))
	x_range.units = 'x'
	x_range[:] = [rlon[0],rlon[-1]]
	z_range = nc.createVariable('z_range','d', ('side',))
	z_range.units = 'z'
	z_range[:] = [z.min(),z.max()]
	spacing = nc.createVariable('spacing','d',('side',))
	spacing[:] = [rlon[1]-rlon[0],rlat[1]-rlat[0]]
	dimension = nc.createVariable('dimension','i',('side',))
	dimension[:] = [len(rlon),len(rlat)]
	grid_data = nc.createVariable('z','f', ('xysize',))
	grid_data.scale_factor = np.array([1.])
	grid_data.add_offset = np.array([0.])
	grid_data.node_offset = np.array([0])
	q = np.flipud(z)
	q = q.flatten()
	grid_data[:] = q.astype('float32')
	nc.close()
Example #23
def writeNcFile(data, fileName=None, oldStyle=1):
    if not ncOk:
        raise Exception('module Scientific.IO.NetCDF not found, writeNcFile() failed!')
    if not fileName:
        fileName = data['name']+'_weather.nc'
    f = NetCDFFile(fileName, 'w')
    f.createDimension('time', data['time'].shape[0])
    f.file_format = file_format
    if oldStyle:
        f.createDimension('scalar', 1)
    if data.has_key('comment'):
        f.comment = data['comment']
    else:
        f.comment = 'created by MeteonormFile.py (v%s)' % version
    if data.has_key('source_file'):
        f.source_file = str(data['source_file'])
    for vn in ('latitude', 'longitude', 'height'):
        setattr(f, vn, data[vn])
        if oldStyle:
            v = f.createVariable(vn, 'd', ('scalar', ))
            v[:] = [data[vn]]
    setattr(f, 'longitude_0', 15.0*data['timezone'])
    if oldStyle:
        v = f.createVariable('longitude_0', 'd', ('scalar', ))
        v[:] = [15.0 * data['timezone']]
    for vn in variables.keys():
        t = variables[vn][1]
        v = f.createVariable(vn, t, ('time',))
        v[:] = data[vn].astype(t)
        oname = variables[vn][0]
        if oname.startswith('<'): oname = oname[1:]
        if oname.endswith('>'): oname = oname[:-1]
        v.original_name = oname
        v.unit = variables[vn][2]
    f.sync()
    f.close()
Example #24
def write_to_file(CS, nmax_coarse, nmax_fine, nblocks, hw_ph, filename):

    """
    Writes the results of a computation to a netcdf file.
    Takes a Compute_Loop_Function object as input; it is assumed that 
    this object has already computed what we wish to write!
    """

    # --------------------------------------------
    # Write to netcdf file
    # --------------------------------------------
    ncfile = Dataset(filename, "w")

    # --- set various attributes, identifying the parameters of the computation ----
    setattr(ncfile, "mu", CS.mu)
    setattr(ncfile, "beta", CS.beta)
    setattr(ncfile, "acell", acell)
    setattr(ncfile, "Area", Area)
    setattr(ncfile, "nmax_coarse", nmax_coarse)
    setattr(ncfile, "nmax_fine", nmax_fine)
    setattr(ncfile, "n_blocks_coarse_to_fine", nblocks)
    setattr(ncfile, "Green_Gamma_width", CS.Green_Gamma_width)
    setattr(ncfile, "kernel_Gamma_width", CS.kernel_Gamma_width)
    setattr(ncfile, "phonon_frequency", hw_ph)

    # --- Create dimensions ----
    ncfile.createDimension("xy", 2)
    ncfile.createDimension("L_AB", 2)
    ncfile.createDimension("phonon_alpha_kappa", 6)

    # --- Write data ----
    Q = ncfile.createVariable("q_phonon", "d", ("xy",))
    REPH = ncfile.createVariable("Re_E_phonon", "d", ("phonon_alpha_kappa",))
    IEPH = ncfile.createVariable("Im_E_phonon", "d", ("phonon_alpha_kappa",))

    Re_R = ncfile.createVariable("Re_R", "d", ("xy", "L_AB"))
    Im_R = ncfile.createVariable("Im_R", "d", ("xy", "L_AB"))
    Re_I = ncfile.createVariable("Re_I", "d", ("xy", "L_AB"))
    Im_I = ncfile.createVariable("Im_I", "d", ("xy", "L_AB"))

    Q[:] = CS.q
    REPH[:] = N.real(CS.E_ph)
    IEPH[:] = N.imag(CS.E_ph)

    Re_R[:, :] = N.real(CS.Rq)
    Im_R[:, :] = N.imag(CS.Rq)
    Re_I[:, :] = N.real(CS.Iq)
    Im_I[:, :] = N.imag(CS.Iq)

    ncfile.close()

    return
Example #25
 def test(self):
     # Create file
     FileName = '%s_out.nc' % self.Case
     print 'creating %s ...' % FileName
     File = NetCDFFile(FileName,'w')
     File.createDimension('time',None)
     var = File.createVariable('time','f',('time',))
     var.long_name = 'time'
     var.units = ' '
     File.createDimension('lat',len(self.data.lat))
     var = File.createVariable('lat','f',('lat',))
     var.long_name = 'latitude'
     var.units = 'degrees_north'
     var[:] = self.data.lat.astype('f')
     File.createDimension('lon',len(self.data.lon))
     var = File.createVariable('lon','f',('lon',))
     var.long_name = 'longitude'
     var.units = 'degrees_east'
     var[:] = self.data.lon.astype('f')
     for Field in ['SwToa','LwToa','SwToaCf','LwToaCf']:
         var = File.createVariable(Field,'f',('time','lat','lon'))
         var.long_name = ''
         var.units = 'W m-2'
     lmax = 3
     for l in range(lmax):
         print 'doing %s of %s' % (l+1,lmax)
         # get data
         Data = self.getFields(l)[0]
         Data.update(self.Fixed)
         # compute
         self.r(**Data)
         File.variables['SwToa'][l] = self.r['SwToa'].astype('f')
         File.variables['LwToa'][l] = -self.r['LwToa'].astype('f')
         File.variables['SwToaCf'][l] = self.r['SwToaCf'].astype('f')
         File.variables['LwToaCf'][l] = self.r['LwToaCf'].astype('f')
     File.close()
Example #26
    def finalize(self):
        """Finalizes the calculations (e.g. averaging the total term, output files creations ...).
        """

        if self.architecture == 'monoprocessor':
            t = self.trajectory
        else:
            # Load the whole trajectory set.
            t = Trajectory(None, self.trajectoryFilename, 'r')

        orderedAtoms = sorted(t.universe.atomList(),
                              key=operator.attrgetter('index'))
        groups = [
            Collection([orderedAtoms[ind] for ind in g]) for g in self.group
        ]

        # 'frequencies' = 1D Numeric array of the frequencies at which the DOS was computed
        frequencies = N.arange(self.nFrames) / (2.0 * self.nFrames * self.dt)

        # The NetCDF output file is opened for writing.
        outputFile = NetCDFFile(self.output, 'w')
        outputFile.title = self.__class__.__name__
        outputFile.jobinfo = self.information + '\nOutput file written on: %s\n\n' % asctime(
        )

        # Dictionary whose keys are of the form Gi, where i is the group number,
        # and whose entries are the lists of indices of the atoms in each group.
        comp = 1
        for g in self.group:
            outputFile.jobinfo += 'Group %d: %s\n' % (comp,
                                                      [index for index in g])
            comp += 1

        # Some dimensions are created.
        outputFile.createDimension('NFRAMES', self.nFrames)

        # Creation of the NetCDF output variables.
        # The time.
        TIMES = outputFile.createVariable('time', N.Float, ('NFRAMES', ))
        TIMES[:] = self.times[:]
        TIMES.units = 'ps'

        # The resolution function.
        RESOLUTIONFUNCTION = outputFile.createVariable('resolution_function',
                                                       N.Float, ('NFRAMES', ))
        RESOLUTIONFUNCTION[:] = self.resolutionFunction[:]
        RESOLUTIONFUNCTION.units = 'unitless'

        # Creation of the NetCDF output variables.
        # The frequencies.
        FREQUENCIES = outputFile.createVariable('frequency', N.Float,
                                                ('NFRAMES', ))
        FREQUENCIES[:] = frequencies[:]
        FREQUENCIES.units = 'THz'

        OMEGAS = outputFile.createVariable('angular_frequency', N.Float,
                                           ('NFRAMES', ))
        OMEGAS[:] = 2.0 * N.pi * frequencies[:]
        OMEGAS.units = 'rad ps-1'

        avacfTotal = N.zeros((self.nFrames), typecode=N.Float)
        adosTotal = N.zeros((self.nFrames), typecode=N.Float)

        comp = 1
        totalMass = 0.0
        for g in groups:

            AVACF = outputFile.createVariable('avacf-group%s' % comp, N.Float,
                                              ('NFRAMES', ))
            AVACF[:] = self.AVACF[comp][:]
            AVACF.units = 'rad^2*ps^-2'

            N.add(avacfTotal, self.AVACF[comp], avacfTotal)

            ADOS = outputFile.createVariable('ados-group%s' % comp, N.Float,
                                             ('NFRAMES', ))
            ADOS[:] = self.ADOS[comp][:]
            ADOS.units = 'rad^2*ps^-1'

            N.add(adosTotal, g.mass() * self.ADOS[comp], adosTotal)

            comp += 1
            totalMass += g.mass()

        adosTotal *= 0.5 * self.dt / (self.nGroups * totalMass)

        AVACF = outputFile.createVariable('avacf-total', N.Float,
                                          ('NFRAMES', ))
        AVACF[:] = avacfTotal
        AVACF.units = 'rad^2*ps^-2'

        ADOS = outputFile.createVariable('ados-total', N.Float, ('NFRAMES', ))
        ADOS[:] = adosTotal
        ADOS.units = 'rad^2*ps^-1'

        asciiVar = sorted(outputFile.variables.keys())

        outputFile.close()

        self.toPlot = {
            'netcdf': self.output,
            'xVar': 'angular_frequency',
            'yVar': 'ados-total'
        }

        # Create an ASCII version of the NetCDF output file.
        convertNetCDFToASCII(inputFile = self.output,\
                             outputFile = os.path.splitext(self.output)[0] + '.cdl',\
                             variables = asciiVar)
Example #27
    setattr(Cncfile,'k_nmax_block_smooth',knmax_block_smooth)

    setattr(Cncfile,'k_nmax_coarse_singular',knmax_coarse_singular)
    setattr(Cncfile,'k_nmax_fine_singular',knmax_fine_singular)
    setattr(Cncfile,'k_nmax_block_singular',knmax_block_singular)



    setattr(Cncfile,'q_nmax_coarse',nq[0])
    setattr(Cncfile,'q_nmax_fine',nq[1])
    setattr(Cncfile,'q_nmax_block',nq[2])



    # --- Create dimensions ----
    Cncfile.createDimension("number_of_frequencies",list_hw.shape[0])
    Cncfile.createDimension("xy",2)
    Cncfile.createDimension("phonon_alpha_kappa",6)
    Cncfile.createDimension("nu",len(list_nu))
    Cncfile.createDimension("number_of_q_points",Nph)


    # --- Write data ----
    HW     = Cncfile.createVariable("list_hw",'d',('number_of_frequencies',))
    HW[:]  = N.real(list_hw)

    NU     = Cncfile.createVariable("phonon_nu",'d',('nu',))
    NU[:]  = list_nu


    Q = Cncfile.createVariable( "list_q",'d',('number_of_q_points','xy'))
Example #28
    def write_output(self):
        local_dimension_directory={}
	if self.format == 'netCDF':
	    # open a new netCDF file for writing.
	    ncfile = Dataset(self.file,'w')

	    ndim = len(self.target_time)
	    #--print 'ndim: ', ndim
	    ncfile.createDimension('time', ndim)

	    # create variables
	    # first argument is name of variable, second is datatype, third is
	    # a tuple with the names of dimensions.
	    time = ncfile.createVariable('time',dtype('float64').char,('time', ))
	    lats = ncfile.createVariable('latitude',dtype('float64').char,('time', ))
	    lons = ncfile.createVariable('longitude',dtype('float64').char,('time', ))

	    time.units = 'second (since midnight of 1/1/1970)'
	    lats.units = 'degree'
	    lons.units = 'degree'

	    # create variables for levels
	    # first argument is name of variable, second is datatype, third is
	    # a tuple with the names of dimensions.
	    lkeys = self.target_levels.keys()
	    #--print 'lkeys: ', lkeys
	    if len(lkeys) > 0:
		self.lvars = [0]*len(lkeys)
	    kk = 0
	    for k in lkeys:
		#--print 'k: ', k
		kname = k.replace(' ', '_')
		#--print 'kk: ', kk, ', kname: ', kname

		atuple = self.target_levels[k]
		attribute = atuple[0]
		#--print 'attribute: ', attribute
		local_level = atuple[1]
		### print 'local_level: ', local_level
		#--print 'local_level.shape: ', local_level.shape

		lc = 'lc-' + str(kk)
		#--print 'lc: ', lc

                if attribute.has_key('dimension1'):
                       lc = attribute['dimension1']
                       if (local_dimension_directory.has_key(lc) == False):
                          ncfile.createDimension(lc, len(local_level))
                          local_dimension_directory[lc] = len(local_level) 
                else:
		       ncfile.createDimension(lc, len(local_level))

		self.lvars[kk] = ncfile.createVariable(kname, dtype('float64').char, (lc, ))

		if attribute.has_key('units'):
		    self.lvars[kk].units = attribute['units']
		#else:
		#    self.lvars[kk].units = ''

		if attribute.has_key('long_name'):
		    self.lvars[kk].long_name = attribute['long_name']
		kk += 1

	    # end of for k loop

	    # write data to variables for levels
	    for kk in range(len(lkeys)):
		### print 'kk: ', kk
		### print 'self.target_levels[lkeys[kk]][1].shape: ', self.target_levels[lkeys[kk]][1].shape
		### print 'len(self.target_levels[lkeys[kk]][1]): ', len(self.target_levels[lkeys[kk]][1])
		self.target_levels[lkeys[kk]][1].shape = (len(self.target_levels[lkeys[kk]][1]), )
		self.lvars[kk][:] = self.target_levels[lkeys[kk]][1]
	    # end of for kk loop

	    # create variables for data
	    # first argument is name of variable, second is datatype, third is
	    # a tuple with the names of dimensions.
	    keys = self.target_data.keys()
	    #--print 'keys: ', keys
	    self.vars = [0]*len(keys)
	    kk = 0
	    for k in keys:
		#--print 'k: ', k
		kname = k.replace(' ', '_')
		#--print 'kk: ', kk, ', kname: ', kname

		#--print 'attribute keys: ', self.target_data[k][0].keys()
		s = self.target_data[k][1].shape
		d2 = len(s)
		cc = 'cc-' + str(kk)
		#--print 'cc: ', cc
		if d2 == 1:
		    #--print '--- 1D data'
		    self.vars[kk] = ncfile.createVariable(kname, dtype('float64').char,('time', ))
		elif d2 == 2:
		    #--print '--- 2D data'
                    if self.target_data[k][0].has_key('dimension1'):
                       local_dimension = self.target_data[k][0]['dimension1']
                       cc = local_dimension
                       if (local_dimension_directory.has_key(local_dimension) == False):
                          #print 'local dimension =', local_dimension
                          ncfile.createDimension(local_dimension, s[1])
                          local_dimension_directory[local_dimension] = s[1]
                    else: 
		      ncfile.createDimension(cc, s[1])
		    self.vars[kk] = ncfile.createVariable(kname, dtype('float64').char,('time', cc))
		elif d2 == 3:
		    #--print '--- 3D data'
		    cc1 = cc+'1'
		    cc2 = cc+'2'
		    ncfile.createDimension(cc1, s[1])
		    ncfile.createDimension(cc2, s[2])
		    self.vars[kk] = ncfile.createVariable(kname, dtype('float64').char,('time', cc1, cc2))
		elif d2 == 4:
		    #--print '--- 4D data'
		    cc1 = cc+'1'
		    cc2 = cc+'2'
		    cc3 = cc+'3'
		    ncfile.createDimension(cc1, s[1])
		    ncfile.createDimension(cc2, s[2])
		    ncfile.createDimension(cc3, s[3])
		    self.vars[kk] = ncfile.createVariable(kname, dtype('float64').char,('time', cc1, cc2, cc3))

		if self.target_data[k][0].has_key('units'):
		    self.vars[kk].units = self.target_data[k][0]['units']
		#else:
		#    self.vars[kk].units = ''
		if self.target_data[k][0].has_key('long_name'):
		    self.vars[kk].long_name = self.target_data[k][0]['long_name']
		#else:
		#    self.vars[kk].long_name = ''


                # add missing_value in the variable attribute
                self.vars[kk].missing_value = UT.NAN

                # add invalid_data in the variable attribute from collocation
                self.vars[kk].collocation_invalid_value = self.invalid_data

		kk += 1

	    # end of for k loop

	    # write data to variables
	    self.target_time.shape = (ndim, )
	    self.target_lat.shape = (ndim, )
	    self.target_lon.shape = (ndim, )

	    time[:] = self.target_time
	    lats[:] = self.target_lat
	    lons[:] = self.target_lon

	    #--print 'in backend: self.target_data[keys[0]][1]: ', self.target_data[keys[0]][1]

	    # write data to variables for data
	    for kk in range(len(keys)):
		#--print 'kk: ', kk
		#--print 'self.target_data[keys[kk]][1].shape: ', self.target_data[keys[kk]][1].shape
		s3 = self.target_data[keys[kk]][1].shape
		d3 = len(s3)
		if d3 == 1:
		    self.target_data[keys[kk]][1].shape = (ndim, )
		elif d3 == 2:
		    self.target_data[keys[kk]][1].shape = (ndim, s3[1])
		elif d3 == 3:
		    self.target_data[keys[kk]][1].shape = (ndim, s3[1], s3[2])
		elif d3 == 4:
		    self.target_data[keys[kk]][1].shape = (ndim, s3[1], s3[2], s3[3])

		#--print 'self.target_data[keys[kk]][1].shape: ', self.target_data[keys[kk]][1].shape
		self.vars[kk][:] = self.target_data[keys[kk]][1]

	    # end of for kk loop

	    ncfile.close()
Example #29
class _ParNetCDFFile(ParBase):
    """
    Distributed netCDF file

    A ParNetCDFFile object acts as much as possible like a NetCDFFile object.
    Variables become ParNetCDFVariable objects, which behave like
    distributed sequences. Variables that use the dimension named by
    |split_dimension| are automatically distributed among the processors
    such that each treats only one slice of the whole file.
    """
    def __parinit__(self,
                    pid,
                    nprocs,
                    filename,
                    split_dimension,
                    mode='r',
                    local_access=False):
        """
        @param filename: the name of the netCDF file
        @type filename: C{str}
        @param split_dimension: the name of the dimension along which the data
                                is distributed over the processors
        @type split_dimension: C{str}
        @param mode: read ('r'), write ('w'), or append ('a')
        @type mode: C{str}
        @param local_access: if C{False}, processor 0 is the only one to
                             access the file, all others communicate with
                             processor 0. If C{True} (only for reading), each
                             processor accesses the file directly. In the
                             latter case, the file must be accessible on all
                             processors under the same name. A third mode is
                             'auto', which uses some heuristics to decide
                             if the file is accessible everywhere: it checks
                             for existence of the file, then compares
                             the size on all processors, and finally verifies
                             that the same variables exist everywhere, with
                             identical names, types, and sizes.
        @type local_access: C{bool} or C{str}
        """
        if mode != 'r':
            local_access = 0
        self.pid = pid
        self.nprocs = nprocs
        self.filename = filename
        self.split = split_dimension
        self.local_access = local_access
        self.read_only = mode == 'r'
        if local_access or pid == 0:
            self.file = NetCDFFile(filename, mode)
            try:
                length = self.file.dimensions[split_dimension]
                if length is None:
                    length = -1
            except KeyError:
                length = None
            variables = {}
            for name, var in self.file.variables.items():
                variables[name] = (name, var.dimensions)
                if length < 0 and split_dimension in var.dimensions:
                    index = list(var.dimensions).index(split_dimension)
                    length = var.shape[index]
        else:
            self.file = None
            self.split = split_dimension
            length = None
            variables = None
        if not local_access:
            length = self.broadcast(length)
            variables = self.broadcast(variables)
        if length is not None:
            self._divideData(length)
        self.variables = {}
        for name, var in variables.items():
            self.variables[name] = _ParNetCDFVariable(self, var[0], var[1],
                                                      split_dimension)

    def __repr__(self):
        return repr(self.filename)

    def close(self):
        if self.local_access or self.pid == 0:
            self.file.close()

    def createDimension(self, name, length):
        if name == self.split:
            if length is None:
                raise ValueError("Split dimension cannot be unlimited")
            self._divideData(length)
        if self.pid == 0:
            self.file.createDimension(name, length)

    def createVariable(self, name, typecode, dimensions):
        if self.pid == 0:
            var = self.file.createVariable(name, typecode, dimensions)
            dim = var.dimensions
        else:
            dim = 0
        name, dim = self.broadcast((name, dim))
        self.variables[name] = _ParNetCDFVariable(self, name, dim, self.split)
        return self.variables[name]

    def _divideData(self, length):
        chunk = (length + self.nprocs - 1) / self.nprocs
        self.first = min(self.pid * chunk, length)
        self.last = min(self.first + chunk, length)
        if (not self.local_access) and self.pid == 0:
            self.parts = []
            for pid in range(self.nprocs):
                first = pid * chunk
                last = min(first + chunk, length)
                self.parts.append((first, last))

    def sync(self):
        if self.pid == 0:
            self.file.sync()

    flush = sync
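
# A hedged usage sketch (comments only): __parinit__ is normally invoked by the
# parallel framework, which supplies pid and nprocs, so the direct construction
# shown here is illustrative rather than a working call.
#
#   f = _ParNetCDFFile(pid, nprocs, 'trajectory.nc', 'step', mode='r')
#   conf = f.variables['configuration']   # behaves like a distributed sequence
#   f.close()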
Example #30
    def write(self):
        ncfile = Dataset(self.fname, 'w')
        tm = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))

        # set dimension info
        ncfile.createDimension('src_grid_size', self.obj.src_grid_size)
        ncfile.createDimension('dst_grid_size', self.obj.dst_grid_size)
        ncfile.createDimension('n_wgt', self.n_wgt)
        ncfile.createDimension('src_grid_rank', self.obj.src_grid_rank)
        ncfile.createDimension('dst_grid_rank', self.obj.dst_grid_rank)
        ncfile.createDimension('num_wgts', 1)
        ncfile.createDimension('src_grid_corners', self.obj.src_grid_corners)
        ncfile.createDimension('dst_grid_corners', self.obj.dst_grid_corners)

        # set variable info
        src_grid_dims_var = ncfile.createVariable('src_grid_dims',
                                                  dtype('int32').char,
                                                  ('src_grid_rank', ))
        dst_grid_dims_var = ncfile.createVariable('dst_grid_dims',
                                                  dtype('int32').char,
                                                  ('dst_grid_rank', ))
        src_grid_center_lat_var = ncfile.createVariable(
            'src_grid_center_lat',
            dtype('d').char, ('src_grid_size', ))
        src_grid_center_lon_var = ncfile.createVariable(
            'src_grid_center_lon',
            dtype('d').char, ('src_grid_size', ))
        dst_grid_center_lat_var = ncfile.createVariable(
            'dst_grid_center_lat',
            dtype('d').char, ('dst_grid_size', ))
        dst_grid_center_lon_var = ncfile.createVariable(
            'dst_grid_center_lon',
            dtype('d').char, ('dst_grid_size', ))
        src_grid_imask_var = ncfile.createVariable('src_grid_imask',
                                                   dtype('i').char,
                                                   ('src_grid_size', ))
        dst_grid_imask_var = ncfile.createVariable('dst_grid_imask',
                                                   dtype('i').char,
                                                   ('dst_grid_size', ))
        remap_src_indx_var = ncfile.createVariable('remap_src_indx',
                                                   dtype('i').char,
                                                   ('n_wgt', ))
        remap_dst_indx_var = ncfile.createVariable('remap_dst_indx',
                                                   dtype('i').char,
                                                   ('n_wgt', ))
        remap_matrix_var = ncfile.createVariable('remap_matrix',
                                                 dtype('d').char, ('n_wgt', ))

        src_grid_dims_var[:] = self.obj.src_grid_dims
        dst_grid_dims_var[:] = self.obj.dst_grid_dims
        src_grid_center_lat_var[:] = np.array(
            self.obj.original_src_grid_center_lat)
        src_grid_center_lon_var[:] = np.array(
            self.obj.original_src_grid_center_lon)
        dst_grid_center_lat_var[:] = np.array(self.obj.dst_grid_center_lat)
        dst_grid_center_lon_var[:] = np.array(self.obj.dst_grid_center_lon)
        #src_grid_imask_var[:] = np.array(self.obj.original_src_grid_imask)
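        # cast the index/mask lists to 32-bit integers explicitly before writing
        # them into the 'i' variables created above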
        buffer1 = [np.int32(i) for i in self.obj.original_src_grid_imask]
        src_grid_imask_var[:] = np.array(buffer1)
        buffer2 = [np.int32(i) for i in self.obj.dst_grid_imask]
        dst_grid_imask_var[:] = np.array(buffer2)
        #dst_grid_imask_var[:] = np.array(self.obj.dst_grid_imask)
        buffer3 = [np.int32(i) for i in self.obj.remap_src_indx]
        remap_src_indx_var[:] = np.array(buffer3)
        #remap_src_indx_var[:] = np.array(self.obj.remap_src_indx)
        buffer4 = [np.int32(i) for i in self.obj.remap_dst_indx]
        remap_dst_indx_var[:] = np.array(buffer4)
        #remap_dst_indx_var[:] = np.array(self.obj.remap_dst_indx)
        remap_matrix_var[:] = np.array(self.obj.remap_matrix_compact)

        setattr(ncfile, 'title', 'Threp ' + self.fname)
        setattr(ncfile, 'createdate', tm)
        setattr(ncfile, 'map_method', self.method)
        setattr(ncfile, 'conventions', 'Threp')
        setattr(ncfile, 'src_grid', self.obj.src_grid_name)
        setattr(ncfile, 'dst_grid', self.obj.dst_grid_name)

        ncfile.close()
        print '*** Successfully generated remap matrix file. ***'
Пример #31
0
#         _                                         _  cp/Rd
#        | g p0^Rd/cp  /  1        1  \              |
# p(z) = | ---------  | ------ - ----- | + p_c^Rd/cp |
#        |_  cp st     \ th(z)   th_c /             _|
#                        
def pres(z, st, th_c, p_c):
  return (
    g*p0**(Rdcp)/cp/st*(1/theta(z,st,th_c)-1/th_c)+p_c**(Rdcp)
  )**(1./Rdcp)
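
# A hedged usage sketch (p_surf is a placeholder for the reference pressure p_c,
# not defined in this excerpt): sample the analytic hydrostatic profile at the
# layer interfaces, e.g.
#   p_int = [pres(lev * dz, st0, th_surf, p_surf) for lev in range(nz)]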

############################################################################
# first: creating a netCDF file with the initial condition                 #
############################################################################
f = NetCDFFile('ini.nc', 'w')

f.createDimension('X', nx)
f.createDimension('Y', 1) #TODO: should not be needed
f.createDimension('Z', 1) #TODO: should not be needed
f.createDimension('dlevel', nz - 1)

v_dp = [None]*nz
v_qx = [None]*nz
v_qy = [None]*nz
for lev in range(nz) :
  v_dp[lev] = f.createVariable('dp_' + str(lev), 'd', ('X',))
  v_qx[lev] = f.createVariable('qx_' + str(lev), 'd', ('X',))

# potential temperatures of the layers (characteristic values)
v_dtheta = f.createVariable('dtheta', 'd', ('dlevel',))
for lev in range(nz-1) :
  v_dtheta[lev] = theta((lev+1.5) * dz, st0, th_surf) - theta((lev+.5) * dz, st0, th_surf) 
Пример #32
0
def create(missingdata = False, missingdimension = False, missingvariable = False, incorrectdimension = False, incorrectvariable = False):

  if (missingdata):
    filename = 'missingdata.nc'
    description = ', with missing data.'
  elif (missingdimension):
    filename = 'missingdimension.nc'
    description = ', with a missing dimension.'
  elif (missingvariable):
    filename = 'missingvariable.nc'
    description = ', with a missing variable.'
  elif (incorrectdimension):
    filename = 'incorrectdimension.nc'
    description = ', with an incorrect dimension label.'
  elif (incorrectvariable):
    filename = 'incorrectvariable.nc'
    description = ', with an incorrect variable label.'
  else:
    filename = 'valid.nc'
    description = '.'

  print "Creating " + filename + description

  f = NetCDFFile(filename, 'w')
  f.description = 'Example free surface height' + description
  
  if (missingdata):
    offset = -0.4
  else:
    offset = 0.0

  x = arange(-1.2 + offset, 1.21 + offset, 0.2)
  y = arange(-1.2, 1.21, 0.2)
  h = zeros((len(x),len(y)))

  for i in range(len(x)):
    for j in range(len(y)):
      # Nice ordering netCDF API - y,x !
      h[j,i] = x[i] * y[j]

  if (not incorrectdimension):
    xdimlabel = 'x'
  else:
    xdimlabel = 'lat'
  ydimlabel = 'y'
  if (not incorrectvariable):
    zdimlabel = 'z'
  else:
    zdimlabel = 'height'

  print xdimlabel, ydimlabel, zdimlabel

  # dimensions
  f.createDimension(xdimlabel, len(x))
  if (not missingdimension):
    f.createDimension(ydimlabel, len(y))

  # variables
  fx = f.createVariable(xdimlabel, 'd', (xdimlabel,))
  fx[:] = x
  if (not missingdimension):
    fy = f.createVariable(ydimlabel, 'd', (ydimlabel,))
    fy[:] = y
    if (not missingvariable):
      fh = f.createVariable(zdimlabel, 'd', (xdimlabel, ydimlabel,))
      fh[:] = h
  else:
    fh = f.createVariable(zdimlabel, 'd', (xdimlabel,))
    fh[:] = x

  f.close()
Пример #33
0
import os
import sys

from Scientific.IO.NetCDF import NetCDFFile
import numpy as np

sys.path.append(os.environ['HOME'] + "/cmaq_forcing/forcing/src/")
from bcolours import bcolours as bc

src_file="FWD.0704"
var_name="O3"

src=NetCDFFile(src_file, 'r')
src_var = src.variables[var_name]

# Create a new file
dest = NetCDFFile("%s.%s"%(src_file, var_name), 'w')
# Create the dimensions
for d,v in src.dimensions.iteritems():
	dest.createDimension(d, v)

# Create variable
dest_var = dest.createVariable(var_name, 'f', ('TSTEP', 'LAY', 'ROW', 'COL'))
dest_var[:] = src_var[:]
dest_var = dest.createVariable('TFLAG', 'i', ('TSTEP', 'VAR', 'DATE-TIME'))
dest_var[:] = src.variables['TFLAG']


# Copy SDATE
setattr(dest, 'SDATE', getattr(src, 'SDATE'))

src.close()
dest.close()
Пример #34
0
    def write_output(self):
        local_dimension_directory = {}
        if self.format == "netCDF":
            # open a new netCDF file for writing.
            ncfile = Dataset(self.file, "w")

            ndim = len(self.target_time)
            # --print 'ndim: ', ndim
            ncfile.createDimension("time", ndim)

            # create variables
            # first argument is name of variable, second is datatype, third is
            # a tuple with the names of dimensions.
            time = ncfile.createVariable("time", dtype("float64").char, ("time",))
            lats = ncfile.createVariable("latitude", dtype("float64").char, ("time",))
            lons = ncfile.createVariable("longitude", dtype("float64").char, ("time",))

            time.units = "second (since midnight of 1/1/1970)"
            lats.units = "degree"
            lons.units = "degree"

            # create variables for levels
            # first argument is name of variable, second is datatype, third is
            # a tuple with the names of dimensions.
            lkeys = self.target_levels.keys()
            # --print 'lkeys: ', lkeys
            if len(lkeys) > 0:
                self.lvars = [0] * len(lkeys)
            kk = 0
            for k in lkeys:
                # --print 'k: ', k
                kname = k.replace(" ", "_")
                # --print 'kk: ', kk, ', kname: ', kname

                atuple = self.target_levels[k]
                attribute = atuple[0]
                # --print 'attribute: ', attribute
                local_level = atuple[1]
                ### print 'local_level: ', local_level
                # --print 'local_level.shape: ', local_level.shape

                lc = "lc-" + str(kk)
                # --print 'lc: ', lc

                if attribute.has_key("dimension1"):
                    lc = attribute["dimension1"]
                    if local_dimension_directory.has_key(lc) == False:
                        ncfile.createDimension(lc, len(local_level))
                        local_dimension_directory[lc] = len(local_level)
                else:
                    ncfile.createDimension(lc, len(local_level))

                self.lvars[kk] = ncfile.createVariable(kname, dtype("float64").char, (lc,))

                if attribute.has_key("units"):
                    self.lvars[kk].units = attribute["units"]
                    # else:
                    #    self.lvars[kk].units = ''

                if attribute.has_key("long_name"):
                    self.lvars[kk].long_name = attribute["long_name"]
                kk += 1

            # end of for k loop

            # write data to variables for levels
            for kk in range(len(lkeys)):
                ### print 'kk: ', kk
                ### print 'self.target_levels[lkeys[kk]][1].shape: ', self.target_levels[lkeys[kk]][1].shape
                ### print 'len(self.target_levels[lkeys[kk]][1]): ', len(self.target_levels[lkeys[kk]][1])
                self.target_levels[lkeys[kk]][1].shape = (len(self.target_levels[lkeys[kk]][1]),)
                self.lvars[kk][:] = self.target_levels[lkeys[kk]][1]
            # end of for kk loop

            # create variables for data
            # first argument is name of variable, second is datatype, third is
            # a tuple with the names of dimensions.
            keys = self.target_data.keys()
            # --print 'keys: ', keys
            self.vars = [0] * len(keys)
            kk = 0
            for k in keys:
                # --print 'k: ', k
                kname = k.replace(" ", "_")
                # --print 'kk: ', kk, ', kname: ', kname

                # --print 'attribute keys: ', self.target_data[k][0].keys()
                s = self.target_data[k][1].shape
                d2 = len(s)
                cc = "cc-" + str(kk)
                # --print 'cc: ', cc
                if d2 == 1:
                    # --print '--- 1D data'
                    self.vars[kk] = ncfile.createVariable(kname, dtype("float64").char, ("time",))
                elif d2 == 2:
                    # --print '--- 2D data'
                    if self.target_data[k][0].has_key("dimension1"):
                        local_dimension = self.target_data[k][0]["dimension1"]
                        cc = local_dimension
                        if local_dimension_directory.has_key(local_dimension) == False:
                            # print 'local dimension =', local_dimension
                            ncfile.createDimension(local_dimension, s[1])
                            local_dimension_directory[local_dimension] = s[1]
                    else:
                        ncfile.createDimension(cc, s[1])
                    self.vars[kk] = ncfile.createVariable(kname, dtype("float64").char, ("time", cc))
                elif d2 == 3:
                    # --print '--- 3D data'
                    cc1 = cc + "1"
                    cc2 = cc + "2"
                    ncfile.createDimension(cc1, s[1])
                    ncfile.createDimension(cc2, s[2])
                    self.vars[kk] = ncfile.createVariable(kname, dtype("float64").char, ("time", cc1, cc2))
                elif d2 == 4:
                    # --print '--- 4D data'
                    cc1 = cc + "1"
                    cc2 = cc + "2"
                    cc3 = cc + "3"
                    ncfile.createDimension(cc1, s[1])
                    ncfile.createDimension(cc2, s[2])
                    ncfile.createDimension(cc3, s[3])
                    self.vars[kk] = ncfile.createVariable(kname, dtype("float64").char, ("time", cc1, cc2, cc3))

                if self.target_data[k][0].has_key("units"):
                    self.vars[kk].units = self.target_data[k][0]["units"]
                    # else:
                    #    self.vars[kk].units = ''
                if self.target_data[k][0].has_key("long_name"):
                    self.vars[kk].long_name = self.target_data[k][0]["long_name"]
                    # else:
                    #    self.vars[kk].long_name = ''

                    # add missing_value in the variable attribute
                self.vars[kk].missing_value = UT.NAN

                # add invalid_data in the variable attribute from collocation
                self.vars[kk].collocation_invalid_value = self.invalid_data

                kk += 1

            # end of for k loop

            # write data to variables
            self.target_time.shape = (ndim,)
            self.target_lat.shape = (ndim,)
            self.target_lon.shape = (ndim,)

            time[:] = self.target_time
            lats[:] = self.target_lat
            lons[:] = self.target_lon

            # --print 'in backend: self.target_data[keys[0]][1]: ', self.target_data[keys[0]][1]

            # write data to variables for data
            for kk in range(len(keys)):
                # --print 'kk: ', kk
                # --print 'self.target_data[keys[kk]][1].shape: ', self.target_data[keys[kk]][1].shape
                s3 = self.target_data[keys[kk]][1].shape
                d3 = len(s3)
                if d3 == 1:
                    self.target_data[keys[kk]][1].shape = (ndim,)
                elif d3 == 2:
                    self.target_data[keys[kk]][1].shape = (ndim, s3[1])
                elif d3 == 3:
                    self.target_data[keys[kk]][1].shape = (ndim, s3[1], s3[2])
                elif d3 == 4:
                    self.target_data[keys[kk]][1].shape = (ndim, s3[1], s3[2], s3[3])

                    # --print 'self.target_data[keys[kk]][1].shape: ', self.target_data[keys[kk]][1].shape
                self.vars[kk][:] = self.target_data[keys[kk]][1]

                # end of for kk loop

            ncfile.close()
Пример #35
0
def AsapFileToTrajectory(oldfile, newfile, firstframe=None, lastframe=None):
    # Check if input file is a filename or a NetCDF file
    if isinstance(oldfile, types.StringTypes):
        oldfile = NetCDFFile(oldfile)

    pos = oldfile.variables['cartesianPositions']  # Must be present
    (nframes, natoms, three) = pos.shape
    print natoms, three, nframes
    firstframe = normalize(firstframe, nframes, 0)
    lastframe = normalize(lastframe, nframes, -1)
    if lastframe < firstframe:
        raise ValueError, "No frames to copy, giving up."

    print "Preparing to copy frames", firstframe, "to", lastframe
    # Now open the output file, and define the variables.
    if isinstance(newfile, types.StringTypes):
        newfile = NetCDFFile(newfile, "w")
    oncevars = []
    manyvars = []
    for v in oldfile.variables.keys():
        try:
            newname = old_names[v]
        except KeyError:
            print "WARNING: Skipping data named", v
            continue
        if new_names[newname][2]:
            shape = new_names[newname][0]
            oncevars.append((v, newname))
        else:
            shape = ("unlim",) + new_names[newname][0]
            manyvars.append((v, newname))
        shape2 = []
        for d in shape:
            if isinstance(d, types.IntType):
                n = d
                d = str(d)
            elif d == 'natoms':
                n = natoms
            elif d == 'unlim':
                n = None
            else:
                raise RuntimeError, "Unknown dimension "+str(d)
            if not newfile.dimensions.has_key(d):
                newfile.createDimension(d, n)
            shape2.append(d)
        print v, "-->", newname, " shape", shape2
        var = newfile.createVariable(newname, oldfile.variables[v].typecode(),
                                     tuple(shape2))
        var.once = new_names[newname][2]
        var.units = new_names[newname][3]
        
    # Now copy the data
    print "Copying global data"
    newfile.history = 'ASE trajectory'
    newfile.version = '0.1'
    newfile.lengthunit = 'Ang'
    newfile.energyunit = 'eV'
    for oldname, newname in oncevars:
        newfile.variables[newname][:] = oldfile.variables[oldname][:]
    
    for n in range(firstframe, lastframe+1):
        print "Copying frame", n
        for oldname, newname in manyvars:
            newfile.variables[newname][n] = oldfile.variables[oldname][n]
    newfile.close()
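
# A hedged usage sketch (file names are placeholders, not from the original source):
#   AsapFileToTrajectory('old_asap.nc', 'new_ase.traj.nc', firstframe=0, lastframe=100)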
Пример #36
0
class ETSFWriter:
    def __init__(self, filename):
        from Scientific.IO.NetCDF import NetCDFFile
        self.nc = NetCDFFile(filename, 'w')

        self.nc.file_format = 'ETSF Nanoquanta'
        self.nc.file_format_version = np.array([3.3], dtype=np.float32)
        self.nc.Conventions = 'http://www.etsf.eu/fileformats/'
        self.nc.history = 'File generated by ASE'

    def write_atoms(self, atoms):
        specie_a = np.empty(len(atoms), np.int32)
        nspecies = 0
        species = {}
        numbers = []
        for a, Z in enumerate(atoms.get_atomic_numbers()):
            if Z not in species:
                species[Z] = nspecies
                nspecies += 1
                numbers.append(Z)
            specie_a[a] = species[Z]
            
        dimensions = [
            ('character_string_length', 80),
            ('number_of_atoms', len(atoms)),
            ('number_of_atom_species', nspecies),
            ('number_of_cartesian_directions', 3),
            ('number_of_reduced_dimensions', 3),
            ('number_of_vectors', 3)]

        for name, size in dimensions:
            self.nc.createDimension(name, size)

        var = self.add_variable
        
        var('primitive_vectors',
            ('number_of_vectors', 'number_of_cartesian_directions'),
            atoms.cell / Bohr, units='atomic units')
        var('atom_species', ('number_of_atoms',), specie_a + 1)
        var('reduced_atom_positions',
            ('number_of_atoms', 'number_of_reduced_dimensions'),
            atoms.get_scaled_positions())
        var('atomic_numbers', ('number_of_atom_species',),
            np.array(numbers, dtype=float))

    def close(self):
        self.nc.close()
    
    def add_variable(self, name, dims, data=None, **kwargs):
        if data is None:
            char = 'd'
        else:
            if isinstance(data, np.ndarray):
                char = data.dtype.char
            elif isinstance(data, float):
                char = 'd'
            elif isinstance(data, int):
                char = 'i'
            else:
                char = 'c'

        var = self.nc.createVariable(name, char, dims)
        for attr, value in kwargs.items():
            setattr(var, attr, value)
        if data is not None:
            if len(dims) == 0:
                var.assignValue(data)
            else:
                if char == 'c':
                    if len(dims) == 1:
                        var[:len(data)] = data
                    else:
                        for i, x in enumerate(data):
                            var[i, :len(x)] = x
                else:
                    var[:] = data
        return var
Пример #37
0
#!/usr/bin/env python

# Create a 2D netCDF file.
from Scientific.IO.NetCDF import NetCDFFile as Dataset
#from netCDF4_classic import Dataset
from numpy import arange, dtype  # array module from http://numpy.scipy.org

# the output array to write will be nx x ny
nx = 6
ny = 12
# open a new netCDF file for writing.
ncfile = Dataset('simple_xy.nc', 'w')
# create the output data.
data_out = arange(nx * ny)  # 1d array
data_out.shape = (nx, ny)  # reshape to 2d array
# create the x and y dimensions.
ncfile.createDimension('x', nx)
ncfile.createDimension('y', ny)
# create the variable (4 byte integer in this case)
# first argument is name of variable, second is datatype, third is
# a tuple with the names of dimensions.
data = ncfile.createVariable('data', dtype('int32').char, ('x', 'y'))
# write data to variable.
data[:] = data_out
# close the file.
ncfile.close()
print '*** SUCCESS writing example file simple_xy.nc!'
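
# A hedged read-back check (an assumed follow-up, not part of the original example):
# reopen the file and confirm that the stored array has the expected shape.
ncfile = Dataset('simple_xy.nc', 'r')
data_in = ncfile.variables['data'][:]
assert data_in.shape == (nx, ny)
ncfile.close()
print '*** Read back array of shape', data_in.shape, 'from simple_xy.nc'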
Пример #38
0
from Scientific.IO.NetCDF import NetCDFFile
from numpy import arange, zeros

import height

f = NetCDFFile('height.nc', 'w')
f.description = 'Example free surface height.'

x = arange(-1.2, 1.21, 0.2)
y = arange(-1.2, 1.21, 0.2)
h = zeros((len(x), len(y)))

for i in range(len(x)):
    for j in range(len(y)):
        # Nice ordering netCDF API - y,x !
        h[j, i] = height.function([x[i], y[j]])

# dimensions
f.createDimension('x', len(x))
f.createDimension('y', len(y))

# variables
fx = f.createVariable('x', 'd', ('x', ))
fy = f.createVariable('y', 'd', ('y', ))
fh = f.createVariable('z', 'd', (
    'x',
    'y',
))

fx[:] = x
fy[:] = y
fh[:] = h

f.close()
Пример #39
0
def create(missingdata=False,
           missingdimension=False,
           missingvariable=False,
           incorrectdimension=False,
           incorrectvariable=False):

    if (missingdata):
        filename = 'missingdata.nc'
        description = ', with missing data.'
    elif (missingdimension):
        filename = 'missingdimension.nc'
        description = ', with a missing dimension.'
    elif (missingvariable):
        filename = 'missingvariable.nc'
        description = ', with a missing variable.'
    elif (incorrectdimension):
        filename = 'incorrectdimension.nc'
        description = ', with an incorrect dimension label.'
    elif (incorrectvariable):
        filename = 'incorrectvariable.nc'
        description = ', with an incorrect variable label.'
    else:
        filename = 'valid.nc'
        description = '.'

    print "Creating " + filename + description

    f = NetCDFFile(filename, 'w')
    f.description = 'Example free surface height' + description

    if (missingdata):
        offset = -0.4
    else:
        offset = 0.0

    x = arange(-1.2 + offset, 1.21 + offset, 0.2)
    y = arange(-1.2, 1.21, 0.2)
    h = zeros((len(x), len(y)))

    for i in range(len(x)):
        for j in range(len(y)):
            # Nice ordering netCDF API - y,x !
            h[j, i] = x[i] * y[j]

    if (not incorrectdimension):
        xdimlabel = 'x'
    else:
        xdimlabel = 'lat'
    ydimlabel = 'y'
    if (not incorrectvariable):
        zdimlabel = 'z'
    else:
        zdimlabel = 'height'

    print xdimlabel, ydimlabel, zdimlabel

    # dimensions
    f.createDimension(xdimlabel, len(x))
    if (not missingdimension):
        f.createDimension(ydimlabel, len(y))

    # variables
    fx = f.createVariable(xdimlabel, 'd', (xdimlabel, ))
    fx[:] = x
    if (not missingdimension):
        fy = f.createVariable(ydimlabel, 'd', (ydimlabel, ))
        fy[:] = y
        if (not missingvariable):
            fh = f.createVariable(zdimlabel, 'd', (
                xdimlabel,
                ydimlabel,
            ))
            fh[:] = h
    else:
        fh = f.createVariable(zdimlabel, 'd', (xdimlabel, ))
        fh[:] = x

    f.close()
Пример #40
0
class ETSFWriter:
    def __init__(self, filename='gpaw', title='gpaw'):
        if not filename.endswith('-etsf.nc'):
            if filename.endswith('.nc'):
                filename = filename[:-3] + '-etsf.nc'
            else:
                filename = filename + '-etsf.nc'

        self.nc = NetCDFFile(filename, 'w')

        self.nc.file_format = 'ETSF Nanoquanta'
        self.nc.file_format_version = np.array([3.3], dtype=np.float32)
        self.nc.Conventions = 'http://www.etsf.eu/fileformats/'
        self.nc.history = 'File generated by GPAW'
        self.nc.title = title

    def write(self, calc, spacegroup=1):

        #sg = Spacegroup(spacegroup)
        #print sg

        wfs = calc.wfs
        setups = wfs.setups
        bd = wfs.bd
        kd = wfs.kd

        atoms = calc.atoms
        natoms = len(atoms)

        if wfs.kd.symmetry is None:
            op_scc = np.eye(3, dtype=int).reshape((1, 3, 3))
        else:
            op_scc = wfs.kd.symmetry.op_scc

        specie_a = np.empty(natoms, np.int32)
        nspecies = 0
        species = {}
        names = []
        symbols = []
        numbers = []
        charges = []
        for a, id in enumerate(setups.id_a):
            if id not in species:
                species[id] = nspecies
                nspecies += 1
                names.append(setups[a].symbol)
                symbols.append(setups[a].symbol)
                numbers.append(setups[a].Z)
                charges.append(setups[a].Nv)
            specie_a[a] = species[id]

        dimensions = [('character_string_length', 80),
                      ('max_number_of_states', bd.nbands),
                      ('number_of_atoms', len(atoms)),
                      ('number_of_atom_species', nspecies),
                      ('number_of_cartesian_directions', 3),
                      ('number_of_components', 1),
                      ('number_of_kpoints', kd.nibzkpts),
                      ('number_of_reduced_dimensions', 3),
                      ('number_of_spinor_components', 1),
                      ('number_of_spins', wfs.nspins),
                      ('number_of_symmetry_operations', len(op_scc)),
                      ('number_of_vectors', 3),
                      ('real_or_complex_coefficients', 2),
                      ('symbol_length', 2)]

        for name, size in dimensions:
            print(('%-34s %d' % (name, size)))
            self.nc.createDimension(name, size)

        var = self.add_variable

        var('space_group', (), np.array(spacegroup, dtype=int))
        var('primitive_vectors',
            ('number_of_vectors', 'number_of_cartesian_directions'),
            wfs.gd.cell_cv,
            units='atomic units')
        var('reduced_symmetry_matrices',
            ('number_of_symmetry_operations', 'number_of_reduced_dimensions',
             'number_of_reduced_dimensions'),
            op_scc.astype(np.int32),
            symmorphic='yes')
        var('reduced_symmetry_translations',
            ('number_of_symmetry_operations', 'number_of_reduced_dimensions'),
            np.zeros((len(op_scc), 3), dtype=np.int32))
        var('atom_species', ('number_of_atoms', ), specie_a + 1)
        var('reduced_atom_positions',
            ('number_of_atoms', 'number_of_reduced_dimensions'),
            atoms.get_scaled_positions())
        var('atomic_numbers', ('number_of_atom_species', ),
            np.array(numbers, dtype=float))
        var('valence_charges', ('number_of_atom_species', ),
            np.array(charges, dtype=float))
        var('atom_species_names',
            ('number_of_atom_species', 'character_string_length'), names)
        var('chemical_symbols', ('number_of_atom_species', 'symbol_length'),
            symbols)
        var('pseudopotential_types',
            ('number_of_atom_species', 'character_string_length'),
            ['HGH'] * nspecies)
        var('fermi_energy', (),
            calc.occupations.fermilevel,
            units='atomic units')
        var('smearing_scheme', ('character_string_length', ), 'fermi-dirac')
        var('smearing_width', (), calc.occupations.width, units='atomic units')
        var('number_of_states', ('number_of_spins', 'number_of_kpoints'),
            np.zeros((wfs.nspins, kd.nibzkpts), np.int32) + bd.nbands,
            k_dependent='no')
        var('eigenvalues',
            ('number_of_spins', 'number_of_kpoints', 'max_number_of_states'),
            np.array([[
                calc.get_eigenvalues(k, s) / Hartree
                for k in range(kd.nibzkpts)
            ] for s in range(wfs.nspins)]),
            units='atomic units')
        var(
            'occupations',
            ('number_of_spins', 'number_of_kpoints', 'max_number_of_states'),
            np.array([[
                calc.get_occupation_numbers(k, s) / kd.weight_k[k]
                for k in range(kd.nibzkpts)
            ] for s in range(wfs.nspins)]))
        var('reduced_coordinates_of_kpoints',
            ('number_of_kpoints', 'number_of_reduced_dimensions'), kd.ibzk_kc)
        var('kpoint_weights', ('number_of_kpoints', ), kd.weight_k)
        var('basis_set', ('character_string_length', ), 'plane_waves')
        var('number_of_electrons', (), np.array(wfs.nvalence, dtype=np.int32))
        self.nc.close()

    def add_variable(self, name, dims, data=None, **kwargs):
        if data is None:
            char = 'd'
        else:
            if isinstance(data, np.ndarray):
                char = data.dtype.char
            elif isinstance(data, float):
                char = 'd'
            elif isinstance(data, int):
                char = 'i'
            else:
                char = 'c'
        print(('%-34s %s%s' %
               (name, char, tuple([self.nc.dimensions[dim] for dim in dims]))))
        var = self.nc.createVariable(name, char, dims)
        for attr, value in kwargs.items():
            setattr(var, attr, value)
        if data is not None:
            if len(dims) == 0:
                var.assignValue(data)
            else:
                if char == 'c':
                    if len(dims) == 1:
                        var[:len(data)] = data
                    else:
                        for i, x in enumerate(data):
                            var[i, :len(x)] = x
                else:
                    var[:] = data
        return var
Пример #41
0
class ETSFWriter:
    def __init__(self, filename='gpaw', title='gpaw'):
        if not filename.endswith('-etsf.nc'):
            if filename.endswith('.nc'):
                filename = filename[:-3] + '-etsf.nc'
            else:
                filename = filename + '-etsf.nc'

        self.nc = NetCDFFile(filename, 'w')

        self.nc.file_format = 'ETSF Nanoquanta'
        self.nc.file_format_version = np.array([3.3], dtype=np.float32)
        self.nc.Conventions = 'http://www.etsf.eu/fileformats/'
        self.nc.history = 'File generated by GPAW'
        self.nc.title = title

    def write(self, calc, ecut=40 * Hartree, spacegroup=1):

        #sg = Spacegroup(spacegroup)
        #print sg

        wfs = calc.wfs
        setups = wfs.setups
        bd = wfs.bd
        kd = wfs.kd

        atoms = calc.atoms
        natoms = len(atoms)

        if wfs.symmetry is None:
            op_scc = np.eye(3, dtype=int).reshape((1, 3, 3))
        else:
            op_scc = wfs.symmetry.op_scc

        pwd = PWDescriptor(ecut / Hartree, wfs.gd, kd.ibzk_kc)
        N_c = pwd.gd.N_c
        i_Qc = np.indices(N_c, np.int32).transpose((1, 2, 3, 0))
        i_Qc += N_c // 2
        i_Qc %= N_c
        i_Qc -= N_c // 2
        i_Qc.shape = (-1, 3)
        i_Gc = i_Qc[pwd.Q_G]

        B_cv = 2.0 * np.pi * wfs.gd.icell_cv
        G_Qv = np.dot(i_Gc, B_cv).reshape((-1, 3))
        G2_Q = (G_Qv**2).sum(axis=1)

        specie_a = np.empty(natoms, np.int32)
        nspecies = 0
        species = {}
        names = []
        symbols = []
        numbers = []
        charges = []
        for a, id in enumerate(setups.id_a):
            if id not in species:
                species[id] = nspecies
                nspecies += 1
                names.append(setups[a].symbol)
                symbols.append(setups[a].symbol)
                numbers.append(setups[a].Z)
                charges.append(setups[a].Nv)
            specie_a[a] = species[id]

        dimensions = [('character_string_length', 80),
                      ('max_number_of_coefficients', len(i_Gc)),
                      ('max_number_of_states', bd.nbands),
                      ('number_of_atoms', len(atoms)),
                      ('number_of_atom_species', nspecies),
                      ('number_of_cartesian_directions', 3),
                      ('number_of_components', 1),
                      ('number_of_grid_points_vector1', N_c[0]),
                      ('number_of_grid_points_vector2', N_c[1]),
                      ('number_of_grid_points_vector3', N_c[2]),
                      ('number_of_kpoints', kd.nibzkpts),
                      ('number_of_reduced_dimensions', 3),
                      ('number_of_spinor_components', 1),
                      ('number_of_spins', wfs.nspins),
                      ('number_of_symmetry_operations', len(op_scc)),
                      ('number_of_vectors', 3),
                      ('real_or_complex_coefficients', 2),
                      ('symbol_length', 2)]

        for name, size in dimensions:
            print('%-34s %d' % (name, size))
            self.nc.createDimension(name, size)

        var = self.add_variable

        var('space_group', (), np.array(spacegroup, dtype=int))
        var('primitive_vectors',
            ('number_of_vectors', 'number_of_cartesian_directions'),
            wfs.gd.cell_cv,
            units='atomic units')
        var('reduced_symmetry_matrices',
            ('number_of_symmetry_operations', 'number_of_reduced_dimensions',
             'number_of_reduced_dimensions'),
            op_scc.astype(np.int32),
            symmorphic='yes')
        var('reduced_symmetry_translations',
            ('number_of_symmetry_operations', 'number_of_reduced_dimensions'),
            np.zeros((len(op_scc), 3), dtype=np.int32))
        var('atom_species', ('number_of_atoms', ), specie_a + 1)
        var('reduced_atom_positions',
            ('number_of_atoms', 'number_of_reduced_dimensions'),
            atoms.get_scaled_positions())
        var('atomic_numbers', ('number_of_atom_species', ),
            np.array(numbers, dtype=float))
        var('valence_charges', ('number_of_atom_species', ),
            np.array(charges, dtype=float))
        var('atom_species_names',
            ('number_of_atom_species', 'character_string_length'), names)
        var('chemical_symbols', ('number_of_atom_species', 'symbol_length'),
            symbols)
        var('pseudopotential_types',
            ('number_of_atom_species', 'character_string_length'),
            ['HGH'] * nspecies)
        var('fermi_energy', (),
            calc.occupations.fermilevel,
            units='atomic units')
        var('smearing_scheme', ('character_string_length', ), 'fermi-dirac')
        var('smearing_width', (), calc.occupations.width, units='atomic units')
        var('number_of_states', ('number_of_spins', 'number_of_kpoints'),
            np.zeros((wfs.nspins, kd.nibzkpts), np.int32) + bd.nbands,
            k_dependent='no')
        var('eigenvalues',
            ('number_of_spins', 'number_of_kpoints', 'max_number_of_states'),
            np.array([[
                calc.get_eigenvalues(k, s) / Hartree
                for k in range(kd.nibzkpts)
            ] for s in range(wfs.nspins)]),
            units='atomic units')
        var(
            'occupations',
            ('number_of_spins', 'number_of_kpoints', 'max_number_of_states'),
            np.array([[
                calc.get_occupation_numbers(k, s) / kd.weight_k[k]
                for k in range(kd.nibzkpts)
            ] for s in range(wfs.nspins)]))
        var('reduced_coordinates_of_kpoints',
            ('number_of_kpoints', 'number_of_reduced_dimensions'), kd.ibzk_kc)
        var('kpoint_weights', ('number_of_kpoints', ), kd.weight_k)
        var('basis_set', ('character_string_length', ), 'plane_waves')
        var('kinetic_energy_cutoff', (), 1.0 * ecut, units='atomic units')
        var('number_of_coefficients', ('number_of_kpoints', ),
            np.zeros(kd.nibzkpts, np.int32) + len(i_Gc),
            k_dependent='no')
        var('reduced_coordinates_of_plane_waves',
            ('max_number_of_coefficients', 'number_of_reduced_dimensions'),
            i_Gc[np.argsort(G2_Q)],
            k_dependent='no')
        var('number_of_electrons', (), np.array(wfs.nvalence, dtype=np.int32))

        #var('exchange_functional', ('character_string_length',),
        #    calc.hamiltonian.xc.name)
        #var('correlation_functional', ('character_string_length',),
        #    calc.hamiltonian.xc.name)

        psit_skn1G2 = var(
            'coefficients_of_wavefunctions',
            ('number_of_spins', 'number_of_kpoints', 'max_number_of_states',
             'number_of_spinor_components', 'max_number_of_coefficients',
             'real_or_complex_coefficients'))

        x = atoms.get_volume()**0.5 / N_c.prod()
        psit_Gx = np.empty((len(i_Gc), 2))
        for s in range(wfs.nspins):
            for k in range(kd.nibzkpts):
                for n in range(bd.nbands):
                    psit_G = pwd.fft(calc.get_pseudo_wave_function(
                        n, k, s))[np.argsort(G2_Q)]
                    psit_G *= x
                    psit_Gx[:, 0] = psit_G.real
                    psit_Gx[:, 1] = psit_G.imag
                    psit_skn1G2[s, k, n, 0] = psit_Gx

        self.nc.close()

    def add_variable(self, name, dims, data=None, **kwargs):
        if data is None:
            char = 'd'
        else:
            if isinstance(data, np.ndarray):
                char = data.dtype.char
            elif isinstance(data, float):
                char = 'd'
            elif isinstance(data, int):
                char = 'i'
            else:
                char = 'c'
        print('%-34s %s%s' %
              (name, char, tuple([self.nc.dimensions[dim] for dim in dims])))
        var = self.nc.createVariable(name, char, dims)
        for attr, value in kwargs.items():
            setattr(var, attr, value)
        if data is not None:
            if len(dims) == 0:
                var.assignValue(data)
            else:
                if char == 'c':
                    if len(dims) == 1:
                        var[:len(data)] = data
                    else:
                        for i, x in enumerate(data):
                            var[i, :len(x)] = x
                else:
                    var[:] = data
        return var
Пример #42
0
if options.nx:
    nx = int(options.nx)
else:
    nx = int((xmax - xmin) / resolution - 1)
if options.ny:
    ny = int(options.ny)
else:
    ny = int((ymax - ymin) / resolution)

# ==== Define the new file to be output - this should be made a variant of the input file name
fileoutname = options.filename + ".regulargrid.nc"
fileout = NetCDFFile(fileoutname, "w")

# ====Create dimensions in output file
# Create the new x,y dimensions for the new file
fileout.createDimension('x', nx)
fileout.createDimension('y', ny)
# Copy over all the dimensions to the new file
for dim in filein.dimensions.keys():
    # print 'DIMENSION: ', dim
    # print 'HAS VALUE: ', filein.dimensions[dim]
    if netCDF_module == 'Scientific.IO.NetCDF':
        fileout.createDimension(dim, filein.dimensions[dim])
    else:
        fileout.createDimension(dim, len(filein.dimensions[dim]))

# ====Copy over global attributes
for a in dir(filein):
    if not (any(x in a for x in (
            'close', 'createDimension', 'createVariable', 'flush', 'sync',
            'groups', 'dimensions', 'variables', 'dtype', 'file_format',
  print ""
  print "***"
  print age
  print "***"
  print ""
  Qll = np.array(grass.vector_db_select('discharge_to_coast_'+age).values()[0].values(), dtype=float) # Any possible point precision issues can be rounded;
  for row in Qll:
    # Summing these into gridded bins here
    discharge_grid[lats == round(row[-1],3), lons == round(row[-2],2)] += row[1]
  t.append(age)
  Qout.append(discharge_grid.copy()) # "copy" important! otherwise points to zeroed grid.
  print "Total discharge = ", np.sum(Qout[-1])
  discharge_grid *= 0

# Towards netcdf export
#shutil.copyfile('qrparm.waterfix.hadcm3_bbc15ka.nc', dst)
newnc = NetCDFFile(outname, 'w')
newnc.createDimension('t', len(t))
newnc.createDimension('longitude', len(lons))
newnc.createDimension('latitude', len(lats))
newnc.createVariable('t', 'i', ('t',))
newnc.createVariable('longitude', 'f', ('longitude',))
newnc.createVariable('latitude', 'f', ('latitude',))
newnc.createVariable('discharge', 'd', ('t', 'latitude', 'longitude'))
newnc.variables['t'][:] = t
newnc.variables['longitude'][:] = elons
newnc.variables['latitude'][:] = lats
newnc.variables['discharge'][:] = np.array(Qout)
newnc.close()

Пример #44
0
lon = ncfile.variables['domain_a_lon'][:, :]
tmp = []
for i in range(ny):
  for j in range(nx):
   tmp.append(lon[i][j])
lon = scipy.array(tmp)

imask = ncfile.variables['domain_l_mask'][:, :]
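# flatten the 2-D mask and invert each bit below (the "^ 1" flips the 0/1 convention)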
tmp = []
for i in range(ny):
  for j in range(nx):
   tmp.append(int(imask[i][j]) ^ 1)
imask = scipy.array(tmp)

# create dimensions
nc.createDimension('grid_size', 8192)
nc.createDimension('grid_rank', 2)
nc.createDimension('grid_corners', 4)

# create variables
grid_dims_var = nc.createVariable('grid_dims', dtype('int32').char, ('grid_rank',))
lat_var = nc.createVariable('grid_center_lat', dtype('d').char, ('grid_size',))
lat_var.units = 'degrees'
lon_var = nc.createVariable('grid_center_lon', dtype('d').char, ('grid_size',))
lon_var.units = 'degrees'
grid_imask_var = nc.createVariable('grid_imask', dtype('int32').char, ('grid_size',))
grid_imask_var.units = 'unitless'

grid_dims_var[:] = dims 
lat_var[:] = lat 
lon_var[:] = lon
Пример #45
0
lon = ncfile.variables['domain_a_lon'][:, :]
tmp = []
for i in range(ny):
    for j in range(nx):
        tmp.append(lon[i][j])
lon = scipy.array(tmp)

imask = ncfile.variables['domain_l_mask'][:, :]
tmp = []
for i in range(ny):
    for j in range(nx):
        tmp.append(int(imask[i][j]) ^ 1)
imask = scipy.array(tmp)

# create dimensions
nc.createDimension('grid_size', 8192)
nc.createDimension('grid_rank', 2)
nc.createDimension('grid_corners', 4)

# create variables
grid_dims_var = nc.createVariable('grid_dims',
                                  dtype('int32').char, ('grid_rank', ))
lat_var = nc.createVariable('grid_center_lat',
                            dtype('d').char, ('grid_size', ))
lat_var.units = 'degrees'
lon_var = nc.createVariable('grid_center_lon',
                            dtype('d').char, ('grid_size', ))
lon_var.units = 'degrees'
grid_imask_var = nc.createVariable('grid_imask',
                                   dtype('int32').char, ('grid_size', ))
grid_imask_var.units = 'unitless'
Пример #46
0
    def internalRun(self):
        """Runs the analysis."""
        
        self.chrono = default_timer()

        orderedAtoms = sorted(self.universe.atomList(), key = operator.attrgetter('index'))

        selectedAtoms = Collection([orderedAtoms[ind] for ind in self.subset])
                
        M1_2 = N.zeros((3*self.nSelectedAtoms,), N.Float)
        
        weightList = [N.sqrt(el[1]) for el in sorted([(at.index, at.mass()) for at in selectedAtoms])]
        
        for i in range(len(weightList)):
            M1_2[3*i:3*(i+1)] = weightList[i]
            
        invM1_2 = (1.0/M1_2)*N.identity(3*self.nSelectedAtoms, typecode = N.Float)
            
        # The initial structure configuration.
        initialStructure = self.trajectory.configuration[self.first]

        averageStructure = ParticleVector(self.universe)

        for conf in self.trajectory.configuration[self.first:self.last:self.skip]:
            averageStructure += self.universe.configurationDifference(initialStructure, conf)

        averageStructure = averageStructure/self.nFrames
        averageStructure = initialStructure + averageStructure        
        
        mdr  = N.zeros((self.nFrames, 3*self.nSelectedAtoms), N.Float)
        
        # Calculate the fluctuation matrix.
        sigmaPrim = N.zeros((3*self.nSelectedAtoms, 3*self.nSelectedAtoms), N.Float)
        comp = 0
        for conf in self.trajectory.configuration[self.first:self.last:self.skip]:
            mdr[comp,:] = M1_2*N.ravel(N.compress(self.mask,(conf-averageStructure).array,0)) 
            sigmaPrim += mdr[comp,:, N.NewAxis] * mdr[N.NewAxis, comp, :]            
            comp += 1
                        
        sigmaPrim = sigmaPrim/float(self.nFrames)

        try:
            # Calculate the quasiharmonic modes
            omega, dx = Heigenvectors(sigmaPrim)
            
        except MemoryError:
            raise Error('Not enough memory to diagonalize the %sx%s fluctuation matrix.' % sigmaPrim.shape)
            
        # Due to numerical imprecisions, the result can have imaginary parts.
        # In that case, throw the imaginary parts away.
        # Conversion from amu*nm^2 to kg*m^2 (SI)
        omega = omega.real/(Units.kg*Units.m**2)        
        omega = (Units.Hz/Units.invcm)*N.sqrt((Units.k_B*Units.K*self.temperature/Units.J)*(1.0/omega))
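        # i.e. the quasiharmonic frequencies are omega_i = sqrt(k_B*T / lambda_i),
        # where lambda_i are the eigenvalues of the mass-weighted fluctuation
        # matrix computed above, reported here in cm^-1.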

        dx = dx.real
        dx = N.dot(invM1_2, dx)

        # Sort eigen vectors by decreasing fluctuation amplitude.
        indices = N.argsort(omega)[::-1]
        
        omega = N.take(omega, indices)
        dx = N.take(dx, indices)

        # Eq 66 of the reference paper.
        mdr = N.take(mdr, indices, axis = 1)
        at = N.dot(mdr,N.transpose(dx))
        
        # The NetCDF output file is opened.
        outputFile       = NetCDFFile(self.output, 'w')
        outputFile.title = self.__class__.__name__
        outputFile.jobinfo = self.information + '\nOutput file written on: %s\n\n' % asctime()

        # The universe is emptied of its objects, keeping just its topology.
        self.universe.removeObject(self.universe.objectList()[:])
        # The atoms of the subset are copied
        atoms = copy.deepcopy(selectedAtoms.atomList())
        
        # and their parent attribute is removed so that they can be transferred into the empty universe.
        for a in atoms:
            a.parent = None
        ac = AtomCluster(atoms,name='QHACluster')
        self.universe.addObject(ac)

        # Some dimensions are created.
        # NEIGENVALS = the number of eigenvalues.
        outputFile.createDimension('NEIGENVALS', len(omega))

        # UDESCR = the universe description length
        outputFile.createDimension('UDESCR', len(self.universe.description()))

        # NATOMS = the number of atoms of the universe
        outputFile.createDimension('NATOMS', self.nSelectedAtoms)

        # NFRAMES = the number of frames.
        outputFile.createDimension('NFRAMES', self.nFrames)

        # NCOORDS = the number of coordinates (always = 3).
        outputFile.createDimension('NCOORDS', 3)

        # 3N.
        outputFile.createDimension('3N', 3*self.nSelectedAtoms)

        if self.universe.cellParameters() is not None:
            outputFile.createDimension('BOXDIM', len(self.universe.cellParameters()))

        # Creation of the NetCDF output variables.
        # omega = the eigenvalues.
        OMEGA = outputFile.createVariable('omega', N.Float, ('NEIGENVALS',))
        OMEGA[:] = omega

        # dx = the array of eigenvectors.
        DX = outputFile.createVariable('dx', N.Float, ('NEIGENVALS','NEIGENVALS'))
        DX[:,:] = dx

        # The time.
        TIMES = outputFile.createVariable('time', N.Float, ('NFRAMES',))
        TIMES[:] = self.times[:]
        TIMES.units = 'ps'

        # MODE = the mode number.
        MODE = outputFile.createVariable('mode', N.Float, ('3N',))
        MODE[:] = 1 + N.arange(3*self.nSelectedAtoms)
                
        # LCI = local character indicator. See eq 56.
        LCI = outputFile.createVariable('local_character_indicator', N.Float, ('3N',))
        LCI[:] = (dx**4).sum(0)
        
        # GCI = global character indicator. See eq 57.
        GCI = outputFile.createVariable('global_character_indicator', N.Float, ('3N',))
        GCI[:] = (N.sqrt(3.0*self.nSelectedAtoms)/(N.absolute(dx)).sum(0))**4

        # Projection of the MD trajectory onto the normal modes. See eq. 66.
        AT = outputFile.createVariable('at', N.Float, ('NFRAMES','3N'))
        AT[:,:] = at[:,:]
        
        # DESCRIPTION = the universe description.
        DESCRIPTION = outputFile.createVariable('description', N.Character, ('UDESCR',))
        DESCRIPTION[:] = self.universe.description()

        # AVGSTRUCT = the average structure.
        AVGSTRUCT = outputFile.createVariable('avgstruct', N.Float, ('NATOMS','NCOORDS'))
        AVGSTRUCT[:,:] = N.compress(self.mask,averageStructure.array,0)

        # If the universe is periodic, create an extra variable storing the box size.
        if self.universe.cellParameters() is not None:
            BOXSIZE = outputFile.createVariable('box_size', N.Float, ('BOXDIM',))
            BOXSIZE[:] = self.universe.cellParameters()

        outputFile.close()

        self.toPlot = None

        self.chrono = default_timer() - self.chrono
        
        return None
Пример #47
0
    def write_output(self, afl):
        print afl
        local_dimension_directory={}
	if self.format == 'netCDF':
	    # open a new netCDF file for writing.
	    ncfile = Dataset(self.file,'w')

	    ndim = len(self.target_time)
	    #--print 'ndim: ', ndim
	    ncfile.createDimension('time', ndim)

	    # create variables
	    # first argument is name of variable, second is datatype, third is
	    # a tuple with the names of dimensions.
	    time = ncfile.createVariable('time',dtype('float64').char,('time', ))
	    lats = ncfile.createVariable('latitude',dtype('float32').char,('time', ))
	    lons = ncfile.createVariable('longitude',dtype('float32').char,('time', ))

	    time.units = 'second (since midnight of 1/1/1970)'
	    lats.units = 'degree'
	    lons.units = 'degree'

            # write time, lat, lon to variables
            self.target_time.shape = (ndim, )
            self.target_lat.shape = (ndim, )
            self.target_lon.shape = (ndim, )

            time[:] = N.cast['float64'](self.target_time)
            lats[:] = N.cast['float32'](self.target_lat)
            lons[:] = N.cast['float32'](self.target_lon)


	    # create variables for levels
	    # first argument is name of variable, second is datatype, third is
	    # a tuple with the names of dimensions.
	    lkeys = self.target_levels.keys()
	    print 'lkeys: ', lkeys
	    if len(lkeys) > 0:
		self.lvars = [0]*len(lkeys)
	    kk = 0
	    for k in lkeys:
		print 'k: ', k
		kname = k.replace(' ', '_')
		#---print 'kk: ', kk, ', kname: ', kname

		atuple = self.target_levels[k]
		attribute = atuple[0]
		#---print 'attribute: ', attribute
		local_level = atuple[1]
		#--print 'local_level: ', local_level
		#--print 'local_level.shape: ', local_level.shape

		lc = 'lc-' + str(kk)
		print 'lc: ', lc

                if attribute.has_key('dimension1'):
                       lc = attribute['dimension1']
                       if (local_dimension_directory.has_key(lc) == False):
                          ncfile.createDimension(lc, len(local_level))
                          local_dimension_directory[lc] = len(local_level) 
                elif kname!='P0':
		       ncfile.createDimension(lc, len(local_level))
                else: # 'P0'
                       ncfile.createDimension(lc, 1)

		self.lvars[kk] = ncfile.createVariable(kname, dtype('float32').char, (lc, ))

		if attribute.has_key('units'):
		    self.lvars[kk].units = attribute['units']
		#else:
		#    self.lvars[kk].units = ''

		if attribute.has_key('long_name'):
		    self.lvars[kk].long_name = attribute['long_name']
		kk += 1

	    # end of for k loop

	    # write data to variables for levels
	    for kk in range(len(lkeys)):
		print 'kk: ', kk
                if(lkeys[kk]!='P0'):
		  #---print 'self.target_levels[lkeys[kk]][1].shape: ', self.target_levels[lkeys[kk]][1].shape
		  #---print 'len(self.target_levels[lkeys[kk]][1]): ', len(self.target_levels[lkeys[kk]][1])
		  self.target_levels[lkeys[kk]][1].shape = (len(self.target_levels[lkeys[kk]][1]), )
		  self.lvars[kk][:] = N.cast['float32'](self.target_levels[lkeys[kk]][1])
                else: 
                  self.lvars[kk][:] = N.cast['float32']([self.target_levels[lkeys[kk]][1]])
	    # end of for kk loop

	    # create variables for data
	    # first argument is name of variable, second is datatype, third is
	    # a tuple with the names of dimensions.
	    keys = self.target_data.keys()
	    #--print 'keys: ', keys
	    self.vars = [0]*len(keys)
	    kk = 0
	    for k in keys:
		#--print 'k: ', k
		kname = k.replace(' ', '_')
		#--print 'k: ', k, ', kname: ', kname

                # check whether the data type should be integer or float
                data_type = self.check_data_type(kname)

		# check whether 2D array can be collapsed to 1D array
		s = self.target_data[k][1].shape
		d2 = len(s)
		if d2 == 2 and s[1] == 1: 
		    tmp = N.reshape(self.target_data[k][1], s[0])
                    attribute = self.target_data[k][0]
                    self.target_data[k] = (attribute, tmp)
	
		#--print 'attribute keys: ', self.target_data[k][0].keys()
		s = self.target_data[k][1].shape
		d2 = len(s)
		cc = 'cc-' + str(kk)
		#--print 'cc: ', cc
		if d2 == 1:
		    #--print '--- 1D data'
		    self.vars[kk] = ncfile.createVariable(kname, data_type, ('time', ))
		elif d2 == 2:
		    #--print '--- 2D data'
                    if self.target_data[k][0].has_key('dimension1'):
                       local_dimension = self.target_data[k][0]['dimension1']
                       cc = local_dimension
                       if (local_dimension_directory.has_key(local_dimension) == False):
                          #print 'local dimension =', local_dimension
                          ncfile.createDimension(local_dimension, s[1])
                          local_dimension_directory[local_dimension] = s[1]
                    else: 
		      ncfile.createDimension(cc, s[1])
		    self.vars[kk] = ncfile.createVariable(kname, data_type,('time', cc))
		elif d2 == 3:
		    #--print '--- 3D data'
		    cc1 = cc+'1'
		    cc2 = cc+'2'
		    ncfile.createDimension(cc1, s[1])
		    ncfile.createDimension(cc2, s[2])
		    self.vars[kk] = ncfile.createVariable(kname, data_type,('time', cc1, cc2))
		elif d2 == 4:
		    #--print '--- 4D data'
		    cc1 = cc+'1'
		    cc2 = cc+'2'
		    cc3 = cc+'3'
		    ncfile.createDimension(cc1, s[1])
		    ncfile.createDimension(cc2, s[2])
		    ncfile.createDimension(cc3, s[3])
		    self.vars[kk] = ncfile.createVariable(kname, data_type,('time', cc1, cc2, cc3))

		if self.target_data[k][0].has_key('units'):
		    self.vars[kk].units = self.target_data[k][0]['units']

		if self.target_data[k][0].has_key('long_name'):
		    self.vars[kk].long_name = self.target_data[k][0]['long_name']

                if self.target_data[k][0].has_key('_FillValue'):
                    self.vars[kk].FillValue = self.target_data[k][0]['_FillValue']

                if self.target_data[k][0].has_key('missing_value'):
                    self.vars[kk].missing_value = self.target_data[k][0]['missing_value']

                if self.target_data[k][0].has_key('scale_factor'):
                    self.vars[kk].scale_factor = self.target_data[k][0]['scale_factor']

                if self.target_data[k][0].has_key('add_offset'):
                    self.vars[kk].add_offset = self.target_data[k][0]['add_offset']

                if self.target_data[k][0].has_key('valid_range'):
                    self.vars[kk].valid_range = self.target_data[k][0]['valid_range']

                if self.target_data[k][0].has_key('Parameter_Type'):
                    self.vars[kk].Parameter_Type = self.target_data[k][0]['Parameter_Type']

                if self.target_data[k][0].has_key('Cell_Along_Swath_Sampling'):
                    self.vars[kk].Cell_Along_Swath_Sampling = self.target_data[k][0]['Cell_Along_Swath_Sampling']

                if self.target_data[k][0].has_key('Cell_Across_Swath_Sampling'):
                    self.vars[kk].Cell_Across_Swath_Sampling = self.target_data[k][0]['Cell_Across_Swath_Sampling']

                if self.target_data[k][0].has_key('Geolocation_Pointer'):
                    self.vars[kk].Geolocation_Pointer = self.target_data[k][0]['Geolocation_Pointer']

                # add missing_value to the variable attributes if given by the XML input parameter and neither missing_value nor _FillValue is already present
                if (afl.missing_value !='None' and self.target_data[k][0].has_key('missing_value')==False 
                     and  self.target_data[k][0].has_key('_FillValue')==False ): 
                    self.vars[kk].missing_value = afl.missing_value

                # record the collocation invalid-data value as a variable attribute
                self.vars[kk].collocation_invalid_value = self.invalid_data

		kk += 1

	    # end of for k loop

	    #--print 'in backend: self.target_data[keys[0]][1]: ', self.target_data[keys[0]][1]

	    # write data to variables for data
	    for kk in range(len(keys)):
                # check whether the data type should be integer or float
                data_type = self.check_data_type(keys[kk])
		#--print 'kk: ', kk
		#--print 'self.target_data[keys[kk]][1].shape: ', self.target_data[keys[kk]][1].shape
		s3 = self.target_data[keys[kk]][1].shape
		d3 = len(s3)
		if d3 == 1:
		    self.target_data[keys[kk]][1].shape = (ndim, )
		elif d3 == 2:
		    self.target_data[keys[kk]][1].shape = (ndim, s3[1])
		elif d3 == 3:
		    self.target_data[keys[kk]][1].shape = (ndim, s3[1], s3[2])
		elif d3 == 4:
		    self.target_data[keys[kk]][1].shape = (ndim, s3[1], s3[2], s3[3])

		#--print 'self.target_data[keys[kk]][1].shape: ', self.target_data[keys[kk]][1].shape
                #--print 'self.target_data[keys[kk]][1] data type: ', type(self.target_data[keys[kk]][1])

                if data_type=='i':
		    self.vars[kk][:] =  N.cast['int32'](self.target_data[keys[kk]][1])
                #elif data_type=='f':
		#    self.vars[kk][:] =  N.cast['float32'](self.target_data[keys[kk]][1])
                else: 
		    self.vars[kk][:] = self.target_data[keys[kk]][1]  # float32

	    # end of for kk loop

	    ncfile.close()
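# Illustrative sketch (standalone, not part of the snippet above): the same
# rank-dependent dimension pattern in miniature.  The helper name
# write_with_auto_dims and the file 'auto_dims_demo.nc' are assumptions for
# this example; the idea is that the leading axis is always the shared 'time'
# dimension and every extra axis gets its own ad-hoc dimension.
from numpy import dtype, zeros
from Scientific.IO.NetCDF import NetCDFFile as Dataset

def write_with_auto_dims(ncfile, name, arr):
    dims = ['time']
    for axis, n in enumerate(arr.shape[1:]):
        dname = '%s_dim%d' % (name, axis + 1)       # e.g. 'profile_dim1'
        if dname not in ncfile.dimensions:
            ncfile.createDimension(dname, n)
        dims.append(dname)
    var = ncfile.createVariable(name, dtype('float32').char, tuple(dims))
    var[:] = arr.astype('float32')
    return var

nc = Dataset('auto_dims_demo.nc', 'w')
nc.createDimension('time', 5)
write_with_auto_dims(nc, 'profile', zeros((5, 3), 'float32'))  # dims ('time', 'profile_dim1')
nc.close()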
Пример #48
0
# Computation
print "computing cartoprox sce"
dcss = []
for i, pol in enumerate(polluants):
    data = dcrs[i] - (dsrs[i] - dsss[i])
    dcss.append(data)
    print " > %s (min = %.1f, max = %1.f)" % (pol, data.min(), data.max())
del dsrs, dsss, dcrs

# Save the results
print "saving data"
if fncs[-3:] != '.nc': fncs = "%s.nc" % fncs
ncf = NetCDFFile(fncs, 'w')

# dimensions
ncf.createDimension('Time', None)
ncf.createDimension('DateStrLen', 19)
ncf.createDimension('Point', np)
ncf.sync()
print " > dimensions"

# auxiliary variables
ncf.createVariable('Times', 'c', ('Time', 'DateStrLen'))
ncf.createVariable('area_pts', 'f', ('Point', ))
ncf.createVariable('easting_pts', 'f', ('Point', ))
ncf.createVariable('northing_pts', 'f', ('Point', ))
ncf.createVariable('x_pts', 'f', ('Point', ))
ncf.createVariable('y_pts', 'f', ('Point', ))
ncf.sync()
print " > variables"
Пример #49
0
def AsapFileToTrajectory(oldfile, newfile, firstframe=None, lastframe=None):
    # Check if input file is a filename or a NetCDF file
    if isinstance(oldfile, types.StringTypes):
        oldfile = NetCDFFile(oldfile)

    pos = oldfile.variables['cartesianPositions']  # Must be present
    (nframes, natoms, three) = pos.shape
    print natoms, three, nframes
    firstframe = normalize(firstframe, nframes, 0)
    lastframe = normalize(lastframe, nframes, -1)
    if lastframe < firstframe:
        raise ValueError, "No frames to copy, giving up."

    print "Preparing to copy frames", firstframe, "to", lastframe
    # Now open the output file, and define the variables.
    if isinstance(newfile, types.StringTypes):
        newfile = NetCDFFile(newfile, "w")
    oncevars = []
    manyvars = []
    for v in oldfile.variables.keys():
        try:
            newname = old_names[v]
        except KeyError:
            print "WARNING: Skipping data named", v
            continue
        if new_names[newname][2]:
            shape = new_names[newname][0]
            oncevars.append((v, newname))
        else:
            shape = ("unlim", ) + new_names[newname][0]
            manyvars.append((v, newname))
        shape2 = []
        for d in shape:
            if isinstance(d, types.IntType):
                n = d
                d = str(d)
            elif d == 'natoms':
                n = natoms
            elif d == 'unlim':
                n = None
            else:
                raise RuntimeError, "Unknown dimension " + str(d)
            if not newfile.dimensions.has_key(d):
                newfile.createDimension(d, n)
            shape2.append(d)
        print v, "-->", newname, " shape", shape2
        var = newfile.createVariable(newname, oldfile.variables[v].typecode(),
                                     tuple(shape2))
        var.once = new_names[newname][2]
        var.units = new_names[newname][3]

    # Now copy the data
    print "Copying global data"
    newfile.history = 'ASE trajectory'
    newfile.version = '0.1'
    newfile.lengthunit = 'Ang'
    newfile.energyunit = 'eV'
    for oldname, newname in oncevars:
        newfile.variables[newname][:] = oldfile.variables[oldname][:]

    for n in range(firstframe, lastframe + 1):
        print "Copying frame", n
        for oldname, newname in manyvars:
            newfile.variables[newname][n] = oldfile.variables[oldname][n]
    newfile.close()
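# Hypothetical usage (filenames are placeholders, not from the original
# module): convert the first ten frames of an old ASAP NetCDF file into the
# newer trajectory layout described by the old_names/new_names tables.
if __name__ == '__main__':
    AsapFileToTrajectory('md_run_asap.nc', 'md_run_ase.nc',
                         firstframe=0, lastframe=9)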
Пример #50
0
class ETSFWriter:
    def __init__(self, filename='gpaw', title='gpaw'):
        if not filename.endswith('-etsf.nc'):
            if filename.endswith('.nc'):
                filename = filename[:-3] + '-etsf.nc'
            else:
                filename = filename + '-etsf.nc'
            
        self.nc = NetCDFFile(filename, 'w')

        self.nc.file_format = 'ETSF Nanoquanta'
        self.nc.file_format_version = np.array([3.3], dtype=np.float32)
        self.nc.Conventions = 'http://www.etsf.eu/fileformats/'
        self.nc.history = 'File generated by GPAW'
        self.nc.title = title

    def write(self, calc, spacegroup=1):

        #sg = Spacegroup(spacegroup)
        #print sg
        
        wfs = calc.wfs
        setups = wfs.setups
        bd = wfs.bd
        kd = wfs.kd
        
        atoms = calc.atoms
        natoms = len(atoms)
        
        if wfs.kd.symmetry is None:
            op_scc = np.eye(3, dtype=int).reshape((1, 3, 3))
        else:
            op_scc = wfs.kd.symmetry.op_scc

        specie_a = np.empty(natoms, np.int32)
        nspecies = 0
        species = {}
        names = []
        symbols = []
        numbers = []
        charges = []
        for a, id in enumerate(setups.id_a):
            if id not in species:
                species[id] = nspecies
                nspecies += 1
                names.append(setups[a].symbol)
                symbols.append(setups[a].symbol)
                numbers.append(setups[a].Z)
                charges.append(setups[a].Nv)
            specie_a[a] = species[id]
            
        dimensions = [
            ('character_string_length', 80),
            ('max_number_of_states', bd.nbands),
            ('number_of_atoms', len(atoms)),
            ('number_of_atom_species', nspecies),
            ('number_of_cartesian_directions', 3),
            ('number_of_components', 1),
            ('number_of_kpoints', kd.nibzkpts),
            ('number_of_reduced_dimensions', 3),
            ('number_of_spinor_components', 1),
            ('number_of_spins', wfs.nspins),
            ('number_of_symmetry_operations', len(op_scc)),
            ('number_of_vectors', 3),
            ('real_or_complex_coefficients', 2),
            ('symbol_length', 2)]

        for name, size in dimensions:
            print(('%-34s %d' % (name, size)))
            self.nc.createDimension(name, size)

        var = self.add_variable
        
        var('space_group', (), np.array(spacegroup, dtype=int))
        var('primitive_vectors',
            ('number_of_vectors', 'number_of_cartesian_directions'),
            wfs.gd.cell_cv, units='atomic units')
        var('reduced_symmetry_matrices',
            ('number_of_symmetry_operations',
             'number_of_reduced_dimensions', 'number_of_reduced_dimensions'),
            op_scc.astype(np.int32), symmorphic='yes')
        var('reduced_symmetry_translations',
            ('number_of_symmetry_operations', 'number_of_reduced_dimensions'),
            np.zeros((len(op_scc), 3), dtype=np.int32))
        var('atom_species', ('number_of_atoms',), specie_a + 1)
        var('reduced_atom_positions',
            ('number_of_atoms', 'number_of_reduced_dimensions'),
            atoms.get_scaled_positions())
        var('atomic_numbers', ('number_of_atom_species',),
            np.array(numbers, dtype=float))
        var('valence_charges', ('number_of_atom_species',),
            np.array(charges, dtype=float))
        var('atom_species_names',
            ('number_of_atom_species', 'character_string_length'), names)
        var('chemical_symbols', ('number_of_atom_species', 'symbol_length'),
            symbols)
        var('pseudopotential_types',
            ('number_of_atom_species', 'character_string_length'),
            ['HGH'] * nspecies)
        var('fermi_energy', (), calc.occupations.fermilevel,
            units='atomic units')
        var('smearing_scheme', ('character_string_length',), 'fermi-dirac')
        var('smearing_width', (), calc.occupations.width, units='atomic units')
        var('number_of_states', ('number_of_spins', 'number_of_kpoints'),
            np.zeros((wfs.nspins, kd.nibzkpts), np.int32) + bd.nbands,
            k_dependent='no')
        var('eigenvalues',
            ('number_of_spins', 'number_of_kpoints', 'max_number_of_states'),
            np.array([[calc.get_eigenvalues(k, s) / Hartree
                       for k in range(kd.nibzkpts)]
                      for s in range(wfs.nspins)]), units='atomic units')
        var('occupations',
            ('number_of_spins', 'number_of_kpoints', 'max_number_of_states'),
            np.array([[calc.get_occupation_numbers(k, s) / kd.weight_k[k]
                       for k in range(kd.nibzkpts)]
                      for s in range(wfs.nspins)]))
        var('reduced_coordinates_of_kpoints',
            ('number_of_kpoints', 'number_of_reduced_dimensions'), kd.ibzk_kc)
        var('kpoint_weights', ('number_of_kpoints',), kd.weight_k)
        var('basis_set', ('character_string_length',), 'plane_waves')
        var('number_of_electrons', (), np.array(wfs.nvalence, dtype=np.int32))
        self.nc.close()

    def add_variable(self, name, dims, data=None, **kwargs):
        if data is None:
            char = 'd'
        else:
            if isinstance(data, np.ndarray):
                char = data.dtype.char
            elif isinstance(data, float):
                char = 'd'
            elif isinstance(data, int):
                char = 'i'
            else:
                char = 'c'
        print(('%-34s %s%s' % (
            name, char,
            tuple([self.nc.dimensions[dim] for dim in dims]))))
        var = self.nc.createVariable(name, char, dims)
        for attr, value in kwargs.items():
            setattr(var, attr, value)
        if data is not None:
            if len(dims) == 0:
                var.assignValue(data)
            else:
                if char == 'c':
                    if len(dims) == 1:
                        var[:len(data)] = data
                    else:
                        for i, x in enumerate(data):
                            var[i, :len(x)] = x
                else:
                    var[:] = data
        return var
Пример #51
0
#!/usr/bin/env python

import numpy as np
from Scientific.IO.NetCDF import NetCDFFile

tdata = np.loadtxt('az_training.txt', delimiter=',')
mags = np.loadtxt('m.txt')
dists = np.loadtxt('r.txt')

fout = 'az_training.nc'

nid = NetCDFFile(fout, 'w')
nid.createDimension('time', 1)
nid.createDimension('traces', tdata.shape[0])
nid.createDimension('filter', tdata.shape[1])

t = nid.createVariable('time', np.dtype(float).char, ('time', ))
t.units = 's'
f = nid.createVariable('filter', np.dtype(float).char, ('filter', ))
f.units = 'Hz'
m = nid.createVariable('magnitude', np.dtype(float).char, ('traces', ))
m[:] = mags
ed = nid.createVariable('epicdist', np.dtype(float).char, ('traces', ))
ed.units = 'km'
ed[:] = dists
z = nid.createVariable('z', np.dtype(float).char, ('time', 'traces', 'filter'))
z[0, :, :] = np.log10(tdata)
h = nid.createVariable('h', np.dtype(float).char, ('time', 'traces', 'filter'))
h[0, :, :] = np.log10(tdata)
nid.close()
Пример #52
0
def regrid_cons_rg(data,lons,lats,outgrid,spacedim=None):

    #Produce a NetCDF file containing the data
    ncfile=NetCDFFile('regrid_cons_rg.nc','w')
    
    ndims=len(data.shape)
    spacedim_set_flag=0  #to stop the function creating the spatial dimension twice if there is more than one dimension with the same length as xvals
    dim_names=['']*ndims

    if spacedim:
        dim_name='space'
        ncfile.createDimension(dim_name,data.shape[spacedim])
        spacedim_set_flag=1
        dim_names[spacedim]=dim_name
  
    for i in range(ndims):
        if i!=spacedim:
            if data.shape[i]==len(lons) and spacedim_set_flag==0:
                dim_name='space'
                spacedim_set_flag=1
            else:
                dim_name='dim_'+str(i)
                
            ncfile.createDimension(dim_name,data.shape[i])
            dim_names[i]=dim_name

    dtype=type(np.ravel(data)[0])
    ncdata=ncfile.createVariable('data',np.dtype(dtype).char,tuple(dim_names))
    ncdata[:]=data
    lon1d=ncfile.createVariable('lon1d',np.dtype(dtype).char,('space',))  #lon1d is the name required by the ncl ESMF_regrid function
    lon1d[:]=lons
    lat1d=ncfile.createVariable('lat1d',np.dtype(dtype).char,('space',))  #lat1d is the name required by the ncl ESMF_regrid function
    lat1d[:]=lats

#    #Set the coordinates as attributes, as required for the ncl ESMF_regrid function
#    setattr(ncdata,'lon1d',xvals)
#    setattr(ncdata,'lat1d',yvals)

    #Create corner information for the cells of input grid. This is particular to reduced Gaussian grids, which have the points arranged in lines of constant latitude.
    ncfile.createDimension('corner',4)
    
    corner_lons=np.zeros((len(lons),4))
    corner_lats=np.zeros((len(lons),4))
    lats_unique=list(sorted(set(lats)))  #gets sorted list of the unique latitudes
    for i in range(len(lats_unique)):
        
        lat=lats_unique[i]
        ind=np.where(lats==lat)[0]

        #Set longitudes of corners at this latitude
        lons_at_lat=lons[ind]
        dlon=lons_at_lat[1]-lons_at_lat[0]
        
        #Pick convention that corner 0 is bottom left, corner 1 is top left, corner 2 is top right and corner 3 is bottom right (where north is up and east is right).
        corner_lons[ind,0]=lons_at_lat-dlon/2.
        corner_lons[ind,1]=lons_at_lat-dlon/2.
        corner_lons[ind,2]=lons_at_lat+dlon/2.
        corner_lons[ind,3]=lons_at_lat+dlon/2.
        
        #Set latitudes of corners at this latitude
        if i==0:  #minimum lat
            dlat1=lats_unique[i+1]-lats_unique[i]  #the step to the next latitude
        
            corner_lats[ind,0]=-90.  #set southern corner latitudes to be -90 for the southmost row.
            corner_lats[ind,1]=lat+dlat1/2.  #set other corners to be halfway between this and the next row
            corner_lats[ind,2]=lat+dlat1/2.
            corner_lats[ind,3]=-90.

        elif i==(len(lats_unique)-1):  #max lat
            dlat2=lats_unique[i]-lats_unique[i-1]  #the step from the previous latitude
            corner_lats[ind,0]=lat-dlat2/2.
            corner_lats[ind,1]=90.  #set northern corner latitudes to be 90 for the northmost row.
            corner_lats[ind,2]=90.
            corner_lats[ind,3]=lat-dlat2/2.

        else:
            dlat1=lats_unique[i+1]-lats_unique[i]
            dlat2=lats_unique[i]-lats_unique[i-1]
            corner_lats[ind,0]=lat-dlat2/2.
            corner_lats[ind,1]=lat+dlat1/2.
            corner_lats[ind,2]=lat+dlat1/2.
            corner_lats[ind,3]=lat-dlat2/2.

    corner_lons_nc=ncfile.createVariable('corner_lons',np.dtype(dtype).char,('space','corner'))
    corner_lons_nc[:,:]=corner_lons
    corner_lats_nc=ncfile.createVariable('corner_lats',np.dtype(dtype).char,('space','corner'))
    corner_lats_nc[:,:]=corner_lats

    ncfile.close()
    
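# Standalone check (values assumed) of the corner convention used above:
# corner 0 = SW, 1 = NW, 2 = NE, 3 = SE for a cell centred at (lon, lat)
# with half-spacings dlon/2 and dlat/2.
lon, lat, dlon, dlat = 10.0, 45.0, 2.0, 1.5
corner_lons = [lon - dlon/2., lon - dlon/2., lon + dlon/2., lon + dlon/2.]
corner_lats = [lat - dlat/2., lat + dlat/2., lat + dlat/2., lat - dlat/2.]
print zip(corner_lons, corner_lats)  # [(9.0, 44.25), (9.0, 45.75), (11.0, 45.75), (11.0, 44.25)]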
Пример #53
0
from numpy import arange, zeros
# assuming the Scientific.IO.NetCDF interface used by the other examples here
from Scientific.IO.NetCDF import NetCDFFile
import height

f = NetCDFFile('height.nc', 'w')
f.description = 'Example free surface height.'

x = arange(-1.2, 1.21, 0.2)
y = arange(-1.2, 1.21, 0.2)
h = zeros((len(x),len(y)))

for i in range(len(x)):
  for j in range(len(y)):
    # Nice ordering netCDF API - y,x !
    h[j,i] = height.function([x[i], y[j]])

# dimensions
f.createDimension('x', len(x))
f.createDimension('y', len(y))

# variables
fx = f.createVariable('x', 'd', ('x',))
fy = f.createVariable('y', 'd', ('y',))
fh = f.createVariable('z', 'd', ('x', 'y',))

fx[:] = x
fy[:] = y
fh[:] = h

f.close()

Пример #54
0
class _ParNetCDFFile(ParBase):

    """
    Distributed netCDF file

    A ParNetCDFFile object acts as much as possible like a NetCDFFile object.
    Variables become ParNetCDFVariable objects, which behave like
    distributed sequences. Variables that use the dimension named by
    |split_dimension| are automatically distributed among the processors
    such that each treats only one slice of the whole file.
    """

    def __parinit__(self, pid, nprocs, filename, split_dimension,
                    mode = 'r', local_access = False):
        """
        @param filename: the name of the netCDF file
        @type filename: C{str}
        @param split_dimension: the name of the dimension along which the data
                                is distributed over the processors
        @type split_dimension: C{str}
        @param mode: read ('r'), write ('w'), or append ('a')
        @type mode: C{str}
        @param local_access: if C{False}, processor 0 is the only one to
                             access the file, all others communicate with
                             processor 0. If C{True} (only for reading), each
                             processor accesses the file directly. In the
                             latter case, the file must be accessible on all
                             processors under the same name. A third mode is
                             'auto', which uses some heuristics to decide
                             if the file is accessible everywhere: it checks
                             for existence of the file, then compares
                             the size on all processors, and finally verifies
                             that the same variables exist everywhere, with
                             identical names, types, and sizes.
        @type local_access: C{bool} or C{str}
        """
        if mode != 'r':
            local_access = 0
        self.pid = pid
        self.nprocs = nprocs
        self.filename = filename
        self.split = split_dimension
        self.local_access = local_access
        self.read_only = mode == 'r'
        if local_access or pid == 0:
            self.file = NetCDFFile(filename, mode)
            try:
                length = self.file.dimensions[split_dimension]
                if length is None:
                    length = -1
            except KeyError:
                length = None
            variables = {}
            for name, var in self.file.variables.items():
                variables[name] = (name, var.dimensions)
                if length < 0 and split_dimension in var.dimensions:
                    index = list(var.dimensions).index(split_dimension)
                    length = var.shape[index]
        else:
            self.file = None
            self.split = split_dimension
            length = None
            variables = None
        if not local_access:
            length = self.broadcast(length)
            variables = self.broadcast(variables)
        if length is not None:
            self._divideData(length)
        self.variables = {}
        for name, var in variables.items():
            self.variables[name] = _ParNetCDFVariable(self, var[0], var[1],
                                                      split_dimension)

    def __repr__(self):
        return repr(self.filename)

    def close(self):
        if self.local_access or self.pid == 0:
            self.file.close()

    def createDimension(self, name, length):
        if name == self.split:
            if length is None:
                raise ValueError("Split dimension cannot be unlimited")
            self._divideData(length)
        if self.pid == 0:
            self.file.createDimension(name, length)

    def createVariable(self, name, typecode, dimensions):
        if self.pid == 0:
            var = self.file.createVariable(name, typecode, dimensions)
            dim = var.dimensions
        else:
            dim = 0
        name, dim = self.broadcast((name, dim))
        self.variables[name] = _ParNetCDFVariable(self, name, dim, self.split)
        return self.variables[name]

    def _divideData(self, length):
        chunk = (length+self.nprocs-1)/self.nprocs
        self.first = min(self.pid*chunk, length)
        self.last = min(self.first+chunk, length)
        if (not self.local_access) and self.pid == 0:
            self.parts = []
            for pid in range(self.nprocs):
                first = pid*chunk
                last = min(first+chunk, length)
                self.parts.append((first, last))

    def sync(self):
        if self.pid == 0:
            self.file.sync()
    flush = sync
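# Hedged, standalone sketch (helper name assumed): the slice bookkeeping
# performed by _divideData() above, i.e. how 'length' records along the split
# dimension are partitioned into one contiguous block per processor.
def divide(length, nprocs):
    chunk = (length + nprocs - 1) // nprocs          # ceil(length / nprocs)
    return [(min(pid * chunk, length), min(pid * chunk + chunk, length))
            for pid in range(nprocs)]

print divide(10, 3)   # [(0, 4), (4, 8), (8, 10)]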
Пример #55
0
#!/usr/bin/env python

# Create a 2D netCDF file.
from Scientific.IO.NetCDF import NetCDFFile as Dataset
#from netCDF4_classic import Dataset
from numpy import arange, dtype # array module from http://numpy.scipy.org

# the output array to write will be nx x ny
nx = 6; ny = 12
# open a new netCDF file for writing.
ncfile = Dataset('simple_xy.nc','w') 
# create the output data.
data_out = arange(nx*ny) # 1d array
data_out.shape = (nx,ny) # reshape to 2d array
# create the x and y dimensions.
ncfile.createDimension('x',nx)
ncfile.createDimension('y',ny)
# create the variable (4 byte integer in this case)
# first argument is name of variable, second is datatype, third is
# a tuple with the names of dimensions.
data = ncfile.createVariable('data',dtype('int32').char,('x','y'))
# write data to variable.
data[:] = data_out
# close the file.
ncfile.close()
print '*** SUCCESS writing example file simple_xy.nc!'