Ejemplo n.º 1
0
    def __init__(self, path=None, xlim=None, ylim=None, tlim=None):
        """Initializes the NCEP reanalysis grid dataset.

        PARAMETERS
            path (string, optional) :
                Path to the directory containing the yearly NetCDF
                files. Defaults to a local NCEP reanalysis 2 gaussian
                grid directory.
            xlim, ylim (array like, optional) :
                Longitude and latitude limits of the grid.
            tlim (array like, optional) :
                Time limits; accepted for interface compatibility.

        """
        # Initializes the variables to default values. The indices 'n', 'k',
        # 'j' and 'i' refer to the temporal, height, meridional and zonal
        # coordinates respectively. If one of these indexes is set to 'None',
        # then it is assumed infinite size, which is relevant for the 'time'
        # coordinate.
        self.attributes = dict()
        self.dimensions = dict(n=0, k=0, j=0, i=0)
        self.coordinates = dict(n=None, k=None, j=None, i=None)
        self.variables = dict()
        self.params = dict()
        self.data = dict()
        self.stencil_coeffs = dict()
        self.stencil_params = dict()
        self.alias = dict()

        # Sets global parameters for grid. Identity comparison with `None`
        # (PEP 8) instead of `==`.
        if path is None:
            path = ('/home/sebastian/academia/data/ncep.reanalysis2/'
                'gaussian_grid')
        self.params['path'] = path
        self.params['var_list'] = []
        self.params['year_list'] = []

        # Generates list of files, tries to match them to the pattern and to
        # extract the time. To help understanding the naming convention and
        # pattern, see the following example:
        #   uwnd.2015.nc
        file_pattern = '(.*).([0-9]{4}).nc'
        flist = listdir(self.params['path'])
        flist, match = reglist(flist, file_pattern)
        self.params['file_list'] = flist

        # Gets list of variables from file match.
        _vars, _years = zip(*match)
        self.params['var_list'] = unique(_vars)
        self.params['year_list'] = unique(_years)

        # Loads data from first variable and loads longitude and latitude data.
        # We assume that all data is homogeneous throughout the dataset. Then
        # walks through each year and loads time vector.
        _var = self.params['var_list'][0]
        data = None
        for _i, _year in enumerate(self.params['year_list']):
            fname = '{}.{}.nc'.format(_var, _year)
            # Closes the handle from the previous iteration explicitly
            # instead of using a bare `try/except` that would also mask
            # unrelated errors.
            if data is not None:
                data.close()
            data = self._open_file(fname)
            #
            if _i == 0:
                lon = data.variables['lon'].data
                lat = data.variables['lat'].data
                time = data.variables['time'].data
            else:
                time = hstack([time, data.variables['time'].data])

        # Time in dataset is given in `hours since 1800-1-1 00:00:0.0` and we
        # convert it to matplotlib's date format.
        if data.variables['time'].units == 'hours since 1800-1-1 00:00:0.0':
            self.params['t0'] = dates.date2num(
                dates.datetime.datetime(1800, 1, 1, 0, 0))
            time = self.params['t0'] + time / 24.

        # If lon_0 is set, calculate how many indices have to be moved in
        # order for latitude array to start at lon_0.
        lon, lat, xlim, ylim, ii, jj = self.getLongitudeLatitudeLimits(lon,
            lat, xlim, ylim)

        self.params['xlim'], self.params['ylim'] = xlim, ylim
        self.params['lon_i'], self.params['lat_j'] = ii, jj

        # Initializes the grid attributes, dimensions, coordinates and
        # variables.
        self.name = 'ncep_reanalysis'
        self.description = ('NCEP Reanalysis project is analysis/forecast '
            'system to perform data assimilation using past data from 1979 '
            'onwards.')
        self.attributes['institution'] = data.institution
        self.dimensions = dict(n=time.size, k=0, j=lat.size, i=lon.size)
        self.coordinates = dict(n='time', k='height', j='latitude',
            i='longitude')
        self.variables = dict(
            time = atlantis.data.Variable(),
            height = atlantis.data.get_standard_variable('height'),
            latitude = atlantis.data.get_standard_variable('latitude'),
            longitude = atlantis.data.get_standard_variable('longitude'),
            xm = atlantis.data.Variable(),
            ym = atlantis.data.Variable(),
        )
        #
        self.variables['time'].data = time
        self.variables['time'].canonical_units = 'days since 0001-01-01 UTC'
        #
        self.variables['height'].data = 0.
        self.variables['latitude'].data = lat
        self.variables['longitude'].data = lon
        #
        self.variables['xm'].canonical_units = 'km'
        self.variables['xm'].description = 'Zonal distance.'
        self.variables['ym'].canonical_units = 'km'
        self.variables['ym'].description = 'Meridional distance.'
        self.variables['xm'].data, self.variables['ym'].data = (
            metergrid(self.variables['longitude'].data,
            self.variables['latitude'].data, units='km')
        )
        #
        data.close()

        # Walks through each variable file for the first year, reads their
        # attributes and adds to the dataset definition.
        self._message('\n')
        _year = self.params['year_list'][0]
        for _var in self.params['var_list']:
            fname = '{}.{}.nc'.format(_var, _year)
            data = self._open_file(fname)
            self._message('{}: '.format(_var))
            for _key in data.variables.keys():
                self._message('{} '.format(_key))
                if _key in ['time', 'time_bnds', 'level', 'level_bnds', 'lat',
                    'lon']:
                    continue
                try:
                    self.variables[_key] = atlantis.data.get_standard_variable(
                        data.variables[_key].standard_name,
                        units=data.variables[_key].units,
                        long_name=data.variables[_key].long_name,
                    )
                except Exception:
                    # Not a standard variable (or attributes missing): falls
                    # back to a generic variable definition.
                    self._message('*  ')
                    self.variables[_key] = atlantis.data.Variable(
                        units=data.variables[_key].units,
                        standard_name=data.variables[_key].standard_name,
                        long_name=data.variables[_key].long_name,
                        description=data.variables[_key].var_desc,
                    )
                self.alias[_key] = _var
            #
            self._message('\n')
            data.close()
        #
        return
Ejemplo n.º 2
0
 def __init__(self, path=None, mask_file=None, xlim=None, ylim=None,
     tlim=None, useqd=False):
     """Initializes the AVISO merged sea level anomaly dataset.

     PARAMETERS
         path (string, optional) :
             Path to the dataset files. Defaults to the local AVISO
             merged MSLA directory.
         mask_file (string, optional) :
             Path to the land/sea mask file.
         xlim, ylim (array like, optional) :
             Longitude and latitude limits of the grid.
         tlim (array like, optional) :
             Time limits, either as date strings or matplotlib date
             numbers.
         useqd (boolean, optional) :
             If true, uses only the 'quick and dirty' ('h_qd') sea
             level product.

     """
     # Initializes the variables to default values. The indices 'n', 'k',
     # 'j' and 'i' refer to the temporal, height, meridional and zonal
     # coordinates respectively. If one of these indexes is set to 'None',
     # then it is assumed infinite size, which is relevant for the 'time'
     # coordinate.
     self.attributes = dict()
     self.dimensions = dict(n=0, k=0, j=0, i=0)
     self.coordinates = dict(n=None, k=None, j=None, i=None)
     self.variables = dict()
     self.params = dict()
     self.stencil_coeffs = dict()
     self.stencil_params = dict()
     if useqd:
         self.params['datasets'] = [dict(id='h', var='h_qd')]
         self.params['var_dict'] = dict(h_Grid_0001 = 'h_qd')
         self.params['var_tcid'] = dict(h_qd=['h', 'h_qd', 'Grid_0001'])
     else:
         self.params['datasets'] = [dict(id='h', var='h'),
             dict(id='uv', var='uv'), dict(id='err', var='err')]
         self.params['var_dict'] = dict(
             h_Grid_0001 = 'h',
             uv_Grid_0001 = 'u',
             uv_Grid_0002 = 'v',
             err_Grid_0001 = 'err'
         )
         self.params['var_tcid'] = dict(
             h = ['h', 'h', 'Grid_0001'],
             u = ['uv', 'uv', 'Grid_0001'],
             v = ['uv', 'uv', 'Grid_0002'],
             err = ['err', 'err', 'Grid_0001']
         )
     # Creates an universally unique identifier (UUID) for this instance.
     self.params['uuid'] = str(uuid())

     # Sets global parameters for grid. Identity comparison with `None`
     # (PEP 8) instead of `==`.
     if path is None:
         path = ('/home/sebastian/academia/data/aviso/msla/merged')
     self.params['path'] = path
     self.params['mask_file'] = mask_file
     self.params['missing_value'] = -9999.

     # Generates list of files, tries to match them to the pattern and to
     # extract the time.
     file_pattern = ('dt_ref_global_merged_msla_(%s)_(\d*)_(\d*)_(\d*)'
         '.nc.gz' % ('|'.join([item['var'] for item in
         self.params['datasets']])))
     flist = listdir('%s/%s' % (self.params['path'],
         self.params['datasets'][0]['id']))
     flist.sort()
     flist, match = reglist(flist, file_pattern)

     # Convert dates to matplotlib format, i.e. days since 0001-01-01 UTC.
     time_list = array(dates.datestr2num(['%4s-%2s-%2s 12:00' %
         (item[1][:4], item[1][4:6], item[1][6:]) for item in match]))

     # If tlim are set, calculate the time limits of the dataset and
     # corresponding files.
     if tlim is not None:
         for i, t in enumerate(tlim):
             # Date strings are converted to matplotlib date numbers.
             if isinstance(t, str):
                 tlim[i] = dates.datestr2num(t)
         #
         t_sel = flatnonzero(((time_list >= tlim[0]) &
             (time_list <= tlim[1])))
         time_list = time_list[t_sel]
     else:
         t_sel = range(len(time_list))

     fdict = [dict(start=match[n][1], end=match[n][2],
         creation=match[n][3]) for n in t_sel]
     self.params['file_list'] = fdict
     if len(flist) == 0:
         return

     # Reads first file in dataset to determine array geometry and
     # dimensions (lon, lat).
     params = dict(path=self.params['path'],
         dataset=self.params['datasets'][0]['id'],
         datavar=self.params['datasets'][0]['var'],
         **self.params['file_list'][0])
     fname = self.create_filename(**params)
     data = self.read_file(fname)
     lat = data.variables['NbLatitudes'].data
     lon = data.variables['NbLongitudes'].data

     # If xlim and ylim are set, calculate how many indices have to be moved
     # in order for latitude array to start at xlim[0].
     lon, lat, xlim, ylim, ii, jj = self.getLongitudeLatitudeLimits(lon,
         lat, xlim, ylim)
     self.params['xlim'], self.params['ylim'] = xlim, ylim
     self.params['lon_i'], self.params['lat_j'] = ii, jj
     self.params['dlon'] = lon[1] - lon[0]
     self.params['dlat'] = lat[1] - lat[0]

     # Initializes the grid attributes, dimensions, coordinates and
     # variables. NOTE: `vars(data).iteritems()` is Python 2 API.
     self.name = 'sea_level_anomaly_geostrophic_velocities'
     for attr, attr_value in vars(data).iteritems():
         if attr in ['mode', 'filename']:
             continue
         if isinstance(attr_value, str):
             if attr in ['name']:
                 self.name = attr_value
             elif attr in ['description', 'summary', 'title']:
                 self.description = attr_value
             else:
                 self.attributes[attr.lower()] = attr_value
     self.dimensions = dict(n=time_list.size, k=1, j=lat.size, i=lon.size)
     self.coordinates = dict(n='time', k='height', j='latitude',
         i='longitude')
     #
     self.variables = dict(
         time = atlantis.data.variable(
             canonical_units='days since 0001-01-01 UTC',
             data=time_list,
         ),
         height = atlantis.data.get_standard_variable('height', data=[0.]),
         latitude = atlantis.data.get_standard_variable('latitude',
             data=lat),
         longitude = atlantis.data.get_standard_variable('longitude',
             data=lon),
         xm = atlantis.data.variable(
             canonical_units = 'km',
             description = 'Zonal distance.'
         ),
         ym = atlantis.data.variable(
             canonical_units = 'km',
             description = 'Meridional distance.'
         ),
     )
     #
     self.variables['xm'].data, self.variables['ym'].data = (
         metergrid(self.variables['longitude'].data,
         self.variables['latitude'].data, unit='km')
     )
     # Walks through every dataset to read list of variables.
     self.params['var_list'] = list()
     for i, dataset in enumerate(self.params['datasets']):
         if i > 0:
             params = dict(path=self.params['path'],
                 dataset=dataset['id'], datavar=dataset['var'],
                 **self.params['file_list'][0])
             fname = self.create_filename(**params)
             data = self.read_file(fname)
         # Walks through every variable in NetCDF file.
         for var in data.variables.keys():
             if var in ['Grid_0001', 'Grid_0002']:
                 nvar = self.params['var_dict']['{0}_{1}'.format(
                     dataset['id'], var)]
                 attribs = dict(
                     missing_value = data.variables[var]._FillValue,
                     canonical_units = data.variables[var].units,
                     description = data.variables[var].long_name,
                     dataset = dataset,
                     variable = var
                 )
                 self.variables[nvar] = atlantis.data.variable(**attribs)
                 self.params['var_list'].append(nvar)
         # Closes the data access and removes temporary NetCDF file.
         self.close_file(data)

     return
Ejemplo n.º 3
0
    r'0$^{\circ}$', r'0.5$^{\circ}$', r'1$^{\circ}$', r'1.5$^{\circ}$',
    r'2$^{\circ}$', r'2.5$^{\circ}$'
]
#
pyplot.close('all')
pyplot.ion()

# Initializes dummy dataset
grid = dummy.Grid()
x = grid.variables['longitude'].data
y = grid.variables['latitude'].data

# Loads ETOPO 0.25 topography and mask
ex, ey, _, ez = klib.file.load_map(etopo_file, lon=x)
mx, my, _, mz = klib.file.load_map(mask_file, lon=ex, lat=ey)
# Converts the latitude axis to kilometre distances along one meridian.
xm, ym = metergrid([1.], ey, unit='km')

# NaN values in the mask map are treated as land (mask value zero).
mz[numpy.isnan(mz)] = 0
mask = (mz == 0)

# Loads phase speed of first-mode baroclinic gravity waves and Rossby radius
# of deformation as of Chelton et al. (1998).
fname = 'rossrad.dat'
lat, lon, c, Ro = numpy.loadtxt(fname, unpack=True)
# Wraps longitudes to the same branch as the dataset's longitude axis.
lon = klib.common.lon_n(lon, x.max())

# Saves data into temporary file and interpolates it using GMT (Smith & Wessel)
# The helper script `icall` is invoked with the process number so parallel
# workers write to distinct dump files.
numpy.savetxt('dump_%d.xyz' % (nproc), numpy.array([lon, lat, Ro]).T)
ret = subprocess.call(['./%s' % (icall), '%s' % nproc],
                      stdout=subprocess.PIPE,
                      stderr=subprocess.PIPE)
Ejemplo n.º 4
0
    def __init__(self, path=None, mask_file=None, xlim=None, ylim=None):
        """Initializes the NOAA/NCDC SeaWinds daily wind stress dataset.

        PARAMETERS
            path (string, optional) :
                Path to the dataset files. Defaults to the local
                SeaWinds daily stress directory.
            mask_file (string, optional) :
                Path to the land/sea mask file.
            xlim, ylim (array like, optional) :
                Longitude and latitude limits of the grid.

        """
        # Initializes the variables to default values. The indices 'n', 'k',
        # 'j' and 'i' refer to the temporal, height, meridional and zonal
        # coordinates respectively. If one of these indexes is set to 'None',
        # then it is assumed infinite size, which is relevant for the 'time'
        # coordinate.
        self.attributes = dict()
        self.dimensions = dict(n=0, k=0, j=0, i=0)
        self.coordinates = dict(n=None, k=None, j=None, i=None)
        self.variables = dict()
        self.params = dict()
        self.stencil_coeffs = dict()
        self.stencil_params = dict()

        # Sets global parameters for grid. Identity comparison with `None`
        # (PEP 8) instead of `==`.
        if path is None:
            path = ('/home/sebastian/academia/data/ncdc.noaa/seawinds/stress/'
                    'daily')
        self.params['path'] = path
        self.params['mask_file'] = mask_file
        self.params['missing_value'] = -9999.

        # Generates list of files, tries to match them to the pattern and to
        # extract the time.
        file_pattern = 'tauxy([0-9]{8}).nc'
        flist = listdir(self.params['path'])
        flist, match = reglist(flist, file_pattern)
        self.params['file_list'] = flist
        if len(flist) == 0:
            return

        # Convert dates to matplotlib format, i.e. days since 0001-01-01 UTC.
        time_list = array([dates.datestr2num(item) for item in match])

        # Reads first file in dataset to determine array geometry and
        # dimensions (lon, lat).
        data = netcdf(
            '%s/%s' % (self.params['path'], self.params['file_list'][0]), 'r')
        for var in data.variables.keys():
            if var in ['latitude', 'lat']:
                lat = data.variables[var].data
            elif var in ['longitude', 'lon']:
                lon = data.variables[var].data

        # If xlim and ylim are set, calculate how many indices have to be moved
        # in order for latitude array to start at xlim[0]. Uses boolean `or`
        # instead of the bitwise `|` of comparison results.
        if (xlim is not None) or (ylim is not None):
            if xlim is None:
                xlim = (lon.min(), lon.max())
            if ylim is None:
                ylim = (lat.min(), lat.max())
            #
            LON = lon_n(lon, xlim[1])
            i = argsort(LON)
            selx = i[flatnonzero((LON[i] >= xlim[0]) & (LON[i] <= xlim[1]))]
            sely = flatnonzero((lat >= ylim[0]) & (lat <= ylim[1]))
            ii, jj = meshgrid(selx, sely)
            lon = LON[selx]
            lat = lat[sely]
            self.params['xlim'] = xlim
            self.params['ylim'] = ylim
            self.params['lon_i'] = ii
            self.params['lat_j'] = jj
        self.params['dlon'] = lon[1] - lon[0]
        self.params['dlat'] = lat[1] - lat[0]

        # Initializes the grid attributes, dimensions, coordinates and
        # variables. NOTE: `vars(data).iteritems()` is Python 2 API.
        self.name = 'sea_surface_wind_stress'
        for attr, attr_value in vars(data).iteritems():
            if attr in ['mode', 'filename']:
                continue
            if isinstance(attr_value, str):
                if attr in ['name']:
                    self.name = attr_value
                elif attr in ['description', 'summary']:
                    self.description = attr_value
                else:
                    self.attributes[attr.lower()] = attr_value
        self.dimensions = dict(n=time_list.size, k=1, j=lat.size, i=lon.size)
        self.coordinates = dict(n='time',
                                k='height',
                                j='latitude',
                                i='longitude')
        #
        self.variables = dict(
            time=atlantis.data.variable(
                canonical_units='days since 0001-01-01 UTC',
                data=time_list,
            ),
            height=atlantis.data.get_standard_variable('height', data=[0.]),
            latitude=atlantis.data.get_standard_variable('latitude', data=lat),
            longitude=atlantis.data.get_standard_variable('longitude',
                                                          data=lon),
            xm=atlantis.data.variable(canonical_units='km',
                                      description='Zonal distance.'),
            ym=atlantis.data.variable(canonical_units='km',
                                      description='Meridional distance.'),
        )
        #
        self.variables['xm'].data, self.variables['ym'].data = (metergrid(
            self.variables['longitude'].data,
            self.variables['latitude'].data,
            unit='km'))
        #
        # Reads the stress variable attributes, normalizing the units
        # string and the missing value flag.
        self.params['var_list'] = list()
        for var in data.variables.keys():
            if var in ['tau', 'taux', 'tauy', 'tau_div', 'tau_curl']:
                attribs = dict()
                for attr, attr_value in vars(data.variables[var]).iteritems():
                    if attr == '_FillValue':
                        attribs['missing_value'] = attr_value
                    elif attr == 'data':
                        continue
                    elif attr == 'long_name':
                        attribs['description'] = attr_value
                    elif attr == 'units':
                        if attr_value == 'N/m**2':
                            a = 'N m-2'
                        else:
                            a = attr_value
                        attribs['canonical_units'] = a
                    else:
                        attribs[attr] = attr_value
                self.variables[var] = atlantis.data.variable(**attribs)
                self.params['var_list'].append(var)
                if self.variables[var].missing_value is None:
                    self.variables[var].missing_value = (
                        self.params['missing_value'])
        #
        data.close()
        return
Ejemplo n.º 5
0
 def test_metergrid_nm(self):
     """Checks metergrid distances in nautical miles against metres / 1852."""
     metres_per_nm = 1852
     grid_x, grid_y = astronomy.metergrid(self.lon_1, self.lat_1, unit='nm')
     numpy.testing.assert_array_almost_equal(self.x_1 / metres_per_nm, grid_x)
     numpy.testing.assert_array_almost_equal(self.y_1 / metres_per_nm, grid_y)
Ejemplo n.º 6
0
    def __init__(self, path=None, xlim=None, ylim=None, tlim=None):
        """Initializes the NCEP reanalysis grid dataset.

        PARAMETERS
            path (string, optional) :
                Path to the directory containing the yearly NetCDF
                files. Defaults to a local NCEP reanalysis 2 gaussian
                grid directory.
            xlim, ylim (array like, optional) :
                Longitude and latitude limits of the grid.
            tlim (array like, optional) :
                Time limits; accepted for interface compatibility.

        """
        # Initializes the variables to default values. The indices 'n', 'k',
        # 'j' and 'i' refer to the temporal, height, meridional and zonal
        # coordinates respectively. If one of these indexes is set to 'None',
        # then it is assumed infinite size, which is relevant for the 'time'
        # coordinate.
        self.attributes = dict()
        self.dimensions = dict(n=0, k=0, j=0, i=0)
        self.coordinates = dict(n=None, k=None, j=None, i=None)
        self.variables = dict()
        self.params = dict()
        self.data = dict()
        self.stencil_coeffs = dict()
        self.stencil_params = dict()
        self.alias = dict()

        # Sets global parameters for grid. Identity comparison with `None`
        # (PEP 8) instead of `==`.
        if path is None:
            path = ('/home/sebastian/academia/data/ncep.reanalysis2/'
                    'gaussian_grid')
        self.params['path'] = path
        self.params['var_list'] = []
        self.params['year_list'] = []

        # Generates list of files, tries to match them to the pattern and to
        # extract the time. To help understanding the naming convention and
        # pattern, see the following example:
        #   uwnd.2015.nc
        file_pattern = '(.*).([0-9]{4}).nc'
        flist = listdir(self.params['path'])
        flist, match = reglist(flist, file_pattern)
        self.params['file_list'] = flist

        # Gets list of variables from file match.
        _vars, _years = zip(*match)
        self.params['var_list'] = unique(_vars)
        self.params['year_list'] = unique(_years)

        # Loads data from first variable and loads longitude and latitude data.
        # We assume that all data is homogeneous throughout the dataset. Then
        # walks through each year and loads time vector.
        _var = self.params['var_list'][0]
        data = None
        for _i, _year in enumerate(self.params['year_list']):
            fname = '{}.{}.nc'.format(_var, _year)
            # Closes the handle from the previous iteration explicitly
            # instead of using a bare `try/except` that would also mask
            # unrelated errors.
            if data is not None:
                data.close()
            data = self._open_file(fname)
            #
            if _i == 0:
                lon = data.variables['lon'].data
                lat = data.variables['lat'].data
                time = data.variables['time'].data
            else:
                time = hstack([time, data.variables['time'].data])

        # Time in dataset is given in `hours since 1800-1-1 00:00:0.0` and we
        # convert it to matplotlib's date format.
        if data.variables['time'].units == 'hours since 1800-1-1 00:00:0.0':
            self.params['t0'] = dates.date2num(
                dates.datetime.datetime(1800, 1, 1, 0, 0))
            time = self.params['t0'] + time / 24.

        # If lon_0 is set, calculate how many indices have to be moved in
        # order for latitude array to start at lon_0.
        lon, lat, xlim, ylim, ii, jj = self.getLongitudeLatitudeLimits(
            lon, lat, xlim, ylim)

        self.params['xlim'], self.params['ylim'] = xlim, ylim
        self.params['lon_i'], self.params['lat_j'] = ii, jj

        # Initializes the grid attributes, dimensions, coordinates and
        # variables.
        self.name = 'ncep_reanalysis'
        self.description = (
            'NCEP Reanalysis project is analysis/forecast '
            'system to perform data assimilation using past data from 1979 '
            'onwards.')
        self.attributes['institution'] = data.institution
        self.dimensions = dict(n=time.size, k=0, j=lat.size, i=lon.size)
        self.coordinates = dict(n='time',
                                k='height',
                                j='latitude',
                                i='longitude')
        self.variables = dict(
            time=atlantis.data.Variable(),
            height=atlantis.data.get_standard_variable('height'),
            latitude=atlantis.data.get_standard_variable('latitude'),
            longitude=atlantis.data.get_standard_variable('longitude'),
            xm=atlantis.data.Variable(),
            ym=atlantis.data.Variable(),
        )
        #
        self.variables['time'].data = time
        self.variables['time'].canonical_units = 'days since 0001-01-01 UTC'
        #
        self.variables['height'].data = 0.
        self.variables['latitude'].data = lat
        self.variables['longitude'].data = lon
        #
        self.variables['xm'].canonical_units = 'km'
        self.variables['xm'].description = 'Zonal distance.'
        self.variables['ym'].canonical_units = 'km'
        self.variables['ym'].description = 'Meridional distance.'
        self.variables['xm'].data, self.variables['ym'].data = (metergrid(
            self.variables['longitude'].data,
            self.variables['latitude'].data,
            units='km'))
        #
        data.close()

        # Walks through each variable file for the first year, reads their
        # attributes and adds to the dataset definition.
        self._message('\n')
        _year = self.params['year_list'][0]
        for _var in self.params['var_list']:
            fname = '{}.{}.nc'.format(_var, _year)
            data = self._open_file(fname)
            self._message('{}: '.format(_var))
            for _key in data.variables.keys():
                self._message('{} '.format(_key))
                if _key in [
                        'time', 'time_bnds', 'level', 'level_bnds', 'lat',
                        'lon'
                ]:
                    continue
                try:
                    self.variables[_key] = atlantis.data.get_standard_variable(
                        data.variables[_key].standard_name,
                        units=data.variables[_key].units,
                        long_name=data.variables[_key].long_name,
                    )
                except Exception:
                    # Not a standard variable (or attributes missing): falls
                    # back to a generic variable definition.
                    self._message('*  ')
                    self.variables[_key] = atlantis.data.Variable(
                        units=data.variables[_key].units,
                        standard_name=data.variables[_key].standard_name,
                        long_name=data.variables[_key].long_name,
                        description=data.variables[_key].var_desc,
                    )
                self.alias[_key] = _var
            #
            self._message('\n')
            data.close()
        #
        return
Ejemplo n.º 7
0
mask_file = "/home/sebastian/academia/data/aviso/misc/mask025.xy.gz"
deg = numpy.array([0, 0.5, 1, 1.5, 2, 2.5])
deglbl = [r"0$^{\circ}$", r"0.5$^{\circ}$", r"1$^{\circ}$", r"1.5$^{\circ}$", r"2$^{\circ}$", r"2.5$^{\circ}$"]
#
pyplot.close("all")
pyplot.ion()

# Initializes dummy dataset
grid = dummy.Grid()
x = grid.variables["longitude"].data
y = grid.variables["latitude"].data

# Loads ETOPO 0.25 topography and mask
ex, ey, _, ez = klib.file.load_map(etopo_file, lon=x)
mx, my, _, mz = klib.file.load_map(mask_file, lon=ex, lat=ey)
# Converts the latitude axis to kilometre distances along one meridian.
xm, ym = metergrid([1.0], ey, unit="km")

# NaN values in the mask map are treated as land (mask value zero).
mz[numpy.isnan(mz)] = 0
mask = mz == 0

# Loads phase speed of first-mode baroclinic gravity waves and Rossby radius
# of deformation as of Chelton et al. (1998).
fname = "rossrad.dat"
lat, lon, c, Ro = numpy.loadtxt(fname, unpack=True)
# Wraps longitudes to the same branch as the dataset's longitude axis.
lon = klib.common.lon_n(lon, x.max())

# Saves data into temporary file and interpolates it using GMT (Smith & Wessel)
# The helper script `icall` is invoked with the process number so parallel
# workers write to distinct dump files.
numpy.savetxt("dump_%d.xyz" % (nproc), numpy.array([lon, lat, Ro]).T)
ret = subprocess.call(["./%s" % (icall), "%s" % nproc], stdout=subprocess.PIPE, stderr=subprocess.PIPE)

# Reads the interpolated grid back from the GMT output file.
data = netcdf("%s_%d.grd" % ("output", nproc), "r")
Ejemplo n.º 8
0
 def test_metergrid_km(self):
     """Checks metergrid distances in kilometres against metres * 1e-3."""
     km_per_m = 1e-3
     grid_x, grid_y = astronomy.metergrid(self.lon_1, self.lat_1, unit='km')
     numpy.testing.assert_array_almost_equal(self.x_1 * km_per_m, grid_x)
     numpy.testing.assert_array_almost_equal(self.y_1 * km_per_m, grid_y)
Ejemplo n.º 9
0
    def __init__(self, path=None, sensor='SeaWiFS', resolution='9km',
        mask_file=None, xlim=None, ylim=None):
        """Initializes the ocean color chlorophyll-a dataset.

        PARAMETERS
            path (string, optional) :
                Base path to the raw ocean color data. Defaults to
                '/academia/data/raw/oceancolor'.
            sensor (string, optional) :
                Satellite sensor, either 'SeaWiFS' (default) or
                'MODISA'; any other value matches any file prefix.
            resolution (string, optional) :
                Grid resolution suffix in the file names (e.g. '9km').
            mask_file (string, optional) :
                Path to the land/sea mask file.
            xlim, ylim (array like, optional) :
                Longitude and latitude limits of the grid.

        """
        # Initializes the variables to default values. The indices 'n', 'k',
        # 'j' and 'i' refer to the temporal, height, meridional and zonal
        # coordinates respectively. If one of these indexes is set to 'None',
        # then it is assumed infinite size, which is relevant for the 'time'
        # coordinate.
        self.attributes = dict()
        self.dimensions = dict(n=0, k=0, j=0, i=0)
        self.coordinates = dict(n=None, k=None, j=None, i=None)
        self.variables = dict()
        self.params = dict()
        self.data = dict()
        self.stencil_coeffs = dict()
        self.stencil_params = dict()

        # Sets global parameters for grid. Identity comparison with `None`
        # (PEP 8) instead of `==`.
        if path is None:
            path = '/academia/data/raw/oceancolor'
        self.params['path'] = '%s/%s' % (path, sensor)
        self.params['mask_file'] = mask_file
        self.params['uuid'] = str(_uuid())
        self.params['var_list'] = ['chla']

        # Generates list of files, tries to match them to the pattern and to
        # extract the time. To help understanding the naming convention and
        # pattern, see the following example:
        #   A20131612013168.L3m_8D_CHL_chlor_a_9km.bz2
        # resolution = '[0-9]+km'
        if sensor == 'SeaWiFS':
            sensor_prefix = 'S'
        elif sensor == 'MODISA':
            sensor_prefix = 'A'
        else:
            # BUG FIX: the original assigned `sensor = '.*'` here, leaving
            # `sensor_prefix` undefined and raising NameError when building
            # the file pattern below.
            sensor_prefix = '.*'
        file_pattern = ('(%s)([0-9]{4})([0-9]{3})([0-9]{4})([0-9]{3}).(L3m)_'
            '(8D)_(CHL)_(chlor_a)_(%s).bz2') % (sensor_prefix, resolution)
        flist = listdir(self.params['path'])
        flist, match = _reglist(flist, file_pattern)
        self.params['file_list'] = flist

        # Reads first file in dataset to determine array geometry and
        # dimensions (lon, lat).
        HDF = self._open_HDF('%s/%s' % (self.params['path'],
            self.params['file_list'][0]))
        HDF_att = HDF.attributes()
        lon = arange(HDF_att['Westernmost Longitude'],
            HDF_att['Easternmost Longitude'], HDF_att['Longitude Step'])
        lat = arange(HDF_att['Northernmost Latitude'],
            HDF_att['Southernmost Latitude'], -HDF_att['Latitude Step'])

        # If lon_0 is set, calculate how many indices have to be moved in
        # order for latitude array to start at lon_0.
        lon, lat, xlim, ylim, ii, jj = self.getLongitudeLatitudeLimits(lon,
            lat, xlim, ylim)
        self.params['xlim'], self.params['ylim'] = xlim, ylim
        self.params['lon_i'], self.params['lat_j'] = ii, jj

        # Creates a structured array for start year, start day, end year and
        # end day. Afterwards, the dates are converted from julian day to
        # matplotlib format, i.e. days since 0001-01-01 UTC.
        time_list = array([('%s-01-01' % (item[1]), atof(item[2]),
            '%s-01-01' % (item[3]), atof(item[4])) for item in match],
            dtype=[('start_year', 'a10'), ('start_day', 'f2'),
            ('end_year', 'a10'), ('end_day', 'f2')])
        time_start = (dates.datestr2num(time_list['start_year']) +
            time_list['start_day'] - 1)
        time_end = (dates.datestr2num(time_list['end_year']) +
            time_list['end_day'] - 1)
        # Each composite is timestamped at the middle of its period.
        time_middle = 0.5 * (time_start + time_end)

        # Initializes the grid attributes, dimensions, coordinates and
        # variables.
        self.name = 'mass_concentration_of_chlorophyll_a_in_sea_water'
        self.description = ('Chlorophyll-a pigment concentration '
            'inferred from satellite visible light radiance measurements.')
        self.attributes['institution'] = HDF_att['Data Center']
        self.attributes['sensor name'] = HDF_att['Sensor Name']
        self.dimensions = dict(n=time_middle.size, k=0, j=lat.size, i=lon.size)
        self.coordinates = dict(n='time', k='height', j='latitude',
            i='longitude')
        self.variables = dict(
            time = atlantis.data.Variable(),
            height = atlantis.data.get_standard_variable('height'),
            latitude = atlantis.data.get_standard_variable('latitude'),
            longitude = atlantis.data.get_standard_variable('longitude'),
            chla = atlantis.data.get_standard_variable(
                'mass_concentration_of_chlorophyll_a_in_sea_water'
            ),
            xm = atlantis.data.Variable(),
            ym = atlantis.data.Variable(),
        )
        self.variables['time'].data = time_middle
        self.variables['time'].canonical_units = 'days since 0001-01-01 UTC'
        #
        self.variables['height'].data = 0.
        self.variables['latitude'].data = lat
        self.variables['longitude'].data = lon
        self.variables['chla'].canonical_units = 'mg m-3'
        #
        self.variables['xm'].canonical_units = 'km'
        self.variables['xm'].description = 'Zonal distance.'
        self.variables['ym'].canonical_units = 'km'
        self.variables['ym'].description = 'Meridional distance.'
        self.variables['xm'].data, self.variables['ym'].data = (
            metergrid(self.variables['longitude'].data,
            self.variables['latitude'].data, units='km')
        )
        return
Ejemplo n.º 10
0
    def __init__(self, path=None, mask_file=None, xlim=None, ylim=None):
        """Initializes the sea surface wind stress grid dataset.

        PARAMETERS
            path (string, optional) :
                Directory containing the daily 'tauxyYYYYMMDD.nc' files.
                Defaults to a hard-coded local data path.
            mask_file (string, optional) :
                Path to an optional land / ocean mask file.
            xlim, ylim (tuple, optional) :
                Longitude and latitude limits used to subset the grid.
        """
        # Initializes the variables to default values. The indices 'n', 'k',
        # 'j' and 'i' refer to the temporal, height, meridional and zonal
        # coordinates respectively. If one of these indexes is set to 'None',
        # then it is assumed infinite size, which is relevant for the 'time'
        # coordinate.
        self.attributes = dict()
        self.dimensions = dict(n=0, k=0, j=0, i=0)
        self.coordinates = dict(n=None, k=None, j=None, i=None)
        self.variables = dict()
        self.params = dict()
        self.stencil_coeffs = dict()
        self.stencil_params = dict()

        # Sets global parameters for grid.
        if path is None:
            path = ('/home/sebastian/academia/data/ncdc.noaa/seawinds/stress/'
                'daily')
        self.params['path'] = path
        self.params['mask_file'] = mask_file
        self.params['missing_value'] = -9999.

        # Generates list of files, tries to match them to the pattern and to
        # extract the time.
        file_pattern = 'tauxy([0-9]{8}).nc'
        flist = listdir(self.params['path'])
        flist, match = reglist(flist, file_pattern)
        self.params['file_list'] = flist
        if len(flist) == 0:
            return

        # Convert dates to matplotlib format, i.e. days since 0001-01-01 UTC.
        time_list = array([dates.datestr2num(item) for item in match])

        # Reads first file in dataset to determine array geometry and
        # dimensions (lon, lat).
        data = netcdf('%s/%s' % (self.params['path'],
            self.params['file_list'][0]), 'r')
        for var in data.variables.keys():
            if var in ['latitude', 'lat']:
                lat = data.variables[var].data
            elif var in ['longitude', 'lon']:
                lon = data.variables[var].data

        # If xlim and ylim are set, calculate how many indices have to be moved
        # in order for latitude array to start at xlim[0].
        if (xlim is not None) or (ylim is not None):
            if xlim is None:
                xlim = (lon.min(), lon.max())
            if ylim is None:
                ylim = (lat.min(), lat.max())
            # Wraps longitudes relative to the eastern limit, then selects
            # the sorted indices falling inside the requested limits.
            LON = lon_n(lon, xlim[1])
            i = argsort(LON)
            selx = i[flatnonzero((LON[i] >= xlim[0]) & (LON[i] <= xlim[1]))]
            sely = flatnonzero((lat >= ylim[0]) & (lat <= ylim[1]))
            ii, jj = meshgrid(selx, sely)
            lon = LON[selx]
            lat = lat[sely]
            self.params['xlim'] = xlim
            self.params['ylim'] = ylim
            self.params['lon_i'] = ii
            self.params['lat_j'] = jj
        # Grid steps, assuming uniform spacing (taken from the first pair).
        self.params['dlon'] = lon[1] - lon[0]
        self.params['dlat'] = lat[1] - lat[0]

        # Initializes the grid attributes, dimensions, coordinates and
        # variables. String attributes of the NetCDF file are copied into
        # the dataset attributes, except for file-handling metadata.
        self.name = 'sea_surface_wind_stress'
        for attr, attr_value in vars(data).items():
            if attr in ['mode', 'filename']:
                continue
            if isinstance(attr_value, str):
                if attr in ['name']:
                    self.name = attr_value
                elif attr in ['description', 'summary']:
                    self.description = attr_value
                else:
                    self.attributes[attr.lower()] = attr_value
        self.dimensions = dict(n=time_list.size, k=1, j=lat.size, i=lon.size)
        self.coordinates = dict(n='time', k='height', j='latitude',
            i='longitude')
        #
        self.variables = dict(
            time = atlantis.data.variable(
                canonical_units='days since 0001-01-01 UTC',
                data=time_list,
            ),
            height = atlantis.data.get_standard_variable('height', data=[0.]),
            latitude = atlantis.data.get_standard_variable('latitude',
                data=lat),
            longitude = atlantis.data.get_standard_variable('longitude',
                data=lon),
            xm = atlantis.data.variable(
                canonical_units = 'km',
                description = 'Zonal distance.'
            ),
            ym = atlantis.data.variable(
                canonical_units = 'km',
                description = 'Meridional distance.'
            ),
        )
        # Zonal and meridional distance grids derived from the coordinates.
        self.variables['xm'].data, self.variables['ym'].data = (
            metergrid(self.variables['longitude'].data, 
            self.variables['latitude'].data, unit='km')
        )
        # Walks through the stress-related variables in the NetCDF file and
        # registers them, normalizing attribute names along the way.
        self.params['var_list'] = list()
        for var in data.variables.keys():
            if var in ['tau', 'taux', 'tauy', 'tau_div', 'tau_curl']:
                attribs = dict()
                for attr, attr_value in vars(data.variables[var]).items():
                    if attr == '_FillValue':
                        attribs['missing_value'] = attr_value
                    elif attr == 'data':
                        continue
                    elif attr == 'long_name':
                        attribs['description'] = attr_value
                    elif attr == 'units':
                        # Normalizes the stress unit spelling.
                        if attr_value == 'N/m**2':
                            a = 'N m-2'
                        else:
                            a = attr_value
                        attribs['canonical_units'] = a
                    else:
                        attribs[attr] = attr_value
                self.variables[var] = atlantis.data.variable(**attribs)
                self.params['var_list'].append(var)
                # Falls back to the dataset-wide missing value when the
                # variable does not define one itself.
                if self.variables[var].missing_value is None:
                    self.variables[var].missing_value = (
                        self.params['missing_value'])
        #
        data.close()
        return
Ejemplo n.º 11
0
    def __init__(self, resolution=25, xlim=None, ylim=None):
        """Initializes the dummy sea surface height dataset.

        PARAMETERS
            resolution (integer, optional) :
                Selects the mask file 'mask%03d.xy.gz' located next to
                this module.
            xlim, ylim (tuple, optional) :
                Longitude and latitude limits used to subset the grid.
        """
        # Initializes the variables to default values. The indices 'n', 'k',
        # 'j' and 'i' refer to the temporal, height, meridional and zonal
        # coordinates respectively. If one of these indexes is set to 'None',
        # then it is assumed infinite size, which is relevant for the 'time'
        # coordinate.
        self.attributes = dict()
        self.dimensions = dict(n=0, k=0, j=0, i=0)
        self.coordinates = dict(n=None, k=None, j=None, i=None)
        self.variables = dict()
        self.params = dict()
        self._data = dict()
        self.stencil_coeffs = dict()
        self.stencil_params = dict()
        
        # Loads land / ocean mask. The gzipped text matrix stores the
        # longitudes in the first row, the latitudes in the first column and
        # the integer mask codes in the remaining cells.
        _path = path.dirname(__file__)
        mask_file = '%s/mask%03d.xy.gz' % (_path, resolution)
        dat = loadtxt('%s' % (mask_file))
        lon = dat[0, 1:]
        lat = dat[1:, 0]
        mz = dat[1:, 1:]
        #lat = arange(-90., 90., dy)  + dy / 2.
        #lon = 20. + arange(0., 360., dx) - dx / 2.

        # If xlim and ylim are set, calculate how many indices have to be moved
        # in order for latitude array to start at xlim[0].
        lon, lat, xlim, ylim, ii, jj = self.getLongitudeLatitudeLimits(lon,
            lat, xlim, ylim)
        self.params['xlim'], self.params['ylim'] = xlim, ylim
        self.params['lon_i'], self.params['lat_j'] = ii, jj
        
        # Initializes the grid attributes, dimensions, coordinates and
        # variables. The time dimension is left unbounded (n=None).
        self.name = 'sea_surface_height_above_sea_level'
        self.description = ('Dummy sea surface height anomaly data set, for '
            'test purposes only. The data set consists of a global 0.25 degree'
            ' grid with a seasonal cycle, westward propagating planetary wave '
            'field, meso-scale eddies and random noise. The temporal time-step'
            ' is one day.')
        self.dimensions = dict(n=None, k=0, j=lat.size, i=lon.size)
        self.coordinates = dict(n='time', k='height', j='latitude',
            i='longitude')
        self.variables = dict(
            time = atlantis.data.get_standard_variable('time', data=None),
            height = atlantis.data.get_standard_variable('height'),
            latitude = atlantis.data.get_standard_variable('latitude', 
                data=lat),
            longitude = atlantis.data.get_standard_variable('longitude', 
                data=lon),
            ssh = atlantis.data.get_standard_variable(
                'sea_surface_height_above_sea_level'),
            xm = atlantis.data.variable(),
            ym = atlantis.data.variable(),
            mask = atlantis.data.variable(
                description = ('Land and ocean mask defined as follows: land '
                    '(0), Atlantic Ocean (1), Pacific Ocean (2),  Indian Ocean'
                    ' (3), Caribbean Sea (4), Gulf of Mexico (5), Tasman Sea '
                    '(6), Bay of Bengal (7)'),
                data = mz[jj, ii]
            )
        )
        self.variables['height'].data = 0.
        #
        # NOTE(review): canonical_units below are set to 'm' but metergrid is
        # called with units='km' — confirm which unit is actually intended.
        self.variables['xm'].canonical_units = 'm'
        self.variables['xm'].description = 'Zonal distance.'
        self.variables['ym'].canonical_units = 'm'
        self.variables['ym'].description = 'Meridional distance.'
        self.variables['xm'].data, self.variables['ym'].data = (
            astronomy.metergrid(self.variables['longitude'].data,
            self.variables['latitude'].data, units='km')
        )

        # Determines the function parameters for
        #
        # f(t) = a + b\,t + c\,\sin(\omega\,t + \phi) + 
        #   d\,\cos(\omega\,t + \phi)
        #
        # i.e. random per-cell offsets, trends, amplitudes, frequencies and
        # phases used to synthesize the dummy SSH field.
        i, j = self.dimensions['i'], self.dimensions['j']
        self.params['a'] = 100 * randn(j, i)
        self.params['b'] = 0.0005 + 0.001 * randn(j, i)
        self.params['c'] = 25 * rand(j, i)
        self.params['omega'] = 0.017202791695176148 + 0.001 * randn(j, i)
        self.params['phi'] = pi * rand(j, i)
Ejemplo n.º 12
0
    def __init__(self,
                 path=None,
                 mask_file=None,
                 xlim=None,
                 ylim=None,
                 tlim=None,
                 useqd=False):
        """Initializes the AVISO merged sea level anomaly grid dataset.

        PARAMETERS
            path (string, optional) :
                Directory containing the 'h', 'uv' and 'err' dataset
                subdirectories. Defaults to a hard-coded local data path.
            mask_file (string, optional) :
                Path to an optional land / ocean mask file.
            xlim, ylim (tuple, optional) :
                Longitude and latitude limits used to subset the grid.
            tlim (sequence, optional) :
                Start and end times, either as date strings or matplotlib
                day numbers, used to subset the dataset in time.
            useqd (boolean, optional) :
                If True, uses only the quick-delivery sea level anomaly
                ('h_qd') dataset instead of the full delayed-time set.
        """
        # Initializes the variables to default values. The indices 'n', 'k',
        # 'j' and 'i' refer to the temporal, height, meridional and zonal
        # coordinates respectively. If one of these indexes is set to 'None',
        # then it is assumed infinite size, which is relevant for the 'time'
        # coordinate.
        self.attributes = dict()
        self.dimensions = dict(n=0, k=0, j=0, i=0)
        self.coordinates = dict(n=None, k=None, j=None, i=None)
        self.variables = dict()
        self.params = dict()
        self.stencil_coeffs = dict()
        self.stencil_params = dict()
        # Maps NetCDF grid names ('<dataset>_<grid>') to dataset variable
        # names, and back from variable name to (id, var, grid) triples.
        if useqd:
            self.params['datasets'] = [dict(id='h', var='h_qd')]
            self.params['var_dict'] = dict(h_Grid_0001='h_qd')
            self.params['var_tcid'] = dict(h_qd=['h', 'h_qd', 'Grid_0001'])
        else:
            self.params['datasets'] = [
                dict(id='h', var='h'),
                dict(id='uv', var='uv'),
                dict(id='err', var='err')
            ]
            self.params['var_dict'] = dict(h_Grid_0001='h',
                                           uv_Grid_0001='u',
                                           uv_Grid_0002='v',
                                           err_Grid_0001='err')
            self.params['var_tcid'] = dict(h=['h', 'h', 'Grid_0001'],
                                           u=['uv', 'uv', 'Grid_0001'],
                                           v=['uv', 'uv', 'Grid_0002'],
                                           err=['err', 'err', 'Grid_0001'])
        # Creates an universally unique identifier (UUID) for this instance.
        self.params['uuid'] = str(uuid())

        # Sets global parameters for grid.
        if path is None:
            path = ('/home/sebastian/academia/data/aviso/msla/merged')
        self.params['path'] = path
        self.params['mask_file'] = mask_file
        self.params['missing_value'] = -9999.

        # Generates list of files, tries to match them to the pattern and to
        # extract the time.
        file_pattern = (
            r'dt_ref_global_merged_msla_(%s)_(\d*)_(\d*)_(\d*)'
            r'.nc.gz' %
            ('|'.join([item['var'] for item in self.params['datasets']])))
        flist = listdir(
            '%s/%s' % (self.params['path'], self.params['datasets'][0]['id']))
        flist.sort()
        flist, match = reglist(flist, file_pattern)

        # Convert dates to matplotlib format, i.e. days since 0001-01-01 UTC.
        time_list = array(
            dates.datestr2num([
                '%4s-%2s-%2s 12:00' % (item[1][:4], item[1][4:6], item[1][6:])
                for item in match
            ]))

        # If tlim is set, calculate the time limits of the dataset and
        # corresponding files.
        if tlim is not None:
            # Converts date strings to day numbers on a copy, so the
            # caller's sequence is never mutated (and tuples also work).
            tlim = [dates.datestr2num(t) if isinstance(t, str) else t
                    for t in tlim]
            #
            t_sel = flatnonzero(
                ((time_list >= tlim[0]) & (time_list <= tlim[1])))
            time_list = time_list[t_sel]
        else:
            t_sel = range(len(time_list))

        fdict = [
            dict(start=match[n][1], end=match[n][2], creation=match[n][3])
            for n in t_sel
        ]
        self.params['file_list'] = fdict
        if len(flist) == 0:
            return

        # Reads first file in dataset to determine array geometry and
        # dimensions (lon, lat).
        params = dict(path=self.params['path'],
                      dataset=self.params['datasets'][0]['id'],
                      datavar=self.params['datasets'][0]['var'],
                      **self.params['file_list'][0])
        fname = self.create_filename(**params)
        data = self.read_file(fname)
        lat = data.variables['NbLatitudes'].data
        lon = data.variables['NbLongitudes'].data

        # If xlim and ylim are set, calculate how many indices have to be moved
        # in order for latitude array to start at xlim[0].
        lon, lat, xlim, ylim, ii, jj = self.getLongitudeLatitudeLimits(
            lon, lat, xlim, ylim)
        self.params['xlim'], self.params['ylim'] = xlim, ylim
        self.params['lon_i'], self.params['lat_j'] = ii, jj
        # Grid steps, assuming uniform spacing (taken from the first pair).
        self.params['dlon'] = lon[1] - lon[0]
        self.params['dlat'] = lat[1] - lat[0]

        # Initializes the grid attributes, dimensions, coordinates and
        # variables. String attributes of the NetCDF file are copied into
        # the dataset attributes, except for file-handling metadata.
        self.name = 'sea_level_anomaly_geostrophic_velocities'
        for attr, attr_value in vars(data).items():
            if attr in ['mode', 'filename']:
                continue
            if isinstance(attr_value, str):
                if attr in ['name']:
                    self.name = attr_value
                elif attr in ['description', 'summary', 'title']:
                    self.description = attr_value
                else:
                    self.attributes[attr.lower()] = attr_value
        self.dimensions = dict(n=time_list.size, k=1, j=lat.size, i=lon.size)
        self.coordinates = dict(n='time',
                                k='height',
                                j='latitude',
                                i='longitude')
        #
        self.variables = dict(
            time=atlantis.data.variable(
                canonical_units='days since 0001-01-01 UTC',
                data=time_list,
            ),
            height=atlantis.data.get_standard_variable('height', data=[0.]),
            latitude=atlantis.data.get_standard_variable('latitude', data=lat),
            longitude=atlantis.data.get_standard_variable('longitude',
                                                          data=lon),
            xm=atlantis.data.variable(canonical_units='km',
                                      description='Zonal distance.'),
            ym=atlantis.data.variable(canonical_units='km',
                                      description='Meridional distance.'),
        )
        # Zonal and meridional distance grids derived from the coordinates.
        self.variables['xm'].data, self.variables['ym'].data = (metergrid(
            self.variables['longitude'].data,
            self.variables['latitude'].data,
            unit='km'))
        # Walks through every dataset to read list of variables.
        self.params['var_list'] = list()
        for i, dataset in enumerate(self.params['datasets']):
            # The first dataset file is already open from the geometry read.
            if i > 0:
                params = dict(path=self.params['path'],
                              dataset=dataset['id'],
                              datavar=dataset['var'],
                              **self.params['file_list'][0])
                fname = self.create_filename(**params)
                data = self.read_file(fname)
            # Walks through every variable in NetCDF file.
            for var in data.variables.keys():
                if var in ['Grid_0001', 'Grid_0002']:
                    nvar = self.params['var_dict']['{0}_{1}'.format(
                        dataset['id'], var)]
                    attribs = dict(
                        missing_value=data.variables[var]._FillValue,
                        canonical_units=data.variables[var].units,
                        description=data.variables[var].long_name,
                        dataset=dataset,
                        variable=var)
                    self.variables[nvar] = atlantis.data.variable(**attribs)
                    self.params['var_list'].append(nvar)
            # Closes the data access and removes temporary NetCDF file.
            self.close_file(data)

        return
Ejemplo n.º 13
0
 def test_metergrid_nm(self):
     """Checks metergrid output in nautical miles against the metre grid."""
     # One nautical mile equals 1852 metres, so the metre-based reference
     # grids are rescaled before comparison.
     grid_x, grid_y = astronomy.metergrid(self.lon_1, self.lat_1, unit='nm')
     expected_x = self.x_1 / 1852
     expected_y = self.y_1 / 1852
     numpy.testing.assert_array_almost_equal(expected_x, grid_x)
     numpy.testing.assert_array_almost_equal(expected_y, grid_y)
Ejemplo n.º 14
0
 def test_metergrid_km(self):
     """Checks metergrid output in kilometres against the metre grid."""
     # One kilometre equals 1000 metres, so the metre-based reference
     # grids are rescaled before comparison.
     grid_x, grid_y = astronomy.metergrid(self.lon_1, self.lat_1, unit='km')
     expected_x = self.x_1 * 1e-3
     expected_y = self.y_1 * 1e-3
     numpy.testing.assert_array_almost_equal(expected_x, grid_x)
     numpy.testing.assert_array_almost_equal(expected_y, grid_y)
Ejemplo n.º 15
0
    def __init__(self,
                 path=None,
                 sensor='SeaWiFS',
                 resolution='9km',
                 mask_file=None,
                 xlim=None,
                 ylim=None):
        """Initializes the ocean color chlorophyll-a grid dataset.

        PARAMETERS
            path (string, optional) :
                Root directory of the ocean color data; the sensor name is
                appended to it. Defaults to a hard-coded local data path.
            sensor (string, optional) :
                Sensor name, either 'SeaWiFS' or 'MODISA'; any other value
                matches all sensor prefixes.
            resolution (string, optional) :
                Grid resolution tag used in the file names (e.g. '9km').
            mask_file (string, optional) :
                Path to an optional land / ocean mask file.
            xlim, ylim (tuple, optional) :
                Longitude and latitude limits used to subset the grid.
        """
        # Initializes the variables to default values. The indices 'n', 'k',
        # 'j' and 'i' refer to the temporal, height, meridional and zonal
        # coordinates respectively. If one of these indexes is set to 'None',
        # then it is assumed infinite size, which is relevant for the 'time'
        # coordinate.
        self.attributes = dict()
        self.dimensions = dict(n=0, k=0, j=0, i=0)
        self.coordinates = dict(n=None, k=None, j=None, i=None)
        self.variables = dict()
        self.params = dict()
        self.data = dict()
        self.stencil_coeffs = dict()
        self.stencil_params = dict()

        # Sets global parameters for grid.
        if path is None:
            path = '/academia/data/raw/oceancolor'
        self.params['path'] = '%s/%s' % (path, sensor)
        self.params['mask_file'] = mask_file
        self.params['uuid'] = str(_uuid())
        self.params['var_list'] = ['chla']

        # Generates list of files, tries to match them to the pattern and to
        # extract the time. To help understanding the naming convention and
        # pattern, see the following example:
        #   A20131612013168.L3m_8D_CHL_chlor_a_9km.bz2
        # resolution = '[0-9]+km'
        if sensor == 'SeaWiFS':
            sensor_prefix = 'S'
        elif sensor == 'MODISA':
            sensor_prefix = 'A'
        else:
            # BUG FIX: the original assigned `sensor = '.*'` here, leaving
            # `sensor_prefix` undefined and raising NameError below for any
            # other sensor. Match any prefix instead.
            sensor_prefix = '.*'
        file_pattern = ('(%s)([0-9]{4})([0-9]{3})([0-9]{4})([0-9]{3}).(L3m)_'
                        '(8D)_(CHL)_(chlor_a)_(%s).bz2') % (sensor_prefix,
                                                            resolution)
        flist = listdir(self.params['path'])
        flist, match = _reglist(flist, file_pattern)
        self.params['file_list'] = flist

        # Reads first file in dataset to determine array geometry and
        # dimensions (lon, lat).
        HDF = self._open_HDF(
            '%s/%s' % (self.params['path'], self.params['file_list'][0]))
        HDF_att = HDF.attributes()
        lon = arange(HDF_att['Westernmost Longitude'],
                     HDF_att['Easternmost Longitude'],
                     HDF_att['Longitude Step'])
        lat = arange(HDF_att['Northernmost Latitude'],
                     HDF_att['Southernmost Latitude'],
                     -HDF_att['Latitude Step'])

        # If lon_0 is set, calculate how many indices have to be moved in
        # order for latitude array to start at lon_0.
        lon, lat, xlim, ylim, ii, jj = self.getLongitudeLatitudeLimits(
            lon, lat, xlim, ylim)
        self.params['xlim'], self.params['ylim'] = xlim, ylim
        self.params['lon_i'], self.params['lat_j'] = ii, jj

        # Creates a structured array for start year, start day, end year and
        # end day. Afterwards, the dates are converted from julian day to
        # matplotlib format, i.e. days since 0001-01-01 UTC.
        time_list = array([('%s-01-01' % (item[1]), atof(item[2]), '%s-01-01' %
                            (item[3]), atof(item[4])) for item in match],
                          dtype=[('start_year', 'a10'), ('start_day', 'f2'),
                                 ('end_year', 'a10'), ('end_day', 'f2')])
        time_start = (dates.datestr2num(time_list['start_year']) +
                      time_list['start_day'] - 1)
        time_end = (dates.datestr2num(time_list['end_year']) +
                    time_list['end_day'] - 1)
        # Each composite is represented by the middle of its time span.
        time_middle = 0.5 * (time_start + time_end)

        # Initializes the grid attributes, dimensions, coordinates and
        # variables.
        self.name = 'mass_concentration_of_chlorophyll_a_in_sea_water'
        self.description = (
            'Chlorophyll-a pigment concentration '
            'inferred from satellite visible light radiance measurements.')
        self.attributes['institution'] = HDF_att['Data Center']
        self.attributes['sensor name'] = HDF_att['Sensor Name']
        self.dimensions = dict(n=time_middle.size, k=0, j=lat.size, i=lon.size)
        self.coordinates = dict(n='time',
                                k='height',
                                j='latitude',
                                i='longitude')
        self.variables = dict(
            time=atlantis.data.Variable(),
            height=atlantis.data.get_standard_variable('height'),
            latitude=atlantis.data.get_standard_variable('latitude'),
            longitude=atlantis.data.get_standard_variable('longitude'),
            chla=atlantis.data.get_standard_variable(
                'mass_concentration_of_chlorophyll_a_in_sea_water'),
            xm=atlantis.data.Variable(),
            ym=atlantis.data.Variable(),
        )
        self.variables['time'].data = time_middle
        self.variables['time'].canonical_units = 'days since 0001-01-01 UTC'
        #
        self.variables['height'].data = 0.
        self.variables['latitude'].data = lat
        self.variables['longitude'].data = lon
        self.variables['chla'].canonical_units = 'mg m-3'
        #
        self.variables['xm'].canonical_units = 'km'
        self.variables['xm'].description = 'Zonal distance.'
        self.variables['ym'].canonical_units = 'km'
        self.variables['ym'].description = 'Meridional distance.'
        # Zonal and meridional distance grids derived from the coordinates.
        self.variables['xm'].data, self.variables['ym'].data = (metergrid(
            self.variables['longitude'].data,
            self.variables['latitude'].data,
            units='km'))
        return