Example #1
    def close_file(self, data):
        """Closes and deletes temporary file."""
        #
        # Closes NetCDF file.
        try:
            data.close()
        except:
            pass

        # Removes the temporary dump file.
        try:
            remove('%s_%s.nc' % (self.params['uuid'], 'dump'))
        except:
            pass
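
A note on the bare `except: pass` blocks above: they silence every error, including programming mistakes. A minimal sketch of a narrower variant, assuming the intent is to ignore only clean-up failures (the `os.remove` import and the chosen exception types are my assumptions, not taken from the original module):

from os import remove

def close_file(self, data):
    """Closes and deletes the temporary file, ignoring clean-up failures only."""
    # Closes the NetCDF file if the handle is still valid.
    try:
        data.close()
    except (AttributeError, IOError, OSError):
        pass
    # Removes the temporary dump file if it is still on disk.
    try:
        remove('%s_%s.nc' % (self.params['uuid'], 'dump'))
    except OSError:
        pass
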
Example #2
    def close_file(self, data):
        """Closes and deletes temporary file."""
        #
        # Closes NetCDF file.
        try:
            data.close()
        except:
            pass

        # Removes the temporary dump file.
        try:
            remove('%s_%s.nc' % (self.params['uuid'], 'dump'))
        except:
            pass
Example #3
    def __init__(self, path=None, xlim=None, ylim=None, tlim=None):
        # Initializes the variables to default values. The indices 'n', 'k',
        # 'j' and 'i' refer to the temporal, height, meridional and zonal
        # coordinates respectively. If one of these indices is set to 'None',
        # its size is assumed to be infinite, which is relevant for the 'time'
        # coordinate.
        self.attributes = dict()
        self.dimensions = dict(n=0, k=0, j=0, i=0)
        self.coordinates = dict(n=None, k=None, j=None, i=None)
        self.variables = dict()
        self.params = dict()
        self.data = dict()
        self.stencil_coeffs = dict()
        self.stencil_params = dict()
        self.alias = dict()

        # Sets global parameters for grid.
        if path is None:
            path = ('/home/sebastian/academia/data/ncep.reanalysis2/'
                'gaussian_grid')
        self.params['path'] = path
        self.params['var_list'] = []
        self.params['year_list'] = []

        # Generates a list of files, tries to match them to the pattern and to
        # extract the time. To help understand the naming convention and
        # pattern, see the following example:
        #   uwnd.2015.nc
        file_pattern = '(.*).([0-9]{4}).nc'
        flist = listdir(self.params['path'])
        flist, match = reglist(flist, file_pattern)
        self.params['file_list'] = flist

        # Gets list of variables from file match.
        _vars, _years = zip(*match)
        self.params['var_list'] = unique(_vars)
        self.params['year_list'] = unique(_years)

        # Loads longitude and latitude data from the first variable, assuming
        # the grid is homogeneous throughout the dataset, then walks through
        # each year and concatenates the time vector.
        _var = self.params['var_list'][0]
        for _i, _year in enumerate(self.params['year_list']):
            fname = '{}.{}.nc'.format(_var, _year)
            try:
                data.close()
            except:
                pass
            data = self._open_file(fname)
            #
            if _i == 0:
                lon = data.variables['lon'].data
                lat = data.variables['lat'].data
                time = data.variables['time'].data
            else:
                time = hstack([time, data.variables['time'].data])

        # Time in dataset is given in `hours since 1800-1-1 00:00:0.0` and we
        # convert it to matplotlib's date format.

        if data.variables['time'].units == 'hours since 1800-1-1 00:00:0.0':
            self.params['t0'] = dates.date2num(dates.datetime.datetime(1800, 1, 1, 0, 0))
            time = self.params['t0'] + time / 24.

        # If xlim and ylim are set, determine which longitude and latitude
        # indices fall within the requested limits.
        lon, lat, xlim, ylim, ii, jj = self.getLongitudeLatitudeLimits(lon,
            lat, xlim, ylim)

        self.params['xlim'], self.params['ylim'] = xlim, ylim
        self.params['lon_i'], self.params['lat_j'] = ii, jj

        # Initializes the grid attributes, dimensions, coordinates and
        # variables.
        self.name = 'ncep_reanalysis'
        self.description = ('The NCEP Reanalysis project is an '
            'analysis/forecast system that performs data assimilation using '
            'past data from 1979 onwards.')
        self.attributes['institution'] = data.institution
        self.dimensions = dict(n=time.size, k=0, j=lat.size, i=lon.size)
        self.coordinates = dict(n='time', k='height', j='latitude',
            i='longitude')
        self.variables = dict(
            time = atlantis.data.Variable(),
            height = atlantis.data.get_standard_variable('height'),
            latitude = atlantis.data.get_standard_variable('latitude'),
            longitude = atlantis.data.get_standard_variable('longitude'),
            xm = atlantis.data.Variable(),
            ym = atlantis.data.Variable(),
        )
        #
        self.variables['time'].data = time
        self.variables['time'].canonical_units = 'days since 0001-01-01 UTC'
        #
        self.variables['height'].data = 0.
        self.variables['latitude'].data = lat
        self.variables['longitude'].data = lon
        #
        self.variables['xm'].canonical_units = 'km'
        self.variables['xm'].description = 'Zonal distance.'
        self.variables['ym'].canonical_units = 'km'
        self.variables['ym'].description = 'Meridional distance.'
        self.variables['xm'].data, self.variables['ym'].data = (
            metergrid(self.variables['longitude'].data,
            self.variables['latitude'].data, units='km')
        )
        #
        data.close()

        # Walks through each variable file for the first year, reads its
        # attributes and adds them to the dataset definition.
        self._message('\n')
        _year = self.params['year_list'][0]
        for _var in self.params['var_list']:
            fname = '{}.{}.nc'.format(_var, _year)
            data = self._open_file(fname)
            self._message('{}: '.format(_var))
            for _key in data.variables.keys():
                self._message('{} '.format(_key))
                if _key in ['time', 'time_bnds', 'level', 'level_bnds', 'lat',
                    'lon']:
                    continue
                try:
                    self.variables[_key] = atlantis.data.get_standard_variable(
                        data.variables[_key].standard_name,
                        units=data.variables[_key].units,
                        long_name=data.variables[_key].long_name,
                    )
                except:
                    self._message('*  ')
                    self.variables[_key] = atlantis.data.Variable(
                        units=data.variables[_key].units,
                        standard_name=data.variables[_key].standard_name,
                        long_name=data.variables[_key].long_name,
                        description=data.variables[_key].var_desc,
                    )
                self.alias[_key] = _var
            #
            self._message('\n')
            data.close()
        #
        return
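
The time conversion above (hours since 1800-01-01 into matplotlib day numbers) can be checked in isolation. A minimal sketch, with a hypothetical hour count chosen so that the round trip lands on 2016-01-01; the value and variable names are illustrative, not taken from the dataset:

import datetime
from matplotlib import dates

t0 = dates.date2num(datetime.datetime(1800, 1, 1))    # day number of the reference epoch
hours = 1893408.0                                      # hypothetical value from the 'time' variable
day_number = t0 + hours / 24.0                         # same arithmetic as in __init__ above
print(dates.num2date(day_number))                      # -> 2016-01-01 00:00:00+00:00
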
Example #4
    def __init__(self, path=None, mask_file=None, xlim=None, ylim=None):
        # Initializes the variables to default values. The indices 'n', 'k',
        # 'j' and 'i' refer to the temporal, height, meridional and zonal
        # coordinates respectively. If one of these indices is set to 'None',
        # its size is assumed to be infinite, which is relevant for the 'time'
        # coordinate.
        self.attributes = dict()
        self.dimensions = dict(n=0, k=0, j=0, i=0)
        self.coordinates = dict(n=None, k=None, j=None, i=None)
        self.variables = dict()
        self.params = dict()
        self.stencil_coeffs = dict()
        self.stencil_params = dict()

        # Sets global parameters for grid.
        if path is None:
            path = ('/home/sebastian/academia/data/ncdc.noaa/seawinds/stress/'
                    'daily')
        self.params['path'] = path
        self.params['mask_file'] = mask_file
        self.params['missing_value'] = -9999.

        # Generates list of files, tries to match them to the pattern and to
        # extract the time.
        file_pattern = 'tauxy([0-9]{8}).nc'
        flist = listdir(self.params['path'])
        flist, match = reglist(flist, file_pattern)
        self.params['file_list'] = flist
        if len(flist) == 0:
            return

        # Convert dates to matplotlib format, i.e. days since 0001-01-01 UTC.
        time_list = array([dates.datestr2num(item) for item in match])

        # Reads the first file in the dataset to determine the array geometry
        # and dimensions (lon, lat).
        data = netcdf(
            '%s/%s' % (self.params['path'], self.params['file_list'][0]), 'r')
        for var in data.variables.keys():
            if var in ['latitude', 'lat']:
                lat = data.variables[var].data
            elif var in ['longitude', 'lon']:
                lon = data.variables[var].data

        # If xlim or ylim is set, determine which longitude and latitude
        # indices fall within the requested limits.
        if (xlim is not None) or (ylim is not None):
            if xlim is None:
                xlim = (lon.min(), lon.max())
            if ylim is None:
                ylim = (lat.min(), lat.max())
            #
            LON = lon_n(lon, xlim[1])
            i = argsort(LON)
            selx = i[flatnonzero((LON[i] >= xlim[0]) & (LON[i] <= xlim[1]))]
            sely = flatnonzero((lat >= ylim[0]) & (lat <= ylim[1]))
            ii, jj = meshgrid(selx, sely)
            lon = LON[selx]
            lat = lat[sely]
            self.params['xlim'] = xlim
            self.params['ylim'] = ylim
            self.params['lon_i'] = ii
            self.params['lat_j'] = jj
        self.params['dlon'] = lon[1] - lon[0]
        self.params['dlat'] = lat[1] - lat[0]

        # Initializes the grid attributes, dimensions, coordinates and
        # variables.
        self.name = 'sea_surface_wind_stress'
        for attr, attr_value in vars(data).iteritems():
            if attr in ['mode', 'filename']:
                continue
            if type(attr_value) == str:
                if attr in ['name']:
                    self.name = attr_value
                elif attr in ['description', 'summary']:
                    self.description = attr_value
                else:
                    self.attributes[attr.lower()] = attr_value
        self.dimensions = dict(n=time_list.size, k=1, j=lat.size, i=lon.size)
        self.coordinates = dict(n='time',
                                k='height',
                                j='latitude',
                                i='longitude')
        #
        self.variables = dict(
            time=atlantis.data.variable(
                canonical_units='days since 0001-01-01 UTC',
                data=time_list,
            ),
            height=atlantis.data.get_standard_variable('height', data=[0.]),
            latitude=atlantis.data.get_standard_variable('latitude', data=lat),
            longitude=atlantis.data.get_standard_variable('longitude',
                                                          data=lon),
            xm=atlantis.data.variable(canonical_units='km',
                                      description='Zonal distance.'),
            ym=atlantis.data.variable(canonical_units='km',
                                      description='Meridional distance.'),
        )
        #
        self.variables['xm'].data, self.variables['ym'].data = (metergrid(
            self.variables['longitude'].data,
            self.variables['latitude'].data,
            unit='km'))
        #
        self.params['var_list'] = list()
        for var in data.variables.keys():
            if var in ['tau', 'taux', 'tauy', 'tau_div', 'tau_curl']:
                attribs = dict()
                for attr, attr_value in vars(data.variables[var]).iteritems():
                    if attr == '_FillValue':
                        attribs['missing_value'] = attr_value
                    elif attr == 'data':
                        continue
                    elif attr == 'long_name':
                        attribs['description'] = attr_value
                    elif attr == 'units':
                        if attr_value == 'N/m**2':
                            a = 'N m-2'
                        else:
                            a = attr_value
                        attribs['canonical_units'] = a
                    else:
                        attribs[attr] = attr_value
                self.variables[var] = atlantis.data.variable(**attribs)
                self.params['var_list'].append(var)
                if self.variables[var].missing_value is None:
                    self.variables[var].missing_value = (
                        self.params['missing_value'])
        #
        data.close()
        return
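
The ylim selection above reduces to a flatnonzero over a boolean mask. A minimal numpy sketch with a hypothetical 0.5-degree latitude grid (the grid spacing and limits are illustrative, not from the dataset):

import numpy as np

lat = np.arange(-89.75, 90.0, 0.5)        # hypothetical 0.5-degree latitude grid
ylim = (-10.0, 10.0)
sely = np.flatnonzero((lat >= ylim[0]) & (lat <= ylim[1]))
print(len(sely))                          # -> 40 latitudes, from -9.75 to 9.75
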
Example #5
    def read(self,
             t=None,
             z=None,
             y=None,
             x=None,
             N=None,
             K=None,
             J=None,
             I=None,
             var=None,
             nonan=True,
             result='full',
             profile=False,
             dummy=False):
        """Reads dataset.

        PARAMETERS
            t, z, y, x (array like, optional) :
                Sets the time, height, latitude and longitude for which
                the data will be read.
            N, K, J, I (array like, optional) :
                Sets the temporal, vertical, meridional and zonal
                indices for which the data will be read.
            var (string or list of strings, optional) :
                Indicates which variable of the grid will be read. If
                the parameter is a list of variables, the data will
                be returned as a dictionary of arrays keyed by variable name.
            nonan (boolean, optional) :
                If set to true (default) changes data values containing
                NaN to zero, preserving the mask.
            result (string, optional) :
                Determines whether all time, height, latitude, longitude
                and data will be returned ('full', default), whether
                temporal, vertical, meridional and zonal indices
                are returned instead ('indices'), or whether only
                variable data is returned ('var only').
            profile (boolean, optional) :
                Sets whether the status is sent to screen.
            dummy (boolean, optional) :
                If set to true, does not load data and returns the shape
                of the array that would have been returned.

        RETURNS
            t, z, y, x, dat (array like) :
                If 'result' is set to 'full', then all coordinates and
                data variables are returned.
            N, K, J, I, var (array like) :
                If 'result' is set to 'indices', then all indices and
                data variables are returned.
            dat (array like) :
                If 'result' is set to 'var only', then the data is
                returned.

        """
        global DEBUG
        t1 = time()

        # Checks input variables for consistency.
        if (t is not None) and (N is not None):
            raise ValueError('Both time and temporal index were provided.')
        if (z is not None) and (K is not None):
            raise ValueError('Both height and vertical index were provided.')
        if (y is not None) and (J is not None):
            raise ValueError(
                'Both latitude and meridional index were provided.')
        if (x is not None) and (I is not None):
            raise ValueError('Both longitude and zonal index were provided.')
        if var is None:
            var = self.params['var_list']

        # Checks the variable indices. Intersects the desired input values
        # with the dataset dimension data. In this dataset, since only surface
        # data is available, the height values are always zero.
        if t is not None:
            N = flatnonzero(in1d(self.variables['time'].data, t))
        elif N is None:
            N = arange(self.dimensions['n'])
        # Only surface data is available, so the vertical index is fixed.
        K = [0]
        if y is not None:
            J = flatnonzero(in1d(self.variables['latitude'].data, y))
        elif J is None:
            J = arange(self.dimensions['j'])
        if x is not None:
            I = flatnonzero(in1d(self.variables['longitude'].data, x))
        elif I is None:
            I = arange(self.dimensions['i'])

        # Sets the shape of the data array.
        shape = (len(N), 1, len(J), len(I))
        if dummy:
            return shape
        # Selects data according to indices.
        t = self.variables['time'].data[N]
        z = self.variables['height'].data
        y = self.variables['latitude'].data[J]
        x = self.variables['longitude'].data[I]
        xx, yy = meshgrid(x, y)
        II, JJ = meshgrid(I, J)
        # Resets variables
        Var = dict()
        if ('taux' in var) & ('tauy' in var):
            tauxy = True
        else:
            tauxy = False
        for item in var:
            if (item == 'taux') & tauxy:
                Var['tauxy'] = ma.zeros(shape, dtype=complex)
            elif (item == 'tauy') & tauxy:
                continue
            else:
                Var[item] = ma.zeros(shape)
        # Walks through every time index and loads data range from maps.
        for n, T in enumerate(t):
            t2 = time()
            if profile:
                s = '\rLoading data... %s ' % (profiler(
                    shape[0], n + 1, 0, t1, t2), )
                stdout.write(s)
                stdout.flush()
            # Reads NetCDF file
            data = netcdf(
                '%s/%s' %
                (self.params['path'], self.params['file_list'][N[n]]), 'r')
            for item in var:
                if (('lon_i' in self.params.keys()) &
                    ('lat_j' in self.params.keys())):
                    P = data.variables[item].data[0, 0, self.params['lat_j'],
                                                  self.params['lon_i']][JJ, II]
                else:
                    P = data.variables[item].data[0, 0, JJ, II]
                P[P <= self.variables[item].missing_value] = nan
                P = ma.masked_where(isnan(P), P)
                if nonan:
                    P.data[P.mask] = 0
                #
                if (item == 'taux') & tauxy:
                    Var['tauxy'][n, 0, :, :] += P[:, :]
                elif (item == 'tauy') & tauxy:
                    Var['tauxy'][n, 0, :, :] += 1j * P[:, :]
                else:
                    Var[item][n, 0, :, :] += P[:, :]

            data.close()

        # If result dictionary contains only one item, return only the value
        # of this item.
        if len(Var.keys()) == 1:
            Var = Var[Var.keys()[0]]

        if profile:
            stdout.write('\r\n')
            stdout.flush()

        if DEBUG:
            print 't: ', t
            print 'z: ', z
            print 'y:', y
            print 'x:', x
            print 'var: ', Var
            print 'N: ', N
            print 'K: ', K
            print 'J: ', J
            print 'I:', I
            print 'shape: ', shape

        if result == 'full':
            return t, z, y, x, Var
        elif result == 'indices':
            return N, K, J, I, Var
        elif result == 'var only':
            return Var
        else:
            # Falls back to 'var only' and warns about the invalid value
            # (assumes `from warnings import warn` at module level).
            warn("Result parameter set improperly to '%s', "
                 "assuming 'var only'." % (result))
            return Var
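
When both 'taux' and 'tauy' are requested, read() packs them into a single complex 'tauxy' array, with the real part holding the zonal component and the imaginary part the meridional one. A minimal sketch of that convention with made-up 2x2 fields; the magnitude/direction lines are my own illustration of why the packing is convenient, not part of the original:

import numpy as np
import numpy.ma as ma

taux = ma.array([[0.10, 0.20], [0.30, 0.40]])    # hypothetical zonal stress (N m-2)
tauy = ma.array([[0.00, -0.10], [0.20, 0.10]])   # hypothetical meridional stress (N m-2)

tauxy = taux + 1j * tauy                         # same packing as Var['tauxy'] above
magnitude = np.abs(tauxy)                        # |tau|
direction = np.angle(tauxy, deg=True)            # degrees counter-clockwise from east
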
Example #6
    def __init__(self, path=None, xlim=None, ylim=None, tlim=None):
        # Initializes the variables to default values. The indices 'n', 'k',
        # 'j' and 'i' refer to the temporal, height, meridional and zonal
        # coordinates respectively. If one of these indices is set to 'None',
        # its size is assumed to be infinite, which is relevant for the 'time'
        # coordinate.
        self.attributes = dict()
        self.dimensions = dict(n=0, k=0, j=0, i=0)
        self.coordinates = dict(n=None, k=None, j=None, i=None)
        self.variables = dict()
        self.params = dict()
        self.data = dict()
        self.stencil_coeffs = dict()
        self.stencil_params = dict()
        self.alias = dict()

        # Sets global parameters for grid.
        if path is None:
            path = ('/home/sebastian/academia/data/ncep.reanalysis2/'
                    'gaussian_grid')
        self.params['path'] = path
        self.params['var_list'] = []
        self.params['year_list'] = []

        # Generates a list of files, tries to match them to the pattern and to
        # extract the time. To help understand the naming convention and
        # pattern, see the following example:
        #   uwnd.2015.nc
        file_pattern = '(.*).([0-9]{4}).nc'
        flist = listdir(self.params['path'])
        flist, match = reglist(flist, file_pattern)
        self.params['file_list'] = flist

        # Gets list of variables from file match.
        _vars, _years = zip(*match)
        self.params['var_list'] = unique(_vars)
        self.params['year_list'] = unique(_years)

        # Loads longitude and latitude data from the first variable, assuming
        # the grid is homogeneous throughout the dataset, then walks through
        # each year and concatenates the time vector.
        _var = self.params['var_list'][0]
        for _i, _year in enumerate(self.params['year_list']):
            fname = '{}.{}.nc'.format(_var, _year)
            try:
                data.close()
            except:
                pass
            data = self._open_file(fname)
            #
            if _i == 0:
                lon = data.variables['lon'].data
                lat = data.variables['lat'].data
                time = data.variables['time'].data
            else:
                time = hstack([time, data.variables['time'].data])

        # Time in dataset is given in `hours since 1800-1-1 00:00:0.0` and we
        # convert it to matplotlib's date format.

        if data.variables['time'].units == 'hours since 1800-1-1 00:00:0.0':
            self.params['t0'] = dates.date2num(
                dates.datetime.datetime(1800, 1, 1, 0, 0))
            time = self.params['t0'] + time / 24.

        # If xlim and ylim are set, determine which longitude and latitude
        # indices fall within the requested limits.
        lon, lat, xlim, ylim, ii, jj = self.getLongitudeLatitudeLimits(
            lon, lat, xlim, ylim)

        self.params['xlim'], self.params['ylim'] = xlim, ylim
        self.params['lon_i'], self.params['lat_j'] = ii, jj

        # Initializes the grid attributes, dimensions, coordinates and
        # variables.
        self.name = 'ncep_reanalysis'
        self.description = (
            'The NCEP Reanalysis project is an analysis/forecast system '
            'that performs data assimilation using past data from 1979 '
            'onwards.')
        self.attributes['institution'] = data.institution
        self.dimensions = dict(n=time.size, k=0, j=lat.size, i=lon.size)
        self.coordinates = dict(n='time',
                                k='height',
                                j='latitude',
                                i='longitude')
        self.variables = dict(
            time=atlantis.data.Variable(),
            height=atlantis.data.get_standard_variable('height'),
            latitude=atlantis.data.get_standard_variable('latitude'),
            longitude=atlantis.data.get_standard_variable('longitude'),
            xm=atlantis.data.Variable(),
            ym=atlantis.data.Variable(),
        )
        #
        self.variables['time'].data = time
        self.variables['time'].canonical_units = 'days since 0001-01-01 UTC'
        #
        self.variables['height'].data = 0.
        self.variables['latitude'].data = lat
        self.variables['longitude'].data = lon
        #
        self.variables['xm'].canonical_units = 'km'
        self.variables['xm'].description = 'Zonal distance.'
        self.variables['ym'].canonical_units = 'km'
        self.variables['ym'].description = 'Meridional distance.'
        self.variables['xm'].data, self.variables['ym'].data = (metergrid(
            self.variables['longitude'].data,
            self.variables['latitude'].data,
            units='km'))
        #
        data.close()

        # Walks through each variable file for the first year, reads its
        # attributes and adds them to the dataset definition.
        self._message('\n')
        _year = self.params['year_list'][0]
        for _var in self.params['var_list']:
            fname = '{}.{}.nc'.format(_var, _year)
            data = self._open_file(fname)
            self._message('{}: '.format(_var))
            for _key in data.variables.keys():
                self._message('{} '.format(_key))
                if _key in [
                        'time', 'time_bnds', 'level', 'level_bnds', 'lat',
                        'lon'
                ]:
                    continue
                try:
                    self.variables[_key] = atlantis.data.get_standard_variable(
                        data.variables[_key].standard_name,
                        units=data.variables[_key].units,
                        long_name=data.variables[_key].long_name,
                    )
                except:
                    self._message('*  ')
                    self.variables[_key] = atlantis.data.Variable(
                        units=data.variables[_key].units,
                        standard_name=data.variables[_key].standard_name,
                        long_name=data.variables[_key].long_name,
                        description=data.variables[_key].var_desc,
                    )
                self.alias[_key] = _var
            #
            self._message('\n')
            data.close()
        #
        return
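
reglist() is not shown in these examples; a minimal sketch of the same file matching with the standard re module, assuming it returns the matched names together with their regex groups. Here the dots are escaped, whereas the original pattern relies on '.' matching any character:

import re

file_pattern = re.compile(r'(.*)\.([0-9]{4})\.nc$')
flist = ['uwnd.2015.nc', 'vwnd.2015.nc', 'uwnd.2016.nc', 'readme.txt']
match = [m.groups() for m in (file_pattern.match(f) for f in flist) if m]
_vars, _years = zip(*match)
print(sorted(set(_vars)))     # -> ['uwnd', 'vwnd']
print(sorted(set(_years)))    # -> ['2015', '2016']
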
Example #7
    def __init__(self, path=None, mask_file=None, xlim=None, ylim=None):
        # Initializes the variables to default values. The indices 'n', 'k',
        # 'j' and 'i' refer to the temporal, height, meridional and zonal
        # coordinates respectively. If one of these indices is set to 'None',
        # its size is assumed to be infinite, which is relevant for the 'time'
        # coordinate.
        self.attributes = dict()
        self.dimensions = dict(n=0, k=0, j=0, i=0)
        self.coordinates = dict(n=None, k=None, j=None, i=None)
        self.variables = dict()
        self.params = dict()
        self.stencil_coeffs = dict()
        self.stencil_params = dict()

        # Sets global parameters for grid.
        if path is None:
            path = ('/home/sebastian/academia/data/ncdc.noaa/seawinds/stress/'
                'daily')
        self.params['path'] = path
        self.params['mask_file'] = mask_file
        self.params['missing_value'] = -9999.
        
        # Generates list of files, tries to match them to the pattern and to 
        # extract the time.
        file_pattern = 'tauxy([0-9]{8}).nc'
        flist = listdir(self.params['path'])
        flist, match = reglist(flist, file_pattern)
        self.params['file_list'] = flist
        if len(flist) == 0:
            return

        # Convert dates to matplotlib format, i.e. days since 0001-01-01 UTC.
        time_list = array([dates.datestr2num(item) for item in match])
        
        # Reads the first file in the dataset to determine the array geometry
        # and dimensions (lon, lat).
        data = netcdf('%s/%s' % (self.params['path'],
            self.params['file_list'][0]), 'r')
        for var in data.variables.keys():
            if var in ['latitude', 'lat']:
                lat = data.variables[var].data
            elif var in ['longitude', 'lon']:
                lon = data.variables[var].data
        
        # If xlim or ylim is set, determine which longitude and latitude
        # indices fall within the requested limits.
        if (xlim is not None) or (ylim is not None):
            if xlim is None:
                xlim = (lon.min(), lon.max())
            if ylim is None:
                ylim = (lat.min(), lat.max())
            #
            LON = lon_n(lon, xlim[1])
            i = argsort(LON)
            selx = i[flatnonzero((LON[i] >= xlim[0]) & (LON[i] <= xlim[1]))]
            sely = flatnonzero((lat >= ylim[0]) & (lat <= ylim[1]))
            ii, jj = meshgrid(selx, sely)
            lon = LON[selx]
            lat = lat[sely]
            self.params['xlim'] = xlim
            self.params['ylim'] = ylim
            self.params['lon_i'] = ii
            self.params['lat_j'] = jj
        self.params['dlon'] = lon[1] - lon[0]
        self.params['dlat'] = lat[1] - lat[0]
        
        # Initializes the grid attributes, dimensions, coordinates and
        # variables.
        self.name = 'sea_surface_wind_stress'
        for attr, attr_value in vars(data).iteritems():
            if attr in ['mode', 'filename']:
                continue
            if type(attr_value) == str:
                if attr in ['name']:
                    self.name = attr_value
                elif attr in ['description', 'summary']:
                    self.description = attr_value
                else:
                    self.attributes[attr.lower()] = attr_value
        self.dimensions = dict(n=time_list.size, k=1, j=lat.size, i=lon.size)
        self.coordinates = dict(n='time', k='height', j='latitude',
            i='longitude')
        #
        self.variables = dict(
            time = atlantis.data.variable(
                canonical_units='days since 0001-01-01 UTC',
                data=time_list,
            ),
            height = atlantis.data.get_standard_variable('height', data=[0.]),
            latitude = atlantis.data.get_standard_variable('latitude',
                data=lat),
            longitude = atlantis.data.get_standard_variable('longitude',
                data=lon),
            xm = atlantis.data.variable(
                canonical_units = 'km',
                description = 'Zonal distance.'
            ),
            ym = atlantis.data.variable(
                canonical_units = 'km',
                description = 'Meridional distance.'
            ),
        )
        #
        self.variables['xm'].data, self.variables['ym'].data = (
            metergrid(self.variables['longitude'].data, 
            self.variables['latitude'].data, unit='km')
        )
        #
        self.params['var_list'] = list()
        for var in data.variables.keys():
            if var in ['tau', 'taux', 'tauy', 'tau_div', 'tau_curl']:
                attribs = dict()
                for attr, attr_value in vars(data.variables[var]).iteritems():
                    if attr == '_FillValue':
                        attribs['missing_value'] = attr_value
                    elif attr == 'data':
                        continue
                    elif attr == 'long_name':
                        attribs['description'] = attr_value
                    elif attr == 'units':
                        if attr_value == 'N/m**2':
                            a = 'N m-2'
                        else:
                            a = attr_value
                        attribs['canonical_units'] = a
                    else:
                        attribs[attr] = attr_value
                self.variables[var] = atlantis.data.variable(**attribs)
                self.params['var_list'].append(var)
                if self.variables[var].missing_value is None:
                    self.variables[var].missing_value = (
                        self.params['missing_value'])
        #
        data.close()
        return
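
The attribute loop above maps raw NetCDF variable attributes onto the names atlantis expects (missing_value, description, canonical_units). A minimal standalone sketch of that mapping with a hypothetical attribute dictionary; the raw values are illustrative:

raw = {'_FillValue': -9999.0,
       'long_name': 'surface zonal wind stress',
       'units': 'N/m**2'}

attribs = {}
for attr, value in raw.items():
    if attr == '_FillValue':
        attribs['missing_value'] = value
    elif attr == 'long_name':
        attribs['description'] = value
    elif attr == 'units':
        attribs['canonical_units'] = 'N m-2' if value == 'N/m**2' else value
    else:
        attribs[attr] = value
print(attribs)   # -> missing_value, description and canonical_units keys
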
Example #8
    def read(self, t=None, z=None, y=None, x=None, N=None, K=None, J=None,
        I=None, var=None, nonan=True, result='full', profile=False,
        dummy=False):
        """Reads dataset.

        PARAMETERS
            t, z, y, x (array like, optional) :
                Sets the time, height, latitude and longitude for which
                the data will be read.
            N, K, J, I (array like, optional) :
                Sets the temporal, vertical, meridional and zonal
                indices for which the data will be read.
            var (string or list of strings, optional) :
                Indicates which variable of the grid will be read. If
                the parameter is a list of variables, the data will
                be returned as a dictionary of arrays keyed by variable name.
            nonan (boolean, optional) :
                If set to true (default) changes data values containing
                NaN to zero, preserving the mask.
            result (string, optional) :
                Determines whether all time, height, latitude, longitude
                and data will be returned ('full', default), whether
                temporal, vertical, meridional and zonal indices
                are returned instead ('indices'), or whether only
                variable data is returned ('var only').
            profile (boolean, optional) :
                Sets whether the status is sent to screen.
            dummy (boolean, optional) :
                If set to true, does not load data and returns the shape
                of the array that would have been returned.

        RETURNS
            t, z, y, x, dat (array like) :
                If 'result' is set to 'full', then all coordinates and
                data variables are returned.
            N, K, J, I, var (array like) :
                If 'result' is set to 'indices', then all indices and
                data variables are returned.
            dat (array like) :
                If 'result' is set to 'var only', then the data is
                returned.

        """
        global DEBUG
        t1 = time()
        
        # Checks input variables for consistency.
        if (t is not None) and (N is not None):
            raise ValueError('Both time and temporal index were provided.')
        if (z is not None) and (K is not None):
            raise ValueError('Both height and vertical index were provided.')
        if (y is not None) and (J is not None):
            raise ValueError(
                'Both latitude and meridional index were provided.')
        if (x is not None) and (I is not None):
            raise ValueError('Both longitude and zonal index were provided.')
        if var is None:
            var = self.params['var_list']

        # Checks the variable indices. Intersects the desired input values
        # with the dataset dimension data. In this dataset, since only surface
        # data is available, the height values are always zero.
        if t is not None:
            N = flatnonzero(in1d(self.variables['time'].data, t))
        elif N is None:
            N = arange(self.dimensions['n'])
        # Only surface data is available, so the vertical index is fixed.
        K = [0]
        if y is not None:
            J = flatnonzero(in1d(self.variables['latitude'].data, y))
        elif J is None:
            J = arange(self.dimensions['j'])
        if x is not None:
            I = flatnonzero(in1d(self.variables['longitude'].data, x))
        elif I is None:
            I = arange(self.dimensions['i'])

        # Sets the shape of the data array.
        shape = (len(N), 1, len(J), len(I))
        if dummy:
            return shape
        # Selects data according to indices.
        t = self.variables['time'].data[N]
        z = self.variables['height'].data
        y = self.variables['latitude'].data[J]
        x = self.variables['longitude'].data[I]
        xx, yy = meshgrid(x, y)
        II, JJ = meshgrid(I, J)
        # Resets variables
        Var = dict()
        if ('taux' in var) & ('tauy' in var):
            tauxy = True
        else:
            tauxy = False
        for item in var:
            if (item == 'taux') & tauxy:
                Var['tauxy'] = ma.zeros(shape, dtype=complex)
            elif (item == 'tauy') & tauxy:
                continue
            else:
                Var[item] = ma.zeros(shape)
        # Walks through every time index and loads data range from maps.
        for n, T in enumerate(t):
            t2 = time()
            if profile:
                s = '\rLoading data... %s ' % (profiler(shape[0], n + 1, 0, 
                    t1, t2),)
                stdout.write(s)
                stdout.flush()
            # Reads NetCDF file
            data = netcdf('%s/%s' % (self.params['path'],
                self.params['file_list'][N[n]]), 'r')
            for item in var:
                if (('lon_i' in self.params.keys()) &
                    ('lat_j' in self.params.keys())):
                    P = data.variables[item].data[0, 0, self.params['lat_j'],
                        self.params['lon_i']][JJ, II]
                else:
                    P = data.variables[item].data[0, 0, JJ, II]
                P[P <= self.variables[item].missing_value] = nan
                P = ma.masked_where(isnan(P), P)
                if nonan:
                    P.data[P.mask] = 0
                #
                if (item == 'taux') & tauxy:
                    Var['tauxy'][n, 0, :, :] += P[:, :]
                elif (item == 'tauy') & tauxy:
                    Var['tauxy'][n, 0, :, :] += 1j * P[:, :]
                else:
                    Var[item][n, 0, :, :] += P[:, :]
            
            data.close()
        
        # If result dictionary contains only one item, return only the value
        # of this item.
        if len(Var.keys()) == 1:
            Var = Var[Var.keys()[0]]
        
        if profile:
            stdout.write('\r\n')
            stdout.flush()
        
        if DEBUG:
            print 't: ', t
            print 'z: ', z
            print 'y:', y
            print 'x:', x
            print 'var: ', Var
            print 'N: ', N
            print 'K: ', K
            print 'J: ', J
            print 'I:', I
            print 'shape: ', shape
        
        if result == 'full':
            return t, z, y, x, Var
        elif result == 'indices':
            return N, K, J, I, Var
        elif result == 'var only':
            return Var
        else:
            # Falls back to 'var only' and warns about the invalid value
            # (assumes `from warnings import warn` at module level).
            warn("Result parameter set improperly to '%s', "
                 "assuming 'var only'." % (result))
            return Var
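
The per-file masking step in read() (flag fill values, mask NaNs, optionally zero the masked data) can be reproduced in a few lines. A minimal sketch with a hypothetical 2x2 field and the -9999 fill value used above:

import numpy as np
import numpy.ma as ma

missing_value = -9999.0
P = np.array([[0.05, -9999.0], [0.12, 0.08]])
P[P <= missing_value] = np.nan               # flag fill values, as in read() above
P = ma.masked_where(np.isnan(P), P)          # mask the flagged points
P.data[P.mask] = 0.0                         # nonan=True behaviour: zeros under the mask
print(P)                                     # -> [[0.05 --] [0.12 0.08]]
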