Example 1
 def __init__(self, url=None, sltype=None, mode=READ_ONLY, **kwargs):
     if mode != READ_ONLY:
         raise Exception("This mapper can only be used in read_only mode.")
     if sltype not in ['i', 'c', 'a', 'b']:
         raise Exception("Unknown SLSTR product type")
     super(SAFESLFile, self).__init__(url=url, mode=mode, **kwargs)
     self.__sltype = sltype
     self.__data_handlers = []
     self.__oblique_fields = []
     # coordinate files
     geodetic = "geodetic_%sn.nc" % sltype
     times = "time_%sn.nc" % sltype
     coordinate_files = [geodetic, times]
     # detect the data files and instantiate mappers for each one
     datafiles = []
     for fname in glob.glob(os.path.join(url, "*_%s[n,o].nc" % sltype)):
         if os.path.basename(fname) not in coordinate_files:
             datafiles.append(os.path.basename(fname))
     for f in datafiles:
         fname = os.path.join(url, f)
         self.__data_handlers.append(NCFile(url=fname, mode=mode, **kwargs))
     # instantiate mappers for each coordinate file
     self.__geod_handler = NCFile(os.path.join(url, geodetic),
                                  mode=mode, **kwargs)
     self.__time_handler = NCFile(os.path.join(url, times),
                                  mode=mode, **kwargs)
     self.__fieldlocator = {}
     self.__geofieldlocator = {}
     self.__fieldtranslate = {}
     # offset between nadir and oblique swath edge
     self.nadir_to_oblique_offset = None
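
A minimal usage sketch for this mapper (the module path, the SAFE folder name and the call sequence are assumptions based on the cerbere mapper API used throughout these examples):

    # hypothetical usage; module path and SAFE folder name are assumptions
    from cerbere.mapper.safeslfile import SAFESLFile

    mapper = SAFESLFile(url='/path/to/S3A_SL_1_RBT_product.SEN3',
                        sltype='i')   # sltype must be 'i', 'c', 'a' or 'b'
    mapper.open()
    print(mapper.get_fieldnames())
    mapper.close()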
Example 2
    def __init__(self, url=None, mode=READ_ONLY, **kwargs):
        if mode != READ_ONLY:
            raise Exception("This mapper can only be used in read_only mode.")
        super(SAFEOLFile, self).__init__(url=url, mode=mode, **kwargs)
        self.__data_handlers = []
        self.__fields = {}
        # ancillary files
        cartesian = "geo_coordinates.nc"
        instrument = "instrument_data.nc"
        times = "time_coordinates.nc"

        # handlers for ancillary fields
        self.__time_handler = NCFile(os.path.join(url, times),
                                     mode=mode, **kwargs)
        self.__fields[times] = []
        self.__coord_handler = NCFile(os.path.join(url, cartesian),
                                      mode=mode, **kwargs)
        self.__fields[cartesian] = []
        self.__instr_handler = NCFile(os.path.join(url, instrument),
                                      mode=mode, **kwargs)
        self.__fields[instrument] = []
        # get product type
        safefolder = os.path.basename(os.path.normpath(url))
        print(safefolder)
        if "_OL_1_ERR" in safefolder or "_OL_1_EFR" in safefolder:
            datafiles = DATAFILES["L1B"]
        elif "OL_2_LRR" in safefolder or "OL_2_LFR" in safefolder:
            datafiles = DATAFILES["L2LAND"]
        elif "OL_2_WRR" in safefolder or "OL_2_WFR" in safefolder:
            datafiles = DATAFILES["L2WATER"]
        else:
            raise Exception("Unknown product type")
        # detect the data files and instantiate mappers for each one
        for f in datafiles:
            if '*' in f:
                fnames = glob.glob(os.path.join(url, f))
                for fname in fnames:
                    self.__data_handlers.append(NCFile(url=fname, mode=mode,
                                                       **kwargs))
                    self.__fields[f] = []
            else:
                fname = os.path.join(url, f)
                self.__data_handlers.append(NCFile(url=fname, mode=mode,
                                                   **kwargs))
                self.__fields[f] = []
        self.__fieldlocator = {}
        self.__geofieldlocator = {}
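
DATAFILES is referenced above but not shown in this excerpt; a plausible shape, inferred from how the code indexes it by product level and tests entries for a '*' glob (the file lists follow the SAFEOLFile docstring further down and are otherwise an assumption):

    # assumed structure for DATAFILES, inferred from its usage above
    DATAFILES = {
        "L1B": ["*_radiance.nc"],
        "L2LAND": ["iwv.nc", "lqsf.nc", "ogvi.nc", "otci.nc", "rc_ogvi.nc"],
        "L2WATER": ["chl_nn.nc", "chl_oc4me.nc", "iop_nn.nc",
                    "*_reflectance.nc", "par.nc", "trsp.nc", "tsm_nn.nc",
                    "w_aer.nc", "wqsf.nc"],
    }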
Example 3
    def read_field(self, fieldname):
        """
        Return the field, without its values.

        Actual values can be retrieved with read_values() method.
        """
        if fieldname in ['wind_speed', 'wind_direction']:
            # create a virtual field
            variable = Variable(
                    shortname=fieldname,
                    description=VIRTUALFIELD_DESCR[fieldname],
                    authority=self.get_naming_authority(),
                    standardname=VIRTUALFIELD_STDNAME[fieldname]
                    )
            field = Field(
                    variable,
                    OrderedDict([('time', 1),
                                 #('z', 1),
                                 ('y', self.get_dimsize('y')),
                                 ('x', self.get_dimsize('x'))
                                 ]),
                    datatype=numpy.dtype(numpy.float32),
                    units=VIRTUALFIELD_UNITS[fieldname]
                    )
            field.attach_storage(self.get_field_handler(fieldname))
        else:
            field = NCFile.read_field(self, fieldname)
            variable = field.variable
            units = field.units
            if 'z' in field.get_dimnames():
                field = Field(
                    variable,
                    OrderedDict([('time', 1),
                                 #('z', self.get_dimsize('z')),
                                 ('y', self.get_dimsize('y')),
                                 ('x', self.get_dimsize('x'))
                                 ]),
                    datatype=numpy.dtype(numpy.float32),
                    units=units
                    )
                field.attach_storage(self.get_field_handler(fieldname))
        return field
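
A short usage sketch of the virtual-field pattern above: read_field() returns a metadata-only Field, while the attached storage handler lets read_values() compute the actual values on demand (the mapper instance is hypothetical):

    field = mapper.read_field('wind_speed')    # metadata only, no data read
    print(field.units)                         # from VIRTUALFIELD_UNITS
    values = mapper.read_values('wind_speed')  # values computed when needed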
        
Example 4
def eodyn_current(infile,
                  outdir,
                  vmin=0.,
                  vmax=5.08,
                  vmin_pal=0.,
                  vmax_pal=2.,
                  write_netcdf=False):
    """
    """
    # Read/Process data
    print('Read/Process data')
    ncfile = NCFile(infile)
    if 'id' in ncfile.read_global_attributes():
        l4id = ncfile.read_global_attribute('id')
#        l4id = 'e-Odyn' #ncfile.read_global_attribute('id')
    elif re.match(r'^e-Odyn_.*\.nc', os.path.basename(infile)) is not None:
        l4id = 'e-Odyn'
    else:
        raise Exception('Unknown GlobCurrent L4 file.')
    # /TMP
    ucur = ncfile.read_values(L4_MAPS[l4id]['uname'])[::, ::-1, 0]
    ucur = np.transpose(ucur)
    vcur = ncfile.read_values(L4_MAPS[l4id]['vname'])[::, ::-1, 0]
    vcur = np.transpose(vcur)
    masku = (ucur == -9999)
    maskv = (vcur == -9999)
    if l4id not in ['CourantGeostr']:
        lon = ncfile.read_values('lon')[0:2].astype('float64')
        lat = ncfile.read_values('lat')[-1:-3:-1].astype('float64')
        for i in range(2):  # avoid rounding errors
            lon[i] = np.round(lon[i] * 10000) / 10000
            lat[i] = np.round(lat[i] * 10000) / 10000
    else:
        lon = ncfile.read_values('lon')[:]
        shift = -np.where(lon < 0)[0][0]
        ucur = np.roll(ucur, shift, axis=1)
        vcur = np.roll(vcur, shift, axis=1)
        lon = lon[shift:shift + 2]
        lat = ncfile.read_values('lat')[-1:-3:-1]
    lon0, dlon, lat0, dlat = lon[0], lon[1] - lon[0], lat[0], lat[1] - lat[0]
    #dtime_units = ncfile.read_field('time').units
    #dtime = num2date(ncfile.read_values('time')[0], dtime_units)
    timefmt = '%Y-%m-%dT%H:%M:%S.%fZ'
    start_time = datetime.strptime(
        ncfile.read_global_attribute('time_coverage_start'), timefmt)
    stop_time = datetime.strptime(
        ncfile.read_global_attribute('time_coverage_end'), timefmt)

    (dtime, time_range) = stfmt.format_time_and_range(start_time,
                                                      stop_time,
                                                      units='ms')
    # rundtime = ncfile.read_global_attribute('date_modified')
    # rundtime = datetime.strptime(rundtime, '%Y%m%dT%H%M%SZ')
    # Construct metadata/geolocation/band(s)
    print('Construct metadata/geolocation/band(s)')
    metadata = {}
    metadata['product_name'] = L4_MAPS[l4id]['productname']
    metadata['name'] = os.path.splitext(os.path.basename(infile))[0]
    metadata['datetime'] = dtime
    metadata['time_range'] = time_range
    #metadata['time_range'] = L4_MAPS[l4id]['timerange']
    metadata['source_URI'] = infile
    metadata['source_provider'] = 'e-Odyn'
    metadata['processing_center'] = ''
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
    metadata['parameter'] = ['current velocity', 'current direction']
    # metadata['type'] = 'model'
    # metadata['model_longitude_resolution'] = abs(dlon)
    # metadata['model_latitude_resolution'] = abs(dlat)
    # metadata['model_analysis_datetime'] = stfmt.format_time(rundtime)
    geolocation = {}
    geolocation['projection'] = stfmt.format_gdalprojection()
    geolocation['geotransform'] = [
        lon0 - dlon / 2., dlon, 0, lat0 - dlat / 2., 0, dlat
    ]
    band = []
    mask = ucur.mask | vcur.mask
    print(mask)
    curvel = np.sqrt(ucur.data**2 + vcur.data**2)
    curdir = np.mod(
        np.arctan2(vcur.data, ucur.data) * 180. / np.pi + 360., 360.)
    offset, scale = vmin, (vmax - vmin) / 254.
    np.clip(curvel, vmin, vmax, out=curvel)
    array = np.round((curvel - offset) / scale).astype('uint8')
    array[mask] = 255
    array[masku] = 255
    array[maskv] = 255
    print(array)
    colortable = stfmt.format_colortable('matplotlib_jet',
                                         vmin=vmin,
                                         vmax=vmax,
                                         vmin_pal=vmin_pal,
                                         vmax_pal=vmax_pal)
    band.append({
        'array': array,
        'scale': scale,
        'offset': offset,
        'description': 'current velocity',
        'unittype': 'm/s',
        'nodatavalue': 255,
        'parameter_range': [vmin, vmax],
        'colortable': colortable
    })
    array = np.round(curdir / 360. * 254.).astype('uint8')
    array[mask] = 255
    array[masku] = 255
    array[maskv] = 255
    band.append({
        'array': array,
        'scale': 360. / 254.,
        'offset': 0.,
        'description': 'current direction',
        'unittype': 'deg',
        'nodatavalue': 255,
        'parameter_range': [0, 360.]
    })
    # Write geotiff
    if not write_netcdf:
        print('Write geotiff')
        tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
        stfmt.write_geotiff(tifffile, metadata, geolocation, band)
    else:
        print('Write netcdf')
        # u/v -> bands
        band = []
        mask = ucur.mask | vcur.mask
        vmin = -vmax
        offset, scale = vmin, (vmax - vmin) / 254.
        u = np.clip(ucur.data, vmin, vmax)
        array = np.round((u - offset) / scale).astype('uint8')
        array[mask] = 255
        band.append({
            'array': array,
            'scale': scale,
            'offset': offset,
            'description': 'current u',
            'unittype': 'm/s',
            'nodatavalue': 255,
            'parameter_range': [vmin, vmax]
        })
        v = np.clip(vcur.data, vmin, vmax)
        array = np.round((v - offset) / scale).astype('uint8')
        array[mask] = 255
        band.append({
            'array': array,
            'scale': scale,
            'offset': offset,
            'description': 'current v',
            'unittype': 'm/s',
            'nodatavalue': 255,
            'parameter_range': [vmin, vmax]
        })
        # Write
        ncfile = stfmt.format_ncfilename(outdir, metadata, create_dir=True)
        stfmt.write_netcdf(ncfile,
                           metadata,
                           geolocation,
                           band,
                           dgcpy=1.,
                           dgcpx=1.)
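
All these converters pack physical values into uint8, with 0-254 carrying data and 255 reserved for nodata; a self-contained sketch of the encode/decode round trip used above:

    import numpy as np

    vmin, vmax = 0., 5.08
    offset, scale = vmin, (vmax - vmin) / 254.
    values = np.array([0., 2.54, 5.08])
    encoded = np.round((np.clip(values, vmin, vmax) - offset)
                       / scale).astype('uint8')   # -> [0, 127, 254]
    decoded = encoded * scale + offset            # recovers ~[0., 2.54, 5.08]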
Example 5
def current(infile,
            outdir,
            vmin=0.,
            vmax=1.50,
            vmin_pal=0.,
            vmax_pal=1.5,
            write_netcdf=False):
    """
    """
    # Read/Process data
    print('Read/Process data')
    ncfile = NCFile(infile)
    if 'id' in ncfile.read_global_attributes():
        l4id = ncfile.read_global_attribute('id')
    elif (re.match(r'^dt_global_allsat_madt_uv.*\.nc',
                   os.path.basename(infile)) is not None):
        l4id = 'Surface_height'
        # vmin = 0.; vmax = 2.; vmin_pal = 0.; vmax_pal = 2.
    elif (re.match(r'^dt_global_allsat_msla_uv.*\.nc',
                   os.path.basename(infile)) is not None):
        l4id = 'Sea_Level_Anomaly'
        # vmin = 0; vmax = 1; vmin_pal = 0.; vmax_pal = 1
    elif re.match(r'^mdt.*\.nc', os.path.basename(infile)) is not None:
        l4id = 'Mean_Dynamic_Topo'
        # vmin = 0; vmax = 1.5; vmin_pal = 0; vmax_pal = 1.5
    elif re.match(r'^Tide_.*\.nc', os.path.basename(infile)) is not None:
        l4id = 'Tide'
        # vmin = 0; vmax = 1.5; vmin_pal = 0; vmax_pal = 1.5
    else:
        raise Exception('Unknown file.')
    # /TMP
    ucur = ncfile.read_values(L4_MAPS[l4id]['uname'])[0, ::-1, :]
    vcur = ncfile.read_values(L4_MAPS[l4id]['vname'])[0, ::-1, :]
    lon = ncfile.read_values('lon')[:].astype('float64')
    lat = ncfile.read_values('lat')[-1:-3:-1].astype('float64')
    lon[lon > 180.] = lon[lon > 180.] - 360.
    indsorted = np.argsort(lon)
    lon = lon[indsorted]
    ucur = ucur[:, indsorted]
    vcur = vcur[:, indsorted]
    lon0, dlon, lat0, dlat = lon[0], lon[1] - lon[0], lat[0], lat[1] - lat[0]
    if l4id in [
            'Mean_Dynamic_Topo',
    ]:
        dtime = datetime(2014, 12, 1)
    else:
        dtime_units = ncfile.read_field('time').units
        dtime = num2date(ncfile.read_values('time')[0], dtime_units)
    # rundtime = ncfile.read_global_attribute('date_modified')
    # rundtime = datetime.strptime(rundtime, '%Y%m%dT%H%M%SZ')
    # Construct metadata/geolocation/band(s)
    print('Construct metadata/geolocation/band(s)')
    metadata = {}
    metadata['product_name'] = L4_MAPS[l4id]['productname']
    metadata['name'] = os.path.splitext(os.path.basename(infile))[0]
    metadata['datetime'] = stfmt.format_time(dtime)
    metadata['time_range'] = L4_MAPS[l4id]['timerange']
    metadata['source_URI'] = infile
    metadata['source_provider'] = '*****@*****.**'
    metadata['processing_center'] = ''
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
    metadata['parameter'] = [L4_MAPS[l4id]['parameter']]
    # metadata['type'] = 'model'
    # metadata['model_longitude_resolution'] = abs(dlon)
    # metadata['model_latitude_resolution'] = abs(dlat)
    # metadata['model_analysis_datetime'] = stfmt.format_time(rundtime)
    geolocation = {}
    geolocation['projection'] = stfmt.format_gdalprojection()
    if l4id in [
            'Tide',
    ]:
        ucur = ucur[:-1, ::]
        vcur = vcur[:-1, ::]
        geolocation['geotransform'] = [lon0, dlon, 0, lat0, 0, dlat]
    else:
        geolocation['geotransform'] = [
            lon0 - dlon / 2., dlon, 0, lat0 - dlat / 2., 0, dlat
        ]
    band = []
    offset, scale = vmin, (vmax - vmin) / 254.
    mask = ucur.mask | vcur.mask
    curvel = np.sqrt(ucur.data**2 + vcur.data**2)
    curdir = np.mod(
        np.arctan2(vcur.data, ucur.data) * 180. / np.pi + 360., 360.)
    np.clip(curvel, vmin, vmax, out=curvel)
    array = np.round((curvel - offset) / scale).astype('uint8')
    array[mask] = 255
    colortable = stfmt.format_colortable('matplotlib_jet',
                                         vmin=vmin,
                                         vmax=vmax,
                                         vmin_pal=vmin_pal,
                                         vmax_pal=vmax_pal)
    band.append({
        'array': array,
        'scale': scale,
        'offset': offset,
        'description': L4_MAPS[l4id]['productname'],
        'unittype': 'm/s',
        'nodatavalue': 255,
        'parameter_range': [vmin, vmax],
        'colortable': colortable
    })
    array = np.round(curdir / 360. * 254.).astype('uint8')
    array[mask] = 255
    band.append({
        'array': array,
        'scale': 360. / 254.,
        'offset': 0.,
        'description': 'current direction',
        'unittype': 'deg',
        'nodatavalue': 255,
        'parameter_range': [0, 360.]
    })

    # Write geotiff
    print('Write geotiff')
    tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
    stfmt.write_geotiff(tifffile, metadata, geolocation, band)
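
The half-pixel shift in the geotransform above converts cell-center coordinates (lon0, lat0) to GDAL's corner-based convention; a small sketch of the forward mapping (grid values are illustrative):

    lon0, dlon, lat0, dlat = -179.875, 0.25, 89.875, -0.25  # hypothetical grid
    gt = [lon0 - dlon / 2., dlon, 0, lat0 - dlat / 2., 0, dlat]
    col, row = 0, 0
    lon_center = gt[0] + (col + 0.5) * gt[1]  # == lon0
    lat_center = gt[3] + (row + 0.5) * gt[5]  # == lat0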
Example 6
import os
import sys
import numpy
import netCDF4
from cerbere.mapper.ncfile import NCFile
from matplotlib import pyplot as plt
if __name__ == '__main__':
    filou = "/home/cerdata/provider/neodc/l4/esacci_sst/2010/03/02/20100302120000-ESACCI-L4_GHRSST-SSTdepth-OSTIA-GLOB_LT-v02.0-fv01.0.nc"
    print(filou)
    nc = NCFile(filou)
    filout = '/tmp/ostia_sst.png'
    filout2 = '/tmp/ostia_sst2.png'
    sst = nc.read_values('analysed_sst')
    sst = numpy.squeeze(sst)
    print(sst.shape)
    nc.close()

    nc = netCDF4.Dataset(filou)
    sst2 = nc.variables['analysed_sst'][:]
    sst2 = numpy.squeeze(sst2)
    print(sst2.shape)
    nc.close()

    #plot
    plt.figure()
    plt.pcolor(sst[200:800, 100:600])
    plt.colorbar()
    plt.savefig(filout)
    #plot
    plt.figure()
    plt.pcolor(sst2[200:800, 100:600])
    plt.colorbar()
    plt.savefig(filout2)
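
Since the point of this script is to compare the cerbere mapper with a direct netCDF4 read, a hedged addition that makes the comparison explicit instead of only visual:

    # sketch: both read paths should yield the same masked array
    assert numpy.ma.allequal(sst, sst2)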
Example 7
def fsle_gridded(infile,
                 outdir,
                 vmin=-1.,
                 vmax=0.,
                 vmin_pal=-1.,
                 vmax_pal=0.,
                 write_netcdf=False):
    """
    """
    # Read/Process data
    print('Read/Process data')
    ncfile = NCFile(infile)
    if 'id' in ncfile.read_global_attributes():
        l4id = ncfile.read_global_attribute('id')
    elif (re.match(r'^dt_global_allsat_madt_fsle.*\.nc',
                   os.path.basename(infile)) is not None):
        l4id = 'FSLE'
    else:
        raise Exception('Unknown file.')
    h = ncfile.read_values(L4_MAPS[l4id]['hname'])[0, ::-1, :]
    lon = ncfile.read_values('lon')[:].astype('float64')
    lat = ncfile.read_values('lat')[-1:-3:-1].astype('float64')
    lon[lon > 180.] = lon[lon > 180.] - 360.
    indsorted = np.argsort(lon)
    lon = lon[indsorted]
    h = h[:, indsorted]

    for i in range(2):  # avoid rounding errors
        lon[i] = np.round(lon[i] * 10000) / 10000
        lat[i] = np.round(lat[i] * 10000) / 10000
    lon0, dlon, lat0, dlat = lon[0], lon[1] - lon[0], lat[0], lat[1] - lat[0]
    dlon = 0.04
    dlat = -0.04
    dtime_units = ncfile.read_field('time').units
    dtime = num2date(ncfile.read_values('time')[0], dtime_units)
    # Construct metadata/geolocation/band(s)
    print('Construct metadata/geolocation/band(s)')
    metadata = {}
    metadata['product_name'] = L4_MAPS[l4id]['productname']
    metadata['name'] = os.path.splitext(os.path.basename(infile))[0]
    metadata['datetime'] = stfmt.format_time(dtime)
    metadata['time_range'] = L4_MAPS[l4id]['timerange']
    metadata['source_URI'] = infile
    metadata['source_provider'] = '*****@*****.**'
    metadata['processing_center'] = ''
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
    metadata['parameter'] = [L4_MAPS[l4id]['parameter']]
    # metadata['type'] = 'model'
    # metadata['model_longitude_resolution'] = abs(dlon)
    # metadata['model_latitude_resolution'] = abs(dlat)
    # metadata['model_analysis_datetime'] = stfmt.format_time(rundtime)
    geolocation = {}
    geolocation['projection'] = stfmt.format_gdalprojection()
    geolocation['geotransform'] = [
        lon0 - dlon / 2., dlon, 0, lat0 - dlat / 2., 0, dlat
    ]
    band = []
    offset, scale = vmin, (vmax - vmin) / 254.
    np.clip(h, vmin, vmax, out=h)
    array = np.round((h - offset) / scale).astype('uint8')
    array[h.mask] = 255
    colortable = stfmt.format_colortable('matplotlib_jet_r',
                                         vmin=vmin,
                                         vmax=vmax,
                                         vmin_pal=vmin_pal,
                                         vmax_pal=vmax_pal)
    band.append({
        'array': array,
        'scale': scale,
        'offset': offset,
        'description': 'FSLE',
        'unittype': 'day',
        'nodatavalue': 255,
        'parameter_range': [vmin, vmax],
        'colortable': colortable
    })
    # Write geotiff
    print('Write geotiff')
    tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
    stfmt.write_geotiff(tifffile, metadata, geolocation, band)
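
Note that dlon and dlat are overridden above with the nominal 0.04 degree spacing of the FSLE grid, presumably because the first two coordinates alone are too sensitive to rounding; a hedged alternative deriving the spacing from the full coordinate arrays:

    # sketch: median cell spacing is robust to isolated rounding errors
    dlon = float(np.median(np.diff(lon)))
    dlat = -float(np.median(np.diff(ncfile.read_values('lat')[:])))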
Example 8
def sea_level_gridded(infile,
                      outdir,
                      vmin=-1.,
                      vmax=1.0,
                      vmin_pal=-1.,
                      vmax_pal=1.,
                      write_netcdf=False):
    """
    """
    # Read/Process data
    print('Read/Process data')
    ncfile = NCFile(infile)
    if 'id' in ncfile.read_global_attributes():
        l4id = ncfile.read_global_attribute('id')
    elif (re.match(r'^MSL_Map_MERGED_Global_IB_RWT_NoGIA.*\.nc',
                   os.path.basename(infile)) is not None):
        l4id = 'Mean_Sea_Level'
        # vmin = -10; vmax = 10; vmin_pal = -10.; vmax_pal = 10
    elif (re.match(r'^dt_global_allsat_madt_h.*\.nc', os.path.basename(infile))
          is not None):
        l4id = 'Surface_height'
        # vmin = -2; vmax = 2; vmin_pal = -2; vmax_pal = 2
    elif (re.match(r'^dt_global_allsat_msla_h.*\.nc', os.path.basename(infile))
          is not None):
        l4id = 'Sea_Level_Anomaly'
        # vmin = -0.2; vmax = 0.2; vmin_pal = -0.2; vmax_pal = 0.2
    elif re.match(r'^mdt.*\.nc', os.path.basename(infile)) is not None:
        l4id = 'Mean_Dynamic_Topo'
        # vmin = -1.5; vmax = 1.5; vmin_pal = -1.5; vmax_pal = 1.5
    elif re.match(r'^mss.*\.nc', os.path.basename(infile)) is not None:
        l4id = 'Mean_Sea_Surface'
        # vmin = -80.; vmax = 80.; vmin_pal = -80.; vmax_pal = 80.
    elif (re.match(r'^nrt_merged_mswh.*\.nc', os.path.basename(infile))
          is not None):
        l4id = 'Sea_Wave_Height'
        # vmin = 0.; vmax = 6.0; vmin_pal = 0.; vmax_pal = 6.0
    elif (re.match(r'^nrt_merged_mwind.*\.nc', os.path.basename(infile))
          is not None):
        l4id = 'Wind'
        # vmin = 0.; vmax = 20.0; vmin_pal = 0.; vmax_pal = 20.0
    elif re.match(r'^Tide.*\.nc', os.path.basename(infile)) is not None:
        l4id = 'Tide'
        # vmin = -1.5; vmax = 1.5; vmin_pal = -1.5; vmax_pal = 1.50
    else:
        raise Exception('Unknown file.')
    # /TMP
    if l4id in [
            'Mean_Sea_Level',
    ]:
        h = ncfile.read_values(L4_MAPS[l4id]['hname'])[::-1, :]
        lon = ncfile.read_values('longitude')[:].astype('float64')
        lat = ncfile.read_values('latitude')[-1:-3:-1].astype('float64')
    elif l4id in ['Mean_Sea_Surface', 'Sea_Wave_Height', 'Wind']:
        h = ncfile.read_values(L4_MAPS[l4id]['hname'])[:, ::-1]
        h = np.transpose(h)
        lon = ncfile.read_values('NbLongitudes')[:].astype('float64')
        lat = ncfile.read_values('NbLatitudes')[-1:-3:-1].astype('float64')
    else:
        h = ncfile.read_values(L4_MAPS[l4id]['hname'])[0, ::-1, :]
        lon = ncfile.read_values('lon')[:].astype('float64')
        lat = ncfile.read_values('lat')[-1:-3:-1].astype('float64')
    lon[lon > 180.] = lon[lon > 180.] - 360.
    indsorted = np.argsort(lon)
    lon = lon[indsorted]
    h = h[:, indsorted]
    lon0, dlon, lat0, dlat = lon[0], lon[1] - lon[0], lat[0], lat[1] - lat[0]
    if l4id in ['Mean_Sea_Level', 'Mean_Dynamic_Topo', 'Mean_Sea_Surface']:
        dtime = datetime(2014, 12, 1)
    elif l4id in ['Sea_Wave_Height', 'Wind']:
        dtime = datetime(int(infile[-20:-16]), int(infile[-16:-14]),
                         int(infile[-14:-12]))
    else:
        dtime_units = ncfile.read_field('time').units
        dtime = num2date(ncfile.read_values('time')[0], dtime_units)
    # rundtime = ncfile.read_global_attribute('date_modified')
    # rundtime = datetime.strptime(rundtime, '%Y%m%dT%H%M%SZ')
    # Construct metadata/geolocation/band(s)
    print('Construct metadata/geolocation/band(s)')
    metadata = {}
    metadata['product_name'] = L4_MAPS[l4id]['productname']
    metadata['name'] = os.path.splitext(os.path.basename(infile))[0]
    metadata['datetime'] = stfmt.format_time(dtime)
    metadata['time_range'] = L4_MAPS[l4id]['timerange']
    metadata['source_URI'] = infile
    metadata['source_provider'] = '*****@*****.**'
    metadata['processing_center'] = ''
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
    metadata['parameter'] = [L4_MAPS[l4id]['parameter']]
    # metadata['type'] = 'model'
    # metadata['model_longitude_resolution'] = abs(dlon)
    # metadata['model_latitude_resolution'] = abs(dlat)
    # metadata['model_analysis_datetime'] = stfmt.format_time(rundtime)
    geolocation = {}
    geolocation['projection'] = stfmt.format_gdalprojection()
    if l4id in ['Mean_Sea_Surface', 'Sea_Wave_Height', 'Wind']:
        geolocation['geotransform'] = [
            lon0 - dlon, dlon, 0, lat0 - dlat, 0, dlat
        ]
    elif l4id in [
            'Tide',
    ]:
        h = h[:-1, ::]
        geolocation['geotransform'] = [lon0, dlon, 0, lat0, 0, dlat]
    else:
        geolocation['geotransform'] = [
            lon0 - dlon / 2., dlon, 0, lat0 - dlat / 2., 0, dlat
        ]
    band = []
    offset, scale = vmin, (vmax - vmin) / 254.
    np.clip(h, vmin, vmax, out=h)
    array = np.round((h - offset) / scale).astype('uint8')
    array[h.mask] = 255
    colortable = stfmt.format_colortable('matplotlib_jet',
                                         vmin=vmin,
                                         vmax=vmax,
                                         vmin_pal=vmin_pal,
                                         vmax_pal=vmax_pal)
    band.append({
        'array': array,
        'scale': scale,
        'offset': offset,
        'description': L4_MAPS[l4id]['hname'],
        'unittype': 'm',
        'nodatavalue': 255,
        'parameter_range': [vmin, vmax],
        'colortable': colortable
    })
    # Write geotiff
    print('Write geotiff')
    tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
    stfmt.write_geotiff(tifffile, metadata, geolocation, band)
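
The filename dispatch above repeats the same re.match/elif chain for every product; a hedged refactor sketch that tables the patterns instead (the product ids are taken from the code, the helper name is illustrative):

    import os
    import re

    L4_FILENAME_PATTERNS = [
        (r'^MSL_Map_MERGED_Global_IB_RWT_NoGIA.*\.nc', 'Mean_Sea_Level'),
        (r'^dt_global_allsat_madt_h.*\.nc', 'Surface_height'),
        (r'^dt_global_allsat_msla_h.*\.nc', 'Sea_Level_Anomaly'),
        (r'^mdt.*\.nc', 'Mean_Dynamic_Topo'),
        (r'^mss.*\.nc', 'Mean_Sea_Surface'),
        (r'^nrt_merged_mswh.*\.nc', 'Sea_Wave_Height'),
        (r'^nrt_merged_mwind.*\.nc', 'Wind'),
        (r'^Tide.*\.nc', 'Tide'),
    ]

    def guess_l4id(infile):
        """Return the first product id whose pattern matches the basename."""
        basename = os.path.basename(infile)
        for pattern, l4id in L4_FILENAME_PATTERNS:
            if re.match(pattern, basename) is not None:
                return l4id
        return None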
Example 9
def sar_doppler(infile, outdir):
    """
    """
    # tmp
    #infile = '/local/home/fab/data/sar/ASA/agulhas/ASA_WSM_1PNPDE20110518_210602_000002143102_00330_48189_1274/SAR_doppler.nc'
    # infile = '/local/home/fab/data/sar/ASA/agulhas/ASA_WSM_1PNPDE20110824_211403_000002143106_00014_49597_2093/SAR_doppler.nc'
    # outdir = '/local/home/data/syntool_inputs'
    # /tmp
    # Read/Process data
    print('Read/Process data')
    sardop = NCFile(infile)
    product_ref = sardop.read_global_attribute('SOURCE_PRODUCT_REF')
    start_time = sardop.read_global_attribute('SOURCE_START_DATE')
    start_time = datetime.strptime(start_time, '%Y%m%d%H%M%S.%f')
    duration = sardop.read_global_attribute('SOURCE_ACQ_DURATION')
    stop_time = start_time + timedelta(seconds=duration)
    polarisation = sardop.read_global_attribute('SOURCE_POLARIZATION')
    lon = sardop.read_values('longitude')[::-1, :]
    lat = sardop.read_values('latitude')[::-1, :]
    #dopano = sardop.read_values('dopanomaly')[::-1, :]
    radvel = sardop.read_values('radial_vel')[::-1, :]
    validity = sardop.read_values('validity')[::-1, :]
    track_angle = sardop.read_global_attribute('SOURCE_TRACK_ANGLE')
    if track_angle < 0:
        radvel *= -1
    shp = lon.shape
    nlines = int(np.ceil(shp[0] / 4.)) + 1
    lines = np.round(np.linspace(0, shp[0] - 1, num=nlines)).astype('int32')
    npixels = int(np.ceil(shp[1] / 4.)) + 1
    pixels = np.round(np.linspace(0, shp[1] - 1, num=npixels)).astype('int32')
    gcplin = np.tile(lines.reshape(nlines, 1), (1, npixels))
    gcppix = np.tile(pixels.reshape(1, npixels), (nlines, 1))
    gcplon = lon[gcplin, gcppix]
    gcplat = lat[gcplin, gcppix]
    gcphei = np.zeros((nlines, npixels))
    gcppix = gcppix + 0.5
    gcplin = gcplin + 0.5
    # Construct metadata/geolocation/band(s)
    print('Construct metadata/geolocation/band(s)')
    metadata = {}
    (dtime, time_range) = stfmt.format_time_and_range(start_time,
                                                      stop_time,
                                                      units='ms')
    metadata['product_name'] = 'SAR_doppler'
    metadata['name'] = product_ref
    metadata['datetime'] = dtime
    metadata['time_range'] = time_range
    metadata['source_URI'] = infile
    metadata['source_provider'] = 'ESA'
    metadata['processing_center'] = 'CLS'
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
    metadata['parameter'] = 'radial horizontal velocities'
    metadata['type'] = 'remote sensing'
    metadata['sensor_type'] = 'SAR'
    metadata['sensor_name'] = 'ASAR'
    metadata['sensor_platform'] = 'ENVISAT'
    metadata['sensor_mode'] = 'WSM'
    #metadata['sensor_swath'] = sensor_swath
    metadata['sensor_polarisation'] = polarisation
    #metadata['sensor_pass'] = sensor_pass
    geolocation = {}
    geolocation['projection'] = stfmt.format_gdalprojection(geogcs='WGS84')
    geolocation['gcps'] = stfmt.format_gdalgcps(gcplon, gcplat, gcphei, gcppix,
                                                gcplin)
    # band = []
    # scale = (vmax-vmin)/254.
    # offset = vmin
    # indzero = np.where(validity == 0)
    # array = np.clip(np.round((radvel-offset)/scale), 0, 254).astype('uint8')
    # array[indzero] = 255
    # band.append({'array':array, 'scale':scale, 'offset':offset,
    #              'description':'radial horizontal velocities', 'unittype':'m/s',
    #              'nodatavalue':255, 'parameter_range':[vmin, vmax]})
    band = []
    cmap = doppler_colormap()
    norm = Normalize(vmin=-2.5, vmax=2.5)
    rgb = cmap(norm(radvel))
    indnodata = np.where(validity == 0)
    for ich in range(3):
        channel = np.round(rgb[:, :, ich] * 255).astype('uint8')
        channel[indnodata] = 0
        band.append({'array': channel, 'nodatavalue': 0})
    # Write geotiff
    print('Write geotiff')
    tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
    stfmt.write_geotiff(tifffile, metadata, geolocation, band)
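
Unlike the scaled single-band products above, the Doppler example renders the signal to three RGB channels with 0 as nodata; a short sketch of that mapping (doppler_colormap comes from the code above, the velocity sample is illustrative):

    import numpy as np
    from matplotlib.colors import Normalize

    norm = Normalize(vmin=-2.5, vmax=2.5)           # radial velocity in m/s
    radvel_sample = np.array([[-2.5, 0., 2.5]])
    rgba = doppler_colormap()(norm(radvel_sample))  # (rows, cells, 4) in [0, 1]
    red = np.round(rgba[:, :, 0] * 255).astype('uint8')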
Example 10
elif model == 'Image':
    rows, cells = geodims
    r0, r1 = rows // 2 - width, rows // 2 + width
    c0, c1 = cells // 2 - width, cells // 2 + width
    print("Subset")
    print("row : ", r0, r1)
    print("cell: ", c0, c1)
    subset = modelobj.extract_subset(slices={'row': slice(r0, r1, 1),
                                             'cell': slice(c0, c1, 1)})
elif model == 'Grid':
    nj, ni = geodims
    j0, j1 = nj // 2 - width, nj // 2 + width
    i0, i1 = ni // 2 - width, ni // 2 + width
    subset = modelobj.extract_subset(slices={'y': slice(j0, j1, 1),
                                             'x': slice(i0, i1, 1)})

# save subset
logging.info("Save subset")
subsetfname = 'hrdds21.nc'
if os.path.exists(subsetfname):
    os.remove(subsetfname)
oncf = NCFile(url=subsetfname, mode=WRITE_NEW, ncformat='NETCDF4')
subset.save(oncf)
oncf.close()

# read subset
logging.info("Read subset")
f = NCFile(url=subsetfname)
modelobj = modelreader()
modelobj.load(f)
Example 11
class SAFEOLFile(AbstractMapper):
    """Abstract class for SAFE OLCI files.

    This mapper concatenates together the files within a SAFE folder that
    share the same dimensions.

    url: the path to the product SAFE folder

    L1B
    geo_coordinates.nc, instrument_data.nc, *_radiance.nc, time_coordinates.nc

    L2 LAND
    geo_coordinates.nc, instrument_data.nc, iwv.nc, lqsf.nc, ogvi.nc, otci.nc,
    rc_ogvi.nc, time_coordinates.nc

    L2 WATER
    geo_coordinates.nc, instrument_data.nc, chl_nn.nc, chl_oc4me.nc, iop_nn.nc,
    *_reflectance.nc, par.nc, trsp.nc, tsm_nn.nc, w_aer.nc, wqsf.nc,
    time_coordinates.nc
    """
    def __init__(self, url=None, mode=READ_ONLY, **kwargs):
        if mode != READ_ONLY:
            raise Exception("This mapper can only be used in read_only mode.")
        super(SAFEOLFile, self).__init__(url=url, mode=mode, **kwargs)
        self.__data_handlers = []
        self.__fields = {}
        # ancillary files
        cartesian = "geo_coordinates.nc"
        instrument = "instrument_data.nc"
        times = "time_coordinates.nc"

        # handlers for ancillary fields
        self.__time_handler = NCFile(os.path.join(url, times),
                                     mode=mode, **kwargs)
        self.__fields[times] = []
        self.__coord_handler = NCFile(os.path.join(url, cartesian),
                                      mode=mode, **kwargs)
        self.__fields[cartesian] = []
        self.__instr_handler = NCFile(os.path.join(url, instrument),
                                      mode=mode, **kwargs)
        self.__fields[instrument] = []
        # get product type
        safefolder = os.path.basename(os.path.normpath(url))
        print(safefolder)
        if "_OL_1_ERR" in safefolder or "_OL_1_EFR" in safefolder:
            datafiles = DATAFILES["L1B"]
        elif "OL_2_LRR" in safefolder or "OL_2_LFR" in safefolder:
            datafiles = DATAFILES["L2LAND"]
        elif "OL_2_WRR" in safefolder or "OL_2_WFR" in safefolder:
            datafiles = DATAFILES["L2WATER"]
        else:
            raise Exception("Unknown product type")
        # detect the data files and instantiate mappers for each one
        for f in datafiles:
            if '*' in f:
                fnames = glob.glob(os.path.join(url, f))
                for fname in fnames:
                    self.__data_handlers.append(NCFile(url=fname, mode=mode,
                                                       **kwargs))
                    self.__fields[f] = []
            else:
                fname = os.path.join(url, f)
                self.__data_handlers.append(NCFile(url=fname, mode=mode,
                                                   **kwargs))
                self.__fields[f] = []
        self.__fieldlocator = {}
        self.__geofieldlocator = {}

    def open(self,
             view=None,
             datamodel=None,
             datamodel_geolocation_dims=None):
        """
        Args:
            view (dict, optional): a dictionary where keys are dimension names
                and values are slices. A view can be set on a file, meaning
                that only the subset defined by this view will be accessible.
                This view is expressed as any subset (see :func:`get_values`).
                For example::

                view = {'row':slice(200,250), 'cell':slice(200,300)}

            datamodel (str): type of feature read or written. Internal argument
                only used by the classes from :mod:`~cerbere.datamodel`
                package. Can be 'Grid', 'Swath', etc...

            datamodel_geolocation_dims (list, optional): list of the name of the
                geolocation dimensions defining the data model to be read in
                the file. Optional argument, only used by the datamodel
                classes, in case the mapper class can store different types of
                data models.

        Returns:
            a handler on the opened file
        """
        # open each related file in the SAFE repo
        if view is None:
            rowview = None
        else:
            rowview = {'row': view['row']}
        for hdlr in self.__data_handlers:
            hdlr.open(view, datamodel,
                      datamodel_geolocation_dims)
        self.__coord_handler.open(view, datamodel,
                                  datamodel_geolocation_dims)
        self.__time_handler.open(rowview, datamodel,
                                 datamodel_geolocation_dims)
        self.__instr_handler.open(view, datamodel,
                                  datamodel_geolocation_dims)
        # build the two-way dictionaries of fields
        # ...for data
        for hdlr in self.__data_handlers:
            self.__fields[os.path.basename(hdlr.get_url())]\
                = hdlr.get_fieldnames()
            for fieldname in hdlr.get_fieldnames():
                self.__fieldlocator[fieldname] = hdlr

    def close(self):
        """Close handler on storage"""
        for hdlr in self.__data_handlers:
            hdlr.close()
        self.__data_handlers = None
        self.__coord_handler.close()
        self.__time_handler.close()
        self.__instr_handler.close()
        self.__coord_handler = None
        self.__time_handler = None
        self.__instr_handler = None

    def get_dimsize(self, dimname):
        """Return the size of a dimension.

        Args:
            dimname (str): name of the dimension.

        Returns:
            int: size of the dimension.
        """
        dim = self.get_matching_dimname(dimname)
        return self.__coord_handler.get_dimsize(dim)

    def get_dimensions(self, fieldname=None):
        """Return the dimension names of a file or a field in the
        file. For temporal and spatial dimensions, the cerbere standard names
        are returned.

        Args:
            fieldname (str): the name of the field from which to get the
                dimensions. For a geolocation field, use the cerbere standard
                name (time, lat, lon), though native field name will work too.

        Returns:
            tuple<str>: the standard dimensions of the field or file.
        """
        if fieldname is None:
            return self.__coord_handler.get_dimensions()
        if fieldname in ['time', 'lat', 'lon']:
            # Should all have the same dimension as lat
            native_fieldname = self.get_geolocation_field('lat')
            dims = self.__coord_handler.get_dimensions(native_fieldname)
        else:
            handler = self.__fieldlocator[fieldname]
            dims = handler.get_dimensions(fieldname)
        # convert geolocation dims to standard names
        newdims = []
        for dim in list(dims):
            newdims.append(self.get_standard_dimname(dim))
        return tuple(newdims)

    def get_matching_dimname(self, dimname):
        """Return the equivalent name in the native format for a standard
        dimension.

        This is a translation of the standard names to native ones. It is used
        for internal purpose only and should not be called directly.

        The standard dimension names are:

        * x, y, time for :class:`~cerbere.datamodel.grid.Grid`
        * row, cell, time for :class:`~cerbere.datamodel.swath.Swath` or
          :class:`~cerbere.datamodel.image.Image`

        To be derived when creating an inherited data mapper class. This is
        mandatory for geolocation dimensions which must be standard.

        Args:
            dimname (str): standard dimension name.

        Returns:
            str: return the native name for the dimension. Return `dimname` if
                the input dimension has no standard name.

        See Also:
            see :func:`get_standard_dimname` for the reverse operation
        """
        matching = {'time': 'time', 'row': 'rows', 'cell': 'columns'}
        if dimname in matching:
            return matching[dimname]
        return dimname

    def get_standard_dimname(self, dimname):
        """
        Returns the equivalent standard dimension name for a
        dimension in the native format.

        This is a translation of the native names to standard ones. It is used
        for internal purpose and should not be called directly.

        To be derived when creating an inherited data mapper class. This is
        mandatory for geolocation dimensions which must be standard.

        Args:
            dimname (string): native dimension name

        Return:
            str: the (translated) standard name for the dimension. Return
            `dimname` if the input dimension has no standard name.

        See Also:
            see :func:`get_matching_dimname` for the reverse operation
        """
        matching = {'time': 'time', 'rows': 'row', 'columns': 'cell'}
        if dimname in matching:
            return matching[dimname]
        return dimname

    def get_fieldnames(self):
        """Returns the list of geophysical fields stored for the feature.

        The geolocation field names are excluded from this list.

        Returns:
            list<string>: list of field names
        """
        return list(self.__fieldlocator.keys())

    def __get_native_fieldname(self, fieldname):
        """Returns the native name of a field.

        Args:
            fieldname (str): name of the field.

        Returns:
            str: the native name of the field. The same as input
                if the field is not a geolocation field.
        """
        if fieldname in ['lat', 'lon', 'time', 'z']:
            return self.get_geolocation_field(fieldname)
        return fieldname

    def get_geolocation_field(self, fieldname):
        """Return the equivalent field name in the file format for a standard
        geolocation field (lat, lon, time, z).

        Used for internal purpose and should not be called directly.

        Args:
            fieldname (str): name of the standard geolocation field (lat, lon
                or time)

        Return:
            str: name of the corresponding field in the native file format.
                Returns None if no matching is found
        """
        MATCHES = {'lat': 'latitude', 'lon': 'longitude', 'time': 'time'}
        if fieldname in MATCHES:
            return MATCHES[fieldname]
        return None

    def read_field(self, fieldname):
        """
        Return the :class:`cerbere.field.Field` object corresponding to
        the requested fieldname.

        The :class:`cerbere.field.Field` class contains all the metadata
        describing a field (equivalent to a variable in netCDF).

        Args:
            fieldname (str): name of the field

        Returns:
            :class:`cerbere.field.Field`: the corresponding field object
        """
        if fieldname == 'time':
            rows = self.get_dimsize('row')
            cols = self.get_dimsize('cell')
            variable = Variable(
                shortname='time',
                description='time of measurement',
                authority=None,
                standardname=None
                )
            field = Field(
                variable,
                OrderedDict([('row', rows), ('cell', cols)]),
                datatype=dtype(int64)
                )
            field.attach_storage(self.get_field_handler(fieldname))
            field.units = self.__time_handler.get_handler().\
                variables['time_stamp'].units
            return field
        elif fieldname in ['lat', 'lon']:
            native_name = self.get_geolocation_field(fieldname)
            geofield = self.__coord_handler.read_field(
                native_name
                )
            geofield.name = fieldname
            geofield.attach_storage(self.get_field_handler(fieldname))
            return geofield
        else:
            native_name = self.__get_native_fieldname(fieldname)
            return self.__fieldlocator[native_name].read_field(native_name)

    def read_values(self, fieldname, slices=None):
        """Read the data of a field.

        Args:
            fieldname (str): name of the field which to read the data from

            slices (list of slice, optional): list of slices for the field if
                subsetting is requested. A slice must then be provided for each
                field dimension. The slices are relative to the opened view
                (see :func:open) if a view was set when opening the file.

        Return:
            MaskedArray: array of data read. Array type is the same as the
                storage type.
        """
        native_name = self.__get_native_fieldname(fieldname)
        if fieldname == 'time':
            if slices is not None:
                tslices = [slices[0]]
            else:
                tslices = slices
            time = self.__time_handler.read_values('time_stamp',
                                                   slices=tslices)
            # reshape as a 2D field
            rows = self.get_dimsize('row')
            cols = self.get_dimsize('cell')
            if slices is None:
                shape = (cols, rows)
            else:
                newslices = self._fill_slices(slices, (rows, cols))
                shape = (newslices[1].stop - newslices[1].start,
                         newslices[0].stop - newslices[0].start)
            time = ma.resize(time, shape).transpose()
            return time
        elif fieldname in ['lat', 'lon']:
            return self.__coord_handler.read_values(native_name,
                                                    slices)
        else:
            return self.__fieldlocator[native_name].read_values(native_name,
                                                                slices)

    def read_fillvalue(self, fieldname):
        """Read the fill value of a field.

        Args:
            fieldname (str): name of the field.

        Returns:
            number or char or str: fill value of the field. The type is the
                same as the type of the data in the field.
        """
        return self.__fieldlocator[fieldname].read_fillvalue(fieldname)

    def read_global_attributes(self):
        """Returns the names of the global attributes.

        Returns:
            list<str>: the list of the attribute names.
        """
        # all files seem to have the same list of global attributes.
        return self.__coord_handler.read_global_attributes()

    def read_global_attribute(self, name):
        """Returns the value of a global attribute.

        Args:
            name (str): name of the global attribute.

        Returns:
            str, number or datetime: value of the corresponding attribute.
        """
        # all files seem to have the same list of global attributes.
        return self.__coord_handler.read_global_attribute(name)

    def read_field_attributes(self, fieldname):
        """Return the specific attributes of a field.

        Args:
            fieldname (str): name of the field.

        Returns:
            dict<string, string or number or datetime>: a dictionary where keys
                are the attribute names.
        """
        return self.__fieldlocator[fieldname].read_field_attributes(fieldname)

    def get_start_time(self):
        """Returns the minimum date of the file temporal coverage.

        Returns:
            datetime: start time of the data in file.
        """
        varname = 'time_stamp'
        vardate = self.__time_handler.get_handler().variables[varname]
        return num2date(vardate[0], vardate.units)

    def get_end_time(self):
        """Returns the maximum date of the file temporal coverage.

        Returns:
            datetime: end time of the data in file.
        """
        # WRONG!!!
        varname = 'time_stamp'
        vardate = self.__time_handler.get_handler().variables[varname]
        return num2date(vardate[-1], vardate.units)

    def get_bbox(self):
        """Returns the bounding box of the feature, as a tuple.

        Returns:
            tuple: bbox expressed as (lonmin, latmin, lonmax, latmax)
        """
        return None

    def write_global_attributes(self, attrs):
        """Write the global attributes of the file.

        Args:
            attrs (dict<string, string or number or datetime>): a dictionary
                containing the attributes names and values to be written.
        """
        raise NotImplementedError

    def create_field(self, field, dim_translation=None):
        """Creates a new field in the mapper.

        Creates the field structure but does not yet write its values array.

        Args:
            field (Field): the field to be created.

        See also:
            :func:`write_field` for writing the values array.
        """
        raise NotImplementedError

    def create_dim(self, dimname, size=None):
        """Add a new dimension.

        Args:
            dimname (str): name of the dimension.
            size (int): size of the dimension (unlimited if None)
        """
        raise NotImplementedError

    def write_field(self, fieldname):
        """Writes the field data on disk.

        Args:
            fieldname (str): name of the field to write.
        """
        raise NotImplementedError
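
The get_matching_dimname/get_standard_dimname pair above are inverse lookups over the same dimension pairs; a tiny sketch making that symmetry explicit:

    # the native/standard dimension translations are inverses of each other
    matching = {'time': 'time', 'row': 'rows', 'cell': 'columns'}
    standard = {native: std for std, native in matching.items()}
    assert standard['rows'] == 'row' and standard['columns'] == 'cell'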
Example 12
class SAFESLFile(AbstractMapper):
    """Abstract class for SAFE SLSTR files (except L2P).
    """
    def __init__(self, url=None, sltype=None, mode=READ_ONLY, **kwargs):
        if mode != READ_ONLY:
            raise Exception("This mapper can only be used in read_only mode.")
        if sltype not in ['i', 'c', 'a', 'b']:
            raise Exception("Unknown SLSTR product type")
        super(SAFESLFile, self).__init__(url=url, mode=mode, **kwargs)
        self.__sltype = sltype
        self.__data_handlers = []
        self.__oblique_fields = []
        # coordinate files
        geodetic = "geodetic_%sn.nc" % sltype
        times = "time_%sn.nc" % sltype
        coordinate_files = [geodetic, times]
        # detect the data files and instantiate mappers for each one
        datafiles = []
        for fname in glob.glob(os.path.join(url, "*_%s[n,o].nc" % sltype)):
            if os.path.basename(fname) not in coordinate_files:
                datafiles.append(os.path.basename(fname))
        for f in datafiles:
            fname = os.path.join(url, f)
            self.__data_handlers.append(NCFile(url=fname, mode=mode, **kwargs))
        # instantiate mappers for each coordinate file
        self.__geod_handler = NCFile(os.path.join(url, geodetic),
                                     mode=mode,
                                     **kwargs)
        self.__time_handler = NCFile(os.path.join(url, times),
                                     mode=mode,
                                     **kwargs)
        self.__fieldlocator = {}
        self.__geofieldlocator = {}
        self.__fieldtranslate = {}
        # offset between nadir and oblique swath edge
        self.nadir_to_oblique_offset = None

    def __is_oblique(self, fieldname):
        """Test if a field corresponds to an oblique view subproduct.

        Returns:
            bool: True if the field corresponds to an oblique view.
        """
        return (fieldname in self.__oblique_fields)

    def open(self, view=None, datamodel=None, datamodel_geolocation_dims=None):
        """
        Args:
            view (dict, optional): a dictionary where keys are dimension names
                and values are slices. A view can be set on a file, meaning
                that only the subset defined by this view will be accessible.
                This view is expressed as any subset (see :func:`get_values`).
                For example::

                view = {'row':slice(200,250), 'cell':slice(200,300)}

            datamodel (str): type of feature read or written. Internal argument
                only used by the classes from :mod:`~cerbere.datamodel`
                package. Can be 'Grid', 'Swath', etc...

            datamodel_geolocation_dims (list, optional): list of the name of
                the geolocation dimensions defining the data model to be read
                in the file. Optional argument, only used by the datamodel
                classes, in case the mapper class can store different types of
                data models.

        Returns:
            a handler on the opened file
        """
        # open each related file in the SAFE repo
        if view is None:
            rowview = None
        else:
            rowview = {'row': view['row']}
            # modify view for oblique fields which are narrower: shift the
            # cell slice by the nadir/oblique offset
            if 'cell' in view:
                obliqueview = dict(view)  # copy; do not alias the nadir view
                cellview = view['cell']
                obliqueview['cell'] = slice(
                    cellview.start - self.nadir_to_oblique_offset,
                    cellview.stop - self.nadir_to_oblique_offset,
                    cellview.step)
        for hdlr in self.__data_handlers:
            f = os.path.basename(hdlr.get_url())
            is_oblique = (f[-4] == 'o')
            newview = view
            if is_oblique and view is not None:
                newview = obliqueview
            hdlr.open(newview, datamodel, datamodel_geolocation_dims)
        self.__geod_handler.open(view, datamodel, datamodel_geolocation_dims)
        self.__time_handler.open(rowview, datamodel,
                                 datamodel_geolocation_dims)
        # build the two-way dictionaries of fields
        # ...for data
        for hdlr in self.__data_handlers:
            f = os.path.basename(hdlr.get_url())
            is_oblique = (f[-4] == 'o')
            for fieldname in hdlr.get_fieldnames():
                ncvar = hdlr.get_handler().variables[fieldname]
                if 'long_name' in (ncvar.ncattrs()):
                    longname = ncvar.long_name
                    newfieldname = self.__get_fieldname(fieldname, longname)
                else:
                    newfieldname = fieldname
                self.__fieldtranslate[newfieldname] = fieldname
                self.__fieldlocator[newfieldname] = hdlr
                if is_oblique:
                    self.__oblique_fields.append(newfieldname)
        # ...for geodetic coordinates
        for fieldname in self.__geod_handler.get_fieldnames():
            self.__geofieldlocator[fieldname] = self.__geod_handler
        # define nadir/oblique offset
        nadir_track_offset = (self.__geofieldlocator[
            'latitude_orphan_%sn' %
            self.__sltype].read_global_attribute('track_offset'))
        oblique_track_offset = (self.__fieldlocator[
            'latitude_orphan_%so' %
            self.__sltype].read_global_attribute('track_offset'))
        self.nadir_to_oblique_offset = int(
            round(nadir_track_offset - oblique_track_offset))
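        # e.g. with a nadir track_offset of 998 and an oblique track_offset
        # of 450 (illustrative values), the offset is 548: oblique cell c
        # lines up with nadir cell c + 548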

    def close(self):
        """Close handler on storage"""
        for hdlr in self.__data_handlers:
            hdlr.close()
        self.__data_handlers = None
        self.__geod_handler.close()
        self.__time_handler.close()
        self.__geod_handler = None
        self.__time_handler = None

    def get_dimsize(self, dimname):
        """Return the size of a dimension.

        Args:
            dimname (str): name of the dimension.

        Returns:
            int: size of the dimension.
        """
        dim = self.get_matching_dimname(dimname)
        return self.__geod_handler.get_dimsize(dim)

    def get_dimensions(self, fieldname=None):
        """Return the dimension names of a file or a field in the
        file. For temporal and spatial dimensions, the cerbere standard names
        are returned.

        Args:
            fieldname (str): the name of the field from which to get the
                dimensions. For a geolocation field, use the cerbere standard
                name (time, lat, lon), though native field name will work too.

        Returns:
            tuple<str>: the standard dimensions of the field or file.
        """
        if fieldname is None:
            dims = self.__geod_handler.get_dimensions()
            if 'orphan_pixels' in dims:
                # same dimension name (but not size) in oblique/nadir views so
                # we have to create two dimensions since we merge the two views
                newdims = []
                for dim in dims:
                    if dim != 'orphan_pixels':
                        newdims.append(dim)
                    else:
                        newdims.extend([dim + '_n', dim + '_o'])
                return newdims
            return dims
        if fieldname in ['time', 'lat', 'lon', 'z']:
            # Should all have the same dimension as lat
            native_fieldname = self.get_geolocation_field('lat')
            dims = self.__geod_handler.get_dimensions(native_fieldname)
        else:
            handler = self.__fieldlocator[fieldname]
            dims = handler.get_dimensions(
                self.__get_native_fieldname(fieldname))
        # convert geolocation dims to standard names
        newdims = []
        for dim in list(dims):
            if self.__is_oblique(fieldname):
                view = 'o'
            else:
                view = 'n'
            newdims.append(self.get_standard_dimname(dim, view))
        return tuple(newdims)

    def get_matching_dimname(self, dimname):
        """Return the equivalent name in the native format for a standard
        dimension.

        This is a translation of the standard names to native ones. It is used
        for internal purpose only and should not be called directly.

        The standard dimension names are:

        * x, y, time for :class:`~cerbere.datamodel.grid.Grid`
        * row, cell, time for :class:`~cerbere.datamodel.swath.Swath` or
          :class:`~cerbere.datamodel.image.Image`

        To be derived when creating an inherited data mapper class. This is
        mandatory for geolocation dimensions which must be standard.

        Args:
            dimname (str): standard dimension name.

        Returns:
            str: return the native name for the dimension. Return `dimname` if
                the input dimension has no standard name.

        See Also:
            see :func:`get_standard_dimname` for the reverse operation
        """
        matching = {
            'time': 'time',
            'row': 'rows',
            'cell': 'columns',
            'z': 'elevation'
        }
        # remove the oblique/nadir suffix
        if dimname[-2:] in ['_n', '_o']:
            dimname = dimname[:-2]
        if dimname in matching:
            return matching[dimname]
        return dimname

    def get_standard_dimname(self, dimname, view=None):
        """
        Returns the equivalent standard dimension name for a
        dimension in the native format.

        This is a translation of the native names to standard ones. It is used
        for internal purpose and should not be called directly.

        To be derived when creating an inherited data mapper class. This is
        mandatory for geolocation dimensions which must be standard.

        Args:
            dimname (string): native dimension name

        Return:
            str: the (translated) standard name for the dimension. Return
            `dimname` if the input dimension has no standard name.

        See Also:
            see :func:`get_matching_dimname` for the reverse operation
        """
        matching = {
            'time': 'time',
            'rows': 'row',
            'columns': 'cell',
            'elevation': 'z'
        }
        if dimname in matching:
            return matching[dimname]
        # for other dimensions, add the oblique/nadir suffix
        if dimname in ['row', 'cell', 'time', 'z']:
            return dimname
        if view == 'n':
            dimname += '_n'
        elif view == 'o':
            dimname += '_o'
        return dimname

    def get_fieldnames(self):
        """Returns the list of geophysical fields stored for the feature.

        The geolocation field names are excluded from this list.

        Returns:
            list<string>: list of field names
        """
        return self.__fieldlocator.keys()

    def __get_native_fieldname(self, fieldname):
        """Returns the native name of a field.

        Args:
            fieldname (str): name of the field.

        Returns:
            str: the native name of the field. The same as input
                if the field is not a geolocation field.
        """
        if fieldname in ['lat', 'lon', 'time', 'z']:
            return self.get_geolocation_field(fieldname)
        if fieldname in self.__fieldtranslate:
            return self.__fieldtranslate[fieldname]
        return fieldname

    def __get_fieldname(self, fieldname, longname):
        """Returns a unique field name built from the long name for ambiguous
        field names.

        Used because some sub files use the same variable names.

        Args:
            fieldname (str): field name to replace with a new name, if not
                unique.
            longname (str): longname from which to build a new unique field
                name
        Returns:
            str: a unique field name among all the files in a SAFE container.
        """
        if fieldname in ["SST", "SST_uncertainty", "exception"]:
            return longname.replace(' ', '_')
        return fieldname

    def get_geolocation_field(self, fieldname):
        """Return the equivalent field name in the file format for a standard
        geolocation field (lat, lon, time, z).

        Used for internal purpose and should not be called directly.

        Args:
            fieldname (str): name of the standard geolocation field (lat, lon
                or time)

        Return:
            str: name of the corresponding field in the native file format.
                Returns None if no matching is found
        """
        if fieldname == 'time':
            return 'time'
        matching = {
            'lat': 'latitude',
            'lon': 'longitude',
            'time': 'time',
            'z': 'elevation'
        }[fieldname]
        native_fieldname = matching + '_%sn' % self.__sltype
        return native_fieldname

    def read_field(self, fieldname):
        """
        Return the :class:`cerbere.field.Field` object corresponding to
        the requested fieldname.

        The :class:`cerbere.field.Field` class contains all the metadata
        describing a field (equivalent to a variable in netCDF).

        Args:
            fieldname (str): name of the field

        Returns:
            :class:`cerbere.field.Field`: the corresponding field object
        """
        if fieldname == 'time':
            rows = self.get_dimsize('row')
            cols = self.get_dimsize('cell')
            variable = Variable(shortname='time',
                                description='time of measurement',
                                authority=None,
                                standardname=None)
            field = Field(variable,
                          OrderedDict([('row', rows), ('cell', cols)]),
                          datatype=dtype(int64))
            field.attach_storage(self.get_field_handler(fieldname))
            field.units = self.__time_handler.get_handler().\
                variables['time_stamp_%s' % self.__sltype[0]].units
            return field
        elif fieldname in ['lat', 'lon', 'z']:
            native_name = self.get_geolocation_field(fieldname)
            geofield = self.__geofieldlocator[native_name].read_field(
                native_name)
            geofield.name = fieldname
            return geofield
        else:
            native_name = self.__get_native_fieldname(fieldname)
            field = self.__fieldlocator[fieldname].read_field(native_name)
            field.name = fieldname
            field.attach_storage(self.get_field_handler(fieldname))
            dims = field.dimensions
            renamed_dims = OrderedDict()
            for dim in dims:
                newdim = dim
                if dim not in ['row', 'cell', 'time', 'z']:
                    if self.__is_oblique(fieldname):
                        newdim += '_o'
                    else:
                        newdim += '_n'
                renamed_dims[newdim] = dims[dim]
            field.dimensions = renamed_dims
            # for oblique view, the swath is narrower. It is padded with
            # dummy values to stack it over nadir view fields
            if fieldname in self.__oblique_fields:
                if 'cell' in field.get_dimnames():
                    field.dimensions['cell'] = self.get_dimsize('cell')
            return field

    def read_values(self, fieldname, slices=None):
        """Read the data of a field.

        Args:
            fieldname (str): name of the field which to read the data from

            slices (list of slice, optional): list of slices for the field if
                subsetting is requested. A slice must then be provided for each
                field dimension. The slices are relative to the opened view
                (see :func:open) if a view was set when opening the file.

        Return:
            MaskedArray: array of data read. Array type is the same as the
                storage type.
        """
        native_name = self.__get_native_fieldname(fieldname)
        rowslice = None
        if slices is not None:
            rowslice = [slices[0]]
        if fieldname == 'time':
            suffix = self.__sltype
            SCANSYNC = self.__time_handler.read_values('SCANSYNC')[0]
            PIXSYNC_i = self.__time_handler.read_values('PIXSYNC_%s' %
                                                        suffix)[0]
            prefix = PREFIX['n']
            first_scan_i\
                = self.__time_handler.read_values(
                    '%s_First_scan_%s' % (prefix, suffix),
                    slices=rowslice)
            first_min_ts\
                = self.__time_handler.read_values(
                    '%s_Minimal_ts_%s' % (prefix, suffix),
                    slices=rowslice)
            scanfield = 'scan_%sn' % suffix
            pixelfield = 'pixel_%sn' % suffix
            indices_handler = self.__fieldlocator[scanfield]
            scan = indices_handler.read_values(scanfield, slices=slices)
            pixel = indices_handler.read_values(pixelfield, slices=slices)
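            # rebuild per-pixel acquisition times from the per-scan
            # timestamps:
            #   t(row, cell) = first_min_ts(row)
            #                  + (scan - first_scan(row)) * SCANSYNC
            #                  + pixel * PIXSYNC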
            time = first_min_ts.reshape((-1, 1))\
                + (scan - first_scan_i.reshape((-1, 1)))\
                * SCANSYNC + pixel * PIXSYNC_i
            # mask wrong times (which occur in test data)
            maxdate = date2num(self.get_end_time(),
                               "microseconds since 2000-01-01T00:00:00Z")
            mindate = date2num(self.get_start_time(),
                               "microseconds since 2000-01-01T00:00:00Z")

            time = ma.masked_where(((time < mindate) | (time > maxdate)),
                                   time,
                                   copy=False)
            return time
        elif fieldname in ['lat', 'lon', 'z']:
            return self.__geofieldlocator[native_name].read_values(
                native_name, slices)
        elif self.__is_oblique(fieldname):
            # the oblique view does not have the same cell dimension as the
            # nadir view; stack fields from both views by padding with fill
            # values
            celldim = None
            dims = self.__fieldlocator[fieldname].get_dimensions(
                native_name)
            empty = False
            if 'cell' in dims:
                dimsizes = [self.get_dimsize(dim) for dim in dims]
                celldim = list(dims).index('cell')
                nadir_slices = cerbere.mapper.slices.get_nice_slices(
                    slices, dimsizes)
                sli = nadir_slices[celldim]
                nad_start, nad_end, step = sli.start, sli.stop, sli.step
                obliquewidth = (
                    self.__fieldlocator[fieldname].get_dimsize('cell'))
                # shift the requested nadir cell range into the narrower
                # oblique grid, clamping it to the oblique swath bounds
                obl_start = max(0, nad_start - self.nadir_to_oblique_offset)
                if obl_start > obliquewidth:
                    obl_start = obliquewidth
                obl_end = max(
                    0,
                    min(obliquewidth,
                        nad_end - self.nadir_to_oblique_offset))
                newslices = list(nadir_slices)
                newslices[celldim] = slice(obl_start, obl_end, step)
                if obl_start >= obl_end:
                    empty = True
            else:
                # case of some fields such as orphan pixels which don't
                # have a cell dimension
                newslices = list(slices) if slices is not None else None

            # read values in oblique view grid
            if celldim is not None and empty:
                values = ma.masked_all(
                    (),
                    dtype=self.__fieldlocator[fieldname].get_handler()
                    .variables[native_name].dtype)
            else:
                values = self.__fieldlocator[fieldname].read_values(
                    self.__fieldtranslate[fieldname], newslices)
            # padding for missing oblique values to match nadir view grid
            if celldim is not None:
                shape = cerbere.mapper.slices.get_shape_from_slice(
                    cerbere.mapper.slices.get_nice_slices(slices, dimsizes))
                padded_values = ma.masked_all(
                    tuple(shape),
                    dtype=values.dtype,
                )
                if values.shape == () or min(list(values.shape)) <= 0:
                    # empty result => return padded values only
                    return padded_values
                padded_slice = []
                for dim in dims:
                    if dim != 'cell':
                        padded_slice.append(slice(None, None, None))
                    else:
                        offset = (self.nadir_to_oblique_offset - nad_start +
                                  obl_start)
                        padded_slice.append(
                            slice(offset, offset + values.shape[celldim]))
                padded_values[tuple(padded_slice)] = values
                return padded_values
            else:
                return values
        else:
            return self.__fieldlocator[fieldname].read_values(
                native_name, slices)

    def read_fillvalue(self, fieldname):
        """Read the fill value of a field.

        Args:
            fieldname (str): name of the field.

        Returns:
            number or char or str: fill value of the field. The type is the
                same as the type of the data in the field.
        """
        return self.__fieldlocator[fieldname].read_fillvalue(fieldname)

    def read_global_attributes(self):
        """Returns the names of the global attributes.

        Returns:
            list<str>: the list of the attribute names.
        """
        # all files seem to have the same list of global attributes.
        return self.__geod_handler.read_global_attributes()

    def read_global_attribute(self, name):
        """Returns the value of a global attribute.

        Args:
            name (str): name of the global attribute.

        Returns:
            str, number or datetime: value of the corresponding attribute.
        """
        # all files seem to have the same list of global attributes.
        return self.__geod_handler.read_global_attribute(name)

    def read_field_attributes(self, fieldname):
        """Return the specific attributes of a field.

        Args:
            fieldname (str): name of the field.

        Returns:
            dict<string, string or number or datetime>: a dictionary where keys
                are the attribute names.
        """
        return self.__fieldlocator[fieldname].read_field_attributes(fieldname)

    def get_start_time(self):
        """Returns the minimum date of the file temporal coverage.

        Returns:
            datetime: start time of the data in file.
        """
        varname = 'time_stamp_%s' % self.__sltype[0]
        vardate = self.__time_handler.get_handler().variables[varname]
        return num2date(vardate[0], vardate.units)

    def get_end_time(self):
        """Returns the maximum date of the file temporal coverage.

        Returns:
            datetime: end time of the data in file.
        """
        # WRONG!!!
        varname = 'time_stamp_%s' % self.__sltype[0]
        vardate = self.__time_handler.get_handler().variables[varname]
        return num2date(vardate[-1], vardate.units)

    def get_bbox(self):
        """Returns the bounding box of the feature, as a tuple.

        Returns:
            tuple: bbox expressed as (lonmin, latmin, lonmax, latmax)
        """
        return None

    def write_global_attributes(self, attrs):
        """Write the global attributes of the file.

        Args:
            attrs (dict<string, string or number or datetime>): a dictionary
                containing the attribute names and values to be written.
        """
        raise NotImplementedError

    def create_field(self, field, dim_translation=None):
        """Creates a new field in the mapper.

        Creates the field structure but does not yet write its values array.

        Args:
            field (Field): the field to be created.

        See also:
            :func:`write_field` for writing the values array.
        """
        raise NotImplementedError

    def create_dim(self, dimname, size=None):
        """Add a new dimension.

        Args:
            dimname (str): name of the dimension.
            size (int): size of the dimension (unlimited if None)
        """
        raise NotImplementedError

    def write_field(self, fieldname):
        """Writes the field data on disk.

        Args:
            fieldname (str): name of the field to write.
        """
        raise NotImplementedError
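
A minimal usage sketch for the mapper above (hedged: the import path, product
directory and field name below are illustrative assumptions, not taken from
the source):

from cerbere.mapper.safeslfile import SAFESLFile

# open an SLSTR SAFE directory with a view restricted to a sub-swath
slstr = SAFESLFile(url='/data/S3A_SL_1_RBT_sample.SEN3', sltype='i')
slstr.open(view={'row': slice(200, 250), 'cell': slice(200, 300)})
print(sorted(slstr.get_fieldnames()))
values = slstr.read_values('S8_BT_in')  # hypothetical field name
slstr.close()
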
def ww3_model_wave(infile,
                   outdir,
                   date=None,
                   max_forecast_hours=None,
                   vmin=0.,
                   vmax=25.4,
                   vmin_pal=0.,
                   vmax_pal=10.,
                   v2=False):
    """
    """
    # Read/Process data
    print('Read/Process data')
    ncfile = NCFile(infile)
    ww3 = {}
    ww3['time'] = ncfile.read_values('time')
    ww3['time_units'] = ncfile.read_field('time').units
    ww3uniqtime = ww3['time'].size == 1
    if ww3uniqtime:  # assume 3h
        ww3['deltatime'] = 180
    else:
        t01 = num2date(np.array(ww3['time'][0:2]), ww3['time_units'])
        ww3['deltatime'] = np.round(
            (t01[1] - t01[0]).total_seconds() / 60.).astype('int')
    if date is not None:
        tind = np.where(ww3['time'] == date2num(date, ww3['time_units']))[0]
        if tind.size == 1:
            tsl = slice(tind[0], tind[0] + 1)
            ww3['time'] = ww3['time'][tsl]
        else:
            raise Exception('Date not found in WW3 file.')
    else:
        tsl = slice(0, ww3['time'].size)
    if max_forecast_hours is not None:
        if ww3['time'].size != 1:
            raise Exception(
                'max_forecast_hours option works with only 1 time.')
        ww3time = num2date(ww3['time'][0], ww3['time_units'])
        if 'date_cycle' not in ncfile.read_global_attributes():
            raise Exception(
                'max_forecast_hours option works with date_cycle attribute.')
        cycletime_str = ncfile.read_global_attribute('date_cycle')
        if 10 == len(cycletime_str):
            cycletime_format = '%Y%m%d%H'
        elif 20 == len(cycletime_str) and cycletime_str.endswith('Z'):
            cycletime_format = '%Y-%m-%dT%H:%M:%SZ'
        else:
            raise Exception(
                'Cycletime format is not supported: {}'.format(cycletime_str))

        cycletime = datetime.strptime(cycletime_str, cycletime_format)
        forecast_hours = (ww3time - cycletime).total_seconds() / 3600.
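        # e.g. ww3time 2015-06-01T12 with date_cycle 2015-06-01T00 gives
        # forecast_hours = 12.0 (hypothetical values)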
        if forecast_hours > max_forecast_hours:
            raise Exception('Exceeds max_forecast_hours.')
    slices = [
        tsl,
        slice(0, ncfile.get_dimsize('latitude')),
        slice(0, ncfile.get_dimsize('longitude'))
    ]
    # slices = [tsl,
    #           slice(ncfile.get_dimsize('latitude')-1, None, -1),
    #           slice(0, 2*ncfile.get_dimsize('longitude'))]
    ncfieldnames = ncfile.get_fieldnames()
    fieldnames = [
        'hs', 'phs0', 'phs1', 'phs2', 'phs3', 'ptp0', 'ptp1', 'ptp2', 'ptp3',
        'pdir0', 'pdir1', 'pdir2', 'pdir3'
    ]
    ww3['source'] = [infile]
    for fieldname in fieldnames:
        if fieldname in ncfieldnames:
            ww3[fieldname] = ncfile.read_values(fieldname,
                                                slices=slices)[:, ::-1, :]
        else:
            infile2 = split_ww3_fname(infile, fieldname)
            if not os.path.exists(infile2):
                infile2 = other_split_ww3_fname(infile, fieldname)
            if os.path.exists(infile2):
                ncfile2 = NCFile(infile2)
                ww3[fieldname] = ncfile2.read_values(fieldname,
                                                     slices=slices)[:, ::-1, :]
                ncfile2.close()
                ww3['source'].append(infile2)
    ww3['npart'] = 0
    for indp in range(4):
        pin = [name + str(indp) in ww3 for name in ['phs', 'ptp', 'pdir']]
        if all(pin):
            ww3['npart'] += 1
        else:
            break
    if ww3['npart'] == 0:
        raise Exception('Could not find all partition variables.')
    ww3['area'] = ncfile.read_global_attribute('area')
    if 'global' in ww3['area'].lower():
        ww3['lon'] = ncfile.read_values('lon')
        ww3['lat'] = ncfile.read_values('lat')[::-1]
        ww3['lon_res'] = float(
            ncfile.read_global_attribute('longitude_resolution'))
        ww3['lat_res'] = float(
            ncfile.read_global_attribute('latitude_resolution'))
    elif ww3['area'] == 'ARCTIC-12km':
        ww3['lon'] = ncfile.read_values('lon')[::-1, :]
        ww3['lat'] = ncfile.read_values('lat')[::-1, :]
    else:
        raise Exception('Not implemented : area = "{}"'.format(ww3['area']))
    if 'run_time' in ncfile.read_global_attributes():
        run_time = ncfile.read_global_attribute('run_time')
        ww3['rundtime'] = datetime.strptime(run_time, '%Y-%m-%dT%H:%M:%SZ')
    ncfile.close()

    # Construct metadata/geolocation/band(s)
    print('Construct metadata/geolocation/band(s)')
    metadata = {}
    #metadata['product_name'] =
    #metadata['name'] =
    #metadata['datetime'] =
    metadata['time_range'] = [
        '-{:d}m'.format(ww3['deltatime'] // 2),
        '+{:d}m'.format(ww3['deltatime'] // 2)
    ]
    metadata['source_URI'] = ww3['source']
    metadata['source_provider'] = ['SHOM', 'Ifremer']
    metadata['processing_center'] = ''
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
    #metadata['parameter'] =
    metadata['type'] = 'model'
    #metadata['model_longitude_resolution'] =
    #metadata['model_latitude_resolution'] =
    if 'rundtime' in ww3:
        metadata['model_analysis_datetime'] = stfmt.format_time(
            ww3['rundtime'])
    geolocation = {}
    if 'global' in ww3['area'].lower():
        metadata['model_longitude_resolution'] = ww3['lon_res']
        metadata['model_latitude_resolution'] = ww3['lat_res']
        geolocation['projection'] = stfmt.format_gdalprojection()
        lon0, dlon = ww3['lon'][0], ww3['lon'][1] - ww3['lon'][0]
        lat0, dlat = ww3['lat'][0], ww3['lat'][1] - ww3['lat'][0]
        geolocation['geotransform'] = [
            lon0 - dlon / 2., dlon, 0, lat0 - dlat / 2., 0, dlat
        ]
        ww3['grid'] = 'GLOBAL'
    elif ww3['area'] == 'ARCTIC-12km':
        import pyproj
        srs = osr.SpatialReference()
        srs.ImportFromEPSG(3411)
        proj = pyproj.Proj(srs.ExportToProj4())
        x01, dummy = proj(ww3['lon'][:, [0, -1]], ww3['lat'][:, [0, -1]])
        x0, x1 = x01.mean(axis=0)
        dx = (x1 - x0) / (ww3['lon'].shape[1] - 1)
        dummy, y01 = proj(ww3['lon'][[0, -1], :], ww3['lat'][[0, -1], :])
        y0, y1 = y01.mean(axis=1)
        dy = (y1 - y0) / (ww3['lon'].shape[0] - 1)
        geolocation['projection'] = srs.ExportToWkt()
        geolocation['geotransform'] = [
            x0 - dx / 2., dx, 0, y0 - dy / 2., 0, dy
        ]
        # geolocation['geotransform'] = [-2600051.73564, 12500.2285676, 0,
        #                                2787547.79214, 0, -12500.2262608]
        ww3['grid'] = 'ARCTIC'
    else:
        raise Exception('Not implemented : area = "{}"'.format(ww3['area']))

    # Loop on time
    for itime in range(ww3['time'].size):
        dtime = num2date(ww3['time'][itime], ww3['time_units'])
        metadata['datetime'] = stfmt.format_time(dtime)
        if len(ww3['source']) == 1:
            basename = os.path.splitext(os.path.basename(infile))[0]
            if not ww3uniqtime:
                _date = dtime.strftime('%Y%m%d')
                _datetime = dtime.strftime('%Y%m%dT%H')
                if _date in basename and _datetime not in basename:
                    basename = basename.replace(_date,
                                                dtime.strftime('%Y%m%dT%HZ'))
                else:
                    raise Exception(
                        'Date not found in basename: {}'.format(basename))
        else:
            basename = 'WW3-' + ww3['grid'] + '-' + dtime.strftime(
                '%Y%m%dT%HZ')
            if 'hindcast' in infile.lower():
                basename = 'HINDCAST_' + basename

        ### Total HS ###
        # Update metadata
        metadata['product_name'] = 'WW3_model_wave_hs'
        if v2:
            metadata['product_name'] += '_v2'
        metadata['name'] = basename + '-hs'
        metadata['parameter'] = 'wave significant height'
        # Make band
        band = []
        hs = ww3['hs'][itime, :, :]
        offset, scale = vmin, (vmax - vmin) / 254.0
        np.clip(hs, vmin, vmax, out=hs)
        array = np.round((hs - offset) / scale).astype('uint8')
        array[hs.mask] = 255
        colortable = stfmt.format_colortable('matplotlib_jet',
                                             vmin=vmin,
                                             vmax=vmax,
                                             vmin_pal=vmin_pal,
                                             vmax_pal=vmax_pal)
        band.append({
            'array': array,
            'scale': scale,
            'offset': offset,
            'description': metadata['parameter'],
            'unittype': 'm',
            'nodatavalue': 255,
            'parameter_range': [vmin, vmax],
            'colortable': colortable
        })
        # Write geotiff
        print('Write geotiff')
        tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
        stfmt.write_geotiff(tifffile, metadata, geolocation, band)

        ### Partitions ###
        phslst = [
            ww3['phs' + str(i)][itime, :, :] for i in range(ww3['npart'])
        ]
        phs = np.ma.dstack(phslst)
        ptplst = [
            ww3['ptp' + str(i)][itime, :, :] for i in range(ww3['npart'])
        ]
        ptp = np.ma.dstack(ptplst)
        pdirlst = [
            ww3['pdir' + str(i)][itime, :, :] for i in range(ww3['npart'])
        ]
        pdir = np.ma.dstack(pdirlst)
        # Reorder partitions by HS -> Keep WW3 order
        # phs.data[phs.mask] = -1000 # make sure masked values don't interfere with sorting
        # index = np.ogrid[:phs.shape[0], :phs.shape[1], :phs.shape[2]]
        # index[2] = (-phs).argsort(axis=2, kind='mergesort')
        # phs = phs[index]
        # ptp = ptp[index]
        # pdir = pdir[index]
        # pdir from_direction -> to_direction
        pdir = np.mod(pdir + 180.0, 360.0)
        # pdir clockwise from north -> counter clockwise from east
        pdir = np.mod(90.0 - pdir, 360.0)
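        # e.g. waves coming from the north (0 deg, meteorological "from"
        # convention) travel toward the south: 180 deg clockwise from north,
        # i.e. 270 deg counter-clockwise from east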
        # Write each partition in a geotiff
        for i in range(ww3['npart']):
            # Update metadata
            lpartnum = 'partition ' + str(i)
            spartnum = 'part' + str(i)
            metadata['product_name'] = 'WW3_model_wave_' + spartnum
            if v2:
                metadata['product_name'] += '_v2'
            metadata['name'] = basename + '-' + spartnum
            metadata['parameter'] = [
                'wave significant height ' + lpartnum,
                'wave peak period ' + lpartnum,
                'wave mean direction ' + lpartnum
            ]
            # Make bands
            band = []
            iphs, iptp, ipdir = phs[:, :, i], ptp[:, :, i], pdir[:, :, i]
            # HS
            #_vmin, _vmax = 0.0, 25.4
            offset, scale = vmin, (vmax - vmin) / 254.0
            np.clip(iphs, vmin, vmax, out=iphs)
            array = np.round((iphs - offset) / scale).astype('uint8')
            array[iphs.mask] = 255
            band.append({
                'array': array,
                'scale': scale,
                'offset': offset,
                'description': metadata['parameter'][0],
                'unittype': 'm',
                'nodatavalue': 255,
                'parameter_range': [vmin, vmax]
            })
            # Period
            _vmin, _vmax = 0.0, 25.4
            offset, scale = _vmin, (_vmax - _vmin) / 254.0
            np.clip(iptp, _vmin, _vmax, out=iptp)
            array = np.round((iptp - offset) / scale).astype('uint8')
            array[iptp.mask] = 255
            band.append({
                'array': array,
                'scale': scale,
                'offset': offset,
                'description': metadata['parameter'][1],
                'unittype': 's',
                'nodatavalue': 255,
                'parameter_range': [_vmin, _vmax]
            })
            # Direction
            _vmin, _vmax = 0.0, 360.0
            offset, scale = _vmin, (_vmax - _vmin) / 254.0
            np.clip(ipdir, _vmin, _vmax, out=ipdir)
            array = np.round((ipdir - offset) / scale).astype('uint8')
            array[ipdir.mask] = 255
            band.append({
                'array': array,
                'scale': scale,
                'offset': offset,
                'description': metadata['parameter'][2],
                'unittype': 'degree',
                'nodatavalue': 255,
                'parameter_range': [_vmin, _vmax]
            })
            # Write geotiff
            print('Write geotiff')
            tifffile = stfmt.format_tifffilename(outdir,
                                                 metadata,
                                                 create_dir=True)
            stfmt.write_geotiff(tifffile,
                                metadata,
                                geolocation,
                                band,
                                drv_opts=['PHOTOMETRIC=MINISBLACK'])
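
The bands above pack physical values into uint8 as
byte = round((value - offset) / scale), with 255 reserved for nodata;
decoding is value = byte * scale + offset. A self-contained sketch of that
round trip (illustrative data, not from the source):

import numpy as np

vmin, vmax = 0.0, 25.4
offset, scale = vmin, (vmax - vmin) / 254.0   # 254 valid levels, 255 = nodata
hs = np.ma.array([0.0, 1.7, 30.0, 5.0], mask=[False, False, False, True])
hs = np.clip(hs, vmin, vmax)
packed = np.round((hs - offset) / scale).astype('uint8')
packed[np.ma.getmaskarray(hs)] = 255          # masked pixels become nodata
decoded = packed[packed != 255] * scale + offset
# decoded == [0.0, 1.7, 25.4]; quantization error is at most scale / 2
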
Example n. 16
0
def bathymetry_gebco(infile,
                     outdir,
                     vmin=-6000,
                     vmax=0,
                     vmin_pal=-6000.,
                     vmax_pal=0.):
    """
    """
    # Read/Process data
    print('Read/Process data')
    ncfile = NCFile(infile)
    bat = ncfile.read_values('elevation')[:, :]
    bat = bat.astype('float32')
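    # quantize negative elevations onto discrete depth levels so the
    # colortable renders uniform depth classes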
    bat[(bat < 0) & (bat >= -25)] = -25
    bat[(bat < -25) & (bat >= -50)] = -50
    bat[(bat < -50) & (bat >= -100)] = -100
    bat[(bat < -100) & (bat >= -500)] = -500
    bat[(bat < -500) & (bat >= -1000)] = -1000
    bat[(bat < -1000) & (bat >= -2000)] = -2000
    bat[(bat < -2000) & (bat >= -3000)] = -3000
    bat[(bat < -3000) & (bat >= -4000)] = -4000
    bat[(bat < -4000) & (bat >= -5000)] = -5000
    bat[(bat < -5000) & (bat >= -6000)] = -6000
    bat[(bat < -6000) & (bat >= -10000)] = -10000
    mask = (bat >= 0)
    offset, scale = vmin, (vmax - vmin) / 254.
    np.clip(bat, vmin, vmax, out=bat)
    bat -= offset
    bat /= scale
    bat = np.round(bat).astype('uint8')
    lon = ncfile.read_values('lon')[:2]
    lat = ncfile.read_values('lat')[:2]
    lon0 = lon[0]
    dlon = lon[-1] - lon[0]
    lat0 = lat[0]
    dlat = lat[-1] - lat[0]

    # Construct metadata/geolocation/band(s)
    print('Construct metadata/geolocation/band(s)')
    metadata = {}
    metadata['product_name'] = 'GEBCO bathymetry'
    metadata['name'] = os.path.splitext(os.path.basename(infile))[0]
    metadata['datetime'] = stfmt.format_time(datetime(2012, 1, 1))
    metadata['time_range'] = ['-3660d', '+3660d']
    metadata['source_URI'] = infile
    metadata['source_provider'] = 'GEBCO'
    metadata['processing_center'] = ''
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
    metadata['parameter'] = 'bathymetry'
    metadata['type'] = 'remote sensing'
    metadata['longitude_resolution'] = abs(dlon)
    metadata['latitude_resolution'] = abs(dlat)
    geolocation = {}
    geolocation['projection'] = stfmt.format_gdalprojection()
    geolocation['geotransform'] = [
        lon0 - dlon / 2., dlon, 0, lat0 - dlat / 2., 0, dlat
    ]
    band = []
    bat[mask] = 255
    colortable = stfmt.format_colortable('ibcso',
                                         vmax=vmax,
                                         vmax_pal=vmax_pal,
                                         vmin=vmin,
                                         vmin_pal=vmin_pal)

    band.append({
        'array': bat,
        'scale': scale,
        'offset': offset,
        'description': 'bathymetry',
        'unittype': 'm',
        'nodatavalue': 255,
        'parameter_range': [vmin, vmax],
        'colortable': colortable
    })

    # Write geotiff
    print('Write geotiff')
    tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
    stfmt.write_geotiff(tifffile, metadata, geolocation, band)
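
The GDAL-style geotransform used above, [x0, dx, 0, y0, 0, dy], places
(x0, y0) at the outer corner of the first pixel, hence the half-cell shifts
lon0 - dlon / 2. and lat0 - dlat / 2. A small sketch of the
index-to-coordinate mapping this implies:

# map a pixel (i=row, j=column) to the geographic coordinates of its center,
# for a north-up geotransform gt = [x0, dx, 0, y0, 0, dy]
def pixel_center(gt, i, j):
    lon = gt[0] + (j + 0.5) * gt[1]
    lat = gt[3] + (i + 0.5) * gt[5]
    return lon, lat

# with the geotransform built above, pixel_center(gt, 0, 0) == (lon0, lat0)
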
Example n. 17
0
import os
import sys
import numpy
import netCDF4
from cerbere.mapper.ncfile import NCFile
from matplotlib import pyplot as plt
if __name__ == '__main__':
    filou = "/home/cerdata/provider/neodc/l4/esacci_sst/2010/03/02/20100302120000-ESACCI-L4_GHRSST-SSTdepth-OSTIA-GLOB_LT-v02.0-fv01.0.nc"
    print(filou)
    nc = NCFile(filou)
    filout = '/tmp/ostia_sst.png'
    filout2 = '/tmp/ostia_sst2.png'
    sst = nc.read_values('analysed_sst')
    sst = numpy.squeeze(sst)
    print(sst.shape)
    nc.close()
    
    nc = netCDF4.Dataset(filou)
    sst2 = nc.variables['analysed_sst'][:]
    sst2 = numpy.squeeze(sst2)
    print(sst2.shape)
    nc.close()
    
    #plot
    plt.figure()
    plt.pcolor(sst[200:800,100:600])
    plt.colorbar()
    plt.savefig(filout)
    #plot
    plt.figure()
    plt.pcolor(sst2[200:800,100:600])
    plt.colorbar()
    plt.savefig(filout2)
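
Both reads target the same variable, so a quick consistency check can be
appended (a sketch reusing the arrays read above):

    # the cerbere mapper and the direct netCDF4 read should agree
    print(numpy.ma.allclose(sst[200:800,100:600], sst2[200:800,100:600]))
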
Example n. 18
0
class SAFESLFile(AbstractMapper):
    """Abstract class for SAFE SLSTR files (except L2P).
    """
    def __init__(self, url=None, sltype=None, mode=READ_ONLY, **kwargs):
        if mode != READ_ONLY:
            raise Exception("This mapper can only be used in read_only mode.")
        if sltype not in ['i', 'c', 'a', 'b']:
            raise Exception("Unknown SLSTR product type")
        super(SAFESLFile, self).__init__(url=url, mode=mode, **kwargs)
        self.__sltype = sltype
        self.__data_handlers = []
        self.__oblique_fields = []
        # coordinate files
        geodetic = "geodetic_%sn.nc" % sltype
        times = "time_%sn.nc" % sltype
        coordinate_files = [geodetic, times]
        # detect the data files and instantiate mappers for each one
        datafiles = []
        for fname in glob.glob(os.path.join(url, "*_%s[no].nc" % sltype)):
            if os.path.basename(fname) not in coordinate_files:
                datafiles.append(os.path.basename(fname))
        for f in datafiles:
            fname = os.path.join(url, f)
            self.__data_handlers.append(NCFile(url=fname, mode=mode, **kwargs))
        # instantiate mappers for each coordinate file
        self.__geod_handler = NCFile(os.path.join(url, geodetic),
                                     mode=mode, **kwargs)
        self.__time_handler = NCFile(os.path.join(url, times),
                                     mode=mode, **kwargs)
        self.__fieldlocator = {}
        self.__geofieldlocator = {}
        self.__fieldtranslate = {}
        # offset between nadir and oblique swath edge
        self.nadir_to_oblique_offset = None

    def __is_oblique(self, fieldname):
        """Test if a field corresponds to an oblique view subproduct.

        Returns:
            bool: True if a field corresponds to an oblique view
        """
        return (fieldname in self.__oblique_fields)

    def open(self,
             view=None,
             datamodel=None,
             datamodel_geolocation_dims=None):
        """
        Args:
            view (dict, optional): a dictionary where keys are dimension names
                and values are slices. A view can be set on a file, meaning
                that only the subset defined by this view will be accessible.
                This view is expressed as any subset (see :func:`get_values`).
                For example::

                    view = {'row': slice(200, 250), 'cell': slice(200, 300)}

            datamodel (str): type of feature read or written. Internal argument
                only used by the classes from :mod:`~cerbere.datamodel`
                package. Can be 'Grid', 'Swath', etc...

            datamodel_geolocation_dims (list, optional): list of the name of
                the geolocation dimensions defining the data model to be read
                in the file. Optional argument, only used by the datamodel
                classes, in case the mapper class can store different types of
                data models.

        Returns:
            a handler on the opened file
        """
        # open each related file in the SAFE repo
        if view is None:
            rowview = None
        else:
            rowview = {'row': view['row']}
            # modify view for oblique fields which are narrower
            if 'cell' in view:
                # copy the view so the shifted 'cell' slice does not leak
                # into the nadir and geodetic handlers
                obliqueview = dict(view)
                obliqueview['cell'] = (
                    view['cell'] - self.nadir_to_oblique_offset)
        for hdlr in self.__data_handlers:
            f = os.path.basename(hdlr.get_url())
            is_oblique = (f[-4] == 'o')
            newview = view
            if is_oblique and view is not None:
                newview = obliqueview
            hdlr.open(newview, datamodel,
                      datamodel_geolocation_dims)
        self.__geod_handler.open(view, datamodel,
                                 datamodel_geolocation_dims)
        self.__time_handler.open(rowview, datamodel,
                                 datamodel_geolocation_dims)
        # build the two-way dictionaries of fields
        # ...for data
        for hdlr in self.__data_handlers:
            f = os.path.basename(hdlr.get_url())
            is_oblique = (f[-4] == 'o')
            for fieldname in hdlr.get_fieldnames():
                ncvar = hdlr.get_handler().variables[fieldname]
                if 'long_name' in (ncvar.ncattrs()):
                    longname = ncvar.long_name
                    newfieldname = self.__get_fieldname(fieldname,
                                                        longname)
                else:
                    newfieldname = fieldname
                self.__fieldtranslate[newfieldname] = fieldname
                self.__fieldlocator[newfieldname] = hdlr
                if is_oblique:
                    self.__oblique_fields.append(newfieldname)
        # ...for geodetic coordinates
        for fieldname in self.__geod_handler.get_fieldnames():
            self.__geofieldlocator[fieldname] = self.__geod_handler
        # define nadir/oblique offset
        nadir_track_offset = (
            self.__geofieldlocator['latitude_orphan_%sn' % self.__sltype]
            .read_global_attribute('track_offset'))
        oblique_track_offset = (
            self.__fieldlocator['latitude_orphan_%so' % self.__sltype]
            .read_global_attribute('track_offset'))
        self.nadir_to_oblique_offset = int(round(
            nadir_track_offset - oblique_track_offset))

    def close(self):
        """Close handler on storage"""
        for hdlr in self.__data_handlers:
            hdlr.close()
        self.__data_handlers = None
        self.__geod_handler.close()
        self.__time_handler.close()
        self.__geod_handler = None
        self.__time_handler = None

    def get_dimsize(self, dimname):
        """Return the size of a dimension.

        Args:
            dimname (str): name of the dimension.

        Returns:
            int: size of the dimension.
        """
        dim = self.get_matching_dimname(dimname)
        return self.__geod_handler.get_dimsize(dim)

    def get_dimensions(self, fieldname=None):
        """Return the dimension names of a file or a field in the
        file. For temporal and spatial dimensions, the cerbere standard names
        are returned.

        Args:
            fieldname (str): the name of the field from which to get the
                dimensions. For a geolocation field, use the cerbere standard
                name (time, lat, lon), though native field name will work too.

        Returns:
            tuple<str>: the standard dimensions of the field or file.
        """
        if fieldname is None:
            dims = self.__geod_handler.get_dimensions()
            if 'orphan_pixels' in dims:
                # same dimension name (but not size) in oblique/nadir views so
                # we have to create two dimensions since we merge the two views
                newdims = []
                for dim in dims:
                    if dim != 'orphan_pixels':
                        newdims.append(dim)
                    else:
                        newdims.extend([dim + '_n', dim + '_o'])
                return newdims
            return dims
        if fieldname in ['time', 'lat', 'lon', 'z']:
            # Should all have the same dimension as lat
            native_fieldname = self.get_geolocation_field('lat')
            dims = self.__geod_handler.get_dimensions(native_fieldname)
        else:
            handler = self.__fieldlocator[fieldname]
            dims = handler.get_dimensions(
                self.__get_native_fieldname(fieldname))
        # convert geolocation dims to standard names
        newdims = []
        for dim in list(dims):
            if self.__is_oblique(fieldname):
                view = 'o'
            else:
                view = 'n'
            newdims.append(self.get_standard_dimname(dim, view))
        return tuple(newdims)

    def get_matching_dimname(self, dimname):
        """Return the equivalent name in the native format for a standard
        dimension.

        This is a translation of the standard names to native ones. It is used
        for internal purpose only and should not be called directly.

        The standard dimension names are:

        * x, y, time for :class:`~cerbere.datamodel.grid.Grid`
        * row, cell, time for :class:`~cerbere.datamodel.swath.Swath` or
          :class:`~cerbere.datamodel.image.Image`

        To be derived when creating an inherited data mapper class. This is
        mandatory for geolocation dimensions which must be standard.

        Args:
            dimname (str): standard dimension name.

        Returns:
            str: return the native name for the dimension. Return `dimname` if
                the input dimension has no standard name.

        See Also:
            see :func:`get_standard_dimname` for the reverse operation
        """
        matching = {'time': 'time', 'row': 'rows', 'cell': 'columns',
                    'z': 'elevation'}
        # remove the oblique/nadir suffix
        if dimname[-2:] in ['_n', '_o']:
            dimname = dimname[:-2]
        if dimname in matching:
            return matching[dimname]
        return dimname

    def get_standard_dimname(self, dimname, view=None):
        """
        Returns the equivalent standard dimension name for a
        dimension in the native format.

        This is a translation of the native names to standard ones. It is used
        for internal purpose and should not be called directly.

        To be derived when creating an inherited data mapper class. This is
        mandatory for geolocation dimensions which must be standard.

        Args:
            dimname (string): native dimension name

        Return:
            str: the (translated) standard name for the dimension. Return
            `dimname` if the input dimension has no standard name.

        See Also:
            see :func:`get_matching_dimname` for the reverse operation
        """
        matching = {'time': 'time', 'rows': 'row', 'columns': 'cell',
                    'elevation': 'z'}
        if dimname in matching:
            return matching[dimname]
        # for other dimensions, add the oblique/nadir suffix
        if dimname in ['row', 'cell', 'time', 'z']:
            return dimname
        if view == 'n':
            dimname += '_n'
        elif view == 'o':
            dimname += '_o'
        return dimname

    def get_fieldnames(self):
        """Returns the list of geophysical fields stored for the feature.

        The geolocation field names are excluded from this list.

        Returns:
            list<string>: list of field names
        """
        return self.__fieldlocator.keys()

    def __get_native_fieldname(self, fieldname):
        """Returns the native name of a field.

        Args:
            fieldname (str): name of the field.

        Returns:
            str: the native name of the field. The same as input
                if the field is not a geolocation field.
        """
        if fieldname in ['lat', 'lon', 'time', 'z']:
            return self.get_geolocation_field(fieldname)
        if fieldname in self.__fieldtranslate:
            return self.__fieldtranslate[fieldname]
        return fieldname

    def __get_fieldname(self, fieldname, longname):
        """Returns a unique field name built from the long name for ambiguous
        field names.

        Used because some sub files use the same variable names.

        Args:
            fieldname (str): field name to replace with a new name, if not
                unique.
            longname (str): longname from which to build a new unique field
                name
        Returns:
            str: a unique field name among all the files in a SAFE container.
        """
        if fieldname in ["SST", "SST_uncertainty", "exception"]:
            return longname.replace(' ', '_')
        return fieldname

    def get_geolocation_field(self, fieldname):
        """Return the equivalent field name in the file format for a standard
        geolocation field (lat, lon, time, z).

        Used for internal purpose and should not be called directly.

        Args:
            fieldname (str): name of the standard geolocation field (lat, lon
                or time)

        Return:
            str: name of the corresponding field in the native file format.
                Returns None if no matching is found
        """
        if fieldname == 'time':
            return 'time'
        matching = {'lat': 'latitude', 'lon': 'longitude', 'time': 'time',
                    'z': 'elevation'}[fieldname]
        native_fieldname = matching + '_%sn' % self.__sltype
        return native_fieldname

    def read_field(self, fieldname):
        """
        Return the :class:`cerbere.field.Field` object corresponding to
        the requested fieldname.

        The :class:`cerbere.field.Field` class contains all the metadata
        describing a field (equivalent to a variable in netCDF).

        Args:
            fieldname (str): name of the field

        Returns:
            :class:`cerbere.field.Field`: the corresponding field object
        """
        if fieldname == 'time':
            rows = self.get_dimsize('row')
            cols = self.get_dimsize('cell')
            variable = Variable(
                shortname='time',
                description='time of measurement',
                authority=None,
                standardname=None
                )
            field = Field(
                variable,
                OrderedDict([('row', rows), ('cell', cols)]),
                datatype=dtype(int64)
                )
            field.attach_storage(self.get_field_handler(fieldname))
            field.units = self.__time_handler.get_handler().\
                variables['time_stamp_%s' % self.__sltype[0]].units
            return field
        elif fieldname in ['lat', 'lon', 'z']:
            native_name = self.get_geolocation_field(fieldname)
            geofield = self.__geofieldlocator[native_name].read_field(
                native_name
                )
            geofield.name = fieldname
            return geofield
        else:
            native_name = self.__get_native_fieldname(fieldname)
            field = self.__fieldlocator[fieldname].read_field(native_name)
            field.name = fieldname
            field.attach_storage(self.get_field_handler(fieldname))
            dims = field.dimensions
            renamed_dims = OrderedDict()
            for dim in dims:
                newdim = dim
                if dim not in ['row', 'cell', 'time', 'z']:
                    if self.__is_oblique(fieldname):
                        newdim += '_o'
                    else:
                        newdim += '_n'
                renamed_dims[newdim] = dims[dim]
            field.dimensions = renamed_dims
            # for oblique view, the swath is narrower. It is padded with
            # dummy values to stack it over nadir view fields
            if fieldname in self.__oblique_fields:
                if 'cell' in field.get_dimnames():
                    field.dimensions['cell'] = self.get_dimsize('cell')
            return field

    def read_values(self, fieldname, slices=None):
        """Read the data of a field.

        Args:
            fieldname (str): name of the field which to read the data from

            slices (list of slice, optional): list of slices for the field if
                subsetting is requested. A slice must then be provided for each
                field dimension. The slices are relative to the opened view
                (see :func:open) if a view was set when opening the file.

        Return:
            MaskedArray: array of data read. Array type is the same as the
                storage type.
        """
        native_name = self.__get_native_fieldname(fieldname)
        rowslice = None
        if slices is not None:
            rowslice = [slices[0]]
        if fieldname == 'time':
            suffix = self.__sltype
            SCANSYNC = self.__time_handler.read_values('SCANSYNC')[0]
            PIXSYNC_i = self.__time_handler.read_values(
                'PIXSYNC_%s' % suffix)[0]
            prefix = PREFIX['n']
            first_scan_i\
                = self.__time_handler.read_values(
                    '%s_First_scan_%s' % (prefix, suffix),
                    slices=rowslice)
            first_min_ts\
                = self.__time_handler.read_values(
                    '%s_Minimal_ts_%s' % (prefix, suffix),
                    slices=rowslice)
            scanfield = 'scan_%sn' % suffix
            pixelfield = 'pixel_%sn' % suffix
            indices_handler = self.__fieldlocator[scanfield]
            scan = indices_handler.read_values(scanfield,
                                               slices=slices)
            pixel = indices_handler.read_values(pixelfield,
                                                slices=slices)
            time = first_min_ts.reshape((-1, 1))\
                + (scan - first_scan_i.reshape((-1, 1)))\
                * SCANSYNC + pixel * PIXSYNC_i
            # mask wrong times (which occur in test data)
            maxdate = date2num(self.get_end_time(),
                               "microseconds since 2000-01-01T00:00:00Z")
            mindate = date2num(self.get_start_time(),
                               "microseconds since 2000-01-01T00:00:00Z")

            time = ma.masked_where(
                ((time < mindate) | (time > maxdate)),
                time,
                copy=False
                )
            return time
        elif fieldname in ['lat', 'lon', 'z']:
            return self.__geofieldlocator[native_name].read_values(native_name,
                                                                   slices)
        elif self.__is_oblique(fieldname):
            # the oblique view does not have the same cell dimension as the
            # nadir view; stack fields from both views by padding with fill
            # values
            celldim = None
            try:
                dims = self.__fieldlocator[fieldname].get_dimensions(
                    native_name
                    )
                empty = False
                if 'cell' in dims:
                    dimsizes = [self.get_dimsize(dim) for dim in dims]
                    celldim = list(dims).index('cell')
                    nadir_slices = cerbere.mapper.slices.get_nice_slices(
                        slices,
                        dimsizes)
                    sli = nadir_slices[celldim]
                    nad_start, nad_end, step = sli.start, sli.stop, sli.step
                    obliquewidth = (self.__fieldlocator[fieldname]
                                    .get_dimsize('cell'))
                    obl_start, obl_end = nad_start, nad_end
                    obl_start = max(0,
                                    obl_start - self.nadir_to_oblique_offset)
                    if obl_start > obliquewidth:
                        obl_start = obliquewidth
                    obl_end = max(0,
                                  min(obliquewidth,
                                      obl_end - self.nadir_to_oblique_offset)
                                  )
                    newslices = list(nadir_slices)
                    newslices[celldim] = slice(obl_start, obl_end, step)
                    if obl_start >= obl_end:
                        empty = True
                else:
                    # case of some fields such as orphan pixels which don't
                    # have a cell dimension
                    newslices = list(slices) if slices is not None else None
            except ValueError:
                # dimension lookup failed for this field; propagate the
                # error to the caller
                raise

            # read values in oblique view grid
            if celldim is not None and empty:
                values = ma.masked_all(
                    (),
                    dtype=self.__fieldlocator[fieldname]._handler.variables[native_name].dtype)
            else:
                values = self.__fieldlocator[fieldname].read_values(
                    self.__fieldtranslate[fieldname],
                    newslices)
            # padding for missing oblique values to match nadir view grid
            if celldim is not None:
                shape = cerbere.mapper.slices.get_shape_from_slice(
                    cerbere.mapper.slices.get_nice_slices(slices, dimsizes))
                padded_values = ma.masked_all(
                    tuple(shape),
                    dtype=values.dtype,
                    )
                if values.shape == () or min(list(values.shape)) <= 0:
                    # empty result => return padded values only
                    return padded_values
                padded_slice = []
                for dim in dims:
                    if dim != 'cell':
                        padded_slice.append(slice(None, None, None))
                    else:
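                        # destination of the oblique block within the
                        # padded, nadir-shaped array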
                        offset = (self.nadir_to_oblique_offset -
                                  nad_start + obl_start)
                        padded_slice.append(slice(
                            offset,
                            offset + values.shape[celldim]
                        ))
                padded_values[tuple(padded_slice)] = values
                return padded_values
            else:
                return values
        else:
            return self.__fieldlocator[fieldname].read_values(native_name,
                                                              slices)

    def read_fillvalue(self, fieldname):
        """Read the fill value of a field.

        Args:
            fieldname (str): name of the field.

        Returns:
            number or char or str: fill value of the field. The type is the
                same as the type of the data in the field.
        """
        return self.__fieldlocator[fieldname].read_fillvalue(fieldname)

    def read_global_attributes(self):
        """Returns the names of the global attributes.

        Returns:
            list<str>: the list of the attribute names.
        """
        # all files seem to have the same list of global attributes.
        return self.__geod_handler.read_global_attributes()

    def read_global_attribute(self, name):
        """Returns the value of a global attribute.

        Args:
            name (str): name of the global attribute.

        Returns:
            str, number or datetime: value of the corresponding attribute.
        """
        # all files seem to have the same list of global attributes.
        return self.__geod_handler.read_global_attribute(name)

    def read_field_attributes(self, fieldname):
        """Return the specific attributes of a field.

        Args:
            fieldname (str): name of the field.

        Returns:
            dict<string, string or number or datetime>: a dictionary where keys
                are the attribute names.
        """
        return self.__fieldlocator[fieldname].read_field_attributes(fieldname)

    def get_start_time(self):
        """Returns the minimum date of the file temporal coverage.

        Returns:
            datetime: start time of the data in file.
        """
        varname = 'time_stamp_%s' % self.__sltype[0]
        vardate = self.__time_handler.get_handler().variables[varname]
        return num2date(vardate[0], vardate.units)

    def get_end_time(self):
        """Returns the maximum date of the file temporal coverage.

        Returns:
            datetime: end time of the data in file.
        """
        # FIXME: this returns the last recorded time stamp, which may not
        # match the true end of the temporal coverage
        varname = 'time_stamp_%s' % self.__sltype[0]
        vardate = self.__time_handler.get_handler().variables[varname]
        return num2date(vardate[-1], vardate.units)

    def get_bbox(self):
        """Returns the bounding box of the feature, as a tuple.

        Returns:
            tuple: bbox expressed as (lonmin, latmin, lonmax, latmax)
        """
        return None

    def write_global_attributes(self, attrs):
        """Write the global attributes of the file.

        Args:
            attrs (dict<string, string or number or datetime>): a dictionary
                containing the attributes names and values to be written.
        """
        raise NotImplementedError

    def create_field(self, field, dim_translation=None):
        """Creates a new field in the mapper.

        Creates the field structure but does not yet write its values array.

        Args:
            field (Field): the field to be created.

        See also:
            :func:`write_field` for writing the values array.
        """
        raise NotImplementedError

    def create_dim(self, dimname, size=None):
        """Add a new dimension.

        Args:
            dimname (str): name of the dimension.
            size (int): size of the dimension (unlimited if None)
        """
        raise NotImplementedError

    def write_field(self, fieldname):
        """Writes the field data on disk.

        Args:
            fieldname (str): name of the field to write.
        """
        raise NotImplementedError
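
# A minimal usage sketch (hypothetical product path; SAFESLFile and its
# per-pixel 'time' reconstruction are defined above):
#
#   slf = SAFESLFile(url='/path/to/S3A_SL_1_RBT_product.SEN3', sltype='i')
#   times = slf.read_values('time', slices=[slice(0, 10), slice(None)])
#
# The 'time' values are rebuilt from the scan and pixel indices rather than
# read from a single netCDF variable.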
'''
Created on 18 June 2012

Test reading/displaying time series file

@author: jfpiolle
'''
import logging

import matplotlib.pyplot as plt
import matplotlib.colors

from cerbere.datamodel.pointtimeseries import PointTimeSeries
from cerbere.mapper.ncfile import NCFile

logging.basicConfig(level=logging.DEBUG,
                    format='%(asctime)s %(levelname)-5s %(message)s',
                    datefmt='%d/%m/%Y %I:%M:%S')


ff = '/home/cercache/project/globwave/data/globwave/oceansites/ocean_temperature_sensor/2015/05/WMO61280_20150501T0000_20150515T0100_Lat_40.69N_Lon_1.48E.nc'
ncf = NCFile(url=ff)
traj = PointTimeSeries()
traj.load(ncf)


#val = traj.getData('greyscale_threshold')
traj.display_timeseries('sea_surface_temperature', palette=plt.cm.gray,
                        pretty=True, range=[273, 305, 0.25],
                        output='toto.png')
import os

# NOTE: this snippet assumes RegularGriddedModel, Trajectory, NCFile,
# NAMING_CONVENTION, STANDARD_NAME and UNITS are imported or defined
# elsewhere in the original module.


def add_swh_era(feature_file, outpath, linear=False, suffix=None):
    """Append remapped ERA5 model fields to a feature file.

    :param feature_file: (str) path of the file to which the new fields are
        added
    :param outpath: (str) output directory when writing to a new file
    :param linear: (bool) use linear interpolation instead of closest
        neighbour when remapping
    :param suffix: (str) if set, write the fields to a new file named with
        this suffix instead of appending to the input file
    :return: None
    """
    # ERA5 model definition
    model = RegularGriddedModel('/home/ref-ecmwf/ERA5/',
                                '%Y/%m/era_5-copernicus__%Y%m%d.nc',
                                1,
                                0.50,
                                'ERA5025NCDataset',
                                -90,
                                0,
                                720,
                                361,
                                modelfeature='CylindricalGridTimeSeries')
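    # the positional arguments above presumably encode the model time step,
    # grid resolution, reader name and grid origin/size; this is an
    # assumption, as RegularGriddedModel's signature is not shown here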
    # open the feature file read-only when the fields are written to a new
    # file, in append mode otherwise
    if suffix is not None:
        mode = 'r'
    else:
        mode = 'r+'

    # feature = cerbere.open_as_feature(
    #     options.feature, feature_file, options.reader, mode=mode
    # )
    hh = NCFile(url=feature_file, mode=mode)
    feature = Trajectory()
    feature.load(hh)

    # remap model fields
    intmode = 'closest'
    if linear:
        intmode = 'linear'
    fieldnames = NAMING_CONVENTION.keys()
    fields = model.get_model_fields(feature, fieldnames, mode=intmode)

    # add metadata
    for fieldname, field in fields.items():
        newname = NAMING_CONVENTION[fieldname]
        field.name = newname
        field.standardname = STANDARD_NAME[fieldname]
        if field.units in UNITS:
            field.units = UNITS[field.units]
        field.attrs['source'] = "Copernicus ERA5 Reanalysis by ECMWF"

    # add to current feature
    if suffix is None:
        for fieldname, field in fields.items():
            if field.name in feature.fieldnames:
                raise Exception('%s field already in file' % fieldname)

            feature.add_field(field)

    # or create a new file with same structure
    else:
        basef, ext = os.path.splitext(os.path.basename(feature_file))
        if outpath is None:
            outpath = os.path.dirname(feature_file)
        auxf = os.path.join(outpath, basef + suffix + '.nc')
        if os.path.exists(auxf):
            os.remove(auxf)
        print("Save to: {}".format(auxf))

        # create empty feature
        auxfeature = feature.extract(fields=[])

        for fieldname, field in fields.items():
            auxfeature.add_field(field)

            # coordinates attribute
            exfield = feature.get_field(feature.fieldnames[0])
            if 'coordinates' in exfield.attrs:
                field.attrs['coordinates'] = \
                    exfield.attrs['coordinates']

        # save
        auxfeature.save(auxf)

    feature.close()
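
# A minimal usage sketch (hypothetical input file; writes the remapped ERA5
# fields to a new '<name>_era5.nc' file next to the input):
#
#   add_swh_era('/path/to/trajectory.nc', outpath=None, linear=True,
#               suffix='_era5')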