def __init__(self, file_name):

        # ~~> empty Selafin
        Selafin.__init__(self, '')
        self.datetime = []

        # ~~> variables
        self.title = ''
        self.nbv1 = 1
        self.nvar = self.nbv1
        self.varindex = range(self.nvar)
        self.varnames = ['BOTTOM          ']
        self.varunits = ['M               ']

        # ~~ Opening files ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        self.fle = {}
        self.fle.update({'name':file_name})
        # "<" means little-endian, ">" means big-endian
        self.fle.update({'endian':">"})
        self.fle.update({'integer':('i', 4)}) #'i' size 4
        self.fle.update({'float': ('f', 4)}) #'f' size 4, 'd' = size 8
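        # e.g. unpack('>i', buf) reads one big-endian 4-byte integer; the
        # 'endian', 'integer' and 'float' entries above are presumably
        # combined into such struct format strings by the Selafin I/O routines.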
        self.fle.update({'hook':open(file_name, 'r')})
        fle = iter(self.fle['hook'])

        # ~~ Read/Write dimensions ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        # Note:
        #    The section MeshFormat is mandatory
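        # A typical ASCII MSH 2.2 header (illustration) is:
        #   $MeshFormat
        #   2.2 0 8
        #   $EndMeshFormat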
        line = fle.next()
        proc = re.match(self.frst_keys, line)
        if proc:
            if proc.group('key') != "MeshFormat":
                raise TelemacException(\
                    '... Could not recognise your MSH file format. '
                    'Missing MeshFormat key.')
            line = fle.next().split()
            if line[0] != "2.2":
                raise TelemacException(\
                    '... Could not read your MSH file format. '
                    'Only the version 2.2 is allowed.')
            file_type = int(line[1])
            if file_type == 1:
                print('... I have never done this before. Do check it works')
                # binary MSH: consume the endianness-check bytes that follow
                # the format line (read three 4-byte integers from the file)
                _, _, _ = unpack('>iii', self.fle['hook'].read(4 + 4 + 4))
            float_size = int(line[2])
            if float_size == 8:
                self.fle['float'] = ('d', 8)
        line = fle.next()
        proc = re.match(self.last_keys, line)
        if proc:
            if proc.group('key') != "MeshFormat":
                raise TelemacException(\
                        '... Could not complete reading the header of your MSH '
                        'file format. Missing EndMeshFormat key.')

        # ~~ Loop on sections ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        while True:
            try:
                line = fle.next()
            except StopIteration:
                break
            proc = re.match(self.frst_keys, line)
            if not proc:
                raise TelemacException(\
                    '... Was expecting a new Section starter. '
                    'Found this instead: {}'.format(line))
            key = proc.group('key')

        # ~~ Section Nodes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
            if key == "Nodes":
                print('     +> mesh x,y,z')
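                # The section starts with the number of nodes, then one line
                # per node: "<node-number> <x> <y> <z>",
                # e.g. "1 0.0 0.0 0.0" (illustration).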
                npoin = int(fle.next())
                if self.fle['float'][0] == 'd':
                    meshx = np.zeros(npoin, dtype=np.float64)
                    meshy = np.zeros(npoin, dtype=np.float64)
                    meshz = np.zeros(npoin, dtype=np.float64)
                else:
                    meshx = np.zeros(npoin, dtype=np.float)
                    meshy = np.zeros(npoin, dtype=np.float)
                    meshz = np.zeros(npoin, dtype=np.float)
                #map_nodes = []
                for i in range(npoin):
                    line = fle.next().split()
                    #map_nodes.append(int(line[0]))
                    meshx[i] = np.float(line[1])
                    meshy[i] = np.float(line[2])
                    meshz[i] = np.float(line[3])
                # TODO: renumbering nodes according to map_nodes ?
                #map_nodes = np.asarray(map_nodes)
                self.npoin2 = npoin
                self.meshx = meshx
                self.meshy = meshy
                self.meshz = meshz

                line = fle.next()

        # ~~ Section Elements ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
            elif proc.group('key') == "Elements":
                print('     +> renumbered connectivity')
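                # The section starts with the number of elements, then one
                # line per element:
                # "<elm-number> <elm-type> <nb-tags> <tags...> <node-list>";
                # only type 2 (3-node triangles) is kept below. Illustration:
                # "1 2 2 0 1 5 6 7" is triangle 1 built on nodes 5, 6 and 7.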
                nelem = int(fle.next())
                ikle2 = - np.ones((nelem, 3), dtype=np.int)
                for i in range(nelem):
                    line = fle.next().split()
                    if int(line[1]) != 2:
                        continue
                    expr = line[int(line[2])+3:]
                    ikle2[i] = [np.int(expr[0]), np.int(expr[1]),
                                np.int(expr[2])]

                self.ikle2 = ikle2[np.not_equal(*(np.sort(ikle2).T[0::2]))] - 1
                self.nelem2 = len(self.ikle2)

                line = fle.next()
                # TODO: fitting the unique node numbers with map_nodes ?

        # ~~ Unnecessary section ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
            else:
                while True:
                    line = fle.next()
                    if re.match(self.last_keys, line):
                        break

        proc = re.match(self.last_keys, line)
        if proc:
            if proc.group('key') != key:
                raise TelemacException(\
                        '... Could not complete reading the header of your '
                        'MSH file format. Missing {} end key.'.format(key))

        # ~~> sizes
        print('     +> sizes')
        self.ndp3 = 3
        self.ndp2 = 3
        self.nplan = 1
        self.nelem3 = self.nelem2
        self.npoin3 = self.npoin2
        self.ikle3 = self.ikle2
        self.iparam = [0, 0, 0, 0, 0, 0, 1, 0, 0, 0]

        print('     +> boundaries')
        # ~~> establish neighborhood
        _ = Triangulation(self.meshx, self.meshy, self.ikle3)\
                         .get_cpp_triangulation().get_neighbors()
        # ~~> build the ensemble of boundary segments
        # ~~> define ipob3 from an arbitrary start point
        self.ipob3 = np.ones(self.npoin3, dtype=np.int)
        self.ipob2 = self.ipob3
class Reader(BaseReader, UnstructuredReader):
    """
    A reader for unstructured (irregularly gridded) `Telemac3D` files.

    Args:
        :param filename: A single Selafin file
        :type filename: string, required.

        :param name: Name of reader
        :type name: string, optional

        :param proj4: PROJ.4 string describing projection of data.
        :type proj4: string, optional

    .. seealso::

        :py:mod:`opendrift.readers.basereader.unstructured`.
    """

    # node_variables = ['z','salinity','temperature',
    #     'eastward_sea_water_velocity',
    #     'northward_sea_water_velocity',
    #     'upward_sea_water_velocity'
    def __init__(self, filename=None, name=None, proj4=None):
        def vardic(vars_slf):
            """
            Match the selafin variables from Telemac 3D to the variables used in
            OpenDrift.
            """
            # Define all the variables used in OpenDrift as a dictionary
            # This is done to the best of our knowledge
            Vars_OD = {
                'VELOCITY U      ': 'x_sea_water_velocity',
                'VELOCITY V      ': 'y_sea_water_velocity',
                'VELOCITY W      ': 'upward_sea_water_velocity',
                'TURBULENT ENERGY': 'turbulent_kinetic_energy',
                'TEMPERATURE     ': 'sea_water_temperature',
                'SALINITY        ': 'sea_water_salinity',
                'NUZ FOR VELOCITY': 'ocean_vertical_diffusivity',
            }

            No_OD_equiv = {
                'x_wind', 'y_wind', 'wind_speed',
                'sea_floor_depth_below_sea_level', 'wind_from_direction',
                'sea_ice_x_velocity', 'sea_ice_y_velocity',
                'sea_surface_wave_significant_height',
                'sea_surface_wave_stokes_drift_x_velocity',
                'sea_surface_wave_stokes_drift_y_velocity',
                'sea_surface_wave_period_at_variance_spectral_density_maximum',
                'sea_surface_wave_mean_period_from_variance_spectral_density_second_frequency_moment',
                'sea_ice_area_fraction', 'surface_downward_x_stress',
                'surface_downward_y_stress', 'turbulent_generic_length_scale'
            }
            # Sea-floor depth could be extracted from the variable Z but
            # it would need a specific treatment in get variable
            No_Telemac_equiv = {
                'NUX FOR VELOCITY',
                'NUY FOR VELOCITY',
                'DISSIPATION     ',
                'ELEVATION Z     ',
            }
            variables = []
            var_idx = []
            for i, var in enumerate(vars_slf):
                try:
                    variables.append(Vars_OD[var])
                    var_idx.append(i)
                except KeyError:
                    logger.info(
                        "Selafin variable {} has no equivalent in OpenDrift".
                        format(var))
            return np.array(variables), np.array(var_idx)
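        # Illustration: vardic(['VELOCITY U      ', 'SALINITY        '])
        # returns (array(['x_sea_water_velocity', 'sea_water_salinity']),
        # array([0, 1])).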

        self.name = name if name is not None else filename

        self.timer_start("open dataset")
        logger.info('Opening dataset: %s' % filename)
        self.slf = Selafin(filename)

        logger.info("File:\n{}\nTitle:\n{}".format(self.slf.file, \
                                self.slf.title))
        logger.info('Using projection: %s.' % proj4)
        self.proj4 = proj4

        logger.info('Reading 2D grid')

        # Run constructor of parent Reader class
        super().__init__()
        self.boundary = self._build_boundary_polygon_(
            self.slf.meshx, self.slf.meshy)

        self.timer_start("build index")
        logger.debug("building index of nodes..")
        # using selafin method (scipy)
        self.x, self.y = self.slf.meshx, self.slf.meshy

        logger.debug('nodes: %d' % len(self.x))

        # using scipy directly
        self.tree = self._build_ckdtree_(self.slf.meshx, self.slf.meshy)
        #bounds
        self.xmin, self.ymin, self.xmax, self.ymax = \
            self.slf.meshx.min(), self.slf.meshy.min(), \
            self.slf.meshx.max(), self.slf.meshy.max()

        # time management
        self.start_time = datetime(self.slf.datetime[0], self.slf.datetime[1],
                                   self.slf.datetime[2], self.slf.datetime[3],
                                   self.slf.datetime[4])
        # self.start_time=np.datetime64(datetime(self.slf.datetime[0],self.slf.datetime[1],
        #         self.slf.datetime[2],self.slf.datetime[3],self.slf.datetime[4]))
        # self.time = self.start_time + self.slf.tags['times'].astype('timedelta64[s]')
        self.times = []
        for i in range(len(self.slf.tags['times'])):
            self.times.append(self.start_time +
                              timedelta(seconds=self.slf.tags['times'][i]))
        self.end_time = self.times[-1]

        self.variables, self.var_idx = vardic(self.slf.varnames)

        self.timer_end("build index")
        self.timer_end("open dataset")

    def plot_mesh(self):
        """
        Plot the grid mesh. Does not automatically show the figure.
        """
        title = 'Unstructured grid: %s\n%s' % (self.name, self.proj)
        from importlib.util import find_spec
        if find_spec("pyvista") is not None:
            import pyvista as pv
            cells = np.hstack(
                ((np.ones(len(self.slf.ikle2), dtype=np.int) * 3)[:, None],
                 self.slf.ikle2))
            points = np.vstack((self.slf.meshx, self.slf.meshy,
                                np.zeros(len(self.slf.meshx)))).T
            u = pv.PolyData(points, cells)
            plotter = pv.Plotter()
            plotter.add_mesh(u, show_edges=True)
            plotter.show_bounds(mesh=u)
            plotter.view_xy()
            plotter.show(title=title, window_size=[800, 640])
        else:
            import matplotlib.pyplot as plt
            plt.figure()
            plt.scatter(self.slf.meshx,
                        self.slf.meshy,
                        marker='x',
                        color='blue',
                        label='nodes')
            x, y = getattr(self.boundary, 'context').exterior.xy
            plt.plot(x, y, color='green', label='boundary')

            plt.legend()
            plt.title(title)
            plt.xlabel('x [m]')
            plt.ylabel('y [m]')

    def get_variables(self,
                      requested_variables,
                      time=None,
                      x=None,
                      y=None,
                      z=None):
        """
        - Query variables based on the particle coordinates x, y, z
        - find the nearest node in the KD tree
        - extract the z array corresponding.
        - extract the index of the node within the 3D mesh
        - extract the variables at the point

        Args:

            x,y,z: np.arrays(float)
                3D coordinates of the particles
            time: np.datetime64
                age of the particle set
            variables: np.array(int)
                indexes of variables

        Returns:

            variables: dictionary of numpy arrays
        """
        def nearest_idx(array, value):
            """
                we are looking for a tuple describing where the sample is and at which
                distance. So we can calculate the FE solution of the variable value.
                input:
                    array: a 1D numpy array
                    monotonic array
                output:
                    bounds: a tupple
                    tupple describing the bounding indexes
                    dist = tupple
                    the distance between the sample and the index values
                    so that dist[0]+dist[1]=0
                """
            distance = (array - value).astype(float)
            nearest = np.argsort(abs(distance))[:2]

            # test exact match and out of bounds
            if ((distance == 0).any() | (distance > 0).all()
                    | (distance < 0).all()):
                bounds = (nearest[0], None)
                dist = (1, 0)
            else:
                bounds = nearest
                if distance[nearest[0]] == distance[nearest[1]] * -1:
                    dist = (.5, .5)
                else:
                    prop= abs(distance[nearest[0]])/ \
                       (abs(distance[nearest[0]])+abs(distance[nearest[1]]))
                    dist = (1 - prop, prop)
            return bounds, dist
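        # Worked illustration: with array = [0, 10, 20] and value = 4,
        # nearest_idx returns bounds = (0, 1) and dist = (0.6, 0.4), so the
        # interpolated value is 0.6*array[0] + 0.4*array[1] = 4.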

        ### nearest time tuple
        frames, duration = nearest_idx(
            np.array(self.times).astype('datetime64[s]'), np.datetime64(time))

        ### nearest node in 2D
        iii = self.__nearest_ckdtree__(self.tree, x, y)

        # build depth ndarrays of each fibre
        niii = len(iii)
        idx_3D = np.arange(self.slf.nplan).reshape(self.slf.nplan,
                                                   1) * self.slf.npoin2 + iii
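        # idx_3D[k, p] is the 3D node index of particle p's nearest 2D node on
        # plane k: Telemac 3D stacks the 2D mesh, npoin2 nodes per plane.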
        depths1 = self.slf.get_variables_at(frames[0], [0])[0, idx_3D]
        if frames[1] is not None:
            depths2 = self.slf.get_variables_at(frames[1], [0])[0, idx_3D]
        else:
            depths2 = 0
        # locate the profile dimension
        pm = duration[0] * depths1 + duration[1] * depths2
        # calculate distance from particles to nearest point depth
        idx_layer = np.abs(pm - z).argmin(axis=0)
        vars = {}
        #var_i=cfvar(self, requested_variables)
        for i in range(len(requested_variables)):
            idx_v = self.var_idx[self.variables == requested_variables[i]]
            vectors1 = self.slf.get_variables_at(
                frames[0], [idx_v])[0, idx_3D[idx_layer][0]].ravel()
            if frames[1] is not None:
                vectors2 = self.slf.get_variables_at(
                    frames[1], [idx_v])[0, idx_3D[idx_layer][0]].ravel()
            else:
                vectors2 = 0
            vars[requested_variables[
                i]] = duration[0] * vectors1 + duration[1] * vectors2
        return vars

    def filter_points(self, indexes):
        """
        Filter points that are not within the grid.
        Use the finite element method to evaluate properties.
        ~~~ To be continued ~~~
        """
        # test they correspond to faces:
        ifaces = np.where(
            (np.sort(indexes, axis=1)[:, None] ==
             np.sort(self.slf.ikle2, axis=1)).all(-1).any(-1))[0]
        # in the future
        # extract profile from the 2 frames bounding t.
        # z is always the variable idx 0
        p1 = self.slf.get_variables_at(frames[0], [0])[0,
                                                       self.slf.ikle3[ifaces]]
        p2 = self.slf.get_variables_at(frames[1], [0])[0,
                                                       self.slf.ikle3[ifaces]]
        x1 = self.slf.meshx[ifaces]
        y1 = self.slf.meshy[ifaces]
class Grib(object):
    def __init__(self, dataset, request, stream):

        self.request = request
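        # The `request` dict is assumed to be MARS-like, e.g. (hedged
        # illustration, the values are placeholders):
        #   {'param': '165/166/151',
        #    'date': '2017-01-01/to/2017-01-03',
        #    'area': 'x_min/y_min/x_max/y_max' as interpreted by set_geometry}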
        # ~~> inheritance
        self.slf2d = Selafin('')
        self.slf2d.title = ''
        self.slf2d.fole = {}
        self.dataset = []
        self.variables = []
        self.byrowdown = False
        self.maskx = None
        self.masky = None
        self.nx1d = None
        self.ny1d = None
        self.nb_direct = None
        self.nb_freq = None
        self.freq = None
        self.dirc = None

        print('   +> identifying relevant files, by variables')
        # ~~> filter requested variables
        self.variables = []
        found_dataset = []
        ibar = 0
        pbar = ProgressBar(maxval=len(dataset)).start()
        for data in dataset:
            grbs = pygrib.open(data)
            for grb in grbs:
                if str(grb.indicatorOfParameter) in request['param']\
                                                        .split('/'):
                    if data not in found_dataset:
                        found_dataset.append(data)
                    if grb.indicatorOfParameter not in self.variables:
                        self.variables.append(grb.indicatorOfParameter)
                    else:
                        break
            grbs.close()
            ibar += 1
            pbar.update(ibar)
        pbar.finish()
        if self.variables == []:
            raise TelemacException('... could not find '
                                   'the requested variables.\n\n')

        print('   +> sorting out timeline')
        # ~~>  checking consistency of origin of date and time
        for data in found_dataset:
            grbs = pygrib.open(data)
            for grb in grbs:
                at0 = pygrib.julian_to_datetime(grb.julianDay)
                break
            break
        print('      - start date and time', at0)
        self.slf2d.datetime = [d for d in at0.timetuple()[0:6]]
        # ~~>  recording times from origin of date and time
        ats = []
        dts = []
        ibar = 0
        pbar = ProgressBar(maxval=len(found_dataset)).start()
        for data in found_dataset:
            grbs = pygrib.open(data)
            for grb in grbs:
                date = str(grb.validityDate)
                ats.append(
                    datetime(int(date[:4]), int(date[4:6]), int(date[6:])) +
                    timedelta(seconds=int(grb.validityTime * 36)))
                dts.append((ats[-1] - at0).total_seconds())
                break
            ibar += 1
            pbar.update(ibar)
        pbar.finish()
        print('      - finish date and time', ats[-1])
        # ~~>  checking if the list is sorted
        if not all(ats[i] < ats[i + 1] for i in range(len(ats) - 1)):
            raise TelemacException('... your dataset is not sorted. '
                                   'Here is the time profile in seconds:\n'
                                   '{}\n\n'.format(repr(dts)))
        # ~~> filter requested times
        udates = [
            datetime(*[int(a) for a in d.split('-')])
            for d in request['date'].split('/to/')
        ]
        self.slf2d.tags = {'times': []}
        udates[1] = udates[1] + timedelta(hours=24.)
        for i in range(len(ats)):
            if udates[0] <= ats[i] and ats[i] <= udates[1]:
                self.slf2d.tags['times'].append(dts[i])
                self.dataset.append(found_dataset[i])
        times = self.slf2d.tags['times']
        print('   +> actual timeline')
        print('      - start date and time  ',
              at0 + timedelta(seconds=times[0]))
        print('      - finish date and time ',
              at0 + timedelta(seconds=times[-1]))

        # ~> Other initialisations
        self.typ = stream

        # ~~> spatial sizes
        print('   +> checking out sizes')
        grbs = pygrib.open(self.dataset[0])
        for grb in grbs:
            self.missing_value = grb.missingValue
            self.scale_values_by = grb.scaleValuesBy
            self.offset = grb.offset
            break
        grbs.close()

    def open_grib(self, file_name):

        self.slf2d.fole.update({'hook': open(file_name, 'wb')})
        self.slf2d.fole.update({'name': file_name})
        self.slf2d.fole.update({'endian': ">"})  # big endian
        self.slf2d.fole.update({'float': ('f', 4)})  # single precision

    def close_grib(self):
        self.slf2d.fole['hook'].close()

    def set_geometry(self):

        # ~~> header
        self.byrowdown = False

        # ~~> 2D grid
        print('   +> set the mesh and connectivity')
        x_1, y_1, x_2, y_2 = self.request['area'].split('/')
        grbs = pygrib.open(self.dataset[0])
        for grb in grbs:
            y, x = grb.latlons()
            self.maskx = np.logical_and(float(x_1) <= x[0], x[0] <= float(x_2))
            l_x = x[0][self.maskx]
            if not np.any(self.maskx):
                self.maskx = np.logical_and(
                    float(x_1) <= x[0] - 360., x[0] - 360. <= float(x_2))
                l_x = x[0][self.maskx] - 360.
            if not np.any(self.maskx):
                raise TelemacException(
                    '... your spatial range seems out of bound:\n       '
                    'you asked for [ {} - {}], while x is:\n       '
                    '{}\n\n'.format(x_1, x_2, repr(x)))
            self.nx1d = len(l_x)
            self.masky = np.logical_and(
                float(y_1) <= y.T[0], y.T[0] <= float(y_2))
            l_y = y.T[0][self.masky]
            if not np.any(self.masky):
                raise TelemacException(
                    '... your spatial range seems out of bound:\n       '
                    'you asked for [ {} - {}], while x is:\n       '
                    '{}\n\n'.format(y_1, y_2, repr(y)))
            self.ny1d = len(l_y)
            if self.byrowdown:
                self.slf2d.meshx = np.ravel(
                    np.tile(l_x, self.ny1d).reshape(self.ny1d, self.nx1d))
                self.slf2d.meshy = np.ravel(
                    np.tile(l_y, self.nx1d).reshape(self.nx1d, self.ny1d).T)
            else:
                self.slf2d.meshx = np.ravel(
                    np.tile(l_x, self.ny1d).reshape(self.ny1d, self.nx1d).T)
                self.slf2d.meshy = np.ravel(
                    np.tile(l_y, self.nx1d).reshape(self.nx1d, self.ny1d))
            break
        grbs.close()

        self.slf2d.nplan = 1
        self.slf2d.ndp2 = 3
        self.slf2d.ndp3 = self.slf2d.ndp2
        self.slf2d.npoin2 = self.nx1d * self.ny1d
        self.slf2d.npoin3 = self.slf2d.npoin2
        self.slf2d.nelem2 = 2 * (self.nx1d - 1) * (self.ny1d - 1)
        self.slf2d.nelem3 = self.slf2d.nelem2

        # ~~> Connectivity - numbered by rows
        ielem = 0
        pbar = ProgressBar(maxval=self.slf2d.nelem3).start()
        self.slf2d.ikle3 = np.zeros((self.slf2d.nelem3, self.slf2d.ndp3),
                                    dtype=np.int)
        if self.byrowdown:
            for j in range(1, self.ny1d):
                for i in range(1, self.nx1d):
                    ipoin = (j - 1) * self.nx1d + i - 1
                    # ~~> first triangle
                    self.slf2d.ikle3[ielem][0] = ipoin
                    self.slf2d.ikle3[ielem][1] = ipoin + self.nx1d
                    self.slf2d.ikle3[ielem][2] = ipoin + 1
                    ielem = ielem + 1
                    pbar.update(ielem)
                    # ~~> second triangle
                    self.slf2d.ikle3[ielem][0] = ipoin + self.nx1d
                    self.slf2d.ikle3[ielem][1] = ipoin + self.nx1d + 1
                    self.slf2d.ikle3[ielem][2] = ipoin + 1
                    ielem = ielem + 1
                    pbar.update(ielem)
        else:
            for j in range(1, self.ny1d):
                for i in range(1, self.nx1d):
                    ipoin = j - 1 + (i - 1) * self.ny1d
                    # ~~> first triangle
                    self.slf2d.ikle3[ielem][0] = ipoin
                    self.slf2d.ikle3[ielem][1] = ipoin + 1
                    self.slf2d.ikle3[ielem][2] = ipoin + self.ny1d
                    ielem = ielem + 1
                    pbar.update(ielem)
                    # ~~> second triangle
                    self.slf2d.ikle3[ielem][0] = ipoin + self.ny1d
                    self.slf2d.ikle3[ielem][1] = ipoin + 1
                    self.slf2d.ikle3[ielem][2] = ipoin + self.ny1d + 1
                    ielem = ielem + 1
                    pbar.update(ielem)
        pbar.finish()
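        # Illustration (hedged): for a 2 x 2 grid with byrowdown False, the
        # loops above produce the two triangles [0, 1, 2] and [2, 1, 3].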

        # ~~> Boundaries
        self.slf2d.ipob3 = np.zeros(self.slf2d.npoin3, dtype=np.int)

        if self.byrowdown:
            # ~~> around the box
            for i in range(self.nx1d):
                ipoin = i
                self.slf2d.ipob3[i] = ipoin
            for i in range(self.nx1d):
                ipoin = self.nx1d + self.ny1d - 2 + i
                self.slf2d.ipob3[self.nx1d * self.ny1d - i - 1] = ipoin
            for j in range(1, self.ny1d - 1):
                ipoin = j + self.nx1d - 1
                self.slf2d.ipob3[(j + 1) * self.nx1d - 1] = ipoin
            for j in range(1, self.ny1d - 1):
                ipoin = self.ny1d + 2 * self.nx1d + j - 3
                self.slf2d.ipob3[self.nx1d*self.ny1d-j*self.nx1d-self.nx1d] = \
                    ipoin
        else:
            # ~~> around the box
            for j in range(self.ny1d):
                ipoin = j
                self.slf2d.ipob3[j] = ipoin
            for j in range(self.ny1d):
                ipoin = self.ny1d + self.nx1d - 2 + j
                self.slf2d.ipob3[self.ny1d * self.nx1d - j - 1] = ipoin
            for i in range(1, self.nx1d - 1):
                ipoin = i + self.ny1d - 1
                self.slf2d.ipob3[(i + 1) * self.ny1d - 1] = ipoin
            for i in range(1, self.nx1d - 1):
                ipoin = self.nx1d + 2 * self.ny1d + i - 3
                self.slf2d.ipob3[self.ny1d*self.nx1d-i*self.ny1d-self.ny1d] = \
                    ipoin

        # ~~> Boundary points
        self.slf2d.iparam = [
            0, 0, 0, 0, 0, 0, 0, 2 * self.nx1d + 2 * (self.ny1d - 2), 0, 1
        ]
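        # Illustration: for a 3 x 3 grid, iparam[7] = 2*3 + 2*(3-2) = 8,
        # i.e. the 8 perimeter nodes of the 9-node grid.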

    def put_geometry(self, file_name):

        print('   +> writing up the geometry file')

        self.slf2d.fole = {}
        self.slf2d.fole.update({'hook': open(file_name, 'wb')})
        self.slf2d.fole.update({'name': file_name})
        self.slf2d.fole.update({'endian': ">"})  # big endian
        self.slf2d.fole.update({'float': ('f', 4)})  # single precision

        self.slf2d.varnames = ['RANGE          ']
        self.slf2d.varunits = ['UI             ']
        self.slf2d.nbv1 = len(self.slf2d.varnames)
        self.slf2d.nvar = self.slf2d.nbv1
        self.slf2d.varindex = range(self.slf2d.nvar)

        print('       - Write Selafin header')
        self.slf2d.append_header_slf()

        # ~~> A few more number and a spectral template for input/output
        grbs = pygrib.open(self.dataset[0])
        for grb in grbs:
            nb_direct = grb.numberOfDirections
            nb_freq = grb.numberOfFrequencies
            break
        grbs.close()
        spec = np.zeros((nb_direct, nb_freq, self.nx1d, self.ny1d),
                        dtype=np.float)
        var = np.zeros((self.nx1d, self.ny1d), dtype=np.float)

        print('       - Write Selafin core')
        ibar = 0
        pbar = ProgressBar(maxval=len(self.slf2d.tags['times'])).start()
        for itime in range(len(self.slf2d.tags['times'])):

            self.slf2d.append_core_time_slf(self.slf2d.tags['times'][itime])
            grbs = pygrib.open(self.dataset[itime])
            for grb in grbs:
                i_i = 0
                data = grb.values.data
                data[np.where(np.absolute(data) <= 0.001)] = np.nan
                data[np.where(data == self.missing_value)] = np.nan
                data = 10.**data
                data[np.isnan(data)] = 0.
                for i_y in range(len(self.masky)):
                    if self.masky[i_y]:
                        spec[grb.directionNumber-1,
                             grb.frequencyNumber-1, :, i_i] = \
                              data[i_y][self.maskx]
                        i_i += 1
            grbs.close()

            ibar += 1
            pbar.update(ibar)
            for i_x in range(self.nx1d):
                for i_y in range(self.ny1d):
                    var[i_x, i_y] = max(spec[:, :, i_x, i_y].ravel()) -\
                                      min(spec[:, :, i_x, i_y].ravel())
            self.slf2d.append_core_vars_slf([var.ravel()])

        pbar.finish()
        self.slf2d.fole['hook'].close()

    def set_spectral(self):

        print('   +> resetting the header of the spectral file')

        print('      - read the spectra definition')
        grbs = pygrib.open(self.dataset[0])
        for grb in grbs:
            self.nb_direct = grb.numberOfDirections
            self.nb_freq = grb.numberOfFrequencies
            self.freq = np.asarray(grb.scaledFrequencies, dtype=np.float) / \
                grb.frequencyScalingFactor
            #  /!\ only so that TOMAWAC works
            self.dirc = np.asarray(grb.scaledDirections, dtype=np.float) / \
                grb.directionScalingFactor - 7.5
            break
        grbs.close()

        # ~~> sizes (spectral numbers)
        self.slf2d.nplan = 1
        self.slf2d.ndp2 = 4
        self.slf2d.ndp3 = self.slf2d.ndp2
        self.slf2d.npoin2 = self.nb_direct * self.nb_freq
        self.slf2d.npoin3 = self.slf2d.npoin2
        self.slf2d.nelem2 = self.nb_direct * (self.nb_freq - 1)
        self.slf2d.nelem3 = self.slf2d.nelem2
        self.slf2d.nptfr = 2 * self.nb_direct
        self.slf2d.iparam = [0, 0, 0, 0, 0, 0, 0, 2 * self.nb_direct, 0, 1]

        # ~~> 2D grid (spectral grid) - TODO: use numpy here !
        self.slf2d.meshx = np.zeros(self.slf2d.npoin2, dtype=np.float)
        self.slf2d.meshy = np.zeros(self.slf2d.npoin2, dtype=np.float)
        print('      - set the mesh')
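        # Each (direction, frequency) pair is mapped to Cartesian coordinates
        # (f*sin(theta), f*cos(theta)): the spectral grid becomes a polar mesh
        # with frequency as radius and direction as azimuth.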
        ipoin = 0
        pbar = ProgressBar(maxval=self.slf2d.npoin2).start()
        for j_f in range(self.nb_freq):
            for i_i in range(self.nb_direct):
                self.slf2d.meshx[i_i+self.nb_direct*j_f] = \
                    self.freq[j_f]*math.sin(math.pi*self.dirc[i_i]/180.)
                self.slf2d.meshy[i_i+self.nb_direct*j_f] = \
                    self.freq[j_f]*math.cos(math.pi*self.dirc[i_i]/180.)
                ipoin += 1
                pbar.update(ipoin)
        pbar.finish()

        # ~~> Connectivity - TODO: use numpy here !
        print('      - set the connectivity')
        ielem = 0
        pbar = ProgressBar(maxval=self.slf2d.nelem3).start()
        self.slf2d.ikle3 = np.zeros((self.slf2d.nelem3, self.slf2d.ndp3),
                                    dtype=np.int)
        for j_f in range(self.nb_freq - 1):
            for i_i in range(self.nb_direct):
                self.slf2d.ikle3[ielem][0] = (i_i+1) % self.nb_direct + \
                                                      j_f*self.nb_direct
                ielem += 1
        for ielem in range(self.slf2d.nelem3):
            self.slf2d.ikle3[ielem][1] = ielem
            self.slf2d.ikle3[ielem][2] = ielem + self.nb_direct
            self.slf2d.ikle3[ielem][3] = self.slf2d.ikle3[ielem][0] + \
                self.nb_direct
            pbar.update(ielem)
        pbar.finish()

        # ~~> Boundaries - TODO: use numpy here !
        pbar = ProgressBar(maxval=self.nx1d + self.ny1d).start()
        self.slf2d.ipob3 = np.zeros(self.slf2d.npoin3, dtype=np.int)
        # ~~> along the ?-axis
        for i_i in range(self.nb_direct):
            self.slf2d.ipob3[i_i] = i_i
        for i_i in range(self.nb_direct, 2 * self.nb_direct):
            self.slf2d.ipob3[i_i] = self.nb_direct * \
                                          (self.nb_freq+1) - i_i
        pbar.finish()

    def append_header_grib(self):

        self.slf2d.varnames = []
        self.slf2d.varunits = []
        if self.typ == 'wave':
            # TODO: codes for waves
            raise TelemacException('... waves, not coded yet')
        elif self.typ == 'oper':
            for i in self.variables:
                if 151 == i:
                    self.slf2d.varnames.append('SURFACE PRESSURE')
                    self.slf2d.varunits.append('UI              ')
                if 165 == i:
                    self.slf2d.varnames.append('WIND VELOCITY U ')
                    self.slf2d.varunits.append('M/S             ')
                if 166 == i:
                    self.slf2d.varnames.append('WIND VELOCITY V ')
                    self.slf2d.varunits.append('M/S             ')
                if 167 == i:
                    self.slf2d.varnames.append('AIR TEMPERATURE ')
                    self.slf2d.varunits.append('DEGREES         ')
            for var in self.slf2d.varnames:
                print('    - ', var)
        elif self.typ == 'spec':
            if 251 in self.variables:
                for i in range(self.nx1d * self.ny1d):
                    self.slf2d.varnames.append(
                        ('F PT ' + str(i + 1) + '                ')[:16])
                    self.slf2d.varunits.append('UI              ')
            print('    - from ', self.slf2d.varnames[0], ' to ',
                  self.slf2d.varnames[-1])
        if self.slf2d.varnames == []:
            raise TelemacException(
                '... could not match the requested variable with the type of '
                'file.\n\n')
        self.slf2d.nbv1 = len(self.slf2d.varnames)
        self.slf2d.nvar = self.slf2d.nbv1
        self.slf2d.varindex = range(self.slf2d.nvar)

        self.slf2d.append_header_slf()

    def append_core_time_grib(self, itime):

        self.slf2d.append_core_time_slf(self.slf2d.tags['times'][itime])

    def append_core_vars_grib(self, itime):

        if self.typ == 'wave':
            pass
            # ~~> WAVE HEIGHT == 'swh'
            # ~~> SIGNIFICANT WAVE PERIOD == 'mwp'
            # ~~> MEAN WAVE DIRECTION == 'mwd'

        elif self.typ == 'oper':
            var2d = np.zeros((self.slf2d.nvar, self.slf2d.npoin2),
                             dtype=np.float)
            grbs = pygrib.open(self.dataset[itime])
            for grb in grbs:
                if grb.indicatorOfParameter in self.variables:
                    jvar = self.variables.index(grb.indicatorOfParameter)
                    var2d[jvar, :] = np.ravel(grb.values.T)
            grbs.close()
            for jvar in range(self.slf2d.nvar):
                self.slf2d.append_core_vars_slf([var2d[jvar, :]])

        elif self.typ == 'spec':

            spec = np.zeros(
                (self.nb_direct, self.nb_freq, self.nx1d, self.ny1d),
                dtype=np.float)
            grbs = pygrib.open(self.dataset[itime])
            ibar = 0
            maxval = self.nb_direct * self.nb_freq
            pbar = ProgressBar(maxval=maxval).start()
            for grb in grbs:
                i_i = 0
                data = grb.values.data
                data[np.where(np.absolute(data) <= 0.001)] = np.nan
                data[np.where(data == self.missing_value)] = np.nan
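                # the spectra are presumably archived as log10 of the spectral
                # density, hence the 10**data decoding below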
                data = 10.**data
                data[np.isnan(data)] = 0.
                for i_y in range(len(self.masky)):
                    if self.masky[i_y]:
                        spec[grb.directionNumber-1, grb.frequencyNumber-1, :,
                             i_i] = \
                                  data[i_y][self.maskx]
                        i_i += 1
                ibar += 1
                pbar.update(ibar)
            pbar.finish()
            grbs.close()

            for i_x in range(self.nx1d):
                for i_y in range(self.ny1d):
                    self.slf2d.append_core_vars_slf(
                        [np.ravel(spec[:, :, i_x, i_y].T)])

    def put_content(self, file_name, showbar=True):

        self.open_grib(file_name)

        print('     +> Write Selafin header')
        self.append_header_grib()

        print('     +> Write Selafin core')
        if showbar:
            pbar = ProgressBar(maxval=len(self.dataset)).start()
        for itime in range(len(self.dataset)):
            seconds = int(self.slf2d.tags['times'][itime])
            date = (datetime(*self.slf2d.datetime) +
                    timedelta(seconds=seconds)).timetuple()[0:6]
            print("        - {}-{}-{} {}:{}:{}".format(date[2], date[1],
                                                       date[0], date[3],
                                                       date[4], date[5]))
            self.append_core_time_grib(itime)
            self.append_core_vars_grib(itime)
            if showbar:
                pbar.update(itime)
        if showbar:
            pbar.finish()

        self.close_grib()