Example #1
    def test_identify_CF_isopressure(self):
        hybrid_name, hybrid_var, orientation, units, lt = identify_vertical_axis(self.ncfile_pl)
        assert hybrid_name == "isobaric"
        assert hybrid_var.size == 14
        assert units == "hPa"
        assert lt == "pl"
        assert orientation == 1
Example #2
    def test_identify_CF_isopotvort(self):
        hybrid_name, hybrid_var, orientation, units, lt = identify_vertical_axis(self.ncfile_pv)
        assert hybrid_name == "isopv"
        assert hybrid_var.size == 5
        assert units == "PVU"
        assert lt == "pv"
        assert orientation == 1
Example #3
    def test_identify_CF_hybrid(self):
        hybrid_name, hybrid_var, orientation, units, lt = identify_vertical_axis(self.ncfile_ml)
        assert hybrid_name == "hybrid"
        assert hybrid_var.size == 18
        assert units == "sigma"
        assert lt == "ml"
        assert orientation == 1
Example #4
    def test_identify_CF_isopottemp(self):
        hybrid_name, hybrid_var, orientation, units, lt = identify_vertical_axis(self.ncfile_tl)
        assert hybrid_name == "isentropic"
        assert hybrid_var.size == 8
        assert units == "K"
        assert lt == "tl"
        assert orientation == 1
Example #5
    def test_identify_CF_isoaltitude(self):
        hybrid_name, hybrid_var, orientation, units, lt = identify_vertical_axis(self.ncfile_al)
        assert hybrid_name == "height"
        assert hybrid_var.size == 21
        assert units == "m"
        assert lt == "al"
        assert orientation == 1
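Taken together, Examples #1 to #5 document the return signature of identify_vertical_axis: an axis name, the vertical coordinate variable, the axis orientation, the units string, and a short level-type code (pl, pv, ml, tl or al; sfc is used for surface-only data, see Example #6). The following is a minimal sketch of how a caller might report that tuple for an arbitrary file; the import path and the file name are assumptions, not part of the original code.

    import netCDF4
    from mslib import netCDF4tools  # import path is an assumption

    def describe_vertical_axis(path):
        """Print a short summary of the vertical axis found in a NetCDF file."""
        with netCDF4.Dataset(path) as dataset:
            name, var, orientation, units, lt = netCDF4tools.identify_vertical_axis(dataset)
            if lt == "sfc":
                print("surface data, no separate vertical axis")
            else:
                print(f"{name} axis ({lt}): {var.size} levels, "
                      f"units '{units}', orientation {orientation}")

    describe_vertical_axis("forecast_pl.nc")  # hypothetical file name
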
Example #6
    def _parse_file(self, filename):
        elevations = {"filename": filename, "levels": [], "units": None}
        with netCDF4.Dataset(os.path.join(self._root_path,
                                          filename)) as dataset:
            time_name, time_var = netCDF4tools.identify_CF_time(dataset)
            init_time = netCDF4tools.num2date(0, time_var.units)
            if not self.uses_inittime_dimension():
                init_time = None
            valid_times = netCDF4tools.num2date(time_var[:], time_var.units)
            if not self.uses_validtime_dimension():
                if len(valid_times) > 0:
                    raise IOError(
                        f"Skipping file '{filename}: no support for valid time, but multiple "
                        f"time steps present")
                valid_times = [None]
            lat_name, lat_var, lon_name, lon_var = netCDF4tools.identify_CF_lonlat(
                dataset)
            vert_name, vert_var, _, _, vert_type = netCDF4tools.identify_vertical_axis(
                dataset)

            if len(time_var.dimensions) != 1 or time_var.dimensions[0] != time_name:
                raise IOError("Problem with time coordinate variable")
            if len(lat_var.dimensions) != 1 or lat_var.dimensions[0] != lat_name:
                raise IOError("Problem with latitude coordinate variable")
            if len(lon_var.dimensions) != 1 or lon_var.dimensions[0] != lon_name:
                raise IOError("Problem with longitude coordinate variable")

            if vert_type != "sfc":
                elevations = {
                    "filename": filename,
                    "levels": vert_var[:],
                    "units": getattr(vert_var, "units", "dimensionless")
                }
                if vert_type in self._elevations:
                    if len(vert_var[:]) != len(
                            self._elevations[vert_type]["levels"]):
                        raise IOError(
                            f"Number of vertical levels does not fit to levels of "
                            f"previous file '{self._elevations[vert_type]['filename']}'."
                        )
                    if not np.allclose(vert_var[:],
                                       self._elevations[vert_type]["levels"]):
                        raise IOError(
                            f"vertical levels do not fit to levels of previous "
                            f"file '{self._elevations[vert_type]['filename']}'."
                        )
                    if elevations["units"] != self._elevations[vert_type][
                            "units"]:
                        raise IOError(
                            f"vertical level units do not match previous "
                            f"file '{self._elevations[vert_type]['filename']}'"
                        )

            standard_names = []
            for ncvarname, ncvar in dataset.variables.items():
                if hasattr(ncvar, "standard_name") and len(ncvar.dimensions) >= 3:
                    if (ncvar.dimensions[0] != time_name
                            or ncvar.dimensions[-2] != lat_name
                            or ncvar.dimensions[-1] != lon_name):
                        logging.error(
                            "Skipping variable '%s' in file '%s': Incorrect order of dimensions",
                            ncvarname, filename)
                        continue
                    if not hasattr(ncvar, "units"):
                        logging.error(
                            "Skipping variable '%s' in file '%s': No units attribute",
                            ncvarname, filename)
                        continue
                    if ncvar.standard_name != "time":
                        try:
                            units(ncvar.units)
                        except (AttributeError, ValueError,
                                pint.UndefinedUnitError,
                                pint.DefinitionSyntaxError):
                            logging.error(
                                "Skipping variable '%s' in file '%s': unparseable units attribute '%s'",
                                ncvarname, filename, ncvar.units)
                            continue
                    if len(ncvar.shape) == 4 and vert_name in ncvar.dimensions:
                        standard_names.append(ncvar.standard_name)
                    elif len(ncvar.shape) == 3 and vert_type == "sfc":
                        standard_names.append(ncvar.standard_name)
        return {
            "vert_type": vert_type,
            "elevations": elevations,
            "init_time": init_time,
            "valid_times": valid_times,
            "standard_names": standard_names
        }
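Example #6 returns one dictionary per file, with the keys vert_type, elevations, init_time, valid_times and standard_names. Below is a hedged sketch of how such per-file records could be aggregated across a set of files; build_catalogue is a hypothetical helper, and data_source is assumed to be an instance of the class that defines _parse_file above.

    def build_catalogue(data_source, filenames):
        """Aggregate the per-file records produced by _parse_file.

        'data_source' is assumed to be an instance of the class defining
        _parse_file above; 'filenames' is an iterable of NetCDF file names.
        """
        catalogue = {}
        for filename in filenames:
            record = data_source._parse_file(filename)
            entry = catalogue.setdefault(record["vert_type"], {
                "standard_names": set(),
                "valid_times": set(),
            })
            entry["standard_names"].update(record["standard_names"])
            # valid_times may contain None for data without a valid-time dimension
            entry["valid_times"].update(t for t in record["valid_times"] if t is not None)
        return catalogue
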
Example #7
    def _set_time(self, init_time, fc_time):
        """
        Open the dataset that corresponds to a forecast field specified
        by an initialisation and a valid time.

        This method
          - determines the files that correspond to an init time and forecast step,
          - checks if an open NetCDF dataset exists and, if yes, whether it
            contains the requested valid time; if it does not, closes the
            dataset and opens the corresponding one,
          - loads dimension data if required.
        """
        if len(self.plot_object.required_datafields) == 0:
            logging.debug("no datasets required.")
            self.dataset = None
            self.filenames = []
            self.init_time = None
            self.fc_time = None
            self.times = np.array([])
            self.lat_data = np.array([])
            self.lon_data = np.array([])
            self.lat_order = 1
            self.vert_data = None
            self.vert_order = None
            self.vert_units = None
            return

        if self.uses_inittime_dimension():
            logging.debug("\trequested initialisation time %s", init_time)
            if fc_time < init_time:
                msg = "Forecast valid time cannot be earlier than " \
                      "initialisation time."
                logging.error(msg)
                raise ValueError(msg)
        self.fc_time = fc_time
        logging.debug("\trequested forecast valid time %s", fc_time)

        # Check if a dataset is open and if it contains the requested times.
        # (a dataset will only be open if the used layer has not changed,
        # i.e. the required variables have not changed as well).
        if (self.dataset is not None and self.init_time == init_time
                and fc_time in self.times):
            logging.debug(
                "\tinit time correct and forecast valid time contained (%s).",
                fc_time)
            if not self.data_access.is_reload_required(self.filenames):
                return
            logging.debug("need to re-open input files.")
            self.dataset.close()
            self.dataset = None

        # Determine the input files from the required variables and the
        # requested time:

        # Create the names of the files containing the required parameters.
        self.filenames = []
        for vartype, var, _ in self.plot_object.required_datafields:
            filename = self.data_access.get_filename(var,
                                                     vartype,
                                                     init_time,
                                                     fc_time,
                                                     fullpath=True)
            if filename not in self.filenames:
                self.filenames.append(filename)
            logging.debug("\tvariable '%s' requires input file '%s'", var,
                          os.path.basename(filename))

        if len(self.filenames) == 0:
            raise ValueError("no files found that correspond to the specified "
                             "datafields. Aborting..")

        self.init_time = init_time

        # Open NetCDF files as one dataset with common dimensions.
        logging.debug("opening datasets.")
        dsKWargs = self.data_access.mfDatasetArgs()
        dataset = netCDF4tools.MFDatasetCommonDims(self.filenames, **dsKWargs)

        # Load and check time dimension. self.dataset will remain None
        # if an Exception is raised here.
        timename, timevar = netCDF4tools.identify_CF_time(dataset)
        times = netCDF4tools.num2date(timevar[:], timevar.units)
        # removed after discussion, see
        # https://mss-devel.slack.com/archives/emerge/p1486658769000007
        # if init_time != netCDF4tools.num2date(0, timevar.units):
        #     dataset.close()
        #     raise ValueError("wrong initialisation time in input")

        if fc_time not in times:
            msg = f"Forecast valid time '{fc_time}' is not available."
            logging.error(msg)
            dataset.close()
            raise ValueError(msg)

        # Load lat/lon dimensions.
        try:
            lat_data, lon_data, lat_order = netCDF4tools.get_latlon_data(
                dataset)
        except Exception as ex:
            logging.error("ERROR: %s %s", type(ex), ex)
            dataset.close()
            raise

        _, vert_data, vert_orientation, vert_units, _ = netCDF4tools.identify_vertical_axis(
            dataset)
        self.vert_data = vert_data[:] if vert_data is not None else None
        self.vert_order = vert_orientation
        self.vert_units = vert_units

        self.dataset = dataset
        self.times = times
        self.lat_data = lat_data
        self.lon_data = lon_data
        self.lat_order = lat_order

        # Identify the variable objects from the NetCDF file that correspond
        # to the data fields required by the plot object.
        self._find_data_vars()
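The key consistency check in Example #7 is that the requested forecast valid time must be contained in the time coordinate of the newly opened dataset; otherwise the dataset is closed again and a ValueError is raised. That pattern can be isolated into a small stand-alone helper; the function name, file path and import path below are assumptions, not part of the original code.

    import netCDF4
    from mslib import netCDF4tools  # import path is an assumption

    def open_for_valid_time(path, fc_time):
        """Open a NetCDF file and verify that it contains the requested valid time."""
        dataset = netCDF4.Dataset(path)
        try:
            _, timevar = netCDF4tools.identify_CF_time(dataset)
            times = netCDF4tools.num2date(timevar[:], timevar.units)
            if fc_time not in times:
                raise ValueError(f"Forecast valid time '{fc_time}' is not available.")
        except Exception:
            # Make sure the file handle is not leaked if any check fails.
            dataset.close()
            raise
        return dataset, times

    # hypothetical usage:
    # dataset, times = open_for_valid_time("forecast_pl.nc",
    #                                      datetime.datetime(2024, 1, 1, 12, 0))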