Exemplo n.º 1
0
def test_targets():
    """For every registered target, check that its unit can be parsed, that it
    needs no conversion factor, and that all lookup tables have an entry.

    Fixed: Targets.get_range() was called twice per target; the duplicate
    call is removed.
    """
    for standard_name in Targets.get_targets():
        unit = Targets.get_unit(standard_name)
        units(unit[0])  # ensure that the unit may be parsed
        assert unit[1] == 1  # no conversion outside pint!
        Targets.get_range(standard_name)
        Targets.get_thresholds(standard_name)
        Targets.UNITS[standard_name]
        Targets.TITLES[standard_name]
Exemplo n.º 2
0
def test_targets():
    """For every registered target, check that its unit can be parsed and that
    all lookup tables have an entry; also check every threshold unit parses.

    Fixed: Targets.get_range() was called twice per target; the duplicate
    call is removed.
    """
    for standard_name in Targets.get_targets():
        unit = Targets.get_unit(standard_name)
        units(unit)  # ensure that the unit may be parsed
        Targets.get_range(standard_name)
        Targets.get_thresholds(standard_name)
        Targets.UNITS[standard_name]
        Targets.TITLES[standard_name]

    for ent in Targets.THRESHOLDS:
        units(Targets.THRESHOLDS[ent][0])
Exemplo n.º 3
0
def omega_to_w(omega, p, t):
    """
    Convert pressure vertical velocity to geometric vertical velocity.

    Arguments:
    omega -- vertical velocity in pressure coordinates, in [Pa/s]
    p -- pressure in [Pa]
    t -- temperature in [K]

    All inputs can be scalars or NumPy arrays.

    Returns the vertical velocity in geometric coordinates, [m/s].
    """
    # Attach units so metpy can do the conversion, then strip the magnitude.
    omega_q = units("Pa/s") * omega
    p_q = units.Pa * p
    t_q = units.K * t
    return mpcalc.vertical_velocity(omega_q, p_q, t_q).to("m/s").m
Exemplo n.º 4
0
def test_units():
    """Verify that the geographic degree aliases, the dimensionless
    mixing-ratio units and PVU are registered in the unit registry.
    """
    # All lat/lon aliases must convert to plain degrees with magnitude 10;
    # the southern/western aliases are negated by the registry.
    latlon_aliases = (
        ("10", ("degree_north", "degrees_north", "degreeN", "degree_N")),
        ("-10", ("degree_south", "degrees_south", "degreeS", "degree_S")),
        ("10", ("degree_east", "degrees_east", "degreeE", "degree_E")),
        ("-10", ("degree_west", "degrees_west", "degreeW", "degree_W")),
    )
    for value, aliases in latlon_aliases:
        for alias in aliases:
            assert units(f"{value} {alias}").to("degree").m == 10

    assert units("1 percent").to("dimensionless").m == 0.01
    assert units("1 permille").to("dimensionless").m == 0.001
    for name, factor in (("ppm", 1e-6), ("ppb", 1e-9), ("ppt", 1e-12),
                         ("ppmv", 1e-6), ("ppbv", 1e-9), ("pptv", 1e-12)):
        assert units(f"1 {name}").to("dimensionless").m == pytest.approx(factor)

    assert units("1 PVU").to_base_units().m == pytest.approx(
        units("1E-6 m^2 s^-1 K kg^-1").to_base_units().m)
Exemplo n.º 5
0
    def plot_vsection(self,
                      data,
                      lats,
                      lons,
                      valid_time,
                      init_time,
                      resolution=(-1, -1),
                      bbox=(-1, 1050, -1, 200),
                      style=None,
                      show=False,
                      highlight=None,
                      noframe=False,
                      figsize=(960, 480),
                      draw_verticals=False,
                      numlabels=10,
                      orography_color='k',
                      transparent=False,
                      return_format="image/png"):
        """
        Create a vertical-section plot of the given data fields and return it
        in the requested output format.

        Arguments:
        data -- dict mapping standard names to 2-D (level, waypoint) arrays;
                converted in place to the units declared in
                self.required_datafields
        lats, lons -- coordinates of the section waypoints
        valid_time, init_time -- datetimes used for labels and XML output
        bbox -- 4-tuple whose elements [1] and [3] are the bottom/top pressure
                bounds (presumably in hPa, converted to Pa below -- TODO
                confirm against callers)
        figsize -- output size in pixels (converted to inches at 80 dpi)
        return_format -- "image/png" (default) or "text/xml"

        Returns the PNG image as bytes, the XML document as a string, or None
        for an unrecognised return_format.

        Raises KeyError if a required data field is missing or if
        'air_pressure' is unavailable after _prepare_datafields().
        """
        # Check if required data is available.
        self.data_units = self.driver.data_units.copy()
        for datatype, dataitem, dataunit in self.required_datafields:
            if dataitem not in data:
                raise KeyError(f"required data field '{dataitem}' not found")
            origunit = self.driver.data_units[dataitem]
            if dataunit is not None:
                # Convert the incoming field to the unit the plot expects.
                data[dataitem] = convert_to(data[dataitem], origunit, dataunit)
                self.data_units[dataitem] = dataunit
            else:
                logging.debug("Please add units to plot variables")

        # Copy parameters to properties.
        self.data = data
        self.lats = lats
        self.lat_inds = np.arange(len(lats))
        self.lons = lons
        self.valid_time = valid_time
        self.init_time = init_time
        self.resolution = resolution
        self.style = style
        self.highlight = highlight
        self.noframe = noframe
        self.draw_verticals = draw_verticals
        # bbox[1]/bbox[3] are the bottom/top pressures; the factor 100 looks
        # like an hPa -> Pa conversion -- NOTE(review): confirm bbox units.
        self.p_bot = bbox[1] * 100
        self.p_top = bbox[3] * 100
        self.numlabels = numlabels
        self.orography_color = orography_color

        # Provide an air_pressured 2-D field in 'Pa' from vertical axis
        # (only when the driver's vertical coordinate is a pressure).
        # The [::-vert_order] slice presumably normalises the level ordering
        # -- TODO confirm sign convention of driver.vert_order.
        if (("air_pressure" not in self.data)
                and units(self.driver.vert_units).check("[pressure]")):
            self.data_units["air_pressure"] = "Pa"
            self.data["air_pressure"] = convert_to(
                self.driver.vert_data[::-self.driver.vert_order,
                                      np.newaxis], self.driver.vert_units,
                self.data_units["air_pressure"]).repeat(len(self.lats), axis=1)
        if (("air_potential_temperature" not in self.data)
                and units(self.driver.vert_units).check("[temperature]")):
            self.data_units["air_potential_temperature"] = "K"
            self.data["air_potential_temperature"] = convert_to(
                self.driver.vert_data[::-self.driver.vert_order,
                                      np.newaxis], self.driver.vert_units,
                self.data_units["air_potential_temperature"]).repeat(len(
                    self.lats),
                                                                     axis=1)

        # Derive additional data fields and make the plot.
        self._prepare_datafields()
        if "air_pressure" not in self.data:
            raise KeyError(
                "'air_pressure' need to be available for VSEC plots."
                "Either provide as data or compute in _prepare_datafields")

        # Code for producing a png image with Matplotlib.
        # ===============================================
        if return_format == "image/png":

            logging.debug("creating figure..")
            dpi = 80
            # Convert the requested pixel size to inches for Matplotlib.
            figsize = (figsize[0] / dpi), (figsize[1] / dpi)
            facecolor = "white"
            self.fig = mpl.figure.Figure(figsize=figsize,
                                         dpi=dpi,
                                         facecolor=facecolor)
            logging.debug("\twith frame and legends"
                          if not noframe else "\twithout frame")
            if noframe:
                self.ax = self.fig.add_axes([0.0, 0.0, 1.0, 1.0])
            else:
                self.ax = self.fig.add_axes([0.07, 0.17, 0.9, 0.72])

            # prepare horizontal axis
            self.horizontal_coordinate = self.lat_inds[np.newaxis, :].repeat(
                self.data["air_pressure"].shape[0], axis=0)

            self._plot_style()

            # Set transparency for the output image.
            if transparent:
                self.fig.patch.set_alpha(0.)

            # Return the image as png embedded in a StringIO stream.
            canvas = FigureCanvas(self.fig)
            output = io.BytesIO()
            canvas.print_png(output)

            if show:
                logging.debug("saving figure to mpl_vsec.png ..")
                canvas.print_png("mpl_vsec.png")

            # Convert the image to an 8bit palette image with a significantly
            # smaller file size (~factor 4, from RGBA to one 8bit value, plus the
            # space to store the palette colours).
            # NOTE: PIL at the current time can only create an adaptive palette for
            # RGB images, hence alpha values are lost here. If transparency is
            # requested, the figure face colour is stored as the "transparent"
            # colour in the image. This works in most cases, but might lead to
            # visible artefacts in some cases.
            logging.debug("converting image to indexed palette.")
            # Read the above stored png into a PIL image and create an adaptive
            # colour palette.
            output.seek(0)  # necessary for PIL.Image.open()
            palette_img = PIL.Image.open(output).convert(mode="RGB").convert(
                "P", palette=PIL.Image.ADAPTIVE)
            output = io.BytesIO()
            if not transparent:
                logging.debug("saving figure as non-transparent PNG.")
                palette_img.save(
                    output,
                    format="PNG")  # using optimize=True doesn't change much
            else:
                # If the image has a transparent background, we need to find the
                # index of the background colour in the palette. See the
                # documentation for PIL's ImagePalette module
                # (http://www.pythonware.com/library/pil/handbook/imagepalette.htm). The
                # idea is to create a 256 pixel image with the same colour palette
                # as the original image and use it as a lookup-table. Converting the
                # lut image back to RGB gives us a list of all colours in the
                # palette. (Why doesn't PIL provide a method to directly access the
                # colours in a palette??)
                lut = palette_img.resize((256, 1))
                lut.putdata(list(range(256)))
                lut = [c[1] for c in lut.convert("RGB").getcolors()]
                facecolor_rgb = list(
                    mpl.colors.hex2color(mpl.colors.cnames[facecolor]))
                for i in [0, 1, 2]:
                    facecolor_rgb[i] = int(facecolor_rgb[i] * 255)
                facecolor_index = lut.index(tuple(facecolor_rgb))

                logging.debug(
                    "saving figure as transparent PNG with transparency index %i.",
                    facecolor_index)
                palette_img.save(output,
                                 format="PNG",
                                 transparency=facecolor_index)

            logging.debug("returning figure..")
            return output.getvalue()

        # Code for generating an XML document with the data values in ASCII format.
        # =========================================================================
        elif return_format == "text/xml":

            impl = getDOMImplementation()
            xmldoc = impl.createDocument(None, "MSS_VerticalSection_Data",
                                         None)

            # Title of this section.
            node = xmldoc.createElement("Title")
            node.appendChild(xmldoc.createTextNode(self.title))
            xmldoc.documentElement.appendChild(node)

            # Time information of this section.
            node = xmldoc.createElement("ValidTime")
            node.appendChild(
                xmldoc.createTextNode(
                    self.valid_time.strftime("%Y-%m-%dT%H:%M:%SZ")))
            xmldoc.documentElement.appendChild(node)

            node = xmldoc.createElement("InitTime")
            node.appendChild(
                xmldoc.createTextNode(
                    self.init_time.strftime("%Y-%m-%dT%H:%M:%SZ")))
            xmldoc.documentElement.appendChild(node)

            # Longitude data.
            node = xmldoc.createElement("Longitude")
            node.setAttribute("num_waypoints", f"{len(self.lons)}")

            data_str = ""
            for value in self.lons:
                data_str += str(value) + ","
            # Drop the trailing comma.
            data_str = data_str[:-1]

            node.appendChild(xmldoc.createTextNode(data_str))
            xmldoc.documentElement.appendChild(node)

            # Latitude data.
            node = xmldoc.createElement("Latitude")
            node.setAttribute("num_waypoints", f"{len(self.lats)}")

            data_str = ""
            for value in self.lats:
                data_str += str(value) + ","
            # Drop the trailing comma.
            data_str = data_str[:-1]

            node.appendChild(xmldoc.createTextNode(data_str))
            xmldoc.documentElement.appendChild(node)

            # Variable data.
            data_node = xmldoc.createElement("Data")

            for var in self.data:
                node = xmldoc.createElement(var)
                data_shape = self.data[var].shape
                node.setAttribute("num_levels", f"{data_shape[0]}")
                node.setAttribute("num_waypoints", f"{data_shape[1]}")

                # One comma-separated line per level; strip the trailing
                # comma per line and the final newline.
                data_str = ""
                for data_row in self.data[var]:
                    for value in data_row:
                        data_str += str(value) + ","
                    data_str = data_str[:-1] + "\n"
                data_str = data_str[:-1]

                node.appendChild(xmldoc.createTextNode(data_str))
                data_node.appendChild(node)

            xmldoc.documentElement.appendChild(data_node)

            # Return the XML document as formatted string.
            return xmldoc.toprettyxml(indent="  ")
Exemplo n.º 6
0
    def _parse_file(self, filename):
        """
        Scan a single netCDF file and collect its metadata.

        Arguments:
        filename -- file name, resolved relative to self._root_path

        Returns a dict with keys "vert_type", "elevations", "init_time",
        "valid_times" and "standard_names".

        Raises IOError when a coordinate variable is malformed, when valid
        times are present but unsupported, or when the vertical levels/units
        disagree with a previously parsed file of the same vertical type.
        """
        elevations = {"filename": filename, "levels": [], "units": None}
        with netCDF4.Dataset(os.path.join(self._root_path,
                                          filename)) as dataset:
            time_name, time_var = netCDF4tools.identify_CF_time(dataset)
            init_time = netCDF4tools.num2date(0, time_var.units)
            if not self.uses_inittime_dimension():
                init_time = None
            valid_times = netCDF4tools.num2date(time_var[:], time_var.units)
            if not self.uses_validtime_dimension():
                if len(valid_times) > 0:
                    # Fixed: the message previously lost the {filename}
                    # interpolation and contained an unbalanced quote.
                    raise IOError(
                        f"Skipping file '{filename}': no support for valid "
                        f"time, but multiple time steps present")
                valid_times = [None]
            lat_name, lat_var, lon_name, lon_var = netCDF4tools.identify_CF_lonlat(
                dataset)
            vert_name, vert_var, _, _, vert_type = netCDF4tools.identify_vertical_axis(
                dataset)

            # Each coordinate variable must be 1-D over its own dimension.
            if len(time_var.dimensions
                   ) != 1 or time_var.dimensions[0] != time_name:
                raise IOError("Problem with time coordinate variable")
            if len(lat_var.dimensions
                   ) != 1 or lat_var.dimensions[0] != lat_name:
                raise IOError("Problem with latitude coordinate variable")
            if len(lon_var.dimensions
                   ) != 1 or lon_var.dimensions[0] != lon_name:
                raise IOError("Problem with longitude coordinate variable")

            if vert_type != "sfc":
                elevations = {
                    "filename": filename,
                    "levels": vert_var[:],
                    "units": getattr(vert_var, "units", "dimensionless")
                }
                # Levels and units must match any previously parsed file of
                # the same vertical type.
                if vert_type in self._elevations:
                    if len(vert_var[:]) != len(
                            self._elevations[vert_type]["levels"]):
                        raise IOError(
                            f"Number of vertical levels does not fit to levels of "
                            f"previous file '{self._elevations[vert_type]['filename']}'."
                        )
                    if not np.allclose(vert_var[:],
                                       self._elevations[vert_type]["levels"]):
                        raise IOError(
                            f"vertical levels do not fit to levels of previous "
                            f"file '{self._elevations[vert_type]['filename']}'."
                        )
                    if elevations["units"] != self._elevations[vert_type][
                            "units"]:
                        raise IOError(
                            f"vertical level units do not match previous "
                            f"file '{self._elevations[vert_type]['filename']}'"
                        )

            # Collect the standard names of all plottable variables:
            # (time, [level,] lat, lon) ordering with parseable units.
            standard_names = []
            for ncvarname, ncvar in dataset.variables.items():
                if hasattr(ncvar,
                           "standard_name") and (len(ncvar.dimensions) >= 3):
                    if (ncvar.dimensions[0] != time_name
                            or ncvar.dimensions[-2] != lat_name
                            or ncvar.dimensions[-1] != lon_name):
                        logging.error(
                            "Skipping variable '%s' in file '%s': Incorrect order of dimensions",
                            ncvarname, filename)
                        continue
                    if not hasattr(ncvar, "units"):
                        logging.error(
                            "Skipping variable '%s' in file '%s': No units attribute",
                            ncvarname, filename)
                        continue
                    if ncvar.standard_name != "time":
                        try:
                            units(ncvar.units)
                        except (AttributeError, ValueError,
                                pint.UndefinedUnitError,
                                pint.DefinitionSyntaxError):
                            logging.error(
                                "Skipping variable '%s' in file '%s': unparseable units attribute '%s'",
                                ncvarname, filename, ncvar.units)
                            continue
                    if len(ncvar.shape) == 4 and vert_name in ncvar.dimensions:
                        standard_names.append(ncvar.standard_name)
                    elif len(ncvar.shape) == 3 and vert_type == "sfc":
                        standard_names.append(ncvar.standard_name)
        return {
            "vert_type": vert_type,
            "elevations": elevations,
            "init_time": init_time,
            "valid_times": valid_times,
            "standard_names": standard_names
        }