Code example #1
def mean_vertical_eddy_flux(cubelist, quantity, weight_by_density=True, coord=UM_HGT):
    """Calculate the spatial and vertical mean of the density-weighted vertical eddy flux of a quantity."""
    vf = spatial(vertical_eddy_flux(cubelist, quantity, w_dens=True), "mean")
    if weight_by_density:
        weight_by = spatial(cubelist.extract_strict("air_density"), "mean")
    else:
        weight_by = None
    mean_vf = vertical_mean(vf, coord=coord, weight_by=weight_by)
    return mean_vf
Code example #2
def mean_dry_lapse_rate(cubelist, coord=UM_HGT):
    """Calculate the density-weighted vertical mean of the vertical temperature gradient."""
    temp = cubelist.extract_strict("air_temperature")
    rho = cubelist.extract_strict("air_density")
    temp = spatial(temp, "mean")
    rho = spatial(rho, "mean")
    dtdz = differentiate(temp, coord)
    dtdz = dtdz.interpolate([(coord, rho.coord(coord).points)], iris.analysis.Linear())
    res = vertical_mean(dtdz, coord=coord, weight_by=rho)
    return res
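
A minimal NumPy sketch of the same chain of operations (differentiate, then take a density-weighted vertical mean); the profiles below are illustrative assumptions, not UM output:

import numpy as np

# Toy Earth-like profiles on a height grid (assumed values).
z = np.linspace(0.0, 1.5e4, 16)                 # height [m]
temp = 288.0 - 6.5e-3 * z                       # linear temperature profile [K]
rho = 1.2 * np.exp(-z / 8.0e3)                  # exponential air density [kg m-3]

dtdz = np.gradient(temp, z)                     # vertical temperature gradient dT/dz
lapse_rate = np.sum(dtdz * rho) / np.sum(rho)   # density-weighted vertical mean
print(lapse_rate)                               # ~ -6.5e-3 K m-1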
Code example #3
def vertical_eddy_flux(cubelist, quantity, w_dens=True):
    """Calculate the vertical eddy flux of a quantity, optionally weighted by air density."""
    w = cubelist.extract_strict("upward_air_velocity")
    q = cubelist.extract_strict(quantity)
    w_eddy = w - spatial(w, "mean")
    q_eddy = q - spatial(q, "mean")
    vf = w_eddy * q_eddy
    if w_dens:
        vf *= cubelist.extract_strict("air_density")
    vf.rename(f"vertical_flux_of_{quantity}")
    return vf
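
The decomposition is the standard eddy one, w'q' = (w - <w>)(q - <q>), where <.> denotes the horizontal mean. A toy NumPy sketch of the pointwise product (the random fields are illustrative assumptions):

import numpy as np

rng = np.random.default_rng(42)
w = rng.normal(size=(32, 64))    # toy vertical velocity field [m s-1]
q = rng.normal(size=(32, 64))    # toy transported quantity

w_eddy = w - w.mean()            # deviation from the horizontal mean
q_eddy = q - q.mean()
vf = w_eddy * q_eddy             # pointwise eddy flux w'q'
print(vf.mean())                 # area-mean eddy flux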
Code example #4
def nondim_rossby_deformation_radius(cubelist, const=None, method="direct"):
    r"""
    Estimate the non-dimensional Rossby radius of deformation.

    Parameters
    ----------
    cubelist: iris.cube.CubeList
        Input cubelist.
    const: aeolus.const.const.ConstContainer, optional
        If not given, an attempt is made to retrieve planetary constants
        from the attributes of a cube in the cube list.
    method: str, optional
        Method of calculation.
        "direct" (default): estimate the scale height and Brunt-Väisälä frequency from air temperature.
        "leconte2013": use the isothermal approximation of Leconte et al. (2013).

    Returns
    -------
    iris.cube.Cube
        Cube with collapsed spatial dimensions.

    References
    ----------
    * Leconte et al. (2013), https://doi.org/10.1051/0004-6361/201321042
      (for `method=leconte2013`)
    .. math::

        \lambda_{Rossby} = \sqrt{\frac{NH}{2\Omega R_p}}
        \approx \sqrt{\frac{R}{c_p^{1/2}} \frac{T^{1/2}}{2\Omega R_p}}

    """
    if const is None:
        const = cubelist[0].attributes["planet_conf"]

    omega = (const.day / (2 * np.pi)) ** (-1)
    double_omega_radius = ScalarCube.from_cube(2 * omega * const.radius)

    if method == "direct":
        rho = cubelist.extract_strict("air_density").copy()
        bv_freq_proxy = spatial(vertical_mean(bv_freq_sq(cubelist), weight_by=rho), "mean") ** 0.5
        temp_proxy = spatial(
            vertical_mean(cubelist.extract_strict("air_temperature"), weight_by=rho), "mean"
        )
        scale_height = const.dry_air_gas_constant.asc * temp_proxy / const.gravity.asc
        nondim_rossby = (bv_freq_proxy * scale_height / double_omega_radius.asc) ** 0.5
    elif method == "leconte2013":
        temp_proxy = toa_eff_temp(cubelist)
        _const_term = ScalarCube.from_cube(const.dry_air_gas_constant / double_omega_radius)
        sqrt_t_over_cp = (temp_proxy / const.dry_air_spec_heat_press.asc) ** 0.5
        nondim_rossby = (sqrt_t_over_cp * _const_term.asc) ** 0.5
    else:
        raise ValueError(f"Unknown method: {method}")
    nondim_rossby.convert_units("1")
    nondim_rossby.rename("nondimensional_rossby_deformation_radius")
    return nondim_rossby
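
As a rough sanity check of the isothermal ("leconte2013") expression above, it can be evaluated with plain NumPy for Earth-like parameters; all numbers below are illustrative assumptions:

import numpy as np

R = 287.0                     # dry air gas constant [J kg-1 K-1] (assumed)
c_p = 1004.0                  # specific heat at constant pressure [J kg-1 K-1] (assumed)
T = 255.0                     # effective temperature [K] (assumed)
omega = 2 * np.pi / 86400.0   # rotation rate [s-1]
R_p = 6.371e6                 # planetary radius [m]

# lambda_Rossby ~ sqrt( R * sqrt(T / c_p) / (2 * omega * R_p) )
nondim_rossby = np.sqrt(R * np.sqrt(T / c_p) / (2 * omega * R_p))
print(nondim_rossby)          # O(0.1-1) for Earth-like parameters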
Code example #5
def mse_hdiv_mean(cubelist, zcoord=UM_HGT):
    """
    Calculate time and area means of the vertically integrated horizontal
    divergence of moist static energy component fluxes.
    """
    rho = cubelist.extract_strict("air_density")
    ensure_bounds(rho, [zcoord])
    u = cubelist.extract_strict("x_wind")
    ensure_bounds(u, [zcoord])
    v = cubelist.extract_strict("y_wind")
    ensure_bounds(v, [zcoord])

    mse_cmpnts = moist_static_energy(cubelist)

    mse_hdiv_cmpnts = {}
    for key, cmpnt in mse_cmpnts.items():
        ensure_bounds(cmpnt, [zcoord])
        flux_x = u * cmpnt
        flux_x.rename(f"eastward_{cmpnt.name()}_flux")
        flux_y = v * cmpnt
        flux_y.rename(f"northward_{cmpnt.name()}_flux")

        fluxes_vec = iris.cube.CubeList([flux_x, flux_y])

        flux_div = hdiv(fluxes_vec, *[i.name() for i in fluxes_vec])

        flux_div_vmean = integrate(rho * flux_div, zcoord)
        flux_div_mean = spatial(flux_div_vmean, "mean")
        try:
            flux_div_mean = flux_div_mean.collapsed(UM_TIME, iris.analysis.MEAN)
        except iris.exceptions.CoordinateCollapseError:
            pass
        flux_div_mean.rename(f"integrated_horizontal_divergence_of_{cmpnt.name()}")
        flux_div_mean.convert_units("W m^-2")
        mse_hdiv_cmpnts[key] = flux_div_mean
    return mse_hdiv_cmpnts
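
A flat Cartesian analogue of the flux-divergence step, written with NumPy gradients, may help to see what hdiv computes; the grid and fields below are illustrative assumptions (the real calculation presumably accounts for the spherical model grid):

import numpy as np

x = y = np.linspace(0.0, 1.0e6, 64)                    # toy Cartesian grid [m]
xx, yy = np.meshgrid(x, y, indexing="xy")
u = 10.0 * np.sin(2 * np.pi * xx / 1.0e6)              # zonal wind [m s-1]
v = 5.0 * np.cos(2 * np.pi * yy / 1.0e6)               # meridional wind [m s-1]
mse = 3.0e5 + 1.0e3 * np.sin(2 * np.pi * xx / 1.0e6)   # toy MSE field [J kg-1]

flux_x, flux_y = u * mse, v * mse
div = np.gradient(flux_x, x, axis=1) + np.gradient(flux_y, y, axis=0)
print(div.mean())                                      # area-mean flux divergence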
Code example #6
def _wspd_typical(cubelist, aggr="mean"):
    """Calculate a typical horizontal wind speed using the given spatial aggregation."""
    u = cubelist.extract_strict("x_wind")
    v = cubelist.extract_strict("y_wind")
    return spatial((u ** 2 + v ** 2) ** 0.5, aggr)
Code example #7
def wspd_typical(cubelist, aggr="median"):
    """Calculate a typical horizontal wind speed, aggregated over space, time and height."""
    u = cubelist.extract_strict("x_wind")
    v = cubelist.extract_strict("y_wind")
    return spatial((u ** 2 + v ** 2) ** 0.5, aggr).collapsed(
        [UM_TIME, UM_HGT], getattr(iris.analysis, aggr.upper())
    )
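
The aggregator is looked up dynamically from the string, so any name with an upper-case counterpart in iris.analysis should work; a quick check (assuming iris is installed):

import iris.analysis

for aggr in ("mean", "median", "rms"):
    # e.g. "rms" -> iris.analysis.RMS, "mean" -> iris.analysis.MEAN
    print(aggr, getattr(iris.analysis, aggr.upper()))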
Code example #8
MODEL_TIMESTEP = 86400 / 72
FILE_REGEX = r"umglaa.p[a-z]{1}[0]{6}(?P<timestamp>[0-9]{2,4})_00"

ONLY_GLOBAL = ["eta", "b_alb", "t_sfc_diff_dn", "nondim_rossby", "nondim_rhines"]
ONLY_LAM = ["hflux_q", "hflux_t"]

DIAGS = {
    "mse_hdiv": lambda cl: mse_hdiv_mean(cl)["mse"],
    "dse_hdiv": lambda cl: mse_hdiv_mean(cl)["dse"],
    "lse_hdiv": lambda cl: mse_hdiv_mean(cl)["lse"],
    "nondim_rossby": lambda cl: nondim_rossby_deformation_radius(cl),
    "nondim_rhines": lambda cl: nondim_rhines_number(cl.extract(hgt_cnstr_5_20km)),
    "e_net_toa": toa_net_energy,
    "eta": lambda cl: heat_redist_eff(cl, NIGHTSIDE, DAYSIDE),
    "b_alb": bond_albedo,
    "toa_olr": lambda cl: spatial(cl.extract_strict("toa_outgoing_longwave_flux"), "mean"),
    "toa_osr": lambda cl: spatial(cl.extract_strict("toa_outgoing_shortwave_flux"), "mean"),
    "cre_sw": lambda cl: toa_cloud_radiative_effect(cl, kind="sw"),
    "cre_lw": lambda cl: toa_cloud_radiative_effect(cl, kind="lw"),
    "cre_tot": lambda cl: toa_cloud_radiative_effect(cl, kind="total"),
    "gh_norm": ghe_norm,
    "e_net_sfc": sfc_net_energy,
    "t_sfc_diff_dn": lambda cl: region_mean_diff(cl, "surface_temperature", DAYSIDE, NIGHTSIDE),
    "t_sfc": lambda cl: spatial(cl.extract_strict("surface_temperature"), "mean"),
    "t_sfc_min": lambda cl: spatial(cl.extract_strict("surface_temperature"), "min"),
    "t_sfc_max": lambda cl: spatial(cl.extract_strict("surface_temperature"), "max"),
    "t_sfc_extremes": lambda cl: minmaxdiff(cl, name="surface_temperature"),
    "t_eff": toa_eff_temp,
    "wspd_rms": lambda cl: wspd_typical(cl, "rms"),
    "wspd_rms_0_15km": lambda cl: wspd_typical(cl.extract(hgt_cnstr_0_15km), "rms"),
    "wspd_mean": lambda cl: wspd_typical(cl, "mean"),
Code example #9
def main():
    """Main function."""
    # Create a subdirectory for processed data
    outdir = mypaths.nsdir / "_processed"
    outdir.mkdir(parents=True, exist_ok=True)

    for planet in PLANET_ALIASES.keys():
        L.info(f"planet = {planet}")
        for run_key in NS_RUN_ALIASES.keys():
            L.info(f"run_key = {run_key}")
            subdir = f"{planet}_{run_key}"
            for model_type, model_specs in NS_MODEL_TYPES.items():
                L.info(f"model_type = {model_type}")
                label = f"{planet}_{run_key}_{model_type}"
                tmp_cl = iris.cube.CubeList()
                for i in range(NS_CYCLE_TIMES[subdir]["ndays"]):
                    _cycle = (NS_CYCLE_TIMES[subdir]["start"] +
                              timedelta(days=i)).strftime(DT_FMT)
                    L.info(f"_cycle = {_cycle}")
                    fpath = (mypaths.nsdir / subdir / _cycle /
                             model_specs["path"].parent / "_processed" /
                             f"{label}_{_cycle}.nc")
                    L.info(f"fpath = {fpath}")
                    # Initialise a `Run` by loading processed data
                    run = Run(
                        files=fpath,
                        name=label,
                        planet=planet,
                        model_type=model_type,
                        timestep=model_specs["timestep"],
                        processed=True,
                    )
                    # Derive additional fields
                    run.add_data(calc_derived_cubes)

                    for cube in run.proc.extract(DIM_CONSTR_ZYX):
                        L.info(f"cube = {cube.name()}")
                        ave_cube = spatial(cube.extract(SS_REGION.constraint),
                                           "mean")
                        tmp_cl.append(ave_cube)

                for cube in tmp_cl:
                    try:
                        cube.attributes.pop("planet_conf")
                    except KeyError:
                        pass
                tmp_cl = tmp_cl.merge()
                L.debug(f"merged cubelist = {tmp_cl}")
                _dict = {}
                for cube in tmp_cl:
                    _dict[cube.name()] = xr.DataArray.from_iris(cube)
                ds = xr.merge([_dict], compat="override")
                L.debug(f"dataset = {ds}")
                ds.attrs.update(
                    {
                        "region": SS_REGION.to_str(),
                        "model_type": run.model_type,
                        "run_key": run_key,
                        "planet": planet,
                    }
                )
                fname_out = outdir / f"ns_area_mean_vprof_{run.name}.nc"
                ds.to_netcdf(fname_out)
                L.success(f"Saved to {fname_out}")