# Imports for the snippets below (assumed to come from numpy, scipy, pytest
# and the virocon package's public API).
import numpy as np
import pytest
import scipy.stats as sts

from virocon import (
    DependenceFunction,
    ExponentiatedWeibullDistribution,
    GlobalHierarchicalModel,
    HighestDensityContour,
    ISORMContour,
    LogNormalDistribution,
    WidthOfIntervalSlicer,
    calculate_alpha,
    plot_2D_contour,
    plot_2D_isodensity,
    plot_dependence_functions,
    plot_marginal_quantiles,
    read_ec_benchmark_dataset,
)


def test_exponentiated_weibull_distribution_pdf():
    """
    Tests the PDF of the exponentiated Weibull distribution.
    """
    # Define dist with parameters from the distribution fitted to
    # dataset A with the MLE in https://arxiv.org/pdf/1911.12835.pdf .
    dist = ExponentiatedWeibullDistribution(alpha=0.0373,
                                            beta=0.4743,
                                            delta=46.6078)

    # PDF(0.7) should be roughly 1.1, see Figure 3 in
    # https://arxiv.org/pdf/1911.12835.pdf .
    x = dist.pdf(0.7)
    assert x > 0.6
    assert x < 1.5

    # PDF(2) should be roughly 0.1, see Figure 12
    # in https://arxiv.org/pdf/1911.12835.pdf .
    x = dist.pdf(2)
    assert x > 0.05
    assert x < 0.2

    # PDF(value less than 0) should be 0.
    x = dist.pdf(-1)
    assert x == 0
def test_exponentiated_weibull_distribution_icdf():
    """
    Tests the ICDF of the exponentiated Weibull distribution.
    """
    # Define dist with parameters from the distribution fitted to
    # dataset A with the MLE in https://arxiv.org/pdf/1911.12835.pdf .
    dist = ExponentiatedWeibullDistribution(alpha=0.0373,
                                            beta=0.4743,
                                            delta=46.6078)

    # ICDF(0.5) should be roughly 0.8, see Figure 12 in
    # https://arxiv.org/pdf/1911.12835.pdf .
    x = dist.icdf(0.5)
    assert x > 0.5
    assert x < 1

    # ICDF(0.9) should be roughly 1.8, see Figure 12
    # in https://arxiv.org/pdf/1911.12835.pdf .
    x = dist.icdf(0.9)
    assert x > 1
    assert x < 2

    # ICDF(value greater than 1) should be nan.
    x = dist.icdf(5)
    assert np.isnan(x)
def test_ExponentiatedWeibull_pdf_cdf_icdf(exp_weibull_reference_data):
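    """
    Tests the PDF, CDF and ICDF of the exponentiated Weibull distribution
    against precalculated reference data.
    """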
    OMAE2020_param = {"alpha": 10.0, "beta": 2.42, "delta": 0.761}
    x = np.linspace(2, 15, num=100)
    p = np.linspace(0.01, 0.99, num=100)
    my_expweibull = ExponentiatedWeibullDistribution(**OMAE2020_param)
    my_pdf = my_expweibull.pdf(x)
    my_cdf = my_expweibull.cdf(x)
    my_icdf = my_expweibull.icdf(p)

    ref_pdf = exp_weibull_reference_data["ref_pdf"]
    ref_cdf = exp_weibull_reference_data["ref_cdf"]
    ref_icdf = exp_weibull_reference_data["ref_icdf"]

    np.testing.assert_allclose(my_pdf, ref_pdf)
    np.testing.assert_allclose(my_cdf, ref_cdf)
    np.testing.assert_allclose(my_icdf, ref_icdf)
def test_fitting_exponentiated_weibull():
    """
    Tests estimating the parameters of the exponentiated Weibull distribution.
    """

    dist = ExponentiatedWeibullDistribution()

    # Draw 1000 samples from a Weibull distribution with shape=1.5 and scale=3,
    # which represents significant wave height.
    hs = sts.weibull_min.rvs(1.5, loc=0, scale=3, size=1000, random_state=42)

    dist.fit(hs, method="wlsq", weights="quadratic")

    # shape parameter / beta should be about 1.5.
    assert dist.parameters["beta"] > 1
    assert dist.parameters["beta"] < 2

    # scale parameter / alpha should be about 3.
    assert dist.parameters["alpha"] > 2
    assert dist.parameters["alpha"] < 4

    # shape2 parameter / delta should be about 1.
    assert dist.parameters["delta"] > 0.5
    assert dist.parameters["delta"] < 2

    dist = ExponentiatedWeibullDistribution(f_delta=1)

    dist.fit(hs, method="wlsq", weights="quadratic")

    # shape parameter / beta should be about 1.5.
    assert dist.parameters["beta"] > 1
    assert dist.parameters["beta"] < 2

    # scale parameter / alpha should be about 3.
    assert dist.parameters["alpha"] > 2
    assert dist.parameters["alpha"] < 4

    # shape2 parameter / delta should be 1.
    assert dist.parameters["delta"] == 1

    # Check whether the fitted distribution has a working CDF and PDF.
    assert dist.cdf(2) > 0
    assert dist.pdf(2) > 0
def test_fit_exponentiated_weibull_with_zero():
    """
    Tests fitting the exponentiated Weibull distribution if the dataset
    contains 0s.
    """

    dist = ExponentiatedWeibullDistribution()

    # Draw 1000 samples from a Weibull distribution with shape=1.5 and scale=3,
    # which represents significant wave height.
    hs = sts.weibull_min.rvs(1.5, loc=0, scale=3, size=1000, random_state=42)

    # Add zero-elements to the dataset.
    hs = np.append(hs, [0, 0, 1.3])

    dist.fit(hs, method="wlsq", weights="quadratic")

    assert dist.parameters["beta"] == pytest.approx(1.5, abs=0.5)
    assert dist.parameters["alpha"] == pytest.approx(3, abs=1)
    assert dist.parameters["delta"] == pytest.approx(1, abs=0.5)
def test_exponentiated_weibull_distribution_cdf():
    """
    Tests the CDF of the exponentiated Weibull distribution.
    """
    # Define dist with parameters from the distribution fitted to
    # dataset A with the MLE in https://arxiv.org/pdf/1911.12835.pdf .
    dist = ExponentiatedWeibullDistribution(alpha=0.0373,
                                            beta=0.4743,
                                            delta=46.6078)

    # CDF(1) should be roughly 0.7, see Figure 12 in
    # https://arxiv.org/pdf/1911.12835.pdf .
    p = dist.cdf(1)
    np.testing.assert_allclose(p, 0.7, atol=0.1)

    # CDF(4) should be roughly 0.993, see Figure 12 in
    # https://arxiv.org/pdf/1911.12835.pdf .
    p = dist.cdf(4)
    assert p > 0.99
    assert p < 0.999

    # CDF(negative value) should be 0
    p = dist.cdf(-1)
    assert p == 0
def test_ISORM(reference_coordinates_ISORM):
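    """
    Tests computing an ISORM contour with a predefined Hs-Tz model and
    compares the coordinates with precalculated reference coordinates.
    """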

    # Logarithmic square function.
    def _lnsquare2(x, a=3.62, b=5.77):
        return np.log(a + b * np.sqrt(x / 9.81))

    # 3-parameter function that asymptotically decreases (a dependence function).
    def _asymdecrease3(x, a=0, b=0.324, c=0.404):
        return a + b / (1 + c * x)

    lnsquare2 = DependenceFunction(_lnsquare2)
    asymdecrease3 = DependenceFunction(_asymdecrease3)

    dist_description_0 = {
        "distribution":
        ExponentiatedWeibullDistribution(alpha=0.207, beta=0.684, delta=7.79),
    }

    dist_description_1 = {
        "distribution": LogNormalDistribution(),
        "conditional_on": 0,
        "parameters": {
            "mu": lnsquare2,
            "sigma": asymdecrease3
        },
    }

    ghm = GlobalHierarchicalModel([dist_description_0, dist_description_1])

    state_duration = 3
    return_period = 20
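    # Exceedance probability of a single sea state: with 3-hour states and a
    # 20-year return period, alpha = 3 / (20 * 365.25 * 24), roughly 1.7e-5.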
    alpha = calculate_alpha(state_duration, return_period)
    my_isorm = ISORMContour(ghm, alpha)

    my_coordinates = my_isorm.coordinates

    np.testing.assert_allclose(my_coordinates, reference_coordinates_ISORM)
def test_ExponentiatedWeibull_wlsq_fit(exp_weibull_reference_data_wlsq_fit):
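    """
    Tests the least squares fit of the exponentiated Weibull distribution
    against precalculated reference data.
    """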
    x = np.linspace(2, 15, num=100)
    p = np.linspace(0.01, 0.99, num=100)

    true_alpha = 10
    true_beta = 2.42
    true_delta = 0.761
    expweibull_samples = sts.exponweib.rvs(a=true_delta,
                                           c=true_beta,
                                           loc=0,
                                           scale=true_alpha,
                                           size=100,
                                           random_state=42)

    my_expweibull = ExponentiatedWeibullDistribution()

    my_expweibull.fit(expweibull_samples, method="lsq", weights="quadratic")

    my_pdf = my_expweibull.pdf(x)
    my_cdf = my_expweibull.cdf(x)
    my_icdf = my_expweibull.icdf(p)
    my_alpha = my_expweibull.alpha
    my_beta = my_expweibull.beta
    my_delta = my_expweibull.delta

    ref_pdf = exp_weibull_reference_data_wlsq_fit["ref_pdf"]
    ref_cdf = exp_weibull_reference_data_wlsq_fit["ref_cdf"]
    ref_icdf = exp_weibull_reference_data_wlsq_fit["ref_icdf"]
    ref_alpha = exp_weibull_reference_data_wlsq_fit["ref_alpha"]
    ref_beta = exp_weibull_reference_data_wlsq_fit["ref_beta"]
    ref_delta = exp_weibull_reference_data_wlsq_fit["ref_delta"]

    np.testing.assert_allclose(my_alpha, ref_alpha)
    np.testing.assert_allclose(my_beta, ref_beta)
    np.testing.assert_allclose(my_delta, ref_delta)
    np.testing.assert_allclose(my_pdf, ref_pdf)
    np.testing.assert_allclose(my_cdf, ref_cdf)
    np.testing.assert_allclose(my_icdf, ref_icdf)
def get_OMAE2020_V_Hs():
    """
    Get OMAE2020 wind speed and significant wave height model.
    
    Get the descriptions necessary to create the wind speed and 
    significant wave height model as described by Haselsteiner et al. [1]_. 
    
    Returns
    -------
    dist_descriptions : list of dict
        List of dictionaries containing the dist descriptions for each dimension.
        Can be used to create a GlobalHierarchicalModel.
    fit_descriptions : list of dict
        List of dictionaries containing the fit description for each dimension.
        Can be passed to the fit method of GlobalHierarchicalModel.
    semantics : dict
        Dictionary with a semantic description of the model.
        Can be passed to plot functions.
        
    References
    ----------
    .. [1] Haselsteiner, A.F.; Sander, A.; Ohlendorf, J.H.; Thoben, K.D. (2020)
        Global hierarchical models for wind and wave contours: Physical
        interpretations of the dependence functions. OMAE 2020, Fort Lauderdale,
        USA. Proceedings of the 39th International Conference on Ocean, 
        Offshore and Arctic Engineering.
    """

    def _logistics4(x, a=1, b=1, c=-1, d=1):
        return a + b / (1 + np.exp(c * (x - d)))

    def _alpha3(x, a, b, c, d_of_x):
        return (a + b * x ** c) / 2.0445 ** (1 / d_of_x(x))

    logistics_bounds = [(0, None), (0, None), (None, 0), (0, None)]

    alpha_bounds = [(0, None), (0, None), (None, None)]

    beta_dep = DependenceFunction(_logistics4, logistics_bounds, weights=lambda x, y: y)
    alpha_dep = DependenceFunction(
        _alpha3, alpha_bounds, d_of_x=beta_dep, weights=lambda x, y: y
    )

    dist_description_v = {
        "distribution": ExponentiatedWeibullDistribution(),
        "intervals": WidthOfIntervalSlicer(2, min_n_points=50),
    }

    dist_description_hs = {
        "distribution": ExponentiatedWeibullDistribution(f_delta=5),
        "conditional_on": 0,
        "parameters": {"alpha": alpha_dep, "beta": beta_dep,},
    }

    dist_descriptions = [dist_description_v, dist_description_hs]

    fit_description_v = {"method": "wlsq", "weights": "quadratic"}
    fit_description_hs = {"method": "wlsq", "weights": "quadratic"}
    fit_descriptions = [fit_description_v, fit_description_hs]

    semantics = {
        "names": ["Mean wind speed", "Significant wave height"],
        "symbols": ["V", "H_s"],
        "units": ["m s$^{-1}$", "m",],
    }

    return dist_descriptions, fit_descriptions, semantics
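
# Usage sketch for get_OMAE2020_V_Hs (assumes a two-column dataset `data`
# with wind speed in column 0 and significant wave height in column 1):
#
#     dist_descriptions, fit_descriptions, semantics = get_OMAE2020_V_Hs()
#     model = GlobalHierarchicalModel(dist_descriptions)
#     model.fit(data, fit_descriptions)
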
def get_OMAE2020_Hs_Tz():
    """
    Get OMAE2020 significant wave height and wave period model.
    
    Get the descriptions necessary to create the significant wave height 
    and wave period model as described by Haselsteiner et al. [1]_. 
    
    Returns
    -------
    dist_descriptions : list of dict
        List of dictionaries containing the dist descriptions for each dimension.
        Can be used to create a GlobalHierarchicalModel.
    fit_descriptions : list of dict
        List of dictionaries containing the fit description for each dimension.
        Can be passed to the fit method of GlobalHierarchicalModel.
    semantics : dict
        Dictionary with a semantic description of the model.
        Can be passed to plot functions.
        
    References
    ----------
    .. [1] Haselsteiner, A.F.; Sander, A.; Ohlendorf, J.H.; Thoben, K.D. (2020)
        Global hierarchical models for wind and wave contours: Physical
        interpretations of the dependence functions. OMAE 2020, Fort Lauderdale,
        USA. Proceedings of the 39th International Conference on Ocean, 
        Offshore and Arctic Engineering.
    """

    def _asymdecrease3(x, a, b, c):
        return a + b / (1 + c * x)

    def _lnsquare2(x, a, b, c):
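        # Note: c is unused here; keeping three parameters lets mu_dep reuse
        # the same three-entry bounds list as sigma_dep.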
        return np.log(a + b * np.sqrt(np.divide(x, 9.81)))

    bounds = [(0, None), (0, None), (None, None)]

    sigma_dep = DependenceFunction(_asymdecrease3, bounds=bounds)
    mu_dep = DependenceFunction(_lnsquare2, bounds=bounds)

    dist_description_hs = {
        "distribution": ExponentiatedWeibullDistribution(),
        "intervals": WidthOfIntervalSlicer(width=0.5, min_n_points=50),
    }

    dist_description_tz = {
        "distribution": LogNormalDistribution(),
        "conditional_on": 0,
        "parameters": {"sigma": sigma_dep, "mu": mu_dep,},
    }

    dist_descriptions = [dist_description_hs, dist_description_tz]

    fit_description_hs = {"method": "wlsq", "weights": "quadratic"}
    fit_descriptions = [fit_description_hs, None]

    semantics = {
        "names": ["Significant wave height", "Zero-crossing wave period"],
        "symbols": ["H_s", "T_z"],
        "units": ["m", "s"],
    }

    return dist_descriptions, fit_descriptions, semantics
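
# Usage sketch for get_OMAE2020_Hs_Tz (assumes a two-column dataset `data`
# with Hs in column 0 and Tz in column 1). A None entry in fit_descriptions
# selects the default fitting method for that dimension:
#
#     dist_descriptions, fit_descriptions, semantics = get_OMAE2020_Hs_Tz()
#     model = GlobalHierarchicalModel(dist_descriptions)
#     model.fit(data, fit_descriptions)
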
# Example 11 (notebook-style snippet): set up the OMAE2020 V-Hs model and
# fit an exponentiated Weibull distribution with least squares. The helper
# functions below are repeated here so the snippet is self-contained; they
# match the definitions used elsewhere in this file.
def _logistics4(x, a=1, b=1, c=-1, d=1):
    return a + b / (1 + np.exp(c * (x - d)))


def _alpha3(x, a, b, c, d_of_x):
    return (a + b * x ** c) / 2.0445 ** (1 / d_of_x(x))


logistics_bounds = [(0, None), (0, None), (None, 0), (0, None)]

alpha_bounds = [(0, None), (0, None), (None, None)]

beta_dep = DependenceFunction(_logistics4,
                              logistics_bounds,
                              weights=lambda x, y: y)
alpha_dep = DependenceFunction(_alpha3,
                               alpha_bounds,
                               d_of_x=beta_dep,
                               weights=lambda x, y: y)

dist_description_vs = {
    "distribution": ExponentiatedWeibullDistribution(),
    "intervals": WidthOfIntervalSlicer(2, min_n_points=50),
}

dist_description_hs = {
    "distribution": ExponentiatedWeibullDistribution(f_delta=5),
    "conditional_on": 0,
    "parameters": {
        "alpha": alpha_dep,
        "beta": beta_dep,
    },
}

ghm = GlobalHierarchicalModel([dist_description_vs, dist_description_hs])

fit_description_vs = {"method": "wlsq", "weights": "quadratic"}
#                   "delta" : 0.761
#                   }

x = np.linspace(2, 15, num=100)
p = np.linspace(0.01, 0.99, num=100)

true_alpha = 10
true_beta = 2.42
true_delta = 0.761
expweibull_samples = sts.exponweib.rvs(
    a=true_delta, c=true_beta, loc=0, scale=true_alpha, size=100, random_state=42
)

# %%
# my_expweibull = ExponentiatedWeibullDistribution(**OMAE2020_param)
my_expweibull = ExponentiatedWeibullDistribution()

my_expweibull.fit(expweibull_samples, method="lsq", weights="quadratic")

my_pdf = my_expweibull.pdf(x)
my_cdf = my_expweibull.cdf(x)
my_icdf = my_expweibull.icdf(p)

my_alpha = my_expweibull.alpha
my_beta = my_expweibull.beta
my_delta = my_expweibull.delta

# %%
import sys

sys.path.append("../viroconcom")
def test_v_hs_hd_contour():
    """
    Use a wind speed - wave height dataset, fit the joint 
    distribution that was proposed by Haselsteiner et al. (2020)
    and compute a highest density contour. This test reproduces
    the results presented in Haselestiner et al. (2020). The
    coorindates are availble at https://github.com/ec-benchmark-organizers/
    ec-benchmark/blob/master/results/exercise-1/contribution-4/haselsteiner_
    andreas_dataset_d_50.txt

    Such a work flow is for example typical when generationg 
    a 50-year contour for DLC 1.6 in the offshore wind standard
    IEC 61400-3-1.

    Haselsteiner, A. F., Sander, A., Ohlendorf, J.-H., & Thoben, K.-D. (2020). 
    Global hierarchical models for wind and wave contours: Physical 
    interpretations of the dependence functions. Proc. 39th International 
    Conference on Ocean, Offshore and Arctic Engineering (OMAE 2020). 
    https://doi.org/10.1115/OMAE2020-18668

    International Electrotechnical Commission. (2019). Wind energy 
    generation systems - Part 3-1: Design requirements for fixed 
    offshore wind turbines (IEC 61400-3-1).
    """

    data = read_ec_benchmark_dataset("datasets/ec-benchmark_dataset_D.txt")

    def _logistics4(x, a=1, b=1, c=-1, d=1):
        return a + b / (1 + np.exp(c * (x - d)))

    def _alpha3(x, a, b, c, d_of_x):
        return (a + b * x**c) / 2.0445**(1 / d_of_x(x))

    logistics_bounds = [(0, None), (0, None), (None, 0), (0, None)]

    alpha_bounds = [(0, None), (0, None), (None, None)]

    beta_dep = DependenceFunction(_logistics4,
                                  logistics_bounds,
                                  weights=lambda x, y: y)
    alpha_dep = DependenceFunction(_alpha3,
                                   alpha_bounds,
                                   d_of_x=beta_dep,
                                   weights=lambda x, y: y)

    dist_description_v = {
        "distribution": ExponentiatedWeibullDistribution(),
        "intervals": WidthOfIntervalSlicer(width=2),
    }

    dist_description_hs = {
        "distribution": ExponentiatedWeibullDistribution(f_delta=5),
        "conditional_on": 0,
        "parameters": {
            "alpha": alpha_dep,
            "beta": beta_dep,
        },
    }

    model = GlobalHierarchicalModel([dist_description_v, dist_description_hs])

    fit_description_vs = {"method": "wlsq", "weights": "quadratic"}
    fit_description_hs = {"method": "wlsq", "weights": "quadratic"}

    model.fit(data, [fit_description_vs, fit_description_hs])

    axs = plot_marginal_quantiles(model, data)
    axs = plot_dependence_functions(model)
    ax = plot_2D_isodensity(model, data)

    alpha = calculate_alpha(1, 50)
    limits = [(0, 35), (0, 20)]
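    # `limits` span the grid on which the joint density is evaluated;
    # `deltas` set the grid spacing for the numerical integration that
    # determines the contour's enclosed probability.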
    contour = HighestDensityContour(model,
                                    alpha,
                                    limits=limits,
                                    deltas=[0.2, 0.2])

    coordinates = contour.coordinates
    np.testing.assert_allclose(max(coordinates[:, 0]), 29.9, atol=0.2)
    np.testing.assert_allclose(max(coordinates[:, 1]), 15.5, atol=0.2)
    np.testing.assert_allclose(min(coordinates[:, 0]), 0, atol=0.1)
    np.testing.assert_allclose(min(coordinates[:, 1]), 0, atol=0.1)

    ax = plot_2D_contour(contour, sample=data)
# Example 14: evaluate an Hs-Tz hierarchical model with predefined parameters.
# Logarithmic square function.
def _lnsquare2(x, a=3.62, b=5.77):
    return np.log(a + b * np.sqrt(x / 9.81))


# 3-parameter function that asymptotically decreases (a dependence function).
def _asymdecrease3(x, a=0, b=0.324, c=0.404):
    return a + b / (1 + c * x)


lnsquare2 = DependenceFunction(_lnsquare2)
asymdecrease3 = DependenceFunction(_asymdecrease3)

dist_description_0 = {
    "distribution": ExponentiatedWeibullDistribution(
        alpha=0.207, beta=0.684, delta=7.79
    ),
}

dist_description_1 = {
    "distribution": LogNormalDistribution(),
    "conditional_on": 0,
    "parameters": {"mu": lnsquare2, "sigma": asymdecrease3},
}

ghm = GlobalHierarchicalModel([dist_description_0, dist_description_1])


# Assumption: `x` is not defined in this snippet; a plausible two-column
# evaluation grid is used here (column 0: Hs, column 1: Tz).
x = np.linspace([0.1, 0.1], [6, 6], num=100)

my_f = ghm.pdf(x)

my_f_expweib = ghm.distributions[0].pdf(x[:, 0])
def test_OMAE2020(dataset_omae2020_vhs, refdata_omae2020_vhs):
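    """
    Tests fitting the OMAE2020 wind speed - wave height hierarchical model
    and compares the fitted model with precalculated reference data.
    """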
    def _logistics4(x, a=1, b=1, c=-1, d=1):
        return a + b / (1 + np.exp(c * (x - d)))

    def _alpha3(x, a, b, c, d_of_x):
        return (a + b * x**c) / 2.0445**(1 / d_of_x(x))

    logistics_bounds = [(0, None), (0, None), (None, 0), (0, None)]

    alpha_bounds = [(0, None), (0, None), (None, None)]

    beta_dep = DependenceFunction(_logistics4,
                                  logistics_bounds,
                                  weights=lambda x, y: y)
    alpha_dep = DependenceFunction(_alpha3,
                                   alpha_bounds,
                                   d_of_x=beta_dep,
                                   weights=lambda x, y: y)

    dist_description_vs = {
        "distribution": ExponentiatedWeibullDistribution(),
        "intervals": WidthOfIntervalSlicer(width=2),
    }

    dist_description_hs = {
        "distribution": ExponentiatedWeibullDistribution(f_delta=5),
        "conditional_on": 0,
        "parameters": {
            "alpha": alpha_dep,
            "beta": beta_dep,
        },
    }

    ghm = GlobalHierarchicalModel([dist_description_vs, dist_description_hs])

    fit_description_vs = {"method": "wlsq", "weights": "quadratic"}
    fit_description_hs = {"method": "wlsq", "weights": "quadratic"}

    ghm.fit(dataset_omae2020_vhs, [fit_description_vs, fit_description_hs])

    x = np.linspace([0.1, 0.1], [30, 12], num=100)

    my_f_expweib0 = ghm.distributions[0].pdf(x[:, 0])
    my_expweib0_params = (
        ghm.distributions[0].alpha,
        ghm.distributions[0].beta,
        ghm.distributions[0].delta,
    )

    my_expweib1 = ghm.distributions[1]
    my_givens = my_expweib1.conditioning_values
    my_f_expweib1 = []
    for given in my_givens:
        my_f_expweib1.append(my_expweib1.pdf(x[:, 1], given))

    my_f_expweib1 = np.stack(my_f_expweib1, axis=1)

    my_alphas = np.array(
        [par["alpha"] for par in my_expweib1.parameters_per_interval])
    my_betas = np.array(
        [par["beta"] for par in my_expweib1.parameters_per_interval])
    my_intervals = my_expweib1.data_intervals

    ref_expweib0_params = refdata_omae2020_vhs["ref_expweib0_params"]
    ref_f_expweib0 = refdata_omae2020_vhs["ref_f_expweib0"]
    ref_intervals = refdata_omae2020_vhs["ref_intervals"]
    ref_givens = refdata_omae2020_vhs["ref_givens"]
    ref_alphas = refdata_omae2020_vhs["ref_alphas"]
    ref_betas = refdata_omae2020_vhs["ref_betas"]
    ref_f_expweib1 = refdata_omae2020_vhs["ref_f_expweib1"]

    np.testing.assert_almost_equal(my_expweib0_params, ref_expweib0_params)
    np.testing.assert_almost_equal(my_f_expweib0, ref_f_expweib0)
    for my_interval, ref_interval in zip(my_intervals, ref_intervals):
        np.testing.assert_almost_equal(np.sort(my_interval),
                                       np.sort(ref_interval))
    np.testing.assert_almost_equal(my_givens, ref_givens)
    np.testing.assert_almost_equal(my_alphas, ref_alphas)
    np.testing.assert_almost_equal(my_betas, ref_betas)
    np.testing.assert_almost_equal(my_f_expweib1, ref_f_expweib1)