def seastate_model():
    """
    The joint distribution model described by Vanem and Bitner-Gregersen (2012)
    is widely used in academia. Here, we use it for evaluation.
    DOI: 10.1016/j.apor.2012.05.006
    """
    # A 3-parameter power function (a dependence function).
    def _power3(x, a=0.1000, b=1.489, c=0.1901):
        return a + b * x**c

    # A 3-parameter exponential function (a dependence function).
    def _exp3(x, a=0.0400, b=0.1748, c=-0.2243):
        return a + b * np.exp(c * x)

    bounds = [(0, None), (0, None), (None, None)]
    power3 = DependenceFunction(_power3, bounds)
    exp3 = DependenceFunction(_exp3, bounds)

    dist_description_0 = {
        "distribution": WeibullDistribution(alpha=2.776,
                                            beta=1.471,
                                            gamma=0.8888),
    }
    dist_description_1 = {
        "distribution": LogNormalDistribution(),
        "conditional_on": 0,
        "parameters": {
            "mu": power3,
            "sigma": exp3
        },
    }
    model = GlobalHierarchicalModel([dist_description_0, dist_description_1])

    return model
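

# Example usage (a minimal sketch, not part of the original code): the model
# returned by seastate_model() is fully parameterized, so an environmental
# contour can be computed from it directly. IFORMContour and calculate_alpha
# are the virocon functions used in the tests below; the 3-hour state duration
# and the 50-year return period are assumed example values.
def example_contour_from_seastate_model():
    model = seastate_model()
    alpha = calculate_alpha(3, 50)  # exceedance probability per sea state
    contour = IFORMContour(model, alpha)
    return contour.coordinates  # (n, 2) array of (Hs, Tz) contour points

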
def get_DNVGL_Hs_Tz():
    """
    Get DNVGL significant wave height and wave period model.
    
    Get the descriptions necessary to create the significant wave height
    and wave period model as defined in DNVGL [1]_ in section 3.6.3.
    
    Returns
    -------
    dist_descriptions : list of dict
        List of dictionaries containing the dist descriptions for each dimension.
        Can be used to create a GlobalHierarchicalModel.
    fit_descriptions : None
        Default fit is used so None is returned. 
        Can be passed to fit function of GlobalHierarchicalModel.
    semantics : dict
        Dictionary with a semantic description of the model.
        Can be passed to plot functions.
    
    References
    ----------
    .. [1] DNV GL (2017). Recommended practice DNVGL-RP-C205: Environmental 
        conditions and environmental loads.
        
    """
    # TODO docstrings with links to literature
    # DNVGL 3.6.3
    def _power3(x, a, b, c):
        return a + b * x ** c

    def _exp3(x, a, b, c):
        return a + b * np.exp(c * x)

    bounds = [(0, None), (0, None), (None, None)]

    power3 = DependenceFunction(_power3, bounds)
    exp3 = DependenceFunction(_exp3, bounds)

    dist_description_hs = {
        "distribution": WeibullDistribution(),
        "intervals": WidthOfIntervalSlicer(width=0.5),
    }

    dist_description_tz = {
        "distribution": LogNormalDistribution(),
        "conditional_on": 0,
        "parameters": {"mu": power3, "sigma": exp3},
    }

    dist_descriptions = [dist_description_hs, dist_description_tz]

    fit_descriptions = None

    semantics = {
        "names": ["Significant wave height", "Zero-crossing wave period"],
        "symbols": ["H_s", "T_z"],
        "units": ["m", "s"],
    }

    return dist_descriptions, fit_descriptions, semantics
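

# Example usage (a minimal sketch, not part of the original code): build and
# fit a model from the descriptions returned by get_DNVGL_Hs_Tz(). The dataset
# path is the benchmark file used in test_hs_tz_iform_contour below and serves
# only as an assumed example input; fit_descriptions is None, so the default
# fit is used.
def example_fit_dnvgl_hs_tz():
    dist_descriptions, fit_descriptions, semantics = get_DNVGL_Hs_Tz()
    data = read_ec_benchmark_dataset("datasets/ec-benchmark_dataset_A.txt")
    model = GlobalHierarchicalModel(dist_descriptions)
    model.fit(data)
    return model, semantics

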
def test_HDC(reference_coordinates_HDC):
    def _power3(x, a=0.1000, b=1.489, c=0.1901):
        return a + b * x**c

    # A 3-parameter exponential function (a dependence function).
    def _exp3(x, a=0.0400, b=0.1748, c=-0.2243):
        return a + b * np.exp(c * x)

    bounds = [(0, None), (0, None), (None, None)]
    power3 = DependenceFunction(_power3, bounds)
    exp3 = DependenceFunction(_exp3, bounds)

    dist_description_0 = {
        "distribution": WeibullDistribution(alpha=2.776,
                                            beta=1.471,
                                            gamma=0.8888),
    }
    dist_description_1 = {
        "distribution": LogNormalDistribution(),
        "conditional_on": 0,
        "parameters": {
            "mu": power3,
            "sigma": exp3
        },
    }
    ghm = GlobalHierarchicalModel([dist_description_0, dist_description_1])

    alpha = calculate_alpha(3, 50)
    limits = [(0, 20), (0, 18)]
    deltas = [0.1, 0.1]
    my_contour = HighestDensityContour(ghm, alpha, limits, deltas)

    my_coordinates = my_contour.coordinates

    np.testing.assert_allclose(my_coordinates, reference_coordinates_HDC)
def test_ISORM(reference_coordinates_ISORM):

    # Logarithmic square function.
    def _lnsquare2(x, a=3.62, b=5.77):
        return np.log(a + b * np.sqrt(x / 9.81))

    # 3-parameter function that asymptotically decreases (a dependence function).
    def _asymdecrease3(x, a=0, b=0.324, c=0.404):
        return a + b / (1 + c * x)

    lnsquare2 = DependenceFunction(_lnsquare2)
    asymdecrease3 = DependenceFunction(_asymdecrease3)

    dist_description_0 = {
        "distribution": ExponentiatedWeibullDistribution(
            alpha=0.207, beta=0.684, delta=7.79
        ),
    }

    dist_description_1 = {
        "distribution": LogNormalDistribution(),
        "conditional_on": 0,
        "parameters": {
            "mu": lnsquare2,
            "sigma": asymdecrease3
        },
    }

    ghm = GlobalHierarchicalModel([dist_description_0, dist_description_1])

    state_duration = 3
    return_period = 20
    alpha = calculate_alpha(state_duration, return_period)
    my_isorm = ISORMContour(ghm, alpha)

    my_coordinates = my_isorm.coordinates

    np.testing.assert_allclose(my_coordinates, reference_coordinates_ISORM)
def test_DirectSamplingContour(reference_data_DSContour):

    sample = reference_data_DSContour["sample"]
    ref_coordinates = reference_data_DSContour["ref_coordinates"]

    def _power3(x, a=0.1000, b=1.489, c=0.1901):
        return a + b * x**c

    # A 3-parameter exponential function (a dependence function).
    def _exp3(x, a=0.0400, b=0.1748, c=-0.2243):
        return a + b * np.exp(c * x)

    bounds = [(0, None), (0, None), (None, None)]
    power3 = DependenceFunction(_power3, bounds)
    exp3 = DependenceFunction(_exp3, bounds)

    dist_description_0 = {
        "distribution": WeibullDistribution(alpha=2.776,
                                            beta=1.471,
                                            gamma=0.8888),
    }
    dist_description_1 = {
        "distribution": LogNormalDistribution(),
        "conditional_on": 0,
        "parameters": {
            "mu": power3,
            "sigma": exp3
        },
    }
    ghm = GlobalHierarchicalModel([dist_description_0, dist_description_1])

    alpha = calculate_alpha(3, 50)
    my_ds_contour = DirectSamplingContour(ghm, alpha, sample=sample)

    my_coordinates = my_ds_contour.coordinates

    np.testing.assert_allclose(my_coordinates, ref_coordinates)
def get_OMAE2020_Hs_Tz():
    """
    Get OMAE2020 significant wave height and wave period model.
    
    Get the descriptions necessary to create the significant wave height 
    and wave period model as described by Haselsteiner et al. [1]_. 
    
    Returns
    -------
    dist_descriptions : list of dict
        List of dictionaries containing the dist descriptions for each dimension.
        Can be used to create a GlobalHierarchicalModel.
    fit_descriptions : list of dict
        List of dictionaries containing the fit description for each dimension.
        Can be passed to fit function of GlobalHierarchicalModel.
    semantics : dict
        Dictionary with a semantic description of the model.
        Can be passed to plot functions.
        
    References
    ----------
    .. [1] Haselsteiner, A.F.; Sander, A.; Ohlendorf, J.H.; Thoben, K.D. (2020)
        Global hierarchical models for wind and wave contours: Physical
        interpretations of the dependence functions. OMAE 2020, Fort Lauderdale,
        USA. Proceedings of the 39th International Conference on Ocean, 
        Offshore and Arctic Engineering.
    """

    # A 3-parameter function that asymptotically decreases (a dependence function).
    def _asymdecrease3(x, a, b, c):
        return a + b / (1 + c * x)

    # Logarithmic square function (a dependence function); the parameter c is
    # unused in the expression.
    def _lnsquare2(x, a, b, c):
        return np.log(a + b * np.sqrt(np.divide(x, 9.81)))

    bounds = [(0, None), (0, None), (None, None)]

    sigma_dep = DependenceFunction(_asymdecrease3, bounds=bounds)
    mu_dep = DependenceFunction(_lnsquare2, bounds=bounds)

    dist_description_hs = {
        "distribution": ExponentiatedWeibullDistribution(),
        "intervals": WidthOfIntervalSlicer(width=0.5, min_n_points=50),
    }

    dist_description_tz = {
        "distribution": LogNormalDistribution(),
        "conditional_on": 0,
        "parameters": {"sigma": sigma_dep, "mu": mu_dep,},
    }

    dist_descriptions = [dist_description_hs, dist_description_tz]

    fit_description_hs = {"method": "wlsq", "weights": "quadratic"}
    fit_descriptions = [fit_description_hs, None]

    semantics = {
        "names": ["Significant wave height", "Zero-crossing wave period"],
        "symbols": ["H_s", "T_z"],
        "units": ["m", "s"],
    }

    return dist_descriptions, fit_descriptions, semantics
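

# Example usage (a minimal sketch, not part of the original code): the OMAE2020
# model uses a weighted least-squares fit for the Hs distribution, so the
# fit_descriptions returned above are passed along when fitting. Passing them
# via a fit_descriptions keyword follows the docstring's note that they "can be
# passed to fit function of GlobalHierarchicalModel" and is an assumption here.
def example_fit_omae2020_hs_tz(data):
    dist_descriptions, fit_descriptions, semantics = get_OMAE2020_Hs_Tz()
    model = GlobalHierarchicalModel(dist_descriptions)
    model.fit(data, fit_descriptions=fit_descriptions)
    return model, semantics
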

# A 3-parameter power function (a dependence function). The original snippet
# does not include this definition; the default parameter values below are
# assumed placeholders, not fitted results.
def _power3(x, a=0.1, b=1.5, c=0.2):
    return a + b * x**c


# A 3-parameter exponential function (a dependence function).
def _exp3(x, a=0.0, b=0.308, c=-0.250):
    return a + b * np.exp(c * x)


power3 = DependenceFunction(_power3)
exp3 = DependenceFunction(_exp3)

dist_description_0 = {
    "distribution": WeibullDistribution(alpha=0.944, beta=1.48, gamma=0.0981),
}

dist_description_1 = {
    "distribution": LogNormalDistribution(),
    "conditional_on": 0,
    "parameters": {
        "mu": power3,
        "sigma": exp3
    },
}

ghm = GlobalHierarchicalModel([dist_description_0, dist_description_1])

steps = 5
x, dx = np.linspace(1, (10, 15), num=steps, retstep=True)

F_my = ghm.cdf(x)

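
# A quick sanity check (a minimal sketch, not part of the original snippet):
# cdf() returns one joint probability per grid point, so there are `steps`
# values and each must lie in [0, 1].
assert len(F_my) == steps
assert np.all((F_my >= 0.0) & (F_my <= 1.0))
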
# %%
def test_hs_tz_iform_contour():
    """
    Use a sea state dataset with the variables Hs and Tz,
    fit the joint distribution recommended in DNVGL-RP-C205 to
    it and compute an IFORM contour. This test reproduces
    the results published in Haselsteiner et al. (2019).

    Such a workflow is typical, for example, in ship design.

    Haselsteiner, A. F., Coe, R. G., Manuel, L., Nguyen, P. T. T., 
    Martin, N., & Eckert-Gallup, A. (2019). A benchmarking exercise 
    on estimating extreme environmental conditions: Methodology & 
    baseline results. Proc. 38th International Conference on Ocean, 
    Offshore and Arctic Engineering (OMAE 2019). 
    https://doi.org/10.1115/OMAE2019-96523
    
    DNV GL. (2017). Recommended practice DNVGL-RP-C205: 
    Environmental conditions and environmental loads.
    """

    data = read_ec_benchmark_dataset("datasets/ec-benchmark_dataset_A.txt")

    # A 3-parameter power function (a dependence function).
    def _power3(x, a, b, c):
        return a + b * x**c

    # A 3-parameter exponential function (a dependence function).
    def _exp3(x, a, b, c):
        return a + b * np.exp(c * x)

    bounds = [(0, None), (0, None), (None, None)]
    power3 = DependenceFunction(_power3, bounds)
    exp3 = DependenceFunction(_exp3, bounds)

    dist_description_0 = {
        "distribution": WeibullDistribution(),
        "intervals": WidthOfIntervalSlicer(width=0.5),
    }
    dist_description_1 = {
        "distribution": LogNormalDistribution(),
        "conditional_on": 0,
        "parameters": {
            "mu": power3,
            "sigma": exp3
        },
    }
    model = GlobalHierarchicalModel([dist_description_0, dist_description_1])
    model.fit(data)

    axs = plot_marginal_quantiles(model, data)
    axs = plot_dependence_functions(model)
    ax = plot_2D_isodensity(model, data)

    alpha = calculate_alpha(1, 20)
    contour = IFORMContour(model, alpha)

    coordinates = contour.coordinates
    np.testing.assert_allclose(max(coordinates[:, 0]), 5.0, atol=0.5)
    np.testing.assert_allclose(max(coordinates[:, 1]), 16.1, atol=0.5)

    ax = plot_2D_contour(contour, sample=data)
def test_DNVGL_Hs_Tz_model(dataset_dnvgl_hstz, refdata_dnvgl_hstz):
    # A 3-parameter power function (a dependence function).
    def _power3(x, a, b, c):
        return a + b * x**c

    # A 3-parameter exponential function (a dependence function).
    def _exp3(x, a, b, c):
        return a + b * np.exp(c * x)

    bounds = [(0, None), (0, None), (None, None)]
    power3 = DependenceFunction(_power3, bounds)
    exp3 = DependenceFunction(_exp3, bounds)

    x, dx = np.linspace([0.1, 0.1], [6, 22], num=100, retstep=True)

    dist_description_0 = {
        "distribution": WeibullDistribution(),
        "intervals": WidthOfIntervalSlicer(width=0.5),
    }
    dist_description_1 = {
        "distribution": LogNormalDistribution(),
        "conditional_on": 0,
        "parameters": {
            "mu": power3,
            "sigma": exp3
        },
    }
    ghm = GlobalHierarchicalModel([dist_description_0, dist_description_1])
    ghm.fit(dataset_dnvgl_hstz)
    f_weibull = ghm.distributions[0].pdf(x[:, 0])
    weibull_params = (
        ghm.distributions[0].beta,
        ghm.distributions[0].gamma,
        ghm.distributions[0].alpha,
    )

    lognorm = ghm.distributions[1]
    intervals = lognorm.data_intervals
    givens = lognorm.conditioning_values
    f_lognorm = []
    for given in givens:
        f_lognorm.append(lognorm.pdf(x[:, 1], given))

    f_lognorm = np.stack(f_lognorm, axis=1)
    mus = np.array([par["mu"] for par in lognorm.parameters_per_interval])
    sigmas = np.array(
        [par["sigma"] for par in lognorm.parameters_per_interval])

    ref_f_weibull = refdata_dnvgl_hstz["ref_f_weibull"]
    ref_weibull_params = refdata_dnvgl_hstz["ref_weibull_params"]
    # Assumed fixture key, consistent with the other reference entries.
    ref_intervals = refdata_dnvgl_hstz["ref_intervals"]
    ref_givens = refdata_dnvgl_hstz["ref_givens"]
    ref_f_lognorm = refdata_dnvgl_hstz["ref_f_lognorm"]
    ref_mus = refdata_dnvgl_hstz["ref_mus"]
    ref_sigmas = refdata_dnvgl_hstz["ref_sigmas"]

    assert len(intervals) == len(ref_intervals)
    for i in range(len(ref_intervals)):
        assert sorted(intervals[i]) == sorted(ref_intervals[i])

    np.testing.assert_allclose(f_weibull, ref_f_weibull)
    np.testing.assert_allclose(weibull_params, ref_weibull_params)
    np.testing.assert_allclose(givens, ref_givens)
    np.testing.assert_allclose(f_lognorm, ref_f_lognorm, rtol=1e-5)
    np.testing.assert_allclose(mus, ref_mus)
    np.testing.assert_allclose(sigmas, ref_sigmas)