def test_HDC(reference_coordinates_HDC):
    # A 3-parameter power function (a dependence function).
    def _power3(x, a=0.1000, b=1.489, c=0.1901):
        return a + b * x**c

    # A 3-parameter exponential function (a dependence function).
    def _exp3(x, a=0.0400, b=0.1748, c=-0.2243):
        return a + b * np.exp(c * x)

    bounds = [(0, None), (0, None), (None, None)]
    power3 = DependenceFunction(_power3, bounds)
    exp3 = DependenceFunction(_exp3, bounds)

    dist_description_0 = {
        "distribution": WeibullDistribution(alpha=2.776,
                                            beta=1.471,
                                            gamma=0.8888),
    }
    dist_description_1 = {
        "distribution": LogNormalDistribution(),
        "conditional_on": 0,
        "parameters": {
            "mu": power3,
            "sigma": exp3
        },
    }
    ghm = GlobalHierarchicalModel([dist_description_0, dist_description_1])

    alpha = calculate_alpha(3, 50)  # 3-hour sea states, 50-year return period.
    limits = [(0, 20), (0, 18)]
    deltas = [0.1, 0.1]
    my_contour = HighestDensityContour(ghm, alpha, limits, deltas)

    my_coordinates = my_contour.coordinates

    np.testing.assert_allclose(my_coordinates, reference_coordinates_HDC)

def seastate_model():
    """
    The joint distribution model described by Vanem and Bitner-Gregersen
    (2012) is widely used in academia. Here, we use it for evaluation.
    DOI: 10.1016/j.apor.2012.05.006
    """

    # A 3-parameter power function (a dependence function).
    def _power3(x, a=0.1000, b=1.489, c=0.1901):
        return a + b * x**c

    # A 3-parameter exponential function (a dependence function).
    def _exp3(x, a=0.0400, b=0.1748, c=-0.2243):
        return a + b * np.exp(c * x)

    bounds = [(0, None), (0, None), (None, None)]
    power3 = DependenceFunction(_power3, bounds)
    exp3 = DependenceFunction(_exp3, bounds)

    dist_description_0 = {
        "distribution": WeibullDistribution(alpha=2.776,
                                            beta=1.471,
                                            gamma=0.8888),
    }
    dist_description_1 = {
        "distribution": LogNormalDistribution(),
        "conditional_on": 0,
        "parameters": {
            "mu": power3,
            "sigma": exp3
        },
    }
    model = GlobalHierarchicalModel([dist_description_0, dist_description_1])

    return model
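
# A minimal usage sketch of seastate_model (hypothetical driver code, not one
# of the original tests): build the model and compute a 50-year IFORM contour.
model = seastate_model()
alpha = calculate_alpha(3, 50)  # 3-hour sea states, 50-year return period.
iform_contour_sketch = IFORMContour(model, alpha)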

def test_ISORM(reference_coordinates_ISORM):

    # Logarithmic square function.
    def _lnsquare2(x, a=3.62, b=5.77):
        return np.log(a + b * np.sqrt(x / 9.81))

    # 3-parameter function that asymptotically decreases (a dependence function).
    def _asymdecrease3(x, a=0, b=0.324, c=0.404):
        return a + b / (1 + c * x)

    lnsquare2 = DependenceFunction(_lnsquare2)
    asymdecrease3 = DependenceFunction(_asymdecrease3)

    dist_description_0 = {
        "distribution":
        ExponentiatedWeibullDistribution(alpha=0.207, beta=0.684, delta=7.79),
    }

    dist_description_1 = {
        "distribution": LogNormalDistribution(),
        "conditional_on": 0,
        "parameters": {
            "mu": lnsquare2,
            "sigma": asymdecrease3
        },
    }

    ghm = GlobalHierarchicalModel([dist_description_0, dist_description_1])

    state_duration = 3
    return_period = 20
    alpha = calculate_alpha(state_duration, return_period)
    my_isorm = ISORMContour(ghm, alpha)

    my_coordinates = my_isorm.coordinates

    np.testing.assert_allclose(my_coordinates, reference_coordinates_ISORM)

def test_DirectSamplingContour(reference_data_DSContour):

    sample = reference_data_DSContour["sample"]
    ref_coordinates = reference_data_DSContour["ref_coordinates"]

    # A 3-parameter power function (a dependence function).
    def _power3(x, a=0.1000, b=1.489, c=0.1901):
        return a + b * x**c

    # A 3-parameter exponential function (a dependence function).
    def _exp3(x, a=0.0400, b=0.1748, c=-0.2243):
        return a + b * np.exp(c * x)

    bounds = [(0, None), (0, None), (None, None)]
    power3 = DependenceFunction(_power3, bounds)
    exp3 = DependenceFunction(_exp3, bounds)

    dist_description_0 = {
        "distribution": WeibullDistribution(alpha=2.776,
                                            beta=1.471,
                                            gamma=0.8888),
    }
    dist_description_1 = {
        "distribution": LogNormalDistribution(),
        "conditional_on": 0,
        "parameters": {
            "mu": power3,
            "sigma": exp3
        },
    }
    ghm = GlobalHierarchicalModel([dist_description_0, dist_description_1])

    alpha = calculate_alpha(3, 50)  # 3-hour sea states, 50-year return period.
    my_ds_contour = DirectSamplingContour(ghm, alpha, sample=sample)

    my_coordinates = my_ds_contour.coordinates

    np.testing.assert_allclose(my_coordinates, ref_coordinates)

def test_v_hs_hd_contour():
    """
    Use a wind speed - wave height dataset, fit the joint
    distribution that was proposed by Haselsteiner et al. (2020)
    and compute a highest density contour. This test reproduces
    the results presented in Haselsteiner et al. (2020). The
    coordinates are available at https://github.com/ec-benchmark-organizers/
    ec-benchmark/blob/master/results/exercise-1/contribution-4/haselsteiner_
    andreas_dataset_d_50.txt

    Such a workflow is typical, for example, when generating
    a 50-year contour for DLC 1.6 in the offshore wind standard
    IEC 61400-3-1.

    Haselsteiner, A. F., Sander, A., Ohlendorf, J.-H., & Thoben, K.-D. (2020). 
    Global hierarchical models for wind and wave contours: Physical 
    interpretations of the dependence functions. Proc. 39th International 
    Conference on Ocean, Offshore and Arctic Engineering (OMAE 2020). 
    https://doi.org/10.1115/OMAE2020-18668

    International Electrotechnical Commission. (2019). Wind energy 
    generation systems - Part 3-1: Design requirements for fixed 
    offshore wind turbines (IEC 61400-3-1).
    """

    data = read_ec_benchmark_dataset("datasets/ec-benchmark_dataset_D.txt")

    # A 4-parameter logistic function (a dependence function).
    def _logistics4(x, a=1, b=1, c=-1, d=1):
        return a + b / (1 + np.exp(c * (x - d)))

    # A 3-parameter function designed for the scale parameter, alpha, of an
    # exponentiated Weibull distribution with delta=5 (a dependence function).
    def _alpha3(x, a, b, c, d_of_x):
        return (a + b * x**c) / 2.0445**(1 / d_of_x(x))

    logistics_bounds = [(0, None), (0, None), (None, 0), (0, None)]

    alpha_bounds = [(0, None), (0, None), (None, None)]

    beta_dep = DependenceFunction(_logistics4,
                                  logistics_bounds,
                                  weights=lambda x, y: y)
    alpha_dep = DependenceFunction(_alpha3,
                                   alpha_bounds,
                                   d_of_x=beta_dep,
                                   weights=lambda x, y: y)

    dist_description_v = {
        "distribution": ExponentiatedWeibullDistribution(),
        "intervals": WidthOfIntervalSlicer(width=2),
    }

    dist_description_hs = {
        "distribution": ExponentiatedWeibullDistribution(f_delta=5),
        "conditional_on": 0,
        "parameters": {
            "alpha": alpha_dep,
            "beta": beta_dep,
        },
    }

    model = GlobalHierarchicalModel([dist_description_v, dist_description_hs])

    fit_description_vs = {"method": "wlsq", "weights": "quadratic"}
    fit_description_hs = {"method": "wlsq", "weights": "quadratic"}

    model.fit(data, [fit_description_vs, fit_description_hs])

    axs = plot_marginal_quantiles(model, data)
    axs = plot_dependence_functions(model)
    ax = plot_2D_isodensity(model, data)

    alpha = calculate_alpha(1, 50)  # 1-hour states, 50-year return period.
    limits = [(0, 35), (0, 20)]
    contour = HighestDensityContour(model,
                                    alpha,
                                    limits=limits,
                                    deltas=[0.2, 0.2])

    coordinates = contour.coordinates
    np.testing.assert_allclose(max(coordinates[:, 0]), 29.9, atol=0.2)
    np.testing.assert_allclose(max(coordinates[:, 1]), 15.5, atol=0.2)
    np.testing.assert_allclose(min(coordinates[:, 0]), 0, atol=0.1)
    np.testing.assert_allclose(min(coordinates[:, 1]), 0, atol=0.1)

    ax = plot_2D_contour(contour, sample=data)

def test_hs_tz_iform_contour():
    """
    Use a sea state dataset with the variables Hs and Tz,
    fit the joint distribution recommended in DNVGL-RP-C205 to
    it and compute an IFORM contour. This test reproduces
    the results published in Haselsteiner et al. (2019).

    Such a workflow is typical in ship design, for example.

    Haselsteiner, A. F., Coe, R. G., Manuel, L., Nguyen, P. T. T., 
    Martin, N., & Eckert-Gallup, A. (2019). A benchmarking exercise 
    on estimating extreme environmental conditions: Methodology & 
    baseline results. Proc. 38th International Conference on Ocean, 
    Offshore and Arctic Engineering (OMAE 2019). 
    https://doi.org/10.1115/OMAE2019-96523
    
    DNV GL. (2017). Recommended practice DNVGL-RP-C205: 
    Environmental conditions and environmental loads.
    """

    data = read_ec_benchmark_dataset("datasets/ec-benchmark_dataset_A.txt")

    # A 3-parameter power function (a dependence function).
    def _power3(x, a, b, c):
        return a + b * x**c

    # A 3-parameter exponential function (a dependence function).
    def _exp3(x, a, b, c):
        return a + b * np.exp(c * x)

    bounds = [(0, None), (0, None), (None, None)]
    power3 = DependenceFunction(_power3, bounds)
    exp3 = DependenceFunction(_exp3, bounds)

    dist_description_0 = {
        "distribution": WeibullDistribution(),
        "intervals": WidthOfIntervalSlicer(width=0.5),
    }
    dist_description_1 = {
        "distribution": LogNormalDistribution(),
        "conditional_on": 0,
        "parameters": {
            "mu": power3,
            "sigma": exp3
        },
    }
    model = GlobalHierarchicalModel([dist_description_0, dist_description_1])
    model.fit(data)

    axs = plot_marginal_quantiles(model, data)
    axs = plot_dependence_functions(model)
    ax = plot_2D_isodensity(model, data)

    alpha = calculate_alpha(1, 20)  # 1-hour sea states, 20-year return period.
    contour = IFORMContour(model, alpha)

    coordinates = contour.coordinates
    np.testing.assert_allclose(max(coordinates[:, 0]), 5.0, atol=0.5)
    np.testing.assert_allclose(max(coordinates[:, 1]), 16.1, atol=0.5)

    ax = plot_2D_contour(contour, sample=data)

# %% Example 7
lnsquare2 = DependenceFunction(_lnsquare2)
asymdecrease3 = DependenceFunction(_asymdecrease3)

dist_description_0 = {
    "distribution": ExponentiatedWeibullDistribution(
        alpha=0.207, beta=0.684, delta=7.79
    ),
}

dist_description_1 = {
    "distribution": LogNormalDistribution(),
    "conditional_on": 0,
    "parameters": {"mu": lnsquare2, "sigma": asymdecrease3},
}

ghm = GlobalHierarchicalModel([dist_description_0, dist_description_1])


my_f = ghm.pdf(x)

my_f_expweib = ghm.distributions[0].pdf(x[:, 0])
my_expweib_param = (
    ghm.distributions[0].delta,
    ghm.distributions[0].beta,
    ghm.distributions[0].alpha,
)


my_ln = ghm.distributions[1]
my_given = np.arange(1, 10)
my_f_ln = []
for given in my_given:
    my_f_ln.append(my_ln.pdf(x[:, 1], given))

dist_description_0 = {
    "distribution": WeibullDistribution(),
    "intervals": WidthOfIntervalSlicer(width=0.5),
}

dist_description_1 = {
    "distribution": LogNormalDistribution(),
    "conditional_on": 0,
    "parameters": {
        "mu": power3,
        "sigma": exp3
    },
}

ghm = GlobalHierarchicalModel([dist_description_0, dist_description_1])

ghm.fit(data)

state_duration = 3
return_period = 50
alpha = calculate_alpha(state_duration, return_period)
iform_contour = IFORMContour(ghm, alpha)

semantics = {
    "names": ["Significant wave height", "Energy wave period"],
    "symbols": ["H_s", "T_e"],
    "units": ["m", "s"],
}  # TODO check if correct or other wave period
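
# A minimal plotting sketch (assumption: virocon's plot_2D_contour accepts a
# semantics argument, like the other plotting helpers used in this file): the
# semantics dict supplies the axis labels.
from virocon.plotting import plot_2D_contour

ax_sketch = plot_2D_contour(iform_contour, semantics=semantics)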

# %% Example 9
data = pd.read_csv("datasets/OMAE2020_Dataset_D.txt", sep=";")
data.columns = ["Datetime", "V", "Hs"]
data = data[["Hs", "V"]]

x, dx = np.linspace([0.1, 0.1], [6, 22], num=100, retstep=True)

# given_hs = list(range(1, 7))

# %% # vc2
from virocon import GlobalHierarchicalModel
from virocon.predefined import get_DNVGL_Hs_U

dist_descriptions, fit_descriptions, semantics = get_DNVGL_Hs_U()

ghm = GlobalHierarchicalModel(dist_descriptions)
ghm.fit(data, fit_descriptions=fit_descriptions)

# %%
from virocon.plotting import plot_2D_isodensity

plot_2D_isodensity(ghm, data, semantics=semantics)

# %%

my_f = ghm.pdf(x)

my_f_weibull3 = ghm.distributions[0].pdf(x[:, 0])
my_weibull3_params = (
    ghm.distributions[0].beta,
    ghm.distributions[0].gamma,
    ghm.distributions[0].alpha,
)

# %% Example 10
dist_description_vs = {
    "distribution": ExponentiatedWeibullDistribution(),
    "intervals": WidthOfIntervalSlicer(2, min_n_points=50),
}

dist_description_hs = {
    "distribution": ExponentiatedWeibullDistribution(f_delta=5),
    "conditional_on": 0,
    "parameters": {
        "alpha": alpha_dep,
        "beta": beta_dep,
    },
}

ghm = GlobalHierarchicalModel([dist_description_vs, dist_description_hs])

fit_description_vs = {"method": "wlsq", "weights": "quadratic"}
fit_description_hs = {"method": "wlsq", "weights": "quadratic"}

ghm.fit(data, [fit_description_vs, fit_description_hs])
# %% printing

# print(repr(beta_dep))
# print(repr(alpha_dep))
# print()
# print(ghm.distributions[0])
# print(ghm.distributions[1])
print()
print(ghm)
# print(beta_dep)

def test_WES4(dataset_wes_sigmau, refdata_wes_sigmau):
    # https://doi.org/10.5194/wes-4-325-2019

    class MyIntervalSlicer(WidthOfIntervalSlicer):
        def _slice(self, data):

            interval_slices, interval_references, interval_boundaries = (
                super()._slice(data))

            # discard slices below 4 m/s
            ok_slices = []
            ok_references = []
            ok_boundaries = []
            for slice_, reference, boundaries in zip(interval_slices,
                                                     interval_references,
                                                     interval_boundaries):
                if reference >= 4:
                    ok_slices.append(slice_)
                    ok_references.append(reference)
                    ok_boundaries.append(boundaries)

            return ok_slices, ok_references, ok_boundaries

    def _poly3(x, a, b, c, d):
        return a * x**3 + b * x**2 + c * x + d

    def _poly2(x, a, b, c):
        return a * x**2 + b * x + c

    poly3 = DependenceFunction(_poly3)
    poly2 = DependenceFunction(_poly2)

    dim0_description = {
        "distribution": WeibullDistribution(),
        "intervals": MyIntervalSlicer(width=1,
                                      reference="left",
                                      min_n_points=5),
    }

    dim1_description = {
        "distribution": LogNormalNormFitDistribution(),
        "conditional_on": 0,
        "parameters": {
            "mu_norm": poly3,
            "sigma_norm": poly2
        },
    }

    ghm = GlobalHierarchicalModel([dim0_description, dim1_description])
    ghm.fit(dataset_wes_sigmau)

    # Chosen such that, on average, 1/5 of an observation lies outside the contour.
    alpha = 1 / (5 * len(dataset_wes_sigmau))
    iform = IFORMContour(ghm, alpha)
    my_coordinates = iform.coordinates

    x_U = np.linspace(2, 40, num=100)
    x_sigma = np.linspace(0.02, 3.6, num=100)

    U_dist = ghm.distributions[0]
    my_weib_param = list(U_dist.parameters.values())
    my_f_weib = U_dist.pdf(x_U)

    my_ln = ghm.distributions[1]
    my_intervals = my_ln.data_intervals
    my_givens = my_ln.conditioning_values
    my_f_ln = []
    for given in my_givens:
        my_f_ln.append(my_ln.pdf(x_sigma, given))

    my_f_ln = np.stack(my_f_ln, axis=1)

    my_mu_norms = np.array(
        [par["mu_norm"] for par in my_ln.parameters_per_interval])
    my_sigma_norms = np.array(
        [par["sigma_norm"] for par in my_ln.parameters_per_interval])
    my_intervals = my_ln.data_intervals
    my_sigmas = [dist.sigma for dist in my_ln.distributions_per_interval]
    my_mus = [dist.mu for dist in my_ln.distributions_per_interval]

    ref_weib_param = refdata_wes_sigmau["ref_weib_param"]
    ref_f_weib = refdata_wes_sigmau["ref_f_weib"]
    ref_intervals = refdata_wes_sigmau["ref_intervals"]
    ref_givens = refdata_wes_sigmau["ref_givens"]
    ref_mu_norms = refdata_wes_sigmau["ref_mu_norms"]
    ref_sigma_norms = refdata_wes_sigmau["ref_sigma_norms"]
    ref_mus = refdata_wes_sigmau["ref_mus"]
    ref_sigmas = refdata_wes_sigmau["ref_sigmas"]
    ref_f_ln = refdata_wes_sigmau["ref_f_ln"]
    ref_coordinates = refdata_wes_sigmau["ref_coordinates"]

    np.testing.assert_allclose(my_weib_param, ref_weib_param)
    np.testing.assert_allclose(my_f_weib, ref_f_weib)

    assert len(my_intervals) == len(ref_intervals)
    for i in range(len(ref_intervals)):
        assert sorted(my_intervals[i]) == sorted(ref_intervals[i])

    np.testing.assert_allclose(my_givens, ref_givens)
    np.testing.assert_allclose(my_mu_norms, ref_mu_norms)
    np.testing.assert_allclose(my_sigma_norms, ref_sigma_norms)
    np.testing.assert_allclose(my_mus, ref_mus)
    np.testing.assert_allclose(my_sigmas, ref_sigmas)
    np.testing.assert_allclose(my_f_ln, ref_f_ln)
    np.testing.assert_allclose(my_coordinates, ref_coordinates)

def test_OMAE2020(dataset_omae2020_vhs, refdata_omae2020_vhs):
    # A 4-parameter logistic function (a dependence function).
    def _logistics4(x, a=1, b=1, c=-1, d=1):
        return a + b / (1 + np.exp(c * (x - d)))

    # A 3-parameter function designed for the scale parameter, alpha, of an
    # exponentiated Weibull distribution with delta=5 (a dependence function).
    def _alpha3(x, a, b, c, d_of_x):
        return (a + b * x**c) / 2.0445**(1 / d_of_x(x))

    logistics_bounds = [(0, None), (0, None), (None, 0), (0, None)]

    alpha_bounds = [(0, None), (0, None), (None, None)]

    beta_dep = DependenceFunction(_logistics4,
                                  logistics_bounds,
                                  weights=lambda x, y: y)
    alpha_dep = DependenceFunction(_alpha3,
                                   alpha_bounds,
                                   d_of_x=beta_dep,
                                   weights=lambda x, y: y)

    dist_description_vs = {
        "distribution": ExponentiatedWeibullDistribution(),
        "intervals": WidthOfIntervalSlicer(width=2),
    }

    dist_description_hs = {
        "distribution": ExponentiatedWeibullDistribution(f_delta=5),
        "conditional_on": 0,
        "parameters": {
            "alpha": alpha_dep,
            "beta": beta_dep,
        },
    }

    ghm = GlobalHierarchicalModel([dist_description_vs, dist_description_hs])

    fit_description_vs = {"method": "wlsq", "weights": "quadratic"}
    fit_description_hs = {"method": "wlsq", "weights": "quadratic"}

    ghm.fit(dataset_omae2020_vhs, [fit_description_vs, fit_description_hs])

    x = np.linspace([0.1, 0.1], [30, 12], num=100)

    my_f_expweib0 = ghm.distributions[0].pdf(x[:, 0])
    my_expweib0_params = (
        ghm.distributions[0].alpha,
        ghm.distributions[0].beta,
        ghm.distributions[0].delta,
    )

    my_expweib1 = ghm.distributions[1]
    my_givens = my_expweib1.conditioning_values
    my_f_expweib1 = []
    for given in my_givens:
        my_f_expweib1.append(my_expweib1.pdf(x[:, 1], given))

    my_f_expweib1 = np.stack(my_f_expweib1, axis=1)

    my_alphas = np.array(
        [par["alpha"] for par in my_expweib1.parameters_per_interval])
    my_betas = np.array(
        [par["beta"] for par in my_expweib1.parameters_per_interval])
    my_intervals = my_expweib1.data_intervals

    ref_expweib0_params = refdata_omae2020_vhs["ref_expweib0_params"]
    ref_f_expweib0 = refdata_omae2020_vhs["ref_f_expweib0"]
    ref_intervals = refdata_omae2020_vhs["ref_intervals"]
    ref_givens = refdata_omae2020_vhs["ref_givens"]
    ref_alphas = refdata_omae2020_vhs["ref_alphas"]
    ref_betas = refdata_omae2020_vhs["ref_betas"]
    ref_f_expweib1 = refdata_omae2020_vhs["ref_f_expweib1"]

    np.testing.assert_almost_equal(my_expweib0_params, ref_expweib0_params)
    np.testing.assert_almost_equal(my_f_expweib0, ref_f_expweib0)
    for my_interval, ref_interval in zip(my_intervals, ref_intervals):
        np.testing.assert_almost_equal(np.sort(my_interval),
                                       np.sort(ref_interval))
    np.testing.assert_almost_equal(my_givens, ref_givens)
    np.testing.assert_almost_equal(my_alphas, ref_alphas)
    np.testing.assert_almost_equal(my_betas, ref_betas)
    np.testing.assert_almost_equal(my_f_expweib1, ref_f_expweib1)

power3 = DependenceFunction(_power3, bounds)
exp3 = DependenceFunction(_exp3, bounds)

dist_description_0 = {
    "distribution": WeibullDistribution(),
    "intervals": WidthOfIntervalSlicer(width=0.5),
}
dist_description_1 = {
    "distribution": LogNormalDistribution(),
    "conditional_on": 0,
    "parameters": {
        "mu": power3,
        "sigma": exp3
    },
}
model = GlobalHierarchicalModel([dist_description_0, dist_description_1])

# Define a dictionary that describes the model.
semantics = {
    "names": ["Significant wave height", "Zero-crossing period"],
    "symbols": ["H_s", "T_z"],
    "units": ["m", "s"],
}

# Fit the model to the data (estimate the model's parameter values).
model.fit(data)

# Print the estimated parameter values
print(model)

# Create plots to inspect the model's goodness-of-fit.
axs = plot_marginal_quantiles(model, data)
axs = plot_dependence_functions(model)
ax = plot_2D_isodensity(model, data)

bounds = [(0, None), (0, None), (None, None)]
power3 = DependenceFunction(_power3, bounds)
exp3 = DependenceFunction(_exp3, bounds)

dist_description_0 = {
    "distribution": WeibullDistribution(alpha=2.776, beta=1.471, gamma=0.8888),
}
dist_description_1 = {
    "distribution": LogNormalDistribution(),
    "conditional_on": 0,
    "parameters": {
        "mu": power3,
        "sigma": exp3
    },
}
ghm = GlobalHierarchicalModel([dist_description_0, dist_description_1])

alpha = calculate_alpha(3, 50)  # 3-hour sea states, 50-year return period.
n = 10000
sample = ghm.draw_sample(n)
my_ds_contour = DirectSamplingContour(ghm, alpha, sample=sample)

my_coordinates = my_ds_contour.coordinates

# %% viroconcom v1
import sys

sys.path.append("../viroconcom")
from viroconcom.distributions import (
    WeibullDistribution,
    LognormalDistribution,
)

# %% Example 15
dist_description_0 = {
    "distribution":
    ExponentiatedWeibullDistribution(alpha=0.207, beta=0.684, delta=7.79),
}

dist_description_1 = {
    "distribution": LogNormalDistribution(),
    "conditional_on": 0,
    "parameters": {
        "mu": lnsquare2,
        "sigma": asymdecrease3
    },
}

ghm = GlobalHierarchicalModel([dist_description_0, dist_description_1])

steps = 5

x, dx = np.linspace(0, 20, num=steps, retstep=True)
# x = x[:, np.newaxis]

my_marg_f0 = ghm.marginal_pdf(x, dim=0)
my_marg_f1 = ghm.marginal_pdf(x, dim=1)

my_marg_F0 = ghm.marginal_cdf(x, dim=0)
my_marg_F1 = ghm.marginal_cdf(x, dim=1)

p = np.linspace(0.0001, 0.999, num=steps)
my_marg_x0 = ghm.marginal_icdf(p, dim=0)
my_marg_x1 = ghm.marginal_icdf(p, dim=1)

# %% Example 16
axs[0].plot([25, 25], [0, 1.1], '--k', linewidth=0.8)
axs[0].text(23.5,
            0.25,
            'Cut-out wind speed',
            fontsize=fs,
            rotation=90,
            verticalalignment='center')
axs[0].spines['right'].set_visible(False)
axs[0].spines['top'].set_visible(False)
axs[0].set_ylabel(r_label)
axs[0].set_ylim([0, 1.1])

# Load data, fit joint model, compute contour.
data = read_ec_benchmark_dataset('ec-benchmark_dataset_D.txt')
dist_descriptions, fit_descriptions, semantics = get_OMAE2020_V_Hs()
model = GlobalHierarchicalModel(dist_descriptions)
model.fit(data, fit_descriptions)
c = IFORMContour(model, 1 / (50 * 365.25 * 24))  # 1-hour states, 50-year return period.

contour_v = c.coordinates[:, 0] / 0.95
contour_hs = c.coordinates[:, 1]
contour_v = contour_v * (90 / 10)**0.14  # Convert wind speed to hub height.
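
# The line above applies the power-law wind profile v(z) = v_ref * (z / z_ref)**0.14
# with z_ref = 10 m and z = 90 m; 0.14 is a commonly used offshore shear exponent.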

axs[1].plot(np.append(contour_v, contour_v[0]),
            np.append(contour_hs, contour_hs[0]),
            '--b',
            label='Environmental contour')

axs[1].annotate('environmental contour',
                xy=(7.4, 4.6),
                xytext=(3, 2.5),
                )

# %% Example 17
matplotlib.rcParams["mathtext.fontset"] = "custom"
matplotlib.rcParams["mathtext.rm"] = "Arial"
matplotlib.rcParams["mathtext.it"] = "Arial:italic"
matplotlib.rcParams["mathtext.bf"] = "Arial:bold"

# Read dataset A, B  or C.
DATASET_CHARS = ["A", "B", "C"]

fig, axes = plt.subplots(2, 3, figsize=(7, 5), sharey="row")
for i, dataset_char in enumerate(DATASET_CHARS):
    file_path = "datasets/" + dataset_char + ".txt"
    sample = read_ec_benchmark_dataset(file_path)

    # Define the structure of the joint distribution model and fit it to the data.
    dist_descriptions, fit_descriptions, semantics = get_OMAE2020_Hs_Tz()
    model = GlobalHierarchicalModel(dist_descriptions)
    model.fit(sample)

    two_axes = [axes[0, i], axes[1, i]]
    plot_marginal_quantiles(model, sample, semantics, two_axes)

    for j, ax in enumerate(two_axes):
        if j == 0:
            ax.set_title("Dataset $" + dataset_char + "$")
        ax.spines["right"].set_visible(False)
        ax.spines["top"].set_visible(False)
        if i > 0:
            ax.set_ylabel("")

fig.tight_layout()
fig.savefig("figs/marginals-qq-datasets-abc.svg", dpi=300)

def test_DNVGL_Hs_Tz_model(dataset_dnvgl_hstz, refdata_dnvgl_hstz):
    # A 3-parameter power function (a dependence function).
    def _power3(x, a, b, c):
        return a + b * x**c

    # A 3-parameter exponential function (a dependence function).
    def _exp3(x, a, b, c):
        return a + b * np.exp(c * x)

    bounds = [(0, None), (0, None), (None, None)]
    power3 = DependenceFunction(_power3, bounds)
    exp3 = DependenceFunction(_exp3, bounds)

    x, dx = np.linspace([0.1, 0.1], [6, 22], num=100, retstep=True)

    dist_description_0 = {
        "distribution": WeibullDistribution(),
        "intervals": WidthOfIntervalSlicer(width=0.5),
    }
    dist_description_1 = {
        "distribution": LogNormalDistribution(),
        "conditional_on": 0,
        "parameters": {
            "mu": power3,
            "sigma": exp3
        },
    }
    ghm = GlobalHierarchicalModel([dist_description_0, dist_description_1])
    ghm.fit(dataset_dnvgl_hstz)
    f_weibull = ghm.distributions[0].pdf(x[:, 0])
    weibull_params = (
        ghm.distributions[0].beta,
        ghm.distributions[0].gamma,
        ghm.distributions[0].alpha,
    )

    lognorm = ghm.distributions[1]
    intervals = lognorm.data_intervals
    givens = lognorm.conditioning_values
    f_lognorm = []
    for given in givens:
        f_lognorm.append(lognorm.pdf(x[:, 1], given))

    f_lognorm = np.stack(f_lognorm, axis=1)
    mus = np.array([par["mu"] for par in lognorm.parameters_per_interval])
    sigmas = np.array(
        [par["sigma"] for par in lognorm.parameters_per_interval])

    ref_f_weibull = refdata_dnvgl_hstz["ref_f_weibull"]
    ref_weibull_params = refdata_dnvgl_hstz["ref_weibull_params"]
    ref_intervals = refdata_dnvgl_hstz["ref_intervals"]
    ref_givens = refdata_dnvgl_hstz["ref_givens"]
    ref_f_lognorm = refdata_dnvgl_hstz["ref_f_lognorm"]
    ref_mus = refdata_dnvgl_hstz["ref_mus"]
    ref_sigmas = refdata_dnvgl_hstz["ref_sigmas"]

    assert len(intervals) == len(ref_intervals)
    for i in range(len(ref_intervals)):
        assert sorted(intervals[i]) == sorted(ref_intervals[i])

    np.testing.assert_allclose(f_weibull, ref_f_weibull)
    np.testing.assert_allclose(weibull_params, ref_weibull_params)
    np.testing.assert_allclose(givens, ref_givens)
    np.testing.assert_allclose(f_lognorm, ref_f_lognorm, rtol=1e-5)
    np.testing.assert_allclose(mus, ref_mus)
    np.testing.assert_allclose(sigmas, ref_sigmas)

exp3 = DependenceFunction(_exp3)

dist_description_0 = {
    "distribution": WeibullDistribution(alpha=0.944, beta=1.48, gamma=0.0981),
}

dist_description_1 = {
    "distribution": LogNormalDistribution(),
    "conditional_on": 0,
    "parameters": {
        "mu": power3,
        "sigma": exp3
    },
}

ghm = GlobalHierarchicalModel([dist_description_0, dist_description_1])

steps = 5
x, dx = np.linspace(1, (10, 15), num=steps, retstep=True)

F_my = ghm.cdf(x)

# %%

import sys

sys.path.append("../viroconcom")
# sys.path.append("../../viroconcom")
from viroconcom.params import FunctionParam
from viroconcom.distributions import (
    WeibullDistribution,
)

# %%
from virocon import (
    read_ec_benchmark_dataset,
    GlobalHierarchicalModel,
    get_DNVGL_Hs_U,
    get_OMAE2020_V_Hs,
    plot_marginal_quantiles,
    plot_2D_isodensity,
)

# Load dataset (this is dataset D from a benchmarking exercise on environmental
# contours, see https://github.com/ec-benchmark-organizers/ec-benchmark).
data = read_ec_benchmark_dataset("datasets/ec-benchmark_dataset_D.txt")

# Define the structure of the first joint distribution model. This model
# is recommended in DNV GL's "Recommended practice DNVGL-RP-C205: Environmental
# conditions and environmental loads" (2017).
dist_descriptions1, fit_descriptions1, semantics1 = get_DNVGL_Hs_U()
model1 = GlobalHierarchicalModel(dist_descriptions1)

# Define the structure of the second joint distribution model. This model
# was proposed at the OMAE 2020 conference by Haselsteiner et al.:
# Haselsteiner, A. F., Sander, A., Ohlendorf, J.-H., & Thoben, K.-D. (2020).
# Global hierarchical models for wind and wave contours: Physical interpretations
# of the dependence functions. Proc. 39th International Conference on Ocean,
# Offshore and Arctic Engineering (OMAE 2020). https://doi.org/10.1115/OMAE2020-18668
dist_descriptions2, fit_descriptions2, semantics2 = get_OMAE2020_V_Hs()
model2 = GlobalHierarchicalModel(dist_descriptions2)

# Fit the two models to the data (estimate their parameter values).
# For model 1, we need the variables in the order hs, v (instead of v, hs)
v = data["wind speed (m/s)"].to_numpy()
hs = data["significant wave height (m)"].to_numpy()
hs_v = np.transpose(np.array([hs, v]))
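
# Fit both models (a minimal sketch, assuming the fit(data, fit_descriptions)
# call pattern used elsewhere in this file; model1 takes the reordered hs_v array).
model1.fit(hs_v, fit_descriptions1)
model2.fit(data, fit_descriptions2)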
    CS_empirical.append(
        ax.contour(
            tgrid,
            hgrid,
            Z,
            levels=levels[-2:],
            linestyles="-",
            linewidths=1,
            colors="k",
            zorder=2,
        ))
    CS_empirical[-1].collections[0].set_label("KDE, constant density")

    # Define the structure of the joint distribution model and fit it to the data.
    dist_descriptions, fit_descriptions, semantics = get_OMAE2020_Hs_Tz()
    model = GlobalHierarchicalModel(dist_descriptions)
    data = np.array([hs, tz])
    data = data.T
    model.fit(data)

    f = np.empty_like(hgrid)
    for i in range(hgrid.shape[0]):
        for j in range(hgrid.shape[1]):
            f[i, j] = model.pdf([hgrid[i, j], tgrid[i, j]])

    CS.append(
        ax.contour(
            tgrid,
            hgrid,
            f,
            levels=levels,
        ))

# %% Example 22
dim0_description = {
    "distribution": WeibullDistribution(),
    #                 "beta" : 2.02,
    #                 "gamma" : 2.2},
}

dim1_description = {
    "distribution": LogNormalNormFitDistribution(),
    "conditional_on": 0,
    "parameters": {
        "mu_norm": poly3,
        "sigma_norm": poly2
    },
}

# shape, loc, scale lognorm?

ghm = GlobalHierarchicalModel([dim0_description, dim1_description])

ghm.fit(data)

# %%

state_duration = 1 / 6
return_period = 50
# alpha = calculate_alpha(state_duration, return_period)

alpha = 1 / (5 * len(data))

# alpha = 10 min / 50 years
# alpha = 10 min / 50 * 365.25 days
# alpha = 10 min / 50 * 365.25 *24 hours
# alpha = 10 min / 50 * 365.25 *24 * 60 min
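
# A minimal numeric check of the comments above (assuming calculate_alpha takes
# the state duration in hours and the return period in years, as elsewhere in
# this file): 10-minute states, 50-year return period.
alpha_check = calculate_alpha(state_duration, return_period)
alpha_manual = 10 / (50 * 365.25 * 24 * 60)  # 10 min as a fraction of 50 years.
np.testing.assert_allclose(alpha_check, alpha_manual)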