def get_DNVGL_Hs_Tz():
    """
    Get DNVGL significant wave height and wave period model.

    Get the descriptions necessary to create the significant wave height
    and wave period model as defined in DNVGL [1]_ in section 3.6.3.

    Returns
    -------
    dist_descriptions : list of dict
        List of dictionaries containing the dist descriptions for each
        dimension. Can be used to create a GlobalHierarchicalModel.
    fit_descriptions : None
        Default fit is used so None is returned.
        Can be passed to fit function of GlobalHierarchicalModel.
    semantics : dict
        Dictionary with a semantic description of the model.
        Can be passed to plot functions.

    References
    ----------
    .. [1] DNV GL (2017). Recommended practice DNVGL-RP-C205: Environmental
        conditions and environmental loads.
    """

    # Model definition from DNVGL-RP-C205, section 3.6.3.

    # A 3-parameter power function (a dependence function).
    def _power3(x, a, b, c):
        return a + b * x**c

    # A 3-parameter exponential function (a dependence function).
    def _exp3(x, a, b, c):
        return a + b * np.exp(c * x)

    bounds = [(0, None), (0, None), (None, None)]
    power3 = DependenceFunction(_power3, bounds)
    exp3 = DependenceFunction(_exp3, bounds)

    dist_description_hs = {
        "distribution": WeibullDistribution(),
        "intervals": WidthOfIntervalSlicer(width=0.5),
    }
    dist_description_tz = {
        "distribution": LogNormalDistribution(),
        "conditional_on": 0,
        "parameters": {"mu": power3, "sigma": exp3},
    }

    dist_descriptions = [dist_description_hs, dist_description_tz]

    fit_descriptions = None

    semantics = {
        "names": ["Significant wave height", "Zero-crossing wave period"],
        "symbols": ["H_s", "T_z"],
        "units": ["m", "s"],
    }

    return dist_descriptions, fit_descriptions, semantics
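# Usage sketch (added for illustration; not part of the original source).
# It shows how the returned descriptions are typically combined. The file
# name "sea_states.csv" and the helper name are placeholders.
def _example_fit_DNVGL_Hs_Tz(csv_path="sea_states.csv"):
    import pandas as pd

    from virocon import GlobalHierarchicalModel

    dist_descriptions, fit_descriptions, _semantics = get_DNVGL_Hs_Tz()
    model = GlobalHierarchicalModel(dist_descriptions)
    # Columns must be ordered (Hs, Tz) to match the model dimensions.
    data = pd.read_csv(csv_path)[["Hs", "Tz"]]
    model.fit(data, fit_descriptions)  # None -> virocon's default fit
    return model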
def test_HDC(reference_coordinates_HDC):
    def _power3(x, a=0.1000, b=1.489, c=0.1901):
        return a + b * x**c

    # A 3-parameter exponential function (a dependence function).
    def _exp3(x, a=0.0400, b=0.1748, c=-0.2243):
        return a + b * np.exp(c * x)

    bounds = [(0, None), (0, None), (None, None)]
    power3 = DependenceFunction(_power3, bounds)
    exp3 = DependenceFunction(_exp3, bounds)

    dist_description_0 = {
        "distribution": WeibullDistribution(alpha=2.776, beta=1.471, gamma=0.8888),
    }
    dist_description_1 = {
        "distribution": LogNormalDistribution(),
        "conditional_on": 0,
        "parameters": {"mu": power3, "sigma": exp3},
    }

    ghm = GlobalHierarchicalModel([dist_description_0, dist_description_1])

    alpha = calculate_alpha(3, 50)
    limits = [(0, 20), (0, 18)]
    deltas = [0.1, 0.1]
    my_contour = HighestDensityContour(ghm, alpha, limits, deltas)

    my_coordinates = my_contour.coordinates

    np.testing.assert_allclose(my_coordinates, reference_coordinates_HDC)
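# Note (added for clarity): calculate_alpha converts a sea state duration
# (in hours) and a return period (in years) into the exceedance probability
#     alpha = state_duration / (return_period * 365.25 * 24),
# so calculate_alpha(3, 50) above corresponds to alpha of roughly 6.8e-6.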
def seastate_model():
    """
    The joint distribution model described by Vanem and Bitner-Gregersen
    (2012) is widely used in academia. Here, we use it for evaluation.
    DOI: 10.1016/j.apor.2012.05.006
    """

    def _power3(x, a=0.1000, b=1.489, c=0.1901):
        return a + b * x**c

    # A 3-parameter exponential function (a dependence function).
    def _exp3(x, a=0.0400, b=0.1748, c=-0.2243):
        return a + b * np.exp(c * x)

    bounds = [(0, None), (0, None), (None, None)]
    power3 = DependenceFunction(_power3, bounds)
    exp3 = DependenceFunction(_exp3, bounds)

    dist_description_0 = {
        "distribution": WeibullDistribution(alpha=2.776, beta=1.471, gamma=0.8888),
    }
    dist_description_1 = {
        "distribution": LogNormalDistribution(),
        "conditional_on": 0,
        "parameters": {"mu": power3, "sigma": exp3},
    }

    model = GlobalHierarchicalModel([dist_description_0, dist_description_1])

    return model
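# A short sketch (added for illustration) of how the model above is
# typically exercised in an evaluation; the sample size and return period
# are illustrative, not from the source:
def _example_use_seastate_model():
    from virocon import IFORMContour, calculate_alpha

    model = seastate_model()
    sample = model.draw_sample(1000)  # random (Hs, Tz) realizations
    contour = IFORMContour(model, calculate_alpha(3, 50))
    return sample, contour.coordinates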
def get_DNVGL_Hs_U():
    """
    Get DNVGL significant wave height and wind speed model.

    Get the descriptions necessary to create the significant wave height
    and wind speed model as defined in DNVGL [1]_ in section 3.6.4.

    Returns
    -------
    dist_descriptions : list of dict
        List of dictionaries containing the dist descriptions for each
        dimension. Can be used to create a GlobalHierarchicalModel.
    fit_descriptions : None
        Default fit is used so None is returned.
        Can be passed to fit function of GlobalHierarchicalModel.
    semantics : dict
        Dictionary with a semantic description of the model.
        Can be passed to plot functions.

    References
    ----------
    .. [1] DNV GL (2017). Recommended practice DNVGL-RP-C205: Environmental
        conditions and environmental loads.
    """

    def _power3(x, a, b, c):
        return a + b * x**c

    bounds = [(0, None), (0, None), (None, None)]
    alpha_dep = DependenceFunction(_power3, bounds=bounds)
    beta_dep = DependenceFunction(_power3, bounds=bounds)

    dist_description_hs = {
        "distribution": WeibullDistribution(),
        "intervals": WidthOfIntervalSlicer(width=0.5, min_n_points=20),
    }
    dist_description_u = {
        "distribution": WeibullDistribution(f_gamma=0),
        "conditional_on": 0,
        "parameters": {
            "alpha": alpha_dep,
            "beta": beta_dep,
        },
    }

    dist_descriptions = [dist_description_hs, dist_description_u]

    fit_descriptions = None

    semantics = {
        "names": ["Significant wave height", "Mean wind speed"],
        "symbols": ["H_s", "U"],
        "units": ["m", "m s$^{-1}$"],
    }

    return dist_descriptions, fit_descriptions, semantics
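# Plotting sketch (added for illustration; assumes a model already created
# from dist_descriptions and fitted to a DataFrame `data` ordered (Hs, U)):
#
#     ax = plot_2D_isodensity(model, data, semantics=semantics)
#
# The semantics dict is only consumed by the plot helpers, e.g. to label axes.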
def test_DirectSamplingContour(reference_data_DSContour):
    sample = reference_data_DSContour["sample"]
    ref_coordinates = reference_data_DSContour["ref_coordinates"]

    def _power3(x, a=0.1000, b=1.489, c=0.1901):
        return a + b * x**c

    # A 3-parameter exponential function (a dependence function).
    def _exp3(x, a=0.0400, b=0.1748, c=-0.2243):
        return a + b * np.exp(c * x)

    bounds = [(0, None), (0, None), (None, None)]
    power3 = DependenceFunction(_power3, bounds)
    exp3 = DependenceFunction(_exp3, bounds)

    dist_description_0 = {
        "distribution": WeibullDistribution(alpha=2.776, beta=1.471, gamma=0.8888),
    }
    dist_description_1 = {
        "distribution": LogNormalDistribution(),
        "conditional_on": 0,
        "parameters": {"mu": power3, "sigma": exp3},
    }

    ghm = GlobalHierarchicalModel([dist_description_0, dist_description_1])

    alpha = calculate_alpha(3, 50)
    my_ds_contour = DirectSamplingContour(ghm, alpha, sample=sample)

    my_coordinates = my_ds_contour.coordinates

    np.testing.assert_allclose(my_coordinates, ref_coordinates)
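# Note (added for clarity): unlike IFORM or ISORM, a direct sampling contour
# is estimated from a Monte Carlo sample of the model. Passing a precomputed
# `sample` here keeps the test deterministic; without it, DirectSamplingContour
# would presumably draw its own sample from `ghm`.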
def test_ISORM(reference_coordinates_ISORM):
    # Logarithmic square function.
    def _lnsquare2(x, a=3.62, b=5.77):
        return np.log(a + b * np.sqrt(x / 9.81))

    # 3-parameter function that asymptotically decreases (a dependence
    # function).
    def _asymdecrease3(x, a=0, b=0.324, c=0.404):
        return a + b / (1 + c * x)

    lnsquare2 = DependenceFunction(_lnsquare2)
    asymdecrease3 = DependenceFunction(_asymdecrease3)

    dist_description_0 = {
        "distribution": ExponentiatedWeibullDistribution(
            alpha=0.207, beta=0.684, delta=7.79
        ),
    }
    dist_description_1 = {
        "distribution": LogNormalDistribution(),
        "conditional_on": 0,
        "parameters": {"mu": lnsquare2, "sigma": asymdecrease3},
    }

    ghm = GlobalHierarchicalModel([dist_description_0, dist_description_1])

    state_duration = 3
    return_period = 20
    alpha = calculate_alpha(state_duration, return_period)
    my_isorm = ISORMContour(ghm, alpha)

    my_coordinates = my_isorm.coordinates

    np.testing.assert_allclose(my_coordinates, reference_coordinates_ISORM)
def get_OMAE2020_Hs_Tz():
    """
    Get OMAE2020 significant wave height and wave period model.

    Get the descriptions necessary to create the significant wave height
    and wave period model as described by Haselsteiner et al. [1]_.

    Returns
    -------
    dist_descriptions : list of dict
        List of dictionaries containing the dist descriptions for each
        dimension. Can be used to create a GlobalHierarchicalModel.
    fit_descriptions : list of dict
        List of dictionaries containing the fit description for each
        dimension. Can be passed to fit function of GlobalHierarchicalModel.
    semantics : dict
        Dictionary with a semantic description of the model.
        Can be passed to plot functions.

    References
    ----------
    .. [1] Haselsteiner, A.F.; Sander, A.; Ohlendorf, J.H.; Thoben, K.D. (2020)
        Global hierarchical models for wind and wave contours: Physical
        interpretations of the dependence functions. OMAE 2020, Fort
        Lauderdale, USA. Proceedings of the 39th International Conference on
        Ocean, Offshore and Arctic Engineering.
    """

    def _asymdecrease3(x, a, b, c):
        return a + b / (1 + c * x)

    # Note: c is not used by _lnsquare2; the 3-parameter signature is kept so
    # that both dependence functions can share the same bounds.
    def _lnsquare2(x, a, b, c):
        return np.log(a + b * np.sqrt(np.divide(x, 9.81)))

    bounds = [(0, None), (0, None), (None, None)]

    sigma_dep = DependenceFunction(_asymdecrease3, bounds=bounds)
    mu_dep = DependenceFunction(_lnsquare2, bounds=bounds)

    dist_description_hs = {
        "distribution": ExponentiatedWeibullDistribution(),
        "intervals": WidthOfIntervalSlicer(width=0.5, min_n_points=50),
    }
    dist_description_tz = {
        "distribution": LogNormalDistribution(),
        "conditional_on": 0,
        "parameters": {"sigma": sigma_dep, "mu": mu_dep},
    }

    dist_descriptions = [dist_description_hs, dist_description_tz]

    fit_description_hs = {"method": "wlsq", "weights": "quadratic"}
    fit_descriptions = [fit_description_hs, None]

    semantics = {
        "names": ["Significant wave height", "Zero-crossing wave period"],
        "symbols": ["H_s", "T_z"],
        "units": ["m", "s"],
    }

    return dist_descriptions, fit_descriptions, semantics
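# Usage sketch (added for illustration; not part of the original source).
# Unlike the DNVGL getters, this getter returns explicit fit descriptions
# that request weighted least squares for the first dimension:
def _example_fit_OMAE2020_Hs_Tz(data):
    # `data` is assumed to be a pandas DataFrame ordered (Hs, Tz).
    from virocon import GlobalHierarchicalModel

    dist_descriptions, fit_descriptions, _semantics = get_OMAE2020_Hs_Tz()
    model = GlobalHierarchicalModel(dist_descriptions)
    # fit_descriptions is [{"method": "wlsq", "weights": "quadratic"}, None].
    model.fit(data, fit_descriptions)
    return model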
)

# %%
# OMAE2019, dataset A

# A 3-parameter power function (a dependence function).
def _power3(x, a=1.47, b=0.214, c=0.641):
    return a + b * x**c

# A 3-parameter exponential function (a dependence function).
def _exp3(x, a=0.0, b=0.308, c=-0.250):
    return a + b * np.exp(c * x)

power3 = DependenceFunction(_power3)
exp3 = DependenceFunction(_exp3)

dist_description_0 = {
    "distribution": WeibullDistribution(alpha=0.944, beta=1.48, gamma=0.0981),
}
dist_description_1 = {
    "distribution": LogNormalDistribution(),
    "conditional_on": 0,
    "parameters": {"mu": power3, "sigma": exp3},
}
# A 4-parameter logistic function (a dependence function).
def _logistics4(x, a=1, b=1, c=-1, d=1):
    return a + b / (1 + np.exp(c * (x - d)))

# A 3-parameter function designed for the scale parameter (alpha) of an
# exponentiated Weibull distribution with shape2=5 (see 'Global hierarchical
# models for wind and wave contours').
def _alpha3(x, a, b, c, d_of_x):
    return (a + b * x**c) / 2.0445**(1 / d_of_x(x))

logistics_bounds = [(0, None), (0, None), (None, 0), (0, None)]
alpha_bounds = [(0, None), (0, None), (None, None)]

beta_dep = DependenceFunction(_logistics4, logistics_bounds, weights=lambda x, y: y)
alpha_dep = DependenceFunction(
    _alpha3, alpha_bounds, d_of_x=beta_dep, weights=lambda x, y: y
)

dist_description_vs = {
    "distribution": ExponentiatedWeibullDistribution(),
    "intervals": WidthOfIntervalSlicer(2, min_n_points=50),
}
dist_description_hs = {
    "distribution": ExponentiatedWeibullDistribution(f_delta=5),
    "conditional_on": 0,
    "parameters": {
        "alpha": alpha_dep,
        "beta": beta_dep,
    },
}
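# Note (added for clarity): _alpha3 receives beta_dep via d_of_x, so the
# scale parameter's dependence function evaluates the shape parameter's
# dependence function at the same x. The median of an exponentiated Weibull
# distribution with delta = 5 is alpha * 2.0445**(1 / beta), where
# 2.0445 = -ln(1 - 0.5**(1 / 5)), so dividing the fitted median curve
# a + b * x**c by 2.0445**(1 / beta(x)) yields alpha(x).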
def test_v_hs_hd_contour():
    """
    Use a wind speed - wave height dataset, fit the joint distribution that
    was proposed by Haselsteiner et al. (2020) and compute a highest density
    contour. This test reproduces the results presented in Haselsteiner et
    al. (2020). The coordinates are available at
    https://github.com/ec-benchmark-organizers/ec-benchmark/blob/master/
    results/exercise-1/contribution-4/haselsteiner_andreas_dataset_d_50.txt

    Such a workflow is, for example, typical when generating a 50-year
    contour for DLC 1.6 in the offshore wind standard IEC 61400-3-1.

    Haselsteiner, A. F., Sander, A., Ohlendorf, J.-H., & Thoben, K.-D. (2020).
    Global hierarchical models for wind and wave contours: Physical
    interpretations of the dependence functions. Proc. 39th International
    Conference on Ocean, Offshore and Arctic Engineering (OMAE 2020).
    https://doi.org/10.1115/OMAE2020-18668

    International Electrotechnical Commission. (2019). Wind energy generation
    systems - Part 3-1: Design requirements for fixed offshore wind turbines
    (IEC 61400-3-1).
    """
    data = read_ec_benchmark_dataset("datasets/ec-benchmark_dataset_D.txt")

    def _logistics4(x, a=1, b=1, c=-1, d=1):
        return a + b / (1 + np.exp(c * (x - d)))

    def _alpha3(x, a, b, c, d_of_x):
        return (a + b * x**c) / 2.0445**(1 / d_of_x(x))

    logistics_bounds = [(0, None), (0, None), (None, 0), (0, None)]
    alpha_bounds = [(0, None), (0, None), (None, None)]

    beta_dep = DependenceFunction(
        _logistics4, logistics_bounds, weights=lambda x, y: y
    )
    alpha_dep = DependenceFunction(
        _alpha3, alpha_bounds, d_of_x=beta_dep, weights=lambda x, y: y
    )

    dist_description_v = {
        "distribution": ExponentiatedWeibullDistribution(),
        "intervals": WidthOfIntervalSlicer(width=2),
    }
    dist_description_hs = {
        "distribution": ExponentiatedWeibullDistribution(f_delta=5),
        "conditional_on": 0,
        "parameters": {
            "alpha": alpha_dep,
            "beta": beta_dep,
        },
    }
    model = GlobalHierarchicalModel([dist_description_v, dist_description_hs])

    fit_description_vs = {"method": "wlsq", "weights": "quadratic"}
    fit_description_hs = {"method": "wlsq", "weights": "quadratic"}
    model.fit(data, [fit_description_vs, fit_description_hs])

    axs = plot_marginal_quantiles(model, data)
    axs = plot_dependence_functions(model)
    ax = plot_2D_isodensity(model, data)

    alpha = calculate_alpha(1, 50)
    limits = [(0, 35), (0, 20)]
    contour = HighestDensityContour(model, alpha, limits=limits, deltas=[0.2, 0.2])

    coordinates = contour.coordinates
    np.testing.assert_allclose(max(coordinates[:, 0]), 29.9, atol=0.2)
    np.testing.assert_allclose(max(coordinates[:, 1]), 15.5, atol=0.2)
    np.testing.assert_allclose(min(coordinates[:, 0]), 0, atol=0.1)
    np.testing.assert_allclose(min(coordinates[:, 1]), 0, atol=0.1)

    ax = plot_2D_contour(contour, sample=data)
def test_hs_tz_iform_contour():
    """
    Use a sea state dataset with the variables Hs and Tz, fit the joint
    distribution recommended in DNVGL-RP-C205 to it and compute an IFORM
    contour. This test reproduces the results published in Haselsteiner
    et al. (2019). Such a workflow is, for example, typical in ship design.

    Haselsteiner, A. F., Coe, R. G., Manuel, L., Nguyen, P. T. T., Martin,
    N., & Eckert-Gallup, A. (2019). A benchmarking exercise on estimating
    extreme environmental conditions: Methodology & baseline results. Proc.
    38th International Conference on Ocean, Offshore and Arctic Engineering
    (OMAE 2019). https://doi.org/10.1115/OMAE2019-96523

    DNV GL. (2017). Recommended practice DNVGL-RP-C205: Environmental
    conditions and environmental loads.
    """
    data = read_ec_benchmark_dataset("datasets/ec-benchmark_dataset_A.txt")

    # A 3-parameter power function (a dependence function).
    def _power3(x, a, b, c):
        return a + b * x**c

    # A 3-parameter exponential function (a dependence function).
    def _exp3(x, a, b, c):
        return a + b * np.exp(c * x)

    bounds = [(0, None), (0, None), (None, None)]
    power3 = DependenceFunction(_power3, bounds)
    exp3 = DependenceFunction(_exp3, bounds)

    dist_description_0 = {
        "distribution": WeibullDistribution(),
        "intervals": WidthOfIntervalSlicer(width=0.5),
    }
    dist_description_1 = {
        "distribution": LogNormalDistribution(),
        "conditional_on": 0,
        "parameters": {"mu": power3, "sigma": exp3},
    }
    model = GlobalHierarchicalModel([dist_description_0, dist_description_1])

    model.fit(data)

    axs = plot_marginal_quantiles(model, data)
    axs = plot_dependence_functions(model)
    ax = plot_2D_isodensity(model, data)

    alpha = calculate_alpha(1, 20)
    contour = IFORMContour(model, alpha)

    coordinates = contour.coordinates
    np.testing.assert_allclose(max(coordinates[:, 0]), 5.0, atol=0.5)
    np.testing.assert_allclose(max(coordinates[:, 1]), 16.1, atol=0.5)

    ax = plot_2D_contour(contour, sample=data)
    ISORMContour,
)

x = np.linspace((0, 0), (10, 10), num=100)

# Logarithmic square function.
def _lnsquare2(x, a=3.62, b=5.77):
    return np.log(a + b * np.sqrt(x / 9.81))

# 3-parameter function that asymptotically decreases (a dependence function).
def _asymdecrease3(x, a=0, b=0.324, c=0.404):
    return a + b / (1 + c * x)

lnsquare2 = DependenceFunction(_lnsquare2)
asymdecrease3 = DependenceFunction(_asymdecrease3)

dist_description_0 = {
    "distribution": ExponentiatedWeibullDistribution(
        alpha=0.207, beta=0.684, delta=7.79
    ),
}
dist_description_1 = {
    "distribution": LogNormalDistribution(),
    "conditional_on": 0,
    "parameters": {"mu": lnsquare2, "sigma": asymdecrease3},
}

ghm = GlobalHierarchicalModel([dist_description_0, dist_description_1])
data = pd.read_csv("datasets/NDBC_buoy_46025.csv", sep=",")[["Hs", "T"]] # A 3-parameter power function (a dependence function). def _power3(x, a, b, c): return a + b * x**c # A 3-parameter exponential function (a dependence function). def _exp3(x, a, b, c): return a + b * np.exp(c * x) bounds = [(0, None), (0, None), (None, None)] power3 = DependenceFunction(_power3, bounds) exp3 = DependenceFunction(_exp3, bounds) dist_description_0 = { "distribution": WeibullDistribution(), "intervals": WidthOfIntervalSlicer(width=0.5), } dist_description_1 = { "distribution": LogNormalDistribution(), "conditional_on": 0, "parameters": { "mu": power3, "sigma": exp3 }, }
def _exp3(x, a=2, b=2, c=2):
    return a + b * np.exp(c * x)

smallest_positive_float = np.nextafter(0, 1)

# Constraints for _exp3: 0 < a < inf, 0 < b < inf, -inf < c < inf.
exp_bounds = [(0, None), (0, None), (None, None)]

linear = DependenceFunction(_linear)
exp3 = DependenceFunction(_exp3, bounds=exp_bounds)

rng = np.random.RandomState(42)
x = np.linspace(0.1, 10, num=50)

linear_param = (3.6, 6)
y_linear = linear(x, *linear_param) + 5 * rng.normal(scale=1, size=x.shape)

exp_param = (3, 1, 0.5)
y_exp = exp3(x, *exp_param) + 2 * rng.normal(scale=3, size=x.shape)

my_linear_param = fit_function(linear, x, y_linear, (1, 1), "lsq", None, None)
ref_linear_param = curve_fit(linear, x, y_linear, (1, 1))[0]

exp_p0 = tuple(exp3.parameters.values())
def test_DNVGL_Hs_Tz_model(dataset_dnvgl_hstz, refdata_dnvgl_hstz):
    # A 3-parameter power function (a dependence function).
    def _power3(x, a, b, c):
        return a + b * x**c

    # A 3-parameter exponential function (a dependence function).
    def _exp3(x, a, b, c):
        return a + b * np.exp(c * x)

    bounds = [(0, None), (0, None), (None, None)]
    power3 = DependenceFunction(_power3, bounds)
    exp3 = DependenceFunction(_exp3, bounds)

    x, dx = np.linspace([0.1, 0.1], [6, 22], num=100, retstep=True)

    dist_description_0 = {
        "distribution": WeibullDistribution(),
        "intervals": WidthOfIntervalSlicer(width=0.5),
    }
    dist_description_1 = {
        "distribution": LogNormalDistribution(),
        "conditional_on": 0,
        "parameters": {"mu": power3, "sigma": exp3},
    }

    ghm = GlobalHierarchicalModel([dist_description_0, dist_description_1])
    ghm.fit(dataset_dnvgl_hstz)

    f_weibull = ghm.distributions[0].pdf(x[:, 0])
    weibull_params = (
        ghm.distributions[0].beta,
        ghm.distributions[0].gamma,
        ghm.distributions[0].alpha,
    )

    lognorm = ghm.distributions[1]
    intervals = lognorm.data_intervals
    givens = lognorm.conditioning_values
    f_lognorm = []
    for given in givens:
        f_lognorm.append(lognorm.pdf(x[:, 1], given))

    f_lognorm = np.stack(f_lognorm, axis=1)
    mus = np.array([par["mu"] for par in lognorm.parameters_per_interval])
    sigmas = np.array([par["sigma"] for par in lognorm.parameters_per_interval])

    ref_f_weibull = refdata_dnvgl_hstz["ref_f_weibull"]
    ref_weibull_params = refdata_dnvgl_hstz["ref_weibull_params"]
    ref_intervals = refdata_dnvgl_hstz["ref_intervals"]
    ref_givens = refdata_dnvgl_hstz["ref_givens"]
    ref_f_lognorm = refdata_dnvgl_hstz["ref_f_lognorm"]
    ref_mus = refdata_dnvgl_hstz["ref_mus"]
    ref_sigmas = refdata_dnvgl_hstz["ref_sigmas"]

    assert len(intervals) == len(ref_intervals) == 11
    for i in range(len(ref_intervals)):
        assert sorted(intervals[i]) == sorted(ref_intervals[i])

    np.testing.assert_allclose(f_weibull, ref_f_weibull)
    np.testing.assert_allclose(weibull_params, ref_weibull_params)
    np.testing.assert_allclose(givens, ref_givens)
    np.testing.assert_allclose(f_lognorm, ref_f_lognorm, rtol=1e-5)
    np.testing.assert_allclose(mus, ref_mus)
    np.testing.assert_allclose(sigmas, ref_sigmas)
def test_WES4(dataset_wes_sigmau, refdata_wes_sigmau):
    # https://doi.org/10.5194/wes-4-325-2019

    class MyIntervalSlicer(WidthOfIntervalSlicer):
        def _slice(self, data):
            interval_slices, interval_references, interval_boundaries = super()._slice(
                data
            )

            # Discard slices below 4 m/s.
            ok_slices = []
            ok_references = []
            ok_boundaries = []
            for slice_, reference, boundaries in zip(
                interval_slices, interval_references, interval_boundaries
            ):
                if reference >= 4:
                    ok_slices.append(slice_)
                    ok_references.append(reference)
                    ok_boundaries.append(boundaries)

            return ok_slices, ok_references, ok_boundaries

    def _poly3(x, a, b, c, d):
        return a * x**3 + b * x**2 + c * x + d

    def _poly2(x, a, b, c):
        return a * x**2 + b * x + c

    poly3 = DependenceFunction(_poly3)
    poly2 = DependenceFunction(_poly2)

    dim0_description = {
        "distribution": WeibullDistribution(),
        "intervals": MyIntervalSlicer(width=1, reference="left", min_n_points=5),
    }
    dim1_description = {
        "distribution": LogNormalNormFitDistribution(),
        "conditional_on": 0,
        "parameters": {"mu_norm": poly3, "sigma_norm": poly2},
    }

    ghm = GlobalHierarchicalModel([dim0_description, dim1_description])
    ghm.fit(dataset_wes_sigmau)

    alpha = 1 / (5 * len(dataset_wes_sigmau))
    iform = IFORMContour(ghm, alpha)
    my_coordinates = iform.coordinates

    x_U = np.linspace(2, 40, num=100)
    x_sigma = np.linspace(0.02, 3.6, num=100)

    U_dist = ghm.distributions[0]
    my_weib_param = list(U_dist.parameters.values())
    my_f_weib = U_dist.pdf(x_U)

    my_ln = ghm.distributions[1]
    my_intervals = my_ln.data_intervals
    my_givens = my_ln.conditioning_values
    my_f_ln = []
    for given in my_givens:
        my_f_ln.append(my_ln.pdf(x_sigma, given))
    my_f_ln = np.stack(my_f_ln, axis=1)

    my_mu_norms = np.array(
        [par["mu_norm"] for par in my_ln.parameters_per_interval]
    )
    my_sigma_norms = np.array(
        [par["sigma_norm"] for par in my_ln.parameters_per_interval]
    )
    my_sigmas = [dist.sigma for dist in my_ln.distributions_per_interval]
    my_mus = [dist.mu for dist in my_ln.distributions_per_interval]

    ref_weib_param = refdata_wes_sigmau["ref_weib_param"]
    ref_f_weib = refdata_wes_sigmau["ref_f_weib"]
    ref_intervals = refdata_wes_sigmau["ref_intervals"]
    ref_givens = refdata_wes_sigmau["ref_givens"]
    ref_mu_norms = refdata_wes_sigmau["ref_mu_norms"]
    ref_sigma_norms = refdata_wes_sigmau["ref_sigma_norms"]
    ref_mus = refdata_wes_sigmau["ref_mus"]
    ref_sigmas = refdata_wes_sigmau["ref_sigmas"]
    ref_f_ln = refdata_wes_sigmau["ref_f_ln"]
    ref_coordinates = refdata_wes_sigmau["ref_coordinates"]

    np.testing.assert_allclose(my_weib_param, ref_weib_param)
    np.testing.assert_allclose(my_f_weib, ref_f_weib)

    assert len(my_intervals) == len(ref_intervals)
    for i in range(len(ref_intervals)):
        assert sorted(my_intervals[i]) == sorted(ref_intervals[i])

    np.testing.assert_allclose(my_givens, ref_givens)
    np.testing.assert_allclose(my_mu_norms, ref_mu_norms)
    np.testing.assert_allclose(my_sigma_norms, ref_sigma_norms)
    np.testing.assert_allclose(my_mus, ref_mus)
    np.testing.assert_allclose(my_sigmas, ref_sigmas)
    np.testing.assert_allclose(my_f_ln, ref_f_ln)
    np.testing.assert_allclose(my_coordinates, ref_coordinates)
def test_OMAE2020(dataset_omae2020_vhs, refdata_omae2020_vhs):
    def _logistics4(x, a=1, b=1, c=-1, d=1):
        return a + b / (1 + np.exp(c * (x - d)))

    def _alpha3(x, a, b, c, d_of_x):
        return (a + b * x**c) / 2.0445**(1 / d_of_x(x))

    logistics_bounds = [(0, None), (0, None), (None, 0), (0, None)]
    alpha_bounds = [(0, None), (0, None), (None, None)]

    beta_dep = DependenceFunction(
        _logistics4, logistics_bounds, weights=lambda x, y: y
    )
    alpha_dep = DependenceFunction(
        _alpha3, alpha_bounds, d_of_x=beta_dep, weights=lambda x, y: y
    )

    dist_description_vs = {
        "distribution": ExponentiatedWeibullDistribution(),
        "intervals": WidthOfIntervalSlicer(width=2),
    }
    dist_description_hs = {
        "distribution": ExponentiatedWeibullDistribution(f_delta=5),
        "conditional_on": 0,
        "parameters": {
            "alpha": alpha_dep,
            "beta": beta_dep,
        },
    }

    ghm = GlobalHierarchicalModel([dist_description_vs, dist_description_hs])

    fit_description_vs = {"method": "wlsq", "weights": "quadratic"}
    fit_description_hs = {"method": "wlsq", "weights": "quadratic"}
    ghm.fit(dataset_omae2020_vhs, [fit_description_vs, fit_description_hs])

    x = np.linspace([0.1, 0.1], [30, 12], num=100)

    my_f_expweib0 = ghm.distributions[0].pdf(x[:, 0])
    my_expweib0_params = (
        ghm.distributions[0].alpha,
        ghm.distributions[0].beta,
        ghm.distributions[0].delta,
    )

    my_expweib1 = ghm.distributions[1]
    my_givens = my_expweib1.conditioning_values
    my_f_expweib1 = []
    for given in my_givens:
        my_f_expweib1.append(my_expweib1.pdf(x[:, 1], given))
    my_f_expweib1 = np.stack(my_f_expweib1, axis=1)

    my_alphas = np.array(
        [par["alpha"] for par in my_expweib1.parameters_per_interval]
    )
    my_betas = np.array(
        [par["beta"] for par in my_expweib1.parameters_per_interval]
    )
    my_intervals = my_expweib1.data_intervals

    ref_expweib0_params = refdata_omae2020_vhs["ref_expweib0_params"]
    ref_f_expweib0 = refdata_omae2020_vhs["ref_f_expweib0"]
    ref_intervals = refdata_omae2020_vhs["ref_intervals"]
    ref_givens = refdata_omae2020_vhs["ref_givens"]
    ref_alphas = refdata_omae2020_vhs["ref_alphas"]
    ref_betas = refdata_omae2020_vhs["ref_betas"]
    ref_f_expweib1 = refdata_omae2020_vhs["ref_f_expweib1"]

    np.testing.assert_almost_equal(my_expweib0_params, ref_expweib0_params)
    np.testing.assert_almost_equal(my_f_expweib0, ref_f_expweib0)
    for my_interval, ref_interval in zip(my_intervals, ref_intervals):
        np.testing.assert_almost_equal(np.sort(my_interval), np.sort(ref_interval))
    np.testing.assert_almost_equal(my_givens, ref_givens)
    np.testing.assert_almost_equal(my_alphas, ref_alphas)
    np.testing.assert_almost_equal(my_betas, ref_betas)
    np.testing.assert_almost_equal(my_f_expweib1, ref_f_expweib1)
def get_OMAE2020_V_Hs():
    """
    Get OMAE2020 wind speed and significant wave height model.

    Get the descriptions necessary to create the wind speed and
    significant wave height model as described by Haselsteiner et al. [1]_.

    Returns
    -------
    dist_descriptions : list of dict
        List of dictionaries containing the dist descriptions for each
        dimension. Can be used to create a GlobalHierarchicalModel.
    fit_descriptions : list of dict
        List of dictionaries containing the fit description for each
        dimension. Can be passed to fit function of GlobalHierarchicalModel.
    semantics : dict
        Dictionary with a semantic description of the model.
        Can be passed to plot functions.

    References
    ----------
    .. [1] Haselsteiner, A.F.; Sander, A.; Ohlendorf, J.H.; Thoben, K.D. (2020)
        Global hierarchical models for wind and wave contours: Physical
        interpretations of the dependence functions. OMAE 2020, Fort
        Lauderdale, USA. Proceedings of the 39th International Conference on
        Ocean, Offshore and Arctic Engineering.
    """

    def _logistics4(x, a=1, b=1, c=-1, d=1):
        return a + b / (1 + np.exp(c * (x - d)))

    def _alpha3(x, a, b, c, d_of_x):
        return (a + b * x**c) / 2.0445**(1 / d_of_x(x))

    logistics_bounds = [(0, None), (0, None), (None, 0), (0, None)]
    alpha_bounds = [(0, None), (0, None), (None, None)]

    beta_dep = DependenceFunction(
        _logistics4, logistics_bounds, weights=lambda x, y: y
    )
    alpha_dep = DependenceFunction(
        _alpha3, alpha_bounds, d_of_x=beta_dep, weights=lambda x, y: y
    )

    dist_description_v = {
        "distribution": ExponentiatedWeibullDistribution(),
        "intervals": WidthOfIntervalSlicer(2, min_n_points=50),
    }
    dist_description_hs = {
        "distribution": ExponentiatedWeibullDistribution(f_delta=5),
        "conditional_on": 0,
        "parameters": {"alpha": alpha_dep, "beta": beta_dep},
    }

    dist_descriptions = [dist_description_v, dist_description_hs]

    fit_description_v = {"method": "wlsq", "weights": "quadratic"}
    fit_description_hs = {"method": "wlsq", "weights": "quadratic"}
    fit_descriptions = [fit_description_v, fit_description_hs]

    semantics = {
        "names": ["Mean wind speed", "Significant wave height"],
        "symbols": ["V", "H_s"],
        "units": ["m s$^{-1}$", "m"],
    }

    return dist_descriptions, fit_descriptions, semantics
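# End-to-end sketch (added for illustration; the grid limits and return
# period mirror the tests elsewhere in this repository): fit the model and
# compute a 50-year highest density contour.
def _example_OMAE2020_V_Hs_contour(data):
    # `data` is assumed to be a pandas DataFrame ordered (V, Hs).
    from virocon import GlobalHierarchicalModel, HighestDensityContour, calculate_alpha

    dist_descriptions, fit_descriptions, _semantics = get_OMAE2020_V_Hs()
    model = GlobalHierarchicalModel(dist_descriptions)
    model.fit(data, fit_descriptions)
    alpha = calculate_alpha(1, 50)  # 1 h sea states, 50-year return period
    return HighestDensityContour(
        model, alpha, limits=[(0, 35), (0, 20)], deltas=[0.2, 0.2]
    )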
                ok_references.append(reference)
                ok_boundaries.append(boundaries)

        return ok_slices, ok_references, ok_boundaries

# %%
def _poly3(x, a, b, c, d):
    return a * x**3 + b * x**2 + c * x + d

def _poly2(x, a, b, c):
    return a * x**2 + b * x + c

poly3 = DependenceFunction(_poly3)
poly2 = DependenceFunction(_poly2)

dim0_description = {
    "distribution": WeibullDistribution(),
    "intervals": MyIntervalSlicer(width=1, reference="left", min_n_points=5),
    # Alternatively, fixed parameters could be used:
    # "parameters": {"alpha": 9.74, "beta": 2.02, "gamma": 2.2},
}
dim1_description = {
    "distribution": LogNormalNormFitDistribution(),
    "conditional_on": 0,
    "parameters": {
        "mu_norm": poly3,
        "sigma_norm": poly2,
    },
}