def multi_fidelity_borehole_function(high_noise_std_deviation=0, low_noise_std_deviation=0):
    """
    Two-level borehole function.

    The Borehole function models water flow through a borehole. Its simplicity and quick
    evaluation make it a commonly used function for testing a wide variety of methods in
    computer experiments.

    See reference for equations:
    https://www.sfu.ca/~ssurjano/borehole.html

    :param high_noise_std_deviation: Standard deviation of Gaussian observation noise on
        high fidelity observations. Defaults to zero.
    :param low_noise_std_deviation: Standard deviation of Gaussian observation noise on
        low fidelity observations. Defaults to zero.
    :return: Tuple of user function object and parameter space
    """
    parameter_space = ParameterSpace([
        ContinuousParameter('borehole_radius', 0.05, 0.15),
        ContinuousParameter('radius_of_influence', 100, 50000),
        ContinuousParameter('upper_aquifer_transmissivity', 63070, 115600),
        ContinuousParameter('upper_aquifer_head', 990, 1110),
        ContinuousParameter('lower_aquifer_transmissivity', 63.1, 116),
        ContinuousParameter('lower_aquifer_head', 700, 820),
        ContinuousParameter('borehole_length', 1120, 1680),
        ContinuousParameter('hydraulic_conductivity', 9855, 12045),
        InformationSourceParameter(2)])

    user_function = MultiSourceFunctionWrapper([
        lambda x: borehole_low(x, low_noise_std_deviation),
        lambda x: borehole_high(x, high_noise_std_deviation)])

    return user_function, parameter_space

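# Usage sketch for the function above (an assumption about how the returned wrapper is
# called, not part of the original module): each input row carries the 8 physical
# parameters plus a trailing source index, where 0 selects the low-fidelity function and
# 1 the high-fidelity one, matching the order passed to MultiSourceFunctionWrapper.
def _borehole_usage_example():
    import numpy as np

    user_function, parameter_space = multi_fidelity_borehole_function()
    # One point near the centre of each parameter's range, evaluated at both fidelities.
    midpoint = np.array([[0.1, 25050, 89335, 1050, 89.55, 760, 1400, 10950]])
    x_low = np.concatenate([midpoint, np.array([[0]])], axis=1)
    x_high = np.concatenate([midpoint, np.array([[1]])], axis=1)
    return user_function.evaluate(np.concatenate([x_low, x_high], axis=0))
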
def multi_fidelity_non_linear_sin(high_fidelity_noise_std_deviation=0, low_fidelity_noise_std_deviation=0):
    r"""
    Two-level non-linear sin function where the high fidelity is given by:

    .. math::
        f_{high}(x) = (x - \sqrt{2}) f_{low}(x)^2

    and the low fidelity is:

    .. math::
        f_{low}(x) = \sin(8 \pi x)

    Reference:
    Nonlinear information fusion algorithms for data-efficient multi-fidelity modelling.
    P. Perdikaris, M. Raissi, A. Damianou, N. D. Lawrence and G. E. Karniadakis (2017)
    http://web.mit.edu/parisp/www/assets/20160751.full.pdf
    """
    parameter_space = ParameterSpace(
        [ContinuousParameter("x1", -5, 10), InformationSourceParameter(2)])

    user_function = MultiSourceFunctionWrapper([
        lambda x: nonlinear_sin_low(x, low_fidelity_noise_std_deviation),
        lambda x: nonlinear_sin_high(x, high_fidelity_noise_std_deviation),
    ])

    return user_function, parameter_space

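# Minimal sketch of the two fidelity levels referenced above, transcribed from the
# docstring equations (an assumption: it mirrors, rather than reproduces, the actual
# `nonlinear_sin_low` / `nonlinear_sin_high` helpers, including additive Gaussian noise
# controlled by the std-deviation arguments). `x` is an (n, 1) array.
def _nonlinear_sin_low_sketch(x, noise_std_deviation=0):
    import numpy as np
    # f_low(x) = sin(8 * pi * x), plus optional observation noise
    return np.sin(8 * np.pi * x) + np.random.randn(x.shape[0], 1) * noise_std_deviation


def _nonlinear_sin_high_sketch(x, noise_std_deviation=0):
    import numpy as np
    # f_high(x) = (x - sqrt(2)) * f_low(x)^2, plus optional observation noise
    return (x - np.sqrt(2)) * _nonlinear_sin_low_sketch(x) ** 2 \
        + np.random.randn(x.shape[0], 1) * noise_std_deviation
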
def multi_source_entropy_search_acquisition(gpy_model):
    space = ParameterSpace(
        [ContinuousParameter("x1", 0, 1), InformationSourceParameter(2)])
    return MultiInformationSourceEntropySearch(gpy_model, space, num_representer_points=10)

def multi_source_optimizer():
    mock_acquisition_optimizer = mock.create_autospec(AcquisitionOptimizer)
    mock_acquisition_optimizer.optimize.return_value = (np.array([[0.]]), None)
    space = ParameterSpace(
        [ContinuousParameter('x', 0, 1), InformationSourceParameter(2)])
    return MultiSourceAcquisitionOptimizer(mock_acquisition_optimizer, space)

def multi_fidelity_forrester_function(high_fidelity_noise_std_deviation=0, low_fidelity_noise_std_deviation=0):
    r"""
    Two-level multi-fidelity Forrester function where the high fidelity is given by:

    .. math::
        f_{high}(x) = (6x - 2)^2 \sin(12x - 4)

    and the low fidelity approximation is given by:

    .. math::
        f_{low}(x) = 0.5 f_{high}(x) + 10 (x - 0.5) + 5

    :param high_fidelity_noise_std_deviation: Standard deviation of observation noise on
        high fidelity observations. Defaults to zero.
    :param low_fidelity_noise_std_deviation: Standard deviation of observation noise on
        low fidelity observations. Defaults to zero.
    :return: Tuple of user function object and parameter space object
    """
    parameter_space = ParameterSpace([ContinuousParameter("x", 0, 1), InformationSourceParameter(2)])

    user_function = MultiSourceFunctionWrapper(
        [
            lambda x: forrester_low(x, low_fidelity_noise_std_deviation),
            lambda x: forrester(x, high_fidelity_noise_std_deviation),
        ]
    )

    return user_function, parameter_space

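# Minimal sketch of the Forrester pair referenced above, written directly from the
# docstring equations (an assumption: the real `forrester` / `forrester_low` helpers add
# Gaussian noise with the given standard deviation in the same way). `x` is an (n, 1)
# array of points in [0, 1].
def _forrester_sketch(x, noise_std_deviation=0):
    import numpy as np
    # f_high(x) = (6x - 2)^2 * sin(12x - 4), plus optional observation noise
    return (6 * x - 2) ** 2 * np.sin(12 * x - 4) \
        + np.random.randn(x.shape[0], 1) * noise_std_deviation


def _forrester_low_sketch(x, noise_std_deviation=0):
    import numpy as np
    # f_low(x) = 0.5 * f_high(x) + 10 * (x - 0.5) + 5, plus optional observation noise
    return 0.5 * _forrester_sketch(x) + 10 * (x - 0.5) + 5 \
        + np.random.randn(x.shape[0], 1) * noise_std_deviation
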
def test_random_search_acquisition_optimizer_with_context(simple_square_acquisition):
    space = ParameterSpace([CategoricalParameter('x', OrdinalEncoding(np.arange(0, 100))),
                            InformationSourceParameter(10)])
    optimizer = RandomSearchAcquisitionOptimizer(space, 1000)

    source_encoding = 1
    opt_x, opt_val = optimizer.optimize(simple_square_acquisition, {'source': source_encoding})
    assert_array_equal(opt_x, np.array([[1., source_encoding]]))
    assert_array_equal(opt_val, np.array([[0. + source_encoding]]))

def test_multi_source_acquisition_optimizer(simple_square_acquisition):
    space = ParameterSpace(
        [ContinuousParameter("x", 0, 1), InformationSourceParameter(2)])
    single_optimizer = GradientAcquisitionOptimizer(space)
    optimizer = MultiSourceAcquisitionOptimizer(single_optimizer, space)

    opt_x, opt_val = optimizer.optimize(simple_square_acquisition)
    assert_array_equal(opt_x, np.array([[0.0, 1.0]]))
    assert_array_equal(opt_val, np.array([[2.0]]))

def test_multi_source_acquisition_optimizer():
    space = ParameterSpace(
        [ContinuousParameter('x', 0, 1), InformationSourceParameter(2)])
    acquisition = SimpleSquareAcquisition()
    single_optimizer = AcquisitionOptimizer(space)
    optimizer = MultiSourceAcquisitionOptimizer(single_optimizer, space)

    opt_x, opt_val = optimizer.optimize(acquisition)
    np.testing.assert_array_equal(opt_x, np.array([[0., 1.]]))
    np.testing.assert_array_equal(opt_val, np.array([[2.]]))

def test_local_search_acquisition_optimizer_with_context(simple_square_acquisition):
    space = ParameterSpace([
        CategoricalParameter("x", OrdinalEncoding(np.arange(0, 100))),
        InformationSourceParameter(10)
    ])
    optimizer = LocalSearchAcquisitionOptimizer(space, 1000, 3)

    source_encoding = 1
    opt_x, opt_val = optimizer.optimize(simple_square_acquisition, {"source": source_encoding})
    np.testing.assert_array_equal(opt_x, np.array([[1.0, source_encoding]]))
    np.testing.assert_array_equal(opt_val, np.array([[0.0 + source_encoding]]))

def test_multi_source_sequential_with_source_context():
    # Check that we can fix the information source parameter with a context
    mock_acquisition = mock.create_autospec(Acquisition)
    mock_acquisition.has_gradients = False
    mock_acquisition.evaluate = lambda x: np.sum(x**2, axis=1)[:, None]
    space = ParameterSpace(
        [ContinuousParameter("x", 0, 1), ContinuousParameter("y", 0, 1), InformationSourceParameter(2)]
    )
    acquisition_optimizer = GradientAcquisitionOptimizer(space)
    multi_source_acquisition_optimizer = MultiSourceAcquisitionOptimizer(acquisition_optimizer, space)
    loop_state_mock = mock.create_autospec(LoopState)
    seq = SequentialPointCalculator(mock_acquisition, multi_source_acquisition_optimizer)
    next_points = seq.compute_next_points(loop_state_mock, context={"source": 1.0})

    # "SequentialPointCalculator" should only ever return 1 value
    assert len(next_points) == 1
    # The source column (the last parameter in the space) should hold the context value we set
    assert np.isclose(next_points[0, 2], 1.0)

def test_two_information_source_parameters_fail():
    with pytest.raises(ValueError):
        ParameterSpace(
            [InformationSourceParameter(2), InformationSourceParameter(2)])

def MUMBO_acquisition(gpy_model):
    space = ParameterSpace(
        [ContinuousParameter("x1", 0, 1), InformationSourceParameter(2)])
    return MUMBO(gpy_model, space, num_samples=10, grid_size=5000)

def test_single_value_in_domain_information_source_parameter():
    param = InformationSourceParameter(5)
    assert param.check_in_domain(2) is True
    assert param.check_in_domain(7) is False

def test_information_source_parameter():
    param = InformationSourceParameter(5)
    assert param.name == 'source'
    assert param.check_in_domain(np.array([0, 1])) is True
    assert param.check_in_domain(np.array([4])) is True
    assert param.check_in_domain(np.array([5])) is False