def test_basinhopping(self):
    """symfit's BasinHopping wrapper must reproduce scipy's basinhopping exactly."""
    # A plain def instead of a lambda bound to a name (PEP 8 E731).
    def func(x):
        return np.cos(14.5 * x - 0.3) + (x + 0.2) * x

    x0 = [1.]

    # Reference run: scipy directly. Seed first so both runs draw the
    # same random hop sequence.
    np.random.seed(555)
    res = basinhopping(func, x0, minimizer_kwargs={"method": "BFGS"}, niter=200)

    # Same seed, same problem, via the symfit wrapper.
    np.random.seed(555)
    x, = parameters('x')
    fit = BasinHopping(func, [x], local_minimizer=BFGS)
    fit_result = fit.execute(niter=200)

    self.assertEqual(res.x, fit_result.value(x))
    self.assertEqual(res.fun, fit_result.objective_value)
def test_basinhopping():
    """Check that symfit's BasinHopping tracks scipy's basinhopping result."""
    def objective(x):
        return np.cos(14.5 * x - 0.3) + (x + 0.2) * x

    start = [1.]

    # Reference run with scipy directly; seed pins the hop sequence.
    np.random.seed(555)
    res = basinhopping(objective, start,
                       minimizer_kwargs={"method": "BFGS"}, niter=200)

    # Re-seed so the symfit wrapper sees the identical random stream.
    np.random.seed(555)
    x, = parameters('x')
    fit = BasinHopping(objective, [x], local_minimizer=BFGS)
    fit_result = fit.execute(niter=200)

    assert res.x == fit_result.value(x)
    assert res.fun == fit_result.objective_value
def test_basinhopping_large():
    """
    Exercise basinhopping on scipy's classic multi-well demo surface and
    verify symfit's BasinHopping matches scipy.optimize.basinhopping.

    Based on scipy's docs:
    https://docs.scipy.org/doc/scipy-0.13.0/reference/generated/scipy.optimize.anneal.html
    """
    def quadratic_term(coords, *params):
        # Quadratic background surface.
        x, y = coords
        a, b, c, d, e, f, g, h, i, j, k, l, scale = params
        return a * x ** 2 + b * x * y + c * y ** 2 + d * x + e * y + f

    def gaussian_well_1(coords, *params):
        # First Gaussian well, centred at (h, i).
        x, y = coords
        a, b, c, d, e, f, g, h, i, j, k, l, scale = params
        return -g * np.exp(-((x - h) ** 2 + (y - i) ** 2) / scale)

    def gaussian_well_2(coords, *params):
        # Second Gaussian well, centred at (k, l).
        x, y = coords
        a, b, c, d, e, f, g, h, i, j, k, l, scale = params
        return -j * np.exp(-((x - k) ** 2 + (y - l) ** 2) / scale)

    def func(coords, *params):
        return (quadratic_term(coords, *params)
                + gaussian_well_1(coords, *params)
                + gaussian_well_2(coords, *params))

    def f_symfit(x1, x2, params):
        # Adapter: symfit passes parameters individually, scipy takes a vector.
        return func([x1, x2], *params)

    params = (2, 3, 7, 8, 9, 10, 44, -1, 2, 26, 1, -2, 0.5)
    x0 = np.array([2., 2.])

    # Reference run with scipy directly.
    np.random.seed(555)
    res = basinhopping(func, x0, minimizer_kwargs={'args': params})

    # Same seed through symfit's wrapper.
    np.random.seed(555)
    x1, x2 = parameters('x1, x2', value=x0)
    fit = BasinHopping(partial(f_symfit, params=params), [x1, x2])
    fit_result = fit.execute()

    assert res.x[0] == fit_result.value(x1)
    assert res.x[1] == fit_result.value(x2)
    assert res.fun == fit_result.objective_value
def test_basinhopping_large(self):
    """
    Test the basinhopping method of scipy.minimize. This is based of scipy's
    docs as found here:
    https://docs.scipy.org/doc/scipy-0.13.0/reference/generated/scipy.optimize.anneal.html
    """
    # Quadratic background surface.
    def f1(z, *params):
        x, y = z
        a, b, c, d, e, f, g, h, i, j, k, l, scale = params
        return (a * x ** 2 + b * x * y + c * y ** 2 + d * x + e * y + f)

    # First Gaussian well, centred at (h, i).
    def f2(z, *params):
        x, y = z
        a, b, c, d, e, f, g, h, i, j, k, l, scale = params
        return (-g * np.exp(-((x - h) ** 2 + (y - i) ** 2) / scale))

    # Second Gaussian well, centred at (k, l).
    def f3(z, *params):
        x, y = z
        a, b, c, d, e, f, g, h, i, j, k, l, scale = params
        return (-j * np.exp(-((x - k) ** 2 + (y - l) ** 2) / scale))

    # Full objective: quadratic term plus both wells.
    def func(z, *params):
        x, y = z
        a, b, c, d, e, f, g, h, i, j, k, l, scale = params
        return f1(z, *params) + f2(z, *params) + f3(z, *params)

    # Adapter: symfit passes parameters individually, scipy takes a vector.
    def f_symfit(x1, x2, params):
        z = [x1, x2]
        return func(z, *params)

    params = (2, 3, 7, 8, 9, 10, 44, -1, 2, 26, 1, -2, 0.5)
    x0 = np.array([2., 2.])

    # Reference run with scipy directly; seed pins the hop sequence.
    np.random.seed(555)
    res = basinhopping(func, x0, minimizer_kwargs={'args': params})

    # Same seed through symfit's wrapper so both see identical hops.
    np.random.seed(555)
    x1, x2 = parameters('x1, x2', value=x0)
    fit = BasinHopping(partial(f_symfit, params=params), [x1, x2])
    fit_result = fit.execute()

    self.assertEqual(res.x[0], fit_result.value(x1))
    self.assertEqual(res.x[1], fit_result.value(x2))
    self.assertEqual(res.fun, fit_result.objective_value)
def test_basinhopping_2d():
    """2D BasinHopping: jacobian wiring, symbolic models, constraints, bounds."""
    def func2d(x):
        # scipy-style objective returning (value, gradient).
        value = (np.cos(14.5 * x[0] - 0.3)
                 + (x[1] + 0.2) * x[1] + (x[0] + 0.2) * x[0])
        grad = np.zeros(2)
        grad[0] = -14.5 * np.sin(14.5 * x[0] - 0.3) + 2. * x[0] + 0.2
        grad[1] = 2. * x[1] + 0.2
        return value, grad

    def func2d_symfit(x1, x2):
        # Same objective, symfit signature (one argument per parameter).
        return np.cos(14.5 * x1 - 0.3) + (x2 + 0.2) * x2 + (x1 + 0.2) * x1

    def jac2d_symfit(x1, x2):
        # Analytic gradient of func2d_symfit.
        grad = np.zeros(2)
        grad[0] = -14.5 * np.sin(14.5 * x1 - 0.3) + 2. * x1 + 0.2
        grad[1] = 2. * x2 + 0.2
        return grad

    # Reference run with scipy directly; seed pins the hop sequence.
    np.random.seed(555)
    x0 = [1.0, 1.0]
    res = basinhopping(func2d, x0,
                       minimizer_kwargs={'method': 'BFGS', 'jac': True},
                       niter=200)

    np.random.seed(555)
    x1, x2 = parameters('x1, x2', value=x0)

    # A gradient-free local minimizer must reject a jacobian argument.
    with pytest.raises(TypeError):
        fit = BasinHopping(
            func2d_symfit, [x1, x2],
            local_minimizer=NelderMead(func2d_symfit, [x1, x2],
                                       jacobian=jac2d_symfit)
        )

    fit = BasinHopping(
        func2d_symfit, [x1, x2],
        local_minimizer=BFGS(func2d_symfit, [x1, x2], jacobian=jac2d_symfit)
    )
    fit_result = fit.execute(niter=200)

    assert isinstance(fit.local_minimizer.jacobian, MinimizeModel)
    assert isinstance(fit.local_minimizer.jacobian.model, CallableNumericalModel)
    assert res.x[0] == fit_result.value(x1)
    assert res.x[1] == fit_result.value(x2)
    assert res.fun == fit_result.objective_value

    # Now compare with the symbolic equivalent.
    np.random.seed(555)
    model = cos(14.5 * x1 - 0.3) + (x2 + 0.2) * x2 + (x1 + 0.2) * x1
    fit = Fit(model, minimizer=BasinHopping)
    fit_result = fit.execute()
    assert res.x[0] == fit_result.value(x1)
    assert res.x[1] == fit_result.value(x2)
    assert res.fun == fit_result.objective_value
    assert isinstance(fit.minimizer.local_minimizer, BFGS)

    # A constraint should switch the local minimizer to SLSQP.
    np.random.seed(555)
    model = cos(14.5 * x1 - 0.3) + (x2 + 0.2) * x2 + (x1 + 0.2) * x1
    fit = Fit(model, minimizer=BasinHopping, constraints=[Eq(x1, x2)])
    fit_result = fit.execute()
    assert fit_result.value(x1) == fit_result.value(x2)
    assert isinstance(fit.minimizer.local_minimizer, SLSQP)

    # A bound should switch the local minimizer to L-BFGS-B.
    np.random.seed(555)
    x1.min = 0.0
    model = cos(14.5 * x1 - 0.3) + (x2 + 0.2) * x2 + (x1 + 0.2) * x1
    fit = Fit(model, minimizer=BasinHopping)
    fit_result = fit.execute()
    assert fit_result.value(x1) >= x1.min
    assert isinstance(fit.minimizer.local_minimizer, LBFGSB)
def test_basinhopping_2d(self):
    """2D BasinHopping: jacobian wiring, symbolic models, constraints, bounds."""
    # scipy-style objective returning (value, gradient).
    def func2d(x):
        f = np.cos(14.5 * x[0] - 0.3) + (x[1] + 0.2) * x[1] + (x[0] + 0.2) * x[0]
        df = np.zeros(2)
        df[0] = -14.5 * np.sin(14.5 * x[0] - 0.3) + 2. * x[0] + 0.2
        df[1] = 2. * x[1] + 0.2
        return f, df

    # Same objective, symfit signature (one argument per parameter).
    def func2d_symfit(x1, x2):
        f = np.cos(14.5 * x1 - 0.3) + (x2 + 0.2) * x2 + (x1 + 0.2) * x1
        return f

    # Analytic gradient of func2d_symfit.
    def jac2d_symfit(x1, x2):
        df = np.zeros(2)
        df[0] = -14.5 * np.sin(14.5 * x1 - 0.3) + 2. * x1 + 0.2
        df[1] = 2. * x2 + 0.2
        return df

    # Reference run with scipy directly; seed pins the hop sequence.
    np.random.seed(555)
    minimizer_kwargs = {'method': 'BFGS', 'jac': True}
    x0 = [1.0, 1.0]
    res = basinhopping(func2d, x0, minimizer_kwargs=minimizer_kwargs, niter=200)

    np.random.seed(555)
    x1, x2 = parameters('x1, x2', value=x0)

    # A gradient-free local minimizer must reject a jacobian argument.
    with self.assertRaises(TypeError):
        fit = BasinHopping(
            func2d_symfit, [x1, x2],
            local_minimizer=NelderMead(func2d_symfit, [x1, x2], jacobian=jac2d_symfit)
        )

    fit = BasinHopping(
        func2d_symfit, [x1, x2],
        local_minimizer=BFGS(func2d_symfit, [x1, x2], jacobian=jac2d_symfit)
    )
    fit_result = fit.execute(niter=200)

    self.assertIsInstance(fit.local_minimizer.jacobian, MinimizeModel)
    self.assertIsInstance(fit.local_minimizer.jacobian.model, CallableNumericalModel)
    # Ratio-to-1.0 comparison rather than direct equality.
    self.assertEqual(res.x[0] / fit_result.value(x1), 1.0)
    self.assertEqual(res.x[1] / fit_result.value(x2), 1.0)
    self.assertEqual(res.fun, fit_result.objective_value)

    # Now compare with the symbolic equivalent
    np.random.seed(555)
    model = cos(14.5 * x1 - 0.3) + (x2 + 0.2) * x2 + (x1 + 0.2) * x1
    fit = Fit(model, minimizer=BasinHopping)
    fit_result = fit.execute()
    self.assertEqual(res.x[0], fit_result.value(x1))
    self.assertEqual(res.x[1], fit_result.value(x2))
    self.assertEqual(res.fun, fit_result.objective_value)
    self.assertIsInstance(fit.minimizer.local_minimizer, BFGS)

    # Impose constrains: a constraint should switch the local minimizer to SLSQP.
    np.random.seed(555)
    model = cos(14.5 * x1 - 0.3) + (x2 + 0.2) * x2 + (x1 + 0.2) * x1
    fit = Fit(model, minimizer=BasinHopping, constraints=[Eq(x1, x2)])
    fit_result = fit.execute()
    self.assertEqual(fit_result.value(x1), fit_result.value(x2))
    self.assertIsInstance(fit.minimizer.local_minimizer, SLSQP)

    # Impose bounds: a bound should switch the local minimizer to L-BFGS-B.
    np.random.seed(555)
    x1.min = 0.0
    model = cos(14.5 * x1 - 0.3) + (x2 + 0.2) * x2 + (x1 + 0.2) * x1
    fit = Fit(model, minimizer=BasinHopping)
    fit_result = fit.execute()
    self.assertGreaterEqual(fit_result.value(x1), x1.min)
    self.assertIsInstance(fit.minimizer.local_minimizer, LBFGSB)