def test_deprecated_alias(self):
    """Test that the deprecated ``pints.Optimisation`` alias still works.

    ``pints.Optimisation`` is a deprecated alias for
    ``pints.OptimisationController``: it must emit a deprecation warning
    but still return a working controller instance.
    """
    r = pints.toy.RosenbrockError()
    x = np.array([1.1, 1.1])
    b = pints.RectangularBoundaries([0.5, 0.5], [1.5, 1.5])
    # Capture the warning so it does not leak into the test run, and
    # assert the alias actually reports its deprecation.
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter('always')
        opt = pints.Optimisation(r, x, boundaries=b, method=method)
    self.assertEqual(len(w), 1)
    self.assertIn('deprecated', str(w[-1].message))
    self.assertIsInstance(opt, pints.OptimisationController)
def test_deprecated_alias(self):
    """Check that ``pints.Optimisation`` (deprecated) builds a controller."""
    error = pints.toy.RosenbrockError()
    start = np.array([1.1, 1.1])
    bounds = pints.RectangularBoundaries([0.5, 0.5], [1.5, 1.5])
    # Record warnings so the deprecation notice can be asserted on
    # instead of polluting the test output.
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('always')
        opt = pints.Optimisation(
            error, start, boundaries=bounds, method=method)
    # Exactly one warning, and it must mention the deprecation.
    self.assertEqual(len(caught), 1)
    self.assertIn('deprecated', str(caught[-1].message))
    # The alias must still hand back a fully-fledged controller.
    self.assertIsInstance(opt, pints.OptimisationController)
def optimise(self, x, parallel=False):
    """
    Runs the optimisation. This method:

    (1) generates simulated data and adds noise,
    (2) sets up the optimiser with the method given, trying to optimise
        the function f(x) = sum of squared error,
    (3) runs the optimisation,
    (4) returns:
        - the found parameters x,
        - the ratio of f(x) / f(x_0), where x_0 are the real parameters,
        - the total time taken divided by the time taken to perform a
          single evaluation of f(x).

    ``x`` are the hyper-parameters passed to the optimiser;
    ``parallel`` enables parallel evaluation using OMP_NUM_THREADS
    workers (the environment variable must be set in that case).
    """
    the_model = self.model()
    print('model = ', the_model)

    # (1) Simulate data and add Gaussian noise scaled to the output range.
    values = the_model.simulate(self.real_parameters, self.times)
    value_range = np.max(values) - np.min(values)
    values += np.random.normal(0, self.noise * value_range, values.shape)

    # (2) Build a sum-of-squares error on the noisy data.
    problem = pints.MultiOutputProblem(the_model, self.times, values)
    score = pints.SumOfSquaresError(problem)

    # Start from the centre of the box, with sigma0 covering one sixth
    # of each parameter's range.
    middle = [0.5 * (u + l) for l, u in zip(self.lower, self.upper)]
    sigma = [(1.0 / 6.0) * (u - l) for l, u in zip(self.lower, self.upper)]
    print('sigma = ', sigma)
    boundaries = pints.RectangularBoundaries(self.lower, self.upper)

    # Use OptimisationController directly: pints.Optimisation is a
    # deprecated alias for it and emits a DeprecationWarning.
    optimisation = pints.OptimisationController(
        score,
        middle,
        sigma0=sigma,
        boundaries=boundaries,
        method=self.method
    )
    optimisation.optimiser().set_hyper_parameters(x)
    if parallel:
        # Raises KeyError if OMP_NUM_THREADS is unset, so a
        # misconfigured benchmark environment fails loudly.
        optimisation.set_parallel(int(os.environ['OMP_NUM_THREADS']))
    else:
        optimisation.set_parallel(False)

    # (3) Run the optimisation, timing the wall-clock duration.
    start = timer()
    found_parameters, found_value = optimisation.run()
    end = timer()

    # (4) Time a single score evaluation (averaged over N calls) to
    # normalise the optimisation wall time.
    N = 10
    start_score = timer()
    for _ in range(N):
        minimum_value = score(self.real_parameters)
    end_score = timer()
    score_duration = (end_score - start_score) / N

    return (
        found_parameters,
        found_value / minimum_value,
        (end - start) / score_duration,
    )
else:
    # No starting point supplied on this branch: draw one from the prior.
    # NOTE(review): the matching `if` branch is outside this view.
    x0 = log_prior.sample()
print('Initial guess (untransformed model parameters) = ', x0)

# Create optimiser and log transform parameters
x0 = util.transformer(transform, x0, rate_dict, True)
# `_get_boundaries` presumably returns (lower, upper) bounds in the
# transformed space — TODO confirm against rate_checker.
boundaries = rate_checker._get_boundaries(rate_dict)
Boundaries = pints.RectangularBoundaries(boundaries[0], boundaries[1])
# Report the objective values at the starting point before optimising.
print('Initial guess LogLikelihood = ', log_likelihood(x0))
print('Initial guess LogPrior = ', log_prior(x0))
print('Initial guess LogPosterior = ', log_posterior(x0))
print('Initial guess (transformed optimisation parameters) = ', x0)

# CMA-ES optimisation of the (transformed) posterior within the box.
opt = pints.Optimisation(log_posterior, x0, boundaries=Boundaries,
                         method=pints.CMAES)
opt.set_max_iterations(None)  # run until the method's own stopping criterion
opt.set_parallel(True)

# One CSV log file per model/cell/transform/repeat combination.
log_filename = model_name + '_cell_' + \
    str(cell) + '_transform_' + str(transform) + \
    '_cmaes_run_' + str(i) + '.log'
opt.set_log_to_file(results_log_folder + '/' + log_filename, csv=True)

# Run optimisation
try:
    with np.errstate(all='ignore'):
        # Tell numpy not to issue warnings
        p, s = opt.run()
        # Map the result back to untransformed model parameters and
        # collect it; the matching `except` lies outside this view.
        p = util.transformer(transform, p, rate_dict, False)
        params.append(p)