def optimize(self, start_point=None, **kwargs):
    """
    Optimize the log(posterior) and returns a best-fit-point summary.

    :param start_point: Parameter point from which to start the optimization,
        with the elements in the same order as in eos.Analysis.varied_parameters.
        If not specified, optimization starts at the current parameter point.
    :type start_point: iterable, optional
    :returns: an ``eos.BestFitPoint`` wrapping the optimized parameter values
    """
    if start_point is None:
        start_point = [float(p) for p in self.varied_parameters]

    # ``kwargs`` is always a dict here (possibly empty), never None, so the
    # previous ``if kwargs is None`` test could never fire and the defaults
    # were silently skipped. Apply the SLSQP defaults only when the caller
    # supplied no options at all, preserving any explicit choices.
    if not kwargs:
        kwargs = {'method': 'SLSQP', 'options': {'ftol': 1.0e-13}}

    res = scipy.optimize.minimize(
        self.negative_log_pdf,
        start_point,
        args=None,
        bounds=self.bounds,
        **kwargs)

    if not res.success:
        eos.warn('Optimization did not succeed')
        eos.warn('    optimizer message reads: {}'.format(res.message))
    else:
        eos.info('Optimization goal achieved after {nfev} function evaluations'.format(nfev=res.nfev))

    # Propagate the best-fit values back into the live parameter objects so
    # subsequent evaluations start from the optimum.
    for p, v in zip(self.varied_parameters, res.x):
        p.set(v)

    return eos.BestFitPoint(self, res.x)
def optimize(self, start_point=None, rng=np.random.mtrand, **kwargs):
    """
    Optimize the log(posterior) and returns a best-fit-point summary.

    :param start_point: Parameter point from which to start the optimization,
        with the elements in the same order as in eos.Analysis.varied_parameters.
        If set to "random", optimization starts at a random point in the space
        of the priors. If not specified, optimization starts at the current
        parameter point.
    :type start_point: iterable, optional
    :param rng: Optional random number generator; must provide a ``uniform()``
        method returning draws in [0, 1).
    :returns: an ``eos.BestFitPoint`` wrapping the optimized parameter values
    """
    if start_point is None:
        start_point = [float(p) for p in self.varied_parameters]
    elif start_point == "random":
        # Draw each coordinate from its prior via inverse-CDF sampling.
        start_point = [p.inverse_cdf(rng.uniform()) for p in self._log_posterior.log_priors()]

    # ``kwargs`` is always a dict here (possibly empty), never None, so the
    # previous ``if kwargs is None`` test could never fire and the defaults
    # were silently skipped. Apply the SLSQP defaults only when the caller
    # supplied no options at all, preserving any explicit choices.
    if not kwargs:
        kwargs = {'method': 'SLSQP', 'options': {'ftol': 1.0e-13}}

    # This variant optimizes in mapped coordinates x in [-1, 1]^d; the bounds
    # are therefore uniform and independent of the physical parameter ranges.
    res = scipy.optimize.minimize(
        self.negative_log_pdf,
        self._par_to_x(start_point),
        args=None,
        bounds=[(-1.0, 1.0) for _ in self.bounds],
        **kwargs)

    if not res.success:
        eos.warn('Optimization did not succeed')
        eos.warn('    optimizer message reads: {}'.format(res.message))
    else:
        eos.info('Optimization goal achieved after {nfev} function evaluations'.format(nfev=res.nfev))

    # Map the optimum back to physical parameter space and propagate the
    # values into the live parameter objects.
    bfp = self._x_to_par(res.x)
    for p, v in zip(self.varied_parameters, bfp):
        p.set(v)

    return eos.BestFitPoint(self, bfp)