def minimize(self):
    """Use MINUIT's `MINIMIZE' to minimize the function.

    NOTE(review): this definition is immediately shadowed by the
    identically named `minimize` defined right below it — Python keeps
    only the later binding, so this version is dead code and should be
    removed once confirmed unused.
    """
    self.internal_beforeminimization()
    # Strategy code 3 selects MINUIT's MINIMIZE command.
    self.valid, self.why, self.fval, self.edm, ncalls, values, errors, \
        self.covariance, self.fminobj = _minuit.domini(
            self.f, self.npar, self.up, self.names, self.values,
            self.errors, self.fixed, self.limits, self.eps,
            self.maxcalls, self.tol, self.strategy, self.covariance,
            3, self.gradient, self.checkgrad)
    self.internal_afterminimization(values, errors, ncalls)
    # Return None for consistency with every sibling minimizer method
    # (the original uniquely returned `self`).
    return None
def minimize(self):
    """Use MINUIT's `MINIMIZE' to minimize the function.

    Tries `MIGRAD' first and falls back to `SIMPLEX' if that fails.
    """
    self.internal_beforeminimization()
    # Strategy code 3 selects MINUIT's MINIMIZE command.
    result = _minuit.domini(
        self.f, self.npar, self.up, self.names, self.values,
        self.errors, self.fixed, self.limits, self.eps,
        self.maxcalls, self.tol, self.strategy, self.covariance,
        3, self.gradient, self.checkgrad)
    (self.valid, self.why, self.fval, self.edm,
     n_calls, par_values, par_errors,
     self.covariance, self.fminobj) = result
    self.internal_afterminimization(par_values, par_errors, n_calls)
    return None
def migrad(self):
    """Use MINUIT's `MIGRAD' to minimize the function.

    This is what you want.
    """
    self.internal_beforeminimization()
    # Strategy code 2 selects MINUIT's MIGRAD command.
    result = _minuit.domini(
        self.f, self.npar, self.up, self.names, self.values,
        self.errors, self.fixed, self.limits, self.eps,
        self.maxcalls, self.tol, self.strategy, self.covariance,
        2, self.gradient, self.checkgrad)
    (self.valid, self.why, self.fval, self.edm,
     n_calls, par_values, par_errors,
     self.covariance, self.fminobj) = result
    self.internal_afterminimization(par_values, par_errors, n_calls)
    return None
def simplex(self):
    """Use MINUIT's `SIMPLEX' to minimize the function.

    This is the cute algorithm described in section 10.4 of Numerical
    Recipes (Nelder and Mead).  It's probably not what you want.
    Creates no covariance matrix, because no error estimation has been
    performed.
    """
    self.internal_beforeminimization()
    # Strategy code 1 selects MINUIT's SIMPLEX command.
    result = _minuit.domini(
        self.f, self.npar, self.up, self.names, self.values,
        self.errors, self.fixed, self.limits, self.eps,
        self.maxcalls, self.tol, self.strategy, self.covariance,
        1, self.gradient, self.checkgrad)
    (self.valid, self.why, self.fval, self.edm,
     n_calls, par_values, par_errors,
     self.covariance, self.fminobj) = result
    self.internal_afterminimization(par_values, par_errors, n_calls)
    return None
def scan_minimize(self):
    """Use MINUIT's `SCAN' to minimize the function.

    This is probably not what you want: it steps through function
    values and picks the best one.  You could have done that yourself.
    Creates no covariance matrix, because no error estimation has been
    performed.
    """
    self.internal_beforeminimization()
    # Strategy code 0 selects MINUIT's SCAN command.
    result = _minuit.domini(
        self.f, self.npar, self.up, self.names, self.values,
        self.errors, self.fixed, self.limits, self.eps,
        self.maxcalls, self.tol, self.strategy, self.covariance,
        0, self.gradient, self.checkgrad)
    (self.valid, self.why, self.fval, self.edm,
     n_calls, par_values, par_errors,
     self.covariance, self.fminobj) = result
    self.internal_afterminimization(par_values, par_errors, n_calls)
    return None