def leastsq(self, scale_covar=True, **kws):
    """Use Levenberg-Marquardt minimization to perform the fit.

    This assumes that ModelParameters have been stored, and a function
    to minimize has been properly set up.

    This wraps scipy.optimize.leastsq, and keyword arguments are passed
    directly as options to scipy.optimize.leastsq.

    When possible, this calculates the estimated uncertainties and
    variable correlations from the covariance matrix.

    Writes outputs to many internal attributes, and returns True if the
    fit was successful, False if not.

    NOTE(review): the ``scale_covar`` argument is accepted but never
    read -- the body uses ``self.scale_covar`` instead.  Kept as-is for
    interface compatibility; confirm intent with callers.
    """
    self.prepare_fit()
    # default options for scipy.optimize.leastsq; self.kws and the
    # caller's kws may override any of these
    lskws = dict(full_output=1, xtol=1.e-7, ftol=1.e-7, gtol=1.e-7,
                 maxfev=1000*(self.nvarys+1), Dfun=None)

    lskws.update(self.kws)
    lskws.update(kws)

    if lskws['Dfun'] is not None:
        # user-supplied Jacobian: route it through our wrapper so it is
        # called with the internal (possibly bounded) parameter values
        self.jacfcn = lskws['Dfun']
        lskws['Dfun'] = self.__jacobian

    lsout = scipy_leastsq(self.__residual, self.vars, **lskws)
    vbest, cov, infodict, errmsg, ier = lsout

    self.residual = resid = infodict['fvec']

    self.ier = ier
    self.lmdif_message = errmsg
    self.message = 'Fit succeeded.'
    # lmdif/lmder return codes 1-4 indicate convergence
    self.success = ier in [1, 2, 3, 4]

    if ier == 0:
        self.message = 'Invalid Input Parameters.'
    elif ier == 5:
        self.message = self.err_maxfev % lskws['maxfev']
    else:
        self.message = 'Tolerance seems to be too small.'

    self.nfev = infodict['nfev']
    self.ndata = len(resid)

    sum_sqr = (resid**2).sum()
    self.chisqr = sum_sqr
    self.nfree = (self.ndata - self.nvarys)
    self.redchi = sum_sqr / self.nfree

    # reset per-parameter error estimates before (possibly) refilling them
    for par in self.params.values():
        par.stderr = 0
        par.correl = None
        if hasattr(par, 'ast'):
            delattr(par, 'ast')

    if cov is None:
        self.errorbars = False
        # BUG FIX: the '%s' placeholder was previously left unfilled, so
        # users saw a literal '%s' in the message; substitute the
        # current status message into it.
        self.message = '%s. Could not estimate error-bars' % self.message
    else:
        self.errorbars = True
        self.covar = cov
        if self.scale_covar:
            # scale covariance by reduced chi-square so the uncertainties
            # reflect the quality of the fit
            cov = cov * sum_sqr / self.nfree
        for ivar, varname in enumerate(self.var_map):
            par = self.params[varname]
            par.stderr = sqrt(cov[ivar, ivar])
            par.correl = {}
            for jvar, varn2 in enumerate(self.var_map):
                if jvar != ivar:
                    par.correl[varn2] = (cov[ivar, jvar] /
                                         (par.stderr * sqrt(cov[jvar, jvar])))
    return self.success
def leastsq(self, **kws):
    """Use Levenberg-Marquardt minimization to perform the fit.

    This assumes that ModelParameters have been stored, and a function
    to minimize has been properly set up.

    This wraps scipy.optimize.leastsq, and keyword arguments are passed
    directly as options to scipy.optimize.leastsq.

    When possible, this calculates the estimated uncertainties and
    variable correlations from the covariance matrix.

    Writes outputs to many internal attributes, and returns True if the
    fit was successful, False if not.
    """
    self.prepare_fit()
    lskws = dict(full_output=1, xtol=1.e-7, ftol=1.e-7, gtol=1.e-7,
                 maxfev=2000*(self.nvarys+1), Dfun=None)

    lskws.update(self.kws)
    lskws.update(kws)

    if lskws['Dfun'] is not None:
        self.jacfcn = lskws['Dfun']
        lskws['Dfun'] = self.__jacobian

    # suppress runtime warnings during fit and error analysis
    orig_warn_settings = np.geterr()
    np.seterr(all='ignore')

    lsout = scipy_leastsq(self.__residual, self.vars, **lskws)
    _best, _cov, infodict, errmsg, ier = lsout

    self.residual = resid = infodict['fvec']
    self.ier = ier
    self.lmdif_message = errmsg
    self.message = 'Fit succeeded.'
    # lmdif/lmder return codes 1-4 indicate convergence
    self.success = ier in [1, 2, 3, 4]

    if ier == 0:
        self.message = 'Invalid Input Parameters.'
    elif ier == 5:
        self.message = self.err_maxfev % lskws['maxfev']
    else:
        self.message = 'Tolerance seems to be too small.'

    self.nfev = infodict['nfev']
    self.ndata = len(resid)

    sum_sqr = (resid**2).sum()
    self.chisqr = sum_sqr
    self.nfree = (self.ndata - self.nvarys)
    self.redchi = sum_sqr / self.nfree

    # need to map _best values to params, then calculate the
    # grad for the variable parameters
    grad = ones_like(_best)
    vbest = ones_like(_best)

    # ensure that _best, vbest, and grad are not
    # broken 1-element ndarrays.
    if len(np.shape(_best)) == 0:
        _best = np.array([_best])
    if len(np.shape(vbest)) == 0:
        vbest = np.array([vbest])
    if len(np.shape(grad)) == 0:
        grad = np.array([grad])

    for ivar, varname in enumerate(self.var_map):
        par = self.params[varname]
        grad[ivar] = par.scale_gradient(_best[ivar])
        vbest[ivar] = par.value

    # modified from JJ Helmus' leastsqbound.py:
    # undo the internal parameter scaling in the returned Jacobian,
    # then rebuild the covariance matrix from its permuted QR factors
    infodict['fjac'] = transpose(transpose(infodict['fjac']) /
                                 take(grad, infodict['ipvt'] - 1))
    rvec = dot(triu(transpose(infodict['fjac'])[:self.nvarys, :]),
               take(eye(self.nvarys), infodict['ipvt'] - 1, 0))
    try:
        self.covar = inv(dot(transpose(rvec), rvec))
    except (LinAlgError, ValueError):
        self.covar = None

    has_expr = False
    for par in self.params.values():
        par.stderr, par.correl = 0, None
        has_expr = has_expr or par.expr is not None

    if self.covar is not None:
        if self.scale_covar:
            # scale covariance by reduced chi-square
            self.covar = self.covar * sum_sqr / self.nfree
        for ivar, varname in enumerate(self.var_map):
            par = self.params[varname]
            par.stderr = sqrt(self.covar[ivar, ivar])
            par.correl = {}
            for jvar, varn2 in enumerate(self.var_map):
                if jvar != ivar:
                    par.correl[varn2] = (self.covar[ivar, jvar] /
                                         (par.stderr *
                                          sqrt(self.covar[jvar, jvar])))

        uvars = None
        if has_expr:
            # uncertainties on constrained parameters:
            #   get values with uncertainties (including correlations),
            #   temporarily set Parameter values to these,
            #   re-evaluate constrained parameters to extract stderr,
            #   and then set Parameters back to best-fit value
            try:
                uvars = uncertainties.correlated_values(vbest, self.covar)
            except (LinAlgError, ValueError):
                uvars = None

            if uvars is not None:
                for pname, par in self.params.items():
                    eval_stderr(par, uvars, self.var_map,
                                self.params, self.asteval)
                # restore nominal values
                for v, nam in zip(uvars, self.var_map):
                    self.asteval.symtable[nam] = v.nominal_value
        self.errorbars = True

    if self.covar is None:
        self.errorbars = False
        # BUG FIX: the '%s' placeholder was previously left unfilled, so
        # users saw a literal '%s'; substitute the current status
        # message into it.
        self.message = '%s. Could not estimate error-bars' % self.message

    np.seterr(**orig_warn_settings)
    self.unprepare_fit()
    return self.success
def leastsq(self, scale_covar=True, **kws):
    """Use Levenberg-Marquardt minimization to perform the fit.

    This assumes that ModelParameters have been stored, and a function
    to minimize has been properly set up.

    This wraps scipy.optimize.leastsq, and keyword arguments are passed
    directly as options to scipy.optimize.leastsq.

    When possible, this calculates the estimated uncertainties and
    variable correlations from the covariance matrix.

    Writes outputs to many internal attributes, and returns True if the
    fit was successful, False if not.

    NOTE(review): the ``scale_covar`` argument is accepted but never
    read -- the body uses ``self.scale_covar``.  Kept as-is for
    interface compatibility; confirm intent with callers.
    """
    self.prepare_fit()
    lskws = dict(full_output=1, xtol=1.0e-7, ftol=1.0e-7, gtol=1.0e-7,
                 maxfev=2000 * (self.nvarys + 1), Dfun=None)

    lskws.update(self.kws)
    lskws.update(kws)

    if lskws["Dfun"] is not None:
        # user-supplied Jacobian: route it through our wrapper so it is
        # called with the internal (possibly bounded) parameter values
        self.jacfcn = lskws["Dfun"]
        lskws["Dfun"] = self.__jacobian

    lsout = scipy_leastsq(self.__residual, self.vars, **lskws)
    _best, _cov, infodict, errmsg, ier = lsout

    self.residual = resid = infodict["fvec"]

    self.ier = ier
    self.lmdif_message = errmsg
    self.message = "Fit succeeded."
    # lmdif/lmder return codes 1-4 indicate convergence
    self.success = ier in [1, 2, 3, 4]

    if ier == 0:
        self.message = "Invalid Input Parameters."
    elif ier == 5:
        self.message = self.err_maxfev % lskws["maxfev"]
    else:
        self.message = "Tolerance seems to be too small."

    self.nfev = infodict["nfev"]
    self.ndata = len(resid)

    sum_sqr = (resid ** 2).sum()
    self.chisqr = sum_sqr
    self.nfree = self.ndata - self.nvarys
    self.redchi = sum_sqr / self.nfree

    # need to map _best values to params, then calculate the
    # grad for the variable parameters
    grad = ones_like(_best)
    vbest = ones_like(_best)
    for ivar, varname in enumerate(self.var_map):
        par = self.params[varname]
        grad[ivar] = par.scale_gradient(_best[ivar])
        vbest[ivar] = par.value

    # modified from JJ Helmus' leastsqbound.py:
    # undo the internal parameter scaling in the returned Jacobian,
    # then rebuild the covariance matrix from its permuted QR factors
    infodict["fjac"] = transpose(transpose(infodict["fjac"]) /
                                 take(grad, infodict["ipvt"] - 1))
    rvec = dot(triu(transpose(infodict["fjac"])[:self.nvarys, :]),
               take(eye(self.nvarys), infodict["ipvt"] - 1, 0))
    try:
        cov = inv(dot(transpose(rvec), rvec))
    except (LinAlgError, ValueError):
        cov = None

    # reset per-parameter error estimates before (possibly) refilling them
    for par in self.params.values():
        par.stderr, par.correl = 0, None

    self.covar = cov
    if cov is None:
        self.errorbars = False
        # BUG FIX: the '%s' placeholder was previously left unfilled, so
        # users saw a literal '%s'; substitute the current status
        # message into it.
        self.message = "%s. Could not estimate error-bars" % self.message
    else:
        self.errorbars = True
        if self.scale_covar:
            # scale covariance by reduced chi-square
            self.covar = cov = cov * sum_sqr / self.nfree
        for ivar, varname in enumerate(self.var_map):
            par = self.params[varname]
            par.stderr = sqrt(cov[ivar, ivar])
            par.correl = {}
            for jvar, varn2 in enumerate(self.var_map):
                if jvar != ivar:
                    par.correl[varn2] = (cov[ivar, jvar] /
                                         (par.stderr * sqrt(cov[jvar, jvar])))

    # set uncertainties on constrained parameters.
    # Note that first we set all named params to
    # have values that include uncertainties, then
    # evaluate all constrained parameters, then set
    # the values back to the nominal values.
    if HAS_UNCERT and self.covar is not None:
        # ROBUSTNESS FIX: correlated_values raises LinAlgError/ValueError
        # on a singular or non-positive-definite covariance; skip the
        # constrained-error propagation instead of aborting the fit.
        try:
            uvars = uncertainties.correlated_values(vbest, self.covar)
        except (LinAlgError, ValueError):
            uvars = None
        if uvars is not None:
            for v, nam in zip(uvars, self.var_map):
                self.asteval.symtable[nam] = v
            for pname, par in self.params.items():
                if hasattr(par, "ast"):
                    try:
                        out = self.asteval.run(par.ast)
                        # NOTE(review): std_dev() as a method is the old
                        # uncertainties API; newer releases expose it as a
                        # property -- confirm the pinned version.
                        par.stderr = out.std_dev()
                    except Exception:
                        # best-effort: leave stderr at its reset value
                        # (was a bare 'except:', which also swallowed
                        # KeyboardInterrupt/SystemExit)
                        pass
            # restore nominal (best-fit) values in the symbol table
            for v, nam in zip(uvars, self.var_map):
                self.asteval.symtable[nam] = v.nominal_value

    # drop cached constraint ASTs now that stderr propagation is done
    for par in self.params.values():
        if hasattr(par, "ast"):
            delattr(par, "ast")
    return self.success
def leastsq(self, **kws):
    """Use Levenberg-Marquardt minimization to perform the fit.

    This assumes that ModelParameters have been stored, and a function
    to minimize has been properly set up.

    This wraps scipy.optimize.leastsq, and keyword arguments are passed
    directly as options to scipy.optimize.leastsq.

    When possible, this calculates the estimated uncertainties and
    variable correlations from the covariance matrix.

    Writes outputs to many internal attributes, and returns True if the
    fit was successful, False if not.
    """
    self.prepare_fit()
    lskws = dict(full_output=1, xtol=1.e-7, ftol=1.e-7, gtol=1.e-7,
                 maxfev=2000 * (self.nvarys + 1), Dfun=None)

    lskws.update(self.kws)
    lskws.update(kws)

    if lskws['Dfun'] is not None:
        self.jacfcn = lskws['Dfun']
        lskws['Dfun'] = self.__jacobian

    lsout = scipy_leastsq(self.__residual, self.vars, **lskws)
    _best, _cov, infodict, errmsg, ier = lsout

    self.residual = resid = infodict['fvec']
    self.ier = ier
    self.lmdif_message = errmsg
    self.message = 'Fit succeeded.'
    # lmdif/lmder return codes 1-4 indicate convergence
    self.success = ier in [1, 2, 3, 4]

    if ier == 0:
        self.message = 'Invalid Input Parameters.'
    elif ier == 5:
        self.message = self.err_maxfev % lskws['maxfev']
    else:
        self.message = 'Tolerance seems to be too small.'

    self.nfev = infodict['nfev']
    self.ndata = len(resid)

    sum_sqr = (resid**2).sum()
    self.chisqr = sum_sqr
    self.nfree = (self.ndata - self.nvarys)
    self.redchi = sum_sqr / self.nfree

    # need to map _best values to params, then calculate the
    # grad for the variable parameters
    grad = ones_like(_best)
    vbest = ones_like(_best)

    # ensure that _best, vbest, and grad are not
    # broken 1-element ndarrays.
    if len(np.shape(_best)) == 0:
        _best = np.array([_best])
    if len(np.shape(vbest)) == 0:
        vbest = np.array([vbest])
    if len(np.shape(grad)) == 0:
        grad = np.array([grad])

    for ivar, varname in enumerate(self.var_map):
        par = self.params[varname]
        grad[ivar] = par.scale_gradient(_best[ivar])
        vbest[ivar] = par.value

    # modified from JJ Helmus' leastsqbound.py:
    # undo the internal parameter scaling in the returned Jacobian,
    # then rebuild the covariance matrix from its permuted QR factors
    infodict['fjac'] = transpose(
        transpose(infodict['fjac']) / take(grad, infodict['ipvt'] - 1))
    rvec = dot(triu(transpose(infodict['fjac'])[:self.nvarys, :]),
               take(eye(self.nvarys), infodict['ipvt'] - 1, 0))
    try:
        cov = inv(dot(transpose(rvec), rvec))
    except (LinAlgError, ValueError):
        cov = None

    # reset per-parameter error estimates before (possibly) refilling them
    for par in self.params.values():
        par.stderr, par.correl = 0, None

    self.covar = cov
    if cov is None:
        self.errorbars = False
        # BUG FIX: the '%s' placeholder was previously left unfilled, so
        # users saw a literal '%s'; substitute the current status
        # message into it.
        self.message = '%s. Could not estimate error-bars' % self.message
    else:
        self.errorbars = True
        if self.scale_covar:
            # scale covariance by reduced chi-square
            self.covar = cov = cov * sum_sqr / self.nfree

        # uncertainties on constrained parameters:
        #   get values with uncertainties (including correlations),
        #   temporarily set Parameter values to these,
        #   re-evaluate constrained parameters to extract stderr,
        #   and then set Parameters back to best-fit value
        # ROBUSTNESS FIX: correlated_values raises LinAlgError/ValueError
        # on a singular covariance; degrade gracefully instead of
        # aborting the whole fit.
        try:
            uvars = uncertainties.correlated_values(vbest, self.covar)
        except (LinAlgError, ValueError):
            uvars = None

        for ivar, varname in enumerate(self.var_map):
            par = self.params[varname]
            par.stderr = sqrt(cov[ivar, ivar])
            par.correl = {}
            for jvar, varn2 in enumerate(self.var_map):
                if jvar != ivar:
                    par.correl[varn2] = (
                        cov[ivar, jvar] /
                        (par.stderr * sqrt(cov[jvar, jvar])))

        if uvars is not None:
            for pname, par in self.params.items():
                eval_stderr(par, uvars, self.var_map,
                            self.params, self.asteval)
            # restore nominal values
            for v, nam in zip(uvars, self.var_map):
                self.asteval.symtable[nam] = v.nominal_value

    # drop cached constraint ASTs now that stderr propagation is done
    for par in self.params.values():
        if hasattr(par, 'ast'):
            delattr(par, 'ast')
    return self.success
def leastsq(self, **kws):
    """Use Levenberg-Marquardt minimization to perform the fit.

    This assumes that ModelParameters have been stored, and a function
    to minimize has been properly set up.

    This wraps scipy.optimize.leastsq, and keyword arguments are passed
    directly as options to scipy.optimize.leastsq.

    When possible, this calculates the estimated uncertainties and
    variable correlations from the covariance matrix.

    Writes fit statistics and per-parameter uncertainties to attributes
    of ``self.paramgroup`` and returns the integer lmdif status code.
    """
    self.prepare_fit()
    # a single tolerance is used for xtol, ftol, and gtol
    toler = self.toler
    lskws = dict(full_output=1, xtol=toler, ftol=toler, gtol=toler,
                 maxfev=1000*(self.nvarys+1), Dfun=None)

    lskws.update(self.kws)
    lskws.update(kws)

    if lskws['Dfun'] is not None:
        self.jacfcn = lskws['Dfun']
        lskws['Dfun'] = self.__jacobian

    lsout = scipy_leastsq(self.__residual, self.vars, **lskws)
    vbest, cov, infodict, errmsg, ier = lsout
    resid = infodict['fvec']

    group = self.paramgroup

    message = 'Fit succeeded.'
    if ier == 0:
        message = 'Invalid Input Parameters.'
    elif ier == 5:
        message = self.err_maxfev % lskws['maxfev']
    elif ier > 5:
        message = 'See lmdif_message.'
    if cov is None:
        message = '%s Could not estimate error-bars' % message

    ndata = len(resid)
    chisqr = (resid**2).sum()
    nfree = (ndata - self.nvarys)
    redchi = chisqr / nfree

    # record fit statistics on the output parameter group
    group.lmdif_status = ier
    group.lmdif_message = errmsg
    # lmdif/lmder return codes 1-4 indicate convergence
    group.lmdif_success = ier in [1, 2, 3, 4]
    group.toler = self.toler
    group.nfcn_calls = infodict['nfev']
    group.residual = resid
    group.message = message
    group.chi_square = chisqr
    group.chi_reduced = redchi
    group.nvarys = self.nvarys
    group.nfree = nfree
    group.errorbars = cov is not None

    # reset per-parameter error estimates before (possibly) refilling them
    for name in self.var_names:
        par = getattr(group, name)
        par.stderr = 0
        par.correl = None

    if cov is not None:
        if self.scale_covar:
            # scale covariance by reduced chi-square
            cov = cov * chisqr / nfree
        for ivar, name in enumerate(self.var_names):
            par = getattr(group, name)
            par.stderr = sqrt(cov[ivar, ivar])
            par.correl = {}
            for jvar, name2 in enumerate(self.var_names):
                if jvar != ivar:
                    par.correl[name2] = (cov[ivar, jvar] /
                                         (par.stderr * sqrt(cov[jvar, jvar])))
        group.covar_vars = self.var_names
        group.covar = cov

    return ier
def leastsq(self, scale_covar=True, **kws):
    """Use Levenberg-Marquardt minimization to perform the fit.

    This assumes that ModelParameters have been stored, and a function
    to minimize has been properly set up.

    This wraps scipy.optimize.leastsq, and keyword arguments are passed
    directly as options to scipy.optimize.leastsq.

    When possible, this calculates the estimated uncertainties and
    variable correlations from the covariance matrix.

    Writes outputs to many internal attributes, and returns True if the
    fit was successful, False if not.

    NOTE(review): the ``scale_covar`` argument is accepted but never
    read -- the body uses ``self.scale_covar`` instead.  Kept as-is for
    interface compatibility; confirm intent with callers.
    """
    self.prepare_fit()
    lskws = dict(full_output=1, xtol=1.e-7, ftol=1.e-7, gtol=1.e-7,
                 maxfev=1000 * (self.nvarys + 1), Dfun=None)

    lskws.update(self.kws)
    lskws.update(kws)

    if lskws['Dfun'] is not None:
        # user-supplied Jacobian: route it through our wrapper so it is
        # called with the internal (possibly bounded) parameter values
        self.jacfcn = lskws['Dfun']
        lskws['Dfun'] = self.__jacobian

    lsout = scipy_leastsq(self.__residual, self.vars, **lskws)
    vbest, cov, infodict, errmsg, ier = lsout

    self.residual = resid = infodict['fvec']

    self.ier = ier
    self.lmdif_message = errmsg
    self.message = 'Fit succeeded.'
    # lmdif/lmder return codes 1-4 indicate convergence
    self.success = ier in [1, 2, 3, 4]

    if ier == 0:
        self.message = 'Invalid Input Parameters.'
    elif ier == 5:
        self.message = self.err_maxfev % lskws['maxfev']
    else:
        self.message = 'Tolerance seems to be too small.'

    self.nfev = infodict['nfev']
    self.ndata = len(resid)

    sum_sqr = (resid**2).sum()
    self.chisqr = sum_sqr
    self.nfree = (self.ndata - self.nvarys)
    self.redchi = sum_sqr / self.nfree

    # reset per-parameter error estimates before (possibly) refilling them
    for par in self.params.values():
        par.stderr = 0
        par.correl = None
        if hasattr(par, 'ast'):
            delattr(par, 'ast')

    if cov is None:
        self.errorbars = False
        # BUG FIX: the '%s' placeholder was previously left unfilled, so
        # users saw a literal '%s'; substitute the current status
        # message into it.
        self.message = '%s. Could not estimate error-bars' % self.message
    else:
        self.errorbars = True
        self.covar = cov
        if self.scale_covar:
            # scale covariance by reduced chi-square
            cov = cov * sum_sqr / self.nfree
        for ivar, varname in enumerate(self.var_map):
            par = self.params[varname]
            par.stderr = sqrt(cov[ivar, ivar])
            par.correl = {}
            for jvar, varn2 in enumerate(self.var_map):
                if jvar != ivar:
                    par.correl[varn2] = (
                        cov[ivar, jvar] /
                        (par.stderr * sqrt(cov[jvar, jvar])))
    return self.success
def leastsq(self, params=None, **kws):
    """Use Levenberg-Marquardt minimization to perform a fit.

    This assumes that ModelParameters have been stored, and a function
    to minimize has been properly set up.  This wraps
    scipy.optimize.leastsq.

    When possible, this calculates the estimated uncertainties and
    variable correlations from the covariance matrix.

    Writes outputs to many internal attributes.

    Parameters
    ----------
    params : Parameters, optional
        Parameters to use as starting points.
    kws : dict, optional
        Minimizer options to pass to scipy.optimize.leastsq.

    Returns
    -------
    success : bool
        True if fit was successful, False if not.
    """
    result = self.prepare_fit(params=params)
    vars = result.init_vals
    nvars = len(vars)
    lskws = dict(full_output=1, xtol=1.e-7, ftol=1.e-7, col_deriv=False,
                 gtol=1.e-7, maxfev=2000 * (nvars + 1), Dfun=None)

    lskws.update(self.kws)
    lskws.update(kws)

    self.col_deriv = False
    if lskws['Dfun'] is not None:
        # user-supplied Jacobian: remember its orientation and route it
        # through our wrapper
        self.jacfcn = lskws['Dfun']
        self.col_deriv = lskws['col_deriv']
        lskws['Dfun'] = self.__jacobian

    # suppress runtime warnings during fit and error analysis
    orig_warn_settings = np.geterr()
    np.seterr(all='ignore')

    lsout = scipy_leastsq(self.__residual, vars, **lskws)
    _best, _cov, infodict, errmsg, ier = lsout
    result.aborted = self._abort
    self._abort = False

    result.residual = resid = infodict['fvec']
    result.ier = ier
    result.lmdif_message = errmsg
    result.message = 'Fit succeeded.'
    # lmdif/lmder return codes 1-4 indicate convergence
    result.success = ier in [1, 2, 3, 4]
    if result.aborted:
        result.message = 'Fit aborted by user callback.'
        result.success = False
    elif ier == 0:
        result.message = 'Invalid Input Parameters.'
    elif ier == 5:
        result.message = self.err_maxfev % lskws['maxfev']
    else:
        result.message = 'Tolerance seems to be too small.'

    result.ndata = len(resid)
    result.chisqr = (resid**2).sum()
    result.nfree = (result.ndata - nvars)
    result.redchi = result.chisqr / result.nfree
    # information criteria from the chi-square statistic
    _log_likelihood = result.ndata * np.log(result.redchi)
    result.aic = _log_likelihood + 2 * nvars
    result.bic = _log_likelihood + np.log(result.ndata) * nvars

    params = result.params

    # need to map _best values to params, then calculate the
    # grad for the variable parameters
    grad = ones_like(_best)
    vbest = ones_like(_best)

    # ensure that _best, vbest, and grad are not
    # broken 1-element ndarrays.
    if len(np.shape(_best)) == 0:
        _best = np.array([_best])
    if len(np.shape(vbest)) == 0:
        vbest = np.array([vbest])
    if len(np.shape(grad)) == 0:
        grad = np.array([grad])

    for ivar, name in enumerate(result.var_names):
        grad[ivar] = params[name].scale_gradient(_best[ivar])
        vbest[ivar] = params[name].value

    # modified from JJ Helmus' leastsqbound.py:
    # undo the internal parameter scaling in the returned Jacobian,
    # then rebuild the covariance matrix from its permuted QR factors
    infodict['fjac'] = transpose(
        transpose(infodict['fjac']) / take(grad, infodict['ipvt'] - 1))
    rvec = dot(triu(transpose(infodict['fjac'])[:nvars, :]),
               take(eye(nvars), infodict['ipvt'] - 1, 0))
    try:
        result.covar = inv(dot(transpose(rvec), rvec))
    except (LinAlgError, ValueError):
        result.covar = None

    has_expr = False
    for par in params.values():
        par.stderr, par.correl = 0, None
        has_expr = has_expr or par.expr is not None

    # self.errorbars = error bars were successfully estimated
    result.errorbars = (result.covar is not None)
    if result.aborted:
        result.errorbars = False
    if result.errorbars:
        if self.scale_covar:
            # scale covariance by reduced chi-square
            result.covar *= result.redchi
        for ivar, name in enumerate(result.var_names):
            par = params[name]
            par.stderr = sqrt(result.covar[ivar, ivar])
            par.correl = {}
            try:
                result.errorbars = result.errorbars and (par.stderr > 0.0)
                for jvar, varn2 in enumerate(result.var_names):
                    if jvar != ivar:
                        par.correl[varn2] = (
                            result.covar[ivar, jvar] /
                            (par.stderr * sqrt(result.covar[jvar, jvar])))
            except Exception:
                # BUG FIX: was a bare 'except:', which also swallowed
                # KeyboardInterrupt/SystemExit; catch only genuine errors
                # (e.g. a zero stderr -> ZeroDivisionError).
                result.errorbars = False

        if has_expr:
            # uncertainties on constrained parameters:
            #   get values with uncertainties (including correlations),
            #   temporarily set Parameter values to these,
            #   re-evaluate constrained parameters to extract stderr,
            #   and then set Parameters back to best-fit value
            try:
                uvars = uncertainties.correlated_values(vbest, result.covar)
            except (LinAlgError, ValueError):
                uvars = None
            if uvars is not None:
                for par in params.values():
                    eval_stderr(par, uvars, result.var_names, params)
                # restore nominal values
                for v, nam in zip(uvars, result.var_names):
                    params[nam].value = v.nominal_value

    if not result.errorbars:
        result.message = '%s. Could not estimate error-bars' % result.message

    np.seterr(**orig_warn_settings)
    return result
def leastsq(self, params=None, **kws):
    """Use Levenberg-Marquardt minimization to perform a fit.

    This assumes that ModelParameters have been stored, and a function
    to minimize has been properly set up.  This wraps
    scipy.optimize.leastsq.

    When possible, this calculates the estimated uncertainties and
    variable correlations from the covariance matrix.

    Writes outputs to many internal attributes.

    Parameters
    ----------
    params : Parameters, optional
        Parameters to use as starting points.
    kws : dict, optional
        Minimizer options to pass to scipy.optimize.leastsq.

    Returns
    -------
    success : bool
        True if fit was successful, False if not.
    """
    result = self.prepare_fit(params=params)
    vars = result.init_vals
    nvars = len(vars)
    lskws = dict(full_output=1, xtol=1.e-7, ftol=1.e-7, col_deriv=False,
                 gtol=1.e-7, maxfev=2000*(nvars+1), Dfun=None)

    lskws.update(self.kws)
    lskws.update(kws)

    self.col_deriv = False
    if lskws['Dfun'] is not None:
        # user-supplied Jacobian: remember its orientation and route it
        # through our wrapper
        self.jacfcn = lskws['Dfun']
        self.col_deriv = lskws['col_deriv']
        lskws['Dfun'] = self.__jacobian

    # suppress runtime warnings during fit and error analysis
    orig_warn_settings = np.geterr()
    np.seterr(all='ignore')

    lsout = scipy_leastsq(self.__residual, vars, **lskws)
    _best, _cov, infodict, errmsg, ier = lsout
    result.aborted = self._abort
    self._abort = False

    result.residual = resid = infodict['fvec']
    result.ier = ier
    result.lmdif_message = errmsg
    result.message = 'Fit succeeded.'
    # lmdif/lmder return codes 1-4 indicate convergence
    result.success = ier in [1, 2, 3, 4]
    if result.aborted:
        result.message = 'Fit aborted by user callback.'
        result.success = False
    elif ier == 0:
        result.message = 'Invalid Input Parameters.'
    elif ier == 5:
        result.message = self.err_maxfev % lskws['maxfev']
    else:
        result.message = 'Tolerance seems to be too small.'

    result.ndata = len(resid)
    result.chisqr = (resid**2).sum()
    result.nfree = (result.ndata - nvars)
    result.redchi = result.chisqr / result.nfree
    # information criteria from the chi-square statistic
    _log_likelihood = result.ndata * np.log(result.redchi)
    result.aic = _log_likelihood + 2 * nvars
    result.bic = _log_likelihood + np.log(result.ndata) * nvars

    params = result.params

    # need to map _best values to params, then calculate the
    # grad for the variable parameters
    grad = ones_like(_best)
    vbest = ones_like(_best)

    # ensure that _best, vbest, and grad are not
    # broken 1-element ndarrays.
    if len(np.shape(_best)) == 0:
        _best = np.array([_best])
    if len(np.shape(vbest)) == 0:
        vbest = np.array([vbest])
    if len(np.shape(grad)) == 0:
        grad = np.array([grad])

    for ivar, name in enumerate(result.var_names):
        grad[ivar] = params[name].scale_gradient(_best[ivar])
        vbest[ivar] = params[name].value

    # modified from JJ Helmus' leastsqbound.py:
    # undo the internal parameter scaling in the returned Jacobian,
    # then rebuild the covariance matrix from its permuted QR factors
    infodict['fjac'] = transpose(transpose(infodict['fjac']) /
                                 take(grad, infodict['ipvt'] - 1))
    rvec = dot(triu(transpose(infodict['fjac'])[:nvars, :]),
               take(eye(nvars), infodict['ipvt'] - 1, 0))
    try:
        result.covar = inv(dot(transpose(rvec), rvec))
    except (LinAlgError, ValueError):
        result.covar = None

    has_expr = False
    for par in params.values():
        par.stderr, par.correl = 0, None
        has_expr = has_expr or par.expr is not None

    # self.errorbars = error bars were successfully estimated
    result.errorbars = (result.covar is not None)
    if result.aborted:
        result.errorbars = False
    if result.errorbars:
        if self.scale_covar:
            # scale covariance by reduced chi-square
            result.covar *= result.redchi
        for ivar, name in enumerate(result.var_names):
            par = params[name]
            par.stderr = sqrt(result.covar[ivar, ivar])
            par.correl = {}
            try:
                result.errorbars = result.errorbars and (par.stderr > 0.0)
                for jvar, varn2 in enumerate(result.var_names):
                    if jvar != ivar:
                        par.correl[varn2] = (result.covar[ivar, jvar] /
                                             (par.stderr *
                                              sqrt(result.covar[jvar, jvar])))
            except Exception:
                # BUG FIX: was a bare 'except:', which also swallowed
                # KeyboardInterrupt/SystemExit; catch only genuine errors
                # (e.g. a zero stderr -> ZeroDivisionError).
                result.errorbars = False

        if has_expr:
            # uncertainties on constrained parameters:
            #   get values with uncertainties (including correlations),
            #   temporarily set Parameter values to these,
            #   re-evaluate constrained parameters to extract stderr,
            #   and then set Parameters back to best-fit value
            try:
                uvars = uncertainties.correlated_values(vbest, result.covar)
            except (LinAlgError, ValueError):
                uvars = None
            if uvars is not None:
                for par in params.values():
                    eval_stderr(par, uvars, result.var_names, params)
                # restore nominal values
                for v, nam in zip(uvars, result.var_names):
                    params[nam].value = v.nominal_value

    if not result.errorbars:
        result.message = '%s. Could not estimate error-bars' % result.message

    np.seterr(**orig_warn_settings)
    return result