def fit_report(group, show_correl=True, min_correl=0.1, _larch=None, **kws):
    """Return a multi-line text report of fit statistics for a
    'fit parameter group' produced by a minimize/fit run.

    Parameters
    ----------
    group : larch Group
        group holding fit results (residual, nvarys, nfree,
        nfcn_calls, chi_square, chi_reduced, and Parameters).
    show_correl : bool
        whether to include the variable-correlation table.
    min_correl : float
        smallest absolute correlation value to report.
    _larch : larch interpreter instance or None
        used to verify that `group` is a larch Group.

    Returns
    -------
    str or None
        the formatted report, or None if `group` is not a larch Group.
    """
    # Guard _larch itself: the default is None and dereferencing it
    # would raise AttributeError.  print() form is valid on both
    # Python 2 and 3 (the original used a py2-only print statement).
    if _larch is None or not _larch.symtable.isgroup(group):
        print('must pass Group to fit_report()')
        return

    topline = '===================== FIT RESULTS ====================='
    header = '[[%s]]'
    varformat = '   %12s = % f +/- %f   (init= % f)'
    exprformat = '   %12s = % f = \'%s\''
    out = [topline, header % 'Statistics']

    npts = len(group.residual)
    out.append('   npts, nvarys       = %i, %i' % (npts, group.nvarys))
    out.append('   nfree, nfcn_calls  = %i, %i' % (group.nfree, group.nfcn_calls))
    out.append('   chi_square         = %f' % (group.chi_square))
    out.append('   reduced chi_square = %f' % (group.chi_reduced))
    out.append(' ')
    out.append(header % 'Variables')
    exprs = []
    for name in dir(group):
        var = getattr(group, name)
        # pad short names to a fixed 14-character column
        if len(name) < 14:
            name = (name + ' '*14)[:14]
        if isParameter(var):
            if var.vary:
                out.append(varformat % (name, var.value,
                                        var.stderr, var._initval))
            elif var.expr is not None:
                # constrained parameter: report value and expression
                exprs.append(exprformat % (name, var.value, var.expr))
    if len(exprs) > 0:
        out.append(header % 'Constraint Expressions')
        out.extend(exprs)

    covar_vars = getattr(group, 'covar_vars', [])
    if show_correl and len(covar_vars) > 0:
        out.append(' ')
        out.append(header % 'Correlations' +
                   '    (unreported correlations are < % .3f)' % min_correl)
        correls = {}
        for i, name in enumerate(covar_vars):
            par = getattr(group, name)
            if not par.vary:
                continue
            if hasattr(par, 'correl') and par.correl is not None:
                # only report each pair once (name2 ranges over the tail)
                for name2 in covar_vars[i+1:]:
                    if name != name2 and name2 in par.correl:
                        correls["%s, %s" % (name, name2)] = par.correl[name2]
        # strongest correlations first
        sort_correl = sorted(correls.items(), key=lambda it: abs(it[1]))
        sort_correl.reverse()
        for name, val in sort_correl:
            if abs(val) < min_correl:
                break
            if len(name) < 20:
                name = (name + ' '*20)[:20]
            out.append('   %s = % .3f ' % (name, val))
    out.append('='*len(topline))
    return '\n'.join(out)
def feffit(params, datasets, _larch=None, rmax_out=10, path_outputs=True, **kws):
    """Run a feffit fit: least-squares fit of `params` against one or
    more FeffitDataSets.

    Parameters
    ----------
    params : larch Group
        group of fit Parameters.
    datasets : FeffitDataSet or list of FeffitDataSet
        the data to fit; a single dataset is wrapped in a list.
    rmax_out : float
        maximum R for output Fourier transforms.
    path_outputs : bool
        whether to save per-path FT arrays.

    Returns
    -------
    larch Group or None
        group with `fit`, `params`, and `datasets` attributes, or
        None if `datasets` is not made of FeffitDataSets.
    """
    def _resid(params, datasets=None, _larch=None, **kws):
        """residual function: concatenated residuals of all datasets"""
        return concatenate([d.residual() for d in datasets])

    # accept a bare dataset as well as a list of them
    if isinstance(datasets, FeffitDataSet):
        datasets = [datasets]
    for ds in datasets:
        if not isinstance(ds, FeffitDataSet):
            # print() form is valid on both Python 2 and 3
            # (the original used a py2-only print statement)
            print("feffit needs a list of FeffitDataSets")
            return

    fitkws = dict(datasets=datasets)
    fit = Minimizer(_resid, params, fcn_kws=fitkws, _larch=_larch)
    fit.leastsq()

    # scale uncertainties to sqrt(n_idp - n_varys)
    # NOTE(review): if n_idp <= nvarys this gives sqrt of a
    # non-positive number (nan/error) -- presumably the caller
    # guarantees enough independent points; confirm upstream.
    n_idp = 0
    for ds in datasets:
        n_idp += ds.transform.n_idp
    err_scale = sqrt(n_idp - params.nvarys)
    for name in dir(params):
        p = getattr(params, name)
        if isParameter(p) and p.vary:
            p.stderr *= err_scale

    # here we create outputs:
    for ds in datasets:
        ds.save_ffts(rmax_out=rmax_out, path_outputs=path_outputs)

    out = larch.Group(name='feffit fit results', fit=fit,
                      params=params, datasets=datasets)
    return out
def prepare_fit(self):
    """prepare parameters for fit

    determine which parameters are actually variables
    and which are defined expressions.
    """
    # already prepared once -- nothing more to do
    if self.__prepared:
        return

    # publish this group of parameters as larch's active paramGroup;
    # bail out if it is not a valid larch Group
    symtable = self._larch.symtable
    if not symtable.isgroup(self.paramgroup):
        self._larch.write.write('Minimize Error: invalid parameter group!')
        return
    symtable._sys.paramGroup = self.paramgroup

    self.nfev_calls = 0
    self.var_names = []
    self.defvars = []
    self.vars = []
    self.nvarys = 0
    pgroup = self.paramgroup
    for attr in dir(pgroup):
        candidate = getattr(pgroup, attr)
        if not isParameter(candidate):
            continue
        if candidate.expr is not None:
            # constrained parameter: evaluate once so its initial
            # value is set (this also checks the expression syntax)
            candidate._getval()
        elif candidate.vary:
            # true fit variable: record its name and starting value
            self.var_names.append(attr)
            self.vars.append(candidate.value)
        # make sure every Parameter knows its own symbol name
        if getattr(candidate, 'name', None) is None:
            candidate.name = attr
    self.nvarys = len(self.vars)
    self.__prepared = True
def json_encode(name, symtable=None):
    """Return a json-encodable representation of a larch symbol.

    Parameters
    ----------
    name : str
        symbol name to look up in the symbol table.
    symtable : larch symbol table or None
        table in which to resolve `name`; if None, None is returned.

    Returns
    -------
    dict, str, or None
        encodable representation of the symbol's value, or None if
        the symbol cannot be found or encoded.
    """
    if symtable is None or not symtable.has_symbol(name):
        return None
    return _json_encode_obj(symtable.get_symbol(name), symtable)


def _json_encode_obj(obj, symtable):
    """Encode an already-resolved object (helper for json_encode).

    Handles numpy arrays, larch Groups (recursively), Parameters, and
    falls back to json.dumps for plain values.
    """
    if isinstance(obj, np.ndarray):
        out = {'__class__': 'Array',
               '__shape__': obj.shape,
               '__dtype__': obj.dtype.name}
        out['value'] = json.dumps(obj.tolist())
        return out
    if symtable.isgroup(obj):
        out = {'__class__': 'Group'}
        for member in dir(obj):
            try:
                # BUGFIX: the original called json_encode(value) --
                # passing the member *value* as a symbol name with no
                # symtable -- which always returned None.  Encode the
                # member value directly, reusing the same symtable.
                out[member] = _json_encode_obj(getattr(obj, member),
                                               symtable)
            except Exception:
                # best-effort: skip members that cannot be encoded
                pass
        return out
    if isParameter(obj):
        out = {'__class__': 'Parameter'}
        for attr in ('value', 'name', 'vary', 'min', 'max', 'expr'):
            val = getattr(obj, attr, None)
            if val is not None:
                out[attr] = val
        return out
    # plain value: try to encode directly, then by repr, else give up
    try:
        return json.dumps(obj)
    except Exception:
        try:
            return json.dumps(repr(obj))
        except Exception:
            return None
def feffit_report(result, min_correl=0.1, with_paths=True, _larch=None, **kws):
    """Return a multi-line text report of fit for feffit().

    Parameters
    ----------
    result : larch Group
        output of feffit(), with `fit`, `params`, and `datasets`
        attributes.
    min_correl : float
        smallest absolute correlation value to report.
    with_paths : bool
        whether to include a per-path report section.

    Returns
    -------
    str or None
        the formatted report, or None if `result` is not feffit()
        output.
    """
    # Catch only the missing-attribute case instead of the original
    # bare `except: pass` + flag; print() form is valid on both
    # Python 2 and 3 (the original used a py2-only print statement).
    try:
        fit = result.fit
        params = result.params
        datasets = result.datasets
    except AttributeError:
        print('must pass output of feffit()!')
        return

    topline = '=================== FEFFIT RESULTS ===================='
    header = '[[%s]]'
    varformat = '   %12s = % f +/- %f   (init= % f)'
    exprformat = '   %12s = % f = \'%s\''
    out = [topline, header % 'Statistics']

    npts = len(params.residual)
    out.append('   npts, nvarys       = %i, %i' % (npts, params.nvarys))
    out.append('   nfree, nfcn_calls  = %i, %i' % (params.nfree, params.nfcn_calls))
    out.append('   chi_square         = %f' % (params.chi_square))
    out.append('   reduced chi_square = %f' % (params.chi_reduced))
    out.append(' ')

    if len(datasets) == 1:
        out.append(header % 'Data')
    else:
        out.append(header % 'Datasets (%i)' % len(datasets))
    for i, ds in enumerate(datasets):
        trans = ds.transform
        if len(datasets) > 1:
            out.append(' dataset %i:' % (i+1))
        out.append('   n_independent      = %.3f ' % (trans.n_idp))
        out.append('   eps_k, eps_r       = %f, %f' % (trans.epsilon_k,
                                                       trans.epsilon_r))
        out.append('   fit space          = %s ' % (trans.fitspace))
        out.append('   r-range            = %.3f, %.3f' % (trans.rmin, trans.rmax))
        out.append('   k-range            = %.3f, %.3f' % (trans.kmin, trans.kmax))
        kwin = '   k window, dk       = %s, %.3f' % (trans.window, trans.dk)
        if trans.dk2 is not None:
            # second taper width, only when set
            kwin = "%s, %.3f" % (kwin, trans.dk2)
        out.append(kwin)
        out.append('   k-weight           = %s' % (repr(trans.kweight)))
        out.append('   paths used in fit  = %s' %
                   (repr([p.filename for p in ds.pathlist])))
    out.append(' ')
    out.append(header % 'Variables')
    exprs = []
    for name in dir(params):
        var = getattr(params, name)
        # pad short names to a fixed 14-character column
        if len(name) < 14:
            name = (name + ' '*14)[:14]
        if isParameter(var):
            if var.vary:
                out.append(varformat % (name, var.value,
                                        var.stderr, var._initval))
            elif var.expr is not None:
                # constrained parameter: report value and expression
                exprs.append(exprformat % (name, var.value, var.expr))
    if len(exprs) > 0:
        out.append(header % 'Constraint Expressions')
        out.extend(exprs)

    covar_vars = getattr(params, 'covar_vars', [])
    if len(covar_vars) > 0:
        out.append(' ')
        out.append(header % 'Correlations' +
                   '    (unreported correlations are < % .3f)' % min_correl)
        correls = {}
        for i, name in enumerate(covar_vars):
            par = getattr(params, name)
            if not par.vary:
                continue
            if hasattr(par, 'correl') and par.correl is not None:
                # only report each pair once (name2 ranges over the tail)
                for name2 in covar_vars[i+1:]:
                    if name != name2 and name2 in par.correl:
                        correls["%s, %s" % (name, name2)] = par.correl[name2]
        # strongest correlations first
        sort_correl = sorted(correls.items(), key=lambda it: abs(it[1]))
        sort_correl.reverse()
        for name, val in sort_correl:
            if abs(val) < min_correl:
                break
            if len(name) < 20:
                name = (name + ' '*20)[:20]
            out.append('   %s = % .3f ' % (name, val))

    if with_paths:
        out.append(' ')
        out.append(header % 'Paths')
        for ds in datasets:
            for p in ds.pathlist:
                out.append('%s\n' % p.report())
    out.append('='*len(topline))
    return '\n'.join(out)