def report(self):
    "return text report of parameters"
    # NOTE(review): return value is unused; the call is presumably kept for
    # its side effects of syncing path parameters -- confirm
    tmpvals = self.__path_params()
    # collect (value, stderr) for each named path parameter found in params
    pathpars = {}
    for pname in ('degen', 's02', 'e0', 'deltar', 'sigma2', 'third', 'fourth', 'ei'):
        parname = fix_varname(PATHPAR_FMT % (pname, self.label))
        if parname in self.params:
            pathpars[pname] = (self.params[parname].value,
                               self.params[parname].stderr)
    geomlabel = ' atom x y z ipot'
    geomformat = ' %4s % .4f, % .4f, % .4f %i'
    out = [' Path %s, Feff.dat file = %s' % (self.label, self.filename)]
    out.append(geomlabel)
    # one line per atom in the path geometry; ipot 0 marks the absorber
    for atsym, iz, ipot, amass, x, y, z in self.geom:
        s = geomformat % (atsym, x, y, z, ipot)
        if ipot == 0:
            s = "%s (absorber)" % s
        out.append(s)
    stderrs = {}  # NOTE(review): never used
    out.append(' {:7s}= {:s}'.format('reff', gformat(self._feffdat.reff)))
    for pname in ('degen', 's02', 'e0', 'r', 'deltar', 'sigma2', 'third', 'fourth', 'ei'):
        val = strval = getattr(self, pname, 0)
        parname = fix_varname(PATHPAR_FMT % (pname, self.label))
        std = None
        if pname == 'r':
            # 'r' is derived: reff plus the fitted deltar parameter
            # NOTE(review): raises AttributeError if the deltar parameter is
            # absent (par is None) -- confirm this cannot happen in practice
            parname = fix_varname(PATHPAR_FMT % ('deltar', self.label))
            par = self.params.get(parname, None)
            val = par.value + self._feffdat.reff
            # NOTE(review): assumes self.deltar is a string expression here;
            # a numeric deltar would make this concatenation raise TypeError
            strval = 'reff + ' + getattr(self, 'deltar', 0)
            std = par.stderr
        else:
            if pname in pathpars:
                val, std = pathpars[pname]
            else:
                par = self.params.get(parname, None)
                if par is not None:
                    val = par.value
                    std = par.stderr
        if std is None or std <= 0:
            svalue = gformat(val)
        else:
            svalue = "{:s} +/-{:s}".format(gformat(val), gformat(std))
        if pname == 's02':
            pname = 'n*s02'
        svalue = " {:7s}= {:s}".format(pname, svalue)
        # when the attribute was a string expression, echo it after the value
        if isinstance(strval, str):
            svalue = "{:s} '{:s}'".format(svalue, strval)
        # suppress all-zero higher-order cumulants from the report
        if val == 0 and pname in ('third', 'fourth', 'ei'):
            continue
        out.append(svalue)
    return '\n'.join(out)
def show_fitresult(self, nfit=0, datagroup=None):
    """Display peak-fit result number *nfit* for *datagroup* in the widgets.

    Parameters
    ----------
    nfit : int
        index into the peak-fit history (0 = most recent) [0]
    datagroup : group or None
        data group to show; current group used when None [None]
    """
    if datagroup is not None:
        self.datagroup = datagroup
    result = self.get_fitresult(nfit=nfit)
    wids = self.wids
    wids['data_title'].SetLabel(self.datagroup.filename)
    wids['hist_info'].SetLabel("Fit #%2.2d of %d" % (nfit+1, len(self.peakfit_history)))
    parts = []
    model_repr = result.model._reprstring(long=True)
    # prettify the lmfit model repr: title-case each component prefix
    for word in model_repr.split('Model('):
        if ',' in word:
            # FIX: split only on the first ', ' -- the suffix may itself
            # contain commas (several keyword arguments), which would make
            # a bare split(', ') raise ValueError on tuple unpacking
            pref, suff = word.split(', ', 1)
            parts.append(("%sModel(%s" % (pref.title(), suff)))
        else:
            parts.append(word)
    desc = ''.join(parts)
    parts = []
    tlen = 90
    # wrap the description near tlen characters, breaking at '+' signs
    while len(desc) >= tlen:
        i = desc[tlen-1:].find('+')
        if i < 0:
            break
        parts.append(desc[:tlen+i])
        desc = desc[tlen+i:]
    parts.append(desc)
    wids['model_desc'].SetLabel('\n'.join(parts))
    wids['params'].DeleteAllItems()
    wids['paramsdata'] = []
    # parameters are listed in reverse order
    for param in reversed(result.params.values()):
        pname = param.name
        try:
            val = gformat(param.value)
        except (TypeError, ValueError):
            val = ' ??? '
        serr = ' N/A '
        if param.stderr is not None:
            serr = gformat(param.stderr, 10)
        extra = ' '
        if param.expr is not None:
            extra = ' = %s ' % param.expr
        elif not param.vary:
            extra = ' (fixed)'
        elif param.init_value is not None:
            extra = ' (init=%s)' % gformat(param.init_value, 11)
        wids['params'].AppendItem((pname, val, serr, extra))
        wids['paramsdata'].append(pname)
    self.Refresh()
def show_fitresult(self, nfit=0, datagroup=None):
    """Display peak-fit result number *nfit* for *datagroup* in the widgets."""
    if datagroup is not None:
        self.datagroup = datagroup
    result = self.get_fitresult(nfit=nfit)
    wids = self.wids
    wids['data_title'].SetLabel(self.datagroup.filename)
    wids['hist_info'].SetLabel("Fit #%2.2d of %d" % (nfit+1, len(self.peakfit_history)))
    parts = []
    model_repr = result.model._reprstring(long=True)
    # prettify the lmfit model repr: title-case each component prefix
    for word in model_repr.split('Model('):
        if ',' in word:
            # maxsplit=1: the suffix may itself contain further commas
            pref, suff = word.split(', ', 1)
            parts.append(("%sModel(%s" % (pref.title(), suff)))
        else:
            parts.append(word)
    desc = ''.join(parts)
    parts = []
    tlen = 90
    # wrap the model description near tlen characters, breaking at '+' signs
    while len(desc) >= tlen:
        i = desc[tlen-1:].find('+')
        if i < 0:
            break
        parts.append(desc[:tlen+i])
        desc = desc[tlen+i:]
    parts.append(desc)
    wids['model_desc'].SetLabel('\n'.join(parts))
    wids['params'].DeleteAllItems()
    wids['paramsdata'] = []
    # parameters are listed in reverse order
    for param in reversed(result.params.values()):
        pname = param.name
        try:
            val = gformat(param.value)
        except (TypeError, ValueError):
            val = ' ??? '
        serr = ' N/A '
        if param.stderr is not None:
            serr = gformat(param.stderr, 10)
        extra = ' '
        if param.expr is not None:
            extra = ' = %s ' % param.expr
        elif not param.vary:
            extra = ' (fixed)'
        elif param.init_value is not None:
            extra = ' (init=%s)' % gformat(param.init_value, 11)
        wids['params'].AppendItem((pname, val, serr, extra))
        wids['paramsdata'].append(pname)
    self.Refresh()
def show_fitresult(self, n):
    """Show the parameters of LCF fit number *n* in the result widgets."""
    result = self.datagroup.lcf_result[n]
    self.current_fit = n
    widgets = self.wids
    widgets['nfits_title'].SetLabel('Showing Fit # %2.2d' % (n+1))
    widgets['paramstitle'].SetLabel('[[Parameters for Fit # %2.2d]]' % (n+1))
    widgets['params'].DeleteAllItems()
    for name, param in result.params.items():
        # show '--' for the uncertainty when no stderr is available
        err_text = '--' if param.stderr is None else gformat(param.stderr, 12)
        widgets['params'].AppendItem((name, gformat(param.value, 12), err_text))
def show_fitresult(self, n):
    """Show the parameters of LCF fit number *n* (of the best fits)."""
    result = self.datagroup.lcf_result[n]
    self.current_fit = n
    widgets = self.wids
    widgets['nfits_title'].SetLabel('Showing Fit # %2.2d of %i Best Fits' % (n+1, self.nresults))
    widgets['paramstitle'].SetLabel('[[Parameters for Fit # %2.2d]]' % (n+1))
    widgets['params'].DeleteAllItems()
    for name, param in result.params.items():
        # show '--' for the uncertainty when no stderr is available
        err_text = '--' if param.stderr is None else gformat(param.stderr, 11)
        widgets['params'].AppendItem((name, gformat(param.value, 11), err_text))
def onBuildPCAModel(self, event=None):
    """Train a PCA model from the checked file groups and show its stats."""
    self.wids['status'].SetLabel(" training model...")
    form = self.read_form()
    selected_groups = self.controller.filelist.GetCheckedStrings()
    groups = [self.controller.file_groups[cn] for cn in selected_groups]
    # ensure each group has been normalized before training
    for gname in groups:
        grp = self.controller.get_group(gname)
        if not hasattr(grp, 'norm'):
            self.parent.nb_panels[0].process(grp)
    groups = ', '.join(groups)
    opts = dict(groups=groups, arr='norm', xmin=form['xmin'], xmax=form['xmax'])
    # build and run the pca_train command in the larch interpreter
    cmd = "pca_result = pca_train([{groups}], arrayname='{arr}', xmin={xmin:.2f}, xmax={xmax:.2f})"
    self.larch_eval(cmd.format(**opts))
    r = self.result = self.larch_get('pca_result')
    ncomps = len(r.components)
    wmin = form['weight_min']
    # number of components whose variance exceeds the weight threshold
    nsig = len(np.where(r.variances > wmin)[0])
    status = " PCA model built, %d components, %d with weight > %.3f"
    self.wids['status'].SetLabel(status % (ncomps, nsig, wmin))
    self.wids['max_components'].SetValue(min(ncomps, 1+nsig))
    for b in ('fit_group',):  # , 'save_model'):
        self.wids[b].Enable()
    self.wids['stats'].DeleteAllItems()
    for i, val in enumerate(r.variances):
        sig = {True: 'Yes', False: 'No'}[val > wmin]
        self.wids['stats'].AppendItem((' #%d' % (i+1), gformat(val), sig))
    self.plot_pca_components()
    self.plot_pca_weights()
def onSaveAllStats(self, evt=None):
    "Save All Statistics and Weights "
    deffile = "LinearFitStats.csv"
    # FIX: wildcard label read 'CVS Files' -- typo for 'CSV Files'
    wcards = 'CSV Files (*.csv)|*.csv|All files (*.*)|*.*'
    path = FileSave(self, 'Save Statistics Report', default_file=deffile, wildcard=wcards)
    if path is None:
        return
    form = self.form
    out = ['# Larch Linear Fit Statistics Report (best results) %s' % time.ctime(),
           '# Array name: %s' % form['arrayname'],
           '# Energy fit range: [%f, %f]' % (form['elo'], form['ehi'])]
    # build the column-label row, padding each label to at least 12 chars
    label = [('Data Set' + ' ' * 25)[:25], 'n_varys', 'chi-square',
             'chi-square_red', 'akaike_info', 'bayesian_info']
    label.extend(form['comp_names'])
    label.append('Total')
    for i in range(len(label)):
        if len(label[i]) < 12:
            label[i] = (" %s " % label[i])[:12]
    label = ', '.join(label)
    out.append('# %s' % label)
    # one row per dataset: fit statistics, then component weights
    for name, dgroup in self.datasets.items():
        res = dgroup.lcf_result[0]
        label = dgroup.filename
        if len(label) < 25:
            label = (label + ' ' * 25)[:25]
        dat = [label]
        for attr in ('nvarys', 'chisqr', 'redchi', 'aic', 'bic'):
            dat.append(gformat(getattr(res.result, attr), 12))
        for cname in form['comp_names'] + ['total']:
            val = 0
            if cname in res.params:
                val = res.params[cname].value
            dat.append(gformat(val, 12))
        out.append(', '.join(dat))
    out.append('')
    with open(path, 'w') as fh:
        fh.write('\n'.join(out))
def report(self):
    "return text report of parameters"
    # NOTE(review): none of these unpacked values are used below; the call
    # is presumably kept for its side effects -- confirm
    (deg, s02, e0, ei, delr, ss2, c3, c4) = self.__path_params()
    geomlabel = ' atom x y z ipot'
    geomformat = ' %4s % .4f, % .4f, % .4f %i'
    out = [' Path %s, Feff.dat file = %s' % (self.label, self.filename)]
    out.append(geomlabel)
    # one line per atom in the path geometry; ipot 0 marks the absorber
    for atsym, iz, ipot, amass, x, y, z in self.geom:
        s = geomformat % (atsym, x, y, z, ipot)
        if ipot == 0:
            s = "%s (absorber)" % s
        out.append(s)
    stderrs = {}  # NOTE(review): never used
    out.append(' {:7s}= {:s}'.format('reff', gformat(self._feffdat.reff)))
    for pname in ('degen', 's02', 'e0', 'r', 'deltar', 'sigma2', 'third', 'fourth', 'ei'):
        val = strval = getattr(self, pname, 0)
        parname = fix_varname(PATHPAR_FMT % (pname, self.label))
        std = None
        if pname == 'r':
            # 'r' is derived: reff plus the fitted deltar parameter
            # NOTE(review): raises AttributeError if the deltar parameter is
            # absent (par is None) -- confirm this cannot happen in practice
            parname = fix_varname(PATHPAR_FMT % ('deltar', self.label))
            par = self.params.get(parname, None)
            val = par.value + self._feffdat.reff
            # NOTE(review): assumes self.deltar is a string expression here;
            # a numeric deltar would make this concatenation raise TypeError
            strval = 'reff + ' + getattr(self, 'deltar', 0)
            std = par.stderr
        else:
            par = self.params.get(parname, None)
            if par is not None:
                val = par.value
                std = par.stderr
        if std is None or std <= 0:
            svalue = gformat(val)
        else:
            svalue = "{:s} +/-{:s}".format(gformat(val), gformat(std))
        if pname == 's02':
            pname = 'n*s02'
        svalue = " {:7s}= {:s}".format(pname, svalue)
        # when the attribute was a string expression, echo it after the value
        # (six.string_types: this variant retains py2/py3 compatibility)
        if isinstance(strval, six.string_types):
            svalue = "{:s} '{:s}'".format(svalue, strval)
        # suppress all-zero higher-order cumulants from the report
        if val == 0 and pname in ('third', 'fourth', 'ei'):
            continue
        out.append(svalue)
    return '\n'.join(out)
def onSaveAllStats(self, evt=None):
    "Save All Statistics and Weights "
    deffile = "LinearFitStats.csv"
    # FIX: wildcard label read 'CVS Files' -- typo for 'CSV Files'
    wcards = 'CSV Files (*.csv)|*.csv|All files (*.*)|*.*'
    path = FileSave(self, 'Save Statistics Report', default_file=deffile, wildcard=wcards)
    if path is None:
        return
    form = self.form
    out = ['# Larch Linear Fit Statistics Report (best results) %s' % time.ctime(),
           '# Array name: %s' % form['arrayname'],
           '# Energy fit range: [%f, %f]' % (form['elo'], form['ehi'])]
    # build the column-label row, padding each label to at least 12 chars
    label = [('Data Set' + ' '*25)[:25], 'n_varys', 'chi-square',
             'chi-square_red', 'akaike_info', 'bayesian_info']
    label.extend(form['comp_names'])
    label.append('Total')
    for i in range(len(label)):
        if len(label[i]) < 12:
            label[i] = (" %s " % label[i])[:12]
    label = ', '.join(label)
    out.append('# %s' % label)
    # one row per dataset: fit statistics, then component weights
    for name, dgroup in self.datasets.items():
        res = dgroup.lcf_result[0]
        label = dgroup.filename
        if len(label) < 25:
            label = (label + ' '*25)[:25]
        dat = [label]
        for attr in ('nvarys', 'chisqr', 'redchi', 'aic', 'bic'):
            dat.append(gformat(getattr(res.result, attr), 12))
        for cname in form['comp_names'] + ['total']:
            val = 0
            if cname in res.params:
                val = res.params[cname].value
            dat.append(gformat(val, 12))
        out.append(', '.join(dat))
    out.append('')
    with open(path, 'w') as fh:
        fh.write('\n'.join(out))
def onFitGroup(self, event=None):
    """Fit the current group against the trained PCA model, show chi-square."""
    form = self.read_form()
    if self.result is None:
        # FIX: previously only printed the warning and fell through to run
        # pca_fit anyway; without a trained model there is nothing to fit
        print("need result first!")
        return
    ncomps = int(form['max_components'])
    gname = form['groupname']
    cmd = "pca_fit(%s, pca_result, ncomps=%d)" % (gname, ncomps)
    self.larch_eval(cmd)
    dgroup = self.controller.get_group()
    pca_chisquare = dgroup.pca_result.chi_square
    self.wids['fit_chi2'].SetLabel(gformat(pca_chisquare))
    self.plot_pca_fit()
def onSaveCSV(self, event=None):
    """Prompt for a file name and write the results table out as CSV."""
    dlg = wx.FileDialog(self, message="Save CSV Data File",
                        defaultDir=os.getcwd(),
                        defaultFile=self.save_csvfile,
                        wildcard=FILE_WILDCARDS,
                        style=wx.FD_SAVE)
    fname = dlg.GetPath() if dlg.ShowModal() == wx.ID_OK else None
    dlg.Destroy()
    if fname is None:
        return
    # remember just the file name for the next save
    self.save_csvfile = os.path.split(fname)[1]
    rows = ["%s, %s, %s" % (r[0], gformat(r[1]), gformat(r[2]))
            for r in self.wids['table'].table.data]
    rows.append('')
    with open(fname, 'w') as fh:
        fh.write('\n'.join(rows))
    self.write_message('Wrote CSV File %s ' % fname)
def onFitGroup(self, event=None):
    """Fit the current group against the trained PCA model and update
    chi-square, data-scale and per-component weights in the widgets."""
    form = self.read_form()
    if self.result is None:
        # NOTE(review): only warns -- execution falls through and pca_fit is
        # attempted anyway; a 'return' here may be intended
        print("need result first!")
    ncomps = int(form['max_components'])
    gname = form['groupname']
    cmd = "pca_fit(%s, pca_result, ncomps=%d)" % (gname, ncomps)
    self.larch_eval(cmd)
    dgroup = self.controller.get_group()
    # NOTE(review): pca_chisquare is assigned but unused below
    pca_chisquare = dgroup.pca_result.chi_square
    self.wids['fit_chi2'].SetLabel(gformat(dgroup.pca_result.chi_square))
    self.wids['fit_dscale'].SetLabel(gformat(dgroup.pca_result.data_scale))
    # reset weight column, then fill in the fitted weights
    grid_data = self.wids['table'].table.data
    for g in grid_data:
        g[3] = '-'
    for i, wt in enumerate(dgroup.pca_result.weights):
        grid_data[i][3] = gformat(wt)
    self.wids['table'].table.data = grid_data
    self.wids['table'].table.View.Refresh()
    self.plot_pca_fit()
def onSaveCSV(self, event=None):
    """Prompt for a file name and write the results table out as CSV."""
    dlg = wx.FileDialog(self, message="Save CSV Data File",
                        defaultDir=os.getcwd(),
                        defaultFile=self.save_csvfile,
                        wildcard=FILE_WILDCARDS, style=wx.FD_SAVE)
    fname = None
    if dlg.ShowModal() == wx.ID_OK:
        fname = dlg.GetPath()
    dlg.Destroy()
    if fname is None:
        # user cancelled the dialog
        return
    # remember just the file name for the next save
    self.save_csvfile = os.path.split(fname)[1]
    buff = []
    for row in self.wids['table'].table.data:
        buff.append("%s, %s, %s" % (row[0], gformat(row[1]), gformat(row[2])))
    buff.append('')
    with open(fname, 'w') as fh:
        fh.write('\n'.join(buff))
    self.write_message('Wrote CSV File %s ' % fname)
def onFitGroup(self, event=None):
    """Fit the current group against the trained PCA model and update
    chi-square, data-scale and per-component weights in the widgets."""
    form = self.read_form()
    if self.result is None:
        # NOTE(review): only warns -- execution falls through and pca_fit is
        # attempted anyway; a 'return' here may be intended
        print("need result first!")
    ncomps = int(form['max_components'])
    gname = form['groupname']
    cmd = "pca_fit(%s, pca_result, ncomps=%d)" % (gname, ncomps)
    self.larch_eval(cmd)
    dgroup = self.controller.get_group()
    # NOTE(review): pca_chisquare is assigned but unused below
    pca_chisquare = dgroup.pca_result.chi_square
    self.wids['fit_chi2'].SetLabel(gformat(dgroup.pca_result.chi_square))
    self.wids['fit_dscale'].SetLabel(gformat(dgroup.pca_result.data_scale))
    # reset weight column, then fill in the fitted weights
    grid_data = self.wids['table'].table.data
    for g in grid_data:
        g[3] = '-'
    for i, wt in enumerate(dgroup.pca_result.weights):
        grid_data[i][3] = gformat(wt)
    self.wids['table'].table.data = grid_data
    self.wids['table'].table.View.Refresh()
    self.plot_pca_fit()
def show_results(self):
    """Fill the fit-statistics table from the peak-fit history and display
    the most recent fit."""
    cur = self.get_fitresult()
    wids = self.wids
    wids['stats'].DeleteAllItems()
    # one row per historical fit: index plus fit statistics
    for i, res in enumerate(self.peakfit_history):
        args = ['%2.2d' % (i+1)]
        for attr in ('ndata', 'nvarys', 'nfev', 'chisqr', 'redchi', 'aic', 'bic'):
            val = getattr(res.result, attr)
            # integers shown plainly, floats via gformat
            if isinstance(val, int):
                val = '%d' % val
            else:
                val = gformat(val, 11)
            args.append(val)
        wids['stats'].AppendItem(tuple(args))
    wids['data_title'].SetLabel(self.datagroup.filename)
    self.show_fitresult(nfit=0)
def show_results(self):
    """Fill the fit-statistics table from the peak-fit history and display
    the most recent fit."""
    cur = self.get_fitresult()
    stats = self.wids['stats']
    stats.DeleteAllItems()
    # one row per historical fit: index plus fit statistics
    for i, res in enumerate(self.peakfit_history):
        row = ['%2.2d' % (i+1)]
        for attr in ('ndata', 'nvarys', 'nfev', 'chisqr', 'redchi', 'aic', 'bic'):
            val = getattr(res.result, attr)
            # integers shown plainly, floats via gformat
            row.append('%d' % val if isinstance(val, int) else gformat(val, 11))
        stats.AppendItem(tuple(row))
    self.wids['data_title'].SetLabel(self.datagroup.filename)
    self.show_fitresult(nfit=0)
def export(self, filename):
    """save result to text file"""
    # header: fit script and fit report, each commented out with '#'
    buff = ['# XRF Fit %s: %s' % (self.mca.label, self.label),
            '#### Fit Script:']
    for a in self.script.split('\n'):
        buff.append('# %s' % a)
    buff.append('#' * 60)
    buff.append('#### Fit Report:')
    for a in self.fit_report.split('\n'):
        buff.append('# %s' % a)
    buff.append('#' * 60)
    # column labels: fixed arrays first, then one per fit component
    labels = ['energy', 'counts', 'best_fit', 'best_energy',
              'fit_window', 'fit_weight', 'attenuation']
    labels.extend(list(self.comps.keys()))
    buff.append('# %s' % (' '.join(labels)))
    npts = len(self.mca.energy)
    # data table: one row per energy point
    for i in range(npts):
        dline = [gformat(self.mca.energy[i]),
                 gformat(self.mca.counts[i]),
                 gformat(self.best_fit[i]),
                 gformat(self.best_en[i]),
                 gformat(self.fit_window[i]),
                 gformat(self.fit_weight[i]),
                 gformat(self.atten[i])]
        for c in self.comps.values():
            dline.append(gformat(c[i]))
        buff.append(' '.join(dline))
    buff.append('\n')
    with open(filename, 'w') as fh:
        fh.write('\n'.join(buff))
def export_modelresult(result, filename='fitresult.xdi', datafile=None,
                       ydata=None, yerr=None, _larch=None, **kwargs):
    """
    export an lmfit ModelResult to an XDI data file

    Arguments
    ---------
     result       ModelResult, required
     filename     name of output file ['fitresult.xdi']
     datafile     name of data file [`None`]
     ydata        data array used for fit [`None`]
     yerr         data error array used for fit [`None`]

    Notes
    -----
    keyword arguments should include independent variables

    Example
    -------
      result = model.fit(ydata, params, x=x)
      export_modelresult(result, 'fitresult_1.xdi', x=x, datafile='XYData.txt')
    """
    if not isinstance(result, ModelResult):
        # FIX: error message read 'ModelReult'
        raise ValueError("export_fit needs a lmfit ModelResult")
    header = ["XDI/1.1 Lmfit Result File"]
    hadd = header.append
    if datafile is not None:
        hadd(" Datafile.name: %s " % datafile)
    else:
        # FIX: header value read '<unknnown>'
        hadd(" Datafile.name: <unknown>")
    ndata = len(result.best_fit)
    # assemble output columns: independent vars, data, fits, components
    columns = OrderedDict()
    for aname in result.model.independent_vars:
        val = kwargs.get(aname, None)
        if val is not None and len(val) == ndata:
            columns[aname] = val
    if ydata is not None:
        columns['ydata'] = ydata
    if yerr is not None:
        columns['yerr'] = yerr
    columns['best_fit'] = result.best_fit
    columns['init_fit'] = result.init_fit
    # FIX: eval_uncertainty() requires a valid stderr for every parameter;
    # fall back to zeros when any stderr is missing (matches the guarded
    # variant of this exporter elsewhere in the project)
    delta_fit = 0.0*result.best_fit
    if not any(p.stderr is None for p in result.params.values()):
        delta_fit = result.eval_uncertainty(result.params, **kwargs)
    columns['delta_fit'] = delta_fit
    if len(result.model.components) > 1:
        comps = result.eval_components(result.params, **kwargs)
        for name, val in comps.items():
            columns[name] = val
    clabel = []
    for i, cname in enumerate(columns):
        hadd(" Column.%i: %s" % (i+1, cname))
        clabel.append('%15s ' % cname)
    # fit statistics section
    hadd("Fit.Statistics: Start here")
    hadd(" Fit.model_name: %s" % result.model.name)
    hadd(" Fit.method: %s" % result.method)
    hadd(" Fit.n_function_evals: %s" % getfloat_attr(result, 'nfev'))
    hadd(" Fit.n_data_points: %s" % getfloat_attr(result, 'ndata'))
    hadd(" Fit.n_variables: %s" % getfloat_attr(result, 'nvarys'))
    hadd(" Fit.chi_square: %s" % getfloat_attr(result, 'chisqr', length=11))
    hadd(" Fit.reduced_chi_square: %s" % getfloat_attr(result, 'redchi', length=11))
    hadd(" Fit.akaike_info_crit: %s" % getfloat_attr(result, 'aic', length=11))
    hadd(" Fit.bayesian_info_crit: %s" % getfloat_attr(result, 'bic', length=11))
    # parameter section: one line per parameter, names padded to align
    hadd("Param.Statistics: Start here")
    namelen = max([len(p) for p in result.params])
    for name, par in result.params.items():
        space = ' '*(namelen+1-len(name))
        nout = "Param.%s:%s" % (name, space)
        inval = '(init= ?)'
        if par.init_value is not None:
            inval = '(init=% .7g)' % par.init_value
        try:
            sval = gformat(par.value)
        except (TypeError, ValueError):
            sval = 'Non Numeric Value?'
        if par.stderr is not None:
            serr = gformat(par.stderr, length=9)
            sval = '%s +/-%s' % (sval, serr)
        if par.vary:
            bounds = "[%s: %s]" % (gformat(par.min), gformat(par.max))
            hadd(" %s %s %s %s" % (nout, sval, bounds, inval))
        elif par.expr is not None:
            hadd(" %s %s == '%s'" % (nout, sval, par.expr))
        else:
            hadd(" %s % .7g (fixed)" % (nout, par.value))
    # append the full lmfit report, commented
    hadd("//////// Fit Report ////////")
    for r in result.fit_report().split('\n'):
        hadd(" %s" % r)
    hadd("-" * 77)
    hadd("".join(clabel)[1:])
    header[0] = "XDI/1.1 Lmfit Result File %i header lines" % (len(header))
    # transpose columns into rows of formatted values
    dtable = []
    for key, dat in columns.items():
        dtable.append(dat)
    dtable = np.array(dtable).transpose()
    datatable = []
    for i in range(ndata):
        col = dtable[i, :]*1.0
        row = []
        for cval in col:
            try:
                val = gformat(cval, length=15)
            except:
                val = repr(cval)
            row.append(val)
        datatable.append(" ".join(row))
    datatable.append('')
    with open(filename, 'w') as fh:
        fh.write("\n".join(['#%s' % s for s in header]))
        fh.write("\n")
        fh.write("\n".join(datatable))
def test_gformat(test_input, expected):
    """Check that gformat renders *test_input* as *expected*."""
    assert gformat(test_input) == expected
def feffit_report(result, min_correl=0.1, with_paths=True, _larch=None):
    """return a printable report of fit for feffit

    Parameters:
    ------------
      result:      Feffit result, output group from feffit()
      min_correl:  minimum correlation to report [0.1]
      with_paths:  boolean (True/False) for whether to list all paths [True]

    Returns:
    ---------
      printable string of report.
    """
    # validate input: must look like a feffit() output group
    input_ok = False
    try:
        params = result.params
        datasets = result.datasets
        input_ok = True
    except:
        pass
    if not input_ok:
        print( 'must pass output of feffit()!')
        return
    topline = '=================== FEFFIT RESULTS ===================='
    header = '[[%s]]'
    varformat = ' %12s = %s +/-%s (init= %s)'
    fixformat = ' %12s = %s (fixed)'
    exprformat = ' %12s = %s +/-%s = \'%s\''
    out = [topline, header % 'Statistics']
    out.append(' nvarys, npts = %i, %i' % (result.nvarys, result.ndata))
    out.append(' n_independent = %.3f' % (result.n_independent))
    out.append(' chi_square = %s' % gformat(result.chi_square))
    out.append(' reduced chi_square = %s' % gformat(result.chi_reduced))
    out.append(' r-factor = %s' % gformat(result.rfactor))
    out.append(' Akaike info crit = %s' % gformat(result.aic))
    out.append(' Bayesian info crit = %s' % gformat(result.bic))
    out.append(' ')
    if len(datasets) == 1:
        out.append(header % 'Data')
    else:
        out.append(header % 'Datasets (%i)' % len(datasets))
    # per-dataset section: transform settings and noise estimates
    for i, ds in enumerate(datasets):
        tr = ds.transform
        if len(datasets) > 1:
            out.append(' dataset %i:' % (i+1))
        if isinstance(tr.kweight, Iterable):
            # multiple k-weights: epsilon values are per-kweight sequences
            if isinstance(ds.epsilon_k[0], np.ndarray):
                msg = []
                for eps in ds.epsilon_k:
                    msg.append('Array(mean=%s, std=%s)' % (gformat(eps.mean()).strip(),
                                                           gformat(eps.std()).strip()))
                eps_k = ', '.join(msg)
            else:
                eps_k = ', '.join([gformat(eps).strip() for eps in ds.epsilon_k])
            eps_r = ', '.join([gformat(eps).strip() for eps in ds.epsilon_r])
            kweigh = ', '.join(['%i' % kwe for kwe in tr.kweight])
        else:
            if isinstance(ds.epsilon_k, np.ndarray):
                eps_k = 'Array(mean=%s, std=%s)' % (gformat(ds.epsilon_k.mean()).strip(),
                                                    gformat(ds.epsilon_k.std()).strip())
            else:
                eps_k = gformat(ds.epsilon_k)
            eps_r = gformat(ds.epsilon_r).strip()
            kweigh = '%i' % tr.kweight
        out.append(' fit space = \'%s\'' % (tr.fitspace))
        out.append(' r-range = %.3f, %.3f' % (tr.rmin, tr.rmax))
        out.append(' k-range = %.3f, %.3f' % (tr.kmin, tr.kmax))
        kwin = ' k window, dk = \'%s\', %.3f' % (tr.window, tr.dk)
        if tr.dk2 is not None:
            kwin = "%s, %.3f" % (kwin, tr.dk2)
        out.append(kwin)
        pathfiles = [p.filename for p in ds.pathlist]
        out.append(' paths used in fit = %s' % (repr(pathfiles)))
        out.append(' k-weight = %s' % kweigh)
        out.append(' epsilon_k = %s' % eps_k)
        out.append(' epsilon_r = %s' % eps_r)
        out.append(' n_independent = %.3f' % (ds.n_idp))
    out.append(' ')
    out.append(header % 'Variables')
    # exprs = []
    for name, par in params.items():
        if len(name) < 14:
            name = (name + ' '*14)[:14]
        if isParameter(par):
            if par.vary:
                stderr = 'unknown'
                if par.stderr is not None:
                    stderr = gformat(par.stderr)
                out.append(varformat % (name, gformat(par.value),
                                        stderr, gformat(par.init_value)))
            elif par.expr is not None:
                stderr = 'unknown'
                if par.stderr is not None:
                    stderr = gformat(par.stderr)
                out.append(exprformat % (name, gformat(par.value),
                                         stderr, par.expr))
            else:
                # FIX: was 'gformaat(par.value)' -- a NameError whenever a
                # fixed parameter is reported
                out.append(fixformat % (name, gformat(par.value)))
    # if len(exprs) > 0:
    #     out.append(header % 'Constraint Expressions')
    #     out.extend(exprs)
    covar_vars = result.var_names
    if len(covar_vars) > 0:
        out.append(' ')
        out.append(header % 'Correlations' +
                   ' (unreported correlations are < % .3f)' % min_correl)
        correls = {}
        for i, name in enumerate(covar_vars):
            par = params[name]
            if not par.vary:
                continue
            if hasattr(par, 'correl') and par.correl is not None:
                for name2 in covar_vars[i+1:]:
                    if name != name2 and name2 in par.correl:
                        correls["%s, %s" % (name, name2)] = par.correl[name2]
        # report strongest correlations first, down to min_correl
        sort_correl = sorted(correls.items(), key=lambda it: abs(it[1]))
        sort_correl.reverse()
        for name, val in sort_correl:
            if abs(val) < min_correl:
                break
            if len(name) < 20:
                name = (name + ' '*20)[:20]
            out.append(' %s = % .3f' % (name, val))
    if with_paths:
        out.append(' ')
        out.append(header % 'Paths')
        for ids, ds in enumerate(datasets):
            if len(datasets) > 1:
                out.append(' dataset %i:' % (ids+1))
            for p in ds.pathlist:
                out.append('%s\n' % p.report())
    out.append('='*len(topline))
    return '\n'.join(out)
def feffit_report(result, min_correl=0.1, with_paths=True, _larch=None):
    """return a printable report of fit for feffit

    Parameters:
    ------------
      result:      Feffit result, output group from feffit()
      min_correl:  minimum correlation to report [0.1]
      with_paths:  boolean (True/False) for whether to list all paths [True]

    Returns:
    ---------
      printable string of report.
    """
    # validate input: must look like a feffit() output group
    input_ok = False
    try:
        params = result.params
        datasets = result.datasets
        input_ok = True
    except:
        pass
    if not input_ok:
        print( 'must pass output of feffit()!')
        return
    topline = '=================== FEFFIT RESULTS ===================='
    header = '[[%s]]'
    varformat = ' %12s = %s +/-%s (init= %s)'
    fixformat = ' %12s = %s (fixed)'
    exprformat = ' %12s = %s +/-%s = \'%s\''
    out = [topline, header % 'Statistics']
    out.append(' nvarys, npts = %i, %i' % (result.nvarys, result.ndata))
    out.append(' n_independent = %.3f' % (result.n_independent))
    out.append(' chi_square = %s' % gformat(result.chi_square))
    out.append(' reduced chi_square = %s' % gformat(result.chi_reduced))
    out.append(' r-factor = %s' % gformat(result.rfactor))
    out.append(' Akaike info crit = %s' % gformat(result.aic))
    out.append(' Bayesian info crit = %s' % gformat(result.bic))
    out.append(' ')
    if len(datasets) == 1:
        out.append(header % 'Data')
    else:
        out.append(header % 'Datasets (%i)' % len(datasets))
    # per-dataset section: transform settings and noise estimates
    for i, ds in enumerate(datasets):
        tr = ds.transform
        if len(datasets) > 1:
            out.append(' dataset %i:' % (i+1))
        if isinstance(tr.kweight, Iterable):
            # multiple k-weights: epsilon values are per-kweight sequences
            if isinstance(ds.epsilon_k[0], np.ndarray):
                msg = []
                for eps in ds.epsilon_k:
                    msg.append('Array(mean=%s, std=%s)' % (gformat(eps.mean()).strip(),
                                                           gformat(eps.std()).strip()))
                eps_k = ', '.join(msg)
            else:
                eps_k = ', '.join([gformat(eps).strip() for eps in ds.epsilon_k])
            eps_r = ', '.join([gformat(eps).strip() for eps in ds.epsilon_r])
            kweigh = ', '.join(['%i' % kwe for kwe in tr.kweight])
        else:
            if isinstance(ds.epsilon_k, np.ndarray):
                eps_k = 'Array(mean=%s, std=%s)' % (gformat(ds.epsilon_k.mean()).strip(),
                                                    gformat(ds.epsilon_k.std()).strip())
            else:
                eps_k = gformat(ds.epsilon_k)
            eps_r = gformat(ds.epsilon_r).strip()
            kweigh = '%i' % tr.kweight
        out.append(' fit space = \'%s\'' % (tr.fitspace))
        out.append(' r-range = %.3f, %.3f' % (tr.rmin, tr.rmax))
        out.append(' k-range = %.3f, %.3f' % (tr.kmin, tr.kmax))
        kwin = ' k window, dk = \'%s\', %.3f' % (tr.window, tr.dk)
        if tr.dk2 is not None:
            kwin = "%s, %.3f" % (kwin, tr.dk2)
        out.append(kwin)
        pathfiles = [p.filename for p in ds.pathlist]
        out.append(' paths used in fit = %s' % (repr(pathfiles)))
        out.append(' k-weight = %s' % kweigh)
        out.append(' epsilon_k = %s' % eps_k)
        out.append(' epsilon_r = %s' % eps_r)
        out.append(' n_independent = %.3f' % (ds.n_idp))
    out.append(' ')
    out.append(header % 'Variables')
    # exprs = []
    for name, par in params.items():
        # var = getattr(params, name)
        # print(name, par, dir(par))
        if len(name) < 14:
            name = (name + ' '*14)[:14]
        if isParameter(par):
            if par.vary:
                stderr = 'unknown'
                if par.stderr is not None:
                    stderr = gformat(par.stderr)
                out.append(varformat % (name, gformat(par.value),
                                        stderr, gformat(par.init_value)))
            elif par.expr is not None:
                stderr = 'unknown'
                if par.stderr is not None:
                    stderr = gformat(par.stderr)
                out.append(exprformat % (name, gformat(par.value),
                                         stderr, par.expr))
            else:
                out.append(fixformat % (name, gformat(par.value)))
    # if len(exprs) > 0:
    #     out.append(header % 'Constraint Expressions')
    #     out.extend(exprs)
    covar_vars = result.var_names
    if len(covar_vars) > 0:
        out.append(' ')
        out.append(header % 'Correlations' +
                   ' (unreported correlations are < % .3f)' % min_correl)
        correls = {}
        for i, name in enumerate(covar_vars):
            par = params[name]
            if not par.vary:
                continue
            if hasattr(par, 'correl') and par.correl is not None:
                for name2 in covar_vars[i+1:]:
                    if name != name2 and name2 in par.correl:
                        correls["%s, %s" % (name, name2)] = par.correl[name2]
        # report strongest correlations first, down to min_correl
        sort_correl = sorted(correls.items(), key=lambda it: abs(it[1]))
        sort_correl.reverse()
        for name, val in sort_correl:
            if abs(val) < min_correl:
                break
            if len(name) < 20:
                name = (name + ' '*20)[:20]
            out.append(' %s = % .3f' % (name, val))
    if with_paths:
        out.append(' ')
        out.append(header % 'Paths')
        for ids, ds in enumerate(datasets):
            if len(datasets) > 1:
                out.append(' dataset %i:' % (ids+1))
            for p in ds.pathlist:
                out.append('%s\n' % p.report())
    out.append('='*len(topline))
    return '\n'.join(out)
def upload2xdi(opts, upload_folder):
    """upload result of upload form to XDI file

    Writes the form data in *opts* to a randomized, sanitized .xdi file
    inside *upload_folder* and returns the full path of the written file.
    """
    fname = opts['filename']
    filename = fname.replace('.', '_')
    # randomized prefix avoids collisions; secure_filename sanitizes input
    filename = secure_filename('%s_%s.xdi' % (random_string(4), filename))
    filename = path.abspath(pathjoin(upload_folder, filename))
    buff = ['#XDI/1.0 XASDataLibrary/1.0']
    arrays = opts['data']
    # choose data columns based on measurement mode and available arrays
    dcolumns = [('energy', arrays['energy'])]
    if arrays['i0'] is not None:
        dcolumns.append(('i0', arrays['i0']))
    if opts['mode'] == 'transmission':
        if arrays['itrans'] is not None:
            dcolumns.append(('itrans', arrays['itrans']))
        elif arrays['mu'] is not None:
            dcolumns.append(('mutrans', arrays['mu']))
    else:
        if arrays['ifluor'] is not None:
            dcolumns.append(('ifluor', arrays['ifluor']))
        elif arrays['mu'] is not None:
            dcolumns.append(('mufluor', arrays['mu']))
    if opts['has_reference'] and arrays.get('irefer', None) is not None:
        dcolumns.append(('irefer', arrays['irefer']))
    # XDI Column headers
    icol = 0
    array_labels = []
    for name, darray in dcolumns:
        array_labels.append((" %s " % name)[:13])
        if name == 'energy':
            name = 'energy eV'
        icol += 1
        buff.append('# Column.%d: %s' % (icol, name))
    buff.append('# Mono.d_spacing: %.6f' % float(opts['d_spacing']))
    buff.append('# Mono.name: %s' % opts['mono_name'])
    # optional metadata tags, written only when non-empty in the form
    for tag, attr in (('Beamline.name', 'beamline'),
                      ('Element.symbol', 'elem_sym'),
                      ('Element.edge', 'edge'),
                      ('Scan.start_time', 'collection_date'),
                      ('Data.upload_date', 'upload_date'),
                      ('Data.submitted_by', 'person_name'),
                      ('Sample.name', 'sample_name'),
                      ('Sample.formula', 'sample_formula'),
                      ('Sample.preparation', 'sample_prep'),
                      ('Sample.notes', 'sample_notes')):
        attr = opts.get(attr, '')
        if len(attr) > 0:
            buff.append('# %s: %s' % (tag, attr))
    if opts['has_reference'] and len(opts.get('ref_mode', '')) > 0:
        buff.append('# Reference.mode: %s' % opts['ref_mode'])
    buff.append('# ///')
    comments = opts['comments'].split('\n')
    for c in comments:
        buff.append('# %s' % c)
    buff.append('#-----------------------')
    buff.append('# %s' % (' '.join(array_labels)))
    # data table: one row per point, columns in dcolumns order
    for i in range(opts['npts']):
        row = []
        for name, darray in dcolumns:
            row.append(gformat(darray[i], 15))
        buff.append(' '.join(row))
    buff.append('')
    with open(filename, 'w') as fh:
        fh.write('\n'.join(buff))
    return filename
def export_modelresult(result, filename='fitresult.xdi', datafile=None,
                       ydata=None, yerr=None, _larch=None, **kwargs):
    """
    export an lmfit ModelResult to an XDI data file

    Arguments
    ---------
     result       ModelResult, required
     filename     name of output file ['fitresult.xdi']
     datafile     name of data file [`None`]
     ydata        data array used for fit [`None`]
     yerr         data error array used for fit [`None`]

    Notes
    -----
    keyword arguments should include independent variables

    Example
    -------
      result = model.fit(ydata, params, x=x)
      export_modelresult(result, 'fitresult_1.xdi', x=x, datafile='XYData.txt')
    """
    if not isinstance(result, ModelResult):
        # NOTE(review): 'ModelReult' looks like a typo for 'ModelResult'
        raise ValueError("export_fit needs a lmfit ModelReult")
    header = ["XDI/1.1 Lmfit Result File"]
    hadd = header.append
    if datafile is not None:
        hadd(" Datafile.name: %s " % datafile)
    else:
        # NOTE(review): '<unknnown>' looks like a typo for '<unknown>'
        hadd(" Datafile.name: <unknnown>")
    ndata = len(result.best_fit)
    # assemble output columns: independent vars, data, fits, components
    columns = OrderedDict()
    for aname in result.model.independent_vars:
        val = kwargs.get(aname, None)
        if val is not None and len(val) == ndata:
            columns[aname] = val
    if ydata is not None:
        columns['ydata'] = ydata
    if yerr is not None:
        columns['yerr'] = yerr
    columns['best_fit'] = result.best_fit
    columns['init_fit'] = result.init_fit
    # eval_uncertainty() requires a valid stderr for every parameter;
    # fall back to zeros when any stderr is missing
    delta_fit = 0.0*result.best_fit
    if not any([p.stderr is None for p in result.params.values()]):
        delta_fit = result.eval_uncertainty(result.params, **kwargs)
    columns['delta_fit'] = delta_fit
    if len(result.model.components) > 1:
        comps = result.eval_components(result.params, **kwargs)
        for name, val in comps.items():
            columns[name] = val
    clabel = []
    for i, cname in enumerate(columns):
        hadd(" Column.%i: %s" % (i+1, cname))
        clabel.append('%15s ' % cname)
    # fit statistics section
    hadd("Fit.Statistics: Start here")
    hadd(" Fit.model_name: %s" % result.model.name)
    hadd(" Fit.method: %s" % result.method)
    hadd(" Fit.n_function_evals: %s" % getfloat_attr(result, 'nfev'))
    hadd(" Fit.n_data_points: %s" % getfloat_attr(result, 'ndata'))
    hadd(" Fit.n_variables: %s" % getfloat_attr(result, 'nvarys'))
    hadd(" Fit.chi_square: %s" % getfloat_attr(result, 'chisqr', length=11))
    hadd(" Fit.reduced_chi_square: %s" % getfloat_attr(result, 'redchi', length=11))
    hadd(" Fit.akaike_info_crit: %s" % getfloat_attr(result, 'aic', length=11))
    hadd(" Fit.bayesian_info_crit: %s" % getfloat_attr(result, 'bic', length=11))
    # parameter section: one line per parameter, names padded to align
    hadd("Param.Statistics: Start here")
    namelen = max([len(p) for p in result.params])
    for name, par in result.params.items():
        space = ' '*(namelen+1-len(name))
        nout = "Param.%s:%s" % (name, space)
        inval = '(init= ?)'
        if par.init_value is not None:
            inval = '(init=% .7g)' % par.init_value
        try:
            sval = gformat(par.value)
        except (TypeError, ValueError):
            sval = 'Non Numeric Value?'
        if par.stderr is not None:
            serr = gformat(par.stderr, length=9)
            sval = '%s +/-%s' % (sval, serr)
        if par.vary:
            bounds = "[%s: %s]" % (gformat(par.min), gformat(par.max))
            hadd(" %s %s %s %s" % (nout, sval, bounds, inval))
        elif par.expr is not None:
            hadd(" %s %s == '%s'" % (nout, sval, par.expr))
        else:
            hadd(" %s % .7g (fixed)" % (nout, par.value))
    # append the full lmfit report, commented
    hadd("//////// Fit Report ////////")
    for r in result.fit_report().split('\n'):
        hadd(" %s" % r)
    hadd("-" * 77)
    hadd("".join(clabel)[1:])
    header[0] = "XDI/1.1 Lmfit Result File %i header lines" % (len(header))
    # transpose columns into rows of formatted values
    dtable = []
    for key, dat in columns.items():
        dtable.append(dat)
    dtable = np.array(dtable).transpose()
    datatable = []
    for i in range(ndata):
        col = dtable[i, :]*1.0
        row = []
        for cval in col:
            try:
                val = gformat(cval, length=15)
            except:
                val = repr(cval)
            row.append(val)
        datatable.append(" ".join(row))
    datatable.append('')
    with open(filename, 'w') as fh:
        fh.write("\n".join(['#%s' % s for s in header]))
        fh.write("\n")
        fh.write("\n".join(datatable))
def process(self, dgroup=None, force_mback=False, noskip=False, **kws):
    """ handle process (pre-edge/normalize) of XAS data from XAS form """
    # re-entrancy guard: skip while a previous process() is in flight
    if self.skip_process and not noskip:
        return
    if dgroup is None:
        dgroup = self.controller.get_group()
    if dgroup is None:
        return
    self.skip_process = True
    conf = self.get_config(dgroup)
    dgroup.custom_plotopts = {}
    form = self.read_form()
    form['group'] = dgroup.groupname
    # non-XAS data: just copy/scale ydat into mu/norm and return
    if dgroup.datatype != 'xas':
        self.skip_process = False
        dgroup.mu = dgroup.ydat * 1.0
        opts = {'group': dgroup.groupname, 'scale': conf.get('scale', 1.0)}
        self.larch_eval("{group:s}.scale = {scale:.8f}".format(**opts))
        self.larch_eval(
            "{group:s}.norm = {scale:.8f}*{group:s}.ydat".format(**opts))
        return
    # resolve energy units, prompting the user for conversion if not eV
    en_units = getattr(dgroup, 'energy_units', None)
    if en_units is None:
        en_units = guess_energy_units(dgroup.energy)
    if en_units != 'eV':
        mono_dspace = getattr(dgroup, 'mono_dspace', 1)
        dlg = EnergyUnitsDialog(self.parent, dgroup.energy,
                                unitname=en_units, dspace=mono_dspace)
        res = dlg.GetResponse()
        dlg.Destroy()
        if res.ok:
            en_units = res.units
            dgroup.mono_dspace = res.dspace
            dgroup.xdat = dgroup.energy = res.energy
    dgroup.energy_units = en_units
    e0 = form['e0']
    edge_step = form['edge_step']
    # build pre_edge() arguments; e0/step only when not auto-determined
    copts = [dgroup.groupname]
    if not form['auto_e0']:
        if e0 < max(dgroup.energy) and e0 > min(dgroup.energy):
            copts.append("e0=%.4f" % float(e0))
    if not form['auto_step']:
        copts.append("step=%s" % gformat(float(edge_step)))
    for attr in ('pre1', 'pre2', 'nvict', 'nnorm', 'norm1', 'norm2'):
        # unset form values are passed through explicitly as None
        if form[attr] is None:
            copts.append("%s=None" % attr)
        else:
            copts.append("%s=%.2f" % (attr, form[attr]))
    self.larch_eval("pre_edge(%s)" % (', '.join(copts)))
    self.larch_eval(
        "{group:s}.norm_poly = 1.0*{group:s}.norm".format(**form))
    # choose normalization method: polynomial (default), mback, or area
    norm_method = form['norm_method'].lower()
    form['normmeth'] = 'poly'
    if force_mback or norm_method.startswith('mback'):
        form['normmeth'] = 'mback'
        copts = [dgroup.groupname]
        copts.append("z=%d" % atomic_number(form['atsym']))
        copts.append("edge='%s'" % form['edge'])
        for attr in ('pre1', 'pre2', 'nvict', 'nnorm', 'norm1', 'norm2'):
            if form[attr] is None:
                copts.append("%s=None" % attr)
            else:
                copts.append("%s=%.2f" % (attr, form[attr]))
        self.larch_eval("mback_norm(%s)" % (', '.join(copts)))
        if form['auto_step']:
            norm_expr = """{group:s}.norm = 1.0*{group:s}.norm_{normmeth:s}
{group:s}.edge_step = 1.0*{group:s}.edge_step_{normmeth:s}"""
            self.larch_eval(norm_expr.format(**form))
        else:
            # user-supplied step: rescale norm to the requested edge_step
            norm_expr = """{group:s}.norm = 1.0*{group:s}.norm_{normmeth:s}
{group:s}.norm *= {group:s}.edge_step_{normmeth:s}/{edge_step:.8f}"""
            self.larch_eval(norm_expr.format(**form))
    if norm_method.startswith('area'):
        form['normmeth'] = 'area'
        expr = """{group:s}.norm = 1.0*{group:s}.norm_{normmeth:s}
{group:s}.edge_step = 1.0*{group:s}.edge_step_{normmeth:s}"""
        self.larch_eval(expr.format(**form))
    self.make_dnormde(dgroup)
    # push computed values back into the GUI widgets
    if form['auto_e0']:
        self.wids['e0'].SetValue(dgroup.e0)
    if form['auto_step']:
        self.wids['step'].SetValue(dgroup.edge_step)
        autoset_fs_increment(self.wids['step'], dgroup.edge_step)
    self.wids['atsym'].SetStringSelection(dgroup.atsym)
    self.wids['edge'].SetStringSelection(dgroup.edge)
    self.set_nnorm_widget(dgroup.pre_edge_details.nnorm)
    # persist results into the group's stored configuration
    for attr in ('e0', 'edge_step'):
        conf[attr] = getattr(dgroup, attr)
    for attr in ('pre1', 'pre2', 'norm1', 'norm2'):
        conf[attr] = val = getattr(dgroup.pre_edge_details, attr, None)
        if val is not None:
            self.wids[attr].SetValue(val)
    if hasattr(dgroup, 'mback_params'):  # from mback
        conf['atsym'] = getattr(dgroup.mback_params, 'atsym')
        conf['edge'] = getattr(dgroup.mback_params, 'edge')
    self.update_config(conf, dgroup=dgroup)
    # re-enable processing only after pending GUI events settle
    wx.CallAfter(self.unset_skip_process)
def show_results(self, datagroup=None, form=None, larch_eval=None):
    """Display LCF fit results: fill the statistics table, rebuild the
    per-component weights table, and show the first (best) fit.

    Arguments
    ---------
     datagroup   data group with `lcf_result` to display [`None`: keep current]
     form        dict of form values (comp_names, show_e0, ...) [`None`: keep current]
     larch_eval  callable for evaluating larch code [`None`: keep current]
    """
    if datagroup is not None:
        self.datagroup = datagroup
    if form is not None:
        self.form = form
    if larch_eval is not None:
        self.larch_eval = larch_eval
    # bug fix: use the stored form so calling without `form` does not
    # subscript None (consistent with the sibling show_results variant)
    form = self.form
    wids = self.wids
    wids['data_title'].SetLabel(self.datagroup.filename)
    wids['show_e0'].SetValue(form['show_e0'])
    wids['show_fitrange'].SetValue(form['show_fitrange'])
    wids['stats'].DeleteAllItems()
    # show at most the 20 best fits
    results = self.datagroup.lcf_result[:20]
    self.nresults = len(results)
    wids['nfits_title'].SetLabel('showing %i best results' % self.nresults)
    for i, res in enumerate(results):
        args = ['%2.2d' % (i+1)]
        for attr in ('nvarys', 'nfev', 'chisqr', 'redchi', 'aic', 'bic'):
            val = getattr(res.result, attr)
            if isinstance(val, int):
                val = '%d' % val
            else:
                val = gformat(val, 11)
            args.append(val)
        wids['stats'].AppendItem(tuple(args))
    # rebuild the weights panel from scratch for the current components
    wpan = self.wids['weightspanel']
    wpan.DestroyChildren()
    wview = self.wids['weights'] = dv.DataViewListCtrl(wpan, style=DVSTYLE)
    wview.Bind(dv.EVT_DATAVIEW_SELECTION_CHANGED, self.onSelectFitParam)
    wview.AppendTextColumn(' Fit #', width=50)
    for cname in form['comp_names']:
        wview.AppendTextColumn(cname, width=100)
    wview.AppendTextColumn('Total', width=100)
    for col in range(len(form['comp_names'])+2):
        this = wview.Columns[col]
        isort, align = True, wx.ALIGN_RIGHT
        if col == 0:
            align = wx.ALIGN_CENTER
        this.Sortable = isort
        this.Alignment = this.Renderer.Alignment = align
    for i, res in enumerate(results):
        args = ['%2.2d' % (i+1)]
        for cname in form['comp_names'] + ['total']:
            val = '--'
            if cname in res.params:
                val = "%.4f" % res.params[cname].value
            args.append(val)
        wview.AppendItem(tuple(args))
    os = wx.BoxSizer(wx.VERTICAL)
    os.Add(wview, 1, wx.GROW|wx.ALL)
    pack(wpan, os)
    wview.SetMinSize((675, 200))
    # nudge the window height by one pixel to force a relayout
    s1, s2 = self.GetSize()
    if s2 % 2 == 0:
        s2 = s2 + 1
    else:
        s2 = s2 - 1
    self.SetSize((s1, s2))
    self.show_fitresult(0)
    self.Refresh()
def show_fitresult(self, datagroup=None, fit_number=None):
    """Display one fit from the data group's fit history: statistics
    labels, wrapped model description, and the parameter table.

    Arguments
    ---------
     datagroup   data group with `fit_history` [`None`: keep current]
     fit_number  1-based index into fit_history [`None`: latest fit]
    """
    if datagroup is not None:
        self.datagroup = datagroup
    fit_history = getattr(self.datagroup, 'fit_history', [])
    if len(fit_history) < 1:
        print("No fit results to show for datagroup ", self.datagroup)
        # bug fix: must return here -- previously fell through and
        # raised IndexError indexing an empty fit_history
        return
    if fit_number is None:
        fit_number = len(fit_history)
    result = fit_history[fit_number-1]
    wids = self.wids
    wids['method'].SetLabel(result.method)
    wids['ndata'].SetLabel("%d" % result.ndata)
    wids['nvarys'].SetLabel("%d" % result.nvarys)
    wids['nfree'].SetLabel("%d" % result.nfree)
    wids['nfev'].SetLabel("%d" % result.nfev)
    # use compact scientific notation for very small statistics
    if abs(result.redchi) < 1.e-3:
        wids['redchi'].SetLabel("%.5g" % result.redchi)
    else:
        wids['redchi'].SetLabel("%f" % result.redchi)
    if abs(result.chisqr) < 1.e-3:
        wids['chisqr'].SetLabel("%.5g" % result.chisqr)
    else:
        wids['chisqr'].SetLabel("%f" % result.chisqr)
    wids['aic'].SetLabel("%f" % result.aic)
    wids['bic'].SetLabel("%f" % result.bic)
    wids['hist_info'].SetLabel("Fit #%d of %d" % (fit_number, len(fit_history)))
    wids['data_title'].SetLabel(self.datagroup.filename)

    # wrap the model description at ~70 characters, breaking at '+' signs
    desc = result.model_repr
    parts = []
    tlen = 70
    while len(desc) >= tlen:
        i = desc[tlen-1:].find('+')
        if i < 0:
            # bug fix: no '+' left to break at -- previously mis-sliced
            # (consistent with the other show_fitresult implementation)
            break
        parts.append(desc[:tlen+i])
        desc = desc[tlen+i:]
    parts.append(desc)
    wids['model_desc'].SetLabel('\n'.join(parts))

    wids['params'].DeleteAllItems()
    wids['paramsdata'] = []
    for param in result.params.values():
        pname = param.name
        try:
            val = gformat(param.value)
        except (TypeError, ValueError):
            val = ' ??? '
        serr = ' N/A '
        if param.stderr is not None:
            serr = gformat(param.stderr, length=9)
        extra = ' '
        if param.expr is not None:
            extra = ' = %s ' % param.expr
        elif not param.vary:
            # bug fix: test 'fixed' before init_value so fixed parameters
            # are labeled '(fixed)' rather than showing their init value
            # (consistent with the other show_fitresult implementation)
            extra = ' (fixed)'
        elif param.init_value is not None:
            extra = ' (init=% .7g)' % param.init_value
        wids['params'].AppendItem((pname, val, serr, extra))
        wids['paramsdata'].append(pname)
    self.Refresh()
def show_results(self, datagroup=None, form=None, larch_eval=None):
    # Display LCF fit results: fill the statistics table, rebuild the
    # per-component weights table, then show the first (best) fit.
    if datagroup is not None:
        self.datagroup = datagroup
    if form is not None:
        self.form = form
    if larch_eval is not None:
        self.larch_eval = larch_eval
    # always read back the stored values so omitted arguments are safe
    form = self.form
    datagroup = self.datagroup
    # NOTE(review): lcf_history is fetched but never used here -- confirm
    lcf_history = getattr(self.datagroup, 'lcf_history', [])
    wids = self.wids
    wids['data_title'].SetLabel(self.datagroup.filename)
    # wids['show_e0'].SetValue(form['show_e0'])
    wids['show_fitrange'].SetValue(form['show_fitrange'])
    wids['stats'].DeleteAllItems()
    # show at most the 20 best fits
    results = self.datagroup.lcf_result[:20]
    self.nresults = len(results)
    wids['nfits_title'].SetLabel('showing %i best results' % self.nresults)
    for i, res in enumerate(results):
        args = ['%2.2d' % (i + 1)]
        for attr in ('nvarys', 'nfev', 'chisqr', 'redchi', 'aic', 'bic'):
            val = getattr(res.result, attr)
            if isinstance(val, int):
                val = '%d' % val
            else:
                val = gformat(val, 12)
            args.append(val)
        wids['stats'].AppendItem(tuple(args))
    # rebuild the weights panel from scratch for the current components
    wpan = self.wids['weightspanel']
    wpan.DestroyChildren()
    wview = self.wids['weights'] = dv.DataViewListCtrl(wpan, style=DVSTYLE)
    wview.Bind(dv.EVT_DATAVIEW_SELECTION_CHANGED, self.onSelectFitParam)
    wview.AppendTextColumn(' Fit #', width=50)
    for i, cname in enumerate(form['comp_names']):
        wview.AppendTextColumn(cname, width=100)
    wview.AppendTextColumn('Total', width=100)
    # right-align numeric columns, center the fit-number column
    for col in range(len(form['comp_names']) + 2):
        this = wview.Columns[col]
        isort, align = True, wx.ALIGN_RIGHT
        if col == 0:
            align = wx.ALIGN_CENTER
        this.Sortable = isort
        this.Alignment = this.Renderer.Alignment = align
    for i, res in enumerate(results):
        args = ['%2.2d' % (i + 1)]
        for cname in form['comp_names'] + ['total']:
            val = '--'
            if cname in res.params:
                val = "%.4f" % res.params[cname].value
            args.append(val)
        wview.AppendItem(tuple(args))
    os = wx.BoxSizer(wx.VERTICAL)
    os.Add(wview, 1, wx.GROW | wx.ALL)
    pack(wpan, os)
    wview.SetMinSize((675, 200))
    # nudge the window height by one pixel to force a relayout
    s1, s2 = self.GetSize()
    if s2 % 2 == 0:
        s2 = s2 + 1
    else:
        s2 = s2 - 1
    self.SetSize((s1, s2))
    self.show_fitresult(0)
    self.Refresh()
def process(self, dgroup=None, force_mback=False, noskip=False, **kws):
    """ handle process (pre-edge/normalize) of XAS data from XAS form """
    # re-entrancy guard: skip while a previous process() is in flight
    if self.skip_process and not noskip:
        return
    if dgroup is None:
        dgroup = self.controller.get_group()
    if dgroup is None:
        return
    self.skip_process = True
    # NOTE(review): __conf is never used; the call may be kept only for
    # side effects in get_config -- confirm against that method
    __conf = self.get_config(dgroup)
    dgroup.custom_plotopts = {}
    # non-XAS data: just copy ydat into mu and return
    if dgroup.datatype != 'xas':
        self.skip_process = False
        dgroup.mu = dgroup.ydat * 1.0
        return
    # resolve energy units, prompting the user for conversion if not eV
    en_units = getattr(dgroup, 'energy_units', None)
    if en_units is None:
        en_units = 'eV'
        units = guess_energy_units(dgroup.energy)
        if units != 'eV':
            dlg = EnergyUnitsDialog(self.parent, units, dgroup.energy)
            res = dlg.GetResponse()
            dlg.Destroy()
            if res.ok:
                en_units = res.units
                dgroup.xdat = dgroup.energy = res.energy
    dgroup.energy_units = en_units
    form = self.read_form()
    e0 = form['e0']
    edge_step = form['edge_step']
    form['group'] = dgroup.groupname
    # build pre_edge() arguments; e0/step only when not auto-determined
    copts = [dgroup.groupname]
    if not form['auto_e0']:
        if e0 < max(dgroup.energy) and e0 > min(dgroup.energy):
            copts.append("e0=%.4f" % float(e0))
    if not form['auto_step']:
        copts.append("step=%s" % gformat(float(edge_step)))
    for attr in ('pre1', 'pre2', 'nvict', 'nnorm', 'norm1', 'norm2'):
        # NOTE(review): unlike the sibling process() variant, there is no
        # None-guard here; a None form value would raise TypeError in the
        # %-formatting -- confirm form values are always numeric here
        copts.append("%s=%.2f" % (attr, form[attr]))
    self.larch_eval("pre_edge(%s)" % (', '.join(copts)))
    self.larch_eval("{group:s}.norm_poly = 1.0*{group:s}.norm".format(**form))
    # choose normalization method: polynomial (default), mback, or area
    norm_method = form['norm_method'].lower()
    form['normmeth'] = 'poly'
    if force_mback or norm_method.startswith('mback'):
        form['normmeth'] = 'mback'
        copts = [dgroup.groupname]
        copts.append("z=%d" % atomic_number(form['atsym']))
        copts.append("edge='%s'" % form['edge'])
        for attr in ('pre1', 'pre2', 'nvict', 'nnorm', 'norm1', 'norm2'):
            copts.append("%s=%.2f" % (attr, form[attr]))
        self.larch_eval("mback_norm(%s)" % (', '.join(copts)))
        if form['auto_step']:
            norm_expr = """{group:s}.norm = 1.0*{group:s}.norm_{normmeth:s}
{group:s}.edge_step = 1.0*{group:s}.edge_step_{normmeth:s}"""
            self.larch_eval(norm_expr.format(**form))
        else:
            # user-supplied step: rescale norm to the requested edge_step
            norm_expr = """{group:s}.norm = 1.0*{group:s}.norm_{normmeth:s}
{group:s}.norm *= {group:s}.edge_step_{normmeth:s}/{edge_step:.8f}"""
            self.larch_eval(norm_expr.format(**form))
    if norm_method.startswith('area'):
        form['normmeth'] = 'area'
        expr = """{group:s}.norm = 1.0*{group:s}.norm_{normmeth:s}
{group:s}.edge_step = 1.0*{group:s}.edge_step_{normmeth:s}"""
        self.larch_eval(expr.format(**form))
    self.make_dnormde(dgroup)
    # push computed values back into the GUI widgets
    if form['auto_e0']:
        self.wids['e0'].SetValue(dgroup.e0)
    if form['auto_step']:
        self.wids['step'].SetValue(dgroup.edge_step)
        autoset_fs_increment(self.wids['step'], dgroup.edge_step)
    self.wids['pre1'].SetValue(dgroup.pre_edge_details.pre1)
    self.wids['pre2'].SetValue(dgroup.pre_edge_details.pre2)
    self.wids['norm1'].SetValue(dgroup.pre_edge_details.norm1)
    self.wids['norm2'].SetValue(dgroup.pre_edge_details.norm2)
    self.wids['atsym'].SetStringSelection(dgroup.atsym)
    self.wids['edge'].SetStringSelection(dgroup.edge)
    # persist results into the group's stored configuration
    conf = {}
    for attr in ('e0', 'edge_step'):
        conf[attr] = getattr(dgroup, attr)
    for attr in ('pre1', 'pre2', 'nnorm', 'norm1', 'norm2'):
        conf[attr] = getattr(dgroup.pre_edge_details, attr)
    if hasattr(dgroup, 'mback_params'):  # from mback
        conf['atsym'] = getattr(dgroup.mback_params, 'atsym')
        conf['edge'] = getattr(dgroup.mback_params, 'edge')
    self.update_config(conf, dgroup=dgroup)
    # re-enable processing only after pending GUI events settle
    wx.CallAfter(self.unset_skip_process)