def regression(self, fit, title):
    """Apply least squares to the free parameters, releasing them in stages.

    Parameters
    ----------
    fit : FitRecipe
        The recipe whose parameters are refined against the experimental data.
    title :
        Label used in progress messages and in the results file name.

    Parameters are freed cumulatively ("scale", then "delta2", then "all"),
    with a least-squares refinement and a diagnostic plot after each stage.
    The final results are written to "<title>.results".
    """
    print('Fitting PDF of', title, 'to expt. data.')
    # Silence per-iteration output from the fit hook during the refinement.
    fit.fithooks[0].verbose = 0

    # Plot the starting state before any parameter is refined.
    self.plot_fit(fit, title, num=0)

    # Free parameters one-by-one and refine with scipy's least-squares
    # optimizer after each release; staged freeing is more stable than
    # freeing everything at once.  (The original repeated this sequence
    # verbatim three times.)
    stages = ("scale", "delta2", "all")
    for num, tag in enumerate(stages, start=1):
        print(title, '[{}/{}]'.format(num, len(stages)))
        fit.free(tag)
        leastsq(fit.residual, fit.values)
        self.plot_fit(fit, title, num=num)

    print(title, 'Done!')
    # Save the refined parameter values and fit statistics to disk.
    contribution_result = FitResults(fit)
    contribution_result.saveResults(title + '.results')
    return
def main():
    """Fit the MEF organic PDF and save structures, fit curve, and results."""
    # Make the data and the recipe.
    data = "../data/MEF_300-00000.gr"
    basename = "MEF_300K_LS"
    # BUGFIX: "print basename" was Python 2 statement syntax, a SyntaxError
    # under Python 3; the rest of the file uses print() calls.
    print(basename)

    # Make the recipe: a crystal phase, a molecular phase, and an
    # intra-molecular phase built from the same molecule.
    from diffpy.Structure import Structure
    stru1 = Structure(filename='../data/MEF.cif')
    stru2 = Structure(filename='../data/MEF.xyz')
    stru3 = Structure(filename='../data/MEF.xyz')
    recipe = makeRecipe(stru1, stru2, stru3, data)

    if _plot:
        from diffpy.srfit.fitbase.fithook import PlotFitHook
        recipe.pushFitHook(PlotFitHook())
    recipe.fithooks[0].verbose = 3

    # Refine all free parameters with scipy's least-squares optimizer.
    from scipy.optimize import leastsq
    leastsq(recipe.residual, recipe.values)

    # Save the refined structures.
    # NOTE(review): stru3 is loaded but never written, and the "_Intra" file
    # is written from stru2 — confirm whether stru3 was intended here.
    stru1.write(basename + "_Cryst_B_zoomed.stru", "pdffit")
    stru2.write(basename + "_Mole_B_zoomed.xyz", "xyz")
    stru2.write(basename + "_Intra_zoomed.xyz", "xyz")

    # Save the measured and fitted PDF curves.
    profile = recipe.MEF.profile
    profile.savetxt(basename + ".fit")

    # Generate, print, and save the FitResults.
    res = FitResults(recipe)
    res.printResults()
    header = "MEF Organic PDF fit.\n"
    res.saveResults(basename + ".res", header=header)

    # Plot!
    if _plot:
        plotResults(recipe)
def main():
    """Fit the ROY (QAXMEH) PDF and save structures, fit curve, and results."""
    # Make the data and the recipe.
    data = "../data/ON_RT_5-00000.gr"
    basename = "ROY_Least_Squares"
    # BUGFIX: "print basename" was Python 2 statement syntax, a SyntaxError
    # under Python 3; the rest of the file uses print() calls.
    print(basename)

    # Make the recipe from a crystal phase and two molecular phases.
    from diffpy.Structure import Structure
    stru1 = Structure(filename='../data/QAXMEH_ON.cif')
    # This is the non-distorted structure; keep it fixed.
    stru2 = Structure(filename='../data/QAXMEH_ON.xyz')
    stru3 = Structure(filename='../data/QAXMEH_ON.xyz')
    recipe = makeRecipe(stru1, stru2, stru3, data)

    if _plot:
        from diffpy.srfit.fitbase.fithook import PlotFitHook
        recipe.pushFitHook(PlotFitHook())
    recipe.fithooks[0].verbose = 3

    # Refine all free parameters with scipy's least-squares optimizer.
    from scipy.optimize import leastsq
    leastsq(recipe.residual, recipe.values)

    # Save the refined structures.
    # NOTE(review): stru3 is loaded but never written, and the "_Intra" file
    # is written from stru2 — confirm whether stru3 was intended here.
    stru1.write(basename + "_Cryst_B_zoomed.stru", "pdffit")
    stru2.write(basename + "_Mole_B_zoomed.xyz", "xyz")
    stru2.write(basename + "_Intra_zoomed.xyz", "xyz")

    # Save the measured and fitted PDF curves.
    profile = recipe.ROY.profile
    profile.savetxt(basename + ".fit")

    # Generate, print, and save the FitResults.
    res = FitResults(recipe)
    res.printResults()
    header = "A+B-C.\n"
    res.saveResults(basename + ".res", header=header)

    # Plot!
    if _plot:
        plotResults(recipe, basename)
def save_res(recipe: FitRecipe, base_name: str, folder: str) -> Path:
    """Write the fitting results of a refined recipe to disk.

    Parameters
    ----------
    recipe : FitRecipe
        The refined recipe.
    base_name : str
        Base name of the result file; the file is named "{base_name}.res".
    folder : str
        Directory in which the fitting result file is written.

    Returns
    -------
    res_file : Path
        The path to the fitting result file.
    """
    res_file = Path(folder).joinpath(f"{base_name}.res")
    FitResults(recipe).saveResults(str(res_file))
    return res_file
def main():
    """
    This will run by default when the file is executed using
    "python file.py" in the command line.

    Parameters
    ----------
    None

    Returns
    -------
    None
    """
    # Make some folders to store our output files.
    resdir = Path("res")
    fitdir = Path("fit")
    figdir = Path("fig")

    folders = [resdir, fitdir, figdir]

    # Loop over all folders and create any that do not yet exist.
    for folder in folders:
        if not folder.exists():
            folder.mkdir()

    # Let the user know what fit we are running by printing to terminal.
    basename = FIT_ID
    print(f"\n{basename}\n")

    # Establish the full location of the data.
    data = DPATH / GR_NAME

    # Establish the location of the cif file with the structure of interest
    # and load it into a diffpy structure object.
    strudir = DPATH
    cif_file = strudir / CIF_NAME

    # Parse the cif to recover the space group for symmetry constraints below.
    p_cif = getParser('cif')
    structure = p_cif.parseFile(str(cif_file))
    space_group = p_cif.spacegroup.short_name

    # Initialize the Fit Recipe by giving it this diffpy structure
    # as well as the path to the data file.
    recipe = makerecipe(cif_file, data)

    # Let's set the calculation range!
    recipe.crystal.profile.setCalculationRange(xmin=PDF_RMIN,
                                               xmax=PDF_RMAX,
                                               dx=PDF_RSTEP)

    # Add, initialize, and tag variables in the Fit Recipe object.
    # In this case we also add psize, which is the NP size.
    recipe.addVar(recipe.crystal.s1, SCALE_I, tag="scale")

    # Set an equation, based on your PDF generators. Here we add an extra
    # layer of complexity, incorporating "f" into our equation. This new term
    # incorporates damping to our PDF to model the effect of finite
    # crystallite size. In this case we use a function which models a
    # spherical NP.
    from diffpy.srfit.pdf.characteristicfunctions import sphericalCF
    recipe.crystal.registerFunction(sphericalCF, name="f")
    recipe.crystal.setEquation("s1*G1*f")

    recipe.addVar(recipe.crystal.psize, PSIZE_I, tag="psize")

    # Initialize the instrument parameters, Q_damp and Q_broad, and
    # assign Q_max and Q_min.
    # Note, here we do not add the qdamp and qbroad parameters to the fit!!!
    # They are fixed here, because we refined them in the Ni standard fit!
    recipe.crystal.G1.qdamp.value = QDAMP_I
    recipe.crystal.G1.qbroad.value = QBROAD_I
    recipe.crystal.G1.setQmax(QMAX)
    recipe.crystal.G1.setQmin(QMIN)

    # Use the srfit function constrainAsSpaceGroup to constrain
    # the lattice and ADP parameters according to the Fm-3m space group.
    from diffpy.srfit.structure import constrainAsSpaceGroup
    spacegroupparams = constrainAsSpaceGroup(recipe.crystal.G1.phase,
                                             space_group)

    # Add and initialize delta, the lattice parameter, and a thermal
    # parameter, but not instrumental parameters to Fit Recipe.
    # The instrumental parameters will remain fixed at values obtained from
    # the Ni calibrant in our previous example. As we have not added them
    # through recipe.addVar, they cannot be refined.
    for par in spacegroupparams.latpars:
        recipe.addVar(par, value=CUBICLAT_I, name="fcc_Lat", tag="lat")

    for par in spacegroupparams.adppars:
        recipe.addVar(par, value=UISO_I, name="fcc_ADP", tag="adp")

    recipe.addVar(recipe.crystal.G1.delta2,
                  name="Pt_Delta2", value=DELTA2_I, tag="d2")

    # Keep the fit hook quiet during the staged refinement below.
    # (verbose = 0 suppresses per-iteration output.)
    recipe.fithooks[0].verbose = 0

    # During the optimization, we fix and free parameters sequentially,
    # as you would in PDFgui; this leads to more stability.
    refine_params = ["scale", "lat", "psize", "adp", "d2", "all"]

    recipe.fix("all")

    for params in refine_params:
        recipe.free(params)
        print(f"\n****\nFitting {recipe.getNames()} against "
              f"{GR_NAME} with {CIF_NAME}\n")
        least_squares(recipe.residual, recipe.values, x_scale="jac")

    # We use the savetxt method of the profile to write a text file
    # containing the measured and fitted PDF to disk.
    # The file is named based on the basename we created earlier, and
    # written to the fitdir directory.
    profile = recipe.crystal.profile
    profile.savetxt(fitdir / (basename + ".fit"))

    # We use the FitResults function to parse out the results from
    # the optimized Fit Recipe.
    res = FitResults(recipe)

    # We print these results to the terminal.
    res.printResults()

    # We grab the fit Rw.
    rw = res.rw

    # We use the saveResults method of FitResults to write a text file
    # containing the fitted parameters and fit quality indices to disk.
    # The file is named based on the basename we created earlier, and
    # written to the resdir directory.
    header = "crystal_HF.\n"
    res.saveResults(resdir / (basename + ".res"), header=header)

    # We use the plotresults function we created earlier to make a plot of
    # the measured, calculated, and difference curves. We show this
    # as an interactive window and then write a pdf file to disk.
    # The file is named based on the basename we created earlier, and
    # written to the figdir directory.
    plotresults(recipe, figdir / basename)

    # Let's make a dictionary to hold our results. This makes reloading the
    # fit parameters easier later.
    refined_dict = dict()

    refined_dict['rw'] = rw.item()

    # We loop over the variable names, the variable values, and the
    # variable uncertainties (esd).
    for name, val, unc in zip(res.varnames, res.varvals, res.varunc):
        # We store the refined value for this variable using the "value" key.
        # We use the ".item()" method because "res.varvals" exist as
        # numpy.float64 objects, and we want them as regular python floats.
        if name not in refined_dict:
            refined_dict[name] = dict()
        refined_dict[name]["value"] = val.item()
        refined_dict[name]["uncert"] = unc.item()

    # Finally, write the dictionary of refined parameters to a yaml file.
    with open(basename + ".yml", 'w') as outfile:
        yaml.safe_dump(refined_dict, outfile)
    # Instrumental damping is held fixed; it is not refined in this fit.
    recipe.addVar(contribution.qdamp, 0.03, fixed=True)
    recipe.addVar(contribution.nickel.delta2, 5)

    # Give the recipe away so it can be used!
    return recipe


if __name__ == "__main__":

    # Make the data and the recipe
    ciffile = "data/ni.cif"
    data = "data/ni-q27r100-neutron.gr"

    # Make the recipe
    recipe = makeRecipe(ciffile, data)

    # Optimize
    scipyOptimize(recipe)

    # Save the measured and fitted PDF curves to disk.
    recipe.nickel.savetxt("nickel_example.fit")

    # Generate, print and save the FitResults
    res = FitResults(recipe)
    res.printResults()
    res.saveResults("nickel_example.res")

    # Plot!
    plotResults(recipe)

# End of file
    # Overall scale, fixed instrumental damping, and correlated-motion term.
    recipe.addVar(contribution.scale, 1)
    recipe.addVar(contribution.qdamp, 0.03, fixed=True)
    recipe.addVar(contribution.nickel.delta2, 5)

    # Give the recipe away so it can be used!
    return recipe


if __name__ == "__main__":

    # Make the data and the recipe
    ciffile = "data/ni.cif"
    data = "data/ni-q27r100-neutron.gr"

    # Make the recipe
    recipe = makeRecipe(ciffile, data)

    # Optimize
    scipyOptimize(recipe)

    # Save the measured and fitted PDF curves to disk.
    recipe.nickel.savetxt("nickel_example.fit")

    # Generate, print and save the FitResults
    res = FitResults(recipe)
    res.printResults()
    res.saveResults("nickel_example.res")

    # Plot!
    plotResults(recipe)

# End of file
class ModelBase:
    """The template for the model class.

    Wraps a FitRecipe with convenience methods for configuring, running,
    and exporting a least-squares refinement.
    """

    def __init__(self, recipe: md.MyRecipe):
        self._recipe = recipe
        # The first contribution in the recipe is treated as THE contribution.
        self._contribution = next(iter(recipe.contributions.values()))
        self._fit_result = FitResults(self._recipe, update=False)
        self._verbose: int = 1
        self._order: tp.List[tp.Union[str, tp.Iterable[str]]] = []
        self._options: dict = {}
        self._fit_state = None

    def parallel(self, ncpu: int) -> None:
        """Parallel computing.

        Parameters
        ----------
        ncpu :
            Number of CPUs.
        """
        fc = self.get_contribution()
        for g in fc.generators.values():
            g.parallel(ncpu)

    def set_xrange(self, start: tp.Optional[float] = None,
                   end: tp.Optional[float] = None,
                   step: tp.Optional[float] = None) -> None:
        """Set fitting range.

        Parameters
        ----------
        start :
            Start of x. x >= start
        end :
            End of x. x <= end
        step :
            Step of x. x[i] - x[i-1] == step

        Returns
        -------
        None
        """
        profile = self.get_profile()
        profile.setCalculationRange(xmin=start, xmax=end, dx=step)

    def set_verbose(self, level: int) -> None:
        """Set verbose level.

        Parameters
        ----------
        level :
            The level used. 0 means quiet.

        Returns
        -------
        None
        """
        self._verbose = level

    def get_verbose(self) -> int:
        """Get verbose level.

        Returns
        -------
        Verbose level.
        """
        return self._verbose

    def set_options(self, **kwargs) -> None:
        """Set options for fitting.

        Parameters
        ----------
        kwargs :
            The options for the scipy.optimize.least_squares.

        Returns
        -------
        None
        """
        self._options = kwargs

    def get_options(self) -> dict:
        """Get options for fitting.

        Returns
        -------
        A dictionary of options.
        """
        return self._options

    def set_order(self, *order: tp.Union[str, tp.Iterable[str]]) -> None:
        """Set the order of fitting parameters.

        Parameters
        ----------
        order :
            A list of list or string.

        Returns
        -------
        None

        Examples
        --------
        if order is ["A", ["B", "C"]], "A" will be first refined and
        "B", "C" will be added after and refined.
        """
        order = list(order)
        self._check_order(order)
        self._order = order

    def _check_order(self, order: tp.Any) -> None:
        """Check the order; raise if a name is neither a variable nor a tag."""
        tags = set(self._recipe._tagmanager.alltags())
        if isinstance(order, str):
            if not hasattr(self._recipe, order) and order not in tags:
                raise ValueError("'{}' is not in the variable names.".format(order))
        elif isinstance(order, tp.Iterable):
            for x in order:
                self._check_order(x)
        else:
            raise TypeError("'{}' is not allowed.".format(type(order)))

    def get_order(self) -> tp.List[tp.Union[str, tp.Iterable[str]]]:
        """Get the order of the parameters.

        Returns
        -------
        A list of parameters.
        """
        return self._order

    def set_value(self, **kwargs) -> None:
        """Set the parameter values.

        Parameters
        ----------
        kwargs :
            In the format of param = value.

        Returns
        -------
        None
        """
        self._check_params(kwargs.keys())
        for name, value in kwargs.items():
            var: Parameter = getattr(self._recipe, name)
            var.setValue(value)

    def get_param(self, name: str) -> Parameter:
        """Get the parameter by name; raise KeyError if absent."""
        if not hasattr(self._recipe, name):
            raise KeyError("No such parameter call '{}' in the recipe.".format(name))
        return getattr(self._recipe, name)

    def set_bound(self, **kwargs) -> None:
        """Set the bound.

        Parameters
        ----------
        kwargs :
            In the form of param = (lb, ub)

        Returns
        -------
        None
        """
        self._check_params(kwargs.keys())
        for name, bound in kwargs.items():
            var: Parameter = getattr(self._recipe, name)
            var.boundRange(*bound)

    def set_rel_bound(self, **kwargs) -> None:
        """Set the bound relatively to current value.

        Parameters
        ----------
        kwargs :
            In the form of param = (lb, ub)

        Returns
        -------
        None
        """
        self._check_params(kwargs.keys())
        for name, bound in kwargs.items():
            var: Parameter = getattr(self._recipe, name)
            var.boundWindow(*bound)

    def _check_params(self, params) -> None:
        """Check that every name in params is a recipe parameter."""
        for param in params:
            if not hasattr(self._recipe, param):
                raise KeyError("There is no parameter called '{}'".format(param))

    def _create_recipe(self) -> md.MyRecipe:
        """Place holder for the method to create the recipe.

        Raises
        ------
        NotImplementedError
            Always; subclasses must override.
        """
        # BUGFIX: was "raise NotImplemented" — NotImplemented is a sentinel
        # value, not an exception, and cannot be raised.
        raise NotImplementedError

    def get_contribution(self) -> md.MyContribution:
        """Get the first contribution in recipe.

        Returns
        -------
        A FitContribution.
        """
        return self._contribution

    def get_generators(self) -> tp.Dict[str, tp.Callable]:
        """Get the generators in a dictionary."""
        return self.get_contribution().generators

    def calc_phase(self, name: str) -> xr.DataArray:
        """Calculate the data from a generator.

        Parameters
        ----------
        name :
            The name of a generator.

        Returns
        -------
        A xarray.DataArray of calculated y with x as the coordinate.

        Raises
        ------
        KeyError
            If no generator with that name exists.
        """
        gs = self.get_generators()
        p = self.get_profile()
        if name not in gs:
            raise KeyError("There are no generators named '{}'.".format(name))
        y = gs[name](p.x)
        # BUGFIX: the coordinate values come from the profile (p.x); the bare
        # name "x" used before was undefined in this scope.
        arr = xr.DataArray(y, coords={"x": p.x}, dims=["x"])
        # BUGFIX: attach metadata to the array itself and to its "x"
        # coordinate; arr["y"] raised a KeyError because the array has no
        # "y" coordinate.
        arr.attrs["standard_name"] = "G"
        arr.attrs["units"] = r"Å$^{-2}$"
        arr["x"].attrs["standard_name"] = "r"
        arr["x"].attrs["units"] = "Å"
        return arr

    def set_profile(self, profile: Profile) -> None:
        """Set the data profile.

        Parameters
        ----------
        profile :
            A data profile.

        Returns
        -------
        None
        """
        fc: md.MyContribution = self.get_contribution()
        fc.setProfile(profile)

    def get_profile(self) -> Profile:
        """Get the data profile."""
        fc = self.get_contribution()
        return fc.profile

    def optimize(self) -> None:
        """Optimize the model. The scipy.optimize.least_squares is used.

        Returns
        -------
        None

        Raises
        ------
        ValueError
            If no fitting order has been set.
        """
        if not self._order:
            raise ValueError("No parameters to refine.")
        md.optimize(self._recipe, self._order, validate=False,
                    verbose=self._verbose, **self._options)
        rw = self.get_rw()
        if self._verbose > 0:
            print("Optimization result: Rw = {:.6f}.".format(rw))

    def get_rw(self) -> float:
        """Calculate Rw value from profile.

        Returns
        -------
        Rw value.
        """
        profile = self.get_profile()
        y, ycalc = profile.y, profile.ycalc
        # NOTE(review): the denominator normalizes by ycalc; the conventional
        # Rw normalizes by the observed y — confirm this is intentional.
        return np.sqrt(np.sum((y - ycalc) ** 2) / np.sum(ycalc ** 2))

    def update(self) -> None:
        """Update the result."""
        return self._fit_result.update()

    def show(self) -> None:
        """Show the values of parameters."""
        self._recipe.show()

    def get_result(self) -> dict:
        """Get the result in a dictionary.

        Maps every refined and fixed variable name to its value, plus the
        key "rw" for the goodness of fit.
        """
        fr = self._fit_result
        dct = dict(zip(fr.varnames, fr.varvals))
        dct.update(zip(fr.fixednames, fr.fixedvals))
        dct["rw"] = fr.rw
        return dct

    def save(self, directory: str, file_prefix: str) -> None:
        """Save the model parameters. Must update before save.

        Parameters
        ----------
        directory :
            The directory to export the files.
        file_prefix :
            The prefix of the file name.

        Returns
        -------
        None
        """
        directory = pathlib.Path(directory)
        if not directory.is_dir():
            directory.mkdir(parents=True)
        path = directory.joinpath("{}.txt".format(file_prefix))
        self._fit_result.saveResults(path)

    def load(self, filepath: str) -> None:
        """Load the parameters for the model.

        Parameters
        ----------
        filepath :
            The path to the file or the string of the content or a IOstream.

        Returns
        -------
        None
        """
        initializeRecipe(self._recipe, filepath)

    def export_result(self) -> xr.Dataset:
        """Export the result in a dataset with symbol and unit metadata."""
        dct = self.get_result()
        ds = xr.Dataset(dct)
        for name in ds.variables:
            ds[name].attrs["long_name"] = get_symbol(name)
            ds[name].attrs["units"] = get_unit(name)
        ds["rw"].attrs["long_name"] = "$R_w$"
        return ds

    def export_fits(self) -> xr.Dataset:
        """Export the fits in a dataset (observed, calculated, raw curves)."""
        profile = self.get_profile()
        ds = xr.Dataset(
            {"y": (["x"], profile.y),
             "ycalc": (["x"], profile.ycalc),
             "yobs": (["xobs"], profile.yobs)},
            {"x": (["x"], profile.x),
             "xobs": (["xobs"], profile.xobs)}
        )
        ds["y"].attrs["standard_name"] = "G"
        ds["y"].attrs["units"] = r"Å$^{-2}$"
        ds["ycalc"].attrs["standard_name"] = "G"
        ds["ycalc"].attrs["units"] = r"Å$^{-2}$"
        ds["yobs"].attrs["standard_name"] = "G"
        ds["yobs"].attrs["units"] = r"Å$^{-2}$"
        ds["x"].attrs["standard_name"] = "r"
        ds["x"].attrs["units"] = "Å"
        ds["xobs"].attrs["standard_name"] = "r"
        ds["xobs"].attrs["units"] = "Å"
        return ds

    def save_result(self, directory: str, file_prefix: str) -> None:
        """Save the fitting result.

        Parameters
        ----------
        directory :
            The directory to export the files.
        file_prefix :
            The prefix of the file name.

        Returns
        -------
        None
        """
        directory = pathlib.Path(directory)
        if not directory.is_dir():
            directory.mkdir(parents=True)
        result = self.export_result()
        path = directory.joinpath("{}_result.nc".format(file_prefix))
        result.to_netcdf(path)

    def save_fits(self, directory: str, file_prefix: str) -> None:
        """Save the fitted curves.

        Parameters
        ----------
        directory :
            The directory to export the files.
        file_prefix :
            The prefix of the file name.

        Returns
        -------
        None
        """
        directory = pathlib.Path(directory)
        if not directory.is_dir():
            directory.mkdir(parents=True)
        fits = self.export_fits()
        path = directory.joinpath("{}_fits.nc".format(file_prefix))
        fits.to_netcdf(path)

    def save_all(self, directory: str, file_prefix: str) -> None:
        """Save the results, fits and structures in a directory.

        Parameters
        ----------
        directory :
            The directory to export the files.
        file_prefix :
            The prefix of the file name.

        Returns
        -------
        None
        """
        self.save(directory, file_prefix)
        self.save_result(directory, file_prefix)
        self.save_fits(directory, file_prefix)

    def plot(self, **kwargs) -> None:
        """View the fitted curves.

        Returns
        -------
        None
        """
        fits = self.export_fits()
        plot_fits(fits, **kwargs)
def main():
    """
    This will run by default when the file is executed using
    "python file.py" in the command line.

    Co-refines one structure against x-ray and neutron PDF datasets.

    Parameters
    ----------
    None

    Returns
    -------
    None
    """
    # Make some folders to store our output files.
    resdir = Path("res")
    fitdir = Path("fit")
    figdir = Path("fig")

    folders = [resdir, fitdir, figdir]

    # Loop over all folders and create any that do not yet exist.
    for folder in folders:
        if not folder.exists():
            folder.mkdir()

    # Let the user know what fit we are running by printing to terminal.
    basename = FIT_ID
    print(f"\n{basename}\n")

    # Establish the full location of the two datasets.
    xray_data = DPATH / XRAY_GR_NAME
    neutron_data = DPATH / NEUTRON_GR_NAME

    # Establish the location of the cif file with the structure of interest
    # and load it into a diffpy structure object.
    strudir = DPATH
    cif_file = strudir / CIF_NAME

    p_cif = getParser('cif')
    structure = p_cif.parseFile(str(cif_file))
    space_group = p_cif.spacegroup.short_name

    # Initialize the Fit Recipe by giving it this diffpy structure
    # as well as the path to the data file.
    # Here we use a new function, which takes both datasets.
    recipe = makerecipe_coref(cif_file, xray_data, neutron_data)

    # We first want to add two scale parameters to our fit recipe,
    # one for each dataset.
    recipe.addVar(recipe.xray.s1, XRAY_SCALE_I, tag="scale")
    recipe.addVar(recipe.neutron.s2, NEUTRON_SCALE_I, tag="scale")

    # Let's set the calculation range!
    # Here we use a loop to make it easier to edit both ranges.
    for cont in recipe._contributions.values():
        cont.profile.setCalculationRange(xmin=PDF_RMIN,
                                         xmax=PDF_RMAX,
                                         dx=PDF_RSTEP)

    # Assign Q_max and Q_min, all part of the PDF Generator object.
    # It's possible that the PDFParse function we used above already parsed
    # out this information, but in case it didn't, we set it explicitly
    # again here, for both the x-ray and neutron configurations.
    recipe.xray.xray_G.setQmax(XRAY_QMAX)
    recipe.xray.xray_G.setQmin(XRAY_QMIN)
    recipe.neutron.neutron_G.setQmax(NEUTRON_QMAX)
    # BUGFIX: the lower neutron Q bound was previously set to NEUTRON_QMAX,
    # collapsing the neutron Q range to a single point.
    recipe.neutron.neutron_G.setQmin(NEUTRON_QMIN)

    # Initialize and add the instrument parameters, Q_damp and Q_broad, and
    # delta and instrumental parameters to Fit Recipe.
    # We give them unique names, and tag them with our choice of relevant
    # strings. Again, two datasets means we need to do this for each.
    recipe.addVar(recipe.xray.xray_G.delta2,
                  name="Ni_Delta2", value=DELTA2_I, tag="d2")
    # Both generators share one correlated-motion parameter.
    recipe.constrain(recipe.neutron.neutron_G.delta2, "Ni_Delta2")
    recipe.addVar(recipe.xray.xray_G.qdamp,
                  name="xray_Calib_Qdamp", value=XRAY_QDAMP_I, tag="inst")
    recipe.addVar(recipe.xray.xray_G.qbroad,
                  name="xray_Calib_Qbroad", value=XRAY_QBROAD_I, tag="inst")
    recipe.addVar(recipe.neutron.neutron_G.qdamp,
                  name="neutron_Calib_Qdamp", value=NEUTRON_QDAMP_I, tag="inst")
    recipe.addVar(recipe.neutron.neutron_G.qbroad,
                  name="neutron_Calib_Qbroad", value=NEUTRON_QBROAD_I, tag="inst")

    # Configure some additional fit variables pertaining to symmetry.
    # We can use the srfit function constrainAsSpaceGroup to constrain
    # the lattice and ADP parameters according to the Fm-3m space group.
    # First we establish the relevant parameters, then we cycle through
    # the parameters and activate and tag them.
    # We must explicitly set the ADP parameters, because in this case, the
    # CIF had no ADP data.
    from diffpy.srfit.structure import constrainAsSpaceGroup

    # Create the symmetry distinct parameter sets, and constrain them
    # in the generator.
    neutron_spacegroupparams = constrainAsSpaceGroup(recipe.neutron.neutron_G.phase,
                                                     space_group)
    xray_spacegroupparams = constrainAsSpaceGroup(recipe.xray.xray_G.phase,
                                                  space_group)

    # Loop over all the symmetry distinct lattice parameters and add
    # them to the recipe; the x-ray and neutron phases share one variable.
    for xray_par, neutron_par in zip(xray_spacegroupparams.latpars,
                                     neutron_spacegroupparams.latpars):
        recipe.addVar(xray_par, value=CUBICLAT_I, name="fcc_Lat", tag="lat")
        recipe.constrain(neutron_par, "fcc_Lat")

    # Loop over all the symmetry distinct ADPs and add them to the recipe;
    # again shared between the two phases.
    for xray_par, neutron_par in zip(xray_spacegroupparams.adppars,
                                     neutron_spacegroupparams.adppars):
        recipe.addVar(xray_par, value=UISO_I, name="fcc_ADP", tag="adp")
        recipe.constrain(neutron_par, "fcc_ADP")

    # Tell the Fit Recipe we want to write the maximum amount of
    # information to the terminal during fitting.
    recipe.fithooks[0].verbose = 3

    # During the optimization, we fix and free parameters sequentially
    # as you would in PDFgui. This leads to more stability in the refinement.
    # We first fix all variables. "all" is a tag which incorporates
    # every parameter.
    recipe.fix("all")

    # Here we will set the weight of each contribution; each gets equal weight.
    conts = list(recipe._contributions.values())
    for cont in conts:
        recipe.setWeight(cont, 1.0 / len(conts))

    # We then run a fit using the SciPy function "least_squares", freeing
    # one tagged group of parameters at a time. The x_scale="jac" argument
    # provides a bit more stability, and "least_squares" (unlike "leastsq")
    # supports bounds on refined parameters.
    refine_params = ["scale", "lat", "adp", "d2", "all"]

    for params in refine_params:
        recipe.free(params)
        print(f"\n****\nFitting {recipe.getNames()} against "
              f"{XRAY_GR_NAME} and {NEUTRON_GR_NAME} with {CIF_NAME}\n")
        least_squares(recipe.residual, recipe.values, x_scale="jac")

    # We use the savetxt method of the profile to write a text file
    # containing the measured and fitted PDF to disk.
    # BUGFIX: this recipe has "xray" and "neutron" contributions but no
    # "crystal" contribution, so the previous "recipe.crystal.profile"
    # raised an AttributeError. Save the fit of each contribution instead,
    # suffixing the file name with the contribution name.
    for cont_name, cont in recipe._contributions.items():
        cont.profile.savetxt(fitdir / (basename + "_" + cont_name + ".fit"))

    # We use the FitResults function to parse out the results from
    # the optimized Fit Recipe.
    res = FitResults(recipe)

    # We print these results to the terminal.
    res.printResults()

    # We grab the fit Rw.
    rw = res.rw

    # We use the saveResults method of FitResults to write a text file
    # containing the fitted parameters and fit quality indices to disk.
    header = "crystal_HF.\n"
    res.saveResults(resdir / (basename + ".res"), header=header)

    # We use the plotresults function we created earlier to make a plot of
    # the measured, calculated, and difference curves, shown interactively
    # and written to the figdir directory.
    plotresults(recipe, figdir / basename)

    # Let's make a dictionary to hold our results. This makes reloading the
    # fit parameters easier later.
    refined_dict = dict()

    refined_dict['rw'] = rw.item()

    recipe.free("all")

    # We loop over the variable names, the variable values, and the
    # variable uncertainties (esd).
    for name, val, unc in zip(res.varnames, res.varvals, res.varunc):
        # We store the refined value for this variable using the "value" key.
        # We use the ".item()" method because "res.varvals" exist as
        # numpy.float64 objects, and we want them as regular python floats.
        if name not in refined_dict:
            refined_dict[name] = dict()
        refined_dict[name]["value"] = val.item()
        refined_dict[name]["uncert"] = unc.item()

    # Finally, let's write our dictionary to a yaml file!
    with open(basename + ".yml", 'w') as outfile:
        yaml.safe_dump(refined_dict, outfile)
def main():
    """
    Run a temperature-series PDF refinement for every cif file found.

    This will run by default when the file is executed using
    "python file.py" in the command line.

    For each candidate structure (cif file) one fit recipe is built and
    then sequentially re-fit against every temperature data set, reusing
    the previous temperature's refined values as the starting point.
    Refined parameters are cached in a yaml file so finished fits are
    reloaded instead of re-run on subsequent invocations.

    Parameters
    ----------
    None

    Returns
    ----------
    None
    """
    # Make some folders to store our output files.
    resdir = "res"
    fitdir = "fit"
    figdir = "fig"

    folders = [resdir, fitdir, figdir]

    # Let's define our working directory.
    base_dir = Path()

    # File where all refined parameters are cached between runs.
    yaml_file = base_dir / (FIT_ID_BASE + "refined_params.yml")

    # This is a bit different than what we've done before.
    # We are going to look at a set of temperatures, so we want
    # to find all the relevant data files in the "DPATH" folder
    # we identified earlier which match a certain pattern.
    # For every file we find, we link it up with the data path.
    data_files = list(DPATH.glob(f"*{GR_NAME_BASE}*.gr"))

    # We now want to grab the temperature at which each file was measured.
    # This procedure depends on how the file is named. In our case each
    # file is carefully named as "composition_TTTK.gr" where TTT is the
    # temperature in Kelvin.
    # First strip off the directory information for every file in
    # "data_files", giving a list of bare file names without extension.
    temps = [f.stem for f in data_files]

    # Next split every base filename on "_" and keep the second entry,
    # because we know it has the temperature in the form TTTK.
    temps = [t.split('_')[1] for t in temps]

    # We want the temperature as an integer, so drop the trailing "K"
    # (strings can be sliced like arrays: "[:-1]" takes all but the last
    # character) and cast with "int()".
    temps = [int(t[:-1]) for t in temps]

    # Sort the data files and temperatures together, in descending order
    # of temperature.
    temps, data_files = zip(*sorted(zip(temps, data_files), reverse=True))

    # We want to test two structure models, so find all the cif files,
    # the same way we found the data files.
    cif_files = list(DPATH.glob(f"*{CIF_NAME_BASE}*.cif"))

    # Load the cached results dictionary if it exists; otherwise start
    # with an empty one.
    if yaml_file.exists():
        print(f"\n{yaml_file.name} exists, loading!\n")
        with open(yaml_file, 'r') as infile:
            refined_dict = yaml.safe_load(infile)
    else:
        print(f"\n{yaml_file.name} does not exist, creating!\n")
        refined_dict = dict()

    # We want to do a separate temperature series on each of the
    # structures, so loop over all the cif files we found.
    for cif in cif_files:
        # Let's get the space group, so we can refer to it later.
        p_cif = getParser('cif')
        p_cif.parseFile(str(cif))
        space_group = p_cif.spacegroup.short_name

        # Backslashes are bad form in keys and paths, so remove them.
        structure_string = space_group.replace("/", "_on_")

        # Check if we already ran this fit, so we don't duplicate work.
        if structure_string not in refined_dict:
            print(f"\n{structure_string} IS NOT in dictionary!\n")
            # Nest a dictionary inside "refined_dict" with a key defined
            # by "structure_string".
            refined_dict[structure_string] = dict()
            # Alias for ease of coding/readability.
            sg_dict = refined_dict[structure_string]
            done = False
        elif structure_string in refined_dict:
            print(f"\n{structure_string} IS IN dictionary!\n")
            sg_dict = refined_dict[structure_string]
            done = True

        # Where will we work? here! (one directory per structure)
        work_dir = base_dir / structure_string

        # Make our output folders.
        for folder in folders:
            new_folder = work_dir / folder
            if not new_folder.exists():
                new_folder.mkdir(parents=True)

        # Make a recipe based on this cif file and the first data file in
        # "data_files". Any data file works at this point: it only
        # initializes the recipe, and is replaced before refining.
        recipe = makerecipe(cif, data_files[0])

        # Let's set the calculation range!
        recipe.crystal.profile.setCalculationRange(xmin=PDF_RMIN, xmax=PDF_RMAX, dx=PDF_RSTEP)

        # Initialize the instrument parameters, Q_damp and Q_broad, and
        # assign Q_max and Q_min.
        recipe.crystal.G1.qdamp.value = QDAMP_I
        recipe.crystal.G1.qbroad.value = QBROAD_I
        recipe.crystal.G1.setQmax(QMAX)
        recipe.crystal.G1.setQmin(QMIN)

        # Add, initialize, and tag the scale variable.
        recipe.addVar(recipe.crystal.s1, SCALE_I, tag="scale")

        # Use the srfit function constrainAsSpaceGroup to constrain the
        # lattice and ADP parameters according to the space group setting.
        # from diffpy.srfit.structure import constrainAsSpaceGroup
        spacegroupparams = constrainAsSpaceGroup(recipe.crystal.G1.phase, space_group)

        for par in spacegroupparams.latpars:
            recipe.addVar(par, fixed=False, tag="lat")
        for par in spacegroupparams.adppars:
            recipe.addVar(par, fixed=False, tag="adp")
        # Note: here we also can refine atomic coordinates. In our
        # previous examples, all the atoms were on symmetry operators,
        # so their positions could not be refined.
        for par in spacegroupparams.xyzpars:
            recipe.addVar(par, fixed=False, tag="xyz")

        # Add delta, but not instrumental parameters, to the Fit Recipe.
        # NOTE(review): the initial value comes from the constant named
        # DELTA1_I although the variable is delta2 — confirm the constant
        # is the intended one.
        recipe.addVar(recipe.crystal.G1.delta2, name="Delta2", value=DELTA1_I, tag="d2")

        # Suppress per-iteration output from the fit hook during fitting.
        recipe.fithooks[0].verbose = 0

        # As we are doing a temperature series through a phase transition,
        # we want to fit many different data sets, each at a different
        # temperature. For this we loop over both "temps" and "data_files"
        # in lockstep.
        for file, temp in zip(data_files, temps):
            print(f"\nProcessing {file.name}!\n")

            # Check the cache for this temperature.
            if temp not in sg_dict:
                print(
                    f"\nT = {temp} K NOT IN {structure_string} dictionary, creating!\n"
                )
                # Nest a dictionary inside the nested dictionary
                # "refined_dict[stru_type]" with a key defined by "temp".
                sg_dict[temp] = dict()
                temp_dict = sg_dict[temp]
                done = False
            elif temp in sg_dict:
                print(f"\nT = {temp} K IS IN {structure_string} dictionary!\n")
                temp_dict = sg_dict[temp]
                done = True

            # We create a unique string to identify our fit, using the
            # structure type and the temperature. This is used when we
            # write files later.
            basename = f"{FIT_ID_BASE}{structure_string}_{str(temp)}_K"

            # Print the basename to the terminal.
            print(f"\nWorking on {basename}!\n")

            # We now load the proper dataset for the given temperature, by
            # creating a new profile object and loading it into the fit
            # recipe (replacing the initialization data).
            profile = Profile()
            parser = PDFParser()
            parser.parseFile(file)
            profile.loadParsedData(parser)
            recipe.crystal.setProfile(profile)

            if not done:
                print(
                    f"{basename} is NOT DONE with structure {structure_string} at T = {temp} K\n"
                )
                # We are now ready to start the refinement. During the
                # optimization, fix and free parameters in stages as you
                # would in PDFgui — this gives a more stable refinement.
                recipe.fix("all")
                # Tags are freed cumulatively: each pass through the loop
                # keeps the previously freed tags free as well.
                refine_params = ["scale", "lat", "adp", "d2", "all"]
                for params in refine_params:
                    recipe.free(params)
                    print(f"\n****\nFitting {recipe.getNames()} against "
                          f"{file.name} with {cif.name}\n")
                    least_squares(recipe.residual, recipe.values, x_scale="jac")
            elif done:
                print(
                    f"{basename} IS done with structure {structure_string} at T = {temp} K\n"
                )
                recipe.free("all")
                print("\nLoading parameters...\n")
                # Restore each recipe variable from the cached dictionary;
                # refit any variable that is missing from the cache.
                for var in recipe.getNames():
                    if var not in temp_dict:
                        print(
                            f"{var} is not in the dictionary!! Let's try to fix it..."
                        )
                        recipe.fix("all")
                        recipe.free(var)
                        print(f"\nFitting {recipe.getNames()}\n")
                        least_squares(recipe.residual, recipe.values, x_scale="jac")
                        recipe.free("all")
                    elif var in temp_dict:
                        var_dict = temp_dict[var]
                        val = var_dict["value"]
                        recipe.get(var).setValue(val)
                # Optionally polish the reloaded fit with one full
                # all-parameters refinement.
                if not SKIP_DONE:
                    print("\nPolishing...\n")
                    recipe.free("all")
                    print(f"\nFitting {recipe.getNames()}\n")
                    least_squares(recipe.residual, recipe.values, x_scale="jac")
                    print("\nPolishing done\n")

            # Write results for fresh fits, for polished fits, or when
            # replotting of finished fits was requested.
            if (done and not SKIP_DONE) or not done or REPLOT_DONE:
                print(f"\nStarting to write results for {basename}"
                      f" with structure {structure_string} at T = {temp} K\n")

                # Print the fit results to the terminal.
                res = FitResults(recipe)
                print("\n******************\n")
                res.printResults()
                print("\n******************\n")
                rw = res.rw

                # Write the fitted data to a file.
                profile = recipe.crystal.profile
                profile.savetxt(work_dir / fitdir / (basename + ".fit"))

                # As we will reuse the optimized fit recipe in the next
                # loop pass, we keep all the refined parameters by
                # recording everything in the nested dictionaries we made
                # earlier. We loop over the variable names, the variable
                # values, and the variable uncertainties (esd).
                for name, val, unc in zip(res.varnames, res.varvals, res.varunc):
                    # Create a new nested dictionary per variable name.
                    if name not in temp_dict:
                        temp_dict[name] = dict()
                    var_dict = temp_dict[name]
                    # We use the ".item()" method because "res.varvals"
                    # exist as numpy.float64 objects, and we want regular
                    # python floats for yaml serialization.
                    var_dict["value"] = val.item()
                    var_dict["uncert"] = unc.item()

                # We also store the fit rw, for posterity.
                temp_dict['rw'] = rw.item()

                # Write the fit results to a file.
                header = "crystal_HF.\n"
                res.saveResults(work_dir / resdir / (basename + ".res"), header=header)

                # Write a plot of the fit to a (pdf) file.
                plotresults(recipe, work_dir / figdir / basename)
                # plt.ion()

            # Checkpoint: write the dictionary to file for later use, so
            # partial progress survives an interruption.
            with open(yaml_file, 'w') as outfile:
                yaml.safe_dump(refined_dict, outfile)

    # We now write this dictionary to a file for later use.
    with open(yaml_file, 'w') as outfile:
        yaml.safe_dump(refined_dict, outfile)