def get_hkl(self, x_array: np.ndarray = None, idx=0, phase_name=None, encoded_name=False) -> dict:
    # Do we need to re-run the calculation to get the HKLs?
    do_run = False
    old_x = self.additional_data.get("ivar", np.array(()))
    if not np.array_equal(old_x, x_array):
        do_run = True
    if do_run and x_array is not None:
        _ = self.calculate(x_array)
    # Collate and return
    if phase_name is None:
        known_phases = list(self.known_phases.values())
        phase_name = known_phases[idx]
    phase_data = self.additional_data.get(phase_name, {})
    return phase_data.get(
        "hkl",
        {
            "ttheta": np.array([]),
            "h": np.array([]),
            "k": np.array([]),
            "l": np.array([]),
        },
    )
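# A minimal standalone sketch of the caching guard above (names here are
# illustrative, not part of the real interface): a recalculation is only
# triggered when the cached independent variable differs from the new one.
import numpy as np

def needs_rerun(cached_x, new_x) -> bool:
    # np.array_equal compares shape and contents in one call
    return not np.array_equal(cached_x, new_x)

assert needs_rerun(np.array(()), np.linspace(0, 10, 11))
assert not needs_rerun(np.linspace(0, 10, 11), np.linspace(0, 10, 11))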
def polarized_update(func, crystals, profiles, peak_dat, scales, x_str):
    up = np.array([profile.intensity_up_total for profile in profiles])
    down = np.array([profile.intensity_down_total for profile in profiles])
    dependent = np.array([func(u, d) for u, d in zip(up, down)])

    output = {}
    for idx, profile in enumerate(profiles):
        output.update({
            crystals[idx].data_name: {
                "hkl": {
                    x_str: getattr(peak_dat[idx], "numpy_" + x_str),
                    "h": peak_dat[idx].numpy_index_h,
                    "k": peak_dat[idx].numpy_index_k,
                    "l": peak_dat[idx].numpy_index_l,
                },
                # NOTE: `normalization` is assumed to be defined at module
                # scope alongside this helper.
                "profile": scales[idx] * dependent[idx, :] / normalization,
                "components": {
                    "total": dependent[idx, :],
                    "up": up[idx, :],
                    "down": down[idx, :],
                },
                "profile_scale": scales[idx],
                "func": func,
            }
        })
    return dependent, output
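# Hypothetical usage sketch: `func` combines the spin-up and spin-down
# channels element-wise. Typical choices (assumed here, not taken from the
# source) are the channel sum and the polarisation difference:
import numpy as np

up_plus_down = lambda u, d: u + d
up_minus_down = lambda u, d: u - d

u, d = np.array([4.0, 2.0]), np.array([1.0, 2.0])
assert np.array_equal(up_plus_down(u, d), np.array([5.0, 4.0]))
assert np.array_equal(up_minus_down(u, d), np.array([3.0, 0.0]))
# e.g. dependent, output = polarized_update(up_plus_down, crystals, profiles,
#                                           peak_dat, scales, "ttheta")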
def get_parameters(self) -> List[Parameter]:
    """
    Redefine get_parameters so that the returned values are in the correct order
    """
    list_pars = np.array(super(PointBackground, self).get_parameters())
    idx = np.array([item.x.raw_value for item in self]).argsort()
    return list_pars[idx].tolist()
def __init__(self, name: str = 'Series',
             x: Union[np.ndarray, list] = None,
             y: Union[np.ndarray, list] = None,
             e: Union[np.ndarray, list] = None,
             data_type: str = 'simulation',
             x_label: str = 'x', y_label: str = 'y'):
    if not isinstance(data_type, str):
        raise AttributeError('data_type must be a string')
    self._datatype = None
    self.data_type = data_type
    if x is None:
        x = np.array([])
    if y is None:
        y = np.array([])
    if e is None:
        e = np.zeros_like(x)
    self.name = name
    if not isinstance(x, np.ndarray):
        x = np.array(x)
    if not isinstance(y, np.ndarray):
        y = np.array(y)
    self.x = x
    self.y = y
    self.e = e
    self.x_label = x_label
    self.y_label = y_label
    self._color = None
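# Hypothetical construction sketch; the owning class is not named in this
# snippet, so `Series1D` below is a stand-in. It shows the coercion rules:
# lists become numpy arrays and `e` defaults to zeros shaped like `x`.
# s = Series1D(name='sim', x=[0, 1, 2], y=[1.0, 4.0, 9.0], data_type='simulation')
# assert isinstance(s.x, np.ndarray) and np.array_equal(s.e, np.zeros(3))
import numpy as np
# np.zeros_like accepts a list as well as an array, so the default `e` works
# even before `x` has been coerced:
assert np.array_equal(np.zeros_like([0, 1, 2]), np.zeros(3, dtype=int))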
def __index_contents(self):
    """
    Index the contents
    """
    x = np.array([item.power.raw_value for item in self])
    idx = x.argsort()
    y = np.array([item.amp.raw_value for item in self])
    self._sorted_self = {'idx': idx, 'power': x[idx], 'amp': y[idx]}
def y_sorted_points(self) -> np.ndarray:
    """
    Get the stored y-values based on the sorted x-values

    :return: Sorted y-values
    :rtype: np.ndarray
    """
    idx = np.array([item.x.raw_value for item in self]).argsort()
    y = np.array([item.y.raw_value for item in self])
    return y[idx]
def _setAsXml(self):
    start_time = timeit.default_timer()
    if self._background_as_obj is None:
        self._background_as_xml = dicttoxml({}, attr_type=False).decode()
    else:
        background = np.array([item.as_dict() for item in self._background_as_obj])
        point_index = np.array([item.x.raw_value for item in self._background_as_obj]).argsort()
        self._background_as_xml = dicttoxml(background[point_index], attr_type=False).decode()
    print("+ _setAsXml: {0:.3f} s".format(timeit.default_timer() - start_time))
    self.asXmlChanged.emit()
def nonPolarized_update(crystal_name, diffraction_pattern, reflection_list, job_info, scales=1):
    dependent = diffraction_pattern.ycalc
    hkltth = np.array([[*reflection_list[i].hkl, reflection_list[i].stl]
                       for i in range(reflection_list.nref)])
    output = {
        crystal_name: {
            "hkl": {
                # `stl` is sin(theta)/lambda, so Bragg's law gives
                # 2*theta = 2*arcsin(stl * lambda)
                "ttheta": np.rad2deg(np.arcsin(hkltth[:, 3] * job_info.lambdas[0])) * 2,
                "h": hkltth[:, 0],
                "k": hkltth[:, 1],
                "l": hkltth[:, 2],
            },
            "profile": scales * dependent,
            "components": {
                "total": dependent,
            },
            "profile_scale": scales,
        }
    }
    return dependent, output
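# Standalone check of the two-theta conversion above. The calculator reports
# each reflection's sin(theta)/lambda (`stl`), and Bragg's law gives
# 2*theta = 2*arcsin(stl * lambda). The numbers below are illustrative only.
import numpy as np

stl = np.array([0.1280, 0.1810])   # sin(theta)/lambda in 1/angstrom
lam = 1.54056                      # Cu K-alpha1 wavelength in angstrom
ttheta = np.rad2deg(np.arcsin(stl * lam)) * 2
print(ttheta)                      # approximately [22.7, 32.4] degrees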
def x_sorted_points(self) -> np.ndarray:
    """
    Get the stored x-values as a sorted array

    :return: Sorted x-values
    :rtype: np.ndarray
    """
    x = np.array([item.x.raw_value for item in self])
    x.sort()
    return x
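# Minimal numpy sketch of the sorted-access pattern shared by x_sorted_points
# and y_sorted_points: argsort on x gives an index that keeps the (x, y) pairs
# aligned, which an in-place x.sort() alone would not.
import numpy as np

x = np.array([3.0, 1.0, 2.0])
y = np.array([30.0, 10.0, 20.0])
idx = x.argsort()
assert np.array_equal(x[idx], np.array([1.0, 2.0, 3.0]))
assert np.array_equal(y[idx], np.array([10.0, 20.0, 30.0]))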
def _parse_operator(self, obj: V, *args, **kwargs) -> Number:
    value = obj.raw_value
    if isinstance(value, list):
        value = np.array(value)
    self.aeval.symtable["value1"] = value
    self.aeval.symtable["value2"] = self.value
    try:
        self.aeval.eval(f"value3 = value1 {self.operator} value2")
        logic = self.aeval.symtable["value3"]
        if isinstance(logic, np.ndarray):
            # Use element-wise negation (`~`); Python's `not` is invalid
            # on a boolean array
            value[~logic] = self.aeval.symtable["value2"]
        else:
            if not logic:
                value = self.aeval.symtable["value2"]
    except Exception as e:
        raise e
    finally:
        self.aeval.symtable.clear()
    return value
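# Standalone illustration of the array branch above: where the element-wise
# comparison fails, the offending entries are clamped to the constraint value.
import numpy as np

value = np.array([0.5, 1.5, 2.5])
limit = 2.0
logic = value < limit        # what `value1 < value2` evaluates to for arrays
value[~logic] = limit        # `~` negates element-wise, unlike Python's `not`
assert np.array_equal(value, np.array([0.5, 1.5, 2.0]))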
def nonPolarized_update(crystals, profiles, peak_dat, scales, x_str):
    dependent = np.array([profile.intensity_total for profile in profiles])

    output = {}
    for idx, profile in enumerate(profiles):
        output.update({
            crystals[idx].data_name: {
                "hkl": {
                    x_str: getattr(peak_dat[idx], "numpy_" + x_str),
                    "h": peak_dat[idx].numpy_index_h,
                    "k": peak_dat[idx].numpy_index_k,
                    "l": peak_dat[idx].numpy_index_l,
                },
                # NOTE: `normalization` is assumed to be defined at module
                # scope alongside this helper.
                "profile": scales[idx] * dependent[idx, :] / normalization,
                "components": {
                    "total": dependent[idx, :],
                },
                "profile_scale": scales[idx],
            }
        })
    return dependent, output
def _feature_generator(radiation='N', exp_type='CW', sample_type='powder', dimensionality='1D'):
    radiation_options = exp_type_strings['radiation_options']
    if radiation not in radiation_options:
        raise AttributeError(f'"{radiation}" is not supported, only: {radiation_options}')
    exp_type_options = exp_type_strings['exp_type_options']
    if exp_type not in exp_type_options:
        raise AttributeError(f'"{exp_type}" is not supported, only: {exp_type_options}')
    dimensional_options = exp_type_strings['dimensional_options']
    if dimensionality not in dimensional_options:
        raise AttributeError(f'"{dimensionality}" is not supported, only: {dimensional_options}')
    sample_options = exp_type_strings['sample_options']
    if sample_type not in sample_options:
        raise AttributeError(f'"{sample_type}" is not supported, only: {sample_options}')
    # Build every "radiation + sample + dimensionality + exp_type" combination
    # and mark them all as unsupported by default.
    features = [''.join(item) for item in np.array(
        np.meshgrid(radiation_options, sample_options,
                    dimensional_options, exp_type_options)
    ).T.reshape(-1, len(exp_type_strings)).tolist()]
    feature_dict = dict.fromkeys(features, False)
    return feature_dict
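# Hypothetical shape of the module-level `exp_type_strings` table assumed by
# _feature_generator; the real option lists may differ. With two entries per
# category the meshgrid produces 2**4 = 16 feature keys such as 'Npowder1DCW',
# all initialised to False until a calculator flags support.
import numpy as np

exp_type_strings_example = {
    'radiation_options': ['N', 'X'],
    'exp_type_options': ['CW', 'TOF'],
    'sample_options': ['powder', 'single'],
    'dimensional_options': ['1D', '2D'],
}
opts = exp_type_strings_example
features = [''.join(item) for item in np.array(
    np.meshgrid(opts['radiation_options'], opts['sample_options'],
                opts['dimensional_options'], opts['exp_type_options'])
).T.reshape(-1, 4).tolist()]
assert len(features) == 16 and 'Npowder1DCW' in features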
import numpy as np
import matplotlib.pyplot as plt

# Assumes InterfaceFactory, Phases, Sample and Instrument1DCWParameters have
# already been imported from the diffraction library in use.
i = InterfaceFactory()
c = Phases.from_cif_file('tests/SrTiO3.cif')
S = Sample(phases=c, parameters=Instrument1DCWParameters.default(), interface=i)

x_data = np.linspace(5, 150, 10000)
y_data = i.fit_func(x_data)

i.switch('CrysPy')
S._updateInterface()
y_data2 = np.array(i.fit_func(x_data))

fig = plt.figure()
axprops = dict()
ax1 = fig.add_axes([0.1, 0.5, 0.8, 0.4], **axprops)
ax1.plot(x_data, y_data, label="CrysFML")
ax1.legend()

# Force the x-axes to remain in register, even with toolbar navigation
axprops['sharex'] = ax1
# axprops['sharey'] = ax1

ax2 = fig.add_axes([0.1, 0.1, 0.8, 0.4], **axprops)
ax2.plot(x_data, y_data2, label="Cryspy")
ax2.legend()

fig.show()
fig.savefig('CFML_Cryspy.png')
def fit(self, fitter, *args, fit_kwargs: dict = None, fn_kwargs: dict = None,
        vectorize: bool = False, dask: str = 'forbidden', **kwargs) -> FitResults:
    """
    Perform a fit on the given DataArray. This fit utilises a given fitter from
    `easyCore.Fitting.Fitter`, though there are a few differences to a standard
    easyCore fit. In particular, key-word arguments to control the optimisation
    algorithm go in the `fit_kwargs` dictionary, fit function key-word arguments
    go in `fn_kwargs`, and remaining key-word arguments control the
    `xarray.apply_ufunc` function.

    :param fitter: Fitting object which controls the fitting
    :type fitter: easyCore.Fitting.Fitter
    :param args: Arguments to go to the fit function
    :type args: Any
    :param dask: Dask control string. See `xarray.apply_ufunc` documentation
    :type dask: str
    :param fit_kwargs: Dictionary of key-word arguments to be supplied to the fitting control
    :type fit_kwargs: dict
    :param fn_kwargs: Dictionary of key-words to be supplied to the fit function
    :type fn_kwargs: dict
    :param vectorize: Should the fit function be given dependents in a single object or split
    :type vectorize: bool
    :param kwargs: Key-word arguments for `xarray.apply_ufunc`. See `xarray.apply_ufunc` documentation
    :type kwargs: Any
    :return: Results of the fit
    :rtype: FitResults
    """
    # Deal with any kwargs which have been given
    if fn_kwargs is None:
        fn_kwargs = {}
    if fit_kwargs is None:
        fit_kwargs = {}
    old_fit_func = fitter.fit_function

    # Wrap and broadcast
    bdims, f = self.fit_prep(fitter.fit_function)
    dims = self._obj.dims

    # Find which coords we need
    if isinstance(dims, dict):
        dims = list(dims.keys())

    # Wrap the wrap in a callable
    def local_fit_func(x, *args, **kwargs):
        """
        Function which will be called by the fitter. This will deal with
        sending the function the correct data.
        """
        kwargs['vectorize'] = vectorize
        res = xr.apply_ufunc(f, *bdims, *args, dask=dask, kwargs=fn_kwargs, **kwargs)
        if dask != 'forbidden':
            res.compute()
        return res.stack(all_x=dims)

    # Set the new callable on the fitter and initialize
    fitter.initialize(fitter.fit_object, local_fit_func)

    # Make an easyCore.Fitting.Fitter compatible `x`
    x_for_fit = xr.concat(bdims, dim='fit_dim')
    x_for_fit = x_for_fit.stack(all_x=[d.name for d in bdims])
    try:
        # Deal with any sigmas if supplied
        if fit_kwargs.get('weights', None) is not None:
            fit_kwargs['weights'] = xr.DataArray(
                np.array(fit_kwargs['weights']),
                dims=['all_x'],
                coords={'all_x': x_for_fit.all_x})
        # Try to perform a fit
        f_res = fitter.fit(x_for_fit, self._obj.stack(all_x=dims), **fit_kwargs)
        f_res = check_sanity_single(f_res)
    finally:
        # Reset the fit function on the fitter to the old fit function.
        fitter.fit_function = old_fit_func
    return f_res
def add_variable(self, variable_name,
                 variable_coordinates: Union[str, List[str]],
                 variable_values: Union[List[T_], np.ndarray],
                 variable_sigma: Union[List[T_], np.ndarray] = None,
                 unit: str = '', auto_sigma: bool = False):
    """
    Create a DataArray from known coordinates and data, and assign it to the
    dataset under a given name. Variances can be calculated assuming a gaussian
    distribution to 1 sigma.

    :param variable_name: Name of the DataArray which will be created and added to the dataset
    :type variable_name: str
    :param variable_coordinates: List of coordinates used in the supplied data array.
    :type variable_coordinates: str, List[str]
    :param variable_values: Numpy array or list of data which will be assigned to the DataArray
    :type variable_values: Union[numpy.ndarray, list]
    :param variable_sigma: If the sigmas of the dataset are known, they can be supplied here.
    :type variable_sigma: Union[numpy.ndarray, list]
    :param unit: Unit associated with the DataArray
    :type unit: str
    :param auto_sigma: Should the sigma DataArray be automatically calculated assuming gaussian probability?
    :type auto_sigma: bool
    :return: None
    :rtype: None
    """
    # If the user supplied a single coordinate as a string, make it a list of strings
    if isinstance(variable_coordinates, str):
        variable_coordinates = [variable_coordinates]
    # The variable_coordinates can be any iterable object, though we assume list/tuple
    if not isinstance(variable_coordinates, Iterable):
        raise ValueError('The variable coordinates must be a list of strings')
    # Check whether the user wants to assign a coordinate which does not exist yet
    known_keys = self._obj.coords.keys()
    for dimension in variable_coordinates:
        if dimension not in known_keys:
            raise ValueError(f'The supplied coordinate `{dimension}` must first be defined.')
    # Create the DataArray
    self._obj[variable_name] = (variable_coordinates, variable_values)
    # Deal with sigmas
    if variable_sigma is not None:
        # CASE 1: the user has supplied sigmas
        if isinstance(variable_sigma, Callable):
            # CASE 1-1: the sigmas are created by some kind of generator
            self.sigma_generator(variable_name, variable_sigma)
        elif isinstance(variable_sigma, np.ndarray):
            # CASE 1-2: the sigmas are a numpy array
            self.sigma_attach(variable_name, variable_sigma)
        elif isinstance(variable_sigma, list):
            # CASE 1-3: we have been given a list; make it a numpy array
            self.sigma_attach(variable_name, np.array(variable_sigma))
        else:
            raise ValueError('User-supplied sigmas must be a callable, a numpy array or a list')
    else:
        # CASE 2: no sigmas have been supplied
        if auto_sigma:
            # CASE 2-1: automatically generate the sigmas using gaussian probability
            self.sigma_generator(variable_name)
    # Set units for the newly created DataArray
    self._obj.attrs['units'][variable_name] = ureg.Unit(unit)
    # If a sigma has been attached, attempt to work out its units
    if unit and variable_sigma is None and auto_sigma:
        self._obj.attrs['units'][self.sigma_label_prefix + variable_name] = ureg.Unit(unit + ' ** 0.5')
    elif auto_sigma:
        self._obj.attrs['units'][self.sigma_label_prefix + variable_name] = ureg.Unit('')
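# Hypothetical usage sketch, assuming this accessor is registered on
# xarray.Dataset under the name `easyCore` and that a companion
# `add_coordinate` method exists (both are assumptions about the surrounding
# library, not shown in this snippet):
# import numpy as np, xarray as xr
# ds = xr.Dataset()
# ds.easyCore.add_coordinate('x', np.linspace(0, 10, 101))
# ds.easyCore.add_variable('y', 'x', np.sin(np.linspace(0, 10, 101)),
#                          unit='dimensionless', auto_sigma=False)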
def _loadProject(self):
    """
    Load a project from the file at `self.project_load_filepath`.
    """
    path = generalizePath(self.project_load_filepath)
    if not os.path.isfile(path):
        print("Failed to find project: '{0}'".format(path))
        return
    with open(path, 'r') as xml_file:
        descr: dict = json.load(xml_file)

    interface_name = descr.get('interface', None)
    if interface_name is not None:
        old_interface_name = self._interface.current_interface_name
        if old_interface_name != interface_name:
            self._interface.switch(interface_name)

    self._sample = Sample.from_dict(descr['sample'])
    self._sample.interface = self._interface
    # Send signals to tell the proxy we changed phases
    self.phasesEnabled.emit()
    self.phasesAsObjChanged.emit()
    self.structureParametersChanged.emit()

    # Experiment
    if 'experiments' in descr:
        self.experimentLoaded(True)
        self.experimentSkipped(False)
        self._data.experiments[0].x = np.array(descr['experiments'][0])
        self._data.experiments[0].y = np.array(descr['experiments'][1])
        self._data.experiments[0].e = np.array(descr['experiments'][2])
        self._experiment_data = self._data.experiments[0]
        self.experiments = [{'name': descr['project_info']['experiments']}]
        self.setCurrentExperimentDatasetName(descr['project_info']['experiments'])

        # Send signals to tell the proxy we changed the experiment
        self.experimentDataAdded.emit()
        self.parametersChanged.emit()
        self.experimentLoadedChanged.emit()
    else:
        # Delete the existing experiment
        self.removeExperiment()
        self.experimentLoaded(False)
        if descr['experiment_skipped']:
            self.experimentSkipped(True)
            self.experimentSkippedChanged.emit()

    # Project info
    self._project_info = descr['project_info']

    new_minimizer_settings = descr.get('minimizer', None)
    if new_minimizer_settings is not None:
        new_engine = new_minimizer_settings['engine']
        new_method = new_minimizer_settings['method']
        new_engine_index = self.parent.minimizerNames().index(new_engine)
        self.currentMinimizerIndex.emit(new_engine_index)
        new_method_index = self.parent.minimizerMethodNames().index(new_method)
        self.currentMinimizerMethodIndex.emit(new_method_index)

    self.parent.fitLogic.fitter.fit_object = self._sample
    self.resetUndoRedoStack.emit()
    self.setProjectCreated(True)