Example #1
    def __init__(self, spectrum, auto_background=True, auto_add_edges=True, ll=None, GOS=None, dictionary=None):
        Model.__init__(self, spectrum)
        self._suspend_auto_fine_structure_width = False
        self.convolved = False
        self.low_loss = ll
        self.GOS = GOS
        self.edges = list()
        if dictionary is not None:
            auto_background = False
            auto_add_edges = False
            self._load_dictionary(dictionary)

        if auto_background is True:
            interactive_ns = get_interactive_ns()
            background = PowerLaw()
            background.name = "background"
            warnings.warn(
                'Adding "background" to the user namespace. '
                'This feature will be removed in HyperSpy 0.9.',
                VisibleDeprecationWarning,
            )
            interactive_ns["background"] = background
            self.append(background)

        if self.spectrum.subshells and auto_add_edges is True:
            self._add_edges_from_subshells_names()
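
The constructor above assembles a complete model from keyword arguments alone, so typical use is a single call. A minimal usage sketch, assuming s is a core-loss EELSSpectrum with its elements already declared, s_ll is its low-loss companion, and the import path matches the HyperSpy version these snippets come from:

from hyperspy.models.eelsmodel import EELSModel

# s and s_ll are assumed to exist already; GOS='hydrogenic' avoids the
# Hartree-Slater data files, which not every installation ships.
m = EELSModel(s,
              auto_background=True,  # appends the PowerLaw named "background"
              auto_add_edges=True,   # adds an edge for every entry in s.subshells
              ll=s_ll,               # low-loss spectrum for multiple-scattering convolution
              GOS='hydrogenic')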
Example #2
def _estimate_gain(ns, cs,
                   weighted=False,
                   higher_than=None,
                   plot_results=False,
                   binning=0,
                   pol_order=1):
    if binning > 0:
        factor = 2 ** binning
        remainder = np.mod(ns.shape[1], factor)
        if remainder != 0:
            ns = ns[:, remainder:]
            cs = cs[:, remainder:]
        new_shape = (ns.shape[0], ns.shape[1] // factor)  # integer division: shape entries must be ints
        ns = rebin(ns, new_shape)
        cs = rebin(cs, new_shape)

    noise = ns - cs
    variance = np.var(noise, 0)
    average = np.mean(cs, 0).squeeze()

    # Select only the values above higher_than for the calculation
    if higher_than is not None:
        sorting_index_array = np.argsort(average)
        average_sorted = average[sorting_index_array]
        average_higher_than = average_sorted > higher_than
        variance_sorted = variance.squeeze()[sorting_index_array]
        variance2fit = variance_sorted[average_higher_than]
        average2fit = average_sorted[average_higher_than]
    else:
        variance2fit = variance
        average2fit = average

    fit = np.polyfit(average2fit, variance2fit, pol_order)
    if weighted is True:
        from hyperspy._signals.spectrum import Spectrum
        from hyperspy.model import Model
        from hyperspy.components import Line
        s = Spectrum(variance2fit)
        s.axes_manager.signal_axes[0].axis = average2fit
        m = Model(s)
        l = Line()
        l.a.value = fit[1]
        l.b.value = fit[0]
        m.append(l)
        m.fit(weights=True)
        fit[0] = l.b.value
        fit[1] = l.a.value

    if plot_results is True:
        plt.figure()
        plt.scatter(average.squeeze(), variance.squeeze())
        plt.xlabel('Counts')
        plt.ylabel('Variance')
        plt.plot(average2fit, np.polyval(fit, average2fit), color='red')
    results = {'fit': fit, 'variance': variance.squeeze(),
               'counts': average.squeeze()}

    return results
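
For a Poisson-limited detector the noise variance grows linearly with the mean counts (variance = gain * counts), which is exactly the straight line that pol_order=1 fits above, so the slope of the fitted polynomial estimates the gain. A minimal sketch of driving the function with synthetic data; _estimate_gain is private, so this is purely illustrative:

import numpy as np

rng = np.random.default_rng(0)
true_gain = 2.5
# 50 repeated readings of a 256-channel ramp of noise-free counts
cs = np.tile(np.linspace(100.0, 1000.0, 256), (50, 1))
# noisy readings: Poisson statistics in quanta, scaled back to counts
ns = rng.poisson(cs / true_gain) * true_gain

results = _estimate_gain(ns, cs, higher_than=150, pol_order=1)
# results['fit'][0], the slope of variance vs counts, should be close to true_gain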
Example #3
    def test_3d_signal(self):
        # This code should run smoothly; any exception should trigger failure
        s = self.m_3d.as_signal()
        model = Model(s)
        p = hs.components.Polynomial(order=2)
        model.append(p)
        p.estimate_parameters(s, 0, 100, only_current=False)
        np.testing.assert_allclose(p.coefficients.map['values'],
                                   np.tile([0.5, 2, 3], (2, 5, 1)))
Example #4
    def __init__(self, spectrum, auto_background=True,
                 auto_add_edges=True, ll=None,
                 GOS=None, *args, **kwargs):
        Model.__init__(self, spectrum, *args, **kwargs)
        self.convolved = False
        self.low_loss = ll
        self.GOS = GOS
        if auto_background is True:
            background = PowerLaw()
            background.name = 'background'
            interactive_ns['background'] = background
            self.append(background)

        if self.spectrum.subshells and auto_add_edges is True:
            self._add_edges_from_subshells_names()
Example #5
    def __init__(self, spectrum, auto_background=True, auto_add_edges=True, ll=None, GOS=None, *args, **kwargs):
        Model.__init__(self, spectrum, *args, **kwargs)
        self._suspend_auto_fine_structure_width = False
        self.convolved = False
        self.low_loss = ll
        self.GOS = GOS
        self.edges = list()
        if auto_background is True:
            interactive_ns = get_interactive_ns()
            background = PowerLaw()
            background.name = "background"
            interactive_ns["background"] = background
            self.append(background)

        if self.spectrum.subshells and auto_add_edges is True:
            self._add_edges_from_subshells_names()
Example #6
def create_model(signal, *args, **kwargs):
    from hyperspy.signals.eels import EELSSpectrum
    from hyperspy.models.eelsmodel import EELSModel
    from hyperspy.model import Model
    if isinstance(signal, EELSSpectrum):
        return EELSModel(signal, *args, **kwargs)
    else:
        return Model(signal, *args, **kwargs)
Example #7
    def __init__(self, spectrum, auto_background=True, auto_add_edges=True,
                 ll=None, *args, **kwargs):
        Model.__init__(self, spectrum, *args, **kwargs)
        self.ll = ll

        if auto_background is True:
            bg = PowerLaw()
            interactive_ns['bg'] = bg
            self.append(bg)
        if self.ll is not None:
            self.convolved = True
            if self.experiments.convolution_axis is None:
                self.experiments.set_convolution_axis()
        else:
            self.convolved = False
        if self.spectrum.subshells and auto_add_edges is True:
            self._add_edges_from_subshells_names()
Example #8
    def create_model(self):
        """Create a model for the current data.

        Returns
        -------
        model : `Model` instance.

        """

        from hyperspy.model import Model
        model = Model(self)
        return model
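
Because the method simply delegates to the Model constructor, usage is a one-liner. A sketch assuming s is an existing HyperSpy signal and the hs alias used in Example #3:

m = s.create_model()                          # equivalent to Model(s)
m.append(hs.components.Polynomial(order=2))   # components are appended before fitting
m.fit()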
Example #9
    def __init__(self,
                 spectrum,
                 auto_background=True,
                 auto_add_edges=True,
                 ll=None,
                 *args,
                 **kwargs):
        Model.__init__(self, spectrum, *args, **kwargs)
        self.ll = ll

        if auto_background is True:
            bg = PowerLaw()
            interactive_ns['bg'] = bg
            self.append(bg)
        if self.ll is not None:
            self.convolved = True
            if self.experiments.convolution_axis is None:
                self.experiments.set_convolution_axis()
        else:
            self.convolved = False
        if self.spectrum.subshells and auto_add_edges is True:
            self._add_edges_from_subshells_names()
Example #10
def create_model(signal, *args, **kwargs):
    """Create a model object
    
    Any extra argument is passes to the Model constructor.
    
    Parameters
    ----------    
    signal: A signal class
    
    If the signal is an EELS signal the following extra parameters
    are available:
    
    auto_background : boolean
        If True, and if spectrum is an EELS instance adds automatically 
        a powerlaw to the model and estimate the parameters by the 
        two-area method.
    auto_add_edges : boolean
        If True, and if spectrum is an EELS instance, it will 
        automatically add the ionization edges as defined in the 
        Spectrum instance. Adding a new element to the spectrum using
        the components.EELSSpectrum.add_elements method automatically
        add the corresponding ionisation edges to the model.
    ll : {None, EELSSpectrum}
        If an EELSSPectrum is provided, it will be assumed that it is
        a low-loss EELS spectrum, and it will be used to simulate the 
        effect of multiple scattering by convolving it with the EELS
        spectrum.
    GOS : {'hydrogenic', 'Hartree-Slater', None}
        The GOS to use when auto adding core-loss EELS edges.
        If None it will use the Hartree-Slater GOS if 
        they are available, otherwise it will use the hydrogenic GOS.
    
    Returns
    -------
    
    A Model class
    
    """

    from hyperspy._signals.eels import EELSSpectrum
    from hyperspy.models.eelsmodel import EELSModel
    from hyperspy.model import Model
    if isinstance(signal, EELSSpectrum):
        return EELSModel(signal, *args, **kwargs)
    else:
        return Model(signal, *args, **kwargs)
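
The factory hides the EELS/generic distinction behind a single entry point, so callers never branch on the signal type themselves. A hedged sketch, assuming create_model has been imported and s is a signal instance:

# If s is an EELSSpectrum this returns an EELSModel and honours the
# EELS-only keyword arguments documented above; for any other signal
# it falls back to a plain Model, whose constructor would reject them.
m = create_model(s, auto_background=True, auto_add_edges=True)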
Example #11
    def fit(
        self,
        fitter=None,
        method="ls",
        grad=False,
        bounded=False,
        ext_bounding=False,
        update_plot=False,
        kind="std",
        **kwargs
    ):
        """Fits the model to the experimental data

        Parameters
        ----------
        fitter : {None, "leastsq", "odr", "mpfit", "fmin"}
            The optimizer to perform the fitting. If None the fitter
            defined in the Preferences is used. leastsq is the most
            stable but it does not support bounding. mpfit supports
            bounding. fmin is the only one that supports
            maximum likelihood estimation, but it is less robust than
            the Levenberg–Marquardt based leastsq and mpfit, and it is
            better to use it after one of them to refine the estimation.
        method : {'ls', 'ml'}
            Choose 'ls' (default) for least squares and 'ml' for
            maximum-likelihood estimation. The latter only works with
            fitter = 'fmin'.
        grad : bool
            If True, the analytical gradient is used if defined to
            speed up the estimation.
        ext_bounding : bool
            If True, enforces bounding by keeping the value of the
            parameters constant outside the defined bounding area.
        bounded : bool
            If True, performs bounded optimization if the fitter
            supports it. Currently only mpfit supports bounding.
        update_plot : bool
            If True, the plot is updated during the optimization
            process. It slows down the optimization but makes it
            possible to visualize the evolution of the fit.
        kind : {'std', 'smart'}
            If 'std' (default), performs a standard fit. If 'smart',
            performs a smart fit via smart_fit.

        **kwargs : keyword arguments
            Any extra keyword argument is passed to the chosen
            fitter.

        See Also
        --------
        multifit, smart_fit

        """
        if kind == "smart":
            self.smart_fit(
                fitter=fitter,
                method=method,
                grad=grad,
                bounded=bounded,
                ext_bounding=ext_bounding,
                update_plot=update_plot,
                **kwargs
            )
        elif kind == "std":
            Model.fit(
                self,
                fitter=fitter,
                method=method,
                grad=grad,
                bounded=bounded,
                ext_bounding=ext_bounding,
                update_plot=update_plot,
                **kwargs
            )
        else:
            raise ValueError("kind must be either 'std' or 'smart'." "'%s' provided." % kind)
Example #12
    def fit(self, *args, **kwargs):
        if 'kind' in kwargs and kwargs['kind'] == 'smart':
            self.smart_fit(*args, **kwargs)
        else:
            Model.fit(self, *args, **kwargs)
Example #13
    def fit(self,
            fitter=None,
            method='ls',
            grad=False,
            bounded=False,
            ext_bounding=False,
            update_plot=False,
            kind='std',
            **kwargs):
        """Fits the model to the experimental data

        Parameters
        ----------
        fitter : {None, "leastsq", "odr", "mpfit", "fmin"}
            The optimizer to perform the fitting. If None the fitter
            defined in the Preferences is used. leastsq is the most
            stable but it does not support bounding. mpfit supports
            bounding. fmin is the only one that supports
            maximum likelihood estimation, but it is less robust than
            the Levenberg–Marquardt based leastsq and mpfit, and it is
            better to use it after one of them to refine the estimation.
        method : {'ls', 'ml'}
            Choose 'ls' (default) for least squares and 'ml' for
            maximum-likelihood estimation. The latter only works with
            fitter = 'fmin'.
        grad : bool
            If True, the analytical gradient is used if defined to
            speed up the estimation.
        ext_bounding : bool
            If True, enforces bounding by keeping the value of the
            parameters constant outside the defined bounding area.
        bounded : bool
            If True, performs bounded optimization if the fitter
            supports it. Currently only mpfit supports bounding.
        update_plot : bool
            If True, the plot is updated during the optimization
            process. It slows down the optimization but makes it
            possible to visualize the evolution of the fit.
        kind : {'std', 'smart'}
            If 'std' (default), performs a standard fit. If 'smart',
            performs a smart fit via smart_fit.

        **kwargs : keyword arguments
            Any extra keyword argument is passed to the chosen
            fitter.

        See Also
        --------
        multifit, smart_fit

        """
        if kind == 'smart':
            self.smart_fit(fitter=fitter,
                           method=method,
                           grad=grad,
                           bounded=bounded,
                           ext_bounding=ext_bounding,
                           update_plot=update_plot,
                           **kwargs)
        elif kind == 'std':
            Model.fit(self,
                      fitter=fitter,
                      method=method,
                      grad=grad,
                      bounded=bounded,
                      ext_bounding=ext_bounding,
                      update_plot=update_plot,
                      **kwargs)
        else:
            raise ValueError("kind must be either 'std' or 'smart'. "
                             "'%s' provided." % kind)
Example #14
def _estimate_gain(ns,
                   cs,
                   weighted=False,
                   higher_than=None,
                   plot_results=False,
                   binning=0,
                   pol_order=1):
    if binning > 0:
        factor = 2**binning
        remainder = np.mod(ns.shape[1], factor)
        if remainder != 0:
            ns = ns[:, remainder:]
            cs = cs[:, remainder:]
        new_shape = (ns.shape[0], ns.shape[1] // factor)  # integer division: shape entries must be ints
        ns = rebin(ns, new_shape)
        cs = rebin(cs, new_shape)

    noise = ns - cs
    variance = np.var(noise, 0)
    average = np.mean(cs, 0).squeeze()

    # Select only the values above higher_than for the calculation
    if higher_than is not None:
        sorting_index_array = np.argsort(average)
        average_sorted = average[sorting_index_array]
        average_higher_than = average_sorted > higher_than
        variance_sorted = variance.squeeze()[sorting_index_array]
        variance2fit = variance_sorted[average_higher_than]
        average2fit = average_sorted[average_higher_than]
    else:
        variance2fit = variance
        average2fit = average

    fit = np.polyfit(average2fit, variance2fit, pol_order)
    if weighted is True:
        from hyperspy._signals.spectrum import Spectrum
        from hyperspy.model import Model
        from hyperspy.components import Line
        s = Spectrum(variance2fit)
        s.axes_manager.signal_axes[0].axis = average2fit
        m = Model(s)
        l = Line()
        l.a.value = fit[1]
        l.b.value = fit[0]
        m.append(l)
        m.fit(weights=True)
        fit[0] = l.b.value
        fit[1] = l.a.value

    if plot_results is True:
        plt.figure()
        plt.scatter(average.squeeze(), variance.squeeze())
        plt.xlabel('Counts')
        plt.ylabel('Variance')
        plt.plot(average2fit, np.polyval(fit, average2fit), color='red')
    results = {
        'fit': fit,
        'variance': variance.squeeze(),
        'counts': average.squeeze()
    }

    return results
Example #15
    def fit(self, fitter=None, method='ls', grad=False, weights=None,
            bounded=False, ext_bounding=False, update_plot=False,
            kind='std', **kwargs):
        """Fits the model to the experimental data

        Parameters
        ----------
        fitter : {None, "leastsq", "odr", "mpfit", "fmin"}
            The optimizer to perform the fitting. If None the fitter
            defined in the Preferences is used. leastsq is the most
            stable but it does not support bounding. mpfit supports
            bounding. fmin is the only one that supports
            maximum likelihood estimation, but it is less robust than
            the Levenberg–Marquardt based leastsq and mpfit, and it is
            better to use it after one of them to refine the estimation.
        method : {'ls', 'ml'}
            Choose 'ls' (default) for least squares and 'ml' for
            maximum-likelihood estimation. The latter only works with
            fitter = 'fmin'.
        grad : bool
            If True, the analytical gradient is used if defined to
            speed up the estimation.
        weights : {None, True, numpy.array}
            If None, performs standard least squares. If True,
            performs weighted least squares where the weights are
            calculated using
            spectrum.Spectrum.estimate_poissonian_noise_variance.
            Alternatively, external weights can be supplied by passing
            a weights array of the same dimensions as the signal.
        ext_bounding : bool
            If True, enforces bounding by keeping the value of the
            parameters constant outside the defined bounding area.
        bounded : bool
            If True, performs bounded optimization if the fitter
            supports it. Currently only mpfit supports bounding.
        update_plot : bool
            If True, the plot is updated during the optimization
            process. It slows down the optimization but makes it
            possible to visualize the evolution of the fit.
        kind : {'std', 'smart'}
            If 'std' (default), performs a standard fit. If 'smart',
            performs a smart fit via smart_fit.

        **kwargs : keyword arguments
            Any extra keyword argument is passed to the chosen
            fitter.

        See Also
        --------
        multifit, smart_fit

        """
        if kind == 'smart':
            self.smart_fit(fitter=fitter,
                           method=method,
                           grad=grad,
                           weights=weights,
                           bounded=bounded,
                           ext_bounding=ext_bounding,
                           update_plot=update_plot,
                           **kwargs)
        elif kind == 'std':
            Model.fit(self,
                      fitter=fitter,
                      method=method,
                      grad=grad,
                      weights=weights,
                      bounded=bounded,
                      ext_bounding=ext_bounding,
                      update_plot=update_plot,
                      **kwargs)
        else:
            raise ValueError("kind must be either 'std' or 'smart'. "
                             "'%s' provided." % kind)
Example #16
    def fit(self, *args, **kwargs):
        if "kind" in kwargs and kwargs["kind"] == "smart":
            self.smart_fit(*args, **kwargs)
        else:
            Model.fit(self, *args, **kwargs)