Example 1
    def __init__(self):
        r"""The axial normal PDF is a Normal distribution wrapped around 0 and :math:`\pi`.

        Its PDF is given by:

        .. math::

            f(\theta; a, b) = \frac{\cosh(a\sin \theta + b\cos \theta)}{\pi I_{0}(\sqrt{a^{2} + b^{2}})}

        where in this implementation :math:`a` and :math:`b` are parameterized with the input variables
        :math:`\mu` and :math:`\sigma` using:

        .. math::

            \begin{align*}
            \kappa &= \frac{1}{\sigma^{2}} \\
            a &= \kappa \sin \mu \\
            b &= \kappa \cos \mu
            \end{align*}

        References:
            Barry C. Arnold, Ashis SenGupta (2006). Probability distributions and statistical inference for axial data.
            Environmental and Ecological Statistics, volume 13, issue 3, pages 271-285.
        """
        from mot.model_building.cl_functions.parameters import FreeParameter
        from mot.model_building.cl_functions.library_functions import Bessel, Trigonometrics

        params = [
            FreeParameter(SimpleCLDataType.from_string('mot_float_type'),
                          'mu',
                          True,
                          0,
                          -np.inf,
                          np.inf,
                          sampling_prior=AlwaysOne()),
            FreeParameter(SimpleCLDataType.from_string('mot_float_type'),
                          'sigma',
                          True,
                          1,
                          -np.inf,
                          np.inf,
                          sampling_prior=AlwaysOne())
        ]

        super(AxialNormalPDF,
              self).__init__('''
                float kappa = 1.0 / pown(sigma, 2);
                float a = kappa * sin(mu);
                float b = kappa * cos(mu);

                return exp(log_cosh(a * sin(value) + b * cos(value))
                            - log_bessel_i0(sqrt(pown(a, 2) + pown(b, 2)))
                            - log(M_PI) );
            ''',
                             'axial_normal_pdf',
                             params,
                             cl_preamble=Bessel().get_cl_code() + '\n' +
                             Trigonometrics().get_cl_code())
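The docstring above defines the density purely in terms of mu and sigma; as a sanity check, here is a minimal standalone NumPy sketch (not MOT code, the helper name is only illustrative) that mirrors the same parameterization and verifies that the density integrates to one over [0, pi]:

import numpy as np
from scipy.integrate import trapezoid
from scipy.special import i0e

def axial_normal_pdf(theta, mu, sigma):
    # parameterization from the docstring: kappa = 1/sigma^2, a = kappa*sin(mu), b = kappa*cos(mu)
    kappa = 1.0 / sigma ** 2
    a = kappa * np.sin(mu)
    b = kappa * np.cos(mu)
    x = a * np.sin(theta) + b * np.cos(theta)
    z = np.hypot(a, b)  # sqrt(a^2 + b^2)
    # log(cosh(x)) and log(I0(z)) computed stably, mirroring the
    # exp(log_cosh(...) - log_bessel_i0(...)) trick in the CL code above
    log_cosh = np.abs(x) + np.log1p(np.exp(-2 * np.abs(x))) - np.log(2)
    log_i0 = np.log(i0e(z)) + z
    return np.exp(log_cosh - log_i0 - np.log(np.pi))

theta = np.linspace(0, np.pi, 1001)
print(trapezoid(axial_normal_pdf(theta, mu=0.5, sigma=0.3), theta))  # ~1.0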
Example 2
    def get_initial_parameters(self, previous_results=None):
        params = np.ones((1, self.n)) * 3

        if isinstance(previous_results, np.ndarray):
            previous_results = results_to_dict(previous_results, self.get_free_param_names())

        if previous_results:
            for i in range(self.n):
                if i in previous_results:
                    params[0, i] = previous_results[i]
        return SimpleDataAdapter(params, SimpleCLDataType.from_string('double'),
                                 SimpleCLDataType.from_string('double')).get_opencl_data()
Example 3
    def __init__(self,
                 name='Weight',
                 param_name='w',
                 value=0.5,
                 lower_bound=0.0,
                 upper_bound=1.0,
                 parameter_kwargs=None):
        """A class that by itself defines the notion of a Weight.

        Some of the code checks for the type Weight; be sure to use this model function if you want to represent a Weight.

        A weight is meant to be a model volume fraction.

        Args:
            name (str): The name of the model
            param_name (str): The name of the single free parameter of this function
            value (number or ndarray): The initial value for the single free parameter of this function.
            lower_bound (number or ndarray): The initial lower bound for the single free parameter of this function.
            upper_bound (number or ndarray): The initial upper bound for the single free parameter of this function.
            parameter_kwargs (dict): additional settings for the parameter initialization
        """
        parameter_settings = dict(
            parameter_transform=CosSqrClampTransform(),
            sampling_proposal_std=0.01,
            sampling_prior=UniformWithinBoundsPrior(),
            numdiff_info=SimpleNumDiffInfo(scale_factor=10))
        parameter_settings.update(parameter_kwargs or {})

        super(SimpleWeight, self).__init__(
            'mot_float_type', name, 'Weight',
            (FreeParameter(SimpleCLDataType.from_string('mot_float_type'),
                           param_name, False, value, lower_bound, upper_bound,
                           **parameter_settings), ),
            'return ' + param_name + ';')
Example 4
File: priors.py Project: amrka/MDT
    def __init__(self):
        """This is a Gaussian prior meant for use in Automatic Relevance Detection sampling.

        This uses a Gaussian prior with mean at zero and a standard deviation determined by the ``alpha`` parameter
        with the relationship :math:`\sigma = 1/\sqrt{\alpha}`.
        """
        from mdt.model_building.parameters import FreeParameter
        extra_params = [
            FreeParameter(SimpleCLDataType.from_string('mot_float_type'),
                          'alpha',
                          False,
                          8,
                          1e-5,
                          1e4,
                          sampling_prior=UniformWithinBoundsPrior(),
                          sampling_proposal_std=20)
        ]

        body = '''
            if(value < 0 || value > 1){
                return 0;
            }
            mot_float_type sigma = 1.0/sqrt(alpha);
            return exp(-pown(value, 2) / (2 * pown(sigma, 2))) / (sigma * sqrt(2 * M_PI));
        '''
        super(ARDGaussian, self).__init__('ard_gaussian_pdf', body, extra_params)
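To illustrate the sigma = 1/sqrt(alpha) relationship described above, the following standalone NumPy sketch (not MDT code; the helper name is made up) evaluates the same truncated Gaussian body for two values of alpha:

import numpy as np

def ard_gaussian_prior(value, alpha):
    # sigma is determined by alpha via sigma = 1/sqrt(alpha), as in the docstring above
    sigma = 1.0 / np.sqrt(alpha)
    density = np.exp(-value ** 2 / (2 * sigma ** 2)) / (sigma * np.sqrt(2 * np.pi))
    # the CL body returns 0 outside [0, 1]
    return np.where((value < 0) | (value > 1), 0.0, density)

weights = np.array([0.0, 0.1, 0.5])
print(ard_gaussian_prior(weights, alpha=8.0))    # relatively flat over small weights
print(ard_gaussian_prior(weights, alpha=100.0))  # mass concentrated near zero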
Example 5
File: priors.py Project: amrka/MDT
    def __init__(self):
        r"""This is a collapsed form of the Beta PDF meant for use in Automatic Relevance Detection sampling.

        In this prior the ``alpha`` parameter of the Beta prior is set to 1 which simplifies the equation.
        The parameter ``beta`` is still free and can be changed as desired.

        The implemented prior is:

        .. math::

            B(x; 1, \beta) = \beta (1 - x)^{\beta - 1}

        """
        from mdt.model_building.parameters import FreeParameter
        extra_params = [
            FreeParameter(SimpleCLDataType.from_string('mot_float_type'),
                          'beta',
                          False,
                          1,
                          1e-4,
                          1000,
                          sampling_prior=ReciprocalPrior(),
                          sampling_proposal_std=0.01)
        ]

        body = '''
            if(value < 0 || value > 1){
                return 0;
            }
            return beta * pow(1 - value, beta - 1);
        '''
        super(ARDBeta, self).__init__('ard_beta_pdf', body, extra_params)
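The collapsed form beta * (1 - x)^(beta - 1) is just the Beta PDF with its first shape parameter fixed to 1; a quick standalone comparison against scipy.stats.beta (illustrative only, not part of MDT) makes that concrete:

import numpy as np
from scipy.stats import beta as beta_dist

def ard_beta_prior(value, b):
    # collapsed Beta PDF from the docstring: beta * (1 - x)^(beta - 1) on [0, 1]
    density = b * np.power(1.0 - value, b - 1.0)
    return np.where((value < 0) | (value > 1), 0.0, density)

x = np.linspace(0.01, 0.99, 5)
print(ard_beta_prior(x, b=2.5))
print(beta_dist.pdf(x, a=1, b=2.5))  # same values as the collapsed form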
Example 6
 def result(self, ast):
     return SimpleCLDataType(
         self._raw_data_type,
         is_pointer_type=self._is_pointer_type,
         vector_length=self._vector_length,
         address_space_qualifier=self._address_space_qualifier,
         pre_data_type_type_qualifiers=self._pre_data_type_type_qualifiers,
         post_data_type_type_qualifier=self._post_data_type_type_qualifier)
Example 7
    def create_class(self, template):
        """Creates classes with as base class DMRICompositeModel

        Args:
            template (Type[ParameterTemplate]): the configuration for the parameter.
        """
        data_type = template.data_type
        if isinstance(data_type, six.string_types):
            data_type = SimpleCLDataType.from_string(data_type)

        # todo remove in future versions
        if issubclass(template, StaticMapParameterTemplate):
            warnings.warn(
                '"StaticMapParameterTemplate" are deprecated in favor of "ProtocolParameterTemplate" '
                'and will be removed in future versions.')

        if issubclass(template, ProtocolParameterTemplate):

            class AutoProtocolParameter(
                    method_binding_meta(template, ProtocolParameter)):
                def __init__(self, nickname=None):
                    super(AutoProtocolParameter,
                          self).__init__(data_type,
                                         nickname or template.name,
                                         value=template.value)

            return AutoProtocolParameter

        elif issubclass(template, FreeParameterTemplate):
            numdiff_info = template.numdiff_info
            if not isinstance(numdiff_info,
                              NumDiffInfo) and numdiff_info is not None:
                numdiff_info = SimpleNumDiffInfo(**numdiff_info)

            class AutoFreeParameter(
                    method_binding_meta(template, FreeParameter)):
                def __init__(self, nickname=None):
                    super(AutoFreeParameter, self).__init__(
                        data_type,
                        nickname or template.name,
                        template.fixed,
                        template.init_value,
                        template.lower_bound,
                        template.upper_bound,
                        parameter_transform=_resolve_parameter_transform(
                            template.parameter_transform),
                        sampling_proposal_std=template.sampling_proposal_std,
                        sampling_prior=template.sampling_prior,
                        numdiff_info=numdiff_info)
                    self.sampling_proposal_modulus = template.sampling_proposal_modulus

            return AutoFreeParameter
Example 8
    def __init__(self, name='_observation'):
        """This parameter indicates that the model should inject the current observation value in the model.

        Sometimes during model linearization or other mathematical operations the current observation appears on
        both sides of the optimization equation. That is, it sometimes happens that you want to use the current observation
        to model that same observation. This parameter is a signal to the model builder to inject the current
        observation.

        You can use this parameter by adding it to your model and then referring to it by name in your model equation.
        """
        super(CurrentObservationParam,
              self).__init__(SimpleCLDataType.from_string('mot_float_type'),
                             name)
Example 9
    def __init__(self):
        """Johnson noise adds noise to the signal using the formula:

        .. code-block:: c

            sqrt(signal^2 + eta^2)

        """
        super(JohnsonSignalNoise, self).__init__(
            'JohnsonNoise',
            'johnsonNoiseModel',
            (FreeParameter(SimpleCLDataType.from_string('double'), 'eta', False, 0.1, 0, 100,
                           parameter_transform=CosSqrClampTransform()),), ())
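For intuition, the sqrt(signal^2 + eta^2) formula above acts as a noise floor of eta when the signal approaches zero; a short NumPy illustration with example values (not MOT API):

import numpy as np

signal = np.array([0.0, 0.5, 1.0])
eta = 0.1
print(np.sqrt(signal ** 2 + eta ** 2))  # approximately [0.1, 0.51, 1.005]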
Example 10
    def __init__(self):
        r"""Normal PDF on the given value: :math:`P(v) = N(v; \mu, \sigma)`"""
        from mot.model_building.cl_functions.parameters import FreeParameter
        params = [
            FreeParameter(SimpleCLDataType.from_string('mot_float_type'),
                          'mu',
                          True,
                          0,
                          -np.inf,
                          np.inf,
                          sampling_prior=AlwaysOne()),
            FreeParameter(SimpleCLDataType.from_string('mot_float_type'),
                          'sigma',
                          True,
                          1,
                          -np.inf,
                          np.inf,
                          sampling_prior=AlwaysOne())
        ]

        super(NormalPDF, self).__init__(
            'return exp(-pown(value - mu, 2) / (2 * pown(sigma, 2))) / (sigma * sqrt(2 * M_PI));',
            'normal_pdf', params)
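The expression above is the standard Gaussian density; a small standalone check against scipy.stats.norm (illustrative code, not MOT/MDT API) confirms it:

import numpy as np
from scipy.stats import norm

def normal_pdf(value, mu, sigma):
    # same formula as the CL body above
    return np.exp(-(value - mu) ** 2 / (2 * sigma ** 2)) / (sigma * np.sqrt(2 * np.pi))

v = np.linspace(-2.0, 2.0, 5)
print(normal_pdf(v, mu=0.5, sigma=1.5))
print(norm.pdf(v, loc=0.5, scale=1.5))  # matches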
Example 11
File: priors.py Project: amrka/MDT
    def __init__(self):
        r"""Normal PDF on the given value: :math:`P(v) = N(v; \mu, \sigma)`"""
        from mdt.model_building.parameters import FreeParameter
        extra_params = [
            FreeParameter(SimpleCLDataType.from_string('mot_float_type'),
                          'mu',
                          True,
                          0,
                          -np.inf,
                          np.inf,
                          sampling_prior=AlwaysOne()),
            FreeParameter(SimpleCLDataType.from_string('mot_float_type'),
                          'sigma',
                          True,
                          1,
                          -np.inf,
                          np.inf,
                          sampling_prior=AlwaysOne())
        ]

        super(NormalPDF, self).__init__(
            'normal_pdf',
            'return exp((mot_float_type) (-((value - mu) * (value - mu)) / (2 * sigma * sigma))) '
            '           / (sigma * sqrt(2 * M_PI));', extra_params)
Example 12
    def __init__(self):
        """This uses the log of the Rice PDF for the maximum likelihood estimator and for the log likelihood.

        The PDF is defined as:

        .. code-block:: c

            PDF = (observation/sigma^2)
                    * exp(-(observation^2 + evaluation^2) / (2 * sigma^2))
                    * bessel_i0((observation * evaluation) / sigma^2)

        Where ``bessel_i0(z)`` is the modified Bessel function of the first kind with order zero. To have the
        joint probability over all instances one would have to take the product over all ``n`` instances:

        .. code-block:: c

            product(PDF)

        Instead of taking the product of this PDF over all instances we take the sum of the log of the PDF:

        .. code-block:: c

            sum(log(PDF))

        Where the log of the PDF is given by:

        .. code-block:: c

            log(PDF) = log(observation/sigma^2)
                        - (observation^2 + evaluation^2) / (2 * sigma^2)
                        + log(bessel_i0((observation * evaluation) / sigma^2))

        For the maximum likelihood estimator we use the negative of this sum:

        .. code-block:: c

            -sum(log(PDF)).
        """
        super(RicianEvaluationModel, self).__init__(
            'RicianNoise',
            'ricianNoiseModel',
            (FreeParameter(SimpleCLDataType.from_string('mot_float_type'), 'sigma', True, 1, 0, 'INFINITY',
                           parameter_transform=ClampTransform()),),
            (Bessel(),))
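The log-space form of the Rice PDF avoids overflow in bessel_i0 for large arguments. Below is a standalone NumPy sketch of -sum(log(PDF)) that uses SciPy's exponentially scaled Bessel function for the same purpose; the names and values are illustrative, this is not the MOT kernel:

import numpy as np
from scipy.special import ive

def log_rician_pdf(observation, evaluation, sigma):
    z = observation * evaluation / sigma ** 2
    # log(bessel_i0(z)) = log(ive(0, z)) + z, with ive the exponentially scaled Bessel function
    log_i0 = np.log(ive(0, z)) + z
    return (np.log(observation / sigma ** 2)
            - (observation ** 2 + evaluation ** 2) / (2 * sigma ** 2)
            + log_i0)

observations = np.array([1.0, 2.0, 3.0])
evaluations = np.array([1.1, 1.9, 3.2])
print(-np.sum(log_rician_pdf(observations, evaluations, sigma=0.5)))  # -sum(log(PDF))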
Example 13
    def __init__(self, name='Scalar', param_name='s', value=0.0, lower_bound=0.0, upper_bound=float('inf'),
                 parameter_kwargs=None):
        """A Scalar model function to be used during optimization.

        Args:
            name (str): The name of the model
            param_name (str): The name of the single free parameter of this function
            value (number or ndarray): The initial value for the single free parameter of this function.
            lower_bound (number or ndarray): The initial lower bound for the single free parameter of this function.
            upper_bound (number or ndarray): The initial upper bound for the single free parameter of this function.
            parameter_kwargs (dict): additional settings for the parameter initialization
        """
        parameter_settings = dict(parameter_transform=ClampTransform(),
                                  sampling_proposal=GaussianProposal(1.0))
        parameter_settings.update(parameter_kwargs or {})

        super(Scalar, self).__init__(
            name,
            'cmScalar',
            (FreeParameter(SimpleCLDataType.from_string('mot_float_type'), param_name,
                           False, value, lower_bound, upper_bound, **parameter_settings),))
Example 14
    def __init__(self):
        """This uses the log of the Gaussian PDF for the maximum likelihood estimator and for the log likelihood.

        The PDF is defined as:

        .. code-block:: c

            PDF = 1/(sigma * sqrt(2*pi)) * exp(-(observation - evaluation)^2 / (2 * sigma^2))

        To have the joint probability over all instances one would normally have to take the product
        over all ``n`` instances:

        .. code-block:: c

            product(PDF)

        Instead of taking the product of this PDF we take the sum of the log of the PDF:

        .. code-block:: c

            sum(log(PDF))

        Where the log of the PDF is given by:

        .. code-block:: c

            log(PDF) = - ((observation - evaluation)^2 / (2 * sigma^2)) - log(sigma * sqrt(2*pi))


        For the maximum likelihood estimator we then need to use the negative of this sum:

        .. code-block:: c

            - sum(log(PDF)).
        """
        super(GaussianEvaluationModel, self).__init__(
            'GaussianNoise',
            'gaussianNoiseModel',
            (FreeParameter(SimpleCLDataType.from_string('mot_float_type'), 'sigma', True, 1, 0, 'INFINITY',
                           parameter_transform=ClampTransform()),), ())
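The same objective can be written in a few lines of NumPy and cross-checked against scipy.stats.norm.logpdf; this is an illustrative sketch, not the OpenCL implementation used by the class above:

import numpy as np
from scipy.stats import norm

def gaussian_neg_log_likelihood(observations, evaluations, sigma):
    # log(PDF) per instance, as written in the docstring above
    log_pdf = (-(observations - evaluations) ** 2 / (2 * sigma ** 2)
               - np.log(sigma * np.sqrt(2 * np.pi)))
    return -np.sum(log_pdf)

observations = np.array([1.0, 2.0, 3.0])
evaluations = np.array([1.1, 1.9, 3.2])
print(gaussian_neg_log_likelihood(observations, evaluations, sigma=0.5))
print(-np.sum(norm.logpdf(observations, loc=evaluations, scale=0.5)))  # same value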
Example 15
def _resolve_parameters(parameter_list):
    """Convert all the parameters in the given parameter list to actual parameter objects.

    This will load all the parameters as :class:`~mdt.model_building.parameters.LibraryParameter`.

    Args:
        parameter_list (list): a list containing a mix of parameter objects and strings. If an item is a parameter
            object we add a copy of it to the returned list. If it is a string we will autoload it.

    Returns:
        list: the list of actual parameter objects
    """
    parameters = []
    for item in parameter_list:
        if isinstance(item, six.string_types):
            if has_component('parameters', item):
                param = get_component('parameters', item)()
                parameters.append(LibraryParameter(param.data_type, item))
            else:
                parameters.append(LibraryParameter(SimpleCLDataType.from_string('mot_float_type'), item))
        else:
            parameters.append(deepcopy(item))
    return parameters