Example #1
    def _starting_variance_components(self, kind='equal'):
        """
        Starting variance components in optimization.
        Valid values:

        'ols': Starting values are all 0 except residual, which is 
            var(y - X*Beta)
        'EM': the starting values are the variance components after
            100 iterations of expectation-maximization REML (started from all
            equal values).
        'equal': Chooses all variance components (including residual)
            to be equal.


        :param kind: the method to find starting values
        :type kind: string

        :returns: variance components
        :rtype: numpy array of floats
        """
        # 'minque0': Starting values are those from MINQUE with all weights
        #     set equal to 0 except for the residual variance, which is set
        #     to 1. This is the default method used by SAS's PROC MIXED.
        # 'minque1': Starting values are those from MINQUE with all weights
        #     set equal to 1
        # if kind.lower() == 'minque0':
        #     return minque(self, value=0, return_after=1, return_vcs=True)

        # if kind.lower() == 'minque1':
        #     return minque(self, value=1, return_after=1, return_vcs=True)

        # if kind.lower() == 'minquemean':
        #     zero = minque(self, value=0, return_after=1, return_vcs=True)
        #     one = minque(self, value=1, return_after=1, return_vcs=True)
        #     return (zero + one) / 2.0

        if kind.lower() == 'ols':
            vcs_start = np.zeros(len(self.random_effects))
            vcs_start[-1] = self._variance_after_fixefs()
            return vcs_start

        if kind.lower() == 'equal':
            v = self._variance_after_fixefs()
            n = len(self.random_effects)
            # Return a numpy array, matching the documented return type
            vcs_start = np.full(n, v / float(n))
            return vcs_start

        if kind.lower() == 'em':
            starts = self._starting_variance_components('equal')
            vcs_start = expectation_maximization(self,
                                                 REML(self),
                                                 starts=starts,
                                                 return_after=100)
            return vcs_start.parameters
        else:
            raise ValueError('Unknown method: {}'.format(kind))
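For a self-contained illustration of what the 'ols' and 'equal' strategies compute, here is a short sketch using numpy directly. The component count and residual variance below are made-up stand-ins for len(self.random_effects) and self._variance_after_fixefs().

import numpy as np

n_components = 3    # stand-in for len(self.random_effects): two random effects plus residual
resid_var = 2.4     # stand-in for self._variance_after_fixefs()

# 'ols': every component starts at 0 except the residual slot
ols_starts = np.zeros(n_components)
ols_starts[-1] = resid_var

# 'equal': the residual variance is split evenly over all components
equal_starts = np.full(n_components, resid_var / n_components)

print(ols_starts)    # [0.  0.  2.4]
print(equal_starts)  # [0.8 0.8 0.8]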
Example #2
    def maximize(self,
                 method="Average Information",
                 restricted=False,
                 starts=None,
                 verbose=False):
        """
        Finds the optimal values for variance components in the model using
        provided optimization methods.

        :param restricted: Uses REML estimation
        :param starts: starting values for the variance components
        :param method: maximization method
        :param verbose: output maximization progress
        :type restricted: bool
        :type method: string
        :type starts: iterable of numerics
        :type verbose: bool:  
        """

        if (isinstance(self.mle, MLEResult)
                and self.maximized.method == method):
            return
        self.fit_model()

        if starts is None:
            starts = self._starting_variance_components()

        likefunc = REML if restricted else ML
        llik = likefunc(self, info=method)
        llik.set_parameters(starts)

        # if method.lower().startswith('minque'):
        #     mle = minque(self, value=0, verbose=verbose, starts=starts)

        if method.lower() in {'em', 'emreml', 'expectation-maximization'}:
            mle = expectation_maximization(self, llik, verbose=verbose)

        elif method.lower() == 'grid':
            mle = grid_search(self, llik, nevals=20, oob=False)

        else:
            mle = newtonlike_maximization(self, llik, verbose=verbose)

        self.mle = mle
        self.set_variance_components(mle.parameters)
        self.fit_model()

        # Get the full loglikelihood at the ML/REML maximum so we
        # can use it later
        self.mle.full_loglikelihood = full_loglikelihood(
            self.y, self.V, self.X, self.beta)
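A hedged usage sketch for maximize(). The model object here is an assumption (any instance of the class that defines the method above); the attributes read after the call are the ones the method body itself sets.

# `model` is a hypothetical, already-constructed instance of the class
# that defines maximize() above.
model.maximize(method='Average Information', restricted=True, verbose=True)

# The method body sets these before returning:
print(model.mle.parameters)          # estimated variance components
print(model.mle.full_loglikelihood)  # full log-likelihood at the optimum

# 'em', 'emreml', 'expectation-maximization' and 'grid' select the other
# optimizers; any other method string falls through to the Newton-like search.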
Example #3
    def maximize(self, method="Average Information", restricted=False,
                 starts=None, verbose=False):
        """
        Finds the optimal values for variance components of the model by
        restricted maximum likelihood estimation.
        """

        if (isinstance(self.mle, MLEResult) and
                self.maximized.method == method):
            return
        self.fit_model()

        if starts is None:
            starts = self._starting_variance_components()

        likefunc = REML if restricted else ML
        llik = likefunc(self, info=method)
        llik.set_parameters(starts)

        if method.lower().startswith('minque'):
            mle = minque(self, value=0, verbose=verbose, starts=starts)

        elif method.lower() in {'em', 'emreml', 'expectation-maximization'}:
            mle = expectation_maximization(self, llik, verbose=verbose)

        elif method.lower() == 'grid':
            mle = grid_search(self, llik, nevals=20, oob=False)

        else:
            mle = newtonlike_maximization(self, llik, verbose=verbose)

        self.mle = mle
        self.set_variance_components(mle.parameters)
        self.fit_model()

        # Get the full loglikelihood at the ML/REML maximum so we
        # can use it later
        self.mle.full_loglikelihood = full_loglikelihood(self.y, self.V,
                                                         self.X, self.beta)
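This variant differs from Example #2 only in that the MINQUE branch is live code rather than a comment, so MINQUE(0) estimates can be requested through the same entry point. A hedged sketch of reaching that branch (model is an assumed instance; the excerpt does not show whether REML(self, info=method) accepts a 'minque*' string):

# Any method string starting with 'minque' routes to minque(self, value=0, ...).
model.maximize(method='minque0', restricted=True, verbose=False)
print(model.mle.parameters)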