Code example #1
    def estimation(self, y1, y2):
        """ Estimate L divergence.
        
        Parameters
        ----------
        y1 : (number of samples1, dimension)-ndarray
             One row of y1 corresponds to one sample.
        y2 : (number of samples2, dimension)-ndarray
             One row of y2 corresponds to one sample.
    
        Returns
        -------
        d : float
            Estimated L divergence.
            
        References
        ----------
        Jianhua Lin. Divergence measures based on the Shannon entropy.
        IEEE Transactions on Information Theory, 37:145-151, 1991.
        
        Examples
        --------
        d = co.estimation(y1,y2)  
            
        """

        # verification:
        self.verification_equal_d_subspaces(y1, y2)

        num_of_samples1, num_of_samples2 = y1.shape[0], y2.shape[0]

        # mixture of y1 and y2 with 1/2, 1/2 weights:
        w = array([1 / 2, 1 / 2])
        # samples to the mixture (second part of y1 and y2; =:y1m, y2m):
        # (max) number of samples to the mixture from y1 and from y2:
        num_of_samples1m = int(floor(num_of_samples1 / 2))
        num_of_samples2m = int(floor(num_of_samples2 / 2))
        y1m = y1[num_of_samples1m:]  # second half of y1, used for the mixture
        y2m = y2[num_of_samples2m:]  # second half of y2, used for the mixture
        mixture_y = mixture_distribution((y1m, y2m), w)

        # KL divergence of the first half of y1 and of y2 to the mixture:
        d = self.kl_co.estimation(y1[:num_of_samples1m], mixture_y) +\
            self.kl_co.estimation(y2[:num_of_samples2m], mixture_y)

        return d
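
For reference, the population quantity targeted above is Lin's L divergence: the sum of the Kullback-Leibler divergences of the two densities to their equal-weight mixture. The densities f_1, f_2 and the divergence D below are notation for this note, not identifiers from the code:

    % L divergence (Lin, 1991); the snippet reserves the second half of each
    % sample set for the mixture and feeds the first half to the KL estimator.
    L(f_1, f_2) = D\left(f_1 \,\Big\|\, \tfrac{f_1 + f_2}{2}\right)
                + D\left(f_2 \,\Big\|\, \tfrac{f_1 + f_2}{2}\right),
    \qquad
    D(f \,\|\, g) = \int f(u) \log \frac{f(u)}{g(u)} \, \mathrm{d}u.

All six snippets also call a mixture_distribution((...), w) helper that returns samples from the w-weighted mixture of the input sample sets. The following is a minimal, purely illustrative sketch of such a helper, written under the assumption that it subsamples each block in proportion to its weight and stacks the parts; it is not the toolbox's actual implementation:

    # Illustrative sketch only; the real mixture_distribution helper used in
    # these snippets may behave differently.
    from numpy import floor, vstack
    from numpy.random import permutation

    def mixture_distribution_sketch(ys, w):
        """ys: tuple of (num_of_samples_k, dim)-ndarrays; w: weight vector."""
        # largest mixture size that every block can serve in proportion to w:
        num_of_samples = int(min(floor(y.shape[0] / wk) for y, wk in zip(ys, w)))
        # take floor(w_k * num_of_samples) randomly chosen rows from block k:
        parts = [y[permutation(y.shape[0])[:int(floor(wk * num_of_samples))]]
                 for y, wk in zip(ys, w)]
        return vstack(parts)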
Code example #2
    def estimation(self, y1, y2):
        """ Estimate the value of the Jensen-Tsallis kernel.
        
        Parameters
        ----------
        y1 : (number of samples1, dimension)-ndarray
             One row of y1 corresponds to one sample.
        y2 : (number of samples2, dimension)-ndarray
             One row of y2 corresponds to one sample.
    
        Returns
        -------
        k : float
            Estimated kernel value.
            
        References
        ----------            
        Andre F. T. Martins, Noah A. Smith, Eric P. Xing, Pedro M. Q.
        Aguiar, and Mario A. T. Figueiredo. Nonextensive information
        theoretical kernels on measures. Journal of Machine Learning
        Research, 10:935-975, 2009.
           
        Examples
        --------
        k = co.estimation(y1,y2)  
            
        """

        # verification:
        self.verification_equal_d_subspaces(y1, y2)

        # Jensen-Tsallis alpha-difference (jt):
        a = self.alpha

        w = array([1 / 2, 1 / 2])
        mixture_y = mixture_distribution((y1, y2), w)  # mixture
        jt = \
            self.tsallis_co.estimation(mixture_y) -\
            (w[0]**a * self.tsallis_co.estimation(y1) +
             w[1]**a * self.tsallis_co.estimation(y2))

        k = self.log_alpha_2 - jt

        return k
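
For reference, the kernel value computed above can be written as follows; f_1, f_2 are the densities behind y1 and y2, H_{T,\alpha} is the Tsallis entropy (estimated by self.tsallis_co), and self.log_alpha_2 corresponds to \log_\alpha 2:

    % Jensen-Tsallis kernel (Martins et al., 2009):
    k(f_1, f_2) = \log_\alpha 2 - T_\alpha(f_1, f_2), \qquad
    \log_\alpha x = \frac{x^{1-\alpha} - 1}{1 - \alpha},
    % Jensen-Tsallis alpha-difference ("jt" in the code), with w_1 = w_2 = 1/2:
    T_\alpha(f_1, f_2) = H_{T,\alpha}\!\left(\tfrac{f_1 + f_2}{2}\right)
                       - \left[w_1^{\alpha} H_{T,\alpha}(f_1)
                             + w_2^{\alpha} H_{T,\alpha}(f_2)\right],
    % Tsallis entropy:
    H_{T,\alpha}(f) = \frac{1}{\alpha - 1}\left(1 - \int f^{\alpha}(u)\,\mathrm{d}u\right).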
Code example #3
    def estimation(self, y1, y2):
        """ Estimate the value of the exponentiated Jensen-Renyi kernel-1.
        
        Parameters
        ----------
        y1 : (number of samples1, dimension)-ndarray
             One row of y1 corresponds to one sample.
        y2 : (number of samples2, dimension)-ndarray
             One row of y2 corresponds to one sample.
    
        Returns
        -------
        k : float
            Estimated kernel value.
            
        References
        ----------            
        Andre F. T. Martins, Noah A. Smith, Eric P. Xing, Pedro M. Q.
        Aguiar, and Mario A. T. Figueiredo. Nonextensive information
        theoretical kernels on measures. Journal of Machine Learning
        Research, 10:935-975, 2009.
           
        Examples
        --------
        k = co.estimation(y1,y2)  
            
        """

        # verification:
        self.verification_equal_d_subspaces(y1, y2)

        # mixture:
        w = array([1 / 2, 1 / 2])
        mixture_y = mixture_distribution((y1, y2), w)

        k = exp(-self.u * self.renyi_co.estimation(mixture_y))

        return k
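
For reference, the quantity computed above is the exponentiated Jensen-Rényi kernel-1; H_{R,\alpha} is the Rényi entropy (estimated by self.renyi_co) and u is the kernel parameter stored in self.u:

    % Exponentiated Jensen-Renyi kernel-1 (Martins et al., 2009):
    k(f_1, f_2) = \exp\!\left(-u\, H_{R,\alpha}\!\left(\tfrac{f_1 + f_2}{2}\right)\right),
    \qquad
    H_{R,\alpha}(f) = \frac{1}{1 - \alpha} \log \int f^{\alpha}(u)\,\mathrm{d}u.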
Code example #4
    def estimation(self, y1, y2):
        """ Estimate Jensen-Tsallis divergence.
        
        Parameters
        ----------
        y1 : (number of samples1, dimension)-ndarray
             One row of y1 corresponds to one sample.
        y2 : (number of samples2, dimension)-ndarray
             One row of y2 corresponds to one sample.
    
        Returns
        -------
        d : float
            Estimated Jensen-Tsallis divergence.
            
        References
        ----------
        J. Burbea and C.R. Rao. On the convexity of some divergence
        measures based on entropy functions. IEEE Transactions on
        Information Theory, 28:489-495, 1982.
        
        Examples
        --------
        d = co.estimation(y1,y2)  
            
        """

        # verification:
        self.verification_equal_d_subspaces(y1, y2)

        w = array([1 / 2, 1 / 2])
        mixture_y = mixture_distribution((y1, y2), w)
        d = self.tsallis_co.estimation(mixture_y) -\
            (w[0] * self.tsallis_co.estimation(y1) +
             w[1] * self.tsallis_co.estimation(y2))

        return d
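
For reference, the Jensen-Tsallis divergence estimated above is the Tsallis-entropy analogue of the Jensen-Shannon divergence with uniform weights; H_{T,\alpha} is the Tsallis entropy estimated by self.tsallis_co:

    % Jensen-Tsallis divergence (Burbea and Rao, 1982):
    D(f_1, f_2) = H_{T,\alpha}\!\left(\tfrac{f_1 + f_2}{2}\right)
                - \frac{H_{T,\alpha}(f_1) + H_{T,\alpha}(f_2)}{2}.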
Code example #5
    def estimation(self, y1, y2):
        """ Estimate Jensen-Renyi divergence.
        
        Parameters
        ----------
        y1 : (number of samples1, dimension)-ndarray
             One row of y1 corresponds to one sample.
        y2 : (number of samples2, dimension)-ndarray
             One row of y2 corresponds to one sample.
    
        Returns
        -------
        d : float
            Estimated Jensen-Renyi divergence.
            
        References
        ----------
        A.B. Hamza and H. Krim. Jensen-Renyi divergence measure:
        theoretical and computational perspectives. In IEEE International
        Symposium on Information Theory (ISIT), page 257, 2003.
        
        Examples
        --------
        d = co.estimation(y1,y2)  
            
        """

        # verification:
        self.verification_equal_d_subspaces(y1, y2)

        w = self.w
        mixture_y = mixture_distribution((y1, y2), w)
        d = self.renyi_co.estimation(mixture_y) -\
            (w[0] * self.renyi_co.estimation(y1) +
             w[1] * self.renyi_co.estimation(y2))

        return d
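
For reference, the Jensen-Rényi divergence estimated above uses the weight vector w stored in self.w; H_{R,\alpha} is the Rényi entropy estimated by self.renyi_co:

    % Jensen-Renyi divergence (Hamza and Krim, 2003):
    D(f_1, f_2) = H_{R,\alpha}(w_1 f_1 + w_2 f_2)
                - \left[w_1 H_{R,\alpha}(f_1) + w_2 H_{R,\alpha}(f_2)\right].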
Code example #6
    def estimation(self, y1, y2):
        """ Estimate Jensen-Shannon divergence.
        
        Parameters
        ----------
        y1 : (number of samples1, dimension)-ndarray
             One row of y1 corresponds to one sample.
        y2 : (number of samples2, dimension)-ndarray
             One row of y2 corresponds to one sample.
    
        Returns
        -------
        d : float
            Estimated Jensen-Shannon divergence.
            
        References
        ----------
        Jianhua Lin. Divergence measures based on the Shannon entropy.
        IEEE Transactions on Information Theory, 37:145-151, 1991.
        
        Examples
        --------
        d = co.estimation(y1,y2)  
            
        """

        # verification:
        self.verification_equal_d_subspaces(y1, y2)

        w = self.w
        mixture_y = mixture_distribution((y1, y2), w)
        d = self.shannon_co.estimation(mixture_y) -\
            (w[0] * self.shannon_co.estimation(y1) +
             w[1] * self.shannon_co.estimation(y2))

        return d
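
For reference, the Jensen-Shannon divergence estimated above uses the weight vector w stored in self.w; H is the (differential) Shannon entropy estimated by self.shannon_co:

    % Jensen-Shannon divergence (Lin, 1991):
    D(f_1, f_2) = H(w_1 f_1 + w_2 f_2) - \left[w_1 H(f_1) + w_2 H(f_2)\right].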