Example 1
    def _get_statistics(self,data,D):
        # Second-moment statistics for a linear regression in which the last D
        # columns of each row are the outputs and the remaining columns the
        # regressors; `na` is the usual np.newaxis alias.
        n = getdatasize(data)
        if n > 0:
            if isinstance(data,np.ndarray):
                Syy = data[:,-D:].T.dot(data[:,-D:])      # output scatter
                Sytyt = data[:,:-D].T.dot(data[:,:-D])    # regressor scatter
                Syyt = data[:,-D:].T.dot(data[:,:-D])     # output-regressor cross terms

                if self.affine:
                    # augment the regressors with a constant column for the intercept
                    Syyt = np.hstack((data[:,-D:].sum(0)[:,na],Syyt))
                    Sytytsum = data[:,:-D].sum(0)
                    Sytyt = np.vstack((
                                np.hstack(((n,),Sytytsum)),
                                np.hstack((Sytytsum[:,na],Sytyt))
                            ))
            else:
                # list of arrays: accumulate the same statistics over the pieces
                Syy = sum(d[:,-D:].T.dot(d[:,-D:]) for d in data)
                Sytyt = sum(d[:,:-D].T.dot(d[:,:-D]) for d in data)
                Syyt = sum(d[:,-D:].T.dot(d[:,:-D]) for d in data)

                if self.affine:
                    Syyt = np.hstack((sum(d[:,-D:].sum(0) for d in data)[:,na],Syyt))
                    Sytytsum = sum(d[:,:-D].sum(0) for d in data)
                    Sytyt = np.vstack((
                                np.hstack(((n,),Sytytsum)),
                                np.hstack((Sytytsum[:,na],Sytyt))
                            ))
        else:
            Syy = Sytyt = Syyt = None

        return Syy,Sytyt,Syyt,n
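
All of these snippets count observations with getdatasize, which accepts either a single array or a list of arrays. A minimal sketch of the behaviour they rely on (an assumption about the helper, not the library source):

    import numpy as np

    def getdatasize(data):
        # total number of rows across a single array or a list of arrays
        if isinstance(data, np.ndarray):
            return data.shape[0]
        elif isinstance(data, list):
            return sum(getdatasize(d) for d in data)
        else:
            return 1  # treat anything else as a single scalar observation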
Example 2
    def _get_statistics(self,data,D):
        # NOTE: similar to pybasicbayes/distributions.py:GaussianFixedMean
        # Returns n, the number of observations, and sumsq, the scatter matrix
        # of the centered data. D is unused here; kept for interface compatibility.
        n = getdatasize(data)
        if n > 0:
            if isinstance(data,np.ndarray):
                centered = self._center_data(data)
                sumsq = centered.T.dot(centered)
            else:
                # list of arrays: center each piece and accumulate its scatter
                sumsq = sum(c.T.dot(c) for c in map(self._center_data,data))
        else:
            sumsq = None

        return n, sumsq
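
The snippet defers to a _center_data helper that is not shown. Under the fixed-mean reading suggested by the NOTE, a self-contained sketch of the same statistics would be (centered_scatter and mu are illustrative names, not from the example):

    import numpy as np

    def centered_scatter(data, mu):
        # n and the scatter matrix of (x - mu), for an array or a list of arrays
        if isinstance(data, np.ndarray):
            centered = data - mu
            return len(data), centered.T.dot(centered)
        n = sum(len(d) for d in data)
        sumsq = sum((d - mu).T.dot(d - mu) for d in data)
        return n, sumsq

    # usage: n, sumsq = centered_scatter(np.random.randn(100, 3), np.zeros(3))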
Example 3
    def resample(self,data,niter=25,temp=None):
        # doesn't keep a reference to the data like a model would
        assert isinstance(data,(list,np.ndarray))

        if getdatasize(data) > 0:
            # a list of arrays is stacked into a single array before use
            if not isinstance(data,np.ndarray):
                data = np.concatenate(data)

            # temporarily attach the data and run a few Gibbs sweeps ...
            self.add_data(data)

            for itr in range(niter):
                self.resample_model(temp=temp)

            # ... then drop the labels so no reference to the data survives
            self.labels_list.pop()
        else:
            # no data: just resample the model with nothing attached
            self.resample_model(temp=temp)
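
As a small aside, the list branch simply stacks the pieces with np.concatenate before attaching them, e.g. (illustrative shapes only):

    import numpy as np

    pieces = [np.random.randn(5, 2), np.random.randn(3, 2)]
    stacked = np.concatenate(pieces)     # what the method does with a list input
    assert stacked.shape == (8, 2)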
Example 4
    def max_likelihood(self,data,weights=None):
        if weights is not None:
            raise NotImplementedError
        assert isinstance(data,(list,np.ndarray))
        if isinstance(data,np.ndarray):
            data = [np.asarray(data,dtype=np.float64)]
        else:
            # build a list (not a lazy map object) so the data can be reused below
            data = [np.asarray(x,dtype=np.float64) for x in data]

        if getdatasize(data) > 0:
            for d in data:
                self.add_data(d)

            # run EM until the per-iteration log-likelihood gain drops below 0.1
            prev_like = sum(self.log_likelihood(d).sum() for d in data)
            for itr in range(100):
                self.EM_step()
                new_like = sum(self.log_likelihood(d).sum() for d in data)
                if new_like <= prev_like + 0.1:
                    break
                else:
                    prev_like = new_like

            # detach the data again: pop one labels object per dataset added
            for d in data:
                self.labels_list.pop()
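
The loop above uses a fixed absolute tolerance: EM steps continue only while each iteration improves the total log-likelihood by more than 0.1. A self-contained sketch of that stopping rule (run_until_converged, step and loglike are illustrative names, not from the example):

    def run_until_converged(step, loglike, maxiter=100, tol=0.1):
        # call step() until loglike() improves by less than tol per iteration
        prev = loglike()
        for _ in range(maxiter):
            step()
            new = loglike()
            if new <= prev + tol:
                break
            prev = new
        return prev

    # toy usage: a 'likelihood' that halves its remaining gap to 10 on every step
    state = {'ll': 0.0}
    def step():
        state['ll'] += 0.5 * (10.0 - state['ll'])
    final = run_until_converged(step, lambda: state['ll'])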