Exemple #1
0
    def __init__(self,
                 likelihood_or_Y_list,
                 input_dim,
                 num_inducing=10,
                 names=None,
                 kernels=None,
                 initx='PCA',
                 initz='permute',
                 _debug=False,
                 **kw):
        """Build one BayesianGPLVM per data view, sharing X, X_variance and Z.

        :param likelihood_or_Y_list: one likelihood or observed-data array
            per output view.
        :param input_dim: dimensionality of the shared latent space.
        :param num_inducing: number of inducing points per model.
        :param names: optional list with one name per data set; defaults to
            "1".."N".
        :param kernels: a single kern instance (copied per view), one kern
            per view, or None for the models' defaults.
        :param initx: latent-space init method, forwarded to ``_init_X``.
        :param initz: inducing-point init method, forwarded to ``_init_Z``.
        :param _debug: debug flag stored on the instance.
        :param kw: forwarded to each ``BayesianGPLVM``; must not contain
            'kernel' (use ``kernels`` instead).
        """
        if names is None:
            self.names = [
                "{}".format(i + 1) for i in range(len(likelihood_or_Y_list))
            ]
        else:
            # BUGFIX: self.names was previously left unset when names was
            # supplied, which breaks _get_param_names later on.
            self.names = names
            assert len(names) == len(
                likelihood_or_Y_list), "one name per data set required"

        # sort out the kernels
        if kernels is None:
            kernels = [None] * len(likelihood_or_Y_list)
        elif isinstance(kernels, kern):
            kernels = [
                kernels.copy() for i in range(len(likelihood_or_Y_list))
            ]
        else:
            assert len(kernels) == len(
                likelihood_or_Y_list), "need one kernel per output"
            assert all([isinstance(k, kern)
                        for k in kernels]), "invalid kernel object detected!"
        assert not ('kernel' in kw), "pass kernels through `kernels` argument"

        self.input_dim = input_dim
        self.num_inducing = num_inducing
        self._debug = _debug

        # _init flags the models that shared initialisation is in progress.
        self._init = True
        X = self._init_X(initx, likelihood_or_Y_list)
        Z = self._init_Z(initz, X)
        self.bgplvms = [
            BayesianGPLVM(l,
                          input_dim=input_dim,
                          kernel=k,
                          X=X,
                          Z=Z,
                          num_inducing=self.num_inducing,
                          **kw) for l, k in zip(likelihood_or_Y_list, kernels)
        ]
        del self._init

        # gref: reference model holding the shared parameters (X, S, Z).
        self.gref = self.bgplvms[0]
        # Cumulative offsets of each model's private parameters (excluding
        # the shared inducing inputs Z).
        nparams = numpy.array(
            [0] +
            [SparseGP._get_params(g).size - g.Z.size for g in self.bgplvms])
        self.nparams = nparams.cumsum()

        self.num_data = self.gref.num_data
        self.NQ = self.num_data * self.input_dim
        self.MQ = self.num_inducing * self.input_dim

        Model.__init__(self)
        self.ensure_default_constraints()
Exemple #2
0
 def _get_param_names(self):
     """Names for the full parameter vector: the shared latent/inducing
     names from the reference model first, then each model's private
     parameter names prefixed with its data-set name."""
     shared = self.gref._get_param_names()[:self.NQ * 2 + self.MQ]
     names = list(shared)
     for model, label in zip(self.bgplvms, self.names):
         for pname in SparseGP._get_param_names(model)[self.MQ:]:
             names.append("{}_{}".format(label, pname))
     return names
Exemple #3
0
 def _get_param_names(self):
     """Return parameter names: shared block (mu, S, Z) from the reference
     model, followed by name-prefixed private names per model."""
     head = self.gref._get_param_names()[:self.NQ * 2 + self.MQ]
     tail = [
         "{}_{}".format(label, pname)
         for g, label in zip(self.bgplvms, self.names)
         for pname in SparseGP._get_param_names(g)[self.MQ:]
     ]
     return list(head) + tail
Exemple #4
0
 def _get_param_names(self):
     """Assemble parameter names: shared variational/inducing names, then
     each model's kernel/likelihood names tagged with its view name."""
     cut = self.NQ * 2 + self.MQ
     shared = self.gref._get_param_names()[:cut]
     per_model = (
         "{}_{}".format(tag, p)
         for g, tag in zip(self.bgplvms, self.names)
         for p in SparseGP._get_param_names(g)[self.MQ:]
     )
     return list(itertools.chain(shared, per_model))
Exemple #5
0
Fichier : mrd.py Projet : Dalar/GPy
 def _get_param_names(self):
     """List all parameter names; shared names come from the reference
     model, private names are prefixed with the owning view's name."""
     # Slicing yields a fresh list, so extending in place is safe.
     result = self.gref._get_param_names()[:self.NQ * 2 + self.MQ]
     for g, tag in zip(self.bgplvms, self.names):
         result.extend("{}_{}".format(tag, p)
                       for p in SparseGP._get_param_names(g)[self.MQ:])
     return result
Exemple #6
0
    def _get_params(self):
        """
        Return the flat parameter vector holding shared then private parts:

        =================================================================
        | mu | S | Z || theta1 | theta2 | .. | thetaN |
        =================================================================
        """
        ref = self.gref
        # Shared block: variational means, variances, inducing inputs.
        pieces = [ref.X.ravel(), ref.X_variance.ravel(), ref.Z.ravel()]
        # Private block: each model's parameters minus the shared Z prefix.
        for g in self.bgplvms:
            pieces.append(SparseGP._get_params(g)[g.Z.size:])
        return numpy.hstack(pieces)
Exemple #7
0
Fichier : mrd.py Projet : Dalar/GPy
    def _log_likelihood_gradients(self):
        """Gradient of the bound w.r.t. all parameters, laid out as
        | dmu | dS | dZ || dtheta1 | .. | dthetaN | to match _get_params."""
        # Sum each model's likelihood gradient w.r.t. the shared
        # variational parameters (mu, S) across all views.
        dLdmu, dLdS = reduce(lambda a, b: [a[0] + b[0], a[1] + b[1]], (g.dL_dmuS() for g in self.bgplvms))
        # The KL term is shared, so subtract it once via the reference model.
        dKLmu, dKLdS = self.gref.dKL_dmuS()
        dLdmu -= dKLmu
        dLdS -= dKLdS
        dLdmuS = numpy.hstack((dLdmu.flatten(), dLdS.flatten())).flatten()
        # Inducing inputs Z are shared too: sum the first MQ entries of each
        # model's sparse-GP gradient vector (the Z block).
        dldzt1 = reduce(lambda a, b: a + b, (SparseGP._log_likelihood_gradients(g)[:self.MQ] for g in self.bgplvms))

        # Private parts: per model, kernel gradients then likelihood
        # gradients, concatenated in model order.
        return numpy.hstack((dLdmuS,
                             dldzt1,
                numpy.hstack([numpy.hstack([g.dL_dtheta(),
                                            g.likelihood._gradients(\
                                                partial=g.partial_for_likelihood)]) \
                              for g in self.bgplvms])))
Exemple #8
0
Fichier : mrd.py Projet : Dalar/GPy
    def _get_params(self):
        """
        Return the parameter vector, shared parameters first:

        =================================================================
        | mu | S | Z || theta1 | theta2 | .. | thetaN |
        =================================================================
        """
        shared = [self.gref.X.ravel(),
                  self.gref.X_variance.ravel(),
                  self.gref.Z.ravel()]
        # Drop each model's leading Z block -- it is shared, not private.
        private = [SparseGP._get_params(m)[m.Z.size:] for m in self.bgplvms]
        return numpy.hstack(shared + private)
Exemple #9
0
    def _log_likelihood_gradients(self):
        """Return the stacked gradient vector in _get_params order:
        shared (mu, S, Z) gradients first, then per-model private ones."""
        # Accumulate every model's likelihood gradient w.r.t. the shared
        # variational parameters mu and S.
        dLdmu, dLdS = reduce(lambda a, b: [a[0] + b[0], a[1] + b[1]],
                             (g.dL_dmuS() for g in self.bgplvms))
        # KL divergence is counted once (shared), via the reference model.
        dKLmu, dKLdS = self.gref.dKL_dmuS()
        dLdmu -= dKLmu
        dLdS -= dKLdS
        dLdmuS = numpy.hstack((dLdmu.flatten(), dLdS.flatten())).flatten()
        # Shared inducing inputs: sum the Z block (first MQ entries) of each
        # model's sparse-GP gradients.
        dldzt1 = reduce(lambda a, b: a + b,
                        (SparseGP._log_likelihood_gradients(g)[:self.MQ]
                         for g in self.bgplvms))

        # Append each model's private kernel and likelihood gradients.
        return numpy.hstack((dLdmuS,
                             dldzt1,
                numpy.hstack([numpy.hstack([g.dL_dtheta(),
                                            g.likelihood._gradients(\
                                                partial=g.partial_for_likelihood)]) \
                              for g in self.bgplvms])))
Exemple #10
0
Fichier : mrd.py Projet : Dalar/GPy
    def __init__(self, likelihood_or_Y_list, input_dim, num_inducing=10, names=None,
                 kernels=None, initx='PCA',
                 initz='permute', _debug=False, **kw):
        """Create one BayesianGPLVM per view with shared X, X_variance and Z.

        kernels may be None (model defaults), a single kern (copied per
        view) or a list with one kern per view. Extra keyword args are
        forwarded to every BayesianGPLVM; 'kernel' must not be among them.
        """
        n_views = len(likelihood_or_Y_list)
        if names is None:
            self.names = ["{}".format(idx) for idx in range(n_views)]
        else:
            self.names = names
            assert len(names) == n_views, "one name per data set required"
        # sort out the kernels
        if kernels is None:
            kernels = [None] * n_views
        elif isinstance(kernels, kern):
            kernels = [kernels.copy() for _ in range(n_views)]
        else:
            assert len(kernels) == n_views, "need one kernel per output"
            assert all([isinstance(k, kern) for k in kernels]), "invalid kernel object detected!"
        assert not ('kernel' in kw), "pass kernels through `kernels` argument"

        self.input_dim = input_dim
        self._debug = _debug
        self.num_inducing = num_inducing

        self._init = True
        X = self._init_X(initx, likelihood_or_Y_list)
        Z = self._init_Z(initz, X)
        self.num_inducing = Z.shape[0] # ensure M==N if M>N

        self.bgplvms = [
            BayesianGPLVM(lik,
                          input_dim=input_dim,
                          kernel=kernel,
                          X=X,
                          Z=Z,
                          num_inducing=self.num_inducing,
                          **kw)
            for lik, kernel in zip(likelihood_or_Y_list, kernels)
        ]
        del self._init

        # Reference model owning the shared parameters.
        self.gref = self.bgplvms[0]
        # Cumulative offsets of per-model private parameters (Z excluded).
        private_sizes = [SparseGP._get_params(g).size - g.Z.size
                         for g in self.bgplvms]
        self.nparams = numpy.array([0] + private_sizes).cumsum()

        self.num_data = self.gref.num_data

        self.NQ = self.num_data * self.input_dim
        self.MQ = self.num_inducing * self.input_dim

        Model.__init__(self)
        self.ensure_default_constraints()
Exemple #11
0
 def log_likelihood(self):
     """Total bound: per-model sparse-GP likelihoods summed, minus the
     shared KL divergence counted once via the reference model."""
     return sum(
         (SparseGP.log_likelihood(g) for g in self.bgplvms),
         -self.gref.KL_divergence())
Exemple #12
0
Fichier : mrd.py Projet : Dalar/GPy
 def log_likelihood(self):
     """Sum all models' sparse-GP log likelihoods and subtract the shared
     KL term once (via the reference model)."""
     total = -self.gref.KL_divergence()
     for model in self.bgplvms:
         total = total + SparseGP.log_likelihood(model)
     return total