Example #1
File: model.py, Project: phuselab/PyDeepGP
    def predict(self,
                Xnew,
                full_cov=False,
                Y_metadata=None,
                kern=None,
                view=0):
        """Make a prediction from the deep Gaussian process model for a given input"""
        from GPy.core.parameterization.variational import NormalPosterior

        if self.repeatX:
            assert self.nLayers == 2
            mean, var = self.layers[-1].predict(Xnew)
            Xnew_norm = (Xnew - self.repeatX_Xmean) / self.repeatX_Xstd
            Xmean = np.hstack([mean, Xnew_norm])
            Xvar = np.empty_like(Xmean)
            Xvar[:] = 1e-6
            Xvar[:, :self.nDimsOrig[1]] = var
            x = NormalPosterior(Xmean, Xvar)
        else:
            x = Xnew
            for l in self.layers[:0:-1]:
                mean, var = l.predict(x)
                var = np.clip(var, 1e-8, np.inf)
                if var.shape[1] == 1:
                    var = np.tile(var, mean.shape[1])
                x = NormalPosterior(mean, var)
        mrd_flag = hasattr(self.layers[0], 'views')
        if mrd_flag:
            return self.layers[0].views[view].predict(x)
        else:
            return self.layers[0].predict(x)
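The pattern to note: each intermediate layer's predictive mean and variance (the variance clipped away from zero and tiled to the mean's width when the layer returns a single variance column) are wrapped in a NormalPosterior, so the next layer receives an uncertain input; layers[0], or the selected MRD view, then produces the final prediction.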
Example #2
File: model.py, Project: jaimeide/PyDeepGP
def gen_pred_model(self, Y=None, init_X='encoder', binObserved=False):
    from GPy.core.parameterization.variational import NormalPosterior
    from deepgp.models.pred_model import PredDeepGP

    # Y is effectively required: the layer loop below reads the Xs built from it.
    assert Y is not None
    Xs = [Y]

    if init_X == 'nn':
        xy = self.collect_all_XY()
        for i in range(self.nLayers):
            x_mean, x_var = PredDeepGP._init_Xs_NN(Y, i, xy)
            Xs.append(NormalPosterior(x_mean, x_var))
    elif init_X == 'encoder':
        x_mean = Y.copy()  # copy so the caller's Y is not modified in place
        x_mean[np.isnan(x_mean)] = 0.
        for layer in self.layers:
            x_mean = layer.encoder.predict(x_mean)
            Xs.append(NormalPosterior(x_mean, np.ones(x_mean.shape) * layer.X_var))

    layers = []
    layer_lower = None
    for i in range(self.nLayers):
        layers.append(self.layers[i].gen_pred_layer(layer_lower=layer_lower, Y=Xs[i], X=Xs[i + 1],
                                                    binObserved=(i == 0 and binObserved)))
        layer_lower = layers[-1]

    pmodel = PredDeepGP(layers)
    if init_X == 'nn':
        pmodel.set_train_data(xy)
    return pmodel
Example #3
File: model.py, Project: jaimeide/PyDeepGP
def collect_all_XY(self, root=0):
    if self.mpi_comm is None:
        XY = [self.obslayer.Y.copy()]
        for l in self.layers:
            XY.append(l.X.copy())
        return XY
    else:
        from mpi4py import MPI
        from GPy.core.parameterization.variational import NormalPosterior
        N, D = self.Y.shape
        N_list = np.array(self.mpi_comm.allgather(N))
        N_all = np.sum(N_list)
        # Gather the observed data from every rank onto the root process.
        Y_all = np.empty((N_all, D)) if self.mpi_comm.rank == root else None
        self.mpi_comm.Gatherv([self.Y, MPI.DOUBLE], [Y_all, (N_list * D, None), MPI.DOUBLE], root=root)
        if self.mpi_comm.rank == root:
            XY = [Y_all]
        # Gather each layer's variational posterior q(X): means and variances.
        for l in self.layers:
            Q = l.X.shape[1]
            X_mean_all = np.empty((N_all, Q)) if self.mpi_comm.rank == root else None
            self.mpi_comm.Gatherv([l.X.mean.values, MPI.DOUBLE], [X_mean_all, (N_list * Q, None), MPI.DOUBLE], root=root)
            X_var_all = np.empty((N_all, Q)) if self.mpi_comm.rank == root else None
            self.mpi_comm.Gatherv([l.X.variance.values, MPI.DOUBLE], [X_var_all, (N_list * Q, None), MPI.DOUBLE], root=root)
            if self.mpi_comm.rank == root:
                XY.append(NormalPosterior(X_mean_all, X_var_all))
        return XY if self.mpi_comm.rank == root else None
Example #4
    def __init__(self, dim_down, dim_up, likelihood, MLP_dims=None, X=None, X_variance=None, init='rand', Z=None, num_inducing=10, kernel=None, inference_method=None, uncertain_inputs=True, mpi_comm=None, mpi_root=0, back_constraint=True, name='mrdlayer'):

        #assert back_constraint
        self.uncertain_inputs = uncertain_inputs
        Y = self.Y if self.layer_lower is None else self.layer_lower.X
        assert isinstance(dim_down, (list, tuple))
        assert isinstance(kernel, list) and len(kernel) == len(dim_down), "The number of kernels has to be equal to the number of input modalities!"
        super(MRDLayer, self).__init__(name=name)
        self.mpi_comm, self.mpi_root = mpi_comm, mpi_root

        self.back_constraint = bool(back_constraint)

        self.views = []
        for i in range(len(dim_down)):
            view = MRDView(Y[i], dim_down[i],dim_up,likelihood=None if likelihood is None else likelihood[i], MLP_dims=None if MLP_dims is None else MLP_dims[i],
                           X=X, X_variance=X_variance, Z=None if Z is None else Z[i], num_inducing=num_inducing if isinstance(num_inducing,int) else num_inducing[i],
                           kernel= None if kernel is None else kernel[i], inference_method=None if inference_method is None else inference_method[i], uncertain_inputs=uncertain_inputs,
                           mpi_comm=mpi_comm, mpi_root=mpi_root, back_constraint=back_constraint, name='view_'+str(i))
            self.views.append(view)

        if self.back_constraint:
            self.X = None
            self._aggregate_qX()
        else:
            self.X = NormalPosterior(X,X_variance)

        self.link_parameters(*self.views)
        for v in self.views: v.X = self.X
        self.link_parameters(self.X)
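After construction every view shares a single latent posterior: with a back constraint, _aggregate_qX (Example #12 below) multiplies the per-view Gaussian factors into one q(X); otherwise the NormalPosterior is built directly from X and X_variance. Either way, each view's X is pointed at the shared object.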
Example #5
    def __init__(self, X, Y, kernel=None, Z=None, num_inducing=10, X_variance=None, mean_function=None, normalizer=None, mpi_comm=None, name='sparse_gp'):
        num_data, input_dim = X.shape

        # kern defaults to rbf (plus white for stability)
        if kernel is None:
            kernel = kern.RBF(input_dim)#  + kern.white(input_dim, variance=1e-3)

        # Z defaults to a subset of the data
        if Z is None:
            i = np.random.permutation(num_data)[:min(num_inducing, num_data)]
            Z = X.view(np.ndarray)[i].copy()
        else:
            assert Z.shape[1] == input_dim

        likelihood = likelihoods.Gaussian()

        if X_variance is not None:
            X = NormalPosterior(X, X_variance)

        if mpi_comm is not None:
            from ..inference.latent_function_inference.var_dtc_parallel import VarDTC_minibatch
            infr = VarDTC_minibatch(mpi_comm=mpi_comm)
        else:
            infr = VarDTC()

        SparseGP_MPI.__init__(self, X, Y, Z, kernel, likelihood, mean_function=mean_function,
                              inference_method=infr, normalizer=normalizer, mpi_comm=mpi_comm, name=name)
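The constructor above appears to be GPy's SparseGPRegression; a minimal usage sketch under that assumption, with illustrative data (not from the original project):

    import numpy as np
    import GPy

    X = np.random.randn(50, 2)
    Y = np.sin(X[:, :1]) + 0.1 * np.random.randn(50, 1)
    X_variance = np.full(X.shape, 0.01)  # per-point input variance

    # Passing X_variance takes the branch above that wraps X in a
    # NormalPosterior, so the sparse GP treats its inputs as uncertain.
    m = GPy.models.SparseGPRegression(X, Y, num_inducing=10, X_variance=X_variance)
    m.optimize(messages=False)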
Example #6
def create_posterior_object(m, samples):
    """Create a NormalPosterior object.

    :param m: GP with which to create posterior
    :param samples: function inputs to create the posterior at
    :return: NormalPosterior with the predictive means and marginal variances at samples
    """
    mu, covar = m.predict_noiseless(samples, full_cov=True)
    # Only the marginal variances are kept, so the full covariance is computed
    # just to read off its diagonal.
    variances = np.reshape(covar.diagonal(), (len(samples), 1))
    return NormalPosterior(means=mu, variances=variances)
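A minimal usage sketch for the helper above; the model and data are illustrative, not from the original source:

    import numpy as np
    import GPy

    X = np.random.randn(20, 1)
    Y = np.sin(X) + 0.05 * np.random.randn(20, 1)
    m = GPy.models.GPRegression(X, Y)

    samples = np.linspace(-2, 2, 5)[:, None]
    post = create_posterior_object(m, samples)
    print(post.mean.shape, post.variance.shape)  # (5, 1) (5, 1)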
Example #7
    def __init__(self,
                 X,
                 Y,
                 indexD,
                 kernel=None,
                 Z=None,
                 num_inducing=10,
                 X_variance=None,
                 normalizer=None,
                 mpi_comm=None,
                 individual_Y_noise=False,
                 name='sparse_gp'):

        assert len(Y.shape) == 1 or Y.shape[1] == 1
        self.individual_Y_noise = individual_Y_noise
        self.indexD = indexD
        output_dim = int(np.max(indexD)) + 1

        num_data, input_dim = X.shape

        # kern defaults to rbf (plus white for stability)
        if kernel is None:
            kernel = kern.RBF(
                input_dim)  #  + kern.white(input_dim, variance=1e-3)

        # Z defaults to a subset of the data
        if Z is None:
            i = np.random.permutation(num_data)[:min(num_inducing, num_data)]
            Z = X.view(np.ndarray)[i].copy()
        else:
            assert Z.shape[1] == input_dim

        if individual_Y_noise:
            likelihood = likelihoods.Gaussian(variance=np.array(
                [np.var(Y[indexD == d]) for d in range(output_dim)]) * 0.01)
        else:
            likelihood = likelihoods.Gaussian(variance=np.var(Y) * 0.01)

        if X_variance is not None:
            X = NormalPosterior(X, X_variance)

        infr = VarDTC_MD()

        SparseGP_MPI.__init__(self,
                              X,
                              Y,
                              Z,
                              kernel,
                              likelihood,
                              inference_method=infr,
                              normalizer=normalizer,
                              mpi_comm=mpi_comm,
                              name=name)
        self.output_dim = output_dim
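Note the likelihood setup above: with individual_Y_noise=True each output dimension (as indexed by indexD) gets its own Gaussian noise variance, initialized to 1% of that output's empirical variance; otherwise a single noise variance of 0.01 * var(Y) is shared across outputs.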
Example #8
def test_setxy_bgplvm(self):
    k = GPy.kern.RBF(1)
    m = GPy.models.BayesianGPLVM(self.Y, 2, kernel=k)
    mu, var = m.predict(m.X)
    X = m.X.copy()
    Xnew = NormalPosterior(m.X.mean[:10].copy(), m.X.variance[:10].copy())
    m.set_XY(Xnew, m.Y[:10])
    assert m.checkgrad()
    m.set_XY(X, self.Y)
    mu2, var2 = m.predict(m.X)
    np.testing.assert_allclose(mu, mu2)
    np.testing.assert_allclose(var, var2)
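The test swaps in a NormalPosterior over a ten-point subset via set_XY, checks that the gradients are still consistent, then restores the original latent X and verifies the predictions are numerically unchanged.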
Example #9
def test_fixed_inputs_uncertain(self):
    from GPy.plotting.matplot_dep.util import fixed_inputs
    import GPy
    from GPy.core.parameterization.variational import NormalPosterior
    X_mu = np.random.randn(10, 3)
    X_var = np.random.rand(10, 3)  # variances must be positive, so rand rather than randn
    X = NormalPosterior(X_mu, X_var)
    Y = np.sin(X_mu) + np.random.randn(10, 3) * 1e-3
    m = GPy.models.BayesianGPLVM(Y, X=X_mu, X_variance=X_var, input_dim=3)
    fixed = fixed_inputs(m, [1], fix_routine='median', as_list=True, X_all=False)
    self.assertTrue((0, np.median(X.mean.values[:, 0])) in fixed)
    self.assertTrue((2, np.median(X.mean.values[:, 2])) in fixed)
    self.assertTrue(len([t for t in fixed if t[0] == 1]) == 0)  # the unfixed input must not appear
Example #10
    def __init__(self, Y, input_dim, X=None, X_variance=None, init='PCA', num_inducing=10,
                 Z=None, kernel=None, inference_method=None, likelihood=None,
                 name='bayesian gplvm', normalizer=None,
                 missing_data=False, stochastic=False, batchsize=1):
        self.logger = logging.getLogger(self.__class__.__name__)
        if X is None:
            from ..util.initialization import initialize_latent
            self.logger.info("initializing latent space X with method {}".format(init))
            X, fracs = initialize_latent(init, input_dim, Y)
        else:
            fracs = np.ones(input_dim)

        self.init = init

        if Z is None:
            self.logger.info("initializing inducing inputs")
            Z = np.random.permutation(X.copy())[:num_inducing]
        assert Z.shape[1] == X.shape[1]

        if X_variance is False:
            self.logger.info('no variance on X, activating sparse GPLVM')
            X = Param("latent space", X)
        else:
            if X_variance is None:
                self.logger.info("initializing latent space variance ~ uniform(0,.1)")
                X_variance = np.random.uniform(0,.1,X.shape)
            self.variational_prior = NormalPrior()
            X = NormalPosterior(X, X_variance)

        if kernel is None:
            self.logger.info("initializing kernel RBF")
            kernel = kern.RBF(input_dim, lengthscale=1./fracs, ARD=True) #+ kern.Bias(input_dim) + kern.White(input_dim)

        if likelihood is None:
            likelihood = Gaussian()

        self.kl_factr = 1.

        if inference_method is None:
            from ..inference.latent_function_inference.var_dtc import VarDTC
            self.logger.debug("creating inference_method var_dtc")
            inference_method = VarDTC(limit=3 if not missing_data else Y.shape[1])

        super(BayesianGPLVMMiniBatch,self).__init__(X, Y, Z, kernel, likelihood=likelihood,
                                           name=name, inference_method=inference_method,
                                           normalizer=normalizer,
                                           missing_data=missing_data, stochastic=stochastic,
                                           batchsize=batchsize)
        self.X = X
        self.link_parameter(self.X, 0)
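A minimal construction sketch, assuming the constructor above is GPy's BayesianGPLVMMiniBatch (random illustrative data):

    import numpy as np
    import GPy

    Y = np.random.randn(40, 6)
    m = GPy.models.BayesianGPLVMMiniBatch(Y, input_dim=2)
    # With the default X_variance=None, m.X is a NormalPosterior:
    # PCA-initialized means with uniform(0, .1) variances.
    print(type(m.X).__name__)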
Example #11
    def _init_XY(self):
        X_win, X_dim, U_win, U_dim = self.X_win, self.X_dim, self.U_win, self.U_dim
        self._update_conv()
        if X_win > 0:
            X_mean_conv, X_var_conv = np.vstack(self.X_mean_conv), np.vstack(self.X_var_conv)
        if self.withControl:
            U_mean_conv, U_var_conv = np.vstack(self.U_mean_conv), np.vstack(self.U_var_conv)

        # Three cases: state only, control only, or state and control concatenated.
        if not self.withControl:
            self.X = NormalPosterior(X_mean_conv, X_var_conv)
        elif X_win == 0:
            self.X = NormalPosterior(U_mean_conv, U_var_conv)
        else:
            self.X = NormalPosterior(np.hstack([X_mean_conv, U_mean_conv]),
                                     np.hstack([X_var_conv, U_var_conv]))

        if self.X_observed:
            self.Y = np.vstack([x[X_win:] for x in self.Xs_flat])
        else:
            self.Y = NormalPosterior(
                np.vstack([x.mean.values[X_win:] for x in self.Xs_flat]),
                np.vstack([x.variance.values[X_win:] for x in self.Xs_flat]))
Example #12
def _aggregate_qX(self):
    if self.back_constraint:
        if self.X is None:
            self.X = NormalPosterior(np.zeros_like(self.views[0].Xv.mean.values), np.zeros_like(self.views[0].Xv.variance.values))
        else:
            self.X.mean[:] = 0
            self.X.variance[:] = 0

        # Combine the per-view posteriors as a product of Gaussians: the total
        # precision is the sum of the view precisions, and the mean is the
        # precision-weighted average of the view means.
        self.prec_denom = np.zeros_like(self.X.variance.values)
        for v in self.views:
            self.prec_denom += 1./v.Xv.variance.values
            self.X.mean += v.Xv.mean.values/v.Xv.variance.values
        self.X.mean /= self.prec_denom
        self.X.variance[:] = 1./self.prec_denom
    else:
        for v in self.views:
            v.X = self.X
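For reference, the loop above implements the standard product of independent Gaussians, one factor per view: the combined precision is the sum of the per-view precisions, 1/sigma^2 = sum_v 1/sigma_v^2, and the combined mean is the precision-weighted average of the per-view means, mu = sigma^2 * sum_v (mu_v / sigma_v^2).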
Example #13
def setUp(self):
    self.kerns = (
        #GPy.kern.RBF([0,1,2], ARD=True)+GPy.kern.Bias(self.input_dim)+GPy.kern.White(self.input_dim),
        #GPy.kern.RBF(self.input_dim)+GPy.kern.Bias(self.input_dim)+GPy.kern.White(self.input_dim),
        #GPy.kern.Linear(self.input_dim) + GPy.kern.Bias(self.input_dim) + GPy.kern.White(self.input_dim),
        #GPy.kern.Linear(self.input_dim, ARD=True) + GPy.kern.Bias(self.input_dim) + GPy.kern.White(self.input_dim),
        GPy.kern.Linear([1, 3, 6, 7], ARD=True) +
        GPy.kern.RBF([0, 5, 8], ARD=True) +
        GPy.kern.White(self.input_dim), )
    self.q_x_mean = np.random.randn(self.input_dim)[None]
    self.q_x_variance = np.exp(.5 * np.random.randn(self.input_dim))[None]
    # Draw samples from q(x) by scaling and shifting standard normals.
    self.q_x_samples = np.random.randn(
        self.Nsamples, self.input_dim) * np.sqrt(
            self.q_x_variance) + self.q_x_mean
    self.q_x = NormalPosterior(self.q_x_mean, self.q_x_variance)
    self.Z = np.random.randn(self.num_inducing, self.input_dim)
    self.q_x_mean.shape = (1, self.input_dim)
    self.q_x_variance.shape = (1, self.input_dim)
Example #14
def test_posterior(self):
    X = np.random.randn(3, 5)
    Xv = np.random.rand(*X.shape)
    par = NormalPosterior(X, Xv)
    par.gradient = 10
    pcopy = par.copy()
    pcopy.gradient = 10
    self.assertListEqual(par.param_array.tolist(), pcopy.param_array.tolist())
    self.assertListEqual(par.gradient_full.tolist(), pcopy.gradient_full.tolist())
    self.assertSequenceEqual(str(par), str(pcopy))
    # The copy must not share memory with the original.
    self.assertIsNot(par.param_array, pcopy.param_array)
    self.assertIsNot(par.gradient_full, pcopy.gradient_full)
    with tempfile.TemporaryFile('w+b') as f:
        par.pickle(f)
        f.seek(0)
        pcopy = pickle.load(f)
    self.assertListEqual(par.param_array.tolist(), pcopy.param_array.tolist())
    pcopy.gradient = 10
    np.testing.assert_allclose(par.gradient_full, pcopy.gradient_full)
    np.testing.assert_allclose(pcopy.mean.gradient_full, 10)
    self.assertSequenceEqual(str(par), str(pcopy))
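Together the assertions verify that copy() and pickling round-trip a NormalPosterior: parameter values, printed form, and gradient buffers are preserved, while the copies own independent arrays.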
Example #15
def _init_encoder(self, MLP_dims):
    from .mlp import MLP
    from copy import deepcopy
    from GPy.core.parameterization.transformations import Logexp
    X_win, X_dim, U_win, U_dim = self.X_win, self.X_dim, self.U_win, self.U_dim
    assert X_win > 0, "Neural network constraints only apply to the autoregressive structure!"
    Q = X_win * X_dim + U_win * U_dim if self.withControl else X_win * X_dim
    self.init_Xs = [
        NormalPosterior(self.Xs_flat[i].mean.values[:X_win],
                        self.Xs_flat[i].variance.values[:X_win],
                        name='init_Xs_' + str(i)) for i in range(self.nSeq)
    ]
    for init_X in self.init_Xs:
        init_X.mean[:] = np.random.randn(*init_X.shape) * 1e-2
    # Layer sizes must be integers: use floor division (X_dim / 2 would be a
    # float under Python 3).
    self.encoder = MLP([Q, Q * 2, Q + X_dim // 2, X_dim] if MLP_dims is None
                       else [Q] + deepcopy(MLP_dims) + [X_dim])
    self.Xs_var = [
        Param('X_var_' + str(i),
              self.Xs_flat[i].variance.values[X_win:].copy(), Logexp())
        for i in range(self.nSeq)
    ]
Example #16
    def __init__(self, Y, dim_down, dim_up, likelihood, MLP_dims=None, X=None, X_variance=None, init='rand', Z=None, num_inducing=10, kernel=None, inference_method=None, uncertain_inputs=True, mpi_comm=None, mpi_root=0, back_constraint=True, name='mrd-view'):

        self.uncertain_inputs = uncertain_inputs
        self.layer_lower = None
        self.scale = 1.

        if back_constraint:
            from .mlp import MLP
            from copy import deepcopy
            self.encoder = MLP([dim_down, int((dim_down+dim_up)*2./3.), int((dim_down+dim_up)/3.), dim_up] if MLP_dims is None else [dim_down]+deepcopy(MLP_dims)+[dim_up])
            X = self.encoder.predict(Y.mean.values if isinstance(Y, VariationalPosterior) else Y)
            X_variance = 0.0001*np.ones(X.shape)
            self.back_constraint = True
        else:
            self.back_constraint = False

        if Z is None:
            Z = np.random.rand(num_inducing, dim_up)*2-1. #np.random.permutation(X.copy())[:num_inducing]
        assert Z.shape[1] == X.shape[1]
        
        if likelihood is None: likelihood = likelihoods.Gaussian(variance=Y.var()*0.01)
        
        if uncertain_inputs: X = NormalPosterior(X, X_variance)
        if kernel is None: kernel = kern.RBF(dim_up, ARD = True)
        
        # The call below also gives the view its self.X field.
        super(MRDView, self).__init__(X, Y, Z, kernel, likelihood, inference_method=inference_method, mpi_comm=mpi_comm, mpi_root=mpi_root, name=name)
        if back_constraint: self.link_parameter(self.encoder)

        if self.uncertain_inputs and self.back_constraint:
            from GPy import Param
            from GPy.core.parameterization.transformations import Logexp
            self.X_var_common = Param('X_var',X_variance[0].copy(),Logexp())
            self.link_parameters(self.X_var_common)
        # There is some redundancy between self.Xv and self.X: self.X is used for the
        # likelihood and all other computations, while self.Xv is used only for its
        # gradient. The redundancy is kept in case we want a product-of-experts MRD model.
        self.Xv = self.X