Code example #1
File: layers.py  Project: jaimeide/PyDeepGP
    def __init__(self, X, Y, Z, kernel, likelihood, mean_function=None, inference_method=None,
                 name='sparse gp', Y_metadata=None, normalizer=False, mpi_comm=None, mpi_root=0, auto_update=True):
        self.mpi_comm = mpi_comm
        self.mpi_root = mpi_root
        self.psicov = False
        self.svi = False
        self.qU_ratio = 1.
        self.auto_update = auto_update

        if inference_method is None:
            from ..inference import VarDTC_parallel, VarDTC
            if mpi_comm is None:
                inference_method = VarDTC()
            else:
                inference_method = VarDTC_parallel(mpi_comm, mpi_root)
        elif inference_method == 'inferentia' and mpi_comm is None:
            from ..inference import VarDTC_Inferentia
            inference_method = VarDTC_Inferentia()
            self.psicov = True
        elif inference_method == 'svi':
            from ..inference import SVI_VarDTC
            inference_method = SVI_VarDTC()
            self.svi = True
        
        super(SparseGP_MPI, self).__init__(X, Y, Z, kernel, likelihood, mean_function=mean_function, inference_method=inference_method,
                 name=name, Y_metadata=Y_metadata, normalizer=normalizer)
        
        if self.svi:
            from ..util.misc import comp_mapping
            W = comp_mapping(self.X, self.Y)
            qu_mean = self.Z.dot(W)
            self.qU_mean = Param('qU_m', qu_mean)
            self.qU_W = Param('qU_W', np.random.randn(Z.shape[0], Z.shape[0])*0.01) 
            self.qU_a = Param('qU_a', 1e-3, Logexp())
            self.link_parameters(self.qU_mean, self.qU_W, self.qU_a)
Code example #2
File: parameterized_tests.py  Project: yincheng/GPy
    def test_remove_parameter(self):
        from GPy.core.parameterization.transformations import FIXED, UNFIXED, __fixed__, Logexp
        self.white.fix()
        self.test1.kern.unlink_parameter(self.white)
        self.assertIs(self.test1._fixes_, None)

        self.assertIsInstance(self.white.constraints, ParameterIndexOperations)
        self.assertListEqual(self.white._fixes_.tolist(), [FIXED])
        self.assertIs(self.test1.constraints,
                      self.rbf.constraints._param_index_ops)
        self.assertIs(self.test1.constraints,
                      self.param.constraints._param_index_ops)

        self.test1.link_parameter(self.white, 0)
        self.assertIs(self.test1.constraints,
                      self.white.constraints._param_index_ops)
        self.assertIs(self.test1.constraints,
                      self.rbf.constraints._param_index_ops)
        self.assertIs(self.test1.constraints,
                      self.param.constraints._param_index_ops)
        self.assertListEqual(self.test1.constraints[__fixed__].tolist(), [0])
        self.assertIs(self.white._fixes_, None)
        self.assertListEqual(self.test1._fixes_.tolist(),
                             [FIXED] + [UNFIXED] * 52)

        self.test1.unlink_parameter(self.white)
        self.assertIs(self.test1._fixes_, None)
        self.assertListEqual(self.white._fixes_.tolist(), [FIXED])
        self.assertIs(self.test1.constraints,
                      self.rbf.constraints._param_index_ops)
        self.assertIs(self.test1.constraints,
                      self.param.constraints._param_index_ops)
        self.assertListEqual(
            self.test1.constraints[Logexp()].tolist(),
            list(range(self.param.size, self.param.size + self.rbf.size)))
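Every example on this page uses the same GPy parameterization machinery. As a condensed, hedged sketch of the fix/unfix behaviour the test above asserts (kernel names are standard GPy; _fixes_ is an internal attribute, shown here only for inspection):

import GPy

k = GPy.kern.RBF(1) + GPy.kern.White(1)
k.white.fix()        # mark the white-noise variance as fixed
print(k._fixes_)     # boolean mask over the combined kernel's parameters
k.white.unfix()      # with no fixes left, the mask becomes None again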
Code example #3
	def __init__(self, input_dim, variance=1, active_dims=[0], name="categorical", inverse=False, useGPU=False):
		super(Categorical, self).__init__(input_dim, active_dims, name, useGPU=useGPU)

		self.inverse = inverse

		self.variance = Param('variance', variance, Logexp())
		self.link_parameter(self.variance)
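Most of the examples here follow the same pattern: wrap a value in Param, attach a Logexp() transform to keep it positive, and register it via link_parameter so it appears in the model's parameter array. A minimal, hedged sketch of the pattern in isolation (imports as used in the examples above; exact printout format may vary between GPy versions):

from GPy import Param
from GPy.core.parameterization.transformations import Logexp

variance = Param('variance', 1.0, Logexp())  # Logexp keeps the value positive
print(variance)    # the printout includes the '+ve' constraint
variance[:] = 2.5  # Param supports numpy-style assignment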
Code example #4
    def __init__(self, gp_link=None, r=1.0):
        if gp_link is None:
            #Parameterised not as link_f but as f
            #gp_link = Identity()
            gp_link = Log()

        super(LogLogistic, self).__init__(gp_link, name='LogLogistic')
        self.r = Param('r_shape', float(r), Logexp())
        self.link_parameter(self.r)
Code example #5
    def __init__(self, gp_link=None, deg_free=5, sigma2=2):
        if gp_link is None:
            gp_link = link_functions.Identity()

        super(HetStudentT, self).__init__(gp_link, name='Hetro_Student_T')
        self.v = Param('deg_free', float(deg_free), Logexp())
        self.link_parameter(self.v)
        self.v.constrain_fixed()

        self.log_concave = False
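Note the constrain_fixed() call above: the degrees-of-freedom parameter stays linked (and printed) but is excluded from optimisation. A hedged sketch of the same call on a standalone Param:

from GPy import Param
from GPy.core.parameterization.transformations import Logexp

v = Param('deg_free', 5.0, Logexp())
v.constrain_fixed()  # v keeps its value during optimisation; shown as 'fixed'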
Code example #6
File: parameterized_tests.py  Project: yincheng/GPy
 def test_default_constraints(self):
     self.assertIs(self.rbf.variance.constraints._param_index_ops,
                   self.rbf.constraints._param_index_ops)
     self.assertIs(self.test1.constraints,
                   self.rbf.constraints._param_index_ops)
     self.assertListEqual(self.rbf.constraints.indices()[0].tolist(),
                          list(range(2)))
     from GPy.core.parameterization.transformations import Logexp
     kern = self.test1.kern
     self.test1.unlink_parameter(kern)
     self.assertListEqual(kern.constraints[Logexp()].tolist(), list(range(3)))
Code example #7
 def __init__(self, which, X, X_variance, Z, num_inducing, kernel):
     super(PsiStatModel, self).__init__(name='psi stat test')
     self.which = which
     self.X = Param("X", X)
     self.X_variance = Param('X_variance', X_variance, Logexp())
     self.q = NormalPosterior(self.X, self.X_variance)
     self.Z = Param("Z", Z)
     self.N, self.input_dim = X.shape
     self.num_inducing, input_dim = Z.shape
     assert self.input_dim == input_dim, "shape mismatch: Z:{!s} X:{!s}".format(
         Z.shape, X.shape)
     self.kern = kernel
     self.psi_ = self.kern.__getattribute__(self.which)(self.Z, self.q)
     self.add_parameters(self.q, self.Z, self.kern)
Code example #8
 def __init__(self, k1, k2, kc, xc, cpDim):
     if k2 is None:
         super(Changepoint,self).__init__([k1],"changepoint")
         k2 = k1
     else:
         super(Changepoint,self).__init__([k1,k2],"changepoint")
     
     self.k1 = k1
     self.k2 = k2
     
     self.kc = Param('kc', kc, Logexp())
     self.link_parameter(self.kc)
     
     self.xc = np.array(xc)
     self.cpDim = cpDim
Code example #9
 def __init__(self,
              input_dim,
              basis,
              variance=None,
              ARD=False,
              active_dims=None,
              name='mean',
              useGP=False):
     """
     Initialize the object.
     """
     super(MeanFunction, self).__init__(input_dim,
                                        active_dims,
                                        name,
                                        useGP=useGP)
     self.input_dim = int(input_dim)
     self._ARD = ARD
     if not hasattr(basis, '__call__'):
          raise TypeError('The basis functions must implement the '
                          '\'__call__()\' method, which should evaluate '
                          'the basis functions on a 2D numpy array of '
                          'shape \'num_points x input_dim\'.')
     if not hasattr(basis, 'num_output'):
          raise TypeError('The basis functions must have an attribute '
                          '\'num_output\' which should store the number '
                          'of basis functions it contains.')
     self._basis = basis
     self._num_params = basis.num_output
     if not ARD:
         if variance is None:
             variance = np.ones(1)
         else:
             variance = np.asarray(variance)
             assert variance.size == 1, 'Only 1 variance needed for a non-ARD kernel'
     else:
         if variance is not None:
             variance = np.asarray(variance)
              assert variance.size in [1, self.num_params], 'Bad number of variances'
             if variance.size != self.num_params:
                 variance = np.ones(self.num_params) * variance
         else:
             variance = np.ones(self.num_params)
     self.variance = Param('variance', variance, Logexp())
     self.link_parameters(self.variance)
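The constructor above only checks that basis is callable and exposes a num_output attribute. A hypothetical basis satisfying both checks (PolynomialBasis is illustrative, not part of the original project):

import numpy as np

class PolynomialBasis(object):
    """Evaluates the columns [1, x, x**2, ...] for 1-D inputs."""

    def __init__(self, degree):
        self.num_output = degree + 1  # number of basis functions

    def __call__(self, X):
        # X: num_points x input_dim; returns num_points x num_output
        return np.hstack([X ** d for d in range(self.num_output)])

# mean = MeanFunction(1, PolynomialBasis(2))  # sketch using the class above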
Code example #10
File: enclayer.py  Project: mbaddar1/wias_uq_rl
    def __init__(self,
                 layer,
                 direction='bottom_up',
                 encoder='mlp',
                 encoder_dims=None,
                 mpi_comm=None,
                 mpi_root=0,
                 name='encoder'):
        super(EncoderLayer, self).__init__(name=name)
        self.mpi_comm, self.mpi_root = mpi_comm, mpi_root
        self.layer = layer
        self.direction = direction
        if direction == 'bottom_up':
            self.bottom_up = True
            # self.X, self.Y = layer.Y, layer.X
        elif direction == 'top_down':
            self.bottom_up = False
            # self.X, self.Y = layer.X, layer.Y
        else:
            raise Exception(
                'the argument of "direction" has to be either "bottom_up" or "top_down"!'
            )
        self.uncertain_input = isinstance(self.X, VariationalPosterior)
        assert isinstance(
            self.Y, VariationalPosterior
        ), "No need to have a encoder layer for certain output!"

        if encoder == 'mlp':
            dim_in, dim_out = self.X.shape[1], self.Y.shape[1]
            from copy import deepcopy
            from deepgp.encoder.mlp import MLP
            self.encoder = MLP([dim_in, int((dim_in+dim_out)*2./3.), int((dim_in+dim_out)/3.), dim_out] if encoder_dims is None \
                               else [dim_in]+deepcopy(encoder_dims)+[dim_out])
        else:
            raise Exception('Unsupported encoder type: ' + encoder)
        self.Y_var_common = Param('Y_var', self.Y.variance.values[1].copy(),
                                  Logexp())

        # Synchronize across MPI nodes
        if self.mpi_comm is not None:
            from ..util.parallel import broadcastArrays
            broadcastArrays([self.encoder.param_array, self.Y_var_common],
                            self.mpi_comm, self.mpi_root)
        self.link_parameters(self.encoder, self.Y_var_common)
Code example #11
File: layers.py  Project: hoangcuong2011/RGP
 def _init_encoder(self, MLP_dims):
     from .mlp import MLP
     from copy import deepcopy
     from GPy.core.parameterization.transformations import Logexp
     X_win, X_dim, U_win, U_dim = self.X_win, self.X_dim, self.U_win, self.U_dim
     assert X_win > 0, "Neural network constraints only apply to an autoregressive structure!"
     Q = X_win * X_dim + U_win * U_dim if self.withControl else X_win * X_dim
     self.init_Xs = [
         NormalPosterior(self.Xs_flat[i].mean.values[:X_win],
                         self.Xs_flat[i].variance.values[:X_win],
                         name='init_Xs_' + str(i)) for i in range(self.nSeq)
     ]
     for init_X in self.init_Xs:
         init_X.mean[:] = np.random.randn(*init_X.shape) * 1e-2
     self.encoder = MLP([Q, Q * 2, Q + X_dim // 2, X_dim]
                        if MLP_dims is None else
                        [Q] + deepcopy(MLP_dims) + [X_dim])
     self.Xs_var = [
         Param('X_var_' + str(i),
               self.Xs_flat[i].variance.values[X_win:].copy(), Logexp())
         for i in range(self.nSeq)
     ]
Code example #12
    def __init__(self, Y, dim_down, dim_up, likelihood, MLP_dims=None, X=None, X_variance=None, init='rand', Z=None, num_inducing=10, kernel=None, inference_method=None, uncertain_inputs=True, mpi_comm=None, mpi_root=0, back_constraint=True, name='mrd-view'):

        self.uncertain_inputs = uncertain_inputs
        self.layer_lower = None
        self.scale = 1.

        if back_constraint:
            from .mlp import MLP
            from copy import deepcopy
            self.encoder = MLP([dim_down, int((dim_down+dim_up)*2./3.), int((dim_down+dim_up)/3.), dim_up] if MLP_dims is None else [dim_down]+deepcopy(MLP_dims)+[dim_up])
            X = self.encoder.predict(Y.mean.values if isinstance(Y, VariationalPosterior) else Y)
            X_variance = 0.0001*np.ones(X.shape)
            self.back_constraint = True
        else:
            self.back_constraint = False

        if Z is None:
            Z = np.random.rand(num_inducing, dim_up)*2-1. #np.random.permutation(X.copy())[:num_inducing]
        assert Z.shape[1] == X.shape[1]
        
        if likelihood is None: likelihood = likelihoods.Gaussian(variance=Y.var()*0.01)
        
        if uncertain_inputs: X = NormalPosterior(X, X_variance)
        if kernel is None: kernel = kern.RBF(dim_up, ARD = True)
        
        # The command below will also give the field self.X to the view.
        super(MRDView, self).__init__(X, Y, Z, kernel, likelihood, inference_method=inference_method, mpi_comm=mpi_comm, mpi_root=mpi_root, name=name)
        if back_constraint: self.link_parameter(self.encoder)

        if self.uncertain_inputs and self.back_constraint:
            from GPy import Param
            from GPy.core.parameterization.transformations import Logexp
            self.X_var_common = Param('X_var', X_variance[0].copy(), Logexp())
            self.link_parameters(self.X_var_common)
        # There's some redundancy in the self.Xv and self.X. Currently we use self.X for the likelihood part and all calculations part,
        # self.Xv is only used for the self.Xv.gradient part. 
        # This is redundant but it's there in case we want to do the product of experts MRD model.
        self.Xv = self.X
Code example #13
File: layers.py  Project: jaimeide/PyDeepGP
    def __init__(self, layer_lower, dim_down, dim_up, likelihood, X=None, X_variance=None, init='PCA', Z=None, num_inducing=10, kernel=None, inference_method=None, uncertain_inputs=True, mpi_comm=None, mpi_root=0, back_constraint=True, encoder=None, auto_update=True, name='layer'):

        self.uncertain_inputs = uncertain_inputs
        self.layer_lower = layer_lower
        Y = self.Y if self.layer_lower is None else self.layer_lower.X
        self.back_constraint = back_constraint

        from deepgp.util.util import initialize_latent
        if X is None: X, _ = initialize_latent(init, Y.shape[0], dim_up, Y.mean.values if isinstance(Y, VariationalPosterior) else Y)
        if X_variance is None: X_variance = 0.01*np.ones(X.shape) + 0.01*np.random.rand(*X.shape)
            
        if Z is None:
            if self.back_constraint: Z = np.random.rand(num_inducing, dim_up)*2-1.
            else:
                if num_inducing<=X.shape[0]:
                    Z = X[np.random.permutation(X.shape[0])[:num_inducing]].copy()
                else:
                    Z_more = np.random.rand(num_inducing-X.shape[0],X.shape[1])*(X.max(0)-X.min(0))+X.min(0)
                    Z = np.vstack([X.copy(),Z_more])
        assert Z.shape[1] == X.shape[1]
        
        if mpi_comm is not None:
            from ..util.parallel import broadcastArrays
            broadcastArrays([Z], mpi_comm, mpi_root)
        
        if uncertain_inputs: X = NormalPosterior(X, X_variance)
        if kernel is None: kernel = kern.RBF(dim_up, ARD = True)
        assert kernel.input_dim==X.shape[1], "The dimensionality of input has to be equal to the input dimensionality of kernel!"
        self.Kuu_sigma = Param('Kuu_var', np.zeros(num_inducing)+1e-3, Logexp())
        
        super(Layer, self).__init__(X, Y, Z, kernel, likelihood, inference_method=inference_method, mpi_comm=mpi_comm, mpi_root=mpi_root, auto_update=auto_update, name=name)
        self.link_parameter(self.Kuu_sigma)
        if back_constraint: self.encoder = encoder

        if self.uncertain_inputs and not self.back_constraint:
            self.link_parameter(self.X)
Code example #14
    def __init__(self,
                 k1,
                 k2=None,
                 kc=1.,
                 xc=np.array([[0]]),
                 cpDim=0,
                 changepointParameter=False):
        """
        arguments:
            k1, k2: GPy.kern.Kernel
            kc: float, covariance at the changepoint
            xc: np.array, position of changepoint(s)
            cpDim: int, dimension that changepoint exists on
            changepointParameter: bool, whether xc should be linked as a parameter

        """
        if k2 is None:
            super(Changepoint, self).__init__([k1], "changepoint")
            k2 = k1
        else:
            super(Changepoint, self).__init__([k1, k2], "changepoint")

        self.k1 = k1
        self.k2 = k2

        self.kc = Param('kc', kc, Logexp())
        self.link_parameter(self.kc)

        self.changepointParameter = changepointParameter
        self.xc = np.array(xc)
        if self.changepointParameter:
            self.xc = Param('xc', self.xc)
            self.link_parameter(self.xc)
            self.xc.gradient = [[0]]

        self.cpDim = cpDim
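A hedged usage sketch for the class above, assuming Changepoint is importable alongside GPy (the RBF kernels are placeholders):

import numpy as np
import GPy

k1 = GPy.kern.RBF(1)
k2 = GPy.kern.RBF(1, lengthscale=0.2)
# Covariance switches from k1 to k2 at x = 0.5 along input dimension 0.
cp = Changepoint(k1, k2, kc=1.0, xc=np.array([[0.5]]), cpDim=0)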