def __init__(self, kern, Z, num_outputs, mean_function):
        """
        A sparse variational GP layer in whitened representation. This layer holds the kernel,
        variational parameters, inducing points and mean function.

        The underlying model at inputs X is
        f = Lv + mean_function(X), where v \sim N(0, I) and LL^T = kern.K(X)

        The variational distribution over the inducing points is
        q(v) = N(q_mu, q_sqrt q_sqrt^T)

        The layer holds D_out independent GPs with the same kernel and inducing points.

        :kern: The kernel for the layer (input_dim = D_in)
        :param q_mu: mean initialization (M, D_out)
        :param q_sqrt: sqrt of variance initialization (D_out,M,M)
        :param Z: Inducing points (M, D_in)
        :param mean_function: The mean function
        :return:
        """
        Parameterized.__init__(self)
        M = Z.shape[0]

        # Variational mean, initialised to zeros: (M, D_out)
        q_mu = np.zeros((M, num_outputs))
        self.q_mu = Parameter(q_mu)

        # Variational Cholesky factors, initialised to identities: (D_out, M, M)
        q_sqrt = np.tile(np.eye(M)[None, :, :], [num_outputs, 1, 1])
        transform = transforms.LowerTriangular(M, num_matrices=num_outputs)
        self.q_sqrt = Parameter(q_sqrt, transform=transform)

        self.feature = InducingPoints(Z)
        self.kern = kern
        self.mean_function = mean_function
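
A small standalone NumPy check of the whitened representation described in the docstring (illustrative only; the toy kernel matrix and sample count are arbitrary choices, not part of the layer):

import numpy as np

# With v ~ N(0, I) and L L^T = K, samples f = L v have covariance L L^T = K,
# which is the property the whitened parameterisation above relies on.
rng = np.random.default_rng(0)
X = rng.standard_normal((5, 1))
K = np.exp(-0.5 * (X - X.T) ** 2) + 1e-8 * np.eye(5)  # toy RBF-style kernel matrix
L = np.linalg.cholesky(K)

v = rng.standard_normal((5, 10000))           # whitened samples, v ~ N(0, I)
f = L @ v                                     # f = L v (zero mean function)
print(np.allclose(np.cov(f), K, atol=0.1))    # sample covariance is close to K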
Example #2
 def __init__(self, kernel, Z, feature, num_outputs, mean_function,
              gc_kernel=True, white=False, q_diag=False, **kwargs):
     """
     :param kernel:
     :param Z: values of inducing points
     :param feature: inducing points, type: GraphConvolutionInducingpoints
     :param num_outputs:
     :param mean_function:
     :param white: whether set prior of inducing points as N(0, I)
     :param kwargs:
     """
     
     Parameterized.__init__(self, **kwargs)

     self.kernel = kernel
     self.num_inducing = Z.shape[0]
     self.feature = feature

     self.num_outputs = num_outputs
     self.mean_function = mean_function
     self.gc_kernel = gc_kernel
     self.white = white
     self.q_diag = q_diag

     # Variational parameters for the inducing values, initialised from Z.
     self.q_mu, self.q_sqrt = self._init_variational_parameters(Z)

     self._build_cholesky()
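
The helpers `_init_variational_parameters` and `_build_cholesky` are not shown in this snippet. A minimal sketch of what the former could look like, assuming GPflow 1.x (`Parameter`, `transforms`) and the conventions of the first snippet above; the shapes and the `q_diag` branch are assumptions, not the author's actual code:

import numpy as np
from gpflow import transforms
from gpflow.params import Parameter

def _init_variational_parameters(self, Z):
    # Hypothetical sketch of the helper called above; the real body is not shown.
    # Zero-mean q_mu and identity q_sqrt, diagonal or full depending on self.q_diag.
    M = Z.shape[0]
    q_mu = Parameter(np.zeros((M, self.num_outputs)))        # (M, D_out)
    if self.q_diag:
        q_sqrt = Parameter(np.ones((M, self.num_outputs)),   # (M, D_out), positive entries
                           transform=transforms.positive)
    else:
        q_sqrt = Parameter(np.tile(np.eye(M)[None, :, :], [self.num_outputs, 1, 1]),
                           transform=transforms.LowerTriangular(M, num_matrices=self.num_outputs))
    return q_mu, q_sqrt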
Example #3
 def __init__(self, input_prop_dim=None, **kwargs):
     """
     A base class for GP layers. Provides basic functionality for the multisample conditional
     and for input propagation.

     :param input_prop_dim: the number of leading dimensions (columns) of X to propagate. If
         None (or zero), no input propagation is performed.
     :param kwargs:
     """
     Parameterized.__init__(self, **kwargs)
     self.input_prop_dim = input_prop_dim
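
The propagation itself happens elsewhere in the class and is not shown here; a sketch of what input propagation typically means (an assumption, using TensorFlow and a hypothetical helper name): the first `input_prop_dim` columns of the layer input are concatenated onto the layer output, so later layers still see the original inputs.

import tensorflow as tf

def propagate_inputs(X, F, input_prop_dim):
    # Hypothetical helper: pass the leading input_prop_dim columns of X through
    # alongside the layer output F; with None or 0, no input propagation.
    if not input_prop_dim:
        return F
    return tf.concat([X[..., :input_prop_dim], F], axis=-1)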
Example #4
    def __init__(self, Z, K, noise_sigma, white=True, **kwargs):
        Parameterized.__init__(self, **kwargs)

        self.white = white
        self.num_inducing = Z.shape[0]
        self.inducing_locations = Z
        self.K = K

        # Likelihood with the given observation noise variance.
        self.likelihood = MR_Gaussian(variance=noise_sigma)

        self.setup()
Example #5
 def __init__(self, dims):
     Parameterized.__init__(self)
     self.dims = dims
     # One weight matrix and bias vector per consecutive pair of layer sizes,
     # e.g. dims = [D_in, H, D_out] creates W_0, b_0, W_1, b_1.
     for i, (dim_in, dim_out) in enumerate(zip(dims[:-1], dims[1:])):
         setattr(self, 'W_{}'.format(i), Param(xavier(dim_in, dim_out)))
         setattr(self, 'b_{}'.format(i), Param(np.zeros(dim_out)))
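
The `xavier` initializer referenced above is not defined in this snippet; a common definition (an assumption, not necessarily the author's exact code):

import numpy as np

def xavier(dim_in, dim_out):
    # Xavier/Glorot initialisation: variance scaled by the layer's fan-in and fan-out.
    return np.random.randn(dim_in, dim_out) * np.sqrt(2.0 / (dim_in + dim_out))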
Example #6
    def __init__(self, kernel, W, name=None):
        Parameterized.__init__(self, name=name)

        self.kernel = kernel
        self.W = Parameter(W)  # P x L
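
The `P x L` comment suggests W mixes L latent GPs into P outputs, as in a linear model of coregionalisation; a standalone illustration under that assumption (shapes chosen arbitrarily):

import numpy as np

N, L, P = 4, 2, 3
G = np.random.randn(N, L)   # values of the L latent GPs at N inputs
W = np.random.randn(P, L)   # mixing matrix, stored as self.W above
F = G @ W.T                 # mixed outputs, shape (N, P): F[n, p] = sum_l W[p, l] * G[n, l]
print(F.shape)              # (4, 3)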