Example #1
    def __init__(self, layer_lower, dim_up, X=None, X_variance=None, Z=None, num_inducing=10,
                 kernel=None, inference_method=None, noise_var=1e-2, init='rand', mpi_comm=None,
                 mpi_root=0, back_constraint=True, encoder=None, auto_update=True, name='hiddenlayer'):
        # The layer's output dimensionality is inherited from the latent space of the layer below.
        self.dim_up, self.dim_down = dim_up, layer_lower.X.shape[1]
        likelihood = likelihoods.Gaussian(variance=noise_var)
        # Hidden layers contribute the entropy of the variational posterior to the bound.
        self.variationalterm = NormalEntropy()

        super(HiddenLayer, self).__init__(layer_lower, self.dim_down, dim_up, likelihood, init=init,
                                          X=X, X_variance=X_variance, Z=Z, num_inducing=num_inducing,
                                          kernel=kernel, inference_method=inference_method, mpi_comm=mpi_comm,
                                          mpi_root=mpi_root, back_constraint=back_constraint, encoder=encoder,
                                          auto_update=auto_update, name=name)
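Hidden layers like this one are normally created internally when a full deep GP stack is assembled. A minimal sketch of that, assuming the PyDeepGP top-level deepgp.DeepGP API (layer sizes, kernels, and inducing counts are illustrative, not prescribed by the snippet above):

import numpy as np
import GPy
import deepgp

Y = np.random.randn(100, 8)  # toy observations

# A three-layer stack: 8-d data, a 5-d hidden layer, a 2-d top layer.
# The 5-d middle layer would be an instance of the HiddenLayer class above.
m = deepgp.DeepGP([Y.shape[1], 5, 2], Y,
                  kernels=[GPy.kern.RBF(5), GPy.kern.RBF(2)],
                  num_inducing=10, back_constraint=False)
m.optimize(messages=False, max_iters=100)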
Example #2
    def __init__(self, dim_down, dim_up, Y, X=None, X_variance=None, Z=None, num_inducing=10,
                 kernel=None, inference_method=None, likelihood=None, init='rand',
                 mpi_comm=None, mpi_root=0, back_constraint=True, encoder=None, auto_update=True,
                 repeatX=False, repeatXsplit=0, name='obslayer'):
        self.dim_up, self.dim_down = dim_up, dim_down
        self._Y = Y
        self.repeatX = repeatX
        self.repeatXsplit = repeatXsplit
        # Default to a Gaussian likelihood when none is supplied.
        if likelihood is None:
            likelihood = likelihoods.Gaussian()
        self._toplayer_ = False
        self.variationalterm = NormalEntropy()
        # The observed layer sits at the bottom of the stack, so layer_lower is None.
        super(ObservedLayer, self).__init__(None, self.dim_down, dim_up, likelihood, init=init,
                                            X=X, X_variance=X_variance, Z=Z, num_inducing=num_inducing,
                                            kernel=kernel, inference_method=inference_method,
                                            mpi_comm=mpi_comm, mpi_root=mpi_root, back_constraint=back_constraint,
                                            encoder=encoder, auto_update=auto_update, name=name)
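The signature above also allows building the observed layer directly when you need control over its likelihood or kernel. A hypothetical sketch (the module path deepgp.layers is an assumption, as is passing back_constraint=False to skip the encoder):

import numpy as np
import GPy
from deepgp.layers import ObservedLayer  # assumed module path

Y = np.random.randn(100, 8)

# Bottom of the stack: holds Y, with a 3-d latent space above it. With
# back_constraint=False the variational posterior is parameterised directly
# rather than through an encoder network.
layer = ObservedLayer(Y.shape[1], 3, Y,
                      kernel=GPy.kern.RBF(3),
                      num_inducing=10,
                      back_constraint=False)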
Example #3
    def gen_pred_layer(self,
                       layer_lower=None,
                       Y=None,
                       X=None,
                       binObserved=False):
        from .pred_layers import PredLayer, BinaryPredLayer
        from deepgp.encoder.mlp import MLP
        from ..inference import SVI_Ratio, SVI_Ratio_Binary
        from copy import deepcopy

        # Deep-copy all trained state so the prediction layer shares no
        # parameters with the original model.
        X = self.X.copy() if X is None else X
        Y = self.Y.copy() if Y is None else Y
        Z = self.Z.values.copy()
        X_var = self.X_var.values.copy() if self.back_constraint else None
        encoder = MLP.clone(self.encoder) if self.back_constraint else None
        kernel = self.kern.copy()
        likelihood = self.likelihood.copy()
        posterior = deepcopy(self.posterior)
        # Keep the same kind of variational regulariser as the source layer.
        variationalterm = NormalPrior() if isinstance(
            self.variationalterm, NormalPrior) else NormalEntropy()

        if binObserved:
            layer = BinaryPredLayer(X,
                                    Y,
                                    kernel,
                                    Z,
                                    posterior,
                                    likelihood=likelihood,
                                    layer_lower=layer_lower,
                                    inference_method=SVI_Ratio_Binary(),
                                    variationalterm=variationalterm,
                                    X_var=X_var,
                                    encoder=encoder,
                                    name=self.name)
        else:
            layer = PredLayer(X,
                              Y,
                              kernel,
                              Z,
                              posterior,
                              likelihood=likelihood,
                              layer_lower=layer_lower,
                              inference_method=SVI_Ratio(),
                              variationalterm=variationalterm,
                              X_var=X_var,
                              encoder=encoder,
                              name=self.name)
        return layer
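gen_pred_layer copies every piece of trained state (kernel, likelihood, inducing inputs Z, and the posterior itself) into a fresh layer, so test-time inference with SVI_Ratio cannot disturb the trained parameters. A hypothetical call on a trained layer object:

# Hypothetical usage: clone a trained layer for prediction. binObserved=True
# selects BinaryPredLayer with SVI_Ratio_Binary inference for 0/1 outputs.
pred_layer = layer.gen_pred_layer(binObserved=False)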
Example #4
    def __init__(self,
                 dim_down,
                 dim_up,
                 Y,
                 X=None,
                 X_variance=None,
                 Z=None,
                 num_inducing=10,
                 kernel=None,
                 inference_method=None,
                 likelihood=None,
                 init='rand',
                 mpi_comm=None,
                 mpi_root=0,
                 MLP_dims=None,
                 name='obslayer',
                 back_constraint=False):
        self.layer_lower = None
        self.dim_up, self.dim_down = dim_up, dim_down
        self.Y = Y
        self._toplayer_ = False
        self.variationalterm = NormalEntropy()

        if not back_constraint:
            if X is None:
                # Initialise the latent means from the data. (This could call
                # super()._init_X instead if the initialisation should also
                # apply to non-observed MRD layers.)
                X, fracs = self._init_X(Y, dim_up, init)

            if X_variance is None:
                # Small random variances for the variational posterior over X.
                X_variance = np.random.uniform(0, .1, X.shape)

        super(ObservedMRDLayer,
              self).__init__(dim_down,
                             dim_up,
                             likelihood,
                             init=init,
                             X=X,
                             X_variance=X_variance,
                             Z=Z,
                             MLP_dims=MLP_dims,
                             num_inducing=num_inducing,
                             kernel=kernel,
                             inference_method=inference_method,
                             mpi_comm=mpi_comm,
                             mpi_root=mpi_root,
                             name=name,
                             back_constraint=back_constraint)
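MRD (manifold relevance determination) models share one latent space across several observed views, and this constructor builds a single view. A hypothetical sketch (the module path is an assumption; an ARD kernel is a common choice so each view can switch off latent dimensions it does not need):

import numpy as np
import GPy
from deepgp.layers import ObservedMRDLayer  # assumed module path

Y_view = np.random.randn(100, 6)  # one view of a multi-view dataset

# One observed view over a shared 4-d latent space. With the default
# back_constraint=False, X and X_variance are initialised inside __init__
# as shown above.
view = ObservedMRDLayer(Y_view.shape[1], 4, Y_view,
                        kernel=GPy.kern.RBF(4, ARD=True),
                        num_inducing=10)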
Example #5
    def set_as_toplayer(self, flag=True):
        if flag:
            # Top layer: the latent X is regularised against its prior.
            self.variationalterm = NormalPrior()
        else:
            # Interior layer: only the entropy term enters the bound.
            self.variationalterm = NormalEntropy()
        self._toplayer_ = flag
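The flag swaps the layer's variational regulariser: the top layer's latent X carries a standard normal prior, so the bound needs a KL-style term against N(0, I) (NormalPrior), while an interior layer's X is governed by the layer above and only the entropy term remains (NormalEntropy). Usage is a single call:

layer.set_as_toplayer(True)   # top of the stack: regularise against N(0, I)
layer.set_as_toplayer(False)  # interior layer: entropy term only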