Example #1
    def __init__(self, layer_lower,
                 dim_down, dim_up,
                 likelihood,
                 X=None, X_variance=None, init='PCA',
                 Z=None, num_inducing=10,  kernel=None,
                 inference_method=None, uncertain_inputs=True,
                 mpi_comm=None, mpi_root=0, back_constraint=True,
                 encoder=None, auto_update=True, name='layer'):

        self.uncertain_inputs = uncertain_inputs
        self.layer_lower = layer_lower
        # Targets of this layer: self.Y if there is no lower layer, otherwise
        # the latent inputs X of the layer below.
        Y = self.Y if self.layer_lower is None else self.layer_lower.X
        self.back_constraint = back_constraint

        from deepgp.util.util import initialize_latent
        # Initialize the latent means (e.g. via PCA) from the targets, and give
        # them a small randomized variance if none is provided.
        if X is None:
            X, _ = initialize_latent(init, Y.shape[0], dim_up, Y.mean.values if isinstance(Y, VariationalPosterior) else Y)
        if X_variance is None:
            X_variance = 0.01*np.ones(X.shape) + 0.01*np.random.rand(*X.shape)
            
        if Z is None:
            if self.back_constraint:
                # With a back constraint, draw inducing inputs uniformly in [-1, 1).
                Z = np.random.rand(num_inducing, dim_up)*2 - 1.
            elif num_inducing <= X.shape[0]:
                # Use a random subset of the latent points as inducing inputs.
                Z = X[np.random.permutation(X.shape[0])[:num_inducing]].copy()
            else:
                # More inducing points than data: fill up with uniform samples
                # drawn from the bounding box of X.
                Z_more = np.random.rand(num_inducing-X.shape[0], X.shape[1])*(X.max(0)-X.min(0)) + X.min(0)
                Z = np.vstack([X.copy(), Z_more])
        assert Z.shape[1] == X.shape[1]
        
        if mpi_comm is not None:
            # Keep the inducing inputs identical across all MPI workers.
            from ..util.parallel import broadcastArrays
            broadcastArrays([Z], mpi_comm, mpi_root)

        if uncertain_inputs:
            X = NormalPosterior(X, X_variance)
        if kernel is None:
            kernel = kern.RBF(dim_up, ARD=True)
        assert kernel.input_dim == X.shape[1], "The input dimensionality has to match the input dimensionality of the kernel!"
        # A per-inducing-point jitter on Kuu, kept positive via the Logexp transform.
        self.Kuu_sigma = Param('Kuu_var', np.zeros(num_inducing)+1e-3, Logexp())
        
        super(Layer, self).__init__(X, Y, Z, kernel,
                                    likelihood, inference_method=inference_method,
                                    mpi_comm=mpi_comm, mpi_root=mpi_root,
                                    auto_update=auto_update, name=name)
        self.link_parameter(self.Kuu_sigma)
        if back_constraint:
            self.encoder = encoder

        # Without a back constraint the variational posterior over X is
        # optimized directly, so link it as a parameter of the layer.
        if self.uncertain_inputs and not self.back_constraint:
            self.link_parameter(self.X)
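
For context, such layers are normally built through the package's top-level model class rather than by hand. A minimal usage sketch, assuming the PyDeepGP package (deepgp) and GPy are installed; the layer widths, kernels and data here are illustrative, not taken from the example above:

    import numpy as np
    import GPy
    import deepgp

    # Toy data: 100 observations of a 5-dimensional output.
    Y = np.random.randn(100, 5)

    # An unsupervised deep GP with two hidden layers of widths 3 and 2.
    m = deepgp.DeepGP([Y.shape[1], 3, 2], Y,
                      kernels=[GPy.kern.RBF(3, ARD=True), GPy.kern.RBF(2, ARD=True)],
                      num_inducing=10, back_constraint=False)
    m.optimize(max_iters=100, messages=True)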
Example #2
    def _init_Xs(self, Ya, Xa):
        # Build the list of initial latent spaces, one entry per entry of
        # nDims[1:].
        if isinstance(Ya, list):
            return [None for d in self.nDims[1:]]
        nDims, N = self.nDims, Ya.shape[0]
        if self.back_constraint:
            # With a back constraint the latent means come from the encoder,
            # so random placeholders suffice here.
            if self.X_observed:
                Xs = [np.random.rand(N, d) for d in nDims[1:-1]] + [Xa]
            else:
                Xs = [np.random.rand(N, d) for d in nDims[1:]]
        elif self.X_observed and len(nDims) == 3:
            X = Xa.mean.values if isinstance(Xa, VariationalPosterior) else Xa
            from deepgp.util.util import initialize_latent
            # A single hidden layer: initialize it via PCA on the observed inputs.
            X_mid = initialize_latent('PCA', N, nDims[1], X)[0]
            if X.shape[1] < self.input_dim:
                # Pad X_mid with random columns up to the width of X.
                tmp = np.random.randn(*X.shape)
                tmp[:, :X_mid.shape[1]] = X_mid
                X_mid = tmp
            if self.repeatX:
                if isinstance(X, VariationalPosterior):
                    #-- Haven't tested this case
                    # ...
                    raise NotImplementedError()
                else:
                    # Append the observed inputs to the hidden layer and
                    # standardize the concatenation per dimension.
                    X_mid2 = np.hstack((X_mid, X))
                    # Should we re-normalize everything?
                    Xmean, Xstd = X_mid2.mean(0), X_mid2.std(0) + 1e-20
                    X_mid2 -= Xmean[np.newaxis, :]
                    X_mid2 /= Xstd[np.newaxis, :]

                    # Keep the statistics of the repeated block so new inputs
                    # can be mapped into the same standardized space.
                    self.repeatX_Xmean = Xmean.copy()[X_mid.shape[1]:]
                    self.repeatX_Xstd = Xstd.copy()[X_mid.shape[1]:]

                    self.nDimsOrig = nDims[:]
                    nDims[1] = X_mid2.shape[1]
                    X_mid = X_mid2
            Xs = [X_mid, X]
        elif self.X_observed:
            # Multiple hidden layers: fix only the last entry to the observed
            # inputs; the intermediate latent spaces stay None for now.
            X = Xa.mean.values if isinstance(Xa, VariationalPosterior) else Xa
            Xs = [None for d in nDims[1:-1]] + [X]
        else:
            Xs = [None for d in nDims[1:]]
        return Xs
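
The repeatX branch above concatenates the observed inputs onto the PCA-initialized hidden layer and standardizes the result per dimension, remembering the statistics of the repeated block. A standalone NumPy sketch of just that step (toy shapes, no deepgp dependency):

    import numpy as np

    N, d_hidden, d_obs = 100, 3, 2
    X_mid = np.random.randn(N, d_hidden)   # stands in for the PCA-initialized hidden layer
    X = np.random.randn(N, d_obs)          # stands in for the observed inputs

    # Concatenate and standardize; the tiny constant guards against zero std.
    X_mid2 = np.hstack((X_mid, X))
    Xmean, Xstd = X_mid2.mean(0), X_mid2.std(0) + 1e-20
    X_mid2 = (X_mid2 - Xmean[np.newaxis, :]) / Xstd[np.newaxis, :]

    # Statistics of the repeated block, needed to map new observed inputs
    # into the same standardized space later on.
    repeatX_Xmean = Xmean[d_hidden:]
    repeatX_Xstd = Xstd[d_hidden:]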