Example #1
    def expected_PosteriormeanVariance(self, X, L, hyperpars, K_expected):
        size = self.n_tasks * self.n_train

        beta, varm, loc, varc = hyperpars

        Cov_test_expected = mixing_Covariance_diag(K_expected, self.Wmix, varm,
                                                   varc)  # shape M

        #----- covariance between test data and simulation training data
        Kx3 = self.kernel(self.Xtrain, X, beta)  # shape Q x Ntrain x Nb
        Kx3_expected = tf.reduce_mean(Kx3, axis=-1,
                                      keepdims=True)  # shape Q x Ntrain x 1
        Cov_mixed_expected = mixing_Covariance(
            Kx3_expected, self.Wmix, varm, varc)  # shape M x N_train x M x 1
        Cov_mixed_expected = tf.reshape(Cov_mixed_expected,
                                        [size, self.n_tasks])

        mean_training = tf.tile(loc[:, tf.newaxis], [1, self.n_train])
        mean_training = tf.reshape(mean_training, [size, 1])

        Y = tf.transpose(self.Ytrain)
        Y = tf.reshape(Y, [size, 1]) - mean_training

        mean, var = posterior_Gaussian(L, Cov_mixed_expected,
                                       Cov_test_expected, Y, False)
        var = tf.maximum(var, 1e-40)

        mean = mean + loc[:, tf.newaxis]

        mean_and_var = tf.concat([mean, var], axis=1)
        return mean_and_var
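
All of the examples on this page delegate the actual conditioning to a posterior_Gaussian helper whose body is not shown. Below is a minimal sketch of the standard Cholesky-based computation it presumably performs; this is a plausible reconstruction, not the repository's actual code, and it assumes L is the lower-triangular Cholesky factor of the training covariance:

import tensorflow as tf

def posterior_Gaussian_sketch(L, Cov_mixed, Cov_test, Y, fullCov):
    # Solve L A = Cov_mixed, so that A^T A = Cov_mixed^T K^{-1} Cov_mixed.
    A = tf.linalg.triangular_solve(L, Cov_mixed, lower=True)
    # Posterior mean Cov_mixed^T K^{-1} Y via a second triangular solve.
    V = tf.linalg.triangular_solve(L, Y, lower=True)
    mean = tf.matmul(A, V, transpose_a=True)
    if fullCov:
        # Full posterior covariance: Cov_test - A^T A.
        var = Cov_test - tf.matmul(A, A, transpose_a=True)
    else:
        # Marginal variances: entries of Cov_test minus column sums of A^2.
        var = Cov_test - tf.reduce_sum(tf.square(A), axis=0)
        var = var[:, tf.newaxis]
    return mean, var
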
Example #2
    def full_PosteriormeanVariance(self, X, L, hyperpars):
        n_new = X.shape[0].value
        size_new = self.n_tasks * n_new

        beta, varm, loc, varc = hyperpars

        Cov_test = varc * tf.reduce_sum(tf.square(self.Wmix),
                                        axis=1) + tf.reduce_sum(varm, axis=1)
        Cov_test = tf.tile(Cov_test[:, tf.newaxis], [1, n_new])
        Cov_test = tf.reshape(Cov_test, [size_new])

        Kx3 = self.kernel(self.Xtrain, X, beta)

        size = self.n_tasks * self.n_train

        Cov_mixed = mixing_Covariance(Kx3, self.Wmix, varm,
                                      varc)  # with shape M x N x M x N_test

        Cov_mixed = tf.reshape(Cov_mixed, [size, size_new])

        mean_training = tf.tile(loc[:, tf.newaxis], [1, self.n_train])
        mean_training = tf.reshape(mean_training, [size, 1])

        mean_test = tf.tile(loc[:, tf.newaxis], [1, n_new])
        mean_test = tf.reshape(mean_test, [size_new, 1])

        Y = tf.transpose(self.Ytrain)
        Y = tf.reshape(Y, [size, 1]) - mean_training

        mean, var = posterior_Gaussian(L, Cov_mixed, Cov_test, Y, False)

        mean = mean + mean_test

        mean_and_var = tf.concat([mean, var], axis=1)
        return mean_and_var
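
Examples #1, #2 and #6 build the multi-task covariance with mixing_Covariance (and a diagonal counterpart), whose implementation is also not shown. The shapes in the comments (Wmix and varm of shape M x Q, latent kernels of shape Q x N x P) and the diagonal expression varc * sum_q Wmix[m, q]^2 + sum_q varm[m, q] used above suggest a linear-model-of-coregionalization form; here is a hedged sketch under that assumption, which may differ from the actual implementation:

import tensorflow as tf

def mixing_Covariance_sketch(K, Wmix, varm, varc):
    # K: Q x N x P latent kernels; Wmix, varm: M x Q; varc: scalar.
    # Shared term varc * W K W^T couples every pair of tasks (m, l).
    shared = varc * tf.einsum('mq,qnp,lq->mnlp', Wmix, K, Wmix)
    # Task-specific term sum_q varm[m, q] * K[q] acts on the task diagonal only.
    specific = tf.einsum('mq,qnp->mnp', varm, K)
    eye = tf.eye(tf.shape(Wmix)[0], dtype=K.dtype)  # delta(m, l)
    return shared + tf.einsum('ml,mnp->mnlp', eye, specific)  # M x N x M x P

At identical inputs with a unit-variance stationary kernel (K[q, n, n] = 1), the diagonal of this expression reduces exactly to the Cov_test formula computed in full_PosteriormeanVariance above.
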
Example #3
    def expected_PosteriormeanVariance(self, X, L, hyperpars, K_expected):
        # This is needed for computing the main effects and interactions.
        # Inputs:
        #   X := a 2-dimensional array of input points
        #   L := Cholesky factor of the covariance matrix of the training data
        #   hyperpars := list of values for the kernel hyperparameters (of the form [beta, varm, loc])
        #   K_expected := expected value of the stationary kernel

        beta, varm, loc = hyperpars

        Cov_test_expected = varm * K_expected

        Kx3 = self.kernel(self.Xtrain, X, beta)

        Cov_mixed_expected = varm * tf.reduce_mean(Kx3, axis=-1, keepdims=True)

        Y = self.Ytrain[:, tf.newaxis] - loc

        mean, var = posterior_Gaussian(L, Cov_mixed_expected,
                                       Cov_test_expected, Y, False)
        var = tf.maximum(var, 1e-40)

        mean_and_var = tf.concat([mean, var], axis=1)
        mean_and_var = tf.reshape(mean_and_var, [2])

        return mean_and_var
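
The K_expected argument is the expectation of the stationary kernel over the input distribution, which is what makes this the "expected" posterior used for main effects. How it is computed is not shown in these examples; the following is a self-contained Monte Carlo sketch under assumed choices (uniform inputs on [0, 1]^D and an ARD RBF kernel, both placeholders):

import numpy as np

def rbf_kernel(X1, X2, beta):
    # ARD RBF kernel: k(x, x') = exp(-sum_d beta_d * (x_d - x'_d)^2).
    diff = X1[:, None, :] - X2[None, :, :]
    return np.exp(-np.sum(beta * diff ** 2, axis=-1))

def expected_kernel_mc(X, beta, n_samples=10000, seed=0):
    # Monte Carlo estimate of E_x'[k(x, x')] with x' ~ U[0, 1]^D.
    rng = np.random.default_rng(seed)
    X_samples = rng.random((n_samples, X.shape[1]))
    return rbf_kernel(X, X_samples, beta).mean(axis=1)
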
Example #4
    def posteriormeanVariance(self, Xtest, hyperpars, fullCov=False):
        # Generates the posterior mean and variance of the Gaussian process, given values for the hyperparameters.
        # Inputs:
        #   Xtest := N x D tensorflow array of new inputs
        #   hyperpars := 1-d array containing a set of values for the hyperparameters,
        #                stacked in the following order: loc, varm, beta
        #   fullCov := boolean specifying whether a full covariance matrix should be computed
        # Output:
        #   mean_and_var := array whose first column is the N x 1 posterior mean and whose
        #                   remaining columns hold the (co)variance: an N x N array if
        #                   fullCov = True or an N x 1 array if fullCov = False

        loc = hyperpars[0]
        varm = hyperpars[1]
        beta = hyperpars[2:]

        # ------- generate the covariance matrix for the training data and compute the corresponding Cholesky factor
        Kxx = self.kernel(self.Xtrain, self.Xtrain, beta)
        Cov_train = varm * Kxx + (self.noise + self.jitter_level) * tf.eye(
            self.n_train)
        L = tf.linalg.cholesky(Cov_train)

        #-------- generate covariance matrix for test data
        n_test = Xtest.shape[0].value
        if fullCov:
            Kx2 = self.kernel(Xtest, Xtest, beta)
            Cov_test = varm * Kx2 + (self.noise +
                                     self.jitter_level) * tf.eye(n_test)
        else:
            Cov_test = (varm + self.noise +
                        self.jitter_level) * tf.ones(n_test)

        #------- covariance between test data and training data
        Kx3 = self.kernel(self.Xtrain, Xtest, beta)
        Cov_mixed = varm * Kx3

        Y = self.Ytrain[:, tf.newaxis] - loc

        mean_pos, var_pos = posterior_Gaussian(L, Cov_mixed, Cov_test, Y,
                                               fullCov)

        mean_pos = mean_pos + loc

        mean_and_var = tf.concat([mean_pos, var_pos], axis=1)

        return mean_and_var
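
The hyperparameters arrive as one flat array and are unpacked positionally, so callers must stack them in exactly the documented order (loc, varm, then the beta kernel parameters). A short packing sketch with placeholder values:

import numpy as np

loc, varm = 0.0, 1.5          # constant mean and signal variance (placeholders)
beta = np.array([0.8, 1.2])   # one kernel parameter per input dimension
hyperpars = np.concatenate([[loc, varm], beta])
# hyperpars[0] -> loc, hyperpars[1] -> varm, hyperpars[2:] -> beta,
# matching the unpacking at the top of posteriormeanVariance.
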
Example #5
    def full_PosteriormeanVariance(self, X, L, hyperpars):
        # This is needed for computing the main effects and interactions.
        # Inputs:
        #   X := a 2-dimensional array of input points
        #   L := Cholesky factor of the covariance matrix of the training data
        #   hyperpars := list of values for the kernel hyperparameters (of the form [beta, varm, loc])
        n_new = X.shape[0].value

        beta, varm, loc = hyperpars

        Cov_test = varm * tf.ones(n_new)

        Kx3 = self.kernel(self.Xtrain, X, beta)

        Cov_mixed = varm * Kx3

        Y = self.Ytrain[:, tf.newaxis] - loc

        mean, var = posterior_Gaussian(L, Cov_mixed, Cov_test, Y, False)

        mean_and_var = tf.concat([mean, var], axis=1)

        return mean_and_var
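
Note that Cov_test = varm * tf.ones(n_new) silently assumes a stationary kernel normalized so that k(x, x) = 1, making the prior variance at every test point exactly varm. A quick check using the ARD RBF sketched after Example #3:

import numpy as np

x = np.array([[0.3, 0.7]])
beta = np.array([0.8, 1.2])
# k(x, x) = exp(0) = 1, so the prior variance is varm * 1 = varm.
assert np.isclose(rbf_kernel(x, x, beta)[0, 0], 1.0)
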
Example #6
    def posteriormeanVariance(self, Xtest, hyperpars, fullCov=False):
        # Generates the posterior mean and variance of the multi-task Gaussian process, given values for the hyperparameters.
        # Xtest := N x D tensorflow array of new inputs
        # output := posterior mean in the form of an (M * N) x 1 array, with the M tasks
        #           stacked block-wise, and the posterior (co)variance: an (M * N) x (M * N)
        #           array if fullCov = True or an (M * N) x 1 array if fullCov = False
        beta, varm, loc, varc = hyperpars
        noise = self.noise
        Wmix = self.Wmix

        # ------- generate the covariance matrix for the training data and compute the corresponding Cholesky factor
        Kxx = self.kernel(self.Xtrain, self.Xtrain,
                          beta)  # kernels for the latent Gaussian processes
        # Kxx has shape R x N x N

        Cov_train = mixing_Covariance(Kxx, Wmix, varm,
                                      varc)  # with shape M x N x M x N

        size = self.n_tasks * self.n_train
        noise_matrix = tf.tile(self.noise[:, tf.newaxis], [1, self.n_train])
        noise_matrix = tf.linalg.diag(tf.reshape(noise_matrix, [-1]))

        Cov_train = tf.reshape(
            Cov_train,
            [size, size]) + noise_matrix + (self.jitter_level) * tf.eye(size)

        #-------- Computing the cholesky factor ------------
        L = tf.linalg.cholesky(Cov_train)

        #-------- generate covariance matrix for test data
        n_test = Xtest.shape[0].value
        size_test = self.n_tasks * n_test
        if fullCov:
            Kx2 = self.kernel(Xtest, Xtest, beta)
            Cov_test = mixing_Covariance(Kx2, Wmix, varm, varc)

            Cov_test = tf.reshape(
                Cov_test, [size_test, size_test
                           ]) + (noise + self.jitter_level) * tf.eye(size_test)
        else:
            Cov_test = varc * tf.reduce_sum(
                tf.square(Wmix), axis=1) + tf.reduce_sum(varm, axis=1)
            Cov_test = tf.tile(Cov_test[:, tf.newaxis], [1, n_test])
            Cov_test = tf.reshape(Cov_test, [size_test])

        #------- covariance between test data and training data
        Kx3 = self.kernel(self.Xtrain, Xtest, beta)

        Cov_mixed = mixing_Covariance(Kx3, Wmix, varm,
                                      varc)  # with shape M x N x M x N_test

        Cov_mixed = tf.reshape(Cov_mixed, [size, size_test])

        mean_training = tf.tile(loc[:, tf.newaxis], [1, self.n_train])
        mean_training = tf.reshape(mean_training, [size, 1])

        mean_test = tf.tile(loc[:, tf.newaxis], [1, n_test])
        mean_test = tf.reshape(mean_test, [size_test, 1])

        Y = tf.transpose(self.Ytrain)
        Y = tf.reshape(Y, [size, 1]) - mean_training

        mean_pos, var_pos = posterior_Gaussian(L, Cov_mixed, Cov_test, Y,
                                               fullCov)

        mean_pos = mean_pos + mean_test

        return mean_pos, var_pos
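
Unlike the other examples, this method returns the flat posterior directly. The tasks are stacked block-wise (all test points for task 0, then task 1, and so on, as the tile/reshape of loc above shows), so the (M * N_test) x 1 mean can be unstacked into one column per task. A small sketch, reusing the method's own names (self.n_tasks written as n_tasks here):

import tensorflow as tf

# mean_pos: shape (n_tasks * n_test, 1), ordered task-major.
mean_per_task = tf.reshape(mean_pos, [n_tasks, n_test])  # M x N_test
mean_per_task = tf.transpose(mean_per_task)              # N_test x M, one column per task
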