Code example #1 (score: 0)
File: gp.py — Project: zhoutongtj/ssdkl
 def _compute_Ky(self):
     """
     Build the training covariance matrix and its noisy variant.

     Sums a feature-space kernel over X_train with a location kernel over
     locs_train to form K, then (when sigma_n is set) adds sigma_n^2 on
     the diagonal to account for observation noise, yielding Ky.
     """
     feature_cov = self._compute_covariance_matrix(
         self.X_train, self.X_train, self.sigma_l_tf, self.sigma_f_tf,
         name='K')
     location_cov = self._compute_covariance_matrix(
         self.locs_train, self.locs_train, self.sigma_l_tf_loc,
         self.sigma_f_tf_loc, name='K_loc')
     self.K = feature_cov + location_cov
     if self.sigma_n is None:
         # Noise-free observations: Ky is just an aliased copy of K.
         self.Ky = tf.identity(self.K, name='Ky')
     else:
         # Noisy observations: Ky = K + sigma_n^2 * I.
         noise_diag = tf.mul(tf.square(self.sigma_n_tf),
                             tf_utils.eye(self.n_tf))
         self.Ky = tf.add(self.K, noise_diag, name='Ky')
Code example #2 (score: 0)
File: gp.py — Project: zhoutongtj/ssdkl
 def _compute_det(self):
     """
     Compute the determinant term, -0.5 * log|Ky|, of the log marginal
     likelihood.

     Depending on self.method the term is taken from a precomputed pseudo
     log-determinant ('pseudo'), a direct matrix determinant ('inverse'),
     or a Cholesky factorization of Ky (any other value).
     """
     if self.method == 'pseudo':
         # Pseudo path: the log-determinant was already computed elsewhere.
         self.det = tf.mul(-0.5, self.Ky_logdet, name='det')
     elif self.method == 'inverse':
         # Direct path: -0.5 * log(det(Ky)).
         log_det = tf.log(tf.matrix_determinant(self.Ky))
         self.det = tf.mul(-0.5, log_det, name='det')
     else:
         # Cholesky path: with Ky = L L^T, -0.5*log|Ky| = -sum(log diag(L)).
         # Symmetrizing L leaves its diagonal unchanged, and the identity
         # mask then keeps only the diagonal entries in the sum.
         self.mask_Ky = tf_utils.eye(tf.shape(self.Ky)[0], name='mask_Ky')
         self.L_Ky = tf.cholesky(self.Ky)
         symmetrized = 0.5 * (self.L_Ky + tf.transpose(self.L_Ky))
         masked_logs = self.mask_Ky * tf.log(tf.abs(symmetrized))
         self.det = tf.neg(tf.reduce_sum(masked_logs, keep_dims=True),
                           name='det')
Code example #3 (score: 0)
File: gp.py — Project: zhoutongtj/ssdkl
 def _compute_compound_loss(self):
     """
     Build the semi-supervised objective.

     The loss is the negative log marginal likelihood averaged over the
     training points, plus alpha times the summed test-point predictive
     variances averaged over the test points.
     """
     # Sum of per-point test variances = trace of the test covariance.
     diag_mask = tf_utils.eye(tf.shape(self.y_test_cov)[0])
     self.sum_test_variances = tf.reduce_sum(
         tf.mul(diag_mask, self.y_test_cov), name='sum_test_variances')
     # Negative lml, normalized by the number of training points.
     n_train = tf.to_float(tf.shape(self.X_train)[0])
     self.lml_component = tf.div(tf.neg(self.lml), n_train,
                                 name='lml_component')
     # Variance penalty, weighted by alpha and normalized by the number
     # of test points.
     n_test = tf.to_float(tf.shape(self.X_test)[0])
     self.sum_test_variances_component = tf.mul(
         (self.alpha / n_test),
         self.sum_test_variances,
         name='sum_test_variances_component')
     self.semisup_loss = tf.add(self.lml_component,
                                self.sum_test_variances_component,
                                name='semisup_loss')