Example #1
def log_prob(self, xs, zs):
  """Return scalar, the log joint density log p(xs, zs)."""
  # Note there are no parameters we're being Bayesian about. The
  # parameters are baked into how we specify the neural networks.
  X, y = xs['X'], xs['y']
  # The forward pass stores self.pi, self.mus, self.sigmas (one column
  # per mixture component) as side effects.
  self.neural_network(X)
  # Mixture-weighted component densities, one row per data point.
  result = self.pi * norm.prob(y, self.mus, self.sigmas)
  # Sum over components, then log: the per-point log-likelihood.
  result = tf.log(tf.reduce_sum(result, 1))
  # Sum over data points to get the scalar log joint.
  return tf.reduce_sum(result)
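Both examples call self.neural_network(X) for its side effects: the method is expected to set self.pi, self.mus, and self.sigmas. The page does not include it, so the following is only a sketch of what such a method might look like in TensorFlow 1.x; the layer widths, the tanh activations, and the attribute self.K (number of mixture components) are assumptions, not taken from the examples above.

import tensorflow as tf

def neural_network(self, X):
  """Hypothetical mapping X -> (pi, mus, sigmas), stored on self."""
  # Two hidden layers; sizes and activations are assumptions.
  hidden = tf.layers.dense(X, 25, activation=tf.nn.tanh)
  hidden = tf.layers.dense(hidden, 25, activation=tf.nn.tanh)
  # Component means: unconstrained, shape (N, K).
  self.mus = tf.layers.dense(hidden, self.K)
  # Component scales: softplus keeps them positive.
  self.sigmas = tf.layers.dense(hidden, self.K, activation=tf.nn.softplus)
  # Mixture weights: softmax makes each row a distribution over K components.
  self.pi = tf.layers.dense(hidden, self.K, activation=tf.nn.softmax)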
Example #3
def log_prob(self, xs, zs):
  """log p((xs,ys), (z,theta)) = sum_{n=1}^N log p((xs[n,:],ys[n]), theta)"""
  # Note there are no parameters we're being Bayesian about. The
  # parameters are baked into how we specify the neural networks.
  X, y = xs['X'], xs['y']
  # Same mixture-density computation as Example #1: the forward pass
  # sets self.pi, self.mus, self.sigmas, which parameterize the mixture.
  self.neural_network(X)
  result = self.pi * norm.prob(y, self.mus, self.sigmas)
  result = tf.log(tf.reduce_sum(result, 1))
  return tf.reduce_sum(result)
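One numerical caveat, common to both examples: they sum the weighted densities and only then take the log, so if all K component densities underflow for some data point, the sum is exactly zero and tf.log returns -inf. Working in log space with tf.reduce_logsumexp is the usual remedy. This is a sketch against the same class, not code from the page, and it reuses norm.prob as the only density function shown here; a dedicated log-density function, if the stats module provides one, would be stabler still.

def log_prob(self, xs, zs):
  """Same quantity as above, accumulated in log space."""
  X, y = xs['X'], xs['y']
  self.neural_network(X)
  # log pi_k + log N(y; mu_k, sigma_k), same (N, K) broadcasting as above.
  log_components = tf.log(self.pi) + tf.log(norm.prob(y, self.mus, self.sigmas))
  # Log-sum-exp over the K components, then sum over the N data points.
  return tf.reduce_sum(tf.reduce_logsumexp(log_components, 1))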