Example #1
 def prior(kernel_size, bias_size=0, dtype=None):
     """Trainable-location prior for a variational layer.

     Args:
       kernel_size: Number of kernel weights in the layer.
       bias_size: Number of bias weights in the layer.
       dtype: Dtype of the trainable location variable.

     Returns:
       A tf.keras.Sequential that outputs an Independent(Normal) distribution
       over all ``kernel_size + bias_size`` weights, with one trainable
       location per weight and the scale fixed at 1.
     """
     from tensorflow_probability import layers
     from tensorflow_probability import distributions as tfd
     import tensorflow as tf
     n = kernel_size + bias_size
     return tf.keras.Sequential([
         # One trainable location per weight.
         layers.VariableLayer(n, dtype=dtype),
         # Fixed unit scale; the whole weight vector is one event
         # (reinterpreted_batch_ndims=1).
         layers.DistributionLambda(lambda t: tfd.Independent(
             tfd.Normal(loc=t, scale=1.), reinterpreted_batch_ndims=1
         )),
     ])
Example #2
def posterior_mean_field(kernel_size, bias_size=0, dtype=None):
  """Mean-field Gaussian posterior for a variational layer.

  Returns a tf.keras.Sequential whose output is an Independent(Normal)
  over the ``kernel_size + bias_size`` weights, with a trainable location
  and a softplus-parameterized trainable scale for each weight.
  """
  n = kernel_size + bias_size
  # Softplus shift so the initial scale works out to ~1e-5.
  c = np.log(np.expm1(1e-5))

  # The variable holds 2n values: the first n initialize the locations,
  # the last n initialize the raw (pre-softplus) scales.
  loc_init = tf.keras.initializers.TruncatedNormal(mean=0., stddev=.05, seed=None)
  raw_scale_init = tf.keras.initializers.Constant(np.log(np.expm1(1e-5)))
  trainable_vars = tfpl.VariableLayer(
      2 * n,
      dtype=dtype,
      initializer=tfpl.BlockwiseInitializer([loc_init, raw_scale_init],
                                            sizes=[n, n]))

  def make_distribution(params):
    loc = params[Ellipsis, :n]
    scale = 1e-5 + tf.nn.softplus(c + params[Ellipsis, n:])
    return tfd.Independent(tfd.Normal(loc=loc, scale=scale),
                           reinterpreted_batch_ndims=1)

  return tf.keras.Sequential(
      [trainable_vars, tfpl.DistributionLambda(make_distribution)])
Example #3
def prior_trainable(kernel_size, bias_size=0, dtype=None, num_updates=1):
  """Prior function for variational layer.

  Args:
    kernel_size: Number of kernel weights in the layer.
    bias_size: Number of bias weights in the layer.
    dtype: Dtype of the trainable variable.
    num_updates: Divisor applied to the scale regularizer so the hyperprior
      penalty is amortized across that many optimizer updates.

  Returns:
    A tf.keras.Sequential producing an Independent(Normal) distribution over
    the ``kernel_size + bias_size`` weights with a single shared, trainable
    scale parameter regularized toward a LogNormal(0, 1) hyperprior.
  """
  n = kernel_size + bias_size
  # Softplus shift so the initial shared scale works out to ~1e-5.
  c = np.log(np.expm1(1e-5))

  def regularizer(t):
    # Negated LogNormal(0, 1) log-prob of the shared scale acts as a loss
    # penalty, spread across `num_updates` steps.
    out = tfd.LogNormal(0., 1.).log_prob(1e-5 + tf.nn.softplus(c + t[Ellipsis, -1]))
    return -tf.reduce_sum(out) / num_updates

  # Include the prior on the scale parameter as a regularizer in the loss.
  variable_layer = tfpl.VariableLayer(n, dtype=dtype, regularizer=regularizer)

  def distribution_fn(t):
    # Shared scale derived from the last entry of t.
    # NOTE(review): `loc` uses t[..., :n] (all n entries of the variable)
    # while `scale` reuses t[..., -1], so the last variable serves as both a
    # location and the raw scale. Confirm this overlap is intentional
    # (vs. allocating VariableLayer(n + 1)).
    scale = 1e-5 + tf.nn.softplus(c + t[Ellipsis, -1])
    return tfd.Independent(tfd.Normal(loc=t[Ellipsis, :n], scale=scale),
                           reinterpreted_batch_ndims=1)

  distribution_layer = tfpl.DistributionLambda(distribution_fn)
  return tf.keras.Sequential([variable_layer, distribution_layer])