Example #1
0
 def __init__(self,
              filters,
              kernel_size,
              strides=(1, 1),
              padding='valid',
              data_format=None,
              dilation_rate=(1, 1),
              activation=None,
              use_bias=True,
              kernel_initializer='trainable_normal',
              bias_initializer='zeros',
              kernel_regularizer='normal_kl_divergence',
              bias_regularizer=None,
              activity_regularizer=None,
              kernel_constraint=None,
              bias_constraint=None,
              **kwargs):
   """Constructs the reparameterized 2D convolution layer.

   Initializer/regularizer/constraint arguments are resolved through the
   project's `initializers.get` / `regularizers.get` / `constraints.get`
   helpers (so string identifiers and callables are both accepted); all
   other arguments are forwarded unchanged to the parent constructor.
   """
   # Resolve the string-or-callable arguments once, up front, so the
   # super() call below stays easy to scan.
   resolved_kernel_init = initializers.get(kernel_initializer)
   resolved_bias_init = initializers.get(bias_initializer)
   resolved_kernel_reg = regularizers.get(kernel_regularizer)
   resolved_bias_reg = regularizers.get(bias_regularizer)
   resolved_activity_reg = regularizers.get(activity_regularizer)
   resolved_kernel_con = constraints.get(kernel_constraint)
   resolved_bias_con = constraints.get(bias_constraint)
   super(Conv2DReparameterization, self).__init__(
       filters=filters,
       kernel_size=kernel_size,
       strides=strides,
       padding=padding,
       data_format=data_format,
       dilation_rate=dilation_rate,
       activation=activation,
       use_bias=use_bias,
       kernel_initializer=resolved_kernel_init,
       bias_initializer=resolved_bias_init,
       kernel_regularizer=resolved_kernel_reg,
       bias_regularizer=resolved_bias_reg,
       activity_regularizer=resolved_activity_reg,
       kernel_constraint=resolved_kernel_con,
       bias_constraint=resolved_bias_con,
       **kwargs)
    def __init__(self,
                 units,
                 num_inducing,
                 mean_fn=Zeros(),
                 covariance_fn=ExponentiatedQuadratic(variance=1.,
                                                      lengthscale=1.),
                 inducing_inputs_initializer='random_normal',
                 inducing_outputs_initializer='trainable_normal',
                 inducing_inputs_regularizer=None,
                 inducing_outputs_regularizer='normal_kl_divergence',
                 inducing_inputs_constraint=None,
                 inducing_outputs_constraint=None,
                 **kwargs):
        """Constructs the sparse Gaussian process layer.

        Args:
          units: integer, dimensionality of the layer.
          num_inducing: integer, number of inducing points used by the
            approximation.
          mean_fn: mean function; a callable mapping an inputs Tensor of
            shape [batch, ...] to a Tensor of shape [batch].
          covariance_fn: covariance function; a callable mapping two input
            Tensors of shapes [batch_x1, ...] and [batch_x2, ...] to a
            positive semi-definite matrix of shape [batch_x1, batch_x2].
          inducing_inputs_initializer: initializer for the inducing inputs.
          inducing_outputs_initializer: initializer for the inducing outputs.
          inducing_inputs_regularizer: regularizer applied to the inducing
            inputs.
          inducing_outputs_regularizer: regularizer applied to the inducing
            outputs.
          inducing_inputs_constraint: constraint applied to the inducing
            inputs.
          inducing_outputs_constraint: constraint applied to the inducing
            outputs.
          **kwargs: keyword arguments forwarded to the parent class.
        """
        # The parent class is initialized without conditioning data; the
        # conditional inputs/outputs are deliberately left unset here.
        super(SparseGaussianProcess, self).__init__(
            units=units,
            mean_fn=mean_fn,
            covariance_fn=covariance_fn,
            conditional_inputs=None,
            conditional_outputs=None,
            **kwargs)
        self.num_inducing = num_inducing
        # Resolve string identifiers (or pass callables through) via the
        # project's lookup helpers.
        self.inducing_inputs_initializer = initializers.get(
            inducing_inputs_initializer)
        self.inducing_outputs_initializer = initializers.get(
            inducing_outputs_initializer)
        self.inducing_inputs_regularizer = regularizers.get(
            inducing_inputs_regularizer)
        self.inducing_outputs_regularizer = regularizers.get(
            inducing_outputs_regularizer)
        self.inducing_inputs_constraint = constraints.get(
            inducing_inputs_constraint)
        self.inducing_outputs_constraint = constraints.get(
            inducing_outputs_constraint)
Example #3
0
 def testHalfCauchyKLDivergence(self):
   """Checks the half-Cauchy KL regularizer is non-negative on a LogNormal posterior."""
   event_shape = (3,)
   # Independent LogNormal variational posterior over a length-3 event.
   posterior = ed.Independent(
       ed.LogNormal(loc=tf.zeros(event_shape), scale=1.).distribution,
       reinterpreted_batch_ndims=1)
   divergence = regularizers.get('half_cauchy_kl_divergence')(posterior)
   # A KL divergence is >= 0 by definition; the estimate should respect that.
   self.assertGreaterEqual(self.evaluate(divergence), 0.)
Example #4
0
 def __init__(self,
              units,
              activation=None,
              use_bias=True,
              kernel_initializer='trainable_normal',
              bias_initializer='zeros',
              kernel_regularizer='log_uniform_kl_divergence',
              bias_regularizer=None,
              activity_regularizer=None,
              **kwargs):
   """Constructs the variational-dropout dense layer.

   Args:
     units: integer, dimensionality of the output space.
     activation: activation function forwarded to the parent dense layer.
     use_bias: whether the layer uses a bias vector.
     kernel_initializer: initializer for the kernel; strings are resolved
       via the project's `initializers.get`.
     bias_initializer: initializer for the bias. NOTE(review): the default
       was 'zero'; normalized to the canonical 'zeros' used by the sibling
       layers in this codebase — both identifiers resolve to the Keras
       Zeros initializer, so behavior is unchanged.
     kernel_regularizer: regularizer for the kernel; strings are resolved
       via the project's `regularizers.get`.
     bias_regularizer: regularizer for the bias.
     activity_regularizer: regularizer applied to the layer's output.
     **kwargs: keyword arguments forwarded to the parent class.
   """
   super(DenseVariationalDropout, self).__init__(
       units=units,
       activation=activation,
       use_bias=use_bias,
       kernel_initializer=initializers.get(kernel_initializer),
       bias_initializer=initializers.get(bias_initializer),
       kernel_regularizer=regularizers.get(kernel_regularizer),
       bias_regularizer=regularizers.get(bias_regularizer),
       activity_regularizer=regularizers.get(activity_regularizer),
       **kwargs)
Example #5
0
 def __init__(self,
              mean_initializer=tf.keras.initializers.truncated_normal(
                  stddev=1e-5),
              stddev_initializer='scaled_normal_std_dev',
              mean_regularizer=None,
              stddev_regularizer=None,
              mean_constraint=None,
              stddev_constraint='positive',
              seed=None,
              dtype=tf.float32,
              **kwargs):
      """Constructs the trainable-normal initializer."""
      super(TrainableNormal, self).__init__(dtype=dtype, **kwargs)
      # Resolve each (possibly string-valued) argument through the lookup
      # helpers; `get` here is this module's own initializer lookup.
      self.mean_initializer, self.stddev_initializer = (
          get(mean_initializer), get(stddev_initializer))
      self.mean_regularizer, self.stddev_regularizer = (
          regularizers.get(mean_regularizer),
          regularizers.get(stddev_regularizer))
      self.mean_constraint, self.stddev_constraint = (
          constraints.get(mean_constraint),
          constraints.get(stddev_constraint))
      self.seed = seed
Example #6
0
 def __init__(self,
              loc_initializer=tf.keras.initializers.truncated_normal(
                  stddev=1e-5),
              scale_initializer=tf.keras.initializers.truncated_normal(
                  mean=1., stddev=1e-5),
              loc_regularizer=None,
              scale_regularizer=None,
              loc_constraint=None,
              scale_constraint='positive',
              seed=None,
              dtype=tf.float32,
              **kwargs):
      """Constructs the trainable half-Cauchy initializer."""
      super(TrainableHalfCauchy, self).__init__(dtype=dtype, **kwargs)
      # Resolve each (possibly string-valued) argument through the lookup
      # helpers; `get` here is this module's own initializer lookup.
      self.loc_initializer, self.scale_initializer = (
          get(loc_initializer), get(scale_initializer))
      self.loc_regularizer, self.scale_regularizer = (
          regularizers.get(loc_regularizer),
          regularizers.get(scale_regularizer))
      self.loc_constraint, self.scale_constraint = (
          constraints.get(loc_constraint),
          constraints.get(scale_constraint))
      self.seed = seed
Example #7
0
 def __init__(self,
              units,
              activation='tanh',
              recurrent_activation='hard_sigmoid',
              use_bias=True,
              kernel_initializer='trainable_normal',
              recurrent_initializer='trainable_normal',
              bias_initializer='zeros',
              unit_forget_bias=True,
              kernel_regularizer='normal_kl_divergence',
              recurrent_regularizer='normal_kl_divergence',
              bias_regularizer=None,
              kernel_constraint=None,
              recurrent_constraint=None,
              bias_constraint=None,
              dropout=0.,
              recurrent_dropout=0.,
              implementation=1,
              **kwargs):
   """Constructs the reparameterized LSTM cell.

   String-valued initializer/regularizer/constraint arguments are resolved
   through the project's lookup helpers before delegation; every other
   argument is forwarded to the parent constructor as-is.
   """
   # Resolve the lookups into one mapping so the super() call stays short.
   # Double-star unpacking of both dicts preserves the original's TypeError
   # if a caller also passes one of these keywords in **kwargs.
   resolved = dict(
       kernel_initializer=initializers.get(kernel_initializer),
       recurrent_initializer=initializers.get(recurrent_initializer),
       bias_initializer=initializers.get(bias_initializer),
       kernel_regularizer=regularizers.get(kernel_regularizer),
       recurrent_regularizer=regularizers.get(recurrent_regularizer),
       bias_regularizer=regularizers.get(bias_regularizer),
       kernel_constraint=constraints.get(kernel_constraint),
       recurrent_constraint=constraints.get(recurrent_constraint),
       bias_constraint=constraints.get(bias_constraint))
   super(LSTMCellReparameterization, self).__init__(
       units=units,
       activation=activation,
       recurrent_activation=recurrent_activation,
       use_bias=use_bias,
       unit_forget_bias=unit_forget_bias,
       dropout=dropout,
       recurrent_dropout=recurrent_dropout,
       implementation=implementation,
       **resolved,
       **kwargs)