Example #1
 def test_axis_exceptions(self):
   if not tf.executing_eagerly():
     with self.assertRaisesWithPredicateMatch(
         NotImplementedError, 'Argument `axis` must be known statically.'):
       tfb.Pad(axis=tf1.placeholder_with_default([-1], shape=None),
               validate_args=True)
   with self.assertRaisesWithPredicateMatch(
       ValueError, 'Argument `axis` must be scalar or vector.'):
     tfb.Pad(axis=[[-1]], validate_args=True)
   with self.assertRaisesWithPredicateMatch(
       ValueError, 'Argument `axis` must be negative.'):
     tfb.Pad(axis=0, validate_args=True)
   with self.assertRaisesWithPredicateMatch(
       ValueError, 'Argument `axis` elements must be unique.'):
     tfb.Pad(axis=[-1, -1], validate_args=True)
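
A minimal sketch of `axis` values that satisfy the constraints tested above (assuming the usual TFP import aliases): `axis` must be known statically and be a negative, duplicate-free scalar or vector, with one entry per pair of `paddings`.

import tensorflow as tf
import tensorflow_probability as tfp

tfb = tfp.bijectors

# Valid: a single negative axis with the default paddings of [[0, 1]].
b = tfb.Pad(axis=-2, validate_args=True)
b.forward([[1., 2.], [3., 4.]])  # ==> [[1., 2.], [3., 4.], [0., 0.]]

# Valid: one unique, negative axis per padding pair.
tfb.Pad(axis=[-2, -1], paddings=[[0, 1], [0, 1]], validate_args=True)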
Example #2
    def test_left_right_3d(self):
        x1_actual = [[[3., 4.]]]
        y1_expected = [[
            [0., 0., 0., 0., 0., 0., 0., 0., 0.],
            [0., 0., 0., 3., 4., 0., 0., 0., 0.],
            [0., 0., 0., 0., 0., 0., 0., 0., 0.],
            [0., 0., 0., 0., 0., 0., 0., 0., 0.],
        ]]
        x2_actual = [[[1., 2.], [3., 4.]]]
        y2_expected = [[
            [0., 0., 0., 0., 0., 0., 0., 0., 0.],
            [0., 0., 0., 1., 2., 0., 0., 0., 0.],
            [0., 0., 0., 3., 4., 0., 0., 0., 0.],
            [0., 0., 0., 0., 0., 0., 0., 0., 0.],
            [0., 0., 0., 0., 0., 0., 0., 0., 0.],
        ]]

        b = tfb.Pad(paddings=[[1, 2], [3, 4]], validate_args=True)
        y1 = b.forward(x1_actual)
        y2 = b.forward(x2_actual)
        x1 = b.inverse(y1_expected)
        x2 = b.inverse(y2_expected)
        fldj = b.forward_log_det_jacobian([[43.]], event_ndims=2)
        ildj = b.inverse_log_det_jacobian([[45., 0.]], event_ndims=2)
        [y1_, y2_, x1_, x2_, fldj_,
         ildj_] = self.evaluate([y1, y2, x1, x2, fldj, ildj])

        self.assertAllEqual(y1_expected, y1_)
        self.assertAllEqual(y2_expected, y2_)
        self.assertAllEqual(x1_actual, x1_)
        self.assertAllEqual(x2_actual, x2_)
        self.assertEqual(0., fldj_)
        self.assertEqual(0., ildj_)
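
The shape arithmetic in this test can be checked directly; as a sketch, `forward_event_shape` reports that each padded dimension grows by the sum of its `[before, after]` pair.

b = tfb.Pad(paddings=[[1, 2], [3, 4]], validate_args=True)
b.forward_event_shape([1, 2])  # ==> [4, 9]: 1 + (1 + 2) and 2 + (3 + 4)
b.inverse_event_shape([4, 9])  # ==> [1, 2]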
Example #3
 def test_variable_paddings(self):
     x = tf.Variable([[1, 2]])
     b = tfb.Pad(paddings=x, validate_args=True)
     self.evaluate(b.paddings.initializer)
     y = b.forward([[1, 2]])
     self.assertAllEqual([[0, 1, 2, 0, 0]], self.evaluate(y))
     with tf.control_dependencies([b.paddings.assign([[1, 0]])]):
         y = b.forward([[1, 2]])
     self.assertAllEqual([[0, 1, 2]], self.evaluate(y))
Example #4
 def _default_event_space_bijector(self):
     """The bijector maps a zero-dimensional null Tensor input to `self.loc`."""
     # The shape of the pulled back null tensor will be `self.loc.shape + (0,)`.
     # First we pad to a tensor of zeros with shape `self.loc.shape + (1,)`.
     pad_zero = tfb.Pad([(1, 0)])
     # Next, we squeeze to a tensor of zeros with shape matching `self.loc`.
     zeros_squeezed = tfb.Reshape([], event_shape_in=[1])(pad_zero)
     # Finally, we shift the zeros by `self.loc`.
     return tfb.Shift(self.loc)(zeros_squeezed)
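
A standalone sketch of the same construction (`loc` and `bij` are illustrative names, not part of the source): the chain pads a size-zero input with a single zero, squeezes the padded dimension away, and shifts the resulting zeros by `loc`.

loc = tf.constant([1., 2., 3.])
pad_zero = tfb.Pad([(1, 0)])                                    # (..., 0) -> (..., 1), filled with zeros
zeros_squeezed = tfb.Reshape([], event_shape_in=[1])(pad_zero)  # (..., 1) -> (...)
bij = tfb.Shift(loc)(zeros_squeezed)                            # add `loc` to the zeros
bij.forward(tf.zeros(loc.shape + (0,)))                         # ==> [1., 2., 3.]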
Example #5
  def test_paddings_exceptions(self):
    with self.assertRaisesWithPredicateMatch(
        ValueError, 'Argument `paddings` must be a vector of pairs.'):
      tfb.Pad(paddings=-1, validate_args=True)
    with self.assertRaisesWithPredicateMatch(
        ValueError, 'Argument `paddings` must be non-negative.'):
      tfb.Pad(paddings=[[-1, 0]], validate_args=True)
    with self.assertRaisesWithPredicateMatch(
        ValueError,
        ('Arguments `axis` and `paddings` must have the same number '
         'of elements.')):
      tfb.Pad(paddings=[[1, 0]], axis=[-2, -1], validate_args=True)

    if tf.executing_eagerly():
      return

    with self.assertRaisesWithPredicateMatch(
        tf.errors.InvalidArgumentError,
        'Argument `paddings` must be a vector of pairs.'):
      b = tfb.Pad(paddings=tf1.placeholder_with_default([[1]], shape=None),
                  axis=-1, validate_args=True)
      self.evaluate(b.forward([0]))
    with self.assertRaisesWithPredicateMatch(
        tf.errors.InvalidArgumentError,
        'Argument `paddings` must be non-negative.'):
      b = tfb.Pad(paddings=tf1.placeholder_with_default([[-1, 0]], shape=None),
                  axis=-1, validate_args=True)
      self.evaluate(b.forward([0]))
    with self.assertRaisesWithPredicateMatch(
        tf.errors.InvalidArgumentError,
        ('Arguments `axis` and `paddings` must have the same number '
         'of elements.')):
      b = tfb.Pad(paddings=tf1.placeholder_with_default([[1, 0]], shape=None),
                  axis=[-2, -1], validate_args=True)
      self.evaluate(b.forward([0]))
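
By contrast, a minimal sketch of `paddings` values that pass these checks: a non-negative, rank-2 array of `[before, after]` pairs, with one pair per entry in `axis`.

tfb.Pad(paddings=[[1, 0]], axis=-1, validate_args=True)                  # one pair, one axis
tfb.Pad(paddings=[[1, 0], [0, 2]], axis=[-2, -1], validate_args=True)    # two pairs, two axes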
Example #6
 def test_defaults(self):
     b = tfb.Pad(validate_args=True)
     y1 = b.forward([3., 4.])
     y2 = b.forward([[1., 2.], [3., 4.]])
     x1 = b.inverse([3., 4., 0.])
     x2 = b.inverse([[1., 2., 0.], [3., 4., 0.]])
     fldj = b.forward_log_det_jacobian([43.], event_ndims=1)
     ildj = b.inverse_log_det_jacobian([45., 0.], event_ndims=1)
     [y1_, y2_, x1_, x2_, fldj_,
      ildj_] = self.evaluate([y1, y2, x1, x2, fldj, ildj])
     self.assertAllEqual([3., 4., 0.], y1_)
     self.assertAllEqual([[1., 2., 0.], [3., 4., 0.]], y2_)
     self.assertAllEqual([3., 4.], x1_)
     self.assertAllEqual([[1., 2.], [3., 4.]], x2_)
     self.assertAllEqual(0., fldj_)
     self.assertAllEqual(0., ildj_)
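
The defaults exercised here correspond to `paddings=[[0, 1]]` on `axis=-1`, i.e. a single zero appended to the rightmost dimension; a sketch of the equivalent explicit construction:

b = tfb.Pad(paddings=[[0, 1]], axis=-1, validate_args=True)
b.forward([3., 4.])  # ==> [3., 4., 0.], same as tfb.Pad(validate_args=True)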
Example #7
def build_trainable_highway_flow(width,
                                 residual_fraction_initial_value=0.5,
                                 activation_fn=None,
                                 gate_first_n=None,
                                 seed=None,
                                 validate_args=False):
    """Builds a HighwayFlow parameterized by trainable variables.

    The variables are transformed to enforce the following parameter constraints:

    - `residual_fraction` is bounded between 0 and 1.
    - `upper_diagonal_weights_matrix` is a randomly initialized lower-triangular
      matrix with a positive diagonal, of size `width x width`.
    - `lower_diagonal_weights_matrix` is a randomly initialized lower-triangular
      matrix with ones on the diagonal, of size `width x width`.
    - `bias` is a randomly initialized vector of size `width`.

    Args:
      width: Input dimension of the bijector.
      residual_fraction_initial_value: Initial value of the gating parameter;
        must be between 0 and 1.
      activation_fn: Callable invertible activation function
        (e.g., `tf.nn.softplus`), or `None`.
      gate_first_n: Decides how many of the leading input dimensions are gated
        (useful, for example, when using auxiliary variables).
      seed: Seed for random initialization of the weights.
      validate_args: Python `bool`. Whether to validate input with runtime
        assertions.
        Default value: `False`.

    Returns:
      trainable_highway_flow: The initialized bijector.
    """

    residual_fraction_initial_value = tf.convert_to_tensor(
        residual_fraction_initial_value,
        dtype_hint=tf.float32,
        name='residual_fraction_initial_value')
    dtype = residual_fraction_initial_value.dtype

    bias_seed, upper_seed, lower_seed = samplers.split_seed(seed, n=3)
    lower_bijector = tfb.Chain([
        tfb.TransformDiagonal(diag_bijector=tfb.Shift(1.)),
        tfb.Pad(paddings=[(1, 0), (0, 1)]),
        tfb.FillTriangular()
    ])
    unconstrained_lower_initial_values = samplers.normal(
        shape=lower_bijector.inverse_event_shape([width, width]),
        mean=0.,
        stddev=.01,
        seed=lower_seed)
    upper_bijector = tfb.FillScaleTriL(diag_bijector=tfb.Softplus(),
                                       diag_shift=None)
    unconstrained_upper_initial_values = samplers.normal(
        shape=upper_bijector.inverse_event_shape([width, width]),
        mean=0.,
        stddev=.01,
        seed=upper_seed)

    return HighwayFlow(residual_fraction=util.TransformedVariable(
        initial_value=residual_fraction_initial_value,
        bijector=tfb.Sigmoid(),
        dtype=dtype),
                       activation_fn=activation_fn,
                       bias=tf.Variable(samplers.normal((width, ),
                                                        mean=0.,
                                                        stddev=0.01,
                                                        seed=bias_seed),
                                        dtype=dtype),
                       upper_diagonal_weights_matrix=util.TransformedVariable(
                           initial_value=upper_bijector.forward(
                               unconstrained_upper_initial_values),
                           bijector=upper_bijector,
                           dtype=dtype),
                       lower_diagonal_weights_matrix=util.TransformedVariable(
                           initial_value=lower_bijector.forward(
                               unconstrained_lower_initial_values),
                           bijector=lower_bijector,
                           dtype=dtype),
                       gate_first_n=gate_first_n,
                       validate_args=validate_args)
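
A sketch of how the returned bijector might be used; the input shape, seed value, and optimizer note here are illustrative assumptions, not part of the source.

flow = build_trainable_highway_flow(
    width=3, activation_fn=tf.nn.softplus, seed=42, validate_args=True)
y = flow.forward(tf.zeros([2, 3]))  # a batch of 2 events of size `width`
# The constrained parameters are backed by trainable variables, so the flow can
# be fit with any gradient-based optimizer via `flow.trainable_variables`.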