Example #1
  def testRollStatic(self):
    with self.test_session():
      with self.assertRaisesRegexp(ValueError, "None values not supported."):
        distribution_util.rotate_transpose(None, 1)
      for x in (np.ones(1), np.ones((2, 1)), np.ones((3, 2, 1))):
        for shift in np.arange(-5, 5):
          y = distribution_util.rotate_transpose(x, shift)
          self.assertAllEqual(self._np_rotate_transpose(x, shift), y.eval())
          self.assertAllEqual(np.roll(x.shape, shift), y.get_shape().as_list())
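The comparison helper `self._np_rotate_transpose` is not shown in this listing; a minimal NumPy sketch of what such a reference presumably computes (circularly rolling the dimension order, which is what `rotate_transpose` does to a `Tensor`) is:

import numpy as np

def _np_rotate_transpose(x, shift):
  # Roll the axis order circularly: shift=1 moves the right-most dim to the
  # front, shift=-1 moves the left-most dim to the back.
  x = np.asarray(x)
  return np.transpose(x, np.roll(np.arange(x.ndim), shift))

This also explains the shape assertion above: the resulting static shape equals `np.roll(x.shape, shift)`.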
Example #3
    def make_batch_of_event_sample_matrices(
            self, x, name="make_batch_of_event_sample_matrices"):
        """Reshapes/transposes `Distribution` `Tensor` from S+B+E to B_+E_+S_.

    Where:
      - `B_ = B if B else [1]`,
      - `E_ = E if E else [1]`,
      - `S_ = [tf.reduce_prod(S)]`.

    Args:
      x: `Tensor`.
      name: `String`. The name to give this op.

    Returns:
      x: `Tensor`. Input transposed/reshaped to `B_+E_+S_`.
      sample_shape: `Tensor` (1D, `int32`).
    """
        with self._name_scope(name, values=[x]):
            x = ops.convert_to_tensor(x, name="x")
            sample_shape, batch_shape, event_shape = self.get_shape(x)
            event_shape = distribution_util.pick_vector(
                self._event_ndims_is_0, (1, ), event_shape)
            batch_shape = distribution_util.pick_vector(
                self._batch_ndims_is_0, (1, ), batch_shape)
            new_shape = array_ops.concat(0, ((-1, ), batch_shape, event_shape))
            x = array_ops.reshape(x, shape=new_shape)
            x = distribution_util.rotate_transpose(x, shift=-1)
            return x, sample_shape
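A concrete shape walk-through of the reshape-then-rotate performed above may help; the shapes S=[4], B=[3], E=[2] are illustrative assumptions, and the NumPy calls stand in for the array_ops used in the example:

import numpy as np

x = np.zeros([4, 3, 2])                         # laid out as S + B + E
x = np.reshape(x, [-1, 3, 2])                   # collapse S into one leading dim
x = np.transpose(x, np.roll(np.arange(3), -1))  # rotate_transpose(x, shift=-1)
print(x.shape)                                  # (3, 2, 4) == B_ + E_ + S_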
Example #4
  def make_batch_of_event_sample_matrices(
      self, x, expand_batch_dim=True,
      name="make_batch_of_event_sample_matrices"):
    """Reshapes/transposes `Distribution` `Tensor` from S+B+E to B_+E_+S_.

    Where:
      - `B_ = B if B or not expand_batch_dim else [1]`,
      - `E_ = E if E else [1]`,
      - `S_ = [tf.reduce_prod(S)]`.

    Args:
      x: `Tensor`.
      expand_batch_dim: Python `Boolean` scalar. If `True` the batch dims will
        be expanded such that batch_ndims>=1.
      name: `String`. The name to give this op.

    Returns:
      x: `Tensor`. Input transposed/reshaped to `B_+E_+S_`.
      sample_shape: `Tensor` (1D, `int32`).
    """
    with self._name_scope(name, values=[x]):
      x = ops.convert_to_tensor(x, name="x")
      sample_shape, batch_shape, event_shape = self.get_shape(x)
      event_shape = distribution_util.pick_vector(
          self._event_ndims_is_0, [1], event_shape)
      if expand_batch_dim:
        batch_shape = distribution_util.pick_vector(
            self._batch_ndims_is_0, [1], batch_shape)
      new_shape = array_ops.concat_v2([[-1], batch_shape, event_shape], 0)
      x = array_ops.reshape(x, shape=new_shape)
      x = distribution_util.rotate_transpose(x, shift=-1)
      return x, sample_shape
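The `pick_vector` calls above pad in a `[1]` when the batch or event rank is zero, so even a scalar-batch, scalar-event input ends up as a batch of matrices. A rough NumPy analogue of that padding, assuming batch_ndims = event_ndims = 0 and a sample shape of [5]:

import numpy as np

x = np.zeros([5])                 # S + B + E collapses to just S when B = E = []
batch_shape = [1]                 # what pick_vector(batch_ndims_is_0, [1], B) yields
event_shape = [1]                 # what pick_vector(event_ndims_is_0, [1], E) yields
x = np.reshape(x, [-1] + batch_shape + event_shape)   # shape (5, 1, 1)
x = np.transpose(x, np.roll(np.arange(3), -1))        # shape (1, 1, 5) == B_ + E_ + S_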
Example #5
    def make_batch_of_event_sample_matrices(self, x, name="make_batch_of_event_sample_matrices"):
        """Reshapes/transposes `Distribution` `Tensor` from S+B+E to B_+E_+S_.

    Where:
      - `B_ = B if B else [1]`,
      - `E_ = E if E else [1]`,
      - `S_ = [tf.reduce_prod(S)]`.

    Args:
      x: `Tensor`.
      name: `String`. The name to give this op.

    Returns:
      x: `Tensor`. Input transposed/reshaped to `B_+E_+S_`.
      sample_shape: `Tensor` (1D, `int32`).
    """
        with self._name_scope(name, values=[x]):
            x = ops.convert_to_tensor(x, name="x")
            sample_shape, batch_shape, event_shape = self.get_shape(x)
            event_shape = distribution_util.pick_vector(self._event_ndims_is_0, (1,), event_shape)
            batch_shape = distribution_util.pick_vector(self._batch_ndims_is_0, (1,), batch_shape)
            new_shape = array_ops.concat(0, ((-1,), batch_shape, event_shape))
            x = array_ops.reshape(x, shape=new_shape)
            x = distribution_util.rotate_transpose(x, shift=-1)
            return x, sample_shape
Example #6
    def make_batch_of_event_sample_matrices(
            self,
            x,
            expand_batch_dim=True,
            name="make_batch_of_event_sample_matrices"):
        """Reshapes/transposes `Distribution` `Tensor` from S+B+E to B_+E_+S_.

    Where:
      - `B_ = B if B or not expand_batch_dim else [1]`,
      - `E_ = E if E else [1]`,
      - `S_ = [tf.reduce_prod(S)]`.

    Args:
      x: `Tensor`.
      expand_batch_dim: Python `Boolean` scalar. If `True` the batch dims will
        be expanded such that batch_ndims>=1.
      name: `String`. The name to give this op.

    Returns:
      x: `Tensor`. Input transposed/reshaped to `B_+E_+S_`.
      sample_shape: `Tensor` (1D, `int32`).
    """
        with self._name_scope(name, values=[x]):
            x = ops.convert_to_tensor(x, name="x")
            sample_shape, batch_shape, event_shape = self.get_shape(x)
            event_shape = distribution_util.pick_vector(
                self._event_ndims_is_0, [1], event_shape)
            if expand_batch_dim:
                batch_shape = distribution_util.pick_vector(
                    self._batch_ndims_is_0, [1], batch_shape)
            new_shape = array_ops.concat([[-1], batch_shape, event_shape], 0)
            x = array_ops.reshape(x, shape=new_shape)
            x = distribution_util.rotate_transpose(x, shift=-1)
            return x, sample_shape
Example #7
    def undo_make_batch_of_event_sample_matrices(
            self,
            x,
            sample_shape,
            expand_batch_dim=True,
            name="undo_make_batch_of_event_sample_matrices"):
        """Reshapes/transposes `Distribution` `Tensor` from B_+E_+S_ to S+B+E.

    Where:
      - `B_ = B if B or not expand_batch_dim else [1]`,
      - `E_ = E if E else [1]`,
      - `S_ = [tf.reduce_prod(S)]`.

    This function "reverses" `make_batch_of_event_sample_matrices`.

    Args:
      x: `Tensor` of shape `B_+E_+S_`.
      sample_shape: `Tensor` (1D, `int32`).
      expand_batch_dim: Python `bool`. If `True` the batch dims will be expanded
        such that `batch_ndims>=1`.
      name: Python `str`. The name to give this op.

    Returns:
      x: `Tensor`. Input transposed/reshaped to `S+B+E`.
    """
        with self._name_scope(name, values=[x, sample_shape]):
            x = ops.convert_to_tensor(x, name="x")
            # x.shape: _B+_E+[prod(S)]
            sample_shape = ops.convert_to_tensor(sample_shape,
                                                 name="sample_shape")
            x = distribution_util.rotate_transpose(x, shift=1)
            # x.shape: [prod(S)]+_B+_E
            if self._is_all_constant_helper(self.batch_ndims,
                                            self.event_ndims):
                if self._batch_ndims_is_0 or self._event_ndims_is_0:
                    squeeze_dims = []
                    if self._event_ndims_is_0:
                        squeeze_dims += [-1]
                    if self._batch_ndims_is_0 and expand_batch_dim:
                        squeeze_dims += [1]
                    if squeeze_dims:
                        x = array_ops.squeeze(x, squeeze_dims=squeeze_dims)
                        # x.shape: [prod(S)]+B+E
                _, batch_shape, event_shape = self.get_shape(x)
            else:
                s = (x.get_shape().as_list() if
                     x.get_shape().is_fully_defined() else array_ops.shape(x))
                batch_shape = s[1:1 + self.batch_ndims]
                # Since sample_dims=1 and is left-most, we add 1 to the number of
                # batch_ndims to get the event start dim.
                event_start = array_ops.where(
                    math_ops.logical_and(expand_batch_dim,
                                         self._batch_ndims_is_0), 2,
                    1 + self.batch_ndims)
                event_shape = s[event_start:event_start + self.event_ndims]
            new_shape = array_ops.concat(
                [sample_shape, batch_shape, event_shape], 0)
            x = array_ops.reshape(x, shape=new_shape)
            # x.shape: S+B+E
            return x
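`undo_make_batch_of_event_sample_matrices` reverses the earlier transformation, so a round trip should give back the original S + B + E layout. A self-contained NumPy sketch of that round trip, with the illustrative shapes S=[4], B=[3], E=[2] assumed:

import numpy as np

x = np.arange(24).reshape([4, 3, 2])            # S + B + E
m = np.transpose(np.reshape(x, [-1, 3, 2]),
                 np.roll(np.arange(3), -1))     # "make": B + E + [prod(S)]
y = np.reshape(np.transpose(m, np.roll(np.arange(3), 1)),
               [4, 3, 2])                       # "undo": rotate back, restore S
assert (x == y).all()                           # original layout recovered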
Example #8
  def testRollDynamic(self):
    with self.test_session() as sess:
      x = tf.placeholder(tf.float32)
      shift = tf.placeholder(tf.int32)
      for x_value in (np.ones(1, dtype=x.dtype.as_numpy_dtype()),
                      np.ones((2, 1), dtype=x.dtype.as_numpy_dtype()),
                      np.ones((3, 2, 1), dtype=x.dtype.as_numpy_dtype())):
        for shift_value in np.arange(-5, 5):
          self.assertAllEqual(
              self._np_rotate_transpose(x_value, shift_value),
              sess.run(distribution_util.rotate_transpose(x, shift),
                       feed_dict={x: x_value, shift: shift_value}))
Example #9
  def undo_make_batch_of_event_sample_matrices(
      self, x, sample_shape, expand_batch_dim=True,
      name="undo_make_batch_of_event_sample_matrices"):
    """Reshapes/transposes `Distribution` `Tensor` from B_+E_+S_ to S+B+E.

    Where:
      - `B_ = B if B or not expand_batch_dim else [1]`,
      - `E_ = E if E else [1]`,
      - `S_ = [tf.reduce_prod(S)]`.

    This function "reverses" `make_batch_of_event_sample_matrices`.

    Args:
      x: `Tensor` of shape `B_+E_+S_`.
      sample_shape: `Tensor` (1D, `int32`).
      expand_batch_dim: Python `bool`. If `True` the batch dims will be expanded
        such that `batch_ndims>=1`.
      name: Python `str`. The name to give this op.

    Returns:
      x: `Tensor`. Input transposed/reshaped to `S+B+E`.
    """
    with self._name_scope(name, values=[x, sample_shape]):
      x = ops.convert_to_tensor(x, name="x")
      # x.shape: _B+_E+[prod(S)]
      sample_shape = ops.convert_to_tensor(sample_shape, name="sample_shape")
      x = distribution_util.rotate_transpose(x, shift=1)
      # x.shape: [prod(S)]+_B+_E
      if self._is_all_constant_helper(self.batch_ndims, self.event_ndims):
        if self._batch_ndims_is_0 or self._event_ndims_is_0:
          squeeze_dims = []
          if self._event_ndims_is_0:
            squeeze_dims += [-1]
          if self._batch_ndims_is_0 and expand_batch_dim:
            squeeze_dims += [1]
          if squeeze_dims:
            x = array_ops.squeeze(x, squeeze_dims=squeeze_dims)
            # x.shape: [prod(S)]+B+E
        _, batch_shape, event_shape = self.get_shape(x)
      else:
        s = (x.get_shape().as_list() if x.get_shape().is_fully_defined()
             else array_ops.shape(x))
        batch_shape = s[1:1+self.batch_ndims]
        # Since sample_dims=1 and is left-most, we add 1 to the number of
        # batch_ndims to get the event start dim.
        event_start = array_ops.where(
            math_ops.logical_and(expand_batch_dim, self._batch_ndims_is_0),
            2, 1 + self.batch_ndims)
        event_shape = s[event_start:event_start+self.event_ndims]
      new_shape = array_ops.concat([sample_shape, batch_shape, event_shape], 0)
      x = array_ops.reshape(x, shape=new_shape)
      # x.shape: S+B+E
      return x
Example #11
    def undo_make_batch_of_event_sample_matrices(
            self,
            x,
            sample_shape,
            name="undo_make_batch_of_event_sample_matrices"):
        """Reshapes/transposes `Distribution` `Tensor` from B_+E_+S_ to S+B+E.

    Where:
      - `B_ = B if B else [1]`,
      - `E_ = E if E else [1]`,
      - `S_ = [tf.reduce_prod(S)]`.

    This function "reverses" `make_batch_of_event_sample_matrices`.

    Args:
      x: `Tensor` of shape `B_+E_+S_`.
      sample_shape: `Tensor` (1D, `int32`).
      name: `String`. The name to give this op.

    Returns:
      x: `Tensor`. Input transposed/reshaped to `S+B+E`.
    """
        with self._name_scope(name, values=[x, sample_shape]):
            x = ops.convert_to_tensor(x, name="x")
            sample_shape = ops.convert_to_tensor(sample_shape,
                                                 name="sample_shape")
            x = distribution_util.rotate_transpose(x, shift=1)
            if self._is_all_constant_helper(self.batch_ndims,
                                            self.event_ndims):
                if self._batch_ndims_is_0 or self._event_ndims_is_0:
                    b = ((min(-2, -1 - self._event_ndims_static), )
                         if self._batch_ndims_is_0 else ())
                    e = (-1, ) if self._event_ndims_is_0 else ()
                    x = array_ops.squeeze(x, squeeze_dims=b + e)
                _, batch_shape, event_shape = self.get_shape(x)
            else:
                s = (x.get_shape().as_list() if
                     x.get_shape().is_fully_defined() else array_ops.shape(x))
                batch_shape = array_ops.slice(s, (1, ), (self.batch_ndims, ))
                # Since sample_dims=1 and is left-most, we add 1 to the number of
                # batch_ndims to get the event start dim.
                event_start = math_ops.select(self._batch_ndims_is_0, 2,
                                              1 + self.batch_ndims)
                event_shape = array_ops.slice(s, (event_start, ),
                                              (self.event_ndims, ))
            new_shape = array_ops.concat(
                0, (sample_shape, batch_shape, event_shape))
            x = array_ops.reshape(x, shape=new_shape)
            return x
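The `min(-2, -1 - self._event_ndims_static)` above locates the padded batch dimension, counted from the right, after the sample axis has been rotated to the front: with batch_ndims == 0 it sits just left of the event dims, and when event_ndims is also 0 a padded event dim occupies -1 and pushes it to -2. A small sanity check of that index arithmetic (plain Python, not from the original code):

for event_ndims in (0, 1, 2, 3):
  padded_batch_axis = min(-2, -1 - event_ndims)
  # The padded batch dim sits at -(event_ndims + 1), except that for
  # event_ndims == 0 the padded event dim at -1 pushes it to -2.
  assert padded_batch_axis == -(max(event_ndims, 1) + 1)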
Example #12
  def undo_make_batch_of_event_sample_matrices(
      self, x, sample_shape, name="undo_make_batch_of_event_sample_matrices"):
    """Reshapes/transposes `Distribution` `Tensor` from B_+E_+S_ to S+B+E.

    Where:
      - `B_ = B if B else [1]`,
      - `E_ = E if E else [1]`,
      - `S_ = [tf.reduce_prod(S)]`.

    This function "reverses" `make_batch_of_event_sample_matrices`.

    Args:
      x: `Tensor` of shape `B_+E_+S_`.
      sample_shape: `Tensor` (1D, `int32`).
      name: `String`. The name to give this op.

    Returns:
      x: `Tensor`. Input transposed/reshaped to `S+B+E`.
    """
    with self._name_scope(name, values=[x, sample_shape]):
      x = ops.convert_to_tensor(x, name="x")
      sample_shape = ops.convert_to_tensor(sample_shape, name="sample_shape")
      x = distribution_util.rotate_transpose(x, shift=1)
      if self._is_all_constant_helper(self.batch_ndims, self.event_ndims):
        if self._batch_ndims_is_0 or self._event_ndims_is_0:
          b = ((min(-2, -1 - self._event_ndims_static),)
               if self._batch_ndims_is_0 else ())
          e = (-1,) if self._event_ndims_is_0 else ()
          x = array_ops.squeeze(x, squeeze_dims=b + e)
        _, batch_shape, event_shape = self.get_shape(x)
      else:
        s = (x.get_shape().as_list() if x.get_shape().is_fully_defined()
             else array_ops.shape(x))
        batch_shape = array_ops.slice(s, (1,), (self.batch_ndims,))
        # Since sample_dims=1 and is left-most, we add 1 to the number of
        # batch_ndims to get the event start dim.
        event_start = array_ops.where(
            self._batch_ndims_is_0, 2, 1 + self.batch_ndims)
        event_shape = array_ops.slice(s, (event_start,), (self.event_ndims,))
      new_shape = array_ops.concat(0, (sample_shape, batch_shape, event_shape))
      x = array_ops.reshape(x, shape=new_shape)
      return x
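The dynamic-shape branch at the end of Example #12 carves the batch and event shapes out of the rotated tensor's shape vector. A plain-Python sketch of that slicing, with the assumed values batch_ndims = 1, event_ndims = 2 and the illustrative shapes S = [5], B = [3], E = [4, 2]:

batch_ndims, event_ndims = 1, 2
s = [5, 3, 4, 2]                       # shape after rotate_transpose: [prod(S)] + B + E
batch_shape = s[1:1 + batch_ndims]     # [3]
event_start = 2 if batch_ndims == 0 else 1 + batch_ndims   # 2
event_shape = s[event_start:event_start + event_ndims]     # [4, 2]
sample_shape = [5]
new_shape = sample_shape + batch_shape + event_shape       # [5, 3, 4, 2] == S + B + E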