Example #1
def soft_quantile_normalization(x, f, axis=-1, **kwargs):
  """Applies a (soft) quantile normalization of x with f.

  The usual quantile normalization operator uses the empirical values contained
  in x to construct an empirical distribution function (EDF), assigns to each
  value in x its EDF value (i.e. its rank divided by the size of x), and then
  replaces it with the corresponding quantile described in vector f
  (see https://en.wikipedia.org/wiki/Quantile_normalization).

  The operator proposed here does so in a differentiable manner, by first
  computing a distribution of ranks for x (stored in an optimal transport
  table) and then taking averages of the values stored in f.

  Note that the current function only works when f is a vector of sorted values
  corresponding to the quantiles of a distribution at levels [1/m, ..., m/m],
  where m is the size of f.

  Args:
   x: Tensor<float> of any shape.
   f: Tensor<float>[m] where m may or may not equal the size of x along the
     axis (usually it does). f should be sorted.
   axis: the axis along which the tensor x should be quantile normalized.
   **kwargs: extra parameters passed to the SoftQuantilizer.

  Returns:
   A tensor of the same shape as x.
  """
  z, transposition, shape = preprocess(x, axis)
  sorter = soft_quantilizer.SoftQuantilizer(
      z, descending=False, num_targets=f.shape[-1], **kwargs)
  y = 1.0 / sorter.weights * tf.linalg.matvec(sorter.transport, f)
  return postprocess(y, transposition, shape)
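A minimal usage sketch (not from the source): it assumes the snippet's dependencies are importable (tensorflow as tf, plus the module's soft_quantilizer, preprocess and postprocess helpers) and uses the SoftQuantilizer's default regularization.

import tensorflow as tf

# Hypothetical example: two rows of 5 values, softly mapped onto the sorted
# target quantiles f (levels 1/5, ..., 5/5).
x = tf.constant([[1.2, -0.3, 4.1, 0.0, 2.2],
                 [5.0, 3.3, -1.0, 0.7, 0.1]])
f = tf.constant([-2.0, -1.0, 0.0, 1.0, 2.0])
y = soft_quantile_normalization(x, f, axis=-1)
# y has the same shape as x; for a small regularization each row of y is
# close to a permutation of f.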
Example #2
def softranks(x, direction='ASCENDING', axis=-1, zero_based=True, **kwargs):
  """A differentiable argsort-like operator that returns directly the ranks.

  Note that it behaves as the 'inverse' of the argsort operator since it returns
  soft ranks, i.e. real numbers that play the role of indices and quantify the
  relative standing (among all n entries) of each entry of x.

  Args:
   x: Tensor<float> of any shape.
   direction: (str) either 'ASCENDING' or 'DESCENDING', as in tf.sort.
   axis: (int) the axis along which to sort, as in tf.sort.
   zero_based: (bool) whether to return values in [0, n-1] (True) or in
     [1, n] (False).
   **kwargs: see SoftQuantilizer for possible parameters.

  Returns:
   A Tensor<float> of the same shape as the input containing the soft ranks.
  """
  if direction not in DIRECTIONS:
    raise ValueError('`direction` should be one of {}'.format(DIRECTIONS))

  descending = (direction == 'DESCENDING')
  z, transposition, shape = preprocess(x, axis)
  sorter = soft_quantilizer.SoftQuantilizer(z, descending=descending, **kwargs)
  ranks = sorter.softcdf * tf.cast(tf.shape(z)[1], dtype=x.dtype)
  if zero_based:
    ranks -= tf.cast(1.0, dtype=x.dtype)
  return postprocess(ranks, transposition, shape)
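A minimal usage sketch, under the same assumptions about the surrounding module (tf and the SoftQuantilizer machinery); the exact output depends on the regularization strength.

import tensorflow as tf

x = tf.constant([[0.3, 0.1, 0.9, 0.5]])
ranks = softranks(x, direction='ASCENDING', zero_based=True)
# For a small regularization, ranks is close to the hard zero-based ranks
# [[1., 0., 3., 2.]].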
Example #3
def softsort(x, direction='ASCENDING', axis=-1, **kwargs):
  """Applies the softsort operator on input tensor x.

  This operator acts as a differentiable alternative to tf.sort.

  Args:
   x: the input tensor. It can be either of shape [batch, n] or [n].
   direction: (str) either 'ASCENDING' or 'DESCENDING', as in tf.sort.
   axis: (int) the axis along which to sort, as in tf.sort.
   **kwargs: see SoftQuantilizer for possible parameters.

  Returns:
   A tensor of soft-sorted values. It has the same shape as the input, except
   along `axis`, where its size equals the number of targets when quantization
   is requested through **kwargs.
  """
  if direction not in DIRECTIONS:
    raise ValueError('`direction` should be one of {}'.format(DIRECTIONS))

  z = _preprocess(x, axis)
  descending = (direction == 'DESCENDING')
  sorter = soft_quantilizer.SoftQuantilizer(z, descending=descending, **kwargs)

  # In case we are applying some quantization while sorting, the number of
  # outputs should be the number of targets.
  shape = x.shape.as_list()
  shape[axis] = sorter.target_weights.shape[1]
  return _postprocess(sorter.softsort, tf.TensorShape(shape), axis)
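A minimal usage sketch under the same assumptions:

import tensorflow as tf

x = tf.constant([[0.3, 0.1, 0.9, 0.5]])
values = softsort(x, direction='ASCENDING')
# For a small regularization, values is close to tf.sort(x),
# i.e. [[0.1, 0.3, 0.5, 0.9]].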
Example #4
def softsort(x, direction='ASCENDING', axis=-1, topk=None, **kwargs):
    """Applies the softsort operator on input tensor x.

  This operator acts as a differentiable alternative to tf.sort.

  Args:
   x: the input tensor. It can be either of shape [batch, n] or [n].
   direction: (str) either 'ASCENDING' or 'DESCENDING', as in tf.sort.
   axis: (int) the axis along which to sort, as in tf.sort.
   topk: if not None, the number of top sorted values to compute. Using topk
    speeds up the computation, since it amounts to solving a simpler problem.
   **kwargs: see SoftQuantilizer for possible parameters.

  Returns:
   A tensor of soft-sorted values. It has the same shape as the input, except
   along `axis`, where its size is topk when topk is not None.
  """
    if direction not in DIRECTIONS:
        raise ValueError('`direction` should be one of {}'.format(DIRECTIONS))

    if topk is not None and _TARGET_WEIGHTS_ARG in kwargs:
        raise ValueError(
            'Conflicting arguments: both topk and target_weights are being set.'
        )

    z, transposition, shape = _preprocess(x, axis)
    descending = (direction == 'DESCENDING')

    if topk is not None:
        n = tf.cast(tf.shape(z)[-1], dtype=x.dtype)
        kwargs[_TARGET_WEIGHTS_ARG] = 1.0 / n * tf.concat(
            [tf.ones(topk, dtype=x.dtype),
             (n - topk) * tf.ones(1, dtype=x.dtype)],
            axis=0)

    sorter = soft_quantilizer.SoftQuantilizer(z,
                                              descending=descending,
                                              **kwargs)
    # When topk is set, the sorter computes topk + 1 values; the last one
    # aggregates the remaining n - topk entries and is dropped below.
    values = sorter.softsort if topk is None else sorter.softsort[:, :-1]
    return _postprocess(values, transposition, shape)
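A minimal usage sketch for the topk variant (same assumptions; the extra sink target absorbs the remaining n - topk entries and is dropped from the output):

import tensorflow as tf

x = tf.constant([[0.3, 0.1, 0.9, 0.5]])
top2 = softsort(x, direction='DESCENDING', topk=2)
# top2 has shape [1, 2]; for a small regularization it is close to the two
# largest entries, [[0.9, 0.5]].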
Example #5
def softsort(x, direction='ASCENDING', axis=-1, **kwargs):
    """Applies the softsort operator on input tensor x.

  This operator acts as a differentiable alternative to tf.sort.

  Args:
   x: the input tensor. It can be either of shape [batch, n] or [n].
   direction: (str) either 'ASCENDING' or 'DESCENDING', as in tf.sort.
   axis: (int) the axis along which to sort, as in tf.sort.
   **kwargs: see SoftQuantilizer for possible parameters.

  Returns:
   A tensor of the same shape as the input.
  """
    if direction not in DIRECTIONS:
        raise ValueError('`direction` should be one of {}'.format(DIRECTIONS))

    z = _preprocess(x, axis)
    descending = (direction == 'DESCENDING')
    sorter = soft_quantilizer.SoftQuantilizer(z,
                                              descending=descending,
                                              **kwargs)
    return _postprocess(sorter.softsort, x.shape, axis)
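A minimal sketch of sorting along a non-default axis, under the same assumptions:

import tensorflow as tf

x = tf.random.uniform((4, 3))
cols = softsort(x, axis=0)
# Softly sorts each of the 3 columns independently; cols has the same shape
# as x.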