Example #1
    def test_targets(self):
        q = soft_quantilizer.SoftQuantilizer(self.x, y=[0.1, 0.2, 0.3])
        self.assertTupleEqual(q.softsort.shape, (3, 3))

        q = soft_quantilizer.SoftQuantilizer(self.x,
                                             y=[[0.1, 0.2, 0.3],
                                                [0.5, 0.7, 0.9],
                                                [-0.3, -0.2, -0.1]])
        self.assertTupleEqual(q.softsort.shape, (3, 3))
Example #2
def softsort(x, direction='ASCENDING', axis=-1, **kwargs):
    """Applies the softsort operator on input tensor x.

  This operator acts as differentiable alternative to tf.sort.

  Args:
   x: the input np.ndarray. It can be either of shape [batch, n] or [n].
   direction: the direction 'ASCENDING' or 'DESCENDING'
   axis: the axis on which to operate the sort.
   **kwargs: see SoftQuantilizer for possible parameters.

  Returns:
   A np.ndarray of the same shape as the input.
  """
    if direction not in DIRECTIONS:
        raise ValueError('`direction` should be one of {}'.format(DIRECTIONS))

    x = np.array(x)
    z = _preprocess(x, axis)
    descending = (direction == 'DESCENDING')
    sorter = soft_quantilizer.SoftQuantilizer(z,
                                              descending=descending,
                                              **kwargs)

    # In case we are applying some quantization while sorting, the number of
    # outputs should be the number of targets.
    shape = list(x.shape)
    shape[axis] = sorter.target_weights.shape[1]
    return _postprocess(sorter.softsort, shape, axis)
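A minimal usage sketch of softsort (an illustration only: it assumes the function above is in scope together with its module-level dependencies DIRECTIONS, _preprocess, _postprocess and soft_quantilizer; epsilon and threshold are solver parameters forwarded to SoftQuantilizer via **kwargs, as in the tests below):

import numpy as np

x = np.array([[3.0, 1.0, 2.0],
              [0.5, -0.2, 0.9]])
# With a small epsilon the soft sort is close to the hard sort of each row.
y = softsort(x, direction='ASCENDING', epsilon=1e-3, threshold=1e-3)
# y is approximately [[1., 2., 3.], [-0.2, 0.5, 0.9]].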
Example #3
def softranks(x, direction='ASCENDING', axis=-1, zero_based=True, **kwargs):
    """A differentiable argsort-like operator that returns directly the ranks.

  Note that it behaves as the 'inverse' of the argsort operator since it returns
  soft ranks, i.e. real numbers that play the role of indices and quantify the
  relative standing (among all n entries) of each entry of x.

  Args:
   x: np.ndarray<float> of any shape.
   direction: (str) either 'ASCENDING' or 'DESCENDING', as in tf.sort.
   axis: (int) the axis along which to sort, as in tf.sort.
   zero_based: (bool) to return values in [0, n-1] or in [1, n].
   **kwargs: see SoftQuantilizer for possible parameters.

  Returns:
   A np.ndarray<float> of the same shape as the input containing the soft ranks.
  """
    if direction not in DIRECTIONS:
        raise ValueError('`direction` should be one of {}'.format(DIRECTIONS))

    x = np.array(x)
    descending = (direction == 'DESCENDING')
    z = _preprocess(x, axis)
    sorter = soft_quantilizer.SoftQuantilizer(z,
                                              descending=descending,
                                              **kwargs)
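    # The soft CDF scaled by n gives one-based soft ranks (see the tests below).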
    ranks = sorter.softcdf * z.shape[1]
    if zero_based:
        ranks -= 1

    return _postprocess(ranks, x.shape, axis)
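For illustration, a hedged softranks example under the same scope assumptions; with a small epsilon the soft ranks approach the integer ranks given by a double argsort:

import numpy as np

x = np.array([3.0, 1.0, 2.0])
r = softranks(x, zero_based=True, epsilon=1e-3, threshold=1e-3)
# r is approximately [2., 0., 1.], i.e. np.argsort(np.argsort(x)) as floats.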
Example #4
    def test_ranks(self):
        q = soft_quantilizer.SoftQuantilizer(self.x,
                                             threshold=1e-3,
                                             epsilon=1e-3)
        soft_ranks = q._n * q.softcdf
        true_ranks = jnp.argsort(jnp.argsort(q.x, axis=-1), axis=-1) + 1
        np.testing.assert_allclose(soft_ranks, true_ranks, atol=1e-3)
Example #5
    def test_ranks(self):
        q = soft_quantilizer.SoftQuantilizer(self.x,
                                             threshold=1e-3,
                                             epsilon=1e-3)
        soft_ranks = q._n * q.softcdf
        true_ranks = np.argsort(np.argsort(q.x, axis=-1), axis=-1) + 1
        self.assertAllClose(soft_ranks, true_ranks, False, atol=1e-3)
Example #6
    def test_sort(self):
        q = soft_quantilizer.SoftQuantilizer(self.x,
                                             threshold=1e-3,
                                             epsilon=1e-3)
        deltas = np.diff(q.softsort, axis=-1) > 0
        self.assertAllClose(deltas,
                            np.ones(deltas.shape, dtype=bool),
                            check_dtypes=True)
Example #7
def softquantile(x, quantile, quantile_width=0.05, axis=-1, **kwargs):
    """Computes soft quantiles via optimal transport.

  This operator takes advantage of the fact that an exhaustive softsort is not
  required to recover a single quantile. Instead, one can transport all
  input values in x onto only 3 weighted values. Target weights are adjusted so
  that those values in x that are transported to the middle value in the target
  vector y correspond to those concentrating around the quantile of interest.

  This idea generalizes to more quantiles, interleaving small weights on the
  quantile indices and bigger weights in between, corresponding to the gap from
  one desired quantile to the next one.

  Args:
   x: np.ndarray<float> of any shape.
   quantile: (float) the quantile to be returned.
   quantile_width: (float) mass given to the bucket supposed to attract points
    whose value concentrate around the desired quantile value. Bigger width
    means that we allow the soft quantile to be a mixture of
    more points further away from the quantile. If None, the width is set at 1/n
    where n is the number of values considered (the size along the 'axis').
   axis: (int) the axis along which to compute the quantile.
   **kwargs: see SoftQuantilizer for possible extra parameters.

  Returns:
    A np.ndarray<float> similar to the input tensor, but without the axis
    dimension that is squeezed into a single value: its soft quantile.
  """
    target_weights = [
        quantile - 0.5 * quantile_width, quantile_width,
        1.0 - quantile - 0.5 * quantile_width
    ]
    x = np.array(x)
    z = _preprocess(x, axis=axis)
    sorter = soft_quantilizer.SoftQuantilizer(z,
                                              target_weights=target_weights,
                                              **kwargs)
    shape = list(x.shape)
    shape.pop(axis)
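    # Column 1 of the softsort output is the middle target, i.e. the soft quantile.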
    return np.reshape(sorter.softsort[:, 1], shape)
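A hedged softquantile sketch, again assuming the surrounding module is in scope; for a smooth enough distribution the result lands close to np.quantile along the reduced axis:

import numpy as np

x = np.random.uniform(size=(2, 1000))
m = softquantile(x, quantile=0.5, quantile_width=0.05,
                 epsilon=1e-3, threshold=1e-3)
# m has shape (2,); each entry is close to np.quantile(x[i], 0.5).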
Example #8
    def test_target_weights(self):
        q = soft_quantilizer.SoftQuantilizer(self.x,
                                             target_weights=[0.49, 0.02, 0.49],
                                             threshold=1e-3,
                                             epsilon=1e-3)
        self.assertTupleEqual(q.softsort.shape, (3, 3))
Example #9
    def test_sort(self):
        q = soft_quantilizer.SoftQuantilizer(self.x,
                                             threshold=1e-3,
                                             epsilon=1e-3)
        deltas = jnp.diff(q.softsort, axis=-1) > 0
        np.testing.assert_allclose(deltas, jnp.ones(deltas.shape, dtype=bool))
Example #10
    def test_target_weights(self):
        q = soft_quantilizer.SoftQuantilizer(self.x,
                                             target_weights=[0.49, 0.02, 0.49])
        self.assertTupleEqual(q.softsort.shape, (3, 3))
Example #11
    def test_sort(self):
        q = soft_quantilizer.SoftQuantilizer(self.x)
        deltas = np.diff(q.softsort, axis=-1) > 0
        self.assertAllClose(deltas, np.ones(deltas.shape, dtype=bool), True)