Example #1
    def test_sample_with_intermediates(self):
        locs = jnp.array([[-5., -5.], [0., 0.], [5., 5.]])
        scales = jnp.ones_like(locs) * 0.1
        pis = jnp.array([.5, .3, .2])
        mix = GaussianMixture(locs, scales, pis)

        rng = jax.random.PRNGKey(2963)
        n_total = 1000
        vals, interm = mix.sample_with_intermediates(rng, sample_shape=(10, n_total//10))
        zs = interm[0]

        self.assertEqual((10, n_total//10), jnp.shape(zs))
        self.assertEqual((10, n_total//10, 2), jnp.shape(vals))

        self.assertTrue(jnp.alltrue(zs >= 0) and jnp.alltrue(zs < 3))

        unq_vals, unq_counts = np.unique(zs, return_counts=True)
        unq_counts = unq_counts / n_total
        # crude test that samples are from mixture (ratio of samples per component is plausible)
        pis_stddev = jnp.sqrt(pis*(1-pis)/n_total)
        self.assertTrue(jnp.allclose(unq_counts, pis, atol=3*pis_stddev))

        for i in range(3):
            # crude test that samples are from mixture (mean is within 3 times stddev per component)
            self.assertTrue(jnp.allclose(locs[i], jnp.mean(vals[zs == i], axis=0), atol=3*scales[i]/jnp.sqrt(n_total)))
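The tolerance in the frequency check above follows from the binomial standard error: for a component with weight p, the fraction of n samples assigned to it has standard deviation sqrt(p * (1 - p) / n), so the test allows roughly three such deviations. A small illustrative computation of that bound for the weights used above (not part of the original test):

import jax.numpy as jnp

pis = jnp.array([.5, .3, .2])
n_total = 1000
# Standard error of each observed component frequency under the true weights.
pis_stddev = jnp.sqrt(pis * (1 - pis) / n_total)
print(3 * pis_stddev)  # roughly [0.047, 0.043, 0.038]: allowed deviation per component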
Example #2
 def test_sort_batch(self, topk):
   x = jax.random.uniform(self.rng, (32, 20, 12, 8))
   axis = 1
   xs = soft_sort.sort(x, axis=axis, topk=topk)
   expected_shape = list(x.shape)
   expected_shape[axis] = topk if (0 < topk < x.shape[axis]) else x.shape[axis]
   self.assertEqual(xs.shape, tuple(expected_shape))
   self.assertTrue(jnp.alltrue(jnp.diff(xs, axis=axis) >= 0.0))
Example #3
 def test_sample_from_array_almost_full_shuffle(self):
     x = jnp.arange(0, 100) + 100
     rng_key = jax.random.PRNGKey(0)
     n_vals = 99
     shuffled = util.sample_from_array(rng_key, x, n_vals, 0)
     unq_vals = np.unique(shuffled)
     self.assertEqual(n_vals, np.size(unq_vals))
     self.assertTrue(jnp.alltrue(shuffled >= 100))
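Examples #3 and #12 call util.sample_from_array, whose definition is not shown; the assertions only require that it returns n_vals distinct entries drawn from x along the given axis. A minimal sketch of such a helper under that assumption (the real implementation may differ):

import jax
import jax.numpy as jnp

def sample_from_array(rng_key, x, n_vals, axis):
    # Illustrative sketch: draw n_vals entries of x without replacement along axis.
    idx = jax.random.permutation(rng_key, x.shape[axis])[:n_vals]
    return jnp.take(x, idx, axis=axis)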
Example #4
 def test_topk_one_array(self, k):
   n = 20
   x = jax.random.uniform(self.rng, (n,))
   axis = 0
   xs = soft_sort.sort(x, axis=axis, topk=k, epsilon=1e-3)
   outsize = k if 0 < k < n else n
   self.assertEqual(xs.shape, (outsize,))
   self.assertTrue(jnp.alltrue(jnp.diff(xs, axis=axis) >= 0.0))
   self.assertAllClose(xs, jnp.sort(x, axis=axis)[-outsize:], atol=0.01)
Example #5
def test_descend(random_tensors):
    h, s, iso, dis = random_tensors
    s = simple_mera.descend(h, s, iso, dis)
    assert len(s.shape) == 6
    D = s.shape[0]
    smat = np.reshape(s, [D**3] * 2)
    assert np.isclose(np.trace(smat), 1.0)
    assert np.isclose(np.linalg.norm(smat - np.conj(np.transpose(smat))), 0.0)
    spec, _ = np.linalg.eigh(smat)
    assert np.alltrue(spec >= 0.0)
Example #6
def discrete_barycenter(geom: geometry.Geometry,
                        a: jnp.ndarray,
                        weights: jnp.ndarray = None,
                        dual_initialization: jnp.ndarray = None,
                        threshold: float = 1e-2,
                        norm_error: int = 1,
                        inner_iterations: int = 10,
                        min_iterations: int = 0,
                        max_iterations: int = 2000,
                        lse_mode: bool = True,
                        debiased: bool = False) -> SinkhornBarycenterOutput:
  """Compute discrete barycenter using https://arxiv.org/abs/2006.02575.

  Args:
    geom: a Geometry object able to apply kernels with a certain epsilon.
    a: jnp.ndarray<float>[batch, geom.num_a]: batch of histograms.
    weights: jnp.ndarray of weights in the probability simplex.
    dual_initialization: jnp.ndarray of size [batch, num_b], initialization for g_v.
    threshold: (float) tolerance to monitor convergence.
    norm_error: int, power used to define p-norm of error for marginal/target.
    inner_iterations: (int32) the Sinkhorn error is not recomputed at each
     iteration but only once every inner_iterations iterations, to avoid
     computational overhead.
    min_iterations: (int32) the minimum number of Sinkhorn iterations carried
     out before the error is computed and monitored.
    max_iterations: (int32) the maximum number of Sinkhorn iterations.
    lse_mode: True for log-sum-exp computations, False for kernel multiply.
    debiased: whether to run the debiased version of the Sinkhorn divergence.

  Returns:
    A ``SinkhornBarycenterOutput``, which contains two arrays of potentials,
    each of size ``batch`` times ``geom.num_a``, summarizing the OT between each
    histogram in the batch and the barycenter (stored in ``histogram``), as well
    as a sequence of errors that monitors convergence.
  """
  batch_size, num_a = a.shape
  _, num_b = geom.shape

  if weights is None:
    weights = jnp.ones((batch_size,)) / batch_size
  if not jnp.alltrue(weights > 0) or weights.shape[0] != batch_size:
    raise ValueError(f'weights must have positive values and size {batch_size}')

  if dual_initialization is None:
    # initialization strategy from https://arxiv.org/pdf/1503.02533.pdf, (3.6)
    dual_initialization = geom.apply_cost(a.T, axis=0).T
    dual_initialization -= jnp.average(dual_initialization,
                                       weights=weights,
                                       axis=0)[jnp.newaxis, :]

  if debiased and not geom.is_symmetric:
    raise ValueError('Geometry must be symmetric to use debiased option.')
  norm_error = (norm_error,)
  return _discrete_barycenter(geom, a, weights, dual_initialization, threshold,
                              norm_error, inner_iterations, min_iterations,
                              max_iterations, lse_mode, debiased, num_a, num_b)
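A minimal usage sketch for discrete_barycenter, assuming the ott-jax layout suggested by the signature above (the geometry module path and the histogram field are assumptions based on the type annotation and docstring):

import jax
import jax.numpy as jnp
from ott.geometry import pointcloud

# Shared support of 50 points in 2-D, used both as input and output support.
x = jax.random.normal(jax.random.PRNGKey(0), (50, 2))
geom = pointcloud.PointCloud(x, x, epsilon=1e-2)

# A batch of two histograms on that support, normalized to the simplex.
a = jax.random.uniform(jax.random.PRNGKey(1), (2, 50))
a = a / jnp.sum(a, axis=1, keepdims=True)

out = discrete_barycenter(geom, a)
print(out.histogram.shape)  # expected (50,), per the docstring's description of the barycenter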
Example #7
    def test_basic(self):
        class MAE(elegy.Metric):
            def call(self, y_true, y_pred):
                return jnp.abs(y_true - y_pred)

        y_true = jnp.array([1.0, 2.0, 3.0])
        y_pred = jnp.array([2.0, 3.0, 4.0])

        mae = MAE()

        loss = mae.call_with_defaults()(y_true, y_pred)

        assert jnp.alltrue(loss == jnp.array([1.0, 1.0, 1.0]))
Example #8
    def test_basic(self):
        class MAE(elegy.Loss):
            def call(self, y_true, y_pred):
                return jnp.abs(y_true - y_pred)

        y_true = jnp.array([1.0, 2.0, 3.0])
        y_pred = jnp.array([2.0, 3.0, 4.0])

        mae = MAE()

        sample_loss = mae.call(y_true, y_pred)
        loss = mae(y_true, y_pred)

        assert jnp.alltrue(sample_loss == jnp.array([1.0, 1.0, 1.0]))
        assert loss == 1
Example #9
    def test_slice(self):
        class MAE(elegy.Metric):
            def call(self, y_true, y_pred):
                return jnp.abs(y_true - y_pred)

        y_true = dict(a=jnp.array([1.0, 2.0, 3.0]))
        y_pred = dict(a=jnp.array([2.0, 3.0, 4.0]))

        mae = MAE(on="a")

        # raises because the inputs are passed positionally instead of as keyword arguments
        with pytest.raises(BaseException):
            sample_loss = mae.call_with_defaults()(y_true, y_pred)

        # raises because calling .call directly bypasses __call__, which applies the "on" filtering
        with pytest.raises(BaseException):
            sample_loss = mae.call(y_true=y_true, y_pred=y_pred)

        loss = mae.call_with_defaults()(y_true=y_true, y_pred=y_pred)

        assert jnp.alltrue(loss == jnp.array([1.0, 1.0, 1.0]))
Example #10
 def test_sort_batch(self):
   x = jax.random.uniform(self.rng, (32, 20, 12, 8))
   xs = soft_sort.softsort(x, axis=1)
   self.assertEqual(x.shape, xs.shape)
   self.assertTrue(jnp.alltrue(jnp.diff(xs, axis=1) >= 0.0))
Example #11
 def test_sort_one_array(self, shape):
   x = jax.random.uniform(self.rng, shape)
   xs = soft_sort.softsort(x, axis=0)
   self.assertEqual(x.shape, xs.shape)
   self.assertTrue(jnp.alltrue(jnp.diff(xs, axis=0) >= 0.0))
Example #12
 def test_sample_from_array_single_sample(self):
     x = jnp.arange(0, 100) + 100
     rng_key = jax.random.PRNGKey(0)
     n_vals = 1
     shuffled = util.sample_from_array(rng_key, x, n_vals, 0)
     self.assertTrue(jnp.alltrue(shuffled >= 100))
Example #13
def test_posdef(m):
    pd_m = posdef(m)
    assert jnp.alltrue(jnp.linalg.eigvals(pd_m) > 0)
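posdef is not shown in Example #13; the assertion only requires that it maps a matrix to one with strictly positive eigenvalues. One common way to achieve this, given here purely as an illustrative sketch and not as the actual helper, is to symmetrize the input and clip its spectrum:

import jax.numpy as jnp

def posdef(m, eps=1e-6):
    # Illustrative sketch: symmetrize m and push its eigenvalues above eps.
    sym = (m + m.T) / 2.0
    w, v = jnp.linalg.eigh(sym)
    w = jnp.maximum(w, eps)
    return (v * w) @ v.T  # v @ diag(w) @ v.T, now positive definite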