Code Example #1
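These snippets appear to be excerpted from GluonTS's MXNet test suite for the Binned distribution. The module-level context they rely on was stripped during extraction; the sketch below restores a minimal version of it. The import paths and the concrete values of NUM_SAMPLES, TOL, and BATCH_SHAPE are assumptions, and maximum_likelihood_estimate_sgd is a fitting helper defined alongside the original tests (it runs SGD on the negative log-likelihood), not a public API.

import mxnet as mx
import numpy as np
import pytest
from pydantic import PositiveFloat, PositiveInt

# Import path varies across GluonTS versions (older releases exposed these
# under gluonts.distribution instead of gluonts.mx.distribution).
from gluonts.mx.distribution import (
    Binned,
    BinnedOutput,
    Gaussian,
    TransformedDistribution,
    bijection as bij,
)

NUM_SAMPLES = 2000       # placeholder: samples drawn when fitting
TOL = 0.3                # placeholder: relative tolerance on recovered parameters
BATCH_SHAPE = (3, 4, 5)  # placeholder: batch shape used in Code Example #5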
def test_binned_likelihood(num_bins: int, bin_probabilities: np.ndarray,
                           hybridize: bool):
    """
    Test that maximizing the likelihood recovers the distribution parameters.
    """

    bin_prob = mx.nd.array(bin_probabilities)
    bin_center = mx.nd.array(np.logspace(-1, 1, num_bins))

    # generate samples
    bin_probs = mx.nd.zeros((NUM_SAMPLES, num_bins)) + bin_prob
    bin_centers = mx.nd.zeros((NUM_SAMPLES, num_bins)) + bin_center

    distr = Binned(bin_probs, bin_centers)
    samples = distr.sample()

    # add some jitter to the uniform initialization and normalize
    bin_prob_init = mx.nd.random_uniform(1 - TOL, 1 + TOL, num_bins) * bin_prob
    bin_prob_init = bin_prob_init / bin_prob_init.sum()

    init_biases = [bin_prob_init]

    bin_prob_hat, = maximum_likelihood_estimate_sgd(
        BinnedOutput(list(bin_center.asnumpy())),
        samples,
        init_biases=init_biases,
        hybridize=hybridize,
        learning_rate=PositiveFloat(0.05),
        num_epochs=PositiveInt(25),
    )

    assert all(
        mx.nd.abs(mx.nd.array(bin_prob_hat) - bin_prob) < TOL * bin_prob
    ), f"bin_prob did not match: bin_prob = {bin_prob}, bin_prob_hat = {bin_prob_hat}"
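As written, test_binned_likelihood expects pytest to inject its arguments; the parametrize decorators were evidently stripped when the example was extracted. A plausible reconstruction, in which the concrete parameter values are assumptions:

@pytest.mark.parametrize("hybridize", [True, False])
@pytest.mark.parametrize(
    "num_bins, bin_probabilities",
    [(6, np.array([0.3, 0.1, 0.05, 0.2, 0.1, 0.25]))],  # assumed example case
)
def test_binned_likelihood(num_bins, bin_probabilities, hybridize):
    ...  # body as above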
Code Example #2
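This test and Code Example #3 rely on a shared COMMON_KWARGS dict and a labels fixture that are not shown. The assertions in Example #3 (np.ones(2) and 0.2 / 7) imply a batch of two distributions with seven bins each; beyond those shapes, everything in this sketch is an assumption:

# Assumed shared fixture: a batch of 2 distributions with 7 bins each.
# Only the shapes are inferred from the assertions; the values are placeholders.
COMMON_KWARGS = dict(
    bin_log_probs=mx.nd.log_softmax(mx.nd.ones((2, 7))),
    bin_centers=mx.nd.array(np.logspace(-1, 1, 7)) + mx.nd.zeros((2, 7)),
)

@pytest.fixture
def labels():
    return mx.nd.array([0.5, 2.0])  # placeholder targets, one per batch entry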
def test_loss_correct(labels):
    smooth_alpha = Binned(**COMMON_KWARGS, label_smoothing=0.4)
    smooth_noalpha = Binned(**COMMON_KWARGS, label_smoothing=0.0)
    binned = Binned(**COMMON_KWARGS)

    # with label_smoothing=0.0, smoothing must be a no-op: losses match exactly
    assert np.allclose(
        binned.loss(labels).asnumpy(),
        smooth_noalpha.loss(labels).asnumpy())

    # a non-zero smoothing level must change the loss
    assert not np.allclose(
        binned.loss(labels).asnumpy(),
        smooth_alpha.loss(labels).asnumpy())
Code Example #3
def test_get_smooth_mask_correct(labels):
    dist = Binned(**COMMON_KWARGS, label_smoothing=0.2)
    binned = Binned(**COMMON_KWARGS)

    labels = labels.expand_dims(-1)

    mask = dist._get_mask(labels)

    # the hard mask itself is unaffected by the label_smoothing setting
    assert np.allclose(mask.asnumpy(), binned._get_mask(labels).asnumpy())

    smooth_mask = dist._smooth_mask(mx.nd, mask, alpha=mx.nd.array([0.2]))

    # check smooth mask adds to one
    assert np.allclose(smooth_mask.asnumpy().sum(axis=-1), np.ones(2))

    # check smooth mask peaks same
    assert np.allclose(
        np.argmax(smooth_mask.asnumpy(), axis=-1),
        np.argmax(mask.asnumpy(), axis=-1),
    )

    # check smooth mask mins correct
    assert np.allclose(
        smooth_mask.asnumpy().min(axis=-1),
        np.ones(2) * 0.2 / 7  # alpha / K
    )
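Taken together, the three assertions pin down the smoothing rule. Assuming the standard label-smoothing construction (consistent with the checks above, though the implementation is not quoted here), _smooth_mask computes

    smooth_mask = (1 - alpha) * mask + alpha / K

where mask is the one-hot bin indicator and K the number of bins: the result still sums to one, keeps its peak at the original bin, and equals alpha / K everywhere off the peak, which with alpha = 0.2 and K = 7 is exactly the 0.2 / 7 minimum asserted above.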
Code Example #4
def test_smooth_mask_adds_to_one(K, alpha):
    bin_log_probs = mx.nd.log_softmax(mx.nd.ones(K))
    bin_centers = mx.nd.arange(K)

    dist = Binned(
        bin_log_probs=bin_log_probs,
        bin_centers=bin_centers,
        label_smoothing=0.2,
    )

    labels = mx.random.uniform(low=0, high=K, shape=(12, )).expand_dims(-1)
    mask = dist._get_mask(labels)
    smooth_mask = dist._smooth_mask(mx.nd, mask, alpha=mx.nd.array([alpha]))

    # check smooth mask adds to one
    assert np.allclose(smooth_mask.asnumpy().sum(axis=-1),
                       np.ones(12),
                       atol=1e-6)
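Here, too, the parametrization was stripped; K and alpha would be supplied by decorators along these lines (the value grids are assumptions):

@pytest.mark.parametrize("K", [3, 10, 25])          # assumed bin counts
@pytest.mark.parametrize("alpha", [0.0, 0.2, 0.5])  # assumed smoothing levels
def test_smooth_mask_adds_to_one(K, alpha):
    ...  # body as above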
Code Example #5
[
    TransformedDistribution(
        Gaussian(
            mu=mx.nd.random.uniform(shape=BATCH_SHAPE),
            sigma=mx.nd.ones(shape=BATCH_SHAPE),
        ),
        [
            bij.AffineTransformation(
                scale=1e-1 + mx.nd.random.uniform(shape=BATCH_SHAPE)
            ),
            bij.softrelu,
        ],
    ),
    Binned(
        bin_log_probs=mx.nd.uniform(shape=BATCH_SHAPE + (23,)),
        bin_centers=mx.nd.array(np.logspace(-1, 1, 23))
        + mx.nd.zeros(BATCH_SHAPE + (23,)),
    ),
    TransformedDistribution(
        Binned(
            bin_log_probs=mx.nd.uniform(shape=BATCH_SHAPE + (23,)),
            bin_centers=mx.nd.array(np.logspace(-1, 1, 23))
            + mx.nd.zeros(BATCH_SHAPE + (23,)),
        ),
        [
            bij.AffineTransformation(
                scale=1e-1 + mx.nd.random.uniform(shape=BATCH_SHAPE)
            ),
            bij.softrelu,
        ],
    ),
]
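This bracketed list reads like the fixture list of a test parametrized over plain and transformed Binned distributions. A minimal consumer sketch, assuming the list is bound to a hypothetical name DISTRIBUTIONS:

# Hypothetical consumer of the fixture list above; the name DISTRIBUTIONS,
# the test name, and the assertion are all assumptions.
@pytest.mark.parametrize("distr", DISTRIBUTIONS)
def test_sample_shape(distr):
    samples = distr.sample()
    assert samples.shape == BATCH_SHAPE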