Example no. 1
def test_rnn():
    for final_dense, gru, nn in [
        (True, False,
         rnn(10, (20, 30), normalise=True, gru=False, final_dense=True)),
        (False, True,
         rnn(10, (20, 30), normalise=True, gru=True, final_dense=False)),
    ]:
        vs = Vars(np.float32)
        nn.initialise(5, vs)
        x = B.randn(2, 3, 5)

        # Check number of weights and width.
        assert B.length(vs.get_vector()) == nn.num_weights(5)
        assert nn.width == 10

        # Test batch consistency.
        check_batch_consistency(nn, x)

        # Check composition.
        assert len(nn.layers) == (9 if final_dense else 7)
        assert type(nn.layers[0]) == Recurrent
        assert type(nn.layers[0].cell) == (GRU if gru else Elman)
        assert nn.layers[0].width == 20
        assert type(nn.layers[1]) == Activation
        assert nn.layers[1].width == 20
        assert type(nn.layers[2]) == Normalise
        assert nn.layers[2].width == 20
        assert type(nn.layers[3]) == Recurrent
        assert type(nn.layers[3].cell) == (GRU if gru else Elman)
        assert nn.layers[3].width == 30
        assert type(nn.layers[4]) == Activation
        assert nn.layers[4].width == 30
        assert type(nn.layers[5]) == Normalise
        assert nn.layers[5].width == 30
        if final_dense:
            assert type(nn.layers[6]) == Linear
            assert nn.layers[6].width == 10
            assert type(nn.layers[7]) == Activation
            assert nn.layers[7].width == 10
            assert type(nn.layers[8]) == Linear
            assert nn.layers[8].width == 10
        else:
            assert type(nn.layers[6]) == Linear
            assert nn.layers[6].width == 10

    # Check that normalisation layers disappear.
    assert (len(
        rnn(10, (20, 30), normalise=False, gru=True,
            final_dense=False).layers) == 5)
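
Outside of a test, the same constructor can be used directly. The sketch below relies only on the calls exercised above; the wbml.net import path is an assumption, while Vars comes from varz and lab is imported as B, as in these examples.

import lab as B
import numpy as np
from varz import Vars
from wbml.net import rnn  # assumed import path

vs = Vars(np.float32)
nn = rnn(10, (20, 30), normalise=True, gru=True, final_dense=False)
nn.initialise(5, vs)  # Five input features.
x = B.randn(2, 3, 5)  # (batch, time, features)
y = nn(x)             # Output width is 10, as asserted above.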
Example no. 2
def test_ff():
    vs = Vars(np.float32)

    nn = ff(10, (20, 30), normalise=True)
    nn.initialise(5, vs)
    x = B.randn(2, 3, 5)

    # Check number of weights and width.
    assert B.length(vs.get_vector()) == nn.num_weights(5)
    assert nn.width == 10

    # Test batch consistency.
    check_batch_consistency(nn, x)

    # Check composition.
    assert len(nn.layers) == 7
    assert type(nn.layers[0]) == Linear
    assert nn.layers[0].A.shape[0] == 5
    assert nn.layers[0].width == 20
    assert type(nn.layers[1]) == Activation
    assert nn.layers[1].width == 20
    assert type(nn.layers[2]) == Normalise
    assert nn.layers[2].width == 20
    assert type(nn.layers[3]) == Linear
    assert nn.layers[3].width == 30
    assert type(nn.layers[4]) == Activation
    assert nn.layers[4].width == 30
    assert type(nn.layers[5]) == Normalise
    assert nn.layers[5].width == 30
    assert type(nn.layers[6]) == Linear
    assert nn.layers[6].width == 10

    # Check that one-dimensional calls are okay.
    vs = Vars(np.float32)
    nn.initialise(1, vs)
    approx(nn(B.linspace(0, 1, 10)), nn(B.linspace(0, 1, 10)[:, None]))

    # Check that zero-dimensional calls fail.
    with pytest.raises(ValueError):
        nn(0)

    # Check normalisation layers disappear.
    assert len(ff(10, (20, 30), normalise=False).layers) == 5
Example no. 3
def test_vec_to_tril_and_back_correctness(offset, batch_shape, check_lazy_shapes):
    n = B.length(B.tril_to_vec(B.ones(7, 7), offset=offset))
    for vec in Tensor(*batch_shape, n).forms():
        mat = B.vec_to_tril(vec, offset=offset)
        approx(B.tril_to_vec(mat, offset=offset), vec)
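
A minimal round trip along the same lines, assuming lab is imported as B and that offset follows the usual diagonal-offset convention (offset=0 keeps the diagonal): the lower triangle of a 3x3 matrix packs into a vector of six elements, and vec_to_tril rebuilds the matrix.

import lab as B

n = B.length(B.tril_to_vec(B.ones(3, 3), offset=0))  # 6 elements in the lower triangle.
vec = B.randn(n)
mat = B.vec_to_tril(vec, offset=0)   # (3, 3) lower-triangular matrix.
back = B.tril_to_vec(mat, offset=0)  # Recovers vec.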
Example no. 4
def __init__(self, *objs):
    # Record the shape and total number of elements of every argument.
    self._shapes = [B.shape(obj) for obj in objs]
    self._lengths = [B.length(obj) for obj in objs]
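
For context, B.shape and B.length here record each object's shape and its total element count. A quick sketch, assuming lab is imported as B and behaves like NumPy's shape and size:

import lab as B

x = B.ones(2, 3)
B.shape(x)   # (2, 3)
B.length(x)  # 6, the total number of elements.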
Example no. 5
def test_vec_to_tril(offset, batch_shape, check_lazy_shapes):
    n = B.length(B.tril_to_vec(B.ones(7, 7), offset=offset))
    check_function(B.vec_to_tril, (Tensor(*batch_shape, n),), {"offset": Value(offset)})