Example #1
0
def test_dropout_bprop_single_comp(transformer_factory):
    """Backprop through a Dropout layer in a single computation.

    Builds ``sum(dropout(inp * mul_factor))``, takes the derivative with
    respect to ``mul_factor``, and checks it against the hand-computed
    value ``(x * mask).sum()`` using the mask the layer actually drew.
    """
    nin, batch_size = 32, 2

    # set inputs
    N = ng.make_axis(batch_size, name='N')
    F = ng.make_axis(nin).named('F')

    mul_factor = ng.placeholder(())
    inp = ng.placeholder([F, N])
    layer = Dropout(keep=0.5)
    fprop = layer.train_outputs(inp * mul_factor)
    out_graph = ng.sum(fprop, out_axes=())

    # create data
    x = np.random.uniform(size=(nin, batch_size))
    bprop = ng.deriv(out_graph, mul_factor)

    # evaluate
    trans = ngt.make_transformer()
    try:
        # fprop/bprop/mask evaluated together so the same dropout mask is
        # used for the forward value and the derivative check below
        comp = trans.computation([fprop, bprop, layer.mask], inp, mul_factor)
        fout, bout, mask = comp(x, 2)
        # Calculate derivative by hand and compare
        # NOTE(review): mask[:, None] broadcasts a per-feature mask over the
        # batch axis — presumably layer.mask has shape (nin,); confirm.
        np.testing.assert_allclose(bout, (x * mask[:, None]).sum(), rtol=1e-6)
    finally:
        # Close even when the assertion fails, so the transformer is not
        # leaked across tests (the sibling test uses a context manager).
        trans.close()
Example #2
0
def test_dropout_train(transformer_factory):
    """Forward pass of Dropout in training mode.

    Verifies that the output equals the input multiplied by the mask the
    layer reports, and that a second evaluation draws a different mask
    (and hence a different output).
    """
    num_features, batch_size = 32, 2

    # set inputs
    N = ng.make_axis(batch_size, name='N')
    F = ng.make_axis(num_features).named('F')

    inp = ng.placeholder([F, N])
    layer = Dropout(keep=0.5)
    fprop = layer.train_outputs(inp)

    # create data
    data = np.random.uniform(size=(num_features, batch_size))

    # evaluate
    with executor([fprop, layer.mask], inp) as comp:
        out, mask = comp(data)
        # NOTE(review): mask[:, None] broadcasts a per-feature mask over the
        # batch axis — presumably layer.mask has shape (num_features,).
        np.testing.assert_allclose(out, data * mask[:, None], rtol=1e-6)

        # Snapshot before re-evaluating — presumably the computation may
        # reuse its output buffers, so copies are required here.
        prev_out, prev_mask = out.copy(), mask.copy()
        next_out, next_mask = comp(data)
        # A fresh evaluation must resample the dropout mask.
        assert (prev_out != next_out).any()
        assert (prev_mask != next_mask).any()