def test_linear_ones(basic_linargs, transformer_factory):
    """Sanity check: with all-ones inputs and all-ones weights, every
    output row must equal the sum of the weights feeding that output.

    This confirms the layer performs the expected number of
    multiply-accumulate operations.
    """
    nin, nout, batch_size = basic_linargs

    # Build the input placeholder over feature and batch axes.
    batch_axis = ng.make_axis(batch_size, name="N", batch=True)
    feature_axis = ng.make_axis(nin, name="F")
    inp = ng.placeholder([feature_axis, batch_axis])

    # Linear layer whose weights are all initialized to exactly 1.0.
    layer = Linear(nout=nout, init=UniformInit(1.0, 1.0))
    fprop = layer.train_outputs(inp)

    # All-ones input data.
    x = np.ones((nin, batch_size))

    # Evaluate the forward pass and fetch the weights.
    ngt.make_transformer()
    out, w = executor([fprop, layer.W], inp)(x)

    # Each output row should be the row-sum of the weights, broadcast
    # across the batch dimension.
    expected = np.sum(w, 1).reshape((nout, 1)) * np.ones((1, batch_size))

    assert np.allclose(expected, out, atol=0.0, rtol=0.0), '%e' % np.max(np.abs(out - expected))
def test_linear_zeros(basic_linargs, transformer_factory):
    """Sanity check: with zero-initialized weights, the layer output must
    be identically zero regardless of the (random) input values.
    """
    nin, nout, batch_size = basic_linargs

    # Build the input placeholder over feature and batch axes.
    batch_axis = ng.make_axis(batch_size, name="N", batch=True)
    feature_axis = ng.make_axis(nin, name="F")
    inp = ng.placeholder([feature_axis, batch_axis])

    # Linear layer whose weights are all initialized to exactly 0.0.
    layer = Linear(nout=nout, init=UniformInit(0.0, 0.0))
    fprop = layer.train_outputs(inp)

    # Random input data — the zero weights should annihilate it.
    x = np.random.random((nin, batch_size))

    # Evaluate the forward pass.
    ngt.make_transformer()
    out = executor(fprop, inp)(x)

    # Every element must be exactly zero (min == max == 0.0).
    assert np.min(out) == 0.0 and np.max(out) == 0.0