def test_get_output_for_shared_axes(self, shared_axes):
    """Dropout with ``shared_axes`` must apply a single mask across those axes.

    Parameters
    ----------
    shared_axes : tuple of int
        Axes along which the dropout mask is shared (pytest fixture).
    """
    from lasagne.layers.noise import DropoutLayer

    layer = DropoutLayer((2, 4, 7, 9), shared_axes=shared_axes)
    # `input_var` (not `input`) to avoid shadowing the Python builtin.
    input_var = theano.shared(numpy.ones((2, 4, 7, 9)))
    result_eval = layer.get_output_for(input_var).eval()
    # If the mask is shared along `shared_axes`, every value along those
    # axes is identical, so the mean over them — broadcast back via
    # keepdims — must reproduce the full output exactly.
    assert np.allclose(
        result_eval.mean(axis=shared_axes, keepdims=True), result_eval)
def test_get_output_for_p_float16(self, input_layer):
    """A float16 dropout probability ``p`` must not upcast a float16 input.

    Parameters
    ----------
    input_layer : Layer
        Incoming layer (pytest fixture).
    """
    from lasagne.layers.noise import DropoutLayer

    layer = DropoutLayer(input_layer, p=numpy.float16(0.5))
    # `input_var` (not `input`) to avoid shadowing the Python builtin.
    input_var = theano.shared(numpy.ones((100, 100), dtype=numpy.float16))
    # The output dtype must match the input dtype exactly.
    assert layer.get_output_for(input_var).dtype == input_var.dtype
def test_get_output_for_p_float32(self, input_layer):
    """A float32 dropout probability ``p`` must not upcast a float32 input.

    Parameters
    ----------
    input_layer : Layer
        Incoming layer (pytest fixture).
    """
    from lasagne.layers.noise import DropoutLayer

    layer = DropoutLayer(input_layer, p=numpy.float32(0.5))
    # `input_var` (not `input`) to avoid shadowing the Python builtin.
    input_var = theano.shared(numpy.ones((100, 100), dtype=numpy.float32))
    # The output dtype must match the input dtype exactly.
    assert layer.get_output_for(input_var).dtype == input_var.dtype