def test_conv_batchnorm_fprop(conv_input_placeholder, bn_params):
    """Verify batch norm over multiple (channel/spatial) axes and that the
    tracked side-effect variables (global mean/variance) update correctly.
    """
    bn_layer = BatchNorm(**bn_params)
    fprop = bn_layer(conv_input_placeholder)

    with ExecutorFactory() as ex:
        # Build the executors: one for the normalized output, one that reads
        # back the tracked global statistics.
        fprop_function = ex.executor(fprop, conv_input_placeholder)
        stats_function = ex.executor(
            [ng.value_of(bn_layer.gmean), ng.value_of(bn_layer.gvar)])

        # Seed the reference tracker with the layer's initial conditions.
        bn_params['gmean'] = 0.0
        bn_params['gvar'] = 1.0
        bn_params['axis'] = (1, 2, 3, )

        # Two iterations so that the running statistics are exercised beyond
        # their initial values.
        for _ in range(2):
            x = rng.uniform(0, 1, conv_input_placeholder.axes)

            # Reference fprop also advances the reference running stats.
            reference = BatchNormReference(x, **bn_params)
            out_ref, bn_params['gmean'], bn_params['gvar'] = reference.fprop

            # ngraph fprop and the stats it tracked as a side effect.
            out = fprop_function(x)
            gmean_val, gvar_val = stats_function()

            ng.testing.assert_allclose(out, out_ref, rtol=rtol, atol=atol)
            ng.testing.assert_allclose(gmean_val, bn_params['gmean'],
                                       rtol=rtol, atol=atol)
            ng.testing.assert_allclose(gvar_val, bn_params['gvar'],
                                       rtol=rtol, atol=atol)
def test_sequential_side(M):
    """Check that ng.sequential runs its side-effect assignments in order and
    that value_of snapshots taken before the assignments see the old values.
    """
    x1_np = 2
    x2_np = 3
    b_np = 1
    x_np = np.array([1, 2, 3], dtype=np.float32)

    x = ng.variable([M], initial_value=x_np)
    x1 = ng.persistent_tensor(axes=(), initial_value=x1_np)
    x2 = ng.persistent_tensor(axes=(), initial_value=x2_np)
    x1_vo = ng.value_of(x1)
    x2_vo = ng.value_of(x2)
    b = ng.persistent_tensor(axes=(), initial_value=b_np)

    y = ng.sequential([
        x1_vo,
        x2_vo,
        ng.assign(x1, ng.sum(x, out_axes=()) + x1 * b + (1 - b)),
        ng.assign(x2, ng.mean(x, out_axes=()) + x2 * b + (1 - b)),
        x * 2
    ])

    with ExecutorFactory() as ex:
        main_effect = ex.executor((y, x1_vo, x2_vo, x1, x2))
        current_values = ex.executor((x1, x2))

        def run_and_check():
            # One pass through the main path: the initial snapshots must show
            # the pre-assignment values, the finals the post-assignment ones.
            nonlocal x1_np, x2_np
            y_val, x1_init_val, x2_init_val, x1_final_val, x2_final_val = \
                main_effect()
            assert np.allclose(y_val, x_np * 2)
            assert np.allclose(x1_init_val, x1_np)
            assert np.allclose(x2_init_val, x2_np)
            # Advance the numpy reference the same way the graph does.
            x1_np = np.sum(x_np) + x1_np * b_np + (1 - b_np)
            x2_np = np.mean(x_np) + x2_np * b_np + (1 - b_np)
            assert np.allclose(x1_final_val, x1_np)
            assert np.allclose(x2_final_val, x2_np)

        # Run main path #1
        run_and_check()

        # Reading the persistent tensors directly must agree with the finals.
        x1_val, x2_val = current_values()
        assert np.allclose(x1_val, x1_np)
        assert np.allclose(x2_val, x2_np)

        # Run main path #2 (should behave the same as before)
        run_and_check()
def test_recurrent_batchnorm_fprop(RNN, recurrent_input, output_size, bn_params):
    """Compare an RNN with fused batch norm against a reference pipeline:
    numpy batch norm applied to the weighted input, then an RNN without norm.
    """
    helper = RNNHelper(recurrent_input, output_size, RNN, bn_params)

    # Graph with batch norm fused into the RNN.
    fprop = helper.rnn(recurrent_input)

    # Side-effect statistics tracked by the fused batch norm.
    stats = [ng.value_of(helper.gmean), ng.value_of(helper.gvar)]

    # Reference RNN takes an already-normalized input.
    reference_fprop = helper.reference_rnn(helper.reference_input)

    with ExecutorFactory() as ex:
        fprop_function = ex.executor(fprop, recurrent_input)
        stats_function = ex.executor(stats)
        reference_function = ex.executor(reference_fprop,
                                         helper.reference_input)

        # Seed the reference tracker with the layer's initial conditions.
        bn_params['gmean'] = 0.0
        bn_params['gvar'] = 1.0
        # The reference reduces over two positional axes.
        bn_params['axis'] = (1, 2)

        # Two iterations so the running statistics move off their seeds.
        for _ in range(2):
            input_value = rng.uniform(-1, 1, recurrent_input.axes)

            # Reference path: weight the input, normalize it in numpy, then
            # feed it to the norm-free RNN.
            weighted_input = np.dot(helper.W_in, input_value.swapaxes(0, 1))
            reference = BatchNormReference(weighted_input, **bn_params)
            normed_input, bn_params['gmean'], bn_params['gvar'] = \
                reference.fprop
            ref = reference_function(normed_input)

            # ngraph path with fused batch norm.
            out = fprop_function(input_value)
            gmean, gvar = stats_function()

            ng.testing.assert_allclose(out, ref,
                                       rtol=rtol, atol=recurrent_atol)
            ng.testing.assert_allclose(gmean, bn_params['gmean'],
                                       rtol=rtol, atol=recurrent_atol)
            ng.testing.assert_allclose(gvar, bn_params['gvar'],
                                       rtol=rtol, atol=recurrent_atol)
def test_batchnorm_fprop(batch_size, input_size, rho, epsilon, transformer_factory):
    """Batch norm across a feature axis: check the normalized output and the
    running-mean/variance side-effect variables against a numpy reference.
    """
    np.random.seed(0)

    # Build the input placeholder over (features, batch).
    N = ng.make_axis(batch_size, name='N')
    F = ng.make_axis(input_size)
    input_placeholder = ng.placeholder([F, N])

    layer = BatchNorm(rho, epsilon)
    fprop = layer.train_outputs(input_placeholder)

    with ExecutorFactory() as ex:
        fprop_function = ex.transformer.computation(fprop, input_placeholder)
        stats_function = ex.transformer.computation(
            [ng.value_of(layer.gmean), ng.value_of(layer.gvar)])

        # Reference running statistics start at the layer's initial values.
        gmean_ref, gvar_ref = 0.0, 1.0

        # Two iterations so the running statistics are actually updated.
        for _ in range(2):
            x = np.random.random((input_size, batch_size)).astype(np.float32)
            out = fprop_function(x)
            gm, gv = stats_function()

            # Numpy reference: per-feature normalization over the batch axis,
            # then an exponential update of the running statistics.
            xmean = x.mean(axis=1, keepdims=True)
            xvar = x.var(axis=1, keepdims=True)
            out_ref = (x - xmean) / np.sqrt(xvar + epsilon)
            gmean_ref = xmean.ravel() * (1.0 - rho) + gmean_ref * rho
            gvar_ref = xvar.ravel() * (1.0 - rho) + gvar_ref * rho

            assert ng.testing.allclose(
                out, out_ref, atol=1e-6), '%e' % np.max(np.abs(out - out_ref))
            assert ng.testing.allclose(
                gm, gmean_ref, atol=1e-6), '%e' % np.max(np.abs(gm - gmean_ref))
            assert ng.testing.allclose(
                gv, gvar_ref, atol=1e-6), '%e' % np.max(np.abs(gv - gvar_ref))
def test_fill_slice(transformer_factory):
    """ng.fill applied to a tensor slice must write only that slice."""
    rows = ng.make_axis(length=2)
    cols = ng.make_axis(length=8)
    a = ng.placeholder(axes=ng.make_axes([rows, cols]))

    # Zero column 1 in place, then read back the whole tensor.
    b = ng.sequential([ng.fill(a[:, 1], 0), ng.value_of(a)])

    with ExecutorFactory() as ex:
        func = ex.executor(b, a)
        input_array = np.array([[1, 2, 3, 4, 5, 6, 7, 8],
                                [8, 7, 6, 5, 4, 3, 2, 1]], dtype=np.float32)
        baseline = func(input_array)
        expected = np.array([[1, 0, 3, 4, 5, 6, 7, 8],
                             [8, 0, 6, 5, 4, 3, 2, 1]])
        ng.testing.assert_allclose(baseline, expected)