import numpy as np
import pytest

import utils
# Assumed module path for the classifier under test:
from np_shallow_neural_classifier import ShallowNeuralClassifier


@pytest.mark.parametrize("hidden_activation, d_hidden_activation", [
    # Assumed parametrization; these activation/derivative pairs are
    # illustrative:
    [np.tanh, utils.d_tanh],
    [utils.relu, utils.d_relu]
])
def test_np_shallow_neural_classifier_gradients(hidden_activation, d_hidden_activation):
    model = ShallowNeuralClassifier(
        max_iter=10,
        hidden_activation=hidden_activation,
        d_hidden_activation=d_hidden_activation)
    # A tiny dataset so that we can run `fit` and set all the model
    # parameters:
    X = utils.randmatrix(5, 2)
    y = np.random.choice((0, 1), 5)
    model.fit(X, y)
    # Use the first example for the check:
    ex = X[0]
    label = model._onehot_encode([y[0]])[0]
    # Forward and backward to get the gradients:
    hidden, pred = model.forward_propagation(ex)
    d_W_hy, d_b_hy, d_W_xh, d_b_xh = model.backward_propagation(
        hidden, pred, ex, label)
    # Model parameters to check, paired with their analytic gradients:
    param_pairs = (
        ('W_hy', d_W_hy),
        ('b_hy', d_b_hy),
        ('W_xh', d_W_xh),
        ('b_xh', d_b_xh)
    )
    gradient_check(param_pairs, model, ex, label)
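

# `gradient_check` is not defined in this section. Below is a minimal
# sketch of the standard centered-finite-difference check, assuming the
# cross-entropy loss implied by the classifier's softmax output; the
# `epsilon` and `threshold` values are illustrative, not taken from the
# original file.
def gradient_check(param_pairs, model, ex, label, epsilon=1e-4, threshold=1e-4):
    for param_name, d_param in param_pairs:
        param = getattr(model, param_name)
        # Perturb each scalar entry of the parameter in turn:
        for idx in np.ndindex(*param.shape):
            original = param[idx]
            # Cross-entropy loss at theta + epsilon:
            param[idx] = original + epsilon
            _, pred = model.forward_propagation(ex)
            loss_plus = -np.sum(label * np.log(pred))
            # Cross-entropy loss at theta - epsilon:
            param[idx] = original - epsilon
            _, pred = model.forward_propagation(ex)
            loss_minus = -np.sum(label * np.log(pred))
            # Restore the original value before moving on:
            param[idx] = original
            # Centered-difference approximation to the gradient:
            numeric = (loss_plus - loss_minus) / (2.0 * epsilon)
            assert np.isclose(numeric, d_param[idx], atol=threshold), (
                "Gradient check failed for {} at index {}: "
                "numeric {} vs. analytic {}".format(
                    param_name, idx, numeric, d_param[idx]))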


def test_np_model(XOR):
    """Just makes sure that this code will run; it doesn't check that
    it is creating good models.
    """
    X, y = XOR
    model = ShallowNeuralClassifier(
        hidden_dim=4,
        hidden_activation=np.tanh,
        d_hidden_activation=utils.d_tanh,
        eta=0.05,
        tol=1.5e-8,
        display_progress=True,
        max_iter=100)
    model.fit(X, y)
    model.predict(X)
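

# The `XOR` fixture consumed by `test_np_model` is not defined in this
# section; below is an assumed sketch built on the XOR truth table.
# pytest resolves fixtures by name, so its position in the file is
# immaterial.
@pytest.fixture
def XOR():
    dataset = [
        ([0.0, 0.0], False),
        ([0.0, 1.0], True),
        ([1.0, 0.0], True),
        ([1.0, 1.0], False)]
    X, y = zip(*dataset)
    return np.array(X), list(y)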


@pytest.mark.parametrize("param, value", [
    # Assumed parametrization; illustrative hyperparameter settings:
    ["hidden_dim", 10],
    ["max_iter", 10],
    ["eta", 1.0]
])
def test_np_parameter_setting(param, value):
    mod = ShallowNeuralClassifier()
    mod.set_params(**{param: value})
    assert getattr(mod, param) == value