import tempfile

import numpy as np
import pytest
import torch.nn as nn

import np_autoencoder
import np_shallow_neural_classifier
import torch_autoencoder
import torch_shallow_neural_classifier


def test_torch_shallow_neural_classifier_incremental(XOR):
    X, y = XOR
    model = torch_shallow_neural_classifier.TorchShallowNeuralClassifier(
        hidden_dim=4, hidden_activation=nn.ReLU(), max_iter=100, eta=0.01)
    model.fit(X, y, X_dev=X, dev_iter=1)
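    # With dev_iter=1 and max_iter=100, dev-set predictions should be
    # recorded after each of the 100 epochs, keyed by epoch number.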
    epochs = list(model.dev_predictions.keys())
    assert epochs == list(range(1, 101))
    assert all(len(v) == len(X) for v in model.dev_predictions.values())


def test_torch_shallow_neural_classifier(XOR):
    """Just makes sure that this code will run; it doesn't check that
    it is creating good models.
    """
    X, y = XOR
    model = torch_shallow_neural_classifier.TorchShallowNeuralClassifier(
        hidden_dim=4, hidden_activation=nn.ReLU(), max_iter=100, eta=0.01)
    model.fit(X, y)
    model.predict(X)
    model.predict_proba(X)


def test_torch_shallow_neural_classifier_save_load(XOR):
    X, y = XOR
    mod = torch_shallow_neural_classifier.TorchShallowNeuralClassifier(
        hidden_dim=4, hidden_activation=nn.ReLU(), max_iter=100, eta=0.01)
    mod.fit(X, y)
    mod.predict(X)
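    # Round-trip the fitted model through a temporary file via
    # to_pickle/from_pickle and check that the restored copy can still
    # predict and be refit.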
    with tempfile.NamedTemporaryFile(mode='wb') as f:
        name = f.name
        mod.to_pickle(name)
        mod2 = torch_shallow_neural_classifier.TorchShallowNeuralClassifier.from_pickle(
            name)
        mod2.predict(X)
        mod2.fit(X, y)
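

# The tests above assume an `XOR` pytest fixture that is not included in
# this fragment. A minimal sketch of such a fixture, assuming the classic
# four-point XOR dataset; the fixture actually used by the original test
# module may differ:
@pytest.fixture
def XOR():
    dataset = [
        ([1.0, 1.0], False),
        ([1.0, 0.0], True),
        ([0.0, 1.0], True),
        ([0.0, 0.0], False)]
    X, y = zip(*dataset)
    return np.array(X), list(y)

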
 [
     np_shallow_neural_classifier.ShallowNeuralClassifier(
         hidden_dim=5, max_iter=1, eta=1.0),
     {
         'hidden_dim': 10,
         # Reset to ReLU:
         'hidden_activation': lambda z: np.maximum(0, z),
         'd_hidden_activation': lambda z: np.where(z > 0, 1, 0),
         'max_iter': 10,
         'eta': 0.1
     }
 ],
 [
     torch_shallow_neural_classifier.TorchShallowNeuralClassifier(
         hidden_dim=5, hidden_activation=nn.ReLU(), max_iter=1, eta=1.0),
     {
         'hidden_dim': 10,
         'hidden_activation': nn.ReLU(),
         'max_iter': 10,
         'eta': 0.1
     }
 ],
 [
     np_autoencoder.Autoencoder(hidden_dim=5, max_iter=1, eta=1.0),
     {'hidden_dim': 10, 'max_iter': 10, 'eta': 0.1}
 ],
 [
     torch_autoencoder.TorchAutoencoder(
         hidden_dim=5, hidden_activation=nn.ReLU(), max_iter=1, eta=1.0),
     {
         'hidden_dim': 10,
         'hidden_activation': nn.ReLU(),
         'max_iter': 10,
         'eta': 0.1
     }
 ],