def test_errors_loss_output_activation():
    """Make sure cross-entropy loss with activations not equal to
    `tensorflow.nn.sigmoid` or `tensorflow.nn.softmax` fails."""
    # This data will not actually be fit.
    # I am just using it to call the `fit` method.
    X = np.ones((1000, 4))

    # There are two code paths for this test. One for all features
    # using the default loss and one for a mix of losses.

    # All features use the default loss.
    ae = Autoencoder(loss='cross-entropy', output_activation=tf.exp)
    with pytest.raises(ValueError) as e:
        ae.fit(X)
    # Check the exception's own message via `e.value`; `str(e)` on the
    # `ExceptionInfo` wrapper is not guaranteed to contain the message.
    assert "'cross-entropy' loss!" in str(e.value), (
        "Wrong error raised for testing 'cross-entropy' loss with "
        "output activation that is not allowed for all features!")

    # Not all features use the default loss.
    ae = Autoencoder(loss='cross-entropy',
                     output_activation=tf.exp,
                     sigmoid_indices=[0])
    with pytest.raises(ValueError) as e:
        ae.fit(X)
    assert "'cross-entropy' loss!" in str(e.value), (
        "Wrong error raised for testing 'cross-entropy' loss with "
        "output activation that is not allowed for a subset of features!")
def test_refitting():
    """Make sure that refitting resets internals."""
    # Scale the iris features into [0, 1], then binarize each column by
    # thresholding at its median so the cross-entropy loss applies.
    X = MinMaxScaler().fit_transform(iris.data)
    for col in range(X.shape[1]):
        edges = [0.0, np.median(X[:, col]), 1.1]
        X[:, col] = np.digitize(X[:, col], edges) - 1.0

    ae = Autoencoder(hidden_units=(1,),
                     n_epochs=1000,
                     random_state=4556,
                     learning_rate=1e-2,
                     keep_prob=1.0,
                     loss='cross-entropy',
                     output_activation=tf.nn.sigmoid)
    ae.fit(X)
    assert ae.input_layer_size_ == 4, ("Input layer is the wrong size for "
                                       "the Autoencoder!")

    # Refit on data with one fewer column; the input layer size recorded
    # by the estimator must be rebuilt to match.
    X_small = X[:, 0:-1]
    assert X_small.shape != X.shape, "Test data for refitting does not work!"
    ae.fit(X_small)
    assert ae.input_layer_size_ == 3, ("Input layer is the wrong size for "
                                       "the Autoencoder!")
def test_errors_overlapping_sigmoid_softmax_indixes():
    """Make sure overlapping sigmoid and softmax indices raise an error."""
    # This data will not actually be fit.
    # I am just using it to call the `fit` method.
    X = np.ones((1000, 4))

    # Index 0 appears in both the sigmoid and the softmax groups, which
    # the estimator must reject before fitting.
    ae = Autoencoder(loss='blah',
                     sigmoid_indices=[0],
                     softmax_indices=[[0, 2]])
    with pytest.raises(ValueError) as e:
        ae.fit(X)
    # Check the exception's own message via `e.value`; `str(e)` on the
    # `ExceptionInfo` wrapper is not guaranteed to contain the message.
    assert "Sigmoid indices and softmax indices" in str(e.value), (
        "Wrong error raised for overlapping sigmoid and softmax indices")
def test_monitor_ae():
    """Test the monitor keyword."""
    # Fit the scaled iris features.
    X = MinMaxScaler().fit_transform(iris.data)

    ae = Autoencoder(hidden_units=(3, 2,),
                     n_epochs=7500,
                     random_state=4556,
                     learning_rate=DEFAULT_LEARNING_RATE,
                     keep_prob=1.0,
                     hidden_activation=tf.nn.sigmoid,
                     encoding_activation=tf.nn.sigmoid,
                     output_activation=tf.nn.sigmoid)

    def _monitor(epoch, est, stats):
        # Returning True tells `fit` to stop early; the monitor must be
        # invoked and must trigger well before the epoch budget runs out.
        assert epoch <= 1000, "The autoencoder has been running too long!"
        if stats['loss'] >= 0.2:
            return False
        assert epoch > 10, "The autoencoder returned too soon!"
        return True

    ae.fit(X, monitor=_monitor)
def test_errors_unallowed_loss():
    """Make sure unallowed losses cause an error."""
    # This data will not actually be fit.
    # I am just using it to call the `fit` method.
    X = np.ones((1000, 4))

    # There are two code paths for this test. One for all features
    # using the default loss and one for a mix of losses.

    # All features use the default loss.
    ae = Autoencoder(loss='blah')
    with pytest.raises(ValueError) as e:
        ae.fit(X)
    # Check the exception's own message via `e.value`; `str(e)` on the
    # `ExceptionInfo` wrapper is not guaranteed to contain the message.
    assert "Loss 'blah'" in str(e.value), (
        "Wrong error raised for testing unallowed losses!")

    # Not all features use the default loss.
    ae = Autoencoder(loss='blah', sigmoid_indices=[0])
    with pytest.raises(ValueError) as e:
        ae.fit(X)
    assert "Loss 'blah'" in str(e.value), (
        "Wrong error raised for testing unallowed losses!")