def test_pickle_with_callbacks():
    """Test that models with callbacks (which hold a reference to the Keras
    model itself) are picklable.

    Only checks that fit / pickle-roundtrip / predict / partial_fit raise no
    errors — no numerical output is asserted.
    """
    clf = KerasRegressor(
        model=get_reg,
        loss="mse",
        callbacks=[keras.callbacks.Callback()],
    )
    # Fit and roundtrip, validating only that there are no errors
    clf.fit([[1]], [1])
    clf = pickle.loads(pickle.dumps(clf))
    clf.predict([[1]])
    clf.partial_fit([[1]], [1])
def test_partial_fit_pickle(optim):
    """
    This test is implemented to make sure model pickling does not affect
    training.

    (this is essentially what Dask-ML does for search)
    """
    # NOTE(review): a function with this exact name (and an expanded
    # docstring) is re-defined later in this module; at import time the later
    # definition shadows this one, so this copy is never collected or run —
    # confirm and remove the duplicate.
    X, y = make_regression(n_features=8, n_samples=100)
    m1 = KerasRegressor(
        model=dynamic_regressor,
        optimizer=optim,
        random_state=42,
        hidden_layer_sizes=[],
    )
    m2 = clone(m1)
    # Ensure we can roundtrip before training
    m2 = _reload(m2)
    # Make sure start from same model
    m1.partial_fit(X, y)
    m2.partial_fit(X, y)
    assert _weights_close(m1, m2)
    # Train; make sure pickling doesn't affect it
    for k in range(4):
        m1.partial_fit(X, y)
        m2 = _reload(m2, epoch=k + 1).partial_fit(X, y)
    # Make sure the same model is produced
    assert _weights_close(m1, m2)
    # Make sure predictions are the same
    assert np.allclose(m1.predict(X), m2.predict(X))
def test_batch_size_all_predict(length, prefix, base):
    """A ``-1`` value for the parameter named ``prefix + base`` should be
    forwarded to ``model_.predict`` as the full number of samples.
    """
    param_name = prefix + base
    targets = np.random.random((length,))
    data = targets.reshape((-1, 1))
    estimator = KerasRegressor(
        dynamic_regressor, hidden_layer_sizes=[], **{param_name: -1}
    )
    estimator.fit(data, targets)
    wrapped_predict = estimator.model_.predict

    def assert_full_batch(**kwargs):
        # -1 should have been translated into the dataset size
        assert kwargs[base] == data.shape[0]
        return wrapped_predict(**kwargs)

    with mock.patch.object(estimator.model_, "predict", new=assert_full_batch):
        estimator.predict(data)
def test_partial_fit_pickle(optim):
    """
    Ensure that pickling the model between training steps does not affect
    training, which is (essentially) what Dask-ML does for a model
    selection search.

    This test is simple for functional optimizers (like SGD without momentum),
    and tricky for stateful transforms (SGD w/ momentum, Adam, Adagrad, etc).

    For more detail, see https://github.com/adriangb/scikeras/pull/126
    and links within
    """
    X, y = make_regression(n_features=8, n_samples=100)
    est = KerasRegressor(
        model=dynamic_regressor,
        optimizer=optim,
        random_state=42,
        hidden_layer_sizes=[],
    )
    est_roundtrip = clone(est)
    # A pickle roundtrip must already work before any training
    est_roundtrip = _reload(est_roundtrip)
    # Both estimators must start from the same model
    est.partial_fit(X, y)
    est_roundtrip.partial_fit(X, y)
    assert _weights_close(est, est_roundtrip)
    # Keep training, roundtripping one of the two models at every step
    for epoch in range(1, 5):
        est.partial_fit(X, y)
        est_roundtrip = _reload(est_roundtrip, epoch=epoch).partial_fit(X, y)
    # Both runs must have produced the same model ...
    assert _weights_close(est, est_roundtrip)
    # ... and therefore the same predictions
    assert np.allclose(est.predict(X), est_roundtrip.predict(X))