Example #1
def test_custom_model_registered(self):
    """Test that a registered subclassed Model can be serialized."""
    estimator = KerasRegressor(build_fn=build_fn_regs_custom_model_reg)
    check(estimator, load_boston)
Example #2
def test_custom_model_unregistered():
    """Test that an unregistered subclassed Model raises an error.
    """
    estimator = KerasRegressor(model=build_fn_custom_model_unregistered)
    with pytest.raises(ValueError, match="Unknown layer"):
        check_pickle(estimator, load_boston)
Example #3
def test_custom_model_registered():
    """Test that a registered subclassed Model can be serialized.
    """
    estimator = KerasRegressor(model=build_fn_custom_model_registered)
    check_pickle(estimator, load_boston)
Example #4
def test_custom_model_unregistered():
    """Test that pickling an unregistered subclassed model works."""
    estimator = KerasRegressor(model=build_fn_custom_model_unregistered)
    check_pickle(estimator, fetch_california_housing)
Example #5
def test_custom_model_registered():
    """Test that a registered subclassed Model can be serialized."""
    estimator = KerasRegressor(model=build_fn_custom_model_registered)
    check_pickle(estimator, fetch_california_housing)
Example #6
def test_custom_model_unregistered(self):
    """Test that an unregistered subclassed Model raises an error."""
    estimator = KerasRegressor(build_fn=build_fn_regs_custom_model_unreg)
    with pytest.raises(ValueError):
        check(estimator, load_boston)
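
Examples #1-#6 turn on whether the subclassed Model returned by the build function is registered with Keras' serialization machinery. The builders themselves (build_fn_custom_model_registered, build_fn_regs_custom_model_reg, etc.) are not shown on this page; the snippet below is only a minimal sketch of what a "registered" builder might look like, using the real tf.keras.utils.register_keras_serializable decorator. The class and builder names here are hypothetical stand-ins.

from tensorflow import keras


@keras.utils.register_keras_serializable(package="examples")  # registration step
class SmallSubclassedRegressor(keras.Model):
    def __init__(self, hidden_units=32, **kwargs):
        super().__init__(**kwargs)
        self.hidden = keras.layers.Dense(hidden_units, activation="relu")
        self.out = keras.layers.Dense(1)

    def call(self, inputs):
        return self.out(self.hidden(inputs))

    def get_config(self):
        # Needed so the registered class can be re-instantiated on deserialization
        return {"hidden_units": self.hidden.units}


def build_fn_custom_model_registered(meta=None):
    # Hypothetical builder in the spirit of the ones referenced above
    model = SmallSubclassedRegressor()
    model.compile(loss="mse", optimizer="adam")
    return model

Without the decorator, loading or unpickling the saved estimator cannot resolve the custom class, which is the failure the "unregistered" examples exercise.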
Example #7
class TestRandomState:
    @pytest.mark.parametrize(
        "random_state",
        [0, 123, np.random.RandomState(0)],
    )
    @pytest.mark.parametrize(
        "estimator",
        [
            KerasRegressor(
                model=dynamic_regressor,
                model__hidden_layer_sizes=(100, ),
            ),
            KerasClassifier(model=dynamic_classifier,
                            model__hidden_layer_sizes=(100, )),
        ],
    )
    def test_random_states(self, random_state, estimator):
        """Tests that the random_state parameter correctly
        engages deterministic training and prediction.
        """
        X, y = make_classification()

        # With seed
        estimator.set_params(random_state=random_state)
        estimator.fit(X, y)
        y1 = estimator.predict(X)
        estimator.fit(X, y)
        y2 = estimator.predict(X)
        assert np.allclose(y1, y2)

        if isinstance(estimator, KerasRegressor):
            # Without seed, regressors should NOT
            # give the same results
            # Classifiers _may_ give the same classes
            estimator.set_params(random_state=None)
            estimator.fit(X, y)
            y1 = estimator.predict(X)
            estimator.fit(X, y)
            y2 = estimator.predict(X)
            assert not np.allclose(y1, y2)

    @pytest.mark.parametrize(
        "estimator",
        [
            KerasRegressor(
                model=dynamic_regressor,
                model__hidden_layer_sizes=(100, ),
            ),
            KerasClassifier(model=dynamic_classifier,
                            model__hidden_layer_sizes=(100, )),
        ],
    )
    @pytest.mark.parametrize("pyhash", [None, "0", "1"])
    @pytest.mark.parametrize("gpu", [None, "0", "1"])
    def test_random_states_env_vars(self, estimator, pyhash, gpu):
        """Tests that the random state context management correctly
        handles TF-related environment variables.
        """
        X, y = make_classification()

        if "random_state" in estimator.get_params():
            estimator.set_params(random_state=None)
        estimator1 = clone(estimator)
        estimator2 = clone(estimator)
        if "random_state" in estimator1.get_params():
            estimator1.set_params(random_state=0)
        if "random_state" in estimator2.get_params():
            estimator2.set_params(random_state=0)
        if gpu is not None:
            os.environ["TF_DETERMINISTIC_OPS"] = gpu
        else:
            if os.environ.get("TF_DETERMINISTIC_OPS"):
                os.environ.pop("TF_DETERMINISTIC_OPS")
        if pyhash is not None:
            os.environ["PYTHONHASHSEED"] = pyhash
        else:
            if os.environ.get("PYTHONHASHSEED"):
                os.environ.pop("PYTHONHASHSEED")
        estimator1.fit(X, y)
        estimator2.fit(X, y)
        if gpu is not None:
            assert os.environ["TF_DETERMINISTIC_OPS"] == gpu
        else:
            assert "TF_DETERMINISTIC_OPS" not in os.environ
        if pyhash is not None:
            assert os.environ["PYTHONHASHSEED"] == pyhash
        else:
            assert "PYTHONHASHSEED" not in os.environ
        y1 = estimator1.predict(X)
        y2 = estimator2.predict(X)
        assert np.allclose(y1, y2)
        if gpu is not None:
            assert os.environ["TF_DETERMINISTIC_OPS"] == gpu
        else:
            assert "TF_DETERMINISTIC_OPS" not in os.environ
        if pyhash is not None:
            assert os.environ["PYTHONHASHSEED"] == pyhash
        else:
            assert "PYTHONHASHSEED" not in os.environ
Example #8
                              loss="binary_crossentropy",
                              split=False),
    ),
    (
        MLPClassifier(**mlp_kwargs),
        MultiOutputClassifier(create_model("sigmoid", [1, 1, 1]),
                              loss="binary_crossentropy"),
    ),
    (
        ScikitLearnMultiOutputClassifier(MLPClassifier()),
        MultiOutputClassifier(create_model("softmax", [3, 3, 3]),
                              loss="sparse_categorical_crossentropy"),
    ),
)
est_paris_reg = (
    (MLPRegressor(), KerasRegressor(dynamic_regressor, hidden_layer_sizes=[])),
    (MLPRegressor(), KerasRegressor(dynamic_regressor, hidden_layer_sizes=[])),
)


@pytest.mark.parametrize(
    "y_dtype",
    ("float32", "float64", "int64", "int32", "uint8", "uint16", "object",
     "str"),
)
@pytest.mark.parametrize("y_val,est_pair,task_name",
                         zip(y_vals_cls, est_paris_cls, task_names_cls))
def test_output_shapes_and_dtypes_against_sklearn_cls(y_dtype, y_val,
                                                      task_name, est_pair):
    """Tests the output shape and dtype for all supported classification tasks
    and target dtypes (except string and object, those are incompatible with 
Example #9
def test_regression_build_fn(self):
    """Tests for errors using KerasRegressor."""
    reg = KerasRegressor(model=build_fn_reg, hidden_dim=5)
    basic_checks(reg, load_boston)
Example #10
# NOTE: framework imports added below; `KerasRegressor` is assumed to come from
# scikeras, since the `model=` keyword (rather than `build_fn=`) is the scikeras
# spelling. `model`, `epochs`, `batch_size`, `preprocess_batch_path`, `n_batches`,
# `load_training` and `load_testing` are assumed to be defined earlier in the script.
import numpy as np
from sklearn.ensemble import AdaBoostRegressor
from scikeras.wrappers import KerasRegressor
from tensorflow.keras.losses import CategoricalCrossentropy
from tensorflow.keras.metrics import CategoricalAccuracy
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.optimizers.schedules import ExponentialDecay

lr_schedule = ExponentialDecay(
    initial_learning_rate=1e-3,
    decay_steps=1000,
    decay_rate=0.9)
optimizer = Adam(learning_rate=lr_schedule)

model.compile(optimizer=optimizer,
              loss=CategoricalCrossentropy(from_logits=True),
              metrics=[CategoricalAccuracy()])
# model.compile(optimizer=optimizer,
#               loss=MeanSquaredError(),
#               metrics=['mse'])

# ann_estimator = KerasRegressor(build_fn=model, epochs=epochs, batch_size=batch_size, verbose=0)
ann_estimator = KerasRegressor(model=model, epochs=epochs, batch_size=batch_size, verbose=0)

boosted_ann = AdaBoostRegressor(base_estimator=ann_estimator, n_estimators=10, learning_rate=1e-3)

X_train, y_train = load_training(preprocess_batch_path, n_batches)
sample_weights = np.full(y_train.shape, 1/y_train.shape[0])

boosted_ann.fit(X_train, y_train, sample_weight=sample_weights)

X_test, y_test = load_testing(preprocess_batch_path)
# Note: for a regressor, score() returns the R^2 coefficient, not accuracy.
accuracy = boosted_ann.score(X_test, y_test)
print(accuracy * 100, '%')

# history = model.fit(load_batch(preprocess_batch_path, n_batches, batch_size),
#                     epochs=epochs,
#                     steps_per_epoch=197*n_batches,