Code example #1
File: test_esn.py  Project: TUD-STKS/PyRCN
def test_esn_classifier_instance_fit() -> None:
    X, y = load_digits(return_X_y=True, as_sequence=True)
    esn = ESNClassifier(hidden_layer_size=50).fit(X[0], np.repeat(y[0], 8))
    assert (esn.sequence_to_value is False)
    y_pred = esn.predict_proba(X[0])
    assert (y_pred.ndim == 2)
    y_pred = esn.predict_log_proba(X[0])
    assert (y_pred.ndim == 2)
Code example #2
def test_iris_ensemble_iterative_regression():
    print('\ntest_iris_ensemble_iterative_regression():')
    X_train, X_test, y_train, y_test = train_test_split(X_iris,
                                                        y_iris,
                                                        test_size=5,
                                                        random_state=42)
    cls = ESNClassifier(input_to_node=[('tanh',
                                        InputToNode(hidden_layer_size=10,
                                                    random_state=42,
                                                    activation='identity')),
                                       ('bounded_relu',
                                        InputToNode(hidden_layer_size=10,
                                                    random_state=42,
                                                    activation='bounded_relu'))
                                       ],
                        node_to_node=[('default',
                                       NodeToNode(hidden_layer_size=20,
                                                  spectral_radius=0.0))],
                        regressor=IncrementalRegression(alpha=.01),
                        random_state=42)

    for samples in np.split(np.arange(0, X_train.shape[0]), 5):
        cls.partial_fit(X_train[samples, :],
                        y_train[samples],
                        classes=np.arange(3, dtype=int))
    y_predicted = cls.predict(X_test)

    for record in range(len(y_test)):
        print('predicted: {0} \ttrue: {1}'.format(y_predicted[record],
                                                  y_test[record]))

    print('score: {0}'.format(cls.score(X_test, y_test)))
    print('proba: {0}'.format(cls.predict_proba(X_test)))
    print('log_proba: {0}'.format(cls.predict_log_proba(X_test)))
    assert cls.score(X_test, y_test) >= 4. / 5.
Code example #3
File: test_esn.py  Project: TUD-STKS/PyRCN
def test_esn_classifier_no_valid_params() -> None:
    X, y = load_digits(return_X_y=True, as_sequence=True)
    with pytest.raises(TypeError):
        ESNClassifier(input_to_node=ESNRegressor()).fit(X, y)
    with pytest.raises(TypeError):
        ESNClassifier(node_to_node=ESNRegressor()).fit(X, y)
    with pytest.raises(TypeError):
        ESNClassifier(input_to_node=ESNRegressor()).fit(X, y)
    with pytest.raises(ValueError):
        ESNClassifier(requires_sequence="True").fit(X, y)
    with pytest.raises(TypeError):
        ESNClassifier(regressor=InputToNode()).fit(X, y)
Code example #4
File: test_esn.py  Project: TUD-STKS/PyRCN
def test_esn_output_unchanged() -> None:
    X, y = load_digits(return_X_y=True, as_sequence=True)
    shape1 = y[0].shape
    esn = ESNClassifier(hidden_layer_size=50).fit(X, y)
    print(esn)
    shape2 = y[0].shape
    assert (shape1 == shape2)
Code example #5
File: test_esn.py  Project: TUD-STKS/PyRCN
def test_esn_classifier_partial_fit() -> None:
    X, y = load_digits(return_X_y=True, as_sequence=True)
    esn = ESNClassifier(hidden_layer_size=50, verbose=True)
    for k in range(10):
        esn.partial_fit(X[k],
                        np.repeat(y[k], 8),
                        classes=np.arange(10),
                        postpone_inverse=True)
    print(esn.__sizeof__())
    print(esn.hidden_layer_state)
    esn = ESNClassifier(hidden_layer_size=50, regressor=Ridge())
    with pytest.raises(BaseException):
        for k in range(10):
            esn.partial_fit(X[k],
                            np.repeat(y[k], 8),
                            classes=np.arange(10),
                            postpone_inverse=True)
Code example #6
File: test_esn.py  Project: TUD-STKS/PyRCN
def test_esn_classifier_sequence_to_value() -> None:
    X, y = load_digits(return_X_y=True, as_sequence=True)
    esn = ESNClassifier(hidden_layer_size=50).fit(X, y)
    y_pred = esn.predict(X)
    assert (len(y) == len(y_pred))
    assert (len(y_pred[0]) == 1)
    assert (esn.sequence_to_value is True)
    assert (esn.decision_strategy == "winner_takes_all")
    y_pred = esn.predict_proba(X)
    assert (y_pred[0].ndim == 1)
    y_pred = esn.predict_log_proba(X)
    assert (y_pred[0].ndim == 1)
    esn.sequence_to_value = False
    y_pred = esn.predict(X)
    assert (len(y_pred[0]) == 8)
    y_pred = esn.predict_proba(X)
    assert (y_pred[0].ndim == 2)
    y_pred = esn.predict_log_proba(X)
    assert (y_pred[0].ndim == 2)
Code example #7
    'verbose': 1, 'n_jobs': -1,
    'scoring': make_scorer(mean_squared_error, greater_is_better=False,
                           needs_proba=True)
}
kwargs_step4 = {
    'n_iter': 50, 'random_state': 42, 'verbose': 1, 'n_jobs': -1,
    'scoring': make_scorer(mean_squared_error, greater_is_better=False,
                           needs_proba=True)
}

searches = [('step1', RandomizedSearchCV, step1_esn_params, kwargs_step1),
            ('step2', RandomizedSearchCV, step2_esn_params, kwargs_step2),
            ('step3', GridSearchCV, step3_esn_params, kwargs_step3),
            ('step4', RandomizedSearchCV, step4_esn_params, kwargs_step4)]

base_esn = ESNClassifier(**initially_fixed_params)
sequential_search = \
    SequentialSearchCV(base_esn, searches=searches).fit(X_train, y_train)

# ## Test the ESN
#
# In the test case, we train the ESN using the entire training set as seen
# before. Next, we compute the predicted outputs on the training and test set
# and fix a threshold of 0.5, above which a note is assumed to be predicted.
#
# We report the accuracy score for each frame in order to follow the reference
# paper.
param_grid = {'hidden_layer_size': [500, 1000, 2000, 4000, 5000]}
base_esn = sequential_search.best_estimator_

for params in ParameterGrid(param_grid):
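The snippet above breaks off right after the loop header. A minimal sketch of how such an evaluation loop could continue, following the 0.5 threshold described in the comments (base_esn, param_grid, X_train/y_train and X_test/y_test come from the surrounding script; the thresholding and frame-wise scoring below are assumptions, not the original code):

import numpy as np
from sklearn.base import clone
from sklearn.metrics import accuracy_score
from sklearn.model_selection import ParameterGrid

for params in ParameterGrid(param_grid):
    # Refit the best estimator found so far with the current reservoir size.
    esn = clone(base_esn).set_params(**params).fit(X_train, y_train)
    # Binarize the predicted note probabilities at the assumed 0.5 threshold
    # and report a frame-wise accuracy for the test set.
    y_proba = esn.predict_proba(X_test)
    y_pred = [(proba > 0.5).astype(int) for proba in y_proba]
    scores = [accuracy_score(y_true, y_hat)
              for y_true, y_hat in zip(y_test, y_pred)]
    print(params, np.mean(scores))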
Code example #8
    'input_to_node__input_scaling': np.linspace(start=0.1, stop=1, num=10),
    'input_to_node__bias_scaling': [0.0],
    'input_to_node__activation': ['identity'],
    'input_to_node__random_state': [42],
    'node_to_node__hidden_layer_size': [50],
    'node_to_node__leakage': [1.0],
    'node_to_node__spectral_radius': np.linspace(start=0.0, stop=1, num=11),
    'node_to_node__bias_scaling': [0.0],
    'node_to_node__activation': ['tanh'],
    'node_to_node__random_state': [42],
    'regressor__alpha': [1e-3],
    'random_state': [42]
}

base_esn = ESNClassifier(input_to_node=InputToNode(),
                         node_to_node=NodeToNode(),
                         regressor=IncrementalRegression())

# ## Optimize input_scaling and spectral_radius
#
# We use the ParameterGrid from scikit-learn, which converts the grid
# parameters defined before into a list of dictionaries, one for each
# parameter combination.
#
# We loop over each entry of the ParameterGrid, set the parameters on the
# base ESN and fit the model on the training data. Afterwards, we report
# the error rates on the training and test set.
#
#     The lowest training error rate: 0.536330735; parameter combination: {'input_scaling': 0.1, 'spectral_radius': 1.0}
#     The lowest test error rate: 0.588987764; parameter combination: {'input_scaling': 0.1, 'spectral_radius': 1.0}
#
# We use the best parameter combination from the training set, because we
# do not want to overfit on the test set.
#
# As can be seen in the Python call, we have modified the training
# procedure: we use "partial_fit" in order to present all sequences to the
# ESN independently of each other. The function "partial_fit" is part of
# the scikit-learn API. We have added one optional argument
# "update_output_weights". By default, it is True and thus, after feeding
# one sequence through the ESN, the output weights are computed.
#
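A minimal sketch of the grid-search loop these comments describe, under the assumption that the truncated dictionary above is named param_grid, that the data are lists of sequences X_train/y_train and X_test/y_test, and that n_classes holds the number of classes (none of these names appear in the original snippet):

import numpy as np
from sklearn.base import clone
from sklearn.model_selection import ParameterGrid


def frame_error_rate(esn, X_seqs, y_seqs):
    # Fraction of misclassified frames over all sequences.
    errors = sum(np.sum(esn.predict(X_seq) != y_seq)
                 for X_seq, y_seq in zip(X_seqs, y_seqs))
    frames = sum(len(y_seq) for y_seq in y_seqs)
    return errors / frames


for params in ParameterGrid(param_grid):
    esn = clone(base_esn).set_params(**params)
    # Present each training sequence to the ESN independently of the others.
    for X_seq, y_seq in zip(X_train, y_train):
        esn.partial_fit(X_seq, y_seq, classes=np.arange(n_classes))
    print(params,
          frame_error_rate(esn, X_train, y_train),
          frame_error_rate(esn, X_test, y_test))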
Code example #10
File: test_esn.py  Project: TUD-STKS/PyRCN
def test_esn_classifier_not_fitted() -> None:
    X, y = load_digits(return_X_y=True, as_sequence=True)
    with pytest.raises(NotFittedError):
        ESNClassifier(hidden_layer_size=50, verbose=True).predict(X)
Code example #11
File: test_esn.py  Project: TUD-STKS/PyRCN
def test_esn_get_params() -> None:
    print('\ntest_esn_get_params():')
    esn = ESNClassifier()
    esn_params = esn.get_params()
    print(esn_params)
Code example #12
                }
kwargs_step2 = {'n_iter': 50, 'random_state': 42, 'verbose': 1, 'n_jobs': -1,
                'scoring': make_scorer(mean_squared_error,
                                       greater_is_better=False,
                                       needs_proba=True)
                }
kwargs_step3 = {'verbose': 1, 'n_jobs': -1,
                'scoring': make_scorer(mean_squared_error,
                                       greater_is_better=False,
                                       needs_proba=True)
                }
kwargs_step4 = {'n_iter': 50, 'random_state': 42, 'verbose': 1, 'n_jobs': -1,
                'scoring': make_scorer(mean_squared_error,
                                       greater_is_better=False,
                                       needs_proba=True)
                }

searches = [('step1', RandomizedSearchCV, step1_esn_params, kwargs_step1),
            ('step2', RandomizedSearchCV, step2_esn_params, kwargs_step2),
            ('step3', GridSearchCV, step3_esn_params, kwargs_step3),
            ('step4', RandomizedSearchCV, step4_esn_params, kwargs_step4)]

base_esn = ESNClassifier(**initially_fixed_params)
base_esn.fit(X_train, y_train)
try:
    sequential_search = load("../sequential_search_arab.joblib")
except FileNotFoundError:
    sequential_search = SequentialSearchCV(
        base_esn, searches=searches).fit(X_train, y_train)
    dump(sequential_search, "../sequential_search_arab.joblib")
Code example #13
File: PyRCN_Intro.py  Project: TUD-STKS/PyRCN
print("Shape of the first digit: {0}".format(X_te[0].shape))

# These parameters were optimized using SequentialSearchCV
esn_params = {
    'input_scaling': 0.05077514155476392,
    'spectral_radius': 1.1817858863764836,
    'input_activation': 'identity',
    'k_in': 5,
    'bias_scaling': 1.6045393364745582,
    'reservoir_activation': 'tanh',
    'leakage': 0.03470266988650412,
    'k_rec': 10,
    'alpha': 3.0786517836196185e-05,
    'decision_strategy': "winner_takes_all"
}

b_esn = ESNClassifier(**esn_params)

param_grid = {
    'hidden_layer_size': [50, 100, 200, 400, 500],
    'bidirectional': [False, True]
}

for params in ParameterGrid(param_grid):
    esn_cv = cross_validate(clone(b_esn).set_params(**params),
                            X=X_tr,
                            y=y_tr,
                            scoring=make_scorer(accuracy_score))
    esn = clone(b_esn).set_params(**params).fit(X_tr, y_tr, n_jobs=-1)
    acc_score = accuracy_score(y_te, esn.predict(X_te))
Code example #14
}
kwargs_step3 = {
    'verbose': 1, 'n_jobs': -1, 'scoring': make_scorer(accuracy_score)
}
kwargs_step4 = {
    'n_iter': 50, 'random_state': 42, 'verbose': 1, 'n_jobs': -1,
    'scoring': make_scorer(accuracy_score)
}

searches = [('step1', RandomizedSearchCV, step1_esn_params, kwargs_step1),
            ('step2', RandomizedSearchCV, step2_esn_params, kwargs_step2),
            ('step3', GridSearchCV, step3_esn_params, kwargs_step3),
            ('step4', RandomizedSearchCV, step4_esn_params, kwargs_step4)]

base_km_esn = ESNClassifier(
    input_to_node=PredefinedWeightsInputToNode(
        predefined_input_weights=w_in.T),
    **initially_fixed_params)

try:
    sequential_search = load("../sequential_search_RICSyN2015_km_large.joblib")
except FileNotFoundError:
    sequential_search = SequentialSearchCV(base_km_esn, searches=searches).fit(
        X_train, y_train)
    dump(sequential_search, "../sequential_search_RICSyN2015_km_large.joblib")

base_esn = clone(sequential_search.best_estimator_)

param_grid = {
    'hidden_layer_size': [50, 100, 200, 400, 800, 1600],
    'random_state': range(1, 11)
}
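The predefined input weights w_in used above are not defined in this snippet. One common way to obtain them, in the spirit of PyRCN's k-means ESN examples, is to cluster the concatenated training frames and use the normalized cluster centres; a sketch under that assumption (n_clusters and the MiniBatchKMeans settings are illustrative, not taken from the original file):

import numpy as np
from sklearn.cluster import MiniBatchKMeans

n_clusters = 200  # assumed; would normally match the reservoir size
kmeans = MiniBatchKMeans(n_clusters=n_clusters, random_state=42)
kmeans.fit(np.concatenate(X_train))
# Normalize each cluster centre to unit length; after transposing, each
# centre becomes one column of the input weight matrix, and w_in.T is
# passed as predefined_input_weights as in the snippet above.
w_in = np.divide(kmeans.cluster_centers_,
                 np.linalg.norm(kmeans.cluster_centers_, axis=1)[:, None])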
Code example #15
}
kwargs_step4 = {
    'n_iter': 50,
    'random_state': 42,
    'verbose': 1,
    'n_jobs': -1,
    'scoring': scoring
}

searches = [('step1', RandomizedSearchCV, step1_esn_params, kwargs_step1),
            ('step2', RandomizedSearchCV, step2_esn_params, kwargs_step2),
            ('step3', RandomizedSearchCV, step3_esn_params, kwargs_step3),
            ('step4', RandomizedSearchCV, step4_esn_params, kwargs_step4)]

base_esn = ESNClassifier(
    input_to_node=input_to_node,
    node_to_node=node_to_node).set_params(**initially_fixed_params)

try:
    sequential_search = load(
        "../sequential_search_speech_timit_km_esn_attention_0_1_rec_attention"
        ".joblib")
except FileNotFoundError:
    sequential_search = SequentialSearchCV(base_esn, searches=searches).fit(
        X_train, y_train)
    dump(
        sequential_search,
        "../sequential_search_speech_timit_km_esn_attention_0_1_rec_attention"
        ".joblib")
print(sequential_search.all_best_params_, sequential_search.all_best_score_)