Example #1
def test_fast_spectral_shape(N, expected):
    # invalid arguments are expected to raise; valid arguments must yield the expected shape
    if expected is Exception:
        with pytest.raises(expected):
            fast_spectral_initialization(N)
    else:
        W = fast_spectral_initialization(N)
        assert W.shape == expected
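The excerpt above is driven by a pytest parametrization that is not shown. A minimal sketch of the missing decorator and imports, with the (N, expected) pairs invented for illustration (the module path follows Example #6), could sit directly above the test function:

import pytest
from reservoirpy.mat_gen import fast_spectral_initialization

# hypothetical (N, expected) pairs: a valid size should yield an N x N matrix,
# an invalid size should raise
@pytest.mark.parametrize("N,expected", [
    (50, (50, 50)),
    (-1, Exception),
])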
Example #2
def test_reproducibility_fsi():

    seed0 = default_rng(78946312)
    W0 = fast_spectral_initialization(N=100, sr=1.2, proba=0.4,
                                      seed=seed0).toarray()

    seed1 = default_rng(78946312)
    W1 = fast_spectral_initialization(N=100, sr=1.2, proba=0.4,
                                      seed=seed1).toarray()

    seed2 = default_rng(6135435)
    W2 = fast_spectral_initialization(N=100, sr=1.2, proba=0.4,
                                      seed=seed2).toarray()

    # same seed: identical matrices; different seed: the comparison must fail
    assert_array_almost_equal(W0, W1)
    assert_raises(AssertionError, assert_array_almost_equal, W0, W2)
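To run this snippet on its own, it needs the NumPy helpers it references and the initializer itself; the module path for fast_spectral_initialization follows Example #6:

from numpy.random import default_rng
from numpy.testing import assert_array_almost_equal, assert_raises

from reservoirpy.mat_gen import fast_spectral_initialization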
Example #3
def init_esn(training_type):

    # Common parameters
    n_inputs = 1
    input_bias = True  # add a constant bias input set to 1
    n_outputs = 1
    n_reservoir = 300  # number of recurrent units
    leak_rate = 0.6  # leaking rate (= 1 / time constant of the neurons)
    spectral_radius = 0.5  # spectral radius of the recurrent weight matrix
    input_scaling = 1.  # scaling of the input weight matrix
    regularization_coef = 0.02  # regularization coefficient (ridge / online learning)

    W = fast_spectral_initialization(n_reservoir, spectral_radius=spectral_radius)
    Win = generate_input_weights(n_reservoir, n_inputs, input_scaling=input_scaling, input_bias=input_bias)

    if training_type == 'online':
        Wout = np.zeros((n_outputs, n_reservoir + 1))
        esn = ESNOnline(leak_rate, W, Win, Wout,
                        alpha_coef=regularization_coef, input_bias=input_bias)
    elif training_type == 'online_feedback':
        Wout = np.zeros((n_outputs, n_reservoir + 1))
        Wfb = generate_input_weights(n_reservoir, n_outputs, input_bias=False)
        fbfunc = lambda x: x
        esn = ESNOnline(leak_rate, W, Win, Wout,
                        alpha_coef=regularization_coef, input_bias=input_bias, Wfb=Wfb, fbfunc=fbfunc)
    elif training_type == 'offline':
        esn = ESN(leak_rate, W, Win, 
                  input_bias=input_bias, ridge=regularization_coef)
    else:
        raise RuntimeError(f"training_type = [{training_type}] unknown")
    
    return esn
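A short usage sketch for init_esn, reusing the train/run calls shown in Example #6; the toy sine-wave data and the one-step-ahead prediction task are made up for illustration:

import numpy as np

# toy task (illustrative only): predict the next sample of a sine wave
t = np.linspace(0, 20 * np.pi, 2000)
signal = np.sin(t).reshape(-1, 1)
x, y = signal[:-1], signal[1:]
x_train, y_train = x[:1500], y[:1500]
x_test, y_test = x[1500:], y[1500:]

esn = init_esn('offline')
esn.train(inputs=[x_train], teachers=[y_train], wash_nr_time_step=20, verbose=False)
outputs, _ = esn.run(inputs=[x_test], verbose=False)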
Example #4
def test_fast_spectral_features(sr, proba):
    W = fast_spectral_initialization(1000, sr=sr, proba=proba, seed=1234)

    # estimate the spectral radius as the largest eigenvalue modulus
    if sparse.issparse(W):
        rho = max(
            abs(
                sparse.linalg.eigs(
                    W,
                    k=1,
                    which="LM",
                    maxiter=20 * W.shape[0],
                    return_eigenvectors=False,
                )))
    else:
        rho = max(abs(linalg.eig(W)[0]))

    # the spectral radius should be 0 for an empty matrix, and close to sr otherwise
    if proba == 0.0:
        assert_allclose(rho, 0.0)
    else:
        assert_allclose(rho, sr, rtol=1e-1)

    # fully connected matrices (proba ~ 1) should be returned in dense format
    if 1.0 - proba < 1e-5:
        assert not sparse.issparse(W)
    # the connection density should match the requested probability
    if sparse.issparse(W):
        assert_allclose(np.count_nonzero(W.toarray()) / W.toarray().size,
                        proba,
                        rtol=1e-1)
    else:
        assert_allclose(np.count_nonzero(W) / W.size, proba, rtol=1e-1)
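As in Example #1, the (sr, proba) grid comes from a parametrize decorator that is not part of the excerpt. A hypothetical grid covering the branches exercised above (empty, sparse and fully dense matrices) could be:

import pytest

# hypothetical values: proba == 0 gives an empty matrix, proba == 1 a dense one,
# intermediate values a sparse one
@pytest.mark.parametrize("sr,proba", [
    (0.5, 0.0),
    (1.2, 0.1),
    (0.9, 1.0),
])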
Example #5
def test_fast_spectral_features(sr, proba):
    W = fast_spectral_initialization(1000, spectral_radius=sr,
                                     proba=proba, seed=1234)

    rho = max(abs(sparse.linalg.eigs(W, k=1, which='LM', return_eigenvectors=False)))
    assert_almost_equal(rho, sr, decimal=0)

    if 1. - proba < 1e-5:
        assert not sparse.issparse(W)
    if sparse.issparse(W):
        assert_almost_equal(np.sum(W.toarray() != 0.0) / W.toarray().size,
                            proba, decimal=1)
    else:
        assert_almost_equal(np.sum(W != 0.0) / W.size, proba, decimal=1)
Example #6
    def objective(dataset, config, *, iss, N, sr, leak, ridge):

        # unpack train and test data, with target values.
        train_data, test_data = dataset
        x_train, y_train = train_data
        x_test, y_test = test_data

        x_train, y_train = x_train.reshape(-1, 1), y_train.reshape(-1, 1)
        x_test, y_test = x_test.reshape(-1, 1), y_test.reshape(-1, 1)

        nb_features = x_train.shape[1]

        instances = config["instances_per_trial"]

        losses = []
        rmse = []
        for n in range(instances):
            # builds an ESN given the input parameters
            W = mat_gen.fast_spectral_initialization(N=N, spectral_radius=sr)

            Win = mat_gen.generate_input_weights(nbr_neuron=N,
                                                 dim_input=nb_features,
                                                 input_bias=True,
                                                 input_scaling=iss)

            reservoir = ESN(lr=leak,
                            W=W,
                            Win=Win,
                            input_bias=True,
                            ridge=ridge)

            # train and test the model
            reservoir.train(inputs=[x_train],
                            teachers=[y_train],
                            wash_nr_time_step=20,
                            verbose=False,
                            workers=1)

            outputs, _ = reservoir.run(inputs=[x_test],
                                       verbose=False,
                                       workers=1)

            losses.append(metrics.mean_squared_error(outputs[0], y_test))
            rmse.append(
                metrics.mean_squared_error(outputs[0], y_test, squared=False))

        # returns a dictionary of metrics. The 'loss' key is mandatory when
        # using Hyperopt.
        return {'loss': np.mean(losses), 'rmse': np.mean(rmse)}
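This objective follows the (dataset, config, *, ...) signature expected by reservoirpy's hyperparameter-search helper. The sketch below shows how it might be wired up; the reservoirpy.hyper.research call, the JSON config layout and the search-space values are assumptions based on that helper's tutorial, not part of the excerpt:

import json
from reservoirpy.hyper import research  # assumed helper for Hyperopt-based searches

# illustrative config: 'instances_per_trial' is the only key read by the
# objective above; the other keys follow the helper's assumed JSON layout
hyperopt_config = {
    "exp": "esn_search",        # experiment name
    "hp_max_evals": 100,        # number of trials
    "hp_method": "random",      # search strategy
    "seed": 42,
    "instances_per_trial": 3,
    "hp_space": {
        "N": ["choice", 300],
        "sr": ["loguniform", 1e-2, 10],
        "leak": ["loguniform", 1e-3, 1],
        "iss": ["choice", 1.0],
        "ridge": ["loguniform", 1e-8, 1e1],
    },
}

with open("esn_search.config.json", "w") as f:
    json.dump(hyperopt_config, f)

# dataset = ((x_train, y_train), (x_test, y_test))
# best = research(objective, dataset, "esn_search.config.json", report_path=".")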
Example #7
def test_fast_spectral_features_exception(sr, proba):
    with pytest.raises(Exception):
        fast_spectral_initialization(100, sr=sr, proba=proba)
Example #8
def test_fast_spectral_features_exception(sr, proba):
    with pytest.raises(Exception):
        fast_spectral_initialization(100, sr=sr, proba=proba)

    with pytest.raises(ValueError):
        fast_spectral_initialization(100, input_scaling=10.0, proba=proba)
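The invalid arguments used by these exception tests are again supplied by a parametrize decorator that is not shown; a sketch with hypothetical out-of-range values could be:

import pytest

# hypothetical invalid values: a connection probability outside [0, 1], or a
# non-numeric spectral radius, is expected to make the initializer raise
@pytest.mark.parametrize("sr,proba", [
    (0.5, -0.5),
    (0.5, 5.0),
    ("foo", 0.1),
])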