Example #1
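The snippets below are student solutions to the same scikit-learn exercise sheet and share one set of imports and helpers. The following sketch reconstructs that scaffolding; the two course helpers plot_learned_function and calculate_mse are assumptions inferred from how the snippets call them, not part of scikit-learn:

import random

import numpy as np
import matplotlib.pyplot as plt
from sklearn.neural_network import MLPRegressor
from sklearn.model_selection import train_test_split
from sklearn.metrics import mean_squared_error


def calculate_mse(model, x, y):
    """Assumed course helper: mean squared error of the model on (x, y)."""
    return mean_squared_error(y, model.predict(x))


def plot_learned_function(n_hidden, x_train, y_train, y_pred_train,
                          x_test, y_test, y_pred_test):
    """Minimal stand-in for the course plotting helper (assumption)."""
    plt.figure()
    plt.plot(x_train, y_train, 'x', label='train targets')
    plt.plot(x_test, y_test, '.', label='test targets')
    plt.plot(x_test, y_pred_test, 'o', markersize=3, label='test predictions')
    plt.title('Learned function, %d hidden neurons' % n_hidden)
    plt.legend()
    plt.show()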
def ex_1_1_a(x_train, x_test, y_train, y_test):
    """
    Solution for exercise 1.1 a)
    Remember to set alpha to 0 when initializing the model
    :param x_train: The training dataset
    :param x_test: The testing dataset
    :param y_train: The training targets
    :param y_test: The testing targets
    :return:
    """

    params_n_h = [2, 8, 40, 100]
    for n_h in params_n_h:
        nn = MLPRegressor(solver='lbfgs',
                          max_iter=200,
                          activation='logistic',
                          hidden_layer_sizes=(n_h, ),
                          alpha=0,
                          verbose=False,
                          random_state=0)
        # random_state=0 makes each run reproducible
        nn.fit(x_train, y_train)
        y_pred_train = nn.predict(x_train)
        y_pred_test = nn.predict(x_test)
        plot_learned_function(n_h, x_train, y_train, y_pred_train, x_test,
                              y_test, y_pred_test)

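A hypothetical driver for the snippet above, assuming a 1-D regression task like the course's noisy sine data (MLPRegressor expects a 2-D feature matrix, hence the reshape):

rng = np.random.RandomState(0)
x = np.linspace(-2, 2, 200).reshape(-1, 1)          # 2-D feature matrix
y = np.sin(2 * np.pi * x).ravel() + 0.1 * rng.randn(200)
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.5,
                                                    random_state=0)
ex_1_1_a(x_train, x_test, y_train, y_test)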
Example #2
def ex_1_1_a(x_train, x_test, y_train, y_test):
    """
    Solution for exercise 1.1 a)
    Remember to set alpha to 0 when initializing the model.
    :param x_train: The training dataset
    :param x_test: The testing dataset
    :param y_train: The training targets
    :param y_test: The testing targets
    :return:
    """

    nh = 50
    nn = MLPRegressor(activation='logistic',
                      solver='lbfgs',
                      max_iter=5000,
                      alpha=0,
                      hidden_layer_sizes=(nh, ))
    nn.fit(x_train, y_train)
    y_pred_train = nn.predict(x_train)

    y_pred_test = nn.predict(x_test)
    plot_learned_function(nh, x_train, y_train, y_pred_train, x_test, y_test,
                          y_pred_test)
Example #3
def ex_1_1_a(x_train, x_test, y_train, y_test):
    """
    Solution for exercise 1.1 a)
    Remember to set alpha to 0 when initializing the model
    :param x_train: The training dataset
    :param x_test: The testing dataset
    :param y_train: The training targets
    :param y_test: The testing targets
    :return:
    """

    # train networks with 2, 8 and 40 hidden neurons
    n_h = [2, 8, 40]

    for i in n_h:
        # a fixed random_state gives the same weight initialization (and thus the same output) on every run
        nn = MLPRegressor(activation='logistic', solver='lbfgs', alpha=0.0, hidden_layer_sizes=(i,), max_iter=200,
                          random_state=200)
        nn.fit(x_train, y_train)

        pred_train_y = nn.predict(x_train)
        pred_test_y = nn.predict(x_test)

        plot_learned_function(i, x_train, y_train, pred_train_y, x_test, y_test, pred_test_y)
Example #4
def ex_1_1_a(x_train, x_test, y_train, y_test):
    """
    Solution for exercise 1.1 a)
    Remember to set alpha to 0 when initializing the model
    :param x_train: The training dataset
    :param x_test: The testing dataset
    :param y_train: The training targets
    :param y_test: The testing targets
    :return:
    """

    regressor = MLPRegressor(
        hidden_layer_sizes=(40, ),  # also run with 2 and 8
        solver="lbfgs",
        activation="logistic",
        alpha=0.0,
        max_iter=200,
    )
    regressor.fit(x_train, y_train)
    # the same call was repeated with 2 and 8 hidden neurons
    plot_learned_function(40, x_train, y_train, regressor.predict(x_train),
                          x_test, y_test, regressor.predict(x_test))
Example #5
def ex_1_1_a(x_train, x_test, y_train, y_test):
    """
    Solution for exercise 1.1 a)
    Remember to set alpha to 0 when initializing the model
    :param x_train: The training dataset
    :param x_test: The testing dataset
    :param y_train: The training targets
    :param y_test: The testing targets
    :return:
    """

    n_hidden = 5  # 2, 5, 50
    reg = MLPRegressor(hidden_layer_sizes=(n_hidden, ),
                       activation='logistic',
                       solver='lbfgs',
                       alpha=0)

    reg.fit(x_train, y_train)
    y_pred_test = reg.predict(x_test)
    y_pred_train = reg.predict(x_train)

    plot_learned_function(n_hidden, x_train, y_train, y_pred_train, x_test,
                          y_test, y_pred_test)
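One detail worth flagging here: every entry of hidden_layer_sizes adds a hidden layer, so a stray second element silently builds a deeper network than the plot label claims:

MLPRegressor(hidden_layer_sizes=(5,))    # one hidden layer with 5 units
MLPRegressor(hidden_layer_sizes=(5, 8))  # two hidden layers: 5 units, then 8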
Example #6
def ex_1_1_c(x_train, x_test, y_train, y_test):
    """
    Solution for exercise 1.1 c)
    Remember to set alpha to 0 when initializing the model
    Use max_iter = 10000 and tol=1e-8
    :param x_train: The training dataset
    :param x_test: The testing dataset
    :param y_train: The training targets
    :param y_test: The testing targets
    :return:
    """

    n_h = [1, 2, 3, 4, 6, 8, 12, 20, 40]
    train_array = np.zeros((len(n_h), 10))
    test_array = np.zeros((len(n_h), 10))

    for index, n in enumerate(n_h):
        for i in range(10):
            nn = MLPRegressor(tol=1e-8, activation='logistic', solver='lbfgs', alpha=0.0,
                              hidden_layer_sizes=(n,),
                              max_iter=10000, random_state=i)

            nn.fit(x_train, y_train)
            train_array[index][i] = calculate_mse(nn, x_train, y_train)
            test_array[index][i] = calculate_mse(nn, x_test, y_test)

            y_pred_train = nn.predict(x_train)
            y_pred_test = nn.predict(x_test)

            if n == 1:
                plot_learned_function(n, x_train, y_train, y_pred_train, x_test, y_test, y_pred_test)

    plot_mse_vs_neurons(train_array, test_array, n_h)
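To summarize the ten restarts per network size (for example, for error bars), the arrays built above reduce directly; a short sketch reusing train_array/test_array and n_h from the snippet:

mean_test_mse = test_array.mean(axis=1)   # one value per entry of n_h
std_test_mse = test_array.std(axis=1)
for size, m, s in zip(n_h, mean_test_mse, std_test_mse):
    print('n_h=%2d  test MSE %.4f +/- %.4f' % (size, m, s))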
Example #7
def ex_1_1_a(x_train, x_test, y_train, y_test):
    """
    Solution for exercise 1.1 a)
    Remember to set alpha to 0 when initializing the model
    :param x_train: The training dataset
    :param x_test: The testing dataset
    :param y_train: The training targets
    :param y_test: The testing targets
    :return:
    """

    n_hidden = 40
    regressor = MLPRegressor(hidden_layer_sizes=(n_hidden, ),
                             activation='logistic',
                             solver='lbfgs',
                             alpha=0,
                             max_iter=200)
    regressor.fit(x_train, y_train)
    y_pred_train = regressor.predict(x_train)
    y_pred_test = regressor.predict(x_test)

    plot_learned_function(n_hidden, x_train, y_train, y_pred_train, x_test,
                          y_test, y_pred_test)

Example #8
def ex_1_1_c(x_train, x_test, y_train, y_test):
    """
    Solution for exercise 1.1 c)
    Remember to set alpha to 0 when initializing the model
    Use max_iter = 10000 and tol=1e-8
    :param x_train: The training dataset
    :param x_test: The testing dataset
    :param y_train: The training targets
    :param y_test: The testing targets
    :return:
    """

    hidden_neurons_totest = np.array([1, 2, 3, 4, 6, 8, 12, 20, 40])
    dim1 = hidden_neurons_totest.shape[0]
    mse_test_matrix = np.zeros((dim1, 10))
    mse_train_matrix = np.zeros((dim1, 10))
    k = 0
    for i in hidden_neurons_totest:
        n_hidden_neurons = i
        for j in range(10):
            nn = MLPRegressor(activation='logistic', solver='lbfgs', max_iter=10000, tol=1e-8,
                              hidden_layer_sizes=(n_hidden_neurons,), alpha=0, random_state=j)
            nn.fit(x_train, y_train)
            predictions_test = nn.predict(x_test)
            mse_test_matrix[k, j] = calculate_mse(nn, x_test, y_test)
            mse_train_matrix[k, j] = calculate_mse(nn, x_train, y_train)
        k += 1
    plot_mse_vs_neurons(mse_train_matrix, mse_test_matrix, hidden_neurons_totest)
    plt.show()
    predictions_train = nn.predict(x_train)
    plot_learned_function(40, x_train, y_train, predictions_train, x_test, y_test, predictions_test)
    plt.show()
Example #9
def ex_1_1_d(x_train, x_test, y_train, y_test):
    """
    Solution for exercise 1.1 d)
    Remember to set alpha to 0 when initializing the model
    :param x_train: The training dataset
    :param x_test: The testing dataset
    :param y_train: The training targets
    :param y_test: The testing targets
    :return:
    """

    N = 500
    n_hidden = [2, 5, 50]
    mse_train = np.zeros([np.size(n_hidden), N])
    mse_test = np.zeros([np.size(n_hidden), N])

    for j in range(np.size(n_hidden)):
        reg = MLPRegressor(hidden_layer_sizes=(n_hidden[j], ),
                           activation='logistic',
                           solver='lbfgs',
                           alpha=0,
                           random_state=0,
                           warm_start=True,
                           max_iter=1)
        for r in range(N):
            reg.fit(x_train, y_train)

            mse_train[j, r] = calculate_mse(reg, x_train, y_train)
            mse_test[j, r] = calculate_mse(reg, x_test, y_test)

    # PLOT
    plot_mse_vs_neurons(mse_train, mse_test, n_hidden)
    ind = np.unravel_index(np.argmin(mse_test), mse_test.shape)
    # refit the best architecture from scratch
    # (is there a way to avoid re-initializing the MLPRegressor? unsure whether another approach works better)
    reg = MLPRegressor(hidden_layer_sizes=(n_hidden[ind[0]], ),
                       activation='logistic',
                       solver='lbfgs',
                       alpha=0,
                       random_state=random.randint(0, 1000),
                       max_iter=500)
    reg.fit(x_train, y_train)

    y_pred_test = reg.predict(x_test)
    y_pred_train = reg.predict(x_train)

    plot_learned_function(n_hidden[ind[0]], x_train, y_train, y_pred_train,
                          x_test, y_test, y_pred_test)
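The combination of warm_start=True and max_iter=1 above is what turns repeated fit calls into a per-iteration learning curve: each call resumes from the previous weights and runs one more lbfgs iteration. Condensed to its essentials (assuming x_train/y_train as above; scikit-learn will emit convergence warnings, which are expected here):

reg = MLPRegressor(hidden_layer_sizes=(5, ), activation='logistic',
                   solver='lbfgs', alpha=0, random_state=0,
                   warm_start=True, max_iter=1)
mse_curve = []
for _ in range(100):
    reg.fit(x_train, y_train)                       # one more iteration
    mse_curve.append(calculate_mse(reg, x_train, y_train))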
Example #10
def ex_1_1_c(x_train, x_test, y_train, y_test):
    """
    Solution for exercise 1.1 c)
    Remember to set alpha to 0 when initializing the model
    Use max_iter = 10000 and tol=1e-8
    :param x_train: The training dataset
    :param x_test: The testing dataset
    :param y_train: The training targets
    :param y_test: The testing targets
    :return:
    """

    # declaring variables used in the MLP regressor
    hidden_layers = np.array([1, 2, 3, 4, 6, 8, 12, 20, 40])
    n_seeds = 10
    activation_mode = 'logistic'
    solver_mode = 'lbfgs'
    alpha = 0
    max_iter = 10000
    tol = 1e-8

    train_mse = np.zeros((hidden_layers.size, n_seeds))
    test_mse = np.zeros((hidden_layers.size, n_seeds))

    for m in range(n_seeds):
        for n in range(hidden_layers.size):
            nn = MLPRegressor(hidden_layer_sizes=(hidden_layers[n], ),
                              activation=activation_mode,
                              solver=solver_mode,
                              alpha=alpha,
                              max_iter=max_iter,
                              random_state=m,
                              tol=tol)
            nn.fit(x_train, y_train)
            # record the train and test MSE for every random seed
            train_mse[n][m] = calculate_mse(nn, x_train, y_train)
            test_mse[n][m] = calculate_mse(nn, x_test, y_test)

    plot_mse_vs_neurons(train_mse, test_mse, hidden_layers)

    y_test_pred = nn.predict(x_test)
    y_train_pred = nn.predict(x_train)

    plot_learned_function(40, x_train, y_train, y_train_pred, x_test, y_test,
                          y_test_pred)

Example #11
def ex_1_1_c(x_train, x_test, y_train, y_test):
    """
    Solution for exercise 1.1 c)
    Remember to set alpha to 0 when initializing the model
    :param x_train: The training dataset
    :param x_test: The testing dataset
    :param y_train: The training targets
    :param y_test: The testing targets
    :return:
    """

    N = 10
    n_hidden = [1, 2, 3, 4, 6, 8, 12, 20, 40]
    mse_train = np.zeros([np.size(n_hidden), N])
    mse_test = np.zeros([np.size(n_hidden), N])

    for j in range(np.size(n_hidden)):
        for r in range(N):
            # re-create the regressor with a fresh seed for every run;
            # a fixed seed would make all N fits start from the same weights
            reg = MLPRegressor(hidden_layer_sizes=(n_hidden[j], ),
                               activation='logistic',
                               solver='lbfgs',
                               alpha=0,
                               random_state=random.randint(0, 1000))
            reg.fit(x_train, y_train)

            mse_train[j, r] = calculate_mse(reg, x_train, y_train)
            mse_test[j, r] = calculate_mse(reg, x_test, y_test)

    # PLOT
    plot_mse_vs_neurons(mse_train, mse_test, n_hidden)
    """
    mse_test_mean = np.mean(mse_test, axis=1) 
    ind = np.argmin(mse_test_mean)
    """
    ind = np.unravel_index(np.argmin(mse_test), mse_test.shape)

    reg = MLPRegressor(hidden_layer_sizes=(n_hidden[ind[0]], ),
                       activation='logistic',
                       solver='lbfgs',
                       alpha=0,
                       random_state=random.randint(0, 1000))

    reg.fit(x_train, y_train)
    y_pred_test = reg.predict(x_test)
    y_pred_train = reg.predict(x_train)
    plot_learned_function(n_hidden[ind[0]], x_train, y_train, y_pred_train,
                          x_test, y_test, y_pred_test)
Example #12
def ex_1_1_c(x_train, x_test, y_train, y_test):
    """
    Solution for exercise 1.1 c)
    Remember to set alpha to 0 when initializing the model
    :param x_train: The training dataset
    :param x_test: The testing dataset
    :param y_train: The training targets
    :param y_test: The testing targets
    :return:
    """

    n_seeds = 10
    n_neur = [1, 2, 3, 4, 6, 8, 12, 20, 40]
    mse_train = np.zeros([np.size(n_neur), n_seeds])
    mse_test = np.zeros([np.size(n_neur), n_seeds])

    for h in range(np.size(n_neur)):
        for s in range(n_seeds):
            seed = np.random.randint(100)
            reg = MLPRegressor(hidden_layer_sizes=(n_neur[h], ),
                               max_iter=5000,
                               activation='logistic',
                               solver='lbfgs',
                               alpha=0,
                               random_state=seed)

            reg.fit(x_train, y_train)
            mse_train[h, s] = calculate_mse(reg, x_train, y_train)
            mse_test[h, s] = calculate_mse(reg, x_test, y_test)

    plot_mse_vs_neurons(mse_train, mse_test, n_neur)
    sum_mse = mse_test.sum(axis=1)
    ind_min = sum_mse.argmin()

    reg = MLPRegressor(hidden_layer_sizes=(n_neur[ind_min], ),
                       max_iter=5000,
                       activation='logistic',
                       solver='lbfgs',
                       alpha=0,
                       random_state=np.random.randint(100))

    reg.fit(x_train, y_train)
    y_pred_test = reg.predict(x_test)
    y_pred_train = reg.predict(x_train)
    plot_learned_function(n_neur[ind_min], x_train, y_train, y_pred_train,
                          x_test, y_test, y_pred_test)
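Selecting the winner by the summed test MSE, as above, is equivalent to selecting by the mean, since the number of seeds is a constant factor:

ind_min = mse_test.mean(axis=1).argmin()  # picks the same index as sum(axis=1).argmin()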
Example #13
def ex_1_1_c(x_train, x_test, y_train, y_test):
    """
    Solution for exercise 1.1 c)
    Remember to set alpha to 0 when initializing the model.
    :param x_train: The training dataset
    :param x_test: The testing dataset
    :param y_train: The training targets
    :param y_test: The testing targets
    :return:
    """

    nh = [1, 2, 4, 6, 8, 12, 20, 40]
    mse_all_train = np.zeros(shape=(len(nh), 10))
    mse_all_test = np.zeros(shape=(len(nh), 10))

    for i in range(10):
        for j in range(len(nh)):
            seed = np.random.randint(1, 100)
            nn = MLPRegressor(activation='logistic',
                              solver='lbfgs',
                              max_iter=5000,
                              alpha=0,
                              hidden_layer_sizes=(nh[j], ),
                              random_state=seed)
            nn.fit(x_train, y_train)
            mse_train = calculate_mse(nn, x_train, y_train)
            mse_test = calculate_mse(nn, x_test, y_test)
            mse_all_train[j][i] = mse_train
            mse_all_test[j][i] = mse_test
    plot_mse_vs_neurons(mse_all_train, mse_all_test, nh)

    nn = MLPRegressor(activation='logistic',
                      solver='lbfgs',
                      max_iter=5000,
                      alpha=0,
                      hidden_layer_sizes=(nh[2], ))
    nn.fit(x_train, y_train)
    y_pred_train = nn.predict(x_train)

    y_pred_test = nn.predict(x_test)

    plot_learned_function(nh[2], x_train, y_train, y_pred_train, x_test,
                          y_test, y_pred_test)

Example #14
def ex_1_1_c(x_train, x_test, y_train, y_test):
    """
    Solution for exercise 1.1 c)
    Remember to set alpha to 0 when initializing the model
    Use max_iter = 10000 and tol=1e-8
    :param x_train: The training dataset
    :param x_test: The testing dataset
    :param y_train: The training targets
    :param y_test: The testing targets
    :return:
    """
    n_hidden_neurons_list = [1, 2, 3, 4, 6, 8, 12, 20, 40]
    seeds = 10
    mse = np.zeros((len(n_hidden_neurons_list), seeds, 2))

    for i in range(len(n_hidden_neurons_list)):
        for j in range(seeds):
            regressor = MLPRegressor(
                hidden_layer_sizes=(n_hidden_neurons_list[i], ),
                activation='logistic',
                solver='lbfgs',
                alpha=0,
                max_iter=10000,
                random_state=j,
                tol=1e-8)
            regressor.fit(x_train, y_train)
            # mse shape: [train_mses, test_mses]
            mse[i][j] = calculate_mse(regressor, [x_train, x_test],
                                      [y_train, y_test])
    plot_mse_vs_neurons(mse[:, :, 0], mse[:, :, 1], n_hidden_neurons_list)

    n_hidden = 40
    regressor = MLPRegressor(hidden_layer_sizes=(n_hidden, ),
                             activation='logistic',
                             solver='lbfgs',
                             alpha=0,
                             max_iter=10000,
                             tol=1e-8)
    regressor.fit(x_train, y_train)
    y_pred_train = regressor.predict(x_train)
    y_pred_test = regressor.predict(x_test)

    plot_learned_function(n_hidden, x_train, y_train, y_pred_train, x_test,
                          y_test, y_pred_test)
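Unlike the scalar usage elsewhere, this example calls calculate_mse with lists of datasets and stores the resulting pair. A variant matching that call style (the name calculate_mse_pair and its body are assumptions reconstructed from the call site):

def calculate_mse_pair(model, xs, ys):
    """Return one MSE per (x, y) pair, e.g. [train_mse, test_mse]."""
    return [np.mean((model.predict(x) - y) ** 2) for x, y in zip(xs, ys)]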
Example #15
def ex_1_1_a(x_train, x_test, y_train, y_test):
    """
    Solution for exercise 1.1 a)
    Remember to set alpha to 0 when initializing the model
    :param x_train: The training dataset
    :param x_test: The testing dataset
    :param y_train: The training targets
    :param y_test: The testing targets
    :return:
    """

    for i in [2, 8, 40]:
        n_hidden_neurons = i
        nn = MLPRegressor(activation='logistic', solver='lbfgs', max_iter=200, hidden_layer_sizes=(n_hidden_neurons,), alpha=0)
        nn.fit(x_train, y_train)
        predictions_train = nn.predict(x_train)
        predictions_test = nn.predict(x_test)
        plot_learned_function(n_hidden_neurons, x_train, y_train, predictions_train, x_test, y_test, predictions_test)
        plt.show()
Example #16
def ex_1_1_a(x_train, x_test, y_train, y_test):
    """
    Solution for exercise 1.1 a)
    Remember to set alpha to 0 when initializing the model
    :param x_train: The training dataset
    :param x_test: The testing dataset
    :param y_train: The training targets
    :param y_test: The testing targets
    :return:
    """
    n_hidden = 40
    trained_regressor = MLPRegressor(hidden_layer_sizes=(n_hidden, ),
                                     activation='logistic',
                                     solver='lbfgs',
                                     alpha=0,
                                     max_iter=200,
                                     random_state=1000)
    trained_regressor = trained_regressor.fit(x_train, y_train)
    y_pred_train = trained_regressor.predict(x_train)
    y_pred_test = trained_regressor.predict(x_test)
    plot_learned_function(n_hidden, x_train, y_train, y_pred_train, x_test,
                          y_test, y_pred_test)
Example #17
def ex_1_1_a(x_train, x_test, y_train, y_test):
    """
    Solution for exercise 1.1 a)
    Remember to set alpha to 0 when initializing the model
    :param x_train: The training dataset
    :param x_test: The testing dataset
    :param y_train: The training targets
    :param y_test: The testing targets
    :return:
    """

    # declaring hidden layer neurons 2, 8, 40
    hidden_layer_2 = 2
    hidden_layer_8 = 8
    hidden_layer_40 = 40

    # declaring variables used in MLP-Regressor
    activation_mode = 'logistic'
    solver_mode = 'lbfgs'
    alpha = 0
    max_iter = 200

    # declaring MLP-Regressor:
    nn_2 = MLPRegressor(hidden_layer_sizes=(hidden_layer_2, ),
                        activation=activation_mode,
                        solver=solver_mode,
                        alpha=alpha,
                        max_iter=max_iter)
    nn_8 = MLPRegressor(hidden_layer_sizes=(hidden_layer_8, ),
                        activation=activation_mode,
                        solver=solver_mode,
                        alpha=alpha,
                        max_iter=max_iter)
    nn_40 = MLPRegressor(hidden_layer_sizes=(hidden_layer_40, ),
                         activation=activation_mode,
                         solver=solver_mode,
                         alpha=alpha,
                         max_iter=max_iter)

    # train neural network using the regressor method fit
    nn_2.fit(x_train, y_train)
    nn_8.fit(x_train, y_train)
    nn_40.fit(x_train, y_train)

    # compute the output using the method predict
    y_test_pred_2 = nn_2.predict(x_test)
    y_train_pred_2 = nn_2.predict(x_train)
    y_test_pred_8 = nn_8.predict(x_test)
    y_train_pred_8 = nn_8.predict(x_train)
    y_test_pred_40 = nn_40.predict(x_test)
    y_train_pred_40 = nn_40.predict(x_train)

    # plotting learned function
    #def plot_learned_function(n_hidden, x_train, y_train, y_pred_train, x_test, y_test, y_pred_test):
    plot_learned_function(hidden_layer_2, x_train, y_train, y_train_pred_2,
                          x_test, y_test, y_test_pred_2)
    plot_learned_function(hidden_layer_8, x_train, y_train, y_train_pred_8,
                          x_test, y_test, y_test_pred_8)
    plot_learned_function(hidden_layer_40, x_train, y_train, y_train_pred_40,
                          x_test, y_test, y_test_pred_40)

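The three near-identical regressors above can be collapsed into a loop with no change in behavior; a behavior-preserving sketch:

for n_h in (2, 8, 40):
    nn = MLPRegressor(hidden_layer_sizes=(n_h, ), activation='logistic',
                      solver='lbfgs', alpha=0, max_iter=200)
    nn.fit(x_train, y_train)
    plot_learned_function(n_h, x_train, y_train, nn.predict(x_train),
                          x_test, y_test, nn.predict(x_test))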
Example #18
def ex_1_2_c(x_train, x_test, y_train, y_test):
    """
    Solution for exercise 1.2 c)
    :param x_train:
    :param x_test:
    :param y_train:
    :param y_test:
    :return:
    """

    total_iter = 10000
    epoch_iter = 20
    epochs = total_iter // epoch_iter
    n_neuro = 6
    n_seeds = 10
    """
    sequence = np.random.permutation(np.arange(0, np.size(y_train), 1))

    x_train = x_train[sequence]
    y_train = y_train[sequence]
    SIZE = int(np.ceil(np.size(y_train) / 3))

    x_val = x_train[:SIZE]
    y_val = y_train[:SIZE]

    x_train = x_train[SIZE:]
    y_train = y_train[SIZE:]
    """
    x_train, x_val, y_train, y_val = train_test_split(x_train,
                                                      y_train,
                                                      test_size=0.33)

    mse_train = np.zeros([n_seeds, epochs])
    mse_val = np.zeros([n_seeds, epochs])
    mse_test = np.zeros([n_seeds, epochs])

    seeds = np.zeros(n_seeds)

    for s in range(n_seeds):
        seed = s  # deterministic seeds 0 .. n_seeds-1
        seeds[s] = seed

        reg = MLPRegressor(hidden_layer_sizes=(n_neuro, ),
                           activation='logistic',
                           solver='lbfgs',
                           alpha=1e-3,
                           random_state=seed,
                           warm_start=True,
                           max_iter=epoch_iter)
        for ep in range(epochs):
            reg.fit(x_train, y_train)
            mse_train[s, ep] = calculate_mse(reg, x_train, y_train)
            mse_val[s, ep] = calculate_mse(reg, x_val, y_val)
            mse_test[s, ep] = calculate_mse(reg, x_test, y_test)

        y_pred_test = reg.predict(x_test)
        y_pred_train = reg.predict(x_train)
        plot_learned_function(n_neuro, x_train, y_train, y_pred_train, x_test,
                              y_test, y_pred_test)

    min_val_index = np.unravel_index(mse_val.argmin(), mse_val.shape)

    error_min_seed = seeds[min_val_index[0]]
    error_min = mse_val[min_val_index]

    print('Seed: ', error_min_seed)
    print('Minimum validation MSE: ', error_min)
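Once mse_val is filled, the early-stopping choice reduces to a couple of argmin calls; a sketch that picks, per seed, the epoch with the lowest validation error and then the best seed overall:

best_epoch_per_seed = np.argmin(mse_val, axis=1)   # early-stopping epoch per seed
best_seed = int(np.argmin(mse_val.min(axis=1)))    # seed with lowest validation MSE
print('Best seed:', seeds[best_seed],
      'stopped at epoch', best_epoch_per_seed[best_seed])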