Example #1
# the zero-weights call producing mse_zeros/zeros is cut off above; the head
# of this call is reconstructed from context (the initialize_weights value is
# an assumption, cf. the 'Uniform distribution' label below)
mse_normal, normal = learn_network(x,
                                   y, [20], [sigmoid, linear],
                                   initialize_weights='normal',
                                   iterations=100,
                                   momentum_type='normal',
                                   plot=False,
                                   return_result=True)
mse_Xavier, Xavier = learn_network(x,
                                   y, [20], [sigmoid, linear],
                                   initialize_weights='Xavier',
                                   iterations=100,
                                   momentum_type='normal',
                                   plot=False,
                                   return_result=True)

labels = ['Zero weights', 'Uniform distribution', 'Xavier']

plot_data_1d_compare(x,
                     y, [zeros, normal, Xavier],
                     labels=["true"] + labels,
                     title="Comparison of initialization weights")
plot_measure_results_data([mse_zeros, mse_normal, mse_Xavier],
                          labels=labels,
                          title_ending=" for initialization weights",
                          from_error=5)

# batch size experiment
mse_all, res_all = learn_network(x,
                                 y, [20], [sigmoid, linear],
                                 initialize_weights='Xavier',
                                 eta=0.00001,
                                 batch_size=10000,
                                 iterations=100,
                                 momentum_type='normal',
                                 plot=False,
                                 return_result=True)  # closing reconstructed (assumption)
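
# For reference, a minimal NumPy sketch of the three initialization schemes
# being compared; init_weights and its argument names are hypothetical, not
# the API used by learn_network.
import numpy as np

def init_weights(n_in, n_out, scheme='Xavier', seed=0):
    rng = np.random.default_rng(seed)
    if scheme == 'zeros':
        # zero weights: all neurons compute the same output, so gradients
        # stay identical and the layer never breaks symmetry
        return np.zeros((n_in, n_out))
    if scheme == 'uniform':
        # fixed-range uniform draw, independent of layer size
        return rng.uniform(-1.0, 1.0, size=(n_in, n_out))
    # Xavier/Glorot: scale the range by fan-in and fan-out so activation
    # and gradient magnitudes stay comparable across layers
    limit = np.sqrt(6.0 / (n_in + n_out))
    return rng.uniform(-limit, limit, size=(n_in, n_out))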
Example #2
# loop and call head reconstructed from context (assumption): i ranges over
# hidden-layer counts; 'neurons', 'labels' and the earlier calls producing
# res_linear/res_ReLU/res_sigmoid and their MSE lists are cut off above
for i in range(1, 4):
    mse_tanh, res_tanh = learn_network(
        x,
        y,
        neurons[:i], [tanh] * i + [linear],
        beta=0.01,
        eta=0.01,
        epochs=1,
        iterations=300,
        plot_title="Prediction with tanh activation function and " + str(i) +
        " hidden layers",
        plot=False,
        return_result=True)

    plt.subplot(420 + 2 * i - 1)
    plot_data_1d_compare(x,
                         y, [res_linear, res_ReLU, res_sigmoid, res_tanh],
                         labels=["true"] + labels,
                         title="Comparison of activation functions for " +
                         str(i) + " hidden layers networks",
                         show=False)
    # plot from 5th iteration to better see differences
    plt.subplot(420 + 2 * i)
    plot_measure_results_data([mse_linear, mse_ReLU, mse_sigmoid, mse_tanh],
                              labels=labels,
                              title_ending=" for " + str(i) +
                              " layers networks",
                              from_error=5,
                              show=False)
    if i == 3:
        plt.subplot(420 + 2 * i + 2)
        plot_measure_results_data([mse_linear, mse_sigmoid, mse_tanh],
                                  labels=labels[:1] + labels[2:4],
                                  colors=['red', 'cyan', 'yellow'],
                                  from_error=5,
                                  show=False)  # closing args reconstructed (assumption)
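
# The activations compared above are standard; minimal textbook definitions
# (sketches, not necessarily this module's own implementations):
import numpy as np

def linear(z):
    return z                           # identity, no nonlinearity

def ReLU(z):
    return np.maximum(0.0, z)          # clips negatives to zero

def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))    # squashes to (0, 1)

def tanh(z):
    return np.tanh(z)                  # squashes to (-1, 1), zero-centered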
Example #3
# loop and call heads reconstructed from context (assumption): nested loops
# over a layer count and regularization types; the network arguments here
# mirror the other examples and are hypothetical
mses, results = [], []
for i in [3]:  # hypothetical hidden-layer count
    for reg_type in regularization_types:  # assumed defined in omitted code
        mse_reg, res_reg = learn_network(
            x,
            y, [20] * i, [sigmoid] * i + [linear],
            regularization_type=reg_type,
            plot_title="Prediction with sigmoid activation function and " +
            str(i) + " hidden layers",
            plot=False,
            return_result=True,
            x_test=x_test,
            y_test=y_test,
            use_test_and_stop_learning=True,
            no_change_epochs_to_stop=no_change_epochs_to_stop)
        mses.append(mse_reg)
        results.append(res_reg)

plt.figure(figsize=(12.8, 4.8))
plt.subplot(121)
plot_data_1d_compare(
    x,
    y,
    results,
    labels=["true"] + labels,
    title="Comparison of different regularization methods on multimodal sparse dataset",
    show=False)
# plot from the 50th iteration to better see differences
# pad the MSE curves to equal length: early stopping can end runs after
# different numbers of iterations
measures = add_the_same_value_to_the_same_length(mses)
plt.subplot(122)
plot_measure_results_data(measures,
                          labels=labels,
                          title_ending=" for multimodal sparse dataset",
                          from_error=50)
plt.show()
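
# A plausible sketch of what a helper like add_the_same_value_to_the_same_length
# must do, assuming each entry is a per-iteration MSE list (illustration only,
# not the repo's actual code):
def pad_to_same_length(curves):
    # early stopping ends runs at different iterations, so repeat each
    # curve's last value until all curves are equally long
    longest = max(len(curve) for curve in curves)
    return [curve + [curve[-1]] * (longest - len(curve)) for curve in curves]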
Example #4
# the baseline call producing mse/base (no momentum) is cut off above; the
# head of this call is reconstructed from context (assumption)
mse_mom, mom = learn_network(x,
                             y, [20], [sigmoid, linear],
                             momentum_type='momentum',
                             lambda_momentum=0.9,
                             eta=0.01,
                             epochs=1,
                             iterations=100,
                             plot=False,
                             return_result=True)

# learn model with RMSProp and keep its result for plotting
mse_rms, rms = learn_network(x,
                             y, [20], [sigmoid, linear],
                             momentum_type='RMSProp',
                             beta=0.01,
                             eta=0.1,
                             epochs=1,
                             iterations=100,
                             plot=False,
                             return_result=True)

labels = ['No momentum', 'Momentum', 'RMSProp']

# plot data and mse
plot_data_1d_compare(x,
                     y, [base, mom, rms],
                     labels=["true"] + labels,
                     title="Comparison of momentum learning")
plot_measure_results_data([mse, mse_mom, mse_rms],
                          labels=labels,
                          title_ending=" for momentum learning",
                          from_error=5)
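
# For reference, minimal sketches of the two update rules compared above;
# the variable names and the mapping of learn_network's beta onto the decay
# rate are assumptions, not the library's internals.
import numpy as np

def momentum_step(w, grad, velocity, eta=0.01, lambda_momentum=0.9):
    # classical momentum: keep an exponentially decaying sum of past
    # gradients and step along it
    velocity = lambda_momentum * velocity - eta * grad
    return w + velocity, velocity

def rmsprop_step(w, grad, sq_avg, eta=0.1, decay=0.9, eps=1e-8):
    # RMSProp: track a running average of squared gradients and scale
    # each parameter's step by the inverse of its root
    sq_avg = decay * sq_avg + (1.0 - decay) * grad ** 2
    return w - eta * grad / (np.sqrt(sq_avg) + eps), sq_avg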