Example no. 1
from keras.layers import Dense, Activation
from keras.models import Sequential

from ADDITIONAL.CUSTOM_KERAS import hard_lim
import numpy as np

import LABS.ZeroLab.D_DivIntoNClasses as dataset4

if __name__ == '__main__':
    train_size = 100

    (x_train, y_train), (x_test,
                         y_test) = dataset4.load_data(train_size=train_size,
                                                      show=True)

    model = Sequential()

    model.add(
        Dense(17,
              input_dim=x_train.shape[1],
              activation=Activation(hard_lim),
              name='1',
              weights=list([
                  np.array([[
                      1.0, -1.0, 0.0, 0.0, 1.0, -1.0, 0.0, 0.0, -1.0, 0.0, 1.0,
                      0.0, 1.0, -1.0, 0.0, 1.0, 0.0
                  ],
                            [
                                0.0, 0.0, 1.0, -1.0, 0.0, 0.0, 1.0, -1.0, 0.0,
                                -1.0, 1.0, 1.0, 0.0, -1.0, 1.0, 0.0, -1.0
                            ]],
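The snippet relies on hard_lim from ADDITIONAL.CUSTOM_KERAS, whose source is not shown here. It is most likely a hard-limit (Heaviside step) activation; below is a minimal sketch of such a function, assuming the standard keras.backend API, not the project's actual implementation:

from keras import backend as K


def hard_lim(x):
    # Hard-limit (Heaviside step) activation:
    # returns 1.0 where the input is non-negative, 0.0 otherwise.
    return K.cast(K.greater_equal(x, 0.0), K.floatx())

With a step activation, each neuron of the hand-weighted first Dense layer simply fires 0 or 1 depending on which side of its fixed linear boundary the input point falls.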
Example no. 2
import numpy as np
from keras.layers import Dense
from keras.models import Sequential
from keras.optimizers import Adam

import LABS.ZeroLab.D_DivIntoNClasses as dataset4
# EarlyStoppingByLossVal is a project-specific callback, not part of Keras;
# its import path is not shown in this excerpt (a sketch of one possible
# implementation follows the snippet).

if __name__ == '__main__':
    np.random.seed(42)
    # 1,2 initializing
    train_size = 16000
    batch_size = 160
    epochs = 1000
    lr = 0.007
    verbose = 1
    neurons_number = [40, 35, 7]

    opt_name = "Adam"
    optimizer = Adam(lr=lr)

    goal_loss = 0.013

    (x_train, y_train), (x_test, y_test) = dataset4.load_data(train_size=train_size, show=True)

    model = Sequential()

    model.add(Dense(neurons_number[0], input_dim=2, activation='relu'))

    model.add(Dense(neurons_number[1], activation='linear'))

    model.add(Dense(neurons_number[2], activation='sigmoid'))

    # plot_model(model, to_file="C_Model.png", show_shapes=True, show_layer_names=True)

    # 3 setting stopper
    # callbacks.EarlyStopping(monitor='acc', min_delta=0, patience=5, mode='max')
    callbacks = [EarlyStoppingByLossVal(monitor='val_loss', value=goal_loss, verbose=1)]
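EarlyStoppingByLossVal is not defined in the snippet. Given only its usage above (monitor, value, verbose), one common way to implement such a threshold-based stopper as a custom Keras callback looks like the sketch below; treat it as illustrative rather than the project's actual code:

import warnings

from keras.callbacks import Callback


class EarlyStoppingByLossVal(Callback):
    # Stops training as soon as the monitored metric drops below a fixed threshold.
    def __init__(self, monitor='val_loss', value=0.01, verbose=0):
        super(EarlyStoppingByLossVal, self).__init__()
        self.monitor = monitor
        self.value = value
        self.verbose = verbose

    def on_epoch_end(self, epoch, logs=None):
        current = (logs or {}).get(self.monitor)
        if current is None:
            warnings.warn("EarlyStoppingByLossVal: %s not found in logs" % self.monitor)
            return
        if current < self.value:
            if self.verbose > 0:
                print("Epoch %d: %s reached %.4f, stopping training" %
                      (epoch, self.monitor, current))
            self.model.stop_training = True

Note that val_loss only appears in the logs when model.fit() is given validation data (validation_data or validation_split).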
Example no. 3
import numpy as np
import matplotlib.pyplot as plt
from neupy import algorithms

import LABS.ZeroLab.D_DivIntoNClasses as dataset4  # assumed: same dataset module as in the examples above


def diff_std():
    (x_train, y_train), (x_test,
                         y_test) = dataset4.load_data_neupy(train_size=12000,
                                                            show=True)

    titles = [
        "\n\nspread greater than necessary", "\n\nspread optimal",
        "\n\nspread less than necessary"
    ]
    spreads = [0.01, 0.001, 0.0001]
    for spread, title in zip(spreads, titles):
        pnn = algorithms.PNN(std=spread, verbose=True)

        pnn.train(x_train, y_train)

        y_predicted = pnn.predict(x_test)

        mae = (np.abs(y_test - y_predicted)).mean()

        plt_x_zero = np.empty(0)
        plt_y_zero = np.empty(0)

        plt_x_one = np.empty(0)
        plt_y_one = np.empty(0)

        plt_x_two = np.empty(0)
        plt_y_two = np.empty(0)

        plt_x_three = np.empty(0)
        plt_y_three = np.empty(0)

        plt_x_four = np.empty(0)
        plt_y_four = np.empty(0)

        plt_x_five = np.empty(0)
        plt_y_five = np.empty(0)

        plt_x_six = np.empty(0)
        plt_y_six = np.empty(0)

        i = 0
        for predict in y_predicted:

            if predict == 0.0:
                plt_x_zero = np.append(plt_x_zero, x_test[i][0])
                plt_y_zero = np.append(plt_y_zero, x_test[i][1])
            elif predict == 0.1:
                plt_x_one = np.append(plt_x_one, x_test[i][0])
                plt_y_one = np.append(plt_y_one, x_test[i][1])
            elif predict == 0.2:
                plt_x_two = np.append(plt_x_two, x_test[i][0])
                plt_y_two = np.append(plt_y_two, x_test[i][1])
            elif predict == 0.3:
                plt_x_three = np.append(plt_x_three, x_test[i][0])
                plt_y_three = np.append(plt_y_three, x_test[i][1])
            elif predict == 0.4:
                plt_x_four = np.append(plt_x_four, x_test[i][0])
                plt_y_four = np.append(plt_y_four, x_test[i][1])
            elif predict == 0.5:
                plt_x_five = np.append(plt_x_five, x_test[i][0])
                plt_y_five = np.append(plt_y_five, x_test[i][1])
            elif predict == 0.6:
                plt_x_six = np.append(plt_x_six, x_test[i][0])
                plt_y_six = np.append(plt_y_six, x_test[i][1])
            i += 1

        plt.plot(plt_x_zero, plt_y_zero, '.')
        plt.plot(plt_x_one, plt_y_one, '.')
        plt.plot(plt_x_two, plt_y_two, '.')
        plt.plot(plt_x_three, plt_y_three, '.')
        plt.plot(plt_x_four, plt_y_four, '.')
        plt.plot(plt_x_five, plt_y_five, '.')
        plt.plot(plt_x_six, plt_y_six, '.')

        plt.xlim(0, 1.5)
        plt.ylim(0, 1)

        plt.legend(('0.0 class', '0.1 class', '0.2 class', '0.3 class',
                    '0.4 class', '0.5 class', '0.6 class'),
                   loc='upper right',
                   shadow=True)

        plt.title(title + '\n7-class classification\nspread = %.4f\nmae = %.4f' %
                  (spread, mae))

        plt.show()
        plt.close()
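The seven per-class array pairs and the if/elif chain above can be expressed more compactly with boolean masks. A sketch of an equivalent helper, assuming x_test is an (N, 2) array and using np.isclose instead of fragile float equality on the 0.0-0.6 labels:

def plot_predicted_classes(x_test, y_predicted,
                           labels=(0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6)):
    # Select the test points assigned to each label with a boolean mask
    # and plot every class in one pass instead of seven hand-written branches.
    x_test = np.asarray(x_test)
    y_predicted = np.asarray(y_predicted).ravel()
    for label in labels:
        mask = np.isclose(y_predicted, label)
        plt.plot(x_test[mask, 0], x_test[mask, 1], '.', label='%.1f class' % label)
    plt.legend(loc='upper right', shadow=True)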
Example no. 4
def diff_train():
    maes = [0, 0, 0, 0]
    train_size = [24000, 12000, 5000, 2000]
    std = [0.0004, 0.001, 0.0035, 0.01]

    for j in range(0, 4):
        (x_train, y_train), (x_test, y_test) = dataset4.load_data_neupy(
            train_size=train_size[j], show=False)

        pnn = algorithms.PNN(std=std[j], verbose=True)

        pnn.train(x_train, y_train)

        y_predicted = pnn.predict(x_test)

        mae = (np.abs(y_test - y_predicted)).mean()

        plt_x_zero = np.empty(0)
        plt_y_zero = np.empty(0)

        plt_x_one = np.empty(0)
        plt_y_one = np.empty(0)

        plt_x_two = np.empty(0)
        plt_y_two = np.empty(0)

        plt_x_three = np.empty(0)
        plt_y_three = np.empty(0)

        plt_x_four = np.empty(0)
        plt_y_four = np.empty(0)

        plt_x_five = np.empty(0)
        plt_y_five = np.empty(0)

        plt_x_six = np.empty(0)
        plt_y_six = np.empty(0)

        acc = 0.0
        i = 0
        for predict in y_predicted:

            if predict == 0.0:
                plt_x_zero = np.append(plt_x_zero, x_test[i][0])
                plt_y_zero = np.append(plt_y_zero, x_test[i][1])
            elif predict == 0.1:
                plt_x_one = np.append(plt_x_one, x_test[i][0])
                plt_y_one = np.append(plt_y_one, x_test[i][1])
            elif predict == 0.2:
                plt_x_two = np.append(plt_x_two, x_test[i][0])
                plt_y_two = np.append(plt_y_two, x_test[i][1])
            elif predict == 0.3:
                plt_x_three = np.append(plt_x_three, x_test[i][0])
                plt_y_three = np.append(plt_y_three, x_test[i][1])
            elif predict == 0.4:
                plt_x_four = np.append(plt_x_four, x_test[i][0])
                plt_y_four = np.append(plt_y_four, x_test[i][1])
            elif predict == 0.5:
                plt_x_five = np.append(plt_x_five, x_test[i][0])
                plt_y_five = np.append(plt_y_five, x_test[i][1])
            elif predict == 0.6:
                plt_x_six = np.append(plt_x_six, x_test[i][0])
                plt_y_six = np.append(plt_y_six, x_test[i][1])
            i += 1

        plt.plot(plt_x_zero, plt_y_zero, '.')
        plt.plot(plt_x_one, plt_y_one, '.')
        plt.plot(plt_x_two, plt_y_two, '.')
        plt.plot(plt_x_three, plt_y_three, '.')
        plt.plot(plt_x_four, plt_y_four, '.')
        plt.plot(plt_x_five, plt_y_five, '.')
        plt.plot(plt_x_six, plt_y_six, '.')

        plt.xlim(0, 1.5)
        plt.ylim(0, 1)

        plt.legend(('0.0 class', '0.1 class', '0.2 class', '0.3 class',
                    '0.4 class', '0.5 class', '0.6 class'),
                   loc='upper right',
                   shadow=True)

        plt.title('7-class classification\ntrain_size = %d\nstd = %.4f\nmae = %.4f' %
                  (train_size[j], std[j], mae))

        maes[j] = mae

        plt.show()
        plt.close()
    return train_size, std, maes
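A hypothetical driver showing how the three returned lists line up, useful for printing the train-size / spread / MAE summary after the plots:

if __name__ == '__main__':
    train_sizes, stds, maes = diff_train()
    for n, s, m in zip(train_sizes, stds, maes):
        print("train_size = %5d  std = %.4f  mae = %.4f" % (n, s, m))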
Example no. 5
import numpy as np
import matplotlib.pyplot as plt
# dataset4 (the module providing the isRect/isTriangle helpers) is imported
# elsewhere in the source file; its exact path is not shown in this excerpt.


def load_data(train_size=2000, show=False):
    test_size = int(train_size * 0.2)

    x_train = np.empty(0)
    y_train = np.empty(0)

    x_test = np.empty(0)
    y_test = np.empty(0)

    x_train_for_plt = np.empty(0)
    x_train_missed_for_plt = np.empty(0)

    x_test_for_plt = np.empty(0)
    x_test_missed_for_plt = np.empty(0)

    for i in range(train_size + test_size):

        x = np.random.random()
        y = np.random.random()

        if i < train_size:
            x_train = np.append(x_train, (x, y))
        else:
            x_test = np.append(x_test, (x, y))

        if dataset4.isRect(x, y, xMin=0.2, xMax=0.6, yMin=0.1, yMax=0.5) or \
                dataset4.isTriangle(x, y, x1=0.5, x2=0.9, x3=0.9, y1=0.9, y2=0.5, y3=0.9):
            if i < train_size:
                x_train_for_plt = np.append(x_train_for_plt, (x, y))
                y_train = np.append(y_train, 1)
            else:
                x_test_for_plt = np.append(x_test_for_plt, (x, y))
                y_test = np.append(y_test, 1)
        else:
            if i < train_size:
                x_train_missed_for_plt = np.append(x_train_missed_for_plt,
                                                   (x, y))
                y_train = np.append(y_train, 0)
            else:
                x_test_missed_for_plt = np.append(x_test_missed_for_plt,
                                                  (x, y))
                y_test = np.append(y_test, 0)

    # Reshaping
    x_train.shape = (train_size, 2)
    x_test.shape = (test_size, 2)

    x_train_for_plt.shape = (int(x_train_for_plt.size / 2), 2)
    x_train_missed_for_plt.shape = (int(x_train_missed_for_plt.size / 2), 2)

    x_test_for_plt.shape = (int(x_test_for_plt.size / 2), 2)
    x_test_missed_for_plt.shape = (int(x_test_missed_for_plt.size / 2), 2)

    # Plotting train
    plt.xlim(0, 1.3)
    plt.ylim(0, 1)
    plt.title("train data")
    plt.plot(x_train_for_plt.transpose()[0],
             x_train_for_plt.transpose()[1], '.')
    plt.plot(x_train_missed_for_plt.transpose()[0],
             x_train_missed_for_plt.transpose()[1], '.')

    # The class-1 points (x_train_for_plt) are plotted first, so they take
    # the first legend entry.
    plt.legend(('1 class', '0 class'), loc='upper right', shadow=True)

    if show:
        plt.show()

    plt.close()

    # Plotting test
    plt.xlim(0, 1.3)
    plt.ylim(0, 1)
    plt.title("test data")
    plt.plot(x_test_for_plt.transpose()[0], x_test_for_plt.transpose()[1], '.')
    plt.plot(x_test_missed_for_plt.transpose()[0],
             x_test_missed_for_plt.transpose()[1], '.')

    plt.legend(('1 class', '0 class'), loc='upper right', shadow=True)

    if show:
        plt.show()
    plt.close()

    return (x_train, y_train), (x_test, y_test)
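For reference, the shapes this generator returns, following the reshaping code above (test_size is train_size * 0.2, so 400 test points for the default 2000 training points):

(x_train, y_train), (x_test, y_test) = load_data(train_size=2000, show=False)
print(x_train.shape, y_train.shape)  # (2000, 2) (2000,)
print(x_test.shape, y_test.shape)    # (400, 2) (400,)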