Example #1
    # imports needed by this excerpt (not shown in the original listing)
    import numpy as np
    from keras.models import Sequential
    from keras.layers import Dense
    from keras.optimizers import Adam

    import lab0.src.dataset3 as dataset3

    np.random.seed(18)
    # 1, 2: set hyperparameters and load the data
    train_size = 20000
    batch_size = 100
    epochs = 300
    lr = 0.001
    verbose = 1
    neurons = [15, 5]

    opt_name = "Adam"
    optimizer = Adam(lr=lr)

    goal_loss = 0.0001

    (x_train, y_train), (x_test,
                         y_test) = dataset3.load_data(train_size=train_size,
                                                      show=True)

    model = Sequential()

    model.add(
        Dense(neurons[0],
              input_dim=2,
              kernel_initializer='he_uniform',
              bias_initializer='he_uniform',
              activation='relu'))
    model.add(
        Dense(neurons[1],
              kernel_initializer='he_uniform',
              bias_initializer='he_uniform',
              activation='linear'))
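The excerpt stops before the model is compiled and trained. A minimal sketch of how the remaining variables (optimizer, epochs, batch_size, verbose, goal_loss) might be wired up, assuming a mean-squared-error objective and treating goal_loss as an early-stopping threshold on the training loss (neither is confirmed by the original listing):

    from keras.callbacks import Callback

    class StopAtGoalLoss(Callback):  # hypothetical helper, not in the original code
        # stop training once the epoch's training loss falls below goal_loss
        def on_epoch_end(self, epoch, logs=None):
            if logs and logs.get('loss', float('inf')) < goal_loss:
                self.model.stop_training = True

    model.compile(loss='mse', optimizer=optimizer)  # 'mse' is an assumption

    model.fit(x_train, y_train,
              batch_size=batch_size,
              epochs=epochs,
              verbose=verbose,
              callbacks=[StopAtGoalLoss()],
              validation_data=(x_test, y_test))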
Example #2
from mpl_toolkits.mplot3d import Axes3D  # noqa: F401, enables the 3D projection
# imports needed by this excerpt (not shown in the original listing)
import matplotlib.pyplot as plt
import numpy as np
from neupy import algorithms

import lab0.src.dataset3 as dataset3

print(
    "\n\nLet's build the error surface over the plane of two parameters: the RBF spread width and"
)
print("the size of the training set.\n")

maes = [0, 0, 0, 0]  # MAE for each (train_size, std) pair
train_size = [24000, 12000, 5000, 2000]
std = [0.00035, 0.001, 0.0035, 0.01]

for j in range(0, 4):
    (x_train, y_train), (x_test,
                         y_test) = dataset3.load_data(train_size=train_size[j],
                                                      show=False)

    pnn = algorithms.PNN(std=std[j], verbose=False)

    pnn.train(x_train, y_train)

    y_predicted = pnn.predict(x_test)

    mae = (np.abs(y_test - y_predicted)).mean()

    plt_x_zero = np.empty(0)
    plt_y_zero = np.empty(0)

    plt_x_one = np.empty(0)
    plt_y_one = np.empty(0)
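The listing is cut off inside the loop. The prints above say the goal is an error surface over spread and training-set size; a minimal sketch of how such a surface could be computed on a full grid and drawn via the Axes3D import, assuming the same dataset3.load_data and neupy PNN API (grid values chosen only for illustration):

spread_grid = np.array([0.00035, 0.001, 0.0035, 0.01])
size_grid = np.array([2000, 5000, 12000, 24000])
mae_grid = np.zeros((len(spread_grid), len(size_grid)))

for i, spread in enumerate(spread_grid):
    for k, size in enumerate(size_grid):
        (xt, yt), (xv, yv) = dataset3.load_data(train_size=int(size), show=False)
        net = algorithms.PNN(std=spread, verbose=False)
        net.train(xt, yt)
        mae_grid[i, k] = np.abs(yv - net.predict(xv)).mean()

fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')  # 3D axes enabled by the Axes3D import
size_mesh, spread_mesh = np.meshgrid(size_grid, spread_grid)
ax.plot_surface(spread_mesh, size_mesh, mae_grid)
ax.set_xlabel('spread')
ax.set_ylabel('train size')
ax.set_zlabel('MAE')
plt.show()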
Example #3
# Task 2, part 1
import matplotlib.pyplot as plt
import numpy as np
from neupy import algorithms

import lab0.src.dataset3 as dataset3

print(
    "\n\nLet's compare and visualize the cases where spread is greater than, less than, and equal to the optimal value for"
)
print("the case of splitting the plane into 2 classes\nAll plots are labeled\n")

(x_train, y_train), (x_test, y_test) = dataset3.load_data(train_size=12000,
                                                          show=False)
titles = [
    "\n\nspread greater than necessary", "\n\nspread optimal",
    "\n\nspread less than necessary"
]
spreads = [0.1, 0.001, 0.0001]

for spread, title in zip(spreads, titles):
    pnn = algorithms.PNN(std=spread, verbose=False)

    pnn.train(x_train, y_train)

    y_predicted = pnn.predict(x_test)

    mae = (np.abs(y_test - y_predicted)).mean()

    plt_x_zero = np.empty(0)
    plt_y_zero = np.empty(0)
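    # The excerpt is cut off here. A plausible continuation (an assumption, not
    # the original code) splits the test points by predicted class and plots
    # them, assuming binary 0/1 labels:
    for point, label in zip(x_test, y_predicted):
        if label < 0.5:
            plt_x_zero = np.append(plt_x_zero, point[0])
            plt_y_zero = np.append(plt_y_zero, point[1])
        else:
            plt_x_one = np.append(plt_x_one, point[0])
            plt_y_one = np.append(plt_y_one, point[1])

    plt.scatter(plt_x_zero, plt_y_zero, s=1, label='class 0')
    plt.scatter(plt_x_one, plt_y_one, s=1, label='class 1')
    plt.title(title + "\nMAE = {:.4f}".format(mae))
    plt.legend()
    plt.show()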
Example #4
import numpy as np
from keras.layers import Dense
from keras.models import Sequential
import sys

sys.path.append("../..")
from lib.custom import hard_lim
from lab0.src import dataset3
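
# lib.custom is not included in this listing. As an assumption, hard_lim is
# taken to be a hard-limit (step) activation; a hypothetical equivalent, shown
# only for illustration (the imported hard_lim above is what the model uses):
from keras import backend as K

def hard_lim_sketch(x):  # hypothetical name, not part of the original code
    return K.cast(K.greater_equal(x, 0.0), K.floatx())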

train_size = 4000
# load data from dataset3
(x_train, y_train), (x_test, y_test) = dataset3.load_data(train_size=train_size, show=True)

# create the model with manually specified weights
model = Sequential()

model.add(Dense(12, input_dim=x_train.shape[1], activation=hard_lim, name='First',
                weights=list([np.array([[0.0, 0.0, 1.0, 1.0, 1.0, -1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0],
                                        [1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0]], dtype=float),
                              np.array([-0.6, -0.2, -0.6, -0.2, -1.2, -0.4, -0.45, -0.35, -0.55, -0.45, -0.35, -0.25],
                                       dtype=float)])))

model.add(Dense(2, activation=hard_lim, name='Second',
                weights=list([np.array([[-1.0, 1.0],  # y1
                                        [1.0, 0.0],  # y2
                                        [-1.0, 0.0],  # y3
                                        [1.0, 0.0],  # y4
                                        [0.0, -1.0],  # y5
                                        [0.0, -1.0],  # y6
                                        [1.0, 0.0],  # y7
                                        [-1.0, 0.0],  # y8