# synthetic two-class training set (project helper)
train_x, labels = create_noise_data()

# evaluation grid covering [-3, 3] x [-3, 3], flattened to (N, 2) points
X, Y = np.meshgrid(np.linspace(-3, 3, 100), np.linspace(-3, 3, 100))
test_x = np.column_stack([X.ravel(), Y.ravel()])

# build the classifier: 2 inputs -> three hidden layers of 10 -> 1 output
model = Neural_net(2, [10, 10, 10], 1)
model.add(['tanh', 'swish', 'relu'])
model.set_loss('binary_crossentropy')
optimizer = Adam(lr=0.1)
score_acc = []

# ---- training loop ----
for _ in range(max_iter):
    # one full-batch gradient step, then record training accuracy
    optimizer.update(model.params, model.gradient(train_x, labels))
    score_acc.append(np.asarray(model.accuracy(train_x, labels)))

# plot the training data: class-0 samples as small blue dots
neg = labels.ravel() == 0
plt.scatter(train_x[neg, 0],
            train_x[neg, 1],
            marker=".",
            s=20,
            color='b')
plt.scatter(train_x[labels.ravel() == 1, 0],
            train_x[labels.ravel() == 1, 1],
            marker="x",
# ===== Esempio n. 2 (Example 2) =====
# ---- #1: network configuration ----
# 784 inputs (flattened 28x28 images) -> four hidden layers of 100 -> 10 classes
model = Neural_net(784, [100, 100, 100, 100], 10, alpha=0.01)
model.add(['tanh', 'softsign', 'softplus', 'swish'])
model.set_loss('categorical_crossentropy')
train_loss = []
train_acc = []

# ---- #2: optimizer ----
optim = Adam(lr=0.01)

# ---- #3: mini-batch training ----
for _ in range(int(max_iter)):
    # sample a random mini-batch and one-hot encode its targets
    idx = np.random.choice(train_size, batch_size)
    xb = X_train[idx]
    tb = to_categorical(train_t[idx], cls_num=10)

    # one gradient step on this batch
    optim.update(model.params, model.gradient(xb, tb))

    # track loss and accuracy on the current batch
    train_loss.append(model.loss(xb, tb))
    train_acc.append(model.accuracy(xb, tb))
# ---- #4: visualise the recorded loss (accuracy panel presumably follows) ----
fig1 = plt.figure(figsize=(13, 5))  # dpi=50
ax = fig1.add_subplot(121)
x = np.arange(max_iter)
# loss per iteration, starred every 10th point
ax.plot(x,
        train_loss,
        color='blue',
        marker="*",
        markersize=7,
        markevery=10)
# ===== Esempio n. 3 (Example 3) =====
fig = plt.figure(figsize=(15, 9))


# fit one small regression network per target function, one panel each
for n, func in enumerate(multi_func, 1):
    train_y = func(train_x)

    # scatter the raw training points in the n-th panel
    ax = fig.add_subplot(2, 2, n)
    ax.scatter(train_x, train_y, s=12, color='blue')

    # fresh network per function: 1 input -> 10/10/10 hidden -> 1 output
    model = Neural_net(n_input=1, n_hidden=[10, 10, 10], n_output=1)
    model.add(['tanh', 'tanh', 'sigmoid'])
    model.set_loss('sum_squared_error')
    optimizer = Adam(lr=0.1, beta_1=0.9, beta_2=0.95)

    # ---- training ----
    for _ in range(int(max_iter)):
        optimizer.update(model.params, model.gradient(train_x, train_y))
        # accuracy history keyed by function name
        train_acc[func_name[n - 1]].append(model.accuracy(train_x, train_y))

    # overlay the fitted prediction curve
    ax.plot(x, model(x), 'r-')
    plt.xticks([-1, 0, 1])
    plt.yticks([0, 0.5, 1])
    plt.ylim([-0.1, 1.1])
    plt.subplots_adjust(wspace=0.2, hspace=0.3)
plt.show()


# NOTE(review): starts the next figure; the rest of this section lies outside this excerpt
fig = plt.figure(figsize=(10,4))