Example #1
import numpy as np
import matplotlib.pyplot as plt
from loss import SoftmaxCrossEntropy
# MNISTNet, Adam, L2, and the mnist dataset object come from this
# project's own modules (their import paths are not shown here).

idx = np.random.randint(mnist.num_train, size=4)  # pick 4 random training images
print('\nFour examples of training images:')
img = mnist.x_train[idx][:, 0, :, :]  # keep channel 0 only (grayscale)

# plt.figure(1, figsize=(18, 18))
# for i in range(4):
#     plt.subplot(1, 4, i + 1)
#     plt.imshow(img[i])
# plt.show()

# Build the network and its loss (10 MNIST classes).
model = MNISTNet()
loss = SoftmaxCrossEntropy(num_class=10)


# Define your learning rate scheduler: halve the rate once every
# 1000 iterations, otherwise leave it unchanged.
def func(lr, iteration):
    if iteration % 1000 == 0:
        return lr * 0.5
    return lr
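
A quick standalone trace of this schedule (illustrative only; the printed values follow from halving 0.01 once every 1000 iterations):

lr = 0.01
for it in range(1, 4001):
    lr = func(lr, it)
    if it % 1000 == 0:
        print('iteration %d: lr = %g' % (it, lr))
# iteration 1000: lr = 0.005
# iteration 2000: lr = 0.0025
# iteration 3000: lr = 0.00125
# iteration 4000: lr = 0.000625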


adam = Adam(lr=0.01, decay=0, sheduler_func=func)  # note: the framework's kwarg is spelled 'sheduler_func'
l2 = L2(w=0.001)  # L2 regularization with lambda=0.001 (see the sketch after this example)
model.compile(optimizer=adam, loss=loss, regularization=l2)
train_results, val_results, test_results = model.train(mnist,
                                                       train_batch=30)
# (further model.train arguments were elided here)
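
For reference, an L2 penalty of this form presumably adds lam * sum(w**2) to the training loss and 2 * lam * w to each weight gradient; a minimal NumPy sketch of that assumed behavior (not the framework's actual L2 class):

import numpy as np

def l2_penalty(weights, lam=0.001):
    # assumed form: lam * sum of squared weights; gradient contribution 2 * lam * w
    penalty = lam * sum(np.sum(w ** 2) for w in weights)
    grads = [2.0 * lam * w for w in weights]
    return penalty, grads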
Example #2
import numpy as np
from loss import SoftmaxCrossEntropy
from utils.tools import rel_error
import keras
from keras import backend as K
import warnings

warnings.filterwarnings('ignore')
batch = 10
num_class = 10
inputs = np.random.uniform(size=(batch, num_class))  # random logits
targets = np.random.randint(num_class, size=batch)   # random integer labels
loss = SoftmaxCrossEntropy(num_class)
out, _ = loss.forward(inputs, targets)  # loss value from our implementation


# Reference value from Keras: softmax the logits first, then feed one-hot
# targets to categorical_crossentropy with from_logits=False.
keras_inputs = K.softmax(inputs)
keras_targets = np.zeros(inputs.shape, dtype='int')

for i in range(batch):
    keras_targets[i, targets[i]] = 1
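# Note: keras.utils.to_categorical(targets, num_classes=num_class) would
# build the same one-hot matrix in a single call.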

keras_out = K.mean(K.categorical_crossentropy(keras_targets, keras_inputs, from_logits=False))
print('Relative error (<1e-6 will be fine): ', rel_error(out, K.eval(keras_out)))
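
For reference, the quantity being checked reduces to the mean negative log-likelihood under a softmax; a minimal NumPy sketch of that computation (assumed to match what SoftmaxCrossEntropy.forward returns):

import numpy as np

def softmax_xent(logits, labels):
    # numerically stable softmax over the class axis
    shifted = logits - logits.max(axis=1, keepdims=True)
    probs = np.exp(shifted) / np.exp(shifted).sum(axis=1, keepdims=True)
    # mean negative log-probability of the true classes
    return -np.mean(np.log(probs[np.arange(len(labels)), labels]))

# Should agree with both `out` and `keras_out` above up to float error.
print(softmax_xent(inputs, targets))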