Example #1
plt.show()


# In[ ]:


# training accuracy per epoch, as recorded by the History object fit() attaches to the model
plt.plot(model.history.history['accuracy'])
plt.xlabel('# epochs')
plt.ylabel('accuracy')
plt.show()
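
# The same History object also records the training loss per epoch; a companion
# sketch plotting it (the 'loss' key is always recorded by fit()):
plt.plot(model.history.history['loss'])
plt.xlabel('# epochs')
plt.ylabel('loss')
plt.show()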


# In[ ]:


# loss and accuracy on the held-out test set
model.evaluate(new_test_data, new_test_target)


# In[ ]:


# After training a neural network, we can save the knowledge it gained from the
# dataset to a file, which can then be reused in other applications.

# saving the weights learned by the NN from the training set
model.save_weights('FFNN-MNIST.h5')
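
# save_weights() stores only the weights, not the architecture, so the same
# layers must be rebuilt before loading. A minimal sketch, assuming a
# build_model() helper (hypothetical name) that recreates the network above:
new_model = build_model()  # hypothetical: recreates the exact architecture above
new_model.load_weights('FFNN-MNIST.h5')
new_model.evaluate(new_test_data, new_test_target)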


# In[ ]:

Example #2
import numpy as np
import matplotlib.pyplot as plt

from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D, Flatten, Dense
from keras.optimizers import Adam
from keras.callbacks import TensorBoard

cnn_model = Sequential()

# 32 filters with a 3x3 kernel over 28x28 grayscale inputs
cnn_model.add(Conv2D(32, (3, 3), input_shape=(28, 28, 1), activation='relu'))

cnn_model.add(MaxPooling2D(pool_size=(2, 2)))
cnn_model.add(Flatten())
cnn_model.add(Dense(32, activation='relu'))
cnn_model.add(Dense(10, activation='sigmoid'))  # one unit per class; labels 0-9 assumed
cnn_model.compile(loss='sparse_categorical_crossentropy',
                  optimizer=Adam(lr=0.001),
                  metrics=['accuracy'])
epochs = 50
cnn_model.fit(X_train, y_train, batch_size=512, epochs=epochs, verbose=1,
              validation_data=(X_validate, y_validate))
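
# The TensorBoard import above is otherwise unused; a minimal sketch of how the
# callback could be wired into training (the log_dir path is an arbitrary choice):
tensorboard = TensorBoard(log_dir='./logs')  # inspect later with: tensorboard --logdir=./logs
cnn_model.fit(X_train, y_train, batch_size=512, epochs=epochs, verbose=1,
              validation_data=(X_validate, y_validate),
              callbacks=[tensorboard])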

# evaluating the model
evaluation = cnn_model.evaluate(X_test, y_test)
print('Test Accuracy : {:.3f}'.format(evaluation[1]))
predicted_classes = cnn_model.predict_classes(X_test)
predicted_classes
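
# predict_classes() only exists on Sequential models in older Keras releases and
# was removed in later TensorFlow/Keras versions; an equivalent using predict():
probabilities = cnn_model.predict(X_test)             # shape (num_samples, num_classes)
predicted_classes = np.argmax(probabilities, axis=1)  # most likely class per sample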

L = 5
W = 5
fig, axes = plt.subplots(L, W, figsize = (12,12))
axes = axes.ravel()

for i in np.arange(0, L * W):
    axes[i].imshow(X_test[i].reshape(28,28))
    axes[i].set_title("Prediction Class = {:0.1f}\n True Class = {:0.1f}".format(predicted_classes[i], y_test[i]))
    axes[i].axis('off')
    
plt.subplots_adjust(wspace=0.5)
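
# A per-class error summary complements the 25-image sample above; a sketch
# assuming scikit-learn is available:
from sklearn.metrics import confusion_matrix

cm = confusion_matrix(y_test, predicted_classes)  # rows = true class, columns = predicted
print(cm)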
Example #3
from keras.models import Sequential
from keras.layers import Dense

model = Sequential()
model.add(Dense(100, input_dim=128, kernel_initializer='uniform', activation='relu'))
model.add(Dense(100, kernel_initializer='uniform', activation='relu'))
model.add(Dense(1, kernel_initializer='uniform', activation='sigmoid'))

model.compile(loss='binary_crossentropy',
              optimizer='adam',
              metrics=['accuracy'])

model.fit(X_train,
          Y_train,
          validation_data=(X_test, Y_test),
          epochs=500,
          batch_size=10,
          verbose=2)

# training accuracy
scores = model.evaluate(X_train, Y_train)
print("Training Accuracy: %.2f%%" % (scores[1] * 100))

# testing accuracy
scores2 = model.evaluate(X_test, Y_test)
print("Testing Accuracy: %.2f%%" % (scores2[1] * 100))
Example #4
from keras.models import Sequential
from keras.layers import Dense, Dropout
from keras.optimizers import SGD

model = Sequential()
model.add(Dense(64, input_dim=20, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(64, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(1, activation='sigmoid'))

model.compile(loss='binary_crossentropy',
              optimizer='rmsprop',
              metrics=['accuracy'])

model.fit(x_train, y_train, epochs=20, batch_size=128)
score = model.evaluate(x_test, y_test, batch_size=128)
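
# evaluate() returns the loss followed by the compiled metrics, so score here is
# [loss, accuracy]; printing both:
print('Test loss: %.3f, test accuracy: %.3f' % (score[0], score[1]))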
# A multi-class variant of the same MLP, kept commented out for reference:
'''
model = Sequential()
# Dense(64) is a fully-connected layer with 64 hidden units.
# in the first layer, you must specify the expected input data shape:
# here, 20-dimensional vectors.
model.add(Dense(64, activation='relu', input_dim=20))
model.add(Dropout(0.5))
model.add(Dense(64, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(10, activation='softmax'))

sgd = SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True)
model.compile(loss='categorical_crossentropy',
              optimizer=sgd,
              metrics=['accuracy'])
'''