Example 1
import os
from time import time

import keras
from keras.callbacks import TensorBoard


def train():
    # train the NN using the generated data (load_data, inverse_planning_model,
    # save_model_to_json, batch_size and num_epochs come from the surrounding module)
    nn_input, expected_output = load_data('train')
    # infer the output length and the input height/width from the first sample
    output_len = len(expected_output[0])
    height = len(nn_input[0])
    width = len(nn_input[0][0])
    model = inverse_planning_model(width=width,
                                   height=height,
                                   output_len=output_len)

    model.compile(loss=keras.losses.categorical_crossentropy,
                  optimizer=keras.optimizers.Adam(),
                  metrics=['accuracy'])
    model.summary()

    # log to a timestamped run directory for TensorBoard
    tensorboard = TensorBoard(log_dir=os.path.join("logs",
                                                   "{}".format(time())),
                              profile_batch=0)
    model.fit(nn_input,
              expected_output,
              batch_size=batch_size,
              epochs=num_epochs,
              callbacks=[tensorboard])

    save_model_to_json(model, 'inverse_planning_model')
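
Both this example and Example 3 call a save_model_to_json helper that is not shown. A minimal sketch of what it might look like, assuming the standard Keras to_json()/save_weights() API and a filename prefix argument:

# Hypothetical helper, not part of the original snippet: persist the
# architecture as JSON and the weights as HDF5 under the given prefix.
def save_model_to_json(model, name):
    with open(name + '.json', 'w') as f:
        f.write(model.to_json())      # architecture only
    model.save_weights(name + '.h5')  # learned parameters
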
Example 2
def train(model, network_input, network_output, X_train, X_test, y_train, y_test, results_dir):
    # X_train/y_train are unused here; training runs on network_input/network_output
    callbacks_list = utils.model_callbacks(results_dir)
    utils.logging('Loaded model callbacks')

    utils.save_model_to_json(model, results_dir)
    utils.logging('Model saved to file: {}/{}'.format(results_dir, 'model.json'))

    # Keras ignores validation_split when validation_data is supplied, so only
    # the explicit (X_test, y_test) hold-out set is passed here
    history = model.fit(network_input, network_output,
                        validation_data=(X_test, y_test),
                        epochs=200,
                        batch_size=64,
                        callbacks=callbacks_list,
                        verbose=1)

    utils.generate_final_plots(history, results_dir)
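
utils.model_callbacks is project code the snippet does not include. A plausible sketch, assuming it bundles the usual checkpointing and early-stopping callbacks (the file name and patience value are illustrative):

import os
from keras.callbacks import ModelCheckpoint, EarlyStopping

# Hypothetical stand-in for utils.model_callbacks: checkpoint the best
# weights into results_dir and stop once val_loss stops improving.
def model_callbacks(results_dir):
    checkpoint = ModelCheckpoint(os.path.join(results_dir, 'weights.best.h5'),
                                 monitor='val_loss', save_best_only=True)
    early_stop = EarlyStopping(monitor='val_loss', patience=10,
                               restore_best_weights=True)
    return [checkpoint, early_stop]
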
Example 3
import os
from time import time

import keras
from keras.callbacks import TensorBoard
from keras.preprocessing.image import ImageDataGenerator
from keras.utils import to_categorical
from sklearn.model_selection import train_test_split

BATCH_SIZE = 128
EPOCHS = 10  # assumed value; the original snippet uses EPOCHS without defining it

# read_mnist, make_lenet5 and save_model_to_json come from the surrounding module
train_features, train_labels = read_mnist('train-images-idx3-ubyte.gz', 'train-labels-idx1-ubyte.gz')

train_features, validation_features, train_labels, validation_labels = \
    train_test_split(train_features, train_labels, test_size=0.2, random_state=0)

print('# of training images:', train_features.shape[0])
print('# of cross-validation images:', validation_features.shape[0])

model = make_lenet5()
model.summary()

model.compile(loss=keras.losses.categorical_crossentropy, optimizer=keras.optimizers.Adam(), metrics=['accuracy'])

# one-hot encode the integer labels for categorical cross-entropy
X_train, y_train = train_features, to_categorical(train_labels)
X_validation, y_validation = validation_features, to_categorical(validation_labels)

# plain generators (no augmentation) that just batch and shuffle the arrays
train_generator = ImageDataGenerator().flow(X_train, y_train, batch_size=BATCH_SIZE)
validation_generator = ImageDataGenerator().flow(X_validation, y_validation, batch_size=BATCH_SIZE)

steps_per_epoch = X_train.shape[0] // BATCH_SIZE
validation_steps = X_validation.shape[0] // BATCH_SIZE

tensorboard = TensorBoard(log_dir=os.path.join("logs", "{}".format(time())))
# fit's shuffle flag is a no-op for generator input (the .flow() generators
# already shuffle), so it is dropped here
model.fit(train_generator, steps_per_epoch=steps_per_epoch, epochs=EPOCHS,
          validation_data=validation_generator, validation_steps=validation_steps,
          callbacks=[tensorboard])

save_model_to_json(model, 'lenet5')
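
make_lenet5 is defined elsewhere; a sketch of the textbook LeNet-5 stack it presumably builds, assuming read_mnist pads the MNIST images to 32x32x1 (the real helper may differ in activations or filter counts):

from keras.models import Sequential
from keras.layers import Conv2D, AveragePooling2D, Flatten, Dense

# Hypothetical make_lenet5: classic LeNet-5 for 32x32x1 inputs,
# ending in a 10-way softmax over the MNIST digit classes.
def make_lenet5():
    return Sequential([
        Conv2D(6, kernel_size=5, activation='tanh', input_shape=(32, 32, 1)),
        AveragePooling2D(pool_size=2),
        Conv2D(16, kernel_size=5, activation='tanh'),
        AveragePooling2D(pool_size=2),
        Flatten(),
        Dense(120, activation='tanh'),
        Dense(84, activation='tanh'),
        Dense(10, activation='softmax'),
    ])
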
Example 4
print("before CNN")
model = CNN_Model(num_samples, input_volume=3).get_model()
print("after CNN")

# es_callback = EarlyStopping(monitor='val_loss',patience=10,restore_best_weights=True)

history = model.fit(
    input_train,
    output_train,
    epochs=num_epochs,
    verbose=1,
    validation_data=(input_validation,
                     output_validation))  #, callbacks=[es_callback])

save_model_to_json(model, json_file_name)

# plot training vs. validation loss per epoch
plt.rcParams.update({'font.size': 14})

plt.figure()
plt.grid()
plt.plot(history.history['loss'])
plt.plot(history.history['val_loss'])
# plt.title('model loss')
plt.ylabel('Loss')
plt.xlabel('Epoch')
plt.legend(['Train', 'Validation'], loc='upper left')
plt.savefig('train_val.eps', format='eps')
plt.show()
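
The same history object also records accuracy when the model was compiled with metrics=['accuracy'], so the loss plot above extends directly to an accuracy figure. The key names are an assumption: recent Keras uses 'accuracy'/'val_accuracy', while older releases use 'acc'/'val_acc'.

# Companion accuracy plot; assumes history.history carries
# 'accuracy'/'val_accuracy' (older Keras releases use 'acc'/'val_acc').
plt.figure()
plt.grid()
plt.plot(history.history['accuracy'])
plt.plot(history.history['val_accuracy'])
plt.ylabel('Accuracy')
plt.xlabel('Epoch')
plt.legend(['Train', 'Validation'], loc='lower right')
plt.savefig('train_val_acc.eps', format='eps')
plt.show()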