# Train the NASNetMobile classifier: compile, fit with TensorBoard logging,
# save the weights, then report test-set loss/accuracy.
#
# NOTE(review): the log path and saved-model filename below say "Adadelta",
# but the active optimizer is Adamax -- confirm which experiment this is and
# rename the artifacts to match.
# opt = keras.optimizers.Adadelta(lr=1.0, rho=0.95, epsilon=None, decay=0.0)
opt = keras.optimizers.Adamax(lr=0.002, beta_1=0.9, beta_2=0.999,
                              epsilon=None, decay=0.0)

model.compile(loss='categorical_crossentropy',
              optimizer=opt,
              metrics=['accuracy'])

# TensorBoard event logging for this run.
log_path = '/tmp/tflearn_logs/NASNetMobile_LCZ42_Adadelta'
callback = TensorBoard(log_path)
callback.set_model(model)  # redundant once passed to fit(), but harmless

# BUG FIX: the TensorBoard callback was created but never handed to fit(),
# so its epoch/batch hooks were never invoked and no events were written.
# Keras only runs a callback when it appears in the `callbacks` argument.
model.fit(
    x_train, y_train,
    batch_size=1024,  # 128, 1024
    epochs=10,
    shuffle="batch",  # 'batch' shuffling assumes HDF5-backed arrays -- TODO confirm
    validation_data=(x_test, y_test),
    callbacks=[callback])

# Persist the trained weights.
modelpath = 'NASNetMobile_Adadelta_epochs_10.h5'
model.save(modelpath)
print('Saved trained model at %s ' % modelpath)

# Score trained model.
scores = model.evaluate(x_test, y_test, verbose=1)
print('Test loss:', scores[0])
print('Test accuracy:', scores[1])
# Train the model from DataSequence generators with checkpointing and
# TensorBoard logging, then save the final weights.

# Logs model stats to TensorBoard.
board_cb = TensorBoard()

# Checkpoints the best model seen so far at every epoch.
checkpoint_cb = ModelCheckpoint(filepath='current_best.hdf5', verbose=1,
                                save_best_only=True)

# Training data feed and the number of batches per epoch.
train_seq = DataSequence(train_pd, "./images", batch_size=batch_size)
steps_train = len(train_pd) // batch_size

# Validation data feed and its batch count.
val_seq = DataSequence(test_pd, "./images", batch_size=batch_size)
steps_val = len(test_pd) // batch_size

# Full callback set (lr_callback is defined elsewhere in this file).
cb_list = [lr_callback, board_cb, checkpoint_cb]

# Run training, feeding batches from worker processes.
model.fit_generator(train_seq,
                    validation_data=val_seq,
                    epochs=20,
                    use_multiprocessing=True,
                    workers=80,
                    steps_per_epoch=steps_train,
                    validation_steps=steps_val,
                    callbacks=cb_list)

# Finally, persist the trained model.
model.save(MODEL_NAME)