# --- Optimizer selection, compilation, training, and evaluation ---
# NOTE(review): `op` is presumably a dict of optimizer hyperparameters,
# and `optimizer`, `learning_rate`, `model`, `datagen_train`/`datagen_test`,
# `trainX`/`trainY`/`testX`/`testY`, `logfile`, `history`, `batch_size`,
# `epochs`, and `save_model_filepath` are defined earlier in the file — confirm.
if optimizer == 'padam':
    optim = Padam(learning_rate=learning_rate, p=op['p'],
                  beta1=op['b1'], beta2=op['b2'])
elif optimizer == 'adam':
    optim = tf.train.AdamOptimizer(learning_rate=learning_rate,
                                   beta1=op['b1'], beta2=op['b2'])
elif optimizer == 'adamw':
    # adamw = tf.contrib.opt.extend_with_decoupled_weight_decay(tf.train.AdamOptimizer)
    optim = tf.contrib.opt.AdamWOptimizer(weight_decay=op['weight_decay'],
                                          learning_rate=learning_rate,
                                          beta1=op['b1'],
                                          beta2=op['b2'])
elif optimizer == 'amsgrad':
    optim = AMSGrad(learning_rate=learning_rate,
                    beta1=op['b1'], beta2=op['b2'])
elif optimizer == 'sgd':
    optim = tf.train.MomentumOptimizer(learning_rate=learning_rate,
                                       momentum=op['m'])
else:
    # Fail fast with a clear message instead of hitting a confusing
    # NameError on `optim` in model.compile() below.
    raise ValueError('Unknown optimizer: ' + repr(optimizer))

# NOTE(review): passing `global_step` to compile() is unusual for Keras;
# it looks TPU/TF1-specific — verify it is accepted by this model class.
model.compile(optimizer=optim,
              loss='categorical_crossentropy',
              metrics=['accuracy', 'top_k_categorical_accuracy'],
              global_step=tf.train.get_global_step())

# Append per-epoch metrics to the CSV log so repeated runs accumulate.
csv_logger = CSVLogger(logfile, append=True, separator=';')
history[optimizer] = model.fit_generator(
    datagen_train.flow(trainX, trainY, batch_size=batch_size),
    epochs=epochs,
    validation_data=datagen_test.flow(testX, testY, batch_size=batch_size),
    verbose=1,
    callbacks=[csv_logger])

# Final held-out evaluation with the same augmentation pipeline as validation.
scores = model.evaluate_generator(
    datagen_test.flow(testX, testY, batch_size=batch_size),
    verbose=1)
print("Final test loss and accuracy:", scores)

save_model(save_model_filepath, model)

#train plot
plt.figure(1)
""" from keras.preprocessing.image import * from prepare_data import * from loss_and_metrics import * from model import VGG from keras.models import * from keras.callbacks import * import config as cf train_x,train_y,val_x,val_y = create_data() model = VGG(shape=(64, 256, 1)) model.summary() model.compile(loss = custom_loss, optimizer='adam', metrics=[char_acc,image_acc]) datagen = ImageDataGenerator(width_shift_range=0.14, height_shift_range=0.08, fill_mode='constant', zoom_range = 0.1, rotation_range = 10, #rescale =1./255 ) mcp_save = ModelCheckpoint(cf.CKP_PATH, save_best_only=True, monitor='val_loss', mode='min',verbose=1) def scheduler(epoch): if epoch <4 : return 0.001 elif epoch < 10: