Example #1
        if optimizer == 'padam':
            optim = Padam(learning_rate=learning_rate, p=op['p'], beta1=op['b1'], beta2=op['b2'])
        elif optimizer == 'adam':
            optim = tf.train.AdamOptimizer(learning_rate=learning_rate, beta1=op['b1'], beta2=op['b2'])
        elif optimizer == 'adamw':
            # adamw = tf.contrib.opt.extend_with_decoupled_weight_decay(tf.train.AdamOptimizer)
            optim = tf.contrib.opt.AdamWOptimizer(weight_decay=op['weight_decay'], learning_rate=learning_rate,  beta1=op['b1'], beta2=op['b2'])
        elif optimizer == 'amsgrad':
            optim = AMSGrad(learning_rate=learning_rate, beta1=op['b1'], beta2=op['b2'])
        elif optimizer == 'sgd':
            optim = tf.train.MomentumOptimizer(learning_rate=learning_rate, momentum=op['m'])
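        # Note: Padam and AMSGrad above are project-local optimizer classes, not
        # part of tf.train. The op[...] lookups assume a per-optimizer settings
        # dict of roughly this (hypothetical) shape:
        #   optim_params = {'padam': {'p': 0.125, 'b1': 0.9, 'b2': 0.999,
        #                             'color': 'r', 'linestyle': '-'},
        #                   'sgd':   {'m': 0.9, 'color': 'b', 'linestyle': '--'},
        #                   ...}
        # with 'color'/'linestyle' consumed by the plotting loop further down.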

        # Keras Model.compile() takes no global_step argument (the TF optimizer
        # manages its own step), so that kwarg is dropped here.
        model.compile(optimizer=optim, loss='categorical_crossentropy',
                      metrics=['accuracy', 'top_k_categorical_accuracy'])

        csv_logger = CSVLogger(logfile, append=True, separator=';')

        history[optimizer] = model.fit_generator(
            datagen_train.flow(trainX, trainY, batch_size=batch_size),
            epochs=epochs,
            validation_data=datagen_test.flow(testX, testY, batch_size=batch_size),
            verbose=1, callbacks=[csv_logger])

        scores = model.evaluate_generator(datagen_test.flow(testX, testY, batch_size=batch_size), verbose=1)

        print("Final test loss and accuracy:", scores)
        # filepath = 'model_'+optimizer+'_'  + dataset + '.h5'
        # Keras save_model expects (model, filepath), not (filepath, model).
        save_model(model, save_model_filepath)

# Plot training loss per optimizer
plt.figure(1)
for optimizer in optim_array:
    op = optim_params[optimizer]
    train_loss = history[optimizer].history['loss']
    epoch_count = range(1, len(train_loss) + 1)
    plt.plot(epoch_count, train_loss, color=op['color'], linestyle=op['linestyle'])
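For reference, Padam ("partially adaptive" Adam, Chen & Gu 2018) exposes the exponent p that the snippet passes as op['p']: p = 1/2 recovers AMSGrad, and p -> 0 approaches SGD with momentum. Below is a minimal NumPy sketch of one Padam update step, assuming the paper's AMSGrad-style second-moment maximum; the names here are illustrative and not taken from the snippet's Padam class.

import numpy as np

def padam_step(w, g, state, lr=0.01, p=0.125, b1=0.9, b2=0.999, eps=1e-8):
    # One Padam update; state = (m, v, v_hat).
    m, v, v_hat = state
    m = b1 * m + (1 - b1) * g             # first moment (momentum)
    v = b2 * v + (1 - b2) * g * g         # second moment
    v_hat = np.maximum(v_hat, v)          # AMSGrad-style running max
    w = w - lr * m / (v_hat + eps) ** p   # p = 1/2 gives AMSGrad; smaller p damps adaptivity
    return w, (m, v, v_hat)

# e.g. one step on a 3-parameter vector:
w = np.zeros(3)
state = (np.zeros(3), np.zeros(3), np.zeros(3))
w, state = padam_step(w, np.array([0.1, -0.2, 0.3]), state)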
Example #2
model.compile(loss=custom_loss,
              optimizer='adam',
              metrics=[char_acc, image_acc])

# Augmentation: horizontal/vertical shifts plus a small zoom and rotation;
# fill_mode='constant' pads the exposed border with a constant value.
datagen = ImageDataGenerator(width_shift_range=0.14,
                             height_shift_range=0.08,
                             fill_mode='constant',
                             zoom_range=0.1,
                             rotation_range=10,
                             # rescale=1./255
                             )
# Keep only the weights with the lowest val_loss at cf.CKP_PATH.
mcp_save = ModelCheckpoint(cf.CKP_PATH, save_best_only=True, monitor='val_loss', mode='min', verbose=1)

def scheduler(epoch):
    # Piecewise-constant learning-rate schedule.
    if epoch < 4:
        return 0.001
    elif epoch < 10:
        return 0.001 / 5
    elif epoch < 15:
        return 0.0001
    else:
        # Same rate as the original's epoch < 30 branch, but the final else also
        # covers epoch >= 30; returning None there would crash LearningRateScheduler.
        return 0.0001 / 2
n = train_x.shape[0]  # number of training samples
# model.load_weights("plate.h5")
lr_reduce = LearningRateScheduler(scheduler, verbose=1)
model.fit_generator(datagen.flow(train_x, train_y, batch_size=cf.BATCH_SIZE),
                    epochs=10,
                    steps_per_epoch=n // cf.BATCH_SIZE,
                    callbacks=[lr_reduce, mcp_save],
                    validation_data=(val_x, val_y))
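After training, the lowest-val_loss weights live at cf.CKP_PATH thanks to the ModelCheckpoint above. A minimal sketch of reloading and re-evaluating them, assuming the same model, val_x/val_y, and cf objects as in the snippet:

model.load_weights(cf.CKP_PATH)  # restore the best checkpoint
scores = model.evaluate(val_x, val_y, batch_size=cf.BATCH_SIZE, verbose=1)
print("Best-checkpoint metrics (loss, char_acc, image_acc):", scores)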