# Example #1
# 0
            filename = os.path.basename(path[i])
            img_name = './val_images/' + str(num_epoch) + '_' + L_psnr + '_' + filename
            utls.imwrite(img_name, out_img)
        psnr_ave /= number
        print('------------------------------------------------')
        print("[Epoch %d]  [PSNR_AVE :%f]" % (num_epoch,  psnr_ave))
        print('------------------------------------------------')

    def on_batch_end(self, batch, logs=None):
        """Print the current learning rate and append the batch loss to trainList.txt.

        Args:
            batch: Integer index of the batch within the current epoch
                (supplied by Keras).
            logs: Metric dict supplied by Keras; must contain key 'loss'.
                Defaults to None instead of a mutable {} default to avoid
                the shared-mutable-default pitfall; behavior is unchanged
                (an empty dict still raises KeyError on 'loss', as before).
        """
        logs = logs or {}
        print(' - LR = ', K.eval(self.model.optimizer.lr))
        # 'a' mode keeps one running log file across all batches/epochs.
        # The context manager guarantees the handle is closed even if the
        # write raises (the original open/write/close could leak it).
        with open("./trainList.txt", 'a') as log_file:
            log_file.write("%d %f\n" % (batch, logs['loss']))

# --- Callback setup and training loop for example #1 ---
# NOTE(review): Show_History, scheduler, keras, combined and data_loader are
# defined elsewhere in the file/project and are not visible in this chunk.
show_history = Show_History()
# Epoch-wise learning-rate schedule driven by the external `scheduler` function.
change_lr = LearningRateScheduler(scheduler)
# TensorBoard logging of scalars/graph only (no histograms, images, embeddings).
tbCallBack = keras.callbacks.TensorBoard(log_dir='./logs', histogram_freq=0, write_graph=True, write_images=False,
                                         embeddings_freq=0, embeddings_layer_names=None, embeddings_metadata=None)
# Abort training as soon as the loss becomes NaN.
nanstop = keras.callbacks.TerminateOnNaN()
# Halve the LR whenever training loss plateaus for 2 epochs, floored at 1e-10.
reducelearate = keras.callbacks.ReduceLROnPlateau(monitor='loss', factor=0.5, patience=2, min_lr=1e-10)
# NOTE(review): earlystop is created here but NOT passed to fit_generator
# below — confirm whether early stopping was deliberately disabled.
# min_delta=3 is also unusually large for a loss metric; verify the intent.
earlystop = keras.callbacks.EarlyStopping(monitor='loss', min_delta=3, patience=0, verbose=0, mode='min')

batch_size = 16
step_epoch = 200
# Train from the generator: 200 epochs x 200 steps, 16 samples per batch.
combined.fit_generator(
        data_loader.load_data(batch_size),
        steps_per_epoch=step_epoch,
        epochs=200,
        callbacks=[tbCallBack, show_history, change_lr, nanstop, reducelearate])
print('Done!')
# Example #2
# 0
# --- Callback setup, data split and training loop for example #2 ---
# Multiply LR by 0.8 whenever training loss plateaus for 2 epochs (floor 1e-10).
reducelearate = keras.callbacks.ReduceLROnPlateau(monitor='loss',
                                                  factor=0.8,
                                                  patience=2,
                                                  min_lr=1e-10)
# NOTE(review): earlystop is created but NOT passed to fit_generator below —
# confirm whether early stopping was deliberately disabled. min_delta=3 is
# unusually large for a loss metric; verify the intent.
earlystop = keras.callbacks.EarlyStopping(monitor='loss',
                                          min_delta=3,
                                          patience=0,
                                          verbose=0,
                                          mode='min')

# Collect all OTS hazy images in natural (numeric) filename order so the
# train/val split below is deterministic across runs.
im_path = natsort.natsorted(
    glob("../../../../media/bizon/Elements/OTS/train/OTShaze/*"),
    reverse=False)
# NOTE(review): hard-coded split — first 69952 files for training, next
# 20000 (indices 69952:89952) for validation; assumes at least 89952 files
# exist in the directory. TODO confirm against the actual dataset size.
train_list = im_path[:69952]
val_list = im_path[69952:89952]
train = data_loader.load_data(fake_list=train_list)
val = data_loader.load_data(fake_list=val_list)
train_batch_size = 64
val_batch_size = 16

# Train `imh` (model defined elsewhere) for 200 epochs; steps are sized so
# each epoch covers the full train/val lists once at the batch sizes above.
history = imh.fit_generator(
    train,
    steps_per_epoch=69952 // train_batch_size,
    epochs=200,
    validation_data=val,
    validation_steps=20000 // val_batch_size,
    callbacks=[show_history, tbCallBack, change_lr, nanstop, reducelearate])

# Persist training curves; saving raw history is currently disabled.
utls.plot_history(history, './results/', 'imh')
#utls.save_history(history, './results/', 'imh')