def benchmark(weights_filename, r, verbose="True"):
    """Benchmark an ESPCN super-resolution model on the test set.

    Loads the weights at model/weights/<weights_filename>, runs the model over
    the test images served by DataGenerator, and prints the average PSNR.

    Args:
        weights_filename: File name of the weights inside model/weights/.
        r: Upscaling ratio passed to espcn() and used to pick the
           'LRbicx<r>' low-resolution test dictionary.
        verbose: Legacy stringly-typed flag; the string "True" (or a real
                 boolean True) enables per-image PSNR printing. Any other
                 value disables it. Default kept as "True" for backward
                 compatibility with existing callers.
    """
    filepath = 'model/weights/' + weights_filename  # filepath of weights
    num_images = 729  # number of testing images to benchmark on (<=729)

    # Accept both the legacy string flag ("True"/"False") and real booleans.
    # str(True) == "True", so boolean callers now behave as expected too.
    verbose = str(verbose) == "True"

    # Compile model
    opt = tf.keras.optimizers.Adam(learning_rate=0.001)

    def PSNR(y_true, y_pred):
        # Peak Signal-to-Noise Ratio; pixel values assumed in [0, 1].
        max_pixel = 1.0
        return tf.image.psnr(y_true, y_pred, max_val=max_pixel)

    model = espcn(r)
    model.compile(optimizer=opt, loss='mse', metrics=[PSNR])

    # Initialize testing generator
    testing_generator = DataGenerator('LRbicx' + str(r), batch_size=1, dictionary="test")

    # Load weights
    model.load_weights(filepath)

    # Calculate average PSNR of all testing data
    average_psnr = 0
    for i in range(num_images):
        # Index the generator directly instead of calling __getitem__.
        lr, hr = testing_generator[i]
        sr = model.predict(lr)
        result = psnr(sr[0], hr[0])
        average_psnr += result
        if verbose:
            print('Image: ' + str(i) + ', PSNR: ' + str(result) + ', Average: ' + str(average_psnr/(i+1)))
    print("Average PSNR: " + str(average_psnr/num_images))
def train(conf):
    """Run the KernelGAN training loop for conf.max_iters iterations.

    Args:
        conf: Configuration object; must provide max_iters and whatever
              KernelGAN/DataGenerator read from it.
    """
    gan = KernelGAN(conf)
    learner = Learner()
    data = DataGenerator(conf, gan)
    for iteration in tqdm.tqdm(range(conf.max_iters), ncols=60):
        # Idiomatic indexing instead of an explicit __getitem__ call.
        g_in, d_in = data[iteration]
        gan.train(g_in, d_in)
        learner.update(iteration, gan)
    # Finalize (e.g. post-process / save results) after the loop completes.
    gan.finish()
def estimate_kernel(img_file):
    """Estimate the downscaling kernel of a single image with KernelGAN.

    Args:
        img_file: Path to the input image; passed to config_kernelGAN to
                  build the per-image configuration.
    """
    conf = config_kernelGAN(img_file)
    kgan = KernelGAN(conf)
    learner = Learner()
    data = DataGenerator(conf, kgan)
    for iteration in tqdm.tqdm(range(conf.max_iters), ncols=70):
        # Idiomatic indexing instead of an explicit __getitem__ call;
        # third element of the batch tuple is unused here.
        g_in, d_in, _ = data[iteration]
        kgan.train(g_in, d_in)
        learner.update(iteration, kgan)
    # Finalize (e.g. save the estimated kernel) after training.
    kgan.finish()
def train(conf):
    """Run the DBPI super-resolution training loop for conf.max_iters iterations.

    Args:
        conf: Configuration object; must provide max_iters and whatever
              DBPISR/DataGenerator read from it.
    """
    sr_net = DBPISR(conf)
    learner = Learner()
    data = DataGenerator(conf, sr_net)
    for iteration in tqdm.tqdm(range(conf.max_iters), ncols=60):
        # Idiomatic indexing instead of an explicit __getitem__ call.
        g_in = data[iteration]
        sr_net.train(g_in)
        learner.update(iteration, sr_net)
    # Finalize with the original input image after training completes.
    sr_net.finish(data.input_image)