Example 1
def main(_):
    if not os.path.exists(FLAGS.outdir):
        os.mkdir(FLAGS.outdir)
    if os.path.exists(FLAGS.prunedlist_path):
        prunedlist = np.loadtxt(FLAGS.prunedlist_path, dtype=np.int64)
    else:
        prunedlist = [0] * 16

    #network = EDSR(FLAGS.layers, FLAGS.featuresize, FLAGS.scale, FLAGS.channels)
    network = EDSR(FLAGS.layers, FLAGS.featuresize, FLAGS.scale,
                   FLAGS.channels, FLAGS.channels, prunedlist)
    network.buildModel()
    network.resume(FLAGS.reusedir, 1999)

    hr_list, lr_imgs, groundtruth_imgs = data_for_predict(
        FLAGS.datadir, FLAGS.groundtruth, FLAGS.postfixlen)

    if groundtruth_imgs:
        psnr_list = []
        time_list = []
        fo = open(FLAGS.outdir + '/psnr.csv', 'w')
        fo.writelines("file, PSNR\n")
        for lr_img, groundtruth_img, hr_name in zip(lr_imgs, groundtruth_imgs,
                                                    hr_list):
            start = time.time()
            out = network.predict([lr_img])
            # out = enhance_predict([lr_img],network)
            use_time = time.time() - start
            time_list.append(use_time)
            tl.vis.save_image(out[0], FLAGS.outdir + '/' + hr_name)
            psnr = utils.psnr_np(groundtruth_img, out[0], scale=8)
            print('%s : %.6f' % (hr_name, psnr))
            psnr_list.append(psnr)
            fo.writelines("%s, %.6f\n" % (hr_name, psnr))

        print(np.mean(psnr_list))
        print(np.mean(time_list))
        fo.writelines("%d, Average,0, %.6f" % (-1, np.mean(psnr_list)))
        fo.close()

    else:
        for i in tqdm(range(len(hr_list))):
            out = network.predict([lr_imgs[i]])
            tl.vis.save_image(out[0], FLAGS.outdir + '/' + hr_list[i])
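The snippet above assumes a FLAGS object and a main(_) entry point defined elsewhere in the script. A minimal sketch of how those flags could be declared with TensorFlow 1.x's tf.app.flags; the flag names come from Example 1, but every default value and help string here is an assumption:

import tensorflow as tf

flags = tf.app.flags
FLAGS = flags.FLAGS

# Flag names mirror Example 1; the defaults below are illustrative only.
flags.DEFINE_string('datadir', './test_lr', 'Directory with low-resolution inputs.')
flags.DEFINE_string('groundtruth', './test_hr', 'Directory with ground-truth HR images (optional).')
flags.DEFINE_string('outdir', './out', 'Where predicted images and psnr.csv are written.')
flags.DEFINE_string('reusedir', './checkpoint', 'Checkpoint directory passed to network.resume.')
flags.DEFINE_string('prunedlist_path', '', 'Optional text file with per-block pruning counts.')
flags.DEFINE_integer('layers', 16, 'Number of residual blocks.')
flags.DEFINE_integer('featuresize', 64, 'Feature maps per convolution.')
flags.DEFINE_integer('scale', 8, 'Upscaling factor.')
flags.DEFINE_integer('channels', 3, 'Image channels.')
flags.DEFINE_integer('postfixlen', 0, 'Filename postfix length handled by data_for_predict.')

if __name__ == '__main__':
    tf.app.run()  # parses the flags and calls main(_)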
Example 2
def main(_):
    if not os.path.exists(FLAGS.outdir):
        os.mkdir(FLAGS.outdir)

    network = EDSR(FLAGS.n_channels)
    network.build_model(FLAGS.n_res_blocks, FLAGS.n_features, FLAGS.scale)
    network.resume(FLAGS.reusedir, None)

    hr_list, lr_imgs, groundtruth_imgs = data_for_predict(
        FLAGS.datadir, FLAGS.grouthtruth, FLAGS.postfixlen)

    # Valid
    if groundtruth_imgs:
        psnr_list = []
        time_list = []
        fo = open(FLAGS.outdir + '/psnr.csv', 'w')
        fo.writelines("file, PSNR\n")
        for lr_img, groundtruth_img, hr_name in zip(lr_imgs, groundtruth_imgs,
                                                    hr_list):
            start = time.time()
            out = network.predict([lr_img])
            # out = enhance_predict([lr_img],network)
            use_time = time.time() - start
            time_list.append(use_time)
            tl.vis.save_image(out[0], FLAGS.outdir + '/' + hr_name)
            psnr = utils.psnr_np(groundtruth_img, out[0], scale=4)
            print('%s : %.6f' % (hr_name, psnr))
            psnr_list.append(psnr)
            fo.writelines("%s, %.6f\n" % (hr_name, psnr))

        print(np.mean(psnr_list))
        print(np.mean(time_list))
        fo.writelines("Average psnr,0, %.6f" % (np.mean(psnr_list)))
        fo.writelines("Average runtime,0, %.6f" % (np.mean(time_list)))
        fo.close()

    # Test
    else:
        for i in tqdm(range(len(hr_list))):
            # out = network.predict([lr_imgs[i]])
            out = enhance_predict([lr_imgs[i]], network)
            tl.vis.save_image(out[0], FLAGS.outdir + '/' + hr_list[i])
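enhance_predict is called here (and commented out in Example 1) but its body is not shown. In EDSR-style code such a helper usually performs geometric self-ensemble: run the network on flipped/rotated copies of the input, undo each transform on the output, and average. A hypothetical sketch under that assumption, using only the network.predict call seen above:

import numpy as np

def enhance_predict(lr_imgs, network):
    """Hypothetical geometric self-ensemble around network.predict.

    Assumes lr_imgs holds a single HWC numpy image and that
    network.predict returns a list with one HWC output image.
    """
    lr = lr_imgs[0]
    outputs = []
    for k in range(4):               # four 90-degree rotations ...
        for flip in (False, True):   # ... times two horizontal flips
            aug = np.rot90(lr, k)
            if flip:
                aug = np.fliplr(aug)
            out = network.predict([aug])[0]
            # Undo the augmentation on the prediction before averaging.
            if flip:
                out = np.fliplr(out)
            out = np.rot90(out, -k)
            outputs.append(out)
    return [np.mean(outputs, axis=0)]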
Example 3
def main(_):
    img_files = sorted(os.listdir(FLAGS.datadir))
    lr_imgs = tl.visualize.read_images(img_files, FLAGS.datadir)

    mean_list = []
    psnr_dict = {}
    for i in range(len(img_files)):
        name_lr, postfix = os.path.splitext(img_files[i])
        name_hr = name_lr
        hr_img = tl.visualize.read_image(name_hr + postfix, FLAGS.grouthtruth)
        mean = utils.psnr_np(hr_img, lr_imgs[i])
        print('%d -> %s : %.6f' % (i, name_lr + postfix, mean))
        mean_list.append(mean)
        psnr_dict[name_hr] = mean

    dict_sort_by_value = OrderedDict(sorted(psnr_dict.items(), key=lambda x: x[1]))
    mean = np.mean(mean_list)
    with open(FLAGS.datadir + '/' + FLAGS.record, 'w') as file:
        for k, v in dict_sort_by_value.items():
            file.write('%s : %.6f\n' % (k, v))
        file.write('Average: %.6f\n' % (mean))

    print('Average: %.6f' % (mean))
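utils.psnr_np is used throughout these examples but its implementation is not shown. A minimal numpy sketch of what such a helper typically computes, assuming 8-bit image ranges and that the optional scale argument crops a border of scale pixels before comparison (a common convention in super-resolution evaluation):

import numpy as np

def psnr_np(hr_img, sr_img, scale=0):
    """Hypothetical PSNR (dB) between two images in the 0-255 range."""
    hr = np.asarray(hr_img, dtype=np.float64)
    sr = np.asarray(sr_img, dtype=np.float64)
    if scale > 0:
        # Ignore a border of `scale` pixels, as SR benchmarks usually do.
        hr = hr[scale:-scale, scale:-scale, ...]
        sr = sr[scale:-scale, scale:-scale, ...]
    mse = np.mean((hr - sr) ** 2)
    if mse == 0:
        return float('inf')
    return 10.0 * np.log10(255.0 ** 2 / mse)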
Example 4
def main(_):
    if os.path.exists(FLAGS.outdir):
        shutil.rmtree(FLAGS.outdir)
    os.mkdir(FLAGS.outdir)

    img_files = sorted(os.listdir(FLAGS.datadir))
    lr_imgs, hr_imgs, lr_pos, hr_pos = utils.get_image_set(
        img_files,
        input_dir=FLAGS.datadir,
        ground_truth_dir=FLAGS.groundtruth,
        hr_image_size=0,
        scale=FLAGS.scale,
        postfix_len=FLAGS.postfixlen)
    hr_norm_imgs = utils.normalize_color(hr_imgs)
    network = WaveletSR(FLAGS.layers,
                        FLAGS.featuresize,
                        FLAGS.scale,
                        FLAGS.waveletimgsize,
                        FLAGS.hrimgsize,
                        channels=3)
    network.buildModel()
    network.resume(FLAGS.reusedir, global_step=FLAGS.step)

    level = FLAGS.hrimgsize // (FLAGS.scale * FLAGS.waveletimgsize)
    fo = open(FLAGS.outdir + '/psnr.csv', 'w')
    fo.writelines("seq, file, L1, PSNR\n")
    mean_list = []

    for i in range(len(img_files)):
        size, _, _ = np.shape(lr_imgs[i])
        size_hr, _, _ = np.shape(hr_imgs[i])
        target_imgs = utils.get_dwt_images(
            [hr_norm_imgs[i]],
            img_size=1 + (size_hr // (FLAGS.scale * math.pow(2, level - 1))),
            wavelet=FLAGS.wavelet)
        input_imgs = utils.get_dwt_images(
            [lr_imgs[i]], img_size=1 + (size // level),
            wavelet=FLAGS.wavelet) if level > 1 else [lr_imgs[i]]

        #output = predict(i, input_imgs, network, target_imgs, fo)
        output = ensem_predict(input_imgs, network)

        output_img = make_same_shape(hr_imgs[i], output[0])
        print(
            '%dth composed image, loss = %.6f, min = %.6f, max = %.6f, mean = %.6f, std = %.6f\n'
            % (i,
               np.mean(np.abs(hr_imgs[i] / 255.0 -
                              output_img.astype(np.float32))),
               np.min(output_img), np.max(output_img),
               np.mean(output_img), math.sqrt(np.var(output_img))))

        output_img = np.clip(output_img, 0, 1)
        output_img = output_img * 255 + 0.5

        mean = utils.psnr_np(hr_imgs[i], output_img, scale=FLAGS.scale)

        #fo.writelines("%s, %.6f\n"%(img_files[i], mean))
        fo.writelines(
            "%d, %s, %.6f, %.6f\n" %
            (i, img_files[i], np.mean(np.abs(hr_imgs[i] - output_img)), mean))
        mean_list.append(mean)
        tl.vis.save_image(output_img, FLAGS.outdir + '/' + img_files[i])

    fo.writelines("%d, Average,0, %.6f\n" % (-1, np.mean(mean_list)))
    fo.close()
    return
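make_same_shape is another helper that is not shown; it evidently brings the composed network output to the spatial shape of hr_imgs[i] so the two can be compared pixel by pixel. One hypothetical crop-and-pad version for HWC images:

import numpy as np

def make_same_shape(reference_img, img):
    """Hypothetical helper: match img to the height/width of reference_img (HWC arrays)."""
    ref_h, ref_w = reference_img.shape[:2]
    # Crop any excess rows/columns from the prediction.
    img = img[:ref_h, :ref_w, ...]
    # If the prediction came out smaller, pad by repeating the edge pixels.
    pad_h = ref_h - img.shape[0]
    pad_w = ref_w - img.shape[1]
    if pad_h > 0 or pad_w > 0:
        img = np.pad(img, ((0, pad_h), (0, pad_w), (0, 0)), mode='edge')
    return img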