def main():
    """Sample images from the global `gen`, score them, and save results.

    Computes the Inception Score (and, with ``args.tf``, also the FID against
    precomputed reference statistics) for ``5000 * args.splits`` generated
    images, then writes the scores as text files under ``args.results_dir``.
    Relies on module-level ``args`` and ``gen``.
    """
    chainer.cuda.get_device_from_id(args.gpu).use()

    # Fixed seed so repeated evaluations sample the same latent vectors.
    np.random.seed(1234)
    n = int(5000 * args.splits)
    print("Gen")
    ims = gen_images(gen, n, batchsize=125).astype("f")
    # Sanity check on the pixel value range of the generated batch.
    print(np.max(ims), np.min(ims))

    fid_stats = None  # only populated on the TF path, which computes FID
    if args.tf:
        stat = np.load(args.FID_stat_file, allow_pickle=False)
        is_mean, is_std, fid_mean, fid_std = inception_score_tf.get_inception_and_FID(
            ims, args.splits, ref_stats=stat)
        print(is_mean, is_std, fid_mean, fid_std)
        fid_stats = (fid_mean, fid_std)
    else:
        model = load_inception_model(args.inception_model_path)
        # Bind to is_mean/is_std so the save below works on both branches
        # (previously this branch used `mean, std`, causing a NameError).
        is_mean, is_std = inception_score(model, ims, splits=args.splits)
        print(is_mean, is_std)

    if not os.path.exists(args.results_dir):
        os.makedirs(args.results_dir)
    np.savetxt('{}/inception_score.txt'.format(args.results_dir),
               np.array([is_mean, is_std]))
    # FID is only available when the TF scorer ran.
    if fid_stats is not None:
        np.savetxt('{}/FID.txt'.format(args.results_dir),
                   np.array(fid_stats))
# Example #2
# 0
 def evaluation(trainer=None):
     """Trainer extension: score generated images and log/save the result.

     Generates ``n_ims`` images from the enclosing-scope ``gen``, computes the
     Inception Score, reports mean/std to the Chainer reporter, and — when
     ``dst`` is set — saves them to ``<dst>/stats/inception_score_<iter>.txt``.
     """
     model = load_inception_model(path)
     ims = gen_images(gen, n_ims, batchsize=batchsize).astype("f")
     mean, std = inception_score(model, ims, splits=splits)
     chainer.reporter.report({'inception_mean': mean, 'inception_std': std})
     if dst is not None:
         preview_dir = '{}/stats'.format(dst)
         # np.savetxt cannot create directories; make sure the target exists
         # (mirrors the results_dir handling elsewhere in this file).
         if not os.path.exists(preview_dir):
             os.makedirs(preview_dir)
         preview_path = preview_dir + '/inception_score_{:0>8}.txt'.format(
             trainer.updater.iteration if trainer is not None else None)
         np.savetxt(preview_path, np.array([mean, std]))
 def evaluation(trainer=None):
     """Trainer extension: score generated images and append to an IS log.

     Generates ``n_ims`` images from the enclosing-scope ``gen``, computes the
     Inception Score, reports mean/std to the Chainer reporter, and — when
     ``dst`` is set — appends ``<iteration>:<mean>,  <std>`` to ``<dst>/IS.txt``.
     """
     model = load_inception_model(path)
     ims = gen_images(gen, n_ims, batchsize=batchsize).astype("f")
     mean, std = inception_score(model, ims, splits=splits)
     chainer.reporter.report({'inception_mean': mean, 'inception_std': std})
     if dst is not None:
         log_path = '{}/IS.txt'.format(dst)
         # Guard against the default trainer=None (previously this crashed
         # with AttributeError on trainer.updater); sibling variants use the
         # same `if trainer is not None` guard.
         iteration = trainer.updater.iteration if trainer is not None else None
         with open(log_path, 'a', encoding='ascii') as f:
             f.write(str(iteration))
             f.write(':')
             f.write(str(mean))
             f.write(',  ')
             f.write(str(std))
             f.write('\n')
# Example #4
# 0
 def evaluation(trainer=None):
     """Trainer extension: score images, save stats, and snapshot the best model.

     Generates ``n_ims`` images from the enclosing-scope ``gen``, computes the
     Inception Score, reports mean/std, saves them under ``<dst>/stats/`` when
     ``dst`` is set, and snapshots ``gen`` whenever the score beats the global
     ``best_inception``.
     """
     model = load_inception_model(path)
     ims = gen_images(gen, n_ims, batchsize=batchsize).astype("f")
     mean, std = inception_score(model, ims, splits=splits)
     chainer.reporter.report({
         'inception_mean': mean,
         'inception_std': std
     })
     if dst is not None:
         preview_dir = '{}/stats'.format(dst)
         # np.savetxt cannot create directories; make sure the target exists
         # (mirrors the results_dir handling elsewhere in this file).
         if not os.path.exists(preview_dir):
             os.makedirs(preview_dir)
         preview_path = preview_dir + '/inception_score_{:0>8}.txt'.format(
             trainer.updater.iteration if trainer is not None else None)
         np.savetxt(preview_path, np.array([mean, std]))
     # Export the generator whenever it achieves a new best score.
     global best_inception
     if trainer is not None and best_inception < mean:
         ext = extensions.snapshot_object(gen, gen.__class__.__name__ + '_best_inception.npz')
         ext(trainer)
         best_inception = mean