# Test-time driver for the CycleGAN underwater-image generator.
# Loads the serialized architecture (JSON) and trained weights (H5),
# enhances each image in `test_paths`, and saves real/generated pairs.
# NOTE(review): `model_json`, `model_h5`, `test_paths`, `samples_dir`
# and the helpers (`model_from_json`, `read_and_resize`, `preprocess`,
# `deprocess`, `misc`) are defined earlier in this file — confirm.
with open(model_json, "r") as json_file:
    loaded_model_json = json_file.read()

times = []
s = time.time()
cycle_gan_generator = model_from_json(loaded_model_json)
# load weights
cycle_gan_generator.load_weights(model_h5)
tot = time.time() - s
times.append(tot)  # times[0] is the model-load time, not inference time
print("\nLoaded data and model")

# testing loop
for img_path in test_paths:
    # prepare data
    img_name = ntpath.basename(img_path).split('.')[0]
    im = read_and_resize(img_path, (256, 256))
    im = preprocess(im)
    im = np.expand_dims(im, axis=0)  # (1,256,256,3)
    # generate enhanced image (time only the forward pass + deprocess)
    s = time.time()
    gen = cycle_gan_generator.predict(im)
    gen = deprocess(gen)  # Rescale to 0-1
    tot = time.time() - s
    times.append(tot)
    # save samples
    misc.imsave(samples_dir + img_name + '_real.png', im[0])
    misc.imsave(samples_dir + img_name + '_gen.png', gen[0])

# some statistics
# FIX(review): the original ended with a dangling `if (num_test == 0):`
# and never reported the timings it collected — complete the report.
num_test = len(test_paths)
if num_test == 0:
    print("\nFound no images for test")
else:
    print("\nTotal images: {0}".format(num_test))
    # skip times[0] (model load); guard against a zero total to avoid
    # a ZeroDivisionError on pathologically fast clocks
    inference_total = sum(times[1:])
    if inference_total > 0:
        print("Time taken: {0} sec at {1} fps".format(
            inference_total, num_test / inference_total))
# Test-time driver for the FUnIE-GAN generator.
# Loads architecture + weights, then walks the `test_paths` directory
# tree, enhancing every .jpg and saving the result next to the input.
# NOTE(review): `checkpoint_dir`, `model_name_by_epoch`, `model_h5`,
# `test_paths` and the helpers (`model_from_json`, `read_and_resize`,
# `preprocess`, `deprocess`, `misc`) are defined earlier in this file.
model_json = checkpoint_dir + model_name_by_epoch + ".json"
with open(model_json, "r") as json_file:
    loaded_model_json = json_file.read()
funie_gan_generator = model_from_json(loaded_model_json)
funie_gan_generator.load_weights(model_h5)
print("\nLoaded data and model")

# per-image inference timings; also doubles as the processed-image count
times = []
for root, dirs, files in os.walk(test_paths):
    for img_path in files:
        if not img_path.lower().endswith('.jpg'):
            continue
        img_name = ntpath.basename(img_path).split('.')[0]
        # read_and_resize here also returns the original shape so the
        # output can be restored to input resolution by deprocess
        im, shape = read_and_resize(os.path.join(root, img_path), (256, 256))
        im = preprocess(im)
        s = time.time()
        gen = funie_gan_generator.predict(im)
        gen = deprocess(gen, shape)
        tot = time.time() - s
        times.append(tot)
        misc.imsave(os.path.join(root, img_name + '_gen.png'), gen[0])

# some statistics
# FIX(review): the original computed `num_test = len(test_paths)`, but
# `test_paths` is the directory path handed to os.walk — len() of a
# string is its character count, not the number of images processed.
# Count the images we actually ran inference on instead.  (Also removed
# the dead `s = time.time()` that preceded the loop: it was overwritten
# before ever being read.)
num_test = len(times)
if num_test == 0:
    print("\nFound no images for test")
else:
    print("\nTotal images: {0}".format(num_test))