"%s/%04d/predictions/final_%06d.jpg" % (model, epoch, ind), np.uint8( np.maximum(np.minimum(output[0] * 255.0, 255.0), 0.0))) sic.imsave( "%s/%04d/predictions/predictions_%06d.jpg" % (model, epoch, ind), np.uint8( np.maximum(np.minimum(C0_imall[0] * 255.0, 255.0), 0.0))) saver.save(sess, "%s/model.ckpt" % model) saver.save(sess, "%s/%04d/model.ckpt" % (model, epoch)) # Inference else: test_low = utils.get_names(test_dir) numtest = len(test_low) print(test_low[0]) out_folder = test_dir.split('/')[-1] outputs = [None] * 4 # for ind in range(numtest): for ind in range(10): input_image_src, input_image_target, input_flow_forward_src, input_flow_backward_src = prepare_input_w_flow( test_low[ind], num_frames=num_frame, gray=True) if input_image_src is None or input_image_target is None or input_flow_forward_src is None: print("Not able to read the images/flows.") continue st = time.time() C0_imall, C1_imall, C0_im, C1_im = sess.run( [objDict["prediction_0"], objDict["prediction_1"], C0, C1], feed_dict={
# second prediction branch: channel 1 of the input, tiled to 3 channels,
# passed through the shared-weight VCN (reuse=True)
g1 = VCN(utils.build(tf.tile(input_i[:, :, :, 1:2], [1, 1, 1, 3])), reuse=True)
saver = tf.compat.v1.train.Saver(max_to_keep=1000)

# +
sess.run([tf.compat.v1.global_variables_initializer()])
# restore all trainable variables from the latest checkpoint under `model`
var_restore = [v for v in tf.compat.v1.trainable_variables()]
saver_restore = tf.compat.v1.train.Saver(var_restore)
ckpt = tf.train.get_checkpoint_state(model)
saver_restore.restore(sess, ckpt.model_checkpoint_path)
# -

if not len(test_img):
    img_names = utils.get_names(test_dir)
    ind = 0
    for img_name in img_names:
        # read the image as grayscale and scale to [0, 1]
        im = np.float32(scipy.misc.imread(img_name, mode='L')) / 255.0
        # crop height and width to multiples of 32 to match the network's downsampling
        h = im.shape[0] // 32 * 32
        w = im.shape[1] // 32 * 32
        im = im[np.newaxis, :h, :w, np.newaxis]
        st = time.time()
        output = sess.run(
            g0, feed_dict={input_i: np.concatenate((im, im), axis=3)})
        print("test time for %s --> %.3f" % (ind, time.time() - st))
        folder = test_dir.split('/')[-1]
        # create one output folder per result branch on the first pass
        if not os.path.isdir("%s/%s" % (model, folder)):
            for idx in range(5):
                os.makedirs("%s/%s/result%d" % (model, folder, idx))
        out_all = np.concatenate(