Example #1
    def __init__(self, params):
        """init

        Parameters
        ----------
        params: dict.
        """

        self.params = params

        # global training step counter and learning-rate placeholder
        self.global_step = tf.Variable(0,
                                       dtype=tf.int32,
                                       trainable=False,
                                       name='global_step')
        self.lr = tf.placeholder(tf.float32, shape=[], name='learning_rate')

        # iterators over the training, validation and test splits
        (self.train_iter, self.valid_iter, self.test_iter,
         self.train_size) = self.data_loader()

        # building graph
        (self.x_r, self.angles_r, self.labels, self.x_t,
         self.angles_g) = self.train_iter.get_next()

        (self.x_valid_r, self.angles_valid_r, self.labels_valid,
         self.x_valid_t, self.angles_valid_g) = self.valid_iter.get_next()

        (self.x_test_r, self.angles_test_r, self.labels_test, self.x_test_t,
         self.angles_test_g) = self.test_iter.get_next()

        # redirect the real image to the target angles, then map it back to
        # its original angles (generator weights are shared via reuse=True)
        self.x_g = generator(self.x_r, self.angles_g)
        self.x_recon = generator(self.x_g, self.angles_r, reuse=True)

        # random gaze angles in [-1, 1] for the validation images
        self.angles_valid_g = tf.random_uniform([params.batch_size, 2],
                                                minval=-1.0,
                                                maxval=1.0)

        self.x_valid_g = generator(self.x_valid_r,
                                   self.angles_valid_g,
                                   reuse=True)

        # reconstruction loss
        self.recon_loss = l1_loss(self.x_r, self.x_recon)

        # content loss and style loss
        self.c_loss, self.s_loss = self.feat_loss()

        # regression losses and adversarial losses
        (self.d_loss, self.g_loss, self.reg_d_loss, self.reg_g_loss,
         self.gp) = self.adv_loss()

        # update operations for generator and discriminator
        self.d_op, self.g_op = self.add_optimizer()

        # adding summaries
        self.summary = self.add_summary()

        # initialization operation
        self.init_op = tf.group(tf.global_variables_initializer(),
                                tf.local_variables_initializer())
    def dataset_gene(self):
        """Run the trained generator over the whole dataset and save the
        synthesized images under <log_dir>/dataset."""
        hps = self.params

        image_data_class = ImageAll(load_size=hps.image_size,
                                    channels=3,
                                    data_path=hps.data_path)
        image_data_class.preprocess()

        # build a one-shot input pipeline over every image in the dataset
        dataset = tf.data.Dataset.from_tensor_slices(
            (image_data_class.images, image_data_class.angles,
             image_data_class.ids, image_data_class.suffix))

        dataset = dataset.apply(
            map_and_batch(image_data_class.image_processing,
                          hps.batch_size,
                          num_parallel_batches=8))

        dataset_iterator = dataset.make_one_shot_iterator()

        image, angle, index, suffix = dataset_iterator.get_next()

        checkpoint = tf.train.latest_checkpoint(hps.log_dir)

        image_syn = generator(image, angle, reuse=True)

        tf_config = tf.ConfigProto()
        tf_config.gpu_options.allow_growth = True

        saver = tf.train.Saver()

        with tf.Session(config=tf_config) as test_sess:

            with test_sess.graph.as_default():
                saver.restore(test_sess, checkpoint)

                imgs_dir = os.path.join(hps.log_dir, 'dataset')
                if not os.path.exists(imgs_dir):
                    os.mkdir(imgs_dir)

                try:
                    while True:
                        imgs, gaze, idx, suffix_str = test_sess.run(
                            [image_syn, angle, index, suffix])
                        # rescale normalized gaze back to degrees (H: +/-15, V: +/-10)
                        gaze = gaze * np.array([15, 10])
                        for j in range(hps.batch_size):
                            imsave(
                                os.path.join(
                                    imgs_dir, '%s_%.3fV_%.3fH_%s.jpg' %
                                    (idx[j], gaze[j][1], gaze[j][0],
                                     suffix_str[j])), imgs[j])

                except tf.errors.OutOfRangeError:
                    logging.info("dataset_gene finished.")
Example #3
    def eval(self):
        """Quantitative evaluation: save real, target and generated images,
        tagged with the gaze angular error, under <log_dir>/eval."""
        hps = self.params

        checkpoint = tf.train.latest_checkpoint(hps.log_dir)

        x_fake = generator(self.x_test_r, self.angles_test_g, reuse=True)

        tf_config = tf.ConfigProto()
        tf_config.gpu_options.allow_growth = True

        saver = tf.train.Saver()

        with tf.Session(config=tf_config) as test_sess:

            with test_sess.graph.as_default():
                saver.restore(test_sess, checkpoint)

                imgs_dir = os.path.join(hps.log_dir, 'eval')
                if not os.path.exists(imgs_dir):
                    os.mkdir(imgs_dir)

                tar_dir = os.path.join(imgs_dir, 'targets')
                gene_dir = os.path.join(imgs_dir, 'genes')
                real_dir = os.path.join(imgs_dir, 'reals')
                os.makedirs(tar_dir)
                os.makedirs(gene_dir)
                os.makedirs(real_dir)

                try:
                    i = 0
                    while True:
                        (real_imgs, target_imgs, fake_imgs, a_r,
                         a_t) = test_sess.run([
                             self.x_test_r, self.x_test_t, x_fake,
                             self.angles_test_r, self.angles_test_g
                         ])
                        # de-normalize gaze angles to degrees and compute the
                        # angular error between target and input gaze
                        a_t = a_t * np.array([15, 10])
                        a_r = a_r * np.array([15, 10])
                        delta = angular_error(a_t, a_r)

                        for j in range(real_imgs.shape[0]):
                            imageio.imwrite(
                                os.path.join(
                                    tar_dir, '%d_%d_%.3f_H%d_V%d.jpg' %
                                    (i, j, delta[j], a_t[j][0], a_t[j][1])),
                                target_imgs[j])
                            imageio.imwrite(
                                os.path.join(
                                    gene_dir, '%d_%d_%.3f_H%d_V%d.jpg' %
                                    (i, j, delta[j], a_t[j][0], a_t[j][1])),
                                fake_imgs[j])
                            imageio.imwrite(
                                os.path.join(
                                    real_dir, '%d_%d_%.3f_H%d_V%d.jpg' %
                                    (i, j, delta[j], a_t[j][0], a_t[j][1])),
                                real_imgs[j])

                        i = i + 1
                except tf.errors.OutOfRangeError:
                    logging.info("quanti_eval finished.")
Example #4
    os.makedirs(params.out_dir)


class Model(object):
    def __init__(self, params):
        self.params = params


checkpoint = tf.train.latest_checkpoint(params.log_dir)

x_test_r = tf.placeholder(tf.float32,
                          shape=(params.batch_size, params.image_size,
                                 params.image_size, 3))
angles_test_r = tf.placeholder(tf.float32, shape=(params.batch_size, 2))

x_fake = generator(x_test_r, angles_test_r)

tf_config = tf.ConfigProto()
tf_config.gpu_options.allow_growth = True
saver = tf.train.Saver()

with tf.Session(config=tf_config) as test_sess:
    with test_sess.graph.as_default():
        saver.restore(test_sess, checkpoint)

        # iterate over the input image paths collected in filelist
        for file in filelist:

            # file name without directory and extension
            img_name = '.'.join(file.split('/')[-1].split('.')[:-1])
            print(img_name)

            np_x_test_r = Image.open(file)
    def test(self):
        """Redirect each test image to a 3 x 7 grid of gaze angles and save
        the real / generated images under <log_dir>/wild_images."""
        hps = self.params

        # gaze grid in normalized coordinates: pitch in {-10, 0, 10} degrees
        # (divided by 10), yaw in {-15, ..., 15} degrees (divided by 15)
        pitch_angle = np.array([-10, 0, 10]) / 10.0
        yaw_angle = np.array([-15, -10, -5, 0, 5, 10, 15]) / 15.0

        # one generated batch per input image: 3 pitches x 7 yaws = 21 angles
        batch_size = pitch_angle.shape[0] * yaw_angle.shape[0]

        checkpoint = tf.train.latest_checkpoint(hps.log_dir)
        test_iter = self.test_data_loader()
        x_test, sides_test = test_iter.get_next()

        images = tf.placeholder(
            tf.float32,
            shape=[batch_size, hps.image_size, hps.image_size, 3],
            name='image')
        angles = tf.placeholder(tf.float32,
                                shape=[batch_size, 2],
                                name='angles')

        x_fake = generator(images, angles, reuse=True)

        tf_config = tf.ConfigProto()
        tf_config.gpu_options.allow_growth = True

        saver = tf.train.Saver()

        with tf.Session(config=tf_config) as test_sess:

            with test_sess.graph.as_default():
                saver.restore(test_sess, checkpoint)

                imgs_dir = os.path.join(hps.log_dir, 'wild_images')
                if not os.path.exists(imgs_dir):
                    os.mkdir(imgs_dir)

                real_dir = os.path.join(imgs_dir, 'real')
                gene_dir = os.path.join(imgs_dir, 'genes')

                os.makedirs(real_dir)
                os.makedirs(gene_dir)

                real_imgs, real_sides = test_sess.run([x_test, sides_test])

                theta = np.zeros([batch_size, 2], dtype=np.float32)

                # fill the angle grid in yaw-major order:
                # column 0 = yaw, column 1 = pitch
                for i, yaw in enumerate(yaw_angle):
                    for j, pitch in enumerate(pitch_angle):
                        theta[pitch_angle.shape[0] * i + j, 0] = yaw
                        theta[pitch_angle.shape[0] * i + j, 1] = pitch

                for idx in range(hps.batch_size):

                    imgs_batch = np.tile(real_imgs[idx], [batch_size, 1, 1, 1])

                    fake_imgs = test_sess.run(x_fake,
                                              feed_dict={
                                                  images: imgs_batch,
                                                  angles: theta
                                              })

                    # map images from [-1, 1] back to [0, 1] for saving
                    imgs_batch = (imgs_batch + 1.0) / 2.0
                    fake_imgs = np.clip((fake_imgs + 1.0) / 2.0, 0.0, 1.0)

                    for i in range(batch_size):

                        imsave(
                            os.path.join(
                                real_dir,
                                '%d_%d_%s.jpg' % (idx, i, real_sides[idx])),
                            imgs_batch[i])
                        imsave(
                            os.path.join(
                                gene_dir,
                                '%d_%d_%s.jpg' % (idx, i, real_sides[idx])),
                            fake_imgs[i])
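
The nested loops above fill theta in yaw-major order. For reference, an equivalent construction of the same 21 x 2 angle grid with np.meshgrid; this is a rewrite sketch, not the original code.

import numpy as np

pitch_angle = np.array([-10, 0, 10]) / 10.0
yaw_angle = np.array([-15, -10, -5, 0, 5, 10, 15]) / 15.0

# indexing='ij' keeps the yaw-major ordering used by the loops above
yy, pp = np.meshgrid(yaw_angle, pitch_angle, indexing='ij')
theta = np.stack([yy.ravel(), pp.ravel()], axis=1).astype(np.float32)  # (21, 2)
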
    def eval(self):
        """Generate eye-patch images along a given gaze-movement trajectory."""

        hps = self.params

        checkpoint = tf.train.latest_checkpoint(hps.log_dir)

        x_test_r, _, _, _, _ = self.valid_iter.get_next()

        images = tf.placeholder(
            tf.float32,
            shape=[None, hps.image_size, hps.image_size, 3],
            name='image')
        angles = tf.placeholder(tf.float32, shape=[None, 2], name='angles')

        x_test_g = generator(images, angles, reuse=True)

        saver = tf.train.Saver()

        tf_config = tf.ConfigProto()
        tf_config.gpu_options.allow_growth = True

        with tf.Session(config=tf_config) as test_sess:

            with test_sess.graph.as_default():

                saver.restore(test_sess, checkpoint)
                real_dir = os.path.join(hps.imgs_dir, 'real')
                gene_dir = os.path.join(hps.imgs_dir, 'genes')
                os.makedirs(real_dir)
                os.makedirs(gene_dir)

                real_imgs = test_sess.run(x_test_r)

                # circular trajectory (unused: overwritten by the zig-zag
                # trajectory below)
                theta = np.linspace(0, 2 * np.pi, 120)
                x_cord = np.sin(theta)
                y_cord = np.cos(theta)

                # zig-zag trajectory of 120 gaze points: sweep along the top
                # edge, cut diagonally to the bottom-left corner, then sweep
                # along the bottom edge
                x_cord = np.concatenate((np.linspace(-1, 1, 40),
                                         np.linspace(1, -1, 40),
                                         np.linspace(-1, 1, 40)))
                y_cord = np.concatenate((np.repeat(1, 40),
                                         np.linspace(1, -1, 40),
                                         np.repeat(-1, 40)))

                fake_labels = np.column_stack((x_cord, y_cord))

                for idx in range(hps.batch_size):

                    imsave(os.path.join(real_dir, '%d.jpg' % idx),
                           real_imgs[idx])

                    # repeat the real image once per trajectory point
                    imgs_batch = np.tile(real_imgs[idx], [120, 1, 1, 1])

                    fake_imgs = test_sess.run(x_test_g,
                                              feed_dict={
                                                  images: imgs_batch,
                                                  angles: fake_labels
                                              })
                    imgs_batch = (imgs_batch + 1.0) / 2.0
                    fake_imgs = np.clip((fake_imgs + 1.0) / 2.0, 0.0, 1.0)
                    for i in range(120):
                        imsave(os.path.join(gene_dir, '%d_%d.jpg' % (idx, i)),
                               fake_imgs[i])
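
The 120 frames written per identity trace one full sweep of the trajectory; if an animation is wanted, they can be stitched together afterwards. A small sketch using imageio follows; make_gif and its arguments are hypothetical helpers, not part of the project.

import os
import imageio

def make_gif(gene_dir, idx, out_path, fps=24):
    """Assemble the 120 generated frames of one identity into a GIF."""
    frames = [imageio.imread(os.path.join(gene_dir, '%d_%d.jpg' % (idx, i)))
              for i in range(120)]
    imageio.mimsave(out_path, frames, fps=fps)
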