Example #1
    def validate_model(self, images, epoch, step):
        """

        :param images:
        :param epoch:
        :param step:
        :return:
        """
        fake_images, real_images, loss = self.generate_fake_samples(images)
        print("Sample: loss: %.5f " % (loss))

        # scale pixel values back, stack each batch into a single column, and
        # put the generated images next to the real ones for comparison
        merged_fake_images = merge(scale_back(fake_images),
                                   [self.batch_size, 1])
        merged_real_images = merge(scale_back(real_images),
                                   [self.batch_size, 1])
        merged_pair = np.concatenate([merged_fake_images, merged_real_images],
                                     axis=1)

        model_id, _ = self.get_model_id_and_dir()
        model_sample_dir = os.path.join(self.sample_dir, model_id)
        if not os.path.exists(model_sample_dir):
            os.makedirs(model_sample_dir)

        sample_img_path = os.path.join(model_sample_dir,
                                       "sample_%04d_%06d.png" % (epoch, step))
        # scipy.misc.imsave is only available in older SciPy releases
        misc.imsave(sample_img_path, merged_pair)
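merge() and scale_back() are helpers from the repository and are not shown here. As a rough orientation, the sketch below is an assumed stand-in for merge(): it tiles a batch of images into a rows x cols grid, which is how the calls with [self.batch_size, 1] produce one vertical column per batch. The repository's actual implementation may differ.

    import numpy as np

    def merge_sketch(images, size):
        # tile a batch (N, H, W, C) into a size[0] x size[1] grid of images;
        # an assumed stand-in for the repo's merge() helper, not its actual code
        rows, cols = size
        n, h, w, c = images.shape
        grid = np.zeros((rows * h, cols * w, c), dtype=images.dtype)
        for idx in range(min(n, rows * cols)):
            r, col = divmod(idx, cols)
            grid[r * h:(r + 1) * h, col * w:(col + 1) * w, :] = images[idx]
        return grid

    # four dummy images tiled into a 4x1 column, as in merge(..., [batch_size, 1])
    batch = np.random.rand(4, 8, 8, 3).astype(np.float32)
    print(merge_sketch(batch, [4, 1]).shape)  # (32, 8, 3)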
Example #2
    def infer(self, source_obj, model_dir, save_dir):
        """

        :param source_obj:
        :param model_dir:
        :param save_dir:
        :return:
        """
        source_provider = InjectDataProvider(source_obj)
        source_iter = source_provider.get_iter(self.batch_size)

        # TF1-style: requires an active default session
        tf.global_variables_initializer().run()
        # restore only the generator variables from the checkpoint
        saver = tf.train.Saver(var_list=self.retrieve_generator_vars())
        self.restore_model(saver, model_dir)

        def save_imgs(imgs, count):
            p = os.path.join(save_dir, "inferred_%04d.png" % count)
            save_concat_images(imgs, img_path=p)
            print("generated images saved at %s" % p)

        count = 0
        batch_buffer = list()
        for source_imgs in source_iter:
            fake_imgs = self.generate_fake_samples(source_imgs)[0]
            merged_fake_images = merge(fake_imgs, [self.batch_size, 1])
            batch_buffer.append(merged_fake_images)
            if len(batch_buffer) == 10:
                save_imgs(batch_buffer, count)
                batch_buffer = list()
            count += 1
        if batch_buffer:
            # last batch
            save_imgs(batch_buffer, count)
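The loop above follows a simple buffer-and-flush pattern: merged batches are collected and written out every ten iterations, with a final flush for the remainder. Below is a minimal, self-contained sketch of that pattern; the names flush_in_chunks and flush are illustrative and not part of the repository.

    def flush_in_chunks(items, chunk_size, flush):
        # accumulate results and flush every chunk_size items, mirroring the
        # batch_buffer logic in infer() above; flush the remainder at the end
        buffer, count = [], 0
        for item in items:
            buffer.append(item)
            if len(buffer) == chunk_size:
                flush(buffer, count)
                buffer = []
            count += 1
        if buffer:
            flush(buffer, count)

    # dummy flush that just reports what would be written to disk
    flush_in_chunks(range(23), 10,
                    lambda buf, c: print("would save %d items as inferred_%04d.png" % (len(buf), c)))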
Example #3
    def infer(self, source_obj, model_dir, save_dir):
        """
        Inference this auto-encoder model.
        :param source_obj:
        :param model_dir:
        :param save_dir:
        :return:
        """
        source_provider = InjectDataProvider(source_obj)
        source_iter = source_provider.get_iter(self.batch_size)

        # TF1-style: requires an active default session
        tf.global_variables_initializer().run()

        saver = tf.train.Saver(max_to_keep=100)
        # note: the model_dir argument is discarded here; the checkpoint is
        # restored from the model's own directory instead
        _, model_dir = self.get_model_id_and_dir()
        self.restore_model(saver, model_dir)

        def save_imgs(imgs, count):
            p = os.path.join(save_dir, "inferred_%04d.png" % count)
            save_concat_images(imgs, img_path=p)
            print("generated images saved at %s" % p)

        count = 0
        batch_buffer = list()
        code_list = None  # accumulated codes returned by generate_fake_samples
        for source_imgs in source_iter:
            fake_imgs, real_imgs, loss, code = self.generate_fake_samples(
                source_imgs)
            if code_list is None:
                code_list = code.copy()
            else:
                code_list = np.concatenate([code_list, code])

            merged_fake_images = merge(fake_imgs, [self.batch_size, 1])
            batch_buffer.append(merged_fake_images)
            if len(batch_buffer) == 10:
                save_imgs(batch_buffer, count)
                batch_buffer = list()
            count += 1
        if batch_buffer:
            # last batch
            save_imgs(batch_buffer, count)

        if code_list is not None:
            # ndarray.dump pickles the array to disk
            code_list.dump(os.path.join(save_dir, "code.dat"))
            print("code.dat dumped successfully!")