Example #1
0
    def sample_save(self, step):
        """Generate fake-B samples from 10 random test files and save them
        as one image grid named after the current training step."""
        candidates = glob(os.path.join(self.data_dir, 'test', '*'))

        # Draw two independent sets of 10 test files: one as the source (A)
        # and one as the target (B); the saved grid holds ten
        # (realA, realB, fakeB) triplets.
        files_a = random.sample(candidates, 10)
        files_b = random.sample(candidates, 10)

        # Look up each file's attribute vector by basename.
        attrs_a = [self.attr_list[os.path.basename(f)] for f in files_a]
        attrs_b = [self.attr_list[os.path.basename(f)] for f in files_b]

        # Preprocess attributes and images, then combine into network inputs.
        attrA, attrB = preprocess_attr(self.attr_names, attrs_a, attrs_b,
                                       self.attr_keys)
        imgA, imgB = preprocess_image(files_a, files_b, self.image_size,
                                      phase='test')
        dataA, dataB = preprocess_input(imgA, imgB, attrA, attrB,
                                        self.image_size, self.n_label)

        # Run the generator on the A inputs to produce fake B images.
        fake_B = self.sess.run(self.fake_B, feed_dict={self.real_A: dataA})

        # Write the sample grid, named by step number.
        out_path = os.path.join(self.sample_dir, '%06d.jpg' % (step))
        save_images(imgA, imgB, fake_B, self.image_size, out_path, num=10)
    def sample_save(self, step):
        """Convert the first 50 test utterances toward self.binary_attrs and
        reconstruct a wav sample named after the current training step."""
        candidates = glob(os.path.join(self.data_dir, 'test', '*'))

        # Use a fixed, deterministic slice (first 50 files) of the test set
        # instead of a random draw, for both the source and target lists.
        files_a = candidates[:50]
        files_b = candidates[:50]

        # Source attributes come from the .npz files themselves; target
        # attributes are the user-requested binary attribute string,
        # replicated once per file.
        attrs_a = [np.load(f)['attr'] for f in files_a]
        attrs_b = [self.binary_attrs] * len(files_b)

        # Preprocess attributes and features, then build the network input;
        # only the A-side input is needed for generation.
        attrA, attrB = preprocess_attr(self.attr_names, attrs_a, attrs_b,
                                       self.attr_keys)
        imgA, imgB = preprocess_image(files_a, files_b, self.image_size,
                                      phase='test')
        dataA, _ = preprocess_input(imgA, imgB, attrA, attrB,
                                    self.image_size, self.n_label)

        # Run the generator on the A inputs.
        fake_B = self.sess.run(self.fake_B, feed_dict={self.real_A: dataA})

        # Reconstruct audio from the generated cepstra, named by step number.
        out_path = os.path.join(self.sample_dir, '%06d' % (step))
        save_wav_ceps(fake_B, './data_test_mcep/test.wav', out_path)
Example #3
0
 def train(self):
     """Training loop: for each mini-batch, update the generator once and
     the discriminator once, feeding paired A/B images and attributes.
     """
     # summary setting
     self.summary()

     # load train data list & load attribute data
     dataA_files = load_data_list(self.data_dir)
     dataB_files = np.copy(dataA_files)
     attr_names, attr_list = attr_extract(self.data_dir)

     # variable initialize
     self.sess.run(tf.global_variables_initializer())

     # resume from a checkpoint when requested and one exists
     if self.continue_train and self.checkpoint_load():
         print(" [*] before training, Load SUCCESS ")
     else:
         print(" [!] before training, no need to Load ")

     batch_idxs = len(dataA_files) // self.batch_size # 182599
     #train
     for epoch in range(self.epoch):
         # shuffle A and B independently each epoch so the pairings vary
         np.random.shuffle(dataA_files)
         np.random.shuffle(dataB_files)

         for idx in tqdm(range(batch_idxs)):
             # slice out this mini-batch of file paths and their attributes
             dataA_list = dataA_files[idx * self.batch_size : (idx+1) * self.batch_size]
             dataB_list = dataB_files[idx * self.batch_size : (idx+1) * self.batch_size]
             attrA_list = [attr_list[os.path.basename(val)] for val in dataA_list]
             attrB_list = [attr_list[os.path.basename(val)] for val in dataB_list]
             # get batch images and labels
             attrA, attrB = preprocess_attr(attr_names, attrA_list, attrB_list)
             imgA, imgB = preprocess_image(dataA_list, dataB_list, self.image_size)
             dataA, dataB = preprocess_input(imgA, imgB, attrA, attrB, self.image_size)

             # update G network
             feed = { self.real_A: dataA, self.real_B: dataB }
             fake_B, _, summary = self.sess.run([self.fake_B, self.g_optim, self.g_sum],
                                        feed_dict = feed)

             # update D network
             # BUG FIX: the original called sess.run without feed_dict, so the
             # feed built on the previous line was never used and the run
             # would fail on (or silently skip feeding) the placeholders.
             feed = { self.real_B: dataB, self.fake_B_sample: fake_B }
             _, summary = self.sess.run([self.d_optim, self.d_sum], feed_dict = feed)
    def test(self):
        """Translate 10 random test images to the attributes given in
        self.binary_attrs and save the (input, input, fake) grid as
        test_dir/test.jpg. Requires a loadable checkpoint to be meaningful."""
        # check if attribute available
        # binary_attrs supplies one tag flag per label, so its length must equal n_label
        if not len(self.binary_attrs) == self.n_label:
            print(
                "binary_attr length is wrong! The length should be {}".format(
                    self.n_label))
            return

        # variable initialize
        self.sess.run(tf.global_variables_initializer())

        # load checkpoint; without it the generator keeps its random init weights
        if self.phase == 'test' and self.checkpoint_load():
            print(" [*] before training, Load SUCCESS ")
        else:
            print(" [!] before training, no need to Load ")

        # [5,6] with the sequence of (realA, realB, fakeB), totally 10 set save
        # picks ten arbitrary samples from data_dir's test folder
        # (original note: for audio data a suitable contiguous range would be taken)
        test_files = glob(os.path.join(self.data_dir, 'test', '*'))
        testA_list = random.sample(test_files, 10)

        # get batch images and labels
        #        self.attr_keys = ['Black_Hair','Blond_Hair','Brown_Hair', 'Male', 'Young','Mustache','Pale_Skin']
        # NOTE(review): this builds ONE flat float list repeated len(testA_list)
        # times, unlike the train/sample paths which pass a per-file list of
        # attribute vectors through preprocess_attr — confirm preprocess_input
        # really expects this flattened form.
        attrA = [float(i) for i in list(self.binary_attrs)] * len(testA_list)
        imgA, _ = preprocess_image(testA_list,
                                   testA_list,
                                   self.image_size,
                                   phase='test')
        dataA, _ = preprocess_input(imgA, imgA, attrA, attrA, self.image_size,
                                    self.n_label)

        # generate fakeB
        # the translated result comes back in fake_B
        feed = {self.real_A: dataA}
        fake_B = self.sess.run(self.fake_B, feed_dict=feed)

        # save samples
        test_file = os.path.join(self.test_dir, 'test.jpg')
        save_images(imgA, imgA, fake_B, self.image_size, test_file, num=10)
Example #5
0
    def train(self):
        """Main training loop: per mini-batch, update D five times
        (critic-style updates) then G once; log summaries every step and
        periodically save checkpoints and sample outputs.
        """
        # summary setting
        self.summary()

        # load train data list & load attribute data
        dataA_files = load_data_list(self.data_dir)
        dataB_files = np.copy(dataA_files)
        self.attr_names, self.attr_list = attr_extract(self.data_dir)

        # variable initialize
        self.sess.run(tf.global_variables_initializer())

        # resume from a checkpoint when continue_train is set and one exists
        if self.continue_train and self.checkpoint_load():
            print(" [*] before training, Load SUCCESS ")
        else:
            print(" [!] before training, no need to Load ")

        batch_idxs = len(dataA_files) // self.batch_size  # 182599
        count = 0  # global step counter across all epochs
        #train
        for epoch in range(self.epoch):
            # learning-rate decay schedule: full rate for the first half of
            # training, then linear decay toward 0 over the second half
            if epoch < self.epoch / 2:
                lr_decay = 1.0
            else:
                lr_decay = (self.epoch - epoch) / (self.epoch / 2)

            # data shuffle (A and B shuffled independently so pairings vary)
            np.random.shuffle(dataA_files)
            np.random.shuffle(dataB_files)

            for idx in tqdm(range(batch_idxs)):
                count += 1
                # slice out this mini-batch of file paths
                dataA_list = dataA_files[idx * self.batch_size:(idx + 1) *
                                         self.batch_size]
                dataB_list = dataB_files[idx * self.batch_size:(idx + 1) *
                                         self.batch_size]
                # look up each file's attribute vector by basename
                attrA_list = [
                    self.attr_list[os.path.basename(val)] for val in dataA_list
                ]
                attrB_list = [
                    self.attr_list[os.path.basename(val)] for val in dataB_list
                ]

                # get batch images and labels
                attrA, attrB = preprocess_attr(self.attr_names, attrA_list,
                                               attrB_list, self.attr_keys)
                imgA, imgB = preprocess_image(dataA_list,
                                              dataB_list,
                                              self.image_size,
                                              phase='train')
                dataA, dataB = preprocess_input(imgA, imgB, attrA, attrB,
                                                self.image_size, self.n_label)

                # generate fake_B once; the same batch is reused for all D steps
                feed = {self.real_A: dataA}
                fake_B = self.sess.run(self.fake_B, feed_dict=feed)

                # update D network for 5 times (multiple critic updates per G update)
                for _ in range(5):
                    # epsilon: per-sample uniform draw in [0, 1); presumably the
                    # interpolation coefficient for a WGAN-GP gradient penalty —
                    # confirm against the model definition
                    epsilon = np.random.rand(self.batch_size, 1, 1, 1)
                    feed = {
                        self.fake_B_sample: fake_B,
                        self.real_B: dataB,
                        self.attr_B: np.array(attrB),
                        self.epsilon: epsilon,
                        self.lr_decay: lr_decay
                    }
                    _, d_loss, d_summary = self.sess.run(
                        [self.d_optim, self.d_loss, self.d_sum],
                        feed_dict=feed)

                # update G network for 1 time
                feed = {
                    self.real_A: dataA,
                    self.real_B: dataB,
                    self.attr_B: np.array(attrB),
                    self.lr_decay: lr_decay
                }
                _, g_loss, g_summary = self.sess.run(
                    [self.g_optim, self.g_loss, self.g_sum], feed_dict=feed)

                # summary (only the last of the 5 D summaries is logged)
                self.writer.add_summary(g_summary, count)
                self.writer.add_summary(d_summary, count)

                # save checkpoint and samples every `snapshot` steps
                if count % self.snapshot == 0:
                    print("Iter: %06d, g_loss: %4.4f, d_loss: %4.4f" %
                          (count, g_loss, d_loss))

                    # checkpoint
                    self.checkpoint_save(count)

                    # save samples (from test dataset)
                    self.sample_save(count)