    def process_file(self, mel, speaker_index, speaker_index_2, notes, sess):

        datasets = "".join("_" + x.lower() for x in config.datasets)

        # Per-feature min/max statistics, widened by a small margin (0.001)
        # which keeps max_feat - min_feat strictly positive.
        with h5py.File(config.stat_file, mode='r') as stat_file:
            max_feat = stat_file["feats_maximus"][()] + 0.001
            min_feat = stat_file["feats_minimus"][()] - 0.001

        # Min-max normalise the mel features; scale the notes by the
        # (rounded) second-to-last statistic, which is the f0 maximum.
        mel = (mel - min_feat) / (max_feat - min_feat)

        notes = notes / np.round(max_feat[-2])

        # Chunk the sequences for batched inference; nchunks_in is needed
        # to reassemble the outputs later.
        in_batches_mel, nchunks_in = utils.generate_overlapadd(mel)
        in_batches_notes, nchunks_in = utils.generate_overlapadd(notes)

        out_batches_mel = []
        out_batches_f0 = []
        out_batches_vuv = []

        for in_batch_mel, in_batch_notes in zip(in_batches_mel,
                                                in_batches_notes):
            speaker = np.repeat(speaker_index, config.batch_size)
            speaker_2 = np.repeat(speaker_index_2, config.batch_size)
            feed_dict = {self.input_placeholder: in_batch_mel,
                         self.speaker_labels: speaker,
                         self.speaker_labels_1: speaker_2,
                         self.notes_placeholder: in_batch_notes,
                         self.is_train: False}
            mel, f0, vuv = sess.run([self.output, self.f0, self.vuv],
                                    feed_dict=feed_dict)

            out_batches_mel.append(mel)
            out_batches_f0.append(f0)
            out_batches_vuv.append(vuv)

        out_batches_mel = np.array(out_batches_mel)
        out_batches_f0 = np.array(out_batches_f0)
        out_batches_vuv = np.array(out_batches_vuv)

        # Stitch the per-chunk outputs back into full-length sequences.
        out_batches_mel = utils.overlapadd(out_batches_mel, nchunks_in)
        out_batches_f0 = utils.overlapadd(out_batches_f0, nchunks_in)
        out_batches_vuv = utils.overlapadd(out_batches_vuv, nchunks_in)

        # Undo the min-max normalisation: the mel stream uses all but the
        # last two statistics, f0 and vuv use the last two.
        out_batches_mel = out_batches_mel[:, :-2] * (
            max_feat[:-2] - min_feat[:-2]) + min_feat[:-2]

        out_batches_f0 = np.clip(out_batches_f0, 0.0, 1.0) * (
            max_feat[-2] - min_feat[-2]) + min_feat[-2]

        out_batches_vuv = out_batches_vuv * (max_feat[-1] -
                                             min_feat[-1]) + min_feat[-1]

        # Round the voiced/unvoiced track to a hard decision.
        out_batches_vuv = np.round(out_batches_vuv)

        return out_batches_mel, out_batches_f0, out_batches_vuv
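The tail of this method is just the inverse of the min-max scaling applied at the top. A small self-contained sketch of that round trip (the statistics and feature values below are invented; in the listing they come from config.stat_file):

import numpy as np

# Hypothetical per-feature statistics, widened by the same 0.001 margin.
max_feat = np.array([3.2, 5.0, 440.0, 1.0]) + 0.001
min_feat = np.array([0.1, 0.0, 80.0, 0.0]) - 0.001

feats = np.array([[1.0, 2.5, 220.0, 1.0],
                  [0.5, 4.0, 110.0, 0.0]])

# Forward: scale every feature column into (0, 1), as done on the input mel.
norm = (feats - min_feat) / (max_feat - min_feat)

# Inverse: what the end of process_file does per output stream.
recovered = norm * (max_feat - min_feat) + min_feat

assert np.allclose(recovered, feats)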
Example #2
    def process_file(self, mel, speaker_index, speaker_index_2, sess):

        datasets = "".join("_" + x.lower() for x in config.datasets)

        with h5py.File(config.stat_file, mode='r') as stat_file:
            max_feat = stat_file["feats_maximus"][()] + 0.001
            min_feat = stat_file["feats_minimus"][()] - 0.001

        mel = (mel - min_feat) / (max_feat - min_feat)

        in_batches_mel, nchunks_in = utils.generate_overlapadd(mel)

        out_batches_mel = []

        for in_batch_mel in in_batches_mel:
            speaker = np.repeat(np.expand_dims(speaker_index, 0), config.batch_size, axis=0)
            speaker_2 = np.repeat(np.expand_dims(speaker_index_2, 0), config.batch_size, axis=0)
            feed_dict = {self.input_placeholder: in_batch_mel[:, :, :-2],
                         self.speaker_labels: speaker,
                         self.speaker_labels_1: speaker_2,
                         self.is_train: False}
            mel = sess.run(self.output, feed_dict=feed_dict)

            out_batches_mel.append(mel)
        out_batches_mel = np.array(out_batches_mel)

        out_batches_mel = utils.overlapadd(out_batches_mel, nchunks_in)

        out_batches_mel = out_batches_mel * (max_feat[:-2] - min_feat[:-2]) + min_feat[:-2]

        return out_batches_mel
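utils.generate_overlapadd and utils.overlapadd are project utilities whose implementation is not shown here; all the examples rely on them to cut a long (time, features) matrix into fixed-length windows and to stitch the per-window network outputs back together. Purely as an illustration of that idea (window and hop sizes invented, not the project's actual code):

import numpy as np

def split_windows(x, win=8, hop=4):
    # Cut a (time, feat) matrix into overlapping windows, zero-padding the end.
    n_win = int(np.ceil(max(len(x) - win, 0) / hop)) + 1
    padded = np.zeros((hop * (n_win - 1) + win, x.shape[1]))
    padded[:len(x)] = x
    return np.stack([padded[i * hop:i * hop + win] for i in range(n_win)]), len(x)

def merge_windows(windows, orig_len, win=8, hop=4):
    # Reassemble the windows, averaging samples covered by more than one window.
    out = np.zeros((hop * (len(windows) - 1) + win, windows.shape[2]))
    count = np.zeros((out.shape[0], 1))
    for i, w in enumerate(windows):
        out[i * hop:i * hop + win] += w
        count[i * hop:i * hop + win] += 1
    return (out / count)[:orig_len]

x = np.random.rand(30, 3)
wins, n = split_windows(x)
assert np.allclose(merge_windows(wins, n), x)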
Example #3
    def extract_feature(self, mel, sess):

        datasets = "".join("_"+x.lower() for x in config.datasets)

        mel = np.clip(mel, 0.0, 1.0)

        in_batches_mel, nchunks_in = utils.generate_overlapadd(mel)

        out_batches_mel = []


        for in_batch_mel in in_batches_mel:
            feed_dict = {self.stft_placeholder: in_batch_mel, self.is_train: False}
            mel = sess.run(self.content_embedding_stft, feed_dict=feed_dict)

            out_batches_mel.append(mel)

        out_batches_mel = np.array(out_batches_mel)

        out_batches_mel = utils.overlapadd(out_batches_mel, nchunks_in)

        return out_batches_mel
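Examples #1, #2 and #4 all read the same HDF5 statistics file (config.stat_file) with the datasets "feats_maximus" and "feats_minimus". A minimal, self-contained sketch of writing and reading such a file with h5py (file name and values invented):

import h5py
import numpy as np

# Write a toy statistics file with the two datasets the listing expects.
with h5py.File("stats_example.hdf5", "w") as f:
    f.create_dataset("feats_maximus", data=np.array([3.2, 5.0, 440.0, 1.0]))
    f.create_dataset("feats_minimus", data=np.array([0.1, 0.0, 80.0, 0.0]))

# Read it back the same way process_file does, with the small margin.
with h5py.File("stats_example.hdf5", mode="r") as stat_file:
    max_feat = stat_file["feats_maximus"][()] + 0.001
    min_feat = stat_file["feats_minimus"][()] - 0.001

print(max_feat, min_feat)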
Example #4
    def process_file(self, mel, sess):

        datasets = "".join("_" + x.lower() for x in config.datasets)

        with h5py.File(config.stat_file, mode='r') as stat_file:
            max_feat = stat_file["feats_maximus"][()] + 0.001
            min_feat = stat_file["feats_minimus"][()] - 0.001

        mel = np.clip(mel, 0.0, 1.0)

        in_batches_mel, nchunks_in = utils.generate_overlapadd(mel)

        out_batches_mel = []
        out_batches_f0 = []
        out_batches_vuv = []

        for in_batch_mel in in_batches_mel:
            feed_dict = {self.stft_placeholder: in_batch_mel,
                         self.stft_placeholder_1: in_batch_mel,
                         self.is_train: False}
            mel, f0, vuv = sess.run([self.output_stft, self.f0, self.vuv], feed_dict=feed_dict)

            out_batches_mel.append(mel)
            out_batches_f0.append(f0)
            out_batches_vuv.append(vuv)

        out_batches_mel = np.array(out_batches_mel)
        out_batches_f0 = np.array(out_batches_f0)
        out_batches_vuv = np.array(out_batches_vuv)

        out_batches_mel = utils.overlapadd(out_batches_mel, nchunks_in)
        out_batches_f0 = utils.overlapadd(out_batches_f0, nchunks_in)
        out_batches_vuv = utils.overlapadd(out_batches_vuv, nchunks_in)

        out_batches_mel = out_batches_mel * (max_feat[:-2] - min_feat[:-2]) + min_feat[:-2]

        out_batches_f0 = out_batches_f0 * (max_feat[-2] - min_feat[-2]) + min_feat[-2]

        out_batches_vuv = out_batches_vuv * (max_feat[-1] - min_feat[-1]) + min_feat[-1]

        out_batches_vuv = np.round(out_batches_vuv)

        return out_batches_mel, out_batches_f0, out_batches_vuv
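A small side-by-side of the two speaker-conditioning patterns used above: Example #1 repeats a scalar speaker label across the batch, while Example #2 repeats a label vector (e.g. a one-hot row). The sizes below are invented for illustration:

import numpy as np

batch_size = 4
speaker_index = 2                       # scalar label, as in Example #1
speaker_one_hot = np.array([0, 0, 1])   # vector label, as in Example #2

# Example #1 style: shape (batch_size,)
labels = np.repeat(speaker_index, batch_size)
print(labels)          # [2 2 2 2]

# Example #2 style: shape (batch_size, label_dim)
labels_2 = np.repeat(np.expand_dims(speaker_one_hot, 0), batch_size, axis=0)
print(labels_2.shape)  # (4, 3)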