Example #1: Tacotron constructor (multi-speaker, full set of attention options)
def __init__(self,
             num_chars,
             num_speakers,
             r=5,
             linear_dim=1025,
             mel_dim=80,
             memory_size=5,
             attn_win=False,
             attn_norm="sigmoid",
             prenet_type="original",
             prenet_dropout=True,
             forward_attn=False,
             trans_agent=False,
             forward_attn_mask=False,
             location_attn=True,
             separate_stopnet=True):
    super(Tacotron, self).__init__()
    self.r = r  # reduction factor: the decoder emits r frames per step
    self.mel_dim = mel_dim
    self.linear_dim = linear_dim
    # Character embedding, initialized from N(0, 0.3)
    self.embedding = nn.Embedding(num_chars, 256)
    self.embedding.weight.data.normal_(0, 0.3)
    # Optional speaker embedding for multi-speaker training
    if num_speakers > 1:
        self.speaker_embedding = nn.Embedding(num_speakers, 256)
        self.speaker_embedding.weight.data.normal_(0, 0.3)
    self.encoder = Encoder(256)
    self.decoder = Decoder(256, mel_dim, r, memory_size, attn_win,
                           attn_norm, prenet_type, prenet_dropout,
                           forward_attn, trans_agent, forward_attn_mask,
                           location_attn, separate_stopnet)
    # CBHG postnet refines the mel output; the final linear layer maps
    # its bidirectional GRU features to the linear-spectrogram dimension
    self.postnet = PostCBHG(mel_dim)
    self.last_linear = nn.Linear(self.postnet.cbhg.gru_features * 2,
                                 linear_dim)
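
As a rough usage sketch (assuming the surrounding module provides Encoder, Decoder, and PostCBHG, and picking a hypothetical vocabulary size of 128), the constructor above can be exercised like this:

    import torch

    # num_chars=128 is a hypothetical vocabulary size; num_speakers=1
    # skips the optional speaker-embedding branch.
    model = Tacotron(num_chars=128, num_speakers=1)

    # The embedding maps integer character IDs to 256-dim vectors.
    char_ids = torch.randint(0, 128, (1, 50))  # (batch, seq_len)
    assert model.embedding(char_ids).shape == (1, 50, 256)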
Example #2: Tacotron constructor with configurable embedding size and a reduced attention interface
def __init__(self,
             num_chars,
             embedding_dim=256,
             linear_dim=1025,
             mel_dim=80,
             r=5,
             padding_idx=None,
             memory_size=5,
             attn_windowing=False,
             forward_attention=False):
    super(Tacotron, self).__init__()
    self.r = r  # reduction factor
    self.mel_dim = mel_dim
    self.linear_dim = linear_dim
    self.embedding = nn.Embedding(num_chars,
                                  embedding_dim,
                                  padding_idx=padding_idx)
    self.embedding.weight.data.normal_(0, 0.3)
    self.encoder = Encoder(embedding_dim)
    # The decoder input size stays fixed at 256, the encoder output width
    self.decoder = Decoder(256, mel_dim, r, memory_size, attn_windowing,
                           forward_attention)
    self.postnet = PostCBHG(mel_dim)
    # A sigmoid bounds the predicted linear spectrogram to [0, 1]
    self.last_linear = nn.Sequential(
        nn.Linear(self.postnet.cbhg.gru_features * 2, linear_dim),
        nn.Sigmoid())
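
Note the design difference from Example #1: here the final linear projection is wrapped in nn.Sigmoid(), which bounds the predicted linear spectrogram to [0, 1]; presumably the training targets are normalized into that range.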
Example #3: Decoder I/O test exercising the full set of attention flags
import torch as T

from TTS.layers.tacotron import Decoder  # assumed import path; adjust to your checkout


def test_in_out():
    # Build a decoder with the full set of attention options enabled
    layer = Decoder(
        in_features=256,
        memory_dim=80,
        r=2,
        memory_size=4,
        attn_windowing=False,
        attn_norm="sigmoid",
        prenet_type='original',
        prenet_dropout=True,
        forward_attn=True,
        trans_agent=True,
        forward_attn_mask=True,
        location_attn=True,
        separate_stopnet=True)
    dummy_input = T.rand(4, 8, 256)  # (batch, encoder steps, features)
    dummy_memory = T.rand(4, 2, 80)  # (batch, target frames, mel_dim)

    output, alignment, stop_tokens = layer(
        dummy_input, dummy_memory, mask=None)

    assert output.shape[0] == 4
    # 2 target frames with r=2 collapse into a single decoder step
    assert output.shape[1] == 1, "size not {}".format(output.shape[1])
    # r frames are stacked on the feature axis: 80 * 2 = 160
    assert output.shape[2] == 80 * 2, "size not {}".format(output.shape[2])
    assert stop_tokens.shape[0] == 4
Example #4: Decoder I/O test with a shorter argument list
    def test_in_out(self):
        layer = Decoder(in_features=256, memory_dim=80, r=2,
                        memory_size=4, attn_windowing=False,
                        attn_norm="sigmoid")
        dummy_input = T.rand(4, 8, 256)
        dummy_memory = T.rand(4, 2, 80)

        output, alignment, stop_tokens = layer(dummy_input, dummy_memory, mask=None)

        assert output.shape[0] == 4
        assert output.shape[1] == 1, "size not {}".format(output.shape[1])
        assert output.shape[2] == 80 * 2, "size not {}".format(output.shape[2])
        assert stop_tokens.shape[0] == 4
Example #5: Minimal Decoder I/O test, also checking the stop-token range
    def test_in_out(self):
        layer = Decoder(in_features=256, memory_dim=80, r=2)
        dummy_input = T.rand(4, 8, 256)
        dummy_memory = T.rand(4, 2, 80)

        output, alignment, stop_tokens = layer(dummy_input, dummy_memory)

        assert output.shape[0] == 4
        assert output.shape[1] == 1, "size not {}".format(output.shape[1])
        assert output.shape[2] == 80 * 2, "size not {}".format(output.shape[2])
        assert stop_tokens.shape[0] == 4
        # Stop tokens are probabilities, so they must lie in [0, 1]
        assert stop_tokens.max() <= 1.0
        assert stop_tokens.min() >= 0
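
Taken together, the three tests encode the decoder's contract: with memory_dim=80 and r=2, each step emits r stacked frames, so the last output dimension is 80 * 2 = 160, and the 2-frame dummy memory collapses into a single decoder step (shape[1] == 1). The final range checks hold because the stop tokens are probabilities confined to [0, 1].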
Example #6: Simpler Tacotron constructor with a plain CBHG postnet
def __init__(self,
             embedding_dim=256,
             linear_dim=1025,
             mel_dim=80,
             r=5,
             padding_idx=None):
    super(Tacotron, self).__init__()
    self.r = r  # reduction factor
    self.mel_dim = mel_dim
    self.linear_dim = linear_dim
    # Vocabulary size comes from the module-level `symbols` table
    self.embedding = nn.Embedding(len(symbols),
                                  embedding_dim,
                                  padding_idx=padding_idx)
    print(" | > Number of characters : {}".format(len(symbols)))
    self.embedding.weight.data.normal_(0, 0.3)
    self.encoder = Encoder(embedding_dim)
    self.decoder = Decoder(256, mel_dim, r)
    # Plain CBHG postnet; its bidirectional GRU doubles the feature
    # width, hence the mel_dim * 2 input to the output projection
    self.postnet = CBHG(mel_dim, K=8, projections=[256, mel_dim])
    self.last_linear = nn.Linear(mel_dim * 2, linear_dim)
Example #7: Simpler Tacotron constructor with PostCBHG and a sigmoid output layer
def __init__(self,
             embedding_dim=256,
             linear_dim=1025,
             mel_dim=80,
             r=5,
             padding_idx=None):
    super(Tacotron, self).__init__()
    self.r = r  # reduction factor
    self.mel_dim = mel_dim
    self.linear_dim = linear_dim
    self.embedding = nn.Embedding(
        len(symbols), embedding_dim, padding_idx=padding_idx)
    print(" | > Number of characters : {}".format(len(symbols)))
    self.embedding.weight.data.normal_(0, 0.3)
    self.encoder = Encoder(embedding_dim)
    self.decoder = Decoder(256, mel_dim, r)
    self.postnet = PostCBHG(mel_dim)
    # As in Example #2, a sigmoid bounds the linear-spectrogram output
    self.last_linear = nn.Sequential(
        nn.Linear(self.postnet.cbhg.gru_features * 2, linear_dim),
        nn.Sigmoid())
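
Finally, a hedged sketch of how the postnet and last_linear stages compose (the reshape and call order follow the usual Tacotron forward pass, which these snippets do not show; shapes assume mel_dim=80 and linear_dim=1025):

    # `model` as constructed in the sketch after Example #1;
    # mel frames would come from the decoder in a real forward pass.
    mel_outputs = torch.rand(2, 30, 80)             # (batch, frames, mel_dim)
    cbhg_feats = model.postnet(mel_outputs)         # (batch, frames, gru_features * 2)
    linear_outputs = model.last_linear(cbhg_feats)  # (batch, frames, 1025)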