from tensorflow import keras
from tensorflow.keras import layers
from tensorflow_addons import layers as addon_layers


def create_generator(input_shape):
    """MelGAN generator: converts input audio to a mel-spectrogram and
    upsamples it back to a raw waveform."""
    inp = keras.Input(input_shape)
    # MelSpec is a custom preprocessing layer, defined elsewhere in this
    # codebase, that converts the raw waveform into a mel-spectrogram.
    x = MelSpec()(inp)
    x = layers.Conv1D(512, 7, padding="same")(x)
    x = layers.LeakyReLU()(x)
    # Four upsampling blocks: 8 * 8 * 2 * 2 = 256x temporal upsampling.
    x = conv_block(x, 256, 8)
    x = conv_block(x, 128, 8)
    x = conv_block(x, 64, 2)
    x = conv_block(x, 32, 2)
    x = addon_layers.WeightNormalization(
        layers.Conv1D(1, 7, padding="same", activation="tanh"))(x)
    return keras.Model(inp, x)

def residual_stack(input, filters):
    """Convolutional residual stack with weight normalization.

    Args:
        input: input tensor for the stack.
        filters: int, number of filters for each convolution in the stack.

    Returns:
        Residual stack output.
    """
    # First residual block: two dilation-rate-1 convolutions plus a skip connection.
    c1 = addon_layers.WeightNormalization(layers.Conv1D(filters,
                                                        3,
                                                        dilation_rate=1,
                                                        padding="same"),
                                          data_init=False)(input)
    lrelu1 = layers.LeakyReLU()(c1)
    c2 = addon_layers.WeightNormalization(layers.Conv1D(filters,
                                                        3,
                                                        dilation_rate=1,
                                                        padding="same"),
                                          data_init=False)(lrelu1)
    add1 = layers.Add()([c2, input])

    # Second residual block: dilation rates 3 and 1.
    lrelu2 = layers.LeakyReLU()(add1)
    c3 = addon_layers.WeightNormalization(layers.Conv1D(filters,
                                                        3,
                                                        dilation_rate=3,
                                                        padding="same"),
                                          data_init=False)(lrelu2)
    lrelu3 = layers.LeakyReLU()(c3)
    c4 = addon_layers.WeightNormalization(layers.Conv1D(filters,
                                                        3,
                                                        dilation_rate=1,
                                                        padding="same"),
                                          data_init=False)(lrelu3)
    add2 = layers.Add()([add1, c4])

    # Third residual block: dilation rates 9 and 1.
    lrelu4 = layers.LeakyReLU()(add2)
    c5 = addon_layers.WeightNormalization(layers.Conv1D(filters,
                                                        3,
                                                        dilation_rate=9,
                                                        padding="same"),
                                          data_init=False)(lrelu4)
    lrelu5 = layers.LeakyReLU()(c5)
    c6 = addon_layers.WeightNormalization(layers.Conv1D(filters,
                                                        3,
                                                        dilation_rate=1,
                                                        padding="same"),
                                          data_init=False)(lrelu5)
    add3 = layers.Add()([c6, add2])

    return add3
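
# A quick shape check (a sketch with arbitrary sizes): every Conv1D above
# uses padding="same" and a constant filter count, so the stack preserves
# its input's (timesteps, channels) shape, which is what allows the
# layers.Add() skip connections.
_probe = keras.Input((8192, 32))
print(residual_stack(_probe, 32).shape)  # (None, 8192, 32)
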

def conv_block(input, conv_dim, upsampling_factor):
    """Dilated convolutional block with weight normalization.

    Args:
        input: input tensor for the block.
        conv_dim: int, number of filters for the block.
        upsampling_factor: int, stride of the transposed convolution,
            i.e. the temporal upsampling factor.

    Returns:
        Output of the dilated convolutional block.
    """
    conv_t = addon_layers.WeightNormalization(
        layers.Conv1DTranspose(conv_dim, 16, upsampling_factor,
                               padding="same"),
        data_init=False,
    )(input)
    lrelu1 = layers.LeakyReLU()(conv_t)
    res_stack = residual_stack(lrelu1, conv_dim)
    lrelu2 = layers.LeakyReLU()(res_stack)
    return lrelu2
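
# Usage sketch: building the generator on variable-length, single-channel
# raw audio. The (None, 1) input shape is an assumption for illustration,
# and the call requires the custom MelSpec layer to be in scope. The four
# conv_blocks upsample by 8 * 8 * 2 * 2 = 256x, so each spectrogram frame
# expands back to 256 audio samples.
generator = create_generator((None, 1))
generator.summary()
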

def discriminator_block(input):
    """One MelGAN discriminator: a stack of strided, grouped convolutions.

    Returns:
        Feature maps from every layer plus the final logits; the
        intermediate maps feed the feature-matching loss.
    """
    conv1 = addon_layers.WeightNormalization(
        layers.Conv1D(16, 15, 1, "same"), data_init=False)(input)
    lrelu1 = layers.LeakyReLU()(conv1)
    conv2 = addon_layers.WeightNormalization(
        layers.Conv1D(64, 41, 4, "same", groups=4), data_init=False)(lrelu1)
    lrelu2 = layers.LeakyReLU()(conv2)
    conv3 = addon_layers.WeightNormalization(
        layers.Conv1D(256, 41, 4, "same", groups=16), data_init=False)(lrelu2)
    lrelu3 = layers.LeakyReLU()(conv3)
    conv4 = addon_layers.WeightNormalization(
        layers.Conv1D(1024, 41, 4, "same", groups=64), data_init=False)(lrelu3)
    lrelu4 = layers.LeakyReLU()(conv4)
    conv5 = addon_layers.WeightNormalization(
        layers.Conv1D(1024, 41, 4, "same", groups=256), data_init=False)(lrelu4)
    lrelu5 = layers.LeakyReLU()(conv5)
    conv6 = addon_layers.WeightNormalization(
        layers.Conv1D(1024, 5, 1, "same"), data_init=False)(lrelu5)
    lrelu6 = layers.LeakyReLU()(conv6)
    conv7 = addon_layers.WeightNormalization(
        layers.Conv1D(1, 3, 1, "same"), data_init=False)(lrelu6)
    return [lrelu1, lrelu2, lrelu3, lrelu4, lrelu5, lrelu6, conv7]
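
# The multi-scale wrapper that ties the discriminator blocks together is
# not shown above. The sketch below follows the standard MelGAN design
# (three discriminator_blocks, each applied to a progressively 2x
# average-pooled copy of the waveform); `create_discriminator` is a name
# chosen here for illustration.
def create_discriminator(input_shape):
    inp = keras.Input(input_shape)
    out_map1 = discriminator_block(inp)
    pool1 = layers.AveragePooling1D()(inp)
    out_map2 = discriminator_block(pool1)
    pool2 = layers.AveragePooling1D()(pool1)
    out_map3 = discriminator_block(pool2)
    return keras.Model(inp, [out_map1, out_map2, out_map3])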