Example #1
 def __init__(
     self,
     filters,
     kernel_size,
     strides,
     padding,
     is_weight_norm,
     initializer_seed,
     **kwargs
 ):
     """Initialize TFConvTranspose1d( module.
     Args:
         filters (int): Number of filters.
         kernel_size (int): kernel size.
         strides (int): Stride width.
         padding (str): Padding type ("same" or "valid").
     """
     super().__init__(**kwargs)
     self.conv1d_transpose = tf.keras.layers.Conv2DTranspose(
         filters=filters,
         kernel_size=(kernel_size, 1),
         strides=(strides, 1),
         padding="same",  # NOTE: the `padding` argument is accepted but not forwarded here.
         kernel_initializer=get_initializer(initializer_seed),
     )
     if is_weight_norm:
         self.conv1d_transpose = WeightNormalization(self.conv1d_transpose)
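
Both get_initializer and WeightNormalization are helpers from the surrounding
code base, not shown here. The layer itself relies on a common trick: before
tf.keras shipped a Conv1DTranspose layer, 1D transposed convolution was
emulated with Conv2DTranspose over a dummy spatial axis. A minimal sketch of
that trick (shapes and hyperparameters below are illustrative only):

    import tensorflow as tf

    # Emulate Conv1DTranspose: expand a dummy axis, upsample along time with
    # Conv2DTranspose, then squeeze the dummy axis back out.
    x = tf.random.normal([2, 100, 80])        # (batch, time, channels)
    x = tf.expand_dims(x, 2)                  # (batch, time, 1, channels)
    upsample = tf.keras.layers.Conv2DTranspose(
        filters=40, kernel_size=(16, 1), strides=(8, 1), padding="same"
    )
    y = tf.squeeze(upsample(x), 2)            # (2, 800, 40): time upsampled 8x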
Example #2
 def _apply_weightnorm(self, list_layers):
     """Try apply weightnorm for all layer in list_layers."""
     for i in range(len(list_layers)):
         try:
             layer_name = list_layers[i].name.lower()
             if "conv1d" in layer_name or "dense" in layer_name:
                 list_layers[i] = WeightNormalization(list_layers[i])
         except Exception:
             # Layers without a name attribute (or otherwise unwrappable) are skipped silently.
             pass
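
WeightNormalization is not defined in these snippets; it is presumably the
reparameterization wrapper from tensorflow_addons. A self-contained sketch of
the same name-based filtering this helper performs, assuming that wrapper:

    import tensorflow as tf
    import tensorflow_addons as tfa  # assumption: WeightNormalization comes from here

    layers = [
        tf.keras.layers.Conv1D(64, 3, name="conv1d_a"),
        tf.keras.layers.Dense(32, name="dense_a"),
        tf.keras.layers.ReLU(name="relu_a"),  # name matches neither -> left untouched
    ]
    for i, layer in enumerate(layers):
        if "conv1d" in layer.name.lower() or "dense" in layer.name.lower():
            layers[i] = tfa.layers.WeightNormalization(layer)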
Example #3
    def __init__(
        self,
        kernel_size,
        filters,
        dilation_rate,
        use_bias,
        nonlinear_activation,
        nonlinear_activation_params,
        is_weight_norm,
        initializer_seed,
        **kwargs
    ):
        """Initialize TFResidualStack module.
        Args:
            kernel_size (int): Kernel size.
            filters (int): Number of filters.
            dilation_rate (int): Dilation rate.
            use_bias (bool): Whether to add bias parameter in convolution layers.
            nonlinear_activation (str): Activation function module name.
            nonlinear_activation_params (dict): Hyperparameters for activation function.
        """
        super().__init__(**kwargs)
        self.blocks = [
            getattr(tf.keras.layers, nonlinear_activation)(
                **nonlinear_activation_params
            ),
            TFReflectionPad1d((kernel_size - 1) // 2 * dilation_rate),  # pad so the dilated conv keeps the time length (odd kernel sizes)
            tf.keras.layers.Conv1D(
                filters=filters,
                kernel_size=kernel_size,
                dilation_rate=dilation_rate,
                use_bias=use_bias,
                kernel_initializer=get_initializer(initializer_seed),
            ),
            getattr(tf.keras.layers, nonlinear_activation)(
                **nonlinear_activation_params
            ),
            tf.keras.layers.Conv1D(
                filters=filters,
                kernel_size=1,
                use_bias=use_bias,
                kernel_initializer=get_initializer(initializer_seed),
            ),
        ]
        self.shortcut = tf.keras.layers.Conv1D(
            filters=filters,
            kernel_size=1,
            use_bias=use_bias,
            kernel_initializer=get_initializer(initializer_seed),
            name="shortcut",
        )

        # apply weightnorm
        if is_weight_norm:
            self._apply_weightnorm(self.blocks)
            self.shortcut = WeightNormalization(self.shortcut)
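
TFReflectionPad1d is referenced above but not shown. A minimal sketch, assuming
it reflection-pads the time axis of a (batch, time, channels) tensor so that
the following "valid" dilated convolution preserves sequence length:

    import tensorflow as tf

    class TFReflectionPad1d(tf.keras.layers.Layer):
        """Sketch of a reflection-padding layer for the time axis."""

        def __init__(self, padding_size, **kwargs):
            super().__init__(**kwargs)
            self.padding_size = padding_size

        def call(self, x):
            # Pad `padding_size` frames on each side of the time axis.
            return tf.pad(
                x,
                [[0, 0], [self.padding_size, self.padding_size], [0, 0]],
                "REFLECT",
            )

At call time such a stack is typically applied as shortcut(x) plus the blocks
composed in order, i.e. a pre-activation residual block with a 1x1-conv skip.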
Example #4
    def __init__(
        self,
        period,
        out_channels=1,
        n_layers=5,
        kernel_size=5,
        strides=3,
        filters=8,
        filter_scales=4,
        max_filters=1024,
        nonlinear_activation="LeakyReLU",
        nonlinear_activation_params={"alpha": 0.2},
        initializer_seed=42,
        is_weight_norm=False,
        **kwargs
    ):
        """Initialize the period-based discriminator module."""
        super().__init__(**kwargs)
        self.period = period
        self.out_filters = out_channels
        self.convs = []

        for i in range(n_layers):
            self.convs.append(
                tf.keras.layers.Conv2D(
                    filters=min(filters * (filter_scales ** (i + 1)), max_filters),
                    kernel_size=(kernel_size, 1),
                    strides=(strides, 1),
                    padding="same",
                )
            )
        self.conv_post = tf.keras.layers.Conv2D(
            filters=out_channels, kernel_size=(3, 1), padding="same",
        )
        self.activation = getattr(tf.keras.layers, nonlinear_activation)(
            **nonlinear_activation_params
        )

        if is_weight_norm:
            self._apply_weightnorm(self.convs)
            self.conv_post = WeightNormalization(self.conv_post)
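
The constructor only builds the Conv2D stack; the defining step of a period
discriminator happens at call time, when the waveform is reshaped into a 2D
grid whose second axis has width `period`, so the (kernel_size, 1) kernels
above convolve each periodic phase independently. A hedged sketch of that
reshape (function name and padding mode are assumptions, not the verbatim
upstream code):

    import tensorflow as tf

    def to_period_grid(x, period):
        """Sketch: reshape (batch, time, 1) audio to (batch, time', period, 1)."""
        t = tf.shape(x)[1]
        n_pad = (period - t % period) % period  # pad time to a multiple of period
        x = tf.pad(x, [[0, 0], [0, n_pad], [0, 0]], "REFLECT")
        return tf.reshape(x, [tf.shape(x)[0], (t + n_pad) // period, period, 1])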