import math

import torch
from torch import nn

# Assumption: `conv` is the project-local conv module (as in wavenet_vocoder's
# conv.py), whose Conv1d extends nn.Conv1d with incremental-inference support.
from wavenet_vocoder import conv


def Conv1d(in_channels,
           out_channels,
           kernel_size=1,
           padding=0,
           dilation=1,
           bias=True,
           weight_normalization=True,
           dropout=0,
           std_mul=1.0,
           **kwargs):
    m = conv.Conv1d(in_channels,
                    out_channels,
                    kernel_size=kernel_size,
                    padding=padding,
                    dilation=dilation,
                    bias=bias,
                    **kwargs)

    if weight_normalization:
        assert bias
        std = math.sqrt(
            (std_mul * (1.0 - dropout)) / (m.kernel_size[0] * in_channels))
        m.weight.data.normal_(mean=0, std=std)
        m.bias.data.zero_()
        return nn.utils.weight_norm(m)
    else:
        return m
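# Usage sketch (an assumption, not from the source): a dilated convolution
# over a dummy (batch, channels, time) tensor; all shapes here are made up.
layer = Conv1d(64, 128, kernel_size=3, padding=2, dilation=2)
out = layer(torch.randn(8, 64, 100))  # -> torch.Size([8, 128, 100])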
# Alternative Conv1d from a separate example: always weight-normalized, with a
# std_mul factor scaling the initialization (DeepVoice3-style defaults to 4.0).
def Conv1d(in_channels,
           out_channels,
           kernel_size,
           dropout=0,
           std_mul=4.0,
           **kwargs):
    m = conv.Conv1d(in_channels, out_channels, kernel_size, **kwargs)
    std = math.sqrt(
        (std_mul * (1.0 - dropout)) / (m.kernel_size[0] * in_channels))
    m.weight.data.normal_(mean=0, std=std)
    m.bias.data.zero_()
    return nn.utils.weight_norm(m)
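# Worked example (values assumed): with the defaults std_mul=4.0 and dropout=0,
# a kernel_size=3, in_channels=256 conv is initialized with
# std = sqrt(4.0 * 1.0 / (3 * 256)) ~= 0.072 before weight_norm reparameterizes it.
m = Conv1d(256, 512, kernel_size=3, padding=1)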
def Conv1d1x1(in_channels, out_channels, bias=True, weight_normalization=True):
    """1-by-1 convolution layer
    """
    if weight_normalization:
        assert bias
        return Conv1d(in_channels,
                      out_channels,
                      kernel_size=1,
                      padding=0,
                      dilation=1,
                      bias=bias,
                      std_mul=1.0)
    else:
        return conv.Conv1d(in_channels,
                           out_channels,
                           kernel_size=1,
                           padding=0,
                           dilation=1,
                           bias=bias)
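# Usage sketch (an assumption, not from the source): a 1x1 conv acts as a
# pointwise channel projection; the time dimension is unchanged.
proj = Conv1d1x1(64, 256)
y = proj(torch.randn(8, 64, 100))  # -> torch.Size([8, 256, 100])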
class ResidualConv1dGLU(nn.Module):
    """Residual dilated conv1d with a gated linear unit (GLU),
    plus optional local and global conditioning inputs.
    """

    def __init__(self,
                 residual_channels,
                 gate_channels,
                 kernel_size,
                 skip_out_channels=None,
                 cin_channels=-1,
                 gin_channels=-1,
                 dropout=1 - 0.95,
                 padding=None,
                 dilation=1,
                 causal=True,
                 bias=True,
                 weight_normalization=True,
                 *args,
                 **kwargs):
        super(ResidualConv1dGLU, self).__init__()
        self.dropout = dropout
        if skip_out_channels is None:
            skip_out_channels = residual_channels
        if padding is None:
            # causal: pad so that no future time steps are needed
            # (the excess on the right is trimmed in the forward pass)
            if causal:
                padding = (kernel_size - 1) * dilation
            else:
                padding = (kernel_size - 1) // 2 * dilation
        self.causal = causal

        if weight_normalization:
            assert bias
            self.conv = Conv1d(residual_channels,
                               gate_channels,
                               kernel_size,
                               padding=padding,
                               dilation=dilation,
                               bias=bias,
                               std_mul=1.0,
                               *args,
                               **kwargs)
        else:
            self.conv = conv.Conv1d(residual_channels,
                                    gate_channels,
                                    kernel_size,
                                    padding=padding,
                                    dilation=dilation,
                                    bias=bias,
                                    *args,
                                    **kwargs)

        # local conditioning
        if cin_channels > 0:
            self.conv1x1c = Conv1d1x1(
                cin_channels,
                gate_channels,
                bias=bias,
                weight_normalization=weight_normalization)
        else:
            self.conv1x1c = None

        # global conditioning
        if gin_channels > 0:
            self.conv1x1g = Conv1d1x1(
                gin_channels,
                gate_channels,
                bias=bias,
                weight_normalization=weight_normalization)
        else:
            self.conv1x1g = None

        # conv output is split into two groups
        gate_out_channels = gate_channels // 2
        self.conv1x1_out = Conv1d1x1(gate_out_channels,
                                     residual_channels,
                                     bias=bias,
                                     weight_normalization=weight_normalization)
        self.conv1x1_skip = Conv1d1x1(
            gate_out_channels,
            skip_out_channels,
            bias=bias,
            weight_normalization=weight_normalization)
        self.tanh = nn.Tanh()
        self.sigmoid = nn.Sigmoid()
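# Construction sketch (an assumption, not from the source): a dilated residual
# block with 80-channel local conditioning, sized the way WaveNet-style
# vocoders commonly are; every number here is illustrative.
block = ResidualConv1dGLU(residual_channels=64,
                          gate_channels=128,
                          kernel_size=3,
                          skip_out_channels=128,
                          cin_channels=80,
                          dilation=2)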
Example #5
def Conv1d(in_channels, out_channels, kernel_size, dropout=0, **kwargs):
    # `dropout` is accepted for signature compatibility but unused here.
    m = conv.Conv1d(in_channels, out_channels, kernel_size, **kwargs)
    # Kaiming (He) normal initialization, suited to ReLU activations
    nn.init.kaiming_normal_(m.weight, nonlinearity="relu")
    if m.bias is not None:
        nn.init.constant_(m.bias, 0)
    return nn.utils.weight_norm(m)
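# Usage sketch (an assumption, not from the source): nn.utils.weight_norm
# reparameterizes `weight` into a magnitude `weight_g` and a direction `weight_v`.
m = Conv1d(64, 64, kernel_size=3, padding=1)
print(hasattr(m, "weight_g"), hasattr(m, "weight_v"))  # True True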
Example #6
def Conv1d(in_channels, out_channels, kernel_size, dropout=0, **kwargs):
    # Plain variant: no custom initialization and no weight normalization;
    # `dropout` is accepted for signature compatibility but unused.
    m = conv.Conv1d(in_channels, out_channels, kernel_size, **kwargs)
    return m