Example #1
    def __init__(
        self,
        input_shape,
        inner_dim,
        activation=torch.nn.Sigmoid,
        norm=BatchNorm1d,
    ):
        super().__init__()
        self.inner_dim = inner_dim
        self.norm = norm
        self.activation = activation

        bz, t, chn = input_shape  # (batch, time, channels)

        # Pointwise depthwise-separable conv, then normalization and the
        # activation; the squeeze-and-excitation path gates its output.
        self.conv = Sequential(input_shape=input_shape)
        self.conv.append(
            DepthwiseSeparableConv1d,
            out_channels=chn,
            kernel_size=1,
            stride=1,
        )
        self.conv.append(self.norm)
        self.conv.append(self.activation())

        # "Squeeze": global average pooling over time. "Excitation": a
        # two-layer bottleneck MLP producing per-channel gating weights.
        self.avg_pool = AdaptivePool(1)
        self.bottleneck = Sequential(
            Linear(input_size=input_shape[-1], n_neurons=self.inner_dim),
            self.activation(),
            Linear(input_size=self.inner_dim, n_neurons=chn),
            self.activation(),
        )
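
A minimal usage sketch for this module follows. It assumes the snippet is SpeechBrain's SEmodule from speechbrain.lobes.models.ContextNet and that inputs are laid out as (batch, time, channels); both are assumptions, not stated in the snippet itself.

# Hypothetical usage; import path and input layout are assumptions.
import torch
from speechbrain.lobes.models.ContextNet import SEmodule

x = torch.rand(8, 120, 256)  # (batch, time, channels)
se = SEmodule(input_shape=x.shape, inner_dim=64)
out = se(x)  # per-channel rescaling; shape is unchanged
print(out.shape)  # torch.Size([8, 120, 256])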
Example #2
    def __init__(
        self,
        out_channels,
        kernel_size,
        num_layers,
        inner_dim,
        input_shape,
        stride=1,
        beta=1,
        dropout=0.15,
        activation=Swish,
        se_activation=torch.nn.Sigmoid,
        norm=BatchNorm1d,
        residual=True,
    ):
        super().__init__()
        self.residual = residual

        # Stack of depthwise-separable convolutions; only the last layer
        # applies the stride, so temporal downsampling happens at most once.
        self.Convs = Sequential(input_shape=input_shape)
        for i in range(num_layers):
            self.Convs.append(
                DepthwiseSeparableConv1d,
                out_channels,
                kernel_size,
                stride=stride if i == num_layers - 1 else 1,
            )
            self.Convs.append(norm)

        # Squeeze-and-excitation applied to the conv stack's output.
        self.SE = SEmodule(
            input_shape=self.Convs.get_output_shape(),
            inner_dim=inner_dim,
            activation=se_activation,
            norm=norm,
        )
        self.drop = Dropout(dropout)
        self.reduced_cov = None
        if residual:
            # Residual branch: a strided Conv1d plus norm projects the input
            # so its shape matches the main branch before the addition.
            self.reduced_cov = Sequential(input_shape=input_shape)
            self.reduced_cov.append(
                Conv1d,
                out_channels,
                kernel_size=3,
                stride=stride,
            )
            self.reduced_cov.append(norm)

        # `activation` is passed as a class, not an instance, so
        # isinstance(activation, Swish) would never match; issubclass()
        # correctly routes the `beta` argument to Swish.
        if issubclass(activation, Swish):
            self.activation = activation(beta)
        else:
            self.activation = activation()

        self._reset_params()
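
Read together with Example #1, this block chains the conv stack, the SE module, and an optional strided residual projection. A usage sketch follows, assuming the class is SpeechBrain's ContextNetBlock (class name and import path are assumptions).

# Hypothetical usage; class and import path are assumptions.
import torch
from speechbrain.lobes.models.ContextNet import ContextNetBlock

x = torch.rand(8, 120, 40)  # (batch, time, channels)
block = ContextNetBlock(
    out_channels=256,
    kernel_size=3,
    num_layers=5,
    inner_dim=64,
    input_shape=x.shape,
    stride=2,  # applied in the last conv layer and the residual branch
)
out = block(x)
print(out.shape)  # time halved by the stride: torch.Size([8, 60, 256])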
Example #3
    def __init__(
        self,
        num_layers,
        out_channels,
        input_shape,
        kernel_size=3,
        stride=1,
        dilation=1,
        residual=False,
        conv_module=Conv2d,
        activation=torch.nn.LeakyReLU,
        norm=None,
        dropout=0.1,
    ):
        super().__init__()

        # Conv -> (optional norm) -> activation -> dropout, repeated
        # num_layers times; only the last conv applies the stride.
        self.convs = Sequential(input_shape=input_shape)

        for i in range(num_layers):
            self.convs.append(
                conv_module,
                out_channels=out_channels,
                kernel_size=kernel_size,
                stride=stride if i == num_layers - 1 else 1,
                dilation=dilation,
                layer_name=f"conv_{i}",
            )
            if norm is not None:
                self.convs.append(norm, layer_name=f"norm_{i}")
            self.convs.append(activation(), layer_name=f"act_{i}")
            self.convs.append(
                torch.nn.Dropout(dropout), layer_name=f"dropout_{i}"
            )

        self.reduce_conv = None
        self.drop = None
        if residual:
            # Residual branch: a 1x1 strided conv projects the input to the
            # main branch's output shape.
            self.reduce_conv = Sequential(input_shape=input_shape)
            self.reduce_conv.append(
                conv_module,
                out_channels=out_channels,
                kernel_size=1,
                stride=stride,
                layer_name="conv",
            )
            # Guard against the default norm=None, mirroring the check in
            # the main loop above.
            if norm is not None:
                self.reduce_conv.append(norm, layer_name="norm")
            self.drop = torch.nn.Dropout(dropout)
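
This constructor resembles the generic ConvBlock from SpeechBrain's convolution lobes, which defaults to 2D convolutions. A usage sketch under that assumption (class name and import path are not confirmed by the snippet):

# Hypothetical usage; class and import path are assumptions.
import torch
from speechbrain.lobes.models.convolution import ConvBlock

x = torch.rand(8, 30, 10)  # (batch, time, features)
conv = ConvBlock(num_layers=2, out_channels=16, input_shape=x.shape)
out = conv(x)  # Conv2d adds a trailing channel dimension
print(out.shape)  # torch.Size([8, 30, 10, 16])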