def __init__(self, n_inputs, n_shortcut, n_outputs, kernel_size, stride, depth,
             conv_type, res):
    """Upsampling block: transposed-conv upsample, then conv stacks before and
    after merging the skip ("shortcut") connection.

    :param n_inputs: Number of input channels to this block
    :param n_shortcut: Number of channels arriving via the shortcut connection
    :param n_outputs: Number of output channels produced by this block
    :param kernel_size: Kernel size for all conv layers in the block
    :param stride: Upsampling factor (must be > 1)
    :param conv_type: Convolution variant passed through to ConvLayer
    :param res: "fixed" selects a fixed sinc-lowpass resampler; anything else
                uses a learned transposed convolution
    :raises ValueError: if stride is not greater than 1
    """
    super().__init__()
    # Validate with an explicit raise instead of `assert`, which is
    # silently stripped when Python runs with -O.
    if stride <= 1:
        raise ValueError("stride must be > 1 for an upsampling block")

    # CONV 1 for UPSAMPLING
    if res == "fixed":
        # 15-tap fixed sinc lowpass filter (not learned) for resampling.
        self.upconv = Resample1d(n_inputs, 15, stride, transpose=True)
    else:
        self.upconv = ConvLayer(n_inputs, n_inputs, kernel_size, stride,
                                conv_type, transpose=True)

    # Conv stack applied before the shortcut is concatenated.
    self.pre_shortcut_convs = nn.ModuleList(
        [ConvLayer(n_inputs, n_outputs, kernel_size, 1, conv_type)] +
        [ConvLayer(n_outputs, n_outputs, kernel_size, 1, conv_type)
         for _ in range(depth - 1)])

    # CONVS to combine high- with low-level information (from shortcut).
    # First layer consumes the concatenated channels (n_outputs + n_shortcut).
    self.post_shortcut_convs = nn.ModuleList(
        [ConvLayer(n_outputs + n_shortcut, n_outputs, kernel_size, 1,
                   conv_type)] +
        [ConvLayer(n_outputs, n_outputs, kernel_size, 1, conv_type)
         for _ in range(depth - 1)])
def __init__(self, n_inputs, n_shortcut, n_outputs, kernel_size, stride, depth,
             conv_type, res):
    """Downsampling block: conv stack to the shortcut branch, conv stack after
    it, then strided decimation.

    :param n_inputs: Number of input channels to this block
    :param n_shortcut: Number of channels emitted on the shortcut connection
    :param n_outputs: Number of output channels produced by this block
    :param kernel_size: Kernel size for all conv layers in the block
    :param stride: Decimation factor (must be > 1)
    :param conv_type: Convolution variant passed through to ConvLayer
    :param res: "fixed" selects a fixed sinc-lowpass resampler; anything else
                uses a learned strided convolution
    :raises ValueError: if stride is not greater than 1
    """
    super().__init__()
    # Validate with an explicit raise instead of `assert`, which is
    # silently stripped when Python runs with -O.
    if stride <= 1:
        raise ValueError("stride must be > 1 for a downsampling block")

    self.kernel_size = kernel_size
    self.stride = stride

    # CONV 1: produces the features that feed the shortcut connection.
    self.pre_shortcut_convs = nn.ModuleList(
        [ConvLayer(n_inputs, n_shortcut, kernel_size, 1, conv_type)] +
        [ConvLayer(n_shortcut, n_shortcut, kernel_size, 1, conv_type)
         for _ in range(depth - 1)])

    # Conv stack applied after the shortcut branch splits off.
    self.post_shortcut_convs = nn.ModuleList(
        [ConvLayer(n_shortcut, n_outputs, kernel_size, 1, conv_type)] +
        [ConvLayer(n_outputs, n_outputs, kernel_size, 1, conv_type)
         for _ in range(depth - 1)])

    # CONV 2 with decimation
    if res == "fixed":
        # Resampling with fixed-size (15-tap) sinc lowpass filter.
        self.downconv = Resample1d(n_outputs, 15, stride)
    else:
        self.downconv = ConvLayer(n_outputs, n_outputs, kernel_size, stride,
                                  conv_type)