def resnet34():
    """Build the transform pipeline for a ResNet-34-style feature extractor.

    Returns a list of ``T`` transforms that: (1) permute the input from
    channels-last to NCHW, (2) run it through a 7x7 stem conv followed by the
    ResNet-34 block layout (3/4/6/3 BasicBlocks) with the frequency axis
    average-pooled to size 1, then (3) drop that axis and (4) transpose back
    to (batch, time, features).
    """

    class _Stub:
        # Minimal stand-in for a ResNet instance; ``_make_layer`` reads and
        # updates ``inplanes`` as it chains layers.
        # NOTE(review): assumes the project's ResNet._make_layer touches no
        # other attribute of ``self`` — confirm against the ResNet class.
        inplanes = n_features >> 3

    stub = _Stub()

    def stack(*args, **kwargs):
        # Borrow the unbound ResNet._make_layer, driving it with the stub.
        return ResNet._make_layer(stub, BasicBlock, *args, **kwargs)

    return [
        T('x', 'x', lambda x: x.permute(0, 3, 1, 2)),
        T(
            'x', 'x',
            nn.Sequential(
                Conv2d(n_ch, n_features >> 3, kernel_size=(7, 7),
                       padding=(3, 3), bias=False),
                stack(n_features >> 3, 3),
                stack(n_features >> 2, 4, stride=2),
                stack(n_features >> 1, 6, stride=2),
                stack(n_features, 3, stride=2),
                nn.AdaptiveAvgPool2d((None, 1)),  # avgpool the freq. dim.
            )),
        T('x', 'x', lambda x: x.squeeze(3)),
        T('x', 'x', lambda x: x.transpose(1, 2)),
    ]
def make_res_layer(block, inplanes, planes, blocks, norm_layer=nn.BatchNorm2d, base_width=64, groups=1, stride=1, dilation=1, dilate=False, **kwargs) -> nn.Module:
    """Create a single ResNet layer without instantiating a full ResNet.

    Drives ``RN._make_layer`` with a lightweight attribute bag standing in
    for ``self``, so only the fields that method actually touches need to
    exist.

    Args:
        block: Block class, e.g. ``BasicBlock`` or ``Bottleneck``.
        inplanes: Number of input channels feeding the layer.
        planes: Base number of output channels per block.
        blocks: Number of blocks stacked in the layer.
        norm_layer: Normalisation module class used inside the blocks.
        base_width: Width factor for the ``Bottleneck`` block's inner size;
            used together with ``groups``.
        groups: Number of convolution groups.
        stride: Stride applied by the first block of the layer.
        dilation: Dilation carried into the layer.
        dilate: If True, trade the stride for extra dilation.
        **kwargs: Accepted for call-site compatibility; ignored here.

    Returns:
        The ``nn.Module`` produced by ``RN._make_layer``.
    """
    # Attribute bag acting as "self" — ``RN._make_layer`` reads (and
    # mutates ``inplanes`` on) exactly these fields.
    fake_self = Dict(
        inplanes=inplanes,
        _norm_layer=norm_layer,
        base_width=base_width,
        groups=groups,
        dilation=dilation,
    )
    # almost a ResNet
    return RN._make_layer(self=fake_self, block=block, planes=planes,
                          blocks=blocks, stride=stride, dilate=dilate)