def __init__(
    self,
    in_planes,
    out_planes,
    stride=1,
    mid_planes_and_cardinality=None,
    reduction=4,
    final_bn_relu=True,
    use_se=False,
    se_reduction_ratio=16,
):
    """Bottleneck residual layer: 1x1 reduce -> 3x3 (grouped) -> 1x1 expand.

    Args:
        in_planes: number of input channels (positive int).
        out_planes: number of output channels (positive int).
        stride: stride of the middle 3x3 convolution (positive int or tuple
            of positive ints).
        mid_planes_and_cardinality: optional ``(mid_planes, cardinality)``
            pair (ResNeXt-style). When given, it overrides the
            reduction-derived bottleneck width with
            ``mid_planes * cardinality`` and sets the group count of the
            3x3 convolution.
        reduction: channel reduction factor used to size the bottleneck
            when ``mid_planes_and_cardinality`` is not given.
        final_bn_relu: forwarded to the parent layer; whether a final
            BatchNorm + ReLU is applied after the residual addition.
        use_se: whether the parent layer appends a squeeze-and-excitation
            block.
        se_reduction_ratio: reduction ratio of the SE block.
    """
    # assertions on inputs:
    assert is_pos_int(in_planes) and is_pos_int(out_planes)
    assert (is_pos_int(stride) or is_pos_int_tuple(stride)) and is_pos_int(
        reduction
    )

    # Bottleneck width: derived from the reduction factor by default, or
    # given explicitly as mid_planes * cardinality (ResNeXt).
    bottleneck_planes = int(math.ceil(out_planes / reduction))
    cardinality = 1
    if mid_planes_and_cardinality is not None:
        mid_planes, cardinality = mid_planes_and_cardinality
        bottleneck_planes = mid_planes * cardinality

    # define convolutional layers; only the middle 3x3 conv is strided
    # and grouped:
    convolutional_block = nn.Sequential(
        conv1x1(in_planes, bottleneck_planes),
        nn.BatchNorm2d(bottleneck_planes),
        nn.ReLU(inplace=INPLACE),
        conv3x3(
            bottleneck_planes, bottleneck_planes, stride=stride, groups=cardinality
        ),
        nn.BatchNorm2d(bottleneck_planes),
        nn.ReLU(inplace=INPLACE),
        conv1x1(bottleneck_planes, out_planes),
    )

    # call constructor of generic layer (zero-arg super(): Python 3 idiom,
    # consistent with the basic-layer sibling):
    super().__init__(
        convolutional_block,
        in_planes,
        out_planes,
        stride=stride,
        reduction=reduction,
        final_bn_relu=final_bn_relu,
        use_se=use_se,
        se_reduction_ratio=se_reduction_ratio,
    )
def __init__(
    self,
    convolutional_block,
    in_planes,
    out_planes,
    stride=1,
    mid_planes_and_cardinality=None,
    reduction=4,
    final_bn_relu=True,
    use_se=False,
    se_reduction_ratio=16,
):
    """Generic residual layer wrapping a caller-supplied convolutional block.

    Stores the block and sets up the residual machinery: an optional final
    BatchNorm + ReLU, a 1x1 down-sampling projection when a direct skip
    connection is impossible, and an optional squeeze-and-excitation block.

    Args:
        convolutional_block: the nn.Module applied on the main path.
        in_planes: number of input channels (positive int).
        out_planes: number of output channels (positive int).
        stride: stride of the block (positive int or tuple of positive
            ints); also applied to the down-sampling projection.
        mid_planes_and_cardinality: unused here; accepted only so the
            signature stays compatible with the subclass constructors.
        reduction: validated positive int; not used directly by this class.
        final_bn_relu: whether to create the final BatchNorm + ReLU pair.
        use_se: whether to create a squeeze-and-excitation block.
        se_reduction_ratio: reduction ratio of the SE block.
    """
    # assertions on inputs:
    assert is_pos_int(in_planes) and is_pos_int(out_planes)
    assert (is_pos_int(stride) or is_pos_int_tuple(stride)) and is_pos_int(
        reduction
    )

    # set object fields (zero-arg super(): Python 3 idiom, consistent with
    # the subclasses):
    super().__init__()
    self.convolutional_block = convolutional_block
    self.final_bn_relu = final_bn_relu

    # final batchnorm and relu layer:
    if final_bn_relu:
        self.bn = nn.BatchNorm2d(out_planes)
        self.relu = nn.ReLU(inplace=INPLACE)

    # define down-sampling layer (if direct residual impossible): needed
    # whenever the spatial size or the channel count changes.
    self.downsample = None
    if (stride != 1 and stride != (1, 1)) or in_planes != out_planes:
        self.downsample = nn.Sequential(
            conv1x1(in_planes, out_planes, stride=stride),
            nn.BatchNorm2d(out_planes),
        )

    # optional squeeze-and-excitation block:
    self.se = (
        SqueezeAndExcitationLayer(out_planes, reduction_ratio=se_reduction_ratio)
        if use_se
        else None
    )
def __init__(
    self,
    in_planes,
    out_planes,
    stride=1,
    mid_planes_and_cardinality=None,
    reduction=1,
    final_bn_relu=True,
    use_se=False,
    se_reduction_ratio=16,
):
    """Basic residual layer: two stacked 3x3 convolutions.

    Args:
        in_planes: number of input channels (positive int).
        out_planes: number of output channels (positive int).
        stride: stride of the first 3x3 convolution (positive int or
            tuple of positive ints).
        mid_planes_and_cardinality: unused by the basic layer; present
            only for signature compatibility with the sibling layers.
        reduction: validated positive int, forwarded to the parent.
        final_bn_relu: forwarded to the parent layer.
        use_se: whether the parent layer appends a squeeze-and-excitation
            block.
        se_reduction_ratio: reduction ratio of the SE block.
    """
    # validate inputs:
    assert is_pos_int(in_planes)
    assert is_pos_int(out_planes)
    assert is_pos_int(stride) or is_pos_int_tuple(stride)
    assert is_pos_int(reduction)

    # main path: 3x3 conv -> BN -> ReLU -> 3x3 conv; only the first
    # convolution carries the stride.
    conv_block = nn.Sequential(
        conv3x3(in_planes, out_planes, stride=stride),
        nn.BatchNorm2d(out_planes),
        nn.ReLU(inplace=INPLACE),
        conv3x3(out_planes, out_planes),
    )

    # hand the block to the generic residual layer:
    super().__init__(
        conv_block,
        in_planes,
        out_planes,
        stride=stride,
        reduction=reduction,
        final_bn_relu=final_bn_relu,
        use_se=use_se,
        se_reduction_ratio=se_reduction_ratio,
    )