def __init__(self, in_channels, out_channels, data_format="channels_last", **kwargs):
    """Build the PeleeNet stem: a stride-2 conv, two parallel downscaling branches, and a projection conv."""
    super(StemBlock, self).__init__(**kwargs)
    branch_mid_channels = out_channels // 2
    # The two branches each emit `out_channels`, so their concatenation doubles it.
    concat_channels = out_channels * 2

    self.first_conv = conv3x3_block(
        in_channels=in_channels,
        out_channels=out_channels,
        strides=2,
        data_format=data_format,
        name="first_conv")
    self.branches = Concurrent(data_format=data_format, name="branches")
    self.branches.add(PeleeBranch1(
        in_channels=out_channels,
        out_channels=out_channels,
        mid_channels=branch_mid_channels,
        strides=2,
        data_format=data_format,
        name="branch1"))
    self.branches.add(MaxPool2d(
        pool_size=2,
        strides=2,
        padding=0,
        data_format=data_format,
        name="branch2"))
    self.last_conv = conv1x1_block(
        in_channels=concat_channels,
        out_channels=out_channels,
        data_format=data_format,
        name="last_conv")
def __init__(self, data_format="channels_last", **kwargs):
    """Build the PolyNet Reduction-B unit (expects a 1152-channel input)."""
    super(ReductionBUnit, self).__init__(**kwargs)
    in_channels = 1152
    self.branches = Concurrent(data_format=data_format, name="branches")
    # Conv branches as (out_channels, kernel_sizes, strides, paddings) tuples.
    conv_branch_specs = (
        ((256, 256, 256), (1, 3, 3), (1, 1, 2), (0, 1, 0)),
        ((256, 256), (1, 3), (1, 2), (0, 0)),
        ((256, 384), (1, 3), (1, 2), (0, 0)),
    )
    for i, (channels, kernels, strides, paddings) in enumerate(conv_branch_specs):
        self.branches.add(ConvSeqBranch(
            in_channels=in_channels,
            out_channels_list=channels,
            kernel_size_list=kernels,
            strides_list=strides,
            padding_list=paddings,
            data_format=data_format,
            name="branch{}".format(i + 1)))
    self.branches.add(MaxPoolBranch(
        data_format=data_format,
        name="branch4"))
def __init__(self, data_format="channels_last", **kwargs):
    """Build the PolyNet two-way Inception-A block (expects a 384-channel input)."""
    super(TwoWayABlock, self).__init__(**kwargs)
    in_channels = 384
    self.branches = Concurrent(data_format=data_format, name="branches")
    self.branches.add(ConvSeqBranch(
        in_channels=in_channels,
        out_channels_list=(32, 48, 64),
        kernel_size_list=(1, 3, 3),
        strides_list=(1, 1, 1),
        padding_list=(0, 1, 1),
        data_format=data_format,
        name="branch1"))
    self.branches.add(ConvSeqBranch(
        in_channels=in_channels,
        out_channels_list=(32, 32),
        kernel_size_list=(1, 3),
        strides_list=(1, 1),
        padding_list=(0, 1),
        data_format=data_format,
        name="branch2"))
    self.branches.add(Conv1x1Branch(
        in_channels=in_channels,
        out_channels=32,
        data_format=data_format,
        name="branch3"))
    # Project the concatenated branch outputs (64 + 32 + 32 = 128) back to the input width.
    self.conv = conv1x1_block(
        in_channels=128,
        out_channels=in_channels,
        activation=None,
        data_format=data_format,
        name="conv")
def __init__(self, in_channels, mid_channels, use_bias, use_bn, data_format="channels_last", **kwargs):
    """Build the LFFD detection block: a shared 1x1 conv followed by bbox and score branches."""
    super(LffdDetectionBlock, self).__init__(**kwargs)
    self.conv = conv1x1_block(
        in_channels=in_channels,
        out_channels=mid_channels,
        use_bias=use_bias,
        use_bn=use_bn,
        data_format=data_format,
        name="conv")
    self.branches = Concurrent(
        data_format=data_format,
        name="branches")
    # 4 outputs for the bounding-box regression, 2 for the face/background score.
    for branch_name, branch_out_channels in (("bbox_branch", 4), ("score_branch", 2)):
        self.branches.add(LffdDetectionBranch(
            in_channels=mid_channels,
            out_channels=branch_out_channels,
            use_bias=use_bias,
            use_bn=use_bn,
            data_format=data_format,
            name=branch_name))
class PolyBlock4a(nn.Layer):
    """
    PolyNet type Mixed-4a block.

    Parameters:
    ----------
    data_format : str, default 'channels_last'
        The ordering of the dimensions in tensors.
    """
    def __init__(self, data_format="channels_last", **kwargs):
        super(PolyBlock4a, self).__init__(**kwargs)
        self.branches = Concurrent(data_format=data_format, name="branches")
        # Short 1x1 -> 3x3 path.
        self.branches.add(ConvSeqBranch(
            in_channels=160,
            out_channels_list=(64, 96),
            kernel_size_list=(1, 3),
            strides_list=(1, 1),
            padding_list=(0, 0),
            data_format=data_format,
            name="branch1"))
        # Factorized 7x7 path (7x1 followed by 1x7), then a 3x3.
        self.branches.add(ConvSeqBranch(
            in_channels=160,
            out_channels_list=(64, 64, 64, 96),
            kernel_size_list=(1, (7, 1), (1, 7), 3),
            strides_list=(1, 1, 1, 1),
            padding_list=(0, (3, 0), (0, 3), 0),
            data_format=data_format,
            name="branch2"))

    def call(self, x, training=None):
        return self.branches(x, training=training)
def __init__(self, data_format="channels_last", **kwargs):
    """Build the PolyNet Mixed-5a block: parallel max-pool and stride-2 3x3 conv branches."""
    super(PolyBlock5a, self).__init__(**kwargs)
    self.branches = Concurrent(data_format=data_format, name="branches")
    self.branches.add(MaxPoolBranch(
        data_format=data_format,
        name="branch1"))
    self.branches.add(Conv3x3Branch(
        in_channels=192,
        out_channels=192,
        data_format=data_format,
        name="branch2"))
class LffdDetectionBlock(nn.Layer):
    """
    LFFD specific detection block.

    Parameters:
    ----------
    in_channels : int
        Number of input channels.
    mid_channels : int
        Number of middle channels.
    use_bias : bool
        Whether the layer uses a bias vector.
    use_bn : bool
        Whether to use BatchNorm layer.
    data_format : str, default 'channels_last'
        The ordering of the dimensions in tensors.
    """
    def __init__(self, in_channels, mid_channels, use_bias, use_bn, data_format="channels_last", **kwargs):
        super(LffdDetectionBlock, self).__init__(**kwargs)
        self.conv = conv1x1_block(
            in_channels=in_channels,
            out_channels=mid_channels,
            use_bias=use_bias,
            use_bn=use_bn,
            data_format=data_format,
            name="conv")
        self.branches = Concurrent(
            data_format=data_format,
            name="branches")
        # 4 outputs for bounding-box regression, 2 for the classification score.
        for branch_name, branch_out_channels in (("bbox_branch", 4), ("score_branch", 2)):
            self.branches.add(LffdDetectionBranch(
                in_channels=mid_channels,
                out_channels=branch_out_channels,
                use_bias=use_bias,
                use_bn=use_bn,
                data_format=data_format,
                name=branch_name))

    def call(self, x, training=None):
        x = self.conv(x, training=training)
        return self.branches(x, training=training)
class StemBlock(nn.Layer):
    """
    PeleeNet stem block.

    Parameters:
    ----------
    in_channels : int
        Number of input channels.
    out_channels : int
        Number of output channels.
    data_format : str, default 'channels_last'
        The ordering of the dimensions in tensors.
    """
    def __init__(self, in_channels, out_channels, data_format="channels_last", **kwargs):
        super(StemBlock, self).__init__(**kwargs)
        branch_mid_channels = out_channels // 2
        # The two branches each emit `out_channels`; their concatenation doubles it.
        concat_channels = out_channels * 2

        self.first_conv = conv3x3_block(
            in_channels=in_channels,
            out_channels=out_channels,
            strides=2,
            data_format=data_format,
            name="first_conv")
        self.branches = Concurrent(data_format=data_format, name="branches")
        self.branches.add(PeleeBranch1(
            in_channels=out_channels,
            out_channels=out_channels,
            mid_channels=branch_mid_channels,
            strides=2,
            data_format=data_format,
            name="branch1"))
        self.branches.add(MaxPool2d(
            pool_size=2,
            strides=2,
            padding=0,
            data_format=data_format,
            name="branch2"))
        self.last_conv = conv1x1_block(
            in_channels=concat_channels,
            out_channels=out_channels,
            data_format=data_format,
            name="last_conv")

    def call(self, x, training=None):
        x = self.first_conv(x, training=training)
        x = self.branches(x, training=training)
        return self.last_conv(x, training=training)
def __init__(self, data_format="channels_last", **kwargs):
    """Build the InceptionV4 Inception-B unit (expects a 1024-channel input)."""
    super(InceptionBUnit, self).__init__(**kwargs)
    in_channels = 1024
    self.branches = Concurrent(data_format=data_format, name="branches")
    self.branches.children.append(Conv1x1Branch(
        in_channels=in_channels,
        out_channels=384,
        data_format=data_format,
        name="branch1"))
    # Single factorized-7x7 path.
    self.branches.children.append(ConvSeqBranch(
        in_channels=in_channels,
        out_channels_list=(192, 224, 256),
        kernel_size_list=(1, (1, 7), (7, 1)),
        strides_list=(1, 1, 1),
        padding_list=(0, (0, 3), (3, 0)),
        data_format=data_format,
        name="branch2"))
    # Double factorized-7x7 path.
    self.branches.children.append(ConvSeqBranch(
        in_channels=in_channels,
        out_channels_list=(192, 192, 224, 224, 256),
        kernel_size_list=(1, (7, 1), (1, 7), (7, 1), (1, 7)),
        strides_list=(1, 1, 1, 1, 1),
        padding_list=(0, (3, 0), (0, 3), (3, 0), (0, 3)),
        data_format=data_format,
        name="branch3"))
    self.branches.children.append(AvgPoolBranch(
        in_channels=in_channels,
        out_channels=128,
        data_format=data_format,
        name="branch4"))
def __init__(self, data_format="channels_last", **kwargs):
    """Build the InceptionV4 Inception-A unit (expects a 384-channel input)."""
    super(InceptionAUnit, self).__init__(**kwargs)
    in_channels = 384
    self.branches = Concurrent(data_format=data_format, name="branches")
    self.branches.children.append(Conv1x1Branch(
        in_channels=in_channels,
        out_channels=96,
        data_format=data_format,
        name="branch1"))
    self.branches.children.append(ConvSeqBranch(
        in_channels=in_channels,
        out_channels_list=(64, 96),
        kernel_size_list=(1, 3),
        strides_list=(1, 1),
        padding_list=(0, 1),
        data_format=data_format,
        name="branch2"))
    self.branches.children.append(ConvSeqBranch(
        in_channels=in_channels,
        out_channels_list=(64, 96, 96),
        kernel_size_list=(1, 3, 3),
        strides_list=(1, 1, 1),
        padding_list=(0, 1, 1),
        data_format=data_format,
        name="branch3"))
    self.branches.children.append(AvgPoolBranch(
        in_channels=in_channels,
        out_channels=96,
        data_format=data_format,
        name="branch4"))
def __init__(self, scale=0.2, activate=True, data_format="channels_last", **kwargs):
    """Build the Inception-ResNet-C unit (residual scale `scale`, optional final ReLU)."""
    super(InceptionCUnit, self).__init__(**kwargs)
    self.activate = activate
    self.scale = scale
    in_channels = 2080
    self.branches = Concurrent(data_format=data_format, name="branches")
    self.branches.children.append(Conv1x1Branch(
        in_channels=in_channels,
        out_channels=192,
        data_format=data_format,
        name="branch1"))
    self.branches.children.append(ConvSeqBranch(
        in_channels=in_channels,
        out_channels_list=(192, 224, 256),
        kernel_size_list=(1, (1, 3), (3, 1)),
        strides_list=(1, 1, 1),
        padding_list=(0, (0, 1), (1, 0)),
        data_format=data_format,
        name="branch2"))
    # Project the concatenated branches (192 + 256 = 448) back to the input width.
    self.conv = conv1x1(
        in_channels=448,
        out_channels=in_channels,
        use_bias=True,
        data_format=data_format,
        name="conv")
    if self.activate:
        self.activ = nn.ReLU()
def __init__(self, data_format="channels_last", **kwargs):
    """Build the Inception-ResNet-B unit (residual scale 0.10, 1088-channel input)."""
    super(InceptionBUnit, self).__init__(**kwargs)
    self.scale = 0.10
    in_channels = 1088
    self.branches = Concurrent(data_format=data_format, name="branches")
    self.branches.children.append(Conv1x1Branch(
        in_channels=in_channels,
        out_channels=192,
        data_format=data_format,
        name="branch1"))
    self.branches.children.append(ConvSeqBranch(
        in_channels=in_channels,
        out_channels_list=(128, 160, 192),
        kernel_size_list=(1, (1, 7), (7, 1)),
        strides_list=(1, 1, 1),
        padding_list=(0, (0, 3), (3, 0)),
        data_format=data_format,
        name="branch2"))
    # Project the concatenated branches (192 + 192 = 384) back to the input width.
    self.conv = conv1x1(
        in_channels=384,
        out_channels=in_channels,
        use_bias=True,
        data_format=data_format,
        name="conv")
    self.activ = nn.ReLU()
def __init__(self, data_format="channels_last", **kwargs):
    """Build the Inception-ResNet-A unit (residual scale 0.17, 320-channel input)."""
    super(InceptionAUnit, self).__init__(**kwargs)
    self.scale = 0.17
    in_channels = 320
    self.branches = Concurrent(data_format=data_format, name="branches")
    self.branches.children.append(Conv1x1Branch(
        in_channels=in_channels,
        out_channels=32,
        data_format=data_format,
        name="branch1"))
    self.branches.children.append(ConvSeqBranch(
        in_channels=in_channels,
        out_channels_list=(32, 32),
        kernel_size_list=(1, 3),
        strides_list=(1, 1),
        padding_list=(0, 1),
        data_format=data_format,
        name="branch2"))
    self.branches.children.append(ConvSeqBranch(
        in_channels=in_channels,
        out_channels_list=(32, 48, 64),
        kernel_size_list=(1, 3, 3),
        strides_list=(1, 1, 1),
        padding_list=(0, 1, 1),
        data_format=data_format,
        name="branch3"))
    # Project the concatenated branches (32 + 32 + 64 = 128) back to the input width.
    self.conv = conv1x1(
        in_channels=128,
        out_channels=in_channels,
        use_bias=True,
        data_format=data_format,
        name="conv")
    self.activ = nn.ReLU()
def __init__(self, in_channels, out_channels, data_format="channels_last", **kwargs):
    """Build the InceptionV3 Inception-C unit (fixed 2048-channel output)."""
    super(InceptionCUnit, self).__init__(**kwargs)
    assert (out_channels == 2048)
    self.branches = Concurrent(data_format=data_format, name="branches")
    self.branches.children.append(Conv1x1Branch(
        in_channels=in_channels,
        out_channels=320,
        data_format=data_format,
        name="branch1"))
    self.branches.children.append(ConvSeq3x3Branch(
        in_channels=in_channels,
        out_channels_list=(384,),
        kernel_size_list=(1,),
        strides_list=(1,),
        padding_list=(0,),
        data_format=data_format,
        name="branch2"))
    self.branches.children.append(ConvSeq3x3Branch(
        in_channels=in_channels,
        out_channels_list=(448, 384),
        kernel_size_list=(1, 3),
        strides_list=(1, 1),
        padding_list=(0, 1),
        data_format=data_format,
        name="branch3"))
    self.branches.children.append(AvgPoolBranch(
        in_channels=in_channels,
        out_channels=192,
        data_format=data_format,
        name="branch4"))
def __init__(self, in_channels, out_channels, data_format="channels_last", **kwargs):
    """Build the InceptionV3 Reduction-B unit (768 -> 1280 channels)."""
    super(ReductionBUnit, self).__init__(**kwargs)
    assert (in_channels == 768)
    assert (out_channels == 1280)
    self.branches = Concurrent(data_format=data_format, name="branches")
    self.branches.children.append(ConvSeqBranch(
        in_channels=in_channels,
        out_channels_list=(192, 320),
        kernel_size_list=(1, 3),
        strides_list=(1, 2),
        padding_list=(0, 0),
        data_format=data_format,
        name="branch1"))
    self.branches.children.append(ConvSeqBranch(
        in_channels=in_channels,
        out_channels_list=(192, 192, 192, 192),
        kernel_size_list=(1, (1, 7), (7, 1), 3),
        strides_list=(1, 1, 1, 2),
        padding_list=(0, (0, 3), (3, 0), 0),
        data_format=data_format,
        name="branch2"))
    self.branches.children.append(MaxPoolBranch(
        data_format=data_format,
        name="branch3"))
def __init__(self, data_format="channels_last", **kwargs):
    """Build the InceptionV4 Inception-C unit (expects a 1536-channel input)."""
    super(InceptionCUnit, self).__init__(**kwargs)
    in_channels = 1536
    self.branches = Concurrent(data_format=data_format, name="branches")
    self.branches.children.append(Conv1x1Branch(
        in_channels=in_channels,
        out_channels=256,
        data_format=data_format,
        name="branch1"))
    self.branches.children.append(ConvSeq3x3Branch(
        in_channels=in_channels,
        out_channels=256,
        mid_channels_list=(384,),
        kernel_size_list=(1,),
        strides_list=(1,),
        padding_list=(0,),
        data_format=data_format,
        name="branch2"))
    self.branches.children.append(ConvSeq3x3Branch(
        in_channels=in_channels,
        out_channels=256,
        mid_channels_list=(384, 448, 512),
        kernel_size_list=(1, (3, 1), (1, 3)),
        strides_list=(1, 1, 1),
        padding_list=(0, (1, 0), (0, 1)),
        data_format=data_format,
        name="branch3"))
    self.branches.children.append(AvgPoolBranch(
        in_channels=in_channels,
        out_channels=256,
        data_format=data_format,
        name="branch4"))
def __init__(self, in_channels, out_channels, data_format="channels_last", **kwargs):
    """Build the InceptionV3 Reduction-A unit (288 -> 768 channels)."""
    super(ReductionAUnit, self).__init__(**kwargs)
    assert (in_channels == 288)
    assert (out_channels == 768)
    self.branches = Concurrent(data_format=data_format, name="branches")
    self.branches.children.append(ConvSeqBranch(
        in_channels=in_channels,
        out_channels_list=(384,),
        kernel_size_list=(3,),
        strides_list=(2,),
        padding_list=(0,),
        data_format=data_format,
        name="branch1"))
    self.branches.children.append(ConvSeqBranch(
        in_channels=in_channels,
        out_channels_list=(64, 96, 96),
        kernel_size_list=(1, 3, 3),
        strides_list=(1, 1, 2),
        padding_list=(0, 1, 0),
        data_format=data_format,
        name="branch2"))
    self.branches.children.append(MaxPoolBranch(
        data_format=data_format,
        name="branch3"))
def __init__(self, in_channels, out_channels, kernel_sizes, scale_factors, use_residual, in_size, bn_eps,
             data_format="channels_last", **kwargs):
    """Build an ESP block: grouped 1x1 conv, channel shuffle, parallel SB branches, pre-activation."""
    super(ESPBlock, self).__init__(**kwargs)
    self.use_residual = use_residual
    groups = len(kernel_sizes)
    mid_channels = int(out_channels / groups)
    # Channels left over after the even split; folded into the first branch.
    res_channels = out_channels - groups * mid_channels

    self.conv = conv1x1(
        in_channels=in_channels,
        out_channels=mid_channels,
        groups=groups,
        data_format=data_format,
        name="conv")
    self.c_shuffle = ChannelShuffle(
        channels=mid_channels,
        groups=groups,
        data_format=data_format,
        name="c_shuffle")
    self.branches = Concurrent(data_format=data_format, name="branches")
    for branch_idx in range(groups):
        branch_out_channels = mid_channels + (res_channels if branch_idx == 0 else 0)
        self.branches.add(SBBlock(
            in_channels=mid_channels,
            out_channels=branch_out_channels,
            kernel_size=kernel_sizes[branch_idx],
            scale_factor=scale_factors[branch_idx],
            size=in_size,
            bn_eps=bn_eps,
            data_format=data_format,
            name="branch{}".format(branch_idx + 1)))
    self.preactiv = PreActivation(
        in_channels=out_channels,
        bn_eps=bn_eps,
        data_format=data_format,
        name="preactiv")
def __init__(self, backbone, backbone_out_channels, channels, return_heatmap=False, topk=40, in_channels=3,
             in_size=(512, 512), classes=80, data_format="channels_last", **kwargs):
    """Build a CenterNet detector: backbone, upscaling decoder units, and heatmap/wh/reg heads."""
    super(CenterNet, self).__init__(**kwargs)
    self.in_size = in_size
    self.in_channels = in_channels
    self.return_heatmap = return_heatmap
    self.data_format = data_format

    self.backbone = backbone
    self.backbone._name = "backbone"

    self.decoder = tf.keras.Sequential(name="decoder")
    in_channels = backbone_out_channels
    for i, out_channels in enumerate(channels):
        self.decoder.add(CenterNetDecoderUnit(
            in_channels=in_channels,
            out_channels=out_channels,
            data_format=data_format,
            name="unit{}".format(i + 1)))
        in_channels = out_channels

    heads = Concurrent(data_format=data_format, name="heads")
    # NOTE(review): "heapmap_block" looks like a typo for "heatmap_block", but the layer name
    # may be relied upon by name-based weight loading — kept as-is deliberately.
    heads.add(CenterNetHeatmapBlock(
        in_channels=in_channels,
        out_channels=classes,
        do_nms=(not self.return_heatmap),
        data_format=data_format,
        name="heapmap_block"))
    heads.add(CenterNetHeadBlock(
        in_channels=in_channels,
        out_channels=2,
        data_format=data_format,
        name="wh_block"))
    heads.add(CenterNetHeadBlock(
        in_channels=in_channels,
        out_channels=2,
        data_format=data_format,
        name="reg_block"))
    self.decoder.add(heads)

    if not self.return_heatmap:
        self.heatmap_max_det = CenterNetHeatmapMaxDet(
            topk=topk,
            scale=4,
            data_format=data_format,
            name="heatmap_max_det")
def __init__(self, data_format="channels_last", **kwargs):
    """Build the PolyNet Mixed-4a block (expects a 160-channel input)."""
    super(PolyBlock4a, self).__init__(**kwargs)
    self.branches = Concurrent(data_format=data_format, name="branches")
    # Short 1x1 -> 3x3 path.
    self.branches.add(ConvSeqBranch(
        in_channels=160,
        out_channels_list=(64, 96),
        kernel_size_list=(1, 3),
        strides_list=(1, 1),
        padding_list=(0, 0),
        data_format=data_format,
        name="branch1"))
    # Factorized 7x7 path, then a 3x3.
    self.branches.add(ConvSeqBranch(
        in_channels=160,
        out_channels_list=(64, 64, 64, 96),
        kernel_size_list=(1, (7, 1), (1, 7), 3),
        strides_list=(1, 1, 1, 1),
        padding_list=(0, (3, 0), (0, 3), 0),
        data_format=data_format,
        name="branch2"))
def __init__(self, data_format="channels_last", **kwargs):
    """Build the Inception 3a block: parallel max-pool and stride-2 3x3 conv branches."""
    super(InceptBlock3a, self).__init__(**kwargs)
    self.branches = Concurrent(data_format=data_format, name="branches")
    self.branches.children.append(MaxPoolBranch(
        data_format=data_format,
        name="branch1"))
    self.branches.children.append(Conv3x3Branch(
        in_channels=64,
        out_channels=96,
        data_format=data_format,
        name="branch2"))
def __init__(self, in_channels, upscale_out_size, data_format="channels_last", **kwargs):
    """Build the ASPP module: 1x1 branch, three atrous 3x3 branches, pooled branch, projection."""
    super(AtrousSpatialPyramidPooling, self).__init__(**kwargs)
    atrous_rates = [12, 24, 36]
    assert (in_channels % 8 == 0)
    mid_channels = in_channels // 8
    # Five branches each emit `mid_channels`; their concatenation feeds the projection conv.
    project_in_channels = 5 * mid_channels

    self.branches = Concurrent(
        data_format=data_format,
        name="branches")
    self.branches.add(conv1x1_block(
        in_channels=in_channels,
        out_channels=mid_channels,
        data_format=data_format,
        name="branch1"))
    for i, rate in enumerate(atrous_rates):
        self.branches.add(conv3x3_block(
            in_channels=in_channels,
            out_channels=mid_channels,
            padding=rate,
            dilation=rate,
            data_format=data_format,
            name="branch{}".format(i + 2)))
    self.branches.add(ASPPAvgBranch(
        in_channels=in_channels,
        out_channels=mid_channels,
        upscale_out_size=upscale_out_size,
        data_format=data_format,
        name="branch5"))
    self.conv = conv1x1_block(
        in_channels=project_in_channels,
        out_channels=mid_channels,
        data_format=data_format,
        name="conv")
    self.dropout = nn.Dropout(
        rate=0.5,
        name="dropout")
class PyramidPooling(nn.Layer):
    """
    Pyramid Pooling module.

    Parameters:
    ----------
    in_channels : int
        Number of input channels.
    upscale_out_size : tuple of 2 int
        Spatial size of the input tensor for the bilinear upsampling operation.
    data_format : str, default 'channels_last'
        The ordering of the dimensions in tensors.
    """
    def __init__(self, in_channels, upscale_out_size, data_format="channels_last", **kwargs):
        super(PyramidPooling, self).__init__(**kwargs)
        pool_out_sizes = [1, 2, 3, 6]
        assert (len(pool_out_sizes) == 4)
        assert (in_channels % 4 == 0)
        mid_channels = in_channels // 4

        self.branches = Concurrent(
            data_format=data_format,
            name="branches")
        # Identity branch passes the input through untouched alongside the pooled branches.
        self.branches.add(Identity(name="branch1"))
        for i, pool_size in enumerate(pool_out_sizes):
            self.branches.add(PyramidPoolingBranch(
                in_channels=in_channels,
                out_channels=mid_channels,
                pool_out_size=pool_size,
                upscale_out_size=upscale_out_size,
                data_format=data_format,
                name="branch{}".format(i + 2)))

    def call(self, x, training=None):
        return self.branches(x, training=training)
class AtrousSpatialPyramidPooling(nn.Layer):
    """
    Atrous Spatial Pyramid Pooling (ASPP) module.

    Parameters:
    ----------
    in_channels : int
        Number of input channels.
    upscale_out_size : tuple of 2 int
        Spatial size of the input tensor for the bilinear upsampling operation.
    data_format : str, default 'channels_last'
        The ordering of the dimensions in tensors.
    """
    def __init__(self, in_channels, upscale_out_size, data_format="channels_last", **kwargs):
        super(AtrousSpatialPyramidPooling, self).__init__(**kwargs)
        atrous_rates = [12, 24, 36]
        assert (in_channels % 8 == 0)
        mid_channels = in_channels // 8
        # Five branches each emit `mid_channels`; their concatenation feeds the projection conv.
        project_in_channels = 5 * mid_channels

        self.branches = Concurrent(
            data_format=data_format,
            name="branches")
        self.branches.add(conv1x1_block(
            in_channels=in_channels,
            out_channels=mid_channels,
            data_format=data_format,
            name="branch1"))
        for i, rate in enumerate(atrous_rates):
            self.branches.add(conv3x3_block(
                in_channels=in_channels,
                out_channels=mid_channels,
                padding=rate,
                dilation=rate,
                data_format=data_format,
                name="branch{}".format(i + 2)))
        self.branches.add(ASPPAvgBranch(
            in_channels=in_channels,
            out_channels=mid_channels,
            upscale_out_size=upscale_out_size,
            data_format=data_format,
            name="branch5"))
        self.conv = conv1x1_block(
            in_channels=project_in_channels,
            out_channels=mid_channels,
            data_format=data_format,
            name="conv")
        self.dropout = nn.Dropout(
            rate=0.5,
            name="dropout")

    def call(self, x, training=None):
        x = self.branches(x, training=training)
        x = self.conv(x, training=training)
        return self.dropout(x, training=training)
class PolyBlock5a(nn.Layer):
    """
    PolyNet type Mixed-5a block.

    Parameters:
    ----------
    data_format : str, default 'channels_last'
        The ordering of the dimensions in tensors.
    """
    def __init__(self, data_format="channels_last", **kwargs):
        super(PolyBlock5a, self).__init__(**kwargs)
        self.branches = Concurrent(data_format=data_format, name="branches")
        self.branches.add(MaxPoolBranch(
            data_format=data_format,
            name="branch1"))
        self.branches.add(Conv3x3Branch(
            in_channels=192,
            out_channels=192,
            data_format=data_format,
            name="branch2"))

    def call(self, x, training=None):
        return self.branches(x, training=training)
def __init__(self, in_channels, upscale_out_size, data_format="channels_last", **kwargs):
    """Build the pyramid pooling module: an identity branch plus four pooled-and-upsampled branches."""
    super(PyramidPooling, self).__init__(**kwargs)
    pool_out_sizes = [1, 2, 3, 6]
    assert (len(pool_out_sizes) == 4)
    assert (in_channels % 4 == 0)
    mid_channels = in_channels // 4

    self.branches = Concurrent(
        data_format=data_format,
        name="branches")
    # Identity branch passes the input through untouched alongside the pooled branches.
    self.branches.add(Identity(name="branch1"))
    for i, pool_size in enumerate(pool_out_sizes):
        self.branches.add(PyramidPoolingBranch(
            in_channels=in_channels,
            out_channels=mid_channels,
            pool_out_size=pool_size,
            upscale_out_size=upscale_out_size,
            data_format=data_format,
            name="branch{}".format(i + 2)))
def __init__(self, data_format="channels_last", **kwargs):
    """Build the PolyNet two-way Inception-C block (expects a 2048-channel input)."""
    super(TwoWayCBlock, self).__init__(**kwargs)
    in_channels = 2048
    self.branches = Concurrent(data_format=data_format, name="branches")
    self.branches.add(ConvSeqBranch(
        in_channels=in_channels,
        out_channels_list=(192, 224, 256),
        kernel_size_list=(1, (1, 3), (3, 1)),
        strides_list=(1, 1, 1),
        padding_list=(0, (0, 1), (1, 0)),
        data_format=data_format,
        name="branch1"))
    self.branches.add(Conv1x1Branch(
        in_channels=in_channels,
        out_channels=192,
        data_format=data_format,
        name="branch2"))
    # Project the concatenated branches (256 + 192 = 448) back to the input width.
    self.conv = conv1x1_block(
        in_channels=448,
        out_channels=in_channels,
        activation=None,
        data_format=data_format,
        name="conv")
class TwoWayCBlock(nn.Layer):
    """
    PolyNet type Inception-C block.

    Parameters:
    ----------
    data_format : str, default 'channels_last'
        The ordering of the dimensions in tensors.
    """
    def __init__(self, data_format="channels_last", **kwargs):
        super(TwoWayCBlock, self).__init__(**kwargs)
        in_channels = 2048
        self.branches = Concurrent(data_format=data_format, name="branches")
        self.branches.add(ConvSeqBranch(
            in_channels=in_channels,
            out_channels_list=(192, 224, 256),
            kernel_size_list=(1, (1, 3), (3, 1)),
            strides_list=(1, 1, 1),
            padding_list=(0, (0, 1), (1, 0)),
            data_format=data_format,
            name="branch1"))
        self.branches.add(Conv1x1Branch(
            in_channels=in_channels,
            out_channels=192,
            data_format=data_format,
            name="branch2"))
        # Project the concatenated branches (256 + 192 = 448) back to the input width.
        self.conv = conv1x1_block(
            in_channels=448,
            out_channels=in_channels,
            activation=None,
            data_format=data_format,
            name="conv")

    def call(self, x, training=None):
        x = self.branches(x, training=training)
        return self.conv(x, training=training)
class ReductionBUnit(nn.Layer):
    """
    PolyNet type Reduction-B unit.

    Parameters:
    ----------
    data_format : str, default 'channels_last'
        The ordering of the dimensions in tensors.
    """
    def __init__(self, data_format="channels_last", **kwargs):
        super(ReductionBUnit, self).__init__(**kwargs)
        in_channels = 1152
        self.branches = Concurrent(data_format=data_format, name="branches")
        # Conv branches as (out_channels, kernel_sizes, strides, paddings) tuples.
        conv_branch_specs = (
            ((256, 256, 256), (1, 3, 3), (1, 1, 2), (0, 1, 0)),
            ((256, 256), (1, 3), (1, 2), (0, 0)),
            ((256, 384), (1, 3), (1, 2), (0, 0)),
        )
        for i, (channels, kernels, strides, paddings) in enumerate(conv_branch_specs):
            self.branches.add(ConvSeqBranch(
                in_channels=in_channels,
                out_channels_list=channels,
                kernel_size_list=kernels,
                strides_list=strides,
                padding_list=paddings,
                data_format=data_format,
                name="branch{}".format(i + 1)))
        self.branches.add(MaxPoolBranch(
            data_format=data_format,
            name="branch4"))

    def call(self, x, training=None):
        return self.branches(x, training=training)
def __init__(self, in_channels, mid1_channels_list, mid2_channels_list, avg_pool, use_bias, use_bn,
             data_format="channels_last", **kwargs):
    """Build a GoogLeNet-style Inception block with 1x1, 3x3, double-3x3, and pooling branches."""
    super(InceptionBlock, self).__init__(**kwargs)
    assert (len(mid1_channels_list) == 2)
    assert (len(mid2_channels_list) == 4)

    self.branches = Concurrent(data_format=data_format, name="branches")
    self.branches.children.append(conv1x1_block(
        in_channels=in_channels,
        out_channels=mid2_channels_list[0],
        use_bias=use_bias,
        use_bn=use_bn,
        data_format=data_format,
        name="branch1"))
    self.branches.children.append(Inception3x3Branch(
        in_channels=in_channels,
        out_channels=mid2_channels_list[1],
        mid_channels=mid1_channels_list[0],
        use_bias=use_bias,
        use_bn=use_bn,
        data_format=data_format,
        name="branch2"))
    self.branches.children.append(InceptionDouble3x3Branch(
        in_channels=in_channels,
        out_channels=mid2_channels_list[2],
        mid_channels=mid1_channels_list[1],
        use_bias=use_bias,
        use_bn=use_bn,
        data_format=data_format,
        name="branch3"))
    self.branches.children.append(InceptionPoolBranch(
        in_channels=in_channels,
        out_channels=mid2_channels_list[3],
        avg_pool=avg_pool,
        use_bias=use_bias,
        use_bn=use_bn,
        data_format=data_format,
        name="branch4"))