def __init__(self, in_channels, out_channels, mid_channels, use_bias, use_bn, data_format="channels_last",
             **kwargs):
    super(StemBlock, self).__init__(**kwargs)
    self.conv1 = conv7x7_block(in_channels=in_channels, out_channels=mid_channels, strides=2,
                               use_bias=use_bias, use_bn=use_bn, data_format=data_format, name="conv1")
    self.pool1 = MaxPool2d(pool_size=3, strides=2, padding=0, ceil_mode=True, data_format=data_format,
                           name="pool1")
    self.conv2 = Inception3x3Branch(in_channels=mid_channels, out_channels=out_channels,
                                    mid_channels=mid_channels, use_bias=use_bias, use_bn=use_bn,
                                    data_format=data_format, name="conv2")
    self.pool2 = MaxPool2d(pool_size=3, strides=2, padding=0, ceil_mode=True, data_format=data_format,
                           name="pool2")
def __init__(self, in_channels, out_channels, data_format="channels_last", **kwargs):
    super(InceptInitBlock, self).__init__(**kwargs)
    assert (out_channels == 192)
    self.conv1 = InceptConv(in_channels=in_channels, out_channels=32, kernel_size=3, strides=2, padding=0,
                            data_format=data_format, name="conv1")
    self.conv2 = InceptConv(in_channels=32, out_channels=32, kernel_size=3, strides=1, padding=0,
                            data_format=data_format, name="conv2")
    self.conv3 = InceptConv(in_channels=32, out_channels=64, kernel_size=3, strides=1, padding=1,
                            data_format=data_format, name="conv3")
    self.pool1 = MaxPool2d(pool_size=3, strides=2, padding=0, data_format=data_format, name="pool1")
    self.conv4 = InceptConv(in_channels=64, out_channels=80, kernel_size=1, strides=1, padding=0,
                            data_format=data_format, name="conv4")
    self.conv5 = InceptConv(in_channels=80, out_channels=192, kernel_size=3, strides=1, padding=0,
                            data_format=data_format, name="conv5")
    self.pool2 = MaxPool2d(pool_size=3, strides=2, padding=0, data_format=data_format, name="pool2")
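# A hypothetical shape trace (not repository code), assuming the canonical 299x299 Inception-v3 input and
# floor-mode pooling; it only re-derives the spatial sizes implied by the kernel sizes, strides and
# paddings of the init block above.
def out_size(size, kernel_size, stride, padding):
    return (size + 2 * padding - kernel_size) // stride + 1

sizes = [299]
for k, s, p in [(3, 2, 0), (3, 1, 0), (3, 1, 1), (3, 2, 0), (1, 1, 0), (3, 1, 0), (3, 2, 0)]:
    sizes.append(out_size(sizes[-1], k, s, p))
# sizes == [299, 149, 147, 147, 73, 73, 71, 35]  (conv1 .. pool2)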
def __init__(self, in_channels, out_channels, use_deptwise, activation, data_format="channels_last",
             **kwargs):
    super(HarDInitBlock, self).__init__(**kwargs)
    mid_channels = out_channels // 2

    self.conv1 = conv3x3_block(in_channels=in_channels, out_channels=mid_channels, strides=2,
                               activation=activation, data_format=data_format, name="conv1")
    conv2_block_class = conv1x1_block if use_deptwise else conv3x3_block
    self.conv2 = conv2_block_class(in_channels=mid_channels, out_channels=out_channels,
                                   activation=activation, data_format=data_format, name="conv2")
    if use_deptwise:
        self.downsample = dwconv3x3_block(in_channels=out_channels, out_channels=out_channels, strides=2,
                                          activation=None, data_format=data_format, name="downsample")
    else:
        self.downsample = MaxPool2d(pool_size=3, strides=2, padding=1, data_format=data_format,
                                    name="downsample")
def __init__(self, data_format="channels_last", **kwargs):
    super(MaxPoolBranch, self).__init__(**kwargs)
    self.pool = MaxPool2d(pool_size=3, strides=2, padding=0, data_format=data_format, name="pool")
def __init__(self, channels, residuals, init_block_kernel_size, init_block_channels, in_channels=3,
             in_size=(224, 224), classes=1000, data_format="channels_last", **kwargs):
    super(SqueezeNet, self).__init__(**kwargs)
    self.in_size = in_size
    self.classes = classes
    self.data_format = data_format

    self.features = tf.keras.Sequential(name="features")
    self.features.add(SqueezeInitBlock(in_channels=in_channels, out_channels=init_block_channels,
                                       kernel_size=init_block_kernel_size, data_format=data_format,
                                       name="init_block"))
    in_channels = init_block_channels
    for i, channels_per_stage in enumerate(channels):
        stage = tf.keras.Sequential(name="stage{}".format(i + 1))
        stage.add(MaxPool2d(pool_size=3, strides=2, ceil_mode=True, data_format=data_format,
                            name="pool{}".format(i + 1)))
        for j, out_channels in enumerate(channels_per_stage):
            expand_channels = out_channels // 2
            squeeze_channels = out_channels // 8
            stage.add(FireUnit(in_channels=in_channels, squeeze_channels=squeeze_channels,
                               expand1x1_channels=expand_channels, expand3x3_channels=expand_channels,
                               residual=((residuals is not None) and (residuals[i][j] == 1)),
                               data_format=data_format, name="unit{}".format(j + 1)))
            in_channels = out_channels
        self.features.add(stage)
    self.features.add(nn.Dropout(rate=0.5, name="dropout"))

    self.output1 = tf.keras.Sequential(name="output1")
    self.output1.add(Conv2d(in_channels=in_channels, out_channels=classes, kernel_size=1,
                            data_format=data_format, name="final_conv"))
    self.output1.add(nn.ReLU())
    self.output1.add(nn.AveragePooling2D(pool_size=13, strides=1, data_format=data_format,
                                         name="final_pool"))
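# A minimal usage sketch, not repository code: the channel layout below is my assumption of a
# SqueezeNet-v1.0-style configuration (7x7/96 init block, fire-unit output widths per stage), and it
# assumes the model's call() chains `features` and `output1` as elsewhere in this code base.
import tensorflow as tf

net = SqueezeNet(channels=[[128, 128, 256], [256, 384, 384, 512], [512]],
                 residuals=None,
                 init_block_kernel_size=7,
                 init_block_channels=96,
                 data_format="channels_last")
y = net(tf.zeros((1, 224, 224, 3)))  # expected class logits of shape (1, 1000)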
def __init__(self, in_channels, out_channels, kernel_size, padding, data_format="channels_last", **kwargs):
    super(DPNInitBlock, self).__init__(**kwargs)
    self.conv = Conv2d(in_channels=in_channels, out_channels=out_channels, kernel_size=kernel_size,
                       strides=2, padding=padding, use_bias=False, data_format=data_format, name="conv")
    self.bn = dpn_batch_norm(channels=out_channels, data_format=data_format, name="bn")
    self.activ = nn.ReLU()
    self.pool = MaxPool2d(pool_size=3, strides=2, padding=1, data_format=data_format, name="pool")
def __init__(self, in_channels, out_channels, data_format="channels_last", **kwargs):
    super(StemBlock, self).__init__(**kwargs)
    mid1_channels = out_channels // 2
    mid2_channels = out_channels * 2

    self.first_conv = conv3x3_block(in_channels=in_channels, out_channels=out_channels, strides=2,
                                    data_format=data_format, name="first_conv")
    self.branches = Concurrent(data_format=data_format, name="branches")
    self.branches.add(PeleeBranch1(in_channels=out_channels, out_channels=out_channels,
                                   mid_channels=mid1_channels, strides=2, data_format=data_format,
                                   name="branch1"))
    self.branches.add(MaxPool2d(pool_size=2, strides=2, padding=0, data_format=data_format, name="branch2"))
    self.last_conv = conv1x1_block(in_channels=mid2_channels, out_channels=out_channels,
                                   data_format=data_format, name="last_conv")
def __init__(self, in_channels, out_channels, data_format="channels_last", **kwargs):
    super(SEInitBlock, self).__init__(**kwargs)
    mid_channels = out_channels // 2

    self.conv1 = conv3x3_block(in_channels=in_channels, out_channels=mid_channels, strides=2,
                               data_format=data_format, name="conv1")
    self.conv2 = conv3x3_block(in_channels=mid_channels, out_channels=mid_channels,
                               data_format=data_format, name="conv2")
    self.conv3 = conv3x3_block(in_channels=mid_channels, out_channels=out_channels,
                               data_format=data_format, name="conv3")
    self.pool = MaxPool2d(pool_size=3, strides=2, padding=1, data_format=data_format, name="pool")
def __init__(self, in_channels, out_channels, avg_pool, use_bias, use_bn, data_format="channels_last",
             **kwargs):
    super(InceptionPoolBranch, self).__init__(**kwargs)
    if avg_pool:
        self.pool = AvgPool2d(pool_size=3, strides=1, padding=1, ceil_mode=True,
                              # count_include_pad=True,
                              data_format=data_format, name="pool")
    else:
        self.pool = MaxPool2d(pool_size=3, strides=1, padding=1, ceil_mode=True, data_format=data_format,
                              name="pool")
    self.conv = conv1x1_block(in_channels=in_channels, out_channels=out_channels, use_bias=use_bias,
                              use_bn=use_bn, data_format=data_format, name="conv")
def __init__(self, in_channels_list, out_channels_list, links_list, use_deptwise, use_dropout,
             downsampling, activation, data_format="channels_last", **kwargs):
    super(HarDUnit, self).__init__(**kwargs)
    self.data_format = data_format
    self.links_list = links_list
    self.use_dropout = use_dropout
    self.downsampling = downsampling

    self.blocks = SimpleSequential(name="blocks")
    for i in range(len(links_list)):
        in_channels = in_channels_list[i]
        out_channels = out_channels_list[i]
        if use_deptwise:
            unit = invdwsconv3x3_block(in_channels=in_channels, out_channels=out_channels,
                                       pw_activation=activation, dw_activation=None,
                                       data_format=data_format, name="block{}".format(i + 1))
        else:
            unit = conv3x3_block(in_channels=in_channels, out_channels=out_channels,
                                 data_format=data_format, name="block{}".format(i + 1))
        self.blocks.add(unit)

    if self.use_dropout:
        self.dropout = nn.Dropout(rate=0.1, name="dropout")
    self.conv = conv1x1_block(in_channels=in_channels_list[-1], out_channels=out_channels_list[-1],
                              activation=activation, data_format=data_format, name="conv")

    if self.downsampling:
        if use_deptwise:
            self.downsample = dwconv3x3_block(in_channels=out_channels_list[-1],
                                              out_channels=out_channels_list[-1], strides=2,
                                              activation=None, data_format=data_format, name="downsample")
        else:
            self.downsample = MaxPool2d(pool_size=2, strides=2, data_format=data_format, name="downsample")
def __init__(self, channels, odd_pointwise, avg_pool_size, cls_activ, alpha=0.1, in_channels=3,
             in_size=(224, 224), classes=1000, data_format="channels_last", **kwargs):
    super(DarkNet, self).__init__(**kwargs)
    self.in_size = in_size
    self.classes = classes
    self.data_format = data_format

    self.features = tf.keras.Sequential(name="features")
    for i, channels_per_stage in enumerate(channels):
        stage = tf.keras.Sequential(name="stage{}".format(i + 1))
        for j, out_channels in enumerate(channels_per_stage):
            stage.add(dark_convYxY(in_channels=in_channels, out_channels=out_channels, alpha=alpha,
                                   pointwise=(len(channels_per_stage) > 1) and
                                             not (((j + 1) % 2 == 1) ^ odd_pointwise),
                                   data_format=data_format, name="unit{}".format(j + 1)))
            in_channels = out_channels
        if i != len(channels) - 1:
            stage.add(MaxPool2d(pool_size=2, strides=2, data_format=data_format,
                                name="pool{}".format(i + 1)))
        self.features.add(stage)

    self.output1 = tf.keras.Sequential(name="output1")
    self.output1.add(Conv2d(in_channels=in_channels, out_channels=classes, kernel_size=1,
                            data_format=data_format, name="final_conv"))
    if cls_activ:
        self.output1.add(nn.LeakyReLU(alpha=alpha))
    self.output1.add(nn.AveragePooling2D(pool_size=avg_pool_size, strides=1, data_format=data_format,
                                         name="final_pool"))
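# A hypothetical helper (not from the library) that simply re-evaluates the `pointwise` expression above,
# to make the alternation explicit: with `odd_pointwise` unset the even-numbered units of a multi-unit
# stage become 1x1 convolutions, with it set the odd-numbered ones do.
def pointwise_flags(num_units, odd_pointwise):
    return [(num_units > 1) and not (((j + 1) % 2 == 1) ^ odd_pointwise) for j in range(num_units)]

# pointwise_flags(4, odd_pointwise=False) -> [False, True, False, True]
# pointwise_flags(4, odd_pointwise=True)  -> [True, False, True, False]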
def __init__(self, in_channels, out_channels, activation, data_format="channels_last", **kwargs):
    super(IbpDownBlock, self).__init__(**kwargs)
    self.down = MaxPool2d(pool_size=2, strides=2, data_format=data_format, name="down")
    self.res = IbpResUnit(in_channels=in_channels, out_channels=out_channels, activation=activation,
                          data_format=data_format, name="res")
def __init__(self, in_channels, out_channels, data_format="channels_last", **kwargs):
    super(IBNbResInitBlock, self).__init__(**kwargs)
    self.conv = ibnb_conv7x7_block(in_channels=in_channels, out_channels=out_channels, strides=2,
                                   data_format=data_format, name="conv")
    self.pool = MaxPool2d(pool_size=3, strides=2, padding=1, data_format=data_format, name="pool")
def __init__(self, channels, init_block_channels, in_channels=3, in_size=(224, 224), classes=1000,
             data_format="channels_last", **kwargs):
    super(DiracNetV2, self).__init__(**kwargs)
    self.in_size = in_size
    self.classes = classes
    self.data_format = data_format

    self.features = tf.keras.Sequential(name="features")
    self.features.add(DiracInitBlock(in_channels=in_channels, out_channels=init_block_channels,
                                     data_format=data_format, name="init_block"))
    in_channels = init_block_channels
    for i, channels_per_stage in enumerate(channels):
        stage = tf.keras.Sequential(name="stage{}".format(i + 1))
        for j, out_channels in enumerate(channels_per_stage):
            stage.add(dirac_conv3x3(in_channels=in_channels, out_channels=out_channels,
                                    data_format=data_format, name="unit{}".format(j + 1)))
            in_channels = out_channels
        if i != len(channels) - 1:
            stage.add(MaxPool2d(pool_size=2, strides=2, padding=0, data_format=data_format,
                                name="pool{}".format(i + 1)))
        self.features.add(stage)
    self.features.add(nn.ReLU(name="final_activ"))
    self.features.add(nn.AveragePooling2D(pool_size=7, strides=1, data_format=data_format,
                                          name="final_pool"))

    self.output1 = nn.Dense(units=classes, input_dim=in_channels, name="output1")
def __init__(self, in_channels, out_channels, strides, reps, start_with_relu=True, grow_first=True,
             data_format="channels_last", **kwargs):
    super(XceptionUnit, self).__init__(**kwargs)
    self.resize_identity = (in_channels != out_channels) or (strides != 1)

    if self.resize_identity:
        self.identity_conv = conv1x1_block(in_channels=in_channels, out_channels=out_channels,
                                           strides=strides, activation=None, data_format=data_format,
                                           name="identity_conv")

    self.body = SimpleSequential(name="body")
    for i in range(reps):
        # With `grow_first` the channel count changes in the first block of the unit, otherwise it
        # changes in the last one; all other blocks keep their width unchanged.
        if (grow_first and (i == 0)) or ((not grow_first) and (i == reps - 1)):
            in_channels_i = in_channels
            out_channels_i = out_channels
        else:
            if grow_first:
                in_channels_i = out_channels
                out_channels_i = out_channels
            else:
                in_channels_i = in_channels
                out_channels_i = in_channels
        activate = start_with_relu if (i == 0) else True
        self.body.children.append(dws_conv3x3_block(in_channels=in_channels_i, out_channels=out_channels_i,
                                                    activate=activate, data_format=data_format,
                                                    name="block{}".format(i + 1)))
    if strides != 1:
        self.body.children.append(MaxPool2d(pool_size=3, strides=strides, padding=1,
                                            data_format=data_format, name="pool"))
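# A hypothetical illustration (not part of the library): it reproduces the per-block channel bookkeeping
# above so the effect of `grow_first` on the (in, out) widths of each depthwise-separable block is visible.
def xception_unit_channels(in_channels, out_channels, reps, grow_first=True):
    pairs = []
    for i in range(reps):
        if (grow_first and (i == 0)) or ((not grow_first) and (i == reps - 1)):
            pairs.append((in_channels, out_channels))
        elif grow_first:
            pairs.append((out_channels, out_channels))
        else:
            pairs.append((in_channels, in_channels))
    return pairs

# xception_unit_channels(128, 256, reps=3, grow_first=True)  -> [(128, 256), (256, 256), (256, 256)]
# xception_unit_channels(128, 256, reps=3, grow_first=False) -> [(128, 128), (128, 128), (128, 256)]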
def __init__(self, strides=2, extra_padding=False, data_format="channels_last", **kwargs):
    super(PnasMaxPoolBlock, self).__init__(**kwargs)
    self.extra_padding = extra_padding
    self.data_format = data_format

    self.pool = MaxPool2d(pool_size=3, strides=strides, padding=1, data_format=data_format, name="pool")
    if self.extra_padding:
        self.pad = nn.ZeroPadding2D(padding=((1, 0), (1, 0)), data_format=data_format)
def __init__(self, in_channels, out_channels, data_format="channels_last", **kwargs):
    super(ShuffleInitBlock, self).__init__(**kwargs)
    self.conv = conv3x3_block(in_channels=in_channels, out_channels=out_channels, strides=2,
                              data_format=data_format, name="conv")
    self.pool = MaxPool2d(pool_size=3, strides=2, padding=0, ceil_mode=True, data_format=data_format,
                          name="pool")
def __init__(self, channels, kernel_sizes, strides, paddings, use_lrn, in_channels=3, in_size=(224, 224),
             classes=1000, data_format="channels_last", **kwargs):
    super(AlexNet, self).__init__(**kwargs)
    self.in_size = in_size
    self.classes = classes
    self.data_format = data_format

    self.features = tf.keras.Sequential(name="features")
    for i, channels_per_stage in enumerate(channels):
        use_lrn_i = use_lrn and (i in [0, 1])
        stage = tf.keras.Sequential(name="stage{}".format(i + 1))
        for j, out_channels in enumerate(channels_per_stage):
            stage.add(AlexConv(in_channels=in_channels, out_channels=out_channels,
                               kernel_size=kernel_sizes[i][j], strides=strides[i][j],
                               padding=paddings[i][j], use_lrn=use_lrn_i, data_format=data_format,
                               name="unit{}".format(j + 1)))
            in_channels = out_channels
        stage.add(MaxPool2d(pool_size=3, strides=2, padding=0, ceil_mode=True, data_format=data_format,
                            name="pool{}".format(i + 1)))
        self.features.add(stage)

    in_channels = in_channels * 6 * 6
    self.output1 = AlexOutputBlock(in_channels=in_channels, classes=classes, name="output1")
def __init__(self, in_channels, out_channels, data_format="channels_last", **kwargs):
    super(DiracInitBlock, self).__init__(**kwargs)
    self.conv = Conv2d(in_channels=in_channels, out_channels=out_channels, kernel_size=7, strides=2,
                       padding=3, use_bias=True, data_format=data_format, name="conv")
    self.pool = MaxPool2d(pool_size=3, strides=2, padding=1, data_format=data_format, name="pool")
def __init__(self, in_channels, out_channels, activation, data_format="channels_last", **kwargs):
    super(IbpBackbone, self).__init__(**kwargs)
    self.data_format = data_format
    dilations = (3, 3, 4, 4, 5, 5)
    mid1_channels = out_channels // 4
    mid2_channels = out_channels // 2

    self.conv1 = conv7x7_block(in_channels=in_channels, out_channels=mid1_channels, strides=2,
                               activation=activation, data_format=data_format, name="conv1")
    self.res1 = IbpResUnit(in_channels=mid1_channels, out_channels=mid2_channels, activation=activation,
                           data_format=data_format, name="res1")
    self.pool = MaxPool2d(pool_size=2, strides=2, data_format=data_format, name="pool")
    self.res2 = IbpResUnit(in_channels=mid2_channels, out_channels=mid2_channels, activation=activation,
                           data_format=data_format, name="res2")
    self.dilation_branch = SimpleSequential(name="dilation_branch")
    for i, dilation in enumerate(dilations):
        self.dilation_branch.add(conv3x3_block(in_channels=mid2_channels, out_channels=mid2_channels,
                                               padding=dilation, dilation=dilation, activation=activation,
                                               data_format=data_format, name="block{}".format(i + 1)))
def __init__(self, in_channels, out_channels, do_nms, data_format="channels_last", **kwargs):
    super(CenterNetHeatmapBlock, self).__init__(**kwargs)
    self.do_nms = do_nms

    self.head = CenterNetHeadBlock(in_channels=in_channels, out_channels=out_channels,
                                   data_format=data_format, name="head")
    self.sigmoid = tf.nn.sigmoid
    if self.do_nms:
        self.pool = MaxPool2d(pool_size=3, strides=1, padding=1, data_format=data_format, name="pool")
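# A hedged sketch of the usual CenterNet "max-pool as NMS" trick that `self.pool` serves when `do_nms`
# is set; the block's call() is not shown here, so this is my reading of the technique rather than its
# exact code: local maxima of the sigmoid heatmap are kept by comparing it with its 3x3 max-pooled copy.
import tensorflow as tf

def heatmap_nms(heatmap):
    # heatmap: NHWC tensor of per-class center scores after the sigmoid
    pooled = tf.nn.max_pool2d(heatmap, ksize=3, strides=1, padding="SAME")
    return heatmap * tf.cast(tf.equal(heatmap, pooled), heatmap.dtype)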
def __init__(self, in_channels, out_channels, data_format="channels_last", **kwargs):
    super(ShuffleInitBlock, self).__init__(**kwargs)
    self.conv = conv3x3(in_channels=in_channels, out_channels=out_channels, strides=2,
                        data_format=data_format, name="conv")
    self.bn = BatchNorm(
        # in_channels=out_channels,
        data_format=data_format,
        name="bn")
    self.activ = nn.ReLU()
    self.pool = MaxPool2d(pool_size=3, strides=2, padding=1, data_format=data_format, name="pool")
def __init__(self, in_channels, mid1_channels_list, mid2_channels_list, use_bias, use_bn,
             data_format="channels_last", **kwargs):
    super(ReductionBlock, self).__init__(**kwargs)
    assert (len(mid1_channels_list) == 2)
    assert (len(mid2_channels_list) == 4)

    self.branches = Concurrent(data_format=data_format, name="branches")
    self.branches.children.append(Inception3x3Branch(in_channels=in_channels,
                                                     out_channels=mid2_channels_list[1],
                                                     mid_channels=mid1_channels_list[0], strides=2,
                                                     use_bias=use_bias, use_bn=use_bn,
                                                     data_format=data_format, name="branch1"))
    self.branches.children.append(InceptionDouble3x3Branch(in_channels=in_channels,
                                                           out_channels=mid2_channels_list[2],
                                                           mid_channels=mid1_channels_list[1], strides=2,
                                                           use_bias=use_bias, use_bn=use_bn,
                                                           data_format=data_format, name="branch2"))
    self.branches.children.append(MaxPool2d(pool_size=3, strides=2, padding=0, ceil_mode=True,
                                            data_format=data_format, name="branch3"))
def __init__(self, in_channels, out_channels, branch_channels, num_branches, resize, use_residual,
             data_format="channels_last", **kwargs):
    super(VoVUnit, self).__init__(**kwargs)
    self.resize = resize
    self.use_residual = use_residual

    if self.resize:
        self.pool = MaxPool2d(pool_size=3, strides=2, ceil_mode=True, data_format=data_format, name="pool")

    self.branches = SequentialConcurrent(data_format=data_format, name="branches")
    branch_in_channels = in_channels
    for i in range(num_branches):
        self.branches.add(conv3x3_block(in_channels=branch_in_channels, out_channels=branch_channels,
                                        data_format=data_format, name="branch{}".format(i + 1)))
        branch_in_channels = branch_channels

    self.concat_conv = conv1x1_block(in_channels=(in_channels + num_branches * branch_channels),
                                     out_channels=out_channels, data_format=data_format,
                                     name="concat_conv")
def __init__(self, channels, use_bias=True, use_bn=False, in_channels=3, in_size=(224, 224), classes=1000,
             data_format="channels_last", **kwargs):
    super(VGG, self).__init__(**kwargs)
    self.in_size = in_size
    self.classes = classes
    self.data_format = data_format

    self.features = tf.keras.Sequential(name="features")
    for i, channels_per_stage in enumerate(channels):
        stage = tf.keras.Sequential(name="stage{}".format(i + 1))
        for j, out_channels in enumerate(channels_per_stage):
            stage.add(conv3x3_block(in_channels=in_channels, out_channels=out_channels, use_bias=use_bias,
                                    use_bn=use_bn, data_format=data_format, name="unit{}".format(j + 1)))
            in_channels = out_channels
        stage.add(MaxPool2d(pool_size=2, strides=2, padding=0, data_format=data_format,
                            name="pool{}".format(i + 1)))
        self.features.add(stage)

    self.output1 = VGGOutputBlock(in_channels=(in_channels * 7 * 7), classes=classes, name="output1")
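# A minimal usage sketch, not repository code: the channel layout is my assumption of the standard
# VGG-16 stage widths, and the dummy forward pass assumes the usual call() chaining of `features` and
# `output1`; the classifier input of in_channels * 7 * 7 matches the 7x7 feature map left after five
# 2x2 poolings of a 224x224 input.
import tensorflow as tf

net = VGG(channels=[[64, 64], [128, 128], [256, 256, 256], [512, 512, 512], [512, 512, 512]],
          data_format="channels_last")
y = net(tf.zeros((1, 224, 224, 3)))  # expected class logits of shape (1, 1000)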