def __init__(self, in_channels, k, l, m, n):
     super(Reduction_A, self).__init__()
     self.branch2 = BN_Conv2d(in_channels, n, 3, 2, 0, bias=False)
     self.branch3 = nn.Sequential(
         BN_Conv2d(in_channels, k, 1, 1, 0, bias=False),
         BN_Conv2d(k, l, 3, 1, 1, bias=False),
         BN_Conv2d(l, m, 3, 2, 0, bias=False))
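A hedged forward sketch for this Reduction_A block (the excerpt shows only the two convolutional branches): it assumes the usual third, max-pooling branch of an Inception reduction module and concatenates the stride-2 outputs along the channel axis. torch and torch.nn.functional as F are assumed to be imported.

def forward(self, x):
    out2 = self.branch2(x)
    out3 = self.branch3(x)
    out_pool = F.max_pool2d(x, 3, 2, 0)  # assumed pooling branch, not defined in the excerpt above
    return torch.cat((out_pool, out2, out3), dim=1)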
 def __init__(self, str_version, num_classes):
     super(GoogleNet, self).__init__()
     self.block_type = "type1" if str_version == "v1" else "type2"
     self.version = str_version  # "v1", "v2"
     self.conv1 = BN_Conv2d(3, 64, 7, stride=2, padding=3, bias=False)
     self.conv2 = BN_Conv2d(64, 192, 3, stride=1, padding=1, bias=False)
     self.inception3_a = Inception_builder(self.block_type, 192, 16, 32, 96,
                                           128, 32, 64)
     self.inception3_b = Inception_builder(self.block_type, 256, 32, 96,
                                           128, 192, 64, 128)
     self.inception4_a = Inception_builder(self.block_type, 480, 16, 48, 96,
                                           208, 64, 192)
     self.inception4_b = Inception_builder(self.block_type, 512, 24, 64,
                                           112, 224, 64, 160)
     self.inception4_c = Inception_builder(self.block_type, 512, 24, 64,
                                           128, 256, 64, 128)
     self.inception4_d = Inception_builder(self.block_type, 512, 32, 64,
                                           144, 288, 64, 112)
     self.inception4_e = Inception_builder(self.block_type, 528, 32, 128,
                                           160, 320, 128, 256)
     self.inception5_a = Inception_builder(self.block_type, 832, 32, 128,
                                           160, 320, 128, 256)
     self.inception5_b = Inception_builder(self.block_type, 832, 48, 128,
                                           192, 384, 128, 384)
     self.fc = nn.Linear(1024, num_classes)
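Every example in this listing builds on a BN_Conv2d helper that is not reproduced here. A minimal sketch of such a helper, assuming it stacks Conv2d, BatchNorm2d and an optional activation, and treats activation=None or activation=False as a linear (activation-free) block:

import torch.nn as nn

class BN_Conv2d(nn.Module):
    # Conv2d + BatchNorm2d + optional activation (a sketch, not the repo's exact code)
    def __init__(self, in_channels, out_channels, kernel_size, stride=1, padding=0,
                 dilation=1, groups=1, bias=False, activation=nn.ReLU(inplace=True)):
        super(BN_Conv2d, self).__init__()
        layers = [
            nn.Conv2d(in_channels, out_channels, kernel_size, stride=stride,
                      padding=padding, dilation=dilation, groups=groups, bias=bias),
            nn.BatchNorm2d(out_channels),
        ]
        if activation:  # activation=None / activation=False => no non-linearity
            layers.append(activation)
        self.seq = nn.Sequential(*layers)

    def forward(self, x):
        return self.seq(x)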
Example #3
 def __init__(self,
              in_chnls,
              cardinality,
              group_depth,
              stride,
              is_se=False):
     super(ResNeXt_Block, self).__init__()
     self.is_se = is_se
     self.group_chnls = cardinality * group_depth
     self.conv1 = BN_Conv2d(in_chnls,
                            self.group_chnls,
                            1,
                            stride=1,
                            padding=0)
     self.conv2 = BN_Conv2d(self.group_chnls,
                            self.group_chnls,
                            3,
                            stride=stride,
                            padding=1,
                            groups=cardinality)
     self.conv3 = nn.Conv2d(self.group_chnls,
                            self.group_chnls * 2,
                            1,
                            stride=1,
                            padding=0)
     self.bn = nn.BatchNorm2d(self.group_chnls * 2)
     if self.is_se:
         self.se = SE(self.group_chnls * 2, 16)
     self.short_cut = nn.Sequential(
         nn.Conv2d(in_chnls, self.group_chnls * 2, 1, stride, 0,
                   bias=False), nn.BatchNorm2d(self.group_chnls * 2))
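A hedged forward sketch for this ResNeXt block, assuming the standard residual pattern: grouped bottleneck path, optional squeeze-and-excitation, projection shortcut, post-addition ReLU (torch.nn.functional as F assumed imported).

def forward(self, x):
    out = self.conv1(x)
    out = self.conv2(out)
    out = self.bn(self.conv3(out))
    if self.is_se:
        out = out * self.se(out)  # assumes SE returns per-channel scaling coefficients
    out = out + self.short_cut(x)
    return F.relu(out)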
Example #4
 def __init__(self, num_classes):
     super(Inception_v3, self).__init__()
     self.conv = BN_Conv2d(3, 32, 3, 2, 0, bias=False)
     self.conv1 = BN_Conv2d(32, 32, 3, 1, 0, bias=False)
     self.conv2 = BN_Conv2d(32, 64, 3, 1, 1, bias=False)
     self.conv3 = BN_Conv2d(64, 80, 1, 1, 0, bias=False)
     self.conv4 = BN_Conv2d(80, 192, 3, 1, 0, bias=False)
     self.inception1_1 = Block_bank("type1", 192, 64, 96, 48, 64, 32, 64)
     self.inception1_2 = Block_bank("type1", 256, 64, 96, 48, 64, 64, 64)
     self.inception1_3 = Block_bank("type1", 288, 64, 96, 48, 64, 64, 64)
     self.inception2 = Block_bank("type2", 288, 64, 96, 288, 384, 288, 288)
     self.inception3_1 = Block_bank("type3", 768, 128, 192, 128, 192, 192,
                                    192)
     self.inception3_2 = Block_bank("type3", 768, 160, 192, 160, 192, 192,
                                    192)
     self.inception3_3 = Block_bank("type3", 768, 160, 192, 160, 192, 192,
                                    192)
     self.inception3_4 = Block_bank("type3", 768, 192, 192, 192, 192, 192,
                                    192)
     self.inception4 = Block_bank("type4", 768, 192, 192, 192, 320, 288,
                                  288)
     self.inception5_1 = Block_bank("type5", 1280, 448, 384, 384, 384, 192,
                                    320)
     self.inception5_2 = Block_bank("type5", 2048, 448, 384, 384, 384, 192,
                                    320)
     self.fc = nn.Linear(2048, num_classes)
Example #5
 def __init__(self,
              in_chnls,
              out_chnls,
              kernel_size,
              expansion,
              stride,
              is_se=False,
              activation=nn.ReLU(inplace=True)):
     super(MBConv, self).__init__()
     self.is_se = is_se
     self.is_shortcut = (stride == 1) and (in_chnls == out_chnls)
     self.trans1 = BN_Conv2d(in_chnls,
                             in_chnls * expansion,
                             1,
                             1,
                             0,
                             activation=activation)
     self.DWConv = BN_Conv2d(in_chnls * expansion,
                             in_chnls * expansion,
                             kernel_size,
                             stride=stride,
                             padding=kernel_size // 2,
                             groups=in_chnls * expansion,
                             activation=activation)
     if self.is_se:
         self.se = SE(in_chnls * expansion, 4)  # SE ratio = 0.25
     self.trans2 = BN_Conv2d(in_chnls * expansion,
                             out_chnls,
                             1,
                             1,
                             0,
                             activation=None)  # Linear activation
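A hedged forward sketch for this MBConv block: expansion, depthwise convolution, optional squeeze-and-excitation, linear projection, and an identity shortcut only when self.is_shortcut holds (stride 1 and matching channel counts).

def forward(self, x):
    out = self.trans1(x)
    out = self.DWConv(out)
    if self.is_se:
        out = out * self.se(out)  # assumes SE returns channel-wise gates
    out = self.trans2(out)
    return out + x if self.is_shortcut else out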
    def __init__(self, in_channels, out_channels, strides, is_se=False):
        super(BottleNeck, self).__init__()
        self.is_se = is_se
        self.conv1 = BN_Conv2d(in_channels,
                               out_channels,
                               1,
                               stride=1,
                               padding=0,
                               bias=False)  # same padding
        self.conv2 = BN_Conv2d(out_channels,
                               out_channels,
                               3,
                               stride=strides,
                               padding=1,
                               bias=False)
        self.conv3 = BN_Conv2d(out_channels,
                               out_channels * 4,
                               1,
                               stride=1,
                               padding=0,
                               bias=False,
                               activation=None)
        if self.is_se:
            self.se = SE(out_channels * 4, 16)

        # fit input with residual output
        self.shortcut = nn.Sequential(
            nn.Conv2d(in_channels,
                      out_channels * 4,
                      1,
                      stride=strides,
                      padding=0,
                      bias=False), nn.BatchNorm2d(out_channels * 4))
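A hedged forward sketch for this ResNet-style bottleneck, assuming the usual post-addition ReLU and the projection shortcut defined above (torch.nn.functional as F assumed imported).

def forward(self, x):
    out = self.conv1(x)
    out = self.conv2(out)
    out = self.conv3(out)
    if self.is_se:
        out = out * self.se(out)
    out = out + self.shortcut(x)
    return F.relu(out)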
Example #7
 def __make_layers(self):
     layer_list = []
     for i in range(self.num_layers):
         layer_list.append(
             nn.Sequential(
                 BN_Conv2d(self.k0 + i * self.k, 4 * self.k, 1, 1, 0),
                 BN_Conv2d(4 * self.k, self.k, 3, 1, 1)))
     return layer_list
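Note that __make_layers returns a plain Python list, so the caller has to wrap it for the sub-layers' parameters to be registered. A hedged usage line, assuming it is invoked from the dense block's __init__:

self.layers = nn.ModuleList(self.__make_layers())  # hypothetical wrapping; registers each bottleneck layer as a sub-module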
Example #8
 def __init__(self, in_chnls, groups=2):
     super(DSampling, self).__init__()
     self.groups = groups
     self.dwconv_l1 = BN_Conv2d(in_chnls, in_chnls, 3, 2, 1,  # down-sampling, depth-wise conv.
                                groups=in_chnls, activation=False)
     self.conv_l2 = BN_Conv2d(in_chnls, in_chnls, 1, 1, 0)
     self.conv_r1 = BN_Conv2d(in_chnls, in_chnls, 1, 1, 0)
     self.dwconv_r2 = BN_Conv2d(in_chnls, in_chnls, 3, 2, 1, groups=in_chnls, activation=False)
     self.conv_r3 = BN_Conv2d(in_chnls, in_chnls, 1, 1, 0)
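A hedged forward sketch for this ShuffleNet-v2 style downsampling unit: both branches stride the input by 2, their outputs are concatenated and then channel-shuffled. The shuffle helper named below is hypothetical (not shown in the excerpt); torch is assumed imported.

def forward(self, x):
    out_l = self.conv_l2(self.dwconv_l1(x))
    out_r = self.conv_r3(self.dwconv_r2(self.conv_r1(x)))
    out = torch.cat((out_l, out_r), dim=1)
    return shuffle_chnls(out, self.groups)  # hypothetical channel-shuffle helper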
    def __init__(self, block_type, in_channels, b1_reduce, b1, b2_reduce, b2, b3, b4):
        super(Inception_builder, self).__init__()
        self.block_type = block_type  # controlled by strings "type1", "type2"

        # 5x5 reduce, 5x5
        self.branch1_type1 = nn.Sequential(
            BN_Conv2d(in_channels, b1_reduce, 1, stride=1, padding=0, bias=False),
            BN_Conv2d(b1_reduce, b1, 5, stride=1, padding=2, bias=False)  # same padding
        )

        # 5x5 reduce, 2x3x3
        self.branch1_type2 = nn.Sequential(
            BN_Conv2d(in_channels, b1_reduce, 1, stride=1, padding=0, bias=False),
            BN_Conv2d(b1_reduce, b1, 3, stride=1, padding=1, bias=False),  # same padding
            BN_Conv2d(b1, b1, 3, stride=1, padding=1, bias=False)
        )

        # 3x3 reduce, 3x3
        self.branch2 = nn.Sequential(
            BN_Conv2d(in_channels, b2_reduce, 1, stride=1, padding=0, bias=False),
            BN_Conv2d(b2_reduce, b2, 3, stride=1, padding=1, bias=False)
        )

        # max pool, pool proj
        self.branch3 = nn.Sequential(
            nn.MaxPool2d(3, stride=1, padding=1),  # to keep size, also use same padding
            BN_Conv2d(in_channels, b3, 1, stride=1, padding=0, bias=False)
        )

        # 1x1
        self.branch4 = BN_Conv2d(in_channels, b4, 1, stride=1, padding=0, bias=False)
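A hedged forward sketch for Inception_builder, assuming block_type only selects which variant of branch 1 runs and that all branch outputs are concatenated along the channel dimension (with the GoogLeNet parameters above, e.g. 32 + 128 + 32 + 64 = 256 channels for inception3_a).

def forward(self, x):
    out1 = self.branch1_type1(x) if self.block_type == "type1" else self.branch1_type2(x)
    out2 = self.branch2(x)
    out3 = self.branch3(x)
    out4 = self.branch4(x)
    return torch.cat((out1, out2, out3, out4), dim=1)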
Example #10
 def __init__(self, in_chnls, out_chnls, kernel_size, activation=True):
     super(SeqConv, self).__init__()
     self.DWConv = BN_Conv2d(in_chnls,
                             in_chnls,
                             kernel_size,
                             stride=1,
                             padding=kernel_size // 2,
                             groups=in_chnls,
                             activation=activation)
     self.trans = BN_Conv2d(in_chnls, out_chnls, 1, 1, 0,
                            activation=False)  # Linear activation
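This is the usual depthwise-separable pattern; a hedged forward sketch:

def forward(self, x):
    return self.trans(self.DWConv(x))  # depthwise conv, then 1x1 pointwise projection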
Example #11
 def __init__(self, num_classes=1000, input_size=224):
     super(MnasNet_A1, self).__init__()
     self.__dict__.update(self._defaults)  # copy the class-level default hyper-parameters onto this instance
     self.body = self.__make_body()
     self.trans = BN_Conv2d(self.chnls[-1], 1280, 1, 1, 0)
     self.pool = nn.AdaptiveAvgPool2d(1)
     self.fc = nn.Sequential(nn.Dropout(self.dropout_ratio), nn.Linear(1280, num_classes))
Example #12
 def __make_body(self):
     blocks = [
         BN_Conv2d(3, 32, 3, 2, 1, activation=None),
         SeqConv(32, 16, 3)
     ]
     for index in range(len(self.blocks)):
         blocks.append(self.__make_block(index))
     return nn.Sequential(*blocks)
 def __init__(self, in_channels, b1, b2_n1, b2_n1x3, b2_n3x1, n1_linear):
     super(Inception_C_res, self).__init__()
     self.branch1 = BN_Conv2d(in_channels, b1, 1, 1, 0, bias=False)
     self.branch2 = nn.Sequential(
         BN_Conv2d(in_channels, b2_n1, 1, 1, 0, bias=False),
         BN_Conv2d(b2_n1, b2_n1x3, (1, 3), (1, 1), (0, 1), bias=False),
         BN_Conv2d(b2_n1x3, b2_n3x1, (3, 1), (1, 1), (1, 0), bias=False))
     self.conv_linear = nn.Conv2d(b1 + b2_n3x1,
                                  n1_linear,
                                  1,
                                  1,
                                  0,
                                  bias=False)
     self.short_cut = nn.Sequential()
     if in_channels != n1_linear:
         self.short_cut = nn.Sequential(
             nn.Conv2d(in_channels, n1_linear, 1, 1, 0, bias=False),
             nn.BatchNorm2d(n1_linear))
Example #14
 def __init__(self, in_chnls, add_chnl, cat_chnl, cardinality, d, stride):
     super(DPN_Block, self).__init__()
     self.add = add_chnl
     self.cat = cat_chnl
     self.chnl = cardinality * d
     self.conv1 = BN_Conv2d(in_chnls, self.chnl, 1, 1, 0)
     self.conv2 = BN_Conv2d(self.chnl,
                            self.chnl,
                            3,
                            stride,
                            1,
                            groups=cardinality)
     self.conv3 = nn.Conv2d(self.chnl, add_chnl + cat_chnl, 1, 1, 0)
     self.bn = nn.BatchNorm2d(add_chnl + cat_chnl)
     self.shortcut = nn.Sequential()
     if add_chnl != in_chnls:
         self.shortcut = nn.Sequential(
             nn.Conv2d(in_chnls, add_chnl, 1, stride, 0),
             nn.BatchNorm2d(add_chnl))
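A hedged forward sketch for this dual-path block, read as: the first add_chnl output channels form the residual ("add") path and the remaining cat_chnl channels form the densely concatenated ("cat") path. torch and torch.nn.functional as F are assumed imported.

def forward(self, x):
    out = self.conv1(x)
    out = self.conv2(out)
    out = self.bn(self.conv3(out))
    add = out[:, :self.add, :, :] + self.shortcut(x)  # residual path
    cat = out[:, self.add:, :, :]                     # dense path
    return F.relu(torch.cat((add, cat), dim=1))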
Example #15
    def __init__(self,
                 in_chnls,
                 out_chnls,
                 is_se=False,
                 is_residual=False,
                 c_ratio=0.5,
                 groups=2):
        super(BasicUnit, self).__init__()
        self.is_se, self.is_res = is_se, is_residual
        self.l_chnls = int(in_chnls * c_ratio)
        self.r_chnls = in_chnls - self.l_chnls
        self.ro_chnls = out_chnls - self.l_chnls
        self.groups = groups

        # layers
        self.conv1 = BN_Conv2d(self.r_chnls, self.ro_chnls, 1, 1, 0)
        self.dwconv2 = BN_Conv2d(
            self.ro_chnls,
            self.ro_chnls,
            3,
            1,
            1,  # same padding, depthwise conv
            groups=self.ro_chnls,
            activation=None)
        act = None if self.is_res else nn.ReLU(inplace=True)
        self.conv3 = BN_Conv2d(self.ro_chnls,
                               self.ro_chnls,
                               1,
                               1,
                               0,
                               activation=act)
        if self.is_se:
            self.se = SE(self.ro_chnls, 16)
        if self.is_res:
            self.shortcut = nn.Sequential()
            if self.r_chnls != self.ro_chnls:
                self.shortcut = BN_Conv2d(self.r_chnls,
                                          self.ro_chnls,
                                          1,
                                          1,
                                          0,
                                          activation=None)
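A hedged forward sketch for this ShuffleNet-v2 basic unit: split the input channels, transform the right split, optionally apply SE and a residual connection, then concatenate with the untouched left split and shuffle. The shuffle helper is hypothetical, as in the downsampling unit above.

def forward(self, x):
    x_l = x[:, :self.l_chnls, :, :]  # left split passes through
    x_r = x[:, self.l_chnls:, :, :]  # right split is transformed
    out_r = self.conv3(self.dwconv2(self.conv1(x_r)))
    if self.is_se:
        out_r = out_r * self.se(out_r)
    if self.is_res:
        out_r = F.relu(out_r + self.shortcut(x_r))
    out = torch.cat((x_l, out_r), dim=1)
    return shuffle_chnls(out, self.groups)  # hypothetical channel-shuffle helper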
 def __init__(self, in_channels, b1, b2, b3_n1, b3_n1x7, b3_n7x1, b4_n1,
              b4_n1x7_1, b4_n7x1_1, b4_n1x7_2, b4_n7x1_2):
     super(Inception_B, self).__init__()
     self.branch1 = nn.Sequential(
         nn.AvgPool2d(3, 1, 1),
         BN_Conv2d(in_channels, b1, 1, 1, 0, bias=False))
     self.branch2 = BN_Conv2d(in_channels, b2, 1, 1, 0, bias=False)
     self.branch3 = nn.Sequential(
         BN_Conv2d(in_channels, b3_n1, 1, 1, 0, bias=False),
         BN_Conv2d(b3_n1, b3_n1x7, (1, 7), (1, 1), (0, 3), bias=False),
         BN_Conv2d(b3_n1x7, b3_n7x1, (7, 1), (1, 1), (3, 0), bias=False))
     self.branch4 = nn.Sequential(
         BN_Conv2d(in_channels, b4_n1, 1, 1, 0, bias=False),
         BN_Conv2d(b4_n1, b4_n1x7_1, (1, 7), (1, 1), (0, 3), bias=False),
         BN_Conv2d(b4_n1x7_1, b4_n7x1_1, (7, 1), (1, 1), (3, 0),
                   bias=False),
         BN_Conv2d(b4_n7x1_1, b4_n1x7_2, (1, 7), (1, 1), (0, 3),
                   bias=False),
         BN_Conv2d(b4_n1x7_2, b4_n7x1_2, (7, 1), (1, 1), (3, 0),
                   bias=False))
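A hedged forward sketch, representative of the Inception_A and Inception_B blocks in this listing: run the four parallel branches and concatenate their outputs along the channel dimension.

def forward(self, x):
    return torch.cat((self.branch1(x), self.branch2(x),
                      self.branch3(x), self.branch4(x)), dim=1)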
 def __init__(self):
     super(Stem_Res1, self).__init__()
     self.stem = nn.Sequential(BN_Conv2d(3, 32, 3, 2, 0, bias=False),
                               BN_Conv2d(32, 32, 3, 1, 0, bias=False),
                               BN_Conv2d(32, 64, 3, 1, 1, bias=False),
                               nn.MaxPool2d(3, 2, 0),
                               BN_Conv2d(64, 80, 1, 1, 0, bias=False),
                               BN_Conv2d(80, 192, 3, 1, 0, bias=False),
                               BN_Conv2d(192, 256, 3, 2, 0, bias=False))
 def __init__(self, in_channels, b1, b2, b3_n1, b3_n3, b4_n1, b4_n3):
     super(Inception_A, self).__init__()
     self.branch1 = nn.Sequential(
         nn.AvgPool2d(3, 1, 1),
         BN_Conv2d(in_channels, b1, 1, 1, 0, bias=False))
     self.branch2 = BN_Conv2d(in_channels, b2, 1, 1, 0, bias=False)
     self.branch3 = nn.Sequential(
         BN_Conv2d(in_channels, b3_n1, 1, 1, 0, bias=False),
         BN_Conv2d(b3_n1, b3_n3, 3, 1, 1, bias=False))
     self.branch4 = nn.Sequential(
         BN_Conv2d(in_channels, b4_n1, 1, 1, 0, bias=False),
         BN_Conv2d(b4_n1, b4_n3, 3, 1, 1, bias=False),
         BN_Conv2d(b4_n3, b4_n3, 3, 1, 1, bias=False))
 def __init__(self, in_channels, b1, b2, b3_n1, b3_n1x3_3x1, b4_n1, b4_n1x3,
              b4_n3x1, b4_n1x3_3x1):
     super(Inception_C, self).__init__()
     self.branch1 = nn.Sequential(
         nn.AvgPool2d(3, 1, 1),
         BN_Conv2d(in_channels, b1, 1, 1, 0, bias=False))
     self.branch2 = BN_Conv2d(in_channels, b2, 1, 1, 0, bias=False)
     self.branch3_1 = BN_Conv2d(in_channels, b3_n1, 1, 1, 0, bias=False)
     self.branch3_1x3 = BN_Conv2d(b3_n1,
                                  b3_n1x3_3x1, (1, 3), (1, 1), (0, 1),
                                  bias=False)
     self.branch3_3x1 = BN_Conv2d(b3_n1,
                                  b3_n1x3_3x1, (3, 1), (1, 1), (1, 0),
                                  bias=False)
     self.branch4_1 = nn.Sequential(
         BN_Conv2d(in_channels, b4_n1, 1, 1, 0, bias=False),
         BN_Conv2d(b4_n1, b4_n1x3, (1, 3), (1, 1), (0, 1), bias=False),
         BN_Conv2d(b4_n1x3, b4_n3x1, (3, 1), (1, 1), (1, 0), bias=False))
     self.branch4_1x3 = BN_Conv2d(b4_n3x1,
                                  b4_n1x3_3x1, (1, 3), (1, 1), (0, 1),
                                  bias=False)
     self.branch4_3x1 = BN_Conv2d(b4_n3x1,
                                  b4_n1x3_3x1, (3, 1), (1, 1), (1, 0),
                                  bias=False)
Example #20
 def __init__(self, blocks: object, add_chnls: object, cat_chnls: object,
              conv1_chnl, cardinality, d, num_classes) -> object:
     super(DPN, self).__init__()
     self.cdty = cardinality
     self.chnl = conv1_chnl
     self.conv1 = BN_Conv2d(3, self.chnl, 7, 2, 3)
     d1 = d
     self.conv2 = self.__make_layers(blocks[0], add_chnls[0], cat_chnls[0], d1, 1)
     d2 = 2 * d1
     self.conv3 = self.__make_layers(blocks[1], add_chnls[1], cat_chnls[1], d2, 2)
     d3 = 2 * d2
     self.conv4 = self.__make_layers(blocks[2], add_chnls[2], cat_chnls[2], d3, 2)
     d4 = 2 * d3
     self.conv5 = self.__make_layers(blocks[3], add_chnls[3], cat_chnls[3], d4, 2)
     self.fc = nn.Linear(self.chnl, num_classes)
 def __init__(self,
              layers: object,
              k,
              theta,
              num_classes,
              part_ratio=0) -> object:
     super(DenseNet, self).__init__()
     # params
     self.layers = layers
     self.k = k
     self.theta = theta
     self.Block = DenseBlock if part_ratio == 0 else CSP_DenseBlock  # block type is selected by the part_ratio parameter
     # layers
     self.conv = BN_Conv2d(3, 2 * k, 7, 2, 3)
     self.blocks, patches = self.__make_blocks(2 * k)
     self.fc = nn.Linear(patches, num_classes)
 def __init__(self, in_channels, b2_n1, b2_n3, b3_n1, b3_n3, b4_n1, b4_n3_1,
              b4_n3_2):
     super(Reduction_B_Res, self).__init__()
     self.branch2 = nn.Sequential(
         BN_Conv2d(in_channels, b2_n1, 1, 1, 0, bias=False),
         BN_Conv2d(b2_n1, b2_n3, 3, 2, 0, bias=False),
     )
     self.branch3 = nn.Sequential(
         BN_Conv2d(in_channels, b3_n1, 1, 1, 0, bias=False),
         BN_Conv2d(b3_n1, b3_n3, 3, 2, 0, bias=False))
     self.branch4 = nn.Sequential(
         BN_Conv2d(in_channels, b4_n1, 1, 1, 0, bias=False),
         BN_Conv2d(b4_n1, b4_n3_1, 3, 1, 1, bias=False),
         BN_Conv2d(b4_n3_1, b4_n3_2, 3, 2, 0, bias=False))
 def __init__(self, in_channels, b2_n1, b2_n3, b3_n1, b3_n1x7, b3_n7x1,
              b3_n3):
     super(Reduction_B_v4, self).__init__()
     self.branch2 = nn.Sequential(
         BN_Conv2d(in_channels, b2_n1, 1, 1, 0, bias=False),
         BN_Conv2d(b2_n1, b2_n3, 3, 2, 0, bias=False))
     self.branch3 = nn.Sequential(
         BN_Conv2d(in_channels, b3_n1, 1, 1, 0, bias=False),
         BN_Conv2d(b3_n1, b3_n1x7, (1, 7), (1, 1), (0, 3), bias=False),
         BN_Conv2d(b3_n1x7, b3_n7x1, (7, 1), (1, 1), (3, 0), bias=False),
         BN_Conv2d(b3_n7x1, b3_n3, 3, 2, 0, bias=False))
 def __init__(self,
              layers: object,
              cardinality,
              group_depth,
              num_classes,
              is_se=False) -> object:
     super(ResNeXt, self).__init__()
     self.is_se = is_se
     self.cardinality = cardinality
     self.channels = 64
     self.conv1 = BN_Conv2d(3, self.channels, 7, stride=2, padding=3)
     d1 = group_depth
     self.conv2 = self.___make_layers(d1, layers[0], stride=1)
     d2 = d1 * 2
     self.conv3 = self.___make_layers(d2, layers[1], stride=2)
     d3 = d2 * 2
     self.conv4 = self.___make_layers(d3, layers[2], stride=2)
     d4 = d3 * 2
     self.conv5 = self.___make_layers(d4, layers[3], stride=2)
     self.fc = nn.Linear(self.channels, num_classes)  # 224x224 input size
    def __init__(self, layers: object, num_classes, is_se=False) -> object:
        super(DarkNet, self).__init__()
        self.is_se = is_se
        filters = [64, 128, 256, 512, 1024]

        self.conv1 = BN_Conv2d(3, 32, 3, 1, 1)
        self.redu1 = BN_Conv2d(32, 64, 3, 2, 1)
        self.conv2 = self.__make_layers(filters[0], layers[0])
        self.redu2 = BN_Conv2d(filters[0], filters[1], 3, 2, 1)
        self.conv3 = self.__make_layers(filters[1], layers[1])
        self.redu3 = BN_Conv2d(filters[1], filters[2], 3, 2, 1)
        self.conv4 = self.__make_layers(filters[2], layers[2])
        self.redu4 = BN_Conv2d(filters[2], filters[3], 3, 2, 1)
        self.conv5 = self.__make_layers(filters[3], layers[3])
        self.redu5 = BN_Conv2d(filters[3], filters[4], 3, 2, 1)
        self.conv6 = self.__make_layers(filters[4], layers[4])
        self.global_pool = nn.AdaptiveAvgPool2d((1, 1))
        self.fc = nn.Linear(filters[4], num_classes)
    def __init__(self, in_channels, b1, b2_n1, b2_n3, b3_n1, b3_n3_1, b3_n3_2,
                 n1_linear):
        super(Inception_A_res, self).__init__()
        self.branch1 = BN_Conv2d(in_channels, b1, 1, 1, 0, bias=False)
        self.branch2 = nn.Sequential(
            BN_Conv2d(in_channels, b2_n1, 1, 1, 0, bias=False),
            BN_Conv2d(b2_n1, b2_n3, 3, 1, 1, bias=False),
        )
        self.branch3 = nn.Sequential(
            BN_Conv2d(in_channels, b3_n1, 1, 1, 0, bias=False),
            BN_Conv2d(b3_n1, b3_n3_1, 3, 1, 1, bias=False),
            BN_Conv2d(b3_n3_1, b3_n3_2, 3, 1, 1, bias=False))
        self.conv_linear = nn.Conv2d(b1 + b2_n3 + b3_n3_2,
                                     n1_linear,
                                     1,
                                     1,
                                     0,
                                     bias=True)

        self.short_cut = nn.Sequential()
        if in_channels != n1_linear:
            self.short_cut = nn.Sequential(
                nn.Conv2d(in_channels, n1_linear, 1, 1, 0, bias=False),
                nn.BatchNorm2d(n1_linear))
 def __make_transition(self, in_chls):
     out_chls = int(self.theta * in_chls)
     return nn.Sequential(BN_Conv2d(in_chls, out_chls, 1, 1, 0),
                          nn.AvgPool2d(2)), out_chls
    def __init__(self, block_type, in_channels, b1_reduce, b1, b2_reduce, b2, b3, b4):
        super(Block_bank, self).__init__()
        self.block_type = block_type  # controlled by strings "type1", "type2", "type3", "type4", "type5"

        """
        branch 1
        """
        # reduce, 3x3, 3x3
        self.branch1_type1 = nn.Sequential(
            BN_Conv2d(in_channels, b1_reduce, 1, 1, 0, bias=False),
            BN_Conv2d(b1_reduce, b1, 3, 1, 1, bias=False),
            BN_Conv2d(b1, b1, 3, 1, 1, bias=False)
        )

        # reduce, 3x3, 3x3_s2
        self.branch1_type2 = nn.Sequential(
            BN_Conv2d(in_channels, b1_reduce, 1, 1, 0, bias=False),
            BN_Conv2d(b1_reduce, b1, 3, 1, 1, bias=False),
            BN_Conv2d(b1, b1, 3, 2, 0, bias=False)
        )

        # reduce, 1x7, 7x1, 1x7, 7x1
        self.branch1_type3 = nn.Sequential(
            BN_Conv2d(in_channels, b1_reduce, 1, 1, 0, bias=False),
            BN_Conv2d(b1_reduce, b1_reduce, (7, 1), (1, 1), (3, 0), bias=False),
            BN_Conv2d(b1_reduce, b1_reduce, (1, 7), (1, 1), (0, 3), bias=False),  # same padding
            BN_Conv2d(b1_reduce, b1_reduce, (7, 1), (1, 1), (3, 0), bias=False),
            BN_Conv2d(b1_reduce, b1, (1, 7), (1, 1), (0, 3), bias=False)
        )

        # reduce, 1x7, 7x1, 3x3_s2
        self.branch1_type4 = nn.Sequential(
            BN_Conv2d(in_channels, b1_reduce, 1, 1, 0, bias=False),
            BN_Conv2d(b1_reduce, b1, (1, 7), (1, 1), (0, 3), bias=False),
            BN_Conv2d(b1, b1, (7, 1), (1, 1), (3, 0), bias=False),
            BN_Conv2d(b1, b1, 3, 2, 0, bias=False)
        )

        # reduce, 3x3, 2 sub-branch of 1x3, 3x1
        self.branch1_type5_head = nn.Sequential(
            BN_Conv2d(in_channels, b1_reduce, 1, 1, 0, bias=False),
            BN_Conv2d(b1_reduce, b1, 3, 1, 1, bias=False)
        )
        self.branch1_type5_body1 = BN_Conv2d(b1, b1, (1, 3), (1, 1), (0, 1), bias=False)
        self.branch1_type5_body2 = BN_Conv2d(b1, b1, (3, 1), (1, 1), (1, 0), bias=False)

        """
        branch 2
        """
        # reduce, 5x5
        self.branch2_type1 = nn.Sequential(
            BN_Conv2d(in_channels, b2_reduce, 1, 1, 0, bias=False),
            BN_Conv2d(b2_reduce, b2, 5, 1, 2, bias=False)
        )

        # 3x3_s2
        self.branch2_type2 = BN_Conv2d(in_channels, b2, 3, 2, 0, bias=False)

        # reduce, 1x7, 7x1
        self.branch2_type3 = nn.Sequential(
            BN_Conv2d(in_channels, b2_reduce, 1, 1, 0, bias=False),
            BN_Conv2d(b2_reduce, b2_reduce, (1, 7), (1, 1), (0, 3), bias=False),
            BN_Conv2d(b2_reduce, b2, (7, 1), (1, 1), (3, 0), bias=False)
        )

        # reduce, 3x3_s2
        self.branch2_type4 = nn.Sequential(
            BN_Conv2d(in_channels, b2_reduce, 1, 1, 0, bias=False),
            BN_Conv2d(b2_reduce, b2, 3, 2, 0, bias=False)
        )

        # reduce, 2 sub-branch of 1x3, 3x1
        self.branch2_type5_head = BN_Conv2d(in_channels, b2_reduce, 1, 1, 0, bias=False)
        self.branch2_type5_body1 = BN_Conv2d(b2_reduce, b2, (1, 3), (1, 1), (0, 1), bias=False)
        self.branch2_type5_body2 = BN_Conv2d(b2_reduce, b2, (3, 1), (1, 1), (1, 0), bias=False)

        """
        branch 3
        """
        # avg pool, 1x1
        self.branch3 = nn.Sequential(
            nn.AvgPool2d(3, 1, 1),
            BN_Conv2d(in_channels, b3, 1, 1, 0, bias=False)
        )

        """
        branch 4
        """
        # 1x1
        self.branch4 = BN_Conv2d(in_channels, b4, 1, 1, 0, bias=False)
 def __init__(self):
     super(Stem_v4_Res2, self).__init__()
     self.step1 = nn.Sequential(BN_Conv2d(3, 32, 3, 2, 0, bias=False),
                                BN_Conv2d(32, 32, 3, 1, 0, bias=False),
                                BN_Conv2d(32, 64, 3, 1, 1, bias=False))
     self.step2_pool = nn.MaxPool2d(3, 2, 0)
     self.step2_conv = BN_Conv2d(64, 96, 3, 2, 0, bias=False)
     self.step3_1 = nn.Sequential(BN_Conv2d(160, 64, 1, 1, 0, bias=False),
                                  BN_Conv2d(64, 96, 3, 1, 0, bias=False))
     self.step3_2 = nn.Sequential(
         BN_Conv2d(160, 64, 1, 1, 0, bias=False),
         BN_Conv2d(64, 64, (7, 1), (1, 1), (3, 0), bias=False),
         BN_Conv2d(64, 64, (1, 7), (1, 1), (0, 3), bias=False),
         BN_Conv2d(64, 96, 3, 1, 0, bias=False))
     self.step4_pool = nn.MaxPool2d(3, 2, 0)
     self.step4_conv = BN_Conv2d(192, 192, 3, 2, 0, bias=False)
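A hedged forward sketch for this Inception-v4 stem, assuming each step concatenates its pooling path with its convolutional path; the channel counts line up with the definitions above (64 + 96 = 160, 96 + 96 = 192, 192 + 192 = 384).

def forward(self, x):
    out = self.step1(x)
    out = torch.cat((self.step2_pool(out), self.step2_conv(out)), dim=1)   # 160 channels
    out = torch.cat((self.step3_1(out), self.step3_2(out)), dim=1)         # 192 channels
    return torch.cat((self.step4_pool(out), self.step4_conv(out)), dim=1)  # 384 channels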