Example #1
 def __init__(
     self,
     in_channels,
     channels,
     stride=1,
     groups=1,
     base_width=64,
     dilation=1,
     norm=M.BatchNorm2d,
 ):
     assert norm is M.BatchNorm2d, "Quant mode only supports BatchNorm2d currently."
     super(Bottleneck, self).__init__()
     width = int(channels * (base_width / 64.0)) * groups
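     # 1x1 reduce -> 3x3 spatial -> 1x1 expand; Conv, BN (and ReLU) are fused per stage for quant mode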
     self.conv_bn_relu1 = M.ConvBnRelu2d(in_channels, width, 1, 1, bias=False)
     self.conv_bn_relu2 = M.ConvBnRelu2d(
         width,
         width,
         3,
         stride,
         padding=dilation,
         groups=groups,
         dilation=dilation,
         bias=False,
     )
     self.conv_bn3 = M.ConvBn2d(width, channels * self.expansion, 1, 1, bias=False)
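     # shortcut: identity when the shape is unchanged, otherwise a 1x1 ConvBn projection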
     self.downsample = (
         M.Identity()
         if in_channels == channels * self.expansion and stride == 1
         else M.ConvBn2d(
             in_channels, channels * self.expansion, 1, stride, bias=False
         )
     )
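     # fused residual add + ReLU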
     self.add = M.Elemwise("FUSE_ADD_RELU")
Example #2
 def _init_feature_adjust(self):
     # feature adjustment: 3x3 ConvBn layers for the kernel (z) and search (x) features
     # of the regression (r) and classification (c) branches
     self.r_z_k = M.ConvBn2d(self.channels,
                             self.feat_channels,
                             3,
                             1,
                             1,
                             momentum=0.9,
                             affine=True,
                             track_running_stats=True)
     self.c_z_k = M.ConvBn2d(self.channels,
                             self.feat_channels,
                             3,
                             1,
                             1,
                             momentum=0.9,
                             affine=True,
                             track_running_stats=True)
     self.r_x = M.ConvBn2d(self.channels,
                           self.feat_channels,
                           3,
                           1,
                           1,
                           momentum=0.9,
                           affine=True,
                           track_running_stats=True)
     self.c_x = M.ConvBn2d(self.channels,
                           self.feat_channels,
                           3,
                           1,
                           1,
                           momentum=0.9,
                           affine=True,
                           track_running_stats=True)
Example #3
 def __init__(
     self,
     in_channels,
     channels,
     stride=1,
     groups=1,
     base_width=64,
     dilation=1,
     norm=M.BatchNorm2d,
 ):
     assert norm is M.BatchNorm2d, "Quant mode only supports BatchNorm2d currently."
     super(BasicBlock, self).__init__()
     if groups != 1 or base_width != 64:
         raise ValueError("BasicBlock only supports groups=1 and base_width=64")
     if dilation > 1:
         raise NotImplementedError("Dilation > 1 not supported in BasicBlock")
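     # two 3x3 convs with fused BN; ReLU only after the first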
     self.conv_bn_relu1 = M.ConvBnRelu2d(
         in_channels, channels, 3, stride, padding=dilation, bias=False
     )
     self.conv_bn2 = M.ConvBn2d(channels, channels, 3, 1, padding=1, bias=False)
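     # identity shortcut, or a 1x1 ConvBn projection when the shape changes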
     self.downsample = (
         M.Identity()
         if in_channels == channels and stride == 1
         else M.ConvBn2d(in_channels, channels, 1, stride, bias=False)
     )
     self.add = M.Elemwise("FUSE_ADD_RELU")
Example #4
 def __init__(self, inp, oup, stride):
     super().__init__()
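     # projection shortcut: identity when inp == oup and stride == 1, otherwise a 1x1 ConvBn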
     if inp == oup and stride == 1:
         self.proj = M.Identity()
     else:
         self.proj = M.ConvBn2d(inp, oup, 1, stride=stride, bias=False)
     self.conv1 = M.ConvBnRelu2d(inp,
                                 oup,
                                 3,
                                 padding=1,
                                 stride=stride,
                                 bias=False)
     self.conv2 = M.ConvBn2d(oup, oup, 3, padding=1, stride=1, bias=False)
Example #5
 def __init__(self, inp, oup, stride, expand_ratio):
     super(InvertedResidual, self).__init__()
     self.stride = stride
     assert stride in [1, 2]

     hidden_dim = int(round(inp * expand_ratio))
     self.use_res_connect = self.stride == 1 and inp == oup

     layers = []
     if expand_ratio != 1:
         # pw: 1x1 pointwise expansion
         layers.append(
             M.ConvBnRelu2d(inp, hidden_dim, kernel_size=1, bias=False))
     layers.extend([
         # dw: 3x3 depthwise conv (groups=hidden_dim)
         M.ConvBnRelu2d(
             hidden_dim,
             hidden_dim,
             kernel_size=3,
             padding=1,
             stride=stride,
             groups=hidden_dim,
             bias=False,
         ),
         # pw-linear: 1x1 projection without activation
         M.ConvBn2d(hidden_dim, oup, kernel_size=1, bias=False),
     ])
     self.conv = M.Sequential(*layers)
     # residual add; applied when stride == 1 and inp == oup
     self.add = M.Elemwise("ADD")
Example #6
 def __init__(self):
     super().__init__()
     self.classifier = None
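     # pipeline parallelism: each rank builds only its own stage of the network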
     if dist.get_rank() == 0:
         self.features = M.Sequential(
             M.ConvBn2d(3, 64, 7, stride=2, padding=3, bias=False),
             M.MaxPool2d(kernel_size=3, stride=2, padding=1),
             BasicBlock(64, 64, 1),
             BasicBlock(64, 64, 1),
         )
     elif dist.get_rank() == 1:
         self.features = M.Sequential(
             BasicBlock(64, 128, 2),
             BasicBlock(128, 128, 1),
         )
     elif dist.get_rank() == 2:
         self.features = M.Sequential(
             BasicBlock(128, 256, 2),
             BasicBlock(256, 256, 1),
         )
     elif dist.get_rank() == 3:
         self.features = M.Sequential(
             BasicBlock(256, 512, 2),
             BasicBlock(512, 512, 1),
         )
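          # only the last pipeline stage owns the classifier head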
         self.classifier = M.Linear(512, 1000)
Example #7
 def _init_predictor(self):
     """Initialize predictor layers of the head."""
     self.conv_cls = M.ConvBn2d(self.feat_channels,
                                self.cls_out_channels,
                                kernel_size=1,
                                stride=1,
                                padding=0)
     self.conv_reg = M.ConvBn2d(self.feat_channels,
                                2,
                                kernel_size=1,
                                stride=1,
                                padding=0)
     self.conv_centerness = M.ConvBn2d(self.feat_channels,
                                       1,
                                       kernel_size=1,
                                       stride=1,
                                       padding=0)
Example #8
 def __init__(self, has_bias=True):
     super().__init__()
     self.data = np.random.random((1, 3, 224, 224)).astype(np.float32)
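     # ConvBn2d with per-axis (h, w) stride, dilation and padding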
     self.convbn = M.ConvBn2d(3,
                              10,
                              3,
                              stride=(2, 3),
                              dilation=(2, 2),
                              padding=(3, 1),
                              bias=has_bias)
Example #9
 def __init__(self):
     super().__init__()
     self.conv0 = MyConvBnRelu2d(3, 3, 3, 1, 1)
     self.conv1 = M.ConvBn2d(3, 3, 1, 1, 0)
     self.conv2 = M.ConvBn2d(3, 3, 1, 1, 0)
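     # fused elementwise add + ReLU for combining the conv branches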
     self.add = M.Elemwise("FUSE_ADD_RELU")