Example #1
0
 def __init__(self, in_chs, out_ch, activation='valid', dim=1, **kwargs):
     """DarkCatBlock

     Two parallel ConvSameBnRelu2D branches (one per input tensor) whose
     outputs are concatenated along ``dim``.

     Args:
         in_chs (Sequence[int]): channel counts of the two input tensors.
         out_ch (int): output channels of each conv branch.
         activation (Union[str, Module]): default 'valid'.
         dim (int): concatenation dimension, default 1.
     """
     super(DarkCatBlock, self).__init__()
     self.conv1 = ConvSameBnRelu2D(in_chs[0], out_ch, activation=activation)
     self.conv2 = ConvSameBnRelu2D(in_chs[1], out_ch, activation=activation)
     self.concat = Concat(dim=dim)
     # NOTE(review): each branch emits ``out_ch`` channels, so the concat
     # result has 2 * out_ch channels; ``in_chs[0] + in_chs[1]`` matches
     # only when out_ch == in_chs[i] — confirm against callers.
     self.out_ch_last = in_chs[0] + in_chs[1]
     self.addLayers([self.conv1, self.conv2, self.concat])
Example #2
0
 def __init__(self, in_ch, out_ch, stride=1, hid_ch=None, pool_size=1, residual_path='equal'):
     """ResBlockB

     Bottleneck residual block: 1x1 reduce -> 3x3 (optionally strided)
     -> 1x1 expand (BN, no activation), plus a configurable shortcut,
     followed by a final ReLU.

     Args:
         in_ch (int): input channels.
         out_ch (int): output channels.
         stride (int): stride of the middle 3x3 conv, default 1.
         hid_ch (int): bottleneck width; defaults to ``out_ch`` when None.
         pool_size (int): pooling size forwarded to the shortcut.
         residual_path (str): shortcut type, default 'equal'.
     """
     super(ResBlockB, self).__init__()
     # Guard the None default so it is never forwarded to the conv layers
     # (consistent with the None-handling in the sibling blocks).
     if hid_ch is None:
         hid_ch = out_ch
     self.res_list = [
         ConvSameBnRelu2D(in_ch, hid_ch, 1),
         ConvSameBnRelu2D(hid_ch, hid_ch, 3, stride=stride),
         FeatureExtractor(hid_ch, out_ch, 1, padding='same', bn=True),
     ]
     self.shortcut = self.make_shortcut(in_ch, out_ch, pool_size, residual_path)
     self.act = nn.ReLU(inplace=True)
     self.addLayers([self.res_list, self.shortcut, self.act])
Example #3
0
    def __init__(self,
                 in_ch,
                 out_ch,
                 hid_ch=None,
                 kernels_size=1,
                 strides=1,
                 activation='valid',
                 **kwargs):
        """YoloBlock

        Five stacked ConvSameBnRelu2D layers whose channel widths
        alternate: in->hid, hid->in, in->hid, hid->in, in->out.

        Args:
            in_ch (int):
            out_ch (int):
            hid_ch (int): defaults to ``out_ch`` when None.
            kernels_size (Union[int, List[int]]): per-layer kernel sizes.
            strides (Union[int, List[int]]): per-layer strides.
            activation (Union[str, Module]): default 'valid'.

        Returns:
            Module
        """
        super(YoloBlock, self).__init__()
        kernels_size, strides = check_kernel_stride(kernels_size, strides)
        if hid_ch is None:
            hid_ch = out_ch
        # Channel plan of the five conv layers; replaces five copy-pasted
        # constructor calls that differed only in channels and index.
        channels = [(in_ch, hid_ch), (hid_ch, in_ch), (in_ch, hid_ch),
                    (hid_ch, in_ch), (in_ch, out_ch)]
        self.block = nn.Sequential(*[
            ConvSameBnRelu2D(c_in,
                             c_out,
                             kernels_size[i],
                             strides[i],
                             activation=activation)
            for i, (c_in, c_out) in enumerate(channels)
        ])
        self.out_ch_last = out_ch
        self.addLayers(self.block)
Example #4
0
    def __init__(self, in_ch, out_ch):
        """InceptionBlock_v1A

        Builds three stride-2 conv branches (kernel sizes 1, 3, 5) plus a
        stride-2 max-pool branch.

        Args:
            in_ch (None): input channels.
            out_ch (None): output channels.
        """
        super(InceptionBlock_v1A, self).__init__(out_ch)
        # One conv branch per kernel size, then the pooling branch.
        branches = [
            ConvSameBnRelu2D(in_ch, self.out_ch, k, stride=2)
            for k in (1, 3, 5)
        ]
        branches.append(MaxPool2D(3, stride=2, padding='same'))
        self.inc_list = branches
        self.addLayers(self.inc_list)
Example #5
0
    def __init__(self,
                 in_ch,
                 out_ch,
                 hid_ch=None,
                 kernel_size=1,
                 stride=1,
                 activation='valid',
                 dim=0,
                 **kwargs):
        """YoloNeck

        A single ConvSameBnRelu2D followed by a Concat layer that merges
        the conv output with a skip feature map.

        Args:
            in_ch (int):
            out_ch (int):
            hid_ch (int): defaults to ``out_ch // 2`` when None.
            kernel_size (int): default 1.
            stride (int): default 1.
            activation (Union[str, Module]): default 'valid'.
            dim (int): concatenation dimension, default 0.

        Returns:
            Module
        """
        super(YoloNeck, self).__init__()
        hid_ch = out_ch // 2 if hid_ch is None else hid_ch
        self.conv = ConvSameBnRelu2D(in_ch,
                                     hid_ch,
                                     kernel_size=kernel_size,
                                     stride=stride,
                                     activation=activation)
        self.cat = Concat(dim=dim)
        self.out_ch_last = out_ch
        self.addLayers([self.conv, self.cat])
Example #6
0
 def __init__(self, in_ch, out_ch, hid_ch, activation='valid', **kwargs):
     """UpSampleBlock

     Four ConvSameBnRelu2D layers (in -> hid -> 2*hid -> hid -> out)
     followed by an UpSample layer.

     Args:
         in_ch (int): input channels.
         out_ch (int): output channels.
         hid_ch (int): hidden channels.
         activation (Union[str, Module]): default 'valid'.
         **kwargs: optional ``us_size``, ``us_stride`` and ``us_mode``
             (default 'nearest') forwarded to UpSample.
     """
     super(UpSampleBlock, self).__init__()
     # dict.get is the idiomatic form of "'k' in kwargs" + lookup.
     us_size = kwargs.get('us_size')
     us_stride = kwargs.get('us_stride')
     us_mode = kwargs.get('us_mode', 'nearest')
     self.us_block = nn.Sequential(
         ConvSameBnRelu2D(in_ch, hid_ch, activation=activation),
         ConvSameBnRelu2D(hid_ch,
                          hid_ch * 2,
                          kernel_size=3,
                          activation=activation),
         ConvSameBnRelu2D(hid_ch * 2, hid_ch, activation=activation),
         ConvSameBnRelu2D(hid_ch, out_ch, activation=activation),
         UpSample(size=us_size, stride=us_stride, mode=us_mode),
     )
     self.addLayers(self.us_block)
Example #7
0
    def __init__(self, in_ch, out_ch, kernel_size=3, num_layer=2, pool_size=1, pool_stride=1):
        """VGGPoolBlock

        ``num_layer`` ConvSameBnRelu2D layers (the last one with
        ``kernel_size``) followed by a same-padded max-pool.

        Args:
            in_ch: input channels.
            out_ch: output channels.
            kernel_size: kernel size of the final conv layer.
            num_layer: total number of conv layers.
            pool_size: max-pool kernel size.
            pool_stride: max-pool stride.

        Returns:
            None
        """
        super(VGGPoolBlock, self).__init__()
        self.add_module('fe1', ConvSameBnRelu2D(in_ch, out_ch, 3))
        # Fill fe2..fe{num_layer-1}; the original only ever added 'fe2',
        # so for num_layer >= 4 the intermediate layers were silently
        # skipped. Behavior for num_layer in {2, 3} is unchanged.
        for i in range(2, num_layer):
            self.add_module(f'fe{i}', ConvSameBnRelu2D(out_ch, out_ch, 3))
        self.add_module(f'fe{num_layer}', ConvSameBnRelu2D(out_ch, out_ch, kernel_size))
        self.add_module('max_pool', MaxPool2D(pool_size, pool_stride, padding='same'))
Example #8
0
 def __init__(self,
              in_ch,
              out_ch,
              kernel_size=1,
              activation='valid',
              residual_path='equal',
              **kwargs):
     """DarkBlock

     Two stacked ConvSameBnRelu2D layers wrapped with a Shortcut
     (residual) connection.

     Args:
         in_ch (int): input channels.
         out_ch (int): output channels.
         kernel_size (int): kernel size of the second conv, default 1.
         activation (Union[str, Module]): default 'valid'.
         residual_path (str): shortcut type, default 'equal'.
     """
     super(DarkBlock, self).__init__()
     self.blocks = nn.Sequential(
         ConvSameBnRelu2D(in_ch, out_ch, activation=activation),
         ConvSameBnRelu2D(out_ch,
                          out_ch,
                          kernel_size,
                          activation=activation),
     )
     self.shortcut = Shortcut(residual_path=residual_path)
     self.addLayers([self.blocks, self.shortcut])
Example #9
0
    def __init__(self, in_ch, out_ch, kernel_n=3, stride=2):
        """IncResBlock_v4B
            c1-s2, c3-s2, c5-s2, pool-s2

            c1-s2: Conv1-s2

            c3-s2: Conv1 -> Conv3-s2

            c5-s2: Conv1 -> Conv5-s2

            pool-s2: MaxPooling-s2 -> Conv1

            call: concat([c1-s2(x), c3-s2(x), c5-s2(x), prob-s2(x)], -3)

        Args:
            in_ch: input channels.
            out_ch: output channels per branch.
            kernel_n: size of the factorized (1,n)/(n,1) kernels.
            stride: downsampling stride, default 2.

        Returns:
            None
        """
        super(IncResBlock_v4B, self).__init__(out_ch)
        ch = self.out_ch
        # Each n x n conv is factorized into a (1, n) then (n, 1) pair;
        # the stride is likewise split across the two passes.
        branch_c1 = nn.Sequential(
            ConvSameBnRelu2D(in_ch, ch, kernel_size=1, stride=stride))
        branch_c3 = nn.Sequential(
            ConvSameBnRelu2D(in_ch, ch, 1),
            ConvSameBnRelu2D(ch, ch, kernel_size=(1, kernel_n), stride=(stride, 1)),
            ConvSameBnRelu2D(ch, ch, kernel_size=(kernel_n, 1), stride=(1, stride)))
        branch_c5 = nn.Sequential(
            ConvSameBnRelu2D(in_ch, ch, 1),
            ConvSameBnRelu2D(ch, ch, kernel_size=(1, kernel_n)),
            ConvSameBnRelu2D(ch, ch, kernel_size=(kernel_n, 1)),
            ConvSameBnRelu2D(ch, ch, kernel_size=(1, kernel_n), stride=(stride, 1)),
            ConvSameBnRelu2D(ch, ch, kernel_size=(kernel_n, 1), stride=(1, stride)))
        branch_pool = nn.Sequential(
            MaxPool2D(3, stride=stride, padding='same'),
            ConvSameBnRelu2D(in_ch, ch, 1))
        self.inc_list = [branch_c1, branch_c3, branch_c5, branch_pool]
        self.addLayers(self.inc_list)
Example #10
0
    def __init__(self,
                 in_ch,
                 num_cls,
                 hid_ch,
                 kernel_size=1,
                 stride=1,
                 activation='valid',
                 anchors=None,
                 **kwargs):
        """YoloClassifier

        A 3x3 conv followed by a 1x1-style anchor head producing
        ``num_anchors * (num_cls + 4 + 1)`` channels, then a flatten over
        the spatial dimensions.

        Args:
            in_ch (int):
            num_cls (int):
            hid_ch (int):
            kernel_size (Union[int, List[int]]):
            stride (Union[int, List[int]]):
            activation (Union[str, Module]):
            anchors (List[int]):
            **kwargs: optional ``num_anchors`` (default 3), ``iou``
                (default 'iou') and ``nms`` (default 'nms').

        Returns:
            Module
        """
        super(YoloClassifier, self).__init__()
        self.anchors = anchors
        self.num_cls = num_cls
        # dict.get is the idiomatic form of "'k' in kwargs" + lookup.
        self.num_anchors = kwargs.get('num_anchors', 3)
        self.iou = kwargs.get('iou', 'iou')
        self.nms = kwargs.get('nms', 'nms')
        # Per anchor: class scores + 4 box coordinates + 1 objectness.
        self.out_ch_last = self.num_anchors * (num_cls + 4 + 1)
        self.conv = ConvSameBnRelu2D(in_ch, hid_ch, 3, activation=activation)
        self.anchor = ConvSameBnRelu2D(hid_ch, self.out_ch_last, kernel_size,
                                       stride, 'valid')
        self.flatten = nn.Flatten(start_dim=-2)
        self.addLayers([self.conv, self.anchor, self.flatten])
Example #11
0
    def __init__(self, in_ch, out_ch):
        """IncResBlock_v4A
            c1-s2, c3-s2, c5-s2, pool-s2

            c1-s2: Conv1-s2

            c3-s2: Conv1 -> Conv3-s2

            c5-s2: Conv1 -> Conv5-s2

            pool-s2: MaxPooling-s2 -> Conv1

            call: concat([c1-s2(x), c3-s2(x), c5-s2(x), prob-s2(x)], -3)

        Args:
            in_ch: input channels.
            out_ch: output channels per branch.

        Returns:
            None
        """
        super(IncResBlock_v4A, self).__init__(out_ch)
        ch = self.out_ch
        # Four parallel downsampling branches; the 5x5 path is expressed
        # as two stacked 3x3 convs.
        branch_c1 = nn.Sequential(
            ConvSameBnRelu2D(in_ch, ch, stride=2))
        branch_c3 = nn.Sequential(
            ConvSameBnRelu2D(in_ch, ch, 1),
            ConvSameBnRelu2D(ch, ch, 3, stride=2))
        branch_c5 = nn.Sequential(
            ConvSameBnRelu2D(in_ch, ch, 1),
            ConvSameBnRelu2D(ch, ch, 3),
            ConvSameBnRelu2D(ch, ch, 3, stride=2))
        branch_pool = nn.Sequential(
            MaxPool2D(3, stride=2, padding='same'),
            ConvSameBnRelu2D(in_ch, ch, 1))
        self.inc_list = [branch_c1, branch_c3, branch_c5, branch_pool]
        self.addLayers(self.inc_list)
Example #12
0
    def __init__(self, in_ch, out_ch):
        """ReductionBlock_v4B
            c1-s2, c3-s2, c5-s2, pool-s2

            c1-s2: Conv1-s2

            c3-s2: Conv1 -> Conv3-s2

            c5-s2: Conv1 -> Conv5-s2

            pool-s2: MaxPooling-s2 -> Conv1

            call: concat([c1-s2(x), c3-s2(x), c5-s2(x), prob-s2(x)], -3)

        Args:
            in_ch: input channels.
            out_ch: forwarded to the superclass constructor.

        Returns:
            None
        """
        super(ReductionBlock_v4B, self).__init__(out_ch)
        # Fixed branch widths (256/288/320/384) follow the Inception-v4
        # reduction-B layout.
        branch_a = nn.Sequential(
            ConvSameBnRelu2D(in_ch, 256, 1),
            ConvSameBnRelu2D(256, 384, 3, stride=2))
        branch_b = nn.Sequential(
            ConvSameBnRelu2D(in_ch, 256, 1),
            ConvSameBnRelu2D(256, 288, 3, stride=2))
        branch_c = nn.Sequential(
            ConvSameBnRelu2D(in_ch, 256, 1),
            ConvSameBnRelu2D(256, 288, 3),
            FeatureExtractor(288, 320, 3, stride=2))
        branch_pool = nn.Sequential(MaxPool2D(3, stride=2, padding='same'))
        self.inc_list = [branch_a, branch_b, branch_c, branch_pool]
        self.addLayers(self.inc_list)