def __init__(self,
             in_ch,
             out_ch,
             hid_ch,
             out_stride,
             kernel_sizes=(1, 3),
             padding='same'):
    """Two-stage SSD feature block: 1x1 channel-reduce, then strided extract.

    Args:
        in_ch: input channel count of the first extractor.
        out_ch: output channel count of the second extractor.
        hid_ch: hidden channel count between the two extractors.
        out_stride: stride of the second (spatial-reduction) extractor.
        kernel_sizes: kernel sizes of the two extractors, default (1, 3).
        padding: padding mode forwarded to both extractors.

    Returns:
        None
    """
    super(SSDBlock, self).__init__()
    self.blocks = [
        FeatureExtractor(in_ch,
                         hid_ch,
                         kernel_sizes[0],
                         padding=padding,
                         bn=True,
                         activation='relu'),
        FeatureExtractor(hid_ch,
                         out_ch,
                         kernel_sizes[1],
                         out_stride,
                         padding=padding,
                         bn=True,
                         activation='relu')
    ]
    self.addLayers(self.blocks)
    # Removed the dead trailing `pass` statement of the original.
 def __init__(self,
              in_ch,
              out_ch,
              hid_ch,
              out_stride,
              num_cls,
              num_anchor=4,
              kernel_sizes=(1, 3),
              padding='same'):
     """SSD block with an attached per-location classification head.

     Args:
         in_ch: input channels of the first extractor.
         out_ch: output channels of the second extractor.
         hid_ch: hidden channels between the two extractors.
         out_stride: stride of the second (spatial-reduction) extractor.
         num_cls: number of object classes for the classifier head.
         num_anchor: anchors per spatial location, default 4.
         kernel_sizes: kernel sizes of the two extractors, default (1, 3).
         padding: padding mode forwarded to both extractors.

     Returns:
         None
     """
     super(SSD_Block, self).__init__()
     self.num_cls = num_cls
     self.feb0 = FeatureExtractor(in_ch,
                                  hid_ch,
                                  kernel_sizes[0],
                                  padding=padding,
                                  bn=True,
                                  activation='relu')
     self.feb1 = FeatureExtractor(hid_ch,
                                  out_ch,
                                  kernel_sizes[1],
                                  out_stride,
                                  padding=padding,
                                  bn=True,
                                  activation='relu')
     self.classifier = SSDClassifier(out_ch, num_cls, num_anchor)
     self.addLayers([self.feb0, self.feb1, self.classifier])
     # Removed the dead trailing `pass` statement of the original.
# Beispiel #3 (Example #3 — separator left over from snippet scraping)
# 0
    def __init__(self, in_chs, out_chs, kernel_channels=3,
                 strides=3, padding='valid', pool=False):
        """AlexNet-style block: two parallel, identically-built conv towers.

        Each stage appends one FeatureExtractor (and optionally a MaxPool2D)
        to both towers.

        Args:
            in_chs: input channels per stage; scalar or tuple (broadcast by
                trans_tuple).
            out_chs: output channels per stage; scalar or tuple.
            kernel_channels: kernel size per stage, broadcast to len(out_chs).
            strides: stride per stage, broadcast to len(out_chs).
            padding: padding mode forwarded to every FeatureExtractor.
            pool: if True, append a 3x3 stride-1 'same' MaxPool2D after each
                stage of each tower.

        Returns:
            None
        """
        super(AlexBlock, self).__init__()
        in_chs = trans_tuple(in_chs)
        out_chs = trans_tuple(out_chs)
        kernel_channels = trans_tuple(kernel_channels, num_param=len(out_chs))
        strides = trans_tuple(strides, num_param=len(out_chs))
        self._layers = [[], []]  # one layer list per tower

        for in_ch, out_ch, kernel_size, stride in zip(in_chs, out_chs, kernel_channels, strides):
            for i in range(2):
                self._layers[i].append(
                    FeatureExtractor(in_ch, out_ch, kernel_size, stride, padding=padding, bn=True, activation='relu'))
                if pool:
                    self._layers[i].append(MaxPool2D(3, 1, 'same'))
        # Register once, after all stages are built. The original called
        # addLayers inside the outer loop, re-submitting every previously
        # built layer on each iteration.
        self.addLayers(self._layers[0] + self._layers[1])
 def __init__(self, in_ch, num_cls, num_anchor):
     """SSD prediction head.

     A single 3x3 'same'-padded FeatureExtractor that emits, for each of the
     `num_anchor` anchors, `num_cls` class scores plus 4 box offsets.
     """
     super(SSDClassifier, self).__init__()
     # Per-anchor prediction vector: class scores + 4 box coordinates.
     self.anchor_vector_len = num_cls + 4
     out_channels = num_anchor * self.anchor_vector_len
     self.classifier = FeatureExtractor(in_ch, out_channels, 3, padding='same')
     self.addLayers(self.classifier)
# Beispiel #5 (Example #5 — separator left over from snippet scraping)
# 0
 def __init__(self, in_ch, out_ch, stride=1, hid_ch=None, pool_size=1, residual_path='equal'):
     """Residual block A: two 3x3 convs plus a configurable shortcut path.

     Args:
         in_ch: input channels.
         out_ch: output channels of the residual branch.
         stride: stride of the second conv (spatial reduction).
         hid_ch: hidden channels between the two convs; defaults to out_ch.
         pool_size: pooling window for the 'pool' shortcut variant.
         residual_path: shortcut type — 'conv', 'pool', or 'equal'.

     Returns:
         None
     """
     super(ResBlockA, self).__init__()
     if hid_ch is None:
         # The original passed None straight into the first conv, so the
         # declared default was unusable; fall back to out_ch.
         hid_ch = out_ch
     self.res_fun = nn.Sequential(
         ConvSameBnRelu2D(in_ch, hid_ch, 3),
         FeatureExtractor(hid_ch, out_ch, 3, stride=stride, padding='same', bn=True),
     )
     self.shortcut = self.make_shortcut(in_ch, out_ch, pool_size, residual_path)
     self.act = nn.ReLU(inplace=True)
     self.addLayers([self.res_fun, self.shortcut, self.act])
# Beispiel #6 (Example #6 — separator left over from snippet scraping)
# 0
    def make_shortcut(in_ch, out_ch, pool_size, residual_path='equal'):
        """Build the shortcut branch of a residual block.

        Args:
            in_ch: input channels (used only by the 'conv' variant).
            out_ch: output channels of the 'conv' variant.
            pool_size: pooling window of the 'pool' variant.
            residual_path: 'conv' (1x1 projection), 'pool' (stride-2
                max-pool), or 'equal' (identity). Default 'equal'.

        Returns:
            The shortcut module for the requested variant.
        """
        if residual_path == 'conv':
            # Fixed keyword: the original passed `strides=1`, but every other
            # FeatureExtractor call in this file uses `stride` (keyword or
            # positional) for this argument.
            shortcut = FeatureExtractor(in_ch, out_ch, 1, padding='same', stride=1)
        elif residual_path == 'pool':
            shortcut = MaxPool2D(pool_size, stride=2, padding='same')
        else:
            shortcut = Equ()
        return shortcut
# Beispiel #7 (Example #7 — separator left over from snippet scraping)
# 0
    def __init__(self, in_ch, out_ch):
        """Inception-v4-style reduction block B: four parallel branches.

            branch 0: Conv1x1(256) -> Conv3x3(384, stride 2)
            branch 1: Conv1x1(256) -> Conv3x3(288, stride 2)
            branch 2: Conv1x1(256) -> Conv3x3(288) -> Conv3x3(320, stride 2)
            branch 3: MaxPool 3x3, stride 2 ('same' padding)

            NOTE(review): the original docstring described the branches as
            c1-s2 / c3-s2 / c5-s2 / pool-s2 (with a trailing Conv1 on the
            pool branch and a "prob-s2" term in the concat) — that does not
            match the layers built below. Presumably the branch outputs are
            concatenated on the channel axis by the base class's forward;
            confirm against the base class.

        Args:
            in_ch: number of input channels fed to every branch.
            out_ch: forwarded to the base-class constructor; its use is
                defined outside this block.

        Returns:
            None
        """
        super(ReductionBlock_v4B, self).__init__(out_ch)
        self.inc_list = [
            nn.Sequential(
                ConvSameBnRelu2D(in_ch, 256, 1),
                ConvSameBnRelu2D(256, 384, 3, stride=2)),
            nn.Sequential(
                ConvSameBnRelu2D(in_ch, 256, 1),
                ConvSameBnRelu2D(256, 288, 3, stride=2)),
            nn.Sequential(
                ConvSameBnRelu2D(in_ch, 256, 1),
                ConvSameBnRelu2D(256, 288, 3),
                FeatureExtractor(288, 320, 3, stride=2)),
            # Pooling-only branch; unlike the others it keeps the input
            # channel count.
            nn.Sequential(MaxPool2D(3, stride=2, padding='same')),
        ]
        self.addLayers(self.inc_list)