Example #1
0
 def __init__(self,
              in_channels,
              channels,
              groups,
              stride=1,
              se_reduction=4,
              drop_path=0.2):
     """Residual branch: norm -> 1x1 conv -> (pool) -> 3x3 group conv -> (SE) -> 1x1 conv."""
     super().__init__()
     layers = [Norm(in_channels)]
     layers.append(
         Conv2d(in_channels,
                channels,
                kernel_size=1,
                norm='default',
                act='default'))
     if stride != 1:
         # Downsampling is done by pooling rather than a strided conv.
         layers.append(Pool2d(3, 2))
     layers.append(
         Conv2d(channels,
                channels,
                kernel_size=3,
                groups=groups,
                norm='default',
                act='default'))
     if se_reduction:
         layers.append(SELayer(channels, se_reduction, groups))
     layers.append(Conv2d(channels, channels, kernel_size=1, norm='default'))
     if drop_path and stride == 1:
         # Stochastic depth only on non-downsampling blocks.
         layers.append(DropPath(drop_path))
     self.branch1 = Sequential(layers)
     self.branch2 = Shortcut(in_channels, channels, stride)
Example #2
0
    def __init__(self, in_channels, out_channels, kernel_size, stride,
                 expand_ratio, se_ratio, drop_connect):
        """Inverted-residual (MBConv-style) block: expand -> depthwise -> (SE) -> project."""
        super().__init__()
        # Squeeze-excite is active only for a ratio in (0, 1].
        self._has_se = se_ratio is not None and 0 < se_ratio <= 1

        channels = in_channels * expand_ratio

        if expand_ratio != 1:
            self.expand = Conv2d(in_channels, channels, 1, norm='def',
                                 act='def')
        else:
            self.expand = Identity()

        self.depthwise = Conv2d(channels, channels, kernel_size, stride,
                                groups=channels, padding='SAME',
                                norm='def', act='def')

        if self._has_se:
            # SE bottleneck width is derived from the block input width,
            # not the expanded width.
            self.se = SELayer(channels,
                              se_channels=int(in_channels * se_ratio),
                              min_se_channels=1)

        self.project = Conv2d(channels, out_channels, 1, norm='def')
        # Residual add is only valid when spatial size and channels match.
        self._use_residual = in_channels == out_channels and stride == 1
        if self._use_residual:
            if drop_connect:
                self.drop_connect = DropPath(drop_connect)
            else:
                self.drop_connect = Identity()
Example #3
0
    def __init__(self,
                 in_channels,
                 channels,
                 stride,
                 dropout=0,
                 drop_path=0,
                 avd=False,
                 start_block=False,
                 end_block=False,
                 exclude_bn0=False):
        """iResNet-style block with start/end-block norm placement variants."""
        super().__init__()
        out_channels = channels * self.expansion

        # Pre-activation is skipped on a stage's first block; the norm
        # alone may additionally be excluded via exclude_bn0.
        if not (start_block or exclude_bn0):
            self.bn0 = Norm(in_channels)
        if not start_block:
            self.act0 = Act()

        self.conv1 = Conv2d(in_channels,
                            out_channels,
                            kernel_size=3,
                            stride=stride)
        self.bn1 = Norm(out_channels)
        self.act1 = Act()
        self.dropout = Dropout(dropout) if dropout else Identity()
        self.conv2 = Conv2d(out_channels, out_channels, kernel_size=3)

        if start_block:
            self.bn2 = Norm(out_channels)

        self.drop_path = DropPath(drop_path) if drop_path else Identity()

        if end_block:
            self.bn2 = Norm(out_channels)
            self.act2 = Act()

        if stride == 1 and in_channels == out_channels:
            self.shortcut = Identity()
        else:
            # Downsample with max-pool and/or project channels with 1x1 conv.
            path = []
            if stride != 1:
                path.append(Pool2d(3, 2, type='max'))
            if in_channels != out_channels:
                path.append(
                    Conv2d(in_channels,
                           out_channels,
                           kernel_size=1,
                           norm='def'))
            self.shortcut = Sequential(path)
        self.start_block = start_block
        self.end_block = end_block
        self.exclude_bn0 = exclude_bn0
Example #4
0
 def __init__(self, in_channels, channels, groups, stride=1, radix=1, drop_path=0.2):
     """ResNeSt-style bottleneck branch with optional split-attention conv (radix > 0)."""
     super().__init__()
     out_channels = channels * self.expansion
     layers = [
         Norm(in_channels),
         Conv2d(in_channels, channels, kernel_size=1, norm='def', act='default'),
     ]
     if stride != 1:
         # Downsampling is done by pooling rather than a strided conv.
         layers.append(Pool2d(3, 2))
     if radix != 0:
         layers.append(SplAtConv2d(channels, channels, kernel_size=3, groups=groups, radix=radix))
     else:
         layers.append(Conv2d(channels, channels, kernel_size=3, groups=groups, norm='def', act='default'))
     layers.append(Conv2d(channels, out_channels, kernel_size=1, norm='def'))
     if drop_path and stride == 1:
         # Stochastic depth only on non-downsampling blocks.
         layers.append(DropPath(drop_path))
     self.branch1 = Sequential(layers)
     self.branch2 = Shortcut(in_channels, out_channels, stride)
Example #5
0
 def __init__(self, in_channels, out_channels, dropout, use_se, drop_path):
     """Pre-activation double-3x3 block with optional dropout, SE and stochastic depth."""
     stem = [
         Norm(in_channels),
         Act(),
         Conv2d(in_channels, out_channels, kernel_size=3),
         Norm(out_channels),
         Act(),
         Conv2d(out_channels, out_channels, kernel_size=3),
     ]
     if dropout:
         # Dropout goes between the second activation and the second conv.
         stem[5:5] = [Dropout(dropout)]
     if use_se:
         stem += [SELayer(out_channels, reduction=8)]
     if drop_path:
         stem += [DropPath(drop_path)]
     super().__init__(stem)
Example #6
0
 def __init__(self, C, stride, drop_path):
     """Mixed op holding one candidate operation per search-space primitive."""
     super().__init__()
     self.stride = stride
     self._ops = []
     for prim in get_primitives():
         base = OPS[prim](C, stride)
         needs_norm = 'pool' in prim
         if drop_path:
             op = Sequential([
                 base,
             ])
             if needs_norm:
                 # Parameter-free pooling ops get a norm so their outputs
                 # are on a comparable scale to the learned ops.
                 op.add(Norm(C))
             op.add(DropPath(drop_path))
         elif needs_norm:
             op = Sequential([base, Norm(C)])
         else:
             op = base
         self._ops.append(op)
Example #7
0
    def __init__(self,
                 d_model,
                 num_heads,
                 dff,
                 drop_rate=0,
                 attn_drop_rate=0,
                 activation='gelu',
                 drop_path=0,
                 **kwargs):
        """Pre-LN Transformer encoder layer: LN->MHA and LN->FFN sublayers.

        Args:
            d_model: model (embedding) dimension.
            num_heads: number of attention heads.
            dff: hidden width of the feed-forward network.
            drop_rate: dropout rate inside the FFN.
            attn_drop_rate: dropout rate on attention weights.
            activation: FFN activation name.
            drop_path: stochastic-depth rate; 0 disables it.
        """
        super().__init__(**kwargs)
        self.d_model = d_model
        self.num_heads = num_heads
        self.dff = dff
        self.drop_rate = drop_rate
        self.activation = activation

        self.ln1 = Norm(type='ln')
        self.mha = MultiHeadAttention(d_model, num_heads, attn_drop_rate)

        self.ffn = FFN(d_model, dff, drop_rate, activation)
        self.ln2 = Norm(type='ln')

        # BUG FIX: the gate previously tested `drop_rate`, so drop_path was
        # silently ignored when drop_rate == 0 and a DropPath(0) was built
        # when drop_rate > 0 but drop_path == 0. Gate on drop_path itself.
        # (Also dropped the earlier dead store of the scalar drop_path,
        # which this assignment immediately overwrote.)
        self.drop_path = DropPath(drop_path) if drop_path else Identity()
Example #8
0
    def _compile(self, C, op_names, op_indices, indices, concat, reduction):
        """Instantiate this cell's ops from a parsed genotype description.

        Args:
            C: channel count used for every instantiated op.
            op_names: flat list of primitive names, one per edge.
            op_indices: group id per edge; consecutive equal ids are
                collected into one sub-list of `self._ops`/`self._indices`.
            indices: input-state index each edge reads from (presumably
                0/1 are the cell inputs; verify against the caller).
            concat: state indices concatenated to form the cell output.
            reduction: whether this is a reduction (stride-2) cell.
        """
        self._concat = concat
        self.multiplier = len(concat)

        self._ops = []
        self._indices = []
        # NOTE(review): assumes the first op_index differs from the sentinel
        # value 1 so that the first iteration appends a fresh sub-list;
        # otherwise self._ops[-1] below raises IndexError on the empty list
        # — confirm against the genotype format.
        prev_op_index = 1
        for name, op_index, index in zip(op_names, op_indices, indices):
            # Start a new group whenever the group id changes.
            if op_index != prev_op_index:
                self._ops.append([])
                self._indices.append([])

            # In a reduction cell, only edges reading states 0/1 are strided.
            stride = 2 if reduction and index < 2 else 1
            op = OPS[name](C, stride)

            # Wrap with stochastic depth, but never on identity (skip) edges.
            if self.drop_path and not isinstance(op, Identity):
                op = Sequential([
                    op,
                    DropPath(self.drop_path),
                ])

            self._ops[-1].append(op)
            self._indices[-1].append(index)
            prev_op_index = op_index
Example #9
0
    def __init__(self,
                 in_channels,
                 channels,
                 stride,
                 drop_path=0,
                 start_block=False,
                 end_block=False,
                 exclude_bn0=False,
                 se_reduction=4,
                 se_mode=0,
                 se_last=False,
                 base_width=26,
                 scale=4):
        """Res2Net-style bottleneck with SE, anti-aliased downsampling and
        iResNet start/end-block norm placement.

        Args:
            in_channels: block input channels.
            channels: base width before the Res2Net width computation.
            stride: spatial stride; 2 enables the anti-aliasing downsample.
            drop_path: stochastic-depth rate; 0 disables it.
            start_block/end_block/exclude_bn0: iResNet norm-placement flags.
            se_reduction: divisor for the SE bottleneck width.
            se_mode: SE variant selector forwarded to SELayer.
            se_last: place SE after the final 1x1 conv instead of before it.
            base_width/scale: Res2Net width-multiplier parameters.
        """
        super().__init__()
        self.se_last = se_last

        out_channels = channels * self.expansion
        # Res2Net width: `scale` groups of base_width-proportional channels.
        channels = math.floor(channels * (base_width / 64)) * scale

        if not start_block:
            if exclude_bn0:
                self.act0 = Act()
            else:
                self.norm_act0 = NormAct(in_channels)

        self.conv1 = Conv2d(in_channels,
                            channels,
                            kernel_size=1,
                            norm='def',
                            act='def')

        if start_block:
            self.conv2 = Res2Conv(channels,
                                  channels,
                                  kernel_size=3,
                                  stride=1,
                                  scale=scale,
                                  start_block=True,
                                  norm='def',
                                  act='def')
        else:
            self.conv2 = PPConv(channels, scale)
        # Blur-pool-style anti-aliasing handles the stride-2 downsample.
        self.aa = AntiAliasing(kernel_size=3,
                               stride=2) if stride == 2 else None

        if not self.se_last:
            self.se = SELayer(channels,
                              se_channels=out_channels // se_reduction,
                              mode=se_mode)

        self.conv3 = Conv2d(channels, out_channels, kernel_size=1)

        if start_block:
            self.bn3 = Norm(out_channels)

        if self.se_last:
            self.se = SELayer(out_channels,
                              se_channels=out_channels // se_reduction,
                              mode=se_mode)

        self.drop_path = DropPath(drop_path) if drop_path else Identity()

        if end_block:
            self.norm_act3 = NormAct(out_channels)

        self.shortcut = get_shortcut_vd(in_channels, out_channels, stride)

        # FIX: start_block was previously assigned both near the top and
        # here; a single assignment suffices.
        self.start_block = start_block
        self.end_block = end_block
        self.exclude_bn0 = exclude_bn0