Example #1
 def __init__(self, w_in, head_channels, head_acts, nc):
     super(MBHead, self).__init__()
     # One activation is required per head stage.
     assert len(head_channels) == len(head_acts)
     self.conv = nn.Conv2d(w_in,
                           head_channels[0],
                           1,
                           stride=1,
                           padding=0,
                           bias=False)
     self.conv_bn = nn.BatchNorm2d(head_channels[0],
                                   eps=cfg.BN.EPS,
                                   momentum=cfg.BN.MOM)
     self.conv_act = build_activation(head_acts[0])
     self.avg_pool = nn.AdaptiveAvgPool2d((1, 1))
     _head_acts = head_acts[1:]
     _head_channels = head_channels[1:]
     self.linears = []
     pre_w = head_channels[0]
     for i, act in enumerate(_head_acts):
         self.linears.append(nn.Linear(pre_w, _head_channels[i]))
         # self.linears.append(nn.BatchNorm1d(_head_channels[i]))
         self.linears.append(build_activation(act))
         pre_w = _head_channels[i]
     if len(self.linears) > 0:
         self.linears = nn.Sequential(*self.linears)
     if cfg.MB.DROPOUT_RATIO > 0.0:
         self.dropout = nn.Dropout(p=cfg.MB.DROPOUT_RATIO)
     self.fc = nn.Linear(head_channels[-1], nc, bias=True)
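Only the constructor is shown. A minimal forward pass, assuming the usual ordering (conv, BN, activation, global pool, flatten, optional linear stack, dropout, classifier) and guarding the members the constructor creates conditionally, might look like:

 def forward(self, x):
     # Hypothetical forward; the example above only shows __init__.
     x = self.conv_act(self.conv_bn(self.conv(x)))
     x = self.avg_pool(x)
     x = x.view(x.size(0), -1)      # flatten to (N, C)
     if len(self.linears) > 0:      # optional linear/activation stack
         x = self.linears(x)
     if hasattr(self, 'dropout'):   # only built when cfg.MB.DROPOUT_RATIO > 0
         x = self.dropout(x)
     return self.fc(x)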
Example #2
 def __init__(self, in_channel, expand_ratio, kernel_size, stride, act_func,
              se, out_channel):
     # expansion, 3x3 dwise, BN, Swish, SE, 1x1, BN, skip_connection
     super(MBConv, self).__init__()
     middle_channel = int(in_channel * expand_ratio)
     middle_channel = make_divisible(middle_channel, 8)
     if middle_channel != in_channel:
         self.expand = True
         self.inverted_bottleneck_conv = nn.Conv2d(in_channel,
                                                   middle_channel,
                                                   1,
                                                   stride=1,
                                                   padding=0,
                                                   bias=False)
         self.inverted_bottleneck_bn = nn.BatchNorm2d(middle_channel,
                                                      eps=cfg.BN.EPS,
                                                      momentum=cfg.BN.MOM)
         self.inverted_bottleneck_act = build_activation(act_func)
     else:
         self.expand = False
     self.depth_conv = nn.Conv2d(middle_channel,
                                 middle_channel,
                                 kernel_size,
                                 stride=stride,
                                 groups=middle_channel,
                                 padding=get_same_padding(kernel_size),
                                 bias=False)
     self.depth_bn = nn.BatchNorm2d(middle_channel,
                                    eps=cfg.BN.EPS,
                                    momentum=cfg.BN.MOM)
     self.depth_act = build_activation(act_func)
     if se > 0:
         self.depth_se = SEModule(middle_channel, se)
     self.point_linear_conv = nn.Conv2d(middle_channel,
                                        out_channel,
                                        1,
                                        stride=1,
                                        padding=0,
                                        bias=False)
     self.point_linear_bn = nn.BatchNorm2d(out_channel,
                                           eps=cfg.BN.EPS,
                                           momentum=cfg.BN.MOM)
     # Skip connection if in and out shapes are the same (MN-V2 style)
     self.has_skip = stride == 1 and in_channel == out_channel
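The comment at the top of this example spells out the block order (expansion, 3x3 depthwise, BN, activation, SE, 1x1 projection, BN, skip connection). A sketch of the corresponding forward pass, assuming hasattr is an acceptable guard for the optional SE module:

 def forward(self, x):
     # Sketch of the MBConv forward; the example only shows __init__.
     out = x
     if self.expand:
         out = self.inverted_bottleneck_act(
             self.inverted_bottleneck_bn(self.inverted_bottleneck_conv(out)))
     out = self.depth_act(self.depth_bn(self.depth_conv(out)))
     if hasattr(self, 'depth_se'):   # SE block exists only when se > 0
         out = self.depth_se(out)
     out = self.point_linear_bn(self.point_linear_conv(out))
     if self.has_skip:               # identity shortcut (MobileNetV2 style)
         out = out + x
     return out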
Example #3
 def __init__(self, w_in, w_out, kernel_size, stride, padding, conv_act):
     super(ConvLayer, self).__init__()
     self.conv = nn.Conv2d(w_in,
                           w_out,
                           kernel_size,
                           stride=stride,
                           padding=padding,
                           bias=False)
     self.bn = nn.BatchNorm2d(w_out, eps=cfg.BN.EPS, momentum=cfg.BN.MOM)
     self.conv_act = build_activation(conv_act)
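The matching forward pass is a straightforward conv -> BN -> activation chain (a sketch, since the example omits it):

 def forward(self, x):
     return self.conv_act(self.bn(self.conv(x)))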
Example #4
    def __init__(self, in_channels, out_channels,
                 kernel_size=3, stride=1, expand_ratio=6, mid_channels=None, act_func='relu6', use_se=False):
        super(MBInvertedConvLayer, self).__init__()

        self.in_channels = in_channels
        self.out_channels = out_channels

        self.kernel_size = kernel_size
        self.stride = stride
        self.expand_ratio = expand_ratio
        self.mid_channels = mid_channels
        self.act_func = act_func
        self.use_se = use_se

        if self.mid_channels is None:
            feature_dim = round(self.in_channels * self.expand_ratio)
        else:
            feature_dim = self.mid_channels

        if self.expand_ratio == 1:
            self.inverted_bottleneck = None
        else:
            self.inverted_bottleneck = nn.Sequential(OrderedDict([
                ('conv', nn.Conv2d(self.in_channels, feature_dim, 1, 1, 0, bias=False)),
                ('bn', nn.BatchNorm2d(feature_dim)),
                ('act', build_activation(self.act_func, inplace=True)),
            ]))

        pad = get_same_padding(self.kernel_size)
        depth_conv_modules = [
            ('conv', nn.Conv2d(feature_dim, feature_dim, kernel_size, stride, pad, groups=feature_dim, bias=False)),
            ('bn', nn.BatchNorm2d(feature_dim)),
            ('act', build_activation(self.act_func, inplace=True))
        ]
        if self.use_se:
            depth_conv_modules.append(('se', SEModule(feature_dim, reduction=0.25)))
        self.depth_conv = nn.Sequential(OrderedDict(depth_conv_modules))

        self.point_linear = nn.Sequential(OrderedDict([
            ('conv', nn.Conv2d(feature_dim, out_channels, 1, 1, 0, bias=False)),
            ('bn', nn.BatchNorm2d(out_channels)),
        ]))
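Several of these examples call make_divisible and get_same_padding without showing them. The usual MobileNet-style definitions look roughly like this; treat them as a sketch, not necessarily the exact versions these snippets import:

def make_divisible(v, divisor, min_val=None):
    # Round v to the nearest multiple of divisor, never dropping
    # below 90% of the original value (standard MobileNet rounding).
    if min_val is None:
        min_val = divisor
    new_v = max(min_val, int(v + divisor / 2) // divisor * divisor)
    if new_v < 0.9 * v:
        new_v += divisor
    return new_v


def get_same_padding(kernel_size):
    # 'Same' padding for an odd square kernel: pad by half the kernel size.
    assert kernel_size % 2 > 0, 'kernel size should be an odd number'
    return kernel_size // 2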
Example #5
 def __init__(self,
              in_features,
              out_features,
              act_func=None,
              dropout_rate=0,
              bias=True):
     super().__init__()  # needed so nn.Module registers the submodules below
     self.dropout_rate = dropout_rate
     self.linear = nn.Linear(in_features, out_features, bias=bias)
     self.act = build_activation(act_func) if act_func is not None else None
     if self.dropout_rate > 0:
         self.dropout = nn.Dropout(self.dropout_rate, inplace=True)
     else:
         self.dropout = None
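A plausible forward pass for this layer; the ordering (dropout, then linear, then activation) is an assumption, since the example omits it:

 def forward(self, x):
     if self.dropout is not None:
         x = self.dropout(x)
     x = self.linear(x)
     if self.act is not None:
         x = self.act(x)
     return x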
Example #6
    def __init__(self, in_features, out_features, bias=True,
                 use_bn=False, act_func=None, dropout_rate=0, ops_order='weight_bn_act'):
        super(LinearLayer, self).__init__()

        self.in_features = in_features
        self.out_features = out_features
        self.bias = bias

        self.use_bn = use_bn
        self.act_func = act_func
        self.dropout_rate = dropout_rate
        self.ops_order = ops_order

        """ modules """
        modules = {}
        # batch norm
        if self.use_bn:
            if self.bn_before_weight:
                modules['bn'] = nn.BatchNorm1d(in_features)
            else:
                modules['bn'] = nn.BatchNorm1d(out_features)
        else:
            modules['bn'] = None
        # activation
        modules['act'] = build_activation(self.act_func, self.ops_list[0] != 'act')
        # dropout
        if self.dropout_rate > 0:
            modules['dropout'] = nn.Dropout(self.dropout_rate, inplace=True)
        else:
            modules['dropout'] = None
        # linear
        modules['weight'] = {'linear': nn.Linear(self.in_features, self.out_features, self.bias)}

        # add modules
        for op in self.ops_list:
            if modules[op] is None:
                continue
            elif op == 'weight':
                if modules['dropout'] is not None:
                    self.add_module('dropout', modules['dropout'])
                for key in modules['weight']:
                    self.add_module(key, modules['weight'][key])
            else:
                self.add_module(op, modules[op])
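This constructor relies on self.ops_list and self.bn_before_weight, which are defined elsewhere in the class. Given the 'weight_bn_act' naming, they presumably derive from ops_order along these lines (a sketch):

    @property
    def ops_list(self):
        # 'weight_bn_act' -> ['weight', 'bn', 'act']
        return self.ops_order.split('_')

    @property
    def bn_before_weight(self):
        # True when 'bn' precedes 'weight' in the configured order.
        for op in self.ops_list:
            if op == 'bn':
                return True
            elif op == 'weight':
                return False
        raise ValueError('Invalid ops_order: %s' % self.ops_order)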
Example #7
    def __init__(self, in_channels, out_channels,
                 use_bn=True, act_func='relu', dropout_rate=0, ops_order='weight_bn_act'):
        super(My2DLayer, self).__init__()
        self.in_channels = in_channels
        self.out_channels = out_channels

        self.use_bn = use_bn
        self.act_func = act_func
        self.dropout_rate = dropout_rate
        self.ops_order = ops_order

        """ modules """
        modules = {}
        # batch norm
        if self.use_bn:
            if self.bn_before_weight:
                modules['bn'] = nn.BatchNorm2d(in_channels)
            else:
                modules['bn'] = nn.BatchNorm2d(out_channels)
        else:
            modules['bn'] = None
        # activation
        modules['act'] = build_activation(self.act_func, self.ops_list[0] != 'act')
        # dropout
        if self.dropout_rate > 0:
            modules['dropout'] = nn.Dropout2d(self.dropout_rate, inplace=True)
        else:
            modules['dropout'] = None
        # weight
        modules['weight'] = self.weight_op()

        # add modules
        for op in self.ops_list:
            if modules[op] is None:
                continue
            elif op == 'weight':
                # dropout before weight operation
                if modules['dropout'] is not None:
                    self.add_module('dropout', modules['dropout'])
                for key in modules['weight']:
                    self.add_module(key, modules['weight'][key])
            else:
                self.add_module(op, modules[op])
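My2DLayer defers its actual weight operation to a weight_op() hook that subclasses implement; the loop above shows it must return a name-to-module mapping. A hypothetical conv subclass hook, just to illustrate the contract:

    def weight_op(self):
        # Hypothetical example; the dict keys become submodule names.
        return OrderedDict([
            ('conv', nn.Conv2d(self.in_channels, self.out_channels,
                               3, stride=1, padding=1, bias=False)),
        ])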