def __init__(self, in_planes, out_planes, expansion_factor, stride=1):
    """Build one MobileNetV2 inverted-residual block.

    Expands the input channels by ``expansion_factor``, applies a
    depthwise 3x3 convolution (groups == hidden channels), then
    projects back down with a 1x1 convolution without activation.

    Args:
        in_planes: number of input channels.
        out_planes: number of output channels.
        expansion_factor: channel multiplier for the hidden layer.
        stride: stride of the depthwise convolution (default 1).
    """
    super(InvertedResidualBlock, self).__init__()
    hidden_planes = in_planes * expansion_factor
    # A skip connection is only valid when the block keeps both the
    # spatial resolution (stride 1) and the channel count unchanged.
    self.residual = (stride == 1) and (in_planes == out_planes)
    stages = [
        convbnrelu(in_planes, hidden_planes, 1),
        # Depthwise conv: one group per hidden channel.
        convbnrelu(hidden_planes, hidden_planes, 3,
                   stride=stride, groups=hidden_planes),
        # Linear bottleneck: no activation on the projection.
        convbnrelu(hidden_planes, out_planes, 1, act=False),
    ]
    self.output = nn.Sequential(*stages)
def __init__(self, num_classes):
    """Construct the MobileNetV2 encoder plus Light-Weight RefineNet decoder.

    The encoder stem (``layer1``) is followed by one stage per entry of
    ``self.mobilenet_config`` (registered as ``layer2`` ... ``layerN``).
    The decoder consists of 1x1 channel-reduction convs, four CRP
    (chained residual pooling) blocks, adaptation convs, and a final
    3x3 segmentation head producing ``num_classes`` channels.

    Args:
        num_classes: number of output segmentation classes.

    NOTE(review): assumes ``self.in_planes`` and ``self.mobilenet_config``
    are class attributes defined elsewhere in this file — confirm.
    """
    super(MBv2, self).__init__()
    # Stem: 3-channel input, stride-2 3x3 conv.
    self.layer1 = convbnrelu(3, self.in_planes, kernel_size=3, stride=2)
    # Each config row is (expansion t, out channels c, repeats n, stride s).
    for c_layer, (t, c, n, s) in enumerate(self.mobilenet_config, start=2):
        stage = []
        for idx in range(n):
            # Only the first block of a stage may downsample.
            block_stride = s if idx == 0 else 1
            stage.append(InvertedResidualBlock(
                self.in_planes, c, expansion_factor=t, stride=block_stride))
            self.in_planes = c
        setattr(self, 'layer{}'.format(c_layer), nn.Sequential(*stage))

    ## Light-Weight RefineNet ##
    # Reduce each encoder tap to 256 channels; registration order
    # (conv8 down to conv3) matches the original so module iteration
    # order — and hence any RNG-based weight init — is unchanged.
    for tag, channels in zip(range(8, 2, -1), (320, 160, 96, 64, 32, 24)):
        setattr(self, 'conv{}'.format(tag), conv1x1(channels, 256, bias=False))
    # Four CRP blocks, registered crp4 .. crp1 as in the original.
    for tag in range(4, 0, -1):
        setattr(self, 'crp{}'.format(tag), self._make_crp(256, 256, 4))
    # Adaptation convs before upsampling, registered conv_adapt4 .. conv_adapt2.
    for tag in range(4, 1, -1):
        setattr(self, 'conv_adapt{}'.format(tag), conv1x1(256, 256, bias=False))

    # Final segmentation head.
    self.segm = conv3x3(256, num_classes, bias=True)
    self.relu = nn.ReLU6(inplace=True)
    self._initialize_weights()