def __init__(self):
    super(Conv4_BinAct, self).__init__()
    builder = get_builder()
    self.convs = nn.Sequential(
        builder.conv3x3(3, 64, first_layer=True),
        builder.batchnorm(64),
        BiRealAct(),
        builder.conv3x3(64, 64),
        nn.MaxPool2d((2, 2)),
        builder.batchnorm(64),
        BiRealAct(),
        builder.conv3x3(64, 128),
        builder.batchnorm(128),
        BiRealAct(),
        builder.conv3x3(128, 128),
        nn.MaxPool2d((2, 2)),
        builder.batchnorm(128),
        BiRealAct(),
    )
    self.linear = nn.Sequential(
        builder.conv1x1(32 * 32 * 8, 256),
        builder.batchnorm(256),
        BiRealAct(),
        builder.conv1x1(256, 256),
        builder.batchnorm(256),
        BiRealAct(),
        builder.conv1x1(256, 10),
    )
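# BiRealAct is referenced throughout these models but not defined in this excerpt.
# A minimal sketch, assuming it follows the Bi-Real Net binary activation: sign()
# in the forward pass and the piecewise-quadratic ApproxSign surrogate gradient
# in the backward pass. This is a hypothetical reconstruction, not the repo's code.
import torch
import torch.nn as nn


class _ApproxSign(torch.autograd.Function):
    """Sign forward; piecewise-quadratic surrogate gradient (Bi-Real Net style)."""

    @staticmethod
    def forward(ctx, x):
        ctx.save_for_backward(x)
        return torch.sign(x)

    @staticmethod
    def backward(ctx, grad_output):
        (x,) = ctx.saved_tensors
        # d/dx ApproxSign(x): 2 + 2x on [-1, 0), 2 - 2x on [0, 1), 0 elsewhere
        grad = torch.where(x < 0, 2 + 2 * x, 2 - 2 * x)
        grad = torch.where(x.abs() < 1, grad, torch.zeros_like(x))
        return grad_output * grad


class BiRealAct(nn.Module):  # hypothetical reconstruction for illustration only
    def forward(self, x):
        return _ApproxSign.apply(x)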
def __init__(self):
    super(Conv8Wide, self).__init__()
    builder = get_builder()
    self.convs = nn.Sequential(
        builder.conv3x3(3, scale(64), first_layer=True),
        nn.ReLU(),
        builder.conv3x3(scale(64), scale(64)),
        nn.ReLU(),
        nn.MaxPool2d((2, 2)),
        builder.conv3x3(scale(64), scale(128)),
        nn.ReLU(),
        builder.conv3x3(scale(128), scale(128)),
        nn.ReLU(),
        nn.MaxPool2d((2, 2)),
        builder.conv3x3(scale(128), scale(256)),
        nn.ReLU(),
        builder.conv3x3(scale(256), scale(256)),
        nn.ReLU(),
        nn.MaxPool2d((2, 2)),
        builder.conv3x3(scale(256), scale(512)),
        nn.ReLU(),
        builder.conv3x3(scale(512), scale(512)),
        nn.ReLU(),
        nn.MaxPool2d((2, 2)),
    )
    self.linear = nn.Sequential(
        builder.conv1x1(scale(512) * 2 * 2, scale(256)),
        nn.ReLU(),
        builder.conv1x1(scale(256), scale(256)),
        nn.ReLU(),
        builder.conv1x1(scale(256), 10),
    )
def __init__(self):
    super(Conv4Wide_BinAct, self).__init__()
    builder = get_builder()
    self.convs = nn.Sequential(
        builder.conv3x3(3, scale(64), first_layer=True),
        builder.batchnorm(scale(64)),
        BiRealAct(),
        builder.conv3x3(scale(64), scale(64)),
        nn.MaxPool2d((2, 2)),
        builder.batchnorm(scale(64)),
        BiRealAct(),
        builder.conv3x3(scale(64), scale(128)),
        builder.batchnorm(scale(128)),
        BiRealAct(),
        builder.conv3x3(scale(128), scale(128)),
        nn.MaxPool2d((2, 2)),
        builder.batchnorm(scale(128)),
        BiRealAct(),
    )
    self.linear = nn.Sequential(
        builder.conv1x1(scale(128) * 8 * 8, scale(256)),
        builder.batchnorm(scale(256)),
        BiRealAct(),
        builder.conv1x1(scale(256), scale(256)),
        builder.batchnorm(scale(256)),
        BiRealAct(),
        builder.conv1x1(scale(256), 10),
    )
def __init__(self):
    super(VGG_Small_noReLU_BinAct, self).__init__()
    builder = get_builder()
    self.convs = nn.Sequential(
        builder.conv3x3(3, 128, first_layer=True),
        #builder.batchnorm(128),
        BiRealAct(),
        builder.conv3x3(128, 128),
        nn.MaxPool2d((2, 2)),
        builder.batchnorm(128),
        #nn.ReLU(),
        BiRealAct(),
        builder.conv3x3(128, 256),
        #builder.batchnorm(256),
        BiRealAct(),
        builder.conv3x3(256, 256),
        nn.MaxPool2d((2, 2)),
        builder.batchnorm(256),
        #nn.ReLU(),
        BiRealAct(),
        builder.conv3x3(256, 512),
        #builder.batchnorm(512),
        BiRealAct(),
        builder.conv3x3(512, 512),
        nn.MaxPool2d((2, 2)),
        builder.batchnorm(512),
        nn.ReLU(),
    )
    self.linear = nn.Sequential(
        builder.conv1x1(512 * 4 * 4, 10),
    )
def __init__(self):
    super(Wide_VGG_Small, self).__init__()
    builder = get_builder()
    self.convs = nn.Sequential(
        builder.conv3x3(3, scale(16), first_layer=True),
        builder.batchnorm(scale(16)),
        BiRealAct(),
        builder.conv3x3(scale(16), scale(16)),
        nn.MaxPool2d((2, 2)),
        builder.batchnorm(scale(16)),
        BiRealAct(),
        builder.conv3x3(scale(16), scale(32)),
        builder.batchnorm(scale(32)),
        BiRealAct(),
        builder.conv3x3(scale(32), scale(32)),
        nn.MaxPool2d((2, 2)),
        builder.batchnorm(scale(32)),
        BiRealAct(),
        builder.conv3x3(scale(32), scale(64)),
        builder.batchnorm(scale(64)),
        BiRealAct(),
        builder.conv3x3(scale(64), scale(64)),
        nn.MaxPool2d((2, 2)),
        builder.batchnorm(scale(64)),
        #BiRealAct(),
        nn.ReLU(),
    )
    self.linear = nn.Sequential(
        builder.conv1x1(scale(64) * 4 * 4, 10),
    )
def __init__(self):
    super(Conv8, self).__init__()
    builder = get_builder()
    self.convs = nn.Sequential(
        builder.conv3x3(3, 64, first_layer=True),
        nn.ReLU(),
        builder.conv3x3(64, 64),
        nn.ReLU(),
        nn.MaxPool2d((2, 2)),
        builder.conv3x3(64, 128),
        nn.ReLU(),
        builder.conv3x3(128, 128),
        nn.ReLU(),
        nn.MaxPool2d((2, 2)),
        builder.conv3x3(128, 256),
        nn.ReLU(),
        builder.conv3x3(256, 256),
        nn.ReLU(),
        nn.MaxPool2d((2, 2)),
        builder.conv3x3(256, 512),
        nn.ReLU(),
        builder.conv3x3(512, 512),
        nn.ReLU(),
        nn.MaxPool2d((2, 2)),
    )
    self.linear = nn.Sequential(
        builder.conv1x1(512 * 2 * 2, 256),
        nn.ReLU(),
        builder.conv1x1(256, 256),
        nn.ReLU(),
        builder.conv1x1(256, 10),
    )
def vgg13_fc():
    r"""VGG 13-layer model (configuration "B")

    `"Very Deep Convolutional Networks For Large-Scale Image Recognition"
    <https://arxiv.org/pdf/1409.1556.pdf>`_
    """
    return _vgg('B', False, get_builder())
def vgg11_bn_new_fc():
    r"""VGG 11-layer model (configuration "A") with batch normalization

    `"Very Deep Convolutional Networks For Large-Scale Image Recognition"
    <https://arxiv.org/pdf/1409.1556.pdf>`_
    """
    return _vgg('A', True, get_builder())
def __init__(self):
    super(FC, self).__init__()
    builder = get_builder()
    self.linear = nn.Sequential(
        builder.conv1x1(28 * 28, 300, first_layer=True),
        nn.ReLU(),
        builder.conv1x1(300, 100),
        nn.ReLU(),
        builder.conv1x1(100, 10),
    )
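# The FC model (and the conv nets above) build their classifier heads out of
# builder.conv1x1 layers rather than nn.Linear, which only works if the features
# are reshaped to (N, C, 1, 1) before self.linear. A minimal forward() sketch
# under that assumption; the real forward() is not shown in this excerpt:
def forward(self, x):
    out = x.view(x.size(0), 28 * 28, 1, 1)  # fold a 1x28x28 MNIST image into channels
    out = self.linear(out)
    return out.flatten(1)                   # (N, 10, 1, 1) -> (N, 10) logits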
def __init__(self, block, num_blocks, num_classes):
    super(ResNet, self).__init__()
    self.builder = get_builder()
    _outputs = [32, 64, 128]
    self.in_planes = _outputs[0]

    self.conv1 = self.builder.conv3x3(3, 32, stride=1, first_layer=True)
    self.bn = nn.BatchNorm2d(_outputs[0])
    self.layer1 = self._make_layer(block, 32, num_blocks[0], stride=1)
    self.layer2 = self._make_layer(block, 64, num_blocks[1], stride=2)
    self.layer3 = self._make_layer(block, 128, num_blocks[2], stride=2)
    self.linear = self.builder.conv1x1(128, num_classes)
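# self._make_layer is called above but not included in this excerpt. A sketch of
# the usual CIFAR ResNet pattern, assuming block(builder, in_planes, planes, stride)
# and a block.expansion attribute (both are assumptions about this repo's blocks):
def _make_layer(self, block, planes, num_blocks, stride):
    strides = [stride] + [1] * (num_blocks - 1)  # downsample only in the first block
    layers = []
    for s in strides:
        layers.append(block(self.builder, self.in_planes, planes, s))
        self.in_planes = planes * block.expansion
    return nn.Sequential(*layers)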
def resnet50():
    block_cfg = [64, 128, 256, 512]
    layer_cfg = [
        64, 64, 64, 64, 64, 64,
        128, 128, 128, 128, 128, 128, 128, 128,
        256, 256, 256, 256, 256, 256, 256, 256, 256, 256, 256, 256,
        512, 512, 512, 512, 512, 512,
    ]
    return ResNet(get_builder(), Bottleneck, [3, 4, 6, 3],
                  block_cfg=block_cfg, layer_cfg=layer_cfg, num_classes=1000)
def __init__(self):
    super(Conv2, self).__init__()
    builder = get_builder()
    self.convs = nn.Sequential(
        builder.conv3x3(3, 64, first_layer=True),
        nn.ReLU(),
        builder.conv3x3(64, 64),
        nn.ReLU(),
        nn.MaxPool2d((2, 2)),
    )
    self.linear = nn.Sequential(
        builder.conv1x1(64 * 16 * 16, 256),
        nn.ReLU(),
        builder.conv1x1(256, 256),
        nn.ReLU(),
        builder.conv1x1(256, 10),
    )
def mobilenet_v3_small(**kwargs):
    """
    Constructs a MobileNetV3-Small model
    """
    cfgs = [
        # k, t, c, SE, HS, s
        [3, 1,    16, 1, 0, 2],
        [3, 4.5,  24, 0, 0, 2],
        [3, 3.67, 24, 0, 0, 1],
        [5, 4,    40, 1, 1, 2],
        [5, 6,    40, 1, 1, 1],
        [5, 6,    40, 1, 1, 1],
        [5, 3,    48, 1, 1, 1],
        [5, 3,    48, 1, 1, 1],
        [5, 6,    96, 1, 1, 2],
        [5, 6,    96, 1, 1, 1],
        [5, 6,    96, 1, 1, 1],
    ]
    return MobileNetV3(get_builder(), cfgs, mode='small', **kwargs)
def mobilenet_v3_large(**kwargs):
    """
    Constructs a MobileNetV3-Large model
    """
    cfgs = [
        # k, t, c, SE, HS, s
        [3, 1,   16,  0, 0, 1],
        [3, 4,   24,  0, 0, 2],
        [3, 3,   24,  0, 0, 1],
        [5, 3,   40,  1, 0, 2],
        [5, 3,   40,  1, 0, 1],
        [5, 3,   40,  1, 0, 1],
        [3, 6,   80,  0, 1, 2],
        [3, 2.5, 80,  0, 1, 1],
        [3, 2.3, 80,  0, 1, 1],
        [3, 2.3, 80,  0, 1, 1],
        [3, 6,   112, 1, 1, 1],
        [3, 6,   112, 1, 1, 1],
        [5, 6,   160, 1, 1, 2],
        [5, 6,   160, 1, 1, 1],
        [5, 6,   160, 1, 1, 1],
    ]
    return MobileNetV3(get_builder(), cfgs, mode='large', **kwargs)
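# Each cfgs row is (kernel size k, expansion ratio t, output channels c,
# squeeze-and-excite flag SE, h-swish flag HS, stride s). A small standalone
# helper showing how the expansion ratio is typically turned into a hidden
# width, assuming the usual MobileNetV3 rule of rounding inp * t to a multiple
# of 8 (simplified here; the repo's own helper is not shown):
def _round8(v):
    return max(8, int(v + 4) // 8 * 8)  # simplified "make divisible by 8"


def describe_cfgs(cfgs, input_channels=16):
    """Print how each inverted-residual row expands and projects its channels."""
    for k, t, c, use_se, use_hs, s in cfgs:
        hidden = _round8(input_channels * t)
        print(f"k={k} s={s}: {input_channels} -> expand {hidden} -> project {c}"
              f"{' +SE' if use_se else ''}{' h-swish' if use_hs else ''}")
        input_channels = c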
def __init__(self):
    super(MobileNetV1, self).__init__()
    builder = get_builder()

    def conv_bn(inp, oup, stride):
        return nn.Sequential(
            builder.conv2d(inp, oup, 3, stride, 1, bias=False),
            builder.batchnorm(oup),
            nn.ReLU(inplace=True),
        )

    def conv_dw(inp, oup, stride):
        return nn.Sequential(
            builder.conv2d(inp, inp, 3, stride, 1, groups=inp, bias=False),
            builder.batchnorm(inp),
            nn.ReLU(inplace=True),

            builder.conv2d(inp, oup, 1, 1, 0, bias=False),
            builder.batchnorm(oup),
            nn.ReLU(inplace=True),
        )

    self.model = nn.Sequential(
        conv_bn(3, 32, 2),
        conv_dw(32, 64, 1),
        conv_dw(64, 128, 2),
        conv_dw(128, 128, 1),
        conv_dw(128, 256, 2),
        conv_dw(256, 256, 1),
        conv_dw(256, 512, 2),
        conv_dw(512, 512, 1),
        conv_dw(512, 512, 1),
        conv_dw(512, 512, 1),
        conv_dw(512, 512, 1),
        conv_dw(512, 512, 1),
        conv_dw(512, 1024, 2),
        conv_dw(1024, 1024, 1),
        nn.AvgPool2d(7),
    )
    self.fc = builder.conv1x1(1024, 1000)
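# conv_dw above is a depthwise separable convolution: a 3x3 depthwise conv
# (groups=inp) followed by a 1x1 pointwise conv. A quick parameter count,
# ignoring batchnorm, shows why it replaces a single dense 3x3 convolution:
def separable_vs_standard(inp, oup, k=3):
    standard = k * k * inp * oup         # one dense k x k convolution
    separable = k * k * inp + inp * oup  # depthwise + pointwise
    return standard, separable


std, sep = separable_vs_standard(512, 512)
print(std, sep, round(std / sep, 1))  # 2359296 vs 266752, roughly 8.8x fewer weights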
def __init__(self):
    super(Conv6_BNN, self).__init__()
    builder = get_builder()
    self.convs = nn.Sequential(
        builder.conv3x3(3, 128, first_layer=True),
        builder.batchnorm(128),
        BinAct(),
        builder.conv3x3(128, 128),
        nn.MaxPool2d((2, 2)),
        builder.batchnorm(128),
        BinAct(),
        builder.conv3x3(128, 256),
        builder.batchnorm(256),
        BinAct(),
        builder.conv3x3(256, 256),
        nn.MaxPool2d((2, 2)),
        builder.batchnorm(256),
        BinAct(),
        builder.conv3x3(256, 512),
        builder.batchnorm(512),
        BinAct(),
        builder.conv3x3(512, 512),
        nn.MaxPool2d((2, 2)),
        builder.batchnorm(512),
        BinAct(),
    )
    self.linear = nn.Sequential(
        builder.conv1x1(512 * 4 * 4, 1024),
        builder.batchnorm(1024),
        BinAct(),
        builder.conv1x1(1024, 1024),
        builder.batchnorm(1024),
        BinAct(),
        builder.conv1x1(1024, 10),
    )
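# BinAct is the BNN-style binary activation used by Conv6_BNN. Unlike the
# piecewise-quadratic BiRealAct surrogate sketched earlier, the classic
# straight-through estimator passes the gradient where |x| <= 1 and blocks it
# elsewhere. A minimal sketch under that assumption; the repo's definition is
# not shown in this excerpt.
import torch
import torch.nn as nn


class _SignSTE(torch.autograd.Function):
    @staticmethod
    def forward(ctx, x):
        ctx.save_for_backward(x)
        return torch.sign(x)

    @staticmethod
    def backward(ctx, grad_output):
        (x,) = ctx.saved_tensors
        return grad_output * (x.abs() <= 1).float()  # clipped straight-through


class BinAct(nn.Module):  # hypothetical reconstruction for illustration only
    def forward(self, x):
        return _SignSTE.apply(x)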
def cResNet34_BinAct():
    return ResNet_BinAct(get_builder(), BasicBlock_BinAct, [3, 4, 6, 3])


def cWideResNet18_3_BinAct():
    return WideResNet_BinAct(get_builder(), BasicBlock_BinAct, [2, 2, 2, 2], widen_factor=3)


def cResNet18_BinAct():
    return ResNet_BinAct(get_builder(), BasicBlock_BinAct, [2, 2, 2, 2])


def cResNet110():
    return SmallResNet(get_builder(), BasicBlock, [18, 18, 18])


def cResNet56_BinAct():
    return SmallResNet_BinAct(get_builder(), BasicBlock_BinAct, [9, 9, 9])


def cResNet32_BinAct():
    return SmallResNet_BinAct(get_builder(), BasicBlock_BinAct, [5, 5, 5])


def cResNet20():
    return SmallResNet(get_builder(), BasicBlock, [3, 3, 3])


def cWideResNeXt18_2_BinAct_small():
    return WideResNeXt_BinAct(get_builder(), Bottleneck2, [1, 2, 6, 2], [4, 4, 8, 8], widen_factor=2)


def cResNet20_BinAct():
    return SmallResNet_BinAct(get_builder(), BasicBlock_BinAct, [3, 3, 3])


def cResNet32():
    return SmallResNet(get_builder(), BasicBlock, [5, 5, 5])


def cResNet44_BinAct():
    return SmallResNet_BinAct(get_builder(), BasicBlock_BinAct, [7, 7, 7])


def cResNet44():
    return SmallResNet(get_builder(), BasicBlock, [7, 7, 7])


def cResNet110_BinAct():
    return SmallResNet_BinAct(get_builder(), BasicBlock_BinAct, [18, 18, 18])


def cResNet56():
    return SmallResNet(get_builder(), BasicBlock, [9, 9, 9])
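# Naming check for the SmallResNet factories above: with the standard CIFAR-style
# BasicBlock (two 3x3 convs per block), three stages of n blocks each plus the
# stem conv and the final classifier give a depth of 6n + 2, matching the names:
for name, n in [("cResNet20", 3), ("cResNet32", 5), ("cResNet44", 7),
                ("cResNet56", 9), ("cResNet110", 18)]:
    print(name, 6 * n + 2)  # 20, 32, 44, 56, 110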