def __init__(self, n_classes, n_blocks, pyramids, multi_grid=(1, 2, 1)):
    """Build the DeepLabV3+ network: ResNet backbone, ASPP head, decoder.

    Args:
        n_classes: number of output channels of the final 1x1 classifier.
        n_blocks: residual block counts for layers 2-5 (4 entries).
        pyramids: dilation rates handed to the ASPP module.
        multi_grid: per-block dilation multipliers for layer5.
            Changed from a list literal to a tuple: a mutable default
            argument is shared across all calls and can be silently
            mutated; an immutable tuple default is safe and
            backward-compatible.
    """
    super(DeepLabV3Plus, self).__init__()
    # Stem: 7x7 stride-2 conv + stride-2 max-pool.
    self.add_module(
        'layer1',
        nn.Sequential(
            OrderedDict([
                ('conv1', _ConvBatchNormReLU(3, 64, 7, 2, 3, 1)),
                ('pool', nn.MaxPool2d(3, 2, 1, ceil_mode=True)),
            ])))
    self.add_module('layer2', _ResBlock(n_blocks[0], 64, 64, 256, 1, 1))  # output_stride=4
    self.add_module('layer3', _ResBlock(n_blocks[1], 256, 128, 512, 2, 1))  # output_stride=8
    self.add_module('layer4', _ResBlock(n_blocks[2], 512, 256, 1024, 1, 2))  # output_stride=8
    # Final stage uses multi-grid dilations (mg) instead of a plain block.
    self.add_module(
        'layer5',
        _ResBlockMG(n_blocks[3], 1024, 512, 2048, 1, 2, mg=multi_grid))
    self.add_module('aspp', _ASPPModule(2048, 256, pyramids))
    # fc1 fuses the concatenated ASPP outputs: 256 channels per branch,
    # len(pyramids) + 2 branches (presumably the rates plus 1x1 conv and
    # image pooling — confirm against _ASPPModule).
    self.add_module(
        'fc1',
        _ConvBatchNormReLU(256 * (len(pyramids) + 2), 256, 1, 1, 0, 1))
    # Decoder: project 512-channel low-level features down to 48 channels.
    # NOTE(review): 512 in-channels suggests these come from layer3 —
    # verify in forward().
    self.add_module('reduce', _ConvBatchNormReLU(512, 48, 1, 1, 0, 1))
    # Decoder head; 304 = 256 (ASPP path) + 48 (reduced low-level path).
    self.add_module(
        'fc2',
        nn.Sequential(
            OrderedDict([
                ('conv1', _ConvBatchNormReLU(304, 256, 3, 1, 1, 1)),
                ('conv2', _ConvBatchNormReLU(256, 256, 3, 1, 1, 1)),
                ('conv3', nn.Conv2d(256, n_classes, kernel_size=1)),
            ])))
def __init__(self, n_classes, n_blocks, pyramids, freeze_bn):
    """Assemble the DeepLabV2 backbone plus the local contextual branch.

    Args:
        n_classes: channel count produced by the ASPP head and the
            contextual mu/sigma outputs.
        n_blocks: residual block counts for layers 2-5 (4 entries).
        pyramids: dilation rates for the ASPP module.
        freeze_bn: when truthy, self.freeze_bn() is invoked at the end.
    """
    super(DeepLabV2_local, self).__init__()

    # Stem: 7x7 stride-2 conv followed by a stride-2 ceil-mode max-pool.
    stem = nn.Sequential(
        OrderedDict([
            ("conv1", _ConvBatchNormReLU(3, 64, 7, 2, 3, 1)),
            ("pool", nn.MaxPool2d(3, 2, 1, ceil_mode=True)),
        ]))
    self.add_module("layer1", stem)

    # Four residual stages; the last two trade stride for dilation.
    self.add_module("layer2", _ResBlock(n_blocks[0], 64, 64, 256, 1, 1))
    self.add_module("layer3", _ResBlock(n_blocks[1], 256, 128, 512, 2, 1))
    self.add_module("layer4", _ResBlock(n_blocks[2], 512, 256, 1024, 1, 2))
    self.add_module("layer5", _ResBlock(n_blocks[3], 1024, 512, 2048, 1, 4))

    # Multi-rate ASPP classifier head.
    self.add_module("aspp", _ASPPModule(2048, n_classes, pyramids))

    # Contextual branch: three conv-ReLU stages with growing dilation.
    self.add_module("contextual1", _ConvReLU_(n_classes, 256, 3, 1, 1, 1))
    self.add_module("contextual2", _ConvReLU_(256, 256, 3, 1, 2, 2))
    self.add_module("contextual3", _ConvReLU_(256, 256, 3, 1, 5, 5))

    # Fusion layers over the concatenated 3 x 256 contextual features.
    self.add_module("fc_1", _ConvReLU_(3 * 256, 3, 3, 1, 1, 1))
    self.add_module("contextualpool",
                    _ConvReLU_(3 * 256, n_classes, 1, 1, 1, 1))

    # Local distribution heads: mu has no ReLU, sigma keeps it.
    self.add_module(
        "contextuallocalmu",
        _ConvReLU_(n_classes, n_classes, 3, 1, 1, 1, relu=False))
    self.add_module("contextuallocalsigma",
                    _ConvReLU_(n_classes, n_classes, 3, 1, 1, 1))

    if freeze_bn:
        self.freeze_bn()
def __init__(self, n_blocks):
    """Build a dilated fully-convolutional ResNet trunk.

    Args:
        n_blocks: residual block counts for layers 2-5 (4 entries).
    """
    super(_DilatedFCN, self).__init__()

    # Stem: three 3x3 convs (first one stride-2) plus a stride-2 max-pool.
    stem_layers = [
        ('conv1', _ConvBatchNormReLU(3, 64, 3, 2, 1, 1)),
        ('conv2', _ConvBatchNormReLU(64, 64, 3, 1, 1, 1)),
        ('conv3', _ConvBatchNormReLU(64, 128, 3, 1, 1, 1)),
        ('pool', nn.MaxPool2d(3, 2, 1)),
    ]
    self.layer1 = nn.Sequential(OrderedDict(stem_layers))

    # Residual stages; the last two use dilation (2, 4) instead of stride.
    self.layer2 = _ResBlock(n_blocks[0], 128, 64, 256, 1, 1)
    self.layer3 = _ResBlock(n_blocks[1], 256, 128, 512, 2, 1)
    self.layer4 = _ResBlock(n_blocks[2], 512, 256, 1024, 1, 2)
    self.layer5 = _ResBlock(n_blocks[3], 1024, 512, 2048, 1, 4)
def __init__(self, n_classes, n_blocks, pyramids):
    """Build DeepLabV2 (ResNet backbone + ASPP) with frozen batch norm.

    Args:
        n_classes: output channels of the ASPP classifier.
        n_blocks: residual block counts for layers 2-5 (4 entries).
        pyramids: dilation rates for the ASPP module.
    """
    super(DeepLabV2, self).__init__()

    # Stem: 7x7 stride-2 conv plus ceil-mode stride-2 max-pool.
    self.add_module(
        'layer1',
        nn.Sequential(
            OrderedDict([
                ('conv1', _ConvBatchNormReLU(3, 64, 7, 2, 3, 1)),
                ('pool', nn.MaxPool2d(3, 2, 1, ceil_mode=True)),
            ])))

    # Residual stages; dilation replaces stride in the last two stages.
    self.add_module('layer2', _ResBlock(n_blocks[0], 64, 64, 256, 1, 1))
    self.add_module('layer3', _ResBlock(n_blocks[1], 256, 128, 512, 2, 1))
    self.add_module('layer4', _ResBlock(n_blocks[2], 512, 256, 1024, 1, 2))
    self.add_module('layer5', _ResBlock(n_blocks[3], 1024, 512, 2048, 1, 4))  # NOQA

    # ASPP classifier head.
    self.add_module('aspp', _ASPPModule(2048, n_classes, pyramids))

    # Batch-norm layers are frozen unconditionally at construction time.
    self._freeze_bn()
def __init__(self, n_classes, n_blocks):
    """Build a ResNet-based FCN with a dropout + 1x1-conv classifier.

    Args:
        n_classes: output channels of the final classifier conv.
        n_blocks: residual block counts for layers 2-5 (4 entries).
    """
    super(ResNet8s, self).__init__()

    # Stem: 7x7 stride-2 conv plus ceil-mode stride-2 max-pool.
    stem = nn.Sequential(
        OrderedDict([
            ('conv1', _ConvBatchNormReLU(3, 64, 7, 2, 3, 1)),
            ('pool', nn.MaxPool2d(3, 2, 1, ceil_mode=True)),
        ]))
    self.add_module('layer1', stem)

    # Residual stages; the last two stages dilate instead of striding.
    self.add_module('layer2', _ResBlock(n_blocks[0], 64, 64, 256, 1, 1))
    self.add_module('layer3', _ResBlock(n_blocks[1], 256, 128, 512, 2, 1))
    self.add_module('layer4', _ResBlock(n_blocks[2], 512, 256, 1024, 1, 2))
    self.add_module('layer5', _ResBlock(n_blocks[3], 1024, 512, 2048, 1, 4))

    # Classifier: 3x3 conv -> dropout -> 1x1 conv to n_classes.
    head = [
        ('conv5_4', _ConvBatchNormReLU(2048, 512, 3, 1, 1, 1)),
        ('drop5_4', nn.Dropout2d(p=0.1)),
        ('conv6', nn.Conv2d(512, n_classes, 1, stride=1, padding=0)),
    ]
    self.classifier = nn.Sequential(OrderedDict(head))
def __init__(self, n_classes, n_blocks, pyramids):
    """Build a DeepLabV2 variant with a fixed 4-channel ASPP + upsampler.

    Args:
        n_classes: accepted for interface compatibility but NOT used by
            the ASPP head here (see the review note below).
        n_blocks: residual block counts for layers 2-5 (4 entries).
        pyramids: dilation rates for the ASPP module.
    """
    super(DeepLabV2, self).__init__()

    # Stem: 7x7 stride-2 conv plus ceil-mode stride-2 max-pool.
    self.add_module(
        'layer1',
        nn.Sequential(
            OrderedDict([
                ('conv1', _ConvBatchNormReLU(3, 64, 7, 2, 3, 1)),
                ('pool', nn.MaxPool2d(3, 2, 1, ceil_mode=True)),
            ])))

    # Residual stages; the last two dilate (2, 4) instead of striding.
    self.add_module('layer2', _ResBlock(n_blocks[0], 64, 64, 256, 1, 1))
    self.add_module('layer3', _ResBlock(n_blocks[1], 256, 128, 512, 2, 1))
    self.add_module('layer4', _ResBlock(n_blocks[2], 512, 256, 1024, 1, 2))
    self.add_module('layer5', _ResBlock(n_blocks[3], 1024, 512, 2048, 1, 4))

    # NOTE(review): the ASPP output is hard-coded to 4 channels instead of
    # n_classes (the original n_classes-based line was commented out) —
    # confirm this is intentional before reusing with other class counts.
    self.add_module('aspp', _ASPPModule(2048, 4, pyramids))

    # 1x1 conv + batch norm + interpolation back toward input resolution.
    self.upsample_1 = nn.Sequential(
        nn.Conv2d(4, 4, kernel_size=1, stride=1, padding=0),
        nn.BatchNorm2d(4),
        Interpolate(),
    )

    self.weight_init()