def __init__(self, n_classes, n_blocks):
    """Build the ResNet8s trunk (stem + four residual stages) and a
    pixel-wise classifier head producing `n_classes` score maps.

    Args:
        n_classes: number of output channels of the final 1x1 conv.
        n_blocks: four-element sequence giving the residual-unit count
            of each stage.
    """
    super(ResNet8s, self).__init__()
    # Stem: 7x7 stride-2 conv followed by a stride-2 max pool.
    stem = nn.Sequential(
        OrderedDict([
            ('conv1', _ConvBatchNormReLU(3, 64, 7, 2, 3, 1)),
            ('pool', nn.MaxPool2d(3, 2, 1, ceil_mode=True)),
        ])
    )
    self.add_module('layer1', stem)
    # Residual stages as (n_units, in_ch, mid_ch, out_ch, stride, dilation);
    # registered in order as layer2..layer5.
    stage_cfgs = (
        (n_blocks[0], 64, 64, 256, 1, 1),
        (n_blocks[1], 256, 128, 512, 2, 1),
        (n_blocks[2], 512, 256, 1024, 1, 2),
        (n_blocks[3], 1024, 512, 2048, 1, 4),
    )
    for idx, cfg in enumerate(stage_cfgs, start=2):
        self.add_module('layer{}'.format(idx), _ResBlock(*cfg))
    # Head: 3x3 conv -> dropout -> 1x1 conv to per-class scores.
    self.classifier = nn.Sequential(
        OrderedDict([
            ('conv5_4', _ConvBatchNormReLU(2048, 512, 3, 1, 1, 1)),
            ('drop5_4', nn.Dropout2d(p=0.1)),
            ('conv6', nn.Conv2d(512, n_classes, 1, stride=1, padding=0)),
        ])
    )
def __init__(self, in_channels, pyramids=None):
    """Build parallel adaptive-average-pool + 1x1-conv branches
    (PSPNet-style pyramid pooling).

    Args:
        in_channels: channel count of the incoming feature map.
        pyramids: output spatial sizes of the pooling branches;
            defaults to [6, 3, 2, 1].
    """
    super(_PyramidPoolModule, self).__init__()
    # Avoid a mutable default argument; fall back to the canonical sizes.
    if pyramids is None:
        pyramids = [6, 3, 2, 1]
    # Each branch compresses channels so the concatenated output
    # (done by the caller) keeps a bounded total width.
    out_channels = in_channels // len(pyramids)
    self.stages = nn.Module()
    for i, p in enumerate(pyramids):
        self.stages.add_module(
            's{}'.format(i),
            nn.Sequential(
                OrderedDict([
                    ('pool', nn.AdaptiveAvgPool2d(output_size=p)),
                    ('conv', _ConvBatchNormReLU(in_channels, out_channels, 1, 1, 0, 1)),
                ])
            )
        )
def __init__(self, n_classes, n_blocks, pyramids, multi_grid=None):
    """Assemble DeepLab v3: ResNet trunk, multi-grid final stage, ASPP
    head, and a 1x1 classifier.

    Args:
        n_classes: number of output channels of the final 1x1 conv.
        n_blocks: four-element sequence of residual-unit counts per stage.
        pyramids: dilation rates handed to the ASPP module.
        multi_grid: per-unit dilation multipliers for the final residual
            stage; defaults to [1, 2, 1].
    """
    super(DeepLabV3, self).__init__()
    # Avoid a mutable default argument; fall back to the standard rates.
    if multi_grid is None:
        multi_grid = [1, 2, 1]
    # Stem: 7x7 stride-2 conv + stride-2 max pool.
    self.add_module(
        'layer1',
        nn.Sequential(
            OrderedDict([
                ('conv1', _ConvBatchNormReLU(3, 64, 7, 2, 3, 1)),
                ('pool', nn.MaxPool2d(3, 2, 1, ceil_mode=True)),
            ])))
    self.add_module('layer2', _ResBlock(n_blocks[0], 64, 64, 256, 1, 1))  # output_stride=4
    self.add_module('layer3', _ResBlock(n_blocks[1], 256, 128, 512, 2, 1))  # output_stride=8
    self.add_module('layer4', _ResBlock(n_blocks[2], 512, 256, 1024, 1, 2))  # output_stride=8
    # Final stage uses the multi-grid dilation pattern.
    self.add_module(
        'layer5', _ResBlockMG(n_blocks[3], 1024, 512, 2048, 1, 2, mg=multi_grid))
    self.add_module('aspp', _ASPPModule(2048, 256, pyramids))
    # fc1 input width: len(pyramids) atrous branches plus two extra
    # branches from the ASPP module, each 256 channels wide.
    self.add_module(
        'fc1', _ConvBatchNormReLU(256 * (len(pyramids) + 2), 256, 1, 1, 0, 1))
    self.add_module('fc2', nn.Conv2d(256, n_classes, kernel_size=1))
def __init__(self, n_classes, n_blocks, pyramids):
    """Build DeepLab v2: ResNet trunk with dilated final stages and an
    ASPP head that emits per-class score maps; batch-norm layers are
    frozen once construction finishes.

    Args:
        n_classes: output channel count of the ASPP head.
        n_blocks: four-element sequence of residual-unit counts per stage.
        pyramids: dilation rates handed to the ASPP module.
    """
    super(DeepLabV2, self).__init__()
    # Stem: 7x7 stride-2 conv followed by a stride-2 max pool.
    self.add_module(
        'layer1',
        nn.Sequential(
            OrderedDict([
                ('conv1', _ConvBatchNormReLU(3, 64, 7, 2, 3, 1)),
                ('pool', nn.MaxPool2d(3, 2, 1, ceil_mode=True)),
            ])))
    # Residual stages, registered in order; later stages trade stride
    # for dilation (see the constructor arguments below).
    res_cfg = (
        ('layer2', (n_blocks[0], 64, 64, 256, 1, 1)),
        ('layer3', (n_blocks[1], 256, 128, 512, 2, 1)),
        ('layer4', (n_blocks[2], 512, 256, 1024, 1, 2)),
        ('layer5', (n_blocks[3], 1024, 512, 2048, 1, 4)),
    )
    for tag, params in res_cfg:
        self.add_module(tag, _ResBlock(*params))
    self.add_module('aspp', _ASPPModule(2048, n_classes, pyramids))
    self._freeze_bn()
def __init__(self, n_classes, n_blocks, pyramids):
    """Build a DeepLab v2 variant whose ASPP head emits a fixed
    4-channel map that `upsample_1` then projects, normalizes, and
    resizes.

    Args:
        n_classes: kept for interface compatibility; the ASPP output
            width is hard-coded to 4 in this variant (see note below).
        n_blocks: four-element sequence of residual-unit counts per stage.
        pyramids: dilation rates handed to the ASPP module.
    """
    super(DeepLabV2, self).__init__()
    # Stem: 7x7 stride-2 conv followed by a stride-2 max pool.
    self.add_module(
        'layer1',
        nn.Sequential(
            OrderedDict([
                ('conv1', _ConvBatchNormReLU(3, 64, 7, 2, 3, 1)),
                ('pool', nn.MaxPool2d(3, 2, 1, ceil_mode=True)),
            ])))
    self.add_module('layer2', _ResBlock(n_blocks[0], 64, 64, 256, 1, 1))
    self.add_module('layer3', _ResBlock(n_blocks[1], 256, 128, 512, 2, 1))
    self.add_module('layer4', _ResBlock(n_blocks[2], 512, 256, 1024, 1, 2))
    self.add_module('layer5', _ResBlock(n_blocks[3], 1024, 512, 2048, 1, 4))
    # NOTE(review): the ASPP width was deliberately changed from
    # n_classes to 4 (commented-out original call removed as dead code);
    # confirm 4 matches what upsample_1 and downstream consumers expect.
    self.add_module('aspp', _ASPPModule(2048, 4, pyramids))
    # Project the 4-channel map, batch-normalize, then resize via the
    # project-local Interpolate module.
    self.upsample_1 = nn.Sequential(
        nn.Conv2d(4, 4, kernel_size=1, stride=1, padding=0),
        nn.BatchNorm2d(4),
        Interpolate(),
    )
    self.weight_init()