def __init__(self,
             in_channels,
             feat_channels=128,
             out_channels=128,
             norm_cfg=dict(type='BN', requires_grad=True)):
    """Build the CenterPool layers: two pre-pool conv branches, four
    directional corner pools, a post-pool conv and a 1x1 shortcut conv.

    Args:
        in_channels (int): Channels of the input feature map.
        feat_channels (int): Channels used by the pooling branches.
        out_channels (int): Channels of the produced feature map.
        norm_cfg (dict): Normalization config forwarded to every
            ConvModule.
    """
    super(CenterPool, self).__init__()
    # 3x3 convs feeding each of the two pooling branches.
    self.direction1_conv = ConvModule(
        in_channels, feat_channels, 3, padding=1, norm_cfg=norm_cfg)
    self.direction2_conv = ConvModule(
        in_channels, feat_channels, 3, padding=1, norm_cfg=norm_cfg)
    # Conv applied after pooling; act_cfg=None presumably suppresses the
    # activation so later fusion sees pre-activation features — confirm
    # against ConvModule.
    self.aftpool_conv = ConvModule(
        feat_channels,
        out_channels,
        3,
        padding=1,
        norm_cfg=norm_cfg,
        act_cfg=None)
    # 1x1 projection and 3x3 output conv.
    self.conv1 = ConvModule(
        in_channels, out_channels, 1, norm_cfg=norm_cfg, act_cfg=None)
    self.conv2 = ConvModule(
        in_channels, out_channels, 3, padding=1, norm_cfg=norm_cfg)
    # One pool per direction; center pooling uses all four.
    self.left_pool = CornerPool('left')
    self.right_pool = CornerPool('right')
    self.top_pool = CornerPool('top')
    self.bottom_pool = CornerPool('bottom')
    self.relu = nn.ReLU(inplace=True)
def __init__(self,
             in_channels,
             direction,
             feat_channels=128,
             out_channels=128,
             norm_cfg=dict(type='BN', requires_grad=True)):
    """Build the CascadeCornerPool layers: two cascaded corner pools,
    each with a 'look' and a 'direct' conv branch, plus fusion convs.

    Args:
        in_channels (int): Channels of the input feature map.
        direction (Sequence[str]): Two pooling directions, e.g.
            ('top', 'left'); direction[0] drives pool1, direction[1]
            drives pool2.
        feat_channels (int): Channels used by the internal branches.
        out_channels (int): Channels of the produced feature map.
        norm_cfg (dict): Normalization config forwarded to every
            ConvModule.
    """
    super(CascadeCornerPool, self).__init__()
    self.pool1 = CornerPool(direction[0])
    self.pool2 = CornerPool(direction[1])
    # pool1 conv
    self.look1_conv = ConvModule(
        in_channels, feat_channels, 3, padding=1, norm_cfg=norm_cfg)
    self.direct1_conv = ConvModule(
        in_channels, feat_channels, 3, padding=1, norm_cfg=norm_cfg)
    self.aftconcat1_conv = ConvModule(
        feat_channels,
        feat_channels,
        3,
        padding=1,
        norm_cfg=norm_cfg,
        act_cfg=None)
    # pool2 conv
    self.look2_conv = ConvModule(
        in_channels, feat_channels, 3, padding=1, norm_cfg=norm_cfg)
    self.direct2_conv = ConvModule(
        in_channels, feat_channels, 3, padding=1, norm_cfg=norm_cfg)
    self.aftconcat2_conv = ConvModule(
        feat_channels,
        feat_channels,
        3,
        padding=1,
        norm_cfg=norm_cfg,
        act_cfg=None)
    # main and direct conv
    self.aftconcat_conv = ConvModule(
        feat_channels,
        feat_channels,
        3,
        padding=1,
        norm_cfg=norm_cfg,
        act_cfg=None)
    # BUG FIX: the original passed padding=1 to this 1x1 conv, which
    # would enlarge each spatial dim by 2 and make its output
    # incompatible with the 3x3/padding=1 branches it is fused with.
    # A 1x1 conv preserves spatial size with no padding.
    self.direct_conv = ConvModule(
        feat_channels, feat_channels, 1, norm_cfg=norm_cfg, act_cfg=None)
    # BUG FIX: 'paddng' was a typo for 'padding'; ConvModule would
    # raise TypeError on the unexpected keyword at construction time.
    self.out_conv = ConvModule(
        feat_channels,
        out_channels,
        3,
        padding=1,
        norm_cfg=norm_cfg,
        act_cfg=None)
    self.relu = nn.ReLU(inplace=True)
def __init__(self,
             in_channels,
             directions,
             feat_channels=128,
             out_channels=128,
             norm_cfg=dict(type='BN', requires_grad=True),
             init_cfg=None):
    """Build the BiCornerPool layers: two directional pooling branches
    with their pre-pool convs, a post-pool conv and a shortcut path.

    Args:
        in_channels (int): Channels of the input feature map.
        directions (Sequence[str]): Two pooling directions, e.g.
            ('top', 'left'); one CornerPool is built per direction.
        feat_channels (int): Channels used by the pooling branches.
        out_channels (int): Channels of the produced feature map.
        norm_cfg (dict): Normalization config forwarded to every
            ConvModule.
        init_cfg (dict, optional): Initialization config passed to the
            base class.
    """
    super(BiCornerPool, self).__init__(init_cfg)
    # Directional pools, one per requested direction.
    self.direction1_pool = CornerPool(directions[0])
    self.direction2_pool = CornerPool(directions[1])
    # 3x3 convs feeding the two pooling branches.
    self.direction1_conv = ConvModule(
        in_channels, feat_channels, 3, padding=1, norm_cfg=norm_cfg)
    self.direction2_conv = ConvModule(
        in_channels, feat_channels, 3, padding=1, norm_cfg=norm_cfg)
    # Conv applied to the merged pooled features (no activation here).
    self.aftpool_conv = ConvModule(
        feat_channels,
        out_channels,
        3,
        padding=1,
        norm_cfg=norm_cfg,
        act_cfg=None)
    # 1x1 shortcut projection and 3x3 output conv.
    self.conv1 = ConvModule(
        in_channels, out_channels, 1, norm_cfg=norm_cfg, act_cfg=None)
    self.conv2 = ConvModule(
        in_channels, out_channels, 3, padding=1, norm_cfg=norm_cfg)
    self.relu = nn.ReLU(inplace=True)
def test_corner_pool_device_and_dtypes_cpu():
    """
    CommandLine:
        xdoctest -m tests/test_corner_pool.py \
            test_corner_pool_device_and_dtypes_cpu
    """
    with pytest.raises(AssertionError):
        # pool mode must in ['bottom', 'left', 'right', 'top']
        CornerPool('corner')

    # Horizontal-pooling input (left/right) and vertical-pooling input
    # (top/bottom).
    lr_tensor = torch.tensor([[[[0, 0, 0, 0, 0], [2, 1, 3, 0, 2],
                                [5, 4, 1, 1, 6], [0, 0, 0, 0, 0],
                                [0, 0, 0, 0, 0]]]])
    tb_tensor = torch.tensor([[[[0, 3, 1, 0, 0], [0, 1, 1, 0, 0],
                                [0, 3, 4, 0, 0], [0, 2, 2, 0, 0],
                                [0, 0, 2, 0, 0]]]])

    # Expected output per pooling mode.
    answers = {
        'left':
        torch.tensor([[[[0, 0, 0, 0, 0], [3, 3, 3, 2, 2], [6, 6, 6, 6, 6],
                        [0, 0, 0, 0, 0], [0, 0, 0, 0, 0]]]]),
        'right':
        torch.tensor([[[[0, 0, 0, 0, 0], [2, 2, 3, 3, 3], [5, 5, 5, 5, 6],
                        [0, 0, 0, 0, 0], [0, 0, 0, 0, 0]]]]),
        'top':
        torch.tensor([[[[0, 3, 4, 0, 0], [0, 3, 4, 0, 0], [0, 3, 4, 0, 0],
                        [0, 2, 2, 0, 0], [0, 0, 2, 0, 0]]]]),
        'bottom':
        torch.tensor([[[[0, 3, 1, 0, 0], [0, 3, 1, 0, 0], [0, 3, 4, 0, 0],
                        [0, 3, 4, 0, 0], [0, 3, 4, 0, 0]]]]),
    }

    for mode, answer in answers.items():
        source = lr_tensor if mode in ('left', 'right') else tb_tensor
        pooled = CornerPool(mode)(source)
        # Dtype/device must be preserved and values must match exactly.
        assert pooled.type() == source.type()
        assert torch.equal(pooled, answer)
def __init__(self,
             dim,
             conv_cfg=None,
             norm_cfg=None,
             first_kernel_size=3,
             kernel_size=3,
             corner_dim=128):
    """Bottom-right pooling head: delegates to the shared base with a
    'bottom' and a 'right' CornerPool.

    Args:
        dim (int): Channels of the input feature map.
        conv_cfg (dict, optional): Conv config forwarded to the base.
        norm_cfg (dict, optional): Norm config forwarded to the base.
        first_kernel_size (int): Kernel size of the first conv.
        kernel_size (int): Kernel size of the remaining convs.
        corner_dim (int): Channels used inside the pooling branches.
    """
    bottom_pool = CornerPool('bottom')
    right_pool = CornerPool('right')
    super(BRPool, self).__init__(dim, bottom_pool, right_pool, conv_cfg,
                                 norm_cfg, first_kernel_size, kernel_size,
                                 corner_dim)
def __init__(self,
             dim,
             conv_cfg=None,
             norm_cfg=None,
             first_kernel_size=3,
             kernel_size=3,
             corner_dim=128):
    """Top-left pooling head: delegates to the shared base with a 'top'
    and a 'left' CornerPool.

    Args:
        dim (int): Channels of the input feature map.
        conv_cfg (dict, optional): Conv config forwarded to the base.
        norm_cfg (dict, optional): Norm config forwarded to the base.
        first_kernel_size (int): Kernel size of the first conv.
        kernel_size (int): Kernel size of the remaining convs.
        corner_dim (int): Channels used inside the pooling branches.
    """
    top_pool = CornerPool('top')
    left_pool = CornerPool('left')
    super(TLPool, self).__init__(dim, top_pool, left_pool, conv_cfg,
                                 norm_cfg, first_kernel_size, kernel_size,
                                 corner_dim)
def test_corner_pool_top_gradcheck(self):
    """Numerically gradient-check the 'top' CornerPool on CUDA; skipped
    when no CUDA device is available."""
    if not torch.cuda.is_available():
        return
    from mmcv.ops import CornerPool
    # Small random feature map so gradcheck stays fast; requires_grad is
    # needed for the numeric/analytic gradient comparison.
    feats = torch.randn(2, 4, 5, 5, requires_grad=True, device='cuda')
    gradcheck(CornerPool('top'), (feats, ), atol=1e-3, eps=1e-4)