Example #1
    def __init__(self, inp, oup, stride, expand_ratio):
        super(InvertedResidual, self).__init__()
        self.stride = stride
        assert stride in [1, 2]

        hidden_dim = int(round(inp * expand_ratio))
        self.use_res_connect = self.stride == 1 and inp == oup

        if expand_ratio == 1:
            self.conv = nn.Sequential(
                # dw
                Conv2d(hidden_dim, hidden_dim, 3, stride, 1, groups=hidden_dim, bias=False),
                FrozenBatchNorm2d(hidden_dim),
                nn.ReLU6(),
                # pw-linear
                Conv2d(hidden_dim, oup, 1, 1, 0, bias=False),
                FrozenBatchNorm2d(oup),
            )
        else:
            self.conv = nn.Sequential(
                # pw
                Conv2d(inp, hidden_dim, 1, 1, 0, bias=False),
                FrozenBatchNorm2d(hidden_dim),
                nn.ReLU6(),
                # dw
                Conv2d(hidden_dim, hidden_dim, 3, stride, 1, groups=hidden_dim, bias=False),
                FrozenBatchNorm2d(hidden_dim),
                nn.ReLU6(),
                # pw-linear
                Conv2d(hidden_dim, oup, 1, 1, 0, bias=False),
                FrozenBatchNorm2d(oup),
            )
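This excerpt only builds the layers; the shortcut implied by `use_res_connect` is applied in the class's `forward`, which is not shown here. A minimal sketch of that method, assuming the standard MobileNetV2 residual pattern (`Conv2d` and `FrozenBatchNorm2d` are presumably wrapper layers imported from the surrounding detection codebase rather than plain `torch.nn` modules):

    def forward(self, x):
        # Add the identity shortcut only when stride == 1 and inp == oup,
        # i.e. when the input and output shapes match.
        if self.use_res_connect:
            return x + self.conv(x)
        return self.conv(x)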
Example #2
    def __init__(self, inp, oup, stride, dilation, expand_ratio, BatchNorm):
        super(InvertedResidual, self).__init__()
        self.stride = stride
        assert stride in [1, 2]

        hidden_dim = round(inp * expand_ratio)
        self.use_res_connect = self.stride == 1 and inp == oup
        self.kernel_size = 3
        self.dilation = dilation

        if expand_ratio == 1:
            self.conv = nn.Sequential(
                # dw
                nn.Conv(hidden_dim,
                        hidden_dim,
                        3,
                        stride,
                        0,
                        dilation,
                        groups=hidden_dim,
                        bias=False),
                BatchNorm(hidden_dim),
                nn.ReLU6(),
                # pw-linear
                nn.Conv(hidden_dim, oup, 1, 1, 0, 1, 1, bias=False),
                BatchNorm(oup),
            )
        else:
            self.conv = nn.Sequential(
                # pw
                nn.Conv(inp, hidden_dim, 1, 1, 0, 1, bias=False),
                BatchNorm(hidden_dim),
                nn.ReLU6(),
                # dw
                nn.Conv(hidden_dim,
                        hidden_dim,
                        3,
                        stride,
                        0,
                        dilation,
                        groups=hidden_dim,
                        bias=False),
                BatchNorm(hidden_dim),
                nn.ReLU6(),
                # pw-linear
                nn.Conv(hidden_dim, oup, 1, 1, 0, 1, bias=False),
                BatchNorm(oup),
            )
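Unlike Example #1, the depthwise conv here is created with padding 0 while the kernel size and dilation are stored on `self`, which suggests the padding is applied by hand in the forward pass so it can track the dilation rate. A sketch of that pattern, assuming Jittor's `execute` method and an `nn.pad` with `F.pad`-style semantics (both are assumptions, since the excerpt omits the forward pass):

    def execute(self, x):
        # Dilated 3x3 kernels have a larger effective size; pad the input
        # manually so the depthwise conv behaves like a 'same' conv.
        k_eff = self.kernel_size + (self.kernel_size - 1) * (self.dilation - 1)
        pad_total = k_eff - 1
        pad_beg = pad_total // 2
        pad_end = pad_total - pad_beg
        x_pad = nn.pad(x, (pad_beg, pad_end, pad_beg, pad_end))
        if self.use_res_connect:
            return x + self.conv(x_pad)
        return self.conv(x_pad)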
Example #3
    def test_relu(self):
        # ***************************************************************
        # Test ReLU Layer
        # ***************************************************************
        arr = np.random.randn(16, 10, 224, 224)
        check_equal(arr, jnn.ReLU(), tnn.ReLU())

        # ***************************************************************
        # Test PReLU Layer
        # ***************************************************************
        arr = np.random.randn(16, 10, 224, 224)
        check_equal(arr, jnn.PReLU(), tnn.PReLU())
        check_equal(arr, jnn.PReLU(10, 99.9), tnn.PReLU(10, 99.9))
        check_equal(arr, jnn.PReLU(10, 2), tnn.PReLU(10, 2))
        check_equal(arr, jnn.PReLU(10, -0.2), tnn.PReLU(10, -0.2))
        
        # ***************************************************************
        # Test ReLU6 Layer
        # ***************************************************************
        arr = np.random.randn(16, 10, 224, 224)
        check_equal(arr, jnn.ReLU6(), tnn.ReLU6())

        # ***************************************************************
        # Test LeakyReLU Layer
        # ***************************************************************
        arr = np.random.randn(16, 10, 224, 224)
        check_equal(arr, jnn.LeakyReLU(), tnn.LeakyReLU())
        check_equal(arr, jnn.LeakyReLU(2), tnn.LeakyReLU(2))
        check_equal(arr, jnn.LeakyReLU(99.9), tnn.LeakyReLU(99.9))
Example #4
    def test_relu(self):
        # ***************************************************************
        # Test ReLU Layer
        # ***************************************************************
        arr = np.random.randn(16, 10, 224, 224)
        check_equal(arr, jnn.ReLU(), tnn.ReLU())

        # ***************************************************************
        # Test PReLU Layer
        # ***************************************************************
        arr = np.random.randn(16, 10, 224, 224)
        check_equal(arr, jnn.PReLU(), tnn.PReLU())
        check_equal(arr, jnn.PReLU(10, 99.9), tnn.PReLU(10, 99.9))
        check_equal(arr, jnn.PReLU(10, 2), tnn.PReLU(10, 2))
        check_equal(arr, jnn.PReLU(10, -0.2), tnn.PReLU(10, -0.2))

        # ***************************************************************
        # Test ReLU6 Layer
        # ***************************************************************
        arr = np.random.randn(16, 10, 224, 224)
        check_equal(arr, jnn.ReLU6(), tnn.ReLU6())

        # ***************************************************************
        # Test LeakyReLU Layer
        # ***************************************************************
        arr = np.random.randn(16, 10, 224, 224)
        check_equal(arr, jnn.LeakyReLU(), tnn.LeakyReLU())
        check_equal(arr, jnn.LeakyReLU(2), tnn.LeakyReLU(2))
        check_equal(arr, jnn.LeakyReLU(99.9), tnn.LeakyReLU(99.9))

        # ***************************************************************
        # Test ELU Layer
        # ***************************************************************
        arr = np.random.randn(16, 10, 224, 224)
        check_equal(arr, jnn.ELU(), tnn.ELU())
        check_equal(arr, jnn.ELU(0.3), tnn.ELU(0.3))
        check_equal(arr, jnn.ELU(2), tnn.ELU(2))
        check_equal(arr, jnn.ELU(99.9), tnn.ELU(99.9))

        # ***************************************************************
        # Test GELU Layer
        # ***************************************************************
        if hasattr(tnn, "GELU"):
            arr = np.random.randn(16, 10, 224, 224)
            check_equal(arr, jnn.GELU(), tnn.GELU())

        # ***************************************************************
        # Test Softplus  Layer
        # ***************************************************************
        arr = np.random.randn(16, 10, 224, 224)
        check_equal(arr, jnn.Softplus(), tnn.Softplus())
        check_equal(arr, jnn.Softplus(2), tnn.Softplus(2))
        check_equal(arr, jnn.Softplus(2, 99.9), tnn.Softplus(2, 99.9))
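Each call pairs a Jittor layer (`jnn`) with its PyTorch counterpart (`tnn`) on the same random input. The `check_equal` helper is not shown in the excerpt; a minimal sketch of what such a helper might look like (the tolerances and the float32 cast are assumptions):

import numpy as np
import torch
import jittor as jt

def check_equal(arr, jnn_layer, tnn_layer, rtol=1e-5, atol=1e-5):
    # Run the same input through both frameworks and compare the outputs.
    x = arr.astype(np.float32)
    j_out = jnn_layer(jt.array(x)).numpy()
    t_out = tnn_layer(torch.from_numpy(x)).detach().numpy()
    np.testing.assert_allclose(j_out, t_out, rtol=rtol, atol=atol)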
Example #5
    def __init__(self,
                 in_planes,
                 out_planes,
                 kernel_size=3,
                 stride=1,
                 groups=1):
        padding = (kernel_size - 1) // 2
        super(ConvBNReLU, self).__init__(
            nn.Conv(in_planes,
                    out_planes,
                    kernel_size,
                    stride,
                    padding,
                    groups=groups,
                    bias=False), nn.BatchNorm(out_planes), nn.ReLU6())
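The line `padding = (kernel_size - 1) // 2` gives 'same' spatial output for odd kernel sizes at stride 1 (e.g. padding 1 for a 3x3 kernel). Since `ConvBNReLU` subclasses `nn.Sequential`, it is called like any module; a hypothetical usage:

# Hypothetical: a stride-2 stem conv, then a depthwise 3x3 block
# (groups == channels), the two shapes this block takes in MobileNetV2.
stem = ConvBNReLU(3, 32, kernel_size=3, stride=2)
dw = ConvBNReLU(32, 32, kernel_size=3, stride=1, groups=32)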
Example #6
def conv_1x1_bn(inp, oup):
    return nn.Sequential(Conv2d(inp, oup, 1, 1, 0, bias=False),
                         FrozenBatchNorm2d(oup), nn.ReLU6())
Example #7
def conv_bn(inp, oup, stride):
    return nn.Sequential(Conv2d(inp, oup, 3, stride, 1, bias=False),
                         FrozenBatchNorm2d(oup), nn.ReLU6())
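These two helpers build the usual MobileNetV2 stem pieces: a 3x3 strided conv and a 1x1 pointwise projection, each followed by a frozen batch norm and ReLU6. A hypothetical usage:

# Hypothetical: 3x3 stride-2 stem, then a 1x1 channel projection.
stem = conv_bn(3, 32, stride=2)
proj = conv_1x1_bn(32, 16)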
Example #8
def conv_bn(inp, oup, stride, BatchNorm):
    return nn.Sequential(nn.Conv(inp, oup, 3, stride, 1, bias=False),
                         BatchNorm(oup), nn.ReLU6())
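Compared with Example #7, taking `BatchNorm` as an argument lets the caller choose the normalization layer (for instance a synchronized variant) without changing the builder. A hypothetical call, assuming Jittor's `nn.BatchNorm`:

# Hypothetical: build the stem with a caller-chosen norm layer.
stem = conv_bn(3, 32, 2, nn.BatchNorm)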