Example #1
    def test_leaky_relu(self):
        """Test creation of a LeakyReLU activation"""
        activation_name = 'LeakyReLU'
        args = {}

        activation = activation_factory.create(activation_name, **args)
        self.assertEqual(activation._get_name(), activation_name)
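These tests pin down the factory's call surface: activation_factory.create(name, **params) returns a torch.nn module whose _get_name() matches the requested name. A minimal sketch of a factory with that behaviour, assuming it simply resolves the class on torch.nn (the real activation_factory may do more, e.g. keep its own registry):

import torch.nn as nn

def create(activation_name, **kwargs):
    # Hypothetical sketch: resolve the activation class on torch.nn and
    # forward any constructor keyword arguments to it.
    cls = getattr(nn, activation_name, None)
    if not (isinstance(cls, type) and issubclass(cls, nn.Module)):
        raise ValueError('Unknown activation: {}'.format(activation_name))
    return cls(**kwargs)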
Example #2
    def test_softmax(self):
        """Test creation of a Softmax activation"""
        activation_name = 'Softmax'
        args = {}

        activation = activation_factory.create(activation_name, **args)
        self.assertEqual(activation._get_name(), activation_name)

        # Use random inputs; torch.empty would leave the tensor uninitialized
        x = torch.randn(10, 2)
        y = activation(x)
        assert_array_equal(y, torch.softmax(x, -1))
Example #3
    def test_sigmoid(self):
        """Test creation of a Sigmoid activation"""
        activation_name = 'Sigmoid'
        args = {}

        activation = activation_factory.create(activation_name, **args)
        self.assertEqual(activation._get_name(), activation_name)

        # Use random inputs; torch.empty would leave the tensor uninitialized
        x = torch.randn(10)
        y = activation(x)
        assert_array_equal(y, torch.sigmoid(x))
Example #4
    def test_relu(self):
        """Test creation of a ReLU activation"""
        activation_name = 'ReLU'
        args = {}

        activation = activation_factory.create(activation_name, **args)
        self.assertEqual(activation._get_name(), activation_name)

        # All-negative input: ReLU must zero out every element
        x = torch.ones(10) * -1
        y = activation(x)
        self.assertEqual(len(torch.nonzero(y, as_tuple=False)), 0)
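The four tests above all pass empty args, but the same call site accepts constructor parameters. A hedged usage sketch (negative_slope is the standard torch.nn.LeakyReLU parameter, not something these tests exercise):

import torch

activation = activation_factory.create('LeakyReLU', negative_slope=0.1)
x = torch.tensor([-1.0, 0.0, 2.0])
print(activation(x))  # tensor([-0.1000, 0.0000, 2.0000])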
Example #5
    def __init__(self, block, layers, in_channels=3,
                 zero_init_residual=False, groups=1, width_per_group=64,
                 replace_stride_with_dilation=None, norm_layer=None,
                 activation=None):
        super(ResNet, self).__init__()
        if activation is None:
            # Avoid a mutable dict as a default argument value
            activation = {'name': 'ReLU', 'params': {'inplace': True}}
        if norm_layer is None:
            norm_layer = nn.BatchNorm2d
        self._norm_layer = norm_layer

        self.inplanes = 64
        self.dilation = 1
        if replace_stride_with_dilation is None:
            # each element in the tuple indicates if we should replace
            # the 2x2 stride with a dilated convolution instead
            replace_stride_with_dilation = [False, False, False]
        if len(replace_stride_with_dilation) != 3:
            raise ValueError("replace_stride_with_dilation should be None "
                             "or a 3-element tuple, got {}".format(
                                replace_stride_with_dilation))
        self.groups = groups
        self.base_width = width_per_group
        self.conv1 = nn.Conv2d(in_channels, self.inplanes, kernel_size=7,
                               stride=2, padding=3, bias=False)
        self.bn1 = norm_layer(self.inplanes)
        self.activation = activation_factory.create(
            activation['name'], **activation['params'])
        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        self.layer1 = self._make_layer(block, 64, layers[0])
        self.layer2 = self._make_layer(block, 128, layers[1], stride=2,
                                       dilate=replace_stride_with_dilation[0])
        self.layer3 = self._make_layer(block, 256, layers[2], stride=2,
                                       dilate=replace_stride_with_dilation[1])
        self.layer4 = self._make_layer(block, 512, layers[3], stride=2,
                                       dilate=replace_stride_with_dilation[2])

        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
            elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)):
                nn.init.constant_(m.weight, 1)
                nn.init.constant_(m.bias, 0)

        # Zero-initialize the last BN in each residual branch,
        # so that the residual branch starts with zeros, and each residual
        # block behaves like an identity. This improves the model by 0.2~0.3%
        # according to https://arxiv.org/abs/1706.02677
        if zero_init_residual:
            for m in self.modules():
                if isinstance(m, Bottleneck):
                    nn.init.constant_(m.bn3.weight, 0)
                elif isinstance(m, BasicBlock):
                    nn.init.constant_(m.bn2.weight, 0)
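Because the non-linearity is built through the same factory, this ResNet variant can swap activations via configuration alone. A usage sketch, assuming BasicBlock and the torchvision ResNet-18 layer spec [2, 2, 2, 2] are in scope (this file's own block classes may differ):

model = ResNet(BasicBlock, [2, 2, 2, 2],
               activation={'name': 'LeakyReLU',
                           'params': {'negative_slope': 0.1, 'inplace': True}})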