Example #1
    def __init__(self, in_channels: int, out_channels: int, stride: int = 1):
        super().__init__()
        self.bn1 = BatchNorm2d(in_channels)
        self.act1 = ReLU(num_channels=in_channels, inplace=True)
        self.conv1 = Conv2d(
            in_channels,
            out_channels,
            kernel_size=3,
            stride=stride,
            padding=1,
            bias=False,
        )

        self.bn2 = BatchNorm2d(out_channels)
        self.act2 = ReLU(num_channels=out_channels, inplace=True)
        self.conv2 = Conv2d(
            out_channels, out_channels, kernel_size=3, stride=1, padding=1, bias=False
        )

        self.identity = (
            Conv2d(in_channels, out_channels, kernel_size=1, stride=stride, bias=False)
            if in_channels != out_channels or stride != 1
            else None
        )

        self.initialize()
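
Only the constructor appears above; it wires a pre-activation residual block (BatchNorm and ReLU ahead of each conv, with a 1x1 projection on the shortcut when the shape changes). A minimal sketch of the forward pass this layout implies, assuming the library's ReLU(num_channels=...) behaves like torch.nn.ReLU:

    def forward(self, inp: Tensor) -> Tensor:
        # pre-activation ordering: normalize and activate before each conv
        out = self.conv1(self.act1(self.bn1(inp)))
        out = self.conv2(self.act2(self.bn2(out)))
        # project the shortcut only when channels or stride change the shape
        identity = self.identity(inp) if self.identity is not None else inp
        return out + identity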
Example #2
    def __init__(
        self,
        in_channels: int,
        out_channels: int,
        proj_channels: int,
        stride: int = 1,
        groups: int = 1,
    ):
        super().__init__()

        self.conv1 = Conv2d(in_channels, proj_channels, kernel_size=1, bias=False)
        self.bn1 = BatchNorm2d(proj_channels)
        self.act1 = ReLU(num_channels=proj_channels, inplace=True)
        self.conv2 = Conv2d(
            proj_channels,
            proj_channels,
            kernel_size=3,
            stride=stride,
            padding=1,
            bias=False,
            groups=groups,
        )
        self.bn2 = BatchNorm2d(proj_channels)
        self.act2 = ReLU(num_channels=proj_channels, inplace=True)
        self.conv3 = Conv2d(proj_channels, out_channels, kernel_size=1, bias=False)
        self.bn3 = BatchNorm2d(out_channels)
        self.identity = (
            _IdentityModifier(in_channels, out_channels, stride)
            if _IdentityModifier.required(in_channels, out_channels, stride)
            else None
        )
        self.act_out = ReLU(num_channels=out_channels, inplace=True)

        self.initialize()
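
This constructor describes a post-activation bottleneck: a 1x1 reduction to proj_channels, a (possibly grouped) 3x3 that carries the stride, and a 1x1 expansion, with the final ReLU applied after the residual add. A sketch of the matching forward, hedged since only __init__ is shown:

    def forward(self, inp: Tensor) -> Tensor:
        out = self.act1(self.bn1(self.conv1(inp)))  # 1x1 reduce
        out = self.act2(self.bn2(self.conv2(out)))  # 3x3, grouped, carries the stride
        out = self.bn3(self.conv3(out))             # 1x1 expand, no activation yet
        identity = self.identity(inp) if self.identity is not None else inp
        return self.act_out(out + identity)         # activate after the residual add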
Example #3
    def __init__(self, in_channels: int, out_channels: int, stride: int = 1):
        super().__init__()
        self.conv1 = Conv2d(
            in_channels,
            out_channels,
            kernel_size=3,
            stride=stride,
            padding=1,
            bias=False,
        )
        self.bn1 = BatchNorm2d(out_channels)
        self.act1 = ReLU(num_channels=out_channels, inplace=True)
        self.conv2 = Conv2d(out_channels,
                            out_channels,
                            kernel_size=3,
                            stride=1,
                            padding=1,
                            bias=False)
        self.bn2 = BatchNorm2d(out_channels)
        self.identity = (
            _IdentityModifier(in_channels, out_channels, stride)
            if _IdentityModifier.required(in_channels, out_channels, stride)
            else None
        )

        self.add_relu = (
            FloatFunctional()
            if FloatFunctional is not None
            else ReLU(num_channels=out_channels, inplace=True)
        )

        self.initialize()
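
FloatFunctional here is presumably torch.nn.quantized.FloatFunctional (the `is not None` check suggests a guarded import); its add_relu method fuses the residual add and the ReLU into one observable op for quantization, with a plain ReLU as the fallback. A hedged sketch of the forward this constructor suggests:

    def forward(self, inp: Tensor) -> Tensor:
        out = self.act1(self.bn1(self.conv1(inp)))
        out = self.bn2(self.conv2(out))
        identity = self.identity(inp) if self.identity is not None else inp
        if isinstance(self.add_relu, FloatFunctional):
            # fused add + ReLU, quantization-friendly
            return self.add_relu.add_relu(out, identity)
        return self.add_relu(out + identity)  # plain add followed by ReLU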
Example #4
def test_is_activation():
    assert is_activation(ReLU())
    assert is_activation(ReLU6())
    assert is_activation(TReLU())
    assert is_activation(TReLU6())
    assert is_activation(PReLU())
    assert is_activation(LeakyReLU())
    assert is_activation(Swish())
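
The test only exercises the predicate; a plausible is_activation is a simple isinstance check against the known activation types (a sketch, not the library's actual implementation):

def is_activation(module) -> bool:
    # assumed implementation: membership in the known activation classes
    return isinstance(module, (ReLU, ReLU6, TReLU, TReLU6, PReLU, LeakyReLU, Swish))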
Example #5
    def __init__(self, in_channels: int, out_channels: int, batch_norm: bool):
        super().__init__()
        self.conv = Conv2d(in_channels,
                           out_channels,
                           kernel_size=3,
                           padding=1,
                           stride=1)
        self.bn = BatchNorm2d(out_channels) if batch_norm else None
        self.act = ReLU(num_channels=out_channels, inplace=True)

        self.initialize()
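
Since bn is optional in this block, the forward has to skip it when it is None; a minimal sketch assuming the usual conv -> (bn) -> act ordering:

    def forward(self, inp: Tensor) -> Tensor:
        out = self.conv(inp)
        if self.bn is not None:  # batch norm was disabled at construction
            out = self.bn(out)
        return self.act(out)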
Example #6
    def __init__(self,
                 in_channels: int,
                 num_classes: int,
                 class_type: str = "single"):
        super().__init__()
        self.mlp = Sequential(
            Linear(in_channels * 7 * 7, 4096),
            Dropout(),
            ReLU(num_channels=4096, inplace=True),
            Linear(4096, 4096),
            Dropout(),
            ReLU(num_channels=4096, inplace=True),
            Linear(4096, num_classes),
        )

        if class_type == "single":
            self.softmax = Softmax(dim=1)
        elif class_type == "multi":
            self.softmax = Sigmoid()
        else:
            raise ValueError(
                "unknown class_type given of {}".format(class_type))
Example #7
    def __init__(self):
        super().__init__()
        self.conv = Conv2d(
            in_channels=3,
            out_channels=32,
            kernel_size=3,
            stride=2,
            padding=1,
            bias=False,
        )
        self.bn = BatchNorm2d(32)
        self.act = ReLU(num_channels=32, inplace=True)

        self.initialize()
Example #8
    def __init__(self):
        super().__init__()
        self.conv = Conv2d(
            _Input.IN_CHANNELS,
            _Input.OUT_CHANNELS,
            kernel_size=7,
            stride=2,
            padding=3,
            bias=False,
        )
        self.bn = BatchNorm2d(_Input.OUT_CHANNELS)
        self.act = ReLU(num_channels=_Input.OUT_CHANNELS, inplace=True)
        self.pool = MaxPool2d(kernel_size=3, stride=2, padding=1)

        self.initialize()
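
This is the standard ResNet input stem: a 7x7 stride-2 conv followed by a 3x3 stride-2 max pool, quartering the spatial size. A self-contained shape check with plain torch.nn, assuming the usual _Input constants of 3 input and 64 output channels:

import torch
from torch.nn import Conv2d, MaxPool2d

conv = Conv2d(3, 64, kernel_size=7, stride=2, padding=3, bias=False)
pool = MaxPool2d(kernel_size=3, stride=2, padding=1)
out = pool(conv(torch.randn(1, 3, 224, 224)))  # 224 -> 112 after conv, -> 56 after pool
assert out.shape == (1, 64, 56, 56)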
Example #9
    def __init__(
        self,
        in_channels: int,
        out_channels: int,
        kernel_size: int,
        stride: int,
        padding: int,
    ):
        super().__init__()
        self.conv = Conv2d(
            in_channels,
            out_channels,
            kernel_size,
            stride,
            padding,
            bias=False,
        )
        self.bn = BatchNorm2d(out_channels)
        self.act = ReLU(num_channels=out_channels, inplace=True)
Example #10
    def __init__(
        self,
        in_channels: int,
        out_channels: int,
        kernel_size: Union[int, Tuple[int, ...]],
        stride: Union[int, Tuple[int, ...]] = 1,
        padding: Union[int, Tuple[int, ...]] = 0,
        init_stddev: float = 0.1,
    ):
        super().__init__()
        self.conv = Conv2d(
            in_channels,
            out_channels,
            kernel_size,
            stride,
            padding,
            bias=False,
        )
        self.bn = BatchNorm2d(out_channels)
        self.act = ReLU(num_channels=out_channels, inplace=True)

        self.initialize(init_stddev)
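
The init_stddev argument suggests the conv weights are drawn from a zero-mean normal; a hedged sketch of what initialize(init_stddev) might do using torch.nn.init (the library's actual routine is not shown):

    def initialize(self, init_stddev: float):
        # assumed: normal conv weight init plus the usual batch-norm constants
        torch.nn.init.normal_(self.conv.weight, mean=0.0, std=init_stddev)
        torch.nn.init.constant_(self.bn.weight, 1.0)
        torch.nn.init.constant_(self.bn.bias, 0.0)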
Example #11
def test_relu():
    x_tens = torch.randn(16, 1, 64, 64)
    comp_one = ReLU(num_channels=1)(x_tens)
    comp_two = TReLU()(x_tens)

    assert (comp_one - comp_two).abs().sum() < sys.float_info.epsilon
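
Summing absolute differences over 16 * 64 * 64 elements and comparing against machine epsilon effectively demands exact agreement, which holds here because both wrappers compute the same max(x, 0). torch.equal states that intent directly (an equivalent sketch, not the library's test):

def test_relu_exact():
    x_tens = torch.randn(16, 1, 64, 64)
    # ReLU(num_channels=...) and TReLU should agree elementwise, bit for bit
    assert torch.equal(ReLU(num_channels=1)(x_tens), TReLU()(x_tens))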