Example #1
import torch.nn as nn


class NNActivationModule(nn.Module):
    """Enumerates PyTorch's built-in activation layers in a ModuleList."""

    def __init__(self):
        super().__init__()
        self.activations = nn.ModuleList([
            nn.ELU(),
            nn.Hardshrink(),
            nn.Hardsigmoid(),
            nn.Hardtanh(),
            nn.Hardswish(),
            nn.LeakyReLU(),
            nn.LogSigmoid(),
            # nn.MultiheadAttention(),
            nn.PReLU(),
            nn.ReLU(),
            nn.ReLU6(),
            nn.RReLU(),
            nn.SELU(),
            nn.CELU(),
            nn.GELU(),
            nn.Sigmoid(),
            nn.SiLU(),
            nn.Mish(),
            nn.Softplus(),
            nn.Softshrink(),
            nn.Softsign(),
            nn.Tanh(),
            nn.Tanhshrink(),
            # nn.Threshold(0.1, 20),
            nn.GLU(),
            # Explicit dim avoids the deprecated implicit-dim warning.
            nn.Softmin(dim=1),
            nn.Softmax(dim=1),
            nn.Softmax2d(),
            nn.LogSoftmax(dim=1),
            # nn.AdaptiveLogSoftmaxWithLoss(),
        ])
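
The original snippet stops at the constructor. A minimal forward pass (an assumption, not part of the source) could simply chain every activation in registration order:

    def forward(self, x):
        # Purely illustrative: runs each activation in turn. For 4D inputs,
        # keep the trailing dimension even, since nn.GLU halves the last dim.
        for activation in self.activations:
            x = activation(x)
        return x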
Example #2
import torch.nn as nn


def _make_gen_block(in_channels: int,
                    out_channels: int,
                    kernel_size: int = 4,
                    stride: int = 2,
                    padding: int = 1,
                    bias: bool = False,
                    last_block: bool = False,
                    use_relu: bool = False) -> nn.Sequential:
    """Builds one up-sampling block of a DCGAN-style generator."""
    if not last_block:
        # Intermediate blocks: transposed conv, batch norm, non-linearity.
        gen_block = nn.Sequential(
            nn.ConvTranspose2d(in_channels,
                               out_channels,
                               kernel_size,
                               stride,
                               padding,
                               bias=bias),
            nn.BatchNorm2d(out_channels),
            nn.ReLU() if use_relu else nn.Mish(),
        )
    else:
        # Final block: Sigmoid maps outputs into [0, 1]; no normalization.
        gen_block = nn.Sequential(
            nn.ConvTranspose2d(in_channels,
                               out_channels,
                               kernel_size,
                               stride,
                               padding,
                               bias=bias),
            nn.Sigmoid(),
        )

    return gen_block
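
A hypothetical generator can then be assembled by stacking these blocks; the channel schedule below is an assumption for illustration, mapping a (N, 128, 4, 4) latent map to a (N, 3, 64, 64) image, since each block doubles the spatial size:

generator = nn.Sequential(
    _make_gen_block(128, 64),                 # 4x4 -> 8x8
    _make_gen_block(64, 32),                  # 8x8 -> 16x16
    _make_gen_block(32, 16),                  # 16x16 -> 32x32
    _make_gen_block(16, 3, last_block=True),  # 32x32 -> 64x64, in [0, 1]
)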
Example #3
def cspdarknet53_mish(pretrained: bool = False,
                      progress: bool = True,
                      **kwargs: Any) -> DarknetV4:
    """Modified version of CSP-Darknet-53 from
    `"CSPNet: A New Backbone that can Enhance Learning Capability of CNN" <https://arxiv.org/pdf/1911.11929.pdf>`_
    with Mish as activation layer and DropBlock as regularization layer.

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
        progress (bool): If True, displays a progress bar of the download to stderr

    Returns:
        torch.nn.Module: classification model
    """

    kwargs['act_layer'] = nn.Mish(inplace=True)
    kwargs['drop_layer'] = DropBlock2d

    return _darknet('cspdarknet53_mish', pretrained, progress,
                    **kwargs)  # type: ignore[return-value]
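
Assuming the factory is importable from its enclosing package (the module path is not shown in the snippet), usage is straightforward:

import torch

# Hypothetical usage; weights are downloaded only when pretrained=True.
model = cspdarknet53_mish(pretrained=False)
model.eval()
with torch.no_grad():
    logits = model(torch.randn(1, 3, 224, 224))  # ImageNet-style input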
Example #4
    def __init__(
        self,
        layout: List[Tuple[int, int]],
        num_classes: int = 80,
        in_channels: int = 3,
        stem_channels: int = 32,
        anchors: Optional[Tensor] = None,
        act_layer: Optional[nn.Module] = None,
        norm_layer: Optional[Callable[[int], nn.Module]] = None,
        drop_layer: Optional[Callable[..., nn.Module]] = None,
        conv_layer: Optional[Callable[..., nn.Module]] = None,
        backbone_norm_layer: Optional[Callable[[int],
                                               nn.Module]] = None) -> None:
        super().__init__()

        if act_layer is None:
            act_layer = nn.Mish(inplace=True)
        if norm_layer is None:
            norm_layer = nn.BatchNorm2d
        if backbone_norm_layer is None:
            backbone_norm_layer = norm_layer
        if drop_layer is None:
            drop_layer = DropBlock2d

        # backbone
        self.backbone = DarknetBodyV4(layout, in_channels, stem_channels, 3,
                                      act_layer, backbone_norm_layer,
                                      drop_layer, conv_layer)
        # neck
        self.neck = Neck([1024, 512, 256], act_layer, norm_layer, drop_layer,
                         conv_layer)
        # head
        self.head = Yolov4Head(num_classes, anchors, act_layer, norm_layer,
                               drop_layer, conv_layer)

        init_module(self.neck, 'leaky_relu')
        init_module(self.head, 'leaky_relu')
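
Instantiation needs only the stage layout of (channels, number of blocks) pairs; the CSP-Darknet-53-style schedule and the class name below are assumptions for illustration:

# Hypothetical: assumes the enclosing class is named YOLOv4 and that the
# layout follows a CSP-Darknet-53-style (channels, num_blocks) schedule.
model = YOLOv4(layout=[(64, 1), (128, 2), (256, 8), (512, 8), (1024, 4)],
               num_classes=80)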
Example #5
import torch.nn as nn


class Model(nn.Module):
    def __init__(self):
        super().__init__()

        self.act_0 = nn.Mish()
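
The snippet is cut off after registering its first activation. For reference, nn.Mish computes x * tanh(softplus(x)); the short check below (a sketch, not from the source) verifies that against the built-in layer:

import torch
import torch.nn.functional as F

def mish_reference(x: torch.Tensor) -> torch.Tensor:
    # Mish(x) = x * tanh(softplus(x)), as implemented by nn.Mish.
    return x * torch.tanh(F.softplus(x))

x = torch.randn(8)
assert torch.allclose(mish_reference(x), nn.Mish()(x))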