def __init__(self, in_channels, out_channels):
    super().__init__()
    # Two 3x3 conv -> BatchNorm2d -> ReLU stages; padding=1 keeps the
    # spatial size, so only the channel count changes.
    self.double_conv = nn.Sequential(
        nn.Conv2d(in_channels, out_channels, kernel_size=3, padding=1),
        BatchNorm2d(out_channels), nn.ReLU(inplace=True),
        nn.Conv2d(out_channels, out_channels, kernel_size=3, padding=1),
        BatchNorm2d(out_channels), nn.ReLU(inplace=True))
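For context, a minimal standalone usage sketch of the stack above (assuming `BatchNorm2d` is simply imported from `torch.nn`): the two 3x3 convolutions with padding=1 preserve the spatial size, so only the channel count changes.

import torch
import torch.nn as nn
from torch.nn import BatchNorm2d  # assumption: the examples alias torch.nn.BatchNorm2d

# Build the same conv -> BN -> ReLU (x2) stack and run a dummy batch through it.
double_conv = nn.Sequential(
    nn.Conv2d(3, 32, kernel_size=3, padding=1),
    BatchNorm2d(32), nn.ReLU(inplace=True),
    nn.Conv2d(32, 32, kernel_size=3, padding=1),
    BatchNorm2d(32), nn.ReLU(inplace=True))

x = torch.randn(2, 3, 64, 64)
print(double_conv(x).shape)  # torch.Size([2, 32, 64, 64]) -- spatial size unchanged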
Example #2
def __init__(self, in_features, kernel_size, padding):
    super(ResBlock2d, self).__init__()
    # Residual block: two convolutions that keep the channel count
    # (in_features -> in_features), each paired with an affine BatchNorm2d.
    self.conv1 = nn.Conv2d(in_channels=in_features, out_channels=in_features,
                           kernel_size=kernel_size, padding=padding)
    self.conv2 = nn.Conv2d(in_channels=in_features, out_channels=in_features,
                           kernel_size=kernel_size, padding=padding)
    self.norm1 = BatchNorm2d(in_features, affine=True)
    self.norm2 = BatchNorm2d(in_features, affine=True)
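A possible forward pass for ResBlock2d built from these layers; this is only a sketch of the common pre-activation residual pattern (the original forward is not shown), and it assumes the chosen kernel_size/padding preserve the spatial size so the skip connection lines up, plus `import torch.nn.functional as F`.

def forward(self, x):
    # Hedged sketch: norm -> ReLU -> conv, twice, with an identity skip.
    # Only valid when kernel_size/padding keep the spatial dimensions intact.
    out = self.conv1(F.relu(self.norm1(x)))
    out = self.conv2(F.relu(self.norm2(out)))
    return out + x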
def __init__(self, in_dim, reduction_dim, bins):
    super(PPM, self).__init__()
    # Pyramid pooling: one branch per bin size, each pooling to bin x bin,
    # then projecting channels with a 1x1 conv -> BatchNorm2d -> ReLU.
    self.features = []
    for bin in bins:
        self.features.append(
            nn.Sequential(
                nn.AdaptiveAvgPool2d(bin),
                nn.Conv2d(in_dim, reduction_dim, kernel_size=1, bias=False),
                BatchNorm2d(reduction_dim),
                nn.ReLU(inplace=True)))
    self.features = nn.ModuleList(self.features)
    # Learnable scale factor, initialised to zero, plus a 3x3 conv -> BN -> ReLU stage.
    self.gamma = Parameter(torch.zeros(1))
    self.conv51 = nn.Sequential(
        nn.Conv2d(512, 512, 3, padding=1, bias=False), BatchNorm2d(512),
        nn.ReLU())
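A sketch of how the pyramid branches are typically consumed (PSPNet-style); the original forward is not shown, so this assumes each pooled branch is upsampled back to the input resolution and concatenated with the input, and it ignores `gamma` and `conv51`. It also assumes `import torch.nn.functional as F`.

def forward(self, x):
    # Hedged sketch covering only self.features: pool to bin x bin,
    # project channels, upsample to the input size, then concatenate.
    h, w = x.shape[2:]
    out = [x]
    for branch in self.features:
        out.append(F.interpolate(branch(x), size=(h, w),
                                 mode='bilinear', align_corners=True))
    return torch.cat(out, dim=1)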
def __init__(self,
             in_features,
             out_features,
             groups=1,
             kernel_size=3,
             padding=1):
    super(SameBlock2d, self).__init__()
    # Single convolution that preserves spatial resolution with the default
    # kernel_size=3, padding=1, optionally grouped, followed by affine BatchNorm2d.
    self.conv = nn.Conv2d(in_channels=in_features,
                          out_channels=out_features,
                          kernel_size=kernel_size,
                          padding=padding,
                          groups=groups)
    self.norm = BatchNorm2d(out_features, affine=True)
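A likely forward for SameBlock2d: convolution, batch norm, then ReLU, keeping the spatial resolution. Again a hedged sketch (the original forward is not shown), assuming `import torch.nn.functional as F`.

def forward(self, x):
    # Hedged sketch: convolution, affine batch norm, then ReLU.
    out = self.conv(x)
    out = self.norm(out)
    return F.relu(out)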
def __init__(self, in_channels):
    super(attentionHead, self).__init__()
    # 3x3 conv -> BatchNorm2d -> ReLU to refine features, followed by a
    # position attention module (PAM_Module).
    self.conv5a = nn.Sequential(
        nn.Conv2d(in_channels, in_channels, 3, padding=1, bias=False),
        BatchNorm2d(in_channels), nn.ReLU())
    self.sa = PAM_Module(in_channels)
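Finally, a hedged sketch of how attentionHead might wire these pieces together: the conv-BN-ReLU stack refines the features and `PAM_Module` (the position attention module used in DANet-style heads) is applied on top. The original forward is not shown and may contain further layers.

def forward(self, x):
    # Hedged sketch: refine features, then apply position attention.
    feat = self.conv5a(x)
    return self.sa(feat)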