Example 1
class ConvBNReLU(layers.Layer):
    """Conv2d -> BatchNorm2d -> ReLU6 block; depthwise convolution when groups > 1."""
    # Assumes a MindSpore-style `layers` module (e.g. `from tinyms import layers`)
    # with `layers.Layer` as the base class.

    def __init__(self,
                 in_channels,
                 out_channels,
                 kernel_size=3,
                 stride=1,
                 groups=1):
        super(ConvBNReLU, self).__init__()
        padding = (kernel_size - 1) // 2
        if groups == 1:
            # Standard convolution.
            conv = layers.Conv2d(in_channels,
                                 out_channels,
                                 kernel_size,
                                 stride,
                                 pad_mode='pad',
                                 padding=padding)
        else:
            # Depthwise convolution: one filter per input channel.
            conv = layers.Conv2d(in_channels,
                                 in_channels,
                                 kernel_size,
                                 stride,
                                 pad_mode='pad',
                                 padding=padding,
                                 group=in_channels)
        self.features = layers.SequentialLayer(
            [conv, layers.BatchNorm2d(out_channels),
             layers.ReLU6()])
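    # Sketch, not part of the original snippet: MindSpore/TinyMS layers run their
    # forward pass through construct(); for this block it simply applies the
    # Conv -> BN -> ReLU6 sequence built in __init__.
    def construct(self, x):
        return self.features(x)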
Example 2
class ConvTransposeNormReLU(layers.Layer):
    """Transposed convolution -> normalization -> (Leaky)ReLU block."""
    # `layers` module and base class assumed as in Example 1.

    def __init__(self,
                 in_planes,
                 out_planes,
                 kernel_size=4,
                 stride=2,
                 alpha=0.2,
                 norm_mode='batch',
                 pad_mode='CONSTANT',
                 use_relu=True,
                 padding=None):
        super(ConvTransposeNormReLU, self).__init__()
        norm = layers.BatchNorm2d(out_planes)
        if norm_mode == 'instance':
            # Use BatchNorm2d with batch size 1, affine=False, training=True
            # as a stand-in for InstanceNorm2d.
            norm = layers.BatchNorm2d(out_planes, affine=False)
        has_bias = (norm_mode == 'instance')
        if padding is None:
            padding = (kernel_size - 1) // 2
        if pad_mode == 'CONSTANT':
            # Let the transposed convolution handle padding itself.
            conv = layers.Conv2dTranspose(in_planes,
                                          out_planes,
                                          kernel_size,
                                          stride,
                                          pad_mode='same',
                                          has_bias=has_bias)
            layer_list = [conv, norm]
        else:
            # Pad explicitly (e.g. 'REFLECT' or 'SYMMETRIC') and use an unpadded conv.
            paddings = ((0, 0), (0, 0), (padding, padding), (padding, padding))
            pad = layers.Pad(paddings=paddings, mode=pad_mode)
            conv = layers.Conv2dTranspose(in_planes,
                                          out_planes,
                                          kernel_size,
                                          stride,
                                          pad_mode='pad',
                                          has_bias=has_bias)
            layer_list = [pad, conv, norm]
        if use_relu:
            relu = layers.ReLU()
            if alpha > 0:
                relu = layers.LeakyReLU(alpha)
            layer_list.append(relu)
        self.features = layers.SequentialLayer(layer_list)
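    # Sketch, not part of the original snippet: the forward pass just applies
    # the (pad ->) transposed conv -> norm -> activation sequence.
    def construct(self, x):
        return self.features(x)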
Example 3
def _bn_last(channel):
    # Typically used as the last BatchNorm of a residual branch: gamma is
    # zero-initialized so the branch starts out as an identity mapping,
    # which stabilizes early training.
    return layers.BatchNorm2d(channel,
                              eps=1e-4,
                              momentum=0.9,
                              gamma_init=0,
                              beta_init=0,
                              moving_mean_init=0,
                              moving_var_init=1)
Example 4
class UnetSkipConnectionBlock(layers.Layer):
    """One U-Net block: downsample -> submodule -> upsample, with a skip connection."""
    # `layers` module and base class assumed as in Example 1; `Concat` is assumed
    # to be a channel-wise concatenation primitive (e.g. mindspore.ops.Concat).

    def __init__(self, outer_nc, inner_nc, in_planes=None, dropout=False,
                 submodule=None, outermost=False, innermost=False, alpha=0.2, norm_mode='batch'):
        super(UnetSkipConnectionBlock, self).__init__()
        downnorm = layers.BatchNorm2d(inner_nc)
        upnorm = layers.BatchNorm2d(outer_nc)
        use_bias = False
        if norm_mode == 'instance':
            # BatchNorm2d(affine=False) stands in for InstanceNorm2d; the conv
            # then needs its own bias because the norm has no learnable shift.
            downnorm = layers.BatchNorm2d(inner_nc, affine=False)
            upnorm = layers.BatchNorm2d(outer_nc, affine=False)
            use_bias = True
        if in_planes is None:
            in_planes = outer_nc
        downconv = layers.Conv2d(in_planes, inner_nc, kernel_size=4,
                                 stride=2, padding=1, has_bias=use_bias, pad_mode='pad')
        downrelu = layers.LeakyReLU(alpha)
        uprelu = layers.ReLU()

        if outermost:
            upconv = layers.Conv2dTranspose(inner_nc * 2, outer_nc,
                                            kernel_size=4, stride=2,
                                            padding=1, pad_mode='pad')
            down = [downconv]
            up = [uprelu, upconv, layers.Tanh()]
            model = down + [submodule] + up
        elif innermost:
            upconv = layers.Conv2dTranspose(inner_nc, outer_nc,
                                            kernel_size=4, stride=2,
                                            padding=1, has_bias=use_bias, pad_mode='pad')
            down = [downrelu, downconv]
            up = [uprelu, upconv, upnorm]
            model = down + up
        else:
            upconv = layers.Conv2dTranspose(inner_nc * 2, outer_nc,
                                            kernel_size=4, stride=2,
                                            padding=1, has_bias=use_bias, pad_mode='pad')
            down = [downrelu, downconv, downnorm]
            up = [uprelu, upconv, upnorm]

            model = down + [submodule] + up
            if dropout:
                model.append(layers.Dropout(0.5))

        self.model = layers.SequentialLayer(model)
        self.skip_connections = not outermost
        self.concat = Concat(axis=1)
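    # Sketch, not part of the original snippet: the stored flag and concat op
    # are consumed in the forward pass, where every non-outermost block
    # concatenates its input with its output along the channel axis
    # (the U-Net skip connection).
    def construct(self, x):
        out = self.model(x)
        if self.skip_connections:
            out = self.concat((out, x))
        return out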
Example 5
def make_layers(cfg, batch_norm=False):
    # Build a VGG-style feature extractor from a config list, where 'M' marks
    # a max-pooling stage and an integer gives the output channels of a 3x3 conv.
    # `_conv3x3(in_ch, out_ch)` is assumed to return a 3x3 layers.Conv2d.
    layer_list = []
    in_channels = 3
    for v in cfg:
        if v == 'M':
            layer_list += [layers.MaxPool2d(kernel_size=2, stride=2)]
        else:
            conv2d = _conv3x3(in_channels, v)
            if batch_norm:
                layer_list += [conv2d, layers.BatchNorm2d(v), layers.ReLU()]
            else:
                layer_list += [conv2d, layers.ReLU()]
            in_channels = v
    return layers.SequentialLayer(layer_list)
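For instance, a configuration list mixes output-channel counts with 'M' pooling markers. The call below is an illustrative sketch, not part of the original snippet; the cfg values follow the standard VGG-11 layout.

cfg_vgg11 = [64, 'M', 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M']
features = make_layers(cfg_vgg11, batch_norm=True)  # Conv-BN-ReLU stacks separated by max-pooling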
Example 6
class InvertedResidual(layers.Layer):
    """MobileNetV2 inverted residual: 1x1 expand -> 3x3 depthwise -> 1x1 project."""
    # `layers` module and base class assumed as in Example 1; ConvBNReLU is the
    # block from Example 1.

    def __init__(self, inp, oup, stride, expand_ratio):
        super(InvertedResidual, self).__init__()
        assert stride in [1, 2]

        hidden_dim = int(round(inp * expand_ratio))
        # The identity shortcut is only valid when the spatial size and the
        # channel count are unchanged.
        self.use_res_connect = stride == 1 and inp == oup

        residual_layers = []
        if expand_ratio != 1:
            # 1x1 pointwise expansion.
            residual_layers.append(ConvBNReLU(inp, hidden_dim, kernel_size=1))
        residual_layers.extend([
            # 3x3 depthwise convolution (groups == channels, see Example 1).
            ConvBNReLU(hidden_dim,
                       hidden_dim,
                       stride=stride,
                       groups=hidden_dim),
            # 1x1 linear projection (no activation) back down to `oup` channels.
            layers.Conv2d(hidden_dim,
                          oup,
                          kernel_size=1,
                          stride=1,
                          has_bias=False),
            layers.BatchNorm2d(oup),
        ])
        self.conv = layers.SequentialLayer(residual_layers)
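    # Sketch, not part of the original snippet: the residual add is only taken
    # when stride == 1 and the channel count is preserved. The original
    # implementation may use an explicit Add primitive; plain `+` is used here
    # for brevity.
    def construct(self, x):
        out = self.conv(x)
        if self.use_res_connect:
            out = x + out
        return out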