Example 1
def __init__(self, in_channels, growth_rate, bn_size):
    super(_DenseLayer, self).__init__()
    # Bottleneck layer: BN-ReLU-Conv(1x1) reduces to bn_size * growth_rate
    # channels before the BN-ReLU-Conv(3x3) that produces growth_rate features.
    self.layer = layers.SequentialLayer([
        _bn(in_channels),
        layers.ReLU(),
        _conv1x1(in_channels, bn_size * growth_rate),
        _bn(bn_size * growth_rate),
        layers.ReLU(),
        _conv3x3(bn_size * growth_rate, growth_rate),
    ])
    # Concatenates input and new feature maps along the channel axis.
    self.ops = Concat(axis=1)
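
The snippet stops before the forward pass. A minimal sketch of the `construct` method this layer would pair with, assuming the usual DenseNet connectivity implied by `self.ops` (the original implementation may differ):

def construct(self, x):
    new_features = self.layer(x)
    # Dense connectivity: stack the input and the new features channel-wise.
    return self.ops((x, new_features))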
Example 2
def __init__(self, features, class_num=1000):
    super(VGG, self).__init__()
    self.features = features
    self.flatten = layers.Flatten()
    # 512 * 7 * 7 assumes 224x224 inputs: five stride-2 max-pools reduce 224 to 7.
    self.classifier = layers.SequentialLayer([
        layers.Dense(512 * 7 * 7, 4096),
        layers.ReLU(),
        layers.Dropout(),
        layers.Dense(4096, 4096),
        layers.ReLU(),
        layers.Dropout(),
        layers.Dense(4096, class_num),
    ])
Example 3
def make_layers(cfg, batch_norm=False):
    # 'M' entries insert a max-pool; integer entries are conv output channels.
    layer_list = []
    in_channels = 3
    for v in cfg:
        if v == 'M':
            layer_list += [layers.MaxPool2d(kernel_size=2, stride=2)]
        else:
            conv2d = _conv3x3(in_channels, v)
            if batch_norm:
                layer_list += [conv2d, layers.BatchNorm2d(v), layers.ReLU()]
            else:
                layer_list += [conv2d, layers.ReLU()]
            in_channels = v
    return layers.SequentialLayer(layer_list)
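
For context, a sketch of how this helper would combine with the `VGG` class from Example 2. The `cfg` value below is the standard VGG-16 layout, an assumption rather than something shown on this page:

# Standard VGG-16 configuration: conv channel counts, with 'M' marking max-pools.
cfg_vgg16 = [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 'M',
             512, 512, 512, 'M', 512, 512, 512, 'M']

net = VGG(make_layers(cfg_vgg16, batch_norm=True), class_num=1000)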
Example 4
def __init__(self, in_channels, out_channels):
    super(_Transition, self).__init__()
    # DenseNet transition: the 1x1 conv reduces the channel count, then the
    # stride-2 average pool halves the spatial resolution.
    self.layer = layers.SequentialLayer([
        _bn(in_channels),
        layers.ReLU(),
        _conv1x1(in_channels, out_channels),
        AvgPool2d(kernel_size=2,
                  stride=2,
                  pad_mode='same',
                  data_format='NCHW')
    ])
Example 5
def test_sequential():
    context.set_context(mode=context.GRAPH_MODE, device_target="CPU")

    net = layers.SequentialLayer([
        layers.Conv2d(1, 6, 5, pad_mode='valid', weight_init="ones"),
        layers.ReLU(),
        layers.MaxPool2d(kernel_size=2, stride=2)
    ])
    model = Model(net)
    model.compile()
    z = model.predict(ts.ones((1, 1, 32, 32)))
    print(z.asnumpy())
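
For reference, the expected output shape: the 5x5 valid convolution takes the 1x1x32x32 input to 1x6x28x28, and the stride-2 max-pool halves that to 14x14, so `z.asnumpy()` prints an array of shape (1, 6, 14, 14).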
Example 6
def __init__(self,
             in_planes,
             out_planes,
             kernel_size=4,
             stride=2,
             alpha=0.2,
             norm_mode='batch',
             pad_mode='CONSTANT',
             use_relu=True,
             padding=None):
    super(ConvTransposeNormReLU, self).__init__()
    norm = layers.BatchNorm2d(out_planes)
    if norm_mode == 'instance':
        # Use BatchNorm2d with batchsize=1, affine=False, training=True instead of InstanceNorm2d
        norm = layers.BatchNorm2d(out_planes, affine=False)
    has_bias = (norm_mode == 'instance')
    if padding is None:
        padding = (kernel_size - 1) // 2
    if pad_mode == 'CONSTANT':
        conv = layers.Conv2dTranspose(in_planes,
                                      out_planes,
                                      kernel_size,
                                      stride,
                                      pad_mode='same',
                                      has_bias=has_bias)
        layer_list = [conv, norm]
    else:
        # Explicit padding layer followed by an unpadded transposed conv.
        paddings = ((0, 0), (0, 0), (padding, padding), (padding, padding))
        pad = layers.Pad(paddings=paddings, mode=pad_mode)
        conv = layers.Conv2dTranspose(in_planes,
                                      out_planes,
                                      kernel_size,
                                      stride,
                                      pad_mode='pad',
                                      has_bias=has_bias)
        layer_list = [pad, conv, norm]
    if use_relu:
        # The default alpha=0.2 selects LeakyReLU; pass alpha=0 for plain ReLU.
        relu = layers.ReLU()
        if alpha > 0:
            relu = layers.LeakyReLU(alpha)
        layer_list.append(relu)
    self.features = layers.SequentialLayer(layer_list)
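
A quick usage sketch, assuming the block is used as a decoder stage in a typical GAN generator (the defaults `kernel_size=4, stride=2` give a 2x upsampling step; the channel counts below are hypothetical):

# Hypothetical decoder stage: upsample 256-channel maps to 128 channels at 2x resolution.
up_block = ConvTransposeNormReLU(256, 128, norm_mode='instance')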
Example 7
    def __init__(self, outer_nc, inner_nc, in_planes=None, dropout=False,
                 submodule=None, outermost=False, innermost=False, alpha=0.2, norm_mode='batch'):
        super(UnetSkipConnectionBlock, self).__init__()
        downnorm = layers.BatchNorm2d(inner_nc)
        upnorm = layers.BatchNorm2d(outer_nc)
        use_bias = False
        if norm_mode == 'instance':
            downnorm = layers.BatchNorm2d(inner_nc, affine=False)
            upnorm = layers.BatchNorm2d(outer_nc, affine=False)
            use_bias = True
        if in_planes is None:
            in_planes = outer_nc
        downconv = layers.Conv2d(in_planes, inner_nc, kernel_size=4,
                                 stride=2, padding=1, has_bias=use_bias, pad_mode='pad')
        downrelu = layers.LeakyReLU(alpha)
        uprelu = layers.ReLU()

        if outermost:
            upconv = layers.Conv2dTranspose(inner_nc * 2, outer_nc,
                                            kernel_size=4, stride=2,
                                            padding=1, pad_mode='pad')
            down = [downconv]
            up = [uprelu, upconv, layers.Tanh()]
            model = down + [submodule] + up
        elif innermost:
            upconv = layers.Conv2dTranspose(inner_nc, outer_nc,
                                            kernel_size=4, stride=2,
                                            padding=1, has_bias=use_bias, pad_mode='pad')
            down = [downrelu, downconv]
            up = [uprelu, upconv, upnorm]
            model = down + up
        else:
            upconv = layers.Conv2dTranspose(inner_nc * 2, outer_nc,
                                            kernel_size=4, stride=2,
                                            padding=1, has_bias=use_bias, pad_mode='pad')
            down = [downrelu, downconv, downnorm]
            up = [uprelu, upconv, upnorm]

            model = down + [submodule] + up
            if dropout:
                model.append(layers.Dropout(0.5))

        self.model = layers.SequentialLayer(model)
        self.skip_connections = not outermost
        self.concat = Concat(axis=1)
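
The forward pass is not shown; a minimal sketch of the matching `construct`, assuming the standard U-Net skip behaviour implied by `self.skip_connections` and `self.concat` (the original implementation may differ):

def construct(self, x):
    out = self.model(x)
    if self.skip_connections:
        # Concatenate the block output with its input along the channel axis.
        out = self.concat((out, x))
    return out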
Example 8
    def __init__(self, in_channel, out_channel, stride=1):
        super(ResidualBlock, self).__init__()
        channel = out_channel // self.expansion
        self.conv1 = _conv1x1(in_channel, channel, stride=1)
        self.bn1 = _bn(channel)
        self.conv2 = _conv3x3(channel, channel, stride=stride)
        self.bn2 = _bn(channel)
        self.conv3 = _conv1x1(channel, out_channel, stride=1)
        self.bn3 = _bn_last(out_channel)
        self.relu = layers.ReLU()

        self.down_sample = stride != 1 or in_channel != out_channel
        self.down_sample_layer = None
        if self.down_sample:
            # Project the identity branch so shapes match for the residual add.
            self.down_sample_layer = layers.SequentialLayer(
                [_conv1x1(in_channel, out_channel, stride), _bn(out_channel)])
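
A minimal sketch of the bottleneck forward pass these attributes imply, assuming the standard ResNet residual formulation (not shown on this page):

def construct(self, x):
    identity = x
    out = self.relu(self.bn1(self.conv1(x)))
    out = self.relu(self.bn2(self.conv2(out)))
    out = self.bn3(self.conv3(out))
    if self.down_sample:
        identity = self.down_sample_layer(identity)
    # Residual addition followed by the final activation.
    return self.relu(out + identity)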
Example 9
    def __init__(self,
                 block,
                 layer_nums,
                 in_channels,
                 out_channels,
                 strides,
                 num_classes):
        super(ResNet, self).__init__()

        if not len(layer_nums) == len(in_channels) == len(out_channels) == 4:
            raise ValueError("the lengths of layer_nums, in_channels and out_channels must all be 4!")

        self.conv1 = _conv7x7(3, 64, stride=2)
        self.bn1 = _bn(64)
        self.relu = layers.ReLU()
        self.maxpool = layers.MaxPool2d(kernel_size=3, stride=2, pad_mode="same")
        self.layer1 = self._make_layer(block,
                                       layer_nums[0],
                                       in_channel=in_channels[0],
                                       out_channel=out_channels[0],
                                       stride=strides[0])
        self.layer2 = self._make_layer(block,
                                       layer_nums[1],
                                       in_channel=in_channels[1],
                                       out_channel=out_channels[1],
                                       stride=strides[1])
        self.layer3 = self._make_layer(block,
                                       layer_nums[2],
                                       in_channel=in_channels[2],
                                       out_channel=out_channels[2],
                                       stride=strides[2])
        self.layer4 = self._make_layer(block,
                                       layer_nums[3],
                                       in_channel=in_channels[3],
                                       out_channel=out_channels[3],
                                       stride=strides[3])

        self.mean = ReduceMean(keep_dims=True)
        self.flatten = layers.Flatten()
        self.end_point = _fc(out_channels[3], num_classes)
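
For scale, a sketch of how this constructor is typically invoked. The argument values below are the standard ResNet-50 stage configuration, an assumption rather than something shown on this page:

# Hypothetical ResNet-50 instantiation with the standard stage configuration.
net = ResNet(ResidualBlock,
             layer_nums=[3, 4, 6, 3],
             in_channels=[64, 256, 512, 1024],
             out_channels=[256, 512, 1024, 2048],
             strides=[1, 2, 2, 2],
             num_classes=1000)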