Code Example #1
    def __init__(self, num_class=10, channel=1):
        super(LeNet5, self).__init__()
        self.type = "fusion"
        self.num_class = num_class

        # change `nn.Conv2d` to `nn.Conv2dBnAct`
        self.conv1 = nn.Conv2dBnAct(channel,
                                    6,
                                    5,
                                    pad_mode='valid',
                                    has_bn=True,
                                    activation='relu')
        self.conv2 = nn.Conv2dBnAct(6,
                                    16,
                                    5,
                                    pad_mode='valid',
                                    has_bn=True,
                                    activation='relu')
        # change `nn.Dense` to `nn.DenseBnAct`
        self.fc1 = nn.DenseBnAct(16 * 5 * 5, 120, activation='relu')
        self.fc2 = nn.DenseBnAct(120, 84, activation='relu')
        self.fc3 = nn.DenseBnAct(84, self.num_class)

        self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2)
        self.flatten = nn.Flatten()
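
The snippet above shows only __init__. A construct method consistent with these layers (a sketch of the standard LeNet-5 forward pass, not necessarily the original file's exact code) would be:

    def construct(self, x):
        # two conv/pool stages, then flatten into the three dense layers
        x = self.max_pool2d(self.conv1(x))
        x = self.max_pool2d(self.conv2(x))
        x = self.flatten(x)
        x = self.fc1(x)
        x = self.fc2(x)
        x = self.fc3(x)
        return x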
Code Example #2
    def __init__(self, num_class=10):
        super(LeNet5, self).__init__()
        self.num_class = num_class
        self.conv1 = nn.Conv2dBnAct(1, 6, kernel_size=5, has_bn=True, activation='relu', pad_mode="valid")
        self.conv2 = nn.Conv2dBnAct(6, 16, kernel_size=5, activation='relu', pad_mode="valid")
        self.fc1 = nn.DenseBnAct(16 * 5 * 5, 120, activation='relu')
        self.fc2 = nn.DenseBnAct(120, 84, activation='relu')
        self.fc3 = nn.DenseBnAct(84, self.num_class)
        self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2)
        self.flatten = nn.Flatten()
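
A quick usage sketch (assuming the class also defines a construct like the one sketched under Code Example #1, and a 1x32x32 input, which is what makes the 16 * 5 * 5 flatten size work out after two 5x5 'valid' convolutions each followed by 2x2 pooling):

    import numpy as np
    import mindspore
    from mindspore import Tensor

    net = LeNet5(num_class=10)
    x = Tensor(np.ones((1, 1, 32, 32)), mindspore.float32)
    logits = net(x)  # shape: (1, 10)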
Code Example #3
File: mobilenetV2.py  Project: wudenggang/mindspore
    def __init__(self, num_classes=1000, width_mult=1.,
                 has_dropout=False, inverted_residual_setting=None, round_nearest=8):
        super(mobilenetV2, self).__init__()
        block = InvertedResidual
        input_channel = 32
        last_channel = 1280
        # setting of inverted residual blocks
        self.cfgs = inverted_residual_setting
        if inverted_residual_setting is None:
            self.cfgs = [
                # t, c, n, s
                [1, 16, 1, 1],
                [6, 24, 2, 2],
                [6, 32, 3, 2],
                [6, 64, 4, 2],
                [6, 96, 3, 1],
                [6, 160, 3, 2],
                [6, 320, 1, 1],
            ]

        # building first layer
        input_channel = _make_divisible(input_channel * width_mult, round_nearest)
        self.out_channels = _make_divisible(last_channel * max(1.0, width_mult), round_nearest)

        features = [ConvBNReLU(3, input_channel, stride=2)]
        # building inverted residual blocks
        for t, c, n, s in self.cfgs:
            output_channel = _make_divisible(c * width_mult, round_nearest)
            for i in range(n):
                stride = s if i == 0 else 1
                features.append(block(input_channel, output_channel, stride, expand_ratio=t))
                input_channel = output_channel
        # building last several layers
        features.append(ConvBNReLU(input_channel, self.out_channels, kernel_size=1))
        # wrap the feature layers in nn.SequentialCell
        self.features = nn.SequentialCell(features)
        # mobilenet head
        head = ([GlobalAvgPooling(),
                 nn.DenseBnAct(self.out_channels, num_classes, has_bias=True, has_bn=False)
                 ] if not has_dropout else
                [GlobalAvgPooling(),
                 nn.Dropout(0.2),
                 nn.DenseBnAct(self.out_channels, num_classes, has_bias=True, has_bn=False)
                 ])
        self.head = nn.SequentialCell(head)

        # init weights
        self._initialize_weights()
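
The _make_divisible helper is not shown in the snippet. The version commonly used in MobileNetV2 implementations (a sketch; the repo's own definition may differ in detail) rounds a channel count to the nearest multiple of round_nearest while never shrinking it by more than 10%:

    def _make_divisible(v, divisor, min_value=None):
        if min_value is None:
            min_value = divisor
        # round to the nearest multiple of divisor
        new_v = max(min_value, int(v + divisor / 2) // divisor * divisor)
        # make sure rounding down does not remove more than 10% of the channels
        if new_v < 0.9 * v:
            new_v += divisor
        return new_v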
Code Example #4
    def __init__(self,
                 in_channels=3,
                 classes=1000,
                 k=192,
                 l=224,
                 m=256,
                 n=384,
                 is_train=True):
        super(Inceptionv4, self).__init__()
        blocks = []
        blocks.append(Stem(in_channels))
        for _ in range(4):
            blocks.append(InceptionA(384))
        blocks.append(ReductionA(384, k, l, m, n))
        for _ in range(7):
            blocks.append(InceptionB(1024))
        blocks.append(ReductionB(1024))
        for _ in range(3):
            blocks.append(InceptionC(1536))
        self.features = nn.SequentialCell(blocks)

        self.avgpool = P.ReduceMean(keep_dims=False)
        self.softmax = nn.DenseBnAct(1536,
                                     classes,
                                     weight_init="XavierUniform",
                                     has_bias=True,
                                     has_bn=True,
                                     activation="logsoftmax")

        if is_train:
            self.dropout = nn.Dropout(0.20)
        else:
            self.dropout = nn.Dropout(1)
        self.bn0 = nn.BatchNorm1d(1536, eps=0.001, momentum=0.1)
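
Note that in older MindSpore versions nn.Dropout takes keep_prob rather than a drop probability, so nn.Dropout(1) keeps every activation, i.e. it disables dropout for evaluation. A forward pass consistent with these members (a sketch; the original file may order things differently, and it is not clear from the snippet where bn0 is applied) could look like:

    def construct(self, x):
        x = self.features(x)
        x = self.avgpool(x, (2, 3))  # global average over H and W
        x = self.dropout(x)
        x = self.softmax(x)          # DenseBnAct with log-softmax activation
        return x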
Code Example #5
    def __init__(self, num_class=1000, input_size=224, width_mul=1.):
        super(MobileNetV2, self).__init__()
        _ = input_size
        block = InvertedResidual
        input_channel = 32
        last_channel = 1280
        inverted_residual_setting = [
            [1, 16, 1, 1],
            [6, 24, 2, 2],
            [6, 32, 3, 2],
            [6, 64, 4, 2],
            [6, 96, 3, 1],
            [6, 160, 3, 2],
            [6, 320, 1, 1],
        ]
        if width_mul > 1.0:
            last_channel = make_divisible(last_channel * width_mul)
        self.last_channel = last_channel
        features = [_conv_bn(3, input_channel, 3, 2)]

        for t, c, n, s in inverted_residual_setting:
            out_channel = make_divisible(c * width_mul) if t > 1 else c
            for i in range(n):
                if i == 0:
                    features.append(block(input_channel, out_channel, s, t))
                else:
                    features.append(block(input_channel, out_channel, 1, t))
                input_channel = out_channel

        features.append(_conv_bn(input_channel, self.last_channel, 1))

        self.features = nn.SequentialCell(features)
        self.mean = P.ReduceMean(keep_dims=False)
        self.classifier = nn.DenseBnAct(self.last_channel, num_class)
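
This variant likewise omits construct; the usual MobileNetV2 forward pass over these members (a sketch) is:

    def construct(self, x):
        x = self.features(x)
        x = self.mean(x, (2, 3))  # global average pooling via ReduceMean
        x = self.classifier(x)
        return x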
Code Example #6
File: resnet_quant.py  Project: zuoshou030/mindspore
    def __init__(self,
                 block,
                 layer_nums,
                 in_channels,
                 out_channels,
                 strides,
                 num_classes):
        super(ResNet, self).__init__()

        if not len(layer_nums) == len(in_channels) == len(out_channels) == 4:
            raise ValueError("the length of layer_nums, in_channels, out_channels lists must be 4!")

        self.conv1 = ConvBNReLU(3, 64, kernel_size=7, stride=2)
        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, pad_mode="same")

        self.layer1 = self._make_layer(block,
                                       layer_nums[0],
                                       in_channel=in_channels[0],
                                       out_channel=out_channels[0],
                                       stride=strides[0])
        self.layer2 = self._make_layer(block,
                                       layer_nums[1],
                                       in_channel=in_channels[1],
                                       out_channel=out_channels[1],
                                       stride=strides[1])
        self.layer3 = self._make_layer(block,
                                       layer_nums[2],
                                       in_channel=in_channels[2],
                                       out_channel=out_channels[2],
                                       stride=strides[2])
        self.layer4 = self._make_layer(block,
                                       layer_nums[3],
                                       in_channel=in_channels[3],
                                       out_channel=out_channels[3],
                                       stride=strides[3])

        self.mean = P.ReduceMean(keep_dims=True)
        self.flatten = nn.Flatten()
        self.end_point = nn.DenseBnAct(out_channels[3], num_classes, has_bias=True, has_bn=False)
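
For completeness, the forward pass that typically accompanies this __init__ in the model-zoo ResNet variants (a sketch, not verbatim from resnet_quant.py) is:

    def construct(self, x):
        x = self.conv1(x)
        x = self.maxpool(x)
        x = self.layer1(x)
        x = self.layer2(x)
        x = self.layer3(x)
        x = self.layer4(x)
        out = self.mean(x, (2, 3))  # keep_dims=True, hence the flatten below
        out = self.flatten(out)
        out = self.end_point(out)
        return out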