예제 #1
0
 def __init__(self, network, l2_coef=1e-6):
     """Wrap `network` so its output can be trained with a sigmoid
     cross-entropy loss plus an L2 penalty.

     Args:
         network: backbone cell whose output feeds the loss
             (presumably DeepFM logits — set by the caller).
         l2_coef: weight of the L2 regularization term.
     """
     super(DeepFMWithLoss, self).__init__(auto_prefix=False)
     # Loss primitive plus the elementwise/reduction ops needed to
     # assemble the regularized objective elsewhere in this cell.
     self.loss = SigmoidCrossEntropyWithLogits()
     self.square = Square()
     self.reduce_sum = ReduceSum(keep_dims=False)
     self.reduce_mean = ReduceMean(keep_dims=False)
     # Wrapped backbone and regularization strength.
     self.network = network
     self.l2_coef = l2_coef
예제 #2
0
    def __init__(self,
                 class_num=1000,
                 growth_rate=12,
                 block_config=(6, 12, 24, 16),
                 bn_size=4,
                 theta=0.5,
                 bc=False):
        """Build the DenseNet feature stack and classifier head.

        Args:
            class_num: number of output classes for the final FC layer.
            growth_rate: channels each layer adds inside a dense block.
            block_config: per-block layer counts, one entry per dense block.
            bn_size: bottleneck multiplier forwarded to ``_DenseBlock``.
            theta: compression factor applied by transition layers
                (output channels become ``int(num_feature * theta)``).
            bc: if True use the small-input stem (3x3 conv, stride 1, no
                pooling); otherwise the 7x7-conv + max-pool stem.
        """
        super(DenseNet, self).__init__()

        # DenseNet convention: the stem produces twice the growth rate.
        num_init_feature = 2 * growth_rate
        if bc:
            stem = [
                _conv3x3(3, num_init_feature, 1),
                _bn(num_init_feature),
                ReLU(),
            ]
        else:
            stem = [
                _conv7x7(3, num_init_feature, 2),
                _bn(num_init_feature),
                ReLU(),
                MaxPool2d(kernel_size=2,
                          stride=2,
                          pad_mode='same',
                          data_format='NCHW'),
            ]
        self.features = layers.SequentialLayer(stem)

        # Alternate dense blocks with compressing transitions; the final
        # dense block is NOT followed by a transition.
        num_feature = num_init_feature
        for i, num_layers in enumerate(block_config):
            self.features.append(
                _DenseBlock(num_layers, num_feature, bn_size, growth_rate))
            num_feature = num_feature + growth_rate * num_layers
            if i != len(block_config) - 1:
                self.features.append(
                    _Transition(num_feature, int(num_feature * theta)))
                num_feature = int(num_feature * theta)

        # Head: final BN/ReLU, global average pool (keep_dims for the
        # later flatten), then the fully-connected classifier.
        self.norm = _bn(num_feature)
        self.relu = ReLU()
        self.mean = ReduceMean(keep_dims=True)
        self.flatten = layers.Flatten()
        self.end_point = _fc(num_feature, class_num)
예제 #3
0
    def __init__(self,
                 block,
                 layer_nums,
                 in_channels,
                 out_channels,
                 strides,
                 num_classes):
        """Build a 4-stage ResNet: stem, four residual stages, and head.

        Args:
            block: residual block class used by ``_make_layer``.
            layer_nums: blocks per stage; must have length 4.
            in_channels: input channels per stage; must have length 4.
            out_channels: output channels per stage; must have length 4.
            strides: first-block stride per stage; must have length 4.
            num_classes: size of the final fully-connected output.

        Raises:
            ValueError: if any of the four per-stage lists is not length 4.
        """
        super(ResNet, self).__init__()

        # Validate all four per-stage lists together. The original check
        # omitted `strides`, which is indexed up to [3] below and would
        # have failed later with an unhelpful IndexError.
        if not len(layer_nums) == len(in_channels) == len(out_channels) == len(strides) == 4:
            raise ValueError("the length of layer_nums, in_channels, "
                             "out_channels, strides list must be 4!")

        # Stem: 7x7 conv, BN, ReLU, 3x3 max-pool (stride 2).
        self.conv1 = _conv7x7(3, 64, stride=2)
        self.bn1 = _bn(64)
        self.relu = layers.ReLU()
        self.maxpool = layers.MaxPool2d(kernel_size=3, stride=2, pad_mode="same")

        # Four residual stages, one per entry of the config lists.
        self.layer1 = self._make_layer(block,
                                       layer_nums[0],
                                       in_channel=in_channels[0],
                                       out_channel=out_channels[0],
                                       stride=strides[0])
        self.layer2 = self._make_layer(block,
                                       layer_nums[1],
                                       in_channel=in_channels[1],
                                       out_channel=out_channels[1],
                                       stride=strides[1])
        self.layer3 = self._make_layer(block,
                                       layer_nums[2],
                                       in_channel=in_channels[2],
                                       out_channel=out_channels[2],
                                       stride=strides[2])
        self.layer4 = self._make_layer(block,
                                       layer_nums[3],
                                       in_channel=in_channels[3],
                                       out_channel=out_channels[3],
                                       stride=strides[3])

        # Head: global average pool (keep_dims for the later flatten),
        # flatten, and the fully-connected classifier.
        self.mean = ReduceMean(keep_dims=True)
        self.flatten = layers.Flatten()
        self.end_point = _fc(out_channels[3], num_classes)
예제 #4
0
 def __init__(self):
     """Prepare the mean-reduction op used for global average pooling."""
     super(GlobalAvgPooling, self).__init__()
     # keep_dims=False: the reduced axes are dropped from the output shape.
     self.mean = ReduceMean(keep_dims=False)