def __init__(self, block, num_classes=100):
    """Build the model-parallel ResNet submodules.

    Args:
        block: residual-block class; its ``expansion`` attribute scales the
            classifier's input width.
        num_classes (int): size of the final fully-connected output layer.
    """
    super(ResNetModelParallel, self).__init__()
    # ReLU with an explicit shard strategy: axis 1 is split across dev_num
    # devices (NOTE(review): presumably the NCHW channel axis — confirm).
    self.relu = P.ReLU().shard(((1, dev_num, 1, 1),))
    self.maxpool = MaxPool2d(kernel_size=3, stride=2, pad_mode="same")
    # First residual stage: 64 input channels widened to 256, stride 1.
    self.layer1 = MakeLayer0(block,
                             in_channels=64,
                             out_channels=256,
                             stride=1)
    # Global mean reduction; keep_dims=True so Flatten sees a 4-D tensor.
    self.pool = M.ReduceMean(keep_dims=True).shard(strategy_no_weight)
    fc_in_width = 64 * block.expansion
    self.fc = fc_with_initialize(fc_in_width, num_classes)
    self.flatten = Flatten()
Example #2
 def __init__(self, block, num_classes=100):
     """Assemble the ResNet layers: stem, stage-1 blocks, pooling, classifier.

     Args:
         block: residual-block class; its ``expansion`` attribute scales the
             classifier's input width.
         num_classes (int): size of the final fully-connected output layer.
     """
     super(ResNet, self).__init__()
     # Stem: 7x7 convolution downsampling by 2, then batch norm and ReLU
     # (the ReLU carries a no-weight parallel strategy).
     self.conv1 = conv7x7(3, 64, stride=2)
     self.bn1 = bn_with_initialize(64)
     self.relu = P.ReLU().set_strategy(strategy_no_weight)
     self.maxpool = MaxPool2d(kernel_size=3, stride=2, pad_mode="same")
     # First residual stage: 64 input channels widened to 256, stride 1.
     self.layer1 = MakeLayer0(
         block, in_channels=64, out_channels=256, stride=1)
     # Global mean reduction; keep_dims=True so Flatten sees a 4-D tensor.
     self.pool = M.ReduceMean(keep_dims=True).set_strategy(strategy_no_weight)
     fc_in_width = 64 * block.expansion
     self.fc = fc_with_initialize(fc_in_width, num_classes)
     self.flatten = Flatten()