def __init__(self, num_class=10):
    """Build the LeNet-5 layer stack.

    Args:
        num_class (int): number of output classes. Default: 10.
    """
    super(LeNet5, self).__init__()
    self.num_class = num_class
    # Feature extractor: two 5x5 conv blocks; only the first uses
    # batchnorm, and they intentionally use different activations
    # (relu6 vs relu) per the original configuration.
    self.conv1 = combined.Conv2d(
        1, 6, kernel_size=5, batchnorm=True, activation='relu6')
    self.conv2 = combined.Conv2d(6, 16, kernel_size=5, activation='relu')
    # Classifier head: flattened 16 * 5 * 5 feature map -> num_class logits.
    self.fc1 = combined.Dense(16 * 5 * 5, 120, activation='relu')
    self.fc2 = combined.Dense(120, 84, activation='relu')
    self.fc3 = combined.Dense(84, self.num_class)
    # Shared pooling / flatten ops, presumably reused in construct().
    self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2)
    # NOTE(review): "flattern" is a typo of "flatten", but the attribute
    # name is part of the public surface (read by construct()), so it is
    # kept as-is for compatibility.
    self.flattern = nn.Flatten()
def _conv_bn(in_channel, out_channel, ksize, stride=1):
    """Return a SequentialCell wrapping one combined Conv2d with batchnorm.

    NOTE(review): the original docstring claimed this also applies relu,
    but no ``activation`` argument is passed, so the cell is conv +
    batchnorm only — confirm whether ``activation='relu'`` was intended.

    Args:
        in_channel (int): number of input channels.
        out_channel (int): number of output channels.
        ksize (int): square kernel size.
        stride (int): convolution stride. Default: 1.

    Returns:
        nn.SequentialCell: a single-element cell list containing the
        combined conv/batchnorm layer.
    """
    return nn.SequentialCell([
        combined.Conv2d(in_channel,
                        out_channel,
                        kernel_size=ksize,
                        stride=stride,
                        batchnorm=True)
    ])
def __init__(self, inp, oup, stride, expend_ratio):
    """Set up a MobileNetV2-style inverted-residual block.

    Args:
        inp (int): input channel count.
        oup (int): output channel count.
        stride (int): stride of the depthwise conv; must be 1 or 2.
        expend_ratio (int): expansion factor for the hidden channels.
    """
    super(InvertedResidual, self).__init__()
    self.stride = stride
    assert stride in [1, 2]

    hidden_channels = int(inp * expend_ratio)
    # A residual shortcut is only valid when the spatial size and the
    # channel count are both unchanged by this block.
    self.use_res_connect = (self.stride == 1 and inp == oup)

    # When expend_ratio == 1, hidden_channels == inp and the leading
    # pointwise expansion conv is skipped; otherwise it widens the
    # channels before the depthwise stage.
    cells = []
    if expend_ratio != 1:
        cells.append(combined.Conv2d(
            inp, hidden_channels, 1, 1, batchnorm=True, activation='relu6'))
    # Depthwise 3x3 conv (group == channels) followed by a linear
    # pointwise projection back down to the output channel count.
    cells.append(combined.Conv2d(
        hidden_channels, hidden_channels, 3, stride,
        group=hidden_channels, batchnorm=True, activation='relu6'))
    cells.append(combined.Conv2d(
        hidden_channels, oup, 1, 1, batchnorm=True))
    self.conv = nn.SequentialCell(cells)

    # Elementwise add op for the residual path in construct().
    self.add = P.TensorAdd()