# Example #1 (score: 0)
 def __init__(self, inchannel, outchannel, stride=1, x=0):
     """Basic residual block: two 3x3 convs on the main path plus a shortcut.

     Args:
         inchannel: number of input feature channels.
         outchannel: number of output feature channels.
         stride: stride of the first conv (and of the projection shortcut).
         x: running spatial size, threaded through cal_conv2d per conv.
     """
     super(ResidualBlock, self).__init__()
     self.x = x
     # Main path: conv3x3(stride) -> BN -> ReLU -> conv3x3(1) -> BN.
     main_path = [
         nn.Conv2d(inchannel, outchannel, kernel_size=3, stride=stride,
                   padding=1, bias=False),
         nn.BatchNorm2d(outchannel),
         nn.ReLU(inplace=True),
         nn.Conv2d(outchannel, outchannel, kernel_size=3, stride=1,
                   padding=1, bias=False),
         nn.BatchNorm2d(outchannel),
     ]
     self.left = nn.Sequential(*main_path)
     # Track the spatial size through both convs of the main path.
     self.x = cal_conv2d(x=self.x, kernel_size=3, stride=stride, padding=1)
     self.x = cal_conv2d(x=self.x, kernel_size=3, stride=1, padding=1)
     # Identity shortcut by default; 1x1 projection when shape changes.
     self.shortcut = nn.Sequential()
     if stride != 1 or inchannel != outchannel:
         self.shortcut = nn.Sequential(
             nn.Conv2d(inchannel, outchannel, kernel_size=1, stride=stride,
                       bias=False),
             nn.BatchNorm2d(outchannel),
         )
         # NOTE(review): this updates self.x a third time, on top of the two
         # main-path updates above — confirm that is the intended bookkeeping.
         self.x = cal_conv2d(x=self.x, kernel_size=1, stride=stride)
# Example #2 (score: 0)
 def __init__(self, ResidualBlock, x=32, num_classes=10):
     """ResNet-style network for small square images (e.g. CIFAR, x=32).

     Args:
         ResidualBlock: block class passed through to make_layer.
         x: input spatial size; tracked in self.INPUT_SIZE via cal_conv2d.
         num_classes: output size of the final linear classifier.
     """
     super(ResNet, self).__init__()
     self.inchannel = 64
     self.INPUT_SIZE = x
     # Stem: 3x3 conv, stride 1, padding 1 — preserves spatial size.
     self.conv1 = nn.Sequential(
         nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1, bias=False),
         nn.BatchNorm2d(64),
         nn.ReLU(),
     )
     self.INPUT_SIZE = cal_conv2d(x=self.INPUT_SIZE,
                                  kernel_size=3,
                                  stride=1,
                                  padding=1)
     # Four stages of two blocks each; stages 2-4 downsample with stride 2.
     self.layer1 = self.make_layer(ResidualBlock, 64, 2, stride=1)
     self.layer2 = self.make_layer(ResidualBlock, 128, 2, stride=2)
     self.layer3 = self.make_layer(ResidualBlock, 256, 2, stride=2)
     self.layer4 = self.make_layer(ResidualBlock, 512, 2, stride=2)
     # (Removed a leftover debug print of self.INPUT_SIZE here.)
     # Classifier sized from the tracked spatial extent; presumably
     # make_layer also updates self.INPUT_SIZE — verify against its body.
     self.fc = nn.Linear(512 * self.INPUT_SIZE * self.INPUT_SIZE,
                         num_classes)
# Example #3 (score: 0)
 def __init__(self,
              ResidualBlock,
              cfg,
              x=32,
              num_classes=10,
              ipt_channel=3):
     """Configurable ResNet built from a list of stage descriptions.

     Args:
         ResidualBlock: block class passed through to make_layer.
         cfg: list of dicts with keys 'channels', 'num_blocks', 'stride'.
         x: input spatial size; tracked in self.INPUT_SIZE.
         num_classes: output size of the final linear classifier.
         ipt_channel: number of input image channels.
     """
     super(ResNet, self).__init__()
     self.inchannel = 16
     self.INPUT_SIZE = x
     # Stem conv. NOTE(review): kernel_size=2 with padding=1 grows the
     # spatial size by 1 — confirm this asymmetric kernel is intentional.
     self.conv1 = nn.Sequential(
         nn.Conv2d(ipt_channel,
                   16,
                   kernel_size=2,
                   stride=1,
                   padding=1,
                   bias=False),
         nn.BatchNorm2d(16),
         nn.ReLU(),
     )
     self.INPUT_SIZE = cal_conv2d(x=self.INPUT_SIZE,
                                  kernel_size=2,
                                  stride=1,
                                  padding=1)
     layer = []
     self.cfg_length = len(cfg)
     for i in range(self.cfg_length):
         layer.append(
             self.make_layer(ResidualBlock, cfg[i]['channels'],
                             cfg[i]['num_blocks'], cfg[i]['stride']))
     # Finalization moved out of the loop's last-iteration branch: the
     # original nested this under `if i == cfg_length - 1`, which was
     # needlessly confusing and left self.layers/self.pool/self.fc
     # undefined for an empty cfg.
     self.INPUT_SIZE = cal_maxpool2d(x=self.INPUT_SIZE, kernel_size=2)
     self.layers = nn.Sequential(*layer)
     self.pool = nn.MaxPool2d(kernel_size=2)
     # NOTE(review): classifier width comes from the global LINEAR_OUTPUT
     # rather than the tracked INPUT_SIZE — confirm they stay in sync.
     self.fc = nn.Linear(LINEAR_OUTPUT, num_classes)