Example #1
import torch.nn as nn

# ConvBuilder is assumed to come from the surrounding project (e.g. its
# builder module); the import path is not part of the original excerpt.
from builder import ConvBuilder


class WRNCifarBlock(nn.Module):
    # Pre-activation Wide ResNet (WRN) block for CIFAR: two BN-ReLU-Conv
    # layers with optional dropout between them, plus a shortcut branch.
    def __init__(self, input_channels, block_channels, stride,
                 projection_shortcut, use_dropout, builder: ConvBuilder):
        super(WRNCifarBlock, self).__init__()
        assert len(block_channels) == 2

        # Shortcut branch: a 1x1 BN-ReLU-Conv projection when the spatial
        # size or channel count changes, otherwise an identity mapping.
        if projection_shortcut:
            self.proj = builder.BNReLUConv2d(in_channels=input_channels,
                                             out_channels=block_channels[1],
                                             kernel_size=1,
                                             stride=stride,
                                             padding=0)
        else:
            self.proj = builder.ResIdentity(num_channels=block_channels[1])

        # First 3x3 BN-ReLU-Conv; any downsampling stride is applied here.
        self.conv1 = builder.BNReLUConv2d(in_channels=input_channels,
                                          out_channels=block_channels[0],
                                          kernel_size=3,
                                          stride=stride,
                                          padding=1)
        # Optional dropout between the two convolutions (keep_prob=0.7,
        # i.e. a drop rate of 0.3), as in the WRN paper.
        if use_dropout:
            self.dropout = builder.Dropout(keep_prob=0.7)
            print('use dropout for WRN')
        else:
            self.dropout = builder.Identity()
        # Second 3x3 BN-ReLU-Conv, always stride 1.
        self.conv2 = builder.BNReLUConv2d(in_channels=block_channels[0],
                                          out_channels=block_channels[1],
                                          kernel_size=3,
                                          stride=1,
                                          padding=1)
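
The excerpt ends at the constructor, so the residual combination itself is not shown. Below is a minimal sketch of the forward pass such a block would typically use, assuming the standard pre-activation WRN wiring (residual branch plus shortcut, summed at the end); the method body is an illustration, not part of the original code:

    def forward(self, x):
        # Residual branch: strided 3x3 BN-ReLU-Conv -> dropout -> 3x3 BN-ReLU-Conv.
        out = self.conv1(x)
        out = self.dropout(out)
        out = self.conv2(out)
        # Add the shortcut branch (1x1 projection or identity).
        return out + self.proj(x)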
Example #2
import torch.nn as nn

# ConvBuilder and SIMPLE_ALEXNET_DEPS (the per-layer channel widths) are
# assumed to come from the surrounding project; these import paths are not
# part of the original excerpt.
from builder import ConvBuilder
from constants import SIMPLE_ALEXNET_DEPS


class AlexBN(nn.Module):
    # AlexNet with BatchNorm: five Conv-BN-ReLU layers interleaved with
    # max pooling (the "stem"), followed by the usual three-layer fully
    # connected classifier with dropout.
    def __init__(self, builder: ConvBuilder, deps=SIMPLE_ALEXNET_DEPS):
        super(AlexBN, self).__init__()
        # Convolutional stem; deps[i] is the output width of the i-th conv.
        stem = builder.Sequential()
        stem.add_module('conv1', builder.Conv2dBNReLU(in_channels=3, out_channels=deps[0], kernel_size=11, stride=4, padding=2))
        stem.add_module('maxpool1', builder.Maxpool2d(kernel_size=3, stride=2))
        stem.add_module('conv2', builder.Conv2dBNReLU(in_channels=deps[0], out_channels=deps[1], kernel_size=5, padding=2))
        stem.add_module('maxpool2', builder.Maxpool2d(kernel_size=3, stride=2))
        stem.add_module('conv3',
                        builder.Conv2dBNReLU(in_channels=deps[1], out_channels=deps[2], kernel_size=3, padding=1))
        stem.add_module('conv4',
                        builder.Conv2dBNReLU(in_channels=deps[2], out_channels=deps[3], kernel_size=3, padding=1))
        stem.add_module('conv5',
                        builder.Conv2dBNReLU(in_channels=deps[3], out_channels=deps[4], kernel_size=3, padding=1))
        stem.add_module('maxpool3', builder.Maxpool2d(kernel_size=3, stride=2))
        self.stem = stem
        self.flatten = builder.Flatten()
        # Classifier head; the stem yields a 6x6 feature map on 224x224 inputs.
        self.linear1 = builder.Linear(in_features=deps[4] * 6 * 6, out_features=4096)
        self.relu1 = builder.ReLU()
        self.drop1 = builder.Dropout(0.5)
        self.linear2 = builder.Linear(in_features=4096, out_features=4096)
        self.relu2 = builder.ReLU()
        self.drop2 = builder.Dropout(0.5)
        self.linear3 = builder.Linear(in_features=4096, out_features=1000)
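
The constructor above only registers the layers. Below is a minimal sketch of the corresponding forward pass and of instantiating the model, assuming the usual stem -> flatten -> classifier ordering; the forward body and the my_builder instance are illustrative assumptions, not part of the original code:

    def forward(self, x):
        # Convolutional stem, then flatten to (batch, deps[4] * 6 * 6).
        out = self.stem(x)
        out = self.flatten(out)
        # Three-layer classifier with dropout after the first two ReLUs.
        out = self.drop1(self.relu1(self.linear1(out)))
        out = self.drop2(self.relu2(self.linear2(out)))
        return self.linear3(out)

Usage with any configured ConvBuilder instance (here called my_builder), producing 1000-way logits from 224x224 RGB inputs:

    model = AlexBN(builder=my_builder)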