def __init__(self, builder: ConvBuilder, deps):
    """Construct the LeNet-5 module graph via the supplied ConvBuilder.

    Args:
        builder: factory object whose Conv2d/Maxpool2d/Flatten/Linear/ReLU
            methods produce the concrete layers.
        deps: accepted but not read here; layer widths come from the
            module-level LENET5_DEPS instead.
            NOTE(review): confirm whether `deps` was meant to override
            LENET5_DEPS — it is currently ignored.
    """
    super(LeNet5, self).__init__()
    self.bd = builder

    # Convolutional feature extractor: two conv + max-pool stages.
    feature_extractor = builder.Sequential()
    feature_extractor.add_module(
        'conv1',
        builder.Conv2d(in_channels=1, out_channels=LENET5_DEPS[0],
                       kernel_size=5, bias=True))
    feature_extractor.add_module('maxpool1', builder.Maxpool2d(kernel_size=2))
    feature_extractor.add_module(
        'conv2',
        builder.Conv2d(in_channels=LENET5_DEPS[0],
                       out_channels=LENET5_DEPS[1],
                       kernel_size=5, bias=True))
    feature_extractor.add_module('maxpool2', builder.Maxpool2d(kernel_size=2))
    self.stem = feature_extractor

    # Classifier head. The 16 factor presumably reflects the 4x4 spatial
    # map left after the stem — TODO confirm against the input size used.
    self.flatten = builder.Flatten()
    self.linear1 = builder.Linear(in_features=LENET5_DEPS[1] * 16,
                                  out_features=LENET5_DEPS[2])
    self.relu1 = builder.ReLU()
    self.linear2 = builder.Linear(in_features=LENET5_DEPS[2],
                                  out_features=10)
def __init__(self, block_counts, num_classes, builder: ConvBuilder, deps,
             use_dropout):
    """Assemble a WRN-style CIFAR network from a flattened deps spec.

    Args:
        block_counts: number of residual blocks in each of the 3 stages.
        num_classes: output dimension of the final linear layer.
        builder: factory whose Conv2d/BatchNorm2d/Linear methods produce
            the concrete layers.
        deps: flattened per-layer width spec; converted to the nested
            per-stage form by wrn_convert_flattened_deps.
        use_dropout: forwarded to each stage builder.
    """
    super(WRNCifarNet, self).__init__()
    self.bd = builder

    converted_deps = wrn_convert_flattened_deps(deps)
    # NOTE(review): debug print left in constructor — consider logging.
    print('the converted deps is ', converted_deps)

    # Initial 3x3 conv keeps spatial resolution (stride 1, padding 1).
    self.conv1 = builder.Conv2d(in_channels=3,
                                out_channels=converted_deps[0],
                                kernel_size=3, stride=1, padding=1,
                                bias=False)

    # Three residual stages; stage 1 keeps resolution, stages 2 and 3
    # downsample. Each stage's input width is the last block's output
    # width (converted_deps[i][-1][1]) of the previous stage.
    self.stage1 = self._build_wrn_stage(
        num_blocks=block_counts[0],
        stage_input_channels=converted_deps[0],
        stage_deps=converted_deps[1],
        downsample=False,
        use_dropout=use_dropout)
    self.stage2 = self._build_wrn_stage(
        num_blocks=block_counts[1],
        stage_input_channels=converted_deps[1][-1][1],
        stage_deps=converted_deps[2],
        downsample=True,
        use_dropout=use_dropout)
    self.stage3 = self._build_wrn_stage(
        num_blocks=block_counts[2],
        stage_input_channels=converted_deps[2][-1][1],
        stage_deps=converted_deps[3],
        downsample=True,
        use_dropout=use_dropout)

    # Final BN + classifier operate on the last stage's output width.
    final_width = converted_deps[3][-1][1]
    self.last_bn = builder.BatchNorm2d(num_features=final_width)
    self.linear = builder.Linear(in_features=final_width,
                                 out_features=num_classes)