def __init__(self, builder: ConvBuilder, deps):
    """Build the classic LeNet-5 (conv-pool-conv-pool + two FC layers).

    Args:
        builder: ConvBuilder used to construct every layer.
        deps: per-conv-layer output widths; ``deps[0]``/``deps[1]`` are the
            two conv widths, ``deps[2]`` the hidden FC width. If ``None``,
            falls back to the canonical ``LENET5_DEPS``.

    BUGFIX: the original body ignored ``deps`` entirely and always used the
    hard-coded ``LENET5_DEPS`` constants, silently discarding any custom
    widths the caller passed (the sibling ``LeNet5BN`` honors ``deps``).
    """
    super(LeNet5, self).__init__()
    self.bd = builder
    if deps is None:
        deps = LENET5_DEPS
    stem = builder.Sequential()
    stem.add_module('conv1', builder.Conv2d(in_channels=1, out_channels=deps[0],
                                            kernel_size=5, bias=True))
    stem.add_module('maxpool1', builder.Maxpool2d(kernel_size=2))
    stem.add_module('conv2', builder.Conv2d(in_channels=deps[0], out_channels=deps[1],
                                            kernel_size=5, bias=True))
    stem.add_module('maxpool2', builder.Maxpool2d(kernel_size=2))
    self.stem = stem
    self.flatten = builder.Flatten()
    # deps[1] * 16: presumably a 4x4 spatial map after the two pools on a
    # 28x28 MNIST input -- TODO confirm against the training pipeline.
    self.linear1 = builder.Linear(in_features=deps[1] * 16, out_features=deps[2])
    self.relu1 = builder.ReLU()
    self.linear2 = builder.Linear(in_features=deps[2], out_features=10)
def __init__(self, builder: ConvBuilder, deps):
    """LeNet-5 variant whose conv layers carry BatchNorm + ReLU.

    Args:
        builder: ConvBuilder used to construct every layer.
        deps: output widths of the two conv layers (``deps[0]``, ``deps[1]``).
    """
    super(LeNet5BN, self).__init__()
    self.bd = builder
    # Declarative stem: (module name, module) pairs registered in order.
    stem_layers = [
        ('conv1', builder.Conv2dBNReLU(in_channels=1, out_channels=deps[0],
                                       kernel_size=5)),
        ('maxpool1', builder.Maxpool2d(kernel_size=2)),
        ('conv2', builder.Conv2dBNReLU(in_channels=deps[0], out_channels=deps[1],
                                       kernel_size=5)),
        ('maxpool2', builder.Maxpool2d(kernel_size=2)),
    ]
    seq = builder.Sequential()
    for layer_name, layer in stem_layers:
        seq.add_module(layer_name, layer)
    self.stem = seq
    self.flatten = builder.Flatten()
    # deps[1] * 16: presumably a 4x4 feature map after the two pools -- confirm.
    self.linear1 = builder.IntermediateLinear(in_features=deps[1] * 16,
                                              out_features=500)
    self.relu1 = builder.ReLU()
    self.linear2 = builder.Linear(in_features=500, out_features=10)
def __init__(self, builder: ConvBuilder, num_blocks, num_classes=1000, deps=None):
    """Bottleneck ResNet (ResNet-50/101 layouts) built through a ConvBuilder.

    Args:
        builder: ConvBuilder used to construct every layer.
        num_blocks: blocks per stage; [3, 4, 6, 3] -> ResNet-50,
            [3, 4, 23, 3] -> ResNet-101.
        num_classes: size of the final classifier output.
        deps: flattened per-layer widths; derived from ``num_blocks`` when None.

    Raises:
        ValueError: if ``deps`` is None and ``num_blocks`` matches neither
            supported layout.

    CLEANUP: removed a large slab of commented-out input-normalization
    experiments (mean/std tensor variants) that was dead code.
    """
    super(SBottleneckResNet, self).__init__()
    if deps is None:
        if num_blocks == [3, 4, 6, 3]:
            deps = RESNET50_ORIGIN_DEPS_FLATTENED
        elif num_blocks == [3, 4, 23, 3]:
            deps = resnet_bottleneck_origin_deps_flattened(101)
        else:
            raise ValueError('???')
    self.conv1 = builder.Conv2dBNReLU(3, deps[0], kernel_size=7, stride=2, padding=3)
    self.maxpool = builder.Maxpool2d(kernel_size=3, stride=2, padding=1)
    # every stage has num_block * 3 + 1 layers (3 convs per block + projection)
    nls = [n * 3 + 1 for n in num_blocks]  # num layers in each stage
    self.stage1 = ResNetBottleneckStage(builder=builder, in_planes=deps[0],
                                        stage_deps=deps[1:nls[0] + 1])
    self.stage2 = ResNetBottleneckStage(builder=builder, in_planes=deps[nls[0]],
                                        stage_deps=deps[nls[0] + 1:nls[0] + 1 + nls[1]],
                                        stride=2)
    self.stage3 = ResNetBottleneckStage(
        builder=builder, in_planes=deps[nls[0] + nls[1]],
        stage_deps=deps[nls[0] + nls[1] + 1:nls[0] + 1 + nls[1] + nls[2]],
        stride=2)
    self.stage4 = ResNetBottleneckStage(
        builder=builder, in_planes=deps[nls[0] + nls[1] + nls[2]],
        stage_deps=deps[nls[0] + nls[1] + nls[2] + 1:nls[0] + 1 + nls[1] + nls[2] + nls[3]],
        stride=2)
    self.gap = builder.GAP(kernel_size=7)
    self.fc = builder.Linear(deps[-1], num_classes)
def __init__(self, builder: ConvBuilder, deps=SIMPLE_ALEXNET_DEPS):
    """AlexNet whose conv layers carry BatchNorm + ReLU.

    Args:
        builder: ConvBuilder used to construct every layer.
        deps: output widths of the five conv layers.
    """
    super(AlexBN, self).__init__()
    # self.bd = builder
    # All stem modules in registration order, as (name, module) pairs.
    stem_layers = [
        ('conv1', builder.Conv2dBNReLU(in_channels=3, out_channels=deps[0],
                                       kernel_size=11, stride=4, padding=2)),
        ('maxpool1', builder.Maxpool2d(kernel_size=3, stride=2)),
        ('conv2', builder.Conv2dBNReLU(in_channels=deps[0], out_channels=deps[1],
                                       kernel_size=5, padding=2)),
        ('maxpool2', builder.Maxpool2d(kernel_size=3, stride=2)),
        ('conv3', builder.Conv2dBNReLU(in_channels=deps[1], out_channels=deps[2],
                                       kernel_size=3, padding=1)),
        ('conv4', builder.Conv2dBNReLU(in_channels=deps[2], out_channels=deps[3],
                                       kernel_size=3, padding=1)),
        ('conv5', builder.Conv2dBNReLU(in_channels=deps[3], out_channels=deps[4],
                                       kernel_size=3, padding=1)),
        ('maxpool3', builder.Maxpool2d(kernel_size=3, stride=2)),
    ]
    seq = builder.Sequential()
    for layer_name, layer in stem_layers:
        seq.add_module(layer_name, layer)
    self.stem = seq
    self.flatten = builder.Flatten()
    # deps[4] * 6 * 6: presumably a 6x6 map from a 224x224 input -- confirm.
    self.linear1 = builder.Linear(in_features=deps[4] * 6 * 6, out_features=4096)
    self.relu1 = builder.ReLU()
    self.drop1 = builder.Dropout(0.5)
    self.linear2 = builder.Linear(in_features=4096, out_features=4096)
    self.relu2 = builder.ReLU()
    self.drop2 = builder.Dropout(0.5)
    self.linear3 = builder.Linear(in_features=4096, out_features=1000)
def __init__(self, builder: ConvBuilder, num_blocks, num_classes=1000, deps=None):
    """Bottleneck ResNet (ResNet-50/101 layouts) built through a ConvBuilder.

    Args:
        builder: ConvBuilder used to construct every layer.
        num_blocks: blocks per stage; [3, 4, 6, 3] -> ResNet-50,
            [3, 4, 23, 3] -> ResNet-101.
        num_classes: size of the final classifier output.
        deps: flattened per-layer widths; derived from ``num_blocks`` when None.

    Raises:
        ValueError: if ``deps`` is None and ``num_blocks`` matches neither
            supported layout.
    """
    super(SBottleneckResNet, self).__init__()
    if deps is None:
        if num_blocks == [3, 4, 6, 3]:
            deps = RESNET50_ORIGIN_DEPS_FLATTENED
        elif num_blocks == [3, 4, 23, 3]:
            deps = resnet_bottleneck_origin_deps_flattened(101)
        else:
            raise ValueError('???')
    self.conv1 = builder.Conv2dBNReLU(3, deps[0], kernel_size=7, stride=2, padding=3)
    self.maxpool = builder.Maxpool2d(kernel_size=3, stride=2, padding=1)
    # Each stage consumes num_block * 3 conv layers plus one extra layer.
    layers_per_stage = [3 * blocks + 1 for blocks in num_blocks]
    # Cumulative boundaries into the flattened deps list (conv1 sits at 0).
    end1 = layers_per_stage[0]
    end2 = end1 + layers_per_stage[1]
    end3 = end2 + layers_per_stage[2]
    end4 = end3 + layers_per_stage[3]
    self.stage1 = ResNetBottleneckStage(builder=builder, in_planes=deps[0],
                                        stage_deps=deps[1:end1 + 1])
    self.stage2 = ResNetBottleneckStage(builder=builder, in_planes=deps[end1],
                                        stage_deps=deps[end1 + 1:end2 + 1],
                                        stride=2)
    self.stage3 = ResNetBottleneckStage(builder=builder, in_planes=deps[end2],
                                        stage_deps=deps[end2 + 1:end3 + 1],
                                        stride=2)
    self.stage4 = ResNetBottleneckStage(builder=builder, in_planes=deps[end3],
                                        stage_deps=deps[end3 + 1:end4 + 1],
                                        stride=2)
    self.gap = builder.GAP(kernel_size=7)
    self.fc = builder.Linear(deps[-1], num_classes)
def __init__(self, num_classes, builder: ConvBuilder, deps):
    """VGG-style network: 13 Conv-BN-ReLU layers in five pooled stages.

    Args:
        num_classes: size of the final classifier output.
        builder: ConvBuilder used to construct every layer.
        deps: output widths of the 13 conv layers (``deps[0]``..``deps[12]``).

    IDIOM: the original registered all 13 convs with near-identical
    copy-pasted ``add_module`` calls; this builds the same modules, with
    the same names in the same order, from a data-driven loop.
    """
    super(VANet, self).__init__()
    sq = builder.Sequential()
    # Convs per stage; each stage is followed by a 2x2 max-pool.
    stage_sizes = [2, 2, 3, 3, 3]
    in_channels = 3  # RGB input
    conv_idx = 0
    for stage_num, num_convs in enumerate(stage_sizes, start=1):
        for _ in range(num_convs):
            sq.add_module('conv{}'.format(conv_idx + 1),
                          builder.Conv2dBNReLU(in_channels=in_channels,
                                               out_channels=deps[conv_idx],
                                               kernel_size=3, stride=1, padding=1))
            in_channels = deps[conv_idx]
            conv_idx += 1
        sq.add_module('maxpool{}'.format(stage_num),
                      builder.Maxpool2d(kernel_size=2))
    self.stem = sq
    self.flatten = builder.Flatten()
    # in_features == deps[12]: presumably a 1x1 map after five pools on a
    # 32x32 input -- TODO confirm against the dataset used.
    self.linear1 = builder.IntermediateLinear(in_features=deps[12], out_features=512)
    self.relu = builder.ReLU()
    self.linear2 = builder.Linear(in_features=512, out_features=num_classes)