def __init__(self, builder: ConvBuilder, deps):
    """Build the classic LeNet-5: two 5x5 conv+pool stages, then two FC layers.

    Args:
        builder: module factory supplying Conv2d / ReLU / Maxpool2d / Linear etc.
        deps: per-layer widths [conv1_out, conv2_out, fc1_out]. May be None,
            in which case the global LENET5_DEPS is used.
    """
    super(LeNet5, self).__init__()
    self.bd = builder
    # Bug fix: the original ignored the `deps` argument and always read the
    # global LENET5_DEPS. Honor the parameter; fall back to the global when
    # callers pass None so existing call sites keep working unchanged.
    if deps is None:
        deps = LENET5_DEPS
    stem = builder.Sequential()
    stem.add_module(
        'conv1',
        builder.Conv2d(in_channels=1, out_channels=deps[0], kernel_size=5, bias=True))
    stem.add_module('relu1', builder.ReLU())
    stem.add_module('maxpool1', builder.Maxpool2d(kernel_size=2))
    stem.add_module(
        'conv2',
        builder.Conv2d(in_channels=deps[0], out_channels=deps[1], kernel_size=5, bias=True))
    stem.add_module('relu2', builder.ReLU())
    stem.add_module('maxpool2', builder.Maxpool2d(kernel_size=2))
    self.stem = stem
    self.flatten = builder.Flatten()
    # * 16: presumably a 4x4 spatial map after two 5x5 convs and two 2x2
    # pools on a 28x28 input — TODO confirm against the dataset loader.
    self.linear1 = builder.Linear(in_features=deps[1] * 16, out_features=deps[2])
    self.relu1 = builder.ReLU()
    self.linear2 = builder.Linear(in_features=deps[2], out_features=10)
def __init__(self, builder: ConvBuilder):
    """Classic LeNet-300-100 MLP: flatten, then 784 -> 300 -> 100 -> 10."""
    super(LeNet300, self).__init__()
    self.flatten = builder.Flatten()
    # Fixed layer widths of the LeNet-300-100 architecture.
    widths = (28 * 28, 300, 100, 10)
    self.linear1 = builder.Linear(in_features=widths[0], out_features=widths[1], bias=True)
    self.relu1 = builder.ReLU()
    self.linear2 = builder.Linear(in_features=widths[1], out_features=widths[2], bias=True)
    self.relu2 = builder.ReLU()
    self.linear3 = builder.Linear(in_features=widths[2], out_features=widths[3], bias=True)
def __init__(self, builder: ConvBuilder, in_planes, stage_deps, stride=1):
    """One ResNet bottleneck stage: a 1x1 Conv-BN projection plus a chain of
    bottleneck branches registered as ``block0``, ``block1``, ...

    ``stage_deps`` layout: index 0 is the projection width, then three widths
    per bottleneck block; every block's final width must match stage_deps[3].
    """
    super(ResNetBottleneckStage, self).__init__()
    print('building stage: in {}, deps {}'.format(in_planes, stage_deps))
    assert (len(stage_deps) - 1) % 3 == 0
    self.num_blocks = (len(stage_deps) - 1) // 3
    stage_out_channels = stage_deps[3]
    # Sanity check: each later block must emit the stage's output width.
    for blk in range(2, self.num_blocks):
        assert stage_deps[3 * blk] == stage_out_channels
    self.relu = builder.ReLU()
    self.projection = builder.Conv2dBN(
        in_channels=in_planes, out_channels=stage_deps[0], kernel_size=1, stride=stride)
    self.align_opr = builder.ResNetAlignOpr(channels=stage_deps[0])
    for blk in range(self.num_blocks):
        is_first = (blk == 0)
        # Only the first block sees the stage input width / stage stride.
        branch = BottleneckBranch(
            builder=builder,
            in_channels=in_planes if is_first else stage_out_channels,
            deps=stage_deps[1 + blk * 3:4 + blk * 3],
            stride=stride if is_first else 1)
        setattr(self, 'block{}'.format(blk), branch)
def __init__(self, num_classes, builder: ConvBuilder, deps):
    """VGG-style network: shared conv stem followed by a 512-wide FC head."""
    super(VCNet, self).__init__()
    self.stem = _create_vgg_stem(builder=builder, deps=deps)
    self.flatten = builder.Flatten()
    hidden = 512
    # deps[12] is the width of the last stem conv — TODO confirm stem layout.
    self.linear1 = builder.IntermediateLinear(in_features=deps[12], out_features=hidden)
    self.relu = builder.ReLU()
    self.linear2 = builder.Linear(in_features=hidden, out_features=num_classes)
def __init__(self, builder: ConvBuilder, deps):
    """LeNet-5 variant with fused Conv-BN-ReLU blocks; ``deps`` holds the
    two conv widths."""
    super(LeNet5BN, self).__init__()
    self.bd = builder
    stem = builder.Sequential()
    # Two conv+pool stages; enumerate keeps the original module names
    # (conv1/maxpool1, conv2/maxpool2) and insertion order.
    for idx, (c_in, c_out) in enumerate(((1, deps[0]), (deps[0], deps[1])), start=1):
        stem.add_module('conv{}'.format(idx),
                        builder.Conv2dBNReLU(in_channels=c_in, out_channels=c_out, kernel_size=5))
        stem.add_module('maxpool{}'.format(idx), builder.Maxpool2d(kernel_size=2))
    self.stem = stem
    self.flatten = builder.Flatten()
    # * 16: presumably a 4x4 feature map after the two pooled stages —
    # TODO confirm input resolution.
    self.linear1 = builder.IntermediateLinear(in_features=deps[1] * 16, out_features=500)
    self.relu1 = builder.ReLU()
    self.linear2 = builder.Linear(in_features=500, out_features=10)
def __init__(self, conv_idx, builder: ConvBuilder, preced_layer_idx, in_features, out_features, bias=True):
    """Linear + ReLU layer used by AOFP; records its own index and that of
    the preceding layer, and keeps a 1-element buffer ``t_value``."""
    super(AOFPFCReluLayer, self).__init__()
    self.conv_idx = conv_idx
    self.preced_layer_idx = preced_layer_idx
    self.base_path = builder.Linear(in_features=in_features, out_features=out_features, bias=bias)
    self.relu = builder.ReLU()
    # Scalar state saved with the module's state_dict but never trained.
    self.register_buffer('t_value', torch.zeros(1))
def __init__(self, builder: ConvBuilder, deps=SIMPLE_ALEXNET_DEPS):
    """AlexNet with fused Conv-BN-ReLU blocks: five conv layers (three of them
    followed by 3x2 max-pools), then three 4096/4096/1000 FC layers."""
    super(AlexBN, self).__init__()
    stem = builder.Sequential()
    # (conv name, in_ch, out_ch, conv kwargs, pool name or None).
    # Insertion order matches the original hand-written sequence.
    specs = (
        ('conv1', 3, deps[0], {'kernel_size': 11, 'stride': 4, 'padding': 2}, 'maxpool1'),
        ('conv2', deps[0], deps[1], {'kernel_size': 5, 'padding': 2}, 'maxpool2'),
        ('conv3', deps[1], deps[2], {'kernel_size': 3, 'padding': 1}, None),
        ('conv4', deps[2], deps[3], {'kernel_size': 3, 'padding': 1}, None),
        ('conv5', deps[3], deps[4], {'kernel_size': 3, 'padding': 1}, 'maxpool3'),
    )
    for conv_name, c_in, c_out, conv_kw, pool_name in specs:
        stem.add_module(conv_name,
                        builder.Conv2dBNReLU(in_channels=c_in, out_channels=c_out, **conv_kw))
        if pool_name is not None:
            stem.add_module(pool_name, builder.Maxpool2d(kernel_size=3, stride=2))
    self.stem = stem
    self.flatten = builder.Flatten()
    fc_width = 4096
    # * 6 * 6: presumably the spatial size after the pooled stem on a
    # 224x224 input — TODO confirm.
    self.linear1 = builder.Linear(in_features=deps[4] * 6 * 6, out_features=fc_width)
    self.relu1 = builder.ReLU()
    self.drop1 = builder.Dropout(0.5)
    self.linear2 = builder.Linear(in_features=fc_width, out_features=fc_width)
    self.relu2 = builder.ReLU()
    self.drop2 = builder.Dropout(0.5)
    self.linear3 = builder.Linear(in_features=fc_width, out_features=1000)
def __init__(self, builder: ConvBuilder, in_planes, stage_deps, stride=1, is_first=False):
    """One ResNet basic stage: an entry conv (first stage) or 1x1 projection
    (later stages), plus basic branches registered as ``block0``, ``block1``, ...

    ``stage_deps`` layout: index 0 is the stage output width, then two widths
    per basic block; every block's second width must equal stage_deps[0].
    """
    super(ResNetBasicStage, self).__init__()
    print('building stage: in {}, deps {}'.format(in_planes, stage_deps))
    self.num_blocks = len(stage_deps) // 2
    stage_out_channels = stage_deps[0]
    # Sanity check: each block's output width matches the stage width.
    for blk in range(self.num_blocks):
        assert stage_deps[blk * 2 + 2] == stage_out_channels
    if is_first:
        # The very first stage opens with a full 3x3 conv instead of a projection.
        self.conv1 = builder.Conv2dBN(in_channels=in_planes, out_channels=stage_out_channels,
                                      kernel_size=3, stride=1, padding=1)
    else:
        self.projection = builder.Conv2dBN(in_channels=in_planes, out_channels=stage_out_channels,
                                           kernel_size=1, stride=stride)
    self.relu = builder.ReLU()
    self.align_opr = builder.ResNetAlignOpr(channels=stage_out_channels)
    for blk in range(self.num_blocks):
        if blk == 0:
            # First block: fed by conv1 (first stage) or by the raw stage input.
            in_c = stage_deps[0] if is_first else in_planes
            blk_stride = stride
        else:
            in_c = stage_out_channels
            blk_stride = 1
        setattr(self, 'block{}'.format(blk),
                BasicBranch(builder=builder, in_channels=in_c,
                            deps=stage_deps[1 + blk * 2:3 + blk * 2], stride=blk_stride))
def __init__(self, builder: ConvBuilder, in_planes, planes, stride=1):
    """Two-conv residual block; the shortcut is a 1x1 Conv-BN projection when
    the spatial or channel shape changes, identity otherwise."""
    super(BasicBlock, self).__init__()
    self.bd = builder
    self.relu = builder.ReLU()
    out_planes = self.expansion * planes
    needs_projection = (stride != 1) or (in_planes != out_planes)
    if needs_projection:
        self.shortcut = builder.Conv2dBN(in_channels=in_planes, out_channels=out_planes,
                                         kernel_size=1, stride=stride)
    else:
        self.shortcut = builder.ResIdentity(num_channels=in_planes)
    self.conv1 = builder.Conv2dBNReLU(in_channels=in_planes, out_channels=planes,
                                      kernel_size=3, stride=stride, padding=1)
    self.conv2 = builder.Conv2dBN(in_channels=planes, out_channels=out_planes,
                                  kernel_size=3, stride=1, padding=1)
def __init__(self, num_classes, builder: ConvBuilder, deps):
    """VGG-16-like network: thirteen 3x3 Conv-BN-ReLU layers in five pooled
    groups, followed by a 512-wide FC head."""
    super(VANet, self).__init__()
    sq = builder.Sequential()
    # Convs per pooled group (VGG-16 layout). The loop reproduces the original
    # hand-written sequence conv1..conv13 / maxpool1..maxpool5 exactly,
    # chaining deps[i-1] -> deps[i] channel widths from a 3-channel input.
    group_sizes = (2, 2, 3, 3, 3)
    conv_idx = 0
    in_c = 3
    for group, size in enumerate(group_sizes, start=1):
        for _ in range(size):
            out_c = deps[conv_idx]
            conv_idx += 1
            sq.add_module('conv{}'.format(conv_idx),
                          builder.Conv2dBNReLU(in_channels=in_c, out_channels=out_c,
                                               kernel_size=3, stride=1, padding=1))
            in_c = out_c
        sq.add_module('maxpool{}'.format(group), builder.Maxpool2d(kernel_size=2))
    self.stem = sq
    self.flatten = builder.Flatten()
    self.linear1 = builder.IntermediateLinear(in_features=deps[12], out_features=512)
    self.relu = builder.ReLU()
    self.linear2 = builder.Linear(in_features=512, out_features=num_classes)