def conv_add(in_planes, out_planes, kernel_size=(3, 3), stride=1, padding=0,
             quantize=False, weight_bits=8, sparsity=0):
    """Hybrid block: a standard conv followed by a 1x1 adder layer.

    Args:
        in_planes: input channel count.
        out_planes: output channel count.
        kernel_size: kernel of the leading nn.Conv2d (default 3x3).
        stride: stride of the leading conv.
        padding: padding of the leading conv.
        quantize, weight_bits, sparsity: forwarded to adder.Adder2D.

    Returns:
        nn.Sequential(conv, adder) producing out_planes channels.
    """
    shift = nn.Conv2d(in_planes, out_planes, kernel_size=kernel_size,
                      stride=stride, padding=padding, bias=False)
    # Fix: the 1x1 adder stage must use padding=0 — a 1x1 kernel with
    # nonzero padding inflates the spatial dims (H + 2*padding), breaking
    # the block's expected output size and diverging from the conv1x1
    # hybrid helper, whose 1x1 adder stage uses no padding.
    add = adder.Adder2D(out_planes, out_planes, kernel_size=(1, 1),
                        stride=1, padding=0, bias=False,
                        quantize=quantize, weight_bits=weight_bits,
                        sparsity=sparsity)
    return nn.Sequential(shift, add)
def _make_layer(self, block, planes, blocks, stride=1):
    """Stack `blocks` residual units of `block` at width `planes`.

    The first unit may carry a projection shortcut (1x1 Adder2D + BN)
    when the stride or channel count changes; subsequent units run at
    stride 1 with no shortcut projection.
    """
    width = planes * block.expansion
    downsample = None
    if stride != 1 or self.inplanes != width:
        downsample = nn.Sequential(
            adder.Adder2D(self.inplanes, width, kernel_size=1,
                          stride=stride, bias=False,
                          quantize=self.quantize,
                          weight_bits=self.weight_bits,
                          sparsity=self.sparsity),
            nn.BatchNorm2d(width))

    units = [block(inplanes=self.inplanes, planes=planes, stride=stride,
                   downsample=downsample, quantize=self.quantize,
                   weight_bits=self.weight_bits, sparsity=self.sparsity)]
    self.inplanes = width
    units.extend(
        block(inplanes=self.inplanes, planes=planes,
              quantize=self.quantize, weight_bits=self.weight_bits,
              sparsity=self.sparsity)
        for _ in range(blocks - 1))
    return nn.Sequential(*units)
def conv1x1(in_planes, out_planes, threshold, sign_threshold, distribution,
            stride=1, quantize=False, weight_bits=8, sparsity=0):
    """1x1 shift convolution followed by a 1x1 adder stage.

    The SEConv2d stage performs the channel projection and striding;
    the Adder2D stage then operates on its output at stride 1.
    """
    projection = SEConv2d(in_planes, out_planes, kernel_size=1,
                          stride=stride, bias=False, threshold=threshold,
                          sign_threshold=sign_threshold,
                          distribution=distribution)
    adder_stage = adder.Adder2D(out_planes, out_planes, kernel_size=1,
                                stride=1, bias=False, quantize=quantize,
                                weight_bits=weight_bits, sparsity=sparsity)
    return nn.Sequential(projection, adder_stage)
def __init__(self, num_classes=10):
    """LeNet-style adder network producing a 2-D embedding for visualisation.

    Args:
        num_classes: size of the final classification layer (default 10).
    """
    super(LeNet_vis, self).__init__()
    # Stage 1: two 5x5 adder layers at 32 channels, each with PReLU.
    self.conv1_1 = adder.Adder2D(1, 32, kernel_size=5, padding=2)
    self.prelu1_1 = nn.PReLU()
    self.conv1_2 = adder.Adder2D(32, 32, kernel_size=5, padding=2)
    self.prelu1_2 = nn.PReLU()
    # Deeper stages kept for reference but currently disabled:
    # self.conv2_1 = adder.Adder2D(32, 64, kernel_size=5, padding=2)
    # self.prelu2_1 = nn.PReLU()
    # self.conv2_2 = adder.Adder2D(64, 64, kernel_size=5, padding=2)
    # self.prelu2_2 = nn.PReLU()
    # self.conv3_1 = adder.Adder2D(64, 128, kernel_size=5, padding=2)
    # self.prelu3_1 = nn.PReLU()
    # self.conv3_2 = adder.Adder2D(128, 128, kernel_size=5, padding=2)
    # self.prelu3_2 = nn.PReLU()
    self.preluip1 = nn.PReLU()
    # ip1 maps the flattened 32x3x3 feature map down to 2-D so the
    # embedding can be plotted directly.
    self.ip1 = nn.Linear(32 * 3 * 3, 2)
    # Fix: honour num_classes instead of the hard-coded 10; the default
    # is unchanged, so existing callers are unaffected.
    self.ip2 = nn.Linear(2, num_classes, bias=False)
def conv3x3(in_planes, out_planes, stride=1):
    """3x3 adder layer with unit padding (spatial size preserved at stride 1)."""
    return adder.Adder2D(
        in_planes,
        out_planes,
        kernel_size=3,
        stride=stride,
        padding=1,
        bias=False,
    )
def conv1x1(in_planes, out_planes, stride=1, quantize=False, weight_bits=8,
            sparsity=0):
    """1x1 adder convolution.

    Fix: quantize / weight_bits / sparsity were accepted but silently
    dropped; they are now forwarded to Adder2D, matching the other
    quantization-aware helpers in this project. Defaults are unchanged,
    so existing callers see identical behaviour.
    """
    return adder.Adder2D(in_planes, out_planes, kernel_size=1,
                         stride=stride, bias=False, quantize=quantize,
                         weight_bits=weight_bits, sparsity=sparsity)
def conv3x3(in_planes, out_planes, stride=1, quantize=False, weight_bits=8,
            sparsity=0):
    """3x3 adder convolution with unit padding.

    Fix: quantize / weight_bits / sparsity were accepted but silently
    dropped; they are now forwarded to Adder2D, matching the other
    quantization-aware helpers in this project. Defaults are unchanged,
    so existing callers see identical behaviour.
    """
    return adder.Adder2D(in_planes, out_planes, kernel_size=3,
                         stride=stride, padding=1, bias=False,
                         quantize=quantize, weight_bits=weight_bits,
                         sparsity=sparsity)
def conv_add(in_planes, out_planes, kernel_size=(3, 3), stride=1, padding=0,
             quantize=False, weight_bits=8, sparsity=0):
    """Adder layer (default 3x3 kernel) with optional quantization/sparsity."""
    layer_kwargs = dict(kernel_size=kernel_size,
                        stride=stride,
                        padding=padding,
                        bias=False,
                        quantize=quantize,
                        weight_bits=weight_bits,
                        sparsity=sparsity)
    return adder.Adder2D(in_planes, out_planes, **layer_kwargs)
def conv3x3(in_planes, out_planes, stride=1, quantize=False, weight_bits=8,
            sparsity=0, quantize_v='sbm'):
    """3x3 adder layer with unit padding and a selectable quantization scheme."""
    layer_kwargs = dict(kernel_size=3,
                        stride=stride,
                        padding=1,
                        bias=False,
                        quantize=quantize,
                        weight_bits=weight_bits,
                        sparsity=sparsity,
                        quantize_v=quantize_v)
    return adder.Adder2D(in_planes, out_planes, **layer_kwargs)
def _make_layer(self, block, planes, blocks, stride=1):
    """Build a residual stage of `blocks` units at width `planes`.

    When the stride or channel count changes, the first unit gets a
    projection shortcut: a plain 1x1 conv, a 1x1 adder layer, then BN.
    """
    width = planes * block.expansion
    downsample = None
    if stride != 1 or self.inplanes != width:
        downsample = nn.Sequential(
            nn.Conv2d(self.inplanes, width, kernel_size=1, stride=stride,
                      bias=False),
            adder.Adder2D(width, width, kernel_size=1, stride=1,
                          bias=False, quantize=self.quantize,
                          weight_bits=self.weight_bits),
            nn.BatchNorm2d(width))

    # kwargs common to every unit in this stage.
    shared = dict(threshold=self.threshold,
                  sign_threshold=self.sign_threshold,
                  distribution=self.distribution,
                  quantize=self.quantize,
                  weight_bits=self.weight_bits)
    stage = [block(inplanes=self.inplanes, planes=planes, stride=stride,
                   downsample=downsample, **shared)]
    self.inplanes = width
    for _ in range(blocks - 1):
        stage.append(block(inplanes=self.inplanes, planes=planes, **shared))
    return nn.Sequential(*stage)
def conv1x1(in_planes, out_planes, stride=1, quantize=False, weight_bits=8,
            sparsity=0):
    """1x1 convolution followed by a 1x1 adder stage.

    The plain nn.Conv2d performs the channel projection and striding;
    the Adder2D stage then runs on its output at stride 1.
    """
    projection = nn.Conv2d(in_planes, out_planes, kernel_size=1,
                           stride=stride, bias=False)
    adder_stage = adder.Adder2D(out_planes, out_planes, kernel_size=1,
                                stride=1, bias=False, quantize=quantize,
                                weight_bits=weight_bits, sparsity=sparsity)
    return nn.Sequential(projection, adder_stage)
def conv3x3(in_planes, out_planes, threshold, sign_threshold, distribution,
            stride=1, quantize=False, weight_bits=8):
    """3x3 convolution followed by a 3x3 adder stage, both with unit padding.

    NOTE(review): threshold / sign_threshold / distribution are accepted
    but not used here (the first stage is a plain nn.Conv2d, not an
    SEConv2d) — presumably the signature mirrors the SEConv2d variant for
    interchangeability; confirm against callers.
    """
    first = nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,
                      padding=1, bias=False)
    second = adder.Adder2D(out_planes, out_planes, kernel_size=3, stride=1,
                           padding=1, bias=False, quantize=quantize,
                           weight_bits=weight_bits)
    return nn.Sequential(first, second)
def conv(in_planes, out_planes, threshold, sign_threshold, distribution,
         kernel_size=3, stride=1, padding=0, quantize=False, weight_bits=8,
         sparsity=0):
    """Shift convolution followed by an adder stage with matching kernel.

    The SEConv2d stage handles projection and striding; the Adder2D stage
    reuses the same kernel_size and padding at stride 1.
    """
    shift_stage = SEConv2d(in_planes, out_planes, kernel_size=kernel_size,
                           stride=stride, padding=padding, bias=False,
                           threshold=threshold,
                           sign_threshold=sign_threshold,
                           distribution=distribution)
    adder_stage = adder.Adder2D(out_planes, out_planes,
                                kernel_size=kernel_size, stride=1,
                                padding=padding, bias=False,
                                quantize=quantize, weight_bits=weight_bits,
                                sparsity=sparsity)
    return nn.Sequential(shift_stage, adder_stage)