import math

import torch.nn as nn

# NOTE: the import path below is an assumption; BBBConv2d, BBBLinearFactorial
# and FlattenLayer are the Bayes-by-Backprop layers this model is built from.
from utils.BBBlayers import BBBConv2d, BBBLinearFactorial, FlattenLayer


class _ClassifierD(nn.Module):
    '''The ACGAN discriminator: a Bayesian LeNet trunk with two linear heads,
    one for the real/fake decision and one for the auxiliary output.'''

    def __init__(self, outputs, inputs):
        super(_ClassifierD, self).__init__()
        self.outputs = outputs

        # Due to the conv-layer change, the initial log-variance values
        # have to be assumed.
        self.q_logvar_init = 0.05
        self.p_logvar_init = math.log(0.05)

        self.conv1 = BBBConv2d(self.q_logvar_init, self.p_logvar_init, inputs, 6, 5, stride=1)
        self.soft1 = nn.Softplus()
        self.pool1 = nn.MaxPool2d(kernel_size=2, stride=2)

        self.conv2 = BBBConv2d(self.q_logvar_init, self.p_logvar_init, 6, 16, 5, stride=1)
        self.soft2 = nn.Softplus()
        self.pool2 = nn.MaxPool2d(kernel_size=2, stride=2)

        # For 32x32 inputs the trunk ends in 16 feature maps of size 5x5.
        self.flatten = FlattenLayer(5 * 5 * 16)
        self.fc1 = BBBLinearFactorial(self.q_logvar_init, self.p_logvar_init, 5 * 5 * 16, 120)
        self.soft3 = nn.Softplus()
        self.fc2 = BBBLinearFactorial(self.q_logvar_init, self.p_logvar_init, 120, 84)
        self.soft4 = nn.Softplus()

        # Two heads instead of a single fc3 (84 -> outputs): fcA is the
        # adversarial (real/fake) head, fcB the auxiliary head.
        self.fcA = BBBLinearFactorial(self.q_logvar_init, self.p_logvar_init, 84, 1)
        self.fcB = BBBLinearFactorial(self.q_logvar_init, self.p_logvar_init, 84, 1)

        layers = [self.conv1, self.soft1, self.pool1,
                  self.conv2, self.soft2, self.pool2,
                  self.flatten, self.fc1, self.soft3,
                  self.fc2, self.soft4]

        self.prob = nn.Sigmoid()
        self.layers = nn.ModuleList(layers)

    def forward(self, x):
        '''Forward pass with Bayesian weights; accumulates the KL term of
        every probabilistic layer alongside the activations.'''
        kl = 0
        for layer in self.layers:
            if hasattr(layer, 'convprobforward') and callable(layer.convprobforward):
                x, _kl = layer.convprobforward(x)
                kl += _kl
            elif hasattr(layer, 'fcprobforward') and callable(layer.fcprobforward):
                x, _kl = layer.fcprobforward(x)
                kl += _kl
            else:
                x = layer(x)

        logitsA, klA = self.fcA.fcprobforward(x)
        logitsB, klB = self.fcB.fcprobforward(x)
        # Squash the adversarial head into a probability when a single
        # output is requested; the sigmoid belongs on the head output,
        # not on the shared 84-dim features.
        if self.outputs == 1:
            logitsA = self.prob(logitsA)
        return logitsA, logitsB, klA + kl, klB + kl
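# A minimal smoke test of _ClassifierD, sketching how an ACGAN-style
# discriminator loss could combine the adversarial BCE term with the
# accumulated KL divergences. The batch size, the 3x32x32 input shape and
# the KL weight (1/dataset size) are illustrative assumptions, not values
# fixed by this repo.
def _classifier_d_smoke_test():
    import torch

    d = _ClassifierD(outputs=1, inputs=3)
    x = torch.randn(4, 3, 32, 32)            # assumed 32x32 RGB inputs
    probA, logitsB, klA, klB = d(x)

    real_labels = torch.ones(4, 1)
    bce = nn.BCELoss()(probA, real_labels)   # adversarial term (head A)
    # One common choice is to down-weight the KL by the dataset size.
    loss = bce + klA / 50000.0
    loss.backward()
    return loss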
class _BayesianAlexNetD(nn.Module):
    '''The AlexNet architecture built from Bayesian layers.'''

    def __init__(self, outputs, inputs):
        super(_BayesianAlexNetD, self).__init__()

        self.q_logvar_init = 0.05
        self.p_logvar_init = math.log(0.05)

        self.conv1 = BBBConv2d(self.q_logvar_init, self.p_logvar_init, inputs, 64, kernel_size=11, stride=4, padding=5)
        self.soft1 = nn.Softplus()
        self.pool1 = nn.MaxPool2d(kernel_size=2, stride=2)

        self.conv2 = BBBConv2d(self.q_logvar_init, self.p_logvar_init, 64, 192, kernel_size=5, padding=2)
        self.soft2 = nn.Softplus()
        self.pool2 = nn.MaxPool2d(kernel_size=2, stride=2)

        self.conv3 = BBBConv2d(self.q_logvar_init, self.p_logvar_init, 192, 384, kernel_size=3, padding=1)
        self.soft3 = nn.Softplus()

        self.conv4 = BBBConv2d(self.q_logvar_init, self.p_logvar_init, 384, 256, kernel_size=3, padding=1)
        self.soft4 = nn.Softplus()

        self.conv5 = BBBConv2d(self.q_logvar_init, self.p_logvar_init, 256, 128, kernel_size=3, padding=1)
        self.soft5 = nn.Softplus()
        self.pool3 = nn.MaxPool2d(kernel_size=2, stride=2)

        # For 32x32 inputs the conv stack ends in 128 feature maps of
        # size 1x1, which the classifier maps to the output logits.
        self.classifier = BBBLinearFactorial(self.q_logvar_init, self.p_logvar_init, 1 * 1 * 128, outputs)

        layers = [self.conv1, self.soft1, self.pool1,
                  self.conv2, self.soft2, self.pool2,
                  self.conv3, self.soft3,
                  self.conv4, self.soft4,
                  self.conv5, self.soft5, self.pool3]
        self.layers = nn.ModuleList(layers)

    def forward(self, x):
        kl = 0
        for layer in self.layers:
            if hasattr(layer, 'convprobforward') and callable(layer.convprobforward):
                x, _kl = layer.convprobforward(x)
                kl += _kl
            else:
                x = layer(x)

        x = x.view(x.size(0), -1)
        x, _kl = self.classifier.fcprobforward(x)
        kl += _kl
        logits = x
        return logits, kl
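# A minimal usage sketch for _BayesianAlexNetD, assuming CIFAR-style
# 3x32x32 inputs and 10 classes. The Bayes-by-Backprop objective is the
# data likelihood (cross-entropy) plus a weighted KL term; the weight
# beta = 1/num_batches is one common heuristic, not a value fixed by
# this code.
def _bayesian_alexnet_smoke_test():
    import torch
    import torch.nn.functional as F

    net = _BayesianAlexNetD(outputs=10, inputs=3)
    x = torch.randn(8, 3, 32, 32)
    y = torch.randint(0, 10, (8,))

    logits, kl = net(x)
    beta = 1.0 / 390                     # e.g. roughly the number of CIFAR-10 batches
    loss = F.cross_entropy(logits, y) + beta * kl
    loss.backward()
    return loss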