def __init__(self):
    """Declare all needed layers (backbone trained from scratch)."""
    torch.nn.Module.__init__(self)
    # Convolution and pooling layers of VGG-16 with random (non-pretrained)
    # weights, truncated just before pool5.
    backbone = torchvision.models.vgg16(pretrained=False).features
    self.features = torch.nn.Sequential(*list(backbone.children())[:-1])
    # Dimensions of the random sign projection matrices.
    self.d1 = 16
    self.d2 = 3

    def _random_sign_matrix():
        # Trainable (d1, d2) matrix whose entries are drawn uniformly
        # from {-1, +1}.
        signs = 2 * torch.randint(0, 2, (self.d1, self.d2)) - 1
        return torch.nn.Parameter(signs.float())

    self.w1 = _random_sign_matrix()
    self.w2 = _random_sign_matrix()
    self.w3 = _random_sign_matrix()
    self.w4 = _random_sign_matrix()
    # Linear classifier over the compressed bilinear feature.
    self.fc = torch.nn.Linear((512 // self.d1 * self.d2) ** 2, 67)
    self.norm = runLayer()
def __init__(self):
    """Declare all needed layers.

    Loads an ImageNet-pretrained VGG-16 backbone, freezes it, and
    (re-)initialises the classifier and the four random-sign projection
    matrices with Kaiming-normal values.
    """
    torch.nn.Module.__init__(self)
    # Convolution and pooling layers of VGG-16, pretrained on ImageNet.
    self.features = torchvision.models.vgg16(pretrained=True).features
    self.features = torch.nn.Sequential(
        *list(self.features.children())[:-1])  # Remove pool5.
    # Dimensions of the random sign projection matrices.
    self.d1 = 16
    self.d2 = 3
    # Trainable (d1, d2) +/-1 matrices; note they are overwritten below
    # by the Kaiming-normal initialisation.
    self.w1 = torch.nn.Parameter(
        (2 * torch.randint(0, 2, (self.d1, self.d2)) - 1).float())
    self.w2 = torch.nn.Parameter(
        (2 * torch.randint(0, 2, (self.d1, self.d2)) - 1).float())
    self.w3 = torch.nn.Parameter(
        (2 * torch.randint(0, 2, (self.d1, self.d2)) - 1).float())
    self.w4 = torch.nn.Parameter(
        (2 * torch.randint(0, 2, (self.d1, self.d2)) - 1).float())
    # Linear classifier over the compressed bilinear feature.
    self.fc = torch.nn.Linear((512 // self.d1 * self.d2) ** 2, 67)
    self.norm = runLayer()
    # Freeze the convolutional backbone; only the classifier and the
    # projection matrices are trained.
    for param in self.features.parameters():
        param.requires_grad = False
    # Use the in-place, underscore-suffixed initialisers: the bare
    # ``kaiming_normal``/``constant`` names are deprecated and have been
    # removed from recent PyTorch releases.
    torch.nn.init.kaiming_normal_(self.fc.weight.data)
    torch.nn.init.kaiming_normal_(self.w1.data)
    torch.nn.init.kaiming_normal_(self.w2.data)
    torch.nn.init.kaiming_normal_(self.w3.data)
    torch.nn.init.kaiming_normal_(self.w4.data)
    if self.fc.bias is not None:
        torch.nn.init.constant_(self.fc.bias.data, val=0)