Example #1
0
 def weights_init(self, m):
     """Xavier-initialize the weight of Conv2d/Linear layers; constant-fill the bias.

     Intended for use with ``Module.apply``: modules of any other type are
     left untouched.
     """
     layer_type = m.__class__.__name__
     if layer_type in ('Conv2d', 'Linear'):
         print(layer_type, 'apply xavier')
         xavier(m.weight.data)
         # Xavier init needs a >=2-D tensor, so the 1-D bias gets a constant.
         m.bias.data.fill_(0.1)
Example #2
0
    def __init__(self):
        """Build a two-layer MLP (28*28 -> 100 -> 10) with Xavier-initialized weights."""
        super().__init__()
        self.fc1 = nn.Linear(28 * 28, 100)
        self.fc2 = nn.Linear(100, 10)

        # Xavier-init the weight of every Linear submodule; biases keep
        # nn.Linear's default initialization.
        linear_layers = (mod for mod in self.modules() if isinstance(mod, nn.Linear))
        for layer in linear_layers:
            xavier(layer.weight.data)
def weights_init(m):
    """Initializer for ``Module.apply``: Xavier-uniform weights on Linear layers.

    Fixes two defects in the original:
      * ``import torch.nn.init.xavier_uniform as xavier`` raises
        ModuleNotFoundError at call time — ``xavier_uniform`` is a function,
        not a module, so it must be brought in with ``from ... import``.
      * ``xavier(m.bias.data)`` raises ValueError: Xavier init requires a
        tensor with at least 2 dimensions, and biases are 1-D. Zero the bias
        instead, matching the other initializers in this file.

    Modules that are not nn.Linear are left untouched.
    """
    from torch.nn.init import xavier_uniform_ as xavier
    if isinstance(m, nn.Linear):
        xavier(m.weight.data)
        m.bias.data.zero_()
Example #4
0
def weights_init(m):
    """Xavier-init Conv2d weights (scaled by ``args.gain``); zero their biases.

    Modules other than nn.Conv2d are left untouched.
    """
    if not isinstance(m, nn.Conv2d):
        return
    xavier(m.weight.data, gain=args.gain)
    m.bias.data.zero_()
Example #5
0
    # MSE branch: the `if` header for this arm is outside this excerpt —
    # presumably `if args.loss == 'mse':`; verify against the full file.
    criterion_train =  nn.MSELoss()
    criterion = nn.MSELoss()
elif args.loss=='ce':
    # Cross-entropy for classification; same loss object is used for both
    # the training criterion and the evaluation criterion.
    criterion = nn.CrossEntropyLoss()
    criterion_train = nn.CrossEntropyLoss()

from torch.nn.init import xavier_normal_ as xavier
def weights_init(m):
    """Xavier-normal-init Conv2d weights (scaled by ``args.gain``); zero biases.

    Robustness fix: a Conv2d built with ``bias=False`` has ``m.bias is None``,
    so the original unconditional ``m.bias.data.zero_()`` crashed with an
    AttributeError. Guard it, matching the bias-None check used by
    ``weights_init_xavier`` elsewhere in this file.
    """
    if isinstance(m, nn.Conv2d):
        xavier(m.weight.data, gain=args.gain)
        if m.bias is not None:
            m.bias.data.zero_()

# Initialize every submodule of the backbone with the Xavier-normal
# initializer defined above.
net.apply(weights_init)
# Two identical 10-class linear heads on top of the backbone features.
# 512*k is presumably the backbone's output width (k a width multiplier) —
# TODO confirm against the backbone definition.
FC1 = nn.Linear(512*k, 10).cuda()
FC2 = nn.Linear(512*k, 10).cuda()
xavier(FC1.weight.data, gain=args.gain)
FC1.bias.data.zero_()


# Snapshot the freshly initialized backbone before any further mutation.
net2=copy.deepcopy(net)

# Symmetrize! FC2 starts from exactly FC1's parameters (its own default
# init is discarded by the copies below).
FC2.weight.data.copy_(FC1.weight.data)
FC2.bias.data.copy_(FC1.bias.data)



# Only the backbone and FC1 are trained; FC2 keeps the copied initial state.
par = list(net.parameters())+list(FC1.parameters())
optimizer = optim.SGD(par, lr=args.lr, momentum=0.9, weight_decay=0)

# Second snapshot of the backbone at its initial state.
net_clone = copy.deepcopy(net)
Example #6
0
 def reset_parameters(self):
     """Reset this layer's parameters: zero bias and Glorot/Xavier weight."""
     # The two initializations are independent of each other.
     zeros(self.bias)
     xavier(self.weight)
     print("Glorot Initialized Weights")
Example #7
0
def weights_init_xavier(m):
    """Xavier-init Conv2d weights and zero-fill their biases.

    Skips half-precision weights, for which this init is deliberately avoided.
    Bug fix: the legacy ``isinstance(m.weight, torch.HalfTensor)`` check only
    matches CPU half tensors, so fp16 weights on CUDA
    (``torch.cuda.HalfTensor``) slipped through the guard. Checking the dtype
    directly covers every device.
    """
    if isinstance(m, nn.Conv2d):
        if m.weight.dtype != torch.float16:
            xavier(m.weight.data)
            if m.bias is not None:
                m.bias.data.fill_(0)