def init_params(self):
    # Initialize every trainable parameter from a zero-mean normal
    # distribution with a fixed standard deviation of 0.1.
    # `cfg` and `truncated_normal_` are assumed to be defined at
    # module level; `torch` must be imported.
    for param in self.parameters():
        if param.requires_grad:
            if cfg.use_truncated_normal:
                truncated_normal_(param, std=0.1)
            else:
                torch.nn.init.normal_(param, std=0.1)
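All of the examples on this page call truncated_normal_, which is not defined here. A minimal sketch of one common resampling-based implementation follows; the exact helper body and the two-standard-deviation cutoff are assumptions, not shown in the source. On recent PyTorch versions, the built-in torch.nn.init.trunc_normal_ can be used instead.

    import torch

    def truncated_normal_(tensor, mean=0.0, std=1.0):
        # Assumed helper: draw four standard-normal candidates per
        # element and keep one that lands within two standard
        # deviations of zero (falls back to an out-of-range draw in
        # the rare case none of the four qualify).
        size = tensor.shape
        tmp = tensor.new_empty(size + (4,)).normal_()
        valid = (tmp > -2) & (tmp < 2)
        ind = valid.int().max(-1, keepdim=True)[1]  # index of a valid draw
        with torch.no_grad():
            tensor.copy_(tmp.gather(-1, ind).squeeze(-1))
            tensor.mul_(std).add_(mean)
        return tensor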
Example #2
def init_params(self):
    # Initialize trainable, non-scalar parameters with a standard
    # deviation scaled by the leading dimension (1 / sqrt(fan));
    # 0-dim parameters are skipped. Requires `import math`.
    for param in self.parameters():
        if param.requires_grad and len(param.shape) > 0:
            stddev = 1 / math.sqrt(param.shape[0])
            if cfg.use_truncated_normal:
                truncated_normal_(param, std=stddev)
            else:
                torch.nn.init.normal_(param, std=stddev)
Example #3
def init_params(self):
    # Select the discriminator's initialization scheme from
    # cfg.dis_init: 'uniform' (fixed range), 'normal', or
    # 'truncated_normal' (both scaled by 1 / sqrt(fan)).
    for param in self.parameters():
        if param.requires_grad and len(param.shape) > 0:
            stddev = 1 / math.sqrt(param.shape[0])
            if cfg.dis_init == 'uniform':
                torch.nn.init.uniform_(param, a=-0.05, b=0.05)
            elif cfg.dis_init == 'normal':
                torch.nn.init.normal_(param, std=stddev)
            elif cfg.dis_init == 'truncated_normal':
                truncated_normal_(param, std=stddev)
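For context, a self-contained usage sketch: DummyCfg and Generator below are hypothetical stand-ins for the cfg module and the model class these snippets come from, and the init_params body reuses the scheme from Example #2.

    import math
    import torch
    import torch.nn as nn

    class DummyCfg:
        # Hypothetical stand-in for the cfg module the examples assume.
        # With use_truncated_normal=False, the truncated_normal_ helper
        # sketched above is not needed.
        use_truncated_normal = False

    cfg = DummyCfg()

    class Generator(nn.Module):
        def __init__(self):
            super().__init__()
            self.embed = nn.Embedding(1000, 64)
            self.fc = nn.Linear(64, 1000)
            self.init_params()  # initialize weights on construction

        def init_params(self):
            # Same scheme as Example #2: std scaled by 1 / sqrt(fan).
            for param in self.parameters():
                if param.requires_grad and len(param.shape) > 0:
                    stddev = 1 / math.sqrt(param.shape[0])
                    if cfg.use_truncated_normal:
                        truncated_normal_(param, std=stddev)
                    else:
                        torch.nn.init.normal_(param, std=stddev)

    model = Generator()  # parameters are re-initialized at construction time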