def compute_weight(self, module):
    # Variant: if a fixed norm (or fixed log-norm) has been set on the hook,
    # use it as the magnitude g instead of the learned '<name>_g' parameter.
    if (self.fixed_norm is None) and (self.fixed_log_norm is None):
        g = getattr(module, self.name + '_g')
    else:
        if self.fixed_norm is not None:
            g = self.fixed_norm
        else:
            g = th.exp(self.fixed_log_norm)
    v = getattr(module, self.name + '_v')
    return _weight_norm(v, g, self.dim)

def compute_weight(self, module):
    # Standard weight normalisation: g and v are learned parameters,
    # w = g * v / ||v|| along self.dim.
    g = getattr(module, self.name + '_g')
    v = getattr(module, self.name + '_v')
    return _weight_norm(v, g, self.dim)
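
# A minimal, hypothetical sketch of how a compute_weight like the variant above
# is typically wired into a module. The FixedNormWeightNorm class name, its
# constructor arguments, and the apply() helper are assumptions modelled on
# torch.nn.utils.weight_norm's forward-pre-hook mechanism; only the
# fixed_norm / fixed_log_norm handling mirrors the code above.
import torch as th
from torch import _weight_norm, norm_except_dim
from torch.nn import Parameter


class FixedNormWeightNorm:
    def __init__(self, name='weight', dim=0, fixed_norm=None, fixed_log_norm=None):
        self.name = name
        self.dim = dim
        self.fixed_norm = fixed_norm          # tensor: used directly as g
        self.fixed_log_norm = fixed_log_norm  # tensor: th.exp(.) used as g

    def compute_weight(self, module):
        # Same logic as the first variant above, written with elif.
        if self.fixed_norm is not None:
            g = self.fixed_norm
        elif self.fixed_log_norm is not None:
            g = th.exp(self.fixed_log_norm)
        else:
            g = getattr(module, self.name + '_g')
        v = getattr(module, self.name + '_v')
        return _weight_norm(v, g, self.dim)

    @staticmethod
    def apply(module, name='weight', dim=0, fixed_norm=None, fixed_log_norm=None):
        fn = FixedNormWeightNorm(name, dim, fixed_norm, fixed_log_norm)
        weight = getattr(module, name)
        # Replace the plain weight with magnitude ('_g') and direction ('_v')
        # parameters, as torch.nn.utils.weight_norm does.
        del module._parameters[name]
        module.register_parameter(name + '_g', Parameter(norm_except_dim(weight, 2, dim).data))
        module.register_parameter(name + '_v', Parameter(weight.data))
        setattr(module, name, fn.compute_weight(module))
        # Recompute the weight from (g, v) before every forward pass.
        module.register_forward_pre_hook(lambda mod, inputs: setattr(mod, name, fn.compute_weight(mod)))
        return fn


# Usage (hypothetical): pin the weight norm of a linear layer to 1.0.
# layer = th.nn.Linear(16, 16)
# FixedNormWeightNorm.apply(layer, fixed_norm=th.tensor(1.0))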