def reset_parameters(self):
    r"""Resets all learnable parameters of the module.

    The scheme is selected by ``self.weight_initializer`` (one of
    ``'glorot'``, ``'uniform'``, ``'kaiming_uniform'`` or ``None``) and
    ``self.bias_initializer`` (``'zeros'`` or ``None``).  While the module
    is lazily initialized (``in_channels <= 0``), no initialization is
    performed at all.

    Raises:
        RuntimeError: If an unsupported initializer name is given.
    """
    if self.in_channels <= 0:
        pass  # Lazy initialization: fan-in unknown, defer until it is set.
    elif self.weight_initializer == 'glorot':
        inits.glorot(self.weight)
    elif self.weight_initializer == 'uniform':
        bound = 1.0 / math.sqrt(self.weight.size(-1))
        # `torch.nn.init` functions run under `no_grad`, so the parameter
        # can be passed directly (no need for the discouraged `.data`).
        torch.nn.init.uniform_(self.weight, -bound, bound)
    elif (self.weight_initializer == 'kaiming_uniform'
          or self.weight_initializer is None):
        # `None` falls back to Kaiming-uniform; the two branches were
        # previously duplicated verbatim and are merged here.
        inits.kaiming_uniform(self.weight, fan=self.in_channels,
                              a=math.sqrt(5))
    else:
        raise RuntimeError(f"Linear layer weight initializer "
                           f"'{self.weight_initializer}' is not supported")

    if self.bias is None or self.in_channels <= 0:
        pass  # No bias to initialize, or still waiting on lazy fan-in.
    elif self.bias_initializer == 'zeros':
        inits.zeros(self.bias)
    elif self.bias_initializer is None:
        inits.uniform(self.in_channels, self.bias)
    else:
        raise RuntimeError(f"Linear layer bias initializer "
                           f"'{self.bias_initializer}' is not supported")
def reset_parameters(self):
    r"""Re-initializes the weight and bias of the layer in place.

    Does nothing while the module is lazily initialized
    (``in_channels <= 0``).  Weight scheme: ``'glorot'`` or
    ``'kaiming_uniform'``/``None``; bias scheme: ``'zeros'`` or ``None``.

    Raises:
        RuntimeError: If an unsupported initializer name is configured.
    """
    if self.in_channels <= 0:
        # Fan-in not yet known (lazy module) -- skip both weight and bias.
        return

    w_init = self.weight_initializer
    if w_init == 'glorot':
        inits.glorot(self.weight)
    elif w_init == 'kaiming_uniform' or w_init is None:
        inits.kaiming_uniform(self.weight, fan=self.in_channels,
                              a=math.sqrt(5))
    else:
        raise RuntimeError(
            f"Linear layer weight initializer "
            f"'{w_init}' is not supported")

    if self.bias is not None:
        b_init = self.bias_initializer
        if b_init == 'zeros':
            inits.zeros(self.bias)
        elif b_init is None:
            inits.uniform(self.in_channels, self.bias)
        else:
            raise RuntimeError(
                f"Linear layer bias initializer "
                f"'{b_init}' is not supported")
def reset_parameters(self):
    r"""Re-initializes the learnable weight and bias of the layer.

    Uses Kaiming-uniform for the weight (with ``a = sqrt(5)``, as in
    :class:`torch.nn.Linear`'s default) and a uniform scheme for the bias.
    """
    fan_in = self.in_channels
    inits.kaiming_uniform(self.weight, fan=fan_in, a=math.sqrt(5))
    inits.uniform(fan_in, self.bias)