def reset_parameters(self):
    """Re-initialize learnable weights with Xavier-normal scaling.

    The gain is computed from this block's activation via ``cal_gain``.
    The main linear projection is always reset; the residual projection
    is reset (weight plus bias, when present) only if it is an
    ``nn.Linear`` rather than an identity/no-op connection.
    """
    g = cal_gain(self.activation)
    nn.init.xavier_normal_(self.linear.weight, gain=g)
    res = self.res_fc
    if isinstance(res, nn.Linear):
        nn.init.xavier_normal_(res.weight, gain=g)
        if res.bias is not None:
            nn.init.zeros_(res.bias)
def reset_parameters(self):
    """Reset both linear layers.

    Weights get Xavier-uniform initialization with an activation-matched
    gain; biases, when present, are zeroed. Both layers are treated
    identically, so the duplicated statements are folded into one loop.
    """
    g = cal_gain(self.activation)
    for fc in (self.linear1, self.linear2):
        nn.init.xavier_uniform_(fc.weight, gain=g)
        if fc.bias is not None:
            nn.init.zeros_(fc.bias)
def reset_parameters(self):
    """Reset the first and last layers of ``self.fcs``.

    The first layer's weight uses an activation-matched gain; the last
    layer's weight uses ``xavier_uniform_``'s default gain of 1.0.
    Biases are zeroed when present.

    NOTE(review): any layers between ``fcs[0]`` and ``fcs[-1]`` are left
    untouched by this method — confirm that is intentional.
    """
    first, last = self.fcs[0], self.fcs[-1]
    nn.init.xavier_uniform_(first.weight, gain=cal_gain(self.activation))
    if first.bias is not None:
        nn.init.zeros_(first.bias)
    nn.init.xavier_uniform_(last.weight)
    if last.bias is not None:
        nn.init.zeros_(last.bias)
def reset_parameters(self):
    """Re-initialize the attention layer's parameters.

    The feature projection ``fc`` and the attention projection
    ``attn_fc`` use Xavier-normal weights with a leaky-relu gain,
    while the residual projection (when it is an ``nn.Linear``) is
    scaled for this block's activation instead.

    Fix: the residual bias was previously never reset even though its
    weight was — the sibling ``reset_parameters`` implementations in
    this file zero the residual bias when present, so this one now
    does too for consistency.
    """
    gain = cal_gain("leaky_relu")
    nn.init.xavier_normal_(self.fc.weight, gain=gain)
    nn.init.xavier_normal_(self.attn_fc.weight, gain=gain)
    if isinstance(self.res_fc, nn.Linear):
        nn.init.xavier_normal_(self.res_fc.weight, gain=cal_gain(self.activation))
        # Previously missing: zero the residual bias like the other blocks.
        if self.res_fc.bias is not None:
            nn.init.zeros_(self.res_fc.bias)
def reset_parameters(self):
    """Xavier-normal re-initialization with an activation-matched gain.

    The input projection ``linear`` is reset only when ``self.project``
    is set; the residual connection is reset only when it is a real
    ``nn.Linear`` layer.
    """
    g = cal_gain(self.activation)
    if self.project:
        nn.init.xavier_normal_(self.linear.weight, gain=g)
    res = self.res_fc
    if isinstance(res, nn.Linear):
        nn.init.xavier_normal_(res.weight, gain=g)
def reset_parameters(self):
    """Xavier-uniform re-initialization of ``self.s`` with a sigmoid gain."""
    # NOTE(review): cal_gain receives the callable F.sigmoid here, while
    # the other reset_parameters blocks pass self.activation or a string
    # name — confirm cal_gain accepts both forms.
    nn.init.xavier_uniform_(self.s, gain=cal_gain(F.sigmoid))