Code Example #1
def weight_initialization(self):
    # Convolution weights: ReLU-oriented initialization from the NN helper module.
    self.conv1.weight.data = torch.FloatTensor(
        NN.weight_relu_initialization(self.conv1))
    # Fully connected weights: uniform values in [-0.1, 0.1].
    self.linear.weight.data = torch.FloatTensor(
        np.random.uniform(-0.1, 0.1, self.linear.weight.data.shape))
    # Fully connected bias: constant 0.0 via the NN helper.
    self.linear.bias.data = torch.FloatTensor(
        NN.bias_initialization(self.linear, constant=0.0))
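
The NN helper module itself is not shown in these examples. As a rough sketch of what the calls above appear to do, assuming weight_relu_initialization is a He-style (ReLU-aware) initializer and bias_initialization fills biases with a constant, both returning NumPy arrays that the caller wraps in torch.FloatTensor, one possible implementation looks like this (all names and return conventions here are assumptions inferred from the call sites):

import numpy as np

class NN:
    """Hypothetical stand-in for the NN helper module used above (a sketch, not the real code)."""

    @staticmethod
    def weight_relu_initialization(layer):
        # He-style initialization for ReLU layers: zero-mean Gaussian with std sqrt(2 / fan_in),
        # returned as a NumPy array so callers can wrap it in torch.FloatTensor.
        shape = tuple(layer.weight.data.shape)
        fan_in = int(np.prod(shape[1:]))  # in_channels * kernel height * kernel width, or in_features
        return np.random.randn(*shape) * np.sqrt(2.0 / fan_in)

    @staticmethod
    def bias_initialization(layer, constant=0.0):
        # Fill the bias with a constant value, again as a NumPy array.
        return np.full(tuple(layer.bias.data.shape), constant)

Returning plain NumPy arrays and wrapping them in torch.FloatTensor mirrors how the examples assign directly to .weight.data and .bias.data.
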
Code Example #2
def weight_initialization(self):
    # ReLU-oriented initialization for both convolutions; bias init is left commented out.
    self.conv1.weight.data = torch.FloatTensor(
        NN.weight_relu_initialization(self.conv1))
    # self.conv1.bias.data = torch.FloatTensor(
    #     NN.bias_initialization(self.conv1, constant=0))
    self.conv2.weight.data = torch.FloatTensor(
        NN.weight_relu_initialization(self.conv2))
    # self.conv2.bias.data = torch.FloatTensor(
    #     NN.bias_initialization(self.conv2, constant=0))
    # The optional shortcut (projection) convolution is initialized the same way.
    if self.shortcut is not None:
        self.shortcut.weight.data = torch.FloatTensor(
            NN.weight_relu_initialization(self.shortcut))
Code Example #3

def weight_initialization(self):
    # ReLU-oriented weight initialization and constant-zero biases for both convolutions.
    self.conv1.weight.data = torch.FloatTensor(
        NN.weight_relu_initialization(self.conv1))
    self.conv1.bias.data = torch.FloatTensor(
        NN.bias_initialization(self.conv1, constant=0))
    self.conv2.weight.data = torch.FloatTensor(
        NN.weight_relu_initialization(self.conv2))
    self.conv2.bias.data = torch.FloatTensor(
        NN.bias_initialization(self.conv2, constant=0))
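
For comparison, the same initialization pattern can be expressed with PyTorch's built-in torch.nn.init functions instead of the custom NN helpers; the module structure below is illustrative only:

import torch.nn as nn

class ConvBlock(nn.Module):
    """Illustrative two-convolution block; layer shapes are placeholders."""

    def __init__(self, in_channels, out_channels):
        super().__init__()
        self.conv1 = nn.Conv2d(in_channels, out_channels, kernel_size=3, padding=1)
        self.conv2 = nn.Conv2d(out_channels, out_channels, kernel_size=3, padding=1)
        self.weight_initialization()

    def weight_initialization(self):
        # Built-in equivalents of the pattern above: He (Kaiming) init for ReLU-fed
        # convolution weights and a constant zero for the biases, applied in place.
        for conv in (self.conv1, self.conv2):
            nn.init.kaiming_normal_(conv.weight, nonlinearity='relu')
            nn.init.constant_(conv.bias, 0.0)

Using the in-place nn.init functions avoids the NumPy round trip and the direct manipulation of .data seen in the examples above.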