Exemple #1
0
    def reset_parameters(self):
        """Re-initialize weight and bias according to the configured
        initializer names; a no-op while ``in_channels`` is unresolved
        (lazy initialization)."""
        if self.in_channels > 0:
            init = self.weight_initializer
            if init == 'glorot':
                inits.glorot(self.weight)
            elif init == 'uniform':
                # Bound scales with the inverse square root of the fan-in.
                bound = 1.0 / math.sqrt(self.weight.size(-1))
                torch.nn.init.uniform_(self.weight.data, -bound, bound)
            elif init in ('kaiming_uniform', None):
                # None falls back to the same kaiming-uniform scheme.
                inits.kaiming_uniform(self.weight, fan=self.in_channels,
                                      a=math.sqrt(5))
            else:
                raise RuntimeError(f"Linear layer weight initializer "
                                   f"'{self.weight_initializer}' is not supported")

        if self.bias is not None and self.in_channels > 0:
            init = self.bias_initializer
            if init == 'zeros':
                inits.zeros(self.bias)
            elif init is None:
                inits.uniform(self.in_channels, self.bias)
            else:
                raise RuntimeError(f"Linear layer bias initializer "
                                   f"'{self.bias_initializer}' is not supported")
Exemple #2
0
    def __init__(self, in_channels, k, bias=True, **kwargs):
        """Set up SACN convolution state with ``k`` Chebyshev terms.

        NOTE(review): ``bias`` is accepted but never used in the visible
        code -- confirm whether a bias term was intended here.
        """
        super(SACNConv, self).__init__(aggr='add', **kwargs)
        assert k > 0
        self.in_channels = in_channels
        # Learnable relation-mixing matrix and per-order Chebyshev weights.
        self.relationships_M = Parameter(uniform((in_channels, in_channels)))
        self.chebyshev_coefficients = Parameter(uniform((k, )))
Exemple #3
0
def test_inits():
    """Smoke-test the basic tensor initializers on a 1 x 4 tensor."""
    x = torch.empty(1, 4)

    # uniform appears to draw from +-1/sqrt(size) = +-0.5 for size=4.
    uniform(size=4, tensor=x)
    assert -0.5 <= x.min().item() and x.max().item() <= 0.5

    glorot(x)
    assert -1.25 <= x.min().item() and x.max().item() <= 1.25

    zeros(x)
    assert x.tolist() == [[0.0, 0.0, 0.0, 0.0]]

    ones(x)
    assert x.tolist() == [[1.0, 1.0, 1.0, 1.0]]
Exemple #4
0
    def reset_parameters(self):
        """Uniformly re-initialize basis/root/bias (and ``att`` when the
        basis decomposition is active)."""
        if self.att is not None:
            # Basis decomposition: att mixes ``num_bases`` basis matrices.
            size = self.num_bases * self.in_channels
            inits.uniform(size, self.att)
        else:
            size = self.num_relations * self.in_channels

        inits.uniform(size, self.basis)
        inits.uniform(size, self.root)
        inits.uniform(size, self.bias)
Exemple #5
0
    def __init__(self,
                 in_channels,
                 out_channels,
                 k,
                 linear_transform=False,
                 bias=True,
                 **kwargs):
        """Set up LAGN convolution with ``k`` propagation steps.

        Accepts an optional ``num_basis_functions`` keyword (default 10)
        controlling the width of the coefficient tables.
        """
        # BUG FIX: consume our own keyword BEFORE forwarding **kwargs to
        # the base class. The old ``kwargs.get`` left the key in kwargs,
        # so the base ``__init__`` would also receive
        # ``num_basis_functions`` and likely reject it as unexpected.
        num_basis_functions = kwargs.pop("num_basis_functions", 10)

        super(LAGNConv, self).__init__(aggr='add', **kwargs)

        assert k > 0

        self.in_channels = in_channels
        self.out_channels = out_channels

        # One (k x num_basis_functions) coefficient table each for the
        # inner and outer parts of the aggregation.
        self.inner_coefficients = Parameter(uniform((k, num_basis_functions)))
        self.outer_coefficients = Parameter(uniform((k, num_basis_functions)))

        # Optional learned linear transform; identity mapping otherwise.
        self.lin = lambda x: x
        if linear_transform:
            self.lin = torch.nn.Linear(in_channels, out_channels, bias=bias)
    def reset_parameters(self):
        """Re-initialize weight (and bias, when present) according to the
        configured initializer names; a no-op for lazy (unresolved)
        ``in_channels``."""
        if self.in_channels <= 0:
            pass  # lazy layer: nothing to initialize yet
        elif self.weight_initializer == 'glorot':
            inits.glorot(self.weight)
        elif self.weight_initializer in ('kaiming_uniform', None):
            # a=sqrt(5) mirrors torch.nn.Linear's default reset.
            inits.kaiming_uniform(self.weight, fan=self.in_channels,
                                  a=math.sqrt(5))
        else:
            raise RuntimeError(
                f"Linear layer weight initializer "
                f"'{self.weight_initializer}' is not supported")

        if self.in_channels <= 0 or self.bias is None:
            pass
        elif self.bias_initializer == 'zeros':
            inits.zeros(self.bias)
        elif self.bias_initializer is None:
            inits.uniform(self.in_channels, self.bias)
        else:
            raise RuntimeError(
                f"Linear layer bias initializer "
                f"'{self.bias_initializer}' is not supported")
 def reset_parameters(self):
     """Re-initialize the weight and reset the wrapped RNN."""
     uniform(self.out_channels, self.weight)
     self.rnn.reset_parameters()
Exemple #8
0
 def reset_parameters(self):
     """Uniformly re-initialize weight and bias, scaled by the fan-in."""
     fan = self.in_channels
     uniform(fan, self.weight)
     uniform(fan, self.bias)
Exemple #9
0
 def reset_parameters(self):
     """Re-initialize: Glorot weight, zero bias, uniform scalar alpha."""
     uniform(1, self.alpha)
     zeros(self.bias)
     glorot(self.weight)
Exemple #10
0
 def reset_parameters(self):
     """Uniformly re-initialize all weight tensors; zero the bias."""
     node_size = self.node_channels * self.out_channels
     inits.uniform(node_size, self.node_weight)
     inits.uniform(node_size, self.neighbor_weight)
     inits.uniform(self.edge_channels * self.out_channels, self.edge_weight)
     inits.zeros(self.bias)
Exemple #11
0
 def reset_parameters(self):
     """Re-initialize this layer's weight and both sub-linear layers."""
     uniform(self.in_channels, self.weight)
     for lin in (self.lin1, self.lin2):
         lin.reset_parameters()
Exemple #12
0
    def reset_parameters(self):
        """Uniformly re-initialize weight and bias with a shared fan size.

        NOTE(review): the magic factor 144 is undocumented here --
        presumably a fixed spatial kernel size; confirm upstream.
        """
        fan = self.in_channels * 144 * self.K
        uniform(fan, self.weight)
        uniform(fan, self.bias)
Exemple #13
0
 def reset_parameters(self):
     """Reset the wrapped network, the root weight and the bias."""
     size = self.in_channels
     reset(self.nn)
     uniform(size, self.root)
     uniform(size, self.bias)
Exemple #14
0
 def reset_parameters(self):
     """Uniformly re-initialize every parameter with the same fan size."""
     fan = self.num_bases * self.in_channels
     for param in (self.basis, self.att, self.root, self.bias):
         uniform(fan, param)
Exemple #15
0
 def reset_parameters(self):
     """Uniformly re-initialize the center/neighbor weights and the bias."""
     for param in (self.Wc, self.Wn, self.bias):
         uniform(self.in_channels, param)
Exemple #16
0
 def reset_parameters(self):
     """Uniformly re-initialize weight, scaled by its first dimension."""
     uniform(self.weight.size(0), self.weight)
Exemple #17
0
 def reset_parameters(self):
     """Uniformly re-initialize weights, scaled by their first dimension."""
     size = self.weights.size(0)
     uniform(size, self.weights)
Exemple #18
0
 def reset_parameters(self):
     """Uniformly re-initialize weights when present; no-op otherwise."""
     if self.weights is None:
         return
     uniform(self.weights.size(0), self.weights)
Exemple #19
0
    def reset_parameters(self):
        """Glorot-initialize the basis tensor; uniform bias scaled by
        ``num_relations * in_dim``."""
        glorot(self.basis)
        uniform(self.num_relations * self.in_dim, self.bias)
Exemple #20
0
 def reset_parameters(self):
     """Xavier-uniform weight with relu gain; uniform bias."""
     gain = nn.init.calculate_gain('relu')
     self.weight.data = nn.init.xavier_uniform_(self.weight.data, gain=gain)
     uniform(self.in_channels, self.bias)
Exemple #21
0
 def reset_parameters(self):
     """Uniform re-init for all parameters; fan = heads * in_channels."""
     fan = self.heads * self.in_channels
     for param in (self.weight, self.att_weight, self.bias):
         uniform(fan, param)
Exemple #22
0
 def reset_parameters(self):
     """Uniform re-init for all parameters; fan = edge_attr_dim * in_channels."""
     fan = self.edge_attr_dim * self.in_channels
     for param in (self.weight, self.att_weight, self.bias):
         uniform(fan, param)
Exemple #23
0
 def reset_parameters(self):
     """Uniformly re-initialize weight with fan = in_channels."""
     uniform(self.in_channels, self.weight)
Exemple #24
0
 def reset_parameters(self):
     """Re-initialize the output weight and reset the wrapped RNN."""
     size = self.out_channels
     uniform(size, self.weight)
     self.rnn.reset_parameters()
Exemple #25
0
 def reset_parameters(self):
     """Kaiming-uniform weight (a=sqrt(5), as in torch.nn.Linear's
     default reset); uniform bias scaled by the fan-in."""
     fan = self.in_channels
     inits.kaiming_uniform(self.weight, fan=fan, a=math.sqrt(5))
     inits.uniform(fan, self.bias)
Exemple #26
0
    def reset_parameters(self):
        """Uniformly re-initialize the weights of all three sub-layers."""
        for layer in (self.fc, self.fc_attention, self.fc_edge_attr):
            uniform(self.in_channels, layer.weight)