Example #1
 def build_output_layer(self):
     # Output layer: tanh activation, unlike the leaky ReLU used in the other layers.
     print('Building an OUTPUT layer of {}x{}'.format(
         self.hidden_dimensions[-2], self.hidden_dimensions[-1]))
     return RelGraphConv(self.hidden_dimensions[-2],
                         self.hidden_dimensions[-1],
                         self.num_rels,
                         regularizer='basis',
                         dropout=self.feat_drop,
                         num_bases=self.num_bases,
                         activation=torch.tanh)
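
Each builder returns a plain DGL RelGraphConv module, so the layer's forward pass takes the graph, the node features, and per-edge relation ids. Below is a minimal sketch of calling one such layer directly; the graph, dimensions, and hyperparameter values are made up for illustration and are not part of the example above.

import torch
import dgl
from dgl.nn.pytorch import RelGraphConv

# Assumed toy setup: 10 nodes, 30 random edges, 2 relation types.
g = dgl.rand_graph(10, 30)
etypes = torch.randint(0, 2, (g.num_edges(),))

layer = RelGraphConv(16, 4, num_rels=2, regularizer='basis',
                     num_bases=2, dropout=0.1, activation=torch.tanh)
h = layer(g, torch.randn(10, 16), etypes)  # -> tensor of shape (10, 4)
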
Example #2
 def build_input_layer(self):
     # Input layer: maps in_dim node features to the first hidden size.
     print('Building an INPUT layer of {}x{}'.format(
         self.in_dim, self.hidden_dimensions[0]))
     return RelGraphConv(self.in_dim,
                         self.hidden_dimensions[0],
                         self.num_rels,
                         regularizer='basis',
                         dropout=self.feat_drop,
                         num_bases=self.num_bases,
                         activation=F.leaky_relu)
Example #3
 def build_hidden_layer(self, i):
     # i-th hidden layer: hidden_dimensions[i] -> hidden_dimensions[i + 1].
     print('Building a HIDDEN layer of {}x{}'.format(
         self.hidden_dimensions[i], self.hidden_dimensions[i + 1]))
     return RelGraphConv(self.hidden_dimensions[i],
                         self.hidden_dimensions[i + 1],
                         self.num_rels,
                         regularizer='basis',
                         dropout=self.feat_drop,
                         num_bases=self.num_bases,
                         activation=F.leaky_relu)
Example #4
 def build_gnn_output_layer(self):
     # Output layer of the RGCN feeding a dueling DQN; activation is configurable.
     print(f'Building an OUTPUT layer for RGCN DuelingDQN of '
           f'{self.gnn_hidden_dimensions[-2]}x{self.gnn_hidden_dimensions[-1]} '
           f'(activation: {self.gnn_activation[-1]})')
     return RelGraphConv(self.gnn_hidden_dimensions[-2],
                         self.gnn_hidden_dimensions[-1],
                         self.num_rels,
                         regularizer='basis',
                         dropout=self.feat_drop,
                         num_bases=self.num_bases,
                         activation=self.gnn_activation[-1])
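
Taken together, Examples #1-#3 suggest the usual pattern of one input layer, a stack of hidden layers, and one output layer chained into a full RGCN. The sketch below assembles the three builders into a hypothetical nn.Module; the class name, constructor arguments, and the toy usage at the bottom are assumptions for illustration, not code from the examples.

import torch
import torch.nn as nn
import torch.nn.functional as F
import dgl
from dgl.nn.pytorch import RelGraphConv

class RGCN(nn.Module):
    """Hypothetical wrapper chaining the builders from Examples #1-#3."""

    def __init__(self, in_dim, hidden_dimensions, num_rels, num_bases,
                 feat_drop=0.0):
        super().__init__()
        self.in_dim = in_dim
        self.hidden_dimensions = hidden_dimensions
        self.num_rels = num_rels
        self.num_bases = num_bases
        self.feat_drop = feat_drop
        # One input layer, len(hidden_dimensions) - 2 hidden layers, one output layer.
        layers = [self.build_input_layer()]
        layers += [self.build_hidden_layer(i)
                   for i in range(len(hidden_dimensions) - 2)]
        layers.append(self.build_output_layer())
        self.layers = nn.ModuleList(layers)

    def build_input_layer(self):
        return RelGraphConv(self.in_dim, self.hidden_dimensions[0],
                            self.num_rels, regularizer='basis',
                            num_bases=self.num_bases, dropout=self.feat_drop,
                            activation=F.leaky_relu)

    def build_hidden_layer(self, i):
        return RelGraphConv(self.hidden_dimensions[i],
                            self.hidden_dimensions[i + 1],
                            self.num_rels, regularizer='basis',
                            num_bases=self.num_bases, dropout=self.feat_drop,
                            activation=F.leaky_relu)

    def build_output_layer(self):
        return RelGraphConv(self.hidden_dimensions[-2],
                            self.hidden_dimensions[-1],
                            self.num_rels, regularizer='basis',
                            num_bases=self.num_bases, dropout=self.feat_drop,
                            activation=torch.tanh)

    def forward(self, g, feats, etypes):
        h = feats
        for layer in self.layers:
            h = layer(g, h, etypes)
        return h

# Toy usage (assumed shapes): 8 -> 16 -> 16 -> 4 over a random 2-relation graph.
g = dgl.rand_graph(10, 30)
etypes = torch.randint(0, 2, (g.num_edges(),))
model = RGCN(in_dim=8, hidden_dimensions=[16, 16, 4], num_rels=2, num_bases=2)
out = model(g, torch.randn(10, 8), etypes)  # -> shape (10, 4)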