def build_hidden_layer(self):
    """Build one hidden R-GCN layer mapping h_dim -> h_dim with ReLU and dropout."""
    layer = RGCNLayer(
        self.h_dim,
        self.h_dim,
        self.num_rels,
        self.num_bases,
        dropout=self.dropout,
        activation=F.relu,
    )
    return layer
def build_input_layer(self):
    """Build the input R-GCN layer, embedding num_nodes one-hot ids into h_dim."""
    layer = RGCNLayer(
        self.num_nodes,
        self.h_dim,
        self.num_rels,
        self.num_bases,
        activation=F.relu,
        is_input_layer=True,
    )
    return layer
def build_hidden_layer(self, idx):
    """Build hidden R-GCN layer *idx* (h_dim -> h_dim, rank-2 decomposition).

    Fix: the original fused `act = ...` and `return ...` onto one physical
    line without a separator, which is a SyntaxError; statements restored
    to separate lines.

    :param idx: zero-based index of this hidden layer.
    :return: a configured RGCNLayer.
    """
    # ReLU on every hidden layer except the last, whose raw output
    # feeds the next stage directly.
    act = F.relu if idx < self.num_hidden_layers - 1 else None
    return RGCNLayer(self.h_dim, self.h_dim, self.num_rels, self.num_bases,
                     activation=act, rank=2)
def build_input_layer(self):
    """Build the input R-GCN layer projecting in_feat node features to h_dim."""
    layer = RGCNLayer(
        self.in_feat,
        self.h_dim,
        self.num_rels,
        self.num_bases,
        activation=F.relu,
        is_input_layer=True,
        node_features=self.features,
    )
    return layer
def build_hidden_layer(self, idx):
    """Build hidden R-GCN layer *idx* (h_dim -> h_dim, self-loop + dropout).

    Fix: the original fused `act = ...` and `return ...` onto one physical
    line without a separator, which is a SyntaxError; statements restored
    to separate lines.

    :param idx: zero-based index of this hidden layer.
    :return: a configured RGCNLayer.
    """
    # ReLU on every hidden layer except the last one.
    act = F.relu if idx < self.num_hidden_layers - 1 else None
    return RGCNLayer(self.h_dim, self.h_dim, self.num_rels, self.num_bases,
                     activation=act, self_loop=True, dropout=self.dropout)
def build_hidden_layer(self, idx):
    """Build hidden layer *idx*, attention-aware when USE_ATTN is set.

    Fixes: the original fused several statements onto one physical line
    without separators (SyntaxError), and used the redundant
    `True if ... else False` form for the concat flag.

    :param idx: zero-based index of this hidden layer.
    :return: a configured RGCNLayer (multi-head attention variant when
             USE_ATTN is truthy, plain variant otherwise).
    """
    is_last = idx >= self.num_hidden_layers - 1
    # ReLU and head concatenation on all but the final hidden layer;
    # the last layer averages heads and emits a raw (un-activated) output.
    act = None if is_last else F.relu
    concat = not is_last
    if USE_ATTN:
        return RGCNLayer(self.h_dim, self.h_dim, self.num_rels, self.num_bases,
                         num_heads=self.num_heads, activation=act,
                         self_loop=True, dropout=self.dropout,
                         concat_attn=concat,
                         relation_type=self.relation_type,
                         relation_size=self.relation_size)
    return RGCNLayer(self.h_dim, self.h_dim, self.num_rels, self.num_bases,
                     activation=act, self_loop=True, dropout=self.dropout)
def build_output_layer(self):
    """Build the output R-GCN layer (h_dim -> out_dim) with per-node softmax.

    Fix: ``torch.nn.functional.softmax`` takes ``dim``, not ``axis``;
    ``partial(F.softmax, axis=1)`` raises a TypeError the first time the
    activation is invoked. Changed to ``dim=1`` (softmax over the class
    dimension of a (num_nodes, out_dim) output).

    :return: a configured RGCNLayer whose activation is a class softmax.
    """
    return RGCNLayer(self.h_dim, self.out_dim, self.num_rels, self.num_bases,
                     activation=partial(F.softmax, dim=1))
def build_hidden_layer(self, idx):
    """Build one hidden R-GCN layer (h_dim -> h_dim) with ReLU activation.

    The *idx* argument is accepted for interface parity with sibling
    builders but is not consulted here.
    """
    layer = RGCNLayer(
        self.h_dim,
        self.h_dim,
        self.num_rels,
        self.num_bases,
        activation=F.relu,
    )
    return layer
def build_output_layer(self):
    """Build the output R-GCN layer (h_dim -> out_dim) with no activation,
    leaving the logits raw for a downstream loss such as cross-entropy."""
    layer = RGCNLayer(
        self.h_dim,
        self.out_dim,
        self.num_rels,
        self.num_bases,
        activation=None,
    )
    return layer