Example #1
0
 def __init__(self, gnn_dim, n_layers, nn_dim):
     """Build the GGNN-GWM graph encoder and two GraphMLP output heads.

     Args:
         gnn_dim: hidden channel size of the graph encoder.
         n_layers: number of update layers in the graph encoder.
         nn_dim: input width of both GraphMLP heads.

     The heads output 5 and 2 units respectively — presumably per-pair
     class scores; confirm against the forward pass.
     """
     super(pair_matrix_model, self).__init__()
     with self.init_scope():
         # Links assigned inside init_scope are registered as children.
         encoder = ggnn_gwm(hidden_channels=gnn_dim,
                            n_update_layers=n_layers)
         self.ggnn_gwm = encoder
         self.nn1 = GraphMLP(channels=[nn_dim, 5])
         self.nn2 = GraphMLP(channels=[nn_dim, 2])
Example #2
0
 def __init__(self, in_channels=None, hidden_channels=16, out_channels=None,
              dropout_ratio=0.5, n_layers=2, **kwargs):
     """Set up the GIN update step: an n_layers MLP and a dropout ratio.

     Args:
         in_channels: input feature size, passed through to GraphMLP.
         hidden_channels: width of the hidden layers (default 16).
         out_channels: output width; defaults to hidden_channels when None.
         dropout_ratio: dropout probability stored for later use.
         n_layers: total number of MLP layers (default 2).
         **kwargs: extra arguments, currently ignored here.
     """
     out_channels = hidden_channels if out_channels is None else out_channels
     super(GINUpdate, self).__init__()
     # n_layers - 1 hidden layers of equal width, then the output layer.
     channels = [hidden_channels for _ in range(n_layers - 1)] + [out_channels]
     with self.init_scope():
         # ReLU-activated MLP, registered as a child link.
         self.graph_mlp = GraphMLP(channels=channels,
                                   in_channels=in_channels,
                                   activation=functions.relu)
     self.dropout_ratio = dropout_ratio
 def __init__(self, out_dim, hidden_dim, n_layers, concat_hidden,
              weight_tying, nn_hidden_dim, gwm):
     """Build a GWM graph-conv model plus a 2-way GraphMLP head.

     Args:
         out_dim: output dimension of the graph conv model.
         hidden_dim: hidden channel size of the graph conv model.
         n_layers: number of GGNN update layers.
         concat_hidden: whether the conv model concatenates hidden states.
         weight_tying: whether update-layer weights are shared.
         nn_hidden_dim: input width of the GraphMLP head.
         gwm: enables the graph warp module (with_gwm flag).
     """
     super(ggnngwm_stop_step, self).__init__()
     # Graph-conv configuration: GGNN updates over 5 edge types.
     conv_kwargs = dict(out_dim=out_dim,
                        hidden_channels=hidden_dim,
                        n_update_layers=n_layers,
                        update_layer=GGNNUpdate,
                        concat_hidden=concat_hidden,
                        weight_tying=weight_tying,
                        with_gwm=gwm,
                        n_edge_types=5)
     with self.init_scope():
         self.ggnngwm = GWMGraphConvModel(**conv_kwargs)
         # Two-unit output: binary classification after softmax.
         self.mlp = GraphMLP(channels=[nn_hidden_dim, 2])
 def __init__(self, out_dim, hidden_dim, n_layers, concat_hidden,
              weight_tying, nn_hidden_dim, gwm, topK):
     """Build a GWM graph-conv model plus a 1-unit per-atom GraphMLP head.

     Args:
         out_dim: output dimension of the graph conv model.
         hidden_dim: hidden channel size of the graph conv model.
         n_layers: number of GGNN update layers.
         concat_hidden: whether the conv model concatenates hidden states.
         weight_tying: whether update-layer weights are shared.
         nn_hidden_dim: input width of the GraphMLP head.
         gwm: enables the graph warp module (with_gwm flag).
         topK: stored on the instance; presumably a top-K atom cutoff
             used in the forward pass — confirm against callers.
     """
     super(ggnngwm_atom, self).__init__()
     # Graph-conv configuration: GGNN updates over 5 edge types.
     conv_kwargs = dict(out_dim=out_dim,
                        hidden_channels=hidden_dim,
                        n_update_layers=n_layers,
                        update_layer=GGNNUpdate,
                        concat_hidden=concat_hidden,
                        weight_tying=weight_tying,
                        with_gwm=gwm,
                        n_edge_types=5)
     with self.init_scope():
         self.ggnngwm = GWMGraphConvModel(**conv_kwargs)
         # One sigmoid score per atom.
         self.mlp = GraphMLP(channels=[nn_hidden_dim, 1])
         # Plain int attribute; init_scope only registers Link assignments.
         self.topK = topK
 def __init__(self, out_dim, hidden_dim, n_layers, concat_hidden,
              weight_tying, nn_hidden_dim, gwm):
     """Build a GWM graph-conv model plus a 4-way action GraphMLP head.

     Args:
         out_dim: output dimension of the graph conv model; also stored
             on the instance.
         hidden_dim: hidden channel size of the graph conv model.
         n_layers: number of GGNN update layers.
         concat_hidden: whether the conv model concatenates hidden states.
         weight_tying: whether update-layer weights are shared.
         nn_hidden_dim: input width of the GraphMLP head.
         gwm: enables the graph warp module (with_gwm flag).

     NOTE(review): the class name here is spelled 'ggnngwn' while sibling
     classes use 'ggnngwm'; the super() argument must match the actual
     class name — confirm the definition out of view.
     """
     super(ggnngwn_action_step, self).__init__()
     # Graph-conv configuration: GGNN updates over 5 edge types.
     conv_kwargs = dict(out_dim=out_dim,
                        hidden_channels=hidden_dim,
                        n_update_layers=n_layers,
                        update_layer=GGNNUpdate,
                        concat_hidden=concat_hidden,
                        weight_tying=weight_tying,
                        with_gwm=gwm,
                        n_edge_types=5)
     with self.init_scope():
         self.ggnngwm = GWMGraphConvModel(**conv_kwargs)
         # Softmax over [remove, single, double, triple] per pair:
         # 5 - 1 = 4 outputs, aromatic excluded.
         self.mlp = GraphMLP(channels=[nn_hidden_dim, 5 - 1])
         self.out_dim = out_dim