Example #1
    def __init__(self, config):
        super().__init__(config)
        # Set2Set outputs a 2 * n_node_hidden graph embedding, hence the
        # doubled input widths of the classifier heads below.
        self.set2set = Set2Set(config['n_node_hidden'], n_iters=6, n_layers=2)
        self.cls_act = MLP(config['n_node_hidden'] * 2, 2)
        self.cls_del = MLP(
            config['n_node_hidden'] * 2 + config['n_edge_hidden'], 2)
        self.cls_add = MLP(config['n_node_hidden'], 2)
        self.cls_arm = MLP(config['n_node_hidden'], config['vocab_size'])
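Only the config keys are fixed by the snippet; the enclosing class and its base are not shown. A minimal sketch of a config that satisfies this constructor, with illustrative values (all snippets on this page additionally assume import torch, import torch.nn as nn, and a Set2Set implementation such as DGL's in scope):

# Hypothetical config; the key names come from the snippet, the values are illustrative.
config = {
    'n_node_hidden': 128,  # node width; Set2Set emits 2 * 128 per graph
    'n_edge_hidden': 64,   # edge width, consumed only by cls_del
    'vocab_size': 85,      # number of classes scored by cls_arm
}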
Example #2
    def __init__(self, params):
        super(RelTM, self).__init__()
        num_atom_type = params['num_atom_type']
        num_bond_type = params['num_bond_type']
        # embdim = params['embedding_dim']
        self.hdim = params["hidden_dim"]
        self.norel = params.get("norel", False)
        self.edge_feat = params["edge_feat"]
        self.pos_enc = params["pos_enc"]
        self.readout = params["readout"]
        numheads = params["n_heads"]
        dropout = params["dropout"]
        dropoutemb = params["in_feat_dropout"]
        dropout_red = params.get("dropout_red", 0.)
        dropout_attn = params.get("dropout_attn", 0.)
        # dropout_act = params.get("dropout_act", 0.)
        rdim = params.get("rdim", self.hdim)
        usevallin = params.get("usevallin", False)
        cat_rel = params.get("cat_rel", True)
        cat_tgt = params.get("cat_tgt", False)
        use_gate = params.get("use_gate", False)
        skipatt = params.get("skipatt", False)
        self.numlayers = params.get("numlayers", 1)
        # self.numrepsperlayer = params["numrepsperlayer"] if "numrepsperlayer" in params else 1
        self.numrepsperlayer = params["L"]
        self.device = params['device']

        use_sgru = params.get("use_sgru", False)
        use_logdegree = params.get("use_logdegree", False)

        self.embedding_h = nn.Embedding(num_atom_type, self.hdim)

        # The extra embedding slot (num_bond_type + 1) is reserved for the
        # self-loop edge type stored in self.self_edge_id.
        if self.edge_feat:
            self.embedding_e = nn.Embedding(num_bond_type + 1, self.hdim)
        else:
            self.embedding_e = nn.Linear(1, self.hdim)
        self.self_edge_id = num_bond_type

        self.in_feat_dropout = nn.Dropout(dropoutemb)

        # Note: numrels is hard-coded to 10 here instead of being derived
        # from num_bond_type, which the constructor already reads.
        self.layers = torch.nn.ModuleList([
            RelTMCell(self.hdim,
                      numrels=10,
                      numheads=numheads,
                      dropout=dropout,
                      norel=self.norel,
                      use_gate=use_gate) for _ in range(self.numlayers)
        ])
        self.dropout = torch.nn.Dropout(dropout)
        self.MLP_layer = MLPReadout(self.hdim, 1)  # single output dim for the regression task

        if self.readout == "set2set":
            self.set2set = Set2Set(self.hdim, n_iters=10, n_layers=1)
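Because the optional keys fall back to defaults via params.get, a dict with only the required keys is enough to build the model. A minimal sketch (key names from the snippet, values illustrative):

# Hypothetical hyperparameters; only the key names are taken from the snippet.
params = {
    'num_atom_type': 28, 'num_bond_type': 4,
    'hidden_dim': 128, 'edge_feat': True,
    'pos_enc': False, 'readout': 'set2set',
    'n_heads': 8, 'dropout': 0.1, 'in_feat_dropout': 0.0,
    'L': 1,          # repetitions per RelTMCell layer
    'device': 'cpu',
}
model = RelTM(params)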
Example #3
    def __init__(self, config):
        super().__init__()
        self.device = config['device']
        self.encoder = GraphEncoder(config['n_atom_feat'],
                                    config['n_node_hidden'],
                                    config['n_bond_feat'],
                                    config['n_edge_hidden'],
                                    config['n_layers'])
        # Set2Set outputs a 2 * n_node_hidden graph embedding, hence the * 2 below.
        self.set2set = Set2Set(config['n_node_hidden'], n_iters=6, n_layers=2)
        self.classifier = MLP(config['n_node_hidden'] * 2, 2)
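As in Example #1, the classifier input width is doubled because Set2Set concatenates the attention readout with its LSTM query vector. A hypothetical config for this constructor (key names from the snippet, values illustrative):

# Hypothetical config; only the key names are taken from the snippet.
config = {
    'device': 'cpu',
    'n_atom_feat': 40, 'n_bond_feat': 10,  # raw atom/bond feature sizes
    'n_node_hidden': 128, 'n_edge_hidden': 64,
    'n_layers': 3,                         # GraphEncoder message-passing rounds
}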
Example #4
    def __init__(self,
                 num_node_emb_list,
                 num_edge_emb_list,
                 num_layers=5,
                 emb_dim=300,
                 JK='last',
                 dropout=0.5,
                 readout='mean',
                 n_tasks=1):
        super(GINPredictor, self).__init__()

        if num_layers < 2:
            raise ValueError('Number of GNN layers must be greater '
                             'than 1, got {:d}'.format(num_layers))

        self.gnn = GIN(num_node_emb_list=num_node_emb_list,
                       num_edge_emb_list=num_edge_emb_list,
                       num_layers=num_layers,
                       emb_dim=emb_dim,
                       JK=JK,
                       dropout=dropout)

        if readout == 'sum':
            self.readout = SumPooling()
        elif readout == 'mean':
            self.readout = AvgPooling()
        elif readout == 'max':
            self.readout = MaxPooling()
        elif readout == 'attention':
            if JK == 'concat':
                self.readout = GlobalAttentionPooling(
                    gate_nn=nn.Linear((num_layers + 1) * emb_dim, 1))
            else:
                self.readout = GlobalAttentionPooling(
                    gate_nn=nn.Linear(emb_dim, 1))
        elif readout == 'set2set':
            # Set2Set requires an input dimension; mirror the attention branch above.
            # The n_iters/n_layers values here are assumptions, not from the source.
            if JK == 'concat':
                self.readout = Set2Set((num_layers + 1) * emb_dim, n_iters=6, n_layers=2)
            else:
                self.readout = Set2Set(emb_dim, n_iters=6, n_layers=2)
        else:
            raise ValueError(
                "Expect readout to be 'sum', 'mean', "
                "'max', 'attention' or 'set2set', got {}".format(readout))

        if JK == 'concat':
            self.predict = nn.Linear((num_layers + 1) * emb_dim, n_tasks)
        else:
            self.predict = nn.Linear(emb_dim, n_tasks)
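This constructor mirrors the GINPredictor interface in DGL-LifeSci. A sketch of an instantiation, assuming the two-feature atom and bond vocabularies used by that library's pretrained GIN models (an assumption, not stated in the snippet):

# Assumed vocabulary sizes follow the pretrained-GIN convention:
# [atom type, chirality] for nodes, [bond type, bond direction] for edges.
model = GINPredictor(num_node_emb_list=[120, 3],
                     num_edge_emb_list=[6, 3],
                     num_layers=5, emb_dim=300,
                     JK='last', dropout=0.5,
                     readout='attention', n_tasks=1)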
Example #5
    def __init__(self, params):
        super(MASNet, self).__init__()
        num_atom_type = params['num_atom_type']
        num_bond_type = params['num_bond_type']
        # embdim = params['embedding_dim']
        self.hdim = params["hidden_dim"]
        self.norel = params.get("norel", False)
        self.edge_feat = params["edge_feat"]
        self.pos_enc = params["pos_enc"]
        self.readout = params["readout"]
        numheads = params["n_heads"]
        dropout = params["dropout"]
        dropout_red = params.get("dropout_red", 0.)
        dropout_attn = params.get("dropout_attn", 0.)
        # dropout_act = params.get("dropout_act", 0.)
        rdim = params.get("rdim", self.hdim)
        self.numlayers = params.get("numlayers", 1)
        self.numsublayers = params.get("numsublayers", 0)
        self.numechos = params.get("numechos", 0)
        # self.numrepsperlayer = params["numrepsperlayer"] if "numrepsperlayer" in params else 1
        self.numrepspergnnlayer = params["L"]
        self.device = params['device']

        use_sgru = params.get("use_sgru", False)

        self.embedding_h = nn.Embedding(num_atom_type, self.hdim)

        if self.edge_feat:
            # self.embedding_e = nn.Embedding(num_bond_type + 1, self.hdim)
            pass
        else:
            # self.embedding_e = nn.Linear(1, self.hdim)
            raise NotImplementedError("featureless edges are not supported")
        self.self_edge_id = num_bond_type

        self.in_feat_dropout = nn.Dropout(dropout)

        self.net = MASNN(self.hdim,
                         numlayers=self.numlayers,
                         numgnnperlayer=self.numsublayers,
                         numrepsperlayer=1,
                         numrepspergnnlayer=self.numrepspergnnlayer,
                         numechos=self.numechos,
                         numrels=num_bond_type + 1,
                         numheads=numheads,
                         dropout=dropout,
                         use_sgru=use_sgru)

        self.dropout = torch.nn.Dropout(dropout)
        self.MLP_layer = MLPReadout(self.hdim, 1)  # single output dim for the regression task

        if self.readout == "set2set":
            self.set2set = Set2Set(self.hdim, n_iters=10, n_layers=1)
        elif "mas" in self.readout:
            self.readoutm = MaskedReadout(mode=self.readout,
                                          maskattr="iscentr")
        else:
            self.readoutm = Readout(mode=self.readout)
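A minimal sketch of a params dict for MASNet, again leaning on the params.get defaults for the optional keys (key names from the snippet, values illustrative):

# Hypothetical hyperparameters; only the key names are taken from the snippet.
params = {
    'num_atom_type': 28, 'num_bond_type': 4,
    'hidden_dim': 128,
    'edge_feat': True,   # must be True; the featureless branch raises
    'pos_enc': False, 'readout': 'mean',
    'n_heads': 8, 'dropout': 0.1,
    'L': 1,              # repetitions per GNN sublayer
    'device': 'cpu',
}
model = MASNet(params)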