Example 1
import torch.nn as nn
import torch.nn.functional as F

# GMMLayer and MLPReadout are assumed to be provided by the surrounding
# codebase; the class name is inferred from the "for MoNet" comments.
class MoNet(nn.Module):
    def __init__(self, net_params):
        super().__init__()
        num_atom_type = net_params['num_atom_type']
        hidden_dim = net_params['hidden_dim']
        out_dim = net_params['out_dim']
        kernel = net_params['kernel']                       # for MoNet
        dim = net_params['pseudo_dim_MoNet']                # for MoNet
        dropout = net_params['dropout']
        n_layers = net_params['L']
        self.readout = net_params['readout']                      
        graph_norm = net_params['graph_norm']      
        batch_norm = net_params['batch_norm']
        residual = net_params['residual']  
        self.device = net_params['device']
        
        aggr_type = "sum"                                    # default for MoNet
        
        self.embedding_h = nn.Embedding(num_atom_type, hidden_dim)
        
        self.layers = nn.ModuleList()
        self.pseudo_proj = nn.ModuleList()

        # Hidden layers
        for _ in range(n_layers-1):
            self.layers.append(GMMLayer(hidden_dim, hidden_dim, F.relu, dim, kernel, aggr_type,
                                        dropout, graph_norm, batch_norm, residual))
            self.pseudo_proj.append(nn.Sequential(nn.Linear(2, dim), nn.Tanh()))
            
        # Output layer
        self.layers.append(GMMLayer(hidden_dim, out_dim, None, dim, kernel, aggr_type,
                                    dropout, graph_norm, batch_norm, residual))
        self.pseudo_proj.append(nn.Sequential(nn.Linear(2, dim), nn.Tanh()))
        
        self.MLP_layer = MLPReadout(out_dim, 1)  # output dim 1 since this is a regression task
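
This first constructor targets graph-level regression: discrete atom types are embedded with nn.Embedding, n_layers GMM layers are stacked (the last one without activation), and each layer gets its own Linear + Tanh projection of the 2-D pseudo-coordinates. A minimal net_params dictionary that satisfies the constructor is sketched below; every key is taken from the code above, but the values are illustrative assumptions, not the benchmark's tuned settings.

# Illustrative hyperparameters for Example 1; all values are assumptions.
net_params = {
    'num_atom_type': 28,       # size of the atom-type vocabulary (assumed)
    'hidden_dim': 90,
    'out_dim': 90,
    'kernel': 3,               # Gaussian kernels per GMM layer
    'pseudo_dim_MoNet': 2,     # dimensionality of the projected pseudo-coordinates
    'dropout': 0.0,
    'L': 4,                    # total number of GMM layers
    'readout': 'mean',
    'graph_norm': True,
    'batch_norm': True,
    'residual': True,
    'device': 'cpu',
}
model = MoNet(net_params)      # MoNet is the class name assumed above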
Example 2
import torch.nn as nn

# GMMLayer and MLPReadout are assumed to be provided by the surrounding
# codebase.
class MoNet(nn.Module):
    def __init__(self, net_params):
        super().__init__()
        self.name = 'MoNet'
        in_dim = net_params['in_dim']
        hidden_dim = net_params['hidden_dim']
        out_dim = net_params['out_dim']
        kernel = net_params['kernel']                       # for MoNet
        dim = net_params['pseudo_dim_MoNet']                # for MoNet
        n_classes = net_params['n_classes']
        dropout = net_params['dropout']
        n_layers = net_params['L']
        self.readout = net_params['readout']                            
        batch_norm = net_params['batch_norm']
        residual = net_params['residual']  
        self.device = net_params['device']
        
        aggr_type = "sum"                                    # default for MoNet
        
        self.embedding_h = nn.Linear(in_dim, hidden_dim)
        
        self.layers = nn.ModuleList()
        self.pseudo_proj = nn.ModuleList()

        # Hidden layers
        for _ in range(n_layers-1):
            self.layers.append(GMMLayer(hidden_dim, hidden_dim, dim, kernel, aggr_type,
                                        dropout, batch_norm, residual))
            self.pseudo_proj.append(nn.Sequential(nn.Linear(2, dim), nn.Tanh()))
            
        # Output layer
        self.layers.append(GMMLayer(hidden_dim, out_dim, dim, kernel, aggr_type,
                                    dropout, batch_norm, residual))
        self.pseudo_proj.append(nn.Sequential(nn.Linear(2, dim), nn.Tanh()))
        
        self.MLP_layer = MLPReadout(out_dim, n_classes)
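
This second constructor is a classification variant: dense node features enter through nn.Linear rather than an embedding table, and the head is an n_classes-way MLPReadout. The snippet again stops at the constructor; a forward pass consistent with these modules is sketched below, under the assumption that GMMLayer follows the DGL-style convention of taking the graph, the node features, and the projected pseudo-coordinates.

    # Sketch of a matching forward pass; the layer(g, h, pseudo) call
    # signature is an assumption, not part of the original snippet.
    def forward(self, g, h, pseudo):
        h = self.embedding_h(h)
        for conv, proj in zip(self.layers, self.pseudo_proj):
            h = conv(g, h, proj(pseudo))       # one GMM step per layer
        return self.MLP_layer(h)               # per-node scores over n_classes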
Example 3
import torch.nn as nn

# GMMLayer and MLPReadout are assumed to be provided by the surrounding
# codebase; the class name is inferred from the aggr_type comment.
class MoNet(nn.Module):
    def __init__(self, net_params):
        super().__init__()

        self.readout = net_params.readout
        self.device = net_params.device
        self.aggr_type = "sum"  # default for MoNet

        self.embedding_h = nn.Linear(net_params.in_dim, net_params.hidden_dim)

        self.layers = nn.ModuleList()
        self.pseudo_proj = nn.ModuleList()

        # Hidden layers
        for _ in range(net_params.L - 1):
            self.layers.append(
                GMMLayer(net_params.hidden_dim, net_params.hidden_dim,
                         net_params.pseudo_dim_MoNet, net_params.kernel,
                         self.aggr_type, net_params.dropout,
                         net_params.graph_norm, net_params.batch_norm,
                         net_params.residual))
            self.pseudo_proj.append(
                nn.Sequential(nn.Linear(2, net_params.pseudo_dim_MoNet),
                              nn.Tanh()))

        # Output layer
        self.layers.append(
            GMMLayer(net_params.hidden_dim, net_params.out_dim,
                     net_params.pseudo_dim_MoNet, net_params.kernel,
                     self.aggr_type, net_params.dropout, net_params.graph_norm,
                     net_params.batch_norm, net_params.residual))
        self.pseudo_proj.append(
            nn.Sequential(nn.Linear(2, net_params.pseudo_dim_MoNet),
                          nn.Tanh()))

        self.readout_mlp = MLPReadout(net_params.out_dim, net_params.n_classes)
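
Unlike the first two examples, this constructor reads its hyperparameters as attributes (net_params.hidden_dim) rather than dictionary keys, so a plain dict will not satisfy it. One compatible way to build the configuration is types.SimpleNamespace, sketched below; the field names all come from the constructor above, while the values are illustrative assumptions.

from types import SimpleNamespace

# Attribute-style configuration for Example 3; all values are assumptions.
net_params = SimpleNamespace(
    in_dim=3,
    hidden_dim=90,
    out_dim=90,
    kernel=3,
    pseudo_dim_MoNet=2,
    dropout=0.0,
    L=4,
    readout='mean',
    graph_norm=True,
    batch_norm=True,
    residual=True,
    n_classes=6,
    device='cpu',
)
model = MoNet(net_params)   # class name assumed, as in the snippets above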