Example 1
    def __init__(self, num_tasks = 1, num_layers = 5, emb_dim = 300, gnn_type = 'gin',
                 virtual_node = True, residual = False, drop_ratio = 0, JK = "last",
                 graph_pooling = "sum"):
        super(BayesDiffPoolGNN, self).__init__(num_tasks, num_layers, emb_dim, gnn_type,
                 virtual_node, residual, drop_ratio, JK, graph_pooling)
        
        # prediction head; input width is doubled (2 * emb_dim)
        self.graph_pred_linear = torch.nn.Sequential(
            bnn.BayesLinear(prior_mu=0, prior_sigma=0.1, in_features=2*self.emb_dim, out_features=100),
            torch.nn.ReLU(),
            bnn.BayesLinear(prior_mu=0, prior_sigma=0.1, in_features=100, out_features=100),
            torch.nn.ReLU(),
            bnn.BayesLinear(prior_mu=0, prior_sigma=0.1, in_features=100, out_features=100),
            torch.nn.ReLU(),
            bnn.BayesLinear(prior_mu=0, prior_sigma=0.1, in_features=100, out_features=self.num_tasks),
        )
        
        self.first_diffpool_layer = BayesDiffPoolBatchedGraphLayer(
            input_dim=600, # graph embedding dimension
            assign_dim=5, # pool the graph into 5 clusters
            output_feat_dim=600,
            activation=F.relu,
            dropout=0.0,
            aggregator_type="meanpool",
            link_pred=False
        )

        self.gc_after_pool = BayesBatchedGraphSAGE(600, 600)

        # KL-divergence loss for Bayesian Neural Network
        self.kl_loss = bnn.BKLLoss(reduction='mean', last_layer_only=False)
        self.kl_weight = 1  # weight of the KL term (alternative: 0.01)
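
For context, a minimal sketch of how the kl_loss / kl_weight attributes above are typically used in a training step with torchbnn: the KL term is added to the task loss, ELBO-style. The tiny model and data below are illustrative stand-ins, not part of the example.

    import torch
    import torchbnn as bnn

    # Illustrative stand-in; any module containing bnn.BayesLinear layers works.
    model = torch.nn.Sequential(
        bnn.BayesLinear(prior_mu=0, prior_sigma=0.1, in_features=16, out_features=1))
    kl_loss = bnn.BKLLoss(reduction='mean', last_layer_only=False)
    kl_weight = 1

    x, y = torch.randn(8, 16), torch.randn(8, 1)
    task_loss = torch.nn.functional.mse_loss(model(x), y)
    loss = task_loss + kl_weight * kl_loss(model)  # task loss + weighted KL divergence
    loss.backward()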
Example 2
    def __init__(self,
                 n_features,
                 n_classes,
                 num_layers,
                 hidden_gcn,
                 hidden_fc,
                 pathway,
                 n_cmt,
                 edge_index,
                 mode='cat',
                 bayesian=False,
                 batchnorm=False,
                 num_genes=15135,
                 k_hops=2,
                 do_layers=1):
        super().__init__()
        self.edge_index = edge_index
        self.row, self.col = pathway
        self.n_cmt = n_cmt
        self.mode = mode
        self.batchnorm = batchnorm
        self.do_layers = do_layers

        # GCNs
        self.conv1 = ChebConv(n_features, hidden_gcn, k_hops)
        self.convs = torch.nn.ModuleList()
        for i in range(num_layers - 1):
            self.convs.append(ChebConv(hidden_gcn, hidden_gcn, k_hops))

        # FC(1)
        if mode == 'cat':
            self.fc = torch.nn.Linear(
                num_layers * hidden_gcn,
                1)  # FC layer to reduce dim of pathway features to 1
        else:
            self.fc = torch.nn.Linear(
                hidden_gcn,
                1)  # FC layer to reduce dim of pathway features to 1

        # MLP
        if bayesian:
            self.lin1 = bnn.BayesLinear(prior_mu=0,
                                        prior_sigma=0.05,
                                        in_features=n_cmt,
                                        out_features=hidden_fc)
            self.lin2 = bnn.BayesLinear(prior_mu=0,
                                        prior_sigma=0.05,
                                        in_features=hidden_fc,
                                        out_features=n_classes)

        else:
            self.lin1 = Linear(n_cmt, hidden_fc)
            self.lin2 = Linear(hidden_fc, n_classes)

        # BatchNorm
        if batchnorm:
            self.bnconvs = torch.nn.ModuleList()
            for i in range(num_layers):
                self.bnconvs.append(BatchNorm1d(hidden_gcn))
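
A minimal sketch of why the fc input width depends on mode: with mode='cat', the per-layer GCN outputs are presumably concatenated, jumping-knowledge style, before the reduction to one feature per pathway. The shapes below are illustrative.

    import torch

    num_layers, hidden_gcn = 3, 32
    xs = [torch.randn(10, hidden_gcn) for _ in range(num_layers)]  # one output per GCN layer
    x_cat = torch.cat(xs, dim=-1)               # shape (10, num_layers * hidden_gcn)
    fc = torch.nn.Linear(num_layers * hidden_gcn, 1)
    out = fc(x_cat)                             # shape (10, 1)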
Example 3
    def __init__(self,
                 num_tasks=1,
                 num_layers=5,
                 emb_dim=300,
                 gnn_type='gin',
                 virtual_node=True,
                 residual=False,
                 drop_ratio=0,
                 JK="last",
                 graph_pooling="sum"):
        super(BayesianGNN, self).__init__(num_tasks, num_layers, emb_dim,
                                          gnn_type, virtual_node, residual,
                                          drop_ratio, JK, graph_pooling)

        ### GNN to generate node embeddings
        if virtual_node:
            self.gnn_node = Bayesian_GNN_node_Virtualnode(
                num_layers,
                emb_dim,
                JK=JK,
                drop_ratio=drop_ratio,
                residual=residual,
                gnn_type=gnn_type)
        else:
            raise NotImplementedError("virtual_node=False is not implemented")

        # KL-divergence loss for Bayesian Neural Network
        self.kl_loss = bnn.BKLLoss(reduction='mean', last_layer_only=False)
        self.kl_weight = 0.01

        # change graph_pred_linear
        if graph_pooling == "set2set":
            embedding_dim = 2 * self.emb_dim
        else:
            embedding_dim = self.emb_dim

        self.graph_pred_linear = torch.nn.Sequential(
            bnn.BayesLinear(prior_mu=0,
                            prior_sigma=0.1,
                            in_features=embedding_dim,
                            out_features=100),
            torch.nn.ReLU(),
            bnn.BayesLinear(prior_mu=0,
                            prior_sigma=0.1,
                            in_features=100,
                            out_features=100),
            torch.nn.ReLU(),
            bnn.BayesLinear(prior_mu=0,
                            prior_sigma=0.1,
                            in_features=100,
                            out_features=100),
            torch.nn.ReLU(),
            bnn.BayesLinear(prior_mu=0,
                            prior_sigma=0.1,
                            in_features=100,
                            out_features=self.num_tasks),
        )
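
Since bnn.BayesLinear samples fresh weights on every forward pass, repeated calls to a model like this yield different predictions. A minimal sketch of Monte Carlo uncertainty estimation; the lone Bayesian layer stands in for the full BayesianGNN.

    import torch
    import torchbnn as bnn

    # Stand-in Bayesian head: each call samples new weights, so outputs vary.
    head = bnn.BayesLinear(prior_mu=0, prior_sigma=0.1, in_features=300, out_features=1)
    emb = torch.randn(4, 300)  # pretend graph embeddings

    samples = torch.stack([head(emb) for _ in range(30)])  # 30 stochastic passes
    mean, std = samples.mean(dim=0), samples.std(dim=0)    # predictive mean and uncertainty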
Example 4
    def __init__(self,
                 n_features,
                 n_classes,
                 num_layers,
                 hidden_gcn,
                 hidden_fc,
                 edge_index,
                 n_genes=15135,
                 mode='cat',
                 bayesian=False,
                 batchnorm=False,
                 do_layers=1):
        super().__init__()
        self.edge_index = edge_index
        self.conv1 = SAGEConv(n_features, hidden_gcn)
        self.convs = torch.nn.ModuleList()
        self.batchnorm = batchnorm
        self.do_layers = do_layers

        self.mode = mode

        for i in range(num_layers - 1):
            self.convs.append(SAGEConv(hidden_gcn, hidden_gcn))

        if mode == 'cat':
            self.fc = torch.nn.Linear(
                num_layers * hidden_gcn,
                1)  # FC layer to reduce dim of pathway features to 1
        else:
            self.fc = torch.nn.Linear(
                hidden_gcn,
                1)  # FC layer to reduce dim of pathway features to 1

        if bayesian:
            self.lin1 = bnn.BayesLinear(prior_mu=0,
                                        prior_sigma=0.05,
                                        in_features=n_genes,
                                        out_features=hidden_fc)
            self.lin2 = bnn.BayesLinear(prior_mu=0,
                                        prior_sigma=0.05,
                                        in_features=hidden_fc,
                                        out_features=n_classes)

        else:
            self.lin1 = Linear(n_genes, hidden_fc)
            self.lin2 = Linear(hidden_fc, n_classes)

        # BatchNorm
        if batchnorm:
            self.bnconvs = torch.nn.ModuleList()
            for i in range(num_layers):
                self.bnconvs.append(BatchNorm1d(hidden_gcn))
Example 5
    def __init__(self, emb_dim):
        '''
            emb_dim (int): node embedding dimensionality
        '''

        super(BayesianGINConv, self).__init__(aggr = "add")

        self.mlp = torch.nn.Sequential(
            bnn.BayesLinear(prior_mu=0, prior_sigma=0.1, in_features=emb_dim, out_features=emb_dim),
            torch.nn.BatchNorm1d(emb_dim), 
            torch.nn.ReLU(), 
            bnn.BayesLinear(prior_mu=0, prior_sigma=0.1, in_features=emb_dim, out_features=emb_dim)
        )
        self.eps = torch.nn.Parameter(torch.Tensor([0]))
        
        self.bond_encoder = BondEncoder(emb_dim = emb_dim)
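
The companion forward pass is not included in the snippet. Assuming this constructor follows the OGB GINConv pattern (a torch_geometric MessagePassing subclass, with F as torch.nn.functional), it would look roughly like this; treat it as a reconstruction, not the repository's code.

    # Assumed companion methods, OGB GINConv style:
    # h_v' = MLP((1 + eps) * h_v + sum over neighbors of ReLU(h_u + edge_emb))
    def forward(self, x, edge_index, edge_attr):
        edge_embedding = self.bond_encoder(edge_attr)
        out = self.mlp((1 + self.eps) * x
                       + self.propagate(edge_index, x=x, edge_attr=edge_embedding))
        return out

    def message(self, x_j, edge_attr):
        return F.relu(x_j + edge_attr)

    def update(self, aggr_out):
        return aggr_out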
Example 6
    def __init__(self, backbone, num_classes=1000, bayes=False):
        super(VGGClassifier, self).__init__()
        self.backbone = backbone
        self.num_classes = num_classes
        self.avgpool = nn.AdaptiveAvgPool2d((7, 7))
        in_features = backbone.get_last_output_channels() * 7 * 7
        # Each linear layer is swapped for a Bayesian one when bayes=True.
        self.classifier = nn.Sequential(
            nn.Linear(in_features, 4096) if not bayes
            else bnn.BayesLinear(0, 0.1, in_features, 4096),
            nn.ReLU(True),
            nn.Dropout(),
            nn.Linear(4096, 4096) if not bayes
            else bnn.BayesLinear(0, 0.1, 4096, 4096),
            nn.ReLU(True),
            nn.Dropout(),
            nn.Linear(4096, num_classes) if not bayes
            else bnn.BayesLinear(0, 0.1, 4096, num_classes))
        self.initialize_weights_kaiming()
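
A hedged usage sketch: flipping bayes=True swaps every linear in the head for bnn.BayesLinear. DummyBackbone is a hypothetical stand-in for a VGG feature extractor, and the full VGGClassifier (including initialize_weights_kaiming) is assumed to be in scope.

    import torch.nn as nn

    class DummyBackbone(nn.Module):  # hypothetical stand-in feature extractor
        def __init__(self):
            super().__init__()
            self.features = nn.Conv2d(3, 8, 3, padding=1)

        def get_last_output_channels(self):
            return 8

        def forward(self, x):
            return self.features(x)

    clf = VGGClassifier(DummyBackbone(), num_classes=10, bayes=True)  # Bayesian head
    # Remember to add a KL regularizer (e.g., bnn.BKLLoss) to the objective when bayes=True.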
Example 7
    def __init__(self, num_layers, emb_dim, drop_ratio = 0.5, JK = "last", residual = False, gnn_type = 'gin'):
        '''
            emb_dim (int): node embedding dimensionality
        '''

        super(Bayesian_GNN_node_Virtualnode, self).__init__()
        self.num_layers = num_layers
        self.drop_ratio = drop_ratio
        self.JK = JK
        ### add residual connection or not
        self.residual = residual

        if self.num_layers < 2:
            raise ValueError("Number of GNN layers must be greater than 1.")

        self.atom_encoder = AtomEncoder(emb_dim)

        ### set the initial virtual node embedding to 0.
        self.virtualnode_embedding = torch.nn.Embedding(1, emb_dim)
        torch.nn.init.constant_(self.virtualnode_embedding.weight.data, 0)

        ### List of GNNs
        self.convs = torch.nn.ModuleList()
        ### batch norms applied to node embeddings
        self.batch_norms = torch.nn.ModuleList()

        ### List of MLPs to transform virtual node at every layer
        self.mlp_virtualnode_list = torch.nn.ModuleList()

        for layer in range(num_layers):
            if gnn_type == 'gin':
                self.convs.append(BayesianGINConv(emb_dim))
            elif gnn_type == 'gcn':
                raise NotImplementedError("not implemented yet")
            else:
                raise ValueError('Undefined GNN type called {}'.format(gnn_type))

            self.batch_norms.append(torch.nn.BatchNorm1d(emb_dim))

        for layer in range(num_layers - 1):
            self.mlp_virtualnode_list.append(torch.nn.Sequential(
                bnn.BayesLinear(prior_mu=0, prior_sigma=0.1, in_features=emb_dim, out_features=emb_dim), 
                torch.nn.BatchNorm1d(emb_dim), 
                torch.nn.ReLU(),
                bnn.BayesLinear(prior_mu=0, prior_sigma=0.1, in_features=emb_dim, out_features=emb_dim), 
                torch.nn.BatchNorm1d(emb_dim), 
                torch.nn.ReLU()))
Example 8
    def __init__(self, input_size, output_size, prior_mu, prior_sigma,
                 **kwargs):
        super(TorchBNN, self).__init__(**kwargs)

        # hidden_size is presumably set by the parent class from **kwargs
        self.hidden_size.insert(0, input_size)

        self.layers = nn.ModuleList([
            bnn.BayesLinear(prior_mu=prior_mu,
                            prior_sigma=prior_sigma,
                            in_features=self.hidden_size[i],
                            out_features=self.hidden_size[i + 1])
            for i in range(len(self.hidden_size) - 1)
        ])

        self.last_layer = bnn.BayesLinear(prior_mu=prior_mu,
                                          prior_sigma=prior_sigma,
                                          in_features=self.hidden_size[-1],
                                          out_features=output_size)
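
TorchBNN's forward pass is not shown; presumably it chains the hidden Bayesian layers through a nonlinearity and finishes with last_layer. A sketch under that assumption (the ReLU is a guess):

    # Assumed companion forward (not in the snippet); the activation choice is a guess.
    def forward(self, x):
        for layer in self.layers:
            x = torch.relu(layer(x))
        return self.last_layer(x)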
Example 9
    def __init__(self,
                 infeat,
                 outfeat,
                 use_bn=True,
                 mean=False,
                 add_self=False):
        super().__init__(infeat, outfeat, use_bn, mean, add_self)
        self.W = bnn.BayesLinear(prior_mu=0,
                                 prior_sigma=0.1,
                                 in_features=infeat,
                                 out_features=outfeat)
Example 10
    def __init__(self, in_feats, out_feats, activation, dropout, bias=True):
        super(BayesBundler, self).__init__()
        self.dropout = nn.Dropout(p=dropout)
        # input width is in_feats * 2, presumably the node's own features
        # concatenated with aggregated neighbor features (GraphSAGE-style)
        self.linear = bnn.BayesLinear(prior_mu=0, prior_sigma=0.1,
                                      in_features=in_feats * 2,
                                      out_features=out_feats)
        self.activation = activation
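
A hedged usage sketch of the doubled input width; the concatenation and the dropout-linear-activation order are assumptions, not taken from the repository.

    import torch
    import torch.nn.functional as F

    bundler = BayesBundler(in_feats=64, out_feats=64, activation=F.relu, dropout=0.1)
    h_self, h_neigh = torch.randn(10, 64), torch.randn(10, 64)
    h = torch.cat([h_self, h_neigh], dim=-1)       # width in_feats * 2, matching the linear layer
    out = bundler.activation(bundler.linear(bundler.dropout(h)))  # presumed application order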