Example #1
def test_appnp_conv():
    ctx = F.ctx()
    # Random 100-node graph built from a sparse adjacency matrix (legacy DGLGraph API).
    g = dgl.DGLGraph(sp.sparse.random(100, 100, density=0.1), readonly=True)
    # k=10 propagation steps, teleport probability alpha=0.1.
    appnp = nn.APPNPConv(10, 0.1)
    feat = F.randn((100, 5))
    appnp = appnp.to(ctx)

    h = appnp(g, feat)
    assert h.shape[-1] == 5
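
The test above relies on DGL's test-backend helpers (F.ctx(), F.randn). A minimal standalone sketch of the same call, assuming a recent DGL build with the PyTorch backend (graph edges and feature values here are made up for illustration):

import torch
import dgl
from dgl.nn import APPNPConv

src = torch.tensor([0, 1, 2, 3, 4])
dst = torch.tensor([1, 2, 3, 4, 0])
g = dgl.add_self_loop(dgl.graph((src, dst), num_nodes=10))

feat = torch.randn(10, 5)            # 10 nodes, 5 input features
appnp = APPNPConv(k=10, alpha=0.1)   # 10 propagation steps, teleport probability 0.1
h = appnp(g, feat)
print(h.shape)                       # torch.Size([10, 5]) -- APPNP keeps the feature size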
Example #2
File: test_nn.py    Project: yangce0224/dgl
def test_appnp_conv(g, idtype):
    ctx = F.ctx()
    g = g.astype(idtype).to(ctx)
    appnp = nn.APPNPConv(10, 0.1)
    feat = F.randn((g.number_of_nodes(), 5))
    appnp = appnp.to(ctx)

    h = appnp(g, feat)
    assert h.shape[-1] == 5
Example #3
def test_appnp_conv_e_weight(g, idtype):
    ctx = F.ctx()
    g = g.astype(idtype).to(ctx)
    appnp = nn.APPNPConv(10, 0.1)
    feat = F.randn((g.number_of_nodes(), 5))
    # Unit weight per edge; APPNPConv accepts optional per-edge weights for propagation.
    eweight = F.ones((g.num_edges(), ))
    appnp = appnp.to(ctx)

    h = appnp(g, feat, edge_weight=eweight)
    assert h.shape[-1] == 5
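
As in the test above, the edge_weight argument takes one scalar per edge. A standalone sketch with non-uniform weights, assuming a DGL version where APPNPConv accepts edge_weight (as the test implies) and using strictly positive weights so the internal normalization is well defined:

import torch
import dgl
from dgl.nn import APPNPConv

g = dgl.add_self_loop(dgl.graph(([0, 1, 2], [1, 2, 0]), num_nodes=3))
feat = torch.randn(3, 5)
eweight = torch.rand(g.num_edges()) + 0.1   # strictly positive per-edge weights
h = APPNPConv(k=10, alpha=0.1)(g, feat, edge_weight=eweight)
print(h.shape)                              # torch.Size([3, 5])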
Example #4
    def __init__(self, in_dim, hidden_dim, n_classes, hidden_layers, readout,
                 activation, feat_drop, edge_drop, alpha, K, grid, device):
        super(Classifier, self).__init__()
        self.device = device
        self.readout = readout
        self.layers = nn.ModuleList()
        self.grid = grid
        # input layer
        self.layers.append(nn.Linear(in_dim, hidden_dim))
        # hidden layers
        for _ in range(hidden_layers):
            self.layers.append(nn.Linear(hidden_dim, hidden_dim))

        self.activation = activation

        if feat_drop:
            self.feat_drop = nn.Dropout(feat_drop)
        else:
            self.feat_drop = lambda x: x

        # APPNP propagation: K power-iteration steps, teleport probability alpha,
        # and edge dropout applied during propagation.
        self.propagate = conv.APPNPConv(K, alpha, edge_drop)

        # graph readout
        if self.readout == 'max':
            self.readout_fcn = conv.MaxPooling()
        elif self.readout == 'mean':
            self.readout_fcn = conv.AvgPooling()
        elif self.readout == 'sum':
            self.readout_fcn = conv.SumPooling()
        elif self.readout == 'gap':
            # gate network scores each node; feature network doubles the width
            self.readout_fcn = conv.GlobalAttentionPooling(
                nn.Linear(hidden_dim, 1), nn.Linear(hidden_dim, hidden_dim * 2))
        else:
            self.readout_fcn = SppPooling(hidden_dim, self.grid)

        # classifier head
        if self.readout == 'spp':
            self.classify = nn.Sequential(
                nn.Dropout(),
                nn.Linear(hidden_dim * self.grid * self.grid, hidden_dim * 2),
                nn.ReLU(inplace=True),
                nn.Linear(2 * hidden_dim, n_classes),
            )
        else:
            classify_in_dim = hidden_dim
            if self.readout == 'gap':
                # GlobalAttentionPooling above outputs hidden_dim * 2 features
                classify_in_dim *= 2
            self.classify = nn.Linear(classify_in_dim, n_classes)

        self.reset_parameters()
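
    # The snippet stops at the constructor. A hypothetical sketch of a matching
    # forward method (not part of the original project code), assuming all readout
    # modules share the (graph, feat) call signature:
    # MLP transform -> APPNP propagation -> graph readout -> classifier.
    def forward(self, g, h):
        for layer in self.layers:
            h = self.activation(layer(self.feat_drop(h)))
        h = self.propagate(g, h)        # APPNP smoothing over the graph
        hg = self.readout_fcn(g, h)     # graph-level representation
        return self.classify(hg)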