Example #1
File: grace.py Project: Zeigar/cogdl
def forward(
    self,
    graph: Graph,
    x: torch.Tensor,
):
    graph.sym_norm()
    return self.encoder(graph, x)
Example #2
File: grace.py Project: rpatil524/cogdl
def forward(
    self,
    graph: Graph,
    x: torch.Tensor = None,
):
    if x is None:
        x = graph.x
    graph.sym_norm()
    return self.encoder(graph, x)
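
Both examples normalize the graph in place with graph.sym_norm() before encoding. In cogdl this applies symmetric normalization to the edge weights; a minimal standalone sketch of the same operation on a scipy adjacency matrix (the helper name sym_norm_sketch and the scipy formulation are ours, not cogdl's API) might look like:

import numpy as np
import scipy.sparse as sp

def sym_norm_sketch(adj: sp.csr_matrix) -> sp.csr_matrix:
    # Symmetric normalization A_norm = D^(-1/2) A D^(-1/2): every edge
    # weight is rescaled by the inverse square roots of both endpoint
    # degrees, damping the influence of high-degree nodes.
    deg = np.asarray(adj.sum(axis=1)).flatten()
    with np.errstate(divide="ignore"):
        d_inv_sqrt = np.power(deg, -0.5)
    d_inv_sqrt[np.isinf(d_inv_sqrt)] = 0.0  # isolated nodes stay at 0
    d_mat = sp.diags(d_inv_sqrt)
    return (d_mat @ adj @ d_mat).tocsr()

This is the normalization GCN-style encoders expect; example #2 additionally falls back to the features stored on the graph (graph.x) when no x is passed.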
Example #3
def _add_undirected_graph_positional_embedding(g: Graph,
                                               hidden_size,
                                               retry=10):
    # We use the eigenvectors of the normalized graph Laplacian as vertex features.
    # This can be viewed as a generalization of the positional embedding in the
    # "Attention Is All You Need" paper.
    # Recall that the eigenvectors of the normalized Laplacian of a line graph
    # are cos/sin functions.
    # See section 2.4 of http://www.cs.yale.edu/homes/spielman/561/2009/lect02-09.pdf
    n = g.num_nodes
    with g.local_graph():
        g.sym_norm()
        adj = g.to_scipy_csr()
    # The sym-normalized adjacency shares its eigenvectors with the normalized
    # Laplacian L = I - A_norm, so it can stand in for the Laplacian here.
    laplacian = adj

    k = min(n - 2, hidden_size)
    x = eigen_decomposision(n, k, laplacian, hidden_size, retry)
    g.pos_undirected = x.float()
    return g
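
eigen_decomposision (the spelling follows the codebase) is a helper defined elsewhere in the project and not shown here. As a hedged sketch, assuming it wraps scipy's ARPACK solver and zero-pads the eigenvectors to hidden_size columns (everything below beyond the call signature is our assumption):

import numpy as np
import torch
import torch.nn.functional as F
from scipy.sparse.linalg import eigsh, ArpackError

def eigen_decomposision(n, k, laplacian, hidden_size, retry):
    # Top-k eigenvectors of the sym-normalized adjacency, zero-padded to
    # hidden_size columns so every node gets a fixed-width position vector.
    if k <= 0:
        return torch.zeros(n, hidden_size)
    laplacian = laplacian.astype("float64")
    ncv = min(n, max(2 * k + 1, 20))  # ARPACK workspace size
    v0 = np.random.rand(n)  # start vector, reused across retries
    u = np.zeros((n, k))
    for _ in range(retry):
        try:
            # which="LA": largest algebraic eigenvalues of A_norm, i.e. the
            # smallest eigenvalues of the Laplacian L = I - A_norm.
            _, u = eigsh(laplacian, k=k, which="LA", ncv=ncv, v0=v0)
            break
        except ArpackError:
            ncv = min(ncv * 2, n)  # enlarge the workspace and retry
    x = F.normalize(torch.from_numpy(u.astype("float32")), p=2, dim=1)
    return F.pad(x, (0, hidden_size - k))  # pad feature dim to hidden_size

The caller's k = min(n - 2, hidden_size) respects ARPACK's requirement that k < n, and the final padding keeps g.pos_undirected at a fixed width regardless of graph size.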