Example #1
    def forward(self, graph, feature):
        """
         
        Args:
 
            graph: `pgl.Graph` instance.

            feature: A tensor with shape (num_nodes, input_size)
     
        Return:

            A tensor with shape (num_nodes, output_size)

        """
        if self.cached:
            if self.cached_output is None:
                norm = GF.degree_norm(graph)
                ori_feature = feature
                sum_feature = feature
                for hop in range(self.k_hop):
                    feature = feature * norm
                    feature = graph.send_recv(feature, "sum")
                    feature = feature * norm
                    feature = (1 - self.alpha) * feature
                    sum_feature += feature
                feature = sum_feature / self.k_hop + self.alpha * ori_feature
                self.cached_output = feature
            else:
                feature = self.cached_output
        else:
            norm = GF.degree_norm(graph)
            ori_feature = feature
            sum_feature = feature
            for hop in range(self.k_hop):
                feature = feature * norm
                feature = graph.send_recv(feature, "sum")
                feature = feature * norm
                feature = (1 - self.alpha) * feature
                sum_feature += feature
            feature = sum_feature / self.k_hop + self.alpha * ori_feature

        output = self.linear(feature)
        if hasattr(self, "bias"):
            output = output + self.bias

        if self.activation is not None:
            output = self.activation(output)
        return output
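The body above implements an S2GC-style propagation: K hops of symmetric-normalized aggregation, each damped by (1 - alpha), averaged over hops and mixed back with the initial feature. Below is a minimal NumPy sketch of the same arithmetic, with a dense normalized adjacency a_hat standing in for the norm/send_recv/norm pair; the function name is illustrative, not part of PGL.

import numpy as np

def k_hop_average(a_hat, x, k_hop, alpha):
    # a_hat: (n, n) dense D^-1/2 A D^-1/2; x: (n, d) node features.
    ori, feat, acc = x, x, x
    for _ in range(k_hop):
        feat = (1 - alpha) * (a_hat @ feat)  # one normalized, damped hop
        acc = acc + feat
    return acc / k_hop + alpha * ori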
Example #2
    def forward(self, graph, feature, norm=None):
        """
         
        Args:
 
            graph: `pgl.Graph` instance.

            feature: A tensor with shape (num_nodes, input_size)

            norm: (default None). If :code:`norm` is not None, then the feature will be normalized by given norm. If :code:`norm` is None, then we use `lapacian degree norm`.
     
        Return:

            A tensor with shape (num_nodes, output_size)

        """
        if norm is None:
            norm = GF.degree_norm(graph)
        h0 = feature

        for _ in range(self.k_hop):
            feature = feature * norm
            feature = graph.send_recv(feature)
            feature = feature * norm
            feature = self.alpha * h0 + (1 - self.alpha) * feature

        return feature
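This loop is the APPNP-style power iteration h <- alpha * h0 + (1 - alpha) * A_hat h, which drives h toward a personalized-PageRank diffusion of the initial feature h0. A dense NumPy equivalent, with a_hat again standing in for the normalized sparse propagation (illustrative only):

import numpy as np

def appnp_propagate(a_hat, h0, k_hop, alpha):
    # Fixed-point iteration toward (approximate) personalized PageRank.
    h = h0
    for _ in range(k_hop):
        h = alpha * h0 + (1 - alpha) * (a_hat @ h)
    return h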
Example #3
    def forward(self, graph, feature, norm=None):
        """
        Args:
 
            graph: `pgl.Graph` instance.

            feature: A tensor with shape (num_nodes, input_size)

            norm: (default None). If :code:`norm` is not None, then the feature will be normalized by given norm. If :code:`norm` is None, then we use `lapacian degree norm`.
     
        Return:

            A tensor with shape (num_nodes, output_size)

        """

        if norm is None:
            norm = GF.degree_norm(graph)
        h0 = feature

        for i in range(self.k_hop):
            # GCNII-style schedule: beta decays with depth as
            # log(lambda_l / (i + 1) + 1); requires numpy imported as np.
            beta_i = np.log(1.0 * self.lambda_l / (i + 1) + 1)
            feature = self.drop_fn(feature)

            feature = feature * norm
            feature = graph.send_recv(feature)
            feature = feature * norm
            feature = self.alpha * h0 + (1 - self.alpha) * feature

            feature_transed = self.mlps[i](feature)
            feature = beta_i * feature_transed + (1 - beta_i) * feature
            if self.activation is not None:
                feature = self.activation(feature)
        return feature
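The interpolation on the last lines is GCNII's identity mapping: since log(lambda/l + 1) is roughly lambda/l, beta shrinks with depth and deep layers apply progressively less transformation, staying close to the identity. A standalone sketch of just that step (names are illustrative):

import numpy as np

def identity_mapping(feature, transformed, layer_idx, lambda_l):
    # beta decays with depth, so deep layers stay near the identity map.
    beta = np.log(lambda_l / (layer_idx + 1) + 1.0)
    return beta * transformed + (1 - beta) * feature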
Example #4
    def forward(self, graph, feat):
        """Forward
        Args:
            graph: heterogeneous graph built by `pgl.HeterGraph`.
            feat: node features/representations from the graph or the previous layer.
        """
        if self.num_bases < self.num_rels:
            weight = paddle.transpose(self.weight, perm=[1, 0, 2])
            weight = paddle.matmul(self.w_comp, weight)
            weight = paddle.transpose(weight, perm=[1, 0, 2])
        else:
            weight = self.weight

        def send_func(src_feat, dst_feat, edge_feat):
            """
            send function
            """
            return src_feat

        def recv_func(msg):
            """
            receive function
            """
            return msg.reduce_mean(msg['h'])

        feat_list = []

        for idx, etype in enumerate(self.etypes):
            sub_g = graph[graph.edge_types[idx]]
            sub_g.tensor()
            h = feat
            if self.norm:
                # Guard the multiply: without it, `norm` is undefined when
                # self.norm is False. Use a local `h` so `feat` is not
                # overwritten and re-normalized on later relation types.
                norm = GF.degree_norm(sub_g)
                h = h * norm
            w = weight[idx, :, :].squeeze()
            h = paddle.matmul(h, w)
            msg = sub_g.send(send_func, src_feat={'h': h})
            h = sub_g.recv(recv_func, msg)
            feat_list.append(h)
        h = paddle.stack(feat_list, axis=0)
        h = paddle.sum(h, axis=0)
        if self.act == 'relu':
            h = paddle.nn.functional.relu(h)
        else:
            h = paddle.nn.functional.sigmoid(h)

        return h
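The transpose/matmul/transpose at the top is RGCN basis decomposition: each relation's weight matrix is a learned mix of num_bases shared bases, W_r = sum_b a_rb * V_b, which caps parameter count when there are many relations. The same contraction written as a single einsum (a sketch, not the layer's actual code path):

import numpy as np

def compose_weights(w_comp, bases):
    # w_comp: (num_rels, num_bases); bases: (num_bases, in_dim, out_dim).
    # Returns per-relation weights of shape (num_rels, in_dim, out_dim).
    return np.einsum("rb,bio->rio", w_comp, bases)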
Example #5
    def forward(self, graph, feature):
        """
        Args:
 
            graph: `pgl.Graph` instance.

            feature: A tensor with shape (num_nodes, input_size)

        Return:

            A tensor with shape (num_nodes, output_size)

        """

        norm = GF.degree_norm(graph)
        feature = feature * norm
        feature = graph.send_recv(feature, "sum")
        feature = feature * norm
        return feature
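This is the bare GCN propagation D^-1/2 A D^-1/2 X with no learned weights. A minimal end-to-end check, assuming PGL is installed and that `GF` is `pgl.nn.functional` (the import alias is an assumption, matching how PGL's own layers use `degree_norm`):

import paddle
import pgl
import pgl.nn.functional as GF

graph = pgl.Graph(edges=[(0, 1), (1, 2), (2, 0)], num_nodes=3)
graph.tensor()
feature = paddle.ones([3, 4])
norm = GF.degree_norm(graph)                       # per-node D^-1/2 factors
out = graph.send_recv(feature * norm, "sum") * norm
print(out.shape)                                   # [3, 4]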
Example #6
    def forward(self, graph, user_feature, item_feature, norm=None):
        """
        Propagation method for LightGCN.
        """
        if norm is None:
            norm = GF.degree_norm(graph)
        feature = paddle.concat([user_feature, item_feature])
        embs = [feature]

        for layer in range(self.n_layers):
            feature = feature * norm
            feature = graph.send_recv(feature, "sum")
            feature = feature * norm
            embs.append(feature)
        embs = paddle.stack(embs, axis=1)
        light_out = paddle.mean(embs, axis=1)
        users, items = paddle.split(
            light_out, [user_feature.shape[0], item_feature.shape[0]])
        return users, items
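LightGCN keeps no per-layer weights at all: it repeatedly propagates the concatenated user/item embeddings and averages the embeddings from every layer, including layer 0. The readout in dense NumPy form (illustrative):

import numpy as np

def lightgcn_readout(a_hat, x, n_layers):
    embs = [x]                      # the layer-0 embedding is kept too
    for _ in range(n_layers):
        x = a_hat @ x
        embs.append(x)
    return np.mean(np.stack(embs, axis=1), axis=1)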
Example #7
    def forward(self, graph, feature, edge_feat):
        feature = self.linear(feature)
        edge_embedding = self.bond_encoder(edge_feat)

        norm = GF.degree_norm(graph)

        msg = graph.send(src_feat={"x": feature, "norm": norm},
                         edge_feat={"e": edge_embedding},
                         message_func=self.send_func)

        output = graph.recv(msg=msg, reduce_func=self.recv_sum)

        output = output + self.bias
        output = output * norm

        return output
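The snippet relies on `self.send_func` and `self.recv_sum`, which are defined elsewhere in the class and not shown here. Under PGL's message-passing API they would plausibly look like the following; treat this as a hypothetical reconstruction, not the layer's real code:

def send_func(src_feat, dst_feat, edge_feat):
    # Mix node and edge features into the message, pre-scaled by the norm.
    return {"h": (src_feat["x"] + edge_feat["e"]) * src_feat["norm"]}

def recv_sum(msg):
    # Sum incoming messages per destination node.
    return msg.reduce_sum(msg["h"])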
Example #8
    def forward(self, graph, feature, edge_feat):
        if self.with_efeat:
            edge_embedding = self.bond_encoder(edge_feat)

            msg = graph.send(src_feat={"x": feature},
                             edge_feat={"e": edge_embedding},
                             message_func=self.send_func)
        else:
            msg = graph.send(src_feat={"x": feature},
                             dst_feat={"x": feature},
                             message_func=self.send_func)

        neigh_feature = graph.recv(msg=msg, reduce_func=self.recv_sum)

        out = (1 + self.eps) * feature + neigh_feature
        norm = GF.degree_norm(graph)
        out = out * norm
        out = self.mlp(out)

        return out
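Apart from the trailing `degree_norm` rescaling (not part of standard GIN), this is the GIN aggregation (1 + eps) * x_v + sum of neighbor features, followed by an MLP. The core step in dense form (illustrative):

import numpy as np

def gin_aggregate(adj, x, eps):
    # adj: (n, n) binary adjacency; x: (n, d) node features.
    return (1 + eps) * x + adj @ x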
Example #9
    def forward(self, graph, feature):
        """
         
        Args:
 
            graph: `pgl.Graph` instance.

            feature: A tensor with shape (num_nodes, input_size)
     
        Return:

            A tensor with shape (num_nodes, output_size)

        """
        norm = GF.degree_norm(graph)
        neigh_feature = graph.send_recv(feature, "sum")
        output = neigh_feature + feature
        output = output * norm
        output = self.linear(output) + self.linear2(feature * output)
        output = self.leaky_relu(output)
        return output
Example #10
    def forward(self, graph, feature, norm=None):
        """
         
        Args:
 
            graph: `pgl.Graph` instance.

            feature: A tensor with shape (num_nodes, input_size)

            norm: (default None). If :code:`norm` is not None, then the feature will be normalized by given norm. If :code:`norm` is None, then we use `lapacian degree norm`.
     
        Return:

            A tensor with shape (num_nodes, output_size)

        """
        if self.self_loop:
            index = paddle.arange(start=0, end=graph.num_nodes, dtype="int64")
            self_loop_edges = paddle.transpose(paddle.stack((index, index)),
                                               [1, 0])

            mask = graph.edges[:, 0] != graph.edges[:, 1]
            mask_index = paddle.masked_select(
                paddle.arange(end=graph.num_edges), mask)
            edges = paddle.gather(graph.edges, mask_index)  # remove self loop

            edges = paddle.concat((self_loop_edges, edges), axis=0)
            graph = pgl.Graph(num_nodes=graph.num_nodes, edges=edges)

        if norm is None:
            norm = GF.degree_norm(graph)
        h0 = feature

        for _ in range(self.k_hop):
            feature = feature * norm
            feature = graph.send_recv(feature)
            feature = feature * norm
            feature = self.alpha * h0 + (1 - self.alpha) * feature

        return feature
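The `self_loop` branch rebuilds the edge list rather than trusting the input graph: it strips any existing self loops and prepends exactly one per node, so every node receives its own message exactly once. The same edit in NumPy (illustrative):

import numpy as np

def with_self_loops(edges, num_nodes):
    # edges: (num_edges, 2) int array of (src, dst) pairs.
    edges = edges[edges[:, 0] != edges[:, 1]]            # drop existing loops
    loops = np.stack([np.arange(num_nodes)] * 2, axis=1)
    return np.concatenate([loops, edges], axis=0)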
Example #11
    def forward(self, graph, feature, norm=None):
        """
         
        Args:
 
            graph: `pgl.Graph` instance.

            feature: A tensor with shape (num_nodes, input_size)

            norm: (default None). If :code:`norm` is not None, then the feature will be normalized by given norm. If :code:`norm` is None and :code:`self.norm` is `true`, then we use `lapacian degree norm`.
     
        Return:

            A tensor with shape (num_nodes, output_size)

        """

        if self.norm and norm is None:
            norm = GF.degree_norm(graph)

        if self.input_size > self.output_size:
            feature = self.linear(feature)

        if norm is not None:
            feature = feature * norm

        output = graph.send_recv(feature, "sum")

        if self.input_size <= self.output_size:
            output = self.linear(output)

        if norm is not None:
            output = output * norm
        output = output + self.bias
        if self.activation is not None:
            output = self.activation(output)
        return output
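The input_size/output_size branching is a cost optimization: projecting before propagation shrinks the feature that travels along every edge when the output is smaller, and projecting after is cheaper otherwise. A rough cost model makes the trade-off explicit (a sketch; constants are indicative only):

def propagation_cost(num_nodes, num_edges, in_dim, out_dim, project_first):
    # Message passing touches each edge once per channel; the linear layer
    # costs num_nodes * in_dim * out_dim either way.
    prop_dim = out_dim if project_first else in_dim
    return num_edges * prop_dim + num_nodes * in_dim * out_dim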
Example #12
    def forward(self, graph, feature):
        norm = GF.degree_norm(graph)
        output = feature * norm
        output = graph.send_recv(output, "sum")
        output = output * norm
        return output