Example #1
def spatial_feats(self):
    node_size, node_centre = self.node.spatial_attr
    node_dists = node_intersect(self.node.coords, 'minus')  # b, n, n, 4
    # Normalise the pairwise coordinate offsets by box width/height
    # (the divisor broadcasts over the second node axis).
    node_dists = node_dists / torch.cat(
        (node_size, node_size), dim=-1).unsqueeze(dim=2)
    node_scale = node_intersect(node_size, 'divide')  # pairwise w and h ratios
    # Pairwise ratios of box areas (w * h) and of w + h.
    node_mul = node_intersect(
        node_size[:, :, 0].unsqueeze(-1) *
        node_size[:, :, 1].unsqueeze(-1), 'divide')
    node_sum = node_intersect(
        node_size[:, :, 0].unsqueeze(-1) +
        node_size[:, :, 1].unsqueeze(-1), 'divide')
    return torch.cat((node_dists, node_scale, node_mul, node_sum), dim=-1)
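All of the snippets on this page call a node_intersect helper that is not shown. Judging from the shapes in the examples (a (b, n, d) node tensor in, a (b, n, n, d) pairwise tensor out, with methods such as 'minus', 'mul' and 'divide'), it pairs every node with every other node by broadcasting and combines the two vectors with the named operation. The sketch below is only an inference from those call sites, not the project's actual implementation; the epsilon guard in the division is an added assumption.

import torch

def node_intersect(node_feats, method='cat'):
    # Assumed behaviour: combine every (i, j) pair of node vectors by broadcasting.
    # node_feats: (b, n, d) -> (b, n, n, d), or (b, n, n, 2 * d) for 'cat'.
    feats_i = node_feats.unsqueeze(2)  # (b, n, 1, d)
    feats_j = node_feats.unsqueeze(1)  # (b, 1, n, d)
    if method == 'cat':
        b, n, d = node_feats.shape
        return torch.cat((feats_i.expand(b, n, n, d),
                          feats_j.expand(b, n, n, d)), dim=-1)
    if method == 'minus':
        return feats_i - feats_j
    if method == 'mul':
        return feats_i * feats_j
    if method == 'divide':
        return feats_i / (feats_j + 1e-8)  # epsilon is an assumption, not from the source
    raise ValueError('unknown method: ' + method)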
Example #2
    def forward(self, graph: Graph):
        if self.node_dim % 512 == 4:
            coord_feats = torch.cat(graph.node.spatial_attr, dim=-1)
            node_feats = self.drop_l(graph.node_feats)
            node_feats = torch.cat((node_feats, coord_feats), dim=-1)
        else:
            node_feats = self.drop_l(graph.node_feats)
        node_feats = self.node_proj_l(node_feats)
        batch_num, node_num, _ = node_feats.shape

        # Pairwise fusion of node features, flattened to (b * n * n, d).
        n_fusion_feats = node_intersect(node_feats,
                                        method=self.n2n_method).view(
                                            batch_num * node_num * node_num,
                                            -1)
        if graph.edge is not None:
            # Drop self-loops and keep only rows corresponding to retained edges.
            graph.edge.remove_self_loop()
            n_fusion_feats = n_fusion_feats[graph.edge_indexes]
        joint_feats = self.joint_proj_l(n_fusion_feats)
        edge_num, o_c = joint_feats.shape
        # Broadcast the conditioning feature to every edge in each sample.
        q_feats = self.q_proj_l(graph.cond_feats).unsqueeze(1).expand(
            -1, edge_num // batch_num, -1)
        joint_feats = torch.cat(
            (joint_feats.view(batch_num, -1, o_c), q_feats), dim=-1)
        edge_feats = self.linear_l(joint_feats)
        return edge_feats.view(edge_num, -1)
Example #3
def compute_pseudo(self, graph: Graph):
    node_size, node_centre = graph.node.spatial_attr
    node_dis = node_intersect(node_centre, 'minus')  # b, k, k, 2
    node_dis = node_dis.view(-1, 2)
    # Convert the pairwise centre offsets to polar pseudo-coordinates.
    coord_x, coord_y = node_dis.chunk(2, dim=-1)
    rho = torch.sqrt(coord_x**2 + coord_y**2)
    theta = torch.atan2(coord_x, coord_y)
    coord = torch.cat((rho, theta), dim=-1)  # m, 2
    return coord
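To make the polar conversion above concrete, here is a small stand-alone check of the same arithmetic with made-up centre coordinates; note that the atan2 argument order (x first) is kept exactly as in the snippet.

import torch

centres = torch.tensor([[[0.0, 0.0], [3.0, 4.0]]])  # (b=1, k=2, 2) toy node centres
node_dis = centres.unsqueeze(2) - centres.unsqueeze(1)  # (1, 2, 2, 2) pairwise offsets
coord_x, coord_y = node_dis.view(-1, 2).chunk(2, dim=-1)
rho = torch.sqrt(coord_x**2 + coord_y**2)  # distances: 0, 5, 5, 0
theta = torch.atan2(coord_x, coord_y)  # same argument order as the example above
print(torch.cat((rho, theta), dim=-1))  # (4, 2) pseudo-coordinates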
Example #4
def op_process(self):
    # An edge (i, j) is kept only when both node i and node j are valid.
    edge_mask = node_intersect(self.node.mask.unsqueeze(-1),
                               'mul').squeeze()  # b, n, n
    node_i, node_j = self.meshgrid_cache  # b, n, n
    # Gather the (i, j) index pairs of the surviving edges.
    node_i, node_j = node_i.cuda(self.device)[edge_mask], node_j.cuda(
        self.device)[edge_mask]  # k, k
    self.node_i_ids, self.node_j_ids = self.node.old2new_map[
        node_i], self.node.old2new_map[node_j]
    self.mask = edge_mask
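The boolean-mask indexing in op_process is easy to verify in isolation. A tiny, self-contained illustration with invented values: three nodes of which the last is padding, so only the four pairs among the first two nodes survive, and the flattened results list their (i, j) indices.

import torch

mask = torch.tensor([[True, True, False]])  # (b=1, n=3) node validity
edge_mask = mask.unsqueeze(2) & mask.unsqueeze(1)  # (1, 3, 3) pairwise validity
grid_i, grid_j = torch.meshgrid(torch.arange(3), torch.arange(3), indexing='ij')
grid_i, grid_j = grid_i.unsqueeze(0), grid_j.unsqueeze(0)  # (1, 3, 3) each
print(grid_i[edge_mask])  # tensor([0, 0, 1, 1]) -- source index of each kept pair
print(grid_j[edge_mask])  # tensor([0, 1, 0, 1]) -- target index of each kept pair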
Example #5
    def compute_pseudo(self, graph: Graph):
        node_size, node_centre = graph.node.spatial_attr
        node_dis = node_intersect(node_centre, 'minus')  # b, k, k, 2
        node_dis = node_dis.view(-1, 2)
        node_dis = graph.edge.topk_op().attr_process(
            EdgeAttr('node_dist', node_dis, EdgeNull()))

        coord_x, coord_y = node_dis.value.chunk(2, dim=-1)
        rho = torch.sqrt(coord_x**2 + coord_y**2)
        theta = torch.atan2(coord_x, coord_y)
        coord = torch.cat((rho, theta), dim=-1)  # m, 2
        return coord
Example #6
def op_process(self, node):
    self.batch_num, self.node_num, self.device = node.batch_num, node.node_num, node.device
    # Decide which node pairs form edges: no mask for a 'full' graph without
    # node masks, otherwise intersect the node mask with init_masks().
    if node.masks is None and self.method == 'full':
        self.masks = None
    elif node.masks is None:
        self.masks = self.init_masks()
    else:
        self.masks = node_intersect(node.masks.unsqueeze(-1),
                                    'mul').squeeze(-1) * self.init_masks()
    node_i, node_j = self.meshgrid_cache  # b, n, n
    if self.masks is not None:
        # Keep the (i, j) index pairs of edges that survive the mask.
        node_i, node_j = node_i.cuda(self.device)[self.masks], node_j.cuda(
            self.device)[self.masks]  # k, k
    else:
        node_i, node_j = node_i.view(-1).cuda(
            self.device), node_j.view(-1).cuda(self.device)
    self.node_i_ids, self.node_j_ids = node.map_idx(node_i), node.map_idx(
        node_j)
Example #7
    def forward(self, graph: Graph):
        if self.node_dim % 512 == 4:
            coord_feats = torch.cat(graph.node.size_center, dim=-1)
            node_feats = self.drop_l(graph.node_feats)
            node_feats = torch.cat((node_feats, coord_feats), dim=-1)
        else:
            node_feats = self.drop_l(graph.node_feats)
        node_feats = self.node_proj_l(node_feats)
        batch_num, node_num, _ = node_feats.shape

        # Fuse every node pair and append the pairwise spatial features.
        joint_feats = node_intersect(node_feats, method=self.n2n_method)
        joint_feats = torch.cat(
            (joint_feats, graph.edge.spatial_feats()),
            dim=-1).view(batch_num * node_num * node_num, -1)
        joint_feats = graph.edge.attr_process(joint_feats)
        joint_feats = self.joint_proj_l(joint_feats)
        edge_num, o_c = joint_feats.shape

        # Gate each edge feature with the projected conditioning feature.
        edge_feats = joint_feats.view(batch_num, -1, o_c) * self.q_proj_l(
            graph.cond_feats).unsqueeze(1)
        return self.relu_l(edge_feats.view(edge_num, -1))