def adj_type_to_edge_tensor_type(layout: EdgeLayout,
                                 edge_index: Adj) -> EdgeTensorType:
    r"""Converts a PyG Adj tensor to an EdgeTensorType equivalent."""
    if isinstance(edge_index, Tensor):
        return (edge_index[0], edge_index[1])  # (row, col)
    if layout == EdgeLayout.COO:
        return edge_index.coo()[:-1]  # (row, col)
    elif layout == EdgeLayout.CSR:
        return edge_index.csr()[:-1]  # (rowptr, col)
    else:
        return edge_index.csr()[-2::-1]  # (row, colptr)
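
# Illustrative usage sketch (not part of the original module; assumes `torch`,
# `torch_sparse.SparseTensor`, and the graph-store types used above are
# importable): a COO `SparseTensor` is reduced to its (row, col) index
# tensors, while a plain `edge_index` Tensor is simply split into its two
# rows.
#
#   import torch
#   from torch_sparse import SparseTensor
#
#   edge_index = torch.tensor([[0, 1, 1, 2],
#                              [1, 0, 2, 1]])
#   row, col = adj_type_to_edge_tensor_type(EdgeLayout.COO, edge_index)
#
#   adj = SparseTensor.from_edge_index(edge_index, sparse_sizes=(3, 3))
#   row, col = adj_type_to_edge_tensor_type(EdgeLayout.COO, adj)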
def to_csc(
    adj: Adj,
    edge_attr: EdgeAttr,
    device: Optional[torch.device] = None,
    share_memory: bool = False,
) -> Tuple[Tensor, Tensor, OptTensor]:
    # Convert the graph data into a suitable format for sampling (CSC format).
    # Returns the `colptr` and `row` indices of the graph, as well as a
    # `perm` vector that denotes the permutation of edges.
    # Since no permutation of edges is applied when using `SparseTensor`,
    # `perm` can be of type `None`.
    perm: Optional[Tensor] = None
    layout = edge_attr.layout
    is_sorted = edge_attr.is_sorted
    size = edge_attr.size

    if layout == EdgeLayout.CSR:
        colptr, row, _ = adj.csc()
    elif layout == EdgeLayout.CSC:
        colptr, row, _ = adj.csr()
    else:
        if size is None:
            raise ValueError(
                f"Edge {edge_attr.edge_type} cannot be converted to a "
                f"different layout without specifying 'size' for the source "
                f"and destination node types (got {size}). Please specify "
                f"these parameters for successful execution.")
        (row, col) = adj
        if not is_sorted:
            # Sort edges by column (and by row within each column) to obtain
            # CSC order, and remember the permutation in `perm`:
            perm = (col * size[0]).add_(row).argsort()
            row = row[perm]
            col = col[perm]
        colptr = torch.ops.torch_sparse.ind2ptr(col, size[1])

    colptr = colptr.to(device)
    row = row.to(device)
    perm = perm.to(device) if perm is not None else None

    if not colptr.is_cuda and share_memory:
        colptr.share_memory_()
        row.share_memory_()
        if perm is not None:
            perm.share_memory_()

    return colptr, row, perm
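
# Illustrative usage sketch (hypothetical driver code, not part of the
# module): converting an unsorted COO graph into CSC form. The `EdgeAttr`
# fields used here (`edge_type`, `layout`, `is_sorted`, `size`) are assumed
# to match the graph-store definition used above.
#
#   import torch
#
#   edge_index = torch.tensor([[0, 1, 1, 2],
#                              [1, 0, 2, 1]])
#   attr = EdgeAttr(edge_type=None, layout=EdgeLayout.COO, is_sorted=False,
#                   size=(3, 3))
#   colptr, row, perm = to_csc(edge_index, attr)
#   # `colptr` has length `size[1] + 1`, `row` holds the source node of each
#   # edge in CSC order, and `perm[i]` gives the original position of the
#   # i-th edge after sorting by destination node.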