def __init__(self, nfeat, nhid, nclass, dropout, nlayer=3):
    super(HyperGraphX, self).__init__()
    # Input projection, (nlayer - 2) hidden hypergraph convolutions, and output layer.
    self.conv1 = HypergraphConv(nfeat, nhid)
    self.conv2 = HypergraphConv(nhid, nclass)
    self.convx = nn.ModuleList(
        [HypergraphConv(nhid, nhid) for _ in range(nlayer - 2)])
    self.dropout_p = dropout
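A minimal sketch of how this constructor is typically paired with a forward pass, assuming ReLU activations, dropout between layers, and a log-softmax output; the class name HyperGraphXSketch and the forward logic are assumptions for illustration, not the repository's actual implementation.

import torch.nn as nn
import torch.nn.functional as F
from torch_geometric.nn import HypergraphConv


class HyperGraphXSketch(nn.Module):
    def __init__(self, nfeat, nhid, nclass, dropout, nlayer=3):
        super().__init__()
        self.conv1 = HypergraphConv(nfeat, nhid)
        self.conv2 = HypergraphConv(nhid, nclass)
        self.convx = nn.ModuleList(
            [HypergraphConv(nhid, nhid) for _ in range(nlayer - 2)])
        self.dropout_p = dropout

    def forward(self, x, hyperedge_index):
        # Assumed pattern: activation + dropout after every layer except the last.
        x = F.relu(self.conv1(x, hyperedge_index))
        x = F.dropout(x, p=self.dropout_p, training=self.training)
        for conv in self.convx:
            x = F.relu(conv(x, hyperedge_index))
            x = F.dropout(x, p=self.dropout_p, training=self.training)
        x = self.conv2(x, hyperedge_index)
        return F.log_softmax(x, dim=1)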
def test_hypergraph_conv():
    in_channels, out_channels = (16, 32)
    # Row 0 holds node indices, row 1 holds hyperedge indices (4 nodes, 4 hyperedges).
    hyper_index = torch.tensor([[0, 0, 0, 1, 2, 3],
                                [1, 2, 3, 0, 0, 0]])
    hyper_weight = torch.tensor([1.0, 0.5, 0.3, 0.7])
    # Count nodes from the node row only, not from the whole incidence tensor.
    num_nodes = hyper_index[0].max().item() + 1
    x = torch.randn((num_nodes, in_channels))

    conv = HypergraphConv(in_channels, out_channels)
    assert conv.__repr__() == 'HypergraphConv(16, 32)'
    out = conv(x, hyper_index, hyper_weight)
    assert out.size() == (num_nodes, out_channels)
def test_hypergraph_conv_with_more_edges_than_nodes():
    in_channels, out_channels = (16, 32)
    hyperedge_index = torch.tensor([[0, 0, 1, 1, 2, 3, 3, 3, 2, 1, 2],
                                    [0, 1, 2, 1, 2, 1, 0, 3, 3, 4, 4]])
    hyperedge_weight = torch.tensor([1.0, 0.5, 0.8, 0.2, 0.7])
    num_nodes = hyperedge_index[0].max().item() + 1
    x = torch.randn((num_nodes, in_channels))

    conv = HypergraphConv(in_channels, out_channels)
    assert conv.__repr__() == 'HypergraphConv(16, 32)'
    out = conv(x, hyperedge_index)
    assert out.size() == (num_nodes, out_channels)
    out = conv(x, hyperedge_index, hyperedge_weight)
    assert out.size() == (num_nodes, out_channels)
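The two-row hyperedge_index tensors in these tests are flattened node-to-hyperedge incidence lists (row 0: node index, row 1: hyperedge id). Below is a small sketch of building such a tensor from an explicit list of hyperedges; the helper name to_hyperedge_index is hypothetical and not part of the codebase.

import torch

def to_hyperedge_index(hyperedges):
    # hyperedges: list of node-index lists, e.g. [[0, 1, 2], [1, 3]].
    # Returns a [2, num_incidences] tensor: row 0 = node index, row 1 = hyperedge id.
    nodes, edges = [], []
    for edge_id, members in enumerate(hyperedges):
        for node in members:
            nodes.append(node)
            edges.append(edge_id)
    return torch.tensor([nodes, edges])

# Two hyperedges {0, 1, 2} and {1, 3} yield:
# tensor([[0, 1, 2, 1, 3],
#         [0, 0, 0, 1, 1]])
hyperedge_index = to_hyperedge_index([[0, 1, 2], [1, 3]])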
def test_hypergraph_conv():
    in_channels, out_channels = (16, 32)
    hyperedge_index = torch.tensor([[0, 0, 1, 1, 2, 3],
                                    [0, 1, 0, 1, 0, 1]])
    hyperedge_weight = torch.tensor([1.0, 0.5])
    num_nodes = hyperedge_index[0].max().item() + 1
    x = torch.randn((num_nodes, in_channels))

    conv = HypergraphConv(in_channels, out_channels)
    assert conv.__repr__() == 'HypergraphConv(16, 32)'
    out = conv(x, hyperedge_index)
    assert out.size() == (num_nodes, out_channels)
    out = conv(x, hyperedge_index, hyperedge_weight)
    assert out.size() == (num_nodes, out_channels)

    # With attention and concatenated heads, the output width is heads * out_channels.
    conv = HypergraphConv(in_channels, out_channels, use_attention=True, heads=2)
    out = conv(x, hyperedge_index)
    assert out.size() == (num_nodes, 2 * out_channels)
    out = conv(x, hyperedge_index, hyperedge_weight)
    assert out.size() == (num_nodes, 2 * out_channels)

    # With concat=False, the head outputs are averaged back down to out_channels.
    conv = HypergraphConv(in_channels, out_channels, use_attention=True, heads=2,
                          concat=False, dropout=0.5)
    out = conv(x, hyperedge_index, hyperedge_weight)
    assert out.size() == (num_nodes, out_channels)
def __init__(self, dim_in, dim_out, bias=False, **kwargs):
    super(HypergraphConvGG, self).__init__()
    self.model = HypergraphConv(dim_in, dim_out, bias=bias)
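The "GG" suffix suggests a GraphGym-style layer wrapper; a hedged sketch of how such a wrapper usually forwards a batch is shown below. The class name HypergraphConvGGSketch, the batch.x and batch.edge_index attribute names, and the in-place update are assumptions about the surrounding pipeline, not confirmed by this snippet.

import torch.nn as nn
from torch_geometric.nn import HypergraphConv


class HypergraphConvGGSketch(nn.Module):
    def __init__(self, dim_in, dim_out, bias=False, **kwargs):
        super().__init__()
        self.model = HypergraphConv(dim_in, dim_out, bias=bias)

    def forward(self, batch):
        # Assumed GraphGym-style convention: read node features and the incidence
        # structure from the batch object and write the updated features back.
        batch.x = self.model(batch.x, batch.edge_index)
        return batch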
def __init__(self, nfeat, nhid, nclass, dropout, nlayer=1):
    super(HyperGraph1, self).__init__()
    # Single-layer variant: one hypergraph convolution maps input features straight
    # to class scores; nhid and nlayer are unused here but keep the constructor
    # signature consistent with the deeper models.
    self.conv1 = HypergraphConv(nfeat, nclass)
    self.dropout_p = dropout