def __init__(self, c, args):
    """Build a stack of multi-head graph-attention layers.

    Args:
        c: curvature value forwarded to the parent encoder constructor.
        args: config namespace; reads num_layers, dropout, alpha, n_heads,
            plus whatever fields get_dim_act consumes to produce the
            per-layer dimensions and activations.
    """
    super(GAT, self).__init__(c)
    assert args.num_layers > 0
    dims, acts = get_dim_act(args)
    gat_layers = []
    for i in range(len(dims) - 1):
        in_dim = dims[i]
        act = acts[i]
        # With concat=True every head emits out_dim features and the heads
        # are concatenated, so dims[i + 1] must split evenly across heads.
        assert dims[i + 1] % args.n_heads == 0
        out_dim = dims[i + 1] // args.n_heads
        gat_layers.append(
            GraphAttentionLayer(in_dim, out_dim, args.dropout, act,
                                args.alpha, args.n_heads, True))
    self.layers = nn.Sequential(*gat_layers)
    # NOTE(review): presumably tells the base encoder that forward() also
    # takes the adjacency — confirm against the base class.
    self.encode_graph = True
def __init__(self, c, args):
    """Attention-based decoder: one GAT layer mapping dim -> n_classes.

    Args:
        c: curvature value forwarded to the parent decoder constructor.
        args: config namespace; reads dim, n_classes, dropout and alpha.
    """
    super(GATDecoder, self).__init__(c)
    # Single attention head (n_heads=1, concat=True) producing class scores.
    classifier = GraphAttentionLayer(
        args.dim, args.n_classes, args.dropout, F.elu, args.alpha, 1, True)
    self.cls = classifier
    # NOTE(review): presumably signals that decoding consumes the adjacency
    # matrix — confirm against the base decoder class.
    self.decode_adj = True