# NOTE(review): collapsed, TRUNCATED fragment of a GCNII training script --
# argparse flags, dataset loading, GCNIINet construction, and the start of a
# two-parameter-group Adam optimizer. The final statement is cut off mid-list
# ('params': model.params2, ...), and the inline "# set_seed(args.seed)" turns
# the rest of this physical line into a comment, so the original line breaks
# were lost. The continuation is not visible here; code left byte-identical.
parser.add_argument('--num_hidden', type=int, default=64) parser.add_argument('--num_layers', type=int, default=8) parser.add_argument('--dropout', type=float, default=0.5) parser.add_argument('--alpha', type=float, default=0.1) parser.add_argument('--lamda', type=float, default=0.5) parser.add_argument('--variant', action='store_true', default=False) parser.add_argument('--seed', type=int, default=42) parser.add_argument('--learn_rate', type=float, default=1e-2) parser.add_argument('--weight_decay1', type=float, default=0.01) parser.add_argument('--weight_decay2', type=float, default=0.0005) parser.add_argument('--num_epochs', type=int, default=400) parser.add_argument('--patience', type=int, default=40) args = parser.parse_args() graph, features, labels, train_mask, val_mask, test_mask, num_feats, num_classes = load_data_default( args.dataset) model = GCNIINet(num_feats, num_classes, args.num_hidden, args.num_layers, dropout=args.dropout, alpha=args.alpha, lamda=args.lamda) labels = labels.squeeze() # set_seed(args.seed) optimizer = th.optim.Adam([{ 'params': model.params1, 'weight_decay': args.weight_decay1 }, { 'params': model.params2,
# NOTE(review): collapsed, TRUNCATED fragment of a VSGC_Multi training script --
# argparse flags, dataset loading, VSGCNetMulti construction, an Adam optimizer,
# and the start of an EarlyStopping(...) call that is cut off mid-arguments.
# As on the previous fragment, the embedded "# set_seed(args.seed)" comments out
# everything after it on this physical line, confirming the newlines were lost
# in a paste/extraction. Continuation not visible here; code left byte-identical.
parser.add_argument('--dropout', type=float, default=0.8) parser.add_argument('--propagation', type=int, default=1) parser.add_argument('--seed', type=int, default=42) parser.add_argument('--learn_rate', type=float, default=0.01) parser.add_argument('--weight_decay', type=float, default=5e-5) parser.add_argument('--num_epochs', type=int, default=1500) parser.add_argument('--patience', type=int, default=100) parser.add_argument('--cuda', type=int, default=0) parser.add_argument('--filename', type=str, default='VSGC_Multi') parser.add_argument('--id', type=int, default=0) args = parser.parse_args() test_print = False graph, features, labels, train_mask, val_mask, test_mask,\ num_feats, num_classes = load_data_default(args.dataset) model = VSGCNetMulti(num_feats, num_classes, k=args.k, dropout=args.dropout, propagation=args.propagation) labels = labels.squeeze() # set_seed(args.seed) optimizer = th.optim.Adam(model.parameters(), lr=args.learn_rate, weight_decay=args.weight_decay) early_stopping = EarlyStopping(args.patience,
def remove_reverse_edges(graph):
    """Delete one direction of every bidirectional edge pair, in place.

    Walks the graph's edge list in order. For each edge (u, v) a
    direction-independent key (min(u, v), max(u, v)) is formed; the first
    edge seen for a key is kept and every later edge with the same key
    (including self-duplicates) is removed via ``graph.remove_edges``.

    Parameters
    ----------
    graph : presumably a dgl.DGLGraph -- TODO confirm; it only needs to
        expose ``edges()`` (returning a (src, dst) tensor pair),
        ``num_edges()`` and ``remove_edges(eids)``. Mutated in place;
        nothing is returned.
    """
    src, dst = graph.edges()
    num_edges = graph.num_edges()
    seen = set()
    duplicate_eids = []
    for eid in range(num_edges):
        u, v = src[eid].item(), dst[eid].item()
        # Canonical ordering makes (u, v) and (v, u) hash to the same key.
        key = (u, v) if u <= v else (v, u)
        if key in seen:
            duplicate_eids.append(eid)
        else:
            seen.add(key)
    graph.remove_edges(th.LongTensor(duplicate_eids))


# Script: load citeseer, strip self-loops, then deduplicate reverse edges,
# printing the edge count before and after for a quick sanity check.
graph, features, labels, train_mask, val_mask, test_mask, num_feats, num_classes = \
    load_data_default("citeseer")
graph = graph.remove_self_loop()
print(graph.num_edges())
remove_reverse_edges(graph)
print(graph.num_edges())