# NOTE(review): whitespace-mangled fragment of a link-prediction benchmark
# script, collapsed onto one physical line. It begins mid-function (the
# enclosing `def` and the handling for masks other than "val" are not
# visible) and is cut off mid-expression at the end (`model = AutoGCN(` is
# never closed). Code left byte-identical: reformatting would require
# guessing the missing head and tail.
#
# Visible logic, in order:
#  1. Evaluation tail: choose an index offset into `data` by split
#     ("val" -> 3, otherwise 5 — presumably the test split; confirm against
#     the full function), encode node features with model.lp_encode, score
#     candidate edge sets (data[offset] / data[offset+1] — presumably
#     positive/negative edge graphs; verify against caller) via lp_decode +
#     sigmoid, and return the ROC-AUC of the probabilities against labels
#     from get_link_labels.
#  2. Benchmark driver head: for each seed in [1234, 1234+repeat), seed the
#     RNGs, split the graph into train/valid/test, add self-loops to the
#     training graph for gcn/gat models, move the splits to `device`, and
#     begin constructing an AutoGCN (output_dimension=2 — two classes for
#     the edge/non-edge link-prediction task); the constructor call is
#     truncated here.
if mask == "val": offset = 3 else: offset = 5 z = model.lp_encode(data[0]) link_logits = model.lp_decode(z, torch.stack(data[offset].edges()), torch.stack(data[offset + 1].edges())) link_probs = link_logits.sigmoid() link_labels = get_link_labels(torch.stack(data[offset].edges()), torch.stack(data[offset + 1].edges())) result = roc_auc_score(link_labels.cpu().numpy(), link_probs.cpu().numpy()) return result res = [] model_hp, _ = get_encoder_decoder_hp(args.model, decoupled=False) for seed in tqdm(range(1234, 1234 + args.repeat)): setup_seed(seed) g = dataset[0] splitted = list(split_train_valid_test(g)) if args.model == 'gcn' or args.model == 'gat': splitted[0] = dgl.add_self_loop(splitted[0]) splitted = [g.to(device) for g in splitted] if args.model == 'gcn': model = AutoGCN( input_dimension=splitted[0].ndata['feat'].shape[1], output_dimension=2,
# NOTE(review): whitespace-mangled fragment of a node-classification
# benchmark script, collapsed onto one physical line. It begins mid-way
# through argparse setup (`parser` is defined outside this view) and is cut
# off inside the hyper-parameter dict passed to
# duplicate_from_hyper_parameter (`"max_epoch": args.epoch,` has no closing
# braces). Code left byte-identical: reformatting would require guessing the
# missing head and tail.
#
# Visible logic, in order:
#  1. Finish CLI parsing (--epoch, default 200) and build the dataset by
#     name, converting it to a DGL dataset; move graph 0 to the requested
#     device.
#  2. Derive num_features from the 'feat' node tensor, num_classes as
#     max(label)+1, and grab the label / test_mask node tensors (test_mask
#     and label appear unused in the visible portion — presumably consumed
#     after the truncation point).
#  3. For each seed in [0, repeat): seed the RNGs and build a
#     NodeClassificationFullTrainer (feval accuracy, nll_loss, init=False),
#     then start re-instantiating it from hyper-parameters; the dict is
#     truncated here.
parser.add_argument('--epoch', type=int, default=200) args = parser.parse_args() # seed = 100 dataset = build_dataset_from_name(args.dataset.lower()) dataset = to_dgl_dataset(dataset) data = dataset[0].to(args.device) num_features = data.ndata['feat'].size(1) num_classes = data.ndata['label'].max().item() + 1 label = data.ndata['label'] test_mask = data.ndata['test_mask'] accs = [] model_hp, decoder_hp = get_encoder_decoder_hp(args.model) for seed in tqdm(range(args.repeat)): set_seed(seed) trainer = NodeClassificationFullTrainer( model=args.model, num_features=num_features, num_classes=num_classes, device=args.device, init=False, feval=['acc'], loss="nll_loss", ).duplicate_from_hyper_parameter({ "trainer": { "max_epoch": args.epoch,
# NOTE(review): whitespace-mangled fragment of a benchmark script for
# decoupled encoder models, collapsed onto one physical line. It begins
# mid-way through argparse setup (`parser` is defined outside this view)
# and is cut off mid-call (the GCNEncoderMaintainer constructor is never
# closed, and its .from_hyper_parameter call — paralleling the gat branch —
# is missing). Code left byte-identical: reformatting would require
# guessing the missing head and tail.
#
# Visible logic, in order:
#  1. Finish CLI parsing: --repeat (50), --model in {gat, gcn, sage},
#     --lr, --weight_decay, --epoch, --debug flag.
#  2. Load the Planetoid dataset named by --dataset (cached under
#     ~/.cache-autogl) with row-normalized features, and move graph 0 to
#     the requested device. Fetch encoder/decoder hyper-parameters with
#     decoupled=True.
#  3. For each seed in [0, repeat): seed the RNGs and construct the matching
#     encoder maintainer (GATEncoderMaintainer configured via
#     from_hyper_parameter(model_hp); the gcn branch is truncated before
#     its from_hyper_parameter call, and no sage branch is visible —
#     presumably both follow after the cut).
parser.add_argument('--repeat', type=int, default=50) parser.add_argument('--model', type=str, choices=['gat', 'gcn', 'sage'], default='gat') parser.add_argument('--lr', type=float, default=0.01) parser.add_argument('--weight_decay', type=float, default=0.0) parser.add_argument('--epoch', type=int, default=200) parser.add_argument('--debug', action='store_true', default=False) args = parser.parse_args() # seed = 100 dataset = Planetoid(os.path.expanduser('~/.cache-autogl'), args.dataset, transform=T.NormalizeFeatures()) data = dataset[0].to(args.device) accs = [] model_hp, decoder_hp = get_encoder_decoder_hp(args.model, decoupled=True) for seed in tqdm(range(args.repeat)): set_seed(seed) if args.model == 'gat': model = GATEncoderMaintainer( input_dimension=dataset.num_node_features, final_dimension=dataset.num_classes, device=args.device ).from_hyper_parameter(model_hp) elif args.model == 'gcn': model = GCNEncoderMaintainer( input_dimension=dataset.num_node_features, final_dimension=dataset.num_classes, device=args.device