Example #1
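    # Resume from the final pretraining checkpoint if it exists;
    # otherwise pretrain from scratch and checkpoint after every epoch.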
    last_model = os.path.join(save_dir, f'saved_model_epoch_{epochs}.pt')
    if os.path.exists(last_model):
        model.load_state_dict(torch.load(last_model, map_location=device))
    else:
        for epoch in range(epochs):
            train(
                epoch,
                model,
                loader,
                optimizer,
                scheduler,
                writer=writer,
                verbose=verbose
            )
            torch.save(
                model.state_dict(),
                os.path.join(save_dir, f'saved_model_epoch_{epoch+1}.pt')
            )
        torch.save(
            model.state_dict(),
            os.path.join(save_dir, 'saved_model_epoch_last.pt')
        )
        logger.info('Done pretraining!')

    # Downstream task
    dataset = CSVDataset(DOWNSTREAM_TRAIN_FILE, LANG_FILE)
    train_latent_data = []
    train_target = []
    model.encoder.update_batch_size(2)
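    # Encode each downstream sequence with the pretrained encoder to build the latent training set.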
    for seqs, y in dataset:
        encoder_seq, _, _ = packed_sequential_data_preparation(
Example #2
def test():
    # Linear-probe evaluation: fit a logistic-regression classifier on the
    # frozen train-node embeddings and report accuracy on the test nodes.
    model.eval()
    z, _, _ = model(data.x, data.edge_index)
    acc = model.test(z[data.train_mask], data.y[data.train_mask],
                     z[data.test_mask], data.y[data.test_mask], max_iter=150)
    return acc


test_acc = []
best_acc = 0
for _ in range(5):
    device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
    model = DeepGraphInfomax(
        hidden_channels=512, encoder=Encoder(dataset.num_features, 512),
        summary=lambda z, *args, **kwargs: torch.sigmoid(z.mean(dim=0)),
        corruption=corruption).to(device)
    data = dataset[0].to(device)
    optimizer = torch.optim.Adam(model.parameters(), lr=0.001)
    for epoch in range(1, 301):
        loss = train()
        # print('Epoch: {:03d}, Loss: {:.4f}'.format(epoch, loss))
    acc = test()
    test_acc.append(acc)
    print('Accuracy: {:.4f}'.format(acc))
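    # Keep a checkpoint of the model whenever this run beats the best accuracy so far.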
    if best_acc < acc:
        best_acc = acc
        if not os.path.isdir('../checkpoint'):
            os.makedirs('../checkpoint')
        torch.save(model.state_dict(), os.path.join('../checkpoint', '{}.pth'.format(args.dataset)))
test_acc = np.array(test_acc)
print("Node classification for {}, acc mean {:.4f}, acc std {:.4f}".format(args.dataset,
                                                                         np.mean(test_acc), np.std(test_acc, ddof=1)))
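
Example #2 calls Encoder, corruption, and train without defining them in the excerpt, and it also relies on a dataset/args setup (data loader and argparse) that is not shown. Below is a minimal sketch of how these helpers are typically written, following the standard PyTorch Geometric Deep Graph Infomax example; the GCNConv/PReLU encoder and feature-shuffling corruption are assumptions here, and model, optimizer, and data refer to the globals created in the excerpt above.

import torch
import torch.nn as nn
from torch_geometric.nn import GCNConv


class Encoder(nn.Module):
    def __init__(self, in_channels, hidden_channels):
        super().__init__()
        self.conv = GCNConv(in_channels, hidden_channels)
        self.prelu = nn.PReLU(hidden_channels)

    def forward(self, x, edge_index):
        # One GCN layer followed by PReLU produces the node embeddings.
        return self.prelu(self.conv(x, edge_index))


def corruption(x, edge_index):
    # Corrupt the graph by shuffling node features while keeping the edges intact.
    return x[torch.randperm(x.size(0))], edge_index


def train():
    # One optimization step of the DGI mutual-information objective,
    # using the positive/negative embeddings and the graph summary.
    model.train()
    optimizer.zero_grad()
    pos_z, neg_z, summary = model(data.x, data.edge_index)
    loss = model.loss(pos_z, neg_z, summary)
    loss.backward()
    optimizer.step()
    return loss.item()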