Example #1
0
def run_digcn(dataset, gpu_no):
    """Train a DiGCN model on the named citation dataset.

    Returns the (val_loss, test_acc, test_std, time) tuple produced by run().
    Hyperparameters are taken from the module-level ``args`` namespace.
    """
    citation = get_citation_dataset(dataset, args.alpha, args.recache,
                                    args.normalize_features, args.adj_type)
    # Swap Sparse_Three_Sum for Sparse_Three_Concat here to try the
    # concatenation variant of the three-matrix fusion.
    metrics = run(citation, gpu_no,
                  Sparse_Three_Sum(citation),
                  args.runs, args.epochs, args.lr,
                  args.weight_decay,
                  args.early_stopping)
    val_loss, test_acc, test_std, time = metrics
    return val_loss, test_acc, test_std, time
Example #2
0
        super(Net, self).__init__()
        self.conv1 = ARMAConv(dataset.num_features,
                              args.hidden,
                              args.num_stacks,
                              args.num_layers,
                              args.shared_weights,
                              dropout=args.skip_dropout)
        self.conv2 = ARMAConv(args.hidden,
                              dataset.num_classes,
                              args.num_stacks,
                              args.num_layers,
                              args.shared_weights,
                              dropout=args.skip_dropout)

    def reset_parameters(self):
        """Re-initialize the learnable weights of both ARMA layers."""
        for conv in (self.conv1, self.conv2):
            conv.reset_parameters()

    def forward(self, data):
        """Two ARMA convolutions with ReLU and dropout in between.

        Returns per-node log-probabilities over the classes.
        """
        h = F.relu(self.conv1(data.x, data.edge_index))
        # Dropout rate comes from the CLI args; only active in training mode.
        h = F.dropout(h, p=args.dropout, training=self.training)
        h = self.conv2(h, data.edge_index)
        return F.log_softmax(h, dim=1)


# Load the Planetoid dataset and launch training with the chosen split policy.
dataset = get_planetoid_dataset(args.dataset, args.normalize_features)
# Use random train/val/test splits only when explicitly requested.
if args.random_splits:
    permute_masks = random_planetoid_splits
else:
    permute_masks = None
run(dataset, Net(dataset), args.runs, args.epochs, args.lr,
    args.weight_decay, args.early_stopping, permute_masks)
Example #3
0
def run_digcn(dataset, gpu_no):
    """Train a Net model on the named citation dataset.

    Returns the (val_loss, test_acc, test_std, time) tuple produced by run().
    """
    data = get_citation_dataset(dataset, args.alpha, args.recache,
                                args.normalize_features, args.adj_type)
    print("Num of edges ", data[0].num_edges)
    metrics = run(data, gpu_no, Net(data), args.runs, args.epochs,
                  args.lr, args.weight_decay, args.early_stopping)
    val_loss, test_acc, test_std, time = metrics
    return val_loss, test_acc, test_std, time
        nn = Seq(Lin(128, 128), LeakyReLU(negative_slope=0.2), Lin(128, 128),
                 LeakyReLU(negative_slope=0.2), Lin(128, 256),
                 LeakyReLU(negative_slope=0.2))
        self.conv2 = DynamicEdgeConv(nn, k=20, aggr='max')

        self.lin0 = Lin(256, 512)

        self.lin1 = Lin(512, 256)
        self.lin2 = Lin(256, 256)
        self.lin3 = Lin(256, num_classes)

    def forward(self, pos, batch):
        """Edge-conv feature extraction, global max pooling, then an MLP head.

        Returns per-graph log-probabilities over the classes.
        """
        feat = self.conv1(pos, batch)
        feat = self.conv2(feat, batch)

        feat = F.relu(self.lin0(feat))

        # Collapse per-point features into one vector per graph in the batch.
        feat = global_max_pool(feat, batch)

        feat = F.relu(self.lin1(feat))
        feat = F.relu(self.lin2(feat))
        feat = F.dropout(feat, p=0.5, training=self.training)
        return F.log_softmax(self.lin3(feat), dim=-1)


# Build the point-cloud datasets, size the classifier to the label count,
# and launch training.
train_dataset, test_dataset = get_dataset(num_points=1024)
model = Net(train_dataset.num_classes)
run(train_dataset, test_dataset, model,
    args.epochs, args.batch_size, args.lr,
    args.lr_decay_factor, args.lr_decay_step_size,
    args.weight_decay)
Example #5
0
    GCN,
]

results = []
# Benchmark every (dataset, model class) combination.
for dataset_name, Net in product(datasets, nets):
    print("{}\n ========".format(dataset_name))
    dataset = get_planetoid_dataset(dataset_name, True)
    permute_masks = random_planetoid_splits if random_splits else None
    # Class objects are singletons, so identity (`is`) is the correct
    # comparison; `==` happened to work but is not the idiom.
    if Net is GCNDiff:
        print("GCN Diffusion")
        data = dataset[0]
        # Graph Diffusion Convolution preprocessing: symmetric in-norm,
        # column out-norm, PPR diffusion, top-128 sparsification per node.
        gdc = T.GDC(self_loop_weight=1,
                    normalization_in='sym',
                    normalization_out='col',
                    diffusion_kwargs=dict(method='ppr', alpha=0.05),
                    sparsification_kwargs=dict(method='topk', k=128, dim=0),
                    exact=True)
        data = gdc(data)
        run_(dataset, data, Net(data, dataset, hidden), run_num, epoch_num,
             0.01, 0.0005, 10, permute_masks)
    elif Net is GCNHIGH:
        print("GCN Highway")
        # Sweep the highway mixing coefficient.
        for alpha in alphas:
            print("alpha : {}".format(alpha))
            run(dataset, Net(dataset, hidden, alpha), run_num, epoch_num, 0.01,
                0.0005, 10, permute_masks)
    else:
        print("GCN")
        run(dataset, Net(dataset, hidden), run_num, epoch_num, 0.01, 0.0005,
            10, permute_masks)
Example #6
0
def _str2bool(value):
    """Parse a boolean CLI flag from its string form.

    argparse's ``type=bool`` is a well-known trap: ``bool('False')`` is
    True because any non-empty string is truthy, so the flag could never
    be turned off from the command line. Parse the text explicitly.
    """
    return str(value).lower() in ('1', 'true', 'yes', 'y', 't')


parser.add_argument('--lr', type=float, default=0.01)
parser.add_argument('--weight_decay', type=float, default=0.0005)
parser.add_argument('--early_stopping', type=int, default=10)
parser.add_argument('--hidden', type=int, default=128)
parser.add_argument('--dropout', type=float, default=0.5)
parser.add_argument('--normalize_features', type=_str2bool, default=True)
args = parser.parse_args()


class Net(torch.nn.Module):
    """Two-layer GCN: input features -> hidden -> class log-probabilities."""

    def __init__(self, dataset):
        super(Net, self).__init__()
        self.conv1 = GCNConv(dataset.num_features, args.hidden)
        self.conv2 = GCNConv(args.hidden, dataset.num_classes)

    def reset_parameters(self):
        # Re-initialize both convolution layers between independent runs.
        for layer in (self.conv1, self.conv2):
            layer.reset_parameters()

    def forward(self, data):
        h = F.relu(self.conv1(data.x, data.edge_index))
        h = self.conv2(h, data.edge_index)
        return F.log_softmax(h, dim=1)


# Load the Planetoid dataset, shuffle it, and launch training.
dataset = get_planetoid_dataset(args.dataset, args.normalize_features).shuffle()
run(dataset, Net(dataset), args.runs, args.epochs, args.lr,
    args.weight_decay, args.early_stopping)
Example #7
0
    'runs': args.runs, 
    'epochs': args.epochs, 
    'lr': args.lr, 
    'weight_decay': args.weight_decay, 
    'early_stopping': args.early_stopping, 
    'logger': args.logger, 
    'momentum': args.momentum,
    'eps': args.eps,
    'update_freq': args.update_freq,
    'gamma': args.gamma,
    'alpha': args.alpha,
    'hyperparam': args.hyperparam
}

# Values to sweep for each supported hyperparameter. Collapsing the three
# near-identical if/elif branches into a lookup table removes the
# copy-pasted loop bodies while keeping behavior identical.
_SWEEP_VALUES = {
    'eps': np.logspace(-3, 0, 10, endpoint=True),
    'update_freq': [4, 8, 16, 32, 64, 128],
    'gamma': np.linspace(1., 10., 10, endpoint=True),
}

if args.hyperparam in _SWEEP_VALUES:
    # Re-run the experiment once per candidate value of the chosen
    # hyperparameter, overriding it in the shared kwargs each time.
    for param in _SWEEP_VALUES[args.hyperparam]:
        print(f"{args.hyperparam}: {param}")
        kwargs[args.hyperparam] = param
        run(**kwargs)
else:
    # No recognized sweep requested: single run with the parsed arguments.
    run(**kwargs)