Example #1
import time

import torch

# model, device, train_loader, and train_dgl are assumed to be defined elsewhere.
optimizer = torch.optim.Adam(model.parameters(), lr=0.01)
scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer, mode='min',
                                                       factor=0.5,
                                                       patience=25,
                                                       verbose=True)
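
Note that the scheduler is created but never stepped inside the loop shown below; a ReduceLROnPlateau scheduler is normally driven once per epoch by a validation metric, presumably in a part of the script that is not shown here. A minimal sketch of that call, where val_loss is a hypothetical name:

scheduler.step(val_loss)  # val_loss: hypothetical per-epoch validation loss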

dur = []
total_load = 0
total_fp = 0
total_bp = 0
total_up = 0
total_ot = 0
for epoch in range(1, 201):
    t1 = time.time()
    train_loss, train_acc, optimizer = train_dgl('sage', model, optimizer, device, train_loader)
    dur.append(time.time() - t1)
    # epoch_load_time, epoch_forward_time, epoch_backward_time, epoch_update_time,
    # and epoch_batch_time are assumed to be globals updated inside train_dgl().
    print(
        'load Time: {:.4f}, forward Time: {:.4f}, backward Time: {:.4f}, update Time: {:.4f}, batch Time: {:.4f}'.format(
            epoch_load_time,
            epoch_forward_time,
            epoch_backward_time,
            epoch_update_time,
            epoch_batch_time))

    # Accumulate per-stage times for epochs 51-150 only (100 measured epochs).
    if 50 < epoch < 151:
        total_load += epoch_load_time
        total_fp += epoch_forward_time
        total_bp += epoch_backward_time
        total_up += epoch_update_time
        # Remainder of the batch time not spent in forward, backward, or update.
        total_ot += epoch_batch_time - epoch_forward_time - epoch_backward_time - epoch_update_time
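
The example above calls train_dgl() and then reads epoch_load_time, epoch_forward_time, epoch_backward_time, epoch_update_time, and epoch_batch_time, but the helper itself is not shown. The sketch below is one hypothetical way such a helper could collect those per-stage timings; it assumes a DGL DataLoader that yields (batched_graph, labels) pairs, a node-feature key 'feat', and that the timers are module-level globals. The helper actually used in these examples may differ.

import time

import torch
import torch.nn.functional as F


def train_dgl(model_name, model, optimizer, device, train_loader):
    """Hypothetical per-stage timing helper; model_name ('sage'/'gcn') only labels the run."""
    global epoch_load_time, epoch_forward_time, epoch_backward_time
    global epoch_update_time, epoch_batch_time
    epoch_load_time = epoch_forward_time = 0.0
    epoch_backward_time = epoch_update_time = 0.0
    total_loss, total_correct, total_samples = 0.0, 0, 0

    model.train()
    t_epoch = time.time()
    for batched_graph, labels in train_loader:
        # Data loading / host-to-device transfer.
        t0 = time.time()
        batched_graph = batched_graph.to(device)
        labels = labels.to(device)
        feats = batched_graph.ndata['feat']  # 'feat' key is an assumption
        epoch_load_time += time.time() - t0

        # Forward pass.
        t0 = time.time()
        logits = model(batched_graph, feats)
        loss = F.cross_entropy(logits, labels)
        epoch_forward_time += time.time() - t0

        # Backward pass.
        t0 = time.time()
        optimizer.zero_grad()
        loss.backward()
        epoch_backward_time += time.time() - t0

        # Parameter update.
        t0 = time.time()
        optimizer.step()
        epoch_update_time += time.time() - t0

        total_loss += loss.item() * labels.size(0)
        total_correct += (logits.argmax(dim=1) == labels).sum().item()
        total_samples += labels.size(0)

    epoch_batch_time = time.time() - t_epoch
    return total_loss / total_samples, total_correct / total_samples, optimizer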
Example #2
# The optimizer / scheduler setup is assumed to mirror Example #1.
optimizer = torch.optim.Adam(model.parameters(), lr=0.01)
scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer, mode='min',
                                                       factor=0.5,
                                                       patience=25,
                                                       verbose=True)

dur = []
total_load = 0
total_fp = 0
total_bp = 0
total_up = 0
total_ot = 0
for epoch in range(1, 201):

    # Unlike Example #1, the timed region is bracketed by torch.cuda.synchronize(),
    # so pending asynchronous GPU work finishes before each time.time() reading.
    torch.cuda.synchronize()
    t1 = time.time()

    train_loss, epoch_train_acc, optimizer = train_dgl('gcn', model, optimizer,
                                                       device, train_loader)
    # gc.collect()

    torch.cuda.synchronize()
    dur.append(time.time() - t1)

    print(
        'load Time: {:.4f}, forward Time: {:.4f}, backward Time: {:.4f}, update Time: {:.4f}, batch Time: {:.4f}'
        .format(epoch_load_time, epoch_forward_time, epoch_backward_time,
                epoch_update_time, epoch_batch_time))
    # Accumulate per-stage times for epochs 51-150 only (100 measured epochs).
    if 50 < epoch < 151:
        total_load += epoch_load_time
        total_fp += epoch_forward_time
        total_bp += epoch_backward_time
        total_up += epoch_update_time
        # Remainder of the batch time not spent in forward, backward, or update.
        total_ot += epoch_batch_time - epoch_forward_time - epoch_backward_time - epoch_update_time
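
Once the loop finishes, the accumulated totals can be reduced to per-epoch averages. The snippet below is a possible reporting step, not part of the original examples: it divides each total by the 100 measured epochs (51-150) and also reports the mean wall-clock epoch time over all 200 epochs stored in dur.

measured_epochs = 100  # epochs 51-150 contribute to the totals above
print('avg load: {:.4f}s, forward: {:.4f}s, backward: {:.4f}s, '
      'update: {:.4f}s, other: {:.4f}s, epoch (all 200): {:.4f}s'.format(
          total_load / measured_epochs,
          total_fp / measured_epochs,
          total_bp / measured_epochs,
          total_up / measured_epochs,
          total_ot / measured_epochs,
          sum(dur) / len(dur)))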