Example #1
import torch
import torch.nn.functional as F
from tqdm import tqdm

def run_test(model_test, k=5):
    # Evaluate model_test on test_dataloader (defined elsewhere in the
    # script, as are remove_tensor and augment_bbox) and return the
    # average loss and top-k accuracy.
    model_test.eval()
    with torch.no_grad():
        correct = 0
        total = 0
        avg_loss = None
        for i, batch in tqdm(enumerate(test_dataloader),
                             total=len(test_dataloader)):
            x, y, info = batch
            # The dataloader yields batches of size 1; drop that leading dim.
            x = x.squeeze(0).cuda()
            y = y.squeeze(0).cuda()
            info = remove_tensor(info)

            x = augment_bbox(x, info)

            prediction = model_test(x)
            loss = F.nll_loss(F.log_softmax(prediction, dim=1), y)
            if avg_loss is None:
                avg_loss = loss
            else:
                avg_loss += loss

            # Top-k accuracy: a sample counts as correct if its true label
            # appears among the k highest-scoring classes (the top-1 variant
            # would be torch.max(prediction.data, 1)).
            _, predicted = torch.topk(prediction.data, k)
            pred = predicted.t()  # (k, batch)
            correct_num = pred.eq(y.view(1, -1).expand_as(pred))
            # reshape handles the non-contiguous layout left by t()/expand_as.
            correct_k = correct_num.reshape(-1).float().sum(0, keepdim=True)

            total += y.size(0)
            correct += correct_k.item()
        # enumerate is zero-based, so the number of batches is i + 1.
        avg_loss = avg_loss / (i + 1)
        acc = correct / total
        return avg_loss, acc
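As a sanity check on the top-k bookkeeping above, the snippet below recomputes the hit count for a tiny hand-made batch. It is a minimal sketch, not part of the original example; topk_correct is a hypothetical helper name, and nothing beyond PyTorch itself is assumed.

import torch

def topk_correct(logits, targets, k=5):
    # Indices of the k highest-scoring classes per sample: (batch, k).
    _, predicted = torch.topk(logits, k)
    pred = predicted.t()  # (k, batch)
    # True where one of the k guesses matches the label.
    hits = pred.eq(targets.view(1, -1).expand_as(pred))
    # Each label occurs at most once per column, so the sum equals the
    # number of samples whose label is in the top k.
    return hits.reshape(-1).float().sum().item()

logits = torch.tensor([[0.1, 0.9, 0.0],
                       [0.8, 0.15, 0.05]])
targets = torch.tensor([1, 2])
print(topk_correct(logits, targets, k=2))  # 1.0: only the first sample hits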
Example #2
import gc

import torch
import torch.nn.functional as F
from tqdm import tqdm

# model, train_dataloader, args, embedder_path, etc. are set up earlier
# in the script.
for iter in range(num_epoches):
    print('\nIteration:', iter)
    print(args.filename)
    correct = 0
    total = 0
    avg_loss_all = None
    avg_loss_ce = None
    for i, batch in tqdm(enumerate(train_dataloader),
                         total=len(train_dataloader)):
        gc.collect()
        x, y, info = batch
        # The dataloader yields batches of size 1; drop that leading dim.
        x = x.squeeze(0).cuda()
        y = y.squeeze(0).cuda()
        info = remove_tensor(info)

        x = augment_bbox(x, info)

        prediction = model(x)
        # log_softmax followed by nll_loss is the standard cross-entropy term.
        loss_entropy = F.nll_loss(F.log_softmax(prediction, dim=1), y)

        # Embed both the logical formula and the predicted assignment so the
        # two can later be compared by an additional loss term.
        if 'gcn' in embedder_path:
            formula_embedding = get_formula_from_image(
                info[0][0], preprocessed_annotation_train, embedder, clauses,
                converter, objs, tokenizers)
            assignment_embedding = prediction_to_assignment_embedding(
                F.softmax(prediction, dim=1), info, embedder, tokenizers, pres,
                objs)
        else:  # tree-lstm embedder