# Example no. 1
def test(epoch):
    """Run one validation pass over ``val_loader`` for the given epoch.

    Accumulates road-segmentation and angle-orientation confusion matrices,
    logs running metrics, periodically saves prediction visualisations, and
    checkpoints the model whenever the road mIoU improves.

    Args:
        epoch: Current epoch number, used for logging and output file names.

    Returns:
        Average road-segmentation loss over the validation set.
    """
    global best_accuracy
    global best_miou
    model.eval()
    test_loss_iou = 0
    test_loss_vec = 0
    # Epoch-level confusion matrices for the two tasks.
    hist = np.zeros((config["task1_classes"], config["task1_classes"]))
    hist_angles = np.zeros((config["task2_classes"], config["task2_classes"]))
    crop_size = config["val_dataset"][args.dataset]["crop_size"]
    for i, (inputsBGR, labels, vecmap_angles) in enumerate(val_loader, 0):
        # volatile=True is the legacy (pre-0.4 PyTorch) way to disable
        # autograd during evaluation.
        inputsBGR = Variable(inputsBGR.float().cuda(),
                             volatile=True,
                             requires_grad=False)

        outputs, pred_vecmaps = model(inputsBGR)
        if args.multi_scale_pred:
            # Stacked model: every stack output is scored against the base
            # target (labels[0]); the last two multi-scale heads are scored
            # against the finer targets labels[1] and labels[2].
            loss1 = road_loss(outputs[0],
                              util.to_variable(labels[0], True, False), True)
            num_stacks = model.module.num_stacks if num_gpus > 1 else model.num_stacks
            for idx in range(num_stacks - 1):
                loss1 += road_loss(outputs[idx + 1],
                                   util.to_variable(labels[0], True, False),
                                   True)
            for idx, output in enumerate(outputs[-2:]):
                loss1 += road_loss(
                    output, util.to_variable(labels[idx + 1], True, False),
                    True)

            loss2 = angle_loss(pred_vecmaps[0],
                               util.to_variable(vecmap_angles[0], True, False))
            for idx in range(num_stacks - 1):
                loss2 += angle_loss(
                    pred_vecmaps[idx + 1],
                    util.to_variable(vecmap_angles[0], True, False))
            for idx, pred_vecmap in enumerate(pred_vecmaps[-2:]):
                loss2 += angle_loss(
                    pred_vecmap,
                    util.to_variable(vecmap_angles[idx + 1], True, False))

            # Keep only the final (full-resolution) predictions for metrics.
            outputs = outputs[-1]
            pred_vecmaps = pred_vecmaps[-1]
        else:
            # BUG FIX: this branch previously scored road_loss against
            # labels[0] (coarsest scale) and, worse, passed `labels` instead
            # of `vecmap_angles` to angle_loss.  Use the full-resolution
            # targets, mirroring the multi-scale path and train().
            loss1 = road_loss(outputs,
                              util.to_variable(labels[-1], True, False), True)
            loss2 = angle_loss(
                pred_vecmaps,
                util.to_variable(vecmap_angles[-1], True, False))

        test_loss_iou += loss1.data[0]
        test_loss_vec += loss2.data[0]

        _, predicted = torch.max(outputs.data, 1)

        correctLabel = labels[-1].view(-1, crop_size, crop_size).long()
        hist += util.fast_hist(
            predicted.view(predicted.size(0), -1).cpu().numpy(),
            correctLabel.view(correctLabel.size(0), -1).cpu().numpy(),
            config["task1_classes"],
        )

        _, predicted_angle = torch.max(pred_vecmaps.data, 1)
        correct_angles = vecmap_angles[-1].view(-1, crop_size,
                                                crop_size).long()
        hist_angles += util.fast_hist(
            predicted_angle.view(predicted_angle.size(0), -1).cpu().numpy(),
            correct_angles.view(correct_angles.size(0), -1).cpu().numpy(),
            config["task2_classes"],
        )

        # Running (per-batch) metrics for the progress bar.
        p_accu, miou, road_iou, fwacc = util.performMetrics(
            train_loss_file,
            val_loss_file,
            epoch,
            hist,
            test_loss_iou / (i + 1),
            test_loss_vec / (i + 1),
            is_train=False,
        )
        p_accu_angle, miou_angle, fwacc_angle = util.performAngleMetrics(
            train_loss_angle_file,
            val_loss_angle_file,
            epoch,
            hist_angles,
            is_train=False)

        viz_util.progress_bar(
            i,
            len(val_loader),
            "Loss: %.6f | VecLoss: %.6f | road miou: %.4f%%(%.4f%%) | angle miou: %.4f%%"
            % (
                test_loss_iou / (i + 1),
                test_loss_vec / (i + 1),
                miou,
                road_iou,
                miou_angle,
            ),
        )

        # Save qualitative results every 100 batches and on the final batch.
        if i % 100 == 0 or i == len(val_loader) - 1:
            images_path = "{}/images/".format(experiment_dir)
            util.ensure_dir(images_path)
            util.savePredictedProb(
                inputsBGR.data.cpu(),
                labels[-1].cpu(),
                predicted.cpu(),
                F.softmax(outputs, dim=1).data.cpu()[:, 1, :, :],
                predicted_angle.cpu(),
                os.path.join(images_path,
                             "validate_pair_{}_{}.png".format(epoch, i)),
                norm_type=config["val_dataset"]["normalize_type"],
            )

        del inputsBGR, labels, predicted, outputs, pred_vecmaps, predicted_angle

    # Epoch-level metrics, written to the loss files.
    accuracy, miou, road_iou, fwacc = util.performMetrics(
        train_loss_file,
        val_loss_file,
        epoch,
        hist,
        test_loss_iou / len(val_loader),
        test_loss_vec / len(val_loader),
        is_train=False,
        write=True,
    )
    util.performAngleMetrics(
        train_loss_angle_file,
        val_loss_angle_file,
        epoch,
        hist_angles,
        is_train=False,
        write=True,
    )

    # Checkpoint on road-mIoU improvement.
    if miou > best_miou:
        best_accuracy = accuracy
        best_miou = miou
        util.save_checkpoint(epoch, test_loss_iou / len(val_loader), model,
                             optimizer, best_accuracy, best_miou, config,
                             experiment_dir)

    return test_loss_iou / len(val_loader)
# Example no. 2
def test(epoch):
    """Run one validation pass of the iterative-refinement model.

    For every batch the model is applied config['refinement'] times: each
    pass receives the image concatenated with the erased label and the
    previous prediction.  Metrics use the loss of the final refinement pass.
    Checkpoints the model whenever the road mIoU improves.

    Args:
        epoch: Current epoch number, used for logging and output file names.

    Returns:
        Average road-segmentation loss over the validation set.
    """
    global best_accuracy
    global best_miou
    model.eval()
    test_loss_iou = 0
    hist = np.zeros((config["task1_classes"], config["task1_classes"]))
    crop_size = config["val_dataset"][args.dataset]["crop_size"]
    # BUG FIX: the loop variable was `datas` while the body unpacked `data`,
    # raising NameError on the first batch.
    for i, data in enumerate(val_loader, 0):
        inputs, labels, erased_label = data

        # volatile=True is the legacy (pre-0.4 PyTorch) way to disable
        # autograd during evaluation.
        inputs = Variable(inputs.float().cuda(),
                          volatile=True,
                          requires_grad=False)
        erased_label = Variable(erased_label[-1].float().cuda(),
                                volatile=True,
                                requires_grad=False).unsqueeze(dim=1)
        # `temp` holds the previous prediction; seeded with the erased label.
        temp = erased_label

        for k in range(config['refinement']):
            in_ = torch.cat((inputs, erased_label, temp), dim=1)
            outputs = model(in_)
            if args.multi_scale_pred:
                loss1 = road_loss(outputs[0], labels[0].long().cuda(), False)
                num_stacks = model.module.num_stacks if num_gpus > 1 else model.num_stacks
                for idx in range(num_stacks - 1):
                    loss1 += road_loss(outputs[idx + 1],
                                       labels[0].long().cuda(), False)
                for idx, output in enumerate(outputs[-2:]):
                    loss1 += road_loss(output, labels[idx + 1].long().cuda(),
                                       False)

                # Keep only the final (full-resolution) prediction.
                outputs = outputs[-1]
            else:
                loss1 = road_loss(outputs, labels[-1].long().cuda(), False)

            # Feed the argmax prediction back in as the next refinement input.
            temp = Variable(torch.max(outputs.data, 1)[1].float(),
                            volatile=True,
                            requires_grad=False).unsqueeze(dim=1)

        test_loss_iou += loss1.data[0]

        _, predicted = torch.max(outputs.data, 1)

        correctLabel = labels[-1].view(-1, crop_size, crop_size).long()
        hist += util.fast_hist(
            predicted.view(predicted.size(0), -1).cpu().numpy(),
            correctLabel.view(correctLabel.size(0), -1).cpu().numpy(),
            config["task1_classes"],
        )

        # Running (per-batch) metrics for the progress bar; no angle task,
        # so the vector loss slot is a literal 0.
        p_accu, miou, road_iou, fwacc = util.performMetrics(
            train_loss_file,
            val_loss_file,
            epoch,
            hist,
            test_loss_iou / (i + 1),
            0,
            is_train=False,
        )

        viz_util.progress_bar(
            i,
            len(val_loader),
            "Loss: %.6f | road miou: %.4f%%(%.4f%%)" % (
                test_loss_iou / (i + 1),
                miou,
                road_iou,
            ),
        )

        # Save qualitative results every 100 batches and on the final batch.
        if i % 100 == 0 or i == len(val_loader) - 1:
            images_path = "{}/images/".format(experiment_dir)
            util.ensure_dir(images_path)
            util.savePredictedProb(
                # BUG FIX: was `inputsBGR`, a name that does not exist in this
                # function; the input tensor here is `inputs`.
                inputs.data.cpu(),
                labels[-1].cpu(),
                predicted.cpu(),
                F.softmax(outputs, dim=1).data.cpu()[:, 1, :, :],
                None,
                os.path.join(images_path,
                             "validate_pair_{}_{}.png".format(epoch, i)),
                norm_type=config["val_dataset"]["normalize_type"],
            )

        del inputs, labels, predicted, outputs

    # Epoch-level metrics, written to the loss files.
    accuracy, miou, road_iou, fwacc = util.performMetrics(
        train_loss_file,
        val_loss_file,
        epoch,
        hist,
        test_loss_iou / len(val_loader),
        0,
        is_train=False,
        write=True,
    )

    # Checkpoint on road-mIoU improvement.
    if miou > best_miou:
        best_accuracy = accuracy
        best_miou = miou
        util.save_checkpoint(epoch, test_loss_iou / len(val_loader), model,
                             optimizer, best_accuracy, best_miou, config,
                             experiment_dir)

    return test_loss_iou / len(val_loader)
# Example no. 3
def train(epoch):
    """Train the dual-task (road segmentation + orientation) model for one epoch.

    Accumulates per-task confusion matrices and running losses for logging,
    backpropagates both losses jointly, and applies the optimizer every
    config["trainer"]["iter_size"] batches (gradient accumulation).

    Args:
        epoch: Current epoch number, used only for logging.
    """
    running_seg_loss = 0
    running_angle_loss = 0
    model.train()
    optimizer.zero_grad()
    # Confusion matrices accumulated over the whole epoch.
    seg_hist = np.zeros((config["task1_classes"], config["task1_classes"]))
    angle_hist = np.zeros((config["task2_classes"], config["task2_classes"]))
    crop_size = config["train_dataset"][args.dataset]["crop_size"]
    for step, batch in enumerate(train_loader, 0):
        inputsBGR, labels, vecmap_angles = batch
        inputsBGR = Variable(inputsBGR.float().cuda())
        outputs, pred_vecmaps = model(inputsBGR)

        if args.multi_scale_pred:
            stacks = model.module.num_stacks if num_gpus > 1 else model.num_stacks
            # Every stack output is scored against the base-scale target.
            loss1 = road_loss(outputs[0], util.to_variable(labels[0]), False)
            for stack_out in outputs[1:stacks]:
                loss1 += road_loss(stack_out, util.to_variable(labels[0]),
                                   False)
            # The last two multi-scale heads use the finer targets.
            for scale, scale_out in enumerate(outputs[-2:], 1):
                loss1 += road_loss(scale_out, util.to_variable(labels[scale]),
                                   False)

            loss2 = angle_loss(pred_vecmaps[0],
                               util.to_variable(vecmap_angles[0]))
            for stack_vec in pred_vecmaps[1:stacks]:
                loss2 += angle_loss(stack_vec,
                                    util.to_variable(vecmap_angles[0]))
            for scale, scale_vec in enumerate(pred_vecmaps[-2:], 1):
                loss2 += angle_loss(scale_vec,
                                    util.to_variable(vecmap_angles[scale]))

            # Keep only the final (full-resolution) predictions for metrics.
            outputs, pred_vecmaps = outputs[-1], pred_vecmaps[-1]
        else:
            loss1 = road_loss(outputs, util.to_variable(labels[-1]), False)
            loss2 = angle_loss(pred_vecmaps,
                               util.to_variable(vecmap_angles[-1]))

        running_seg_loss += loss1.data[0]
        running_angle_loss += loss2.data[0]

        predicted = torch.max(outputs.data, 1)[1]
        gt_seg = labels[-1].view(-1, crop_size, crop_size).long()
        seg_hist += util.fast_hist(
            predicted.view(predicted.size(0), -1).cpu().numpy(),
            gt_seg.view(gt_seg.size(0), -1).cpu().numpy(),
            config["task1_classes"],
        )

        predicted_angle = torch.max(pred_vecmaps.data, 1)[1]
        gt_angles = vecmap_angles[-1].view(-1, crop_size, crop_size).long()
        angle_hist += util.fast_hist(
            predicted_angle.view(predicted_angle.size(0), -1).cpu().numpy(),
            gt_angles.view(gt_angles.size(0), -1).cpu().numpy(),
            config["task2_classes"],
        )

        # Running (per-batch) metrics for the progress bar.
        _, miou, road_iou, _ = util.performMetrics(
            train_loss_file,
            val_loss_file,
            epoch,
            seg_hist,
            running_seg_loss / (step + 1),
            running_angle_loss / (step + 1),
        )
        _, miou_angle, _ = util.performAngleMetrics(
            train_loss_angle_file, val_loss_angle_file, epoch, angle_hist)

        viz_util.progress_bar(
            step,
            len(train_loader),
            "Loss: %.6f | VecLoss: %.6f | road miou: %.4f%%(%.4f%%) | angle miou: %.4f%% "
            % (
                running_seg_loss / (step + 1),
                running_angle_loss / (step + 1),
                miou,
                road_iou,
                miou_angle,
            ),
        )

        # Backpropagate both task losses together.
        torch.autograd.backward([loss1, loss2])

        # Gradient accumulation: step every iter_size batches and at the end.
        if step % config["trainer"]["iter_size"] == 0 or step == len(
                train_loader) - 1:
            optimizer.step()
            optimizer.zero_grad()

        del (
            outputs,
            pred_vecmaps,
            predicted,
            gt_angles,
            gt_seg,
            inputsBGR,
            labels,
            vecmap_angles,
        )

    # Epoch-level metrics, written to the loss files.
    util.performMetrics(
        train_loss_file,
        val_loss_file,
        epoch,
        seg_hist,
        running_seg_loss / len(train_loader),
        running_angle_loss / len(train_loader),
        write=True,
    )
    util.performAngleMetrics(train_loss_angle_file,
                             val_loss_angle_file,
                             epoch,
                             angle_hist,
                             write=True)
# Example no. 4
def train(epoch):
    """Train the iterative-refinement road model for one epoch.

    Each batch is refined config['refinement'] times: the model receives the
    image concatenated with the erased label and its own previous prediction,
    and a backward pass runs after every refinement step.  Gradients are
    accumulated and applied every config["trainer"]["iter_size"] batches.

    Args:
        epoch: Current epoch number, used only for logging.
    """
    train_loss_iou = 0
    model.train()
    optimizer.zero_grad()
    # Confusion matrix accumulated over the whole epoch.
    hist = np.zeros((config["task1_classes"], config["task1_classes"]))
    crop_size = config["train_dataset"][args.dataset]["crop_size"]
    for i, data in enumerate(train_loader, 0):
        inputs, labels, erased_label = data
        # (Removed unused `batch_size` local.)

        inputs = Variable(inputs.float().cuda())
        erased_label = Variable(
            erased_label[-1].float().cuda()).unsqueeze(dim=1)
        # `temp` holds the previous prediction; seeded with the erased label.
        temp = erased_label

        for k in range(config['refinement']):
            in_ = torch.cat((inputs, erased_label, temp), dim=1)
            outputs = model(in_)
            if args.multi_scale_pred:
                loss1 = road_loss(outputs[0], labels[0].long().cuda(), False)
                num_stacks = model.module.num_stacks if num_gpus > 1 else model.num_stacks
                for idx in range(num_stacks - 1):
                    loss1 += road_loss(outputs[idx + 1],
                                       labels[0].long().cuda(), False)
                for idx, output in enumerate(outputs[-2:]):
                    loss1 += road_loss(output, labels[idx + 1].long().cuda(),
                                       False)

                # Keep only the final (full-resolution) prediction.
                outputs = outputs[-1]
            else:
                loss1 = road_loss(outputs, labels[-1].long().cuda(), False)

            # Accumulate gradients from every refinement step; the optimizer
            # is only stepped below, per the iter_size schedule.
            loss1.backward()
            # Feed the argmax prediction back as the next refinement input.
            temp = Variable(torch.max(outputs.data,
                                      1)[1].float()).unsqueeze(dim=1)

        # Only the loss of the last refinement step is logged.
        train_loss_iou += loss1.data[0]

        _, predicted = torch.max(outputs.data, 1)

        correctLabel = labels[-1].view(-1, crop_size, crop_size).long()
        hist += util.fast_hist(
            predicted.view(predicted.size(0), -1).cpu().numpy(),
            correctLabel.view(correctLabel.size(0), -1).cpu().numpy(),
            config["task1_classes"],
        )

        # Running (per-batch) metrics; no angle task, so the vector-loss
        # slot is a literal 0.
        p_accu, miou, road_iou, fwacc = util.performMetrics(
            train_loss_file,
            val_loss_file,
            epoch,
            hist,
            train_loss_iou / (i + 1),
            0,
        )

        viz_util.progress_bar(
            i,
            len(train_loader),
            "Loss: %.6f | road miou: %.4f%%(%.4f%%)" % (
                train_loss_iou / (i + 1),
                miou,
                road_iou,
            ),
        )

        # Gradient accumulation: step every iter_size batches and at the end.
        if i % config["trainer"]["iter_size"] == 0 or i == len(
                train_loader) - 1:
            optimizer.step()
            optimizer.zero_grad()

        del (
            outputs,
            predicted,
            correctLabel,
            inputs,
            labels,
        )

    # Epoch-level metrics, written to the loss files.
    util.performMetrics(
        train_loss_file,
        val_loss_file,
        epoch,
        hist,
        train_loss_iou / len(train_loader),
        0,
        write=True,
    )