Example #1
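The snippets below are shown without their import header. Based on the names they use, they assume roughly the following (treating scio as an alias for scipy.io is an assumption; utils is the repository's own helper module):

import time

import scipy.io as scio
import torch
from torch.autograd import Variable

import utils  # repository-local helpers: AverageMeter, accuracy, clip_gradient, ...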
def validate(test_loader, model, criterion, epoch, opt):
    """Perform validation on the validation set"""
    batch_time = utils.AverageMeter()
    losses = utils.AverageMeter()
    top1 = utils.AverageMeter()

    # switch to evaluate mode
    model.eval()

    end = time.time()
    # tested_samples = 0
    for i, (input_points, labels) in enumerate(test_loader):
        # tested_samples = tested_samples + input_points.size(0)

        if opt.cuda:
            input_points = input_points.cuda()
            labels = labels.long().cuda(non_blocking=True)  # 'async' is a reserved word in Python 3.7+
        input_points = input_points.transpose(2, 1)
        # volatile=True marked inference-only Variables in PyTorch <= 0.3;
        # on 0.4+ wrap the loop in torch.no_grad() instead (see the sketch at the end)
        input_var = Variable(input_points, volatile=True)
        target_var = Variable(labels[:, 0], volatile=True)

        # compute output
        output, _ = model(input_var)
        loss = criterion(output, target_var)

        # measure accuracy and record loss
        prec1 = utils.accuracy(output.data, target_var.data, topk=(1, ))[0]
        losses.update(loss.data[0], input_points.size(0))
        top1.update(prec1[0], input_points.size(0))

        # measure elapsed time
        batch_time.update(time.time() - end)
        end = time.time()

        if i % opt.print_freq == 0:
            print('Test: [{0}/{1}]\t'
                  'Time {batch_time.val:.3f} ({batch_time.avg:.3f})\t'
                  'Loss {loss.val:.4f} ({loss.avg:.4f})\t'
                  'Prec@1 {top1.val:.3f} ({top1.avg:.3f})'.format(
                      i,
                      len(test_loader),
                      batch_time=batch_time,
                      loss=losses,
                      top1=top1))

    print(' * Prec@1 {top1.avg:.3f}'.format(top1=top1))
    # print(tested_samples)
    return top1.avg
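Every example on this page leans on utils.AverageMeter and utils.accuracy, which are not shown here. A minimal sketch of both, modeled on the helpers in the official PyTorch ImageNet example (the repository's own versions may differ):

class AverageMeter(object):
    """Keeps the latest value and a running average of a metric."""
    def __init__(self):
        self.val = 0
        self.avg = 0
        self.sum = 0
        self.count = 0

    def update(self, val, n=1):
        self.val = val
        self.sum += val * n
        self.count += n
        self.avg = self.sum / self.count


def accuracy(output, target, topk=(1,)):
    """Computes the precision@k for the specified values of k."""
    maxk = max(topk)
    batch_size = target.size(0)

    # indices of the k highest-scoring classes per sample
    _, pred = output.topk(maxk, 1, True, True)
    pred = pred.t()
    correct = pred.eq(target.view(1, -1).expand_as(pred))

    res = []
    for k in topk:
        correct_k = correct[:k].view(-1).float().sum(0)
        res.append(correct_k.mul_(100.0 / batch_size))
    return res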
Example #2
def validate(val_loader, model, criterion, epoch, opt):
    """Perform validation on the validation set"""
    # switch to evaluate mode
    model.eval()

    top1 = utils.AverageMeter()

    for i, (input_points, _labels, segs) in enumerate(val_loader):
        # bz x 2048 x 3
        input_points = Variable(input_points, volatile=True)
        input_points = input_points.transpose(2, 1)
        _labels = _labels.long()  # this will be fed to the network
        segs = segs.long()
        labels_onehot = utils.labels_batch2one_hot_batch(
            _labels, opt.num_classes)
        segs = Variable(segs, volatile=True)
        labels_onehot = Variable(labels_onehot, volatile=True)

        if opt.cuda:
            input_points = input_points.cuda()
            segs = segs.cuda()  # must be long cuda tensor
            labels_onehot = labels_onehot.float().cuda()  # this will be fed into the network

        # forward, backward optimize
        pred, _, _ = model(input_points, labels_onehot)
        pred = pred.view(-1, opt.num_seg_classes)
        segs = segs.view(-1, 1)[:, 0]  # min is already 0
        # debug_here()
        loss = criterion(pred, segs)

        pred_choice = pred.data.max(1)[1]
        correct = pred_choice.eq(segs.data).cpu().sum()

        # normalize by the actual batch size: the last batch may be smaller than opt.batch_size
        acc = correct / float(input_points.size(0) * opt.num_points)
        top1.update(acc, input_points.size(0))

        if i % opt.print_freq == 0:
            print('[%d: %d] val loss: %f accuracy: %f' %
                  (i, len(val_loader), loss.data[0], acc))
            # print(tested_samples)
    return top1.avg
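utils.labels_batch2one_hot_batch is not shown on this page either. Judging from the call site, it expands a (bz, 1) LongTensor of class indices into a bz x num_classes one-hot matrix; a plausible sketch (the shape handling is inferred from the call above, not verified against the repository):

import torch


def labels_batch2one_hot_batch(labels, num_classes):
    # labels: LongTensor of shape (bz, 1) holding one class index per sample
    one_hot = torch.zeros(labels.size(0), num_classes)
    one_hot.scatter_(1, labels.view(-1, 1), 1)
    return one_hot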
Example #3
def validate(sketch_dataloader, shape_dataloader, model, criterion, epoch, opt):
    """
    Evaluate for one epoch on the test set.
    """
    sketch_losses = utils.AverageMeter()
    sketch_top1 = utils.AverageMeter()

    shape_losses = utils.AverageMeter()
    shape_top1 = utils.AverageMeter()

    net_whole, net_bp, net_vp, net_ap, net_cls = model
    # optim_sketch, optim_shape, optim_centers = optimizer
    crt_cls, crt_tlc, w1, w2 = criterion

    net_whole.eval()
    net_bp.eval()
    net_vp.eval()
    net_ap.eval()
    net_cls.eval()

    sketch_features = []
    sketch_scores = []
    sketch_labels = []

    shape_features = []
    shape_scores = []
    shape_labels = []

    batch_time = utils.AverageMeter()
    end = time.time()

    for i, (sketches, k_labels) in enumerate(sketch_dataloader):
        sketches_v = Variable(sketches.cuda())
        k_labels_v = Variable(k_labels.long().cuda())
        sketch_feat = net_whole(sketches_v)
        sketch_score = net_cls(sketch_feat)

        loss = crt_cls(sketch_score, k_labels_v)

        prec1 = utils.accuracy(sketch_score.data, k_labels_v.data, topk=(1,))[0]
        sketch_losses.update(loss.data[0], sketch_score.size(0)) # batchsize
        sketch_top1.update(prec1[0], sketch_score.size(0))
        sketch_features.append(sketch_feat.data.cpu())
        sketch_labels.append(k_labels)
        sketch_scores.append(sketch_score.data.cpu())

        batch_time.update(time.time() - end)
        end = time.time()

        if i % opt.print_freq == 0:
            print('Test: [{0}/{1}]\t'
                  'Time {batch_time.val:.3f} ({batch_time.avg:.3f})\t'
                  'Loss {loss.val:.4f} ({loss.avg:.4f})\t'
                  'Prec@1 {top1.val:.3f} ({top1.avg:.3f})'.format(
                      i, len(sketch_dataloader), batch_time=batch_time, loss=sketch_losses,
                      top1=sketch_top1))
    print(' *Sketch Prec@1 {top1.avg:.3f}'.format(top1=sketch_top1))

    batch_time = utils.AverageMeter()
    end = time.time()
    for i, (shapes, p_labels) in enumerate(shape_dataloader):
        shapes = shapes.view(shapes.size(0)*shapes.size(1), shapes.size(2), shapes.size(3), shapes.size(4))
        # expanding: (bz * 12) x 3 x 224 x 224
        shapes = shapes.expand(shapes.size(0), 3, shapes.size(2), shapes.size(3))

        shapes_v = Variable(shapes.cuda())
        p_labels_v = Variable(p_labels.long().cuda())

        o_bp = net_bp(shapes_v)
        o_vp = net_vp(o_bp)
        shape_feat = net_ap(o_vp)
        shape_score = net_cls(shape_feat)

        loss = crt_cls(shape_score, p_labels_v)

        prec1 = utils.accuracy(shape_score.data, p_labels_v.data, topk=(1,))[0]
        shape_losses.update(loss.data[0], shape_score.size(0)) # batchsize
        shape_top1.update(prec1[0], shape_score.size(0))
        shape_features.append(shape_feat.data.cpu())
        shape_labels.append(p_labels)
        shape_scores.append(shape_score.data.cpu())

        batch_time.update(time.time() - end)
        end = time.time()

        if i % opt.print_freq == 0:
            print('Test: [{0}/{1}]\t'
                  'Time {batch_time.val:.3f} ({batch_time.avg:.3f})\t'
                  'Loss {loss.val:.4f} ({loss.avg:.4f})\t'
                  'Prec@1 {top1.val:.3f} ({top1.avg:.3f})'.format(
                      i, len(shape_dataloader), batch_time=batch_time, loss=shape_losses,
                      top1=shape_top1))
    print(' *Shape Prec@1 {top1.avg:.3f}'.format(top1=shape_top1))

    shape_features = torch.cat(shape_features, 0).numpy()
    sketch_features = torch.cat(sketch_features, 0).numpy()

    shape_scores = torch.cat(shape_scores, 0).numpy()
    sketch_scores = torch.cat(sketch_scores, 0).numpy()

    shape_labels = torch.cat(shape_labels, 0).numpy()
    sketch_labels = torch.cat(sketch_labels, 0).numpy()

    # d = compute_distance(sketch_features.copy(), shape_features.copy(), l2=False)
    # scio.savemat('test/example.mat',{'d':d, 'feat':dataset_features, 'labels':dataset_labels})
    # AUC, mAP = map_and_auc(sketch_labels.copy(), shape_labels.copy(), d)
    # print(' * Feature AUC {0:.5}   mAP {0:.5}'.format(AUC, mAP))

    d_feat = compute_distance(sketch_features.copy(), shape_features.copy(), l2=False)
    d_feat_norm = compute_distance(sketch_features.copy(), shape_features.copy(), l2=True)
    mAP_feat = compute_map(sketch_labels.copy(), shape_labels.copy(), d_feat)
    mAP_feat_norm = compute_map(sketch_labels.copy(), shape_labels.copy(), d_feat_norm)
    print(' * Feature mAP {0:.5%}\tNorm Feature mAP {1:.5%}'.format(mAP_feat, mAP_feat_norm))


    d_score = compute_distance(sketch_scores.copy(), shape_scores.copy(), l2=False)
    mAP_score = compute_map(sketch_labels.copy(), shape_labels.copy(), d_score)
    d_score_norm = compute_distance(sketch_scores.copy(), shape_scores.copy(), l2=True)
    mAP_score_norm = compute_map(sketch_labels.copy(), shape_labels.copy(), d_score_norm)
    if opt.sf:
        shape_paths = [img[0] for img in shape_dataloader.dataset.shape_target_path_list]
        sketch_paths = [img[0] for img in sketch_dataloader.dataset.sketch_target_path_list]
        scio.savemat('{}/test_feat_temp.mat'.format(opt.checkpoint_folder), {
            'score_dist': d_score, 'score_dist_norm': d_score_norm,
            'feat_dist': d_feat, 'feat_dist_norm': d_feat_norm,
            'sketch_features': sketch_features, 'sketch_labels': sketch_labels,
            'sketch_scores': sketch_scores, 'shape_features': shape_features,
            'shape_labels': shape_labels, 'sketch_paths': sketch_paths,
            'shape_paths': shape_paths})
    print(' * Score mAP {0:.5%}\tNorm Score mAP {1:.5%}'.format(mAP_score, mAP_score_norm))
    return [sketch_top1.avg, shape_top1.avg, mAP_feat, mAP_feat_norm, mAP_score, mAP_score_norm]
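compute_distance and compute_map come from elsewhere in the repository and are not reproduced on this page. Purely as an illustration of the interface, here is a sketch of a pairwise-distance routine consistent with the calls above, where l2=True is taken to mean the feature rows are L2-normalized before computing Euclidean distances (the authors' actual metric may differ):

import numpy as np


def compute_distance(query, gallery, l2=False):
    # query: (n_sketch, d) features; gallery: (n_shape, d) features
    if l2:
        query = query / np.linalg.norm(query, axis=1, keepdims=True)
        gallery = gallery / np.linalg.norm(gallery, axis=1, keepdims=True)
    # squared Euclidean distance via ||a - b||^2 = ||a||^2 + ||b||^2 - 2 a.b
    d2 = ((query ** 2).sum(1)[:, None]
          + (gallery ** 2).sum(1)[None, :]
          - 2.0 * query.dot(gallery.T))
    return np.sqrt(np.maximum(d2, 0.0))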
Example #4
def train(sketch_dataloader, shape_dataloader, model, criterion, optimizer, epoch, opt):
    """
    train for one epoch on the training set
    """
    batch_time = utils.AverageMeter()
    losses = utils.AverageMeter()
    top1 = utils.AverageMeter()
    tpl_losses = utils.AverageMeter()

    # training mode
    net_whole, net_bp, net_vp, net_ap, net_cls = model
    optim_sketch, optim_shape, optim_centers = optimizer
    crt_cls, crt_tlc, w1, w2 = criterion

    net_whole.train()
    net_bp.train()
    net_vp.train()
    net_ap.train()
    net_cls.train()

    end = time.time()
    # debug_here() 
    for i, ((sketches, k_labels), (shapes, p_labels)) in enumerate(zip(sketch_dataloader, shape_dataloader)):

        shapes = shapes.view(shapes.size(0)*shapes.size(1), shapes.size(2), shapes.size(3), shapes.size(4))

        # expanding: (bz * 12) x 3 x 224 x 224
        shapes = shapes.expand(shapes.size(0), 3, shapes.size(2), shapes.size(3))

        shapes_v = Variable(shapes.cuda())
        p_labels_v = Variable(p_labels.long().cuda())

        sketches_v = Variable(sketches.cuda())
        k_labels_v = Variable(k_labels.long().cuda())


        o_bp = net_bp(shapes_v)
        o_vp = net_vp(o_bp)
        shape_feat = net_ap(o_vp)
        sketch_feat = net_whole(sketches_v)
        feat = torch.cat([shape_feat, sketch_feat])
        target = torch.cat([p_labels_v, k_labels_v])
        score = net_cls(feat) 
        
        cls_loss = crt_cls(score, target)
        tpl_loss, _ = crt_tlc(score, target)
        # tpl_loss, _ = crt_tlc(feat, target)

        loss = w1 * cls_loss + w2 * tpl_loss

        ## measure accuracy
        prec1 = utils.accuracy(score.data, target.data, topk=(1,))[0]
        losses.update(cls_loss.data[0], score.size(0)) # batchsize
        tpl_losses.update(tpl_loss.data[0], score.size(0))
        top1.update(prec1[0], score.size(0))

        ## backward
        optim_sketch.zero_grad()
        optim_shape.zero_grad()
        optim_centers.zero_grad()

        loss.backward()
        utils.clip_gradient(optim_sketch, opt.gradient_clip)
        utils.clip_gradient(optim_shape, opt.gradient_clip)
        utils.clip_gradient(optim_centers, opt.gradient_clip)
        
        optim_sketch.step()
        optim_shape.step()
        optim_centers.step()

        # measure elapsed time
        batch_time.update(time.time() - end)
        end = time.time()
        if i % opt.print_freq == 0:
            print('Epoch: [{0}][{1}/{2}]\t'
                'Time {batch_time.val:.3f} ({batch_time.avg:.3f})\t'
                'Loss {loss.val:.4f} ({loss.avg:.4f})\t'
                'Prec@1 {top1.val:.3f} ({top1.avg:.3f})\t'
                'Trploss {triplet.val:.4f}({triplet.avg:.3f})'.format(
                epoch, i, len(sketch_dataloader), batch_time=batch_time,
                loss=losses, top1=top1, triplet=tpl_losses))
            # print('triplet loss: ', tpl_center_loss.data[0])
    print(' * Train Prec@1 {top1.avg:.3f}'.format(top1=top1))
    return top1.avg
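utils.clip_gradient, called in both training loops, is also external. A common implementation matching this call signature clamps every gradient element-wise across all parameter groups (again a sketch, not the repository's verified code):

def clip_gradient(optimizer, grad_clip):
    # clamp each gradient element to the range [-grad_clip, grad_clip]
    for group in optimizer.param_groups:
        for param in group['params']:
            if param.grad is not None:
                param.grad.data.clamp_(-grad_clip, grad_clip)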
Example #5
def train(train_loader, model, criterion, optimizer, epoch, opt):
    """
    train for one epoch on the training set
    """
    batch_time = utils.AverageMeter()
    losses = utils.AverageMeter()
    top1 = utils.AverageMeter()

    # training mode
    model.train()

    end = time.time()
    for i, (input_points, labels) in enumerate(train_loader):
        # bz x 2048 x 3
        input_points = Variable(input_points)
        input_points = input_points.transpose(2, 1)
        labels = Variable(labels[:, 0])

        # print(points.size())
        # print(labels.size())
        # shift data to GPU
        if opt.cuda:
            input_points = input_points.cuda()
            labels = labels.long().cuda()  # must be long cuda tensor

        # forward, backward optimize
        output, _ = model(input_points)
        # debug_here()
        loss = criterion(output, labels)
        ##############################
        # measure accuracy
        ##############################
        prec1 = utils.accuracy(output.data, labels.data, topk=(1, ))[0]
        losses.update(loss.data[0], input_points.size(0))
        top1.update(prec1[0], input_points.size(0))

        ##############################
        # compute gradient and do sgd
        ##############################
        optimizer.zero_grad()
        loss.backward()
        ##############################
        # gradient clip stuff
        ##############################
        utils.clip_gradient(optimizer, opt.gradient_clip)

        optimizer.step()

        # measure elapsed time
        batch_time.update(time.time() - end)
        end = time.time()
        if i % opt.print_freq == 0:
            print('Epoch: [{0}][{1}/{2}]\t'
                  'Time {batch_time.val:.3f} ({batch_time.avg:.3f})\t'
                  'Loss {loss.val:.4f} ({loss.avg:.4f})\t'
                  'Prec@1 {top1.val:.3f} ({top1.avg:.3f})'.format(
                      epoch,
                      i,
                      len(train_loader),
                      batch_time=batch_time,
                      loss=losses,
                      top1=top1))
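All five examples use the pre-0.4 PyTorch API: Variable, volatile=True, loss.data[0], and .cuda(async=...). On PyTorch 0.4 and later the same evaluation loop is written with plain tensors and torch.no_grad(); a minimal sketch of Example #1 in the modern style, reusing the AverageMeter and accuracy helpers sketched above:

import torch


def validate_modern(test_loader, model, criterion, device):
    model.eval()
    losses = AverageMeter()
    top1 = AverageMeter()
    # torch.no_grad() replaces volatile=True: no autograd graph is built
    with torch.no_grad():
        for input_points, labels in test_loader:
            input_points = input_points.transpose(2, 1).to(device)
            labels = labels[:, 0].long().to(device)
            output, _ = model(input_points)
            loss = criterion(output, labels)
            losses.update(loss.item(), input_points.size(0))  # .item() replaces .data[0]
            prec1 = accuracy(output, labels, topk=(1,))[0]
            top1.update(prec1.item(), input_points.size(0))
    return top1.avg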