Example #1
0
def valid(model, valid_loader, softmax):
    """Validate `model` on `valid_loader`.

    Returns the mean angular error in degrees (batch-averaged) for each of
    the three predicted vectors: front, right and up.

    NOTE(review): relies on module-level ``args``, ``utils``, ``torch`` and
    ``np``; all tensors are moved to GPU 0 — confirm device assumptions.
    """
    def _mean_degree_error(pred, label):
        # Angle between predicted and ground-truth vectors, averaged over the batch.
        cos_value = utils.vector_cos(pred, label)
        return torch.mean(torch.acos(cos_value) * 180 / np.pi)

    err_f = 0.
    err_r = 0.
    err_u = 0.
    batch_count = 0.
    with torch.no_grad():
        for (valid_img, cls_label_f, cls_label_r, cls_label_u,
             vector_label_f, vector_label_r, vector_label_u,
             _) in valid_loader:
            valid_img = valid_img.cuda(0)

            vector_label_f = vector_label_f.cuda(0)
            vector_label_r = vector_label_r.cuda(0)
            vector_label_u = vector_label_u.cuda(0)

            # Classification logits for the x/y/z bins of each vector.
            (x_f, y_f, z_f,
             x_r, y_r, z_r,
             x_u, y_u, z_u) = model(valid_img)

            # Convert classification outputs back to continuous unit vectors.
            _, _, _, pred_f = utils.classify2vector(
                x_f, y_f, z_f, softmax, args.num_classes)
            _, _, _, pred_r = utils.classify2vector(
                x_r, y_r, z_r, softmax, args.num_classes)
            _, _, _, pred_u = utils.classify2vector(
                x_u, y_u, z_u, softmax, args.num_classes)

            # Accumulate per-batch mean angular errors.
            err_f += _mean_degree_error(pred_f, vector_label_f)
            err_r += _mean_degree_error(pred_r, vector_label_r)
            err_u += _mean_degree_error(pred_u, vector_label_u)
            batch_count += 1.
    return err_f / batch_count, err_r / batch_count, err_u / batch_count
def valid(model, valid_loader, softmax):
    """Validate `model` on `valid_loader` and return the mean angular error
    (degrees, batch-averaged) between predicted and ground-truth vectors.

    NOTE(review): relies on module-level ``args``, ``utils``, ``torch`` and
    ``np``; tensors are moved to GPU 0.
    """
    total_error = 0.
    batch_count = 0.
    with torch.no_grad():
        for valid_img, cls_label, vector_label, _ in valid_loader:
            valid_img = valid_img.cuda(0)
            vector_label = vector_label.cuda(0)

            # Classification logits for the x/y/z bins.
            x_pred, y_pred, z_pred = model(valid_img)

            # Recover a continuous vector from the classification output.
            _, _, _, vector_pred = utils.classify2vector(
                x_pred, y_pred, z_pred, softmax, args.num_classes)

            # Batch-mean angle between prediction and ground truth, in degrees.
            cos_value = utils.vector_cos(vector_pred, vector_label)
            total_error += torch.mean(torch.acos(cos_value) * 180 / np.pi)
            batch_count += 1.
    return total_error / batch_count
Example #3
0
def valid(model, valid_loader, softmax, num_classes):
    """Validate on test images.

    Returns:
        Mean angle errors (degrees, batch-averaged) between the three
        predicted vectors and their ground-truth vectors.

    NOTE(review): relies on module-level ``utils``, ``torch`` and ``np``;
    tensors are moved to GPU 0.
    """
    def _mean_degree_error(pred, target):
        # Batch-mean angle between predicted and ground-truth vectors.
        cos_value = utils.vector_cos(pred, target)
        return torch.mean(torch.acos(cos_value) * 180 / np.pi)

    err_v1 = 0.0
    err_v2 = 0.0
    err_v3 = 0.0
    batches = 0.0
    model.eval()
    with torch.no_grad():
        for (valid_img, cls_v1, cls_v2, cls_v3,
             reg_v1, reg_v2, reg_v3, _, _, _, _) in valid_loader:
            valid_img = valid_img.cuda(0).float()

            reg_v1 = reg_v1.cuda(0)
            reg_v2 = reg_v2.cuda(0)
            reg_v3 = reg_v3.cuda(0)

            # Classification logits for the x/y/z bins of each vector.
            (x_v1, y_v1, z_v1,
             x_v2, y_v2, z_v2,
             x_v3, y_v3, z_v3) = model(valid_img)

            # Convert classification outputs back to continuous vectors.
            _, _, _, pred_v1 = utils.classify2vector(
                x_v1, y_v1, z_v1, softmax, num_classes)
            _, _, _, pred_v2 = utils.classify2vector(
                x_v2, y_v2, z_v2, softmax, num_classes)
            _, _, _, pred_v3 = utils.classify2vector(
                x_v3, y_v3, z_v3, softmax, num_classes)

            err_v1 += _mean_degree_error(pred_v1, reg_v1)
            err_v2 += _mean_degree_error(pred_v2, reg_v2)
            err_v3 += _mean_degree_error(pred_v3, reg_v3)
            batches += 1.0

    return err_v1 / batches, err_v2 / batches, err_v3 / batches
Example #4
0
def test(model, test_loader, softmax, args):
    """Evaluate `model` on `test_loader` and print the mean degree error and
    the score (fraction of samples with error below `args.degree_error_limit`).

    Optionally dumps a per-image error analysis (`args.analysis`), a score
    curve (`args.collect_score`), and rendered pose visualizations
    (`args.show_front`).

    NOTE(review): relies on module-level ``utils``, ``torch``, ``np``,
    ``tqdm``, ``os`` and ``draw_attention_vector``; tensors go to GPU 0.
    """
    # BUGFIX: loss_dict was previously created only under `args.analysis`,
    # so `args.collect_score` without `args.analysis` raised NameError.
    loss_dict = {'img_name': list(), 'angles': list(), 'degree_error': list()}
    if args.analysis:
        utils.mkdir(os.path.join(args.save_dir, 'analysis'))
    error = 0.0
    total = 0.0
    score = 0.0
    for i, (images, classify_label, vector_label, angle_label, pt2d, names) in enumerate(tqdm.tqdm(test_loader)):
        with torch.no_grad():
            images = images.cuda(0)
            vector_label = vector_label.cuda(0)

            # get x,y,z cls predictions
            x_cls_pred, y_cls_pred, z_cls_pred = model(images)

            # get prediction vector (continuous value from classify result)
            _, _, _, pred_vector = utils.classify2vector(x_cls_pred, y_cls_pred, z_cls_pred, softmax, args.num_classes)

            # Per-sample angular error in degrees.
            cos_value = utils.vector_cos(pred_vector, vector_label)
            degrees_error = torch.acos(cos_value) * 180 / np.pi

            # save euler angle and degrees error to loss_dict
            if args.analysis:
                for k in range(len(angle_label)):
                    loss_dict['img_name'].append(names[k])
                    loss_dict['angles'].append(angle_label[k].tolist())  # pitch,yaw,roll
                    loss_dict['degree_error'].append(float(degrees_error[k]))

            # collect error and within-limit score
            error += torch.sum(degrees_error)
            score += torch.sum(utils.degress_score(cos_value, args.degree_error_limit))

            total += vector_label.size(0)

            # Save each image in batch with its pose vectors drawn.
            if args.show_front:
                utils.mkdir(os.path.join(args.save_dir, 'show_front'))
                for j in range(vector_label.size(0)):
                    draw_attention_vector(vector_label[j].cpu().tolist(),
                                          angle_label[j].cpu().tolist(),
                                          pred_vector[j].cpu().tolist(),
                                          names[j],
                                          pt2d[j],
                                          args)

    avg_error = error / total
    total_score = score / total
    print('Average degree Error:%.4f | score with error<10º:%.4f' % (avg_error.item(), total_score.item()))

    # save analysis of loss distribution
    if args.analysis:
        print('analysis result')
        utils.show_loss_distribute(loss_dict, os.path.join(args.save_dir, 'analysis'), os.path.basename(args.snapshot).split('.')[0])

    # save collect score curve
    if args.collect_score:
        print("analysis collect score")
        utils.collect_score(loss_dict, os.path.join(args.save_dir, "collect_score"))
Example #5
0
def similarity(articles, num_top_term=0):
    """Compute pairwise cosine similarity between articles.

    Builds a term vocabulary from each article's top TF-IDF terms, then
    computes cosine similarity over term-frequency vectors for each
    unordered pair of distinct articles.

    Args:
        articles: mapping of filename -> article object exposing
            ``filename`` and ``tf_dict`` (term -> frequency).
        num_top_term: number of top TF-IDF terms taken per article.

    Returns:
        Nested dict: sim[filename_a][filename_b] = cosine similarity.
        Each pair appears once (one direction only).

    NOTE(review): relies on module-level ``tf_idf``, ``idf_dict``,
    ``dict_top_by_value``, ``vector_cos`` and ``defaultdict``.
    """
    # Gather terms to be used to calculate cosine similarity
    terms = set()
    for article in articles.values():
        tfidf_dict = tf_idf(article.tf_dict, idf_dict)
        top_list = dict_top_by_value(tfidf_dict, num_top_term)
        terms.update(top_list)

    # Calculate cosine similarity
    _sim_matrix = defaultdict(dict)
    # BUGFIX: dict.keys() returns a view in Python 3, which has no .remove();
    # materialize it into a list so each pair is visited exactly once.
    other = list(articles.keys())
    for a1 in articles.values():
        other.remove(a1.filename)
        for filename in other:
            a2 = articles[filename]
            # Dense term-frequency vectors over the shared vocabulary.
            a1v = [a1.tf_dict.get(term, 0) for term in terms]
            a2v = [a2.tf_dict.get(term, 0) for term in terms]
            _sim_matrix[a1.filename][a2.filename] = vector_cos(a1v, a2v)
    return _sim_matrix
Example #6
0
def test(model, test_loader, softmax, args):
    """Evaluate `model` (front/right/up vector heads) on `test_loader`.

    Records per-image degree errors for the three vectors, optionally writes
    large-error image names to a txt file (`args.write_error`), dumps the
    error dict to a pickle, and can render predictions (`args.show_front`).

    NOTE(review): relies on module-level ``utils``, ``torch``, ``np``,
    ``tqdm``, ``os``, ``pickle`` and ``draw_attention_vector``; tensors
    go to GPU 0.
    """
    # BUGFIX: loss_dict was only defined under `args.analysis` but the pickle
    # dump below (and `args.collect_score`) used it unconditionally; define
    # it always so those paths cannot raise NameError.
    loss_dict = {
        'img_name': list(),
        'degree_error_f': list(),
        'degree_error_r': list(),
        'degree_error_u': list()
    }
    if args.analysis:
        utils.mkdir(os.path.join(args.save_dir, 'analysis'))

    if args.write_error:
        error_5 = {'img_name': list(), 'degree_error': list()}

    for i, (images, cls_label_f, cls_label_r, cls_label_u, vector_label_f,
            vector_label_r, vector_label_u,
            names) in enumerate(tqdm.tqdm(test_loader)):
        with torch.no_grad():
            images = images.cuda(0)

            vector_label_f = vector_label_f.cuda(0)
            vector_label_r = vector_label_r.cuda(0)
            vector_label_u = vector_label_u.cuda(0)

            # get x,y,z cls predictions for each of the three vectors
            x_cls_pred_f, y_cls_pred_f, z_cls_pred_f, x_cls_pred_r, y_cls_pred_r, z_cls_pred_r, x_cls_pred_u, y_cls_pred_u, z_cls_pred_u = model(
                images)

            # get prediction vectors (continuous value from classify result)
            _, _, _, pred_vector_f = utils.classify2vector(
                x_cls_pred_f,
                y_cls_pred_f,
                z_cls_pred_f,
                softmax,
                args.num_classes,
            )

            _, _, _, pred_vector_r = utils.classify2vector(
                x_cls_pred_r,
                y_cls_pred_r,
                z_cls_pred_r,
                softmax,
                args.num_classes,
            )

            _, _, _, pred_vector_u = utils.classify2vector(
                x_cls_pred_u,
                y_cls_pred_u,
                z_cls_pred_u,
                softmax,
                args.num_classes,
            )

            # Per-sample angular errors in degrees.
            cos_value_f = utils.vector_cos(pred_vector_f, vector_label_f)
            degrees_error_f = torch.acos(cos_value_f) * 180 / np.pi

            cos_value_r = utils.vector_cos(pred_vector_r, vector_label_r)
            degrees_error_r = torch.acos(cos_value_r) * 180 / np.pi

            cos_value_u = utils.vector_cos(pred_vector_u, vector_label_u)
            degrees_error_u = torch.acos(cos_value_u) * 180 / np.pi

            if args.write_error:
                for k in range(len(names)):
                    # BUGFIX: original referenced undefined `degrees_error`;
                    # the output file is "front_error_10.txt", so use the
                    # front-vector error here.
                    if degrees_error_f[k] > 10.0:
                        error_5['img_name'].append(names[k])
                        error_5['degree_error'].append(
                            float(degrees_error_f[k]))

            # save per-image degree errors to loss_dict
            if args.analysis:
                for k in range(len(names)):
                    loss_dict['img_name'].append(names[k])
                    loss_dict['degree_error_f'].append(
                        float(degrees_error_f[k]))
                    loss_dict['degree_error_r'].append(
                        float(degrees_error_r[k]))
                    loss_dict['degree_error_u'].append(
                        float(degrees_error_u[k]))

            # Save each image in batch with predicted pose vectors drawn.
            if args.show_front:
                utils.mkdir(os.path.join(args.save_dir, 'show_front'))
                # BUGFIX: original referenced undefined `vector_label` and
                # `pred_vector`; draw the front vector (matching the
                # 'show_front' option).
                for j in range(vector_label_f.size(0)):
                    draw_attention_vector(vector_label_f[j].cpu().tolist(),
                                          pred_vector_f[j].cpu().tolist(),
                                          names[j], args)

    # save loss dict
    with open('loss.pickle', 'wb') as handle:
        pickle.dump(loss_dict, handle, protocol=pickle.HIGHEST_PROTOCOL)

    print("done saving loss dict.")

    if args.write_error:
        print("Writing error to local txt file.")
        with open("front_error_10.txt", 'w') as f:
            for i in range(len(error_5["img_name"])):
                f.write(error_5["img_name"][i] + "," +
                        str(error_5["degree_error"][i]) + '\n')
        print("Done writing.")

    # save analysis of loss distribution
    if args.analysis:
        print('analysis result')
        utils.show_loss_distribute(
            loss_dict, os.path.join(args.save_dir, 'analysis'),
            os.path.basename(args.snapshot).split('.')[0])

    # save collect score curve
    if args.collect_score:
        print("analysis collect score")
        utils.collect_score(loss_dict,
                            os.path.join(args.save_dir, "collect_score"))
Example #7
0
def test(model, test_loader, softmax, args):
    """Evaluate `model` (left/down/front vector heads) on `test_loader` and
    print the mean degree error for each of the three vectors.

    Optionally writes per-image predicted vectors to ./BIWI_results/
    (`args.write_vector`), writes large-error image names to a txt file
    (`args.write_error`), and renders predictions (`args.show_front`).

    NOTE(review): relies on module-level ``utils``, ``torch``, ``np``,
    ``tqdm``, ``os`` and ``draw_attention_vector``; tensors go to GPU 0.
    """
    # BUGFIX: loss_dict was only defined under `args.analysis`, but the
    # `args.collect_score` path below uses it unconditionally; define it
    # always so that path cannot raise NameError.
    loss_dict = {
        'img_name': list(),
        'degree_error_f': list(),
        'degree_error_r': list(),
        'degree_error_u': list()
    }
    if args.analysis:
        utils.mkdir(os.path.join(args.save_dir, 'analysis'))

    if args.write_error:
        error_5 = {'img_name': list(), 'degree_error': list()}

    l_total_err = 0.0
    d_total_err = 0.0
    f_total_err = 0.0

    total = 0.0
    for i, (images, cls_label_f, cls_label_r, cls_label_u, vector_label_f,
            vector_label_r, vector_label_u,
            names) in enumerate(tqdm.tqdm(test_loader)):
        with torch.no_grad():
            images = images.cuda(0)

            vector_label_f = vector_label_f.cuda(0)
            vector_label_r = vector_label_r.cuda(0)
            vector_label_u = vector_label_u.cuda(0)

            # get x,y,z cls predictions for each of the three vectors
            x_cls_pred_f, y_cls_pred_f, z_cls_pred_f, x_cls_pred_r, y_cls_pred_r, z_cls_pred_r, x_cls_pred_u, y_cls_pred_u, z_cls_pred_u = model(
                images)

            # get prediction vectors (continuous value from classify result)
            _, _, _, pred_vector_f = utils.classify2vector(
                x_cls_pred_f,
                y_cls_pred_f,
                z_cls_pred_f,
                softmax,
                args.num_classes,
            )

            _, _, _, pred_vector_r = utils.classify2vector(
                x_cls_pred_r,
                y_cls_pred_r,
                z_cls_pred_r,
                softmax,
                args.num_classes,
            )

            _, _, _, pred_vector_u = utils.classify2vector(
                x_cls_pred_u,
                y_cls_pred_u,
                z_cls_pred_u,
                softmax,
                args.num_classes,
            )

            # Per-sample angular errors in degrees.
            cos_value_f = utils.vector_cos(pred_vector_f, vector_label_f)
            degrees_error_f = torch.acos(cos_value_f) * 180 / np.pi

            cos_value_r = utils.vector_cos(pred_vector_r, vector_label_r)
            degrees_error_r = torch.acos(cos_value_r) * 180 / np.pi

            cos_value_u = utils.vector_cos(pred_vector_u, vector_label_u)
            degrees_error_u = torch.acos(cos_value_u) * 180 / np.pi

            # Accumulate batch-mean errors for the final report.
            l_total_err += torch.mean(degrees_error_f)
            d_total_err += torch.mean(degrees_error_r)
            f_total_err += torch.mean(degrees_error_u)

            total += 1.0

            if args.write_vector:
                # One txt file per image: three lines, one vector per line.
                for k in range(len(names)):
                    basename = os.path.basename(
                        names[k]).split(".")[0] + ".txt"
                    with open("./BIWI_results/" + basename, 'w') as f:
                        f.write(
                            str(float(pred_vector_f[k][0])) + " " +
                            str(float(pred_vector_f[k][1])) + " " +
                            str(float(pred_vector_f[k][2])) + '\n')
                        f.write(
                            str(float(pred_vector_r[k][0])) + " " +
                            str(float(pred_vector_r[k][1])) + " " +
                            str(float(pred_vector_r[k][2])) + '\n')
                        f.write(
                            str(float(pred_vector_u[k][0])) + " " +
                            str(float(pred_vector_u[k][1])) + " " +
                            str(float(pred_vector_u[k][2])))

            if args.write_error:
                for k in range(len(names)):
                    # BUGFIX: original referenced undefined `degrees_error`;
                    # the output file is "front_error_10.txt", so use the
                    # front-vector error here.
                    if degrees_error_f[k] > 10.0:
                        error_5['img_name'].append(names[k])
                        error_5['degree_error'].append(
                            float(degrees_error_f[k]))

            # save per-image degree errors to loss_dict
            if args.analysis:
                for k in range(len(names)):
                    loss_dict['img_name'].append(names[k])
                    loss_dict['degree_error_f'].append(
                        float(degrees_error_f[k]))
                    loss_dict['degree_error_r'].append(
                        float(degrees_error_r[k]))
                    loss_dict['degree_error_u'].append(
                        float(degrees_error_u[k]))

            # Save each image in batch with predicted pose vectors drawn.
            if args.show_front:
                utils.mkdir(os.path.join(args.save_dir, 'show_front'))
                # BUGFIX: original referenced undefined `vector_label` and
                # `pred_vector`; draw the front vector (matching the
                # 'show_front' option).
                for j in range(vector_label_f.size(0)):
                    draw_attention_vector(vector_label_f[j].cpu().tolist(),
                                          pred_vector_f[j].cpu().tolist(),
                                          names[j], args)

    print("Mean degree error for each vector:")
    print("Left Vector:", l_total_err.item() / total)
    print("Down Vector:", d_total_err.item() / total)
    print("Front Vector:", f_total_err.item() / total)

    if args.write_error:
        print("Writing error to local txt file.")
        with open("front_error_10.txt", 'w') as f:
            for i in range(len(error_5["img_name"])):
                f.write(error_5["img_name"][i] + "," +
                        str(error_5["degree_error"][i]) + '\n')
        print("Done writing.")

    # save analysis of loss distribution (plot call intentionally disabled)
    if args.analysis:
        print('analysis result')

    # save collect score curve
    if args.collect_score:
        print("analysis collect score")
        utils.collect_score(loss_dict,
                            os.path.join(args.save_dir, "collect_score"))