Example #1
def load_and_enqueue(sess, enqueue_op, pointgrid_ph, cat_label_ph,
                     seg_label_ph):
    for epoch in range(1000 * TRAINING_EPOCHES):
        train_file_idx = np.arange(0, len(TRAINING_FILE_LIST))
        np.random.shuffle(train_file_idx)
        for loop in range(0, len(TRAINING_FILE_LIST)):
            # mat_content = scipy.io.loadmat('../data/ShapeNet/train/' + TRAINING_FILE_LIST[train_file_idx[loop]] + '.mat')
            # pc = mat_content['points']
            # labels = np.squeeze(mat_content['labels'])
            # category = mat_content['category'][0][0]
            cur_train_filename = os.path.join(
                hdf5_data_dir, TRAINING_FILE_LIST[train_file_idx[loop]])
            print('Loading train file ' + cur_train_filename)

            cur_data, cur_labels, cur_seg = provider.loadDataFile_with_seg(
                cur_train_filename)
            cur_data, cur_labels, order = provider.shuffle_data(
                cur_data, np.squeeze(cur_labels))
            cur_seg = cur_seg[order, ...]

            for pc_idx in range(0, len(cur_data)):
                pc = cur_data[pc_idx]
                category = cur_labels[pc_idx]
                labels = cur_seg[pc_idx]

                pc = model.rotate_pc(pc)
                cat_label = model.integer_label_to_one_hot_label(category)
                seg_label = model.integer_label_to_one_hot_label(labels)
                pointgrid, pointgrid_label, _ = model.pc2voxel(pc, seg_label)
                sess.run(enqueue_op,
                         feed_dict={
                             pointgrid_ph: pointgrid,
                             cat_label_ph: cat_label,
                             seg_label_ph: pointgrid_label
                         })
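
A note on context: load_and_enqueue only works together with a TensorFlow 1.x input queue built elsewhere in the training script. Below is a minimal sketch of that wiring; the grid sizes, batch size, queue capacity, and session setup are assumptions for illustration, not values taken from the example above.

import threading

import tensorflow as tf

# Assumed sizes; the real values come from the model definition.
N, K, NUM_CATEGORY, NUM_PART_SEG, BATCH_SIZE = 16, 4, 16, 50, 32

pointgrid_ph = tf.placeholder(tf.float32, shape=(N, N, N, K, 3))
cat_label_ph = tf.placeholder(tf.float32, shape=(NUM_CATEGORY,))
seg_label_ph = tf.placeholder(tf.float32, shape=(N, N, N, K + 1, NUM_PART_SEG))

# Bounded FIFO queue: the loader thread fills it, the training loop drains it.
queue = tf.FIFOQueue(capacity=64,
                     dtypes=[tf.float32, tf.float32, tf.float32],
                     shapes=[(N, N, N, K, 3),
                             (NUM_CATEGORY,),
                             (N, N, N, K + 1, NUM_PART_SEG)])
enqueue_op = queue.enqueue([pointgrid_ph, cat_label_ph, seg_label_ph])
pointgrid_batch, cat_label_batch, seg_label_batch = queue.dequeue_many(BATCH_SIZE)

sess = tf.Session()
loader = threading.Thread(target=load_and_enqueue,
                          args=(sess, enqueue_op, pointgrid_ph, cat_label_ph, seg_label_ph))
loader.daemon = True  # let the loader thread die with the main process
loader.start()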
Example #2
        def test_one_epoch(epoch_num):
            is_training = False
            total_loss_acc = 0.0
            seg_loss_acc = 0.0
            gt_classes = [0 for _ in range(model.SEG_PART)]
            positive_classes = [0 for _ in range(model.SEG_PART)]
            true_positive_classes = [0 for _ in range(model.SEG_PART)]
            for i in range(test_data.shape[0]):
                pc = np.squeeze(test_data[i, :, :])
                labels = np.squeeze(test_label[i, :]).astype(int)
                seg_label = model.integer_label_to_one_hot_label(labels)
                pointgrid, pointgrid_label, index = model.pc2voxel(
                    pc, seg_label)
                feed_dict = {
                    is_training_ph: is_training,
                    pointgrid_ph: pointgrid,
                    seg_label_ph: pointgrid_label
                }
                total_loss_val, seg_loss_val, pred_seg_val = sess.run(
                    [total_loss, seg_loss, pred_seg], feed_dict=feed_dict)
                total_loss_acc += total_loss_val
                seg_loss_acc += seg_loss_val

                pred_seg_val = pred_seg_val[0, :, :, :, :, :]
                pred_point_label = model.populateOneHotSegLabel(
                    pc, pred_seg_val, index)
                for j in range(pred_point_label.shape[0]):
                    gt_l = int(labels[j])
                    pred_l = int(pred_point_label[j])
                    gt_classes[gt_l - 1] += 1
                    positive_classes[pred_l - 1] += 1
                    true_positive_classes[gt_l - 1] += int(gt_l == pred_l)

            printout(flog, 'gt_l count:{}'.format(gt_classes))
            printout(flog,
                     'positive_classes count:{}'.format(positive_classes))
            printout(
                flog,
                'true_positive_classes count:{}'.format(true_positive_classes))

            iou_list = []
            for i in range(model.SEG_PART):
                iou = true_positive_classes[i] / float(
                    gt_classes[i] + positive_classes[i] -
                    true_positive_classes[i])
                iou_list.append(iou)
            printout(flog, 'IOU:{}'.format(iou_list))
            printout(
                flog, 'ACC:{}'.format(
                    sum(true_positive_classes) / float(sum(positive_classes))))
            printout(flog,
                     'mIOU:{}'.format(sum(iou_list) / float(model.SEG_PART)))
            printout(
                flog, '\tMean Total Loss: %f' %
                (total_loss_acc / test_data.shape[0]))
            printout(
                flog, '\tMean Segmentation Loss: %f' %
                (seg_loss_acc / test_data.shape[0]))
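
For reference, the per-class IoU being accumulated above is the usual TP / (GT + predicted positives - TP) ratio. A minimal standalone sketch of that computation (the helper name is ours, and it adds a zero-division guard that the loop above omits):

def per_class_iou(gt_classes, positive_classes, true_positive_classes):
    """IoU per class: TP / (GT count + predicted count - TP)."""
    ious = []
    for gt, pos, tp in zip(gt_classes, positive_classes, true_positive_classes):
        denom = gt + pos - tp
        # A class that never appears in ground truth or prediction gets IoU 0.
        ious.append(tp / float(denom) if denom > 0 else 0.0)
    return ious

With this helper, the mIOU printed above is simply sum(per_class_iou(...)) / model.SEG_PART.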
Example #3
def load_and_enqueue(sess, enqueue_op, pointgrid_ph, seg_label_ph):
    for i in range(train_data.shape[0]):
        pc = np.squeeze(train_data[i, :, :])
        labels = np.squeeze(train_label[i, :]).astype(int)
        seg_label = model.integer_label_to_one_hot_label(labels)
        pointgrid, pointgrid_label, _ = model.pc2voxel(pc, seg_label)
        sess.run(enqueue_op,
                 feed_dict={
                     pointgrid_ph: pointgrid,
                     seg_label_ph: pointgrid_label
                 })
Example #4
def transfor_data(cur_data, cur_sem):
    data = []
    label = []
    for i in range(cur_data.shape[0]):
        pc = np.squeeze(cur_data[i, :, :])
        labels = np.squeeze(cur_sem[i, :]).astype(int)
        seg_label = model.integer_label_to_one_hot_label(labels)
        pointgrid, pointgrid_label, index = model.pc2voxel(pc, seg_label)
        data.append(pointgrid)
        label.append(pointgrid_label)
    data = np.asarray(data)
    label = np.asarray(label)
    return data, label
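
A minimal usage sketch for transfor_data with synthetic stand-in arrays (the shapes and label range are assumptions; real inputs come from the dataset loaders shown in the other examples):

import numpy as np

# Four fake point clouds with 2048 points each and random integer part labels.
num_clouds, num_points = 4, 2048
cur_data = np.random.rand(num_clouds, num_points, 3).astype(np.float32)
cur_sem = np.random.randint(0, model.SEG_PART, size=(num_clouds, num_points))

voxel_data, voxel_label = transfor_data(cur_data, cur_sem)
print(voxel_data.shape, voxel_label.shape)  # one point grid and one label grid per cloud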
Example #5
def load_and_enqueue(sess, enqueue_op, the_model_ph, cat_label_ph, seg_label_ph):
    for epoch in range(1000 * TRAINING_EPOCHES):
        train_file_idx = np.arange(0, len(TRAINING_FILE_LIST))
        np.random.shuffle(train_file_idx)
        for loop in range(len(TRAINING_FILE_LIST)):
            mat_content = scipy.io.loadmat('../data/ShapeNet/train/' + TRAINING_FILE_LIST[train_file_idx[loop]] + '.mat')
            pc = mat_content['points']
            labels = np.squeeze(mat_content['labels'])
            category = mat_content['category'][0][0]
            pc = model.rotate_pc(pc)
            cat_label = model.integer_label_to_one_hot_label(category)
            seg_label = model.integer_label_to_one_hot_label(labels)
            the_model, the_model_label, _ = model.pc2voxel(pc, seg_label)
            sess.run(enqueue_op, feed_dict={the_model_ph: the_model, cat_label_ph: cat_label, seg_label_ph: the_model_label})
Example #6
def load_and_enqueue(sess, enqueue_op, pointgrid_ph, seg_label_ph):
    for epoch in range(1000 * TRAINING_EPOCHES):
        train_file_idx = np.arange(0, len(TRAINING_FILE_LIST))
        np.random.shuffle(train_file_idx)
        for loop in range(len(TRAINING_FILE_LIST)):
            mat_content = np.load('../data/ShapeNet/' + TRAINING_FILE_LIST[train_file_idx[loop]] + '.npy')
            # choice=np.random.choice(mat_content.shape[0],model.SAMPLE_NUM, replace=False)
            # mat_content=mat_content[choice,:]

            xyz = mat_content[:, 0:3]
            xyz = model.rotate_pc(xyz)
            rgb = mat_content[:, 3:6] / 255.0

            pc = np.concatenate((xyz, rgb), axis=1)
            labels = np.squeeze(mat_content[:, -1]).astype(int)

            seg_label = model.integer_label_to_one_hot_label(labels)
            pointgrid, pointgrid_label, _ = model.pc2voxel(pc, seg_label)
            sess.run(enqueue_op, feed_dict={pointgrid_ph: pointgrid, seg_label_ph: pointgrid_label})
Example #7
def predict():

    with tf.device('/gpu:' + str(gpu_to_use)):
        pointgrid_ph, seg_label_ph = placeholder_inputs()
        is_training_ph = tf.placeholder(tf.bool, shape=())

        pred_seg = model.get_model(pointgrid_ph, is_training=is_training_ph)

    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    config.allow_soft_placement = True

    with tf.Session(config=config) as sess:
        if not os.path.exists(output_dir):
            os.mkdir(output_dir)
        flog = open(os.path.join(output_dir, 'log_test.txt'), 'w')
        # Restore variables from disk.

        if not load_checkpoint(ckpt_dir, sess):
            exit()

        is_training = False
        gt_classes = [0 for _ in range(model.SEG_PART)]
        positive_classes = [0 for _ in range(model.SEG_PART)]
        true_positive_classes = [0 for _ in range(model.SEG_PART)]
        for i in range(test_data.shape[0]):
            pc = np.squeeze(test_data[i, :, :])
            labels = np.squeeze(test_label[i, :]).astype(int)
            seg_label = model.integer_label_to_one_hot_label(labels)
            pointgrid, pointgrid_label, index = model.pc2voxel(pc, seg_label)
            pointgrid = np.expand_dims(pointgrid, axis=0)
            pointgrid_label = np.expand_dims(pointgrid_label, axis=0)
            feed_dict = {
                pointgrid_ph: pointgrid,
                seg_label_ph: pointgrid_label,
                is_training_ph: is_training,
            }
            pred_seg_val = sess.run(pred_seg, feed_dict=feed_dict)
            pred_seg_val = pred_seg_val[0, :, :, :, :, :]
            pred_point_label = model.populateOneHotSegLabel(
                pc, pred_seg_val, index)

            if purify:
                pre_label = pred_point_label
                # Use a separate index so the outer loop variable i is not shadowed.
                for p in range(pc.shape[0]):  # one point cloud has roughly 2500-2800 points
                    idx = np.argsort(np.sum((pc[p, :] - pc)**2, axis=1))
                    j, L = 0, []
                    for _ in range(knn):
                        if idx[j] == p:
                            j += 1
                        L.append(pre_label[idx[j]])
                        j += 1
                    majority = max(set(L), key=L.count)
                    if pre_label[p] == 0 or len(set(L)) == 1:
                        pred_point_label[p] = majority

            for j in range(pred_point_label.shape[0]):
                gt_l = int(labels[j])
                pred_l = int(pred_point_label[j])
                gt_classes[gt_l] += 1
                positive_classes[pred_l] += 1
                true_positive_classes[gt_l] += int(gt_l == pred_l)

        printout(flog, 'gt_l count:{}'.format(gt_classes))
        printout(flog, 'positive_classes count:{}'.format(positive_classes))
        printout(
            flog,
            'true_positive_classes count:{}'.format(true_positive_classes))

        iou_list = []
        for i in range(model.SEG_PART):
            try:
                iou = true_positive_classes[i] / float(
                    gt_classes[i] + positive_classes[i] -
                    true_positive_classes[i])
            except ZeroDivisionError:
                iou = 0
                print('class %d has no points in ground truth or prediction; IoU set to 0' % i)
            finally:
                iou_list.append(iou)
        printout(flog, 'IOU:{}'.format(iou_list))
        printout(
            flog, 'ACC:{}'.format(
                sum(true_positive_classes) * 1.0 / (sum(positive_classes))))
        printout(flog, 'mIOU:{}'.format(sum(iou_list) / float(model.SEG_PART)))
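
The purify pass above recomputes a full row of pairwise squared distances for every point, which is O(n^2) per cloud. A hedged alternative sketch of the same majority-vote smoothing, using scipy.spatial.cKDTree instead of the brute-force argsort (the helper below is not part of the original code):

import numpy as np
from scipy.spatial import cKDTree

def purify_labels(pc, pred_point_label, knn):
    """Majority-vote smoothing over the knn nearest neighbours of each point."""
    labels_in = np.asarray(pred_point_label).ravel()
    purified = labels_in.copy()
    tree = cKDTree(pc)
    # k = knn + 1 because each point is returned as its own nearest neighbour.
    _, neighbor_idx = tree.query(pc, k=knn + 1)
    for i in range(pc.shape[0]):
        # Drop the point itself, then keep its knn nearest neighbours.
        neighbors = [idx for idx in neighbor_idx[i] if idx != i][:knn]
        votes = [labels_in[idx] for idx in neighbors]
        majority = max(set(votes), key=votes.count)
        # Same rule as the loop above: relabel unassigned points or points
        # whose whole neighbourhood agrees on a single label.
        if labels_in[i] == 0 or len(set(votes)) == 1:
            purified[i] = majority
    return purified

The vote should match the in-place loop, while the neighbour search drops from O(n^2) to roughly O(n log n).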
Example #8
def predict():
    is_training = False

    with tf.device('/gpu:' + str(gpu_to_use)):
        the_model_ph, cat_label_ph, seg_label_ph = placeholder_inputs()
        is_training_ph = tf.placeholder(tf.bool, shape=())

        # model
        pred_cat, pred_seg = model.get_model(the_model_ph,
                                             is_training=is_training_ph)

    # Add ops to save and restore all the variables.
    saver = tf.train.Saver()

    # Later, launch the model, use the saver to restore variables from disk, and
    # do some work with the model.

    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    config.allow_soft_placement = True

    with tf.Session(config=config) as sess:
        if not os.path.exists(output_dir):
            os.mkdir(output_dir)

        flog = open(os.path.join(output_dir, 'log.txt'), 'w')

        # Restore variables from disk.
        ckpt_dir = './train_results/trained_models'
        if not load_checkpoint(ckpt_dir, sess):
            exit()

        if not os.path.exists('../data/ShapeNet/test-object'):
            os.mkdir('../data/ShapeNet/test-object')

        map_table = np.zeros((10, 10), dtype=int)
        y_true = []
        y_pred = []
        start_all = datetime.now()
        total_time = datetime.now() - datetime.now()  # zero timedelta accumulator
        incorrect = []
        predicted = []
        ccc = 0

        avg_cat_accuracy = 0.0
        cat_accuracy = np.zeros((model.NUM_CATEGORY), dtype=np.float32)
        cat_obj = np.zeros((model.NUM_CATEGORY), dtype=np.int32)
        avg_iou = 0.0
        cat_iou = np.zeros((model.NUM_CATEGORY), dtype=np.float32)
        for loop in range(len(TESTING_FILE_LIST)):
            start_one = datetime.now()
            mat_content = scipy.io.loadmat('../data/ShapeNet/test/' +
                                           TESTING_FILE_LIST[loop] + '.mat')
            pc = mat_content['points']
            labels = np.squeeze(mat_content['labels'])
            category = mat_content['category'][0][0]
            cat_label = model.integer_label_to_one_hot_label(category)
            category = int(category)
            cat_obj[category] += 1
            seg_label = model.integer_label_to_one_hot_label(labels)
            the_model, the_model_label, index = model.pc2voxel(pc, seg_label)
            the_model = np.expand_dims(the_model, axis=0)
            cat_label = np.expand_dims(cat_label, axis=0)
            the_model_label = np.expand_dims(the_model_label, axis=0)
            feed_dict = {
                the_model_ph: the_model,
                cat_label_ph: cat_label,
                seg_label_ph: the_model_label,
                is_training_ph: is_training,
            }
            pred_cat_val, pred_seg_val = sess.run([pred_cat, pred_seg],
                                                  feed_dict=feed_dict)
            pred_cat_val = np.argmax(pred_cat_val[0, :], axis=0)
            pred_seg_val = pred_seg_val[0, :, :, :, :, :]
            avg_cat_accuracy += (pred_cat_val == category)
            cat_accuracy[category] += (pred_cat_val == category)

            stop_one = datetime.now()
            print('Time: ', stop_one - start_one)
            total_time += (stop_one - start_one)
            print('total Time: ', total_time)
            y_true.insert(loop, category)
            y_pred.insert(loop, pred_cat_val)

            if pred_cat_val != category:
                incorrect.insert(ccc, TESTING_FILE_LIST[loop])
                predicted.insert(ccc, category2name[pred_cat_val])
                ccc += 1
                print(category2name[pred_cat_val])

            pred_point_label = model.populateOneHotSegLabel(
                pc, pred_seg_val, index)
            if purify:
                pre_label = pred_point_label
                for i in range(pc.shape[0]):
                    idx = np.argsort(np.sum((pc[i, :] - pc)**2, axis=1))
                    j, L = 0, []
                    for _ in range(knn):
                        if (idx[j] == i):
                            j += 1
                        L.append(pre_label[idx[j]])
                        j += 1
                    majority = max(set(L), key=L.count)
                    if (pre_label[i] == 0 or len(set(L)) == 1):
                        pred_point_label[i] = majority
            iou = model.intersection_over_union(pred_point_label, labels)
            avg_iou += iou
            cat_iou[category] += iou
            #if (cat_obj[category] <= 3):
            #    output_color_point_cloud(pc, pred_point_label, '../data/ShapeNet/test-object/' + category2name[category] + '_' + str(cat_obj[category]) + '.obj')
            printout(
                flog, '%d/%d %s' %
                ((loop + 1), len(TESTING_FILE_LIST), TESTING_FILE_LIST[loop]))
            printout(flog, '----------')

        avg_cat_accuracy /= float(np.sum(cat_obj))
        avg_iou /= float(np.sum(cat_obj))
        printout(flog,
                 'Average classification accuracy: %f' % avg_cat_accuracy)
        printout(flog, 'Average IoU: %f' % avg_iou)
        printout(
            flog,
            'CategoryName, CategorySize, ClassificationAccuracy, SegmentationIoU'
        )
        for i in range(model.NUM_CATEGORY):
            cat_accuracy[i] /= float(cat_obj[i])
            cat_iou[i] /= float(cat_obj[i])
            printout(
                flog, '\t%s (%d): %f, %f' %
                (category2name[i], cat_obj[i], cat_accuracy[i], cat_iou[i]))

        # Print the confusion matrix
        print(metrics.confusion_matrix(y_true, y_pred))

        # Print the precision and recall, among other metrics
        print(metrics.classification_report(y_true, y_pred, digits=3))

        stop_all = datetime.now()
        print('Time: ', stop_all - start_all)

        print(incorrect)
        print(predicted)
Example #9
def predict():
    is_training = False
    with tf.device('/gpu:' + str(gpu_to_use)):
        pointgrid_ph, seg_label_ph = placeholder_inputs()
        is_training_ph = tf.placeholder(tf.bool, shape=())
        pred_seg = model.get_model(pointgrid_ph, is_training=is_training_ph)

    saver = tf.train.Saver()

    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    config.allow_soft_placement = True

    with tf.Session(config=config) as sess:
        if not os.path.exists(output_dir):
            os.mkdir(output_dir)

        flog = open(os.path.join(output_dir, 'log-6.txt'), 'w')

        # Restore variables from disk.
        ckpt_dir = './train_results_6/trained_models'
        if not load_checkpoint(ckpt_dir, sess):
            exit()

        if not os.path.exists('../data/ShapeNet/test-PointGrid'):
            os.mkdir('../data/ShapeNet/test-PointGrid')

        gt_classes = [0 for _ in range(model.SEG_PART)]
        positive_classes = [0 for _ in range(model.SEG_PART)]
        true_positive_classes = [0 for _ in range(model.SEG_PART)]
        for filelist in sorted(os.listdir(TESTING_FILE_LIST)):
            printout(flog, filelist)
            mat_content = np.load(os.path.join(TESTING_FILE_LIST, filelist))
            # choice = np.random.choice(mat_content.shape[0], model.SAMPLE_NUM, replace=False)
            # mat_content = mat_content[choice, :]

            xyz = mat_content[:, 0:3]
            rgb = mat_content[:, 3:6] / 255.0

            pc = np.concatenate((xyz, rgb), axis=1)
            labels = np.squeeze(mat_content[:, -1]).astype(int)

            seg_label = model.integer_label_to_one_hot_label(labels)
            pointgrid, pointgrid_label, index = model.pc2voxel(pc, seg_label)

            pointgrid = np.expand_dims(pointgrid, axis=0)
            pointgrid_label = np.expand_dims(pointgrid_label, axis=0)
            feed_dict = {
                pointgrid_ph: pointgrid,
                seg_label_ph: pointgrid_label,
                is_training_ph: is_training,
            }
            t1 = time.time()
            pred_seg_val = sess.run(pred_seg, feed_dict=feed_dict)
            #    pred_seg: of size B x N x N x N x (K+1) x NUM_PART_SEG

            pred_seg_val = pred_seg_val[0, :, :, :, :, :]
            pred_point_label = model.populateOneHotSegLabel(pc, pred_seg_val, index)
            #     pred_point_label: size n x 1

            if purify:
                pre_label = pred_point_label
                for i in range(pc.shape[0]):  # one point cloud has roughly 2500-2800 points
                    idx = np.argsort(np.sum((pc[i, :] - pc) ** 2, axis=1))
                    j, L = 0, []
                    for _ in range(knn):
                        if (idx[j] == i):
                            j += 1
                        L.append(pre_label[idx[j]])
                        j += 1
                    majority = max(set(L), key=L.count)
                    if (pre_label[i] == 0 or len(set(L)) == 1):
                        pred_point_label[i] = majority
            t2 = time.time()
            print('one point cloud cost time:{}'.format(t2 - t1))

            for j in range(pred_point_label.shape[0]):
                # gt_classes[labels[j]-1]+=1
                # if int(labels[j])==int(pred_point_label[j]):
                #     positive_classes[labels[j]-1]+=1
                # else:
                #     negative_classes[labels[j]-1]+=1

                gt_l = int(labels[j])
                pred_l = int(pred_point_label[j])
                gt_classes[gt_l] += 1
                positive_classes[pred_l] += 1
                true_positive_classes[gt_l] += int(gt_l == pred_l)
            printout(flog, 'gt_l:{},positive_classes:{},true_positive_classes:{}'.format(gt_classes, positive_classes,
                                                                                         true_positive_classes))

        printout(flog, 'gt_l count:{}'.format(gt_classes))
        printout(flog, 'positive_classes count:{}'.format(positive_classes))
        printout(flog, 'true_positive_classes count:{}'.format(true_positive_classes))

        iou_list = []
        for i in range(model.SEG_PART):
            iou = true_positive_classes[i] / float(gt_classes[i] + positive_classes[i] - true_positive_classes[i])
            iou_list.append(iou)
        printout(flog, 'IOU:{}'.format(iou_list))
        printout(flog, 'ACC:{}'.format(sum(true_positive_classes) / float(sum(positive_classes))))
        printout(flog, 'mIOU:{}'.format(sum(iou_list) / float(model.SEG_PART)))