Example No. 1
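These examples omit their module-level imports. A plausible preamble for Example No. 1, assuming data_utils is the repository's own helper module:

import argparse
import importlib
import math
import os
import sys
from datetime import datetime

import numpy as np
import tensorflow as tf

import data_utils  # repository-local helper module (assumed)
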
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--filelist',
                        '-t',
                        help='Path to input .h5 filelist (.txt)',
                        required=True)
    parser.add_argument('--data_folder',
                        '-f',
                        help='Path to *.pts directory',
                        required=True)
    parser.add_argument('--category',
                        '-c',
                        help='Path to category list file (.txt)',
                        required=True)
    parser.add_argument('--load_ckpt',
                        '-l',
                        help='Path to a check point file for load',
                        required=True)
    parser.add_argument('--repeat_num',
                        '-r',
                        help='Repeat number',
                        type=int,
                        default=1)
    parser.add_argument('--model', '-m', help='Model to use', required=True)
    parser.add_argument('--setting',
                        '-x',
                        help='Setting to use',
                        required=True)
    parser.add_argument('--save_ply',
                        '-s',
                        help='Save results as ply',
                        action='store_true')
    args = parser.parse_args()
    print(args)

    model = importlib.import_module(args.model)
    setting_path = os.path.join(os.path.dirname(__file__), args.model)
    sys.path.append(setting_path)
    setting = importlib.import_module(args.setting)

    sample_num = setting.sample_num
    num_class = setting.num_class

    # Prepare output folder
    # NOTE: 'pred_...' is appended directly onto data_folder with no path separator
    output_folder = args.data_folder + 'pred_' + str(args.repeat_num)
    category_list = [
        (category, int(label_num))
        for (category,
             label_num) in [line.split() for line in open(args.category, 'r')]
    ]
    for category, _ in category_list:
        folder = os.path.join(output_folder, category)
        if not os.path.exists(folder):
            os.makedirs(folder)

    # prepare input pts path, output seg path, output ply path
    input_filelist = []
    output_filelist = []
    output_ply_filelist = []
    for category in sorted(os.listdir(args.data_folder)):
        data_category_folder = os.path.join(args.data_folder, category)
        for filename in sorted(os.listdir(data_category_folder)):
            input_filelist.append(
                os.path.join(args.data_folder, category, filename))
            output_filelist.append(
                os.path.join(output_folder, category, filename[0:-3] + 'seg'))
            output_ply_filelist.append(
                os.path.join(output_folder + '_ply', category,
                             filename[0:-3] + 'ply'))

    # Prepare inputs
    print('{}-Preparing datasets...'.format(datetime.now()))
    data, _, data_num, _ = data_utils.load_seg(args.filelist)

    batch_num = data.shape[0]
    max_point_num = data.shape[1]  # maximum number of points per shape
    batch_size = args.repeat_num * math.ceil(max_point_num / sample_num)

    print('{}-{:d} testing batches.'.format(datetime.now(), batch_num))

    ######################################################################
    # Placeholders
    indices = tf.placeholder(tf.int32,
                             shape=(batch_size, None, 2),
                             name="indices")
    is_training = tf.placeholder(tf.bool, name='is_training')
    pts_fts = tf.placeholder(tf.float32,
                             shape=(batch_size, max_point_num,
                                    setting.data_dim),
                             name='points')
    ######################################################################

    ######################################################################
    pts_fts_sampled = tf.gather_nd(pts_fts,
                                   indices=indices,
                                   name='pts_fts_sampled')
    if setting.data_dim > 3:
        points_sampled, features_sampled = tf.split(
            pts_fts_sampled, [3, setting.data_dim - 3],
            axis=-1,
            name='split_points_features')
        if not setting.use_extra_features:
            features_sampled = None
    else:
        points_sampled = pts_fts_sampled
        features_sampled = None

    net = model.Net(points_sampled, features_sampled, num_class, is_training,
                    setting)
    seg_probs_op = net.probs  # per-point class probabilities

    update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)

    # saver for restoring the trained model
    saver = tf.train.Saver()

    parameter_num = np.sum(
        [np.prod(v.shape.as_list()) for v in tf.trainable_variables()])
    print('{}-Parameter number: {:d}.'.format(datetime.now(), parameter_num))

    with tf.Session() as sess:
        # Load the model
        saver.restore(sess, args.load_ckpt)
        print('{}-Checkpoint loaded from {}!'.format(datetime.now(),
                                                     args.load_ckpt))

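        # Column of batch-row indices; paired below with shuffled point
        # indices so tf.gather_nd picks sample_num points per batch row.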
        indices_batch_indices = np.tile(
            np.reshape(np.arange(batch_size), (batch_size, 1, 1)),
            (1, sample_num, 1))

        for batch_idx in range(batch_num):

            points_batch = data[[batch_idx] * batch_size, ...]
            point_num = data_num[batch_idx]

            coordinates = [[float(value) for value in xyz.split(' ')]
                           for xyz in open(input_filelist[batch_idx], 'r')
                           if len(xyz.split(' ')) == setting.data_dim]
            assert (point_num == len(coordinates))

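            # Tile the point indices so sample_num * batch_size indices cover
            # every point at least once, then shuffle for random sampling.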
            tile_num = math.ceil((sample_num * batch_size) / point_num)
            indices_shuffle = np.tile(np.arange(point_num),
                                      tile_num)[0:sample_num * batch_size]
            np.random.shuffle(indices_shuffle)
            indices_batch_shuffle = np.reshape(indices_shuffle,
                                               (batch_size, sample_num, 1))
            indices_batch = np.concatenate(
                (indices_batch_indices, indices_batch_shuffle), axis=2)

            _, seg_probs = \
                sess.run([update_ops, seg_probs_op],
                         feed_dict={
                             pts_fts: points_batch,
                             indices: indices_batch,
                             is_training: False,
                         })

            seg_probs_2d = np.reshape(seg_probs, (sample_num * batch_size, -1))

            predictions = [(-1, 0.0, [])] * point_num

            for idx in range(sample_num * batch_size):
                point_idx = indices_shuffle[idx]
                point_seg_probs = seg_probs_2d[idx, :]
                prob = np.amax(point_seg_probs)
                seg_idx = np.argmax(point_seg_probs)
                if prob > predictions[point_idx][1]:
                    predictions[point_idx] = (seg_idx, prob, point_seg_probs)

            labels = []
            with open(output_filelist[batch_idx], 'w') as file_seg:
                for seg_idx, _, _ in predictions:
                    file_seg.write('%d\n' % seg_idx)
                    labels.append(seg_idx)

            if args.save_ply:
                data_utils.save_ply_property(np.array(coordinates),
                                             np.array(labels), 6,
                                             output_ply_filelist[batch_idx])

            print('{}-[Testing]-Iter: {:06d} saved to {}'.format(
                datetime.now(), batch_idx, output_filelist[batch_idx]))
            sys.stdout.flush()
            ######################################################################
        print('{}-Done!'.format(datetime.now()))
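
A minimal entry point, with a hypothetical invocation shown in the comment:

if __name__ == '__main__':
    # Hypothetical example invocation:
    #   python test_seg.py -t filelist.txt -f data/ -c categories.txt \
    #       -l ckpt/model -m pointcnn_seg -x my_setting -s
    main()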
Example No. 2
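Example No. 2 assumes the same module-level imports as Example No. 1.
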
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--filelist',
                        '-f',
                        help='Path to input .h5 filelist (.txt)',
                        required=True)
    parser.add_argument('--category',
                        '-c',
                        help='Path to category list file (.txt)',
                        required=True)
    parser.add_argument('--data_folder',
                        '-d',
                        help='Path to *.pts directory',
                        required=True)
    parser.add_argument('--load_ckpt',
                        '-l',
                        help='Path to a check point file for load',
                        required=True)
    parser.add_argument('--repeat_num',
                        '-r',
                        help='Repeat number',
                        type=int,
                        default=1)
    parser.add_argument('--sample_num',
                        help='Point sample num',
                        type=int,
                        default=2048)
    parser.add_argument('--model', '-m', help='Model to use', required=True)
    parser.add_argument('--setting',
                        '-x',
                        help='Setting to use',
                        required=True)
    parser.add_argument('--save_ply',
                        '-s',
                        help='Save results as ply',
                        action='store_true')
    args = parser.parse_args()
    print(args)

    model = importlib.import_module(args.model)
    sys.path.append(os.path.dirname(args.setting))
    print(os.path.dirname(args.setting))
    setting = importlib.import_module(os.path.basename(args.setting))

    sample_num = setting.sample_num

    output_folder = args.data_folder + '_pred_nips_' + str(args.repeat_num)
    category_list = [
        (category, int(label_num))
        for (category,
             label_num) in [line.split() for line in open(args.category, 'r')]
    ]
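    # Each category's part labels occupy a contiguous slice of the global
    # label space; record its (start, end) range for later slicing.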
    offset = 0
    category_range = dict()
    for category, category_label_seg_max in category_list:
        category_range[category] = (offset, offset + category_label_seg_max)
        offset = offset + category_label_seg_max
        folder = os.path.join(output_folder, category)
        if not os.path.exists(folder):
            os.makedirs(folder)

    input_filelist = []
    output_filelist = []
    output_ply_filelist = []
    for category in sorted(os.listdir(args.data_folder)):
        data_category_folder = os.path.join(args.data_folder, category)
        for filename in sorted(os.listdir(data_category_folder)):
            input_filelist.append(
                os.path.join(args.data_folder, category, filename))
            output_filelist.append(
                os.path.join(output_folder, category, filename[0:-3] + 'seg'))
            output_ply_filelist.append(
                os.path.join(output_folder + '_ply', category,
                             filename[0:-3] + 'ply'))

    # Prepare inputs
    print('{}-Preparing datasets...'.format(datetime.now()))
    data, label, data_num, _, _ = data_utils.load_seg(args.filelist)

    batch_num = data.shape[0]
    max_point_num = data.shape[1]
    batch_size = args.repeat_num * math.ceil(max_point_num / sample_num)

    print('{}-{:d} testing batches.'.format(datetime.now(), batch_num))

    ######################################################################
    # Placeholders
    indices = tf.placeholder(tf.int32,
                             shape=(batch_size, None, 2),
                             name="indices")
    is_training = tf.placeholder(tf.bool, name='is_training')
    pts_fts = tf.placeholder(tf.float32,
                             shape=(None, max_point_num, setting.data_dim),
                             name='pts_fts')
    ######################################################################

    ######################################################################
    pts_fts_sampled = tf.gather_nd(pts_fts,
                                   indices=indices,
                                   name='pts_fts_sampled')
    if setting.data_dim > 3:
        points_sampled, features_sampled = tf.split(
            pts_fts_sampled, [3, setting.data_dim - 3],
            axis=-1,
            name='split_points_features')
        if not setting.use_extra_features:
            features_sampled = None
    else:
        points_sampled = pts_fts_sampled
        features_sampled = None

    net = model.Net(points_sampled, features_sampled, is_training, setting)
    logits = net.logits
    probs_op = tf.nn.softmax(logits, name='probs')

    saver = tf.train.Saver()

    parameter_num = np.sum(
        [np.prod(v.shape.as_list()) for v in tf.trainable_variables()])
    print('{}-Parameter number: {:d}.'.format(datetime.now(), parameter_num))

    with tf.Session() as sess:
        # Load the model
        saver.restore(sess, args.load_ckpt)
        print('{}-Checkpoint loaded from {}!'.format(datetime.now(),
                                                     args.load_ckpt))

        indices_batch_indices = np.tile(
            np.reshape(np.arange(batch_size), (batch_size, 1, 1)),
            (1, sample_num, 1))
        for batch_idx in range(batch_num):
            points_batch = data[[batch_idx] * batch_size, ...]
            object_label = label[batch_idx]
            point_num = data_num[batch_idx]
            category = category_list[object_label][0]
            label_start, label_end = category_range[category]

            tile_num = math.ceil((sample_num * batch_size) / point_num)
            indices_shuffle = np.tile(np.arange(point_num),
                                      tile_num)[0:sample_num * batch_size]
            np.random.shuffle(indices_shuffle)
            indices_batch_shuffle = np.reshape(indices_shuffle,
                                               (batch_size, sample_num, 1))
            indices_batch = np.concatenate(
                (indices_batch_indices, indices_batch_shuffle), axis=2)

            probs = sess.run(probs_op,
                             feed_dict={
                                 pts_fts: points_batch,
                                 indices: indices_batch,
                                 is_training: False,
                             })
            probs_2d = np.reshape(probs, (sample_num * batch_size, -1))
            predictions = [(-1, 0.0)] * point_num
            for idx in range(sample_num * batch_size):
                point_idx = indices_shuffle[idx]
                point_probs = probs_2d[idx, label_start:label_end]
                confidence = np.amax(point_probs)
                seg_idx = np.argmax(point_probs)
                if confidence > predictions[point_idx][1]:
                    predictions[point_idx] = (seg_idx, confidence)

            labels = []
            with open(output_filelist[batch_idx], 'w') as file_seg:
                for seg_idx, _ in predictions:
                    file_seg.write('%d\n' % (seg_idx))
                    labels.append(seg_idx)

            # read the coordinates from the txt file for verification
            coordinates = [[float(value) for value in xyz.split(' ')]
                           for xyz in open(input_filelist[batch_idx], 'r')
                           if len(xyz.split(' ')) == 3]
            assert (point_num == len(coordinates))
            if args.save_ply:
                data_utils.save_ply_property(np.array(coordinates),
                                             np.array(labels), 6,
                                             output_ply_filelist[batch_idx])

            print('{}-[Testing]-Iter: {:06d} saved to {}'.format(
                datetime.now(), batch_idx, output_filelist[batch_idx]))
            sys.stdout.flush()
            ######################################################################
        print('{}-Done!'.format(datetime.now()))
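
Both examples above use the same per-point voting: each original point may be sampled several times, and only its highest-confidence prediction is kept. A minimal NumPy sketch of that reduction, with hypothetical shapes:

import numpy as np

def vote_max_confidence(probs_2d, indices_shuffle, point_num):
    # probs_2d: (num_samples, num_classes) probabilities per sampled point;
    # indices_shuffle: (num_samples,) original point index of each sample.
    best_conf = np.zeros(point_num)
    best_label = np.full(point_num, -1, dtype=np.int64)
    for row, point_idx in enumerate(indices_shuffle):
        conf = np.amax(probs_2d[row])
        if conf > best_conf[point_idx]:
            best_conf[point_idx] = conf
            best_label[point_idx] = np.argmax(probs_2d[row])
    return best_label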
Example No. 3
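Example No. 3 additionally shows a progress bar; this presumably comes from the progressbar package:

from progressbar import ProgressBar
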
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--category',
                        '-c',
                        help='category name',
                        required=True)
    parser.add_argument('--level',
                        '-l',
                        type=int,
                        help='level id',
                        required=True)
    parser.add_argument('--load_ckpt',
                        '-k',
                        help='Path to a check point file for load',
                        required=True)
    parser.add_argument('--model', '-m', help='Model to use', required=True)
    parser.add_argument('--setting',
                        '-x',
                        help='Setting to use',
                        required=True)
    parser.add_argument('--batch_size',
                        '-b',
                        help='Batch size during testing',
                        default=8,
                        type=int)
    parser.add_argument('--save_ply',
                        '-s',
                        help='Save results as ply',
                        action='store_true')
    parser.add_argument('--save_dir',
                        '-o',
                        help='The output directory',
                        type=str,
                        default=None)
    parser.add_argument('--save_num_shapes',
                        '-u',
                        help='how many shapes to visualize',
                        default=20,
                        type=int)
    args = parser.parse_args()
    print(args)

    if args.save_ply:
        if args.save_dir is None:
            print('ERROR: --save_dir is required when --save_ply is set!')
            exit(1)
        if os.path.exists(args.save_dir):
            print('ERROR: folder %s exists! Please check and delete!' %
                  args.save_dir)
            exit(1)
        os.mkdir(args.save_dir)

    model = importlib.import_module(args.model)
    setting_path = os.path.join(os.path.dirname(__file__), args.model)
    sys.path.append(setting_path)
    setting = importlib.import_module(args.setting)

    sample_num = setting.sample_num
    batch_size = args.batch_size

    args.data_folder = '../../data/sem_seg_h5/'

    # Load all test data
    args.filelist = os.path.join(args.data_folder,
                                 '%s-%d' % (args.category, args.level),
                                 'test_files.txt')
    data_test, _, label_gt = data_utils.load_seg(args.filelist)
    num_shape = data_test.shape[0]
    print('Loaded data: %s shapes in total to test.' % num_shape)

    # Load current category + level statistics
    stats_path = '../../stats/after_merging_label_ids/%s-level-%d.txt' % (
        args.category, args.level)
    with open(stats_path, 'r') as fin:
        setting.num_class = len(fin.readlines()) + 1  # with "other"
        print('NUM CLASS: %d' % setting.num_class)

    ######################################################################
    # Placeholders
    is_training = tf.placeholder(tf.bool, name='is_training')
    pts_fts = tf.placeholder(tf.float32,
                             shape=(batch_size, sample_num, setting.data_dim),
                             name='points')
    ######################################################################

    ######################################################################
    pts_fts_sampled = pts_fts
    points_sampled = pts_fts_sampled
    features_sampled = None

    net = model.Net(points_sampled, features_sampled, is_training, setting)
    seg_probs_op = tf.nn.softmax(net.logits, name='seg_probs')

    # for restore model
    saver = tf.train.Saver()

    parameter_num = np.sum(
        [np.prod(v.shape.as_list()) for v in tf.trainable_variables()])
    print('{}-Parameter number: {:d}.'.format(datetime.now(), parameter_num))

    # Create a session
    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    config.allow_soft_placement = True
    config.log_device_placement = False
    sess = tf.Session(config=config)

    # Load the model
    ckptstate = tf.train.get_checkpoint_state(args.load_ckpt)
    if ckptstate is not None:
        LOAD_MODEL_FILE = os.path.join(
            args.load_ckpt, os.path.basename(ckptstate.model_checkpoint_path))
        saver.restore(sess, LOAD_MODEL_FILE)
        print("Model loaded in file: %s" % LOAD_MODEL_FILE)
    else:
        print("Failed to load model file: %s" % args.load_ckpt)
        exit(1)

    # Start the testing
    print('{}-Testing...'.format(datetime.now()))

    num_batch = (num_shape - 1) // batch_size + 1
    pts_batch = np.zeros((batch_size, sample_num, 3),
                         dtype=np.float32)  # assumes setting.data_dim == 3

    avg_acc = 0.0
    avg_cnt = 0

    shape_iou_tot = 0.0
    shape_iou_cnt = 0

    part_intersect = np.zeros((setting.num_class), dtype=np.float32)
    part_union = np.zeros((setting.num_class), dtype=np.float32)

    bar = ProgressBar()
    all_seg_probs = []
    for batch_idx in bar(range(num_batch)):
        start_idx = batch_idx * batch_size
        end_idx = min((batch_idx + 1) * batch_size, num_shape)

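        # The last batch may be short: only the first end_idx - start_idx rows
        # of the reused buffer are refreshed; the stale tail is sliced off below.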
        pts_batch[:end_idx - start_idx, ...] = data_test[start_idx:end_idx]

        seg_probs = sess.run(seg_probs_op,
                             feed_dict={
                                 pts_fts: pts_batch,
                                 is_training: False
                             })
        seg_probs = seg_probs[:end_idx - start_idx]
        all_seg_probs.append(seg_probs)

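        # argmax over classes 1..num_class-1 only: class 0 ("other") is never
        # predicted, and the +1 restores the original class index.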
        seg_res = np.argmax(seg_probs[:, :, 1:], axis=-1) + 1

        avg_acc += np.sum(
            np.mean((seg_res == label_gt[start_idx:end_idx]) |
                    (label_gt[start_idx:end_idx] == 0),
                    axis=-1))
        avg_cnt += end_idx - start_idx

        seg_gt = label_gt[start_idx:end_idx]
        seg_res[seg_gt == 0] = 0

        for i in range(end_idx - start_idx):
            cur_pred = seg_res[i]
            cur_gt = seg_gt[i]

            cur_shape_iou_tot = 0.0
            cur_shape_iou_cnt = 0
            for j in range(1, setting.num_class):
                cur_gt_mask = (cur_gt == j)
                cur_pred_mask = (cur_pred == j)

                has_gt = (np.sum(cur_gt_mask) > 0)
                has_pred = (np.sum(cur_pred_mask) > 0)

                if has_gt or has_pred:
                    intersect = np.sum(cur_gt_mask & cur_pred_mask)
                    union = np.sum(cur_gt_mask | cur_pred_mask)
                    iou = intersect / union

                    cur_shape_iou_tot += iou
                    cur_shape_iou_cnt += 1

                    part_intersect[j] += intersect
                    part_union[j] += union

            if cur_shape_iou_cnt > 0:
                cur_shape_miou = cur_shape_iou_tot / cur_shape_iou_cnt
                shape_iou_tot += cur_shape_miou
                shape_iou_cnt += 1

        if args.save_ply and start_idx < args.save_num_shapes:
            for i in range(start_idx, min(end_idx, args.save_num_shapes)):
                out_fn = os.path.join(args.save_dir, 'shape-%02d-pred.ply' % i)
                data_utils.save_ply_property(data_test[i],
                                             seg_res[i - start_idx],
                                             setting.num_class, out_fn)
                out_fn = os.path.join(args.save_dir, 'shape-%02d-gt.ply' % i)
                data_utils.save_ply_property(data_test[i], label_gt[i],
                                             setting.num_class, out_fn)

    all_seg_probs = np.vstack(all_seg_probs)
    print('{}-Done!'.format(datetime.now()))

    print('Average Accuracy: %f' % (avg_acc / avg_cnt))
    print('Shape mean IoU: %f' % (shape_iou_tot / shape_iou_cnt))

    part_iou = np.divide(part_intersect[1:], part_union[1:])
    mean_part_iou = np.mean(part_iou)
    print('Category mean IoU: %f, %s' % (mean_part_iou, str(part_iou)))

    out_list = ['%3.1f' % (item * 100) for item in part_iou.tolist()]
    print('%3.1f;%3.1f;%3.1f;%s' %
          (avg_acc * 100 / avg_cnt, shape_iou_tot * 100 / shape_iou_cnt,
           mean_part_iou * 100, '[' + ', '.join(out_list) + ']'))
Example No. 4
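Example No. 4 needs only argparse, os, sys, numpy, and the repository's data_utils from the preamble above.
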
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--folder_gt',
                        '-g',
                        help='Path to ground truth folder',
                        required=True)
    parser.add_argument('--folder_pred',
                        '-p',
                        help='Path to prediction folder',
                        required=True)
    parser.add_argument('--folder_data',
                        '-d',
                        help='Path to point cloud data folder')
    parser.add_argument('--part_avg',
                        '-a',
                        action='store_true',
                        help='Use part level average')
    args = parser.parse_args()
    print(args)

    # ShapeNet synset IDs -> names; leading zeros are dropped since keys are
    # looked up via int(category) below
    category_id_to_name = {
        2691156: 'Airplane',
        2773838: 'Bag',
        2954340: 'Cap',
        2958343: 'Car',
        3001627: 'Chair',
        3261776: 'Earphone',
        3467517: 'Guitar',
        3624134: 'Knife',
        3636649: 'Lamp',
        3642806: 'Laptop',
        3790512: 'Motorbike',
        3797390: 'Mug',
        3948459: 'Pistol',
        4099429: 'Rocket',
        4225987: 'Skateboard',
        4379243: 'Table'
    }

    categories = sorted(os.listdir(args.folder_gt))

    label_min = sys.maxsize
    for category in categories:
        category_folder_gt = os.path.join(args.folder_gt, category)
        filenames = sorted(os.listdir(category_folder_gt))
        for filename in filenames:
            filepath_gt = os.path.join(category_folder_gt, filename)
            label_gt = np.loadtxt(filepath_gt).astype(np.int32)
            label_min = min(label_min, np.amin(label_gt))

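    # The global minimum ground-truth label is subtracted below so that
    # ground-truth labels align with the zero-based predicted labels.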
    IoU = 0.0
    total_num = 0
    for category in categories:
        category_folder_gt = os.path.join(args.folder_gt, category)
        category_folder_pred = os.path.join(args.folder_pred, category)
        if args.folder_data:
            category_folder_data = os.path.join(args.folder_data, category)
            category_folder_err = os.path.join(args.folder_pred + '_err_ply',
                                               category)

        IoU_category = 0.0
        filenames = sorted(os.listdir(category_folder_gt))
        for filename in filenames:
            filepath_gt = os.path.join(category_folder_gt, filename)
            filepath_pred = os.path.join(category_folder_pred, filename)
            label_gt = np.loadtxt(filepath_gt).astype(np.int32) - label_min
            label_pred = np.loadtxt(filepath_pred).astype(np.int32)

            if args.folder_data:
                filepath_data = os.path.join(category_folder_data,
                                             filename[:-3] + 'pts')
                filepath_err = os.path.join(category_folder_err,
                                            filename[:-3] + 'ply')
                coordinates = [[float(value) for value in xyz.split(' ')]
                               for xyz in open(filepath_data, 'r')
                               if len(xyz.split(' ')) == 3]
                assert (label_gt.shape[0] == len(coordinates))
                data_utils.save_ply_property(np.array(coordinates),
                                             (label_gt == label_pred), 6,
                                             filepath_err)

            if args.part_avg:
                label_max = np.amax(label_gt)
                IoU_part = 0.0
                for label_idx in range(label_max + 1):
                    locations_gt = (label_gt == label_idx)
                    locations_pred = (label_pred == label_idx)
                    I_locations = np.logical_and(locations_gt, locations_pred)
                    U_locations = np.logical_or(locations_gt, locations_pred)
                    I = np.sum(I_locations) + np.finfo(np.float32).eps
                    U = np.sum(U_locations) + np.finfo(np.float32).eps
                    IoU_part = IoU_part + I / U
                IoU_sample = IoU_part / (label_max + 1)
            else:
                label_correct_locations = (label_gt == label_pred)
                IoU_sample = np.sum(label_correct_locations) / label_gt.size
            IoU_category = IoU_category + IoU_sample
        IoU = IoU + IoU_category  # accumulate the per-sample sum before normalizing
        IoU_category = IoU_category / len(filenames)
        if category.isdigit():
            print("IoU of %s: " % (category_id_to_name[int(category)]),
                  IoU_category)
        else:
            print("IoU of %s: " % category, IoU_category)
        total_num = total_num + len(filenames)
    IoU = IoU / total_num
    print("IoU: ", IoU)