Code Example #1
def get_data(self):
    for model_id in self.model_list:
        complete = read_pcd(
            os.path.join(self.complete_dir, '%s.pcd' % model_id))
        partial = read_pcd(
            os.path.join(self.partial_dir, '%s.pcd' % model_id))
        yield model_id, partial, complete
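
Every example on this page relies on small read_pcd / save_pcd helpers from the PCN-style codebases listed below. A minimal sketch of such helpers built on open3d is given here as an assumption for illustration; the original repos ship their own implementations (Example #10, for instance, uses the older flat open3d namespace with PointCloud and write_point_cloud directly).

import numpy as np
import open3d as o3d

def read_pcd(filename):
    # Load a .pcd/.ply file and return an (N, 3) float array of XYZ points.
    pcd = o3d.io.read_point_cloud(filename)
    return np.asarray(pcd.points)

def save_pcd(filename, points):
    # Write an (N, 3) array of XYZ points back to disk as a point cloud.
    pcd = o3d.geometry.PointCloud()
    pcd.points = o3d.utility.Vector3dVector(np.asarray(points, dtype=np.float64))
    o3d.io.write_point_cloud(filename, pcd)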
Code Example #2
def get_data(self):
    for model_id in self.model_list:
        complete = read_pcd(
            os.path.join(self.complete_dir, '%s.pcd' % model_id))
        for i in range(self.num_scans):
            partial = read_pcd(
                os.path.join(self.partial_dir, model_id, '%d.pcd' % i))
            yield model_id.replace('/', '_'), partial, complete
Code Example #3
def get_data(self):
    for model_id in self.model_list:
        complete = read_pcd(
            os.path.join(self.complete_dir, '%s.pcd' % model_id))
        # complete = resample_pcd(complete, 16384)
        partial = read_pcd(
            os.path.join(self.partial_dir, '%s.pcd' % model_id))
        yield model_id.replace('/', '_'), partial, complete
Code Example #4
File: demo.py  Project: star-cold/3d_project
def main():
    args = parse_args()

    inputs = tf.placeholder(tf.float32, (1, None, 3))
    gt = tf.placeholder(tf.float32, (1, args.num_gt_points, 3))
    npts = tf.placeholder(tf.int32, (1,))
    model_module = importlib.import_module('.%s' % args.model_type, 'models')
    model = model_module.Model(inputs, npts, gt, tf.constant(1.0))

    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    config.allow_soft_placement = True
    sess = tf.Session(config=config)

    saver = tf.train.Saver()
    saver.restore(sess, args.checkpoint)

    partial = read_pcd(args.input_path)
    complete = sess.run(model.outputs, feed_dict={inputs: [partial], npts: [partial.shape[0]]})[0]
    create_plots(partial, complete)

    if args.output_path is None:
        show_pcd(complete)
        plt.show()
    else:
        os.makedirs(args.output_path, exist_ok=True)
        filename = os.path.splitext(os.path.basename(args.input_path))[0]

        output_file = os.path.join(args.output_path, filename + '.pcd')
        save_pcd(output_file, complete)

        output_file = os.path.join(args.output_path, filename + '.png')
        plt.savefig(output_file)
Code Example #5
File: my_demo.py  Project: josephinemonica/pcn
def test(args):
    inputs = tf.placeholder(tf.float32, (1, None, 3))
    npts = tf.placeholder(tf.int32, (1, ))
    print('num_gt_points:', args.num_gt_points)
    gt = tf.placeholder(tf.float32, (1, args.num_gt_points, 3))
    model_module = importlib.import_module('.%s' % args.model_type, 'models')
    model = model_module.Model(inputs, npts, gt, tf.constant(1.0))

    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    config.allow_soft_placement = True
    sess = tf.Session(config=config)

    saver = tf.train.Saver()
    saver.restore(sess, args.checkpoint)

    t0 = time.time()

    partial = read_pcd(args.pcd_file)
    bbox = np.loadtxt(args.bbox_file)

    # Calculate center, rotation and scale
    center = (bbox.min(0) + bbox.max(0)) / 2
    bbox -= center
    yaw = np.arctan2(bbox[3, 1] - bbox[0, 1], bbox[3, 0] - bbox[0, 0])
    rotation = np.array([[np.cos(yaw), -np.sin(yaw), 0],
                         [np.sin(yaw), np.cos(yaw), 0], [0, 0, 1]])
    bbox = np.dot(bbox, rotation)
    scale = bbox[3, 0] - bbox[0, 0]
    bbox /= scale

    partial = np.dot(partial - center, rotation) / scale
    partial = np.dot(partial, [[1, 0, 0], [0, 0, 1], [0, 1, 0]])

    start = time.time()
    completion = sess.run(model.outputs,
                          feed_dict={
                              inputs: [partial],
                              npts: [partial.shape[0]]
                          })

    completion = completion[0]

    completion_w = np.dot(completion, [[1, 0, 0], [0, 0, 1], [0, 1, 0]])
    completion_w = np.dot(completion_w * scale, rotation.T) + center

    pcd_filename = os.path.basename(args.pcd_file)
    result_path = os.path.join(args.results_dir, pcd_filename)
    save_pcd(result_path, completion_w)

    plot_path = os.path.join(args.results_dir, 'plots.png')
    plot_pcd_three_views(plot_path, [partial, completion], ['input', 'output'],
                         '%d input points' % partial.shape[0], [5, 0.5])
    sess.close()
    t1 = time.time()  # renamed from 'tf' to avoid shadowing the tensorflow module
    print('Total time: {}'.format(t1 - t0))
Code Example #6
File: test_kitti.py  Project: no-materials/pcn
def test(args):
    inputs = tf.placeholder(tf.float32, (1, None, 3))
    npts = tf.placeholder(tf.int32, (1,))
    gt = tf.placeholder(tf.float32, (1, args.num_gt_points, 3))
    model_module = importlib.import_module('.%s' % args.model_type, 'models')
    model = model_module.Model(inputs, npts, gt, tf.constant(1.0))

    os.makedirs(os.path.join(args.results_dir, 'plots'), exist_ok=True)
    os.makedirs(os.path.join(args.results_dir, 'completions'), exist_ok=True)

    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    config.allow_soft_placement = True
    sess = tf.Session(config=config)

    saver = tf.train.Saver()
    saver.restore(sess, args.checkpoint)

    car_ids = [filename.split('.')[0] for filename in os.listdir(args.pcd_dir)]
    total_time = 0
    total_points = 0
    for i, car_id in enumerate(car_ids):
        partial = read_pcd(os.path.join(args.pcd_dir, '%s.pcd' % car_id))
        bbox = np.loadtxt(os.path.join(args.bbox_dir, '%s.txt' % car_id))
        total_points += partial.shape[0]

        # Calculate center, rotation and scale
        center = (bbox.min(0) + bbox.max(0)) / 2
        bbox -= center
        yaw = np.arctan2(bbox[3, 1] - bbox[0, 1], bbox[3, 0] - bbox[0, 0])
        rotation = np.array([[np.cos(yaw), -np.sin(yaw), 0],
                            [np.sin(yaw), np.cos(yaw), 0],
                            [0, 0, 1]])
        bbox = np.dot(bbox, rotation)
        scale = bbox[3, 0] - bbox[0, 0]
        bbox /= scale

        partial = np.dot(partial - center, rotation) / scale
        partial = np.dot(partial, [[1, 0, 0], [0, 0, 1], [0, 1, 0]])

        start = time.time()
        completion = sess.run(model.outputs, feed_dict={inputs: [partial], npts: [partial.shape[0]]})
        total_time += time.time() - start
        completion = completion[0]

        completion_w = np.dot(completion, [[1, 0, 0], [0, 0, 1], [0, 1, 0]])
        completion_w = np.dot(completion_w * scale, rotation.T) + center
        pcd_path = os.path.join(args.results_dir, 'completions', '%s.pcd' % car_id)
        save_pcd(pcd_path, completion_w)

        if i % args.plot_freq == 0:
            plot_path = os.path.join(args.results_dir, 'plots', '%s.png' % car_id)
            plot_pcd_three_views(plot_path, [partial, completion], ['input', 'output'],
                                 '%d input points' % partial.shape[0], [5, 0.5])
    print('Average # input points:', total_points / len(car_ids))
    print('Average time:', total_time / len(car_ids))
    sess.close()
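
Examples #5, #6, and #8 repeat the same bounding-box normalization: the eight bbox corners give a center, a yaw rotation that aligns the box with the x-axis, and a scale from the box length; the partial cloud is mapped into that canonical frame (with a y/z axis swap) before inference, and the completion is mapped back with the inverse transform. A standalone sketch of that logic follows, with illustrative helper names that are not taken from the original repos.

import numpy as np

def bbox_to_pose(bbox):
    # bbox: (8, 3) array of bounding-box corner points, as loaded by np.loadtxt.
    center = (bbox.min(0) + bbox.max(0)) / 2
    bbox = bbox - center
    yaw = np.arctan2(bbox[3, 1] - bbox[0, 1], bbox[3, 0] - bbox[0, 0])
    rotation = np.array([[np.cos(yaw), -np.sin(yaw), 0],
                         [np.sin(yaw), np.cos(yaw), 0],
                         [0, 0, 1]])
    rotated = np.dot(bbox, rotation)
    scale = rotated[3, 0] - rotated[0, 0]
    return center, rotation, scale

def world_to_model(points, center, rotation, scale):
    # Map LiDAR-frame points into the canonical frame the network expects
    # (translate, rotate, rescale, then swap the y and z axes as in the snippets above).
    points = np.dot(points - center, rotation) / scale
    return np.dot(points, [[1, 0, 0], [0, 0, 1], [0, 1, 0]])

def model_to_world(points, center, rotation, scale):
    # Inverse transform: undo the axis swap, then rescale, rotate back, and translate.
    points = np.dot(points, [[1, 0, 0], [0, 0, 1], [0, 1, 0]])
    return np.dot(points * scale, rotation.T) + center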
Code Example #7
File: lmdb_writer.py  Project: lt6253090/OcCo
def get_data(self):
    for model_id in self.model_list:
        # if 'test' not in model_id:
        #     print(model_id)
        #     continue
        # the raw data format of ModelNet40 is '.obj'
        complete = sample_from_mesh(
            os.path.join(self.complete_dir, '%s.obj' % model_id))
        for i in range(self.num_scans):
            partial = read_pcd(
                os.path.join(self.partial_dir, model_id + '_%d.pcd' % i))
            partial = partial[np.random.choice(len(partial),
                                               self.num_ppoints)]
            yield model_id.replace('/', '_'), partial, complete
Code Example #8
def test(args):
    inputs = tf.placeholder(tf.float32, (1, None, 3))
    npts = tf.placeholder(tf.int32, (1, ))
    gt = tf.placeholder(tf.float32, (1, args.num_gt_points, 3))
    model_module = importlib.import_module('.%s' % args.model_type, 'models')
    model = model_module.Model(inputs, npts, gt, tf.constant(1.0))

    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    config.allow_soft_placement = True
    sess = tf.Session(config=config)

    saver = tf.train.Saver()
    saver.restore(sess, args.checkpoint)

    an = Analyzer(dr.get_PLIDAR_predicted_path)
    argument_list = an.get_datatype_trackno_carno(data_types=["train"])

    for i in range(len(argument_list)):
        data_type, track_no, car_no = argument_list[i]
        # Input - partial point cloud .pcd
        pcd_file = \
            dr.get_pcn_lidar_reference_partial_path(data_type, track_no,
                                                    car_no, extension='pcd')
        # Input - bbox 8 corners .txt
        bbox_file = dr.get_pcn_bbox_lidar_path(data_type, track_no, car_no)

        # Result - complete point cloud .pcd
        result_path = dr.get_pcn_lidar_reference_complete_path(
            data_type, track_no, car_no)
        # Result - plot of input and output of pointcloud
        plot_path = dr.get_pcn_plot_lidar_path(data_type, track_no, car_no)

        partial = read_pcd(pcd_file)
        bbox = np.loadtxt(bbox_file)

        # Calculate center, rotation and scale
        center = (bbox.min(0) + bbox.max(0)) / 2
        bbox -= center
        yaw = np.arctan2(bbox[3, 1] - bbox[0, 1], bbox[3, 0] - bbox[0, 0])
        rotation = np.array([[np.cos(yaw), -np.sin(yaw), 0],
                             [np.sin(yaw), np.cos(yaw), 0], [0, 0, 1]])
        bbox = np.dot(bbox, rotation)
        scale = bbox[3, 0] - bbox[0, 0]
        bbox /= scale

        partial = np.dot(partial - center, rotation) / scale
        partial = np.dot(partial, [[1, 0, 0], [0, 0, 1], [0, 1, 0]])

        completion = sess.run(model.outputs,
                              feed_dict={
                                  inputs: [partial],
                                  npts: [partial.shape[0]]
                              })
        completion = completion[0]

        completion_w = np.dot(completion, [[1, 0, 0], [0, 0, 1], [0, 1, 0]])
        completion_w = np.dot(completion_w * scale, rotation.T) + center

        save_pcd(result_path, completion_w)

        plot_pcd_three_views(plot_path, [partial, completion],
                             ['input', 'output'],
                             '%d input points' % partial.shape[0], [5, 0.5])

        print('Finish {}/{}'.format(i + 1, len(argument_list)))

    sess.close()
Code Example #9
def test(args):
    inputs = tf.placeholder(tf.float32, (1, None, 3))
    gt = tf.placeholder(tf.float32, (1, args.num_gt_points, 3))
    model_module = importlib.import_module('.%s' % args.model_type, 'models')
    model = model_module.Model(inputs, gt, tf.constant(1.0))

    output = tf.placeholder(tf.float32, (1, args.num_gt_points, 3))
    cd_op = chamfer(output, gt)
    emd_op = earth_mover(output, gt)

    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    config.allow_soft_placement = True
    sess = tf.Session(config=config)

    saver = tf.train.Saver()
    saver.restore(sess, args.checkpoint)

    os.makedirs(args.results_dir, exist_ok=True)
    csv_file = open(os.path.join(args.results_dir, 'results.csv'), 'w')
    writer = csv.writer(csv_file)
    writer.writerow(['id', 'cd', 'emd'])

    with open(args.list_path) as file:
        model_list = file.read().splitlines()
    total_time = 0
    total_cd = 0
    total_emd = 0
    cd_per_cat = {}
    emd_per_cat = {}
    for i, model_id in enumerate(model_list):
        partial = read_pcd(
            os.path.join(args.data_dir, 'partial', '%s.pcd' % model_id))
        complete = read_pcd(
            os.path.join(args.data_dir, 'complete', '%s.pcd' % model_id))
        start = time.time()
        completion = sess.run(model.outputs, feed_dict={inputs: [partial]})
        total_time += time.time() - start
        cd, emd = sess.run([cd_op, emd_op],
                           feed_dict={
                               output: completion,
                               gt: [complete]
                           })
        total_cd += cd
        total_emd += emd
        writer.writerow([model_id, cd, emd])

        synset_id, model_id = model_id.split('/')
        if not cd_per_cat.get(synset_id):
            cd_per_cat[synset_id] = []
        if not emd_per_cat.get(synset_id):
            emd_per_cat[synset_id] = []
        cd_per_cat[synset_id].append(cd)
        emd_per_cat[synset_id].append(emd)

        if i % args.plot_freq == 0:
            os.makedirs(os.path.join(args.results_dir, 'plots', synset_id),
                        exist_ok=True)
            plot_path = os.path.join(args.results_dir, 'plots', synset_id,
                                     '%s.png' % model_id)
            plot_pcd_three_views(plot_path, [partial, completion[0], complete],
                                 ['input', 'output', 'ground truth'],
                                 'CD %.4f  EMD %.4f' % (cd, emd),
                                 [5, 0.5, 0.5])
        if args.save_pcd:
            os.makedirs(os.path.join(args.results_dir, 'pcds', synset_id),
                        exist_ok=True)
            save_pcd(
                os.path.join(args.results_dir, 'pcds', synset_id,
                             '%s.pcd' % model_id), completion[0])
    csv_file.close()
    sess.close()

    print('Average time: %f' % (total_time / len(model_list)))
    print('Average Chamfer distance: %f' % (total_cd / len(model_list)))
    print('Average Earth mover distance: %f' % (total_emd / len(model_list)))
    print('Chamfer distance per category')
    for synset_id in cd_per_cat.keys():
        print(synset_id, '%f' % np.mean(cd_per_cat[synset_id]))
    print('Earth mover distance per category')
    for synset_id in emd_per_cat.keys():
        print(synset_id, '%f' % np.mean(emd_per_cat[synset_id]))
Code Example #10
File: test_shapenet.py  Project: jtpils/softpool
def test(args):
    inputs = tf.placeholder(tf.float32, (1, None, 3))
    npts = tf.placeholder(tf.int32, (1, ))
    gt = tf.placeholder(tf.float32, (1, args.num_gt_points, 6))
    model_module = importlib.import_module('.%s' % args.model_type, 'models')
    model = model_module.Model(inputs, npts, gt, tf.constant(1.0),
                               args.num_channel)

    output = tf.placeholder(tf.float32,
                            (1, args.num_gt_points, 3 + args.num_channel))
    cd_op = chamfer(output[:, :, 0:3], gt[:, :, 0:3])
    emd_op = earth_mover(output[:, :, 0:3], gt[:, :, 0:3])

    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    config.allow_soft_placement = True
    sess = tf.Session(config=config)

    saver = tf.train.Saver()
    saver.restore(sess, args.checkpoint)

    os.makedirs(args.results_dir, exist_ok=True)
    csv_file = open(os.path.join(args.results_dir, 'results.csv'), 'w')
    writer = csv.writer(csv_file)
    writer.writerow(['id', 'cd', 'emd'])

    with open(args.list_path) as file:
        model_list = file.read().splitlines()
    total_time = 0
    total_cd = 0
    total_emd = 0
    cd_per_cat = {}
    emd_per_cat = {}
    np.random.seed(1)
    for i, model_id in enumerate(model_list):
        if args.experiment == 'shapenet':
            synset_id, model_id = model_id.split('/')
            partial = read_pcd(
                os.path.join(args.data_dir, 'partial', synset_id,
                             '%s.pcd' % model_id))
            complete = read_pcd(
                os.path.join(args.data_dir, 'complete', synset_id,
                             '%s.pcd' % model_id))
        elif args.experiment == 'suncg':
            synset_id = 'all_rooms'
            partial = read_pcd(
                os.path.join(args.data_dir, 'pcd_partial',
                             '%s.pcd' % model_id))
            complete = read_pcd(
                os.path.join(args.data_dir, 'pcd_complete',
                             '%s.pcd' % model_id))
        if args.rotate:
            angle = np.random.rand(1) * 2 * np.pi
            partial = np.stack([
                np.cos(angle) * partial[:, 0] - np.sin(angle) * partial[:, 2],
                partial[:, 1],
                np.sin(angle) * partial[:, 0] + np.cos(angle) * partial[:, 2]
            ],
                               axis=-1)
            complete = np.stack([
                np.cos(angle) * complete[:, 0] -
                np.sin(angle) * complete[:, 2], complete[:, 1],
                np.sin(angle) * complete[:, 0] +
                np.cos(angle) * complete[:, 2], complete[:, 3], complete[:, 4],
                complete[:, 5]
            ],
                                axis=-1)
        partial = partial[:, :3]
        complete = resample_pcd(complete, 16384)
        start = time.time()
        completion1, completion2, mesh_out = sess.run(
            [model.outputs1, model.outputs2, model.gt_can],
            feed_dict={
                inputs: [partial],
                npts: [partial.shape[0]],
                gt: [complete]
            })
        completion1[0][:, (3 + args.num_channel):] *= 0
        completion2[0][:, (3 + args.num_channel):] *= 0
        mesh_out[0][:, (3 + args.num_channel):] *= 0
        total_time += time.time() - start
        # cd, emd = sess.run([cd_op, emd_op],
        # NOTE: emd_op is bypassed here, so 'emd' actually holds a second Chamfer distance.
        cd, emd = sess.run([cd_op, cd_op],
                           feed_dict={
                               output: completion2,
                               gt: [complete]
                           })
        total_cd += cd
        total_emd += emd
        if not cd_per_cat.get(synset_id):
            cd_per_cat[synset_id] = []
        if not emd_per_cat.get(synset_id):
            emd_per_cat[synset_id] = []
        cd_per_cat[synset_id].append(cd)
        emd_per_cat[synset_id].append(emd)
        writer.writerow([model_id, cd, emd])

        if i % args.plot_freq == 0:
            os.makedirs(
                os.path.join(args.results_dir, 'plots', synset_id),
                exist_ok=True)
            plot_path = os.path.join(args.results_dir, 'plots', synset_id,
                                     '%s.png' % model_id)
            plot_pcd_three_views(
                plot_path, [
                    partial, completion1[0], completion2[0], mesh_out[0],
                    complete
                ], ['input', 'coarse', 'fine', 'mesh', 'ground truth'],
                'CD %.4f  EMD %.4f' % (cd, emd), [5, 0.5, 0.5, 0.5, 0.5],
                num_channel=args.num_channel)
        if args.save_pcd:
            os.makedirs(
                os.path.join(args.results_dir, 'input', synset_id),
                exist_ok=True)
            pts_coord = partial[:, 0:3]
            pts_color = matplotlib.cm.cool((partial[:, 1]))[:, 0:3]
            # save_pcd(os.path.join(args.results_dir, 'input', synset_id, '%s.ply' % model_id), np.concatenate((pts_coord, pts_color), -1))
            pcd = PointCloud()
            pcd.points = Vector3dVector(pts_coord)
            pcd.colors = Vector3dVector(pts_color)
            write_point_cloud(
                os.path.join(args.results_dir, 'input', synset_id,
                             '%s.ply' % model_id),
                pcd,
                write_ascii=True)
            os.makedirs(
                os.path.join(args.results_dir, 'output1', synset_id),
                exist_ok=True)
            pts_coord = completion1[0][:, 0:3]
            pts_color = matplotlib.cm.Set1(
                (np.argmax(completion1[0][:, 3:3 + args.num_channel], -1) +
                 1) / args.num_channel - 0.5 / args.num_channel)[:, 0:3]
            # pts_color = matplotlib.cm.tab20((np.argmax(completion1[0][:, 3:3+args.num_channel], -1) + 1)/args.num_channel - 0.5/args.num_channel)[:,0:3]
            # save_pcd(os.path.join(args.results_dir, 'output1', synset_id, '%s.ply' % model_id), np.concatenate((pts_coord, pts_color), -1))
            pcd.points = Vector3dVector(pts_coord)
            pcd.colors = Vector3dVector(pts_color)
            write_point_cloud(
                os.path.join(args.results_dir, 'output1', synset_id,
                             '%s.ply' % model_id),
                pcd,
                write_ascii=True)
            os.makedirs(
                os.path.join(args.results_dir, 'output2', synset_id),
                exist_ok=True)
            pts_coord = completion2[0][:, 0:3]
            pts_color = matplotlib.cm.Set1(
                (np.argmax(completion2[0][:, 3:3 + args.num_channel], -1) +
                 1) / args.num_channel - 0.5 / args.num_channel)[:, 0:3]
            # pts_color = matplotlib.cm.tab20((np.argmax(completion2[0][:, 3:3+args.num_channel], -1) + 1)/args.num_channel - 0.5/args.num_channel)[:,0:3]
            # save_pcd(os.path.join(args.results_dir, 'output2', synset_id, '%s.ply' % model_id), np.concatenate((pts_coord, pts_color), -1))
            pcd.points = Vector3dVector(pts_coord)
            pcd.colors = Vector3dVector(pts_color)
            write_point_cloud(
                os.path.join(args.results_dir, 'output2', synset_id,
                             '%s.ply' % model_id),
                pcd,
                write_ascii=True)
            #######
            os.makedirs(
                os.path.join(args.results_dir, 'regions', synset_id),
                exist_ok=True)
            for idx in range(3, 3 + args.num_channel):
                val_min = np.min(completion2[0][:, idx])
                val_max = np.max(completion2[0][:, idx])
                pts_color = 0.8 * matplotlib.cm.Reds(
                    (completion2[0][:, idx] - val_min) /
                    (val_max - val_min))[:, 0:3]
                pts_color += 0.2 * matplotlib.cm.gist_gray(
                    (completion2[0][:, idx] - val_min) /
                    (val_max - val_min))[:, 0:3]
                pcd.colors = Vector3dVector(pts_color)
                write_point_cloud(
                    os.path.join(args.results_dir, 'regions', synset_id,
                                 '%s_%s.ply' % (model_id, idx - 3)),
                    pcd,
                    write_ascii=True)
            os.makedirs(
                os.path.join(args.results_dir, 'gt', synset_id), exist_ok=True)
            pts_coord = complete[:, 0:3]
            if args.experiment == 'shapenet':
                pts_color = matplotlib.cm.cool(complete[:, 1])[:, 0:3]
            elif args.experiment == 'suncg':
                pts_color = matplotlib.cm.Set1(complete[:, 3] -
                                               0.5 / args.num_channel)[:, 0:3]
            # save_pcd(os.path.join(args.results_dir, 'gt', synset_id, '%s.ply' % model_id), np.concatenate((pts_coord, pts_color), -1))
            pcd.points = Vector3dVector(pts_coord)
            pcd.colors = Vector3dVector(pts_color)
            write_point_cloud(
                os.path.join(args.results_dir, 'gt', synset_id,
                             '%s.ply' % model_id),
                pcd,
                write_ascii=True)
    sess.close()

    print('Average time: %f' % (total_time / len(model_list)))
    print('Average Chamfer distance: %f' % (total_cd / len(model_list)))
    print('Average Earth mover distance: %f' % (total_emd / len(model_list)))
    writer.writerow([
        total_time / len(model_list), total_cd / len(model_list),
        total_emd / len(model_list)
    ])
    print('Chamfer distance per category')
    for synset_id in cd_per_cat.keys():
        print(synset_id, '%f' % np.mean(cd_per_cat[synset_id]))
        writer.writerow([synset_id, np.mean(cd_per_cat[synset_id])])
    print('Earth mover distance per category')
    for synset_id in emd_per_cat.keys():
        print(synset_id, '%f' % np.mean(emd_per_cat[synset_id]))
        writer.writerow([synset_id, np.mean(emd_per_cat[synset_id])])
    csv_file.close()
Code Example #11
        print("=== ModelNet40 ===\n")
        for t in ['train', 'test']:
            # for res in ['fine', 'middle', 'coarse', 'supercoarse']:
            for res in ['supercoarse']:
                sum_dict = {}
                for shape in shape_names:
                    sum_dict[shape] = np.zeros(
                        3, dtype=np.int32
                    )  # num of objects, num of points, average

                model_list = [_file for _file in file_ if t in _file]
                for model_id in tqdm(model_list):
                    model_name = model_id.split('/')[0]
                    for i in range(10):
                        partial_pc = read_pcd(
                            os.path.join(MODELNET40_PATH + res, 'pcd',
                                         model_id + '_%d.pcd' % i))
                        sum_dict[model_name][1] += len(partial_pc)
                        sum_dict[model_name][0] += 1

                    sum_dict[model_name][2] = (sum_dict[model_name][1] /
                                               sum_dict[model_name][0])

                f = open("./dump_sum_points/modelnet40_%s_%s.txt" % (t, res),
                         "w+")
                for key in sum_dict.keys():
                    f.writelines([key, str(sum_dict[key]), '\n'])
                f.close()
                print("=== ModelNet40 %s %s Done ===\n" % (t, res))

    elif args.dataset == 'shapenet8':
Code Example #12
    model_module = importlib.import_module('.%s' % args.model_type, 'models')
    model = model_module.Model(inputs, npts, gt, tf.constant(1.0))

    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    config.allow_soft_placement = True
    sess = tf.Session(config=config)

    saver = tf.train.Saver()
    saver.restore(sess, args.checkpoint)

#    partial = read_point_cloud(args.input_path)
#    partial = np.array(partial.points)
    print('Processing', args.input_path)
    if args.input_path.endswith('.pcd'):
        partial = read_pcd(args.input_path)
    elif args.input_path.endswith('.ply'):
        scene, _ = loadPLY(args.input_path)
        partial = scene[:, :3].copy()
        center = 0.5 * (partial.min(axis=0) + partial.max(axis=0))
        partial -= center
        scale = (partial.max(axis=0) - partial.min(axis=0)).max()
        partial /= scale
        save_pcd('scaled_partial.pcd', partial)
        mean_color = scene[:, 3:6].mean(axis=0)
    print('partial', partial.shape)
    complete = sess.run(model.outputs, feed_dict={inputs: [partial], npts: [partial.shape[0]]})[0]
    print('complete', complete.shape)
    save_pcd('scaled_complete.pcd', complete)
    out = np.zeros((len(complete), 6))
    out[:, :3] = complete*scale + center
Code Example #13
File: test_kitti_all.py  Project: jlqzzz/pcn
def test(args):
    inputs = tf.placeholder(tf.float32, (1, None, 3))
    npts = tf.placeholder(tf.int32, (1, ))
    gt = tf.placeholder(tf.float32, (1, args.num_gt_points, 3))
    model_module = importlib.import_module('.%s' % args.model_type, 'models')
    model = model_module.Model(inputs, npts, gt, tf.constant(1.0))

    #os.makedirs(os.path.join(args.results_dir, args.drive, 'plots'), exist_ok=True)
    #os.makedirs(os.path.join(args.results_dir, args.drive, 'completions'), exist_ok=True)

    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    config.allow_soft_placement = True
    sess = tf.Session(config=config)

    saver = tf.train.Saver()
    saver.restore(sess, args.checkpoint)

    car_ids = [
        filename.split('.')[0]
        for filename in sorted(os.listdir(osp.join(args.base_dir, args.drive)))
        if '.ply' in filename and 'car' in filename
    ]
    total_time = 0
    total_points = 0
    for i, car_id in enumerate(car_ids):
        partial = read_pcd(
            os.path.join(args.base_dir, args.drive, '%s.ply' % car_id))
        affine_params = np.loadtxt(
            os.path.join(args.base_dir, args.drive, '%s.txt' % car_id))
        #img_car = Image.open(os.path.join(args.base_dir, args.drive, '%s.png' % car_id))
        #img_car = cv2.imread(os.path.join(args.base_dir, args.drive, '%s.png' % car_id))

        total_points += partial.shape[0]

        # Calculate center, rotation and scale
        center = affine_params[0:3]
        yaw = affine_params[3]
        rotation = np.array([[np.cos(yaw), -np.sin(yaw), 0],
                             [np.sin(yaw), np.cos(yaw), 0], [0, 0, 1]])
        scale = affine_params[4]

        partial = np.dot(partial - center, rotation) / scale
        partial = np.dot(partial, [[1, 0, 0], [0, 0, 1], [0, 1, 0]])

        start = time.time()
        completion = sess.run(model.outputs,
                              feed_dict={
                                  inputs: [partial],
                                  npts: [partial.shape[0]]
                              })
        total_time += time.time() - start
        completion = completion[0]

        completion_w = np.dot(completion, [[1, 0, 0], [0, 0, 1], [0, 1, 0]])
        scale_after = np.max(completion[:, 0]) - np.min(completion[:, 0])
        #completion_w = np.dot(completion_w * scale/scale_after, rotation.T) + center
        #completion_w[:,2] -= np.min(completion_w[:,2]) + 1.73
        with open(
                os.path.join(
                    args.base_dir, args.drive,
                    '%s.txt' % car_id.replace('car', 'completion_transform')),
                'w') as f:
            f.write("{:f} {:f} {:f} {:f} {:f} {:f}".format(center[0], center[1], center[2],\
                scale, scale_after, yaw))
        pcd_path = os.path.join(args.base_dir, args.drive,
                                '%s.ply' % car_id.replace('car', 'completion'))
        save_pcd(pcd_path, completion_w)

        #if i % args.plot_freq == 0:
        #plot_path = os.path.join(args.results_dir, args.drive, 'plots', '%s.png' % car_id)
        #plot_pcd_img(plot_path, [partial, completion], img_car, ['input', 'output'],
        #                         '%d input points' % partial.shape[0], [5, 0.5])
    print('Average # input points:', total_points / len(car_ids))
    print('Average time:', total_time / len(car_ids))
    sess.close()
Code Example #14
File: pca_testing.py  Project: hmgoforth/pcn
               s=0.5,
               cmap='Reds',
               vmin=-1,
               vmax=0.5)
    ax.set_xlim(-0.3, 0.3)
    ax.set_ylim(-0.3, 0.3)
    ax.set_zlim(-0.3, 0.3)


parser = argparse.ArgumentParser()
parser.add_argument('--point_cloud_dir')
args = parser.parse_args()

for point_cloud in os.listdir(args.point_cloud_dir):
    if point_cloud.endswith('.pcd'):
        pcd = read_pcd(os.path.join(args.point_cloud_dir, point_cloud))
        pcd = pcd - np.mean(pcd, axis=0)

        cov = np.cov(pcd.T)
        evals, evecs = np.linalg.eig(cov)
        sort_indices = np.argsort(evals)[::-1]
        evecs_sort = evecs[:, sort_indices]

        fig = plt.figure(figsize=(4, 4))
        ax = fig.add_subplot(111, projection='3d')
        plot_pcd(ax, pcd)
        ax.plot3D([0, evecs_sort[0, 0]], [0, evecs_sort[1, 0]],
                  [0, evecs_sort[2, 0]],
                  color='blue',
                  zdir='y')
        ax.plot3D([0, evecs_sort[0, 1]], [0, evecs_sort[1, 1]],