Example #1
    config_folder = sys.argv[1] + '/'
    assert os.path.exists(
        config_folder), 'directory %s does not exist' % config_folder

    config_files = [config_file for config_file in os.listdir(config_folder)]

    modulo_base = 1
    if len(sys.argv) > 2:
        modulo_base = max(1, int(sys.argv[2]))
        print('[Data] modulo base %d' % modulo_base)

    modulo_index = 0
    if len(sys.argv) > 3:
        modulo_index = max(0, int(sys.argv[3]))
        print('[Data] modulo index %d' % modulo_index)

    config = utils.read_json(config_folder + config_files[-1])

    watertight_directory = config['watertight_directory'] + '/'
    simplified_directory = config['simplified_directory'] + '/'
    utils.makedir(simplified_directory)

    off_files = utils.read_ordered_directory(watertight_directory)

    for n in range(len(off_files)):
        if (n - modulo_index) % modulo_base == 0:
            os.system('meshlabserver -i %s/%d.off -o %s/%d.off -s %s' %
                      (watertight_directory, n, simplified_directory, n,
                       config['simplification_script']))
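    # The (modulo_base, modulo_index) arguments shard the file list so that
    # several invocations of this script can run in parallel, each handling
    # every modulo_base-th file. A minimal sketch of the same idea (the helper
    # name `shard` is hypothetical, not part of the repository):
    def shard(files, modulo_base=1, modulo_index=0):
        # Keep exactly the files whose index falls in this shard's residue class.
        return [f for i, f in enumerate(files)
                if (i - modulo_index) % modulo_base == 0]

    # Two complementary shards cover the whole list exactly once:
    assert sorted(shard(list(range(10)), 2, 0) + shard(list(range(10)), 2, 1)) == list(range(10))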
Example #2
    modulo_base = 1
    if len(sys.argv) > 2:
        modulo_base = max(1, int(sys.argv[2]))
        print('[Data] modulo base %d' % modulo_base)

    modulo_index = 0
    if len(sys.argv) > 3:
        modulo_index = max(0, int(sys.argv[3]))
        print('[Data] modulo index %d' % modulo_index)

    scaled_directory = config['scaled_directory'] + '/'
    assert os.path.exists(
        scaled_directory), 'directory %s does not exist' % scaled_directory

    scaled_files = utils.read_ordered_directory(scaled_directory)
    n_files_expected = len(scaled_files)

    depth_directory = config['depth_directory'] + '/'
    while not os.path.exists(depth_directory):
        print('[Data] waiting for %s' % depth_directory)
        time.sleep(10)

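    # The polling loop above blocks until an upstream stage has written
    # depth_directory. A bounded variant, as a sketch (the helper name and
    # timeout are assumptions, not part of the repository):
    def wait_for_directory(directory, timeout=3600, interval=10):
        # Poll until the directory appears or the timeout elapses.
        waited = 0
        while not os.path.exists(directory) and waited < timeout:
            time.sleep(interval)
            waited += interval
        assert os.path.exists(directory), 'timed out waiting for %s' % directory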
    watertight_directory = config['watertight_directory'] + '/'
    tsdf_directory = config['tsdf_directory'] + '/'

    utils.makedir(watertight_directory)
    utils.makedir(tsdf_directory)
    timer = Timer()

    Rs = watertight_render.get_views(config['watertight_rendering']['n_views'])
Example #3
        size = np.array([image_height, image_width], dtype=np.int32)
        mesh_center = (config['mesh_center_x'], config['mesh_center_y'],
                       config['mesh_center_z'])
        znf = np.array([config['z_near'], config['z_far']], dtype=float)

        height = config['height']
        width = config['width']
        depth = config['depth']
        suffix = config['suffix']

        off_directory = config['multiplied_directory'] + '/' + set + '/'
        depth_file = common.filename(config, 'depth_file')
        angles_file = common.filename(config, 'render_orientation_file')

        off_files = utils.read_ordered_directory(off_directory)
        n_files = len(off_files)
        print('[Data] found %d off files in %s' % (n_files, off_directory))

        n_observations = config['n_observations']
        depth_maps = np.zeros(
            (n_files, n_observations, image_height, image_width))
        rotations = np.zeros((n_files, n_observations, 3))

        for n in range(n_files):
            base_mesh = Mesh.from_off(off_files[n])

            for k in range(n_observations):
                mesh = base_mesh.copy()
                # The snippet is cut off mid-assignment in the source; a
                # plausible completion (the uniform [0, 2*pi) range is an
                # assumption, not necessarily the repository's configured
                # rotation range) samples a random angle about the first axis:
                rotations[n, k, 0] = np.random.random() * 2 * np.pi
Example #4
    config_folder = sys.argv[1] + '/'
    assert os.path.exists(
        config_folder), 'directory %s does not exist' % config_folder

    config_files = ['test.json']
    for config_file in config_files:
        print('[Data] reading ' + config_folder + config_file)
        config = utils.read_json(config_folder + config_file)

        multiplier = config['multiplier']
        vis_directory = common.dirname(config, 'vis_dir')
        if not os.path.isdir(vis_directory):
            os.makedirs(vis_directory)

        depth_directory = config['depth_directory']
        depth_files = utils.read_ordered_directory(depth_directory)

        N = 30
        for i in range(N):
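            # Map the i-th of N visualizations to a file index spread roughly
            # evenly across all depth files; the multiplier factors cancel, so
            # n is approximately i * len(depth_files) / N.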
            n = i * (len(depth_files) * multiplier // N) // multiplier
            print('[Data] visualizing %d/%d' % ((n + 1), len(depth_files)))

            depths = utils.read_hdf5(depth_files[n])

            height = 3
            width = 6
            fig = plt.figure(figsize=(width * 1.6, height * 1.6))

            gs = matplotlib.gridspec.GridSpec(height, width)
            gs.update(wspace=0.025, hspace=0.025)
Example #5
    if len(sys.argv) < 2:
        print('[Data] Usage: python 3_simplify.py config_folder')
        exit(1)

    config_folder = sys.argv[1] + '/'
    assert os.path.exists(
        config_folder), 'directory %s does not exist' % config_folder

    # Rescale all models in place (this wasn't accounted for in the directory
    # layout, so no additional directories should be created).
    config_files = [config_file for config_file in os.listdir(config_folder)]

    for config_file in config_files:
        config = utils.read_json(config_folder + config_file)

        padding = config['padding']
        simplified_directory = config['simplified_directory'] + '/'
        off_files = utils.read_ordered_directory(simplified_directory)

        for n in range(len(off_files)):
            mesh = Mesh.from_off(off_files[n])

            # Get the extents of the model; use names that don't shadow the
            # min/max builtins.
            bb_min, bb_max = mesh.extents()
            total_min = np.min(np.array(bb_min))
            total_max = np.max(np.array(bb_max))

            # Set the center (although this should usually be the origin already).
            centers = ((bb_min[0] + bb_max[0]) / 2, (bb_min[1] + bb_max[1]) / 2,
                       (bb_min[2] + bb_max[2]) / 2)
            # Scale all dimensions equally.
            sizes = (total_max - total_min, total_max - total_min,
                     total_max - total_min)
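            # Sketch of the normalization these values feed into (assumptions:
            # the Mesh class exposes translate() and scale(), and the target is
            # the unit cube shrunk by the configured padding on each side):
            scale = (1.0 - 2.0 * padding) / (total_max - total_min)
            mesh.translate((-centers[0], -centers[1], -centers[2]))
            mesh.scale((scale, scale, scale))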
Example #6
            print('[Data] \'-> writing ' + common.filename(to_config, key))
            data = np.concatenate(tuple(data), axis=0)
            utils.write_hdf5(common.filename(to_config, key), data)

        key = 'off_gt_dir'
        count = 0

        for key in dir_keys:
            for from_config_folder in config_folders:
                from_config = utils.read_json(from_config_folder + config_file)

                from_dir = common.dirname(from_config, key)
                to_dir = common.dirname(to_config, key)
                utils.makedir(to_dir)

                files = utils.read_ordered_directory(from_dir)
                for i in range(len(files)):
                    from_file = files[i]
                    to_file = to_dir + '/%d.%s' % (count, from_file[-3:])

                    if i == 0:
                        print('[Data] +')
                        print('[Data] |- ' + from_file)
                        print('[Data] \'-> ' + to_file)
                    shutil.copy(from_file, to_file)
                    count += 1

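        # Sketch of the merge pattern implemented above: files from several
        # source folders are copied into one target folder under a single
        # running index (the function name is hypothetical, not the
        # repository's):
        def merge_directories(sources, target):
            # Copy every file from each source into target, renaming to a
            # contiguous 0-based index and keeping the original extension.
            utils.makedir(target)
            count = 0
            for source in sources:
                for path in utils.read_ordered_directory(source):
                    shutil.copy(path, target + '/%d.%s' % (count, path[-3:]))
                    count += 1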
    # 1 Assemble all HDF5 files.
    hdf5_file_keys = [
        'depth_file',
        'render_orientation_file',
Example #7
    config_folder = sys.argv[1] + '/'
    assert os.path.exists(config_folder), 'directory %s does not exist' % config_folder

    config_files = [config_file for config_file in os.listdir(config_folder)]
    config = utils.read_json(config_folder + config_files[-1])

    index = -1
    if len(sys.argv) > 2:
        index = int(sys.argv[2])

    if index >= 0:
        depths = utils.read_hdf5(config['depth_directory'] + '/%d.hdf5' % index)

        for i in range(depths.shape[0]):
            pyplot.clf()
            pyplot.imshow(depths[i], interpolation='none')
            pyplot.savefig('%d_%d_depth.png' % (index, i))
            print('[Data] wrote %d_%d_depth.png' % (index, i))
    else:
        n = 0
        depth_files = utils.read_ordered_directory(config['depth_directory'])
        for depth_file in depth_files:
            depths = utils.read_hdf5(depth_file)

            for i in range(depths.shape[0]):
                pyplot.clf()
                pyplot.imshow(depths[i], interpolation='none')
                pyplot.savefig('%d_%d_depth.png' % (n, i))
                print('[Data] wrote %d_%d_depth.png' % (n, i))

            n += 1
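            # Aside: when the axes and colorbar machinery of imshow are not
            # needed, matplotlib's pyplot.imsave writes the array directly,
            # e.g. pyplot.imsave('%d_%d_depth.png' % (n, i), depths[i]).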
Example #8
import common

if __name__ == '__main__':
    if len(sys.argv) < 2:
        print('[Data] Usage: python create_cuboids.py config_folder')
        exit(1)

    config_file = sys.argv[1]
    assert os.path.exists(config_file)
    config = utils.read_json(config_file)
    dataset = ntpath.basename(config_file)[:-5]

    orig_dir = common.filename(config, 'bounding_box_txt_directory', '',
                               dataset)
    gt_dir = common.filename(config, 'velodyne_gt_txt_directory', '', dataset)
    gt_files = utils.read_ordered_directory(gt_dir)

    for i in range(len(gt_files)):
        orig_file = orig_dir + '/' + ntpath.basename(gt_files[i])
        orig_point_cloud = PointCloud.from_txt(orig_file)
        gt_point_cloud = PointCloud.from_txt(gt_files[i])
        print('[Data] read ' + orig_file)
        print('[Data] read ' + gt_files[i])

        fig = plt.figure()
        ax = fig.add_subplot(111, projection='3d')

        xx = gt_point_cloud.points[:, 0]
        yy = gt_point_cloud.points[:, 1]
        zz = gt_point_cloud.points[:, 2]
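        # The snippet is cut off in the source; a plausible continuation
        # (assumption: a standard mpl_toolkits 3D scatter; the output filename
        # is hypothetical) would be:
        ax.scatter(xx, yy, zz, s=1)
        plt.savefig('%d_point_cloud.png' % i)
        plt.close(fig)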
Example #9
    config_files = [
        config_file for config_file in config_files
        if config_file[:-5] in config_sets
    ]

    for config_file in config_files:
        config = utils.read_json(config_folder + config_file)

        raw_directory = config['raw_directory'] + '/'
        assert os.path.exists(
            raw_directory), 'directory %s does not exist' % raw_directory

        # The models file was crafted based on MATLAB indexing: originally,
        # the first file in the list got the corresponding number, 1-based.
        # This means that the filter indices in the configuration file are
        # 1-based as well.

        filenames = utils.read_ordered_directory(raw_directory)
        print('[Data] found %d files' % len(filenames))
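        # Per the note above, filter indices in the configuration are 1-based;
        # a minimal conversion sketch (the key name 'filter' is an assumption):
        filter_indices = [index - 1 for index in config.get('filter', [])]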

        scaled_directory = config['scaled_directory'] + '/'
        utils.makedir(scaled_directory)

        padding = config['padding']

        j = 0
        for i in range(len(filenames)):

            if config['limit'] > 0 and i >= config['limit']:
                print('[Data] processed %d models, reached limit' % i)
                break

            in_file = filenames[i]
Example #10
            for k in range(n_observations):
                perm = np.random.permutation(n_observations)
                perm = perm[:i]
                print('[Data] perm ' + ', '.join(map(str, perm)))
                i_inputs[k] = np.sum(inputs[perm], axis=0)
                i_inputs[k] = np.clip(i_inputs[k], 0, 1)

                i_space[k] = np.sum(space[perm], axis=0)
                i_space[k] = np.clip(i_space[k], 0, 1)

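                # The occupancy-style grids above are fused as a union (sum,
                # then clip to {0, 1}); the signed-distance grids below are
                # fused with an element-wise minimum, keeping each voxel's
                # smallest signed distance across the selected views.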
                i_sdf_inputs[k] = np.min(sdf_inputs[perm], axis=0)
                i_tsdf_inputs[k] = np.min(tsdf_inputs[perm], axis=0)
                i_ltsdf_inputs[k] = np.min(ltsdf_inputs[perm], axis=0)

                # Also fuse the actual point clouds!
                txt_directories = utils.read_ordered_directory(
                    common.dirname(config, 'txt_gt_dir'))
                txt_directory = common.dirname(config, 'txt_gt_dir',
                                               i) + '%d/' % k
                utils.makedir(txt_directory)

                for n in range(N):
                    point_cloud = PointCloud()
                    print('[Data] +')
                    for j in range(perm.shape[0]):
                        txt_file = txt_directories[perm[j]] + '/%d.txt' % n
                        print('[Data] | read ' + txt_file)
                        point_cloud_j = PointCloud.from_txt(txt_file)
                        point_cloud.points = np.concatenate(
                            (point_cloud.points, point_cloud_j.points), axis=0)

                    txt_file = txt_directory + '%d.txt' % n
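                    # Plausible continuation (assumption: PointCloud offers a
                    # to_txt() writer symmetric to the from_txt() reader used
                    # above):
                    point_cloud.to_txt(txt_file)
                    print('[Data] \'-> wrote ' + txt_file)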