Example #1
def postprocess(output_file, cubes, points_numbers, cube_positions, scale,
                cube_size, rho):
    """Classify voxels to occupied or free, then extract points and write to file.
  Input:  deocded cubes, cube positions, points numbers, cube size and rho=ouput numbers/input numbers.
  """

    print('===== Post process =====')
    # Classify.
    start = time.time()
    output = select_voxels(cubes, points_numbers, rho)

    # Extract points.
    points = voxels2points(output)
    print("Classify and extract points: {}s".format(
        round(time.time() - start, 4)))

    # scaling (optional)
    start = time.time()
    if scale == 1:
        save_points(points, cube_positions, output_file, cube_size)
    else:
        scaling_output_file = './downsampling_rec.ply'
        save_points(points, cube_positions, scaling_output_file, cube_size)
        pc = load_ply_data(scaling_output_file)
        # Rescale back to the original coordinate system.
        pc_up = pc.astype('float32') / float(scale)
        write_ply_data(output_file, pc_up)
    print("Write point cloud to {}: {}s".format(output_file,
                                                round(time.time() - start, 4)))

    return
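A minimal usage sketch, assuming the decoder already produced `cubes` in the layout of `points2voxels` and that `points_numbers` and `cube_positions` come from the matching preprocess step; the output path and numeric arguments are placeholders, not values from the source:

# Hypothetical call: reconstruct a point cloud from decoded cubes.
postprocess(output_file='./rec.ply',        # placeholder output path
            cubes=decoded_cubes,            # decoded voxel cubes (assumed variable)
            points_numbers=points_numbers,  # per-cube input point counts
            cube_positions=cube_positions,  # cube coordinates from preprocess
            scale=1.0,                      # 1 == skip the rescaling branch
            cube_size=64,                   # illustrative cube edge length
            rho=1.0)                        # output points / input points ratio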
Example #2
def preprocess(input_file, scale, cube_size, min_num):
  """Scaling, Partition & Voxelization.
  Input: .ply file and arguments for pre-process.  
  Output: partitioned cubes, cube positions, and number of points in each cube. 
  """

  print('===== Preprocess =====')
  # scaling (optional)
  start = time.time()
  if scale == 1:
    scaling_file = input_file 
  else:
    pc = load_ply_data(input_file)
    pc_down = np.round(pc.astype('float32') * scale)
    pc_down = np.unique(pc_down, axis=0)  # remove duplicated points
    scaling_file = './downscaling.ply'
    write_ply_data(scaling_file, pc_down)
  print("Scaling: {}s".format(round(time.time()-start, 4)))

  # partition.
  start = time.time()
  partitioned_points, cube_positions = load_points(scaling_file, cube_size, min_num)
  print("Partition: {}s".format(round(time.time()-start, 4)))

  # voxelization.
  start = time.time()
  cubes = points2voxels(partitioned_points, cube_size)
  points_numbers = np.sum(cubes, axis=(1,2,3,4)).astype(np.uint16)
  print("Voxelization: {}s".format(round(time.time()-start, 4)))

  print('cubes shape: {}'.format(cubes.shape))
  print('points numbers (sum/mean/max/min): {} {} {} {}'.format(
      points_numbers.sum(), round(points_numbers.mean()),
      points_numbers.max(), points_numbers.min()))

  return cubes, cube_positions, points_numbers
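A usage sketch for the function above; the input path and argument values are illustrative placeholders:

# Hypothetical call: partition and voxelize a point cloud without rescaling.
cubes, cube_positions, points_numbers = preprocess(
    input_file='./input.ply',  # placeholder .ply path
    scale=1.0,                 # 1 == skip the down-scaling branch
    cube_size=64,              # edge length of each partition cube
    min_num=20)                # assumed meaning: minimum points per kept cube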
Example #3
def write_binary_files(filename, strings, points_numbers, cube_positions, min_v, max_v, shape, rootdir='./'):
  """Write compressed binary files:
    1) Compressed latent features.
    2) Number of input points.
    3) Positions of each cube.
  """ 
  if not os.path.exists(rootdir):
    os.makedirs(rootdir)
  print('===== Write binary files =====')
  file_strings = os.path.join(rootdir, filename+'.strings')
  file_pointnums = os.path.join(rootdir, filename+'.pointnums')
  file_cubepos = os.path.join(rootdir, filename+'.cubepos')
  ply_cubepos = os.path.join(rootdir, filename+'_cubepos.ply')
  
  with open(file_strings, 'wb') as f:
    f.write(np.array(shape, dtype=np.int16).tobytes())  # [batch size, length, width, height, channels]
    f.write(np.array((min_v, max_v), dtype=np.int8).tobytes())
    f.write(strings)

  # TODO: Compress numbers of points.
  with open(file_pointnums, 'wb') as f:
    f.write(np.array(points_numbers, dtype=np.uint16).tobytes())
  
  write_ply_data(ply_cubepos, cube_positions.astype('uint8'))
  gpcc_encode(ply_cubepos, file_cubepos)
  
  bytes_strings = os.path.getsize(file_strings)
  bytes_pointnums = os.path.getsize(file_pointnums)
  bytes_cubepos = os.path.getsize(file_cubepos)
  print('Total file size (Bytes): {}'.format(bytes_strings+bytes_pointnums+bytes_cubepos))
  print('Strings (Bytes): {}'.format(bytes_strings))
  print('Numbers of points (Bytes): {}'.format(bytes_pointnums))
  print('Positions of cubes (Bytes): {}'.format(bytes_cubepos))

  return bytes_strings, bytes_pointnums, bytes_cubepos
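A sketch of how the three bitstreams might be written after encoding; `strings`, `min_v`, `max_v` and `y_shape` are assumed to come from an entropy coder over the latent features and are not defined in the source:

# Hypothetical call; 'frame0' is a placeholder file stem.
bytes_strings, bytes_pointnums, bytes_cubepos = write_binary_files(
    filename='frame0',
    strings=strings,                # entropy-coded latent features (bytes)
    points_numbers=points_numbers,  # uint16 point count per cube
    cube_positions=cube_positions,  # integer cube coordinates
    min_v=min_v, max_v=max_v,       # quantization bounds of the latents
    shape=y_shape,                  # [batch size, length, width, height, channels]
    rootdir='./compressed')
print('Payload:', bytes_strings + bytes_pointnums + bytes_cubepos, 'Bytes')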
Example #4
def postprocess(output_file,
                cubes,
                points_numbers,
                cube_positions,
                scale,
                cube_size,
                rho,
                fixed_thres=None):
    """Classify voxels to occupied or free, then extract points and write to file.
    Input:  deocded cubes, cube positions, points numbers, cube size and rho=ouput numbers/input numbers.
    """
    # Semi-unique prefix (output name stem + random suffix) for the temporary rescaled file.
    prefix = output_file.split('/')[-1].split('_')[0] + str(
        random.randint(1, 100))
    print('===== Post process =====')
    # Classify.
    start = time.time()
    output = select_voxels(cubes, points_numbers, rho, fixed_thres=fixed_thres)

    # Extract points.
    points = voxels2points(output)
    print("Classify and extract points: {}s".format(
        round(time.time() - start, 4)))

    # scaling (optional)
    start = time.time()
    if scale == 1:
        save_points(points, cube_positions, output_file, cube_size)
    else:
        scaling_output_file = prefix + 'downsampling_rec.ply'
        save_points(points, cube_positions, scaling_output_file, cube_size)
        pc = load_ply_data(scaling_output_file)
        pc_up = pc.astype('float32') * float(1 / scale)
        write_ply_data(output_file, pc_up)
        os.system("rm " + scaling_output_file)
    print("Write point cloud to {}: {}s".format(output_file,
                                                round(time.time() - start, 4)))

    return
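Relative to Example #1, the only call-level difference is the optional `fixed_thres`; a sketch with illustrative values (how `select_voxels` interprets the threshold is an assumption):

# Hypothetical call: rescale the reconstruction and use a fixed occupancy threshold.
postprocess('./rec.ply', decoded_cubes, points_numbers, cube_positions,
            scale=0.5,        # reconstruction is multiplied back by 1/scale
            cube_size=64,
            rho=1.0,
            fixed_thres=0.5)  # assumed: occupancy probability cut-off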
Example #5
def write_binary_files_hyper(filename,
                             y_strings,
                             z_strings,
                             points_numbers,
                             cube_positions,
                             y_min_vs,
                             y_max_vs,
                             y_shape,
                             z_min_v,
                             z_max_v,
                             z_shape,
                             rootdir='./'):
    """Write compressed binary files:
    1) Compressed latent features.
    2) Compressed hyperprior.
    3) Number of input points.
    4) Positions of each cube.
  """

    if not os.path.exists(rootdir):
        os.makedirs(rootdir)
    print('===== Write binary files =====')
    file_strings = os.path.join(rootdir, filename + '.strings')
    file_strings_head = os.path.join(rootdir, filename + '.strings_head')
    file_strings_hyper = os.path.join(rootdir, filename + '.strings_hyper')
    file_pointnums = os.path.join(rootdir, filename + '.pointnums')
    file_cubepos = os.path.join(rootdir, filename + '.cubepos')
    ply_cubepos = os.path.join(rootdir, filename + '_cubepos.ply')

    with open(file_strings_head, 'wb') as f:
        f.write(np.array(len(y_strings), dtype=np.int16).tobytes())
        # Pack each string's max and (negated) min into a single uint8: max * 16 - min.
        y_max_min_vs = y_max_vs * 16 - y_min_vs
        f.write(np.array(y_max_min_vs, dtype=np.uint8).tobytes())
        y_strings_lens = np.array([len(y_string) for y_string in y_strings])
        for l in y_strings_lens:
            if l <= 255:
                f.write(np.array(l, dtype=np.uint8).tobytes())
            else:
                # Escape: a zero byte signals that the real length follows as int16.
                f.write(np.array(0, dtype=np.uint8).tobytes())
                f.write(np.array(l, dtype=np.int16).tobytes())
        # y_shape layout: [batch size, length, width, height, channels]
        f.write(np.array(y_shape, dtype=np.int16).tobytes())

    with open(file_strings, 'wb') as f:
        for y_string in y_strings:
            f.write(y_string)

    with open(file_strings_hyper, 'wb') as f:
        # z_shape layout: [batch size, length, width, height, channels]
        f.write(np.array(z_shape, dtype=np.int16).tobytes())
        f.write(np.array((z_min_v, z_max_v), dtype=np.int8).tobytes())
        f.write(z_strings)

    # TODO: Compress numbers of points.
    with open(file_pointnums, 'wb') as f:
        f.write(np.array(points_numbers, dtype=np.uint16).tobytes())

    write_ply_data(ply_cubepos, cube_positions.astype('uint8'))
    gpcc_encode(ply_cubepos, file_cubepos)

    bytes_strings = os.path.getsize(file_strings)
    bytes_strings_head = os.path.getsize(file_strings_head)
    bytes_strings_hyper = os.path.getsize(file_strings_hyper)
    bytes_pointnums = os.path.getsize(file_pointnums)
    bytes_cubepos = os.path.getsize(file_cubepos)

    print(
        'Total file size (Bytes): {}'.format(bytes_strings +
                                             bytes_strings_head +
                                             bytes_strings_hyper +
                                             bytes_pointnums + bytes_cubepos))

    print('Strings (Bytes): {}'.format(bytes_strings))
    print('Strings head (Bytes): {}'.format(bytes_strings_head))
    print('Strings hyper (Bytes): {}'.format(bytes_strings_hyper))
    print('Numbers of points (Bytes): {}'.format(bytes_pointnums))
    print('Positions of cubes (Bytes): {}'.format(bytes_cubepos))

    return bytes_strings, bytes_strings_head, bytes_strings_hyper, bytes_pointnums, bytes_cubepos
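Reading `.strings_head` back has to mirror the layout written above: an int16 string count, one uint8 per string packing max/min, a variable-length field per string (uint8, with 0 escaping to an int16), and the int16 shape. A minimal decoding sketch, under the assumptions that `y_max_vs`/`y_min_vs` have one entry per string and that `y_shape` has five int16 entries:

import numpy as np

def read_strings_head(path):
    # Hypothetical reader mirroring the .strings_head layout written above.
    with open(path, 'rb') as f:
        num_strings = int(np.frombuffer(f.read(2), dtype=np.int16)[0])
        y_max_min_vs = np.frombuffer(f.read(num_strings), dtype=np.uint8)
        y_strings_lens = []
        for _ in range(num_strings):
            l = int(np.frombuffer(f.read(1), dtype=np.uint8)[0])
            if l == 0:  # escape marker: the real length follows as int16
                l = int(np.frombuffer(f.read(2), dtype=np.int16)[0])
            y_strings_lens.append(l)
        y_shape = np.frombuffer(f.read(2 * 5), dtype=np.int16)
    return num_strings, y_max_min_vs, y_strings_lens, y_shape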