def points_to_pnts(name, points, out_folder, include_rgb):
    """Serialize a raw point buffer into a .pnts tile file.

    Parameters
    ----------
    name : bytes
        Octree node name; used to derive the output filename.
    points : buffer of packed points
        Interleaved per-point records: 3 float32 coordinates (12 bytes),
        plus 3 uint8 color bytes when ``include_rgb`` is true.
    out_folder : str
        Destination folder for the .pnts file.
    include_rgb : bool
        Whether each record carries RGB color bytes.

    Returns
    -------
    (int, str or None)
        Number of points written and the output filename,
        or ``(0, None)`` when the buffer holds no points.
    """
    # Record size is 12 bytes of XYZ plus 3 optional RGB bytes.
    count = int(len(points) / (3 * 4 + (3 if include_rgb else 0)))

    if count == 0:
        return 0, None

    # Little-endian float32 positions; uint8 colors only when requested.
    pdt = np.dtype([('X', '<f4'), ('Y', '<f4'), ('Z', '<f4')])
    cdt = np.dtype([('Red', 'u1'), ('Green', 'u1'), ('Blue', 'u1')]) if include_rgb else None

    # Build the 3D Tiles feature table from the raw buffer.
    ft = py3dtiles.feature_table.FeatureTable()
    ft.header = py3dtiles.feature_table.FeatureTableHeader.from_dtype(
        pdt, cdt, count)
    ft.body = py3dtiles.feature_table.FeatureTableBody.from_array(
        ft.header, points)

    # Wrap the feature table in a pnts tile body + header.
    body = py3dtiles.pnts.PntsBody()
    body.feature_table = ft

    tile = py3dtiles.tile.Tile()
    tile.body = body
    tile.header = py3dtiles.pnts.PntsHeader()
    # sync() fills in byte lengths in the header from the body content.
    tile.header.sync(body)

    filename = name_to_filename(out_folder, name, '.pnts')

    # Each node must be written exactly once; a pre-existing file means
    # the caller's bookkeeping is broken. NOTE(review): stripped under -O.
    assert not os.path.exists(filename), '{} already written'.format(filename)

    tile.save_as(filename)

    return count, filename
def write_tileset(in_folder, out_folder, octree_metadata, offset, scale,
                  projection, rotation_matrix, include_rgb):
    """Write the final tileset.json (and root pnts) for a converted point cloud.

    Builds the root tile's 4x4 transform (undoing the rotation, scale and
    offset applied during conversion), aggregates the 8 top-level children
    into a root pnts file, then recursively generates the tileset tree in
    parallel and dumps it as ``<out_folder>/tileset.json``.

    Parameters
    ----------
    in_folder : str
        Input folder (kept for interface compatibility; not read here).
    out_folder : str
        Folder containing the per-node .pnts files; tileset.json goes here.
    octree_metadata : object
        Provides ``aabb`` (2x3 min/max array) and ``spacing``.
    offset, scale : array-like
        Translation and scale applied during conversion, undone in the
        root transform.
    projection : object
        Unused here; kept for interface compatibility.
    rotation_matrix : 4x4 array or None
        Optional rotation applied during conversion.
    include_rgb : bool
        Whether the pnts files carry RGB colors.
    """
    # Compose the root transform: translate(offset) . scale(1/s) . R^-1,
    # stored transposed (column-major) as 3D Tiles expects.
    if rotation_matrix is None:
        transform = np.identity(4)
    else:
        transform = inverse_matrix(rotation_matrix)
    transform = np.dot(transform, scale_matrix(1.0 / scale[0]))
    transform = np.dot(translation_matrix(offset), transform)

    # Build the root node by sub-sampling the 8 top-level child tiles into
    # its grid (previously wrapped in a pointless `if True:` block).
    root_node = Node('', octree_metadata.aabb, octree_metadata.spacing * 2)
    root_node.children = []
    inv_aabb_size = (1.0 / (octree_metadata.aabb[1] - octree_metadata.aabb[0])).astype(np.float32)
    for child in ['0', '1', '2', '3', '4', '5', '6', '7']:
        ondisk_tile = name_to_filename(out_folder, child.encode('ascii'), '.pnts')
        if os.path.exists(ondisk_tile):
            tile_content = TileContentReader.read_file(ondisk_tile)
            fth = tile_content.body.feature_table.header
            xyz = tile_content.body.feature_table.body.positions_arr.view(np.float32).reshape((fth.points_length, 3))
            if include_rgb:
                rgb = tile_content.body.feature_table.body.colors_arr.reshape((fth.points_length, 3))
            else:
                # Grid insertion expects a color array; use black placeholders.
                rgb = np.zeros(xyz.shape, dtype=np.uint8)

            root_node.grid.insert(
                octree_metadata.aabb[0].astype(np.float32),
                inv_aabb_size,
                xyz.copy(),
                rgb)

    pnts_writer.node_to_pnts(''.encode('ascii'), root_node, out_folder, include_rgb)

    # Context manager guarantees worker shutdown even if to_tileset raises
    # (the previous explicit shutdown() leaked processes on error).
    with concurrent.futures.ProcessPoolExecutor() as executor:
        root_tileset = Node.to_tileset(executor, ''.encode('ascii'),
                                       octree_metadata.aabb,
                                       octree_metadata.spacing,
                                       out_folder, scale)

    root_tileset['transform'] = transform.T.reshape(16).tolist()
    # Root replaces its content with children; deeper levels are additive.
    root_tileset['refine'] = 'REPLACE'
    for child in root_tileset['children']:
        child['refine'] = 'ADD'

    tileset = {
        'asset': {
            'version': '1.0',
        },
        # Diagonal of the octree AABB, expressed in meters (hence / scale).
        'geometricError': np.linalg.norm(
            octree_metadata.aabb[1] - octree_metadata.aabb[0]) / scale[0],
        'root': root_tileset,
    }

    with open('{}/tileset.json'.format(out_folder), 'w') as f:
        f.write(json.dumps(tileset))
def remove(self, name):
    """Evict a node from the in-memory store and delete its on-disk file.

    If the node was cached, its payload slot is released and the memory
    accounting updated; otherwise the file must exist on disk.
    """
    entry = self.metadata.pop(name, None)
    filename = name_to_filename(self.folder, name)

    if entry is not None:
        slot = entry[1]
        # Shrink the content accounting by the metadata and payload sizes,
        # then recompute the container overhead from scratch.
        self.memory_size['content'] -= getsizeof(entry)
        self.memory_size['content'] -= len(self.data[slot])
        self.memory_size['container'] = getsizeof(self.data) + getsizeof(self.metadata)
        # Free the payload slot without shifting later indices.
        self.data[slot] = None
    else:
        # Not cached: the node must at least be present on disk.
        assert os.path.exists(filename), '{} should exist'.format(filename)

    if os.path.exists(filename):
        os.remove(filename)
def _remove_all(store):
    """Flush every cached entry of *store* to disk and empty the store.

    Returns a ``(count, bytes_written)`` tuple with the number of entries
    flushed and the total bytes written.
    """
    entry_count = len(store.metadata)
    total_bytes = 0

    # Persist each cached payload under its node's filename.
    for name, meta in store.metadata.items():
        payload = store.data[meta[1]]
        target = name_to_filename(store.folder, name)
        with open(target, 'wb') as out:
            total_bytes += out.write(payload)

    # Reset both containers; everything now lives on disk.
    store.metadata = {}
    store.data = []

    return (entry_count, total_bytes)
def get(self, name, stat_inc=1):
    """Return the serialized data for *name*.

    Served from the in-memory cache when present ('hit'), otherwise read
    from disk ('miss'). Returns ``b''`` for unknown nodes ('new'). The
    matching stats counter is bumped by *stat_inc*.
    """
    entry = self.metadata.get(name, None)
    if entry is not None:
        # Cached in memory: payload lives at the recorded slot.
        self.stats['hit'] += stat_inc
        return self.data[entry[1]]

    filename = name_to_filename(self.folder, name)
    if os.path.exists(filename):
        # Evicted earlier but persisted on disk.
        self.stats['miss'] += stat_inc
        with open(filename, 'rb') as f:
            return f.read()

    # Never seen before.
    self.stats['new'] += stat_inc
    # should we cache this node?
    return b''
def to_tileset(executor, name, parent_aabb, parent_spacing, folder, scale):
    """Recursively build the tileset JSON subtree rooted at node *name*.

    Small children (<100 points) are merged into this tile's pnts file;
    remaining children are processed recursively (in parallel when
    *executor* is given). Oversized subtrees (>100 KB of JSON) are spun
    off into external ``tileset.<name>.json`` files.

    Parameters
    ----------
    executor : concurrent.futures.Executor or None
        Parallelizes child processing at the top level; None below that.
    name : bytes
        Octree node name ('' for the root).
    parent_aabb, parent_spacing
        Parent node geometry used to derive this node's.
    folder : str
        Folder holding the per-node .pnts files.
    scale : array-like
        Conversion scale; geometric errors are divided by scale[0].

    Returns
    -------
    dict
        The 3D Tiles tile object for this node.
    """
    node = node_from_name(name, parent_aabb, parent_spacing)
    aabb = node.aabb
    ondisk_tile = name_to_filename(folder, name, '.pnts')

    xyz, rgb = None, None
    # Read tile's pnts file, if existing, we'll need it for:
    #   - computing the real AABB (instead of the one based on the octree)
    #   - merging this tile's small (<100 points) children
    if os.path.exists(ondisk_tile):
        tile = TileReader().read_file(ondisk_tile)
        fth = tile.body.feature_table.header
        xyz = tile.body.feature_table.body.positions_arr
        if fth.colors != SemanticPoint.NONE:
            rgb = tile.body.feature_table.body.colors_arr
        xyz_float = xyz.view(np.float32).reshape((fth.points_length, 3))
        # update aabb based on real values
        aabb = np.array(
            [np.amin(xyz_float, axis=0), np.amax(xyz_float, axis=0)])

    # geometricError is in meters, so we divide it by the scale
    tileset = {'geometricError': 10 * node.spacing / scale[0]}

    children = []
    tile_needs_rewrite = False
    if os.path.exists(ondisk_tile):
        tileset['content'] = {'uri': os.path.relpath(ondisk_tile, folder)}
    for child in ['0', '1', '2', '3', '4', '5', '6', '7']:
        child_name = '{}{}'.format(
            name.decode('ascii'),
            child).encode('ascii')
        child_ondisk_tile = name_to_filename(folder, child_name, '.pnts')

        if os.path.exists(child_ondisk_tile):
            # See if we should merge this child in tile
            if xyz is not None:
                # Read pnts content
                tile = TileReader().read_file(child_ondisk_tile)
                fth = tile.body.feature_table.header

                # If this child is small enough, merge in the current tile
                if fth.points_length < 100:
                    xyz = np.concatenate(
                        (xyz, tile.body.feature_table.body.positions_arr))
                    # NOTE(review): if the parent tile has no colors (rgb is
                    # None) but a small child does, this concatenate would
                    # raise — presumably all tiles of one run agree on
                    # colors; confirm upstream.
                    if fth.colors != SemanticPoint.NONE:
                        rgb = np.concatenate(
                            (rgb, tile.body.feature_table.body.colors_arr))

                    # update aabb
                    xyz_float = tile.body.feature_table.body.positions_arr.view(
                        np.float32).reshape((fth.points_length, 3))
                    aabb[0] = np.amin(
                        [aabb[0], np.min(xyz_float, axis=0)], axis=0)
                    aabb[1] = np.amax(
                        [aabb[1], np.max(xyz_float, axis=0)], axis=0)

                    tile_needs_rewrite = True
                    # The merged child's file is no longer needed.
                    os.remove(child_ondisk_tile)
                    continue

            # Add child to the to-be-processed list if it hasn't been merged
            if executor is not None:
                # Defer: arguments tuple consumed by executor.map below.
                children += [(child_name, node.aabb, node.spacing, folder, scale)]
            else:
                children += [
                    Node.to_tileset(None, child_name, node.aabb,
                                    node.spacing, folder, scale)
                ]

    # If we merged at least one child tile in the current tile
    # the pnts file needs to be rewritten.
    if tile_needs_rewrite:
        os.remove(ondisk_tile)
        # NOTE(review): same colors assumption here — np.concatenate((xyz,
        # rgb)) requires rgb to be a real array; rgb is not None doubles as
        # the include_rgb flag.
        count, filename = points_to_pnts(name, np.concatenate((xyz, rgb)),
                                         folder, rgb is not None)

    # Axis-aligned bounding box as a 3D Tiles box: center + 3 half-axes.
    center = ((aabb[0] + aabb[1]) * 0.5).tolist()
    half_size = ((aabb[1] - aabb[0]) * 0.5).tolist()
    tileset['boundingVolume'] = {
        'box': [
            center[0], center[1], center[2],
            half_size[0], 0, 0,
            0, half_size[1], 0,
            0, 0, half_size[2]
        ]
    }

    if executor is not None:
        # Top level only: fan the 8 children out across worker processes.
        children = [t for t in executor.map(node_to_tileset, children)]

    if children:
        tileset['children'] = children
    else:
        # Leaf tile: no refinement possible below this point.
        tileset['geometricError'] = 0.0

    if len(name) > 0 and children:
        # Keep the main tileset.json small by externalizing big subtrees.
        if len(json.dumps(tileset)) > 100000:
            tile_root = {
                'asset': {
                    'version': '1.0',
                },
                'refine': 'ADD',
                'geometricError': tileset['geometricError'],
                'root': tileset
            }
            tileset_name = 'tileset.{}.json'.format(name.decode('ascii'))
            with open('{}/{}'.format(folder, tileset_name), 'w') as f:
                f.write(json.dumps(tile_root))
            # The parent now references the external tileset instead.
            tileset['content'] = {'uri': tileset_name}
            tileset['children'] = []

    return tileset
def test_long_name_to_filename_with_short_split():
    """A 12-char name with split_len=2 yields 2-char nested directories."""
    tile_name = b'110542453782'
    result = name_to_filename('work/', tile_name, split_len=2)
    assert result == 'work/11/05/42/45/37/r82'
def test_long_name_to_filename_with_extension():
    """The suffix is appended to the leaf filename."""
    tile_name = b'110542453782'
    result = name_to_filename('work/', tile_name, suffix='.pnts')
    assert result == 'work/11054245/r3782.pnts'
def test_long_name_to_filename():
    """Default split puts the first 8 chars in a directory, rest in the leaf."""
    tile_name = b'110542453782'
    result = name_to_filename('work/', tile_name)
    assert result == 'work/11054245/r3782'
def test_short_name_to_filename():
    """The empty (root) name maps to the bare 'r' leaf."""
    result = name_to_filename('work/', b'')
    assert result == 'work/r'
def to_tileset(executor, name, parent_aabb, parent_spacing, folder, scale):
    """Recursively build the tileset JSON subtree rooted at node *name*.

    Children found on disk are processed recursively (in parallel via
    *executor* at the top level). Oversized subtrees (>100 KB of JSON)
    are written out as external ``tileset.<name>.json`` files.

    Returns the 3D Tiles tile dict for this node.
    """
    # We're working with unscaled / unoffsetted coordinates,
    # so there's no need to apply the inverse transform
    # from the tileset
    node = node_from_name(name, parent_aabb, parent_spacing)
    aabb = node.aabb
    ondisk_tile = name_to_filename(folder, name, '.pnts')
    if os.path.exists(ondisk_tile):
        # Tighten the octree-derived AABB using the actual point positions.
        tile = TileReader().read_file(ondisk_tile)
        fth = tile.body.feature_table.header
        xyz = tile.body.feature_table.body.positions_arr.view(
            np.float32).reshape((fth.points_length, 3))
        aabb = np.array([np.min(xyz, axis=0), np.max(xyz, axis=0)])

    # Axis-aligned bounding box as a 3D Tiles box: center + 3 half-axes.
    center = ((aabb[0] + aabb[1]) * 0.5).tolist()
    half_size = ((aabb[1] - aabb[0]) * 0.5).tolist()
    tile = {
        'boundingVolume': {
            'box': [
                center[0], center[1], center[2],
                half_size[0], 0, 0,
                0, half_size[1], 0,
                0, 0, half_size[2]
            ]
        },
        # geometricError is in meter so cancel scale
        'geometricError': 10 * node.spacing / scale[0],
    }

    children = []
    if os.path.exists(ondisk_tile):
        tile['content'] = {'url': os.path.relpath(ondisk_tile, folder)}
    for child in ['0', '1', '2', '3', '4', '5', '6', '7']:
        child_name = '{}{}'.format(
            name.decode('ascii'),
            child).encode('ascii')
        ondisk_tile = name_to_filename(folder, child_name, '.pnts')
        if os.path.exists(ondisk_tile):
            if executor is not None:
                # Defer: arguments tuple consumed by executor.map below.
                children += [(child_name, node.aabb, node.spacing, folder, scale)]
            else:
                children += [
                    Node.to_tileset(None, child_name, node.aabb,
                                    node.spacing, folder, scale)
                ]

    if executor is not None:
        # Top level only: fan the children out across worker processes.
        children = [t for t in executor.map(node_to_tileset, children)]

    if children:
        tile['children'] = children
    else:
        # Leaf tile: no refinement possible below this point.
        tile['geometricError'] = 0.0

    if len(name) > 0 and children:
        # Keep the main tileset.json small by externalizing big subtrees.
        if len(json.dumps(tile)) > 100000:
            tile_root = {
                'asset': {
                    'version': '1.0',
                    'gltfUpAxis': 'Z',
                },
                'refine': 'ADD',
                'geometricError': tile['geometricError'],
                'root': tile
            }
            tileset_name = 'tileset.{}.json'.format(name.decode('ascii'))
            with open('{}/{}'.format(folder, tileset_name), 'w') as f:
                f.write(json.dumps(tile_root))
            # The parent now references the external tileset instead.
            tile['content'] = {'url': tileset_name}
            tile['children'] = []

    return tile