def property_reference(run_name="property_reference"):
    """Write the same cube/cylinder visualization as ``small_scale``, but
    avoid storing the unchanging ``vertex_positions`` and
    ``vertex_normals`` tensors again at every step.

    After step 0 those two entries are set to the integer 0 in the summary
    dict, which makes ``summary.add_3d`` reference the previously written
    data instead of duplicating it.
    """
    logdir = os.path.join(BASE_LOGDIR, run_name)
    writer = tf.summary.create_file_writer(logdir)

    cube = o3d.geometry.TriangleMesh.create_box(1, 2, 4, create_uv_map=True)
    cube.compute_vertex_normals()
    cylinder = o3d.geometry.TriangleMesh.create_cylinder(radius=1.0,
                                                         height=2.0,
                                                         resolution=20,
                                                         split=4,
                                                         create_uv_map=True)
    cylinder.compute_vertex_normals()

    # One flat color per step: red, green, blue.
    step_colors = ((1.0, 0.0, 0.0), (0.0, 1.0, 0.0), (0.0, 0.0, 1.0))

    with writer.as_default():
        for step, color in enumerate(step_colors):
            for tag, geometry in (('cube', cube), ('cylinder', cylinder)):
                geometry.paint_uniform_color(color)
                geometry_summary = to_dict_batch([geometry])
                if step > 0:
                    # 0 = "re-use the tensors written at an earlier step".
                    geometry_summary['vertex_positions'] = 0
                    geometry_summary['vertex_normals'] = 0
                summary.add_3d(tag, geometry_summary, step=step,
                               logdir=logdir)
def small_scale(run_name="small_scale"):
    """Basic demo: a cube and a cylinder with vertex normals, repainted a
    different uniform color at each of three steps.
    """
    logdir = os.path.join(BASE_LOGDIR, run_name)
    writer = tf.summary.create_file_writer(logdir)

    cube = o3d.geometry.TriangleMesh.create_box(1, 2, 4, create_uv_map=True)
    cube.compute_vertex_normals()
    cylinder = o3d.geometry.TriangleMesh.create_cylinder(radius=1.0,
                                                         height=2.0,
                                                         resolution=20,
                                                         split=4,
                                                         create_uv_map=True)
    cylinder.compute_vertex_normals()

    # One flat color per step: red, green, blue.
    step_colors = ((1.0, 0.0, 0.0), (0.0, 1.0, 0.0), (0.0, 0.0, 1.0))

    with writer.as_default():
        for step, color in enumerate(step_colors):
            for tag, geometry in (('cube', cube), ('cylinder', cylinder)):
                geometry.paint_uniform_color(color)
                summary.add_3d(tag, to_dict_batch([geometry]), step=step,
                               logdir=logdir)
def demo_scene():
    """Write the demo_scene.py example showing rich PBR materials as a
    summary.

    Each geometry's vertex/triangle attributes and (when present) material
    properties are flattened into a single summary dict using the
    ``vertex_``/``triangle_``/``material_*`` key prefixes expected by
    ``summary.add_3d``.
    """
    import demo_scene
    demo_scene.check_for_required_assets()
    geoms = demo_scene.create_scene()
    logdir = os.path.join(BASE_LOGDIR, 'demo_scene')
    writer = tf.summary.create_file_writer(logdir)
    for geom_data in geoms:
        geom = geom_data["geometry"]
        # Flatten per-vertex and per-triangle attributes.
        summary_3d = {"vertex_" + name: t for name, t in geom.vertex.items()}
        summary_3d.update(
            {"triangle_" + name: t for name, t in geom.triangle.items()})
        if geom.has_valid_material():
            mat = geom.material
            summary_3d["material_name"] = mat.material_name
            summary_3d.update({
                "material_scalar_" + name: v
                for name, v in mat.scalar_properties.items()
            })
            summary_3d.update({
                "material_vector_" + name: v
                for name, v in mat.vector_properties.items()
            })
            summary_3d.update({
                "material_texture_map_" + name: v
                for name, v in mat.texture_maps.items()
            })
        with writer.as_default():
            summary.add_3d(geom_data["name"], summary_3d, step=0,
                           logdir=logdir)
def with_material(model_dir=MODEL_DIR):
    """Read an obj model from a directory and write as a TensorBoard summary.

    The directory name doubles as the model name; any of the texture files
    ``albedo.png``, ``normal.png``, ``ao.png``, ``metallic.png`` and
    ``roughness.png`` found next to the .obj are attached as material
    texture maps.
    """
    model_name = os.path.basename(model_dir)
    logdir = os.path.join(BASE_LOGDIR, model_name)
    model_path = os.path.join(model_dir, model_name + ".obj")
    model = o3d.t.geometry.TriangleMesh.from_legacy(
        o3d.io.read_triangle_mesh(model_path))
    summary_3d = {
        "vertex_positions": model.vertex["positions"],
        "vertex_normals": model.vertex["normals"],
        "triangle_texture_uvs": model.triangle["texture_uvs"],
        "triangle_indices": model.triangle["indices"],
        "material_name": "defaultLit"
    }

    # File-name -> Open3D material property name (only "ao" differs).
    names_to_o3dprop = {"ao": "ambient_occlusion"}
    for tex_name in ("albedo", "normal", "ao", "metallic", "roughness"):
        tex_file = os.path.join(model_dir, tex_name + ".png")
        if not os.path.exists(tex_file):
            continue
        prop = names_to_o3dprop.get(tex_name, tex_name)
        summary_3d["material_texture_map_" + prop] = (
            o3d.t.io.read_image(tex_file))
        if prop == "metallic":
            # A metallic texture only takes effect with a non-zero scalar.
            summary_3d.update(material_scalar_metallic=1.0)

    writer = tf.summary.create_file_writer(logdir)
    with writer.as_default():
        summary.add_3d(model_name, summary_3d, step=0, logdir=logdir)
def large_scale(n_steps=16,
                batch_size=1,
                base_resolution=200,
                run_name="large_scale"):
    """Generate a large scale summary.

    Geometry resolution increases linearly with step. Each element in a
    batch is painted a different color.
    """
    logdir = os.path.join(BASE_LOGDIR, run_name)
    writer = tf.summary.create_file_writer(logdir)

    # Evenly spaced hues over half a period, one per batch element.
    phases = [b * np.pi / batch_size for b in range(batch_size)]
    colors = [((1 + np.sin(t)) / 2, (1 + np.cos(t)) / 2, t / np.pi)
              for t in phases]

    with writer.as_default():
        for step in range(n_steps):
            resolution = base_resolution * (step + 1)
            cylinder = o3d.geometry.TriangleMesh.create_cylinder(
                radius=1.0, height=2.0, resolution=resolution, split=4)
            cylinder.compute_vertex_normals()
            # NOTE(review): ``raidus`` matches the (misspelled) keyword of
            # the Open3D create_moebius binding — confirm against the
            # installed Open3D version before renaming it to ``radius``.
            moebius = o3d.geometry.TriangleMesh.create_moebius(
                length_split=int(3.5 * resolution),
                width_split=int(0.75 * resolution),
                twists=1,
                raidus=1,
                flatness=1,
                width=1,
                scale=1)
            moebius.compute_vertex_normals()
            for tag, prototype in (('cylinder', cylinder),
                                   ('moebius', moebius)):
                batch = []
                for color in colors:
                    mesh = copy.deepcopy(prototype)
                    mesh.paint_uniform_color(color)
                    batch.append(mesh)
                summary.add_3d(tag,
                               to_dict_batch(batch),
                               step=step,
                               logdir=logdir,
                               max_outputs=batch_size)
def test_tensorflow_summary(geometry_data, tmp_path):
    """Test writing summary from TensorFlow.

    Writes cube (mesh), point-cloud and line-set summaries for 3 steps,
    randomly converting property tensors between TF / Open3D / NumPy, then
    verifies the expected event file and msgpack plugin files on disk.
    """
    tf = pytest.importorskip("tensorflow")
    logdir = str(tmp_path)
    writer = tf.summary.create_file_writer(logdir)
    rng = np.random.default_rng()
    # Each property is pushed through one of these converters at random so
    # that add_3d is exercised with every accepted tensor type.
    tensor_converter = (tf.convert_to_tensor, o3d.core.Tensor.from_numpy,
                        np.array)

    cube, material = geometry_data['cube'], geometry_data['material']
    cube_ls, material_ls = geometry_data['cube_ls'], geometry_data[
        'material_ls']
    colors = geometry_data['colors']
    max_outputs = geometry_data['max_outputs']
    with writer.as_default():
        for step in range(3):
            cube[0].paint_uniform_color(colors[step][0])
            cube[1].paint_uniform_color(colors[step][1])
            cube_summary = to_dict_batch(cube)
            cube_summary.update(material)
            # Randomly convert to TF, Open3D, Numpy tensors, or use property
            # reference
            if step > 0:
                # 0 = reference the tensors already written at step 0.
                cube_summary['vertex_positions'] = 0
                cube_summary['vertex_normals'] = 0
                cube_summary['vertex_colors'] = rng.choice(tensor_converter)(
                    cube_summary['vertex_colors'])
            else:
                for prop, tensor in cube_summary.items():
                    # skip material scalar and vector props
                    if (not prop.startswith("material_") or
                            prop.startswith("material_texture_map_")):
                        cube_summary[prop] = rng.choice(tensor_converter)(
                            tensor)
            summary.add_3d('cube',
                           cube_summary,
                           step=step,
                           logdir=logdir,
                           max_outputs=max_outputs)
            # Dropping triangle and texture-map props leaves point-cloud data.
            for key in tuple(cube_summary):  # Convert to PointCloud
                if key.startswith(('triangle_', 'material_texture_map_')):
                    cube_summary.pop(key)
            summary.add_3d('cube_pcd',
                           cube_summary,
                           step=step,
                           logdir=logdir,
                           max_outputs=max_outputs)
            cube_ls[0].paint_uniform_color(colors[step][0])
            cube_ls[1].paint_uniform_color(colors[step][1])
            cube_ls_summary = to_dict_batch(cube_ls)
            cube_ls_summary.update(material_ls)
            for prop, tensor in cube_ls_summary.items():
                if (not prop.startswith("material_") or
                        prop.startswith("material_texture_map_")):
                    cube_ls_summary[prop] = rng.choice(tensor_converter)(
                        tensor)
            summary.add_3d('cube_ls',
                           cube_ls_summary,
                           step=step,
                           logdir=logdir,
                           max_outputs=max_outputs)

    sleep(0.25)  # msgpack writing disk flush time
    dirpath_ref = [
        logdir,
        os.path.join(logdir, 'plugins'),
        os.path.join(logdir, 'plugins/Open3D')
    ]
    filenames_ref = [['events.out.tfevents.*'], [], ['cube.*.msgpack']]
    dirpath, filenames = [], []
    for dp, unused_dn, fn in os.walk(logdir):
        dirpath.append(dp)
        filenames.append(fn)
    # NOTE(review): ``dirpath[2][0][:20]`` compares only the FIRST CHARACTER
    # of the third directory path (presumably a cross-platform separator
    # workaround) — confirm whether ``dirpath[2][:20]`` was intended.
    assert (dirpath[:2] == dirpath_ref[:2] and
            dirpath[2][0][:20] == dirpath_ref[2][0][:20])
    assert filenames[0][0][:20] == filenames_ref[0][0][:20]
    assert set(x.split('.')[0] for x in filenames[2]) == set(
        ('cube', 'cube_pcd', 'cube_ls'))
    # Fixed: the original asserted on the constant ``filenames_ref`` (a
    # tautology). Check the actual plugin files written to disk instead.
    assert all(x[-8:] == '.msgpack' for x in filenames[2])
    # Note: The event file written during this test cannot be reliably verified
    # in the same Python process, since it's usually buffered by GFile / Python
    # / OS and written to disk in increments of the filesystem blocksize.
    # Complete write is guaranteed after Python has exited.
    shutil.rmtree(logdir)