def main():
    """Register every ``*.3ds``/``*.3DS`` model under ``args.models_dir``.

    First does a read-only pass that prints the Shape rows matching each
    model file (a sanity check of the DB contents), then re-initializes
    Brender per shape and registers it via ``register_shape``.
    """
    paths = []
    for ext in ('*.3ds', '*.3DS'):
        paths.extend(args.models_dir.glob(ext))

    # Sanity-check pass: show which DB shapes match each model file.
    for path in paths:
        with session_scope() as sess:
            shapes = (sess.query(models.Shape)
                      .filter_by(source_id=path.name)
                      .all())
            # .all() already returns a list; no copy needed.
            print(shapes)

    app = brender.Brender()
    pbar = tqdm(paths)
    for path in pbar:
        pbar.set_description(f'{path}')
        with session_scope() as sess:
            shapes = (sess.query(models.Shape)
                      .filter_by(source_id=path.name)
                      .all())
            if not shapes:
                continue
            for shape in shapes:
                # Fresh Blender state per shape so registrations cannot
                # leak objects into each other's scenes.
                app.init()
                # NOTE: the original wrapped this in a no-op
                # ``try/except Exception: raise`` — removed; exceptions
                # propagate identically without it.
                register_shape(app, shape, path)
def main(out_dir):
    """Render every fetched exemplar/shape pair into *out_dir*.

    Materials are grouped by substance once up front; pairs lacking a
    clean segment map are skipped. Brender is re-initialized per pair.
    """
    out_dir = Path(out_dir)
    app = brender.Brender()

    mats_by_substance = defaultdict(list)
    with session_scope() as sess:
        # Group the enabled materials by their substance label.
        for mat in sess.query(Material).filter_by(enabled=True).all():
            mats_by_substance[mat.substance].append(mat)

        pairs, count = controllers.fetch_pairs(
            sess, by_shape=True,
            order_by=ExemplarShapePair.distance.asc(),
        )
        print(f'Fetched {len(pairs)} pairs. '
              f'align_dist_thres = {config.ALIGN_DIST_THRES}')

        progress = tqdm(pairs)
        for pair in progress:
            progress.set_description(f'Pair {pair.id}')
            # Skip pairs without a clean segment map rendering input.
            if not pair.data_exists(config.PAIR_SHAPE_CLEAN_SEGMENT_MAP_NAME):
                continue
            app.init()
            render_pair(app, pair, mats_by_substance, out_dir)
def main():
    """Render a 'bmps' preview image for every MDL material.

    Loads a prepared .blend file containing two preset scenes (one for
    solid materials, one for cloth-like ones), applies each material to
    the matching model, renders, shows the result in visdom, and saves
    EXR + PNG previews alongside the material's data.
    """
    with session_scope() as sess:
        # Only MDL-type materials get this preview, in id order.
        materials = (sess.query(models.Material)
                     .filter_by(type=models.MaterialType.MDL)
                     .order_by(models.Material.id.asc())
                     .all())
        app = brender.Brender()
        # do_reset=False: keep Blender's state so the .blend opened below
        # survives initialization.
        app.init(do_reset=False)
        bpy.ops.wm.open_mainfile(filepath=str(scene_path))
        solid_scene = Scene(app, (1000, 1000),
                            bscene=bpy.data.scenes['SolidMaterialScene'],
                            aa_samples=96)
        cloth_scene = Scene(app, (1000, 1000), aa_samples=96,
                            bscene=bpy.data.scenes['ClothMaterialScene'])
        # Both preset scenes share the same camera object.
        camera = Camera(bpy.data.objects['Camera'])
        solid_scene.set_active_camera(camera)
        cloth_scene.set_active_camera(camera)
        pbar = tqdm(materials)
        for material in pbar:
            pbar.set_description(material.name)
            # Resume support: skip materials already previewed.
            if material.data_exists('previews/bmps.png'):
                continue
            # Pick the scene/model appropriate for the substance; unknown
            # substances fall back to the solid setup with a warning.
            if material.substance in {'plastic', 'metal', 'wood',
                                      'polished'}:
                scene = solid_scene
                mesh = Mesh(bpy.data.objects['SolidModel'],
                            name='SolidModel')
            elif material.substance in {'fabric', 'leather'}:
                scene = cloth_scene
                mesh = Mesh(bpy.data.objects['ClothModel'],
                            name='ClothModel')
            else:
                scene = solid_scene
                mesh = Mesh(bpy.data.objects['SolidModel'],
                            name='SolidModel')
                print(f'Unknown material substance {material.substance}')
            # Make the chosen scene current before rendering.
            bpy.context.screen.scene = scene.bobj
            # default_scale appears to be a log2-style knob; 4 maps to a
            # reference scale of 1.0 — TODO confirm against Material model.
            uv_ref_scale = 2 ** (material.default_scale - 4)
            bmat = material_to_brender(material, uv_ref_scale=uv_ref_scale)
            brender.mesh.set_material(mesh.bobj, bmat)
            if bmat.has_uvs:
                mesh.compute_uv_density(base_size=12.0)
            with suppress_stdout():
                rend = scene.render_to_array(format='exr')
            rend_srgb = to_8bit(linear_to_srgb(rend))
            # visdom expects CHW, the rendering is HWC.
            vis.image(rend_srgb.transpose((2, 0, 1)), win='rendering',
                      opts={'title': material.name})
            material.save_data('previews/bmps.exr', rend)
            material.save_data('previews/bmps.png', rend_srgb)
def main():
    """Register every ``*/*.3DS`` and ``*/*.3ds`` model under ``args.models_dir``.

    Re-initializes Brender before each model so registrations do not
    interfere with one another.
    """
    app = brender.Brender()
    paths = list(args.models_dir.glob('*/*.3DS'))
    paths.extend(args.models_dir.glob('*/*.3ds'))
    pbar = tqdm(paths)
    for hm_path in pbar:
        pbar.set_description(f'{hm_path}')
        app.init()
        # NOTE: the original wrapped this call in a no-op
        # ``try/except Exception: raise`` — removed; exceptions propagate
        # identically without it.
        register_shape(app, hm_path)
def main():
    """Construct a Blender scene for an inference result and save a .blend.

    Reads the inference JSON, builds the scene for its pair with envmap
    id 30, optionally re-weights material scores and animates, then saves
    the .blend (packing assets or making paths absolute per flags).
    """
    app = brender.Brender()

    if not args.inference_path.exists():
        print(f' * {args.inference_path!s} does not exist.')
        return

    with args.inference_path.open('r') as fp:
        inference = json.load(fp)

    with session_scope() as sess:
        envmap = sess.query(models.Envmap).get(30)
        mat_by_id = {m.id: m
                     for m in sess.query(models.Material).all()}
        pair = sess.query(ExemplarShapePair).get(inference['pair_id'])

        if args.use_weighted_scores:
            compute_weighted_scores(inference, mat_by_id, sort=True,
                                    force_substances=True)

        app.init()
        scene = blender.construct_inference_scene(
            app, pair, inference, mat_by_id, envmap,
            scene_type=args.type,
            rend_shape=_REND_SHAPE,
            frontal_camera=args.frontal,
            diagonal_camera=args.diagonal,
            add_floor=not args.no_floor)

        if args.animate:
            blender.animate_scene(scene)

        print(f' * Saving blend file to {args.out_path!s}')
        # Either embed all external assets in the .blend, or keep them
        # referenced with absolute paths.
        if args.pack_assets:
            bpy.ops.file.pack_all()
        else:
            bpy.ops.file.make_paths_absolute()
        bpy.ops.wm.save_as_mainfile(filepath=str(args.out_path))

        scene.clear_bmats()
def main():
    """Render an inference result and save a square-cropped sRGB image.

    Loads the inference JSON, fetches its pair (exemplar and shape eagerly
    loaded), derives a square crop from the foreground mask, renders the
    scene, and writes the cropped sRGB image to ``args.out_path``.
    """
    if not args.inference_path.exists():
        print(f'{args.inference_path!s} does not exist.')
        return

    with args.inference_path.open('r') as f:
        inference_dict = json.load(f)

    with session_scope() as sess:
        pair = (sess.query(ExemplarShapePair).options(
            orm.joinedload(models.ExemplarShapePair.exemplar),
            orm.joinedload(models.ExemplarShapePair.shape)).get(
                inference_dict['pair_id']))
        envmap = sess.query(models.Envmap).get(13)
        materials = sess.query(models.Material).all()
        mat_by_id = {m.id: m for m in materials}

        # Build a square crop box around the foreground mask, resized to
        # the render resolution.
        print(f" * Loading {pair.get_data_path(config.PAIR_FG_BBOX_NAME)}")
        fg_mask = pair.load_data(config.PAIR_FG_BBOX_NAME)
        fg_mask = toolbox.images.resize(fg_mask, _REND_SHAPE)
        fg_mask = fg_mask.astype(bool)
        fg_bbox = toolbox.images.mask_bbox(fg_mask)
        crop_bbox = toolbox.images.bbox_make_square(fg_bbox)

        if args.use_weighted_scores:
            compute_weighted_scores(inference_dict, mat_by_id, sort=True)

        app = brender.Brender()
        app.init()
        scene = blender.construct_inference_scene(app, pair, inference_dict,
                                                  mat_by_id, envmap,
                                                  num_samples=128,
                                                  tile_size=(640, 640),
                                                  rend_shape=_REND_SHAPE)
        logger.info(' * Rendering...')
        rend = scene.render_to_array(format='exr')
        # NOTE: removed leftover debug output
        # ``print('aaa', rend.shape, fg_mask.shape)`` that polluted stdout.
        rend_srgb = toolbox.images.linear_to_srgb(rend)
        rend_srgb = toolbox.images.crop_bbox(rend_srgb, crop_bbox)

        print(f" * Saving to {args.out_path!s}")
        toolbox.io.images.save_image(args.out_path, rend_srgb)
def main():
    """Render a studio preview of every material on the Blender monkey."""
    with session_scope() as sess:
        materials = (sess.query(models.Material).order_by(
            models.Material.id.asc()).all())

        # Scene setup: envmap-lit scene with a calibrated orbit camera.
        app = brender.Brender()
        app.init()
        scene = brender.Scene(app, shape=_REND_SHAPE, aa_samples=196)
        scene.set_envmap(_ENVMAP_PATH, scale=5.0)

        rk_camera = cameras.spherical_coord_to_cam(
            60.0,
            azimuth=math.pi / 2 - math.pi / 12,
            elevation=math.pi / 2 - math.pi / 6,
            cam_dist=2.5,
            max_len=_REND_SHAPE[0] / 2)
        camera = brender.CalibratedCamera(
            scene, rk_camera.cam_to_world(), rk_camera.fov)
        scene.set_active_camera(camera)

        # The preview subject is the built-in Suzanne mesh.
        with scene.select():
            mesh = brender.mesh.Monkey(position=(0, 0, 0))
        mesh.enable_smooth_shading()

        progress = tqdm(materials)
        for material in progress:
            progress.set_description(material.name)
            uv_ref_scale = 2 ** (material.default_scale - 4)
            bmat = material_to_brender(material, uv_ref_scale=uv_ref_scale)
            brender.mesh.set_material(mesh.bobj, bmat)
            if bmat.has_uvs:
                mesh.compute_uv_density()
            with suppress_stdout():
                rend = scene.render_to_array(format='exr')
            material.save_data('previews/monkey.studio021.exr', rend)
            material.save_data(
                'previews/monkey.studio021.png',
                to_8bit(toolbox.images.linear_to_srgb(rend)))
def main():
    """Lay out the meshes of many inference results on a grid and save a .blend.

    Reads a newline-separated list of inference result names, places each
    pair's mesh at a staggered grid coordinate on a ground plane, and saves
    the assembled scene to ``args.out_path``.
    """
    app = brender.Brender()
    if not args.list_path.exists():
        print(f'{args.list_path!s} does not exist.')
        return
    with args.list_path.open('r') as f:
        file_list = f.read().strip().split('\n')
    inference_paths = [
        Path(args.inference_dir, f'{p}.json') for p in file_list
    ]
    with session_scope() as sess:
        envmap = sess.query(models.Envmap).get(13)
        materials = sess.query(models.Material).all()
        mat_by_id = {m.id: m for m in materials}
        app.init()
        engine = Engine.CYCLES
        scene = brender.Scene(app, shape=_REND_SHAPE, engine=engine,
                              tile_size=(40, 40), aa_samples=128,
                              diffuse_samples=3, specular_samples=3,
                              background_mode=BackgroundMode.ENVMAP,
                              background_color=(1, 1, 1, 1))
        envmap_rotation = (0, 0, (envmap.azimuth + 3 * math.pi / 2))
        scene.set_envmap(envmap.get_data_path('hdr.exr'), scale=1.0,
                         rotation=envmap_rotation)
        # Ground plane; constructing it adds it to the Blender scene —
        # the local binding itself is otherwise unused.
        floor = Plane((0, 0, 0), radius=1000)
        camera = brender.BasicCamera(
            scene,
            position=(0.217, -22.76, 7.49),
            rotation=(0.99, 0, 0),
        )
        scene.set_active_camera(camera)
        # Build staggered (brick-like) grid coordinates. Even rows are
        # shifted by half a cell; a few cells near the top corners are
        # carved out (presumably where the camera/frame would clip them —
        # TODO confirm). The +10 rows of slack keep the list long enough
        # after the carve-outs.
        grid_width = 13
        grid_height = int(math.ceil(len(inference_paths) / grid_width)) + 10
        grid_coords = []
        for r, row in enumerate(
                np.linspace(-grid_height / 2, grid_height / 2, grid_height)):
            offset = 0
            if r % 2 == 0:
                offset = 0.5
            for c, col in enumerate(
                    np.linspace(-grid_width / 2, grid_width / 2, grid_width)):
                if r == 0 and c in {0, 1, grid_width - 1, grid_width - 2}:
                    continue
                if r in {1, 2, 3} and c in {0, grid_width - 1}:
                    continue
                coord = (col + offset, row)
                grid_coords.append(coord)
        # Deterministic shuffle so the layout is stable across runs.
        random.seed(1000)
        random.shuffle(inference_paths)
        pbar = tqdm(inference_paths)
        for i, inference_path in enumerate(pbar):
            with inference_path.open('r') as f:
                inference_dict = json.load(f)
            pair = sess.query(ExemplarShapePair).get(inference_dict['pair_id'])
            pbar.set_description(f"Adding pair {pair.id}")
            # 1.5 is the grid cell pitch in scene units.
            location = (1.5 * grid_coords[i][0], 1.5 * grid_coords[i][1], 0)
            mesh = add_pair_mesh(scene, pair, inference_dict, mat_by_id)
            scene.meshes.append(mesh)
            mesh.bobj.location = location
            # Drop the mesh so its lowest point rests on the floor plane.
            mesh.bobj.location[2] -= mesh.compute_min_pos()
        tqdm.write(f'Saving blend file to {args.out_path!s}')
        # bpy.ops.file.make_paths_absolute()
        if args.pack_assets:
            bpy.ops.file.pack_all()
        bpy.ops.wm.save_as_mainfile(filepath=str(args.out_path))
def main(client_id, epoch_start, rends_per_epoch, dry_run, max_dist, host,
         port, required_substances):
    """Endless rendering worker: render random pairs and ship results out.

    Each epoch reloads the eligible pairs/materials/envmaps from the DB,
    then renders up to 1000 randomly chosen pairs via ``process_pair``,
    sending results to the collector service at ``host:port``.

    Args:
        client_id: Identifier sent to the collector with each result.
        epoch_start: First epoch number (loop counts up from here forever).
        rends_per_epoch: ``num_rends`` forwarded to ``process_pair``.
        dry_run: Forwarded as ``is_dry_run`` to ``process_pair``.
        max_dist: Only pairs with ``distance < max_dist`` are considered.
        host, port: Collector service address.
        required_substances: Optional comma-separated substance names; each
            must be in ``SUBSTANCES``.
    """
    loop = asyncio.get_event_loop()
    http_sess = loop.run_until_complete(make_http_client())
    app = brender.Brender()
    collector_ctx = {
        'host': host,
        'port': port,
        'sess': http_sess,
        'client_id': client_id,
    }
    if required_substances:
        required_substances = set(required_substances.split(','))
        assert all(s in SUBSTANCES for s in required_substances)
    # Runs forever; each iteration is one "epoch" over fresh DB state.
    for epoch in itertools.count(start=epoch_start):
        logger.info('Starting epoch %d', epoch)
        with session_scope() as sess:
            # Eligible pairs: close enough, not excluded on either side,
            # and assigned to a split. Exemplar/shape are eagerly loaded
            # since they are accessed repeatedly below.
            pairs = (sess.query(ExemplarShapePair).join(Shape).join(
                Exemplar).filter(
                    sa.and_(
                        ExemplarShapePair.distance < max_dist,
                        sa.not_(Shape.exclude),
                        sa.not_(Exemplar.exclude),
                        Shape.split_set.isnot(None),
                    )).options(orm.joinedload(ExemplarShapePair.exemplar),
                               orm.joinedload(
                                   ExemplarShapePair.shape)).order_by(
                                       Shape.id.asc()).all())
            materials = sess.query(
                models.Material).filter_by(enabled=True).all()
            envmaps = sess.query(models.Envmap).filter_by(enabled=True).all()
            # Candidate camera angles are taken from the pairs themselves.
            cam_angles = [(p.azimuth, p.elevation) for p in pairs]
            logger.info('Loaded %d pairs and %d camera angles', len(pairs),
                        len(cam_angles))
            mats_by_subst = collections.defaultdict(list)
            for material in materials:
                mats_by_subst[material.substance].append(material)
            envmaps_by_split = {
                'train': [e for e in envmaps if e.split_set == 'train'],
                'validation':
                    [e for e in envmaps if e.split_set == 'validation'],
            }
            # Up to 1000 random draws per epoch (with replacement).
            for i in range(1000):
                pair = random.choice(pairs)
                if not pair.data_exists(
                        config.PAIR_SHAPE_CLEAN_SEGMENT_MAP_NAME):
                    continue
                app.init()
                logger.info('pair %d, shape %s, exemplar %d', pair.id,
                            pair.shape.id, pair.exemplar.id)
                try:
                    loop.run_until_complete(
                        process_pair(
                            app, pair, cam_angles, mats_by_subst,
                            envmaps_by_split=envmaps_by_split,
                            num_rends=rends_per_epoch,
                            is_dry_run=dry_run,
                            epoch=epoch,
                            collector_ctx=collector_ctx,
                            required_substances=required_substances))
                except Exception as e:
                    # Deliberate catch-all: one bad pair must not kill the
                    # long-running worker; log with traceback and continue.
                    logger.exception('Uncaught exception', exc_info=True)
                    continue
def main():
    """Render a per-scale preview of every material on a fixed chair shape.

    Exports shape 4682 to a temporary OBJ, loads it into an envmap-lit
    scene, then renders each material onto it and saves a PNG preview
    named after the material's ``default_scale``.
    """
    with session_scope() as sess:
        materials = (
            sess.query(models.Material)
            # .filter(sa.and_(models.Material.default_scale.isnot(None),
            #                 models.Material.substance == 'fabric'))
            # .filter(models.Material.type == MaterialType.MDL)
            .order_by(models.Material.id.asc()).all())
        shape = sess.query(models.Shape).get(4682)
        # shape = sess.query(models.Shape).get(2333)

        # Initialize Brender and Scene.
        app = brender.Brender()
        app.init()
        scene = brender.Scene(app, shape=_REND_SHAPE, aa_samples=196)
        envmap_rotation = (0, 0, (math.pi + math.pi / 2 - math.pi / 2 -
                                  math.pi / 12))
        scene.set_envmap(_ENVMAP_PATH, scale=2.0, rotation=envmap_rotation)

        # Initialize Camera.
        rk_camera = cameras.spherical_coord_to_cam(
            60.0,
            azimuth=-math.pi / 2 - math.pi / 12,
            elevation=math.pi / 2 - math.pi / 6,
            cam_dist=1.2,
            max_len=_REND_SHAPE[0] / 2)
        camera = brender.CalibratedCamera(
            scene, rk_camera.cam_to_world(), rk_camera.fov)
        scene.set_active_camera(camera)

        # Load shapenet mesh and resize to 1.0 to match Blender size, then
        # round-trip it through a temporary OBJ file for import.
        rk_mesh, _ = shape.load()
        rk_mesh.resize(1)
        with open(_TMP_MESH_PATH, 'w') as f:
            wavefront.save_obj_file(f, rk_mesh)
        mesh = brender.mesh.Mesh.from_obj(scene, _TMP_MESH_PATH)

        # Align the mesh to a camera looking straight at the diffuser. The
        # diffuser for Studio 021 is at azimuth=pi/2, elevation=pi/2.
        # brender.mesh.align_mesh_to_direction(mesh, math.pi / 2, math.pi / 2)

        # with scene.select():
        #     mesh = brender.mesh.Monkey(position=(0, 0, 0))

        pbar = tqdm(materials)
        for material in pbar:
            # default_scale appears to be log2-like with 4 as reference —
            # TODO confirm against the Material model.
            uv_ref_scale = 2**(material.default_scale - 4)
            pbar.set_description(material.name)
            data_name = f'previews/chair_{material.default_scale}.png'
            # Resume support: skip previews already rendered.
            if material.data_exists(data_name):
                continue
            # _, _, uv_density = measure_uv_density(mesh.bobj)
            bmat = material_to_brender(material, uv_ref_scale=uv_ref_scale)
            brender.mesh.set_material(mesh.bobj, bmat)
            if bmat.has_uvs:
                mesh.compute_uv_density()
            with suppress_stdout():
                rend = scene.render_to_array(format='exr')
            # material.save_data(
            #     f'previews/chair.exr',
            #     rend)
            # NOTE(review): debugging aid — overwrites this fixed path on
            # every iteration; verify it is still wanted.
            bpy.ops.wm.save_as_mainfile(filepath='/local1/data/test.blend')
            material.save_data(data_name,
                               to_8bit(toolbox.images.linear_to_srgb(rend)))