def test_Meshes():
    meshes1 = shapenet.ShapeNet_Meshes(root=SHAPENET_ROOT, categories=['can'],
                                       train=True, split=.7)
    assert len(meshes1) > 0
    for mesh in meshes1:
        assert Path(mesh['attributes']['path']).is_file()
        assert mesh['data']['vertices'].shape[0] > 0

    meshes2 = shapenet.ShapeNet_Meshes(root=SHAPENET_ROOT, categories=['can', 'bowl'],
                                       train=True, split=.7)
    assert len(meshes2) > len(meshes1)
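# A possible companion check, not part of the original tests: assuming the
# `split` fraction partitions each category into train/test subsets (the
# training snippets below load the complementary split with train=False,
# split=.7), the test portion of the same category should be the smaller one.
# The assertion below encodes that assumption; it is a sketch, not an
# existing test.
def test_Meshes_split():
    train_set = shapenet.ShapeNet_Meshes(root=SHAPENET_ROOT, categories=['can'],
                                         train=True, split=.7)
    test_set = shapenet.ShapeNet_Meshes(root=SHAPENET_ROOT, categories=['can'],
                                        train=False, split=.7)
    assert len(train_set) > len(test_set)  # a .7 split leaves the larger share for training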
def test_Combination():
    dataset_params = {
        'root': SHAPENET_ROOT,
        'categories': ['can'],
        'train': True,
        'split': .8,
    }
    # images = shapenet.ShapeNet_Images(root=SHAPENET_ROOT, cache_dir=CACHE_DIR,
    #                                   categories=['bowl'], views=1, train=True, split=.8)
    meshes = shapenet.ShapeNet_Meshes(**dataset_params)
    voxels = shapenet.ShapeNet_Voxels(**dataset_params, cache_dir=CACHE_DIR,
                                      resolutions=[32])
    sdf_points = shapenet.ShapeNet_SDF_Points(**dataset_params, cache_dir=CACHE_DIR,
                                              smoothing_iterations=3, num_points=500,
                                              occ=False, sample_box=True)
    points = shapenet.ShapeNet_Points(**dataset_params, cache_dir=CACHE_DIR,
                                      resolution=100, smoothing_iterations=3,
                                      num_points=500, surface=False, normals=True)

    dataset = shapenet.ShapeNet_Combination([voxels, sdf_points, points])

    for obj in dataset:
        obj_data = obj['data']
        assert set(obj_data['sdf_points'].shape) == set([500, 3])
        assert set(obj_data['sdf_distances'].shape) == set([500])
        assert set(obj_data['32'].shape) == set([32, 32, 32])
        assert set(obj_data['points'].shape) == set([500, 3])
        assert set(obj_data['normals'].shape) == set([500, 3])

    train_loader = DataLoader(dataset, batch_size=2, shuffle=True, num_workers=8)
    for batch in train_loader:
        assert set(batch['data']['sdf_points'].shape) == set([2, 500, 3])
        assert set(batch['data']['sdf_distances'].shape) == set([2, 500])
        assert set(batch['data']['32'].shape) == set([2, 32, 32, 32])
        assert set(batch['data']['points'].shape) == set([2, 500, 3])
        assert set(batch['data']['normals'].shape) == set([2, 500, 3])

    shutil.rmtree('tests/datasets/cache/sdf_points')
    shutil.rmtree('tests/datasets/cache/points')
    shutil.rmtree('tests/datasets/cache/voxels')
    shutil.rmtree('tests/datasets/cache/surface_meshes')
points_set_valid = shapenet.ShapeNet_Points(
    root='/media/archana/Local/Datasets/ShapeNetCore.v1.zip/ShapeNetCore.v1',
    cache_dir='/media/archana/Local/Datasets/ShapeNetCore.v1.zip/cache/',
    categories=args.categories, train=False, split=.7, num_points=5000)
images_set_valid = shapenet.ShapeNet_Images(
    root='/media/archana/Local/Datasets/ShapeNetRendering',
    categories=args.categories, train=False, split=.7, views=1,
    transform=preprocess)
meshes_set_valid = shapenet.ShapeNet_Meshes(
    root='/media/archana/Local/Datasets/ShapeNetCore.v1.zip/ShapeNetCore.v1',
    categories=args.categories, train=False, split=.7)
valid_set = shapenet.ShapeNet_Combination(
    [points_set_valid, images_set_valid, meshes_set_valid])
dataloader_val = DataLoader(valid_set, batch_size=args.batchsize, shuffle=False,
                            collate_fn=collate_fn, num_workers=0)

# Model
mesh = kal.rep.TriangleMesh.from_obj('386.obj', enable_adjacency=True)
mesh.cuda()
normalize_adj(mesh)
# Data
points_set_valid = shapenet.ShapeNet_Points(
    root=args.shapenet_root, cache_dir=args.cache_dir,
    categories=args.categories, train=False, split=.7, num_points=5000)
images_set_valid = shapenet.ShapeNet_Images(
    root=args.shapenet_images_root, categories=args.categories,
    train=False, split=.7, views=1, transform=preprocess)
meshes_set_valid = shapenet.ShapeNet_Meshes(
    root=args.shapenet_root, categories=args.categories, train=False, split=.7)
valid_set = shapenet.ShapeNet_Combination(
    [points_set_valid, images_set_valid, meshes_set_valid])
dataloader_val = DataLoader(valid_set, batch_size=args.batch_size,
                            collate_fn=collate_fn, shuffle=False, num_workers=8)

# Model
meshes = setup_meshes(filename='meshes/386.obj', device=args.device)
encoders = [Encoder().to(args.device) for i in range(3)]
mesh_update_kernels = [963, 1091, 1091]
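# Both validation-set snippets above pass `collate_fn=collate_fn` to the
# DataLoader without showing its definition. Below is a minimal, hypothetical
# sketch of what such a collate might look like, assuming each sample is a
# dict of the form {'data': {...}, 'attributes': {...}} (as in the tests
# above) and that mesh tensors such as vertices and faces vary in size and so
# cannot be stacked. This is an illustration only, not the collate_fn used by
# the original scripts.
import torch

def collate_fn(samples):
    """Stack same-shaped tensors into a batch; keep ragged ones as lists."""
    batch = {'data': {}, 'attributes': [s.get('attributes') for s in samples]}
    for key in samples[0]['data']:
        values = [s['data'][key] for s in samples]
        if torch.is_tensor(values[0]) and len({v.shape for v in values}) == 1:
            batch['data'][key] = torch.stack(values)  # uniform shape: stack
        else:
            batch['data'][key] = values  # ragged (e.g. vertices/faces): keep as list
    return batch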