Example 1
import shutil

from torch.utils.data import DataLoader

from kaolin.datasets import shapenet

# SHAPENET_ROOT and CACHE_DIR are path constants defined elsewhere in the
# surrounding test module.


def test_Combination():
    dataset_params = {
        'root': SHAPENET_ROOT,
        'categories': ['can'],
        'train': True,
        'split': .8,
    }
    # images = shapenet.ShapeNet_Images(root=SHAPENET_ROOT, cache_dir=CACHE_DIR,
    #                                   categories=['bowl'], views=1, train=True, split=.8)
    meshes = shapenet.ShapeNet_Meshes(**dataset_params)
    voxels = shapenet.ShapeNet_Voxels(**dataset_params,
                                      cache_dir=CACHE_DIR,
                                      resolutions=[32])
    sdf_points = shapenet.ShapeNet_SDF_Points(**dataset_params,
                                              cache_dir=CACHE_DIR,
                                              smoothing_iterations=3,
                                              num_points=500,
                                              occ=False,
                                              sample_box=True)

    points = shapenet.ShapeNet_Points(**dataset_params,
                                      cache_dir=CACHE_DIR,
                                      resolution=100,
                                      smoothing_iterations=3,
                                      num_points=500,
                                      surface=False,
                                      normals=True)

    dataset = shapenet.ShapeNet_Combination([voxels, sdf_points, points])

    for obj in dataset:
        obj_data = obj['data']
        assert set(obj_data['sdf_points'].shape) == set([500, 3])
        assert set(obj_data['sdf_distances'].shape) == set([500])
        assert set(obj_data['32'].shape) == set([32, 32, 32])
        assert set(obj_data['points'].shape) == set([500, 3])
        assert set(obj_data['normals'].shape) == set([500, 3])

    train_loader = DataLoader(dataset,
                              batch_size=2,
                              shuffle=True,
                              num_workers=8)
    for batch in train_loader:
        assert set(batch['data']['sdf_points'].shape) == set([2, 500, 3])
        assert set(batch['data']['sdf_distances'].shape) == set([2, 500])
        assert set(batch['data']['32'].shape) == set([2, 32, 32, 32])
        assert set(batch['data']['points'].shape) == set([2, 500, 3])
        assert set(batch['data']['normals'].shape) == set([2, 500, 3])

    shutil.rmtree('tests/datasets/cache/sdf_points')
    shutil.rmtree('tests/datasets/cache/points')
    shutil.rmtree('tests/datasets/cache/voxels')
    shutil.rmtree('tests/datasets/cache/surface_meshes')
Example 2
# Tail of the (truncated) points_set_valid constructor, presumably a
# shapenet.ShapeNet_Points(...) call whose opening arguments are not shown.
    split=.7,
    num_points=5000)
images_set_valid = shapenet.ShapeNet_Images(
    root='/media/archana/Local/Datasets/ShapeNetRendering',
    categories=args.categories,
    train=False,
    split=.7,
    views=1,
    transform=preprocess)
meshes_set_valid = shapenet.ShapeNet_Meshes(
    root='/media/archana/Local/Datasets/ShapeNetCore.v1.zip/ShapeNetCore.v1',
    categories=args.categories,
    train=False,
    split=.7)

valid_set = shapenet.ShapeNet_Combination(
    [points_set_valid, images_set_valid, meshes_set_valid])
dataloader_val = DataLoader(valid_set,
                            batch_size=args.batchsize,
                            shuffle=False,
                            collate_fn=collate_fn,
                            num_workers=0)

# Model
mesh = kal.rep.TriangleMesh.from_obj('386.obj', enable_adjacency=True)
mesh.cuda()
normalize_adj(mesh)

initial_verts = mesh.vertices.clone()
camera_fov_y = 49.13434207744484 * np.pi / 180.0
cam_proj = perspectiveprojectionnp(camera_fov_y, 1.0)
cam_proj = torch.FloatTensor(cam_proj).cuda()
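The collate_fn passed to the DataLoader above is not defined in this excerpt. Below is a minimal sketch of the kind of collate function such a combined dataset typically needs; it assumes each item is a dict of the form {'attributes': ..., 'data': {...}} (as in Example 1), stacks fixed-size tensors into a batch, and keeps variable-size entries such as mesh vertices and faces as lists. All names and details here are illustrative, not taken from the original script.

import torch

def collate_fn(batch):
    """Hypothetical collate function for items from ShapeNet_Combination."""
    collated = {'attributes': [item['attributes'] for item in batch],
                'data': {}}
    for key in batch[0]['data']:
        values = [item['data'][key] for item in batch]
        # Stack only when every entry is a tensor of identical shape;
        # otherwise (e.g. per-mesh vertex/face tensors) keep the raw list.
        if torch.is_tensor(values[0]) and all(
                v.shape == values[0].shape for v in values):
            collated['data'][key] = torch.stack(values, dim=0)
        else:
            collated['data'][key] = values
    return collated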
Example 3
# Data
mesh_set = shapenet.ShapeNet_Surface_Meshes(root=args.shapenet_root,
                                            cache_dir=args.cache_dir,
                                            categories=args.categories,
                                            resolution=32,
                                            train=False,
                                            split=.7,
                                            mode='Tri')
voxel_set = shapenet.ShapeNet_Voxels(root=args.shapenet_root,
                                     cache_dir=args.cache_dir,
                                     categories=args.categories,
                                     train=False,
                                     resolutions=[32],
                                     split=.7)
valid_set = shapenet.ShapeNet_Combination([mesh_set, voxel_set])

encoder = MeshEncoder(30).to(args.device)
decoder = VoxelDecoder(30).to(args.device)

logdir = f'log/{args.expid}/AutoEncoder'
checkpoint = torch.load(os.path.join(logdir, 'best.ckpt'))
encoder.load_state_dict(checkpoint['encoder'])
decoder.load_state_dict(checkpoint['decoder'])

loss_epoch = 0.
num_batches = 0
num_items = 0

encoder.eval()
decoder.eval()
with torch.no_grad():
    ...  # evaluation loop truncated in this excerpt
Example 4
# Tail of the (truncated) points_set constructor, presumably a
# shapenet.ShapeNet_Points(...) call whose opening arguments are not shown.
                                      num_points=3000)
images_set = shapenet.ShapeNet_Images(root=args.shapenet_images_root,
                                      categories=args.categories,
                                      train=True,
                                      split=.7,
                                      views=23,
                                      transform=preprocess)
if args.latent_loss:
    mesh_set = shapenet.ShapeNet_Surface_Meshes(root=args.shapenet_root,
                                                cache_dir=args.cache_dir,
                                                categories=args.categories,
                                                resolution=100,
                                                train=True,
                                                split=.7,
                                                mode='Tri')
    train_set = shapenet.ShapeNet_Combination(
        [points_set, images_set, mesh_set])
    dataloader_train = DataLoader(train_set,
                                  batch_size=args.batch_size,
                                  shuffle=True,
                                  collate_fn=collate_fn,
                                  num_workers=8)
else:
    train_set = shapenet.ShapeNet_Combination([points_set, images_set])
    dataloader_train = DataLoader(train_set,
                                  batch_size=args.batch_size,
                                  shuffle=True,
                                  num_workers=8)

points_set_valid = shapenet.ShapeNet_Points(root=args.shapenet_root,
                                            cache_dir=args.cache_dir,
                                            categories=args.categories,
Example 5
# Tail of the (truncated) sdf_set constructor, presumably a
# shapenet.ShapeNet_SDF_Points(...) call whose opening arguments are not shown.
    num_points=3000)
point_set = shapenet.ShapeNet_Points(
    root='/media/archana/Local/Datasets/ShapeNetCore.v1.zip/ShapeNetCore.v1',
    cache_dir='/media/archana/Local/Datasets/ShapeNetCore.v1.zip/cache/',
    categories=args.categories,
    train=True,
    split=.7,
    num_points=3000)
images_set = shapenet.ShapeNet_Images(
    root='/media/archana/Local/Datasets/ShapeNetRendering',
    categories=args.categories,
    train=True,
    split=.7,
    views=23,
    transform=preprocess)
train_set = shapenet.ShapeNet_Combination([sdf_set, images_set, point_set])

dataloader_train = DataLoader(train_set,
                              batch_size=args.batchsize,
                              shuffle=True,
                              num_workers=8)

sdf_set_valid = shapenet.ShapeNet_SDF_Points(
    root='/media/archana/Local/Datasets/ShapeNetCore.v1.zip/ShapeNetCore.v1',
    cache_dir='/media/archana/Local/Datasets/ShapeNetCore.v1.zip/cache/',
    categories=args.categories,
    train=False,
    split=.2,
    num_points=1000)
point_set_valid = shapenet.ShapeNet_Points(
    root='/media/archana/Local/Datasets/ShapeNetCore.v1.zip/ShapeNetCore.v1',